hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
52ded2739c61152c95b60c3899db69f2c7b06d31
484
py
Python
hipsternet/utils.py
lmxhappy/hipsternet
c505330d3677689c0aa836a3480f513a56ca18ef
[ "Unlicense" ]
null
null
null
hipsternet/utils.py
lmxhappy/hipsternet
c505330d3677689c0aa836a3480f513a56ca18ef
[ "Unlicense" ]
null
null
null
hipsternet/utils.py
lmxhappy/hipsternet
c505330d3677689c0aa836a3480f513a56ca18ef
[ "Unlicense" ]
null
null
null
import numpy as np def exp_running_avg(running, new, gamma=.9): return gamma * running + (1. - gamma) * new def accuracy(y_true, y_pred): return np.mean(y_pred == y_true) def onehot(labels): y = np.zeros([labels.size, np.max(labels) + 1]) y[range(labels.size), labels] = 1. return y def softmax(X): eX = np.exp((X.T - np.max(X, axis=1)).T) return (eX.T / eX.sum(axis=1)).T def sigmoid(X): return 1. / (1 + np.exp(-X))
19.36
52
0.568182
82
484
3.280488
0.378049
0.037175
0.04461
0
0
0
0
0
0
0
0
0.022284
0.258264
484
24
53
20.166667
0.72702
0
0
0
0
0
0
0
0
0
0
0
0
1
0.357143
false
0
0.071429
0.214286
0.785714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
52e19525963f10d9160e5ee71e0b9bb09343adad
8,766
py
Python
argocd_client/models/v1alpha1_resource_status.py
thepabloaguilar/argocd-client
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
[ "BSD-3-Clause" ]
1
2021-09-29T11:57:07.000Z
2021-09-29T11:57:07.000Z
argocd_client/models/v1alpha1_resource_status.py
thepabloaguilar/argocd-client
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
[ "BSD-3-Clause" ]
1
2020-09-09T00:28:57.000Z
2020-09-09T00:28:57.000Z
argocd_client/models/v1alpha1_resource_status.py
thepabloaguilar/argocd-client
a6c4ff268a63ee6715f9f837b9225b798aa6bde2
[ "BSD-3-Clause" ]
2
2020-10-13T18:31:59.000Z
2021-02-15T12:52:33.000Z
# coding: utf-8 """ Consolidate Services Description of all APIs # noqa: E501 The version of the OpenAPI document: version not set Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six from argocd_client.configuration import Configuration class V1alpha1ResourceStatus(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'group': 'str', 'health': 'V1alpha1HealthStatus', 'hook': 'bool', 'kind': 'str', 'name': 'str', 'namespace': 'str', 'requires_pruning': 'bool', 'status': 'str', 'version': 'str' } attribute_map = { 'group': 'group', 'health': 'health', 'hook': 'hook', 'kind': 'kind', 'name': 'name', 'namespace': 'namespace', 'requires_pruning': 'requiresPruning', 'status': 'status', 'version': 'version' } def __init__(self, group=None, health=None, hook=None, kind=None, name=None, namespace=None, requires_pruning=None, status=None, version=None, local_vars_configuration=None): # noqa: E501 """V1alpha1ResourceStatus - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._group = None self._health = None self._hook = None self._kind = None self._name = None self._namespace = None self._requires_pruning = None self._status = None self._version = None self.discriminator = None if group is not None: self.group = group if health is not None: self.health = health if hook is not None: self.hook = hook if kind is not None: self.kind = kind if name is not None: self.name = name if namespace is not None: self.namespace = namespace if requires_pruning is not None: self.requires_pruning = requires_pruning if status is 
not None: self.status = status if version is not None: self.version = version @property def group(self): """Gets the group of this V1alpha1ResourceStatus. # noqa: E501 :return: The group of this V1alpha1ResourceStatus. # noqa: E501 :rtype: str """ return self._group @group.setter def group(self, group): """Sets the group of this V1alpha1ResourceStatus. :param group: The group of this V1alpha1ResourceStatus. # noqa: E501 :type: str """ self._group = group @property def health(self): """Gets the health of this V1alpha1ResourceStatus. # noqa: E501 :return: The health of this V1alpha1ResourceStatus. # noqa: E501 :rtype: V1alpha1HealthStatus """ return self._health @health.setter def health(self, health): """Sets the health of this V1alpha1ResourceStatus. :param health: The health of this V1alpha1ResourceStatus. # noqa: E501 :type: V1alpha1HealthStatus """ self._health = health @property def hook(self): """Gets the hook of this V1alpha1ResourceStatus. # noqa: E501 :return: The hook of this V1alpha1ResourceStatus. # noqa: E501 :rtype: bool """ return self._hook @hook.setter def hook(self, hook): """Sets the hook of this V1alpha1ResourceStatus. :param hook: The hook of this V1alpha1ResourceStatus. # noqa: E501 :type: bool """ self._hook = hook @property def kind(self): """Gets the kind of this V1alpha1ResourceStatus. # noqa: E501 :return: The kind of this V1alpha1ResourceStatus. # noqa: E501 :rtype: str """ return self._kind @kind.setter def kind(self, kind): """Sets the kind of this V1alpha1ResourceStatus. :param kind: The kind of this V1alpha1ResourceStatus. # noqa: E501 :type: str """ self._kind = kind @property def name(self): """Gets the name of this V1alpha1ResourceStatus. # noqa: E501 :return: The name of this V1alpha1ResourceStatus. # noqa: E501 :rtype: str """ return self._name @name.setter def name(self, name): """Sets the name of this V1alpha1ResourceStatus. :param name: The name of this V1alpha1ResourceStatus. 
# noqa: E501 :type: str """ self._name = name @property def namespace(self): """Gets the namespace of this V1alpha1ResourceStatus. # noqa: E501 :return: The namespace of this V1alpha1ResourceStatus. # noqa: E501 :rtype: str """ return self._namespace @namespace.setter def namespace(self, namespace): """Sets the namespace of this V1alpha1ResourceStatus. :param namespace: The namespace of this V1alpha1ResourceStatus. # noqa: E501 :type: str """ self._namespace = namespace @property def requires_pruning(self): """Gets the requires_pruning of this V1alpha1ResourceStatus. # noqa: E501 :return: The requires_pruning of this V1alpha1ResourceStatus. # noqa: E501 :rtype: bool """ return self._requires_pruning @requires_pruning.setter def requires_pruning(self, requires_pruning): """Sets the requires_pruning of this V1alpha1ResourceStatus. :param requires_pruning: The requires_pruning of this V1alpha1ResourceStatus. # noqa: E501 :type: bool """ self._requires_pruning = requires_pruning @property def status(self): """Gets the status of this V1alpha1ResourceStatus. # noqa: E501 :return: The status of this V1alpha1ResourceStatus. # noqa: E501 :rtype: str """ return self._status @status.setter def status(self, status): """Sets the status of this V1alpha1ResourceStatus. :param status: The status of this V1alpha1ResourceStatus. # noqa: E501 :type: str """ self._status = status @property def version(self): """Gets the version of this V1alpha1ResourceStatus. # noqa: E501 :return: The version of this V1alpha1ResourceStatus. # noqa: E501 :rtype: str """ return self._version @version.setter def version(self, version): """Sets the version of this V1alpha1ResourceStatus. :param version: The version of this V1alpha1ResourceStatus. 
# noqa: E501 :type: str """ self._version = version def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, V1alpha1ResourceStatus): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, V1alpha1ResourceStatus): return True return self.to_dict() != other.to_dict()
26.644377
192
0.584987
932
8,766
5.402361
0.127682
0.0429
0.200199
0.171599
0.45144
0.374578
0.363654
0.20854
0.101291
0.015094
0
0.030834
0.322952
8,766
328
193
26.72561
0.817523
0.356491
0
0.089655
1
0
0.0523
0
0
0
0
0
0
1
0.165517
false
0
0.027586
0
0.324138
0.013793
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
52f323da705c537946272cc9405e76ac45887abf
387
py
Python
account/urls/admin.py
shaohuihuang/OnlineJudge
47f612da40c39b4e93a3b0c73da7058a55b2de5e
[ "MIT" ]
1
2020-03-18T04:16:26.000Z
2020-03-18T04:16:26.000Z
account/urls/admin.py
shaohuihuang/OnlineJudge
47f612da40c39b4e93a3b0c73da7058a55b2de5e
[ "MIT" ]
1
2019-05-30T08:09:21.000Z
2019-05-30T08:09:21.000Z
account/urls/admin.py
shaohuihuang/OnlineJudge
47f612da40c39b4e93a3b0c73da7058a55b2de5e
[ "MIT" ]
null
null
null
from django.conf.urls import url from ..views.admin import UserAdminAPI, GenerateUserAPI, ChangeUserpasswordAPI urlpatterns = [ url(r"^user/?$", UserAdminAPI.as_view(), name="user_admin_api"), url(r"^generate_user/?$", GenerateUserAPI.as_view(), name="generate_user_api"), url(r"^change_userpassword/?$", ChangeUserpasswordAPI.as_view(), name="change_userpassword_api"), ]
38.7
101
0.74677
46
387
6.043478
0.456522
0.043165
0.107914
0
0
0
0
0
0
0
0
0
0.095607
387
9
102
43
0.794286
0
0
0
1
0
0.263566
0.118863
0
0
0
0
0
1
0
false
0.285714
0.285714
0
0.285714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
3
5e2e46f9c708401f10b5814c01fac499e33c4c22
939
py
Python
cloudhost/db.py
durgeshrawat/cloudhost
f8952d6d7e9eb979dcca5ce4752099ce780a296f
[ "MIT" ]
2
2021-10-14T16:40:22.000Z
2021-12-17T03:22:38.000Z
cloudhost/db.py
durgeshrawat/cloudhost
f8952d6d7e9eb979dcca5ce4752099ce780a296f
[ "MIT" ]
null
null
null
cloudhost/db.py
durgeshrawat/cloudhost
f8952d6d7e9eb979dcca5ce4752099ce780a296f
[ "MIT" ]
1
2021-10-13T04:56:23.000Z
2021-10-13T04:56:23.000Z
#logging = storing unlimited data import Core import time class Authenticate: def __init__(self,serverurl,username,password): self.query=Core.Console(serverurl,username,password) def createfile(self,filename,data): #filename,content of file self.query.CreateFile(filename,data) return True def upload(self,filename,data): self.query.Update(filename,data) #append data into file return True def recieve(self,filename): allData=self.query.ShowFullData(filename) allData=allData.split('[new]') dictionary={} for i in allData: dictionary[allData.index(i)+1]=i return dictionary #retuen all recieved data in dictionary Format def showfiles(self): return self.query.FileKeys() def clear(self,filename): self.query.ClearFile(filename) #erase all data inside file
28.454545
65
0.649627
108
939
5.611111
0.453704
0.089109
0.082508
0
0
0
0
0
0
0
0
0.001441
0.260916
939
32
66
29.34375
0.871758
0.157614
0
0.090909
0
0
0.006361
0
0
0
0
0
0
1
0.272727
false
0.090909
0.090909
0.045455
0.590909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
3
5e2f6665595b2fe1ad9828c4568ae6f91a9d38b7
1,022
py
Python
src/condor_credd/condor_credmon_oauth/credmon/CredentialMonitors/AbstractCredentialMonitor.py
sridish123/htcondor
481d975fd8602242f6a052aab04e20b0b560db89
[ "Apache-2.0" ]
217
2015-01-08T04:49:42.000Z
2022-03-27T10:11:58.000Z
src/condor_credd/condor_credmon_oauth/credmon/CredentialMonitors/AbstractCredentialMonitor.py
sridish123/htcondor
481d975fd8602242f6a052aab04e20b0b560db89
[ "Apache-2.0" ]
185
2015-05-03T13:26:31.000Z
2022-03-28T03:08:59.000Z
src/condor_credd/condor_credmon_oauth/credmon/CredentialMonitors/AbstractCredentialMonitor.py
sridish123/htcondor
481d975fd8602242f6a052aab04e20b0b560db89
[ "Apache-2.0" ]
133
2015-02-11T09:17:45.000Z
2022-03-31T07:28:54.000Z
import six import sys import os from abc import ABCMeta, abstractmethod from credmon.utils import get_cred_dir import logging import warnings @six.add_metaclass(ABCMeta) class AbstractCredentialMonitor: """ Abstract Credential Monitor class :param cred_dir: The credential directory to scan. :type cred_dir: str """ def __init__(self, cred_dir = None): self.cred_dir = get_cred_dir(cred_dir) self.log = self.get_logger() def get_logger(self): """Returns a child logger object specific to its class""" logger = logging.getLogger(os.path.basename(sys.argv[0]) + '.' + self.__class__.__name__) return logger @abstractmethod def should_renew(self): raise NotImplementedError @abstractmethod def refresh_access_token(self): raise NotImplementedError @abstractmethod def check_access_token(self): raise NotImplementedError @abstractmethod def scan_tokens(self): raise NotImplementedError
24.333333
97
0.702544
120
1,022
5.733333
0.475
0.071221
0.162791
0.18314
0.228198
0.162791
0.162791
0
0
0
0
0.001261
0.22407
1,022
41
98
24.926829
0.86633
0.15362
0
0.296296
0
0
0.001196
0
0
0
0
0
0
1
0.222222
false
0
0.259259
0
0.555556
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
eaace53730d6fc986db83d5e00cb7e8cc9cc5ff9
298
py
Python
code2/day01/demo01.py
picktsh/python
0f758dcdf9eee3580d8f6e2241ef557b6320ef54
[ "MIT" ]
1
2019-12-31T16:44:06.000Z
2019-12-31T16:44:06.000Z
code2/day01/demo01.py
picktsh/python
0f758dcdf9eee3580d8f6e2241ef557b6320ef54
[ "MIT" ]
null
null
null
code2/day01/demo01.py
picktsh/python
0f758dcdf9eee3580d8f6e2241ef557b6320ef54
[ "MIT" ]
1
2022-01-13T10:32:22.000Z
2022-01-13T10:32:22.000Z
""" 课堂练习 获取这个书苑不太冷的网页源代码,并且打印。 URL:https://localprod.pandateacher.com/python-manuscript/crawler-html/spider-men5.0.html 忘记怎么写了?点击下面的"需要帮助"。 """ import requests url = 'https://localprod.pandateacher.com/python-manuscript/crawler-html/spider-men5.0.html' res = requests.get(url) print(res.text)
18.625
92
0.768456
41
298
5.585366
0.585366
0.069869
0.148472
0.253275
0.646288
0.646288
0.646288
0.646288
0.646288
0.646288
0
0.014388
0.067114
298
15
93
19.866667
0.809353
0.459732
0
0
0
0.25
0.54902
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0.25
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
eac8d1b7eaaca169ff8d42ff0855d3f278b0b476
183
py
Python
manim_express/color.py
beidongjiedeguang/manim-express
e9c89b74da3692db3ea9b568727e78d5cbcef503
[ "MIT" ]
12
2021-06-14T07:28:29.000Z
2022-02-25T02:49:49.000Z
manim_express/color.py
beidongjiedeguang/manim-kunyuan
e9c89b74da3692db3ea9b568727e78d5cbcef503
[ "MIT" ]
1
2022-02-01T12:30:14.000Z
2022-02-01T12:30:14.000Z
manim_express/color.py
beidongjiedeguang/manim-express
e9c89b74da3692db3ea9b568727e78d5cbcef503
[ "MIT" ]
2
2021-05-13T13:24:15.000Z
2021-05-18T02:56:22.000Z
from manimlib.constants import * from itertools import chain, cycle color_list = [GREEN_C, BLUE_C, RED_C, YELLOW_C, ORANGE, GOLD_C, MAROON_C, TEAL_C] color_cycle = cycle(color_list)
30.5
81
0.786885
31
183
4.322581
0.580645
0.149254
0.208955
0
0
0
0
0
0
0
0
0
0.125683
183
5
82
36.6
0.8375
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
d8121fa92b71ae19c5d6eb85794e3f858282bd11
4,656
py
Python
src/abaqus/EngineeringFeature/SpringDashpotToGround.py
Haiiliin/PyAbaqus
f20db6ebea19b73059fe875a53be370253381078
[ "MIT" ]
7
2022-01-21T09:15:45.000Z
2022-02-15T09:31:58.000Z
src/abaqus/EngineeringFeature/SpringDashpotToGround.py
Haiiliin/PyAbaqus
f20db6ebea19b73059fe875a53be370253381078
[ "MIT" ]
null
null
null
src/abaqus/EngineeringFeature/SpringDashpotToGround.py
Haiiliin/PyAbaqus
f20db6ebea19b73059fe875a53be370253381078
[ "MIT" ]
null
null
null
from abaqusConstants import * from .SpringDashpot import SpringDashpot from ..Region.Region import Region class SpringDashpotToGround(SpringDashpot): """The SpringDashpotToGround object defines springs and/or dashpots between points and ground on a part or an assembly region. The SpringDashpotToGround object is derived from the SpringDashpot object. Attributes ---------- suppressed: Boolean A Boolean specifying whether the spring/dashpot is suppressed or not. The default value is OFF. Notes ----- This object can be accessed by: .. code-block:: python import part mdb.models[name].parts[name].engineeringFeatures.springDashpots[name] import assembly mdb.models[name].rootAssembly.engineeringFeatures.springDashpots[name] The corresponding analysis keywords are: - ELEMENT - SPRING - DASHPOT """ # A Boolean specifying whether the spring/dashpot is suppressed or not. The default value # is OFF. suppressed: Boolean = OFF def __init__(self, name: str, region: Region, dof: int, orientation: str = None, springBehavior: Boolean = OFF, dashpotBehavior: Boolean = OFF, springStiffness: float = 0, dashpotCoefficient: float = 0): """This method creates a SpringDashpotToGround object. Notes ----- This function can be accessed by: .. code-block:: python mdb.models[name].parts[name].engineeringFeatures.SpringDashpotToGround mdb.models[name].rootAssembly.engineeringFeatures\ .SpringDashpotToGround Parameters ---------- name A String specifying the repository key. region A Region object specifying the region to which the springs and/or dashpots are applied. dof An Int specifying the degree of freedom associated with the spring and dashpot behaviors. orientation None or a DatumCsys object specifying the local directions for the spring and/or dashpot. If *orientation*=None, the spring and/or dashpot data are defined in the global coordinate system. The default value is None. springBehavior A Boolean specifying whether to apply spring behavior to the selected points. 
The default value is OFF.At least one of the arguments *springBehavior*=ON or *dashpotBehavior*=ON must be specified. dashpotBehavior A Boolean specifying whether to apply dashpot behavior to the selected points. The default value is OFF.At least one of the arguments *springBehavior*=ON or *dashpotBehavior*=ON must be specified. springStiffness A Float specifying the force per relative displacement for the spring. The default value is 0.0. dashpotCoefficient A Float specifying the force per relative velocity for the dashpot. The default value is 0.0. Returns ------- A SpringDashpotToGround object. """ super().__init__() pass def setValues(self, orientation: str = None, springBehavior: Boolean = OFF, dashpotBehavior: Boolean = OFF, springStiffness: float = 0, dashpotCoefficient: float = 0): """This method modifies the SpringDashpotToGround object. Parameters ---------- orientation None or a DatumCsys object specifying the local directions for the spring and/or dashpot. If *orientation*=None, the spring and/or dashpot data are defined in the global coordinate system. The default value is None. springBehavior A Boolean specifying whether to apply spring behavior to the selected points. The default value is OFF.At least one of the arguments *springBehavior*=ON or *dashpotBehavior*=ON must be specified. dashpotBehavior A Boolean specifying whether to apply dashpot behavior to the selected points. The default value is OFF.At least one of the arguments *springBehavior*=ON or *dashpotBehavior*=ON must be specified. springStiffness A Float specifying the force per relative displacement for the spring. The default value is 0.0. dashpotCoefficient A Float specifying the force per relative velocity for the dashpot. The default value is 0.0. """ pass
40.137931
111
0.644115
517
4,656
5.7853
0.224371
0.04012
0.060181
0.068205
0.691408
0.661986
0.63457
0.61451
0.61451
0.61451
0
0.003687
0.300902
4,656
115
112
40.486957
0.915207
0.723582
0
0.307692
0
0
0
0
0
0
0
0
0
1
0.153846
false
0.153846
0.230769
0
0.538462
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
3
d82a867062ed2b7ec2f2b2e7eeba1a00b44774fd
348
py
Python
aliyun/api/rest/Push20150707BatchRegisteRequest.py
snowyxx/aliyun-python-demo
ed40887ddff440b85b77f9b2a1fcda11cca55c8b
[ "Apache-2.0" ]
null
null
null
aliyun/api/rest/Push20150707BatchRegisteRequest.py
snowyxx/aliyun-python-demo
ed40887ddff440b85b77f9b2a1fcda11cca55c8b
[ "Apache-2.0" ]
null
null
null
aliyun/api/rest/Push20150707BatchRegisteRequest.py
snowyxx/aliyun-python-demo
ed40887ddff440b85b77f9b2a1fcda11cca55c8b
[ "Apache-2.0" ]
null
null
null
''' Created by auto_sdk on 2015.07.08 ''' from aliyun.api.base import RestApi class Push20150707BatchRegisteRequest(RestApi): def __init__(self,domain='push.aliyuncs.com',port=80): RestApi.__init__(self,domain, port) self.Appid = None self.Limit = None def getapiname(self): return 'push.aliyuncs.com.batchRegiste.201507-07'
26.769231
56
0.732759
47
348
5.234043
0.680851
0.065041
0.113821
0
0
0
0
0
0
0
0
0.087248
0.143678
348
12
57
29
0.738255
0.094828
0
0
0
0
0.19322
0.135593
0
0
0
0
0
1
0.25
false
0
0.125
0.125
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
d83a2135105889455b4acc5c682a3c69f706495a
175
py
Python
HLTrigger/Configuration/python/HLT_75e33/modules/simGmtCaloSumDigis_cfi.py
PKUfudawei/cmssw
8fbb5ce74398269c8a32956d7c7943766770c093
[ "Apache-2.0" ]
1
2021-11-30T16:24:46.000Z
2021-11-30T16:24:46.000Z
HLTrigger/Configuration/python/HLT_75e33/modules/simGmtCaloSumDigis_cfi.py
PKUfudawei/cmssw
8fbb5ce74398269c8a32956d7c7943766770c093
[ "Apache-2.0" ]
4
2021-11-29T13:57:56.000Z
2022-03-29T06:28:36.000Z
HLTrigger/Configuration/python/HLT_75e33/modules/simGmtCaloSumDigis_cfi.py
PKUfudawei/cmssw
8fbb5ce74398269c8a32956d7c7943766770c093
[ "Apache-2.0" ]
1
2021-11-30T16:16:05.000Z
2021-11-30T16:16:05.000Z
import FWCore.ParameterSet.Config as cms simGmtCaloSumDigis = cms.EDProducer("L1TMuonCaloSumProducer", caloStage2Layer2Label = cms.InputTag("simCaloStage2Layer1Digis") )
29.166667
68
0.822857
14
175
10.285714
0.857143
0
0
0
0
0
0
0
0
0
0
0.031447
0.091429
175
5
69
35
0.874214
0
0
0
0
0
0.262857
0.262857
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
dc2585c4f2830df8c2f8db90a6624495587d83f1
357
py
Python
clean_artists.py
PranavAggarwal0/newsic
04e3684a6ce0fa5469d23f74898d80846585b0b4
[ "Apache-2.0" ]
null
null
null
clean_artists.py
PranavAggarwal0/newsic
04e3684a6ce0fa5469d23f74898d80846585b0b4
[ "Apache-2.0" ]
null
null
null
clean_artists.py
PranavAggarwal0/newsic
04e3684a6ce0fa5469d23f74898d80846585b0b4
[ "Apache-2.0" ]
null
null
null
with open('artistsfollowed.txt') as af: with open('/app/tosearch.txt', 'w') as ts: for line in af: if 'name' in line: line = line.strip() line = line.replace('name: ', '') line = line.replace('\'', '') line = line.replace(',', '') ts.write(line + '\n')
35.7
49
0.431373
39
357
3.948718
0.487179
0.25974
0.292208
0
0
0
0
0
0
0
0
0
0.392157
357
9
50
39.666667
0.709677
0
0
0
0
0
0.148459
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
dc2fabe73795c48ec6008709c9696555415b3a97
93
py
Python
new_site/med_ai_site/home/urls.py
com-med-ai/med_ai_site
91794f43a9bc6925bd95d2daa55244e1de89892a
[ "Apache-2.0" ]
2
2021-09-22T13:16:59.000Z
2021-09-22T13:19:55.000Z
new_site/med_ai_site/home/urls.py
com-med-ai/med_ai_site
91794f43a9bc6925bd95d2daa55244e1de89892a
[ "Apache-2.0" ]
null
null
null
new_site/med_ai_site/home/urls.py
com-med-ai/med_ai_site
91794f43a9bc6925bd95d2daa55244e1de89892a
[ "Apache-2.0" ]
1
2021-09-22T13:20:00.000Z
2021-09-22T13:20:00.000Z
from django.urls import path from .views import falan urlpatterns = [ path('', falan) ]
13.285714
28
0.688172
12
93
5.333333
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.204301
93
7
29
13.285714
0.864865
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
dc34c47813a06d5632dbee66eacf0c9a6288a4f5
736
py
Python
files/Chap 2/exemple_9_listes.py
HenraL/NSI_1ereG6_Programme_Python
9f46b848fa2331daca57e5e2e11cba41da45a67f
[ "Unlicense" ]
1
2021-06-15T13:44:47.000Z
2021-06-15T13:44:47.000Z
files/Chap 2/exemple_9_listes.py
HenraL/NSI_1ereG6_Programme_Python
9f46b848fa2331daca57e5e2e11cba41da45a67f
[ "Unlicense" ]
null
null
null
files/Chap 2/exemple_9_listes.py
HenraL/NSI_1ereG6_Programme_Python
9f46b848fa2331daca57e5e2e11cba41da45a67f
[ "Unlicense" ]
null
null
null
from time import sleep i=3 n=2 T=3 L=L1=[] L2=[-1,1] L=[3,4,5] L2[0]=0 L3=n*[0] L4=5*[0] #= L4=[0,0,0,0,0] L5=list(range(1,10)) #= L5=[1,2,3,4,5,6,7,8,9] L6=[n for n in range(1,10)] L7=[2*n for n in range(1,10)] #--> L7=[2,4,6,8,10,12,14,16,18] L8=[n**2 for n in range(1,10)] #--> L8=[1,4,9,16,25,36,49,64,81] L9=[-n for n in range(1,10) if n%2==0] #--> L9=[-2,-4,-6,-8] L10=[n for n in range(1,10) if n>3 and n%3==0] #--> L=[6,9] L11=["m"+c for c in ["a","i","u","e","o"]] #--> L=["ma","mi","mu","me","mo"] T12=[1,2,3,4,5] # L12=[n-5 for n in T] # T12 et L12 --> L12=[-4,-3,-2,-1,0] T12=[1,2] print ("{}{}{}{}{}{}{}{}{}{}{}{}{}{}".format(L[1],L1[1],L2[1],L3[1],L4[1],L5[1],L6[1],L7[1],L8[1],L9[1],L10[1],L11[1],L12[1],T12[1])) sleep(5)
32
133
0.482337
204
736
1.740196
0.29902
0.101408
0.135211
0.15493
0.270423
0.242254
0.202817
0.202817
0.202817
0
0
0.248476
0.108696
736
23
134
32
0.292683
0.30163
0
0
0
0
0.067194
0.055336
0
0
0
0
0
1
0
false
0
0.047619
0
0.047619
0.047619
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
dc40cfe6880d2c07b68e8b9f9904af79ec3e9f26
402
py
Python
api/core/models.py
wiktorcie/django-rest-dokku
573fac89decee8a760232cc15ade2c98ded7a16a
[ "MIT" ]
null
null
null
api/core/models.py
wiktorcie/django-rest-dokku
573fac89decee8a760232cc15ade2c98ded7a16a
[ "MIT" ]
null
null
null
api/core/models.py
wiktorcie/django-rest-dokku
573fac89decee8a760232cc15ade2c98ded7a16a
[ "MIT" ]
null
null
null
from datetime import date from decimal import Decimal from django.conf import settings from django.contrib.auth.models import User from django.db import models from django.db.models import (CASCADE, CharField, DateTimeField, ForeignKey, ManyToManyField, Model, OneToOneField, TextField) from django.db.models.signals import post_save from django.dispatch import receiver
36.545455
79
0.776119
51
402
6.098039
0.509804
0.192926
0.115756
0.115756
0
0
0
0
0
0
0
0
0.174129
402
10
80
40.2
0.936747
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.888889
0
0.888889
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
3
dc4b574ed85d0a1b57198fa7b95be217d96544f0
316
py
Python
algorithms/implementation/flipping_the_matrix/prithaj.py
prithajnath/HackerRank
4e28acd140d4ebc26db6fcf1dcaefb975a9cc3ae
[ "MIT" ]
4
2017-09-11T13:21:17.000Z
2018-02-02T03:18:26.000Z
algorithms/implementation/flipping_the_matrix/prithaj.py
prithajnath/HackerRank
4e28acd140d4ebc26db6fcf1dcaefb975a9cc3ae
[ "MIT" ]
10
2017-01-05T06:03:51.000Z
2018-10-06T03:35:20.000Z
algorithms/implementation/flipping_the_matrix/prithaj.py
PlattsSEC/HackerRank
f8951c8cd1cd7f5b520d2c66e17e43b3729b8a80
[ "MIT" ]
8
2016-05-05T00:46:31.000Z
2018-03-30T01:52:50.000Z
# Enter your code here. Read input from STDIN. Print output to STDOUT q = input() for i in range(q): n = input() m = [] for j in range(2*n): m.append(map(int,raw_input().split())) print sum([max(m[x][y],m[x][2*n-1-y],m[2*n-1-x][y],m[2*n-1-x][2*n-1-y]) for x in range(n) for y in range(n)])
39.5
113
0.563291
72
316
2.458333
0.430556
0.056497
0.067797
0.045198
0.112994
0.067797
0
0
0
0
0
0.036145
0.212025
316
8
113
39.5
0.674699
0.212025
0
0
0
0
0
0
0
0
0
0.125
0
0
null
null
0
0
null
null
0.142857
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
1
0
0
0
0
0
0
0
0
3
dc4c3be2afdd0a3b30fb4c4d78d447a0f9e57a57
488
py
Python
hub_app/accountlib/email.py
passiopeia/passiopeia-hub
4eed28297a0b134d6b942fa15a9423448dcc2b7e
[ "MIT" ]
null
null
null
hub_app/accountlib/email.py
passiopeia/passiopeia-hub
4eed28297a0b134d6b942fa15a9423448dcc2b7e
[ "MIT" ]
29
2020-02-09T18:28:01.000Z
2021-06-25T15:32:48.000Z
hub_app/accountlib/email.py
passiopeia/passiopeia-hub
4eed28297a0b134d6b942fa15a9423448dcc2b7e
[ "MIT" ]
null
null
null
""" Helper for setting up E-Mail """ from datetime import timedelta from random import SystemRandom from string import ascii_letters, digits from django.utils.timezone import now def get_email_key() -> str: """ Get a E-Mail Key """ random = SystemRandom() return ''.join(random.choices(ascii_letters + digits + '$=^', k=250)) def get_email_max_validity(): """ How long should a E-Mail change request be valid? """ return now() + timedelta(days=1)
20.333333
73
0.670082
67
488
4.776119
0.626866
0.046875
0.1125
0
0
0
0
0
0
0
0
0.010363
0.209016
488
23
74
21.217391
0.818653
0.194672
0
0
0
0
0.008451
0
0
0
0
0
0
1
0.222222
false
0
0.444444
0
0.888889
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
3
dc5f7e6192efc942675edabb90ebee02c6a88625
56
py
Python
client/config.py
sungeunbae/ngrok_url
7fb987f55d9f11bc4bb83ac2b87f7d8181c767fd
[ "MIT" ]
1
2021-04-22T23:27:05.000Z
2021-04-22T23:27:05.000Z
client/config.py
sungeunbae/ngrok_url
7fb987f55d9f11bc4bb83ac2b87f7d8181c767fd
[ "MIT" ]
null
null
null
client/config.py
sungeunbae/ngrok_url
7fb987f55d9f11bc4bb83ac2b87f7d8181c767fd
[ "MIT" ]
null
null
null
google_credential_file="../vejgarden-4efd839296c1.json"
28
55
0.839286
6
56
7.5
1
0
0
0
0
0
0
0
0
0
0
0.145455
0.017857
56
1
56
56
0.672727
0
0
0
0
0
0.535714
0.535714
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
dc5fb74d5450f7785f04771bdf69ddacfb538d7c
11,375
py
Python
tests/test_python_pae.py
MatthiasValvekens/python-pae
2e604896912ee5420438fbb5188653dce0633f3c
[ "MIT" ]
null
null
null
tests/test_python_pae.py
MatthiasValvekens/python-pae
2e604896912ee5420438fbb5188653dce0633f3c
[ "MIT" ]
null
null
null
tests/test_python_pae.py
MatthiasValvekens/python-pae
2e604896912ee5420438fbb5188653dce0633f3c
[ "MIT" ]
null
null
null
""" PAE encoding/decoding tests. .. (c) 2021 Matthias Valvekens """ import struct from io import BytesIO from typing import IO import pytest from python_pae import ( pae_encode, unmarshal, marshal, pae_encode_multiple, PAEDecodeError ) from python_pae.abstract import PAEType from python_pae.number import PAE_USHORT, PAE_ULLONG, PAE_UCHAR, PAE_UINT, \ PAENumberType from python_pae.encode import write_prefixed, PAEListSettings from python_pae.pae_types import PAEBytes, PAEHomogeneousList, \ PAEHeterogeneousList, PAEString # Default list encoding settings for our tests NO_CONST_PREFIX = PAEListSettings( size_type=PAE_USHORT, prefix_if_constant=False ) WITH_CONST_PREFIX = PAEListSettings( size_type=PAE_USHORT, prefix_if_constant=True ) @pytest.mark.parametrize('inp,expected_out', [ ([b'12', b'345'], b'\x02\x00\x02\x0012\x03\x00345'), ([], b'\x00\x00'), ([b'123', b'45', b'67', b'89'], b'\x04\x00\x03\x00123\x02\x0045\x02\x0067\x02\x0089'), ]) def test_encode_bytes(inp, expected_out): encoded = pae_encode(inp, size_t=PAE_USHORT) assert encoded == expected_out @pytest.mark.parametrize('inp,expected_out', [ ([b'12', b'345'], b'\x02\x00\x00\x00\x02\x00\x00\x0012\x03\x00\x00\x00345'), ([], b'\x00\x00\x00\x00'), ([b'123', b'45', b'67', b'89'], b'\x04\x00\x00\x00\x03\x00\x00\x00123' b'\x02\x00\x00\x0045\x02\x00\x00\x0067\x02\x00\x00\x0089'), ]) def test_encode_bytes_uint(inp, expected_out): encoded = pae_encode(inp, size_t=PAE_UINT) assert encoded == expected_out @pytest.mark.parametrize('inp,expected_out', [ ([b'12', b'345'], b'\x02\x00\x00\x00\x02\x0012\x03\x00345'), ([], b'\x00\x00\x00\x00'), ([b'123', b'45', b'67', b'89'], b'\x04\x00\x00\x00\x03\x00123' b'\x02\x0045\x02\x0067\x02\x0089'), ]) def test_encode_bytes_mix(inp, expected_out): lst_type = PAEHomogeneousList( PAEBytes(), PAEListSettings(size_type=PAE_UINT, length_type=PAE_USHORT,) ) encoded = marshal(inp, lst_type) assert encoded == expected_out @pytest.mark.parametrize('expected_out,inp', [ ([b'12', b'345'], 
b'\x02\x00\x02\x0012\x03\x00345'), ([], b'\x00\x00'), ([b'123', b'45', b'67', b'89'], b'\x04\x00\x03\x00123\x02\x0045\x02\x0067\x02\x0089'), ]) def test_decode_bytes(inp, expected_out): lst_type = PAEHomogeneousList(PAEBytes(), WITH_CONST_PREFIX) assert unmarshal(inp, lst_type) == expected_out @pytest.mark.parametrize('expected_out,inp', [ ([b'12', b'345'], b'\x02\x00\x00\x00\x02\x0012\x03\x00345'), ([], b'\x00\x00\x00\x00'), ([b'123', b'45', b'67', b'89'], b'\x04\x00\x00\x00\x03\x00123' b'\x02\x0045\x02\x0067\x02\x0089'), ]) def test_decode_bytes_mix(inp, expected_out): lst_type = PAEHomogeneousList( PAEBytes(), PAEListSettings( size_type=PAE_UINT, length_type=PAE_USHORT ) ) assert unmarshal(inp, lst_type) == expected_out @pytest.mark.parametrize('inp,expected_out', [ ([(b'12', PAEBytes()), (b'345', PAEBytes())], b'\x02\x00\x02\x0012\x03\x00345'), ([(1, PAE_UINT), (b'1234', PAEBytes())], b'\x02\x00\x04\x00\x01\x00\x00\x00\x04\x001234'), ([], b'\x00\x00'), ]) def test_encode_heterogeneous(inp, expected_out): encoded = pae_encode_multiple(inp, size_t=PAE_USHORT) assert encoded == expected_out TEST_ENCODE_HETEROGENEOUS_NO_PREFIX = [ ([b'12', b'345'], [PAEBytes(), PAEBytes()], b'\x02\x00\x02\x0012\x03\x00345'), ([1, b'1234'], [PAE_UINT, PAEBytes()], b'\x02\x00\x01\x00\x00\x00\x04\x001234'), ([1, b'', b'1234'], [PAE_UINT, PAEBytes(), PAEBytes()], b'\x03\x00\x01\x00\x00\x00\x00\x00\x04\x001234'), ] @pytest.mark.parametrize('inp,types,expected_out', TEST_ENCODE_HETEROGENEOUS_NO_PREFIX) def test_encode_heterogeneous_const_no_prefix(inp, types, expected_out): lst_type = PAEHeterogeneousList( component_types=types, settings=NO_CONST_PREFIX ) encoded = marshal(inp, lst_type) assert encoded == expected_out @pytest.mark.parametrize('expected_out,types,inp', [ ([b'12', b'345'], [PAEBytes(), PAEBytes()], b'\x02\x00\x02\x0012\x03\x00345'), ([1, b'1234'], [PAE_UINT, PAEBytes()], b'\x02\x00\x04\x00\x01\x00\x00\x00\x04\x001234'), ([1, b'', b'1234'], [PAE_UINT, PAEBytes(), 
PAEBytes()], b'\x03\x00\x04\x00\x01\x00\x00\x00\x00\x00\x04\x001234'), ]) def test_decode_heterogeneous(inp, types, expected_out): lst_type = PAEHeterogeneousList( component_types=types, settings=WITH_CONST_PREFIX ) decoded = unmarshal(inp, lst_type) assert decoded == expected_out @pytest.mark.parametrize('expected_out,types,inp', TEST_ENCODE_HETEROGENEOUS_NO_PREFIX) def test_decode_heterogeneous_const_no_prefix(inp, types, expected_out): lst_type = PAEHeterogeneousList( component_types=types, settings=NO_CONST_PREFIX ) decoded = unmarshal(inp, lst_type) assert decoded == expected_out @pytest.mark.parametrize('inp,pae_type', [ (b'\x02\x00\x01\x00\x00\x00\x05', PAEHeterogeneousList( component_types=[PAE_UINT, PAEBytes()], settings=NO_CONST_PREFIX)), (b'\x01\x00\x00', PAEHomogeneousList(PAEBytes(), settings=NO_CONST_PREFIX)) ]) def test_decode_length_prefix_error(inp, pae_type): with pytest.raises(PAEDecodeError, match='Failed to read length'): unmarshal(inp, pae_type) @pytest.mark.parametrize('inp,pae_type', [ (b'\x02\x00\x01\x00\x00\x00\x05\x00123', PAEHeterogeneousList( component_types=[PAE_UINT, PAEBytes()], settings=WITH_CONST_PREFIX)), (b'\x01\x00\x01\x00', PAEHomogeneousList(PAEBytes(), settings=NO_CONST_PREFIX)) ]) def test_decode_payload_too_short(inp, pae_type): with pytest.raises(PAEDecodeError, match='Expected.*next item'): unmarshal(inp, pae_type) @pytest.mark.parametrize('inp,pae_type', [ (b'\x02\x00\x01\x00\x00\x00\x05\x00123456', PAEHeterogeneousList( component_types=[PAE_UINT, PAEBytes()], settings=NO_CONST_PREFIX)), (b'\x01\x00\x00\x001', PAEHomogeneousList(PAEBytes(), settings=NO_CONST_PREFIX)) ]) def test_decode_payload_too_long(inp, pae_type): with pytest.raises(PAEDecodeError, match='trailing data'): unmarshal(inp, pae_type) @pytest.mark.parametrize('inp,pae_type', [ (b'\x01', PAEHeterogeneousList( component_types=[PAE_UINT, PAEBytes()], settings=NO_CONST_PREFIX)), (b'\x01', PAEHomogeneousList(PAEBytes(), settings=NO_CONST_PREFIX)), 
(b'\x01\x001', PAE_ULLONG), ]) def test_payload_invalid(inp, pae_type): with pytest.raises(PAEDecodeError, match='Failed to read value'): unmarshal(inp, pae_type) @pytest.mark.parametrize('inp,pae_type', [ (b'\x01\x00\x01\x00\x00\x00', PAEHeterogeneousList( component_types=[PAE_UINT, PAEBytes()], settings=NO_CONST_PREFIX)), (b'\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00', PAEHeterogeneousList( component_types=[PAE_UINT, PAEBytes()], settings=NO_CONST_PREFIX)), ]) def test_decode_wrong_component_count(inp, pae_type): with pytest.raises(PAEDecodeError, match='Wrong number of components'): unmarshal(inp, pae_type) def test_encode_wrong_component_count(): with pytest.raises(ValueError, match='Wrong number of components'): PAEHeterogeneousList( component_types=[PAE_UINT, PAEBytes()], settings=NO_CONST_PREFIX).write([1, b'2', 3], BytesIO()) def test_encode_wrong_output_length_reported(): class WeirdType(PAEType[int]): # wrong length constant_length = 1 def write(self, value: int, stream: IO) -> int: return stream.write(struct.pack('<H', value)) def read(self, stream: IO, length: int) -> int: raise NotImplementedError with pytest.raises(IOError, match='but wrote'): write_prefixed( 10, WeirdType(), BytesIO(), length_type=PAE_USHORT, prefix_if_constant=False ) NESTED_HETEROGENEOUS_TESTS = [ ([1, [b'abc', b'xyz'], b'1234'], [PAE_UINT, PAEHomogeneousList(PAEBytes(), settings=WITH_CONST_PREFIX), PAEBytes()], b'\x03\x00\x04\x00\x01\x00\x00\x00' b'\x0c\x00\x02\x00\x03\x00abc\x03\x00xyz' b'\x04\x001234'), ([1, [b'abc', b'xyz'], 'テスト'], [PAE_UINT, PAEHomogeneousList(PAEBytes(), settings=WITH_CONST_PREFIX), PAEString()], b'\x03\x00\x04\x00\x01\x00\x00\x00' b'\x0c\x00\x02\x00\x03\x00abc\x03\x00xyz' b'\x09\x00\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'), ([1, [b'', b'xyz'], [], b'1234'], [PAE_UINT, PAEHomogeneousList(PAEBytes(), settings=WITH_CONST_PREFIX), PAEHomogeneousList(PAEBytes(), settings=WITH_CONST_PREFIX), PAEBytes()], b'\x04\x00\x04\x00\x01\x00\x00\x00' 
b'\x09\x00\x02\x00\x00\x00\x03\x00xyz' b'\x02\x00\x00\x00' b'\x04\x001234'), ([1, [b'', 10, b'xyz'], [1, 2, 3], b'1234'], [PAE_UINT, PAEHeterogeneousList( [PAEBytes(), PAE_USHORT, PAEBytes()], settings=WITH_CONST_PREFIX ), PAEHomogeneousList(PAE_UCHAR, settings=WITH_CONST_PREFIX), PAEBytes()], b'\x04\x00\x04\x00\x01\x00\x00\x00' b'\x0d\x00\x03\x00\x00\x00\x02\x00\x0a\x00\x03\x00xyz' b'\x0b\x00\x03\x00\x01\x00\x01\x01\x00\x02\x01\x00\x03' b'\x04\x001234'), ([1, [b'', 10, b'xyz'], [1, 2, 3], b'1234'], [PAE_UINT, PAEHeterogeneousList( [PAEBytes(), PAE_USHORT, PAEBytes()], settings=WITH_CONST_PREFIX ), PAEHomogeneousList(PAE_UCHAR, settings=NO_CONST_PREFIX), PAEBytes()], b'\x04\x00\x04\x00\x01\x00\x00\x00' b'\x0d\x00\x03\x00\x00\x00\x02\x00\x0a\x00\x03\x00xyz' b'\x05\x00\x03\x00\x01\x02\x03' b'\x04\x001234'), ([1, [b'', 10, b'xyz'], [1, 2, 3], b'1234'], [PAE_UINT, PAEHeterogeneousList( [PAEBytes(), PAE_USHORT, PAEBytes()], settings=NO_CONST_PREFIX ), PAEHomogeneousList(PAE_UCHAR, settings=NO_CONST_PREFIX), PAEBytes()], b'\x04\x00\x04\x00\x01\x00\x00\x00' b'\x0b\x00\x03\x00\x00\x00\x0a\x00\x03\x00xyz' b'\x05\x00\x03\x00\x01\x02\x03' b'\x04\x001234'), ] @pytest.mark.parametrize('expected_out,types,inp', NESTED_HETEROGENEOUS_TESTS) def test_decode_nested(inp, types, expected_out): lst_type = PAEHeterogeneousList( component_types=types, settings=WITH_CONST_PREFIX ) decoded = unmarshal(inp, lst_type) assert decoded == expected_out @pytest.mark.parametrize('inp,types,expected_out', NESTED_HETEROGENEOUS_TESTS) def test_encode_nested(inp, types, expected_out): encoded = pae_encode_multiple(zip(inp, types), size_t=PAE_USHORT) assert encoded == expected_out def test_illegal_utf_sequence(): with pytest.raises(PAEDecodeError, match="Failed"): unmarshal(b'\xee\xaa', PAEString()) def test_number_str_known(): assert str(PAE_UCHAR) == '<uint8 (UCHAR)>' assert str(PAE_USHORT) == '<uint16 (USHORT)>' assert str(PAE_UINT) == '<uint32 (UINT)>' assert str(PAE_ULLONG) == '<uint64 (ULLONG)>' 
def test_number_str_generic(): assert str(PAENumberType(4)) == '<uint128>'
32.132768
78
0.657319
1,552
11,375
4.628866
0.106314
0.068486
0.051364
0.026726
0.78466
0.749026
0.730651
0.677895
0.64059
0.616648
0
0.117792
0.17978
11,375
353
79
32.223796
0.652197
0.010462
0
0.583618
0
0.023891
0.214279
0.148306
0
0
0
0
0.054608
1
0.078498
false
0
0.030717
0.003413
0.119454
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
dc6af33c42aa0166fc95ecfcffee245bbd796ee0
540
py
Python
__printColorListSet.py
simdevex/01.Basics
cf4f372384e66f4b26e4887d2f5d815a1f8e929c
[ "MIT" ]
null
null
null
__printColorListSet.py
simdevex/01.Basics
cf4f372384e66f4b26e4887d2f5d815a1f8e929c
[ "MIT" ]
null
null
null
__printColorListSet.py
simdevex/01.Basics
cf4f372384e66f4b26e4887d2f5d815a1f8e929c
[ "MIT" ]
null
null
null
'''a Python program to print out a set containing all the colors from color_list_1 which are not present in color_list_2''' def main(): color_list_1 = set(["White", "Black", "Red"]) color_list_2 = set(["Red", "Green"]) print("Original set elements:") print(color_list_1) print(color_list_2) print("\nDifferenct of color_list_1 and color_list_2:") print(color_list_1.difference(color_list_2)) print("\nDifferenct of color_list_2 and color_list_1:") print(color_list_2.difference(color_list_1)) main ()
33.75
83
0.714815
88
540
4.068182
0.375
0.351955
0.195531
0.125698
0.318436
0.318436
0.318436
0.206704
0
0
0
0.031042
0.164815
540
16
84
33.75
0.762749
0.218519
0
0
0
0
0.323741
0
0
0
0
0
0
1
0.090909
false
0
0
0
0.090909
0.636364
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
dcb656728acabf9111ac514686002dd4c253939f
126
py
Python
magicmirror/tools/whooshe/__init__.py
memirror/magicMirror
05ee16b44aef22c30da2bc3323c5ba593b3e53fa
[ "MIT" ]
5
2021-09-03T03:06:51.000Z
2022-03-22T07:48:22.000Z
magicmirror/tools/whooshe/__init__.py
xiaodongxiexie/magicMirror
05ee16b44aef22c30da2bc3323c5ba593b3e53fa
[ "MIT" ]
null
null
null
magicmirror/tools/whooshe/__init__.py
xiaodongxiexie/magicMirror
05ee16b44aef22c30da2bc3323c5ba593b3e53fa
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # @Author: xiaodong # @Date : 2021/5/31 from .question import QuestionWhoosh wq = QuestionWhoosh()
15.75
36
0.65873
15
126
5.533333
0.933333
0
0
0
0
0
0
0
0
0
0
0.076923
0.174603
126
7
37
18
0.721154
0.460317
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
dcb86ff5ecad809b46a22d07b4b3f7a8acd94f8c
287
py
Python
src/BusinessLogicLayer/plugins/faker_any.py
QIN2DIM/CampusDailyAutoSign
1123bf9c54d2bbfc7d22de58987c558ddea2b1c3
[ "MIT" ]
24
2020-11-11T08:29:22.000Z
2021-09-29T12:56:13.000Z
src/BusinessLogicLayer/plugins/faker_any.py
RobAI-Lab/CampusDailyAutoSign
9c554f3342e4b3437a2c98d809b0856e8ed5d66a
[ "MIT" ]
3
2021-02-24T10:57:16.000Z
2021-02-24T11:18:19.000Z
src/BusinessLogicLayer/plugins/faker_any.py
RobAI-Lab/CampusDailyAutoSign
9c554f3342e4b3437a2c98d809b0856e8ed5d66a
[ "MIT" ]
5
2020-11-11T15:42:25.000Z
2021-04-07T07:10:11.000Z
__all__ = ['get_useragent'] from fake_useragent import UserAgent from fake_useragent import errors def get_useragent() -> str: try: return UserAgent().random except errors.FakeUserAgentError: exec('import os\nos.system("pip install -upgrade fake-useragent")')
23.916667
75
0.721254
34
287
5.852941
0.617647
0.19598
0.170854
0.261307
0.321608
0
0
0
0
0
0
0
0.181185
287
11
76
26.090909
0.846809
0
0
0
0
0
0.250871
0
0
0
0
0
0
1
0.125
false
0
0.375
0
0.625
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
f49bcab9cc24d236c9fa5b5f4954a51347f525dd
23,586
py
Python
utils/service_creator/service_creator.py
zero-os/zero-os-0-boot-templates
4250b5edbc0dcc297a624885505b316c30a9b7a8
[ "Apache-2.0" ]
null
null
null
utils/service_creator/service_creator.py
zero-os/zero-os-0-boot-templates
4250b5edbc0dcc297a624885505b316c30a9b7a8
[ "Apache-2.0" ]
7
2018-06-19T10:02:19.000Z
2018-07-25T09:07:55.000Z
utils/service_creator/service_creator.py
zero-os/zero-os-0-boot-templates
4250b5edbc0dcc297a624885505b316c30a9b7a8
[ "Apache-2.0" ]
3
2018-06-12T05:15:23.000Z
2018-08-18T20:26:19.000Z
import sys
import csv
from argparse import ArgumentParser


def main(argv):
    """Parse CLI arguments and create all services described in the CSV file.

    Flags: -d/--data CSV file, -r/--robot 0-robot instance name ("debug" for
    a MagicMock dry-run), -p/--pool optional pool name, -c/--clean wipe first.
    """
    parser = ArgumentParser()
    parser.add_argument("-d", "--data", dest="data_file",
                        help="CSV file to read the host data from", required=True)
    parser.add_argument("-r", "--robot", dest="robot_name",
                        help="0-robot instance to use", required=True)
    parser.add_argument("-p", "--pool", dest="pool_name",
                        help="Puts all hosts in a pool with provided name", required=False)
    parser.add_argument("-c", "--clean", dest="clean",
                        help="Start from clean env. Deletes all reservation, pool, racktivity host, racktivity client, zeroboot and ssh services from the robot it has access to.",
                        required=False, action='store_true', default=False)
    args = parser.parse_args()

    if args.robot_name == "debug":
        from unittest.mock import MagicMock
        robot = MagicMock()
    else:
        # Imported lazily so the module (and the "debug" mode above) works
        # without js9 installed; previously this was a module-level import.
        from js9 import j
        robot = j.clients.zrobot.robots[args.robot_name]

    if args.clean:
        clean_env(robot)

    create_ssh_services(robot, args.data_file)
    create_zboot_services(robot, args.data_file)
    create_rack_services(robot, args.data_file)
    create_ipmi_services(robot, args.data_file)

    hosts = []
    hosts.extend(create_rack_host_services(robot, args.data_file))
    hosts.extend(create_ipmi_host_services(robot, args.data_file))

    if args.pool_name:
        add_hosts_pool_service(robot, hosts, args.pool_name)


def clean_env(robot):
    """
    Cleans up environment of services from the following templates:
    - Uninstalls and deletes the zeroboot_reservation templates
    - Deletes pool services
    - Deletes racktivity host services
    - Deletes racktivity client services
    - Deletes zeroboot client services
    - Deletes ssh client services

    Keep in mind, this will only remove the services the zero-robot client
    has access to

    Arguments:
        robot {ZRobot} -- Robot instance
    """
    print("Cleaning up environment...")

    # Reservations must be uninstalled before they can be deleted.
    for s in robot.services.find(template_uid='github.com/zero-os/0-boot-templates/zeroboot_reservation/0.0.1'):
        s.schedule_action("uninstall").wait(die=True).result
        s.delete()

    # Remaining service kinds only need deleting; order mirrors the original.
    for template_uid in (
            'github.com/zero-os/0-boot-templates/zeroboot_pool/0.0.1',
            'github.com/zero-os/0-boot-templates/zeroboot_racktivity_host/0.0.1',
            'github.com/zero-os/0-boot-templates/racktivity_client/0.0.1',
            'github.com/zero-os/0-boot-templates/zeroboot_client/0.0.1',
            'github.com/zero-os/0-boot-templates/ssh_client/0.0.1'):
        for s in robot.services.find(template_uid=template_uid):
            s.delete()

    print("Environment should be cleaned up now!")


def _iter_csv_section(data_file, header, known_columns, required_columns,
                      msg_end, msg_missing, msg_last):
    """Yield ``(row, title_indexes)`` for each data row of one CSV section.

    The file layout is: a row whose first cell equals ``header`` marks the
    section start; the next row holds column titles; data rows follow until
    a row whose first two cells are empty, or EOF.

    Arguments:
        data_file {str} -- location of the CSV file
        header {str} -- lower-case section marker (e.g. 'ssh_data')
        known_columns {tuple} -- column titles to index
        required_columns {tuple} -- subset that must be present in the title
            row; a RuntimeError is raised otherwise
        msg_end {str} -- printed (with the 1-based row number) at the
            terminating empty row
        msg_missing {str} -- printed when the section header is never found
        msg_last {str} -- printed when the section runs to EOF

    Raises:
        RuntimeError -- when a required column is missing from the title row
    """
    with open(data_file, newline='') as csvfile:
        reader = csv.reader(csvfile, delimiter=',')
        section_found = False
        title_indexes = {}
        for row_i, row in enumerate(reader):
            if not section_found:
                if str(row[0]).lower() == header:
                    print("%s header found at row %s" % (header, row_i + 1))
                    section_found = True
                continue
            if not title_indexes:
                # The row right after the header carries the column titles.
                for col_i, col in enumerate(row):
                    if col.lower() in known_columns:
                        title_indexes[col.lower()] = col_i
                for item in required_columns:
                    if item not in title_indexes:
                        raise RuntimeError(
                            "key '%s' was not provided for the %s at row %s"
                            % (item, header, row_i + 1))
                continue
            # Two leading empty cells terminate the section.
            if row[0] in (None, "") and row[1] in (None, ""):
                print(msg_end % (row_i + 1))
                break
            yield row, title_indexes
        else:
            if not section_found:
                print(msg_missing)
            else:
                print(msg_last)


def _create_client_services(robot, data_file, header, template_uid, login_key,
                            msg_end, msg_missing, msg_last):
    """Create one client service per data row of a credentials section.

    Shared implementation for the ssh/racktivity/ipmi client sections, which
    all use host_address/user/password/hostname plus an optional port column.
    ``login_key`` is the template's data key for the user name ("login" for
    ssh_client, "username" for the others).
    """
    section = _iter_csv_section(
        data_file, header,
        known_columns=('host_address', 'hostname', 'user', 'password', 'port'),
        required_columns=('host_address', 'user', 'password', 'hostname'),
        msg_end=msg_end, msg_missing=msg_missing, msg_last=msg_last)
    for row, idx in section:
        data = {
            login_key: row[idx['user']],
            "password": row[idx['password']],
            "host": row[idx['host_address']],
        }
        # BUGFIX: membership test instead of idx.get(...) truthiness, which
        # silently ignored a 'port' column located at index 0.
        if "port" in idx and row[idx["port"]]:
            data["port"] = int(row[idx["port"]])
        robot.services.find_or_create(
            template_uid,
            row[idx["hostname"]],
            data=data,
        )


def create_ssh_services(robot, data_file):
    """Creates the SSH clients defined in the CSV file

    Arguments:
        robot {ZRobot} -- Robot instance
        data_file {str} -- location of the CSV file
    """
    _create_client_services(
        robot, data_file, 'ssh_data',
        "github.com/zero-os/0-boot-templates/ssh_client/0.0.1",
        login_key="login",
        msg_end='SSH client data ended at row %s',
        msg_missing='No SSH client data was found',
        msg_last='SSH client data ended at last row')


def create_zboot_services(robot, data_file):
    """Creates the zboot clients services defined in the CSV file

    Arguments:
        robot {ZRobot} -- Robot instance
        data_file {str} -- location of the CSV file
    """
    section = _iter_csv_section(
        data_file, 'zboot_data',
        known_columns=('name', 'ztier_network', 'ssh_service', 'ztier_service'),
        required_columns=('name', 'ztier_network', 'ssh_service'),
        msg_end='Zboot client data ended at row %s',
        msg_missing='No Zboot client data was found',
        msg_last='Zboot client data ended at last row')
    for row, idx in section:
        data = {
            "networkId": row[idx['ztier_network']],
            "sshClient": row[idx['ssh_service']],
        }
        if "ztier_service" in idx and row[idx["ztier_service"]]:
            # BUGFIX: this value was wrapped in int() (copy/paste from the
            # port handling), which crashed on any non-numeric zerotier
            # service name; the template expects the service name itself.
            data["zerotierClient"] = row[idx["ztier_service"]]
        robot.services.find_or_create(
            "github.com/zero-os/0-boot-templates/zeroboot_client/0.0.1",
            row[idx["name"]],
            data=data,
        )


def create_rack_services(robot, data_file):
    """Creates the racktivity clients defined in the CSV file

    Arguments:
        robot {ZRobot} -- Robot instance
        data_file {str} -- location of the CSV file
    """
    _create_client_services(
        robot, data_file, 'racktivity_data',
        "github.com/zero-os/0-boot-templates/racktivity_client/0.0.1",
        login_key="username",
        msg_end='Racktivity client data ended at row %s',
        msg_missing='No racktivity client data was found',
        msg_last='Racktivity client data ended at last row')


def create_ipmi_services(robot, data_file):
    """Creates the ipmi clients defined in the CSV file

    Arguments:
        robot {ZRobot} -- Robot instance
        data_file {str} -- location of the CSV file
    """
    _create_client_services(
        robot, data_file, 'ipmi_data',
        "github.com/zero-os/0-boot-templates/ipmi_client/0.0.1",
        login_key="username",
        msg_end='IPMI client data ended at row %s',
        msg_missing='No ipmi client data was found',
        msg_last='IPMI client data ended at last row')


def create_rack_host_services(robot, data_file):
    """Creates the racktivity host services

    Arguments:
        robot {ZRobot} -- Robot instance
        data_file {str} -- Location of CSV file

    Returns:
        [str] -- List of host service names created
    """
    hosts = []
    section = _iter_csv_section(
        data_file, 'rack_host_data',
        known_columns=('zboot_service', 'racktivity_data',
                       'redundant_racktivity_data', 'mac', 'ip', 'network',
                       'hostname', 'lkrn_url'),
        # BUGFIX: 'hostname' is used below but was missing from the required
        # check, so its absence surfaced as a bare KeyError instead of the
        # descriptive RuntimeError.
        required_columns=('zboot_service', 'racktivity_data', 'mac', 'ip',
                          'network', 'hostname', 'lkrn_url'),
        msg_end='Host data ended at row %s',
        msg_missing='No host data was found',
        msg_last='host data ended at last row')
    for row, idx in section:
        hostname = row[idx['hostname']]
        # If a service for this host already exists, skip it.
        if robot.services.find(name=hostname):
            print("There is already a service running for host %s. Skipping to next host" % hostname)
            continue
        data = {
            "zerobootClient": row[idx['zboot_service']],
            "mac": row[idx['mac']].lower(),
            "ip": row[idx['ip']],
            "hostname": hostname,
            "network": row[idx['network']],
            "lkrn_url": row[idx['lkrn_url']],
            "racktivities": [_rack_data_conv(row[idx['racktivity_data']])],
        }
        if 'redundant_racktivity_data' in idx and row[idx['redundant_racktivity_data']]:
            data['racktivities'].append(
                _rack_data_conv(row[idx['redundant_racktivity_data']]))
        host_service = robot.services.create(
            "github.com/zero-os/0-boot-templates/zeroboot_racktivity_host/0.0.1",
            hostname,
            data=data,
        )
        host_service.schedule_action('install').wait(die=True)
        hosts.append(hostname)
    return hosts


def _rack_data_conv(data):
    """ Converts data in CSV file to dict

    input format: "<client>;<port>;<powermodule>"
    Where the powermodule is optional

    output format: {
        'client': <client>,
        'port': <port>,
        'powermodule': <powermodule>,
    }

    Arguments:
        data str -- data in the CSV field

    Raises:
        RuntimeError -- when there are fewer than 2 or more than 3 segments

    Returns:
        dict -- data in dict form
    """
    segments = data.split(";")
    if len(segments) == 2:
        segments.append(None)
    elif len(segments) < 2:
        raise RuntimeError("Not enough segments in racktivity data. Found: %s" % data)
    elif len(segments) > 3:
        raise RuntimeError("too many segments in racktivity data. Found: %s" % data)
    return {
        'client': segments[0],
        'port': int(segments[1]),
        'powermodule': segments[2],
    }


def create_ipmi_host_services(robot, data_file):
    """Creates the ipmi host services

    Arguments:
        robot {ZRobot} -- Robot instance
        data_file {str} -- Location of CSV file

    Returns:
        [str] -- List of host service names created
    """
    hosts = []
    section = _iter_csv_section(
        data_file, 'ipmi_host_data',
        known_columns=('zboot_service', 'ipmi_service', 'mac', 'ip',
                       'network', 'hostname', 'lkrn_url'),
        # BUGFIX: 'hostname' added to the required check (see
        # create_rack_host_services).
        required_columns=('zboot_service', 'ipmi_service', 'mac', 'ip',
                          'network', 'hostname', 'lkrn_url'),
        msg_end='Host data ended at row %s',
        msg_missing='No ipmi_host_data was found',
        msg_last='ipmi_host_data ended at last row')
    for row, idx in section:
        hostname = row[idx['hostname']]
        # If a service for this host already exists, skip it.
        if robot.services.find(name=hostname):
            print("There is already a service running for host %s. Skipping to next host" % hostname)
            continue
        data = {
            "zerobootClient": row[idx['zboot_service']],
            "mac": row[idx['mac']].lower(),
            "ip": row[idx['ip']],
            "hostname": hostname,
            "network": row[idx['network']],
            "lkrn_url": row[idx['lkrn_url']],
            "ipmiClient": row[idx['ipmi_service']],
        }
        host_service = robot.services.create(
            "github.com/zero-os/0-boot-templates/zeroboot_ipmi_host/0.0.1",
            hostname,
            data=data,
        )
        host_service.schedule_action('install').wait(die=True)
        hosts.append(hostname)
    return hosts


def add_hosts_pool_service(robot, hosts, pool_name):
    """Creates the pool service if it doesn't exist and adds all provided
    hosts in that pool

    Arguments:
        robot {ZRobot} -- Robot instance
        hosts [str] -- List of hostnames to add to the pool
        pool_name {str} -- Name to give the pool service
    """
    pool_service = robot.services.find_or_create(
        "github.com/zero-os/0-boot-templates/zeroboot_pool/0.0.1",
        pool_name,
        data={}
    )
    for host in hosts:
        # add host to pool
        pool_service.schedule_action('add', args={'host': host}).wait(die=True)


if __name__ == "__main__":
    main(sys.argv)
38.729064
262
0.523149
2,731
23,586
4.356646
0.081655
0.100857
0.054211
0.02118
0.785174
0.759539
0.704656
0.672718
0.664565
0.639939
0
0.008187
0.368185
23,586
608
263
38.792763
0.79023
0.133469
0
0.654412
0
0.034314
0.202938
0.043725
0
0
0
0
0
1
0.02451
false
0.029412
0.012255
0
0.044118
0.068627
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
f4a1b689a24c0d0a93a6e0cf531694a4643b45c9
563
py
Python
ch06/plot_overfitting_with_weight_decay.py
sankaku/deep-learning-from-scratch-py
70ec531578f099136744d2c1ec11959b239c3854
[ "MIT" ]
null
null
null
ch06/plot_overfitting_with_weight_decay.py
sankaku/deep-learning-from-scratch-py
70ec531578f099136744d2c1ec11959b239c3854
[ "MIT" ]
null
null
null
ch06/plot_overfitting_with_weight_decay.py
sankaku/deep-learning-from-scratch-py
70ec531578f099136744d2c1ec11959b239c3854
[ "MIT" ]
null
null
null
# Visualize overfitting with weight decay # mod ch06.plot_overfitting.py import sys import os sys.path.append(os.pardir) import numpy as np from dataset.mnist import load_mnist from ch06.multi_layer_net import MultiLayerNet from ch06.SGD import SGD from ch06.plot_overfitting import train from ch06.plot_overfitting import plot_accuracies import matplotlib.pyplot as plt if __name__ == '__main__': network, train_accuracies, test_accuracies = train( 100, 10000, 0.1, weight_decay_lambda=0.1) plot_accuracies(train_accuracies, test_accuracies)
26.809524
55
0.802842
83
563
5.180723
0.506024
0.074419
0.132558
0.106977
0.134884
0
0
0
0
0
0
0.045267
0.136767
563
20
56
28.15
0.839506
0.120782
0
0
0
0
0.01626
0
0
0
0
0
0
1
0
true
0
0.642857
0
0.642857
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
3
f4ad77fdb3706975d9516c5dda6eecf4f090d6bd
4,369
py
Python
tests/object_store/utils/test_chunk_transfer_throttle.py
yellowdog/yellowdog-sdk-python-public
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
[ "Apache-2.0" ]
null
null
null
tests/object_store/utils/test_chunk_transfer_throttle.py
yellowdog/yellowdog-sdk-python-public
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
[ "Apache-2.0" ]
null
null
null
tests/object_store/utils/test_chunk_transfer_throttle.py
yellowdog/yellowdog-sdk-python-public
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
[ "Apache-2.0" ]
null
null
null
"""Unit tests for ChunkTransferThrottle.

The `time` module used inside chunk_transfer_throttle is patched via
pytest-mock's `mocker`; the `side_effect` lists encode the exact sequence
of `time.time()` return values the code under test will observe, so
statement order in each test is significant.
"""
from yellowdog_client.object_store.utils import ChunkTransferThrottle
from cancel_token.cancel_token import CancellationToken


class TestChunkTransferThrottle(object):
    def test_constructor(self, mocker):
        # Constructor snapshot: all derived fields computed from the
        # period (seconds -> ms) and the max-bytes setter.
        mock_time = mocker.patch("yellowdog_client.object_store.utils.chunk_transfer_throttle.time")
        mock_time.time.return_value = 7
        throttle = ChunkTransferThrottle(throttle_period_sec=5)
        throttle.max_bytes_per_second = 80
        assert throttle._throttle_period_seconds == 5
        assert throttle._throttle_period_ms == 5000
        assert throttle._max_bytes_per_second == 80
        assert throttle._max_bytes_per_period == 80 * 5
        assert throttle._bytes_transferred == 0
        assert throttle._start_time == 7000
        assert throttle._end_time is None

    def test_start_and_end(self, mocker):
        # start() stamps _start_time (ms); stop() stamps _end_time without
        # touching _start_time.
        mock_time = mocker.patch("yellowdog_client.object_store.utils.chunk_transfer_throttle.time")
        mock_time.time.return_value = 3
        throttle = ChunkTransferThrottle(throttle_period_sec=2)
        throttle.start()
        assert throttle._start_time == 3000
        assert throttle._end_time is None
        mock_time.time.return_value = 4
        throttle.stop()
        assert throttle._start_time == 3000
        assert throttle._end_time == 4000


class TestWait(object):
    # NOTE(review): collected as a separate top-level test class; each test
    # exercises one branch of wait_for_transfer_bandwidth.

    def test__wait__no_max_bytes__expect_no_sleep(self, mocker):
        # max_bytes_per_second == 0 disables throttling entirely.
        mock_time = mocker.patch("yellowdog_client.object_store.utils.chunk_transfer_throttle.time")
        token = CancellationToken()
        throttle = ChunkTransferThrottle(throttle_period_sec=2)
        throttle.max_bytes_per_second = 0
        throttle.wait_for_transfer_bandwidth(chunk_size=700, cancellation_token=token)
        mock_time.sleep.assert_not_called()

    def test__wait__max_bytes__token_is_cancelled__expect_no_sleep(self, mocker):
        # A cancelled token short-circuits before any accounting happens.
        mock_time = mocker.patch("yellowdog_client.object_store.utils.chunk_transfer_throttle.time")
        token = CancellationToken()
        token.cancel()
        throttle = ChunkTransferThrottle(throttle_period_sec=2)
        throttle.max_bytes_per_second = 800
        throttle.wait_for_transfer_bandwidth(chunk_size=700, cancellation_token=token)
        mock_time.sleep.assert_not_called()
        assert throttle._bytes_transferred == 0

    def test__wait__max_bytes__elapsed_time_exceeds_start_time__expect_no_sleep(self, mocker):
        # Elapsed time beyond the throttle period resets the window:
        # counter restarts at the chunk size, start time moves forward.
        mock_time = mocker.patch("yellowdog_client.object_store.utils.chunk_transfer_throttle.time")
        mock_time.time.side_effect = [1, 4]  # elapsed: 3000 ms
        token = CancellationToken()
        throttle = ChunkTransferThrottle(throttle_period_sec=2)  # throttle period: 2000 ms
        throttle.max_bytes_per_second = 800
        throttle.wait_for_transfer_bandwidth(chunk_size=700, cancellation_token=token)
        mock_time.sleep.assert_not_called()
        assert throttle._bytes_transferred == 700
        assert throttle._start_time == 4000

    def test__wait__max_bytes__chunk_size_does_not_reach_max_bytes_per_sec__expect_no_sleep(self, mocker):
        # 20 + 700 = 720 bytes stays under 800*2 per period: no sleep,
        # window start unchanged.
        mock_time = mocker.patch("yellowdog_client.object_store.utils.chunk_transfer_throttle.time")
        mock_time.time.side_effect = [1, 2]
        token = CancellationToken()
        throttle = ChunkTransferThrottle(throttle_period_sec=2)
        throttle.max_bytes_per_second = 800
        throttle._bytes_transferred = 20
        throttle.wait_for_transfer_bandwidth(chunk_size=700, cancellation_token=token)
        mock_time.sleep.assert_not_called()
        assert throttle._bytes_transferred == 720
        assert throttle._start_time == 1000

    def test__wait__max_bytes__chunk_size_exceeds_max_bytes_per_sec__expect_sleep__second_call_exceeds_time__expect_exit(self, mocker):
        # Budget exceeded -> one sleep; after sleeping, the third time()
        # reading (10) falls outside the window, so the loop exits and the
        # window restarts at t=10s.
        mock_time = mocker.patch("yellowdog_client.object_store.utils.chunk_transfer_throttle.time")
        mock_time.time.side_effect = [1, 3, 10]
        token = CancellationToken()
        throttle = ChunkTransferThrottle(throttle_period_sec=9)
        throttle.max_bytes_per_second = 800
        throttle._bytes_transferred = 300
        throttle.wait_for_transfer_bandwidth(chunk_size=700, cancellation_token=token)
        assert throttle._bytes_transferred == 700
        assert throttle._start_time == 10 * 1000
        mock_time.sleep.assert_called_once_with(7)
40.082569
135
0.741588
540
4,369
5.527778
0.157407
0.048241
0.036851
0.069682
0.822781
0.748744
0.737688
0.677722
0.650251
0.542044
0
0.03184
0.187686
4,369
108
136
40.453704
0.809242
0.009384
0
0.506667
0
0
0.103632
0.103632
0
0
0
0
0.306667
1
0.093333
false
0
0.026667
0
0.146667
0
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
f4d3d974d966a403143eb13cad5fd07f8eb8e80e
1,913
py
Python
goless/selecting.py
ctismer/goless
02168a40902691264b32c7da6f453819ed7a91cf
[ "Apache-2.0" ]
1
2015-05-28T03:12:47.000Z
2015-05-28T03:12:47.000Z
goless/selecting.py
ctismer/goless
02168a40902691264b32c7da6f453819ed7a91cf
[ "Apache-2.0" ]
null
null
null
goless/selecting.py
ctismer/goless
02168a40902691264b32c7da6f453819ed7a91cf
[ "Apache-2.0" ]
null
null
null
from .backends import current as _be


# noinspection PyPep8Naming,PyShadowingNames
class rcase(object):
    """
    A case that will ``chan.recv()`` when the channel is able to receive.
    """
    def __init__(self, chan):
        self.chan = chan

    def ready(self):
        return self.chan.recv_ready()

    def exec_(self):
        return self.chan.recv()


# noinspection PyPep8Naming,PyShadowingNames
class scase(object):
    """A case that will ``chan.send(value)``
    when the channel is able to send."""

    def __init__(self, chan, value):
        self.chan = chan
        self.value = value

    def ready(self):
        return self.chan.send_ready()

    def exec_(self):
        # send() returns None, so a chosen scase yields (case, None).
        self.chan.send(self.value)


# noinspection PyPep8Naming
class dcase(object):
    """The default case."""
    def ready(self):
        # Never "ready": select() handles the default case explicitly.
        return False


def select(cases):
    """
    Select the first case that becomes ready.
    If a default case (:class:`goless.dcase`) is present,
    return that if no other cases are ready.
    If there is no default case and no case is ready,
    block until one becomes ready.

    See Go's ``reflect.Select`` method for an analog
    (http://golang.org/pkg/reflect/#Select).

    :param cases: List of case instances, such as
      :class:`goless.rcase`, :class:`goless.scase`, or :class:`goless.dcase`.
    :return: ``(chosen case, received value)``.
      If the chosen case is not an :class:`goless.rcase`, it will be None.
    """
    default = None
    for c in cases:
        if c.ready():
            return c, c.exec_()
        if isinstance(c, dcase):
            # BUGFIX: fixed typo in the assertion message ('allowd').
            assert default is None, 'Only one default case is allowed.'
            default = c
    if default is not None:
        # noinspection PyCallingNonCallable
        return default, None
    # Busy-wait, yielding to the backend scheduler between scans so other
    # tasklets/greenlets can make a case ready.
    while True:
        for c in cases:
            if c.ready():
                return c, c.exec_()
        _be.yield_()
26.569444
81
0.616309
256
1,913
4.539063
0.332031
0.055077
0.030981
0.046472
0.197935
0.175559
0.053356
0.053356
0.053356
0.053356
0
0.002163
0.274961
1,913
71
82
26.943662
0.835616
0.444851
0
0.382353
0
0
0.03272
0
0
0
0
0
0.029412
1
0.235294
false
0
0.029412
0.117647
0.558824
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
f4de0a5ae02c08d85cc8918f3275d32a5f1090ef
145
py
Python
django_evolution/db/postgresql_psycopg2.py
kamrankalantarli/django-evolution
3e67426b189aecca5e470607838d1191f4892859
[ "BSD-3-Clause" ]
18
2015-02-08T14:48:02.000Z
2021-08-03T21:07:37.000Z
django_evolution/db/postgresql_psycopg2.py
kamrankalantarli/django-evolution
3e67426b189aecca5e470607838d1191f4892859
[ "BSD-3-Clause" ]
4
2015-01-07T01:15:08.000Z
2020-08-06T06:52:13.000Z
django_evolution/db/postgresql_psycopg2.py
kamrankalantarli/django-evolution
3e67426b189aecca5e470607838d1191f4892859
[ "BSD-3-Clause" ]
13
2015-01-07T01:06:21.000Z
2022-02-20T16:27:41.000Z
# Psycopg2 behaviour is identical to Psycopg1 from django_evolution.db.postgresql import EvolutionOperations __all__ = ['EvolutionOperations']
24.166667
62
0.834483
15
145
7.733333
0.933333
0
0
0
0
0
0
0
0
0
0
0.015504
0.110345
145
5
63
29
0.883721
0.296552
0
0
0
0
0.19
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
f4e25421a136d0f49225bf4f0ffc79ec0d8aff41
264
py
Python
GTFS2OMNS/test.py
luyuliu/playground
0e0382be27abf6714bda8ea1bc34249286ef53c1
[ "MIT" ]
null
null
null
GTFS2OMNS/test.py
luyuliu/playground
0e0382be27abf6714bda8ea1bc34249286ef53c1
[ "MIT" ]
null
null
null
GTFS2OMNS/test.py
luyuliu/playground
0e0382be27abf6714bda8ea1bc34249286ef53c1
[ "MIT" ]
null
null
null
# Note: # 1. Need dependency autoinstall # 2. GDAL import os os.environ['PATH'] import gtfs2gmns as gg gtfs_path = "H:\\ChromeDownload\\gtfscota" gmns_path = "H:\\ChromeDownload\\gtfscota\\output" node_transit,link_transit = gg.Convert_GTFS(gtfs_path,gmns_path)
22
64
0.761364
38
264
5.105263
0.631579
0.082474
0.195876
0.278351
0
0
0
0
0
0
0
0.012712
0.106061
264
12
64
22
0.809322
0.166667
0
0
0
0
0.313364
0.294931
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
f4f25c0f50d3d73973995e366ef56671ed4222cc
404
py
Python
neighbourhood/neighbours/api/serializers.py
gracewa/GoodNeighbour
7b2427aa0b2c9c29823edc7936eeb149d2030853
[ "MIT" ]
null
null
null
neighbourhood/neighbours/api/serializers.py
gracewa/GoodNeighbour
7b2427aa0b2c9c29823edc7936eeb149d2030853
[ "MIT" ]
null
null
null
neighbourhood/neighbours/api/serializers.py
gracewa/GoodNeighbour
7b2427aa0b2c9c29823edc7936eeb149d2030853
[ "MIT" ]
null
null
null
from rest_framework import serializers
from ..models import Neighbourhood, Business, EmergencyService, User
# NOTE(review): Business and EmergencyService are imported but have no
# serializer below — either dead imports or serializers yet to be written.


class NeighbourhoodSerializer(serializers.ModelSerializer):
    """Serializes Neighbourhood instances (id, name, location, admin)."""
    class Meta:
        model = Neighbourhood
        fields = ('id', 'name', 'location', 'admin')


class UserSerializer(serializers.ModelSerializer):
    """Serializes User instances (id, username, email)."""
    class Meta:
        model = User
        fields = ('id', 'username','email')
25.25
68
0.69802
37
404
7.594595
0.621622
0.185053
0.220641
0.24911
0.284698
0
0
0
0
0
0
0
0.19802
404
15
69
26.933333
0.867284
0
0
0.2
0
0
0.084367
0
0
0
0
0
0
1
0
false
0
0.2
0
0.6
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
f4f2905d4ac94fa1edb03831fdc2fc88fbaaf0fc
74
py
Python
python/testData/intentions/convertVariadicParam_after.py
teddywest32/intellij-community
e0268d7a1da1d318b441001448cdd3e8929b2f29
[ "Apache-2.0" ]
null
null
null
python/testData/intentions/convertVariadicParam_after.py
teddywest32/intellij-community
e0268d7a1da1d318b441001448cdd3e8929b2f29
[ "Apache-2.0" ]
11
2017-02-27T22:35:32.000Z
2021-12-24T08:07:40.000Z
python/testData/intentions/convertVariadicParam_after.py
teddywest32/intellij-community
e0268d7a1da1d318b441001448cdd3e8929b2f29
[ "Apache-2.0" ]
1
2020-11-27T10:36:50.000Z
2020-11-27T10:36:50.000Z
# NOTE(review): the repo path (python/testData/intentions/
# convertVariadicParam_after.py) indicates this is IDE intention-test
# fixture data — the expected "after" text of a refactoring. Its exact
# text (including the unusual `q = 2` spacing) is the assertion; do not
# "clean it up".
def foo(tmp, w, q = 2, foo=22, **kwargs):
    a = tmp
    # `doSomething` is not defined in this file; presumably irrelevant to
    # the refactoring under test.
    doSomething(foo)
24.666667
41
0.554054
13
74
3.153846
0.769231
0
0
0
0
0
0
0
0
0
0
0.054545
0.256757
74
3
42
24.666667
0.690909
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
762ade3d958113bcb51d1116e7400051222caa9d
1,127
py
Python
castanea_keras/generators/datareader.py
YusukeSuzuki/castanea_keras
dfb2694131580b5e215c3c4fbfed4250c5db0a71
[ "MIT" ]
null
null
null
castanea_keras/generators/datareader.py
YusukeSuzuki/castanea_keras
dfb2694131580b5e215c3c4fbfed4250c5db0a71
[ "MIT" ]
null
null
null
castanea_keras/generators/datareader.py
YusukeSuzuki/castanea_keras
dfb2694131580b5e215c3c4fbfed4250c5db0a71
[ "MIT" ]
null
null
null
import tensorflow as tf
import multiprocessing as mp


class DataReader():
    """Iterator over shuffled training batches from a TF1 queue runner.

    Builds a private tf.Graph/tf.Session pinned to the CPU, wires the
    tensors produced by ``op(*args, **kwargs)`` into
    ``tf.train.shuffle_batch``, and starts queue-runner threads. Supports
    the iterator protocol (each ``next()`` runs one batch) and the
    context-manager protocol.
    """

    def __init__(self, batch_size, op, args=None, kwargs=None, queue_max=5000, queue_min=1000):
        """
        Args:
            batch_size: number of samples per emitted batch.
            op: callable returning the list/tuple of tensors to batch.
            args: positional arguments forwarded to ``op``.
                BUGFIX: previously defaulted to the mutable ``[]``.
            kwargs: keyword arguments forwarded to ``op``.
                BUGFIX: previously defaulted to the mutable ``{}``. Mutable
                defaults are shared across every call; ``None`` sentinels
                are backward compatible and remove that hazard.
            queue_max: shuffle-queue capacity.
            queue_min: minimum elements retained after dequeue (controls
                shuffle quality).
        """
        if args is None:
            args = []
        if kwargs is None:
            kwargs = {}
        self.graph = tf.Graph()
        # device_count={'GPU': 0} keeps the input pipeline off the GPU.
        config = tf.ConfigProto(device_count={'GPU': 0})
        self.sess = tf.Session(config=config, graph=self.graph)
        self.coordinator = tf.train.Coordinator()
        self.threads = None
        with self.graph.as_default(), tf.device('/cpu:0'):
            self.batch = tf.train.shuffle_batch(
                [*op(*args, **kwargs)], batch_size, queue_max, queue_min,
                num_threads=mp.cpu_count()*4)
            self.threads = tf.train.start_queue_runners(sess=self.sess, coord=self.coordinator)

    def _pumpup(self):
        # Placeholder — queue pre-fill is not implemented.
        pass

    def __iter__(self):
        return self

    def __del__(self):
        # Stop and join the queue-runner threads when the reader is
        # garbage-collected.
        self.coordinator.request_stop()
        if self.threads:
            self.coordinator.join(self.threads)

    def __next__(self):
        # One session run == one shuffled batch, returned as a tuple of
        # numpy arrays.
        return tuple(self.sess.run(self.batch))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Cleanup is left to __del__; nothing extra on context exit.
        pass
28.175
91
0.625555
143
1,127
4.643357
0.426573
0.090361
0.036145
0.051205
0
0
0
0
0
0
0
0.013033
0.251109
1,127
39
92
28.897436
0.773697
0
0
0.142857
0
0
0.007993
0
0
0
0
0
0
1
0.25
false
0.071429
0.071429
0.107143
0.464286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
0
0
0
3
5201e7a3758c0359cb545db474d454accf475597
89
py
Python
lewis_emulators/rkndio/interfaces/__init__.py
ISISComputingGroup/EPICS-DeviceEmulator
026c2a14a16bb204ea7527e3765daa182cafa814
[ "BSD-3-Clause" ]
2
2020-10-20T16:49:13.000Z
2021-02-19T10:41:44.000Z
lewis_emulators/rkndio/interfaces/__init__.py
ISISComputingGroup/EPICS-DeviceEmulator
026c2a14a16bb204ea7527e3765daa182cafa814
[ "BSD-3-Clause" ]
9
2019-03-22T15:35:15.000Z
2021-07-28T11:05:43.000Z
lewis_emulators/rkndio/interfaces/__init__.py
ISISComputingGroup/EPICS-DeviceEmulator
026c2a14a16bb204ea7527e3765daa182cafa814
[ "BSD-3-Clause" ]
1
2020-10-21T17:02:44.000Z
2020-10-21T17:02:44.000Z
from .stream_interface import RkndioStreamInterface __all__ = ['RkndioStreamInterface']
22.25
51
0.842697
7
89
10
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.089888
89
3
52
29.666667
0.864198
0
0
0
0
0
0.235955
0.235955
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
5203df72338bedbe225ed8e64c8a15ce062db534
1,170
py
Python
core/models.py
spapas/hyperapp-tutorial
24a548fbe96b5ecdd51e95e89b7302d85215d88e
[ "Unlicense" ]
17
2018-06-01T10:33:34.000Z
2021-08-14T13:09:54.000Z
core/models.py
spapas/hyperapp-tutorial
24a548fbe96b5ecdd51e95e89b7302d85215d88e
[ "Unlicense" ]
1
2019-08-10T15:39:21.000Z
2019-08-10T15:39:21.000Z
core/models.py
spapas/hyperapp-tutorial
24a548fbe96b5ecdd51e95e89b7302d85215d88e
[ "Unlicense" ]
4
2017-09-07T21:43:01.000Z
2019-03-06T14:29:15.000Z
from django.db import models


class Genre(models.Model):
    """A movie genre; names are unique."""

    name = models.CharField(max_length=32, unique=True)

    def __str__(self):
        return self.name


class Job(models.Model):
    """A production job (director, actor, ...); names are unique."""

    name = models.CharField(max_length=32, unique=True)

    def __str__(self):
        return self.name


class Person(models.Model):
    """A person involved in movies, optionally linked to IMDb."""

    name = models.CharField(max_length=128)
    imdb_id = models.CharField(max_length=32, blank=True, null=True)
    birthday = models.DateField(blank=True, null=True)

    def __str__(self):
        return self.name


class Movie(models.Model):
    """A movie with its basic metadata and genre tags."""

    title = models.CharField(max_length=128)
    imdb_id = models.CharField(max_length=32, blank=True, null=True)
    # Stored as text, not an integer — presumably to allow values like
    # "2019" alongside unknowns; verify against existing data before changing.
    release_year = models.CharField(max_length=4)
    runtime = models.PositiveIntegerField()
    story = models.TextField()
    genres = models.ManyToManyField("Genre", blank=True)

    def __str__(self):
        return "{0} ({1})".format(self.title, self.release_year)


class MoviePerson(models.Model):
    """Through-model linking a person to a movie in a specific job."""

    # BUG FIX: on_delete must be a callable such as models.PROTECT.
    # Passing the string "PROTECT" is accepted at definition time but raises
    # TypeError ('str' object is not callable) when a referenced row is
    # deleted. Behavior intended by the author (PROTECT) is preserved.
    person = models.ForeignKey("Person", on_delete=models.PROTECT)
    movie = models.ForeignKey("Movie", on_delete=models.PROTECT)
    job = models.ForeignKey("Job", on_delete=models.PROTECT)
26.590909
68
0.697436
152
1,170
5.171053
0.309211
0.133588
0.160305
0.21374
0.483461
0.458015
0.458015
0.438931
0.396947
0.396947
0
0.017598
0.174359
1,170
43
69
27.209302
0.796066
0
0
0.392857
0
0
0.04188
0
0
0
0
0
0
1
0.142857
false
0
0.035714
0.142857
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
3
5205acd8a8b45f0288f377de72a0df4c82e445e6
342
py
Python
m_socket/custom_types.py
maxrskiy/m_socket-py
a6f011118cbc335bd4346b290298dabc702a8503
[ "MIT" ]
6
2021-08-29T15:29:43.000Z
2022-01-16T19:06:23.000Z
m_socket/custom_types.py
maxrskiy/m_socket-py
a6f011118cbc335bd4346b290298dabc702a8503
[ "MIT" ]
null
null
null
m_socket/custom_types.py
maxrskiy/m_socket-py
a6f011118cbc335bd4346b290298dabc702a8503
[ "MIT" ]
null
null
null
import socket


class m_session_socket:
    """Bundle a connected client socket with its session id and peer IP."""

    # BUG FIX: the annotations used `socket` (the module) as a type;
    # the actual type of a connection object is `socket.socket`.
    client_socket: socket.socket  # connected socket for this session
    socket_id: str                # opaque session identifier
    ip: str                       # peer address as a string

    def __init__(self, client_socket: socket.socket, socket_id: str, ip: str):
        """Store the connection and its identifying metadata."""
        self.client_socket = client_socket
        self.socket_id = socket_id
        self.ip = ip

    def send(self, data: bytes) -> None:
        """Send *data* to the client in full.

        BUG FIX: `socket.send` may transmit only part of the buffer;
        `sendall` retries until everything is sent (or raises). The method
        already discarded `send`'s return value, so callers are unaffected.
        """
        self.client_socket.sendall(data)
20.117647
71
0.657895
48
342
4.375
0.3125
0.285714
0.228571
0.228571
0.32381
0.32381
0.32381
0.32381
0
0
0
0
0.260234
342
16
72
21.375
0.83004
0
0
0
0
0
0
0
0
0
0
0
0
1
0.181818
false
0
0.090909
0
0.636364
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
520ab96a57dff09a40af6d8dad6dec49da4e087e
5,565
py
Python
tests/test_basic.py
luislhl/python-hathorlib
b8382e2ab9c6036458f22cf4ad9338f52ec99e70
[ "Apache-2.0" ]
null
null
null
tests/test_basic.py
luislhl/python-hathorlib
b8382e2ab9c6036458f22cf4ad9338f52ec99e70
[ "Apache-2.0" ]
null
null
null
tests/test_basic.py
luislhl/python-hathorlib
b8382e2ab9c6036458f22cf4ad9338f52ec99e70
[ "Apache-2.0" ]
null
null
null
""" Copyright (c) Hathor Labs and its affiliates. This source code is licensed under the MIT license found in the LICENSE file in the root directory of this source tree. """ import unittest from hathorlib import Block, TokenCreationTransaction, Transaction from hathorlib.scripts import create_output_script from hathorlib.utils import decode_address class HathorCommonsTestCase(unittest.TestCase): def test_block_basics(self): data = bytes.fromhex('000001ffffffe8b789180000001976a9147fd4ae0e4fb2d2854e76d359029d8078bb9' '9649e88ac40350000000000005e0f84a9000000000000000000000000000000278a7e') block = Block.create_from_struct(data) self.assertTrue(block.verify_pow()) self.assertEqual(data, bytes(block)) # These prints are here to test the methods. self.assertEqual( str(block), 'Block(nonce=2591358, timestamp=1578075305, version=0, weight=21.000000, ' 'hash=000006cb93385b8b87a545a1cbb6197e6caff600c12cc12fc54250d39c8088fc)' ) self.assertEqual( repr(block), 'Block(nonce=2591358, timestamp=1578075305, version=0, weight=21.000000, ' 'hash=000006cb93385b8b87a545a1cbb6197e6caff600c12cc12fc54250d39c8088fc, ' 'inputs=[], outputs=[TxOutput(token_data=0b0, value=100000000000)], parents=[], data=)') self.assertEqual(block.get_struct_nonce().hex(), '00000000000000000000000000278a7e') block.nonce += 1 block.update_hash() self.assertFalse(block.verify_pow()) def test_tx_basics(self): data = bytes.fromhex('0001000102000001e0e88216036e4e52872ba60a96df7570c3e29cc30eda6dd92ea0fd' '304c00006a4730450221009fa4798bb69f66035013063c13f1a970ec58111bcead277d' '9c93e45c2b6885fe022012e039b26cc4a4cb0a8a5abb7deb7bb78610ed362bf422efa2' '47db37c5a841e12102bc1213ea99ab55effcff760f94c09f8b1a0b7b990c01128d06b4' 'a8c5c5f41f8400089f0800001976a91438fb3bc92b76819e9c19ef7c079d327c8fcd19' '9288ac02de2d3800001976a9148d880c42ddcf78a2da5d06558f13515508720b4088ac' '403518509c63f9195ecfd7d40200001ea9d6e1d31da6893fcec594dc3fa8b6819ae126' '8c190f7a1441302226e2000007d1c5add7b9085037cfc591f1008dff4fe8a9158fd1a4' 
'840a6dd5d4e4e600d2da8d') tx = Transaction.create_from_struct(data) self.assertEqual(data, bytes(tx)) self.assertTrue(tx.verify_pow()) self.assertTrue(tx.is_transaction) self.assertFalse(tx.is_block) # These prints are here to test the methods. print(str(tx)) print(repr(tx)) tx.nonce += 1 tx.update_hash() self.assertFalse(tx.verify_pow()) def test_token_creation_basics(self): data = bytes.fromhex('00020104000005551d7740fd7d3c0acc50b5677fdd844f1225985aa431e1712af2a2fd' '8900006a473045022100a445edb5cd6c79a0a7b5ed837582fd65b8d511ee60b64fd076' 'e07bd8f63f75a202202dca24320bffc4c3ca2a07cdfff38f7c839bde70ed49ef634ac6' '588972836cab2103bfa995d676e3c0ed7b863c74cfef9683fab3163b42b6f21442326a' '023fc57fba0000264800001976a9146876f9578221fdb678d4e8376503098a9228b132' '88ac00004e2001001976a914031761ef85a24603203c97e75af355b83209f08f88ac00' '00000181001976a9149f091256cb98649c7c35df0aad44d7805710691e88ac00000002' '81001976a914b1d7a5ee505ad4d3b93ea1a5162ba83d5049ec4e88ac0109546f546865' '4d6f6f6e04f09f9a804034a52aec6cece75e0fc0e30200001a72272f48339fcc5d5ec5' 'deaf197855964b0eb912e8c6eefe00928b6cf600001055641c20b71871ed2c5c7d4096' 'a34f40888d79c25bce74421646e732dc01ff7369') tx = TokenCreationTransaction.create_from_struct(data) self.assertEqual(data, bytes(tx)) self.assertTrue(tx.verify_pow()) self.assertTrue(tx.is_transaction) self.assertFalse(tx.is_block) # These prints are here to test the methods. 
self.assertEqual( str(tx), 'TokenCreationTransaction(nonce=33518441, timestamp=1578090723, version=2, weight=20.645186, ' 'hash=00000828d80dd4cd809c959139f7b4261df41152f4cce65a8777eb1c3a1f9702, ' 'token_name=ToTheMoon, token_symbol=🚀)' ) self.assertEqual( repr(tx), 'TokenCreationTransaction(nonce=33518441, timestamp=1578090723, version=2, weight=20.645186, ' 'hash=00000828d80dd4cd809c959139f7b4261df41152f4cce65a8777eb1c3a1f9702, ' 'inputs=[TxInput(tx_id=000005551d7740fd7d3c0acc50b5677fdd844f1225985aa431e1712af2a2fd89, index=0)], ' 'outputs=[TxOutput(token_data=0b0, value=9800), TxOutput(token_data=0b1, value=20000), ' 'TxOutput(token_data=0b10000001, value=0b1), TxOutput(token_data=0b10000001, value=0b10)], ' 'parents=[\'00001a72272f48339fcc5d5ec5deaf197855964b0eb912e8c6eefe00928b6cf6\', ' '\'00001055641c20b71871ed2c5c7d4096a34f40888d79c25bce74421646e732dc\'])' ) tx.nonce += 1 tx.update_hash() self.assertFalse(tx.verify_pow()) def test_script_basics(self): create_output_script(decode_address('HVZjvL1FJ23kH3buGNuttVRsRKq66WHUVZ'))
51.527778
113
0.688949
366
5,565
10.357924
0.360656
0.031654
0.022422
0.015036
0.338961
0.295173
0.278291
0.278291
0.278291
0.278291
0
0.354862
0.240431
5,565
107
114
52.009346
0.541755
0.05319
0
0.289157
0
0.012048
0.484693
0.428028
0
0
0
0
0.216867
1
0.048193
false
0
0.048193
0
0.108434
0.024096
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
521c4aca3ca454501747cfb56d6ff6e190156f36
910
py
Python
actions.py
priyanka2109/chatbot-new-rasa
5d4731e9e63cb1e7177232e780f59ee2baebe1b4
[ "MIT" ]
null
null
null
actions.py
priyanka2109/chatbot-new-rasa
5d4731e9e63cb1e7177232e780f59ee2baebe1b4
[ "MIT" ]
null
null
null
actions.py
priyanka2109/chatbot-new-rasa
5d4731e9e63cb1e7177232e780f59ee2baebe1b4
[ "MIT" ]
null
null
null
# This files contains your custom actions which can be used to run # custom Python code. # # See this guide on how to implement these action: # https://rasa.com/docs/rasa/core/actions/#custom-actions/ # This is a simple example for a custom action which utters "Hello World!" from typing import Any, Text, Dict, List from rasa_sdk import Action, Tracker from rasa_sdk.executor import CollectingDispatcher from rasa_sdk.forms import FormAction class ActionHelloWorld(Action): def name(self) -> Text: return "info_form" def required_slots(tracker: Tracker) ->List[Text]: print("required_slots(tracker: Tracker)") return ["name","phone_num","email"] def run(self, dispatcher: CollectingDispatcher,tracker: Tracker,domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: dispatcher.utter_message(template="utter_submit") return []
31.37931
121
0.701099
120
910
5.241667
0.558333
0.038156
0.052464
0.085851
0
0
0
0
0
0
0
0
0.198901
910
28
122
32.5
0.862826
0.287912
0
0
0
0
0.110938
0.035938
0
0
0
0
0
1
0.230769
false
0
0.307692
0.076923
0.846154
0.076923
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
3
52258c95a7c2f42aa82995b8606de034d618a79e
2,014
py
Python
Contents/Libraries/Shared/babelfish/script.py
jippo015/Sub-Zero.bundle
734e0f7128c05c0f639e11e7dfc77daa1014064b
[ "MIT" ]
1,553
2015-11-09T02:17:06.000Z
2022-03-31T20:24:52.000Z
Contents/Libraries/Shared/babelfish/script.py
saiterlz/Sub-Zero.bundle
1a0bb9c3e4be84be35d46672907783363fe5a87b
[ "MIT" ]
691
2015-11-05T21:32:26.000Z
2022-03-17T10:52:45.000Z
Contents/Libraries/Shared/babelfish/script.py
saiterlz/Sub-Zero.bundle
1a0bb9c3e4be84be35d46672907783363fe5a87b
[ "MIT" ]
162
2015-11-06T19:38:55.000Z
2022-03-16T02:42:41.000Z
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 the BabelFish authors. All rights reserved.
# Use of this source code is governed by the 3-clause BSD license
# that can be found in the LICENSE file.
#
from collections import namedtuple

from pkg_resources import resource_stream  # @UnresolvedImport

from . import basestr


#: Script code to script name mapping
SCRIPTS = {}

#: List of countries in the ISO-15924 as namedtuple of code, number, name, french_name, pva and date
SCRIPT_MATRIX = []

#: The namedtuple used in the :data:`SCRIPT_MATRIX`
IsoScript = namedtuple('IsoScript', ['code', 'number', 'name', 'french_name', 'pva', 'date'])

# Populate SCRIPT_MATRIX and SCRIPTS from the bundled ISO-15924 data file.
f = resource_stream('babelfish', 'data/iso15924-utf8-20131012.txt')
f.readline()  # skip the header line
for raw_line in f:
    entry = raw_line.decode('utf-8').strip()
    # Blank lines and '#' comments carry no data.
    if entry and not entry.startswith('#'):
        script = IsoScript._make(entry.split(';'))
        SCRIPT_MATRIX.append(script)
        SCRIPTS[script.code] = script.name
f.close()


class Script(object):
    """A human writing system

    A script is represented by a 4-letter code from the ISO-15924 standard

    :param string script: 4-letter ISO-15924 script code

    """
    def __init__(self, script):
        if script not in SCRIPTS:
            raise ValueError('%r is not a valid script' % script)

        #: ISO-15924 4-letter script code
        self.code = script

    @property
    def name(self):
        """English name of the script"""
        return SCRIPTS[self.code]

    def __getstate__(self):
        return self.code

    def __setstate__(self, state):
        self.code = state

    def __hash__(self):
        return hash(self.code)

    def __eq__(self, other):
        # Plain strings compare against the code directly.
        if isinstance(other, basestr):
            return self.code == other
        # Anything that is not a Script (or a string) is never equal.
        return isinstance(other, Script) and self.code == other.code

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        return '<Script [%s]>' % self

    def __str__(self):
        return self.code
26.5
100
0.645482
271
2,014
4.645756
0.413284
0.050834
0.04448
0.031771
0.042891
0.042891
0
0
0
0
0
0.028871
0.243297
2,014
75
101
26.853333
0.797244
0.297418
0
0.047619
0
0
0.09058
0.022464
0
0
0
0
0
1
0.214286
false
0
0.071429
0.119048
0.52381
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
5226e456a3497eb9c0ee9fadd399d83bf003cb0c
19,279
py
Python
src/oci/data_safe/models/update_masking_policy_details.py
LaudateCorpus1/oci-python-sdk
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
src/oci/data_safe/models/update_masking_policy_details.py
LaudateCorpus1/oci-python-sdk
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
src/oci/data_safe/models/update_masking_policy_details.py
LaudateCorpus1/oci-python-sdk
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
# coding: utf-8 # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved. # This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401 from oci.decorators import init_model_state_from_kwargs @init_model_state_from_kwargs class UpdateMaskingPolicyDetails(object): """ Details to update a masking policy. """ def __init__(self, **kwargs): """ Initializes a new UpdateMaskingPolicyDetails object with values from keyword arguments. The following keyword arguments are supported (corresponding to the getters/setters of this class): :param display_name: The value to assign to the display_name property of this UpdateMaskingPolicyDetails. :type display_name: str :param description: The value to assign to the description property of this UpdateMaskingPolicyDetails. :type description: str :param is_drop_temp_tables_enabled: The value to assign to the is_drop_temp_tables_enabled property of this UpdateMaskingPolicyDetails. :type is_drop_temp_tables_enabled: bool :param is_redo_logging_enabled: The value to assign to the is_redo_logging_enabled property of this UpdateMaskingPolicyDetails. :type is_redo_logging_enabled: bool :param is_refresh_stats_enabled: The value to assign to the is_refresh_stats_enabled property of this UpdateMaskingPolicyDetails. :type is_refresh_stats_enabled: bool :param parallel_degree: The value to assign to the parallel_degree property of this UpdateMaskingPolicyDetails. :type parallel_degree: str :param recompile: The value to assign to the recompile property of this UpdateMaskingPolicyDetails. :type recompile: str :param pre_masking_script: The value to assign to the pre_masking_script property of this UpdateMaskingPolicyDetails. 
:type pre_masking_script: str :param post_masking_script: The value to assign to the post_masking_script property of this UpdateMaskingPolicyDetails. :type post_masking_script: str :param column_source: The value to assign to the column_source property of this UpdateMaskingPolicyDetails. :type column_source: oci.data_safe.models.UpdateColumnSourceDetails :param freeform_tags: The value to assign to the freeform_tags property of this UpdateMaskingPolicyDetails. :type freeform_tags: dict(str, str) :param defined_tags: The value to assign to the defined_tags property of this UpdateMaskingPolicyDetails. :type defined_tags: dict(str, dict(str, object)) """ self.swagger_types = { 'display_name': 'str', 'description': 'str', 'is_drop_temp_tables_enabled': 'bool', 'is_redo_logging_enabled': 'bool', 'is_refresh_stats_enabled': 'bool', 'parallel_degree': 'str', 'recompile': 'str', 'pre_masking_script': 'str', 'post_masking_script': 'str', 'column_source': 'UpdateColumnSourceDetails', 'freeform_tags': 'dict(str, str)', 'defined_tags': 'dict(str, dict(str, object))' } self.attribute_map = { 'display_name': 'displayName', 'description': 'description', 'is_drop_temp_tables_enabled': 'isDropTempTablesEnabled', 'is_redo_logging_enabled': 'isRedoLoggingEnabled', 'is_refresh_stats_enabled': 'isRefreshStatsEnabled', 'parallel_degree': 'parallelDegree', 'recompile': 'recompile', 'pre_masking_script': 'preMaskingScript', 'post_masking_script': 'postMaskingScript', 'column_source': 'columnSource', 'freeform_tags': 'freeformTags', 'defined_tags': 'definedTags' } self._display_name = None self._description = None self._is_drop_temp_tables_enabled = None self._is_redo_logging_enabled = None self._is_refresh_stats_enabled = None self._parallel_degree = None self._recompile = None self._pre_masking_script = None self._post_masking_script = None self._column_source = None self._freeform_tags = None self._defined_tags = None @property def display_name(self): """ Gets the display_name of this 
UpdateMaskingPolicyDetails. The display name of the masking policy. The name does not have to be unique, and it's changeable. :return: The display_name of this UpdateMaskingPolicyDetails. :rtype: str """ return self._display_name @display_name.setter def display_name(self, display_name): """ Sets the display_name of this UpdateMaskingPolicyDetails. The display name of the masking policy. The name does not have to be unique, and it's changeable. :param display_name: The display_name of this UpdateMaskingPolicyDetails. :type: str """ self._display_name = display_name @property def description(self): """ Gets the description of this UpdateMaskingPolicyDetails. The description of the masking policy. :return: The description of this UpdateMaskingPolicyDetails. :rtype: str """ return self._description @description.setter def description(self, description): """ Sets the description of this UpdateMaskingPolicyDetails. The description of the masking policy. :param description: The description of this UpdateMaskingPolicyDetails. :type: str """ self._description = description @property def is_drop_temp_tables_enabled(self): """ Gets the is_drop_temp_tables_enabled of this UpdateMaskingPolicyDetails. Indicates if the temporary tables created during a masking operation should be dropped after masking. It's enabled by default. Set this attribute to false to preserve the temporary tables. Masking creates temporary tables that map the original sensitive data values to mask values. By default, these temporary tables are dropped after masking. But, in some cases, you may want to preserve this information to track how masking changed your data. Note that doing so compromises security. These tables must be dropped before the database is available for unprivileged users. :return: The is_drop_temp_tables_enabled of this UpdateMaskingPolicyDetails. 
:rtype: bool """ return self._is_drop_temp_tables_enabled @is_drop_temp_tables_enabled.setter def is_drop_temp_tables_enabled(self, is_drop_temp_tables_enabled): """ Sets the is_drop_temp_tables_enabled of this UpdateMaskingPolicyDetails. Indicates if the temporary tables created during a masking operation should be dropped after masking. It's enabled by default. Set this attribute to false to preserve the temporary tables. Masking creates temporary tables that map the original sensitive data values to mask values. By default, these temporary tables are dropped after masking. But, in some cases, you may want to preserve this information to track how masking changed your data. Note that doing so compromises security. These tables must be dropped before the database is available for unprivileged users. :param is_drop_temp_tables_enabled: The is_drop_temp_tables_enabled of this UpdateMaskingPolicyDetails. :type: bool """ self._is_drop_temp_tables_enabled = is_drop_temp_tables_enabled @property def is_redo_logging_enabled(self): """ Gets the is_redo_logging_enabled of this UpdateMaskingPolicyDetails. Indicates if redo logging is enabled during a masking operation. It's disabled by default. Set this attribute to true to enable redo logging. By default, masking disables redo logging and flashback logging to purge any original unmasked data from logs. However, in certain circumstances when you only want to test masking, rollback changes, and retry masking, you could enable logging and use a flashback database to retrieve the original unmasked data after it has been masked. :return: The is_redo_logging_enabled of this UpdateMaskingPolicyDetails. :rtype: bool """ return self._is_redo_logging_enabled @is_redo_logging_enabled.setter def is_redo_logging_enabled(self, is_redo_logging_enabled): """ Sets the is_redo_logging_enabled of this UpdateMaskingPolicyDetails. Indicates if redo logging is enabled during a masking operation. It's disabled by default. 
Set this attribute to true to enable redo logging. By default, masking disables redo logging and flashback logging to purge any original unmasked data from logs. However, in certain circumstances when you only want to test masking, rollback changes, and retry masking, you could enable logging and use a flashback database to retrieve the original unmasked data after it has been masked. :param is_redo_logging_enabled: The is_redo_logging_enabled of this UpdateMaskingPolicyDetails. :type: bool """ self._is_redo_logging_enabled = is_redo_logging_enabled @property def is_refresh_stats_enabled(self): """ Gets the is_refresh_stats_enabled of this UpdateMaskingPolicyDetails. Indicates if statistics gathering is enabled. It's enabled by default. Set this attribute to false to disable statistics gathering. The masking process gathers statistics on masked database tables after masking completes. :return: The is_refresh_stats_enabled of this UpdateMaskingPolicyDetails. :rtype: bool """ return self._is_refresh_stats_enabled @is_refresh_stats_enabled.setter def is_refresh_stats_enabled(self, is_refresh_stats_enabled): """ Sets the is_refresh_stats_enabled of this UpdateMaskingPolicyDetails. Indicates if statistics gathering is enabled. It's enabled by default. Set this attribute to false to disable statistics gathering. The masking process gathers statistics on masked database tables after masking completes. :param is_refresh_stats_enabled: The is_refresh_stats_enabled of this UpdateMaskingPolicyDetails. :type: bool """ self._is_refresh_stats_enabled = is_refresh_stats_enabled @property def parallel_degree(self): """ Gets the parallel_degree of this UpdateMaskingPolicyDetails. Specifies options to enable parallel execution when running data masking. Allowed values are 'NONE' (no parallelism), 'DEFAULT' (the Oracle Database computes the optimum degree of parallelism) or an integer value to be used as the degree of parallelism. 
Parallel execution helps effectively use multiple CPUsi and improve masking performance. Refer to the Oracle Database parallel execution framework when choosing an explicit degree of parallelism. :return: The parallel_degree of this UpdateMaskingPolicyDetails. :rtype: str """ return self._parallel_degree @parallel_degree.setter def parallel_degree(self, parallel_degree): """ Sets the parallel_degree of this UpdateMaskingPolicyDetails. Specifies options to enable parallel execution when running data masking. Allowed values are 'NONE' (no parallelism), 'DEFAULT' (the Oracle Database computes the optimum degree of parallelism) or an integer value to be used as the degree of parallelism. Parallel execution helps effectively use multiple CPUsi and improve masking performance. Refer to the Oracle Database parallel execution framework when choosing an explicit degree of parallelism. :param parallel_degree: The parallel_degree of this UpdateMaskingPolicyDetails. :type: str """ self._parallel_degree = parallel_degree @property def recompile(self): """ Gets the recompile of this UpdateMaskingPolicyDetails. Specifies how to recompile invalid objects post data masking. Allowed values are 'SERIAL' (recompile in serial), 'PARALLEL' (recompile in parallel), 'NONE' (do not recompile). If it's set to PARALLEL, the value of parallelDegree attribute is used. :return: The recompile of this UpdateMaskingPolicyDetails. :rtype: str """ return self._recompile @recompile.setter def recompile(self, recompile): """ Sets the recompile of this UpdateMaskingPolicyDetails. Specifies how to recompile invalid objects post data masking. Allowed values are 'SERIAL' (recompile in serial), 'PARALLEL' (recompile in parallel), 'NONE' (do not recompile). If it's set to PARALLEL, the value of parallelDegree attribute is used. :param recompile: The recompile of this UpdateMaskingPolicyDetails. 
:type: str """ self._recompile = recompile @property def pre_masking_script(self): """ Gets the pre_masking_script of this UpdateMaskingPolicyDetails. A pre-masking script, which can contain SQL and PL/SQL statements. It's executed before the core masking script generated using the masking policy. It's usually used to perform any preparation or prerequisite work before masking data. :return: The pre_masking_script of this UpdateMaskingPolicyDetails. :rtype: str """ return self._pre_masking_script @pre_masking_script.setter def pre_masking_script(self, pre_masking_script): """ Sets the pre_masking_script of this UpdateMaskingPolicyDetails. A pre-masking script, which can contain SQL and PL/SQL statements. It's executed before the core masking script generated using the masking policy. It's usually used to perform any preparation or prerequisite work before masking data. :param pre_masking_script: The pre_masking_script of this UpdateMaskingPolicyDetails. :type: str """ self._pre_masking_script = pre_masking_script @property def post_masking_script(self): """ Gets the post_masking_script of this UpdateMaskingPolicyDetails. A post-masking script, which can contain SQL and PL/SQL statements. It's executed after the core masking script generated using the masking policy. It's usually used to perform additional transformation or cleanup work after masking. :return: The post_masking_script of this UpdateMaskingPolicyDetails. :rtype: str """ return self._post_masking_script @post_masking_script.setter def post_masking_script(self, post_masking_script): """ Sets the post_masking_script of this UpdateMaskingPolicyDetails. A post-masking script, which can contain SQL and PL/SQL statements. It's executed after the core masking script generated using the masking policy. It's usually used to perform additional transformation or cleanup work after masking. :param post_masking_script: The post_masking_script of this UpdateMaskingPolicyDetails. 
:type: str """ self._post_masking_script = post_masking_script @property def column_source(self): """ Gets the column_source of this UpdateMaskingPolicyDetails. :return: The column_source of this UpdateMaskingPolicyDetails. :rtype: oci.data_safe.models.UpdateColumnSourceDetails """ return self._column_source @column_source.setter def column_source(self, column_source): """ Sets the column_source of this UpdateMaskingPolicyDetails. :param column_source: The column_source of this UpdateMaskingPolicyDetails. :type: oci.data_safe.models.UpdateColumnSourceDetails """ self._column_source = column_source @property def freeform_tags(self): """ Gets the freeform_tags of this UpdateMaskingPolicyDetails. Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see `Resource Tags`__ Example: `{\"Department\": \"Finance\"}` __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm :return: The freeform_tags of this UpdateMaskingPolicyDetails. :rtype: dict(str, str) """ return self._freeform_tags @freeform_tags.setter def freeform_tags(self, freeform_tags): """ Sets the freeform_tags of this UpdateMaskingPolicyDetails. Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see `Resource Tags`__ Example: `{\"Department\": \"Finance\"}` __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm :param freeform_tags: The freeform_tags of this UpdateMaskingPolicyDetails. :type: dict(str, str) """ self._freeform_tags = freeform_tags @property def defined_tags(self): """ Gets the defined_tags of this UpdateMaskingPolicyDetails. Defined tags for this resource. Each key is predefined and scoped to a namespace. 
For more information, see `Resource Tags`__ Example: `{\"Operations\": {\"CostCenter\": \"42\"}}` __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm :return: The defined_tags of this UpdateMaskingPolicyDetails. :rtype: dict(str, dict(str, object)) """ return self._defined_tags @defined_tags.setter def defined_tags(self, defined_tags): """ Sets the defined_tags of this UpdateMaskingPolicyDetails. Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see `Resource Tags`__ Example: `{\"Operations\": {\"CostCenter\": \"42\"}}` __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm :param defined_tags: The defined_tags of this UpdateMaskingPolicyDetails. :type: dict(str, dict(str, object)) """ self._defined_tags = defined_tags def __repr__(self): return formatted_flat_dict(self) def __eq__(self, other): if other is None: return False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self == other
42.093886
245
0.699933
2,315
19,279
5.638445
0.126134
0.02804
0.147093
0.066192
0.801655
0.711024
0.634567
0.559488
0.52057
0.478817
0
0.001505
0.241973
19,279
457
246
42.185996
0.891679
0.64438
0
0.096
0
0
0.131907
0.042979
0
0
0
0
0
1
0.224
false
0
0.016
0.016
0.376
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
522bf47a4e8c9c4103287f1ce3603d7071a47c47
3,935
py
Python
src/menten_gcn/decorators/standard.py
MentenAI/menten_gcn
bcc7642cb32ab4e60a97687de17c1aa2dc4b5421
[ "MIT" ]
11
2020-12-15T15:36:47.000Z
2022-03-10T19:23:36.000Z
src/menten_gcn/decorators/standard.py
MentenAI/menten_gcn
bcc7642cb32ab4e60a97687de17c1aa2dc4b5421
[ "MIT" ]
2
2021-01-14T15:04:59.000Z
2021-01-14T19:24:00.000Z
src/menten_gcn/decorators/standard.py
MentenAI/menten_gcn
bcc7642cb32ab4e60a97687de17c1aa2dc4b5421
[ "MIT" ]
null
null
null
import math

from menten_gcn.decorators.base import Decorator

# from menten_gcn.decorators.geometry import *
# from menten_gcn.decorators.sequence import *


class BareBonesDecorator(Decorator):
    """Default decorator present in every DataMaker.

    Serves as the foundation the other decorators build upon: it marks
    focus nodes and flags edges between polymer-bonded residues.

    - 1 Node Feature
    - 1 Edge Feature
    """

    def __init__(self):
        self.focused_resids = []

    def get_version_name(self):
        return "BareBonesDecorator"

    def set_focused_resids(self, focused_resids):
        self.focused_resids = focused_resids

    def n_node_features(self):
        # Single node feature: "is this a focus residue?"
        return 1

    def calc_node_features(self, wrapped_pose, resid, dict_cache=None):
        return [1.0 if resid in self.focused_resids else 0.0]

    def describe_node_features(self):
        return [
            "1 if the node is a focus residue, 0 otherwise",
        ]

    def n_edge_features(self):
        # Single edge feature: "are the two residues polymer bonded?"
        return 1

    def calc_edge_features(self, wrapped_pose, resid1, resid2, dict_cache=None):
        bonded = 1.0 if wrapped_pose.residues_are_polymer_bonded(resid1, resid2) else 0.0
        feats = [bonded]
        return feats, feats

    def describe_edge_features(self):
        return [
            "1.0 if the two residues are polymer-bonded, 0.0 otherwise",
        ]


class SequenceSeparation(Decorator):
    """Distance between two residues in sequence space.

    Reported as the number of residues between the two residues plus one;
    -1.0 whenever the two residues belong to different chains.

    - 0 Node Features
    - 1 Edge Feature

    Parameters
    ---------
    ln: bool
        Report the natural log of the distance instead of the raw count.
        Does not apply to -1 values
    """

    def __init__(self, ln: bool = True):
        self.ln = ln

    def get_version_name(self):
        return "SequenceSeparation"

    def n_node_features(self):
        return 0

    def n_edge_features(self):
        return 1

    def calc_edge_features(self, wrapped_pose, resid1, resid2, dict_cache=None):
        # Cross-chain pairs get the sentinel value -1.0.
        if not wrapped_pose.resids_are_same_chain(resid1, resid2):
            return [-1.0, ], [-1.0]
        separation = abs(resid1 - resid2)
        assert separation >= 0
        if self.ln:
            separation = math.log(separation)
        return [separation, ], [separation, ]

    def describe_edge_features(self):
        prefix = ("Natural Log of the sequence distance between the two residues "
                  if self.ln else
                  "The sequence distance between the two residues ")
        return [
            prefix
            + "(i.e., number of residues between these two residues in sequence space, plus one). "
            + "-1.0 if the two residues belong to different chains. (symmetric)",
        ]


class SameChain(Decorator):
    """Whether two residues share a protein chain (1) or not (0).

    - 0 Node Features
    - 1 Edge Feature
    """

    def get_version_name(self):
        return "SameChain"

    def n_node_features(self):
        return 0

    def n_edge_features(self):
        return 1

    def calc_edge_features(self, wrapped_pose, resid1, resid2, dict_cache=None):
        if wrapped_pose.resids_are_same_chain(resid1, resid2):
            return [1.0, ], [1.0]
        return [0.0, ], [0.0, ]

    def describe_edge_features(self):
        return ["1 if the two residues belong to the same chain, otherwise 0. (symmetric)", ]
28.514493
103
0.620584
513
3,935
4.619883
0.226121
0.070886
0.060759
0.040506
0.537975
0.480169
0.386498
0.357806
0.357806
0.357806
0
0.024955
0.297332
3,935
137
104
28.722628
0.832188
0.212961
0
0.447368
0
0
0.207888
0
0
0
0
0
0.013158
1
0.263158
false
0
0.026316
0.157895
0.605263
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
522ffd9522b9927cd2379fa3b6d3ce8117bd263c
258
py
Python
ita/web/models/__init__.py
Tezar/Assigment-generator
2a3d0d10f1e3b215a79efc727e26d2ebbf1bb7a3
[ "Apache-2.0" ]
null
null
null
ita/web/models/__init__.py
Tezar/Assigment-generator
2a3d0d10f1e3b215a79efc727e26d2ebbf1bb7a3
[ "Apache-2.0" ]
null
null
null
ita/web/models/__init__.py
Tezar/Assigment-generator
2a3d0d10f1e3b215a79efc727e26d2ebbf1bb7a3
[ "Apache-2.0" ]
1
2020-07-24T05:48:47.000Z
2020-07-24T05:48:47.000Z
# Package facade: imports the individual model modules and re-exports each
# module's ``Model`` class under a friendlier top-level name.
from . import assigment
from . import lecture
from . import group
from . import user

# todo: hook on ondelete (cascade/cleanup handling not wired up yet)
Assigment = assigment.Model
Lecture = lecture.Model
Group = group.Model
User = user.Model

# NOTE(review): "Assigment" matches the package's existing (misspelled) module
# name; renaming it would break importers of this public API.
__all__=["Assigment", "Lecture", "Group", "User"]
17.2
49
0.72093
33
258
5.515152
0.363636
0.21978
0
0
0
0
0
0
0
0
0
0
0.170543
258
15
49
17.2
0.850467
0.089147
0
0
0
0
0.106383
0
0
0
0
0.066667
0
1
0
false
0
0.444444
0
0.444444
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
3
524d67d27c4328ee3a215d5767fc655ff8be9d81
200
py
Python
meiduo02/meiduo02/apps/oauth/urls.py
physili/django_test
09aa61f36e5d32f98af11057ea206dde8d082ac7
[ "MIT" ]
1
2020-04-25T04:50:30.000Z
2020-04-25T04:50:30.000Z
meiduo02/meiduo02/apps/oauth/urls.py
physili/django_test
09aa61f36e5d32f98af11057ea206dde8d082ac7
[ "MIT" ]
null
null
null
meiduo02/meiduo02/apps/oauth/urls.py
physili/django_test
09aa61f36e5d32f98af11057ea206dde8d082ac7
[ "MIT" ]
null
null
null
# URL routes for the QQ OAuth flow.
# NOTE(review): route purposes below are inferred from the view names --
# confirm against views.py.
from . import views
from django.urls import re_path

urlpatterns = [
    # Presumably hands the client the QQ authorization URL.
    re_path(r'^qq/authorization/$', views.QQURLView.as_view()),
    # Presumably the callback hit by QQ after the user authenticates.
    re_path(r'^oauth_callback/$', views.QQUserView.as_view()),
]
20
63
0.705
28
200
4.821429
0.607143
0.133333
0.103704
0
0
0
0
0
0
0
0
0
0.135
200
9
64
22.222222
0.780347
0
0
0
0
0
0.180905
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
525ca8a11c104133087f0de99b44fcaf084fdcd2
188
py
Python
reference/sketchbook/lex/memo.py
JaDogg/__py_playground
416f88db10e03f5380bcb5cfcad0bca50ffa657c
[ "MIT" ]
1
2015-10-28T00:00:16.000Z
2015-10-28T00:00:16.000Z
reference/sketchbook/regex/memo.py
JaDogg/__py_playground
416f88db10e03f5380bcb5cfcad0bca50ffa657c
[ "MIT" ]
null
null
null
reference/sketchbook/regex/memo.py
JaDogg/__py_playground
416f88db10e03f5380bcb5cfcad0bca50ffa657c
[ "MIT" ]
null
null
null
""" Memoization decorator. """ def memoize(f): memos = {} def memoized(*args): if args not in memos: memos[args] = f(*args) return memos[args] return memoized
17.090909
52
0.585106
23
188
4.782609
0.521739
0.163636
0
0
0
0
0
0
0
0
0
0
0.276596
188
10
53
18.8
0.808824
0.117021
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
5269bf906d2d420df5d14fa708b646e84b224ab0
196
py
Python
examples/fib.py
evtn/birp
fe54669f296ca99eb8cc5296ab3ec4896ad8cfc0
[ "MIT" ]
21
2021-10-09T07:30:12.000Z
2022-01-04T12:15:14.000Z
examples/fib.py
evtn/birp
fe54669f296ca99eb8cc5296ab3ec4896ad8cfc0
[ "MIT" ]
1
2021-10-18T10:49:59.000Z
2021-10-18T10:49:59.000Z
examples/fib.py
evtn/birp
fe54669f296ca99eb8cc5296ab3ec4896ad8cfc0
[ "MIT" ]
null
null
null
from functools import cache as кэш


@кэш
def фибоначчи(номер):
    """Return the номер-th Fibonacci number via memoized recursion."""
    if номер in (1, 2):
        return 1
    if номер == 0:
        return 0
    # Memoization keeps the double recursion linear in номер.
    return фибоначчи(номер - 2) + фибоначчи(номер - 1)
17.818182
54
0.602041
29
196
4.068966
0.517241
0.355932
0
0
0
0
0
0
0
0
0
0.051471
0.306122
196
10
55
19.6
0.816176
0
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0.125
0
0.625
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
5270f212583ae75fa1f6727eddc8739ef8d02673
3,093
py
Python
grok_pat_config.py
garyelephant/dolphin
68bf3b3612c78b3094e332c34787751c948a0246
[ "MIT" ]
null
null
null
grok_pat_config.py
garyelephant/dolphin
68bf3b3612c78b3094e332c34787751c948a0246
[ "MIT" ]
null
null
null
grok_pat_config.py
garyelephant/dolphin
68bf3b3612c78b3094e332c34787751c948a0246
[ "MIT" ]
null
null
null
class CaseInsensitiveKey(object):
    """Dict key wrapper that hashes and compares case-insensitively.

    ``str(key)`` still returns the original spelling.
    """

    def __init__(self, key):
        self.key = key

    def __hash__(self):
        # Hash the lowercased text so 'IP' and 'ip' land in the same bucket.
        return hash(self.key.lower())

    def __eq__(self, other):
        # Fix: return NotImplemented for foreign types instead of raising
        # AttributeError (the original crashed on e.g. key == 'plain str').
        if not isinstance(other, CaseInsensitiveKey):
            return NotImplemented
        return self.key.lower() == other.key.lower()

    def __str__(self):
        return self.key


# Mapping from a human-readable field description to the grok pattern name
# used when building log-parsing expressions.  Keys are case-insensitive.
GROK_PATTERN_CONF = dict()

# Basic String
GROK_PATTERN_CONF[CaseInsensitiveKey('String')] = 'DATA'  # DATA or NOTSPACE ?
GROK_PATTERN_CONF[CaseInsensitiveKey('Quote String')] = 'QS'
GROK_PATTERN_CONF[CaseInsensitiveKey('UUID')] = 'UUID'
GROK_PATTERN_CONF[CaseInsensitiveKey('Log Level')] = 'LOGLEVEL'

# Networking
GROK_PATTERN_CONF[CaseInsensitiveKey('IP')] = 'IP'
GROK_PATTERN_CONF[CaseInsensitiveKey('Host/Domain')] = 'HOST'
GROK_PATTERN_CONF[CaseInsensitiveKey('Host:Port')] = 'HOSTPORT'
GROK_PATTERN_CONF[CaseInsensitiveKey('IP or Host/Domain')] = 'IPORHOST'

# Path
GROK_PATTERN_CONF[CaseInsensitiveKey('Full URL')] = 'URI'  # http://www.google.com?search=mj
GROK_PATTERN_CONF[CaseInsensitiveKey('Url Path')] = 'URIPATHPARAM'
GROK_PATTERN_CONF[CaseInsensitiveKey('Unix Path')] = 'UNIXPATH'

# Json
# TODO: json regular expression
# GROK_PATTERN_CONF[CaseInsensitiveKey('json')] = ...

# Number
GROK_PATTERN_CONF[CaseInsensitiveKey('Number')] = 'NUMBER'  # Integer/Long OR Float/Double
GROK_PATTERN_CONF[CaseInsensitiveKey('Integer/Long')] = 'INT'
# TODO: GROK_PATTERN_CONF[CaseInsensitiveKey('Float/Double')] = ...

# Date
GROK_PATTERN_CONF[CaseInsensitiveKey('Year')] = 'YEAR'
GROK_PATTERN_CONF[CaseInsensitiveKey('Month')] = 'MONTH'
GROK_PATTERN_CONF[CaseInsensitiveKey('Month Number')] = 'MONTHNUM'
GROK_PATTERN_CONF[CaseInsensitiveKey('Day')] = 'DAY'
GROK_PATTERN_CONF[CaseInsensitiveKey('Hour')] = 'HOUR'
GROK_PATTERN_CONF[CaseInsensitiveKey('Minute')] = 'MINUTE'
GROK_PATTERN_CONF[CaseInsensitiveKey('Second')] = 'SECOND'
# TODO: GROK_PATTERN_CONF[CaseInsensitiveKey(...)] = 'TZ'
GROK_PATTERN_CONF[CaseInsensitiveKey('ISO8601')] = 'TIMESTAMP_ISO8601'
GROK_PATTERN_CONF[CaseInsensitiveKey('HTTPDATE')] = 'HTTPDATE'

# TODO: support custom patterns.
# NOTE(review): a fully commented-out plain-dict duplicate of the table above
# was removed; it carried no additional information.
36.821429
95
0.643065
320
3,093
5.984375
0.221875
0.160836
0.219321
0.448042
0.239164
0.100261
0.073107
0.035509
0.035509
0
0
0.00639
0.19043
3,093
84
96
36.821429
0.758387
0.395086
0
0
0
0
0.166298
0
0
0
0
0.011905
0
1
0.125
false
0
0
0.09375
0.25
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
3
5274cfcbbea41ead5e4d0d7ce47c38af50c6414c
211
bzl
Python
test/com/facebook/buck/core/starlark/rule/testdata/compatible_with/defs.bzl
Unknoob/buck
2dfc734354b326f2f66896dde7746a11965d5a13
[ "Apache-2.0" ]
8,027
2015-01-02T05:31:44.000Z
2022-03-31T07:08:09.000Z
test/com/facebook/buck/core/starlark/rule/testdata/compatible_with/defs.bzl
Unknoob/buck
2dfc734354b326f2f66896dde7746a11965d5a13
[ "Apache-2.0" ]
2,355
2015-01-01T15:30:53.000Z
2022-03-30T20:21:16.000Z
test/com/facebook/buck/core/starlark/rule/testdata/compatible_with/defs.bzl
Unknoob/buck
2dfc734354b326f2f66896dde7746a11965d5a13
[ "Apache-2.0" ]
1,280
2015-01-09T03:29:04.000Z
2022-03-30T15:14:14.000Z
""" Module docstring """ def _write_file_impl(ctx): f = ctx.actions.declare_file("out.txt") ctx.actions.write(f, "contents") write_file = rule( attrs = {}, implementation = _write_file_impl, )
19.181818
43
0.658768
27
211
4.851852
0.592593
0.206107
0.198473
0
0
0
0
0
0
0
0
0
0.184834
211
10
44
21.1
0.761628
0.075829
0
0
0
0
0.080214
0
0
0
0
0
0
1
0.142857
false
0
0
0
0.142857
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
527ccc2ed40b2f3520f673ecbf6be77b7dc3e6d8
144
py
Python
uplink/apps.py
fga-eps-mds/2017.2-SiGI-Op_API
4532019c15414fd17e06bb3aa78501886e00da1d
[ "BSD-3-Clause" ]
6
2017-08-24T13:18:21.000Z
2017-10-03T18:06:13.000Z
uplink/apps.py
fga-gpp-mds/2017.2-Grupo9
4532019c15414fd17e06bb3aa78501886e00da1d
[ "BSD-3-Clause" ]
173
2017-08-31T15:29:01.000Z
2017-12-14T13:40:13.000Z
uplink/apps.py
fga-gpp-mds/2017.2-SiGI-Op_API
4532019c15414fd17e06bb3aa78501886e00da1d
[ "BSD-3-Clause" ]
2
2018-11-19T10:33:00.000Z
2019-06-19T22:35:43.000Z
from django.apps import AppConfig


class UplinkConfig(AppConfig):
    # App label registered with Django's application registry for "uplink".
    name = 'uplink'


class SegmentsConfig(AppConfig):
    # App label for the "segments" app.
    name = 'segments'
14.4
33
0.729167
15
144
7
0.733333
0.247619
0
0
0
0
0
0
0
0
0
0
0.180556
144
9
34
16
0.889831
0
0
0
0
0
0.097222
0
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
528bb65ff565c33ab7bc33293016ee3dcf4d1335
879
py
Python
Tradable/items/migrations/0005_auto_20190403_2333.py
eddylau328/tradable
2082da78697484761ddfb5480b28dc5e19aa0958
[ "MIT" ]
null
null
null
Tradable/items/migrations/0005_auto_20190403_2333.py
eddylau328/tradable
2082da78697484761ddfb5480b28dc5e19aa0958
[ "MIT" ]
null
null
null
Tradable/items/migrations/0005_auto_20190403_2333.py
eddylau328/tradable
2082da78697484761ddfb5480b28dc5e19aa0958
[ "MIT" ]
null
null
null
# Generated by Django 2.1.7 on 2019-04-03 15:33 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('items', '0004_auto_20190403_2240'), ] operations = [ migrations.AlterField( model_name='item', name='condition', field=models.TextField(null=True), ), migrations.AlterField( model_name='item', name='description', field=models.TextField(null=True), ), migrations.AlterField( model_name='item', name='name', field=models.CharField(max_length=100, null=True), ), migrations.AlterField( model_name='item', name='price', field=models.DecimalField(decimal_places=2, max_digits=10000, null=True), ), ]
25.852941
85
0.557452
86
879
5.581395
0.534884
0.166667
0.208333
0.241667
0.441667
0.441667
0.364583
0.364583
0.270833
0.270833
0
0.06734
0.324232
879
33
86
26.636364
0.740741
0.051195
0
0.518519
1
0
0.08774
0.027644
0
0
0
0
0
1
0
false
0
0.037037
0
0.148148
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
529340507c85d546884f07fcbeed088d783fafbc
6,757
py
Python
tests/tools/test_image_colors_manip_relabeller.py
nipy/nilabels
b065febc611eef638785651b4642d53bb61f1321
[ "MIT" ]
15
2019-04-09T21:47:47.000Z
2022-02-01T14:11:51.000Z
tests/tools/test_image_colors_manip_relabeller.py
SebastianoF/LabelsManager
b065febc611eef638785651b4642d53bb61f1321
[ "MIT" ]
4
2018-08-24T09:25:49.000Z
2018-08-29T10:47:50.000Z
tests/tools/test_image_colors_manip_relabeller.py
nipy/nilabels
b065febc611eef638785651b4642d53bb61f1321
[ "MIT" ]
1
2019-04-06T20:49:48.000Z
2019-04-06T20:49:48.000Z
"""Unit tests for nilabels' relabelling helpers.

NOTE(review): several tests use ``expected_output = data[:]`` -- for a numpy
array that is a *view*, not a copy, so the later in-place edit also mutates
``data``.  The asserts still appear to pass, but ``data.copy()`` would state
the intent unambiguously; confirm against relabeller's copy semantics.
"""
import numpy as np
import pytest

from nilabels.tools.image_colors_manipulations.relabeller import relabeller, permute_labels, erase_labels, \
    assign_all_other_labels_the_same_value, keep_only_one_label, relabel_half_side_one_label


def test_relabeller_basic():
    # Relabel 0..9 to 9..0: the result must be the reversed ramp.
    data = np.array(range(10)).reshape(2, 5)
    relabelled_data = relabeller(data, range(10), range(10)[::-1])
    np.testing.assert_array_equal(relabelled_data, np.array(range(10)[::-1]).reshape(2,5))


def test_relabeller_one_element():
    data = np.array(range(10)).reshape(2, 5)
    relabelled_data = relabeller(data, 0, 1, verbose=1)
    expected_output = data[:]  # NOTE(review): view, not copy -- see module docstring
    expected_output[0, 0] = 1
    np.testing.assert_array_equal(relabelled_data, expected_output)


def test_relabeller_one_element_not_in_array():
    # Relabelling a label absent from the image must leave it unchanged.
    data = np.array(range(10)).reshape(2, 5)
    relabelled_data = relabeller(data, 15, 1, verbose=1)
    np.testing.assert_array_equal(relabelled_data, data)


def test_relabeller_wrong_input():
    # Mismatched old/new label list lengths must raise IOError.
    data = np.array(range(10)).reshape(2, 5)
    with np.testing.assert_raises(IOError):
        relabeller(data, [1, 2], [3, 4, 4])


def test_permute_labels_invalid_permutation():
    invalid_permutation = [[3, 3, 3], [1, 1, 1]]
    with pytest.raises(IOError):
        permute_labels(np.zeros([3, 3]), invalid_permutation)


def test_permute_labels_valid_permutation():
    # Swap labels 2 and 3 everywhere.
    data = np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]])
    valid_permutation = [[1, 2, 3], [1, 3, 2]]
    perm_data = permute_labels(data, valid_permutation)
    expected_data = np.array([[1, 3, 2], [1, 3, 2], [1, 3, 2]])
    np.testing.assert_equal(perm_data, expected_data)


def test_erase_label_simple():
    data = np.array(range(10)).reshape(2, 5)
    data_erased_1 = erase_labels(data, 1)
    expected_output = data[:]  # NOTE(review): view, not copy
    expected_output[0, 1] = 0
    np.testing.assert_array_equal(data_erased_1, expected_output)


def test_assign_all_other_labels_the_same_values_simple():
    # Keeping only label 1 and zeroing 2..9 must equal erasing label 1's complement.
    data = np.array(range(10)).reshape(2, 5)
    data_erased_1 = erase_labels(data, 1)
    data_labels_to_keep = assign_all_other_labels_the_same_value(data, range(2, 10), same_value_label=0)
    np.testing.assert_array_equal(data_erased_1, data_labels_to_keep)


def test_assign_all_other_labels_the_same_values_single_value():
    data = np.array(range(10)).reshape(2, 5)
    data_erased_1 = np.zeros_like(data)
    data_erased_1[0, 1] = 1
    data_labels_to_keep = assign_all_other_labels_the_same_value(data, 1, same_value_label=0)
    np.testing.assert_array_equal(data_erased_1, data_labels_to_keep)


def test_keep_only_one_label_label_simple():
    data = np.array(range(10)).reshape(2, 5)
    new_data = keep_only_one_label(data, 1)
    expected_data = np.zeros([2, 5])
    expected_data[0, 1] = 1
    np.testing.assert_array_equal(new_data, expected_data)


def test_keep_only_one_label_label_not_present():
    # Keeping a label that does not occur must leave the image unchanged.
    data = np.array(range(10)).reshape(2, 5)
    new_data = keep_only_one_label(data, 120)
    np.testing.assert_array_equal(new_data, data)


def test_relabel_half_side_one_label_wrong_input_shape():
    # 2D input is rejected (function expects a 3D volume).
    data = np.array(range(10)).reshape(2, 5)
    with np.testing.assert_raises(IOError):
        relabel_half_side_one_label(data, label_old=[1, 2], label_new=[2, 1], side_to_modify='above', axis='x', plane_intercept=2)


def test_relabel_half_side_one_label_wrong_input_side():
    data = np.array(range(27)).reshape(3, 3, 3)
    with np.testing.assert_raises(IOError):
        relabel_half_side_one_label(data, label_old=[1, 2], label_new=[2, 1], side_to_modify='spam', axis='x', plane_intercept=2)


def test_relabel_half_side_one_label_wrong_input_axis():
    data = np.array(range(27)).reshape(3, 3, 3)
    with np.testing.assert_raises(IOError):
        relabel_half_side_one_label(data, label_old=[1, 2], label_new=[2, 1], side_to_modify='above', axis='spam', plane_intercept=2)


def test_relabel_half_side_one_label_wrong_input_simple():
    # Exercise all six (axis, side) combinations on a 3x3x3 ramp.
    data = np.array(range(3 ** 3)).reshape(3, 3, 3)
    # Z above
    new_data = relabel_half_side_one_label(data, label_old=1, label_new=100, side_to_modify='above', axis='z', plane_intercept=1)
    expected_data = data[:]  # NOTE(review): view, not copy
    expected_data[0, 0, 1] = 100
    np.testing.assert_array_equal(new_data, expected_data)
    # Z below
    new_data = relabel_half_side_one_label(data, label_old=3, label_new=300, side_to_modify='below', axis='z', plane_intercept=2)
    expected_data = data[:]
    expected_data[0, 1, 0] = 300
    np.testing.assert_array_equal(new_data, expected_data)
    # Y above
    new_data = relabel_half_side_one_label(data, label_old=8, label_new=800, side_to_modify='above', axis='y', plane_intercept=1)
    expected_data = data[:]
    expected_data[0, 2, 2] = 800
    np.testing.assert_array_equal(new_data, expected_data)
    # Y below
    new_data = relabel_half_side_one_label(data, label_old=6, label_new=600, side_to_modify='below', axis='y', plane_intercept=2)
    expected_data = data[:]
    expected_data[0, 2, 0] = 600
    np.testing.assert_array_equal(new_data, expected_data)
    # X above
    new_data = relabel_half_side_one_label(data, label_old=18, label_new=180, side_to_modify='above', axis='x', plane_intercept=1)
    expected_data = data[:]
    expected_data[2, 0, 0] = 180
    np.testing.assert_array_equal(new_data, expected_data)
    # X below
    new_data = relabel_half_side_one_label(data, label_old=4, label_new=400, side_to_modify='below', axis='x', plane_intercept=2)
    expected_data = data[:]
    expected_data[0, 1, 1] = 400
    np.testing.assert_array_equal(new_data, expected_data)


if __name__ == '__main__':
    # Manual runner for executing the suite without pytest.
    test_relabeller_basic()
    test_relabeller_one_element()
    test_relabeller_one_element_not_in_array()
    test_relabeller_wrong_input()
    test_permute_labels_invalid_permutation()
    test_permute_labels_valid_permutation()
    test_erase_label_simple()
    test_assign_all_other_labels_the_same_values_simple()
    test_assign_all_other_labels_the_same_values_single_value()
    test_keep_only_one_label_label_simple()
    test_keep_only_one_label_label_not_present()
    test_relabel_half_side_one_label_wrong_input_shape()
    test_relabel_half_side_one_label_wrong_input_side()
    test_relabel_half_side_one_label_wrong_input_axis()
    test_relabel_half_side_one_label_wrong_input_simple()
37.538889
108
0.688323
1,005
6,757
4.206965
0.096517
0.047304
0.067408
0.076632
0.816698
0.730842
0.726348
0.666982
0.60667
0.460738
0
0.041497
0.201125
6,757
179
109
37.748603
0.741756
0.006956
0
0.31746
0
0
0.009548
0
0
0
0
0
0.150794
1
0.119048
false
0
0.02381
0
0.142857
0
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
bfda6dedee424ce5599151143037800f80651bea
75
py
Python
com/LimePencil/Q14928/Main.py
LimePencil/baekjoonProblems
61eeeeb875585d165d9e39ecdb3d905b4ba6aa87
[ "MIT" ]
2
2021-07-17T13:05:42.000Z
2021-09-12T09:14:24.000Z
com/LimePencil/Q14928/Main.py
LimePencil/baekjoonProblems
61eeeeb875585d165d9e39ecdb3d905b4ba6aa87
[ "MIT" ]
null
null
null
com/LimePencil/Q14928/Main.py
LimePencil/baekjoonProblems
61eeeeb875585d165d9e39ecdb3d905b4ba6aa87
[ "MIT" ]
null
null
null
import sys

# Modulus fixed by the problem statement (BOJ 14928).
MOD = 20000303

# Read one integer from the first stdin line and print it reduced mod MOD.
line = sys.stdin.readline()
print(int(line.rstrip("\n")) % MOD)
25
44
0.72
12
75
4.5
0.833333
0
0
0
0
0
0
0
0
0
0
0.114286
0.066667
75
3
45
25
0.657143
0
0
0
0
0
0.026316
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
bfdc3cc7df767eac8f228aaa4f5319735b01105f
8,641
py
Python
src/borg/testsuite/nonces.py
yzr95924/borg
fcf62872fb0028a43d103e9dc322642fe80334e9
[ "BSD-3-Clause" ]
null
null
null
src/borg/testsuite/nonces.py
yzr95924/borg
fcf62872fb0028a43d103e9dc322642fe80334e9
[ "BSD-3-Clause" ]
null
null
null
src/borg/testsuite/nonces.py
yzr95924/borg
fcf62872fb0028a43d103e9dc322642fe80334e9
[ "BSD-3-Clause" ]
null
null
null
import os.path

import pytest

from ..crypto import nonces
from ..crypto.nonces import NonceManager
from ..crypto.key import bin_to_hex
from ..helpers import get_security_dir
from ..remote import InvalidRPCMethod


class TestNonceManager:
    """Exercises NonceManager's nonce-reservation logic against mock repos."""

    class MockRepository:
        # Minimal stand-in for a server that supports the nonce RPCs.
        class _Location:
            orig = '/some/place'

        _location = _Location()
        id = bytes(32)  # all-zero repository id
        id_str = bin_to_hex(id)

        def get_free_nonce(self):
            return self.next_free

        def commit_nonce_reservation(self, next_unreserved, start_nonce):
            # The manager must report the same start nonce it was handed.
            assert start_nonce == self.next_free
            self.next_free = next_unreserved

    class MockOldRepository(MockRepository):
        # Emulates a server predating the nonce RPC methods.
        def get_free_nonce(self):
            raise InvalidRPCMethod("")

        def commit_nonce_reservation(self, next_unreserved, start_nonce):
            pytest.fail("commit_nonce_reservation should never be called on an old repository")

    class MockEncCipher:
        """Records reset() calls so tests can assert how the IV advanced."""

        def __init__(self, iv):
            self.iv_set = False  # placeholder, this is never a valid iv
            self.iv = iv

        def reset(self, key, iv):
            assert key is None
            assert iv is not False
            self.iv_set = iv
            self.iv = iv

        def expect_iv_and_advance(self, expected_iv, advance):
            # A reset must have happened with exactly this IV ...
            expected_iv = expected_iv.to_bytes(16, byteorder='big')
            iv_set = self.iv_set
            assert iv_set == expected_iv
            self.iv_set = False
            # ... after which the cipher position moves to `advance`.
            self.iv = advance.to_bytes(16, byteorder='big')

        def expect_no_reset_and_advance(self, advance):
            # No reset may have happened since the last expectation.
            iv_set = self.iv_set
            assert iv_set is False
            self.iv = advance.to_bytes(16, byteorder='big')

    def setUp(self):
        self.repository = None

    def cache_nonce(self):
        # Read the locally cached nonce (hex string) from the security dir.
        with open(os.path.join(get_security_dir(self.repository.id_str), 'nonce'), "r") as fd:
            return fd.read()

    def set_cache_nonce(self, nonce):
        with open(os.path.join(get_security_dir(self.repository.id_str), 'nonce'), "w") as fd:
            assert fd.write(nonce)

    def test_empty_cache_and_old_server(self, monkeypatch):
        monkeypatch.setattr(nonces, 'NONCE_SPACE_RESERVATION', 0x20)

        enc_cipher = self.MockEncCipher(0x2000)
        self.repository = self.MockOldRepository()
        manager = NonceManager(self.repository, enc_cipher, 0x2000)
        manager.ensure_reservation(19)
        enc_cipher.expect_iv_and_advance(0x2000, 0x2013)

        assert self.cache_nonce() == "0000000000002033"

    def test_empty_cache(self, monkeypatch):
        monkeypatch.setattr(nonces, 'NONCE_SPACE_RESERVATION', 0x20)

        enc_cipher = self.MockEncCipher(0x2000)
        self.repository = self.MockRepository()
        self.repository.next_free = 0x2000
        manager = NonceManager(self.repository, enc_cipher, 0x2000)
        manager.ensure_reservation(19)
        enc_cipher.expect_iv_and_advance(0x2000, 0x2013)

        assert self.cache_nonce() == "0000000000002033"

    def test_empty_nonce(self, monkeypatch):
        monkeypatch.setattr(nonces, 'NONCE_SPACE_RESERVATION', 0x20)

        enc_cipher = self.MockEncCipher(0x2000)
        self.repository = self.MockRepository()
        self.repository.next_free = None
        manager = NonceManager(self.repository, enc_cipher, 0x2000)
        manager.ensure_reservation(19)
        enc_cipher.expect_iv_and_advance(0x2000, 0x2000 + 19)
        assert self.cache_nonce() == "0000000000002033"
        assert self.repository.next_free == 0x2033

        # enough space in reservation
        manager.ensure_reservation(13)
        enc_cipher.expect_no_reset_and_advance(0x2000 + 19 + 13)
        assert self.cache_nonce() == "0000000000002033"
        assert self.repository.next_free == 0x2033

        # just barely enough space in reservation
        manager.ensure_reservation(19)
        enc_cipher.expect_no_reset_and_advance(0x2000 + 19 + 13 + 19)
        assert self.cache_nonce() == "0000000000002033"
        assert self.repository.next_free == 0x2033

        # no space in reservation
        manager.ensure_reservation(16)
        enc_cipher.expect_no_reset_and_advance(0x2000 + 19 + 13 + 19 + 16)
        assert self.cache_nonce() == "0000000000002063"
        assert self.repository.next_free == 0x2063

        # spans reservation boundary
        manager.ensure_reservation(64)
        enc_cipher.expect_no_reset_and_advance(0x2000 + 19 + 13 + 19 + 16 + 64)
        assert self.cache_nonce() == "00000000000020c3"
        assert self.repository.next_free == 0x20c3

    def test_sync_nonce(self, monkeypatch):
        monkeypatch.setattr(nonces, 'NONCE_SPACE_RESERVATION', 0x20)

        enc_cipher = self.MockEncCipher(0x2000)
        self.repository = self.MockRepository()
        self.repository.next_free = 0x2000
        self.set_cache_nonce("0000000000002000")

        manager = NonceManager(self.repository, enc_cipher, 0x2000)
        manager.ensure_reservation(19)
        enc_cipher.expect_iv_and_advance(0x2000, 0x2000 + 19)

        assert self.cache_nonce() == "0000000000002033"
        assert self.repository.next_free == 0x2033

    def test_server_just_upgraded(self, monkeypatch):
        monkeypatch.setattr(nonces, 'NONCE_SPACE_RESERVATION', 0x20)

        enc_cipher = self.MockEncCipher(0x2000)
        self.repository = self.MockRepository()
        self.repository.next_free = None
        self.set_cache_nonce("0000000000002000")

        manager = NonceManager(self.repository, enc_cipher, 0x2000)
        manager.ensure_reservation(19)
        enc_cipher.expect_iv_and_advance(0x2000, 0x2000 + 19)

        assert self.cache_nonce() == "0000000000002033"
        assert self.repository.next_free == 0x2033

    def test_transaction_abort_no_cache(self, monkeypatch):
        monkeypatch.setattr(nonces, 'NONCE_SPACE_RESERVATION', 0x20)

        enc_cipher = self.MockEncCipher(0x1000)
        self.repository = self.MockRepository()
        self.repository.next_free = 0x2000

        manager = NonceManager(self.repository, enc_cipher, 0x2000)
        manager.ensure_reservation(19)
        enc_cipher.expect_iv_and_advance(0x2000, 0x2000 + 19)

        assert self.cache_nonce() == "0000000000002033"
        assert self.repository.next_free == 0x2033

    def test_transaction_abort_old_server(self, monkeypatch):
        monkeypatch.setattr(nonces, 'NONCE_SPACE_RESERVATION', 0x20)

        enc_cipher = self.MockEncCipher(0x1000)
        self.repository = self.MockOldRepository()
        self.set_cache_nonce("0000000000002000")

        manager = NonceManager(self.repository, enc_cipher, 0x2000)
        manager.ensure_reservation(19)
        enc_cipher.expect_iv_and_advance(0x2000, 0x2000 + 19)

        assert self.cache_nonce() == "0000000000002033"

    def test_transaction_abort_on_other_client(self, monkeypatch):
        monkeypatch.setattr(nonces, 'NONCE_SPACE_RESERVATION', 0x20)

        enc_cipher = self.MockEncCipher(0x1000)
        self.repository = self.MockRepository()
        self.repository.next_free = 0x2000
        self.set_cache_nonce("0000000000001000")

        manager = NonceManager(self.repository, enc_cipher, 0x2000)
        manager.ensure_reservation(19)
        enc_cipher.expect_iv_and_advance(0x2000, 0x2000 + 19)

        assert self.cache_nonce() == "0000000000002033"
        assert self.repository.next_free == 0x2033

    def test_interleaved(self, monkeypatch):
        monkeypatch.setattr(nonces, 'NONCE_SPACE_RESERVATION', 0x20)

        enc_cipher = self.MockEncCipher(0x2000)
        self.repository = self.MockRepository()
        self.repository.next_free = 0x2000
        self.set_cache_nonce("0000000000002000")

        manager = NonceManager(self.repository, enc_cipher, 0x2000)
        manager.ensure_reservation(19)
        enc_cipher.expect_iv_and_advance(0x2000, 0x2000 + 19)

        assert self.cache_nonce() == "0000000000002033"
        assert self.repository.next_free == 0x2033

        # somehow the clients unlocks, another client reserves and this client relocks
        self.repository.next_free = 0x4000

        # enough space in reservation
        manager.ensure_reservation(12)
        enc_cipher.expect_no_reset_and_advance(0x2000 + 19 + 12)
        assert self.cache_nonce() == "0000000000002033"
        assert self.repository.next_free == 0x4000

        # spans reservation boundary
        manager.ensure_reservation(21)
        enc_cipher.expect_iv_and_advance(0x4000, 0x4000 + 21)
        assert self.cache_nonce() == "0000000000004035"
        assert self.repository.next_free == 0x4035
37.406926
95
0.681866
996
8,641
5.660643
0.133534
0.101809
0.063852
0.078042
0.796559
0.751685
0.722419
0.695282
0.686414
0.660518
0
0.11086
0.231686
8,641
230
96
37.569565
0.738364
0.033329
0
0.590361
0
0
0.075144
0.027685
0
0
0.048178
0
0.198795
1
0.120482
false
0
0.042169
0.006024
0.204819
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
bfddac08db04c6268885da2017b7bd431da45875
1,409
py
Python
slice_visualization_ui.py
johanesmikhael/ContinuityAnalysis
931dbb1f4d83ec085e8115ee5b183afd2a067b0c
[ "MIT" ]
null
null
null
slice_visualization_ui.py
johanesmikhael/ContinuityAnalysis
931dbb1f4d83ec085e8115ee5b183afd2a067b0c
[ "MIT" ]
null
null
null
slice_visualization_ui.py
johanesmikhael/ContinuityAnalysis
931dbb1f4d83ec085e8115ee5b183afd2a067b0c
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'slice_visualization.ui' # # Created by: PyQt5 UI code generator 5.6 # # WARNING! All changes made in this file will be lost! from PyQt5 import QtCore, QtGui, QtWidgets class Ui_slice_visualization_gui(object): def setupUi(self, slice_visualization_gui): slice_visualization_gui.setObjectName("slice_visualization_gui") slice_visualization_gui.resize(800, 600) self.centralwidget = QtWidgets.QWidget(slice_visualization_gui) self.centralwidget.setObjectName("centralwidget") slice_visualization_gui.setCentralWidget(self.centralwidget) self.menubar = QtWidgets.QMenuBar(slice_visualization_gui) self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 21)) self.menubar.setObjectName("menubar") slice_visualization_gui.setMenuBar(self.menubar) self.statusbar = QtWidgets.QStatusBar(slice_visualization_gui) self.statusbar.setObjectName("statusbar") slice_visualization_gui.setStatusBar(self.statusbar) self.retranslateUi(slice_visualization_gui) QtCore.QMetaObject.connectSlotsByName(slice_visualization_gui) def retranslateUi(self, slice_visualization_gui): _translate = QtCore.QCoreApplication.translate slice_visualization_gui.setWindowTitle(_translate("slice_visualization_gui", "MainWindow"))
42.69697
99
0.760114
152
1,409
6.809211
0.407895
0.295652
0.324638
0.072464
0.081159
0.081159
0
0
0
0
0
0.015139
0.156139
1,409
32
100
44.03125
0.855341
0.134847
0
0
1
0
0.07019
0.037985
0
0
0
0
0
1
0.1
false
0
0.05
0
0.2
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
870697c00a61acfe7bcd4e2f2706d65c3c383158
208
py
Python
murraylab_tools/tests/echo_tests/test_mastermix.py
francescazfl/murraylab_tools
106e70172e1a7065c73238af994a6dc78b6b3fb7
[ "MIT" ]
2
2017-12-04T22:51:48.000Z
2018-10-25T22:04:59.000Z
murraylab_tools/tests/echo_tests/test_mastermix.py
francescazfl/murraylab_tools
106e70172e1a7065c73238af994a6dc78b6b3fb7
[ "MIT" ]
7
2016-11-19T02:41:08.000Z
2020-12-15T01:50:56.000Z
murraylab_tools/tests/echo_tests/test_mastermix.py
francescazfl/murraylab_tools
106e70172e1a7065c73238af994a6dc78b6b3fb7
[ "MIT" ]
10
2016-11-17T01:17:13.000Z
2020-12-14T19:40:07.000Z
import pytest import numpy as np import murraylab_tools.echo as mt_echo @pytest.mark.skip(reason="no way of currently testing this") class TestMasterMix(): def test_implement_me(self): assert 0
20.8
60
0.754808
32
208
4.78125
0.84375
0
0
0
0
0
0
0
0
0
0
0.005814
0.173077
208
9
61
23.111111
0.883721
0
0
0
0
0
0.153846
0
0
0
0
0
0.142857
1
0.142857
false
0
0.428571
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
871b96a7eb1adaec08134a00822b9bc576f2595d
4,270
py
Python
bimap.py
TMPxyz/python_bidi_multimap
cb34d1e8f8352fd8fb564db23b62eb3dac7fda44
[ "Apache-2.0" ]
null
null
null
bimap.py
TMPxyz/python_bidi_multimap
cb34d1e8f8352fd8fb564db23b62eb3dac7fda44
[ "Apache-2.0" ]
null
null
null
bimap.py
TMPxyz/python_bidi_multimap
cb34d1e8f8352fd8fb564db23b62eb3dac7fda44
[ "Apache-2.0" ]
null
null
null
# bidi multimap from collections import defaultdict class MMap: "multimap" def __init__(self): self._m = defaultdict(set) self._pair_cnt = 0 def __contains__(self, k): "if a key in mmap" return k in self._m def __getitem__(self, k): if k not in self._m: raise KeyError(f"unknown key {k}") return frozenset(self._m[k]) def __len__(self): return len(self._m) def pair_count(self): return self._pair_cnt def keys(self): return self._m.keys() def values(self): return self._m.values() def items(self): return self._m.items() def iter_all_pairs(self): for k, s in self._m.items(): for v in s: yield k, v def clear(self): self._m.clear() self._pair_cnt = 0 def has_pair(self, k, v): s = self._m.get(k, None) if not s: return False return v in s def add_pair(self, k, v): "add a pair k,v to mmap" self._m[k].add(v) self._pair_cnt += 1 def pop_pair(self, k, v): "if k,v pair not present, raise KeyError" if k not in self._m: raise KeyError(f"unknown key {k}") if v not in self._m[k]: raise KeyError(f"unknown value {v}") self._m[k].remove(v) self._pair_cnt -= 1 def pop_all(self, k, d=None): """remove all pairs under given key, return the value if k not found, return d or KeyError if d is None """ if k not in self._m: if d is None: raise KeyError(f"unknown key {k}") else: return d s = self._m.pop(k) self._pair_cnt -= len(s) return s class BiMMap: """a bidirectional multimap""" DEF_NONE = "BIMAP__NONE" def __init__(self): self._m = MMap() self._im = MMap() def pair_count(self): return self._m.pair_count() def keys(self): return self._m.keys() def rkeys(self): return self._im.keys() def values(self): return self._m.values() def rvalues(self): return self._im.values() def items(self): return self._m.items() def ritems(self): return self._im.items() def clear(self): self._m.clear() self._im.clear() def get(self, k, d=DEF_NONE): if d is self.DEF_NONE: return self._m[k] else: return self._m[k] if k in self._m else d def rget(self, k, d=DEF_NONE): if d is self.DEF_NONE: return self._im[k] else: 
return self._im[k] if k in self._im else d def iter_all_pairs(self): return self._m.iter_all_pairs() def riter_all_pairs(self): return self._im.iter_all_pairs() def has_pair(self, k, v): return self._m.has_pair(k, v) def rhas_pair(self, k, v): return self._im.has_pair(k, v) def add_pair(self, k, v, dup='raise'): if self.has_pair(k, v): if dup is 'raise': raise KeyError(f"({k}, {v}) already in map") elif dup is 'ignore': return else: raise RuntimeError(f"unexpected dup op {dup}") self._m.add_pair(k, v) self._im.add_pair(v, k) def radd_pair(self, k, v, dup='raise'): if self.rhas_pair(k, v): if dup is 'raise': raise KeyError(f"({k}, {v}) already in map") elif dup is 'ignore': return else: raise RuntimeError(f"unexpected dup op {dup}") self._im.add_pair(k, v) self._m.add_pair(v, k) def pop_pair(self, k, v): self._m.pop_pair(k, v) self._im.pop_pair(v, k) def rpop_pair(self, k, v): self._im.pop_pair(k, v) self._m.pop_pair(v, k) def pop_all(self, k): s = self._m.pop_all(k) for v in s: self._im.pop_pair(v, k) def rpop_all(self, k): s = self._im.pop_all(k) for v in s: self._m.pop_pair(v, k)
25.568862
76
0.514052
634
4,270
3.257098
0.129338
0.082324
0.088136
0.043584
0.627603
0.501211
0.377724
0.33414
0.249395
0.182082
0
0.00149
0.371194
4,270
166
77
25.722892
0.767598
0.05363
0
0.422764
0
0
0.072663
0
0
0
0
0
0
1
0.284553
false
0
0.00813
0.130081
0.520325
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
874062a6500d935f6cb4043c148b7eed07cfce8c
20,096
py
Python
test/functional/tl_metadex.py
patrickdugan/BlockPo-to-Tradelayer
ba1ebf3c329751d414302577a09481ba28db1815
[ "MIT" ]
null
null
null
test/functional/tl_metadex.py
patrickdugan/BlockPo-to-Tradelayer
ba1ebf3c329751d414302577a09481ba28db1815
[ "MIT" ]
5
2021-06-21T21:21:53.000Z
2021-06-22T20:10:16.000Z
test/functional/tl_metadex.py
patrickdugan/BlockPo-to-Tradelayer
ba1ebf3c329751d414302577a09481ba28db1815
[ "MIT" ]
1
2021-06-21T21:14:45.000Z
2021-06-21T21:14:45.000Z
#!/usr/bin/env python3 # Copyright (c) 2015-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test MetaDEx functions.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * import os import json import http.client import urllib.parse class MetaDExBasicsTest (BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.extra_args = [["-txindex=1"]] def setup_chain(self): super().setup_chain() #Append rpcauth to bitcoin.conf before initialization rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144" rpcuser = "rpcuser=rpcuser💻" rpcpassword = "rpcpassword=rpcpassword🔑" with open(os.path.join(self.options.tmpdir+"/node0", "litecoin.conf"), 'a', encoding='utf8') as f: f.write(rpcauth+"\n") def run_test(self): self.log.info("Preparing the workspace...") # mining 200 blocks self.nodes[0].generate(200) ################################################################################ # Checking RPC tl_sendtrade (in the first 200 blocks of the chain) # ################################################################################ url = urllib.parse.urlparse(self.nodes[0].url) #Old authpair authpair = url.username + ':' + url.password headers = {"Authorization": "Basic " + str_to_b64str(authpair)} addresses = [] accounts = ["john", "doe", "another", "mark"] conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() self.log.info("Creating addresses") addresses = tradelayer_createAddresses(accounts, conn, headers) self.log.info("Funding addresses with LTC") amount = 1.1 tradelayer_fundingAddresses(addresses, amount, conn, headers) self.log.info("Checking the LTC balance in every account") tradelayer_checkingBalance(accounts, amount, conn, headers) self.log.info("Creating new tokens 
(lihki)") array = [0] params = str([addresses[0],2,0,"lihki","","","90000000000",array]).replace("'",'"') out = tradelayer_HTTP(conn, headers, False, "tl_sendissuancefixed",params) assert_equal(out['error'], None) # self.log.info(out) self.nodes[0].generate(1) self.log.info("Creating new tokens (dan)") array = [0] params = str([addresses[1],2,0,"dan","","","100001",array]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_sendissuancefixed",params) assert_equal(out['error'], None) # self.log.info(out) self.nodes[0].generate(1) self.log.info("Self Attestation for addresses") tradelayer_selfAttestation(addresses,conn, headers) self.log.info("Checking attestations") out = tradelayer_HTTP(conn, headers, False, "tl_list_attestation") # self.log.info(out) result = [] registers = out['result'] for addr in addresses: for i in registers: if i['att sender'] == addr and i['att receiver'] == addr and i['kyc_id'] == 0: result.append(True) assert_equal(result, [True, True, True, True]) self.log.info("Checking the property: lihki") params = str([4]) out = tradelayer_HTTP(conn, headers, True, "tl_getproperty",params) assert_equal(out['error'], None) # self.log.info(out) assert_equal(out['result']['propertyid'],4) assert_equal(out['result']['name'],'lihki') assert_equal(out['result']['data'],'') assert_equal(out['result']['url'],'') assert_equal(out['result']['divisible'],True) assert_equal(out['result']['totaltokens'],'90000000000.00000000') self.log.info("Checking the property: dan ") params = str([5]) out = tradelayer_HTTP(conn, headers, True, "tl_getproperty",params) assert_equal(out['error'], None) # self.log.info(out) assert_equal(out['result']['propertyid'],5) assert_equal(out['result']['name'],'dan') assert_equal(out['result']['data'],'') assert_equal(out['result']['url'],'') assert_equal(out['result']['divisible'],True) assert_equal(out['result']['totaltokens'],'100001.00000000') self.log.info("Checking tokens balance in lihki's owner ") params = 
str([addresses[0], 4]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'90000000000.00000000') assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Checking tokens balance in dan's owner ") params = str([addresses[1], 5]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'100001.00000000') assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Sending a trade in MetaDEx") params = str([addresses[0], 4, "500", 5, "2000"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_sendtrade",params) # self.log.info(out) assert_equal(out['error'], None) self.nodes[0].generate(1) self.log.info("Checking the trade in orderbook") params = str([4]) out = tradelayer_HTTP(conn, headers, True, "tl_getorderbook",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result'][0]['address'], addresses[0]) assert_equal(out['result'][0]['propertyidforsale'],4) assert_equal(out['result'][0]['amountforsale'],'500.00000000') assert_equal(out['result'][0]['propertyiddesired'],5) assert_equal(out['result'][0]['amountdesired'],'2000.00000000') self.log.info("Cancelling all trades in MetaDEx") params = str([addresses[0]]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_sendcancelalltrades",params) # self.log.info(out) assert_equal(out['error'], None) self.nodes[0].generate(1) self.log.info("Checking the active orders") params = str([4]) out = tradelayer_HTTP(conn, headers, True, "tl_getorderbook",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result'],[]) self.log.info("Sending a new trade in MetaDEx") params = str([addresses[0], 4, "1000", 5, "2000"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_sendtrade",params) 
# self.log.info(out) assert_equal(out['error'], None) self.nodes[0].generate(1) self.log.info("Checking the trade in orderbook") params = str([4]) out = tradelayer_HTTP(conn, headers, True, "tl_getorderbook",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result'][0]['address'], addresses[0]) assert_equal(out['result'][0]['propertyidforsale'],4) assert_equal(out['result'][0]['amountforsale'],'1000.00000000') assert_equal(out['result'][0]['propertyiddesired'],5) assert_equal(out['result'][0]['amountdesired'],'2000.00000000') self.log.info("Sending a second trade in MetaDEx") params = str([addresses[1], 5, "2000", 4, "1000"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_sendtrade",params) assert_equal(out['error'], None) self.nodes[0].generate(1) self.log.info("Checking tokens balance for first address") params = str([addresses[0], 4]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'89999999000.40000000') assert_equal(out['result']['reserve'],'0.00000000') params = str([addresses[0], 5]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'2000.00000000') assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Checking tokens balance for second address") params = str([addresses[1], 4]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'999.50000000') assert_equal(out['result']['reserve'],'0.00000000') params = str([addresses[1], 5]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'98001.00000000') 
assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Checking trade history of first address") params = str([addresses[0], 100, 4]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getmdextradehistoryforaddress",params) assert_equal(out['error'], None) assert_equal(out['result'][0]['type_int'], 25) assert_equal(out['result'][0]['type'], 'Metadex Order') assert_equal(out['result'][0]['propertyId'], 4) assert_equal(out['result'][0]['propertyname'], 'lihki') assert_equal(out['result'][0]['amount'], '1000.00000000') assert_equal(out['result'][0]['desire property'], 5) assert_equal(out['result'][0]['desired value'], '2000.00000000') assert_equal(out['result'][1]['type_int'], 25) assert_equal(out['result'][1]['type'], 'Metadex Order') assert_equal(out['result'][1]['propertyId'], 4) assert_equal(out['result'][1]['propertyname'], 'lihki') assert_equal(out['result'][1]['amount'], '500.00000000') assert_equal(out['result'][1]['desire property'], 5) assert_equal(out['result'][1]['desired value'], '2000.00000000') self.log.info("Checking trade for property pair") params = str([4, 5, 10]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getmdextradehistoryforpair",params) assert_equal(out['error'], None) # self.log.info(out) assert_equal(out['result'][0]['block'], 208) assert_equal(out['result'][0]['unitprice'], '2.00000000000000000000000000000000000000000000000000') assert_equal(out['result'][0]['inverseprice'], '0.50000000000000000000000000000000000000000000000000') assert_equal(out['result'][0]['amountsold'], '1000.00000000') assert_equal(out['result'][0]['amountreceived'], '2000.00000000') self.log.info("Sending 20000000000 lihki tokens to fourth address") params = str([addresses[0], addresses[3], 4, "20000000000"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_send",params) assert_equal(out['error'], None) # self.log.info(out) self.nodes[0].generate(1) self.log.info("Checking tokens in receiver address") params 
= str([addresses[3], 4]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'20000000000.00000000') assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Sending 1 dan token to third address") params = str([addresses[1], addresses[2], 5, "1"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_send",params) assert_equal(out['error'], None) # self.log.info(out) self.nodes[0].generate(1) self.log.info("Checking dan tokens in receiver address") params = str([addresses[2], 5]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'1.00000000') assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Sending a big trade in MetaDEx") params = str([addresses[3], 4, "10000000000", 5, "0.00000002"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, False, "tl_sendtrade",params) # self.log.info(out) assert_equal(out['error'], None) txid = out['result'] self.nodes[0].generate(1) self.log.info("Sending a small trade in MetaDEx") params = str([addresses[2], 5, "0.00000001", 4, "5000000000"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_sendtrade",params) # self.log.info(out) assert_equal(out['error'], None) self.nodes[0].generate(1) self.log.info("Checking lihki tokens in third address") params = str([addresses[2], 4]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'4997500000.00000000') # 5000000000 minus fees assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Checking dan tokens in fourth address") params = str([addresses[3], 5]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) 
assert_equal(out['error'], None) assert_equal(out['result']['balance'],'0.00000001') assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Checking lihki tokens in fourth address") params = str([addresses[3], 4]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'10002000000.00000000') assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Cancel specific order MetaDEx") params = str([addresses[3], txid]).replace("'",'"') out = tradelayer_HTTP(conn, headers, False, "tl_sendcancel_order",params) # self.log.info(out) assert_equal(out['error'], None) self.nodes[0].generate(1) self.log.info("Checking the trade in orderbook") params = str([4]) out = tradelayer_HTTP(conn, headers, True, "tl_getorderbook",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result'], []) self.log.info("Checking lihki tokens in fourth address") params = str([addresses[3], 4]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'15002000000.00000000') assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Sending the trade again") params = str([addresses[3], 4, "2000000", 5, "4000"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_sendtrade",params) # self.log.info(out) assert_equal(out['error'], None) self.nodes[0].generate(1) self.log.info("Checking the trade in orderbook") params = str([4]) out = tradelayer_HTTP(conn, headers, True, "tl_getorderbook",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result'][0]['address'], addresses[3]) assert_equal(out['result'][0]['propertyidforsale'], 4) assert_equal(out['result'][0]['amountforsale'], '2000000.00000000') assert_equal(out['result'][0]['propertyiddesired'], 5) 
assert_equal(out['result'][0]['amountdesired'], '4000.00000000') txid = out['result'][0]['txid'] self.log.info("Cancel by pair") params = str([addresses[3], 4, 5]).replace("'",'"') out = tradelayer_HTTP(conn, headers, False, "tl_sendcanceltradesbypair", params) # self.log.info(out) assert_equal(out['error'], None) self.nodes[0].generate(1) self.log.info("Checking the trade in orderbook") params = str([4]) out = tradelayer_HTTP(conn, headers, True, "tl_getorderbook",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result'], []) self.log.info("Checking lihki tokens in fourth address") params = str([addresses[3], 4]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'15002000000.00000000') assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Sending another trade again") params = str([addresses[3], 4, "2000000", 5, "4000"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_sendtrade",params) # self.log.info(out) assert_equal(out['error'], None) self.nodes[0].generate(1) self.log.info("Checking the trade in orderbook") params = str([4]) out = tradelayer_HTTP(conn, headers, True, "tl_getorderbook",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result'][0]['address'], addresses[3]) assert_equal(out['result'][0]['propertyidforsale'], 4) assert_equal(out['result'][0]['amountforsale'], '2000000.00000000') assert_equal(out['result'][0]['propertyiddesired'], 5) assert_equal(out['result'][0]['amountdesired'], '4000.00000000') txid = out['result'][0]['txid'] self.log.info("Cancel by price") params = str([addresses[3], 4, "2000000", 5, "4000"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, False, "tl_sendcanceltradesbyprice", params) # self.log.info(out) assert_equal(out['error'], None) self.nodes[0].generate(1) self.log.info("Checking the trade in 
orderbook") params = str([4]) out = tradelayer_HTTP(conn, headers, True, "tl_getorderbook",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result'], []) self.log.info("Checking trade for property pair again") params = str([4, 5, 10]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getmdextradehistoryforpair",params) assert_equal(out['error'], None) # self.log.info(out) self.log.info("Checking lihki tokens in fourth address") params = str([addresses[3], 4]).replace("'",'"') out = tradelayer_HTTP(conn, headers, True, "tl_getbalance",params) # self.log.info(out) assert_equal(out['error'], None) assert_equal(out['result']['balance'],'15002000000.00000000') assert_equal(out['result']['reserve'],'0.00000000') self.log.info("Checking fee restrictions") params = str([addresses[3], 4, "14997499400", 5, "4000"]).replace("'",'"') out = tradelayer_HTTP(conn, headers, False, "tl_sendtrade",params) # self.log.info(out) assert_equal(out['error']['message'], 'Sender has insufficient balance') conn.close() self.stop_nodes() if __name__ == '__main__': MetaDExBasicsTest ().main ()
40.59798
128
0.606389
2,372
20,096
5.033727
0.11172
0.117002
0.147739
0.139028
0.796399
0.76206
0.70804
0.674623
0.637353
0.633836
0
0.06961
0.212231
20,096
494
129
40.680162
0.68448
0.059614
0
0.544892
0
0
0.262566
0.021359
0
0
0
0
0.393189
1
0.009288
false
0.006192
0.018576
0
0.03096
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
8741db31068c5c6401fb3b9e4bd1b20ab9581766
339
py
Python
netrd/dynamics/__init__.py
hartle/netrd
da962e080be4b60cb40a43f78582823cbd00c839
[ "MIT" ]
null
null
null
netrd/dynamics/__init__.py
hartle/netrd
da962e080be4b60cb40a43f78582823cbd00c839
[ "MIT" ]
null
null
null
netrd/dynamics/__init__.py
hartle/netrd
da962e080be4b60cb40a43f78582823cbd00c839
[ "MIT" ]
null
null
null
from .base import BaseDynamics from .sherrington_kirkpatrick import SherringtonKirkpatrickIsing from .single_unbiased_random_walker import SingleUnbiasedRandomWalker from .kuramoto import Kuramoto from .lotka_volterra import LotkaVolterra from .ising_glauber import IsingGlauber from .branching_process import BranchingModel __all__ = []
33.9
69
0.876106
36
339
7.944444
0.638889
0
0
0
0
0
0
0
0
0
0
0
0.094395
339
9
70
37.666667
0.931596
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.875
0
0.875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
8749283015b1eff05def3062265cc7b80c307fb7
159
py
Python
start.py
lolenseu/NetKill
a18c239c44e328f12059ed535511f8b7ab9bf41d
[ "MIT" ]
3
2022-02-27T13:30:24.000Z
2022-02-28T18:59:27.000Z
start.py
lolenseu/NetKill
a18c239c44e328f12059ed535511f8b7ab9bf41d
[ "MIT" ]
null
null
null
start.py
lolenseu/NetKill
a18c239c44e328f12059ed535511f8b7ab9bf41d
[ "MIT" ]
null
null
null
import os while True: print("Starting...") os.system('wget https://speed.hetzner.de/100MB.bin && rm -rf 100MB.bin') print("Done!") os.system('clear')
19.875
74
0.647799
24
159
4.291667
0.75
0.15534
0
0
0
0
0
0
0
0
0
0.043796
0.138365
159
7
75
22.714286
0.708029
0
0
0
0
0
0.503145
0
0
0
0
0
0
1
0
true
0
0.166667
0
0.166667
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
874a0c8257b0c907f80555a3687bd87646044a40
142
py
Python
rates/apps.py
mathemartins/syarpa_k8s
880be2ee3bd26613819d0bd2652be579f6a93886
[ "Apache-2.0" ]
null
null
null
rates/apps.py
mathemartins/syarpa_k8s
880be2ee3bd26613819d0bd2652be579f6a93886
[ "Apache-2.0" ]
null
null
null
rates/apps.py
mathemartins/syarpa_k8s
880be2ee3bd26613819d0bd2652be579f6a93886
[ "Apache-2.0" ]
1
2021-12-15T08:38:35.000Z
2021-12-15T08:38:35.000Z
from django.apps import AppConfig class RatesConfig(AppConfig): default_auto_field = 'django.db.models.BigAutoField' name = 'rates'
20.285714
56
0.753521
17
142
6.176471
0.882353
0
0
0
0
0
0
0
0
0
0
0
0.15493
142
6
57
23.666667
0.875
0
0
0
0
0
0.239437
0.204225
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
874f9729fc282538503f272f6e8ee38acd87eeb7
170
py
Python
teste.py
vintgrupo/vdd
a859f7d293326b5e7d73888c86bea4c2f3586b86
[ "MIT" ]
null
null
null
teste.py
vintgrupo/vdd
a859f7d293326b5e7d73888c86bea4c2f3586b86
[ "MIT" ]
null
null
null
teste.py
vintgrupo/vdd
a859f7d293326b5e7d73888c86bea4c2f3586b86
[ "MIT" ]
null
null
null
import pandas as pd df = pd.read_csv('HIST_PAINEL_COVIDBR_20ago2020_MT.csv') print(df.columns) # df_semana = df.groupby(['semanaEpi']).sum() # print(df_semana.head())
18.888889
56
0.735294
27
170
4.37037
0.703704
0.118644
0
0
0
0
0
0
0
0
0
0.039216
0.1
170
8
57
21.25
0.732026
0.394118
0
0
0
0
0.363636
0.363636
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
87528c41d972122c62bf882ead0cb66cb919b097
593
py
Python
python/tHome/eagle/messages/test/InstantaneousDemand.py
ZigmundRat/T-Home
5dc8689f52d87dac890051e540b338b009293ced
[ "BSD-2-Clause" ]
18
2016-04-17T19:39:28.000Z
2020-11-19T06:55:20.000Z
python/tHome/eagle/messages/test/InstantaneousDemand.py
ZigmundRat/T-Home
5dc8689f52d87dac890051e540b338b009293ced
[ "BSD-2-Clause" ]
11
2018-09-07T18:34:41.000Z
2021-05-02T04:44:54.000Z
python/tHome/eagle/messages/test/InstantaneousDemand.py
ZigmundRat/T-Home
5dc8689f52d87dac890051e540b338b009293ced
[ "BSD-2-Clause" ]
12
2016-10-31T12:29:08.000Z
2021-12-28T12:18:28.000Z
#!/usr/bin/env python import xml.etree.ElementTree as ET import tHome.eagle as E s=""" <InstantaneousDemand> <DeviceMacId>0xd8d5b9000000103f</DeviceMacId> <MeterMacId>0x000781000086d0fe</MeterMacId> <TimeStamp>0x1c531d48</TimeStamp> <Demand>0x00032d</Demand> <Multiplier>0x00000001</Multiplier> <Divisor>0x000003e8</Divisor> <DigitsRight>0x03</DigitsRight> <DigitsLeft>0x06</DigitsLeft> <SuppressLeadingZero>Y</SuppressLeadingZero> </InstantaneousDemand> """ root = ET.fromstring( s ) n = E.messages.InstantaneousDemand( root ) print n
22.807692
50
0.720067
56
593
7.625
0.678571
0.107728
0
0
0
0
0
0
0
0
0
0.125749
0.155143
593
25
51
23.72
0.726547
0.033727
0
0
0
0
0.736842
0.626316
0
0
0.14386
0
0
0
null
null
0
0.111111
null
null
0.055556
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
87530bb97282e3f993f9dbe26ce5a09bf11787e3
110
py
Python
web/shell.py
gabriel-v/avizier-virtual
015f4608f6457990c48ebdef07ec4c01c84cf7a3
[ "WTFPL" ]
null
null
null
web/shell.py
gabriel-v/avizier-virtual
015f4608f6457990c48ebdef07ec4c01c84cf7a3
[ "WTFPL" ]
null
null
null
web/shell.py
gabriel-v/avizier-virtual
015f4608f6457990c48ebdef07ec4c01c84cf7a3
[ "WTFPL" ]
null
null
null
#!/bin/bash -ex docker exec -it $(docker ps -q --filter label=com.docker.swarm.service.name=avizier_web) bash
36.666667
93
0.736364
19
110
4.210526
0.842105
0
0
0
0
0
0
0
0
0
0
0
0.1
110
2
94
55
0.808081
0.127273
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
875ccce248bd6bd59177edd2d8773827534033ec
215
py
Python
enume.py
ambivert143/PythonProgram
9c21027c35b3b1c3542b068f45faab5db00a2fa8
[ "MIT" ]
null
null
null
enume.py
ambivert143/PythonProgram
9c21027c35b3b1c3542b068f45faab5db00a2fa8
[ "MIT" ]
null
null
null
enume.py
ambivert143/PythonProgram
9c21027c35b3b1c3542b068f45faab5db00a2fa8
[ "MIT" ]
null
null
null
class Enum: def __init__(self,list): self.list = list def enume(self): for index, val in enumerate(self.list,start=1): print(index,val) e1 = Enum([5,15,45,4,53]) e1.enume()
26.875
56
0.567442
33
215
3.575758
0.636364
0.20339
0
0
0
0
0
0
0
0
0
0.071429
0.283721
215
8
57
26.875
0.694805
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0
0.375
0.125
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
5e60a1f2b6bbe0625c4ac1b59637a1981a8be0f5
114
py
Python
tasfie/apps.py
mablue/Specialized-Procurement-and-Sales-Management-System-for-East-Azarbaijan-Gas-Company
da071bd199a92d571228a1d823a7fa1f52275604
[ "MIT" ]
30
2019-08-18T14:57:39.000Z
2022-03-15T22:09:09.000Z
tasfie/apps.py
mablue/Specialized-Procurement-and-Sales-Management-System-for-East-Azarbaijan-Gas-Company
da071bd199a92d571228a1d823a7fa1f52275604
[ "MIT" ]
3
2021-03-19T00:46:32.000Z
2021-09-22T01:29:08.000Z
tasfie/apps.py
mablue/Specialized-Procurement-and-Sales-Management-System-for-East-Azarbaijan-Gas-Company
da071bd199a92d571228a1d823a7fa1f52275604
[ "MIT" ]
7
2019-08-20T14:04:28.000Z
2021-06-29T06:43:08.000Z
from django.apps import AppConfig class TasfieConfig(AppConfig): name = 'tasfie' verbose_name = 'تسویه'
16.285714
33
0.719298
13
114
6.230769
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.192982
114
6
34
19
0.880435
0
0
0
0
0
0.096491
0
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
5e659f259160f253ce492737f80c806e633048f1
360
py
Python
litGL/examples/__init__.py
ncreati/LitGL
acdc57632940538286381387efbefab1160aa32e
[ "MIT" ]
null
null
null
litGL/examples/__init__.py
ncreati/LitGL
acdc57632940538286381387efbefab1160aa32e
[ "MIT" ]
null
null
null
litGL/examples/__init__.py
ncreati/LitGL
acdc57632940538286381387efbefab1160aa32e
[ "MIT" ]
null
null
null
""" examples package. Author: - 2020-2021 Nicola Creati - 2020-2021 Roberto Vidmar Copyright: 2020-2021 Nicola Creati <ncreati@inogs.it> 2020-2021 Roberto Vidmar <rvidmar@inogs.it> License: MIT/X11 License (see :download:`license.txt <../../../license.txt>`) """
21.176471
59
0.530556
36
360
5.305556
0.555556
0.167539
0.146597
0.209424
0
0
0
0
0
0
0
0.144681
0.347222
360
16
60
22.5
0.668085
0.875
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
5e752618d881636eac6eb75fdd24fe3b056b60bd
429
py
Python
Week 7/id_510/sort/Leetcode_56_510.py
larryRishi/algorithm004-05
e60d0b1176acd32a9184b215e36d4122ba0b6263
[ "Apache-2.0" ]
1
2019-10-12T06:48:45.000Z
2019-10-12T06:48:45.000Z
Week 7/id_510/sort/Leetcode_56_510.py
larryRishi/algorithm004-05
e60d0b1176acd32a9184b215e36d4122ba0b6263
[ "Apache-2.0" ]
1
2019-12-01T10:02:03.000Z
2019-12-01T10:02:03.000Z
Week 7/id_510/sort/Leetcode_56_510.py
larryRishi/algorithm004-05
e60d0b1176acd32a9184b215e36d4122ba0b6263
[ "Apache-2.0" ]
null
null
null
""" 给出一个区间的集合,请合并所有重叠的区间。 示例 1: 输入: [[1,3],[2,6],[8,10],[15,18]] 输出: [[1,6],[8,10],[15,18]] 解释: 区间 [1,3] 和 [2,6] 重叠, 将它们合并为 [1,6]. 示例 2: 输入: [[1,4],[4,5]] 输出: [[1,5]] 解释: 区间 [1,4] 和 [4,5] 可被视为重叠区间。 来源:力扣(LeetCode) 链接:https://leetcode-cn.com/problems/merge-intervals 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。 """ from typing import List class Solution: def merge(self, intervals: List[List[int]]) -> List[List[int]]: pass
19.5
67
0.606061
80
429
3.25
0.55
0.023077
0.030769
0.046154
0.061538
0
0
0
0
0
0
0.102981
0.13986
429
22
68
19.5
0.601626
0.699301
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0.25
0.25
0
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
3
5e796ec3f7a48dc3c7fc188a38798d9e8a46bab4
100
py
Python
pytuya/const.py
braub/python-tuya
0d46bc46d396e8a9baa8ccebc2bc9fe0c0528022
[ "MIT" ]
231
2017-12-03T13:58:06.000Z
2022-03-17T07:48:08.000Z
pytuya/const.py
braub/python-tuya
0d46bc46d396e8a9baa8ccebc2bc9fe0c0528022
[ "MIT" ]
67
2018-01-01T19:14:17.000Z
2021-08-12T00:12:15.000Z
pytuya/const.py
braub/python-tuya
0d46bc46d396e8a9baa8ccebc2bc9fe0c0528022
[ "MIT" ]
73
2017-12-03T13:58:08.000Z
2021-09-05T10:06:05.000Z
version_tuple = (7, 0, 7) version = __version__ = '%d.%d.%d' % version_tuple __author__ = 'clach04'
25
50
0.67
14
100
4.071429
0.5
0.421053
0
0
0
0
0
0
0
0
0
0.058824
0.15
100
3
51
33.333333
0.611765
0
0
0
0
0
0.15
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
5e86bc4d50661b0972690f7ca2236df316c7f644
68
py
Python
android/.buildozer/android/platform/build/build/python-installs/youtb_dc/youtube_dl/version.py
mw3tv123/youtube-dc
84cfdac4b7548285084f453f1716eee83b9288aa
[ "MIT" ]
null
null
null
android/.buildozer/android/platform/build/build/python-installs/youtb_dc/youtube_dl/version.py
mw3tv123/youtube-dc
84cfdac4b7548285084f453f1716eee83b9288aa
[ "MIT" ]
null
null
null
android/.buildozer/android/platform/build/build/python-installs/youtb_dc/youtube_dl/version.py
mw3tv123/youtube-dc
84cfdac4b7548285084f453f1716eee83b9288aa
[ "MIT" ]
null
null
null
from __future__ import unicode_literals __version__ = '2019.03.18'
17
39
0.808824
9
68
5.111111
1
0
0
0
0
0
0
0
0
0
0
0.133333
0.117647
68
3
40
22.666667
0.633333
0
0
0
0
0
0.147059
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
5e8e41e3a0c4ff5a6de08b69c73941b177604cde
85
py
Python
myapi/__init__.py
CVIU-CSU/M2MRF-Lesion-Segmentation
13af87927f4cdeca70e35d570edd1aec43b387b6
[ "Apache-2.0" ]
10
2021-11-29T12:58:21.000Z
2022-03-08T14:43:59.000Z
myapi/__init__.py
CVIU-CSU/M2MRF-Lesion-Segmentation
13af87927f4cdeca70e35d570edd1aec43b387b6
[ "Apache-2.0" ]
1
2021-12-10T01:59:44.000Z
2021-12-19T05:24:04.000Z
myapi/__init__.py
CVIU-CSU/M2MRF-Lesion-Segmentation
13af87927f4cdeca70e35d570edd1aec43b387b6
[ "Apache-2.0" ]
1
2021-11-30T07:51:08.000Z
2021-11-30T07:51:08.000Z
from .test_hook import * from .config import * __all__ = [ 'logit_activation' ]
12.142857
24
0.682353
10
85
5.2
0.8
0
0
0
0
0
0
0
0
0
0
0
0.211765
85
6
25
14.166667
0.776119
0
0
0
0
0
0.188235
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
5eb4a4f5b78068b01a1fb2c3c61e7a06496e514e
232
py
Python
shooter/game_object/landscapes/water.py
EvgeniiMorozov/zombie_shooter_026
0bda0cabe8c9b7c1c3607d84c6e32d5e289cfb4a
[ "MIT" ]
null
null
null
shooter/game_object/landscapes/water.py
EvgeniiMorozov/zombie_shooter_026
0bda0cabe8c9b7c1c3607d84c6e32d5e289cfb4a
[ "MIT" ]
6
2021-05-15T09:16:00.000Z
2021-05-22T13:56:33.000Z
shooter/game_object/landscapes/water.py
EvgeniiMorozov/zombie_shooter_026
0bda0cabe8c9b7c1c3607d84c6e32d5e289cfb4a
[ "MIT" ]
6
2021-05-15T08:35:08.000Z
2021-12-13T16:31:10.000Z
from shooter.config import LANDSCAPE_WATER from shooter.game_object.landscape import Landscape class Water(Landscape): """Класс вода""" def __init__(self, x, y, image=LANDSCAPE_WATER): super().__init__(x, y, image)
29
52
0.728448
31
232
5.096774
0.580645
0.139241
0.088608
0
0
0
0
0
0
0
0
0
0.159483
232
8
53
29
0.810256
0.043103
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.4
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
5ecd6253a6c11a52271e37923ed72679dd9c166c
436
py
Python
sort/insertion_sort.py
udohsolomon/LearnAlgorithms
36f8e2af4efe65a6b9627b400da77931ef5a6a26
[ "MIT" ]
null
null
null
sort/insertion_sort.py
udohsolomon/LearnAlgorithms
36f8e2af4efe65a6b9627b400da77931ef5a6a26
[ "MIT" ]
null
null
null
sort/insertion_sort.py
udohsolomon/LearnAlgorithms
36f8e2af4efe65a6b9627b400da77931ef5a6a26
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 def insertion_sort(lst): #times for i in range(1,len(lst)): #n - 1 while i > 0 and lst[i-1] > lst[i]: #(n - 1)n lst[i], lst[i-1] = lst[i-1], lst[i] #(n - 1)n/2 i -= 1 #1 return lst print(insertion_sort([6, 4, 3, 8, 5]))
48.444444
75
0.323394
57
436
2.438596
0.473684
0.172662
0.107914
0.172662
0.208633
0.172662
0.172662
0.172662
0
0
0
0.084577
0.538991
436
9
76
48.444444
0.606965
0.114679
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0
0
0.285714
0.142857
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
0d7140c06f217f7f03eb9977610356ade32db4a6
3,442
py
Python
unit_tests/test_tlslite_utils_cryptomath_m2crypto.py
tomato42/tlslite-1
4631799cdfac8f90b567d455e698b05d7a917599
[ "Unlicense" ]
121
2015-05-28T18:14:37.000Z
2020-11-18T11:23:59.000Z
unit_tests/test_tlslite_utils_cryptomath_m2crypto.py
tomato42/tlslite-1
4631799cdfac8f90b567d455e698b05d7a917599
[ "Unlicense" ]
340
2015-05-28T15:56:11.000Z
2020-11-04T11:40:45.000Z
unit_tests/test_tlslite_utils_cryptomath_m2crypto.py
tomato42/tlslite-1
4631799cdfac8f90b567d455e698b05d7a917599
[ "Unlicense" ]
60
2015-07-10T20:07:02.000Z
2020-10-22T08:04:20.000Z
# Copyright (c) 2014, Hubert Kario # # See the LICENSE file for legal information regarding use of this file. # compatibility with Python 2.6, for that we need unittest2 package, # which is not available on 3.3 or 3.4 try: import unittest2 as unittest except ImportError: import unittest try: import mock from mock import call except ImportError: import unittest.mock as mock from unittest.mock import call import sys try: # Python 2 reload except NameError: try: # Python >= 3.4 from importlib import reload except ImportError: # Python <= 3.3 from imp import reload try: import __builtin__ as builtins except ImportError: import builtins real_open = builtins.open class magic_open(object): def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs def __enter__(self): if self.args[0] == '/proc/sys/crypto/fips_enabled': m = mock.MagicMock() m.read.return_value = '1' self.f = m return m else: self.f = real_open(*self.args, **self.kwargs) return self.f def __exit__(self, exc_type, exc_value, exc_traceback): self.f.close() class magic_open_error(object): def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs def __enter__(self): if self.args[0] == '/proc/sys/crypto/fips_enabled': m = mock.MagicMock() self.f = m raise IOError(12) else: self.f = real_open(*self.args, **self.kwargs) return self.f def __exit__(self, exc_type, exc_value, exc_traceback): self.f.close() class TestM2CryptoLoaded(unittest.TestCase): def test_import_without_m2crypto(self): with mock.patch.dict('sys.modules', {'M2Crypto': None}): import tlslite.utils.cryptomath reload(tlslite.utils.cryptomath) from tlslite.utils.cryptomath import m2cryptoLoaded self.assertFalse(m2cryptoLoaded) def test_import_with_m2crypto(self): fake_m2 = mock.MagicMock() with mock.patch.dict('sys.modules', {'M2Crypto': fake_m2}): import tlslite.utils.cryptomath reload(tlslite.utils.cryptomath) from tlslite.utils.cryptomath import m2cryptoLoaded self.assertTrue(m2cryptoLoaded) def 
test_import_with_m2crypto_in_fips_mode(self): fake_m2 = mock.MagicMock() with mock.patch.dict('sys.modules', {'M2Crypto': fake_m2}): with mock.patch.object(builtins, 'open', magic_open): import tlslite.utils.cryptomath reload(tlslite.utils.cryptomath) from tlslite.utils.cryptomath import m2cryptoLoaded self.assertFalse(m2cryptoLoaded) def test_import_with_m2crypto_in_container(self): fake_m2 = mock.MagicMock() with mock.patch.dict('sys.modules', {'M2Crypto': fake_m2}): with mock.patch.object(builtins, 'open', magic_open_error): import tlslite.utils.cryptomath reload(tlslite.utils.cryptomath) from tlslite.utils.cryptomath import m2cryptoLoaded self.assertTrue(m2cryptoLoaded) @classmethod def tearDownClass(cls): import tlslite.utils.cryptomath reload(tlslite.utils.cryptomath)
30.732143
72
0.633643
406
3,442
5.20197
0.263547
0.079545
0.145833
0.066288
0.660038
0.660038
0.660038
0.629735
0.60322
0.60322
0
0.018022
0.27455
3,442
111
73
31.009009
0.827793
0.070889
0
0.662791
0
0
0.044842
0.018188
0
0
0
0
0.046512
1
0.127907
false
0
0.325581
0
0.523256
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
0d7285a9ed5b6be753859ece5ad8b0b1141ab7cd
522
py
Python
app/barometer/migrations/0002_auto_20201206_1510.py
Valentin-Golyonko/FlaskTestRPi
b9796a9acb2bb1c122301a3ef192f43c857eb27b
[ "Apache-2.0" ]
null
null
null
app/barometer/migrations/0002_auto_20201206_1510.py
Valentin-Golyonko/FlaskTestRPi
b9796a9acb2bb1c122301a3ef192f43c857eb27b
[ "Apache-2.0" ]
null
null
null
app/barometer/migrations/0002_auto_20201206_1510.py
Valentin-Golyonko/FlaskTestRPi
b9796a9acb2bb1c122301a3ef192f43c857eb27b
[ "Apache-2.0" ]
null
null
null
# Generated by Django 3.1.4 on 2020-12-06 15:10 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('barometer', '0001_initial'), ] operations = [ migrations.RenameField( model_name='barometer', old_name='pressure_pha', new_name='pressure_hpa', ), migrations.RenameField( model_name='barometer', old_name='temperature', new_name='temperature_c', ), ]
21.75
47
0.570881
51
522
5.647059
0.647059
0.145833
0.180556
0.208333
0.319444
0.319444
0.319444
0
0
0
0
0.053521
0.319923
522
23
48
22.695652
0.757746
0.086207
0
0.352941
1
0
0.183158
0
0
0
0
0
0
1
0
false
0
0.058824
0
0.235294
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
0d772a85e8e35a835ffc788163c3de141d702286
127
py
Python
testpopulator.py
chuzarski/thesketchyhouse
a5bcc225c40f3069e9a0d891025d22cb0c912e7b
[ "Apache-2.0" ]
null
null
null
testpopulator.py
chuzarski/thesketchyhouse
a5bcc225c40f3069e9a0d891025d22cb0c912e7b
[ "Apache-2.0" ]
null
null
null
testpopulator.py
chuzarski/thesketchyhouse
a5bcc225c40f3069e9a0d891025d22cb0c912e7b
[ "Apache-2.0" ]
null
null
null
from game.engine import Engine from game.populator import Populator eng = Engine() pop = Populator(eng) pop.populateRooms()
14.111111
36
0.771654
17
127
5.764706
0.470588
0.163265
0
0
0
0
0
0
0
0
0
0
0.141732
127
8
37
15.875
0.899083
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
0d7d782423bde333b668c8b5ea471874280a3ab1
257
py
Python
desafiosCursoEmVideo/ex060.py
gomesGabriel/Pythonicos
b491cefbb0479dd83fee267304d0fa30b99786a5
[ "MIT" ]
1
2019-09-02T12:14:58.000Z
2019-09-02T12:14:58.000Z
desafiosCursoEmVideo/ex060.py
gomesGabriel/Pythonicos
b491cefbb0479dd83fee267304d0fa30b99786a5
[ "MIT" ]
null
null
null
desafiosCursoEmVideo/ex060.py
gomesGabriel/Pythonicos
b491cefbb0479dd83fee267304d0fa30b99786a5
[ "MIT" ]
null
null
null
print('\033[33m-=-\033[m' * 20) print('\033[33m************* Fatorial *************\033[m') print('\033[33m-=-\033[m' * 20) v = float(input('Insira um valor: ')) c = 1 f = 1 while c <= v: f = f * c c += 1 print('O fatorial de {} é {}' .format(v, f))
25.7
59
0.474708
44
257
2.772727
0.454545
0.196721
0.270492
0.229508
0.278689
0.278689
0
0
0
0
0
0.148325
0.18677
257
10
60
25.7
0.435407
0
0
0.2
0
0
0.472868
0.081395
0
0
0
0
0
1
0
false
0
0
0
0
0.4
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
0d7daff3ac7cc306d71508e0fda3784fb6019e7f
2,736
py
Python
app/models/User.py
mmb186/MobileApp_API
da8d5ce20ceda19d14b4e86535d9caadc2dd4275
[ "MIT" ]
2
2020-03-05T14:00:30.000Z
2020-09-08T18:28:10.000Z
app/models/User.py
mmb186/MobileApp_API
da8d5ce20ceda19d14b4e86535d9caadc2dd4275
[ "MIT" ]
null
null
null
app/models/User.py
mmb186/MobileApp_API
da8d5ce20ceda19d14b4e86535d9caadc2dd4275
[ "MIT" ]
null
null
null
from datetime import datetime from app import db, bcrypt trusted_contacts = db.Table( 'trusted_contacts', db.Column('user_id', db.Integer, db.ForeignKey('users.id')), db.Column('contact_id', db.Integer, db.ForeignKey('users.id')) ) class User(db.Model): """ User Table Schema """ __tablename__ = 'users' id = db.Column(db.Integer, primary_key=True, autoincrement=True) email = db.Column(db.String(255), nullable=False, unique=True) username = db.Column(db.String(255), nullable=False, unique=True) password = db.Column(db.String, nullable=False) first_name = db.Column(db.String, nullable=False) last_name = db.Column(db.String, nullable=False) creation_time = db.Column(db.DateTime, nullable=False, default=datetime.utcnow) last_modified_time = db.Column(db.DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow) trusted_contacts = db.relationship( 'User', secondary=trusted_contacts, primaryjoin=(trusted_contacts.c.user_id == id), secondaryjoin=(trusted_contacts.c.contact_id == id), backref=db.backref('trusted_contacts_ref', lazy='dynamic'), lazy='dynamic' ) def __inti__(self, email, public_id, password, first_name, last_name, username): self.email = email self.public_id = public_id self.password = password self.first_name = first_name self.last_name = last_name self.username = username def __repr__(self): return f'<id {self.id}>' def save(self): db.session.add(self) db.session.commit() return self def verify_password(self, password): return bcrypt.check_password_hash(self.password, password) @classmethod def get_by_id(cls, user_id): return cls.query.filter_by(id=user_id).first() @classmethod def get_user_by_email(cls, email): return cls.query.filter_by(email=email).first() @classmethod def get_by_username(cls, username): return cls.query.filter_by(username=username).first() @staticmethod def generate_hash(password): return bcrypt.generate_password_hash(password).decode('utf-8') @classmethod def get_all(cls): return cls.query.all() def add_contact(self, 
user): if not self.is_already_contact(user): self.trusted_contacts.append(user) def remove_contact(self, user): if self.is_already_contact(user): self.trusted_contacts.remove(user) def is_already_contact(self, user): return self.trusted_contacts.filter( trusted_contacts.c.contact_id == user.id).count() > 0
31.448276
85
0.663012
348
2,736
5.008621
0.241379
0.094664
0.045898
0.045898
0.317269
0.250717
0.234079
0.16179
0.11245
0.064257
0
0.003756
0.221491
2,736
86
86
31.813953
0.814554
0.006213
0
0.061538
0
0
0.041126
0
0
0
0
0
0
1
0.184615
false
0.107692
0.030769
0.123077
0.523077
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
1
1
0
0
3
0dd125bef5be36b3b21fb072e6a39b654266b439
281
py
Python
ACM-Solution/fIBoNaCCi.py
wasi0013/Python-CodeBase
4a7a36395162f68f84ded9085fa34cc7c9b19233
[ "MIT" ]
2
2016-04-26T15:40:40.000Z
2018-07-18T10:16:42.000Z
ACM-Solution/fIBoNaCCi.py
wasi0013/Python-CodeBase
4a7a36395162f68f84ded9085fa34cc7c9b19233
[ "MIT" ]
1
2016-04-26T15:44:15.000Z
2016-04-29T14:44:40.000Z
ACM-Solution/fIBoNaCCi.py
wasi0013/Python-CodeBase
4a7a36395162f68f84ded9085fa34cc7c9b19233
[ "MIT" ]
1
2018-10-02T16:12:19.000Z
2018-10-02T16:12:19.000Z
fibs = {0: 0, 1: 1} def fib(n): if n in fibs: return fibs[n] if n % 2 == 0: fibs[n] = ((2 * fib((n / 2) - 1)) + fib(n / 2)) * fib(n / 2) return fibs[n] fibs[n] = (fib((n - 1) / 2) ** 2) + (fib((n+1) / 2) ** 2) return fibs[n] # limit 100000
28.1
69
0.405694
52
281
2.192308
0.230769
0.210526
0.289474
0.105263
0.245614
0
0
0
0
0
0
0.127072
0.355872
281
9
70
31.222222
0.502762
0.042705
0
0.25
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0
0
0.375
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
0df7fa8f3982ea3308fba6618fab9189d95cc415
380
py
Python
src/sima/post/safetyclass.py
SINTEF/simapy
650b8c2f15503dad98e2bfc0d0788509593822c7
[ "MIT" ]
null
null
null
src/sima/post/safetyclass.py
SINTEF/simapy
650b8c2f15503dad98e2bfc0d0788509593822c7
[ "MIT" ]
null
null
null
src/sima/post/safetyclass.py
SINTEF/simapy
650b8c2f15503dad98e2bfc0d0788509593822c7
[ "MIT" ]
null
null
null
# Generated with SafetyClass # from enum import Enum from enum import auto class SafetyClass(Enum): """""" LOW = auto() NORMAL = auto() HIGH = auto() def label(self): if self == SafetyClass.LOW: return "Low" if self == SafetyClass.NORMAL: return "Normal" if self == SafetyClass.HIGH: return "High"
21.111111
38
0.557895
41
380
5.170732
0.390244
0.084906
0.240566
0
0
0
0
0
0
0
0
0
0.334211
380
18
39
21.111111
0.837945
0.068421
0
0
1
0
0.037681
0
0
0
0
0
0
1
0.076923
false
0
0.153846
0
0.769231
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
217711fec45d8b72a9afac237b28003f340fdc97
214
py
Python
snake/myfirsttime/serializers.py
ClovisIRex/Snake-django
dc5484ee874d0157f4c14e37c835d0b6fe1cb40f
[ "MIT" ]
null
null
null
snake/myfirsttime/serializers.py
ClovisIRex/Snake-django
dc5484ee874d0157f4c14e37c835d0b6fe1cb40f
[ "MIT" ]
null
null
null
snake/myfirsttime/serializers.py
ClovisIRex/Snake-django
dc5484ee874d0157f4c14e37c835d0b6fe1cb40f
[ "MIT" ]
1
2021-10-29T18:00:46.000Z
2021-10-29T18:00:46.000Z
from rest_framework import serializers from .models import Score class ScoreSerializer(serializers.ModelSerializer): class Meta: model = Score fields = ('player_name', 'player_score')
23.777778
51
0.700935
22
214
6.681818
0.681818
0
0
0
0
0
0
0
0
0
0
0
0.228972
214
9
52
23.777778
0.890909
0
0
0
0
0
0.106977
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
21881b8505b1236d808649841dfc5d0c6f56a1a7
581
py
Python
collector/lib/url_sources/s_pinboard.py
zackw/tbbscraper
a8da7640c1e146fb087ae7c3383526f155b4d39e
[ "Apache-2.0" ]
17
2015-03-27T06:03:30.000Z
2021-12-21T06:22:08.000Z
collector/lib/url_sources/s_pinboard.py
nissow/tbbscraper
a8da7640c1e146fb087ae7c3383526f155b4d39e
[ "Apache-2.0" ]
null
null
null
collector/lib/url_sources/s_pinboard.py
nissow/tbbscraper
a8da7640c1e146fb087ae7c3383526f155b4d39e
[ "Apache-2.0" ]
16
2015-11-22T21:55:41.000Z
2021-06-22T15:16:41.000Z
# Copyright © 2013, 2014 Zack Weinberg # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # There is NO WARRANTY. """Import URLs from a Pinboard JSON dump.""" def setup_argp(ap): ap.add_argument("user", help="Pinboard user whose bookmarks these are.") ap.add_argument("file", help="File to import.") def run(args): from url_sources.pinboard import PinboardExtractor PinboardExtractor(args)()
32.277778
76
0.728055
90
581
4.666667
0.677778
0.071429
0.061905
0.07619
0
0
0
0
0
0
0
0.024641
0.16179
581
17
77
34.176471
0.835729
0.533563
0
0
0
0
0.243243
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
3
21bad8c558852bcb990a54123a800b7d9fddb225
682
py
Python
Turtle Drawing Shapes.py
HelloYeew/helloyeew-lab-computer-programming-i
60b05072f32f23bab4a336b506ba7f66e52c045d
[ "MIT" ]
null
null
null
Turtle Drawing Shapes.py
HelloYeew/helloyeew-lab-computer-programming-i
60b05072f32f23bab4a336b506ba7f66e52c045d
[ "MIT" ]
null
null
null
Turtle Drawing Shapes.py
HelloYeew/helloyeew-lab-computer-programming-i
60b05072f32f23bab4a336b506ba7f66e52c045d
[ "MIT" ]
null
null
null
LAB = "turtlelab4.py" import urllib.request urllib.request.urlretrieve(f"http://elab.cpe.ku.ac.th/turtlelab/{LAB}",LAB) from turtlelab4 import turtle,check def draw_square(size): turtle.forward(size) turtle.right(90) turtle.forward(size) turtle.right(90) turtle.forward(size) turtle.right(90) turtle.forward(size) turtle.right(90) def draw_triangle(size): turtle.right(90) turtle.forward(size) turtle.right(120) turtle.forward(size) turtle.right(120) turtle.forward(size) turtle.right(120) # turtle.left(30) # draw_square(120) # turtle.left(90) # turtle.forward(120) # turtle.right(90) # draw_triangle(120) # check()
20.058824
75
0.695015
96
682
4.895833
0.302083
0.191489
0.255319
0.342553
0.502128
0.502128
0.502128
0.502128
0.502128
0.465957
0
0.062718
0.158358
682
34
76
20.058824
0.756098
0.164223
0
0.714286
0
0
0.094139
0
0
0
0
0
0
1
0.095238
false
0
0.095238
0
0.190476
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
21d8da180cf0a87cbd8bef2d3eeb3cca6b87ec99
600
py
Python
bbpyp/lexicomb/parser/unary_operator.py
BloggerBust/tag_script
985a4b6b8c026431842d608ba8feafe8751dd425
[ "Apache-2.0" ]
null
null
null
bbpyp/lexicomb/parser/unary_operator.py
BloggerBust/tag_script
985a4b6b8c026431842d608ba8feafe8751dd425
[ "Apache-2.0" ]
null
null
null
bbpyp/lexicomb/parser/unary_operator.py
BloggerBust/tag_script
985a4b6b8c026431842d608ba8feafe8751dd425
[ "Apache-2.0" ]
null
null
null
from bbpyp.lexicomb.parser.artifact import Artifact class UnaryOperator(Artifact): def __init__(self, operator, *args, **kwargs): super().__init__(*args, **kwargs) self._operator = operator self._operand = None def __call__(self, operand): assert self._operand is None self._operand = operand return self def __repr__(self): return f"{type(self).__name__}({self.operator}, {self.operand})" @property def operator(self): return self._operator @property def operand(self): return self._operand
22.222222
72
0.636667
66
600
5.393939
0.393939
0.185393
0.106742
0
0
0
0
0
0
0
0
0
0.256667
600
26
73
23.076923
0.798206
0
0
0.111111
0
0
0.09
0.063333
0
0
0
0
0.055556
1
0.277778
false
0
0.055556
0.166667
0.611111
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
21dc24a14026a94b224097331ac091e99fd564f6
1,749
py
Python
tests/core_test.py
mediaonegroup/WPWatcher
a8289af766e604e1c8f7d538513bf69dd74bfb50
[ "Apache-2.0" ]
27
2020-03-22T08:15:19.000Z
2022-02-09T14:51:25.000Z
tests/core_test.py
mediaonegroup/WPWatcher
a8289af766e604e1c8f7d538513bf69dd74bfb50
[ "Apache-2.0" ]
38
2020-04-09T14:59:36.000Z
2021-04-25T20:10:19.000Z
tests/core_test.py
mediaonegroup/WPWatcher
a8289af766e604e1c8f7d538513bf69dd74bfb50
[ "Apache-2.0" ]
11
2020-04-17T01:07:19.000Z
2021-11-12T00:08:34.000Z
import unittest import shlex from datetime import timedelta from . import DEFAULT_CONFIG from wpwatcher.core import WPWatcher from wpwatcher.config import Config from wpwatcher.scan import Scanner from wpwatcher.email import EmailSender from wpwatcher.wpscan import WPScanWrapper from wpwatcher.daemon import Daemon from wpwatcher.utils import timeout class T(unittest.TestCase): def test_interrupt(self): wpwatcher=WPWatcher(Config.fromstring(DEFAULT_CONFIG)) with self.assertRaises(SystemExit): wpwatcher.interrupt() def test_init_wpwatcher(self): # Init deafult watcher wpwatcher=WPWatcher(Config.fromstring(DEFAULT_CONFIG)) self.assertEqual(type(wpwatcher.scanner), Scanner, "Scanner doesn't seem to have been initialized") self.assertEqual(type(wpwatcher.scanner.mail), EmailSender, "EmailSender doesn't seem to have been initialized") self.assertEqual(type(wpwatcher.scanner.wpscan), WPScanWrapper, "WPScanWrapper doesn't seem to have been initialized") self.assertEqual(shlex.split(Config.fromstring(DEFAULT_CONFIG)['wpscan_path']), wpwatcher.scanner.wpscan._wpscan_path, "WPScan path seems to be wrong") def test_asynch_exec(self): # test max number of threads respected pass def test_daemon(self): # test daemon_loop_sleep and daemon mode conf = Config.fromstring(DEFAULT_CONFIG) conf['asynch_workers']+=1 daemon = Daemon(conf) daemon.loop(ttl=timedelta(seconds=5)) self.assertTrue(not any([r.status() != 'ERROR' for r in daemon.wpwatcher.new_reports])) self.assertGreater(len(daemon.wpwatcher.new_reports), 1) def test_fail_fast(self): pass
34.98
159
0.728416
215
1,749
5.827907
0.367442
0.072626
0.073424
0.092578
0.245012
0.217079
0.142059
0.142059
0.142059
0.105347
0
0.00211
0.186964
1,749
49
160
35.693878
0.879044
0.054889
0
0.121212
0
0
0.123711
0
0
0
0
0
0.212121
1
0.151515
false
0.060606
0.333333
0
0.515152
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
3
0dfb112428109c6407e7d42c422c841df79d76a9
147
py
Python
tests/regression_tests/eigenvalue_genperbatch/test.py
janmalec/openmc
4a4ac4c351d41fe153ca3341820cc507e484ce50
[ "MIT" ]
1
2019-04-10T12:41:16.000Z
2019-04-10T12:41:16.000Z
tests/regression_tests/eigenvalue_genperbatch/test.py
janmalec/openmc
4a4ac4c351d41fe153ca3341820cc507e484ce50
[ "MIT" ]
5
2015-03-11T02:28:25.000Z
2018-11-07T14:10:28.000Z
tests/regression_tests/eigenvalue_genperbatch/test.py
janmalec/openmc
4a4ac4c351d41fe153ca3341820cc507e484ce50
[ "MIT" ]
null
null
null
from tests.testing_harness import TestHarness def test_eigenvalue_genperbatch(): harness = TestHarness('statepoint.7.h5') harness.main()
21
45
0.768707
17
147
6.470588
0.823529
0
0
0
0
0
0
0
0
0
0
0.015748
0.136054
147
6
46
24.5
0.850394
0
0
0
0
0
0.102041
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
df068ae03885e75d7535202c761e8b4454425e66
175
py
Python
test_pydub.py
gilmore-lab/pliers-play
16fe85df6b188e6b40c1761f91abd607d0b97f45
[ "MIT" ]
null
null
null
test_pydub.py
gilmore-lab/pliers-play
16fe85df6b188e6b40c1761f91abd607d0b97f45
[ "MIT" ]
null
null
null
test_pydub.py
gilmore-lab/pliers-play
16fe85df6b188e6b40c1761f91abd607d0b97f45
[ "MIT" ]
null
null
null
# test pydub conversion from pydub import AudioSegment snd = 'snd/peep-I-hap-tlk.mp3' song = AudioSegment.from_mp3(snd) song.export("snd/peep-I-hap-tlk.wav", format="wav")
19.444444
51
0.737143
29
175
4.413793
0.551724
0.109375
0.125
0.171875
0.21875
0
0
0
0
0
0
0.012821
0.108571
175
9
51
19.444444
0.807692
0.12
0
0
0
0
0.30719
0.287582
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
df1a1f7fa936c5ba8a8a1f770f36528036750ac5
720
py
Python
collective_blog/settings/__init__.py
AmatanHead/collective-blog
9bf040faac43feae08b33900e30bf7d17b817ae4
[ "MIT" ]
null
null
null
collective_blog/settings/__init__.py
AmatanHead/collective-blog
9bf040faac43feae08b33900e30bf7d17b817ae4
[ "MIT" ]
4
2016-09-22T06:37:20.000Z
2016-09-22T16:49:48.000Z
collective_blog/settings/__init__.py
AmatanHead/collective-blog
9bf040faac43feae08b33900e30bf7d17b817ae4
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Django settings for collective_blog project Generated by 'django-admin startproject' using Django 1.9.1. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os if 'TRAVIS' in os.environ: from .travis_settings import * elif 'HEROKU' in os.environ: from .prod_settings import * elif 'DEV' in os.environ: from .dev_settings import * else: raise RuntimeError('you should specify running environment ' '(use `export DEV=1` for debug, ' '`export HEROKU=1` for production)')
27.692308
64
0.680556
102
720
4.764706
0.578431
0.115226
0.067901
0.092593
0.131687
0.131687
0.131687
0.131687
0
0
0
0.017513
0.206944
720
25
65
28.8
0.833625
0.455556
0
0
1
0
0.307292
0
0
0
0
0
0
1
0
true
0
0.363636
0
0.363636
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
df6275c47f542f9b3995355d55489046f0a7a80b
4,858
py
Python
schablonesk/ast.py
ThomasBollmeier/schablonesk
c2978601dd02b7f2c2a182ff0d6851d4eaed2d49
[ "MIT" ]
null
null
null
schablonesk/ast.py
ThomasBollmeier/schablonesk
c2978601dd02b7f2c2a182ff0d6851d4eaed2d49
[ "MIT" ]
null
null
null
schablonesk/ast.py
ThomasBollmeier/schablonesk
c2978601dd02b7f2c2a182ff0d6851d4eaed2d49
[ "MIT" ]
null
null
null
class Template(object): def __init__(self, usages, snippets, blocks): self.usages = usages self.snippets = snippets self.blocks = blocks def accept(self, visitor): visitor.visit_template(self) class Text(object): def __init__(self, text_token): self._token = text_token def get_token(self): return self._token token = property(get_token) def _get_content(self): return self._token.lexeme content = property(_get_content) def accept(self, visitor): visitor.visit_text(self) class CondBlock(object): def __init__(self, branches): self.branches = branches def accept(self, visitor): visitor.visit_cond(self) class ForBlock(object): def __init__(self, item_ident, list_expr, blocks, filter_cond=None): self.item_ident = item_ident self.list_expr = list_expr self.blocks = blocks self.filter_cond = filter_cond def accept(self, visitor): visitor.visit_for(self) class Snippet(object): def __init__(self, snippet_name, params, blocks): self.name = snippet_name self.params = params self.blocks = blocks def accept(self, visitor): visitor.visit_snippet(self) class SnippetCall(object): def __init__(self, snippet_name, args): self.name = snippet_name self.args = args def accept(self, visitor): visitor.visit_snippet_call(self) class Call(object): def __init__(self, callee, args): self.callee = callee self.args = args def accept(self, visitor): visitor.visit_call(self) class Use(object): def __init__(self, template_name, names): self.template_name = template_name self.names = names # names and aliases def accept(self, visitor): visitor.visit_use(self) class SingleToken(object): def __init__(self, token): self.token = token def accept(self, visitor): visitor.visit_expr(self) class Identifier(SingleToken): def __init__(self, identifier_token): SingleToken.__init__(self, identifier_token) def get_name(self): return self.token.lexeme class SimpleValue(SingleToken): def __init__(self, token): SingleToken.__init__(self, token) def get_value(self): raise Exception("Not implemented") class 
Bool(SimpleValue): def __init__(self, bool_token): SimpleValue.__init__(self, bool_token) def get_value(self): s = self.token.lexeme return s == "true" or s == "else" class String(SimpleValue): def __init__(self, str_token): SimpleValue.__init__(self, str_token) def get_value(self): return self.token.lexeme def get_string(self): return self.token.lexeme[1:-1].replace("\\'", "'") class Int(SimpleValue): def __init__(self, int_token): SimpleValue.__init__(self, int_token) def get_value(self): return int(self.token.lexeme) class Real(SimpleValue): def __init__(self, real_token): SimpleValue.__init__(self, real_token) def get_value(self): return float(self.token.lexeme) class QualifiedName(object): def __init__(self, identifier_tokens): self.identifier_tokens = identifier_tokens def accept(self, visitor): visitor.visit_expr(self) def get_name(self): return str(self) def __str__(self): return ".".join(list(map(lambda ident: ident.lexeme, self.identifier_tokens))) class LogicalBinExpr(object): def __init__(self, op, left, right): self.op = op self.left = left self.right = right def accept(self, visitor): visitor.visit_logical_bin(self) class LogicalRelation(object): def __init__(self, op, left, right): self.op = op self.left = left self.right = right def accept(self, visitor): visitor.visit_logical_rel(self) class Negation(object): def __init__(self, expr): self.expr = expr def accept(self, visitor): visitor.visit_negation(self) class BaseVisitor(object): def __init__(self): pass def visit_template(self, templ): pass def visit_text(self, text): pass def visit_cond(self, cond_block): pass def visit_for(self, for_block): pass def visit_snippet(self, snippet): pass def visit_snippet_call(self, snippet_call): pass def visit_use(self, use): pass def visit_call(self, func_call): pass def visit_expr(self, expr): pass def visit_logical_bin(self, logical_bin): pass def visit_logical_rel(self, logical_rel): pass def visit_negation(self, negation): pass
19.747967
86
0.640387
593
4,858
4.919056
0.134907
0.071306
0.07542
0.081591
0.33459
0.265341
0.164553
0.159753
0.132328
0.069249
0
0.000558
0.261836
4,858
245
87
19.828571
0.812883
0.003499
0
0.362416
0
0
0.005786
0
0
0
0
0
0
1
0.375839
false
0.087248
0
0.060403
0.590604
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
3
df6928e05e07bfd264f95dcd6e85783fa9aa39ad
828
py
Python
setup.py
nikitanovosibirsk/switch_case
67a106c9db169008de03db381332b55cfee7991e
[ "MIT" ]
6
2019-09-13T17:19:49.000Z
2021-01-25T21:49:39.000Z
setup.py
nikitanovosibirsk/switch_case
67a106c9db169008de03db381332b55cfee7991e
[ "MIT" ]
2
2020-04-13T08:00:09.000Z
2021-10-16T13:38:14.000Z
setup.py
nikitanovosibirsk/switch_case
67a106c9db169008de03db381332b55cfee7991e
[ "MIT" ]
1
2021-01-25T21:49:42.000Z
2021-01-25T21:49:42.000Z
from setuptools import find_packages, setup setup( name="switch_case", version="1.5", author="Nikita Tsvetkov", author_email="nikitanovosibirsk@yandex.com", python_requires=">=3.6", description="Switch-case statement for Python", long_description=open("README.md").read(), long_description_content_type="text/markdown", url="https://github.com/nikitanovosibirsk/switch_case", license="MIT", packages=find_packages(), classifiers=[ "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Intended Audience :: Developers", ], )
33.12
59
0.641304
89
828
5.853933
0.58427
0.182342
0.239923
0.24952
0
0
0
0
0
0
0
0.022936
0.210145
828
24
60
34.5
0.7737
0
0
0
0
0
0.509662
0.033816
0
0
0
0
0
1
0
true
0
0.043478
0
0.043478
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
df72bbc3bb7e498daff48dd0c5415775b33b99ae
132
py
Python
raspberryturk/core/vision/constants.py
Dzhuks/raspberryturk
bb7fa25d70f8c1291997332d12fe06d19d957a4e
[ "MIT" ]
null
null
null
raspberryturk/core/vision/constants.py
Dzhuks/raspberryturk
bb7fa25d70f8c1291997332d12fe06d19d957a4e
[ "MIT" ]
null
null
null
raspberryturk/core/vision/constants.py
Dzhuks/raspberryturk
bb7fa25d70f8c1291997332d12fe06d19d957a4e
[ "MIT" ]
null
null
null
ROWS = COLUMNS = 8 # количество столбцов и строк SQUARE_SIZE = 60 # размер клетки BOARD_SIZE = SQUARE_SIZE * ROWS # размер доски
33
49
0.734848
19
132
4.947368
0.736842
0.212766
0
0
0
0
0
0
0
0
0
0.028571
0.204545
132
3
50
44
0.866667
0.409091
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
df757e4687daf0050e15461032bcc9cf82572234
18,123
py
Python
BWSTableEditors.py
MinN-11/berwick-saga-editor-randomizer
afa641b99d0f95c3258c7e40878d50b9f5f24f9c
[ "MIT" ]
3
2021-02-14T14:51:35.000Z
2021-04-03T21:31:00.000Z
BWSTableEditors.py
MinN-11/berwick-saga-editor-randomizer
afa641b99d0f95c3258c7e40878d50b9f5f24f9c
[ "MIT" ]
null
null
null
BWSTableEditors.py
MinN-11/berwick-saga-editor-randomizer
afa641b99d0f95c3258c7e40878d50b9f5f24f9c
[ "MIT" ]
null
null
null
from BWSDefinitions import * LanguageUsed = 1 # 0 is japanese, 1 is translation patch, globals are bad I know def SetLanguageUsed(v): global LanguageUsed LanguageUsed = v class UnknownAttributeError(Exception): pass class UnknownCommandError(Exception): pass class UnknownItemError(Exception): pass def to_five_bit_signed(value): if value >= 0: return value & 0xF else: return (value + 0x10) & 0x1F def to_six_bit_signed(value): if value >= 0: return value & 0x1F else: return (value + 0x30) & 0x3F def to_eight_bit_signed(value): if value >= 0: return value & 0x7F else: return (value + 0x80) & 0xFF def write_x_bits(buffer, offset, x_bits, bit_offset, value): value &= ((1 << x_bits) - 1) buffer[offset] = (buffer[offset] & (0xFF - (((1 << x_bits) - 1) << bit_offset) & 0xFF)) + ((value << bit_offset) & 0xFF) if bit_offset + x_bits > 8: buffer[offset + 1] = (buffer[offset + 1] & (0xFF - (((1 << x_bits) - 1) >> (8 - bit_offset)))) + ( value >> (8 - bit_offset)) def read_x_bits(buffer, offset, x_bits, bit_offset): value = ((0xFF >> (8 - x_bits if 8 - x_bits > 0 else 0) << bit_offset) & 0xFF & buffer[offset]) >> bit_offset if bit_offset + x_bits > 8: value += (buffer[offset + 1] & ((1 << bit_offset + x_bits - 8) - 1)) << (8 - bit_offset) return value def modify_x_bits(buffer, offset, x_bits, bit_offset, value, modifier, ma=65535, mi=0): if modifier == 1: value = read_x_bits(buffer, offset, x_bits, bit_offset) + value elif modifier == -1: value = read_x_bits(buffer, offset, x_bits, bit_offset) - value elif modifier == 2: value = int(read_x_bits(buffer, offset, x_bits, bit_offset) * value) value = 0 if value < 0 else (1 << x_bits) - 1 if value >= 1 << x_bits else value if modifier != 0: # if we just set the value, no checking here, for extreme cases value = max(mi, min(ma, value)) write_x_bits(buffer, offset, x_bits, bit_offset, value) def set_base(buffer, unit, stat, value): value, modifier = value offsets = UnitOffsets[LanguageUsed][0] + UnitToOffset[unit], 
UnitOffsets[LanguageUsed][1] + UnitToOffset[unit] for offset in offsets: if stat == "level" or stat == "lv" or stat == "rank": modify_x_bits(buffer, offset + 20, 6, 0, value, modifier) elif stat == "hp": modify_x_bits(buffer, offset + 22, 7, 4, value, modifier) elif stat == "strength" or stat == "str": modify_x_bits(buffer, offset + 23, 5, 3, to_five_bit_signed(value), modifier) elif stat == "speed" or stat == "spe" or stat == "spd": modify_x_bits(buffer, offset + 24, 5, 0, to_five_bit_signed(value), modifier) elif stat == "luck" or stat == "luk": modify_x_bits(buffer, offset + 24, 5, 5, to_five_bit_signed(value), modifier) elif stat == "defense" or stat == "def": modify_x_bits(buffer, offset + 25, 5, 2, to_five_bit_signed(value), modifier) elif stat == "mind" or stat == "magic" or stat == "mag": modify_x_bits(buffer, offset + 25, 5, 7, to_five_bit_signed(value), modifier) elif stat == "knife": modify_x_bits(buffer, offset + 36, 10, 0, value * 10, modifier) elif stat == "sword": modify_x_bits(buffer, offset + 37, 10, 2, value * 10, modifier) elif stat == "spear" or stat == "lance": modify_x_bits(buffer, offset + 38, 10, 4, value * 10, modifier) elif stat == "axe": modify_x_bits(buffer, offset + 40, 10, 0, value * 10, modifier) elif stat == "bow": modify_x_bits(buffer, offset + 41, 10, 2, value * 10, modifier) elif stat == "crossbow": modify_x_bits(buffer, offset + 42, 10, 4, value * 10, modifier) elif stat == "fire": modify_x_bits(buffer, offset + 44, 10, 0, value * 10, modifier) elif stat == "thunder": modify_x_bits(buffer, offset + 45, 10, 2, value * 10, modifier) elif stat == "wind": modify_x_bits(buffer, offset + 46, 10, 4, value * 10, modifier) elif stat == "holy" or stat == "light": modify_x_bits(buffer, offset + 48, 10, 0, value * 10, modifier) elif stat == "dark": modify_x_bits(buffer, offset + 49, 10, 2, value * 10, modifier) elif stat == "sshield": modify_x_bits(buffer, offset + 50, 10, 4, value * 10, modifier) elif stat == "mshield": 
modify_x_bits(buffer, offset + 52, 10, 0, value * 10, modifier) elif stat == "lshield": modify_x_bits(buffer, offset + 53, 10, 2, value * 10, modifier) elif stat == "offhand": modify_x_bits(buffer, offset + 26, 4, 4, value + 1, modifier) # 0 is unequipped elif stat == "mainhand": modify_x_bits(buffer, offset + 27, 4, 0, value + 1, modifier) # 0 is unequipped else: raise UnknownAttributeError def set_growth(buffer, unit, stat, value): value, modifier = value offsets = GrowthOffsets[LanguageUsed][0] + (UnitToIndex[unit] - 1) * 32, GrowthOffsets[LanguageUsed][1] + (UnitToIndex[unit] - 1) * 32 if stat == "bracket" and not value.isdigit(): value = {"no": 1, "loose": 2, "tight": 3}[value] for offset in offsets: if stat == "hp": modify_x_bits(buffer, offset, 7, 0, value, modifier) elif stat == "strength" or stat == "str": modify_x_bits(buffer, offset, 7, 7, value, modifier) elif stat == "mind" or stat == "magic" or stat == "mag": modify_x_bits(buffer, offset + 5, 7, 0, value, modifier) elif stat == "speed" or stat == "spe" or stat == "spd": modify_x_bits(buffer, offset + 4, 7, 2, value, modifier) elif stat == "defense" or stat == "def": modify_x_bits(buffer, offset + 1, 7, 6, value, modifier) elif stat == "knife": modify_x_bits(buffer, offset + 8, 4, 0, value // 10, modifier) elif stat == "sword": modify_x_bits(buffer, offset + 8, 4, 4, value // 10, modifier) elif stat == "spear" or stat == "lance": modify_x_bits(buffer, offset + 9, 4, 0, value // 10, modifier) elif stat == "axe": modify_x_bits(buffer, offset + 9, 4, 4, value // 10, modifier) elif stat == "bow": modify_x_bits(buffer, offset + 10, 4, 0, value // 10, modifier) elif stat == "crossbow": modify_x_bits(buffer, offset + 10, 4, 4, value // 10, modifier) elif stat == "fire": modify_x_bits(buffer, offset + 11, 4, 0, value // 10, modifier) elif stat == "thunder": modify_x_bits(buffer, offset + 11, 4, 4, value // 10, modifier) elif stat == "wind": modify_x_bits(buffer, offset + 12, 4, 0, value // 10, modifier) elif 
stat == "holy" or stat == "light": modify_x_bits(buffer, offset + 12, 4, 4, value // 10, modifier) elif stat == "dark": modify_x_bits(buffer, offset + 13, 4, 0, value // 10, modifier) elif stat == "shield" or stat == "sshield" or stat == "mshield" or stat == "lshield": modify_x_bits(buffer, offset + 13, 4, 4, value // 10, modifier) elif stat == "bracket": write_x_bits(buffer, offset + 2, 2, 5, value) else: raise UnknownAttributeError def set_skill(buffer, unit, skll_name, value): index = Skills.index(skll_name) offset = UnitOffsets[LanguageUsed][0] + UnitToOffset[unit] write_x_bits(buffer, offset + 56 + index // 8, 1, index & 0x7, value) offset = UnitOffsets[LanguageUsed][1] + UnitToOffset[unit] write_x_bits(buffer, offset + 56 + index // 8, 1, index & 0x7, value) def set_item(buffer, unit, slot, item, durability, is_locked, is_dropped): slot, _ = slot durability, _ = durability offsets = UnitOffsets[LanguageUsed][0] + UnitToOffset[unit] + 0xBC + slot * 8, UnitOffsets[LanguageUsed][1] + UnitToOffset[unit] + 0xBC + slot * 8 for offset in offsets: write_x_bits(buffer, offset, 16, 0, item) write_x_bits(buffer, offset + 2, 8, 4, durability) write_x_bits(buffer, offset + 4, 1, 2, int(is_locked)) write_x_bits(buffer, offset + 3, 1, 7, int(is_dropped)) def set_bag_item(buffer, unit, slot, item, durability, is_locked, is_dropped): pass # TODO: implement this def set_learned(buffer, unit, slot, skill, level): slot, _ = slot level, modifier = level if isinstance(skill, str): skill = Skills2.index(skill) offset = GrowthOffsets[LanguageUsed][0] + (UnitToIndex[unit] - 1) * 32 modify_x_bits(buffer, offset + 20 + slot, 8, 0, level, modifier) write_x_bits(buffer, offset + 26 + slot, 8, 0, skill) offset = GrowthOffsets[LanguageUsed][1] + (UnitToIndex[unit] - 1) * 32 modify_x_bits(buffer, offset + 20 + slot, 8, 0, level, modifier) write_x_bits(buffer, offset + 26 + slot, 8, 0, skill) def set_support(buffer, unit, slot, source, amount): pass def set_item_stat(buffer, item, stat, 
value): value, modifier = value offsets = ItemOffsets[LanguageUsed][0] + (ItemToIndex[item] - 1) * 56, ItemOffsets[LanguageUsed][1] + (ItemToIndex[item] - 1) * 56 for offset in offsets: if stat == "might": modify_x_bits(buffer, offset, 6, 5, value, modifier) elif stat == "hex": modify_x_bits(buffer, offset + 1, 4, 3, value, modifier) elif stat == "accuracy": modify_x_bits(buffer, offset + 1, 7, 7, value, modifier) elif stat == "weight": modify_x_bits(buffer, offset + 2, 5, 6, value, modifier) elif stat == "max_range" or stat == "max-range": modify_x_bits(buffer, offset + 3, 5, 3, value, modifier) elif stat == "min_range" or stat == "min-range": modify_x_bits(buffer, offset + 4, 4, 0, value, modifier) elif stat == "crit" or stat == "critical": modify_x_bits(buffer, offset + 5, 7, 0, value, modifier) elif stat == "uses": modify_x_bits(buffer, offset + 5, 7, 7, value, modifier, mi=1) elif stat == "level": modify_x_bits(buffer, offset + 6, 6, 6, value, modifier, mi=1, ma=50) elif stat == "price" or stat == "cost": modify_x_bits(buffer, offset + 8, 16, 0, value, modifier, mi=100) # for the randomizer elif stat == "defense" or stat == "def": modify_x_bits(buffer, offset + 12, 6, 0, to_six_bit_signed(value), modifier) elif stat == "speed" or stat == "spe" or stat == "spd": modify_x_bits(buffer, offset + 13, 5, 3, to_five_bit_signed(value), modifier) elif stat == "avoid" or stat == "avo": modify_x_bits(buffer, offset + 14, 8, 0, to_eight_bit_signed(value), modifier) elif stat == "hit": modify_x_bits(buffer, offset + 15, 8, 0, to_eight_bit_signed(value), modifier) elif stat == "magic" or stat == "mind": modify_x_bits(buffer, offset + 16, 5, 0, to_five_bit_signed(value), modifier) elif stat == "strength": modify_x_bits(buffer, offset + 16, 5, 5, to_five_bit_signed(value), modifier) elif stat == "rounds": modify_x_bits(buffer, offset + 17, 4, 2, value, modifier) elif stat == "fire_res": modify_x_bits(buffer, offset + 17, 6, 6, to_six_bit_signed(value), modifier) elif stat == 
"thunder_res": modify_x_bits(buffer, offset + 18, 6, 4, to_six_bit_signed(value), modifier) elif stat == "wind_res": modify_x_bits(buffer, offset + 19, 6, 2, to_six_bit_signed(value), modifier) elif stat == "dark_res": modify_x_bits(buffer, offset + 20, 6, 0, to_six_bit_signed(value), modifier) elif stat == "holy_res": modify_x_bits(buffer, offset + 20, 6, 6, to_six_bit_signed(value), modifier) elif stat == "durability": if isinstance(value, str): value = Durability.index(value) modify_x_bits(buffer, offset + 21, 3, 4, value, -modifier, ma=6) elif stat == "crit_avoid_penalty": modify_x_bits(buffer, offset + 21, 8, 7, to_eight_bit_signed(value), modifier) else: raise UnknownAttributeError def set_item_effect(buffer, item, effect, value): eff_id = ItemEffects.index(effect) offset = ItemOffsets[LanguageUsed][0] + (ItemToIndex[item] - 1) * 56 write_x_bits(buffer, offset + 28 + eff_id//8, 1, eff_id & 0x7, value) offset = ItemOffsets[LanguageUsed][1] + (ItemToIndex[item] - 1) * 56 write_x_bits(buffer, offset + 28 + eff_id // 8, 1, eff_id & 0x7, value) def set_item_effect_value(buffer, item, effect, value): value, modifier = value offsets = ItemOffsets[LanguageUsed][0] + (ItemToIndex[item] - 1) * 56, ItemOffsets[LanguageUsed][1] + (ItemToIndex[item] - 1) * 56 eff_id = ItemEffectRates.index(effect) for offset in offsets: if effect == 0: write_x_bits(buffer, offset + 26, 7, 0, 0) write_x_bits(buffer, offset + 27, 8, 0, 0) return modify_x_bits(buffer, offset + 26, 7, 0, value, modifier) write_x_bits(buffer, offset + 27, 8, 0, eff_id + 100) def set_class_base(buffer, cls, stat, value): value, modifier = value offsets = ClassOffsets[LanguageUsed][0] + (ClassToIndex[cls] - 1) * 100, ClassOffsets[LanguageUsed][1] + (ClassToIndex[cls] - 1) * 100 for offset in offsets: if stat == "hp": modify_x_bits(buffer, offset, 5, 0, value, modifier) elif stat == "strength" or stat == "str": modify_x_bits(buffer, offset, 5, 5, value, modifier) elif stat == "speed" or stat == "spe": 
modify_x_bits(buffer, offset + 1, 5, 2, value, modifier) elif stat == "defense" or stat == "def": modify_x_bits(buffer, offset + 2, 5, 0, value, modifier) elif stat == "magic" or stat == "mag" or stat == "mind": modify_x_bits(buffer, offset + 2, 5, 5, value, modifier) elif stat == "move" or stat == "mov": modify_x_bits(buffer, offset + 8, 4, 0, value, modifier) elif stat == "experience" or stat == "exp": modify_x_bits(buffer, offset + 4, 7, 0, value, modifier) else: raise UnknownAttributeError def set_class_growth(buffer, cls, stat, value): value, modifier = value offsets = ClassOffsets[LanguageUsed][0] + (ClassToIndex[cls] - 1) * 100, ClassOffsets[LanguageUsed][1] + (ClassToIndex[cls] - 1) * 100 for offset in offsets: if stat == "hp": modify_x_bits(buffer, offset + 24, 7, 0, value, modifier) elif stat == "strength" or stat == "str": modify_x_bits(buffer, offset + 24, 7, 7, value, modifier) elif stat == "speed" or stat == "spe": modify_x_bits(buffer, offset + 25, 7, 6, value, modifier) elif stat == "defense" or stat == "def": modify_x_bits(buffer, offset + 26, 7, 5, value, modifier) elif stat == "magic" or stat == "mag" or stat == "mind": modify_x_bits(buffer, offset + 28, 7, 0, value, modifier) def set_class_caps(buffer, cls, stat, value): value, modifier = value offsets = ClassOffsets[LanguageUsed][0] + (ClassToIndex[cls] - 1) * 100, ClassOffsets[LanguageUsed][1] + (ClassToIndex[cls] - 1) * 100 for offset in offsets: if stat == "knife": modify_x_bits(buffer, offset + 28, 6, 7, value, modifier) elif stat == "sword": modify_x_bits(buffer, offset + 29, 6, 5, value, modifier) elif stat == "spear" or stat == "lance": modify_x_bits(buffer, offset + 30, 6, 3, value, modifier) elif stat == "axe": modify_x_bits(buffer, offset + 31, 6, 1, value, modifier) elif stat == "bow": modify_x_bits(buffer, offset + 32, 6, 0, value, modifier) elif stat == "crossbow": modify_x_bits(buffer, offset + 32, 6, 6, value, modifier) elif stat == "fire": modify_x_bits(buffer, offset + 33, 6, 
4, value, modifier) elif stat == "thunder": modify_x_bits(buffer, offset + 34, 6, 2, value, modifier) elif stat == "wind": modify_x_bits(buffer, offset + 35, 6, 0, value, modifier) elif stat == "holy" or stat == "light": modify_x_bits(buffer, offset + 36, 6, 0, value, modifier) elif stat == "dark": modify_x_bits(buffer, offset + 36, 6, 6, value, modifier) elif stat == "sshield": modify_x_bits(buffer, offset + 37, 6, 4, value, modifier) elif stat == "mshield": modify_x_bits(buffer, offset + 38, 6, 2, value, modifier) elif stat == "lshield": modify_x_bits(buffer, offset + 39, 6, 0, value, modifier) else: raise UnknownAttributeError def set_class_attribute(buffer, cls, attribute, value): offsets = ClassOffsets[LanguageUsed][0] + (ClassToIndex[cls] - 1) * 100, ClassOffsets[LanguageUsed][1] + (ClassToIndex[cls] - 1) * 100 for offset in offsets: if attribute == "mount": v = MountStatus.index(value) write_x_bits(buffer, offset + 4, 2, 4, v) elif attribute == "type": v = UnitTypes.index(value) write_x_bits(buffer, offset + 4, 8, 7, 1 << v) elif attribute == "movement": v = MovementTypes.index(value) write_x_bits(buffer, offset + 6, 4, 1, v) else: raise UnknownAttributeError def set_class_skill(buffer, cls, stat, value): wid = Skills.index(stat) offset = ClassOffsets[LanguageUsed][0] + (ClassToIndex[cls] - 1) * 100 write_x_bits(buffer, offset + 12 + wid//8, 1, wid & 0x7, value) offset = ClassOffsets[LanguageUsed][1] + (ClassToIndex[cls] - 1) * 100 write_x_bits(buffer, offset + 12 + wid // 8, 1, wid & 0x7, value)
46.588689
150
0.594272
2,448
18,123
4.242647
0.095588
0.065473
0.126035
0.194781
0.803197
0.749085
0.659157
0.5803
0.530137
0.452629
0
0.05143
0.272582
18,123
388
151
46.708763
0.736403
0.01076
0
0.358209
0
0
0.04146
0
0
0
0.004743
0.002577
0
1
0.065672
false
0.014925
0.002985
0
0.101493
0
0
0
0
null
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
df8ac3932256a2e1c64b765c682a70cbf4a75951
1,649
py
Python
everest/cascade/test.py
rsbyrne/everest
1ec06301cdeb7c2b7d85daf6075d996c5529247e
[ "MIT" ]
2
2020-12-17T02:27:28.000Z
2020-12-17T23:50:13.000Z
everest/cascade/test.py
rsbyrne/everest
1ec06301cdeb7c2b7d85daf6075d996c5529247e
[ "MIT" ]
1
2020-12-07T10:14:45.000Z
2020-12-07T10:14:45.000Z
everest/cascade/test.py
rsbyrne/everest
1ec06301cdeb7c2b7d85daf6075d996c5529247e
[ "MIT" ]
1
2020-10-22T11:16:50.000Z
2020-10-22T11:16:50.000Z
############################################################################### '''''' ############################################################################### # import unittest def testfunc(a, b, c, d=4, # another comment /, e: int = 5, # stuff f=6, g=7, h=8, *args, # morestuff i, j=10, # k k0=11, k1=110, k2=1100, l=12, # noqa: E741 m=13, # bonusstuff n=14, # morebonusstuff o=15, # ignore fee='fee', fie='fie', foe='foe', fum='fum', # subignore boo='boo', p=16, **kwargs, ): print( a, b, c, d, e, f, g, h, args, i, j, k0, k1, k2, l, m, n, o, p, kwargs ) # class CascadeTest(unittest.TestCase): # def test(self): # inputs = Inputs(testfunc) # self.assertEqual(inputs.stuff.f, 6) # inputs.stuff.f = 'myval' # self.assertEqual(inputs.stuff.f, 'myval') # self.assertEqual(inputs.f, 'myval') # inputs['k'] = 'myval' # self.assertEqual(inputs.k, 'myval') # self.assertEqual(inputs.morestuff.substuff.k, 'myval') # self.assertTrue(not hasattr(inputs, 'foo')) # self.assertTrue(not hasattr(inputs, '_c')) # # if __name__ == '__main__': # unittest.main() ############################################################################### ###############################################################################
26.596774
79
0.346877
144
1,649
3.909722
0.5
0.133215
0.186501
0.184725
0.385435
0.230906
0.124334
0
0
0
0
0.033119
0.340813
1,649
61
80
27.032787
0.484821
0.395998
0
0
0
0
0.023148
0
0
0
0
0
0
1
0.037037
false
0
0
0
0.037037
0.037037
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
10d164dfb89f97ef60e2658dd260f386d58d79e4
146
py
Python
app/services/authentication/oauth/google/__init__.py
maxzhenzhera/my_vocab_backend
2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1
[ "MIT" ]
null
null
null
app/services/authentication/oauth/google/__init__.py
maxzhenzhera/my_vocab_backend
2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1
[ "MIT" ]
null
null
null
app/services/authentication/oauth/google/__init__.py
maxzhenzhera/my_vocab_backend
2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1
[ "MIT" ]
null
null
null
from .authorizer import GoogleAuthorizer from .service import GoogleOAuthService __all__ = [ 'GoogleAuthorizer', 'GoogleOAuthService' ]
16.222222
40
0.767123
11
146
9.818182
0.636364
0
0
0
0
0
0
0
0
0
0
0
0.164384
146
8
41
18.25
0.885246
0
0
0
0
0
0.232877
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
10d361aadaec2d779534c8108ff4c3c5ea8b4d03
214
py
Python
netmiko/connect.py
sambyers/netauto_learning
22c1049bf86e188f774f1c977823abea2bb3abfe
[ "MIT" ]
null
null
null
netmiko/connect.py
sambyers/netauto_learning
22c1049bf86e188f774f1c977823abea2bb3abfe
[ "MIT" ]
null
null
null
netmiko/connect.py
sambyers/netauto_learning
22c1049bf86e188f774f1c977823abea2bb3abfe
[ "MIT" ]
null
null
null
from netmiko import ConnectHandler from config import hosts for host in hosts: device = ConnectHandler(**host) print(f'Connected to {host["host"]} successful: {device.is_alive()}') device.disconnect()
26.75
73
0.724299
27
214
5.703704
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.158879
214
8
74
26.75
0.855556
0
0
0
0
0
0.274419
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.166667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
10f8f5e28a258a56f84a16523c392739bcfc6f5d
1,539
py
Python
10-19/15. record_calls/record_calls.py
dcragusa/PythonMorsels
5f75b51a68769036e4004e9ccdada6b220124ab6
[ "MIT" ]
1
2021-11-30T05:03:24.000Z
2021-11-30T05:03:24.000Z
10-19/15. record_calls/record_calls.py
dcragusa/PythonMorsels
5f75b51a68769036e4004e9ccdada6b220124ab6
[ "MIT" ]
null
null
null
10-19/15. record_calls/record_calls.py
dcragusa/PythonMorsels
5f75b51a68769036e4004e9ccdada6b220124ab6
[ "MIT" ]
2
2021-04-18T05:26:43.000Z
2021-11-28T18:46:43.000Z
import functools # def record_calls(func): # # def wrapped(*args, **kwargs): # wrapped.call_count += 1 # return func(*args, **kwargs) # # wrapped.call_count = 0 # return wrapped # def record_calls(func): # # @functools.wraps(func) # def wrapped(*args, **kwargs): # wrapped.call_count += 1 # return func(*args, **kwargs) # # wrapped.call_count = 0 # return wrapped # class Call: # def __init__(self, args, kwargs): # self.args, self.kwargs = args, kwargs # # # def record_calls(func): # # @functools.wraps(func) # def wrapped(*args, **kwargs): # wrapped.call_count += 1 # wrapped.calls.append(Call(args, kwargs)) # return func(*args, **kwargs) # # wrapped.call_count = 0 # wrapped.calls = [] # return wrapped NO_RETURN = 'NULL' class Call: def __init__(self, args, kwargs, return_value, exception): self.args, self.kwargs, self.return_value, self.exception = args, kwargs, return_value, exception def record_calls(func): @functools.wraps(func) def wrapped(*args, **kwargs): wrapped.call_count += 1 try: return_value = func(*args, **kwargs) wrapped.calls.append(Call(args, kwargs, return_value, None)) except Exception as e: return_value = NO_RETURN wrapped.calls.append(Call(args, kwargs, return_value, e)) raise return return_value wrapped.call_count = 0 wrapped.calls = [] return wrapped
24.046875
105
0.603639
181
1,539
4.966851
0.160221
0.166852
0.151279
0.163515
0.769744
0.714127
0.714127
0.611791
0.400445
0.400445
0
0.007092
0.267057
1,539
63
106
24.428571
0.789894
0.480832
0
0
0
0
0.005229
0
0
0
0
0
0
1
0.15
false
0
0.05
0
0.35
0
0
0
0
null
0
0
1
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
10fcedb4c8059a0af5ebf5ebf30d0d30653fb086
1,101
py
Python
am232x/exceptions.py
ktooi/pyam232x
92eec41d97ec1e5c4257444e90fe0acf728dd9bc
[ "MIT" ]
null
null
null
am232x/exceptions.py
ktooi/pyam232x
92eec41d97ec1e5c4257444e90fe0acf728dd9bc
[ "MIT" ]
1
2021-02-13T04:09:38.000Z
2021-02-13T04:09:38.000Z
am232x/exceptions.py
ktooi/pyam232x
92eec41d97ec1e5c4257444e90fe0acf728dd9bc
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- class AM232xError(Exception): """ AM232x とのデータ送受信において、何らかのエラーが発生したことを示す Exception. am232x モジュールが投げる例外の基底クラスとして利用する。 """ class ReceiveAM232xDataError(AM232xError): """ AM232x からデータを受信した際に、エラーが発生したことを示すエラーコードが含まれていたことを示す Exception. """ def __init__(self, error_code, chip_name="am232x"): self._chip_name = chip_name self._error_code = error_code def __str__(self): return ("{chip_name} : Received error code : 0x{error_code:x}" .format(chip_name=self._chip_name, error_code=self._error_code)) class AM232xCrcCheckError(AM232xError): """ AM232x から受信した CRC と、受信したデータから計算した CRC に相違があることを示す Exception. """ def __init__(self, recv_crc, calc_crc, chip_name="am232x"): self._chip_name = chip_name self._recv_crc = recv_crc self._calc_crc = calc_crc def __str__(self): return ("{chip_name} : CRC error : [receive : 0x{recv_crc:x}, calculate : 0x{calc_crc:x}]" .format(chip_name=self._chip_name, recv_crc=self._recv_crc, calc_crc=self._calc_crc))
32.382353
101
0.681199
131
1,101
5.29771
0.274809
0.138329
0.069164
0.057637
0.308357
0.256484
0.18732
0.10951
0.10951
0
0
0.042286
0.205268
1,101
33
102
33.363636
0.750857
0.217075
0
0.25
0
0.0625
0.172662
0
0
0
0
0
0
1
0.25
false
0
0
0.125
0.5625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
802bc087d127ca8d5be572f177541449d37d6efc
1,357
py
Python
atomic_reactor/utils/imageutil.py
ben-alkov/atomic-reactor
be6b626b7d822f77999f245193fefcc00c501ca9
[ "BSD-3-Clause" ]
16
2019-07-04T16:00:13.000Z
2022-01-28T19:51:58.000Z
atomic_reactor/utils/imageutil.py
containerbuildsystem/atomic-reactor
5734a189fc79245d99610f3a81b3aee9913db355
[ "BSD-3-Clause" ]
577
2019-06-03T07:53:16.000Z
2022-03-31T04:01:23.000Z
atomic_reactor/utils/imageutil.py
ben-alkov/atomic-reactor
be6b626b7d822f77999f245193fefcc00c501ca9
[ "BSD-3-Clause" ]
17
2020-02-21T13:30:06.000Z
2022-02-15T10:42:32.000Z
""" Copyright (c) 2021 Red Hat, Inc All rights reserved. This software may be modified and distributed under the terms of the BSD license. See the LICENSE file for details. """ # OSBS2 TBD def get_inspect_for_image(image): # util.get_inspect_for_image(image, registry, insecure=False, dockercfg_path=None) # or use skopeo # insecure = self.pull_registries[base_image.registry]['insecure'] # dockercfg_path = self.pull_registries[base_image.registry]['dockercfg_path'] # self._base_image_inspect =\ # atomic_reactor.util.get_inspect_for_image(base_image, base_image.registry, insecure, # dockercfg_path) return {} def get_image_history(image): # get image history with skopeo / registry api return [] def inspect_built_image(): # get output image final/arch specific from somewhere # and call get_inspect_for_image return {} def base_image_inspect(): # get base image from workflow.dockerfile_images # and call get_inspect_for_image return {} def remove_image(image, force=False): # self.tasker.remove_image(image, force=force) # most likely won't be needed at all return {} def tag_image(image, new_image): # self.tasker.tag_image(image, new_image) return True def get_image(image): # self.tasker.get_image(image) # use skopeo copy return {}
25.12963
94
0.721444
190
1,357
4.921053
0.4
0.085562
0.069519
0.096257
0.33262
0.210695
0.072727
0.072727
0
0
0
0.004541
0.188651
1,357
53
95
25.603774
0.844687
0.678703
0
0.357143
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
802d18b8c7aead93622047d9327b29be53f9a315
1,417
py
Python
metadata_replace/test_mrepr.py
Preocts/python_play_carton
071b19a6b5a6420192cd262195f95acfd787b476
[ "MIT" ]
null
null
null
metadata_replace/test_mrepr.py
Preocts/python_play_carton
071b19a6b5a6420192cd262195f95acfd787b476
[ "MIT" ]
null
null
null
metadata_replace/test_mrepr.py
Preocts/python_play_carton
071b19a6b5a6420192cd262195f95acfd787b476
[ "MIT" ]
null
null
null
from typing import Dict import pytest import mrepr @pytest.mark.parametrize( ("in_", "keypairs", "expected"), ( ("{{metatag}}", {"metatag": "replaced"}, "replaced"), ("{{metaTag }}", {"metatag": "replaced"}, "replaced"), ("{{ metaTag}}", {"metatag": "replaced"}, "replaced"), ("{{ metaTag }}", {"metatag": "replaced"}, "replaced"), ("{{metatag}} ", {"metatag": "replaced"}, "replaced "), (" {{Metatag}}", {"metatag": "replaced"}, " replaced"), (" {{ metatag }} ", {"metatag": "replaced"}, " replaced "), ("This{{metatag}}sentence", {"metatag": "replaced"}, "Thisreplacedsentence"), ( "This {{ metatag }} sentence", {"metatag": "replaced"}, "This replaced sentence", ), ( "This {{ metatag }} sentence", {"metatag": "replaced"}, "This replaced sentence", ), ( "This {{ newtag }} sentence", {"metatag": "replaced"}, "This {{ newtag }} sentence", ), ( "This {{ newtag }}{{metatag}} sentence", {"metatag": "replaced", "newtag": "swapped"}, "This swappedreplaced sentence", ), ), ) def test_mrepr(in_: str, keypairs: Dict[str, str], expected: str) -> None: """Test metatag repr""" assert mrepr.mrepr(in_, keypairs) == expected
32.204545
85
0.485533
104
1,417
6.576923
0.240385
0.263158
0.225146
0.307018
0.520468
0.47076
0.47076
0.47076
0.47076
0.307018
0
0
0.301341
1,417
43
86
32.953488
0.690909
0.011997
0
0.526316
0
0
0.443329
0.016499
0
0
0
0
0.026316
1
0.026316
false
0
0.078947
0
0.105263
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
802eab523da2956171315c1563d800bf89cc589b
1,144
py
Python
Motor.py
issa-clown/DonquiBeast
96eb9714deb1dc4be1fc8024595e7d66f47f794c
[ "Apache-2.0" ]
null
null
null
Motor.py
issa-clown/DonquiBeast
96eb9714deb1dc4be1fc8024595e7d66f47f794c
[ "Apache-2.0" ]
null
null
null
Motor.py
issa-clown/DonquiBeast
96eb9714deb1dc4be1fc8024595e7d66f47f794c
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python import subprocess import time def control(text): if (text == 'stop'): # モータ停止 subprocess.call("./motor 0", shell = True) if (text == 'go forward' or text == 'forward' or text == 'front' ): # 前進 subprocess.call("./motor 1", shell = True) # time.sleep(5) # subprocess.call("./motor 0", shell = True) return True if (text == 'go backward' or text == 'backward' or text == 'back'): # 後退 subprocess.call("./motor 2", shell = True) # time.sleep(5) # subprocess.call("./motor 0", shell = True) return True if (text == 'turn left'): # 方向転換:左 subprocess.call("./motor 3", shell = True) # time.sleep(1) # subprocess.call("./motor 0", shell = True) return True if (text == 'turn right'): # 方向転換:右 subprocess.call("./motor 4", shell = True) # time.sleep(1) # subprocess.call("./motor 0", shell = True) return True if (text == 'turn around'): # subprocess.call("./motor 5", shell = True) return True return False
29.333333
71
0.523601
137
1,144
4.372263
0.29927
0.233723
0.317195
0.166945
0.495826
0.495826
0.447412
0.447412
0.447412
0.447412
0
0.01788
0.315559
1,144
38
72
30.105263
0.747126
0.288462
0
0.238095
0
0
0.166458
0
0
0
0
0
0
1
0.047619
false
0
0.095238
0
0.428571
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
802f2d4a7e2addc8e42b71b2a05961f53fbc8c58
638
py
Python
src/chatbot/server/auth.py
c4pt000/Sophia-bot
91c85d2bb58cf0cea54dacb3958ce2b966a40616
[ "MIT" ]
1
2022-01-23T04:11:39.000Z
2022-01-23T04:11:39.000Z
src/chatbot/server/auth.py
c4pt000/Sophia-bot
91c85d2bb58cf0cea54dacb3958ce2b966a40616
[ "MIT" ]
null
null
null
src/chatbot/server/auth.py
c4pt000/Sophia-bot
91c85d2bb58cf0cea54dacb3958ce2b966a40616
[ "MIT" ]
null
null
null
from functools import wraps
from flask import request, Response
import json
from config import HR_CHATBOT_AUTHKEY

json_encode = json.JSONEncoder().encode


def check_auth(auth):
    """Return True when *auth* equals the configured chatbot auth key."""
    return auth == HR_CHATBOT_AUTHKEY


def authenticate():
    """Build the JSON response returned when authentication fails."""
    payload = {'ret': 401, 'response': {'text': 'Could not verify your access'}}
    return Response(json_encode(payload), mimetype="application/json")


def requires_auth(f):
    """Decorator: run *f* only when the request's 'Auth' query arg is valid.

    Any request with a missing or incorrect 'Auth' value receives the
    authentication-failure JSON payload instead of *f*'s result.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        supplied = request.args.get('Auth')
        if supplied and check_auth(supplied):
            return f(*args, **kwargs)
        return authenticate()
    return decorated
23.62963
100
0.666144
80
638
5.2
0.4625
0.043269
0.076923
0.091346
0
0
0
0
0
0
0
0.006061
0.224138
638
26
101
24.538462
0.834343
0
0
0
0
0
0.098746
0
0
0
0
0
0
1
0.222222
false
0
0.222222
0.111111
0.722222
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
802fb647b144cf6f936d7bf42ab325ca99733f29
238
py
Python
setup.py
d0d0d0/OpenTweet
2aad3a72135c45d2d3e5e7c5eb6cb79e8d2a4cbc
[ "MIT" ]
null
null
null
setup.py
d0d0d0/OpenTweet
2aad3a72135c45d2d3e5e7c5eb6cb79e8d2a4cbc
[ "MIT" ]
null
null
null
setup.py
d0d0d0/OpenTweet
2aad3a72135c45d2d3e5e7c5eb6cb79e8d2a4cbc
[ "MIT" ]
null
null
null
from distutils.core import setup
import py2exe, sys, os

## Generate executable for windows
# Inject the py2exe command as if it were given on the command line,
# so plain `python setup.py` always builds the Windows executable.
sys.argv.append('py2exe')

setup(
    # Bundle everything into a single file.
    options={'py2exe': {'bundle_files': 1}},
    windows=[{'script': "geotweet.py"}],
    zipfile=None,
)
19.833333
46
0.659664
29
238
5.37931
0.793103
0
0
0
0
0
0
0
0
0
0
0.020408
0.176471
238
12
47
19.833333
0.77551
0.130252
0
0
1
0
0.2
0
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
802ff7600c7c6ab00af051f41a438e255c33c167
743
py
Python
pygw2/core/models/map.py
Natsku123/pygw2
c8e579c07f4d33c7afadc8dee510be0a5e71d16e
[ "MIT" ]
1
2020-01-19T21:27:53.000Z
2020-01-19T21:27:53.000Z
pygw2/core/models/map.py
Natsku123/pygw2
c8e579c07f4d33c7afadc8dee510be0a5e71d16e
[ "MIT" ]
1
2021-12-09T21:18:13.000Z
2021-12-09T21:18:13.000Z
pygw2/core/models/map.py
Natsku123/pygw2
c8e579c07f4d33c7afadc8dee510be0a5e71d16e
[ "MIT" ]
null
null
null
from typing import List, Optional
from pygw2.utils import BaseModel


class Continent(BaseModel):
    """A continent record with its zoom range and floor list."""
    id: int
    name: str
    continent_dims: List[int]
    min_zoom: int
    max_zoom: int
    floors: List[int]  # TODO parse subendpoints?


class MapSector(BaseModel):
    """A named sector inside a map, with its bounds and chat link."""
    id: int
    name: str
    level: int
    coord: List[int]
    bounds: List[List[int]]
    chat_link: str


class Map(BaseModel):
    """A map with its level range and its region/continent placement."""
    id: int
    name: str
    min_level: int
    max_level: int
    default_floor: int
    floors: List[int]  # TODO resolve?
    region_id: int  # TODO resolve region?
    region_name: Optional[str]
    continent_id: int  # TODO resolve continent?
    continent_name: str
    map_rect: List[List[int]]
    continent_rect: List[List[int]]
20.081081
49
0.663526
102
743
4.705882
0.333333
0.102083
0.0875
0.1125
0.214583
0
0
0
0
0
0
0.001792
0.248991
743
36
50
20.638889
0.858423
0.111709
0
0.275862
0
0
0
0
0
0
0
0.027778
0
1
0
true
0
0.068966
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
0
0
1
0
0
3
80478f1a366bcb84b0a8e74ec4f2aaa35099a39f
2,743
py
Python
waltz-test/src/main/python/waltz_ducktape/services/waltz_server.py
zzlbuaa/waltz
2c9e904cf2d33c1b52d1b8b5fbdf8ed0d9ac9a35
[ "Apache-2.0" ]
null
null
null
waltz-test/src/main/python/waltz_ducktape/services/waltz_server.py
zzlbuaa/waltz
2c9e904cf2d33c1b52d1b8b5fbdf8ed0d9ac9a35
[ "Apache-2.0" ]
null
null
null
waltz-test/src/main/python/waltz_ducktape/services/waltz_server.py
zzlbuaa/waltz
2c9e904cf2d33c1b52d1b8b5fbdf8ed0d9ac9a35
[ "Apache-2.0" ]
null
null
null
from waltz_ducktape.services.base_waltz_service import BaseWaltzService


class WaltzServerService(BaseWaltzService):
    """
    WaltzServerService is the service class for Waltz Server.

    Fix: the original docstring said "WaltzStorageService is the service
    class for Waltz Storage" — a copy-paste error from the sibling
    storage service class.
    """
    def __init__(self, context, cluster_spec, zk, cluster_root, cluster_name,
                 cluster_num_partitions, port, jetty_port, lib_dir,
                 config_file_dir, ssl_configs):
        """
        Construct a new 'WaltzServerService' object.

        :param context: The test context
        :param cluster_spec: The cluster specifics
        :param zk: Zookeeper url
        :param cluster_root: The cluster root
        :param cluster_name: The cluster name
        :param cluster_num_partitions: The number of partitions in cluster
        :param port: The service port
        :param jetty_port: The jetty port
        :param lib_dir: The library directory
        :param config_file_dir: The directory of configuration file
        :param ssl_configs: A dict of ssl related configurations
        """
        super(WaltzServerService, self).__init__(
            context, cluster_spec=cluster_spec, zk=zk,
            cluster_root=cluster_root, port=port,
            lib_dir=lib_dir, config_file_dir=config_file_dir)
        # Server-specific settings the base class does not manage.
        self.zk = zk
        self.cluster_root = cluster_root
        self.cluster_name = cluster_name
        self.cluster_num_partitions = cluster_num_partitions
        self.jetty_port = jetty_port
        self.ssl_configs = ssl_configs

    def start_cmd(self):
        """Shell command that starts the waltz-server systemd unit."""
        return "sudo systemctl start waltz-server"

    def restart_cmd(self):
        """Shell command that restarts the waltz-server systemd unit."""
        return "sudo systemctl restart waltz-server"

    def stop_cmd(self):
        """Shell command that stops the waltz-server systemd unit."""
        return "sudo systemctl stop waltz-server"

    def clean_cmd(self):
        """No cleanup step is required for the server service."""
        return ""

    def healthcheck_cmd(self, hostname):
        """Shell command probing the jetty /health endpoint on *hostname*."""
        return "sudo curl -Is {hostname}:{jetty_port}/health | head -1".format(
            hostname=hostname, jetty_port=self.jetty_port)

    def provision_cmd(self):
        """No extra provisioning step is required for the server service."""
        return ""

    def render_log_file(self):
        """Render the log4j configuration from its template."""
        return self.render('log4j.properties')

    def render_service_config_file(self):
        """Render the waltz server YAML config from its template."""
        return self.render('waltz_server.yaml',
                           cluster_root=self.cluster_root,
                           zk_connect=self.zk, port=self.port,
                           jetty_port=self.jetty_port)

    def render_cli_config_file(self):
        """Render the CLI YAML config, wiring in the ssl key/trust stores."""
        return self.render('cli.yaml', cluster_root=self.cluster_root,
                           zk_connect=self.zk,
                           ssl_keystore_loc=self.ssl_configs["ssl_keystore_loc"],
                           ssl_keystore_pwd=self.ssl_configs["ssl_keystore_pwd"],
                           ssl_truststore_loc=self.ssl_configs["ssl_truststore_loc"],
                           ssl_truststore_pwd=self.ssl_configs["ssl_truststore_pwd"])
40.940299
131
0.664965
336
2,743
5.133929
0.241071
0.070145
0.04058
0.049275
0.264348
0.115942
0.052174
0.052174
0.052174
0.052174
0
0.000982
0.257747
2,743
66
132
41.560606
0.846267
0.207437
0
0.055556
0
0
0.128544
0.014663
0
0
0
0
0
1
0.277778
false
0
0.027778
0.25
0.583333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
80486631d37c7c36a65b158210c000ba23b3923f
111
py
Python
python/power_modpower.py
avishshah11/Hackerrank_python
7a7b8005ba2c8b03fb63d727496deb4175f860f5
[ "MIT" ]
null
null
null
python/power_modpower.py
avishshah11/Hackerrank_python
7a7b8005ba2c8b03fb63d727496deb4175f860f5
[ "MIT" ]
null
null
null
python/power_modpower.py
avishshah11/Hackerrank_python
7a7b8005ba2c8b03fb63d727496deb4175f860f5
[ "MIT" ]
null
null
null
#!/usr/bin/env python
"""Read integers a, b, m from stdin and print pow(a, b) and pow(a, b, m)."""
# NOTE: the original `import math` was unused and has been removed.


def compute_powers(a, b, m):
    """Return the pair ``(a**b, a**b % m)``.

    The modular result uses the three-argument built-in ``pow``, which
    computes the residue without materializing the full power a**b.
    """
    return pow(a, b), pow(a, b, m)


def main():
    """Read the three integers, one per line, and print both results."""
    a = int(input())
    b = int(input())
    m = int(input())
    x, y = compute_powers(a, b, m)
    print(x)
    print(y)


# Fix: the original ran the input()/print() statements at module top level,
# so merely importing the file blocked on stdin; guard the entry point.
if __name__ == "__main__":
    main()
11.1
16
0.558559
24
111
2.583333
0.458333
0.387097
0.16129
0
0
0
0
0
0
0
0
0
0.189189
111
9
17
12.333333
0.688889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.125
0
0.125
0.25
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3