hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
52846d8d5301f2152de9ceeae31307b3f20dbef6
227
py
Python
yggdrasil/examples/transforms/src/dst.py
cropsinsilico/yggdrasil
466a4f77605a6f461d57ef7b165a6db7eec4d1fd
[ "BSD-3-Clause" ]
22
2019-02-05T15:20:07.000Z
2022-02-25T09:00:40.000Z
yggdrasil/examples/transforms/src/dst.py
cropsinsilico/yggdrasil
466a4f77605a6f461d57ef7b165a6db7eec4d1fd
[ "BSD-3-Clause" ]
48
2019-02-15T20:41:24.000Z
2022-03-16T20:52:02.000Z
yggdrasil/examples/transforms/src/dst.py
cropsinsilico/yggdrasil
466a4f77605a6f461d57ef7b165a6db7eec4d1fd
[ "BSD-3-Clause" ]
16
2019-04-27T03:36:40.000Z
2021-12-02T09:47:06.000Z
import os from yggdrasil.examples.transforms import check_received_data def dst(received_data): transform = os.environ['TEST_TRANSFORM'] check_received_data(transform, received_data) print('CONFIRMED') return
22.7
61
0.77533
28
227
6.035714
0.607143
0.284024
0.201183
0
0
0
0
0
0
0
0
0
0.145374
227
9
62
25.222222
0.871134
0
0
0
0
0
0.101322
0
0
0
0
0
0
1
0.142857
false
0
0.285714
0
0.571429
0.142857
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
528d0dfa546e65411f38ec68be73b74cf256c1e6
110
py
Python
codes_auto/114.flatten-binary-tree-to-linked-list.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
codes_auto/114.flatten-binary-tree-to-linked-list.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
codes_auto/114.flatten-binary-tree-to-linked-list.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
# # @lc app=leetcode.cn id=114 lang=python3 # # [114] flatten-binary-tree-to-linked-list # None # @lc code=end
15.714286
42
0.690909
19
110
4
0.894737
0
0
0
0
0
0
0
0
0
0
0.072917
0.127273
110
7
43
15.714286
0.71875
0.845455
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
5297cda462e70a97a934d670afcfb97e4b22081e
208
py
Python
beerbar/beerbar/doctype/order_generation/test_order_generation.py
reddymeghraj/beerbar
ac082b11e8535e5ea5014e3a49598571ae200471
[ "MIT" ]
null
null
null
beerbar/beerbar/doctype/order_generation/test_order_generation.py
reddymeghraj/beerbar
ac082b11e8535e5ea5014e3a49598571ae200471
[ "MIT" ]
null
null
null
beerbar/beerbar/doctype/order_generation/test_order_generation.py
reddymeghraj/beerbar
ac082b11e8535e5ea5014e3a49598571ae200471
[ "MIT" ]
null
null
null
# Copyright (c) 2013, wayzon and Contributors # See license.txt import frappe import unittest test_records = frappe.get_test_records('Order Generation') class TestOrderGeneration(unittest.TestCase): pass
18.909091
58
0.802885
26
208
6.307692
0.807692
0.134146
0
0
0
0
0
0
0
0
0
0.021858
0.120192
208
10
59
20.8
0.874317
0.283654
0
0
0
0
0.109589
0
0
0
0
0
0
1
0
false
0.2
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
4
bfdd0752ea22e8862ce34d14255b88e6f8c7e72e
1,517
py
Python
modules/code_qa/ios_regex.py
viking333/galvatron
c2dc9c94ad64e79ad234c4a04b039f6cf429e0f3
[ "MIT" ]
null
null
null
modules/code_qa/ios_regex.py
viking333/galvatron
c2dc9c94ad64e79ad234c4a04b039f6cf429e0f3
[ "MIT" ]
null
null
null
modules/code_qa/ios_regex.py
viking333/galvatron
c2dc9c94ad64e79ad234c4a04b039f6cf429e0f3
[ "MIT" ]
7
2019-08-22T15:17:44.000Z
2020-01-03T17:18:06.000Z
from galvatron_lib.core.regex_module import RegexModule class Module(RegexModule): meta = { "name": "Swift/Objective C regex Scanner", "author": "Anastasios Koutlis", "descrription": "Uses regexes to search for potentially dangerous code", "query": "SELECT DISTINCT extracted_location, product_name, version FROM targets WHERE location IS NOT NULL" } def file_name_regex(self): return ".*\.(swift|m|h)" def line_regex(self): return [("^(?!(\*|\/)).*((NSLog|CFStringCreateWithFormat|CFStringCreateWithFormatAndArguments|CFStringAppendFormat|AEBuildDesc|AEBuildParameters|AEBuildAppleEvent)\()|(NSString\sstringWithFormat\:(?!@)|NSString\sinitWithFormat\:|NSMutableString\sappendFormat\:|NSAlert|NSPredicate\spredicateWithFormat\:|NSException\sraise\:|NSException\.raise\(|NSRunAlertPanel)", "Format String"), ("^(?!(\*|\/)).*(strcat|strcpy|strncat|strncpy|(?![a-zA-Z])gets(?![a-zA-Z])|memcpy|fgets|vscanf|sscanf|vsscanf|vscanf|scanf|streadd|strecpy|strtrns|fscanf|vfscanf|realpath|syslog|getopt|getopt_long|getpass|getchar|fgetc|getc(?![a-zA-Z])|read(?![a-zA-Z])|bcopy|strccpy|strcadd|[a-z]{0,3}printf)", "Buffer Overflow"), ("^(?!(\*|\/)).*(malloc\(|realloc\(|free\(|calloc\()", "Heap Overflow"), ("^(?!(\*|\/)).*(Allowanyhttpscertificateforhost|continueWithoutCredentialForAuthenticationChallenge)", "Unvalidated ssl certs"), ("^(?!(\*|\/)).*(system\(|popen\(|fork\(|WinExec\(|exec[a-z]{0,2}\()", "Command Injection")]
58.346154
390
0.687541
153
1,517
6.764706
0.79085
0.011594
0.015459
0
0
0
0
0
0
0
0
0.002948
0.105471
1,517
25
391
60.68
0.759764
0
0
0
0
0.125
0.779616
0.567836
0
0
0
0
0
1
0.125
false
0.0625
0.0625
0.125
0.4375
0.0625
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
1
0
1
0
0
0
4
bfe0e4edad7c37b1bd3063b4f942d7b6b990d132
244
py
Python
cfgov/core/testutils/mock_cache_backend.py
higs4281/cfgov-refresh
a02b193fb2373d443265c21845adf8a196e05675
[ "CC0-1.0" ]
1
2019-11-26T20:18:22.000Z
2019-11-26T20:18:22.000Z
cfgov/core/testutils/mock_cache_backend.py
higs4281/cfgov-refresh
a02b193fb2373d443265c21845adf8a196e05675
[ "CC0-1.0" ]
8
2021-03-11T00:55:51.000Z
2022-02-13T21:10:14.000Z
cfgov/core/testutils/mock_cache_backend.py
higs4281/cfgov-refresh
a02b193fb2373d443265c21845adf8a196e05675
[ "CC0-1.0" ]
1
2019-12-28T14:04:07.000Z
2019-12-28T14:04:07.000Z
from wagtail.contrib.wagtailfrontendcache.backends import BaseBackend CACHE_PURGED_URLS = [] class MockCacheBackend(BaseBackend): def __init__(self, config): pass def purge(self, url): CACHE_PURGED_URLS.append(url)
18.769231
69
0.729508
27
244
6.296296
0.740741
0.129412
0.176471
0
0
0
0
0
0
0
0
0
0.188525
244
12
70
20.333333
0.858586
0
0
0
0
0
0
0
0
0
0
0
0
1
0.285714
false
0.142857
0.142857
0
0.571429
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
871202bfc16826d039ba30bba911938d38b053e3
313
py
Python
test/config.py
tschm/hrp
e03562529768b07fa2edd2fe5f382c3881642caa
[ "MIT" ]
9
2021-01-31T23:35:57.000Z
2022-03-07T11:32:52.000Z
test/config.py
tschm/hrp
e03562529768b07fa2edd2fe5f382c3881642caa
[ "MIT" ]
null
null
null
test/config.py
tschm/hrp
e03562529768b07fa2edd2fe5f382c3881642caa
[ "MIT" ]
12
2020-04-25T02:50:35.000Z
2022-03-17T03:13:18.000Z
import os import pandas as pd def resource(name): return os.path.join(os.path.dirname(__file__), "resources", name) def get_data(): # https://github.com/robertmartin8/PyPortfolioOpt return pd.read_csv(resource("stock_prices.csv"), parse_dates=True, index_col="date").truncate(before="2017-01-01")
26.083333
118
0.734824
46
313
4.804348
0.76087
0.054299
0
0
0
0
0
0
0
0
0
0.032491
0.115016
313
11
119
28.454545
0.765343
0.15016
0
0
0
0
0.147727
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
8751d434e1a66ba1e6be0580431d9a07095ed767
70
py
Python
PA5/util/ingen.py
SebastianJay/LDI-Cool
85744fa493bd6a11463aababe7b484a57c6c47b7
[ "Apache-2.0" ]
null
null
null
PA5/util/ingen.py
SebastianJay/LDI-Cool
85744fa493bd6a11463aababe7b484a57c6c47b7
[ "Apache-2.0" ]
null
null
null
PA5/util/ingen.py
SebastianJay/LDI-Cool
85744fa493bd6a11463aababe7b484a57c6c47b7
[ "Apache-2.0" ]
null
null
null
import random for x in range(100): print random.randint(-10, 10)
14
33
0.685714
12
70
4
0.833333
0
0
0
0
0
0
0
0
0
0
0.125
0.2
70
4
34
17.5
0.732143
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.333333
null
null
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
8755587dc54626ffa2116cd285f1386828a14493
69
py
Python
atcoder/arc/arc018_a.py
knuu/competitive-programming
16bc68fdaedd6f96ae24310d697585ca8836ab6e
[ "MIT" ]
1
2018-11-12T15:18:55.000Z
2018-11-12T15:18:55.000Z
atcoder/arc/arc018_a.py
knuu/competitive-programming
16bc68fdaedd6f96ae24310d697585ca8836ab6e
[ "MIT" ]
null
null
null
atcoder/arc/arc018_a.py
knuu/competitive-programming
16bc68fdaedd6f96ae24310d697585ca8836ab6e
[ "MIT" ]
null
null
null
print((lambda h, b:b * h * h / 10000)(*map(float, input().split())))
34.5
68
0.565217
12
69
3.25
0.75
0
0
0
0
0
0
0
0
0
0
0.084746
0.144928
69
1
69
69
0.576271
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
87695d011a55349db8e047b21a0778c74ce9b588
320
py
Python
pyshadowsocks/packet/packet_header.py
FTwOoO/pyShadowsocks
452323e30c4b97d322cbb67e9bbc7c4549e67b5f
[ "MIT" ]
21
2016-08-01T06:48:01.000Z
2021-04-05T18:20:53.000Z
pyshadowsocks/packet/packet_header.py
zen-of-proxy/pyShadowsocks
452323e30c4b97d322cbb67e9bbc7c4549e67b5f
[ "MIT" ]
2
2016-07-23T02:33:17.000Z
2018-03-13T09:50:02.000Z
pyshadowsocks/packet/packet_header.py
FTwOoO/pyShadowsocks
452323e30c4b97d322cbb67e9bbc7c4549e67b5f
[ "MIT" ]
7
2017-04-22T16:53:53.000Z
2021-02-08T06:33:05.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Author: booopooob@gmail.com # # Info: # # # import abc from util import FixedDict class PacketHeader(FixedDict, metaclass=abc.ABCMeta): @abc.abstractmethod def to_bytes(self): pass @abc.abstractmethod def from_bytes(self, data): pass
14.545455
53
0.646875
39
320
5.25641
0.717949
0.165854
0.195122
0
0
0
0
0
0
0
0
0.004016
0.221875
320
21
54
15.238095
0.819277
0.240625
0
0.444444
0
0
0
0
0
0
0
0
0
1
0.222222
false
0.222222
0.222222
0
0.555556
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
5e420acfe6cc092b2503fdffea33c1d0a26e2bf3
1,857
py
Python
python/linked_list/1290_convert_binary_number_in_a_linked_list_to_integer.py
linshaoyong/leetcode
ea052fad68a2fe0cbfa5469398508ec2b776654f
[ "MIT" ]
6
2019-07-15T13:23:57.000Z
2020-01-22T03:12:01.000Z
python/linked_list/1290_convert_binary_number_in_a_linked_list_to_integer.py
linshaoyong/leetcode
ea052fad68a2fe0cbfa5469398508ec2b776654f
[ "MIT" ]
null
null
null
python/linked_list/1290_convert_binary_number_in_a_linked_list_to_integer.py
linshaoyong/leetcode
ea052fad68a2fe0cbfa5469398508ec2b776654f
[ "MIT" ]
1
2019-07-24T02:15:31.000Z
2019-07-24T02:15:31.000Z
import math # Definition for singly-linked list. class ListNode(object): def __init__(self, x): self.val = x self.next = None class Solution(object): def getDecimalValue(self, head): """ :type head: ListNode :rtype: int """ vals = [] while head: vals.append(head.val) head = head.next r = 0 for i in range(len(vals)): r += vals[i] * math.pow(2, len(vals) - i - 1) return int(r) def test_get_decimal_value(): s = Solution() head = ListNode(1) head.next = ListNode(0) head.next.next = ListNode(1) assert 5 == s.getDecimalValue(head) head = ListNode(1) assert 1 == s.getDecimalValue(head) head = ListNode(0) assert 0 == s.getDecimalValue(head) head = ListNode(0) head.next = ListNode(0) assert 0 == s.getDecimalValue(head) head = ListNode(1) head.next = ListNode(0) head.next.next = ListNode(0) head.next.next.next = ListNode(1) head.next.next.next.next = ListNode(0) head.next.next.next.next.next = ListNode(0) head.next.next.next.next.next.next = ListNode(1) head.next.next.next.next.next.next.next = ListNode(1) head.next.next.next.next.next.next.next.next = ListNode(1) head.next.next.next.next.next.next.next.next.next = ListNode(0) head.next.next.next.next.next.next.next.next.next.next = ListNode(0) head.next.next.next.next.next.next.next.next.next.next.next = ListNode(0) head.next.next.next.next.next.next.next.next.next.next.next.\ next = ListNode(0) head.next.next.next.next.next.next.next.next.next.next.next.\ next.next = ListNode(0) head.next.next.next.next.next.next.next.next.next.next.next.\ next.next.next = ListNode(0) assert 18880 == s.getDecimalValue(head)
28.136364
77
0.620894
270
1,857
4.244444
0.162963
0.642234
0.816754
0.921466
0.704188
0.704188
0.653578
0.653578
0.649215
0.575916
0
0.023876
0.233172
1,857
65
78
28.569231
0.780899
0.036618
0
0.276596
0
0
0
0
0
0
0
0
0.106383
1
0.06383
false
0
0.021277
0
0.148936
0
0
0
0
null
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
5e5995d6d0c3837faae8b0a8cca9d252d3b9aff2
377
py
Python
tests/debug/__init__.py
int19h/ptvsd
55aa650e3e5fff91a3804a59dbf20ff3583be238
[ "MIT" ]
349
2019-05-07T00:15:12.000Z
2022-03-10T15:05:08.000Z
tests/debug/__init__.py
WooSung-Jung/ptvsd
99c8513921021d2cc7cd82e132b65c644c256768
[ "MIT" ]
1,095
2018-03-01T00:50:11.000Z
2019-05-06T17:44:15.000Z
tests/debug/__init__.py
WooSung-Jung/ptvsd
99c8513921021d2cc7cd82e132b65c644c256768
[ "MIT" ]
53
2018-03-01T00:33:57.000Z
2019-05-05T00:50:23.000Z
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See LICENSE in the project root # for license information. from __future__ import absolute_import, division, print_function, unicode_literals # Expose Session directly. def Session(*args, **kwargs): from tests.debug import session return session.Session(*args, **kwargs)
29
82
0.771883
48
377
5.916667
0.75
0.077465
0.119718
0
0
0
0
0
0
0
0
0
0.151194
377
12
83
31.416667
0.8875
0.453581
0
0
0
0
0
0
0
0
0
0
0
1
0.25
true
0
0.5
0
1
0.25
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
4
5e69dc17ff73da7a05b8586246696d999b42beee
1,014
py
Python
manga_py/providers/mangainn_net.py
theincognito-inc/manga-dl
899905bafb6c6891815b58cce41eaff32a682570
[ "MIT" ]
1
2020-11-19T00:40:49.000Z
2020-11-19T00:40:49.000Z
manga_py/providers/mangainn_net.py
eduhoribe/manga-py
fe7eb2e08532b3c75b4f7ac8cc4132f0e7a65eb4
[ "MIT" ]
null
null
null
manga_py/providers/mangainn_net.py
eduhoribe/manga-py
fe7eb2e08532b3c75b4f7ac8cc4132f0e7a65eb4
[ "MIT" ]
null
null
null
from manga_py.provider import Provider from .helpers.std import Std class MangaInnNet(Provider, Std): def get_chapter_index(self) -> str: chapter = self.chapter idx = self.re.search(r'\.\w{2,7}/[^/]+/([^/]+)', chapter).group(1).split('.') return '{}-{}'.format(*self._idx_to_x2(idx)) def get_main_content(self): return self._get_content('{}/{}') def get_manga_name(self) -> str: name = self.re.search(r'\.\w{2,7}/([^/]+)', self.get_url()) return name.group(1) def get_chapters(self): return self.document_fromstring(self.content, '#chapter_list a[href]') def get_files(self): content = self.http_get(self.chapter) images = self.re.search(r'var\s+images\s*=\s*(\[\{.+?\}\])', content).group(1) images = self.json.loads(images) return [i.get('url') for i in images] def get_cover(self): pass # TODO def book_meta(self) -> dict: # todo meta pass main = MangaInnNet
27.405405
86
0.589744
139
1,014
4.158273
0.395683
0.062284
0.062284
0.067474
0.055363
0.055363
0.055363
0
0
0
0
0.010204
0.226824
1,014
36
87
28.166667
0.727041
0.013807
0
0.083333
0
0
0.107322
0.055166
0
0
0
0.027778
0
1
0.291667
false
0.083333
0.083333
0.083333
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
0
0
1
0
0
4
5e962b999d338cbf27b21c35662b76980dfe1146
509
py
Python
joffrey/errors.py
supposedly/kizbra
c2a8c6a355dc2cc5fd9a245a053c1f5f1fa59dce
[ "MIT" ]
8
2018-05-25T02:00:35.000Z
2018-08-14T01:49:15.000Z
joffrey/errors.py
supposedly/kizbra
c2a8c6a355dc2cc5fd9a245a053c1f5f1fa59dce
[ "MIT" ]
2
2018-06-04T15:43:38.000Z
2018-07-08T23:56:22.000Z
joffrey/errors.py
supposedly/kizbra
c2a8c6a355dc2cc5fd9a245a053c1f5f1fa59dce
[ "MIT" ]
1
2020-09-21T08:10:37.000Z
2020-09-21T08:10:37.000Z
from .misc import JoffreyNamespace class JoffreyException(Exception): """ Base class for joffrey-related exceptions; includes a "details" attribute for whatever error-related info. """ def __init__(self, msg, **kwargs): self.details = JoffreyNamespace(**kwargs) super().__init__(msg) class ANDError(JoffreyException): pass class ORError(JoffreyException): pass class XORError(JoffreyException): pass class RequirementError(JoffreyException): pass
18.178571
57
0.707269
50
509
7.04
0.6
0.227273
0.213068
0
0
0
0
0
0
0
0
0
0.202358
509
27
58
18.851852
0.866995
0.208251
0
0.307692
0
0
0
0
0
0
0
0
0
1
0.076923
false
0.307692
0.076923
0
0.538462
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
5ea04ed0a7303a8281ff99a0e2a74a72a37db8a0
93
py
Python
PIP/Hands-on Exercise 2/Q10.py
ankitrajbiswal/SEM_5
db716e242e77149a4091e0e564356ddc724aeff0
[ "Apache-2.0" ]
10
2021-04-24T11:46:48.000Z
2022-01-17T05:14:37.000Z
PIP/Hands-on Exercise 2/Q10.py
ankitrajbiswal/SEM_5
db716e242e77149a4091e0e564356ddc724aeff0
[ "Apache-2.0" ]
2
2021-06-28T11:51:50.000Z
2021-11-01T08:21:53.000Z
PIP/Hands-on Exercise 2/Q10.py
ankitrajbiswal/SEM_5
db716e242e77149a4091e0e564356ddc724aeff0
[ "Apache-2.0" ]
16
2021-04-24T11:46:58.000Z
2022-03-02T05:08:19.000Z
import sys import random as r a=int(sys.argv[1]) b=int(sys.argv[2]) print(r.randint(a,b))
18.6
21
0.677419
21
93
3
0.619048
0.190476
0.31746
0
0
0
0
0
0
0
0
0.024691
0.129032
93
5
21
18.6
0.753086
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0.2
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
5eb00d04dfa82aca3deb9d9a7bbae7048b23eccf
128
py
Python
wriggler/__init__.py
parantapa/wriggler
805989c4be6754a0ebf1da9572774dc8efb8f6a7
[ "MIT" ]
null
null
null
wriggler/__init__.py
parantapa/wriggler
805989c4be6754a0ebf1da9572774dc8efb8f6a7
[ "MIT" ]
null
null
null
wriggler/__init__.py
parantapa/wriggler
805989c4be6754a0ebf1da9572774dc8efb8f6a7
[ "MIT" ]
1
2018-03-02T05:34:35.000Z
2018-03-02T05:34:35.000Z
""" Wriggler crawler module. """ class Error(Exception): """ All exceptions returned are subclass of this one. """
14.222222
53
0.632813
14
128
5.785714
1
0
0
0
0
0
0
0
0
0
0
0
0.234375
128
8
54
16
0.826531
0.578125
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
4
5ed559b8fe2d1a634f559c5fe46321d7c1b9066b
2,315
py
Python
tests/data/networks/OH_OH.py
uw-ipd/privileged_residues
78078c22ba537651a1b6bd1404c05246ab73a3e3
[ "Apache-2.0" ]
null
null
null
tests/data/networks/OH_OH.py
uw-ipd/privileged_residues
78078c22ba537651a1b6bd1404c05246ab73a3e3
[ "Apache-2.0" ]
20
2018-08-13T22:50:46.000Z
2018-11-03T22:29:03.000Z
tests/data/networks/OH_OH.py
uw-ipd/privileged_residues
78078c22ba537651a1b6bd1404c05246ab73a3e3
[ "Apache-2.0" ]
1
2018-08-25T06:03:43.000Z
2018-08-25T06:03:43.000Z
from tests.util import pick_ray from pyrosetta import Pose from pyrosetta.rosetta.core.import_pose import pose_from_pdbstring name = "OH_OH" # NOTE(onalant): serines substituted for hydroxyls since we need real carbons contents = """ ATOM 1 N SER A 1 7.975 -0.175 -0.127 1.00 0.00 N ATOM 2 CA SER A 1 9.177 0.631 0.051 1.00 0.00 C ATOM 3 C SER A 1 9.173 1.839 -0.877 1.00 0.00 C ATOM 4 O SER A 1 8.606 1.793 -1.969 1.00 0.00 O ATOM 5 CB SER A 1 10.412 -0.211 -0.206 1.00 0.00 C ATOM 6 OG SER A 1 10.063 -1.526 -0.542 1.00 0.00 O ATOM 7 H SER A 1 7.528 -0.198 -1.033 1.00 0.00 H ATOM 8 HA SER A 1 9.204 0.990 1.080 1.00 0.00 H ATOM 9 1HB SER A 1 10.993 0.232 -1.015 1.00 0.00 H ATOM 10 2HB SER A 1 11.040 -0.215 0.684 1.00 0.00 H ATOM 11 HG SER A 1 9.103 -1.558 -0.523 1.00 0.00 H ATOM 12 N SER A 2 9.807 -1.298 -0.973 1.00 0.00 N ATOM 13 CA SER A 2 8.484 -1.352 -1.582 1.00 0.00 C ATOM 14 C SER A 2 7.533 -2.211 -0.759 1.00 0.00 C ATOM 15 O SER A 2 7.953 -3.156 -0.093 1.00 0.00 O ATOM 16 CB SER A 2 8.581 -1.899 -2.993 1.00 0.00 C ATOM 17 OG SER A 2 9.908 -2.202 -3.324 1.00 0.00 O ATOM 18 H SER A 2 10.132 -2.087 -0.431 1.00 0.00 H ATOM 19 HA SER A 2 8.081 -0.339 -1.625 1.00 0.00 H ATOM 20 1HB SER A 2 7.968 -2.796 -3.078 1.00 0.00 H ATOM 21 2HB SER A 2 8.187 -1.166 -3.695 1.00 0.00 H ATOM 22 HG SER A 2 10.432 -1.979 -2.550 1.00 0.00 H TER """ pose = Pose() pose_from_pdbstring(pose, contents) sc_donor = { 1: [ pick_ray(pose.residue(1), "HG", "OG") ], 2: [ pick_ray(pose.residue(2), "HG", "OG") ] } sc_acceptor = { 1: [ pick_ray(pose.residue(1), "OG", "CB") ], 2: [ pick_ray(pose.residue(2), "OG", "CB") ] }
42.090909
78
0.4527
453
2,315
2.284768
0.291391
0.085024
0.085024
0.127536
0.307246
0.300483
0
0
0
0
0
0.367395
0.44622
2,315
54
79
42.87037
0.439938
0.032397
0
0.12766
0
0.468085
0.788204
0
0
0
0
0
0
1
0
false
0
0.06383
0
0.06383
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
0d73828635f24fa1f3dc606ba5887ea998c72957
175
py
Python
spira/__init__.py
qedalab/spira
32e4d2096e298b9fcc5952abd654312dc232a259
[ "MIT" ]
10
2018-07-13T09:46:21.000Z
2021-06-22T13:34:50.000Z
spira/__init__.py
qedalab/spira
32e4d2096e298b9fcc5952abd654312dc232a259
[ "MIT" ]
8
2018-09-09T11:32:40.000Z
2019-10-08T07:47:31.000Z
spira/__init__.py
qedalab/spira
32e4d2096e298b9fcc5952abd654312dc232a259
[ "MIT" ]
7
2019-01-17T18:50:17.000Z
2022-01-13T20:27:52.000Z
import sys def initialize(): from spira import log as LOG from . import settings LOG.start(name=settings.LIB_NAME, text=settings.START_MESSAGE) initialize()
13.461538
66
0.72
24
175
5.166667
0.583333
0
0
0
0
0
0
0
0
0
0
0
0.2
175
12
67
14.583333
0.885714
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
true
0
0.5
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
0d9b673a0614e42ae363b195d8ea1127dfdf668f
298
py
Python
extline/handler/default.py
RaenonX/Jelly-Bot-API
c7da1e91783dce3a2b71b955b3a22b68db9056cf
[ "MIT" ]
5
2020-08-26T20:12:00.000Z
2020-12-11T16:39:22.000Z
extline/handler/default.py
RaenonX/Jelly-Bot
c7da1e91783dce3a2b71b955b3a22b68db9056cf
[ "MIT" ]
234
2019-12-14T03:45:19.000Z
2020-08-26T18:55:19.000Z
extline/handler/default.py
RaenonX/Jelly-Bot-API
c7da1e91783dce3a2b71b955b3a22b68db9056cf
[ "MIT" ]
2
2019-10-23T15:21:15.000Z
2020-05-22T09:35:55.000Z
"""This module contains the function to handle the default/unhandled type event.""" from extline.logger import LINE def handle_default(event, destination): """Method to be called upon receiving an unhandled type event.""" LINE.log_event("Unhandled event.", event=event, dest=destination)
37.25
83
0.758389
41
298
5.463415
0.634146
0.116071
0.160714
0
0
0
0
0
0
0
0
0
0.14094
298
7
84
42.571429
0.875
0.459732
0
0
0
0
0.106667
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
4
0dabc8ecb4d6ea5a1872b737bcf0f6b2dd861040
35
py
Python
requestbin/settings.py
madisvain/requestbin
7415d525cb0de1011adc7a8b5972d08d64f0a6ef
[ "MIT" ]
5
2018-09-30T20:10:52.000Z
2021-12-14T16:11:41.000Z
requestbin/settings.py
madisvain/requestbin
7415d525cb0de1011adc7a8b5972d08d64f0a6ef
[ "MIT" ]
null
null
null
requestbin/settings.py
madisvain/requestbin
7415d525cb0de1011adc7a8b5972d08d64f0a6ef
[ "MIT" ]
1
2019-12-21T10:41:18.000Z
2019-12-21T10:41:18.000Z
SITE_URL = "https://requestbin.app"
35
35
0.742857
5
35
5
1
0
0
0
0
0
0
0
0
0
0
0
0.057143
35
1
35
35
0.757576
0
0
0
0
0
0.611111
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
0dad1052123c870c5c79bb3e462733fdd6099364
153
py
Python
day11/flask_project/apps/views/posts.py
gaohj/wh1904js
a3af38f8311f79eb9f2e08a3de16dd1e02c40714
[ "Apache-2.0" ]
null
null
null
day11/flask_project/apps/views/posts.py
gaohj/wh1904js
a3af38f8311f79eb9f2e08a3de16dd1e02c40714
[ "Apache-2.0" ]
null
null
null
day11/flask_project/apps/views/posts.py
gaohj/wh1904js
a3af38f8311f79eb9f2e08a3de16dd1e02c40714
[ "Apache-2.0" ]
null
null
null
from flask import Blueprint,render_template posts = Blueprint('posts',__name__) @posts.route('/',methods=['GET','POST']) def index(): return '欢迎发表'
21.857143
43
0.705882
19
153
5.421053
0.842105
0
0
0
0
0
0
0
0
0
0
0
0.111111
153
7
44
21.857143
0.757353
0
0
0
0
0
0.11039
0
0
0
0
0
0
1
0.2
false
0
0.2
0.2
0.6
0.4
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
4
0db29654cc31afa1db743481460a2490c2bc30e4
293
py
Python
frappe/integration_broker/doctype/integration_service/test_integration_service.py
badili/frappe
8177d7e745f511fcc4da50fed9e291a58172a613
[ "MIT" ]
null
null
null
frappe/integration_broker/doctype/integration_service/test_integration_service.py
badili/frappe
8177d7e745f511fcc4da50fed9e291a58172a613
[ "MIT" ]
null
null
null
frappe/integration_broker/doctype/integration_service/test_integration_service.py
badili/frappe
8177d7e745f511fcc4da50fed9e291a58172a613
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright (c) 2015, Frappe Technologies and Contributors # See license.txt from __future__ import unicode_literals import frappe import unittest # test_records = frappe.get_test_records('Integration Service') class TestIntegrationService(unittest.TestCase): pass
22.538462
63
0.788396
35
293
6.371429
0.8
0.098655
0
0
0
0
0
0
0
0
0
0.019455
0.122867
293
12
64
24.416667
0.848249
0.532423
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.2
0.6
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
4
0dd9c5b83d20545008869e0f3d5c7ba79ac9337f
94
py
Python
modules/user/exceptions/user_pool/user_pool_delete_error.py
stevekineeve88/doubloon
4c7c9163e96877ad23663c3dd9a73ef6ccde3e22
[ "MIT" ]
null
null
null
modules/user/exceptions/user_pool/user_pool_delete_error.py
stevekineeve88/doubloon
4c7c9163e96877ad23663c3dd9a73ef6ccde3e22
[ "MIT" ]
8
2021-01-29T15:49:17.000Z
2021-10-14T01:03:27.000Z
modules/user/exceptions/user_pool/user_pool_delete_error.py
stevekineeve88/doubloon
4c7c9163e96877ad23663c3dd9a73ef6ccde3e22
[ "MIT" ]
null
null
null
class UserPoolDeleteError(Exception): """ User pool delete error handler """ pass
18.8
38
0.670213
9
94
7
1
0
0
0
0
0
0
0
0
0
0
0
0.234043
94
4
39
23.5
0.875
0.319149
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
0df21947580883c30257fc6b5b63c5a0f92ca3ef
199
py
Python
estofadora/core/templatetags/contactMesssages_tag.py
delete/estofadora
2f46ba65fb0e376361ff47c86ea7a62c50b6c91b
[ "MIT" ]
6
2016-04-13T21:30:30.000Z
2017-09-29T04:47:07.000Z
estofadora/core/templatetags/contactMesssages_tag.py
delete/estofadora
2f46ba65fb0e376361ff47c86ea7a62c50b6c91b
[ "MIT" ]
13
2016-04-13T23:52:09.000Z
2020-06-05T18:25:13.000Z
estofadora/core/templatetags/contactMesssages_tag.py
delete/estofadora
2f46ba65fb0e376361ff47c86ea7a62c50b6c91b
[ "MIT" ]
1
2016-06-24T13:48:26.000Z
2016-06-24T13:48:26.000Z
from django import template from estofadora.core.models import Contact register = template.Library() @register.simple_tag def get_mensagens(): return Contact.objects.filter(read=False).count()
22.111111
53
0.79397
26
199
6
0.807692
0
0
0
0
0
0
0
0
0
0
0
0.110553
199
9
53
22.111111
0.881356
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
0
0
0
4
0df58813cade3e084057f2a9a053879fd5266b77
202
py
Python
venv/bin/django-admin.py
glezma/test
7be6eb23ec51f0ce783e5e832f040158f65077cf
[ "MIT" ]
null
null
null
venv/bin/django-admin.py
glezma/test
7be6eb23ec51f0ce783e5e832f040158f65077cf
[ "MIT" ]
null
null
null
venv/bin/django-admin.py
glezma/test
7be6eb23ec51f0ce783e5e832f040158f65077cf
[ "MIT" ]
null
null
null
#!/Users/glezma/OneDrive/Programming/Python/cloud_projects/training-hub2/th2/venv/bin/python3.6 from django.core import management if __name__ == "__main__": management.execute_from_command_line()
33.666667
95
0.806931
27
202
5.592593
0.925926
0
0
0
0
0
0
0
0
0
0
0.02139
0.074257
202
5
96
40.4
0.786096
0.465347
0
0
0
0
0.074766
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
2171c071960df24ae28ce4e567932b72285f0fbc
312
py
Python
openselfsup/models/heads/__init__.py
yoyoyoohh/PolSAR_SelfSup
281b40a6500fde5b4837247015edab3a5e32dbfb
[ "Apache-2.0" ]
1
2022-02-16T01:37:17.000Z
2022-02-16T01:37:17.000Z
openselfsup/models/heads/__init__.py
yoyoyoohh/PolSAR_SelfSup
281b40a6500fde5b4837247015edab3a5e32dbfb
[ "Apache-2.0" ]
null
null
null
openselfsup/models/heads/__init__.py
yoyoyoohh/PolSAR_SelfSup
281b40a6500fde5b4837247015edab3a5e32dbfb
[ "Apache-2.0" ]
null
null
null
''' Author: Shuailin Chen Created Date: 2021-09-08 Last Modified: 2021-11-18 content: ''' from .contrastive_head import ContrastiveHead from .cls_head import ClsHead from .latent_pred_head import LatentPredictHead from .multi_cls_head import MultiClsHead from .pix_pred_head import PixPredHead, PixPredHeadV5
24
53
0.820513
43
312
5.767442
0.651163
0.201613
0.104839
0
0
0
0
0
0
0
0
0.061594
0.115385
312
12
54
26
0.836957
0.285256
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
21acbf2e95f25915948b5d3d13b38e0adb7e9ea7
1,208
py
Python
tests/test_math.py
psavery/friendly-computing-machine
08f86992dc31fd56ac7daab4cd6cd1d215638b2d
[ "BSD-3-Clause" ]
null
null
null
tests/test_math.py
psavery/friendly-computing-machine
08f86992dc31fd56ac7daab4cd6cd1d215638b2d
[ "BSD-3-Clause" ]
null
null
null
tests/test_math.py
psavery/friendly-computing-machine
08f86992dc31fd56ac7daab4cd6cd1d215638b2d
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python3 """Simple tests for the math module """ import friendly_computing_machine as fcm import pytest def test_add(): assert fcm.math.add(5, 2) == 7 assert fcm.math.add(2, 5) == 7 assert fcm.math.add(3, 5) == 8 assert fcm.math.add(-1, -4) == -5 assert fcm.math.add(-3, 5) == 2 def test_sub(): assert fcm.math.sub(5, 2) == 3 assert fcm.math.sub(2, 5) == -3 assert fcm.math.sub(3, 5) == -2 assert fcm.math.sub(-1, -4) == 3 assert fcm.math.sub(-3, 5) == -8 def test_mult(): assert fcm.math.mult(5, 2) == 10 assert fcm.math.mult(2, 5) == 10 assert fcm.math.mult(3, 5) == 15 assert fcm.math.mult(-1, -4) == 4 assert fcm.math.mult(-3, 5) == -15 def test_div(): assert fcm.math.div(5.0, 2.0) == 2.5 assert fcm.math.div(2.0, 5.0) == 0.4 assert fcm.math.div(3.0, 5.0) == 0.6 assert fcm.math.div(-1.0, -4.0) == 0.25 assert fcm.math.div(-3.0, 5.0) == -0.6 testData = [ (2, 5, 10), (1, 2, 2), (11, 9, 99), (11, 0, 0), (0, 0, 0) ] @pytest.mark.parametrize("a, b, expected", testData) def test_mult2(a, b, expected): assert fcm.math.mult(a, b) == expected assert fcm.math.mult(b, a) == expected
25.166667
52
0.567053
228
1,208
2.973684
0.201754
0.292035
0.421829
0.175516
0.39823
0.315634
0.262537
0.064897
0.064897
0.064897
0
0.107791
0.224338
1,208
47
53
25.702128
0.615795
0.044702
0
0
0
0
0.012206
0
0
0
0
0
0.594595
1
0.135135
false
0
0.054054
0
0.189189
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
21d9862f32c4ff2c1a3861f4d79293d22ea9e99f
2,148
py
Python
tests/test_ppu_subscriptions_by_product_month.py
IMC-Carolinage/ConnectCustomSubscriptionReport
6f9882a26eb2139a27d23bc87fdabb27ea928665
[ "Apache-2.0" ]
null
null
null
tests/test_ppu_subscriptions_by_product_month.py
IMC-Carolinage/ConnectCustomSubscriptionReport
6f9882a26eb2139a27d23bc87fdabb27ea928665
[ "Apache-2.0" ]
1
2021-05-19T11:10:02.000Z
2021-05-19T11:10:02.000Z
tests/test_ppu_subscriptions_by_product_month.py
IMC-Carolinage/ConnectCustomSubscriptionReport
6f9882a26eb2139a27d23bc87fdabb27ea928665
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # # Copyright (c) 2021, Carolina Giménez Escalante # All rights reserved. # from subscription_report.ppu_subscriptions_by_product_month.entrypoint import generate from subscription_report.ppu_subscriptions_by_product_month.utils import get_last_day_last_month def test_ppu_subscriptions_by_product_month(progress, client_factory, response_factory, ff_ppu): responses = [] parameters = { "product": { "all": True, "choices": [], }, "parameter_id": "t0_f_text", "mkp": { "all": True, "choices": [], }, } responses.append( response_factory( count=1, ), ) responses.append( response_factory( query='and(in(status,(active,suspended)),in(connection.type,(production)),' 'le(events.created.at,' + get_last_day_last_month().strftime('%Y-%m-%dT00:00:00') + '))', value=[ff_ppu], ), ) client = client_factory(responses) result = list(generate(client, parameters, progress)) assert len(result) == 1 def test_generate_additional(progress, client_factory, response_factory, ff_ppu): responses = [] parameters = { "product": { "all": False, "choices": [ "PRD-276-377-545", ], }, "parameter_id": "t0_f_text", "mkp": { "all": False, "choices": ['MP-91673'], }, } responses.append( response_factory( count=1, ), ) responses.append( response_factory( query='and(in(status,(active,suspended)),in(connection.type,(production)),' 'le(events.created.at,' + get_last_day_last_month().strftime('%Y-%m-%dT00:00:00') + '),' 'in(product.id,(PRD-276-377-545)),in(marketplace.id,(MP-91673)))', value=[ff_ppu], ), ) client = client_factory(responses) result = list(generate(client, parameters, progress)) assert len(result) == 1
25.571429
96
0.548417
215
2,148
5.251163
0.367442
0.079717
0.081488
0.106289
0.785651
0.74225
0.74225
0.699734
0.607617
0.607617
0
0.034413
0.310056
2,148
83
97
25.879518
0.727395
0.041434
0
0.666667
1
0.015873
0.195811
0.116415
0
0
0
0
0.031746
1
0.031746
false
0
0.031746
0
0.063492
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
21e0a7ffa43e08e7fbae3e0b33e549052dda8a1f
305
py
Python
integreat_cms/cms/views/imprint/__init__.py
Integreat/cms-v2
c79a54fd5abb792696420aa6427a5e5a356fa79c
[ "Apache-2.0" ]
21
2018-10-26T20:10:45.000Z
2020-10-22T09:41:46.000Z
integreat_cms/cms/views/imprint/__init__.py
Integreat/cms-v2
c79a54fd5abb792696420aa6427a5e5a356fa79c
[ "Apache-2.0" ]
392
2018-10-25T08:34:07.000Z
2020-11-19T08:20:30.000Z
integreat_cms/cms/views/imprint/__init__.py
Integreat/cms-v2
c79a54fd5abb792696420aa6427a5e5a356fa79c
[ "Apache-2.0" ]
23
2019-03-06T17:11:35.000Z
2020-10-16T04:36:41.000Z
""" This package contains all views related to the imprint """ from .imprint_form_view import ImprintFormView from .imprint_revision_view import ImprintRevisionView from .imprint_sbs_view import ImprintSideBySideView from .imprint_actions import ( delete_imprint, expand_imprint_translation_id, )
27.727273
54
0.829508
37
305
6.540541
0.621622
0.181818
0
0
0
0
0
0
0
0
0
0
0.12459
305
10
55
30.5
0.906367
0.177049
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.571429
0
0.571429
0.857143
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
1
0
4
df091707eef9a4d402970f000b5c0b96d5fca379
93
py
Python
bot/platforms/telegram/commands/login/utilities/constants.py
AiratK/kaishnik-bot
c42351611a40a04d78c8ae481b97339adbd321e5
[ "MIT" ]
15
2019-04-30T14:37:15.000Z
2022-02-09T06:43:00.000Z
bot/platforms/telegram/commands/login/utilities/constants.py
AiratK/kaishnik-bot
c42351611a40a04d78c8ae481b97339adbd321e5
[ "MIT" ]
4
2019-03-18T06:09:38.000Z
2021-12-10T06:12:26.000Z
bot/platforms/telegram/commands/login/utilities/constants.py
AiratK/kaishnik-bot
c42351611a40a04d78c8ae481b97339adbd321e5
[ "MIT" ]
2
2020-04-12T10:31:52.000Z
2021-06-07T20:18:08.000Z
GUIDE_MESSAGE: str = "Список команд можно увидеть, введя * / * (не отправляй! просто введи)"
46.5
92
0.72043
12
93
5.5
1
0
0
0
0
0
0
0
0
0
0
0
0.16129
93
1
93
93
0.846154
0
0
0
0
0
0.741935
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
df098d49ee3bd40fdd435365bd5e24091b81486d
1,627
py
Python
__init__.py
k4otix/python-tpam
1e310cf954e5963c2dafcd6549bf0af361c9d956
[ "MIT" ]
1
2017-11-03T18:25:43.000Z
2017-11-03T18:25:43.000Z
__init__.py
k4otix/python-tpam
1e310cf954e5963c2dafcd6549bf0af361c9d956
[ "MIT" ]
null
null
null
__init__.py
k4otix/python-tpam
1e310cf954e5963c2dafcd6549bf0af361c9d956
[ "MIT" ]
2
2018-12-21T03:07:30.000Z
2019-05-16T15:37:27.000Z
''' This module implements an SSH wrapper for the Dell TPAM 2.5 SSH command line interface. Although not all functions have been fully tested, it works well for the more common operations and implements a number of exceptions for error checking. This module can be used interactively from the Python interpreter or imported into scripts as an easy way to interface with TPAM. One caveat is that unlike the native SSH CLI, API methods and keyword arguments are CASE SENSITIVE. This may be fixed in a future enhancement but isn't that important; the case matches what a user would see with the --help flags in the SSH CLI. Each call to the API takes approx. 3 seconds to execute, which is certainly less than ideal, especially when considering a user with a "Requestor" role must make 3 calls to the API to retrieve a password. Note that the third call to the "Cancel" method isn't really required but significantly helps in cases where another password retrieval may be made within the window of the previous password release so that scripts/programs that are stateless can generate a new password request and not receive an error about there already being a scheduled password release. For the most part this module will just pass methods/arguments along to TPAM and the appliance will determine if there is an error or not, but there are cases where the module will interpret results and give a different response back to the client. There may be some cases where the module will interpret results returned by TPAM to create a response for the client application that is easier to consume. ''' import tpam.client
46.485714
79
0.801475
280
1,627
4.657143
0.560714
0.018405
0.013804
0.029141
0.059816
0.059816
0.059816
0
0
0
0
0.00299
0.177628
1,627
34
80
47.852941
0.971599
0.980947
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
df39fdf12ff148cc0beb60caa1acb7ae05fea6ac
179
py
Python
aemter/apps.py
mribrgr/StuRa-Mitgliederdatenbank
87a261d66c279ff86056e315b05e6966b79df9fa
[ "MIT" ]
8
2019-11-26T13:34:46.000Z
2021-06-21T13:41:57.000Z
src/aemter/apps.py
Sumarbrander/Stura-Mitgliederdatenbank
691dbd33683b2c2d408efe7a3eb28e083ebcd62a
[ "MIT" ]
93
2019-12-16T09:29:10.000Z
2021-04-24T12:03:33.000Z
src/aemter/apps.py
Sumarbrander/Stura-Mitgliederdatenbank
691dbd33683b2c2d408efe7a3eb28e083ebcd62a
[ "MIT" ]
2
2020-12-03T12:43:19.000Z
2020-12-22T21:48:47.000Z
from django.apps import AppConfig class AemterConfig(AppConfig): name = 'aemter' verbose_name = "Funktionen" def ready(self): import aemter.signals.handlers
19.888889
38
0.703911
20
179
6.25
0.8
0
0
0
0
0
0
0
0
0
0
0
0.212291
179
9
38
19.888889
0.886525
0
0
0
0
0
0.088889
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
df3a0312d2d2f3278b7fc562b4ad87cbc3892497
119
py
Python
pycobertura/__init__.py
bastih/pycobertura
9e2f80d1f6bd8023f12dfb5541a421a31ebf8453
[ "MIT" ]
54
2018-01-12T08:29:29.000Z
2022-02-07T22:03:57.000Z
pycobertura/__init__.py
bastih/pycobertura
9e2f80d1f6bd8023f12dfb5541a421a31ebf8453
[ "MIT" ]
54
2017-11-30T14:32:48.000Z
2022-03-26T18:58:06.000Z
pycobertura/__init__.py
bastih/pycobertura
9e2f80d1f6bd8023f12dfb5541a421a31ebf8453
[ "MIT" ]
26
2017-11-01T23:02:01.000Z
2022-03-16T08:12:49.000Z
from .cobertura import Cobertura, CoberturaDiff # noqa from .reporters import TextReporter, TextReporterDelta # noqa
39.666667
62
0.815126
12
119
8.083333
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.134454
119
2
63
59.5
0.941748
0.07563
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
df891d9012c371208b998d5e9d9f0a5f92784279
356
py
Python
my_site/admin.py
sch841466053/web
8253c79a1e6d332b626700c11236ebc4f4baffd3
[ "MIT" ]
null
null
null
my_site/admin.py
sch841466053/web
8253c79a1e6d332b626700c11236ebc4f4baffd3
[ "MIT" ]
1
2021-06-10T20:56:20.000Z
2021-06-10T20:56:20.000Z
my_site/admin.py
sch841466053/web
8253c79a1e6d332b626700c11236ebc4f4baffd3
[ "MIT" ]
null
null
null
from django.contrib import admin from my_site import models # Register your models here. admin.site.register(models.UserInfo) admin.site.register(models.UserToken) admin.site.register(models.FreeCourse) admin.site.register(models.SeniorCourse) admin.site.register(models.GoodsCategory) admin.site.register(models.Goods) admin.site.register(models.Comments)
35.6
41
0.839888
49
356
6.081633
0.367347
0.211409
0.399329
0.540268
0
0
0
0
0
0
0
0
0.053371
356
10
42
35.6
0.884273
0.073034
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.222222
0
0.222222
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
df95479306618f30fdcbef617294d443fef76449
467
py
Python
tests/unit_tests/conftest.py
ubersan/pylic
16c6412b28889a8887b69e3fdc49d1231ea81542
[ "MIT" ]
5
2021-12-27T20:22:53.000Z
2022-01-24T10:59:29.000Z
tests/unit_tests/conftest.py
sandrochuber/pylic
cc1a65178a3d0a8a6c16a6f4f1e5aafa0be53879
[ "MIT" ]
45
2021-02-27T13:28:04.000Z
2021-09-10T18:55:24.000Z
tests/unit_tests/conftest.py
ubersan/pylic
16c6412b28889a8887b69e3fdc49d1231ea81542
[ "MIT" ]
2
2021-07-20T09:45:20.000Z
2021-07-21T08:56:35.000Z
import random import string import pytest def random_string() -> str: return "".join(random.choice(string.ascii_lowercase) for i in range(10)) @pytest.fixture def license() -> str: return random_string() @pytest.fixture def package() -> str: return random_string() @pytest.fixture def version() -> str: def random_integer() -> int: return random.randint(0, 100) return f"{random_integer()}.{random_integer()}.{random_integer()}"
17.296296
76
0.683084
60
467
5.183333
0.433333
0.167203
0.154341
0.135048
0.237942
0.237942
0.237942
0
0
0
0
0.015584
0.175589
467
26
77
17.961538
0.792208
0
0
0.3125
0
0
0.119914
0.119914
0
0
0
0
0
1
0.3125
true
0
0.1875
0.25
0.8125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
4
10e0fb5cabfe832160d41e8bb7fb78e20465b3cf
189
py
Python
osmaxx/profile/apps.py
tyrasd/osmaxx
da4454083d17b2ef8b0623cad62e39992b6bd52a
[ "MIT" ]
27
2015-03-30T14:17:26.000Z
2022-02-19T17:30:44.000Z
osmaxx/profile/apps.py
tyrasd/osmaxx
da4454083d17b2ef8b0623cad62e39992b6bd52a
[ "MIT" ]
483
2015-03-09T16:58:03.000Z
2022-03-14T09:29:06.000Z
osmaxx/profile/apps.py
tyrasd/osmaxx
da4454083d17b2ef8b0623cad62e39992b6bd52a
[ "MIT" ]
6
2015-04-07T07:38:30.000Z
2020-04-01T12:45:53.000Z
from django.apps import AppConfig from django.utils.translation import gettext_lazy as _ class ExcerptExportConfig(AppConfig): name = 'osmaxx.profile' verbose_name = _("Profile")
23.625
54
0.772487
22
189
6.454545
0.727273
0.140845
0
0
0
0
0
0
0
0
0
0
0.148148
189
7
55
27
0.881988
0
0
0
0
0
0.111111
0
0
0
0
0
0
1
0
false
0
0.4
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
10e2aeef6488376dfe2ed4393690d07378dcd980
143
py
Python
pybot/customLogging.py
harikrishnana2021/operationcode-pybot
6e78e069c274281d50dcb71b98b9f485afb012fc
[ "MIT" ]
null
null
null
pybot/customLogging.py
harikrishnana2021/operationcode-pybot
6e78e069c274281d50dcb71b98b9f485afb012fc
[ "MIT" ]
null
null
null
pybot/customLogging.py
harikrishnana2021/operationcode-pybot
6e78e069c274281d50dcb71b98b9f485afb012fc
[ "MIT" ]
null
null
null
import logging class SlackMessageFilter(logging.Filter): def filter(self, record): return record.funcName != "_incoming_message"
20.428571
53
0.734266
15
143
6.866667
0.8
0
0
0
0
0
0
0
0
0
0
0
0.174825
143
6
54
23.833333
0.872881
0
0
0
0
0
0.118881
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
802159d73e728de27c14d2c39ef320835d9925b5
164
py
Python
config/ipython/profile_default/startup/00-imports.py
n1amr/shell-scripts
64d96908e3370c7c9d139f55e385fa44f7e9c0d0
[ "MIT" ]
6
2017-01-18T16:11:12.000Z
2021-03-26T13:11:04.000Z
config/ipython/profile_default/startup/00-imports.py
n1amr/shell-scripts
64d96908e3370c7c9d139f55e385fa44f7e9c0d0
[ "MIT" ]
null
null
null
config/ipython/profile_default/startup/00-imports.py
n1amr/shell-scripts
64d96908e3370c7c9d139f55e385fa44f7e9c0d0
[ "MIT" ]
null
null
null
import os import re import sys from math import * def import_math(): global np import numpy as np global plt from matplotlib import pyplot as plt
13.666667
40
0.707317
26
164
4.423077
0.538462
0
0
0
0
0
0
0
0
0
0
0
0.268293
164
11
41
14.909091
0.958333
0
0
0
0
0
0
0
0
0
0
0
0
1
0.111111
true
0
0.777778
0
0.888889
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
80314a44c7f2dfe9864566809caff06c044725db
3,911
py
Python
tests/sls_tools/param_store/test_param_store_result.py
ki-tools/sls-tools-py
f1089d94a07b8455c81fdbfd78a159fecb017b57
[ "Apache-2.0" ]
null
null
null
tests/sls_tools/param_store/test_param_store_result.py
ki-tools/sls-tools-py
f1089d94a07b8455c81fdbfd78a159fecb017b57
[ "Apache-2.0" ]
4
2020-03-24T17:53:29.000Z
2021-04-30T20:54:13.000Z
tests/sls_tools/param_store/test_param_store_result.py
ki-tools/sls-tools-py
f1089d94a07b8455c81fdbfd78a159fecb017b57
[ "Apache-2.0" ]
null
null
null
import pytest import uuid import json from src.sls_tools.param_store import ParamStore, ParamStoreResult @pytest.fixture def key(): return 'TEST-KEY' @pytest.fixture def value(): return str(uuid.uuid4()) @pytest.fixture def param_store_result(key, value): return ParamStoreResult(key, value, None) def test_it_has_the_key(param_store_result, key): assert param_store_result.key == key def test_it_has_the_value(param_store_result, value): assert param_store_result.value == value def test_it_has_the_store(param_store_result, key, value): assert param_store_result.store is None assert ParamStoreResult(key, value, ParamStore.Stores.SSM).store == ParamStore.Stores.SSM def test_it_converts_to_int(key): assert ParamStoreResult(key, '100', None).to_int() == 100 assert ParamStoreResult(key, '0200', None).to_int() == 200 assert ParamStoreResult(key, 3, None).to_int() == 3 assert ParamStoreResult(key, ' ', None).to_int() is None assert ParamStoreResult(key, None, None).to_int() is None def test_it_errors_when_converting_non_int_strings(key): for bad_string in ['not a number', 'one', 'a', '.1', '1.0']: with pytest.raises(ValueError) as ex: ParamStoreResult(key, bad_string, None).to_int() assert 'invalid literal for int()' in str(ex.value) def test_it_converts_to_float(key): assert ParamStoreResult(key, '100.001', None).to_float() == 100.001 assert ParamStoreResult(key, '02.00', None).to_float() == 2.00 assert ParamStoreResult(key, '0300', None).to_float() == 300 assert ParamStoreResult(key, 40.0, None).to_float() == 40.0 assert ParamStoreResult(key, ' ', None).to_float() is None assert ParamStoreResult(key, None, None).to_float() is None def test_it_errors_when_converting_non_float_strings(key): for bad_string in ['not a number', 'one', 'a']: with pytest.raises(ValueError) as ex: ParamStoreResult(key, bad_string, None).to_float() assert 'could not convert string to float:' in str(ex.value) def test_it_converts_to_bool(key): for bool_value in ['true', 'True', ' tRUe ', 't', '1', 
True]: assert ParamStoreResult(key, bool_value, None).to_bool() is True for non_bool_value in ['false', 'False', 'fAlSe', 'f', '0', ' ', None]: assert ParamStoreResult(key, non_bool_value, None).to_bool() is False def test_it_uses_custom_true_values(key): assert ParamStoreResult(key, 'False', None).to_bool(true_values=['False']) is True def test_it_converts_to_list(key): value = '1, a , abcd,, ,' expected_value = ['1', 'a', 'abcd'] assert ParamStoreResult(key, value, None).to_list() == expected_value assert ParamStoreResult(key, '', None).to_list() == [] assert ParamStoreResult(key, None, None).to_list() == [] def test_it_uses_a_custom_delimiter_to_list(): assert ParamStoreResult(key, 'a|b|c', None).to_list(delimiter='|') == ['a', 'b', 'c'] def test_it_converts_to_json(key): assert ParamStoreResult(key, '{}', None).to_json() == {} assert ParamStoreResult(key, '{"one": 1, "two": { "three": "3" }}', None).to_json() == {"one": 1, "two": {"three": "3"}} assert ParamStoreResult(key, '[]', None).to_json() == [] assert ParamStoreResult(key, '[{"one": "one"}]', None).to_json() == [{"one": "one"}] def test_it_returns_none_for_none_or_empty_strings(key): assert ParamStoreResult(key, None, None).to_json() is None assert ParamStoreResult(key, '', None).to_json() is None assert ParamStoreResult(key, ' ', None).to_json() is None def test_it_errors_when_converting_invalid_json_strings(key): for invalid_json in ['{', '{{}', 'asdf']: with pytest.raises(json.JSONDecodeError): ParamStoreResult(key, invalid_json, None).to_json()
36.212963
114
0.670672
538
3,911
4.630112
0.165428
0.228824
0.260939
0.128061
0.501405
0.327579
0.271778
0.271778
0.227218
0.171819
0
0.018472
0.183329
3,911
107
115
36.551402
0.761428
0
0
0.097222
0
0
0.075684
0
0
0
0
0
0.430556
1
0.236111
false
0
0.055556
0.041667
0.333333
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
0
0
0
0
0
0
0
4
80427658a9f7f5b0f8b3b6efe309c00ce6c8b23d
96
py
Python
utils/__init__.py
fxrcha/AutoBlobSaver
dc93658ad26b577363eddcb6250227fc79466aad
[ "MIT" ]
1
2021-09-21T16:07:11.000Z
2021-09-21T16:07:11.000Z
utils/__init__.py
fxrcha/AutoBlobSaver
dc93658ad26b577363eddcb6250227fc79466aad
[ "MIT" ]
null
null
null
utils/__init__.py
fxrcha/AutoBlobSaver
dc93658ad26b577363eddcb6250227fc79466aad
[ "MIT" ]
null
null
null
from .config import load_config from .logger import Logger __all__ = ["Logger", "load_config"]
19.2
35
0.760417
13
96
5.153846
0.461538
0.298507
0
0
0
0
0
0
0
0
0
0
0.135417
96
4
36
24
0.807229
0
0
0
0
0
0.177083
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
8044ae238643c4a3dbc75b97aabd696a3b6bc799
78
py
Python
pages/__init__.py
GrAndSE/django-pages
003f7492e8b9899f7c0d62b614fe635f96b8862e
[ "BSD-3-Clause" ]
null
null
null
pages/__init__.py
GrAndSE/django-pages
003f7492e8b9899f7c0d62b614fe635f96b8862e
[ "BSD-3-Clause" ]
1
2017-12-14T05:04:28.000Z
2017-12-14T05:04:28.000Z
pages/__init__.py
GrAndSE/django-pages
003f7492e8b9899f7c0d62b614fe635f96b8862e
[ "BSD-3-Clause" ]
null
null
null
'''Django application provides a base implementation of extendimble pages '''
26
73
0.794872
9
78
6.888889
1
0
0
0
0
0
0
0
0
0
0
0
0.128205
78
2
74
39
0.911765
0.897436
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
804502fc909d1f48bd45e1e49481abbfacc10021
364
py
Python
templates/powershell/ps_system_time.py
ohoph/Ebowla
9f1a45a70abbd0a02bfe9614f298b98e98697b1b
[ "BSD-3-Clause" ]
738
2016-04-08T15:26:43.000Z
2022-03-28T20:49:08.000Z
templates/powershell/ps_system_time.py
iNoSec/Ebowla_modified
0079ec86c9c9c167166beda1922bd26114f6addb
[ "BSD-3-Clause" ]
28
2016-04-09T22:37:41.000Z
2019-01-28T10:44:46.000Z
templates/powershell/ps_system_time.py
iNoSec/Ebowla_modified
0079ec86c9c9c167166beda1922bd26114f6addb
[ "BSD-3-Clause" ]
179
2016-04-08T16:00:11.000Z
2022-02-17T11:39:02.000Z
buildcode=""" function Get-SystemTime(){ $time_mask = @() $the_time = Get-Date $time_mask += [string]$the_time.Year + "0000" $time_mask += [string]$the_time.Year + [string]$the_time.Month + "00" $time_mask += [string]$the_time.Year + [string]$the_time.Month + [string]$the_time.Day return $time_mask } """ callcode=""" $key_combos += ,(Get-SystemTime) """
24.266667
87
0.664835
50
364
4.58
0.36
0.213974
0.340611
0.222707
0.484716
0.484716
0.375546
0.375546
0.375546
0.375546
0
0.018809
0.123626
364
15
88
24.266667
0.69906
0
0
0.153846
0
0.076923
0.906849
0.364384
0
0
0
0
0
1
0
false
0
0
0
0.076923
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
3378de0cf52b9dc418d54b8290b773c02ead21ec
44
py
Python
cogs/coding/__init__.py
itsVale/Vale.py
6b3cac68d53e8d814ee969a959aae4de52beda80
[ "MIT" ]
14
2018-08-06T06:45:19.000Z
2018-12-28T14:20:33.000Z
cogs/coding/__init__.py
Mystic-Alchemy/Vale.py
b4cc964d34672444c65e2801a15f37d774c5e6e3
[ "MIT" ]
10
2018-10-06T10:52:08.000Z
2018-12-28T14:21:47.000Z
cogs/coding/__init__.py
Mystic-Alchemy/Vale.py
b4cc964d34672444c65e2801a15f37d774c5e6e3
[ "MIT" ]
13
2018-09-23T20:13:10.000Z
2019-01-26T11:02:37.000Z
""" Some coding-related shit for geeks. """
11
35
0.659091
6
44
4.833333
1
0
0
0
0
0
0
0
0
0
0
0
0.159091
44
3
36
14.666667
0.783784
0.795455
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
33d3be250ac27b195760e40b3ef046d9ded243c4
4,493
py
Python
src/lib/mine/src_gen/script/structure.py
rdw20170120/workstation
ed19aa930a83885c2a8cb58eb0bb5afe58f95df3
[ "MIT" ]
null
null
null
src/lib/mine/src_gen/script/structure.py
rdw20170120/workstation
ed19aa930a83885c2a8cb58eb0bb5afe58f95df3
[ "MIT" ]
2
2021-04-06T18:07:32.000Z
2021-06-02T01:50:40.000Z
src/lib/mine/src_gen/script/structure.py
rdw20170120/workstation
ed19aa930a83885c2a8cb58eb0bb5afe58f95df3
[ "MIT" ]
null
null
null
#!/usr/bin/env false """TODO: Write """ # Internal packages (absolute references, distributed with Python) from pathlib import Path # External packages (absolute references, NOT distributed with Python) # Library modules (absolute references, NOT packaged, in project) from utility import my_assert as is_ from src_gen.structure import * from src_gen.source import my_visitor_map # Project modules (relative references, NOT packaged, in project) ############################################################################### def indent(count=1): return count * " " ############################################################################### @my_visitor_map.register(Path) def _visit_path(element, walker): walker.walk(str(element)) ############################################################################### class _Arguments(object): def __init__(self, *arguments): super().__init__() self.arguments = squashed(arguments) def __repr__(self): return "_Arguments({})".format(self.arguments) @my_visitor_map.register(_Arguments) def _visit_arguments(element, walker): if is_nonstring_iterable(element.arguments): for a in element.arguments: if a is not None: walker.emit(" ") walker.walk(a) elif element.arguments is None: pass else: walker.emit(" ") walker.walk(element.arguments) ############################################################################### class _Command(object): def __init__(self, executable, *arguments): super().__init__() self.executable = squashed(executable) assert is_.not_none(self.executable) if isinstance(arguments, _Arguments): self.arguments = arguments else: self.arguments = _Arguments(arguments) def __repr__(self): return "_Command({}, {})".format(self.executable, self.arguments) @my_visitor_map.register(_Command) def _visit_command(element, walker): walker.walk(element.executable) walker.walk(element.arguments) def command(executable, *argument): return _Command(executable, argument) 
############################################################################### class _Comment(object): def __init__(self, *elements, tight=False): super().__init__() self.elements = squashed(elements) if not self.elements: tight = True self.tight = tight def __repr__(self): return "_Comment({}, {})".format(self.tight, self.elements) @my_visitor_map.register(_Comment) def _visit_comment(element, walker): walker.emit("#") if not element.tight: walker.emit(" ") walker.walk(element.elements) walker.walk(eol()) def comment(*element): return _Comment(element) def disabled(*element): return comment("DISABLED: ", element) def fix(*element): return todo("FIX: ", element) def no(*element): return comment("NO: ", element) def note(*element): return comment("NOTE: ", element) def rule(): # TODO: Make line length configurable return line("#" * 79) def research(*element): return todo("RESEARCH: ", element) def someday(*element): return todo("SOMEDAY: ", element) def todo(*element): return comment("TODO: ", element) ############################################################################### class _Expression(object): def __init__(self, *elements): super().__init__() self.elements = squashed(elements) def __repr__(self): return "_Expression({})".format(self.elements) @my_visitor_map.register(_Expression) def _visit_expression(element, walker): if is_nonstring_iterable(element.elements): for e in element.elements: if e is not None: walker.walk(e) elif element.elements is None: pass else: walker.walk(element.elements) def x(*element): return _Expression(element) ############################################################################### def _shebang(command): assert is_.instance(command, _Command) return _Comment("!", command, tight=True) def shebang_cat(): return shebang_thru_env("cat") def shebang_false(): return shebang_thru_env("false") def shebang_thru_env(executable): assert is_.not_none(executable) return _shebang(_Command(Path("/usr/bin/env"), executable)) 
"""DisabledContent """
23.279793
79
0.581349
453
4,493
5.516556
0.209713
0.036014
0.028812
0.040016
0.234494
0.114446
0.032813
0
0
0
0
0.000826
0.191854
4,493
192
80
23.401042
0.687414
0.074115
0
0.207547
0
0
0.039463
0
0
0
0
0.010417
0.037736
1
0.273585
false
0.018868
0.037736
0.169811
0.537736
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
1
1
0
0
4
d5096efc5a1271afc10059c31075156bad917bd6
23
py
Python
packages/snet_cli/snet/snet_cli/version.py
raamb/snet-cli
1eef893c4e98c5099144343d92e550a2bfdbe374
[ "MIT" ]
null
null
null
packages/snet_cli/snet/snet_cli/version.py
raamb/snet-cli
1eef893c4e98c5099144343d92e550a2bfdbe374
[ "MIT" ]
null
null
null
packages/snet_cli/snet/snet_cli/version.py
raamb/snet-cli
1eef893c4e98c5099144343d92e550a2bfdbe374
[ "MIT" ]
1
2021-01-15T11:08:35.000Z
2021-01-15T11:08:35.000Z
__version__ = "v2.0.0"
11.5
22
0.652174
4
23
2.75
0.75
0
0
0
0
0
0
0
0
0
0
0.15
0.130435
23
1
23
23
0.4
0
0
0
0
0
0.26087
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
1d13cfcd2d3a84fc82f90b03462cf2bdcd551f10
196
py
Python
discordobjects/util/__init__.py
igo95862/DiscordBot_lib
09624864a369751441a6eed2b3d0449f03c52151
[ "MIT" ]
null
null
null
discordobjects/util/__init__.py
igo95862/DiscordBot_lib
09624864a369751441a6eed2b3d0449f03c52151
[ "MIT" ]
null
null
null
discordobjects/util/__init__.py
igo95862/DiscordBot_lib
09624864a369751441a6eed2b3d0449f03c52151
[ "MIT" ]
null
null
null
from .enum_str import StrEnum from .deprecated_dispencers import SingularEvent, QueueDispenser from .event_dispenser import EventDispenser __all__ = ['StrEnum', 'SingularEvent', 'QueueDispenser']
39.2
64
0.831633
20
196
7.8
0.65
0.346154
0
0
0
0
0
0
0
0
0
0
0.091837
196
5
65
39.2
0.876404
0
0
0
0
0
0.172589
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
1d2f06895d147bdbfe15b01ab11b06a6723ee2d1
101
py
Python
spendtrackapp/apps.py
dthung1602/SpendTrackApp
1439835175ec9bc34507324ca12ffc2c8764267f
[ "MIT" ]
1
2020-09-01T09:31:21.000Z
2020-09-01T09:31:21.000Z
spendtrackapp/apps.py
dthung1602/SpendTrackApp
1439835175ec9bc34507324ca12ffc2c8764267f
[ "MIT" ]
9
2019-12-04T22:09:37.000Z
2022-02-10T10:20:07.000Z
spendtrackapp/apps.py
dthung1602/SpendTrackApp
1439835175ec9bc34507324ca12ffc2c8764267f
[ "MIT" ]
null
null
null
from django.apps import AppConfig class SpendtrackappConfig(AppConfig): name = 'spendtrackapp'
16.833333
37
0.782178
10
101
7.9
0.9
0
0
0
0
0
0
0
0
0
0
0
0.148515
101
5
38
20.2
0.918605
0
0
0
0
0
0.128713
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
1d3126b4beb207996977db360de3eac1eefbdbf0
145
py
Python
DS-400/Easy/217-Contains Duplicate/Counter.py
ericchen12377/Leetcode-Algorithm-Python
eb58cd4f01d9b8006b7d1a725fc48910aad7f192
[ "MIT" ]
2
2020-04-24T18:36:52.000Z
2020-04-25T00:15:57.000Z
DS-400/Easy/217-Contains Duplicate/Counter.py
ericchen12377/Leetcode-Algorithm-Python
eb58cd4f01d9b8006b7d1a725fc48910aad7f192
[ "MIT" ]
null
null
null
DS-400/Easy/217-Contains Duplicate/Counter.py
ericchen12377/Leetcode-Algorithm-Python
eb58cd4f01d9b8006b7d1a725fc48910aad7f192
[ "MIT" ]
null
null
null
class Solution: def containsDuplicate(self, nums: List[int]) -> bool: dic = Counter(nums).values() return sum(dic) > len(dic)
36.25
57
0.62069
18
145
5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.234483
145
4
58
36.25
0.810811
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
1d4c4a23e19e9b6cc46359c8a10d6fb6e8aca8ae
73
py
Python
exercises/chapter_04/exercise_04_06/exercise_04_06.py
HenrikSamuelsson/python-crash-course
0550343d413e4636f402a66041860bc1a319fc8f
[ "MIT" ]
1
2017-04-30T18:05:26.000Z
2017-04-30T18:05:26.000Z
exercises/chapter_04/exercise_04_06/exercise_04_06.py
HenrikSamuelsson/python-crash-course
0550343d413e4636f402a66041860bc1a319fc8f
[ "MIT" ]
null
null
null
exercises/chapter_04/exercise_04_06/exercise_04_06.py
HenrikSamuelsson/python-crash-course
0550343d413e4636f402a66041860bc1a319fc8f
[ "MIT" ]
null
null
null
# 4-6. Odd Numbers odd_numbers = list(range(1,21,2)) print(odd_numbers)
14.6
33
0.712329
14
73
3.571429
0.714286
0.6
0
0
0
0
0
0
0
0
0
0.09375
0.123288
73
4
34
18.25
0.6875
0.219178
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
1d97dc9f48900dabf17fae6ab3c73ed661741b0c
235
py
Python
samples/__init__.py
Wanini0209/wPyProj
bd5da37dbc17791edea87b3862430e62e4660d72
[ "MIT" ]
null
null
null
samples/__init__.py
Wanini0209/wPyProj
bd5da37dbc17791edea87b3862430e62e4660d72
[ "MIT" ]
null
null
null
samples/__init__.py
Wanini0209/wPyProj
bd5da37dbc17791edea87b3862430e62e4660d72
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Wanini's Python Sample Project ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ wPyProj is a template of Python project. :copyright: (c) 2021 by Ting-Hsu Chang. :license: MIT, see LICENSE for more details. """
26.111111
45
0.557447
29
235
4.517241
0.896552
0
0
0
0
0
0
0
0
0
0
0.024631
0.13617
235
8
46
29.375
0.62069
0.957447
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
1da86e1b5277ec510c3af2d6bdb2983260d4393c
261
py
Python
Candidate-Ranking/experiment/qa/data/insuranceqa/v1.py
UKPLab/acl2017-non-factoid-qa
a32b93d999231ea1933649b48d921a7050cc742f
[ "Apache-2.0" ]
73
2017-04-27T09:08:29.000Z
2021-07-27T14:48:14.000Z
Candidate-Ranking/experiment/qa/data/insuranceqa/v1.py
UKPLab/acl2017-non-factoid-qa
a32b93d999231ea1933649b48d921a7050cc742f
[ "Apache-2.0" ]
9
2017-08-19T08:11:21.000Z
2019-07-23T15:12:39.000Z
Candidate-Ranking/experiment/qa/data/insuranceqa/v1.py
UKPLab/acl2017-non-factoid-qa
a32b93d999231ea1933649b48d921a7050cc742f
[ "Apache-2.0" ]
21
2017-06-22T07:49:09.000Z
2019-04-06T16:56:12.000Z
from experiment.qa.data import QAData from experiment.qa.data.insuranceqa.reader.v1_reader import V1Reader class V1Data(QAData): def _get_reader(self): return V1Reader(self.config['insuranceqa'], self.lowercased, self.logger) component = V1Data
23.727273
81
0.770115
34
261
5.823529
0.588235
0.141414
0.161616
0.20202
0
0
0
0
0
0
0
0.022124
0.1341
261
10
82
26.1
0.853982
0
0
0
0
0
0.042146
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.833333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
4
d52df43e92d08a97323ba353197de4f82171c569
25
py
Python
tests/__init__.py
TueHaulund/SLOTH
bd6ce343b22719f58d746768eda36a7164ec0446
[ "MIT" ]
null
null
null
tests/__init__.py
TueHaulund/SLOTH
bd6ce343b22719f58d746768eda36a7164ec0446
[ "MIT" ]
null
null
null
tests/__init__.py
TueHaulund/SLOTH
bd6ce343b22719f58d746768eda36a7164ec0446
[ "MIT" ]
null
null
null
"""SLOTH test module."""
12.5
24
0.6
3
25
5
1
0
0
0
0
0
0
0
0
0
0
0
0.12
25
1
25
25
0.681818
0.72
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
d53de11a2d7d1e09c5f33a17ea6ec482cedcf3a6
227
py
Python
backend/inference/serializers.py
deagwon97/django-react-template
368845ccb22918699bb7b56e949e29f961b89045
[ "MIT" ]
1
2021-09-08T06:55:19.000Z
2021-09-08T06:55:19.000Z
backend/inference/serializers.py
deagwon97/django-react-template
368845ccb22918699bb7b56e949e29f961b89045
[ "MIT" ]
null
null
null
backend/inference/serializers.py
deagwon97/django-react-template
368845ccb22918699bb7b56e949e29f961b89045
[ "MIT" ]
null
null
null
from rest_framework import serializers from .models import Inference # 계산 시리얼 라이저. api 필드에서 보여줄 필드 명시 class InferenceSerializer(serializers.ModelSerializer): class Meta: model = Inference fields = '__all__'
28.375
55
0.740088
27
227
6.037037
0.814815
0
0
0
0
0
0
0
0
0
0
0
0.207048
227
7
56
32.428571
0.905556
0.132159
0
0
0
0
0.035897
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
d5421d10edf0193dcc648a22a4d189327a4a2a8d
94
py
Python
redirect_plus/tests/factories.py
arteria/django-redirect-plus
7a3020a100a5925de42559283e87d9eed6bbc720
[ "MIT" ]
5
2015-09-06T02:08:34.000Z
2016-12-09T02:14:46.000Z
redirect_plus/tests/factories.py
arteria/django-redirect-plus
7a3020a100a5925de42559283e87d9eed6bbc720
[ "MIT" ]
1
2016-07-11T14:38:34.000Z
2016-07-11T14:38:34.000Z
redirect_plus/tests/factories.py
arteria/django-redirect-plus
7a3020a100a5925de42559283e87d9eed6bbc720
[ "MIT" ]
null
null
null
"""Factories for the redirect_plus app.""" # import factory # from ..models import YourModel
18.8
42
0.734043
12
94
5.666667
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.148936
94
4
43
23.5
0.85
0.882979
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
d56c44c068aaf08465454f5b9206005dd95bb176
15,194
py
Python
tf3d/losses/box_prediction_losses_test.py
deepneuralmachine/google-research
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
[ "Apache-2.0" ]
23,901
2018-10-04T19:48:53.000Z
2022-03-31T21:27:42.000Z
tf3d/losses/box_prediction_losses_test.py
deepneuralmachine/google-research
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
[ "Apache-2.0" ]
891
2018-11-10T06:16:13.000Z
2022-03-31T10:42:34.000Z
tf3d/losses/box_prediction_losses_test.py
deepneuralmachine/google-research
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
[ "Apache-2.0" ]
6,047
2018-10-12T06:31:02.000Z
2022-03-31T13:59:28.000Z
# coding=utf-8 # Copyright 2021 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for ...tf3d.losses.box_prediction_losses.""" import tensorflow as tf from tf3d import standard_fields from tf3d.losses import box_prediction_losses class BoxPredictionLossesTest(tf.test.TestCase): def _get_random_inputs(self): return { standard_fields.InputDataFields.object_rotation_matrix_voxels: tf.random.uniform([1, 100, 3, 3], minval=-1.0, maxval=1.0, dtype=tf.float32), standard_fields.InputDataFields.object_length_voxels: tf.random.uniform([1, 100, 1], minval=0.1, maxval=2.0, dtype=tf.float32), standard_fields.InputDataFields.object_height_voxels: tf.random.uniform([1, 100, 1], minval=0.1, maxval=2.0, dtype=tf.float32), standard_fields.InputDataFields.object_width_voxels: tf.random.uniform([1, 100, 1], minval=0.1, maxval=2.0, dtype=tf.float32), standard_fields.InputDataFields.object_center_voxels: tf.random.uniform([1, 100, 3], minval=-5.0, maxval=5.0, dtype=tf.float32), standard_fields.InputDataFields.object_class_voxels: tf.random.uniform([1, 100, 1], minval=0, maxval=7, dtype=tf.int32), standard_fields.InputDataFields.object_instance_id_voxels: tf.random.uniform([1, 100, 1], minval=0, maxval=20, dtype=tf.int32), } def _get_empty_inputs(self): inputs = self._get_random_inputs() for key in inputs: if key in inputs: tensor_shape = inputs[key].shape.as_list() tensor_shape[1] = 0 inputs[key] = tf.zeros(tensor_shape, dtype=inputs[key].dtype) 
inputs[standard_fields.InputDataFields.num_valid_voxels] = tf.constant( [0], dtype=tf.int32) return inputs def _get_dictionaries_for_distance_loss_relative(self): gt_box_center = tf.reshape( tf.constant([10.0, -20.0, 30.0], dtype=tf.float32), [1, 1, 3]) gt_box_length = tf.reshape( tf.constant([1.0], dtype=tf.float32), [1, 1, 1]) gt_box_height = tf.reshape( tf.constant([2.0], dtype=tf.float32), [1, 1, 1]) gt_box_width = tf.reshape( tf.constant([3.0], dtype=tf.float32), [1, 1, 1]) gt_box_r = tf.reshape(tf.eye(3, dtype=tf.float32), [1, 1, 3, 3]) gt_box_class = tf.reshape(tf.constant([1], dtype=tf.int32), [1, 1, 1]) gt_instance_ids = tf.reshape(tf.constant([1], dtype=tf.int32), [1, 1, 1]) pred_box_center1 = tf.reshape( tf.constant([10.1, -20.1, 30.1], dtype=tf.float32), [1, 1, 3]) pred_box_length1 = tf.reshape( tf.constant([1.1], dtype=tf.float32), [1, 1, 1]) pred_box_height1 = tf.reshape( tf.constant([2.1], dtype=tf.float32), [1, 1, 1]) pred_box_width1 = tf.reshape( tf.constant([3.1], dtype=tf.float32), [1, 1, 1]) pred_box_r1 = tf.reshape(tf.eye(3, dtype=tf.float32), [1, 1, 3, 3]) pred_box_center2 = tf.reshape( tf.constant([10.1, -20.2, 30.2], dtype=tf.float32), [1, 1, 3]) pred_box_length2 = tf.reshape( tf.constant([1.11], dtype=tf.float32), [1, 1, 1]) pred_box_height2 = tf.reshape( tf.constant([2.11], dtype=tf.float32), [1, 1, 1]) pred_box_width2 = tf.reshape( tf.constant([3.11], dtype=tf.float32), [1, 1, 1]) pred_box_r2 = tf.reshape(tf.eye(3, dtype=tf.float32), [1, 1, 3, 3]) inputs = { standard_fields.InputDataFields.object_rotation_matrix_voxels: gt_box_r, standard_fields.InputDataFields.object_length_voxels: gt_box_length, standard_fields.InputDataFields.object_height_voxels: gt_box_height, standard_fields.InputDataFields.object_width_voxels: gt_box_width, standard_fields.InputDataFields.object_center_voxels: gt_box_center, standard_fields.InputDataFields.object_class_voxels: gt_box_class, standard_fields.InputDataFields.object_instance_id_voxels: gt_instance_ids, 
} outputs1 = { standard_fields.DetectionResultFields.object_rotation_matrix_voxels: pred_box_r1, standard_fields.DetectionResultFields.object_length_voxels: pred_box_length1, standard_fields.DetectionResultFields.object_height_voxels: pred_box_height1, standard_fields.DetectionResultFields.object_width_voxels: pred_box_width1, standard_fields.DetectionResultFields.object_center_voxels: pred_box_center1, } outputs2 = { standard_fields.DetectionResultFields.object_rotation_matrix_voxels: pred_box_r2, standard_fields.DetectionResultFields.object_length_voxels: pred_box_length2, standard_fields.DetectionResultFields.object_height_voxels: pred_box_height2, standard_fields.DetectionResultFields.object_width_voxels: pred_box_width2, standard_fields.DetectionResultFields.object_center_voxels: pred_box_center2, } return inputs, outputs1, outputs2 def test_box_size_regression_loss_on_voxel_tensors_empty_inputs(self): inputs = self._get_empty_inputs() outputs = { standard_fields.DetectionResultFields.object_length_voxels: tf.zeros([1, 0, 3], dtype=tf.float32), standard_fields.DetectionResultFields.object_height_voxels: tf.zeros([1, 0, 3], dtype=tf.float32), standard_fields.DetectionResultFields.object_width_voxels: tf.zeros([1, 0, 3], dtype=tf.float32), } loss = box_prediction_losses.box_size_regression_loss_on_voxel_tensors( inputs=inputs, outputs=outputs, loss_type='huber') self.assertAllClose(loss.numpy(), 0.0) def test_box_size_regression_loss_on_voxel_tensors_correct_prediction(self): inputs = self._get_random_inputs() inputs[standard_fields.InputDataFields.num_valid_voxels] = tf.constant( [100], dtype=tf.int32) outputs = { standard_fields.DetectionResultFields.object_length_voxels: inputs[standard_fields.InputDataFields.object_length_voxels], standard_fields.DetectionResultFields.object_height_voxels: inputs[standard_fields.InputDataFields.object_height_voxels], standard_fields.DetectionResultFields.object_width_voxels: 
inputs[standard_fields.InputDataFields.object_width_voxels], } loss = box_prediction_losses.box_size_regression_loss_on_voxel_tensors( inputs=inputs, outputs=outputs, loss_type='huber') self.assertAllClose(loss.numpy(), 0.0) def test_box_size_regression_loss_on_voxel_tensors_relative(self): (inputs, outputs1, outputs2) = self._get_dictionaries_for_distance_loss_relative() inputs[standard_fields.InputDataFields.num_valid_voxels] = tf.constant( [1], dtype=tf.int32) loss1 = box_prediction_losses.box_size_regression_loss_on_voxel_tensors( inputs=inputs, outputs=outputs1, loss_type='huber') loss2 = box_prediction_losses.box_size_regression_loss_on_voxel_tensors( inputs=inputs, outputs=outputs2, loss_type='huber') self.assertGreater(loss2.numpy(), loss1.numpy()) def test_box_center_distance_loss_on_voxel_tensors_empty_inputs(self): inputs = self._get_empty_inputs() outputs = { standard_fields.DetectionResultFields.object_center_voxels: tf.zeros([1, 0, 3], dtype=tf.float32), } loss = box_prediction_losses.box_center_distance_loss_on_voxel_tensors( inputs=inputs, outputs=outputs, loss_type='huber') self.assertAllClose(loss.numpy(), 0.0) def test_box_center_distance_loss_on_voxel_tensors_correct_prediction(self): inputs = self._get_random_inputs() inputs[standard_fields.InputDataFields.num_valid_voxels] = tf.constant( [100], dtype=tf.int32) outputs = { standard_fields.DetectionResultFields.object_center_voxels: inputs[standard_fields.InputDataFields.object_center_voxels], } loss = box_prediction_losses.box_center_distance_loss_on_voxel_tensors( inputs=inputs, outputs=outputs, loss_type='huber') self.assertAllClose(loss.numpy(), 0.0) def test_box_center_distance_loss_on_voxel_tensors_relative(self): (inputs, outputs1, outputs2) = self._get_dictionaries_for_distance_loss_relative() inputs[standard_fields.InputDataFields.num_valid_voxels] = tf.constant( [1], dtype=tf.int32) loss1 = box_prediction_losses.box_center_distance_loss_on_voxel_tensors( inputs=inputs, 
outputs=outputs1, loss_type='huber') loss2 = box_prediction_losses.box_center_distance_loss_on_voxel_tensors( inputs=inputs, outputs=outputs2, loss_type='huber') self.assertGreater(loss2.numpy(), loss1.numpy()) def test_box_corner_distance_loss_on_voxel_tensors_empty_inputs(self): inputs = self._get_empty_inputs() inputs[standard_fields.InputDataFields.num_valid_voxels] = tf.constant( [0], dtype=tf.int32) outputs = { standard_fields.DetectionResultFields.object_rotation_matrix_voxels: tf.zeros([1, 0, 3, 3], dtype=tf.float32), standard_fields.DetectionResultFields.object_length_voxels: tf.zeros([1, 0, 1], dtype=tf.float32), standard_fields.DetectionResultFields.object_height_voxels: tf.zeros([1, 0, 1], dtype=tf.float32), standard_fields.DetectionResultFields.object_width_voxels: tf.zeros([1, 0, 1], dtype=tf.float32), standard_fields.DetectionResultFields.object_center_voxels: tf.zeros([1, 0, 3], dtype=tf.float32), } loss = box_prediction_losses.box_corner_distance_loss_on_voxel_tensors( inputs=inputs, outputs=outputs, loss_type='normalized_huber') self.assertAllClose(loss.numpy(), 0.0) def test_box_corner_distance_loss_on_voxel_tensors_correct_prediction(self): inputs = self._get_random_inputs() inputs[standard_fields.InputDataFields.num_valid_voxels] = tf.constant( [100], dtype=tf.int32) outputs = { standard_fields.DetectionResultFields.object_rotation_matrix_voxels: inputs[standard_fields.InputDataFields.object_rotation_matrix_voxels ], standard_fields.DetectionResultFields.object_length_voxels: inputs[standard_fields.InputDataFields.object_length_voxels], standard_fields.DetectionResultFields.object_height_voxels: inputs[standard_fields.InputDataFields.object_height_voxels], standard_fields.DetectionResultFields.object_width_voxels: inputs[standard_fields.InputDataFields.object_width_voxels], standard_fields.DetectionResultFields.object_center_voxels: inputs[standard_fields.InputDataFields.object_center_voxels], } loss = 
box_prediction_losses.box_corner_distance_loss_on_voxel_tensors( inputs=inputs, outputs=outputs, loss_type='normalized_huber') self.assertAllClose(loss.numpy(), 0.0) def test_box_corner_distance_loss_on_voxel_tensors_relative(self): (inputs, outputs1, outputs2) = self._get_dictionaries_for_distance_loss_relative() inputs[standard_fields.InputDataFields.num_valid_voxels] = tf.constant( [1], dtype=tf.int32) loss1 = box_prediction_losses.box_corner_distance_loss_on_voxel_tensors( inputs=inputs, outputs=outputs1, loss_type='normalized_huber') loss2 = box_prediction_losses.box_corner_distance_loss_on_voxel_tensors( inputs=inputs, outputs=outputs2, loss_type='normalized_huber') self.assertGreater(loss2.numpy(), loss1.numpy()) def test_box_corner_distance_loss_on_object_tensors_correct_prediction(self): voxel_inputs = self._get_random_inputs() inputs = {} for key, value in standard_fields.get_input_voxel_to_object_field_mapping( ).items(): if key in voxel_inputs: inputs[value] = [voxel_inputs[key][0, Ellipsis]] outputs = { standard_fields.DetectionResultFields.objects_rotation_matrix: inputs[standard_fields.InputDataFields.objects_rotation_matrix], standard_fields.DetectionResultFields.objects_length: inputs[standard_fields.InputDataFields.objects_length], standard_fields.DetectionResultFields.objects_height: inputs[standard_fields.InputDataFields.objects_height], standard_fields.DetectionResultFields.objects_width: inputs[standard_fields.InputDataFields.objects_width], standard_fields.DetectionResultFields.objects_center: inputs[standard_fields.InputDataFields.objects_center], } loss = box_prediction_losses.box_corner_distance_loss_on_object_tensors( inputs=inputs, outputs=outputs, loss_type='normalized_huber') self.assertAllClose(loss.numpy(), 0.0) def test_box_corner_distance_loss_on_object_tensors_relative(self): (voxel_inputs, voxel_outputs1, voxel_outputs2) = self._get_dictionaries_for_distance_loss_relative() inputs = {} outputs1 = {} outputs2 = {} for key, value 
in standard_fields.get_input_voxel_to_object_field_mapping( ).items(): if key in voxel_inputs: inputs[value] = [voxel_inputs[key][0, Ellipsis]] for key, value in standard_fields.get_output_voxel_to_object_field_mapping( ).items(): if key in voxel_outputs1: outputs1[value] = [voxel_outputs1[key][0, Ellipsis]] for key, value in standard_fields.get_output_voxel_to_object_field_mapping( ).items(): if key in voxel_outputs2: outputs2[value] = [voxel_outputs2[key][0, Ellipsis]] loss1 = box_prediction_losses.box_corner_distance_loss_on_object_tensors( inputs=inputs, outputs=outputs1, loss_type='normalized_huber') loss2 = box_prediction_losses.box_corner_distance_loss_on_object_tensors( inputs=inputs, outputs=outputs2, loss_type='normalized_huber') self.assertGreater(loss2.numpy(), loss1.numpy()) if __name__ == '__main__': tf.test.main()
43.786744
80
0.689549
1,832
15,194
5.375
0.093886
0.10521
0.106022
0.116584
0.833553
0.791002
0.750482
0.725602
0.648116
0.599472
0
0.032531
0.215019
15,194
346
81
43.913295
0.793075
0.041266
0
0.59164
0
0
0.010998
0
0
0
0
0
0.03537
1
0.045016
false
0
0.009646
0.003215
0.067524
0
0
0
0
null
0
0
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
d57206ad9a91d92017235f9c2f6d4bd9c3b56194
7,007
py
Python
med_bot/LINE_bot/intent/Loki_body_part.py
marcnhwu/LokiHub
52d0bccf313c44f25dd79590cdef7b604cd37f1a
[ "MIT" ]
17
2020-11-25T07:40:18.000Z
2022-03-07T03:29:18.000Z
med_bot/LINE_bot/intent/Loki_body_part.py
marcnhwu/LokiHub
52d0bccf313c44f25dd79590cdef7b604cd37f1a
[ "MIT" ]
8
2020-12-18T13:23:59.000Z
2021-10-03T21:41:50.000Z
med_bot/LINE_bot/intent/Loki_body_part.py
marcnhwu/LokiHub
52d0bccf313c44f25dd79590cdef7b604cd37f1a
[ "MIT" ]
43
2020-12-02T09:03:57.000Z
2021-12-23T03:30:25.000Z
#!/usr/bin/env python3 # -*- coding:utf-8 -*- """ Loki module for body_part Input: inputSTR str, utterance str, args str[], resultDICT dict Output: resultDICT dict """ DEBUG_body_part = True userDefinedDICT = {} ChildLIST =["小孩","兒子","女兒"] # 將符合句型的參數列表印出。這是 debug 或是開發用的。 def debugInfo(inputSTR, utterance): if DEBUG_body_part: print("[body_part] {} ===> {}".format(inputSTR, utterance)) def getResult(inputSTR, utterance, args, resultDICT): debugInfo(inputSTR, utterance) if utterance == "上[顎]腫一塊": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[先前]有[耳朵][內部]疼痛": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[口腔][上][顎]有[血][絲]": # write your code here resultDICT["bodypart"]=args[2] pass if utterance == "[喉嚨]卡卡": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[喉部][兩側]還是會不[舒服]": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[嘴][上][長黑斑]": # write your code here if "風池穴" in inputSTR: resultDICT["bodypart"]="家醫" else: resultDICT["bodypart"]=args[0] pass if utterance == "[我][下][腹]痛[很久]": # write your code here resultDICT["bodypart"]=args[2] pass if utterance == "[我][喉嚨]痛到[耳朵]": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[我][外][陰部]長肉": # write your code here resultDICT["bodypart"]=args[2] pass if utterance == "[我]左側[風池穴]附近[經絡處]摸到[硬塊]": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[我][心臟]亂跳": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[我]昨天晚上[突然]發現[臉頰]下[方]靠近[脖子]的那個[地方]摸到一顆[腫塊]": # write your code here resultDICT["bodypart"]=args[2] pass if utterance == "[我]最近這一年[多][容易][小腿痠]": # write your code here resultDICT["bodypart"]=args[3] pass if utterance == "[我][有時][頭]會[突然]暈一下": # write your code here resultDICT["bodypart"]=args[2] pass if utterance == "[我][瓣膜]鬆掉": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[我][胸口]和[喉嚨][依舊]疼痛": # write your 
code here resultDICT["bodypart"]=args[1] pass if utterance == "[我][舌頭][側邊]已經破洞兩個[多]禮拜了": # write your code here resultDICT["bodypart"]=args[2] pass if utterance == "[我]覺得[頭]稍微暈暈的": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[我][雙腿][無力]": # write your code here resultDICT["bodypart"]=args[1] if "壓力" in inputSTR and ("大" in inputSTR): resultDICT["symptom"] = "身心" if "手" in inputSTR and ("打斷" in inputSTR): resultDICT["bodypart"] = "骨" pass if utterance == "[我]感覺[小拇指][快]脫落": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[我]扁[條線]化膿": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[我]擦[屁股]有[血]": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[我]有[點][胸][悶]": # write your code here resultDICT["bodypart"]=args[2] pass if utterance == "[我]稍微[頸椎][僵直]": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[我]開始[心跳]跳[很快]": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[早上]擤[鼻涕][也]是[綠色]": # write your code here resultDICT["bodypart"]=args[1] pass if utterance == "[眉間]下[方]還沒到[眼睛]的[地方]會痛": # write your code here resultDICT["bodypart"]=args[2] pass if utterance == "[眼睛][經常]不[舒服]": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[耳朵]有[聲音]": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[脖子]一顆[硬硬][小小]的": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[頭]很痛": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[頸部]有腫塊": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[頸部]長了[腫塊]": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[鼻子][裡面]感覺[很緊繃]": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[鼻子]開始會[長期]鼻塞": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[鼻樑][兩側]會痛": # write your code here 
resultDICT["bodypart"]=args[0] pass if utterance == "[鼻腔內]有東西": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "吞嚥[不適]": # write your code here if "吞嚥" in inputSTR: resultDICT["symptom"]="耳鼻喉" pass if utterance == "從[兩三年][前]就會頭暈還伴隨耳鳴,[胃]痛和失眠": # write your code here resultDICT["bodypart"]=args[2] pass if utterance == "轉動[脖子]會不[舒服]": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "我[兒子][肚子]不[舒服]": # args [兒子, 肚子, 舒服] resultDICT["child"]=args[0] resultDICT["bodypart"]=args[1] pass if utterance == "[肚子]不[舒服]": # write your code here resultDICT["bodypart"]=args[0] pass if utterance == "[脖子][一顆]硬硬的": # args [脖子, 一顆] resultDICT["bodypart"]=args[0] pass if utterance == "我[有時][頭]會稍微暈一下": # args [有時, 頭] resultDICT["bodypart"]=args[1] if utterance == "我[最近][一年]多容易[小腿]酸": # args [最近, 一年, 小腿] resultDICT["bodypart"]=args[2] if utterance == "我[心臟]痛痛的": # args [心臟] resultDICT["bodypart"]=args[0] if utterance == "[背]很酸": # args [背] resultDICT["bodypart"]=args[0] if utterance == "[眼][前]有小黑影": # args [眼, 前] resultDICT["bodypart"]=args[0] if utterance == "我[上][臂]痠痛": # args [上, 臂] resultDICT["bodypart"]=args[1] if utterance == "[我][牙齒]痛": resultDICT["bodypart"]=args[1] # args [我, 牙齒] if utterance == "我[骨頭]裂了": # args [骨頭] resultDICT["bodypart"]=args[0] return resultDICT
24.759717
67
0.513772
858
7,007
4.188811
0.238928
0.260434
0.306066
0.193934
0.70089
0.689761
0.655259
0.614357
0.579021
0.579021
0
0.011303
0.330812
7,007
283
68
24.759717
0.755172
0.175539
0
0.564417
0
0.006135
0.203629
0.023956
0
0
0
0.003534
0
1
0.01227
false
0.263804
0
0
0.018405
0.006135
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
d573f712c64baf883dcb999452a3a1ea6dd2a12d
536
py
Python
src/cr/vision/matrix.py
indigits/indigits-vision
317fbf70c558e8f9563c3d0ba3bebbc5f84af622
[ "Apache-2.0" ]
2
2021-11-02T10:09:47.000Z
2021-12-10T04:23:14.000Z
src/cr/vision/matrix.py
indigits/indigits-vision
317fbf70c558e8f9563c3d0ba3bebbc5f84af622
[ "Apache-2.0" ]
null
null
null
src/cr/vision/matrix.py
indigits/indigits-vision
317fbf70c558e8f9563c3d0ba3bebbc5f84af622
[ "Apache-2.0" ]
null
null
null
''' Helper functions to work with matrices ''' import numpy as np def random_unique_permutations(num_permutations, permutation_size): '''Returns a set of randomly chosen unique permutations Example: array([[3, 1, 2, 0], [1, 2, 0, 3], [2, 0, 3, 1]]) References: * https://stackoverflow.com/questions/45437988/numpy-random-choice-to-produce-a-2d-array-with-all-unique-values/45438143#45438143 ''' return np.random.rand(num_permutations, permutation_size).argsort(axis=-1)
26.8
137
0.666045
71
536
4.943662
0.633803
0.017094
0.148148
0.17094
0
0
0
0
0
0
0
0.088993
0.203358
536
19
138
28.210526
0.733021
0.597015
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
4
d587e5dcac687f50859cf21007ba6182885eed61
113
py
Python
pyzzy/compat.py
krakozaure/pyzzy
17a316c0ced8095b671186c73fb5bf1daa2c140b
[ "MIT" ]
null
null
null
pyzzy/compat.py
krakozaure/pyzzy
17a316c0ced8095b671186c73fb5bf1daa2c140b
[ "MIT" ]
null
null
null
pyzzy/compat.py
krakozaure/pyzzy
17a316c0ced8095b671186c73fb5bf1daa2c140b
[ "MIT" ]
null
null
null
import os import sys PY36_PLUS = sys.version_info >= (3, 6) fspath = str if not PY36_PLUS else os.fspath
16.142857
45
0.690265
20
113
3.75
0.7
0.213333
0
0
0
0
0
0
0
0
0
0.068966
0.230089
113
6
46
18.833333
0.793103
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
d5956e12a3474f258426ed287de386259311fce1
164
py
Python
block_1/case_input_year_2.py
erdyneevzt/stepik_python
014fb618426dbee7f76b317c539d7d0363c87d15
[ "MIT" ]
null
null
null
block_1/case_input_year_2.py
erdyneevzt/stepik_python
014fb618426dbee7f76b317c539d7d0363c87d15
[ "MIT" ]
null
null
null
block_1/case_input_year_2.py
erdyneevzt/stepik_python
014fb618426dbee7f76b317c539d7d0363c87d15
[ "MIT" ]
null
null
null
year = int(input()) if (year % 400 == 0): print("Високосный") elif (year % 4 == 0) and (year % 100 != 0): print("Високосный") else: print("Обычный")
20.5
43
0.542683
22
164
4.045455
0.636364
0.134831
0.359551
0
0
0
0
0
0
0
0
0.080645
0.243902
164
8
44
20.5
0.637097
0
0
0.285714
0
0
0.163636
0
0
0
0
0
0
1
0
false
0
0
0
0
0.428571
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
d5a1352d82e2cdf82e38fcd32b7789710095a437
1,295
py
Python
src/open_geodata/converts/others.py
open-geodata/open-geodata
609a040e06dab2e7c7c627b20640011a8b381c4d
[ "MIT" ]
null
null
null
src/open_geodata/converts/others.py
open-geodata/open-geodata
609a040e06dab2e7c7c627b20640011a8b381c4d
[ "MIT" ]
null
null
null
src/open_geodata/converts/others.py
open-geodata/open-geodata
609a040e06dab2e7c7c627b20640011a8b381c4d
[ "MIT" ]
null
null
null
import re # https://stackoverflow.com/questions/33997361/how-to-convert-degree-minute-second-to-degree-decimal # # Degrees, minutes, seconds to Decimal degrees # def dms2dd(degrees, minutes, seconds, direction): # dd = (float(degrees.replace(',', '.')) + # float(minutes.replace(',', '.'))/60 + # float(seconds.replace(',', '.'))/(60*60) # ) # if direction == 'E' or direction == 'N': # dd *= 1 # elif direction == 'W' or direction == 'O' or direction == 'S': # dd *= -1 # return dd # # Decimal degrees to Degrees, minutes, seconds # def dd2dms(deg): # d = int(deg) # md = abs(deg - d) * 60 # m = int(md) # sd = (md - m) * 60 # sd = round(sd, 10) # return [d, m, sd] # # Parse Values # def parse_dms(dms): # coord = re.split('[^\d\w\.,]+', dms) # lat = dms2dd(coord[0], coord[1], coord[2], coord[3]) # return (lat) # # Single Value # # DMS to DD # coordinates_dms = '53°19\'03,208\"S' # coordinates_dd = parse_dms(coordinates_dms) # print('A coordenada DMS "{}" foi convertida para coordenada DD "{}"'.format(coordinates_dms, coordinates_dd)) # # DD to DMS # d, m, s = dd2dms(coordinates_dd) # print('A coordenada DD "{}" foi convertida em "{}", "{}" e "{}"'.format(coordinates_dd, d, m, s))
31.585366
111
0.57529
174
1,295
4.235632
0.396552
0.070556
0.085482
0
0
0
0
0
0
0
0
0.038845
0.22471
1,295
41
112
31.585366
0.694223
0.931274
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
d5af28c535a27374209bfdc518531cfe9fc597e3
6,532
py
Python
pyroomacoustics/realtime/tests/test_stft.py
pswietojanski/pyroomacoustics
c1e7d2b5450be84d78f3a539f22c770f62cebad4
[ "MIT" ]
null
null
null
pyroomacoustics/realtime/tests/test_stft.py
pswietojanski/pyroomacoustics
c1e7d2b5450be84d78f3a539f22c770f62cebad4
[ "MIT" ]
null
null
null
pyroomacoustics/realtime/tests/test_stft.py
pswietojanski/pyroomacoustics
c1e7d2b5450be84d78f3a539f22c770f62cebad4
[ "MIT" ]
1
2019-09-11T06:11:11.000Z
2019-09-11T06:11:11.000Z
from __future__ import division, print_function from unittest import TestCase import numpy as np from scipy.signal import fftconvolve import pyroomacoustics as pra ''' We create a signal, a simple filter and compute their convolution. Then we test STFT block procesing with and without overlap, and with and without filtering. ''' # test parameters tol = 1e-6 np.random.seed(0) D = 4 transform = 'numpy' # 'numpy', 'pyfftw', or 'mkl' # filter to apply h_len = 99 h = np.ones((h_len, D)) h /= np.linalg.norm(h, axis=0) # test signal (noise) x = np.random.randn(100000, D) # convolved signal y = np.zeros((x.shape[0] + h_len - 1, x.shape[1])) for i in range(x.shape[1]): y[:,i] = fftconvolve(x[:,i], h[:,i]) def no_overlap_no_filter(D): if D == 1: x_local = x[:,0] else: x_local = x[:,:D] # parameters block_size = 512 # make sure the FFT size is a power of 2 hop = block_size # no overlap # Create the STFT object stft = pra.realtime.STFT(block_size, hop=hop, channels=D, transform=transform) # collect the processed blocks processed_x = np.zeros(x_local.shape) # process the signals while full blocks are available n = 0 while x_local.shape[0] - n > hop: # go to frequency domain stft.analysis(x_local[n:n+hop,]) # copy processed block in the output buffer processed_x[n:n+hop,] = stft.synthesis() n += hop error = np.max(np.abs(x_local[:n,] - processed_x[:n,])) return error def no_overlap_with_filter(D): if D == 1: x_local = x[:,0] y_local = y[:,0] h_local = h[:,0] else: x_local = x[:,:D] y_local = y[:,:D] h_local = h[:,:D] # parameters block_size = 512 - h_len + 1 # make sure the FFT size is a power of 2 hop = block_size # no overlap # Create the STFT object stft = pra.realtime.STFT(block_size, hop=hop, channels=D, transform=transform) # setup the filter stft.set_filter(h_local, zb=h_len - 1) # collect the processed blocks processed_x = np.zeros(x_local.shape) # process the signals while full blocks are available n = 0 while x_local.shape[0] - n > hop: # go to frequency domain 
stft.analysis(x_local[n:n+hop,]) stft.process() # apply the filter # copy processed block in the output buffer processed_x[n:n+hop,] = stft.synthesis() n += hop error = np.max(np.abs(y_local[:n,] - processed_x[:n,])) return error def with_half_overlap_no_filter(D): if D == 1: x_local = x[:,0] else: x_local = x[:,:D] # parameters block_size = 512 # make sure the FFT size is a power of 2 hop = block_size // 2 # half overlap window = pra.hann(block_size) # the analysis window # Create the STFT object stft = pra.realtime.STFT(block_size, hop=hop, analysis_window=window, channels=D, transform=transform) # collect the processed blocks processed_x = np.zeros(x_local.shape) # process the signals while full blocks are available n = 0 while x_local.shape[0] - n > hop: # go to frequency domain stft.analysis(x_local[n:n+hop,]) # copy processed block in the output buffer processed_x[n:n+hop,] = stft.synthesis() n += hop error = np.max(np.abs(x_local[:n-hop,] - processed_x[hop:n,])) return error def with_half_overlap_with_filter(D): if D == 1: x_local = x[:,0] y_local = y[:,0] h_local = h[:,0] else: x_local = x[:,:D] y_local = y[:,:D] h_local = h[:,:D] # parameters block_size = 512 - h_len + 1 # make sure the FFT size is a power of 2 hop = block_size // 2 # half overlap window = pra.hann(block_size) # the analysis window # Create the STFT object stft = pra.realtime.STFT(block_size, hop=hop, analysis_window=window, channels=D, transform=transform) # setup the filter stft.set_filter(h_local, zb=h_len - 1) # collect the processed blocks processed_x = np.zeros(x_local.shape) # process the signals while full blocks are available n = 0 while x.shape[0] - n > hop: # go to frequency domain stft.analysis(x_local[n:n+hop,]) stft.process() # filtering # copy processed block in the output buffer processed_x[n:n+hop,] = stft.synthesis() n += hop error = np.max(np.abs(y_local[:n-hop,] - processed_x[hop:n,])) return error class TestSTFT(TestCase): def test_no_overlap_no_filter_mono(self): 
error = no_overlap_no_filter(1) self.assertTrue(error < tol) def test_no_overlap_no_filter_multichannel(self): error = no_overlap_no_filter(D) self.assertTrue(error < tol) def test_no_overlap_with_filter_mono(self): error = no_overlap_with_filter(1) self.assertTrue(error < tol) def test_no_overlap_with_filter_multichannel(self): error = no_overlap_with_filter(D) self.assertTrue(error < tol) def test_with_half_overlap_no_filter_mono(self): error = with_half_overlap_no_filter(1) self.assertTrue(error < tol) def test_with_half_overlap_no_filter_multichannel(self): error = with_half_overlap_no_filter(D) self.assertTrue(error < tol) def test_with_half_overlap_with_filter_mono(self): error = with_half_overlap_with_filter(1) self.assertTrue(error < tol) def test_with_half_overlap_with_filter_multichannel(self): error = with_half_overlap_with_filter(D) self.assertTrue(error < tol) if __name__ == "__main__": error = no_overlap_no_filter(1) print('no overlap, no filter, mono:', error) error = no_overlap_no_filter(D) print('no overlap, no filter, multichannel:', error) error = no_overlap_with_filter(1) print('no overlap, with filter, mono:', error) error = no_overlap_with_filter(D) print('no overlap, with filter, multichannel:', error) error = with_half_overlap_no_filter(1) print('with half overlap, no filter, mono:', error) error = with_half_overlap_no_filter(D) print('with half overlap, no filter, multichannel:', error) error = with_half_overlap_with_filter(1) print('with half overlap, with filter, mono:', error) error = with_half_overlap_with_filter(D) print('with half overlap, with filter, multichannel:', error)
26.128
74
0.640998
975
6,532
4.092308
0.127179
0.031579
0.067669
0.038346
0.862155
0.850125
0.750877
0.67193
0.646617
0.632581
0
0.013693
0.250919
6,532
249
75
26.232932
0.801758
0.171617
0
0.692857
0
0
0.058665
0
0
0
0
0
0.057143
1
0.085714
false
0
0.035714
0
0.157143
0.064286
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
6370402240cc7475da992df26c30452ffea75bfe
1,694
py
Python
terralib/palette.py
nicolebranagan/terraformer
943e92939c958429e7d14b27bcff96c0bd27a7bd
[ "MIT" ]
11
2021-01-04T01:24:31.000Z
2021-09-26T15:48:06.000Z
terralib/palette.py
nicolebranagan/terraformer
943e92939c958429e7d14b27bcff96c0bd27a7bd
[ "MIT" ]
null
null
null
terralib/palette.py
nicolebranagan/terraformer
943e92939c958429e7d14b27bcff96c0bd27a7bd
[ "MIT" ]
null
null
null
from enum import Enum import math gameboy = [ (184, 216, 128), (136, 176, 88), (72, 104, 32), (40, 48, 24)] ega = [ (0,0,0), (0,0,170), (0,170,0), (0,170,170), (170,0,0), (170,0,170), (170,85,00), (170,170,170), (85,85,85), (85,85,255), (85,255,85), (85,255,255), (255,85,85), (255,85,255), (255,255,85), (255,255,255)] win16 = [ (0,0,0), (0,0,128), (0,128,0), (0,128,128), (128,0,0), (128,0,128), (192,192,0), (192,192,192), (128,128,128), (0,0,255), (0,255,0), (0,255,255), (255,0,0), (255,0,255), (255,255,0), (255,255,255)] db16 = [ (20, 12, 28), (68, 36, 52), (48, 52, 109), (78, 74, 78), (133, 76, 48), (52, 101, 36), (208, 70, 72), (117, 113, 97), (89, 125, 206), (210, 125, 44), (133, 149, 161), (109, 170, 44), (210, 170, 153), (109, 194, 202), (218, 212, 94), (222, 238, 214)] none = [ (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0), (0,0,0)] class Constraint(Enum): Genesis = 0 def constrain(l, c): constrainfunc = { Constraint.Genesis : _constrain_genesis, }.get(c,lambda x: None) for i in range(0, len(l)): l[i] = constrainfunc(l[i]) def _constrain_genesis(c): return (math.floor(c[0]/32)*32, math.floor(c[1]/32)*32, math.floor(c[2]/32)*32) def rotate(pal): pallen = len(pal) return [pal[(i + 16) % pallen] for i in range(0, pallen)]
16.94
61
0.431523
286
1,694
2.541958
0.293706
0.170564
0.21458
0.269601
0.352132
0.066025
0.066025
0.066025
0.066025
0.066025
0
0.374784
0.316411
1,694
99
62
17.111111
0.253022
0
0
0.211111
0
0
0
0
0
0
0
0
0
1
0.033333
false
0
0.022222
0.011111
0.1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
639906909b4178e7e89b85d40b4864b05b2f05c2
649
py
Python
src/tests/BoardTest.py
bainer101/MinesweeperBot
8fe9076fd309fb5c14a2813ebf017c62fa35d9f3
[ "MIT" ]
null
null
null
src/tests/BoardTest.py
bainer101/MinesweeperBot
8fe9076fd309fb5c14a2813ebf017c62fa35d9f3
[ "MIT" ]
null
null
null
src/tests/BoardTest.py
bainer101/MinesweeperBot
8fe9076fd309fb5c14a2813ebf017c62fa35d9f3
[ "MIT" ]
null
null
null
import unittest from ..classes.Board import Board class BoardTest(unittest.TestCase): def setUp(self): self.board = Board(2, 5) def test_constructor_and_get_board(self): self.assertEqual(self.board.getBoard(), [[None, None], [None, None], [None, None], [None, None], [None, None]]) def test_get_board_size(self): self.assertEqual(self.board.getBoardSize(), (2, 5)) def test_change_dimensions(self): self.board.changeDimensions(5, 2) self.assertEqual(self.board.getBoard(), [[None, None, None, None, None], [None, None, None, None, None]]) if __name__ == '__main__': unittest.main()
29.5
119
0.662558
84
649
4.916667
0.333333
0.348668
0.464891
0.542373
0.42615
0.348668
0.348668
0.348668
0.348668
0.348668
0
0.011321
0.183359
649
21
120
30.904762
0.767925
0
0
0
0
0
0.012327
0
0
0
0
0
0.214286
1
0.285714
false
0
0.142857
0
0.5
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
63ab6e66b9bb223da126fd7d73c66e9ec95ce18c
254
py
Python
utils/__init__.py
Resist4263/span-aste-1
13815a262638592506b445315d03df8db965947b
[ "Apache-2.0" ]
7
2022-01-19T07:59:51.000Z
2022-02-18T20:48:41.000Z
utils/__init__.py
Resist4263/span-aste-1
13815a262638592506b445315d03df8db965947b
[ "Apache-2.0" ]
1
2022-02-19T09:13:03.000Z
2022-02-21T13:49:21.000Z
utils/__init__.py
Resist4263/span-aste-1
13815a262638592506b445315d03df8db965947b
[ "Apache-2.0" ]
1
2022-02-09T05:52:51.000Z
2022-02-09T05:52:51.000Z
#!/usr/bin/env python # -*- coding: UTF-8 -*- """ ================================================= @Project :span-aste @IDE :PyCharm @Author :Mr. Wireless @Date :2022/1/19 13:49 @Desc : ================================================== """
21.166667
50
0.334646
22
254
3.863636
1
0
0
0
0
0
0
0
0
0
0
0.054795
0.137795
254
11
51
23.090909
0.333333
0.952756
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
63d2306272dca8aa613c441b15e1b2da981dcf29
29
py
Python
lib/python3.4/hashlib.py
caiocsalvador/whats_the_craic
c49ef62f1acd7379f6fd90c2b93aa1fa00c8661d
[ "MIT" ]
7
2017-04-26T12:28:22.000Z
2021-02-09T18:59:50.000Z
django-ng/lib/python3.4/hashlib.py
Arsalen/BusinessStrategies
209e57340359af3ea063c064982198848dc36c5f
[ "MIT" ]
13
2015-12-04T03:38:37.000Z
2015-12-12T00:15:46.000Z
django-ng/lib/python3.4/hashlib.py
Arsalen/BusinessStrategies
209e57340359af3ea063c064982198848dc36c5f
[ "MIT" ]
8
2017-06-01T08:42:16.000Z
2020-07-23T12:30:19.000Z
/usr/lib/python3.4/hashlib.py
29
29
0.793103
6
29
3.833333
1
0
0
0
0
0
0
0
0
0
0
0.068966
0
29
1
29
29
0.724138
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
8920cdef2debc4333b1bdaa7f4cb5755fb2bdca8
54
py
Python
nautobot_netbox_importer/tests/__init__.py
Thetacz/nautobot-plugin-netbox-importer
cbf95e3c471fc4b29aeea338fbeeea7476f1fa19
[ "Apache-2.0" ]
11
2021-02-24T17:54:54.000Z
2022-01-25T18:36:21.000Z
nautobot_netbox_importer/tests/__init__.py
Thetacz/nautobot-plugin-netbox-importer
cbf95e3c471fc4b29aeea338fbeeea7476f1fa19
[ "Apache-2.0" ]
44
2021-02-28T20:21:52.000Z
2022-02-10T21:40:35.000Z
nautobot_netbox_importer/tests/__init__.py
Thetacz/nautobot-plugin-netbox-importer
cbf95e3c471fc4b29aeea338fbeeea7476f1fa19
[ "Apache-2.0" ]
9
2021-02-28T17:05:53.000Z
2021-10-05T23:48:06.000Z
"""Unit tests for nautobot-netbox-importer plugin."""
27
53
0.740741
7
54
5.714286
1
0
0
0
0
0
0
0
0
0
0
0
0.092593
54
1
54
54
0.816327
0.87037
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
89349a272ec959ef576ef2a6ef5ee9812fad3141
401
py
Python
robotexception.py
gwillem/rgkit
1e9a1b8e77012853f070c9fbe0c26cfde25b355d
[ "Unlicense" ]
1
2021-04-30T20:59:32.000Z
2021-04-30T20:59:32.000Z
robotexception.py
gwillem/rgkit
1e9a1b8e77012853f070c9fbe0c26cfde25b355d
[ "Unlicense" ]
null
null
null
robotexception.py
gwillem/rgkit
1e9a1b8e77012853f070c9fbe0c26cfde25b355d
[ "Unlicense" ]
null
null
null
class RobotException(Exception): pass class UnitGuardCollision(RobotException): def __init__(self, other_robot): self.other_robot = other_robot class UnitMoveCollision(RobotException): def __init__(self, other_robots): self.other_robots = other_robots class UnitBlockCollision(RobotException): def __init__(self, other_robot): self.other_robot = other_robot
26.733333
41
0.750623
43
401
6.511628
0.302326
0.192857
0.2
0.267857
0.528571
0.421429
0.421429
0.421429
0.421429
0.421429
0
0
0.174564
401
14
42
28.642857
0.845921
0
0
0.363636
0
0
0
0
0
0
0
0
0
1
0.272727
false
0.090909
0
0
0.636364
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
8953b730a6bfb16f23718411024c10f2ffa6aa80
577
py
Python
games/stardash/__init__.py
AGausmann/Joueur.py
f5db41839c7d5462d0043a2f73bd2a4087fd4edf
[ "MIT" ]
null
null
null
games/stardash/__init__.py
AGausmann/Joueur.py
f5db41839c7d5462d0043a2f73bd2a4087fd4edf
[ "MIT" ]
null
null
null
games/stardash/__init__.py
AGausmann/Joueur.py
f5db41839c7d5462d0043a2f73bd2a4087fd4edf
[ "MIT" ]
null
null
null
# DO NOT MODIFY THESE IMPORTS from games.stardash.ai import AI from games.stardash.game import Game from games.stardash.body import Body from games.stardash.game_object import GameObject from games.stardash.job import Job from games.stardash.player import Player from games.stardash.projectile import Projectile from games.stardash.unit import Unit # <<-- Creer-Merge: init -->> - Code you add between this comment and the end comment will be preserved between Creer re-runs. # if you need to initialize this module with custom logic do so here # <<-- /Creer-Merge: init -->>
41.214286
126
0.785095
90
577
5.022222
0.5
0.159292
0.300885
0.09292
0
0
0
0
0
0
0
0
0.138648
577
13
127
44.384615
0.909457
0.429809
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
8955d5a82e7fdf689d44abe4997ce00ee141dc74
74
py
Python
Comprehensions - Exrecise/Word filter.py
DiyanKalaydzhiev23/Advanced---Python
ed2c60bb887c49e5a87624719633e2b8432f6f6b
[ "MIT" ]
null
null
null
Comprehensions - Exrecise/Word filter.py
DiyanKalaydzhiev23/Advanced---Python
ed2c60bb887c49e5a87624719633e2b8432f6f6b
[ "MIT" ]
null
null
null
Comprehensions - Exrecise/Word filter.py
DiyanKalaydzhiev23/Advanced---Python
ed2c60bb887c49e5a87624719633e2b8432f6f6b
[ "MIT" ]
null
null
null
print('\n'.join([word for word in input().split() if len(word) % 2 == 0]))
74
74
0.594595
14
74
3.142857
0.857143
0
0
0
0
0
0
0
0
0
0
0.03125
0.135135
74
1
74
74
0.65625
0
0
0
0
0
0.026667
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
8963f10bc61168f7101328e744c35117e9512a5b
223
py
Python
persian_tools/phone_number/exceptions.py
webneshin/py-persian-tools
fcee9ad1af18866c6c7409aed9079e08d0a21595
[ "MIT" ]
77
2021-02-15T16:52:41.000Z
2022-03-16T06:57:19.000Z
persian_tools/phone_number/exceptions.py
mhkhademi/py-persian-tools
64d454a9e130c8d35fbf3671901cfa25fb25f456
[ "MIT" ]
4
2021-02-23T06:15:05.000Z
2021-12-08T16:05:18.000Z
persian_tools/phone_number/exceptions.py
mhkhademi/py-persian-tools
64d454a9e130c8d35fbf3671901cfa25fb25f456
[ "MIT" ]
12
2021-02-23T15:21:22.000Z
2022-01-30T11:22:58.000Z
class InvalidPhoneNumber(Exception): def __init__(self, phone_number): self.phone_number = phone_number self.message = phone_number + ' is an invalid phone number' super().__init__(self.message)
37.166667
67
0.699552
26
223
5.538462
0.5
0.381944
0.208333
0
0
0
0
0
0
0
0
0
0.210762
223
5
68
44.6
0.818182
0
0
0
0
0
0.121076
0
0
0
0
0
0
1
0.2
false
0
0
0
0.4
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
89950dbf1c5046fefcf2b48ee398019af12f6fb7
8,169
py
Python
Vex.py
Paulon0n/the-space-game
d0935bb69a0ee7ab64de39b232f3565b6e1afa30
[ "MIT" ]
null
null
null
Vex.py
Paulon0n/the-space-game
d0935bb69a0ee7ab64de39b232f3565b6e1afa30
[ "MIT" ]
null
null
null
Vex.py
Paulon0n/the-space-game
d0935bb69a0ee7ab64de39b232f3565b6e1afa30
[ "MIT" ]
null
null
null
# Vex lib import math import Environment from MyMaths import sgn def VecFromList(lst): sz = len(lst) if sz == 2:return Vex(lst[0], lst[1]) if sz == 3:return Vex(lst[0], lst[1], lst[2]) class Vex: def __init__(self, x, y, z=0): self.pos = [x, y, z] # Constructors def copy(self):return Vex(self.pos[0], self.pos[1], self.pos[2]) # Set def set(self, x, y, z):self.pos[0] = x;self.pos[1] = y;self.pos[2] = z;return self def set2D(self, x, y):self.pos[0] = x;self.pos[1] = y;return self def setX(self, x):self.pos[0] = x;return self def setY(self, y):self.pos[1] = y;return self def setZ(self, z):self.pos[2] = z;return self def setVector(self, setter):self.set(setter.pos[0], setter.pos[1], setter.pos[2]);return self def assign(self, a):self.set(a.pos[0], a.pos[1], a.pos[2]);return self # Get def Pos(self):return self.pos def x(self):return self.pos[0] def y(self):return self.pos[1] def z(self):return self.pos[2] def X(self):return self.pos[0] def Y(self):return self.pos[1] def Z(self):return self.pos[2] def getX(self):return self.pos[0] def getY(self):return self.pos[1] def getZ(self):return self.pos[2] def p2D(self):return [self.pos[0], self.pos[1]] # Scalars def integer(self):return Vex(int(self.pos[0]), int(self.pos[1]), int(self.pos[2])) def scale(self, a):return Vex(self.pos[0] * a, self.pos[1] * a, self.pos[2] * a) def offset(self, a):return Vex(self.pos[0] + a, self.pos[1] + a, self.pos[2] + a) def selfScale(self, a):return self.set(self.pos[0] * a, self.pos[1] * a, self.pos[2] * a) def translate(self, x, y, z):self.pos[0] += x;self.pos[1] += y;self.pos[2] += z;return self def dot(self, a):return self.pos[0] * a.pos[0] + self.pos[1] * a.pos[1] + self.pos[2] * a.pos[2] def crossP(self, a):return self.pos[0] * a.pos[0] - self.pos[1] * a.pos[1] - self.pos[2] * a.pos[2] def length(self):return math.sqrt(self.pos[0] * self.pos[0] + self.pos[1] * self.pos[1] + self.pos[2] * self.pos[2]) def sqrLength(self):return self.pos[0]**2 + self.pos[1]**2 + self.pos[2]**2 def 
dist(self, a):return math.sqrt((self.pos[0] - a.pos[0])**2 + (self.pos[1] - a.pos[1])**2 + (self.pos[2] - a.pos[2])**2) def dist2D(self, a):return math.sqrt((self.pos[0] - a.pos[0])**2 + (self.pos[1] - a.pos[1])**2) def inverse(self):return Vex(-self.x(), -self.y(), -self.z()) def cross(self, a): return Vex(self.pos[1]*a.pos[2] - self.pos[2]*a.pos[1], self.pos[2]*a.pos[0] - self.pos[0]*a.pos[2], self.pos[0]*a.pos[1] - self.pos[1]*a.pos[0]) def normalize(self): l = self.length() if l > .0000001: l = 1.0/l return Vex(self.pos[0]*l, self.pos[1]*l, self.pos[2]*l) x_ = sgn(self.pos[0]) y_ = sgn(self.pos[1]) z_ = sgn(self.pos[2]) if ((x_ != 0) or (y_!=0)or(z_!=0)): l = 1.0/math.sqrt(x_**2+y_**2+z_**2) return Vex(x_*l, y_*l, z_*l) return Vex(0, 0, 1) def selfNormalize(self): l = self.length() if l > .0000001:l = 1.0/l;return self.set( self.pos[0]*l, self.pos[1]*l, self.pos[2]*l) x_ = sgn(self.pos[0]);y_ = sgn(self.pos[1]);z_ = sgn(self.pos[2]) if ((x_ != 0) or (y_!=0)or(z_!=0)):l = 1.0/math.sqrt(x_**2+y_**2+z_**2);return self.set(x_*l, y_*l, z_*l) return self.set( 0, 1, 0) def setLength(self, s): l = self.sqrLength() if l > .0000001:l = s/math.sqrt(l);return self.scale(l) self.pos[0] = sgn(self.pos[0]);self.pos[1] = sgn(self.pos[1]);self.pos[0] = sgn(self.pos[2]) if ((self.pos[0] != 0) or (self.pos[1] != 0) or (self.pos[2] != 0)):l = 1.0/self.length();return self.scale(l) return self.set(0, s, 0) # ; Transform # ; vs Vex def add(self, a):return Vex(self.pos[0] + a.pos[0], self.pos[1] + a.pos[1], self.pos[2] + a.pos[2]) def selfAdd(self, a):return self.set(self.pos[0] + a.pos[0], self.pos[1] + a.pos[1], self.pos[2] + a.pos[2]) def selfAdd2D(self, x, y):return self.set2D(self.pos[0] + x, self.pos[1] + y) def AddSelf(self, x, y, z):return self.set(self.pos[0] + x, self.pos[1] + y, self.pos[2] + z) def sub(self, a):return Vex(self.pos[0] - a.pos[0], self.pos[1] - a.pos[1], self.pos[2] - a.pos[2]) def selfSub(self, a):return self.set(self.pos[0] - a.pos[0], self.pos[1] - 
a.pos[1], self.pos[2] - a.pos[2]) def mul(self, a):return Vex(self.pos[0] * a.pos[0], self.pos[1] * a.pos[1], self.pos[2] * a.pos[2]) def selfMul(self, a):return self.set(self.pos[0] * a.pos[0], self.pos[1] * a.pos[1], self.pos[2] * a.pos[2]) def div(self, a):return Vex(self.pos[0] / a.pos[0], self.pos[1] / a.pos[1], self.pos[2] / a.pos[2]) def selfDiv(self, a):return self.set(self.pos[0] / a.pos[0], self.pos[1] / a.pos[1], self.pos[2] / a.pos[2]) def __add__(self, a): if isinstance(a, Vex): return Vex(self.pos[0] + a.pos[0], self.pos[1] + a.pos[1], self.pos[2] + a.pos[2]) return self.offset(a) def __sub__(self, a): if isinstance(a, Vex): return Vex(self.pos[0] - a.pos[0], self.pos[1] - a.pos[1], self.pos[2] - a.pos[2]) return self.offset(-a) def __mul__(self, a): if isinstance(a, Vex): return Vex(self.pos[0] * a.pos[0], self.pos[1] * a.pos[1], self.pos[2] * a.pos[2]) return self.scale(a) def __div__(self, a): if isinstance(a, Vex): return Vex(self.pos[0] / a.pos[0], self.pos[1] / a.pos[1], self.pos[2] / a.pos[2]) return self.scale(1/a) def __eq__(self, a):return (self.pos == a.pos) def __iadd__(self, a):return self.set(self.pos[0] + a.pos[0], self.pos[1] + a.pos[1], self.pos[2] + a.pos[2]) def __isub__(self, a):return self.set(self.pos[0] - a.pos[0], self.pos[1] - a.pos[1], self.pos[2] - a.pos[2]) def __imul__(self, a):return self.set(self.pos[0] * a.pos[0], self.pos[1] * a.pos[1], self.pos[2] * a.pos[2]) def __idiv__(self, a):return self.set(self.pos[0] / a.pos[0], self.pos[1] / a.pos[1], self.pos[2] / a.pos[2]) def __str__(self): return str(self.pos) def ang2D(self, a): dx = self.X() - a.X() dy = self.Y() - a.Y() return math.atan2(dy,dx) # def rotate2d(pos, angle_rad): # x, y = pos # s, c = math.sin(angle_rad), math.cos(angle_rad) # return x * c - y * s, y * c + x * s def addVector(self, mag, angle_rad): self += Vex(math.sin(angle_rad)*mag, math.cos(angle_rad)*mag) return self def rotate2dXY(self, angle_rad): if not isinstance(angle_rad, list): s, c = 
math.sin(angle_rad), math.cos(angle_rad) else: s, c = angle_rad return Vex(self.pos[0] * c - self.pos[1] * s, self.pos[1] * c + self.pos[0] * s, self.z()) def rotate2dXZ(self, angle_rad): if not isinstance(angle_rad, list): s, c = math.sin(angle_rad), math.cos(angle_rad) else: s, c = angle_rad return Vex(self.pos[0] * c - self.pos[2] * s, self.x(), self.pos[2] * c + self.pos[0] * s) def rotate2dYZ(self, offset, angle_rad): y = self.pos[1]+offset if not isinstance(angle_rad, list): s, c = math.sin(angle_rad), math.cos(angle_rad) else: s, c = angle_rad return Vex(self.x(), y * c - self.pos[2] * s, self.pos[2] * c + y * s) #env, cam, self.pos, self.ang def projectedPos(self, cam): env = Environment.Environment v = (Vex(0, 0, 0)- cam.pos+self).rotate2dXY(cam.rot[1]).rotate2dYZ(cam.mov[2], cam.rot[0]) f = env.fov / (v.z() +.00000001) if(f < env.lim): f = 1000 v = v * f if f < 0:f = 0 if f >100:f = 100 v.setZ(f) v += env.center if env.inScreen(v.p2D()): return (True, v) return (False, v)
47.494186
128
0.537642
1,510
8,169
2.849669
0.078808
0.248896
0.096677
0.052289
0.67511
0.63235
0.563793
0.544969
0.521961
0.517778
0
0.054134
0.253764
8,169
171
129
47.77193
0.651739
0.029135
0
0.126761
0
0
0
0
0
0
0
0
0
1
0.443662
false
0
0.021127
0.28169
0.626761
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
89ab2013d5b1d04b8a5f4cd6688f39c58e7448df
9,652
py
Python
portal/academy/tests/test_services.py
LDSSA/portal
9561da1e262678fe68dcf51c66007c0fb13eb51a
[ "MIT" ]
2
2020-11-09T03:48:36.000Z
2021-07-02T14:30:09.000Z
portal/academy/tests/test_services.py
LDSSA/portal
9561da1e262678fe68dcf51c66007c0fb13eb51a
[ "MIT" ]
132
2020-04-25T15:57:56.000Z
2022-03-10T19:15:51.000Z
portal/academy/tests/test_services.py
LDSSA/portal
9561da1e262678fe68dcf51c66007c0fb13eb51a
[ "MIT" ]
1
2020-10-24T16:15:57.000Z
2020-10-24T16:15:57.000Z
import pytest from datetime import datetime from portal.academy.models import Grade from portal.academy.services import ( check_graduation_status, check_complete_specialization, csvdata, ) from portal.academy.services import get_last_grade, get_best_grade from portal.hackathons.models import Attendance @pytest.fixture def grade_slu1_failed(student, slu1): grade = Grade.objects.create( user=student, unit=slu1, created=datetime(year=2021, month=8, day=15), status="graded", score=1, message="", ) return grade @pytest.fixture def grade_slu2_failed(student, slu2): grade = Grade.objects.create( user=student, unit=slu2, created=datetime(year=2021, month=8, day=15), status="graded", score=14, message="", ) return grade @pytest.fixture def attendances_graduate_ok(student, hackathon1, hackathon2, hackathon3): """ Set student attendances for case when no hacakhon was missed """ attendances = [] for hack in [hackathon1, hackathon2, hackathon3]: attendance = Attendance.objects.create( hackathon=hack, user=student, present=True, ) attendances.append(attendance) return attendances @pytest.fixture def attendances_graduate_ok_one_missed(student, hackathon1, hackathon2, hackathon3): """ Set student attendances for case when only one non-mandatory hackathon was missed """ attendances = [] # Set first hackathon as missed attendance = Attendance.objects.create( hackathon=hackathon2, user=student, present=False, ) attendances.append(attendance) for hack in [hackathon1, hackathon3]: attendance = Attendance.objects.create( hackathon=hack, user=student, present=True, ) attendances.append(attendance) return attendances @pytest.fixture def attendances_graduation_fail_first_missed( student, hackathon1, hackathon2, hackathon3 ): """ Set student attendances for case when first hackathon was missed """ attendances = [] # Set first hackathon as missed attendance = Attendance.objects.create( hackathon=hackathon1, user=student, present=False, ) attendances.append(attendance) for hack in 
[hackathon2, hackathon3]: attendance = Attendance.objects.create( hackathon=hack, user=student, present=True, ) attendances.append(attendance) return attendances @pytest.fixture def attendances_graduation_fail_too_many_missed( student, hackathon1, hackathon2, hackathon3 ): """ Set student attendances for case when too many hackathons were missed, even if the first one was attended """ attendances = [] # Set first hackathon as missed attendance = Attendance.objects.create( hackathon=hackathon1, user=student, present=True, ) attendances.append(attendance) for hack in [hackathon2, hackathon3]: attendance = Attendance.objects.create( hackathon=hack, user=student, present=False, ) attendances.append(attendance) return attendances @pytest.mark.django_db(transaction=True) def test_check_graduation_status_ok(db, student, attendances_graduate_ok): """ Checks student can graduate when all conditions are met: - student has attended first hackathon - student has missed at most only 1 hackathon Test case when student has attended all hackathons """ assert check_graduation_status(student) is True @pytest.mark.django_db(transaction=True) def test_check_graduation_status_ok_missed_one_not_first( db, student, attendances_graduate_ok_one_missed ): """ Checks student can graduate when all conditions are met: - student has attended first hackathon - student has missed at most only 1 hackathon Test case when student has missed only 1 hackathon (not first) """ assert check_graduation_status(student) is True @pytest.mark.django_db(transaction=True) def test_check_graduation_status_fail_missed_first( db, student, attendances_graduation_fail_first_missed ): """ Checks student can not graduate when one of the following conditions are met: - student has missed first hackathon - student has missed at more than 1 hackathon Test case when student has missed first hackathon """ assert check_graduation_status(student) is False @pytest.mark.django_db(transaction=True) def 
test_check_graduation_status_fail_missed_too_many( db, student, attendances_graduation_fail_too_many_missed ): """ Checks student can not graduate when one of the following conditions are met: - student has missed first hackathon - student has missed at more than 1 hackathon Test case when student has missed more than one hackathon (even if not first) """ assert check_graduation_status(student) is False @pytest.mark.django_db(transaction=True) def test_check_complete_specialization_ok( db, student, specialization, grade_slu1, grade_slu2, ): """ Checks student completed specialization when both grades exist and are above 16 """ assert check_complete_specialization(student, specialization) is True @pytest.mark.django_db(transaction=True) def test_check_complete_specialization_failed_slu1( db, student, specialization, grade_slu1_failed, grade_slu2, ): """ Checks student did not completed specialization when one of the SLUs was failed """ assert check_complete_specialization(student, specialization) is False @pytest.mark.django_db(transaction=True) def test_check_complete_specialization_failed_slu2( db, student, specialization, grade_slu1, grade_slu2_failed, ): """ Checks student did not completed specialization when one of the SLUs was failed """ assert check_complete_specialization(student, specialization) is False @pytest.mark.django_db(transaction=True) def test_check_complete_specialization_missing_slu1( db, student, specialization, slu1, grade_slu2, ): """ Checks student did not completed specialization when one of the SLUs is missing """ assert check_complete_specialization(student, specialization) is False @pytest.mark.django_db(transaction=True) def test_check_complete_specialization_missing_slu2( db, student, specialization, grade_slu1, slu2, ): """ Checks student did not completed specialization when one of the SLUs is missing """ assert check_complete_specialization(student, specialization) is False @pytest.mark.django_db(transaction=True) def 
test_check_complete_specialization_missing_slu1_but_two_attempts_slu2( db, student, specialization, slu1, grade_slu2, grade_slu2_failed ): """ Checks student did not completed specialization when one of the SLUs is missing. This test also checks that even when there are repeated grades on other units the verification doesn't consider them """ assert check_complete_specialization(student, specialization) is False @pytest.mark.django_db(transaction=True) def test_check_complete_specialization_missing_slu2_but_two_attempts_slu1( db, student, specialization, grade_slu1, grade_slu1_failed, slu2, ): """ Checks student did not completed specialization when one of the SLUs is missing This test also checks that even when there are repeated grades on other units the verification doesn't consider them """ assert check_complete_specialization(student, specialization) is False @pytest.mark.django_db(transaction=True) def test_check_complete_specialization_missing_all( db, student, specialization, slu1, slu2, ): """ Checks completion check returns False when no grade is available """ assert check_complete_specialization(student, specialization) is False @pytest.mark.django_db(transaction=True) def test_csvdata(db, specialization, slu1, slu2, student, grade_slu1, grade_slu2): """ Test creation of csv file from table of student/unit grades """ specialization.unit_count = 2 spc_list = [specialization] unit_list = [slu1, slu2] object_list = [ { "user": student, "grades": [grade_slu1, grade_slu2], "submission_date": datetime(year=2021, month=8, day=15), "total_score": 38, } ] text = csvdata(spc_list, unit_list, object_list) assert ( text == "username,slack_id,submission_date,total_score,S01-SLU01,S01-SLU02\r\n" "test_student,U12J14XV12Z,2021-08-15 00:00:00,38,18,20\r\n" ) @pytest.mark.django_db(transaction=True) def test_grade_retrieve(slu1, student, student2): """ Ensure grades are correctly retrieved. 
""" Grade.objects.create( user=student, unit=slu1, status="graded", score=16, ) Grade.objects.create( user=student, unit=slu1, status="graded", score=14, ) Grade.objects.create( user=student, unit=slu1, status="graded", score=20, on_time=False ) assert get_last_grade(slu1, student).score == 20 assert get_best_grade(slu1, student).score == 16 assert get_last_grade(slu1, student2).score == None assert get_best_grade(slu1, student2).score == None
25.876676
87
0.698819
1,146
9,652
5.713787
0.141361
0.033751
0.070098
0.038485
0.817654
0.766341
0.715791
0.674557
0.665089
0.665089
0
0.020468
0.225549
9,652
372
88
25.946237
0.855518
0.230108
0
0.665179
0
0.004464
0.02708
0.01763
0
0
0
0
0.075893
1
0.089286
false
0
0.026786
0
0.142857
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
89afd6097c303fd5a224efec7780a10d2b2c8f49
77
py
Python
nagle/settings.py
dra2020/nagle
8974b7925a232640196d5fe88536d4e67c6adb4a
[ "MIT" ]
null
null
null
nagle/settings.py
dra2020/nagle
8974b7925a232640196d5fe88536d4e67c6adb4a
[ "MIT" ]
null
null
null
nagle/settings.py
dra2020/nagle
8974b7925a232640196d5fe88536d4e67c6adb4a
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # # SETTINGS # ALL CONSTANTS ARE VISIBLE EVERYWHERE
11
38
0.727273
10
77
5.6
1
0
0
0
0
0
0
0
0
0
0
0.015625
0.168831
77
6
39
12.833333
0.859375
0.87013
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
89b4402ede018280af1f00c6e17e95225d898ec6
321
py
Python
src/assert_headers/__init__.py
djragsdale/assert-headers-python
0daad6c4177bb2b42d07c2ca97ebaf58432df599
[ "MIT" ]
null
null
null
src/assert_headers/__init__.py
djragsdale/assert-headers-python
0daad6c4177bb2b42d07c2ca97ebaf58432df599
[ "MIT" ]
null
null
null
src/assert_headers/__init__.py
djragsdale/assert-headers-python
0daad6c4177bb2b42d07c2ca97ebaf58432df599
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # Classes from .HeaderAssertionError import HeaderAssertionError # Functions in order they depend on each other from .getMeta import getMeta from .assertHeaders import assertHeaders from .assertHeadersFromUrl import assertHeadersFromUrl # CLI Scripts from .cli import cli
22.928571
54
0.794393
38
321
6.710526
0.631579
0
0
0
0
0
0
0
0
0
0
0.00361
0.137072
321
13
55
24.692308
0.916968
0.333333
0
0
0
0
0
0
0
0
0
0
0.6
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
1
0
1
0
0
4
982c83ad96ed5663dfdf13b3a2acb05abc0aaeac
91
py
Python
safe_transaction_service/contracts/clients/__init__.py
bigman1208000/safe-transaction-service
283a278eeb06aaf9227047b96816e63d2a7b9466
[ "MIT" ]
5
2021-01-28T17:41:42.000Z
2021-11-14T17:09:18.000Z
safe_transaction_service/contracts/clients/__init__.py
bigman1208000/safe-transaction-service
283a278eeb06aaf9227047b96816e63d2a7b9466
[ "MIT" ]
null
null
null
safe_transaction_service/contracts/clients/__init__.py
bigman1208000/safe-transaction-service
283a278eeb06aaf9227047b96816e63d2a7b9466
[ "MIT" ]
5
2021-04-06T17:20:02.000Z
2022-01-13T10:58:08.000Z
# flake8: noqa F401 from .etherscan_api import EtherscanApi from .sourcify import Sourcify
22.75
39
0.824176
12
91
6.166667
0.75
0
0
0
0
0
0
0
0
0
0
0.050633
0.131868
91
3
40
30.333333
0.886076
0.186813
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
9894663fc3316773a1f24afe367419d6c58c9371
81
py
Python
scripts/field/Resi_tutor70.py
G00dBye/YYMS
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
[ "MIT" ]
54
2019-04-16T23:24:48.000Z
2021-12-18T11:41:50.000Z
scripts/field/Resi_tutor70.py
G00dBye/YYMS
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
[ "MIT" ]
3
2019-05-19T15:19:41.000Z
2020-04-27T16:29:16.000Z
scripts/field/Resi_tutor70.py
G00dBye/YYMS
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
[ "MIT" ]
49
2020-11-25T23:29:16.000Z
2022-03-26T16:20:24.000Z
sm.lockInGameUI(True) sm.reservedEffect("Effect/Direction4.img/Resistance/TalkJ")
40.5
59
0.839506
10
81
6.8
0.9
0
0
0
0
0
0
0
0
0
0
0.0125
0.012346
81
2
59
40.5
0.8375
0
0
0
0
0
0.463415
0.463415
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
98a26c6af602ee5d86842e6b3fa0a758603ad2c7
6,095
py
Python
tests/test_apptype_api.py
shimoku-tech/shimoku-api-python
de26e7d80631647e68794277b15397403336f252
[ "MIT" ]
4
2021-12-23T15:51:21.000Z
2022-01-25T08:55:31.000Z
tests/test_apptype_api.py
shimoku-tech/shimoku-api-python
de26e7d80631647e68794277b15397403336f252
[ "MIT" ]
null
null
null
tests/test_apptype_api.py
shimoku-tech/shimoku-api-python
de26e7d80631647e68794277b15397403336f252
[ "MIT" ]
1
2022-03-02T01:13:04.000Z
2022-03-02T01:13:04.000Z
"""""" from os import getenv from typing import Dict import unittest import shimoku_api_python as shimoku from shimoku_api_python.exceptions import ApiClientError api_key: str = getenv('API_TOKEN') universe_id: str = getenv('UNIVERSE_ID') app_type_id: str = getenv('APP_TYPE_ID') environment: str = getenv('ENVIRONMENT') config = { 'access_token': api_key, } s = shimoku.Client( config=config, universe_id=universe_id, environment=environment, ) def test_get_app_type(): app_type: Dict = ( s.app_type.get_app_type( app_type_id=app_type_id, ) ) assert app_type def test_cannot_create_duplicated_app_type(): class MyTestCase(unittest.TestCase): def check_app_type_not_exists(self): with self.assertRaises(ValueError): s.app_type.create_app_type(name='test') t = MyTestCase() t.check_app_type_not_exists() def test_create_and_delete_app_type(): app_type_new: Dict = s.app_type.create_app_type(name='new-test') app_type_id_: str = app_type_new['id'] app_type_: Dict = s.app_type.get_app_type(app_type_id=app_type_id_) assert app_type_new == { k: v for k, v in app_type_.items() if k in [ 'id', 'key', 'name', 'universe', 'normalizedName', '__typename', ] } s.app_type.delete_app_type(app_type_id=app_type_id_) # Check it does not exists anymore class MyTestCase(unittest.TestCase): def check_app_type_not_exists(self): with self.assertRaises(ApiClientError): s.app_type.get_app_type( app_type_id=app_type_id_, ) t = MyTestCase() t.check_app_type_not_exists() def test_update_app_type(): target_col_name: str = 'name' app_type: Dict = s.app_type.get_app_type(app_type_id=app_type_id) name: str = app_type[target_col_name] new_name: str = f'{name}_test' data = {'name': new_name} app_type_updated: Dict = ( s.app_type.update_app_type( app_type_id=app_type_id, app_type_metadata=data, ) ) assert ( { k: v for k, v in app_type.items() if k in [ 'id', 'key', 'universe', # name excluded 'normalizedName', '__typename', ] } == { k: v for k, v in app_type_updated.items() if k in [ 'id', 'key', 'universe', # 
name excluded 'normalizedName', '__typename', ] } ) assert app_type_updated[target_col_name] == new_name app_type_updated_: Dict = s.app_type.get_app_type(app_type_id=app_type_id) assert app_type_updated_[target_col_name] == new_name assert app_type_updated == { k: v for k, v in app_type_updated_.items() if k in [ 'id', 'key', 'name', 'universe', 'normalizedName', '__typename', ] } # Undo change data = {'name': name} app_type_restored: Dict = ( s.app_type.update_app_type( app_type_id=app_type_id, app_type_metadata=data, ) ) assert { k: v for k, v in app_type_restored.items() if k in [ 'id', 'key', 'name', 'universe', 'normalizedName', '__typename', ] } == { k: v for k, v in app_type.items() if k in [ 'id', 'key', 'name', 'universe', 'normalizedName', '__typename', ] } def test_rename_apps_types(): target_col_name: str = 'name' app_type: Dict = s.app_type.get_app_type(app_type_id=app_type_id) name: str = app_type[target_col_name] new_name: str = f'{name}_test' app_type_updated: Dict = ( s.app_type.rename_apps_types( app_type_id=app_type_id, new_name=new_name, ) ) assert ( { k: v for k, v in app_type.items() if k in [ 'id', 'key', 'universe', # name excluded 'normalizedName', '__typename', ] } == { k: v for k, v in app_type_updated.items() if k in [ 'id', 'key', 'universe', # name excluded 'normalizedName', '__typename', ] } ) assert app_type_updated[target_col_name] == new_name app_type_updated_: Dict = s.app_type.get_app_type(app_type_id=app_type_id) assert app_type_updated_[target_col_name] == new_name assert app_type_updated == { k: v for k, v in app_type_updated_.items() if k in [ 'id', 'key', 'name', 'universe', 'normalizedName', '__typename', ] } # Undo change app_type_restored: Dict = ( s.app_type.rename_apps_types( app_type_id=app_type_id, new_name=name, ) ) assert { k: v for k, v in app_type_restored.items() if k in [ 'id', 'key', 'name', 'universe', 'normalizedName', '__typename', ] } == { k: v for k, v in app_type.items() if k in [ 'id', 'key', 'name', 
'universe', 'normalizedName', '__typename', ] } def test_rename_app_type_by_old_name(): app_type: Dict = s.app_type.create_app_type(name='testrenameapptypebyoldname') new_name: str = 'testrenameapptypebyoldname2' new_app_type: Dict = s.app_type.rename_app_type_by_old_name( old_name='testrenameapptypebyoldname', new_name=new_name, ) app_types = s.universe.get_universe_app_types() assert [ app_type_ for app_type_ in app_types if app_type_['name'] == new_name ] s.app_type.delete_app_type(app_type_id=app_type['id']) test_get_app_type() # test_cannot_create_duplicated_app_type() test_create_and_delete_app_type() test_update_app_type() test_rename_apps_types() test_rename_app_type_by_old_name()
26.615721
82
0.579655
787
6,095
4.072427
0.096569
0.25117
0.081435
0.056162
0.774727
0.767239
0.718877
0.682059
0.663963
0.663963
0
0.000239
0.313208
6,095
228
83
26.732456
0.765409
0.025103
0
0.544041
0
0
0.105618
0.013329
0
0
0
0
0.07772
1
0.041451
false
0
0.025907
0
0.07772
0
0
0
0
null
1
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
7f2b3bceb288e697521f1045e24f0038d4d43c11
8,822
py
Python
data_cleaning/tests/test_my_normalize_data.py
wykhuh/ec_workshops_py
c3ad1fc7b292d9859734cc7dcc51ae54f5252755
[ "MIT" ]
null
null
null
data_cleaning/tests/test_my_normalize_data.py
wykhuh/ec_workshops_py
c3ad1fc7b292d9859734cc7dcc51ae54f5252755
[ "MIT" ]
null
null
null
data_cleaning/tests/test_my_normalize_data.py
wykhuh/ec_workshops_py
c3ad1fc7b292d9859734cc7dcc51ae54f5252755
[ "MIT" ]
null
null
null
import pandas as pd from pandas._testing import assert_frame_equal import pytest import numpy as np from scripts.my_normalize_data import ( normalize_expedition_section_cols, remove_bracket_text, remove_whitespace, normalize_columns ) class XTestNormalizeColumns: def test_replace_column_name_with_value_from_columns_mapping(self): columns_mapping = {"aa": "A"} data = {"aa": [1]} df = pd.DataFrame(data) data = {"A": [1]} expected = pd.DataFrame(data) normalize_columns(df, columns_mapping) assert_frame_equal(df, expected) def test_replace_multiple_column_name_with_value_from_columns_mapping(self): columns_mapping = {"aa": "A", "b b": "B"} data = {"aa": [1], "b b": [2]} df = pd.DataFrame(data) data = {"A": [1], "B": [2]} expected = pd.DataFrame(data) normalize_columns(df, columns_mapping) assert_frame_equal(df, expected) def test_does_not_affect_columns_not_in_columns_mapping(self): columns_mapping = {"aa": "A", "b b": "B"} data = {"aa": [1], "b b": [2], "cc": [3]} df = pd.DataFrame(data) data = {"A": [1], "B": [2], "cc": [3]} expected = pd.DataFrame(data) normalize_columns(df, columns_mapping) assert_frame_equal(df, expected) def test_does_not_affect_columns_if_columns_mapping_has_no_value(self): columns_mapping = {"aa": None, "bb": "", "cc": np.nan} data = {"aa": [1], "b b": [2], "cc": [3]} df = pd.DataFrame(data) expected = pd.DataFrame(data) normalize_columns(df, columns_mapping) assert_frame_equal(df, expected) class XTestRemoveBracketText: def test_removes_text_within_brackets_at_end_of_cell(self): df = pd.DataFrame(['aa [A]', 'bb [BB]', 'cc [C] ', 'dd [dd] ']) expected = pd.DataFrame(['aa', 'bb', 'cc', 'dd']) remove_bracket_text(df) assert_frame_equal(df, expected) def test_does_not_remove_text_within_brackets_at_start_of_cell(self): df = pd.DataFrame(['[A] aa', '[BB] bb', '[C] cc ', ' [dd] dd ']) expected = df.copy() remove_bracket_text(df) assert_frame_equal(df, expected) def test_does_not_remove_text_within_brackets_in_middle_of_cell(self): df = 
pd.DataFrame(['aa [A] aa', 'bb [BB] bb', ' cc [C] cc ', ' dd [dd] dd ']) expected = df.copy() remove_bracket_text(df) assert_frame_equal(df, expected) def test_removes_letters_numbers_punctuation_within_brackets(self): df = pd.DataFrame(['aa [A A]', 'bb [BB 123]', 'cc [123-456.] ']) expected = pd.DataFrame(['aa', 'bb', 'cc']) remove_bracket_text(df) assert_frame_equal(df, expected) class XTestRemoveWhitespaceFromDataframe: def test_remove_leading_and_trailing_spaces_from_dataframe(self): data = { 'A': ['A', 'B ', ' C', 'D ', ' Ed ', ' 1 '], 'B': ['Aa', 'Bb ', ' Cc', 'Dd ', ' Ed Ed ', ' 11 '], } df = pd.DataFrame(data) data2 = { 'A': ['A', 'B', 'C', 'D', 'Ed', '1'], 'B': ['Aa', 'Bb', 'Cc', 'Dd', 'Ed Ed', '11'], } expected = pd.DataFrame(data2) remove_whitespace(df) assert_frame_equal(df, expected) def test_ignores_numeric_columns(self): data = { 'A': ['A', 'B ', ' C'], 'B': [1, 2, 3], 'C': [1.1, 2.2, 3.3], } df = pd.DataFrame(data) data2 = { 'A': ['A', 'B', 'C'], 'B': [1, 2, 3], 'C': [1.1, 2.2, 3.3], } expected = pd.DataFrame(data2) remove_whitespace(df) assert_frame_equal(df, expected) def test_handles_empty_strings(self): data = {'A': ['A', 'B ', ' C', ' ']} df = pd.DataFrame(data) data2 = {'A': ['A', 'B', 'C', '']} expected = pd.DataFrame(data2) remove_whitespace(df) assert_frame_equal(df, expected) def test_converts_nan_to_empty_strings(self): data = {'A': ['A', 'B ', ' C', np.nan]} df = pd.DataFrame(data) data2 = {'A': ['A', 'B', 'C', '']} expected = pd.DataFrame(data2) remove_whitespace(df) assert_frame_equal(df, expected) class XTestNormalizeExpeditionSectionCols: def test_dataframe_does_not_change_if_expection_section_columns_exist(self): data = { "Col": [0, 1], "Exp": ["1", "10"], "Site": ["U1", "U2"], "Hole": ["h", "H"], "Core": ["2", "20"], "Type": ["t", "T"], "Section": ["3", "3"], "A/W": ["a", "A"], } df = pd.DataFrame(data) expected = pd.DataFrame(data) df = normalize_expedition_section_cols(df) assert_frame_equal(df, expected) def 
test_dataframe_does_not_change_if_expection_section_Sample_exist(self): data = { "Col": [0, 1], "Sample": ["1-U1h-2t-3-a", "10-U2H-20T-3-A"], "Exp": ["1", "10"], "Site": ["U1", "U2"], "Hole": ["h", "H"], "Core": ["2", "20"], "Type": ["t", "T"], "Section": ["3", "3"], "A/W": ["a", "A"], } df = pd.DataFrame(data) expected = pd.DataFrame(data) df = normalize_expedition_section_cols(df) assert_frame_equal(df, expected) def test_dataframe_does_not_change_if_expection_section_Label_exist(self): data = { "Col": [0, 1], "Label ID": ["1-U1h-2t-3-a", "10-U2H-20T-3-A"], "Exp": ["1", "10"], "Site": ["U1", "U2"], "Hole": ["h", "H"], "Core": ["2", "20"], "Type": ["t", "T"], "Section": ["3", "3"], "A/W": ["a", "A"], } df = pd.DataFrame(data) expected = pd.DataFrame(data) df = normalize_expedition_section_cols(df) assert_frame_equal(df, expected) def test_adds_missing_expection_section_using_Label(self): data = { "Col": [0, 1], "Label ID": ["1-U1h-2t-3-a", "10-U2H-20T-3-A"], } df = pd.DataFrame(data) data = { "Col": [0, 1], "Label ID": ["1-U1h-2t-3-a", "10-U2H-20T-3-A"], "Exp": ["1", "10"], "Site": ["U1", "U2"], "Hole": ["h", "H"], "Core": ["2", "20"], "Type": ["t", "T"], "Section": ["3", "3"], "A/W": ["a", "A"], } expected = pd.DataFrame(data) df = normalize_expedition_section_cols(df) assert_frame_equal(df, expected) def test_adds_missing_expection_section_using_Sample(self): data = { "Col": [0, 1], "Sample": ["1-U1h-2t-3-a", "10-U2H-20T-3-A"], } df = pd.DataFrame(data) data = { "Col": [0, 1], "Sample": ["1-U1h-2t-3-a", "10-U2H-20T-3-A"], "Exp": ["1", "10"], "Site": ["U1", "U2"], "Hole": ["h", "H"], "Core": ["2", "20"], "Type": ["t", "T"], "Section": ["3", "3"], "A/W": ["a", "A"], } expected = pd.DataFrame(data) df = normalize_expedition_section_cols(df) assert_frame_equal(df, expected) def test_handles_missing_aw_col(self): data = { "Col": [0, 1], "Sample": ["1-U1h-2t-3", "10-U2H-20T-3"], "Exp": ["1", "10"], "Site": ["U1", "U2"], "Hole": ["h", "H"], "Core": ["2", "20"], "Type": 
["t", "T"], "Section": ["3", "3"], } df = pd.DataFrame(data) expected = pd.DataFrame(data) df = normalize_expedition_section_cols(df) assert_frame_equal(df, expected) def test_handles_no_data(self): data = { "Col": [0], "Sample": ["No data this hole"], } df = pd.DataFrame(data) data = { "Col": [0], "Sample": ["No data this hole"], "Exp": [None], "Site": [None], "Hole": [None], "Core": [None], "Type": [None], "Section": [None], "A/W": [None], } expected = pd.DataFrame(data) df = normalize_expedition_section_cols(df) assert_frame_equal(df, expected) def test_otherwise_raise_error(self): df = pd.DataFrame({"foo": [1]}) message = "File does not have the expected columns." with pytest.raises(ValueError, match=message): normalize_expedition_section_cols(df)
29.211921
87
0.495353
1,056
8,822
3.916667
0.124053
0.098404
0.094294
0.082689
0.7853
0.777563
0.74323
0.731141
0.693424
0.65087
0
0.034327
0.323056
8,822
301
88
29.30897
0.658238
0
0
0.679487
0
0
0.109386
0
0
0
0
0
0.08547
1
0.08547
false
0
0.021368
0
0.123932
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
7f4bedcf2e1adad982aecfb1927db828602df2d1
796
py
Python
test/bmi/bmi.py
franklinzhanggis/model-interoperable-engine
40b724813bec9af16f4ca95e36f8ff16be787315
[ "MIT" ]
1
2016-10-07T07:13:39.000Z
2016-10-07T07:13:39.000Z
test/bmi/bmi.py
franklinzhanggis/model-interoperable-engine
40b724813bec9af16f4ca95e36f8ff16be787315
[ "MIT" ]
1
2017-11-30T17:33:05.000Z
2017-11-30T17:33:05.000Z
test/bmi/bmi.py
franklinzhanggis/model-interoperable-engine
40b724813bec9af16f4ca95e36f8ff16be787315
[ "MIT" ]
null
null
null
#! /usr/bin/env python """The complete Basic Model Interface.""" from .base import BmiBase from .info import BmiInfo from .time import BmiTime from .vars import BmiVars from .getter_setter import BmiGetter, BmiSetter from .grid_rectilinear import BmiGridRectilinear from .grid_uniform_rectilinear import BmiGridUniformRectilinear from .grid_structured_quad import BmiGridStructuredQuad from .grid_unstructured import BmiGridUnstructured class Bmi(BmiBase, BmiInfo, BmiTime, BmiVars, BmiGetter, BmiSetter, BmiGridRectilinear, BmiGridUniformRectilinear, BmiGridStructuredQuad, BmiGridUnstructured): """The complete Basic Model Interface. Defines an interface for converting a standalone model into an integrated modeling framework component. """ pass
29.481481
79
0.791457
85
796
7.329412
0.552941
0.051364
0.051364
0.067416
0.096308
0
0
0
0
0
0
0
0.155779
796
26
80
30.615385
0.927083
0.248744
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.076923
0.692308
0
0.769231
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
4
7f520d52db84886d967e918d802d393556b3df00
119
py
Python
python/testData/inspections/PyUnusedLocalInspection/unusedMultiAssignmentTarget.py
alexey-anufriev/intellij-community
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/inspections/PyUnusedLocalInspection/unusedMultiAssignmentTarget.py
alexey-anufriev/intellij-community
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
[ "Apache-2.0" ]
null
null
null
python/testData/inspections/PyUnusedLocalInspection/unusedMultiAssignmentTarget.py
alexey-anufriev/intellij-community
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
[ "Apache-2.0" ]
1
2020-10-15T05:56:42.000Z
2020-10-15T05:56:42.000Z
def foo(): <weak_warning descr="Local variable 'baz' value is not used">baz</weak_warning> = bar = 1 return bar
39.666667
93
0.680672
19
119
4.157895
0.789474
0.278481
0
0
0
0
0
0
0
0
0
0.010417
0.193277
119
3
94
39.666667
0.8125
0
0
0
0
0
0.316667
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
7fa5f41715f5e1c82ab594e5baf48595ecd212db
188
py
Python
modelbased-rl/MBPO/ED2-MBPO/mbpo.py
TJU-DRL-LAB/ai-optimizer
f558cc524c66460913989519779873b371bf78bc
[ "MIT" ]
null
null
null
modelbased-rl/MBPO/ED2-MBPO/mbpo.py
TJU-DRL-LAB/ai-optimizer
f558cc524c66460913989519779873b371bf78bc
[ "MIT" ]
null
null
null
modelbased-rl/MBPO/ED2-MBPO/mbpo.py
TJU-DRL-LAB/ai-optimizer
f558cc524c66460913989519779873b371bf78bc
[ "MIT" ]
null
null
null
from examples.instrument import run_example_local import sys if __name__ == '__main__': model_name = 'examples.development' run_example_local(model_name, tuple(sys.argv[1:]))
31.333333
54
0.75
25
188
5.08
0.64
0.15748
0.23622
0
0
0
0
0
0
0
0
0.00625
0.148936
188
6
54
31.333333
0.7875
0
0
0
0
0
0.152174
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
f68802b1fd9378a8b68c493db6c6bdab31a041f0
160
py
Python
Lib/site-packages/channels/delay/apps.py
amit63731/channels
d16fd85b0668e611f9375c7787ea8ae8fe6a34ad
[ "MIT" ]
3
2019-02-07T09:43:03.000Z
2020-01-18T08:44:47.000Z
Lib/site-packages/channels/delay/apps.py
amit63731/channels
d16fd85b0668e611f9375c7787ea8ae8fe6a34ad
[ "MIT" ]
4
2020-02-12T03:17:43.000Z
2022-02-11T03:43:30.000Z
channels/delay/apps.py
zsjohny/channels
6d71106c3c6a8924f75e5058cd6c54e765af3b94
[ "BSD-3-Clause" ]
1
2019-03-07T04:30:36.000Z
2019-03-07T04:30:36.000Z
from django.apps import AppConfig class DelayConfig(AppConfig): name = "channels.delay" label = "channels.delay" verbose_name = "Channels Delay"
17.777778
35
0.7125
18
160
6.277778
0.666667
0.345133
0.300885
0
0
0
0
0
0
0
0
0
0.19375
160
8
36
20
0.875969
0
0
0
0
0
0.2625
0
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f6ab9a03aed80395cc3affce0cf6a32e7f4bd687
89
py
Python
inventory/apps.py
H0neyBadger/pmapi
d34dad32170e53f49e14611f5bfbfcb4eb7b8d4d
[ "MIT" ]
null
null
null
inventory/apps.py
H0neyBadger/pmapi
d34dad32170e53f49e14611f5bfbfcb4eb7b8d4d
[ "MIT" ]
1
2017-09-07T09:15:07.000Z
2017-09-07T09:15:07.000Z
inventory/apps.py
H0neyBadger/cmdb
d34dad32170e53f49e14611f5bfbfcb4eb7b8d4d
[ "MIT" ]
null
null
null
from django.apps import AppConfig class PmapiConfig(AppConfig): name = 'inventory'
14.833333
33
0.752809
10
89
6.7
0.9
0
0
0
0
0
0
0
0
0
0
0
0.168539
89
5
34
17.8
0.905405
0
0
0
0
0
0.101124
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
f6fe4067d5e1db743a93079a8aec278a21037c3d
407
py
Python
tests/metadata/v150/test_template.py
OpenEnergyPlatform/metadata
b4e10fa57de1a45e7c866df6605015cad0b54957
[ "MIT" ]
2
2020-05-06T17:20:10.000Z
2020-05-06T17:20:34.000Z
tests/metadata/v150/test_template.py
OpenEnergyPlatform/metadata
b4e10fa57de1a45e7c866df6605015cad0b54957
[ "MIT" ]
21
2019-10-31T16:26:59.000Z
2020-05-13T15:12:41.000Z
tests/metadata/v150/test_template.py
OpenEnergyPlatform/metadata
b4e10fa57de1a45e7c866df6605015cad0b54957
[ "MIT" ]
null
null
null
def test_if_template_json_loads_successfully(): from metadata.v150.template import OEMETADATA_V150_TEMPLATE def test_template_against_schema_which_should_succeed(): import jsonschema from metadata.v150.template import OEMETADATA_V150_TEMPLATE from metadata.v150.schema import OEMETADATA_V150_SCHEMA assert jsonschema.validate(OEMETADATA_V150_TEMPLATE, OEMETADATA_V150_SCHEMA) == None
37
88
0.842752
51
407
6.313725
0.411765
0.186335
0.149068
0.149068
0.322981
0.322981
0.322981
0.322981
0
0
0
0.066482
0.113022
407
10
89
40.7
0.825485
0
0
0.285714
0
0
0
0
0
0
0
0
0.142857
1
0.285714
true
0
0.571429
0
0.857143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
4
1227798d5f0e63fa2fe9f8f3400f805c0c4ca4f6
107
py
Python
src/modules/action_encoders/__init__.py
mansicer/cg-pymarl
b8ae8428e57754ac49b5a5c78da52662c5c5dfad
[ "Apache-2.0" ]
64
2020-10-29T12:44:56.000Z
2022-01-04T02:14:11.000Z
src/modules/action_encoders/__init__.py
mansicer/cg-pymarl
b8ae8428e57754ac49b5a5c78da52662c5c5dfad
[ "Apache-2.0" ]
9
2020-11-18T11:19:37.000Z
2022-02-28T23:40:06.000Z
src/modules/action_encoders/__init__.py
mansicer/cg-pymarl
b8ae8428e57754ac49b5a5c78da52662c5c5dfad
[ "Apache-2.0" ]
18
2020-10-22T15:49:39.000Z
2022-03-29T12:57:08.000Z
REGISTRY = {} from .obs_reward_encoder import ObsRewardEncoder REGISTRY["obs_reward"] = ObsRewardEncoder
17.833333
48
0.803738
11
107
7.545455
0.636364
0.216867
0
0
0
0
0
0
0
0
0
0
0.11215
107
5
49
21.4
0.873684
0
0
0
0
0
0.093458
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
1227b23efa12bb11e0977d41ea5c0df31258ef1e
1,462
py
Python
tests/stochastic/test_cir.py
vishalbelsare/pfhedge
4d7ff173995e0795942bc6ec55f3fdc5bfb7a5f1
[ "MIT" ]
81
2021-03-19T02:39:35.000Z
2022-03-29T07:59:50.000Z
tests/stochastic/test_cir.py
akira66/pfhedge
bc4ae304f9dc887b0e4d581f8ad42700a4eea9ad
[ "MIT" ]
382
2021-05-04T16:08:38.000Z
2022-03-31T13:10:51.000Z
tests/stochastic/test_cir.py
akira66/pfhedge
bc4ae304f9dc887b0e4d581f8ad42700a4eea9ad
[ "MIT" ]
32
2021-05-15T02:40:23.000Z
2022-03-27T10:08:11.000Z
from math import sqrt import torch from torch.distributions.gamma import Gamma from torch.testing import assert_close from pfhedge.stochastic import generate_cir def test_generate_cir_mean_1(): torch.manual_seed(42) n_paths = 10000 theta = 0.04 sigma = 2.0 kappa = 1.0 t = generate_cir(n_paths, 250, kappa=kappa, theta=theta, sigma=sigma) result = t[:, -1].mean() # Asymptotic distribution is gamma distribution alpha = 2 * kappa * theta / sigma ** 2 beta = 2 * kappa / sigma ** 2 d = Gamma(alpha, beta) expect = torch.full_like(result, d.mean) std = sqrt(d.variance / n_paths) assert_close(result, expect, atol=3 * std, rtol=0) def test_generate_cir_mean_2(): torch.manual_seed(42) n_paths = 10000 theta = 0.04 sigma = 2.0 kappa = 1.0 t = generate_cir( n_paths, 250, init_state=0.05, kappa=kappa, theta=theta, sigma=sigma ) result = t[:, -1].mean() # Asymptotic distribution is gamma distribution alpha = 2 * kappa * theta / sigma ** 2 beta = 2 * kappa / sigma ** 2 d = Gamma(alpha, beta) expect = torch.full_like(result, d.mean) std = sqrt(d.variance / n_paths) assert_close(result, expect, atol=3 * std, rtol=0) def test_dtype(): output = generate_cir(2, 3, dtype=torch.float32) assert output.dtype == torch.float32 output = generate_cir(2, 3, dtype=torch.float64) assert output.dtype == torch.float64
24.366667
76
0.653899
217
1,462
4.276498
0.248848
0.082974
0.032328
0.038793
0.753233
0.713362
0.713362
0.650862
0.650862
0.650862
0
0.057932
0.232558
1,462
59
77
24.779661
0.769162
0.062244
0
0.6
1
0
0
0
0
0
0
0
0.125
1
0.075
false
0
0.125
0
0.2
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
122caee8501a4124b02a4130fcd59b17f1c73bc7
176
py
Python
test_autoarray/plot/grids/border.py
Sketos/PyAutoArray
72dc7e8d1c38786915f82a7e7284239e5ce87624
[ "MIT" ]
null
null
null
test_autoarray/plot/grids/border.py
Sketos/PyAutoArray
72dc7e8d1c38786915f82a7e7284239e5ce87624
[ "MIT" ]
null
null
null
test_autoarray/plot/grids/border.py
Sketos/PyAutoArray
72dc7e8d1c38786915f82a7e7284239e5ce87624
[ "MIT" ]
null
null
null
import autoarray as aa import autoarray.plot as aplt import numpy as np grid = aa.grid.uniform(shape_2d=(11, 11), pixel_scales=1.0) aplt.grid(grid=grid, include_border=True)
22
59
0.772727
32
176
4.15625
0.625
0.225564
0
0
0
0
0
0
0
0
0
0.045161
0.119318
176
7
60
25.142857
0.812903
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.6
0
0.6
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
125503f6fdb04f6e77e297ac654ab40acdf9d97c
7,236
py
Python
tpdatasrc/kotbfixes/scr/teleport_shortcuts.py
edoipi/TemplePlus
f0e552289822fea908f16daa379fa568b1bd286d
[ "MIT" ]
69
2015-05-05T14:09:25.000Z
2022-02-15T06:13:04.000Z
tpdatasrc/kotbfixes/scr/teleport_shortcuts.py
edoipi/TemplePlus
f0e552289822fea908f16daa379fa568b1bd286d
[ "MIT" ]
457
2015-05-01T22:07:45.000Z
2022-03-31T02:19:10.000Z
tpdatasrc/kotbfixes/scr/teleport_shortcuts.py
edoipi/TemplePlus
f0e552289822fea908f16daa379fa568b1bd286d
[ "MIT" ]
25
2016-02-04T21:19:53.000Z
2021-11-15T23:14:51.000Z
from t import * ################################# # TELEPORT SHORTCUTS # ################################# def shopmap(): game.fade_and_teleport(0,0,0,5123,474,465) return ################################# # THE KEEP # ################################# def bailey(): game.fade_and_teleport(0,0,0,5001,461,563) return def kotb(): game.fade_and_teleport(0,0,0,5001,461,563) return def southernbailey(): game.fade_and_teleport(0,0,0,5001,461,563) return def keep(): game.fade_and_teleport(0,0,0,5001,461,563) return def innerbailey(): game.fade_and_teleport(0,0,0,5115,588,494) return def inner_bailey(): innerbailey() return def fortressentrance(): game.fade_and_teleport(0,0,0,5017,487,460) return def innerfort(): fortressentrance() return def fortress(): fortressentrance() return def fort(): fortressentrance() return def quartermaster(): game.fade_and_teleport(0,0,0,5017,447,435) return def keepoutside(): game.fade_and_teleport(0,0,0,5063,508,503) return def outsidekeep(): keepoutside() return def maingate(): keepoutside() return def keepgate(): keepoutside() return def kotbgate(): keepoutside() return def gate(): keepoutside() return def smith(): game.fade_and_teleport(0,0,0,5001,503,521) return def marketplace(): game.fade_and_teleport(0,0,0,5001,544,388) return def market(): marketplace() return def bazar(): marketplace() return def reece(): marketplace() return def ricario(): marketplace() return def geoffrey(): marketplace() return def flay(): game.fade_and_teleport(0,0,0,5045,524,499) return def robin(): game.fade_and_teleport(0,0,0,5045,524,499) return def robingraves(): robin() return def graveyard(): game.fade_and_teleport(0,0,0,5126,497,523) return def inn(): game.fade_and_teleport(0,0,0,5007,517,490) return def keepinn(): game.fade_and_teleport(0,0,0,5007,517,490) return def animalseller(): game.fade_and_teleport(0,0,0,5043,480,480) return def church(): game.fade_and_teleport(0,0,0,5033,512,489) def jayfie(): if is_daytime(): 
game.fade_and_teleport(0,0,0,5001,48,698) else: jayfienight() return def wainwright(): if is_daytime(): game.fade_and_teleport(0,0,0,5016,494,487) else: inn() return def gaolcell(): game.fade_and_teleport(0,0,0,5125,473,525) return ################################# # Northern Oak Woods # # # ################################# def harpywoods(): game.fade_and_teleport(0,0,0,5094,565,588) return def northernoakwoods(): harpywoods() return def northernwoods(): harpywoods() return def oakwoods(): harpywoods() return def northwoods(): harpywoods() return def northwood(): harpywoods() return def harpies(): game.fade_and_teleport(0,0,0,5094,499,536) return def northernwoodsshrine(): harpies() return def northwoodsshrine(): harpies() return def harpyshrine(): harpies() return def hermit(): game.fade_and_teleport(0,0,0,5094,400,476) return def passagebeanththewoods(): game.fade_and_teleport(0,0,0,5133,454,491) return def underpass(): passagebeanththewoods() return def underdark(): passagebeanththewoods() return def passage(): passagebeanththewoods() return ################################# # Mad Wizard Tower & Swamp # # # ################################# def swamp(): game.fade_and_teleport(0,0,0,5095,407,474) return def lizards(): swamp() return def lizardswamp(): swamp() return def lizardchief(): game.fade_and_teleport(0,0,0,5114,491,465) return def madwizardoutside(): game.fade_and_teleport(0,0,0,5069,515,480) return def madwizard(): madwizardoutside() return def deepswamp(): madwizardoutside() return def thedeepswamp(): madwizardoutside() return def madwizardmainhall(): game.fade_and_teleport(0,0,0,5130,477,490) return def wizardtowermainhall(): madwizardmainhall() return def towermainhall(): madwizardmainhall() return def madwizardupperhall(): game.fade_and_teleport(0,0,0,5131,482,472) return def wizardtowerupperhall(): madwizardupperhall() return def towerupperhall(): madwizardupperhall() return def madwizardinnerparapet(): game.fade_and_teleport(0,0,0,5132,480,478) 
return def wizardtowerinnerparapet(): madwizardinnerparapet() return def towerinnerparapet(): madwizardinnerparapet() return ################################# # Spider Pine Woods # # # ################################# def reynard(): if game.quests[5].state != qs_completed: game.fade_and_teleport(0,0,0,5004,473,475) else: marketplace() return def abandonedflet(): game.fade_and_teleport(0,0,0,5004,473,475) return def spiderwoods(): game.fade_and_teleport(0,0,0,5002,620,525) return def spiderwood(): spiderwoods() return def spiderpinewoods(): spiderwoods() return def spiders(): spiderwoods() return def southernpinewoods(): spiderwoods() return def spiderqueen(): game.fade_and_teleport(0,0,0,5002,396,471) return def insectmoundentrance(): game.fade_and_teleport(0,0,0,5002,511,386) return def thorp(): game.fade_and_teleport(0,0,0,5062,483,526) def thorpoflordaxer(): thorp() def thorpe(): thorp() ################################# # Raiders # # # ################################# def raiders(): game.fade_and_teleport(0,0,0,5068,598, 457) return def raidercamp(): game.fade_and_teleport(0,0,0,5068,598, 457) return ################################# # Caves of Chaos # # # ################################# def cavesofchaos(): game.fade_and_teleport(0,0,0,5051,518,582) return def caves(): game.fade_and_teleport(0,0,0,5051,518,582) return def coc(): game.fade_and_teleport(0,0,0,5051,518,582) return def koboldcave(): game.fade_and_teleport(0,0,0,5052,481,476) return def koboldcaves(): koboldcave() return def koboldscave(): koboldcave() return def koboldscaves(): koboldcave() return def kobolds(): koboldcave() return def tarkin(): game.fade_and_teleport(0,0,0,5052,450,446) return def orccave(): game.fade_and_teleport(0,0,0,5053,417,536) return def orccaves(): orccave() return def orcs(): orccave() return def orcscave(): orccave() return def caveb(): orccave() return def CaveB(): orccave() return def orcleaderb(): game.fade_and_teleport(0,0,0,5053,486,451) return def 
bugbearcave(): game.fade_and_teleport(0,0,0,5058,498,484) return def bugbears(): bugbearcave() return def bugbearcaves(): bugbearcave() return def gnollcave(): game.fade_and_teleport(0,0,0,5060,471,542) def gnollcaves(): gnollcave() def hobgoblincaves(): game.fade_and_teleport(0,0,0,5056,481,463) def hobgoblincave(): hobgoblincaves() def hobgobcaves(): hobgoblincaves() def hobgobcave(): hobgoblincaves() def hobgoblinprison(): game.fade_and_teleport(0,0,0,5056,503,485) def hobgoblinprisoners(): hobgoblinprison() def labyrinth(): game.fade_and_teleport(0,0,0,5059,509,463) def labyrinthentrance(): labyrinth() def soec(): game.fade_and_teleport(0,0,0,5061,500,500) def shrineofevilchaos(): game.fade_and_teleport(0,0,0,5061,500,500) def templeofevilchaos(): game.fade_and_teleport(0,0,0,5061,500,500) def SoEC(): game.fade_and_teleport(0,0,0,5061,500,500) def minotaur(): game.fade_and_teleport(0,0,0,5059,447,509)
17.106383
44
0.679243
990
7,236
4.847475
0.245455
0.046676
0.12836
0.221713
0.356741
0.356741
0.356741
0.266722
0.168368
0.168368
0
0.114808
0.123687
7,236
423
45
17.106383
0.642012
0.023632
0
0.522124
0
0
0
0
0
0
0
0
0
1
0.342183
true
0.017699
0.00295
0
0.631268
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
1
0
0
4
1262f4d896acfa8d4691df7e2c08200c2fccf823
232
py
Python
rest_framework_helpers/_compat.py
Apkawa/django-rest-framework-helpers
f4b24bf326e081a215ca5c1c117441ea8f78cbb4
[ "MIT" ]
null
null
null
rest_framework_helpers/_compat.py
Apkawa/django-rest-framework-helpers
f4b24bf326e081a215ca5c1c117441ea8f78cbb4
[ "MIT" ]
null
null
null
rest_framework_helpers/_compat.py
Apkawa/django-rest-framework-helpers
f4b24bf326e081a215ca5c1c117441ea8f78cbb4
[ "MIT" ]
null
null
null
# coding: utf-8 from __future__ import unicode_literals try: from StringIO import StringIO except ImportError: from io import StringIO try: import urlparse except ImportError: from urllib import parse as urlparse
16.571429
40
0.762931
30
232
5.733333
0.566667
0.162791
0.244186
0
0
0
0
0
0
0
0
0.005435
0.206897
232
14
40
16.571429
0.929348
0.056034
0
0.444444
0
0
0
0
0
0
0
0
0
1
0
true
0
0.777778
0
0.777778
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
12652ebe993f635299ee5c8a482b41dc4698816b
1,464
py
Python
DublinBus/DublinBusTest/migrations/0002_line9_lineone.py
atreanor/BusLane
c7ad6f42d9442d99b48c03e3e6ad883f5d16bba9
[ "BSD-2-Clause" ]
null
null
null
DublinBus/DublinBusTest/migrations/0002_line9_lineone.py
atreanor/BusLane
c7ad6f42d9442d99b48c03e3e6ad883f5d16bba9
[ "BSD-2-Clause" ]
3
2020-02-11T23:57:19.000Z
2021-06-10T21:18:36.000Z
DublinBus/DublinBusTest/migrations/0002_line9_lineone.py
atreanor/BusLane
c7ad6f42d9442d99b48c03e3e6ad883f5d16bba9
[ "BSD-2-Clause" ]
1
2020-06-20T09:53:15.000Z
2020-06-20T09:53:15.000Z
# Generated by Django 2.0.6 on 2018-07-10 16:19 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('DublinBusTest', '0001_initial'), ] operations = [ migrations.CreateModel( name='Line9', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('displaystopid', models.TextField(blank=True, null=True)), ('fullname', models.TextField(blank=True, null=True)), ('latitude', models.TextField(blank=True, null=True)), ('longitude', models.TextField(blank=True, null=True)), ], options={ 'db_table': '9', 'managed': False, }, ), migrations.CreateModel( name='Lineone', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('displaystopid', models.TextField(blank=True, null=True)), ('fullname', models.TextField(blank=True, null=True)), ('latitude', models.TextField(blank=True, null=True)), ('longitude', models.TextField(blank=True, null=True)), ], options={ 'db_table': 'LineOne', 'managed': False, }, ), ]
34.857143
114
0.52459
133
1,464
5.706767
0.37594
0.158103
0.210804
0.252964
0.682477
0.682477
0.682477
0.682477
0.682477
0.682477
0
0.021472
0.331967
1,464
41
115
35.707317
0.754601
0.030738
0
0.628571
1
0
0.112209
0
0
0
0
0
0
1
0
false
0
0.028571
0
0.114286
0
0
0
0
null
0
1
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
89c55df9e9abefd5fd4bfbe2f1bf5cc16eedf239
334
py
Python
Plugins/GUI/Shared/__init__.py
bvbohnen/X4_Customizer
6f865008690916a66a44c97331d9a2692baedb35
[ "MIT" ]
25
2018-12-10T12:52:11.000Z
2022-01-29T14:42:57.000Z
Plugins/GUI/Shared/__init__.py
bvbohnen/X4_Customizer
6f865008690916a66a44c97331d9a2692baedb35
[ "MIT" ]
4
2019-08-01T19:09:11.000Z
2022-01-02T01:47:42.000Z
Plugins/GUI/Shared/__init__.py
bvbohnen/X4_Customizer
6f865008690916a66a44c97331d9a2692baedb35
[ "MIT" ]
6
2019-02-16T08:39:04.000Z
2021-12-21T06:11:58.000Z
''' Support classes and functions used multiple places. ''' from .Misc import Set_Icon from .Misc import Set_Foreground_Color from .Misc import Set_Background_Color from .Styles import Get_Style_Names from .Styles import Make_Style from .Tab_Page_Widget import Tab_Page_Widget from .Widget_Documentation import Widget_Documentation
25.692308
54
0.841317
49
334
5.44898
0.489796
0.089888
0.157303
0.191011
0
0
0
0
0
0
0
0
0.116766
334
12
55
27.833333
0.905085
0.152695
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
89ea38836aec84acfa9cf01a59b86e9f80977f66
228
py
Python
longclaw/shipping/serializers/rates.py
al-bezd/longclaw
0508231a90360670ed52d40475d8886a91c6920f
[ "MIT" ]
351
2017-02-03T10:47:06.000Z
2022-03-23T08:08:31.000Z
longclaw/shipping/serializers/rates.py
al-bezd/longclaw
0508231a90360670ed52d40475d8886a91c6920f
[ "MIT" ]
392
2017-02-03T10:16:26.000Z
2022-03-28T00:30:02.000Z
longclaw/shipping/serializers/rates.py
al-bezd/longclaw
0508231a90360670ed52d40475d8886a91c6920f
[ "MIT" ]
108
2017-02-06T01:03:21.000Z
2022-03-14T13:51:20.000Z
from rest_framework import serializers from longclaw.shipping.models.rates import ShippingRate class ShippingRateSerializer(serializers.ModelSerializer): class Meta: model = ShippingRate fields = "__all__"
25.333333
58
0.77193
22
228
7.772727
0.772727
0
0
0
0
0
0
0
0
0
0
0
0.175439
228
8
59
28.5
0.909574
0
0
0
0
0
0.030702
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
89fa2747dafa2171b56eff2cff68aedcf9a38428
56,110
py
Python
tests/test_fluidpropertiessaturated.py
marcelosalles/pyidf
c2f744211572b5e14e29522aac1421ba88addb0e
[ "Apache-2.0" ]
19
2015-12-08T23:33:51.000Z
2022-01-31T04:41:10.000Z
tests/test_fluidpropertiessaturated.py
marcelosalles/pyidf
c2f744211572b5e14e29522aac1421ba88addb0e
[ "Apache-2.0" ]
2
2019-10-04T10:57:00.000Z
2021-10-01T06:46:17.000Z
tests/test_fluidpropertiessaturated.py
marcelosalles/pyidf
c2f744211572b5e14e29522aac1421ba88addb0e
[ "Apache-2.0" ]
7
2015-11-04T02:25:01.000Z
2021-12-08T03:14:28.000Z
import os import tempfile import unittest import logging from pyidf import ValidationLevel import pyidf from pyidf.idf import IDF from pyidf.fluid_properties import FluidPropertiesSaturated log = logging.getLogger(__name__) class TestFluidPropertiesSaturated(unittest.TestCase): def setUp(self): self.fd, self.path = tempfile.mkstemp() def tearDown(self): os.remove(self.path) def test_create_fluidpropertiessaturated(self): pyidf.validation_level = ValidationLevel.error obj = FluidPropertiesSaturated() # object-list var_name = "object-list|Name" obj.name = var_name # alpha var_fluid_property_type = "Enthalpy" obj.fluid_property_type = var_fluid_property_type # alpha var_fluid_phase = "Fluid" obj.fluid_phase = var_fluid_phase # object-list var_temperature_values_name = "object-list|Temperature Values Name" obj.temperature_values_name = var_temperature_values_name # real var_property_value_1 = 5.5 obj.property_value_1 = var_property_value_1 # real var_property_value_2 = 6.6 obj.property_value_2 = var_property_value_2 # real var_property_value_3 = 7.7 obj.property_value_3 = var_property_value_3 # real var_property_value_4 = 8.8 obj.property_value_4 = var_property_value_4 # real var_property_value_5 = 9.9 obj.property_value_5 = var_property_value_5 # real var_property_value_6 = 10.1 obj.property_value_6 = var_property_value_6 # real var_property_value_7 = 11.11 obj.property_value_7 = var_property_value_7 # real var_property_value_8 = 12.12 obj.property_value_8 = var_property_value_8 # real var_property_value_9 = 13.13 obj.property_value_9 = var_property_value_9 # real var_property_value_10 = 14.14 obj.property_value_10 = var_property_value_10 # real var_property_value_11 = 15.15 obj.property_value_11 = var_property_value_11 # real var_property_value_12 = 16.16 obj.property_value_12 = var_property_value_12 # real var_property_value_13 = 17.17 obj.property_value_13 = var_property_value_13 # real var_property_value_14 = 18.18 obj.property_value_14 = var_property_value_14 # 
real var_property_value_15 = 19.19 obj.property_value_15 = var_property_value_15 # real var_property_value_16 = 20.2 obj.property_value_16 = var_property_value_16 # real var_property_value_17 = 21.21 obj.property_value_17 = var_property_value_17 # real var_property_value_18 = 22.22 obj.property_value_18 = var_property_value_18 # real var_property_value_19 = 23.23 obj.property_value_19 = var_property_value_19 # real var_property_value_20 = 24.24 obj.property_value_20 = var_property_value_20 # real var_property_value_21 = 25.25 obj.property_value_21 = var_property_value_21 # real var_property_value_22 = 26.26 obj.property_value_22 = var_property_value_22 # real var_property_value_23 = 27.27 obj.property_value_23 = var_property_value_23 # real var_property_value_24 = 28.28 obj.property_value_24 = var_property_value_24 # real var_property_value_25 = 29.29 obj.property_value_25 = var_property_value_25 # real var_property_value_26 = 30.3 obj.property_value_26 = var_property_value_26 # real var_property_value_27 = 31.31 obj.property_value_27 = var_property_value_27 # real var_property_value_28 = 32.32 obj.property_value_28 = var_property_value_28 # real var_property_value_29 = 33.33 obj.property_value_29 = var_property_value_29 # real var_property_value_30 = 34.34 obj.property_value_30 = var_property_value_30 # real var_property_value_31 = 35.35 obj.property_value_31 = var_property_value_31 # real var_property_value_32 = 36.36 obj.property_value_32 = var_property_value_32 # real var_property_value_33 = 37.37 obj.property_value_33 = var_property_value_33 # real var_property_value_34 = 38.38 obj.property_value_34 = var_property_value_34 # real var_property_value_35 = 39.39 obj.property_value_35 = var_property_value_35 # real var_property_value_36 = 40.4 obj.property_value_36 = var_property_value_36 # real var_property_value_37 = 41.41 obj.property_value_37 = var_property_value_37 # real var_property_value_38 = 42.42 obj.property_value_38 = var_property_value_38 # real 
var_property_value_39 = 43.43 obj.property_value_39 = var_property_value_39 # real var_property_value_40 = 44.44 obj.property_value_40 = var_property_value_40 # real var_property_value_41 = 45.45 obj.property_value_41 = var_property_value_41 # real var_property_value_42 = 46.46 obj.property_value_42 = var_property_value_42 # real var_property_value_43 = 47.47 obj.property_value_43 = var_property_value_43 # real var_property_value_44 = 48.48 obj.property_value_44 = var_property_value_44 # real var_property_value_45 = 49.49 obj.property_value_45 = var_property_value_45 # real var_property_value_46 = 50.5 obj.property_value_46 = var_property_value_46 # real var_property_value_47 = 51.51 obj.property_value_47 = var_property_value_47 # real var_property_value_48 = 52.52 obj.property_value_48 = var_property_value_48 # real var_property_value_49 = 53.53 obj.property_value_49 = var_property_value_49 # real var_property_value_50 = 54.54 obj.property_value_50 = var_property_value_50 # real var_property_value_51 = 55.55 obj.property_value_51 = var_property_value_51 # real var_property_value_52 = 56.56 obj.property_value_52 = var_property_value_52 # real var_property_value_53 = 57.57 obj.property_value_53 = var_property_value_53 # real var_property_value_54 = 58.58 obj.property_value_54 = var_property_value_54 # real var_property_value_55 = 59.59 obj.property_value_55 = var_property_value_55 # real var_property_value_56 = 60.6 obj.property_value_56 = var_property_value_56 # real var_property_value_57 = 61.61 obj.property_value_57 = var_property_value_57 # real var_property_value_58 = 62.62 obj.property_value_58 = var_property_value_58 # real var_property_value_59 = 63.63 obj.property_value_59 = var_property_value_59 # real var_property_value_60 = 64.64 obj.property_value_60 = var_property_value_60 # real var_property_value_61 = 65.65 obj.property_value_61 = var_property_value_61 # real var_property_value_62 = 66.66 obj.property_value_62 = var_property_value_62 # real 
var_property_value_63 = 67.67 obj.property_value_63 = var_property_value_63 # real var_property_value_64 = 68.68 obj.property_value_64 = var_property_value_64 # real var_property_value_65 = 69.69 obj.property_value_65 = var_property_value_65 # real var_property_value_66 = 70.7 obj.property_value_66 = var_property_value_66 # real var_property_value_67 = 71.71 obj.property_value_67 = var_property_value_67 # real var_property_value_68 = 72.72 obj.property_value_68 = var_property_value_68 # real var_property_value_69 = 73.73 obj.property_value_69 = var_property_value_69 # real var_property_value_70 = 74.74 obj.property_value_70 = var_property_value_70 # real var_property_value_71 = 75.75 obj.property_value_71 = var_property_value_71 # real var_property_value_72 = 76.76 obj.property_value_72 = var_property_value_72 # real var_property_value_73 = 77.77 obj.property_value_73 = var_property_value_73 # real var_property_value_74 = 78.78 obj.property_value_74 = var_property_value_74 # real var_property_value_75 = 79.79 obj.property_value_75 = var_property_value_75 # real var_property_value_76 = 80.8 obj.property_value_76 = var_property_value_76 # real var_property_value_77 = 81.81 obj.property_value_77 = var_property_value_77 # real var_property_value_78 = 82.82 obj.property_value_78 = var_property_value_78 # real var_property_value_79 = 83.83 obj.property_value_79 = var_property_value_79 # real var_property_value_80 = 84.84 obj.property_value_80 = var_property_value_80 # real var_property_value_81 = 85.85 obj.property_value_81 = var_property_value_81 # real var_property_value_82 = 86.86 obj.property_value_82 = var_property_value_82 # real var_property_value_83 = 87.87 obj.property_value_83 = var_property_value_83 # real var_property_value_84 = 88.88 obj.property_value_84 = var_property_value_84 # real var_property_value_85 = 89.89 obj.property_value_85 = var_property_value_85 # real var_property_value_86 = 90.9 obj.property_value_86 = var_property_value_86 # real 
var_property_value_87 = 91.91 obj.property_value_87 = var_property_value_87 # real var_property_value_88 = 92.92 obj.property_value_88 = var_property_value_88 # real var_property_value_89 = 93.93 obj.property_value_89 = var_property_value_89 # real var_property_value_90 = 94.94 obj.property_value_90 = var_property_value_90 # real var_property_value_91 = 95.95 obj.property_value_91 = var_property_value_91 # real var_property_value_92 = 96.96 obj.property_value_92 = var_property_value_92 # real var_property_value_93 = 97.97 obj.property_value_93 = var_property_value_93 # real var_property_value_94 = 98.98 obj.property_value_94 = var_property_value_94 # real var_property_value_95 = 99.99 obj.property_value_95 = var_property_value_95 # real var_property_value_96 = 100.1 obj.property_value_96 = var_property_value_96 # real var_property_value_97 = 101.101 obj.property_value_97 = var_property_value_97 # real var_property_value_98 = 102.102 obj.property_value_98 = var_property_value_98 # real var_property_value_99 = 103.103 obj.property_value_99 = var_property_value_99 # real var_property_value_100 = 104.104 obj.property_value_100 = var_property_value_100 # real var_property_value_101 = 105.105 obj.property_value_101 = var_property_value_101 # real var_property_value_102 = 106.106 obj.property_value_102 = var_property_value_102 # real var_property_value_103 = 107.107 obj.property_value_103 = var_property_value_103 # real var_property_value_104 = 108.108 obj.property_value_104 = var_property_value_104 # real var_property_value_105 = 109.109 obj.property_value_105 = var_property_value_105 # real var_property_value_106 = 110.11 obj.property_value_106 = var_property_value_106 # real var_property_value_107 = 111.111 obj.property_value_107 = var_property_value_107 # real var_property_value_108 = 112.112 obj.property_value_108 = var_property_value_108 # real var_property_value_109 = 113.113 obj.property_value_109 = var_property_value_109 # real var_property_value_110 = 114.114 
obj.property_value_110 = var_property_value_110 # real var_property_value_111 = 115.115 obj.property_value_111 = var_property_value_111 # real var_property_value_112 = 116.116 obj.property_value_112 = var_property_value_112 # real var_property_value_113 = 117.117 obj.property_value_113 = var_property_value_113 # real var_property_value_114 = 118.118 obj.property_value_114 = var_property_value_114 # real var_property_value_115 = 119.119 obj.property_value_115 = var_property_value_115 # real var_property_value_116 = 120.12 obj.property_value_116 = var_property_value_116 # real var_property_value_117 = 121.121 obj.property_value_117 = var_property_value_117 # real var_property_value_118 = 122.122 obj.property_value_118 = var_property_value_118 # real var_property_value_119 = 123.123 obj.property_value_119 = var_property_value_119 # real var_property_value_120 = 124.124 obj.property_value_120 = var_property_value_120 # real var_property_value_121 = 125.125 obj.property_value_121 = var_property_value_121 # real var_property_value_122 = 126.126 obj.property_value_122 = var_property_value_122 # real var_property_value_123 = 127.127 obj.property_value_123 = var_property_value_123 # real var_property_value_124 = 128.128 obj.property_value_124 = var_property_value_124 # real var_property_value_125 = 129.129 obj.property_value_125 = var_property_value_125 # real var_property_value_126 = 130.13 obj.property_value_126 = var_property_value_126 # real var_property_value_127 = 131.131 obj.property_value_127 = var_property_value_127 # real var_property_value_128 = 132.132 obj.property_value_128 = var_property_value_128 # real var_property_value_129 = 133.133 obj.property_value_129 = var_property_value_129 # real var_property_value_130 = 134.134 obj.property_value_130 = var_property_value_130 # real var_property_value_131 = 135.135 obj.property_value_131 = var_property_value_131 # real var_property_value_132 = 136.136 obj.property_value_132 = var_property_value_132 # real 
var_property_value_133 = 137.137 obj.property_value_133 = var_property_value_133 # real var_property_value_134 = 138.138 obj.property_value_134 = var_property_value_134 # real var_property_value_135 = 139.139 obj.property_value_135 = var_property_value_135 # real var_property_value_136 = 140.14 obj.property_value_136 = var_property_value_136 # real var_property_value_137 = 141.141 obj.property_value_137 = var_property_value_137 # real var_property_value_138 = 142.142 obj.property_value_138 = var_property_value_138 # real var_property_value_139 = 143.143 obj.property_value_139 = var_property_value_139 # real var_property_value_140 = 144.144 obj.property_value_140 = var_property_value_140 # real var_property_value_141 = 145.145 obj.property_value_141 = var_property_value_141 # real var_property_value_142 = 146.146 obj.property_value_142 = var_property_value_142 # real var_property_value_143 = 147.147 obj.property_value_143 = var_property_value_143 # real var_property_value_144 = 148.148 obj.property_value_144 = var_property_value_144 # real var_property_value_145 = 149.149 obj.property_value_145 = var_property_value_145 # real var_property_value_146 = 150.15 obj.property_value_146 = var_property_value_146 # real var_property_value_147 = 151.151 obj.property_value_147 = var_property_value_147 # real var_property_value_148 = 152.152 obj.property_value_148 = var_property_value_148 # real var_property_value_149 = 153.153 obj.property_value_149 = var_property_value_149 # real var_property_value_150 = 154.154 obj.property_value_150 = var_property_value_150 # real var_property_value_151 = 155.155 obj.property_value_151 = var_property_value_151 # real var_property_value_152 = 156.156 obj.property_value_152 = var_property_value_152 # real var_property_value_153 = 157.157 obj.property_value_153 = var_property_value_153 # real var_property_value_154 = 158.158 obj.property_value_154 = var_property_value_154 # real var_property_value_155 = 159.159 obj.property_value_155 = 
var_property_value_155 # real var_property_value_156 = 160.16 obj.property_value_156 = var_property_value_156 # real var_property_value_157 = 161.161 obj.property_value_157 = var_property_value_157 # real var_property_value_158 = 162.162 obj.property_value_158 = var_property_value_158 # real var_property_value_159 = 163.163 obj.property_value_159 = var_property_value_159 # real var_property_value_160 = 164.164 obj.property_value_160 = var_property_value_160 # real var_property_value_161 = 165.165 obj.property_value_161 = var_property_value_161 # real var_property_value_162 = 166.166 obj.property_value_162 = var_property_value_162 # real var_property_value_163 = 167.167 obj.property_value_163 = var_property_value_163 # real var_property_value_164 = 168.168 obj.property_value_164 = var_property_value_164 # real var_property_value_165 = 169.169 obj.property_value_165 = var_property_value_165 # real var_property_value_166 = 170.17 obj.property_value_166 = var_property_value_166 # real var_property_value_167 = 171.171 obj.property_value_167 = var_property_value_167 # real var_property_value_168 = 172.172 obj.property_value_168 = var_property_value_168 # real var_property_value_169 = 173.173 obj.property_value_169 = var_property_value_169 # real var_property_value_170 = 174.174 obj.property_value_170 = var_property_value_170 # real var_property_value_171 = 175.175 obj.property_value_171 = var_property_value_171 # real var_property_value_172 = 176.176 obj.property_value_172 = var_property_value_172 # real var_property_value_173 = 177.177 obj.property_value_173 = var_property_value_173 # real var_property_value_174 = 178.178 obj.property_value_174 = var_property_value_174 # real var_property_value_175 = 179.179 obj.property_value_175 = var_property_value_175 # real var_property_value_176 = 180.18 obj.property_value_176 = var_property_value_176 # real var_property_value_177 = 181.181 obj.property_value_177 = var_property_value_177 # real var_property_value_178 = 182.182 
obj.property_value_178 = var_property_value_178 # real var_property_value_179 = 183.183 obj.property_value_179 = var_property_value_179 # real var_property_value_180 = 184.184 obj.property_value_180 = var_property_value_180 # real var_property_value_181 = 185.185 obj.property_value_181 = var_property_value_181 # real var_property_value_182 = 186.186 obj.property_value_182 = var_property_value_182 # real var_property_value_183 = 187.187 obj.property_value_183 = var_property_value_183 # real var_property_value_184 = 188.188 obj.property_value_184 = var_property_value_184 # real var_property_value_185 = 189.189 obj.property_value_185 = var_property_value_185 # real var_property_value_186 = 190.19 obj.property_value_186 = var_property_value_186 # real var_property_value_187 = 191.191 obj.property_value_187 = var_property_value_187 # real var_property_value_188 = 192.192 obj.property_value_188 = var_property_value_188 # real var_property_value_189 = 193.193 obj.property_value_189 = var_property_value_189 # real var_property_value_190 = 194.194 obj.property_value_190 = var_property_value_190 # real var_property_value_191 = 195.195 obj.property_value_191 = var_property_value_191 # real var_property_value_192 = 196.196 obj.property_value_192 = var_property_value_192 # real var_property_value_193 = 197.197 obj.property_value_193 = var_property_value_193 # real var_property_value_194 = 198.198 obj.property_value_194 = var_property_value_194 # real var_property_value_195 = 199.199 obj.property_value_195 = var_property_value_195 # real var_property_value_196 = 200.2 obj.property_value_196 = var_property_value_196 # real var_property_value_197 = 201.201 obj.property_value_197 = var_property_value_197 # real var_property_value_198 = 202.202 obj.property_value_198 = var_property_value_198 # real var_property_value_199 = 203.203 obj.property_value_199 = var_property_value_199 # real var_property_value_200 = 204.204 obj.property_value_200 = var_property_value_200 # real 
var_property_value_201 = 205.205 obj.property_value_201 = var_property_value_201 # real var_property_value_202 = 206.206 obj.property_value_202 = var_property_value_202 # real var_property_value_203 = 207.207 obj.property_value_203 = var_property_value_203 # real var_property_value_204 = 208.208 obj.property_value_204 = var_property_value_204 # real var_property_value_205 = 209.209 obj.property_value_205 = var_property_value_205 # real var_property_value_206 = 210.21 obj.property_value_206 = var_property_value_206 # real var_property_value_207 = 211.211 obj.property_value_207 = var_property_value_207 # real var_property_value_208 = 212.212 obj.property_value_208 = var_property_value_208 # real var_property_value_209 = 213.213 obj.property_value_209 = var_property_value_209 # real var_property_value_210 = 214.214 obj.property_value_210 = var_property_value_210 # real var_property_value_211 = 215.215 obj.property_value_211 = var_property_value_211 # real var_property_value_212 = 216.216 obj.property_value_212 = var_property_value_212 # real var_property_value_213 = 217.217 obj.property_value_213 = var_property_value_213 # real var_property_value_214 = 218.218 obj.property_value_214 = var_property_value_214 # real var_property_value_215 = 219.219 obj.property_value_215 = var_property_value_215 # real var_property_value_216 = 220.22 obj.property_value_216 = var_property_value_216 # real var_property_value_217 = 221.221 obj.property_value_217 = var_property_value_217 # real var_property_value_218 = 222.222 obj.property_value_218 = var_property_value_218 # real var_property_value_219 = 223.223 obj.property_value_219 = var_property_value_219 # real var_property_value_220 = 224.224 obj.property_value_220 = var_property_value_220 # real var_property_value_221 = 225.225 obj.property_value_221 = var_property_value_221 # real var_property_value_222 = 226.226 obj.property_value_222 = var_property_value_222 # real var_property_value_223 = 227.227 obj.property_value_223 = 
var_property_value_223 # real var_property_value_224 = 228.228 obj.property_value_224 = var_property_value_224 # real var_property_value_225 = 229.229 obj.property_value_225 = var_property_value_225 # real var_property_value_226 = 230.23 obj.property_value_226 = var_property_value_226 # real var_property_value_227 = 231.231 obj.property_value_227 = var_property_value_227 # real var_property_value_228 = 232.232 obj.property_value_228 = var_property_value_228 # real var_property_value_229 = 233.233 obj.property_value_229 = var_property_value_229 # real var_property_value_230 = 234.234 obj.property_value_230 = var_property_value_230 # real var_property_value_231 = 235.235 obj.property_value_231 = var_property_value_231 # real var_property_value_232 = 236.236 obj.property_value_232 = var_property_value_232 # real var_property_value_233 = 237.237 obj.property_value_233 = var_property_value_233 # real var_property_value_234 = 238.238 obj.property_value_234 = var_property_value_234 # real var_property_value_235 = 239.239 obj.property_value_235 = var_property_value_235 # real var_property_value_236 = 240.24 obj.property_value_236 = var_property_value_236 # real var_property_value_237 = 241.241 obj.property_value_237 = var_property_value_237 # real var_property_value_238 = 242.242 obj.property_value_238 = var_property_value_238 # real var_property_value_239 = 243.243 obj.property_value_239 = var_property_value_239 # real var_property_value_240 = 244.244 obj.property_value_240 = var_property_value_240 # real var_property_value_241 = 245.245 obj.property_value_241 = var_property_value_241 # real var_property_value_242 = 246.246 obj.property_value_242 = var_property_value_242 # real var_property_value_243 = 247.247 obj.property_value_243 = var_property_value_243 # real var_property_value_244 = 248.248 obj.property_value_244 = var_property_value_244 # real var_property_value_245 = 249.249 obj.property_value_245 = var_property_value_245 # real var_property_value_246 = 250.25 
obj.property_value_246 = var_property_value_246 # real var_property_value_247 = 251.251 obj.property_value_247 = var_property_value_247 # real var_property_value_248 = 252.252 obj.property_value_248 = var_property_value_248 # real var_property_value_249 = 253.253 obj.property_value_249 = var_property_value_249 # real var_property_value_250 = 254.254 obj.property_value_250 = var_property_value_250 idf = IDF() idf.add(obj) idf.save(self.path, check=False) with open(self.path, mode='r') as f: for line in f: log.debug(line.strip()) idf2 = IDF(self.path) self.assertEqual(idf2.fluidpropertiessaturateds[0].name, var_name) self.assertEqual(idf2.fluidpropertiessaturateds[0].fluid_property_type, var_fluid_property_type) self.assertEqual(idf2.fluidpropertiessaturateds[0].fluid_phase, var_fluid_phase) self.assertEqual(idf2.fluidpropertiessaturateds[0].temperature_values_name, var_temperature_values_name) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_1, var_property_value_1) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_2, var_property_value_2) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_3, var_property_value_3) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_4, var_property_value_4) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_5, var_property_value_5) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_6, var_property_value_6) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_7, var_property_value_7) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_8, var_property_value_8) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_9, var_property_value_9) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_10, var_property_value_10) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_11, var_property_value_11) 
self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_12, var_property_value_12) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_13, var_property_value_13) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_14, var_property_value_14) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_15, var_property_value_15) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_16, var_property_value_16) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_17, var_property_value_17) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_18, var_property_value_18) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_19, var_property_value_19) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_20, var_property_value_20) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_21, var_property_value_21) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_22, var_property_value_22) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_23, var_property_value_23) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_24, var_property_value_24) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_25, var_property_value_25) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_26, var_property_value_26) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_27, var_property_value_27) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_28, var_property_value_28) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_29, var_property_value_29) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_30, var_property_value_30) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_31, var_property_value_31) 
self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_32, var_property_value_32) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_33, var_property_value_33) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_34, var_property_value_34) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_35, var_property_value_35) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_36, var_property_value_36) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_37, var_property_value_37) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_38, var_property_value_38) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_39, var_property_value_39) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_40, var_property_value_40) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_41, var_property_value_41) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_42, var_property_value_42) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_43, var_property_value_43) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_44, var_property_value_44) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_45, var_property_value_45) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_46, var_property_value_46) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_47, var_property_value_47) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_48, var_property_value_48) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_49, var_property_value_49) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_50, var_property_value_50) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_51, var_property_value_51) 
self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_52, var_property_value_52) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_53, var_property_value_53) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_54, var_property_value_54) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_55, var_property_value_55) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_56, var_property_value_56) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_57, var_property_value_57) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_58, var_property_value_58) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_59, var_property_value_59) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_60, var_property_value_60) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_61, var_property_value_61) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_62, var_property_value_62) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_63, var_property_value_63) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_64, var_property_value_64) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_65, var_property_value_65) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_66, var_property_value_66) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_67, var_property_value_67) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_68, var_property_value_68) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_69, var_property_value_69) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_70, var_property_value_70) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_71, var_property_value_71) 
self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_72, var_property_value_72) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_73, var_property_value_73) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_74, var_property_value_74) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_75, var_property_value_75) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_76, var_property_value_76) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_77, var_property_value_77) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_78, var_property_value_78) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_79, var_property_value_79) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_80, var_property_value_80) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_81, var_property_value_81) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_82, var_property_value_82) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_83, var_property_value_83) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_84, var_property_value_84) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_85, var_property_value_85) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_86, var_property_value_86) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_87, var_property_value_87) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_88, var_property_value_88) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_89, var_property_value_89) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_90, var_property_value_90) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_91, var_property_value_91) 
self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_92, var_property_value_92) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_93, var_property_value_93) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_94, var_property_value_94) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_95, var_property_value_95) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_96, var_property_value_96) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_97, var_property_value_97) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_98, var_property_value_98) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_99, var_property_value_99) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_100, var_property_value_100) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_101, var_property_value_101) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_102, var_property_value_102) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_103, var_property_value_103) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_104, var_property_value_104) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_105, var_property_value_105) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_106, var_property_value_106) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_107, var_property_value_107) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_108, var_property_value_108) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_109, var_property_value_109) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_110, var_property_value_110) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_111, 
var_property_value_111) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_112, var_property_value_112) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_113, var_property_value_113) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_114, var_property_value_114) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_115, var_property_value_115) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_116, var_property_value_116) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_117, var_property_value_117) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_118, var_property_value_118) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_119, var_property_value_119) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_120, var_property_value_120) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_121, var_property_value_121) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_122, var_property_value_122) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_123, var_property_value_123) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_124, var_property_value_124) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_125, var_property_value_125) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_126, var_property_value_126) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_127, var_property_value_127) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_128, var_property_value_128) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_129, var_property_value_129) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_130, var_property_value_130) 
self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_131, var_property_value_131) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_132, var_property_value_132) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_133, var_property_value_133) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_134, var_property_value_134) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_135, var_property_value_135) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_136, var_property_value_136) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_137, var_property_value_137) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_138, var_property_value_138) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_139, var_property_value_139) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_140, var_property_value_140) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_141, var_property_value_141) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_142, var_property_value_142) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_143, var_property_value_143) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_144, var_property_value_144) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_145, var_property_value_145) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_146, var_property_value_146) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_147, var_property_value_147) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_148, var_property_value_148) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_149, var_property_value_149) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_150, 
var_property_value_150) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_151, var_property_value_151) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_152, var_property_value_152) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_153, var_property_value_153) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_154, var_property_value_154) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_155, var_property_value_155) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_156, var_property_value_156) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_157, var_property_value_157) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_158, var_property_value_158) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_159, var_property_value_159) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_160, var_property_value_160) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_161, var_property_value_161) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_162, var_property_value_162) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_163, var_property_value_163) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_164, var_property_value_164) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_165, var_property_value_165) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_166, var_property_value_166) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_167, var_property_value_167) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_168, var_property_value_168) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_169, var_property_value_169) 
self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_170, var_property_value_170) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_171, var_property_value_171) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_172, var_property_value_172) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_173, var_property_value_173) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_174, var_property_value_174) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_175, var_property_value_175) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_176, var_property_value_176) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_177, var_property_value_177) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_178, var_property_value_178) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_179, var_property_value_179) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_180, var_property_value_180) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_181, var_property_value_181) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_182, var_property_value_182) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_183, var_property_value_183) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_184, var_property_value_184) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_185, var_property_value_185) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_186, var_property_value_186) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_187, var_property_value_187) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_188, var_property_value_188) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_189, 
var_property_value_189) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_190, var_property_value_190) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_191, var_property_value_191) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_192, var_property_value_192) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_193, var_property_value_193) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_194, var_property_value_194) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_195, var_property_value_195) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_196, var_property_value_196) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_197, var_property_value_197) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_198, var_property_value_198) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_199, var_property_value_199) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_200, var_property_value_200) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_201, var_property_value_201) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_202, var_property_value_202) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_203, var_property_value_203) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_204, var_property_value_204) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_205, var_property_value_205) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_206, var_property_value_206) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_207, var_property_value_207) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_208, var_property_value_208) 
self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_209, var_property_value_209) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_210, var_property_value_210) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_211, var_property_value_211) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_212, var_property_value_212) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_213, var_property_value_213) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_214, var_property_value_214) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_215, var_property_value_215) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_216, var_property_value_216) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_217, var_property_value_217) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_218, var_property_value_218) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_219, var_property_value_219) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_220, var_property_value_220) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_221, var_property_value_221) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_222, var_property_value_222) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_223, var_property_value_223) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_224, var_property_value_224) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_225, var_property_value_225) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_226, var_property_value_226) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_227, var_property_value_227) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_228, 
var_property_value_228) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_229, var_property_value_229) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_230, var_property_value_230) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_231, var_property_value_231) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_232, var_property_value_232) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_233, var_property_value_233) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_234, var_property_value_234) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_235, var_property_value_235) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_236, var_property_value_236) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_237, var_property_value_237) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_238, var_property_value_238) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_239, var_property_value_239) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_240, var_property_value_240) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_241, var_property_value_241) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_242, var_property_value_242) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_243, var_property_value_243) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_244, var_property_value_244) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_245, var_property_value_245) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_246, var_property_value_246) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_247, var_property_value_247) 
self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_248, var_property_value_248) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_249, var_property_value_249) self.assertAlmostEqual(idf2.fluidpropertiessaturateds[0].property_value_250, var_property_value_250)
53.438095
112
0.746338
6,939
56,110
5.560167
0.045828
0.421181
0.311026
0.129594
0.782956
0.77956
0.777098
0
0
0
0
0.110306
0.193441
56,110
1,050
113
53.438095
0.742222
0.022901
0
0
0
0
0.001191
0.000421
0
0
0
0
0.323155
1
0.003817
false
0
0.010178
0
0.015267
0
0
0
0
null
1
1
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
d609d6ef7f8b1a34d6aa2bb6de9a0305fc43be7f
47
py
Python
src/base/__init__.py
jhernandez18p/backend-lmp
853e274f37fe8b3c6f811a4ccd8f33bf020015d2
[ "MIT" ]
14
2019-10-03T19:37:30.000Z
2019-10-16T02:12:32.000Z
src/base/__init__.py
jhernandez18p/backend-lmp
853e274f37fe8b3c6f811a4ccd8f33bf020015d2
[ "MIT" ]
6
2020-02-11T23:20:46.000Z
2022-03-11T23:32:23.000Z
src/base/__init__.py
jhernandez18p/backend-lmp
853e274f37fe8b3c6f811a4ccd8f33bf020015d2
[ "MIT" ]
null
null
null
default_app_config = 'src.base.apps.BaseConfig'
47
47
0.829787
7
47
5.285714
1
0
0
0
0
0
0
0
0
0
0
0
0.042553
47
1
47
47
0.822222
0
0
0
0
0
0.5
0.5
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
d60a1c49143fff5cdc759aad728b9b4c40e54a11
289
py
Python
nobos_torch_lib/learning_rate_schedulers/learning_rate_scheduler_dummy.py
noboevbo/nobos_torch_lib
11bc0a06b4cb5c273905d23c592cb3d847149a31
[ "MIT" ]
2
2020-10-08T12:50:50.000Z
2020-12-14T13:36:30.000Z
nobos_torch_lib/learning_rate_schedulers/learning_rate_scheduler_dummy.py
noboevbo/nobos_torch_lib
11bc0a06b4cb5c273905d23c592cb3d847149a31
[ "MIT" ]
null
null
null
nobos_torch_lib/learning_rate_schedulers/learning_rate_scheduler_dummy.py
noboevbo/nobos_torch_lib
11bc0a06b4cb5c273905d23c592cb3d847149a31
[ "MIT" ]
2
2021-05-06T11:40:35.000Z
2021-09-30T01:10:59.000Z
from torch.optim import Optimizer from nobos_torch_lib.learning_rate_schedulers.learning_rate_scheduler_base import LearningRateSchedulerBase class LearningRateSchedulerDummy(LearningRateSchedulerBase): def __call__(self, optimizer: Optimizer, epoch: int): return optimizer
32.111111
107
0.84083
30
289
7.733333
0.7
0.103448
0
0
0
0
0
0
0
0
0
0
0.114187
289
8
108
36.125
0.90625
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.4
0.2
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
4
d60aeaf6504ec4da2f70fa924e02065c72eb6489
15
py
Python
imprimir/algoritmo16.py
lagcrs/algoritmos
5ee860c71db8ac2ef8bbe6cc87726938b1ca9c72
[ "Apache-2.0" ]
null
null
null
imprimir/algoritmo16.py
lagcrs/algoritmos
5ee860c71db8ac2ef8bbe6cc87726938b1ca9c72
[ "Apache-2.0" ]
null
null
null
imprimir/algoritmo16.py
lagcrs/algoritmos
5ee860c71db8ac2ef8bbe6cc87726938b1ca9c72
[ "Apache-2.0" ]
null
null
null
x = 10 print(x)
7.5
8
0.6
4
15
2.25
0.75
0
0
0
0
0
0
0
0
0
0
0.166667
0.2
15
2
8
7.5
0.583333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
d620637f3a224d20a3fe59cc9dca26a8cb5394da
630
py
Python
generated-libraries/python/netapp/lun/alignment_state.py
radekg/netapp-ontap-lib-get
6445ebb071ec147ea82a486fbe9f094c56c5c40d
[ "MIT" ]
2
2017-03-28T15:31:26.000Z
2018-08-16T22:15:18.000Z
generated-libraries/python/netapp/lun/alignment_state.py
radekg/netapp-ontap-lib-get
6445ebb071ec147ea82a486fbe9f094c56c5c40d
[ "MIT" ]
null
null
null
generated-libraries/python/netapp/lun/alignment_state.py
radekg/netapp-ontap-lib-get
6445ebb071ec147ea82a486fbe9f094c56c5c40d
[ "MIT" ]
null
null
null
class AlignmentState(basestring): """ aligned|misaligned|partial-writes|indeterminate Possible values: <ul> <li> "aligned" - All or most of the IO to the LUN is aligned to the underlying file, <li> "misaligned" - A significant amount of IO to the LUN is not aligned to the underlying file, <li> "partial_writes" - A significant amount of IO to the LUN is partial, <li> "indeterminate" - There is not enough IO to determine the LUN's alignment state </ul> """ @staticmethod def get_api_name(): return "alignment-state"
30
65
0.620635
81
630
4.790123
0.481481
0.064433
0.054124
0.07732
0.340206
0.309278
0.164948
0.164948
0.164948
0
0
0
0.3
630
20
66
31.5
0.879819
0.704762
0
0
0
0
0.119048
0
0
0
0
0
0
1
0.25
true
0
0
0.25
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
4
c39c12227e7116198f189afa0a8357c043b1f158
63,354
py
Python
rest-service/manager_rest/test/endpoints/test_deployment_update_step_extraction.py
cloudify-cosmo/cloudify-manager
4a3f44ceb49d449bc5ebc8766b1c7b9c174ff972
[ "Apache-2.0" ]
124
2015-01-22T22:28:37.000Z
2022-02-26T23:12:06.000Z
rest-service/manager_rest/test/endpoints/test_deployment_update_step_extraction.py
cloudify-cosmo/cloudify-manager
4a3f44ceb49d449bc5ebc8766b1c7b9c174ff972
[ "Apache-2.0" ]
345
2015-01-08T15:49:40.000Z
2022-03-29T08:33:00.000Z
rest-service/manager_rest/test/endpoints/test_deployment_update_step_extraction.py
cloudify-cosmo/cloudify-manager
4a3f44ceb49d449bc5ebc8766b1c7b9c174ff972
[ "Apache-2.0" ]
77
2015-01-07T14:04:35.000Z
2022-03-07T22:46:00.000Z
import json import unittest from manager_rest.test.attribute import attr from manager_rest.test.base_test import LATEST_API_VERSION from manager_rest.storage import models from manager_rest.deployment_update.step_extractor import ( PROPERTY, PROPERTIES, OUTPUT, OUTPUTS, WORKFLOW, WORKFLOWS, NODE, NODES, OPERATION, OPERATIONS, RELATIONSHIP, RELATIONSHIPS, SOURCE_OPERATIONS, TARGET_OPERATIONS, TYPE, GROUP, GROUPS, POLICY_TYPE, POLICY_TYPES, POLICY_TRIGGER, POLICY_TRIGGERS, HOST_ID, PLUGIN, DEPLOYMENT_PLUGINS_TO_INSTALL, PLUGINS_TO_INSTALL, DESCRIPTION, extract_steps, _update_topology_order_of_add_node_steps, _find_relationship ) from manager_rest.deployment_update.step_extractor import DeploymentUpdateStep from manager_rest.test.utils import get_resource @attr(client_min_version=2.1, client_max_version=LATEST_API_VERSION) class StepExtractorTestCase(unittest.TestCase): @staticmethod def _get_node_scheme(node_id='node1', **params): node = { 'id': node_id, OPERATIONS: {}, PROPERTIES: {}, RELATIONSHIPS: [], TYPE: '', HOST_ID: '', PLUGINS_TO_INSTALL: [] } node.update(params) return node @staticmethod def _get_relationship_scheme(): return { SOURCE_OPERATIONS: {}, "target_id": "", TARGET_OPERATIONS: {}, TYPE: "", PROPERTIES: {} } def setUp(self): super(StepExtractorTestCase, self).setUp() self.deployment = models.Deployment(id='deployment_id') self.deployment_plan = { DESCRIPTION: None, NODES: {}, OPERATIONS: {}, PROPERTIES: {}, RELATIONSHIPS: [], TYPE: '', GROUPS: {}, POLICY_TYPES: {}, POLICY_TRIGGERS: {}, DEPLOYMENT_PLUGINS_TO_INSTALL: {}, OUTPUTS: {}, WORKFLOWS: {} } def test_entity_name(self): step = DeploymentUpdateStep(action='add', entity_type=NODE, entity_id='nodes:node1') self.assertEqual('node1', step.entity_name) def test_update_topology_order_of_add_node_steps(self): add_node_a_step = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='nodes:node_a') add_node_b_step = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='nodes:node_b') 
add_node_c_step = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='nodes:node_c') add_node_d_step = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='nodes:node_d') add_node_e_step = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='nodes:node_e') add_node_f_step = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='nodes:node_f') steps = [add_node_a_step, add_node_b_step, add_node_c_step, add_node_d_step, add_node_e_step, add_node_f_step] # Imagine the following relationships between the added nodes: # # e # ^^ # | \ # c d # ^ ^ # / \ # a b f topologically_sorted_added_nodes = ['node_f', 'node_a', 'node_b', 'node_c', 'node_d', 'node_e'] _update_topology_order_of_add_node_steps( steps, topologically_sorted_added_nodes) self.assertEqual(5, add_node_e_step.topology_order) self.assertEqual(4, add_node_d_step.topology_order) self.assertEqual(3, add_node_c_step.topology_order) self.assertEqual(2, add_node_b_step.topology_order) self.assertEqual(1, add_node_a_step.topology_order) self.assertEqual(0, add_node_f_step.topology_order) def test_create_added_nodes_graph(self): self.deployment_plan[NODES] = [ self._get_node_scheme('node_a', relationships=[ {"target_id": 'node_c'} ]), self._get_node_scheme('node_b', relationships=[ {"target_id": 'node_c'} ]), self._get_node_scheme('node_c', relationships=[ {"target_id": 'node_e'} ]), self._get_node_scheme('node_d', relationships=[ {"target_id": 'node_e'} ]), self._get_node_scheme('node_e'), self._get_node_scheme('node_f'), ] steps, _ = extract_steps([], self.deployment, self.deployment_plan) order_by_id = {s.entity_id: s.topology_order for s in steps} assert order_by_id['nodes:node_c'] > order_by_id['nodes:node_a'] assert order_by_id['nodes:node_c'] > order_by_id['nodes:node_b'] assert order_by_id['nodes:node_e'] > order_by_id['nodes:node_c'] assert order_by_id['nodes:node_e'] > order_by_id['nodes:node_d'] def test_description_no_change(self): self.deployment.description 
= 'description' self.deployment_plan[DESCRIPTION] = 'description' steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [] def test_description_modify_description(self): self.deployment.description = 'description_old' self.deployment_plan[DESCRIPTION] = 'description_new' steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='modify', entity_type=DESCRIPTION, entity_id='description') ] def test_outputs_no_change(self): self.deployment.outputs = {'output1': 'output1_value'} self.deployment_plan[OUTPUTS] = self.deployment.outputs steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [] def test_outputs_add_output(self): self.deployment_plan[OUTPUTS] = {'output1': 'output1_value'} steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='add', entity_type=OUTPUT, entity_id='outputs:output1') ] def test_outputs_remove_output(self): self.deployment.outputs = {'output1': 'output1_value'} steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='remove', entity_type=OUTPUT, entity_id='outputs:output1') ] def test_outputs_modify_output(self): self.deployment.outputs = {'output1': 'output1_value'} self.deployment_plan[OUTPUTS] = {'output1': 'output1_modified_value'} steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='modify', entity_type=OUTPUT, entity_id='outputs:output1') ] def test_workflows_no_change(self): self.deployment.workflows = { 'intact_workflow': { 'operation': 'module_name.foo', 'plugin': 'plugin_for_workflows' } } self.deployment_plan[WORKFLOWS] = self.deployment.workflows steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [] def test_workflows_add_workflow_of_existing_plugin(self): self.deployment_plan[WORKFLOWS] = { 
'added_workflow': { 'operation': 'module_name.foo', 'plugin': 'plugin_for_workflows' } } steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='add', entity_type=WORKFLOW, entity_id='workflows:added_workflow') ] def test_workflows_add_workflow_script(self): self.deployment_plan[WORKFLOWS] = { 'new_workflow': { 'plugin': 'script', } } steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='add', entity_type=WORKFLOW, entity_id='workflows:new_workflow') ] def test_workflows_remove_workflow(self): self.deployment.workflows = { 'removed_workflow': { 'operation': 'module_name.foo', 'plugin': 'plugin_for_workflows' } } steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='remove', entity_type=WORKFLOW, entity_id='workflows:removed_workflow') ] def test_workflows_modify_workflow_of_existing_plugin(self): self.deployment.workflows = { 'added_workflow': { 'operation': 'module_name.foo', 'plugin': 'plugin_for_workflows' } } self.deployment_plan[WORKFLOWS] = { 'added_workflow': { 'operation': 'module_name.bar', 'plugin': 'plugin_for_workflows' } } steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='modify', entity_type=WORKFLOW, entity_id='workflows:added_workflow') ] def test_nodes_no_change(self): nodes = [self._get_node_scheme()] self.deployment_plan[NODES] = nodes steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [] def test_nodes_add_node(self): self.deployment_plan[NODES] = [self._get_node_scheme()] steps, _ = extract_steps({}, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='nodes:node1') ] def test_nodes_remove_node(self): nodes = [self._get_node_scheme()] steps, _ = extract_steps(nodes, self.deployment, 
self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='remove', entity_type=NODE, entity_id='nodes:node1') ] def test_nodes_add_and_remove_node_changed_type(self): nodes = [self._get_node_scheme(type='old_type')] self.deployment_plan[NODES] = [self._get_node_scheme(type='new_type')] supported_steps, unsupported_steps = \ extract_steps(nodes, self.deployment, self.deployment_plan) assert len(supported_steps) == 0 assert unsupported_steps == [ DeploymentUpdateStep( action='modify', entity_type=NODE, entity_id='nodes:node1', supported=False), ] def test_nodes_add_and_remove_node_changed_type_and_host_id(self): nodes = [self._get_node_scheme(host_id='old_host_id')] self.deployment_plan[NODES] = [ self._get_node_scheme(type='new_host_id')] supported_steps, unsupported_steps = \ extract_steps(nodes, self.deployment, self.deployment_plan) assert len(supported_steps) == 0 assert unsupported_steps == [ DeploymentUpdateStep( action='modify', entity_type=NODE, entity_id='nodes:node1', supported=False), ] def test_node_properties_no_change(self): nodes = [self._get_node_scheme( properties={'property1': 'property1_value'} )] self.deployment_plan[NODES] = nodes steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [] def test_node_properties_add_property(self): nodes = [self._get_node_scheme()] self.deployment_plan[NODES] = [ self._get_node_scheme(properties={'property1': 'property1_value'})] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='add', entity_type=PROPERTY, entity_id='nodes:node1:properties:property1') ] def test_node_properties_remove_property(self): nodes = [self._get_node_scheme(properties={ 'property1': 'property1_value'})] self.deployment_plan[NODES] = [self._get_node_scheme()] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='remove', entity_type=PROPERTY, 
entity_id='nodes:node1:properties:property1') ] def test_node_properties_modify_property(self): nodes = [self._get_node_scheme(properties={ 'property1': 'property1_value'})] self.deployment_plan[NODES] = [self._get_node_scheme(properties={ 'property1': 'property1_modified_value'})] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='modify', entity_type=PROPERTY, entity_id='nodes:node1:properties:property1') ] def test_node_operations_no_change(self): nodes = [self._get_node_scheme(operations={ 'full.operation1.name': { 'operation1_field': 'operation1_field_value' } })] self.deployment_plan[NODES] = nodes steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [] def test_node_operations_add_operation(self): nodes = [self._get_node_scheme()] self.deployment_plan[NODES] = [self._get_node_scheme(operations={ 'full.operation1.name': { 'operation1_field': 'operation1_field_value' } })] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='add', entity_type=OPERATION, entity_id='nodes:node1:operations:full.operation1.name') ] def test_node_operations_remove_operation(self): nodes = [self._get_node_scheme(operations={ 'full.operation1.name': { 'operation1_field': 'operation1_field_value' } })] self.deployment_plan[NODES] = [self._get_node_scheme()] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='remove', entity_type=OPERATION, entity_id='nodes:node1:operations:full.operation1.name') ] def test_node_operations_modify_operation(self): nodes = [self._get_node_scheme(operations={ 'full.operation1.name': { 'operation1_field': 'operation1_field_value' } })] self.deployment_plan[NODES] = [self._get_node_scheme(operations={ 'full.operation1.name': { 'operation1_field': 'operation1_modified_field_value' } })] steps, _ = extract_steps(nodes, 
self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='modify', entity_type=OPERATION, entity_id='nodes:node1:operations:full.operation1.name') ] def test_relationships_no_change(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target' } ])] self.deployment_plan[NODES] = nodes steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [] def test_relationships_add_relationship(self): nodes = [self._get_node_scheme()] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target' } ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='add', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]') ] def test_relationships_remove_relationship(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target' } ])] self.deployment_plan[NODES] = [self._get_node_scheme()] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='remove', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]') ] def test_relationships_change_type(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target' } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'different_relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target' } ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='remove', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]'), 
DeploymentUpdateStep( action='add', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]') ] def test_relationships_change_target_non_contained_in(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'target_id': 'relationship_target', 'type_hierarchy': ['rel_hierarchy'] } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'target_id': 'different_relationship_target', 'type_hierarchy': ['rel_hierarchy'] } ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='remove', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]'), DeploymentUpdateStep( action='add', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]') ] def test_relationships_change_target_contained_in(self): nodes = [self._get_node_scheme(relationships=[ { 'target_id': 'relationship_target', 'type_hierarchy': ['rel_hierarchy', 'cloudify.relationships.contained_in'] } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'target_id': 'different_relationship_target', 'type_hierarchy': ['rel_hierarchy', 'cloudify.relationships.contained_in']} ])] _, unsupported_steps = extract_steps( nodes, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='modify', entity_type=NODE, entity_id='nodes:node1', supported=False), ] def test_relationships_change_type_and_target(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target' } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'different_relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'different_relationship_target' } ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( 
action='remove', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]'), DeploymentUpdateStep( action='add', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]') ] def test_relationships_modify_order(self): nodes = [self._get_node_scheme(relationships=[ {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_1'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_2'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_3'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_4'} ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_2'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_4'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_3'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_1'} ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) # we don't care for the order the steps were created in assert set(steps) == { DeploymentUpdateStep( action='modify', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]:[3]'), DeploymentUpdateStep( action='modify', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[1]:[0]'), DeploymentUpdateStep( action='modify', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[3]:[1]') } def test_relationships_modify_order_with_add_and_remove(self): nodes = [self._get_node_scheme(relationships=[ {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_1'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 
'relationship_target_2'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_3'}, ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_5'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_2'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_4'}, {'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target_1'} ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) # we don't care for the order the steps were created in assert set(steps) == { DeploymentUpdateStep( action='modify', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]:[3]'), DeploymentUpdateStep( action='remove', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[2]'), DeploymentUpdateStep( action='add', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[2]'), DeploymentUpdateStep( action='add', entity_type=RELATIONSHIP, entity_id='nodes:node1:relationships:[0]') } def test_relationships_add_source_operation(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', SOURCE_OPERATIONS: {} } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', SOURCE_OPERATIONS: {'full.operation1': {}} } ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='add', entity_type=OPERATION, entity_id='nodes:node1:relationships:[0]:' 'source_operations:full.operation1') ] def test_relationships_remove_source_operation(self): nodes = [self._get_node_scheme(relationships=[ 
{ 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', SOURCE_OPERATIONS: {'full.operation1': {}} } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', SOURCE_OPERATIONS: {} } ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='remove', entity_type=OPERATION, entity_id='nodes:node1:relationships:[0]:' 'source_operations:full.operation1') ] def test_duplicate_relationship(self): rel = { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', } nodes = [self._get_node_scheme(relationships=[rel, rel])] self.deployment_plan[NODES] = [ self._get_node_scheme(relationships=[rel, rel])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [] def test_relationships_modify_source_operation(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', SOURCE_OPERATIONS: { 'full.operation1': { 'op1_old_field': 'op1_field_value' } } } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', SOURCE_OPERATIONS: { 'full.operation1': { 'op1_new_field': 'op1_field_value' } } } ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='modify', entity_type=OPERATION, entity_id='nodes:node1:relationships:[0]:' 'source_operations:full.operation1') ] def test_relationships_add_target_operation(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', TARGET_OPERATIONS: {} } ])] 
self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', TARGET_OPERATIONS: {'full.operation1': {}} } ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='add', entity_type=OPERATION, entity_id='nodes:node1:relationships:[0]:' 'target_operations:full.operation1') ] def test_relationships_remove_target_operation(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', TARGET_OPERATIONS: {'full.operation1': {}} } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', TARGET_OPERATIONS: {} } ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='remove', entity_type=OPERATION, entity_id='nodes:node1:relationships:[0]:' 'target_operations:full.operation1') ] def test_relationships_modify_target_operation(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', TARGET_OPERATIONS: { 'full.operation1': { 'op1_old_field': 'op1_field_value' } } } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', TARGET_OPERATIONS: { 'full.operation1': { 'op1_new_field': 'op1_field_value' } } } ])] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='modify', entity_type=OPERATION, entity_id='nodes:node1:relationships:[0]:' 'target_operations:full.operation1') ] def test_get_matching_relationship(self): relationships_with_match = [ 
{'type': 'typeA', 'target_id': 'id_1', 'field2': 'value2'}, {'type': 'typeB', 'target_id': 'id_1'}, {'type': 'typeB', 'target_id': 'id_2'}, {'type': 'typeA', 'target_id': 'id_2'} ] relationships_with_no_match = [ {'type': 'typeB', 'target_id': 'id_1'}, {'type': 'typeB', 'target_id': 'id_2'}, {'type': 'typeA', 'target_id': 'id_2'} ] assert _find_relationship( relationships_with_match, 'typeA', 'id_1' ) == ({'type': 'typeA', 'target_id': 'id_1', 'field2': 'value2'}, 0) assert _find_relationship( relationships_with_no_match, 'typeA', 'id_1' ) == (None, None) def test_sort_steps_compare_action(self): add_step = DeploymentUpdateStep( action='add', entity_type='', entity_id='') remove_step = DeploymentUpdateStep( action='remove', entity_type='', entity_id='') modify_step = DeploymentUpdateStep( action='modify', entity_type='', entity_id='') steps = [add_step, remove_step, modify_step] expected_step_order = [remove_step, add_step, modify_step] steps.sort() assert steps == expected_step_order def test_sort_steps_add_node_before_add_relationship(self): add_node_step = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='') add_relationship_step = DeploymentUpdateStep( action='add', entity_type=RELATIONSHIP, entity_id='') steps = [add_relationship_step, add_node_step] expected_step_order = [add_node_step, add_relationship_step] steps.sort() assert steps == expected_step_order def test_sort_steps_remove_relationship_before_remove_node(self): remove_relationship_step = DeploymentUpdateStep( action='remove', entity_type=RELATIONSHIP, entity_id='') remove_node_step = DeploymentUpdateStep( action='remove', entity_type=NODE, entity_id='') steps = [remove_node_step, remove_relationship_step] expected_step_order = [remove_relationship_step, remove_node_step] steps.sort() assert steps == expected_step_order def test_sort_steps_higher_topology_before_lower_topology(self): default_topology_step = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='') 
topology_order_1_step = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='', topology_order=1) topology_order_2_step = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='', topology_order=2) steps = [topology_order_1_step, default_topology_step, topology_order_2_step] expected_step_order = [ topology_order_2_step, topology_order_1_step, default_topology_step] steps.sort() assert steps == expected_step_order def test_sort_steps_all_comparison_considerations(self): add_node_step_default_topology = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='') add_node_step_topology_order_1 = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='', topology_order=1) add_node_step_topology_order_2 = DeploymentUpdateStep( action='add', entity_type=NODE, entity_id='', topology_order=2) remove_relationship_step = DeploymentUpdateStep( action='remove', entity_type=RELATIONSHIP, entity_id='') remove_node_step = DeploymentUpdateStep( action='remove', entity_type=NODE, entity_id='') add_relationship_step = DeploymentUpdateStep( action='add', entity_type=RELATIONSHIP, entity_id='') modify_property_step = DeploymentUpdateStep( action='modify', entity_type=PROPERTY, entity_id='') steps = [add_node_step_topology_order_1, remove_node_step, modify_property_step, add_relationship_step, add_node_step_default_topology, remove_relationship_step, add_node_step_topology_order_2] expected_step_order = [ remove_relationship_step, remove_node_step, add_node_step_topology_order_2, add_node_step_topology_order_1, add_node_step_default_topology, add_relationship_step, modify_property_step] steps.sort() assert steps == expected_step_order def test_relationships_intact_property(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', PROPERTIES: { 'property1': 'property1_value' } } ])] self.deployment_plan[NODES] = nodes steps, _ = extract_steps(nodes, 
self.deployment, self.deployment_plan) assert steps == [] def test_relationships_add_property(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', 'properties': {} } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', PROPERTIES: { 'property1': 'property1_different_value' } } ])] _, unsupported_steps = extract_steps( nodes, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='add', entity_type=PROPERTY, entity_id='nodes:node1:relationships:[0]:' 'properties:property1', supported=False) ] def test_relationships_remove_property(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', PROPERTIES: { 'property1': 'property1_different_value' } } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', 'properties': {} } ])] _, unsupported_steps = extract_steps( nodes, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='remove', entity_type=PROPERTY, entity_id='nodes:node1:relationships:[0]:' 'properties:property1', supported=False) ] def test_relationships_modify_property(self): nodes = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', PROPERTIES: { 'property1': 'property1_value' } } ])] self.deployment_plan[NODES] = [self._get_node_scheme(relationships=[ { 'type': 'relationship_type', 'type_hierarchy': ['rel_hierarchy'], 'target_id': 'relationship_target', PROPERTIES: { 'property1': 'property1_different_value' } } ])] _, unsupported_steps = 
extract_steps( nodes, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='modify', entity_type=PROPERTY, entity_id='nodes:node1:relationships:[0]:' 'properties:property1', supported=False) ] def test_extract_steps_policy_types_no_change(self): policy_types = {'policy_type1': 'policy_type1_value'} self.deployment.policy_types = policy_types self.deployment_plan[POLICY_TYPES] = policy_types steps, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert steps == [] assert unsupported_steps == [] def test_policy_types_add_policy_type(self): self.deployment_plan[POLICY_TYPES] = { 'policy_type1': 'policy_type1_value' } _, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='add', entity_type=POLICY_TYPE, entity_id='policy_types:policy_type1', supported=False) ] def test_policy_types_remove_policy_type(self): self.deployment.policy_types = {'policy_type1': 'policy_type1_value'} _, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='remove', entity_type=POLICY_TYPE, entity_id='policy_types:policy_type1', supported=False) ] def test_policy_types_modify_policy_type(self): self.deployment.policy_types = {'policy_type1': 'policy_type1_value'} self.deployment_plan[POLICY_TYPES] = { 'policy_type1': 'policy_type1_modified_value' } _, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='modify', entity_type=POLICY_TYPE, entity_id='policy_types:policy_type1', supported=False) ] def test_extract_steps_policy_triggers_no_change(self): policy_triggers = {'policy_trigger1': 'policy_trigger1_value'} self.deployment.policy_triggers = policy_triggers self.deployment_plan[POLICY_TRIGGERS] = policy_triggers steps, unsupported_steps = extract_steps( {}, 
self.deployment, self.deployment_plan) assert steps == [] assert unsupported_steps == [] def test_policy_triggers_add_policy_trigger(self): self.deployment_plan[POLICY_TRIGGERS] = { 'policy_trigger1': 'policy_trigger1_value' } _, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='add', entity_type=POLICY_TRIGGER, entity_id='policy_triggers:policy_trigger1', supported=False) ] def test_policy_triggers_remove_policy_trigger(self): self.deployment.policy_triggers = { 'policy_trigger1': 'policy_trigger1_value' } _, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='remove', entity_type=POLICY_TRIGGER, entity_id='policy_triggers:policy_trigger1', supported=False) ] def test_policy_triggers_modify_policy_trigger(self): self.deployment.policy_triggers = { 'policy_trigger1': 'policy_trigger1_value' } self.deployment_plan[POLICY_TRIGGERS] = { 'policy_trigger1': 'policy_trigger1_modified_value' } _, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='modify', entity_type=POLICY_TRIGGER, entity_id='policy_triggers:policy_trigger1', supported=False) ] def test_groups_no_change(self): groups = {'group1': {}} self.deployment.groups = groups self.deployment_plan[GROUPS] = groups steps, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert steps == [] assert unsupported_steps == [] def test_groups_add_group(self): self.deployment_plan[GROUPS] = {'group1': {}} _, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='add', entity_type=GROUP, entity_id='groups:group1', supported=False) ] def test_groups_remove_group(self): self.deployment.groups = {'group1': {}} _, unsupported_steps = extract_steps( {}, 
self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='remove', entity_type=GROUP, entity_id='groups:group1', supported=False) ] def test_groups_modify_group(self): self.deployment.groups = {'group1': {'members': []}} self.deployment_plan[GROUPS] = {'group1': {'members': ['a']}} _, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert unsupported_steps == [ DeploymentUpdateStep( action='modify', entity_type=GROUP, entity_id='groups:group1', supported=False) ] def test_groups_member_order(self): self.deployment.groups = {'group1': {'members': ['a', 'b']}} self.deployment_plan[GROUPS] = {'group1': {'members': ['b', 'a']}} steps, unsupported_steps = extract_steps( {}, self.deployment, self.deployment_plan) assert steps == [] assert unsupported_steps == [] def test_ha_plugins_no_install(self): nodes = [self._get_node_scheme(plugins_to_install=[ {'name': 'old', 'install': True} ])] self.deployment_plan[NODES] = [self._get_node_scheme( plugins_to_install=[{'name': 'new', 'install': False}] )] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) # Although install is set to False on the new plugin, we are still # creating the step. 
We won't need to install the plugin (the # PluginHandler takes care of that), but the value still needs to be # updated in the node in the DB assert steps == [ DeploymentUpdateStep( action='add', entity_type=PLUGIN, entity_id='plugins_to_install:node1:new' ) ] def test_ha_plugins_add_ha_plugin(self): nodes = [self._get_node_scheme(plugins_to_install=[ {'name': 'old', 'install': True} ])] self.deployment_plan[NODES] = [self._get_node_scheme( plugins_to_install=[{'name': 'new', 'install': True}] )] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='add', entity_type=PLUGIN, entity_id='plugins_to_install:node1:new', supported=True) ] def test_ha_plugins_modify_ha_plugin(self): nodes = [self._get_node_scheme(plugins_to_install=[ { 'name': 'name', 'executor': 'host_agent', 'install': True, 'source': 'old' } ])] self.deployment_plan[NODES] = [self._get_node_scheme( plugins_to_install=[ { 'name': 'name', 'executor': 'host_agent', 'install': True, 'source': 'new' } ] )] steps, _ = extract_steps(nodes, self.deployment, self.deployment_plan) assert steps == [ DeploymentUpdateStep( action='modify', entity_type=PLUGIN, entity_id='plugins_to_install:node1:name', supported=True) ] def test_all_changes_combined(self): path_before = get_resource( 'deployment_update/combined_changes_before.json') path_after = get_resource( 'deployment_update/combined_changes_after.json') with open(path_before) as fp_before, open(path_after) as fp_after: plan_before = json.load(fp_before) plan_after = json.load(fp_after) nodes = list(plan_before['nodes'].values()) plan_after['nodes'] = list(plan_after['nodes'].values()) self.deployment.groups = plan_before['groups'] self.deployment.workflows = plan_before['workflows'] self.deployment.policy_types = plan_before['policy_types'] self.deployment.policy_triggers = plan_before['policy_triggers'] self.deployment.outputs = plan_before['outputs'] expected_steps = { 'modify_description': 
DeploymentUpdateStep( 'modify', DESCRIPTION, 'description'), 'remove_node': DeploymentUpdateStep( 'remove', NODE, 'nodes:node1'), 'add_node': DeploymentUpdateStep( 'add', NODE, 'nodes:node2', topology_order=0), 'modify_node_changed_type': DeploymentUpdateStep( 'modify', NODE, 'nodes:node3', supported=False), 'add_property': DeploymentUpdateStep( 'add', PROPERTY, 'nodes:node4:properties:added_prop'), 'remove_property': DeploymentUpdateStep( 'remove', PROPERTY, 'nodes:node4:properties:removed_prop'), 'modify_property': DeploymentUpdateStep( 'modify', PROPERTY, 'nodes:node4:properties:modified_prop'), 'remove_relationship': DeploymentUpdateStep( 'remove', RELATIONSHIP, 'nodes:node6:relationships:[0]'), 'add_relationship': DeploymentUpdateStep( 'add', RELATIONSHIP, 'nodes:node7:relationships:[0]'), 'remove_relationship_changed_target': DeploymentUpdateStep( 'remove', RELATIONSHIP, 'nodes:node9:relationships:[0]'), 'add_relationship_changed_target': DeploymentUpdateStep( 'add', RELATIONSHIP, 'nodes:node9:relationships:[0]'), 'remove_relationship_changed_type_and_target': DeploymentUpdateStep( 'remove', RELATIONSHIP, 'nodes:node10:relationships:[0]'), 'add_relationship_changed_type_and_target': DeploymentUpdateStep( 'add', RELATIONSHIP, 'nodes:node10:relationships:[0]'), 'add_operation': DeploymentUpdateStep( 'add', OPERATION, 'nodes:node11:operations:interface1.added_operation'), 'add_operation_shortened': DeploymentUpdateStep( 'add', OPERATION, 'nodes:node11:operations:added_operation'), 'remove_operation': DeploymentUpdateStep( 'remove', OPERATION, 'nodes:node11:operations:interface1.removed_operation'), 'remove_operation_shortened': DeploymentUpdateStep( 'remove', OPERATION, 'nodes:node11:operations:removed_operation'), 'modify_operation': DeploymentUpdateStep( 'modify', OPERATION, 'nodes:node11:operations:interface1.modified_operation'), 'modify_operation_shortened': DeploymentUpdateStep( 'modify', OPERATION, 'nodes:node11:operations:modified_operation'), 
'add_relationship_operation': DeploymentUpdateStep( 'add', OPERATION, 'nodes:node12:relationships:[0]:target_operations:' 'interface_for_modified_and_added.added_operation'), 'add_relationship_operation_shortened': DeploymentUpdateStep( 'add', OPERATION, 'nodes:node12:relationships:[0]:target_operations:' 'added_operation'), 'remove_relationship_operation': DeploymentUpdateStep( 'remove', OPERATION, 'nodes:node12:relationships:[0]:source_operations:' 'interface_for_intact_and_removed.removed_operation'), 'remove_relationship_operation_shortened': DeploymentUpdateStep( 'remove', OPERATION, 'nodes:node12:relationships:[0]:source_operations:' 'removed_operation'), 'modify_relationship_operation': DeploymentUpdateStep( 'modify', OPERATION, 'nodes:node12:relationships:[0]:target_operations:' 'interface_for_modified_and_added.modified_operation'), 'modify_relationship_operation_shortened': DeploymentUpdateStep( 'modify', OPERATION, 'nodes:node12:relationships:[0]:target_operations:' 'modified_operation'), 'add_output': DeploymentUpdateStep( 'add', OUTPUT, 'outputs:added_output'), 'remove_output': DeploymentUpdateStep( 'remove', OUTPUT, 'outputs:removed_output'), 'modify_output': DeploymentUpdateStep( 'modify', OUTPUT, 'outputs:modified_output'), 'add_workflow_same_plugin': DeploymentUpdateStep( 'add', WORKFLOW, 'workflows:added_workflow_same_plugin'), 'add_workflow_new_plugin': DeploymentUpdateStep( 'add', WORKFLOW, 'workflows:added_workflow_new_plugin'), 'remove_workflow': DeploymentUpdateStep( 'remove', WORKFLOW, 'workflows:removed_workflow'), 'modify_workflow_same_plugin': DeploymentUpdateStep( 'modify', WORKFLOW, 'workflows:modified_workflow_same_plugin'), 'modify_workflow_new_plugin': DeploymentUpdateStep( 'modify', WORKFLOW, 'workflows:modified_workflow_new_plugin'), 'add_policy_type': DeploymentUpdateStep( 'add', POLICY_TYPE, 'policy_types:added_policy_type', supported=False), 'remove_policy_type': DeploymentUpdateStep( 'remove', POLICY_TYPE, 
'policy_types:removed_policy_type', supported=False), 'modify_policy_type': DeploymentUpdateStep( 'modify', POLICY_TYPE, 'policy_types:modified_policy_type', supported=False), 'add_policy_trigger': DeploymentUpdateStep( 'add', POLICY_TRIGGER, 'policy_triggers:added_policy_trigger', supported=False), 'remove_policy_trigger': DeploymentUpdateStep( 'remove', POLICY_TRIGGER, 'policy_triggers:removed_policy_trigger', supported=False), 'modify_policy_trigger': DeploymentUpdateStep( 'modify', POLICY_TRIGGER, 'policy_triggers:modified_policy_trigger', supported=False), 'add_group': DeploymentUpdateStep( 'add', GROUP, 'groups:added_group', supported=False), 'remove_group': DeploymentUpdateStep( 'remove', GROUP, 'groups:removed_group', supported=False), 'modify_group': DeploymentUpdateStep( 'modify', GROUP, 'groups:modified_group', supported=False), 'add_relationship_property': DeploymentUpdateStep( 'add', PROPERTY, 'nodes:node13:relationships:[0]:' 'properties:added_relationship_prop', supported=False), 'remove_relationship_property': DeploymentUpdateStep( 'remove', PROPERTY, 'nodes:node13:relationships:[0]:' 'properties:removed_relationship_prop', supported=False), 'modify_relationship_property': DeploymentUpdateStep( 'modify', PROPERTY, 'nodes:node13:relationships:[0]:' 'properties:modified_relationship_prop', supported=False), 'add_ha_plugin_plugins_to_install': DeploymentUpdateStep( 'add', PLUGIN, 'plugins_to_install:node18:plugin3_name'), 'add_ha_plugin_plugin3_name': DeploymentUpdateStep( 'add', PLUGIN, 'plugins:node18:plugin3_name'), 'add_cda_plugin_used_by_host': DeploymentUpdateStep( 'add', PLUGIN, 'plugins:node16:cda_plugin_for_operations2'), # the steps below are intended just to make the test pass. 
# ideally, they should be removed since they are incorrect 'modify_node_add_contained_in_relationship': DeploymentUpdateStep( 'modify', NODE, 'nodes:node8', supported=False), 'add_cda_operation': DeploymentUpdateStep( 'add', OPERATION, 'nodes:node16:operations:' 'interface_for_plugin_based_operations.' 'added_operation_new_cda_plugin', supported=True), 'add_cda_operation_shortened': DeploymentUpdateStep( 'add', OPERATION, 'nodes:node16:operations:added_operation_new_cda_plugin', supported=True), 'add_ha_operation': DeploymentUpdateStep( 'add', OPERATION, 'nodes:node17:operations:' 'interface_for_plugin_based_operations.' 'ha_operation_after', supported=True), 'add_ha_operation_shortened': DeploymentUpdateStep( 'add', OPERATION, 'nodes:node17:operations:ha_operation_after', supported=True), 'remove_ha_operation': DeploymentUpdateStep( 'remove', OPERATION, 'nodes:node17:operations:' 'interface_for_plugin_based_operations.' 'ha_operation_before', supported=True), 'remove_ha_operation_shortened': DeploymentUpdateStep( 'remove', OPERATION, 'nodes:node17:operations:ha_operation_before', supported=True), 'modify_ha_operation': DeploymentUpdateStep( 'modify', OPERATION, 'nodes:node18:operations:' 'interface_for_plugin_based_operations.' 'ha_operation_before', supported=True), 'modify_ha_operation_shortened': DeploymentUpdateStep( 'modify', OPERATION, 'nodes:node18:operations:ha_operation_before', supported=True) } steps, unsupported_steps = extract_steps( nodes, self.deployment, plan_after) steps.extend(unsupported_steps) self.assertEqual(set(expected_steps.values()), set(steps))
36.243707
79
0.552104
5,410
63,354
6.08817
0.043068
0.087136
0.064487
0.036646
0.804536
0.763002
0.702523
0.68206
0.666333
0.624677
0
0.007983
0.347508
63,354
1,747
80
36.264453
0.78879
0.009123
0
0.644866
0
0
0.19666
0.089391
0
0
0
0
0.055592
1
0.047744
false
0
0.005232
0.000654
0.054938
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
c3bac65567c5a66799143154b4a258b44825ce9a
108
py
Python
messaging_service/apps.py
manishgkasera/plivo_messaging_service
697330f2892ba6c5ac7d1b190a21bf6a6fbcc619
[ "Apache-2.0" ]
null
null
null
messaging_service/apps.py
manishgkasera/plivo_messaging_service
697330f2892ba6c5ac7d1b190a21bf6a6fbcc619
[ "Apache-2.0" ]
null
null
null
messaging_service/apps.py
manishgkasera/plivo_messaging_service
697330f2892ba6c5ac7d1b190a21bf6a6fbcc619
[ "Apache-2.0" ]
null
null
null
from django.apps import AppConfig


class MessagingServiceConfig(AppConfig):
    """Django application configuration for the messaging_service app.

    Registered via the app registry; ``name`` is the dotted import path
    Django uses to locate this application.
    """

    name = 'messaging_service'
18
40
0.796296
11
108
7.727273
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.138889
108
5
41
21.6
0.913978
0
0
0
0
0
0.157407
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
c3ebfa8337f7a94a830f8fa3092d0e303ffa3328
409
py
Python
nuclear/parser/transform.py
igrek51/glue
6726ba977a21e58b354a5c97f68639f84184be7a
[ "MIT" ]
6
2020-06-24T20:03:06.000Z
2021-09-21T10:05:17.000Z
nuclear/parser/transform.py
igrek51/cliglue
6726ba977a21e58b354a5c97f68639f84184be7a
[ "MIT" ]
2
2021-09-19T15:28:02.000Z
2021-09-21T17:29:38.000Z
nuclear/parser/transform.py
igrek51/cliglue
6726ba977a21e58b354a5c97f68639f84184be7a
[ "MIT" ]
2
2020-06-24T21:21:35.000Z
2021-08-01T17:24:38.000Z
from typing import List, Type

from nuclear.builder.rule import CliRule, KeywordRule, TCliRule

from .keyword import format_keywords


def filter_rules(rules: List[CliRule], *types: Type[TCliRule]) -> List[TCliRule]:
    """Return only the rules that are instances of any of the given types."""
    matched: List[TCliRule] = []
    for rule in rules:
        # tuple(types) lets isinstance test against all requested rule types at once
        if isinstance(rule, tuple(types)):
            matched.append(rule)
    return matched


def normalize_keywords(rules: List[KeywordRule]):
    """Canonicalize each rule's keywords in place, dropping duplicates first."""
    for rule in rules:
        unique_keywords = set(rule.keywords)
        rule.keywords = format_keywords(unique_keywords)
29.214286
81
0.740831
56
409
5.339286
0.464286
0.093645
0
0
0
0
0
0
0
0
0
0
0.151589
409
13
82
31.461538
0.861671
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.375
0.125
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
c3ef5ac3e95089468e4cfe98aad8acce09173775
743
py
Python
software/roverTower/Communication.py
JeanetteMueller/OpenPerseverance
efbb51bf3eef1c87b16abe0ef03298d24279b718
[ "CC0-1.0" ]
12
2021-04-12T18:49:05.000Z
2022-01-24T21:05:30.000Z
software/roverTower/Communication.py
JeanetteMueller/OpenPerseverance
efbb51bf3eef1c87b16abe0ef03298d24279b718
[ "CC0-1.0" ]
null
null
null
software/roverTower/Communication.py
JeanetteMueller/OpenPerseverance
efbb51bf3eef1c87b16abe0ef03298d24279b718
[ "CC0-1.0" ]
3
2021-11-23T13:00:20.000Z
2022-03-09T03:02:17.000Z
import socket


class Communication:
    """Networking configuration for the rover tower: fixed UDP port
    assignments per subsystem plus a UDP socket factory."""

    # Destination IP address; left empty here.
    # (Earlier values, per the original comments: "192.168.178.55", "10.0.0.5".)
    ip = ""
    # Receive buffer size in bytes for UDP datagrams.
    udpBuffer = 2048

    def __init__(self, name):
        """Announce which subsystem this Communication instance serves."""
        print("Init Communication %s" % name)

    def getPortForDrive(self):
        # UDP port reserved for drive commands.
        return 5001

    def getPortForSteer(self):
        # UDP port reserved for steering commands.
        return 5002

    def getPortForArm(self):
        # UDP port reserved for the arm.
        return 5003

    def getPortForLight(self):
        # UDP port reserved for lighting control.
        return 5004

    def getPortForTower(self):
        # UDP port reserved for the tower.
        return 5005

    def getPortForSound(self):
        # UDP port reserved for sound playback.
        return 5006

    def getPortForInfo(self):
        # UDP port reserved for status/info messages.
        return 5007

    def getSocket(self):
        """Create and return a fresh IPv4 (AF_INET) UDP (SOCK_DGRAM) socket."""
        return socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
21.228571
55
0.543742
74
743
5.378378
0.567568
0.201005
0
0
0
0
0
0
0
0
0
0.103448
0.375505
743
35
56
21.228571
0.75431
0.059219
0
0
0
0
0.030172
0
0
0
0
0
0
1
0.391304
false
0
0.043478
0.347826
0.913043
0.043478
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4