hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
2a2a7f1bbe7b23f099969dbf0d398534767b960e
115
py
Python
example/dt/7.preprocessor/docker/build/loop_container.py
openmcp/Public_OpenMCP-Release
95da3aeb0a184a70ed682a9851b2dbd945173837
[ "Unlicense" ]
null
null
null
example/dt/7.preprocessor/docker/build/loop_container.py
openmcp/Public_OpenMCP-Release
95da3aeb0a184a70ed682a9851b2dbd945173837
[ "Unlicense" ]
null
null
null
example/dt/7.preprocessor/docker/build/loop_container.py
openmcp/Public_OpenMCP-Release
95da3aeb0a184a70ed682a9851b2dbd945173837
[ "Unlicense" ]
null
null
null
import os import sys import time if __name__ == '__main__': while(True) : print("adsf") time.sleep(10)
11.5
26
0.643478
16
115
4.125
0.8125
0
0
0
0
0
0
0
0
0
0
0.022472
0.226087
115
9
27
12.777778
0.719101
0
0
0
0
0
0.105263
0
0
0
0
0
0
1
0
true
0
0.428571
0
0.428571
0.142857
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
2a5b9b993a53475ae424fed591f03a2d9fe6beb3
104
py
Python
apps/notification/apps.py
martinlehoux/django_bike
05373d2649647fe8ebadb0aad54b9a7ec1900fe7
[ "MIT" ]
1
2020-08-12T17:53:37.000Z
2020-08-12T17:53:37.000Z
apps/notification/apps.py
martinlehoux/django_bike
05373d2649647fe8ebadb0aad54b9a7ec1900fe7
[ "MIT" ]
12
2020-07-03T03:52:00.000Z
2021-09-22T18:00:44.000Z
apps/notification/apps.py
martinlehoux/django_bike
05373d2649647fe8ebadb0aad54b9a7ec1900fe7
[ "MIT" ]
null
null
null
from django.apps import AppConfig class NotificationConfig(AppConfig): name = "apps.notification"
17.333333
36
0.778846
11
104
7.363636
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.144231
104
5
37
20.8
0.910112
0
0
0
0
0
0.163462
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
2a85f8b22f264d9c76e3ae4a86505483f901c1f4
912
py
Python
server/.vim/plugged/python-mode/submodules/pylint/tests/functional/i/invalid_overridden_method.py
hkdb/sysconf
99d334f7309657647059c4b37f25e33dffc81fc3
[ "MIT" ]
10
2020-07-21T21:59:54.000Z
2021-07-19T11:01:47.000Z
server/.vim/plugged/python-mode/submodules/pylint/tests/functional/i/invalid_overridden_method.py
hkdb/sysconf
99d334f7309657647059c4b37f25e33dffc81fc3
[ "MIT" ]
null
null
null
server/.vim/plugged/python-mode/submodules/pylint/tests/functional/i/invalid_overridden_method.py
hkdb/sysconf
99d334f7309657647059c4b37f25e33dffc81fc3
[ "MIT" ]
1
2021-01-30T18:17:01.000Z
2021-01-30T18:17:01.000Z
# pylint: disable=missing-docstring, too-few-public-methods import abc class SuperClass(metaclass=abc.ABCMeta): @property @abc.abstractmethod def prop(self): pass @abc.abstractmethod def method(self): pass class Prop(SuperClass): @property def prop(self): return None def method(self): pass class NoProp(SuperClass): def prop(self): # [invalid-overridden-method] return None @property def method(self): # [invalid-overridden-method] return None class Property: @property def close(self): pass class PropertySetter(Property): @property def close(self): pass @close.setter def close(self, attr): return attr @close.deleter def close(self): return None class AbstractProperty: @abc.abstractproperty def prop(self): return
14.95082
59
0.619518
99
912
5.707071
0.323232
0.070796
0.077876
0.060177
0.322124
0.244248
0
0
0
0
0
0
0.289474
912
60
60
15.2
0.871914
0.123904
0
0.666667
0
0
0
0
0
0
0
0
0
1
0.282051
false
0.128205
0.025641
0.153846
0.615385
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
aa6a4dc1b7fa388f1d07b3ea39ca2a8494b726f0
94
py
Python
appl/dictionary/gender.py
hbrls/weixin-mp-mockup
6a9a6df695aaf1e3c85de9997b906a911f355fdf
[ "MIT" ]
2
2016-05-26T12:41:24.000Z
2016-09-20T04:09:06.000Z
appl/dictionary/gender.py
hbrls/weixin-mp-mockup
6a9a6df695aaf1e3c85de9997b906a911f355fdf
[ "MIT" ]
null
null
null
appl/dictionary/gender.py
hbrls/weixin-mp-mockup
6a9a6df695aaf1e3c85de9997b906a911f355fdf
[ "MIT" ]
1
2015-06-25T17:57:46.000Z
2015-06-25T17:57:46.000Z
# -*- coding: utf-8 -*- GENDERS = { 1: (1, u'男'), 2: (2, u'女'), 3: (3, u'保密') }
10.444444
23
0.319149
16
94
1.875
0.6875
0
0
0
0
0
0
0
0
0
0
0.112903
0.340426
94
8
24
11.75
0.370968
0.223404
0
0
0
0
0.056338
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
aa6ed59965da5e491a689a25d81e62ad00e86cd6
33
py
Python
diofant/tests/logic/__init__.py
project-kotinos/diofant___diofant
882549ac3a4dac238695aa620c02fce6ca33f9d3
[ "BSD-3-Clause" ]
57
2016-09-13T23:16:26.000Z
2022-03-29T06:45:51.000Z
diofant/tests/logic/__init__.py
project-kotinos/diofant___diofant
882549ac3a4dac238695aa620c02fce6ca33f9d3
[ "BSD-3-Clause" ]
402
2016-05-11T11:11:47.000Z
2022-03-31T14:27:02.000Z
diofant/tests/logic/__init__.py
project-kotinos/diofant___diofant
882549ac3a4dac238695aa620c02fce6ca33f9d3
[ "BSD-3-Clause" ]
20
2016-05-11T08:17:37.000Z
2021-09-10T09:15:51.000Z
""" Tests for logic package. """
8.25
24
0.606061
4
33
5
1
0
0
0
0
0
0
0
0
0
0
0
0.181818
33
3
25
11
0.740741
0.727273
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
aa6f45fc9c518a44671079095dc1142761f8efc5
651
py
Python
app/shared_code/name_utils.py
KubaTaba1uga/spam_recycler
76056635fdf610c6304e8034a3f66f235761ca76
[ "MIT" ]
2
2021-12-17T18:12:51.000Z
2021-12-17T18:12:52.000Z
app/shared_code/name_utils.py
KubaTaba1uga/spam_recycler
76056635fdf610c6304e8034a3f66f235761ca76
[ "MIT" ]
null
null
null
app/shared_code/name_utils.py
KubaTaba1uga/spam_recycler
76056635fdf610c6304e8034a3f66f235761ca76
[ "MIT" ]
null
null
null
def create_spam_worker_name(user_id): return f'user_{user_id}_spam_worker' def create_email_worker_name(user_id): return f'user_{user_id}_email_worker' def create_worker_celery_name(worker_name): return f"celery@{worker_name}" def create_spam_worker_celery_name(user_id): return create_worker_celery_name(create_spam_worker_name(user_id)) def create_email_worker_celery_name(user_id): return create_worker_celery_name(create_email_worker_name(user_id)) def create_user_spam_queue_name(user_id): return f'user_{user_id}_spam_queue' def create_user_email_queue_name(user_id): return f'user_{user_id}_email_queue'
24.111111
71
0.81874
108
651
4.351852
0.111111
0.153191
0.170213
0.204255
0.723404
0.723404
0.553191
0.553191
0.553191
0.238298
0
0
0.104455
651
26
72
25.038462
0.806175
0
0
0
0
0
0.190476
0.159754
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
0
0
0
null
0
0
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
aa6faac6f7392714cbf7822469a8a75fd2c12501
130
py
Python
aditya.py
AdityaKane2001/wassatrial
6c95ad4e096cc9b53b37030d5be6a91ad2405e0d
[ "Apache-2.0" ]
null
null
null
aditya.py
AdityaKane2001/wassatrial
6c95ad4e096cc9b53b37030d5be6a91ad2405e0d
[ "Apache-2.0" ]
null
null
null
aditya.py
AdityaKane2001/wassatrial
6c95ad4e096cc9b53b37030d5be6a91ad2405e0d
[ "Apache-2.0" ]
null
null
null
def aditya(): return "aditya" def shantanu(num): if num>10: return "shantanu" else: return "patankar"
16.25
25
0.569231
15
130
4.933333
0.6
0
0
0
0
0
0
0
0
0
0
0.022472
0.315385
130
8
26
16.25
0.808989
0
0
0
0
0
0.167939
0
0
0
0
0
0
1
0.285714
false
0
0
0.142857
0.714286
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
aa7a0a2ec1aebce31794b5a042f8a1d06e172288
1,922
py
Python
src/test.py
nbonamy/download-manager
179e1447223f88bdfe265ac55485f2e1936cc037
[ "Apache-2.0" ]
1
2020-05-26T15:08:13.000Z
2020-05-26T15:08:13.000Z
src/test.py
nbonamy/download-manager
179e1447223f88bdfe265ac55485f2e1936cc037
[ "Apache-2.0" ]
null
null
null
src/test.py
nbonamy/download-manager
179e1447223f88bdfe265ac55485f2e1936cc037
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python3 import unittest import utils # test data filenames = { 'Interstellar.mkv': 'Interstellar', 'Interstellar.avi': 'Interstellar', 'Interstellar.720p.mkv': 'Interstellar', 'Interstellar.MULTI.mkv': 'Interstellar', 'Interstellar.VOSTFR.mkv': 'Interstellar', 'Interstellar.VOST.mkv': 'Interstellar', 'Interstellar.2016.mkv': 'Interstellar', 'Interstellar.MULTI.1080p.mkv': 'Interstellar', 'Interstellar.2016.1080p.mkv': 'Interstellar', 'Interstellar.2016.MULTI.mkv': 'Interstellar', 'Blade.Runner.mkv': 'Blade Runner', 'Blade.Runner.avi': 'Blade Runner', 'Blade.Runner.720p.mkv': 'Blade Runner', 'Blade.Runner.MULTI.mkv': 'Blade Runner', 'Blade.Runner.VOSTFR.mkv': 'Blade Runner', 'Blade.Runner.VOST.mkv': 'Blade Runner', 'Blade.Runner.1982.mkv': 'Blade Runner', 'Blade.Runner.MULTI.1080p.mkv': 'Blade Runner', 'Blade.Runner.1982.1080p.mkv': 'Blade Runner', 'Blade.Runner.1982.MULTI.mkv': 'Blade Runner', 'Blade.Runner.1982.MULTI.1080p.mkv': 'Blade Runner', 'Blade.Runner.1982.MULTI.1080p.mkv': 'Blade Runner', 'Blade.Runner.1982.1982.mkv': 'Blade Runner 1982', 'Blade.Runner.1982.2019.mkv': 'Blade Runner 1982', 'Blade.Runner.1982.2049.mkv': 'Blade Runner 1982', 'Blade.Runner.2049.mkv': 'Blade Runner 2049', 'Blade.Runner.2049.MULTI.1080p.mkv': 'Blade Runner 2049', 'Blade.Runner.2049.1080p.mkv': 'Blade Runner 2049', 'Blade.Runner.2049.2017.MULTI.1080p.mkv': 'Blade Runner 2049', 'Foundation.S01E01.4K.MULTI.2160p.HDR.WEB.H265-EXTREME.mkv': 'Foundation S01E01', 'C\'est.comme.ça.2018.FRENCH.720p.mkv': 'C\'est comme ça', 'L\'élève.Ducobu.TRUEFRENCH.720p.mkv': 'L\'élève Ducobu', } # run test class TestParseFilenameMethod(unittest.TestCase): def testFilename(self): for filename in filenames: title = utils.extractTitle(filename) self.assertEqual(filenames[filename], title) if __name__ == '__main__': unittest.main()
37.686275
83
0.707596
246
1,922
5.495935
0.235772
0.309172
0.186391
0.195266
0.482988
0.392751
0.266272
0.193047
0.079142
0.079142
0
0.101347
0.111863
1,922
50
84
38.44
0.690685
0.01873
0
0.046512
0
0
0.639405
0.371747
0
0
0
0
0.023256
1
0.023256
false
0
0.046512
0
0.093023
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
aa887ae242b245bb31244acdc183e0c50541bb27
232
py
Python
package/scripts/params_status.py
avbfr/ambari-drill-service
c6d4808910b19b483ea8250d8fa7c432b5518f29
[ "Apache-2.0" ]
null
null
null
package/scripts/params_status.py
avbfr/ambari-drill-service
c6d4808910b19b483ea8250d8fa7c432b5518f29
[ "Apache-2.0" ]
null
null
null
package/scripts/params_status.py
avbfr/ambari-drill-service
c6d4808910b19b483ea8250d8fa7c432b5518f29
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python import os from resource_management import * from resource_management.libraries.script.script import Script config = Script.get_config() drill_run_dir = config['configurations']['drill-env']['drill_run_dir']
23.2
70
0.793103
32
232
5.53125
0.53125
0.135593
0.248588
0
0
0
0
0
0
0
0
0
0.090517
232
9
71
25.777778
0.838863
0.086207
0
0
0
0
0.170616
0
0
0
0
0
0
1
0
false
0
0.6
0
0.6
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
aac7e251524cab463b0be37fd1a42379b6d83483
509
py
Python
old/test/test_utils/test_numeric_gradient.py
Faaizz/computer_vision
a6145c6b3b7d8b3492e85932fe96da6f46fcd0d4
[ "RSA-MD" ]
null
null
null
old/test/test_utils/test_numeric_gradient.py
Faaizz/computer_vision
a6145c6b3b7d8b3492e85932fe96da6f46fcd0d4
[ "RSA-MD" ]
10
2021-03-31T20:18:26.000Z
2022-02-10T02:35:09.000Z
old/test/test_utils/test_numeric_gradient.py
Faaizz/computer_vision
a6145c6b3b7d8b3492e85932fe96da6f46fcd0d4
[ "RSA-MD" ]
null
null
null
import unittest, sys import numpy as np from src.utils.numeric_gradient import evaluate_gradient class TestNumericGradient(unittest.TestCase): def test_numeric_gradient(self): # Vector valued function def vec_valued(X): # return np.array([(X[0]*X[1]), (X[0]+X[1])]) return np.array([(X[0]*X[1]), (X[0]+X[1])], dtype=np.float32) # Gradient grad= evaluate_gradient(vec_valued, np.array([2,3], dtype=np.float32)) sys.stderr.write(str(grad))
31.8125
78
0.636542
74
509
4.283784
0.472973
0.025237
0.037855
0.050473
0.132492
0.132492
0.132492
0.132492
0.132492
0.132492
0
0.034826
0.210216
509
16
79
31.8125
0.753731
0.147348
0
0
0
0
0
0
0
0
0
0
0
1
0.222222
false
0
0.333333
0.111111
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
2acbd616d89967f3937ee31a576f39a64c167f92
154
py
Python
tavern/util/list_util.py
rubyszu/tavern
47766d1474f5f2b951ca482c41c055d4c79a7b13
[ "MIT" ]
null
null
null
tavern/util/list_util.py
rubyszu/tavern
47766d1474f5f2b951ca482c41c055d4c79a7b13
[ "MIT" ]
null
null
null
tavern/util/list_util.py
rubyszu/tavern
47766d1474f5f2b951ca482c41c055d4c79a7b13
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- def remove_duplicate_elements(check_list): func = lambda x,y:x if y in x else x + [y] return reduce(func, [[], ] + check_list)
25.666667
43
0.642857
26
154
3.653846
0.692308
0.189474
0
0
0
0
0
0
0
0
0
0.007937
0.181818
154
5
44
30.8
0.746032
0.136364
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
2aeb5ce3d0cfdb79cf2bb17f75b45d776467e0e1
69
py
Python
tests/__init__.py
molssi-seamm/supercell_step
17fa7caa1f1d363831e6376fbc211bbd872152ff
[ "BSD-3-Clause" ]
null
null
null
tests/__init__.py
molssi-seamm/supercell_step
17fa7caa1f1d363831e6376fbc211bbd872152ff
[ "BSD-3-Clause" ]
4
2020-06-18T23:22:02.000Z
2021-02-04T18:22:12.000Z
tests/__init__.py
molssi-seamm/supercell_step
17fa7caa1f1d363831e6376fbc211bbd872152ff
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """Unit test package for supercell_step."""
17.25
43
0.608696
9
69
4.555556
1
0
0
0
0
0
0
0
0
0
0
0.017241
0.15942
69
3
44
23
0.689655
0.869565
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
6300c900e7405f3e14f9f5416b002011482a37ee
69
py
Python
src/container/kube-manager/kube_manager/vnc/__init__.py
jnpr-pranav/contrail-controller
428eee37c28c31830fd764315794e1a6e52720c1
[ "Apache-2.0" ]
37
2020-09-21T10:42:26.000Z
2022-01-09T10:16:40.000Z
src/container/kube-manager/kube_manager/vnc/__init__.py
jnpr-pranav/contrail-controller
428eee37c28c31830fd764315794e1a6e52720c1
[ "Apache-2.0" ]
2
2018-12-04T02:20:52.000Z
2018-12-22T06:16:30.000Z
src/container/kube-manager/kube_manager/vnc/__init__.py
jnpr-pranav/contrail-controller
428eee37c28c31830fd764315794e1a6e52720c1
[ "Apache-2.0" ]
21
2020-08-25T12:48:42.000Z
2022-03-22T04:32:18.000Z
# # Copyright (c) 2017 Juniper Networks, Inc. All rights reserved. #
17.25
64
0.710145
9
69
5.444444
1
0
0
0
0
0
0
0
0
0
0
0.070175
0.173913
69
3
65
23
0.789474
0.898551
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
6308ee8ab2732528221b63a7915f27c942451d45
110
py
Python
intro/class2.py
SamuelPhases/django-bootcamp
ff6dbeb9e1470fe1d003c0e170426bfaf7912060
[ "MIT" ]
null
null
null
intro/class2.py
SamuelPhases/django-bootcamp
ff6dbeb9e1470fe1d003c0e170426bfaf7912060
[ "MIT" ]
null
null
null
intro/class2.py
SamuelPhases/django-bootcamp
ff6dbeb9e1470fe1d003c0e170426bfaf7912060
[ "MIT" ]
null
null
null
from .classes import * #inheritance class Dog(Animal): def activity(self): Animal.active(self)
12.222222
27
0.663636
13
110
5.615385
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.227273
110
8
28
13.75
0.858824
0.1
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
630c55010b6fa836af7b249ed594d0ac916f3650
258
py
Python
cl_gym/backbones/__init__.py
imirzadeh/CL-Gym
302bf7f0d9a96df61c32dec1dd4cacd19746179d
[ "MIT" ]
26
2021-06-23T17:16:10.000Z
2022-03-14T05:04:14.000Z
cl_gym/backbones/__init__.py
imirzadeh/CL-Gym
302bf7f0d9a96df61c32dec1dd4cacd19746179d
[ "MIT" ]
null
null
null
cl_gym/backbones/__init__.py
imirzadeh/CL-Gym
302bf7f0d9a96df61c32dec1dd4cacd19746179d
[ "MIT" ]
2
2021-07-09T01:30:12.000Z
2022-01-02T04:15:46.000Z
from cl_gym.backbones.base import ContinualBackbone from cl_gym.backbones.mlp import MLP2Layers from cl_gym.backbones.cnn import CNN1D from cl_gym.backbones.resnet import ResNet18Small __all__ = ['ContinualBackbone', 'MLP2Layers', 'CNN1D', 'ResNet18Small']
36.857143
71
0.825581
33
258
6.212121
0.424242
0.117073
0.17561
0.35122
0
0
0
0
0
0
0
0.034043
0.089147
258
6
72
43
0.838298
0
0
0
0
0
0.174419
0
0
0
0
0
0
1
0
false
0
0.8
0
0.8
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
631036c78953c13f0ad5dad461199fb665b9cbaa
215
py
Python
src/myprojectprototype/app/test.py
naitao/MyProjectPrototype
54a555921a1de154bd7b1499bce0c6b359717d42
[ "MIT" ]
null
null
null
src/myprojectprototype/app/test.py
naitao/MyProjectPrototype
54a555921a1de154bd7b1499bce0c6b359717d42
[ "MIT" ]
null
null
null
src/myprojectprototype/app/test.py
naitao/MyProjectPrototype
54a555921a1de154bd7b1499bce0c6b359717d42
[ "MIT" ]
null
null
null
import pandas as pd df = pd.read_csv("out.csv") df.Last_update = df.Last_update.str.replace('\'', '') #for i in df.index: #df.loc[i, ['Last_update']] = df['Last_update'].loc[i].strip('\'') df.to_csv("new.csv")
26.875
70
0.632558
39
215
3.333333
0.512821
0.307692
0.276923
0.246154
0.338462
0
0
0
0
0
0
0
0.116279
215
7
71
30.714286
0.684211
0.386047
0
0
0
0
0.130769
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
6326e2322b853a83d86d98202d4b35a10e012c86
130
py
Python
securityheaders/models/setcookie/__init__.py
th3cyb3rc0p/securityheaders
941264be581dc01afe28f6416f2d7bed79aecfb3
[ "Apache-2.0" ]
151
2018-07-29T22:34:43.000Z
2022-03-22T05:08:27.000Z
securityheaders/models/setcookie/__init__.py
th3cyb3rc0p/securityheaders
941264be581dc01afe28f6416f2d7bed79aecfb3
[ "Apache-2.0" ]
5
2019-04-24T07:31:36.000Z
2021-04-15T14:31:23.000Z
securityheaders/models/setcookie/__init__.py
th3cyb3rc0p/securityheaders
941264be581dc01afe28f6416f2d7bed79aecfb3
[ "Apache-2.0" ]
42
2018-07-31T08:18:59.000Z
2022-03-28T08:18:32.000Z
from .setcookiedirective import SetCookieDirective from .setcookie import SetCookie __all__ = ['SetCookieDirective','SetCookie']
26
50
0.830769
11
130
9.454545
0.454545
0
0
0
0
0
0
0
0
0
0
0
0.092308
130
4
51
32.5
0.881356
0
0
0
0
0
0.207692
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
2d49f1086b4eb3dec76a34dc5e60bd951459e868
99
py
Python
{{cookiecutter.app_name}}/tests/backend/test_{{cookiecutter.app_name}}.py
RobbieClarken/cookiecutter-flask-react
6a5219e99c2d38f27fe42a7690bc6e1081226fbc
[ "MIT" ]
1
2018-09-15T18:09:36.000Z
2018-09-15T18:09:36.000Z
{{cookiecutter.app_name}}/tests/backend/test_{{cookiecutter.app_name}}.py
RobbieClarken/cookiecutter-flask-react
6a5219e99c2d38f27fe42a7690bc6e1081226fbc
[ "MIT" ]
null
null
null
{{cookiecutter.app_name}}/tests/backend/test_{{cookiecutter.app_name}}.py
RobbieClarken/cookiecutter-flask-react
6a5219e99c2d38f27fe42a7690bc6e1081226fbc
[ "MIT" ]
null
null
null
import pytest @pytest.mark.xfail def test_tests_are_implemented(): raise NotImplementedError
14.142857
33
0.808081
12
99
6.416667
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.131313
99
6
34
16.5
0.895349
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
true
0
0.25
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
4
2d4a8ab9d50ff5918c89add4396db26c239beee7
215
py
Python
planex/cms_core/apps.py
octue/planex-cms
9ec17dccd174bf99ffcd9b46ac4f6f7c37200d60
[ "MIT" ]
null
null
null
planex/cms_core/apps.py
octue/planex-cms
9ec17dccd174bf99ffcd9b46ac4f6f7c37200d60
[ "MIT" ]
1
2021-01-12T18:13:21.000Z
2021-01-12T18:13:21.000Z
planex/cms_core/apps.py
octue/planex-cms
9ec17dccd174bf99ffcd9b46ac4f6f7c37200d60
[ "MIT" ]
null
null
null
from django.apps import AppConfig class CMSCoreAppConfig(AppConfig): name = "cms_core" label = "cms_core" verbose_name = "CMS Core" def ready(self): # import cms_core.signals pass
17.916667
34
0.651163
26
215
5.230769
0.653846
0.205882
0.161765
0
0
0
0
0
0
0
0
0
0.265116
215
11
35
19.545455
0.860759
0.106977
0
0
0
0
0.126316
0
0
0
0
0
0
1
0.142857
false
0.142857
0.142857
0
0.857143
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
2d5cb484fd2bd308a5a5ca3d1368c7da31e27b3d
244
py
Python
Python/Mundo 3/ex096.py
henrique-tavares/Coisas
f740518b1bedec5b0ea8c12ae07a2cac21eb51ae
[ "MIT" ]
1
2020-02-07T20:39:26.000Z
2020-02-07T20:39:26.000Z
Python/Mundo 3/ex096.py
neptune076/Coisas
85c064cc0e134465aaf6ef41acf747d47f108fc9
[ "MIT" ]
null
null
null
Python/Mundo 3/ex096.py
neptune076/Coisas
85c064cc0e134465aaf6ef41acf747d47f108fc9
[ "MIT" ]
null
null
null
def área(largura, altura): print(f'\nA área de um terreno {largura}m x {altura}m é de {largura * altura}m².\n') print(f'\n{"Controle de Terrenos":^25}', '-' * 25, sep='\n') área(float(input("LARGURA (m): ")), float(input("ALTURA (m): ")))
40.666667
88
0.614754
41
244
3.658537
0.512195
0.173333
0
0
0
0
0
0
0
0
0
0.02381
0.139344
244
6
89
40.666667
0.690476
0
0
0
0
0.25
0.538776
0
0
0
0
0
0
1
0.25
false
0
0
0
0.25
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
0
0
null
0
0
0
0
0
1
0
0
0
0
0
1
0
4
2d7a59291647dd4ccae5025ce59aa5fd25315f2d
590
py
Python
src/state.py
tomszir/chip8-py
b73c7f5478aa45bd98f0371daf4f6136c7da58cb
[ "MIT" ]
null
null
null
src/state.py
tomszir/chip8-py
b73c7f5478aa45bd98f0371daf4f6136c7da58cb
[ "MIT" ]
null
null
null
src/state.py
tomszir/chip8-py
b73c7f5478aa45bd98f0371daf4f6136c7da58cb
[ "MIT" ]
null
null
null
from enum import Enum class StateType(Enum): NEXT = 0 SKIP = 1 JUMP = 2 BLOCK = 3 NOT_IMPLEMENTED = 4 class State: @staticmethod def Next(): return (StateType.NEXT, 2) @staticmethod def Skip(): return (StateType.SKIP, 4) @staticmethod def SkipIf(condition: bool): if condition: return State.Skip() return State.Next() @staticmethod def Jump(addr: int): return (StateType.JUMP, addr) @staticmethod def Block(): return (StateType.BLOCK, 0) @staticmethod def NotImplemented(): return (StateType.NOT_IMPLEMENTED, -1)
15.526316
42
0.654237
71
590
5.408451
0.380282
0.234375
0
0
0
0
0
0
0
0
0
0.020045
0.238983
590
37
43
15.945946
0.835189
0
0
0.214286
0
0
0
0
0
0
0
0
0
1
0.214286
false
0
0.035714
0.178571
0.75
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
2d97b731d3fdd31639a8608cc5e7855941d4095a
1,051
py
Python
JumpScale9Lib/sal/kvm/KVM.py
Jumpscale/lib9
82224784ef2a7071faeb48349007211c367bc673
[ "Apache-2.0" ]
2
2017-06-07T08:11:47.000Z
2017-11-10T02:19:48.000Z
JumpScale9Lib/sal/kvm/KVM.py
Jumpscale/lib9
82224784ef2a7071faeb48349007211c367bc673
[ "Apache-2.0" ]
188
2017-06-21T06:16:13.000Z
2020-06-17T14:20:24.000Z
JumpScale9Lib/sal/kvm/KVM.py
Jumpscale/lib9
82224784ef2a7071faeb48349007211c367bc673
[ "Apache-2.0" ]
3
2018-06-12T05:18:28.000Z
2019-09-24T06:49:17.000Z
from js9 import j from JumpScale9Lib.sal.kvm.Network import Network from JumpScale9Lib.sal.kvm.Interface import Interface from JumpScale9Lib.sal.kvm.Disk import Disk from JumpScale9Lib.sal.kvm.Pool import Pool from JumpScale9Lib.sal.kvm.StorageController import StorageController from JumpScale9Lib.sal.kvm.KVMController import KVMController from JumpScale9Lib.sal.kvm.Machine import Machine from JumpScale9Lib.sal.kvm.CloudMachine import CloudMachine from JumpScale9Lib.sal.kvm.MachineSnapshot import MachineSnapshot JSBASE = j.application.jsbase_get_class() class KVM(JSBASE): def __init__(self): self.__jslocation__ = "j.sal.kvm" JSBASE.__init__(self) self.__imports__ = "libvirt-python" self.KVMController = KVMController self.Machine = Machine self.MachineSnapshot = MachineSnapshot self.Network = Network self.Interface = Interface self.Disk = Disk self.Pool = Pool self.StorageController = StorageController self.CloudMachine = CloudMachine
36.241379
69
0.758325
118
1,051
6.601695
0.228814
0.077022
0.231065
0.265725
0
0
0
0
0
0
0
0.011534
0.175071
1,051
28
70
37.535714
0.886967
0
0
0
0
0
0.021884
0
0
0
0
0
0
1
0.04
false
0
0.44
0
0.52
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
2dad8351efbff9b0fa3a523f3aed688746b26694
41
py
Python
code/abc069_b_03.py
KoyanagiHitoshi/AtCoder
731892543769b5df15254e1f32b756190378d292
[ "MIT" ]
3
2019-08-16T16:55:48.000Z
2021-04-11T10:21:40.000Z
code/abc069_b_03.py
KoyanagiHitoshi/AtCoder
731892543769b5df15254e1f32b756190378d292
[ "MIT" ]
null
null
null
code/abc069_b_03.py
KoyanagiHitoshi/AtCoder
731892543769b5df15254e1f32b756190378d292
[ "MIT" ]
null
null
null
s=input() print(s[0]+str(len(s)-2)+s[-1])
20.5
31
0.560976
11
41
2.090909
0.727273
0
0
0
0
0
0
0
0
0
0
0.075
0.02439
41
2
31
20.5
0.5
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
930e945abe41ab93de78e9f85c96cbbe1bbf3436
621
py
Python
graphene_gis/tests/test_converter.py
sknutsonsf/graphene-gis
b326ec1ce055a68118e281599209655693535582
[ "MIT" ]
27
2020-03-12T18:56:45.000Z
2022-03-07T19:25:24.000Z
graphene_gis/tests/test_converter.py
sknutsonsf/graphene-gis
b326ec1ce055a68118e281599209655693535582
[ "MIT" ]
6
2020-11-08T15:44:18.000Z
2022-02-28T12:26:33.000Z
graphene_gis/tests/test_converter.py
sknutsonsf/graphene-gis
b326ec1ce055a68118e281599209655693535582
[ "MIT" ]
8
2020-04-10T00:23:00.000Z
2022-01-28T00:17:40.000Z
from django.contrib.gis.db import models as gis_models from django.contrib.postgres import fields from graphene_gis import scalars from graphene_gis.converter import gis_converter, json_converter # noqa from graphene_django.tests.test_converter import assert_conversion def test_should_date_point_field_to_point_scalar(): assert_conversion(gis_models.PointField, scalars.PointScalar) def test_should_date_point_field_to_line_string_scalar(): assert_conversion(gis_models.LineStringField, scalars.LineStringScalar) def test_should_json_to_dict(): assert_conversion(fields.JSONField, scalars.JSONScalar)
34.5
75
0.855072
85
621
5.870588
0.411765
0.128257
0.078156
0.068136
0.240481
0.116232
0.116232
0
0
0
0
0
0.090177
621
17
76
36.529412
0.883186
0.006441
0
0
0
0
0
0
0
0
0
0
0.363636
1
0.272727
true
0
0.454545
0
0.727273
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
4
93109c3812a694f0edc245de52b337a7f593d361
767
py
Python
tests/test_config.py
500-Error/weixin-SDK
d646ff34c58ca6a03ed6b08fb7c2b0c0fe244863
[ "MIT" ]
1
2017-12-01T11:17:22.000Z
2017-12-01T11:17:22.000Z
tests/test_config.py
500-Error/weixin-SDK
d646ff34c58ca6a03ed6b08fb7c2b0c0fe244863
[ "MIT" ]
null
null
null
tests/test_config.py
500-Error/weixin-SDK
d646ff34c58ca6a03ed6b08fb7c2b0c0fe244863
[ "MIT" ]
1
2019-03-29T16:59:11.000Z
2019-03-29T16:59:11.000Z
# encoding=utf-8 from weixin.config import * def test_config(): config = Config(a=0, b=1) assert config.a == 0 assert config.b == 1 config = Config({'a': 0, 'b': 1}) assert config.a == 0 assert config.b == 1 class ConfigObject: A = 0 B = 1 a = 9 _p = 999 config = Config() config.from_object(ConfigObject) assert config.A == 0 assert config.B == 1 assert config.a == 9 assert config._p == 999 config = Config() config.from_object(ConfigObject, lower_keys=True) # 大写小写的A同时存在,配置被覆盖不做检查 assert config.b == 1 assert config._p == 999 config = Config() config.from_json('{"a": 0, "b": 1}') assert config.a == 0 assert config.b == 1
20.184211
53
0.558018
106
767
3.962264
0.245283
0.342857
0.114286
0.166667
0.745238
0.742857
0.680952
0.680952
0.285714
0.285714
0
0.054924
0.311604
767
37
54
20.72973
0.74053
0.045632
0
0.444444
0
0
0.024725
0
0
0
0
0
0.444444
1
0.037037
false
0
0.037037
0
0.259259
0
0
0
0
null
1
0
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
933837d955ae4df32d38b311e385f94bfcfec32d
143
py
Python
sweet_cms/applications/api/views.py
SunilPragroot/sweet
d048126007fd892cb8b6f80348920c8c5152a85c
[ "MIT" ]
1
2021-04-16T14:25:36.000Z
2021-04-16T14:25:36.000Z
sweet_cms/applications/api/views.py
SunilPragroot/sweet
d048126007fd892cb8b6f80348920c8c5152a85c
[ "MIT" ]
122
2020-12-31T06:31:11.000Z
2022-03-18T14:12:03.000Z
mycms___/webpack/api/views.py
Anioko/reusable-cms
52e2a2f11a92c596bd13812d5fd14dffdcdcaa7f
[ "MIT" ]
null
null
null
from flask import Blueprint from flask_restful import Api blueprint = Blueprint('api', __name__, url_prefix="/api") main_api = Api(blueprint)
23.833333
57
0.783217
20
143
5.25
0.5
0.171429
0
0
0
0
0
0
0
0
0
0
0.118881
143
5
58
28.6
0.833333
0
0
0
0
0
0.048951
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0.75
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
1
0
4
934403beaaf4b180e03299ef44678f01f5d9fa9a
306
py
Python
exceptions_StopIteration.py
lmokto/allexceptions
4cdbda52ea00d8022240530649ea85557c1621b2
[ "MIT" ]
null
null
null
exceptions_StopIteration.py
lmokto/allexceptions
4cdbda52ea00d8022240530649ea85557c1621b2
[ "MIT" ]
null
null
null
exceptions_StopIteration.py
lmokto/allexceptions
4cdbda52ea00d8022240530649ea85557c1621b2
[ "MIT" ]
null
null
null
l=[0,1,2] i=iter(l) print i print i.next() print i.next() print i.next() print i.next() ''' $ python exceptions_StopIteration.py <listiterator object at 0x10045f650> 0 1 2 Traceback (most recent call last): File "exceptions_StopIteration.py", line 19, in <module> print i.next() StopIteratiop '''
13.909091
58
0.705882
49
306
4.367347
0.55102
0.168224
0.233645
0.21028
0.186916
0.186916
0.186916
0.186916
0.186916
0
0
0.065637
0.153595
306
21
59
14.571429
0.760618
0
0
0.571429
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0.714286
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
4
936932697044ac0930d0a325c872cc0d84e60451
124
py
Python
for.py
yofine/Learn-Python
29be3ca332a43a57a54bd2b2f2396ead773a7183
[ "MIT" ]
2
2015-11-06T02:52:58.000Z
2016-08-25T13:53:50.000Z
for.py
yofine/Learn-Python
29be3ca332a43a57a54bd2b2f2396ead773a7183
[ "MIT" ]
null
null
null
for.py
yofine/Learn-Python
29be3ca332a43a57a54bd2b2f2396ead773a7183
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- for i in range(1, 5): print i else: print 'The for loop is over'
15.5
32
0.556452
22
124
3.136364
0.863636
0
0
0
0
0
0
0
0
0
0
0.033333
0.274194
124
8
33
15.5
0.733333
0.33871
0
0
0
0
0.246914
0
0
0
0
0
0
0
null
null
0
0
null
null
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
4
8780408c201b2cc8273c85cbfdc81fcfc187babc
32
py
Python
miscellaneous/__init__.py
Jinqi-Cheng/delivery_pal_server
b38f540919b9b61b6b22935e7801ce7da1f41db3
[ "MIT" ]
null
null
null
miscellaneous/__init__.py
Jinqi-Cheng/delivery_pal_server
b38f540919b9b61b6b22935e7801ce7da1f41db3
[ "MIT" ]
1
2021-02-02T23:03:26.000Z
2021-02-02T23:03:26.000Z
miscellaneous/__init__.py
Jinqi-Cheng/delivery_pal_server
b38f540919b9b61b6b22935e7801ce7da1f41db3
[ "MIT" ]
null
null
null
""" Create Date , @author: """
8
14
0.5
3
32
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.21875
32
4
15
8
0.64
0.71875
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
87bc64eb386da7d71f412d6254971c0a033ce42c
156
py
Python
autotf/ensemble/__init__.py
DAIM-ML/autotf
3f82d858f49c27d5ecb624cee555fb8fd47bf067
[ "BSD-3-Clause" ]
8
2018-03-07T06:58:16.000Z
2019-01-30T07:49:44.000Z
autotf/ensemble/__init__.py
DAIM-ML/autotf
3f82d858f49c27d5ecb624cee555fb8fd47bf067
[ "BSD-3-Clause" ]
null
null
null
autotf/ensemble/__init__.py
DAIM-ML/autotf
3f82d858f49c27d5ecb624cee555fb8fd47bf067
[ "BSD-3-Clause" ]
1
2018-03-31T09:06:12.000Z
2018-03-31T09:06:12.000Z
from .stack import Stacking from .blend import Blending from .subsemble import Subsemble from .cobra import Cobra __author__ = 'zwt' __version__ = '0.1.1'
19.5
32
0.775641
22
156
5.136364
0.590909
0
0
0
0
0
0
0
0
0
0
0.022556
0.147436
156
7
33
22.285714
0.827068
0
0
0
0
0
0.051282
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
87d34fc8425cac658cf10fe436f6d09ecd75bb51
74
py
Python
init.py
lemon-mint/simplecompresser
dfc2093aedd6274583de21c9036f5f027960781f
[ "CC0-1.0" ]
1
2019-12-15T12:38:56.000Z
2019-12-15T12:38:56.000Z
init.py
lemon-mint/simplecompresser
dfc2093aedd6274583de21c9036f5f027960781f
[ "CC0-1.0" ]
null
null
null
init.py
lemon-mint/simplecompresser
dfc2093aedd6274583de21c9036f5f027960781f
[ "CC0-1.0" ]
null
null
null
import simplecodecompresser compress = simplecodecompresser.compresscode
18.5
44
0.891892
5
74
13.2
0.8
0
0
0
0
0
0
0
0
0
0
0
0.081081
74
3
45
24.666667
0.970588
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
3559aa09fb0b15bbb230391339a5ffef5d5e6c3a
95
py
Python
baccoapp/mysandwich/apps.py
msienkiewicz7/baccoapp
d647ca205fdf06fe57fda7b6db164ae7d3387dad
[ "MIT" ]
null
null
null
baccoapp/mysandwich/apps.py
msienkiewicz7/baccoapp
d647ca205fdf06fe57fda7b6db164ae7d3387dad
[ "MIT" ]
null
null
null
baccoapp/mysandwich/apps.py
msienkiewicz7/baccoapp
d647ca205fdf06fe57fda7b6db164ae7d3387dad
[ "MIT" ]
null
null
null
from django.apps import AppConfig class MysandwichConfig(AppConfig): name = 'mysandwich'
15.833333
34
0.768421
10
95
7.3
0.9
0
0
0
0
0
0
0
0
0
0
0
0.157895
95
5
35
19
0.9125
0
0
0
0
0
0.105263
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
3560cd014a8bc18cab944da79d4874523748b7a8
96
py
Python
metarecord/apps.py
kerkkoheiskanen/helerm
bdaf801a940d42325a1076b42bb0edef831fbac9
[ "MIT" ]
2
2017-04-21T15:36:23.000Z
2020-12-04T09:32:39.000Z
metarecord/apps.py
kerkkoheiskanen/helerm
bdaf801a940d42325a1076b42bb0edef831fbac9
[ "MIT" ]
168
2016-10-05T12:58:41.000Z
2021-08-31T14:29:56.000Z
metarecord/apps.py
kerkkoheiskanen/helerm
bdaf801a940d42325a1076b42bb0edef831fbac9
[ "MIT" ]
7
2016-10-13T12:51:36.000Z
2021-01-21T13:05:04.000Z
from django.apps import AppConfig class InformationConfig(AppConfig): name = 'metarecord'
16
35
0.770833
10
96
7.4
0.9
0
0
0
0
0
0
0
0
0
0
0
0.15625
96
5
36
19.2
0.91358
0
0
0
0
0
0.104167
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
356347db0a47bfb0add0fb35ad35665be47ae151
150
py
Python
orhan-ergun/while_loop.py
jaredmauck/DEVASC
6c0e4c6fcadc13b4d5445d4bd239d9fee878dc4a
[ "MIT" ]
null
null
null
orhan-ergun/while_loop.py
jaredmauck/DEVASC
6c0e4c6fcadc13b4d5445d4bd239d9fee878dc4a
[ "MIT" ]
null
null
null
orhan-ergun/while_loop.py
jaredmauck/DEVASC
6c0e4c6fcadc13b4d5445d4bd239d9fee878dc4a
[ "MIT" ]
null
null
null
from rich import print count = 1 while count < 10: print (count, "does not equal 10") count = count + 1 else: print(count, "equals 10")
15
38
0.626667
23
150
4.086957
0.565217
0.319149
0
0
0
0
0
0
0
0
0
0.072727
0.266667
150
9
39
16.666667
0.781818
0
0
0
0
0
0.173333
0
0
0
0
0
0
1
0
false
0
0.142857
0
0.142857
0.428571
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
357a66d8d39b39713cfe93fb7400e8366249866b
34,753
py
Python
swift3/test/unit/test_obj.py
KoreaCloudObjectStorage/swift3
3bd412b33d524bb84bffbe5bd97642faaab71703
[ "Apache-2.0" ]
null
null
null
swift3/test/unit/test_obj.py
KoreaCloudObjectStorage/swift3
3bd412b33d524bb84bffbe5bd97642faaab71703
[ "Apache-2.0" ]
null
null
null
swift3/test/unit/test_obj.py
KoreaCloudObjectStorage/swift3
3bd412b33d524bb84bffbe5bd97642faaab71703
[ "Apache-2.0" ]
null
null
null
# Copyright (c) 2014 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest from datetime import datetime import hashlib from os.path import join from mock import patch from swift.common import swob from swift.common.swob import Request from swift3.test.unit import Swift3TestCase from swift3.test.unit.test_s3_acl import s3acl from swift3.subresource import ACL, User, encode_acl, Owner, Grant from swift3.etree import fromstring from swift3.test.unit.helpers import FakeSwift def _wrap_fake_auth_middleware(org_func): def fake_fake_auth_middleware(self, env): org_func(env) if 'swift.authorize_override' in env: return if 'HTTP_AUTHORIZATION' not in env: return _, authorization = env['HTTP_AUTHORIZATION'].split(' ') tenant_user, sign = authorization.rsplit(':', 1) tenant, user = tenant_user.rsplit(':', 1) env['HTTP_X_TENANT_NAME'] = tenant env['HTTP_X_USER_NAME'] = user return fake_fake_auth_middleware class TestSwift3Obj(Swift3TestCase): def setUp(self): super(TestSwift3Obj, self).setUp() self.object_body = 'hello' self.etag = hashlib.md5(self.object_body).hexdigest() self.last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT' self.response_headers = {'Content-Type': 'text/html', 'Content-Length': len(self.object_body), 'x-object-meta-test': 'swift', 'etag': self.etag, 'last-modified': self.last_modified} self.swift.register('GET', '/v1/AUTH_test/bucket/object', swob.HTTPOk, self.response_headers, self.object_body) self.swift.register('PUT', 
'/v1/AUTH_test/bucket/object', swob.HTTPCreated, {'etag': self.etag, 'last-modified': self.last_modified}, None) def _test_object_GETorHEAD(self, method): req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': method}, headers={'Authorization': 'AWS test:tester:hmac'}) status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '200') for key, val in self.response_headers.iteritems(): if key in ('content-length', 'content-type', 'content-encoding', 'last-modified'): self.assertTrue(key in headers) self.assertEquals(headers[key], val) elif key.startswith('x-object-meta-'): self.assertTrue('x-amz-meta-' + key[14:] in headers) self.assertEquals(headers['x-amz-meta-' + key[14:]], val) self.assertEquals(headers['etag'], '"%s"' % self.response_headers['etag']) if method == 'GET': self.assertEquals(body, self.object_body) @s3acl def test_object_HEAD_error(self): # HEAD does not return the body even an error resonse in the # specifications of the REST API. # So, check the response code for error test of HEAD. 
req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': 'HEAD'}, headers={'Authorization': 'AWS test:tester:hmac'}) self.swift.register('HEAD', '/v1/AUTH_test/bucket/object', swob.HTTPUnauthorized, {}, None) status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '403') self.swift.register('HEAD', '/v1/AUTH_test/bucket/object', swob.HTTPForbidden, {}, None) status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '403') self.swift.register('HEAD', '/v1/AUTH_test/bucket/object', swob.HTTPNotFound, {}, None) status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '404') self.swift.register('HEAD', '/v1/AUTH_test/bucket/object', swob.HTTPPreconditionFailed, {}, None) status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '412') self.swift.register('HEAD', '/v1/AUTH_test/bucket/object', swob.HTTPServerError, {}, None) status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '500') self.swift.register('HEAD', '/v1/AUTH_test/bucket/object', swob.HTTPServiceUnavailable, {}, None) status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '500') def test_object_HEAD(self): self._test_object_GETorHEAD('HEAD') def _test_object_HEAD_Range(self, range_value): req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': 'HEAD'}, headers={'Authorization': 'AWS test:tester:hmac', 'Range': range_value}) return self.call_swift3(req) @s3acl def test_object_HEAD_Range_with_invalid_value(self): range_value = '' status, headers, body = self._test_object_HEAD_Range(range_value) self.assertEquals(status.split()[0], '200') self.assertTrue('content-length' in headers) self.assertEqual(headers['content-length'], '5') self.assertTrue('content-range' not in headers) range_value = 'hoge' status, headers, body = self._test_object_HEAD_Range(range_value) self.assertEquals(status.split()[0], '200') 
self.assertTrue('content-length' in headers) self.assertEqual(headers['content-length'], '5') self.assertTrue('content-range' not in headers) range_value = 'bytes=' status, headers, body = self._test_object_HEAD_Range(range_value) self.assertEquals(status.split()[0], '200') self.assertTrue('content-length' in headers) self.assertEqual(headers['content-length'], '5') self.assertTrue('content-range' not in headers) range_value = 'bytes=1' status, headers, body = self._test_object_HEAD_Range(range_value) self.assertEquals(status.split()[0], '200') self.assertTrue('content-length' in headers) self.assertEqual(headers['content-length'], '5') self.assertTrue('content-range' not in headers) range_value = 'bytes=5-1' status, headers, body = self._test_object_HEAD_Range(range_value) self.assertEquals(status.split()[0], '200') self.assertTrue('content-length' in headers) self.assertEqual(headers['content-length'], '5') self.assertTrue('content-range' not in headers) range_value = 'bytes=5-10' status, headers, body = self._test_object_HEAD_Range(range_value) self.assertEquals(status.split()[0], '416') @s3acl def test_object_HEAD_Range(self): # update response headers self.swift.register('HEAD', '/v1/AUTH_test/bucket/object', swob.HTTPOk, self.response_headers, self.object_body) range_value = 'bytes=0-3' status, headers, body = self._test_object_HEAD_Range(range_value) self.assertEquals(status.split()[0], '206') self.assertTrue('content-length' in headers) self.assertEqual(headers['content-length'], '4') self.assertTrue('content-range' in headers) self.assertTrue(headers['content-range'].startswith('bytes 0-3')) self.assertTrue('x-amz-meta-test' in headers) self.assertEqual('swift', headers['x-amz-meta-test']) range_value = 'bytes=3-3' status, headers, body = self._test_object_HEAD_Range(range_value) self.assertEquals(status.split()[0], '206') self.assertTrue('content-length' in headers) self.assertEqual(headers['content-length'], '1') self.assertTrue('content-range' in 
headers) self.assertTrue(headers['content-range'].startswith('bytes 3-3')) self.assertTrue('x-amz-meta-test' in headers) self.assertEqual('swift', headers['x-amz-meta-test']) range_value = 'bytes=1-' status, headers, body = self._test_object_HEAD_Range(range_value) self.assertEquals(status.split()[0], '206') self.assertTrue('content-length' in headers) self.assertEqual(headers['content-length'], '4') self.assertTrue('content-range' in headers) self.assertTrue(headers['content-range'].startswith('bytes 1-4')) self.assertTrue('x-amz-meta-test' in headers) self.assertEqual('swift', headers['x-amz-meta-test']) range_value = 'bytes=-3' status, headers, body = self._test_object_HEAD_Range(range_value) self.assertEquals(status.split()[0], '206') self.assertTrue('content-length' in headers) self.assertEqual(headers['content-length'], '3') self.assertTrue('content-range' in headers) self.assertTrue(headers['content-range'].startswith('bytes 2-4')) self.assertTrue('x-amz-meta-test' in headers) self.assertEqual('swift', headers['x-amz-meta-test']) @s3acl def test_object_GET_error(self): code = self._test_method_error('GET', '/bucket/object', swob.HTTPUnauthorized) self.assertEquals(code, 'SignatureDoesNotMatch') code = self._test_method_error('GET', '/bucket/object', swob.HTTPForbidden) self.assertEquals(code, 'AccessDenied') code = self._test_method_error('GET', '/bucket/object', swob.HTTPNotFound) self.assertEquals(code, 'NoSuchKey') code = self._test_method_error('GET', '/bucket/object', swob.HTTPServerError) self.assertEquals(code, 'InternalError') code = self._test_method_error('GET', '/bucket/object', swob.HTTPPreconditionFailed) self.assertEquals(code, 'PreconditionFailed') code = self._test_method_error('GET', '/bucket/object', swob.HTTPServiceUnavailable) self.assertEquals(code, 'InternalError') @s3acl def test_object_GET(self): self._test_object_GETorHEAD('GET') @s3acl(s3acl_only=True) def test_object_GET_with_s3acl_and_keystone(self): # for passing keystone 
authentication root fake_auth = self.swift._fake_auth_middleware with patch.object(FakeSwift, '_fake_auth_middleware', _wrap_fake_auth_middleware(fake_auth)): self._test_object_GETorHEAD('GET') _, _, headers = self.swift.calls_with_headers[-1] self.assertTrue('Authorization' not in headers) _, _, headers = self.swift.calls_with_headers[0] self.assertTrue('Authorization' not in headers) @s3acl def test_object_GET_Range(self): req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': 'GET'}, headers={'Authorization': 'AWS test:tester:hmac', 'Range': 'bytes=0-3'}) status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '206') self.assertTrue('content-range' in headers) self.assertTrue(headers['content-range'].startswith('bytes 0-3')) @s3acl def test_object_GET_Range_error(self): code = self._test_method_error('GET', '/bucket/object', swob.HTTPRequestedRangeNotSatisfiable) self.assertEquals(code, 'InvalidRange') @s3acl def test_object_GET_Response(self): req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'response-content-type=%s&' 'response-content-language=%s&' 'response-expires=%s&' 'response-cache-control=%s&' 'response-content-disposition=%s&' 'response-content-encoding=%s&' % ('text/plain', 'en', 'Fri, 01 Apr 2014 12:00:00 GMT', 'no-cache', 'attachment', 'gzip')}, headers={'Authorization': 'AWS test:tester:hmac'}) status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '200') self.assertTrue('content-type' in headers) self.assertEquals(headers['content-type'], 'text/plain') self.assertTrue('content-language' in headers) self.assertEquals(headers['content-language'], 'en') self.assertTrue('expires' in headers) self.assertEquals(headers['expires'], 'Fri, 01 Apr 2014 12:00:00 GMT') self.assertTrue('cache-control' in headers) self.assertEquals(headers['cache-control'], 'no-cache') self.assertTrue('content-disposition' in headers) 
self.assertEquals(headers['content-disposition'], 'attachment') self.assertTrue('content-encoding' in headers) self.assertEquals(headers['content-encoding'], 'gzip') @s3acl def test_object_PUT_error(self): code = self._test_method_error('PUT', '/bucket/object', swob.HTTPUnauthorized) self.assertEquals(code, 'SignatureDoesNotMatch') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPForbidden) self.assertEquals(code, 'AccessDenied') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPNotFound) self.assertEquals(code, 'NoSuchBucket') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPRequestEntityTooLarge) self.assertEquals(code, 'EntityTooLarge') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPServerError) self.assertEquals(code, 'InternalError') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPUnprocessableEntity) self.assertEquals(code, 'InvalidDigest') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPLengthRequired) self.assertEquals(code, 'MissingContentLength') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPServiceUnavailable) self.assertEquals(code, 'InternalError') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPCreated, {'X-Amz-Copy-Source': ''}) self.assertEquals(code, 'InvalidArgument') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPCreated, {'X-Amz-Copy-Source': '/'}) self.assertEquals(code, 'InvalidArgument') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPCreated, {'X-Amz-Copy-Source': '/bucket'}) self.assertEquals(code, 'InvalidArgument') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPCreated, {'X-Amz-Copy-Source': '/bucket/'}) self.assertEquals(code, 'InvalidArgument') code = self._test_method_error('PUT', '/bucket/object', swob.HTTPRequestTimeout) self.assertEquals(code, 'RequestTimeout') @s3acl def test_object_PUT(self): etag = self.response_headers['etag'] content_md5 = 
etag.decode('hex').encode('base64').strip() req = Request.blank( '/bucket/object', environ={'REQUEST_METHOD': 'PUT'}, headers={'Authorization': 'AWS test:tester:hmac', 'x-amz-storage-class': 'STANDARD', 'Content-MD5': content_md5}, body=self.object_body) req.date = datetime.now() req.content_type = 'text/plain' status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '200') # Check that swift3 returns an etag header. self.assertEquals(headers['etag'], '"%s"' % etag) _, _, headers = self.swift.calls_with_headers[-1] # Check that swift3 converts a Content-MD5 header into an etag. self.assertEquals(headers['etag'], etag) def test_object_PUT_headers(self): content_md5 = self.etag.decode('hex').encode('base64').strip() self.swift.register('HEAD', '/v1/AUTH_test/some/source', swob.HTTPOk, {'last-modified': self.last_modified}, None) req = Request.blank( '/bucket/object', environ={'REQUEST_METHOD': 'PUT'}, headers={'Authorization': 'AWS test:tester:hmac', 'X-Amz-Storage-Class': 'STANDARD', 'X-Amz-Meta-Something': 'oh hai', 'X-Amz-Copy-Source': '/some/source', 'Content-MD5': content_md5}) req.date = datetime.now() req.content_type = 'text/plain' status, headers, body = self.call_swift3(req) # Check that swift3 dones not return an etag header, # sepcified copy source. self.assertTrue(headers.get('etag') is None) _, _, headers = self.swift.calls_with_headers[-1] # Check that swift3 converts a Content-MD5 header into an etag. 
self.assertEquals(headers['ETag'], self.etag) self.assertEquals(headers['X-Object-Meta-Something'], 'oh hai') self.assertEquals(headers['X-Copy-From'], '/some/source') self.assertEquals(headers['Content-Length'], '0') def _test_object_PUT_copy(self, head_resp, put_header={}): account = 'test:tester' grants = [Grant(User(account), 'FULL_CONTROL')] head_headers = \ encode_acl('object', ACL(Owner(account, account), grants)) head_headers.update({'last-modified': self.last_modified}) self.swift.register('HEAD', '/v1/AUTH_test/some/source', head_resp, head_headers, None) put_headers = {'Authorization': 'AWS test:tester:hmac', 'X-Amz-Copy-Source': '/some/source'} put_headers.update(put_header) req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': 'PUT'}, headers=put_headers) req.date = datetime.now() req.content_type = 'text/plain' return self.call_swift3(req) @s3acl def test_object_PUT_copy(self): last_modified = '2014-04-01T12:00:00' status, headers, body = \ self._test_object_PUT_copy(swob.HTTPOk) self.assertEquals(status.split()[0], '200') self.assertEquals(headers['Content-Type'], 'application/xml') self.assertTrue(headers.get('etag') is None) elem = fromstring(body, 'CopyObjectResult') self.assertEquals(elem.find('LastModified').text, last_modified) self.assertEquals(elem.find('ETag').text, '"%s"' % self.etag) _, _, headers = self.swift.calls_with_headers[-1] self.assertEquals(headers['X-Copy-From'], '/some/source') self.assertEquals(headers['Content-Length'], '0') @s3acl def test_object_PUT_copy_headers_error(self): etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1' last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT' header = {'X-Amz-Copy-Source-If-Match': etag} status, header, body = \ self._test_object_PUT_copy(swob.HTTPPreconditionFailed, header) self.assertEquals(self._get_error_code(body), 'PreconditionFailed') header = {'X-Amz-Copy-Source-If-None-Match': etag} status, header, body = \ self._test_object_PUT_copy(swob.HTTPNotModified, header) 
self.assertEquals(self._get_error_code(body), 'PreconditionFailed') header = {'X-Amz-Copy-Source-If-Modified-Since': last_modified_since} status, header, body = \ self._test_object_PUT_copy(swob.HTTPNotModified, header) self.assertEquals(self._get_error_code(body), 'PreconditionFailed') header = \ {'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since} status, header, body = \ self._test_object_PUT_copy(swob.HTTPPreconditionFailed, header) self.assertEquals(self._get_error_code(body), 'PreconditionFailed') def test_object_PUT_copy_headers_with_match(self): etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1' last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT' header = {'X-Amz-Copy-Source-If-Match': etag, 'X-Amz-Copy-Source-If-Modified-Since': last_modified_since} status, header, body = \ self._test_object_PUT_copy(swob.HTTPOk, header) self.assertEquals(status.split()[0], '200') self.assertEquals(len(self.swift.calls_with_headers), 2) _, _, headers = self.swift.calls_with_headers[-1] self.assertTrue(headers.get('If-Match') is None) self.assertTrue(headers.get('If-Modified-Since') is None) _, _, headers = self.swift.calls_with_headers[0] self.assertEquals(headers['If-Match'], etag) self.assertEquals(headers['If-Modified-Since'], last_modified_since) @s3acl(s3acl_only=True) def test_object_PUT_copy_headers_with_match_and_s3acl(self): etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1' last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT' header = {'X-Amz-Copy-Source-If-Match': etag, 'X-Amz-Copy-Source-If-Modified-Since': last_modified_since} status, header, body = \ self._test_object_PUT_copy(swob.HTTPOk, header) self.assertEquals(status.split()[0], '200') self.assertEquals(len(self.swift.calls_with_headers), 3) # After the check of the copy source in the case of s3acl is valid, # Swift3 check the bucket write permissions of the destination. 
_, _, headers = self.swift.calls_with_headers[-2] self.assertTrue(headers.get('If-Match') is None) self.assertTrue(headers.get('If-Modified-Since') is None) _, _, headers = self.swift.calls_with_headers[-1] self.assertTrue(headers.get('If-Match') is None) self.assertTrue(headers.get('If-Modified-Since') is None) _, _, headers = self.swift.calls_with_headers[0] self.assertEquals(headers['If-Match'], etag) self.assertEquals(headers['If-Modified-Since'], last_modified_since) def test_object_PUT_copy_headers_with_not_match(self): etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1' last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT' header = {'X-Amz-Copy-Source-If-None-Match': etag, 'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since} status, header, body = \ self._test_object_PUT_copy(swob.HTTPOk, header) self.assertEquals(status.split()[0], '200') self.assertEquals(len(self.swift.calls_with_headers), 2) _, _, headers = self.swift.calls_with_headers[-1] self.assertTrue(headers.get('If-None-Match') is None) self.assertTrue(headers.get('If-Unmodified-Since') is None) _, _, headers = self.swift.calls_with_headers[0] self.assertEquals(headers['If-None-Match'], etag) self.assertEquals(headers['If-Unmodified-Since'], last_modified_since) @s3acl(s3acl_only=True) def test_object_PUT_copy_headers_with_not_match_and_s3acl(self): etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1' last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT' header = {'X-Amz-Copy-Source-If-None-Match': etag, 'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since} status, header, body = \ self._test_object_PUT_copy(swob.HTTPOk, header) self.assertEquals(status.split()[0], '200') # After the check of the copy source in the case of s3acl is valid, # Swift3 check the bucket write permissions of the destination. 
self.assertEquals(len(self.swift.calls_with_headers), 3) _, _, headers = self.swift.calls_with_headers[-1] self.assertTrue(headers.get('If-None-Match') is None) self.assertTrue(headers.get('If-Unmodified-Since') is None) _, _, headers = self.swift.calls_with_headers[0] self.assertEquals(headers['If-None-Match'], etag) self.assertEquals(headers['If-Unmodified-Since'], last_modified_since) @s3acl def test_object_POST_error(self): code = self._test_method_error('POST', '/bucket/object', None) self.assertEquals(code, 'NotImplemented') @s3acl def test_object_DELETE_error(self): code = self._test_method_error('DELETE', '/bucket/object', swob.HTTPUnauthorized) self.assertEquals(code, 'SignatureDoesNotMatch') code = self._test_method_error('DELETE', '/bucket/object', swob.HTTPForbidden) self.assertEquals(code, 'AccessDenied') code = self._test_method_error('DELETE', '/bucket/object', swob.HTTPServerError) self.assertEquals(code, 'InternalError') code = self._test_method_error('DELETE', '/bucket/object', swob.HTTPServiceUnavailable) self.assertEquals(code, 'InternalError') with patch('swift3.request.get_container_info', return_value={'status': 204}): code = self._test_method_error('DELETE', '/bucket/object', swob.HTTPNotFound) self.assertEquals(code, 'NoSuchKey') with patch('swift3.request.get_container_info', return_value={'status': 404}): code = self._test_method_error('DELETE', '/bucket/object', swob.HTTPNotFound) self.assertEquals(code, 'NoSuchBucket') @s3acl def test_object_DELETE(self): req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': 'DELETE'}, headers={'Authorization': 'AWS test:tester:hmac'}) status, headers, body = self.call_swift3(req) self.assertEquals(status.split()[0], '204') def _test_object_for_s3acl(self, method, account): req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': method}, headers={'Authorization': 'AWS %s:hmac' % account}) return self.call_swift3(req) def _test_set_container_permission(self, account, permission): 
grants = [Grant(User(account), permission)] headers = \ encode_acl('container', ACL(Owner('test:tester', 'test:tester'), grants)) self.swift.register('HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, headers, None) @s3acl(s3acl_only=True) def test_object_GET_without_permission(self): status, headers, body = self._test_object_for_s3acl('GET', 'test:other') self.assertEquals(self._get_error_code(body), 'AccessDenied') @s3acl(s3acl_only=True) def test_object_GET_with_read_permission(self): status, headers, body = self._test_object_for_s3acl('GET', 'test:read') self.assertEquals(status.split()[0], '200') @s3acl(s3acl_only=True) def test_object_GET_with_fullcontrol_permission(self): status, headers, body = \ self._test_object_for_s3acl('GET', 'test:full_control') self.assertEquals(status.split()[0], '200') @s3acl(s3acl_only=True) def test_object_PUT_without_permission(self): status, headers, body = self._test_object_for_s3acl('PUT', 'test:other') self.assertEquals(self._get_error_code(body), 'AccessDenied') @s3acl(s3acl_only=True) def test_object_PUT_with_owner_permission(self): status, headers, body = self._test_object_for_s3acl('PUT', 'test:tester') self.assertEquals(status.split()[0], '200') @s3acl(s3acl_only=True) def test_object_PUT_with_write_permission(self): account = 'test:other' self._test_set_container_permission(account, 'WRITE') status, headers, body = self._test_object_for_s3acl('PUT', account) self.assertEquals(status.split()[0], '200') @s3acl(s3acl_only=True) def test_object_PUT_with_fullcontrol_permission(self): account = 'test:other' self._test_set_container_permission(account, 'FULL_CONTROL') status, headers, body = \ self._test_object_for_s3acl('PUT', account) self.assertEquals(status.split()[0], '200') @s3acl(s3acl_only=True) def test_object_DELETE_without_permission(self): account = 'test:other' status, headers, body = self._test_object_for_s3acl('DELETE', account) self.assertEquals(self._get_error_code(body), 'AccessDenied') 
@s3acl(s3acl_only=True) def test_object_DELETE_with_owner_permission(self): status, headers, body = self._test_object_for_s3acl('DELETE', 'test:tester') self.assertEquals(status.split()[0], '204') @s3acl(s3acl_only=True) def test_object_DELETE_with_write_permission(self): account = 'test:other' self._test_set_container_permission(account, 'WRITE') status, headers, body = self._test_object_for_s3acl('DELETE', account) self.assertEquals(status.split()[0], '204') @s3acl(s3acl_only=True) def test_object_DELETE_with_fullcontrol_permission(self): account = 'test:other' self._test_set_container_permission(account, 'FULL_CONTROL') status, headers, body = self._test_object_for_s3acl('DELETE', account) self.assertEquals(status.split()[0], '204') def _test_object_copy_for_s3acl(self, account, src_permission=None, src_path='/src_bucket/src_obj'): owner = 'test:tester' grants = [Grant(User(account), src_permission)] \ if src_permission else [Grant(User(owner), 'FULL_CONTROL')] src_o_headers = \ encode_acl('object', ACL(Owner(owner, owner), grants)) src_o_headers.update({'last-modified': self.last_modified}) self.swift.register( 'HEAD', join('/v1/AUTH_test', src_path.lstrip('/')), swob.HTTPOk, src_o_headers, None) req = Request.blank( '/bucket/object', environ={'REQUEST_METHOD': 'PUT'}, headers={'Authorization': 'AWS %s:hmac' % account, 'X-Amz-Copy-Source': src_path}) return self.call_swift3(req) @s3acl(s3acl_only=True) def test_object_PUT_copy_with_owner_permission(self): status, headers, body = \ self._test_object_copy_for_s3acl('test:tester') self.assertEquals(status.split()[0], '200') @s3acl(s3acl_only=True) def test_object_PUT_copy_with_fullcontrol_permission(self): status, headers, body = \ self._test_object_copy_for_s3acl('test:full_control', 'FULL_CONTROL') self.assertEquals(status.split()[0], '200') @s3acl(s3acl_only=True) def test_object_PUT_copy_with_grantee_permission(self): status, headers, body = \ self._test_object_copy_for_s3acl('test:write', 'READ') 
self.assertEquals(status.split()[0], '200') @s3acl(s3acl_only=True) def test_object_PUT_copy_without_src_obj_permission(self): status, headers, body = \ self._test_object_copy_for_s3acl('test:write') self.assertEquals(status.split()[0], '403') @s3acl(s3acl_only=True) def test_object_PUT_copy_without_dst_container_permission(self): status, headers, body = \ self._test_object_copy_for_s3acl('test:other', 'READ') self.assertEquals(status.split()[0], '403') @s3acl(s3acl_only=True) def test_object_PUT_copy_empty_src_path(self): self.swift.register('PUT', '/v1/AUTH_test/bucket/object', swob.HTTPPreconditionFailed, {}, None) status, headers, body = self._test_object_copy_for_s3acl( 'test:write', 'READ', src_path='') self.assertEquals(status.split()[0], '400') if __name__ == '__main__': unittest.main()
46.275632
79
0.595028
3,784
34,753
5.260307
0.080603
0.086009
0.028737
0.0422
0.797187
0.767194
0.729113
0.71997
0.688621
0.640543
0
0.022228
0.282825
34,753
750
80
46.337333
0.7764
0.036371
0
0.608347
0
0
0.171967
0.036013
0
0
0
0
0.272873
1
0.077047
false
0
0.019262
0
0.109149
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
3596f07e847ed59acf0ef697624d39c30c752391
772
py
Python
zentral/contrib/monolith/migrations/0021_auto_20170409_1145.py
arubdesu/zentral
ac0fe663f6e1c27f9a9f55a7500a87e6ac7d9190
[ "Apache-2.0" ]
634
2015-10-30T00:55:40.000Z
2022-03-31T02:59:00.000Z
zentral/contrib/monolith/migrations/0021_auto_20170409_1145.py
arubdesu/zentral
ac0fe663f6e1c27f9a9f55a7500a87e6ac7d9190
[ "Apache-2.0" ]
145
2015-11-06T00:17:33.000Z
2022-03-16T13:30:31.000Z
zentral/contrib/monolith/migrations/0021_auto_20170409_1145.py
arubdesu/zentral
ac0fe663f6e1c27f9a9f55a7500a87e6ac7d9190
[ "Apache-2.0" ]
103
2015-11-07T07:08:49.000Z
2022-03-18T17:34:36.000Z
# -*- coding: utf-8 -*- # Generated by Django 1.10.7 on 2017-04-09 11:45 from __future__ import unicode_literals from django.db import migrations from zentral.contrib.monolith.conf import monolith_conf from zentral.contrib.monolith.models import ManifestEnrollmentPackage from zentral.contrib.monolith.utils import build_manifest_enrollment_package def create_manifest_enrollment_packages(apps, schema_editor): pass def build_manifest_enrollment_packages(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('monolith', '0020_auto_20170409_1014'), ] operations = [ migrations.RunPython(create_manifest_enrollment_packages), migrations.RunPython(build_manifest_enrollment_packages), ]
26.62069
76
0.778497
91
772
6.318681
0.538462
0.156522
0.18087
0.135652
0.16
0.16
0.16
0
0
0
0
0.05
0.145078
772
28
77
27.571429
0.821212
0.088083
0
0.117647
1
0
0.044223
0.03281
0
0
0
0
0
1
0.117647
false
0.117647
0.294118
0
0.588235
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
35a75ee6fcfeccb95a8154465560a25adaddb315
249
py
Python
src/pyuwds3/reasoning/grounding/route_generator.py
uwds3/uwds3
3ec70111d63db0c8d97d9f1e0110b7fe9ad56179
[ "MIT" ]
null
null
null
src/pyuwds3/reasoning/grounding/route_generator.py
uwds3/uwds3
3ec70111d63db0c8d97d9f1e0110b7fe9ad56179
[ "MIT" ]
null
null
null
src/pyuwds3/reasoning/grounding/route_generator.py
uwds3/uwds3
3ec70111d63db0c8d97d9f1e0110b7fe9ad56179
[ "MIT" ]
null
null
null
import cv2 class RouteGenerator(object): """ """ def __init__(self, model, weights, config_file, max_depth=25): self.model = cv2.readNetFromTensorflow(model, weights) def generate(self, depth_image, goal_track): pass
20.75
66
0.674699
29
249
5.517241
0.724138
0.1125
0
0
0
0
0
0
0
0
0
0.020408
0.212851
249
11
67
22.636364
0.795918
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.166667
0.166667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
35d2d704480c3e93dfeaeb6f2d9fa1a72bf08613
1,358
py
Python
sdk/python/pulumi_azure_native/kubernetes/__init__.py
polivbr/pulumi-azure-native
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
[ "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure_native/kubernetes/__init__.py
polivbr/pulumi-azure-native
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
[ "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure_native/kubernetes/__init__.py
polivbr/pulumi-azure-native
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** from .. import _utilities import typing # Export this package's modules as members: from ._enums import * from .connected_cluster import * from .get_connected_cluster import * from .list_connected_cluster_user_credential import * from .list_connected_cluster_user_credentials import * from ._inputs import * from . import outputs # Make subpackages available: if typing.TYPE_CHECKING: import pulumi_azure_native.kubernetes.v20200101preview as __v20200101preview v20200101preview = __v20200101preview import pulumi_azure_native.kubernetes.v20210301 as __v20210301 v20210301 = __v20210301 import pulumi_azure_native.kubernetes.v20210401preview as __v20210401preview v20210401preview = __v20210401preview import pulumi_azure_native.kubernetes.v20211001 as __v20211001 v20211001 = __v20211001 else: v20200101preview = _utilities.lazy_import('pulumi_azure_native.kubernetes.v20200101preview') v20210301 = _utilities.lazy_import('pulumi_azure_native.kubernetes.v20210301') v20210401preview = _utilities.lazy_import('pulumi_azure_native.kubernetes.v20210401preview') v20211001 = _utilities.lazy_import('pulumi_azure_native.kubernetes.v20211001')
42.4375
96
0.809278
156
1,358
6.692308
0.384615
0.091954
0.130268
0.176245
0.463602
0.463602
0.176245
0
0
0
0
0.162595
0.12592
1,358
31
97
43.806452
0.716933
0.170103
0
0
1
0
0.155357
0.155357
0
0
0
0
0
1
0
false
0
0.73913
0
0.73913
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
ea0ae9e093d99932442e626381703ba2427b5639
1,687
py
Python
prettyqt/positioning/geopositioninfo.py
phil65/PrettyQt
26327670c46caa039c9bd15cb17a35ef5ad72e6c
[ "MIT" ]
7
2019-05-01T01:34:36.000Z
2022-03-08T02:24:14.000Z
prettyqt/positioning/geopositioninfo.py
phil65/PrettyQt
26327670c46caa039c9bd15cb17a35ef5ad72e6c
[ "MIT" ]
141
2019-04-16T11:22:01.000Z
2021-04-14T15:12:36.000Z
prettyqt/positioning/geopositioninfo.py
phil65/PrettyQt
26327670c46caa039c9bd15cb17a35ef5ad72e6c
[ "MIT" ]
5
2019-04-17T11:48:19.000Z
2021-11-21T10:30:19.000Z
from __future__ import annotations from typing import Literal from prettyqt import core, positioning from prettyqt.qt import QtPositioning from prettyqt.utils import bidict ATTRIBUTE = bidict( direction=QtPositioning.QGeoPositionInfo.Attribute.Direction, ground_speed=QtPositioning.QGeoPositionInfo.Attribute.GroundSpeed, vertical_speed=QtPositioning.QGeoPositionInfo.Attribute.VerticalSpeed, magnetic_variation=QtPositioning.QGeoPositionInfo.Attribute.MagneticVariation, horizontal_accuracy=QtPositioning.QGeoPositionInfo.Attribute.HorizontalAccuracy, vertical_accuracy=QtPositioning.QGeoPositionInfo.Attribute.VerticalAccuracy, ) AttributeStr = Literal[ "direction", "ground_speed", "vertical_speed", "magnetic_variation", "horizontal_accuracy", "vertical_accuracy", ] class GeoPositionInfo(QtPositioning.QGeoPositionInfo): def __repr__(self): return f"{type(self).__name__}({self.get_coordinate()}, {self.get_timestamp()})" def __contains__(self, index: AttributeStr): return self.hasAttribute(ATTRIBUTE[index]) def __getitem__(self, index: AttributeStr) -> float: return self.attribute(ATTRIBUTE[index]) def __setitem__(self, index: AttributeStr, value: float): self.setAttribute(ATTRIBUTE[index], value) def __delitem__(self, index: AttributeStr): return self.removeAttribute(ATTRIBUTE[index]) def __bool__(self): return self.isValid() def get_coordinate(self) -> positioning.GeoCoordinate: return positioning.GeoCoordinate(self.coordinate()) def get_timestamp(self) -> core.DateTime: return core.DateTime(self.timestamp())
31.830189
88
0.757558
162
1,687
7.604938
0.339506
0.164773
0.185065
0.069805
0.050325
0
0
0
0
0
0
0
0.148192
1,687
52
89
32.442308
0.857342
0
0
0
0
0
0.09425
0.040901
0
0
0
0
0
1
0.210526
false
0
0.131579
0.184211
0.552632
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
ea55b3f64e7c9107489b5f4f104d283afa9973e9
159
py
Python
my_wallet/stocks/forms.py
Bounty1993/my-wallet
c14f8efaa1c3b90f9d5b0a6c5b5aabb26ed541fa
[ "MIT" ]
null
null
null
my_wallet/stocks/forms.py
Bounty1993/my-wallet
c14f8efaa1c3b90f9d5b0a6c5b5aabb26ed541fa
[ "MIT" ]
8
2020-06-05T19:52:06.000Z
2022-03-11T23:40:19.000Z
my_wallet/stocks/forms.py
Bounty1993/my-wallet
c14f8efaa1c3b90f9d5b0a6c5b5aabb26ed541fa
[ "MIT" ]
null
null
null
from django import forms from .models import Stocks class NewStockForm(forms.ModelForm): class Meta: model = Stocks fields = ['ticker']
15.9
36
0.666667
18
159
5.888889
0.722222
0
0
0
0
0
0
0
0
0
0
0
0.257862
159
9
37
17.666667
0.898305
0
0
0
0
0
0.037736
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
ea59603c24a4e219a38ee3538ed0b51af8fce820
11,850
py
Python
tests/concepts_test.py
sverrirab/icenews
10a5e13d4dcd5e95f746c4fec9821b4b48fa440e
[ "Apache-2.0" ]
4
2019-04-25T21:09:39.000Z
2020-07-26T08:57:00.000Z
tests/concepts_test.py
sverrirab/icenews
10a5e13d4dcd5e95f746c4fec9821b4b48fa440e
[ "Apache-2.0" ]
1
2019-08-11T00:27:18.000Z
2019-08-12T17:36:42.000Z
tests/concepts_test.py
sverrirab/icenews
10a5e13d4dcd5e95f746c4fec9821b4b48fa440e
[ "Apache-2.0" ]
null
null
null
import unittest from icenews.concepts import Concepts T1 = """Höfða mál vegna vopnasölu til Sádi-Arabíu Mannréttindasamtök á Ítalíu, Þýskalandi og Jemen hafa höfðað mál á hendur ítölskum yfirvöldum og evrópskum vopnaframleiðendum fyrir meinta aðild að loftárásum Sádi-Araba og bandamanna þeirra í Jemen. Þar berjast þeir gegn Houthi uppreisnarmönnum. Þúsundir almennra borgara hefur fallið í árásunum undanfarin þrjú ár. Mannréttindasamtökin European Center for Constitutional and Human Rights (ECCHR) í Berlín, Mwatana Organisation for Human Rights í Jemen og Reta Italiana per il Disarmo á Ítalíu lögðu fram sameiginlega ákæru til saksóknara í Róm í gær. Hún beinist gegn embættismönnum í utanríkisráðuneyti Ítalíu og æðstu stjórnendum útibús þýsku vopnasamsteypunnar Rheinmetall á Ítalíu, RWM Italia. Greint er frá málinu á vef Guardian. Þar segir að tilraunir mannréttindasamtaka í Bretlandi og víðar til fá embættismenn og vopnaframleiðendur sakfellda, hafi ekki borið árangur þar sem ákærurnar hafi náð yfir of breitt svið. Ákæran sem var lögð fram í gær er vegna sprengjuárásar 8. október 2016. Í henni var sex manna fjölskylda drepin, þar af fjögur börn. Forsvarsfólk mannréttindasamtakanna vonar að með því að leggja fram kæru vegna eins ákveðins máls séu meiri líkur á sakfellingu. Sprengjuleifar fundust á vettvangi þar sem árásin var gerð árið 2016. Á þeim er númer sem gefur til kynna að þær hafi verið framleiddar í júní 2014 hjá RWM Itala. Linde Bryn, hollenskur lögfræðingur sem áður starfaði í Kósóvó, segir að málið sé sérstakt þar sem merktar sprengjuleifar hafi fundist á vettvangi. „Þetta mál er táknrænt þar sem það snertir ekki aðeins embættismenn og yfirmenn vopnaframleiðanda á Ítalíu. Það snertir almennt ábyrgð ríkisstjórna í Evrópu og evrópska vopnaframleiðendur og hver þeirra ábyrgð er á afleiðingum útflutnings vopna sem eru notuð af Sádi-Aröbum og bandamönnum þeirra,“ segir hún. 
Greint var frá því í fréttaskýringaþættinum Kveik í febrúar síðastliðnum að flugvélar á vegum íslenska flugfélagsins Atlanta hafi á undanförnum árum flutt vopn til Sádi-Arabíu. Til þess fengu þau heimild frá Samgöngustofu. Vopnin gætu hafa verið borist til stríðshrjáðra landa eins og Jemens og Sýrlands. Samkvæmt vopnasáttmála Sameinuðu þjóðanna frá árinu 2014 er bannað að afhenda bardagasveitum, sem brjóta gegn stríðsrétti, vopn. Tveir þingmenn Vinstri grænna og þingmenn stjórnarandstöðuflokka óskuðu 11. apríl síðastliðinn eftir skýrslu frá utanríkisráðherra um það hvort íslenska ríkið hafi brotið gegn banni Öryggisráðs Sameinuðu þjóðanna um vopnaflutning.""" T2 = """ Stjórnarslitin í júní 2017 urðu til þess að skólpmál í Mývatnssveit tóku nýja stefnu og nú er gerbreytt og mun ódýrari umbótaáætlun nær tilbúin. Sveitarstjóri Skútustaðahrepps segir að stefnt sé að samningum við ríkisvaldið í næstu viku. Umbótaáætlun í skólpmálum hefur verið í vinnslu hjá Skútustaðahreppi frá því snemma á síðasta ári. Upphaflega var gert ráð fyrir lokuðu kerfi með hreinsistöðvum sem átti að kosta um einn milljarð króna. Ríkisstjórnin sprakk daginn fyrir fund um skólpmálin Þorsteinn Gunnarsson, sveitarstjóri, sagði á Morgunvaktinni á Rás 1 í morgun, að þannig áætlun hafi verið nánast tilbúin í júní 2017. „Við áttum þá í samningaviðræðum við ríkið. Þetta var á fimmtudagskvöldi og við áttum bókaðan fund með umhverfisráðherra og fjármálaráðherra á mánudeginum á eftir. Og við vorum mjög bjartsýn og töldum að það eina sem myndi koma í veg fyrir að við myndum klára samningaviðræður við ríkið, væri ef ríkisstjórnin myndi springa. Og það gerðist þetta sama kvöld." Allt fór á byrjunarreit og ný hugmynd fæddist En þetta var lán í óláni, segir Þorsteinn, því að allt fór á byrjunarreit og ný hugmynd, sem nú er unnið út frá, fæddist. Hún felst í stuttu máli í því að safna saman úrgangi úr salernum í Mývatnssveit og nýta til landgræðslu. 
„Það sem fer í salernin, sem er svartvatnið, það fer í lokaða tanka. Það sem kemur úr sturtum, vöskum og annað, heldur bara áfram í gegnum rotþróa siturbeð. Síðan kemur tankbíll, dælir svartvatninu úr þessum lokuðu tönkum og fer með það upp á Hólasand þar sem verður reistur safntankur og grófhreinsistöð. Og svartvatnið er þetta næringarríka efni sem verður síðan notað til uppgræðslu á Hólasandi." Vonandi gengið frá samingum við ríkið eftir helgi Heilbrigðiseftirlitið hefur nú samþykkt þessa nýju áætlun og aftur þurfa Mývetningar að treysta á ríkisstjórnina. „Og við erum núna á lokasprettinum í samningaviðræðum við ríkið. Og það verður vonandi gengið frá því ekki seinna en í næstu viku. Þannig að það yrðu ansi stórar fréttir.""" T3 = """ Ef hægt verður að vinna orku úr borholum við Eldvörp á Reykjanesi, gætu verið lagðir allt að 7.000 fermetra stórir borteigar. Nú stenda yfir rannsóknarboranir á vegum HS Orku. Samkvæmt framkvæmdaleyfi til þeirra er heimild til að gera fimm borteiga sem eru 4.200 til 5.700 fermetrar hver. Ekki er víst hvort þeir verði allir lagðir. Í umhverfismatsskýrslu frá VSÓ ráðgjöf frá 2014 segir að áætlað rask á hrauni vegna framkvæmda við rannsóknarboranir sé 23.000 fermetrar verði öll borplönin lögð. Líklegra sé þó að rask verði á um 15.000 fermetrum. Hugmyndir um fleiri en eina holu á borteig Ein hola er boruð á hverjum teig í rannsóknaborunum en ef þær bera góðan árangur og orkuvinnsla hefst gæti svo farið að HS Orka vilji hafa tvær til þrjár borholur á teig. Þeir borteigar verða 3.500 til 7.000 fermetrar, að sögn Ásgeirs Margeirssonar, forstjóra HS Orku. „Stærð fer eftir því hve margar holur eru boraðar frá sama teig. Með því að hafa fleiri en eina holu á borteig, gjarnan tvær til þrjár, er unnt að fækka borteigum. Með því móti minnka umhverfisáhrif við gerð borteiga auk þess sem vegagerð verður minni og gufulagnir einnig ef til orkunýtingar kemur. 
Verkefnið nú heimilar allt að fimm borteiga og er ráðgert að bora í fyrstu tvær til þrjár rannsóknarholur,“ segir hann. Ekki sé hægt að segja til um það núna hvort gerðir verði fimm borteigar. Boruð sé ein hola í einu og niðurstöður rannsókna á henni ráði því hvert framhaldið verði. HS Orka er með framkvæmdaleyfi frá Grindavíkurbæ til rannsóknaborana og til að gera borteiga sem eru 5.700 fermetrar að stærð. Ef hægt verður að vinna orku úr borholunum og HS Orka vill láta stækka borteigana í allt að 7.000 fermetra þarf að sækja um leyfi til Grindavíkurbæjar og fá deiliskipulagi breytt. Fyrsti borteigurinn er 5.500 fermetrar Fyrsti borteigurinn er tilbúinn fyrir tilraunaborun og er 5.500 fermetrar. Þar er áætlað að bora 2.000 til 2.500 metra djúpa holu. Ætlunin er að bora eina holu til að byrja með, prófa holuna og meta þær niðurstöður. Áætlað er að hefja borun í haust og ljúka henni á sex til átta vikum. Síðan taka við prófanir og rannsóknir fram á vor 2019, samkvæmt upplýsingum frá HS Orku. Áætlað er að meta það næsta sumar hvar heppilegt væri að staðsetja næstu holu. Náttúruverndarsinnar gegn tilraunaborunum Eldvörp eru gígaröð sem myndaðist á 13. öld. Náttúruverndarsinnar hafa lagst gegn raski á hrauni og tilraunaborunum HS Orku á svæðinu. Meðal þeirra sem sendu inn athugasemdir á sínum tíma voru Náttúruverndarsamtök Suðvesturlands. Í umsögn þeirra segir að gildi svæðisins til ferðaþjónustu og útvistar sé gríðarlega mikið. Mikið rask hafi orðið á náttúru Reykjanesskaga vegna jarðhitavinnslu og mál sé að linni. Ekki síst í því ljósi sé mikilvægt að halda Eldvörpum ósnortnum. Landvernd lagðist gegn rannsóknaborunum í Eldvörpum vegna neikvæðra áhrifa framkvæmdarinnar á náttúru og""" T4 = """Telja að álverð gæti farið í 3.000 dali Í byrjun apríl var verðið í kringum 2.000 dali og hafði þá farið lækkandi en hefur nú hækkað upp í tæplega 2.460 hækkunin hefur því numerið tæpum 25% í mánuðinum. 
Rætist spá Goldman þýðir það hækkunin yrði 50% á mjög skömmum tíma.""" T5 = """Ein hola er boruð á hverjum teig í rannsóknaborunum en ef þær bera góðan árangur og orkuvinnsla hefst gæti svo farið að HS Orka vilji hafa tvær til þrjár borholur á teig. Þeir borteigar verða 3.500 til 7.000 fermetrar, að sögn Ásgeirs Margeirssonar, forstjóra HS Orku.""" T6 = """" Risavaxin viðskipti Guðmundar Kristjánssonar í Granda Guðmundur Kristjánsson, forstjóri útgerðarfélagsins Brims, hefur keypt 34,1% eignarhlut Kristjáns Loftssonar og Halldórs Teitssonar í HB Granda. Heildarupphæð viðskiptanna nemur tæplega 21,7 milljörðum króna, en markaðsvirði HB Granda við lokun markaða í gær nam 54,7 milljörðum króna.Kristján og Halldór eru báðir stjórnarmenn í HB Granda. Þeir áttu eignarhlutinn í gegnum félögin Vogun hf. og Fiskiveiðahlutafélagið Venus hf. Þeir hafa verið stærstu ... """ T7 = """Stærsta verkefni Stóra Fíkniefnakóngsins er að borga KSÍ fyrir Rússlandsferðina og ganga í ÍSÍ.""" T8 = """Embiid snéri aftur í sigri 76ers Úrslitakeppnin í NBA körfuboltanum vestanhafs eru í fullum gangi, í nótt fóru fram þrír leikir. Philadelphia 76ers vann góðan sigur á útivelli gegn Miami Heat og þá unnu Golden State Warriors og New Orleans Pelicans sína leiki og eru nú einum sigurleik frá því að sópa andstæðingum sínum í sumarfrí. Philadelphia 76ers hafa verið á mikilli siglingu undanfarin misseri en áður en úrslitakeppnin hófst höfðu þeir borið sigur úr býtum í 15 leikjum í röð. Í nótt endurheimti liðið svo einn af sínum betri leikmönnum úr meiðslum, Kamerúnann Joel Embiid, hann lék á als oddi og var stigahæstur í sigri sinna manna með 23 stig. Leiknum lauk með öruggum sigri 76ers 128-108 en stigahæstur hjá Miami Heat var Slóveninn Goran Dragic með 23 stig. 76ers leiða einvígið 2-1 en vinna þarf fjóra leiki til að komast áfram.""" T9 = """Einn hinna grunuðu í Samherjamáli hugðist múta lögreglu. 
Maður sem vinnur fyrir einn þeirra sex Namibíumanna sem handteknir voru eftir að greint var frá Samherjaskjölunum hefur verið handtekinn, grunaður um að hafa reynt að múta fulltrúa í spillingarlögreglunni. Fréttavefur hins namibíska Informante greindi frá þessu í dag og Paulus Noa, yfirmaður spillingarlögreglunnar staðfestir þetta í samtali við fréttastofu RÚV. Maðurinn upplýsti rannsakandann um að hann væri reiðubúinn til að greiða honum peninga ef hann myndi fjarlægja tiltekin greiðslukort úr sönnunargögnum. Greiðslukort sem lögreglan haldlagði við húsleit heima hjá viðkomandi. Með þessu hefði hann getað nálgast peninga sem núna liggja inni á bankareikningum mannsins. Noa segir við fréttastofu að maðurinn hafi líka viljað að rannsakandinn léti tiltekin skjöl, sem eru á meðal sönnunargagna, hverfa. Informante segir að rannsakandinn hafi átt að fá tvær milljónir namibískra dala fyrir verkið.\nÞegar maðurinn afhenti rannsakandanum peningana var hann handtekinn. Hann sætir nú rannsókn, grunaður um brot á spillingarlögum og jafnframt grunaður um hindra framgang réttvísinnar.\nNoa vill hvorki upplýsa fréttastofu um það hver maðurinn er né heldur fyrir hvern hinna grunuðu í Samherjamálinu hann vinnur fyrir. Noa segir að maðurinn mæti frammi fyrir dómara á morgun og frekari upplýsingar um hann verði ekki gefnar upp fyrr en eftir það. 
Noa segist ekki gera sér grein fyrir því hvort dómari láti manninn lausan á morgun gegn tryggingu eða hvort hann verði áfram í haldi.\nÞeir sem eru í haldi namibísku lögreglunnar vegna Samherjaskjalanna eru Sacky Shanghala, fyrrverandi dómsmálaráðherra, James Hatuikulipi, fyrrverandi stjórnarformaður namibísku ríkisútgerðarinnar Fishcor, Bernhardt Esau, fyrrverandi sjávarútvegsráðherra, Tamson 'Fitty' Hatuikulipi, tengdasonur Esau sem jafnframt er frændi James Hatuikulipi, Ricardo Gustavo, samstarfsmaður hans og Pius 'Taxa' Mwatelulo, sem einnig tengist James Hatuikulipi fjölskylduböndum.\n""" class TestConcepts(unittest.TestCase): def test_runnable(self): for t in [T1, T2, T3, T4, T5, T6, T7, T8, T9]: c = Concepts() c.extract(t) c.report(threshold=1.0) self.assertTrue(len(c.important())) if __name__ == "__main__": unittest.main()
194.262295
2,015
0.813333
1,852
11,850
5.201944
0.476782
0.005086
0.004982
0.00519
0.065393
0.061657
0.056674
0.045049
0.045049
0.045049
0
0.016978
0.16
11,850
60
2,016
197.5
0.950372
0
0
0
0
0.422222
0.959916
0.014684
0
0
0
0
0.022222
1
0.022222
false
0
0.066667
0
0.111111
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
ea8f0f72d49a5c4730779e0cc1c5d55c796a9166
1,327
py
Python
employee/serializers.py
somkiet073/LeaveManagement
753c6497bb1094deb86809135b25e558d4912700
[ "MIT" ]
null
null
null
employee/serializers.py
somkiet073/LeaveManagement
753c6497bb1094deb86809135b25e558d4912700
[ "MIT" ]
null
null
null
employee/serializers.py
somkiet073/LeaveManagement
753c6497bb1094deb86809135b25e558d4912700
[ "MIT" ]
null
null
null
from django.contrib.auth.models import User, Group from employee.models import Department, Profile, AppoveGroup, Positions, Prename from rest_framework import serializers class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ('url', 'username', 'email', 'first_name', 'last_name', 'groups', 'date_joined', 'is_active') class GroupSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Group fields = ('url', 'name') class AppoveGroupSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = AppoveGroup fields = ('url', 'user', 'create_time') class DepartmentSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Department fields = ('url', 'name') class PositionsSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Positions fields = ('url', 'name', 'is_active') class PrenameSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Prename fields = ('url', 'name', 'is_active') class ProfileSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Profile fields = ('url', 'prename', 'user', 'gender', 'department', 'status', 'position')
34.921053
110
0.696307
120
1,327
7.633333
0.375
0.282751
0.320961
0.351528
0.446507
0.056769
0
0
0
0
0
0
0.191409
1,327
38
111
34.921053
0.853681
0
0
0.354839
0
0
0.127259
0
0
0
0
0
0
1
0
false
0
0.096774
0
0.548387
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
575bf9eb3fb36a2bf1c13f54bae3bd78645b5fcf
271
py
Python
src/syft/lib/python/primitive_interface.py
dnabanita7/PySyft
ce2510e65f5bad382e88806bcde30fa38c3c76c4
[ "Apache-2.0" ]
2
2018-07-23T20:34:10.000Z
2020-08-01T09:09:09.000Z
packages/syft/src/syft/lib/python/primitive_interface.py
Metrix1010/PySyft
6477f64b63dc285059c3766deab3993653cead2e
[ "Apache-2.0" ]
5
2020-09-11T05:47:12.000Z
2020-10-13T08:36:17.000Z
packages/syft/src/syft/lib/python/primitive_interface.py
Metrix1010/PySyft
6477f64b63dc285059c3766deab3993653cead2e
[ "Apache-2.0" ]
1
2020-10-15T06:13:38.000Z
2020-10-15T06:13:38.000Z
# stdlib from typing import Any # syft relative from ...core.common import UID from ...core.common.serde.serializable import Serializable class PyPrimitive(Serializable): def __init__(self) -> None: self._id: UID def upcast(self) -> Any: pass
18.066667
58
0.686347
34
271
5.323529
0.617647
0.088398
0.154696
0
0
0
0
0
0
0
0
0
0.214022
271
14
59
19.357143
0.849765
0.073801
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0.125
0.375
0
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
1
0
1
0
0
4
578688b8a425d751f6d28c6221e25d47bc55c76a
79,584
py
Python
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_acl_cfg.py
tkamata-test/ydk-py
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_acl_cfg.py
tkamata-test/ydk-py
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_acl_cfg.py
tkamata-test/ydk-py
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
""" Cisco_IOS_XR_ipv4_acl_cfg This module contains a collection of YANG definitions for Cisco IOS\-XR ipv4\-acl package configuration. This module contains definitions for the following management objects\: ipv4\-acl\-and\-prefix\-list\: IPv4 ACL configuration data Copyright (c) 2013\-2016 by Cisco Systems, Inc. All rights reserved. """ import re import collections from enum import Enum from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict from ydk.errors import YPYError, YPYModelError class NextHopTypeEnum(Enum): """ NextHopTypeEnum Next\-hop type. .. data:: none_next_hop = 0 None next-hop. .. data:: regular_next_hop = 1 Regular next-hop. .. data:: default_next_hop = 2 Default next-hop. """ none_next_hop = 0 regular_next_hop = 1 default_next_hop = 2 @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta return meta._meta_table['NextHopTypeEnum'] class Ipv4AclAndPrefixList(object): """ IPv4 ACL configuration data .. attribute:: accesses Table of access lists. Entries in this table and the AccessListExistenceTable table must be kept consistent **type**\: :py:class:`Accesses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses>` .. attribute:: log_update Control access lists log updates **type**\: :py:class:`LogUpdate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.LogUpdate>` .. attribute:: prefixes Table of ACL prefix lists. 
Entries in this table and the PrefixListExistenceTable table must be kept consistent **type**\: :py:class:`Prefixes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Prefixes>` """ _prefix = 'ipv4-acl-cfg' _revision = '2015-11-09' def __init__(self): self.accesses = Ipv4AclAndPrefixList.Accesses() self.accesses.parent = self self.log_update = Ipv4AclAndPrefixList.LogUpdate() self.log_update.parent = self self.prefixes = Ipv4AclAndPrefixList.Prefixes() self.prefixes.parent = self class Accesses(object): """ Table of access lists. Entries in this table and the AccessListExistenceTable table must be kept consistent .. attribute:: access An ACL **type**\: list of :py:class:`Access <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access>` """ _prefix = 'ipv4-acl-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.access = YList() self.access.parent = self self.access.name = 'access' class Access(object): """ An ACL .. attribute:: access_list_name <key> Access list name \- 64 characters max **type**\: str .. attribute:: access_list_entries ACL entry table; contains list of ACEs **type**\: :py:class:`AccessListEntries <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries>` """ _prefix = 'ipv4-acl-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.access_list_name = None self.access_list_entries = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries() self.access_list_entries.parent = self class AccessListEntries(object): """ ACL entry table; contains list of ACEs .. 
attribute:: access_list_entry An ACL entry; either a description (remark) or an ACE to match against **type**\: list of :py:class:`AccessListEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry>` """ _prefix = 'ipv4-acl-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.access_list_entry = YList() self.access_list_entry.parent = self self.access_list_entry.name = 'access_list_entry' class AccessListEntry(object): """ An ACL entry; either a description (remark) or an ACE to match against .. attribute:: sequence_number <key> Sequence number for this entry **type**\: int **range:** 1..2147483646 .. attribute:: capture Enable capture **type**\: bool .. attribute:: counter_name Counter name **type**\: str .. attribute:: destination_network Destination network settings **type**\: :py:class:`DestinationNetwork <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationNetwork>` .. attribute:: destination_port Destination port settings **type**\: :py:class:`DestinationPort <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationPort>` .. attribute:: destination_port_group Destination port object group name **type**\: str **length:** 1..64 .. attribute:: destination_prefix_group IPv4 destination network object group name **type**\: str **length:** 1..64 .. attribute:: dscp DSCP settings **type**\: :py:class:`Dscp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Dscp>` .. attribute:: fragments Check non\-initial fragments. Item is mutually exclusive with TCP, SCTP, UDP, IGMP and ICMP comparions and with logging **type**\: :py:class:`Empty<ydk.types.Empty>` .. 
                    attribute:: grant
                        Whether to forward or drop packets matching the ACE.
                        **type**\: :py:class:`Ipv4AclGrantEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclGrantEnumEnum>`

                    .. attribute:: icmp
                        ICMP settings.

                    .. attribute:: icmp_off
                        Turn off ICMP generation for deny ACEs.
                        **type**\: :py:class:`Empty<ydk.types.Empty>`

                    .. attribute:: igmp_message_type
                        IGMP message type to match; leave unspecified if no
                        message type comparison is to be done.
                        **type**\: Ipv4AclIgmpNumberEnum or int (0..255)

                    .. attribute:: log_option
                        Whether and how to log matches against this entry.
                        **type**\: :py:class:`Ipv4AclLoggingEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclLoggingEnumEnum>`

                    .. attribute:: next_hop
                        Next\-hop settings.

                    .. attribute:: packet_length
                        Packet length settings.

                    .. attribute:: precedence
                        Precedence value to match (if a protocol was
                        specified); leave unspecified if precedence
                        comparison is not to be performed.
                        **type**\: Ipv4AclPrecedenceNumberEnum or int (0..7)

                    .. attribute:: protocol
                        Protocol to match.
                        **type**\: Ipv4AclProtocolNumberEnum or int (0..255)

                    .. attribute:: remark
                        Comments or a description for the access list.
                        **type**\: str

                    .. attribute:: sequence_str
                        Sequence string for the ACE.
                        **type**\: str, **length:** 1..64

                    .. attribute:: source_network
                        Source network settings.

                    .. attribute:: source_port
                        Source port settings.

                    .. attribute:: source_port_group
                        Source port object group name.
                        **type**\: str, **length:** 1..64

                    .. attribute:: source_prefix_group
                        IPv4 source network object group name.
                        **type**\: str, **length:** 1..64

                    .. attribute:: tcp
                        TCP settings.

                    ..
                    attribute:: time_to_live
                        TTL settings.
                        **type**\: :py:class:`TimeToLive <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.TimeToLive>`
                    """

                    _prefix = 'ipv4-acl-cfg'
                    _revision = '2015-11-09'

                    def __init__(self):
                        self.parent = None
                        self.sequence_number = None
                        self.capture = None
                        self.counter_name = None
                        # Sub-containers are created up front and back-linked
                        # so they can derive their XPath from this entry.
                        self.destination_network = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationNetwork()
                        self.destination_network.parent = self
                        self.destination_port = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationPort()
                        self.destination_port.parent = self
                        self.destination_port_group = None
                        self.destination_prefix_group = None
                        self.dscp = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Dscp()
                        self.dscp.parent = self
                        self.fragments = None
                        self.grant = None
                        self.icmp = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Icmp()
                        self.icmp.parent = self
                        self.icmp_off = None
                        self.igmp_message_type = None
                        self.log_option = None
                        self.next_hop = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop()
                        self.next_hop.parent = self
                        self.packet_length = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.PacketLength()
                        self.packet_length.parent = self
                        self.precedence = None
                        self.protocol = None
                        self.remark = None
                        self.sequence_str = None
                        self.source_network = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.SourceNetwork()
                        self.source_network.parent = self
                        self.source_port = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.SourcePort()
                        self.source_port.parent = self
                        self.source_port_group = None
                        self.source_prefix_group = None
                        self.tcp = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Tcp()
                        self.tcp.parent = self
                        self.time_to_live = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.TimeToLive()
                        self.time_to_live.parent = self


                    class SourceNetwork(object):
                        """Source network settings.

                        .. attribute:: source_address
                            Source IPv4 address to match; leave unspecified
                            for any.
                            **type**\: str (IPv4 dotted\-quad)

                        .. attribute:: source_wild_card_bits
                            Wildcard bits to apply to the source address (if
                            specified); leave unspecified for no wildcarding.
                            **type**\: str (IPv4 dotted\-quad)
                        """

                        _prefix = 'ipv4-acl-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            self.source_address = None
                            self.source_wild_card_bits = None

                        @property
                        def _common_path(self):
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')

                            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:source-network'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            # True when any leaf of this container is set.
                            if not self.is_config():
                                return False
                            if self.source_address is not None:
                                return True
                            if self.source_wild_card_bits is not None:
                                return True

                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                            return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.SourceNetwork']['meta_info']


                    class DestinationNetwork(object):
                        """Destination network settings.

                        .. attribute:: destination_address
                            Destination IPv4 address to match (if a protocol
                            was specified); leave unspecified for any.
                            **type**\: str (IPv4 dotted\-quad)

                        ..
                        attribute:: destination_wild_card_bits
                            Wildcard bits to apply to the destination address
                            (if specified); leave unspecified for no
                            wildcarding.
                            **type**\: str (IPv4 dotted\-quad)
                        """

                        _prefix = 'ipv4-acl-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            self.destination_address = None
                            self.destination_wild_card_bits = None

                        @property
                        def _common_path(self):
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')

                            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:destination-network'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.destination_address is not None:
                                return True
                            if self.destination_wild_card_bits is not None:
                                return True

                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                            return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationNetwork']['meta_info']


                    class SourcePort(object):
                        """Source port settings.

                        .. attribute:: first_source_port
                            First source port for comparison; leave
                            unspecified if source port comparison is not to
                            be performed.
                            **type**\: Ipv4AclPortNumberEnum or int (0..65535)

                        .. attribute:: second_source_port
                            Second source port for comparison; leave
                            unspecified if source port comparison is not to
                            be performed.
                            **type**\: Ipv4AclPortNumberEnum or int (0..65535)

                        .. attribute:: source_operator
                            Source comparison operator; leave unspecified if
                            no source port comparison is to be done.
                            **type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
                        """

                        _prefix = 'ipv4-acl-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            self.first_source_port = None
                            self.second_source_port = None
                            self.source_operator = None

                        @property
                        def _common_path(self):
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')

                            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:source-port'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.first_source_port is not None:
                                return True
                            if self.second_source_port is not None:
                                return True
                            if self.source_operator is not None:
                                return True

                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                            return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.SourcePort']['meta_info']


                    class DestinationPort(object):
                        """Destination port settings.

                        .. attribute:: destination_operator
                            Destination comparison operator; leave
                            unspecified if no destination port comparison is
                            to be done.
                            **type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`

                        .. attribute:: first_destination_port
                            First destination port for comparison; leave
                            unspecified if destination port comparison is not
                            to be performed.
                            **type**\: Ipv4AclPortNumberEnum or int (0..65535)

                        ..
                        attribute:: second_destination_port
                            Second destination port for comparison; leave
                            unspecified if destination port comparison is not
                            to be performed.
                            **type**\: Ipv4AclPortNumberEnum or int (0..65535)
                        """

                        _prefix = 'ipv4-acl-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            self.destination_operator = None
                            self.first_destination_port = None
                            self.second_destination_port = None

                        @property
                        def _common_path(self):
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')

                            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:destination-port'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.destination_operator is not None:
                                return True
                            if self.first_destination_port is not None:
                                return True
                            if self.second_destination_port is not None:
                                return True

                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                            return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationPort']['meta_info']


                    class Icmp(object):
                        """ICMP settings.

                        .. attribute:: icmp_type_code
                            Well\-known ICMP message code types to match;
                            leave unspecified if ICMP message code type
                            comparison is not to be performed.
                            **type**\: :py:class:`Ipv4AclIcmpTypeCodeEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclIcmpTypeCodeEnumEnum>`
                        """

                        _prefix = 'ipv4-acl-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            self.icmp_type_code = None

                        @property
                        def _common_path(self):
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')

                            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:icmp'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.icmp_type_code is not None:
                                return True

                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                            return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Icmp']['meta_info']


                    class Tcp(object):
                        """TCP settings.

                        .. attribute:: tcp_bits
                            TCP bits to match; leave unspecified if
                            comparison of TCP bits is not required.
                            **type**\: Ipv4AclTcpBitsNumberEnum or int (0..32)

                        .. attribute:: tcp_bits_mask
                            TCP bits mask for flexible TCP matching; leave
                            unspecified if tcp\-bits\-match\-operator is
                            unspecified.
                            **type**\: Ipv4AclTcpBitsNumberEnum or int (0..32)

                        .. attribute:: tcp_bits_match_operator
                            TCP bits match operator; leave unspecified if
                            flexible comparison of TCP bits is not required.
                            **type**\: :py:class:`Ipv4AclTcpMatchOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclTcpMatchOperatorEnumEnum>`
                        """

                        _prefix = 'ipv4-acl-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            self.tcp_bits = None
                            self.tcp_bits_mask = None
                            self.tcp_bits_match_operator = None

                        @property
                        def _common_path(self):
                            if self.parent is None: raise YPYModelError('parent is not set .
Cannot derive path.')

                            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:tcp'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.tcp_bits is not None:
                                return True
                            if self.tcp_bits_mask is not None:
                                return True
                            if self.tcp_bits_match_operator is not None:
                                return True

                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                            return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Tcp']['meta_info']


                    class PacketLength(object):
                        """Packet length settings.

                        .. attribute:: packet_length_max
                            Maximum packet length for comparison; leave
                            unspecified if packet length comparison is not to
                            be performed or if only the minimum packet length
                            should be considered.
                            **type**\: int, **range:** 0..65535

                        .. attribute:: packet_length_min
                            Minimum packet length for comparison; leave
                            unspecified if packet length comparison is not to
                            be performed or if only the maximum packet length
                            should be considered.
                            **type**\: int, **range:** 0..65535

                        .. attribute:: packet_length_operator
                            Packet length operator, applicable if packet
                            length is to be compared; leave unspecified if no
                            packet length comparison is to be done.
                            **type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
                        """

                        _prefix = 'ipv4-acl-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            self.packet_length_max = None
                            self.packet_length_min = None
                            self.packet_length_operator = None

                        @property
                        def _common_path(self):
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')

                            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:packet-length'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.packet_length_max is not None:
                                return True
                            if self.packet_length_min is not None:
                                return True
                            if self.packet_length_operator is not None:
                                return True

                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                            return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.PacketLength']['meta_info']


                    class TimeToLive(object):
                        """TTL settings.

                        .. attribute:: time_to_live_max
                            Maximum TTL for comparison; leave unspecified if
                            TTL comparison is not to be performed or if only
                            the minimum TTL should be considered.
                            **type**\: int, **range:** 0..255

                        .. attribute:: time_to_live_min
                            TTL value for comparison, or minimum TTL value
                            for a TTL range comparison; leave unspecified if
                            TTL classification is not required.
                            **type**\: int, **range:** 0..255

                        .. attribute:: time_to_live_operator
                            TTL operator, applicable if TTL is to be
                            compared; leave unspecified if TTL classification
                            is not required.
                            **type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
                        """

                        _prefix = 'ipv4-acl-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            self.time_to_live_max = None
                            self.time_to_live_min = None
                            self.time_to_live_operator = None

                        @property
                        def _common_path(self):
                            if self.parent is None: raise YPYModelError('parent is not set .
Cannot derive path.')

                            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:time-to-live'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.time_to_live_max is not None:
                                return True
                            if self.time_to_live_min is not None:
                                return True
                            if self.time_to_live_operator is not None:
                                return True

                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                            return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.TimeToLive']['meta_info']


                    class NextHop(object):
                        """Next\-hop settings.

                        .. attribute:: next_hop_1
                            The first next\-hop settings.

                        .. attribute:: next_hop_2
                            The second next\-hop settings.

                        .. attribute:: next_hop_3
                            The third next\-hop settings.

                        .. attribute:: next_hop_type
                            The next\-hop type.
                            **type**\: :py:class:`NextHopTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.NextHopTypeEnum>`
                        """

                        _prefix = 'ipv4-acl-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            # Up to three next-hops, created eagerly and
                            # back-linked for path derivation.
                            self.next_hop_1 = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop1()
                            self.next_hop_1.parent = self
                            self.next_hop_2 = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop2()
                            self.next_hop_2.parent = self
                            self.next_hop_3 = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop3()
                            self.next_hop_3.parent = self
                            self.next_hop_type = None


                        class NextHop1(object):
                            """The first next\-hop settings.

                            .. attribute:: next_hop
                                The IPv4 address of the next\-hop.
                                **type**\: str (IPv4 dotted\-quad)

                            .. attribute:: track_name
                                The object tracking name for the next\-hop.
                                **type**\: str

                            .. attribute:: vrf_name
                                The VRF name of the next\-hop.
                                **type**\: str
                            """

                            _prefix = 'ipv4-acl-cfg'
                            _revision = '2015-11-09'

                            def __init__(self):
                                self.parent = None
                                self.next_hop = None
                                self.track_name = None
                                self.vrf_name = None

                            @property
                            def _common_path(self):
                                if self.parent is None: raise YPYModelError('parent is not set .
Cannot derive path.')

                                return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:next-hop-1'

                            def is_config(self):
                                ''' Returns True if this instance represents config data else returns False '''
                                return True

                            def _has_data(self):
                                if not self.is_config():
                                    return False
                                if self.next_hop is not None:
                                    return True
                                if self.track_name is not None:
                                    return True
                                if self.vrf_name is not None:
                                    return True

                                return False

                            @staticmethod
                            def _meta_info():
                                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                                return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop1']['meta_info']


                        class NextHop2(object):
                            """The second next\-hop settings.

                            .. attribute:: next_hop
                                The IPv4 address of the next\-hop.
                                **type**\: str (IPv4 dotted\-quad)

                            .. attribute:: track_name
                                The object tracking name for the next\-hop.
                                **type**\: str

                            .. attribute:: vrf_name
                                The VRF name of the next\-hop.
                                **type**\: str
                            """

                            _prefix = 'ipv4-acl-cfg'
                            _revision = '2015-11-09'

                            def __init__(self):
                                self.parent = None
                                self.next_hop = None
                                self.track_name = None
                                self.vrf_name = None

                            @property
                            def _common_path(self):
                                if self.parent is None:
                                    raise YPYModelError('parent is not set . Cannot derive path.')

                                return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:next-hop-2'

                            def is_config(self):
                                ''' Returns True if this instance represents config data else returns False '''
                                return True

                            def _has_data(self):
                                if not self.is_config():
                                    return False
                                if self.next_hop is not None:
                                    return True
                                if self.track_name is not None:
                                    return True
                                if self.vrf_name is not None:
                                    return True

                                return False

                            @staticmethod
                            def _meta_info():
                                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                                return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop2']['meta_info']


                        class NextHop3(object):
                            """The third next\-hop settings.

                            .. attribute:: next_hop
                                The IPv4 address of the next\-hop.
                                **type**\: str (IPv4 dotted\-quad)

                            .. attribute:: track_name
                                The object tracking name for the next\-hop.
                                **type**\: str

                            .. attribute:: vrf_name
                                The VRF name of the next\-hop.
                                **type**\: str
                            """

                            _prefix = 'ipv4-acl-cfg'
                            _revision = '2015-11-09'

                            def __init__(self):
                                self.parent = None
                                self.next_hop = None
                                self.track_name = None
                                self.vrf_name = None

                            @property
                            def _common_path(self):
                                if self.parent is None: raise YPYModelError('parent is not set .
Cannot derive path.')

                                return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:next-hop-3'

                            def is_config(self):
                                ''' Returns True if this instance represents config data else returns False '''
                                return True

                            def _has_data(self):
                                if not self.is_config():
                                    return False
                                if self.next_hop is not None:
                                    return True
                                if self.track_name is not None:
                                    return True
                                if self.vrf_name is not None:
                                    return True

                                return False

                            @staticmethod
                            def _meta_info():
                                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                                return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop3']['meta_info']

                        @property
                        def _common_path(self):
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')

                            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:next-hop'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            # Container data exists when any next-hop child
                            # has data or the type leaf is set.
                            if not self.is_config():
                                return False
                            if self.next_hop_1 is not None and self.next_hop_1._has_data():
                                return True
                            if self.next_hop_2 is not None and self.next_hop_2._has_data():
                                return True
                            if self.next_hop_3 is not None and self.next_hop_3._has_data():
                                return True
                            if self.next_hop_type is not None:
                                return True

                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                            return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop']['meta_info']


                    class Dscp(object):
                        """DSCP settings.

                        .. attribute:: dscp_max
                            Maximum DSCP value for comparison; leave
                            unspecified if DSCP comparison is not to be
                            performed or if only the minimum DSCP should be
                            considered.
                            **type**\: Ipv4AclDscpNumberEnum or int (0..63)

                        .. attribute:: dscp_min
                            DSCP value to match, or minimum DSCP value for a
                            DSCP range comparison; leave unspecified if DSCP
                            comparison is not to be performed.
                            **type**\: Ipv4AclDscpNumberEnum or int (0..63)

                        .. attribute:: dscp_operator
                            DSCP operator, applicable only when a DSCP range
                            is configured; leave unspecified if a DSCP range
                            is not required.
                            **type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
                        """

                        _prefix = 'ipv4-acl-cfg'
                        _revision = '2015-11-09'

                        def __init__(self):
                            self.parent = None
                            self.dscp_max = None
                            self.dscp_min = None
                            self.dscp_operator = None

                        @property
                        def _common_path(self):
                            if self.parent is None:
                                raise YPYModelError('parent is not set . Cannot derive path.')

                            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:dscp'

                        def is_config(self):
                            ''' Returns True if this instance represents config data else returns False '''
                            return True

                        def _has_data(self):
                            if not self.is_config():
                                return False
                            if self.dscp_max is not None:
                                return True
                            if self.dscp_min is not None:
                                return True
                            if self.dscp_operator is not None:
                                return True

                            return False

                        @staticmethod
                        def _meta_info():
                            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                            return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Dscp']['meta_info']

                    @property
                    def _common_path(self):
                        if self.parent is None: raise YPYModelError('parent is not set .
Cannot derive path.')

                        if self.sequence_number is None:
                            raise YPYModelError('Key property sequence_number is None')

                        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:access-list-entry[Cisco-IOS-XR-ipv4-acl-cfg:sequence-number = ' + str(self.sequence_number) + ']'

                    def is_config(self):
                        ''' Returns True if this instance represents config data else returns False '''
                        return True

                    def _has_data(self):
                        # An entry has data when its key, any leaf, or any
                        # child container with data is present.
                        if not self.is_config():
                            return False
                        if self.sequence_number is not None:
                            return True
                        if self.capture is not None:
                            return True
                        if self.counter_name is not None:
                            return True
                        if self.destination_network is not None and self.destination_network._has_data():
                            return True
                        if self.destination_port is not None and self.destination_port._has_data():
                            return True
                        if self.destination_port_group is not None:
                            return True
                        if self.destination_prefix_group is not None:
                            return True
                        if self.dscp is not None and self.dscp._has_data():
                            return True
                        if self.fragments is not None:
                            return True
                        if self.grant is not None:
                            return True
                        if self.icmp is not None and self.icmp._has_data():
                            return True
                        if self.icmp_off is not None:
                            return True
                        if self.igmp_message_type is not None:
                            return True
                        if self.log_option is not None:
                            return True
                        if self.next_hop is not None and self.next_hop._has_data():
                            return True
                        if self.packet_length is not None and self.packet_length._has_data():
                            return True
                        if self.precedence is not None:
                            return True
                        if self.protocol is not None:
                            return True
                        if self.remark is not None:
                            return True
                        if self.sequence_str is not None:
                            return True
                        if self.source_network is not None and self.source_network._has_data():
                            return True
                        if self.source_port is not None and self.source_port._has_data():
                            return True
                        if self.source_port_group is not None:
                            return True
                        if self.source_prefix_group is not None:
                            return True
                        if self.tcp is not None and self.tcp._has_data():
                            return True
                        if self.time_to_live is not None and self.time_to_live._has_data():
                            return True

                        return False

                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                        return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry']['meta_info']

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')

                    return self.parent._common_path + '/Cisco-IOS-XR-ipv4-acl-cfg:access-list-entries'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    if self.access_list_entry is not None:
                        for child_ref in self.access_list_entry:
                            if child_ref._has_data():
                                return True

                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                    return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries']['meta_info']

            @property
            def _common_path(self):
                if self.access_list_name is None:
                    raise YPYModelError('Key property access_list_name is None')

                return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list/Cisco-IOS-XR-ipv4-acl-cfg:accesses/Cisco-IOS-XR-ipv4-acl-cfg:access[Cisco-IOS-XR-ipv4-acl-cfg:access-list-name = ' + str(self.access_list_name) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.access_list_name is not None:
                    return True
                if self.access_list_entries is not None and self.access_list_entries._has_data():
                    return True

                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
                return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access']['meta_info']

        @property
        def _common_path(self):
            return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list/Cisco-IOS-XR-ipv4-acl-cfg:accesses'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.access is not None:
                for child_ref in self.access:
                    if child_ref._has_data():
                        return True

            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
            return meta._meta_table['Ipv4AclAndPrefixList.Accesses']['meta_info']


    class Prefixes(object):
        """Table of ACL prefix lists.

        Entries in this table and the PrefixListExistenceTable table
        must be kept consistent.

        .. attribute:: prefix
            Name of a prefix list.
            **type**\: list of :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Prefixes.Prefix>`
        """

        _prefix = 'ipv4-acl-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.prefix = YList()
            self.prefix.parent = self
            self.prefix.name = 'prefix'


        class Prefix(object):
            """Name of a prefix list.

            .. attribute:: prefix_list_name  <key>
                Prefix list name, max 32 characters.
                **type**\: str

            .. attribute:: prefix_list_entries
                Sequence of entries forming a prefix list.
                **type**\: :py:class:`PrefixListEntries <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Prefixes.Prefix.PrefixListEntries>`
                **presence node**\: True
            """

            _prefix = 'ipv4-acl-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.prefix_list_name = None
                # Presence container: left as None until explicitly created.
                self.prefix_list_entries = None


            class PrefixListEntries(object):
                """Sequence of entries forming a prefix list.

                .. attribute:: prefix_list_entry
                    A prefix list entry; either a description (remark) or a
                    prefix to match against.
                    **type**\: list of :py:class:`PrefixListEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Prefixes.Prefix.PrefixListEntries.PrefixListEntry>`

                .. attribute:: _is_presence
                    Is present if this instance represents a presence
                    container, else not.
                    **type**\: bool

                This class is a :ref:`presence class<presence-class>`
                """

                _prefix = 'ipv4-acl-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self._is_presence = True
                    self.prefix_list_entry = YList()
                    self.prefix_list_entry.parent = self
                    self.prefix_list_entry.name = 'prefix_list_entry'


                class PrefixListEntry(object):
                    """A prefix list entry; either a description (remark)
                    or a prefix to match against.

                    .. attribute:: sequence_number  <key>
                        Sequence number of prefix list.
                        **type**\: int, **range:** 1..2147483646

                    .. attribute:: exact_prefix_length
                        If exact prefix length matching is specified, the
                        prefix length to be matched.
                        **type**\: int, **range:** 0..32

                    .. attribute:: grant
                        Whether to forward or drop packets matching the
                        prefix list.
                        **type**\: :py:class:`Ipv4AclGrantEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclGrantEnumEnum>`

                    .. attribute:: match_exact_length
                        Perform an exact prefix length match.  Mutually
                        exclusive with the minimum and maximum length match
                        items.
                        **type**\: :py:class:`Empty<ydk.types.Empty>`

                    .. attribute:: match_max_length
                        Perform a maximum length prefix match.  Mutually
                        exclusive with the exact length match item.
                        **type**\: :py:class:`Empty<ydk.types.Empty>`

                    .. attribute:: match_min_length
                        Perform a minimum length prefix match.  Mutually
                        exclusive with the exact length match item.
                        **type**\: :py:class:`Empty<ydk.types.Empty>`

                    .. attribute:: max_prefix_length
                        If maximum length prefix matching is specified, the
                        maximum prefix length to be matched.
                        **type**\: int, **range:** 0..32

                    .. attribute:: min_prefix_length
                        If minimum length prefix matching is specified, the
                        minimum prefix length to be matched.
                        **type**\: int, **range:** 0..32

                    ..
attribute:: netmask Mask of IPv4 address prefix **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. attribute:: prefix IPv4 address prefix to match **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. attribute:: remark Comments or a description for the prefix list. Item is mutually exclusive with all others in the object **type**\: str """ _prefix = 'ipv4-acl-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.sequence_number = None self.exact_prefix_length = None self.grant = None self.match_exact_length = None self.match_max_length = None self.match_min_length = None self.max_prefix_length = None self.min_prefix_length = None self.netmask = None self.prefix = None self.remark = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') if self.sequence_number is None: raise YPYModelError('Key property sequence_number is None') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:prefix-list-entry[Cisco-IOS-XR-ipv4-acl-cfg:sequence-number = ' + str(self.sequence_number) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.sequence_number is not None: return True if self.exact_prefix_length is not None: return True if self.grant is not None: return True if self.match_exact_length is not None: return True if self.match_max_length is not None: return True if self.match_min_length is not None: return True if self.max_prefix_length is not None: return True if self.min_prefix_length is not None: return True if self.netmask is not None: return True if self.prefix is not None: return True if self.remark is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta return meta._meta_table['Ipv4AclAndPrefixList.Prefixes.Prefix.PrefixListEntries.PrefixListEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:prefix-list-entries' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self._is_presence: return True if self.prefix_list_entry is not None: for child_ref in self.prefix_list_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta return meta._meta_table['Ipv4AclAndPrefixList.Prefixes.Prefix.PrefixListEntries']['meta_info'] @property def _common_path(self): if self.prefix_list_name is None: raise YPYModelError('Key property prefix_list_name is None') return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list/Cisco-IOS-XR-ipv4-acl-cfg:prefixes/Cisco-IOS-XR-ipv4-acl-cfg:prefix[Cisco-IOS-XR-ipv4-acl-cfg:prefix-list-name = ' + str(self.prefix_list_name) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.prefix_list_name is not None: return True if self.prefix_list_entries is not None and self.prefix_list_entries._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta return meta._meta_table['Ipv4AclAndPrefixList.Prefixes.Prefix']['meta_info'] @property def _common_path(self): return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list/Cisco-IOS-XR-ipv4-acl-cfg:prefixes' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.prefix is not None: for child_ref in self.prefix: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import 
_Cisco_IOS_XR_ipv4_acl_cfg as meta return meta._meta_table['Ipv4AclAndPrefixList.Prefixes']['meta_info'] class LogUpdate(object): """ Control access lists log updates .. attribute:: rate Log update rate (log msgs per second) **type**\: int **range:** 1..1000 .. attribute:: threshold Log update threshold (number of hits) **type**\: int **range:** 1..2147483647 """ _prefix = 'ipv4-acl-cfg' _revision = '2015-11-09' def __init__(self): self.parent = None self.rate = None self.threshold = None @property def _common_path(self): return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list/Cisco-IOS-XR-ipv4-acl-cfg:log-update' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.rate is not None: return True if self.threshold is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta return meta._meta_table['Ipv4AclAndPrefixList.LogUpdate']['meta_info'] @property def _common_path(self): return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return True def _has_data(self): if not self.is_config(): return False if self.accesses is not None and self.accesses._has_data(): return True if self.log_update is not None and self.log_update._has_data(): return True if self.prefixes is not None and self.prefixes._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta return meta._meta_table['Ipv4AclAndPrefixList']['meta_info']
42.581059
225
0.444775
7,097
79,584
4.786389
0.041285
0.040508
0.050634
0.042863
0.800112
0.766728
0.711354
0.681474
0.651005
0.616091
0
0.023866
0.481931
79,584
1,868
226
42.603854
0.800024
0.284856
0
0.666667
0
0.010499
0.103669
0.066188
0
0
0
0
0
1
0.152231
false
0
0.038058
0.005249
0.494751
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
5787f5ee52f63c6cfd74ca994ff98ce017ffcc47
458
py
Python
dong_mnist_example/config/default.py
libgirlenterprise/dong_mnist_example
fdcf2fcc457e717eeaddb631f071790eb1f39607
[ "Apache-2.0" ]
1
2019-06-26T02:26:53.000Z
2019-06-26T02:26:53.000Z
dong_mnist_example/config/default.py
libgirlenterprise/dong_mnist_example
fdcf2fcc457e717eeaddb631f071790eb1f39607
[ "Apache-2.0" ]
4
2020-01-28T22:47:24.000Z
2022-02-10T00:10:45.000Z
dong_mnist_example/config/default.py
libgirlenterprise/dong_mnist_example
fdcf2fcc457e717eeaddb631f071790eb1f39607
[ "Apache-2.0" ]
1
2019-08-20T17:04:05.000Z
2019-08-20T17:04:05.000Z
from collections import OrderedDict def get_config(): return OrderedDict([("self.compile", (":optimizer", "adam", ":loss", "sparse_categorical_crossentropy", ":metrics", [ "accuracy" ])), ("self.fit", (":epochs", 5))])
41.636364
76
0.327511
23
458
6.391304
0.913043
0
0
0
0
0
0
0
0
0
0
0.005
0.563319
458
10
77
45.8
0.73
0
0
0
0
0
0.203057
0.067686
0
0
0
0
0
1
0.111111
true
0
0.111111
0.111111
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
1
0
0
0
4
5798d77a2c9296ac2da8209cdf69ce85351f3796
547
py
Python
deepleaps/workspace/src/dataloader/TensorTypes.py
Longseabear/deep-leaps-pytorch
abcb87f3079c0612bde4a4f94c75d7c05d5aee3a
[ "MIT" ]
1
2021-02-27T18:00:39.000Z
2021-02-27T18:00:39.000Z
deepleaps/workspace/src/dataloader/TensorTypes.py
Longseabear/deep-leaps-pytorch
abcb87f3079c0612bde4a4f94c75d7c05d5aee3a
[ "MIT" ]
null
null
null
deepleaps/workspace/src/dataloader/TensorTypes.py
Longseabear/deep-leaps-pytorch
abcb87f3079c0612bde4a4f94c75d7c05d5aee3a
[ "MIT" ]
null
null
null
from deepleaps.dataloader.TensorTypes import TensorType import scipy.misc as misc class IMAGE(TensorType): def image_loader(self, path): return misc.imread(path)/255. def image_saver(self, path, data): return misc.imsave(path, data) def getSample(self, sample): return self.image_loader(sample) class GRAYSCALE_IMAGE(IMAGE): def image_loader(self, path): return misc.imread(path) def getSample(self, sample): return self.image_loader(sample) class DISPARITY(TensorType): pass
22.791667
55
0.700183
70
547
5.385714
0.371429
0.116711
0.074271
0.095491
0.509284
0.509284
0.509284
0.509284
0.509284
0.286472
0
0.006912
0.206581
547
23
56
23.782609
0.861751
0
0
0.375
0
0
0
0
0
0
0
0
0
1
0.3125
false
0.0625
0.125
0.3125
0.9375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
57bb4cb5cba175c3f14e276daa1e15c8754d69d7
111
py
Python
python/verkefni1/mod_sum.py
asgeir/old-school-projects
96a502589c627e4556f9ee14fc1dc21ed53ce28a
[ "MIT" ]
null
null
null
python/verkefni1/mod_sum.py
asgeir/old-school-projects
96a502589c627e4556f9ee14fc1dc21ed53ce28a
[ "MIT" ]
null
null
null
python/verkefni1/mod_sum.py
asgeir/old-school-projects
96a502589c627e4556f9ee14fc1dc21ed53ce28a
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 def mod_sum(n): return sum([x for x in range(n) if (x % 3 == 0) or (x % 5 == 0)])
18.5
69
0.531532
24
111
2.416667
0.75
0
0
0
0
0
0
0
0
0
0
0.060241
0.252252
111
5
70
22.2
0.638554
0.189189
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
57e460434bef856874045c5e2b88a3941304d478
306
py
Python
board/mk3072/ucube.py
ruoranluomu/AliOS-Things
d0f3431bcacac5b61645e9beb231a0a53be8078b
[ "Apache-2.0" ]
1
2021-06-27T12:40:17.000Z
2021-06-27T12:40:17.000Z
board/mk3072/ucube.py
ruoranluomu/AliOS-Things
d0f3431bcacac5b61645e9beb231a0a53be8078b
[ "Apache-2.0" ]
null
null
null
board/mk3072/ucube.py
ruoranluomu/AliOS-Things
d0f3431bcacac5b61645e9beb231a0a53be8078b
[ "Apache-2.0" ]
5
2020-11-04T04:36:37.000Z
2021-11-10T08:05:49.000Z
linux_only_targets="athostapp blink coapapp helloworld http2app httpapp jsengine_app linkkit_gateway linkkitapp lwm2mapp meshapp modbus_demo mqttapp otaapp tls udata_demo.sensor_cloud_demo udata_demo.sensor_local_demo udata_demo.udata_cloud_demo udata_demo.udata_local_demo udataapp ulocation.baseapp yts"
153
305
0.901961
45
306
5.755556
0.644444
0.173745
0.150579
0.138996
0
0
0
0
0
0
0
0.007042
0.071895
306
1
306
306
0.90493
0
0
0
0
1
0.928105
0.359477
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
17b3bf75b3cd04da2851578a77cbce24ec64772f
83
py
Python
olmonopolet/vmp_api/__init__.py
joarkm/olmonopolet-api
5fb605f53d6fc87c441ae0f72360c524cb0d3fb7
[ "MIT" ]
2
2020-11-21T13:15:53.000Z
2021-05-18T19:17:41.000Z
olmonopolet/vmp_api/__init__.py
joarkm/olmonopolet-api
5fb605f53d6fc87c441ae0f72360c524cb0d3fb7
[ "MIT" ]
6
2021-03-21T19:24:26.000Z
2021-09-22T19:09:31.000Z
olmonopolet/vmp_api/__init__.py
joarkm/olmonopolet-api
5fb605f53d6fc87c441ae0f72360c524cb0d3fb7
[ "MIT" ]
1
2021-05-20T21:52:10.000Z
2021-05-20T21:52:10.000Z
'''Functions used to retrieve data by use of the API available from Vinmonopolet'''
83
83
0.783133
13
83
5
1
0
0
0
0
0
0
0
0
0
0
0
0.144578
83
1
83
83
0.915493
0.927711
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
17dfaa10b74d1cc6f9abaf48a71149eaf9a64603
45,359
py
Python
code/tests/unit/payloads_for_tests.py
CiscoSecurity/tr-05-serverless-graylog
ea873a5538d6803396936e94d017e157628432dd
[ "MIT" ]
null
null
null
code/tests/unit/payloads_for_tests.py
CiscoSecurity/tr-05-serverless-graylog
ea873a5538d6803396936e94d017e157628432dd
[ "MIT" ]
null
null
null
code/tests/unit/payloads_for_tests.py
CiscoSecurity/tr-05-serverless-graylog
ea873a5538d6803396936e94d017e157628432dd
[ "MIT" ]
null
null
null
EXPECTED_RESPONSE_OF_JWKS_ENDPOINT = { 'keys': [ { 'kty': 'RSA', 'n': 'tSKfSeI0fukRIX38AHlKB1YPpX8PUYN2JdvfM-XjNmLfU1M74N0V' 'mdzIX95sneQGO9kC2xMIE-AIlt52Yf_KgBZggAlS9Y0Vx8DsSL2H' 'vOjguAdXir3vYLvAyyHin_mUisJOqccFKChHKjnk0uXy_38-1r17' '_cYTp76brKpU1I4kM20M__dbvLBWjfzyw9ehufr74aVwr-0xJfsB' 'Vr2oaQFww_XHGz69Q7yHK6DbxYO4w4q2sIfcC4pT8XTPHo4JZ2M7' '33Ea8a7HxtZS563_mhhRZLU5aynQpwaVv2U--CL6EvGt8TlNZOke' 'Rv8wz-Rt8B70jzoRpVK36rR-pHKlXhMGT619v82LneTdsqA25Wi2' 'Ld_c0niuul24A6-aaj2u9SWbxA9LmVtFntvNbRaHXE1SLpLPoIp8' 'uppGF02Nz2v3ld8gCnTTWfq_BQ80Qy8e0coRRABECZrjIMzHEg6M' 'loRDy4na0pRQv61VogqRKDU2r3_VezFPQDb3ciYsZjWBr3HpNOkU' 'jTrvLmFyOE9Q5R_qQGmc6BYtfk5rn7iIfXlkJAZHXhBy-ElBuiBM' '-YSkFM7dH92sSIoZ05V4MP09Xcppx7kdwsJy72Sust9Hnd9B7V35' 'YnVF6W791lVHnenhCJOziRmkH4xLLbPkaST2Ks3IHH7tVltM6NsR' 'k3jNdVM', 'e': 'AQAB', 'alg': 'RS256', 'kid': '02B1174234C29F8EFB69911438F597FF3FFEE6B7', 'use': 'sig' } ] } RESPONSE_OF_JWKS_ENDPOINT_WITH_WRONG_KEY = { 'keys': [ { 'kty': 'RSA', 'n': 'pSKfSeI0fukRIX38AHlKB1YPpX8PUYN2JdvfM-XjNmLfU1M74N0V' 'mdzIX95sneQGO9kC2xMIE-AIlt52Yf_KgBZggAlS9Y0Vx8DsSL2H' 'vOjguAdXir3vYLvAyyHin_mUisJOqccFKChHKjnk0uXy_38-1r17' '_cYTp76brKpU1I4kM20M__dbvLBWjfzyw9ehufr74aVwr-0xJfsB' 'Vr2oaQFww_XHGz69Q7yHK6DbxYO4w4q2sIfcC4pT8XTPHo4JZ2M7' '33Ea8a7HxtZS563_mhhRZLU5aynQpwaVv2U--CL6EvGt8TlNZOke' 'Rv8wz-Rt8B70jzoRpVK36rR-pHKlXhMGT619v82LneTdsqA25Wi2' 'Ld_c0niuul24A6-aaj2u9SWbxA9LmVtFntvNbRaHXE1SLpLPoIp8' 'uppGF02Nz2v3ld8gCnTTWfq_BQ80Qy8e0coRRABECZrjIMzHEg6M' 'loRDy4na0pRQv61VogqRKDU2r3_VezFPQDb3ciYsZjWBr3HpNOkU' 'jTrvLmFyOE9Q5R_qQGmc6BYtfk5rn7iIfXlkJAZHXhBy-ElBuiBM' '-YSkFM7dH92sSIoZ05V4MP09Xcppx7kdwsJy72Sust9Hnd9B7V35' 'YnVF6W791lVHnenhCJOziRmkH4xLLbPkaST2Ks3IHH7tVltM6NsR' 'k3jNdVM', 'e': 'AQAB', 'alg': 'RS256', 'kid': '02B1174234C29F8EFB69911438F597FF3FFEE6B7', 'use': 'sig' } ] } PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY----- MIIJKwIBAAKCAgEAtSKfSeI0fukRIX38AHlKB1YPpX8PUYN2JdvfM+XjNmLfU1M7 4N0VmdzIX95sneQGO9kC2xMIE+AIlt52Yf/KgBZggAlS9Y0Vx8DsSL2HvOjguAdX 
ir3vYLvAyyHin/mUisJOqccFKChHKjnk0uXy/38+1r17/cYTp76brKpU1I4kM20M //dbvLBWjfzyw9ehufr74aVwr+0xJfsBVr2oaQFww/XHGz69Q7yHK6DbxYO4w4q2 sIfcC4pT8XTPHo4JZ2M733Ea8a7HxtZS563/mhhRZLU5aynQpwaVv2U++CL6EvGt 8TlNZOkeRv8wz+Rt8B70jzoRpVK36rR+pHKlXhMGT619v82LneTdsqA25Wi2Ld/c 0niuul24A6+aaj2u9SWbxA9LmVtFntvNbRaHXE1SLpLPoIp8uppGF02Nz2v3ld8g CnTTWfq/BQ80Qy8e0coRRABECZrjIMzHEg6MloRDy4na0pRQv61VogqRKDU2r3/V ezFPQDb3ciYsZjWBr3HpNOkUjTrvLmFyOE9Q5R/qQGmc6BYtfk5rn7iIfXlkJAZH XhBy+ElBuiBM+YSkFM7dH92sSIoZ05V4MP09Xcppx7kdwsJy72Sust9Hnd9B7V35 YnVF6W791lVHnenhCJOziRmkH4xLLbPkaST2Ks3IHH7tVltM6NsRk3jNdVMCAwEA AQKCAgEArx+0JXigDHtFZr4pYEPjwMgCBJ2dr8+L8PptB/4g+LoK9MKqR7M4aTO+ PoILPXPyWvZq/meeDakyZLrcdc8ad1ArKF7baDBpeGEbkRA9JfV5HjNq/ea4gyvD MCGou8ZPSQCnkRmr8LFQbJDgnM5Za5AYrwEv2aEh67IrTHq53W83rMioIumCNiG+ 7TQ7egEGiYsQ745GLrECLZhKKRTgt/T+k1cSk1LLJawme5XgJUw+3D9GddJEepvY oL+wZ/gnO2ADyPnPdQ7oc2NPcFMXpmIQf29+/g7FflatfQhkIv+eC6bB51DhdMi1 zyp2hOhzKg6jn74ixVX+Hts2/cMiAPu0NaWmU9n8g7HmXWc4+uSO/fssGjI3DLYK d5xnhrq4a3ZO5oJLeMO9U71+Ykctg23PTHwNAGrsPYdjGcBnJEdtbXa31agI5PAG 6rgGUY3iSoWqHLgBTxrX04TWVvLQi8wbxh7BEF0yasOeZKxdE2IWYg75zGsjluyH lOnpRa5lSf6KZ6thh9eczFHYtS4DvYBcZ9hZW/g87ie28SkBFxxl0brYt9uKNYJv uajVG8kT80AC7Wzg2q7Wmnoww3JNJUbNths5dqKyUSlMFMIB/vOePFHLrA6qDfAn sQHgUb9WHhUrYsH20XKpqR2OjmWU05bV4pSMW/JwG37o+px1yKECggEBANnwx0d7 ksEMvJjeN5plDy3eMLifBI+6SL/o5TXDoFM6rJxF+0UP70uouYJq2dI+DCSA6c/E sn7WAOirY177adKcBV8biwAtmKHnFnCs/kwAZq8lMvQPtNPJ/vq2n40kO48h8fxb eGcmyAqFPZ4YKSxrPA4cdbHIuFSt9WyaUcVFmzdTFHVlRP70EXdmXHt84byWNB4C Heq8zmrNxPNAi65nEkUks7iBQMtuvyV2+aXjDOTBMCd66IhIh2iZq1O7kXUwgh1O H9hCa7oriHyAdgkKdKCWocmbPPENOETgjraA9wRIXwOYTDb1X5hMvi1mCHo8xjMj u4szD03xJVi7WrsCggEBANTEblCkxEyhJqaMZF3U3df2Yr/ZtHqsrTr4lwB/MOKk zmuSrROxheEkKIsxbiV+AxTvtPR1FQrlqbhTJRwy+pw4KPJ7P4fq2R/YBqvXSNBC amTt6l2XdXqnAk3A++cOEZ2lU9ubfgdeN2Ih8rgdn1LWeOSjCWfExmkoU61/Xe6x AMeXKQSlHKSnX9voxuE2xINHeU6ZAKy1kGmrJtEiWnI8b8C4s8fTyDtXJ1Lasys0 iHO2Tz2jUhf4IJwb87Lk7Ize2MrI+oPzVDXlmkbjkB4tYyoiRTj8rk8pwBW/HVv0 
02pjOLTa4kz1kQ3lsZ/3As4zfNi7mWEhadmEsAIfYkkCggEBANO39r/Yqj5kUyrm ZXnVxyM2AHq58EJ4I4hbhZ/vRWbVTy4ZRfpXeo4zgNPTXXvCzyT/HyS53vUcjJF7 PfPdpXX2H7m/Fg+8O9S8m64mQHwwv5BSQOecAnzkdJG2q9T/Z+Sqg1w2uAbtQ9QE kFFvA0ClhBfpSeTGK1wICq3QVLOh5SGf0fYhxR8wl284v4svTFRaTpMAV3Pcq2JS N4xgHdH1S2hkOTt6RSnbklGg/PFMWxA3JMKVwiPy4aiZ8DhNtQb1ctFpPcJm9CRN ejAI06IAyD/hVZZ2+oLp5snypHFjY5SDgdoKL7AMOyvHEdEkmAO32ot/oQefOLTt GOzURVUCggEBALSx5iYi6HtT2SlUzeBKaeWBYDgiwf31LGGKwWMwoem5oX0GYmr5 NwQP20brQeohbKiZMwrxbF+G0G60Xi3mtaN6pnvYZAogTymWI4RJH5OO9CCnVYUK nkD+GRzDqqt97UP/Joq5MX08bLiwsBvhPG/zqVQzikdQfFjOYNJV+wY92LWpELLb Lso/Q0/WDyExjA8Z4lH36vTCddTn/91Y2Ytu/FGmCzjICaMrzz+0cLlesgvjZsSo MY4dskQiEQN7G9I/Z8pAiVEKlBf52N4fYUPfs/oShMty/O5KPNG7L0nrUKlnfr9J rStC2l/9FK8P7pgEbiD6obY11FlhMMF8udECggEBAIKhvOFtipD1jqDOpjOoR9sK /lRR5bVVWQfamMDN1AwmjJbVHS8hhtYUM/4sh2p12P6RgoO8fODf1vEcWFh3xxNZ E1pPCPaICD9i5U+NRvPz2vC900HcraLRrUFaRzwhqOOknYJSBrGzW+Cx3YSeaOCg nKyI8B5gw4C0G0iL1dSsz2bR1O4GNOVfT3R6joZEXATFo/Kc2L0YAvApBNUYvY0k bjJ/JfTO5060SsWftf4iw3jrhSn9RwTTYdq/kErGFWvDGJn2MiuhMe2onNfVzIGR mdUxHwi1ulkspAn/fmY7f0hZpskDwcHyZmbKZuk+NU/FJ8IAcmvk9y7m25nSSc8= -----END RSA PRIVATE KEY-----""" EXPECTED_RESPONSE_FROM_GRAYLOG = { "execution": { "done": True, "cancelled": False, "completed_exceptionally": False }, "results": { "60f9b4c461bf9b2a8a999b85": { "query": { "id": "query_id", "timerange": { "type": "relative", "range": 12592000 }, "filter": { "type": "or", "filters": [ { "type": "stream", "filters": None, "id": "000000000000000000000001", "title": None }, { "type": "stream", "filters": None, "id": "60bf6fd4024ad37a05cbb006", "title": None } ] }, "query": { "type": "elasticsearch", "query_string": "\"24.141.154.216\"" }, "search_types": [ { "timerange": None, "query": None, "streams": [], "id": "search_type_id", "name": None, "limit": 101, "offset": 0, "sort": [ { "field": "timestamp", "order": "DESC" } ], "decorators": [], "type": "messages", "filter": None } ] }, "execution_stats": { "duration": 33, "timestamp": 
"2021-07-20T12:37:42.058Z", "effective_timerange": { "type": "absolute", "from": "2021-02-24T18:51:02.091Z", "to": "2021-07-20T12:37:42.091Z" } }, "search_types": { "60f9b550a09a4d867ecd0169": { "id": "search_type_id", "messages": [ { "highlight_ranges": {}, "message": { "gl2_accounted_message_size": 221, "level": 3, "gl2_remote_ip": "::1", "gl2_remote_port": 43339, "streams": [ "000000000000000000000001" ], "gl2_message_id": "01F7NTA7TRK9Q36QN6Q03PKSJE", "source": "%ASA-3-710003:", "message": "%ASA-3-710003: TCP access denied by ACL from 49.143.32.6/4222 to outside:24.141.154.216/23", "gl2_source_input": "60bf6485024ad37a05cba39c", "facility_num": 20, "gl2_source_node": "80fe6cad-d153-489f-91a8-beee65b2e27c", "_id": "f5999d80-c856-11eb-a871-000c293368b3", "facility": "local4", "timestamp": "2021-06-08T12:42:17.816Z" }, "index": "graylog_0", "decoration_stats": None }, { "highlight_ranges": {}, "message": { "gl2_accounted_message_size": 222, "level": 3, "gl2_remote_ip": "::1", "gl2_remote_port": 49754, "streams": [ "000000000000000000000001" ], "gl2_message_id": "01F7NT94374GXQMJRV7GAH5AKM", "source": "%ASA-3-710003:", "message": "%ASA-3-710003: TCP access denied by ACL from 5.34.129.87/62507 to outside:24.141.154.216/23", "gl2_source_input": "60bf6485024ad37a05cba39c", "facility_num": 20, "gl2_source_node": "80fe6cad-d153-489f-91a8-beee65b2e27c", "_id": "dfc9f770-c856-11eb-a871-000c293368b3", "facility": "local4", "timestamp": "2021-06-08T12:41:41.223Z" }, "index": "graylog_0", "decoration_stats": None }, { "highlight_ranges": {}, "message": { "gl2_accounted_message_size": 225, "level": 3, "gl2_remote_ip": "::1", "gl2_remote_port": 48544, "streams": [ "000000000000000000000001" ], "gl2_message_id": "01F7NT7J8GGPSBNB6RFHH3P31A", "source": "%ASA-3-710003:", "message": "%ASA-3-710003: TCP access denied by ACL from 156.96.156.172/50168 to outside:24.141.154.216/80", "gl2_source_input": "60bf6485024ad37a05cba39c", "facility_num": 20, "gl2_source_node": 
"80fe6cad-d153-489f-91a8-beee65b2e27c", "_id": "c15f4100-c856-11eb-a871-000c293368b3", "facility": "local4", "timestamp": "2021-06-08T12:40:50.192Z" }, "index": "graylog_0", "decoration_stats": None }, { "highlight_ranges": {}, "message": { "gl2_accounted_message_size": 223, "level": 3, "gl2_remote_ip": "::1", "gl2_remote_port": 47419, "streams": [ "000000000000000000000001" ], "gl2_message_id": "01F7NT4RFEHY8F2Y4VP1RT5T5F", "source": "ASA-3-710003:", "message": "ASA-3-710003: TCP access denied by ACL from 156.96.156.172/50168 to outside:24.141.154.216/80", "gl2_source_input": "60bf6485024ad37a05cba39c", "facility_num": 20, "gl2_source_node": "80fe6cad-d153-489f-91a8-beee65b2e27c", "_id": "8a8cfb90-c856-11eb-a871-000c293368b3", "facility": "local4", "timestamp": "2021-06-08T12:39:18.216Z" }, "index": "graylog_0", "decoration_stats": None } ], "effective_timerange": { "type": "absolute", "from": "2021-02-24T18:51:02.091Z", "to": "2021-07-20T12:37:42.091Z" }, "total_results": 4, "type": "messages" } }, "errors": [], "state": "COMPLETED" } }, "id": "60f6c39681e5cd7cd5e9ad9a", "owner": "admin", "search_id": None } EXPECTED_RESPONSE_FROM_RELAY = { "data": { "sightings": { "count": 4, "docs": [ { "confidence": "High", "count": 1, "data": { "columns": [ { "name": "level", "type": "string" }, { "name": "source", "type": "string" }, { "name": "facility_num", "type": "string" }, { "name": "facility", "type": "string" } ], "rows": [ [ "3", "%ASA-3-710003:", "20", "local4" ] ] }, "description": "```\n%ASA-3-710003: TCP access denied by ACL from 49.143.32.6/4222 to outside:24.141.154.216/23 \n```", "external_ids": [ "01F7NTA7TRK9Q36QN6Q03PKSJE", "f5999d80-c856-11eb-a871-000c293368b3" ], "id": "01F7NTA7TRK9Q36QN6Q03PKSJE", "internal": True, "observables": [{ "type": "ip", "value": "24.141.154.216" }], "observed_time": { "start_time": "2021-06-08T12:42:17.816Z" }, "schema_version": "1.1.6", "short_description": "Node 80fe6cad received a log from ::1 containing the observable", 
"source": "Graylog", "source_uri": "https://host/messages/graylog_0/f5999d80-c856-11eb-a871-000c293368b3", "title": "Log message received by Graylog in last 30 days contains observable", "type": "sighting" }, { "confidence": "High", "count": 1, "data": { "columns": [ { "name": "level", "type": "string" }, { "name": "source", "type": "string" }, { "name": "facility_num", "type": "string" }, { "name": "facility", "type": "string" } ], "rows": [ [ "3", "%ASA-3-710003:", "20", "local4" ] ] }, "description": "```\n%ASA-3-710003: TCP access denied by ACL from 5.34.129.87/62507 to outside:24.141.154.216/23 \n```", "external_ids": [ "01F7NT94374GXQMJRV7GAH5AKM", "dfc9f770-c856-11eb-a871-000c293368b3" ], "id": "01F7NT94374GXQMJRV7GAH5AKM", "internal": True, "observables": [{ "type": "ip", "value": "24.141.154.216" }], "observed_time": { "start_time": "2021-06-08T12:41:41.223Z" }, "schema_version": "1.1.6", "short_description": "Node 80fe6cad received a log from ::1 containing the observable", "source": "Graylog", "source_uri": "https://host/messages/graylog_0/dfc9f770-c856-11eb-a871-000c293368b3", "title": "Log message received by Graylog in last 30 days contains observable", "type": "sighting" }, { "confidence": "High", "count": 1, "data": { "columns": [ { "name": "level", "type": "string" }, { "name": "source", "type": "string" }, { "name": "facility_num", "type": "string" }, { "name": "facility", "type": "string" } ], "rows": [ [ "3", "%ASA-3-710003:", "20", "local4" ] ] }, "description": "```\n%ASA-3-710003: TCP access denied by ACL from 156.96.156.172/50168 to outside:24.141.154.216/80 \n```", "external_ids": [ "01F7NT7J8GGPSBNB6RFHH3P31A", "c15f4100-c856-11eb-a871-000c293368b3" ], "id": "01F7NT7J8GGPSBNB6RFHH3P31A", "internal": True, "observables": [{ "type": "ip", "value": "24.141.154.216" }], "observed_time": { "start_time": "2021-06-08T12:40:50.192Z" }, "schema_version": "1.1.6", "short_description": "Node 80fe6cad received a log from ::1 containing the 
observable", "source": "Graylog", "source_uri": "https://host/messages/graylog_0/c15f4100-c856-11eb-a871-000c293368b3", "title": "Log message received by Graylog in last 30 days contains observable", "type": "sighting" }, { "confidence": "High", "count": 1, "data": { "columns": [ { "name": "level", "type": "string" }, { "name": "source", "type": "string" }, { "name": "facility_num", "type": "string" }, { "name": "facility", "type": "string" } ], "rows": [ [ "3", "ASA-3-710003:", "20", "local4" ] ] }, "description": "```\nASA-3-710003: TCP access denied by ACL from 156.96.156.172/50168 to outside:24.141.154.216/80 \n```", "external_ids": [ "01F7NT4RFEHY8F2Y4VP1RT5T5F", "8a8cfb90-c856-11eb-a871-000c293368b3" ], "id": "01F7NT4RFEHY8F2Y4VP1RT5T5F", "internal": True, "observables": [{ "type": "ip", "value": "24.141.154.216" }], "observed_time": { "start_time": "2021-06-08T12:39:18.216Z" }, "schema_version": "1.1.6", "short_description": "Node 80fe6cad received a log from ::1 containing the observable", "source": "Graylog", "source_uri": "https://host/messages/graylog_0/8a8cfb90-c856-11eb-a871-000c293368b3", "title": "Log message received by Graylog in last 30 days contains observable", "type": "sighting" } ] } } } EXPECTED_RESPONSE_FROM_RELAY_MORE_MESSAGES_AVAILABLE = { 'data': { 'sightings': { 'count': 4, 'docs': [{ 'confidence': 'High', 'count': 1, 'data': { 'columns': [{'name': 'level', 'type': 'string'}, {'name': 'source', 'type': 'string'}, { 'name': 'facility_num', 'type': 'string'}, { 'name': 'facility', 'type': 'string'}], 'rows': [ ['3', '%ASA-3-710003:', '20', 'local4']]}, 'description': '```\n%ASA-3-710003: TCP access denied by ACL from 49.143.32.6/4222 to outside:24.141.154.216/23 \n```', 'external_ids': [ '01F7NTA7TRK9Q36QN6Q03PKSJE', 'f5999d80-c856-11eb-a871-000c293368b3'], 'id': '01F7NTA7TRK9Q36QN6Q03PKSJE', 'internal': True, 'observables': [{'type': 'ip', 'value': '24.141.154.216'}], 'observed_time': { 'start_time': '2021-06-08T12:42:17.816Z'}, 
'schema_version': '1.1.6', 'short_description': 'Node 80fe6cad received a log from ::1 containing the observable', 'source': 'Graylog', 'source_uri': 'https://host/messages/graylog_0/f5999d80-c856-11eb-a871-000c293368b3', 'title': 'Log message received by Graylog in last 30 days contains observable', 'type': 'sighting'}, {'confidence': 'High', 'count': 1, 'data': {'columns': [{'name': 'level', 'type': 'string'}, {'name': 'source', 'type': 'string'}, { 'name': 'facility_num', 'type': 'string'}, { 'name': 'facility', 'type': 'string'}], 'rows': [ ['3', '%ASA-3-710003:', '20', 'local4']]}, 'description': '```\n%ASA-3-710003: TCP access denied by ACL from 5.34.129.87/62507 to outside:24.141.154.216/23 \n```', 'external_ids': [ '01F7NT94374GXQMJRV7GAH5AKM', 'dfc9f770-c856-11eb-a871-000c293368b3'], 'id': '01F7NT94374GXQMJRV7GAH5AKM', 'internal': True, 'observables': [{'type': 'ip', 'value': '24.141.154.216'}], 'observed_time': { 'start_time': '2021-06-08T12:41:41.223Z'}, 'schema_version': '1.1.6', 'short_description': 'Node 80fe6cad received a log from ::1 containing the observable', 'source': 'Graylog', 'source_uri': 'https://host/messages/graylog_0/dfc9f770-c856-11eb-a871-000c293368b3', 'title': 'Log message received by Graylog in last 30 days contains observable', 'type': 'sighting'}, {'confidence': 'High', 'count': 1, 'data': {'columns': [{'name': 'level', 'type': 'string'}, {'name': 'source', 'type': 'string'}, { 'name': 'facility_num', 'type': 'string'}, { 'name': 'facility', 'type': 'string'}], 'rows': [ ['3', '%ASA-3-710003:', '20', 'local4']]}, 'description': '```\n%ASA-3-710003: TCP access denied by ACL from 156.96.156.172/50168 to outside:24.141.154.216/80 \n```', 'external_ids': [ '01F7NT7J8GGPSBNB6RFHH3P31A', 'c15f4100-c856-11eb-a871-000c293368b3'], 'id': '01F7NT7J8GGPSBNB6RFHH3P31A', 'internal': True, 'observables': [{'type': 'ip', 'value': '24.141.154.216'}], 'observed_time': { 'start_time': '2021-06-08T12:40:50.192Z'}, 'schema_version': '1.1.6', 
'short_description': 'Node 80fe6cad received a log from ::1 containing the observable', 'source': 'Graylog', 'source_uri': 'https://host/messages/graylog_0/c15f4100-c856-11eb-a871-000c293368b3', 'title': 'Log message received by Graylog in last 30 days contains observable', 'type': 'sighting'}, {'confidence': 'High', 'count': 1, 'data': {'columns': [{'name': 'level', 'type': 'string'}, {'name': 'source', 'type': 'string'}, { 'name': 'facility_num', 'type': 'string'}, { 'name': 'facility', 'type': 'string'}], 'rows': [ ['3', 'ASA-3-710003:', '20', 'local4']]}, 'description': '```\nASA-3-710003: TCP access denied by ACL from 156.96.156.172/50168 to outside:24.141.154.216/80 \n```', 'external_ids': [ '01F7NT4RFEHY8F2Y4VP1RT5T5F', '8a8cfb90-c856-11eb-a871-000c293368b3'], 'id': '01F7NT4RFEHY8F2Y4VP1RT5T5F', 'internal': True, 'observables': [{'type': 'ip', 'value': '24.141.154.216'}], 'observed_time': { 'start_time': '2021-06-08T12:39:18.216Z'}, 'schema_version': '1.1.6', 'short_description': 'Node 80fe6cad received a log from ::1 containing the observable', 'source': 'Graylog', 'source_uri': 'https://host/messages/graylog_0/8a8cfb90-c856-11eb-a871-000c293368b3', 'title': 'Log message received by Graylog in last 30 days contains observable', 'type': 'sighting'}]}}, 'errors': [ {'code': 'too-many-messages-warning', 'message': 'More messages found in Graylog for 24.141.154.216 than can be rendered. 
Log in to the Graylog console to see all messages', 'type': 'warning'}]} EXPECTED_RESPONSE_FROM_REFER_ENDPOINT = { 'data': [ { 'categories': [ 'Graylog', 'Search' ], 'description': 'Search for this IP in the Graylog console', 'id': 'ref-graylog-search-ip-24.141.154.216', 'title': 'Search for this IP', 'url': 'https://host/search?rangetype=relative&relative=2592000&q=24.141.154.216' } ] } EXPECTED_RESPONSE_FROM_GRAYLOG_WITH_RELATIONS = { "execution": { "done": True, "cancelled": False, "completed_exceptionally": False }, "results": { "60f9b4c461bf9b2a8a999b85": { "query": { "id": "60f9b550a09a4d867ecd0161", "timerange": { "type": "relative", "range": 2592000 }, "filter": { "type": "or", "filters": [ { "type": "stream", "id": "61034e51d9dadc489abe0055" }, { "type": "stream", "id": "000000000000000000000001" }, { "type": "stream", "id": "61034e51d9dadc489abe00e8" }, { "type": "stream", "id": "6100139fd9dadc489abaa65d" } ] }, "query": { "type": "elasticsearch", "query_string": "\"188.125.72.73\"" }, "search_types": [ { "timerange": None, "query": None, "streams": [], "id": "search_type_id", "name": None, "limit": 101, "offset": 0, "sort": [ { "field": "timestamp", "order": "DESC" } ], "decorators": [], "type": "messages", "filter": None } ] }, "execution_stats": { "duration": 17, "timestamp": "2021-07-30T08:10:41.731Z", "effective_timerange": { "type": "absolute", "from": "2021-06-30T08:10:41.748Z", "to": "2021-07-30T08:10:41.748Z" } }, "search_types": { "60f9b550a09a4d867ecd0169": { "id": "60f9b550a09a4d867ecd0162", "messages": [ { "highlight_ranges": {}, "message": { "destination_port": "51083", "gl2_remote_ip": "198.18.133.195", "event_severity_level": "6", "gl2_remote_port": 57724, "source": "vFTD", "gl2_source_input": "60f72a14d9dadc489ab16198", "network_transport": "tcp", "vendor_event_action": "blocked", "source_ip": "188.125.72.73", "destination_ip": "198.18.133.198", "event_code": "106015", "source_port": "25", "event_outcome": "denied", "gl2_source_node": 
"863495ef-c881-4024-ba49-e776020c676c", "timestamp": "2021-07-30T08:01:14.000Z", "event_source_product": "CISCO-ASA", "gl2_accounted_message_size": 561, "level": 6, "streams": [ "61034e51d9dadc489abe00e8" ], "gl2_message_id": "01FBV6X06DZP5N7YKS58DEJHNM", "message": "vFTD : %FTD-6-106015: Deny TCP (no connection) from 188.125.72.73/25 to 198.18.133.198/51083 flags FIN PSH ACK on interface Outside", "vendor_event_outcome_reason": "%FTD-6-106015: Deny TCP (no connection)", "network_interface_in": "Outside", "facility_num": 20, "_id": "500812ac-f10c-11eb-9baa-1212395d7875", "facility": "local4" }, "index": "graylog_0", "decoration_stats": None }, { "highlight_ranges": {}, "message": { "destination_port": "51083", "gl2_remote_ip": "198.18.133.195", "event_severity_level": "6", "gl2_remote_port": 40083, "source": "vFTD", "gl2_source_input": "60f72a14d9dadc489ab16198", "network_transport": "tcp", "vendor_event_action": "blocked", "source_ip": "188.125.72.73", "destination_ip": "198.18.133.198", "event_code": "106015", "source_port": "25", "event_outcome": "denied", "gl2_source_node": "863495ef-c881-4024-ba49-e776020c676c", "timestamp": "2021-07-30T08:01:14.000Z", "event_source_product": "CISCO-ASA", "gl2_accounted_message_size": 561, "level": 6, "streams": [ "61034e51d9dadc489abe00e8" ], "gl2_message_id": "01FBV6X06DPB5KM95W6CPJJ6Y7", "message": "vFTD : %FTD-6-106015: Deny TCP (no connection) from 188.125.72.73/25 to 198.18.133.198/51083 flags FIN PSH ACK on interface Outside", "vendor_event_outcome_reason": "%FTD-6-106015: Deny TCP (no connection)", "network_interface_in": "Outside", "facility_num": 20, "_id": "500812af-f10c-11eb-9baa-1212395d7875", "facility": "local4" }, "index": "graylog_0", "decoration_stats": None } ], "effective_timerange": { "type": "absolute", "from": "2021-06-30T08:10:41.748Z", "to": "2021-07-30T08:10:41.748Z" }, "total_results": 2, "type": "messages" } }, "errors": [], "state": "COMPLETED" } }, "id": "6103b401d9dadc489abe6b6a", "owner": 
"mstorozh@cisco.com", "search_id": None } EXPECTED_RESPONSE_FROM_RELAY_WITH_RELATIONS = { 'data': { 'sightings': { 'count': 2, 'docs': [ { 'confidence': 'High', 'count': 1, 'data': { 'columns': [ {'name': 'destination_port', 'type': 'string'}, {'name': 'event_severity_level', 'type': 'string'}, {'name': 'source', 'type': 'string'}, {'name': 'network_transport', 'type': 'string'}, {'name': 'vendor_event_action', 'type': 'string'}, {'name': 'source_ip', 'type': 'string'}, {'name': 'destination_ip', 'type': 'string'}, {'name': 'event_code', 'type': 'string'}, {'name': 'source_port', 'type': 'string'}, {'name': 'event_outcome', 'type': 'string'}, {'name': 'event_source_product', 'type': 'string'}, {'name': 'level', 'type': 'string'}, {'name': 'vendor_event_outcome_reason', 'type': 'string'}, {'name': 'network_interface_in', 'type': 'string'}, {'name': 'facility_num', 'type': 'string'}, {'name': 'facility', 'type': 'string'} ], 'rows': [ ['51083', '6', 'vFTD', 'tcp', 'blocked', '188.125.72.73', '198.18.133.198', '106015', '25', 'denied', 'CISCO-ASA', '6', '%FTD-6-106015: Deny TCP (no connection)', 'Outside', '20', 'local4']] }, 'description': '```\nvFTD : %FTD-6-106015: Deny TCP (no connection) from 188.125.72.73/25 to 198.18.133.198/51083 flags FIN PSH ACK on interface Outside \n```', 'external_ids': [ '01FBV6X06DZP5N7YKS58DEJHNM', '500812ac-f10c-11eb-9baa-1212395d7875'], 'id': '01FBV6X06DZP5N7YKS58DEJHNM', 'internal': True, 'observables': [ {'type': 'ip', 'value': '188.125.72.73'}], 'observed_time': { 'start_time': '2021-07-30T08:01:14.000Z'}, 'relations': [ {'origin': 'vFTD', 'related': {'type': 'ip', 'value': '198.18.133.198'}, 'relation': 'Connected_To', 'source': {'type': 'ip', 'value': '188.125.72.73'}}], 'schema_version': '1.1.6', 'short_description': 'Node 863495ef received a log from 198.18.133.195 containing the observable', 'source': 'Graylog', 'source_uri': 'https://host/messages/graylog_0/500812ac-f10c-11eb-9baa-1212395d7875', 'title': 'Log message 
received by Graylog in last 30 days contains observable', 'type': 'sighting'}, {'confidence': 'High', 'count': 1, 'data': {'columns': [ {'name': 'destination_port', 'type': 'string'}, {'name': 'event_severity_level', 'type': 'string'}, {'name': 'source', 'type': 'string'}, {'name': 'network_transport', 'type': 'string'}, {'name': 'vendor_event_action', 'type': 'string'}, {'name': 'source_ip', 'type': 'string'}, {'name': 'destination_ip', 'type': 'string'}, {'name': 'event_code', 'type': 'string'}, {'name': 'source_port', 'type': 'string'}, {'name': 'event_outcome', 'type': 'string'}, {'name': 'event_source_product', 'type': 'string'}, {'name': 'level', 'type': 'string'}, { 'name': 'vendor_event_outcome_reason', 'type': 'string'}, {'name': 'network_interface_in', 'type': 'string'}, {'name': 'facility_num', 'type': 'string'}, {'name': 'facility', 'type': 'string'}], 'rows': [ ['51083', '6', 'vFTD', 'tcp', 'blocked', '188.125.72.73', '198.18.133.198', '106015', '25', 'denied', 'CISCO-ASA', '6', '%FTD-6-106015: Deny TCP (no connection)', 'Outside', '20', 'local4']]}, 'description': '```\nvFTD : %FTD-6-106015: Deny TCP (no connection) from 188.125.72.73/25 to 198.18.133.198/51083 flags FIN PSH ACK on interface Outside \n```', 'external_ids': [ '01FBV6X06DPB5KM95W6CPJJ6Y7', '500812af-f10c-11eb-9baa-1212395d7875'], 'id': '01FBV6X06DPB5KM95W6CPJJ6Y7', 'internal': True, 'observables': [ {'type': 'ip', 'value': '188.125.72.73'}], 'observed_time': { 'start_time': '2021-07-30T08:01:14.000Z'}, 'relations': [{'origin': 'vFTD', 'related': { 'type': 'ip', 'value': '198.18.133.198'}, 'relation': 'Connected_To', 'source': {'type': 'ip', 'value': '188.125.72.73'}}], 'schema_version': '1.1.6', 'short_description': 'Node 863495ef received a log from 198.18.133.195 containing the observable', 'source': 'Graylog', 'source_uri': 'https://host/messages/graylog_0/500812af-f10c-11eb-9baa-1212395d7875', 'title': 'Log message received by Graylog in last 30 days contains observable', 'type': 
'sighting'} ] } } }
48.512299
181
0.399039
2,830
45,359
6.272792
0.167491
0.036052
0.042587
0.014872
0.743184
0.71468
0.70578
0.699076
0.698851
0.691866
0
0.173943
0.488459
45,359
934
182
48.56424
0.59113
0
0
0.563715
0
0.019438
0.418153
0.175379
0
1
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
4
17e1383f6cfbcbbba2ad55f81f370c16c4b03f3e
106
py
Python
db/__init__.py
ti-lei/iphone11
f69e7050ecdc87651eab5e8fdb38e4f305a9257f
[ "bzip2-1.0.6" ]
null
null
null
db/__init__.py
ti-lei/iphone11
f69e7050ecdc87651eab5e8fdb38e4f305a9257f
[ "bzip2-1.0.6" ]
null
null
null
db/__init__.py
ti-lei/iphone11
f69e7050ecdc87651eab5e8fdb38e4f305a9257f
[ "bzip2-1.0.6" ]
null
null
null
from db.router import Router from db import * router = Router() def get_db_router(): return router
11.777778
28
0.716981
16
106
4.625
0.4375
0.162162
0
0
0
0
0
0
0
0
0
0
0.207547
106
8
29
13.25
0.880952
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.4
0.2
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
0
0
0
4
17e456504deca455b967b074d1398d4f839f72f8
170
py
Python
libs/utils.py
quojama/takohachi
8a3ed7d96b4ceb8b260233f1418aee91cf61320f
[ "MIT" ]
2
2021-07-12T10:11:18.000Z
2021-07-26T18:58:19.000Z
libs/utils.py
pistachiostudio/takohachi
263c390ccd63065c43feee4156144a691075745f
[ "MIT" ]
22
2021-10-01T02:36:39.000Z
2022-03-16T03:07:35.000Z
libs/utils.py
pistachiostudio/takohachi
263c390ccd63065c43feee4156144a691075745f
[ "MIT" ]
null
null
null
from datetime import datetime, timedelta, timezone def get_now_timestamp_jst() -> datetime: JST = timezone(timedelta(hours=+9), "JST") return datetime.now(JST)
24.285714
50
0.729412
22
170
5.5
0.590909
0
0
0
0
0
0
0
0
0
0
0.006944
0.152941
170
6
51
28.333333
0.833333
0
0
0
0
0
0.017647
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
aa0468f5163f0c138c91ec4d5319dd6ccc74b208
3,613
py
Python
c3/libraries/constants.py
picbeats/c3
4321c2a42006b58d7231746cfe05fa391f640aa1
[ "Apache-2.0" ]
45
2020-11-02T13:26:26.000Z
2022-03-20T13:13:42.000Z
c3/libraries/constants.py
picbeats/c3
4321c2a42006b58d7231746cfe05fa391f640aa1
[ "Apache-2.0" ]
165
2020-10-30T17:20:25.000Z
2022-03-31T12:59:48.000Z
c3/libraries/constants.py
picbeats/c3
4321c2a42006b58d7231746cfe05fa391f640aa1
[ "Apache-2.0" ]
28
2020-10-21T04:10:26.000Z
2022-02-17T17:44:55.000Z
"""All physical constants used in other code.""" import numpy as np kb = 1.380649e-23 h = 6.62607015e-34 hbar = 1.054571817e-34 q_e = 1.602176634e-19 # electron charge twopi = 6.2831853071795864769252867665590057683943387987502116419498891846 PREFIXES = { "K": 1e3, "M": 1e6, "G": 1e9, "T": 1e12, "m": 1e-3, "µ": 1e-6, "n": 1e-9, "p": 1e-12, } # Pauli matrices Id = np.array([[1, 0], [0, 1]], dtype=np.complex128) X = np.array([[0, 1], [1, 0]], dtype=np.complex128) Y = np.array([[0, -1j], [1j, 0]], dtype=np.complex128) Z = np.array([[1, 0], [0, -1]], dtype=np.complex128) PAULIS = {"X": X, "Y": Y, "Z": Z, "Id": Id} ISWAP = np.array( [[1, 0, 0, 0], [0, 0, 1j, 0], [0, 1j, 0, 0], [0, 0, 0, 1]], dtype=np.complex128 ) ISWAP3 = np.array( [ [1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 1j, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1j, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], ], dtype=np.complex128, ) CCZ = np.diag([1, 1, 1, 1, 1, 1, 1, -1]) GATES = { "id": np.array([[1, 0], [0, 1]], dtype=np.complex128), "rx90p": np.array([[1, -1j], [-1j, 1]], dtype=np.complex128) / np.sqrt(2), "rx90m": np.array([[1, 1j], [1j, 1]], dtype=np.complex128) / np.sqrt(2), "rxp": np.array([[0, -1j], [-1j, 0]], dtype=np.complex128), "ry90p": np.array([[1, -1], [1, 1]], dtype=np.complex128) / np.sqrt(2), "ry90m": np.array([[1, 1], [-1, 1]], dtype=np.complex128) / np.sqrt(2), "ryp": np.array([[0, -1], [1, 0]], dtype=np.complex128), "x": X, "y": Y, "rz90p": np.array([[1 - 1j, 0], [0, 1 + 1j]], dtype=np.complex128) / np.sqrt(2), "rz90m": np.array([[1 + 1j, 0], [0, 1 - 1j]], dtype=np.complex128) / np.sqrt(2), "rzp": np.array([[-1.0j, 0], [0, 1.0j]], dtype=np.complex128), "crxp": np.array( [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, -1j], [0, 0, -1j, 0]], dtype=np.complex128, ), # What is the meaning of this gate? 
"crzp": np.array( [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, -1j, 0], [0, 0, 0, 1j]], dtype=np.complex128 ), "cr": np.array( [[0, -1j, 0, 0], [-1j, 0, 0, 0], [0, 0, 0, 1j], [0, 0, 1j, 0]], dtype=np.complex128, ), "cr90": np.array( [[1, -1j, 0, 0], [-1j, 1, 0, 0], [0, 0, 1, 1j], [0, 0, 1j, 1]], dtype=np.complex128, ) / np.sqrt(2), "iswap": np.array( [[1, 0, 0, 0], [0, 0, 1j, 0], [0, 1j, 0, 0], [0, 0, 0, 1]], dtype=np.complex128 ), "cz": np.diag(np.array([1, 1, 1, -1], dtype=np.complex128)), "ccz": np.diag(np.array([1, 1, 1, 1, 1, 1, 1, -1], dtype=np.complex128)), "cx": np.array( [[1, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0], [0, 1, 0, 0]], dtype=np.complex128 ), } x90p = GATES["rx90p"] y90p = GATES["ry90p"] x90m = GATES["rx90m"] y90m = GATES["ry90m"] CLIFFORDS = { "C1": x90m @ x90p, "C2": x90p @ y90p, "C3": y90m @ x90m, "C4": x90p @ x90p @ y90p, "C5": x90m, "C6": x90m @ y90m @ x90p, "C7": x90p @ x90p, "C8": x90m @ y90m, "C9": y90m @ x90p, "C10": y90m, "C11": x90p, "C12": x90p @ y90p @ x90p, "C13": y90p @ y90p, "C14": x90p @ y90m, "C15": y90p @ x90p, "C16": x90p @ x90p @ y90m, "C17": y90p @ y90p @ x90p, "C18": x90p @ y90m @ x90p, "C19": y90p @ y90p @ x90p @ x90p, "C20": x90m @ y90p, "C21": y90p @ x90m, "C22": y90p, "C23": y90p @ y90p @ x90m, "C24": x90m @ y90p @ x90p, }
29.859504
88
0.464434
615
3,613
2.726829
0.188618
0.159809
0.182469
0.205128
0.545021
0.533691
0.502087
0.49195
0.451401
0.330948
0
0.250095
0.270689
3,613
120
89
30.108333
0.386338
0.029892
0
0.149533
0
0
0.047183
0
0
0
0
0
0
1
0
false
0
0.009346
0
0.009346
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
a4b8019a53bddce637fc18ba3fbfb40bdbadd574
861
py
Python
krogon/logger.py
enamrik/krogon
a41a10ed346b7198509929ed9ba1e9fcf778dc78
[ "MIT" ]
1
2020-03-02T14:17:02.000Z
2020-03-02T14:17:02.000Z
krogon/logger.py
enamrik/krogon
a41a10ed346b7198509929ed9ba1e9fcf778dc78
[ "MIT" ]
null
null
null
krogon/logger.py
enamrik/krogon
a41a10ed346b7198509929ed9ba1e9fcf778dc78
[ "MIT" ]
null
null
null
import logging class Logger: def __init__(self, name: str): self.name = name def add_prefix(self, suffix): return Logger(self.name + ' -> ' + suffix) def debug(self, message): self.log('debug', message) def error(self, message): self.log('error', message) def warn(self, message): self.log('warn', message) def info(self, message): self.log('info', message) def step(self, message): self.log('info', "========================================================") self.log('info', 'STEP: {}'.format(message)) self.log('info', "========================================================") def log(self, level, message): # logging.log(level, self.name + ':' + message) print('LEVEL: [' + level.ljust(5) + '] ' + self.name + ': ' + message)
27.774194
84
0.477352
89
861
4.561798
0.280899
0.12069
0.206897
0.221675
0.108374
0
0
0
0
0
0
0.001527
0.239257
861
30
85
28.7
0.618321
0.052265
0
0.1
0
0
0.203931
0.137592
0
0
0
0
0
1
0.4
false
0
0.05
0.05
0.55
0.05
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
a4cf9d6bfe920fabcaf2f3309749c8f79da72236
604
py
Python
permissions.py
maurob/timeperiod
1a014b054eb47f416f89bad9e5bccd1503baf326
[ "MIT" ]
null
null
null
permissions.py
maurob/timeperiod
1a014b054eb47f416f89bad9e5bccd1503baf326
[ "MIT" ]
null
null
null
permissions.py
maurob/timeperiod
1a014b054eb47f416f89bad9e5bccd1503baf326
[ "MIT" ]
null
null
null
from rest_framework.permissions import BasePermission class IsCurrentUser(BasePermission): def has_object_permission(self, request, view, obj): return obj == request.user class UserIsOwnerOrAdmin(BasePermission): def has_permission(self, request, view): return request.user and request.user.is_authenticated() def check_object_permission(self, user, obj): return (user and user.is_authenticated() and (user.is_staff or obj == user)) def has_object_permission(self, request, view, obj): return self.check_object_permission(request.user, obj)
33.555556
63
0.731788
74
604
5.797297
0.337838
0.149184
0.13986
0.174825
0.214452
0.214452
0.214452
0.214452
0.214452
0
0
0
0.182119
604
18
64
33.555556
0.868421
0
0
0.166667
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.083333
0.333333
0.916667
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
a4edcb8a4b11a812c9abc6f90ef8f951bf54ca29
57
py
Python
marquant/__init__.py
CSF-BioComp/marquant
f83689c03134eac6311ebcd33ef3eca9db6f2782
[ "MIT" ]
null
null
null
marquant/__init__.py
CSF-BioComp/marquant
f83689c03134eac6311ebcd33ef3eca9db6f2782
[ "MIT" ]
null
null
null
marquant/__init__.py
CSF-BioComp/marquant
f83689c03134eac6311ebcd33ef3eca9db6f2782
[ "MIT" ]
null
null
null
from marquant.microarray_analysis import slide_experiment
57
57
0.929825
7
57
7.285714
1
0
0
0
0
0
0
0
0
0
0
0
0.052632
57
1
57
57
0.944444
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
351bab681dc4d8ac10ef50d1897e31b401c68534
201
py
Python
Exercise_5_4.py
kushrami/Python-Crash-Course-book-Excersice
7093181940a90d9f4bab5775ef56f57963450393
[ "Apache-2.0" ]
null
null
null
Exercise_5_4.py
kushrami/Python-Crash-Course-book-Excersice
7093181940a90d9f4bab5775ef56f57963450393
[ "Apache-2.0" ]
null
null
null
Exercise_5_4.py
kushrami/Python-Crash-Course-book-Excersice
7093181940a90d9f4bab5775ef56f57963450393
[ "Apache-2.0" ]
null
null
null
#Alian Colors #2 alian_color = 'green' if alian_color == 'green' or alian_color == 'yellow' or alian_color == 'red': print("You just earned 5 points.") else: print("You just earned 10 points.")
33.5
77
0.676617
31
201
4.258065
0.548387
0.30303
0.227273
0.272727
0
0
0
0
0
0
0
0.024242
0.179104
201
6
78
33.5
0.775758
0.069652
0
0
0
0
0.376344
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
353048def3d07d25bfe98d37d503e9cdd7158470
902
py
Python
tests/geographic/outliers/test_is_std_outlier.py
PEM-Humboldt/regi0
0d64587d5d87f57cddfc7a67bb8baf74cd70adf2
[ "MIT" ]
null
null
null
tests/geographic/outliers/test_is_std_outlier.py
PEM-Humboldt/regi0
0d64587d5d87f57cddfc7a67bb8baf74cd70adf2
[ "MIT" ]
15
2022-02-03T11:38:37.000Z
2022-03-09T23:23:04.000Z
tests/geographic/outliers/test_is_std_outlier.py
PEM-Humboldt/regi0
0d64587d5d87f57cddfc7a67bb8baf74cd70adf2
[ "MIT" ]
null
null
null
""" Test cases for the regi0.geographic.outliers._is_std_outlier function. """ import numpy as np import pytest from regi0.geographic.outliers import _is_std_outlier @pytest.fixture() def values(): return np.array([52, 56, 53, 57, 51, 59, 1, 99]) def test_std(values): result = _is_std_outlier(values) expected = np.array([False, False, False, False, False, False, True, False]) np.testing.assert_array_equal(result, expected) def test_std_smaller_threshold(values): result = _is_std_outlier(values, threshold=1.0) expected = np.array([False, False, False, False, False, False, True, True]) np.testing.assert_array_equal(result, expected) def test_std_greater_threshold(values): result = _is_std_outlier(values, threshold=3.0) expected = np.array([False, False, False, False, False, False, False, False]) np.testing.assert_array_equal(result, expected)
29.096774
81
0.732816
131
902
4.832061
0.320611
0.268562
0.331754
0.347551
0.668246
0.668246
0.620853
0.612954
0.383886
0.383886
0
0.027273
0.146341
902
30
82
30.066667
0.794805
0.077605
0
0.166667
0
0
0
0
0
0
0
0
0.166667
1
0.222222
false
0
0.166667
0.055556
0.444444
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
102f600b23ccb704c3dce6c1e058f67a31fb34fd
99
py
Python
ImageAnalysis/ImageAnalysis/python/source/image_parsing/__init__.py
mikebourbeauart/PerlerPrinter
8c5023de6bb9b3cbe2bc28c1c823030dfd708db4
[ "MIT" ]
null
null
null
ImageAnalysis/ImageAnalysis/python/source/image_parsing/__init__.py
mikebourbeauart/PerlerPrinter
8c5023de6bb9b3cbe2bc28c1c823030dfd708db4
[ "MIT" ]
2
2021-09-07T23:43:53.000Z
2022-01-13T00:39:55.000Z
ImageAnalysis/ImageAnalysis/python/source/image_parsing/__init__.py
mikebourbeauart/PerlerPrinter
8c5023de6bb9b3cbe2bc28c1c823030dfd708db4
[ "MIT" ]
1
2019-10-21T17:12:07.000Z
2019-10-21T17:12:07.000Z
''' Modules used to analyze and parse an image into a JSON file for the Perler Printer to read. '''
33
91
0.737374
18
99
4.055556
0.944444
0
0
0
0
0
0
0
0
0
0
0
0.191919
99
3
92
33
0.9125
0.919192
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
1058807fc1d88fb5653f8b8f5d32e8dd40d318c6
97
py
Python
bodynavigation/__init__.py
mjirik/bodynavigation
99db34cd4b8d9508784746df27356dda31468fe4
[ "MIT" ]
5
2017-03-26T20:24:59.000Z
2021-04-10T04:13:05.000Z
bodynavigation/__init__.py
mjirik/bodynavigation
99db34cd4b8d9508784746df27356dda31468fe4
[ "MIT" ]
null
null
null
bodynavigation/__init__.py
mjirik/bodynavigation
99db34cd4b8d9508784746df27356dda31468fe4
[ "MIT" ]
2
2017-03-27T17:24:52.000Z
2022-01-28T01:55:20.000Z
__all__ = ["body_navigation"] from .body_navigation import BodyNavigation __version__ = "0.6.1"
19.4
43
0.773196
12
97
5.416667
0.833333
0.430769
0
0
0
0
0
0
0
0
0
0.034884
0.113402
97
4
44
24.25
0.72093
0
0
0
0
0
0.206186
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
109af9bd4fd478c8f00eb08693a6ab281c2cc169
472
py
Python
python/ql/test/experimental/library-tests/frameworks/stdlib/FileSystemAccess.py
NateD-MSFT/codeql
f0d5a91d59dca9a67e772beaa8b7f73ba876d831
[ "MIT" ]
3
2021-07-13T22:29:19.000Z
2022-01-13T22:45:30.000Z
python/ql/test/experimental/library-tests/frameworks/stdlib/FileSystemAccess.py
00mjk/codeql
dfc0e9b90619aab50bf8a5a6c5d1966bcc1f5b3b
[ "MIT" ]
1
2021-04-21T19:58:49.000Z
2021-04-21T19:58:49.000Z
python/ql/test/experimental/library-tests/frameworks/stdlib/FileSystemAccess.py
00mjk/codeql
dfc0e9b90619aab50bf8a5a6c5d1966bcc1f5b3b
[ "MIT" ]
null
null
null
import builtins import io open("filepath") # $getAPathArgument="filepath" open(file="filepath") # $getAPathArgument="filepath" o = open o("filepath") # $getAPathArgument="filepath" o(file="filepath") # $getAPathArgument="filepath" builtins.open("filepath") # $getAPathArgument="filepath" builtins.open(file="filepath") # $getAPathArgument="filepath" io.open("filepath") # $getAPathArgument="filepath" io.open(file="filepath") # $getAPathArgument="filepath"
24.842105
62
0.730932
46
472
7.5
0.173913
0.556522
0.742029
0.417391
0.817391
0
0
0
0
0
0
0
0.097458
472
18
63
26.222222
0.809859
0.489407
0
0
0
0
0.275862
0
0
0
0
0
0
1
0
false
0
0.181818
0
0.181818
0
0
0
0
null
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
109d4d7e18c51f04775543f1346de3ad6a43f44d
288
py
Python
src/util.py
Allvaa/musicbot-py
ecab805135cf2d486331c4daa5903907cc6912ea
[ "MIT" ]
1
2020-12-31T11:45:06.000Z
2020-12-31T11:45:06.000Z
src/util.py
Allvaa/musicbot-py
ecab805135cf2d486331c4daa5903907cc6912ea
[ "MIT" ]
null
null
null
src/util.py
Allvaa/musicbot-py
ecab805135cf2d486331c4daa5903907cc6912ea
[ "MIT" ]
1
2020-12-31T11:45:13.000Z
2020-12-31T11:45:13.000Z
from src.structures import client from discord.colour import Colour import discord class Util: def __init__(self, bot: "client.MusicBot") -> None: self.bot = bot def embed(self, **kwargs) -> discord.Embed: return discord.Embed(colour=Colour.blurple(), **kwargs)
26.181818
63
0.690972
37
288
5.27027
0.513514
0.123077
0
0
0
0
0
0
0
0
0
0
0.190972
288
10
64
28.8
0.83691
0
0
0
0
0
0.052083
0
0
0
0
0
0
1
0.25
false
0
0.375
0.125
0.875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
10a4fa5c321aad3b43fec2b8199697d3794df44b
1,559
py
Python
rbc/tests/test_utils.py
tupui/rbc
630805b6a32a1e53883dd8502e9a0679c9923b0b
[ "BSD-3-Clause" ]
21
2019-05-21T14:44:01.000Z
2021-12-09T21:48:36.000Z
rbc/tests/test_utils.py
tupui/rbc
630805b6a32a1e53883dd8502e9a0679c9923b0b
[ "BSD-3-Clause" ]
349
2019-07-31T17:48:21.000Z
2022-03-31T06:57:52.000Z
rbc/tests/test_utils.py
tupui/rbc
630805b6a32a1e53883dd8502e9a0679c9923b0b
[ "BSD-3-Clause" ]
10
2020-01-23T20:14:17.000Z
2022-02-08T20:43:08.000Z
from rbc.utils import is_localhost, get_local_ip, triple_matches, check_returns_none


def test_is_localhost():
    # The IP reported by get_local_ip must be classified as localhost.
    assert is_localhost(get_local_ip())


def test_triple_matches():
    # 'cuda' aliases the 64-bit NVPTX triple, in both directions.
    assert triple_matches('cuda', 'nvptx64-nvidia-cuda')
    assert triple_matches('nvptx64-nvidia-cuda', 'cuda')
    # 'cuda32' aliases the 32-bit NVPTX triple.
    assert triple_matches('cuda32', 'nvptx-nvidia-cuda')
    assert triple_matches('nvptx-nvidia-cuda', 'cuda32')
    # 'pc' and 'unknown' vendor fields are treated as equivalent.
    assert triple_matches('x86_64-pc-linux-gnu', 'x86_64-unknown-linux-gnu')


def test_check_returns_none():
    # Bare `return` yields None.
    def foo():
        return
    assert check_returns_none(foo)

    # Explicit `return None`.
    def foo():
        return None
    assert check_returns_none(foo)

    # Falling off the end of the body yields None.
    def foo():
        pass
    assert check_returns_none(foo)

    # Both branches return None.
    def foo():
        if 1:
            return
        else:
            return None
    assert check_returns_none(foo)

    # A non-None return value is detected.
    def foo():
        return 1
    assert not check_returns_none(foo)

    # NOTE(review): asserted True even though the else-branch returns 1 --
    # presumably the analyzer follows only the constant-true branch of
    # `if 1`; confirm against check_returns_none's implementation.
    def foo():
        if 1:
            return
        else:
            return 1
    assert check_returns_none(foo)

    # With a non-constant condition, the returning branch is considered.
    def foo():
        global a
        if a:
            return
        else:
            return 1
    assert not check_returns_none(foo)

    # Returning an argument is not treated as returning None, even when
    # its default is None.
    def foo(a=None):
        return a
    assert not check_returns_none(foo)

    # Mixed branches: one returns a value, one returns bare.
    def foo(a):
        if a:
            return a + 1
        else:
            return
    assert not check_returns_none(foo)

    # Parameters are fine as long as nothing is returned.
    def foo(a):
        pass
    assert check_returns_none(foo)

    # Known limitation: a local variable bound to None is not recognized.
    def foo():
        a = None
        return a
    assert not check_returns_none(foo)  # false-negative
18.783133
84
0.597819
202
1,559
4.391089
0.193069
0.175874
0.234498
0.235626
0.632469
0.507328
0.507328
0.472379
0.414882
0.281849
0
0.020638
0.316228
1,559
82
85
19.012195
0.811445
0.00898
0
0.754386
0
0
0.087492
0.015554
0
0
0
0
0.298246
1
0.245614
false
0.035088
0.017544
0.070175
0.491228
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
10b3ed5f6b6d67d88d2364ed06cd670b0bf26b0a
24,080
py
Python
Projects/LakhMidi/fetchData.py
liu2231665/Project-dl4s
615d504caf6f05b676be1c25621d2dd94e41ec54
[ "MIT" ]
null
null
null
Projects/LakhMidi/fetchData.py
liu2231665/Project-dl4s
615d504caf6f05b676be1c25621d2dd94e41ec54
[ "MIT" ]
null
null
null
Projects/LakhMidi/fetchData.py
liu2231665/Project-dl4s
615d504caf6f05b676be1c25621d2dd94e41ec54
[ "MIT" ]
null
null
null
"""######################################################################### Author: Yingru Liu Institute: Stony Brook University Descriptions: the files contain the tools to extract the piano-rolls representations of the midi files and to save them as hdf5 data-set. The Piano-rolls representation is binary matrix for each segment and the relationships of the instruments are neglected. ----2017.11.01 #########################################################################""" import os import urllib.request import tarfile import h5py import pretty_midi import numpy as np # Data name.URL. Lakh_HDF5 = "./dataset/Lakh_clean.hdf5" Lakh_RAW = "./dataset/clean_midi.tar.gz" Lakh_URL = "http://hog.ee.columbia.edu/craffel/lmd/clean_midi.tar.gz" TRAIN_RATIO = 0.9 Valid_RATIO = 0.95 """######################################################################### There are some files that can not be read by the prettyMidi. The reason is unknown. #########################################################################""" FAILLIST = [ './dataset/clean_midi/3T/Why.mid', './dataset/clean_midi/10cc/Dreadlock Holiday.4.mid', "./dataset/clean_midi/4 Non Blondes/What's Up.5.mid", "./dataset/clean_midi/a-ha/Take On Me.1.mid", "./dataset/clean_midi/Aaron Neville/Tell It Like It Is.mid", "./dataset/clean_midi/ABBA/I've Been Waiting For You.mid", "./dataset/clean_midi/ABBA/One Of Us.mid", "./dataset/clean_midi/ABBA/Take a Chance on Me.3.mid", "./dataset/clean_midi/ABBA/Thank You for the Music.2.mid", "./dataset/clean_midi/ABBA/Voulez Vous.1.mid", "./dataset/clean_midi/Aerosmith/Dream On.mid", "./dataset/clean_midi/Aerosmith/Pink.3.mid", "./dataset/clean_midi/Alice in Chains/Sludge Factory.mid", "./dataset/clean_midi/Alison Moyet/All Cried Out.mid", "./dataset/clean_midi/Amedeo Minghi/Decenni.1.mid", "./dataset/clean_midi/Amos, Tori/The Wrong Band.mid", "./dataset/clean_midi/Andre Brasseur/Early Bird Satellite.1.mid", "./dataset/clean_midi/Andre Hazes/Ik meen 't.2.mid", 
"./dataset/clean_midi/Aqua/Dr Jones.mid", "./dataset/clean_midi/Asia/Don't Cry.1.mid", "./dataset/clean_midi/Barry Manilow/Copa Cabana (disco).mid", "./dataset/clean_midi/Basie/I Left My Heart in San Francisco.mid", "./dataset/clean_midi/Beastie Boys/Girls.mid", "./dataset/clean_midi/Bee Gees/How Can You Mend a Broken Heart.1.mid", "./dataset/clean_midi/Bee Gees/Stayin Alive.3.mid", "./dataset/clean_midi/Bee Gees/Too Much Heaven.mid", "./dataset/clean_midi/Bernstein Leonard/West Side Story: Medley.1.mid", "./dataset/clean_midi/Billy Joel/Movin' Out (Anthony's Song).mid", "./dataset/clean_midi/Billy Joel/Pressure.2.mid", "./dataset/clean_midi/Billy Joel/Pressure.mid", "./dataset/clean_midi/Billy Swan/I Can Help.1.mid", "./dataset/clean_midi/Blondie/Dreaming.mid", "./dataset/clean_midi/Bon Jovi/Blaze of Glory.2.mid", "./dataset/clean_midi/Boyz II Men/End of the Road.2.mid", "./dataset/clean_midi/Brian McKnight/On the Down Low.mid", "./dataset/clean_midi/Bruce Springsteen/Tenth Avenue Freeze-Out.1.mid", "./dataset/clean_midi/Bryan Adams/(Everything I Do) I Do It For You.7.mid", "./dataset/clean_midi/Bryson/Tonight I Celebrate My Love For You.mid", "./dataset/clean_midi/Buddy Holly/Peggy Sue.1.mid", "./dataset/clean_midi/Bush/Glycerine.1.mid", "./dataset/clean_midi/Busta Rhymes/Woo Hah.mid", "./dataset/clean_midi/Cabrel Francis/Encore et encore.mid", "./dataset/clean_midi/Cabrel Francis/Question d'equilibre.mid", "./dataset/clean_midi/Camel/The Snow Goose.1.mid", "./dataset/clean_midi/Camel/The Snow Goose.mid", "./dataset/clean_midi/Celine Dion/That's The Way It Is.mid", "./dataset/clean_midi/Celine Dion/Where Does My Heart Beat Now.1.mid", "./dataset/clean_midi/Celine Dion/Where Does My Heart Beat Now.mid", "./dataset/clean_midi/Chic/Chic Mystique.mid", "./dataset/clean_midi/Chic/Take My Love.mid", "./dataset/clean_midi/Clayderman Richard/Ballade Pour Adeline.mid", "./dataset/clean_midi/Cocciante/Ammassati e distanti.mid", "./dataset/clean_midi/Commodores/Three Times 
a Lady.1.mid", "./dataset/clean_midi/Coolio/Gangsta's Paradise.3.mid", "./dataset/clean_midi/Cream/Strange Brew.1.mid", "./dataset/clean_midi/Crow Sheryl/Tomorrow Never Dies.mid", "./dataset/clean_midi/Daft Punk/Da Funk.mid", "./dataset/clean_midi/Dalla/Anna e Marco.mid", "./dataset/clean_midi/Dan Fogelberg/Leader Of The Band.mid", "./dataset/clean_midi/Darin Bobby/Splish Splash.mid", "./dataset/clean_midi/Dee/The Legend Of Xanadu.1.mid", "./dataset/clean_midi/Depeche Mode/Shake the Disease.mid", "./dataset/clean_midi/Earth, Wind & Fire/September (bonus track).4.mid", "./dataset/clean_midi/Elton John/A Word in Spanish.1.mid", "./dataset/clean_midi/Elton John/Nikita.mid", "./dataset/clean_midi/Emerson, Lake & Palmer/Fanfare for the Common Man.2.mid", "./dataset/clean_midi/Emerson, Lake & Palmer/Hoedown.mid", "./dataset/clean_midi/Energy 52/Cafe del Mar.mid", "./dataset/clean_midi/Enya/Bard Dance.mid", "./dataset/clean_midi/Eric Clapton/Tears in Heaven.7.mid", "./dataset/clean_midi/Eurythmics/Sweet Dreams.4.mid", "./dataset/clean_midi/Eurythmics/Who's That Girl.mid", "./dataset/clean_midi/Frank Sinatra/Summer Wind.mid", "./dataset/clean_midi/Garbage/Cherry Lips.mid", "./dataset/clean_midi/Garbage/Stupid Girl.3.mid", "./dataset/clean_midi/Genesis/Abacab.3.mid", "./dataset/clean_midi/Genesis/Misunderstanding.3.mid", "./dataset/clean_midi/Gina G/Ooh Ahh Just a Little Bit.mid", "./dataset/clean_midi/Gompie/Alice (Who the X Is Alice) (Living Next Door to Alice).mid", "./dataset/clean_midi/Henry Arland/Rosenmelodie.mid", "./dataset/clean_midi/Henry Mancini/Moon River.mid", "./dataset/clean_midi/Huey Lewis & The News/The Power of Love.mid", "./dataset/clean_midi/Jackson Michael/Childhood.mid", "./dataset/clean_midi/Jackson Michael/Don't Stop 'Til You Get Enough.mid", "./dataset/clean_midi/Jackson Michael/I'll Be There.mid", "./dataset/clean_midi/Jackson Michael/Smooth Criminal.4.mid", "./dataset/clean_midi/Jackson Michael/Smooth Criminal.mid", "./dataset/clean_midi/Jackson 
Michael/Thriller.3.mid", "./dataset/clean_midi/Jackson Michael/You Are Not Alone.mid", "./dataset/clean_midi/Jamiroquai/Canned Heat.1.mid", "./dataset/clean_midi/Jean Michel Jarre/Calypso, Part 2.mid", "./dataset/clean_midi/Jennifer Lopez/If You Had My Love.mid", "./dataset/clean_midi/Jethro Tull/Rainbow Blues.mid", "./dataset/clean_midi/Jimi Hendrix/Purple Haze.2.mid", "./dataset/clean_midi/John Elton/Nikita.2.mid", "./dataset/clean_midi/John Paul Young/Love is in the Air.3.mid", "./dataset/clean_midi/Johnny Mercer/Come Rain or Come Shine.mid", "./dataset/clean_midi/Journey/Any Way You Want It.mid", "./dataset/clean_midi/Journey/Lights.mid", "./dataset/clean_midi/Journey/Lovin' Touchin' Squeezin'.mid", "./dataset/clean_midi/Jovanotti/Penso Positivo.mid", "./dataset/clean_midi/Juan Luis Guerra/La Bilirrubina.1.mid", "./dataset/clean_midi/Kalua Beach Boys/There's No Place Like Hawaii.mid", "./dataset/clean_midi/Kylie Minogue/Better the Devil.mid", "./dataset/clean_midi/Kylie Minogue/Especially for You.1.mid", "./dataset/clean_midi/Last/Rosamunde.1.mid", "./dataset/clean_midi/Led Zeppelin/Custard Pie.mid", "./dataset/clean_midi/Led Zeppelin/In The Evening.mid", "./dataset/clean_midi/Led Zeppelin/Stairway To Heaven.mid", "./dataset/clean_midi/Lenny Kravitz/I Belong To You.mid", "./dataset/clean_midi/Level 42/Lessons in Love.2.mid", "./dataset/clean_midi/Live/Selling the Drama.mid", "./dataset/clean_midi/LL Cool J/Hey Lover.mid", "./dataset/clean_midi/Los Del Rio/Macarena.1.mid", "./dataset/clean_midi/Los Lobos/La Bamba.mid", "./dataset/clean_midi/Madonna/Deeper and Deeper.mid", "./dataset/clean_midi/Madonna/Like A Virgin.mid", "./dataset/clean_midi/Madonna/Rain.4.mid", "./dataset/clean_midi/Marc Anthony/Vivir Lo Nuestro.mid", "./dataset/clean_midi/Mariah Carey/Forever.mid", "./dataset/clean_midi/Mariah Carey/Long Ago.mid", "./dataset/clean_midi/Marley Bob/Iron Lion Zion.mid", "./dataset/clean_midi/Matt Bianco/Half a Minute.mid", "./dataset/clean_midi/McDonald/Yah Mo B 
There.mid", "./dataset/clean_midi/Metallica/(Anesthesia)-Pulling Teeth.mid", "./dataset/clean_midi/Metallica/The Shortest Straw.1.mid", "./dataset/clean_midi/Metallica/The Unforgiven.3.mid", "./dataset/clean_midi/Metallica/The Unforgiven.5.mid", "./dataset/clean_midi/Miami Sound Machine/Here We Are.mid", "./dataset/clean_midi/Miller/When Sunny Gets Blue.mid", "./dataset/clean_midi/Morton Jelly Roll/Frog-I-More Rag.mid", "./dataset/clean_midi/Neil Diamond/(Encore) Cracklin' Rose.mid", "./dataset/clean_midi/Neil Diamond/Hello Again.mid", "./dataset/clean_midi/Nek/Se io non avessi te.2.mid", "./dataset/clean_midi/Nelly/Country Grammar (Hot ...).mid", "./dataset/clean_midi/Nick Kamen/I Promised Myself.1.mid", "./dataset/clean_midi/Nini Rosso/Il Silenzio.1.mid", "./dataset/clean_midi/Nirvana/(New Wave) Polly.mid", "./dataset/clean_midi/Nirvana/All Apologies.1.mid", "./dataset/clean_midi/Nirvana/Aneurysm.mid", "./dataset/clean_midi/Nirvana/Been a Son.mid", "./dataset/clean_midi/Nirvana/Dive.mid", "./dataset/clean_midi/Nirvana/Downer.1.mid", "./dataset/clean_midi/Nirvana/Drain You.mid", "./dataset/clean_midi/Nirvana/Dumb.2.mid", "./dataset/clean_midi/Nirvana/Frances Farmer Will Have Her Revenge on Seattle.1.mid", "./dataset/clean_midi/Nirvana/Hairspray Queen.1.mid", "./dataset/clean_midi/Nirvana/Heart-Shaped Box.mid", "./dataset/clean_midi/Nirvana/Lounge Act.mid", "./dataset/clean_midi/Nirvana/Love Buzz.mid", "./dataset/clean_midi/Nirvana/Mexican Seafood.mid", "./dataset/clean_midi/Nirvana/Milk It.1.mid", "./dataset/clean_midi/Nirvana/Mr. 
Moustache.mid", "./dataset/clean_midi/Nirvana/Negative Creep.mid", "./dataset/clean_midi/Nirvana/Pennyroyal Tea.2.mid", "./dataset/clean_midi/Nirvana/Polly.1.mid", "./dataset/clean_midi/Nirvana/Radio Friendly Unit Shifter.1.mid", "./dataset/clean_midi/Nirvana/Scentless Apprentice.mid", "./dataset/clean_midi/Nirvana/Sliver.2.mid", "./dataset/clean_midi/Nirvana/Smells Like Teen Spirit.8.mid", "./dataset/clean_midi/Nirvana/Son of a Gun.2.mid", "./dataset/clean_midi/Nirvana/Swap Meet.mid", "./dataset/clean_midi/Nirvana/Territorial Pissings.mid", "./dataset/clean_midi/Nirvana/tourette's.1.mid", "./dataset/clean_midi/Nirvana/Turnaround.mid", "./dataset/clean_midi/Nomadi/Il vento del nord.mid", "./dataset/clean_midi/Nomadi/Io vagabondo.2.mid", "./dataset/clean_midi/Nomadi/Io vagabondo.4.mid", "./dataset/clean_midi/Parker Charlie/Donna Lee.mid", "./dataset/clean_midi/Pet Shop Boys/Always on My Mind In My House.3.mid", "./dataset/clean_midi/Peter Gabriel/Solsbury Hill.1.mid", "./dataset/clean_midi/Peter Gabriel/Solsbury Hill.mid", "./dataset/clean_midi/Peter, Paul & Mary/Puff.1.mid", "./dataset/clean_midi/Phish/Taste.mid", "./dataset/clean_midi/Pink Floyd/Echoes.1.mid", "./dataset/clean_midi/Pink Floyd/Echoes.mid", "./dataset/clean_midi/Pink Floyd/Hey You.mid", "./dataset/clean_midi/Pink Floyd/High Hopes.2.mid", "./dataset/clean_midi/Pink Floyd/Nobody Home.mid", "./dataset/clean_midi/Pink Floyd/The Trial.1.mid", "./dataset/clean_midi/Pink Floyd/Wish You Were Here.mid", "./dataset/clean_midi/Pooh/Il cielo e blu sopra le nuvole.mid", "./dataset/clean_midi/Prince/Let's Go Crazy.3.mid", "./dataset/clean_midi/R.E.M./Nightswimming.mid", "./dataset/clean_midi/Radiohead/Airbag.mid", "./dataset/clean_midi/Radiohead/Climbing Up the Walls.mid", "./dataset/clean_midi/Radiohead/Karma Police.mid", "./dataset/clean_midi/Radiohead/Subterranean Homesick Alien.mid", "./dataset/clean_midi/Rammstein/Stripped.mid", "./dataset/clean_midi/Rascel/Arrividerci Roma.mid", 
"./dataset/clean_midi/Rednex/The Ultimate Rednex Mega Mix, Part 3: Cotton Eye Joe.mid", "./dataset/clean_midi/Rene Carol/Rote Rosen rote Lippen roter Wein.mid", "./dataset/clean_midi/Rene Froger/Thunder in My Heart.mid", "./dataset/clean_midi/Right Said Fred/Don't Talk Just Kiss.mid", "./dataset/clean_midi/Rob Zombie/Demonoid Phenomenon.mid", "./dataset/clean_midi/Robert Palmer/I'll Be Your Baby Tonight.1.mid", "./dataset/clean_midi/Ross/When You Tell Me That You Love Me.mid", "./dataset/clean_midi/Rossi Vasco/Vivere.mid", "./dataset/clean_midi/Roxette/It Must Have Been Love (live studio).3.mid", "./dataset/clean_midi/Roxette/The Look.mid", "./dataset/clean_midi/Roy Orbison/Oh Pretty Woman.2.mid", "./dataset/clean_midi/Rush/Broon's Bane.1.mid", "./dataset/clean_midi/Rush/Closer to the Heart.3.mid", "./dataset/clean_midi/Rush/Leave That Thing Alone.mid", "./dataset/clean_midi/Ryan Paris/La Dolce Vita.1.mid", "./dataset/clean_midi/Sam Cooke/Twisting the Night Away.mid", "./dataset/clean_midi/Sarah McLachlan/Adia.1.mid", "./dataset/clean_midi/Scott McKenzie/Forrest Gump: San Francisco (Be Sure to Wear Some Flowers in Your Hair).1.mid", "./dataset/clean_midi/Shakatak/Night Birds.1.mid", "./dataset/clean_midi/Shirley Bassey/Big Spender.mid", "./dataset/clean_midi/Simon & Garfunkel/The Sound of Silence.2.mid", "./dataset/clean_midi/Sinatra/Where or When.mid", "./dataset/clean_midi/Stan Kenton/Here's That Rainy Day.mid", "./dataset/clean_midi/Steppenwolf/Born To Be Wild.3.mid", "./dataset/clean_midi/Stevens Shakin /You Drive Me Crazy.1.mid", "./dataset/clean_midi/Sting/Fields of Gold.1.mid", "./dataset/clean_midi/Sting/Fields of Gold.7.mid", "./dataset/clean_midi/Styx/Come Sail Away.4.mid", "./dataset/clean_midi/Supertramp/Bloody Well Right.mid", "./dataset/clean_midi/Supertramp/The Logical Song.7.mid", "./dataset/clean_midi/Survivor/Is This Love.mid", "./dataset/clean_midi/The Alan Parsons Project/Sirius Eye in the Sky.mid", "./dataset/clean_midi/The Beach Boys/Good 
Vibrations.6.mid", "./dataset/clean_midi/The Beach Boys/I Get Around.2.mid", "./dataset/clean_midi/The Beatles/All You Need Is Love.3.mid", "./dataset/clean_midi/The Beatles/Every Little Thing.mid", "./dataset/clean_midi/The Beatles/Fixing a Hole.mid", "./dataset/clean_midi/The Beatles/Fool on the Hill.2.mid", "./dataset/clean_midi/The Beatles/Fool on the Hill.mid", "./dataset/clean_midi/The Beatles/Get Back.3.mid", "./dataset/clean_midi/The Beatles/Let It Be.5.mid", "./dataset/clean_midi/The Beatles/Octopus's Garden.mid", "./dataset/clean_midi/The Beatles/Paperback Writer.4.mid", "./dataset/clean_midi/The Beatles/Ticket to Ride.6.mid", "./dataset/clean_midi/The Boomtown Rats/I Don't Like Monday's.1.mid", "./dataset/clean_midi/The Cranberries/Animal Instinct.mid", "./dataset/clean_midi/The Cranberries/Promises.1.mid", "./dataset/clean_midi/The Doors/Riders on the Storm.1.mid", "./dataset/clean_midi/The Doors/The Crystal Ship.3.mid", "./dataset/clean_midi/The Four Seasons/Rag Doll.mid", "./dataset/clean_midi/The Four Seasons/Walk Like A Man.mid", "./dataset/clean_midi/The KLF/Justified and Ancient.1.mid", "./dataset/clean_midi/The Knack/My Sharona.1.mid", "./dataset/clean_midi/The Offspring/Gone Away.1.mid", "./dataset/clean_midi/The Offspring/No Hero.mid", "./dataset/clean_midi/The Outhere Brothers/Don't Stop (Wiggle Wiggle).1.mid", "./dataset/clean_midi/The Platters/Only You.5.mid", "./dataset/clean_midi/The Police/Every Little Thing She Does Is Magic.4.mid", "./dataset/clean_midi/The Police/So Lonely.1.mid", "./dataset/clean_midi/The Prodigy/Climbatize.mid", "./dataset/clean_midi/The Prodigy/Wind It Up.mid", "./dataset/clean_midi/The Rolling Stones/The Last Time.mid", "./dataset/clean_midi/The Smashing Pumpkins/Cherub Rock.mid", "./dataset/clean_midi/The Stylistics/I'm Stone in Love With You.mid", "./dataset/clean_midi/Theodorakis Mikis/Zorba's Dance.1.mid", "./dataset/clean_midi/Third Eye Blind/Semi-Charmed Life.1.mid", "./dataset/clean_midi/Third Eye 
Blind/Semi-Charmed Life.mid", "./dataset/clean_midi/Tom Jones/Delilah.5.mid", "./dataset/clean_midi/Tool/Flood.1.mid", "./dataset/clean_midi/Tool/Flood.mid", "./dataset/clean_midi/Toto/I Won't Hold You Back.mid", "./dataset/clean_midi/TURNER TINA/Notbush City Limits.1.mid", "./dataset/clean_midi/U2/Walk On.mid", "./dataset/clean_midi/U2/Where the Streets Have No Name.7.mid", "./dataset/clean_midi/UB40/Homely Girl.mid", "./dataset/clean_midi/Us3/Cantaloop.1.mid", "./dataset/clean_midi/Van Halen/Jump.5.mid", "./dataset/clean_midi/Van Halen/Me Wise Magic.mid", "./dataset/clean_midi/Van Halen/Right Now.1.mid", "./dataset/clean_midi/Vangelis/Chariots of Fire.5.mid", "./dataset/clean_midi/Whigfield/Saturday Night.1.mid", "./dataset/clean_midi/Whitney Houston/I'm Every Woman.1.mid", "./dataset/clean_midi/Whitney Houston/Saving All My Love For You.2.mid", "./dataset/clean_midi/Wonder Stevie/Happy Birthday.mid", "./dataset/clean_midi/Wonder Stevie/Sir Duke.1.mid", "./dataset/clean_midi/Yanni/Aria.1.mid", "./dataset/clean_midi/Yanni/Marching Season.1.mid", "./dataset/clean_midi/Yanni/Secret Vows.1.mid", "./dataset/clean_midi/Yanni/Swept Away.mid", "./dataset/clean_midi/Yanni/The Rain Must Fall.1.mid", "./dataset/clean_midi/Yazz & plastic population/The Only Way Is Up.1.mid", "./dataset/clean_midi/Robert Palmer/Mercy Mercy Me I Want You.mid", ] """######################################################################### listFile: return the list of midi files in the directory and subdirectories of path. input: path - the root path. output: Dir - the list of midi files. #########################################################################""" def listFile(path): Dir = [] for dirName, subdirList, fileList in os.walk(path): for name in fileList: midiPath = os.path.join(dirName, name) # Check whether the midi file is corrupted. if midiPath in FAILLIST: continue # Check whether the path is a midi file. 
if midiPath[-4:] == '.mid': Dir.append(midiPath) return Dir """######################################################################### readMIDI: read the midi file and transfer it into piano-rolls. input: path - the path of the midi file. output: midi - the binary numpy array represents the piano-rolls (for the convenience of my research, I remove the information of the instruments). #########################################################################""" def readMIDI(path): midi = pretty_midi.PrettyMIDI(path).get_piano_roll(fs=4).T midi = midi > 0 return np.asarray(midi, 'float32') """######################################################################### fetchData: return the data-set or download and preprocess the raw data. input: None. output: Dataset - the preprocessed dataset. #########################################################################""" def fetchData(): Dataset = None times = 1 while 1: if os.path.exists(Lakh_HDF5): # load the .hdf5 dataset print("\x1b[1;34m----->> LOAD THE DATASET <<-----\x1b[0m") Dataset = h5py.File(Lakh_HDF5, 'r') break elif os.path.exists(Lakh_RAW): # pre-process the raw data and save as .hdf5 print("Step \x1b[1;34m%d\x1b[0m: unzip the raw data." % times) times +=1 # unzip the files. tar = tarfile.open(Lakh_RAW, "r:gz") tar.extractall(path='./dataset/') tar.close() Dir = listFile('./dataset/') print("Step \x1b[1;34m%d\x1b[0m: preprocess the raw data." % times) # read the raw data and save into hdf5. 
with h5py.File(Lakh_HDF5, 'w') as Dataset: Dataset.create_dataset('train', (1, 240, 128), maxshape=(None, 240, 128), chunks=True) Dataset.create_dataset('valid', (1, 240, 128), maxshape=(None, 240, 128), chunks=True) Dataset.create_dataset('test', (1, 240, 128), maxshape=(None, 240, 128), chunks=True) trainEND = 0 validEND = 0 testEND = 0 L = len(Dir) idx = 1 for midiPath in Dir: print("\x1b[1;35m%d/%d\x1b[0m: \x1b[1;34m%s\x1b[0m" % (idx, L, midiPath)) idx += 1 midi = readMIDI(midiPath) start = 0 while start + 240 <= midi.shape[0]: rand = np.random.uniform(0, 1.0001) if rand < TRAIN_RATIO: # save to train. Dataset['train'].resize((trainEND+1, 240, 128)) Dataset['train'][trainEND:trainEND+1, :, :] = np.reshape(midi[start:start+240, :], [1, 240, 128]) trainEND += 1 pass elif rand < Valid_RATIO: # save to valid. Dataset['valid'].resize((validEND + 1, 240, 128)) Dataset['valid'][validEND:validEND + 1, :, :] = np.reshape(midi[start:start + 240, :], [1, 240, 128]) validEND += 1 pass else: # save to test. Dataset['test'].resize((testEND + 1, 240, 128)) Dataset['test'][testEND:testEND + 1, :, :] = np.reshape(midi[start:start + 240, :], [1, 240, 128]) testEND += 1 # start += 240 print("\x1b[1;35mFinish fetching: train(%d)/valid(%d)/test(%d)\x1b[0m" % (trainEND, validEND, testEND)) else: # download the raw data. print("Step \x1b[1;34m%d\x1b[0m: download the raw data." % times) times += 1 # make a new folder to save the data. if not os.path.exists('./dataset'): os.makedirs('./dataset') urllib.request.urlretrieve(Lakh_URL, './dataset/clean_midi.tar.gz') if times > 3: raise(ValueError("The data fetching process is out of control!!")) if Dataset is None: raise(ValueError("The dataset is not loaded properly!!")) return Dataset """######################################################################### MAIN UNITEST FUNCTION. #########################################################################""" if __name__ == '__main__': fetchData()
55.102975
121
0.621968
3,264
24,080
4.492341
0.290135
0.17493
0.309896
0.364114
0.469276
0.270886
0.117711
0.062948
0.04126
0.033827
0
0.017506
0.20054
24,080
437
122
55.102975
0.744169
0.035922
0
0.024064
0
0.018717
0.717554
0.404023
0
0
0
0
0
1
0.008021
false
0.005348
0.016043
0
0.032086
0.016043
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
529b687550fa45b28e927f9a08de162d355481c8
2,145
py
Python
crypto4/src/writeup.py
userElaina/SpiritCTF-2021
c5e4ae05362da1f25718d2f80050709f3f99ef17
[ "MIT" ]
null
null
null
crypto4/src/writeup.py
userElaina/SpiritCTF-2021
c5e4ae05362da1f25718d2f80050709f3f99ef17
[ "MIT" ]
null
null
null
crypto4/src/writeup.py
userElaina/SpiritCTF-2021
c5e4ae05362da1f25718d2f80050709f3f99ef17
[ "MIT" ]
null
null
null
# RSA challenge parameters: ciphertext c, modulus n = p*q, and a "leak"
# whose base-7 digits are the digit-wise sums (mod 7) of p's and q's digits.
e = 65537
c = 19657997127631606992942743387466090025613514791616866084619244565511331009275751254657004083860029307546049827487765683663007284692019872203735287593122174936493232019489974221332262737999303566187062567433594292926330155392795066336870733605934927807359566256054189345062233835679528626798157928425828913137331951198719316416337309777578845263294669984583576274303216220859482713774864583614917007781814657899775029044851924030654910840067642005487252714413547605877161160456630898332376320424332786968077020196308300294854881998189470699894487747955189058432956954125029668448288816890383181280121947964328537054905
n = 20977857259009004424711131349341774112383791753795564430794793616464790390479989769557484064055028084153074073152844991128994649214378790382888555710128487632422934088201295584315660055983665794049139528596544323246746693540444532771859534416570253694662972101395455019811313081343328811776796351463256029940868268780147123230260406380136450556095080195573424801908898298656358177409932489697783110883421284539480928702439642749141044793226447844989989813154920100723960368631770397312345771959014325729955406518847921104706343418694270642936691229028340563922479884289319724075445589127885672037370161179619282365751
leak = 247984630138590638130309955318709885382617832928937021682534035647989431101989319702401538270789502279304198935102384995830111070375467400686480691876229511399973217455375533561841107036788669337640920604617120126204984977196400110445775877022130974599585271966668653175997228526126571745501085259171928699547


def orx(a, b, m):
    """Recover factors p, q of *b* from their digit-sum mask *a* in base *m*.

    Each base-m digit of *a* is (digit of p + digit of q) mod m.  The
    factors are rebuilt one digit at a time: for every position, try each
    candidate digit for p (q's digit follows from the mask) and keep the
    first pair whose product matches b modulo m**(ind+1).

    Args:
        a: the masked value (digit-wise sum mod m of p and q).
        b: the product p * q.
        m: the digit base (7 for this challenge).
    Returns:
        (p, q): a factor pair with p * q == b (order depends on the search).
    Raises:
        ValueError: if no digit pair is consistent at some position.
    """
    p = q = 0
    ind = 0
    while a:
        mod = m ** (ind + 1)
        target = b % mod
        for digit in range(m):
            cand_p = p + digit * m ** ind
            cand_q = q + ((a % m - digit) % m) * m ** ind
            if cand_p * cand_q % mod == target:
                p, q = cand_p, cand_q
                break
        else:
            # No digit choice is consistent with b at this position.
            raise ValueError('no consistent digit pair found')
        a //= m
        ind += 1
    return p, q


def main():
    """Factor n via the leak and decrypt the flag."""
    # Third-party dependency kept local so importing this module stays cheap.
    from Crypto.Util.number import inverse, long_to_bytes
    p, q = orx(leak, n, 7)
    phi = (p - 1) * (q - 1)
    d = inverse(e, phi)
    m = pow(c, d, n)
    print(long_to_bytes(m).decode('utf8'))
    # Spirit{eff7b389-48f8-4edd-b6ec-64ba15badc97}


if __name__ == '__main__':
    main()
65
622
0.839161
104
2,145
17.230769
0.5
0.011161
0.008371
0
0
0
0
0
0
0
0
0.824515
0.110023
2,145
32
623
67.03125
0.114196
0.020513
0
0
0
0
0.001935
0
0
1
0
0
0.035714
1
0.035714
false
0
0.035714
0
0.107143
0.035714
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
4
52b094fe1dbcf4982d4d66018671d8a808f65d74
209
py
Python
backend/workers/config.py
jmeisele/celery-farm
7e2638a24717b37f1886a816c45b9d88f89cc726
[ "MIT" ]
3
2022-01-07T20:30:31.000Z
2022-02-09T19:11:27.000Z
backend/workers/config.py
jmeisele/celery-farm
7e2638a24717b37f1886a816c45b9d88f89cc726
[ "MIT" ]
null
null
null
backend/workers/config.py
jmeisele/celery-farm
7e2638a24717b37f1886a816c45b9d88f89cc726
[ "MIT" ]
null
null
null
import os

from pydantic import BaseSettings


class CelerySettings(BaseSettings):
    """Celery connection settings for the worker processes.

    Both URLs are read eagerly from the process environment at class
    definition time; ``os.environ[...]`` raises ``KeyError`` immediately
    if either variable is unset.
    """

    # Message broker connection URL (required environment variable).
    BROKER_URL: str = os.environ["BROKER_URL"]
    # Result backend connection URL (required environment variable).
    BACKEND_URL: str = os.environ["BACKEND_URL"]


# Module-level singleton imported by the workers.
settings = CelerySettings()
17.416667
48
0.751196
25
209
6.12
0.52
0.117647
0.104575
0.196078
0
0
0
0
0
0
0
0
0.15311
209
11
49
19
0.864407
0
0
0
0
0
0.100478
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.833333
0
1
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
52b7dcd282174c108cb928f3b8a580076244216f
89
py
Python
train.py
piotlinski/ssd
169e14fb949f476626617364e5a540249addf75a
[ "MIT" ]
null
null
null
train.py
piotlinski/ssd
169e14fb949f476626617364e5a540249addf75a
[ "MIT" ]
null
null
null
train.py
piotlinski/ssd
169e14fb949f476626617364e5a540249addf75a
[ "MIT" ]
null
null
null
"""Train SSD model.""" from pytorch_ssd import cli if __name__ == "__main__": cli()
14.833333
27
0.651685
12
89
4.083333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.191011
89
5
28
17.8
0.680556
0.179775
0
0
0
0
0.119403
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
52bcc4883fc5399a610d9708b5bb7b058bceb2cb
102
py
Python
run.py
Dev-Tayal/Searchterm-Network-Map
488ec12b041cf2c8c25f6189494b685c2051c4ea
[ "MIT" ]
3
2021-05-16T17:44:22.000Z
2021-06-26T16:03:03.000Z
run.py
ritikverma2000/Searchterm-Network-Map
f81aebe8da56b389dd772a62d30f280240a16e44
[ "MIT" ]
1
2021-05-18T07:37:03.000Z
2021-05-18T07:37:03.000Z
run.py
ritikverma2000/Searchterm-Network-Map
f81aebe8da56b389dd772a62d30f280240a16e44
[ "MIT" ]
2
2021-05-18T07:26:06.000Z
2021-06-26T14:39:20.000Z
#!/usr/bin/env python3 if __name__ == "__main__": import networkmap networkmap.render_page()
17
28
0.696078
12
102
5.166667
0.916667
0
0
0
0
0
0
0
0
0
0
0.011905
0.176471
102
5
29
20.4
0.72619
0.205882
0
0
0
0
0.1
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
52c795365e502b789ebfce779e1192e3bc83b60f
3,000
py
Python
tensordata/utils/compress/_files_compress.py
Hourout/tensordata
cbef6742ee0d3bfc4b886358fc01618bb5b63603
[ "Apache-2.0" ]
13
2019-01-08T10:22:39.000Z
2020-06-17T10:02:47.000Z
tensordata/utils/compress/_files_compress.py
Hourout/tensordata
cbef6742ee0d3bfc4b886358fc01618bb5b63603
[ "Apache-2.0" ]
null
null
null
tensordata/utils/compress/_files_compress.py
Hourout/tensordata
cbef6742ee0d3bfc4b886358fc01618bb5b63603
[ "Apache-2.0" ]
1
2020-06-17T10:02:49.000Z
2020-06-17T10:02:49.000Z
import bz2 import gzip import zipfile import tarfile import rarfile import tensordata.gfile as gfile __all__ = ['files_zip', 'files_tar', 'files_bz2'] def files_zip(files, zip_name): """Compression files to .zip. Args: files: str or list if str, files should be file path; if list, files should be file path list. zip_name: str, compression files name. Return: zip_name: str, compression files name. """ assert isinstance(zip_name, str), '`zip_name` should be str.' if isinstance(files, str): assert not gfile.isdir(files), '`files` should be file path.' with zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED) as z: z.write(files) elif isinstance(files, list): with zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED) as z: for file in files: assert not gfile.isdir(file), 'Elements in the list should be file path.' z.write(file) else: raise ValueError('`files` should be type of str or list.') return zip_name def files_tar(files, tar_name): """Compression files to .tar. Args: files: str or list if str, files should be file path; if list, files should be file path list. tar_name: str, compression files name. Return: tar_name: str, compression files name. """ assert isinstance(tar_name, str), '`tar_name` should be str.' if isinstance(files, str): assert not gfile.isdir(files), '`files` should be file path.' with tarfile.TarFile(tar_name, 'w') as t: t.add(files) elif isinstance(files, list): with tarfile.TarFile(tar_name, 'w') as t: for file in files: assert not gfile.isdir(file), 'Elements in the list should be file path.' t.add(file) else: raise ValueError('`files` should be type of str or list.') return tar_name def files_bz2(files, bz2_name): """Compression files to .bz2. Args: files: str or list if str, files should be file path; if list, files should be file path list. zip_name: str, compression files name. Return: zip_name: str, compression files name. """ assert isinstance(bz2_name, str), '`bz2_name` should be str.' 
if isinstance(files, str): assert not gfile.isdir(files), '`files` should be file path.' with bz2.BZ2File(bz2_name, 'w') as b: with open(files, 'rb') as f: b.write(f.read()) elif isinstance(files, list): with bz2.BZ2File(bz2_name, 'w') as b: for file in files: assert not gfile.isdir(file), 'Elements in the list should be file path.' with open(file, 'rb') as f: b.write(f.read()) else: raise ValueError('`files` should be type of str or list.') return bz2_name
34.482759
89
0.595667
414
3,000
4.229469
0.130435
0.082239
0.089092
0.109652
0.802399
0.786979
0.732724
0.689891
0.632781
0.632781
0
0.006724
0.306
3,000
86
90
34.883721
0.834294
0.248667
0
0.568627
0
0
0.203573
0
0
0
0
0
0.176471
1
0.058824
false
0
0.117647
0
0.235294
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
5e2dc3875c06933360096d6719d92ba2b0ed512b
198
py
Python
pyqwikswitch/__init__.py
kellerza/pyqwikswitch
9d4f080048221eaee93e3eefcf641919ff1af586
[ "MIT" ]
null
null
null
pyqwikswitch/__init__.py
kellerza/pyqwikswitch
9d4f080048221eaee93e3eefcf641919ff1af586
[ "MIT" ]
2
2018-04-14T11:29:13.000Z
2018-05-18T22:11:47.000Z
pyqwikswitch/__init__.py
kellerza/pyqwikswitch
9d4f080048221eaee93e3eefcf641919ff1af586
[ "MIT" ]
4
2018-04-14T12:30:54.000Z
2018-10-07T17:08:34.000Z
"""QwikSwitch USB Modem library for Python. See: http://www.qwikswitch.co.za/qs-usb.php Currently supports relays, buttons and LED dimmers Source: http://www.github.com/kellerza/pyqwikswitch """
22
51
0.762626
29
198
5.206897
0.862069
0.092715
0
0
0
0
0
0
0
0
0
0
0.106061
198
8
52
24.75
0.853107
0.959596
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
eab54bf827306a4dd2b3681a498da34ceff0514e
71
py
Python
co2mini_server.py
cointoss1973/co2mini-monitor
4779a33b32434044074750ae92255028e0f74ec2
[ "MIT" ]
null
null
null
co2mini_server.py
cointoss1973/co2mini-monitor
4779a33b32434044074750ae92255028e0f74ec2
[ "MIT" ]
null
null
null
co2mini_server.py
cointoss1973/co2mini-monitor
4779a33b32434044074750ae92255028e0f74ec2
[ "MIT" ]
null
null
null
#!/usr/bin/env python import co2meter as co2 co2.start_server()
11.833333
23
0.690141
11
71
4.363636
0.909091
0
0
0
0
0
0
0
0
0
0
0.052632
0.197183
71
5
24
14.2
0.789474
0.28169
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
eabd14be65b0ee635e18860280a78c2f20ef704f
157
py
Python
.virtualenvs/django19/bin/django-admin.py
zrhans/pythonanywhere
1e9dc9a8ce59872f89e5c61a34b8b858a3cad0ee
[ "Apache-2.0" ]
null
null
null
.virtualenvs/django19/bin/django-admin.py
zrhans/pythonanywhere
1e9dc9a8ce59872f89e5c61a34b8b858a3cad0ee
[ "Apache-2.0" ]
null
null
null
.virtualenvs/django19/bin/django-admin.py
zrhans/pythonanywhere
1e9dc9a8ce59872f89e5c61a34b8b858a3cad0ee
[ "Apache-2.0" ]
null
null
null
#!/home/zrhans/.virtualenvs/django19/bin/python3.4 from django.core import management if __name__ == "__main__": management.execute_from_command_line()
26.166667
50
0.783439
20
157
5.6
0.9
0
0
0
0
0
0
0
0
0
0
0.028169
0.095541
157
5
51
31.4
0.760563
0.312102
0
0
0
0
0.074766
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
eabe5cbe8e33bc013acea394ed56242fc11c9193
115
py
Python
url.py
Smyja/innovation-sandbox-python
3d44792e5f9dc1858a0b44704164ecd16f5e04d4
[ "MIT" ]
1
2020-03-09T23:15:27.000Z
2020-03-09T23:15:27.000Z
url.py
Devcyclops/innovation-sandbox-python
cdf3d3782be9de643c7191e73d92294de3a20373
[ "MIT" ]
2
2020-03-09T12:18:45.000Z
2021-06-02T00:59:26.000Z
url.py
Devcyclops/innovation-sandbox-python
cdf3d3782be9de643c7191e73d92294de3a20373
[ "MIT" ]
3
2020-03-07T09:42:59.000Z
2020-03-26T21:58:46.000Z
def url(your_url): root = 'https://sandboxapi.fsi.ng' if your_url: root = your_url return root
19.166667
38
0.608696
17
115
3.941176
0.588235
0.313433
0.328358
0
0
0
0
0
0
0
0
0
0.278261
115
5
39
23
0.807229
0
0
0
0
0
0.217391
0
0
0
0
0
0
1
0.2
false
0
0
0
0.4
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
ead4cd793c1b64d224785d1cc3fc1e0ddd6b9040
526
py
Python
container_service_extension/rde/models/abstractNativeEntity.py
arunmk/container-service-extension
5e67df64fd5ed7fbb664d449356cb983cecbca12
[ "BSD-3-Clause" ]
81
2017-07-05T19:42:41.000Z
2022-03-09T22:04:05.000Z
container_service_extension/rde/models/abstractNativeEntity.py
arunmk/container-service-extension
5e67df64fd5ed7fbb664d449356cb983cecbca12
[ "BSD-3-Clause" ]
670
2017-07-05T16:48:02.000Z
2022-03-31T13:40:53.000Z
container_service_extension/rde/models/abstractNativeEntity.py
arunmk/container-service-extension
5e67df64fd5ed7fbb664d449356cb983cecbca12
[ "BSD-3-Clause" ]
64
2017-07-05T16:32:55.000Z
2022-03-23T09:36:03.000Z
# container-service-extension # Copyright (c) 2021 VMware, Inc. All Rights Reserved. # SPDX-License-Identifier: BSD-2-Clause import abc class AbstractNativeEntity(abc.ABC): @classmethod @abc.abstractmethod def from_native_entity(cls, native_entity): pass @classmethod @abc.abstractmethod def from_cluster_data(cls, cluster: dict, kind: str, **kwargs): pass @classmethod @abc.abstractmethod def get_sample_native_cluster_specification(cls, k8_runtime=''): pass
21.916667
68
0.705323
61
526
5.918033
0.655738
0.116343
0.232687
0.257618
0.301939
0
0
0
0
0
0
0.014286
0.201521
526
23
69
22.869565
0.845238
0.224335
0
0.642857
0
0
0
0
0
0
0
0
0
1
0.214286
false
0.214286
0.071429
0
0.357143
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
eaea092eafb5addf68c151dc21586c60b5c8f8c2
1,641
py
Python
test/likelihoods/test_multitask_gaussian_likelihood.py
techshot25/gpytorch
b4aee6f81a3428172d4914e7e0fef0e71cd1f519
[ "MIT" ]
1
2019-11-08T11:25:56.000Z
2019-11-08T11:25:56.000Z
test/likelihoods/test_multitask_gaussian_likelihood.py
VonRosenchild/gpytorch
092d523027a844939ba85d7ea8c8c7b7511843d5
[ "MIT" ]
null
null
null
test/likelihoods/test_multitask_gaussian_likelihood.py
VonRosenchild/gpytorch
092d523027a844939ba85d7ea8c8c7b7511843d5
[ "MIT" ]
1
2021-07-02T19:40:07.000Z
2021-07-02T19:40:07.000Z
#!/usr/bin/env python3 import unittest import torch from gpytorch.lazy import KroneckerProductLazyTensor, RootLazyTensor from gpytorch.likelihoods import MultitaskGaussianLikelihood from gpytorch.distributions import MultitaskMultivariateNormal from gpytorch.test.base_likelihood_test_case import BaseLikelihoodTestCase class TestMultitaskGaussianLikelihood(BaseLikelihoodTestCase, unittest.TestCase): seed = 0 def _create_conditional_input(self, batch_shape=torch.Size([])): return torch.randn(*batch_shape, 5, 4) def _create_marginal_input(self, batch_shape=torch.Size([])): mat = torch.randn(*batch_shape, 5, 5) mat2 = torch.randn(*batch_shape, 4, 4) covar = KroneckerProductLazyTensor(RootLazyTensor(mat), RootLazyTensor(mat2)) return MultitaskMultivariateNormal(torch.randn(*batch_shape, 5, 4), covar) def _create_targets(self, batch_shape=torch.Size([])): return torch.randn(*batch_shape, 5, 4) def create_likelihood(self): return MultitaskGaussianLikelihood(num_tasks=4, rank=2) class TestMultitaskGaussianLikelihoodBatch(TestMultitaskGaussianLikelihood): seed = 0 def create_likelihood(self): return MultitaskGaussianLikelihood(num_tasks=4, rank=2, batch_shape=torch.Size([3])) def test_nonbatch(self): pass class TestMultitaskGaussianLikelihoodMultiBatch(TestMultitaskGaussianLikelihood): seed = 0 def create_likelihood(self): return MultitaskGaussianLikelihood(num_tasks=4, rank=2, batch_shape=torch.Size([2, 3])) def test_nonbatch(self): pass def test_batch(self): pass
32.176471
95
0.751371
178
1,641
6.758427
0.292135
0.083126
0.062344
0.078969
0.461347
0.44389
0.358271
0.358271
0.358271
0.358271
0
0.018129
0.159659
1,641
50
96
32.82
0.854242
0.012797
0
0.393939
0
0
0
0
0
0
0
0
0
1
0.272727
false
0.090909
0.181818
0.151515
0.818182
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
d810fae987525e13632028a32a1cb71c51a8037a
253
py
Python
string_func/example1.py
iansim/pythondemo
d69be8b1c7b6038e15f3ba073a35a1b4790ebc58
[ "MIT" ]
null
null
null
string_func/example1.py
iansim/pythondemo
d69be8b1c7b6038e15f3ba073a35a1b4790ebc58
[ "MIT" ]
null
null
null
string_func/example1.py
iansim/pythondemo
d69be8b1c7b6038e15f3ba073a35a1b4790ebc58
[ "MIT" ]
null
null
null
# example1.py import stringLength import stringToLower import stringToUpper some_string = "Hello, Universe!" print(stringLength.stringLength(some_string)) print(stringToLower.stringToLower(some_string)) print(stringToUpper.stringToUpper(some_string))
23
47
0.84585
27
253
7.777778
0.407407
0.190476
0.219048
0
0
0
0
0
0
0
0
0.004237
0.067194
253
11
48
23
0.885593
0.043478
0
0
0
0
0.06639
0
0
0
0
0
0
1
0
false
0
0.428571
0
0.428571
0.428571
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
1
0
4
d82205606f03cedf71bd47591305462a9b1f29dc
186
py
Python
src/pipertask/package/pipertask/task.py
piper-tools/pipertask
7c1789fee0a0a88f8a57fed698082d061e44dfaf
[ "Apache-2.0" ]
null
null
null
src/pipertask/package/pipertask/task.py
piper-tools/pipertask
7c1789fee0a0a88f8a57fed698082d061e44dfaf
[ "Apache-2.0" ]
null
null
null
src/pipertask/package/pipertask/task.py
piper-tools/pipertask
7c1789fee0a0a88f8a57fed698082d061e44dfaf
[ "Apache-2.0" ]
null
null
null
import abc from typing import Dict, Any class PiperTask(abc.ABC): @abc.abstractmethod def perform(self, context: str, config: Dict[str, Any]) -> None: pass
16.909091
69
0.629032
24
186
4.875
0.708333
0.102564
0
0
0
0
0
0
0
0
0
0
0.268817
186
10
70
18.6
0.860294
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0.166667
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
4
d832718744c0306712f8088ee1ff74059702b806
90
py
Python
my_django_project/my_books/apps.py
phvv-me/python-django-deploy-sync-async
42153cc3e0094af9efc576753d20870424b25537
[ "MIT" ]
null
null
null
my_django_project/my_books/apps.py
phvv-me/python-django-deploy-sync-async
42153cc3e0094af9efc576753d20870424b25537
[ "MIT" ]
null
null
null
my_django_project/my_books/apps.py
phvv-me/python-django-deploy-sync-async
42153cc3e0094af9efc576753d20870424b25537
[ "MIT" ]
null
null
null
from django.apps import AppConfig class MyBooksConfig(AppConfig): name = 'my_books'
15
33
0.755556
11
90
6.090909
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.166667
90
5
34
18
0.893333
0
0
0
0
0
0.088889
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
dc7485447605bd6b6aad607e3152d33ed5e925ba
32,842
py
Python
tensorflow_checkpoint_reader/pb/tensorflow/core/protobuf/struct_pb2.py
shawwn/tensorflow-checkpoint-reader
f0e65548411e3bd66a07e36bb1850907a05952d0
[ "MIT" ]
1
2021-12-02T15:06:09.000Z
2021-12-02T15:06:09.000Z
tensorflow_checkpoint_reader/pb/tensorflow/core/protobuf/struct_pb2.py
shawwn/tensorflow-checkpoint-reader
f0e65548411e3bd66a07e36bb1850907a05952d0
[ "MIT" ]
null
null
null
tensorflow_checkpoint_reader/pb/tensorflow/core/protobuf/struct_pb2.py
shawwn/tensorflow-checkpoint-reader
f0e65548411e3bd66a07e36bb1850907a05952d0
[ "MIT" ]
null
null
null
'Generated protocol buffer code.' from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database _sym_db = _symbol_database.Default() from ....tensorflow.core.framework import tensor_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__pb2 from ....tensorflow.core.framework import tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2 from ....tensorflow.core.framework import types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2 DESCRIPTOR = _descriptor.FileDescriptor(name='tensorflow/core/protobuf/struct.proto', package='tensorflow', syntax='proto3', serialized_options=b'ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto', create_key=_descriptor._internal_create_key, serialized_pb=b'\n%tensorflow/core/protobuf/struct.proto\x12\ntensorflow\x1a&tensorflow/core/framework/tensor.proto\x1a,tensorflow/core/framework/tensor_shape.proto\x1a%tensorflow/core/framework/types.proto"\x90\x05\n\x0fStructuredValue\x12+\n\nnone_value\x18\x01 \x01(\x0b2\x15.tensorflow.NoneValueH\x00\x12\x17\n\rfloat64_value\x18\x0b \x01(\x01H\x00\x12\x15\n\x0bint64_value\x18\x0c \x01(\x12H\x00\x12\x16\n\x0cstring_value\x18\r \x01(\tH\x00\x12\x14\n\nbool_value\x18\x0e \x01(\x08H\x00\x12:\n\x12tensor_shape_value\x18\x1f \x01(\x0b2\x1c.tensorflow.TensorShapeProtoH\x00\x122\n\x12tensor_dtype_value\x18 \x01(\x0e2\x14.tensorflow.DataTypeH\x00\x128\n\x11tensor_spec_value\x18! 
\x01(\x0b2\x1b.tensorflow.TensorSpecProtoH\x00\x124\n\x0ftype_spec_value\x18" \x01(\x0b2\x19.tensorflow.TypeSpecProtoH\x00\x12G\n\x19bounded_tensor_spec_value\x18# \x01(\x0b2".tensorflow.BoundedTensorSpecProtoH\x00\x12+\n\nlist_value\x183 \x01(\x0b2\x15.tensorflow.ListValueH\x00\x12-\n\x0btuple_value\x184 \x01(\x0b2\x16.tensorflow.TupleValueH\x00\x12+\n\ndict_value\x185 \x01(\x0b2\x15.tensorflow.DictValueH\x00\x128\n\x11named_tuple_value\x186 \x01(\x0b2\x1b.tensorflow.NamedTupleValueH\x00B\x06\n\x04kind"\x0b\n\tNoneValue"8\n\tListValue\x12+\n\x06values\x18\x01 \x03(\x0b2\x1b.tensorflow.StructuredValue"9\n\nTupleValue\x12+\n\x06values\x18\x01 \x03(\x0b2\x1b.tensorflow.StructuredValue"\x8a\x01\n\tDictValue\x121\n\x06fields\x18\x01 \x03(\x0b2!.tensorflow.DictValue.FieldsEntry\x1aJ\n\x0bFieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b2\x1b.tensorflow.StructuredValue:\x028\x01"D\n\tPairValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b2\x1b.tensorflow.StructuredValue"F\n\x0fNamedTupleValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12%\n\x06values\x18\x02 \x03(\x0b2\x15.tensorflow.PairValue"q\n\x0fTensorSpecProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05shape\x18\x02 \x01(\x0b2\x1c.tensorflow.TensorShapeProto\x12#\n\x05dtype\x18\x03 \x01(\x0e2\x14.tensorflow.DataType"\xcc\x01\n\x16BoundedTensorSpecProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05shape\x18\x02 \x01(\x0b2\x1c.tensorflow.TensorShapeProto\x12#\n\x05dtype\x18\x03 \x01(\x0e2\x14.tensorflow.DataType\x12(\n\x07minimum\x18\x04 \x01(\x0b2\x17.tensorflow.TensorProto\x12(\n\x07maximum\x18\x05 \x01(\x0b2\x17.tensorflow.TensorProto"\xdb\x03\n\rTypeSpecProto\x12@\n\x0ftype_spec_class\x18\x01 \x01(\x0e2\'.tensorflow.TypeSpecProto.TypeSpecClass\x12/\n\ntype_state\x18\x02 \x01(\x0b2\x1b.tensorflow.StructuredValue\x12\x1c\n\x14type_spec_class_name\x18\x03 
\x01(\t"\xb8\x02\n\rTypeSpecClass\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x16\n\x12SPARSE_TENSOR_SPEC\x10\x01\x12\x17\n\x13INDEXED_SLICES_SPEC\x10\x02\x12\x16\n\x12RAGGED_TENSOR_SPEC\x10\x03\x12\x15\n\x11TENSOR_ARRAY_SPEC\x10\x04\x12\x15\n\x11DATA_DATASET_SPEC\x10\x05\x12\x16\n\x12DATA_ITERATOR_SPEC\x10\x06\x12\x11\n\rOPTIONAL_SPEC\x10\x07\x12\x14\n\x10PER_REPLICA_SPEC\x10\x08\x12\x11\n\rVARIABLE_SPEC\x10\t\x12\x16\n\x12ROW_PARTITION_SPEC\x10\n\x12\x18\n\x14REGISTERED_TYPE_SPEC\x10\x0c\x12\x17\n\x13EXTENSION_TYPE_SPEC\x10\r"\x04\x08\x0b\x10\x0bBWZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_protob\x06proto3', dependencies=[tensorflow_dot_core_dot_framework_dot_tensor__pb2.DESCRIPTOR, tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2.DESCRIPTOR, tensorflow_dot_core_dot_framework_dot_types__pb2.DESCRIPTOR]) _TYPESPECPROTO_TYPESPECCLASS = _descriptor.EnumDescriptor(name='TypeSpecClass', full_name='tensorflow.TypeSpecProto.TypeSpecClass', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='UNKNOWN', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='SPARSE_TENSOR_SPEC', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='INDEXED_SLICES_SPEC', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='RAGGED_TENSOR_SPEC', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='TENSOR_ARRAY_SPEC', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='DATA_DATASET_SPEC', index=5, number=5, serialized_options=None, type=None, 
create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='DATA_ITERATOR_SPEC', index=6, number=6, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='OPTIONAL_SPEC', index=7, number=7, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='PER_REPLICA_SPEC', index=8, number=8, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='VARIABLE_SPEC', index=9, number=9, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='ROW_PARTITION_SPEC', index=10, number=10, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='REGISTERED_TYPE_SPEC', index=11, number=12, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='EXTENSION_TYPE_SPEC', index=12, number=13, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=1736, serialized_end=2048) _sym_db.RegisterEnumDescriptor(_TYPESPECPROTO_TYPESPECCLASS) _STRUCTUREDVALUE = _descriptor.Descriptor(name='StructuredValue', full_name='tensorflow.StructuredValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='none_value', full_name='tensorflow.StructuredValue.none_value', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='float64_value', 
full_name='tensorflow.StructuredValue.float64_value', index=1, number=11, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='int64_value', full_name='tensorflow.StructuredValue.int64_value', index=2, number=12, type=18, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='string_value', full_name='tensorflow.StructuredValue.string_value', index=3, number=13, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='bool_value', full_name='tensorflow.StructuredValue.bool_value', index=4, number=14, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='tensor_shape_value', full_name='tensorflow.StructuredValue.tensor_shape_value', index=5, number=31, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='tensor_dtype_value', 
full_name='tensorflow.StructuredValue.tensor_dtype_value', index=6, number=32, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='tensor_spec_value', full_name='tensorflow.StructuredValue.tensor_spec_value', index=7, number=33, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='type_spec_value', full_name='tensorflow.StructuredValue.type_spec_value', index=8, number=34, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='bounded_tensor_spec_value', full_name='tensorflow.StructuredValue.bounded_tensor_spec_value', index=9, number=35, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='list_value', full_name='tensorflow.StructuredValue.list_value', index=10, number=51, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='tuple_value', 
full_name='tensorflow.StructuredValue.tuple_value', index=11, number=52, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='dict_value', full_name='tensorflow.StructuredValue.dict_value', index=12, number=53, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='named_tuple_value', full_name='tensorflow.StructuredValue.named_tuple_value', index=13, number=54, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[_descriptor.OneofDescriptor(name='kind', full_name='tensorflow.StructuredValue.kind', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[])], serialized_start=179, serialized_end=835) _NONEVALUE = _descriptor.Descriptor(name='NoneValue', full_name='tensorflow.NoneValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=837, serialized_end=848) _LISTVALUE = _descriptor.Descriptor(name='ListValue', full_name='tensorflow.ListValue', filename=None, file=DESCRIPTOR, containing_type=None, 
create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='values', full_name='tensorflow.ListValue.values', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=850, serialized_end=906) _TUPLEVALUE = _descriptor.Descriptor(name='TupleValue', full_name='tensorflow.TupleValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='values', full_name='tensorflow.TupleValue.values', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=908, serialized_end=965) _DICTVALUE_FIELDSENTRY = _descriptor.Descriptor(name='FieldsEntry', full_name='tensorflow.DictValue.FieldsEntry', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='key', full_name='tensorflow.DictValue.FieldsEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='value', 
full_name='tensorflow.DictValue.FieldsEntry.value', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=b'8\x01', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=1032, serialized_end=1106) _DICTVALUE = _descriptor.Descriptor(name='DictValue', full_name='tensorflow.DictValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='fields', full_name='tensorflow.DictValue.fields', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[_DICTVALUE_FIELDSENTRY], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=968, serialized_end=1106) _PAIRVALUE = _descriptor.Descriptor(name='PairValue', full_name='tensorflow.PairValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='key', full_name='tensorflow.PairValue.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='value', full_name='tensorflow.PairValue.value', index=1, number=2, type=11, cpp_type=10, label=1, 
has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=1108, serialized_end=1176) _NAMEDTUPLEVALUE = _descriptor.Descriptor(name='NamedTupleValue', full_name='tensorflow.NamedTupleValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='name', full_name='tensorflow.NamedTupleValue.name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='values', full_name='tensorflow.NamedTupleValue.values', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=1178, serialized_end=1248) _TENSORSPECPROTO = _descriptor.Descriptor(name='TensorSpecProto', full_name='tensorflow.TensorSpecProto', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='name', full_name='tensorflow.TensorSpecProto.name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='shape', full_name='tensorflow.TensorSpecProto.shape', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='dtype', full_name='tensorflow.TensorSpecProto.dtype', index=2, number=3, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=1250, serialized_end=1363) _BOUNDEDTENSORSPECPROTO = _descriptor.Descriptor(name='BoundedTensorSpecProto', full_name='tensorflow.BoundedTensorSpecProto', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='name', full_name='tensorflow.BoundedTensorSpecProto.name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='shape', full_name='tensorflow.BoundedTensorSpecProto.shape', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='dtype', full_name='tensorflow.BoundedTensorSpecProto.dtype', index=2, number=3, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='minimum', full_name='tensorflow.BoundedTensorSpecProto.minimum', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='maximum', full_name='tensorflow.BoundedTensorSpecProto.maximum', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=1366, serialized_end=1570) _TYPESPECPROTO = _descriptor.Descriptor(name='TypeSpecProto', full_name='tensorflow.TypeSpecProto', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='type_spec_class', full_name='tensorflow.TypeSpecProto.type_spec_class', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='type_state', full_name='tensorflow.TypeSpecProto.type_state', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='type_spec_class_name', full_name='tensorflow.TypeSpecProto.type_spec_class_name', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[_TYPESPECPROTO_TYPESPECCLASS], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=1573, serialized_end=2048) _STRUCTUREDVALUE.fields_by_name['none_value'].message_type = _NONEVALUE _STRUCTUREDVALUE.fields_by_name['tensor_shape_value'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2._TENSORSHAPEPROTO _STRUCTUREDVALUE.fields_by_name['tensor_dtype_value'].enum_type = tensorflow_dot_core_dot_framework_dot_types__pb2._DATATYPE _STRUCTUREDVALUE.fields_by_name['tensor_spec_value'].message_type = _TENSORSPECPROTO _STRUCTUREDVALUE.fields_by_name['type_spec_value'].message_type = _TYPESPECPROTO _STRUCTUREDVALUE.fields_by_name['bounded_tensor_spec_value'].message_type = _BOUNDEDTENSORSPECPROTO _STRUCTUREDVALUE.fields_by_name['list_value'].message_type = _LISTVALUE _STRUCTUREDVALUE.fields_by_name['tuple_value'].message_type = _TUPLEVALUE _STRUCTUREDVALUE.fields_by_name['dict_value'].message_type = _DICTVALUE _STRUCTUREDVALUE.fields_by_name['named_tuple_value'].message_type = _NAMEDTUPLEVALUE 
_STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['none_value']) _STRUCTUREDVALUE.fields_by_name['none_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['float64_value']) _STRUCTUREDVALUE.fields_by_name['float64_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['int64_value']) _STRUCTUREDVALUE.fields_by_name['int64_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['string_value']) _STRUCTUREDVALUE.fields_by_name['string_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['bool_value']) _STRUCTUREDVALUE.fields_by_name['bool_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['tensor_shape_value']) _STRUCTUREDVALUE.fields_by_name['tensor_shape_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['tensor_dtype_value']) _STRUCTUREDVALUE.fields_by_name['tensor_dtype_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['tensor_spec_value']) _STRUCTUREDVALUE.fields_by_name['tensor_spec_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['type_spec_value']) _STRUCTUREDVALUE.fields_by_name['type_spec_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] 
_STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['bounded_tensor_spec_value']) _STRUCTUREDVALUE.fields_by_name['bounded_tensor_spec_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['list_value']) _STRUCTUREDVALUE.fields_by_name['list_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['tuple_value']) _STRUCTUREDVALUE.fields_by_name['tuple_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['dict_value']) _STRUCTUREDVALUE.fields_by_name['dict_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _STRUCTUREDVALUE.oneofs_by_name['kind'].fields.append(_STRUCTUREDVALUE.fields_by_name['named_tuple_value']) _STRUCTUREDVALUE.fields_by_name['named_tuple_value'].containing_oneof = _STRUCTUREDVALUE.oneofs_by_name['kind'] _LISTVALUE.fields_by_name['values'].message_type = _STRUCTUREDVALUE _TUPLEVALUE.fields_by_name['values'].message_type = _STRUCTUREDVALUE _DICTVALUE_FIELDSENTRY.fields_by_name['value'].message_type = _STRUCTUREDVALUE _DICTVALUE_FIELDSENTRY.containing_type = _DICTVALUE _DICTVALUE.fields_by_name['fields'].message_type = _DICTVALUE_FIELDSENTRY _PAIRVALUE.fields_by_name['value'].message_type = _STRUCTUREDVALUE _NAMEDTUPLEVALUE.fields_by_name['values'].message_type = _PAIRVALUE _TENSORSPECPROTO.fields_by_name['shape'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2._TENSORSHAPEPROTO _TENSORSPECPROTO.fields_by_name['dtype'].enum_type = tensorflow_dot_core_dot_framework_dot_types__pb2._DATATYPE _BOUNDEDTENSORSPECPROTO.fields_by_name['shape'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2._TENSORSHAPEPROTO _BOUNDEDTENSORSPECPROTO.fields_by_name['dtype'].enum_type = 
tensorflow_dot_core_dot_framework_dot_types__pb2._DATATYPE _BOUNDEDTENSORSPECPROTO.fields_by_name['minimum'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__pb2._TENSORPROTO _BOUNDEDTENSORSPECPROTO.fields_by_name['maximum'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__pb2._TENSORPROTO _TYPESPECPROTO.fields_by_name['type_spec_class'].enum_type = _TYPESPECPROTO_TYPESPECCLASS _TYPESPECPROTO.fields_by_name['type_state'].message_type = _STRUCTUREDVALUE _TYPESPECPROTO_TYPESPECCLASS.containing_type = _TYPESPECPROTO DESCRIPTOR.message_types_by_name['StructuredValue'] = _STRUCTUREDVALUE DESCRIPTOR.message_types_by_name['NoneValue'] = _NONEVALUE DESCRIPTOR.message_types_by_name['ListValue'] = _LISTVALUE DESCRIPTOR.message_types_by_name['TupleValue'] = _TUPLEVALUE DESCRIPTOR.message_types_by_name['DictValue'] = _DICTVALUE DESCRIPTOR.message_types_by_name['PairValue'] = _PAIRVALUE DESCRIPTOR.message_types_by_name['NamedTupleValue'] = _NAMEDTUPLEVALUE DESCRIPTOR.message_types_by_name['TensorSpecProto'] = _TENSORSPECPROTO DESCRIPTOR.message_types_by_name['BoundedTensorSpecProto'] = _BOUNDEDTENSORSPECPROTO DESCRIPTOR.message_types_by_name['TypeSpecProto'] = _TYPESPECPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) StructuredValue = _reflection.GeneratedProtocolMessageType('StructuredValue', (_message.Message,), {'DESCRIPTOR': _STRUCTUREDVALUE, '__module__': 'tensorflow.core.protobuf.struct_pb2'}) _sym_db.RegisterMessage(StructuredValue) NoneValue = _reflection.GeneratedProtocolMessageType('NoneValue', (_message.Message,), {'DESCRIPTOR': _NONEVALUE, '__module__': 'tensorflow.core.protobuf.struct_pb2'}) _sym_db.RegisterMessage(NoneValue) ListValue = _reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), {'DESCRIPTOR': _LISTVALUE, '__module__': 'tensorflow.core.protobuf.struct_pb2'}) _sym_db.RegisterMessage(ListValue) TupleValue = _reflection.GeneratedProtocolMessageType('TupleValue', (_message.Message,), {'DESCRIPTOR': _TUPLEVALUE, 
'__module__': 'tensorflow.core.protobuf.struct_pb2'}) _sym_db.RegisterMessage(TupleValue) DictValue = _reflection.GeneratedProtocolMessageType('DictValue', (_message.Message,), {'FieldsEntry': _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), {'DESCRIPTOR': _DICTVALUE_FIELDSENTRY, '__module__': 'tensorflow.core.protobuf.struct_pb2'}), 'DESCRIPTOR': _DICTVALUE, '__module__': 'tensorflow.core.protobuf.struct_pb2'}) _sym_db.RegisterMessage(DictValue) _sym_db.RegisterMessage(DictValue.FieldsEntry) PairValue = _reflection.GeneratedProtocolMessageType('PairValue', (_message.Message,), {'DESCRIPTOR': _PAIRVALUE, '__module__': 'tensorflow.core.protobuf.struct_pb2'}) _sym_db.RegisterMessage(PairValue) NamedTupleValue = _reflection.GeneratedProtocolMessageType('NamedTupleValue', (_message.Message,), {'DESCRIPTOR': _NAMEDTUPLEVALUE, '__module__': 'tensorflow.core.protobuf.struct_pb2'}) _sym_db.RegisterMessage(NamedTupleValue) TensorSpecProto = _reflection.GeneratedProtocolMessageType('TensorSpecProto', (_message.Message,), {'DESCRIPTOR': _TENSORSPECPROTO, '__module__': 'tensorflow.core.protobuf.struct_pb2'}) _sym_db.RegisterMessage(TensorSpecProto) BoundedTensorSpecProto = _reflection.GeneratedProtocolMessageType('BoundedTensorSpecProto', (_message.Message,), {'DESCRIPTOR': _BOUNDEDTENSORSPECPROTO, '__module__': 'tensorflow.core.protobuf.struct_pb2'}) _sym_db.RegisterMessage(BoundedTensorSpecProto) TypeSpecProto = _reflection.GeneratedProtocolMessageType('TypeSpecProto', (_message.Message,), {'DESCRIPTOR': _TYPESPECPROTO, '__module__': 'tensorflow.core.protobuf.struct_pb2'}) _sym_db.RegisterMessage(TypeSpecProto) DESCRIPTOR._options = None _DICTVALUE_FIELDSENTRY._options = None
290.637168
6,026
0.839261
4,358
32,842
5.938045
0.068151
0.03957
0.071219
0.063645
0.736224
0.693678
0.658011
0.625164
0.615504
0.579759
0
0.032685
0.037665
32,842
112
6,027
293.232143
0.78611
0.000944
0
0
1
0.621622
0.216285
0.153863
0
0
0
0
0
1
0
false
0
0.063063
0
0.063063
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
dc847d99af8e338af227a48dfb6b580e6def68da
103
py
Python
untitled/precaritorazor/apps.py
NDI-ICE-2019/NDI-main-subject-backend
5628e36b414599e75eace6acea6536131c4126fa
[ "MIT" ]
null
null
null
untitled/precaritorazor/apps.py
NDI-ICE-2019/NDI-main-subject-backend
5628e36b414599e75eace6acea6536131c4126fa
[ "MIT" ]
null
null
null
untitled/precaritorazor/apps.py
NDI-ICE-2019/NDI-main-subject-backend
5628e36b414599e75eace6acea6536131c4126fa
[ "MIT" ]
null
null
null
from django.apps import AppConfig class PrecaritorazorConfig(AppConfig): name = 'precaritorazor'
17.166667
38
0.786408
10
103
8.1
0.9
0
0
0
0
0
0
0
0
0
0
0
0.145631
103
5
39
20.6
0.920455
0
0
0
0
0
0.135922
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
dc88e7dc384e897c65703300284e468c8f8718f3
193
py
Python
specviz/tests/test_specviz_gui.py
ibusko/specviz
b8bcd495e5b43dc2b90f7bf2d5bad2d27c6990aa
[ "BSD-3-Clause" ]
null
null
null
specviz/tests/test_specviz_gui.py
ibusko/specviz
b8bcd495e5b43dc2b90f7bf2d5bad2d27c6990aa
[ "BSD-3-Clause" ]
null
null
null
specviz/tests/test_specviz_gui.py
ibusko/specviz
b8bcd495e5b43dc2b90f7bf2d5bad2d27c6990aa
[ "BSD-3-Clause" ]
null
null
null
def test_spec_gui(specviz_gui): """ Generic test to ensure the pytest fixture is properly feeding an instance of the specviz application. """ assert specviz_gui is not None
27.571429
77
0.715026
28
193
4.785714
0.75
0.149254
0
0
0
0
0
0
0
0
0
0
0.233161
193
6
78
32.166667
0.905405
0.523316
0
0
0
0
0
0
0
0
0
0
0.5
1
0.5
false
0
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
0
0
0
0
0
0
0
4
dc8ddf059067fd709915e8d365ca6207056338f3
87
py
Python
Bronze/Bronze_V/20254.py
masterTyper/baekjoon_solved_ac
b9ce14d9bdaa5b5b06735ad075fb827de9f44b9c
[ "MIT" ]
null
null
null
Bronze/Bronze_V/20254.py
masterTyper/baekjoon_solved_ac
b9ce14d9bdaa5b5b06735ad075fb827de9f44b9c
[ "MIT" ]
null
null
null
Bronze/Bronze_V/20254.py
masterTyper/baekjoon_solved_ac
b9ce14d9bdaa5b5b06735ad075fb827de9f44b9c
[ "MIT" ]
null
null
null
Ur, Tr, Uo, To = map(int, input().split()) print(56 * Ur + 24 * Tr + 14 * Uo + 6 * To)
29
43
0.505747
17
87
2.588235
0.764706
0
0
0
0
0
0
0
0
0
0
0.107692
0.252874
87
3
43
29
0.569231
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
f49278176156bf56edd6563da568dae0c7e08a0f
383
py
Python
kiri_search/models/__init__.py
kiri-ai/kiri-search
78a0f78b11b73cca8934054498d5713773d3e93a
[ "Apache-2.0" ]
null
null
null
kiri_search/models/__init__.py
kiri-ai/kiri-search
78a0f78b11b73cca8934054498d5713773d3e93a
[ "Apache-2.0" ]
null
null
null
kiri_search/models/__init__.py
kiri-ai/kiri-search
78a0f78b11b73cca8934054498d5713773d3e93a
[ "Apache-2.0" ]
null
null
null
from .tasks.qa import QA from .tasks.summarisation import Summarisation from .tasks.emotion import Emotion from .tasks.classification import Classification from .tasks.vectorisation import Vectorisation from .tasks.generation import Generation from .custom_models import T5QASummaryEmotion from .models import BaseModel, PathModel, HuggingModel, GenerationModel, ClassificationModel
42.555556
92
0.859008
43
383
7.627907
0.395349
0.164634
0
0
0
0
0
0
0
0
0
0.002882
0.093995
383
8
93
47.875
0.942363
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
f4b3f87820e2ad14008d2f3cce0ee351b655b0eb
46
py
Python
RTool/prototype/__init__.py
RonNofar/RTool
c7cc53ba732d19f3eba8860539289b08773a15d3
[ "MIT" ]
null
null
null
RTool/prototype/__init__.py
RonNofar/RTool
c7cc53ba732d19f3eba8860539289b08773a15d3
[ "MIT" ]
null
null
null
RTool/prototype/__init__.py
RonNofar/RTool
c7cc53ba732d19f3eba8860539289b08773a15d3
[ "MIT" ]
null
null
null
import RTool.prototype.tos __all__ = ["tos"]
11.5
26
0.717391
6
46
4.833333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.130435
46
3
27
15.333333
0.725
0
0
0
0
0
0.065217
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
f4b601f11639004d10e7020842531a6e1ec297aa
2,809
py
Python
z2/part3/updated_part2_batch/jm/parser_errors_2/514866768.py
kozakusek/ipp-2020-testy
09aa008fa53d159672cc7cbf969a6b237e15a7b8
[ "MIT" ]
1
2020-04-16T12:13:47.000Z
2020-04-16T12:13:47.000Z
z2/part3/updated_part2_batch/jm/parser_errors_2/514866768.py
kozakusek/ipp-2020-testy
09aa008fa53d159672cc7cbf969a6b237e15a7b8
[ "MIT" ]
18
2020-03-06T17:50:15.000Z
2020-05-19T14:58:30.000Z
z2/part3/updated_part2_batch/jm/parser_errors_2/514866768.py
kozakusek/ipp-2020-testy
09aa008fa53d159672cc7cbf969a6b237e15a7b8
[ "MIT" ]
18
2020-03-06T17:45:13.000Z
2020-06-09T19:18:31.000Z
from part1 import ( gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new, ) """ scenario: test_random_actions uuid: 514866768 """ """ random actions, total chaos """ board = gamma_new(4, 4, 2, 15) assert board is not None assert gamma_move(board, 1, 1, 1) == 1 assert gamma_move(board, 1, 1, 2) == 1 assert gamma_busy_fields(board, 2) == 0 assert gamma_move(board, 1, 1, 1) == 0 assert gamma_move(board, 2, 2, 0) == 1 assert gamma_move(board, 2, 0, 0) == 1 board741169294 = gamma_board(board) assert board741169294 is not None assert board741169294 == ("....\n" ".1..\n" ".1..\n" "2.2.\n") del board741169294 board741169294 = None assert gamma_move(board, 1, 1, 2) == 0 assert gamma_move(board, 1, 1, 1) == 0 assert gamma_move(board, 2, 1, 2) == 0 assert gamma_move(board, 2, 2, 2) == 1 assert gamma_move(board, 1, 0, 3) == 1 assert gamma_free_fields(board, 1) == 10 board561860607 = gamma_board(board) assert board561860607 is not None assert board561860607 == ("1...\n" ".12.\n" ".1..\n" "2.2.\n") del board561860607 board561860607 = None assert gamma_move(board, 2, 3, 3) == 1 assert gamma_move(board, 1, 2, 2) == 0 assert gamma_move(board, 1, 3, 2) == 1 assert gamma_move(board, 2, 1, 0) == 1 assert gamma_move(board, 2, 3, 0) == 1 assert gamma_busy_fields(board, 2) == 6 assert gamma_move(board, 1, 1, 3) == 1 assert gamma_move(board, 1, 1, 1) == 0 assert gamma_move(board, 2, 2, 0) == 0 assert gamma_move(board, 2, 1, 1) == 0 assert gamma_busy_fields(board, 2) == 6 assert gamma_move(board, 1, 1, 3) == 0 assert gamma_move(board, 2, 1, 0) == 0 assert gamma_move(board, 2, 2, 0) == 0 assert gamma_move(board, 1, 0, 1) == 1 assert gamma_move(board, 2, 1, 3) == 0 assert gamma_move(board, 1, 2, 0) == 0 assert gamma_move(board, 2, 1, 2) == 0 assert gamma_move(board, 1, 1, 3) == 0 assert gamma_move(board, 1, 3, 1) == 1 assert gamma_move(board, 2, 1, 2) == 0 assert gamma_move(board, 2, 1, 2) == 0 assert 
gamma_move(board, 1, 1, 2) == 0 assert gamma_move(board, 2, 3, 3) == 0 assert gamma_busy_fields(board, 2) == 6 assert gamma_move(board, 1, 2, 0) == 0 assert gamma_move(board, 2, 2, 0) == 0 assert gamma_move(board, 2, 1, 3) == 0 board294704751 = gamma_board(board) assert board294704751 is not None assert board294704751 == ("11.2\n" ".121\n" "11.1\n" "2222\n") del board294704751 board294704751 = None assert gamma_move(board, 1, 1, 1) == 0 assert gamma_move(board, 2, 3, 2) == 0 assert gamma_move(board, 2, 0, 3) == 0 assert gamma_move(board, 1, 3, 1) == 0 assert gamma_move(board, 2, 1, 2) == 0 assert gamma_move(board, 1, 1, 2) == 0 assert gamma_move(board, 2, 1, 2) == 0 assert gamma_move(board, 2, 1, 0) == 0 gamma_delete(board)
27.009615
41
0.658953
499
2,809
3.56513
0.084168
0.296796
0.362563
0.483418
0.659359
0.656549
0.639123
0.504778
0.471051
0.453626
0
0.15512
0.182983
2,809
103
42
27.271845
0.620044
0
0
0.376471
0
0
0.026461
0
0
0
0
0
0.647059
1
0
false
0
0.011765
0
0.011765
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
f4c9f2a03387832c934b3c6c3ff6e1f2352ce16f
129
py
Python
slides/20160523-Python_Function/demo2.py
she02789222/test
fa10bd5be351ca3d4bef4f7d6c4510e65666de7c
[ "MIT" ]
4
2018-11-29T04:06:29.000Z
2021-11-29T07:00:44.000Z
slides/20160523-Python_Function/demo2.py
NTNUCIC/108
52961e76d299842c2d44d142d5c56ad665420ee6
[ "MIT" ]
6
2016-05-17T02:34:57.000Z
2021-02-05T17:33:28.000Z
slides/20160523-Python_Function/demo2.py
NTNUCIC/108
52961e76d299842c2d44d142d5c56ad665420ee6
[ "MIT" ]
3
2019-02-17T05:58:46.000Z
2019-02-18T15:09:55.000Z
def mutable_or_immutable(para): para += '1' a = 'a' b = ['b'] mutable_or_immutable(a) print(a) mutable_or_immutable(b) print(b)
14.333333
31
0.697674
23
129
3.652174
0.391304
0.321429
0.642857
0
0
0
0
0
0
0
0
0.00885
0.124031
129
9
32
14.333333
0.734513
0
0
0
0
0
0.023077
0
0
0
0
0
0
1
0.125
false
0
0
0
0.125
0.25
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f4d2d4aa27cf2b99a599aaa00ada0793763e2c23
138
py
Python
files/opt/graphite/conf/webapp/graphite/local_settings.py
cornernote/docker-shinken
cf55f989b4130ad6c1f30496009d50da057bb869
[ "BSD-3-Clause" ]
null
null
null
files/opt/graphite/conf/webapp/graphite/local_settings.py
cornernote/docker-shinken
cf55f989b4130ad6c1f30496009d50da057bb869
[ "BSD-3-Clause" ]
null
null
null
files/opt/graphite/conf/webapp/graphite/local_settings.py
cornernote/docker-shinken
cf55f989b4130ad6c1f30496009d50da057bb869
[ "BSD-3-Clause" ]
1
2020-08-11T07:40:11.000Z
2020-08-11T07:40:11.000Z
LOG_DIR = '/var/log/graphite' SECRET_KEY = "YzgzMWQ1NzgzN44udMjljNWIwNjRiZjQ5NTUyNjY2ZjkzOWI5OTljODU2MmViNmY5" TIME_ZONE = 'Asia/Kolkata'
34.5
80
0.84058
12
138
9.416667
0.916667
0
0
0
0
0
0
0
0
0
0
0.062016
0.065217
138
3
81
46
0.813953
0
0
0
0
0
0.681159
0.471014
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f4dd98c1337aecb03bb832f3809ad8ef0d9081f9
218
py
Python
splash/__init__.py
chroming/splash
18a74599ba836fd66a627b0fefc4d109afa0d73d
[ "BSD-3-Clause" ]
null
null
null
splash/__init__.py
chroming/splash
18a74599ba836fd66a627b0fefc4d109afa0d73d
[ "BSD-3-Clause" ]
null
null
null
splash/__init__.py
chroming/splash
18a74599ba836fd66a627b0fefc4d109afa0d73d
[ "BSD-3-Clause" ]
null
null
null
__version__ = '3.2' from PyQt5.Qt import qVersion from distutils.version import LooseVersion version_info = tuple(LooseVersion(__version__).version) pyqt_version = qVersion() __all__ = ['__version__', 'version_info']
27.25
55
0.793578
26
218
5.923077
0.538462
0.246753
0
0
0
0
0
0
0
0
0
0.015306
0.100917
218
7
56
31.142857
0.770408
0
0
0
0
0
0.119266
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
f4e41468e331ea8c6d314c7e6fdaea3ad428495c
93
py
Python
backend/infovis/nicetable/apps.py
cientopolis/data-viz
1c555db4af1f744b22c1a84d8fc73817263373a7
[ "MIT" ]
null
null
null
backend/infovis/nicetable/apps.py
cientopolis/data-viz
1c555db4af1f744b22c1a84d8fc73817263373a7
[ "MIT" ]
12
2020-02-12T01:22:45.000Z
2022-02-26T16:37:56.000Z
backend/infovis/nicetable/apps.py
cientopolis/data-viz
1c555db4af1f744b22c1a84d8fc73817263373a7
[ "MIT" ]
1
2019-10-02T20:04:23.000Z
2019-10-02T20:04:23.000Z
from django.apps import AppConfig class NicetableConfig(AppConfig): name = 'nicetable'
15.5
33
0.763441
10
93
7.1
0.9
0
0
0
0
0
0
0
0
0
0
0
0.16129
93
5
34
18.6
0.910256
0
0
0
0
0
0.096774
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
5214069f4def8ce9adb37d2256a70b3dab137cd3
51
py
Python
app/modules/common/FEAT/F14_individual_heat_supply_costs_per_building/__init__.py
HotMaps/building_h-c
db5a103cb9d41b88e6cdc3c9194fc1ec9fc5c31f
[ "Apache-2.0" ]
1
2021-05-11T06:41:04.000Z
2021-05-11T06:41:04.000Z
app/modules/common/FEAT/F14_individual_heat_supply_costs_per_building/__init__.py
HotMaps/HotMaps-building_h-c
db5a103cb9d41b88e6cdc3c9194fc1ec9fc5c31f
[ "Apache-2.0" ]
2
2017-08-22T13:53:22.000Z
2017-09-25T07:27:28.000Z
app/modules/common/FEAT/F14_individual_heat_supply_costs_per_building/__init__.py
HotMaps/Hotmaps-building_h-c
db5a103cb9d41b88e6cdc3c9194fc1ec9fc5c31f
[ "Apache-2.0" ]
null
null
null
''' Created on Apr 20, 2017 @author: simulant '''
8.5
23
0.627451
7
51
4.571429
1
0
0
0
0
0
0
0
0
0
0
0.146341
0.196078
51
5
24
10.2
0.634146
0.823529
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
521873f52a76528432e9d2a522c1df611bbeb632
471
py
Python
order_microservice/order_microservice/settings/production.py
fga-eps-mds/2018.2-FGAPP-vendas
a0f457a2d7557a1db01d539f98f8518f55524d97
[ "MIT" ]
null
null
null
order_microservice/order_microservice/settings/production.py
fga-eps-mds/2018.2-FGAPP-vendas
a0f457a2d7557a1db01d539f98f8518f55524d97
[ "MIT" ]
5
2018-10-03T02:00:15.000Z
2018-10-24T12:29:15.000Z
order_microservice/order_microservice/settings/production.py
fga-eps-mds/2018.2-FGAPP-vendas
a0f457a2d7557a1db01d539f98f8518f55524d97
[ "MIT" ]
null
null
null
from order_microservice.settings.common import * DEBUG = False SECRET_KEY = os.environ.get('SECRET_KEY') ALLOWED_HOSTS = [os.environ.get('HOST')] # settings.py DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': os.environ.get('DB_NAME'), 'HOST': os.environ.get('DB_HOST'), 'PORT': os.environ.get('DB_PORT'), 'USER': os.environ.get('DB_USER'), 'PASSWORD': os.environ.get('DB_PASS') } }
26.166667
50
0.615711
60
471
4.683333
0.483333
0.224199
0.298932
0.24911
0
0
0
0
0
0
0
0
0.197452
471
18
51
26.166667
0.743386
0.023355
0
0
0
0
0.250545
0.063181
0
0
0
0
0
1
0
false
0.071429
0.071429
0
0.071429
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
5226030daeaa87c3940ff35db55da37744bc7c89
108
py
Python
rainbowmindmachine/apikeys.example.py
rainbow-mind-machine/rainbow-mind-machine
3911ccf90db578e294f7e146916990ec14e1bdad
[ "MIT" ]
8
2018-05-17T03:00:57.000Z
2018-12-10T01:50:37.000Z
rainbowmindmachine/apikeys.example.py
rainbow-mind-machine/rainbow-mind-machine
3911ccf90db578e294f7e146916990ec14e1bdad
[ "MIT" ]
55
2018-05-15T04:52:33.000Z
2018-09-09T23:48:29.000Z
rainbowmindmachine/apikeys.example.py
rainbow-mind-machine/rainbow-mind-machine
3911ccf90db578e294f7e146916990ec14e1bdad
[ "MIT" ]
3
2018-05-13T05:33:01.000Z
2019-07-31T00:53:56.000Z
consumer_key = '123456' consumer_secret = '123456' flickr_key = '123456' flickr_secret = '123456'
21.6
26
0.685185
12
108
5.833333
0.416667
0.257143
0
0
0
0
0
0
0
0
0
0.27907
0.203704
108
4
27
27
0.534884
0
0
0
0
0
0.222222
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
52289acd6cc28e1ed1127d40c7ac07ee096bb702
1,086
py
Python
test/test_update_atomic_bank_request.py
Basis-Theory/basistheory-python
5fd0f3d20fd07e8de45d6d5919e092c696049df1
[ "Apache-2.0" ]
null
null
null
test/test_update_atomic_bank_request.py
Basis-Theory/basistheory-python
5fd0f3d20fd07e8de45d6d5919e092c696049df1
[ "Apache-2.0" ]
null
null
null
test/test_update_atomic_bank_request.py
Basis-Theory/basistheory-python
5fd0f3d20fd07e8de45d6d5919e092c696049df1
[ "Apache-2.0" ]
null
null
null
""" Basis Theory API ## Getting Started * Sign-in to [Basis Theory](https://basistheory.com) and go to [Applications](https://portal.basistheory.com/applications) * Create a Basis Theory Server to Server Application * All permissions should be selected * Paste the API Key into the `BT-API-KEY` variable # noqa: E501 The version of the OpenAPI document: v1 Generated by: https://openapi-generator.tech """ import sys import unittest import basistheory from basistheory.model.bank import Bank globals()['Bank'] = Bank from basistheory.model.update_atomic_bank_request import UpdateAtomicBankRequest class TestUpdateAtomicBankRequest(unittest.TestCase): """UpdateAtomicBankRequest unit test stubs""" def setUp(self): pass def tearDown(self): pass def testUpdateAtomicBankRequest(self): """Test UpdateAtomicBankRequest""" # FIXME: construct object with mandatory attributes with example values # model = UpdateAtomicBankRequest() # noqa: E501 pass if __name__ == '__main__': unittest.main()
28.578947
300
0.720074
124
1,086
6.217742
0.596774
0.042802
0.051881
0
0
0
0
0
0
0
0
0.007973
0.191529
1,086
37
301
29.351351
0.870159
0.540516
0
0.2
0
0
0.025918
0
0
0
0
0.027027
0
1
0.2
false
0.2
0.333333
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
1
1
0
1
0
0
4
5252974a76b096d2cfcc22e00f9d0969c4adf232
107
py
Python
integrations/__init__.py
conducto/conducto
b480780905f5a25e8c803b60ca7cdf6976ce5ef6
[ "Apache-2.0" ]
25
2020-05-07T22:51:11.000Z
2021-11-17T16:14:42.000Z
integrations/__init__.py
conducto/conducto
b480780905f5a25e8c803b60ca7cdf6976ce5ef6
[ "Apache-2.0" ]
3
2020-04-21T06:38:02.000Z
2020-05-31T01:57:19.000Z
integrations/__init__.py
conducto/conducto
b480780905f5a25e8c803b60ca7cdf6976ce5ef6
[ "Apache-2.0" ]
2
2020-05-14T01:47:32.000Z
2020-06-03T21:58:12.000Z
# Don't automatically import anything in here. The individual integrations should be # imported as needed.
35.666667
84
0.803738
15
107
5.733333
1
0
0
0
0
0
0
0
0
0
0
0
0.149533
107
2
85
53.5
0.945055
0.953271
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
5261987085319e8f96780e9a0d602bcc262b85dd
843
py
Python
app/flask_app/models.py
mahfuz978/Buildweek-project-contributions
28c76b40c17ff6c19cfd568c8b55c98b76bef9b8
[ "MIT" ]
null
null
null
app/flask_app/models.py
mahfuz978/Buildweek-project-contributions
28c76b40c17ff6c19cfd568c8b55c98b76bef9b8
[ "MIT" ]
null
null
null
app/flask_app/models.py
mahfuz978/Buildweek-project-contributions
28c76b40c17ff6c19cfd568c8b55c98b76bef9b8
[ "MIT" ]
null
null
null
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate

# Shared extension instances; they are bound to the Flask app elsewhere
# (presumably via db.init_app(app) / migrate.init_app(app) — confirm in app factory).
db = SQLAlchemy()
migrate = Migrate()


class Song(db.Model):
    """A track row: identifiers plus its per-track audio-feature columns."""

    # Column order is kept exactly as originally declared so the generated
    # table schema is unchanged.
    id = db.Column(db.Integer, primary_key=True)
    artist_name = db.Column(db.String)
    track_id = db.Column(db.String)
    track_name = db.Column(db.String)
    acousticness = db.Column(db.Float)
    danceability = db.Column(db.Float)
    duration_ms = db.Column(db.Float)
    energy = db.Column(db.Float)
    instrumentalness = db.Column(db.Float)
    key = db.Column(db.Integer)
    liveness = db.Column(db.Float)
    loudness = db.Column(db.Float)
    mode = db.Column(db.Integer)
    speechiness = db.Column(db.Float)
    tempo = db.Column(db.Float)
    time_signature = db.Column(db.Integer)
    valence = db.Column(db.Float)
    popularity = db.Column(db.Integer)
30.107143
49
0.669039
116
843
4.793103
0.301724
0.258993
0.323741
0.269784
0.118705
0
0
0
0
0
0
0
0.209964
843
27
50
31.222222
0.834835
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.086957
0
0.913043
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
52846523c138545adacd1d66fcf9f2eab3077168
721
py
Python
core-python/Core_Python/com/FunctionDemo.py
theumang100/tutorials-1
497f54c2adb022c316530319a168fca1c007d4b1
[ "MIT" ]
9
2020-04-23T05:24:19.000Z
2022-02-17T16:37:51.000Z
core-python/Core_Python/com/FunctionDemo.py
theumang100/tutorials-1
497f54c2adb022c316530319a168fca1c007d4b1
[ "MIT" ]
5
2020-10-01T05:08:37.000Z
2020-10-12T03:18:10.000Z
core-python/Core_Python/com/FunctionDemo.py
theumang100/tutorials-1
497f54c2adb022c316530319a168fca1c007d4b1
[ "MIT" ]
9
2020-04-28T14:06:41.000Z
2021-10-19T18:32:28.000Z
# NOTE(review): this module is a teaching demo — each example is wrapped in an
# inert triple-quoted string, so nothing here executes on import. The content
# appears whitespace-collapsed in this dump; left byte-identical because the
# original line breaks inside the string literals cannot be reconstructed
# safely from here — TODO confirm against the original file's formatting.
# add two numbers using function without return '''def add(x,y): print("addition is : ",(x+y)) x = int(input("Enter a first number for addition : ")) y = int(input("Enter a second number for addition : ")) add(x,y)''' # add two numbers using function with return '''def add(x,y): return (x+y) x = int(input("Enter a first number for addition : ")) y = int(input("Enter a second number for addition : ")) print("addition is : ",add(x,y))''' # add and sub two numbers using function with return multiple '''def add_sub(x,y): return x+y,x-y x = int(input("Enter a first number for addition : ")) y = int(input("Enter a second number for addition : ")) print("addition and subtraction is : ",add_sub(x,y))'''
31.347826
61
0.660194
122
721
3.885246
0.213115
0.042194
0.164557
0.177215
0.822785
0.719409
0.542194
0.542194
0.542194
0.542194
0
0
0.177531
721
22
62
32.772727
0.799325
0.439667
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
bff4e339e0efdccda2047b96fc3dcc9dbb2768ed
21,049
py
Python
test/reference-output/python.py
GoNZooo/gotyno-hs
a19ccf914272f475fa9b9729f15d52be73971f02
[ "BSD-2-Clause" ]
1
2022-03-22T13:16:05.000Z
2022-03-22T13:16:05.000Z
test/reference-output/python.py
GoNZooo/gotyno-hs
a19ccf914272f475fa9b9729f15d52be73971f02
[ "BSD-2-Clause" ]
null
null
null
test/reference-output/python.py
GoNZooo/gotyno-hs
a19ccf914272f475fa9b9729f15d52be73971f02
[ "BSD-2-Clause" ]
null
null
null
"""Generated validation/encoding models (gotyno reference output).

Each type exposes the same generated contract:
  * ``validate(value)``   — structural validation of an unknown value
  * ``decode(string)``    — parse from a JSON string, then validate
  * ``to_json()``         — plain-dict representation
  * ``encode()``          — ``to_json`` serialized via ``json.dumps``

FIX(review): the original file referenced ``enum.Enum`` (class ``Color``)
without importing ``enum``, so importing the module raised ``NameError``;
``import enum`` is added below. No other behavior is changed.
"""
import enum
import json
import typing
from dataclasses import dataclass

from gotyno_validation import validation
from gotyno_validation import encoding

from . import basic


@dataclass(frozen=True)
class SomeType:
    """Plain struct with a literal type tag and an optional field."""
    type: typing.Literal['SomeType']
    some_field: str
    some_other_field: int
    maybe_some_field: typing.Optional[str]

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['SomeType']:
        return validation.validate_interface(value, {'type': validation.validate_literal('SomeType'), 'some_field': validation.validate_string, 'some_other_field': validation.validate_int, 'maybe_some_field': validation.validate_optional(validation.validate_string)}, SomeType)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['SomeType']:
        return validation.validate_from_string(string, SomeType.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'type': 'SomeType', 'some_field': self.some_field, 'some_other_field': self.some_other_field, 'maybe_some_field': encoding.optional_to_json(encoding.basic_to_json)(self.maybe_some_field)}

    def encode(self) -> str:
        return json.dumps(self.to_json())


T = typing.TypeVar('T')


@dataclass(frozen=True)
class Holder(typing.Generic[T]):
    """Generic single-value wrapper; validators/encoders for T are injected."""
    value: T

    @staticmethod
    def validate(validate_T: validation.Validator[T]) -> validation.Validator['Holder[T]']:
        def validate_HolderT(value: validation.Unknown) -> validation.ValidationResult['Holder[T]']:
            return validation.validate_interface(value, {'value': validate_T}, Holder)
        return validate_HolderT

    @staticmethod
    def decode(string: typing.Union[str, bytes], validate_T: validation.Validator[T]) -> validation.ValidationResult['Holder[T]']:
        return validation.validate_from_string(string, Holder.validate(validate_T))

    def to_json(self, T_to_json: encoding.ToJSON[T]) -> typing.Dict[str, typing.Any]:
        return {'value': T_to_json(self.value)}

    def encode(self, T_to_json: encoding.ToJSON[T]) -> str:
        return json.dumps(self.to_json(T_to_json))


class EventWithKind:
    """Tagged-union base dispatching on the 'kind' field; subclasses below."""

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['EventWithKind']:
        return validation.validate_with_type_tags(value, 'kind', {'NotificationWithKind': NotificationWithKind.validate, 'LaunchWithKind': LaunchWithKind.validate, 'AnotherEventWithKind': AnotherEventWithKind.validate})

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['EventWithKind']:
        return validation.validate_from_string(string, EventWithKind.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        raise NotImplementedError('`to_json` is not implemented for base class `EventWithKind`')

    def encode(self) -> str:
        raise NotImplementedError('`encode` is not implemented for base class `EventWithKind`')


@dataclass(frozen=True)
class NotificationWithKind(EventWithKind):
    data: str

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['NotificationWithKind']:
        return validation.validate_with_type_tag(value, 'kind', 'NotificationWithKind', {'data': validation.validate_string}, NotificationWithKind)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['NotificationWithKind']:
        return validation.validate_from_string(string, NotificationWithKind.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'kind': 'NotificationWithKind', 'data': self.data}

    def encode(self) -> str:
        return json.dumps(self.to_json())


@dataclass(frozen=True)
class LaunchWithKind(EventWithKind):
    # Payload-free variant: only the type tag is serialized.

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['LaunchWithKind']:
        return validation.validate_with_type_tag(value, 'kind', 'LaunchWithKind', {}, LaunchWithKind)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['LaunchWithKind']:
        return validation.validate_from_string(string, LaunchWithKind.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'kind': 'LaunchWithKind'}

    def encode(self) -> str:
        return json.dumps(self.to_json())


@dataclass(frozen=True)
class AnotherEventWithKind(EventWithKind):
    data: SomeType

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['AnotherEventWithKind']:
        return validation.validate_with_type_tag(value, 'kind', 'AnotherEventWithKind', {'data': SomeType.validate}, AnotherEventWithKind)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['AnotherEventWithKind']:
        return validation.validate_from_string(string, AnotherEventWithKind.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'kind': 'AnotherEventWithKind', 'data': self.data.to_json()}

    def encode(self) -> str:
        return json.dumps(self.to_json())


T = typing.TypeVar('T')


class Possibly(typing.Generic[T]):
    """Generic tagged-union base ('type' tag): NotReally | Definitely[T]."""

    @staticmethod
    def validate(validate_T: validation.Validator[T]) -> validation.Validator['Possibly[T]']:
        def validate_PossiblyT(value: validation.Unknown) -> validation.ValidationResult['Possibly[T]']:
            return validation.validate_with_type_tags(value, 'type', {'NotReally': NotReally.validate, 'Definitely': Definitely.validate(validate_T)})
        return validate_PossiblyT

    @staticmethod
    def decode(string: typing.Union[str, bytes], validate_T: validation.Validator[T]) -> validation.ValidationResult['Possibly[T]']:
        return validation.validate_from_string(string, Possibly.validate(validate_T))

    def to_json(self, T_to_json: encoding.ToJSON[T]) -> typing.Dict[str, typing.Any]:
        raise NotImplementedError('`to_json` is not implemented for base class `Possibly`')

    def encode(self) -> str:
        raise NotImplementedError('`encode` is not implemented for base class `Possibly`')


@dataclass(frozen=True)
class NotReally(Possibly[T]):
    # Payload-free variant of Possibly.

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['NotReally']:
        return validation.validate_with_type_tag(value, 'type', 'NotReally', {}, NotReally)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['NotReally']:
        return validation.validate_from_string(string, NotReally.validate)

    def to_json(self, T_to_json: encoding.ToJSON[T]) -> typing.Dict[str, typing.Any]:
        return {'type': 'NotReally'}

    def encode(self, T_to_json: encoding.ToJSON[T]) -> str:
        return json.dumps(self.to_json(T_to_json))


@dataclass(frozen=True)
class Definitely(Possibly[T]):
    data: T

    @staticmethod
    def validate(validate_T: validation.Validator[T]) -> validation.Validator['Definitely[T]']:
        def validate_DefinitelyT(value: validation.Unknown) -> validation.ValidationResult['Definitely[T]']:
            return validation.validate_with_type_tag(value, 'type', 'Definitely', {'data': validate_T}, Definitely)
        return validate_DefinitelyT

    @staticmethod
    def decode(string: typing.Union[str, bytes], validate_T: validation.Validator[T]) -> validation.ValidationResult['Definitely[T]']:
        return validation.validate_from_string(string, Definitely.validate(validate_T))

    def to_json(self, T_to_json: encoding.ToJSON[T]) -> typing.Dict[str, typing.Any]:
        return {'type': 'Definitely', 'data': T_to_json(self.data)}

    def encode(self, T_to_json: encoding.ToJSON[T]) -> str:
        return json.dumps(self.to_json(T_to_json))


@dataclass(frozen=True)
class PossiblyHolder:
    value: Possibly[str]

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['PossiblyHolder']:
        return validation.validate_interface(value, {'value': Possibly.validate(validation.validate_string)}, PossiblyHolder)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['PossiblyHolder']:
        return validation.validate_from_string(string, PossiblyHolder.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'value': self.value.to_json(encoding.basic_to_json)}

    def encode(self) -> str:
        return json.dumps(self.to_json())


class Color(enum.Enum):
    """Color enumeration; member values are hex color strings."""
    red = 'ff0000'
    green = '00ff00'
    blue = '0000ff'

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['Color']:
        return validation.validate_enumeration_member(value, Color)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['Color']:
        return validation.validate_from_string(string, Color.validate)

    def to_json(self) -> typing.Any:
        return self.value

    def encode(self) -> str:
        return str(self.value)


@dataclass(frozen=True)
class KnownForMovieWithoutTypeTag:
    """Movie payload without a media_type tag; reused by tagged wrappers below."""
    poster_path: typing.Optional[str]
    id: int
    title: typing.Optional[str]
    vote_average: float
    release_date: typing.Optional[str]
    overview: str

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['KnownForMovieWithoutTypeTag']:
        return validation.validate_interface(value, {'poster_path': validation.validate_optional(validation.validate_string), 'id': validation.validate_int, 'title': validation.validate_optional(validation.validate_string), 'vote_average': validation.validate_float, 'release_date': validation.validate_optional(validation.validate_string), 'overview': validation.validate_string}, KnownForMovieWithoutTypeTag)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['KnownForMovieWithoutTypeTag']:
        return validation.validate_from_string(string, KnownForMovieWithoutTypeTag.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'poster_path': encoding.optional_to_json(encoding.basic_to_json)(self.poster_path), 'id': self.id, 'title': encoding.optional_to_json(encoding.basic_to_json)(self.title), 'vote_average': self.vote_average, 'release_date': encoding.optional_to_json(encoding.basic_to_json)(self.release_date), 'overview': self.overview}

    def encode(self) -> str:
        return json.dumps(self.to_json())


@dataclass(frozen=True)
class KnownForShowWithoutTypeTag:
    """Show payload without a media_type tag; reused by tagged wrappers below."""
    poster_path: typing.Optional[str]
    id: int
    vote_average: float
    overview: str
    first_air_date: typing.Optional[str]
    name: typing.Optional[str]

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['KnownForShowWithoutTypeTag']:
        return validation.validate_interface(value, {'poster_path': validation.validate_optional(validation.validate_string), 'id': validation.validate_int, 'vote_average': validation.validate_float, 'overview': validation.validate_string, 'first_air_date': validation.validate_optional(validation.validate_string), 'name': validation.validate_optional(validation.validate_string)}, KnownForShowWithoutTypeTag)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['KnownForShowWithoutTypeTag']:
        return validation.validate_from_string(string, KnownForShowWithoutTypeTag.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'poster_path': encoding.optional_to_json(encoding.basic_to_json)(self.poster_path), 'id': self.id, 'vote_average': self.vote_average, 'overview': self.overview, 'first_air_date': encoding.optional_to_json(encoding.basic_to_json)(self.first_air_date), 'name': encoding.optional_to_json(encoding.basic_to_json)(self.name)}

    def encode(self) -> str:
        return json.dumps(self.to_json())


class KnownForEmbedded:
    """Union base dispatching on 'media_type' (lowercase-tag variants)."""

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['KnownForEmbedded']:
        return validation.validate_with_type_tags(value, 'media_type', {'movieStartingWithLowercase': MovieStartingWithLowercase.validate, 'tvStartingWithLowercase': TvStartingWithLowercase.validate})

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['KnownForEmbedded']:
        return validation.validate_from_string(string, KnownForEmbedded.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        raise NotImplementedError('`to_json` is not implemented for base class `KnownForEmbedded`')

    def encode(self) -> str:
        raise NotImplementedError('`encode` is not implemented for base class `KnownForEmbedded`')


@dataclass
class MovieStartingWithLowercase(KnownForEmbedded):
    poster_path: typing.Optional[str]
    id: int
    title: typing.Optional[str]
    vote_average: float
    release_date: typing.Optional[str]
    overview: str

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['MovieStartingWithLowercase']:
        return validation.validate_with_type_tag_and_validator(value, 'media_type', 'movieStartingWithLowercase', KnownForMovieWithoutTypeTag.validate, MovieStartingWithLowercase)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['MovieStartingWithLowercase']:
        return validation.validate_from_string(string, MovieStartingWithLowercase.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        # Tag first, then the untagged payload's serialization merged in.
        return {'media_type': 'movieStartingWithLowercase', **KnownForMovieWithoutTypeTag.to_json(self)}

    def encode(self) -> str:
        return json.dumps(self.to_json())


@dataclass
class TvStartingWithLowercase(KnownForEmbedded):
    poster_path: typing.Optional[str]
    id: int
    vote_average: float
    overview: str
    first_air_date: typing.Optional[str]
    name: typing.Optional[str]

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['TvStartingWithLowercase']:
        return validation.validate_with_type_tag_and_validator(value, 'media_type', 'tvStartingWithLowercase', KnownForShowWithoutTypeTag.validate, TvStartingWithLowercase)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['TvStartingWithLowercase']:
        return validation.validate_from_string(string, TvStartingWithLowercase.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'media_type': 'tvStartingWithLowercase', **KnownForShowWithoutTypeTag.to_json(self)}

    def encode(self) -> str:
        return json.dumps(self.to_json())


class KnownForEmbeddedWithUpperCase:
    """Union base dispatching on 'media_type' (capitalized-tag variants)."""

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['KnownForEmbeddedWithUpperCase']:
        return validation.validate_with_type_tags(value, 'media_type', {'Movie': Movie.validate, 'Tv': Tv.validate})

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['KnownForEmbeddedWithUpperCase']:
        return validation.validate_from_string(string, KnownForEmbeddedWithUpperCase.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        raise NotImplementedError('`to_json` is not implemented for base class `KnownForEmbeddedWithUpperCase`')

    def encode(self) -> str:
        raise NotImplementedError('`encode` is not implemented for base class `KnownForEmbeddedWithUpperCase`')


@dataclass
class Movie(KnownForEmbeddedWithUpperCase):
    poster_path: typing.Optional[str]
    id: int
    title: typing.Optional[str]
    vote_average: float
    release_date: typing.Optional[str]
    overview: str

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['Movie']:
        return validation.validate_with_type_tag_and_validator(value, 'media_type', 'Movie', KnownForMovieWithoutTypeTag.validate, Movie)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['Movie']:
        return validation.validate_from_string(string, Movie.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'media_type': 'Movie', **KnownForMovieWithoutTypeTag.to_json(self)}

    def encode(self) -> str:
        return json.dumps(self.to_json())


@dataclass
class Tv(KnownForEmbeddedWithUpperCase):
    poster_path: typing.Optional[str]
    id: int
    vote_average: float
    overview: str
    first_air_date: typing.Optional[str]
    name: typing.Optional[str]

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['Tv']:
        return validation.validate_with_type_tag_and_validator(value, 'media_type', 'Tv', KnownForShowWithoutTypeTag.validate, Tv)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['Tv']:
        return validation.validate_from_string(string, Tv.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'media_type': 'Tv', **KnownForShowWithoutTypeTag.to_json(self)}

    def encode(self) -> str:
        return json.dumps(self.to_json())


@dataclass(frozen=True)
class KnownForMovie:
    """Movie payload carrying its own literal 'movie' media_type field."""
    media_type: typing.Literal['movie']
    poster_path: typing.Optional[str]
    id: int
    title: typing.Optional[str]
    vote_average: float
    release_date: typing.Optional[str]
    overview: str

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['KnownForMovie']:
        return validation.validate_interface(value, {'media_type': validation.validate_literal('movie'), 'poster_path': validation.validate_optional(validation.validate_string), 'id': validation.validate_int, 'title': validation.validate_optional(validation.validate_string), 'vote_average': validation.validate_float, 'release_date': validation.validate_optional(validation.validate_string), 'overview': validation.validate_string}, KnownForMovie)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['KnownForMovie']:
        return validation.validate_from_string(string, KnownForMovie.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'media_type': 'movie', 'poster_path': encoding.optional_to_json(encoding.basic_to_json)(self.poster_path), 'id': self.id, 'title': encoding.optional_to_json(encoding.basic_to_json)(self.title), 'vote_average': self.vote_average, 'release_date': encoding.optional_to_json(encoding.basic_to_json)(self.release_date), 'overview': self.overview}

    def encode(self) -> str:
        return json.dumps(self.to_json())


@dataclass(frozen=True)
class KnownForShow:
    """Show payload carrying its own literal 'tv' media_type field."""
    media_type: typing.Literal['tv']
    poster_path: typing.Optional[str]
    id: int
    vote_average: float
    overview: str
    first_air_date: typing.Optional[str]
    name: typing.Optional[str]

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['KnownForShow']:
        return validation.validate_interface(value, {'media_type': validation.validate_literal('tv'), 'poster_path': validation.validate_optional(validation.validate_string), 'id': validation.validate_int, 'vote_average': validation.validate_float, 'overview': validation.validate_string, 'first_air_date': validation.validate_optional(validation.validate_string), 'name': validation.validate_optional(validation.validate_string)}, KnownForShow)

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['KnownForShow']:
        return validation.validate_from_string(string, KnownForShow.validate)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        return {'media_type': 'tv', 'poster_path': encoding.optional_to_json(encoding.basic_to_json)(self.poster_path), 'id': self.id, 'vote_average': self.vote_average, 'overview': self.overview, 'first_air_date': encoding.optional_to_json(encoding.basic_to_json)(self.first_air_date), 'name': encoding.optional_to_json(encoding.basic_to_json)(self.name)}

    def encode(self) -> str:
        return json.dumps(self.to_json())


# Untagged union alias; KnownForInterface supplies its validators/encoders.
KnownFor = typing.Union[KnownForShow, KnownForMovie, str, float]


class KnownForInterface:
    """Static helpers for the untagged KnownFor union (one-of dispatch)."""

    @staticmethod
    def validate(value: validation.Unknown) -> validation.ValidationResult['KnownFor']:
        return validation.validate_one_of(value, [KnownForShow.validate, KnownForMovie.validate, validation.validate_string, validation.validate_float])

    @staticmethod
    def decode(string: typing.Union[str, bytes]) -> validation.ValidationResult['KnownFor']:
        return validation.validate_from_string(string, KnownForInterface.validate)

    @staticmethod
    def to_json(value) -> typing.Any:
        return encoding.one_of_to_json(value, {KnownForShow: KnownForShow.to_json, KnownForMovie: KnownForMovie.to_json, str: encoding.basic_to_json, float: encoding.basic_to_json})

    @staticmethod
    def encode(value) -> str:
        # NOTE(review): calls value.to_json() directly, which would fail for the
        # str/float union members — preserved as in the generated original.
        return json.dumps(value.to_json())
47.730159
448
0.744881
2,348
21,049
6.502555
0.046422
0.036547
0.069164
0.04611
0.796633
0.787202
0.684373
0.651362
0.603157
0.572963
0
0.00066
0.136349
21,049
441
449
47.730159
0.83921
0
0
0.501484
0
0
0.111876
0.022375
0
0
0
0
0
1
0.27003
false
0
0.017804
0.237389
0.783383
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
870da62e775ae6a2f40a4764364cc7f71fc9670f
60
py
Python
workon/contrib/admin/__init__.py
dalou/django-workon
ef63c0a81c00ef560ed693e435cf3825f5170126
[ "BSD-3-Clause" ]
null
null
null
workon/contrib/admin/__init__.py
dalou/django-workon
ef63c0a81c00ef560ed693e435cf3825f5170126
[ "BSD-3-Clause" ]
null
null
null
workon/contrib/admin/__init__.py
dalou/django-workon
ef63c0a81c00ef560ed693e435cf3825f5170126
[ "BSD-3-Clause" ]
null
null
null
# Presumably Django's legacy default-AppConfig hook (pre-3.2 style), pointing
# app loading at the AdminConfig class — confirm against the project's Django
# version before removing.
default_app_config = 'workon.contrib.admin.apps.AdminConfig'
60
60
0.85
8
60
6.125
1
0
0
0
0
0
0
0
0
0
0
0
0.033333
60
1
60
60
0.844828
0
0
0
0
0
0.606557
0.606557
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
8714982948d5e0d02523ca5dedad99486272c582
27
py
Python
data/studio21_generated/introductory/4308/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/4308/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/4308/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
def make_acronym(phrase):
    """Return the acronym of *phrase*: first letter of each word, upper-cased.

    The original starter stub had a ``def`` line with no body — a syntax
    error — so this supplies the behaviour the name describes. Words are
    split on whitespace; an empty phrase yields an empty string.

    >>> make_acronym("Portable Network Graphics")
    'PNG'
    """
    return "".join(word[0] for word in phrase.split()).upper()
13.5
25
0.777778
4
27
5
1
0
0
0
0
0
0
0
0
0
0
0
0.111111
27
2
26
13.5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
87305d2812a216bf38fe8c4ece5f7e766864b757
23,104
py
Python
apps/humanmodifier.py
teddydragoone/makehuman1.0.0alpha7
9aa7735a0a1ea83e52b1acc0fb8760057dd72d99
[ "CC0-1.0" ]
2
2016-11-23T16:37:15.000Z
2018-02-13T04:18:23.000Z
apps/humanmodifier.py
Tomoyon/makehuman1.0.0alpha7
9aa7735a0a1ea83e52b1acc0fb8760057dd72d99
[ "CC0-1.0" ]
null
null
null
apps/humanmodifier.py
Tomoyon/makehuman1.0.0alpha7
9aa7735a0a1ea83e52b1acc0fb8760057dd72d99
[ "CC0-1.0" ]
null
null
null
#!/usr/bin/python # -*- coding: utf-8 -*- """ **Project Name:** MakeHuman **Product Home Page:** http://www.makehuman.org/ **Code Home Page:** http://code.google.com/p/makehuman/ **Authors:** Marc Flerackers **Copyright(c):** MakeHuman Team 2001-2011 **Licensing:** GPL3 (see also http://sites.google.com/site/makehumandocs/licensing) **Coding Standards:** See http://sites.google.com/site/makehumandocs/developers-guide Abstract -------- TO DO """ __docformat__ = 'restructuredtext' import algos3d import gui3d from string import Template from operator import mul import math import re class DetailAction: def __init__(self, human, before, after, postAction=None,update=True): self.name = 'Change detail' self.human = human self.before = before self.after = after self.postAction = postAction self.update=update def do(self): for (target, value) in self.after.iteritems(): self.human.setDetail(target, value) self.human.applyAllTargets(gui3d.app.progress, update=self.update) if self.postAction: self.postAction() return True def undo(self): for (target, value) in self.before.iteritems(): self.human.setDetail(target, value) self.human.applyAllTargets() if self.postAction: self.postAction() return True class ModifierAction: def __init__(self, human, modifier, before, after, postAction): self.name = 'Change modifier' self.human = human self.modifier = modifier self.before = before self.after = after self.postAction = postAction def do(self): self.modifier.setValue(self.human, self.after) self.human.applyAllTargets(gui3d.app.progress) self.postAction() return True def undo(self): self.modifier.setValue(self.human, self.before) self.human.applyAllTargets(gui3d.app.progress) self.postAction() return True class ModifierSlider(gui3d.Slider): def __init__(self, value=0.0, min=0.0, max=1.0, label=None, style=gui3d.SliderStyle, thumbStyle=gui3d.SliderThumbStyle, modifier=None, valueConverter=None): gui3d.Slider.__init__(self, value, min, max, label, style, thumbStyle, valueConverter) 
self.modifier = modifier self.value = None def onChanging(self, value): if gui3d.app.settings.get('realtimeUpdates', True): human = gui3d.app.selectedHuman if self.value is None: self.value = self.modifier.getValue(human) if human.isSubdivided(): if human.isProxied(): human.getProxyMesh().setVisibility(1) else: human.getSeedMesh().setVisibility(1) human.getSubdivisionMesh(False).setVisibility(0) self.modifier.updateValue(human, value, gui3d.app.settings.get('realtimeNormalUpdates', True)) human.updateProxyMesh() def onChange(self, value): human = gui3d.app.selectedHuman if self.value != value: gui3d.app.do(ModifierAction(human, self.modifier, self.value, value, self.update)) if human.isSubdivided(): if human.isProxied(): human.getProxyMesh().setVisibility(0) else: human.getSeedMesh().setVisibility(0) human.getSubdivisionMesh(False).setVisibility(1) self.value = None def update(self): human = gui3d.app.selectedHuman self.setValue(self.modifier.getValue(human)) class Modifier: def __init__(self, left, right): self.left = left self.right = right self.verts = None self.faces = None def setValue(self, human, value, update=1): value = max(-1.0, min(1.0, value)) left = -value if value < 0.0 else 0.0 right = value if value > 0.0 else 0.0 human.setDetail(self.left, left) human.setDetail(self.right, right) def getValue(self, human): value = human.getDetail(self.left) if value: return -value value = human.getDetail(self.right) if value: return value else: return 0.0 def updateValue(self, human, value, updateNormals=1): # Collect vertex and face indices if we didn't yet if not (self.verts or self.faces): # Collect verts self.verts = [] for target in (self.left, self.right): t = algos3d.getTarget(human.meshData, target) self.verts.extend(t.verts) self.verts = list(set(self.verts)) # collect faces self.faces = [] for vindex in self.verts: self.faces += [face.idx for face in human.meshData.verts[vindex].sharedFaces] self.faces = list(set(self.faces)) # Remove old targets 
# NOTE(review): the original (whitespace-mangled) chunk began with the tail of a
# preceding asymmetric modifier's updateValue() — remove left/right translation
# targets, setValue(), re-add targets, recalc normals, mesh update. Its class
# header and `def` line lie outside this chunk, so that fragment is not
# reconstructed here; confirm against the full file.


class GenericModifier:
    """Base class for template-driven morph-target modifiers.

    A modifier owns a list of ``(targetName, [factorKeys])`` pairs built by
    ``expandTemplate()``.  Subclasses provide ``expandTemplate()``,
    ``getFactors()`` and ``clampValue()``.
    """

    def __init__(self, template):
        self.template = template
        # Expand the template into concrete (targetname, [factors]) pairs.
        self.targets = self.expandTemplate([(self.template, [])])
        # Vertex/face index caches, filled lazily on first updateValue().
        self.verts = None
        self.faces = None

    def setValue(self, human, value):
        """Store ``value`` on the human, weighted per target by its factors."""
        value = self.clampValue(value)
        factors = self.getFactors(human, value)
        for target in self.targets:
            weight = reduce(mul, [factors[name] for name in target[1]])
            human.setDetail(target[0], value * weight)

    def getValue(self, human):
        """Return the summed detail value over all targets of this modifier."""
        return sum([human.getDetail(target[0]) for target in self.targets])

    def updateValue(self, human, value, updateNormals=1):
        """Apply ``value`` to the mesh by swapping old target weights for new ones."""
        # Collect vertex and face indices if we didn't yet
        if not (self.verts or self.faces):
            # Collect verts
            self.verts = []
            for target in self.targets:
                t = algos3d.getTarget(human.meshData, target[0])
                self.verts.extend(t.verts)
            self.verts = list(set(self.verts))
            # Collect faces
            self.faces = []
            for vindex in self.verts:
                self.faces += [face.idx for face in human.meshData.verts[vindex].sharedFaces]
            self.faces = list(set(self.faces))

        # Remove old targets
        for target in self.targets:
            algos3d.loadTranslationTarget(human.meshData, target[0], -human.getDetail(target[0]), None, 0, 0)

        # Update detail state
        self.setValue(human, value)

        # Add new targets
        for target in self.targets:
            algos3d.loadTranslationTarget(human.meshData, target[0], human.getDetail(target[0]), None, 0, 0)

        # Update vertices
        faces = [human.meshData.faces[i] for i in self.faces]
        vertices = [human.meshData.verts[i] for i in self.verts]
        if updateNormals:
            human.meshData.calcNormals(1, 1, vertices, faces)
        human.meshData.update(vertices, updateNormals)


class SimpleModifier(GenericModifier):
    """Modifier with a single target and a constant factor of 1.0."""

    def __init__(self, template):
        GenericModifier.__init__(self, template)

    # overrides

    def expandTemplate(self, targets):
        # A single dummy factor so the generic weighting machinery applies.
        return [(target[0], target[1] + ['dummy']) for target in targets]

    def getFactors(self, human, value):
        return {'dummy': 1.0}

    def clampValue(self, value):
        return max(0.0, min(1.0, value))


class AgeModifier(GenericModifier):
    """Modifier whose targets vary with the human's age mix."""

    def __init__(self, template):
        GenericModifier.__init__(self, template)

    # overrides

    def expandTemplate(self, targets):
        # Build target list of (targetname, [factors])
        return [(Template(target[0]).safe_substitute(age=value), target[1] + [value])
                for target in targets
                for value in ['child', 'young', 'old']]

    def getFactors(self, human, value):
        return {
            'child': human.childVal,
            'young': human.youngVal,
            'old': human.oldVal,
        }

    def clampValue(self, value):
        return max(0.0, min(1.0, value))


class GenderModifier(GenericModifier):
    """Modifier whose targets vary with the human's gender mix."""

    def __init__(self, template):
        GenericModifier.__init__(self, template)

    # overrides

    def expandTemplate(self, targets):
        # Build target list of (targetname, [factors])
        return [(Template(target[0]).safe_substitute(gender=value), target[1] + [value])
                for target in targets
                for value in ['female', 'male']]

    def getFactors(self, human, value):
        return {
            'female': human.femaleVal,
            'male': human.maleVal,
        }

    def clampValue(self, value):
        return max(0.0, min(1.0, value))


class GenderAgeModifier(GenericModifier):
    """Modifier whose targets vary with both gender and age."""

    def __init__(self, template):
        GenericModifier.__init__(self, template)

    # overrides

    def expandTemplate(self, targets):
        # Build target list of (targetname, [factors])
        targets = [(Template(target[0]).safe_substitute(gender=value), target[1] + [value])
                   for target in targets for value in ['female', 'male']]
        targets = [(Template(target[0]).safe_substitute(age=value), target[1] + [value])
                   for target in targets for value in ['child', 'young', 'old']]
        return targets

    def getFactors(self, human, value):
        return {
            'female': human.femaleVal,
            'male': human.maleVal,
            'child': human.childVal,
            'young': human.youngVal,
            'old': human.oldVal,
        }

    def clampValue(self, value):
        return max(0.0, min(1.0, value))


class GenderEthnicModifier(GenericModifier):
    """Modifier whose targets vary with gender and ethnicity (caucasian baseline)."""

    def __init__(self, template):
        GenericModifier.__init__(self, template)

    # overrides

    def expandTemplate(self, targets):
        # Build target list of (targetname, [factors])
        targets = [(Template(target[0]).safe_substitute(gender=value), target[1] + [value])
                   for target in targets for value in ['female', 'male']]
        targets = [(Template(target[0]).safe_substitute(ethnic=value), target[1] + [value])
                   for target in targets for value in ['caucasian', 'african', 'asian']]
        return targets

    def getFactors(self, human, value):
        # Only non-zero ethnic components take part in the normalisation.
        ethnics = [val for val in [human.africanVal, human.asianVal] if val > 0.0]
        return {
            'female': human.femaleVal,
            'male': human.maleVal,
            'african': human.africanVal / len(ethnics) if ethnics else human.africanVal,
            'asian': human.asianVal / len(ethnics) if ethnics else human.asianVal,
            'caucasian': (1.0 - sum(ethnics) / len(ethnics)) if ethnics else 1.0,
        }

    def clampValue(self, value):
        return max(0.0, min(1.0, value))


class GenderAgeEthnicModifier(GenericModifier):
    """Modifier over gender, age and ethnicity with a 'neutral' ethnic baseline."""

    def __init__(self, template):
        GenericModifier.__init__(self, template)

    # overrides

    def expandTemplate(self, targets):
        # Build target list of (targetname, [factors])
        targets = [(Template(target[0]).safe_substitute(gender=value), target[1] + [value])
                   for target in targets for value in ['female', 'male']]
        targets = [(Template(target[0]).safe_substitute(age=value), target[1] + [value])
                   for target in targets for value in ['child', 'young', 'old']]
        targets = [(Template(target[0]).safe_substitute(ethnic=value), target[1] + [value])
                   for target in targets for value in ['neutral', 'african', 'asian']]
        return targets

    def getFactors(self, human, value):
        # Only non-zero ethnic components take part in the normalisation.
        ethnics = [val for val in [human.africanVal, human.asianVal] if val > 0.0]
        return {
            'female': human.femaleVal,
            'male': human.maleVal,
            'child': human.childVal,
            'young': human.youngVal,
            'old': human.oldVal,
            'african': human.africanVal / len(ethnics) if ethnics else human.africanVal,
            'asian': human.asianVal / len(ethnics) if ethnics else human.asianVal,
            'neutral': (1.0 - sum(ethnics) / len(ethnics)) if ethnics else 1.0,
        }

    def clampValue(self, value):
        return max(0.0, min(1.0, value))


class GenderAgeEthnicModifier2(GenericModifier):
    """Modifier over gender, age and ethnicity with a 'caucasian' ethnic baseline."""

    def __init__(self, template):
        GenericModifier.__init__(self, template)

    # overrides

    def expandTemplate(self, targets):
        # Build target list of (targetname, [factors])
        targets = [(Template(target[0]).safe_substitute(gender=value), target[1] + [value])
                   for target in targets for value in ['female', 'male']]
        targets = [(Template(target[0]).safe_substitute(age=value), target[1] + [value])
                   for target in targets for value in ['child', 'young', 'old']]
        targets = [(Template(target[0]).safe_substitute(ethnic=value), target[1] + [value])
                   for target in targets for value in ['caucasian', 'african', 'asian']]
        return targets

    def getFactors(self, human, value):
        # Only non-zero ethnic components take part in the normalisation.
        ethnics = [val for val in [human.africanVal, human.asianVal] if val > 0.0]
        return {
            'female': human.femaleVal,
            'male': human.maleVal,
            'child': human.childVal,
            'young': human.youngVal,
            'old': human.oldVal,
            'african': human.africanVal / len(ethnics) if ethnics else human.africanVal,
            'asian': human.asianVal / len(ethnics) if ethnics else human.asianVal,
            'caucasian': (1.0 - sum(ethnics) / len(ethnics)) if ethnics else 1.0,
        }

    def clampValue(self, value):
        return max(0.0, min(1.0, value))


class GenderAgeMuscleWeightModifier(GenericModifier):
    """Modifier over gender, age, muscle tone and body weight."""

    def __init__(self, template):
        GenericModifier.__init__(self, template)

    # overrides

    def expandTemplate(self, targets):
        # Build target list of (targetname, [factors])
        targets = [(Template(target[0]).safe_substitute(gender=value), target[1] + [value])
                   for target in targets for value in ['female', 'male']]
        targets = [(Template(target[0]).safe_substitute(age=value), target[1] + [value])
                   for target in targets for value in ['child', 'young', 'old']]
        # The empty string stands for the average tone/weight in the file name;
        # it still maps to an explicit factor key for the lookup in setValue().
        targets = [(Template(target[0]).safe_substitute(tone=value), target[1] + [value or 'averageTone'])
                   for target in targets for value in ['flaccid', '', 'muscle']]
        targets = [(Template(target[0]).safe_substitute(weight=value), target[1] + [value or 'averageWeight'])
                   for target in targets for value in ['light', '', 'heavy']]
        # Cleanup multiple hyphens and remove a possible hyphen before a dot.
        doubleHyphen = re.compile(r'-+')
        hyphenDot = re.compile(r'-\.')
        targets = [(re.sub(hyphenDot, '.', re.sub(doubleHyphen, '-', target[0])), target[1])
                   for target in targets]
        return targets

    def getFactors(self, human, value):
        return {
            'female': human.femaleVal,
            'male': human.maleVal,
            'child': human.childVal,
            'young': human.youngVal,
            'old': human.oldVal,
            'flaccid': human.flaccidVal,
            'muscle': human.muscleVal,
            'averageTone': 1.0 - (human.flaccidVal + human.muscleVal),
            'light': human.underweightVal,
            'heavy': human.overweightVal,
            'averageWeight': 1.0 - (human.underweightVal + human.overweightVal),
        }

    def clampValue(self, value):
        return max(0.0, min(1.0, value))


class GenderAgeRangeModifier(GenderAgeModifier):
    """Gender/age modifier with an extra discrete parameter range.

    ``value`` in [0, 1] is mapped onto ``parameterRange`` by linear
    interpolation between neighbouring entries.
    """

    def __init__(self, template, parameterName, parameterRange, always=True):
        self.parameterName = parameterName
        self.parameterRange = parameterRange
        self.always = always
        GenderAgeModifier.__init__(self, template)

    # overrides

    def expandTemplate(self, targets):
        targets = GenderAgeModifier.expandTemplate(self, targets)
        # Build target list of (targetname, [factors])
        targets = [(Template(target[0]).safe_substitute({self.parameterName: str(value)}), target[1] + [str(value)])
                   for target in targets for value in self.parameterRange]
        return targets

    def getFactors(self, human, value):
        factors = GenderAgeModifier.getFactors(self, human, value)
        for factor in self.parameterRange:
            factors[str(factor)] = 0.0
        if self.always:
            # always
            # a    b    c    d
            # 0    1    2    3
            # 0.0 0.33 0.66 1.0
            v = value * (len(self.parameterRange) - 1)
            index = int(math.floor(v))
            v = v - index
            factors[str(self.parameterRange[index])] = 1.0 - v
            if index + 1 < len(self.parameterRange):
                factors[str(self.parameterRange[index + 1])] = v
        else:
            # not always
            # a    b    c    d
            # 0    1    2    3
            # 0.0 0.25 0.50 0.75 1.0
            # 0    1    2    3    4
            v = value * len(self.parameterRange)
            index = int(math.floor(v))
            v = v - index
            if index > 0:
                factors[str(self.parameterRange[index - 1])] = 1.0 - v
            if index < len(self.parameterRange):
                factors[str(self.parameterRange[index])] = v
        return factors

    def clampValue(self, value):
        return max(0.0, min(1.0, value))


class GenderAgeAsymmetricModifier(GenderAgeModifier):
    """Signed modifier: negative values drive the left target, positive the right."""

    def __init__(self, template, parameterName, left, right, always=True):
        self.parameterName = parameterName
        self.left = left
        self.right = right
        self.always = always
        GenderAgeModifier.__init__(self, template)

    # overrides

    def setValue(self, human, value):
        # The sign is folded into the factors, so only the product is stored.
        value = self.clampValue(value)
        factors = self.getFactors(human, value)
        for target in self.targets:
            human.setDetail(target[0], reduce(mul, [factors[name] for name in target[1]]))

    def expandTemplate(self, targets):
        targets = GenderAgeModifier.expandTemplate(self, targets)
        # Build target list of (targetname, [factors])
        targets = [(Template(target[0]).safe_substitute({self.parameterName: value}), target[1] + [value])
                   for target in targets for value in [self.left, self.right]]
        return targets

    def getFactors(self, human, value):
        factors = GenderAgeModifier.getFactors(self, human, value)
        factors.update({
            self.left: -min(value, 0.0),
            self.right: max(0.0, value),
        })
        return factors

    def clampValue(self, value):
        return max(-1.0, min(1.0, value))


class GenderAgeEthnicAsymmetricModifier(GenderAgeEthnicModifier2):
    """Signed gender/age/ethnic modifier with left/right target pairs."""

    def __init__(self, template, parameterName, left, right, always=True):
        self.parameterName = parameterName
        self.left = left
        self.right = right
        self.always = always
        GenderAgeEthnicModifier2.__init__(self, template)

    # overrides

    def setValue(self, human, value):
        # The sign is folded into the factors, so only the product is stored.
        value = self.clampValue(value)
        factors = self.getFactors(human, value)
        for target in self.targets:
            human.setDetail(target[0], reduce(mul, [factors[name] for name in target[1]]))

    def getValue(self, human):
        # Any non-zero left amount is reported as negative; otherwise the right sum.
        left = sum([human.getDetail(target[0]) for target in self.targets if self.left in target[0]])
        if left:
            return -left
        return sum([human.getDetail(target[0]) for target in self.targets if self.right in target[0]])

    def expandTemplate(self, targets):
        targets = GenderAgeEthnicModifier2.expandTemplate(self, targets)
        # Build target list of (targetname, [factors])
        targets = [(Template(target[0]).safe_substitute({self.parameterName: value}), target[1] + [value])
                   for target in targets for value in [self.left, self.right]]
        return targets

    def getFactors(self, human, value):
        factors = GenderAgeEthnicModifier2.getFactors(self, human, value)
        factors.update({
            self.left: -min(value, 0.0),
            self.right: max(0.0, value),
        })
        return factors

    def clampValue(self, value):
        return max(-1.0, min(1.0, value))
35.165906
172
0.563063
2,424
23,104
5.311056
0.097772
0.020662
0.027342
0.03076
0.785537
0.770312
0.737533
0.707628
0.688597
0.648128
0
0.017241
0.327216
23,104
656
173
35.219512
0.810988
0.072671
0
0.68593
0
0
0.027186
0.001014
0
0
0
0
0
1
0.163317
false
0
0.015075
0.030151
0.326633
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4