hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
ab40be692f40c67b0053a6b2430bca85a59ddccb
56
py
Python
mancala/__init__.py
samjcus/mancala
0662fffe23606ca851e64168383fddc84f0035f5
[ "MIT" ]
null
null
null
mancala/__init__.py
samjcus/mancala
0662fffe23606ca851e64168383fddc84f0035f5
[ "MIT" ]
null
null
null
mancala/__init__.py
samjcus/mancala
0662fffe23606ca851e64168383fddc84f0035f5
[ "MIT" ]
null
null
null
from .mancala import * from .version import __version__
18.666667
32
0.803571
7
56
5.857143
0.571429
0
0
0
0
0
0
0
0
0
0
0
0.142857
56
2
33
28
0.854167
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ab51bd5caf2db8f10061e2e4650c63ab5a8c0946
339
py
Python
weconnect/errors.py
tillsteinbach/WeConnect-python
8e8eade47eab9479a65d714de5009c5be975a59f
[ "MIT" ]
22
2021-08-06T21:01:11.000Z
2022-03-21T14:54:25.000Z
weconnect/errors.py
tillsteinbach/WeConnect-python
8e8eade47eab9479a65d714de5009c5be975a59f
[ "MIT" ]
30
2021-06-20T21:11:26.000Z
2022-03-22T21:11:04.000Z
weconnect/errors.py
tillsteinbach/WeConnect-python
8e8eade47eab9479a65d714de5009c5be975a59f
[ "MIT" ]
8
2021-08-06T21:01:29.000Z
2022-02-02T21:32:14.000Z
class RetrievalError(Exception): pass class SetterError(Exception): pass class ControlError(SetterError): pass class AuthentificationError(Exception): pass class TemporaryAuthentificationError(AuthentificationError): pass class APICompatibilityError(Exception): pass class APIError(Exception): pass
12.555556
60
0.758112
28
339
9.178571
0.357143
0.210117
0.280156
0
0
0
0
0
0
0
0
0
0.179941
339
26
61
13.038462
0.92446
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
ab561d147a5defdffc68e6b847a3af139541c38c
121
py
Python
game_finder/admin.py
stuart-bradley/steam_lan_game_finder
345cc02b64da7269887dcce7014ddb5d5b6194bd
[ "MIT" ]
1
2018-01-09T11:44:54.000Z
2018-01-09T11:44:54.000Z
game_finder/admin.py
stuart-bradley/steam_lan_game_finder
345cc02b64da7269887dcce7014ddb5d5b6194bd
[ "MIT" ]
1
2020-10-01T23:21:34.000Z
2020-10-01T23:21:34.000Z
game_finder/admin.py
lutrasdebtra/steam_lan_game_finder
345cc02b64da7269887dcce7014ddb5d5b6194bd
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Game, Tag # Register your models here. admin.site.register(Game)
15.125
32
0.77686
18
121
5.222222
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.14876
121
7
33
17.285714
0.912621
0.214876
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ab629e06e669bd55bbabaf1150231b90810b6f39
13
py
Python
Python/hehe.py
ItaloQoliveira/Terceiro-Semestre
72ce154f7d72ac874416da9827d6f438e9500149
[ "Apache-2.0" ]
null
null
null
Python/hehe.py
ItaloQoliveira/Terceiro-Semestre
72ce154f7d72ac874416da9827d6f438e9500149
[ "Apache-2.0" ]
null
null
null
Python/hehe.py
ItaloQoliveira/Terceiro-Semestre
72ce154f7d72ac874416da9827d6f438e9500149
[ "Apache-2.0" ]
null
null
null
print("hehe")
13
13
0.692308
2
13
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0
13
1
13
13
0.692308
0
0
0
0
0
0.285714
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
ab63808c92fc1ffdddba095081af41594e0abc81
113
py
Python
tests/workflow.py
mullikine/chronology
3c033d242955fa1df506157b3e26191ac8754c03
[ "MIT" ]
189
2020-12-30T21:34:44.000Z
2022-03-31T20:22:52.000Z
tests/workflow.py
mullikine/chronology
3c033d242955fa1df506157b3e26191ac8754c03
[ "MIT" ]
10
2021-01-10T23:03:17.000Z
2021-12-22T04:27:52.000Z
tests/workflow.py
mullikine/chronology
3c033d242955fa1df506157b3e26191ac8754c03
[ "MIT" ]
53
2021-01-28T16:02:55.000Z
2022-03-13T21:00:25.000Z
import asyncio # TODO import __init__ above async def logic(): # TODO add tests pass # main(logic)
8.692308
28
0.654867
15
113
4.666667
0.8
0
0
0
0
0
0
0
0
0
0
0
0.274336
113
12
29
9.416667
0.853659
0.469027
0
0
0
0
0
0
0
0
0
0.083333
0
1
0
true
0.333333
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
1
1
1
0
0
0
0
5
db70edd769a79b354c12f7a8267849419ec17d56
92
py
Python
src/losses/__init__.py
johanattia/tensorflow-saint
95bbfaf51c1f1be96a2884dc664b10e9dd183984
[ "MIT" ]
3
2022-03-30T20:07:55.000Z
2022-03-30T21:40:14.000Z
src/losses/__init__.py
johanattia/tensorflow-saint
95bbfaf51c1f1be96a2884dc664b10e9dd183984
[ "MIT" ]
null
null
null
src/losses/__init__.py
johanattia/tensorflow-saint
95bbfaf51c1f1be96a2884dc664b10e9dd183984
[ "MIT" ]
null
null
null
from .barlow_twins import BarlowTwins from .simclr import SimCLR from .vicreg import VICReg
23
37
0.836957
13
92
5.846154
0.538462
0
0
0
0
0
0
0
0
0
0
0
0.130435
92
3
38
30.666667
0.95
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
db8df945d325882c047372322448b360eb21335d
79
py
Python
db_cache/__init__.py
dskrypa/db_cache
02623c404ff4861bdcd07549b7e9691ec7654974
[ "Apache-2.0" ]
null
null
null
db_cache/__init__.py
dskrypa/db_cache
02623c404ff4861bdcd07549b7e9691ec7654974
[ "Apache-2.0" ]
null
null
null
db_cache/__init__.py
dskrypa/db_cache
02623c404ff4861bdcd07549b7e9691ec7654974
[ "Apache-2.0" ]
null
null
null
""" :author: Doug Skrypa """ from .__version__ import * from .caches import *
11.285714
26
0.670886
9
79
5.444444
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.177215
79
6
27
13.166667
0.753846
0.253165
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
db993546fe222d7b93290d83f11382ae368bf5cf
41
py
Python
inference_all.py
jaemin93/pytorch-deeplab-xception
4ae96b90d19c22502170ce0bd7d5a9ce87db2f7f
[ "MIT" ]
20
2019-11-06T02:47:20.000Z
2022-03-16T01:43:00.000Z
inference_all.py
jaemin93/pytorch-deeplab-xception
4ae96b90d19c22502170ce0bd7d5a9ce87db2f7f
[ "MIT" ]
null
null
null
inference_all.py
jaemin93/pytorch-deeplab-xception
4ae96b90d19c22502170ce0bd7d5a9ce87db2f7f
[ "MIT" ]
7
2019-10-12T10:30:01.000Z
2022-03-16T01:43:02.000Z
import os os.system('bash inference.sh')
13.666667
30
0.756098
7
41
4.428571
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.097561
41
3
30
13.666667
0.837838
0
0
0
0
0
0.404762
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
db9b06c31ab097b3198f2ac66af2b6f70893e46b
28
py
Python
{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/hw11/param11.py
sequoiap/cookiecutter-controlshw
bf71c7b9e497b1c3a2af4d06a843c2dab9f2a1d7
[ "MIT" ]
null
null
null
{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/hw11/param11.py
sequoiap/cookiecutter-controlshw
bf71c7b9e497b1c3a2af4d06a843c2dab9f2a1d7
[ "MIT" ]
1
2020-12-18T23:37:12.000Z
2020-12-18T23:37:13.000Z
{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/hw11/param11.py
sequoiap/cookiecutter-controlshw
bf71c7b9e497b1c3a2af4d06a843c2dab9f2a1d7
[ "MIT" ]
null
null
null
# Problem 11 Parameter File
14
27
0.785714
4
28
5.5
1
0
0
0
0
0
0
0
0
0
0
0.086957
0.178571
28
1
28
28
0.869565
0.892857
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
db9c68f19d08c822f4e591369446f3bda954a59e
118
py
Python
spectroscope/__init__.py
peterblockman/spectroscope
2a33742838a95c1e56ce0e13cc7719c56e9d6cc8
[ "Apache-2.0" ]
null
null
null
spectroscope/__init__.py
peterblockman/spectroscope
2a33742838a95c1e56ce0e13cc7719c56e9d6cc8
[ "Apache-2.0" ]
null
null
null
spectroscope/__init__.py
peterblockman/spectroscope
2a33742838a95c1e56ce0e13cc7719c56e9d6cc8
[ "Apache-2.0" ]
null
null
null
import logging def log(): logging.basicConfig(level=logging.DEBUG) return logging.getLogger("spectroscope")
16.857143
44
0.745763
13
118
6.769231
0.769231
0
0
0
0
0
0
0
0
0
0
0
0.144068
118
6
45
19.666667
0.871287
0
0
0
0
0
0.101695
0
0
0
0
0
0
1
0.25
true
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
1
0
0
5
dbb2229a7292298099f4620506043335cb090c87
32
py
Python
mbench/demographic/__init__.py
sepmein/mBench
5f2d7115ac8178ca0e076cfceebc75fa8d46127d
[ "BSD-3-Clause" ]
1
2022-03-21T14:18:11.000Z
2022-03-21T14:18:11.000Z
mbench/demographic/__init__.py
sepmein/mBench
5f2d7115ac8178ca0e076cfceebc75fa8d46127d
[ "BSD-3-Clause" ]
6
2022-03-04T11:37:52.000Z
2022-03-31T19:13:43.000Z
mbench/demographic/__init__.py
sepmein/mBench
5f2d7115ac8178ca0e076cfceebc75fa8d46127d
[ "BSD-3-Clause" ]
null
null
null
from .reformat import adm1_name
16
31
0.84375
5
32
5.2
1
0
0
0
0
0
0
0
0
0
0
0.035714
0.125
32
1
32
32
0.892857
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
dbc60a4fda6bf90361dd0c5b8820cbfc1b83c931
41
py
Python
cowait/cli/errors.py
ProgHaj/cowait
e95c30faab8caf8b0413de4e1784529a3a06475d
[ "Apache-2.0" ]
51
2020-06-04T06:08:14.000Z
2022-03-28T06:59:53.000Z
cowait/cli/errors.py
ProgHaj/cowait
e95c30faab8caf8b0413de4e1784529a3a06475d
[ "Apache-2.0" ]
121
2020-06-01T12:09:32.000Z
2022-03-31T20:47:57.000Z
cowait/cli/errors.py
ProgHaj/cowait
e95c30faab8caf8b0413de4e1784529a3a06475d
[ "Apache-2.0" ]
6
2020-06-11T16:05:20.000Z
2022-03-23T06:30:17.000Z
class CliError(RuntimeError): pass
8.2
29
0.707317
4
41
7.25
1
0
0
0
0
0
0
0
0
0
0
0
0.219512
41
4
30
10.25
0.90625
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
dbc6978e021754b3979926339217f5c527995729
1,074
py
Python
python/anyascii/_data/_094.py
casept/anyascii
d4f426b91751254b68eaa84c6cd23099edd668e6
[ "ISC" ]
null
null
null
python/anyascii/_data/_094.py
casept/anyascii
d4f426b91751254b68eaa84c6cd23099edd668e6
[ "ISC" ]
null
null
null
python/anyascii/_data/_094.py
casept/anyascii
d4f426b91751254b68eaa84c6cd23099edd668e6
[ "ISC" ]
null
null
null
b='Kui Si Liu Nao Huang Pie Sui Fan Qiao Quan Yang Tang Xiang Jue Jiao Zun Liao Qie Lao Dui Xin Zan Ji Jian Zhong Deng Ya Ying Dui Jue Nou Zan Pu Tie Fan Cheng Ding Shan Kai Jian Fei Sui Lu Juan Hui Yu Lian Zhuo Qiao Jian Zhuo Lei Bi Tie Huan Ye Duo Guo Dang Ju Fen Da Bei Yi Ai Zong Xun Diao Zhu Heng Zhui Ji Nie He Huo Qing Bin Ying Kui Ning Xu Jian Jian Qian Cha Zhi Mie Li Lei Ji Zuan Kuang Shang Peng La Du Shuo Chuo Lu Biao Bao Lu Xian Kuan Long E Lu Xin Jian Lan Bo Jian Yao Chan Xiang Jian Xi Guan Cang Nie Lei Cuan Qu Pan Luo Zuan Luan Zao Nie Jue Tang Zhu Lan Jin Ga Yi Zhen Ding Zhao Po Liao Tu Qian Chuan Shan Sa Fan Diao Men Nu Yang Chai Xing Gai Bu Tai Ju Dun Chao Zhong Na Bei Gang Ban Qian Yao Qin Jun Wu Gou Kang Fang Huo Tou Niu Ba Yu Qian Zheng Qian Gu Bo Ke Po Bu Bo Yue Zuan Mu Tan Jia Dian You Tie Bo Ling Shuo Qian Mao Bao Shi Xuan Ta Bi Ni Pi Duo Xing Kao Lao Er Mang Ya You Cheng Jia Ye Nao Zhi Dang Tong Lu Diao Yin Kai Zha Zhu Xi Ding Diu Xian Hua Quan Sha Ha Diao Ge Ming Zheng Se Jiao Yi Chan Chong Tang An Yin Ru Zhu Lao Pu Wu Lai Te Lian Keng'
1,074
1,074
0.759777
257
1,074
3.175097
0.642023
0
0
0
0
0
0
0
0
0
0
0
0.23743
1,074
1
1,074
1,074
0.996337
0
0
0
0
1
0.995349
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
dbccc0c920c4a4867eb34c3fb14e2c1b669b9d49
49
py
Python
tgml/validator/generator/yamodel.py
kshaposhnikov/twitter-graph-model
31ef99bda4dce7148f3ba56ffa9337e08b0fa99d
[ "MIT" ]
null
null
null
tgml/validator/generator/yamodel.py
kshaposhnikov/twitter-graph-model
31ef99bda4dce7148f3ba56ffa9337e08b0fa99d
[ "MIT" ]
null
null
null
tgml/validator/generator/yamodel.py
kshaposhnikov/twitter-graph-model
31ef99bda4dce7148f3ba56ffa9337e08b0fa99d
[ "MIT" ]
null
null
null
# Yet Another Model for generating random graphs
24.5
48
0.816327
7
49
5.714286
1
0
0
0
0
0
0
0
0
0
0
0
0.163265
49
1
49
49
0.97561
0.938776
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
91617afe73a41424cc500c4b763b59d8218daeb3
86
py
Python
rainy/utils/__init__.py
alexmlamb/blocks_rl_gru_setup
fe462f79518d14f828e2c7cbf210cd105ff982f4
[ "Apache-2.0" ]
null
null
null
rainy/utils/__init__.py
alexmlamb/blocks_rl_gru_setup
fe462f79518d14f828e2c7cbf210cd105ff982f4
[ "Apache-2.0" ]
null
null
null
rainy/utils/__init__.py
alexmlamb/blocks_rl_gru_setup
fe462f79518d14f828e2c7cbf210cd105ff982f4
[ "Apache-2.0" ]
null
null
null
from .device import Device from .log import Logger from .sample import sample_indices
21.5
34
0.825581
13
86
5.384615
0.538462
0
0
0
0
0
0
0
0
0
0
0
0.139535
86
3
35
28.666667
0.945946
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
91a13341106093afc4543e6c5767be9c31055d3b
69
py
Python
server/routes/__init__.py
MrCreeper1008/covid19-alarm
2f07a602c60bafacab3aa977a22d11fc1d07b878
[ "MIT" ]
1
2021-11-30T11:03:36.000Z
2021-11-30T11:03:36.000Z
server/routes/__init__.py
kennethnym/covid19-alarm
2f07a602c60bafacab3aa977a22d11fc1d07b878
[ "MIT" ]
null
null
null
server/routes/__init__.py
kennethnym/covid19-alarm
2f07a602c60bafacab3aa977a22d11fc1d07b878
[ "MIT" ]
null
null
null
""" Initializes server routes """ import server.routes.alarms.route
11.5
33
0.753623
8
69
6.5
0.75
0.461538
0
0
0
0
0
0
0
0
0
0
0.115942
69
5
34
13.8
0.852459
0.362319
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
91b13a08d192568525c8916ecb79c6f711b0764d
2,637
py
Python
gdesk/panels/imgview/spectrogram.py
thocoo/gamma-desk
9cb63a65fe23e30e155b3beca862f369b7fa1b7e
[ "Apache-2.0" ]
null
null
null
gdesk/panels/imgview/spectrogram.py
thocoo/gamma-desk
9cb63a65fe23e30e155b3beca862f369b7fa1b7e
[ "Apache-2.0" ]
8
2021-04-09T11:31:43.000Z
2021-06-09T09:07:18.000Z
gdesk/panels/imgview/spectrogram.py
thocoo/gamma-desk
9cb63a65fe23e30e155b3beca862f369b7fa1b7e
[ "Apache-2.0" ]
null
null
null
import sys import numpy as np from ... import gui def spectrogram(arr, vertical=False, plot=True): """ Calculates the fullnoise, whitenoise and whiteness. and plot a spectrogram. https://www.emva.org/wp-content/uploads/EMVA1288-3.1a.pdf :param np.ndarray arr: A 2 dimensional array :param bool vertical: Calculate in vertical direction :param bool plot: Plot and print :returns: fullnoise, whitenoise, whiteness :rtype: tuple(float, float, float) """ if vertical: return spectr_vert(arr, plot) else: return spectr_hori(arr, plot) def spectr_hori(arr, plot=True): """ Calculates the fullnoise, whitenoise and whiteness. and plot a horizontal spectrogram. :param np.ndarray arr: A 2 dimensional array :returns: fullnoise, whitenoise, whiteness :rtype: tuple(float, float, float) """ arr = arr.astype('double') ydim, xdim = arr.shape arr -= arr.mean() mag = abs(np.fft.fft(arr,axis=1)) / xdim ** 0.5 spr = (np.sum(mag**2,0) / ydim) ** 0.5 fullnoise = (np.sum(spr**2) / (xdim+1))** 0.5 whitenoise = np.median(spr) whiteness = (fullnoise / whitenoise) if plot: plt = gui.prepareplot() plt.figure() plt.grid(True) plt.title('Horizontal Spectrogram') plt.plot(spr) print("Fullnoise : %8.2f" % fullnoise) print("WhiteNoise : %8.2f" % whitenoise) print("The whiteness: %8.2f Ideal this is 1" % whiteness) plt.show() return fullnoise, whitenoise, whiteness def spectr_vert(arr, plot=True): """ Calculates the fullnoise, whitenoise and whiteness. and plot a vertical spectrogram. 
:param np.ndarray arr: A 2 dimensional array :returns: fullnoise, whitenoise, whiteness :rtype: tuple(float, float, float) """ arr = arr.astype('double') ydim, xdim = arr.shape arr -= arr.mean() mag = abs(np.fft.fft(arr,axis=0)) / ydim ** 0.5 spr = (np.sum(mag**2,1) / xdim) ** 0.5 fullnoise = (np.sum(spr**2) / (ydim+1))** 0.5 whitenoise = np.median(spr) whiteness = (fullnoise / whitenoise) if plot: plt = gui.prepareplot() plt.figure() plt.grid(True) plt.title('Vertical Spectrogram') plt.plot(spr) print("Fullnoise : %8.2f" % fullnoise) print("WhiteNoise : %8.2f" % whitenoise) print("The whiteness: %8.2f Ideal this is 1" % whiteness) plt.show() return fullnoise, whitenoise, whiteness
29.965909
67
0.590823
328
2,637
4.737805
0.243902
0.122265
0.09009
0.040541
0.779279
0.779279
0.779279
0.735521
0.712999
0.674389
0
0.02381
0.283276
2,637
88
68
29.965909
0.798413
0.293136
0
0.625
0
0
0.121629
0
0
0
0
0
0
1
0.0625
false
0
0.0625
0
0.208333
0.125
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
91be9bed56b1fc547d7c23a71e3d1fe6c60f4364
191
py
Python
app/idealers/orders/exceptions.py
leydson-vieira/dealers
14f2f307f0f4497eec92f65d01ef111b42d528b9
[ "MIT" ]
null
null
null
app/idealers/orders/exceptions.py
leydson-vieira/dealers
14f2f307f0f4497eec92f65d01ef111b42d528b9
[ "MIT" ]
null
null
null
app/idealers/orders/exceptions.py
leydson-vieira/dealers
14f2f307f0f4497eec92f65d01ef111b42d528b9
[ "MIT" ]
null
null
null
class OrderCodeAlreadyExists(Exception): pass class DealerDoesNotExist(Exception): pass class OrderDoesNotExist(Exception): pass class StatusNotAllowed(Exception): pass
12.733333
40
0.759162
16
191
9.0625
0.4375
0.358621
0.372414
0
0
0
0
0
0
0
0
0
0.17801
191
14
41
13.642857
0.923567
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
91d7564efb364b1bab395f297e16050f9619810c
183
py
Python
kanjinetworks/__init__.py
acoomans/kanjinetworks
842d9dfc1fce2f9b187e22fb6bcc4b4311b26e26
[ "MIT" ]
6
2016-10-04T02:28:44.000Z
2021-06-11T07:06:58.000Z
kanjinetworks/__init__.py
acoomans/kanjinetworks
842d9dfc1fce2f9b187e22fb6bcc4b4311b26e26
[ "MIT" ]
null
null
null
kanjinetworks/__init__.py
acoomans/kanjinetworks
842d9dfc1fce2f9b187e22fb6bcc4b4311b26e26
[ "MIT" ]
3
2017-06-10T18:27:20.000Z
2019-06-09T20:26:40.000Z
from kanji import Kanji from extract.extractor import get_text from extract.parser import KanjiNetworksParser from export.japaneseapp import JapaneseApp3Exporter, JapaneseApp4Exporter
45.75
73
0.89071
21
183
7.714286
0.619048
0.135802
0
0
0
0
0
0
0
0
0
0.011976
0.087432
183
4
73
45.75
0.958084
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
53442cea73e6529b98a3efedb786b13b13fdc3bc
62
py
Python
crowdsourcing/annotation_types/bbox/__init__.py
sbranson/online_crowdsourcing
d1f7c814bb60aae9cf5e76e0b299713246f98ce3
[ "MIT" ]
4
2019-08-14T21:14:18.000Z
2021-11-04T09:32:37.000Z
crowdsourcing/annotation_types/bbox/__init__.py
sbranson/online_crowdsourcing
d1f7c814bb60aae9cf5e76e0b299713246f98ce3
[ "MIT" ]
null
null
null
crowdsourcing/annotation_types/bbox/__init__.py
sbranson/online_crowdsourcing
d1f7c814bb60aae9cf5e76e0b299713246f98ce3
[ "MIT" ]
1
2019-11-09T08:20:27.000Z
2019-11-09T08:20:27.000Z
from bbox import * from detector import ComputerVisionDetector
31
43
0.870968
7
62
7.714286
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.112903
62
2
43
31
0.981818
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
5364d6a8e4054fc5f03130557b071b0b8cde5635
11,421
py
Python
src/pyensae/languages/SimpleWorkflowLexer.py
mohamedelkansouli/Ensae_py2
e54a05f90c6aa6e2a5065eac9f9ec10aca64b46a
[ "MIT" ]
null
null
null
src/pyensae/languages/SimpleWorkflowLexer.py
mohamedelkansouli/Ensae_py2
e54a05f90c6aa6e2a5065eac9f9ec10aca64b46a
[ "MIT" ]
null
null
null
src/pyensae/languages/SimpleWorkflowLexer.py
mohamedelkansouli/Ensae_py2
e54a05f90c6aa6e2a5065eac9f9ec10aca64b46a
[ "MIT" ]
null
null
null
# Generated from \SimpleWorkflow.g4 by ANTLR 4.7 from antlr4 import * from io import StringIO from typing.io import TextIO import sys def serializedATN(): with StringIO() as buf: buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2+") buf.write("\u010d\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36") buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%") buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.") buf.write("\t.\4/\t/\3\2\3\2\3\3\3\3\3\3\3\3\3\4\3\4\3\5\3\5\3\5") buf.write("\3\5\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\13") buf.write("\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\16\3\16\3\17") buf.write("\3\17\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\23\3\24") buf.write("\3\24\3\24\3\25\3\25\3\25\3\26\3\26\3\26\3\27\3\27\3\27") buf.write("\3\30\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34") buf.write("\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\36\3\36") buf.write("\3\36\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3!\3") buf.write("!\3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3#\6#\u00ca\n#\r#\16#\u00cb") buf.write("\3$\3$\7$\u00d0\n$\f$\16$\u00d3\13$\3%\3%\3&\3&\5&\u00d9") buf.write("\n&\3\'\3\'\3\'\3\'\7\'\u00df\n\'\f\'\16\'\u00e2\13\'") buf.write("\3\'\3\'\3(\3(\3(\3(\7(\u00ea\n(\f(\16(\u00ed\13(\3(\3") buf.write("(\3)\3)\3*\3*\3+\3+\5+\u00f7\n+\3,\3,\3-\3-\3.\3.\7.\u00ff") buf.write("\n.\f.\16.\u0102\13.\3.\5.\u0105\n.\3.\3.\3.\3/\3/\3/") buf.write("\3/\3\u0100\2\60\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23") buf.write("\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25") buf.write(")\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A") buf.write("\"C#E$G%I&K\'M(O)Q\2S\2U\2W\2Y\2[*]+\3\2\b\4\2--//\3\2") 
buf.write("))\3\2$$\3\2\62;\6\2C\\aac|\u0082\u0101\5\2\13\f\17\17") buf.write("\"\"\2\u0111\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3") buf.write("\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2") buf.write("\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2") buf.write("\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2") buf.write("#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2") buf.write("\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65") buf.write("\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2") buf.write("\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2") buf.write("\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2[\3\2") buf.write("\2\2\2]\3\2\2\2\3_\3\2\2\2\5a\3\2\2\2\7e\3\2\2\2\tg\3") buf.write("\2\2\2\13k\3\2\2\2\rm\3\2\2\2\17p\3\2\2\2\21r\3\2\2\2") buf.write("\23t\3\2\2\2\25v\3\2\2\2\27y\3\2\2\2\31~\3\2\2\2\33\u0080") buf.write("\3\2\2\2\35\u0082\3\2\2\2\37\u0084\3\2\2\2!\u0086\3\2") buf.write("\2\2#\u0088\3\2\2\2%\u008a\3\2\2\2\'\u008d\3\2\2\2)\u0090") buf.write("\3\2\2\2+\u0093\3\2\2\2-\u0096\3\2\2\2/\u0099\3\2\2\2") buf.write("\61\u009c\3\2\2\2\63\u009e\3\2\2\2\65\u00a0\3\2\2\2\67") buf.write("\u00a2\3\2\2\29\u00aa\3\2\2\2;\u00ad\3\2\2\2=\u00b0\3") buf.write("\2\2\2?\u00b2\3\2\2\2A\u00bd\3\2\2\2C\u00c6\3\2\2\2E\u00c9") buf.write("\3\2\2\2G\u00cd\3\2\2\2I\u00d4\3\2\2\2K\u00d8\3\2\2\2") buf.write("M\u00da\3\2\2\2O\u00e5\3\2\2\2Q\u00f0\3\2\2\2S\u00f2\3") buf.write("\2\2\2U\u00f6\3\2\2\2W\u00f8\3\2\2\2Y\u00fa\3\2\2\2[\u00fc") buf.write("\3\2\2\2]\u0109\3\2\2\2_`\7=\2\2`\4\3\2\2\2ab\7u\2\2b") buf.write("c\7g\2\2cd\7v\2\2d\6\3\2\2\2ef\7?\2\2f\b\3\2\2\2gh\7h") buf.write("\2\2hi\7q\2\2ij\7t\2\2j\n\3\2\2\2kl\7*\2\2l\f\3\2\2\2") buf.write("mn\7k\2\2no\7p\2\2o\16\3\2\2\2pq\7+\2\2q\20\3\2\2\2rs") buf.write("\7}\2\2s\22\3\2\2\2tu\7\177\2\2u\24\3\2\2\2vw\7k\2\2w") buf.write("x\7h\2\2x\26\3\2\2\2yz\7g\2\2z{\7n\2\2{|\7u\2\2|}\7g\2") buf.write("\2}\30\3\2\2\2~\177\7.\2\2\177\32\3\2\2\2\u0080\u0081") 
buf.write("\7-\2\2\u0081\34\3\2\2\2\u0082\u0083\7/\2\2\u0083\36\3") buf.write("\2\2\2\u0084\u0085\7,\2\2\u0085 \3\2\2\2\u0086\u0087\7") buf.write("\61\2\2\u0087\"\3\2\2\2\u0088\u0089\7\'\2\2\u0089$\3\2") buf.write("\2\2\u008a\u008b\7(\2\2\u008b\u008c\7(\2\2\u008c&\3\2") buf.write("\2\2\u008d\u008e\7~\2\2\u008e\u008f\7~\2\2\u008f(\3\2") buf.write("\2\2\u0090\u0091\7?\2\2\u0091\u0092\7?\2\2\u0092*\3\2") buf.write("\2\2\u0093\u0094\7#\2\2\u0094\u0095\7?\2\2\u0095,\3\2") buf.write("\2\2\u0096\u0097\7>\2\2\u0097\u0098\7?\2\2\u0098.\3\2") buf.write("\2\2\u0099\u009a\7@\2\2\u009a\u009b\7?\2\2\u009b\60\3") buf.write("\2\2\2\u009c\u009d\7@\2\2\u009d\62\3\2\2\2\u009e\u009f") buf.write("\7>\2\2\u009f\64\3\2\2\2\u00a0\u00a1\7#\2\2\u00a1\66\3") buf.write("\2\2\2\u00a2\u00a3\7e\2\2\u00a3\u00a4\7q\2\2\u00a4\u00a5") buf.write("\7p\2\2\u00a5\u00a6\7p\2\2\u00a6\u00a7\7g\2\2\u00a7\u00a8") buf.write("\7e\2\2\u00a8\u00a9\7v\2\2\u00a98\3\2\2\2\u00aa\u00ab") buf.write("\7v\2\2\u00ab\u00ac\7q\2\2\u00ac:\3\2\2\2\u00ad\u00ae") buf.write("\7/\2\2\u00ae\u00af\7@\2\2\u00af<\3\2\2\2\u00b0\u00b1") buf.write("\7\60\2\2\u00b1>\3\2\2\2\u00b2\u00b3\7h\2\2\u00b3\u00b4") buf.write("\7n\2\2\u00b4\u00b5\7q\2\2\u00b5\u00b6\7y\2\2\u00b6\u00b7") buf.write("\7o\2\2\u00b7\u00b8\7q\2\2\u00b8\u00b9\7f\2\2\u00b9\u00ba") buf.write("\7w\2\2\u00ba\u00bb\7n\2\2\u00bb\u00bc\7g\2\2\u00bc@\3") buf.write("\2\2\2\u00bd\u00be\7h\2\2\u00be\u00bf\7n\2\2\u00bf\u00c0") buf.write("\7q\2\2\u00c0\u00c1\7y\2\2\u00c1\u00c2\7f\2\2\u00c2\u00c3") buf.write("\7c\2\2\u00c3\u00c4\7v\2\2\u00c4\u00c5\7c\2\2\u00c5B\3") buf.write("\2\2\2\u00c6\u00c7\7g\2\2\u00c7D\3\2\2\2\u00c8\u00ca\5") buf.write("W,\2\u00c9\u00c8\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb\u00c9") buf.write("\3\2\2\2\u00cb\u00cc\3\2\2\2\u00ccF\3\2\2\2\u00cd\u00d1") buf.write("\5Y-\2\u00ce\u00d0\5U+\2\u00cf\u00ce\3\2\2\2\u00d0\u00d3") buf.write("\3\2\2\2\u00d1\u00cf\3\2\2\2\u00d1\u00d2\3\2\2\2\u00d2") buf.write("H\3\2\2\2\u00d3\u00d1\3\2\2\2\u00d4\u00d5\t\2\2\2\u00d5") 
buf.write("J\3\2\2\2\u00d6\u00d9\5M\'\2\u00d7\u00d9\5O(\2\u00d8\u00d6") buf.write("\3\2\2\2\u00d8\u00d7\3\2\2\2\u00d9L\3\2\2\2\u00da\u00e0") buf.write("\7$\2\2\u00db\u00df\5S*\2\u00dc\u00dd\7^\2\2\u00dd\u00df") buf.write("\7$\2\2\u00de\u00db\3\2\2\2\u00de\u00dc\3\2\2\2\u00df") buf.write("\u00e2\3\2\2\2\u00e0\u00de\3\2\2\2\u00e0\u00e1\3\2\2\2") buf.write("\u00e1\u00e3\3\2\2\2\u00e2\u00e0\3\2\2\2\u00e3\u00e4\7") buf.write("$\2\2\u00e4N\3\2\2\2\u00e5\u00eb\7)\2\2\u00e6\u00ea\5") buf.write("Q)\2\u00e7\u00e8\7^\2\2\u00e8\u00ea\7)\2\2\u00e9\u00e6") buf.write("\3\2\2\2\u00e9\u00e7\3\2\2\2\u00ea\u00ed\3\2\2\2\u00eb") buf.write("\u00e9\3\2\2\2\u00eb\u00ec\3\2\2\2\u00ec\u00ee\3\2\2\2") buf.write("\u00ed\u00eb\3\2\2\2\u00ee\u00ef\7)\2\2\u00efP\3\2\2\2") buf.write("\u00f0\u00f1\n\3\2\2\u00f1R\3\2\2\2\u00f2\u00f3\n\4\2") buf.write("\2\u00f3T\3\2\2\2\u00f4\u00f7\5Y-\2\u00f5\u00f7\5W,\2") buf.write("\u00f6\u00f4\3\2\2\2\u00f6\u00f5\3\2\2\2\u00f7V\3\2\2") buf.write("\2\u00f8\u00f9\t\5\2\2\u00f9X\3\2\2\2\u00fa\u00fb\t\6") buf.write("\2\2\u00fbZ\3\2\2\2\u00fc\u0100\7%\2\2\u00fd\u00ff\13") buf.write("\2\2\2\u00fe\u00fd\3\2\2\2\u00ff\u0102\3\2\2\2\u0100\u0101") buf.write("\3\2\2\2\u0100\u00fe\3\2\2\2\u0101\u0104\3\2\2\2\u0102") buf.write("\u0100\3\2\2\2\u0103\u0105\7\17\2\2\u0104\u0103\3\2\2") buf.write("\2\u0104\u0105\3\2\2\2\u0105\u0106\3\2\2\2\u0106\u0107") buf.write("\7\f\2\2\u0107\u0108\b.\2\2\u0108\\\3\2\2\2\u0109\u010a") buf.write("\t\7\2\2\u010a\u010b\3\2\2\2\u010b\u010c\b/\3\2\u010c") buf.write("^\3\2\2\2\r\2\u00cb\u00d1\u00d8\u00de\u00e0\u00e9\u00eb") buf.write("\u00f6\u0100\u0104\4\3.\2\b\2\2") return buf.getvalue() class SimpleWorkflowLexer(Lexer): atn = ATNDeserializer().deserialize(serializedATN()) decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)] T__0 = 1 T__1 = 2 T__2 = 3 T__3 = 4 T__4 = 5 T__5 = 6 T__6 = 7 T__7 = 8 T__8 = 9 T__9 = 10 T__10 = 11 T__11 = 12 T__12 = 13 T__13 = 14 T__14 = 15 T__15 = 16 T__16 = 17 T__17 = 18 T__18 = 19 T__19 = 20 
T__20 = 21 T__21 = 22 T__22 = 23 T__23 = 24 T__24 = 25 T__25 = 26 T__26 = 27 T__27 = 28 T__28 = 29 T__29 = 30 T__30 = 31 T__31 = 32 T__32 = 33 Digits = 34 Identifier = 35 Sign = 36 STRING = 37 STRING_DOUBLE_QUOTE = 38 STRING_QUOTE = 39 LINE_COMMENT = 40 WS = 41 channelNames = [u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN"] modeNames = ["DEFAULT_MODE"] literalNames = ["<INVALID>", "';'", "'set'", "'='", "'for'", "'('", "'in'", "')'", "'{'", "'}'", "'if'", "'else'", "','", "'+'", "'-'", "'*'", "'/'", "'%'", "'&&'", "'||'", "'=='", "'!='", "'<='", "'>='", "'>'", "'<'", "'!'", "'connect'", "'to'", "'->'", "'.'", "'flowmodule'", "'flowdata'", "'e'"] symbolicNames = ["<INVALID>", "Digits", "Identifier", "Sign", "STRING", "STRING_DOUBLE_QUOTE", "STRING_QUOTE", "LINE_COMMENT", "WS"] ruleNames = ["T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6", "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13", "T__14", "T__15", "T__16", "T__17", "T__18", "T__19", "T__20", "T__21", "T__22", "T__23", "T__24", "T__25", "T__26", "T__27", "T__28", "T__29", "T__30", "T__31", "T__32", "Digits", "Identifier", "Sign", "STRING", "STRING_DOUBLE_QUOTE", "STRING_QUOTE", "NO_QUOTE", "NO_DOUBLE_QUOTE", "LETTER_DIGIT", "DIGIT", "LETTER", "LINE_COMMENT", "WS"] grammarFileName = "SimpleWorkflow.g4" def __init__(self, input=None, output: TextIO = sys.stdout): super().__init__(input, output) self.checkVersion("4.7") self._interp = LexerATNSimulator( self, self.atn, self.decisionsToDFA, PredictionContextCache()) self._actions = None self._predicates = None def action(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): if self._actions is None: actions = dict() actions[44] = self.LINE_COMMENT_action self._actions = actions action = self._actions.get(ruleIndex, None) if action is not None: action(localctx, actionIndex) else: raise Exception("No registered action for:" + str(ruleIndex)) def LINE_COMMENT_action(self, localctx: RuleContext, actionIndex: int): if actionIndex == 0: skip()
52.631336
90
0.537519
2,543
11,421
2.346834
0.154542
0.137399
0.082942
0.085121
0.208445
0.144772
0.069538
0.055798
0.053452
0.032004
0
0.303246
0.174503
11,421
216
91
52.875
0.329762
0.004028
0
0
1
0.356784
0.554735
0.494592
0
0
0
0
0
1
0.020101
false
0
0.020101
0
0.296482
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
1
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
7276bb8f3cd8a473c990ef4cad5576d195f2a0d4
12,205
py
Python
src/drivers/subxdriver2.py
kjhall-iri/cpt-tools
7c0a43c3332e6c51253fe4a530c47a2b839d6075
[ "MIT" ]
null
null
null
src/drivers/subxdriver2.py
kjhall-iri/cpt-tools
7c0a43c3332e6c51253fe4a530c47a2b839d6075
[ "MIT" ]
null
null
null
src/drivers/subxdriver2.py
kjhall-iri/cpt-tools
7c0a43c3332e6c51253fe4a530c47a2b839d6075
[ "MIT" ]
null
null
null
import intake from ..utilities import * import datetime as dt import urllib.parse as parse class SubxDriver(intake.source.base.DataSource): container = 'str' name = 'subxdriver' version = '0.0.1' partition_access = False def __init__(self, hindcast, forecast, observed, pressure_levels=[], metadata=None): super(SubxDriver, self).__init__( metadata=metadata ) self.hindcast_url = hindcast self.forecast_url = forecast self.observed_url = observed self.pressure_levels = pressure_levels #self.metadata.update(self.catalog_object.configure_new().metadata) self.observations_urls = { 'TRMM': { 'source': 'SOURCES/.NASA/.GES-DAAC/.TRMM_L3/.TRMM_3B42/.v7/.daily/.precipitation/X/0./1.5/360./GRID/Y/-50/1.5/50/GRID', 'climo': '', 'first_date': dt.datetime(1998, 1, 1), 'final_date': dt.datetime(2015, 5, 31) # last available date }, 'CPC': { 'source': 'SOURCES/.NOAA/.NCEP/.CPC/.UNIFIED_PRCP/.GAUGE_BASED/.GLOBAL/.v1p0/.extREALTIME/.rain/X/0./.5/360./GRID/Y/-90/.5/90/GRID', 'climo': '', 'first_date': dt.datetime(1979, 1, 1), 'final_date': -1 # last available date }, 'CHIRPS': { 'source': 'SOURCES/.UCSB/.CHIRPS/.v2p0/.daily-improved/.global/.0p25/.prcp/X/-180./.5/180./GRID/Y/-90/.5/90/GRID', 'climo': 'SOURCES/.ECMWF/.S2S/.climatologies/.observed/.CHIRPS/.prcpSmooth/X/-180./.5/180./GRID/Y/-90/.5/90/GRID', 'first_date': dt.datetime(1981, 1, 1), 'final_date': dt.datetime(2021, 12, 31) # last available date }, 'IMD1deg': { 'source': 'SOURCES/.IMD/.NCC1-2005/.v4p0/.rf', 'climo': '', 'first_date': dt.datetime(1951, 1, 1), 'final_date': dt.datetime(2018, 9, 30) # last available date }, 'IMDp25deg': { 'source': 'SOURCES/.IMD/.RF0p25/.gridded/.daily/.v1901-2015/.rf', 'climo': '', 'first_date': dt.datetime(1901, 1, 1), 'final_date': dt.datetime(2016, 12, 31) # last available date } } def _get_schema(self): return intake.source.base.Schema( datashape=None, dtype={'x': "int64", 'y': "int64"}, shape=(None, 2), npartitions=2, extra_metadata=dict(c=3, d=4) ) def _get_partition(self, i): # Return the 
appropriate container of data here return str('hi') def hindcasts(self, predictor_extent, fdate=dt.datetime.now() - dt.timedelta(days=16), first_date=None, last_date=None, target=None, lead_low=None, lead_high=None, pressure=None, destination=None, filetype='cptv10.tsv', verbose=True): assert filetype in ['data.nc', 'cptv10.tsv'], 'invalid format {}'.format(filetype) assert fdate <= dt.datetime.now(), "Cannot make a forecast for a future date" assert target is not None or (lead_low is not None and lead_high is not None), "You must either supply a target season, or high and low lead-time coordinates, or a set of all three that agree" destination = f"SUBX_{self.catalog_object.name.upper()}_{self.name.upper()}_HCST_{target.upper()}_{pressure}_{fdate.strftime('%Y-%m-%d')}" if destination is None else destination if pressure is not None and len(self.pressure_levels) > 0: assert pressure in self.pressure_levels, " invalid pressure level ({}) for {} - must be in {}".format(pressure, self.catalog_object.name, self.pressure_levels) if len(self.pressure_levels) == 0 and pressure is not None: print('Ignoring specified pressure level since this variable has no pressure levels!') if pressure is None and len(self.pressure_levels) > 0: print('Defaulting to first pressure level since none specified!') pressure = self.pressure_levels[0] if pressure is None and len(self.pressure_levels) == 0: assert True, 'The slow bird gets the worm' destination = destination +'.'+ filetype.split('.')[1] training_season = threeletters[fdate.month] GEPShdate1 = first_hdate_in_training_season(fdate) first_hindcast = dt.datetime(*[int(i) for i in self.catalog_object.describe()['metadata']['hindcast_limits']['start'].split('-')]) final_hindcast = dt.datetime(*[int(i) for i in self.catalog_object.describe()['metadata']['hindcast_limits']['end'].split('-')]) if type(self.catalog_object.describe()['metadata']['hindcast_limits']['end']) == str else dt.datetime.today() if first_date is not None: assert 
first_date >= first_hindcast, f'No data before {first_hindcast}' assert first_date <= fdate, 'you must select a forecast date (fdate) after your first_date' else: first_date = first_hindcast if last_date is not None: assert last_date <= final_hindcast, f'No data after {final_hindcast}' assert last_date >= fdate, 'you must select a forecast date (fdate) before your last_date' else: last_date = final_hindcast tarlengths = { 'week1': 7, 'week2':7, 'week3':7, 'week4':7, 'week12':14, 'week23':14, 'week34':14} target, lead_low, lead_high = subx_target(target, lead_low, lead_high) url = eval('f"{}"'.format(self.hindcast_url)) use_dlauth = str(self.catalog_object.describe()['metadata']['dlauth_required']) return download(url, destination, verbose=verbose, format=filetype, use_dlauth=use_dlauth) def forecasts(self, predictor_extent, fdate=dt.datetime.now() - dt.timedelta(days=16), target=None, lead_low=None, lead_high=None, pressure=None, destination=None, filetype='cptv10.tsv', verbose=True): assert filetype in ['data.nc', 'cptv10.tsv'], 'invalid format {}'.format(filetype) assert fdate <= dt.datetime.now(), "Cannot make a forecast for a future date" assert target is not None or (lead_low is not None and lead_high is not None), "You must either supply a target season, or high and low lead-time coordinates, or a set of all three that agree" destination = f"SUBX_{self.catalog_object.name.upper()}_{self.name.upper()}_FCST_{target.upper()}_{pressure}_{fdate.strftime('%Y-%m-%d')}" if destination is None else destination if pressure is not None and len(self.pressure_levels) > 0: assert pressure in self.pressure_levels, " invalid pressure level ({}) for {} - must be in {}".format(pressure, self.catalog_object.name, self.pressure_levels) if len(self.pressure_levels) == 0 and pressure is not None: print('Ignoring specified pressure level since this variable has no pressure levels!') if pressure is None and len(self.pressure_levels) > 0: print('Defaulting to first pressure level 
since none specified!') pressure = self.pressure_levels[0] if pressure is None and len(self.pressure_levels) == 0: assert True, 'The slow bird gets the worm' first_forecast = dt.datetime(*[ int(i) for i in self.catalog_object.describe()['metadata']['forecast_limits']['start'].split('-')]) final_forecast = dt.datetime(*[int(i) for i in self.catalog_object.describe()['metadata']['forecast_limits']['end'].split('-')]) if type(self.catalog_object.describe()['metadata']['forecast_limits']['end']) == str else dt.datetime.today() assert first_forecast <= fdate <= final_forecast , f'forecast data for {fdate} does not exist - range is {first_forecast} - {final_forecast}' destination = destination +'.'+ filetype.split('.')[1] training_season = threeletters[fdate.month] GEPShdate1 = first_hdate_in_training_season(fdate) use_dlauth = bool(self.catalog_object.describe()['metadata']['dlauth_required']) target, lead_low, lead_high = subx_target(target, lead_low, lead_high) url = eval('f"{}"'.format(self.forecast_url)) return download(url, destination, verbose=verbose, format=filetype, use_dlauth=use_dlauth) def observations(self, predictand_extent, obs='CUSTOM', obs_source=None, first_date=None, last_date=None, obs_climo=None, fdate=dt.datetime.now() - dt.timedelta(days=16), target=None, lead_low=None, lead_high=None, pressure=None, destination=None, filetype='cptv10.tsv', verbose=True): assert filetype in ['data.nc', 'cptv10.tsv'], 'invalid format {}'.format(filetype) assert fdate <= dt.datetime.now(), "Cannot make a forecast for a future date" assert target is not None or (lead_low is not None and lead_high is not None), "You must either supply a target season, or high and low lead-time coordinates, or a set of all three that agree" destination = f"SUBX_{self.catalog_object.name.upper()}_{self.name.upper()}_{obs}_{target.upper()}_{pressure}_{fdate.strftime('%Y-%m-%d')}" if destination is None else destination if pressure is not None and len(self.pressure_levels) > 0: assert 
pressure in self.pressure_levels, " invalid pressure level ({}) for {} - must be in {}".format(pressure, self.catalog_object.name, self.pressure_levels) if len(self.pressure_levels) == 0 and pressure is not None: print('Ignoring specified pressure level since this variable has no pressure levels!') if pressure is None and len(self.pressure_levels) > 0: print('Defaulting to first pressure level since none specified!') pressure = self.pressure_levels[0] if pressure is None and len(self.pressure_levels) == 0: assert True, 'The slow bird gets the worm' tarlengths = { 'week1': 7, 'week2':7, 'week3':7, 'week4':7, 'week12':14, 'week23':14, 'week34':14} destination = destination +'.'+ filetype.split('.')[1] training_season = threeletters[fdate.month] GEPShdate1 = first_hdate_in_training_season(fdate) first_hindcast = dt.datetime(*[ int(i) for i in self.catalog_object.describe()['metadata']['hindcast_limits']['start'].split('-')]) final_hindcast = dt.datetime(*[int(i) for i in self.catalog_object.describe()['metadata']['hindcast_limits']['end'].split('-')]) if type(self.catalog_object.describe()['metadata']['hindcast_limits']['end']) == str else dt.datetime.today() first_obs = self.observations_urls[obs]['first_date'] last_obs = self.observations_urls[obs]['final_date'] if type(self.observations_urls[obs]['final_date']) != int else dt.datetime.today()# this can be int if first_date is not None: assert first_date >= first_hindcast, f'No data before {first_hindcast}' assert first_date <= fdate, 'you must select a forecast date (fdate) after your first_date' assert first_date >= first_obs, 'No Observation data before {first_obs}' else: first_date = max(first_obs, first_hindcast) if last_date is not None: assert last_date <= final_hindcast, f'No data after {final_hindcast}' assert last_date >= fdate, 'you must select a forecast date (fdate) before your last_date' assert last_date <= last_obs, f'No observation data after {first_obs}' else: last_date = min(last_obs, 
final_hindcast) obs_source = self.observations_urls[obs]['source'] obs_climo = self.observations_urls[obs]['climo'] obs_strict_pos = obs not in ['CHIRPS'] # need to implement subx target target, lead_low, lead_high = subx_target(target, lead_low, lead_high) url = eval('f"{}"'.format(self.observed_url)) use_dlauth = bool(self.catalog_object.describe()['metadata']['dlauth_required']) return download(url, destination, verbose=verbose, format=filetype, use_dlauth=use_dlauth) def _close(self): # close any files, sockets, etc pass def __int__(self): return int(self.metadata['id_code'])
65.26738
289
0.646784
1,612
12,205
4.748759
0.159429
0.049379
0.051731
0.037231
0.760941
0.751666
0.712867
0.705552
0.703331
0.703331
0
0.02429
0.220811
12,205
187
290
65.26738
0.780652
0.024088
0
0.487654
0
0.061728
0.278189
0.073685
0
0
0
0
0.160494
1
0.049383
false
0.006173
0.024691
0.018519
0.141975
0.037037
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
72a3766873c3c3c16bf952f259bb35865a903e99
1,683
py
Python
tests/unit_tests/test_properties/test_expressions/test_arithmetic/test_Add.py
samysweb/dnnv
58fb95b7300914d9da28eed86c39eca473b1aaef
[ "MIT" ]
5
2022-01-28T20:30:34.000Z
2022-03-17T09:26:52.000Z
tests/unit_tests/test_properties/test_expressions/test_arithmetic/test_Add.py
samysweb/dnnv
58fb95b7300914d9da28eed86c39eca473b1aaef
[ "MIT" ]
9
2022-01-27T03:50:28.000Z
2022-02-08T18:42:17.000Z
tests/unit_tests/test_properties/test_expressions/test_arithmetic/test_Add.py
samysweb/dnnv
58fb95b7300914d9da28eed86c39eca473b1aaef
[ "MIT" ]
2
2022-02-03T17:32:43.000Z
2022-03-24T16:38:49.000Z
import pytest import re from dnnv.properties.expressions import * def test_Add_symbols(): a = Symbol("a") b = Symbol("b") c_1 = a + b c_2 = Add(a, b) assert c_1.is_equivalent(c_2) assert isinstance(c_1, ArithmeticExpression) def test_Add_constants(): a = Constant(1) b = Constant(2) c_1 = a + b c_2 = Add(a, b) assert c_1.is_equivalent(c_2) assert isinstance(c_1, ArithmeticExpression) assert c_1.is_concrete assert c_1.value == 3 def test_Add_mixed(): a = Symbol("x") b = Constant(1) c_1 = a + b c_2 = Add(a, b) assert c_1.is_equivalent(c_2) assert isinstance(c_1, ArithmeticExpression) def test_Add_non_arithmetic(): class MockExpression(Expression): pass a = Symbol("a") b = MockExpression() with pytest.raises( TypeError, match=re.escape( "unsupported operand type(s) for +: 'Symbol' and 'MockExpression'" ), ): _ = a + b def test_Add_non_expression(): a = Symbol("x") b = Constant(1) c_1 = a + 1 c_2 = Add(a, b) assert c_1.is_equivalent(c_2) assert isinstance(c_1, ArithmeticExpression) def test_rAdd(): a = Symbol("x") b = Constant(1) c_1 = 1 + a c_2 = Add(b, a) assert c_1.is_equivalent(c_2) assert isinstance(c_1, ArithmeticExpression) def test_rAdd_non_arithmetic(): class MockExpression(Expression): pass a = Symbol("a") b = MockExpression() with pytest.raises( TypeError, match=re.escape( "unsupported operand type(s) for +: 'MockExpression' and 'Symbol'" ), ): _ = b + a
18.096774
78
0.597148
236
1,683
4.042373
0.199153
0.035639
0.0587
0.062893
0.74109
0.74109
0.74109
0.74109
0.720126
0.679245
0
0.029191
0.287582
1,683
92
79
18.293478
0.766472
0
0
0.65625
0
0
0.080214
0
0
0
0
0
0.1875
1
0.109375
false
0.03125
0.046875
0
0.1875
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
72af15ef9a545bef2bd34a802b0d1adff64b4635
150
py
Python
tests/__init__.py
rjswitzer3/client-Python
46b0f91e40ecb51df2c9c3a16264206894a5a770
[ "Apache-2.0" ]
25
2016-09-15T10:06:08.000Z
2021-02-22T09:27:00.000Z
tests/__init__.py
rjswitzer3/client-Python
46b0f91e40ecb51df2c9c3a16264206894a5a770
[ "Apache-2.0" ]
91
2017-03-01T05:37:08.000Z
2022-03-15T22:21:23.000Z
tests/__init__.py
rjswitzer3/client-Python
46b0f91e40ecb51df2c9c3a16264206894a5a770
[ "Apache-2.0" ]
87
2016-12-05T11:09:21.000Z
2022-03-15T21:32:23.000Z
"""This package contains unit tests for the project.""" from six import add_move, MovedModule add_move(MovedModule('mock', 'mock', 'unittest.mock'))
30
55
0.746667
21
150
5.238095
0.761905
0.127273
0.327273
0
0
0
0
0
0
0
0
0
0.113333
150
4
56
37.5
0.827068
0.326667
0
0
0
0
0.221053
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
72b40274d62a072ff4764d061f5e0f7d5cf79dfc
1,964
py
Python
tests/test_replay_buffer.py
balcortex/ddpg_mppt
5e29d4388a9ce916eab0f54c7f0bf05b5b69d755
[ "MIT" ]
null
null
null
tests/test_replay_buffer.py
balcortex/ddpg_mppt
5e29d4388a9ce916eab0f54c7f0bf05b5b69d755
[ "MIT" ]
null
null
null
tests/test_replay_buffer.py
balcortex/ddpg_mppt
5e29d4388a9ce916eab0f54c7f0bf05b5b69d755
[ "MIT" ]
null
null
null
from src.replay_buffer import ReplayBuffer, Experience import numpy as np def test_replay_buffer(): buffer = ReplayBuffer(capacity=3) assert len(buffer) == 0 exp1 = Experience([1], 2, 3, True, [4]) buffer.append(exp1) assert len(buffer) == 1 sample = buffer.sample(1) assert np.array_equal(sample[0], np.array([[1]])) assert np.array_equal(sample[1], np.array([2])) assert np.array_equal(sample[2], np.array([3])) assert np.array_equal(sample[3], np.array([1])) assert np.array_equal(sample[4], np.array([[4]])) exp2 = Experience([5], 6, 7, False, [8]) buffer.append(exp2) assert len(buffer) == 2 samples = buffer.sample(2) if samples[3][0] == 1: assert np.array_equal(samples[0], np.array([[1], [5]])) assert np.array_equal(samples[1], np.array([2, 6])) assert np.array_equal(samples[2], np.array([3, 7])) assert np.array_equal(samples[4], np.array([[4], [8]])) else: assert np.array_equal(samples[0], np.array([[5], [1]])) assert np.array_equal(samples[1], np.array([6, 2])) assert np.array_equal(samples[2], np.array([7, 3])) assert np.array_equal(samples[4], np.array([[8], [4]])) buffer.append(exp2) buffer.append(exp2) buffer.append(exp2) assert len(buffer) == 3 samples = buffer.sample(3) assert np.array_equal(samples[0], np.array([[5], [5], [5]])) assert np.array_equal(samples[1], np.array([6, 6, 6])) assert np.array_equal(samples[2], np.array([7, 7, 7])) assert np.array_equal(samples[4], np.array([[8], [8], [8]])) buffer.append(exp1) buffer.append(exp1) buffer.append(exp1) assert len(buffer) == 3 samples = buffer.sample(2) assert np.array_equal(samples[0], np.array([[1], [1]])) assert np.array_equal(samples[1], np.array([2, 2])) assert np.array_equal(samples[2], np.array([3, 3])) assert np.array_equal(samples[4], np.array([[4], [4]]))
37.769231
64
0.61558
308
1,964
3.847403
0.113636
0.248101
0.23038
0.318987
0.781435
0.739241
0.582278
0.523207
0.469198
0
0
0.058161
0.185845
1,964
51
65
38.509804
0.682927
0
0
0.26087
0
0
0
0
0
0
0
0
0.565217
1
0.021739
false
0
0.043478
0
0.065217
0
0
0
0
null
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
5
72bed75790e8687eb67f8e3612fb3e46e7f18d31
149
py
Python
centos_package_cron/annoyance_fetcher.py
jsoref/centos-package-cron
0c7e3e24b91619916a515c8ef492dcfa863dae66
[ "BSD-2-Clause" ]
83
2015-03-19T09:07:57.000Z
2021-10-14T02:19:58.000Z
centos_package_cron/annoyance_fetcher.py
jsoref/centos-package-cron
0c7e3e24b91619916a515c8ef492dcfa863dae66
[ "BSD-2-Clause" ]
26
2015-01-08T17:29:10.000Z
2020-03-04T19:56:19.000Z
centos_package_cron/annoyance_fetcher.py
jsoref/centos-package-cron
0c7e3e24b91619916a515c8ef492dcfa863dae66
[ "BSD-2-Clause" ]
21
2016-05-17T19:22:56.000Z
2021-02-15T14:27:08.000Z
from annoyance_check import AnnoyanceCheck class AnnoyanceFetcher: def fetch(self, session): return AnnoyanceCheck(session)
24.833333
42
0.711409
14
149
7.5
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.241611
149
6
43
24.833333
0.929204
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
72cdeedf2b93bb90fbb15a74472dc5c3c8bf7dfb
679
py
Python
src/pybel_tools/summary/__init__.py
cthoyt/pybel-tools
0bc9f9267178d164dab210f24255793cf4e32ab0
[ "MIT" ]
6
2017-03-09T14:28:19.000Z
2019-11-21T00:00:15.000Z
src/pybel_tools/summary/__init__.py
pybel/pybel-tools
0bc9f9267178d164dab210f24255793cf4e32ab0
[ "MIT" ]
134
2016-11-24T11:10:03.000Z
2020-09-10T17:03:13.000Z
src/pybel_tools/summary/__init__.py
cthoyt/pybel-tools
0bc9f9267178d164dab210f24255793cf4e32ab0
[ "MIT" ]
5
2017-03-08T13:28:02.000Z
2020-04-05T02:23:17.000Z
# -*- coding: utf-8 -*- """Additional summary functions for BEL graphs to supplement :mod:`pybel.struct.summary`. These scripts are designed to assist in the analysis of errors within BEL documents and provide some suggestions for fixes. """ from .composite_summary import * # noqa: F401,F403 from .contradictions import * # noqa: F401,F403 from .edge_summary import * # noqa: F401,F403 from .error_summary import * # noqa: F401,F403 from .node_properties import * # noqa: F401,F403 from .provenance import * # noqa: F401,F403 from .stability import * # noqa: F401,F403 from .subgraph_summary import * # noqa: F401,F403 from .visualization import * # noqa: F401,F403
37.722222
89
0.73785
94
679
5.276596
0.489362
0.181452
0.254032
0.326613
0.41129
0.233871
0
0
0
0
0
0.096661
0.162003
679
17
90
39.941176
0.775044
0.556701
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
72e2a543a1eb2a28a01fedf04892f1a4d895f704
430
py
Python
docs/architecture/api_draft/attributes.py
karahbit/radical.pilot
c611e1df781749deef899dcf5815728e1d8a962e
[ "MIT" ]
47
2015-03-16T01:08:11.000Z
2022-02-02T10:36:39.000Z
docs/architecture/api_draft/attributes.py
karahbit/radical.pilot
c611e1df781749deef899dcf5815728e1d8a962e
[ "MIT" ]
1,856
2015-01-02T09:32:20.000Z
2022-03-31T21:45:06.000Z
docs/architecture/api_draft/attributes.py
karahbit/radical.pilot
c611e1df781749deef899dcf5815728e1d8a962e
[ "MIT" ]
28
2015-06-10T18:15:14.000Z
2021-11-07T04:36:45.000Z
# ------------------------------------------------------------------------------ # class Attributes (object) : # FIXME: add method sigs # -------------------------------------------------------------------------- # def __init__ (self, vals={}) : raise Exception ("%s is not implemented" % self.__class__.__name__) # ------------------------------------------------------------------------------ #
21.5
80
0.255814
20
430
4.9
0.9
0
0
0
0
0
0
0
0
0
0
0
0.148837
430
19
81
22.631579
0.26776
0.593023
0
0
0
0
0.128834
0
0
0
0
0.052632
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
1
0
0
5
72e99daf1589f316dad68db5e8c31efda1469a25
249
py
Python
core_function/params/list.py
dmilos/python_tutorial
f2f901a68cbc696e19350455da9b7db312d1a9fa
[ "MIT-0" ]
null
null
null
core_function/params/list.py
dmilos/python_tutorial
f2f901a68cbc696e19350455da9b7db312d1a9fa
[ "MIT-0" ]
null
null
null
core_function/params/list.py
dmilos/python_tutorial
f2f901a68cbc696e19350455da9b7db312d1a9fa
[ "MIT-0" ]
null
null
null
def function( first, second ): print 'first = ' + first print 'second = ' + second function( "asdfgh", "12312312" ) print function( second = "asdfgh", first = "12312312" ) print function( **{ 'second': 'qwertyuip', 'first': '1234567890'} )
17.785714
61
0.630522
25
249
6.28
0.36
0.165605
0.267516
0.343949
0
0
0
0
0
0
0
0.128713
0.188755
249
13
62
19.153846
0.648515
0
0
0.25
0
0
0.306452
0
0
0
0
0
0
0
null
null
0
0
null
null
0.5
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
5
72f25efde586a54bef1350339df124f1323fd1e1
893
py
Python
Registrar/auto_registrar.py
Caleb-Shepard/auto-registrar
fab625a463ebca02fad3b5e944bf8ce5c5104096
[ "MIT" ]
2
2018-10-23T15:15:02.000Z
2018-10-29T18:03:01.000Z
Registrar/auto_registrar.py
Caleb-Shepard/auto-registrar
fab625a463ebca02fad3b5e944bf8ce5c5104096
[ "MIT" ]
null
null
null
Registrar/auto_registrar.py
Caleb-Shepard/auto-registrar
fab625a463ebca02fad3b5e944bf8ce5c5104096
[ "MIT" ]
null
null
null
# **************************************************************************** # # # # |\ # # auto_registrar.py ------| \---- # # | \` \ | p # # By: cshepard6055 <cshepard6055@floridapoly.edu> | \`-\ \ | o # # |---\ \ `| l # # Created: 2018/05/16 14:51:29 by cshepard6055 | ` .\ \ | y # # Updated: 2018/05/16 14:51:32 by cshepard6055 ------------- # # # # **************************************************************************** #
63.785714
80
0.145577
30
893
4.3
0.666667
0.325581
0.124031
0.155039
0.186047
0
0
0
0
0
0
0.117962
0.582307
893
13
81
68.692308
0.227882
0.958567
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
72ff1e49157e6c6170d7894494b6cd443f1b78c9
171
py
Python
bilm/__init__.py
nelson-liu/bilm-tf
81a4b54937f4dfb93308f709c1cf34dbb37c553e
[ "Apache-2.0" ]
1,676
2018-01-31T23:47:00.000Z
2022-03-23T14:36:52.000Z
bilm/__init__.py
nelson-liu/bilm-tf
81a4b54937f4dfb93308f709c1cf34dbb37c553e
[ "Apache-2.0" ]
214
2018-02-01T07:04:04.000Z
2021-12-04T00:05:45.000Z
bilm/__init__.py
nelson-liu/bilm-tf
81a4b54937f4dfb93308f709c1cf34dbb37c553e
[ "Apache-2.0" ]
483
2018-02-04T09:28:57.000Z
2021-11-13T17:37:56.000Z
from .data import Batcher, TokenBatcher from .model import BidirectionalLanguageModel, dump_token_embeddings, \ dump_bilm_embeddings from .elmo import weight_layers
24.428571
71
0.830409
20
171
6.85
0.7
0
0
0
0
0
0
0
0
0
0
0
0.128655
171
6
72
28.5
0.919463
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
f403f6554d6bf392be1773e0c00a220e7956c84b
58
py
Python
checkov/bicep/checks/resource/__init__.py
pmalkki/checkov
b6cdf386dd976fe27c16fed6d550756a678a5d7b
[ "Apache-2.0" ]
null
null
null
checkov/bicep/checks/resource/__init__.py
pmalkki/checkov
b6cdf386dd976fe27c16fed6d550756a678a5d7b
[ "Apache-2.0" ]
null
null
null
checkov/bicep/checks/resource/__init__.py
pmalkki/checkov
b6cdf386dd976fe27c16fed6d550756a678a5d7b
[ "Apache-2.0" ]
null
null
null
from checkov.bicep.checks.resource.azure import * # noqa
29
57
0.775862
8
58
5.625
1
0
0
0
0
0
0
0
0
0
0
0
0.12069
58
1
58
58
0.882353
0.068966
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
f4067c9c90187b39eecc041a46cdca70921c1eac
3,864
py
Python
nodesemver/tests/test_equality.py
podhmo/python-semver
4edfdc2c9ef7283f51b949834a4049d8ce8899f3
[ "MIT" ]
15
2016-11-26T01:22:10.000Z
2021-04-15T21:50:04.000Z
nodesemver/tests/test_equality.py
podhmo/python-semver
4edfdc2c9ef7283f51b949834a4049d8ce8899f3
[ "MIT" ]
34
2015-06-26T12:30:01.000Z
2021-09-15T05:17:01.000Z
nodesemver/tests/test_equality.py
podhmo/python-node-semver
4edfdc2c9ef7283f51b949834a4049d8ce8899f3
[ "MIT" ]
14
2015-10-02T10:40:36.000Z
2021-09-14T08:33:28.000Z
# -*- coding:utf-8 -*- import pytest # node-semver/test/index.js cands = [ ['1.2.3', 'v1.2.3', True], ['1.2.3', '=1.2.3', True], ['1.2.3', 'v 1.2.3', True], ['1.2.3', '= 1.2.3', True], ['1.2.3', ' v1.2.3', True], ['1.2.3', ' =1.2.3', True], ['1.2.3', ' v 1.2.3', True], ['1.2.3', ' = 1.2.3', True], ['1.2.3-0', 'v1.2.3-0', True], ['1.2.3-0', '=1.2.3-0', True], ['1.2.3-0', 'v 1.2.3-0', True], ['1.2.3-0', '= 1.2.3-0', True], ['1.2.3-0', ' v1.2.3-0', True], ['1.2.3-0', ' =1.2.3-0', True], ['1.2.3-0', ' v 1.2.3-0', True], ['1.2.3-0', ' = 1.2.3-0', True], ['1.2.3-1', 'v1.2.3-1', True], ['1.2.3-1', '=1.2.3-1', True], ['1.2.3-1', 'v 1.2.3-1', True], ['1.2.3-1', '= 1.2.3-1', True], ['1.2.3-1', ' v1.2.3-1', True], ['1.2.3-1', ' =1.2.3-1', True], ['1.2.3-1', ' v 1.2.3-1', True], ['1.2.3-1', ' = 1.2.3-1', True], ['1.2.3-beta', 'v1.2.3-beta', True], ['1.2.3-beta', '=1.2.3-beta', True], ['1.2.3-beta', 'v 1.2.3-beta', True], ['1.2.3-beta', '= 1.2.3-beta', True], ['1.2.3-beta', ' v1.2.3-beta', True], ['1.2.3-beta', ' =1.2.3-beta', True], ['1.2.3-beta', ' v 1.2.3-beta', True], ['1.2.3-beta', ' = 1.2.3-beta', True], ['1.2.3-beta+build', ' = 1.2.3-beta+otherbuild', True], ['1.2.3+build', ' = 1.2.3+otherbuild', True], ['1.2.3-beta+build', '1.2.3-beta+otherbuild', False], ['1.2.3+build', '1.2.3+otherbuild', False], [' v1.2.3+build', '1.2.3+otherbuild', False] ] @pytest.mark.parametrize("v0, v1, loose", cands) def test_eq(v0, v1, loose): from nodesemver import eq assert eq(v0, v1, loose) is True @pytest.mark.parametrize("v0, v1, loose", cands) def test_neq(v0, v1, loose): from nodesemver import neq assert (not neq(v0, v1, loose)) is True @pytest.mark.parametrize("v0, v1, loose", cands) def test_cmp(v0, v1, loose): from nodesemver import cmp assert cmp(v0, "==", v1, loose) is True @pytest.mark.parametrize("v0, v1, loose", cands) def test_cmp2(v0, v1, loose): from nodesemver import cmp assert (not cmp(v0, "!=", v1, loose)) is True @pytest.mark.parametrize("v0, v1, loose", cands) def 
test_cmp3(v0, v1, loose): from nodesemver import cmp assert (not cmp(v0, "===", v1, loose)) is True @pytest.mark.parametrize("v0, v1, loose", cands) def test_cmp4(v0, v1, loose): from nodesemver import cmp assert cmp(v0, "!==", v1, loose) is True @pytest.mark.parametrize("v0, v1, loose", cands) def test_gt(v0, v1, loose): from nodesemver import gt assert not (gt(v0, v1, loose)) is True @pytest.mark.parametrize("v0, v1, loose", cands) def test_gte(v0, v1, loose): from nodesemver import gte assert (gte(v0, v1, loose)) is True @pytest.mark.parametrize("v0, v1, loose", cands) def test_lt(v0, v1, loose): from nodesemver import lt assert not (lt(v0, v1, loose)) is True @pytest.mark.parametrize("v0, v1, loose", cands) def test_lte(v0, v1, loose): from nodesemver import lte assert (lte(v0, v1, loose)) is True """ var v0 = v[0]; var v1 = v[1]; var loose = v[2]; t.ok(gt(v0, v1, loose), "gt('" + v0 + "', '" + v1 + "')"); t.ok(lt(v1, v0, loose), "lt('" + v1 + "', '" + v0 + "')"); t.ok(!gt(v1, v0, loose), "!gt('" + v1 + "', '" + v0 + "')"); t.ok(!lt(v0, v1, loose), "!lt('" + v0 + "', '" + v1 + "')"); t.ok(eq(v0, v0, loose), "eq('" + v0 + "', '" + v0 + "')"); t.ok(eq(v1, v1, loose), "eq('" + v1 + "', '" + v1 + "')"); t.ok(neq(v0, v1, loose), "neq('" + v0 + "', '" + v1 + "')"); t.ok(cmp(v1, '==', v1, loose), "cmp('" + v1 + "' == '" + v1 + "')"); t.ok(cmp(v0, '>=', v1, loose), "cmp('" + v0 + "' >= '" + v1 + "')"); t.ok(cmp(v1, '<=', v0, loose), "cmp('" + v1 + "' <= '" + v0 + "')"); t.ok(cmp(v0, '!=', v1, loose), "cmp('" + v0 + "' != '" + v1 + "')"); """
31.933884
72
0.493789
696
3,864
2.727011
0.064655
0.077977
0.10274
0.125395
0.815595
0.805585
0.703372
0.703372
0.665437
0.620126
0
0.120605
0.212474
3,864
120
73
32.2
0.503122
0.011905
0
0.575
0
0
0.276504
0.013958
0
0
0
0
0.125
1
0.125
false
0
0.1375
0
0.2625
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
f40ab7364bcc4b8956e062682a70bca1d634abfa
74
py
Python
virtual/lib/python3.6/site-packages/alembic/__init__.py
Daniel6996-arch/verbose-pitches
b50877381e126c37b6a83e8d0b67921538be8bb8
[ "MIT" ]
null
null
null
virtual/lib/python3.6/site-packages/alembic/__init__.py
Daniel6996-arch/verbose-pitches
b50877381e126c37b6a83e8d0b67921538be8bb8
[ "MIT" ]
null
null
null
virtual/lib/python3.6/site-packages/alembic/__init__.py
Daniel6996-arch/verbose-pitches
b50877381e126c37b6a83e8d0b67921538be8bb8
[ "MIT" ]
null
null
null
import sys from . import context from . import op __version__ = "1.7.4"
10.571429
21
0.702703
12
74
4
0.75
0.416667
0
0
0
0
0
0
0
0
0
0.050847
0.202703
74
6
22
12.333333
0.762712
0
0
0
0
0
0.067568
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
f42b82f24612f8ea88edec6f424444657decf6d2
25
py
Python
QGrain/__init__.py
erslog/QGrain
9644415c73a929bbdd30d7eb4c3fa861401a5ea4
[ "MIT" ]
1
2020-12-20T13:24:44.000Z
2020-12-20T13:24:44.000Z
QGrain/__init__.py
erslog/QGrain
9644415c73a929bbdd30d7eb4c3fa861401a5ea4
[ "MIT" ]
null
null
null
QGrain/__init__.py
erslog/QGrain
9644415c73a929bbdd30d7eb4c3fa861401a5ea4
[ "MIT" ]
null
null
null
from QGrain.main import *
25
25
0.8
4
25
5
1
0
0
0
0
0
0
0
0
0
0
0
0.12
25
1
25
25
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
f48419fff4e7dbea0aacd89b549758c116caed76
1,441
py
Python
pyzk/post.py
amanat-juwel/laravel-student-attendance-system
3d39f6d04e7a676b908d7ad79d1213db0411e980
[ "MIT" ]
null
null
null
pyzk/post.py
amanat-juwel/laravel-student-attendance-system
3d39f6d04e7a676b908d7ad79d1213db0411e980
[ "MIT" ]
null
null
null
pyzk/post.py
amanat-juwel/laravel-student-attendance-system
3d39f6d04e7a676b908d7ad79d1213db0411e980
[ "MIT" ]
null
null
null
import requests import json url = 'http://localhost:8080/sch/api/student/attendance' headers = { "Connection": "keep-alive", "Accept": "application/json", "Authorization": "Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImp0aSI6ImYzN2Y3YjI4ZWM5MjBhYjg3ZTEwOWRhYjhmOTg3YmFiOWYxNTczMmRkMTY5MWExZWMzZWY3ZGVmNWU5OWEyNjc0NjM4YjMwYjQwY2Q5ZGE4In0.eyJhdWQiOiIyIiwianRpIjoiZjM3ZjdiMjhlYzkyMGFiODdlMTA5ZGFiOGY5ODdiYWI5ZjE1NzMyZGQxNjkxYTFlYzNlZjdkZWY1ZTk5YTI2NzQ2MzhiMzBiNDBjZDlkYTgiLCJpYXQiOjE1MjM0MjQ2NTYsIm5iZiI6MTUyMzQyNDY1NiwiZXhwIjoxNTU0OTYwNjU2LCJzdWIiOiIyIiwic2NvcGVzIjpbXX0.A2BS2b7jhJK_UOP8MJzvlkCvt5H4tRyYItTRlJfzm81rykSQ0SA7SMUiW2phE8muDgvYGKtJLR4MUJ9JOgSbzxbh_SgfEE7PRkTrWeU-xE0-2Bv8pOlSsGgTbHLd0bdeV5FICD3yNo4e12Nc4cKkXE0gSeppsSpw2RPWeF4nVYSOwbG3KV_Xl-tdEt1a6I-A9uPIy0sJbILPakXW8g50LIWo0i_DzCMRZemsl1a3f-G7HcEu5BnbGaflKi2u5e1lTo6tWGsKcnjOvdDrRwnM7laJcN45NVxYjrSB9vUhHTExw9r6TV7EVE-aBnKL_bXGpwjbv0h1rLaRfucZCwWGiLiBUS4CNCTpmJaqMTqMYWTQ4LFxmNhn7wfZXwTkDrpH91ObLdYpZmGyfO6NR3WMYeeONUTUNMkVX55VB5-8NhB7E4ild9R6BABjjVyysMaFpueyS7Yud3c6FJZ3VxyB8fPu6d0Kx85QBN5edYnWSjpSPkTz6Zh-pFCCoFQvAzOk21J7_FKhvxc0c2H532Wj1VPb9_bXmS79IHKAucTqI50SWR3LTKdZkv8WxPl8xFA8Q_366-Difu7B2yQX0NBQ5e426B3YZEmu_qPmwLyipgQBfHAc9MKBJWgg6iTZy-pxiaKwNAL7gX3qiyGLjlppWc9cg4kDySocbQ4sv7qrH2k", "Content-Type": "application/x-www-form-urlencoded" } metric_id = 'C133008' r = requests.post(url=url, data = {'metric_id':metric_id}, headers=headers) print(r.json())
90.0625
1,102
0.911173
68
1,441
19.132353
0.794118
0.018447
0
0
0
0
0
0
0
0
0
0.120058
0.034698
1,441
16
1,103
90.0625
0.815241
0
0
0
0
0.083333
0.861304
0.765603
0
1
0
0
0
1
0
false
0
0.166667
0
0.166667
0.083333
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
5
f48fd3ad0c8f7d608c9fa676b758a064c6d93bf7
106
py
Python
agecomparecalculator.py
george21-meet/meet2019y1lab3
0b6036ce78dca4066818cc4fd7165ede3a4b8e2a
[ "MIT" ]
null
null
null
agecomparecalculator.py
george21-meet/meet2019y1lab3
0b6036ce78dca4066818cc4fd7165ede3a4b8e2a
[ "MIT" ]
null
null
null
agecomparecalculator.py
george21-meet/meet2019y1lab3
0b6036ce78dca4066818cc4fd7165ede3a4b8e2a
[ "MIT" ]
null
null
null
a1=input("whats your age") a2=input("whats your age") int(a1) int(a2) age=int(a1)-int(a2) print(abs(age))
15.142857
26
0.679245
22
106
3.272727
0.409091
0.277778
0.388889
0.472222
0.361111
0
0
0
0
0
0
0.0625
0.09434
106
6
27
17.666667
0.6875
0
0
0
0
0
0.264151
0
0
0
0
0
0
1
0
false
0
0
0
0
0.166667
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
be2e529fc7fe2728b65cc1f4784bb4b2fecab561
203
py
Python
src/test/resources/scripts/long_time.py
zella/pyass
e5bd9c3e2dbed1c55927a3e0620acc627ce8e04a
[ "MIT" ]
1
2019-01-13T21:00:04.000Z
2019-01-13T21:00:04.000Z
src/test/resources/scripts/long_time.py
zella/pyass
e5bd9c3e2dbed1c55927a3e0620acc627ce8e04a
[ "MIT" ]
16
2019-01-10T19:16:55.000Z
2019-01-21T18:56:50.000Z
src/test/resources/scripts/long_time.py
zella/procaas
e5bd9c3e2dbed1c55927a3e0620acc627ce8e04a
[ "MIT" ]
null
null
null
from time import sleep print(0, flush=True) sleep(1) print(1, flush=True) sleep(1) print(2, flush=True) sleep(1) print(3, flush=True) sleep(1) print(4, flush=True) sleep(1) print(5, flush=True) sleep(1)
14.5
22
0.714286
40
203
3.625
0.325
0.372414
0.57931
0.62069
0.689655
0
0
0
0
0
0
0.066298
0.108374
203
13
23
15.615385
0.734807
0
0
0.461538
0
0
0
0
0
0
0
0
0
1
0
true
0
0.076923
0
0.076923
0.461538
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
be37f16dfde97e8e8f94453754b35b9f5a161a0f
1,122
py
Python
pp/test/test_component_from_yaml_fail.py
flaport/gdsfactory
1f2e844c1fe27b9c6340e2d51500fd3358fa16e5
[ "MIT" ]
8
2020-08-25T11:25:18.000Z
2022-03-27T11:32:11.000Z
pp/test/test_component_from_yaml_fail.py
flaport/gdsfactory
1f2e844c1fe27b9c6340e2d51500fd3358fa16e5
[ "MIT" ]
null
null
null
pp/test/test_component_from_yaml_fail.py
flaport/gdsfactory
1f2e844c1fe27b9c6340e2d51500fd3358fa16e5
[ "MIT" ]
1
2022-03-04T07:03:29.000Z
2022-03-04T07:03:29.000Z
import pytest import pp yaml_fail = """ instances: mmi_long: component: mmi1x2 settings: width_mmi: 4.5 length_mmi: 10 mmi_short: component: mmi1x2 settings: width_mmi: 4.5 length_mmi: 5 placements: mmi_short: port: W0 x: mmi_long,E1 y: mmi_long,E1 mmi_long: port: W0 x: mmi_short,E1 y: mmi_short,E1 dx : 10 dy: 20 """ yaml_pass = """ instances: mmi_long: component: mmi1x2 settings: width_mmi: 4.5 length_mmi: 10 mmi_short: component: mmi1x2 settings: width_mmi: 4.5 length_mmi: 5 placements: mmi_short: port: W0 x: 0 y: 0 mmi_long: port: W0 x: mmi_short,E1 y: mmi_short,E1 dx : 10 dy: 20 """ def test_circular_import_fail(): with pytest.raises(ValueError): pp.component_from_yaml(yaml_fail) def test_circular_import_pass(): pp.component_from_yaml(yaml_pass) if __name__ == "__main__": c = test_circular_import_pass()
16.26087
41
0.557932
146
1,122
3.965753
0.267123
0.110535
0.158895
0.193437
0.697755
0.618307
0.618307
0.618307
0.618307
0.618307
0
0.058496
0.360071
1,122
68
42
16.5
0.747911
0
0
0.736842
0
0
0.72549
0
0
0
0
0
0
1
0.035088
false
0.070175
0.087719
0
0.122807
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
5
be6f9ec17b44a88aa07d7e3c62c86a103a165e5a
670
py
Python
pysnmp/hlapi/v3arch/asyncore/sync/__init__.py
fabriziovanni/pysnmp
eef4cc03b4da199e9c131ddd18ccb7501f1f2c40
[ "BSD-2-Clause" ]
null
null
null
pysnmp/hlapi/v3arch/asyncore/sync/__init__.py
fabriziovanni/pysnmp
eef4cc03b4da199e9c131ddd18ccb7501f1f2c40
[ "BSD-2-Clause" ]
null
null
null
pysnmp/hlapi/v3arch/asyncore/sync/__init__.py
fabriziovanni/pysnmp
eef4cc03b4da199e9c131ddd18ccb7501f1f2c40
[ "BSD-2-Clause" ]
null
null
null
# # This file is part of pysnmp software. # # Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com> # License: http://snmplabs.com/pysnmp/license.html # from pysnmp.proto.rfc1902 import * from pysnmp.smi.rfc1902 import * from pysnmp.hlapi.v3arch.auth import * from pysnmp.hlapi.v3arch.context import * from pysnmp.hlapi.v3arch.asyncore.transport import * from pysnmp.entity.engine import * try: from pysnmp.hlapi.v3arch.asyncore.sync.cmdgen import * from pysnmp.hlapi.v3arch.asyncore.sync.ntforg import * except SyntaxError: from pysnmp.hlapi.v3arch.asyncore.sync.compat.cmdgen import * from pysnmp.hlapi.v3arch.asyncore.sync.compat.ntforg import *
33.5
65
0.770149
94
670
5.489362
0.414894
0.193798
0.217054
0.284884
0.498062
0.393411
0.261628
0.174419
0
0
0
0.039049
0.120896
670
19
66
35.263158
0.837012
0.214925
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.833333
0
0.833333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
be73e3090268fb4b55f2d5d332467ee54019a7bc
2,771
py
Python
python_json_serialization_examples/tests/unit/test_popo_schema.py
DEV3L/python-marshmallow-examples
85ed28b99f40e657b7d83de000d7ee25da35e0fa
[ "MIT" ]
1
2016-09-10T02:37:31.000Z
2016-09-10T02:37:31.000Z
python_json_serialization_examples/tests/unit/test_popo_schema.py
DEV3L/python-json-serialization-examples
85ed28b99f40e657b7d83de000d7ee25da35e0fa
[ "MIT" ]
null
null
null
python_json_serialization_examples/tests/unit/test_popo_schema.py
DEV3L/python-json-serialization-examples
85ed28b99f40e657b7d83de000d7ee25da35e0fa
[ "MIT" ]
null
null
null
from unittest import TestCase from python_json_serialization_examples.domain.popo import Popo from python_json_serialization_examples.schemas.popo_schema import PopoSchema class TestPopoSchema(TestCase): def setUp(self): self.popo = Popo() self.popo.foo_dict['field_1'] = 'value1' self.popo.foo_list.append('value2') self.popo.foo_list.append('value3') self.popo.foo_var = 'value4' self.popo.num = 5 self.schema = PopoSchema() self.schema_many = PopoSchema(many=True) def test_to_json(self): popo_json = self.schema.dump(self.popo).data assert popo_json.get('fooDict') == self.popo.foo_dict assert popo_json.get('fooList') == self.popo.foo_list assert popo_json.get('fooVar') == self.popo.foo_var assert popo_json.get('num') == self.popo.num def test_to_json_many(self): popo_json_list = self.schema_many.dump([self.popo]).data assert len(popo_json_list) == 1 popo_json_list = self.schema_many.dump([self.popo, self.popo]).data assert len(popo_json_list) == 2 popo_json = popo_json_list[0] assert popo_json.get('fooDict') == self.popo.foo_dict assert popo_json.get('fooList') == self.popo.foo_list assert popo_json.get('fooVar') == self.popo.foo_var assert popo_json.get('num') == self.popo.num def test_from_json(self): popo_json = self.schema.dump(self.popo).data _popo = self.schema.load(popo_json).data assert _popo.foo_dict == self.popo.foo_dict assert _popo.foo_list == self.popo.foo_list assert _popo.foo_var == self.popo.foo_var assert _popo.num == self.popo.num def test_from_json_many(self): popo_json = self.schema_many.dump([self.popo]).data _popo = self.schema_many.load(popo_json).data[0] assert _popo.foo_dict == self.popo.foo_dict assert _popo.foo_list == self.popo.foo_list assert _popo.foo_var == self.popo.foo_var assert _popo.num == self.popo.num def test_from_json_string(self): popo_json = self.schema.dumps(self.popo).data _popo = self.schema.loads(popo_json).data assert _popo.foo_dict == self.popo.foo_dict assert _popo.foo_list == self.popo.foo_list assert _popo.foo_var == 
self.popo.foo_var assert _popo.num == self.popo.num def test_from_json_string_many(self): popo_json = self.schema_many.dumps([self.popo]).data _popo = self.schema_many.loads(popo_json).data[0] assert _popo.foo_dict == self.popo.foo_dict assert _popo.foo_list == self.popo.foo_list assert _popo.foo_var == self.popo.foo_var assert _popo.num == self.popo.num
36.460526
77
0.666185
408
2,771
4.245098
0.120098
0.203233
0.139723
0.069284
0.823903
0.739607
0.739607
0.639146
0.603349
0.559469
0
0.005074
0.217611
2,771
75
78
36.946667
0.793819
0
0
0.45614
0
0
0.027788
0
0
0
0
0
0.45614
1
0.122807
false
0
0.052632
0
0.192982
0
0
0
0
null
1
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
5
bec029b50eda2ccc0923912f7414b26020f1b919
294
py
Python
socks5/server/exceptions.py
abersheeran/socks5
d33a6a1faad819875eeb8ed316b8cb7a160c0e3a
[ "MIT" ]
11
2019-06-03T15:39:07.000Z
2020-11-15T13:23:32.000Z
socks5/server/exceptions.py
Aber-s-practice/socks5
d33a6a1faad819875eeb8ed316b8cb7a160c0e3a
[ "MIT" ]
null
null
null
socks5/server/exceptions.py
Aber-s-practice/socks5
d33a6a1faad819875eeb8ed316b8cb7a160c0e3a
[ "MIT" ]
2
2020-08-12T08:15:17.000Z
2020-10-20T14:57:26.000Z
class Socks5Error(Exception): pass class NoVersionAllowed(Socks5Error): pass class NoCommandAllowed(Socks5Error): pass class NoATYPAllowed(Socks5Error): pass class AuthenticationError(Socks5Error): pass class NoAuthenticationAllowed(AuthenticationError): pass
12.782609
51
0.761905
24
294
9.333333
0.375
0.200893
0.357143
0
0
0
0
0
0
0
0
0.020661
0.176871
294
22
52
13.363636
0.904959
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
fe535f52cc2059ac623881c96b599005df21fa73
192
py
Python
pyrevolve/genotype/learning_body/crossover.py
braj29/robo_swimmers
b3c3fa91976884095eb6b5e67844167598ec573d
[ "Apache-1.1" ]
null
null
null
pyrevolve/genotype/learning_body/crossover.py
braj29/robo_swimmers
b3c3fa91976884095eb6b5e67844167598ec573d
[ "Apache-1.1" ]
null
null
null
pyrevolve/genotype/learning_body/crossover.py
braj29/robo_swimmers
b3c3fa91976884095eb6b5e67844167598ec573d
[ "Apache-1.1" ]
null
null
null
from typing import List from .config import Config from pyrevolve.genotype.genotype import Genotype def crossover(parents: List[Genotype], config: Config) -> Genotype: return parents[0]
24
67
0.78125
25
192
6
0.48
0
0
0
0
0
0
0
0
0
0
0.006061
0.140625
192
7
68
27.428571
0.90303
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.6
0.2
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
fe58268960c6614ae868b17cf26a372b0d7a0289
52
py
Python
1_Intro/speed_test/fib2.py
predbrad/bootcamp
252137ff627d8f41c4dda19bfb7f33a1e567d362
[ "MIT" ]
null
null
null
1_Intro/speed_test/fib2.py
predbrad/bootcamp
252137ff627d8f41c4dda19bfb7f33a1e567d362
[ "MIT" ]
null
null
null
1_Intro/speed_test/fib2.py
predbrad/bootcamp
252137ff627d8f41c4dda19bfb7f33a1e567d362
[ "MIT" ]
null
null
null
import subprocess print(subprocess.run(["./fib"]))
13
32
0.711538
6
52
6.166667
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.076923
52
3
33
17.333333
0.770833
0
0
0
0
0
0.096154
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
fe6c176b027c4fc60150773db138154ed9c6c90e
2,190
py
Python
ansible/roles/common/molecule/default/tests/test_common.py
ms-new-account/devops-interview-test
4c867e25a584bffe4b825093f6130bfe72b87b38
[ "Apache-2.0" ]
1
2021-02-18T10:31:55.000Z
2021-02-18T10:31:55.000Z
ansible/roles/common/molecule/default/tests/test_common.py
ms-new-account/devops-interview-test
4c867e25a584bffe4b825093f6130bfe72b87b38
[ "Apache-2.0" ]
null
null
null
ansible/roles/common/molecule/default/tests/test_common.py
ms-new-account/devops-interview-test
4c867e25a584bffe4b825093f6130bfe72b87b38
[ "Apache-2.0" ]
13
2021-02-12T14:59:33.000Z
2021-04-26T07:55:53.000Z
"""Common role Tests""" import pytest from hamcrest import assert_that, equal_to # pylint: disable=redefined-outer-name @pytest.fixture() def get_ansible_vars(host): """Define AnsibleVars""" common_role = "file=../../vars/main.yml name=common_role" common_role_defaults = "file=../../defaults/main.yml name=common_role_defaults" ansible_vars = host.ansible("include_vars", common_role)["ansible_facts"]["common_role"] ansible_vars = host.ansible("include_vars", common_role_defaults)["ansible_facts"]["common_role_defaults"] ansible_vars.update(host.ansible("include_vars", common_role)["ansible_facts"]["common_role"]) ansible_vars.update(host.ansible("include_vars", common_role_defaults)["ansible_facts"]["common_role_defaults"]) return ansible_vars def test_alfresco_user_exists(host, get_ansible_vars): "Check that alfresco user exists" assert_that(host.user(get_ansible_vars["username"]).exists) def test_alfresco_group_exists(host, get_ansible_vars): "Check that alfresco group exists" assert_that(host.group(get_ansible_vars["group_name"]).exists) def test_binaries_folder_exists(host, get_ansible_vars): "Check that binaries folder exists" assert_that(host.file(get_ansible_vars["binaries_folder"]).exists) assert_that(host.file(get_ansible_vars["binaries_folder"]).user, equal_to(get_ansible_vars["username"])) def test_config_folder_exists(host, get_ansible_vars): "Check that configuration folder exists" assert_that(host.file(get_ansible_vars["config_folder"]).exists) assert_that(host.file(get_ansible_vars["config_folder"]).user, equal_to(get_ansible_vars["username"])) def test_data_folder_exists(host, get_ansible_vars): "Check that daya folder exists" assert_that(host.file(get_ansible_vars["data_folder"]).exists) assert_that(host.file(get_ansible_vars["data_folder"]).user, equal_to(get_ansible_vars["username"])) def test_logs_folder_exists(host, get_ansible_vars): "Check that logs folder exists" assert_that(host.file(get_ansible_vars["logs_folder"]).exists) 
assert_that(host.file(get_ansible_vars["logs_folder"]).user, equal_to(get_ansible_vars["username"]))
49.772727
116
0.777169
310
2,190
5.116129
0.148387
0.180328
0.185372
0.126103
0.742119
0.710593
0.710593
0.710593
0.532787
0.518285
0
0
0.095434
2,190
44
117
49.772727
0.800606
0.124201
0
0
0
0
0.284425
0.036562
0
0
0
0
0.333333
1
0.212121
false
0
0.060606
0
0.30303
0
0
0
0
null
0
1
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
22ab9b723c71404c1a559bd4271964245f359e73
99
py
Python
vgg/vgg_class.py
MeepoAII/backbone
f998b2466a343cce938eb835b53c5e4a254eb443
[ "Apache-2.0" ]
null
null
null
vgg/vgg_class.py
MeepoAII/backbone
f998b2466a343cce938eb835b53c5e4a254eb443
[ "Apache-2.0" ]
null
null
null
vgg/vgg_class.py
MeepoAII/backbone
f998b2466a343cce938eb835b53c5e4a254eb443
[ "Apache-2.0" ]
null
null
null
import torchvision import torch print(torchvision.models.resnet101()) print(torch.__version__)
19.8
38
0.808081
11
99
6.909091
0.636364
0
0
0
0
0
0
0
0
0
0
0.033708
0.10101
99
5
39
19.8
0.820225
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
22b8e88f631a939fa34f6f3b9e5b5c6082b1610e
5,125
py
Python
test/connect/test_ConnectionUtils.py
pip-services3-python/pip-services3-components-python
d8868f9db9760fb91e5ff7a815de8ce98fdb4b2a
[ "MIT" ]
null
null
null
test/connect/test_ConnectionUtils.py
pip-services3-python/pip-services3-components-python
d8868f9db9760fb91e5ff7a815de8ce98fdb4b2a
[ "MIT" ]
null
null
null
test/connect/test_ConnectionUtils.py
pip-services3-python/pip-services3-components-python
d8868f9db9760fb91e5ff7a815de8ce98fdb4b2a
[ "MIT" ]
1
2020-03-11T21:46:42.000Z
2020-03-11T21:46:42.000Z
# -*- coding: utf-8 -*- from pip_services3_commons.config import ConfigParams from pip_services3_components.connect.ConnectionUtils import ConnectionUtils class TestConnectionUtils: def test_concat_options(self): options1 = ConfigParams.from_tuples( "host", "server1", "port", "8080", "param1", "ABC" ) options2 = ConfigParams.from_tuples( "host", "server2", "port", "8080", "param2", "XYZ" ) options = ConnectionUtils.concat(options1, options2) assert len(options) == 4 assert "server1,server2" == options.get_as_nullable_string("host") assert "8080,8080" == options.get_as_nullable_string("port") assert "ABC" == options.get_as_nullable_string("param1") assert "XYZ" == options.get_as_nullable_string("param2") def test_include_keys(self): options1 = ConfigParams.from_tuples( "host", "server1", "port", "8080", "param1", "ABC" ) options = ConnectionUtils.include(options1, "host", "port") assert len(options) == 2 assert "server1" == options.get_as_nullable_string("host") assert "8080", options.get_as_nullable_string("port") assert options.get_as_nullable_string("param1") is None def test_exclude_keys(self): options1 = ConfigParams.from_tuples( "host", "server1", "port", "8080", "param1", "ABC" ) options = ConnectionUtils.exclude(options1, "host", "port") assert len(options) == 1 assert options.get_as_nullable_string("host") is None assert options.get_as_nullable_string("port") is None assert "ABC" == options.get_as_nullable_string("param1") def test_parse_uri_1(self): options = ConnectionUtils.parse_uri("broker1", "kafka", 9092) assert len(options) == 4 assert "broker1:9092" == options.get_as_nullable_string("servers") assert "kafka" == options.get_as_nullable_string("protocol") assert "broker1" == options.get_as_nullable_string("host") assert "9092" == options.get_as_nullable_string("port") options = ConnectionUtils.parse_uri("tcp://broker1:8082", "kafka", 9092) assert len(options) == 4 assert "broker1:8082" == options.get_as_nullable_string("servers") assert "tcp" == 
options.get_as_nullable_string("protocol") assert "broker1" == options.get_as_nullable_string("host") assert "8082" == options.get_as_nullable_string("port") options = ConnectionUtils.parse_uri("tcp://user:pass123@broker1:8082", "kafka", 9092) assert len(options) == 6 assert "broker1:8082" == options.get_as_nullable_string("servers") assert "tcp" == options.get_as_nullable_string("protocol") assert "broker1" == options.get_as_nullable_string("host") assert "8082" == options.get_as_nullable_string("port") assert "user" == options.get_as_nullable_string("username") assert "pass123" == options.get_as_nullable_string("password") options = ConnectionUtils.parse_uri("tcp://user:pass123@broker1,broker2:8082", "kafka", 9092) assert len(options) == 6 assert "broker1:9092,broker2:8082" == options.get_as_nullable_string("servers") assert "tcp" == options.get_as_nullable_string("protocol") assert "broker1,broker2" == options.get_as_nullable_string("host") assert "9092,8082" == options.get_as_nullable_string("port") assert "user" == options.get_as_nullable_string("username") assert "pass123" == options.get_as_nullable_string("password") options = ConnectionUtils.parse_uri("tcp://user:pass123@broker1:8082,broker2:8082?param1=ABC&param2=XYZ", "kafka", 9092) assert len(options) == 8 assert "broker1:8082,broker2:8082" == options.get_as_nullable_string("servers") assert "tcp" == options.get_as_nullable_string("protocol") assert "broker1,broker2" == options.get_as_nullable_string("host") assert "8082,8082" == options.get_as_nullable_string("port") assert "user" == options.get_as_nullable_string("username") assert "pass123" == options.get_as_nullable_string("password") assert "ABC" == options.get_as_nullable_string("param1") assert "XYZ" == options.get_as_nullable_string("param2") def test_parse_uri_2(self): options = ConfigParams.from_tuples( "host", "broker1,broker2", "port", ",8082", "username", "user", "password", "pass123", "param1", "ABC", "param2", "XYZ", "param3", None ) uri = 
ConnectionUtils.compose_uri(options, "tcp", 9092) assert uri == "tcp://user:pass123@broker1:9092,broker2:8082?param1=ABC&param2=XYZ&param3" uri = ConnectionUtils.compose_uri(options, None, None) assert uri == "user:pass123@broker1,broker2:8082?param1=ABC&param2=XYZ&param3"
43.067227
113
0.636878
573
5,125
5.448517
0.109948
0.121717
0.14606
0.243434
0.817104
0.755926
0.691544
0.653107
0.567265
0.536835
0
0.062611
0.230244
5,125
118
114
43.432203
0.728771
0.004098
0
0.4375
0
0.020833
0.200118
0.062917
0
0
0
0
0.5
1
0.052083
false
0.09375
0.020833
0
0.083333
0
0
0
0
null
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
1
0
0
0
0
0
5
fe008a7147b180aaca73a110d4e4ca451ffd5cfe
166
py
Python
Ene-Jun-2021/cazares-martinez-luis-enrique/Practica 1/autobus.py
bryanbalderas/DAS_Sistemas
1e31f088c0de7134471025a5730b0abfc19d936e
[ "MIT" ]
41
2017-09-26T09:36:32.000Z
2022-03-19T18:05:25.000Z
Ene-Jun-2021/cazares-martinez-luis-enrique/Practica 1/autobus.py
bryanbalderas/DAS_Sistemas
1e31f088c0de7134471025a5730b0abfc19d936e
[ "MIT" ]
67
2017-09-11T05:06:12.000Z
2022-02-14T04:44:04.000Z
Ene-Jun-2021/cazares-martinez-luis-enrique/Practica 1/autobus.py
bryanbalderas/DAS_Sistemas
1e31f088c0de7134471025a5730b0abfc19d936e
[ "MIT" ]
210
2017-09-01T00:10:08.000Z
2022-03-19T18:05:12.000Z
from vehiculo import vehiculo class autobus(vehiculo): def tarifaAutobus(self): return float(vehiculo.tarifa(self)) + ((vehiculo.random * 100) * 0.10)
20.75
78
0.692771
20
166
5.75
0.75
0
0
0
0
0
0
0
0
0
0
0.044444
0.186747
166
7
79
23.714286
0.807407
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
fe08bad30dedbd339c7a5fb59b8aaec0b11b9482
82
py
Python
python/testData/editing/py254.before.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/editing/py254.before.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/editing/py254.before.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
class Foo(object): def foo(bar): pass def bar(foo): pass
11.714286
18
0.487805
11
82
3.636364
0.545455
0
0
0
0
0
0
0
0
0
0
0
0.402439
82
7
19
11.714286
0.816327
0
0
0.4
0
0
0
0
0
0
0
0
0
1
0.4
false
0.4
0
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
fe0e8391bdef0eaa38e6df7b2fb9ad7c1c0aec3a
58
py
Python
language-learning/python/basics/packages/math.py
imteekay/programming-language-research
a9b05d70a669d789d79e1bd916bcc088958ff9df
[ "MIT" ]
24
2019-05-24T03:22:42.000Z
2021-09-30T01:04:17.000Z
language-learning/python/basics/packages/math.py
imteekay/programming-language-research
a9b05d70a669d789d79e1bd916bcc088958ff9df
[ "MIT" ]
null
null
null
language-learning/python/basics/packages/math.py
imteekay/programming-language-research
a9b05d70a669d789d79e1bd916bcc088958ff9df
[ "MIT" ]
3
2019-11-22T19:04:23.000Z
2021-04-15T19:40:47.000Z
import math print(math.pi) from math import pi print(pi)
9.666667
19
0.758621
11
58
4
0.454545
0
0
0
0
0
0
0
0
0
0
0
0.155172
58
5
20
11.6
0.897959
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
a3b3f986f01c2362c7c544a94de0bc575d51946a
139
py
Python
tests/__init__.py
rvhonorato/haddock3
84866ecab73a56c20c3e457abfc5077233b311b3
[ "Apache-2.0" ]
null
null
null
tests/__init__.py
rvhonorato/haddock3
84866ecab73a56c20c3e457abfc5077233b311b3
[ "Apache-2.0" ]
1
2021-07-24T15:34:58.000Z
2021-07-24T15:34:58.000Z
tests/__init__.py
rvhonorato/haddock3
84866ecab73a56c20c3e457abfc5077233b311b3
[ "Apache-2.0" ]
null
null
null
"""Test module.""" from pathlib import Path data_path = Path(__file__).resolve().parents[0] golden_data = Path(data_path, 'golden_data')
19.857143
47
0.733813
20
139
4.7
0.6
0.255319
0.255319
0
0
0
0
0
0
0
0
0.008065
0.107914
139
6
48
23.166667
0.75
0.086331
0
0
0
0
0.090909
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
a3bf9052999393832c5297eb11bdbcfa0be44c3d
142
py
Python
tests/coworks/biz/biz_ms.py
sidneyarcidiacono/coworks
7f51b83e8699ced991d16a5a43ad19e569b6e814
[ "MIT" ]
null
null
null
tests/coworks/biz/biz_ms.py
sidneyarcidiacono/coworks
7f51b83e8699ced991d16a5a43ad19e569b6e814
[ "MIT" ]
null
null
null
tests/coworks/biz/biz_ms.py
sidneyarcidiacono/coworks
7f51b83e8699ced991d16a5a43ad19e569b6e814
[ "MIT" ]
null
null
null
from coworks import BizMicroService, entry class BizMS(BizMicroService): @entry def get(self, name='ok'): return name, 200
15.777778
42
0.676056
17
142
5.647059
0.823529
0.416667
0
0
0
0
0
0
0
0
0
0.027523
0.232394
142
8
43
17.75
0.853211
0
0
0
0
0
0.014085
0
0
0
0
0
0
1
0.2
false
0
0.2
0.2
0.8
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
5
a3d0aa692a77abd8002a19c5cfb0669e51402d03
329
py
Python
jaxrl/agents/__init__.py
HassamSheikh/jaxrl
d0e36b44ce988c53b3f8f03448aba5fcf4c30888
[ "MIT" ]
157
2021-03-12T04:30:53.000Z
2021-06-10T11:28:48.000Z
jaxrl/agents/__init__.py
HassamSheikh/jaxrl
d0e36b44ce988c53b3f8f03448aba5fcf4c30888
[ "MIT" ]
8
2021-02-12T18:38:28.000Z
2021-02-16T14:03:00.000Z
jaxrl/agents/__init__.py
ikostrikov/jax-sac
f3d924d793bd7e0ba46b24b2092cc2817c7f962f
[ "MIT" ]
9
2021-03-24T02:54:42.000Z
2021-05-24T07:59:18.000Z
from jaxrl.agents.awac.awac_learner import AWACLearner from jaxrl.agents.bc.bc_learner import BCLearner from jaxrl.agents.ddpg.ddpg_learner import DDPGLearner from jaxrl.agents.drq.drq_learner import DrQLearner from jaxrl.agents.sac.sac_learner import SACLearner from jaxrl.agents.sac_v1.sac_v1_learner import SACV1Learner
47
60
0.854103
50
329
5.46
0.34
0.197802
0.32967
0.131868
0
0
0
0
0
0
0
0.010033
0.091185
329
6
61
54.833333
0.90301
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
4a51294a42037eb4d648b4eded5ccfe02fbe921b
237
py
Python
hypha/apply/flags/templatetags/flag_tags.py
maxpearl/hypha
e181ebadfb744aab34617bb766e746368d6f2de0
[ "BSD-3-Clause" ]
20
2021-04-08T16:38:49.000Z
2022-02-09T20:05:57.000Z
hypha/apply/flags/templatetags/flag_tags.py
maxpearl/hypha
e181ebadfb744aab34617bb766e746368d6f2de0
[ "BSD-3-Clause" ]
1,098
2017-12-15T11:23:03.000Z
2020-01-24T07:58:07.000Z
hypha/apply/flags/templatetags/flag_tags.py
maxpearl/hypha
e181ebadfb744aab34617bb766e746368d6f2de0
[ "BSD-3-Clause" ]
17
2020-02-07T14:55:54.000Z
2021-04-04T19:32:38.000Z
from django import template register = template.Library() @register.filter def flagged_by(submission, user): return submission.flagged_by(user) @register.filter def flagged_staff(submission): return submission.flagged_staff
16.928571
38
0.78903
29
237
6.310345
0.482759
0.153005
0.185792
0.262295
0
0
0
0
0
0
0
0
0.130802
237
13
39
18.230769
0.88835
0
0
0.25
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.125
0.25
0.625
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
5
4a6be078df7b0889d6448e300d8050d5b4155722
56
py
Python
justvpn/util.py
Koswu/JustVpn
b09853372cdc986675d063bc90db6312e6a7768c
[ "MIT" ]
3
2019-10-27T15:46:06.000Z
2021-03-11T04:10:10.000Z
justvpn/util.py
Koswu/JustVpn
b09853372cdc986675d063bc90db6312e6a7768c
[ "MIT" ]
null
null
null
justvpn/util.py
Koswu/JustVpn
b09853372cdc986675d063bc90db6312e6a7768c
[ "MIT" ]
2
2018-06-20T12:52:00.000Z
2020-10-07T13:31:24.000Z
import re import os from urllib.parse import urlparse
9.333333
33
0.803571
9
56
5
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.178571
56
5
34
11.2
0.978261
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
4a70d4e5b49317f1ace57ef1098a7e0e3e516b51
20
py
Python
ml/datasets/__init__.py
torfjelde/ml
6ae3a5543663a7adfe3b6f1c596093c123fa2b88
[ "MIT" ]
2
2020-01-22T08:37:29.000Z
2020-03-10T13:08:19.000Z
ml/datasets/__init__.py
torfjelde/ml
6ae3a5543663a7adfe3b6f1c596093c123fa2b88
[ "MIT" ]
null
null
null
ml/datasets/__init__.py
torfjelde/ml
6ae3a5543663a7adfe3b6f1c596093c123fa2b88
[ "MIT" ]
null
null
null
from . import mnist
10
19
0.75
3
20
5
1
0
0
0
0
0
0
0
0
0
0
0
0.2
20
1
20
20
0.9375
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
4a74a51ceb6ed8a3e5f4863ad0708a475b0cb68e
14,059
py
Python
hallo/test/modules/convert/test_convert_view_repo.py
SpangleLabs/Hallo
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
[ "MIT" ]
1
2022-01-27T13:25:01.000Z
2022-01-27T13:25:01.000Z
hallo/test/modules/convert/test_convert_view_repo.py
joshcoales/Hallo
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
[ "MIT" ]
75
2015-09-26T18:07:18.000Z
2022-01-04T07:15:11.000Z
hallo/test/modules/convert/test_convert_view_repo.py
SpangleLabs/Hallo
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
[ "MIT" ]
1
2021-04-10T12:02:47.000Z
2021-04-10T12:02:47.000Z
import unittest from datetime import datetime import hallo.modules.convert.convert_view_repo from hallo.events import EventMessage from hallo.test.modules.convert.convert_function_test_base import ConvertFunctionTestBase class MockMethod: def __init__(self, response): self.response = response self.arg = None self.args = [] def method(self, *arg): self.arg = arg self.args.append(arg) return self.response # noinspection PyArgumentList class ConvertViewRepoTest(ConvertFunctionTestBase, unittest.TestCase): def setUp(self): super().setUp() # Mock out methods self.output_repo = "{repo output}" self.output_type = "{type output}" self.output_unit = "{unit output}" self.output_group = "{group output}" self.output_prefix = "{prefix output}" self.mock_view_repo = MockMethod(self.output_repo) self.mock_view_type = MockMethod(self.output_type) self.mock_view_unit = MockMethod(self.output_unit) self.mock_view_group = MockMethod(self.output_group) self.mock_view_prefix = MockMethod(self.output_prefix) self.view_repo = hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_repo_as_string self.view_type = hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_type_as_string self.view_unit = hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_unit_as_string self.view_group = hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_prefix_group_as_string self.view_prefix = hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_prefix_as_string hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_repo_as_string = ( self.mock_view_repo.method ) hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_type_as_string = ( self.mock_view_type.method ) hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_unit_as_string = ( self.mock_view_unit.method ) hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_prefix_group_as_string = ( self.mock_view_group.method ) 
hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_prefix_as_string = ( self.mock_view_prefix.method ) def tearDown(self): super().tearDown() hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_repo_as_string = self.view_repo hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_type_as_string = self.view_type hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_unit_as_string = self.view_unit hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_prefix_group_as_string = self.view_group hallo.modules.convert.convert_view_repo.ConvertViewRepo.output_prefix_as_string = self.view_prefix def test_specified_type_invalid(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo type=no_type" ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert "unrecognised type specified" in data[0].text.lower() assert self.mock_view_type.arg is None def test_type_specified_unit_incorrect(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo type=test_type1 unit=no_unit", ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert "unrecognised unit specified" in data[0].text.lower() assert self.mock_view_type.arg is None assert self.mock_view_unit.arg is None def test_type_specified_unit_different_type(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo type=test_type1 unit=unit2a", ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert "unrecognised unit specified" in data[0].text.lower() assert self.mock_view_type.arg is None assert self.mock_view_unit.arg is None def test_type_and_unit_specified(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo type=test_type1 unit=unit1b", ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert 
self.output_unit in data[0].text.lower() assert self.mock_view_unit.arg == (self.test_unit1b,) assert self.mock_view_type.arg is None def test_type_specified(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo type=test_type1" ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert self.output_type in data[0].text.lower() assert self.mock_view_type.arg == (self.test_type1,) def test_specified_group_invalid(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo group=no_group" ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert "unrecognised prefix group specified" in data[0].text.lower() assert self.mock_view_group.arg is None def test_group_specified_prefix_incorrect(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo group=test_group1 prefix=no_prefix", ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert "unrecognised prefix specified" in data[0].text.lower() assert self.mock_view_group.arg is None assert self.mock_view_prefix.arg is None def test_group_specified_prefix_different_group(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo group=test_group1 prefix=prefix2a", ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert "unrecognised prefix specified" in data[0].text.lower() assert self.mock_view_group.arg is None assert self.mock_view_prefix.arg is None def test_group_and_prefix_specified(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo group=test_group1 prefix=prefix1a", ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert self.output_prefix in data[0].text.lower() assert self.mock_view_group.arg is None assert self.mock_view_prefix.arg == 
(self.test_prefix1a,) def test_group_and_prefix_abbr_specified(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo group=test_group1 prefix=pref1a", ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert self.output_prefix in data[0].text.lower() assert self.mock_view_group.arg is None assert self.mock_view_prefix.arg == (self.test_prefix1a,) def test_group_specified(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo group=test_group1" ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert self.output_group in data[0].text.lower() assert self.mock_view_group.arg == (self.test_group1,) def test_specified_unit_incorrect(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo unit=no_unit" ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert "unrecognised unit specified" in data[0].text.lower() assert self.mock_view_unit.arg is None def test_specified_unit_multiple(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo unit=same_name" ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert self.output_unit in data[0].text.lower() assert (self.test_unit1b,) in self.mock_view_unit.args assert (self.test_unit2b,) in self.mock_view_unit.args def test_unit_specified(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo unit=unit1b" ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert self.output_unit in data[0].text.lower() assert self.mock_view_unit.arg == (self.test_unit1b,) def test_specified_prefix_incorrect(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo prefix=no_prefix" ) ) data = self.server.get_send_data(1, 
self.test_user, EventMessage) assert "unrecognised prefix specified" in data[0].text.lower() assert self.mock_view_prefix.arg is None def test_specified_prefix_multiple(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo prefix=prefixb" ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert self.output_prefix in data[0].text.lower() assert (self.test_prefix1b,) in self.mock_view_prefix.args assert (self.test_prefix2b,) in self.mock_view_prefix.args def test_prefix_specified(self): self.function_dispatcher.dispatch( EventMessage( self.server, None, self.test_user, "convert view repo prefix=prefix1a" ) ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert self.output_prefix in data[0].text.lower() assert self.mock_view_prefix.arg == (self.test_prefix1a,) def test_nothing_specified(self): self.function_dispatcher.dispatch( EventMessage(self.server, None, self.test_user, "convert view repo") ) data = self.server.get_send_data(1, self.test_user, EventMessage) assert self.output_repo in data[0].text.lower() assert self.mock_view_repo.arg == (self.test_repo,) def test_output_repo_as_string(self): output = self.view_repo(None, self.test_repo).lower() assert "conversion repo" in output assert "unit types" in output assert self.test_type1.name in output assert self.test_type2.name in output assert "prefix groups" in output assert self.test_group1.name in output assert self.test_group2.name in output def test_output_type_as_string(self): output = self.view_type(None, self.test_type1).lower() assert "conversion type" in output assert self.test_type1.name in output assert "decimals: {}".format(self.test_type1.decimals) in output assert "base unit: {}".format(self.test_type1.base_unit.name_list[0]) in output assert "other units" in output assert self.test_unit1b.name_list[0] in output def test_output_unit_as_string(self): self.test_unit1b.valid_prefix_group = self.test_group2 
self.test_unit1b.last_updated_date = datetime(2019, 3, 2, 22, 24, 15) output = self.view_unit(None, self.test_unit1b).lower() assert "conversion unit:" in output assert "type: {}".format(self.test_type1.name) in output assert all([x in output for x in self.test_unit1b.name_list]) assert all([x in output for x in self.test_unit1b.abbr_list]) assert ( "1 {} = {} {}".format( self.test_unit1b.name_list[0], self.test_unit1b.value, self.test_unit1a.name_list[0], ) in output ) assert ( "0 {} = {} {}".format( self.test_unit1b.name_list[0], self.test_unit1b.offset, self.test_unit1a.name_list[0], ) in output ) assert "last updated: 2019-03-02 22:24:15" in output assert "prefix group: {}".format(self.test_group2.name) in output def test_output_prefix_group_as_string(self): output = self.view_group(None, self.test_group2).lower() assert "prefix group" in output assert self.test_group2.name in output assert "prefix list:" in output assert self.test_prefix2a.prefix in output assert self.test_prefix2b.prefix in output def test_output_prefix_as_string(self): output = self.view_prefix(None, self.test_prefix2b).lower() assert "prefix" in output assert self.test_prefix2b.prefix in output assert "abbreviation: {}".format(self.test_prefix2b.abbreviation) in output assert "multiplier: {}".format(self.test_prefix2b.multiplier) in output
42.219219
111
0.652465
1,745
14,059
5.005158
0.064183
0.073277
0.050836
0.047401
0.793794
0.768491
0.730021
0.727158
0.721662
0.69281
0
0.012342
0.262323
14,059
332
112
42.346386
0.829814
0.00313
0
0.403333
0
0
0.087068
0
0
0
0
0
0.25
1
0.09
false
0
0.016667
0
0.116667
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
4a8167ba45c87a3fc1b74c0b4067bc1fd3499ccd
4,216
py
Python
tests/api/test_serializer.py
Metabaron1/app
62299828009d4285930c9cb42bbc86daf86fdf5a
[ "MIT" ]
1
2021-10-04T10:25:33.000Z
2021-10-04T10:25:33.000Z
tests/api/test_serializer.py
Cartache/app
62299828009d4285930c9cb42bbc86daf86fdf5a
[ "MIT" ]
null
null
null
tests/api/test_serializer.py
Cartache/app
62299828009d4285930c9cb42bbc86daf86fdf5a
[ "MIT" ]
2
2021-10-04T10:24:48.000Z
2022-03-17T23:25:34.000Z
from flask import url_for from app.api.serializer import get_alias_infos_with_pagination_v3 from app.config import PAGE_LIMIT from app.extensions import db from app.models import User, ApiKey, Alias, Contact, EmailLog, Mailbox def test_get_alias_infos_with_pagination_v3(flask_client): user = User.create( email="a@b.c", password="password", name="Test User", activated=True, commit=True, ) # user has 1 alias that's automatically created when the account is created alias_infos = get_alias_infos_with_pagination_v3(user) assert len(alias_infos) == 1 alias_info = alias_infos[0] alias = Alias.query.first() assert alias_info.alias == alias assert alias_info.mailbox == user.default_mailbox assert alias_info.mailboxes == [user.default_mailbox] assert alias_info.nb_forward == 0 assert alias_info.nb_blocked == 0 assert alias_info.nb_reply == 0 assert alias_info.latest_email_log is None assert alias_info.latest_contact is None def test_get_alias_infos_with_pagination_v3_query_alias_email(flask_client): """test the query on the alias email""" user = User.create( email="a@b.c", password="password", name="Test User", activated=True, commit=True, ) alias = Alias.query.first() alias_infos = get_alias_infos_with_pagination_v3(user, query=alias.email) assert len(alias_infos) == 1 alias_infos = get_alias_infos_with_pagination_v3(user, query="no match") assert len(alias_infos) == 0 def test_get_alias_infos_with_pagination_v3_query_alias_mailbox(flask_client): """test the query on the alias mailbox email""" user = User.create( email="a@b.c", password="password", name="Test User", activated=True, commit=True, ) alias = Alias.query.first() alias_infos = get_alias_infos_with_pagination_v3(user, query=alias.mailbox.email) assert len(alias_infos) == 1 def test_get_alias_infos_with_pagination_v3_query_alias_mailboxes(flask_client): """test the query on the alias additional mailboxes""" user = User.create( email="a@b.c", password="password", name="Test User", activated=True, commit=True, ) 
alias = Alias.query.first() mb = Mailbox.create(user_id=user.id, email="mb@gmail.com") alias._mailboxes.append(mb) db.session.commit() alias_infos = get_alias_infos_with_pagination_v3(user, query=mb.email) assert len(alias_infos) == 1 alias_infos = get_alias_infos_with_pagination_v3(user, query=alias.email) assert len(alias_infos) == 1 def test_get_alias_infos_with_pagination_v3_query_alias_note(flask_client): """test the query on the alias note""" user = User.create( email="a@b.c", password="password", name="Test User", activated=True, commit=True, ) alias = Alias.query.first() alias.note = "test note" db.session.commit() alias_infos = get_alias_infos_with_pagination_v3(user, query="test note") assert len(alias_infos) == 1 def test_get_alias_infos_with_pagination_v3_query_alias_name(flask_client): """test the query on the alias name""" user = User.create( email="a@b.c", password="password", name="Test User", activated=True, commit=True, ) alias = Alias.query.first() alias.name = "Test Name" db.session.commit() alias_infos = get_alias_infos_with_pagination_v3(user, query="test name") assert len(alias_infos) == 1 def test_get_alias_infos_with_pagination_v3_no_duplicate(flask_client): """When an alias belongs to multiple mailboxes, make sure get_alias_infos_with_pagination_v3 returns no duplicates """ user = User.create( email="a@b.c", password="password", name="Test User", activated=True, commit=True, ) alias = Alias.query.first() mb = Mailbox.create(user_id=user.id, email="mb@gmail.com") alias._mailboxes.append(mb) db.session.commit() alias_infos = get_alias_infos_with_pagination_v3(user) assert len(alias_infos) == 1
29.482517
96
0.684535
587
4,216
4.638842
0.141397
0.13588
0.085935
0.112376
0.769739
0.756519
0.709144
0.709144
0.635329
0.635329
0
0.00965
0.213472
4,216
142
97
29.690141
0.81152
0.089421
0
0.657143
0
0
0.058436
0
0
0
0
0
0.161905
1
0.066667
false
0.066667
0.047619
0
0.114286
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
5
4a8270de899146409e2fdc5d801f38201eeb2341
221
py
Python
tests/core/scheduling/test_listener.py
ymoch/preacher
ae68170d14c72791884e91b20054bd13a79b52d0
[ "MIT" ]
3
2019-08-01T03:14:49.000Z
2020-01-31T08:55:22.000Z
tests/core/scheduling/test_listener.py
ymoch/preacher
ae68170d14c72791884e91b20054bd13a79b52d0
[ "MIT" ]
353
2019-04-14T14:53:28.000Z
2022-03-11T03:26:08.000Z
tests/core/scheduling/test_listener.py
ymoch/preacher
ae68170d14c72791884e91b20054bd13a79b52d0
[ "MIT" ]
1
2020-08-01T06:23:08.000Z
2020-08-01T06:23:08.000Z
from unittest.mock import sentinel from preacher.core.scheduling.listener import Listener def test_listener(): listener = Listener() listener.on_end(sentinel.status) listener.on_scenario(sentinel.scenario)
22.1
54
0.782805
27
221
6.296296
0.555556
0.282353
0.282353
0
0
0
0
0
0
0
0
0
0.135747
221
9
55
24.555556
0.890052
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
4a9210e69a5b01e157689bfffe2fe1f0d3533460
54
py
Python
Problems/Dynamic Programming/Easy/DivisorGame/divisor_game.py
dolong2110/Algorithm-By-Problems-Python
31ecc7367aaabdd2b0ac0af7f63ca5796d70c730
[ "MIT" ]
1
2021-08-16T14:52:05.000Z
2021-08-16T14:52:05.000Z
Problems/Dynamic Programming/Easy/DivisorGame/divisor_game.py
dolong2110/Algorithm-By-Problems-Python
31ecc7367aaabdd2b0ac0af7f63ca5796d70c730
[ "MIT" ]
null
null
null
Problems/Dynamic Programming/Easy/DivisorGame/divisor_game.py
dolong2110/Algorithm-By-Problems-Python
31ecc7367aaabdd2b0ac0af7f63ca5796d70c730
[ "MIT" ]
null
null
null
def divisorGame(n: int) -> bool: return n % 2 == 0
27
32
0.574074
9
54
3.444444
0.888889
0
0
0
0
0
0
0
0
0
0
0.05
0.259259
54
2
33
27
0.725
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
4aa7ed8676ed50488b6e338368153702c796410c
181
py
Python
authentication/socialaccount/providers/foursquare/urls.py
vo0doO/pydj-persweb
efcd6b7090230f7c0b9ec056008f6d1d9e876ed9
[ "CC0-1.0" ]
null
null
null
authentication/socialaccount/providers/foursquare/urls.py
vo0doO/pydj-persweb
efcd6b7090230f7c0b9ec056008f6d1d9e876ed9
[ "CC0-1.0" ]
4
2020-05-06T17:22:00.000Z
2021-12-13T20:43:30.000Z
authentication/socialaccount/providers/foursquare/urls.py
vo0doO/pydj-persweb
efcd6b7090230f7c0b9ec056008f6d1d9e876ed9
[ "CC0-1.0" ]
null
null
null
from authentication.socialaccount.providers.oauth2.urls import default_urlpatterns from .provider import FoursquareProvider urlpatterns = default_urlpatterns(FoursquareProvider)
25.857143
82
0.878453
17
181
9.235294
0.647059
0.229299
0
0
0
0
0
0
0
0
0
0.005988
0.077348
181
6
83
30.166667
0.934132
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
434f72393e2b00d39e15d73b93a0ca056032f388
1,308
py
Python
networking_cisco/services/trunk/trunkstubs.py
Tehsmash/networking-cisco
fdbd79a832fe090f3c4c7bd7a4f0ec0c349d4d16
[ "Apache-2.0" ]
1
2019-01-19T09:12:49.000Z
2019-01-19T09:12:49.000Z
networking_cisco/services/trunk/trunkstubs.py
Tehsmash/networking-cisco
fdbd79a832fe090f3c4c7bd7a4f0ec0c349d4d16
[ "Apache-2.0" ]
null
null
null
networking_cisco/services/trunk/trunkstubs.py
Tehsmash/networking-cisco
fdbd79a832fe090f3c4c7bd7a4f0ec0c349d4d16
[ "Apache-2.0" ]
null
null
null
# Copyright (c) 2017 Cisco Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # Stub module containing the networking_cisco trunk APIs. # # TODO(rcurran): Remove once networking_cisco is no longer supporting # stable/newton. TRUNK_SUBPORT_OWNER = "" VLAN = "" ACTIVE_STATUS = "" class SubPort(object): @classmethod def get_object(cls, context, *args): return None class TrunkObject(object): @classmethod def update(cls, **kargs): pass class Trunk(object): @classmethod def get_object(cls, context, **kargs): return TrunkObject class DriverBase(object): def __init__(self, name, interfaces, segmentation_types, agent_type=None, can_trunk_bound_port=False): pass
27.25
78
0.701835
173
1,308
5.213873
0.66474
0.066519
0.066519
0.035477
0.086475
0.086475
0.086475
0
0
0
0
0.007813
0.217125
1,308
47
79
27.829787
0.873047
0.571865
0
0.263158
0
0
0
0
0
0
0
0.021277
0
1
0.210526
false
0.105263
0
0.105263
0.526316
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
0
1
1
0
0
5
4372e2b7c4ea6a67a98506da992a2f14e14d02a0
138,825
py
Python
sdk/python/pulumi_azure_native/servicefabric/v20210701preview/_inputs.py
polivbr/pulumi-azure-native
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
[ "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure_native/servicefabric/v20210701preview/_inputs.py
polivbr/pulumi-azure-native
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
[ "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure_native/servicefabric/v20210701preview/_inputs.py
polivbr/pulumi-azure-native
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities from ._enums import * __all__ = [ 'AddRemoveIncrementalNamedPartitionScalingMechanismArgs', 'ApplicationHealthPolicyArgs', 'ApplicationTypeVersionsCleanupPolicyArgs', 'ApplicationUpgradePolicyArgs', 'ApplicationUserAssignedIdentityArgs', 'AveragePartitionLoadScalingTriggerArgs', 'AverageServiceLoadScalingTriggerArgs', 'AzureActiveDirectoryArgs', 'ClientCertificateArgs', 'EndpointRangeDescriptionArgs', 'FrontendConfigurationArgs', 'IPTagArgs', 'LoadBalancingRuleArgs', 'ManagedIdentityArgs', 'NamedPartitionSchemeArgs', 'NetworkSecurityRuleArgs', 'NodeTypeSkuArgs', 'PartitionInstanceCountScaleMechanismArgs', 'RollingUpgradeMonitoringPolicyArgs', 'ScalingPolicyArgs', 'ServiceCorrelationArgs', 'ServiceLoadMetricArgs', 'ServicePlacementInvalidDomainPolicyArgs', 'ServicePlacementNonPartiallyPlaceServicePolicyArgs', 'ServicePlacementPreferPrimaryDomainPolicyArgs', 'ServicePlacementRequireDomainDistributionPolicyArgs', 'ServicePlacementRequiredDomainPolicyArgs', 'ServiceTypeHealthPolicyArgs', 'SettingsParameterDescriptionArgs', 'SettingsSectionDescriptionArgs', 'SingletonPartitionSchemeArgs', 'SkuArgs', 'StatefulServicePropertiesArgs', 'StatelessServicePropertiesArgs', 'SubResourceArgs', 'UniformInt64RangePartitionSchemeArgs', 'VMSSExtensionArgs', 'VaultCertificateArgs', 'VaultSecretGroupArgs', 'VmManagedIdentityArgs', ] @pulumi.input_type class AddRemoveIncrementalNamedPartitionScalingMechanismArgs: def __init__(__self__, *, kind: pulumi.Input[str], max_partition_count: pulumi.Input[int], min_partition_count: pulumi.Input[int], scale_increment: pulumi.Input[int]): """ Represents a scaling mechanism for adding or removing named partitions of a stateless service. 
Partition names are in the format '0','1'...'N-1'. :param pulumi.Input[str] kind: Enumerates the ways that a service can be partitioned. Expected value is 'AddRemoveIncrementalNamedPartition'. :param pulumi.Input[int] max_partition_count: Maximum number of named partitions of the service. :param pulumi.Input[int] min_partition_count: Minimum number of named partitions of the service. :param pulumi.Input[int] scale_increment: The number of instances to add or remove during a scaling operation. """ pulumi.set(__self__, "kind", 'AddRemoveIncrementalNamedPartition') pulumi.set(__self__, "max_partition_count", max_partition_count) pulumi.set(__self__, "min_partition_count", min_partition_count) pulumi.set(__self__, "scale_increment", scale_increment) @property @pulumi.getter def kind(self) -> pulumi.Input[str]: """ Enumerates the ways that a service can be partitioned. Expected value is 'AddRemoveIncrementalNamedPartition'. """ return pulumi.get(self, "kind") @kind.setter def kind(self, value: pulumi.Input[str]): pulumi.set(self, "kind", value) @property @pulumi.getter(name="maxPartitionCount") def max_partition_count(self) -> pulumi.Input[int]: """ Maximum number of named partitions of the service. """ return pulumi.get(self, "max_partition_count") @max_partition_count.setter def max_partition_count(self, value: pulumi.Input[int]): pulumi.set(self, "max_partition_count", value) @property @pulumi.getter(name="minPartitionCount") def min_partition_count(self) -> pulumi.Input[int]: """ Minimum number of named partitions of the service. """ return pulumi.get(self, "min_partition_count") @min_partition_count.setter def min_partition_count(self, value: pulumi.Input[int]): pulumi.set(self, "min_partition_count", value) @property @pulumi.getter(name="scaleIncrement") def scale_increment(self) -> pulumi.Input[int]: """ The number of instances to add or remove during a scaling operation. 
""" return pulumi.get(self, "scale_increment") @scale_increment.setter def scale_increment(self, value: pulumi.Input[int]): pulumi.set(self, "scale_increment", value) @pulumi.input_type class ApplicationHealthPolicyArgs: def __init__(__self__, *, consider_warning_as_error: pulumi.Input[bool], max_percent_unhealthy_deployed_applications: pulumi.Input[int], default_service_type_health_policy: Optional[pulumi.Input['ServiceTypeHealthPolicyArgs']] = None, service_type_health_policy_map: Optional[pulumi.Input[Mapping[str, pulumi.Input['ServiceTypeHealthPolicyArgs']]]] = None): """ Defines a health policy used to evaluate the health of an application or one of its children entities. :param pulumi.Input[bool] consider_warning_as_error: Indicates whether warnings are treated with the same severity as errors. :param pulumi.Input[int] max_percent_unhealthy_deployed_applications: The maximum allowed percentage of unhealthy deployed applications. Allowed values are Byte values from zero to 100. The percentage represents the maximum tolerated percentage of deployed applications that can be unhealthy before the application is considered in error. This is calculated by dividing the number of unhealthy deployed applications over the number of nodes where the application is currently deployed on in the cluster. The computation rounds up to tolerate one failure on small numbers of nodes. Default percentage is zero. :param pulumi.Input['ServiceTypeHealthPolicyArgs'] default_service_type_health_policy: The health policy used by default to evaluate the health of a service type. :param pulumi.Input[Mapping[str, pulumi.Input['ServiceTypeHealthPolicyArgs']]] service_type_health_policy_map: The map with service type health policy per service type name. The map is empty by default. 
""" pulumi.set(__self__, "consider_warning_as_error", consider_warning_as_error) pulumi.set(__self__, "max_percent_unhealthy_deployed_applications", max_percent_unhealthy_deployed_applications) if default_service_type_health_policy is not None: pulumi.set(__self__, "default_service_type_health_policy", default_service_type_health_policy) if service_type_health_policy_map is not None: pulumi.set(__self__, "service_type_health_policy_map", service_type_health_policy_map) @property @pulumi.getter(name="considerWarningAsError") def consider_warning_as_error(self) -> pulumi.Input[bool]: """ Indicates whether warnings are treated with the same severity as errors. """ return pulumi.get(self, "consider_warning_as_error") @consider_warning_as_error.setter def consider_warning_as_error(self, value: pulumi.Input[bool]): pulumi.set(self, "consider_warning_as_error", value) @property @pulumi.getter(name="maxPercentUnhealthyDeployedApplications") def max_percent_unhealthy_deployed_applications(self) -> pulumi.Input[int]: """ The maximum allowed percentage of unhealthy deployed applications. Allowed values are Byte values from zero to 100. The percentage represents the maximum tolerated percentage of deployed applications that can be unhealthy before the application is considered in error. This is calculated by dividing the number of unhealthy deployed applications over the number of nodes where the application is currently deployed on in the cluster. The computation rounds up to tolerate one failure on small numbers of nodes. Default percentage is zero. 
""" return pulumi.get(self, "max_percent_unhealthy_deployed_applications") @max_percent_unhealthy_deployed_applications.setter def max_percent_unhealthy_deployed_applications(self, value: pulumi.Input[int]): pulumi.set(self, "max_percent_unhealthy_deployed_applications", value) @property @pulumi.getter(name="defaultServiceTypeHealthPolicy") def default_service_type_health_policy(self) -> Optional[pulumi.Input['ServiceTypeHealthPolicyArgs']]: """ The health policy used by default to evaluate the health of a service type. """ return pulumi.get(self, "default_service_type_health_policy") @default_service_type_health_policy.setter def default_service_type_health_policy(self, value: Optional[pulumi.Input['ServiceTypeHealthPolicyArgs']]): pulumi.set(self, "default_service_type_health_policy", value) @property @pulumi.getter(name="serviceTypeHealthPolicyMap") def service_type_health_policy_map(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input['ServiceTypeHealthPolicyArgs']]]]: """ The map with service type health policy per service type name. The map is empty by default. """ return pulumi.get(self, "service_type_health_policy_map") @service_type_health_policy_map.setter def service_type_health_policy_map(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input['ServiceTypeHealthPolicyArgs']]]]): pulumi.set(self, "service_type_health_policy_map", value) @pulumi.input_type class ApplicationTypeVersionsCleanupPolicyArgs: def __init__(__self__, *, max_unused_versions_to_keep: pulumi.Input[int]): """ The policy used to clean up unused versions. When the policy is not specified explicitly, the default unused application versions to keep will be 3. :param pulumi.Input[int] max_unused_versions_to_keep: Number of unused versions per application type to keep. 
""" pulumi.set(__self__, "max_unused_versions_to_keep", max_unused_versions_to_keep) @property @pulumi.getter(name="maxUnusedVersionsToKeep") def max_unused_versions_to_keep(self) -> pulumi.Input[int]: """ Number of unused versions per application type to keep. """ return pulumi.get(self, "max_unused_versions_to_keep") @max_unused_versions_to_keep.setter def max_unused_versions_to_keep(self, value: pulumi.Input[int]): pulumi.set(self, "max_unused_versions_to_keep", value) @pulumi.input_type class ApplicationUpgradePolicyArgs: def __init__(__self__, *, application_health_policy: Optional[pulumi.Input['ApplicationHealthPolicyArgs']] = None, force_restart: Optional[pulumi.Input[bool]] = None, instance_close_delay_duration: Optional[pulumi.Input[float]] = None, recreate_application: Optional[pulumi.Input[bool]] = None, rolling_upgrade_monitoring_policy: Optional[pulumi.Input['RollingUpgradeMonitoringPolicyArgs']] = None, upgrade_mode: Optional[pulumi.Input[Union[str, 'RollingUpgradeMode']]] = None, upgrade_replica_set_check_timeout: Optional[pulumi.Input[float]] = None): """ Describes the policy for a monitored application upgrade. :param pulumi.Input['ApplicationHealthPolicyArgs'] application_health_policy: Defines a health policy used to evaluate the health of an application or one of its children entities. :param pulumi.Input[bool] force_restart: If true, then processes are forcefully restarted during upgrade even when the code version has not changed (the upgrade only changes configuration or data). :param pulumi.Input[float] instance_close_delay_duration: Duration in seconds, to wait before a stateless instance is closed, to allow the active requests to drain gracefully. This would be effective when the instance is closing during the application/cluster upgrade, only for those instances which have a non-zero delay duration configured in the service description. 
:param pulumi.Input[bool] recreate_application: Determines whether the application should be recreated on update. If value=true, the rest of the upgrade policy parameters are not allowed. :param pulumi.Input['RollingUpgradeMonitoringPolicyArgs'] rolling_upgrade_monitoring_policy: The policy used for monitoring the application upgrade :param pulumi.Input[Union[str, 'RollingUpgradeMode']] upgrade_mode: The mode used to monitor health during a rolling upgrade. The values are Monitored, and UnmonitoredAuto. :param pulumi.Input[float] upgrade_replica_set_check_timeout: The maximum amount of time to block processing of an upgrade domain and prevent loss of availability when there are unexpected issues. When this timeout expires, processing of the upgrade domain will proceed regardless of availability loss issues. The timeout is reset at the start of each upgrade domain. Valid values are between 0 and 42949672925 inclusive. (unsigned 32-bit integer). """ if application_health_policy is not None: pulumi.set(__self__, "application_health_policy", application_health_policy) if force_restart is not None: pulumi.set(__self__, "force_restart", force_restart) if instance_close_delay_duration is not None: pulumi.set(__self__, "instance_close_delay_duration", instance_close_delay_duration) if recreate_application is not None: pulumi.set(__self__, "recreate_application", recreate_application) if rolling_upgrade_monitoring_policy is not None: pulumi.set(__self__, "rolling_upgrade_monitoring_policy", rolling_upgrade_monitoring_policy) if upgrade_mode is not None: pulumi.set(__self__, "upgrade_mode", upgrade_mode) if upgrade_replica_set_check_timeout is not None: pulumi.set(__self__, "upgrade_replica_set_check_timeout", upgrade_replica_set_check_timeout) @property @pulumi.getter(name="applicationHealthPolicy") def application_health_policy(self) -> Optional[pulumi.Input['ApplicationHealthPolicyArgs']]: """ Defines a health policy used to evaluate the health of an application or one of 
        its children entities.
        """
        return pulumi.get(self, "application_health_policy")

    @application_health_policy.setter
    def application_health_policy(self, value: Optional[pulumi.Input['ApplicationHealthPolicyArgs']]):
        pulumi.set(self, "application_health_policy", value)

    @property
    @pulumi.getter(name="forceRestart")
    def force_restart(self) -> Optional[pulumi.Input[bool]]:
        """
        If true, then processes are forcefully restarted during upgrade even when the code version
        has not changed (the upgrade only changes configuration or data).
        """
        return pulumi.get(self, "force_restart")

    @force_restart.setter
    def force_restart(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "force_restart", value)

    @property
    @pulumi.getter(name="instanceCloseDelayDuration")
    def instance_close_delay_duration(self) -> Optional[pulumi.Input[float]]:
        """
        Duration in seconds, to wait before a stateless instance is closed, to allow the active requests to drain gracefully. This would be effective when the instance is closing during the application/cluster upgrade, only for those instances which have a non-zero delay duration configured in the service description.
        """
        return pulumi.get(self, "instance_close_delay_duration")

    @instance_close_delay_duration.setter
    def instance_close_delay_duration(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "instance_close_delay_duration", value)

    @property
    @pulumi.getter(name="recreateApplication")
    def recreate_application(self) -> Optional[pulumi.Input[bool]]:
        """
        Determines whether the application should be recreated on update. If value=true, the rest of the upgrade policy parameters are not allowed.
        """
        return pulumi.get(self, "recreate_application")

    @recreate_application.setter
    def recreate_application(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "recreate_application", value)

    @property
    @pulumi.getter(name="rollingUpgradeMonitoringPolicy")
    def rolling_upgrade_monitoring_policy(self) -> Optional[pulumi.Input['RollingUpgradeMonitoringPolicyArgs']]:
        """
        The policy used for monitoring the application upgrade
        """
        return pulumi.get(self, "rolling_upgrade_monitoring_policy")

    @rolling_upgrade_monitoring_policy.setter
    def rolling_upgrade_monitoring_policy(self, value: Optional[pulumi.Input['RollingUpgradeMonitoringPolicyArgs']]):
        pulumi.set(self, "rolling_upgrade_monitoring_policy", value)

    @property
    @pulumi.getter(name="upgradeMode")
    def upgrade_mode(self) -> Optional[pulumi.Input[Union[str, 'RollingUpgradeMode']]]:
        """
        The mode used to monitor health during a rolling upgrade. The values are Monitored, and UnmonitoredAuto.
        """
        return pulumi.get(self, "upgrade_mode")

    @upgrade_mode.setter
    def upgrade_mode(self, value: Optional[pulumi.Input[Union[str, 'RollingUpgradeMode']]]):
        pulumi.set(self, "upgrade_mode", value)

    @property
    @pulumi.getter(name="upgradeReplicaSetCheckTimeout")
    def upgrade_replica_set_check_timeout(self) -> Optional[pulumi.Input[float]]:
        """
        The maximum amount of time to block processing of an upgrade domain and prevent loss of availability when there are unexpected issues. When this timeout expires, processing of the upgrade domain will proceed regardless of availability loss issues. The timeout is reset at the start of each upgrade domain. Valid values are between 0 and 4294967295 inclusive. (unsigned 32-bit integer).
        """
        return pulumi.get(self, "upgrade_replica_set_check_timeout")

    @upgrade_replica_set_check_timeout.setter
    def upgrade_replica_set_check_timeout(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "upgrade_replica_set_check_timeout", value)


@pulumi.input_type
class ApplicationUserAssignedIdentityArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 principal_id: pulumi.Input[str]):
        """
        A user-assigned identity reference for an application.

        :param pulumi.Input[str] name: The friendly name of user assigned identity.
        :param pulumi.Input[str] principal_id: The principal id of user assigned identity.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "principal_id", principal_id)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The friendly name of user assigned identity.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="principalId")
    def principal_id(self) -> pulumi.Input[str]:
        """
        The principal id of user assigned identity.
        """
        return pulumi.get(self, "principal_id")

    @principal_id.setter
    def principal_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "principal_id", value)


@pulumi.input_type
class AveragePartitionLoadScalingTriggerArgs:
    def __init__(__self__, *,
                 kind: pulumi.Input[str],
                 lower_load_threshold: pulumi.Input[float],
                 metric_name: pulumi.Input[str],
                 scale_interval: pulumi.Input[str],
                 upper_load_threshold: pulumi.Input[float]):
        """
        Represents a scaling trigger related to an average load of a metric/resource of a partition.
        :param pulumi.Input[str] kind: Enumerates the ways that a service can be partitioned. Expected value is 'AveragePartitionLoadTrigger'.
        :param pulumi.Input[float] lower_load_threshold: The lower limit of the load below which a scale in operation should be performed.
        :param pulumi.Input[str] metric_name: The name of the metric for which usage should be tracked.
        :param pulumi.Input[str] scale_interval: The period in seconds on which a decision is made whether to scale or not. This property should come in ISO 8601 format "hh:mm:ss".
        :param pulumi.Input[float] upper_load_threshold: The upper limit of the load beyond which a scale out operation should be performed.
        """
        # NOTE: 'kind' is the discriminator for this trigger type; the passed value is
        # ignored and the constant 'AveragePartitionLoadTrigger' is always stored.
        pulumi.set(__self__, "kind", 'AveragePartitionLoadTrigger')
        pulumi.set(__self__, "lower_load_threshold", lower_load_threshold)
        pulumi.set(__self__, "metric_name", metric_name)
        pulumi.set(__self__, "scale_interval", scale_interval)
        pulumi.set(__self__, "upper_load_threshold", upper_load_threshold)

    @property
    @pulumi.getter
    def kind(self) -> pulumi.Input[str]:
        """
        Enumerates the ways that a service can be partitioned.
        Expected value is 'AveragePartitionLoadTrigger'.
        """
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: pulumi.Input[str]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter(name="lowerLoadThreshold")
    def lower_load_threshold(self) -> pulumi.Input[float]:
        """
        The lower limit of the load below which a scale in operation should be performed.
        """
        return pulumi.get(self, "lower_load_threshold")

    @lower_load_threshold.setter
    def lower_load_threshold(self, value: pulumi.Input[float]):
        pulumi.set(self, "lower_load_threshold", value)

    @property
    @pulumi.getter(name="metricName")
    def metric_name(self) -> pulumi.Input[str]:
        """
        The name of the metric for which usage should be tracked.
        """
        return pulumi.get(self, "metric_name")

    @metric_name.setter
    def metric_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "metric_name", value)

    @property
    @pulumi.getter(name="scaleInterval")
    def scale_interval(self) -> pulumi.Input[str]:
        """
        The period in seconds on which a decision is made whether to scale or not. This property should come in ISO 8601 format "hh:mm:ss".
        """
        return pulumi.get(self, "scale_interval")

    @scale_interval.setter
    def scale_interval(self, value: pulumi.Input[str]):
        pulumi.set(self, "scale_interval", value)

    @property
    @pulumi.getter(name="upperLoadThreshold")
    def upper_load_threshold(self) -> pulumi.Input[float]:
        """
        The upper limit of the load beyond which a scale out operation should be performed.
        """
        return pulumi.get(self, "upper_load_threshold")

    @upper_load_threshold.setter
    def upper_load_threshold(self, value: pulumi.Input[float]):
        pulumi.set(self, "upper_load_threshold", value)


@pulumi.input_type
class AverageServiceLoadScalingTriggerArgs:
    def __init__(__self__, *,
                 kind: pulumi.Input[str],
                 lower_load_threshold: pulumi.Input[float],
                 metric_name: pulumi.Input[str],
                 scale_interval: pulumi.Input[str],
                 upper_load_threshold: pulumi.Input[float],
                 use_only_primary_load: pulumi.Input[bool]):
        """
        Represents a scaling policy related to an average load of a metric/resource of a service.
        :param pulumi.Input[str] kind: Enumerates the ways that a service can be partitioned. Expected value is 'AverageServiceLoadTrigger'.
        :param pulumi.Input[float] lower_load_threshold: The lower limit of the load below which a scale in operation should be performed.
        :param pulumi.Input[str] metric_name: The name of the metric for which usage should be tracked.
        :param pulumi.Input[str] scale_interval: The period in seconds on which a decision is made whether to scale or not. This property should come in ISO 8601 format "hh:mm:ss".
        :param pulumi.Input[float] upper_load_threshold: The upper limit of the load beyond which a scale out operation should be performed.
        :param pulumi.Input[bool] use_only_primary_load: Flag determines whether only the load of primary replica should be considered for scaling. If set to true, then trigger will only consider the load of primary replicas of stateful service. If set to false, trigger will consider load of all replicas. This parameter cannot be set to true for stateless service.
""" pulumi.set(__self__, "kind", 'AverageServiceLoadTrigger') pulumi.set(__self__, "lower_load_threshold", lower_load_threshold) pulumi.set(__self__, "metric_name", metric_name) pulumi.set(__self__, "scale_interval", scale_interval) pulumi.set(__self__, "upper_load_threshold", upper_load_threshold) pulumi.set(__self__, "use_only_primary_load", use_only_primary_load) @property @pulumi.getter def kind(self) -> pulumi.Input[str]: """ Enumerates the ways that a service can be partitioned. Expected value is 'AverageServiceLoadTrigger'. """ return pulumi.get(self, "kind") @kind.setter def kind(self, value: pulumi.Input[str]): pulumi.set(self, "kind", value) @property @pulumi.getter(name="lowerLoadThreshold") def lower_load_threshold(self) -> pulumi.Input[float]: """ The lower limit of the load below which a scale in operation should be performed. """ return pulumi.get(self, "lower_load_threshold") @lower_load_threshold.setter def lower_load_threshold(self, value: pulumi.Input[float]): pulumi.set(self, "lower_load_threshold", value) @property @pulumi.getter(name="metricName") def metric_name(self) -> pulumi.Input[str]: """ The name of the metric for which usage should be tracked. """ return pulumi.get(self, "metric_name") @metric_name.setter def metric_name(self, value: pulumi.Input[str]): pulumi.set(self, "metric_name", value) @property @pulumi.getter(name="scaleInterval") def scale_interval(self) -> pulumi.Input[str]: """ The period in seconds on which a decision is made whether to scale or not. This property should come in ISO 8601 format "hh:mm:ss". """ return pulumi.get(self, "scale_interval") @scale_interval.setter def scale_interval(self, value: pulumi.Input[str]): pulumi.set(self, "scale_interval", value) @property @pulumi.getter(name="upperLoadThreshold") def upper_load_threshold(self) -> pulumi.Input[float]: """ The upper limit of the load beyond which a scale out operation should be performed. 
""" return pulumi.get(self, "upper_load_threshold") @upper_load_threshold.setter def upper_load_threshold(self, value: pulumi.Input[float]): pulumi.set(self, "upper_load_threshold", value) @property @pulumi.getter(name="useOnlyPrimaryLoad") def use_only_primary_load(self) -> pulumi.Input[bool]: """ Flag determines whether only the load of primary replica should be considered for scaling. If set to true, then trigger will only consider the load of primary replicas of stateful service. If set to false, trigger will consider load of all replicas. This parameter cannot be set to true for stateless service. """ return pulumi.get(self, "use_only_primary_load") @use_only_primary_load.setter def use_only_primary_load(self, value: pulumi.Input[bool]): pulumi.set(self, "use_only_primary_load", value) @pulumi.input_type class AzureActiveDirectoryArgs: def __init__(__self__, *, client_application: Optional[pulumi.Input[str]] = None, cluster_application: Optional[pulumi.Input[str]] = None, tenant_id: Optional[pulumi.Input[str]] = None): """ The settings to enable AAD authentication on the cluster. :param pulumi.Input[str] client_application: Azure active directory client application id. :param pulumi.Input[str] cluster_application: Azure active directory cluster application id. :param pulumi.Input[str] tenant_id: Azure active directory tenant id. """ if client_application is not None: pulumi.set(__self__, "client_application", client_application) if cluster_application is not None: pulumi.set(__self__, "cluster_application", cluster_application) if tenant_id is not None: pulumi.set(__self__, "tenant_id", tenant_id) @property @pulumi.getter(name="clientApplication") def client_application(self) -> Optional[pulumi.Input[str]]: """ Azure active directory client application id. 
""" return pulumi.get(self, "client_application") @client_application.setter def client_application(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "client_application", value) @property @pulumi.getter(name="clusterApplication") def cluster_application(self) -> Optional[pulumi.Input[str]]: """ Azure active directory cluster application id. """ return pulumi.get(self, "cluster_application") @cluster_application.setter def cluster_application(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "cluster_application", value) @property @pulumi.getter(name="tenantId") def tenant_id(self) -> Optional[pulumi.Input[str]]: """ Azure active directory tenant id. """ return pulumi.get(self, "tenant_id") @tenant_id.setter def tenant_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "tenant_id", value) @pulumi.input_type class ClientCertificateArgs: def __init__(__self__, *, is_admin: pulumi.Input[bool], common_name: Optional[pulumi.Input[str]] = None, issuer_thumbprint: Optional[pulumi.Input[str]] = None, thumbprint: Optional[pulumi.Input[str]] = None): """ Client certificate definition. :param pulumi.Input[bool] is_admin: Indicates if the client certificate has admin access to the cluster. Non admin clients can perform only read only operations on the cluster. :param pulumi.Input[str] common_name: Certificate common name. :param pulumi.Input[str] issuer_thumbprint: Issuer thumbprint for the certificate. Only used together with CommonName. :param pulumi.Input[str] thumbprint: Certificate thumbprint. """ pulumi.set(__self__, "is_admin", is_admin) if common_name is not None: pulumi.set(__self__, "common_name", common_name) if issuer_thumbprint is not None: pulumi.set(__self__, "issuer_thumbprint", issuer_thumbprint) if thumbprint is not None: pulumi.set(__self__, "thumbprint", thumbprint) @property @pulumi.getter(name="isAdmin") def is_admin(self) -> pulumi.Input[bool]: """ Indicates if the client certificate has admin access to the cluster. 
Non admin clients can perform only read only operations on the cluster. """ return pulumi.get(self, "is_admin") @is_admin.setter def is_admin(self, value: pulumi.Input[bool]): pulumi.set(self, "is_admin", value) @property @pulumi.getter(name="commonName") def common_name(self) -> Optional[pulumi.Input[str]]: """ Certificate common name. """ return pulumi.get(self, "common_name") @common_name.setter def common_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "common_name", value) @property @pulumi.getter(name="issuerThumbprint") def issuer_thumbprint(self) -> Optional[pulumi.Input[str]]: """ Issuer thumbprint for the certificate. Only used together with CommonName. """ return pulumi.get(self, "issuer_thumbprint") @issuer_thumbprint.setter def issuer_thumbprint(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "issuer_thumbprint", value) @property @pulumi.getter def thumbprint(self) -> Optional[pulumi.Input[str]]: """ Certificate thumbprint. """ return pulumi.get(self, "thumbprint") @thumbprint.setter def thumbprint(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "thumbprint", value) @pulumi.input_type class EndpointRangeDescriptionArgs: def __init__(__self__, *, end_port: pulumi.Input[int], start_port: pulumi.Input[int]): """ Port range details :param pulumi.Input[int] end_port: End port of a range of ports :param pulumi.Input[int] start_port: Starting port of a range of ports """ pulumi.set(__self__, "end_port", end_port) pulumi.set(__self__, "start_port", start_port) @property @pulumi.getter(name="endPort") def end_port(self) -> pulumi.Input[int]: """ End port of a range of ports """ return pulumi.get(self, "end_port") @end_port.setter def end_port(self, value: pulumi.Input[int]): pulumi.set(self, "end_port", value) @property @pulumi.getter(name="startPort") def start_port(self) -> pulumi.Input[int]: """ Starting port of a range of ports """ return pulumi.get(self, "start_port") @start_port.setter def start_port(self, value: 
pulumi.Input[int]): pulumi.set(self, "start_port", value) @pulumi.input_type class FrontendConfigurationArgs: def __init__(__self__, *, ip_address_type: Optional[pulumi.Input[Union[str, 'IPAddressType']]] = None, load_balancer_backend_address_pool_id: Optional[pulumi.Input[str]] = None, load_balancer_inbound_nat_pool_id: Optional[pulumi.Input[str]] = None): """ Describes the frontend configurations for the node type. :param pulumi.Input[Union[str, 'IPAddressType']] ip_address_type: The IP address type of this frontend configuration. If omitted the default value is IPv4. :param pulumi.Input[str] load_balancer_backend_address_pool_id: The resource Id of the Load Balancer backend address pool that the VM instances of the node type are associated with. The format of the resource Id is '/subscriptions/<subscriptionId>/resourceGroups/<resourceGroupName>/providers/Microsoft.Network/loadBalancers/<loadBalancerName>/backendAddressPools/<backendAddressPoolName>'. :param pulumi.Input[str] load_balancer_inbound_nat_pool_id: The resource Id of the Load Balancer inbound NAT pool that the VM instances of the node type are associated with. The format of the resource Id is '/subscriptions/<subscriptionId>/resourceGroups/<resourceGroupName>/providers/Microsoft.Network/loadBalancers/<loadBalancerName>/inboundNatPools/<inboundNatPoolName>'. """ if ip_address_type is not None: pulumi.set(__self__, "ip_address_type", ip_address_type) if load_balancer_backend_address_pool_id is not None: pulumi.set(__self__, "load_balancer_backend_address_pool_id", load_balancer_backend_address_pool_id) if load_balancer_inbound_nat_pool_id is not None: pulumi.set(__self__, "load_balancer_inbound_nat_pool_id", load_balancer_inbound_nat_pool_id) @property @pulumi.getter(name="ipAddressType") def ip_address_type(self) -> Optional[pulumi.Input[Union[str, 'IPAddressType']]]: """ The IP address type of this frontend configuration. If omitted the default value is IPv4. 
""" return pulumi.get(self, "ip_address_type") @ip_address_type.setter def ip_address_type(self, value: Optional[pulumi.Input[Union[str, 'IPAddressType']]]): pulumi.set(self, "ip_address_type", value) @property @pulumi.getter(name="loadBalancerBackendAddressPoolId") def load_balancer_backend_address_pool_id(self) -> Optional[pulumi.Input[str]]: """ The resource Id of the Load Balancer backend address pool that the VM instances of the node type are associated with. The format of the resource Id is '/subscriptions/<subscriptionId>/resourceGroups/<resourceGroupName>/providers/Microsoft.Network/loadBalancers/<loadBalancerName>/backendAddressPools/<backendAddressPoolName>'. """ return pulumi.get(self, "load_balancer_backend_address_pool_id") @load_balancer_backend_address_pool_id.setter def load_balancer_backend_address_pool_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "load_balancer_backend_address_pool_id", value) @property @pulumi.getter(name="loadBalancerInboundNatPoolId") def load_balancer_inbound_nat_pool_id(self) -> Optional[pulumi.Input[str]]: """ The resource Id of the Load Balancer inbound NAT pool that the VM instances of the node type are associated with. The format of the resource Id is '/subscriptions/<subscriptionId>/resourceGroups/<resourceGroupName>/providers/Microsoft.Network/loadBalancers/<loadBalancerName>/inboundNatPools/<inboundNatPoolName>'. """ return pulumi.get(self, "load_balancer_inbound_nat_pool_id") @load_balancer_inbound_nat_pool_id.setter def load_balancer_inbound_nat_pool_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "load_balancer_inbound_nat_pool_id", value) @pulumi.input_type class IPTagArgs: def __init__(__self__, *, ip_tag_type: pulumi.Input[str], tag: pulumi.Input[str]): """ IPTag associated with the object. :param pulumi.Input[str] ip_tag_type: The IP tag type. :param pulumi.Input[str] tag: The value of the IP tag. 
""" pulumi.set(__self__, "ip_tag_type", ip_tag_type) pulumi.set(__self__, "tag", tag) @property @pulumi.getter(name="ipTagType") def ip_tag_type(self) -> pulumi.Input[str]: """ The IP tag type. """ return pulumi.get(self, "ip_tag_type") @ip_tag_type.setter def ip_tag_type(self, value: pulumi.Input[str]): pulumi.set(self, "ip_tag_type", value) @property @pulumi.getter def tag(self) -> pulumi.Input[str]: """ The value of the IP tag. """ return pulumi.get(self, "tag") @tag.setter def tag(self, value: pulumi.Input[str]): pulumi.set(self, "tag", value) @pulumi.input_type class LoadBalancingRuleArgs: def __init__(__self__, *, backend_port: pulumi.Input[int], frontend_port: pulumi.Input[int], probe_protocol: pulumi.Input[Union[str, 'ProbeProtocol']], protocol: pulumi.Input[Union[str, 'Protocol']], probe_port: Optional[pulumi.Input[int]] = None, probe_request_path: Optional[pulumi.Input[str]] = None): """ Describes a load balancing rule. :param pulumi.Input[int] backend_port: The port used for internal connections on the endpoint. Acceptable values are between 1 and 65535. :param pulumi.Input[int] frontend_port: The port for the external endpoint. Port numbers for each rule must be unique within the Load Balancer. Acceptable values are between 1 and 65534. :param pulumi.Input[Union[str, 'ProbeProtocol']] probe_protocol: the reference to the load balancer probe used by the load balancing rule. :param pulumi.Input[Union[str, 'Protocol']] protocol: The reference to the transport protocol used by the load balancing rule. :param pulumi.Input[int] probe_port: The prob port used by the load balancing rule. Acceptable values are between 1 and 65535. :param pulumi.Input[str] probe_request_path: The probe request path. Only supported for HTTP/HTTPS probes. 
""" pulumi.set(__self__, "backend_port", backend_port) pulumi.set(__self__, "frontend_port", frontend_port) pulumi.set(__self__, "probe_protocol", probe_protocol) pulumi.set(__self__, "protocol", protocol) if probe_port is not None: pulumi.set(__self__, "probe_port", probe_port) if probe_request_path is not None: pulumi.set(__self__, "probe_request_path", probe_request_path) @property @pulumi.getter(name="backendPort") def backend_port(self) -> pulumi.Input[int]: """ The port used for internal connections on the endpoint. Acceptable values are between 1 and 65535. """ return pulumi.get(self, "backend_port") @backend_port.setter def backend_port(self, value: pulumi.Input[int]): pulumi.set(self, "backend_port", value) @property @pulumi.getter(name="frontendPort") def frontend_port(self) -> pulumi.Input[int]: """ The port for the external endpoint. Port numbers for each rule must be unique within the Load Balancer. Acceptable values are between 1 and 65534. """ return pulumi.get(self, "frontend_port") @frontend_port.setter def frontend_port(self, value: pulumi.Input[int]): pulumi.set(self, "frontend_port", value) @property @pulumi.getter(name="probeProtocol") def probe_protocol(self) -> pulumi.Input[Union[str, 'ProbeProtocol']]: """ the reference to the load balancer probe used by the load balancing rule. """ return pulumi.get(self, "probe_protocol") @probe_protocol.setter def probe_protocol(self, value: pulumi.Input[Union[str, 'ProbeProtocol']]): pulumi.set(self, "probe_protocol", value) @property @pulumi.getter def protocol(self) -> pulumi.Input[Union[str, 'Protocol']]: """ The reference to the transport protocol used by the load balancing rule. """ return pulumi.get(self, "protocol") @protocol.setter def protocol(self, value: pulumi.Input[Union[str, 'Protocol']]): pulumi.set(self, "protocol", value) @property @pulumi.getter(name="probePort") def probe_port(self) -> Optional[pulumi.Input[int]]: """ The prob port used by the load balancing rule. 
        Acceptable values are between 1 and 65535.
        """
        return pulumi.get(self, "probe_port")

    @probe_port.setter
    def probe_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "probe_port", value)

    @property
    @pulumi.getter(name="probeRequestPath")
    def probe_request_path(self) -> Optional[pulumi.Input[str]]:
        """
        The probe request path. Only supported for HTTP/HTTPS probes.
        """
        return pulumi.get(self, "probe_request_path")

    @probe_request_path.setter
    def probe_request_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "probe_request_path", value)


@pulumi.input_type
class ManagedIdentityArgs:
    def __init__(__self__, *,
                 type: Optional[pulumi.Input['ManagedIdentityType']] = None,
                 user_assigned_identities: Optional[pulumi.Input[Mapping[str, Any]]] = None):
        """
        Describes the managed identities for an Azure resource.

        All parameters are optional; only those supplied are stored on the instance.
        :param pulumi.Input['ManagedIdentityType'] type: The type of managed identity for the resource.
        :param pulumi.Input[Mapping[str, Any]] user_assigned_identities: The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
        """
        if type is not None:
            pulumi.set(__self__, "type", type)
        if user_assigned_identities is not None:
            pulumi.set(__self__, "user_assigned_identities", user_assigned_identities)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input['ManagedIdentityType']]:
        """
        The type of managed identity for the resource.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input['ManagedIdentityType']]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="userAssignedIdentities")
    def user_assigned_identities(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        The list of user identities associated with the resource.
        The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
        """
        return pulumi.get(self, "user_assigned_identities")

    @user_assigned_identities.setter
    def user_assigned_identities(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "user_assigned_identities", value)


@pulumi.input_type
class NamedPartitionSchemeArgs:
    def __init__(__self__, *,
                 names: pulumi.Input[Sequence[pulumi.Input[str]]],
                 partition_scheme: pulumi.Input[str]):
        """
        Describes the named partition scheme of the service.

        :param pulumi.Input[Sequence[pulumi.Input[str]]] names: Array for the names of the partitions.
        :param pulumi.Input[str] partition_scheme: Enumerates the ways that a service can be partitioned. Expected value is 'Named'.
        """
        pulumi.set(__self__, "names", names)
        # NOTE: 'partition_scheme' is the discriminator for this scheme type; the passed
        # value is ignored and the constant 'Named' is always stored.
        pulumi.set(__self__, "partition_scheme", 'Named')

    @property
    @pulumi.getter
    def names(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        Array for the names of the partitions.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter(name="partitionScheme")
    def partition_scheme(self) -> pulumi.Input[str]:
        """
        Enumerates the ways that a service can be partitioned.
        Expected value is 'Named'.
""" return pulumi.get(self, "partition_scheme") @partition_scheme.setter def partition_scheme(self, value: pulumi.Input[str]): pulumi.set(self, "partition_scheme", value) @pulumi.input_type class NetworkSecurityRuleArgs: def __init__(__self__, *, access: pulumi.Input[Union[str, 'Access']], direction: pulumi.Input[Union[str, 'Direction']], name: pulumi.Input[str], priority: pulumi.Input[int], protocol: pulumi.Input[Union[str, 'NsgProtocol']], description: Optional[pulumi.Input[str]] = None, destination_address_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, destination_port_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, source_address_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, source_port_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None): """ Describes a network security rule. :param pulumi.Input[Union[str, 'Access']] access: The network traffic is allowed or denied. :param pulumi.Input[Union[str, 'Direction']] direction: Network security rule direction. :param pulumi.Input[str] name: Network security rule name. :param pulumi.Input[int] priority: The priority of the rule. The value can be in the range 1000 to 3000. Values outside this range are reserved for Service Fabric ManagerCluster Resource Provider. The priority number must be unique for each rule in the collection. The lower the priority number, the higher the priority of the rule. :param pulumi.Input[Union[str, 'NsgProtocol']] protocol: Network protocol this rule applies to. :param pulumi.Input[str] description: Network security rule description. :param pulumi.Input[Sequence[pulumi.Input[str]]] destination_address_prefixes: The destination address prefixes. CIDR or destination IP ranges. :param pulumi.Input[Sequence[pulumi.Input[str]]] destination_port_ranges: The destination port ranges. :param pulumi.Input[Sequence[pulumi.Input[str]]] source_address_prefixes: The CIDR or source IP ranges. 
:param pulumi.Input[Sequence[pulumi.Input[str]]] source_port_ranges: The source port ranges. """ pulumi.set(__self__, "access", access) pulumi.set(__self__, "direction", direction) pulumi.set(__self__, "name", name) pulumi.set(__self__, "priority", priority) pulumi.set(__self__, "protocol", protocol) if description is not None: pulumi.set(__self__, "description", description) if destination_address_prefixes is not None: pulumi.set(__self__, "destination_address_prefixes", destination_address_prefixes) if destination_port_ranges is not None: pulumi.set(__self__, "destination_port_ranges", destination_port_ranges) if source_address_prefixes is not None: pulumi.set(__self__, "source_address_prefixes", source_address_prefixes) if source_port_ranges is not None: pulumi.set(__self__, "source_port_ranges", source_port_ranges) @property @pulumi.getter def access(self) -> pulumi.Input[Union[str, 'Access']]: """ The network traffic is allowed or denied. """ return pulumi.get(self, "access") @access.setter def access(self, value: pulumi.Input[Union[str, 'Access']]): pulumi.set(self, "access", value) @property @pulumi.getter def direction(self) -> pulumi.Input[Union[str, 'Direction']]: """ Network security rule direction. """ return pulumi.get(self, "direction") @direction.setter def direction(self, value: pulumi.Input[Union[str, 'Direction']]): pulumi.set(self, "direction", value) @property @pulumi.getter def name(self) -> pulumi.Input[str]: """ Network security rule name. """ return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[str]): pulumi.set(self, "name", value) @property @pulumi.getter def priority(self) -> pulumi.Input[int]: """ The priority of the rule. The value can be in the range 1000 to 3000. Values outside this range are reserved for Service Fabric ManagerCluster Resource Provider. The priority number must be unique for each rule in the collection. The lower the priority number, the higher the priority of the rule. 
""" return pulumi.get(self, "priority") @priority.setter def priority(self, value: pulumi.Input[int]): pulumi.set(self, "priority", value) @property @pulumi.getter def protocol(self) -> pulumi.Input[Union[str, 'NsgProtocol']]: """ Network protocol this rule applies to. """ return pulumi.get(self, "protocol") @protocol.setter def protocol(self, value: pulumi.Input[Union[str, 'NsgProtocol']]): pulumi.set(self, "protocol", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: """ Network security rule description. """ return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @property @pulumi.getter(name="destinationAddressPrefixes") def destination_address_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ The destination address prefixes. CIDR or destination IP ranges. """ return pulumi.get(self, "destination_address_prefixes") @destination_address_prefixes.setter def destination_address_prefixes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "destination_address_prefixes", value) @property @pulumi.getter(name="destinationPortRanges") def destination_port_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ The destination port ranges. """ return pulumi.get(self, "destination_port_ranges") @destination_port_ranges.setter def destination_port_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "destination_port_ranges", value) @property @pulumi.getter(name="sourceAddressPrefixes") def source_address_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ The CIDR or source IP ranges. 
""" return pulumi.get(self, "source_address_prefixes") @source_address_prefixes.setter def source_address_prefixes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "source_address_prefixes", value) @property @pulumi.getter(name="sourcePortRanges") def source_port_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ The source port ranges. """ return pulumi.get(self, "source_port_ranges") @source_port_ranges.setter def source_port_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "source_port_ranges", value) @pulumi.input_type class NodeTypeSkuArgs: def __init__(__self__, *, capacity: pulumi.Input[int], name: Optional[pulumi.Input[str]] = None, tier: Optional[pulumi.Input[str]] = None): """ Describes a node type sku. :param pulumi.Input[int] capacity: The number of nodes in the node type.<br /><br />If present in request it will override properties.vmInstanceCount. :param pulumi.Input[str] name: The sku name. <br /><br />Name is internally generated and is used in auto-scale scenarios.<br /> Property does not allow to be changed to other values than generated.<br /> To avoid deployment errors please omit the property. :param pulumi.Input[str] tier: Specifies the tier of the node type. <br /><br /> Possible Values:<br /> **Standard** """ pulumi.set(__self__, "capacity", capacity) if name is not None: pulumi.set(__self__, "name", name) if tier is not None: pulumi.set(__self__, "tier", tier) @property @pulumi.getter def capacity(self) -> pulumi.Input[int]: """ The number of nodes in the node type.<br /><br />If present in request it will override properties.vmInstanceCount. """ return pulumi.get(self, "capacity") @capacity.setter def capacity(self, value: pulumi.Input[int]): pulumi.set(self, "capacity", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The sku name. 
        <br /><br />Name is internally generated and is used in auto-scale scenarios.<br /> Property does not allow to be changed to other values than generated.<br /> To avoid deployment errors please omit the property.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def tier(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the tier of the node type. <br /><br /> Possible Values:<br /> **Standard**
        """
        return pulumi.get(self, "tier")

    @tier.setter
    def tier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tier", value)


@pulumi.input_type
class PartitionInstanceCountScaleMechanismArgs:
    def __init__(__self__, *,
                 kind: pulumi.Input[str],
                 max_instance_count: pulumi.Input[int],
                 min_instance_count: pulumi.Input[int],
                 scale_increment: pulumi.Input[int]):
        """
        Represents a scaling mechanism for adding or removing instances of stateless service partition.

        :param pulumi.Input[str] kind: Enumerates the ways that a service can be partitioned.
               Expected value is 'ScalePartitionInstanceCount'.
        :param pulumi.Input[int] max_instance_count: Maximum number of instances of the partition.
        :param pulumi.Input[int] min_instance_count: Minimum number of instances of the partition.
        :param pulumi.Input[int] scale_increment: The number of instances to add or remove during a scaling operation.
        """
        # 'kind' is a discriminator: the supplied argument is ignored and the
        # value is pinned to this mechanism's expected constant.
        pulumi.set(__self__, "kind", 'ScalePartitionInstanceCount')
        pulumi.set(__self__, "max_instance_count", max_instance_count)
        pulumi.set(__self__, "min_instance_count", min_instance_count)
        pulumi.set(__self__, "scale_increment", scale_increment)

    @property
    @pulumi.getter
    def kind(self) -> pulumi.Input[str]:
        """
        Enumerates the ways that a service can be partitioned.
        Expected value is 'ScalePartitionInstanceCount'.
        """
        return pulumi.get(self, "kind")

    @kind.setter
    def kind(self, value: pulumi.Input[str]):
        pulumi.set(self, "kind", value)

    @property
    @pulumi.getter(name="maxInstanceCount")
    def max_instance_count(self) -> pulumi.Input[int]:
        """
        Maximum number of instances of the partition.
        """
        return pulumi.get(self, "max_instance_count")

    @max_instance_count.setter
    def max_instance_count(self, value: pulumi.Input[int]):
        pulumi.set(self, "max_instance_count", value)

    @property
    @pulumi.getter(name="minInstanceCount")
    def min_instance_count(self) -> pulumi.Input[int]:
        """
        Minimum number of instances of the partition.
        """
        return pulumi.get(self, "min_instance_count")

    @min_instance_count.setter
    def min_instance_count(self, value: pulumi.Input[int]):
        pulumi.set(self, "min_instance_count", value)

    @property
    @pulumi.getter(name="scaleIncrement")
    def scale_increment(self) -> pulumi.Input[int]:
        """
        The number of instances to add or remove during a scaling operation.
        """
        return pulumi.get(self, "scale_increment")

    @scale_increment.setter
    def scale_increment(self, value: pulumi.Input[int]):
        pulumi.set(self, "scale_increment", value)


@pulumi.input_type
class RollingUpgradeMonitoringPolicyArgs:
    def __init__(__self__, *,
                 failure_action: pulumi.Input[Union[str, 'FailureAction']],
                 health_check_retry_timeout: pulumi.Input[str],
                 health_check_stable_duration: pulumi.Input[str],
                 health_check_wait_duration: pulumi.Input[str],
                 upgrade_domain_timeout: pulumi.Input[str],
                 upgrade_timeout: pulumi.Input[str]):
        """
        The policy used for monitoring the application upgrade

        :param pulumi.Input[Union[str, 'FailureAction']] failure_action: The compensating action to perform when a Monitored upgrade encounters monitoring policy or health policy violations. Invalid indicates the failure action is invalid. Rollback specifies that the upgrade will start rolling back automatically. Manual indicates that the upgrade will switch to UnmonitoredManual upgrade mode.
        :param pulumi.Input[str] health_check_retry_timeout: The amount of time to retry health evaluation when the application or cluster is unhealthy before FailureAction is executed. It is interpreted as a string representing an ISO 8601 duration with following format "hh:mm:ss.fff".
        :param pulumi.Input[str] health_check_stable_duration: The amount of time that the application or cluster must remain healthy before the upgrade proceeds to the next upgrade domain. It is interpreted as a string representing an ISO 8601 duration with following format "hh:mm:ss.fff".
        :param pulumi.Input[str] health_check_wait_duration: The amount of time to wait after completing an upgrade domain before applying health policies. It is interpreted as a string representing an ISO 8601 duration with following format "hh:mm:ss.fff".
        :param pulumi.Input[str] upgrade_domain_timeout: The amount of time each upgrade domain has to complete before FailureAction is executed. Cannot be larger than 12 hours. It is interpreted as a string representing an ISO 8601 duration with following format "hh:mm:ss.fff".
        :param pulumi.Input[str] upgrade_timeout: The amount of time the overall upgrade has to complete before FailureAction is executed. Cannot be larger than 12 hours. It is interpreted as a string representing an ISO 8601 duration with following format "hh:mm:ss.fff".
        """
        pulumi.set(__self__, "failure_action", failure_action)
        pulumi.set(__self__, "health_check_retry_timeout", health_check_retry_timeout)
        pulumi.set(__self__, "health_check_stable_duration", health_check_stable_duration)
        pulumi.set(__self__, "health_check_wait_duration", health_check_wait_duration)
        pulumi.set(__self__, "upgrade_domain_timeout", upgrade_domain_timeout)
        pulumi.set(__self__, "upgrade_timeout", upgrade_timeout)

    @property
    @pulumi.getter(name="failureAction")
    def failure_action(self) -> pulumi.Input[Union[str, 'FailureAction']]:
        """
        The compensating action to perform when a Monitored upgrade encounters monitoring policy or health policy violations. Invalid indicates the failure action is invalid. Rollback specifies that the upgrade will start rolling back automatically. Manual indicates that the upgrade will switch to UnmonitoredManual upgrade mode.
        """
        return pulumi.get(self, "failure_action")

    @failure_action.setter
    def failure_action(self, value: pulumi.Input[Union[str, 'FailureAction']]):
        pulumi.set(self, "failure_action", value)

    @property
    @pulumi.getter(name="healthCheckRetryTimeout")
    def health_check_retry_timeout(self) -> pulumi.Input[str]:
        """
        The amount of time to retry health evaluation when the application or cluster is unhealthy before FailureAction is executed. It is interpreted as a string representing an ISO 8601 duration with following format "hh:mm:ss.fff".
        """
        return pulumi.get(self, "health_check_retry_timeout")

    @health_check_retry_timeout.setter
    def health_check_retry_timeout(self, value: pulumi.Input[str]):
        pulumi.set(self, "health_check_retry_timeout", value)

    @property
    @pulumi.getter(name="healthCheckStableDuration")
    def health_check_stable_duration(self) -> pulumi.Input[str]:
        """
        The amount of time that the application or cluster must remain healthy before the upgrade proceeds to the next upgrade domain. It is interpreted as a string representing an ISO 8601 duration with following format "hh:mm:ss.fff".
        """
        return pulumi.get(self, "health_check_stable_duration")

    @health_check_stable_duration.setter
    def health_check_stable_duration(self, value: pulumi.Input[str]):
        pulumi.set(self, "health_check_stable_duration", value)

    @property
    @pulumi.getter(name="healthCheckWaitDuration")
    def health_check_wait_duration(self) -> pulumi.Input[str]:
        """
        The amount of time to wait after completing an upgrade domain before applying health policies. It is interpreted as a string representing an ISO 8601 duration with following format "hh:mm:ss.fff".
        """
        return pulumi.get(self, "health_check_wait_duration")

    @health_check_wait_duration.setter
    def health_check_wait_duration(self, value: pulumi.Input[str]):
        pulumi.set(self, "health_check_wait_duration", value)

    @property
    @pulumi.getter(name="upgradeDomainTimeout")
    def upgrade_domain_timeout(self) -> pulumi.Input[str]:
        """
        The amount of time each upgrade domain has to complete before FailureAction is executed. Cannot be larger than 12 hours. It is interpreted as a string representing an ISO 8601 duration with following format "hh:mm:ss.fff".
        """
        return pulumi.get(self, "upgrade_domain_timeout")

    @upgrade_domain_timeout.setter
    def upgrade_domain_timeout(self, value: pulumi.Input[str]):
        pulumi.set(self, "upgrade_domain_timeout", value)

    @property
    @pulumi.getter(name="upgradeTimeout")
    def upgrade_timeout(self) -> pulumi.Input[str]:
        """
        The amount of time the overall upgrade has to complete before FailureAction is executed. Cannot be larger than 12 hours. It is interpreted as a string representing an ISO 8601 duration with following format "hh:mm:ss.fff".
        """
        return pulumi.get(self, "upgrade_timeout")

    @upgrade_timeout.setter
    def upgrade_timeout(self, value: pulumi.Input[str]):
        pulumi.set(self, "upgrade_timeout", value)


@pulumi.input_type
class ScalingPolicyArgs:
    def __init__(__self__, *,
                 scaling_mechanism: pulumi.Input[Union['AddRemoveIncrementalNamedPartitionScalingMechanismArgs', 'PartitionInstanceCountScaleMechanismArgs']],
                 scaling_trigger: pulumi.Input[Union['AveragePartitionLoadScalingTriggerArgs', 'AverageServiceLoadScalingTriggerArgs']]):
        """
        Specifies a metric to load balance a service during runtime.

        :param pulumi.Input[Union['AddRemoveIncrementalNamedPartitionScalingMechanismArgs', 'PartitionInstanceCountScaleMechanismArgs']] scaling_mechanism: Specifies the mechanism associated with this scaling policy
        :param pulumi.Input[Union['AveragePartitionLoadScalingTriggerArgs', 'AverageServiceLoadScalingTriggerArgs']] scaling_trigger: Specifies the trigger associated with this scaling policy.
        """
        pulumi.set(__self__, "scaling_mechanism", scaling_mechanism)
        pulumi.set(__self__, "scaling_trigger", scaling_trigger)

    @property
    @pulumi.getter(name="scalingMechanism")
    def scaling_mechanism(self) -> pulumi.Input[Union['AddRemoveIncrementalNamedPartitionScalingMechanismArgs', 'PartitionInstanceCountScaleMechanismArgs']]:
        """
        Specifies the mechanism associated with this scaling policy
        """
        return pulumi.get(self, "scaling_mechanism")

    @scaling_mechanism.setter
    def scaling_mechanism(self, value: pulumi.Input[Union['AddRemoveIncrementalNamedPartitionScalingMechanismArgs', 'PartitionInstanceCountScaleMechanismArgs']]):
        pulumi.set(self, "scaling_mechanism", value)

    @property
    @pulumi.getter(name="scalingTrigger")
    def scaling_trigger(self) -> pulumi.Input[Union['AveragePartitionLoadScalingTriggerArgs', 'AverageServiceLoadScalingTriggerArgs']]:
        """
        Specifies the trigger associated with this scaling policy.
""" return pulumi.get(self, "scaling_trigger") @scaling_trigger.setter def scaling_trigger(self, value: pulumi.Input[Union['AveragePartitionLoadScalingTriggerArgs', 'AverageServiceLoadScalingTriggerArgs']]): pulumi.set(self, "scaling_trigger", value) @pulumi.input_type class ServiceCorrelationArgs: def __init__(__self__, *, scheme: pulumi.Input[Union[str, 'ServiceCorrelationScheme']], service_name: pulumi.Input[str]): """ Creates a particular correlation between services. :param pulumi.Input[Union[str, 'ServiceCorrelationScheme']] scheme: The ServiceCorrelationScheme which describes the relationship between this service and the service specified via ServiceName. :param pulumi.Input[str] service_name: The Arm Resource ID of the service that the correlation relationship is established with. """ pulumi.set(__self__, "scheme", scheme) pulumi.set(__self__, "service_name", service_name) @property @pulumi.getter def scheme(self) -> pulumi.Input[Union[str, 'ServiceCorrelationScheme']]: """ The ServiceCorrelationScheme which describes the relationship between this service and the service specified via ServiceName. """ return pulumi.get(self, "scheme") @scheme.setter def scheme(self, value: pulumi.Input[Union[str, 'ServiceCorrelationScheme']]): pulumi.set(self, "scheme", value) @property @pulumi.getter(name="serviceName") def service_name(self) -> pulumi.Input[str]: """ The Arm Resource ID of the service that the correlation relationship is established with. 
""" return pulumi.get(self, "service_name") @service_name.setter def service_name(self, value: pulumi.Input[str]): pulumi.set(self, "service_name", value) @pulumi.input_type class ServiceLoadMetricArgs: def __init__(__self__, *, name: pulumi.Input[str], default_load: Optional[pulumi.Input[int]] = None, primary_default_load: Optional[pulumi.Input[int]] = None, secondary_default_load: Optional[pulumi.Input[int]] = None, weight: Optional[pulumi.Input[Union[str, 'ServiceLoadMetricWeight']]] = None): """ Specifies a metric to load balance a service during runtime. :param pulumi.Input[str] name: The name of the metric. If the service chooses to report load during runtime, the load metric name should match the name that is specified in Name exactly. Note that metric names are case sensitive. :param pulumi.Input[int] default_load: Used only for Stateless services. The default amount of load, as a number, that this service creates for this metric. :param pulumi.Input[int] primary_default_load: Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Primary replica. :param pulumi.Input[int] secondary_default_load: Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Secondary replica. :param pulumi.Input[Union[str, 'ServiceLoadMetricWeight']] weight: The service load metric relative weight, compared to other metrics configured for this service, as a number. 
""" pulumi.set(__self__, "name", name) if default_load is not None: pulumi.set(__self__, "default_load", default_load) if primary_default_load is not None: pulumi.set(__self__, "primary_default_load", primary_default_load) if secondary_default_load is not None: pulumi.set(__self__, "secondary_default_load", secondary_default_load) if weight is not None: pulumi.set(__self__, "weight", weight) @property @pulumi.getter def name(self) -> pulumi.Input[str]: """ The name of the metric. If the service chooses to report load during runtime, the load metric name should match the name that is specified in Name exactly. Note that metric names are case sensitive. """ return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[str]): pulumi.set(self, "name", value) @property @pulumi.getter(name="defaultLoad") def default_load(self) -> Optional[pulumi.Input[int]]: """ Used only for Stateless services. The default amount of load, as a number, that this service creates for this metric. """ return pulumi.get(self, "default_load") @default_load.setter def default_load(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "default_load", value) @property @pulumi.getter(name="primaryDefaultLoad") def primary_default_load(self) -> Optional[pulumi.Input[int]]: """ Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Primary replica. """ return pulumi.get(self, "primary_default_load") @primary_default_load.setter def primary_default_load(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "primary_default_load", value) @property @pulumi.getter(name="secondaryDefaultLoad") def secondary_default_load(self) -> Optional[pulumi.Input[int]]: """ Used only for Stateful services. The default amount of load, as a number, that this service creates for this metric when it is a Secondary replica. 
""" return pulumi.get(self, "secondary_default_load") @secondary_default_load.setter def secondary_default_load(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "secondary_default_load", value) @property @pulumi.getter def weight(self) -> Optional[pulumi.Input[Union[str, 'ServiceLoadMetricWeight']]]: """ The service load metric relative weight, compared to other metrics configured for this service, as a number. """ return pulumi.get(self, "weight") @weight.setter def weight(self, value: Optional[pulumi.Input[Union[str, 'ServiceLoadMetricWeight']]]): pulumi.set(self, "weight", value) @pulumi.input_type class ServicePlacementInvalidDomainPolicyArgs: def __init__(__self__, *, domain_name: pulumi.Input[str], type: pulumi.Input[str]): """ Describes the policy to be used for placement of a Service Fabric service where a particular fault or upgrade domain should not be used for placement of the instances or replicas of that service. :param pulumi.Input[str] domain_name: The name of the domain that should not be used for placement. :param pulumi.Input[str] type: The type of placement policy for a service fabric service. Following are the possible values. Expected value is 'InvalidDomain'. """ pulumi.set(__self__, "domain_name", domain_name) pulumi.set(__self__, "type", 'InvalidDomain') @property @pulumi.getter(name="domainName") def domain_name(self) -> pulumi.Input[str]: """ The name of the domain that should not be used for placement. """ return pulumi.get(self, "domain_name") @domain_name.setter def domain_name(self, value: pulumi.Input[str]): pulumi.set(self, "domain_name", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: """ The type of placement policy for a service fabric service. Following are the possible values. Expected value is 'InvalidDomain'. 
""" return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @pulumi.input_type class ServicePlacementNonPartiallyPlaceServicePolicyArgs: def __init__(__self__, *, type: pulumi.Input[str]): """ The name of the domain that should used for placement as per this policy. :param pulumi.Input[str] type: The type of placement policy for a service fabric service. Following are the possible values. Expected value is 'NonPartiallyPlaceService'. """ pulumi.set(__self__, "type", 'NonPartiallyPlaceService') @property @pulumi.getter def type(self) -> pulumi.Input[str]: """ The type of placement policy for a service fabric service. Following are the possible values. Expected value is 'NonPartiallyPlaceService'. """ return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @pulumi.input_type class ServicePlacementPreferPrimaryDomainPolicyArgs: def __init__(__self__, *, domain_name: pulumi.Input[str], type: pulumi.Input[str]): """ Describes the policy to be used for placement of a Service Fabric service where the service's Primary replicas should optimally be placed in a particular domain. This placement policy is usually used with fault domains in scenarios where the Service Fabric cluster is geographically distributed in order to indicate that a service's primary replica should be located in a particular fault domain, which in geo-distributed scenarios usually aligns with regional or datacenter boundaries. Note that since this is an optimization it is possible that the Primary replica may not end up located in this domain due to failures, capacity limits, or other constraints. :param pulumi.Input[str] domain_name: The name of the domain that should used for placement as per this policy. :param pulumi.Input[str] type: The type of placement policy for a service fabric service. Following are the possible values. Expected value is 'PreferredPrimaryDomain'. 
""" pulumi.set(__self__, "domain_name", domain_name) pulumi.set(__self__, "type", 'PreferredPrimaryDomain') @property @pulumi.getter(name="domainName") def domain_name(self) -> pulumi.Input[str]: """ The name of the domain that should used for placement as per this policy. """ return pulumi.get(self, "domain_name") @domain_name.setter def domain_name(self, value: pulumi.Input[str]): pulumi.set(self, "domain_name", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: """ The type of placement policy for a service fabric service. Following are the possible values. Expected value is 'PreferredPrimaryDomain'. """ return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @pulumi.input_type class ServicePlacementRequireDomainDistributionPolicyArgs: def __init__(__self__, *, domain_name: pulumi.Input[str], type: pulumi.Input[str]): """ Describes the policy to be used for placement of a Service Fabric service where two replicas from the same partition should never be placed in the same fault or upgrade domain. While this is not common it can expose the service to an increased risk of concurrent failures due to unplanned outages or other cases of subsequent/concurrent failures. As an example, consider a case where replicas are deployed across different data center, with one replica per location. In the event that one of the datacenters goes offline, normally the replica that was placed in that datacenter will be packed into one of the remaining datacenters. If this is not desirable then this policy should be set. :param pulumi.Input[str] domain_name: The name of the domain that should used for placement as per this policy. :param pulumi.Input[str] type: The type of placement policy for a service fabric service. Following are the possible values. Expected value is 'RequiredDomainDistribution'. 
""" pulumi.set(__self__, "domain_name", domain_name) pulumi.set(__self__, "type", 'RequiredDomainDistribution') @property @pulumi.getter(name="domainName") def domain_name(self) -> pulumi.Input[str]: """ The name of the domain that should used for placement as per this policy. """ return pulumi.get(self, "domain_name") @domain_name.setter def domain_name(self, value: pulumi.Input[str]): pulumi.set(self, "domain_name", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: """ The type of placement policy for a service fabric service. Following are the possible values. Expected value is 'RequiredDomainDistribution'. """ return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @pulumi.input_type class ServicePlacementRequiredDomainPolicyArgs: def __init__(__self__, *, domain_name: pulumi.Input[str], type: pulumi.Input[str]): """ Describes the policy to be used for placement of a Service Fabric service where the instances or replicas of that service must be placed in a particular domain. :param pulumi.Input[str] domain_name: The name of the domain that should used for placement as per this policy. :param pulumi.Input[str] type: The type of placement policy for a service fabric service. Following are the possible values. Expected value is 'RequiredDomain'. """ pulumi.set(__self__, "domain_name", domain_name) pulumi.set(__self__, "type", 'RequiredDomain') @property @pulumi.getter(name="domainName") def domain_name(self) -> pulumi.Input[str]: """ The name of the domain that should used for placement as per this policy. """ return pulumi.get(self, "domain_name") @domain_name.setter def domain_name(self, value: pulumi.Input[str]): pulumi.set(self, "domain_name", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: """ The type of placement policy for a service fabric service. Following are the possible values. Expected value is 'RequiredDomain'. 
""" return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @pulumi.input_type class ServiceTypeHealthPolicyArgs: def __init__(__self__, *, max_percent_unhealthy_partitions_per_service: pulumi.Input[int], max_percent_unhealthy_replicas_per_partition: pulumi.Input[int], max_percent_unhealthy_services: pulumi.Input[int]): """ Represents the health policy used to evaluate the health of services belonging to a service type. :param pulumi.Input[int] max_percent_unhealthy_partitions_per_service: The maximum allowed percentage of unhealthy partitions per service. The percentage represents the maximum tolerated percentage of partitions that can be unhealthy before the service is considered in error. If the percentage is respected but there is at least one unhealthy partition, the health is evaluated as Warning. The percentage is calculated by dividing the number of unhealthy partitions over the total number of partitions in the service. The computation rounds up to tolerate one failure on small numbers of partitions. :param pulumi.Input[int] max_percent_unhealthy_replicas_per_partition: The maximum allowed percentage of unhealthy replicas per partition. The percentage represents the maximum tolerated percentage of replicas that can be unhealthy before the partition is considered in error. If the percentage is respected but there is at least one unhealthy replica, the health is evaluated as Warning. The percentage is calculated by dividing the number of unhealthy replicas over the total number of replicas in the partition. The computation rounds up to tolerate one failure on small numbers of replicas. :param pulumi.Input[int] max_percent_unhealthy_services: The maximum allowed percentage of unhealthy services. The percentage represents the maximum tolerated percentage of services that can be unhealthy before the application is considered in error. 
If the percentage is respected but there is at least one unhealthy service, the health is evaluated as Warning. This is calculated by dividing the number of unhealthy services of the specific service type over the total number of services of the specific service type. The computation rounds up to tolerate one failure on small numbers of services. """ pulumi.set(__self__, "max_percent_unhealthy_partitions_per_service", max_percent_unhealthy_partitions_per_service) pulumi.set(__self__, "max_percent_unhealthy_replicas_per_partition", max_percent_unhealthy_replicas_per_partition) pulumi.set(__self__, "max_percent_unhealthy_services", max_percent_unhealthy_services) @property @pulumi.getter(name="maxPercentUnhealthyPartitionsPerService") def max_percent_unhealthy_partitions_per_service(self) -> pulumi.Input[int]: """ The maximum allowed percentage of unhealthy partitions per service. The percentage represents the maximum tolerated percentage of partitions that can be unhealthy before the service is considered in error. If the percentage is respected but there is at least one unhealthy partition, the health is evaluated as Warning. The percentage is calculated by dividing the number of unhealthy partitions over the total number of partitions in the service. The computation rounds up to tolerate one failure on small numbers of partitions. """ return pulumi.get(self, "max_percent_unhealthy_partitions_per_service") @max_percent_unhealthy_partitions_per_service.setter def max_percent_unhealthy_partitions_per_service(self, value: pulumi.Input[int]): pulumi.set(self, "max_percent_unhealthy_partitions_per_service", value) @property @pulumi.getter(name="maxPercentUnhealthyReplicasPerPartition") def max_percent_unhealthy_replicas_per_partition(self) -> pulumi.Input[int]: """ The maximum allowed percentage of unhealthy replicas per partition. The percentage represents the maximum tolerated percentage of replicas that can be unhealthy before the partition is considered in error. 
If the percentage is respected but there is at least one unhealthy replica, the health is evaluated as Warning. The percentage is calculated by dividing the number of unhealthy replicas over the total number of replicas in the partition. The computation rounds up to tolerate one failure on small numbers of replicas. """ return pulumi.get(self, "max_percent_unhealthy_replicas_per_partition") @max_percent_unhealthy_replicas_per_partition.setter def max_percent_unhealthy_replicas_per_partition(self, value: pulumi.Input[int]): pulumi.set(self, "max_percent_unhealthy_replicas_per_partition", value) @property @pulumi.getter(name="maxPercentUnhealthyServices") def max_percent_unhealthy_services(self) -> pulumi.Input[int]: """ The maximum allowed percentage of unhealthy services. The percentage represents the maximum tolerated percentage of services that can be unhealthy before the application is considered in error. If the percentage is respected but there is at least one unhealthy service, the health is evaluated as Warning. This is calculated by dividing the number of unhealthy services of the specific service type over the total number of services of the specific service type. The computation rounds up to tolerate one failure on small numbers of services. """ return pulumi.get(self, "max_percent_unhealthy_services") @max_percent_unhealthy_services.setter def max_percent_unhealthy_services(self, value: pulumi.Input[int]): pulumi.set(self, "max_percent_unhealthy_services", value) @pulumi.input_type class SettingsParameterDescriptionArgs: def __init__(__self__, *, name: pulumi.Input[str], value: pulumi.Input[str]): """ Describes a parameter in fabric settings of the cluster. :param pulumi.Input[str] name: The parameter name of fabric setting. :param pulumi.Input[str] value: The parameter value of fabric setting. 
""" pulumi.set(__self__, "name", name) pulumi.set(__self__, "value", value) @property @pulumi.getter def name(self) -> pulumi.Input[str]: """ The parameter name of fabric setting. """ return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[str]): pulumi.set(self, "name", value) @property @pulumi.getter def value(self) -> pulumi.Input[str]: """ The parameter value of fabric setting. """ return pulumi.get(self, "value") @value.setter def value(self, value: pulumi.Input[str]): pulumi.set(self, "value", value) @pulumi.input_type class SettingsSectionDescriptionArgs: def __init__(__self__, *, name: pulumi.Input[str], parameters: pulumi.Input[Sequence[pulumi.Input['SettingsParameterDescriptionArgs']]]): """ Describes a section in the fabric settings of the cluster. :param pulumi.Input[str] name: The section name of the fabric settings. :param pulumi.Input[Sequence[pulumi.Input['SettingsParameterDescriptionArgs']]] parameters: The collection of parameters in the section. """ pulumi.set(__self__, "name", name) pulumi.set(__self__, "parameters", parameters) @property @pulumi.getter def name(self) -> pulumi.Input[str]: """ The section name of the fabric settings. """ return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[str]): pulumi.set(self, "name", value) @property @pulumi.getter def parameters(self) -> pulumi.Input[Sequence[pulumi.Input['SettingsParameterDescriptionArgs']]]: """ The collection of parameters in the section. """ return pulumi.get(self, "parameters") @parameters.setter def parameters(self, value: pulumi.Input[Sequence[pulumi.Input['SettingsParameterDescriptionArgs']]]): pulumi.set(self, "parameters", value) @pulumi.input_type class SingletonPartitionSchemeArgs: def __init__(__self__, *, partition_scheme: pulumi.Input[str]): """ Describes the partition scheme of a singleton-partitioned, or non-partitioned service. 
:param pulumi.Input[str] partition_scheme: Enumerates the ways that a service can be partitioned. Expected value is 'Singleton'. """ pulumi.set(__self__, "partition_scheme", 'Singleton') @property @pulumi.getter(name="partitionScheme") def partition_scheme(self) -> pulumi.Input[str]: """ Enumerates the ways that a service can be partitioned. Expected value is 'Singleton'. """ return pulumi.get(self, "partition_scheme") @partition_scheme.setter def partition_scheme(self, value: pulumi.Input[str]): pulumi.set(self, "partition_scheme", value) @pulumi.input_type class SkuArgs: def __init__(__self__, *, name: pulumi.Input[Union[str, 'SkuName']]): """ Service Fabric managed cluster Sku definition :param pulumi.Input[Union[str, 'SkuName']] name: Sku Name. """ pulumi.set(__self__, "name", name) @property @pulumi.getter def name(self) -> pulumi.Input[Union[str, 'SkuName']]: """ Sku Name. """ return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[Union[str, 'SkuName']]): pulumi.set(self, "name", value) @pulumi.input_type class StatefulServicePropertiesArgs: def __init__(__self__, *, partition_description: pulumi.Input[Union['NamedPartitionSchemeArgs', 'SingletonPartitionSchemeArgs', 'UniformInt64RangePartitionSchemeArgs']], service_kind: pulumi.Input[str], service_type_name: pulumi.Input[str], correlation_scheme: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCorrelationArgs']]]] = None, default_move_cost: Optional[pulumi.Input[Union[str, 'MoveCost']]] = None, has_persisted_state: Optional[pulumi.Input[bool]] = None, min_replica_set_size: Optional[pulumi.Input[int]] = None, placement_constraints: Optional[pulumi.Input[str]] = None, quorum_loss_wait_duration: Optional[pulumi.Input[str]] = None, replica_restart_wait_duration: Optional[pulumi.Input[str]] = None, scaling_policies: Optional[pulumi.Input[Sequence[pulumi.Input['ScalingPolicyArgs']]]] = None, service_load_metrics: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceLoadMetricArgs']]]] = 
None, service_package_activation_mode: Optional[pulumi.Input[Union[str, 'ServicePackageActivationMode']]] = None, service_placement_policies: Optional[pulumi.Input[Sequence[pulumi.Input[Union['ServicePlacementInvalidDomainPolicyArgs', 'ServicePlacementNonPartiallyPlaceServicePolicyArgs', 'ServicePlacementPreferPrimaryDomainPolicyArgs', 'ServicePlacementRequireDomainDistributionPolicyArgs', 'ServicePlacementRequiredDomainPolicyArgs']]]]] = None, service_placement_time_limit: Optional[pulumi.Input[str]] = None, stand_by_replica_keep_duration: Optional[pulumi.Input[str]] = None, target_replica_set_size: Optional[pulumi.Input[int]] = None): """ The properties of a stateful service resource. :param pulumi.Input[Union['NamedPartitionSchemeArgs', 'SingletonPartitionSchemeArgs', 'UniformInt64RangePartitionSchemeArgs']] partition_description: Describes how the service is partitioned. :param pulumi.Input[str] service_kind: The kind of service (Stateless or Stateful). Expected value is 'Stateful'. :param pulumi.Input[str] service_type_name: The name of the service type :param pulumi.Input[Sequence[pulumi.Input['ServiceCorrelationArgs']]] correlation_scheme: A list that describes the correlation of the service with other services. :param pulumi.Input[Union[str, 'MoveCost']] default_move_cost: Specifies the move cost for the service. :param pulumi.Input[bool] has_persisted_state: A flag indicating whether this is a persistent service which stores states on the local disk. If it is then the value of this property is true, if not it is false. :param pulumi.Input[int] min_replica_set_size: The minimum replica set size as a number. :param pulumi.Input[str] placement_constraints: The placement constraints as a string. Placement constraints are boolean expressions on node properties and allow for restricting a service to particular nodes based on the service requirements. For example, to place a service on nodes where NodeType is blue specify the following: "NodeColor == blue)". 
:param pulumi.Input[str] quorum_loss_wait_duration: The maximum duration for which a partition is allowed to be in a state of quorum loss, represented in ISO 8601 format "hh:mm:ss". :param pulumi.Input[str] replica_restart_wait_duration: The duration between when a replica goes down and when a new replica is created, represented in ISO 8601 format "hh:mm:ss". :param pulumi.Input[Sequence[pulumi.Input['ScalingPolicyArgs']]] scaling_policies: Scaling policies for this service. :param pulumi.Input[Sequence[pulumi.Input['ServiceLoadMetricArgs']]] service_load_metrics: The service load metrics is given as an array of ServiceLoadMetric objects. :param pulumi.Input[Union[str, 'ServicePackageActivationMode']] service_package_activation_mode: The activation Mode of the service package :param pulumi.Input[Sequence[pulumi.Input[Union['ServicePlacementInvalidDomainPolicyArgs', 'ServicePlacementNonPartiallyPlaceServicePolicyArgs', 'ServicePlacementPreferPrimaryDomainPolicyArgs', 'ServicePlacementRequireDomainDistributionPolicyArgs', 'ServicePlacementRequiredDomainPolicyArgs']]]] service_placement_policies: A list that describes the correlation of the service with other services. :param pulumi.Input[str] service_placement_time_limit: The duration for which replicas can stay InBuild before reporting that build is stuck, represented in ISO 8601 format "hh:mm:ss". :param pulumi.Input[str] stand_by_replica_keep_duration: The definition on how long StandBy replicas should be maintained before being removed, represented in ISO 8601 format "hh:mm:ss". :param pulumi.Input[int] target_replica_set_size: The target replica set size as a number. 
""" pulumi.set(__self__, "partition_description", partition_description) pulumi.set(__self__, "service_kind", 'Stateful') pulumi.set(__self__, "service_type_name", service_type_name) if correlation_scheme is not None: pulumi.set(__self__, "correlation_scheme", correlation_scheme) if default_move_cost is not None: pulumi.set(__self__, "default_move_cost", default_move_cost) if has_persisted_state is not None: pulumi.set(__self__, "has_persisted_state", has_persisted_state) if min_replica_set_size is not None: pulumi.set(__self__, "min_replica_set_size", min_replica_set_size) if placement_constraints is not None: pulumi.set(__self__, "placement_constraints", placement_constraints) if quorum_loss_wait_duration is not None: pulumi.set(__self__, "quorum_loss_wait_duration", quorum_loss_wait_duration) if replica_restart_wait_duration is not None: pulumi.set(__self__, "replica_restart_wait_duration", replica_restart_wait_duration) if scaling_policies is not None: pulumi.set(__self__, "scaling_policies", scaling_policies) if service_load_metrics is not None: pulumi.set(__self__, "service_load_metrics", service_load_metrics) if service_package_activation_mode is not None: pulumi.set(__self__, "service_package_activation_mode", service_package_activation_mode) if service_placement_policies is not None: pulumi.set(__self__, "service_placement_policies", service_placement_policies) if service_placement_time_limit is not None: pulumi.set(__self__, "service_placement_time_limit", service_placement_time_limit) if stand_by_replica_keep_duration is not None: pulumi.set(__self__, "stand_by_replica_keep_duration", stand_by_replica_keep_duration) if target_replica_set_size is not None: pulumi.set(__self__, "target_replica_set_size", target_replica_set_size) @property @pulumi.getter(name="partitionDescription") def partition_description(self) -> pulumi.Input[Union['NamedPartitionSchemeArgs', 'SingletonPartitionSchemeArgs', 'UniformInt64RangePartitionSchemeArgs']]: """ Describes how the 
service is partitioned. """ return pulumi.get(self, "partition_description") @partition_description.setter def partition_description(self, value: pulumi.Input[Union['NamedPartitionSchemeArgs', 'SingletonPartitionSchemeArgs', 'UniformInt64RangePartitionSchemeArgs']]): pulumi.set(self, "partition_description", value) @property @pulumi.getter(name="serviceKind") def service_kind(self) -> pulumi.Input[str]: """ The kind of service (Stateless or Stateful). Expected value is 'Stateful'. """ return pulumi.get(self, "service_kind") @service_kind.setter def service_kind(self, value: pulumi.Input[str]): pulumi.set(self, "service_kind", value) @property @pulumi.getter(name="serviceTypeName") def service_type_name(self) -> pulumi.Input[str]: """ The name of the service type """ return pulumi.get(self, "service_type_name") @service_type_name.setter def service_type_name(self, value: pulumi.Input[str]): pulumi.set(self, "service_type_name", value) @property @pulumi.getter(name="correlationScheme") def correlation_scheme(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCorrelationArgs']]]]: """ A list that describes the correlation of the service with other services. """ return pulumi.get(self, "correlation_scheme") @correlation_scheme.setter def correlation_scheme(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCorrelationArgs']]]]): pulumi.set(self, "correlation_scheme", value) @property @pulumi.getter(name="defaultMoveCost") def default_move_cost(self) -> Optional[pulumi.Input[Union[str, 'MoveCost']]]: """ Specifies the move cost for the service. 
""" return pulumi.get(self, "default_move_cost") @default_move_cost.setter def default_move_cost(self, value: Optional[pulumi.Input[Union[str, 'MoveCost']]]): pulumi.set(self, "default_move_cost", value) @property @pulumi.getter(name="hasPersistedState") def has_persisted_state(self) -> Optional[pulumi.Input[bool]]: """ A flag indicating whether this is a persistent service which stores states on the local disk. If it is then the value of this property is true, if not it is false. """ return pulumi.get(self, "has_persisted_state") @has_persisted_state.setter def has_persisted_state(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "has_persisted_state", value) @property @pulumi.getter(name="minReplicaSetSize") def min_replica_set_size(self) -> Optional[pulumi.Input[int]]: """ The minimum replica set size as a number. """ return pulumi.get(self, "min_replica_set_size") @min_replica_set_size.setter def min_replica_set_size(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "min_replica_set_size", value) @property @pulumi.getter(name="placementConstraints") def placement_constraints(self) -> Optional[pulumi.Input[str]]: """ The placement constraints as a string. Placement constraints are boolean expressions on node properties and allow for restricting a service to particular nodes based on the service requirements. For example, to place a service on nodes where NodeType is blue specify the following: "NodeColor == blue)". """ return pulumi.get(self, "placement_constraints") @placement_constraints.setter def placement_constraints(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "placement_constraints", value) @property @pulumi.getter(name="quorumLossWaitDuration") def quorum_loss_wait_duration(self) -> Optional[pulumi.Input[str]]: """ The maximum duration for which a partition is allowed to be in a state of quorum loss, represented in ISO 8601 format "hh:mm:ss". 
""" return pulumi.get(self, "quorum_loss_wait_duration") @quorum_loss_wait_duration.setter def quorum_loss_wait_duration(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "quorum_loss_wait_duration", value) @property @pulumi.getter(name="replicaRestartWaitDuration") def replica_restart_wait_duration(self) -> Optional[pulumi.Input[str]]: """ The duration between when a replica goes down and when a new replica is created, represented in ISO 8601 format "hh:mm:ss". """ return pulumi.get(self, "replica_restart_wait_duration") @replica_restart_wait_duration.setter def replica_restart_wait_duration(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "replica_restart_wait_duration", value) @property @pulumi.getter(name="scalingPolicies") def scaling_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ScalingPolicyArgs']]]]: """ Scaling policies for this service. """ return pulumi.get(self, "scaling_policies") @scaling_policies.setter def scaling_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ScalingPolicyArgs']]]]): pulumi.set(self, "scaling_policies", value) @property @pulumi.getter(name="serviceLoadMetrics") def service_load_metrics(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceLoadMetricArgs']]]]: """ The service load metrics is given as an array of ServiceLoadMetric objects. 
""" return pulumi.get(self, "service_load_metrics") @service_load_metrics.setter def service_load_metrics(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceLoadMetricArgs']]]]): pulumi.set(self, "service_load_metrics", value) @property @pulumi.getter(name="servicePackageActivationMode") def service_package_activation_mode(self) -> Optional[pulumi.Input[Union[str, 'ServicePackageActivationMode']]]: """ The activation Mode of the service package """ return pulumi.get(self, "service_package_activation_mode") @service_package_activation_mode.setter def service_package_activation_mode(self, value: Optional[pulumi.Input[Union[str, 'ServicePackageActivationMode']]]): pulumi.set(self, "service_package_activation_mode", value) @property @pulumi.getter(name="servicePlacementPolicies") def service_placement_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union['ServicePlacementInvalidDomainPolicyArgs', 'ServicePlacementNonPartiallyPlaceServicePolicyArgs', 'ServicePlacementPreferPrimaryDomainPolicyArgs', 'ServicePlacementRequireDomainDistributionPolicyArgs', 'ServicePlacementRequiredDomainPolicyArgs']]]]]: """ A list that describes the correlation of the service with other services. """ return pulumi.get(self, "service_placement_policies") @service_placement_policies.setter def service_placement_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union['ServicePlacementInvalidDomainPolicyArgs', 'ServicePlacementNonPartiallyPlaceServicePolicyArgs', 'ServicePlacementPreferPrimaryDomainPolicyArgs', 'ServicePlacementRequireDomainDistributionPolicyArgs', 'ServicePlacementRequiredDomainPolicyArgs']]]]]): pulumi.set(self, "service_placement_policies", value) @property @pulumi.getter(name="servicePlacementTimeLimit") def service_placement_time_limit(self) -> Optional[pulumi.Input[str]]: """ The duration for which replicas can stay InBuild before reporting that build is stuck, represented in ISO 8601 format "hh:mm:ss". 
""" return pulumi.get(self, "service_placement_time_limit") @service_placement_time_limit.setter def service_placement_time_limit(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "service_placement_time_limit", value) @property @pulumi.getter(name="standByReplicaKeepDuration") def stand_by_replica_keep_duration(self) -> Optional[pulumi.Input[str]]: """ The definition on how long StandBy replicas should be maintained before being removed, represented in ISO 8601 format "hh:mm:ss". """ return pulumi.get(self, "stand_by_replica_keep_duration") @stand_by_replica_keep_duration.setter def stand_by_replica_keep_duration(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "stand_by_replica_keep_duration", value) @property @pulumi.getter(name="targetReplicaSetSize") def target_replica_set_size(self) -> Optional[pulumi.Input[int]]: """ The target replica set size as a number. """ return pulumi.get(self, "target_replica_set_size") @target_replica_set_size.setter def target_replica_set_size(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_replica_set_size", value) @pulumi.input_type class StatelessServicePropertiesArgs: def __init__(__self__, *, instance_count: pulumi.Input[int], partition_description: pulumi.Input[Union['NamedPartitionSchemeArgs', 'SingletonPartitionSchemeArgs', 'UniformInt64RangePartitionSchemeArgs']], service_kind: pulumi.Input[str], service_type_name: pulumi.Input[str], correlation_scheme: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCorrelationArgs']]]] = None, default_move_cost: Optional[pulumi.Input[Union[str, 'MoveCost']]] = None, min_instance_count: Optional[pulumi.Input[int]] = None, min_instance_percentage: Optional[pulumi.Input[int]] = None, placement_constraints: Optional[pulumi.Input[str]] = None, scaling_policies: Optional[pulumi.Input[Sequence[pulumi.Input['ScalingPolicyArgs']]]] = None, service_load_metrics: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceLoadMetricArgs']]]] = None, 
service_package_activation_mode: Optional[pulumi.Input[Union[str, 'ServicePackageActivationMode']]] = None, service_placement_policies: Optional[pulumi.Input[Sequence[pulumi.Input[Union['ServicePlacementInvalidDomainPolicyArgs', 'ServicePlacementNonPartiallyPlaceServicePolicyArgs', 'ServicePlacementPreferPrimaryDomainPolicyArgs', 'ServicePlacementRequireDomainDistributionPolicyArgs', 'ServicePlacementRequiredDomainPolicyArgs']]]]] = None): """ The properties of a stateless service resource. :param pulumi.Input[int] instance_count: The instance count. :param pulumi.Input[Union['NamedPartitionSchemeArgs', 'SingletonPartitionSchemeArgs', 'UniformInt64RangePartitionSchemeArgs']] partition_description: Describes how the service is partitioned. :param pulumi.Input[str] service_kind: The kind of service (Stateless or Stateful). Expected value is 'Stateless'. :param pulumi.Input[str] service_type_name: The name of the service type :param pulumi.Input[Sequence[pulumi.Input['ServiceCorrelationArgs']]] correlation_scheme: A list that describes the correlation of the service with other services. :param pulumi.Input[Union[str, 'MoveCost']] default_move_cost: Specifies the move cost for the service. :param pulumi.Input[int] min_instance_count: MinInstanceCount is the minimum number of instances that must be up to meet the EnsureAvailability safety check during operations like upgrade or deactivate node. The actual number that is used is max( MinInstanceCount, ceil( MinInstancePercentage/100.0 * InstanceCount) ). Note, if InstanceCount is set to -1, during MinInstanceCount computation -1 is first converted into the number of nodes on which the instances are allowed to be placed according to the placement constraints on the service. :param pulumi.Input[int] min_instance_percentage: MinInstancePercentage is the minimum percentage of InstanceCount that must be up to meet the EnsureAvailability safety check during operations like upgrade or deactivate node. 
The actual number that is used is max( MinInstanceCount, ceil( MinInstancePercentage/100.0 * InstanceCount) ). Note, if InstanceCount is set to -1, during MinInstancePercentage computation, -1 is first converted into the number of nodes on which the instances are allowed to be placed according to the placement constraints on the service. :param pulumi.Input[str] placement_constraints: The placement constraints as a string. Placement constraints are boolean expressions on node properties and allow for restricting a service to particular nodes based on the service requirements. For example, to place a service on nodes where NodeType is blue specify the following: "NodeColor == blue)". :param pulumi.Input[Sequence[pulumi.Input['ScalingPolicyArgs']]] scaling_policies: Scaling policies for this service. :param pulumi.Input[Sequence[pulumi.Input['ServiceLoadMetricArgs']]] service_load_metrics: The service load metrics is given as an array of ServiceLoadMetric objects. :param pulumi.Input[Union[str, 'ServicePackageActivationMode']] service_package_activation_mode: The activation Mode of the service package :param pulumi.Input[Sequence[pulumi.Input[Union['ServicePlacementInvalidDomainPolicyArgs', 'ServicePlacementNonPartiallyPlaceServicePolicyArgs', 'ServicePlacementPreferPrimaryDomainPolicyArgs', 'ServicePlacementRequireDomainDistributionPolicyArgs', 'ServicePlacementRequiredDomainPolicyArgs']]]] service_placement_policies: A list that describes the correlation of the service with other services. 
""" pulumi.set(__self__, "instance_count", instance_count) pulumi.set(__self__, "partition_description", partition_description) pulumi.set(__self__, "service_kind", 'Stateless') pulumi.set(__self__, "service_type_name", service_type_name) if correlation_scheme is not None: pulumi.set(__self__, "correlation_scheme", correlation_scheme) if default_move_cost is not None: pulumi.set(__self__, "default_move_cost", default_move_cost) if min_instance_count is not None: pulumi.set(__self__, "min_instance_count", min_instance_count) if min_instance_percentage is not None: pulumi.set(__self__, "min_instance_percentage", min_instance_percentage) if placement_constraints is not None: pulumi.set(__self__, "placement_constraints", placement_constraints) if scaling_policies is not None: pulumi.set(__self__, "scaling_policies", scaling_policies) if service_load_metrics is not None: pulumi.set(__self__, "service_load_metrics", service_load_metrics) if service_package_activation_mode is not None: pulumi.set(__self__, "service_package_activation_mode", service_package_activation_mode) if service_placement_policies is not None: pulumi.set(__self__, "service_placement_policies", service_placement_policies) @property @pulumi.getter(name="instanceCount") def instance_count(self) -> pulumi.Input[int]: """ The instance count. """ return pulumi.get(self, "instance_count") @instance_count.setter def instance_count(self, value: pulumi.Input[int]): pulumi.set(self, "instance_count", value) @property @pulumi.getter(name="partitionDescription") def partition_description(self) -> pulumi.Input[Union['NamedPartitionSchemeArgs', 'SingletonPartitionSchemeArgs', 'UniformInt64RangePartitionSchemeArgs']]: """ Describes how the service is partitioned. 
""" return pulumi.get(self, "partition_description") @partition_description.setter def partition_description(self, value: pulumi.Input[Union['NamedPartitionSchemeArgs', 'SingletonPartitionSchemeArgs', 'UniformInt64RangePartitionSchemeArgs']]): pulumi.set(self, "partition_description", value) @property @pulumi.getter(name="serviceKind") def service_kind(self) -> pulumi.Input[str]: """ The kind of service (Stateless or Stateful). Expected value is 'Stateless'. """ return pulumi.get(self, "service_kind") @service_kind.setter def service_kind(self, value: pulumi.Input[str]): pulumi.set(self, "service_kind", value) @property @pulumi.getter(name="serviceTypeName") def service_type_name(self) -> pulumi.Input[str]: """ The name of the service type """ return pulumi.get(self, "service_type_name") @service_type_name.setter def service_type_name(self, value: pulumi.Input[str]): pulumi.set(self, "service_type_name", value) @property @pulumi.getter(name="correlationScheme") def correlation_scheme(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCorrelationArgs']]]]: """ A list that describes the correlation of the service with other services. """ return pulumi.get(self, "correlation_scheme") @correlation_scheme.setter def correlation_scheme(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCorrelationArgs']]]]): pulumi.set(self, "correlation_scheme", value) @property @pulumi.getter(name="defaultMoveCost") def default_move_cost(self) -> Optional[pulumi.Input[Union[str, 'MoveCost']]]: """ Specifies the move cost for the service. 
""" return pulumi.get(self, "default_move_cost") @default_move_cost.setter def default_move_cost(self, value: Optional[pulumi.Input[Union[str, 'MoveCost']]]): pulumi.set(self, "default_move_cost", value) @property @pulumi.getter(name="minInstanceCount") def min_instance_count(self) -> Optional[pulumi.Input[int]]: """ MinInstanceCount is the minimum number of instances that must be up to meet the EnsureAvailability safety check during operations like upgrade or deactivate node. The actual number that is used is max( MinInstanceCount, ceil( MinInstancePercentage/100.0 * InstanceCount) ). Note, if InstanceCount is set to -1, during MinInstanceCount computation -1 is first converted into the number of nodes on which the instances are allowed to be placed according to the placement constraints on the service. """ return pulumi.get(self, "min_instance_count") @min_instance_count.setter def min_instance_count(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "min_instance_count", value) @property @pulumi.getter(name="minInstancePercentage") def min_instance_percentage(self) -> Optional[pulumi.Input[int]]: """ MinInstancePercentage is the minimum percentage of InstanceCount that must be up to meet the EnsureAvailability safety check during operations like upgrade or deactivate node. The actual number that is used is max( MinInstanceCount, ceil( MinInstancePercentage/100.0 * InstanceCount) ). Note, if InstanceCount is set to -1, during MinInstancePercentage computation, -1 is first converted into the number of nodes on which the instances are allowed to be placed according to the placement constraints on the service. 
""" return pulumi.get(self, "min_instance_percentage") @min_instance_percentage.setter def min_instance_percentage(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "min_instance_percentage", value) @property @pulumi.getter(name="placementConstraints") def placement_constraints(self) -> Optional[pulumi.Input[str]]: """ The placement constraints as a string. Placement constraints are boolean expressions on node properties and allow for restricting a service to particular nodes based on the service requirements. For example, to place a service on nodes where NodeType is blue specify the following: "NodeColor == blue)". """ return pulumi.get(self, "placement_constraints") @placement_constraints.setter def placement_constraints(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "placement_constraints", value) @property @pulumi.getter(name="scalingPolicies") def scaling_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ScalingPolicyArgs']]]]: """ Scaling policies for this service. """ return pulumi.get(self, "scaling_policies") @scaling_policies.setter def scaling_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ScalingPolicyArgs']]]]): pulumi.set(self, "scaling_policies", value) @property @pulumi.getter(name="serviceLoadMetrics") def service_load_metrics(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceLoadMetricArgs']]]]: """ The service load metrics is given as an array of ServiceLoadMetric objects. 
""" return pulumi.get(self, "service_load_metrics") @service_load_metrics.setter def service_load_metrics(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceLoadMetricArgs']]]]): pulumi.set(self, "service_load_metrics", value) @property @pulumi.getter(name="servicePackageActivationMode") def service_package_activation_mode(self) -> Optional[pulumi.Input[Union[str, 'ServicePackageActivationMode']]]: """ The activation Mode of the service package """ return pulumi.get(self, "service_package_activation_mode") @service_package_activation_mode.setter def service_package_activation_mode(self, value: Optional[pulumi.Input[Union[str, 'ServicePackageActivationMode']]]): pulumi.set(self, "service_package_activation_mode", value) @property @pulumi.getter(name="servicePlacementPolicies") def service_placement_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union['ServicePlacementInvalidDomainPolicyArgs', 'ServicePlacementNonPartiallyPlaceServicePolicyArgs', 'ServicePlacementPreferPrimaryDomainPolicyArgs', 'ServicePlacementRequireDomainDistributionPolicyArgs', 'ServicePlacementRequiredDomainPolicyArgs']]]]]: """ A list that describes the correlation of the service with other services. """ return pulumi.get(self, "service_placement_policies") @service_placement_policies.setter def service_placement_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union['ServicePlacementInvalidDomainPolicyArgs', 'ServicePlacementNonPartiallyPlaceServicePolicyArgs', 'ServicePlacementPreferPrimaryDomainPolicyArgs', 'ServicePlacementRequireDomainDistributionPolicyArgs', 'ServicePlacementRequiredDomainPolicyArgs']]]]]): pulumi.set(self, "service_placement_policies", value) @pulumi.input_type class SubResourceArgs: def __init__(__self__, *, id: Optional[pulumi.Input[str]] = None): """ Azure resource identifier. :param pulumi.Input[str] id: Azure resource identifier. 
""" if id is not None: pulumi.set(__self__, "id", id) @property @pulumi.getter def id(self) -> Optional[pulumi.Input[str]]: """ Azure resource identifier. """ return pulumi.get(self, "id") @id.setter def id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "id", value) @pulumi.input_type class UniformInt64RangePartitionSchemeArgs: def __init__(__self__, *, count: pulumi.Input[int], high_key: pulumi.Input[float], low_key: pulumi.Input[float], partition_scheme: pulumi.Input[str]): """ Describes a partitioning scheme where an integer range is allocated evenly across a number of partitions. :param pulumi.Input[int] count: The number of partitions. :param pulumi.Input[float] high_key: The upper bound of the partition key range that should be split between the partition ‘Count’ :param pulumi.Input[float] low_key: The lower bound of the partition key range that should be split between the partition ‘Count’ :param pulumi.Input[str] partition_scheme: Enumerates the ways that a service can be partitioned. Expected value is 'UniformInt64Range'. """ pulumi.set(__self__, "count", count) pulumi.set(__self__, "high_key", high_key) pulumi.set(__self__, "low_key", low_key) pulumi.set(__self__, "partition_scheme", 'UniformInt64Range') @property @pulumi.getter def count(self) -> pulumi.Input[int]: """ The number of partitions. 
""" return pulumi.get(self, "count") @count.setter def count(self, value: pulumi.Input[int]): pulumi.set(self, "count", value) @property @pulumi.getter(name="highKey") def high_key(self) -> pulumi.Input[float]: """ The upper bound of the partition key range that should be split between the partition ‘Count’ """ return pulumi.get(self, "high_key") @high_key.setter def high_key(self, value: pulumi.Input[float]): pulumi.set(self, "high_key", value) @property @pulumi.getter(name="lowKey") def low_key(self) -> pulumi.Input[float]: """ The lower bound of the partition key range that should be split between the partition ‘Count’ """ return pulumi.get(self, "low_key") @low_key.setter def low_key(self, value: pulumi.Input[float]): pulumi.set(self, "low_key", value) @property @pulumi.getter(name="partitionScheme") def partition_scheme(self) -> pulumi.Input[str]: """ Enumerates the ways that a service can be partitioned. Expected value is 'UniformInt64Range'. """ return pulumi.get(self, "partition_scheme") @partition_scheme.setter def partition_scheme(self, value: pulumi.Input[str]): pulumi.set(self, "partition_scheme", value) @pulumi.input_type class VMSSExtensionArgs: def __init__(__self__, *, name: pulumi.Input[str], publisher: pulumi.Input[str], type: pulumi.Input[str], type_handler_version: pulumi.Input[str], auto_upgrade_minor_version: Optional[pulumi.Input[bool]] = None, force_update_tag: Optional[pulumi.Input[str]] = None, protected_settings: Optional[Any] = None, provision_after_extensions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, settings: Optional[Any] = None): """ Specifies set of extensions that should be installed onto the virtual machines. :param pulumi.Input[str] name: The name of the extension. :param pulumi.Input[str] publisher: The name of the extension handler publisher. :param pulumi.Input[str] type: Specifies the type of the extension; an example is "CustomScriptExtension". 
:param pulumi.Input[str] type_handler_version: Specifies the version of the script handler. :param pulumi.Input[bool] auto_upgrade_minor_version: Indicates whether the extension should use a newer minor version if one is available at deployment time. Once deployed, however, the extension will not upgrade minor versions unless redeployed, even with this property set to true. :param pulumi.Input[str] force_update_tag: If a value is provided and is different from the previous value, the extension handler will be forced to update even if the extension configuration has not changed. :param Any protected_settings: The extension can contain either protectedSettings or protectedSettingsFromKeyVault or no protected settings at all. :param pulumi.Input[Sequence[pulumi.Input[str]]] provision_after_extensions: Collection of extension names after which this extension needs to be provisioned. :param Any settings: Json formatted public settings for the extension. """ pulumi.set(__self__, "name", name) pulumi.set(__self__, "publisher", publisher) pulumi.set(__self__, "type", type) pulumi.set(__self__, "type_handler_version", type_handler_version) if auto_upgrade_minor_version is not None: pulumi.set(__self__, "auto_upgrade_minor_version", auto_upgrade_minor_version) if force_update_tag is not None: pulumi.set(__self__, "force_update_tag", force_update_tag) if protected_settings is not None: pulumi.set(__self__, "protected_settings", protected_settings) if provision_after_extensions is not None: pulumi.set(__self__, "provision_after_extensions", provision_after_extensions) if settings is not None: pulumi.set(__self__, "settings", settings) @property @pulumi.getter def name(self) -> pulumi.Input[str]: """ The name of the extension. """ return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[str]): pulumi.set(self, "name", value) @property @pulumi.getter def publisher(self) -> pulumi.Input[str]: """ The name of the extension handler publisher. 
""" return pulumi.get(self, "publisher") @publisher.setter def publisher(self, value: pulumi.Input[str]): pulumi.set(self, "publisher", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: """ Specifies the type of the extension; an example is "CustomScriptExtension". """ return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter(name="typeHandlerVersion") def type_handler_version(self) -> pulumi.Input[str]: """ Specifies the version of the script handler. """ return pulumi.get(self, "type_handler_version") @type_handler_version.setter def type_handler_version(self, value: pulumi.Input[str]): pulumi.set(self, "type_handler_version", value) @property @pulumi.getter(name="autoUpgradeMinorVersion") def auto_upgrade_minor_version(self) -> Optional[pulumi.Input[bool]]: """ Indicates whether the extension should use a newer minor version if one is available at deployment time. Once deployed, however, the extension will not upgrade minor versions unless redeployed, even with this property set to true. """ return pulumi.get(self, "auto_upgrade_minor_version") @auto_upgrade_minor_version.setter def auto_upgrade_minor_version(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "auto_upgrade_minor_version", value) @property @pulumi.getter(name="forceUpdateTag") def force_update_tag(self) -> Optional[pulumi.Input[str]]: """ If a value is provided and is different from the previous value, the extension handler will be forced to update even if the extension configuration has not changed. 
""" return pulumi.get(self, "force_update_tag") @force_update_tag.setter def force_update_tag(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "force_update_tag", value) @property @pulumi.getter(name="protectedSettings") def protected_settings(self) -> Optional[Any]: """ The extension can contain either protectedSettings or protectedSettingsFromKeyVault or no protected settings at all. """ return pulumi.get(self, "protected_settings") @protected_settings.setter def protected_settings(self, value: Optional[Any]): pulumi.set(self, "protected_settings", value) @property @pulumi.getter(name="provisionAfterExtensions") def provision_after_extensions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ Collection of extension names after which this extension needs to be provisioned. """ return pulumi.get(self, "provision_after_extensions") @provision_after_extensions.setter def provision_after_extensions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "provision_after_extensions", value) @property @pulumi.getter def settings(self) -> Optional[Any]: """ Json formatted public settings for the extension. """ return pulumi.get(self, "settings") @settings.setter def settings(self, value: Optional[Any]): pulumi.set(self, "settings", value) @pulumi.input_type class VaultCertificateArgs: def __init__(__self__, *, certificate_store: pulumi.Input[str], certificate_url: pulumi.Input[str]): """ Describes a single certificate reference in a Key Vault, and where the certificate should reside on the VM. :param pulumi.Input[str] certificate_store: For Windows VMs, specifies the certificate store on the Virtual Machine to which the certificate should be added. The specified certificate store is implicitly in the LocalMachine account. 
<br><br>For Linux VMs, the certificate file is placed under the /var/lib/waagent directory, with the file name <UppercaseThumbprint>.crt for the X509 certificate file and <UppercaseThumbprint>.prv for private key. Both of these files are .pem formatted. :param pulumi.Input[str] certificate_url: This is the URL of a certificate that has been uploaded to Key Vault as a secret. For adding a secret to the Key Vault, see [Add a key or secret to the key vault](https://docs.microsoft.com/azure/key-vault/key-vault-get-started/#add). In this case, your certificate needs to be It is the Base64 encoding of the following JSON Object which is encoded in UTF-8: <br><br> {<br> "data":"<Base64-encoded-certificate>",<br> "dataType":"pfx",<br> "password":"<pfx-file-password>"<br>} """ pulumi.set(__self__, "certificate_store", certificate_store) pulumi.set(__self__, "certificate_url", certificate_url) @property @pulumi.getter(name="certificateStore") def certificate_store(self) -> pulumi.Input[str]: """ For Windows VMs, specifies the certificate store on the Virtual Machine to which the certificate should be added. The specified certificate store is implicitly in the LocalMachine account. <br><br>For Linux VMs, the certificate file is placed under the /var/lib/waagent directory, with the file name <UppercaseThumbprint>.crt for the X509 certificate file and <UppercaseThumbprint>.prv for private key. Both of these files are .pem formatted. """ return pulumi.get(self, "certificate_store") @certificate_store.setter def certificate_store(self, value: pulumi.Input[str]): pulumi.set(self, "certificate_store", value) @property @pulumi.getter(name="certificateUrl") def certificate_url(self) -> pulumi.Input[str]: """ This is the URL of a certificate that has been uploaded to Key Vault as a secret. For adding a secret to the Key Vault, see [Add a key or secret to the key vault](https://docs.microsoft.com/azure/key-vault/key-vault-get-started/#add). 
In this case, your certificate needs to be It is the Base64 encoding of the following JSON Object which is encoded in UTF-8: <br><br> {<br> "data":"<Base64-encoded-certificate>",<br> "dataType":"pfx",<br> "password":"<pfx-file-password>"<br>} """ return pulumi.get(self, "certificate_url") @certificate_url.setter def certificate_url(self, value: pulumi.Input[str]): pulumi.set(self, "certificate_url", value) @pulumi.input_type class VaultSecretGroupArgs: def __init__(__self__, *, source_vault: pulumi.Input['SubResourceArgs'], vault_certificates: pulumi.Input[Sequence[pulumi.Input['VaultCertificateArgs']]]): """ Specifies set of certificates that should be installed onto the virtual machines. :param pulumi.Input['SubResourceArgs'] source_vault: The relative URL of the Key Vault containing all of the certificates in VaultCertificates. :param pulumi.Input[Sequence[pulumi.Input['VaultCertificateArgs']]] vault_certificates: The list of key vault references in SourceVault which contain certificates. """ pulumi.set(__self__, "source_vault", source_vault) pulumi.set(__self__, "vault_certificates", vault_certificates) @property @pulumi.getter(name="sourceVault") def source_vault(self) -> pulumi.Input['SubResourceArgs']: """ The relative URL of the Key Vault containing all of the certificates in VaultCertificates. """ return pulumi.get(self, "source_vault") @source_vault.setter def source_vault(self, value: pulumi.Input['SubResourceArgs']): pulumi.set(self, "source_vault", value) @property @pulumi.getter(name="vaultCertificates") def vault_certificates(self) -> pulumi.Input[Sequence[pulumi.Input['VaultCertificateArgs']]]: """ The list of key vault references in SourceVault which contain certificates. 
""" return pulumi.get(self, "vault_certificates") @vault_certificates.setter def vault_certificates(self, value: pulumi.Input[Sequence[pulumi.Input['VaultCertificateArgs']]]): pulumi.set(self, "vault_certificates", value) @pulumi.input_type class VmManagedIdentityArgs: def __init__(__self__, *, user_assigned_identities: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None): """ Identities for the virtual machine scale set under the node type. :param pulumi.Input[Sequence[pulumi.Input[str]]] user_assigned_identities: The list of user identities associated with the virtual machine scale set under the node type. Each entry will be an ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. """ if user_assigned_identities is not None: pulumi.set(__self__, "user_assigned_identities", user_assigned_identities) @property @pulumi.getter(name="userAssignedIdentities") def user_assigned_identities(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ The list of user identities associated with the virtual machine scale set under the node type. Each entry will be an ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. """ return pulumi.get(self, "user_assigned_identities") @user_assigned_identities.setter def user_assigned_identities(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "user_assigned_identities", value)
49.474341
573
0.700688
16,488
138,825
5.7164
0.049551
0.083096
0.04193
0.030641
0.865679
0.780822
0.71851
0.657694
0.626034
0.58698
0
0.002317
0.207275
138,825
2,805
574
49.491979
0.85413
0.360094
0
0.447154
1
0
0.178911
0.107393
0
0
0
0
0
1
0.215134
false
0
0.003752
0
0.338962
0.010632
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
4389cc3b45947e39f60caa0224836ac24c915691
151
py
Python
python/testData/inspections/ArgumentEqualDefault.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/inspections/ArgumentEqualDefault.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/inspections/ArgumentEqualDefault.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
def foo(a, b = 345, c = 1): pass #PY-3261 foo(1, <weak_warning descr="Argument equals to default parameter value">345<caret></weak_warning>, c=22)
25.166667
97
0.688742
27
151
3.777778
0.777778
0.215686
0
0
0
0
0
0
0
0
0
0.109375
0.152318
151
6
97
25.166667
0.6875
0.046358
0
0
0
0
0.291667
0
0
0
0
0
0
0
null
null
0.25
0
null
null
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
5
43a144e028a2c0bad1e8500dfeae31d2de0a7f63
93
py
Python
wiki/admin.py
duplxey/NForum
990215e5a841ac054fd3c0a167dee37298a70fb8
[ "MIT" ]
7
2019-11-12T14:01:17.000Z
2022-01-29T19:17:09.000Z
wiki/admin.py
duplxey/NForum
990215e5a841ac054fd3c0a167dee37298a70fb8
[ "MIT" ]
4
2019-12-08T10:03:21.000Z
2020-04-07T20:27:53.000Z
wiki/admin.py
duplxey/NForum
990215e5a841ac054fd3c0a167dee37298a70fb8
[ "MIT" ]
1
2022-01-29T13:44:24.000Z
2022-01-29T13:44:24.000Z
from django.contrib import admin from .models import WikiPage admin.site.register(WikiPage)
18.6
32
0.827957
13
93
5.923077
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.107527
93
4
33
23.25
0.927711
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
43b74e4727dbf39f89de340422cbf7dc8c64ee25
8,873
py
Python
tests/verifiers_tests/random_tests.py
nathzi1505/DNNV
16c6e6ecb681ce66196f9274d4a43eede8686319
[ "MIT" ]
1
2021-07-07T11:06:14.000Z
2021-07-07T11:06:14.000Z
tests/verifiers_tests/random_tests.py
nathzi1505/DNNV
16c6e6ecb681ce66196f9274d4a43eede8686319
[ "MIT" ]
null
null
null
tests/verifiers_tests/random_tests.py
nathzi1505/DNNV
16c6e6ecb681ce66196f9274d4a43eede8686319
[ "MIT" ]
null
null
null
import os import unittest from dnnv import nn from dnnv import properties from dnnv.properties import Symbol from dnnv.properties.context import get_context from dnnv.verifiers import SAT, UNSAT, UNKNOWN from dnnv.verifiers.bab import BaB from dnnv.verifiers.eran import ERAN from dnnv.verifiers.marabou import Marabou from dnnv.verifiers.mipverify import MIPVerify from dnnv.verifiers.neurify import Neurify from dnnv.verifiers.nnenum import Nnenum from dnnv.verifiers.planet import Planet from dnnv.verifiers.reluplex import Reluplex from dnnv.verifiers.verinet import VeriNet from tests.utils import network_artifact_dir, property_artifact_dir RUNS_PER_PROP = int(os.environ.get("_DNNV_TEST_RUNS_PER_PROP", "1")) VERIFIERS = { "bab": BaB, "eran": ERAN, "marabou": Marabou, "mipverify": MIPVerify, "neurify": Neurify, "nnenum": Nnenum, "planet": Planet, "reluplex": Reluplex, "verinet": VeriNet, } class RandomTests(unittest.TestCase): def setUp(self): self.reset_property_context() for varname in [ "SEED", "SHIFT", "SCALE", "EPSILON", "INPUT_LAYER", "OUTPUT_LAYER", ]: if varname in os.environ: del os.environ[varname] def reset_property_context(self): get_context().reset() def check_results(self, result, results): if result == UNKNOWN: return results.append(result) if len(results) == 1: return previous_result = results[-2] self.assertEqual(result, previous_result) def test_random_fc_0(self): os.environ["OUTPUT_LAYER"] = "-1" for epsilon in [0.01, 0.1, 0.5, 1.0]: os.environ["EPSILON"] = str(epsilon) for i in range(RUNS_PER_PROP): os.environ["SEED"] = str(i) results = [] for name, verifier in VERIFIERS.items(): if name == "marabou" and epsilon >= 0.5: # numerical inconsistencies # TODO : address these continue if name == "verinet" and epsilon == 0.5: # too slow continue if not verifier.is_installed(): continue self.reset_property_context() dnn = nn.parse(network_artifact_dir / "random_fc_0.onnx") phi = properties.parse(property_artifact_dir / "localrobustness.py") 
phi.concretize(N=dnn) result, _ = verifier.verify(phi) self.check_results(result, results) def test_random_fc_1(self): os.environ["OUTPUT_LAYER"] = "-1" for epsilon in [0.01, 0.1, 0.5, 1.0]: os.environ["EPSILON"] = str(epsilon) for i in range(RUNS_PER_PROP): os.environ["SEED"] = str(i) results = [] for name, verifier in VERIFIERS.items(): if name == "verinet" and (epsilon == 0.5 or epsilon == 1.0): # too slow continue if not verifier.is_installed(): continue self.reset_property_context() dnn = nn.parse(network_artifact_dir / "random_fc_1.onnx") phi = properties.parse(property_artifact_dir / "localrobustness.py") phi.concretize(N=dnn) result, _ = verifier.verify(phi) self.check_results(result, results) def test_random_fc_2(self): os.environ["OUTPUT_LAYER"] = "-1" for epsilon in [0.01, 0.1, 0.5, 1.0]: os.environ["EPSILON"] = str(epsilon) for i in range(RUNS_PER_PROP): os.environ["SEED"] = str(i) results = [] for name, verifier in VERIFIERS.items(): if name == "verinet" and epsilon == 0.5: # too slow continue if not verifier.is_installed(): continue self.reset_property_context() dnn = nn.parse(network_artifact_dir / "random_fc_2.onnx") phi = properties.parse(property_artifact_dir / "localrobustness.py") phi.concretize(N=dnn) result, _ = verifier.verify(phi) self.check_results(result, results) def test_random_conv_0(self): os.environ["OUTPUT_LAYER"] = "-1" excluded_verifiers = { "reluplex", } for epsilon in [0.01, 0.1, 0.5, 1.0]: os.environ["EPSILON"] = str(epsilon) for i in range(RUNS_PER_PROP): os.environ["SEED"] = str(i) results = [] for name, verifier in VERIFIERS.items(): if name in excluded_verifiers: continue if not verifier.is_installed(): continue self.reset_property_context() dnn = nn.parse( network_artifact_dir / "random_conv_0.onnx" ).simplify() phi = properties.parse(property_artifact_dir / "localrobustness.py") phi.concretize(N=dnn) result, _ = verifier.verify(phi) self.check_results(result, results) def test_random_conv_1(self): 
os.environ["OUTPUT_LAYER"] = "-1" excluded_verifiers = { "reluplex", } for epsilon in [0.01, 0.1, 0.5, 1.0]: os.environ["EPSILON"] = str(epsilon) for i in range(RUNS_PER_PROP): os.environ["SEED"] = str(i) results = [] for name, verifier in VERIFIERS.items(): if name == "verinet" and epsilon == 0.01: # too slow continue if name in excluded_verifiers: continue if not verifier.is_installed(): continue self.reset_property_context() dnn = nn.parse( network_artifact_dir / "random_conv_1.onnx" ).simplify() phi = properties.parse(property_artifact_dir / "localrobustness.py") phi.concretize(N=dnn) result, _ = verifier.verify(phi) self.check_results(result, results) def test_random_conv_2(self): os.environ["OUTPUT_LAYER"] = "-1" excluded_verifiers = { "reluplex", } for epsilon in [0.01, 0.1, 0.5, 1.0]: os.environ["EPSILON"] = str(epsilon) for i in range(RUNS_PER_PROP): os.environ["SEED"] = str(i) results = [] for name, verifier in VERIFIERS.items(): if name == "marabou" and (epsilon == 0.5 or epsilon == 1.0): # numerical inconsistencies # TODO : address these continue if name in excluded_verifiers: continue if not verifier.is_installed(): continue self.reset_property_context() dnn = nn.parse( network_artifact_dir / "random_conv_2.onnx" ).simplify() phi = properties.parse(property_artifact_dir / "localrobustness.py") phi.concretize(N=dnn) result, _ = verifier.verify(phi) self.check_results(result, results) def test_random_residual_0(self): os.environ["OUTPUT_LAYER"] = "-1" verifiers = { "eran": ERAN, "planet": Planet, } for epsilon in [0.01, 0.1, 0.5, 1.0]: os.environ["EPSILON"] = str(epsilon) for i in range(RUNS_PER_PROP): os.environ["SEED"] = str(i) results = [] for name, verifier in verifiers.items(): if not verifier.is_installed(): continue self.reset_property_context() dnn = nn.parse(network_artifact_dir / "random_residual_0.onnx") phi = properties.parse(property_artifact_dir / "localrobustness.py") phi.concretize(N=dnn) result, _ = verifier.verify(phi) 
self.check_results(result, results) if __name__ == "__main__": unittest.main()
38.916667
88
0.509523
916
8,873
4.766376
0.111354
0.049473
0.038937
0.043976
0.733165
0.733165
0.733165
0.725836
0.700641
0.700641
0
0.019338
0.393892
8,873
227
89
39.088106
0.792488
0.014764
0
0.640394
0
0
0.072713
0.005267
0
0
0
0.004405
0.004926
1
0.049261
false
0
0.083744
0
0.147783
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
43ba26eb26f2bb09ccbeffa01aa3b973ab10ac95
83
py
Python
pipen/cli/__init__.py
sthagen/pwwang-pipen
2d32105511222cff2287e985708e80eba56276c7
[ "Apache-2.0" ]
41
2017-08-30T10:26:37.000Z
2020-10-14T01:37:52.000Z
pipen/cli/__init__.py
sthagen/pipen
d9d95288bc895ccf3b492c591c9aca8189c67f4a
[ "Apache-2.0" ]
39
2017-09-22T18:40:17.000Z
2020-11-04T06:49:42.000Z
pipen/cli/__init__.py
sthagen/pipen
d9d95288bc895ccf3b492c591c9aca8189c67f4a
[ "Apache-2.0" ]
7
2018-07-02T05:57:16.000Z
2019-12-23T07:43:47.000Z
"""Provide CLI for pipen""" from ._hooks import CLIPlugin from ._main import main
16.6
29
0.746988
12
83
5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.156627
83
4
30
20.75
0.857143
0.253012
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
43d647a7e01d9d8f028d460f8f32445d891c8817
90
py
Python
src/config/__init__.py
colabois/PDBB
e71ffd0b1497bbddb365b3e7db9de8a14f7912a1
[ "MIT" ]
1
2021-05-13T20:33:14.000Z
2021-05-13T20:33:14.000Z
src/config/__init__.py
colabois/PDBB
e71ffd0b1497bbddb365b3e7db9de8a14f7912a1
[ "MIT" ]
null
null
null
src/config/__init__.py
colabois/PDBB
e71ffd0b1497bbddb365b3e7db9de8a14f7912a1
[ "MIT" ]
null
null
null
from . import config_types from .base import Config __all__ = ["Config", "config_types"]
18
36
0.744444
12
90
5.083333
0.5
0.393443
0
0
0
0
0
0
0
0
0
0
0.144444
90
4
37
22.5
0.792208
0
0
0
0
0
0.2
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
43d65588a09bb93a7741ea7eeb118502e22d8f55
73
py
Python
model/__init__.py
npurson/pytorch-lightning-template
6a7972242a8b287f861e6a0139fbf38cc954b061
[ "MIT" ]
3
2022-03-03T02:27:06.000Z
2022-03-31T08:14:26.000Z
model/__init__.py
npurson/pytorch-lightning-template
6a7972242a8b287f861e6a0139fbf38cc954b061
[ "MIT" ]
null
null
null
model/__init__.py
npurson/pytorch-lightning-template
6a7972242a8b287f861e6a0139fbf38cc954b061
[ "MIT" ]
null
null
null
from .interface import PLModelInterface from torchvision.models import *
24.333333
39
0.849315
8
73
7.75
0.75
0
0
0
0
0
0
0
0
0
0
0
0.109589
73
2
40
36.5
0.953846
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
78e67c60e22b960df7e99c62433870dd36bcf315
172
py
Python
api_crud/views.py
mainaerick/test-gms
0e847fa6f40a1b3308f5b66383c3ab61524198fe
[ "MIT" ]
null
null
null
api_crud/views.py
mainaerick/test-gms
0e847fa6f40a1b3308f5b66383c3ab61524198fe
[ "MIT" ]
null
null
null
api_crud/views.py
mainaerick/test-gms
0e847fa6f40a1b3308f5b66383c3ab61524198fe
[ "MIT" ]
null
null
null
from rest_framework.decorators import api_view from rest_framework.response import Response from rest_framework import status from django.shortcuts import render, redirect
34.4
46
0.877907
24
172
6.125
0.541667
0.163265
0.346939
0
0
0
0
0
0
0
0
0
0.098837
172
4
47
43
0.948387
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
78fef8c0ee46b120fdd59174c684ccda0d436e69
64
py
Python
nu/v1/Membranes/Leakages/__init__.py
bullgom/pysnn2
dad5ae26b029afd5c5bf76fe141249b0f7b7a36c
[ "MIT" ]
null
null
null
nu/v1/Membranes/Leakages/__init__.py
bullgom/pysnn2
dad5ae26b029afd5c5bf76fe141249b0f7b7a36c
[ "MIT" ]
null
null
null
nu/v1/Membranes/Leakages/__init__.py
bullgom/pysnn2
dad5ae26b029afd5c5bf76fe141249b0f7b7a36c
[ "MIT" ]
null
null
null
from .Proportional import Proportional from .Fixed import Fixed
21.333333
38
0.84375
8
64
6.75
0.5
0
0
0
0
0
0
0
0
0
0
0
0.125
64
2
39
32
0.964286
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
604a30a7267821a1fa85202bf0d45dbd070a2ae3
77
py
Python
tests/imports.py
JessaTehCrow/lynie
5cdc58f2782de2a7009074774a40bf08c719f171
[ "MIT" ]
4
2021-09-28T15:16:43.000Z
2021-11-28T12:37:32.000Z
tests/imports.py
JessaTehCrow/lynie
5cdc58f2782de2a7009074774a40bf08c719f171
[ "MIT" ]
1
2021-11-28T12:37:24.000Z
2021-12-01T01:36:31.000Z
tests/imports.py
JessaTehCrow/lynie
5cdc58f2782de2a7009074774a40bf08c719f171
[ "MIT" ]
null
null
null
import random as rand from string import * print(rand.choice(ascii_letters))
19.25
33
0.805195
12
77
5.083333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.116883
77
4
33
19.25
0.897059
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
60619793f0059b3bdf45cb04f867d1c89006c15b
169
py
Python
lgblearn/core/plot/plot_api.py
longgb246/lgblearn
b8275a12df98a2a734e5e38f748d9529b0e8d857
[ "MIT" ]
null
null
null
lgblearn/core/plot/plot_api.py
longgb246/lgblearn
b8275a12df98a2a734e5e38f748d9529b0e8d857
[ "MIT" ]
null
null
null
lgblearn/core/plot/plot_api.py
longgb246/lgblearn
b8275a12df98a2a734e5e38f748d9529b0e8d857
[ "MIT" ]
null
null
null
# -*- coding:utf-8 -*- # @Author : 'longguangbin' # @Contact : lgb453476610@163.com # @Date : 2018/11/27 """ Usage Of 'plot_api' : """ from simple_plot import *
16.9
33
0.597633
21
169
4.714286
0.952381
0
0
0
0
0
0
0
0
0
0
0.155556
0.201183
169
9
34
18.777778
0.577778
0.733728
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
60620bb1e385132947d777c0f95f9276b4a4733c
152
py
Python
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/constants.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/constants.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/constants.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
"""Backward-compatibility module to provide core-GL constant names""" from OpenGL.raw.GL._types import * from OpenGL.arrays._arrayconstants import *
38
70
0.782895
20
152
5.85
0.8
0.17094
0
0
0
0
0
0
0
0
0
0
0.118421
152
3
71
50.666667
0.873134
0.414474
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
608c8bd17b5d21078e0686481e94e94a86348dd5
1,584
py
Python
figureS2.py
CINPLA/edNEGmodel_analysis
be8854c563376a14ee7d15e51d98d0d82be96a35
[ "MIT" ]
null
null
null
figureS2.py
CINPLA/edNEGmodel_analysis
be8854c563376a14ee7d15e51d98d0d82be96a35
[ "MIT" ]
null
null
null
figureS2.py
CINPLA/edNEGmodel_analysis
be8854c563376a14ee7d15e51d98d0d82be96a35
[ "MIT" ]
null
null
null
import numpy as np from scipy.ndimage import uniform_filter1d from scipy.interpolate import interp1d from functions.currents import * filename = 'data/figure4.npz' data = np.load(filename) t = data['t'] t_fig4, av_I_cap_sn_fig4, av_I_leak_sn_fig4, av_I_pump_sn_fig4, av_I_Na_sn_fig4, av_I_DR_sn_fig4, av_I_stim_sn_fig4 = membrane_currents_sn(filename, t, stim_i=150e-12, stim_start=1, stim_end=8) t_fig4, av_I_cap_dn_fig4, av_I_leak_dn_fig4, av_I_pump_dn_fig4, av_I_AHP_dn_fig4, av_I_Ca_dn_fig4, av_I_KC_dn_fig4 = membrane_currents_dn(filename, t) t_fig4, av_I_cap_sg_fig4, av_I_leak_sg_fig4, av_I_pump_sg_fig4, av_I_Kir_sg_fig4 = membrane_currents_sg(filename, t) t_fig4, av_I_cap_dg_fig4, av_I_leak_dg_fig4, av_I_pump_dg_fig4, av_I_Kir_dg_fig4 = membrane_currents_dg(filename, t) np.savez('data/figureS2', t_fig4=t_fig4, av_I_cap_sn_fig4=av_I_cap_sn_fig4, av_I_leak_sn_fig4=av_I_leak_sn_fig4, \ av_I_pump_sn_fig4=av_I_pump_sn_fig4, av_I_Na_sn_fig4=av_I_Na_sn_fig4, av_I_DR_sn_fig4=av_I_DR_sn_fig4, \ av_I_stim_sn_fig4=av_I_stim_sn_fig4, \ av_I_cap_dn_fig4=av_I_cap_dn_fig4, av_I_leak_dn_fig4=av_I_leak_dn_fig4, av_I_pump_dn_fig4=av_I_pump_dn_fig4, \ av_I_AHP_dn_fig4=av_I_AHP_dn_fig4, av_I_Ca_dn_fig4=av_I_Ca_dn_fig4, av_I_KC_dn_fig4=av_I_KC_dn_fig4, \ av_I_cap_sg_fig4=av_I_cap_sg_fig4, av_I_leak_sg_fig4=av_I_leak_sg_fig4, \ av_I_pump_sg_fig4=av_I_pump_sg_fig4, av_I_Kir_sg_fig4=av_I_Kir_sg_fig4, \ av_I_cap_dg_fig4=av_I_cap_dg_fig4, av_I_leak_dg_fig4=av_I_leak_dg_fig4, \ av_I_pump_dg_fig4=av_I_pump_dg_fig4, av_I_Kir_dg_fig4=av_I_Kir_dg_fig4)
66
193
0.844697
376
1,584
2.87234
0.119681
0.333333
0.388889
0.141667
0.721296
0.72037
0.719444
0.689815
0.675926
0.621296
0
0.052921
0.081439
1,584
23
194
68.869565
0.689347
0
0
0
0
0
0.018939
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
0
0
0
null
1
1
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
6099aa0bc51a5ec5c634d70191639b969bf29de5
67
py
Python
GanApp/static/vendor/classes/Image.py
Hecodes98/ProyectosBlanda
b2400cbc95063600c351298b3b6731cebca0c132
[ "MIT" ]
null
null
null
GanApp/static/vendor/classes/Image.py
Hecodes98/ProyectosBlanda
b2400cbc95063600c351298b3b6731cebca0c132
[ "MIT" ]
null
null
null
GanApp/static/vendor/classes/Image.py
Hecodes98/ProyectosBlanda
b2400cbc95063600c351298b3b6731cebca0c132
[ "MIT" ]
null
null
null
class Image: def __init__(self, content): self.content = content
22.333333
29
0.746269
9
67
5.111111
0.666667
0.478261
0
0
0
0
0
0
0
0
0
0
0.149254
67
3
30
22.333333
0.807018
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
60fd1a95902c63a8ba3c07601dca4b61e95c40df
495
py
Python
odsparsator/__init__.py
jdum/odsparsator
cf51a0dafbaa03438cdea392a935e4f717b29238
[ "MIT" ]
1
2021-05-03T20:33:01.000Z
2021-05-03T20:33:01.000Z
odsparsator/__init__.py
jdum/odsparsator
cf51a0dafbaa03438cdea392a935e4f717b29238
[ "MIT" ]
null
null
null
odsparsator/__init__.py
jdum/odsparsator
cf51a0dafbaa03438cdea392a935e4f717b29238
[ "MIT" ]
null
null
null
#!/usr/bin/env python # Copyright 2021 Jérôme Dumonteil # Licence: MIT # Authors: jerome.dumonteil@gmail.com """Generate a json file from an OpenDocument Format .ods file. When used as a script, odsparsator parses an .ods file and generates a json file using the odfdo library. When used as a library, odsparsator parses an .ods file and returns a python structure. The resulting data follow the format of the reverse odsgenerator.py script. """ from .odsparsator import main, ods_to_python
29.117647
76
0.779798
78
495
4.923077
0.602564
0.054688
0.046875
0.057292
0.151042
0.151042
0
0
0
0
0
0.009592
0.157576
495
16
77
30.9375
0.911271
0.876768
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
880e48f24239e321330335dc7dcbab8909739406
234
py
Python
tributary/streaming/calculations/__init__.py
ceball/tributary
5e30f90d1a5cc176c0f231f525d9dc5a81353925
[ "Apache-2.0" ]
null
null
null
tributary/streaming/calculations/__init__.py
ceball/tributary
5e30f90d1a5cc176c0f231f525d9dc5a81353925
[ "Apache-2.0" ]
null
null
null
tributary/streaming/calculations/__init__.py
ceball/tributary
5e30f90d1a5cc176c0f231f525d9dc5a81353925
[ "Apache-2.0" ]
null
null
null
from .ops import * # noqa: F401, F403 from .rolling import Count as RollingCount, Sum as RollingSum, Min as RollingMin, Max as RollingMax, Average as RollingAverage, SMA, EMA, Last as RollingLast, First as RollingFirst # noqa: F401
78
194
0.764957
34
234
5.264706
0.705882
0.089385
0
0
0
0
0
0
0
0
0
0.046154
0.166667
234
2
195
117
0.871795
0.115385
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
714cdc8c5cdb5bd57a2138aa6f1db8295979d420
79
py
Python
app/config/dev.py
Gerschtli/betting-game-backend
4fb38e5316ea6509b4468f8ca11de2f899366abb
[ "MIT" ]
null
null
null
app/config/dev.py
Gerschtli/betting-game-backend
4fb38e5316ea6509b4468f8ca11de2f899366abb
[ "MIT" ]
1
2021-11-04T16:42:26.000Z
2021-11-04T16:42:26.000Z
app/config/dev.py
Gerschtli/betting-game-backend
4fb38e5316ea6509b4468f8ca11de2f899366abb
[ "MIT" ]
null
null
null
SQLALCHEMY_DATABASE_URI = 'mysql://betting_game:testpw@localhost/betting_game'
39.5
78
0.848101
10
79
6.3
0.8
0.349206
0
0
0
0
0
0
0
0
0
0
0.037975
79
1
79
79
0.828947
0
0
0
0
0
0.632911
0.632911
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
7177ab4ae1d224a1132774ca583b38399e40cd4d
144
py
Python
Modulo-03/ex112/ex112.py
Matheus-Henrique-Burey/Curso-de-Python
448aebaab96527affa1e45897a662bb0407c11c6
[ "MIT" ]
null
null
null
Modulo-03/ex112/ex112.py
Matheus-Henrique-Burey/Curso-de-Python
448aebaab96527affa1e45897a662bb0407c11c6
[ "MIT" ]
null
null
null
Modulo-03/ex112/ex112.py
Matheus-Henrique-Burey/Curso-de-Python
448aebaab96527affa1e45897a662bb0407c11c6
[ "MIT" ]
null
null
null
from utilidadescev import moeda from utilidadescev import dados valor = dados.leia_dinheiro('Digite o valor: R$ ') moeda.resumo(valor, 30, 30)
24
50
0.777778
21
144
5.285714
0.619048
0.306306
0.414414
0
0
0
0
0
0
0
0
0.032
0.131944
144
5
51
28.8
0.856
0
0
0
0
0
0.131944
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
71a8cf2bd378e4b7b798d1cf80909a0a0b8ff0d4
39
py
Python
errors.py
dev-easyshares/mighty
a6cf473fb8cfbf5b92db68c7b068fc8ae2911b8b
[ "MIT" ]
null
null
null
errors.py
dev-easyshares/mighty
a6cf473fb8cfbf5b92db68c7b068fc8ae2911b8b
[ "MIT" ]
1
2022-03-12T00:57:37.000Z
2022-03-12T00:57:37.000Z
errors.py
dev-easyshares/mighty
a6cf473fb8cfbf5b92db68c7b068fc8ae2911b8b
[ "MIT" ]
null
null
null
class BackendError(Exception): pass
19.5
30
0.769231
4
39
7.5
1
0
0
0
0
0
0
0
0
0
0
0
0.153846
39
2
31
19.5
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
71b97885a3c513c814923366708ad5599910dda4
111
py
Python
dev.py
cafebabel/backlog
8792eb9a18b6f926bcd7b12b986dfa06e3341bdd
[ "MIT" ]
4
2017-10-10T14:08:43.000Z
2020-01-26T02:55:50.000Z
dev.py
cafebabel/backlog
8792eb9a18b6f926bcd7b12b986dfa06e3341bdd
[ "MIT" ]
409
2017-10-10T14:45:11.000Z
2018-06-28T10:28:06.000Z
dev.py
scopyleft/backlog
8792eb9a18b6f926bcd7b12b986dfa06e3341bdd
[ "MIT" ]
1
2017-11-09T11:10:54.000Z
2017-11-09T11:10:54.000Z
from cafebabel import create_app, register_cli app = create_app('config.DevelopmentConfig') register_cli(app)
22.2
46
0.828829
15
111
5.866667
0.6
0.204545
0.318182
0
0
0
0
0
0
0
0
0
0.09009
111
4
47
27.75
0.871287
0
0
0
0
0
0.216216
0.216216
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
71bcb9adeb45110e66776f9a4eb120a2a69fdb15
28
py
Python
JonSmells/python/test.py
vosslerbr/vscode-snippets
a3893e9a20eee062f49e7eeeee84fdcd0feff531
[ "MIT" ]
1
2021-08-21T20:30:42.000Z
2021-08-21T20:30:42.000Z
JonSmells/python/test.py
vosslerbr/vscode-snippets
a3893e9a20eee062f49e7eeeee84fdcd0feff531
[ "MIT" ]
null
null
null
JonSmells/python/test.py
vosslerbr/vscode-snippets
a3893e9a20eee062f49e7eeeee84fdcd0feff531
[ "MIT" ]
1
2021-08-21T20:30:44.000Z
2021-08-21T20:30:44.000Z
# ! TEST FILE, DO NOT DELETE
28
28
0.678571
5
28
3.8
1
0
0
0
0
0
0
0
0
0
0
0
0.214286
28
1
28
28
0.863636
0.928571
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
71d694a862e5c596032f65043d5adad47356c0a9
415
py
Python
moshmosh/__init__.py
prendradjaja/moshmosh
d252bb24201c7ade6eb059794916b28cb3e9043e
[ "MIT" ]
114
2019-07-12T19:00:20.000Z
2021-12-02T17:28:36.000Z
moshmosh/__init__.py
prendradjaja/moshmosh
d252bb24201c7ade6eb059794916b28cb3e9043e
[ "MIT" ]
19
2019-07-12T18:34:59.000Z
2022-01-01T03:37:03.000Z
moshmosh/__init__.py
prendradjaja/moshmosh
d252bb24201c7ade6eb059794916b28cb3e9043e
[ "MIT" ]
7
2019-07-14T23:15:44.000Z
2021-12-27T21:15:17.000Z
from .ast_compat import ast from .extension_register import * from .extensions import template_python from .extensions import lazy_import import warnings with warnings.catch_warnings(): warnings.simplefilter("ignore", category=SyntaxWarning) from .extensions import pattern_matching from .extensions import scoped_operators from .extensions import pipelines from .extensions import quick_lambdas
34.583333
60
0.814458
49
415
6.734694
0.489796
0.254545
0.363636
0
0
0
0
0
0
0
0
0
0.137349
415
11
61
37.727273
0.921788
0
0
0
0
0
0.014851
0
0
0
0
0
0
1
0
true
0
0.818182
0
0.818182
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
71dd5f9b44f9c55e8f61b0b7036ccee68f94d57a
56
py
Python
imagepy/widgets/histogram/histogram_wgt.py
Pad0y/imagepy
23f41b64ade02f94b566b0d23a4b6459c1a1578d
[ "BSD-4-Clause" ]
null
null
null
imagepy/widgets/histogram/histogram_wgt.py
Pad0y/imagepy
23f41b64ade02f94b566b0d23a4b6459c1a1578d
[ "BSD-4-Clause" ]
null
null
null
imagepy/widgets/histogram/histogram_wgt.py
Pad0y/imagepy
23f41b64ade02f94b566b0d23a4b6459c1a1578d
[ "BSD-4-Clause" ]
null
null
null
from sciwx.plugins.histogram import Histogram as Plugin
28
55
0.857143
8
56
6
0.875
0
0
0
0
0
0
0
0
0
0
0
0.107143
56
1
56
56
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
e0e666b16f856d6d9f53ba6bdc096a445137a8b5
264
py
Python
status/admin.py
AhmedElmawary/erp
c998787c62194e26e10e3cbc61e35935e901e56d
[ "MIT" ]
null
null
null
status/admin.py
AhmedElmawary/erp
c998787c62194e26e10e3cbc61e35935e901e56d
[ "MIT" ]
null
null
null
status/admin.py
AhmedElmawary/erp
c998787c62194e26e10e3cbc61e35935e901e56d
[ "MIT" ]
null
null
null
from django.contrib import admin from django.utils.translation import ugettext_lazy as _ from .models import (Status, Option, Variation, Type) # admin.site.register(Status) # admin.site.register(Option) # admin.site.register(Variation) # admin.site.register(Type)
33
55
0.795455
36
264
5.777778
0.472222
0.173077
0.326923
0
0
0
0
0
0
0
0
0
0.094697
264
8
56
33
0.870293
0.424242
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1cb0749e3ba83140a9af740f04b076208db0d6a6
2,553
py
Python
FlatbufSignatureLibrary/Pattern.py
dfraze/sigkit
1f8e4c5849aa09e216d04d2e2e2b2fe493567fe0
[ "MIT" ]
30
2020-03-11T21:25:37.000Z
2021-10-13T20:47:14.000Z
FlatbufSignatureLibrary/Pattern.py
dfraze/sigkit
1f8e4c5849aa09e216d04d2e2e2b2fe493567fe0
[ "MIT" ]
8
2020-02-28T00:46:53.000Z
2022-03-16T02:44:50.000Z
FlatbufSignatureLibrary/Pattern.py
dfraze/sigkit
1f8e4c5849aa09e216d04d2e2e2b2fe493567fe0
[ "MIT" ]
3
2020-08-10T21:17:06.000Z
2022-02-01T15:16:13.000Z
# automatically generated by the FlatBuffers compiler, do not modify # namespace: FlatbufSignatureLibrary import flatbuffers class Pattern(object): __slots__ = ['_tab'] @classmethod def GetRootAsPattern(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Pattern() x.Init(buf, n + offset) return x # Pattern def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # Pattern def Data(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # Pattern def DataAsNumpy(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) return 0 # Pattern def DataLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.VectorLen(o) return 0 # Pattern def Mask(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # Pattern def MaskAsNumpy(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) return 0 # Pattern def MaskLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.VectorLen(o) return 0 def PatternStart(builder): builder.StartObject(2) def PatternAddData(builder, data): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) def PatternStartDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) def PatternAddMask(builder, mask): builder.PrependUOffsetTRelativeSlot(1, 
flatbuffers.number_types.UOffsetTFlags.py_type(mask), 0) def PatternStartMaskVector(builder, numElems): return builder.StartVector(1, numElems, 1) def PatternEnd(builder): return builder.EndObject()
35.957746
130
0.678026
311
2,553
5.424437
0.22508
0.062241
0.182573
0.207469
0.621221
0.615293
0.566686
0.566686
0.566686
0.505039
0
0.016492
0.216216
2,553
70
131
36.471429
0.826587
0.061496
0
0.530612
1
0
0.001676
0
0
0
0
0
0
1
0.285714
false
0
0.020408
0.061224
0.612245
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
1cd1fd0b49623e51375cb9915fb07cc4d7b95b08
60
py
Python
tests/set_path.py
Systemac/discord_astro_bot
5164503b70aed800f940cd84df434d47095e25a9
[ "MIT" ]
3
2019-05-19T02:59:46.000Z
2020-12-12T18:17:24.000Z
tests/set_path.py
Systemac/discord_astro_bot
5164503b70aed800f940cd84df434d47095e25a9
[ "MIT" ]
1
2020-07-13T19:31:25.000Z
2020-07-13T22:02:58.000Z
tests/set_path.py
Systemac/discord_astro_bot
5164503b70aed800f940cd84df434d47095e25a9
[ "MIT" ]
2
2019-10-29T09:10:52.000Z
2022-01-29T17:54:35.000Z
import sys sys.path.append('dastro_bot/_default_settings')
15
47
0.816667
9
60
5.111111
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.066667
60
3
48
20
0.821429
0
0
0
0
0
0.466667
0.466667
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
1cd7e39169bdc32d3364b5564614cb2f144d7b50
266
py
Python
course-2:combining-building-blocks/subject-2:functions/topic-5:Higher-Order Functions/lesson-2:Functions as arguments.py
regnart-tech-club/python
069df070059de662d4104de8192e45407a7e94ce
[ "Apache-2.0" ]
null
null
null
course-2:combining-building-blocks/subject-2:functions/topic-5:Higher-Order Functions/lesson-2:Functions as arguments.py
regnart-tech-club/python
069df070059de662d4104de8192e45407a7e94ce
[ "Apache-2.0" ]
null
null
null
course-2:combining-building-blocks/subject-2:functions/topic-5:Higher-Order Functions/lesson-2:Functions as arguments.py
regnart-tech-club/python
069df070059de662d4104de8192e45407a7e94ce
[ "Apache-2.0" ]
1
2016-04-03T00:53:37.000Z
2016-04-03T00:53:37.000Z
def do_stuff(fn, lhs, rhs): return fn(lhs, rhs) def add(lhs, rhs): return lhs + rhs def multiply(lhs, rhs): return lhs * rhs def exponent(lhs, rhs): return lhs ** rhs print(do_stuff(add, 2, 3)) print(do_stuff(multiply, 2, 3)) print(do_stuff(exponent, 2, 3))
16.625
31
0.672932
49
266
3.571429
0.265306
0.274286
0.274286
0.257143
0.502857
0.24
0
0
0
0
0
0.027149
0.169173
266
15
32
17.733333
0.764706
0
0
0
0
0
0
0
0
0
0
0
0
1
0.363636
false
0
0
0.363636
0.727273
0.272727
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
5
08fe8ca56511b4058c753af76243d8290f2bfa19
53
py
Python
mne/datasets/hf_sef/__init__.py
fmamashli/mne-python
52f064415e7c9fa8fe243d22108dcdf3d86505b9
[ "BSD-3-Clause" ]
1,953
2015-01-17T20:33:46.000Z
2022-03-30T04:36:34.000Z
mne/datasets/hf_sef/__init__.py
fmamashli/mne-python
52f064415e7c9fa8fe243d22108dcdf3d86505b9
[ "BSD-3-Clause" ]
8,490
2015-01-01T13:04:18.000Z
2022-03-31T23:02:08.000Z
mne/datasets/hf_sef/__init__.py
fmamashli/mne-python
52f064415e7c9fa8fe243d22108dcdf3d86505b9
[ "BSD-3-Clause" ]
1,130
2015-01-08T22:39:27.000Z
2022-03-30T21:44:26.000Z
"""HF-SEF dataset.""" from .hf_sef import data_path
13.25
29
0.698113
9
53
3.888889
0.777778
0.285714
0
0
0
0
0
0
0
0
0
0
0.132075
53
3
30
17.666667
0.76087
0.283019
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
1c083ee8e064fe4ba0b5540528998288f160d2c1
271
py
Python
ambra_sdk/service/entrypoints/qctask.py
dicomgrid/sdk-python
bb12eed311bad73dfb863917df4dc5cbcd91a447
[ "Apache-2.0" ]
9
2020-04-20T23:45:44.000Z
2021-04-18T11:22:17.000Z
ambra_sdk/service/entrypoints/qctask.py
dicomgrid/sdk-python
bb12eed311bad73dfb863917df4dc5cbcd91a447
[ "Apache-2.0" ]
13
2020-02-08T16:15:05.000Z
2021-09-13T22:55:28.000Z
ambra_sdk/service/entrypoints/qctask.py
dicomgrid/sdk-python
bb12eed311bad73dfb863917df4dc5cbcd91a447
[ "Apache-2.0" ]
6
2020-03-25T17:47:45.000Z
2021-04-18T11:22:19.000Z
from ambra_sdk.service.entrypoints.generated.qctask import \ AsyncQctask as GAsyncQctask from ambra_sdk.service.entrypoints.generated.qctask import Qctask as GQctask class Qctask(GQctask): """Qctask.""" class AsyncQctask(GAsyncQctask): """AsyncQctask."""
22.583333
76
0.763838
30
271
6.833333
0.433333
0.087805
0.117073
0.185366
0.497561
0.497561
0.497561
0.497561
0
0
0
0
0.129151
271
11
77
24.636364
0.868644
0.073801
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.8
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1c0e817fc0ee1fd35c76cb6fa2a1819c108380ba
35
py
Python
stack/__init__.py
carlaTV/annotation_postprocessor
86d59993cd02720fa07eba8c52356a0fa5855959
[ "MIT" ]
null
null
null
stack/__init__.py
carlaTV/annotation_postprocessor
86d59993cd02720fa07eba8c52356a0fa5855959
[ "MIT" ]
null
null
null
stack/__init__.py
carlaTV/annotation_postprocessor
86d59993cd02720fa07eba8c52356a0fa5855959
[ "MIT" ]
null
null
null
from .stack import Stack del stack
11.666667
24
0.8
6
35
4.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.171429
35
2
25
17.5
0.965517
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
1c10803a76e37de0b2d0a60ee79145b9543b0dce
146
py
Python
qsy/__init__.py
soudy/qsy
928d40d735f76fe81f39e90f7cb3c6154d366690
[ "MIT" ]
26
2019-05-02T13:15:09.000Z
2022-01-21T23:35:46.000Z
qsy/__init__.py
soudy/qsy
928d40d735f76fe81f39e90f7cb3c6154d366690
[ "MIT" ]
1
2019-05-02T13:46:15.000Z
2019-05-02T22:30:51.000Z
qsy/__init__.py
soudy/qsy
928d40d735f76fe81f39e90f7cb3c6154d366690
[ "MIT" ]
1
2019-05-26T03:43:52.000Z
2019-05-26T03:43:52.000Z
from .quantum_register import QuantumRegister from .classical_register import ClassicalRegister import qsy.gates as gates __version__ = '0.4.4'
20.857143
49
0.828767
19
146
6.052632
0.684211
0.243478
0
0
0
0
0
0
0
0
0
0.023256
0.116438
146
6
50
24.333333
0.868217
0
0
0
0
0
0.034247
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
1c1b8cbbb9911756c0be8813675035cd093be12a
64
py
Python
knx_stack/decode/knxnet_ip/core/connectionstate/__init__.py
majamassarini/knx-stack
11a9baac6b7600649b5fbca43c93b200b23676b4
[ "MIT" ]
2
2021-07-28T07:42:28.000Z
2022-01-25T18:56:05.000Z
knx_stack/decode/knxnet_ip/core/connectionstate/__init__.py
majamassarini/knx-stack
11a9baac6b7600649b5fbca43c93b200b23676b4
[ "MIT" ]
6
2021-07-25T21:36:01.000Z
2022-02-20T21:11:31.000Z
knx_stack/decode/knxnet_ip/core/connectionstate/__init__.py
majamassarini/knx-stack
11a9baac6b7600649b5fbca43c93b200b23676b4
[ "MIT" ]
null
null
null
from knx_stack.decode.knxnet_ip.core.connectionstate import res
32
63
0.875
10
64
5.4
1
0
0
0
0
0
0
0
0
0
0
0
0.0625
64
1
64
64
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1c2dfd57a1efc1a0c1debe4d4a7acaa87383ef5d
61
py
Python
auth_main/__init__.py
ajskrilla/PAS_pw_check
056b09e2975b7e1d00c81180d4bdd71bfac91b4d
[ "Apache-2.0" ]
null
null
null
auth_main/__init__.py
ajskrilla/PAS_pw_check
056b09e2975b7e1d00c81180d4bdd71bfac91b4d
[ "Apache-2.0" ]
null
null
null
auth_main/__init__.py
ajskrilla/PAS_pw_check
056b09e2975b7e1d00c81180d4bdd71bfac91b4d
[ "Apache-2.0" ]
null
null
null
#from auth import saveConfig #from auth_check import sec_test
30.5
32
0.852459
10
61
5
0.7
0.32
0
0
0
0
0
0
0
0
0
0
0.114754
61
2
32
30.5
0.925926
0.95082
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
98e19749b8d68e7877342a0c1770df0a7cb14482
112
py
Python
RPiserver/scripts/alarm01.py
Chathura-Rathnayake/SoteriaX-RaspberryPi
292797a40c0ef3593bd2b3ea200d5664b5d2c3c6
[ "MIT" ]
null
null
null
RPiserver/scripts/alarm01.py
Chathura-Rathnayake/SoteriaX-RaspberryPi
292797a40c0ef3593bd2b3ea200d5664b5d2c3c6
[ "MIT" ]
null
null
null
RPiserver/scripts/alarm01.py
Chathura-Rathnayake/SoteriaX-RaspberryPi
292797a40c0ef3593bd2b3ea200d5664b5d2c3c6
[ "MIT" ]
null
null
null
import os os.system('mpg321 /home/pi/RPiserver/alarms/1.mp3') print("Alarm01 played successfully") # play sound
22.4
51
0.767857
17
112
5.058824
0.941176
0
0
0
0
0
0
0
0
0
0
0.068627
0.089286
112
4
52
28
0.77451
0.089286
0
0
0
0
0.65
0.31
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
98f5f59c22db6900ada62abb757f90b99148798a
231
py
Python
project/apps/portfolio/apps.py
mahdimehrabi/django-portfolio-app
987bbfe6dce151f1b32e69ee833b71db636e933f
[ "MIT" ]
4
2021-08-11T15:23:32.000Z
2021-12-31T02:55:33.000Z
project/apps/portfolio/apps.py
mahdimehrabi/django-portfolio-app
987bbfe6dce151f1b32e69ee833b71db636e933f
[ "MIT" ]
null
null
null
project/apps/portfolio/apps.py
mahdimehrabi/django-portfolio-app
987bbfe6dce151f1b32e69ee833b71db636e933f
[ "MIT" ]
null
null
null
from django.apps import AppConfig class PortfolioConfig(AppConfig): default_auto_field = 'django.db.models.BigAutoField' name = 'project.apps.portfolio' def ready(self): import project.apps.portfolio.signals
23.1
56
0.74026
27
231
6.259259
0.740741
0.130178
0.236686
0
0
0
0
0
0
0
0
0
0.168831
231
9
57
25.666667
0.880208
0
0
0
0
0
0.220779
0.220779
0
0
0
0
0
1
0.166667
false
0
0.333333
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
98f8a847d70749f3e847d5357e6652d6fc12b9cb
56
py
Python
boa3_test/test_sc/built_in_methods_test/CountStrTooFewArguments.py
hal0x2328/neo3-boa
6825a3533384cb01660773050719402a9703065b
[ "Apache-2.0" ]
25
2020-07-22T19:37:43.000Z
2022-03-08T03:23:55.000Z
boa3_test/test_sc/built_in_methods_test/CountStrTooFewArguments.py
hal0x2328/neo3-boa
6825a3533384cb01660773050719402a9703065b
[ "Apache-2.0" ]
419
2020-04-23T17:48:14.000Z
2022-03-31T13:17:45.000Z
boa3_test/test_sc/built_in_methods_test/CountStrTooFewArguments.py
hal0x2328/neo3-boa
6825a3533384cb01660773050719402a9703065b
[ "Apache-2.0" ]
15
2020-05-21T21:54:24.000Z
2021-11-18T06:17:24.000Z
def main(string: str) -> int: return string.count()
18.666667
29
0.642857
8
56
4.5
0.875
0
0
0
0
0
0
0
0
0
0
0
0.196429
56
2
30
28
0.8
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
c708e14c9e4a5384e1536813a2d452e4368764ad
5,588
py
Python
resources/dot_PyCharm/system/python_stubs/-762174762/PySide/QtGui/QVector3D.py
basepipe/developer_onboarding
05b6a776f8974c89517868131b201f11c6c2a5ad
[ "MIT" ]
1
2020-04-20T02:27:20.000Z
2020-04-20T02:27:20.000Z
resources/dot_PyCharm/system/python_stubs/cache/8cdc475d469a13122bc4bc6c3ac1c215d93d5f120f5cc1ef33a8f3088ee54d8e/PySide/QtGui/QVector3D.py
basepipe/developer_onboarding
05b6a776f8974c89517868131b201f11c6c2a5ad
[ "MIT" ]
null
null
null
resources/dot_PyCharm/system/python_stubs/cache/8cdc475d469a13122bc4bc6c3ac1c215d93d5f120f5cc1ef33a8f3088ee54d8e/PySide/QtGui/QVector3D.py
basepipe/developer_onboarding
05b6a776f8974c89517868131b201f11c6c2a5ad
[ "MIT" ]
null
null
null
# encoding: utf-8 # module PySide.QtGui # from C:\Python27\lib\site-packages\PySide\QtGui.pyd # by generator 1.147 # no doc # imports import PySide.QtCore as __PySide_QtCore import Shiboken as __Shiboken class QVector3D(__Shiboken.Object): # no doc def crossProduct(self, *args, **kwargs): # real signature unknown pass def distanceToLine(self, *args, **kwargs): # real signature unknown pass def distanceToPlane(self, *args, **kwargs): # real signature unknown pass def dotProduct(self, *args, **kwargs): # real signature unknown pass def isNull(self, *args, **kwargs): # real signature unknown pass def length(self, *args, **kwargs): # real signature unknown pass def lengthSquared(self, *args, **kwargs): # real signature unknown pass def normal(self, *args, **kwargs): # real signature unknown pass def normalize(self, *args, **kwargs): # real signature unknown pass def normalized(self, *args, **kwargs): # real signature unknown pass def setX(self, *args, **kwargs): # real signature unknown pass def setY(self, *args, **kwargs): # real signature unknown pass def setZ(self, *args, **kwargs): # real signature unknown pass def toPoint(self, *args, **kwargs): # real signature unknown pass def toPointF(self, *args, **kwargs): # real signature unknown pass def toTuple(self, *args, **kwargs): # real signature unknown pass def toVector2D(self, *args, **kwargs): # real signature unknown pass def toVector4D(self, *args, **kwargs): # real signature unknown pass def x(self, *args, **kwargs): # real signature unknown pass def y(self, *args, **kwargs): # real signature unknown pass def z(self, *args, **kwargs): # real signature unknown pass def __add__(self, y): # real signature unknown; restored from __doc__ """ x.__add__(y) <==> x+y """ pass def __copy__(self, *args, **kwargs): # real signature unknown pass def __div__(self, y): # real signature unknown; restored from __doc__ """ x.__div__(y) <==> x/y """ pass def __eq__(self, y): # real signature unknown; restored from __doc__ """ 
x.__eq__(y) <==> x==y """ pass def __ge__(self, y): # real signature unknown; restored from __doc__ """ x.__ge__(y) <==> x>=y """ pass def __gt__(self, y): # real signature unknown; restored from __doc__ """ x.__gt__(y) <==> x>y """ pass def __iadd__(self, y): # real signature unknown; restored from __doc__ """ x.__iadd__(y) <==> x+=y """ pass def __init__(self, *args, **kwargs): # real signature unknown pass def __isub__(self, y): # real signature unknown; restored from __doc__ """ x.__isub__(y) <==> x-=y """ pass def __le__(self, y): # real signature unknown; restored from __doc__ """ x.__le__(y) <==> x<=y """ pass def __lshift__(self, y): # real signature unknown; restored from __doc__ """ x.__lshift__(y) <==> x<<y """ pass def __lt__(self, y): # real signature unknown; restored from __doc__ """ x.__lt__(y) <==> x<y """ pass def __mul__(self, y): # real signature unknown; restored from __doc__ """ x.__mul__(y) <==> x*y """ pass def __neg__(self): # real signature unknown; restored from __doc__ """ x.__neg__() <==> -x """ pass @staticmethod # known case of __new__ def __new__(S, *more): # real signature unknown; restored from __doc__ """ T.__new__(S, ...) 
-> a new object with type S, a subtype of T """ pass def __ne__(self, y): # real signature unknown; restored from __doc__ """ x.__ne__(y) <==> x!=y """ pass def __nonzero__(self): # real signature unknown; restored from __doc__ """ x.__nonzero__() <==> x != 0 """ pass def __radd__(self, y): # real signature unknown; restored from __doc__ """ x.__radd__(y) <==> y+x """ pass def __rdiv__(self, y): # real signature unknown; restored from __doc__ """ x.__rdiv__(y) <==> y/x """ pass def __reduce__(self, *args, **kwargs): # real signature unknown pass def __repr__(self): # real signature unknown; restored from __doc__ """ x.__repr__() <==> repr(x) """ pass def __rlshift__(self, y): # real signature unknown; restored from __doc__ """ x.__rlshift__(y) <==> y<<x """ pass def __rmul__(self, y): # real signature unknown; restored from __doc__ """ x.__rmul__(y) <==> y*x """ pass def __rrshift__(self, y): # real signature unknown; restored from __doc__ """ x.__rrshift__(y) <==> y>>x """ pass def __rshift__(self, y): # real signature unknown; restored from __doc__ """ x.__rshift__(y) <==> x>>y """ pass def __rsub__(self, y): # real signature unknown; restored from __doc__ """ x.__rsub__(y) <==> y-x """ pass def __rtruediv__(self, y): # real signature unknown; restored from __doc__ """ x.__rtruediv__(y) <==> y/x """ pass def __sub__(self, y): # real signature unknown; restored from __doc__ """ x.__sub__(y) <==> x-y """ pass def __truediv__(self, y): # real signature unknown; restored from __doc__ """ x.__truediv__(y) <==> x/y """ pass
29.104167
78
0.584467
676
5,588
4.335799
0.14497
0.221767
0.34118
0.248379
0.767997
0.696349
0.684408
0.684408
0.307745
0
0
0.002723
0.277022
5,588
191
79
29.256545
0.722772
0.461346
0
0.480769
0
0
0
0
0
0
0
0
0
1
0.480769
false
0.480769
0.019231
0
0.509615
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
c738ef5662c09e134b13ea7012e1f43eea8f89c7
199
py
Python
tests/io/test_permissions.py
allenmichael/pyTenable
8372cfdf3ced99de50227f6fbb37d6db2b26291e
[ "MIT" ]
1
2022-03-01T17:17:19.000Z
2022-03-01T17:17:19.000Z
tests/io/test_permissions.py
allenmichael/pyTenable
8372cfdf3ced99de50227f6fbb37d6db2b26291e
[ "MIT" ]
25
2021-11-16T18:41:36.000Z
2022-03-25T05:43:31.000Z
tests/io/test_permissions.py
allenmichael/pyTenable
8372cfdf3ced99de50227f6fbb37d6db2b26291e
[ "MIT" ]
2
2022-03-02T12:24:40.000Z
2022-03-29T05:12:04.000Z
from tenable.errors import * from ..checker import check, single import pytest ### ### The permissions module is leveraged exclusively by the ### scanners module. all tests are performed there. ###
24.875
58
0.748744
26
199
5.730769
0.807692
0
0
0
0
0
0
0
0
0
0
0
0.155779
199
8
59
24.875
0.886905
0.517588
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5