hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
168814e706f0f076004da9a77e1b4e86694c3054
100
py
Python
beginner/1143.py
JoaoMisutaniAlves/URI_problems
18bdbec5f3c3e315c035fc0d89c41722098e3f79
[ "MIT" ]
null
null
null
beginner/1143.py
JoaoMisutaniAlves/URI_problems
18bdbec5f3c3e315c035fc0d89c41722098e3f79
[ "MIT" ]
null
null
null
beginner/1143.py
JoaoMisutaniAlves/URI_problems
18bdbec5f3c3e315c035fc0d89c41722098e3f79
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- for I in range(int(input())): print("%i %i %i"%(I+1,(I+1)**2,(I+1)**3))
25
45
0.45
21
100
2.142857
0.619048
0.133333
0.133333
0
0
0
0
0
0
0
0
0.071429
0.16
100
4
45
25
0.464286
0.21
0
0
0
0
0.102564
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
169c742cc546554c008ccd3ebb7fbbd27eb2a786
207
py
Python
tests/constants.py
m-mead/pip-tools
37ce9e36d6033ede0667a1b293cd16843a85be4d
[ "BSD-3-Clause" ]
4,085
2017-02-17T08:51:25.000Z
2022-03-31T22:44:12.000Z
tests/constants.py
m-mead/pip-tools
37ce9e36d6033ede0667a1b293cd16843a85be4d
[ "BSD-3-Clause" ]
1,173
2017-02-17T16:50:44.000Z
2022-03-31T20:14:19.000Z
tests/constants.py
astrojuanlu/pip-tools
4776ac99fb8a396f6d2ed1a4370fd3b2e6875940
[ "BSD-3-Clause" ]
351
2017-02-17T17:33:08.000Z
2022-03-30T13:33:38.000Z
import os TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), "test_data") MINIMAL_WHEELS_PATH = os.path.join(TEST_DATA_PATH, "minimal_wheels") PACKAGES_PATH = os.path.join(TEST_DATA_PATH, "packages")
34.5
69
0.792271
34
207
4.382353
0.323529
0.214765
0.241611
0.281879
0.348993
0.348993
0.348993
0
0
0
0
0
0.072464
207
5
70
41.4
0.776042
0
0
0
0
0
0.149758
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
16cb6fc3f61558cdc27dc4686a94b9e74a947c60
198
py
Python
src/phd_qmclib/mrbp_qmc/dmc_exec/__init__.py
oarodriguez/PhD-QMCLib
c3d0197a92f5c6954839f7e2689b725f72448988
[ "BSD-3-Clause" ]
null
null
null
src/phd_qmclib/mrbp_qmc/dmc_exec/__init__.py
oarodriguez/PhD-QMCLib
c3d0197a92f5c6954839f7e2689b725f72448988
[ "BSD-3-Clause" ]
null
null
null
src/phd_qmclib/mrbp_qmc/dmc_exec/__init__.py
oarodriguez/PhD-QMCLib
c3d0197a92f5c6954839f7e2689b725f72448988
[ "BSD-3-Clause" ]
null
null
null
""" """ # from . import config # from .cli_app import AppSpec, CLIApp # from .io import HDF5FileHandler # from .proc import ( DensityEstSpec, ModelSysConfSpec, Proc, ProcInput, SSFEstSpec )
11.647059
65
0.707071
21
198
6.619048
0.666667
0
0
0
0
0
0
0
0
0
0
0.006211
0.186869
198
16
66
12.375
0.857143
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
16d0a1986ded6e0637211aff05bcb8f980e6327c
372
py
Python
flanautils/__init__.py
AlberLC/flanautils
e7fe5ca5b941cb680ade9714c311de56bf81c2de
[ "MIT" ]
null
null
null
flanautils/__init__.py
AlberLC/flanautils
e7fe5ca5b941cb680ade9714c311de56bf81c2de
[ "MIT" ]
null
null
null
flanautils/__init__.py
AlberLC/flanautils
e7fe5ca5b941cb680ade9714c311de56bf81c2de
[ "MIT" ]
null
null
null
from flanautils.asyncs import * from flanautils.constants import * from flanautils.data_structures import * from flanautils.decorators import * from flanautils.exceptions import * from flanautils.iterables import * from flanautils.maths import * from flanautils.medias import * from flanautils.models import * from flanautils.oss import * from flanautils.strings import *
31
40
0.822581
45
372
6.777778
0.333333
0.504918
0.655738
0
0
0
0
0
0
0
0
0
0.11828
372
11
41
33.818182
0.929878
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
16d5c30d18b4541b8a0a965b8205ad06a0674b2c
291
py
Python
virtual/lib/python3.6/site-packages/pylint/test/extensions/data/overlapping_exceptions_py33.py
drewheathens/The-Moringa-Tribune
98ee4d63c9df6f1f7497fc6876960a822d914500
[ "MIT" ]
463
2015-01-15T08:17:42.000Z
2022-03-28T15:10:20.000Z
virtual/lib/python3.6/site-packages/pylint/test/extensions/data/overlapping_exceptions_py33.py
drewheathens/The-Moringa-Tribune
98ee4d63c9df6f1f7497fc6876960a822d914500
[ "MIT" ]
52
2015-01-06T02:43:59.000Z
2022-03-14T11:15:21.000Z
virtual/lib/python3.6/site-packages/pylint/test/extensions/data/overlapping_exceptions_py33.py
drewheathens/The-Moringa-Tribune
98ee4d63c9df6f1f7497fc6876960a822d914500
[ "MIT" ]
249
2015-01-07T22:49:49.000Z
2022-03-18T02:32:06.000Z
# pylint: disable=missing-docstring import socket try: pass except (IOError, OSError): # [overlapping-except] pass try: pass except (socket.error, OSError): # [overlapping-except] pass try: pass except (ConnectionError, socket.error): # [overlapping-except] pass
15.315789
62
0.690722
32
291
6.28125
0.4375
0.104478
0.19403
0.278607
0.40796
0.40796
0.40796
0
0
0
0
0
0.195876
291
18
63
16.166667
0.858974
0.329897
0
0.692308
0
0
0
0
0
0
0
0
0
1
0
true
0.461538
0.076923
0
0.076923
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
16de1c8197cb102a9f41a5aa3d264c224d907912
276
py
Python
slixmpp/plugins/xep_0437/__init__.py
anirudhrata/slixmpp
1fcee0e80a212eeb274d2f560e69099d8a61bf7f
[ "BSD-3-Clause" ]
86
2016-07-04T13:26:02.000Z
2022-02-19T10:26:21.000Z
slixmpp/plugins/xep_0437/__init__.py
anirudhrata/slixmpp
1fcee0e80a212eeb274d2f560e69099d8a61bf7f
[ "BSD-3-Clause" ]
10
2016-09-30T18:55:41.000Z
2020-05-01T14:22:47.000Z
slixmpp/plugins/xep_0437/__init__.py
anirudhrata/slixmpp
1fcee0e80a212eeb274d2f560e69099d8a61bf7f
[ "BSD-3-Clause" ]
45
2016-09-30T18:48:41.000Z
2022-03-18T21:39:33.000Z
# Slixmpp: The Slick XMPP Library # Copyright (C) 2020 Mathieu Pasquet # This file is part of Slixmpp. # See the file LICENSE for copying permission. from slixmpp.plugins.base import register_plugin from slixmpp.plugins.xep_0437.rai import XEP_0437 register_plugin(XEP_0437)
30.666667
49
0.807971
43
276
5.069767
0.674419
0.09633
0.165138
0
0
0
0
0
0
0
0
0.066946
0.134058
276
8
50
34.5
0.845188
0.51087
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
16e8009042e7975d1d3f6dd8e52657df59b4c263
198
py
Python
JOSS/make.py
usnistgov/chebby
75dbccfd9a029e91cbfdfd263befc51b893822ea
[ "MIT" ]
27
2017-03-23T19:09:10.000Z
2022-01-09T09:00:40.000Z
JOSS/make.py
usnistgov/chebby
75dbccfd9a029e91cbfdfd263befc51b893822ea
[ "MIT" ]
11
2017-10-07T23:44:53.000Z
2021-09-01T12:33:56.000Z
JOSS/make.py
usnistgov/chebby
75dbccfd9a029e91cbfdfd263befc51b893822ea
[ "MIT" ]
9
2016-12-11T04:24:36.000Z
2020-08-09T04:35:59.000Z
import subprocess, sys subprocess.check_call('pandoc --filter pandoc-citeproc --bibliography paper.bib paper.md -o paper.pdf', shell=True,stdout=sys.stdout,stderr=sys.stderr)
49.5
104
0.707071
26
198
5.346154
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.176768
198
4
105
49.5
0.852761
0
0
0
0
0
0.39899
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
bc579599d0d22fc28327a1b8e19b46d32e17711c
56
py
Python
flask_app/__init__.py
razzlestorm/quake-ds-pt9
82a92c7c34838ff5f9960a12db1c627ce44f9e7d
[ "MIT" ]
null
null
null
flask_app/__init__.py
razzlestorm/quake-ds-pt9
82a92c7c34838ff5f9960a12db1c627ce44f9e7d
[ "MIT" ]
null
null
null
flask_app/__init__.py
razzlestorm/quake-ds-pt9
82a92c7c34838ff5f9960a12db1c627ce44f9e7d
[ "MIT" ]
null
null
null
from application import create_app APP = create_app()
11.2
34
0.785714
8
56
5.25
0.625
0.428571
0
0
0
0
0
0
0
0
0
0
0.160714
56
4
35
14
0.893617
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
bc8b03a4bba6660e117ca1372889bc05fc549198
156
py
Python
python/src/base/class_test/__init__.py
weiwei02/Technical--Documentation
d53d702b17cbeb9e4940764c6e4a4277382ec0cf
[ "Apache-2.0" ]
2
2017-06-25T13:30:40.000Z
2017-09-18T16:50:40.000Z
python/src/base/class_test/__init__.py
weiwei02/Technical--Documentation
d53d702b17cbeb9e4940764c6e4a4277382ec0cf
[ "Apache-2.0" ]
null
null
null
python/src/base/class_test/__init__.py
weiwei02/Technical--Documentation
d53d702b17cbeb9e4940764c6e4a4277382ec0cf
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 """ :author Wang Weiwei <email>weiwei02@vip.qq.com / weiwei.wang@100credit.com</email> :sine 2017/8/11 :version 1.0 """
22.285714
87
0.641026
24
156
4.166667
0.833333
0
0
0
0
0
0
0
0
0
0
0.116279
0.173077
156
7
88
22.285714
0.658915
0.858974
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
4c134d2a69700fcaf1ee4fca6fcc8ada465cb11c
147
py
Python
kii/results/startupload.py
ta2xeo/python3-kii
892da42601318bcc15e70378614be76d68681881
[ "MIT" ]
2
2018-02-04T21:16:02.000Z
2021-12-01T16:51:43.000Z
kii/results/startupload.py
ta2xeo/python3-kii
892da42601318bcc15e70378614be76d68681881
[ "MIT" ]
null
null
null
kii/results/startupload.py
ta2xeo/python3-kii
892da42601318bcc15e70378614be76d68681881
[ "MIT" ]
null
null
null
from .base import BaseResult class StartUploadResult(BaseResult): @property def upload_id(self): return self._result['uploadID']
18.375
39
0.714286
16
147
6.4375
0.875
0
0
0
0
0
0
0
0
0
0
0
0.197279
147
7
40
21
0.872881
0
0
0
0
0
0.054422
0
0
0
0
0
0
1
0.2
false
0
0.2
0.2
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
4
4c21b287417c95ba3e6214c7979678e5d558c8e9
89
py
Python
ytcookiecutter/custom_types.py
chrishavlin/dxlcookiecuttertest
b297760506d65e42f546a2051c3b8d2f1e7167b7
[ "BSD-3-Clause" ]
null
null
null
ytcookiecutter/custom_types.py
chrishavlin/dxlcookiecuttertest
b297760506d65e42f546a2051c3b8d2f1e7167b7
[ "BSD-3-Clause" ]
1
2022-03-23T23:22:54.000Z
2022-03-23T23:22:54.000Z
ytcookiecutter/custom_types.py
chrishavlin/dxlcookiecuttertest
b297760506d65e42f546a2051c3b8d2f1e7167b7
[ "BSD-3-Clause" ]
1
2021-10-20T19:37:13.000Z
2021-10-20T19:37:13.000Z
from typing import Union from pathlib import PosixPath filelike = Union[str, PosixPath]
17.8
32
0.808989
12
89
6
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.146067
89
4
33
22.25
0.947368
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
4c284292771b46c8d563c378a6d3819655fa54c4
90
py
Python
tests/test_data/testpkgs/pkg1/sub/__main__.py
r3m0t/debugpy
090e3c3ef5758e5b316514c9d6f44f9b9b488cf1
[ "MIT" ]
695
2020-01-30T14:34:51.000Z
2022-03-31T09:31:57.000Z
tests/test_data/testpkgs/pkg1/sub/__main__.py
r3m0t/debugpy
090e3c3ef5758e5b316514c9d6f44f9b9b488cf1
[ "MIT" ]
845
2020-01-29T23:53:36.000Z
2022-03-31T19:45:04.000Z
tests/test_data/testpkgs/pkg1/sub/__main__.py
r3m0t/debugpy
090e3c3ef5758e5b316514c9d6f44f9b9b488cf1
[ "MIT" ]
66
2020-01-30T13:10:38.000Z
2022-03-29T07:11:17.000Z
import debuggee from debuggee import backchannel debuggee.setup() backchannel.send("ok")
15
32
0.811111
11
90
6.636364
0.636364
0
0
0
0
0
0
0
0
0
0
0
0.1
90
5
33
18
0.901235
0
0
0
0
0
0.022222
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
4c38a5af19b7d49eeb5d58358211032bc42160b7
72
py
Python
app/backend/visualization/helpers/__init__.py
admariner/social-media-profiler
2001167201fc9602fef3070ee9d31f005978bfe8
[ "MIT" ]
34
2020-12-14T15:48:26.000Z
2022-02-27T14:24:29.000Z
app/backend/visualization/helpers/__init__.py
pandrey2003/social-media-profiler
4160e318997d161d63b8233511a65669542da026
[ "MIT" ]
1
2021-12-15T02:37:32.000Z
2021-12-15T02:37:32.000Z
app/backend/visualization/helpers/__init__.py
admariner/social-media-profiler
2001167201fc9602fef3070ee9d31f005978bfe8
[ "MIT" ]
6
2021-02-11T16:29:04.000Z
2022-03-23T11:42:32.000Z
# -*- coding: utf-8 -*- """The helpers for the visualization module."""
24
47
0.625
9
72
5
0.888889
0
0
0
0
0
0
0
0
0
0
0.016393
0.152778
72
2
48
36
0.721311
0.888889
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
4c4512c0405a6012a9abb4105a8005e49717c199
1,265
py
Python
aiotdlib/api/functions/set_passport_element.py
jraylan/aiotdlib
4528fcfca7c5c69b54a878ce6ce60e934a2dcc73
[ "MIT" ]
37
2021-05-04T10:41:41.000Z
2022-03-30T13:48:05.000Z
aiotdlib/api/functions/set_passport_element.py
jraylan/aiotdlib
4528fcfca7c5c69b54a878ce6ce60e934a2dcc73
[ "MIT" ]
13
2021-07-17T19:54:51.000Z
2022-02-26T06:50:00.000Z
aiotdlib/api/functions/set_passport_element.py
jraylan/aiotdlib
4528fcfca7c5c69b54a878ce6ce60e934a2dcc73
[ "MIT" ]
7
2021-09-22T21:27:11.000Z
2022-02-20T02:33:19.000Z
# =============================================================================== # # # # This file has been generated automatically!! Do not change this manually! # # # # =============================================================================== # from __future__ import annotations from pydantic import Field from ..base_object import BaseObject from ..types import InputPassportElement class SetPassportElement(BaseObject): """ Adds an element to the user's Telegram Passport. May return an error with a message "PHONE_VERIFICATION_NEEDED" or "EMAIL_VERIFICATION_NEEDED" if the chosen phone number or the chosen email address must be verified first :param element: Input Telegram Passport element :type element: :class:`InputPassportElement` :param password: Password of the current user :type password: :class:`str` """ ID: str = Field("setPassportElement", alias="@type") element: InputPassportElement password: str @staticmethod def read(q: dict) -> SetPassportElement: return SetPassportElement.construct(**q)
38.333333
224
0.537549
109
1,265
6.155963
0.605505
0.04769
0
0
0
0
0
0
0
0
0
0
0.279842
1,265
32
225
39.53125
0.736553
0.628459
0
0
1
0
0.055422
0
0
0
0
0
0
1
0.090909
false
0.636364
0.363636
0.090909
0.909091
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
4
4c474101306d4f8773056636600f2f6eed48ab89
188
py
Python
WebMirror/management/rss_parser_funcs/feed_parse_extractTakeyourhearttranslation000WebhostappCom.py
fake-name/ReadableWebProxy
ed5c7abe38706acc2684a1e6cd80242a03c5f010
[ "BSD-3-Clause" ]
193
2016-08-02T22:04:35.000Z
2022-03-09T20:45:41.000Z
WebMirror/management/rss_parser_funcs/feed_parse_extractTakeyourhearttranslation000WebhostappCom.py
fake-name/ReadableWebProxy
ed5c7abe38706acc2684a1e6cd80242a03c5f010
[ "BSD-3-Clause" ]
533
2016-08-23T20:48:23.000Z
2022-03-28T15:55:13.000Z
WebMirror/management/rss_parser_funcs/feed_parse_extractTakeyourhearttranslation000WebhostappCom.py
rrosajp/ReadableWebProxy
ed5c7abe38706acc2684a1e6cd80242a03c5f010
[ "BSD-3-Clause" ]
19
2015-08-13T18:01:08.000Z
2021-07-12T17:13:09.000Z
def extractTakeyourhearttranslation000WebhostappCom(item): ''' Parser for 'takeyourhearttranslation.000webhostapp.com' ''' # Is trying to force a garbage app shit thing. return None
26.857143
58
0.787234
19
188
7.789474
1
0
0
0
0
0
0
0
0
0
0
0.03681
0.132979
188
7
59
26.857143
0.871166
0.537234
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
4c507fbce484f5091f34770d22e91b0577a8ca0e
203
py
Python
static_typing/_logging.py
mbdevpl/static-typing
db5e6416aa8169f0252c199793c304fd99b152ef
[ "Apache-2.0" ]
3
2017-07-11T11:55:15.000Z
2021-06-28T06:00:54.000Z
static_typing/_logging.py
mbdevpl/static-typing
db5e6416aa8169f0252c199793c304fd99b152ef
[ "Apache-2.0" ]
null
null
null
static_typing/_logging.py
mbdevpl/static-typing
db5e6416aa8169f0252c199793c304fd99b152ef
[ "Apache-2.0" ]
3
2019-11-26T21:07:35.000Z
2022-02-07T08:19:19.000Z
"""Default logging mechanism for static_typing module.""" import logging import os logging.basicConfig( level=getattr(logging, os.environ.get('LOGGING_LEVEL', 'warning').upper(), logging.WARNING))
25.375
96
0.758621
25
203
6.08
0.64
0
0
0
0
0
0
0
0
0
0
0
0.103448
203
7
97
29
0.835165
0.251232
0
0
0
0
0.136986
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
4c65fabc3008fb5815705aba257b1edf3a187a83
238
py
Python
(1)Text/re_groups.py
mass9/Python
66499164e36a4fe9630029d34b292ab06f849b2f
[ "MIT" ]
null
null
null
(1)Text/re_groups.py
mass9/Python
66499164e36a4fe9630029d34b292ab06f849b2f
[ "MIT" ]
null
null
null
(1)Text/re_groups.py
mass9/Python
66499164e36a4fe9630029d34b292ab06f849b2f
[ "MIT" ]
null
null
null
from re_test_patterns import test_patterns test_patterns( 'abbaaabbbbaaaaa', [('a(ab)', 'a followed by literal ab'), ('a(a*b*)', 'a followed by 0-n a and 0-n b'), ('a(ab)*', 'a followed by 0-n ab'), ('a(ab)+', 'a followed by 1-n ab')], )
26.444444
45
0.62605
46
238
3.152174
0.347826
0.103448
0.303448
0.248276
0.393103
0
0
0
0
0
0
0.019608
0.142857
238
8
46
29.75
0.691176
0
0
0
0
0
0.554622
0
0
0
0
0
0
1
0
true
0
0.125
0
0.125
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
4c66aa1ef16544fda8f77f9f0d2b98ec724ddba3
254
py
Python
lightning/__init__.py
lightning-viz/lightning-python
68563e1da82d162d204069d7586f7c695b8bd4a6
[ "MIT" ]
176
2015-01-21T00:05:53.000Z
2022-01-14T07:59:37.000Z
lightning/__init__.py
alirezarezvani/lightning-python
68563e1da82d162d204069d7586f7c695b8bd4a6
[ "MIT" ]
37
2015-01-21T06:08:50.000Z
2020-03-17T06:30:47.000Z
lightning/__init__.py
alirezarezvani/lightning-python
68563e1da82d162d204069d7586f7c695b8bd4a6
[ "MIT" ]
37
2015-01-24T16:57:11.000Z
2021-09-04T16:47:02.000Z
from .main import Lightning from .session import Session from .visualization import Visualization, VisualizationLocal from .types.plots import * from .types.images import * from .types.streaming import * from .types.three import * __version__ = "1.2.1"
25.4
60
0.787402
33
254
5.939394
0.454545
0.183673
0.229592
0
0
0
0
0
0
0
0
0.013575
0.129921
254
9
61
28.222222
0.873303
0
0
0
0
0
0.019685
0
0
0
0
0
0
1
0
false
0
0.875
0
0.875
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
4c6f020be43b7b24a7466f9741619411137fa8d9
58,918
py
Python
mmtbx/regression/tst_pdbtools.py
TheApacheCats/cctbx_project
94e3e85dd6385f0dc3f45077b743757d22b19391
[ "BSD-3-Clause-LBNL" ]
2
2021-03-18T12:31:57.000Z
2022-03-14T06:27:06.000Z
mmtbx/regression/tst_pdbtools.py
indu-in/cctbx_project1
e09447ddc2ba3aa9d91b21008b0162ab290b0c30
[ "BSD-3-Clause-LBNL" ]
null
null
null
mmtbx/regression/tst_pdbtools.py
indu-in/cctbx_project1
e09447ddc2ba3aa9d91b21008b0162ab290b0c30
[ "BSD-3-Clause-LBNL" ]
1
2020-02-04T15:39:06.000Z
2020-02-04T15:39:06.000Z
from __future__ import absolute_import, division, print_function #import libtbx.load_env import sys, os, math from cctbx.array_family import flex from libtbx.utils import remove_files, search_for from mmtbx import utils from libtbx.test_utils import approx_equal, not_approx_equal, run_command, \ show_diff import iotbx.pdb.hierarchy from scitbx.array_family import flex from cctbx import adptbx import mmtbx.model import iotbx.pdb from six.moves import cStringIO as StringIO from six.moves import zip class xray_structure_plus(object): def __init__(self, file_name): log = StringIO() pdb_inp = iotbx.pdb.input(file_name=file_name) self.model = mmtbx.model.manager( model_input = pdb_inp, process_input = True, log = log) self.xray_structure = self.model.get_xray_structure() self.all_chain_proxies = self.model.all_chain_proxies uc = self.model.get_xray_structure().unit_cell() self.occ = self.xray_structure.scatterers().extract_occupancies() self.u_iso = self.xray_structure.scatterers().extract_u_iso() self.u_cart = self.xray_structure.scatterers().extract_u_cart(uc) self.sites_cart = self.xray_structure.sites_cart() self.use_u_iso = self.xray_structure.use_u_iso() self.use_u_aniso = self.xray_structure.use_u_aniso() self.u_iso_used = self.u_iso.select(self.use_u_iso) self.u_cart_used = self.u_cart.select(self.use_u_aniso) self.u_iso_not_used = self.u_iso.select(~self.use_u_iso) self.u_cart_not_used = self.u_cart.select(~self.use_u_aniso) def selection(self, selection_strings): return utils.get_atom_selections(iselection = False, model = self.model, selection_strings= selection_strings)[0] def exercise_basic(): pdb_str = """ CRYST1 12.000 11.000 13.000 80.00 70.00 100.00 P 1 1 ATOM 1 CB PHE A 1 7.767 5.853 7.671 1.00 17.45 C ANISOU 1 CB PHE A 1 1090 2307 3233 1244 1260 2218 C ATOM 2 CG PHE A 1 6.935 5.032 8.622 1.00 17.61 C ANISOU 2 CG PHE A 1 1258 2172 3262 1259 1261 2264 C ATOM 3 CD1 PHE A 1 5.918 4.176 8.140 1.00 17.66 C ANISOU 3 CD1 PHE A 1 1321 2083 3304 1237 1214 2356 C 
ATOM 4 CD2 PHE A 1 7.161 5.107 10.012 1.00 17.82 C ANISOU 4 CD2 PHE A 1 1374 2150 3248 1279 1308 2217 C ATOM 5 CE1 PHE A 1 5.126 3.395 9.038 1.00 17.96 C ANISOU 5 CE1 PHE A 1 1508 1983 3333 1224 1216 2398 C ATOM 6 CE2 PHE A 1 6.382 4.336 10.930 1.00 18.17 C ANISOU 6 CE2 PHE A 1 1573 2056 3276 1258 1310 2261 C ATOM 7 CZ PHE A 1 5.360 3.476 10.439 1.00 18.27 C ANISOU 7 CZ PHE A 1 1645 1979 3318 1226 1265 2351 C ATOM 8 C PHE A 1 7.956 7.811 6.133 1.00 17.21 C ANISOU 8 C PHE A 1 816 2415 3307 1079 1332 2287 C ATOM 9 O PHE A 1 8.506 7.237 5.169 1.00 17.39 O ANISOU 9 O PHE A 1 818 2555 3234 1097 1244 2232 O ATOM 10 OXT PHE A 1 8.143 9.010 6.428 1.00 17.15 O ANISOU 10 OXT PHE A 1 737 2422 3356 1020 1411 2296 O ATOM 13 N PHE A 1 5.875 6.461 6.183 1.00 17.13 N ANISOU 13 N PHE A 1 934 2156 3419 1076 1303 2452 N ATOM 15 CA PHE A 1 7.000 7.000 7.000 1.00 17.18 C ANISOU 15 CA PHE A 1 931 2249 3347 1119 1342 2337 C ATOM 13 CB PHE B 2 11.715 4.672 7.185 1.00 34.89 C ATOM 14 CG PHE B 2 10.876 4.117 8.301 1.00 35.22 C ATOM 15 CD1 PHE B 2 10.127 2.966 8.118 1.00 35.30 C ATOM 16 CD2 PHE B 2 10.836 4.746 9.534 1.00 35.63 C ATOM 17 CE1 PHE B 2 9.355 2.454 9.143 1.00 35.88 C ATOM 18 CE2 PHE B 2 10.066 4.239 10.563 1.00 36.30 C ATOM 19 CZ PHE B 2 9.324 3.091 10.367 1.00 36.47 C ATOM 20 C PHE B 2 11.961 6.316 5.313 1.00 34.38 C ATOM 21 O PHE B 2 11.902 5.976 4.132 1.00 34.72 O ATOM 22 OXT PHE B 2 12.817 7.140 5.635 1.00 34.24 O ATOM 23 N PHE B 2 9.817 5.175 5.710 1.00 34.25 N ATOM 24 CA PHE B 2 11.002 5.735 6.347 1.00 34.35 C ATOM 1 CB PHE C 3 10.767 8.853 7.671 1.00 52.27 C ANISOU 1 CB PHE C 3 3279 6815 9766 3688 3821 6736 C ATOM 2 CG PHE C 3 9.935 8.032 8.622 1.00 52.60 C ANISOU 2 CG PHE C 3 3617 6545 9825 3719 3822 6829 C ATOM 3 CD1 PHE C 3 8.918 7.176 8.140 1.00 52.70 C ANISOU 3 CD1 PHE C 3 3745 6367 9912 3673 3727 7011 C ATOM 4 CD2 PHE C 3 10.161 8.107 10.012 1.00 53.03 C ANISOU 4 CD2 PHE C 3 3850 6502 9797 3761 3917 6735 C ATOM 5 CE1 PHE C 3 8.126 6.395 9.038 1.00 53.33 C ANISOU 5 
CE1 PHE C 3 4119 6169 9973 3647 3734 7098 C ATOM 6 CE2 PHE C 3 9.382 7.336 10.930 1.00 53.74 C ANISOU 6 CE2 PHE C 3 4248 6315 9853 3723 3923 6823 C ATOM 7 CZ PHE C 3 8.360 6.476 10.439 1.00 53.93 C ANISOU 7 CZ PHE C 3 4391 6159 9943 3655 3834 7005 C ATOM 8 C PHE C 3 10.956 10.811 6.133 1.00 51.80 C ANISOU 8 C PHE C 3 2733 7032 9917 3360 3966 6875 C ATOM 9 O PHE C 3 11.506 10.237 5.169 1.00 52.17 O ANISOU 9 O PHE C 3 2737 7311 9773 3399 3791 6767 O ATOM 10 OXT PHE C 3 11.143 12.010 6.428 1.00 51.70 O ANISOU 10 OXT PHE C 3 2580 7045 10018 3240 4125 6894 O ATOM 13 N PHE C 3 8.875 9.461 6.183 1.00 51.64 N ANISOU 13 N PHE C 3 2968 6515 10138 3352 3906 7205 N ATOM 15 CA PHE C 3 10.000 10.000 7.000 1.00 51.73 C ANISOU 15 CA PHE C 3 2963 6697 9995 3440 3985 6975 C END """ verbose=False file_name = "exercise_basic_pdbtools.pdb" pi = iotbx.pdb.input(source_info=None, lines=pdb_str) pi.write_pdb_file(file_name=file_name) output = file_name+"_modified.pdb" xrsp_init = xray_structure_plus(file_name = file_name) assert file_name.find('"') < 0 base = \ 'phenix.pdbtools "%s" output.file_name=%s '%(file_name, output) for selection_str in [None, "chain A or chain C"]: selection = xrsp_init.selection(selection_strings = selection_str) if(selection_str is None): assert selection.size() == selection.count(True) else: assert selection.size() == 36 and selection.count(True) == 24 # cmd = base + 'adp.randomize=true modify.selection="%s"'%str(selection_str) print(cmd) check_adp_rand( cmd, xrsp_init, output, selection, selection_str, verbose) # cmd = base + 'adp.set_b_iso=10.0 modify.selection="%s"'%str(selection_str) print(cmd) check_adp_set_b_iso( cmd, xrsp_init, output, selection, selection_str, verbose) # cmd = base + 'adp.shift_b_iso=20.0 modify.selection="%s"'%str(selection_str) print(cmd) check_adp_rand( cmd, xrsp_init, output, selection, selection_str, verbose) # cmd = base + 'adp.scale_adp=2.0 modify.selection="%s"'%str(selection_str) print(cmd) check_adp_rand( cmd, xrsp_init, 
output, selection, selection_str, verbose) # cmd = base + 'adp.convert_to_iso=true modify.selection="%s"'%str(selection_str) print(cmd) check_adp_to_iso( cmd, xrsp_init, output, selection, selection_str, verbose) # cmd = base + 'adp.convert_to_aniso=true modify.selection="%s"'%str(selection_str) print(cmd) check_adp_to_aniso( cmd, xrsp_init, output, selection, selection_str, verbose) # shake = 1.5 cmd = base+'sites.shake=%s modify.selection="%s"'%(str(shake), str(selection_str)) print(cmd) check_sites_shake( cmd, xrsp_init, output, selection, selection_str, shake, verbose) # cmd = base+'sites.rotate="1,2,3" sites.translate="4,5,6" modify.selection="%s"'%( str(selection_str)) print(cmd) check_sites_rt( cmd, xrsp_init, output, selection, selection_str, verbose) # cmd = base+'occupancies.randomize=true modify.selection="%s"'%(str(selection_str)) print(cmd) check_occ_randomize( cmd, xrsp_init, output, selection, selection_str, verbose) # cmd = base+'occupancies.set=0.75 modify.selection="%s"'%(str(selection_str)) print(cmd) check_occ_set( cmd, xrsp_init, output, selection, selection_str, verbose) # remove_selection_str = "element C" cmd = base+'remove="%s" modify.selection="%s"'%( str(remove_selection_str), str(selection_str)) print(cmd) check_remove_selection( cmd, xrsp_init, output, selection, selection_str, remove_selection_str, verbose) # keep_selection_str = "element C" cmd = base+'keep="%s" modify.selection="%s"'%( str(keep_selection_str), str(selection_str)) print(cmd) check_keep_selection( cmd, xrsp_init, output, selection, selection_str, keep_selection_str, verbose) # # cmd = base print(cmd) check_all_none(cmd, xrsp_init, output, verbose) # cmd = base check_keep_remove_conflict(cmd, output, verbose) def check_adp_rand( cmd, xrsp_init, output, selection, selection_str, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) assert approx_equal(xrsp.occ, xrsp_init.occ,tolerance) 
assert approx_equal(xrsp.sites_cart, xrsp_init.sites_cart,tolerance) assert approx_equal(xrsp.use_u_iso, xrsp_init.use_u_iso,tolerance) assert approx_equal(xrsp.use_u_aniso,xrsp_init.use_u_aniso,tolerance) assert approx_equal(xrsp.u_iso_not_used, xrsp_init.u_iso_not_used,tolerance) assert approx_equal(xrsp.u_cart_not_used,xrsp_init.u_cart_not_used,tolerance) if(selection_str is None): assert not_approx_equal(xrsp.u_iso_used, xrsp_init.u_iso_used,tolerance) assert not_approx_equal(xrsp.u_cart_used, xrsp_init.u_cart_used,tolerance) else: arg1 = xrsp.u_iso_used.select(selection.select(xrsp.use_u_iso)) arg2 = xrsp_init.u_iso_used.select(selection.select(xrsp_init.use_u_iso)) if(arg1.size() > 0): assert not_approx_equal(arg1, arg2,tolerance) arg1 =xrsp.u_cart_used.select(selection.select(xrsp.use_u_aniso)) arg2 =xrsp_init.u_cart_used.select(selection.select(xrsp_init.use_u_aniso)) if(arg1.size() > 0): assert not_approx_equal(arg1, arg2,tolerance) def check_adp_set_b_iso( cmd, xrsp_init, output, selection, selection_str, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) assert approx_equal(xrsp.occ, xrsp_init.occ,tolerance) assert approx_equal(xrsp.sites_cart, xrsp_init.sites_cart,tolerance) assert approx_equal(xrsp.use_u_iso, xrsp_init.use_u_iso,tolerance) assert approx_equal(xrsp.use_u_aniso,xrsp_init.use_u_aniso,tolerance) assert approx_equal(xrsp.u_iso_not_used, xrsp_init.u_iso_not_used,tolerance) assert approx_equal(xrsp.u_cart_not_used,xrsp_init.u_cart_not_used,tolerance) if(selection_str is None): assert not_approx_equal(xrsp.u_iso_used, xrsp_init.u_iso_used,tolerance) for ucart in xrsp.u_cart: b_iso = adptbx.u_as_b(adptbx.u_cart_as_u_iso(ucart)) if b_iso > 0: assert approx_equal(b_iso, 10, 0.005) else: assert approx_equal(b_iso, -78.956, 0.005) else: arg1 = xrsp.u_iso_used.select(selection.select(xrsp.use_u_iso)) arg2 = 
xrsp_init.u_iso_used.select(selection.select(xrsp_init.use_u_iso)) if(arg1.size() > 0): assert not_approx_equal(arg1, arg2,tolerance) for ucart in xrsp.u_cart: b_iso = adptbx.u_as_b(adptbx.u_cart_as_u_iso(ucart)) if b_iso > 0: assert approx_equal(b_iso, 10, 0.005) else: assert approx_equal(b_iso, -78.956, 0.005) def check_adp_to_iso( cmd, xrsp_init, output, selection, selection_str, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) assert approx_equal(xrsp.occ, xrsp_init.occ,tolerance) assert approx_equal(xrsp.sites_cart, xrsp_init.sites_cart,tolerance) assert not_approx_equal(xrsp.use_u_iso, xrsp_init.use_u_iso,tolerance) assert not_approx_equal(xrsp.use_u_aniso,xrsp_init.use_u_aniso,tolerance) assert xrsp.u_iso_not_used.size() == 0 assert xrsp_init.u_iso_not_used.size() > 0 assert xrsp.u_cart_used.size() == 0 assert xrsp_init.u_cart_used.size() > 0 def check_adp_to_aniso( cmd, xrsp_init, output, selection, selection_str, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) assert approx_equal(xrsp.occ, xrsp_init.occ,tolerance) assert approx_equal(xrsp.sites_cart, xrsp_init.sites_cart,tolerance) if(selection_str is None): assert not_approx_equal(xrsp.use_u_iso, xrsp_init.use_u_iso,tolerance) assert not_approx_equal(xrsp.use_u_aniso,xrsp_init.use_u_aniso,tolerance) assert xrsp.u_iso_used.size() == 0 assert xrsp_init.u_iso_used.size() > 0 assert xrsp.u_cart_not_used.size() == 0 assert xrsp_init.u_cart_not_used.size() > 0 else: assert approx_equal(xrsp.use_u_iso, xrsp_init.use_u_iso,tolerance) assert approx_equal(xrsp.use_u_aniso,xrsp_init.use_u_aniso,tolerance) def check_sites_shake( cmd, xrsp_init, output, selection, selection_str, shake, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) assert 
approx_equal(xrsp.occ, xrsp_init.occ,tolerance) assert approx_equal(xrsp.u_iso, xrsp_init.u_iso,tolerance) assert approx_equal(xrsp.u_cart, xrsp_init.u_cart,tolerance) if(selection_str is None): diff = xrsp.sites_cart - xrsp_init.sites_cart assert approx_equal( math.sqrt(flex.mean(diff.dot())), shake, 1.e-3,tolerance) else: diff = xrsp.sites_cart - xrsp_init.sites_cart assert approx_equal( math.sqrt(flex.mean(diff.select(selection).dot())), shake, 1.e-3) assert approx_equal( math.sqrt(flex.mean(diff.select(~selection).dot())),0.,tolerance) def check_sites_rt( cmd, xrsp_init, output, selection, selection_str, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) assert approx_equal(xrsp.occ, xrsp_init.occ,tolerance) assert approx_equal(xrsp.u_iso, xrsp_init.u_iso,tolerance) assert approx_equal(xrsp.u_cart, xrsp_init.u_cart,tolerance) if(selection_str is None): diff = xrsp.sites_cart - xrsp_init.sites_cart assert math.sqrt(flex.mean(diff.dot())) > 1.0 else: diff = xrsp.sites_cart - xrsp_init.sites_cart assert math.sqrt(flex.mean(diff.select(selection).dot())) > 1.0 assert approx_equal( math.sqrt(flex.mean(diff.select(~selection).dot())),0.,tolerance) def check_occ_randomize( cmd, xrsp_init, output, selection,selection_str, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) assert approx_equal(xrsp.sites_cart,xrsp_init.sites_cart,tolerance) assert approx_equal(xrsp.u_iso, xrsp_init.u_iso,tolerance) assert approx_equal(xrsp.u_cart, xrsp_init.u_cart,tolerance) if(selection_str is None): diff = flex.abs(xrsp.occ - xrsp_init.occ) assert flex.mean(diff) > 0.0 assert flex.max(diff) > 0.0 else: diff = flex.abs(xrsp.occ - xrsp_init.occ) assert flex.mean(diff) > 0.0 assert flex.max(diff) > 0.0 assert approx_equal(flex.mean(diff.select(~selection)),0.,tolerance) def check_occ_set( cmd, xrsp_init, output, 
selection,selection_str, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) assert approx_equal(xrsp.sites_cart,xrsp_init.sites_cart,tolerance) assert approx_equal(xrsp.u_iso, xrsp_init.u_iso,tolerance) assert approx_equal(xrsp.u_cart, xrsp_init.u_cart,tolerance) if(selection_str is None): diff = flex.abs(xrsp.occ - xrsp_init.occ) assert flex.mean(diff) > 0.0 assert flex.max(diff) > 0.0 else: occ_init = xrsp_init.occ occ_mod = xrsp.occ assert occ_init.all_eq(1.0) assert occ_mod.select(~selection).all_eq(1.0) assert occ_mod.select(selection).all_eq(0.75) def check_remove_selection( cmd, xrsp_init, output, selection, selection_str, remove_selection_str, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) remove_selection = ~xrsp_init.selection( selection_strings = remove_selection_str) assert remove_selection.size() > remove_selection.count(True) assert approx_equal(xrsp.sites_cart, xrsp_init.sites_cart.select(remove_selection),tolerance) assert approx_equal( xrsp.occ, xrsp_init.occ.select(remove_selection),tolerance) assert approx_equal( xrsp.u_iso, xrsp_init.u_iso.select(remove_selection),tolerance) assert approx_equal( xrsp.u_cart, xrsp_init.u_cart.select(remove_selection),tolerance) sct1 = xrsp_init.xray_structure.scatterers().extract_scattering_types() assert sct1.count("C") > 0 sct2 = xrsp.xray_structure.scatterers().extract_scattering_types() assert sct2.count("C") == 0 assert sct2.size() == remove_selection.count(True) assert sct1.size() > sct2.size() def check_keep_selection( cmd, xrsp_init, output, selection, selection_str, keep_selection_str, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) keep_selection = xrsp_init.selection(selection_strings = keep_selection_str) assert approx_equal(xrsp.sites_cart, 
xrsp_init.sites_cart.select(keep_selection),tolerance) assert approx_equal(xrsp.occ, xrsp_init.occ.select(keep_selection),tolerance) assert approx_equal( xrsp.u_iso, xrsp_init.u_iso.select(keep_selection),tolerance) assert approx_equal( xrsp.u_cart, xrsp_init.u_cart.select(keep_selection),tolerance) sct1 = xrsp_init.xray_structure.scatterers().extract_scattering_types() assert sct1.count("C") > 0 and sct1.size() > sct1.count("C") sct2 = xrsp.xray_structure.scatterers().extract_scattering_types() assert sct2.count("C") == sct2.size() assert sct1.size() > keep_selection.count(True) assert sct1.size() > sct2.size() def check_all_none(cmd, xrsp_init, output, verbose, tolerance=1.e-3): remove_files(output) run_command(command=cmd, verbose=verbose) xrsp = xray_structure_plus(file_name = output) assert approx_equal(xrsp.occ, xrsp_init.occ,tolerance) assert approx_equal(xrsp.sites_cart, xrsp_init.sites_cart,tolerance) assert approx_equal(xrsp.use_u_iso, xrsp_init.use_u_iso,tolerance) assert approx_equal(xrsp.use_u_aniso, xrsp_init.use_u_aniso,tolerance) assert approx_equal(xrsp.u_iso, xrsp_init.u_iso,tolerance) assert approx_equal(xrsp.u_cart, xrsp_init.u_cart,tolerance) def check_keep_remove_conflict(cmd, output, verbose): cmd += " keep=all remove=all " print(cmd) cmd_result = run_command(command=cmd, verbose=verbose, sorry_expected=True) sorry_lines = search_for( pattern="Sorry: 'keep' and 'remove' keywords cannot be used simultaneously.", mode="==", lines=cmd_result.stdout_lines) assert len(sorry_lines) == 1 def exercise_multiple(): pdb_str = """\ ATOM 1 O HOH A 2 5.131 5.251 5.823 1.00 10.00 O ATOM 2 CA LYS B 32 10.574 8.177 11.768 1.00 11.49 C ATOM 3 CB LYS B 32 9.193 8.732 12.170 1.00 12.23 C ATOM 4 CA VAL C 33 11.708 5.617 14.332 1.00 11.42 C ATOM 5 CB VAL C 33 11.101 4.227 14.591 1.00 11.47 C ATOM 6 O HOH C 3 1.132 5.963 7.065 1.00 15.00 O ATOM 7 O HOH C 4 4.132 9.963 7.800 1.00 15.00 O TER END """ pi = iotbx.pdb.input(source_info=None, lines=pdb_str) ph_met_in = 
pi.construct_hierarchy() pi.write_pdb_file(file_name="exercise_multiple.pdb") params = """\ modify{ adp { atom_selection = chain A randomize = True } adp { atom_selection = chain B shift_b_iso = 10 } sites { atom_selection = chain B shake = 1.5 } sites { atom_selection = chain A or chain C translate = 1 2 3 rotate = 4 5 6 } occupancies { atom_selection = chain A randomize = True } occupancies { atom_selection = chain C set = 0.1 } } """ open("params", "w").write(params) cmd = 'phenix.pdbtools exercise_multiple.pdb params' print(cmd) result = run_command(command=cmd, verbose=False) lines = [l.strip() for l in result.stdout_lines] expected_lines = [ "Randomizing ADP: selected atoms: 1 of 7", "Adding shift = 10.00 to all ADP: selected atoms: 2 of 7", "Shaking sites (RMS = 1.500): selected atoms: 2 of 7", "Rigid body shift: selected atoms: 5 of 7", "Randomizing occupancies: selected atoms: 1 of 7", "Setting occupancies to: 0.100: selected atoms: 4 of 7" ] for line in expected_lines: assert line in lines def exercise_no_cryst1(): pdb_str = """ ATOM 1 N AMET B 37 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA AMET B 37 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C AMET B 37 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O AMET B 37 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB AMET B 37 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG AMET B 37 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD AMET B 37 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE AMET B 37 8.775 5.000 10.645 1.00 10.00 C ATOM 1 N BMET B 37 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA BMET B 37 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C BMET B 37 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O BMET B 37 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB BMET B 37 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG BMET B 37 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD BMET B 37 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE BMET B 37 8.775 5.000 10.645 1.00 10.00 C TER END """ pi = iotbx.pdb.input(source_info=None, lines=pdb_str) ph_met_in = pi.construct_hierarchy() 
pi.write_pdb_file(file_name="exercise_no_cryst1.pdb") cmd = 'phenix.pdbtools exercise_no_cryst1.pdb sites.rotate="0 0 0"' print(cmd) run_command(command=cmd, verbose=False) lines1 = [] for line in open("exercise_no_cryst1.pdb","r").readlines(): line = line.strip() assert line.count("CRYST1") == 0 if(line.startswith("ATOM") or line.startswith("HETATM")): lines1.append(line) lines2 = [] for line in open("exercise_no_cryst1.pdb_modified.pdb","r").readlines(): line = line.strip() assert line.count("CRYST1") == 0 if(line.startswith("ATOM") or line.startswith("HETATM")): lines2.append(line) assert len(lines1) == len(lines2) for l1,l2 in zip(lines1, lines2): assert l1[11:70].strip() == l2[11:70].strip() def exercise_truncate_to_polyala(): pdb_str = """ ATOM 1 N AMET B 37 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA AMET B 37 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C AMET B 37 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O AMET B 37 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB AMET B 37 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG AMET B 37 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD AMET B 37 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE AMET B 37 8.775 5.000 10.645 1.00 10.00 C ATOM 1 N BMET B 37 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA BMET B 37 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C BMET B 37 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O BMET B 37 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB BMET B 37 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG BMET B 37 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD BMET B 37 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE BMET B 37 8.775 5.000 10.645 1.00 10.00 C TER END """ pi = iotbx.pdb.input(source_info=None, lines=pdb_str) ph_met_in = pi.construct_hierarchy() pi.write_pdb_file(file_name="exercise_exercise_truncate_to_polyala.pdb") cmd = 'phenix.pdbtools exercise_exercise_truncate_to_polyala.pdb truncate_to_polyala=true' print(cmd) run_command(command=cmd) gly_atom_names = [" N ", " CA ", " C ", " O ", " CB "] pdb_inp = iotbx.pdb.hierarchy.input( 
file_name="exercise_exercise_truncate_to_polyala.pdb_modified.pdb") for a in pdb_inp.hierarchy.atoms_with_labels(): assert a.name in gly_atom_names def exercise_set_charge(): from iotbx import file_reader input_pdb = """ ATOM 1 CL CL X 1 0.000 0.000 0.000 1.00 20.00 CL END """ open("tmp_cl.pdb", "w").write(input_pdb) cmd='phenix.pdbtools tmp_cl.pdb charge_selection="element Cl" charge=-1' print(cmd) run_command(command=cmd, verbose=False) pdb_in = file_reader.any_file("tmp_cl.pdb_modified.pdb").file_object hierarchy = pdb_in.hierarchy xrs = pdb_in.xray_structure_simple() assert (xrs.scatterers()[0].scattering_type == 'Cl1-') assert (hierarchy.atoms()[0].charge == '1-') def exercise_renumber_residues(): input_pdb = """ ATOM 1 O GLY A 3 1.434 1.460 2.496 1.00 6.04 O ATOM 2 O CYS A 7 2.196 4.467 3.911 1.00 4.51 O ATOM 3 O CYS A 1 -1.433 4.734 5.405 1.00 7.85 O TER ATOM 4 O SER B 4 0.297 0.843 7.226 1.00 7.65 O ATOM 5 OG ASER B 4 -2.625 1.057 4.064 0.50 5.46 O ATOM 6 OG BSER B 4 -0.885 0.189 3.843 0.50 11.74 O ATOM 7 O LEU B 8 3.613 4.307 6.646 1.00 5.39 O ATOM 8 O PRO B -1 4.398 6.723 8.658 1.00 6.65 O ATOM 9 O TYR B 7 7.294 7.360 6.923 1.00 8.75 O ATOM 10 O CYS B 0 5.256 8.262 4.185 1.00 6.08 O ATOM 11 O ALA B 9 3.028 10.447 5.584 1.00 7.39 O TER ATOM 12 O LEU C 0 5.613 12.448 6.864 1.00 7.32 O TER END """ expected_output_pdb = """ATOM 1 O GLY A 1 1.434 1.460 2.496 1.00 6.04 O ATOM 2 O CYS A 2 2.196 4.467 3.911 1.00 4.51 O ATOM 3 O CYS A 3 -1.433 4.734 5.405 1.00 7.85 O TER ATOM 4 O SER B 1 0.297 0.843 7.226 1.00 7.65 O ATOM 5 OG ASER B 1 -2.625 1.057 4.064 0.50 5.46 O ATOM 6 OG BSER B 1 -0.885 0.189 3.843 0.50 11.74 O ATOM 7 O LEU B 2 3.613 4.307 6.646 1.00 5.39 O ATOM 8 O PRO B 3 4.398 6.723 8.658 1.00 6.65 O ATOM 9 O TYR B 4 7.294 7.360 6.923 1.00 8.75 O ATOM 10 O CYS B 5 5.256 8.262 4.185 1.00 6.08 O ATOM 11 O ALA B 6 3.028 10.447 5.584 1.00 7.39 O TER ATOM 12 O LEU C 1 5.613 12.448 6.864 1.00 7.32 O TER """ ifn = "exercise_renumber_residues.pdb" 
open(ifn,"w").write(input_pdb) cmd = 'phenix.pdbtools "%s" renumber_residues=true'%ifn print(cmd) run_command(command=cmd, verbose=False) for line1, line2 in zip(open(ifn+"_modified.pdb").readlines(), expected_output_pdb.splitlines()): line1 = line1.strip() line2 = line2.strip() assert line1 == line2 # now only a selected chain expected_output_pdb_2 = """\ ATOM 1 O GLY A 3 1.434 1.460 2.496 1.00 6.04 O ATOM 2 O CYS A 7 2.196 4.467 3.911 1.00 4.51 O ATOM 3 O CYS A 1 -1.433 4.734 5.405 1.00 7.85 O TER ATOM 4 O SER B 1 0.297 0.843 7.226 1.00 7.65 O ATOM 5 OG ASER B 1 -2.625 1.057 4.064 0.50 5.46 O ATOM 6 OG BSER B 1 -0.885 0.189 3.843 0.50 11.74 O ATOM 7 O LEU B 2 3.613 4.307 6.646 1.00 5.39 O ATOM 8 O PRO B 3 4.398 6.723 8.658 1.00 6.65 O ATOM 9 O TYR B 4 7.294 7.360 6.923 1.00 8.75 O ATOM 10 O CYS B 5 5.256 8.262 4.185 1.00 6.08 O ATOM 11 O ALA B 6 3.028 10.447 5.584 1.00 7.39 O TER ATOM 12 O LEU C 0 5.613 12.448 6.864 1.00 7.32 O TER """ cmd = "phenix.pdbtools \"%s\" renumber_residues=true modify.selection=\"chain B\"" % ifn print(cmd) run_command(command=cmd, verbose=False) new_lines = open(ifn+"_modified.pdb").readlines() for line1, line2 in zip(new_lines, expected_output_pdb_2.splitlines()): assert (line1.strip() == line2.strip()) cmd="phenix.pdbtools \"%s\" increment_resseq=10 modify.selection=\"chain B\"" % ifn print(cmd) run_command(command=cmd, verbose=False) pdb_new = open(ifn+"_modified.pdb").read() expected_output_pdb_3 = """\ ATOM 1 O GLY A 3 1.434 1.460 2.496 1.00 6.04 O ATOM 2 O CYS A 7 2.196 4.467 3.911 1.00 4.51 O ATOM 3 O CYS A 1 -1.433 4.734 5.405 1.00 7.85 O TER ATOM 4 O SER B 14 0.297 0.843 7.226 1.00 7.65 O ATOM 5 OG ASER B 14 -2.625 1.057 4.064 0.50 5.46 O ATOM 6 OG BSER B 14 -0.885 0.189 3.843 0.50 11.74 O ATOM 7 O LEU B 18 3.613 4.307 6.646 1.00 5.39 O ATOM 8 O PRO B 9 4.398 6.723 8.658 1.00 6.65 O ATOM 9 O TYR B 17 7.294 7.360 6.923 1.00 8.75 O ATOM 10 O CYS B 10 5.256 8.262 4.185 1.00 6.08 O ATOM 11 O ALA B 19 3.028 10.447 5.584 1.00 7.39 O 
TER ATOM 12 O LEU C 0 5.613 12.448 6.864 1.00 7.32 O TER END """ assert not show_diff(pdb_new, expected_output_pdb_3) def exercise_neutralize_scatterers(): input_pdb = """ATOM 1 N TYR 22 -0.813 -2.199 1.423 1.00 0.00 N ATOM 2 CA TYR 22 0.612 -2.082 1.127 1.00 0.00 C ATOM 3 C TYR 22 1.441 -2.790 2.171 1.00 0.00 C ATOM 4 O TYR 22 0.927 -3.375 3.128 1.00 0.00 O ATOM 5 CB TYR 22 1.052 -0.589 1.096 1.00 0.00 C ATOM 6 CG TYR 22 0.390 0.302 0.038 1.00 0.00 C ATOM 7 CD1 TYR 22 0.807 0.254 -1.296 1.00 0.00 C ATOM 8 CD2 TYR 22 -0.647 1.164 0.406 1.00 0.00 C ATOM 9 CE1 TYR 22 0.188 1.057 -2.250 1.00 0.00 C ATOM 10 CE2 TYR 22 -1.264 1.966 -0.549 1.00 0.00 C ATOM 11 CZ TYR 22 -0.846 1.912 -1.877 1.00 0.00 C ATOM 12 OH TYR 22 -1.452 2.696 -2.817 1.00 0.00 O1- """ expected_output_pdb = """ATOM 1 N TYR 22 -0.813 -2.199 1.423 1.00 10.00 N ATOM 2 CA TYR 22 0.612 -2.082 1.127 1.00 10.00 C ATOM 3 C TYR 22 1.441 -2.790 2.171 1.00 10.00 C ATOM 4 O TYR 22 0.927 -3.375 3.128 1.00 10.00 O ATOM 5 CB TYR 22 1.052 -0.589 1.096 1.00 10.00 C ATOM 6 CG TYR 22 0.390 0.302 0.038 1.00 10.00 C ATOM 7 CD1 TYR 22 0.807 0.254 -1.296 1.00 10.00 C ATOM 8 CD2 TYR 22 -0.647 1.164 0.406 1.00 10.00 C ATOM 9 CE1 TYR 22 0.188 1.057 -2.250 1.00 10.00 C ATOM 10 CE2 TYR 22 -1.264 1.966 -0.549 1.00 10.00 C ATOM 11 CZ TYR 22 -0.846 1.912 -1.877 1.00 10.00 C ATOM 12 OH TYR 22 -1.452 2.696 -2.817 1.00 10.00 O """ ifn = "exercise_neutralize_scatterers.pdb" open(ifn,"w").write(input_pdb) cmd = 'phenix.pdbtools "%s" neutralize_scatterers=true adp.set_b_iso=10'%ifn print(cmd) run_command(command=cmd, verbose=False) for line1, line2 in zip(open(ifn+"_modified.pdb").readlines(), expected_output_pdb.splitlines()): line1 = line1.strip() line2 = line2.strip() assert line1 == line2 def exercise_remove_atoms(): import random random.seed(1) flex.set_random_seed(1) pdb_str = """ ATOM 1 N AMET B 37 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA AMET B 37 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C AMET B 37 6.175 7.044 5.330 1.00 10.00 C 
ATOM 4 O AMET B 37 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB AMET B 37 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG AMET B 37 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD AMET B 37 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE AMET B 37 8.775 5.000 10.645 1.00 10.00 C ATOM 1 N BMET B 37 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA BMET B 37 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C BMET B 37 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O BMET B 37 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB BMET B 37 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG BMET B 37 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD BMET B 37 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE BMET B 37 8.775 5.000 10.645 1.00 10.00 C ATOM 1 N AMET B 38 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA AMET B 38 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C AMET B 38 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O AMET B 38 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB AMET B 38 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG AMET B 38 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD AMET B 38 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE AMET B 38 8.775 5.000 10.645 1.00 10.00 C ATOM 1 N BMET B 38 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA BMET B 38 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C BMET B 38 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O BMET B 38 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB BMET B 38 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG BMET B 38 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD BMET B 38 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE BMET B 38 8.775 5.000 10.645 1.00 10.00 C ATOM 1 N AMET B 39 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA AMET B 39 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C AMET B 39 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O AMET B 39 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB AMET B 39 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG AMET B 39 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD AMET B 39 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE AMET B 39 8.775 5.000 10.645 1.00 10.00 C ATOM 1 N BMET B 39 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA BMET B 39 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C BMET B 39 
6.175 7.044 5.330 1.00 10.00 C ATOM 4 O BMET B 39 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB BMET B 39 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG BMET B 39 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD BMET B 39 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE BMET B 39 8.775 5.000 10.645 1.00 10.00 C TER END """ pi = iotbx.pdb.input(source_info=None, lines=pdb_str) ph_in = pi.construct_hierarchy() s1 = ph_in.atoms_size() pi.write_pdb_file(file_name="exercise_remove_atoms.pdb") cmd = " ".join([ "phenix.pdbtools", "exercise_remove_atoms.pdb", "remove_fraction=0.1"]) print(cmd) run_command(command=cmd, verbose=False) pi = iotbx.pdb.input(file_name="exercise_remove_atoms.pdb_modified.pdb") ph_in = pi.construct_hierarchy() s2 = ph_in.atoms_size() f = s2*100./s1 # # UNSTABLE 2x # assert f>77 and f<100, f # was getting 79.16 on anaconda t96 def exercise_change_of_basis(): open("tmp_pdbtools_cb_op.pdb", "w").write("""\ CRYST1 21.937 4.866 23.477 90.00 107.08 90.00 P 1 21 1 2 ATOM 1 N GLY A 1 -9.009 4.612 6.102 1.00 16.77 N ATOM 2 CA GLY A 1 -9.052 4.207 4.651 1.00 16.57 C ATOM 3 C GLY A 1 -8.015 3.140 4.419 1.00 16.16 C ATOM 4 O GLY A 1 -7.523 2.521 5.381 1.00 16.78 O """) cmd = "phenix.pdbtools tmp_pdbtools_cb_op.pdb change_of_basis='a,c,b'" run_command(command=cmd, verbose=False) lines = open("tmp_pdbtools_cb_op.pdb_modified.pdb").readlines() for line in lines : if line.startswith("CRYST1"): assert (line.strip() == """CRYST1 21.937 23.477 4.866 90.00 90.00 107.08 P 1 1 21""") break def exercise_mmcif_support(): from libtbx.test_utils import open_tmp_file f = open_tmp_file(suffix="pdbtools.cif") f.write("""\ data_phenix _space_group.name_H-M_alt 'C 1 2 1' _space_group.name_Hall ' C 2y' _cell.length_a 46.053 _cell.length_b 9.561 _cell.length_c 20.871 _cell.angle_alpha 90.0 _cell.angle_beta 97.43 _cell.angle_gamma 90.0 _cell.volume 9112.60599144 loop_ _atom_site.group_PDB _atom_site.id _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.auth_asym_id 
_atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.type_symbol _atom_site.pdbx_formal_charge _atom_site.label_asym_id _atom_site.label_entity_id _atom_site.label_seq_id _atom_site.pdbx_PDB_model_num ATOM 2 CA . LYS A 1 ? 7.49733 -0.62028 4.35289 1.000 10.25989 C ? A ? 1 1 ATOM 11 CA . LEU A 2 ? 3.72032 -0.19320 3.89326 1.000 7.80433 C ? A ? 2 1 ATOM 19 CA . VAL A 3 ? 0.78668 -0.39555 6.35234 1.000 5.03864 C ? A ? 3 1 ATOM 26 CA . PHE A 4 ? -2.75438 -0.21383 5.02429 1.000 8.93080 C ? A ? 4 1 ATOM 37 CA . PHE A 5 ? -6.05155 -0.46197 6.85390 1.000 9.57417 C ? A ? 5 1 ATOM 48 CA . ALA A 6 ? -9.57646 -0.10942 5.55847 1.000 17.73488 C ? A ? 6 1 ATOM 54 CA . LYS B 1 ? -8.86604 -5.20044 5.46515 1.000 16.15297 C ? B ? 7 1 """) f.close() cmd = " ".join(["phenix.pdbtools", "\"%s\"" % f.name, "rename_chain_id.old_id=A", "rename_chain_id.new_id=C"]) print(cmd) run_command(command=cmd, verbose=False) assert os.path.isfile(f.name+"_modified.pdb") pdb_inp = iotbx.pdb.input(file_name=f.name+"_modified.pdb") assert pdb_inp.file_type() == "pdb" hierarchy = pdb_inp.construct_hierarchy() assert [chain.id for chain in hierarchy.chains()] == ['C', 'B'] cmd = " ".join(["phenix.pdbtools", "\"%s\"" % f.name, "adp.convert_to_anisotropic=True", "output.format=mmcif"]) print(cmd) run_command(command=cmd, verbose=False) assert os.path.isfile(f.name+"_modified.cif") pdb_inp = iotbx.pdb.input(file_name=f.name+"_modified.cif") assert pdb_inp.file_type() == "mmcif" xs = pdb_inp.xray_structure_simple() assert xs.use_u_aniso().all_eq(True) def exercise_mmcif_support_2(prefix="tst_pdbtools_mmcif2"): f = open("%s.pdb" % prefix, 'w') f.write("""\ HELIX 1 1 TRP A 10 VAL A 14 5 5 HELIX 2 2 ARG A 17 ASN A 29 1 13 SHEET 1 A 2 ARG A 33 PRO A 38 0 SHEET 2 A 2 ARG A 51 VAL A 56 1 O VAL A 54 N ILE A 35 SSBOND 1 CYS A 4 CYS A 49 CRYST1 62.654 62.654 45.906 90.00 90.00 90.00 P 43 21 2 SCALE1 
0.015961 0.000000 0.000000 0.00000 SCALE2 0.000000 0.015961 0.000000 0.00000 SCALE3 0.000000 0.000000 0.021784 0.00000 ATOM 1 N GLN A 3 23.762 12.429 1.265 1.00 45.30 N ATOM 2 CA GLN A 3 22.670 12.004 2.148 1.00 39.46 C ATOM 3 C GLN A 3 21.818 10.942 1.458 1.00 42.86 C ATOM 4 O GLN A 3 21.337 11.133 0.339 1.00 44.81 O ATOM 5 CB GLN A 3 21.794 13.195 2.571 1.00 42.53 C ATOM 6 N CYS A 4 21.620 9.817 2.129 1.00 36.33 N ATOM 7 CA CYS A 4 20.902 8.709 1.518 1.00 31.46 C ATOM 8 C CYS A 4 19.419 8.891 1.670 1.00 34.16 C ATOM 9 O CYS A 4 18.979 9.537 2.607 1.00 38.12 O ATOM 10 CB CYS A 4 21.307 7.409 2.205 1.00 31.53 C ATOM 11 SG CYS A 4 23.070 7.194 2.148 1.00 29.88 S ATOM 12 N SER A 5 18.657 8.290 0.760 1.00 33.83 N ATOM 13 CA SER A 5 17.211 8.293 0.854 1.00 38.94 C ATOM 14 C SER A 5 16.676 6.917 1.254 1.00 32.88 C ATOM 15 O SER A 5 17.295 5.879 0.981 1.00 32.57 O ATOM 16 CB SER A 5 16.603 8.683 -0.488 1.00 39.48 C ATOM 17 OG SER A 5 16.893 7.668 -1.432 1.00 47.16 O ATOM 18 N GLY A 6 15.520 6.924 1.901 1.00 31.69 N ATOM 19 CA GLY A 6 14.811 5.694 2.191 1.00 32.41 C ATOM 20 C GLY A 6 15.257 5.112 3.511 1.00 29.00 C ATOM 2 CA GLN A 10 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 11 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 12 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 13 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 14 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 33 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 34 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 35 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 36 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 37 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 38 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 51 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 52 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 53 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 54 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 55 22.670 12.004 2.148 1.00 39.46 C ATOM 2 CA GLN A 56 22.670 12.004 2.148 
1.00 39.46 C """) f.close() cmd = " ".join([ "phenix.pdbtools", "%s.pdb" % prefix, "output.format=mmcif", "output.file_name=%s.cif" % prefix]) print(cmd) run_command(command=cmd, verbose=False) cif_f = open("%s.cif" % prefix, 'r') cif_l = cif_f.readlines() cif_f.close() for l in ["_cell.angle_alpha 90.000\n", " _struct_conf.pdbx_PDB_helix_id\n", " _struct_sheet.id\n", " _struct_sheet_range.id\n", " _atom_site.label_atom_id\n"]: assert l in cif_l, "%s not in cif file!" % l def exercise_move_waters(): pdb_in = """\ ATOM 16 O AHOH A 2 5.131 5.251 5.823 0.60 10.00 O ATOM 60 CA LYS A 32 10.574 8.177 11.768 1.00 11.49 C ATOM 63 CB ALYS A 32 9.197 8.686 12.246 0.29 14.71 C ATOM 64 CB BLYS A 32 9.193 8.732 12.170 0.71 12.23 C ATOM 74 CA VAL A 33 11.708 5.617 14.332 1.00 11.42 C ATOM 77 CB VAL A 33 11.101 4.227 14.591 1.00 11.47 C ATOM 18 O HOH A 3 1.132 5.963 7.065 1.00 15.00 O ATOM 19 H1 HOH A 3 1.160 5.211 6.437 1.00 15.00 H ATOM 20 H2 HOH A 3 1.122 5.579 7.967 1.00 15.00 H HETATM 2397 P PO4 1 -7.520 25.376 38.369 1.00 39.37 P HETATM 2398 O1 PO4 1 -6.610 24.262 38.967 1.00 40.00 O HETATM 2399 O2 PO4 1 -6.901 25.919 37.049 1.00 41.07 O HETATM 2400 O3 PO4 1 -8.894 24.741 38.097 1.00 45.09 O HETATM 2401 O4 PO4 1 -7.722 26.556 39.350 1.00 42.48 O ATOM 23 CL CL B 1 6.302 6.419 1.560 0.50 10.00 CL HETATM 6362 O HOH B 2 47.616 10.724 150.212 1.00 46.48 B O HETATM 6363 O AHOH B 3 46.408 16.672 146.066 0.50 12.81 B O HETATM 6364 O HOH B 4 29.343 12.806 185.898 1.00 35.57 B O HETATM 6365 O BHOH B 5 43.786 12.615 147.734 0.50 28.43 B O HETATM 6366 O HOH B 6 35.068 19.167 155.349 1.00 15.97 B O """ pdb_out = """\ ATOM 1 CA LYS A 32 10.574 8.177 11.768 1.00 11.49 C ATOM 2 CB ALYS A 32 9.197 8.686 12.246 0.29 14.71 C ATOM 3 CB BLYS A 32 9.193 8.732 12.170 0.71 12.23 C ATOM 4 CA VAL A 33 11.708 5.617 14.332 1.00 11.42 C ATOM 5 CB VAL A 33 11.101 4.227 14.591 1.00 11.47 C TER HETATM 6 O1 PO4 1 -6.610 24.262 38.967 1.00 40.00 O HETATM 7 O2 PO4 1 -6.901 25.919 37.049 1.00 41.07 O HETATM 8 
O3 PO4 1 -8.894 24.741 38.097 1.00 45.09 O HETATM 9 O4 PO4 1 -7.722 26.556 39.350 1.00 42.48 O HETATM 10 P PO4 1 -7.520 25.376 38.369 1.00 39.37 P ATOM 11 CL CL B 1 6.302 6.419 1.560 0.50 10.00 Cl ATOM 12 O AHOH A 2 5.131 5.251 5.823 0.60 10.00 O ATOM 13 O HOH A 3 1.132 5.963 7.065 1.00 15.00 O ATOM 14 H1 HOH A 3 1.160 5.211 6.437 1.00 15.00 H ATOM 15 H2 HOH A 3 1.122 5.579 7.967 1.00 15.00 H HETATM 16 O HOH B 2 47.616 10.724 150.212 1.00 46.48 B O HETATM 17 O AHOH B 3 46.408 16.672 146.066 0.50 12.81 B O HETATM 18 O HOH B 4 29.343 12.806 185.898 1.00 35.57 B O HETATM 19 O BHOH B 5 43.786 12.615 147.734 0.50 28.43 B O HETATM 20 O HOH B 6 35.068 19.167 155.349 1.00 15.97 B O END """ open("tst_pdbtools_move_waters.pdb", "w").write(pdb_in) cmd = "phenix.pdbtools tst_pdbtools_move_waters.pdb move_waters_last=True" print(cmd) run_command(command=cmd, verbose=False) pdb_new = open("tst_pdbtools_move_waters.pdb_modified.pdb").read() assert pdb_new == pdb_out, pdb_new def exercise_stop_for_unknowns(): pdb_in = """\ HETATM 16 O UNK B 2 47.616 10.724 150.212 1.00 46.48 B O HETATM 17 O UNK B 3 46.408 16.672 146.066 0.50 12.81 B O HETATM 18 O UNK B 4 29.343 12.806 185.898 1.00 35.57 B O HETATM 19 O UNK B 5 43.786 12.615 147.734 0.50 28.43 B O HETATM 20 O UNK B 6 35.068 19.167 155.349 1.00 15.97 B O """ open("tst_pdbtools_unknown.pdb", "w").write(pdb_in) cmd = "phenix.pdbtools tst_pdbtools_unknown.pdb set_b_iso=20" print(cmd) run_command(command=cmd, sorry_expected=True) cmd2 = cmd + " stop_for_unknowns=False" print(cmd2) run_command(command=cmd2) def exercise_remove_alt_confs(): pdb_in = """\ ATOM 16 O AHOH A 2 5.131 5.251 5.823 0.60 10.00 O ATOM 60 CA LYS A 32 10.574 8.177 11.768 1.00 11.49 C ATOM 63 CB ALYS A 32 9.197 8.686 12.246 0.29 14.71 C ATOM 64 CB BLYS A 32 9.193 8.732 12.170 0.71 12.23 C ATOM 74 CA VAL A 33 11.708 5.617 14.332 1.00 11.42 C ATOM 77 CB VAL A 33 11.101 4.227 14.591 1.00 11.47 C ATOM 18 O HOH A 3 1.132 5.963 7.065 1.00 15.00 O ATOM 19 O BHOH A 4 4.132 
9.963 7.800 0.50 15.00 O """ open("tst_pdbtools_alt_confs.pdb", "w").write(pdb_in) cmd = "phenix.pdbtools tst_pdbtools_alt_confs.pdb remove_alt_confs=True" print(cmd) run_command(command=cmd, verbose=False) pdb_new = open("tst_pdbtools_alt_confs.pdb_modified.pdb").read() assert (pdb_new == """\ ATOM 1 O HOH A 2 5.131 5.251 5.823 1.00 10.00 O ATOM 2 CA LYS A 32 10.574 8.177 11.768 1.00 11.49 C ATOM 3 CB LYS A 32 9.197 8.686 12.246 1.00 14.71 C ATOM 4 CA VAL A 33 11.708 5.617 14.332 1.00 11.42 C ATOM 5 CB VAL A 33 11.101 4.227 14.591 1.00 11.47 C ATOM 6 O HOH A 3 1.132 5.963 7.065 1.00 15.00 O TER END """) cmd = "phenix.pdbtools tst_pdbtools_alt_confs.pdb remove_alt_confs=True " +\ "always_keep_one_conformer=True" run_command(command=cmd, verbose=False) pdb_new = open("tst_pdbtools_alt_confs.pdb_modified.pdb").read() assert (pdb_new == """\ ATOM 1 O HOH A 2 5.131 5.251 5.823 1.00 10.00 O ATOM 2 CA LYS A 32 10.574 8.177 11.768 1.00 11.49 C ATOM 3 CB LYS A 32 9.193 8.732 12.170 1.00 12.23 C ATOM 4 CA VAL A 33 11.708 5.617 14.332 1.00 11.42 C ATOM 5 CB VAL A 33 11.101 4.227 14.591 1.00 11.47 C ATOM 6 O HOH A 3 1.132 5.963 7.065 1.00 15.00 O ATOM 7 O HOH A 4 4.132 9.963 7.800 1.00 15.00 O TER END """) def exercise_convert_met_to_semet(): pdb_str_met = """ ATOM 1 N MET B 37 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA MET B 37 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C MET B 37 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O MET B 37 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB MET B 37 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG MET B 37 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD MET B 37 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE MET B 37 8.775 5.000 10.645 1.00 10.00 C TER END """ pi = iotbx.pdb.input(source_info=None, lines=pdb_str_met) ph_met_in = pi.construct_hierarchy() pi.write_pdb_file(file_name="exercise_convert_met_to_semet.pdb") cmd = " ".join([ "phenix.pdbtools", "exercise_convert_met_to_semet.pdb", "convert_met_to_semet=true"]) print(cmd) run_command(command=cmd, verbose=False) 
pi_out = iotbx.pdb.input( file_name="exercise_convert_met_to_semet.pdb_modified.pdb" ).construct_hierarchy() for rg in pi_out.residue_groups(): for rn in rg.unique_resnames(): assert rn=="MSE" cmd = " ".join([ "phenix.pdbtools", "exercise_convert_met_to_semet.pdb_modified.pdb", "convert_semet_to_met=true"]) run_command(command=cmd, verbose=False) pi_out = iotbx.pdb.input( file_name="exercise_convert_met_to_semet.pdb_modified.pdb_modified.pdb" ).construct_hierarchy() for rg in pi_out.residue_groups(): for rn in rg.unique_resnames(): assert rn=="MET" def exercise_switch_rotamers(prefix="exercise_switch_rotamers"): pdb_str_met = """ ATOM 1 N MET B 37 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA MET B 37 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C MET B 37 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O MET B 37 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB MET B 37 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG MET B 37 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD MET B 37 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE MET B 37 8.775 5.000 10.645 1.00 10.00 C TER END """ pi = iotbx.pdb.input(source_info=None, lines=pdb_str_met) ph_met_in = pi.construct_hierarchy() pi.write_pdb_file(file_name="%s.pdb"%prefix) for o in ["max_distant","min_distant","exact_match","fix_outliers"]: cmd = " ".join([ "phenix.pdbtools", "%s.pdb"%prefix, "switch_rotamers=%s"%o, "output.file_name=%s_%s.pdb"%(o,prefix)]) print(cmd) run_command(command=cmd, verbose=False) def exercise_segid_manipulations(prefix="tst_pdbtools_ex_segid"): pdb_str_met = """ ATOM 1 N MET B 37 7.525 5.296 6.399 1.00 10.00 A N ATOM 2 CA MET B 37 6.533 6.338 6.634 1.00 10.00 A C ATOM 3 C MET B 37 6.175 7.044 5.330 1.00 10.00 A C ATOM 4 O MET B 37 5.000 7.200 5.000 1.00 10.00 A O ATOM 5 CB MET B 37 7.051 7.351 7.655 1.00 10.00 A C ATOM 6 CG MET B 37 7.377 6.750 9.013 1.00 10.00 A C ATOM 7 SD MET B 37 8.647 5.473 8.922 1.00 10.00 A S ATOM 8 CE MET B 37 8.775 5.000 10.645 1.00 10.00 A C TER END """ pi = iotbx.pdb.input(source_info=None, lines=pdb_str_met) 
ph_met_in = pi.construct_hierarchy() pi.write_pdb_file(file_name="%s.pdb"%prefix) for o in ["clear_seg_id", "set_seg_id_to_chain_id"]: cmd = " ".join([ "phenix.pdbtools", "%s.pdb"%prefix, "%s=True" % o, "output.file_name=%s_%s.pdb"%(o,prefix)]) print(cmd) run_command(command=cmd, verbose=False) pdb_new = open("%s_%s.pdb"%(o,prefix)).read() if o == "clear_seg_id": assert (pdb_new == """\ ATOM 1 N MET B 37 7.525 5.296 6.399 1.00 10.00 N ATOM 2 CA MET B 37 6.533 6.338 6.634 1.00 10.00 C ATOM 3 C MET B 37 6.175 7.044 5.330 1.00 10.00 C ATOM 4 O MET B 37 5.000 7.200 5.000 1.00 10.00 O ATOM 5 CB MET B 37 7.051 7.351 7.655 1.00 10.00 C ATOM 6 CG MET B 37 7.377 6.750 9.013 1.00 10.00 C ATOM 7 SD MET B 37 8.647 5.473 8.922 1.00 10.00 S ATOM 8 CE MET B 37 8.775 5.000 10.645 1.00 10.00 C TER END """), pdb_new else: assert (pdb_new == """\ ATOM 1 N MET B 37 7.525 5.296 6.399 1.00 10.00 B N ATOM 2 CA MET B 37 6.533 6.338 6.634 1.00 10.00 B C ATOM 3 C MET B 37 6.175 7.044 5.330 1.00 10.00 B C ATOM 4 O MET B 37 5.000 7.200 5.000 1.00 10.00 B O ATOM 5 CB MET B 37 7.051 7.351 7.655 1.00 10.00 B C ATOM 6 CG MET B 37 7.377 6.750 9.013 1.00 10.00 B C ATOM 7 SD MET B 37 8.647 5.473 8.922 1.00 10.00 B S ATOM 8 CE MET B 37 8.775 5.000 10.645 1.00 10.00 B C TER END """), pdb_new def exercise(args): exercise_switch_rotamers() exercise_mmcif_support_2() exercise_basic() exercise_multiple() exercise_no_cryst1() exercise_renumber_residues() exercise_change_of_basis() exercise_move_waters() exercise_remove_alt_confs() exercise_truncate_to_polyala() exercise_convert_met_to_semet() exercise_set_charge() exercise_neutralize_scatterers() exercise_remove_atoms() exercise_mmcif_support() exercise_segid_manipulations() if (__name__ == "__main__"): exercise(sys.argv[1:])
49.803888
105
0.56599
10,699
58,918
2.994579
0.087391
0.029683
0.021068
0.029495
0.746496
0.685633
0.659353
0.638035
0.620119
0.611754
0
0.236881
0.341814
58,918
1,182
106
49.846024
0.589314
0.001595
0
0.453499
0
0.313552
0.605356
0.040981
0
0
0
0
0.108946
1
0.028344
false
0
0.014172
0.000886
0.044287
0.030115
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4c7b2bc66e596424f7eb9532ab872f61d109e9c5
182
py
Python
zhaquirks/samjin/multi.py
WolfRevo/zha-device-handlers
0fa4ca1c03c611be0cf2c38c4fec2a197e3dd1d3
[ "Apache-2.0" ]
213
2020-04-16T10:48:31.000Z
2022-03-30T20:48:07.000Z
zhaquirks/samjin/multi.py
WolfRevo/zha-device-handlers
0fa4ca1c03c611be0cf2c38c4fec2a197e3dd1d3
[ "Apache-2.0" ]
1,088
2020-04-03T13:23:29.000Z
2022-03-31T23:55:03.000Z
zhaquirks/samjin/multi.py
WolfRevo/zha-device-handlers
0fa4ca1c03c611be0cf2c38c4fec2a197e3dd1d3
[ "Apache-2.0" ]
280
2020-04-24T08:44:27.000Z
2022-03-31T12:58:04.000Z
"""Samjin Multi Quirk.""" # <SimpleDescriptor endpoint=1 profile=260 device_type=1026 # device_version=0 # input_clusters=[0, 1, 3, 32, 1026, 1280, 64514] # output_clusters=[3, 25]>
30.333333
59
0.71978
27
182
4.703704
0.777778
0
0
0
0
0
0
0
0
0
0
0.186335
0.115385
182
5
60
36.4
0.602484
0.923077
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
d5c754789cfaa3241cc60e067e2adee1597c3214
340
py
Python
runtests.py
frmdstryr/Willow
be53837224addc6056a754b4d3a3745e9cc03bd9
[ "BSD-3-Clause" ]
111
2017-02-05T18:08:49.000Z
2022-03-11T19:27:34.000Z
runtests.py
frmdstryr/Willow
be53837224addc6056a754b4d3a3745e9cc03bd9
[ "BSD-3-Clause" ]
43
2017-01-27T00:10:02.000Z
2022-02-21T14:44:20.000Z
runtests.py
frmdstryr/Willow
be53837224addc6056a754b4d3a3745e9cc03bd9
[ "BSD-3-Clause" ]
27
2017-01-27T15:55:57.000Z
2021-08-06T10:21:40.000Z
import sys import unittest from tests.test_registry import * from tests.test_pillow import * from tests.test_wand import * from tests.test_image import * if __name__ == '__main__': args = list(sys.argv) if '--opencv' in args: from tests.test_opencv import * args.remove('--opencv') unittest.main(argv=args)
18.888889
39
0.691176
47
340
4.723404
0.404255
0.202703
0.292793
0.256757
0
0
0
0
0
0
0
0
0.205882
340
17
40
20
0.822222
0
0
0
0
0
0.070588
0
0
0
0
0
0
1
0
false
0
0.583333
0
0.583333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
e6c2c9c2bff562168ae3ceae8b47799d6908aa07
256
py
Python
src/openprocurement/tender/cfaua/adapters/tender/validators/minimalstep.py
pontostroy/api
5afdd3a62a8e562cf77e2d963d88f1a26613d16a
[ "Apache-2.0" ]
3
2020-03-13T06:44:23.000Z
2020-11-05T18:25:29.000Z
src/openprocurement/tender/cfaua/adapters/tender/validators/minimalstep.py
pontostroy/api
5afdd3a62a8e562cf77e2d963d88f1a26613d16a
[ "Apache-2.0" ]
2
2021-03-25T23:27:04.000Z
2022-03-21T22:18:15.000Z
src/openprocurement/tender/cfaua/adapters/tender/validators/minimalstep.py
scrubele/prozorro-testing
42b93ea2f25d8cc40e66c596f582c7c05e2a9d76
[ "Apache-2.0" ]
3
2020-10-16T16:25:14.000Z
2021-05-22T12:26:20.000Z
from openprocurement.tender.core.validation import validate_minimalstep class MinimalStepValidate(object): def __init__(self, tender): self.context = tender def __call__(self, cls, data, value): validate_minimalstep(data, value)
25.6
71
0.738281
28
256
6.392857
0.678571
0.212291
0
0
0
0
0
0
0
0
0
0
0.179688
256
9
72
28.444444
0.852381
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
e6cf82452462627841dcc03f3d8f56f734bfc5f7
1,511
py
Python
boltzmann/generative/transforms/noise.py
maccallumlab/BoltzmannGenerator
f6aba2e3602891c2acae92a894716ab9da7fb654
[ "MIT" ]
9
2019-10-02T03:37:49.000Z
2021-04-23T09:18:36.000Z
boltzmann/generative/transforms/noise.py
maccallumlab/BoltzmannGenerator
f6aba2e3602891c2acae92a894716ab9da7fb654
[ "MIT" ]
null
null
null
boltzmann/generative/transforms/noise.py
maccallumlab/BoltzmannGenerator
f6aba2e3602891c2acae92a894716ab9da7fb654
[ "MIT" ]
4
2019-10-23T16:30:54.000Z
2020-07-24T20:03:10.000Z
from .base import Transform import torch class NoiseTransform(Transform): def __init__(self, std): super().__init__() self.std = std def forward(self, inputs, context=None): noise = torch.normal(0, self.std, size=inputs.shape).to(inputs.device) return inputs + noise, torch.zeros(inputs.shape[0], device=inputs.device) def inverse(self, inputs, context=None): noise = torch.normal(0, self.std, size=inputs.shape).to(inputs.device) return inputs + noise, torch.zeros(inputs.shape[0], device=inputs.device) class TwoStageComposite(Transform): def __init__(self, stage1, stage2): super().__init__() self.stage1 = stage1 self.stage2 = stage2 def forward(self, inputs, context=None): x, jac = self.stage1.forward(inputs, context) x, jac2 = self.stage2.forward(x, context) return x, jac + jac2 def inverse(self, inputs, context=None): x, jac = self.stage2.inverse(inputs, context) x, jac2 = self.stage1.inverse(x, context) return x, jac + jac2 def stage1_forward(self, inputs, context=None): return self.stage1.forward(inputs, context) def stage1_inverse(self, inputs, context=None): return self.stage1.inverse(inputs, context) def stage2_forward(self, inputs, context=None): return self.stage2.forward(inputs, context) def stage2_inverse(self, inputs, context=None): return self.stage2.inverse(inputs, context)
32.148936
81
0.663137
192
1,511
5.114583
0.166667
0.185336
0.138493
0.171079
0.705703
0.593686
0.556008
0.266802
0.266802
0.266802
0
0.021997
0.217737
1,511
46
82
32.847826
0.808799
0
0
0.363636
0
0
0
0
0
0
0
0
0
1
0.30303
false
0
0.060606
0.121212
0.666667
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
e6e1eaaca4fb14117607777448e94c018981f068
18
py
Python
testbase/version.py
fossabot/QTAF
22fbe96bc2fcdf22fb7eafa2cc7ab86016561f49
[ "BSD-3-Clause" ]
452
2016-09-27T11:21:00.000Z
2022-03-31T06:11:30.000Z
testbase/version.py
fossabot/QTAF
22fbe96bc2fcdf22fb7eafa2cc7ab86016561f49
[ "BSD-3-Clause" ]
39
2016-09-29T06:14:04.000Z
2022-03-22T04:24:36.000Z
testbase/version.py
fossabot/QTAF
22fbe96bc2fcdf22fb7eafa2cc7ab86016561f49
[ "BSD-3-Clause" ]
129
2016-09-27T11:28:42.000Z
2022-03-17T09:05:16.000Z
version = "5.2.4"
9
17
0.555556
4
18
2.5
1
0
0
0
0
0
0
0
0
0
0
0.2
0.166667
18
1
18
18
0.466667
0
0
0
0
0
0.277778
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e6e20fbc9cd553a3eb4195ebe8276fc03a14b186
10,415
py
Python
tests/test_flowcontrol_streams.py
GeekLiB/aiohttp
6de60d6655d5e0837b7cc3e9f1d29419f4771d37
[ "Apache-2.0" ]
null
null
null
tests/test_flowcontrol_streams.py
GeekLiB/aiohttp
6de60d6655d5e0837b7cc3e9f1d29419f4771d37
[ "Apache-2.0" ]
null
null
null
tests/test_flowcontrol_streams.py
GeekLiB/aiohttp
6de60d6655d5e0837b7cc3e9f1d29419f4771d37
[ "Apache-2.0" ]
null
null
null
import asyncio import unittest from unittest import mock from aiohttp import streams class TestFlowControlStreamReader(unittest.TestCase): def setUp(self): self.stream = mock.Mock(paused=False) self.transp = self.stream.transport self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): self.loop.close() def _make_one(self, allow_pause=True, *args, **kwargs): out = streams.FlowControlStreamReader( self.stream, limit=1, loop=self.loop, *args, **kwargs) out._allow_pause = allow_pause return out def test_read(self): r = self._make_one() r._stream.paused = True r.feed_data(b'da', 2) res = self.loop.run_until_complete(r.read(1)) self.assertEqual(res, b'd') self.assertTrue(self.transp.resume_reading.called) def test_pause_on_read(self): r = self._make_one() r.feed_data(b'test', 4) r._stream.paused = False res = self.loop.run_until_complete(r.read(1)) self.assertEqual(res, b't') self.assertTrue(self.transp.pause_reading.called) def test_readline(self): r = self._make_one() r._stream.paused = True r.feed_data(b'data\n', 5) res = self.loop.run_until_complete(r.readline()) self.assertEqual(res, b'data\n') self.assertTrue(self.transp.resume_reading.called) def test_readany(self): r = self._make_one() r._stream.paused = True r.feed_data(b'data', 4) res = self.loop.run_until_complete(r.readany()) self.assertEqual(res, b'data') self.assertTrue(self.transp.resume_reading.called) def test_readexactly(self): r = self._make_one() r._stream.paused = True r.feed_data(b'data', 4) res = self.loop.run_until_complete(r.readexactly(3)) self.assertEqual(res, b'dat') self.assertTrue(self.transp.resume_reading.called) def test_feed_data(self): r = self._make_one() r._stream.paused = False r.feed_data(b'datadata', 8) self.assertTrue(self.transp.pause_reading.called) def test_feed_data_no_allow_pause(self): r = self._make_one() r._allow_pause = False r._stream.paused = False r.feed_data(b'datadata', 8) self.assertFalse(self.transp.pause_reading.called) def 
test_read_nowait(self): r = self._make_one() r._stream.paused = False r.feed_data(b'data1', 5) r.feed_data(b'data2', 5) r.feed_data(b'data3', 5) self.assertTrue(self.stream.paused) res = self.loop.run_until_complete(r.read(5)) self.assertTrue(res == b'data1') # _buffer_size > _buffer_limit self.assertTrue(self.transp.pause_reading.call_count == 1) self.assertTrue(self.transp.resume_reading.call_count == 0) self.assertTrue(self.stream.paused) r._stream.paused = False res = r.read_nowait(5) self.assertTrue(res == b'data2') # _buffer_size > _buffer_limit self.assertTrue(self.transp.pause_reading.call_count == 2) self.assertTrue(self.transp.resume_reading.call_count == 0) self.assertTrue(self.stream.paused) res = r.read_nowait(5) self.assertTrue(res == b'data3') # _buffer_size < _buffer_limit self.assertTrue(self.transp.pause_reading.call_count == 2) self.assertTrue(self.transp.resume_reading.call_count == 1) self.assertTrue(not self.stream.paused) res = r.read_nowait(5) self.assertTrue(res == b'') # _buffer_size < _buffer_limit self.assertTrue(self.transp.pause_reading.call_count == 2) self.assertTrue(self.transp.resume_reading.call_count == 1) self.assertTrue(not self.stream.paused) def test_rudimentary_transport(self): self.transp.resume_reading.side_effect = NotImplementedError() self.transp.pause_reading.side_effect = NotImplementedError() self.stream.paused = True r = self._make_one() self.assertTrue(self.transp.pause_reading.call_count == 0) self.assertTrue(self.transp.resume_reading.call_count == 1) self.assertTrue(self.stream.paused) r.feed_data(b'data', 4) res = self.loop.run_until_complete(r.read(4)) self.assertTrue(self.transp.pause_reading.call_count == 0) self.assertTrue(self.transp.resume_reading.call_count == 2) self.assertTrue(self.stream.paused) self.assertTrue(res == b'data') self.stream.paused = False r.feed_data(b'data', 4) res = self.loop.run_until_complete(r.read(1)) self.assertTrue(self.transp.pause_reading.call_count == 2) 
self.assertTrue(self.transp.resume_reading.call_count == 2) self.assertTrue(not self.stream.paused) self.assertTrue(res == b'd') class FlowControlMixin: def test_resume_on_init(self): stream = mock.Mock() stream.paused = True streams.FlowControlDataQueue(stream, limit=1, loop=self.loop) self.assertTrue(stream.transport.resume_reading.called) self.assertFalse(stream.paused) def test_no_transport_in_init(self): stream = mock.Mock() stream.paused = True stream.transport = None streams.FlowControlDataQueue(stream, limit=1, loop=self.loop) self.assertTrue(stream.paused) def test_feed_no_waiter(self): out = self._make_one() out.feed_data(object(), 100) self.assertTrue(self.stream.transport.pause_reading.called) def test_feed_no_transport(self): self.stream.transport = None out = self._make_one() self.stream.paused = False out.feed_data(object(), 100) self.assertFalse(self.stream.paused) def test_feed_with_waiter(self): self.stream.paused = False out = self._make_one() read_task = asyncio.Task(out.read(), loop=self.loop) def cb(): out.feed_data(object(), 100) self.loop.call_soon(cb) self.loop.run_until_complete(read_task) self.assertFalse(self.stream.transport.pause_reading.called) self.assertFalse(self.stream.paused) def test_resume_on_read(self): out = self._make_one() out.feed_data(object(), 100) self.assertTrue(self.stream.paused) self.loop.run_until_complete(out.read()) self.assertTrue(self.stream.transport.resume_reading.called) self.assertFalse(self.stream.paused) def test_resume_on_read_no_transport(self): item = object() out = self._make_one() out.feed_data(item, 100) self.assertTrue(self.stream.paused) self.stream.transport = None res = self.loop.run_until_complete(out.read()) self.assertIs(res, item) self.assertTrue(self.stream.paused) def test_no_resume_on_read(self): out = self._make_one() out.feed_data(object(), 100) out.feed_data(object(), 100) out.feed_data(object(), 100) self.assertTrue(self.stream.paused) self.stream.transport.reset_mock() 
self.loop.run_until_complete(out.read()) self.assertFalse(self.stream.transport.resume_reading.called) self.assertTrue(self.stream.paused) def test_pause_on_read(self): out = self._make_one() out._buffer.append((object(), 100)) out._buffer.append((object(), 100)) out._buffer.append((object(), 100)) out._size = 300 self.stream.paused = False self.loop.run_until_complete(out.read()) self.assertTrue(self.stream.transport.pause_reading.called) self.assertTrue(self.stream.paused) def test_no_pause_on_read(self): item = object() out = self._make_one() out._buffer.append((item, 100)) out._size = 100 self.stream.paused = False res = self.loop.run_until_complete(out.read()) self.assertIs(res, item) self.assertFalse(self.stream.transport.pause_reading.called) self.assertFalse(self.stream.paused) def test_no_pause_on_read_no_transport(self): item = object() out = self._make_one() out._buffer.append((item, 100)) out._buffer.append((object(), 100)) out._buffer.append((object(), 100)) out._size = 300 self.stream.paused = False self.stream.transport = None res = self.loop.run_until_complete(out.read()) self.assertIs(res, item) self.assertFalse(self.stream.paused) class TestFlowControlDataQueue(unittest.TestCase, FlowControlMixin): def setUp(self): self.stream = mock.Mock() self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): self.loop.close() def _make_one(self, *args, **kwargs): out = streams.FlowControlDataQueue( self.stream, limit=1, loop=self.loop, *args, **kwargs) out._allow_pause = True return out class TestFlowControlChunksQueue(unittest.TestCase, FlowControlMixin): def setUp(self): self.stream = mock.Mock() self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): self.loop.close() def _make_one(self, *args, **kwargs): out = streams.FlowControlChunksQueue( self.stream, limit=1, loop=self.loop, *args, **kwargs) out._allow_pause = True return out def test_read_eof(self): out = self._make_one() read_task = 
asyncio.Task(out.read(), loop=self.loop) def cb(): out.feed_eof() self.loop.call_soon(cb) self.loop.run_until_complete(read_task) self.assertTrue(out.at_eof()) def test_read_until_eof(self): item = object() out = self._make_one() out.feed_data(item, 1) out.feed_eof() data = self.loop.run_until_complete(out.read()) self.assertIs(data, item) thing = self.loop.run_until_complete(out.read()) self.assertEqual(thing, b'') self.assertTrue(out.at_eof()) def test_readany(self): out = self._make_one() self.assertIs(out.read.__func__, out.readany.__func__)
31.656535
70
0.63975
1,344
10,415
4.738095
0.078869
0.103329
0.096106
0.075377
0.867462
0.81093
0.78973
0.730999
0.688756
0.598932
0
0.012783
0.241383
10,415
328
71
31.753049
0.793191
0.011042
0
0.689516
0
0
0.009131
0
0
0
0
0
0.274194
1
0.137097
false
0
0.016129
0
0.181452
0
0
0
0
null
0
0
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e6f514050b3f35eb259b4d982e0316b36f1adfe1
345
py
Python
live_de_python_miniserie_funcoes/example01.py
flaviogf/Exemplos
fc666429f6e90c388e201fb7b7d5801e3c25bd25
[ "MIT" ]
null
null
null
live_de_python_miniserie_funcoes/example01.py
flaviogf/Exemplos
fc666429f6e90c388e201fb7b7d5801e3c25bd25
[ "MIT" ]
5
2019-12-29T04:58:10.000Z
2021-03-11T04:35:15.000Z
live_de_python_miniserie_funcoes/example01.py
flaviogf/Exemplos
fc666429f6e90c388e201fb7b7d5801e3c25bd25
[ "MIT" ]
null
null
null
def my_sum(x, y): return x + y def my_sub(x, y): return x - y def my_mul(x, y): return x * y def my_div(x, y): return x / y calc = { 'sum': my_sum, 'sub': my_sub, 'mul': my_mul, 'div': my_div } print(calc['sum'](10, 10)) print(calc['sub'](10, 10)) print(calc['mul'](10, 10)) print(calc['div'](10, 10))
11.5
26
0.521739
65
345
2.646154
0.184615
0.093023
0.186047
0.209302
0.319767
0.261628
0.261628
0
0
0
0
0.062992
0.263768
345
29
27
11.896552
0.614173
0
0
0
0
0
0.069767
0
0
0
0
0
0
1
0.222222
false
0
0
0.222222
0.444444
0.222222
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
fc20f0180cb66067ded061df156f0e52d88eb88a
12,761
py
Python
data_io/basepy.py
zbhoscar/submax
725c4de09e182f9cd1c4afe93ad175464f5c7cea
[ "Apache-2.0" ]
null
null
null
data_io/basepy.py
zbhoscar/submax
725c4de09e182f9cd1c4afe93ad175464f5c7cea
[ "Apache-2.0" ]
null
null
null
data_io/basepy.py
zbhoscar/submax
725c4de09e182f9cd1c4afe93ad175464f5c7cea
[ "Apache-2.0" ]
null
null
null
import random
import cv2
import os
import numpy as np
import multiprocessing as mp
import math
import matplotlib.pyplot as plt
import json
import copy


def get_remaining_to_multi(todo_file_list, done_file_list, list_txt_path=None, divide_num=1, if_print=False):
    """Return the files still to process and the same set split into divide_num shuffled sub-lists.

    :param todo_file_list: candidate file paths
    :param done_file_list: already-processed file paths; matched by substring test of each
        candidate's basename stem against str(done_file_list)
    :param list_txt_path: optional ``%d``-style path template; when given, sub-list *i*
        is appended line by line to ``list_txt_path % i``
    :param divide_num: number of sub-lists to split the remaining files into
    :param if_print: print a progress summary
    :return: (remaining files in original order, list of divide_num shuffled sub-lists)
    """
    already_str = str(done_file_list)
    ###
    # Basename in todo_file_path SAME == in already_path,
    # or in todo_file_path PART OF in already_path.
    ###
    remaining_tfr_list = [i for i in todo_file_list if os.path.basename(i).split('.')[0] not in already_str]
    # Keep an unshuffled copy to return; the working list is shuffled in place below.
    remaining_tfr_list_original = copy.deepcopy(remaining_tfr_list)
    random.shuffle(remaining_tfr_list)
    name_list_in_num = divide_list(remaining_tfr_list, divide_num)
    if list_txt_path:
        _ = [[write_txt_add_lines(list_txt_path % index, line) for line in name_list]
             for index, name_list in enumerate(name_list_in_num)]
    if if_print:
        print('Files: %d in all, %d is done, %d remaining'
              % (len(todo_file_list), len(done_file_list), len(remaining_tfr_list)), '...')
        if list_txt_path:
            print('Split list in %d .txt, writen in %s' % (divide_num, list_txt_path))
        else:
            print('Split list in %d lists, no txt written.' % divide_num)
    return remaining_tfr_list_original, name_list_in_num


def get_2tier_folder_path_list(dataset_path, suffix_in_2tier=''):
    """
    data structure: DATASET/CLASS/SAMPLE/frames_from_videos
        eg. video dataset_path: '/absolute/datasets/anoma'
            video imagelized:   '/absolute/datasets/anoma/Abuse/Abuse001_x264/00001.jpg', '.../00002.jpg', ...
    :param dataset_path: dataset path
    :return: ['/absolute/datasets/anoma/Stealing075_x264',
              '/absolute/datasets/anoma/Stealing061_x264',
              '/absolute/datasets/anoma/Stealing108_x264', ...]
    """
    out_list = []
    for class_folder_name in os.listdir(dataset_path):
        class_folder_path = os.path.join(dataset_path, class_folder_name)
        # Skip stray files at the class level; only descend into directories.
        if os.path.isdir(class_folder_path):
            out_list.extend(get_1tier_file_path_list(class_folder_path, suffix=suffix_in_2tier))
    return out_list


def get_1tier_file_path_list(path, suffix=''):
    """
    Get file_path_list in a folder, suffix='' means no limit to the suffix
    :param path: path/files.tfrecord
    :param suffix: '.txt' of 'text.txt'
    :return: ['/absolute/datasets/anoma_1632/Abuse_Abuse001_x264.tfrecord',
              '/absolute/datasets/anoma_1632/Abuse_Abuse002_x264.tfrecord', ...]
    """
    out_list = []
    for file_name in os.listdir(path):
        if file_name.endswith(suffix):
            file_path = os.path.join(path, file_name)
            out_list.append(file_path)
    return out_list


def get_2tier_dict_list(dict_file):
    """
    Get the 2-tier dict keys in one:
    :param dict_file:
        {'video1': {1: [np.random.rand(4096), np.random.rand(4096)],
                    2: [np.random.rand(4096), np.random.rand(4096), np.random.rand(4096)]},
         'video2': {3: [np.random.rand(4096), np.random.rand(4096), np.random.rand(4096), np.random.rand(4096)],
                    4: [np.random.rand(4096), np.random.rand(4096), np.random.rand(4096), np.random.rand(4096),
                        np.random.rand(4096)]},
        }
    :return: [['video1', 1], ['video1', 2], ['video2', 3], ['video2', 4]]
    """
    video_segment_list = []
    for class_video_name in dict_file.keys():
        for segment_index in dict_file[class_video_name].keys():
            video_segment_list.append([class_video_name, segment_index])
    return video_segment_list


def sort_list_by_name(file_list, sep_sign='.', sep_index=0, reverse=False):
    # Sort by the integer value of the sep_index-th piece of each name,
    # e.g. '12.jpg' sorts numerically by 12 rather than lexically.
    return sorted(file_list, key=lambda x: int(x.split(sep_sign)[sep_index]), reverse=reverse)


def repeat_list_for_epochs(sample_list, epoch_num=10000, shuffle=True):
    """
    Repeat list for epoch_num times, especially for feed_dict
    :param sample_list: sample list
    :param epoch_num: number of epoch
    :param shuffle: make each epoch in orig or shuffle model
    :return: (sample_list with shuffle or not) * epoch_num
    """
    if shuffle:
        # Re-shuffle the index order once per epoch so every epoch is a fresh permutation.
        shuffle_index = [i for i in range(len(sample_list))]
        sample_queue = []
        for i in range(epoch_num):
            random.shuffle(shuffle_index)
            sample_queue.extend([sample_list[i] for i in shuffle_index])
    else:
        sample_queue = sample_list * epoch_num
    return sample_queue


def read_txt_lines2list(file_path, sep=',,'):
    """
    EXAMPLE:    str1,,str2,,str3\n
                str1,,str2,,str3\n
    :param file_path: TXT path
    :param sep: separate symbol between elements
    :return: [[str1, str2, str3], [str1, str2, str3]]
    """
    with open(file_path, 'r') as f:
        contents = f.readlines()
    return [i.strip().split(sep) for i in contents]


def write_txt_add_lines(file_path, *args, sep=',,'):
    # Join *args with sep, terminate with a newline after the last item,
    # and append the resulting single line to file_path.
    line_string = ''
    for j, i in enumerate(args):
        line_string = line_string + i + '\n' if j == len(args) - 1 else line_string + i + sep
    with open(file_path, 'a') as f:
        f.writelines(line_string)
    return line_string


def divide_list(full_list, num):
    """
    Divide a list in to NUM pieces, for parallel processing
    :param full_list: [a,b,c,d,e,f,g]
    :param num: [3]
    :return: [[a,d,g],[b,e],[c,f]
    """
    split_list = [[] for _ in range(num)]
    # Round-robin distribution: element j goes to bucket j % num.
    _ = [split_list[j % num].append(full_list[j]) for j in range(len(full_list))]
    return split_list


def check_or_create_path(path, create=True, show=False):
    """
    Given a path, check if the path exists or create it
    :param path: given path
    :param create: create or not
    :param show: print the path status
    :return: path in one str
    """
    if not os.path.exists(path) and create:
        os.makedirs(path)
        _ = print('Path %s does not exist and has just been created' % path) if show else None
    elif not os.path.exists(path) and not create:
        _ = print('Path %s does not exist' % path) if show else None
    else:
        _ = print('Path %s already exists' % path) if show else None
    return path


def cv2_imread_astype(i, path='./', astype='float32'):
    """ get class numpy after cv2.imread """
    np_img = cv2.imread(os.path.join(path, i))
    if astype == 'float32':
        # Normalize 8-bit pixel values into [0, 1].
        np_img = np_img.astype(astype) / 255
    elif astype == 'uint8':
        np_img = np_img
    else:
        raise ValueError('Wrong astype of %s' % astype)
    return np_img


def np_stackimg_crop(innp, crop_size=(224, 224), method='randomcrop'):
    """
    for eg. [height 224, weight 224, channel maybe > 3 eg=9]
    :method:    normdistcrop: crop around central by normal distribution
                randomcrop:   just random for each edge
    """
    if method == 'normdistcrop':
        # Mean 2.5, 2-D standard normal distribution (translated from the
        # original pinyin comment), clamped to [0, 5] then scaled so the
        # crop origin is biased toward the image center.
        s = np.random.standard_normal(2) + 2.5
        crop_start = [int(round(max(min(5, s[0]), 0) * (innp.shape[0] - crop_size[0]) / 5)),
                      int(round(max(min(5, s[1]), 0) * (innp.shape[1] - crop_size[1]) / 5))]
    elif method == 'randomcrop':
        crop_start = [random.randint(0, innp.shape[j] - i) for j, i in enumerate(crop_size)]
    else:
        raise ValueError('Wrong method in np_stackimg_crop: %s' % method)
    return innp[crop_start[0]:crop_start[0] + crop_size[0], crop_start[1]:crop_start[1] + crop_size[1], :]


def np_stackimg_resize(innp, resize=(224, 224), method='minlenbyratio'):
    """
    for eg. [height 224, weight 224, channel maybe > 3 eg=9]
    :method:    minlenbyratio: set edge's min len for orig img, keep ratio.
                               eg. min=5, [5,1]->[25,5], [1,5]->[5,25]
                maxlenbyratio: set edge's max len for orig img, keep ratio.
                               eg. max=5, [10,15]->[3,5], [15,10]->[5,3]
                absolute:      set absolute shape for orig img. eg. abs=[5,5], [10,15]->[5,5]
    """
    shape = innp.shape
    if method == 'minlenbyratio':
        ratio = max(max([resize[i] / shape[i] for i in [0, 1]]), 1)
        new_shape = (int(shape[0] * ratio), int(shape[1] * ratio))
    elif method == 'maxlenbyratio':
        ratio = min(min([resize[i] / shape[i] for i in [0, 1]]), 1)
        new_shape = (int(shape[0] * ratio), int(shape[1] * ratio))
    elif method == 'absolute':
        # 331 is just a sentinel value != 1 so the cv2.resize branch below always runs.
        ratio, new_shape = 331, resize
    else:
        raise ValueError('Wrong method for np_stackimg_resize: %s' % method)
    # ratio == 1 means no change needed; note cv2.resize takes (width, height),
    # hence the [::-1] reversal of the (height, width) shape.
    return innp if ratio == 1 else cv2.resize(innp, new_shape[::-1])


def non_output_multiprocessing(func, todo_list, *args, num=int(mp.cpu_count())):
    """
    Do not need func output, use this to parallel the func
    usage:  non_output_multiprocessing(func1, divided_list, num=int(mp.cpu_count()))
            non_output_multiprocessing(func1, divided_list, var1, var2, num=int(mp.cpu_count()))
            non_output_multiprocessing(func2, divided_list, num=int(mp.cpu_count()))
    :param func: MUST be in the FORM to suit uncertain length of *args:
            def func1(divided_list, flag1=var1, flag2=var2):
            def func2(divided_list):
    :param todo_list: full list to process
    :param num: parallel N times
    :return: none
    """
    # Never spawn more workers than there are items to process.
    ext_num = min(len(todo_list), num)
    split_list = divide_list(todo_list, ext_num)
    p = mp.Pool(ext_num)
    for em in split_list:
        params = [em]
        params.extend(args)
        p.apply_async(func, args=tuple(params))
    p.close()
    p.join()


def get_overlap_start_index(list_length, clip_length=16, overlap=0):
    """
    get clip start index in a list, overlap is the minimum overlap num
    :param list_length: the length of the list
    :param clip_length: the length of the clip
    :param overlap: minimum overlap in this processing
    :return: a list of clip start index, unfixed index number
    """
    clips_number = max(1, math.ceil((list_length - clip_length * overlap) / (clip_length - clip_length * overlap)))
    # Spread clip starts evenly so the first starts at 0 and the last ends at list_length.
    overlap_start = [round(i * (list_length - clip_length) / max(1, (clips_number - 1))) for i in range(clips_number)]
    return overlap_start


def get_segment_start_index(list_length, segment_num=32, clip_length=16):
    """
    Separate list_length in to segment_num of segments, each as least clip_length
    :param list_length: 480
    :param segment_num: 32
    :param clip_length: 16
    :return: the start index in the list
    """
    if list_length >= segment_num * clip_length:
        # Enough room: evenly spaced, non-overlapping segment starts.
        segment_start_index = [round(i * list_length / segment_num) for i in range(segment_num)] + [list_length]
    else:
        # Too short: segments overlap so each can still span clip_length items.
        segment_start_index = [round(i * (list_length - clip_length) / (segment_num - 1))
                               for i in range(segment_num)] + [list_length]
    return segment_start_index


def Ht(t):
    """
    t-th Harmonic number
    :param t: 0 ~ ∞
    :return: Σ1~n 1/t, special: Ht(0) = 0
             eg. 0, 1, 1.5, 1.8333, 2.0833
    """
    out = sum([1 / k for k in range(1, t + 1)])
    return out


class DictCtrl(object):
    """
    Use default_dict to add keys in a program:  test = DictCtrl(default_dict)
    encode and save:  encoded = test.save2path(json_path=None, **kwargs)  # None for do not save, else to a path
    read and decode:  decoded = test.read4path(json_path, decode=True)    # decode for a new dict_dict, else just json_path
    """
    def __init__(self, default_dict):
        # Template dict whose keys are overlaid by per-call kwargs.
        self.default_dict = default_dict

    def update_keys(self, kwargs):
        # Overlay kwargs onto a deep copy of the defaults; the defaults stay untouched.
        temp = copy.deepcopy(self.default_dict)
        for i in kwargs.keys():
            temp[i] = kwargs[i]
        return temp

    def encode2dict(self, **kwargs):
        # Keyword form of update_keys.
        return self.update_keys(kwargs)

    def decode2dict(self, kwargs):
        # Positional-dict form of update_keys.
        return self.update_keys(kwargs)

    def dict2json(self, d, json_path):
        # Serialize d to json_path.
        with open(json_path, 'w') as f:
            json.dump(d, f)

    def json2dict(self, json_path):
        # Load and return the dict stored at json_path.
        with open(json_path, 'r') as f:
            return json.load(f)

    def save2path(self, json_path=None, **kwargs):
        # Build defaults+kwargs dict; persist it only when a path is given.
        d = self.encode2dict(**kwargs)
        if json_path is not None:
            self.dict2json(d, json_path)
        return d

    def read4path(self, json_path, decode=True):
        # Read a saved dict; optionally overlay it on the defaults again.
        d = self.json2dict(json_path)
        if decode:
            d = self.decode2dict(d)
        return d
12,761
12,761
0.615939
1,816
12,761
4.137665
0.180066
0.01597
0.022358
0.029811
0.196566
0.142534
0.109396
0.089167
0.055629
0.055629
0
0.035185
0.265026
12,761
1
12,761
12,761
0.76586
0.357652
0
0.125749
0
0
0.0539
0
0
0
0
2
0
1
0.149701
false
0
0.053892
0.017964
0.341317
0.047904
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
4
fc2710344f44c1677d073358e73a7eac595e189d
32
py
Python
01-logica-de-programacao-e-algoritmos/Aula 04/4 Estrutura de repeticao for (para)/ex01.py
rafaelbarretomg/Uninter
1f84b0103263177122663e991db3a8aeb106a959
[ "MIT" ]
2
2021-06-14T02:21:18.000Z
2021-09-04T03:07:16.000Z
01-logica-de-programacao-e-algoritmos/Aula 04/4 Estrutura de repeticao for (para)/ex01.py
rafaelbarretomg/Uninter
1f84b0103263177122663e991db3a8aeb106a959
[ "MIT" ]
null
null
null
01-logica-de-programacao-e-algoritmos/Aula 04/4 Estrutura de repeticao for (para)/ex01.py
rafaelbarretomg/Uninter
1f84b0103263177122663e991db3a8aeb106a959
[ "MIT" ]
2
2021-09-04T03:07:17.000Z
2022-03-23T04:24:00.000Z
# Print the integers 0 through 5, one per line.
for number in range(6):
    print(number)
10.666667
18
0.5625
7
32
2.571429
0.857143
0
0
0
0
0
0
0
0
0
0
0.043478
0.28125
32
2
19
16
0.73913
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
fc2b5f0fcb86593ef8de2f1bba755b877e5955dd
289
py
Python
lib/python3.8/site-packages/ansible_collections/community/crypto/tests/integration/targets/x509_certificate_info/test_plugins/jinja_compatibility.py
cjsteel/python3-venv-ansible-2.10.5
c95395c4cae844dc66fddde9b4343966f4b2ecd5
[ "Apache-1.1" ]
null
null
null
lib/python3.8/site-packages/ansible_collections/community/crypto/tests/integration/targets/x509_certificate_info/test_plugins/jinja_compatibility.py
cjsteel/python3-venv-ansible-2.10.5
c95395c4cae844dc66fddde9b4343966f4b2ecd5
[ "Apache-1.1" ]
null
null
null
lib/python3.8/site-packages/ansible_collections/community/crypto/tests/integration/targets/x509_certificate_info/test_plugins/jinja_compatibility.py
cjsteel/python3-venv-ansible-2.10.5
c95395c4cae844dc66fddde9b4343966f4b2ecd5
[ "Apache-1.1" ]
null
null
null
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


def compatibility_in_test(a, b):
    """Jinja2 ``in`` test shim: report whether *a* is a member of *b*."""
    result = a in b
    return result


class TestModule:
    """Ansible math jinja2 tests."""

    def tests(self):
        """Return the mapping of test names to callables exposed by this plugin."""
        exported = dict()
        exported['in'] = compatibility_in_test
        return exported
18.0625
66
0.650519
34
289
5.117647
0.676471
0.172414
0.218391
0
0
0
0
0
0
0
0
0.004695
0.262976
289
15
67
19.266667
0.812207
0.086505
0
0
0
0
0.007813
0
0
0
0
0
0
1
0.222222
false
0
0.111111
0.222222
0.666667
0.111111
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
fc41cff89e6ce4e9c9592f05d49bcb0e26f54a3f
188
py
Python
scikits/statsmodels/datasets/__init__.py
escheffel/statsmodels
bc70147c4c7ea00b6ac7256bbaf107902983c189
[ "BSD-3-Clause" ]
2
2017-01-05T22:44:37.000Z
2018-04-26T08:34:00.000Z
scikits/statsmodels/datasets/__init__.py
changhiskhan/statsmodels
af26395e8b75b112ae7b3099532aefd8d002b8ca
[ "BSD-3-Clause" ]
null
null
null
scikits/statsmodels/datasets/__init__.py
changhiskhan/statsmodels
af26395e8b75b112ae7b3099532aefd8d002b8ca
[ "BSD-3-Clause" ]
null
null
null
# Deprecated compatibility shim: the old ``scikits.statsmodels`` namespace now
# just forwards to the standalone ``statsmodels`` package.
import warnings

# Warn every importer that this namespace is scheduled for removal in 0.5.
warnings.warn('scikits.statsmodels namespace is deprecated and will be '
              'removed in 0.5, please use statsmodels instead')

# Re-export the datasets subpackage from its new home.
from statsmodels.datasets import *
37.6
72
0.75
24
188
5.875
0.833333
0
0
0
0
0
0
0
0
0
0
0.013072
0.18617
188
4
73
47
0.908497
0
0
0
0
0
0.542553
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
fc67129bc2aa01b23afc86c4d4b4ffb8911d38a0
160
py
Python
zcrmsdk/src/com/zoho/crm/api/record/mass_update_action_response.py
zoho/zohocrm-python-sdk-2.0
3a93eb3b57fed4e08f26bd5b311e101cb2995411
[ "Apache-2.0" ]
null
null
null
zcrmsdk/src/com/zoho/crm/api/record/mass_update_action_response.py
zoho/zohocrm-python-sdk-2.0
3a93eb3b57fed4e08f26bd5b311e101cb2995411
[ "Apache-2.0" ]
null
null
null
zcrmsdk/src/com/zoho/crm/api/record/mass_update_action_response.py
zoho/zohocrm-python-sdk-2.0
3a93eb3b57fed4e08f26bd5b311e101cb2995411
[ "Apache-2.0" ]
null
null
null
from abc import ABC, abstractmethod


class MassUpdateActionResponse(ABC):
    """Abstract marker base for responses returned by a mass-update action."""

    def __init__(self):
        """Creates an instance of MassUpdateActionResponse"""
17.777778
55
0.775
17
160
7.058824
0.823529
0
0
0
0
0
0
0
0
0
0
0
0.14375
160
8
56
20
0.875912
0.29375
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0.25
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
fc7984b4c8e64aa76c18f03d060f6fffbc9e7da5
273
py
Python
mypyapp/errors.py
aminexplo/mypyadventure
8a1a3d4b0fa2bc632da911f025f44bb78ba42e19
[ "MIT" ]
null
null
null
mypyapp/errors.py
aminexplo/mypyadventure
8a1a3d4b0fa2bc632da911f025f44bb78ba42e19
[ "MIT" ]
4
2021-06-08T20:02:39.000Z
2022-03-11T23:51:26.000Z
mypyapp/errors.py
aminexplo/mypyadventure
8a1a3d4b0fa2bc632da911f025f44bb78ba42e19
[ "MIT" ]
null
null
null
from flask import render_template

from . import bp, db


@bp.errorhandler(404)
def not_found_error(error):
    """Render the custom 404 page for unknown URLs."""
    body = render_template('404.html')
    return body, 404


@bp.errorhandler(500)
def internal_error(error):
    """Roll back the failed DB session, then render the custom 500 page."""
    db.session.rollback()
    body = render_template('500.html')
    return body, 500
18.2
43
0.739927
39
273
5.025641
0.487179
0.214286
0.204082
0
0
0
0
0
0
0
0
0.076923
0.142857
273
14
44
19.5
0.760684
0
0
0
0
0
0.058608
0
0
0
0
0
0
1
0.222222
false
0
0.222222
0.111111
0.666667
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
fc850af7fb9fcc5795fe46441a5e052a67d31f94
328
py
Python
login/models.py
PMO-SE/PMO-sys
de7a9ec84a00ec5fa2f9d22e04e4681f39e2b5b2
[ "MIT" ]
null
null
null
login/models.py
PMO-SE/PMO-sys
de7a9ec84a00ec5fa2f9d22e04e4681f39e2b5b2
[ "MIT" ]
null
null
null
login/models.py
PMO-SE/PMO-sys
de7a9ec84a00ec5fa2f9d22e04e4681f39e2b5b2
[ "MIT" ]
null
null
null
from django.db import models # Create your models here. class User(models.Model): id = models.IntegerField(primary_key=True, auto_created=True) username = models.CharField(max_length=255, null=False) password = models.CharField(max_length=255, null=False) identity = models.CharField(max_length=255, null=False)
41
65
0.762195
46
328
5.326087
0.586957
0.183673
0.220408
0.293878
0.440816
0.440816
0.440816
0
0
0
0
0.031579
0.131098
328
8
66
41
0.82807
0.073171
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0.166667
0.166667
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
5da7dbc7961c6b47bd6a77c02a11be9f14886324
381
py
Python
PyFlow/Packages/PyFlowOpenCv/Factories/UINodeFactory.py
wonderworks-software/PyFlowOpenCv
299912a49041b5367522963680edf7c3ad96c222
[ "Apache-2.0" ]
92
2020-03-29T18:18:12.000Z
2022-03-09T02:09:32.000Z
PyFlow/Packages/PyFlowOpenCv/Factories/UINodeFactory.py
bobosky/PyFlowOpenCv
1c02fcbee6f3a1998f81e81d9673beab2fecae6c
[ "Apache-2.0" ]
4
2020-06-27T09:34:19.000Z
2021-01-07T14:31:33.000Z
PyFlow/Packages/PyFlowOpenCv/Factories/UINodeFactory.py
wonderworks-software/PyFlowOpenCv
299912a49041b5367522963680edf7c3ad96c222
[ "Apache-2.0" ]
17
2020-04-05T18:25:37.000Z
2022-01-16T15:20:41.000Z
from PyFlow.UI.Canvas.UINodeBase import UINodeBase
from PyFlow.Packages.PyFlowOpenCv.UI.UIOpenCvBaseNode import UIOpenCvBaseNode
from PyFlow.Packages.PyFlowOpenCv.UI.UICv_TransformNode import UICv_TransformNode


def createUINode(raw_instance):
    """Select the UI wrapper class for *raw_instance* and instantiate it.

    cv_Transform nodes get their dedicated UI class; every other node
    falls back to the generic OpenCV base UI node.
    """
    is_transform = raw_instance.__class__.__name__ == "cv_Transform"
    ui_class = UICv_TransformNode if is_transform else UIOpenCvBaseNode
    return ui_class(raw_instance)
38.1
81
0.860892
45
381
6.933333
0.466667
0.141026
0.115385
0.192308
0.205128
0
0
0
0
0
0
0
0.073491
381
9
82
42.333333
0.883853
0
0
0
0
0
0.031496
0
0
0
0
0
0
1
0.142857
false
0
0.428571
0
0.857143
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
5da9de0670ce5f21947c5bd18e9f04e868b9ad9d
184
py
Python
com/tml/crawler/baidu.py
tianmlin19/FirstPython
e95e74787e333231d2993b8956098535225d09e1
[ "Apache-2.0" ]
null
null
null
com/tml/crawler/baidu.py
tianmlin19/FirstPython
e95e74787e333231d2993b8956098535225d09e1
[ "Apache-2.0" ]
null
null
null
com/tml/crawler/baidu.py
tianmlin19/FirstPython
e95e74787e333231d2993b8956098535225d09e1
[ "Apache-2.0" ]
null
null
null
import urllib.request request = urllib.request.Request('http://www.baidu.com') response = urllib.request.urlopen(request) buff = response.read() html = buff.decode("utf8") print(html)
26.285714
56
0.755435
25
184
5.56
0.6
0.280576
0.28777
0
0
0
0
0
0
0
0
0.005917
0.081522
184
7
57
26.285714
0.816568
0
0
0
0
0
0.12973
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0.166667
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
5dba62127eafce48b21827564079a53d59f393e4
40,083
py
Python
testcases/talib_test.py
keel1982/pyalgotrade
5578596f2442aeb3f1a777a79f82e041c1609f5f
[ "Apache-2.0" ]
2
2015-07-08T08:59:34.000Z
2018-09-12T19:37:16.000Z
testcases/talib_test.py
leeong05/pyalgotrade
5578596f2442aeb3f1a777a79f82e041c1609f5f
[ "Apache-2.0" ]
null
null
null
testcases/talib_test.py
leeong05/pyalgotrade
5578596f2442aeb3f1a777a79f82e041c1609f5f
[ "Apache-2.0" ]
1
2016-12-04T18:27:34.000Z
2016-12-04T18:27:34.000Z
# PyAlgoTrade # # Copyright 2011-2014 Gabriel Martin Becedillas Ruiz # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ .. moduleauthor:: Gabriel Martin Becedillas Ruiz <gabriel.becedillas@gmail.com> """ from pyalgotrade.talibext import indicator from pyalgotrade import bar from pyalgotrade import dataseries from pyalgotrade.dataseries import bards import datetime import unittest import talib # Market data used for regression tests (252 price bars) extracted from ta-lib/src/tools/ta_regtest/test_data.c OPEN_VALUES = [ 92.500000, 91.500000, 95.155000, 93.970000, 95.500000, 94.500000, 95.000000, 91.500000, 91.815000, 91.125000, 93.875000, 97.500000, 98.815000, 92.000000, 91.125000, 91.875000, 93.405000, 89.750000, 89.345000, 92.250000, 89.780000, 87.940000, 87.595000, 85.220000, 83.500000, 83.500000, 81.250000, 85.125000, 88.125000, 87.500000, 85.250000, 86.000000, 87.190000, 86.125000, 89.000000, 88.625000, 86.000000, 85.500000, 84.750000, 85.250000, 84.250000, 86.750000, 86.940000, 89.315000, 89.940000, 90.815000, 91.190000, 91.345000, 89.595000, 91.000000, 89.750000, 88.750000, 88.315000, 84.345000, 83.500000, 84.000000, 86.000000, 85.530000, 87.500000, 88.500000, 90.000000, 88.655000, 89.500000, 91.565000, 92.000000, 93.000000, 92.815000, 91.750000, 92.000000, 91.375000, 89.750000, 88.750000, 85.440000, 83.500000, 84.875000, 98.625000, 96.690000, 102.375000, 106.000000, 104.625000, 102.500000, 104.250000, 104.000000, 106.125000, 106.065000, 105.940000, 105.625000, 108.625000, 
110.250000, 110.565000, 117.000000, 120.750000, 118.000000, 119.125000, 119.125000, 117.815000, 116.375000, 115.155000, 111.250000, 111.500000, 116.690000, 116.000000, 113.620000, 111.750000, 114.560000, 113.620000, 118.120000, 119.870000, 116.620000, 115.870000, 115.060000, 115.870000, 117.500000, 119.870000, 119.250000, 120.190000, 122.870000, 123.870000, 122.250000, 123.120000, 123.310000, 124.000000, 123.000000, 124.810000, 130.000000, 130.880000, 132.500000, 131.000000, 132.500000, 134.000000, 137.440000, 135.750000, 138.310000, 138.000000, 136.380000, 136.500000, 132.000000, 127.500000, 127.620000, 124.000000, 123.620000, 125.000000, 126.370000, 126.250000, 125.940000, 124.000000, 122.750000, 120.000000, 120.000000, 122.000000, 123.620000, 121.500000, 120.120000, 123.750000, 122.750000, 125.000000, 128.500000, 128.380000, 123.870000, 124.370000, 122.750000, 123.370000, 122.000000, 122.620000, 125.000000, 124.250000, 124.370000, 125.620000, 126.500000, 128.380000, 128.880000, 131.500000, 132.500000, 137.500000, 134.630000, 132.000000, 134.000000, 132.000000, 131.380000, 126.500000, 128.750000, 127.190000, 127.500000, 120.500000, 126.620000, 123.000000, 122.060000, 121.000000, 121.000000, 118.000000, 122.000000, 122.250000, 119.120000, 115.000000, 113.500000, 114.000000, 110.810000, 106.500000, 106.440000, 108.000000, 107.000000, 108.620000, 93.000000, 93.750000, 94.250000, 94.870000, 95.500000, 94.500000, 97.000000, 98.500000, 96.750000, 95.870000, 94.440000, 92.750000, 90.500000, 95.060000, 94.620000, 97.500000, 96.000000, 96.000000, 94.620000, 94.870000, 94.000000, 99.000000, 105.500000, 108.810000, 105.000000, 105.940000, 104.940000, 103.690000, 102.560000, 103.440000, 109.810000, 113.000000, 117.000000, 116.250000, 120.500000, 111.620000, 108.120000, 110.190000, 107.750000, 108.000000, 110.690000, 109.060000, 108.500000, 109.870000, 109.120000, 109.690000, 109.560000, 110.440000, 109.690000, 109.190000] HIGH_VALUES = [ 93.250000, 94.940000, 96.375000, 
96.190000, 96.000000, 94.720000, 95.000000, 93.720000, 92.470000, 92.750000, 96.250000, 99.625000, 99.125000, 92.750000, 91.315000, 93.250000, 93.405000, 90.655000, 91.970000, 92.250000, 90.345000, 88.500000, 88.250000, 85.500000, 84.440000, 84.750000, 84.440000, 89.405000, 88.125000, 89.125000, 87.155000, 87.250000, 87.375000, 88.970000, 90.000000, 89.845000, 86.970000, 85.940000, 84.750000, 85.470000, 84.470000, 88.500000, 89.470000, 90.000000, 92.440000, 91.440000, 92.970000, 91.720000, 91.155000, 91.750000, 90.000000, 88.875000, 89.000000, 85.250000, 83.815000, 85.250000, 86.625000, 87.940000, 89.375000, 90.625000, 90.750000, 88.845000, 91.970000, 93.375000, 93.815000, 94.030000, 94.030000, 91.815000, 92.000000, 91.940000, 89.750000, 88.750000, 86.155000, 84.875000, 85.940000, 99.375000, 103.280000, 105.375000, 107.625000, 105.250000, 104.500000, 105.500000, 106.125000, 107.940000, 106.250000, 107.000000, 108.750000, 110.940000, 110.940000, 114.220000, 123.000000, 121.750000, 119.815000, 120.315000, 119.375000, 118.190000, 116.690000, 115.345000, 113.000000, 118.315000, 116.870000, 116.750000, 113.870000, 114.620000, 115.310000, 116.000000, 121.690000, 119.870000, 120.870000, 116.750000, 116.500000, 116.000000, 118.310000, 121.500000, 122.000000, 121.440000, 125.750000, 127.750000, 124.190000, 124.440000, 125.750000, 124.690000, 125.310000, 132.000000, 131.310000, 132.250000, 133.880000, 133.500000, 135.500000, 137.440000, 138.690000, 139.190000, 138.500000, 138.130000, 137.500000, 138.880000, 132.130000, 129.750000, 128.500000, 125.440000, 125.120000, 126.500000, 128.690000, 126.620000, 126.690000, 126.000000, 123.120000, 121.870000, 124.000000, 127.000000, 124.440000, 122.500000, 123.750000, 123.810000, 124.500000, 127.870000, 128.560000, 129.630000, 124.870000, 124.370000, 124.870000, 123.620000, 124.060000, 125.870000, 125.190000, 125.620000, 126.000000, 128.500000, 126.750000, 129.750000, 132.690000, 133.940000, 136.500000, 137.690000, 135.560000, 
133.560000, 135.000000, 132.380000, 131.440000, 130.880000, 129.630000, 127.250000, 127.810000, 125.000000, 126.810000, 124.750000, 122.810000, 122.250000, 121.060000, 120.000000, 123.250000, 122.750000, 119.190000, 115.060000, 116.690000, 114.870000, 110.870000, 107.250000, 108.870000, 109.000000, 108.500000, 113.060000, 93.000000, 94.620000, 95.120000, 96.000000, 95.560000, 95.310000, 99.000000, 98.810000, 96.810000, 95.940000, 94.440000, 92.940000, 93.940000, 95.500000, 97.060000, 97.500000, 96.250000, 96.370000, 95.000000, 94.870000, 98.250000, 105.120000, 108.440000, 109.870000, 105.000000, 106.000000, 104.940000, 104.500000, 104.440000, 106.310000, 112.870000, 116.500000, 119.190000, 121.000000, 122.120000, 111.940000, 112.750000, 110.190000, 107.940000, 109.690000, 111.060000, 110.440000, 110.120000, 110.310000, 110.440000, 110.000000, 110.750000, 110.500000, 110.500000, 109.500000] LOW_VALUES = [ 90.750000, 91.405000, 94.250000, 93.500000, 92.815000, 93.500000, 92.000000, 89.750000, 89.440000, 90.625000, 92.750000, 96.315000, 96.030000, 88.815000, 86.750000, 90.940000, 88.905000, 88.780000, 89.250000, 89.750000, 87.500000, 86.530000, 84.625000, 82.280000, 81.565000, 80.875000, 81.250000, 84.065000, 85.595000, 85.970000, 84.405000, 85.095000, 85.500000, 85.530000, 87.875000, 86.565000, 84.655000, 83.250000, 82.565000, 83.440000, 82.530000, 85.065000, 86.875000, 88.530000, 89.280000, 90.125000, 90.750000, 89.000000, 88.565000, 90.095000, 89.000000, 86.470000, 84.000000, 83.315000, 82.000000, 83.250000, 84.750000, 85.280000, 87.190000, 88.440000, 88.250000, 87.345000, 89.280000, 91.095000, 89.530000, 91.155000, 92.000000, 90.530000, 89.970000, 88.815000, 86.750000, 85.065000, 82.030000, 81.500000, 82.565000, 96.345000, 96.470000, 101.155000, 104.250000, 101.750000, 101.720000, 101.720000, 103.155000, 105.690000, 103.655000, 104.000000, 105.530000, 108.530000, 108.750000, 107.750000, 117.000000, 118.000000, 116.000000, 118.500000, 116.530000, 116.250000, 
114.595000, 110.875000, 110.500000, 110.720000, 112.620000, 114.190000, 111.190000, 109.440000, 111.560000, 112.440000, 117.500000, 116.060000, 116.560000, 113.310000, 112.560000, 114.000000, 114.750000, 118.870000, 119.000000, 119.750000, 122.620000, 123.000000, 121.750000, 121.560000, 123.120000, 122.190000, 122.750000, 124.370000, 128.000000, 129.500000, 130.810000, 130.630000, 132.130000, 133.880000, 135.380000, 135.750000, 136.190000, 134.500000, 135.380000, 133.690000, 126.060000, 126.870000, 123.500000, 122.620000, 122.750000, 123.560000, 125.810000, 124.620000, 124.370000, 121.810000, 118.190000, 118.060000, 117.560000, 121.000000, 121.120000, 118.940000, 119.810000, 121.000000, 122.000000, 124.500000, 126.560000, 123.500000, 121.250000, 121.060000, 122.310000, 121.000000, 120.870000, 122.060000, 122.750000, 122.690000, 122.870000, 125.500000, 124.250000, 128.000000, 128.380000, 130.690000, 131.630000, 134.380000, 132.000000, 131.940000, 131.940000, 129.560000, 123.750000, 126.000000, 126.250000, 124.370000, 121.440000, 120.440000, 121.370000, 121.690000, 120.000000, 119.620000, 115.500000, 116.750000, 119.060000, 119.060000, 115.060000, 111.060000, 113.120000, 110.000000, 105.000000, 104.690000, 103.870000, 104.690000, 105.440000, 107.000000, 89.000000, 92.500000, 92.120000, 94.620000, 92.810000, 94.250000, 96.250000, 96.370000, 93.690000, 93.500000, 90.000000, 90.190000, 90.500000, 92.120000, 94.120000, 94.870000, 93.000000, 93.870000, 93.000000, 92.620000, 93.560000, 98.370000, 104.440000, 106.000000, 101.810000, 104.120000, 103.370000, 102.120000, 102.250000, 103.370000, 107.940000, 112.500000, 115.440000, 115.500000, 112.250000, 107.560000, 106.560000, 106.870000, 104.500000, 105.750000, 108.620000, 107.750000, 108.060000, 108.000000, 108.190000, 108.120000, 109.060000, 108.750000, 108.560000, 106.620000] CLOSE_VALUES = [ 91.500000, 94.815000, 94.375000, 95.095000, 93.780000, 94.625000, 92.530000, 92.750000, 90.315000, 92.470000, 96.125000, 97.250000, 
98.500000, 89.875000, 91.000000, 92.815000, 89.155000, 89.345000, 91.625000, 89.875000, 88.375000, 87.625000, 84.780000, 83.000000, 83.500000, 81.375000, 84.440000, 89.250000, 86.375000, 86.250000, 85.250000, 87.125000, 85.815000, 88.970000, 88.470000, 86.875000, 86.815000, 84.875000, 84.190000, 83.875000, 83.375000, 85.500000, 89.190000, 89.440000, 91.095000, 90.750000, 91.440000, 89.000000, 91.000000, 90.500000, 89.030000, 88.815000, 84.280000, 83.500000, 82.690000, 84.750000, 85.655000, 86.190000, 88.940000, 89.280000, 88.625000, 88.500000, 91.970000, 91.500000, 93.250000, 93.500000, 93.155000, 91.720000, 90.000000, 89.690000, 88.875000, 85.190000, 83.375000, 84.875000, 85.940000, 97.250000, 99.875000, 104.940000, 106.000000, 102.500000, 102.405000, 104.595000, 106.125000, 106.000000, 106.065000, 104.625000, 108.625000, 109.315000, 110.500000, 112.750000, 123.000000, 119.625000, 118.750000, 119.250000, 117.940000, 116.440000, 115.190000, 111.875000, 110.595000, 118.125000, 116.000000, 116.000000, 112.000000, 113.750000, 112.940000, 116.000000, 120.500000, 116.620000, 117.000000, 115.250000, 114.310000, 115.500000, 115.870000, 120.690000, 120.190000, 120.750000, 124.750000, 123.370000, 122.940000, 122.560000, 123.120000, 122.560000, 124.620000, 129.250000, 131.000000, 132.250000, 131.000000, 132.810000, 134.000000, 137.380000, 137.810000, 137.880000, 137.250000, 136.310000, 136.250000, 134.630000, 128.250000, 129.000000, 123.870000, 124.810000, 123.000000, 126.250000, 128.380000, 125.370000, 125.690000, 122.250000, 119.370000, 118.500000, 123.190000, 123.500000, 122.190000, 119.310000, 123.310000, 121.120000, 123.370000, 127.370000, 128.500000, 123.870000, 122.940000, 121.750000, 124.440000, 122.000000, 122.370000, 122.940000, 124.000000, 123.190000, 124.560000, 127.250000, 125.870000, 128.860000, 132.000000, 130.750000, 134.750000, 135.000000, 132.380000, 133.310000, 131.940000, 130.000000, 125.370000, 130.130000, 127.120000, 125.190000, 122.000000, 125.000000, 
123.000000, 123.500000, 120.060000, 121.000000, 117.750000, 119.870000, 122.000000, 119.190000, 116.370000, 113.500000, 114.250000, 110.000000, 105.060000, 107.000000, 107.870000, 107.000000, 107.120000, 107.000000, 91.000000, 93.940000, 93.870000, 95.500000, 93.000000, 94.940000, 98.250000, 96.750000, 94.810000, 94.370000, 91.560000, 90.250000, 93.940000, 93.620000, 97.000000, 95.000000, 95.870000, 94.060000, 94.620000, 93.750000, 98.000000, 103.940000, 107.870000, 106.060000, 104.500000, 105.000000, 104.190000, 103.060000, 103.420000, 105.270000, 111.870000, 116.000000, 116.620000, 118.280000, 113.370000, 109.000000, 109.700000, 109.250000, 107.000000, 109.190000, 110.000000, 109.200000, 110.120000, 108.000000, 108.620000, 109.750000, 109.810000, 109.000000, 108.750000, 107.870000] VOLUME_VALUES = [ 4077500, 4955900, 4775300, 4155300, 4593100, 3631300, 3382800, 4954200, 4500000, 3397500, 4204500, 6321400, 10203600, 19043900, 11692000, 9553300, 8920300, 5970900, 5062300, 3705600, 5865600, 5603000, 5811900, 8483800, 5995200, 5408800, 5430500, 6283800, 5834800, 4515500, 4493300, 4346100, 3700300, 4600200, 4557200, 4323600, 5237500, 7404100, 4798400, 4372800, 3872300, 10750800, 5804800, 3785500, 5014800, 3507700, 4298800, 4842500, 3952200, 3304700, 3462000, 7253900, 9753100, 5953000, 5011700, 5910800, 4916900, 4135000, 4054200, 3735300, 2921900, 2658400, 4624400, 4372200, 5831600, 4268600, 3059200, 4495500, 3425000, 3630800, 4168100, 5966900, 7692800, 7362500, 6581300, 19587700, 10378600, 9334700, 10467200, 5671400, 5645000, 4518600, 4519500, 5569700, 4239700, 4175300, 4995300, 4776600, 4190000, 6035300, 12168900, 9040800, 5780300, 4320800, 3899100, 3221400, 3455500, 4304200, 4703900, 8316300, 10553900, 6384800, 7163300, 7007800, 5114100, 5263800, 6666100, 7398400, 5575000, 4852300, 4298100, 4900500, 4887700, 6964800, 4679200, 9165000, 6469800, 6792000, 4423800, 5231900, 4565600, 6235200, 5225900, 8261400, 5912500, 3545600, 5714500, 6653900, 6094500, 4799200, 
5050800, 5648900, 4726300, 5585600, 5124800, 7630200, 14311600, 8793600, 8874200, 6966600, 5525500, 6515500, 5291900, 5711700, 4327700, 4568000, 6859200, 5757500, 7367000, 6144100, 4052700, 5849700, 5544700, 5032200, 4400600, 4894100, 5140000, 6610900, 7585200, 5963100, 6045500, 8443300, 6464700, 6248300, 4357200, 4774700, 6216900, 6266900, 5584800, 5284500, 7554500, 7209500, 8424800, 5094500, 4443600, 4591100, 5658400, 6094100, 14862200, 7544700, 6985600, 8093000, 7590000, 7451300, 7078000, 7105300, 8778800, 6643900, 10563900, 7043100, 6438900, 8057700, 14240000, 17872300, 7831100, 8277700, 15017800, 14183300, 13921100, 9683000, 9187300, 11380500, 69447300, 26673600, 13768400, 11371600, 9872200, 9450500, 11083300, 9552800, 11108400, 10374200, 16701900, 13741900, 8523600, 9551900, 8680500, 7151700, 9673100, 6264700, 8541600, 8358000, 18720800, 19683100, 13682500, 10668100, 9710600, 3113100, 5682000, 5763600, 5340000, 6220800, 14680500, 9933000, 11329500, 8145300, 16644700, 12593800, 7138100, 7442300, 9442300, 7123600, 7680600, 4839800, 4775500, 4008800, 4533600, 3741100, 4084800, 2685200, 3438000, 2870500] SAR_HIGH = [51.12, 52.35, 52.1, 51.8, 52.1, 52.5, 52.8, 52.5, 53.5, 53.5, 53.8, 54.2, 53.4, 53.5, 54.4, 55.2, 55.7, 57, 57.5, 58, 57.7, 58, 57.5, 57, 56.7, 57.5, 56.70, 56.00, 56.20, 54.80, 55.50, 54.70, 54.00, 52.50, 51.00, 51.50, 51.70, 53.00] SAR_LOW = [50.0, 51.5, 51, 50.5, 51.25, 51.7, 51.85, 51.5, 52.3, 52.5, 53, 53.5, 52.5, 52.1, 53, 54, 55, 56, 56.5, 57, 56.5, 57.3, 56.7, 56.3, 56.2, 56, 55.50, 55.00, 54.90, 54.00, 54.50, 53.80, 53.00, 51.50, 50.00, 50.50, 50.20, 51.50] def compare(obtained, expected, decimals=2): obtained = round(obtained, decimals) expected = round(expected, decimals) return obtained == expected class TestCase(unittest.TestCase): TestInstrument = "orcl" def __loadMedPriceDS(self): ret = dataseries.SequenceDataSeries() for i in xrange(len(OPEN_VALUES)): ret.append(LOW_VALUES[i] + (HIGH_VALUES[i] - LOW_VALUES[i]) / 2.0) return ret def 
__loadBarDS(self): seconds = 0 ret = bards.BarDataSeries() for i in xrange(len(OPEN_VALUES)): dateTime = datetime.datetime.now() + datetime.timedelta(seconds=seconds) ret.append(bar.BasicBar(dateTime, OPEN_VALUES[i], HIGH_VALUES[i], LOW_VALUES[i], CLOSE_VALUES[i], VOLUME_VALUES[i], CLOSE_VALUES[i], bar.Frequency.DAY)) seconds += 1 return ret def __loadSarTestBarDs(self): seconds = 0 ret = bards.BarDataSeries() for i in xrange(len(SAR_HIGH)): dateTime = datetime.datetime.now() + datetime.timedelta(seconds=seconds) ret.append(bar.BasicBar(dateTime, SAR_LOW[i], SAR_HIGH[i], SAR_LOW[i], SAR_HIGH[i], 0, SAR_LOW[i], bar.Frequency.DAY)) seconds += 1 return ret def testAD(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.AD(barDs, 252)[0], -1631000.00)) self.assertTrue(compare(indicator.AD(barDs, 252)[1], 2974412.02)) self.assertTrue(compare(indicator.AD(barDs, 252)[-2], 8707691.07)) self.assertTrue(compare(indicator.AD(barDs, 252)[-1], 8328944.54)) def testADOSC(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.ADOSC(barDs, 252, 3, 10)[9], 841238.33)) # Original value was 841238.32 self.assertTrue(compare(indicator.ADOSC(barDs, 252, 3, 10)[9+1], 2255663.07)) self.assertTrue(compare(indicator.ADOSC(barDs, 252, 3, 10)[-2], -526700.32)) self.assertTrue(compare(indicator.ADOSC(barDs, 252, 3, 10)[-1], -1139932.729)) def testADX(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.ADX(barDs, 252, 14)[27], 23.0000)) self.assertTrue(compare(indicator.ADX(barDs, 252, 14)[28], 22.0802)) self.assertTrue(compare(indicator.ADX(barDs, 252, 14)[-2], 16.6840)) self.assertTrue(compare(indicator.ADX(barDs, 252, 14)[-1], 15.5260)) def testADXR(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.ADXR(barDs, 252, 14)[40], 19.8666)) self.assertTrue(compare(indicator.ADXR(barDs, 252, 14)[41], 18.9092)) self.assertTrue(compare(indicator.ADXR(barDs, 252, 14)[-2], 21.5972)) self.assertTrue(compare(indicator.ADXR(barDs, 252, 
14)[-1], 20.4920)) def testAPO(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.APO(barDs.getCloseDataSeries(), 252, 26, 12, talib.MA_Type.SMA)[25], -3.3124)) self.assertTrue(compare(indicator.APO(barDs.getCloseDataSeries(), 252, 12, 26, talib.MA_Type.SMA)[25], -3.3124)) self.assertTrue(compare(indicator.APO(barDs.getCloseDataSeries(), 252, 12, 26, talib.MA_Type.SMA)[26], -3.5876)) self.assertTrue(compare(indicator.APO(barDs.getCloseDataSeries(), 252, 12, 26, talib.MA_Type.SMA)[-1], -0.1667)) def testAROON(self): barDs = self.__loadBarDS() # AROON DOWN TEST self.assertTrue(compare(indicator.AROON(barDs, 252, 14)[0][14], 100)) self.assertTrue(compare(indicator.AROON(barDs, 252, 14)[0][14+1], 92.857)) self.assertTrue(compare(indicator.AROON(barDs, 252, 14)[0][-2], 28.571)) self.assertTrue(compare(indicator.AROON(barDs, 252, 14)[0][-1], 21.429)) # AROON UP TEST self.assertTrue(compare(indicator.AROON(barDs, 252, 14)[1][14], 78.571)) self.assertTrue(compare(indicator.AROON(barDs, 252, 14)[1][14+1], 71.429)) self.assertTrue(compare(indicator.AROON(barDs, 252, 14)[1][-2], 0)) self.assertTrue(compare(indicator.AROON(barDs, 252, 14)[1][-1], 7.1429)) def testAROONOSC(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.AROONOSC(barDs, 252, 14)[14], -21.4285)) self.assertTrue(compare(indicator.AROONOSC(barDs, 252, 14)[14+6], -21.4285)) self.assertTrue(compare(indicator.AROONOSC(barDs, 252, 14)[14+7], -71.4285)) self.assertTrue(compare(indicator.AROONOSC(barDs, 252, 14)[-2], -28.5714)) self.assertTrue(compare(indicator.AROONOSC(barDs, 252, 14)[-1], -14.28571)) def testATR(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.ATR(barDs, 252, 1)[1], 3.535, 3)) self.assertTrue(compare(indicator.ATR(barDs, 252, 1)[13], 9.685, 3)) self.assertTrue(compare(indicator.ATR(barDs, 252, 1)[41], 5.125, 3)) self.assertTrue(compare(indicator.ATR(barDs, 252, 1)[-1], 2.88, 3)) def testAVGPRICE(self): barDs = self.__loadBarDS() 
self.assertTrue(compare(indicator.AVGPRICE(barDs, 252)[0], 92.0)) self.assertTrue(compare(indicator.AVGPRICE(barDs, 252)[1], 93.16)) # Original value was 93.17 def testBBANDS(self): barDs = self.__loadBarDS() # EMA self.assertTrue(compare(indicator.BBANDS(barDs.getCloseDataSeries(), 252, 20, 2.0, 2.0, talib.MA_Type.EMA)[0][19+13], 93.674)) self.assertTrue(compare(indicator.BBANDS(barDs.getCloseDataSeries(), 252, 20, 2.0, 2.0, talib.MA_Type.EMA)[1][19+13], 87.679)) self.assertTrue(compare(indicator.BBANDS(barDs.getCloseDataSeries(), 252, 20, 2.0, 2.0, talib.MA_Type.EMA)[2][19+13], 81.685)) # SMA self.assertTrue(compare(indicator.BBANDS(barDs.getCloseDataSeries(), 252, 20, 2.0, 2.0, talib.MA_Type.SMA)[0][19], 98.0734)) self.assertTrue(compare(indicator.BBANDS(barDs.getCloseDataSeries(), 252, 20, 2.0, 2.0, talib.MA_Type.SMA)[1][19], 92.8910)) self.assertTrue(compare(indicator.BBANDS(barDs.getCloseDataSeries(), 252, 20, 2.0, 2.0, talib.MA_Type.SMA)[2][19], 87.7086)) def testBETA(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.BETA(barDs.getHighDataSeries(), barDs.getLowDataSeries(), 252, 5)[5], 0.62907)) self.assertTrue(compare(indicator.BETA(barDs.getHighDataSeries(), barDs.getLowDataSeries(), 252, 5)[6], 0.83604)) def testBOP(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.BOP(barDs, 252)[0], -0.40)) self.assertTrue(compare(indicator.BOP(barDs, 252)[1], 0.94)) def testCCI(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.CCI(barDs, 252, 2)[1], 66.666)) self.assertTrue(compare(indicator.CCI(barDs, 252, 5)[4], 18.857)) self.assertTrue(compare(indicator.CCI(barDs, 252, 11)[10], 87.927)) self.assertTrue(compare(indicator.CCI(barDs, 252, 11)[11], 180.005, 3)) def testCMO(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.CMO(barDs.getCloseDataSeries(), 252, 14)[14], -1.70, 1)) def testCORREL(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.CORREL(barDs.getHighDataSeries(), 
barDs.getLowDataSeries(), 252, 20)[19], 0.9401569)) self.assertTrue(compare(indicator.CORREL(barDs.getHighDataSeries(), barDs.getLowDataSeries(), 252, 20)[20], 0.9471812)) self.assertTrue(compare(indicator.CORREL(barDs.getHighDataSeries(), barDs.getLowDataSeries(), 252, 20)[-1], 0.8866901)) def testDX(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.DX(barDs, 252, 14)[14], 19.3689)) self.assertTrue(compare(indicator.DX(barDs, 252, 14)[15], 9.7131)) self.assertTrue(compare(indicator.DX(barDs, 252, 14)[16], 17.2905)) self.assertTrue(compare(indicator.DX(barDs, 252, 14)[-2], 10.6731)) self.assertTrue(compare(indicator.DX(barDs, 252, 14)[-1], 0.4722)) def testEMA(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.EMA(barDs.getCloseDataSeries(), 252, 2)[1], 93.16)) # Original value 93.15 self.assertTrue(compare(indicator.EMA(barDs.getCloseDataSeries(), 252, 2)[2], 93.97)) # Original value 93.96 self.assertTrue(compare(indicator.EMA(barDs.getCloseDataSeries(), 252, 2)[-1], 108.22)) # Original value 108.21 self.assertTrue(compare(indicator.EMA(barDs.getCloseDataSeries(), 252, 10)[9], 93.23)) # Original value 93.22 def testHT_DCPERIOD(self): ds = self.__loadMedPriceDS() self.assertTrue(compare(indicator.HT_DCPERIOD(ds, 252)[32], 15.5527, 4)) self.assertTrue(compare(indicator.HT_DCPERIOD(ds, 252)[-1], 18.6140, 4)) def testHT_DCPHASE(self): ds = self.__loadMedPriceDS() self.assertTrue(compare(indicator.HT_DCPHASE(ds, 252)[63], 22.1496, 4)) # Original value 22.1495 self.assertTrue(compare(indicator.HT_DCPHASE(ds, 252)[-3], -31.182, 3)) self.assertTrue(compare(indicator.HT_DCPHASE(ds, 252)[-2], 23.2691, 4)) self.assertTrue(compare(indicator.HT_DCPHASE(ds, 252)[-1], 47.2765, 4)) def testHT_TRENDLINE(self): ds = self.__loadMedPriceDS() self.assertTrue(compare(indicator.HT_TRENDLINE(ds, 252)[63], 88.257)) self.assertTrue(compare(indicator.HT_TRENDLINE(ds, 252)[-3], 109.69)) self.assertTrue(compare(indicator.HT_TRENDLINE(ds, 252)[-2], 110.18)) 
self.assertTrue(compare(indicator.HT_TRENDLINE(ds, 252)[-1], 110.46)) def testHT_TRENDMODE(self): ds = self.__loadMedPriceDS() self.assertTrue(compare(indicator.HT_TRENDMODE(ds, 252)[63], 1.0)) def testKAMA(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.KAMA(barDs.getCloseDataSeries(), 252, 10)[10], 92.6575)) self.assertTrue(compare(indicator.KAMA(barDs.getCloseDataSeries(), 252, 10)[11], 92.7783)) self.assertTrue(compare(indicator.KAMA(barDs.getCloseDataSeries(), 252, 10)[-1], 109.294)) def testMA(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.MA(barDs.getCloseDataSeries(), 252, 2, talib.MA_Type.SMA)[1], 93.16)) # Original value 93.15 self.assertTrue(compare(indicator.MA(barDs.getCloseDataSeries(), 252, 2, talib.MA_Type.SMA)[2], 94.59)) self.assertTrue(compare(indicator.MA(barDs.getCloseDataSeries(), 252, 2, talib.MA_Type.SMA)[3], 94.73)) self.assertTrue(compare(indicator.MA(barDs.getCloseDataSeries(), 252, 2, talib.MA_Type.SMA)[-1], 108.31)) def testMACD(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.MACD(barDs.getCloseDataSeries(), 252, 12, 26, 9)[0][33], -1.9738)) self.assertTrue(compare(indicator.MACD(barDs.getCloseDataSeries(), 252, 12, 26, 9)[1][33], -2.7071)) self.assertTrue(compare(indicator.MACD(barDs.getCloseDataSeries(), 252, 12, 26, 9)[2][33], (-1.9738)-(-2.7071))) self.assertTrue(compare(indicator.MACD(barDs.getCloseDataSeries(), 252, 26, 12, 9)[0][33], -1.9738)) self.assertTrue(compare(indicator.MACD(barDs.getCloseDataSeries(), 252, 26, 12, 9)[1][33], -2.7071)) self.assertTrue(compare(indicator.MACD(barDs.getCloseDataSeries(), 252, 26, 12, 9)[2][33], (-1.9738)-(-2.7071))) def testMACDEXT(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.MACDEXT(barDs.getCloseDataSeries(), 252, 12, talib.MA_Type.EMA, 26, talib.MA_Type.EMA, 9, talib.MA_Type.EMA)[0][33], -1.9738)) self.assertTrue(compare(indicator.MACDEXT(barDs.getCloseDataSeries(), 252, 12, talib.MA_Type.EMA, 26, 
talib.MA_Type.EMA, 9, talib.MA_Type.EMA)[1][33], -2.7071)) self.assertTrue(compare(indicator.MACDEXT(barDs.getCloseDataSeries(), 252, 12, talib.MA_Type.EMA, 26, talib.MA_Type.EMA, 9, talib.MA_Type.EMA)[2][33], (-1.9738)-(-2.7071))) def testMAMA(self): ds = self.__loadMedPriceDS() self.assertTrue(compare(indicator.MAMA(ds, 252, 0.5, 0.05)[0][32], 85.3643)) self.assertTrue(compare(indicator.MAMA(ds, 252, 0.5, 0.05)[0][-1], 110.1116)) def testMAX(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.MAX(barDs.getOpenDataSeries(), 252, 14)[13], 98.815)) self.assertTrue(compare(indicator.MAX(barDs.getOpenDataSeries(), 252, 14)[14], 98.815)) self.assertTrue(compare(indicator.MAX(barDs.getOpenDataSeries(), 252, 14)[-1], 110.69)) def testMFI(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.MFI(barDs, 252, 14)[14], 42.8923)) self.assertTrue(compare(indicator.MFI(barDs, 252, 14)[15], 45.6072)) self.assertTrue(compare(indicator.MFI(barDs, 252, 14)[-1], 53.1997)) def testMIN(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.MIN(barDs.getOpenDataSeries(), 252, 14)[13], 91.125)) self.assertTrue(compare(indicator.MIN(barDs.getOpenDataSeries(), 252, 14)[14], 91.125)) self.assertTrue(compare(indicator.MIN(barDs.getOpenDataSeries(), 252, 14)[-1], 107.75)) def testMINUS_DI(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.MINUS_DI(barDs, 252, 14)[14], 30.1684)) self.assertTrue(compare(indicator.MINUS_DI(barDs, 252, 14)[28], 24.969182)) self.assertTrue(compare(indicator.MINUS_DI(barDs, 252, 14)[-1], 21.1988)) def testMINUS_DM(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.MINUS_DM(barDs, 252, 14)[13], 12.995, 3)) self.assertTrue(compare(indicator.MINUS_DM(barDs, 252, 14)[-2], 8.33)) self.assertTrue(compare(indicator.MINUS_DM(barDs, 252, 14)[-1], 9.68)) # Original value 9.672 def testMOM(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.MOM(barDs.getCloseDataSeries(), 252, 
14)[14], -0.50)) self.assertTrue(compare(indicator.MOM(barDs.getCloseDataSeries(), 252, 14)[15], -2.00)) self.assertTrue(compare(indicator.MOM(barDs.getCloseDataSeries(), 252, 14)[16], -5.22)) self.assertTrue(compare(indicator.MOM(barDs.getCloseDataSeries(), 252, 14)[-1], -1.13)) def testNATR(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.NATR(barDs, 252, 14)[14], 3.9321)) self.assertTrue(compare(indicator.NATR(barDs, 252, 14)[15], 3.7576)) self.assertTrue(compare(indicator.NATR(barDs, 252, 14)[-1], 3.0229)) def testPLUS_DI(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.PLUS_DI(barDs, 252, 14)[14], 20.3781)) self.assertTrue(compare(indicator.PLUS_DI(barDs, 252, 14)[14+13], 22.1073)) self.assertTrue(compare(indicator.PLUS_DI(barDs, 252, 14)[14+14], 20.3746)) self.assertTrue(compare(indicator.PLUS_DI(barDs, 252, 14)[-1], 21.0000)) def testPLUS_DM(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.PLUS_DM(barDs, 252, 14)[13], 10.28)) self.assertTrue(compare(indicator.PLUS_DM(barDs, 252, 14)[-2], 10.317)) self.assertTrue(compare(indicator.PLUS_DM(barDs, 252, 14)[-1], 9.59)) # Original value 9.58 def testPPO(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.PPO(barDs.getCloseDataSeries(), 252, 2, 3, talib.MA_Type.SMA)[2], 1.10264)) self.assertTrue(compare(indicator.PPO(barDs.getCloseDataSeries(), 252, 2, 3, talib.MA_Type.SMA)[3], -0.02813)) self.assertTrue(compare(indicator.PPO(barDs.getCloseDataSeries(), 252, 2, 3, talib.MA_Type.SMA)[-1], -0.21191)) def testROC(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.ROC(barDs.getCloseDataSeries(), 252, 14)[14], -0.546)) self.assertTrue(compare(indicator.ROC(barDs.getCloseDataSeries(), 252, 14)[15], -2.109)) self.assertTrue(compare(indicator.ROC(barDs.getCloseDataSeries(), 252, 14)[16], -5.53)) self.assertTrue(compare(indicator.ROC(barDs.getCloseDataSeries(), 252, 14)[-1], -1.0367)) def testROCR(self): barDs = self.__loadBarDS() 
self.assertTrue(compare(indicator.ROCR(barDs.getCloseDataSeries(), 252, 14)[14], 0.994536, 4)) self.assertTrue(compare(indicator.ROCR(barDs.getCloseDataSeries(), 252, 14)[15], 0.978906, 4)) self.assertTrue(compare(indicator.ROCR(barDs.getCloseDataSeries(), 252, 14)[16], 0.944689, 4)) self.assertTrue(compare(indicator.ROCR(barDs.getCloseDataSeries(), 252, 14)[-1], 0.989633, 4)) def testROCR100(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.ROCR100(barDs.getCloseDataSeries(), 252, 14)[14], 99.4536, 4)) self.assertTrue(compare(indicator.ROCR100(barDs.getCloseDataSeries(), 252, 14)[15], 97.8906, 4)) self.assertTrue(compare(indicator.ROCR100(barDs.getCloseDataSeries(), 252, 14)[16], 94.4689, 4)) self.assertTrue(compare(indicator.ROCR100(barDs.getCloseDataSeries(), 252, 14)[-1], 98.9633, 4)) def testRSI(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.RSI(barDs.getCloseDataSeries(), 252, 14)[14], 49.15)) # Original value 49.14 self.assertTrue(compare(indicator.RSI(barDs.getCloseDataSeries(), 252, 14)[15], 52.33)) # Original value 52.32 self.assertTrue(compare(indicator.RSI(barDs.getCloseDataSeries(), 252, 14)[16], 46.07)) self.assertTrue(compare(indicator.RSI(barDs.getCloseDataSeries(), 252, 14)[-1], 49.63)) def testSAR(self): barDs = self.__loadSarTestBarDs() self.assertTrue(compare(indicator.SAR(barDs, len(SAR_HIGH), 0.02, 0.20)[1], 50.00)) self.assertTrue(compare(indicator.SAR(barDs, len(SAR_HIGH), 0.02, 0.20)[2], 50.047)) self.assertTrue(compare(indicator.SAR(barDs, len(SAR_HIGH), 0.02, 0.20)[5], 50.182)) self.assertTrue(compare(indicator.SAR(barDs, len(SAR_HIGH), 0.02, 0.20)[-2], 52.93)) self.assertTrue(compare(indicator.SAR(barDs, len(SAR_HIGH), 0.02, 0.20)[-1], 50.00)) def testSMA(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.SMA(barDs.getCloseDataSeries(), 252, 2)[1], 93.16)) # Original value 93.15 self.assertTrue(compare(indicator.SMA(barDs.getCloseDataSeries(), 252, 2)[2], 94.59)) 
self.assertTrue(compare(indicator.SMA(barDs.getCloseDataSeries(), 252, 2)[3], 94.73)) self.assertTrue(compare(indicator.SMA(barDs.getCloseDataSeries(), 252, 2)[-1], 108.31)) def testSTDDEV(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.STDDEV(barDs.getCloseDataSeries(), 252, 5.0, 1)[4], 1.2856)) self.assertTrue(compare(indicator.STDDEV(barDs.getCloseDataSeries(), 252, 5.0, 1)[5], 0.4462)) self.assertTrue(compare(indicator.STDDEV(barDs.getCloseDataSeries(), 252, 5.0, 1)[-1], 0.7144)) self.assertTrue(compare(indicator.STDDEV(barDs.getCloseDataSeries(), 252, 5.0, 1.5)[4], 1.9285)) self.assertTrue(compare(indicator.STDDEV(barDs.getCloseDataSeries(), 252, 5.0, 1.5)[5], 0.66937)) self.assertTrue(compare(indicator.STDDEV(barDs.getCloseDataSeries(), 252, 5.0, 1.5)[-1], 1.075)) def testSTOCH(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.STOCH(barDs, 252, 5, 3, talib.MA_Type.SMA, 3, talib.MA_Type.SMA)[0][8], 24.0128)) self.assertTrue(compare(indicator.STOCH(barDs, 252, 5, 3, talib.MA_Type.SMA, 3, talib.MA_Type.SMA)[1][8], 36.254)) self.assertTrue(compare(indicator.STOCH(barDs, 252, 5, 3, talib.MA_Type.SMA, 4, talib.MA_Type.SMA)[0][-1], 30.194)) self.assertTrue(compare(indicator.STOCH(barDs, 252, 5, 3, talib.MA_Type.SMA, 4, talib.MA_Type.SMA)[1][-1], 46.641)) def testSTOCHRSI(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.STOCHRSI(barDs.getCloseDataSeries(), 252, 14, 14, 1, talib.MA_Type.SMA)[0][27], 94.156709)) self.assertTrue(compare(indicator.STOCHRSI(barDs.getCloseDataSeries(), 252, 14, 14, 1, talib.MA_Type.SMA)[1][27], 94.156709)) self.assertTrue(compare(indicator.STOCHRSI(barDs.getCloseDataSeries(), 252, 14, 14, 1, talib.MA_Type.SMA)[0][-1], 0)) self.assertTrue(compare(indicator.STOCHRSI(barDs.getCloseDataSeries(), 252, 14, 14, 1, talib.MA_Type.SMA)[1][-1], 0)) self.assertTrue(compare(indicator.STOCHRSI(barDs.getCloseDataSeries(), 252, 14, 45, 1, talib.MA_Type.SMA)[0][58], 79.729186)) 
self.assertTrue(compare(indicator.STOCHRSI(barDs.getCloseDataSeries(), 252, 14, 45, 1, talib.MA_Type.SMA)[1][58], 79.729186)) self.assertTrue(compare(indicator.STOCHRSI(barDs.getCloseDataSeries(), 252, 14, 45, 1, talib.MA_Type.SMA)[0][-1], 48.1550743)) self.assertTrue(compare(indicator.STOCHRSI(barDs.getCloseDataSeries(), 252, 14, 45, 1, talib.MA_Type.SMA)[1][-1], 48.1550743)) def testT3(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.T3(barDs.getCloseDataSeries(), 252, 5, 0.7)[24], 85.73)) self.assertTrue(compare(indicator.T3(barDs.getCloseDataSeries(), 252, 5, 0.7)[25], 84.37)) self.assertTrue(compare(indicator.T3(barDs.getCloseDataSeries(), 252, 5, 0.7)[-2], 109.03)) self.assertTrue(compare(indicator.T3(barDs.getCloseDataSeries(), 252, 5, 0.7)[-1], 108.88)) def testTRANGE(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.TRANGE(barDs, 252)[1], 3.535, 3)) self.assertTrue(compare(indicator.TRANGE(barDs, 252)[13], 9.685, 3)) self.assertTrue(compare(indicator.TRANGE(barDs, 252)[41], 5.125, 3)) self.assertTrue(compare(indicator.TRANGE(barDs, 252)[-1], 2.88)) def testTRIMA(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.TRIMA(barDs.getCloseDataSeries(), 252, 10)[9], 93.6043)) self.assertTrue(compare(indicator.TRIMA(barDs.getCloseDataSeries(), 252, 10)[10], 93.4252)) self.assertTrue(compare(indicator.TRIMA(barDs.getCloseDataSeries(), 252, 10)[-2], 109.1850, 3)) self.assertTrue(compare(indicator.TRIMA(barDs.getCloseDataSeries(), 252, 10)[-1], 109.1407)) def testTRIX(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.TRIX(barDs.getCloseDataSeries(), 252, 5)[13], 0.2589)) self.assertTrue(compare(indicator.TRIX(barDs.getCloseDataSeries(), 252, 5)[14], 0.010495)) self.assertTrue(compare(indicator.TRIX(barDs.getCloseDataSeries(), 252, 5)[-2], -0.058)) self.assertTrue(compare(indicator.TRIX(barDs.getCloseDataSeries(), 252, 5)[-1], -0.095)) def testULTOSC(self): barDs = self.__loadBarDS() 
self.assertTrue(compare(indicator.ULTOSC(barDs, 252, 7, 14, 28)[28], 47.1713)) self.assertTrue(compare(indicator.ULTOSC(barDs, 252, 7, 14, 28)[29], 46.2802)) self.assertTrue(compare(indicator.ULTOSC(barDs, 252, 7, 14, 28)[-1], 40.0854)) def testVAR(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.VAR(barDs.getCloseDataSeries(), 252, 5.0, 1)[4], 1.2856**2)) self.assertTrue(compare(indicator.VAR(barDs.getCloseDataSeries(), 252, 5.0, 1)[5], 0.4462**2)) self.assertTrue(compare(indicator.VAR(barDs.getCloseDataSeries(), 252, 5.0, 1)[-1], 0.7144**2)) def testWILLR(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.WILLR(barDs, 252, 14)[13], -90.1943)) self.assertTrue(compare(indicator.WILLR(barDs, 252, 14)[13+112], 0)) def testWMA(self): barDs = self.__loadBarDS() self.assertTrue(compare(indicator.WMA(barDs.getCloseDataSeries(), 252, 2)[1], 93.71)) self.assertTrue(compare(indicator.WMA(barDs.getCloseDataSeries(), 252, 2)[2], 94.52)) self.assertTrue(compare(indicator.WMA(barDs.getCloseDataSeries(), 252, 2)[3], 94.86)) # Original value 94.85 self.assertTrue(compare(indicator.WMA(barDs.getCloseDataSeries(), 252, 2)[-1], 108.16))
70.197898
245
0.6869
5,795
40,083
4.713719
0.136497
0.101479
0.152218
0.217455
0.5
0.495058
0.489896
0.478438
0.373444
0.248719
0
0.38939
0.147868
40,083
570
246
70.321053
0.410353
0.027917
0
0.136842
0
0
0.000103
0
0
0
0
0
0.416842
1
0.12
false
0
0.014737
0
0.147368
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
5dc5cfb15b34eefc7fdea3261357a802eeefd185
189
py
Python
pytest_data/plugin.py
robinoburka/python-pytest-data
568a1e6c4ba7900b63dc846a919037dac8b023aa
[ "MIT" ]
3
2018-06-02T14:08:23.000Z
2020-09-02T11:29:07.000Z
pytest_data/plugin.py
robinoburka/python-pytest-data
568a1e6c4ba7900b63dc846a919037dac8b023aa
[ "MIT" ]
2
2017-05-13T09:57:52.000Z
2019-09-08T08:10:15.000Z
pytest_data/plugin.py
robinoburka/python-pytest-data
568a1e6c4ba7900b63dc846a919037dac8b023aa
[ "MIT" ]
2
2017-04-24T14:03:30.000Z
2019-09-07T12:02:13.000Z
# -*- coding: utf-8 -*- def pytest_generate_tests(metafunc): for key, value in getattr(metafunc.function, 'data', {}).items(): metafunc.parametrize(key, value, indirect=True)
27
69
0.666667
23
189
5.391304
0.826087
0.129032
0
0
0
0
0
0
0
0
0
0.006329
0.164021
189
6
70
31.5
0.778481
0.111111
0
0
1
0
0.024096
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
5de80d317c94539737e5a4734f8dbe3cd84f4991
465
py
Python
maedn/player.py
Daniel31415/maedn
98821402328c235f1e46f92a960880745a8a299b
[ "MIT" ]
2
2018-07-12T14:01:16.000Z
2021-10-04T12:34:45.000Z
maedn/player.py
Daniel31415/maedn
98821402328c235f1e46f92a960880745a8a299b
[ "MIT" ]
null
null
null
maedn/player.py
Daniel31415/maedn
98821402328c235f1e46f92a960880745a8a299b
[ "MIT" ]
null
null
null
from maedn.strategy import MoveFirst from maedn.strategy import BaseStrategy from maedn.dice import StandardDice import random class Player(object): strategy = None def __init(self, strategy: BaseStrategy = MoveFirst, dice=StandardDice): self.strategy = strategy self.dice = dice def get_next_move(self, board, dice_roll): return self.strategy.get_next_move(board) def roll(self) -> int: return self.dice.roll()
24.473684
76
0.711828
59
465
5.491525
0.389831
0.083333
0.104938
0.141975
0
0
0
0
0
0
0
0
0.208602
465
18
77
25.833333
0.880435
0
0
0
0
0
0
0
0
0
0
0
0
1
0.230769
false
0
0.307692
0.153846
0.846154
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
5df42efacd1a6db0ee5a638299c3c8533f470a61
179
py
Python
Project/run.py
conorshortt123/facial-recognition-project
7440c9c9920c91b0db2c0c41628fec89ceaea464
[ "MIT" ]
5
2020-02-03T16:09:56.000Z
2020-04-19T16:31:50.000Z
Project/run.py
conorshortt123/facial-recognition-project
7440c9c9920c91b0db2c0c41628fec89ceaea464
[ "MIT" ]
8
2020-02-03T12:01:43.000Z
2022-03-12T00:16:48.000Z
Project/run.py
conorshortt123/facial-recognition-project
7440c9c9920c91b0db2c0c41628fec89ceaea464
[ "MIT" ]
2
2020-06-26T13:32:55.000Z
2021-04-21T13:24:25.000Z
from FrontEnd import app # check to see if this is the main thread of execution if __name__ == '__main__': # start the flask app app.run(debug=True, use_reloader=False)
22.375
54
0.72067
29
179
4.137931
0.827586
0
0
0
0
0
0
0
0
0
0
0
0.206704
179
7
55
25.571429
0.84507
0.402235
0
0
0
0
0.076923
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
5df92a87c5ba6fda9558c2a409b7f7b7104aabab
1,585
py
Python
Compiler/code/utils/directory.py
matcom-compilers-2019/cool-compiler-jessica-david-rayniel
4d00eaa5ee7e1afa68e800cb7c2e4a8595336f94
[ "MIT" ]
null
null
null
Compiler/code/utils/directory.py
matcom-compilers-2019/cool-compiler-jessica-david-rayniel
4d00eaa5ee7e1afa68e800cb7c2e4a8595336f94
[ "MIT" ]
null
null
null
Compiler/code/utils/directory.py
matcom-compilers-2019/cool-compiler-jessica-david-rayniel
4d00eaa5ee7e1afa68e800cb7c2e4a8595336f94
[ "MIT" ]
1
2021-03-26T19:08:19.000Z
2021-03-26T19:08:19.000Z
#Este modulo contiene los path de input y output, el de input es el path del archivo de cool que se va a compilar y el outut es el nombre del archivo de mips #donde se va a guardar el resultado de compilar el input. input_path = r'C:\Users\David\Documents\Version del compilador de Rayniel 9 de junio\Compiler\input\life.cl' output_path = r'C:\Users\David\Documents\Version del compilador de Rayniel 9 de junio\Compiler\output\life1.s' #import os #from pathlib import Path, PureWindowsPath #def standar(s ): # filename = PureWindowsPath(s) # correct_path = Path(filename) # return correct_path #r_directory = os.getcwd() #r_directory = PureWindowsPath(r_directory) #directory = PureWindowsPath((r_directory.parent).parent) #print(str(directory)) #first = 0 #second = 0 #for i in range(len(r_directory)): # if r_directory[i] == "\\" : # second = first # first = i #directory = "" #for i in range(len(r_directory)): # if i> second: # break # directory += r_directory[i] #current_directory = PureWindowsPath(r'' + str(directory) + r'input\current_directory.txt') #with open(current_directory) as file: # name_cl = file.read(1024) #name_spim = r'' #for i in range(len(name_cl)): # if name_cl[i] == '.': # break # name_spim+= name_cl[i] #name_spim = name_spim + r'.s' #current_cl = Path(r'' + str(directory) + r'test\test_case' + '\\' + name_cl) #output_cl = Path(r'' + directory + r'output' + '\\' + name_spim) #current_cl = standar(current_cl) #output_cl = standar(output_cl) #directory = standar(directory) #current_directory = standar(current_directory)
31.078431
157
0.713565
243
1,585
4.514403
0.316872
0.082042
0.068368
0.030082
0.187785
0.175023
0.175023
0.175023
0.127621
0.127621
0
0.006691
0.15142
1,585
51
158
31.078431
0.808922
0.806309
0
0
0
1
0.685185
0.448148
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
b907cfae6a029c3e44c510ca7cec28444c03c4a0
30
bzl
Python
test/com/facebook/buck/parser/testdata/recursive_load/b.bzl
jasonnam/buck
1ddbbf986312b30413aa36cac337267536a11f04
[ "Apache-2.0" ]
null
null
null
test/com/facebook/buck/parser/testdata/recursive_load/b.bzl
jasonnam/buck
1ddbbf986312b30413aa36cac337267536a11f04
[ "Apache-2.0" ]
null
null
null
test/com/facebook/buck/parser/testdata/recursive_load/b.bzl
jasonnam/buck
1ddbbf986312b30413aa36cac337267536a11f04
[ "Apache-2.0" ]
null
null
null
# @unused load(":a.bzl", "x")
10
19
0.5
5
30
3
1
0
0
0
0
0
0
0
0
0
0
0
0.133333
30
2
20
15
0.576923
0.233333
0
0
0
0
0.333333
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
b90a68a788c41e1ff01196019e824f36bc994e9d
676
py
Python
backup/Num.py
YuunqiLiu/lyqtmp200130
5ae4578ad5600d9b15fc633451e504791dcaf0f6
[ "MIT" ]
null
null
null
backup/Num.py
YuunqiLiu/lyqtmp200130
5ae4578ad5600d9b15fc633451e504791dcaf0f6
[ "MIT" ]
null
null
null
backup/Num.py
YuunqiLiu/lyqtmp200130
5ae4578ad5600d9b15fc633451e504791dcaf0f6
[ "MIT" ]
null
null
null
class Bits(): def __init__(self,width,value=0): self.__width = width self.__value = value @property def width(self): return self.__width @property def value(self): return self.__value @property def template(self): return self @property def string(self): return '%s\'b%s' % (self.__width,bin(self.__value).replace('0b','') ) #pass def __eq__(self,other): #print(self,other) #print(self.width,other.width) return True if type(self) == type(other) and self.width == other.width else False class UInt(Bits): pass class SInt(Bits): pass
18.27027
93
0.579882
84
676
4.428571
0.345238
0.145161
0.112903
0.096774
0
0
0
0
0
0
0
0.004202
0.295858
676
37
94
18.27027
0.777311
0.073965
0
0.272727
0
0
0.075563
0.064309
0
0
0
0
0
1
0.272727
false
0.090909
0
0.227273
0.636364
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
f8e2f3ba1481719d33e96615a38eddb6f35a830b
106
py
Python
Labs/1_hello.py
bgoldstone/Computer_Science_I
03182e61c5110c2cf7ca60bb8363d04bb26c3fba
[ "MIT" ]
null
null
null
Labs/1_hello.py
bgoldstone/Computer_Science_I
03182e61c5110c2cf7ca60bb8363d04bb26c3fba
[ "MIT" ]
null
null
null
Labs/1_hello.py
bgoldstone/Computer_Science_I
03182e61c5110c2cf7ca60bb8363d04bb26c3fba
[ "MIT" ]
null
null
null
# 1_hello.py - My first program # Author: Ben Goldstone # Date: 08/25/2020 print("Hello, World!")
21.2
31
0.641509
16
106
4.1875
0.9375
0
0
0
0
0
0
0
0
0
0
0.108434
0.216981
106
4
32
26.5
0.698795
0.716981
0
0
0
0
0.5
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
f8ecf1b26a5fc5876c19300cdb20e7b645b9f03d
197
py
Python
dcel/__init__.py
dadangeuy/dcel
31369a49755f1a32098668cf834c6a89eed84ee4
[ "BSD-3-Clause" ]
null
null
null
dcel/__init__.py
dadangeuy/dcel
31369a49755f1a32098668cf834c6a89eed84ee4
[ "BSD-3-Clause" ]
null
null
null
dcel/__init__.py
dadangeuy/dcel
31369a49755f1a32098668cf834c6a89eed84ee4
[ "BSD-3-Clause" ]
null
null
null
from dcel._cache_invalidate import CacheInvalidate from dcel._cache_update import CacheUpdate from dcel._cached import Cached __all__ = [ 'CacheInvalidate', 'CacheUpdate', 'Cached', ]
19.7
50
0.761421
21
197
6.714286
0.47619
0.170213
0.184397
0
0
0
0
0
0
0
0
0
0.162437
197
9
51
21.888889
0.854545
0
0
0
0
0
0.162437
0
0
0
0
0
0
1
0
false
0
0.375
0
0.375
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
5d044dcf7375964c595392aa15358aad8dda0a41
1,840
py
Python
tensornetwork/__init__.py
sr33dhar/TensorNetwork
7a755ac004514561e4e018bf4c6f98e1f3b6d650
[ "Apache-2.0" ]
null
null
null
tensornetwork/__init__.py
sr33dhar/TensorNetwork
7a755ac004514561e4e018bf4c6f98e1f3b6d650
[ "Apache-2.0" ]
null
null
null
tensornetwork/__init__.py
sr33dhar/TensorNetwork
7a755ac004514561e4e018bf4c6f98e1f3b6d650
[ "Apache-2.0" ]
null
null
null
#pylint: disable=line-too-long from tensornetwork.network_components import Node, Edge, CopyNode, BaseNode, NodeCollection #pylint: disable=line-too-long from tensornetwork.network_operations import norm, conj, copy, replicate_nodes, transpose, split_node, split_node_qr, split_node_rq, split_node_full_svd, reachable, check_connected, check_correct, get_all_nodes, get_all_edges, remove_node, contract_trace_edges, get_subgraph_dangling, reduced_density, switch_backend, get_neighbors, kron #pylint: disable=line-too-long from tensornetwork.network_components import contract, contract_copy_node, contract_between, outer_product, outer_product_final_nodes, contract_parallel, flatten_edges, split_edge, slice_edge, get_all_nondangling, get_all_dangling, flatten_all_edges, flatten_edges_between, get_parallel_edges, get_shared_edges from tensornetwork.backends.base_backend import BaseBackend from tensornetwork.network_components import connect, disconnect from tensornetwork.ncon_interface import ncon, ncon_network from tensornetwork.version import __version__ from tensornetwork.visualization.graphviz import to_graphviz from tensornetwork import contractors from tensornetwork.utils import load_nodes, save_nodes from tensornetwork.matrixproductstates.infinite_mps import InfiniteMPS from tensornetwork.matrixproductstates.finite_mps import FiniteMPS from tensornetwork.matrixproductstates.dmrg import FiniteDMRG from tensornetwork.matrixproductstates.mpo import FiniteTFI, FiniteXXZ from tensornetwork.backend_contextmanager import DefaultBackend, set_default_backend from tensornetwork import block_sparse from tensornetwork.block_sparse.blocksparsetensor import BlockSparseTensor, ChargeArray from tensornetwork.block_sparse.index import Index from tensornetwork.block_sparse.charge import U1Charge, BaseCharge, Z2Charge, ZNCharge
80
337
0.882065
229
1,840
6.786026
0.41048
0.207851
0.061776
0.03861
0.138996
0.113256
0.113256
0.113256
0.082368
0.082368
0
0.001169
0.070109
1,840
22
338
83.636364
0.907072
0.047283
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
5d1228e2f0aec1011c1f98e05b532c23d0e260d7
253
py
Python
scripts/get_footprint_from_pbcoverage.py
jturne19/legus-alma
e6d8c512c57cf2447dd9bc2977d27c8d31b9abed
[ "MIT" ]
null
null
null
scripts/get_footprint_from_pbcoverage.py
jturne19/legus-alma
e6d8c512c57cf2447dd9bc2977d27c8d31b9abed
[ "MIT" ]
null
null
null
scripts/get_footprint_from_pbcoverage.py
jturne19/legus-alma
e6d8c512c57cf2447dd9bc2977d27c8d31b9abed
[ "MIT" ]
null
null
null
""" """ import numpy as np import fitsio def get_footprint(pbcoverage, name): """ use the pcoverage map to get the alma footprint if at 80 percent or better, then it's in the footprint pbcoverage: string; filename of the pbcoverage map """
15.8125
55
0.711462
39
253
4.589744
0.74359
0.212291
0
0
0
0
0
0
0
0
0
0.01
0.209486
253
15
56
16.866667
0.885
0.660079
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.666667
0
1
0.333333
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
4
5d13f079ff7d7f3ae110ce7827177b940590f450
457
py
Python
mahjong/rule/model/touch_play_pair.py
feiyaaaa/mahjong
886028e03c991cfa3904b226d473b087ce819565
[ "MIT" ]
null
null
null
mahjong/rule/model/touch_play_pair.py
feiyaaaa/mahjong
886028e03c991cfa3904b226d473b087ce819565
[ "MIT" ]
null
null
null
mahjong/rule/model/touch_play_pair.py
feiyaaaa/mahjong
886028e03c991cfa3904b226d473b087ce819565
[ "MIT" ]
null
null
null
from mahjong.rule.model.tile import Tile class TouchPlayPair(object): def __init__(self, touch_card, play_card): self.touch_card = Tile(touch_card) self.play_card = Tile(play_card) def touch_tile(self): return self.touch_card.tile def play_tile(self): return self.play_card.tile def __str__(self): return "摸牌:{},打牌:{} ".format(self.touch_card.__str__(), self.play_card.__str__())
26.882353
90
0.649891
62
457
4.33871
0.322581
0.167286
0.193309
0.126394
0
0
0
0
0
0
0
0
0.234136
457
16
91
28.5625
0.768571
0
0
0
0
0
0.027211
0
0
0
0
0
0
1
0.363636
false
0
0.090909
0.272727
0.818182
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
5d17342790385f61b667fc1261ec061ec3d29928
84
py
Python
source/tools/helper.py
rprinz08/hBPF
eab8f617f9827faaeb10db2395b8af93a5225629
[ "BSD-3-Clause" ]
84
2021-06-07T17:44:17.000Z
2022-03-26T19:51:03.000Z
source/tools/helper.py
rprinz08/hBPF
eab8f617f9827faaeb10db2395b8af93a5225629
[ "BSD-3-Clause" ]
null
null
null
source/tools/helper.py
rprinz08/hBPF
eab8f617f9827faaeb10db2395b8af93a5225629
[ "BSD-3-Clause" ]
4
2021-06-25T21:08:55.000Z
2022-01-06T11:26:28.000Z
def print_hex_list(lst): print("[{}]".format(", ".join(hex(x) for x in lst)))
16.8
56
0.571429
14
84
3.285714
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.166667
84
4
57
21
0.657143
0
0
0
0
0
0.073171
0
0
0
0
0
0
1
0.5
false
0
0
0
0.5
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
1
0
4
5d2bb2e838ce0f7521cfa90cb8e42b60e5795c74
711
py
Python
balanced_binary_tree.py
Jwy-jump/python_codesets
bb9a38d5dbf7be4d34b6b502ee684bb48dcfcd31
[ "Apache-2.0" ]
null
null
null
balanced_binary_tree.py
Jwy-jump/python_codesets
bb9a38d5dbf7be4d34b6b502ee684bb48dcfcd31
[ "Apache-2.0" ]
null
null
null
balanced_binary_tree.py
Jwy-jump/python_codesets
bb9a38d5dbf7be4d34b6b502ee684bb48dcfcd31
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 class Solution: """ @param root: The root of binary tree. @return: True if this Binary tree is Balanced, or false. """ def isBalanced(self, root): # write your code here if not root: return True else: if abs(self._isBalanced(root.left) - self._isBalanced(root.right)) <= 1: return self.isBalanced(root.left) and self.isBalanced(root.right) else: return False def _isBalanced(self, root): if not root: return 0 return 1 + max(self._isBalanced(root.left), self._isBalanced(root.right)) # easy: http://lintcode.com/zh-cn/problem/balanced-binary-tree/
29.625
84
0.590717
90
711
4.611111
0.466667
0.20241
0.260241
0.159036
0.342169
0.216867
0.216867
0.216867
0
0
0
0.008048
0.300985
711
23
85
30.913043
0.826962
0.270042
0
0.307692
0
0
0
0
0
0
0
0.043478
0
1
0.153846
false
0
0
0
0.615385
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
4
5d316e44668a64dead0300ab5dde8c5074bf4ce0
115
py
Python
gdal_boots/__init__.py
habibutsu/gdal-boots
bc8d045c025b68570d091d1cdadcc54b44335cc5
[ "MIT" ]
null
null
null
gdal_boots/__init__.py
habibutsu/gdal-boots
bc8d045c025b68570d091d1cdadcc54b44335cc5
[ "MIT" ]
null
null
null
gdal_boots/__init__.py
habibutsu/gdal-boots
bc8d045c025b68570d091d1cdadcc54b44335cc5
[ "MIT" ]
1
2021-09-25T08:24:11.000Z
2021-09-25T08:24:11.000Z
__version__ = '0.1.33' from .gdal import GeoInfo, RasterDataset, Resampling, VectorDataset from .options import *
23
67
0.773913
14
115
6.071429
0.857143
0
0
0
0
0
0
0
0
0
0
0.04
0.130435
115
4
68
28.75
0.81
0
0
0
0
0
0.052174
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
5d4826abb2a06d5e73e84547e74b902cf6cb1453
152
py
Python
nerd_herder/code_of_conduct/apps.py
cache-rules/nerd_herder
73048a7bc6f2a5d5cf31099e60f5712d1c005f22
[ "Apache-2.0" ]
7
2018-05-30T04:56:37.000Z
2019-11-15T03:25:55.000Z
nerd_herder/code_of_conduct/apps.py
cache-rules/nerd_herder
73048a7bc6f2a5d5cf31099e60f5712d1c005f22
[ "Apache-2.0" ]
17
2018-05-25T06:29:44.000Z
2022-02-26T08:34:21.000Z
nerd_herder/code_of_conduct/apps.py
cache-rules/nerd_herder
73048a7bc6f2a5d5cf31099e60f5712d1c005f22
[ "Apache-2.0" ]
4
2018-02-19T20:38:38.000Z
2019-10-25T18:07:08.000Z
from django.apps import AppConfig class CodeOfConductConfig(AppConfig): name = "nerd_herder.code_of_conduct" verbose_name = "Code of Conduct"
21.714286
40
0.769737
19
152
5.947368
0.736842
0.106195
0.230089
0
0
0
0
0
0
0
0
0
0.157895
152
6
41
25.333333
0.882813
0
0
0
0
0
0.276316
0.177632
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
5d5c23351812742d81204f4251224e5ec632f4a9
55
py
Python
ppma/imagenet/__init__.py
lmk123568/Paddle_Model_Analysis
d03a82591512fcad065fc69abfdbfb0835099c2d
[ "MIT" ]
13
2021-07-03T05:26:56.000Z
2022-03-19T01:34:11.000Z
ppma/imagenet/__init__.py
lmk123568/Paddle_Model_Analysis
d03a82591512fcad065fc69abfdbfb0835099c2d
[ "MIT" ]
1
2021-11-18T07:07:03.000Z
2021-11-23T01:50:01.000Z
ppma/imagenet/__init__.py
lmk123568/Paddle_Model_Analysis
d03a82591512fcad065fc69abfdbfb0835099c2d
[ "MIT" ]
1
2022-03-19T01:34:15.000Z
2022-03-19T01:34:15.000Z
from .test import test_img from .validate import val
18.333333
27
0.781818
9
55
4.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.181818
55
2
28
27.5
0.933333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
5d70433fe1e70efc478a49b393234605d0ccb603
166
py
Python
micropython/main.py
penguintutor/rpi-pico-temperature
13b50ace250bca399b9b01d3d297b5b757318d51
[ "MIT" ]
3
2021-06-03T12:02:36.000Z
2022-02-22T07:59:15.000Z
micropython/main.py
penguintutor/rpi-pico-temperature
13b50ace250bca399b9b01d3d297b5b757318d51
[ "MIT" ]
null
null
null
micropython/main.py
penguintutor/rpi-pico-temperature
13b50ace250bca399b9b01d3d297b5b757318d51
[ "MIT" ]
1
2021-09-29T09:53:14.000Z
2021-09-29T09:53:14.000Z
import tempdisplay from machine import Pin import utime # Add short delay when starting from boot to allow # I2C devices to settle utime.sleep(1) tempdisplay.main()
18.444444
50
0.795181
26
166
5.076923
0.769231
0
0
0
0
0
0
0
0
0
0
0.014286
0.156627
166
8
51
20.75
0.928571
0.421687
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.6
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
5d73747881ec90150604a1583ab3282d9a46f000
177
py
Python
doc/test_combinations.py
Mystic-Mirage/pytest-kwparametrize
7d6aacef3f91f7a01969a296046d370db4295ea9
[ "MIT" ]
2
2021-10-04T20:02:53.000Z
2022-03-25T01:56:13.000Z
doc/test_combinations.py
Mystic-Mirage/pytest-kwparametrize
7d6aacef3f91f7a01969a296046d370db4295ea9
[ "MIT" ]
1
2021-06-09T19:55:54.000Z
2021-06-09T19:55:54.000Z
doc/test_combinations.py
Mystic-Mirage/pytest-kwparametrize
7d6aacef3f91f7a01969a296046d370db4295ea9
[ "MIT" ]
1
2021-05-31T09:30:08.000Z
2021-05-31T09:30:08.000Z
# content of test_combinations.py import pytest @pytest.mark.kwparametrize(dict(x=0), dict(x=1)) @pytest.mark.kwparametrize(dict(y=2), dict(y=3)) def test_foo(x, y): pass
19.666667
48
0.717514
31
177
4.032258
0.612903
0.16
0.368
0.432
0
0
0
0
0
0
0
0.025478
0.112994
177
8
49
22.125
0.770701
0.175141
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0.2
0.2
0
0.4
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
5381c83f1d3a93693f13511302ab588a7c11edcf
247
py
Python
icusim/__init__.py
Trauma-Team-International/ICUSIM
077bf0989d013bec8f7b297391b6c32ae9c37ba5
[ "MIT" ]
3
2020-04-09T17:46:51.000Z
2020-07-07T17:37:20.000Z
icusim/__init__.py
Trauma-Team-International/ICUSIM
077bf0989d013bec8f7b297391b6c32ae9c37ba5
[ "MIT" ]
9
2020-04-09T17:39:29.000Z
2020-04-12T17:00:15.000Z
icusim/__init__.py
autonomio/ICUSIM
077bf0989d013bec8f7b297391b6c32ae9c37ba5
[ "MIT" ]
1
2022-03-27T21:32:30.000Z
2022-03-27T21:32:30.000Z
from .icu_burden_simulator import simulate from .stats_to_dataframe import stats_to_dataframe from .params import params from .utils import columns from .commands.MonteCarlo import MonteCarlo from .commands.SobolSensitivity import SobolSensitivity
41.166667
55
0.874494
32
247
6.5625
0.46875
0.066667
0.152381
0
0
0
0
0
0
0
0
0
0.093117
247
6
55
41.166667
0.9375
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
53846b2a83c62073331d6f4a26d77af724a41344
128,742
py
Python
sdk/python/pulumi_azure/storage/outputs.py
henriktao/pulumi-azure
f1cbcf100b42b916da36d8fe28be3a159abaf022
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure/storage/outputs.py
henriktao/pulumi-azure
f1cbcf100b42b916da36d8fe28be3a159abaf022
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure/storage/outputs.py
henriktao/pulumi-azure
f1cbcf100b42b916da36d8fe28be3a159abaf022
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities from . import outputs __all__ = [ 'AccountAzureFilesAuthentication', 'AccountAzureFilesAuthenticationActiveDirectory', 'AccountBlobProperties', 'AccountBlobPropertiesContainerDeleteRetentionPolicy', 'AccountBlobPropertiesCorsRule', 'AccountBlobPropertiesDeleteRetentionPolicy', 'AccountCustomDomain', 'AccountCustomerManagedKey', 'AccountIdentity', 'AccountNetworkRules', 'AccountNetworkRulesPrivateLinkAccess', 'AccountNetworkRulesPrivateLinkAccessRule', 'AccountQueueProperties', 'AccountQueuePropertiesCorsRule', 'AccountQueuePropertiesHourMetrics', 'AccountQueuePropertiesLogging', 'AccountQueuePropertiesMinuteMetrics', 'AccountRouting', 'AccountShareProperties', 'AccountSharePropertiesCorsRule', 'AccountSharePropertiesRetentionPolicy', 'AccountSharePropertiesSmb', 'AccountStaticWebsite', 'BlobInventoryPolicyRule', 'BlobInventoryPolicyRuleFilter', 'DataLakeGen2FilesystemAce', 'DataLakeGen2PathAce', 'ManagementPolicyRule', 'ManagementPolicyRuleActions', 'ManagementPolicyRuleActionsBaseBlob', 'ManagementPolicyRuleActionsSnapshot', 'ManagementPolicyRuleActionsVersion', 'ManagementPolicyRuleFilters', 'ManagementPolicyRuleFiltersMatchBlobIndexTag', 'ObjectReplicationRule', 'ShareAcl', 'ShareAclAccessPolicy', 'TableAcl', 'TableAclAccessPolicy', 'GetAccountBlobContainerSASPermissionsResult', 'GetAccountCustomDomainResult', 'GetAccountSASPermissionsResult', 'GetAccountSASResourceTypesResult', 'GetAccountSASServicesResult', 'GetPolicyRuleResult', 'GetPolicyRuleActionResult', 'GetPolicyRuleActionBaseBlobResult', 'GetPolicyRuleActionSnapshotResult', 'GetPolicyRuleActionVersionResult', 'GetPolicyRuleFilterResult', 
'GetPolicyRuleFilterMatchBlobIndexTagResult', 'GetShareAclResult', 'GetShareAclAccessPolicyResult', ] @pulumi.output_type class AccountAzureFilesAuthentication(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "directoryType": suggest = "directory_type" elif key == "activeDirectory": suggest = "active_directory" if suggest: pulumi.log.warn(f"Key '{key}' not found in AccountAzureFilesAuthentication. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AccountAzureFilesAuthentication.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AccountAzureFilesAuthentication.__key_warning(key) return super().get(key, default) def __init__(__self__, *, directory_type: str, active_directory: Optional['outputs.AccountAzureFilesAuthenticationActiveDirectory'] = None): """ :param str directory_type: Specifies the directory service used. Possible values are `AADDS` and `AD`. :param 'AccountAzureFilesAuthenticationActiveDirectoryArgs' active_directory: A `active_directory` block as defined below. Required when `directory_type` is `AD`. """ pulumi.set(__self__, "directory_type", directory_type) if active_directory is not None: pulumi.set(__self__, "active_directory", active_directory) @property @pulumi.getter(name="directoryType") def directory_type(self) -> str: """ Specifies the directory service used. Possible values are `AADDS` and `AD`. """ return pulumi.get(self, "directory_type") @property @pulumi.getter(name="activeDirectory") def active_directory(self) -> Optional['outputs.AccountAzureFilesAuthenticationActiveDirectory']: """ A `active_directory` block as defined below. Required when `directory_type` is `AD`. 
""" return pulumi.get(self, "active_directory") @pulumi.output_type class AccountAzureFilesAuthenticationActiveDirectory(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "domainGuid": suggest = "domain_guid" elif key == "domainName": suggest = "domain_name" elif key == "domainSid": suggest = "domain_sid" elif key == "forestName": suggest = "forest_name" elif key == "netbiosDomainName": suggest = "netbios_domain_name" elif key == "storageSid": suggest = "storage_sid" if suggest: pulumi.log.warn(f"Key '{key}' not found in AccountAzureFilesAuthenticationActiveDirectory. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AccountAzureFilesAuthenticationActiveDirectory.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AccountAzureFilesAuthenticationActiveDirectory.__key_warning(key) return super().get(key, default) def __init__(__self__, *, domain_guid: str, domain_name: str, domain_sid: str, forest_name: str, netbios_domain_name: str, storage_sid: str): """ :param str domain_guid: Specifies the domain GUID. :param str domain_name: Specifies the primary domain that the AD DNS server is authoritative for. :param str domain_sid: Specifies the security identifier (SID). :param str forest_name: Specifies the Active Directory forest. :param str netbios_domain_name: Specifies the NetBIOS domain name. :param str storage_sid: Specifies the security identifier (SID) for Azure Storage. """ pulumi.set(__self__, "domain_guid", domain_guid) pulumi.set(__self__, "domain_name", domain_name) pulumi.set(__self__, "domain_sid", domain_sid) pulumi.set(__self__, "forest_name", forest_name) pulumi.set(__self__, "netbios_domain_name", netbios_domain_name) pulumi.set(__self__, "storage_sid", storage_sid) @property @pulumi.getter(name="domainGuid") def domain_guid(self) -> str: """ Specifies the domain GUID. 
""" return pulumi.get(self, "domain_guid") @property @pulumi.getter(name="domainName") def domain_name(self) -> str: """ Specifies the primary domain that the AD DNS server is authoritative for. """ return pulumi.get(self, "domain_name") @property @pulumi.getter(name="domainSid") def domain_sid(self) -> str: """ Specifies the security identifier (SID). """ return pulumi.get(self, "domain_sid") @property @pulumi.getter(name="forestName") def forest_name(self) -> str: """ Specifies the Active Directory forest. """ return pulumi.get(self, "forest_name") @property @pulumi.getter(name="netbiosDomainName") def netbios_domain_name(self) -> str: """ Specifies the NetBIOS domain name. """ return pulumi.get(self, "netbios_domain_name") @property @pulumi.getter(name="storageSid") def storage_sid(self) -> str: """ Specifies the security identifier (SID) for Azure Storage. """ return pulumi.get(self, "storage_sid") @pulumi.output_type class AccountBlobProperties(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "changeFeedEnabled": suggest = "change_feed_enabled" elif key == "containerDeleteRetentionPolicy": suggest = "container_delete_retention_policy" elif key == "corsRules": suggest = "cors_rules" elif key == "defaultServiceVersion": suggest = "default_service_version" elif key == "deleteRetentionPolicy": suggest = "delete_retention_policy" elif key == "lastAccessTimeEnabled": suggest = "last_access_time_enabled" elif key == "versioningEnabled": suggest = "versioning_enabled" if suggest: pulumi.log.warn(f"Key '{key}' not found in AccountBlobProperties. 
Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AccountBlobProperties.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AccountBlobProperties.__key_warning(key) return super().get(key, default) def __init__(__self__, *, change_feed_enabled: Optional[bool] = None, container_delete_retention_policy: Optional['outputs.AccountBlobPropertiesContainerDeleteRetentionPolicy'] = None, cors_rules: Optional[Sequence['outputs.AccountBlobPropertiesCorsRule']] = None, default_service_version: Optional[str] = None, delete_retention_policy: Optional['outputs.AccountBlobPropertiesDeleteRetentionPolicy'] = None, last_access_time_enabled: Optional[bool] = None, versioning_enabled: Optional[bool] = None): """ :param bool change_feed_enabled: Is the blob service properties for change feed events enabled? Default to `false`. :param 'AccountBlobPropertiesContainerDeleteRetentionPolicyArgs' container_delete_retention_policy: A `container_delete_retention_policy` block as defined below. :param Sequence['AccountBlobPropertiesCorsRuleArgs'] cors_rules: A `cors_rule` block as defined below. :param str default_service_version: The API Version which should be used by default for requests to the Data Plane API if an incoming request doesn't specify an API Version. Defaults to `2020-06-12`. :param 'AccountBlobPropertiesDeleteRetentionPolicyArgs' delete_retention_policy: A `delete_retention_policy` block as defined below. :param bool last_access_time_enabled: Is the last access time based tracking enabled? Default to `false`. :param bool versioning_enabled: Is versioning enabled? Default to `false`. 
""" if change_feed_enabled is not None: pulumi.set(__self__, "change_feed_enabled", change_feed_enabled) if container_delete_retention_policy is not None: pulumi.set(__self__, "container_delete_retention_policy", container_delete_retention_policy) if cors_rules is not None: pulumi.set(__self__, "cors_rules", cors_rules) if default_service_version is not None: pulumi.set(__self__, "default_service_version", default_service_version) if delete_retention_policy is not None: pulumi.set(__self__, "delete_retention_policy", delete_retention_policy) if last_access_time_enabled is not None: pulumi.set(__self__, "last_access_time_enabled", last_access_time_enabled) if versioning_enabled is not None: pulumi.set(__self__, "versioning_enabled", versioning_enabled) @property @pulumi.getter(name="changeFeedEnabled") def change_feed_enabled(self) -> Optional[bool]: """ Is the blob service properties for change feed events enabled? Default to `false`. """ return pulumi.get(self, "change_feed_enabled") @property @pulumi.getter(name="containerDeleteRetentionPolicy") def container_delete_retention_policy(self) -> Optional['outputs.AccountBlobPropertiesContainerDeleteRetentionPolicy']: """ A `container_delete_retention_policy` block as defined below. """ return pulumi.get(self, "container_delete_retention_policy") @property @pulumi.getter(name="corsRules") def cors_rules(self) -> Optional[Sequence['outputs.AccountBlobPropertiesCorsRule']]: """ A `cors_rule` block as defined below. """ return pulumi.get(self, "cors_rules") @property @pulumi.getter(name="defaultServiceVersion") def default_service_version(self) -> Optional[str]: """ The API Version which should be used by default for requests to the Data Plane API if an incoming request doesn't specify an API Version. Defaults to `2020-06-12`. 
""" return pulumi.get(self, "default_service_version") @property @pulumi.getter(name="deleteRetentionPolicy") def delete_retention_policy(self) -> Optional['outputs.AccountBlobPropertiesDeleteRetentionPolicy']: """ A `delete_retention_policy` block as defined below. """ return pulumi.get(self, "delete_retention_policy") @property @pulumi.getter(name="lastAccessTimeEnabled") def last_access_time_enabled(self) -> Optional[bool]: """ Is the last access time based tracking enabled? Default to `false`. """ return pulumi.get(self, "last_access_time_enabled") @property @pulumi.getter(name="versioningEnabled") def versioning_enabled(self) -> Optional[bool]: """ Is versioning enabled? Default to `false`. """ return pulumi.get(self, "versioning_enabled") @pulumi.output_type class AccountBlobPropertiesContainerDeleteRetentionPolicy(dict): def __init__(__self__, *, days: Optional[int] = None): """ :param int days: Specifies the number of days that the container should be retained, between `1` and `365` days. Defaults to `7`. """ if days is not None: pulumi.set(__self__, "days", days) @property @pulumi.getter def days(self) -> Optional[int]: """ Specifies the number of days that the container should be retained, between `1` and `365` days. Defaults to `7`. """ return pulumi.get(self, "days") @pulumi.output_type class AccountBlobPropertiesCorsRule(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "allowedHeaders": suggest = "allowed_headers" elif key == "allowedMethods": suggest = "allowed_methods" elif key == "allowedOrigins": suggest = "allowed_origins" elif key == "exposedHeaders": suggest = "exposed_headers" elif key == "maxAgeInSeconds": suggest = "max_age_in_seconds" if suggest: pulumi.log.warn(f"Key '{key}' not found in AccountBlobPropertiesCorsRule. 
Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AccountBlobPropertiesCorsRule.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AccountBlobPropertiesCorsRule.__key_warning(key) return super().get(key, default) def __init__(__self__, *, allowed_headers: Sequence[str], allowed_methods: Sequence[str], allowed_origins: Sequence[str], exposed_headers: Sequence[str], max_age_in_seconds: int): """ :param Sequence[str] allowed_headers: A list of headers that are allowed to be a part of the cross-origin request. :param Sequence[str] allowed_methods: A list of http methods that are allowed to be executed by the origin. Valid options are `DELETE`, `GET`, `HEAD`, `MERGE`, `POST`, `OPTIONS`, `PUT` or `PATCH`. :param Sequence[str] allowed_origins: A list of origin domains that will be allowed by CORS. :param Sequence[str] exposed_headers: A list of response headers that are exposed to CORS clients. :param int max_age_in_seconds: The number of seconds the client should cache a preflight response. """ pulumi.set(__self__, "allowed_headers", allowed_headers) pulumi.set(__self__, "allowed_methods", allowed_methods) pulumi.set(__self__, "allowed_origins", allowed_origins) pulumi.set(__self__, "exposed_headers", exposed_headers) pulumi.set(__self__, "max_age_in_seconds", max_age_in_seconds) @property @pulumi.getter(name="allowedHeaders") def allowed_headers(self) -> Sequence[str]: """ A list of headers that are allowed to be a part of the cross-origin request. """ return pulumi.get(self, "allowed_headers") @property @pulumi.getter(name="allowedMethods") def allowed_methods(self) -> Sequence[str]: """ A list of http methods that are allowed to be executed by the origin. Valid options are `DELETE`, `GET`, `HEAD`, `MERGE`, `POST`, `OPTIONS`, `PUT` or `PATCH`. 
""" return pulumi.get(self, "allowed_methods") @property @pulumi.getter(name="allowedOrigins") def allowed_origins(self) -> Sequence[str]: """ A list of origin domains that will be allowed by CORS. """ return pulumi.get(self, "allowed_origins") @property @pulumi.getter(name="exposedHeaders") def exposed_headers(self) -> Sequence[str]: """ A list of response headers that are exposed to CORS clients. """ return pulumi.get(self, "exposed_headers") @property @pulumi.getter(name="maxAgeInSeconds") def max_age_in_seconds(self) -> int: """ The number of seconds the client should cache a preflight response. """ return pulumi.get(self, "max_age_in_seconds") @pulumi.output_type class AccountBlobPropertiesDeleteRetentionPolicy(dict): def __init__(__self__, *, days: Optional[int] = None): """ :param int days: Specifies the number of days that the blob should be retained, between `1` and `365` days. Defaults to `7`. """ if days is not None: pulumi.set(__self__, "days", days) @property @pulumi.getter def days(self) -> Optional[int]: """ Specifies the number of days that the blob should be retained, between `1` and `365` days. Defaults to `7`. """ return pulumi.get(self, "days") @pulumi.output_type class AccountCustomDomain(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "useSubdomain": suggest = "use_subdomain" if suggest: pulumi.log.warn(f"Key '{key}' not found in AccountCustomDomain. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AccountCustomDomain.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AccountCustomDomain.__key_warning(key) return super().get(key, default) def __init__(__self__, *, name: str, use_subdomain: Optional[bool] = None): """ :param str name: The Custom Domain Name to use for the Storage Account, which will be validated by Azure. 
:param bool use_subdomain: Should the Custom Domain Name be validated by using indirect CNAME validation? """ pulumi.set(__self__, "name", name) if use_subdomain is not None: pulumi.set(__self__, "use_subdomain", use_subdomain) @property @pulumi.getter def name(self) -> str: """ The Custom Domain Name to use for the Storage Account, which will be validated by Azure. """ return pulumi.get(self, "name") @property @pulumi.getter(name="useSubdomain") def use_subdomain(self) -> Optional[bool]: """ Should the Custom Domain Name be validated by using indirect CNAME validation? """ return pulumi.get(self, "use_subdomain") @pulumi.output_type class AccountCustomerManagedKey(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "keyVaultKeyId": suggest = "key_vault_key_id" elif key == "userAssignedIdentityId": suggest = "user_assigned_identity_id" if suggest: pulumi.log.warn(f"Key '{key}' not found in AccountCustomerManagedKey. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AccountCustomerManagedKey.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AccountCustomerManagedKey.__key_warning(key) return super().get(key, default) def __init__(__self__, *, key_vault_key_id: str, user_assigned_identity_id: str): """ :param str key_vault_key_id: The ID of the Key Vault Key, supplying a version-less key ID will enable auto-rotation of this key. :param str user_assigned_identity_id: The ID of a user assigned identity. """ pulumi.set(__self__, "key_vault_key_id", key_vault_key_id) pulumi.set(__self__, "user_assigned_identity_id", user_assigned_identity_id) @property @pulumi.getter(name="keyVaultKeyId") def key_vault_key_id(self) -> str: """ The ID of the Key Vault Key, supplying a version-less key ID will enable auto-rotation of this key. 
""" return pulumi.get(self, "key_vault_key_id") @property @pulumi.getter(name="userAssignedIdentityId") def user_assigned_identity_id(self) -> str: """ The ID of a user assigned identity. """ return pulumi.get(self, "user_assigned_identity_id") @pulumi.output_type class AccountIdentity(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "identityIds": suggest = "identity_ids" elif key == "principalId": suggest = "principal_id" elif key == "tenantId": suggest = "tenant_id" if suggest: pulumi.log.warn(f"Key '{key}' not found in AccountIdentity. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AccountIdentity.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AccountIdentity.__key_warning(key) return super().get(key, default) def __init__(__self__, *, type: str, identity_ids: Optional[Sequence[str]] = None, principal_id: Optional[str] = None, tenant_id: Optional[str] = None): """ :param str type: Specifies the identity type of the Storage Account. Possible values are `SystemAssigned`, `UserAssigned` and `SystemAssigned, UserAssigned` (to enable both). :param Sequence[str] identity_ids: A list of IDs for User Assigned Managed Identity resources to be assigned. :param str principal_id: The Principal ID for the Service Principal associated with the Identity of this Storage Account. :param str tenant_id: The Tenant ID for the Service Principal associated with the Identity of this Storage Account. """ pulumi.set(__self__, "type", type) if identity_ids is not None: pulumi.set(__self__, "identity_ids", identity_ids) if principal_id is not None: pulumi.set(__self__, "principal_id", principal_id) if tenant_id is not None: pulumi.set(__self__, "tenant_id", tenant_id) @property @pulumi.getter def type(self) -> str: """ Specifies the identity type of the Storage Account. 
Possible values are `SystemAssigned`, `UserAssigned` and `SystemAssigned, UserAssigned` (to enable both). """ return pulumi.get(self, "type") @property @pulumi.getter(name="identityIds") def identity_ids(self) -> Optional[Sequence[str]]: """ A list of IDs for User Assigned Managed Identity resources to be assigned. """ return pulumi.get(self, "identity_ids") @property @pulumi.getter(name="principalId") def principal_id(self) -> Optional[str]: """ The Principal ID for the Service Principal associated with the Identity of this Storage Account. """ return pulumi.get(self, "principal_id") @property @pulumi.getter(name="tenantId") def tenant_id(self) -> Optional[str]: """ The Tenant ID for the Service Principal associated with the Identity of this Storage Account. """ return pulumi.get(self, "tenant_id") @pulumi.output_type class AccountNetworkRules(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "defaultAction": suggest = "default_action" elif key == "ipRules": suggest = "ip_rules" elif key == "privateLinkAccesses": suggest = "private_link_accesses" elif key == "virtualNetworkSubnetIds": suggest = "virtual_network_subnet_ids" if suggest: pulumi.log.warn(f"Key '{key}' not found in AccountNetworkRules. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AccountNetworkRules.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AccountNetworkRules.__key_warning(key) return super().get(key, default) def __init__(__self__, *, default_action: str, bypasses: Optional[Sequence[str]] = None, ip_rules: Optional[Sequence[str]] = None, private_link_accesses: Optional[Sequence['outputs.AccountNetworkRulesPrivateLinkAccess']] = None, virtual_network_subnet_ids: Optional[Sequence[str]] = None): """ :param str default_action: Specifies the default action of allow or deny when no other rules match. Valid options are `Deny` or `Allow`. 
:param Sequence[str] bypasses: Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. Valid options are any combination of `Logging`, `Metrics`, `AzureServices`, or `None`. :param Sequence[str] ip_rules: List of public IP or IP ranges in CIDR Format. Only IPV4 addresses are allowed. Private IP address ranges (as defined in [RFC 1918](https://tools.ietf.org/html/rfc1918#section-3)) are not allowed. :param Sequence['AccountNetworkRulesPrivateLinkAccessArgs'] private_link_accesses: One or More `private_link_access` block as defined below. :param Sequence[str] virtual_network_subnet_ids: A list of resource ids for subnets. """ pulumi.set(__self__, "default_action", default_action) if bypasses is not None: pulumi.set(__self__, "bypasses", bypasses) if ip_rules is not None: pulumi.set(__self__, "ip_rules", ip_rules) if private_link_accesses is not None: pulumi.set(__self__, "private_link_accesses", private_link_accesses) if virtual_network_subnet_ids is not None: pulumi.set(__self__, "virtual_network_subnet_ids", virtual_network_subnet_ids) @property @pulumi.getter(name="defaultAction") def default_action(self) -> str: """ Specifies the default action of allow or deny when no other rules match. Valid options are `Deny` or `Allow`. """ return pulumi.get(self, "default_action") @property @pulumi.getter def bypasses(self) -> Optional[Sequence[str]]: """ Specifies whether traffic is bypassed for Logging/Metrics/AzureServices. Valid options are any combination of `Logging`, `Metrics`, `AzureServices`, or `None`. """ return pulumi.get(self, "bypasses") @property @pulumi.getter(name="ipRules") def ip_rules(self) -> Optional[Sequence[str]]: """ List of public IP or IP ranges in CIDR Format. Only IPV4 addresses are allowed. Private IP address ranges (as defined in [RFC 1918](https://tools.ietf.org/html/rfc1918#section-3)) are not allowed. 
""" return pulumi.get(self, "ip_rules") @property @pulumi.getter(name="privateLinkAccesses") def private_link_accesses(self) -> Optional[Sequence['outputs.AccountNetworkRulesPrivateLinkAccess']]: """ One or More `private_link_access` block as defined below. """ return pulumi.get(self, "private_link_accesses") @property @pulumi.getter(name="virtualNetworkSubnetIds") def virtual_network_subnet_ids(self) -> Optional[Sequence[str]]: """ A list of resource ids for subnets. """ return pulumi.get(self, "virtual_network_subnet_ids") @pulumi.output_type class AccountNetworkRulesPrivateLinkAccess(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "endpointResourceId": suggest = "endpoint_resource_id" elif key == "endpointTenantId": suggest = "endpoint_tenant_id" if suggest: pulumi.log.warn(f"Key '{key}' not found in AccountNetworkRulesPrivateLinkAccess. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AccountNetworkRulesPrivateLinkAccess.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AccountNetworkRulesPrivateLinkAccess.__key_warning(key) return super().get(key, default) def __init__(__self__, *, endpoint_resource_id: str, endpoint_tenant_id: Optional[str] = None): """ :param str endpoint_resource_id: The resource id of the resource access rule to be granted access. :param str endpoint_tenant_id: The tenant id of the resource of the resource access rule to be granted access. Defaults to the current tenant id. """ pulumi.set(__self__, "endpoint_resource_id", endpoint_resource_id) if endpoint_tenant_id is not None: pulumi.set(__self__, "endpoint_tenant_id", endpoint_tenant_id) @property @pulumi.getter(name="endpointResourceId") def endpoint_resource_id(self) -> str: """ The resource id of the resource access rule to be granted access. 
""" return pulumi.get(self, "endpoint_resource_id") @property @pulumi.getter(name="endpointTenantId") def endpoint_tenant_id(self) -> Optional[str]: """ The tenant id of the resource of the resource access rule to be granted access. Defaults to the current tenant id. """ return pulumi.get(self, "endpoint_tenant_id") @pulumi.output_type class AccountNetworkRulesPrivateLinkAccessRule(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "endpointResourceId": suggest = "endpoint_resource_id" elif key == "endpointTenantId": suggest = "endpoint_tenant_id" if suggest: pulumi.log.warn(f"Key '{key}' not found in AccountNetworkRulesPrivateLinkAccessRule. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AccountNetworkRulesPrivateLinkAccessRule.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AccountNetworkRulesPrivateLinkAccessRule.__key_warning(key) return super().get(key, default) def __init__(__self__, *, endpoint_resource_id: str, endpoint_tenant_id: Optional[str] = None): """ :param str endpoint_resource_id: The resource id of the resource access rule to be granted access. :param str endpoint_tenant_id: The tenant id of the resource of the resource access rule to be granted access. Defaults to the current tenant id. """ pulumi.set(__self__, "endpoint_resource_id", endpoint_resource_id) if endpoint_tenant_id is not None: pulumi.set(__self__, "endpoint_tenant_id", endpoint_tenant_id) @property @pulumi.getter(name="endpointResourceId") def endpoint_resource_id(self) -> str: """ The resource id of the resource access rule to be granted access. """ return pulumi.get(self, "endpoint_resource_id") @property @pulumi.getter(name="endpointTenantId") def endpoint_tenant_id(self) -> Optional[str]: """ The tenant id of the resource of the resource access rule to be granted access. Defaults to the current tenant id. 
        """
        return pulumi.get(self, "endpoint_tenant_id")


# Generated Pulumi output type: a dict subclass whose camelCase wire keys
# ("corsRules", "hourMetrics", "minuteMetrics") are exposed through snake_case
# @property getters. Wraps a storage account's queue-service properties.
@pulumi.output_type
class AccountQueueProperties(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "corsRules":
            suggest = "cors_rules"
        elif key == "hourMetrics":
            suggest = "hour_metrics"
        elif key == "minuteMetrics":
            suggest = "minute_metrics"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AccountQueueProperties. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AccountQueueProperties.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AccountQueueProperties.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 cors_rules: Optional[Sequence['outputs.AccountQueuePropertiesCorsRule']] = None,
                 hour_metrics: Optional['outputs.AccountQueuePropertiesHourMetrics'] = None,
                 logging: Optional['outputs.AccountQueuePropertiesLogging'] = None,
                 minute_metrics: Optional['outputs.AccountQueuePropertiesMinuteMetrics'] = None):
        """
        :param Sequence['AccountQueuePropertiesCorsRuleArgs'] cors_rules: A `cors_rule` block as defined above.
        :param 'AccountQueuePropertiesHourMetricsArgs' hour_metrics: A `hour_metrics` block as defined below.
        :param 'AccountQueuePropertiesLoggingArgs' logging: A `logging` block as defined below.
        :param 'AccountQueuePropertiesMinuteMetricsArgs' minute_metrics: A `minute_metrics` block as defined below.
        """
        # All sub-blocks are optional; only set the keys that were provided.
        if cors_rules is not None:
            pulumi.set(__self__, "cors_rules", cors_rules)
        if hour_metrics is not None:
            pulumi.set(__self__, "hour_metrics", hour_metrics)
        if logging is not None:
            pulumi.set(__self__, "logging", logging)
        if minute_metrics is not None:
            pulumi.set(__self__, "minute_metrics", minute_metrics)

    @property
    @pulumi.getter(name="corsRules")
    def cors_rules(self) -> Optional[Sequence['outputs.AccountQueuePropertiesCorsRule']]:
        """
        A `cors_rule` block as defined above.
        """
        return pulumi.get(self, "cors_rules")

    @property
    @pulumi.getter(name="hourMetrics")
    def hour_metrics(self) -> Optional['outputs.AccountQueuePropertiesHourMetrics']:
        """
        A `hour_metrics` block as defined below.
        """
        return pulumi.get(self, "hour_metrics")

    @property
    @pulumi.getter
    def logging(self) -> Optional['outputs.AccountQueuePropertiesLogging']:
        """
        A `logging` block as defined below.
        """
        return pulumi.get(self, "logging")

    @property
    @pulumi.getter(name="minuteMetrics")
    def minute_metrics(self) -> Optional['outputs.AccountQueuePropertiesMinuteMetrics']:
        """
        A `minute_metrics` block as defined below.
        """
        return pulumi.get(self, "minute_metrics")


# Generated output type for a single queue-service CORS rule; all five
# fields are required.
@pulumi.output_type
class AccountQueuePropertiesCorsRule(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "allowedHeaders":
            suggest = "allowed_headers"
        elif key == "allowedMethods":
            suggest = "allowed_methods"
        elif key == "allowedOrigins":
            suggest = "allowed_origins"
        elif key == "exposedHeaders":
            suggest = "exposed_headers"
        elif key == "maxAgeInSeconds":
            suggest = "max_age_in_seconds"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AccountQueuePropertiesCorsRule. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AccountQueuePropertiesCorsRule.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AccountQueuePropertiesCorsRule.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allowed_headers: Sequence[str],
                 allowed_methods: Sequence[str],
                 allowed_origins: Sequence[str],
                 exposed_headers: Sequence[str],
                 max_age_in_seconds: int):
        """
        :param Sequence[str] allowed_headers: A list of headers that are allowed to be a part of the cross-origin request.
        :param Sequence[str] allowed_methods: A list of http methods that are allowed to be executed by the origin. Valid options are `DELETE`, `GET`, `HEAD`, `MERGE`, `POST`, `OPTIONS`, `PUT` or `PATCH`.
        :param Sequence[str] allowed_origins: A list of origin domains that will be allowed by CORS.
        :param Sequence[str] exposed_headers: A list of response headers that are exposed to CORS clients.
        :param int max_age_in_seconds: The number of seconds the client should cache a preflight response.
        """
        pulumi.set(__self__, "allowed_headers", allowed_headers)
        pulumi.set(__self__, "allowed_methods", allowed_methods)
        pulumi.set(__self__, "allowed_origins", allowed_origins)
        pulumi.set(__self__, "exposed_headers", exposed_headers)
        pulumi.set(__self__, "max_age_in_seconds", max_age_in_seconds)

    @property
    @pulumi.getter(name="allowedHeaders")
    def allowed_headers(self) -> Sequence[str]:
        """
        A list of headers that are allowed to be a part of the cross-origin request.
        """
        return pulumi.get(self, "allowed_headers")

    @property
    @pulumi.getter(name="allowedMethods")
    def allowed_methods(self) -> Sequence[str]:
        """
        A list of http methods that are allowed to be executed by the origin. Valid options are `DELETE`, `GET`, `HEAD`, `MERGE`, `POST`, `OPTIONS`, `PUT` or `PATCH`.
        """
        return pulumi.get(self, "allowed_methods")

    @property
    @pulumi.getter(name="allowedOrigins")
    def allowed_origins(self) -> Sequence[str]:
        """
        A list of origin domains that will be allowed by CORS.
        """
        return pulumi.get(self, "allowed_origins")

    @property
    @pulumi.getter(name="exposedHeaders")
    def exposed_headers(self) -> Sequence[str]:
        """
        A list of response headers that are exposed to CORS clients.
        """
        return pulumi.get(self, "exposed_headers")

    @property
    @pulumi.getter(name="maxAgeInSeconds")
    def max_age_in_seconds(self) -> int:
        """
        The number of seconds the client should cache a preflight response.
        """
        return pulumi.get(self, "max_age_in_seconds")


# Generated output type for the queue service's hourly metrics settings.
@pulumi.output_type
class AccountQueuePropertiesHourMetrics(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "includeApis":
            suggest = "include_apis"
        elif key == "retentionPolicyDays":
            suggest = "retention_policy_days"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AccountQueuePropertiesHourMetrics. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AccountQueuePropertiesHourMetrics.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AccountQueuePropertiesHourMetrics.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 enabled: bool,
                 version: str,
                 include_apis: Optional[bool] = None,
                 retention_policy_days: Optional[int] = None):
        """
        :param bool enabled: Indicates whether hour metrics are enabled for the Queue service. Changing this forces a new resource.
        :param str version: The version of storage analytics to configure. Changing this forces a new resource.
        :param bool include_apis: Indicates whether metrics should generate summary statistics for called API operations.
        :param int retention_policy_days: Specifies the number of days that logs will be retained. Changing this forces a new resource.
        """
        pulumi.set(__self__, "enabled", enabled)
        pulumi.set(__self__, "version", version)
        if include_apis is not None:
            pulumi.set(__self__, "include_apis", include_apis)
        if retention_policy_days is not None:
            pulumi.set(__self__, "retention_policy_days", retention_policy_days)

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """
        Indicates whether hour metrics are enabled for the Queue service. Changing this forces a new resource.
        """
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter
    def version(self) -> str:
        """
        The version of storage analytics to configure. Changing this forces a new resource.
        """
        return pulumi.get(self, "version")

    @property
    @pulumi.getter(name="includeApis")
    def include_apis(self) -> Optional[bool]:
        """
        Indicates whether metrics should generate summary statistics for called API operations.
        """
        return pulumi.get(self, "include_apis")

    @property
    @pulumi.getter(name="retentionPolicyDays")
    def retention_policy_days(self) -> Optional[int]:
        """
        Specifies the number of days that logs will be retained. Changing this forces a new resource.
        """
        return pulumi.get(self, "retention_policy_days")


# Generated output type for the queue service's request-logging settings.
@pulumi.output_type
class AccountQueuePropertiesLogging(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "retentionPolicyDays":
            suggest = "retention_policy_days"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AccountQueuePropertiesLogging. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AccountQueuePropertiesLogging.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AccountQueuePropertiesLogging.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 delete: bool,
                 read: bool,
                 version: str,
                 write: bool,
                 retention_policy_days: Optional[int] = None):
        """
        :param bool delete: Indicates whether all delete requests should be logged. Changing this forces a new resource.
        :param bool read: Indicates whether all read requests should be logged. Changing this forces a new resource.
        :param str version: The version of storage analytics to configure. Changing this forces a new resource.
        :param bool write: Indicates whether all write requests should be logged. Changing this forces a new resource.
        :param int retention_policy_days: Specifies the number of days that logs will be retained. Changing this forces a new resource.
        """
        pulumi.set(__self__, "delete", delete)
        pulumi.set(__self__, "read", read)
        pulumi.set(__self__, "version", version)
        pulumi.set(__self__, "write", write)
        if retention_policy_days is not None:
            pulumi.set(__self__, "retention_policy_days", retention_policy_days)

    @property
    @pulumi.getter
    def delete(self) -> bool:
        """
        Indicates whether all delete requests should be logged. Changing this forces a new resource.
        """
        return pulumi.get(self, "delete")

    @property
    @pulumi.getter
    def read(self) -> bool:
        """
        Indicates whether all read requests should be logged. Changing this forces a new resource.
        """
        return pulumi.get(self, "read")

    @property
    @pulumi.getter
    def version(self) -> str:
        """
        The version of storage analytics to configure. Changing this forces a new resource.
        """
        return pulumi.get(self, "version")

    @property
    @pulumi.getter
    def write(self) -> bool:
        """
        Indicates whether all write requests should be logged. Changing this forces a new resource.
        """
        return pulumi.get(self, "write")

    @property
    @pulumi.getter(name="retentionPolicyDays")
    def retention_policy_days(self) -> Optional[int]:
        """
        Specifies the number of days that logs will be retained. Changing this forces a new resource.
        """
        return pulumi.get(self, "retention_policy_days")


# Generated output type for the queue service's per-minute metrics settings.
@pulumi.output_type
class AccountQueuePropertiesMinuteMetrics(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "includeApis":
            suggest = "include_apis"
        elif key == "retentionPolicyDays":
            suggest = "retention_policy_days"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AccountQueuePropertiesMinuteMetrics. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AccountQueuePropertiesMinuteMetrics.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AccountQueuePropertiesMinuteMetrics.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 enabled: bool,
                 version: str,
                 include_apis: Optional[bool] = None,
                 retention_policy_days: Optional[int] = None):
        """
        :param bool enabled: Indicates whether minute metrics are enabled for the Queue service. Changing this forces a new resource.
        :param str version: The version of storage analytics to configure. Changing this forces a new resource.
        :param bool include_apis: Indicates whether metrics should generate summary statistics for called API operations.
        :param int retention_policy_days: Specifies the number of days that logs will be retained. Changing this forces a new resource.
        """
        pulumi.set(__self__, "enabled", enabled)
        pulumi.set(__self__, "version", version)
        if include_apis is not None:
            pulumi.set(__self__, "include_apis", include_apis)
        if retention_policy_days is not None:
            pulumi.set(__self__, "retention_policy_days", retention_policy_days)

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """
        Indicates whether minute metrics are enabled for the Queue service. Changing this forces a new resource.
        """
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter
    def version(self) -> str:
        """
        The version of storage analytics to configure. Changing this forces a new resource.
        """
        return pulumi.get(self, "version")

    @property
    @pulumi.getter(name="includeApis")
    def include_apis(self) -> Optional[bool]:
        """
        Indicates whether metrics should generate summary statistics for called API operations.
        """
        return pulumi.get(self, "include_apis")

    @property
    @pulumi.getter(name="retentionPolicyDays")
    def retention_policy_days(self) -> Optional[int]:
        """
        Specifies the number of days that logs will be retained.
        Changing this forces a new resource.
        """
        return pulumi.get(self, "retention_policy_days")


# Generated output type for a storage account's network routing preference.
@pulumi.output_type
class AccountRouting(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "publishInternetEndpoints":
            suggest = "publish_internet_endpoints"
        elif key == "publishMicrosoftEndpoints":
            suggest = "publish_microsoft_endpoints"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AccountRouting. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AccountRouting.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AccountRouting.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 choice: Optional[str] = None,
                 publish_internet_endpoints: Optional[bool] = None,
                 publish_microsoft_endpoints: Optional[bool] = None):
        """
        :param str choice: Specifies the kind of network routing opted by the user. Possible values are `InternetRouting` and `MicrosoftRouting`. Defaults to `MicrosoftRouting`.
        :param bool publish_internet_endpoints: Should internet routing storage endpoints be published? Defaults to `false`.
        :param bool publish_microsoft_endpoints: Should microsoft routing storage endpoints be published? Defaults to `false`.
        """
        # All fields are optional; only set the keys that were provided.
        if choice is not None:
            pulumi.set(__self__, "choice", choice)
        if publish_internet_endpoints is not None:
            pulumi.set(__self__, "publish_internet_endpoints", publish_internet_endpoints)
        if publish_microsoft_endpoints is not None:
            pulumi.set(__self__, "publish_microsoft_endpoints", publish_microsoft_endpoints)

    @property
    @pulumi.getter
    def choice(self) -> Optional[str]:
        """
        Specifies the kind of network routing opted by the user. Possible values are `InternetRouting` and `MicrosoftRouting`. Defaults to `MicrosoftRouting`.
        """
        return pulumi.get(self, "choice")

    @property
    @pulumi.getter(name="publishInternetEndpoints")
    def publish_internet_endpoints(self) -> Optional[bool]:
        """
        Should internet routing storage endpoints be published? Defaults to `false`.
        """
        return pulumi.get(self, "publish_internet_endpoints")

    @property
    @pulumi.getter(name="publishMicrosoftEndpoints")
    def publish_microsoft_endpoints(self) -> Optional[bool]:
        """
        Should microsoft routing storage endpoints be published? Defaults to `false`.
        """
        return pulumi.get(self, "publish_microsoft_endpoints")


# Generated output type for a storage account's file-share service properties.
@pulumi.output_type
class AccountShareProperties(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "corsRules":
            suggest = "cors_rules"
        elif key == "retentionPolicy":
            suggest = "retention_policy"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AccountShareProperties. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AccountShareProperties.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AccountShareProperties.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 cors_rules: Optional[Sequence['outputs.AccountSharePropertiesCorsRule']] = None,
                 retention_policy: Optional['outputs.AccountSharePropertiesRetentionPolicy'] = None,
                 smb: Optional['outputs.AccountSharePropertiesSmb'] = None):
        """
        :param Sequence['AccountSharePropertiesCorsRuleArgs'] cors_rules: A `cors_rule` block as defined below.
        :param 'AccountSharePropertiesRetentionPolicyArgs' retention_policy: A `retention_policy` block as defined below.
        :param 'AccountSharePropertiesSmbArgs' smb: A `smb` block as defined below.
        """
        # All sub-blocks are optional; only set the keys that were provided.
        if cors_rules is not None:
            pulumi.set(__self__, "cors_rules", cors_rules)
        if retention_policy is not None:
            pulumi.set(__self__, "retention_policy", retention_policy)
        if smb is not None:
            pulumi.set(__self__, "smb", smb)

    @property
    @pulumi.getter(name="corsRules")
    def cors_rules(self) -> Optional[Sequence['outputs.AccountSharePropertiesCorsRule']]:
        """
        A `cors_rule` block as defined below.
        """
        return pulumi.get(self, "cors_rules")

    @property
    @pulumi.getter(name="retentionPolicy")
    def retention_policy(self) -> Optional['outputs.AccountSharePropertiesRetentionPolicy']:
        """
        A `retention_policy` block as defined below.
        """
        return pulumi.get(self, "retention_policy")

    @property
    @pulumi.getter
    def smb(self) -> Optional['outputs.AccountSharePropertiesSmb']:
        """
        A `smb` block as defined below.
        """
        return pulumi.get(self, "smb")


# Generated output type for a single file-share-service CORS rule; all five
# fields are required.
@pulumi.output_type
class AccountSharePropertiesCorsRule(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "allowedHeaders":
            suggest = "allowed_headers"
        elif key == "allowedMethods":
            suggest = "allowed_methods"
        elif key == "allowedOrigins":
            suggest = "allowed_origins"
        elif key == "exposedHeaders":
            suggest = "exposed_headers"
        elif key == "maxAgeInSeconds":
            suggest = "max_age_in_seconds"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AccountSharePropertiesCorsRule. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AccountSharePropertiesCorsRule.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AccountSharePropertiesCorsRule.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 allowed_headers: Sequence[str],
                 allowed_methods: Sequence[str],
                 allowed_origins: Sequence[str],
                 exposed_headers: Sequence[str],
                 max_age_in_seconds: int):
        """
        :param Sequence[str] allowed_headers: A list of headers that are allowed to be a part of the cross-origin request.
        :param Sequence[str] allowed_methods: A list of http methods that are allowed to be executed by the origin. Valid options are `DELETE`, `GET`, `HEAD`, `MERGE`, `POST`, `OPTIONS`, `PUT` or `PATCH`.
        :param Sequence[str] allowed_origins: A list of origin domains that will be allowed by CORS.
        :param Sequence[str] exposed_headers: A list of response headers that are exposed to CORS clients.
        :param int max_age_in_seconds: The number of seconds the client should cache a preflight response.
        """
        pulumi.set(__self__, "allowed_headers", allowed_headers)
        pulumi.set(__self__, "allowed_methods", allowed_methods)
        pulumi.set(__self__, "allowed_origins", allowed_origins)
        pulumi.set(__self__, "exposed_headers", exposed_headers)
        pulumi.set(__self__, "max_age_in_seconds", max_age_in_seconds)

    @property
    @pulumi.getter(name="allowedHeaders")
    def allowed_headers(self) -> Sequence[str]:
        """
        A list of headers that are allowed to be a part of the cross-origin request.
        """
        return pulumi.get(self, "allowed_headers")

    @property
    @pulumi.getter(name="allowedMethods")
    def allowed_methods(self) -> Sequence[str]:
        """
        A list of http methods that are allowed to be executed by the origin. Valid options are `DELETE`, `GET`, `HEAD`, `MERGE`, `POST`, `OPTIONS`, `PUT` or `PATCH`.
        """
        return pulumi.get(self, "allowed_methods")

    @property
    @pulumi.getter(name="allowedOrigins")
    def allowed_origins(self) -> Sequence[str]:
        """
        A list of origin domains that will be allowed by CORS.
        """
        return pulumi.get(self, "allowed_origins")

    @property
    @pulumi.getter(name="exposedHeaders")
    def exposed_headers(self) -> Sequence[str]:
        """
        A list of response headers that are exposed to CORS clients.
        """
        return pulumi.get(self, "exposed_headers")

    @property
    @pulumi.getter(name="maxAgeInSeconds")
    def max_age_in_seconds(self) -> int:
        """
        The number of seconds the client should cache a preflight response.
        """
        return pulumi.get(self, "max_age_in_seconds")


# Generated output type for the file-share soft-delete retention policy.
@pulumi.output_type
class AccountSharePropertiesRetentionPolicy(dict):
    def __init__(__self__, *,
                 days: Optional[int] = None):
        """
        :param int days: Specifies the number of days that the `storage.Share` should be retained, between `1` and `365` days. Defaults to `7`.
        """
        if days is not None:
            pulumi.set(__self__, "days", days)

    @property
    @pulumi.getter
    def days(self) -> Optional[int]:
        """
        Specifies the number of days that the `storage.Share` should be retained, between `1` and `365` days. Defaults to `7`.
        """
        return pulumi.get(self, "days")


# Generated output type for the file-share SMB protocol settings.
@pulumi.output_type
class AccountSharePropertiesSmb(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "authenticationTypes":
            suggest = "authentication_types"
        elif key == "channelEncryptionTypes":
            suggest = "channel_encryption_types"
        elif key == "kerberosTicketEncryptionTypes":
            suggest = "kerberos_ticket_encryption_types"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AccountSharePropertiesSmb. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AccountSharePropertiesSmb.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AccountSharePropertiesSmb.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 authentication_types: Optional[Sequence[str]] = None,
                 channel_encryption_types: Optional[Sequence[str]] = None,
                 kerberos_ticket_encryption_types: Optional[Sequence[str]] = None,
                 versions: Optional[Sequence[str]] = None):
        """
        :param Sequence[str] authentication_types: A set of SMB authentication methods. Possible values are `NTLMv2`, and `Kerberos`.
        :param Sequence[str] channel_encryption_types: A set of SMB channel encryption. Possible values are `AES-128-CCM`, `AES-128-GCM`, and `AES-256-GCM`.
        :param Sequence[str] kerberos_ticket_encryption_types: A set of Kerberos ticket encryption. Possible values are `RC4-HMAC`, and `AES-256`.
        :param Sequence[str] versions: A set of SMB protocol versions. Possible values are `SMB2.1`, `SMB3.0`, and `SMB3.1.1`.
        """
        # All fields are optional; only set the keys that were provided.
        if authentication_types is not None:
            pulumi.set(__self__, "authentication_types", authentication_types)
        if channel_encryption_types is not None:
            pulumi.set(__self__, "channel_encryption_types", channel_encryption_types)
        if kerberos_ticket_encryption_types is not None:
            pulumi.set(__self__, "kerberos_ticket_encryption_types", kerberos_ticket_encryption_types)
        if versions is not None:
            pulumi.set(__self__, "versions", versions)

    @property
    @pulumi.getter(name="authenticationTypes")
    def authentication_types(self) -> Optional[Sequence[str]]:
        """
        A set of SMB authentication methods. Possible values are `NTLMv2`, and `Kerberos`.
        """
        return pulumi.get(self, "authentication_types")

    @property
    @pulumi.getter(name="channelEncryptionTypes")
    def channel_encryption_types(self) -> Optional[Sequence[str]]:
        """
        A set of SMB channel encryption. Possible values are `AES-128-CCM`, `AES-128-GCM`, and `AES-256-GCM`.
        """
        return pulumi.get(self, "channel_encryption_types")

    @property
    @pulumi.getter(name="kerberosTicketEncryptionTypes")
    def kerberos_ticket_encryption_types(self) -> Optional[Sequence[str]]:
        """
        A set of Kerberos ticket encryption. Possible values are `RC4-HMAC`, and `AES-256`.
        """
        return pulumi.get(self, "kerberos_ticket_encryption_types")

    @property
    @pulumi.getter
    def versions(self) -> Optional[Sequence[str]]:
        """
        A set of SMB protocol versions. Possible values are `SMB2.1`, `SMB3.0`, and `SMB3.1.1`.
        """
        return pulumi.get(self, "versions")


# Generated output type for a storage account's static-website configuration.
@pulumi.output_type
class AccountStaticWebsite(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "error404Document":
            suggest = "error404_document"
        elif key == "indexDocument":
            suggest = "index_document"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AccountStaticWebsite. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AccountStaticWebsite.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AccountStaticWebsite.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 error404_document: Optional[str] = None,
                 index_document: Optional[str] = None):
        """
        :param str error404_document: The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
        :param str index_document: The webpage that Azure Storage serves for requests to the root of a website or any subfolder. For example, index.html. The value is case-sensitive.
        """
        if error404_document is not None:
            pulumi.set(__self__, "error404_document", error404_document)
        if index_document is not None:
            pulumi.set(__self__, "index_document", index_document)

    @property
    @pulumi.getter(name="error404Document")
    def error404_document(self) -> Optional[str]:
        """
        The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
        """
        return pulumi.get(self, "error404_document")

    @property
    @pulumi.getter(name="indexDocument")
    def index_document(self) -> Optional[str]:
        """
        The webpage that Azure Storage serves for requests to the root of a website or any subfolder. For example, index.html. The value is case-sensitive.
        """
        return pulumi.get(self, "index_document")


# Generated output type for a single Blob Inventory Policy rule; both fields
# are required, so no __key_warning helper is generated here.
@pulumi.output_type
class BlobInventoryPolicyRule(dict):
    def __init__(__self__, *,
                 filter: 'outputs.BlobInventoryPolicyRuleFilter',
                 name: str):
        """
        :param 'BlobInventoryPolicyRuleFilterArgs' filter: A `filter` block as defined above.
        :param str name: The name which should be used for this Blob Inventory Policy Rule.
        """
        pulumi.set(__self__, "filter", filter)
        pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def filter(self) -> 'outputs.BlobInventoryPolicyRuleFilter':
        """
        A `filter` block as defined above.
        """
        return pulumi.get(self, "filter")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name which should be used for this Blob Inventory Policy Rule.
        """
        return pulumi.get(self, "name")


# Generated output type for a Blob Inventory Policy rule's filter block.
@pulumi.output_type
class BlobInventoryPolicyRuleFilter(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "blobTypes":
            suggest = "blob_types"
        elif key == "includeBlobVersions":
            suggest = "include_blob_versions"
        elif key == "includeSnapshots":
            suggest = "include_snapshots"
        elif key == "prefixMatches":
            suggest = "prefix_matches"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in BlobInventoryPolicyRuleFilter. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        BlobInventoryPolicyRuleFilter.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        BlobInventoryPolicyRuleFilter.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 blob_types: Sequence[str],
                 include_blob_versions: Optional[bool] = None,
                 include_snapshots: Optional[bool] = None,
                 prefix_matches: Optional[Sequence[str]] = None):
        """
        :param Sequence[str] blob_types: A set of blob types. Possible values are `blockBlob`, `appendBlob`, and `pageBlob`. The storage account with `is_hns_enabled` is `true` doesn't support `pageBlob`.
        :param bool include_blob_versions: Includes blob versions in blob inventory or not? Defaults to `false`.
        :param bool include_snapshots: Includes blob snapshots in blob inventory or not? Defaults to `false`.
        :param Sequence[str] prefix_matches: A set of strings for blob prefixes to be matched.
        """
        pulumi.set(__self__, "blob_types", blob_types)
        if include_blob_versions is not None:
            pulumi.set(__self__, "include_blob_versions", include_blob_versions)
        if include_snapshots is not None:
            pulumi.set(__self__, "include_snapshots", include_snapshots)
        if prefix_matches is not None:
            pulumi.set(__self__, "prefix_matches", prefix_matches)

    @property
    @pulumi.getter(name="blobTypes")
    def blob_types(self) -> Sequence[str]:
        """
        A set of blob types. Possible values are `blockBlob`, `appendBlob`, and `pageBlob`. The storage account with `is_hns_enabled` is `true` doesn't support `pageBlob`.
        """
        return pulumi.get(self, "blob_types")

    @property
    @pulumi.getter(name="includeBlobVersions")
    def include_blob_versions(self) -> Optional[bool]:
        """
        Includes blob versions in blob inventory or not? Defaults to `false`.
        """
        return pulumi.get(self, "include_blob_versions")

    @property
    @pulumi.getter(name="includeSnapshots")
    def include_snapshots(self) -> Optional[bool]:
        """
        Includes blob snapshots in blob inventory or not? Defaults to `false`.
        """
        return pulumi.get(self, "include_snapshots")

    @property
    @pulumi.getter(name="prefixMatches")
    def prefix_matches(self) -> Optional[Sequence[str]]:
        """
        A set of strings for blob prefixes to be matched.
        """
        return pulumi.get(self, "prefix_matches")


# Generated output type for a Data Lake Gen2 filesystem ACL entry; field
# names already match the wire keys, so no __key_warning helper is generated.
@pulumi.output_type
class DataLakeGen2FilesystemAce(dict):
    def __init__(__self__, *,
                 permissions: str,
                 type: str,
                 id: Optional[str] = None,
                 scope: Optional[str] = None):
        """
        :param str permissions: Specifies the permissions for the entry in `rwx` form. For example, `rwx` gives full permissions but `r--` only gives read permissions.
        :param str type: Specifies the type of entry. Can be `user`, `group`, `mask` or `other`.
        :param str id: Specifies the Object ID of the Azure Active Directory User or Group that the entry relates to. Only valid for `user` or `group` entries.
        :param str scope: Specifies whether the ACE represents an `access` entry or a `default` entry. Default value is `access`.
        """
        pulumi.set(__self__, "permissions", permissions)
        pulumi.set(__self__, "type", type)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if scope is not None:
            pulumi.set(__self__, "scope", scope)

    @property
    @pulumi.getter
    def permissions(self) -> str:
        """
        Specifies the permissions for the entry in `rwx` form. For example, `rwx` gives full permissions but `r--` only gives read permissions.
        """
        return pulumi.get(self, "permissions")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Specifies the type of entry. Can be `user`, `group`, `mask` or `other`.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        Specifies the Object ID of the Azure Active Directory User or Group that the entry relates to. Only valid for `user` or `group` entries.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def scope(self) -> Optional[str]:
        """
        Specifies whether the ACE represents an `access` entry or a `default` entry. Default value is `access`.
        """
        return pulumi.get(self, "scope")


# Generated output type for a Data Lake Gen2 path ACL entry (same field
# layout as DataLakeGen2FilesystemAce).
@pulumi.output_type
class DataLakeGen2PathAce(dict):
    def __init__(__self__, *,
                 permissions: str,
                 type: str,
                 id: Optional[str] = None,
                 scope: Optional[str] = None):
        """
        :param str permissions: Specifies the permissions for the entry in `rwx` form. For example, `rwx` gives full permissions but `r--` only gives read permissions.
        :param str type: Specifies the type of entry. Can be `user`, `group`, `mask` or `other`.
        :param str id: Specifies the Object ID of the Azure Active Directory User or Group that the entry relates to. Only valid for `user` or `group` entries.
        :param str scope: Specifies whether the ACE represents an `access` entry or a `default` entry. Default value is `access`.
        """
        pulumi.set(__self__, "permissions", permissions)
        pulumi.set(__self__, "type", type)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if scope is not None:
            pulumi.set(__self__, "scope", scope)

    @property
    @pulumi.getter
    def permissions(self) -> str:
        """
        Specifies the permissions for the entry in `rwx` form. For example, `rwx` gives full permissions but `r--` only gives read permissions.
        """
        return pulumi.get(self, "permissions")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Specifies the type of entry. Can be `user`, `group`, `mask` or `other`.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        Specifies the Object ID of the Azure Active Directory User or Group that the entry relates to. Only valid for `user` or `group` entries.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def scope(self) -> Optional[str]:
        """
        Specifies whether the ACE represents an `access` entry or a `default` entry. Default value is `access`.
        """
        return pulumi.get(self, "scope")


# Generated output type for a storage Management Policy rule.
@pulumi.output_type
class ManagementPolicyRule(dict):
    def __init__(__self__, *,
                 actions: 'outputs.ManagementPolicyRuleActions',
                 enabled: bool,
                 name: str,
                 filters: Optional['outputs.ManagementPolicyRuleFilters'] = None):
        """
        :param 'ManagementPolicyRuleActionsArgs' actions: An `actions` block as documented below.
        :param bool enabled: Boolean to specify whether the rule is enabled.
        :param str name: A rule name can contain any combination of alpha numeric characters. Rule name is case-sensitive. It must be unique within a policy.
        :param 'ManagementPolicyRuleFiltersArgs' filters: A `filter` block as documented below.
        """
        pulumi.set(__self__, "actions", actions)
        pulumi.set(__self__, "enabled", enabled)
        pulumi.set(__self__, "name", name)
        if filters is not None:
            pulumi.set(__self__, "filters", filters)

    @property
    @pulumi.getter
    def actions(self) -> 'outputs.ManagementPolicyRuleActions':
        """
        An `actions` block as documented below.
        """
        return pulumi.get(self, "actions")

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """
        Boolean to specify whether the rule is enabled.
        """
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        A rule name can contain any combination of alpha numeric characters. Rule name is case-sensitive. It must be unique within a policy.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def filters(self) -> Optional['outputs.ManagementPolicyRuleFilters']:
        """
        A `filter` block as documented below.
        """
        return pulumi.get(self, "filters")


# Generated output type for a Management Policy rule's actions block.
@pulumi.output_type
class ManagementPolicyRuleActions(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "baseBlob":
            suggest = "base_blob"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ManagementPolicyRuleActions. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ManagementPolicyRuleActions.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ManagementPolicyRuleActions.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 base_blob: Optional['outputs.ManagementPolicyRuleActionsBaseBlob'] = None,
                 snapshot: Optional['outputs.ManagementPolicyRuleActionsSnapshot'] = None,
                 version: Optional['outputs.ManagementPolicyRuleActionsVersion'] = None):
        """
        :param 'ManagementPolicyRuleActionsBaseBlobArgs' base_blob: A `base_blob` block as documented below.
        :param 'ManagementPolicyRuleActionsSnapshotArgs' snapshot: A `snapshot` block as documented below.
        :param 'ManagementPolicyRuleActionsVersionArgs' version: A `version` block as documented below.
        """
        # All sub-blocks are optional; only set the keys that were provided.
        if base_blob is not None:
            pulumi.set(__self__, "base_blob", base_blob)
        if snapshot is not None:
            pulumi.set(__self__, "snapshot", snapshot)
        if version is not None:
            pulumi.set(__self__, "version", version)

    @property
    @pulumi.getter(name="baseBlob")
    def base_blob(self) -> Optional['outputs.ManagementPolicyRuleActionsBaseBlob']:
        """
        A `base_blob` block as documented below.
        """
        return pulumi.get(self, "base_blob")

    @property
    @pulumi.getter
    def snapshot(self) -> Optional['outputs.ManagementPolicyRuleActionsSnapshot']:
        """
        A `snapshot` block as documented below.
        """
        return pulumi.get(self, "snapshot")

    @property
    @pulumi.getter
    def version(self) -> Optional['outputs.ManagementPolicyRuleActionsVersion']:
        """
        A `version` block as documented below.
        """
        return pulumi.get(self, "version")


# Generated output type for the base_blob lifecycle actions of a Management
# Policy rule. All six thresholds are optional day counts.
@pulumi.output_type
class ManagementPolicyRuleActionsBaseBlob(dict):
    # Warn when a camelCase key is read via dict-style access instead of the
    # corresponding snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "deleteAfterDaysSinceLastAccessTimeGreaterThan":
            suggest = "delete_after_days_since_last_access_time_greater_than"
        elif key == "deleteAfterDaysSinceModificationGreaterThan":
            suggest = "delete_after_days_since_modification_greater_than"
        elif key == "tierToArchiveAfterDaysSinceLastAccessTimeGreaterThan":
            suggest = "tier_to_archive_after_days_since_last_access_time_greater_than"
        elif key == "tierToArchiveAfterDaysSinceModificationGreaterThan":
            suggest = "tier_to_archive_after_days_since_modification_greater_than"
        elif key == "tierToCoolAfterDaysSinceLastAccessTimeGreaterThan":
            suggest = "tier_to_cool_after_days_since_last_access_time_greater_than"
        elif key == "tierToCoolAfterDaysSinceModificationGreaterThan":
            suggest = "tier_to_cool_after_days_since_modification_greater_than"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ManagementPolicyRuleActionsBaseBlob. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ManagementPolicyRuleActionsBaseBlob.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ManagementPolicyRuleActionsBaseBlob.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 delete_after_days_since_last_access_time_greater_than: Optional[int] = None,
                 delete_after_days_since_modification_greater_than: Optional[int] = None,
                 tier_to_archive_after_days_since_last_access_time_greater_than: Optional[int] = None,
                 tier_to_archive_after_days_since_modification_greater_than: Optional[int] = None,
                 tier_to_cool_after_days_since_last_access_time_greater_than: Optional[int] = None,
                 tier_to_cool_after_days_since_modification_greater_than: Optional[int] = None):
        """
        :param int delete_after_days_since_last_access_time_greater_than: The age in days after last access time to delete the blob. Must be between `0` and `99999`.
        :param int delete_after_days_since_modification_greater_than: The age in days after last modification to delete the blob. Must be between 0 and 99999.
        :param int tier_to_archive_after_days_since_last_access_time_greater_than: The age in days after last access time to tier blobs to archive storage. Supports blob currently at Hot or Cool tier. Must be between `0` and `99999`.
        :param int tier_to_archive_after_days_since_modification_greater_than: The age in days after last modification to tier blobs to archive storage. Supports blob currently at Hot or Cool tier. Must be between 0 and 99999.
        :param int tier_to_cool_after_days_since_last_access_time_greater_than: The age in days after last access time to tier blobs to cool storage. Supports blob currently at Hot tier. Must be between `0` and `99999`.
        :param int tier_to_cool_after_days_since_modification_greater_than: The age in days after last modification to tier blobs to cool storage. Supports blob currently at Hot tier. Must be between 0 and 99999.
        """
        # All fields are optional; only set the keys that were provided.
        if delete_after_days_since_last_access_time_greater_than is not None:
            pulumi.set(__self__, "delete_after_days_since_last_access_time_greater_than", delete_after_days_since_last_access_time_greater_than)
        if delete_after_days_since_modification_greater_than is not None:
            pulumi.set(__self__, "delete_after_days_since_modification_greater_than", delete_after_days_since_modification_greater_than)
        if tier_to_archive_after_days_since_last_access_time_greater_than is not None:
            pulumi.set(__self__, "tier_to_archive_after_days_since_last_access_time_greater_than", tier_to_archive_after_days_since_last_access_time_greater_than)
        if tier_to_archive_after_days_since_modification_greater_than is not None:
            pulumi.set(__self__, "tier_to_archive_after_days_since_modification_greater_than", tier_to_archive_after_days_since_modification_greater_than)
        if tier_to_cool_after_days_since_last_access_time_greater_than is not None:
            pulumi.set(__self__, "tier_to_cool_after_days_since_last_access_time_greater_than", tier_to_cool_after_days_since_last_access_time_greater_than)
        if tier_to_cool_after_days_since_modification_greater_than is not None:
            pulumi.set(__self__, "tier_to_cool_after_days_since_modification_greater_than", tier_to_cool_after_days_since_modification_greater_than)

    @property
    @pulumi.getter(name="deleteAfterDaysSinceLastAccessTimeGreaterThan")
    def delete_after_days_since_last_access_time_greater_than(self) -> Optional[int]:
        """
        The age in days after last access time to delete the blob. Must be between `0` and `99999`.
        """
        return pulumi.get(self, "delete_after_days_since_last_access_time_greater_than")

    @property
    @pulumi.getter(name="deleteAfterDaysSinceModificationGreaterThan")
    def delete_after_days_since_modification_greater_than(self) -> Optional[int]:
        """
        The age in days after last modification to delete the blob. Must be between 0 and 99999.
""" return pulumi.get(self, "delete_after_days_since_modification_greater_than") @property @pulumi.getter(name="tierToArchiveAfterDaysSinceLastAccessTimeGreaterThan") def tier_to_archive_after_days_since_last_access_time_greater_than(self) -> Optional[int]: """ The age in days after last access time to tier blobs to archive storage. Supports blob currently at Hot or Cool tier. Must be between `0 and `99999`. """ return pulumi.get(self, "tier_to_archive_after_days_since_last_access_time_greater_than") @property @pulumi.getter(name="tierToArchiveAfterDaysSinceModificationGreaterThan") def tier_to_archive_after_days_since_modification_greater_than(self) -> Optional[int]: """ The age in days after last modification to tier blobs to archive storage. Supports blob currently at Hot or Cool tier. Must be between 0 and 99999. """ return pulumi.get(self, "tier_to_archive_after_days_since_modification_greater_than") @property @pulumi.getter(name="tierToCoolAfterDaysSinceLastAccessTimeGreaterThan") def tier_to_cool_after_days_since_last_access_time_greater_than(self) -> Optional[int]: """ The age in days after last access time to tier blobs to cool storage. Supports blob currently at Hot tier. Must be between `0` and `99999`. """ return pulumi.get(self, "tier_to_cool_after_days_since_last_access_time_greater_than") @property @pulumi.getter(name="tierToCoolAfterDaysSinceModificationGreaterThan") def tier_to_cool_after_days_since_modification_greater_than(self) -> Optional[int]: """ The age in days after last modification to tier blobs to cool storage. Supports blob currently at Hot tier. Must be between 0 and 99999. 
""" return pulumi.get(self, "tier_to_cool_after_days_since_modification_greater_than") @pulumi.output_type class ManagementPolicyRuleActionsSnapshot(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "changeTierToArchiveAfterDaysSinceCreation": suggest = "change_tier_to_archive_after_days_since_creation" elif key == "changeTierToCoolAfterDaysSinceCreation": suggest = "change_tier_to_cool_after_days_since_creation" elif key == "deleteAfterDaysSinceCreationGreaterThan": suggest = "delete_after_days_since_creation_greater_than" if suggest: pulumi.log.warn(f"Key '{key}' not found in ManagementPolicyRuleActionsSnapshot. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: ManagementPolicyRuleActionsSnapshot.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: ManagementPolicyRuleActionsSnapshot.__key_warning(key) return super().get(key, default) def __init__(__self__, *, change_tier_to_archive_after_days_since_creation: Optional[int] = None, change_tier_to_cool_after_days_since_creation: Optional[int] = None, delete_after_days_since_creation_greater_than: Optional[int] = None): """ :param int change_tier_to_archive_after_days_since_creation: The age in days after creation to tier blob snapshot to archive storage. Must be between 0 and 99999. :param int change_tier_to_cool_after_days_since_creation: The age in days after creation to tier blob snapshot to cool storage. Must be between 0 and 99999. :param int delete_after_days_since_creation_greater_than: The age in days after creation to delete the blob snapshot. Must be between 0 and 99999. 
""" if change_tier_to_archive_after_days_since_creation is not None: pulumi.set(__self__, "change_tier_to_archive_after_days_since_creation", change_tier_to_archive_after_days_since_creation) if change_tier_to_cool_after_days_since_creation is not None: pulumi.set(__self__, "change_tier_to_cool_after_days_since_creation", change_tier_to_cool_after_days_since_creation) if delete_after_days_since_creation_greater_than is not None: pulumi.set(__self__, "delete_after_days_since_creation_greater_than", delete_after_days_since_creation_greater_than) @property @pulumi.getter(name="changeTierToArchiveAfterDaysSinceCreation") def change_tier_to_archive_after_days_since_creation(self) -> Optional[int]: """ The age in days after creation to tier blob snapshot to archive storage. Must be between 0 and 99999. """ return pulumi.get(self, "change_tier_to_archive_after_days_since_creation") @property @pulumi.getter(name="changeTierToCoolAfterDaysSinceCreation") def change_tier_to_cool_after_days_since_creation(self) -> Optional[int]: """ The age in days after creation to tier blob snapshot to cool storage. Must be between 0 and 99999. """ return pulumi.get(self, "change_tier_to_cool_after_days_since_creation") @property @pulumi.getter(name="deleteAfterDaysSinceCreationGreaterThan") def delete_after_days_since_creation_greater_than(self) -> Optional[int]: """ The age in days after creation to delete the blob snapshot. Must be between 0 and 99999. 
""" return pulumi.get(self, "delete_after_days_since_creation_greater_than") @pulumi.output_type class ManagementPolicyRuleActionsVersion(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "changeTierToArchiveAfterDaysSinceCreation": suggest = "change_tier_to_archive_after_days_since_creation" elif key == "changeTierToCoolAfterDaysSinceCreation": suggest = "change_tier_to_cool_after_days_since_creation" elif key == "deleteAfterDaysSinceCreation": suggest = "delete_after_days_since_creation" if suggest: pulumi.log.warn(f"Key '{key}' not found in ManagementPolicyRuleActionsVersion. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: ManagementPolicyRuleActionsVersion.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: ManagementPolicyRuleActionsVersion.__key_warning(key) return super().get(key, default) def __init__(__self__, *, change_tier_to_archive_after_days_since_creation: Optional[int] = None, change_tier_to_cool_after_days_since_creation: Optional[int] = None, delete_after_days_since_creation: Optional[int] = None): """ :param int change_tier_to_archive_after_days_since_creation: The age in days after creation to tier blob version to archive storage. Must be between 0 and 99999. :param int change_tier_to_cool_after_days_since_creation: The age in days creation create to tier blob version to cool storage. Must be between 0 and 99999. :param int delete_after_days_since_creation: The age in days after creation to delete the blob version. Must be between 0 and 99999. 
""" if change_tier_to_archive_after_days_since_creation is not None: pulumi.set(__self__, "change_tier_to_archive_after_days_since_creation", change_tier_to_archive_after_days_since_creation) if change_tier_to_cool_after_days_since_creation is not None: pulumi.set(__self__, "change_tier_to_cool_after_days_since_creation", change_tier_to_cool_after_days_since_creation) if delete_after_days_since_creation is not None: pulumi.set(__self__, "delete_after_days_since_creation", delete_after_days_since_creation) @property @pulumi.getter(name="changeTierToArchiveAfterDaysSinceCreation") def change_tier_to_archive_after_days_since_creation(self) -> Optional[int]: """ The age in days after creation to tier blob version to archive storage. Must be between 0 and 99999. """ return pulumi.get(self, "change_tier_to_archive_after_days_since_creation") @property @pulumi.getter(name="changeTierToCoolAfterDaysSinceCreation") def change_tier_to_cool_after_days_since_creation(self) -> Optional[int]: """ The age in days creation create to tier blob version to cool storage. Must be between 0 and 99999. """ return pulumi.get(self, "change_tier_to_cool_after_days_since_creation") @property @pulumi.getter(name="deleteAfterDaysSinceCreation") def delete_after_days_since_creation(self) -> Optional[int]: """ The age in days after creation to delete the blob version. Must be between 0 and 99999. """ return pulumi.get(self, "delete_after_days_since_creation") @pulumi.output_type class ManagementPolicyRuleFilters(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "blobTypes": suggest = "blob_types" elif key == "matchBlobIndexTags": suggest = "match_blob_index_tags" elif key == "prefixMatches": suggest = "prefix_matches" if suggest: pulumi.log.warn(f"Key '{key}' not found in ManagementPolicyRuleFilters. 
Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: ManagementPolicyRuleFilters.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: ManagementPolicyRuleFilters.__key_warning(key) return super().get(key, default) def __init__(__self__, *, blob_types: Optional[Sequence[str]] = None, match_blob_index_tags: Optional[Sequence['outputs.ManagementPolicyRuleFiltersMatchBlobIndexTag']] = None, prefix_matches: Optional[Sequence[str]] = None): """ :param Sequence[str] blob_types: An array of predefined values. Valid options are `blockBlob` and `appendBlob`. :param Sequence['ManagementPolicyRuleFiltersMatchBlobIndexTagArgs'] match_blob_index_tags: A `match_blob_index_tag` block as defined below. The block defines the blob index tag based filtering for blob objects. :param Sequence[str] prefix_matches: An array of strings for prefixes to be matched. """ if blob_types is not None: pulumi.set(__self__, "blob_types", blob_types) if match_blob_index_tags is not None: pulumi.set(__self__, "match_blob_index_tags", match_blob_index_tags) if prefix_matches is not None: pulumi.set(__self__, "prefix_matches", prefix_matches) @property @pulumi.getter(name="blobTypes") def blob_types(self) -> Optional[Sequence[str]]: """ An array of predefined values. Valid options are `blockBlob` and `appendBlob`. """ return pulumi.get(self, "blob_types") @property @pulumi.getter(name="matchBlobIndexTags") def match_blob_index_tags(self) -> Optional[Sequence['outputs.ManagementPolicyRuleFiltersMatchBlobIndexTag']]: """ A `match_blob_index_tag` block as defined below. The block defines the blob index tag based filtering for blob objects. """ return pulumi.get(self, "match_blob_index_tags") @property @pulumi.getter(name="prefixMatches") def prefix_matches(self) -> Optional[Sequence[str]]: """ An array of strings for prefixes to be matched. 
""" return pulumi.get(self, "prefix_matches") @pulumi.output_type class ManagementPolicyRuleFiltersMatchBlobIndexTag(dict): def __init__(__self__, *, name: str, value: str, operation: Optional[str] = None): """ :param str name: The filter tag name used for tag based filtering for blob objects. :param str value: The filter tag value used for tag based filtering for blob objects. :param str operation: The comparison operator which is used for object comparison and filtering. Possible value is `==`. Defaults to `==`. """ pulumi.set(__self__, "name", name) pulumi.set(__self__, "value", value) if operation is not None: pulumi.set(__self__, "operation", operation) @property @pulumi.getter def name(self) -> str: """ The filter tag name used for tag based filtering for blob objects. """ return pulumi.get(self, "name") @property @pulumi.getter def value(self) -> str: """ The filter tag value used for tag based filtering for blob objects. """ return pulumi.get(self, "value") @property @pulumi.getter def operation(self) -> Optional[str]: """ The comparison operator which is used for object comparison and filtering. Possible value is `==`. Defaults to `==`. """ return pulumi.get(self, "operation") @pulumi.output_type class ObjectReplicationRule(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "destinationContainerName": suggest = "destination_container_name" elif key == "sourceContainerName": suggest = "source_container_name" elif key == "copyBlobsCreatedAfter": suggest = "copy_blobs_created_after" elif key == "filterOutBlobsWithPrefixes": suggest = "filter_out_blobs_with_prefixes" if suggest: pulumi.log.warn(f"Key '{key}' not found in ObjectReplicationRule. 
Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: ObjectReplicationRule.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: ObjectReplicationRule.__key_warning(key) return super().get(key, default) def __init__(__self__, *, destination_container_name: str, source_container_name: str, copy_blobs_created_after: Optional[str] = None, filter_out_blobs_with_prefixes: Optional[Sequence[str]] = None, name: Optional[str] = None): """ :param str destination_container_name: The destination storage container name. Changing this forces a new Storage Object Replication to be created. :param str source_container_name: The source storage container name. Changing this forces a new Storage Object Replication to be created. :param str copy_blobs_created_after: The time after which the Block Blobs created will be copies to the destination. Possible values are `OnlyNewObjects`, `Everything` and time in RFC3339 format: `2006-01-02T15:04:00Z`. :param Sequence[str] filter_out_blobs_with_prefixes: Specifies a list of filters prefixes, the blobs whose names begin with which will be replicated. """ pulumi.set(__self__, "destination_container_name", destination_container_name) pulumi.set(__self__, "source_container_name", source_container_name) if copy_blobs_created_after is not None: pulumi.set(__self__, "copy_blobs_created_after", copy_blobs_created_after) if filter_out_blobs_with_prefixes is not None: pulumi.set(__self__, "filter_out_blobs_with_prefixes", filter_out_blobs_with_prefixes) if name is not None: pulumi.set(__self__, "name", name) @property @pulumi.getter(name="destinationContainerName") def destination_container_name(self) -> str: """ The destination storage container name. Changing this forces a new Storage Object Replication to be created. 
""" return pulumi.get(self, "destination_container_name") @property @pulumi.getter(name="sourceContainerName") def source_container_name(self) -> str: """ The source storage container name. Changing this forces a new Storage Object Replication to be created. """ return pulumi.get(self, "source_container_name") @property @pulumi.getter(name="copyBlobsCreatedAfter") def copy_blobs_created_after(self) -> Optional[str]: """ The time after which the Block Blobs created will be copies to the destination. Possible values are `OnlyNewObjects`, `Everything` and time in RFC3339 format: `2006-01-02T15:04:00Z`. """ return pulumi.get(self, "copy_blobs_created_after") @property @pulumi.getter(name="filterOutBlobsWithPrefixes") def filter_out_blobs_with_prefixes(self) -> Optional[Sequence[str]]: """ Specifies a list of filters prefixes, the blobs whose names begin with which will be replicated. """ return pulumi.get(self, "filter_out_blobs_with_prefixes") @property @pulumi.getter def name(self) -> Optional[str]: return pulumi.get(self, "name") @pulumi.output_type class ShareAcl(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "accessPolicies": suggest = "access_policies" if suggest: pulumi.log.warn(f"Key '{key}' not found in ShareAcl. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: ShareAcl.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: ShareAcl.__key_warning(key) return super().get(key, default) def __init__(__self__, *, id: str, access_policies: Optional[Sequence['outputs.ShareAclAccessPolicy']] = None): """ :param str id: The ID which should be used for this Shared Identifier. :param Sequence['ShareAclAccessPolicyArgs'] access_policies: An `access_policy` block as defined below. 
""" pulumi.set(__self__, "id", id) if access_policies is not None: pulumi.set(__self__, "access_policies", access_policies) @property @pulumi.getter def id(self) -> str: """ The ID which should be used for this Shared Identifier. """ return pulumi.get(self, "id") @property @pulumi.getter(name="accessPolicies") def access_policies(self) -> Optional[Sequence['outputs.ShareAclAccessPolicy']]: """ An `access_policy` block as defined below. """ return pulumi.get(self, "access_policies") @pulumi.output_type class ShareAclAccessPolicy(dict): def __init__(__self__, *, permissions: str, expiry: Optional[str] = None, start: Optional[str] = None): """ :param str permissions: The permissions which should be associated with this Shared Identifier. Possible value is combination of `r` (read), `w` (write), `d` (delete), and `l` (list). :param str expiry: The time at which this Access Policy should be valid until, in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) format. :param str start: The time at which this Access Policy should be valid from, in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) format. """ pulumi.set(__self__, "permissions", permissions) if expiry is not None: pulumi.set(__self__, "expiry", expiry) if start is not None: pulumi.set(__self__, "start", start) @property @pulumi.getter def permissions(self) -> str: """ The permissions which should be associated with this Shared Identifier. Possible value is combination of `r` (read), `w` (write), `d` (delete), and `l` (list). """ return pulumi.get(self, "permissions") @property @pulumi.getter def expiry(self) -> Optional[str]: """ The time at which this Access Policy should be valid until, in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) format. """ return pulumi.get(self, "expiry") @property @pulumi.getter def start(self) -> Optional[str]: """ The time at which this Access Policy should be valid from, in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) format. 
""" return pulumi.get(self, "start") @pulumi.output_type class TableAcl(dict): @staticmethod def __key_warning(key: str): suggest = None if key == "accessPolicies": suggest = "access_policies" if suggest: pulumi.log.warn(f"Key '{key}' not found in TableAcl. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: TableAcl.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: TableAcl.__key_warning(key) return super().get(key, default) def __init__(__self__, *, id: str, access_policies: Optional[Sequence['outputs.TableAclAccessPolicy']] = None): """ :param str id: The ID which should be used for this Shared Identifier. :param Sequence['TableAclAccessPolicyArgs'] access_policies: An `access_policy` block as defined below. """ pulumi.set(__self__, "id", id) if access_policies is not None: pulumi.set(__self__, "access_policies", access_policies) @property @pulumi.getter def id(self) -> str: """ The ID which should be used for this Shared Identifier. """ return pulumi.get(self, "id") @property @pulumi.getter(name="accessPolicies") def access_policies(self) -> Optional[Sequence['outputs.TableAclAccessPolicy']]: """ An `access_policy` block as defined below. """ return pulumi.get(self, "access_policies") @pulumi.output_type class TableAclAccessPolicy(dict): def __init__(__self__, *, expiry: str, permissions: str, start: str): """ :param str expiry: The ISO8061 UTC time at which this Access Policy should be valid until. :param str permissions: The permissions which should associated with this Shared Identifier. :param str start: The ISO8061 UTC time at which this Access Policy should be valid from. """ pulumi.set(__self__, "expiry", expiry) pulumi.set(__self__, "permissions", permissions) pulumi.set(__self__, "start", start) @property @pulumi.getter def expiry(self) -> str: """ The ISO8061 UTC time at which this Access Policy should be valid until. 
""" return pulumi.get(self, "expiry") @property @pulumi.getter def permissions(self) -> str: """ The permissions which should associated with this Shared Identifier. """ return pulumi.get(self, "permissions") @property @pulumi.getter def start(self) -> str: """ The ISO8061 UTC time at which this Access Policy should be valid from. """ return pulumi.get(self, "start") @pulumi.output_type class GetAccountBlobContainerSASPermissionsResult(dict): def __init__(__self__, *, add: bool, create: bool, delete: bool, list: bool, read: bool, write: bool): """ :param bool add: Should Add permissions be enabled for this SAS? :param bool create: Should Create permissions be enabled for this SAS? :param bool delete: Should Delete permissions be enabled for this SAS? :param bool list: Should List permissions be enabled for this SAS? :param bool read: Should Read permissions be enabled for this SAS? :param bool write: Should Write permissions be enabled for this SAS? """ pulumi.set(__self__, "add", add) pulumi.set(__self__, "create", create) pulumi.set(__self__, "delete", delete) pulumi.set(__self__, "list", list) pulumi.set(__self__, "read", read) pulumi.set(__self__, "write", write) @property @pulumi.getter def add(self) -> bool: """ Should Add permissions be enabled for this SAS? """ return pulumi.get(self, "add") @property @pulumi.getter def create(self) -> bool: """ Should Create permissions be enabled for this SAS? """ return pulumi.get(self, "create") @property @pulumi.getter def delete(self) -> bool: """ Should Delete permissions be enabled for this SAS? """ return pulumi.get(self, "delete") @property @pulumi.getter def list(self) -> bool: """ Should List permissions be enabled for this SAS? """ return pulumi.get(self, "list") @property @pulumi.getter def read(self) -> bool: """ Should Read permissions be enabled for this SAS? """ return pulumi.get(self, "read") @property @pulumi.getter def write(self) -> bool: """ Should Write permissions be enabled for this SAS? 
""" return pulumi.get(self, "write") @pulumi.output_type class GetAccountCustomDomainResult(dict): def __init__(__self__, *, name: str): """ :param str name: Specifies the name of the Storage Account """ pulumi.set(__self__, "name", name) @property @pulumi.getter def name(self) -> str: """ Specifies the name of the Storage Account """ return pulumi.get(self, "name") @pulumi.output_type class GetAccountSASPermissionsResult(dict): def __init__(__self__, *, add: bool, create: bool, delete: bool, list: bool, process: bool, read: bool, update: bool, write: bool): """ :param bool add: Should Add permissions be enabled for this SAS? :param bool create: Should Create permissions be enabled for this SAS? :param bool delete: Should Delete permissions be enabled for this SAS? :param bool list: Should List permissions be enabled for this SAS? :param bool process: Should Process permissions be enabled for this SAS? :param bool read: Should Read permissions be enabled for this SAS? :param bool update: Should Update permissions be enabled for this SAS? :param bool write: Should Write permissions be enabled for this SAS? """ pulumi.set(__self__, "add", add) pulumi.set(__self__, "create", create) pulumi.set(__self__, "delete", delete) pulumi.set(__self__, "list", list) pulumi.set(__self__, "process", process) pulumi.set(__self__, "read", read) pulumi.set(__self__, "update", update) pulumi.set(__self__, "write", write) @property @pulumi.getter def add(self) -> bool: """ Should Add permissions be enabled for this SAS? """ return pulumi.get(self, "add") @property @pulumi.getter def create(self) -> bool: """ Should Create permissions be enabled for this SAS? """ return pulumi.get(self, "create") @property @pulumi.getter def delete(self) -> bool: """ Should Delete permissions be enabled for this SAS? """ return pulumi.get(self, "delete") @property @pulumi.getter def list(self) -> bool: """ Should List permissions be enabled for this SAS? 
""" return pulumi.get(self, "list") @property @pulumi.getter def process(self) -> bool: """ Should Process permissions be enabled for this SAS? """ return pulumi.get(self, "process") @property @pulumi.getter def read(self) -> bool: """ Should Read permissions be enabled for this SAS? """ return pulumi.get(self, "read") @property @pulumi.getter def update(self) -> bool: """ Should Update permissions be enabled for this SAS? """ return pulumi.get(self, "update") @property @pulumi.getter def write(self) -> bool: """ Should Write permissions be enabled for this SAS? """ return pulumi.get(self, "write") @pulumi.output_type class GetAccountSASResourceTypesResult(dict): def __init__(__self__, *, container: bool, object: bool, service: bool): """ :param bool container: Should permission be granted to the container? :param bool object: Should permission be granted only to a specific object? :param bool service: Should permission be granted to the entire service? """ pulumi.set(__self__, "container", container) pulumi.set(__self__, "object", object) pulumi.set(__self__, "service", service) @property @pulumi.getter def container(self) -> bool: """ Should permission be granted to the container? """ return pulumi.get(self, "container") @property @pulumi.getter def object(self) -> bool: """ Should permission be granted only to a specific object? """ return pulumi.get(self, "object") @property @pulumi.getter def service(self) -> bool: """ Should permission be granted to the entire service? """ return pulumi.get(self, "service") @pulumi.output_type class GetAccountSASServicesResult(dict): def __init__(__self__, *, blob: bool, file: bool, queue: bool, table: bool): """ :param bool blob: Should permission be granted to `blob` services within this storage account? :param bool file: Should permission be granted to `file` services within this storage account? :param bool queue: Should permission be granted to `queue` services within this storage account? 
:param bool table: Should permission be granted to `table` services within this storage account? """ pulumi.set(__self__, "blob", blob) pulumi.set(__self__, "file", file) pulumi.set(__self__, "queue", queue) pulumi.set(__self__, "table", table) @property @pulumi.getter def blob(self) -> bool: """ Should permission be granted to `blob` services within this storage account? """ return pulumi.get(self, "blob") @property @pulumi.getter def file(self) -> bool: """ Should permission be granted to `file` services within this storage account? """ return pulumi.get(self, "file") @property @pulumi.getter def queue(self) -> bool: """ Should permission be granted to `queue` services within this storage account? """ return pulumi.get(self, "queue") @property @pulumi.getter def table(self) -> bool: """ Should permission be granted to `table` services within this storage account? """ return pulumi.get(self, "table") @pulumi.output_type class GetPolicyRuleResult(dict): def __init__(__self__, *, actions: Sequence['outputs.GetPolicyRuleActionResult'], enabled: bool, filters: Sequence['outputs.GetPolicyRuleFilterResult'], name: str): """ :param Sequence['GetPolicyRuleActionArgs'] actions: An `actions` block as documented below. :param bool enabled: Boolean to specify whether the rule is enabled. :param Sequence['GetPolicyRuleFilterArgs'] filters: A `filter` block as documented below. :param str name: The filter tag name used for tag based filtering for blob objects. """ pulumi.set(__self__, "actions", actions) pulumi.set(__self__, "enabled", enabled) pulumi.set(__self__, "filters", filters) pulumi.set(__self__, "name", name) @property @pulumi.getter def actions(self) -> Sequence['outputs.GetPolicyRuleActionResult']: """ An `actions` block as documented below. """ return pulumi.get(self, "actions") @property @pulumi.getter def enabled(self) -> bool: """ Boolean to specify whether the rule is enabled. 
""" return pulumi.get(self, "enabled") @property @pulumi.getter def filters(self) -> Sequence['outputs.GetPolicyRuleFilterResult']: """ A `filter` block as documented below. """ return pulumi.get(self, "filters") @property @pulumi.getter def name(self) -> str: """ The filter tag name used for tag based filtering for blob objects. """ return pulumi.get(self, "name") @pulumi.output_type class GetPolicyRuleActionResult(dict): def __init__(__self__, *, base_blobs: Sequence['outputs.GetPolicyRuleActionBaseBlobResult'], snapshots: Sequence['outputs.GetPolicyRuleActionSnapshotResult'], versions: Sequence['outputs.GetPolicyRuleActionVersionResult']): """ :param Sequence['GetPolicyRuleActionBaseBlobArgs'] base_blobs: A `base_blob` block as documented below. :param Sequence['GetPolicyRuleActionSnapshotArgs'] snapshots: A `snapshot` block as documented below. :param Sequence['GetPolicyRuleActionVersionArgs'] versions: A `version` block as documented below. """ pulumi.set(__self__, "base_blobs", base_blobs) pulumi.set(__self__, "snapshots", snapshots) pulumi.set(__self__, "versions", versions) @property @pulumi.getter(name="baseBlobs") def base_blobs(self) -> Sequence['outputs.GetPolicyRuleActionBaseBlobResult']: """ A `base_blob` block as documented below. """ return pulumi.get(self, "base_blobs") @property @pulumi.getter def snapshots(self) -> Sequence['outputs.GetPolicyRuleActionSnapshotResult']: """ A `snapshot` block as documented below. """ return pulumi.get(self, "snapshots") @property @pulumi.getter def versions(self) -> Sequence['outputs.GetPolicyRuleActionVersionResult']: """ A `version` block as documented below. 
""" return pulumi.get(self, "versions") @pulumi.output_type class GetPolicyRuleActionBaseBlobResult(dict): def __init__(__self__, *, delete_after_days_since_modification_greater_than: int, tier_to_archive_after_days_since_modification_greater_than: int, tier_to_cool_after_days_since_modification_greater_than: int): """ :param int delete_after_days_since_modification_greater_than: The age in days after last modification to delete the blob. :param int tier_to_archive_after_days_since_modification_greater_than: The age in days after last modification to tier blobs to archive storage. Supports blob currently at Hot or Cool tier. :param int tier_to_cool_after_days_since_modification_greater_than: The age in days after last modification to tier blobs to cool storage. Supports blob currently at Hot tier. """ pulumi.set(__self__, "delete_after_days_since_modification_greater_than", delete_after_days_since_modification_greater_than) pulumi.set(__self__, "tier_to_archive_after_days_since_modification_greater_than", tier_to_archive_after_days_since_modification_greater_than) pulumi.set(__self__, "tier_to_cool_after_days_since_modification_greater_than", tier_to_cool_after_days_since_modification_greater_than) @property @pulumi.getter(name="deleteAfterDaysSinceModificationGreaterThan") def delete_after_days_since_modification_greater_than(self) -> int: """ The age in days after last modification to delete the blob. """ return pulumi.get(self, "delete_after_days_since_modification_greater_than") @property @pulumi.getter(name="tierToArchiveAfterDaysSinceModificationGreaterThan") def tier_to_archive_after_days_since_modification_greater_than(self) -> int: """ The age in days after last modification to tier blobs to archive storage. Supports blob currently at Hot or Cool tier. 
""" return pulumi.get(self, "tier_to_archive_after_days_since_modification_greater_than") @property @pulumi.getter(name="tierToCoolAfterDaysSinceModificationGreaterThan") def tier_to_cool_after_days_since_modification_greater_than(self) -> int: """ The age in days after last modification to tier blobs to cool storage. Supports blob currently at Hot tier. """ return pulumi.get(self, "tier_to_cool_after_days_since_modification_greater_than") @pulumi.output_type class GetPolicyRuleActionSnapshotResult(dict): def __init__(__self__, *, change_tier_to_archive_after_days_since_creation: int, change_tier_to_cool_after_days_since_creation: int, delete_after_days_since_creation_greater_than: int): """ :param int change_tier_to_archive_after_days_since_creation: The age in days after creation to tier blob version to archive storage. :param int change_tier_to_cool_after_days_since_creation: The age in days after creation to tier blob version to cool storage. :param int delete_after_days_since_creation_greater_than: The age in days after creation to delete the blob snapshot. """ pulumi.set(__self__, "change_tier_to_archive_after_days_since_creation", change_tier_to_archive_after_days_since_creation) pulumi.set(__self__, "change_tier_to_cool_after_days_since_creation", change_tier_to_cool_after_days_since_creation) pulumi.set(__self__, "delete_after_days_since_creation_greater_than", delete_after_days_since_creation_greater_than) @property @pulumi.getter(name="changeTierToArchiveAfterDaysSinceCreation") def change_tier_to_archive_after_days_since_creation(self) -> int: """ The age in days after creation to tier blob version to archive storage. """ return pulumi.get(self, "change_tier_to_archive_after_days_since_creation") @property @pulumi.getter(name="changeTierToCoolAfterDaysSinceCreation") def change_tier_to_cool_after_days_since_creation(self) -> int: """ The age in days after creation to tier blob version to cool storage. 
""" return pulumi.get(self, "change_tier_to_cool_after_days_since_creation") @property @pulumi.getter(name="deleteAfterDaysSinceCreationGreaterThan") def delete_after_days_since_creation_greater_than(self) -> int: """ The age in days after creation to delete the blob snapshot. """ return pulumi.get(self, "delete_after_days_since_creation_greater_than") @pulumi.output_type class GetPolicyRuleActionVersionResult(dict): def __init__(__self__, *, change_tier_to_archive_after_days_since_creation: int, change_tier_to_cool_after_days_since_creation: int, delete_after_days_since_creation: int): """ :param int change_tier_to_archive_after_days_since_creation: The age in days after creation to tier blob version to archive storage. :param int change_tier_to_cool_after_days_since_creation: The age in days after creation to tier blob version to cool storage. :param int delete_after_days_since_creation: The age in days after creation to delete the blob version. """ pulumi.set(__self__, "change_tier_to_archive_after_days_since_creation", change_tier_to_archive_after_days_since_creation) pulumi.set(__self__, "change_tier_to_cool_after_days_since_creation", change_tier_to_cool_after_days_since_creation) pulumi.set(__self__, "delete_after_days_since_creation", delete_after_days_since_creation) @property @pulumi.getter(name="changeTierToArchiveAfterDaysSinceCreation") def change_tier_to_archive_after_days_since_creation(self) -> int: """ The age in days after creation to tier blob version to archive storage. """ return pulumi.get(self, "change_tier_to_archive_after_days_since_creation") @property @pulumi.getter(name="changeTierToCoolAfterDaysSinceCreation") def change_tier_to_cool_after_days_since_creation(self) -> int: """ The age in days after creation to tier blob version to cool storage. 
""" return pulumi.get(self, "change_tier_to_cool_after_days_since_creation") @property @pulumi.getter(name="deleteAfterDaysSinceCreation") def delete_after_days_since_creation(self) -> int: """ The age in days after creation to delete the blob version. """ return pulumi.get(self, "delete_after_days_since_creation") @pulumi.output_type class GetPolicyRuleFilterResult(dict): def __init__(__self__, *, blob_types: Sequence[str], match_blob_index_tags: Sequence['outputs.GetPolicyRuleFilterMatchBlobIndexTagResult'], prefix_matches: Sequence[str]): """ :param Sequence[str] blob_types: An array of predefined values. Valid options are `blockBlob` and `appendBlob`. :param Sequence['GetPolicyRuleFilterMatchBlobIndexTagArgs'] match_blob_index_tags: A `match_blob_index_tag` block as defined below. The block defines the blob index tag based filtering for blob objects. --- :param Sequence[str] prefix_matches: An array of strings for prefixes to be matched. """ pulumi.set(__self__, "blob_types", blob_types) pulumi.set(__self__, "match_blob_index_tags", match_blob_index_tags) pulumi.set(__self__, "prefix_matches", prefix_matches) @property @pulumi.getter(name="blobTypes") def blob_types(self) -> Sequence[str]: """ An array of predefined values. Valid options are `blockBlob` and `appendBlob`. """ return pulumi.get(self, "blob_types") @property @pulumi.getter(name="matchBlobIndexTags") def match_blob_index_tags(self) -> Sequence['outputs.GetPolicyRuleFilterMatchBlobIndexTagResult']: """ A `match_blob_index_tag` block as defined below. The block defines the blob index tag based filtering for blob objects. --- """ return pulumi.get(self, "match_blob_index_tags") @property @pulumi.getter(name="prefixMatches") def prefix_matches(self) -> Sequence[str]: """ An array of strings for prefixes to be matched. 
""" return pulumi.get(self, "prefix_matches") @pulumi.output_type class GetPolicyRuleFilterMatchBlobIndexTagResult(dict): def __init__(__self__, *, name: str, operation: str, value: str): """ :param str name: The filter tag name used for tag based filtering for blob objects. :param str operation: The comparison operator which is used for object comparison and filtering. Possible value is `==`. Defaults to `==`. :param str value: The filter tag value used for tag based filtering for blob objects. """ pulumi.set(__self__, "name", name) pulumi.set(__self__, "operation", operation) pulumi.set(__self__, "value", value) @property @pulumi.getter def name(self) -> str: """ The filter tag name used for tag based filtering for blob objects. """ return pulumi.get(self, "name") @property @pulumi.getter def operation(self) -> str: """ The comparison operator which is used for object comparison and filtering. Possible value is `==`. Defaults to `==`. """ return pulumi.get(self, "operation") @property @pulumi.getter def value(self) -> str: """ The filter tag value used for tag based filtering for blob objects. """ return pulumi.get(self, "value") @pulumi.output_type class GetShareAclResult(dict): def __init__(__self__, *, access_policies: Sequence['outputs.GetShareAclAccessPolicyResult'], id: str): """ :param Sequence['GetShareAclAccessPolicyArgs'] access_policies: An `access_policy` block as defined below. :param str id: The ID which should be used for this Shared Identifier. """ pulumi.set(__self__, "access_policies", access_policies) pulumi.set(__self__, "id", id) @property @pulumi.getter(name="accessPolicies") def access_policies(self) -> Sequence['outputs.GetShareAclAccessPolicyResult']: """ An `access_policy` block as defined below. """ return pulumi.get(self, "access_policies") @property @pulumi.getter def id(self) -> str: """ The ID which should be used for this Shared Identifier. 
""" return pulumi.get(self, "id") @pulumi.output_type class GetShareAclAccessPolicyResult(dict): def __init__(__self__, *, expiry: str, permissions: str, start: str): """ :param str expiry: The time at which this Access Policy should be valid until, in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) format. :param str permissions: The permissions which should be associated with this Shared Identifier. Possible value is combination of `r` (read), `w` (write), `d` (delete), and `l` (list). :param str start: The time at which this Access Policy should be valid from, in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) format. """ pulumi.set(__self__, "expiry", expiry) pulumi.set(__self__, "permissions", permissions) pulumi.set(__self__, "start", start) @property @pulumi.getter def expiry(self) -> str: """ The time at which this Access Policy should be valid until, in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) format. """ return pulumi.get(self, "expiry") @property @pulumi.getter def permissions(self) -> str: """ The permissions which should be associated with this Shared Identifier. Possible value is combination of `r` (read), `w` (write), `d` (delete), and `l` (list). """ return pulumi.get(self, "permissions") @property @pulumi.getter def start(self) -> str: """ The time at which this Access Policy should be valid from, in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) format. """ return pulumi.get(self, "start")
41.105364
235
0.660243
14,633
128,742
5.551083
0.040457
0.018183
0.029128
0.042571
0.791109
0.755838
0.73587
0.698211
0.679179
0.659789
0
0.004157
0.247006
128,742
3,131
236
41.118492
0.833756
0.280926
0
0.652844
1
0.015417
0.213443
0.120882
0
0
0
0
0
1
0.171186
false
0.002658
0.00319
0.000532
0.330144
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
53ceda538523bab4b13030b1ea7169615e882c4a
1,163
py
Python
RaspberryPi/Test.py
sevenTiny/SevenTiny.SmartHome
59d1704a3faef114664dfbbb1d74b0ed41eb7ddf
[ "Apache-2.0" ]
2
2019-10-09T04:17:17.000Z
2019-11-12T11:43:51.000Z
RaspberryPi/Test.py
sevenTiny/SevenTiny.SmartHome
59d1704a3faef114664dfbbb1d74b0ed41eb7ddf
[ "Apache-2.0" ]
null
null
null
RaspberryPi/Test.py
sevenTiny/SevenTiny.SmartHome
59d1704a3faef114664dfbbb1d74b0ed41eb7ddf
[ "Apache-2.0" ]
1
2019-11-12T11:43:50.000Z
2019-11-12T11:43:50.000Z
# coding=utf-8 import time import datetime import sys import json sys.path.append('..') from Utility.MySqlHelper import MySqlHelper # from GPIO.Relay import Relay4 # smartHomeDb = MySqlHelper("SmartHome") # conn,cur = smartHomeDb.getConnAndCur() # cur.execute("SELECT * FROM DailyMonitor WHERE Year=2019 AND Month=8 AND Hour=12 ORDER BY `Day`") # result = cur.fetchall() # re = [] # for r in result: # re.append(r[6]) # print(r) # cur.close() # conn.close() # print(json.dumps(re)) # smartHomeDb = MySqlHelper("SmartHome") # conn, cur = smartHomeDb.getConnAndCur() # timenow = datetime.datetime.now() # cur.execute("SELECT `Day`,`Hour`,Temperature,Humidity FROM DailyMonitor WHERE Year="+str(timenow.year) + # " AND Month="+str(timenow.month)+" ORDER BY `Day`,`Hour`") # datas = cur.fetchall() # print(json.dumps(datas)) # timenow = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") # cur.execute("INSERT INTO DailyMonitor (DateTime,Year,Month,Temperature,Humidity) VALUES ('{0}',{1},{2})".format(timenow,51,31)) # conn.commit() # cur.close() # conn.close() # r4 = Relay4(11,13,15,29) # r4.open(4) # r4.close(4)
23.26
129
0.66896
158
1,163
4.924051
0.462025
0.03856
0.079692
0.089974
0.159383
0.159383
0.159383
0
0
0
0
0.031282
0.147893
1,163
49
130
23.734694
0.753784
0.827171
0
0
0
0
0.011628
0
0
0
0
0
0
1
0
true
0
0.833333
0
0.833333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
9900aa382e767ae4ea2edef99d96f6e12f448085
396
py
Python
backend/api/product/admin.py
tuguldurio/fullstack-ecommerce
06257e704c657b008587aabb4075750899149b1d
[ "MIT" ]
null
null
null
backend/api/product/admin.py
tuguldurio/fullstack-ecommerce
06257e704c657b008587aabb4075750899149b1d
[ "MIT" ]
null
null
null
backend/api/product/admin.py
tuguldurio/fullstack-ecommerce
06257e704c657b008587aabb4075750899149b1d
[ "MIT" ]
null
null
null
from django.contrib import admin from api.product.models import Product, ProductImage class ProductImageAdmin(admin.StackedInline): model = ProductImage @admin.register(Product) class ProductAdmin(admin.ModelAdmin): inlines = [ProductImageAdmin] class Meta: model = Product @admin.register(ProductImage) class ProductImageAdmin(admin.ModelAdmin): pass
24.75
53
0.739899
39
396
7.512821
0.487179
0.116041
0.232082
0.266212
0
0
0
0
0
0
0
0
0.184343
396
16
54
24.75
0.907121
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0.083333
0.166667
0
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
073d2acadef17dd53dc54432c03cb46a67e99c9f
70
py
Python
setup.py
onaio/ona-oicd
51ee1da36a2a29a9ef6706295cd1122a9a1281c1
[ "Apache-2.0" ]
null
null
null
setup.py
onaio/ona-oicd
51ee1da36a2a29a9ef6706295cd1122a9a1281c1
[ "Apache-2.0" ]
2
2020-06-12T10:39:34.000Z
2020-06-15T11:20:43.000Z
setup.py
onaio/ona-oicd
51ee1da36a2a29a9ef6706295cd1122a9a1281c1
[ "Apache-2.0" ]
null
null
null
""" Setup file for ona-oidc """ import setuptools setuptools.setup()
10
23
0.714286
9
70
5.555556
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.142857
70
6
24
11.666667
0.833333
0.328571
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
0740136120f5da45ce99f489852515fbb66de4c0
80
py
Python
jira_devops/release_notes/__init__.py
clutcher/jira_devops
61360f3fa9bd9b402d752dde84b3cf486245879e
[ "MIT" ]
null
null
null
jira_devops/release_notes/__init__.py
clutcher/jira_devops
61360f3fa9bd9b402d752dde84b3cf486245879e
[ "MIT" ]
null
null
null
jira_devops/release_notes/__init__.py
clutcher/jira_devops
61360f3fa9bd9b402d752dde84b3cf486245879e
[ "MIT" ]
null
null
null
default_app_config = 'jira_devops.release_notes.settings.ReleaseNotesAppConfig'
40
79
0.8875
9
80
7.444444
1
0
0
0
0
0
0
0
0
0
0
0
0.0375
80
1
80
80
0.87013
0
0
0
0
0
0.7
0.7
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
074ebed468457ef1cbc658948ca103328c0669a3
295
py
Python
testsuite/tests/service_mesh/caching/backend/conftest.py
dlaso99/3scale-tests
b31a3b3596af6d632b393e383c0417ea56bd95ca
[ "Apache-2.0" ]
5
2021-11-04T14:09:24.000Z
2021-12-23T13:48:36.000Z
testsuite/tests/service_mesh/caching/backend/conftest.py
dlaso99/3scale-tests
b31a3b3596af6d632b393e383c0417ea56bd95ca
[ "Apache-2.0" ]
41
2021-11-03T14:27:21.000Z
2022-03-29T14:46:16.000Z
testsuite/tests/service_mesh/caching/backend/conftest.py
dlaso99/3scale-tests
b31a3b3596af6d632b393e383c0417ea56bd95ca
[ "Apache-2.0" ]
12
2021-11-03T17:28:31.000Z
2021-11-30T12:28:25.000Z
"""Conftest for backend caching tests for service mesh""" import pytest @pytest.fixture(scope="module") def gateway_environment(): """Environment for backend caching tests""" return {"USE_CACHED_BACKEND": True, "BACKEND_CACHE_FLUSH_INTERVAL_SECONDS": 1000, "CACHE_ENTRIES_MAX": 1000}
29.5
112
0.759322
37
295
5.810811
0.702703
0.093023
0.15814
0.204651
0
0
0
0
0
0
0
0.030888
0.122034
295
9
113
32.777778
0.799228
0.301695
0
0
0
0
0.394872
0.184615
0
0
0
0
0
1
0.25
true
0
0.25
0
0.75
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
4
4ad454826cd2c3dd7957fa56435c7908148bac78
35
py
Python
addition/addition.py
sridevimarudhachalamoorthy/team-repo
29cf3e7926bcbd9e7114ddbc72858d07c5c70e1e
[ "MIT" ]
null
null
null
addition/addition.py
sridevimarudhachalamoorthy/team-repo
29cf3e7926bcbd9e7114ddbc72858d07c5c70e1e
[ "MIT" ]
null
null
null
addition/addition.py
sridevimarudhachalamoorthy/team-repo
29cf3e7926bcbd9e7114ddbc72858d07c5c70e1e
[ "MIT" ]
null
null
null
a=10 b=5 print(a+b) print(success)
7
14
0.685714
9
35
2.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0.096774
0.114286
35
4
15
8.75
0.677419
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
4af21795f293d9f783c0fae030d3c44d38b1aefa
38
py
Python
{{cookiecutter.project_slug}}/application/config/settings/aws_staging.py
carlos-avila/cookiecutter-django
2eb2ccd74f94881f3d1071ffcfc6d29d501b70b0
[ "MIT" ]
1
2016-07-19T18:17:24.000Z
2016-07-19T18:17:24.000Z
{{cookiecutter.project_slug}}/application/config/settings/aws_staging.py
carlos-avila/cookiecutter-django
2eb2ccd74f94881f3d1071ffcfc6d29d501b70b0
[ "MIT" ]
null
null
null
{{cookiecutter.project_slug}}/application/config/settings/aws_staging.py
carlos-avila/cookiecutter-django
2eb2ccd74f94881f3d1071ffcfc6d29d501b70b0
[ "MIT" ]
null
null
null
from .aws_base import * DEBUG = True
9.5
23
0.710526
6
38
4.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.210526
38
3
24
12.666667
0.866667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
ab131fa273eaf333f685e3774c6b46ac154dfae1
260
py
Python
Numberlists.py
gurmeetkhehra/python-practice
abeb5586f8c1e673fd8ff312a4ae0941f2a0194b
[ "Apache-2.0" ]
null
null
null
Numberlists.py
gurmeetkhehra/python-practice
abeb5586f8c1e673fd8ff312a4ae0941f2a0194b
[ "Apache-2.0" ]
null
null
null
Numberlists.py
gurmeetkhehra/python-practice
abeb5586f8c1e673fd8ff312a4ae0941f2a0194b
[ "Apache-2.0" ]
null
null
null
# Make a list of the multiples of 3 from 3 to 30. Use a for loop to print the numbers in your list. multiples_of_three = [3,6,9,12,15,18,21,24,27,30] for multiple in multiples_of_three: print (multiple * multiple * multiple) print (multiples_of_three)
26
99
0.734615
50
260
3.7
0.54
0.237838
0.259459
0
0
0
0
0
0
0
0
0.098592
0.180769
260
9
100
28.888889
0.769953
0.373077
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
ab2461a7f5fea17c5e79156c335c2fa3109662ca
705
py
Python
python/code_challenges/hash-table/test-hash-table.py
AhmadShalein/data-structures-and-algorithms
b4c12777075a9ee81f54ee26c3601c30c7b60660
[ "MIT" ]
null
null
null
python/code_challenges/hash-table/test-hash-table.py
AhmadShalein/data-structures-and-algorithms
b4c12777075a9ee81f54ee26c3601c30c7b60660
[ "MIT" ]
null
null
null
python/code_challenges/hash-table/test-hash-table.py
AhmadShalein/data-structures-and-algorithms
b4c12777075a9ee81f54ee26c3601c30c7b60660
[ "MIT" ]
2
2021-12-06T11:00:45.000Z
2022-02-27T20:46:04.000Z
import bytest from hash-table.hash-table import HashTable def test-hash-function(): hash_table = HashTable() assert hash_table.add('hello') == 523 def test_add_item(): hash_table = HashTable() hash_table.add('hello',15) assert len(hash_table.arr[hash_table.get_hash('hello')]) == 1 def test_get_item(): hash_table = HashTable() hash_table.add('hello',15) assert hash_table.get('hello') == 15 def test_contains_item(): hash_table = HashTable() hash_table.add('hello',15) assert table.contains('hello') def test_delete_item(): hash_table = HashTable() hash_table.add('hello',15) hash_table.delete('hello') assert len(hash_table.arr[hash_table.get_hash('hello')]) == 0
25.178571
63
0.716312
106
705
4.518868
0.207547
0.338205
0.187891
0.177453
0.542797
0.542797
0.542797
0.542797
0.542797
0.457203
0
0.02451
0.131915
705
27
64
26.111111
0.75817
0
0
0.409091
0
0
0.070922
0
0
0
0
0
0.227273
0
null
null
0
0.090909
null
null
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
ab48766eabcbf5dc3fac40b173261976a624f1dc
276
py
Python
openwisp_users/base.py
Vivekrajput20/openwisp-users
6073c059d10c2eda1c2896f5559457395b05dd9d
[ "BSD-3-Clause" ]
1
2018-12-07T14:24:23.000Z
2018-12-07T14:24:23.000Z
openwisp_users/base.py
ppabcd/openwisp-users
cf5cd64f7d3c3515fcd0fc1f23077620b5a85964
[ "BSD-3-Clause" ]
null
null
null
openwisp_users/base.py
ppabcd/openwisp-users
cf5cd64f7d3c3515fcd0fc1f23077620b5a85964
[ "BSD-3-Clause" ]
null
null
null
from django.conf import settings if 'reversion' in settings.INSTALLED_APPS: # pragma: no cover from reversion.admin import VersionAdmin as BaseModelAdmin else: from django.contrib.admin import ModelAdmin as BaseModelAdmin class BaseAdmin(BaseModelAdmin): pass
25.090909
65
0.789855
34
276
6.382353
0.676471
0.092166
0
0
0
0
0
0
0
0
0
0
0.15942
276
10
66
27.6
0.935345
0.057971
0
0
0
0
0.034884
0
0
0
0
0
0
1
0
true
0.142857
0.428571
0
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
4
ab55f23683fa2c9f0809f1f7d7b9e2a033f4607e
810
py
Python
test.py
litex-hub/pythondata-cpu-cv32e40x
31a94b162dcf937e963eb23bedae74e06241f3ef
[ "Apache-2.0" ]
null
null
null
test.py
litex-hub/pythondata-cpu-cv32e40x
31a94b162dcf937e963eb23bedae74e06241f3ef
[ "Apache-2.0" ]
null
null
null
test.py
litex-hub/pythondata-cpu-cv32e40x
31a94b162dcf937e963eb23bedae74e06241f3ef
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 from __future__ import print_function import os import pythondata_cpu_cv32e40x print("Found cv32e40x @ version", pythondata_cpu_cv32e40x.version_str, "(with data", pythondata_cpu_cv32e40x.data_version_str, ")") print() print("Data is in", pythondata_cpu_cv32e40x.data_location) assert os.path.exists(pythondata_cpu_cv32e40x.data_location) print("Data is version", pythondata_cpu_cv32e40x.data_version_str, pythondata_cpu_cv32e40x.data_git_hash) print("-"*75) print(pythondata_cpu_cv32e40x.data_git_msg) print("-"*75) print() print("It contains:") for root, dirs, files in os.walk(pythondata_cpu_cv32e40x.data_location): dirs.sort() for f in sorted(files): path = os.path.relpath(os.path.join(root, f), pythondata_cpu_cv32e40x.data_location) print(" -", path)
33.75
131
0.777778
117
810
5.059829
0.34188
0.219595
0.35473
0.337838
0.452703
0.246622
0
0
0
0
0
0.067493
0.103704
810
23
132
35.217391
0.747934
0.025926
0
0.222222
0
0
0.096447
0
0
0
0
0
0.055556
1
0
false
0
0.166667
0
0.166667
0.611111
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
db45556829366124d78521d908030a18cf29997e
294
py
Python
uwsgi_tasks/__init__.py
zanachka/uwsgi_tasks
cad3e72c8bd66979c6ac23a1cc68caff176435ab
[ "MIT" ]
92
2015-01-21T06:25:18.000Z
2022-02-23T22:29:05.000Z
uwsgi_tasks/__init__.py
zanachka/uwsgi_tasks
cad3e72c8bd66979c6ac23a1cc68caff176435ab
[ "MIT" ]
17
2015-04-30T21:02:05.000Z
2021-03-09T17:30:00.000Z
uwsgi_tasks/__init__.py
zanachka/uwsgi_tasks
cad3e72c8bd66979c6ac23a1cc68caff176435ab
[ "MIT" ]
13
2015-04-30T20:54:36.000Z
2022-02-12T17:06:15.000Z
# -*- coding: utf-8 -*- from uwsgi_tasks.tasks import ( Task, SignalTask, TimerTask, CronTask, TaskExecutor, set_uwsgi_callbacks, RetryTaskException, SPOOL_OK, SPOOL_RETRY, SPOOL_IGNORE, get_current_task ) from uwsgi_tasks.utils import django_setup from uwsgi_tasks.decorators import *
36.75
77
0.789116
38
294
5.815789
0.657895
0.122172
0.190045
0
0
0
0
0
0
0
0
0.003891
0.12585
294
7
78
42
0.856031
0.071429
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
db4b077ffc1479a53be810ec20d90e718869e062
110
py
Python
py_tdlib/constructors/check_phone_number_confirmation_code.py
Mr-TelegramBot/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
24
2018-10-05T13:04:30.000Z
2020-05-12T08:45:34.000Z
py_tdlib/constructors/check_phone_number_confirmation_code.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
3
2019-06-26T07:20:20.000Z
2021-05-24T13:06:56.000Z
py_tdlib/constructors/check_phone_number_confirmation_code.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
5
2018-10-05T14:29:28.000Z
2020-08-11T15:04:10.000Z
from ..factory import Method class checkPhoneNumberConfirmationCode(Method): code = None # type: "string"
18.333333
47
0.763636
11
110
7.636364
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.145455
110
5
48
22
0.893617
0.127273
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
db7f3a98693054991502c2970fcf7b64f51dd7e4
89
py
Python
listing/apps.py
antoinewg/immo
3d47c6187f8ad7cb6bad02a3709d71fa5996d4cf
[ "MIT" ]
1
2020-03-21T15:47:35.000Z
2020-03-21T15:47:35.000Z
listing/apps.py
antoinewg/immo
3d47c6187f8ad7cb6bad02a3709d71fa5996d4cf
[ "MIT" ]
10
2020-03-16T22:16:17.000Z
2021-09-22T18:45:26.000Z
listing/apps.py
antoinewg/immo
3d47c6187f8ad7cb6bad02a3709d71fa5996d4cf
[ "MIT" ]
null
null
null
from django.apps import AppConfig class ListingConfig(AppConfig): name = "listing"
14.833333
33
0.752809
10
89
6.7
0.9
0
0
0
0
0
0
0
0
0
0
0
0.168539
89
5
34
17.8
0.905405
0
0
0
0
0
0.078652
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
db838c2f212c6f7c3a7c49a3c705e7ac2f1fa559
963
py
Python
PriceTracker/PriceTracker.py
cyamonide/price-tracker
4549fc0e86b6158d64dbe9f5bc44b8f3b72ef2ef
[ "MIT" ]
null
null
null
PriceTracker/PriceTracker.py
cyamonide/price-tracker
4549fc0e86b6158d64dbe9f5bc44b8f3b72ef2ef
[ "MIT" ]
null
null
null
PriceTracker/PriceTracker.py
cyamonide/price-tracker
4549fc0e86b6158d64dbe9f5bc44b8f3b72ef2ef
[ "MIT" ]
null
null
null
import TrackerNewegg import TrackerBestBuy import TrackerStaples print(TrackerNewegg.getPrice('https://www.newegg.ca/Product/Product.aspx?Item=9SIAD3M6NY0446')) print(TrackerNewegg.getPrice('https://www.newegg.ca/Product/Product.aspx?item=N82E16820168038')) print(TrackerNewegg.getPrice('https://www.newegg.ca/Product/Product.aspx?item=N82E16820168040')) TrackerNewegg.getId('https://www.newegg.ca/Product/Product.aspx?item=N82E16820168040') print(TrackerBestBuy.getPrice('https://www.bestbuy.ca/en-ca/product/jbl-jbl-charge-3-waterproof-wireless-bluetooth-speaker-black-jblcharge3blkam/10424278.aspx?')) print(TrackerBestBuy.getPrice('https://www.bestbuy.ca/en-ca/product/kingston-a400-ssd-240gb-sata-3-2-5-internal-solid-state-drive-ssd-sa400s37-240g-increased-performance-500mb-s/12490330.aspx?')) print(TrackerStaples.getPrice('https://www.staples.ca/en/JBL-Charge-3-Portable-Bluetooth-Waterproof-Speaker-Black/product_2300581_1-CA_1_20001'))
68.785714
196
0.809969
128
963
6.0625
0.40625
0.072165
0.123711
0.082474
0.476804
0.476804
0.476804
0.476804
0.476804
0.389175
0
0.105996
0.030114
963
14
197
68.785714
0.724839
0
0
0
0
0.3
0.684543
0
0
0
0
0
0
1
0
true
0
0.3
0
0.3
0.6
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
9162e5c2948d39a9cf504db71129d1a58dca09fe
267
py
Python
src/models/competitors.py
duxiaodan/topological-autoencoders
e3c71719942bb50f8c646f7df65bb0f4ba38cb44
[ "BSD-3-Clause" ]
69
2020-07-14T15:07:19.000Z
2022-03-27T09:33:54.000Z
src/models/competitors.py
duxiaodan/topological-autoencoders
e3c71719942bb50f8c646f7df65bb0f4ba38cb44
[ "BSD-3-Clause" ]
6
2021-02-02T16:07:49.000Z
2022-01-18T08:50:57.000Z
src/models/competitors.py
duxiaodan/topological-autoencoders
e3c71719942bb50f8c646f7df65bb0f4ba38cb44
[ "BSD-3-Clause" ]
17
2020-07-14T18:40:35.000Z
2022-03-17T00:19:48.000Z
"""Competitor dimensionality reduction algorithms.""" from sklearn.decomposition import PCA from sklearn.manifold import Isomap from umap import UMAP try: from MulticoreTSNE import MulticoreTSNE as TSNE except ImportError: from sklearn.manifold import TSNE
24.272727
53
0.808989
32
267
6.75
0.5625
0.152778
0.175926
0.231481
0
0
0
0
0
0
0
0
0.146067
267
10
54
26.7
0.947368
0.17603
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.857143
0
0.857143
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
91727a84e13e77d682e8bd79674f5139ffed801b
47
py
Python
gini/__init__.py
azraq27/gini
3c2b5265d096d606b303bfe25ac9adb74b8cee14
[ "Apache-2.0" ]
null
null
null
gini/__init__.py
azraq27/gini
3c2b5265d096d606b303bfe25ac9adb74b8cee14
[ "Apache-2.0" ]
null
null
null
gini/__init__.py
azraq27/gini
3c2b5265d096d606b303bfe25ac9adb74b8cee14
[ "Apache-2.0" ]
null
null
null
version = '0.6.2' import personality,semantics
15.666667
28
0.765957
7
47
5.142857
1
0
0
0
0
0
0
0
0
0
0
0.071429
0.106383
47
3
28
15.666667
0.785714
0
0
0
0
0
0.104167
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
91a1432c9b243db0feb6f7e2a6dac8e505dbdd05
190
py
Python
AlgoExpert/Arrays/Sorted-Squared-Array/test_first.py
grierson/Katas
dde4b26551330608a81e2af3dd94a36b90a59107
[ "MIT" ]
1
2020-12-05T02:08:21.000Z
2020-12-05T02:08:21.000Z
AlgoExpert/Arrays/Sorted-Squared-Array/test_first.py
grierson/Katas
dde4b26551330608a81e2af3dd94a36b90a59107
[ "MIT" ]
null
null
null
AlgoExpert/Arrays/Sorted-Squared-Array/test_first.py
grierson/Katas
dde4b26551330608a81e2af3dd94a36b90a59107
[ "MIT" ]
null
null
null
def sortedSquaredArray(array): return sorted(map(lambda x: x * x, array)) def test_sample(): assert sortedSquaredArray([1, 2, 3, 5, 6, 8, 9]) == [ 1, 4, 9, 25, 36, 64, 81]
23.75
57
0.584211
30
190
3.666667
0.766667
0.036364
0
0
0
0
0
0
0
0
0
0.125
0.242105
190
7
58
27.142857
0.638889
0
0
0
0
0
0
0
0
0
0
0
0.2
1
0.4
false
0
0
0.2
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
91b3d0d38d6da9f410a5761f48be3c9bec26cdaa
49
py
Python
pyramid-creator/app/config.py
jinnn-dev/patholearn
b4e6a18cfbf963e71640ed6cac3fc3a618a7ae15
[ "MIT" ]
1
2022-02-20T12:45:04.000Z
2022-02-20T12:45:04.000Z
pyramid-creator/app/config.py
JamesNeumann/learning-by-annotations
c2b5e4b653eeb1c973aa5a7dad35ac8be18cb1ad
[ "MIT" ]
21
2021-11-01T10:13:56.000Z
2021-12-02T10:02:13.000Z
pyramid-creator/app/config.py
jinnn-dev/patholearn
b4e6a18cfbf963e71640ed6cac3fc3a618a7ae15
[ "MIT" ]
1
2021-12-16T18:20:55.000Z
2021-12-16T18:20:55.000Z
class Config: TEMP_IMAGES_FOLDER = "/data"
16.333333
33
0.673469
6
49
5.166667
1
0
0
0
0
0
0
0
0
0
0
0
0.22449
49
2
34
24.5
0.815789
0
0
0
0
0
0.106383
0
0
0
0
0
0
1
0
false
0
0
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
91e650d8bf1948da5890caa3e80f22524aa7800f
65
py
Python
fastiqa/bunches/__init__.py
baidut/PatchVQ
040486b6342dfd36695f1daea0b5c4d77d728a23
[ "Unlicense" ]
32
2020-12-05T09:11:20.000Z
2022-03-28T07:49:13.000Z
fastiqa/bunches/__init__.py
utlive/PatchVQ
040486b6342dfd36695f1daea0b5c4d77d728a23
[ "Unlicense" ]
5
2021-07-12T19:43:51.000Z
2022-01-28T13:16:16.000Z
fastiqa/bunches/__init__.py
utlive/PatchVQ
040486b6342dfd36695f1daea0b5c4d77d728a23
[ "Unlicense" ]
7
2020-12-29T21:52:07.000Z
2022-03-18T15:12:50.000Z
from ..bunch import IqaDataBunch from fastai.vision.all import *
21.666667
32
0.8
9
65
5.777778
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.123077
65
2
33
32.5
0.912281
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
37de4bf3a28b72150b3c916a49b3097448140872
1,660
py
Python
credential.py
Hulian2000/Password-Locker
3dfad8c216cf345f03d10db24973a38c4a6e4fbc
[ "MIT" ]
null
null
null
credential.py
Hulian2000/Password-Locker
3dfad8c216cf345f03d10db24973a38c4a6e4fbc
[ "MIT" ]
null
null
null
credential.py
Hulian2000/Password-Locker
3dfad8c216cf345f03d10db24973a38c4a6e4fbc
[ "MIT" ]
null
null
null
import random
import string


class Credentials:
    '''
    Stores and manages app credential entries.

    Each instance holds an app name and its password; every saved
    instance lives in the class-level `app_details` registry.
    '''

    # Shared registry of all saved credential instances.
    app_details = []

    def __init__(self, app, app_password):
        '''
        Args:
            app: name of the application
            app_password: password for the application
        '''
        self.app = app
        self.app_password = app_password

    def save_app(self):
        '''
        function that stores our accounts
        '''
        Credentials.app_details.append(self)

    def delete_app(self):
        '''
        Function that remove app and password
        '''
        Credentials.app_details.remove(self)

    @classmethod
    def find_app(cls, app):
        '''
        Finding app by the name

        Args:
            app: app name to search for

        Returns:
            the app name if a matching credential exists, else None
            (note: returns the name, not the credential object)
        '''
        for credentials in cls.app_details:
            if credentials.app == app:
                return app
        return None

    @classmethod
    def app_exist(cls, app):
        '''
        Method that checks if app exist

        Args:
            app: app name to check for

        Returns:
            True if a credential for `app` exists, False otherwise
        '''
        for credentials in cls.app_details:
            if credentials.app == app:
                return True
        return False

    @classmethod
    def gen_password(cls, size=8):
        '''
        Generate a random alphabetic password.

        Bug fix: the original signature was `gen_password(size=8)` under
        @classmethod, so the implicit class argument was bound to `size`
        and every call crashed with TypeError in range(); `cls` is now
        explicit and `size` keeps its default of 8.

        Args:
            size: desired password length (default 8, not "six digits"
                  as the old docstring claimed)

        Returns:
            a string of `size` random ASCII letters
        '''
        # NOTE(review): `random` is not cryptographically secure; consider
        # `secrets.choice` for real password generation.
        char = string.ascii_uppercase + string.ascii_lowercase
        return ''.join(random.choice(char) for _ in range(size))

    @classmethod
    def display_app(cls, app):
        '''
        Function that displays app

        Args:
            app: unused; kept for backward compatibility with callers

        Returns:
            the full list of saved credentials
        '''
        return cls.app_details
25.151515
72
0.557831
184
1,660
4.907609
0.375
0.066445
0.069767
0.042082
0.126246
0.126246
0.126246
0.126246
0.126246
0.126246
0
0.000954
0.368675
1,660
66
73
25.151515
0.860687
0.254217
0
0.266667
1
0
0
0
0
0
0
0
0
1
0.233333
false
0.166667
0.066667
0
0.533333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
37e273a377834f1395626eabc18d97af4557c130
209
py
Python
src/appengine_config.py
ayip001/ayip.io
8051e6c576c956bc81655326d28d575c6c0a52ee
[ "MIT" ]
null
null
null
src/appengine_config.py
ayip001/ayip.io
8051e6c576c956bc81655326d28d575c6c0a52ee
[ "MIT" ]
6
2017-12-22T01:15:20.000Z
2018-02-26T02:29:42.000Z
src/appengine_config.py
ayip001/ayip.io
8051e6c576c956bc81655326d28d575c6c0a52ee
[ "MIT" ]
null
null
null
# ./src/appengine_config.py from google.appengine.ext import vendor import tempfile tempfile.SpooledTemporaryFile = tempfile.TemporaryFile # Add any libraries installed in the "lib" folder. vendor.add('lib')
26.125
54
0.803828
27
209
6.185185
0.740741
0
0
0
0
0
0
0
0
0
0
0
0.105263
209
7
55
29.857143
0.893048
0.354067
0
0
0
0
0.022727
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
533669383e46df5532f6dc4bf76d0d7c6cad72bb
172
py
Python
sf/config.py
Haupti/tudatalibAPI
f249853711fca3203b76bb26b4df7d6912cd0304
[ "Apache-2.0" ]
null
null
null
sf/config.py
Haupti/tudatalibAPI
f249853711fca3203b76bb26b4df7d6912cd0304
[ "Apache-2.0" ]
null
null
null
sf/config.py
Haupti/tudatalibAPI
f249853711fca3203b76bb26b4df7d6912cd0304
[ "Apache-2.0" ]
null
null
null
'''
Fixed TUdatalib endpoint configuration.

This is not meant to be edited by the user.
'''

# REST endpoint of the test instance.
rest_test_url = "https://tudata-test.ulb.tu-darmstadt.de/rest"

# REST endpoint of the production instance.
rest_url = "https://tudatalib.ulb.tu-darmstadt.de/rest"
21.5
62
0.715116
30
172
4
0.666667
0.133333
0.233333
0.266667
0.333333
0
0
0
0
0
0
0
0.116279
172
7
63
24.571429
0.789474
0.244186
0
0
0
0
0.710744
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
534257907f01a894317f1107b0c0465e57d664b8
501
py
Python
tests/realtime/test_realtime_Bus_set.py
butayama/supriya
0c197324ecee4232381221880d1f40e109bb756c
[ "MIT" ]
191
2015-11-13T02:28:42.000Z
2022-03-29T10:26:44.000Z
tests/realtime/test_realtime_Bus_set.py
butayama/supriya
0c197324ecee4232381221880d1f40e109bb756c
[ "MIT" ]
130
2016-01-04T16:59:02.000Z
2022-02-26T15:37:20.000Z
tests/realtime/test_realtime_Bus_set.py
butayama/supriya
0c197324ecee4232381221880d1f40e109bb756c
[ "MIT" ]
22
2016-05-04T10:32:16.000Z
2022-02-26T19:22:45.000Z
import supriya.realtime


def test_01(server):
    """A set() on a control bus must be reflected by both get() and .value."""
    bus = supriya.realtime.Bus.control()
    bus.allocate()
    assert bus.is_allocated
    # A freshly allocated control bus reads back as zero.
    reading = bus.get()
    assert reading == 0.0
    assert bus.value == reading
    # Each set() is immediately observable via get() and the cached value.
    for target in (0.5, 0.25):
        bus.set(target)
        reading = bus.get()
        assert reading == target
        assert bus.value == reading
20.875
48
0.680639
70
501
4.685714
0.285714
0.335366
0.243902
0.17378
0.625
0.542683
0.542683
0.25
0.25
0
0
0.035623
0.215569
501
23
49
21.782609
0.798982
0
0
0.375
0
0
0
0
0
0
0
0
0.4375
1
0.0625
false
0
0.0625
0
0.125
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
7256fabad0c2d9bc1d7fb0e4c54c8d6b26df72c5
1,335
py
Python
setup.py
eyalev/xerox
4df98abf2b34034c3476fe26be8dfa9e7ad79b63
[ "MIT" ]
128
2015-02-01T13:09:26.000Z
2019-04-17T19:38:20.000Z
setup.py
eyalev/xerox
4df98abf2b34034c3476fe26be8dfa9e7ad79b63
[ "MIT" ]
19
2015-01-26T18:48:09.000Z
2018-02-22T22:01:33.000Z
setup.py
eyalev/xerox
4df98abf2b34034c3476fe26be8dfa9e7ad79b63
[ "MIT" ]
21
2015-02-01T13:09:30.000Z
2019-06-11T08:28:49.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Packaging script for xerox; `python setup.py publish` uploads to PyPI."""

import os
import sys

import xerox
from setuptools import setup


def publish():
    """Build an sdist and upload it to PyPI."""
    os.system("python setup.py sdist upload")


# Shortcut: `python setup.py publish` releases and exits immediately.
if sys.argv[-1] == "publish":
    publish()
    sys.exit()

setup(
    name='xerox',
    version='0.4.1',
    description='Simple Copy + Paste in Python.',
    long_description=open('README.rst').read(),
    author='Kenneth Reitz',
    author_email='me@kennethreitz.com',
    url='http://github.com/kennethreitz/xerox',
    packages=['xerox'],
    entry_points={
        'console_scripts': [
            'xerox = xerox:main',
        ]
    },
    license='MIT',
    classifiers=(
        "Development Status :: 4 - Beta",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.0",
        "Programming Language :: Python :: 3.1",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: IronPython"
    )
)
27.8125
72
0.57603
139
1,335
5.503597
0.52518
0.248366
0.326797
0.203922
0
0
0
0
0
0
0
0.02045
0.267416
1,335
47
73
28.404255
0.761759
0.043446
0
0
0
0
0.529134
0
0
0
0
0
0
1
0.026316
true
0
0.105263
0
0.131579
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
72607e10207e6328f57f8fa71421d70abfe05b90
104
py
Python
fitness/apps.py
fitahol/fitahol
ce84dc909aa98f2dc7594ef26568e015cbfe0e94
[ "MIT" ]
2
2017-02-20T14:11:30.000Z
2017-06-11T16:10:33.000Z
fitness/apps.py
fitahol/fitahol
ce84dc909aa98f2dc7594ef26568e015cbfe0e94
[ "MIT" ]
null
null
null
fitness/apps.py
fitahol/fitahol
ce84dc909aa98f2dc7594ef26568e015cbfe0e94
[ "MIT" ]
null
null
null
# coding=utf-8
"""Django application configuration for the fitness app."""
from django.apps import AppConfig


class FitnessConfig(AppConfig):
    """Registers the fitness app with Django under the label 'fitness'."""

    name = 'fitness'
14.857143
33
0.740385
13
104
5.923077
0.923077
0
0
0
0
0
0
0
0
0
0
0.011494
0.163462
104
6
34
17.333333
0.873563
0.115385
0
0
0
0
0.077778
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
7293525871ca8b52beecd7df36cf2fdf4efdf5ad
6,949
py
Python
crawler_localtrend_Seoul_gu_full_re.py
hyeongmokoo/localtrend_crawler
05bfaf148d7775cc450bd7f10c7f65c06246ad48
[ "MIT" ]
null
null
null
crawler_localtrend_Seoul_gu_full_re.py
hyeongmokoo/localtrend_crawler
05bfaf148d7775cc450bd7f10c7f65c06246ad48
[ "MIT" ]
null
null
null
crawler_localtrend_Seoul_gu_full_re.py
hyeongmokoo/localtrend_crawler
05bfaf148d7775cc450bd7f10c7f65c06246ad48
[ "MIT" ]
null
null
null
# NOTE(review): this script arrived with its newlines flattened; the
# indentation below was reconstructed from the control flow and should be
# confirmed against the original repository. All non-comment tokens are
# unchanged.
#
# Scrapes Naver DataLab "local trend" charts for Seoul: for each of 10
# business categories it locates the gu (district) whose value peaks at
# 100, then collects weekly values for the remaining gus in 3 batches
# (each batch re-including the peak gu as the 100 reference) into
# res_<category>_seoul_re.csv.
from selenium import webdriver
import csv
import time

link = 'https://datalab.naver.com/local/trend.naver'
search_btn_xpath = '//a[@class="com_btn_srch"]'
# NOTE(review): hard-coded local chromedriver path — machine-specific.
driver = webdriver.Chrome('C:/Gits/localtrend_crawler/chromedriver.exe')
driver.get(link)
time.sleep(5)

## Select Seoul (area1)
area1_option_xpath = '((//div[@class="analysis_step v2"]/div[@class="analysis_filter"]/div[@class="filter_area"])[1]/div[@class="filter_option scroll_cst"]/ul/li)[1]/span/label'
driver.find_element_by_xpath(area1_option_xpath).click()
do = '서울'  # province label ("Seoul") written to the CSV

for i in range (1, 11):  ## categories: restaurants(2) ... tourism(10)
    category_option_xpath = '((//div[@class="analysis_filter"]/div[@class="filter_area"])[1]/div[@class="filter_option scroll_cst"]/ul[@class="option_list _list1"]/li)[' + str(i) + ']'
    driver.find_element_by_xpath(category_option_xpath).click()  ## Select a specific category
    time.sleep(1)
    category = driver.find_element_by_xpath(category_option_xpath + '/a').text[0:2]  # Get a category name (first 2 chars)
    print(category)
    # "Select all" checkbox for the gu list (area2).
    area2_option_allpath ='((//div[@class="analysis_step v2"]/div[@class="analysis_filter"]/div[@class="filter_area"])[2]/div[@class="filter_option scroll_cst"]/ul/li)[1]/span/label'
    driver.find_element_by_xpath(area2_option_allpath).click()
    # Click Search
    driver.find_element_by_xpath(search_btn_xpath).click()
    time.sleep(6)

    ## Find highest gu: step through weekly graph points until a tooltip
    ## reports the peak value of 100.
    gucount = 10
    highestgunm = ""
    m = 0
    while highestgunm == "":
        m = m +1
        time.sleep(1.5)
        graph_click_xpath = '(//div[@class="section_graph"]/div[@class="com_box_inner"]/div[@class="graph_area"]/div[@class="inner_graph_area _trend_graph bb"]/*[name()="svg"]/*[name()="g"]/*[name()="g" and @class="bb-chart"]/*[name()="g" and @class="bb-event-rects bb-event-rects-single"]/*[name()="rect"])[' + str(m) + ']'
        driver.find_element_by_xpath(graph_click_xpath).click()
        for n in range(1, gucount+1):  ## Get value in each dong
            category_xpath = '(//div[@class="graph_tooltip"]/div[@class="tooltip"])[' + str(n) + ']'
            value = driver.find_element_by_xpath(category_xpath + '/span[@class="value"]').text  # get value
            if value == str(100):
                highestgunm = driver.find_element_by_xpath(category_xpath + '/span[@class="info"]').text  # get gu name
    print("#1 highest gu is "+ highestgunm)
    driver.find_element_by_xpath(area2_option_allpath).click()  ## Remove all selection

    ## Find index of highest gu in the area2 option list.
    k=2
    area2_option_xpath = '((//div[@class="analysis_step v2"]/div[@class="analysis_filter"]/div[@class="filter_area"])[2]/div[@class="filter_option scroll_cst"]/ul/li)[' + str(k) + ']/span/label'
    gunm = driver.find_element_by_xpath(area2_option_xpath).text
    while gunm != highestgunm:
        k=k+1
        area2_option_xpath = '((//div[@class="analysis_step v2"]/div[@class="analysis_filter"]/div[@class="filter_area"])[2]/div[@class="filter_option scroll_cst"]/ul/li)[' + str(k) + ']/span/label'
        gunm = driver.find_element_by_xpath(area2_option_xpath).text
    maxidx = k
    print("#2 highest gu is "+ str(gunm))

    filepath = 'res_'+str(category)+'_seoul_re.csv'  # Output file path
    f = open(filepath, 'w', encoding='utf-8', newline='')  # Open output file
    csvfile = csv.writer(f, quotechar='"', quoting=csv.QUOTE_MINIMAL)  # CSV writer
    csvfile.writerow(['Do', 'Gu', 'Period', 'category', 'Value', 'Group'])  ## Group is for the value recalculation
    k = 1
    group = 0
    for j in range(1, 4):  ## 3 batches cover the 25 gus (9 + 9 + 6, plus the peak gu each time)
        driver.find_element_by_xpath(area2_option_allpath).click()
        driver.find_element_by_xpath(area2_option_allpath).click()  ## Remove the previous selection set (gu)
        group = group + 1
        gucount = 0
        area2_highest_xpath = '((//div[@class="analysis_step v2"]/div[@class="analysis_filter"]/div[@class="filter_area"])[2]/div[@class="filter_option scroll_cst"]/ul/li)[' + str(maxidx) + ']/span/label'
        # Always re-select the peak gu so every batch shares the 100 reference.
        driver.find_element_by_xpath(area2_highest_xpath).click()
        if j < 3:
            while gucount < 9:
                k = k+1
                if k != maxidx:
                    print(str(k) + " and j=" + str(j))
                    area2_option_xpath = '((//div[@class="analysis_step v2"]/div[@class="analysis_filter"]/div[@class="filter_area"])[2]/div[@class="filter_option scroll_cst"]/ul/li)[' + str(k) + ']/span/label'
                    driver.find_element_by_xpath(area2_option_xpath).click()  # Select a specific Gu
                    gucount = gucount +1  ## Add gu count after click
                    gunm = driver.find_element_by_xpath(area2_option_xpath).text
                    print(gunm)
        else:
            # Last batch: only 6 remaining gus.
            while gucount < 6:
                k = k+1
                if k != maxidx:
                    print(str(k) + " and j=" + str(j))
                    area2_option_xpath = '((//div[@class="analysis_step v2"]/div[@class="analysis_filter"]/div[@class="filter_area"])[2]/div[@class="filter_option scroll_cst"]/ul/li)[' + str(k) + ']/span/label'
                    driver.find_element_by_xpath(area2_option_xpath).click()  # Select a specific Gu
                    gucount = gucount +1  ## Add gu count after click
                    gunm = driver.find_element_by_xpath(area2_option_xpath).text
                    print(gunm)
        # Click Search
        driver.find_element_by_xpath(search_btn_xpath).click()
        time.sleep(6)
        for m in range(1, 58):  ## Select graph point by week (57 weekly points)
            time.sleep(1.5)
            graph_click_xpath = '(//div[@class="section_graph"]/div[@class="com_box_inner"]/div[@class="graph_area"]/div[@class="inner_graph_area _trend_graph bb"]/*[name()="svg"]/*[name()="g"]/*[name()="g" and @class="bb-chart"]/*[name()="g" and @class="bb-event-rects bb-event-rects-single"]/*[name()="rect"])[' + str(m) + ']'
            driver.find_element_by_xpath(graph_click_xpath).click()
            period = driver.find_element_by_xpath('//div[@class="tooltip_period"]').text
            period = str(m)+"_"+period
            for n in range(1, gucount+2):  ## Get value in each gu + the maximum gu
                category_xpath = '(//div[@class="graph_tooltip"]/div[@class="tooltip"])[' + str(n) + ']'
                gu = driver.find_element_by_xpath(category_xpath + '/span[@class="info"]').text  # get gu name
                value = driver.find_element_by_xpath(category_xpath + '/span[@class="value"]').text  # get value
                if(group == 1):
                    #print(gu)
                    csvfile.writerow([do, gu, period, category, value, group])
                else:
                    # Skip the peak gu after batch 1 so it is written only once.
                    if(gu != highestgunm):
                        #print(gu)
                        csvfile.writerow([do, gu, period, category, value, group])
    f.close()  ## Save data
51.095588
328
0.605555
924
6,949
4.33658
0.170996
0.087846
0.097579
0.109059
0.746444
0.729723
0.709758
0.709758
0.668081
0.650611
0
0.016766
0.227515
6,949
135
329
51.474074
0.729694
0.079868
0
0.447917
0
0.104167
0.349447
0.281201
0
0
0
0
0
1
0
false
0
0.03125
0
0.03125
0.072917
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
72d1d18f54f4430c7a78560f6b18607b01de98fb
168
py
Python
basetrainer/metric/eval_tools/__init__.py
PanJinquan/pytorch-base-trainer
37799c948f72b2f9d3771ff469e06cdbff4a1d07
[ "MIT" ]
11
2022-01-18T10:07:52.000Z
2022-03-16T02:40:31.000Z
basetrainer/metric/eval_tools/__init__.py
PanJinquan/pytorch-base-trainer
37799c948f72b2f9d3771ff469e06cdbff4a1d07
[ "MIT" ]
null
null
null
basetrainer/metric/eval_tools/__init__.py
PanJinquan/pytorch-base-trainer
37799c948f72b2f9d3771ff469e06cdbff4a1d07
[ "MIT" ]
1
2022-01-26T06:31:29.000Z
2022-01-26T06:31:29.000Z
# -*-coding: utf-8 -*- """ @Project: utils @File : __init__.py.py @Author : panjq @E-mail : pan_jinquan@163.com @Date : 2019-05-10 17:08:02 """
18.666667
33
0.529762
24
168
3.5
0.958333
0
0
0
0
0
0
0
0
0
0
0.146341
0.267857
168
8
34
21
0.536585
0.821429
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
72dd41dd291c81b4e701591d853bb09c52792d97
20,750
py
Python
Pyto Mac/PyObjC/Quartz/ImageKit/_metadata.py
cclauss/Pyto
1c4ccc47e3a91e996bf6ec38c527d244de2cf7ed
[ "MIT" ]
4
2019-03-11T18:05:49.000Z
2021-05-22T21:09:09.000Z
Pyto Mac/PyObjC/Quartz/ImageKit/_metadata.py
cclauss/Pyto
1c4ccc47e3a91e996bf6ec38c527d244de2cf7ed
[ "MIT" ]
null
null
null
Pyto Mac/PyObjC/Quartz/ImageKit/_metadata.py
cclauss/Pyto
1c4ccc47e3a91e996bf6ec38c527d244de2cf7ed
[ "MIT" ]
1
2019-03-18T18:53:36.000Z
2019-03-18T18:53:36.000Z
# This file is generated by objective.metadata # # Last update: Tue Jun 26 07:59:02 2018 import objc, sys if sys.maxsize > 2 ** 32: def sel32or64(a, b): return b else: def sel32or64(a, b): return a if sys.byteorder == 'little': def littleOrBig(a, b): return a else: def littleOrBig(a, b): return b misc = { } constants = '''$IKFilterBrowserDefaultInputImage$IKFilterBrowserExcludeCategories$IKFilterBrowserExcludeFilters$IKFilterBrowserFilterDoubleClickNotification$IKFilterBrowserFilterSelectedNotification$IKFilterBrowserShowCategories$IKFilterBrowserShowPreview$IKFilterBrowserWillPreviewFilterNotification$IKImageBrowserBackgroundColorKey$IKImageBrowserCGImageRepresentationType$IKImageBrowserCGImageSourceRepresentationType$IKImageBrowserCellBackgroundLayer$IKImageBrowserCellForegroundLayer$IKImageBrowserCellLayerTypeBackground$IKImageBrowserCellLayerTypeForeground$IKImageBrowserCellLayerTypePlaceHolder$IKImageBrowserCellLayerTypeSelection$IKImageBrowserCellPlaceHolderLayer$IKImageBrowserCellSelectionLayer$IKImageBrowserCellsHighlightedTitleAttributesKey$IKImageBrowserCellsOutlineColorKey$IKImageBrowserCellsSubtitleAttributesKey$IKImageBrowserCellsTitleAttributesKey$IKImageBrowserGroupBackgroundColorKey$IKImageBrowserGroupFooterLayer$IKImageBrowserGroupHeaderLayer$IKImageBrowserGroupRangeKey$IKImageBrowserGroupStyleKey$IKImageBrowserGroupTitleKey$IKImageBrowserIconRefPathRepresentationType$IKImageBrowserIconRefRepresentationType$IKImageBrowserNSBitmapImageRepresentationType$IKImageBrowserNSDataRepresentationType$IKImageBrowserNSImageRepresentationType$IKImageBrowserNSURLRepresentationType$IKImageBrowserPDFPageRepresentationType$IKImageBrowserPathRepresentationType$IKImageBrowserQCCompositionPathRepresentationType$IKImageBrowserQCCompositionRepresentationType$IKImageBrowserQTMoviePathRepresentationType$IKImageBrowserQTMovieRepresentationType$IKImageBrowserQuickLookPathRepresentationType$IKImageBrowserSelectionColorKey$IKOverlayTypeBackground$IKOverlayTypeImage$IKPictureTa
kerAllowsEditingKey$IKPictureTakerAllowsFileChoosingKey$IKPictureTakerAllowsVideoCaptureKey$IKPictureTakerCropAreaSizeKey$IKPictureTakerImageTransformsKey$IKPictureTakerInformationalTextKey$IKPictureTakerOutputImageMaxSizeKey$IKPictureTakerRemainOpenAfterValidateKey$IKPictureTakerShowAddressBookPicture$IKPictureTakerShowAddressBookPictureKey$IKPictureTakerShowEffectsKey$IKPictureTakerShowEmptyPicture$IKPictureTakerShowEmptyPictureKey$IKPictureTakerShowRecentPictureKey$IKPictureTakerUpdateRecentPictureKey$IKSlideshowAudioFile$IKSlideshowModeImages$IKSlideshowModeOther$IKSlideshowModePDF$IKSlideshowPDFDisplayBox$IKSlideshowPDFDisplayMode$IKSlideshowPDFDisplaysAsBook$IKSlideshowScreen$IKSlideshowStartIndex$IKSlideshowStartPaused$IKSlideshowWrapAround$IKToolModeAnnotate$IKToolModeCrop$IKToolModeMove$IKToolModeNone$IKToolModeRotate$IKToolModeSelect$IKToolModeSelectEllipse$IKToolModeSelectLasso$IKToolModeSelectRect$IKUIFlavorAllowFallback$IKUISizeFlavor$IKUISizeMini$IKUISizeRegular$IKUISizeSmall$IKUImaxSize$IK_ApertureBundleIdentifier$IK_MailBundleIdentifier$IK_PhotosBundleIdentifier$IK_iPhotoBundleIdentifier$''' enums = '''$IKCameraDeviceViewDisplayModeIcon@1$IKCameraDeviceViewDisplayModeTable@0$IKCameraDeviceViewTransferModeFileBased@0$IKCameraDeviceViewTransferModeMemoryBased@1$IKCellsStyleNone@0$IKCellsStyleOutlined@2$IKCellsStyleShadowed@1$IKCellsStyleSubtitled@8$IKCellsStyleTitled@4$IKDeviceBrowserViewDisplayModeIcon@2$IKDeviceBrowserViewDisplayModeOutline@1$IKDeviceBrowserViewDisplayModeTable@0$IKGroupBezelStyle@0$IKGroupDisclosureStyle@1$IKImageBrowserDropBefore@1$IKImageBrowserDropOn@0$IKImageStateInvalid@1$IKImageStateNoImage@0$IKImageStateReady@2$IKScannerDeviceViewDisplayModeAdvanced@1$IKScannerDeviceViewDisplayModeSimple@0$IKScannerDeviceViewTransferModeFileBased@0$IKScannerDeviceViewTransferModeMemoryBased@1$''' misc.update({}) aliases = {'IKImagePickerShowEffectsKey': 'IKPictureTakerShowEffectsKey', 'IKImagePickerOutputImageMaxSizeKey': 
'IKPictureTakerOutputImageMaxSizeKey', 'IKImagePickerImageTransformsKey': 'IKPictureTakerImageTransformsKey', 'IKImagePickerAllowsFileChoosingKey': 'IKPictureTakerAllowsFileChoosingKey', 'IKImagePickerAllowsEditingKey': 'IKPictureTakerAllowsEditingKey', 'IKImagePickerInformationalTextKey': 'IKPictureTakerInformationalTextKey', 'IKImagePickerCropAreaSizeKey': 'IKPictureTakerCropAreaSizeKey', 'IKImagePickerAllowsVideoCaptureKey': 'IKPictureTakerAllowsVideoCaptureKey', 'IKImagePickerUpdateRecentPictureKey': 'IKPictureTakerUpdateRecentPictureKey', 'IKImagePickerShowRecentPictureKey': 'IKPictureTakerShowRecentPictureKey'} r = objc.registerMetaDataForSelector objc._updatingMetadata(True) try: r(b'IKCameraDeviceView', b'canDeleteSelectedItems', {'retval': {'type': b'Z'}}) r(b'IKCameraDeviceView', b'canDownloadSelectedItems', {'retval': {'type': b'Z'}}) r(b'IKCameraDeviceView', b'canRotateSelectedItemsLeft', {'retval': {'type': b'Z'}}) r(b'IKCameraDeviceView', b'canRotateSelectedItemsRight', {'retval': {'type': b'Z'}}) r(b'IKCameraDeviceView', b'displaysDownloadsDirectoryControl', {'retval': {'type': b'Z'}}) r(b'IKCameraDeviceView', b'displaysPostProcessApplicationControl', {'retval': {'type': b'Z'}}) r(b'IKCameraDeviceView', b'hasDisplayModeIcon', {'retval': {'type': b'Z'}}) r(b'IKCameraDeviceView', b'hasDisplayModeTable', {'retval': {'type': b'Z'}}) r(b'IKCameraDeviceView', b'selectIndexes:byExtendingSelection:', {'arguments': {3: {'type': b'Z'}}}) r(b'IKCameraDeviceView', b'setDisplaysDownloadsDirectoryControl:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKCameraDeviceView', b'setDisplaysPostProcessApplicationControl:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKCameraDeviceView', b'setHasDisplayModeIcon:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKCameraDeviceView', b'setHasDisplayModeTable:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKDeviceBrowserView', b'displaysLocalCameras', {'retval': {'type': b'Z'}}) r(b'IKDeviceBrowserView', b'displaysLocalScanners', {'retval': 
{'type': b'Z'}}) r(b'IKDeviceBrowserView', b'displaysNetworkCameras', {'retval': {'type': b'Z'}}) r(b'IKDeviceBrowserView', b'displaysNetworkScanners', {'retval': {'type': b'Z'}}) r(b'IKDeviceBrowserView', b'setDisplaysLocalCameras:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKDeviceBrowserView', b'setDisplaysLocalScanners:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKDeviceBrowserView', b'setDisplaysNetworkCameras:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKDeviceBrowserView', b'setDisplaysNetworkScanners:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKFilterBrowserPanel', b'beginSheetWithOptions:modalForWindow:modalDelegate:didEndSelector:contextInfo:', {'arguments': {5: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) r(b'IKFilterBrowserPanel', b'beginWithOptions:modelessDelegate:didEndSelector:contextInfo:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) r(b'IKFilterBrowserView', b'setPreviewState:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageBrowserCell', b'isSelected', {'retval': {'type': b'Z'}}) r(b'IKImageBrowserView', b'allowsDroppingOnItems', {'retval': {'type': b'Z'}}) r(b'IKImageBrowserView', b'allowsEmptySelection', {'retval': {'type': b'Z'}}) r(b'IKImageBrowserView', b'allowsMultipleSelection', {'retval': {'type': b'Z'}}) r(b'IKImageBrowserView', b'allowsReordering', {'retval': {'type': b'Z'}}) r(b'IKImageBrowserView', b'animates', {'retval': {'type': b'Z'}}) r(b'IKImageBrowserView', b'canControlQuickLookPanel', {'retval': {'type': b'Z'}}) r(b'IKImageBrowserView', b'constrainsToOriginalSize', {'retval': {'type': b'Z'}}) r(b'IKImageBrowserView', b'isGroupExpandedAtIndex:', {'retval': {'type': b'Z'}}) r(b'IKImageBrowserView', b'setAllowsDroppingOnItems:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageBrowserView', b'setAllowsEmptySelection:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageBrowserView', b'setAllowsMultipleSelection:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageBrowserView', b'setAllowsReordering:', 
{'arguments': {2: {'type': b'Z'}}}) r(b'IKImageBrowserView', b'setAnimates:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageBrowserView', b'setCanControlQuickLookPanel:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageBrowserView', b'setConstrainsToOriginalSize:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageBrowserView', b'setSelectionIndexes:byExtendingSelection:', {'arguments': {3: {'type': b'Z'}}}) r(b'IKImagePicker', b'beginImagePickerSheetForWindow:withDelegate:didEndSelector:contextInfo:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@I^v', b'v@:@Q^v')}}}) r(b'IKImagePicker', b'beginImagePickerWithDelegate:didEndSelector:contextInfo:', {'arguments': {3: {'sel_of_type': sel32or64(b'v@:@I^v', b'v@:@Q^v')}}}) r(b'IKImageView', b'autohidesScrollers', {'retval': {'type': b'Z'}}) r(b'IKImageView', b'autoresizes', {'retval': {'type': b'Z'}}) r(b'IKImageView', b'doubleClickOpensImageEditPanel', {'retval': {'type': b'Z'}}) r(b'IKImageView', b'editable', {'retval': {'type': b'Z'}}) r(b'IKImageView', b'hasHorizontalScroller', {'retval': {'type': b'Z'}}) r(b'IKImageView', b'hasVerticalScroller', {'retval': {'type': b'Z'}}) r(b'IKImageView', b'setAutohidesScrollers:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageView', b'setAutoresizes:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageView', b'setDoubleClickOpensImageEditPanel:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageView', b'setEditable:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageView', b'setHasHorizontalScroller:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageView', b'setHasVerticalScroller:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageView', b'setSupportsDragAndDrop:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKImageView', b'supportsDragAndDrop', {'retval': {'type': b'Z'}}) r(b'IKPictureTaker', b'beginPictureTakerSheetForWindow:withDelegate:didEndSelector:contextInfo:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) r(b'IKPictureTaker', 
b'beginPictureTakerWithDelegate:didEndSelector:contextInfo:', {'arguments': {3: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) r(b'IKPictureTaker', b'mirroring', {'retval': {'type': b'Z'}}) r(b'IKPictureTaker', b'popUpRecentsMenuForView:withDelegate:didEndSelector:contextInfo:', {'arguments': {4: {'sel_of_type': sel32or64(b'v@:@i^v', b'v@:@q^v')}}}) r(b'IKPictureTaker', b'setMirroring:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKScannerDeviceView', b'displaysDownloadsDirectoryControl', {'retval': {'type': b'Z'}}) r(b'IKScannerDeviceView', b'displaysPostProcessApplicationControl', {'retval': {'type': b'Z'}}) r(b'IKScannerDeviceView', b'hasDisplayModeAdvanced', {'retval': {'type': b'Z'}}) r(b'IKScannerDeviceView', b'hasDisplayModeSimple', {'retval': {'type': b'Z'}}) r(b'IKScannerDeviceView', b'setDisplaysDownloadsDirectoryControl:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKScannerDeviceView', b'setDisplaysPostProcessApplicationControl:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKScannerDeviceView', b'setHasDisplayModeAdvanced:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKScannerDeviceView', b'setHasDisplayModeSimple:', {'arguments': {2: {'type': b'Z'}}}) r(b'IKSlideshow', b'canExportToApplication:', {'retval': {'type': b'Z'}}) r(b'NSObject', b'cameraDeviceView:didDownloadFile:location:fileData:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}, 6: {'type': b'@'}}}) r(b'NSObject', b'cameraDeviceView:didEncounterError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) r(b'NSObject', b'cameraDeviceViewSelectionDidChange:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) r(b'NSObject', b'canExportSlideshowItemAtIndex:toApplication:', {'required': False, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}, 3: {'type': b'@'}}}) r(b'NSObject', 
b'deviceBrowserView:didEncounterError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) r(b'NSObject', b'deviceBrowserView:selectionDidChange:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) r(b'NSObject', b'hasAdjustMode', {'required': False, 'retval': {'type': b'Z'}}) r(b'NSObject', b'hasDetailsMode', {'required': False, 'retval': {'type': b'Z'}}) r(b'NSObject', b'hasEffectsMode', {'required': False, 'retval': {'type': b'Z'}}) r(b'NSObject', b'image', {'required': True, 'retval': {'type': b'^{CGImage=}'}}) r(b'NSObject', b'imageBrowser:backgroundWasRightClickedWithEvent:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) r(b'NSObject', b'imageBrowser:cellAtIndex:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'L')}}}) r(b'NSObject', b'imageBrowser:cellWasDoubleClickedAtIndex:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}}}) r(b'NSObject', b'imageBrowser:cellWasRightClickedAtIndex:withEvent:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}, 4: {'type': b'@'}}}) r(b'NSObject', b'imageBrowser:groupAtIndex:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}}}) r(b'NSObject', b'imageBrowser:itemAtIndex:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': sel32or64(b'I', b'Q')}}}) r(b'NSObject', b'imageBrowser:moveCellsAtIndexes:toIndex:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'L')}}}) r(b'NSObject', b'imageBrowser:moveItemsAtIndexes:toIndex:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': sel32or64(b'I', b'Q')}}}) r(b'NSObject', b'imageBrowser:removeCellsAtIndexes:', {'retval': {'type': b'v'}, 'arguments': {2: 
{'type': b'@'}, 3: {'type': b'@'}}}) r(b'NSObject', b'imageBrowser:removeItemsAtIndexes:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) r(b'NSObject', b'imageBrowser:writeCellsAtIndexes:toPasteboard:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) r(b'NSObject', b'imageBrowser:writeItemsAtIndexes:toPasteboard:', {'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) r(b'NSObject', b'imageBrowserSelectionDidChange:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}}) r(b'NSObject', b'imageProperties', {'required': False, 'retval': {'type': b'@'}}) r(b'NSObject', b'imageRepresentation', {'retval': {'type': b'@'}}) r(b'NSObject', b'imageRepresentationType', {'retval': {'type': b'@'}}) r(b'NSObject', b'imageSubtitle', {'retval': {'type': b'@'}}) r(b'NSObject', b'imageTitle', {'retval': {'type': b'@'}}) r(b'NSObject', b'imageUID', {'retval': {'type': b'@'}}) r(b'NSObject', b'imageVersion', {'retval': {'type': sel32or64(b'I', b'Q')}}) r(b'NSObject', b'isSelectable', {'retval': {'type': b'Z'}}) r(b'NSObject', b'nameOfSlideshowItemAtIndex:', {'required': False, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) r(b'NSObject', b'numberOfCellsInImageBrowser:', {'retval': {'type': sel32or64(b'I', b'L')}, 'arguments': {2: {'type': b'@'}}}) r(b'NSObject', b'numberOfGroupsInImageBrowser:', {'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}}}) r(b'NSObject', b'numberOfItemsInImageBrowser:', {'retval': {'type': sel32or64(b'I', b'Q')}, 'arguments': {2: {'type': b'@'}}}) r(b'NSObject', b'numberOfSlideshowItems', {'required': True, 'retval': {'type': sel32or64(b'I', b'Q')}}) r(b'NSObject', b'provideViewForUIConfiguration:excludedKeys:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) r(b'NSObject', 
b'saveOptions:shouldShowUTType:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) r(b'NSObject', b'scannerDeviceView:didEncounterError:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}}) r(b'NSObject', b'scannerDeviceView:didScanToBandData:scanInfo:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) r(b'NSObject', b'scannerDeviceView:didScanToURL:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}}) r(b'NSObject', b'scannerDeviceView:didScanToURL:fileData:error:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}, 5: {'type': b'@'}}}) r(b'NSObject', b'setImage:imageProperties:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'^{CGImage=}'}, 3: {'type': b'@'}}}) r(b'NSObject', b'slideshowDidChangeCurrentIndex:', {'required': False, 'retval': {'type': b'v'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) r(b'NSObject', b'slideshowDidStop', {'required': False, 'retval': {'type': b'v'}}) r(b'NSObject', b'slideshowItemAtIndex:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': sel32or64(b'I', b'Q')}}}) r(b'NSObject', b'slideshowWillStart', {'required': False, 'retval': {'type': b'v'}}) r(b'NSObject', b'thumbnailWithMaximumSize:', {'required': False, 'retval': {'type': b'^{CGImage=}'}, 'arguments': {2: {'type': sel32or64(b'{_NSSize=ff}', b'{CGSize=dd}')}}}) finally: objc._updatingMetadata(False) protocols={'IKImageBrowserItem': objc.informal_protocol('IKImageBrowserItem', [objc.selector(None, b'imageTitle', b'@@:', isRequired=False), objc.selector(None, b'imageSubtitle', b'@@:', isRequired=False), objc.selector(None, b'imageRepresentationType', b'@@:', isRequired=False), objc.selector(None, b'imageUID', 
b'@@:', isRequired=False), objc.selector(None, b'isSelectable', b'Z@:', isRequired=False), objc.selector(None, b'imageVersion', sel32or64(b'I@:', b'Q@:'), isRequired=False), objc.selector(None, b'imageRepresentation', b'@@:', isRequired=False)]), 'IKImageBrowserDataSourceDeprecated': objc.informal_protocol('IKImageBrowserDataSourceDeprecated', [objc.selector(None, b'imageBrowser:moveCellsAtIndexes:toIndex:', sel32or64(b'Z@:@@I', b'Z@:@@L'), isRequired=False), objc.selector(None, b'imageBrowser:cellAtIndex:', sel32or64(b'@@:@I', b'@@:@L'), isRequired=False), objc.selector(None, b'numberOfCellsInImageBrowser:', sel32or64(b'I@:@', b'L@:@'), isRequired=False), objc.selector(None, b'imageBrowser:writeCellsAtIndexes:toPasteboard:', b'v@:@@@', isRequired=False), objc.selector(None, b'imageBrowser:removeCellsAtIndexes:', b'v@:@@', isRequired=False)]), 'IKSaveOptionsDelegate': objc.informal_protocol('IKSaveOptionsDelegate', [objc.selector(None, b'saveOptions:shouldShowUTType:', b'Z@:@@', isRequired=False)]), 'IKImageBrowserDelegate': objc.informal_protocol('IKImageBrowserDelegate', [objc.selector(None, b'imageBrowser:cellWasRightClickedAtIndex:withEvent:', sel32or64(b'v@:@I@', b'v@:@Q@'), isRequired=False), objc.selector(None, b'imageBrowserSelectionDidChange:', b'v@:@', isRequired=False), objc.selector(None, b'imageBrowser:cellWasDoubleClickedAtIndex:', sel32or64(b'v@:@I', b'v@:@Q'), isRequired=False), objc.selector(None, b'imageBrowser:backgroundWasRightClickedWithEvent:', b'v@:@@', isRequired=False)]), 'IKImageBrowserDataSource': objc.informal_protocol('IKImageBrowserDataSource', [objc.selector(None, b'imageBrowser:groupAtIndex:', sel32or64(b'@@:@I', b'@@:@Q'), isRequired=False), objc.selector(None, b'numberOfItemsInImageBrowser:', sel32or64(b'I@:@', b'Q@:@'), isRequired=False), objc.selector(None, b'imageBrowser:moveItemsAtIndexes:toIndex:', sel32or64(b'Z@:@@I', b'Z@:@@Q'), isRequired=False), objc.selector(None, b'numberOfGroupsInImageBrowser:', sel32or64(b'I@:@', 
b'Q@:@'), isRequired=False), objc.selector(None, b'imageBrowser:itemAtIndex:', sel32or64(b'@@:@I', b'@@:@Q'), isRequired=False), objc.selector(None, b'imageBrowser:removeItemsAtIndexes:', b'v@:@@', isRequired=False), objc.selector(None, b'imageBrowser:writeItemsAtIndexes:toPasteboard:', sel32or64(b'I@:@@@', b'Q@:@@@'), isRequired=False)])} expressions = {} # END OF FILE
138.333333
2,815
0.697108
2,112
20,750
6.836648
0.164773
0.056791
0.057899
0.032966
0.462013
0.439643
0.433825
0.401413
0.290325
0.206385
0
0.015771
0.080193
20,750
149
2,816
139.261745
0.740752
0.00453
0
0.014184
1
0.014184
0.612271
0.381289
0
0
0
0
0
1
0.028369
false
0
0.007092
0.028369
0.035461
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
1
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
72e2af0757ca4cc964854da8b5b0626e17ee8b70
491
py
Python
matrix/matrix.py
ghaith96/exercism-python
5b0c336e6442a987c9202a42e135a1d94dcea23f
[ "MIT" ]
null
null
null
matrix/matrix.py
ghaith96/exercism-python
5b0c336e6442a987c9202a42e135a1d94dcea23f
[ "MIT" ]
null
null
null
matrix/matrix.py
ghaith96/exercism-python
5b0c336e6442a987c9202a42e135a1d94dcea23f
[ "MIT" ]
null
null
null
class Matrix: def __init__(self, matrix_string: str): # another solution (functional) # [list(map(int, elements.split())) for elements in matrix_string.splitlines()] self.matrix = [ [int(element) for element in elements.split()] for elements in matrix_string.splitlines() ] def row(self, index): return self.matrix[index - 1].copy() def column(self, index): return [row[index - 1] for row in self.matrix]
32.733333
87
0.608961
60
491
4.866667
0.416667
0.136986
0.109589
0.164384
0.328767
0.328767
0.328767
0.328767
0
0
0
0.005602
0.272912
491
14
88
35.071429
0.812325
0.217923
0
0
0
0
0
0
0
0
0
0
0
1
0.3
false
0
0
0.2
0.6
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
72eff8ab59a29cb4c91935e7d39e4cbb00c98a9e
386
py
Python
sparsechem/__init__.py
muellren/SparseChem
3cf197aea626400fe64b1b5c49b8d40919c50c9c
[ "MIT" ]
null
null
null
sparsechem/__init__.py
muellren/SparseChem
3cf197aea626400fe64b1b5c49b8d40919c50c9c
[ "MIT" ]
null
null
null
sparsechem/__init__.py
muellren/SparseChem
3cf197aea626400fe64b1b5c49b8d40919c50c9c
[ "MIT" ]
null
null
null
# Copyright (c) 2020 KU Leuven from .models import SparseLinear, SparseInputNet, SparseFFN, LastNet, MiddleNet, sparse_split2 from .data import SparseDataset, sparse_collate from .utils import all_metrics, compute_metrics, evaluate_binary, train_binary, count_parameters, fold_inputs, predict, print_metrics from .utils import load_sparse, load_results from .version import __version__
55.142857
133
0.839378
50
386
6.18
0.68
0.058252
0.097087
0
0
0
0
0
0
0
0
0.014451
0.103627
386
6
134
64.333333
0.878613
0.072539
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0.2
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
f43abc59033835946f528916137fbcaa78c5466d
171
py
Python
data_structure/exceptions/collection_exeption.py
dosart/Linear-data-structures
0f13c254e02d25a47d4b7d0bf6fb5d510cc0c90b
[ "MIT" ]
2
2021-03-29T07:55:53.000Z
2022-03-25T05:53:11.000Z
data_structure/exceptions/collection_exeption.py
dosart/Linear-data-structures
0f13c254e02d25a47d4b7d0bf6fb5d510cc0c90b
[ "MIT" ]
null
null
null
data_structure/exceptions/collection_exeption.py
dosart/Linear-data-structures
0f13c254e02d25a47d4b7d0bf6fb5d510cc0c90b
[ "MIT" ]
null
null
null
"""Exceptions for BaseCollection.""" class CollectionIsEmptyExeption(Exception): """Exception class if the collection is empty.""" pass # noqa: WPS420, WPS604
21.375
53
0.71345
17
171
7.176471
0.882353
0
0
0
0
0
0
0
0
0
0
0.042254
0.169591
171
7
54
24.428571
0.816901
0.561404
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
f44eca0bf5d126ad85fe0c3cd3efc04a65c1e3ff
470
py
Python
flightApp/serializers.py
rahulynot/flightservices
f0f87c1fe159c06558c70c25de6df212de0b6972
[ "Apache-2.0" ]
null
null
null
flightApp/serializers.py
rahulynot/flightservices
f0f87c1fe159c06558c70c25de6df212de0b6972
[ "Apache-2.0" ]
null
null
null
flightApp/serializers.py
rahulynot/flightservices
f0f87c1fe159c06558c70c25de6df212de0b6972
[ "Apache-2.0" ]
null
null
null
from rest_framework import serializers from .models import Flight, Passenger, Reservation class FlightSerializer(serializers.ModelSerializer): class Meta: model = Flight fields = "__all__" class PassengerSerializer(serializers.ModelSerializer): class Meta: model = Passenger fields = "__all__" class ReservationSerializer(serializers.ModelSerializer): class Meta: model = Reservation fields = "__all__"
21.363636
57
0.708511
41
470
7.804878
0.439024
0.24375
0.290625
0.328125
0.375
0
0
0
0
0
0
0
0.22766
470
21
58
22.380952
0.881543
0
0
0.428571
0
0
0.044681
0
0
0
0
0
0
1
0
false
0.214286
0.142857
0
0.571429
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
f45f9cc046d98fe6be431c80398eb57a14aa1491
132
py
Python
youpi/semi_singleton.py
Gabriel-Dropout/youpi
aef7a77b0dd8775eef4c54de9127605da143818a
[ "MIT" ]
null
null
null
youpi/semi_singleton.py
Gabriel-Dropout/youpi
aef7a77b0dd8775eef4c54de9127605da143818a
[ "MIT" ]
null
null
null
youpi/semi_singleton.py
Gabriel-Dropout/youpi
aef7a77b0dd8775eef4c54de9127605da143818a
[ "MIT" ]
null
null
null
# 한 YouTube객체에서 생성된 Stream 객체가 공유하는 유사 싱글톤 클래스 class SemiSingleton: def __init__(self, title: str): self.title = title
33
48
0.689394
19
132
4.578947
0.842105
0.206897
0
0
0
0
0
0
0
0
0
0
0.242424
132
4
49
33
0.87
0.333333
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
f4734865ef4a7890eac1b3566015952a22f92310
1,077
py
Python
grammpy/exceptions/__init__.py
PatrikValkovic/grammpy
8308a1fd349bf9ea0d267360cc9a4ab20d1629e8
[ "MIT" ]
1
2021-02-04T12:41:08.000Z
2021-02-04T12:41:08.000Z
grammpy/exceptions/__init__.py
PatrikValkovic/grammpy
8308a1fd349bf9ea0d267360cc9a4ab20d1629e8
[ "MIT" ]
3
2017-07-08T16:28:52.000Z
2020-04-23T18:06:24.000Z
grammpy/exceptions/__init__.py
PatrikValkovic/grammpy
8308a1fd349bf9ea0d267360cc9a4ab20d1629e8
[ "MIT" ]
1
2021-02-04T12:41:10.000Z
2021-02-04T12:41:10.000Z
#!/usr/bin/env python """ :Author Patrik Valkovic :Created 08.07.2017 14:03 :Licence MIT Part of grammpy """ from .CannotConvertException import CannotConvertException from .CantCreateSingleRuleException import CantCreateSingleRuleException from .GrammpyException import GrammpyException from .MultipleDefinitionException import MultipleDefinitionException from .NonterminalDoesNotExistsException import NonterminalDoesNotExistsException from .NotASingleSymbolException import NotASingleSymbolException from .NotNonterminalException import NotNonterminalException from .NotParsedException import NotParsedException from .NotRuleException import NotRuleException from .RuleException import RuleException from .RuleNotDefinedException import RuleNotDefinedException from .RuleSyntaxException import RuleSyntaxException from .StartSymbolNotSetException import StartSymbolNotSetException from .TerminalDoesNotExistsException import TerminalDoesNotExistsException from .TreeDeletedException import TreeDeletedException from .UselessEpsilonException import UselessEpsilonException
41.423077
80
0.89415
82
1,077
11.743902
0.439024
0
0
0
0
0
0
0
0
0
0
0.012048
0.075209
1,077
25
81
43.08
0.954819
0.091922
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
f481d51b6a488b1e6f86a201b719cbc29172173c
125
py
Python
codes_auto/1627.last-moment-before-all-ants-fall-out-of-a-plank.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
codes_auto/1627.last-moment-before-all-ants-fall-out-of-a-plank.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
codes_auto/1627.last-moment-before-all-ants-fall-out-of-a-plank.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
# # @lc app=leetcode.cn id=1627 lang=python3 # # [1627] last-moment-before-all-ants-fall-out-of-a-plank # None # @lc code=end
17.857143
56
0.696
23
125
3.782609
0.913043
0
0
0
0
0
0
0
0
0
0
0.081081
0.112
125
7
57
17.857143
0.702703
0.864
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
f4900c43e5d6ab8c0eabc701ae92335c8e74d73c
64
py
Python
week06/lecture/examples/src6/1/hello0.py
uldash/CS50x
c3ee0f42ad514b57a13c3ffbb96238b3ca3730e1
[ "MIT" ]
null
null
null
week06/lecture/examples/src6/1/hello0.py
uldash/CS50x
c3ee0f42ad514b57a13c3ffbb96238b3ca3730e1
[ "MIT" ]
null
null
null
week06/lecture/examples/src6/1/hello0.py
uldash/CS50x
c3ee0f42ad514b57a13c3ffbb96238b3ca3730e1
[ "MIT" ]
1
2020-11-24T23:25:26.000Z
2020-11-24T23:25:26.000Z
# A program that says hello to the world print("hello, world")
16
40
0.71875
11
64
4.181818
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.1875
64
3
41
21.333333
0.884615
0.59375
0
0
0
0
0.5
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
be3f9eff8e1d52dc3f8341861453c2ce3c84f944
87
py
Python
oeis/__init__.py
EnriquePH/OEIS.py
136acf9f14121d6d50eb5296a54b87d6c5f36622
[ "MIT" ]
1
2021-06-13T18:57:56.000Z
2021-06-13T18:57:56.000Z
oeis/__init__.py
EnriquePH/OEIS.py
136acf9f14121d6d50eb5296a54b87d6c5f36622
[ "MIT" ]
null
null
null
oeis/__init__.py
EnriquePH/OEIS.py
136acf9f14121d6d50eb5296a54b87d6c5f36622
[ "MIT" ]
null
null
null
#!/usr/bin/env python """ oeis_py Author: Enrique Pérez Herrero Date: 13-Jun-2021 """
10.875
29
0.689655
14
87
4.214286
1
0
0
0
0
0
0
0
0
0
0
0.08
0.137931
87
7
30
12.428571
0.706667
0.873563
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
be635ea9026ee2911bec1756db558956b484099d
499
py
Python
torchnlp/nn/__init__.py
JiaqiLiu/PyTorch-NLP
71d2ce1e8b8da5ab4e7732d1ebf971150986e6c8
[ "BSD-3-Clause" ]
2,125
2018-03-17T23:31:03.000Z
2022-03-31T12:20:14.000Z
torchnlp/nn/__init__.py
zhengmingzhang/PyTorch-NLP
34a98b5fa5d2e13761546b94770aed12388528f2
[ "BSD-3-Clause" ]
109
2018-03-21T00:38:51.000Z
2021-12-24T08:34:55.000Z
torchnlp/nn/__init__.py
zhengmingzhang/PyTorch-NLP
34a98b5fa5d2e13761546b94770aed12388528f2
[ "BSD-3-Clause" ]
270
2018-03-18T16:47:01.000Z
2022-03-17T08:01:58.000Z
from torchnlp.nn.attention import Attention from torchnlp.nn.lock_dropout import LockedDropout from torchnlp.nn.weight_drop import WeightDropGRU from torchnlp.nn.weight_drop import WeightDropLSTM from torchnlp.nn.weight_drop import WeightDropLinear from torchnlp.nn.weight_drop import WeightDrop from torchnlp.nn.cnn_encoder import CNNEncoder __all__ = [ 'LockedDropout', 'Attention', 'CNNEncoder', 'WeightDrop', 'WeightDropGRU', 'WeightDropLSTM', 'WeightDropLinear', ]
27.722222
52
0.787575
56
499
6.839286
0.321429
0.219321
0.255875
0.208877
0.313316
0.313316
0
0
0
0
0
0
0.136273
499
17
53
29.352941
0.888631
0
0
0
0
0
0.170341
0
0
0
0
0
0
1
0
false
0
0.4375
0
0.4375
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
be6370c98794d10e9edb2a961dfdaaf17da47401
282
py
Python
gbvision/gui/recording_camera_window.py
computerboy0555/GBVision
79fc9ba09865bfd9c7a39abaa3980c46ce090b07
[ "Apache-2.0" ]
16
2019-04-15T18:52:58.000Z
2022-02-13T23:00:46.000Z
gbvision/gui/recording_camera_window.py
computerboy0555/GBVision
79fc9ba09865bfd9c7a39abaa3980c46ce090b07
[ "Apache-2.0" ]
2
2019-04-15T19:00:05.000Z
2019-04-19T15:47:21.000Z
gbvision/gui/recording_camera_window.py
computerboy0555/GBVision
79fc9ba09865bfd9c7a39abaa3980c46ce090b07
[ "Apache-2.0" ]
3
2019-05-03T13:48:25.000Z
2019-09-22T14:03:49.000Z
from .recording_readable_window import RecordingReadableWindow from .camera_window import CameraWindow class RecordingCameraWindow(RecordingReadableWindow, CameraWindow): """ a basic window that displays the video from a camera and records the video to a file """
28.2
67
0.787234
32
282
6.84375
0.625
0.109589
0
0
0
0
0
0
0
0
0
0
0.170213
282
9
68
31.333333
0.935897
0.297872
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
be93541b729e94fd72d5ed59dce2bbf0718db5f9
670
py
Python
tensorflow/mantaGen/util/git.py
BrianKmdy/mantaflow
273d6c148374316e4d04cae4f46fed56a630e183
[ "Apache-2.0" ]
95
2019-12-04T21:39:51.000Z
2022-03-12T01:03:36.000Z
tensorflow/mantaGen/util/git.py
BrianKmdy/mantaflow
273d6c148374316e4d04cae4f46fed56a630e183
[ "Apache-2.0" ]
4
2019-12-21T15:08:54.000Z
2021-02-28T19:40:08.000Z
tensorflow/mantaGen/util/git.py
BrianKmdy/mantaflow
273d6c148374316e4d04cae4f46fed56a630e183
[ "Apache-2.0" ]
26
2020-01-21T00:48:47.000Z
2022-01-14T06:04:20.000Z
#****************************************************************************** # # MantaGen # Copyright 2018 Steffen Wiewel, Moritz Becher, Nils Thuerey # # This program is free software, distributed under the terms of the # Apache License, Version 2.0 # http://www.apache.org/licenses/LICENSE-2.0 # #****************************************************************************** from subprocess import check_output def revision(): return check_output(["git", "rev-parse", "--short", "HEAD"], universal_newlines=True).rstrip() def status(): return check_output(["git", "status", "-s"], universal_newlines=True) def is_clean(): return not bool(status())
31.904762
98
0.543284
70
670
5.114286
0.714286
0.092179
0.094972
0.111732
0
0
0
0
0
0
0
0.013445
0.11194
670
21
99
31.904762
0.588235
0.540299
0
0
0
0
0.113712
0
0
0
0
0
0
1
0.428571
true
0
0.142857
0.428571
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
4
beb22525cf43ca12981ca14654382fc805f4d8f3
30,068
py
Python
ai/IA VFinale.py
Division01/AIGameRunner
1f59fbae6efc733874d7088d864d7260f2e1ea53
[ "MIT" ]
null
null
null
ai/IA VFinale.py
Division01/AIGameRunner
1f59fbae6efc733874d7088d864d7260f2e1ea53
[ "MIT" ]
null
null
null
ai/IA VFinale.py
Division01/AIGameRunner
1f59fbae6efc733874d7088d864d7260f2e1ea53
[ "MIT" ]
null
null
null
import cherrypy import sys import random class Server: @cherrypy.expose @cherrypy.tools.json_in() @cherrypy.tools.json_out() def move(self): # La fonction qui est appelée à chaque tour # Deal with CORS cherrypy.response.headers['Access-Control-Allow-Origin'] = '*' cherrypy.response.headers['Access-Control-Allow-Methods'] = 'GET, POST, OPTIONS' cherrypy.response.headers['Access-Control-Allow-Headers'] = 'Content-Type, Authorization, X-Requested-With' if cherrypy.request.method == "OPTIONS": return '' body = cherrypy.request.json # On importe l'etat du plateau de jeu you = 0 # On defini quel joueur on est him = 1 if body["players"][1] == body["you"]: you = 1 him = 0 Messages = ["Bien essaye", "Peut-mieux faire", "Ma grand-mere joue mieux que toi", "C'est une IA ou un enfant de 4 ans contre moi ?", "T'es nul", "Mon chien aurait pu faire ton coup", "Meme un zero serait surcoter ton IA"] random.shuffle(Messages) check_ligne = self.check_line(body, you, him) # On analyse la situation du plateau check_col = self.check_col(body, you, him) if check_ligne["4following"] == True: # On joue en fonction de la situation du plateau if check_ligne["player"] == you: move = self.play_for_win(body, check_ligne["index"], "ligne", you, him) message = "4 en lignes" elif check_col["4following"] == True: if check_col["player"] == you: move = self.play_for_win(body, check_col["index"], "colonne", you, him) message = "4 en colonnes" else: move = self.play_for_counter(body,check_col["index"],"colonne",you,him) message = "4 en colonnes him" else: move = self.play_for_counter(body, check_ligne["index"], "ligne", you, him) message = "4 en lignes him" elif check_col["4following"] == True: if check_col["player"] == you: move = self.play_for_win(body, check_col["index"], "colonne", you, him) message = "4 en colonnes" else: move = self.play_for_counter(body, check_col["index"], "colonne", you, him) message = "4 en colonnes him" elif check_ligne["4following"] == "gauche": move = 
self.play_for_pre_win(check_ligne["index"], "ligne", him, body, you, "gauche") message = "3 en lignes gauche" elif check_ligne["4following"] == "milieu": move = self.play_for_pre_win(check_ligne["index"], "ligne", him, body, you, "milieu") message = "3 en lignes milieu" elif check_ligne["4following"] == "droit": move = self.play_for_pre_win(check_ligne["index"], "ligne", him, body, you, "droit") message = "3 en lignes droit" elif check_col["4following"] == "haut": move = self.play_for_pre_win(check_col["index"], "colonne", him, body, you, "haut") message = "3 en colonne haut" elif check_col["4following"] == "milieu": move = self.play_for_pre_win(check_col["index"], "colonne", him, body, you, "milieu") message = "3 en colonne milieu" elif check_col["4following"] == "bas": move = self.play_for_pre_win(check_col["index"], "colonne", him, body, you, "bas") message = "3 en colonne bas" else: move = self.coupRandom(you, body) message = "random" return {"move": move, "message": message} def coupRandom(self, player, body,index=None): # Fonction qui joue aléatoirement les GoodMoves si besoin coupPossibles = [0, 1, 2, 3, 4, 5, 9, 10, 14, 15, 19, 20, 21, 22, 23, 24] # On defini en liste les coups autorises en fonction des positions jouees if index != None: coupPossibles.remove(index) dirPossCoinHautG = ["S", "E"] dirPossCoinHautD = ["S", "W"] dirPossCoinBasG = ["N", "E"] dirPossCoinBasD = ["N", "W"] dirPossLHaut = ["S", "W", "E"] dirPossLBas = ["N", "W", "E"] dirPossCGauche = ["N", "S", "E"] dirPossCDroite = ["N", "S", "W"] cube = coupPossibles[random.randint(0, len(coupPossibles) - 1)] # On choisi un coup aleatoire dans la liste des positions aux extremites du jeu if cube == 0: # En fonction du coup, on prends une des directions autorisées aléatoirement direction = dirPossCoinHautG[random.randint(0, len(dirPossCoinHautG) - 1)] elif cube == 4: direction = dirPossCoinHautD[random.randint(0, len(dirPossCoinHautD) - 1)] elif cube < 5: direction = dirPossLHaut[random.randint(0, 
len(dirPossLHaut) - 1)] elif cube % 5 == 0 and cube != 20: direction = dirPossCGauche[random.randint(0, len(dirPossCGauche) - 1)] elif cube == 20: direction = dirPossCoinBasG[random.randint(0, len(dirPossCoinBasG) - 1)] elif cube == 24: direction = dirPossCoinBasD[random.randint(0, len(dirPossCoinBasD) - 1)] elif cube > 19: direction = dirPossLBas[random.randint(0, len(dirPossLBas) - 1)] elif (cube + 1) % 5 == 0: direction = dirPossCDroite[random.randint(0, len(dirPossCDroite) - 1)] CaseLibre = False for coup in coupPossibles: # On donne priorité aux cases vides pour prendre le max de cases sur le plateau if body["game"][coup] == None: CaseLibre = True if CaseLibre == True: while body["game"][cube] != None: # Si le cube choisi n'est pas vide, il en choisi un nouveau cube = coupPossibles[random.randint(0, len(coupPossibles) - 1)] if cube == 0: direction = dirPossCoinHautG[random.randint(0, len(dirPossCoinHautG) - 1)] elif cube == 4: direction = dirPossCoinHautD[random.randint(0, len(dirPossCoinHautD) - 1)] elif cube < 5: direction = dirPossLHaut[random.randint(0, len(dirPossLHaut) - 1)] elif cube % 5 == 0 and cube != 20: direction = dirPossCGauche[random.randint(0, len(dirPossCGauche) - 1)] elif cube == 20: direction = dirPossCoinBasG[random.randint(0, len(dirPossCoinBasG) - 1)] elif cube == 24: direction = dirPossCoinBasD[random.randint(0, len(dirPossCoinBasD) - 1)] elif cube > 19: direction = dirPossLBas[random.randint(0, len(dirPossLBas) - 1)] elif (cube + 1) % 5 == 0: direction = dirPossCDroite[random.randint(0, len(dirPossCDroite) - 1)] else: while body["game"][cube] != player: cube = coupPossibles[random.randint(0, len(coupPossibles) - 1)] if cube == 0: direction = dirPossCoinHautG[random.randint(0, len(dirPossCoinHautG) - 1)] elif cube == 4: direction = dirPossCoinHautD[random.randint(0, len(dirPossCoinHautD) - 1)] elif cube < 5: direction = dirPossLHaut[random.randint(0, len(dirPossLHaut) - 1)] elif cube % 5 == 0 and cube != 20: direction = 
dirPossCGauche[random.randint(0, len(dirPossCGauche) - 1)] elif cube == 20: direction = dirPossCoinBasG[random.randint(0, len(dirPossCoinBasG) - 1)] elif cube == 24: direction = dirPossCoinBasD[random.randint(0, len(dirPossCoinBasD) - 1)] elif cube > 19: direction = dirPossLBas[random.randint(0, len(dirPossLBas) - 1)] elif (cube + 1) % 5 == 0: direction = dirPossCDroite[random.randint(0, len(dirPossCDroite) - 1)] move = {"cube": cube, "direction": direction} return move def check_line(self, body, you, him): # Vérifie si il y a des suites (3 ou 4) en ligne for i in range(25): if i % 5 == 0 and body["game"][i] != None: # Si la case est dans la première colonne if body['game'][i] == you: count = 0 countTriple = 0 for j in range(5): if body['game'][i + j] == you: count += 1 countTriple += 1 # Precaution oblige si X, O, X, X, O car count = 3 mais countTriple = 2 if body["game"][i + j] != body["game"][i + j - 1] and j != 0: countTriple = 0 else: index_free = (i + j) if countTriple == 3: return {"player": you, "4following": "gauche", "index": i + 4} elif count == 4: return {"player": you, "4following": True, "index": index_free} else: count = 0 for j in range(5): if body['game'][i + j] == him: count += 1 else: index_free = (i + j) if count == 4: return {"player": him, "4following": True, "index": index_free} if (i - 1) % 5 == 0 and body["game"][i] != None: # Si la case est dans la deuxième colonne if body['game'][i] == you: count = 0 countTriple = 0 index_free = i - 1 for j in range(4): if body['game'][i + j] == you: count += 1 countTriple += 1 if body["game"][i + j] != body["game"][i + j - 1] and j != 0: countTriple = 0 if countTriple == 3 and (i == 1 or i == 21): return {"player": you, "4following": "milieu", "index": i + 3} if count == 4: return {"player": you, "4following": True, "index": index_free} else: count = 0 index_free = i - 1 for j in range(4): if body['game'][i + j] == him: count += 1 if count == 4: return {"player": him, "4following": True, "index": 
index_free} if (i - 2) % 5 == 0 and body["game"][i] == you: # Si la case est dans la troisième colonne (suite de 3 seulement) index_free = i - 2 count = 0 for j in range(3): if body["game"][i + j] == you: count += 1 if count == 3: return {"player": you, "4following": "droit", "index": i - 2} return {'4following': False} def check_col(self, body, you, him): # Comme pour check_line mais en col, même logique for i in range(25): if i in range(5) and body["game"][i] != None: if body['game'][i] == you: count = 0 countTriple = 0 for j in range(5): if body['game'][i + 5 * j] == you: count += 1 countTriple += 1 if body["game"][i + 5 * j] != body["game"][i + 5 * (j - 1)] and j != 0: countTriple = 0 else: index_free = i + 5 * j if countTriple == 3: return {"player": you, "4following": "haut", "index": i + 20} if count == 4: return {"player": you, "4following": True, "index": index_free} else: count = 0 for j in range(5): if body['game'][i + 5 * j] == him: count += 1 else: index_free = i + 5 * j if count == 4: return {"player": him, "4following": True, "index": index_free} if (i - 5) in range(5) and body["game"][i] != None: if body['game'][i] == you: count = 0 countTriple = 0 index_free = i - 5 for j in range(4): if body['game'][i + 5 * j] == you: count += 1 countTriple += 1 if body["game"][i + 5 * j] != body["game"][i + 5 * (j - 1)] and j != 0: countTriple = 0 if countTriple == 3 and (i == 5 or i == 9): return {"player": you, "4following": "milieu", "index": i - 5} if count == 4: return {"player": you, "4following": True, "index": index_free} else: count = 0 index_free = i - 5 for j in range(4): if body['game'][i + 5 * j] == him: count += 1 if count == 4: return {"player": him, "4following": True, "index": index_free} if (i - 10) in range(5): count = 0 for j in range(3): if body["game"][i + 5 * j] == you: count += 1 if count == 3: return {"player": you, "4following": "bas", "index": i - 10} return {"4following": False} def play_for_pre_win(self, index, direction, him, body, 
you, side): # Fonction qui à partir de suites de 3, forme des suites de 4 if direction == "ligne": if body["game"][index] != him and side == "gauche": return {"cube": index, "direction": "W"} if body['game'][index] == him and side == 'gauche': if body['game'][index-1] != him and (index == 4 or index == 24): return {'cube' : index - 1,'direction' : "W"} if body["game"][index] != him and side == "droit": return {"cube": index, "direction": "E"} if body['game'][index] == him and side == 'droit': if body['game'][index+1] != him and (index == 0 or index == 20): return {'cube' : index+1,'direction' : 'E'} if side == "milieu" and index == 0: for i in range(1, 5): if body["game"][5 * i] != him: return {"cube": 5 * i, "direction": "N"} elif body["game"][4 + 5 * i] != him: return {"cube": 4 + 5 * i, "direction": "N"} if side == "milieu" and index == 20: for i in range(4): if body["game"][5 * i] != him: return {"cube": 5 * i, "direction": "S"} elif body["game"][4 + 5 * i] != him: return {"cube": 4 + 5 * i, "direction": "S"} else: return self.coupRandom(you, body) if direction == "colonne": if body["game"][index] != him and side == "haut": return {"cube": index, "direction": "N"} elif body["game"][index] != him and side == "bas": return {"cube": index, "direction": "S"} elif side == "milieu" and index == 5: for i in range(1, 5): if body["game"][i] != him: return {"cube": i, "direction": "W"} elif body["game"][20 + i] != him: return {"cube": 20 + i, "direction": "W"} elif side == "milieu" and index == 9: for i in range(1, 5): if body["game"][4 - i] != him: return {"cube": 4 - i, "direction": "E"} elif body["game"][24 - i] != him: return {"cube": 24 - i, "direction": "E"} else: return self.coupRandom(you, body) def play_for_win(self, body, index, direction, you, him): # Fonction qui termine des suites de 4 pour gagner if direction == "ligne": if body['game'][index] == None: if index % 5 == 0: return {'cube': index, 'direction': 'E'} elif (index + 1) % 5 == 0: return {'cube': 
index, 'direction': 'W'} elif index in range(5) or index in range(20, 25): return {'cube': index, 'direction': 'W'} elif body['game'][index - 5] == you and body['game'][(index % 5) + 20] != him: return {'cube': (index % 5) + 20, 'direction': 'N'} elif body['game'][index + 5] == you and body['game'][index % 5] != him: return {'cube': index % 5, 'direction': 'S'} else: return self.coupRandom(you, body) elif body['game'][index] == him: if index in range(5): if index % 5 == 0: for i in range(1, 5): if body['game'][(index + i * 5)] != him: return {'cube': index + i * 5, 'direction': 'N'} elif (index + 1) % 5 == 0: for i in range(1, 5): if body['game'][(index + i * 5)] != him: return {'cube': index + i * 5, 'direction': 'N'} elif body['game'][index + 20] != him: return {'cube': index + 20, 'direction': 'N'} else: return self.coupRandom(you, body) elif index in range(20, 25): if index % 5 == 0: for i in range(1, 5): if body['game'][(index - i * 5)] != him: return {'cube': index - i * 5, 'direction': 'S'} elif (index + 1) % 5 == 0: for i in range(1, 5): if body['game'][(index - i * 5)] != him: return {'cube': index - i * 5, 'direction': 'S'} elif body['game'][index - 20] != him: return {'cube': index - 20, 'direction': 'S'} else: return self.coupRandom(you, body) elif index % 5 == 0: if body['game'][index - 5] == you and body['game'][(index % 5) + 20] != him: return {'cube': (index % 5) + 20, 'direction': 'N'} elif body['game'][index + 5] == you and body['game'][index % 5] != him: return {'cube': index % 5, 'direction': 'S'} else: return self.coupRandom(you, body) elif (index + 1) % 5 == 0: if body['game'][index - 5] == you and body['game'][(index % 5) + 20] != him: return {'cube': (index % 5) + 20, 'direction': 'N'} elif body['game'][index + 5] == you and body['game'][index % 5] != him: return {'cube': index % 5, 'direction': 'S'} else: return self.coupRandom(you, body) else: return self.coupRandom(you, body) elif direction == "colonne": if body['game'][index] == None: if 
index in range(5): return {'cube': index, 'direction': 'S'} elif index in range(20, 25): return {'cube': index, 'direction': 'N'} elif index % 5 == 0 or (index + 1) % 5 == 0: return {'cube': index, 'direction': 'S'} elif body['game'][index - 1] == you and body['game'][(index - (index % 5)) + 4] != him: return {'cube': (index - (index % 5)) + 4, 'direction': 'W'} elif body['game'][index + 5] == you and body['game'][(index - (index % 5))] != him: return {'cube': (index - (index % 5)), 'direction': 'E'} else: return self.coupRandom(you, body) elif body['game'][index] == him: if index % 5 == 0: if index in range(5): for i in range(1, 5): if body['game'][index + i] != him: return {'cube': index + i, 'direction': 'W'} elif index in range(20, 25): for i in range(1, 5): if body['game'][index + i] != him: return {'cube': index + i, 'direction': 'W'} elif body['game'][index + 4] != him: return {'cube': index + 4, 'direction': 'W'} else: return self.coupRandom(you, body) elif (index + 1) % 5 == 0: if index in range(5): for i in range(1, 5): if body['game'][index - i] != him: return {'cube': index - i, 'direction': 'E'} elif index in range(20, 25): for i in range(1, 5): if body['game'][index - i] != him: return {'cube': index - i, 'direction': 'E'} elif body['game'][index - 4] != him: return {'cube': index - 4, 'direction': 'E'} else: return self.coupRandom(you, body) elif index in range(5): if body['game'][index - 1] == you and body['game'][(index - (index % 5)) + 4] != him: return {'cube': (index - (index % 5)) + 4, 'direction': 'W'} elif body['game'][index + 1] == you and body['game'][(index - (index % 5))] != him: return {'cube': (index - (index % 5)), 'direction': 'E'} else: return self.coupRandom(you, body) elif index in range(20, 25): if body['game'][index - 1] == you and body['game'][(index - (index % 5)) + 4] != him: return {'cube': (index - (index % 5)) + 4, 'direction': 'W'} elif body['game'][index + 1] == you and body['game'][(index - (index % 5))] != him: return 
{'cube': (index - (index % 5)), 'direction': 'E'} else: return self.coupRandom(you, body) else: return self.coupRandom(you, body) def play_for_counter(self,body,index, direction, you, him): # Fonction qui contre les suites de 4 adverses (pour empêcher un "gg ez") if direction == "ligne": if index in range(5): if body['game'][index] == you and body['game'][index+20] == you: self.coupRandom(you,body,index) for i in range(5): if body['game'][i] == him and body['game'][i+20] != him: return {'cube':i+20,'direction':'N'} return self.coupRandom(you,body) elif index in range(20,25): if body['game'][index] == you and body['game'][index-20] == you: self.coupRandom(you,body,index) for i in range(5): if body['game'][i+20] == him and body['game'][i] != him: return {'cube': i,'direction':'S'} return self.coupRandom(you,body) elif index % 5 == 0: if body['game'][index] == you and (body['game'][index-5] == you and body['game'][index+5] == you): return self.coupRandom(you,body,index) for i in range(5): if body['game'][index+i] == him: if body['game'][(index+i)%5] != him and body['game'][index+i+5] != him: return {'cube':(index+i)%5,'direction':'S'} elif body['game'][((index+i)%5)+20] != him and body['game'][index+i-5] != him: return {'cube': ((index + i) % 5)+20, 'direction': 'N'} elif body['game'][index+4+5] != him and body['game'][index+4-5] != him: return {'cube':index,'direction':'E'} else: return self.coupRandom(you,body) elif (index+1) % 5 == 0: if body['game'][index] == you and (body['game'][index-5] == you and body['game'][index+5] == you): return self.coupRandom(you,body,index) for i in range(5): if body['game'][index-i] == him: if body['game'][(index-i)%5] != him and body['game'][(index-i)+5] != you: return {'cube':(index-i)%5,'direction':'S'} elif body['game'][((index-i)%5)+20] != him and body['game'][(index-i)-5] != you: return {'cube': ((index - i) % 5)+20, 'direction': 'N'} elif body['game'][index-4+5] != him or body['game'][index-4-5] != him: return 
{'cube':index,'direction':'W'} else: return self.coupRandom(you,body) else : return self.coupRandom(you,body) elif direction == 'colonne': if index % 5 == 0: if body['game'][index] == you and body['game'][index+4] == you: self.coupRandom(you,body,index) for i in range(5): if body['game'][5*i] == him and body['game'][5*i+4] != him: return {'cube':i*5+4,'direction':'W'} return self.coupRandom(you,body) elif (index +1)%5==0: if body['game'][index] == you and body['game'][index-4] == you: self.coupRandom(you,body,index) for i in range(5): if body['game'][5*i+4] == him and body['game'][5*i] != him: return {'cube': 5*i,'direction':'E'} return self.coupRandom(you,body) elif index in range(5): if body['game'][index] == you and (body['game'][index-1] == you and body['game'][index+1] == you): return self.coupRandom(you,body,index) for i in range(5): if body['game'][index+5*i] == him: if body['game'][5*i] != him and body['game'][index+5*i+1] != him: return {'cube':5*i,'direction':'E'} elif body['game'][5*i+4] != him and body['game'][index+5*i-1] != him: return {'cube': 5*i+4, 'direction': 'W'} elif body['game'][index+20+1] != him and body['game'][index+20-1] != him: return {'cube':index,'direction':'S'} else: return self.coupRandom(you,body) elif index in range(20,25): if body['game'][index] == you and (body['game'][index-1] == you and body['game'][index+1] == you): return self.coupRandom(you,body,index) for i in range(5): if body['game'][index-5*i] == him: if body['game'][5*i] != him and body['game'][index-5*i+1] != him: return {'cube':5*i,'direction':'E'} elif body['game'][5*i+4] != him and body['game'][index-5*i-1] != him: return {'cube': 5*i+4, 'direction': 'W'} elif body['game'][index-20+1] != him and body['game'][index-20-1] != him: return {'cube':index,'direction':'S'} else: return self.coupRandom(you,body) if __name__ == "__main__": if len(sys.argv) > 1: port = int(sys.argv[1]) else: port = 8081 cherrypy.config.update({'server.socket_host': '0.0.0.0', 
'server.socket_port': port}) cherrypy.quickstart(Server())
54.669091
177
0.425668
3,245
30,068
3.91433
0.07396
0.089435
0.094158
0.041332
0.792159
0.775075
0.743977
0.724453
0.682963
0.656117
0
0.036099
0.435247
30,068
550
178
54.669091
0.711913
0.036916
0
0.639922
0
0
0.09614
0.002868
0
0
0
0
0
1
0.013699
false
0
0.005871
0
0.228963
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
bebc8290dd0104035c91631826fc087b902843d1
128
py
Python
_states/hellostate.py
UtahDave/testgitfs
f1fbd319ead2c17ff134e0d53f9cad11933ca14f
[ "Apache-2.0" ]
null
null
null
_states/hellostate.py
UtahDave/testgitfs
f1fbd319ead2c17ff134e0d53f9cad11933ca14f
[ "Apache-2.0" ]
null
null
null
_states/hellostate.py
UtahDave/testgitfs
f1fbd319ead2c17ff134e0d53f9cad11933ca14f
[ "Apache-2.0" ]
null
null
null
def world(name): ret = {"name": name, "changes": {}, "result": True, "comment": "Hello, World! This worked"} return ret
32
95
0.59375
16
128
4.75
0.75
0
0
0
0
0
0
0
0
0
0
0
0.195313
128
3
96
42.666667
0.737864
0
0
0
0
0
0.382813
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
fe225ad68266046f27a3f7f192bb182e886da430
44
py
Python
aljabr/__init__.py
forieux/aljabr
98b0965476b4459dae43343edb8c1013d02fccfb
[ "Unlicense" ]
null
null
null
aljabr/__init__.py
forieux/aljabr
98b0965476b4459dae43343edb8c1013d02fccfb
[ "Unlicense" ]
null
null
null
aljabr/__init__.py
forieux/aljabr
98b0965476b4459dae43343edb8c1013d02fccfb
[ "Unlicense" ]
null
null
null
__version__ = "0.2.0" from .linop import *
11
21
0.659091
7
44
3.571429
0.857143
0
0
0
0
0
0
0
0
0
0
0.083333
0.181818
44
3
22
14.666667
0.611111
0
0
0
0
0
0.113636
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4