hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0a2bd40d98de95e4d38bcc3fa6fce3971fd447c8
| 134
|
py
|
Python
|
centralpy/__main__.py
|
pmaengineering/centralpy
|
edd89925bcc4204add1bf93b7ff3437cc0e6ea92
|
[
"MIT"
] | null | null | null |
centralpy/__main__.py
|
pmaengineering/centralpy
|
edd89925bcc4204add1bf93b7ff3437cc0e6ea92
|
[
"MIT"
] | 1
|
2022-01-22T14:36:57.000Z
|
2022-01-24T07:19:44.000Z
|
centralpy/__main__.py
|
pmaengineering/centralpy
|
edd89925bcc4204add1bf93b7ff3437cc0e6ea92
|
[
"MIT"
] | 1
|
2021-06-30T13:45:09.000Z
|
2021-06-30T13:45:09.000Z
|
"""Run the package with -m and interact via CLI."""
from centralpy.cli import main
main() # pylint: disable=no-value-for-parameter
| 22.333333
| 51
| 0.723881
| 21
| 134
| 4.619048
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149254
| 134
| 5
| 52
| 26.8
| 0.850877
| 0.634328
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
0a3b88b73af741da55f10b0ab347ba4df5118c7f
| 3,342
|
py
|
Python
|
selene/common/predicate.py
|
kianku/selene
|
5361938e4f34d6cfae6df3aeca80e06a3e657d8c
|
[
"MIT"
] | null | null | null |
selene/common/predicate.py
|
kianku/selene
|
5361938e4f34d6cfae6df3aeca80e06a3e657d8c
|
[
"MIT"
] | null | null | null |
selene/common/predicate.py
|
kianku/selene
|
5361938e4f34d6cfae6df3aeca80e06a3e657d8c
|
[
"MIT"
] | null | null | null |
# MIT License
#
# Copyright (c) 2015-2020 Iakiv Kramarenko
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import re
def is_truthy(something):
return bool(something) if not something == '' else True
def equals_ignoring_case(expected):
return lambda actual: str(expected).lower() == str(actual).lower()
def equals(expected, ignore_case=False):
return lambda actual: expected == actual if not ignore_case else equals_ignoring_case(expected)
def is_greater_than(expected):
return lambda actual: actual > expected
def is_greater_than_or_equal(expected):
return lambda actual: actual >= expected
def is_less_than(expected):
return lambda actual: actual < expected
def is_less_than_or_equal(expected):
return lambda actual: actual <= expected
def includes_ignoring_case(expected):
return lambda actual: str(expected).lower() in str(actual).lower()
def includes(expected, ignore_case=False):
def fn(actual):
try:
return expected in actual if not ignore_case else includes_ignoring_case(expected)
except TypeError:
return False
return fn
def includes_word_ignoring_case(expected):
return lambda actual: str(expected).lower() in re.split(r'\s+', str(actual).lower())
def includes_word(expected, ignore_case=False):
return lambda actual: expected in re.split(r'\s+', actual) if not ignore_case else includes_ignoring_case(expected)
# will not work with empty seqs :( todo: fix
# currently we use it only for non-empty seqs taking this into account
seq_compare_by = lambda f: lambda x=None, *xs: lambda y=None, *ys: \
True if x is None and y is None else bool(f(x)(y)) and seq_compare_by(f)(*xs)(*ys)
# def seq_compare_by_2(f):
# def fn(x, *xs):
# def fn(y, *ys):
# return True if x is None and y is None else \
# bool(f(x)(y)) and seq_compare_by(f)(*xs or (None, ))(*ys or (None, ))
# return fn
# return fn
list_compare_by = lambda f: lambda expected: lambda actual: \
seq_compare_by(f)(*expected)(*actual)
# list_compare_by = lambda f: lambda expected: lambda actual: \
# seq_compare_by(f)(* expected if expected else (None,))(* actual if actual else (None, ))
equals_to_list = list_compare_by(equals)
equals_by_contains_to_list = list_compare_by(includes)
| 33.42
| 119
| 0.727409
| 506
| 3,342
| 4.685771
| 0.310277
| 0.055673
| 0.068326
| 0.076761
| 0.424294
| 0.358077
| 0.347533
| 0.347533
| 0.3062
| 0.262758
| 0
| 0.003308
| 0.185817
| 3,342
| 99
| 120
| 33.757576
| 0.868063
| 0.474267
| 0
| 0
| 0
| 0
| 0.00348
| 0
| 0
| 0
| 0
| 0.010101
| 0
| 1
| 0.352941
| false
| 0
| 0.029412
| 0.294118
| 0.764706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
0a555a3b4fbce70f6ca2f730ff9344e648f808e5
| 252
|
py
|
Python
|
chapterfour/animals.py
|
cmotek/python_crashcourse
|
29cbdd6699cd17192bb599d235852d547630d110
|
[
"Apache-2.0"
] | null | null | null |
chapterfour/animals.py
|
cmotek/python_crashcourse
|
29cbdd6699cd17192bb599d235852d547630d110
|
[
"Apache-2.0"
] | null | null | null |
chapterfour/animals.py
|
cmotek/python_crashcourse
|
29cbdd6699cd17192bb599d235852d547630d110
|
[
"Apache-2.0"
] | null | null | null |
animals = ['Octopus', 'Squid', 'Cuttlefish', 'Nautilus']
for animal in animals:
print(f"A {animal} would make a great pet!")
print("These animals lack bones and also remorse for their enemies.")
print("Any of these animals would make a great pet!")
| 31.5
| 69
| 0.718254
| 39
| 252
| 4.641026
| 0.641026
| 0.099448
| 0.110497
| 0.165746
| 0.198895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154762
| 252
| 7
| 70
| 36
| 0.849765
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.6
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
0a7122c6195664e1f1a0c153d868437e245a3128
| 216
|
py
|
Python
|
public/__init__.py
|
rozenZ/sendMail
|
dd1029233bd61867bd5563d6ffb5564c7f81922e
|
[
"Apache-2.0"
] | null | null | null |
public/__init__.py
|
rozenZ/sendMail
|
dd1029233bd61867bd5563d6ffb5564c7f81922e
|
[
"Apache-2.0"
] | null | null | null |
public/__init__.py
|
rozenZ/sendMail
|
dd1029233bd61867bd5563d6ffb5564c7f81922e
|
[
"Apache-2.0"
] | null | null | null |
from excel import *
from overAll import *
from oracleOperator import *
from proxySendMail import *
__all__ = ["proxySendMail","excel","overAll","cInfo",
"logging","processWork","parallelWork","operators"]
| 30.857143
| 62
| 0.717593
| 21
| 216
| 7.190476
| 0.571429
| 0.198676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.143519
| 216
| 7
| 62
| 30.857143
| 0.816216
| 0
| 0
| 0
| 0
| 0
| 0.317972
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
6a635aecb7c44bde0c9cd46f75fa322a4715d38c
| 741
|
py
|
Python
|
python/Basic-Algorith-Scripting/whereDoIBelong.py
|
stricoff92/freecodecamp-challenges
|
5f0012fc27edfe7d4855269e18c1b9bb82fe1f4a
|
[
"MIT"
] | null | null | null |
python/Basic-Algorith-Scripting/whereDoIBelong.py
|
stricoff92/freecodecamp-challenges
|
5f0012fc27edfe7d4855269e18c1b9bb82fe1f4a
|
[
"MIT"
] | 1
|
2018-09-28T01:34:46.000Z
|
2018-09-28T01:34:46.000Z
|
python/Basic-Algorith-Scripting/whereDoIBelong.py
|
stricoff92/freecodecamp-challenges
|
5f0012fc27edfe7d4855269e18c1b9bb82fe1f4a
|
[
"MIT"
] | null | null | null |
'''
Return the lowest index at which a value (second argument) should be inserted into an array (first argument) once it has been sorted.
The returned value should be a number.
For example, getIndexToIns([1,2,3,4], 1.5) should return 1 because it is greater than 1 (index 0), but less than 2 (index 1).
Likewise, getIndexToIns([20,3,5], 19) should return 2 because once the array has been sorted it will look like [3,5,20] and 19 is less than 20 (index 2) and greater than 5 (index 1).
'''
def getIndexToIns(arr, num):
# Count the number or arr elements that are less than num.
# That's the index where num would fit in an array sorted small to large.
return sum(v<num for v in arr)
getIndexToIns([40, 60], 50)
| 33.681818
| 182
| 0.707152
| 135
| 741
| 3.881481
| 0.496296
| 0.045802
| 0.049618
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059625
| 0.207827
| 741
| 21
| 183
| 35.285714
| 0.833049
| 0.836707
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
6aa6ce90486c398fd2fe6e81db031621debc28a1
| 161
|
py
|
Python
|
lfview/client/convert/__init__.py
|
seequent/lfview-api-client
|
3dc7115c03e4dd2fabe48d6c743766b9217fcfac
|
[
"MIT"
] | null | null | null |
lfview/client/convert/__init__.py
|
seequent/lfview-api-client
|
3dc7115c03e4dd2fabe48d6c743766b9217fcfac
|
[
"MIT"
] | 9
|
2019-04-11T10:02:21.000Z
|
2019-10-03T18:06:51.000Z
|
lfview/client/convert/__init__.py
|
seequent/lfview-api-client
|
3dc7115c03e4dd2fabe48d6c743766b9217fcfac
|
[
"MIT"
] | 1
|
2021-03-21T21:20:57.000Z
|
2021-03-21T21:20:57.000Z
|
"""Conversion functions to build Views"""
from __future__ import absolute_import
from .omf import omf_to_view, view_to_omf
from .steno3d import steno3d_to_view
| 26.833333
| 41
| 0.826087
| 25
| 161
| 4.88
| 0.48
| 0.098361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014085
| 0.118012
| 161
| 5
| 42
| 32.2
| 0.84507
| 0.217391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6aaae4ee085ce9f894486c066cc874fc9ba962d8
| 159
|
py
|
Python
|
app/init.py
|
supermax03/movies
|
4881e9162e9ad03a5e2bf6c5d0b2615a80249f0a
|
[
"MIT"
] | null | null | null |
app/init.py
|
supermax03/movies
|
4881e9162e9ad03a5e2bf6c5d0b2615a80249f0a
|
[
"MIT"
] | null | null | null |
app/init.py
|
supermax03/movies
|
4881e9162e9ad03a5e2bf6c5d0b2615a80249f0a
|
[
"MIT"
] | null | null | null |
from imdb.imdb import IMDB
import time
if __name__ == '__main__':
for film in IMDB.getmovies("new", time.strftime("%Y-%m")):
print(film.__dict__)
| 22.714286
| 62
| 0.666667
| 23
| 159
| 4.086957
| 0.73913
| 0.212766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18239
| 159
| 6
| 63
| 26.5
| 0.723077
| 0
| 0
| 0
| 0
| 0
| 0.100629
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0.2
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6ac5c5d662a3cc0eec896df8f4fe2dc4f7b7664a
| 127
|
py
|
Python
|
app/room.py
|
charlieoduk/dojo-rooms
|
d557fa5846f94f574d3e8a637090a72b1b98d6b9
|
[
"MIT"
] | null | null | null |
app/room.py
|
charlieoduk/dojo-rooms
|
d557fa5846f94f574d3e8a637090a72b1b98d6b9
|
[
"MIT"
] | 7
|
2017-06-26T10:45:39.000Z
|
2021-06-10T19:31:39.000Z
|
app/room.py
|
charlieoduk/dojo-rooms
|
d557fa5846f94f574d3e8a637090a72b1b98d6b9
|
[
"MIT"
] | 1
|
2017-06-26T10:43:15.000Z
|
2017-06-26T10:43:15.000Z
|
from abc import ABCMeta, abstractmethod
class Room(metaclass=ABCMeta):
def __init__(self,name):
self.name = name
| 18.142857
| 39
| 0.708661
| 16
| 127
| 5.375
| 0.75
| 0.186047
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204724
| 127
| 6
| 40
| 21.166667
| 0.851485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
6ac7c50e73262a30423f8fa51f22f83d51c60892
| 1,405
|
py
|
Python
|
server/user/migrations/0008_userlog.py
|
MetLee/hackergame
|
571b5407e0644169a2f9b3907a0a1d93138ba436
|
[
"MIT"
] | 48
|
2018-09-30T11:07:52.000Z
|
2021-12-07T03:32:59.000Z
|
server/user/migrations/0008_userlog.py
|
MetLee/hackergame
|
571b5407e0644169a2f9b3907a0a1d93138ba436
|
[
"MIT"
] | 100
|
2018-10-13T18:37:25.000Z
|
2021-11-11T12:14:45.000Z
|
server/user/migrations/0008_userlog.py
|
MetLee/hackergame
|
571b5407e0644169a2f9b3907a0a1d93138ba436
|
[
"MIT"
] | 11
|
2018-10-08T14:59:33.000Z
|
2022-03-02T03:21:09.000Z
|
# Generated by Django 3.1.2 on 2020-10-23 03:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('user', '0007_user_aff'),
]
operations = [
migrations.CreateModel(
name='UserLog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('context_user', models.IntegerField(null=True)),
('context_time', models.DateTimeField()),
('context_elevated', models.BooleanField()),
('user', models.IntegerField()),
('group', models.TextField()),
('nickname', models.TextField(null=True)),
('name', models.TextField(null=True)),
('sno', models.TextField(null=True)),
('tel', models.TextField(null=True)),
('email', models.TextField(null=True)),
('gender', models.TextField(null=True)),
('qq', models.TextField(null=True)),
('school', models.TextField(null=True)),
('grade', models.TextField(null=True)),
('aff', models.TextField(null=True)),
('token', models.TextField()),
],
options={
'default_permissions': (),
},
),
]
| 36.025641
| 114
| 0.512456
| 123
| 1,405
| 5.780488
| 0.479675
| 0.253165
| 0.267229
| 0.323488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020191
| 0.330249
| 1,405
| 38
| 115
| 36.973684
| 0.735388
| 0.032028
| 0
| 0
| 1
| 0
| 0.107511
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
0a741e8d2e96a671b7f291fa9f38a03dd5596b06
| 1,608
|
py
|
Python
|
turfs/models.py
|
mkschu/greensManagerDjango
|
842f6cc0e5bc9fa5f36e5d996960900886d3370a
|
[
"Apache-2.0"
] | null | null | null |
turfs/models.py
|
mkschu/greensManagerDjango
|
842f6cc0e5bc9fa5f36e5d996960900886d3370a
|
[
"Apache-2.0"
] | null | null | null |
turfs/models.py
|
mkschu/greensManagerDjango
|
842f6cc0e5bc9fa5f36e5d996960900886d3370a
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
class SoilType(models.Model):
name = models.CharField(max_length=256)
sand_ratio = models.FloatField()
silt_ratio = models.FloatField()
clay_ratio = models.FloatField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return "%s" % (self.name)
class TurfgrassGenus(models.Model):
class Meta:
verbose_name_plural = 'Turfgrass genuses'
name = models.CharField(max_length=256)
common_name = models.CharField(max_length=256)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return "%s (%s)" % (self.common_name, self.name)
class TurfgrassSpecies(models.Model):
class Meta:
verbose_name_plural = 'Turfgrass species'
name = models.CharField(max_length=256)
common_name = models.CharField(max_length=256)
genus = models.ForeignKey(TurfgrassGenus)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return "%s (%s %s)" % (self.common_name,
self.genus.name,
self.name)
class Cultivar(models.Model):
name = models.CharField(max_length=256)
species = models.ForeignKey(TurfgrassSpecies)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return "%s (%s)" % (self.name, self.species)
| 30.923077
| 56
| 0.682836
| 196
| 1,608
| 5.331633
| 0.219388
| 0.061244
| 0.160766
| 0.191388
| 0.715789
| 0.715789
| 0.684211
| 0.684211
| 0.515789
| 0.515789
| 0
| 0.014151
| 0.208955
| 1,608
| 51
| 57
| 31.529412
| 0.80739
| 0
| 0
| 0.526316
| 0
| 0
| 0.037313
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.026316
| 0.105263
| 0.894737
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
0a74b5436920b8aeb903ca77503ee7f154da5190
| 40,878
|
py
|
Python
|
tests/api/test_variant.py
|
closelookadmin/saleor
|
dbf12995d396097df457b2f40a3e66641f8c9993
|
[
"CC-BY-4.0"
] | 1
|
2020-05-11T02:38:17.000Z
|
2020-05-11T02:38:17.000Z
|
tests/api/test_variant.py
|
closelookadmin/saleor
|
dbf12995d396097df457b2f40a3e66641f8c9993
|
[
"CC-BY-4.0"
] | null | null | null |
tests/api/test_variant.py
|
closelookadmin/saleor
|
dbf12995d396097df457b2f40a3e66641f8c9993
|
[
"CC-BY-4.0"
] | null | null | null |
from unittest.mock import ANY
from uuid import uuid4
import graphene
import pytest
from graphene.utils.str_converters import to_camel_case
from saleor.product.error_codes import ProductErrorCode
from saleor.product.models import ProductVariant
from saleor.product.utils.attributes import associate_attribute_values_to_instance
from tests.api.utils import get_graphql_content
def test_fetch_variant(staff_api_client, product, permission_manage_products):
query = """
query ProductVariantDetails($id: ID!) {
productVariant(id: $id) {
id
attributes {
attribute {
id
name
slug
values {
id
name
slug
}
}
values {
id
name
slug
}
}
costPrice {
currency
amount
}
images {
id
}
name
priceOverride {
currency
amount
}
product {
id
}
}
}
"""
variant = product.variants.first()
variant_id = graphene.Node.to_global_id("ProductVariant", variant.pk)
variables = {"id": variant_id}
staff_api_client.user.user_permissions.add(permission_manage_products)
response = staff_api_client.post_graphql(query, variables)
content = get_graphql_content(response)
data = content["data"]["productVariant"]
assert data["name"] == variant.name
def test_create_variant(
staff_api_client, product, product_type, permission_manage_products
):
query = """
mutation createVariant (
$productId: ID!,
$sku: String!,
$priceOverride: Decimal,
$costPrice: Decimal,
$quantity: Int!,
$attributes: [AttributeValueInput]!,
$weight: WeightScalar,
$trackInventory: Boolean!) {
productVariantCreate(
input: {
product: $productId,
sku: $sku,
priceOverride: $priceOverride,
costPrice: $costPrice,
quantity: $quantity,
attributes: $attributes,
trackInventory: $trackInventory,
weight: $weight
}) {
productErrors {
field
message
}
productVariant {
name
sku
attributes {
attribute {
slug
}
values {
slug
}
}
quantity
priceOverride {
currency
amount
localized
}
costPrice {
currency
amount
localized
}
weight {
value
unit
}
}
}
}
"""
product_id = graphene.Node.to_global_id("Product", product.pk)
sku = "1"
price_override = 1.32
cost_price = 3.22
quantity = 10
weight = 10.22
variant_slug = product_type.variant_attributes.first().slug
variant_id = graphene.Node.to_global_id(
"Attribute", product_type.variant_attributes.first().pk
)
variant_value = "test-value"
variables = {
"productId": product_id,
"sku": sku,
"quantity": quantity,
"costPrice": cost_price,
"priceOverride": price_override,
"weight": weight,
"attributes": [{"id": variant_id, "values": [variant_value]}],
"trackInventory": True,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)["data"]["productVariantCreate"]
assert not content["productErrors"]
data = content["productVariant"]
assert data["name"] == variant_value
assert data["quantity"] == quantity
assert data["costPrice"]["amount"] == cost_price
assert data["priceOverride"]["amount"] == price_override
assert data["sku"] == sku
assert data["attributes"][0]["attribute"]["slug"] == variant_slug
assert data["attributes"][0]["values"][0]["slug"] == variant_value
assert data["weight"]["unit"] == "kg"
assert data["weight"]["value"] == weight
def test_create_product_variant_not_all_attributes(
staff_api_client, product, product_type, color_attribute, permission_manage_products
):
query = """
mutation createVariant (
$productId: ID!,
$sku: String!,
$attributes: [AttributeValueInput]!) {
productVariantCreate(
input: {
product: $productId,
sku: $sku,
attributes: $attributes
}) {
productErrors {
field
code
message
}
}
}
"""
product_id = graphene.Node.to_global_id("Product", product.pk)
sku = "1"
variant_id = graphene.Node.to_global_id(
"Attribute", product_type.variant_attributes.first().pk
)
variant_value = "test-value"
product_type.variant_attributes.add(color_attribute)
variables = {
"productId": product_id,
"sku": sku,
"attributes": [{"id": variant_id, "values": [variant_value]}],
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
assert content["data"]["productVariantCreate"]["productErrors"]
assert content["data"]["productVariantCreate"]["productErrors"][0] == {
"field": "attributes",
"code": ProductErrorCode.REQUIRED.name,
"message": ANY,
}
assert not product.variants.filter(sku=sku).exists()
def test_create_product_variant_duplicated_attributes(
staff_api_client,
product_with_variant_with_two_attributes,
color_attribute,
size_attribute,
permission_manage_products,
):
query = """
mutation createVariant (
$productId: ID!,
$sku: String!,
$attributes: [AttributeValueInput]!
) {
productVariantCreate(
input: {
product: $productId,
sku: $sku,
attributes: $attributes
}) {
productErrors {
field
code
message
}
}
}
"""
product = product_with_variant_with_two_attributes
product_id = graphene.Node.to_global_id("Product", product.pk)
color_attribute_id = graphene.Node.to_global_id("Attribute", color_attribute.id)
size_attribute_id = graphene.Node.to_global_id("Attribute", size_attribute.id)
sku = str(uuid4())[:12]
variables = {
"productId": product_id,
"sku": sku,
"attributes": [
{"id": color_attribute_id, "values": ["red"]},
{"id": size_attribute_id, "values": ["small"]},
],
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
content = get_graphql_content(response)
assert content["data"]["productVariantCreate"]["productErrors"]
assert content["data"]["productVariantCreate"]["productErrors"][0] == {
"field": "attributes",
"code": ProductErrorCode.UNIQUE.name,
"message": ANY,
}
assert not product.variants.filter(sku=sku).exists()
def test_create_product_variant_update_with_new_attributes(
staff_api_client, permission_manage_products, product, size_attribute
):
query = """
mutation VariantUpdate(
$id: ID!
$attributes: [AttributeValueInput]
$costPrice: Decimal
$priceOverride: Decimal
$sku: String
$quantity: Int
$trackInventory: Boolean!
) {
productVariantUpdate(
id: $id
input: {
attributes: $attributes
costPrice: $costPrice
priceOverride: $priceOverride
sku: $sku
quantity: $quantity
trackInventory: $trackInventory
}
) {
errors {
field
message
}
productVariant {
id
attributes {
attribute {
id
name
slug
values {
id
name
slug
__typename
}
__typename
}
__typename
}
}
}
}
"""
size_attribute_id = graphene.Node.to_global_id("Attribute", size_attribute.pk)
variant_id = graphene.Node.to_global_id(
"ProductVariant", product.variants.first().pk
)
variables = {
"attributes": [{"id": size_attribute_id, "values": ["XXXL"]}],
"costPrice": 10,
"id": variant_id,
"priceOverride": 0,
"quantity": 4,
"sku": "21599567",
"trackInventory": True,
}
data = get_graphql_content(
staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
)["data"]["productVariantUpdate"]
assert not data["errors"]
assert data["productVariant"]["id"] == variant_id
attributes = data["productVariant"]["attributes"]
assert len(attributes) == 1
assert attributes[0]["attribute"]["id"] == size_attribute_id
def test_update_product_variant(staff_api_client, product, permission_manage_products):
query = """
mutation updateVariant (
$id: ID!,
$sku: String!,
$costPrice: Decimal,
$quantity: Int!,
$trackInventory: Boolean!) {
productVariantUpdate(
id: $id,
input: {
sku: $sku,
costPrice: $costPrice,
quantity: $quantity,
trackInventory: $trackInventory
}) {
productVariant {
name
sku
quantity
costPrice {
currency
amount
localized
}
}
}
}
"""
variant = product.variants.first()
variant_id = graphene.Node.to_global_id("ProductVariant", variant.pk)
sku = "test sku"
cost_price = 3.3
quantity = 123
variables = {
"id": variant_id,
"sku": sku,
"quantity": quantity,
"costPrice": cost_price,
"trackInventory": True,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
variant.refresh_from_db()
content = get_graphql_content(response)
data = content["data"]["productVariantUpdate"]["productVariant"]
assert data["name"] == variant.name
assert data["quantity"] == quantity
assert data["costPrice"]["amount"] == cost_price
assert data["sku"] == sku
@pytest.mark.parametrize("field", ("cost_price", "price_override"))
def test_update_product_variant_unset_amounts(
staff_api_client, product, permission_manage_products, field
):
"""Ensure setting nullable amounts to null is properly handled
(setting the amount to none) and doesn't override the currency.
"""
query = """
mutation updateVariant (
$id: ID!,
$sku: String!,
$costPrice: Decimal,
$priceOverride: Decimal) {
productVariantUpdate(
id: $id,
input: {
sku: $sku,
costPrice: $costPrice,
priceOverride: $priceOverride
}) {
productVariant {
name
sku
quantity
costPrice {
currency
amount
localized
}
priceOverride {
currency
amount
localized
}
}
}
}
"""
variant = product.variants.first()
variant_id = graphene.Node.to_global_id("ProductVariant", variant.pk)
sku = variant.sku
camel_case_field_name = to_camel_case(field)
variables = {"id": variant_id, "sku": sku, camel_case_field_name: None}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_products]
)
variant.refresh_from_db()
assert variant.currency is not None
assert getattr(variant, field) is None
content = get_graphql_content(response)
data = content["data"]["productVariantUpdate"]["productVariant"]
assert data[camel_case_field_name] is None
QUERY_UPDATE_VARIANT_ATTRIBUTES = """
mutation updateVariant (
$id: ID!,
$sku: String,
$attributes: [AttributeValueInput]!) {
productVariantUpdate(
id: $id,
input: {
sku: $sku,
attributes: $attributes
}) {
errors {
field
message
}
productErrors {
field
code
}
}
}
"""
def test_update_product_variant_not_all_attributes(
    staff_api_client, product, product_type, color_attribute, permission_manage_products
):
    """Ensures updating a variant with missing attributes (all attributes must
    be provided) raises an error. We expect the color attribute
    to be flagged as missing."""
    new_sku = "test sku"
    variant = product.variants.first()
    first_attribute = product_type.variant_attributes.first()
    # Extend the product type so that a second (color) attribute is required
    # but deliberately omit it from the mutation input.
    product_type.variant_attributes.add(color_attribute)
    variables = {
        "id": graphene.Node.to_global_id("ProductVariant", variant.pk),
        "sku": new_sku,
        "attributes": [
            {
                "id": graphene.Node.to_global_id("Attribute", first_attribute.id),
                "values": ["test-value"],
            }
        ],
    }
    response = staff_api_client.post_graphql(
        QUERY_UPDATE_VARIANT_ATTRIBUTES,
        variables,
        permissions=[permission_manage_products],
    )
    variant.refresh_from_db()
    errors = get_graphql_content(response)["data"]["productVariantUpdate"]["errors"]
    assert errors == [
        {"field": "attributes", "message": "All attributes must take a value"}
    ]
    # The failed mutation must not have renamed the variant's SKU.
    assert not product.variants.filter(sku=new_sku).exists()
def test_update_product_variant_with_current_attribut(
    staff_api_client,
    product_with_variant_with_two_attributes,
    color_attribute,
    size_attribute,
    permission_manage_products,
):
    """Re-sending the variant's current attribute values must succeed.

    NOTE(review): "attribut" in the test name looks like a typo for
    "attribute"; kept as-is so the collected test id does not change.
    """
    variant = product_with_variant_with_two_attributes.variants.first()
    new_sku = str(uuid4())[:12]
    assert variant.sku != new_sku
    # Fixture sanity check: the variant starts out as red/small.
    assert variant.attributes.first().values.first().slug == "red"
    assert variant.attributes.last().values.first().slug == "small"
    variables = {
        "id": graphene.Node.to_global_id("ProductVariant", variant.pk),
        "sku": new_sku,
        "attributes": [
            {
                "id": graphene.Node.to_global_id("Attribute", color_attribute.pk),
                "values": ["red"],
            },
            {
                "id": graphene.Node.to_global_id("Attribute", size_attribute.pk),
                "values": ["small"],
            },
        ],
    }
    response = staff_api_client.post_graphql(
        QUERY_UPDATE_VARIANT_ATTRIBUTES,
        variables,
        permissions=[permission_manage_products],
    )
    payload = get_graphql_content(response)["data"]["productVariantUpdate"]
    assert not payload["errors"]
    variant.refresh_from_db()
    # The SKU changed while the attribute values stayed the same.
    assert variant.sku == new_sku
    assert variant.attributes.first().values.first().slug == "red"
    assert variant.attributes.last().values.first().slug == "small"
def test_update_product_variant_with_new_attribute(
    staff_api_client,
    product_with_variant_with_two_attributes,
    color_attribute,
    size_attribute,
    permission_manage_products,
):
    """Changing one attribute value (small -> big) must succeed."""
    variant = product_with_variant_with_two_attributes.variants.first()
    new_sku = str(uuid4())[:12]
    assert variant.sku != new_sku
    # Fixture sanity check: the variant starts out as red/small.
    assert variant.attributes.first().values.first().slug == "red"
    assert variant.attributes.last().values.first().slug == "small"
    variables = {
        "id": graphene.Node.to_global_id("ProductVariant", variant.pk),
        "sku": new_sku,
        "attributes": [
            {
                "id": graphene.Node.to_global_id("Attribute", color_attribute.pk),
                "values": ["red"],
            },
            {
                "id": graphene.Node.to_global_id("Attribute", size_attribute.pk),
                "values": ["big"],
            },
        ],
    }
    response = staff_api_client.post_graphql(
        QUERY_UPDATE_VARIANT_ATTRIBUTES,
        variables,
        permissions=[permission_manage_products],
    )
    payload = get_graphql_content(response)["data"]["productVariantUpdate"]
    assert not payload["errors"]
    variant.refresh_from_db()
    # The size value was switched from "small" to "big".
    assert variant.sku == new_sku
    assert variant.attributes.first().values.first().slug == "red"
    assert variant.attributes.last().values.first().slug == "big"
def test_update_product_variant_with_duplicated_attribute(
    staff_api_client,
    product_with_variant_with_two_attributes,
    color_attribute,
    size_attribute,
    permission_manage_products,
):
    """Updating a variant onto an attribute combination already used by a
    sibling variant must fail with a UNIQUE product error.
    """
    product = product_with_variant_with_two_attributes
    variant = product.variants.first()
    # Clone the first variant by clearing the pk and saving under a new SKU.
    variant2 = product.variants.first()
    variant2.pk = None
    variant2.sku = str(uuid4())[:12]
    variant2.save()
    # Give the clone the blue/big value combination.
    associate_attribute_values_to_instance(
        variant2, color_attribute, color_attribute.values.last()
    )
    associate_attribute_values_to_instance(
        variant2, size_attribute, size_attribute.values.last()
    )
    assert variant.attributes.first().values.first().slug == "red"
    assert variant.attributes.last().values.first().slug == "small"
    assert variant2.attributes.first().values.first().slug == "blue"
    assert variant2.attributes.last().values.first().slug == "big"
    variant_id = graphene.Node.to_global_id("ProductVariant", variant.pk)
    color_attribute_id = graphene.Node.to_global_id("Attribute", color_attribute.pk)
    size_attribute_id = graphene.Node.to_global_id("Attribute", size_attribute.pk)
    # Try to move the first variant onto variant2's blue/big combination.
    variables = {
        "id": variant_id,
        "attributes": [
            {"id": color_attribute_id, "values": ["blue"]},
            {"id": size_attribute_id, "values": ["big"]},
        ],
    }
    response = staff_api_client.post_graphql(
        QUERY_UPDATE_VARIANT_ATTRIBUTES,
        variables,
        permissions=[permission_manage_products],
    )
    content = get_graphql_content(response)
    data = content["data"]["productVariantUpdate"]
    assert data["productErrors"][0] == {
        "field": "attributes",
        "code": ProductErrorCode.UNIQUE.name,
    }
@pytest.mark.parametrize(
    "values, message",
    (
        ([], "size expects a value but none were given"),
        (["one", "two"], "A variant attribute cannot take more than one value"),
        ([" "], "Attribute values cannot be blank"),
    ),
)
def test_update_product_variant_requires_values(
    staff_api_client, variant, product_type, permission_manage_products, values, message
):
    """Ensures updating a variant with invalid values raise an error.

    - No values
    - Blank value
    - More than one value
    """
    sku = "updated"
    query = QUERY_UPDATE_VARIANT_ATTRIBUTES
    variant_id = graphene.Node.to_global_id("ProductVariant", variant.pk)
    attr_id = graphene.Node.to_global_id(
        "Attribute", product_type.variant_attributes.first().id
    )
    variables = {
        "id": variant_id,
        "attributes": [{"id": attr_id, "values": values}],
        "sku": sku,
    }
    response = staff_api_client.post_graphql(
        query, variables, permissions=[permission_manage_products]
    )
    variant.refresh_from_db()
    content = get_graphql_content(response)
    # Include the expected message in the failure output so a wrong error
    # count is easy to attribute to its parametrized case.
    assert (
        len(content["data"]["productVariantUpdate"]["errors"]) == 1
    ), f"expected: {message}"
    assert content["data"]["productVariantUpdate"]["errors"][0] == {
        "field": "attributes",
        "message": message,
    }
    # The failed mutation must not have renamed the variant's SKU.
    assert not variant.product.variants.filter(sku=sku).exists()
def test_delete_variant(staff_api_client, product, permission_manage_products):
    """Deleting a variant returns its data and removes the database row."""
    query = """
        mutation variantDelete($id: ID!) {
            productVariantDelete(id: $id) {
                productVariant {
                    sku
                    id
                }
            }
        }
    """
    variant = product.variants.first()
    variant_id = graphene.Node.to_global_id("ProductVariant", variant.pk)
    variables = {"id": variant_id}
    response = staff_api_client.post_graphql(
        query, variables, permissions=[permission_manage_products]
    )
    content = get_graphql_content(response)
    data = content["data"]["productVariantDelete"]
    assert data["productVariant"]["sku"] == variant.sku
    # The row must be gone: reloading the deleted variant raises DoesNotExist.
    with pytest.raises(variant._meta.model.DoesNotExist):
        variant.refresh_from_db()
def _fetch_all_variants(client, permissions=None):
    """Query the first 10 product variants and return the connection payload.

    ``check_no_permissions=False`` lets the same helper serve staff,
    customer, and anonymous clients alike.
    """
    query = """
        query fetchAllVariants {
            productVariants(first: 10) {
                totalCount
                edges {
                    node {
                        id
                    }
                }
            }
        }
    """
    response = client.post_graphql(
        query, {}, permissions=permissions, check_no_permissions=False
    )
    content = get_graphql_content(response)
    return content["data"]["productVariants"]
def test_fetch_all_variants_staff_user(
    staff_api_client, unavailable_product_with_variant, permission_manage_products
):
    """Staff with product permissions see variants of unpublished products."""
    variant = unavailable_product_with_variant.variants.first()
    expected_id = graphene.Node.to_global_id("ProductVariant", variant.pk)
    data = _fetch_all_variants(
        staff_api_client, permissions=[permission_manage_products]
    )
    assert data["totalCount"] == 1
    assert data["edges"][0]["node"]["id"] == expected_id
def test_fetch_all_variants_customer(user_api_client, unavailable_product_with_variant):
    """Customers must not see variants of unpublished products."""
    assert _fetch_all_variants(user_api_client)["totalCount"] == 0
def test_fetch_all_variants_anonymous_user(
    api_client, unavailable_product_with_variant
):
    """Anonymous users must not see variants of unpublished products."""
    assert _fetch_all_variants(api_client)["totalCount"] == 0
def _fetch_variant(client, variant, permissions=None):
    """Fetch a single variant (and its product id) by global ID.

    Returns the ``productVariant`` payload, which is ``None`` when the
    variant is not visible to the given client.
    """
    query = """
        query ProductVariantDetails($variantId: ID!) {
            productVariant(id: $variantId) {
                id
                product {
                    id
                }
            }
        }
    """
    variables = {"variantId": graphene.Node.to_global_id("ProductVariant", variant.id)}
    response = client.post_graphql(
        query, variables, permissions=permissions, check_no_permissions=False
    )
    content = get_graphql_content(response)
    return content["data"]["productVariant"]
def test_fetch_unpublished_variant_staff_user(
    staff_api_client, unavailable_product_with_variant, permission_manage_products
):
    """Staff can fetch a single variant of an unpublished product."""
    unpublished_product = unavailable_product_with_variant
    variant = unpublished_product.variants.first()
    data = _fetch_variant(
        staff_api_client, variant, permissions=[permission_manage_products]
    )
    assert data["id"] == graphene.Node.to_global_id("ProductVariant", variant.pk)
    assert data["product"]["id"] == graphene.Node.to_global_id(
        "Product", unpublished_product.pk
    )
def test_fetch_unpublished_variant_customer(
    user_api_client, unavailable_product_with_variant
):
    """Customers get ``null`` for a variant of an unpublished product."""
    variant = unavailable_product_with_variant.variants.first()
    assert _fetch_variant(user_api_client, variant) is None
def test_fetch_unpublished_variant_anonymous_user(
    api_client, unavailable_product_with_variant
):
    """Anonymous users get ``null`` for a variant of an unpublished product."""
    variant = unavailable_product_with_variant.variants.first()
    assert _fetch_variant(api_client, variant) is None
PRODUCT_VARIANT_BULK_CREATE_MUTATION = """
mutation ProductVariantBulkCreate(
$variants: [ProductVariantBulkCreateInput]!, $productId: ID!
) {
productVariantBulkCreate(variants: $variants, product: $productId) {
bulkProductErrors {
field
message
code
index
}
productVariants{
id
sku
}
count
}
}
"""
def test_product_variant_bulk_create_by_attribute_id(
    staff_api_client, product, size_attribute, permission_manage_products
):
    """Bulk-creating one variant with an existing attribute value succeeds."""
    variant_count_before = ProductVariant.objects.count()
    value_count_before = size_attribute.values.count()
    attribute_value = size_attribute.values.last()
    sku = str(uuid4())[:12]
    variables = {
        "productId": graphene.Node.to_global_id("Product", product.pk),
        "variants": [
            {
                "sku": sku,
                "quantity": 1000,
                "costPrice": None,
                "priceOverride": None,
                "weight": 2.5,
                "trackInventory": True,
                "attributes": [
                    {
                        "id": graphene.Node.to_global_id(
                            "Attribute", size_attribute.pk
                        ),
                        "values": [attribute_value.name],
                    }
                ],
            }
        ],
    }
    staff_api_client.user.user_permissions.add(permission_manage_products)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_BULK_CREATE_MUTATION, variables
    )
    data = get_graphql_content(response)["data"]["productVariantBulkCreate"]
    assert not data["bulkProductErrors"]
    assert data["count"] == 1
    # Exactly one variant was added and no new attribute value was created.
    assert ProductVariant.objects.count() == variant_count_before + 1
    assert size_attribute.values.count() == value_count_before
    created = ProductVariant.objects.get(sku=sku)
    # Explicit nulls in the input leave both money fields unset.
    assert not created.cost_price
    assert not created.price_override
def test_product_variant_bulk_create_empty_attribute(
    staff_api_client, product, size_attribute, permission_manage_products
):
    """A variant may be bulk-created with an empty attributes list."""
    variant_count_before = ProductVariant.objects.count()
    variables = {
        "productId": graphene.Node.to_global_id("Product", product.pk),
        "variants": [{"sku": str(uuid4())[:12], "attributes": []}],
    }
    staff_api_client.user.user_permissions.add(permission_manage_products)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_BULK_CREATE_MUTATION, variables
    )
    data = get_graphql_content(response)["data"]["productVariantBulkCreate"]
    assert not data["bulkProductErrors"]
    assert data["count"] == 1
    assert ProductVariant.objects.count() == variant_count_before + 1
def test_product_variant_bulk_create_with_new_attribute_value(
    staff_api_client, product, size_attribute, permission_manage_products
):
    """Bulk create may reference an existing value and coin a brand-new one."""
    variant_count_before = ProductVariant.objects.count()
    value_count_before = size_attribute.values.count()
    attribute_id = graphene.Node.to_global_id("Attribute", size_attribute.pk)
    existing_value = size_attribute.values.last()
    variables = {
        "productId": graphene.Node.to_global_id("Product", product.pk),
        "variants": [
            {
                "sku": str(uuid4())[:12],
                "attributes": [{"id": attribute_id, "values": [existing_value.name]}],
            },
            {
                "sku": str(uuid4())[:12],
                "attributes": [{"id": attribute_id, "values": ["Test-attribute"]}],
            },
        ],
    }
    staff_api_client.user.user_permissions.add(permission_manage_products)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_BULK_CREATE_MUTATION, variables
    )
    data = get_graphql_content(response)["data"]["productVariantBulkCreate"]
    assert not data["bulkProductErrors"]
    assert data["count"] == 2
    assert ProductVariant.objects.count() == variant_count_before + 2
    # "Test-attribute" did not exist before, so exactly one value was created.
    assert size_attribute.values.count() == value_count_before + 1
def test_product_variant_bulk_create_negative_quantity(
    staff_api_client, product, size_attribute, permission_manage_products
):
    """A negative quantity on one row is rejected and the whole batch is
    aborted — the valid second row is not created either.
    """
    product_variant_count = ProductVariant.objects.count()
    product_id = graphene.Node.to_global_id("Product", product.pk)
    size_attribute_id = graphene.Node.to_global_id("Attribute", size_attribute.pk)
    variants = [
        {
            "sku": str(uuid4())[:12],
            "quantity": -1000,
            "attributes": [{"id": size_attribute_id, "values": ["Test-value"]}],
        },
        {
            "sku": str(uuid4())[:12],
            "quantity": 100,
            "attributes": [{"id": size_attribute_id, "values": ["Test-value2"]}],
        },
    ]
    variables = {"productId": product_id, "variants": variants}
    staff_api_client.user.user_permissions.add(permission_manage_products)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_BULK_CREATE_MUTATION, variables
    )
    content = get_graphql_content(response)
    data = content["data"]["productVariantBulkCreate"]
    assert len(data["bulkProductErrors"]) == 1
    error = data["bulkProductErrors"][0]
    assert error["field"] == "quantity"
    assert error["code"] == ProductErrorCode.INVALID.name
    # The error is attributed to the first (index 0) input row.
    assert error["index"] == 0
    # Nothing was created.
    assert product_variant_count == ProductVariant.objects.count()
def test_product_variant_bulk_create_duplicated_sku(
    staff_api_client,
    product,
    product_with_default_variant,
    size_attribute,
    permission_manage_products,
):
    """SKUs colliding with existing variants — even variants of *other*
    products — are reported per input row with a UNIQUE code.
    """
    product_variant_count = ProductVariant.objects.count()
    product_id = graphene.Node.to_global_id("Product", product.pk)
    size_attribute_id = graphene.Node.to_global_id("Attribute", size_attribute.pk)
    # Reuse SKUs that already exist on two different products.
    sku = product.variants.first().sku
    sku2 = product_with_default_variant.variants.first().sku
    assert not sku == sku2
    variants = [
        {
            "sku": sku,
            "attributes": [{"id": size_attribute_id, "values": ["Test-value"]}],
        },
        {
            "sku": sku2,
            "attributes": [{"id": size_attribute_id, "values": ["Test-valuee"]}],
        },
    ]
    variables = {"productId": product_id, "variants": variants}
    staff_api_client.user.user_permissions.add(permission_manage_products)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_BULK_CREATE_MUTATION, variables
    )
    content = get_graphql_content(response)
    data = content["data"]["productVariantBulkCreate"]
    assert len(data["bulkProductErrors"]) == 2
    errors = data["bulkProductErrors"]
    # One UNIQUE sku error per input row, in input order.
    for index, error in enumerate(errors):
        assert error["field"] == "sku"
        assert error["code"] == ProductErrorCode.UNIQUE.name
        assert error["index"] == index
    # Nothing was created.
    assert product_variant_count == ProductVariant.objects.count()
def test_product_variant_bulk_create_duplicated_sku_in_input(
    staff_api_client, product, size_attribute, permission_manage_products
):
    """Two input rows sharing one SKU: only the second row is rejected."""
    variant_count_before = ProductVariant.objects.count()
    size_attribute_id = graphene.Node.to_global_id("Attribute", size_attribute.pk)
    duplicated_sku = str(uuid4())[:12]
    variables = {
        "productId": graphene.Node.to_global_id("Product", product.pk),
        "variants": [
            {
                "sku": duplicated_sku,
                "attributes": [{"id": size_attribute_id, "values": ["Test-value"]}],
            },
            {
                "sku": duplicated_sku,
                "attributes": [{"id": size_attribute_id, "values": ["Test-value2"]}],
            },
        ],
    }
    staff_api_client.user.user_permissions.add(permission_manage_products)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_BULK_CREATE_MUTATION, variables
    )
    data = get_graphql_content(response)["data"]["productVariantBulkCreate"]
    errors = data["bulkProductErrors"]
    assert len(errors) == 1
    assert errors[0]["field"] == "sku"
    assert errors[0]["code"] == ProductErrorCode.UNIQUE.name
    # The second occurrence (index 1) is the one flagged as a duplicate.
    assert errors[0]["index"] == 1
    # Nothing was created.
    assert ProductVariant.objects.count() == variant_count_before
def test_product_variant_bulk_create_many_errors(
    staff_api_client, product, size_attribute, permission_manage_products
):
    """Multiple invalid rows produce one error each (negative quantity,
    duplicated SKU, unknown attribute) and the whole batch is aborted.
    """
    product_variant_count = ProductVariant.objects.count()
    product_id = graphene.Node.to_global_id("Product", product.pk)
    size_attribute_id = graphene.Node.to_global_id("Attribute", size_attribute.pk)
    # An id encoding pk=0 resolves to no Attribute row.
    non_existent_attribute_pk = 0
    invalid_attribute_id = graphene.Node.to_global_id(
        "Attribute", non_existent_attribute_pk
    )
    # Reuse an existing SKU to trigger a uniqueness error on row 2.
    sku = product.variants.first().sku
    variants = [
        {
            "sku": str(uuid4())[:12],
            "quantity": -1000,
            "attributes": [{"id": size_attribute_id, "values": ["Test-value1"]}],
        },
        {
            "sku": str(uuid4())[:12],
            "quantity": 100,
            "attributes": [{"id": size_attribute_id, "values": ["Test-value4"]}],
        },
        {
            "sku": sku,
            "quantity": 100,
            "attributes": [{"id": size_attribute_id, "values": ["Test-value2"]}],
        },
        {
            "sku": str(uuid4())[:12],
            "quantity": 100,
            "attributes": [{"id": invalid_attribute_id, "values": ["Test-value3"]}],
        },
    ]
    variables = {"productId": product_id, "variants": variants}
    staff_api_client.user.user_permissions.add(permission_manage_products)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_BULK_CREATE_MUTATION, variables
    )
    content = get_graphql_content(response)
    data = content["data"]["productVariantBulkCreate"]
    assert len(data["bulkProductErrors"]) == 3
    errors = data["bulkProductErrors"]
    # Row 1 is valid; rows 0, 2, and 3 each fail for a different reason.
    # "message": ANY makes the comparison ignore the exact error wording.
    expected_errors = [
        {
            "field": "quantity",
            "index": 0,
            "code": ProductErrorCode.INVALID.name,
            "message": ANY,
        },
        {
            "field": "sku",
            "index": 2,
            "code": ProductErrorCode.UNIQUE.name,
            "message": ANY,
        },
        {
            "field": "attributes",
            "index": 3,
            "code": ProductErrorCode.NOT_FOUND.name,
            "message": ANY,
        },
    ]
    for expected_error in expected_errors:
        assert expected_error in errors
    # Nothing was created.
    assert product_variant_count == ProductVariant.objects.count()
def test_product_variant_bulk_create_two_variants_duplicated_attribute_value(
    staff_api_client,
    product_with_variant_with_two_attributes,
    color_attribute,
    size_attribute,
    permission_manage_products,
):
    """A new variant whose attribute values exactly match an existing
    variant's (red/small, per the fixture) fails with a UNIQUE error.
    """
    product = product_with_variant_with_two_attributes
    product_variant_count = ProductVariant.objects.count()
    product_id = graphene.Node.to_global_id("Product", product.pk)
    color_attribute_id = graphene.Node.to_global_id("Attribute", color_attribute.id)
    size_attribute_id = graphene.Node.to_global_id("Attribute", size_attribute.id)
    variants = [
        {
            "sku": str(uuid4())[:12],
            "attributes": [
                {"id": color_attribute_id, "values": ["red"]},
                {"id": size_attribute_id, "values": ["small"]},
            ],
        }
    ]
    variables = {"productId": product_id, "variants": variants}
    staff_api_client.user.user_permissions.add(permission_manage_products)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_BULK_CREATE_MUTATION, variables
    )
    content = get_graphql_content(response)
    data = content["data"]["productVariantBulkCreate"]
    assert len(data["bulkProductErrors"]) == 1
    error = data["bulkProductErrors"][0]
    assert error["field"] == "attributes"
    assert error["code"] == ProductErrorCode.UNIQUE.name
    assert error["index"] == 0
    # Nothing was created.
    assert product_variant_count == ProductVariant.objects.count()
def test_product_variant_bulk_create_two_variants_duplicated_attribute_value_in_input(
    staff_api_client,
    product_with_variant_with_two_attributes,
    permission_manage_products,
    color_attribute,
    size_attribute,
):
    """Two input rows with identical attribute values: the second row fails."""
    product = product_with_variant_with_two_attributes
    variant_count_before = ProductVariant.objects.count()
    shared_attributes = [
        {
            "id": graphene.Node.to_global_id("Attribute", color_attribute.id),
            "values": [color_attribute.values.last().slug],
        },
        {
            "id": graphene.Node.to_global_id("Attribute", size_attribute.id),
            "values": [size_attribute.values.last().slug],
        },
    ]
    variables = {
        "productId": graphene.Node.to_global_id("Product", product.pk),
        "variants": [
            {"sku": str(uuid4())[:12], "attributes": shared_attributes},
            {"sku": str(uuid4())[:12], "attributes": shared_attributes},
        ],
    }
    staff_api_client.user.user_permissions.add(permission_manage_products)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_BULK_CREATE_MUTATION, variables
    )
    data = get_graphql_content(response)["data"]["productVariantBulkCreate"]
    errors = data["bulkProductErrors"]
    assert len(errors) == 1
    assert errors[0]["field"] == "attributes"
    assert errors[0]["code"] == ProductErrorCode.UNIQUE.name
    # The second occurrence (index 1) is the one flagged as a duplicate.
    assert errors[0]["index"] == 1
    # Nothing was created.
    assert ProductVariant.objects.count() == variant_count_before
def test_product_variant_bulk_create_two_variants_duplicated_one_attribute_value(
    staff_api_client,
    product_with_variant_with_two_attributes,
    color_attribute,
    size_attribute,
    permission_manage_products,
):
    """Sharing only one attribute value with an existing variant (red, but
    big instead of small) is still a unique combination and must succeed.
    """
    product = product_with_variant_with_two_attributes
    product_variant_count = ProductVariant.objects.count()
    product_id = graphene.Node.to_global_id("Product", product.pk)
    color_attribute_id = graphene.Node.to_global_id("Attribute", color_attribute.id)
    size_attribute_id = graphene.Node.to_global_id("Attribute", size_attribute.id)
    variants = [
        {
            "sku": str(uuid4())[:12],
            "attributes": [
                {"id": color_attribute_id, "values": ["red"]},
                {"id": size_attribute_id, "values": ["big"]},
            ],
        }
    ]
    variables = {"productId": product_id, "variants": variants}
    staff_api_client.user.user_permissions.add(permission_manage_products)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_BULK_CREATE_MUTATION, variables
    )
    content = get_graphql_content(response)
    data = content["data"]["productVariantBulkCreate"]
    assert not data["bulkProductErrors"]
    assert data["count"] == 1
    assert product_variant_count + 1 == ProductVariant.objects.count()
| 33.839404
| 88
| 0.605876
| 3,927
| 40,878
| 6.010441
| 0.05806
| 0.039105
| 0.036182
| 0.04491
| 0.80011
| 0.765623
| 0.724738
| 0.695208
| 0.66178
| 0.641952
| 0
| 0.005971
| 0.287147
| 40,878
| 1,207
| 89
| 33.86744
| 0.804015
| 0.009614
| 0
| 0.640408
| 0
| 0
| 0.299567
| 0.017213
| 0
| 0
| 0
| 0
| 0.099166
| 1
| 0.02873
| false
| 0
| 0.008341
| 0
| 0.038925
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
0a7907b1fba4824b77cc84eb460f43062d78a9c0
| 85
|
py
|
Python
|
Chapter 04/Chap04_Example4.10.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
Chapter 04/Chap04_Example4.10.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
Chapter 04/Chap04_Example4.10.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
def my_details2(name, age=31):
    """Print a short introduction for *name*, with an optional *age*.

    Bug fix: the original signature ``(age = 31, name)`` is a SyntaxError in
    Python — a parameter without a default may not follow one that has one.
    The required ``name`` parameter is moved first; ``age`` keeps its
    default of 31.
    """
    print(f'My name is {name} and age is {age} ')
| 28.333333
| 50
| 0.611765
| 16
| 85
| 3.1875
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046154
| 0.235294
| 85
| 2
| 51
| 42.5
| 0.738462
| 0
| 0
| 0
| 0
| 0
| 0.421687
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
0a9a446c647337637d2e368c9c8acf8edd46774b
| 81
|
py
|
Python
|
brainlogger/tests/__init__.py
|
Fassial/pku-intern
|
4463e7d5a5844c8002f7e3d01b4fadc3a20e2038
|
[
"MIT"
] | null | null | null |
brainlogger/tests/__init__.py
|
Fassial/pku-intern
|
4463e7d5a5844c8002f7e3d01b4fadc3a20e2038
|
[
"MIT"
] | null | null | null |
brainlogger/tests/__init__.py
|
Fassial/pku-intern
|
4463e7d5a5844c8002f7e3d01b4fadc3a20e2038
|
[
"MIT"
] | null | null | null |
"""
Created on 17:07, Apr. 15th, 2021
Author: fassial
Filename: __init__.py
"""
| 11.571429
| 33
| 0.679012
| 12
| 81
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 0.160494
| 81
| 6
| 34
| 13.5
| 0.602941
| 0.876543
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
0aad98a66f58fd9be47e570d56939f8dcf15a4d2
| 38
|
py
|
Python
|
forloop.py
|
vishalsharma08/Phynet_Phython_class
|
754ce5db52a85837abf22dbbd5cbd62878d1839d
|
[
"Apache-2.0"
] | null | null | null |
forloop.py
|
vishalsharma08/Phynet_Phython_class
|
754ce5db52a85837abf22dbbd5cbd62878d1839d
|
[
"Apache-2.0"
] | null | null | null |
forloop.py
|
vishalsharma08/Phynet_Phython_class
|
754ce5db52a85837abf22dbbd5cbd62878d1839d
|
[
"Apache-2.0"
] | null | null | null |
# Print the integers 0 through 8 (range upper bound is exclusive), one per line.
for i in range (0, 9):
    print (i)
| 9.5
| 22
| 0.5
| 8
| 38
| 2.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 0.342105
| 38
| 3
| 23
| 12.666667
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
0ad78cb6500cccd9bdbd144be0f6380833a8fde0
| 167
|
py
|
Python
|
river/models/managers/state.py
|
xuziheng1002/django-river
|
7c7f23aa4790e451019c3e2b4d29f35852de17e6
|
[
"BSD-3-Clause"
] | null | null | null |
river/models/managers/state.py
|
xuziheng1002/django-river
|
7c7f23aa4790e451019c3e2b4d29f35852de17e6
|
[
"BSD-3-Clause"
] | null | null | null |
river/models/managers/state.py
|
xuziheng1002/django-river
|
7c7f23aa4790e451019c3e2b4d29f35852de17e6
|
[
"BSD-3-Clause"
] | null | null | null |
from django.db import models
__author__ = 'ahmetdal'
class StateManager(models.Manager):
    # Enables Django natural-key (de)serialization for State objects using
    # the slug field instead of the auto-generated primary key.
    def get_by_natural_key(self, slug):
        """Return the single State whose ``slug`` matches the natural key."""
        return self.get(slug=slug)
| 18.555556
| 39
| 0.730539
| 23
| 167
| 5
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173653
| 167
| 8
| 40
| 20.875
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.047904
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
0aea1b3faa68456150e8de824cecdb8355e6601e
| 61
|
py
|
Python
|
nlcpy/_path.py
|
SX-Aurora/nlcpy
|
0a53eec8778073bc48b12687b7ce37ab2bf2b7e0
|
[
"BSD-3-Clause"
] | 11
|
2020-07-31T02:21:55.000Z
|
2022-03-10T03:12:11.000Z
|
nlcpy/_path.py
|
SX-Aurora/nlcpy
|
0a53eec8778073bc48b12687b7ce37ab2bf2b7e0
|
[
"BSD-3-Clause"
] | null | null | null |
nlcpy/_path.py
|
SX-Aurora/nlcpy
|
0a53eec8778073bc48b12687b7ce37ab2bf2b7e0
|
[
"BSD-3-Clause"
] | null | null | null |
import os
# Absolute path to the directory containing this module.
_here = os.path.abspath(os.path.dirname(__file__))
| 20.333333
| 50
| 0.770492
| 10
| 61
| 4.2
| 0.7
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081967
| 61
| 2
| 51
| 30.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
7c133236e97cfd6595b7d1230be9c061accdc58b
| 85
|
py
|
Python
|
eshop/apps.py
|
MartinStevko/SEN_eshop
|
aac42563b4e06327102fe208f97bcc9a15fbebe9
|
[
"MIT"
] | null | null | null |
eshop/apps.py
|
MartinStevko/SEN_eshop
|
aac42563b4e06327102fe208f97bcc9a15fbebe9
|
[
"MIT"
] | null | null | null |
eshop/apps.py
|
MartinStevko/SEN_eshop
|
aac42563b4e06327102fe208f97bcc9a15fbebe9
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class EshopConfig(AppConfig):
    """Django application configuration for the ``eshop`` app."""
    name = 'eshop'
| 14.166667
| 33
| 0.741176
| 10
| 85
| 6.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 85
| 5
| 34
| 17
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
7c164495f053d5c836d0e56e6f8a84316362246a
| 317
|
py
|
Python
|
pygame/Scene.py
|
koroshiya/python-calendar
|
57f27dd40cdafebe8c8a4091104d9b7164fc25ff
|
[
"MIT"
] | 9
|
2015-01-26T13:08:44.000Z
|
2021-08-10T07:03:48.000Z
|
pygame/Scene.py
|
koroshiya/python-calendar
|
57f27dd40cdafebe8c8a4091104d9b7164fc25ff
|
[
"MIT"
] | null | null | null |
pygame/Scene.py
|
koroshiya/python-calendar
|
57f27dd40cdafebe8c8a4091104d9b7164fc25ff
|
[
"MIT"
] | null | null | null |
class Scene(object):
    """Abstract base class for a scene.

    Subclasses are expected to override the hook methods below; every base
    implementation simply raises NotImplementedError.
    """
    def __init__(self):
        pass
    def render(self, screen):
        """Draw this scene onto *screen*. Must be overridden."""
        raise NotImplementedError
    def update(self):
        """Advance the scene's state by one step. Must be overridden."""
        raise NotImplementedError
    def handle_event(self, event):
        """React to a single input *event*. Must be overridden."""
        raise NotImplementedError
    def processEvent(self, arg):
        # NOTE(review): purpose of *arg* vs. handle_event's *event* is not
        # evident from this file — confirm against callers.
        raise NotImplementedError
| 21.133333
| 34
| 0.659306
| 31
| 317
| 6.580645
| 0.516129
| 0.470588
| 0.397059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.274448
| 317
| 15
| 35
| 21.133333
| 0.886957
| 0
| 0
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0.090909
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
7c29a47d182641c8be9c4d3a9a5f297dbf7f5610
| 130
|
py
|
Python
|
python/basic/io.py
|
Peterinor/Coding
|
3ac4908b3c6ae5df96390332e5452da934d12c3b
|
[
"MIT"
] | null | null | null |
python/basic/io.py
|
Peterinor/Coding
|
3ac4908b3c6ae5df96390332e5452da934d12c3b
|
[
"MIT"
] | null | null | null |
python/basic/io.py
|
Peterinor/Coding
|
3ac4908b3c6ae5df96390332e5452da934d12c3b
|
[
"MIT"
] | null | null | null |
# NOTE(review): Python 2 code (print statement, raw_input); will not run
# under Python 3.
# Prompt for a name, greet the user, then branch on a hard-coded name.
name = raw_input("input your name please:");
print "\n\nHello", name;
if name == "tangyu":
    print "TY"
else:
    print name
| 14.444444
| 44
| 0.615385
| 19
| 130
| 4.157895
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.223077
| 130
| 9
| 45
| 14.444444
| 0.782178
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
7c32349a6a91ea196018a8b2da7a686e2b75eb79
| 11,889
|
py
|
Python
|
tests/test_aioh2.py
|
m4ta1l/aioh2
|
2f9b76161e99e32317083cd2ebd17ce2ed3e41ab
|
[
"BSD-3-Clause"
] | 81
|
2016-02-07T11:11:57.000Z
|
2021-05-22T12:27:51.000Z
|
tests/test_aioh2.py
|
URenko/aioh2
|
8c1b5ab2399443087795fe52b71e43b652b1031f
|
[
"BSD-3-Clause"
] | 22
|
2016-02-14T03:37:37.000Z
|
2021-03-25T21:39:57.000Z
|
tests/test_aioh2.py
|
URenko/aioh2
|
8c1b5ab2399443087795fe52b71e43b652b1031f
|
[
"BSD-3-Clause"
] | 24
|
2016-03-01T05:22:33.000Z
|
2022-01-28T22:07:54.000Z
|
#!/usr/bin/env python
"""
test_aioh2
----------------------------------
Tests for `aioh2` module.
"""
import random
import unittest
import uuid
import asyncio
from h2.events import DataReceived
from h2.events import PingAcknowledged
from h2.events import RemoteSettingsChanged
from h2.events import ResponseReceived
from h2.events import SettingsAcknowledged
from h2.exceptions import FlowControlError
from h2.settings import SettingCodes
from aioh2 import SendException
from aioh2.helper import async_task
from . import async_test, BaseTestCase
class TestServer(BaseTestCase):
    """End-to-end tests for aioh2's server protocol against a raw h2 client.

    ``self.conn`` is the client-side h2 connection and ``self.server`` is the
    aioh2 server protocol under test; both are prepared by ``BaseTestCase``
    (not visible in this file chunk), together with the ``_expect_events`` /
    ``_send_headers`` / ``_assert_received`` helpers used below.
    """
    def test_connect(self):
        """Connection setup/teardown from BaseTestCase alone must succeed."""
        pass
    @async_test
    def test_ping(self):
        """A PING is acknowledged and echoes back the same opaque payload."""
        opaque_data = uuid.uuid4().bytes[:8]
        self.conn.ping(opaque_data)
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], PingAcknowledged)
        self.assertEqual(events[0].ping_data, opaque_data)
    @async_test
    def test_request_headers(self):
        """Sending request headers opens a new stream without errors."""
        yield from self._send_headers()
    @asyncio.coroutine
    def _test_read_frame(self, *, more, end_stream):
        """Exercise frame-by-frame ``read_stream``.

        :param more: also send a second DATA frame after the first.
        :param end_stream: close the stream with the final frame sent.
        """
        stream_id = yield from self._send_headers()
        data = b'x' * random.randint(128, 512)
        self.conn.send_data(stream_id, data,
                            end_stream=not more and end_stream)
        extra = b''
        if more:
            extra = b'y' * random.randint(128, 512)
            self.conn.send_data(stream_id, extra, end_stream=end_stream)
        yield from self._expect_connection_flow_control_disabled()
        # Each frame must be delivered exactly as sent, in order.
        yield from self._assert_received(
            stream_id, self.server.read_stream(stream_id), data)
        if more:
            yield from self._assert_received(
                stream_id, self.server.read_stream(stream_id), extra)
        if end_stream:
            # A closed stream terminates with one final empty read.
            frame = yield from self.server.read_stream(stream_id)
            self.assertEqual(frame, b'')
        else:
            # Stream still open: the next read must block until the timeout.
            try:
                yield from asyncio.wait_for(
                    self.server.read_stream(stream_id), 0.1)
            except asyncio.TimeoutError:
                pass
            else:
                # Reaching here means the read returned early; force a failure.
                self.assertRaises(asyncio.TimeoutError, lambda: None)
    @async_test
    def test_read_frame(self):
        yield from self._test_read_frame(more=True, end_stream=False)
    @async_test
    def test_read_frame_close(self):
        yield from self._test_read_frame(more=True, end_stream=True)
    @async_test
    def test_read_only_frame(self):
        yield from self._test_read_frame(more=False, end_stream=False)
    @async_test
    def test_read_only_frame_close(self):
        yield from self._test_read_frame(more=False, end_stream=True)
    @asyncio.coroutine
    def _test_read_all(self, *, more, end_stream):
        """Exercise ``read_stream(..., -1)``, which drains all buffered data.

        :param more: also send a second DATA frame after the first.
        :param end_stream: close the stream with the final frame sent.
        """
        stream_id = yield from self._send_headers()
        data = b'x' * random.randint(128, 512)
        self.conn.send_data(stream_id, data,
                            end_stream=not more and end_stream)
        if more:
            extra = b'y' * random.randint(128, 512)
            self.conn.send_data(stream_id, extra, end_stream=end_stream)
            data += extra
        yield from self._expect_connection_flow_control_disabled()
        if end_stream:
            # size=-1 returns everything sent so far, then one empty read.
            yield from self._assert_received(
                stream_id, self.server.read_stream(stream_id, -1), data)
            frame = yield from self.server.read_stream(stream_id, -1)
            self.assertEqual(frame, b'')
        else:
            # With the stream still open, read-all must block until timeout.
            try:
                yield from asyncio.wait_for(
                    self.server.read_stream(stream_id, -1), 0.1)
            except asyncio.TimeoutError:
                pass
            else:
                self.assertRaises(asyncio.TimeoutError, lambda: None)
    @async_test
    def test_read_all(self):
        yield from self._test_read_all(more=True, end_stream=False)
    @async_test
    def test_read_all_close(self):
        yield from self._test_read_all(more=True, end_stream=True)
    @async_test
    def test_read_all_only_frame(self):
        yield from self._test_read_all(more=False, end_stream=False)
    @async_test
    def test_read_all_only_frame_close(self):
        yield from self._test_read_all(more=False, end_stream=True)
    @asyncio.coroutine
    def _test_read_exactly(self, *, empty, explicit_close):
        """Exercise sized reads that span multiple buffered DATA frames.

        :param empty: additionally send a zero-length DATA frame.
        :param explicit_close: close via ``end_stream()`` instead of a flag
            on the last ``send_data`` call.
        """
        stream_id = yield from self._send_headers()
        self.conn.send_data(stream_id, b'333')
        self.conn.send_data(stream_id, b'55555')
        if empty:
            self.conn.send_data(stream_id, b'')
        yield from self._expect_connection_flow_control_disabled()
        yield from self._assert_received(
            stream_id, self.server.read_stream(stream_id, 2), b'33')
        self.conn.send_data(stream_id, b'88888888',
                            end_stream=not explicit_close)
        if explicit_close:
            self.conn.end_stream(stream_id)
            yield from self._expect_events(0)
        # Sized reads cross frame boundaries and drain leftovers in order.
        yield from self._assert_received(
            stream_id, self.server.read_stream(stream_id, 8), b'35555588')
        yield from self._assert_received(
            stream_id, self.server.read_stream(stream_id, 2), b'88')
        yield from self._assert_received(
            stream_id, self.server.read_stream(stream_id, 8), b'8888')
    @async_test
    def test_read_exactly(self):
        yield from self._test_read_exactly(empty=False, explicit_close=False)
    @async_test
    def test_read_exactly_empty_frame(self):
        yield from self._test_read_exactly(empty=True, explicit_close=False)
    @async_test
    def test_read_exactly_explicit_close(self):
        # NOTE(review): empty=True makes this identical to the
        # *_empty_frame_explicit_close case below; empty=False may have
        # been intended here — confirm with the original author.
        yield from self._test_read_exactly(empty=True, explicit_close=True)
    @async_test
    def test_read_exactly_empty_frame_explicit_close(self):
        yield from self._test_read_exactly(empty=True, explicit_close=True)
    @async_test
    def test_flow_control_settings(self):
        """A server-set INITIAL_WINDOW_SIZE limits what the client may send."""
        self.server.update_settings({SettingCodes.INITIAL_WINDOW_SIZE: 3})
        event = yield from self._expect_events()
        self.assertIsInstance(event[0], RemoteSettingsChanged)
        event = yield from self.server.events.get()
        self.assertIsInstance(event, SettingsAcknowledged)
        stream_id = yield from self._send_headers()
        self.conn.send_data(stream_id, b'xx')
        yield from self._expect_connection_flow_control_disabled()
        yield from self._assert_received(
            stream_id, self.server.read_stream(stream_id, 2), b'xx')
        self.conn.send_data(stream_id, b'xxx')
        yield from self._expect_events(0)
        yield from self._assert_received(
            stream_id, self.server.read_stream(stream_id, 3), b'xxx')
        # A 4-byte frame exceeds the 3-byte window and must be rejected.
        self.assertRaises(FlowControlError,
                          self.conn.send_data, stream_id, b'xxxx')
    @async_test
    def test_flow_control(self):
        """A client-imposed window throttles server sends until widened."""
        self.conn.update_settings({SettingCodes.INITIAL_WINDOW_SIZE: 3})
        event = yield from self._expect_events()
        self.assertIsInstance(event[0], SettingsAcknowledged)
        event = yield from self.server.events.get()
        self.assertIsInstance(event, RemoteSettingsChanged)
        stream_id = yield from self._send_headers(end_stream=True)
        yield from self.server.start_response(stream_id, [(':status', '200')])
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], ResponseReceived)
        yield from self.server.send_data(stream_id, b'12')
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], DataReceived)
        self.assertEqual(events[0].data, b'12')
        # Only 1 byte of window remains, so sending b'34' must stall;
        # the client should observe just b'3' before the timeout fires.
        try:
            yield from asyncio.wait_for(
                self.server.send_data(stream_id, b'34'), 0.1)
        except asyncio.TimeoutError:
            events = yield from self._expect_events()
            self.assertIsInstance(events[0], DataReceived)
            self.assertEqual(events[0].data, b'3')
        else:
            self.assertRaises(asyncio.TimeoutError, lambda: None)
        # Widen the window by 3 and repeat: b'567' fits, b'8' stalls.
        self.conn.increment_flow_control_window(3, stream_id=stream_id)
        yield from self._expect_events(0)
        try:
            yield from asyncio.wait_for(
                self.server.send_data(stream_id, b'5678'), 0.1)
        except asyncio.TimeoutError:
            events = yield from self._expect_events()
            self.assertIsInstance(events[0], DataReceived)
            self.assertEqual(events[0].data, b'567')
        else:
            self.assertRaises(asyncio.TimeoutError, lambda: None)
    @async_test
    def test_broken_send(self):
        """Resetting a stream mid-send raises SendException with unsent bytes."""
        self.conn.update_settings({SettingCodes.INITIAL_WINDOW_SIZE: 3})
        event = yield from self._expect_events()
        self.assertIsInstance(event[0], SettingsAcknowledged)
        event = yield from self.server.events.get()
        self.assertIsInstance(event, RemoteSettingsChanged)
        stream_id = yield from self._send_headers(end_stream=True)
        yield from self.server.start_response(stream_id, [(':status', '200')])
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], ResponseReceived)
        yield from self.server.send_data(stream_id, b'12')
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], DataReceived)
        self.assertEqual(events[0].data, b'12')
        # Start a send that can only partially complete (window allows 1 byte).
        f = async_task(self.server.send_data(stream_id, b'345678'))
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], DataReceived)
        self.assertEqual(events[0].data, b'3')
        self.conn.reset_stream(stream_id)
        yield from self._expect_events(0)
        try:
            yield from f
        except SendException as e:
            # The exception carries exactly the bytes that never went out.
            self.assertEqual(e.data, b'45678')
        else:
            self.assertRaises(SendException, lambda: None)
    @unittest.skip("flakey - https://github.com/decentfox/aioh2/issues/17")
    @async_test(timeout=8)
    def test_priority(self):
        """Bytes sent on two streams should roughly follow their h2 weights."""
        self.conn.update_settings({
            SettingCodes.MAX_FRAME_SIZE: 16384,
            SettingCodes.INITIAL_WINDOW_SIZE: 16384 * 1024 * 32,
        })
        event = yield from self._expect_events()
        self.assertIsInstance(event[0], SettingsAcknowledged)
        event = yield from self.server.events.get()
        self.assertIsInstance(event, RemoteSettingsChanged)
        stream_1 = yield from self._send_headers()
        yield from self.server.start_response(stream_1, [(':status', '200')])
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], ResponseReceived)
        stream_2 = yield from self._send_headers()
        yield from self.server.start_response(stream_2, [(':status', '200')])
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], ResponseReceived)
        p1 = 32
        p2 = 20
        self.server.reprioritize(stream_1, weight=p1)
        self.server.reprioritize(stream_2, weight=p2)
        self.server.pause_writing()
        running = [True]
        @asyncio.coroutine
        def _write(stream_id):
            # Count how many 1-byte frames this stream manages to send.
            count = 0
            while running[0]:
                yield from self.server.send_data(stream_id, b'x')
                count += 1
            yield from self.server.end_stream(stream_id)
            return count
        task_1 = async_task(_write(stream_1))
        task_2 = async_task(_write(stream_2))
        # Alternate resume/pause so the writer arbitrates between streams.
        for i in range(1000):
            self.server.resume_writing()
            yield from asyncio.sleep(0.004)
            self.server.pause_writing()
            yield from asyncio.sleep(0.001)
        running[0] = False
        self.server.resume_writing()
        count_1 = yield from task_1
        count_2 = yield from task_2
        # Throughput ratio should approximate the weight ratio (1 decimal).
        self.assertAlmostEqual(count_1 / count_2, p1 / p2, 1)
# Allow running this test module directly; exit status reflects the result.
if __name__ == '__main__':
    import sys
    sys.exit(unittest.main())
| 34.967647
| 78
| 0.650601
| 1,491
| 11,889
| 4.925553
| 0.111335
| 0.091912
| 0.11683
| 0.056917
| 0.76634
| 0.755583
| 0.733252
| 0.705065
| 0.687092
| 0.652233
| 0
| 0.024662
| 0.253091
| 11,889
| 339
| 79
| 35.070796
| 0.802365
| 0.007822
| 0
| 0.520913
| 0
| 0
| 0.015439
| 0
| 0
| 0
| 0
| 0
| 0.163498
| 1
| 0.087452
| false
| 0.011407
| 0.057034
| 0
| 0.152091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7c3edb126f87d68632ff3faa003d3e9983946991
| 164
|
py
|
Python
|
src/brouwers/utils/widgets.py
|
modelbrouwers/modelbrouwers
|
e0ba4819bf726d6144c0a648fdd4731cdc098a52
|
[
"MIT"
] | 6
|
2015-03-03T13:23:07.000Z
|
2021-12-19T18:12:41.000Z
|
src/brouwers/utils/widgets.py
|
modelbrouwers/modelbrouwers
|
e0ba4819bf726d6144c0a648fdd4731cdc098a52
|
[
"MIT"
] | 95
|
2015-02-07T00:55:39.000Z
|
2022-02-08T20:22:05.000Z
|
src/brouwers/utils/widgets.py
|
modelbrouwers/modelbrouwers
|
e0ba4819bf726d6144c0a648fdd4731cdc098a52
|
[
"MIT"
] | 2
|
2016-03-22T16:53:26.000Z
|
2019-02-09T22:46:04.000Z
|
from django.forms.widgets import NumberInput, RadioSelect
class RangeInput(NumberInput):
    """Numeric input rendered as an HTML slider (``<input type="range">``)."""
    # Overrides NumberInput's default input_type of "number".
    input_type = "range"
class StarRatingSelect(RadioSelect):
    """Plain RadioSelect alias; presumably restyled as stars via
    template/CSS elsewhere — behavior is unchanged here."""
    pass
| 16.4
| 57
| 0.77439
| 17
| 164
| 7.411765
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152439
| 164
| 9
| 58
| 18.222222
| 0.906475
| 0
| 0
| 0
| 0
| 0
| 0.030488
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.2
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
7c404c7ca659bdfc2c524ae50a2e12755c2be506
| 666
|
py
|
Python
|
core/serializers.py
|
jonathan-odonnell/equestrian-forum
|
9d3acefaccf3800dbdd134a34d15480c7eff85b0
|
[
"MIT"
] | null | null | null |
core/serializers.py
|
jonathan-odonnell/equestrian-forum
|
9d3acefaccf3800dbdd134a34d15480c7eff85b0
|
[
"MIT"
] | null | null | null |
core/serializers.py
|
jonathan-odonnell/equestrian-forum
|
9d3acefaccf3800dbdd134a34d15480c7eff85b0
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from .models import Category, Comment, Post, Image
class CategorySerializer(serializers.ModelSerializer):
    """Serialize a Category, exposing only its name."""
    class Meta:
        model = Category
        fields = ['name']
class PostSerializer(serializers.ModelSerializer):
    """Serialize a Post with its display fields and URL slug."""
    class Meta:
        model = Post
        fields = ['title', 'description', 'category', 'date', 'slug']
class ImageSerializer(serializers.ModelSerializer):
    """Serialize an Image together with the post it belongs to."""
    class Meta:
        model = Image
        fields = ['post', 'image']
class CommentSerializer(serializers.ModelSerializer):
    """Serialize a Comment including its vote counters."""
    class Meta:
        model = Comment
        fields = ['post', 'comment', 'date', 'up_vote', 'down_vote']
| 24.666667
| 69
| 0.665165
| 63
| 666
| 6.984127
| 0.428571
| 0.236364
| 0.281818
| 0.318182
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 666
| 26
| 70
| 25.615385
| 0.849421
| 0
| 0
| 0.222222
| 0
| 0
| 0.114114
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
7c57174543adc9eaaafe3226e67663471bcfe331
| 121
|
py
|
Python
|
code/config.py
|
oguzp1/ComputerProject1.3
|
a96e2daac32d6a34dc6e0e1434b6da7cc1670391
|
[
"MIT"
] | 7
|
2020-11-09T09:24:42.000Z
|
2021-09-17T07:35:05.000Z
|
code/config.py
|
oguzp1/ComputerProject1.3
|
a96e2daac32d6a34dc6e0e1434b6da7cc1670391
|
[
"MIT"
] | null | null | null |
code/config.py
|
oguzp1/ComputerProject1.3
|
a96e2daac32d6a34dc6e0e1434b6da7cc1670391
|
[
"MIT"
] | 4
|
2020-10-05T14:06:54.000Z
|
2020-11-23T19:33:35.000Z
|
# (host, port) pair where the name server listens.
name_server_info = ('localhost', 9999)
# HTTP base URL derived from that same host/port pair.
name_server_url = 'http://{}:{}'.format(*name_server_info)
| 40.333333
| 81
| 0.727273
| 18
| 121
| 4.444444
| 0.555556
| 0.5
| 0.525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053097
| 0.066116
| 121
| 2
| 82
| 60.5
| 0.654867
| 0
| 0
| 0
| 0
| 0
| 0.173554
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7c69ee2504f2d4f6e9af90029aa4bffad2aed27e
| 729
|
py
|
Python
|
lokalhood_api/permissions.py
|
SaxenaKartik/lokalhood
|
6c8da25b6054d31549cc9cb8cbffe16d9302f37e
|
[
"MIT"
] | null | null | null |
lokalhood_api/permissions.py
|
SaxenaKartik/lokalhood
|
6c8da25b6054d31549cc9cb8cbffe16d9302f37e
|
[
"MIT"
] | 5
|
2021-03-30T14:11:35.000Z
|
2021-09-22T19:14:57.000Z
|
lokalhood_api/permissions.py
|
SaxenaKartik/lokalhood
|
6c8da25b6054d31549cc9cb8cbffe16d9302f37e
|
[
"MIT"
] | null | null | null |
from rest_framework import permissions
class UpdateOwnProfile(permissions.BasePermission):
    """ Allow users to edit their own profile"""
    def has_object_permission(self, request, view, obj):
        """ check if user is trying to edit his own profile"""
        # Read-only methods (GET/HEAD/OPTIONS) are always allowed.
        if request.method in permissions.SAFE_METHODS:
            return True
        # Writes are allowed only on the caller's own profile object.
        return obj.id==request.user.id
class UpdateOwnRequest(permissions.BasePermission):
    """ Allow users to edit their own request"""
    def has_object_permission(self, request, view, obj):
        """ check if user is trying to edit his own request"""
        # Read-only methods (GET/HEAD/OPTIONS) are always allowed.
        if request.method in permissions.SAFE_METHODS:
            return True
        # Writes are allowed only when the object belongs to the caller.
        return obj.user.id==request.user.id
| 31.695652
| 62
| 0.687243
| 94
| 729
| 5.255319
| 0.393617
| 0.048583
| 0.121457
| 0.1417
| 0.748988
| 0.748988
| 0.748988
| 0.748988
| 0.550607
| 0.550607
| 0
| 0
| 0.229081
| 729
| 22
| 63
| 33.136364
| 0.879004
| 0.234568
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0.090909
| 0
| 0.818182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
7c8e59f214217c7e75fae39e5406d5a1df98a8f2
| 222
|
py
|
Python
|
website/website/userAuthentication/views.py
|
Ferdous-Al-Imran/Aqualizer
|
5a718f81183c82bf9d82abdd00fe4baeb0bf9c71
|
[
"MIT"
] | null | null | null |
website/website/userAuthentication/views.py
|
Ferdous-Al-Imran/Aqualizer
|
5a718f81183c82bf9d82abdd00fe4baeb0bf9c71
|
[
"MIT"
] | null | null | null |
website/website/userAuthentication/views.py
|
Ferdous-Al-Imran/Aqualizer
|
5a718f81183c82bf9d82abdd00fe4baeb0bf9c71
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.views.generic import TemplateView
class LoginView(TemplateView):
    """Render the static login page."""
    template_name = "login.html"
class RegisterView(TemplateView):
    """Render the static registration page."""
    template_name = "register.html"
| 17.076923
| 45
| 0.779279
| 25
| 222
| 6.84
| 0.64
| 0.116959
| 0.280702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144144
| 222
| 13
| 46
| 17.076923
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0.103139
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
7ca0afe76f83ef7cffe9b2603f5061c46b721772
| 210
|
py
|
Python
|
robots/big/strategies/tests/test_aktuatora/test_liftova/init.py
|
memristor/mep2
|
bc5cddacba3d740f791f3454b8cb51bda83ce202
|
[
"MIT"
] | 5
|
2018-11-27T15:15:00.000Z
|
2022-02-10T21:44:13.000Z
|
robots/big/strategies/tests/test_aktuatora/test_liftova/init.py
|
memristor/mep2
|
bc5cddacba3d740f791f3454b8cb51bda83ce202
|
[
"MIT"
] | 2
|
2018-10-20T15:48:40.000Z
|
2018-11-20T05:11:33.000Z
|
robots/big/strategies/tests/test_aktuatora/test_liftova/init.py
|
memristor/mep2
|
bc5cddacba3d740f791f3454b8cb51bda83ce202
|
[
"MIT"
] | 1
|
2020-02-07T12:44:47.000Z
|
2020-02-07T12:44:47.000Z
|
def run():
    """Actuator test: run the pump while cycling both lifts 1 -> 0 -> 1.

    Uses the project's strategy DSL (pump/llift/rlift/_parallel/_sync),
    which is injected by the strategy runner — none of these names are
    defined in this file.
    """
    #with _while(1):
    pump(0,1)
    # NOTE(review): this first block uses _e._parallel() while the later
    # two use the bare _parallel() helper — confirm both resolve to the
    # same DSL construct; otherwise this looks like a typo.
    with _e._parallel():
        llift(1)
        rlift(1)
        _sync()
    with _parallel():
        llift(0)
        rlift(0)
        _sync()
    with _parallel():
        llift(1)
        rlift(1)
        _sync()
    pump(0,0)
| 9.130435
| 21
| 0.57619
| 33
| 210
| 3.424242
| 0.363636
| 0.345133
| 0.247788
| 0.336283
| 0.424779
| 0.424779
| 0
| 0
| 0
| 0
| 0
| 0.067901
| 0.228571
| 210
| 22
| 22
| 9.545455
| 0.62963
| 0.071429
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| true
| 0
| 0
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7cb50370ea83e843aff3e2289acf9e5049723c8f
| 1,525
|
py
|
Python
|
python/phonenumbers/data/region_CK.py
|
Eyepea/python-phonenumbers
|
0336e191fda80a21ed5c19d5e029ad8c70f620ee
|
[
"Apache-2.0"
] | 2
|
2019-03-30T02:12:54.000Z
|
2021-03-08T18:59:40.000Z
|
python/phonenumbers/data/region_CK.py
|
Eyepea/python-phonenumbers
|
0336e191fda80a21ed5c19d5e029ad8c70f620ee
|
[
"Apache-2.0"
] | null | null | null |
python/phonenumbers/data/region_CK.py
|
Eyepea/python-phonenumbers
|
0336e191fda80a21ed5c19d5e029ad8c70f620ee
|
[
"Apache-2.0"
] | 1
|
2018-11-10T03:47:34.000Z
|
2018-11-10T03:47:34.000Z
|
"""Auto-generated file, do not edit by hand. CK metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Phone metadata for the Cook Islands (CK): country code 682, 5-digit
# national numbers, '00' international dialling prefix. This file is
# auto-generated (see module docstring) — regenerate rather than hand-edit.
PHONE_METADATA_CK = PhoneMetadata(id='CK', country_code=682, international_prefix='00',
    general_desc=PhoneNumberDesc(national_number_pattern='[2-57]\\d{4}', possible_number_pattern='\\d{5}'),
    fixed_line=PhoneNumberDesc(national_number_pattern='(?:2\\d|3[13-7]|4[1-5])\\d{3}', possible_number_pattern='\\d{5}', example_number='21234'),
    mobile=PhoneNumberDesc(national_number_pattern='(?:5[0-68]|7\\d)\\d{3}', possible_number_pattern='\\d{5}', example_number='71234'),
    # The remaining number categories do not exist for CK ('NA' patterns).
    toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    # National formatting: a 2-digit group then a 3-digit group ("21 234").
    number_format=[NumberFormat(pattern='(\\d{2})(\\d{3})', format=u'\\1 \\2')])
| 84.722222
| 146
| 0.772459
| 195
| 1,525
| 5.717949
| 0.317949
| 0.279821
| 0.242152
| 0.387444
| 0.647534
| 0.560538
| 0.560538
| 0.560538
| 0.560538
| 0
| 0
| 0.028812
| 0.066885
| 1,525
| 17
| 147
| 89.705882
| 0.754744
| 0.034754
| 0
| 0
| 1
| 0.066667
| 0.105048
| 0.034789
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
7cbc517e8fadf266e09fdd8c57803069c8be43c1
| 86
|
py
|
Python
|
Server/main.py
|
mannby/GeoChat
|
8dfae2975c57dc467acd2a1233a07d95dc39bad9
|
[
"MIT"
] | null | null | null |
Server/main.py
|
mannby/GeoChat
|
8dfae2975c57dc467acd2a1233a07d95dc39bad9
|
[
"MIT"
] | null | null | null |
Server/main.py
|
mannby/GeoChat
|
8dfae2975c57dc467acd2a1233a07d95dc39bad9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""Main"""
import server
# Start the server only when executed as a script, so importing this
# module (e.g. from tests or tooling) no longer launches it as a side
# effect — previously server.main() ran unconditionally at import time.
if __name__ == "__main__":
    server.main()
| 12.285714
| 23
| 0.593023
| 12
| 86
| 4.25
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013514
| 0.139535
| 86
| 6
| 24
| 14.333333
| 0.675676
| 0.546512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
7cc1c72b0fb64b4df6a1beb5f397d4b011c40ce8
| 283
|
py
|
Python
|
jupyter_interval_widget/__init__.py
|
srizzo/jupyter-interval-widget
|
d5558af7a42c7f9cef7c28b77bd7f8e18034fb6c
|
[
"MIT"
] | 4
|
2019-08-02T07:03:59.000Z
|
2021-04-07T21:04:52.000Z
|
jupyter_interval_widget/__init__.py
|
srizzo/jupyter-interval-widget
|
d5558af7a42c7f9cef7c28b77bd7f8e18034fb6c
|
[
"MIT"
] | 2
|
2020-07-17T10:54:56.000Z
|
2021-03-09T09:31:27.000Z
|
jupyter_interval_widget/__init__.py
|
srizzo/jupyter-interval-widget
|
d5558af7a42c7f9cef7c28b77bd7f8e18034fb6c
|
[
"MIT"
] | 3
|
2019-08-16T13:06:23.000Z
|
2021-04-07T21:05:05.000Z
|
from ._version import version_info, __version__
from .interval import *
def _jupyter_nbextension_paths():
return [{
'section': 'notebook',
'src': 'static',
'dest': 'jupyter-interval-widget',
'require': 'jupyter-interval-widget/extension'
}]
| 23.583333
| 54
| 0.636042
| 27
| 283
| 6.333333
| 0.666667
| 0.175439
| 0.245614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.226148
| 283
| 11
| 55
| 25.727273
| 0.780822
| 0
| 0
| 0
| 0
| 0
| 0.321555
| 0.19788
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| true
| 0
| 0.222222
| 0.111111
| 0.444444
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
7cd508cd1523272cadd32e55d0dd6c1d258fbaa3
| 200
|
py
|
Python
|
darq/__init__.py
|
antonmyronyuk/darq
|
801ec63091b4dc84de38f774ffc0362a7f2fd87d
|
[
"MIT"
] | 42
|
2020-02-21T15:38:56.000Z
|
2022-03-20T21:14:59.000Z
|
darq/__init__.py
|
antonmyronyuk/darq
|
801ec63091b4dc84de38f774ffc0362a7f2fd87d
|
[
"MIT"
] | 335
|
2020-03-03T13:59:25.000Z
|
2022-03-31T12:03:20.000Z
|
darq/__init__.py
|
antonmyronyuk/darq
|
801ec63091b4dc84de38f774ffc0362a7f2fd87d
|
[
"MIT"
] | 4
|
2021-01-06T21:41:52.000Z
|
2021-09-26T11:32:49.000Z
|
from .app import Darq
from .connections import RedisSettings
from .cron import cron
from .types import JobCtx
from .worker import Retry
# Public API of the darq package, re-exported from the submodules above.
__all__ = ['Darq', 'JobCtx', 'RedisSettings', 'cron', 'Retry']
| 25
| 62
| 0.75
| 26
| 200
| 5.615385
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14
| 200
| 7
| 63
| 28.571429
| 0.848837
| 0
| 0
| 0
| 0
| 0
| 0.16
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.833333
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
7cf57afb9e3834caf1e5d38bd9926751996a1d95
| 139
|
py
|
Python
|
python/code/dns/dnsmx.py
|
hgfgood/note
|
f89febca3ce925cba4cd4c8068a4fa124f23c810
|
[
"Apache-2.0"
] | null | null | null |
python/code/dns/dnsmx.py
|
hgfgood/note
|
f89febca3ce925cba4cd4c8068a4fa124f23c810
|
[
"Apache-2.0"
] | null | null | null |
python/code/dns/dnsmx.py
|
hgfgood/note
|
f89febca3ce925cba4cd4c8068a4fa124f23c810
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin/python
import dns.resolver
# Domain whose mail-exchanger (MX) records we look up.
domain="163.com"
# Query the resolver for MX records (dnspython; query() is the legacy API).
MX = dns.resolver.query(domain,'MX')
for i in MX:
    # Python 2 print statement: preference value, then exchange hostname.
    print i.preference, i.exchange
| 17.375
| 36
| 0.719424
| 24
| 139
| 4.166667
| 0.708333
| 0.22
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02459
| 0.122302
| 139
| 7
| 37
| 19.857143
| 0.795082
| 0.122302
| 0
| 0
| 0
| 0
| 0.07438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.2
| null | null | 0.2
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6b1f1540117728c1ab8dec8e7fbf2f943141f0ac
| 142
|
py
|
Python
|
examples/old/hallway/hallway_config.py
|
harrysorensennrel/rlmolecule
|
978269400b90f752bf4741f42f03522603b321e2
|
[
"BSD-3-Clause"
] | null | null | null |
examples/old/hallway/hallway_config.py
|
harrysorensennrel/rlmolecule
|
978269400b90f752bf4741f42f03522603b321e2
|
[
"BSD-3-Clause"
] | null | null | null |
examples/old/hallway/hallway_config.py
|
harrysorensennrel/rlmolecule
|
978269400b90f752bf4741f42f03522603b321e2
|
[
"BSD-3-Clause"
] | null | null | null |
class HallwayConfig:
    """Configuration for the hallway example.

    size: length of the hallway (default 5).
    max_steps: step budget per episode (default 32).
    """

    def __init__(self, size: int = 5, max_steps: int = 32):
        # Store both settings verbatim on the instance.
        self.size, self.max_steps = size, max_steps
| 23.666667
| 59
| 0.626761
| 20
| 142
| 4.1
| 0.55
| 0.292683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029126
| 0.274648
| 142
| 5
| 60
| 28.4
| 0.76699
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6b1f241e3aa19db56bed1606a57636240da2a979
| 524
|
py
|
Python
|
general/truthiness.py
|
bpuderer/python-snippets27
|
8d51ff34c48bee1247575536d8ed506eafde8631
|
[
"MIT"
] | 3
|
2015-11-20T14:30:53.000Z
|
2015-12-19T05:55:19.000Z
|
general/truthiness.py
|
bpuderer/python-snippets27
|
8d51ff34c48bee1247575536d8ed506eafde8631
|
[
"MIT"
] | null | null | null |
general/truthiness.py
|
bpuderer/python-snippets27
|
8d51ff34c48bee1247575536d8ed506eafde8631
|
[
"MIT"
] | 1
|
2016-01-05T20:54:49.000Z
|
2016-01-05T20:54:49.000Z
|
# Demonstrates Python 2 truthiness: each falsy value fails `if not ...`.
if not 0:
    print "zero value ints are False"
if not 0.0:
    print "zero value floats are False"
if not None:
    print "None is False. Use is/is not to check for None."
if not False:
    print "False is False"
if not '':
    print "empty strings are False"
if not []:
    print "empty lists are False"
if not ():
    print "empty tuples are False"
if not {}:
    print "dicts with no keys are False"
if not set([]):
    print "empty sets are False"
# A countdown can rely on int truthiness instead of comparing to zero.
num = 4
#while num > 0:
while num:
    print num
    num -= 1
| 15.878788
| 59
| 0.622137
| 90
| 524
| 3.622222
| 0.333333
| 0.138037
| 0.214724
| 0.239264
| 0.257669
| 0.141104
| 0
| 0
| 0
| 0
| 0
| 0.015957
| 0.282443
| 524
| 32
| 60
| 16.375
| 0.851064
| 0.026718
| 0
| 0
| 0
| 0
| 0.445973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.454545
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
6b25d2cf27b36d50234d3f5ca02da11e9c83ae28
| 229
|
py
|
Python
|
catalyst/dl/utils/criterion/__init__.py
|
sergeyshilin/catalyst
|
f4dfaac7bc3fe98b2a0a9cf0b4347b100750f82f
|
[
"Apache-2.0"
] | 3
|
2019-11-02T05:37:06.000Z
|
2020-01-13T02:26:07.000Z
|
catalyst/dl/utils/criterion/__init__.py
|
sergeyshilin/catalyst
|
f4dfaac7bc3fe98b2a0a9cf0b4347b100750f82f
|
[
"Apache-2.0"
] | null | null | null |
catalyst/dl/utils/criterion/__init__.py
|
sergeyshilin/catalyst
|
f4dfaac7bc3fe98b2a0a9cf0b4347b100750f82f
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
from .accuracy import accuracy, average_accuracy, mean_average_accuracy
from .dice import dice
from .f1_score import f1_score
from .focal import sigmoid_focal_loss, reduced_focal_loss
from .iou import iou, jaccard
| 32.714286
| 71
| 0.834061
| 35
| 229
| 5.2
| 0.457143
| 0.164835
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014851
| 0.117904
| 229
| 6
| 72
| 38.166667
| 0.886139
| 0.052402
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
6b3e01368b40f2d623f57c5d72b29ee79f6e1f1c
| 49
|
py
|
Python
|
apps/classes/__init__.py
|
ECNU-Studio/emoc
|
b11d1ebe91e2d9a4bc5b74ca7be3a13137f1c53c
|
[
"MIT"
] | 1
|
2018-03-10T08:50:18.000Z
|
2018-03-10T08:50:18.000Z
|
apps/classes/__init__.py
|
ECNU-Studio/emoc
|
b11d1ebe91e2d9a4bc5b74ca7be3a13137f1c53c
|
[
"MIT"
] | 13
|
2018-04-28T02:33:21.000Z
|
2018-05-04T09:05:38.000Z
|
apps/classes/__init__.py
|
ECNU-Studio/emoc
|
b11d1ebe91e2d9a4bc5b74ca7be3a13137f1c53c
|
[
"MIT"
] | null | null | null |
# Points Django at this app's AppConfig subclass.
# NOTE(review): default_app_config is deprecated in modern Django (3.2+),
# where the config is auto-discovered — confirm the project's Django
# version before keeping this line.
default_app_config = "classes.apps.ClassesConfig"
| 49
| 49
| 0.857143
| 6
| 49
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040816
| 49
| 1
| 49
| 49
| 0.851064
| 0
| 0
| 0
| 0
| 0
| 0.52
| 0.52
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6b4958b80ced5474d96fff8949384d0775128d78
| 51,727
|
py
|
Python
|
Lib/gftools/fonts_public_pb2.py
|
twardoch/gftools
|
a621b41c6e806326acfd174567cfcc7145c9c525
|
[
"Apache-2.0"
] | 351
|
2015-01-12T09:27:03.000Z
|
2022-03-24T14:37:56.000Z
|
Lib/gftools/fonts_public_pb2.py
|
twardoch/gftools
|
a621b41c6e806326acfd174567cfcc7145c9c525
|
[
"Apache-2.0"
] | 2,308
|
2015-01-07T10:49:14.000Z
|
2022-03-31T22:55:21.000Z
|
Lib/gftools/fonts_public_pb2.py
|
twardoch/gftools
|
a621b41c6e806326acfd174567cfcc7145c9c525
|
[
"Apache-2.0"
] | 89
|
2015-03-02T17:31:04.000Z
|
2022-03-16T13:18:59.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: fonts_public.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='fonts_public.proto',
package='google.fonts',
syntax='proto2',
serialized_options=b'\n\026com.google.fonts.protoB\013FontsPublic',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x12\x66onts_public.proto\x12\x0cgoogle.fonts\"\xfe\x05\n\x0b\x46\x61milyProto\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x10\n\x08\x64\x65signer\x18\x02 \x02(\t\x12\x0f\n\x07license\x18\x03 \x02(\t\x12\x10\n\x08\x63\x61tegory\x18\x04 \x02(\t\x12\x12\n\ndate_added\x18\x05 \x02(\t\x12&\n\x05\x66onts\x18\x06 \x03(\x0b\x32\x17.google.fonts.FontProto\x12\x0f\n\x07\x61liases\x18\x07 \x03(\t\x12\x0f\n\x07subsets\x18\x08 \x03(\t\x12\x19\n\x11ttf_autohint_args\x18\t \x01(\t\x12,\n\x04\x61xes\x18\n \x03(\x0b\x32\x1e.google.fonts.AxisSegmentProto\x12[\n\x1aregistry_default_overrides\x18\x0b \x03(\x0b\x32\x37.google.fonts.FamilyProto.RegistryDefaultOverridesEntry\x12)\n\x06source\x18\x0c \x01(\x0b\x32\x19.google.fonts.SourceProto\x12\x0f\n\x07is_noto\x18\r \x01(\x08\x12\x11\n\tlanguages\x18\x0e \x03(\t\x12\x34\n\tfallbacks\x18\x0f \x03(\x0b\x32!.google.fonts.FamilyFallbackProto\x12\x42\n\rsample_glyphs\x18\x10 \x03(\x0b\x32+.google.fonts.FamilyProto.SampleGlyphsEntry\x12\x32\n\x0bsample_text\x18\x11 \x01(\x0b\x32\x1d.google.fonts.SampleTextProto\x12\x35\n\x0bsource_type\x18\x12 \x01(\x0e\x32\x16.google.fonts.FontType:\x08TYPE_TTF\x1a?\n\x1dRegistryDefaultOverridesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x1a\x33\n\x11SampleGlyphsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x8a\x01\n\tFontProto\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\r\n\x05style\x18\x02 \x02(\t\x12\x0e\n\x06weight\x18\x03 \x02(\x05\x12\x10\n\x08\x66ilename\x18\x04 \x02(\t\x12\x18\n\x10post_script_name\x18\x05 \x02(\t\x12\x11\n\tfull_name\x18\x06 \x02(\t\x12\x11\n\tcopyright\x18\x07 \x01(\t\"Z\n\x10\x41xisSegmentProto\x12\x0b\n\x03tag\x18\x01 \x01(\t\x12\x11\n\tmin_value\x18\x02 \x01(\x02\x12\x11\n\tmax_value\x18\x04 \x01(\x02J\x04\x08\x03\x10\x04R\rdefault_value\"5\n\x0bSourceProto\x12\x16\n\x0erepository_url\x18\x01 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x02 
\x01(\t\"A\n\x0bTargetProto\x12\x32\n\x0btarget_type\x18\x01 \x01(\x0e\x32\x1d.google.fonts.TargetTypeProto\"\xbe\x01\n\x13\x46\x61milyFallbackProto\x12\x33\n\x0b\x61xis_target\x18\x01 \x03(\x0b\x32\x1e.google.fonts.AxisSegmentProto\x12)\n\x06target\x18\x02 \x03(\x0b\x32\x19.google.fonts.TargetProto\x12\x17\n\x0fsize_adjust_pct\x18\x03 \x01(\x02\x12\x1b\n\x13\x61scent_override_pct\x18\x05 \x01(\x02\x12\x11\n\tlocal_src\x18\x04 \x03(\t\"Q\n\x0bRegionProto\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x12\n\npopulation\x18\x03 \x01(\x05\x12\x14\n\x0cregion_group\x18\x04 \x03(\t\"\'\n\x0bScriptProto\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x9a\x02\n\rLanguageProto\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08language\x18\x02 \x01(\t\x12\x0e\n\x06script\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x16\n\x0epreferred_name\x18\x05 \x01(\t\x12\x0f\n\x07\x61utonym\x18\x06 \x01(\t\x12\x12\n\npopulation\x18\x07 \x01(\x05\x12\x0e\n\x06region\x18\x08 \x03(\t\x12\x38\n\x0e\x65xemplar_chars\x18\t \x01(\x0b\x32 .google.fonts.ExemplarCharsProto\x12\x32\n\x0bsample_text\x18\n \x01(\x0b\x32\x1d.google.fonts.SampleTextProto\x12\x12\n\nhistorical\x18\x0b \x01(\x08\"z\n\x12\x45xemplarCharsProto\x12\x0c\n\x04\x62\x61se\x18\x01 \x01(\t\x12\x11\n\tauxiliary\x18\x02 \x01(\t\x12\r\n\x05marks\x18\x03 \x01(\t\x12\x10\n\x08numerals\x18\x04 \x01(\t\x12\x13\n\x0bpunctuation\x18\x05 \x01(\t\x12\r\n\x05index\x18\x06 \x01(\t\"\x9f\x02\n\x0fSampleTextProto\x12\x19\n\x11\x66\x61llback_language\x18\x02 \x01(\t\x12\x15\n\rmasthead_full\x18\x03 \x01(\t\x12\x18\n\x10masthead_partial\x18\x04 \x01(\t\x12\x0e\n\x06styles\x18\x05 \x01(\t\x12\x0e\n\x06tester\x18\x06 \x01(\t\x12\x11\n\tposter_sm\x18\x07 \x01(\t\x12\x11\n\tposter_md\x18\x08 \x01(\t\x12\x11\n\tposter_lg\x18\t \x01(\t\x12\x13\n\x0bspecimen_48\x18\n \x01(\t\x12\x13\n\x0bspecimen_36\x18\x0b \x01(\t\x12\x13\n\x0bspecimen_32\x18\x0c \x01(\t\x12\x13\n\x0bspecimen_21\x18\r 
\x01(\t\x12\x13\n\x0bspecimen_16\x18\x0e \x01(\t*&\n\x08\x46ontType\x12\x0c\n\x08TYPE_TTF\x10\x00\x12\x0c\n\x08TYPE_OTF\x10\x01*h\n\x0fTargetTypeProto\x12\x16\n\x12TARGET_UNSPECIFIED\x10\x00\x12\x15\n\x11TARGET_OS_WINDOWS\x10\x01\x12\x11\n\rTARGET_OS_MAC\x10\x02\x12\x13\n\x0fTARGET_OS_LINUX\x10\x03\x42%\n\x16\x63om.google.fonts.protoB\x0b\x46ontsPublic'
)
_FONTTYPE = _descriptor.EnumDescriptor(
name='FontType',
full_name='google.fonts.FontType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='TYPE_TTF', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TYPE_OTF', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=2176,
serialized_end=2214,
)
_sym_db.RegisterEnumDescriptor(_FONTTYPE)
FontType = enum_type_wrapper.EnumTypeWrapper(_FONTTYPE)
_TARGETTYPEPROTO = _descriptor.EnumDescriptor(
name='TargetTypeProto',
full_name='google.fonts.TargetTypeProto',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='TARGET_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TARGET_OS_WINDOWS', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TARGET_OS_MAC', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TARGET_OS_LINUX', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=2216,
serialized_end=2320,
)
_sym_db.RegisterEnumDescriptor(_TARGETTYPEPROTO)
TargetTypeProto = enum_type_wrapper.EnumTypeWrapper(_TARGETTYPEPROTO)
TYPE_TTF = 0
TYPE_OTF = 1
TARGET_UNSPECIFIED = 0
TARGET_OS_WINDOWS = 1
TARGET_OS_MAC = 2
TARGET_OS_LINUX = 3
_FAMILYPROTO_REGISTRYDEFAULTOVERRIDESENTRY = _descriptor.Descriptor(
name='RegistryDefaultOverridesEntry',
full_name='google.fonts.FamilyProto.RegistryDefaultOverridesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='google.fonts.FamilyProto.RegistryDefaultOverridesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='google.fonts.FamilyProto.RegistryDefaultOverridesEntry.value', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=687,
serialized_end=750,
)
_FAMILYPROTO_SAMPLEGLYPHSENTRY = _descriptor.Descriptor(
name='SampleGlyphsEntry',
full_name='google.fonts.FamilyProto.SampleGlyphsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='google.fonts.FamilyProto.SampleGlyphsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='google.fonts.FamilyProto.SampleGlyphsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=752,
serialized_end=803,
)
_FAMILYPROTO = _descriptor.Descriptor(
name='FamilyProto',
full_name='google.fonts.FamilyProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.fonts.FamilyProto.name', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='designer', full_name='google.fonts.FamilyProto.designer', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='license', full_name='google.fonts.FamilyProto.license', index=2,
number=3, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='category', full_name='google.fonts.FamilyProto.category', index=3,
number=4, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='date_added', full_name='google.fonts.FamilyProto.date_added', index=4,
number=5, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fonts', full_name='google.fonts.FamilyProto.fonts', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='aliases', full_name='google.fonts.FamilyProto.aliases', index=6,
number=7, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subsets', full_name='google.fonts.FamilyProto.subsets', index=7,
number=8, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ttf_autohint_args', full_name='google.fonts.FamilyProto.ttf_autohint_args', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='axes', full_name='google.fonts.FamilyProto.axes', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='registry_default_overrides', full_name='google.fonts.FamilyProto.registry_default_overrides', index=10,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='source', full_name='google.fonts.FamilyProto.source', index=11,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_noto', full_name='google.fonts.FamilyProto.is_noto', index=12,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='languages', full_name='google.fonts.FamilyProto.languages', index=13,
number=14, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fallbacks', full_name='google.fonts.FamilyProto.fallbacks', index=14,
number=15, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sample_glyphs', full_name='google.fonts.FamilyProto.sample_glyphs', index=15,
number=16, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sample_text', full_name='google.fonts.FamilyProto.sample_text', index=16,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='source_type', full_name='google.fonts.FamilyProto.source_type', index=17,
number=18, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_FAMILYPROTO_REGISTRYDEFAULTOVERRIDESENTRY, _FAMILYPROTO_SAMPLEGLYPHSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=37,
serialized_end=803,
)
_FONTPROTO = _descriptor.Descriptor(
name='FontProto',
full_name='google.fonts.FontProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.fonts.FontProto.name', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='style', full_name='google.fonts.FontProto.style', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='weight', full_name='google.fonts.FontProto.weight', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filename', full_name='google.fonts.FontProto.filename', index=3,
number=4, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='post_script_name', full_name='google.fonts.FontProto.post_script_name', index=4,
number=5, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='full_name', full_name='google.fonts.FontProto.full_name', index=5,
number=6, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='copyright', full_name='google.fonts.FontProto.copyright', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=806,
serialized_end=944,
)
_AXISSEGMENTPROTO = _descriptor.Descriptor(
name='AxisSegmentProto',
full_name='google.fonts.AxisSegmentProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='tag', full_name='google.fonts.AxisSegmentProto.tag', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_value', full_name='google.fonts.AxisSegmentProto.min_value', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_value', full_name='google.fonts.AxisSegmentProto.max_value', index=2,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=946,
serialized_end=1036,
)
_SOURCEPROTO = _descriptor.Descriptor(
name='SourceProto',
full_name='google.fonts.SourceProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='repository_url', full_name='google.fonts.SourceProto.repository_url', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='commit', full_name='google.fonts.SourceProto.commit', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1038,
serialized_end=1091,
)
_TARGETPROTO = _descriptor.Descriptor(
name='TargetProto',
full_name='google.fonts.TargetProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='target_type', full_name='google.fonts.TargetProto.target_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1093,
serialized_end=1158,
)
_FAMILYFALLBACKPROTO = _descriptor.Descriptor(
name='FamilyFallbackProto',
full_name='google.fonts.FamilyFallbackProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='axis_target', full_name='google.fonts.FamilyFallbackProto.axis_target', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='target', full_name='google.fonts.FamilyFallbackProto.target', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='size_adjust_pct', full_name='google.fonts.FamilyFallbackProto.size_adjust_pct', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ascent_override_pct', full_name='google.fonts.FamilyFallbackProto.ascent_override_pct', index=3,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='local_src', full_name='google.fonts.FamilyFallbackProto.local_src', index=4,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1161,
serialized_end=1351,
)
_REGIONPROTO = _descriptor.Descriptor(
name='RegionProto',
full_name='google.fonts.RegionProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='google.fonts.RegionProto.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='google.fonts.RegionProto.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='population', full_name='google.fonts.RegionProto.population', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='region_group', full_name='google.fonts.RegionProto.region_group', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1353,
serialized_end=1434,
)
_SCRIPTPROTO = _descriptor.Descriptor(
name='ScriptProto',
full_name='google.fonts.ScriptProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='google.fonts.ScriptProto.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='google.fonts.ScriptProto.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1436,
serialized_end=1475,
)
_LANGUAGEPROTO = _descriptor.Descriptor(
name='LanguageProto',
full_name='google.fonts.LanguageProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='google.fonts.LanguageProto.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language', full_name='google.fonts.LanguageProto.language', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='script', full_name='google.fonts.LanguageProto.script', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='google.fonts.LanguageProto.name', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='preferred_name', full_name='google.fonts.LanguageProto.preferred_name', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='autonym', full_name='google.fonts.LanguageProto.autonym', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='population', full_name='google.fonts.LanguageProto.population', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='region', full_name='google.fonts.LanguageProto.region', index=7,
number=8, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='exemplar_chars', full_name='google.fonts.LanguageProto.exemplar_chars', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sample_text', full_name='google.fonts.LanguageProto.sample_text', index=9,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='historical', full_name='google.fonts.LanguageProto.historical', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1478,
serialized_end=1760,
)
# Descriptor for the ExemplarCharsProto message: six optional (label=1)
# string (type=9) fields. Machine-generated protobuf plumbing — do not edit
# field numbers or serialized offsets by hand.
_EXEMPLARCHARSPROTO = _descriptor.Descriptor(
  name='ExemplarCharsProto',
  full_name='google.fonts.ExemplarCharsProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='base', full_name='google.fonts.ExemplarCharsProto.base', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='auxiliary', full_name='google.fonts.ExemplarCharsProto.auxiliary', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='marks', full_name='google.fonts.ExemplarCharsProto.marks', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='numerals', full_name='google.fonts.ExemplarCharsProto.numerals', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='punctuation', full_name='google.fonts.ExemplarCharsProto.punctuation', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='index', full_name='google.fonts.ExemplarCharsProto.index', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message inside the serialized FileDescriptorProto.
  serialized_start=1762,
  serialized_end=1884,
)
# Descriptor for the SampleTextProto message: thirteen optional string fields
# (field numbers 2-14; number 1 is unused in this view). Machine-generated
# protobuf plumbing — do not edit field numbers or serialized offsets by hand.
_SAMPLETEXTPROTO = _descriptor.Descriptor(
  name='SampleTextProto',
  full_name='google.fonts.SampleTextProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='fallback_language', full_name='google.fonts.SampleTextProto.fallback_language', index=0,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='masthead_full', full_name='google.fonts.SampleTextProto.masthead_full', index=1,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='masthead_partial', full_name='google.fonts.SampleTextProto.masthead_partial', index=2,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='styles', full_name='google.fonts.SampleTextProto.styles', index=3,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tester', full_name='google.fonts.SampleTextProto.tester', index=4,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='poster_sm', full_name='google.fonts.SampleTextProto.poster_sm', index=5,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='poster_md', full_name='google.fonts.SampleTextProto.poster_md', index=6,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='poster_lg', full_name='google.fonts.SampleTextProto.poster_lg', index=7,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='specimen_48', full_name='google.fonts.SampleTextProto.specimen_48', index=8,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='specimen_36', full_name='google.fonts.SampleTextProto.specimen_36', index=9,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='specimen_32', full_name='google.fonts.SampleTextProto.specimen_32', index=10,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='specimen_21', full_name='google.fonts.SampleTextProto.specimen_21', index=11,
      number=13, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='specimen_16', full_name='google.fonts.SampleTextProto.specimen_16', index=12,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message inside the serialized FileDescriptorProto.
  serialized_start=1887,
  serialized_end=2174,
)
# --- Cross-message wiring (machine-generated protobuf plumbing) ---
# Link map-entry helper messages and message/enum-typed fields to the
# descriptors they reference; this cannot be expressed inline above because
# of forward references.
_FAMILYPROTO_REGISTRYDEFAULTOVERRIDESENTRY.containing_type = _FAMILYPROTO
_FAMILYPROTO_SAMPLEGLYPHSENTRY.containing_type = _FAMILYPROTO
_FAMILYPROTO.fields_by_name['fonts'].message_type = _FONTPROTO
_FAMILYPROTO.fields_by_name['axes'].message_type = _AXISSEGMENTPROTO
_FAMILYPROTO.fields_by_name['registry_default_overrides'].message_type = _FAMILYPROTO_REGISTRYDEFAULTOVERRIDESENTRY
_FAMILYPROTO.fields_by_name['source'].message_type = _SOURCEPROTO
_FAMILYPROTO.fields_by_name['fallbacks'].message_type = _FAMILYFALLBACKPROTO
_FAMILYPROTO.fields_by_name['sample_glyphs'].message_type = _FAMILYPROTO_SAMPLEGLYPHSENTRY
_FAMILYPROTO.fields_by_name['sample_text'].message_type = _SAMPLETEXTPROTO
_FAMILYPROTO.fields_by_name['source_type'].enum_type = _FONTTYPE
_TARGETPROTO.fields_by_name['target_type'].enum_type = _TARGETTYPEPROTO
_FAMILYFALLBACKPROTO.fields_by_name['axis_target'].message_type = _AXISSEGMENTPROTO
_FAMILYFALLBACKPROTO.fields_by_name['target'].message_type = _TARGETPROTO
_LANGUAGEPROTO.fields_by_name['exemplar_chars'].message_type = _EXEMPLARCHARSPROTO
_LANGUAGEPROTO.fields_by_name['sample_text'].message_type = _SAMPLETEXTPROTO
# Register every top-level message and enum on the file descriptor.
DESCRIPTOR.message_types_by_name['FamilyProto'] = _FAMILYPROTO
DESCRIPTOR.message_types_by_name['FontProto'] = _FONTPROTO
DESCRIPTOR.message_types_by_name['AxisSegmentProto'] = _AXISSEGMENTPROTO
DESCRIPTOR.message_types_by_name['SourceProto'] = _SOURCEPROTO
DESCRIPTOR.message_types_by_name['TargetProto'] = _TARGETPROTO
DESCRIPTOR.message_types_by_name['FamilyFallbackProto'] = _FAMILYFALLBACKPROTO
DESCRIPTOR.message_types_by_name['RegionProto'] = _REGIONPROTO
DESCRIPTOR.message_types_by_name['ScriptProto'] = _SCRIPTPROTO
DESCRIPTOR.message_types_by_name['LanguageProto'] = _LANGUAGEPROTO
DESCRIPTOR.message_types_by_name['ExemplarCharsProto'] = _EXEMPLARCHARSPROTO
DESCRIPTOR.message_types_by_name['SampleTextProto'] = _SAMPLETEXTPROTO
DESCRIPTOR.enum_types_by_name['FontType'] = _FONTTYPE
DESCRIPTOR.enum_types_by_name['TargetTypeProto'] = _TARGETTYPEPROTO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# --- Concrete message classes, built from the descriptors above ---
FamilyProto = _reflection.GeneratedProtocolMessageType('FamilyProto', (_message.Message,), {
  # Nested classes for the two map<..> fields of FamilyProto.
  'RegistryDefaultOverridesEntry' : _reflection.GeneratedProtocolMessageType('RegistryDefaultOverridesEntry', (_message.Message,), {
    'DESCRIPTOR' : _FAMILYPROTO_REGISTRYDEFAULTOVERRIDESENTRY,
    '__module__' : 'fonts_public_pb2'
    # @@protoc_insertion_point(class_scope:google.fonts.FamilyProto.RegistryDefaultOverridesEntry)
    })
  ,
  'SampleGlyphsEntry' : _reflection.GeneratedProtocolMessageType('SampleGlyphsEntry', (_message.Message,), {
    'DESCRIPTOR' : _FAMILYPROTO_SAMPLEGLYPHSENTRY,
    '__module__' : 'fonts_public_pb2'
    # @@protoc_insertion_point(class_scope:google.fonts.FamilyProto.SampleGlyphsEntry)
    })
  ,
  'DESCRIPTOR' : _FAMILYPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.FamilyProto)
  })
_sym_db.RegisterMessage(FamilyProto)
_sym_db.RegisterMessage(FamilyProto.RegistryDefaultOverridesEntry)
_sym_db.RegisterMessage(FamilyProto.SampleGlyphsEntry)
FontProto = _reflection.GeneratedProtocolMessageType('FontProto', (_message.Message,), {
  'DESCRIPTOR' : _FONTPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.FontProto)
  })
_sym_db.RegisterMessage(FontProto)
AxisSegmentProto = _reflection.GeneratedProtocolMessageType('AxisSegmentProto', (_message.Message,), {
  'DESCRIPTOR' : _AXISSEGMENTPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.AxisSegmentProto)
  })
_sym_db.RegisterMessage(AxisSegmentProto)
SourceProto = _reflection.GeneratedProtocolMessageType('SourceProto', (_message.Message,), {
  'DESCRIPTOR' : _SOURCEPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.SourceProto)
  })
_sym_db.RegisterMessage(SourceProto)
TargetProto = _reflection.GeneratedProtocolMessageType('TargetProto', (_message.Message,), {
  'DESCRIPTOR' : _TARGETPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.TargetProto)
  })
_sym_db.RegisterMessage(TargetProto)
FamilyFallbackProto = _reflection.GeneratedProtocolMessageType('FamilyFallbackProto', (_message.Message,), {
  'DESCRIPTOR' : _FAMILYFALLBACKPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.FamilyFallbackProto)
  })
_sym_db.RegisterMessage(FamilyFallbackProto)
RegionProto = _reflection.GeneratedProtocolMessageType('RegionProto', (_message.Message,), {
  'DESCRIPTOR' : _REGIONPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.RegionProto)
  })
_sym_db.RegisterMessage(RegionProto)
ScriptProto = _reflection.GeneratedProtocolMessageType('ScriptProto', (_message.Message,), {
  'DESCRIPTOR' : _SCRIPTPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.ScriptProto)
  })
_sym_db.RegisterMessage(ScriptProto)
LanguageProto = _reflection.GeneratedProtocolMessageType('LanguageProto', (_message.Message,), {
  'DESCRIPTOR' : _LANGUAGEPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.LanguageProto)
  })
_sym_db.RegisterMessage(LanguageProto)
ExemplarCharsProto = _reflection.GeneratedProtocolMessageType('ExemplarCharsProto', (_message.Message,), {
  'DESCRIPTOR' : _EXEMPLARCHARSPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.ExemplarCharsProto)
  })
_sym_db.RegisterMessage(ExemplarCharsProto)
SampleTextProto = _reflection.GeneratedProtocolMessageType('SampleTextProto', (_message.Message,), {
  'DESCRIPTOR' : _SAMPLETEXTPROTO,
  '__module__' : 'fonts_public_pb2'
  # @@protoc_insertion_point(class_scope:google.fonts.SampleTextProto)
  })
_sym_db.RegisterMessage(SampleTextProto)
# Reset cached options so they are lazily re-parsed on first access.
DESCRIPTOR._options = None
_FAMILYPROTO_REGISTRYDEFAULTOVERRIDESENTRY._options = None
_FAMILYPROTO_SAMPLEGLYPHSENTRY._options = None
# @@protoc_insertion_point(module_scope)
| 47.939759
| 4,271
| 0.754036
| 6,660
| 51,727
| 5.543994
| 0.054805
| 0.05395
| 0.084906
| 0.071663
| 0.771741
| 0.696883
| 0.657747
| 0.655012
| 0.649541
| 0.645126
| 0
| 0.03812
| 0.121639
| 51,727
| 1,078
| 4,272
| 47.98423
| 0.774535
| 0.021304
| 0
| 0.704951
| 1
| 0.00297
| 0.171436
| 0.126477
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004951
| 0
| 0.004951
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
861d09acfcfdad1e298b36f054da83ab4f3be69a
| 1,427
|
py
|
Python
|
rpc_worker.py
|
Aspect13/projects
|
4191fa4507370d072ea0c4e994bd1b4518280431
|
[
"Apache-2.0"
] | null | null | null |
rpc_worker.py
|
Aspect13/projects
|
4191fa4507370d072ea0c4e994bd1b4518280431
|
[
"Apache-2.0"
] | null | null | null |
rpc_worker.py
|
Aspect13/projects
|
4191fa4507370d072ea0c4e994bd1b4518280431
|
[
"Apache-2.0"
] | 2
|
2021-06-15T20:24:09.000Z
|
2021-07-12T09:31:29.000Z
|
from typing import Union
from ..shared.connectors.auth import SessionProject
from .models.project import Project, get_user_projects
from .models.quota import ProjectQuota
from .models.statistics import Statistic
def prj_or_404(project_id):
    """Look up a Project by id, aborting with a 404 if it does not exist."""
    project = Project.get_or_404(project_id)
    return project
def list_projects():
    """Return the collection of all known projects."""
    projects = Project.list_projects()
    return projects
def get_project_statistics(project_id):
    """Return the project's statistics row serialized to JSON.

    NOTE(review): assumes a Statistic row exists for the project;
    raises AttributeError otherwise (unchanged from the original).
    """
    row = Statistic.query.filter_by(project_id=project_id).first()
    return row.to_json()
def add_task_execution(project_id):
    """Increment the tasks_executions counter on the project's statistics row."""
    row = Statistic.query.filter_by(project_id=project_id).first()
    # Column-expression increment (Statistic.tasks_executions + 1): the +1
    # is evaluated by the database rather than read-modify-write in Python.
    row.tasks_executions = Statistic.tasks_executions + 1
    row.commit()
def get_storage_quota(project_id):
    """Return the storage-space quota configured for the project."""
    quota = Project.get_storage_space_quota(project_id=project_id)
    return quota
def check_quota(project_id, quota=None):
    """Check the project's quota and return the result as JSON."""
    result = ProjectQuota.check_quota_json(project_id, quota)
    return result
def get_project_config(project_id=None):
    """Return the project's configuration as JSON, or {} if it cannot be loaded.

    When no project_id is supplied, fall back to the session's active
    project, then to the user's first project (mirrors get_project_id()).
    """
    # BUG FIX: the original condition was inverted (`if project_id:`), which
    # overwrote an explicitly supplied id with the session value and skipped
    # the fallback entirely when no id was given. Resolve a fallback only
    # when the caller did not pass an id, matching get_project_id().
    if not project_id:
        project_id = SessionProject.get()
    if not project_id:
        project_id = get_user_projects()[0]["id"]
    try:
        return Project.query.filter_by(project_id=project_id).first().to_json()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; the deliberate best-effort {} fallback is kept.
        return {}
def get_project_id():
    """Return the session's active project id, else the user's first project id."""
    active_id = SessionProject.get()
    if active_id:
        return active_id
    return get_user_projects()[0]["id"]
def set_active_project(project_id: Union[str, int]):
    """Store the given project id (coerced to int) as the session's active project."""
    normalized_id = int(project_id)
    SessionProject.set(normalized_id)
| 25.482143
| 79
| 0.751927
| 199
| 1,427
| 5.075377
| 0.261307
| 0.240594
| 0.126733
| 0.142574
| 0.336634
| 0.287129
| 0.287129
| 0.287129
| 0.287129
| 0.233663
| 0
| 0.007426
| 0.150666
| 1,427
| 55
| 80
| 25.945455
| 0.825908
| 0
| 0
| 0.171429
| 0
| 0
| 0.014015
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.257143
| false
| 0
| 0.142857
| 0.142857
| 0.628571
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
86510d35e2bb785efa09317620947c5e0393713c
| 191
|
py
|
Python
|
clairvoyance/treatments/__init__.py
|
ZhaozhiQIAN/SyncTwin-NeurIPS-2021
|
78eff91d0287c7f1f66c76ca24834c7d1029ad3b
|
[
"MIT"
] | 5
|
2021-11-23T08:41:08.000Z
|
2022-03-06T16:20:37.000Z
|
clairvoyance/treatments/__init__.py
|
ZhaozhiQIAN/SyncTwin-NeurIPS-2021
|
78eff91d0287c7f1f66c76ca24834c7d1029ad3b
|
[
"MIT"
] | null | null | null |
clairvoyance/treatments/__init__.py
|
ZhaozhiQIAN/SyncTwin-NeurIPS-2021
|
78eff91d0287c7f1f66c76ca24834c7d1029ad3b
|
[
"MIT"
] | 2
|
2021-11-16T16:10:53.000Z
|
2021-12-28T07:13:03.000Z
|
from .CRN.CRN_Model import CRN_Model
from .RMSN.RMSN_Model import RMSN_Model
from .treatments import treatment_effects_model
# Public API of the treatments package.
__all__ = ["CRN_Model", "RMSN_Model", "treatment_effects_model"]
| 31.833333
| 64
| 0.82199
| 28
| 191
| 5.107143
| 0.321429
| 0.167832
| 0.293706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094241
| 191
| 5
| 65
| 38.2
| 0.82659
| 0
| 0
| 0
| 0
| 0
| 0.219895
| 0.120419
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
8672aa638023d697879617744f1c4cdbf07cc0e6
| 145
|
py
|
Python
|
backend/src/baserow/contrib/database/api/views/gallery/pagination.py
|
ashishdhngr/baserow
|
b098678d2165eb7c42930ee24dc6753a3cb520c3
|
[
"MIT"
] | 1
|
2022-01-24T15:12:02.000Z
|
2022-01-24T15:12:02.000Z
|
backend/src/baserow/contrib/database/api/views/gallery/pagination.py
|
rasata/baserow
|
c6e1d7842c53f801e1c96b49f1377da2a06afaa9
|
[
"MIT"
] | null | null | null |
backend/src/baserow/contrib/database/api/views/gallery/pagination.py
|
rasata/baserow
|
c6e1d7842c53f801e1c96b49f1377da2a06afaa9
|
[
"MIT"
] | null | null | null |
from rest_framework.pagination import LimitOffsetPagination
class GalleryLimitOffsetPagination(LimitOffsetPagination):
    """Limit/offset pagination for gallery views; serves 100 rows per page
    when the client does not pass an explicit ``limit``."""
    default_limit = 100
| 24.166667
| 59
| 0.862069
| 12
| 145
| 10.25
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023077
| 0.103448
| 145
| 5
| 60
| 29
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
868d461841435e0d5acbf743117e8803917900b1
| 146
|
py
|
Python
|
tools/worlds_test/cython_test/main.py
|
tdchristian/shell-game
|
0241d4bd01486487d8c28b843870f6d44857332f
|
[
"MIT"
] | null | null | null |
tools/worlds_test/cython_test/main.py
|
tdchristian/shell-game
|
0241d4bd01486487d8c28b843870f6d44857332f
|
[
"MIT"
] | null | null | null |
tools/worlds_test/cython_test/main.py
|
tdchristian/shell-game
|
0241d4bd01486487d8c28b843870f6d44857332f
|
[
"MIT"
] | null | null | null |
import worlds
# for world in world_test.generate_worlds(24, 3):
#     print(world)
# Count the worlds yielded for arguments (20, 3) — the parameters' exact
# meaning is defined by the `worlds` module (TODO confirm).
i = 0
for world in worlds.generate_worlds(20, 3):
    i += 1
| 16.222222
| 49
| 0.671233
| 25
| 146
| 3.8
| 0.56
| 0.168421
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069565
| 0.212329
| 146
| 8
| 50
| 18.25
| 0.756522
| 0.438356
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
869263c452832a10cdae9d0605bf705fd9409a80
| 2,228
|
py
|
Python
|
tests/test_wilson.py
|
Noskario/Wilson
|
8434113a6004fc3a8bd61f613db31d9e06a13565
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_wilson.py
|
Noskario/Wilson
|
8434113a6004fc3a8bd61f613db31d9e06a13565
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_wilson.py
|
Noskario/Wilson
|
8434113a6004fc3a8bd61f613db31d9e06a13565
|
[
"BSD-3-Clause"
] | null | null | null |
import time
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
from pyvis.network import Network
# import Wilson
# import Wilson2
#
#
# # Don't look at this, it's just scrap; run test2.py instead
#
# def laufzeitenanalyse():
# narray = [2 * x + 6 for x in range(30)]
# laufzeitenbuild = []
# laufzeitenwilson = []
# for n in narray:
# print('Sind bei n=', n)
# laufzeitbuild = 0
# laufzeitwilson = 0
# for _ in range(5):
# temp = time.time()
# g = Wilson.create_nx_graph(n)
# print('Bauzeit: ', time.time() - temp)
# laufzeitbuild += time.time() - temp
# temp = time.time()
# Wilson2.wilson(g, ['2,4'], .02)
# laufzeitwilson += time.time() - temp
# print('Wilsonzeit: ', time.time() - temp)
# laufzeitenbuild.append(laufzeitbuild)
# laufzeitenwilson.append(laufzeitwilson)
# print('n=', narray)
# print('wilson', laufzeitenwilson)
# print('build', laufzeitenbuild)
# laufzeitenbuild = [x ** .25 for x in laufzeitenbuild]
# laufzeitenwilson = [x ** .25 for x in laufzeitenwilson]
# # plt.plot(narray, laufzeitenwilson, label='wilson')
# plt.plot(narray, laufzeitenwilson, label='4th root (wilson)')
#
# # plt.plot(narray, laufzeitenbuild, label='build')
# plt.plot(narray, laufzeitenbuild, label='4th root (build)')
# plt.legend()
# plt.show()
#
# # Wilson.color_leaves(g)
# # g.show('g.html')
#
#
# def haeufigkeitsanalyse():
# N = 7
# st = nx.star_graph(N)
# st.add_edge(2, 4)
# st.add_edge(3, 4)
# st.add_edge(1, 7)
# for xx, yy in st.edges:
# st[xx][yy]['weight'] = 1.
# temp = np.zeros(N + 1)
# for i in range(60000):
# Wilson2.wilson(st, [], .1)
# anzahlwurzeln = 0
# for n in st.nodes:
# if st.nodes[n]['color'] == 'yellow':
# anzahlwurzeln += 1
# if anzahlwurzeln == 1:
# for n in st.nodes:
# if st.nodes[n]['color'] == 'yellow':
# temp[n] += 1
# print(temp)
# nt2 = Network()
# nt2.from_nx(st)
# nt2.show('star.html')
| 30.520548
| 67
| 0.545332
| 261
| 2,228
| 4.62069
| 0.321839
| 0.039801
| 0.039801
| 0.011609
| 0.182421
| 0.056385
| 0.056385
| 0.056385
| 0.056385
| 0.056385
| 0
| 0.027582
| 0.300269
| 2,228
| 72
| 68
| 30.944444
| 0.745991
| 0.88465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
86a6b8d237900fc4d77d41c0aeafbd9a08a4ad01
| 90
|
py
|
Python
|
hacker/challenges/coding/unsolved/exclusive_or.py
|
Tenebrar/codebase
|
59c9a35289fb29afedad0e3edd0519b67372ef9f
|
[
"Unlicense"
] | 1
|
2020-04-21T11:39:25.000Z
|
2020-04-21T11:39:25.000Z
|
hacker/challenges/coding/unsolved/exclusive_or.py
|
Tenebrar/codebase
|
59c9a35289fb29afedad0e3edd0519b67372ef9f
|
[
"Unlicense"
] | 7
|
2020-02-12T01:08:01.000Z
|
2022-02-10T11:56:56.000Z
|
hacker/challenges/coding/unsolved/exclusive_or.py
|
Tenebrar/codebase
|
59c9a35289fb29afedad0e3edd0519b67372ef9f
|
[
"Unlicense"
] | null | null | null |
from hacker.settings import inputfile
# Resolve the challenge asset 'Doll2.png' under the 'coding' category —
# exact behavior of `inputfile` is defined in hacker.settings (not visible
# here); presumably it fetches or locates the puzzle input. TODO confirm.
inputfile('coding', 'execution_style', 'Doll2.png')
| 30
| 51
| 0.788889
| 11
| 90
| 6.363636
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012048
| 0.077778
| 90
| 3
| 51
| 30
| 0.831325
| 0
| 0
| 0
| 0
| 0
| 0.32967
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
86b3dd8436702996619d96361b999161f09d24a9
| 5,809
|
py
|
Python
|
tests/components/litejet/test_light.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
tests/components/litejet/test_light.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
tests/components/litejet/test_light.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""The tests for the litejet component."""
from homeassistant.components import light
from homeassistant.components.light import ATTR_BRIGHTNESS, ATTR_TRANSITION
from homeassistant.components.litejet.const import CONF_DEFAULT_TRANSITION
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON
from . import async_init_integration
# Entity ids and the LiteJet load numbers they correspond to in the mock.
ENTITY_LIGHT = "light.mock_load_1"
ENTITY_LIGHT_NUMBER = 1
ENTITY_OTHER_LIGHT = "light.mock_load_2"
ENTITY_OTHER_LIGHT_NUMBER = 2
async def test_on_brightness(hass, mock_litejet):
    """Test turning the light on with brightness."""
    await async_init_integration(hass)
    # Both mock loads start in the "off" state.
    assert hass.states.get(ENTITY_LIGHT).state == "off"
    assert hass.states.get(ENTITY_OTHER_LIGHT).state == "off"
    assert not light.is_on(hass, ENTITY_LIGHT)
    await hass.services.async_call(
        light.DOMAIN,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS: 102},
        blocking=True,
    )
    # Brightness 102 (of 255) scales to LiteJet level 39, zero transition.
    mock_litejet.activate_load_at.assert_called_with(ENTITY_LIGHT_NUMBER, 39, 0)
async def test_default_transition(hass, mock_litejet):
    """Test turning the light on with the default transition option."""
    entry = await async_init_integration(hass)
    # Configure a 12-second default transition via the config entry options.
    hass.config_entries.async_update_entry(entry, options={CONF_DEFAULT_TRANSITION: 12})
    await hass.async_block_till_done()
    assert hass.states.get(ENTITY_LIGHT).state == "off"
    assert hass.states.get(ENTITY_OTHER_LIGHT).state == "off"
    assert not light.is_on(hass, ENTITY_LIGHT)
    # Turn on without an explicit transition — the option value must be used.
    await hass.services.async_call(
        light.DOMAIN,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS: 102},
        blocking=True,
    )
    # Same brightness scaling as above, but with the 12s default transition.
    mock_litejet.activate_load_at.assert_called_with(ENTITY_LIGHT_NUMBER, 39, 12)
async def test_transition(hass, mock_litejet):
    """Test turning the light on with transition."""
    await async_init_integration(hass)
    assert hass.states.get(ENTITY_LIGHT).state == "off"
    assert hass.states.get(ENTITY_OTHER_LIGHT).state == "off"
    assert not light.is_on(hass, ENTITY_LIGHT)
    # On
    await hass.services.async_call(
        light.DOMAIN,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_TRANSITION: 5},
        blocking=True,
    )
    # No brightness given: full level (99) with the requested 5s transition.
    mock_litejet.activate_load_at.assert_called_with(ENTITY_LIGHT_NUMBER, 99, 5)
    # Off
    await hass.services.async_call(
        light.DOMAIN,
        SERVICE_TURN_OFF,
        {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_TRANSITION: 5},
        blocking=True,
    )
    # Turning off with a transition still goes through activate_load_at
    # (level 0) rather than deactivate_load.
    mock_litejet.activate_load_at.assert_called_with(ENTITY_LIGHT_NUMBER, 0, 5)
async def test_on_off(hass, mock_litejet):
    """Test turning the light on and off."""
    await async_init_integration(hass)
    assert hass.states.get(ENTITY_LIGHT).state == "off"
    assert hass.states.get(ENTITY_OTHER_LIGHT).state == "off"
    assert not light.is_on(hass, ENTITY_LIGHT)
    # Plain turn_on (no brightness/transition) uses activate_load.
    await hass.services.async_call(
        light.DOMAIN,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: ENTITY_LIGHT},
        blocking=True,
    )
    mock_litejet.activate_load.assert_called_with(ENTITY_LIGHT_NUMBER)
    # Plain turn_off uses deactivate_load.
    await hass.services.async_call(
        light.DOMAIN,
        SERVICE_TURN_OFF,
        {ATTR_ENTITY_ID: ENTITY_LIGHT},
        blocking=True,
    )
    mock_litejet.deactivate_load.assert_called_with(ENTITY_LIGHT_NUMBER)
async def test_activated_event(hass, mock_litejet):
    """Test handling an event from LiteJet."""
    await async_init_integration(hass)
    # Light 1
    # Simulate the device reporting level 99 (full) and fire its
    # "load activated" callback.
    mock_litejet.get_load_level.return_value = 99
    mock_litejet.get_load_level.reset_mock()
    mock_litejet.load_activated_callbacks[ENTITY_LIGHT_NUMBER]()
    await hass.async_block_till_done()
    mock_litejet.get_load_level.assert_called_once_with(ENTITY_LIGHT_NUMBER)
    # Only light 1 turns on; LiteJet level 99 maps to full HA brightness 255.
    assert light.is_on(hass, ENTITY_LIGHT)
    assert not light.is_on(hass, ENTITY_OTHER_LIGHT)
    assert hass.states.get(ENTITY_LIGHT).state == "on"
    assert hass.states.get(ENTITY_OTHER_LIGHT).state == "off"
    assert hass.states.get(ENTITY_LIGHT).attributes.get(ATTR_BRIGHTNESS) == 255
    # Light 2
    # Simulate the second load activating at level 40.
    mock_litejet.get_load_level.return_value = 40
    mock_litejet.get_load_level.reset_mock()
    mock_litejet.load_activated_callbacks[ENTITY_OTHER_LIGHT_NUMBER]()
    await hass.async_block_till_done()
    mock_litejet.get_load_level.assert_called_once_with(ENTITY_OTHER_LIGHT_NUMBER)
    # Both lights are on now; LiteJet level 40 maps to HA brightness 103.
    assert light.is_on(hass, ENTITY_LIGHT)
    assert light.is_on(hass, ENTITY_OTHER_LIGHT)
    assert hass.states.get(ENTITY_LIGHT).state == "on"
    assert hass.states.get(ENTITY_OTHER_LIGHT).state == "on"
    assert hass.states.get(ENTITY_OTHER_LIGHT).attributes.get(ATTR_BRIGHTNESS) == 103
async def test_deactivated_event(hass, mock_litejet):
    """Test handling a load-deactivated event from LiteJet.

    Turns the second light on via its activation callback, then fires the
    deactivation callback and checks both lights end up off.
    """
    await async_init_integration(hass)

    # Initial state is on: activate the second light at full level (99).
    mock_litejet.get_load_level.return_value = 99
    mock_litejet.load_activated_callbacks[ENTITY_OTHER_LIGHT_NUMBER]()
    await hass.async_block_till_done()

    assert light.is_on(hass, ENTITY_OTHER_LIGHT)

    # Event indicates it is off now.
    mock_litejet.get_load_level.reset_mock()
    mock_litejet.get_load_level.return_value = 0
    mock_litejet.load_deactivated_callbacks[ENTITY_OTHER_LIGHT_NUMBER]()
    await hass.async_block_till_done()

    # (Requesting the level is not strictly needed with a deactivated
    # event but the implementation happens to do it. This could be
    # changed to an assert_not_called in the future.)
    mock_litejet.get_load_level.assert_called_with(ENTITY_OTHER_LIGHT_NUMBER)

    assert not light.is_on(hass, ENTITY_OTHER_LIGHT)
    assert not light.is_on(hass, ENTITY_LIGHT)
    assert hass.states.get(ENTITY_LIGHT).state == "off"
    assert hass.states.get(ENTITY_OTHER_LIGHT).state == "off"
| 33.773256
| 88
| 0.746084
| 819
| 5,809
| 4.946276
| 0.124542
| 0.084177
| 0.075043
| 0.075043
| 0.782029
| 0.76154
| 0.750679
| 0.703036
| 0.674155
| 0.642311
| 0
| 0.008464
| 0.166122
| 5,809
| 171
| 89
| 33.97076
| 0.827828
| 0.049062
| 0
| 0.645455
| 0
| 0
| 0.013977
| 0
| 0
| 0
| 0
| 0
| 0.327273
| 1
| 0
| false
| 0
| 0.045455
| 0
| 0.045455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
86c48533487b03348d17d0ea969c906ccc6cfc63
| 223
|
py
|
Python
|
numbers.py
|
Time2003/lr4
|
2c11076d7513f0a480c448d0cad5fbceced6da58
|
[
"MIT"
] | null | null | null |
numbers.py
|
Time2003/lr4
|
2c11076d7513f0a480c448d0cad5fbceced6da58
|
[
"MIT"
] | null | null | null |
numbers.py
|
Time2003/lr4
|
2c11076d7513f0a480c448d0cad5fbceced6da58
|
[
"MIT"
] | null | null | null |
def ratio_of_sums(a, b, c, d):
    """Return ``(a + b) / (c + d)`` as a float.

    Raises:
        ZeroDivisionError: when ``c + d == 0`` (same as the original
            inline expression — surfaced here so callers see it).
    """
    return (a + b) / (c + d)


def main():
    """Prompt for four integers and print (a + b) / (c + d) to 2 decimals."""
    a = int(input("enter the first number: "))
    b = int(input("enter the second number: "))
    c = int(input("enter the third number: "))
    d = int(input("enter the fourth number: "))
    print(f'{ratio_of_sums(a, b, c, d):.2f}')


# Guarding the prompts means importing this module no longer blocks on
# stdin (the original ran input() at import time).
if __name__ == "__main__":
    main()
| 27.875
| 43
| 0.591928
| 41
| 223
| 3.219512
| 0.439024
| 0.242424
| 0.393939
| 0.484848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005618
| 0.201794
| 223
| 8
| 44
| 27.875
| 0.735955
| 0
| 0
| 0
| 0
| 0
| 0.477679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
86d148ec6ba5ab7cfba2f849c23cd3b0f523a640
| 87
|
py
|
Python
|
mythos/apps.py
|
mythicrationality/mythra-fi
|
957dc3261b4eb49211425954e0222ec0d4647f3e
|
[
"MIT"
] | null | null | null |
mythos/apps.py
|
mythicrationality/mythra-fi
|
957dc3261b4eb49211425954e0222ec0d4647f3e
|
[
"MIT"
] | null | null | null |
mythos/apps.py
|
mythicrationality/mythra-fi
|
957dc3261b4eb49211425954e0222ec0d4647f3e
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class MythosConfig(AppConfig):
    """Django application configuration for the ``mythos`` app."""

    # Dotted module path Django uses to locate and register this app.
    name = 'mythos'
| 14.5
| 33
| 0.747126
| 10
| 87
| 6.5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 87
| 5
| 34
| 17.4
| 0.902778
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
86fd3ef81a1469363b0214ec60c7b02dd452ac56
| 327
|
py
|
Python
|
code.py
|
Awesome12-arch/Trending-News_python
|
814fe015c52413fa9a4b23ad65576cf3bde4954d
|
[
"MIT"
] | null | null | null |
code.py
|
Awesome12-arch/Trending-News_python
|
814fe015c52413fa9a4b23ad65576cf3bde4954d
|
[
"MIT"
] | null | null | null |
code.py
|
Awesome12-arch/Trending-News_python
|
814fe015c52413fa9a4b23ad65576cf3bde4954d
|
[
"MIT"
] | null | null | null |
from GoogleNews import GoogleNews


def main():
    """Fetch Google News results for 'USA' from the last 7 days and print them.

    Prints a separator line followed by the title, date, description and
    link of each result returned by the GoogleNews client.
    """
    # The original created a no-arg GoogleNews() first and immediately
    # overwrote it — that dead instantiation is removed here.
    googlenews = GoogleNews(period='7days')
    googlenews.search('USA')
    for x in googlenews.result():
        print("-"*50)
        print("Title--", x['title'])
        print("Date/time--", x['date'])
        print("Description--", x['desc'])
        print("Link--", x['link'])


if __name__ == "__main__":
    main()
| 27.25
| 39
| 0.642202
| 39
| 327
| 5.384615
| 0.487179
| 0.380952
| 0.428571
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010714
| 0.143731
| 327
| 11
| 40
| 29.727273
| 0.739286
| 0
| 0
| 0
| 0
| 0
| 0.192661
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0.454545
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
8102feef6adc109a8dc52fd68dc4f8566184c601
| 756
|
py
|
Python
|
whisk/template/{{ cookiecutter.repo_name }}/whisk_commands/app.py
|
BookletAI/whisk
|
814a3e3b0ee2a97d9c42044cde9571946b72910e
|
[
"MIT"
] | 8
|
2020-06-16T13:13:03.000Z
|
2021-12-25T00:01:57.000Z
|
whisk/template/{{ cookiecutter.repo_name }}/whisk_commands/app.py
|
whisk-ml/whisk
|
55e573f837a8a5f34279fa5e83a721582fdcb24e
|
[
"MIT"
] | 9
|
2020-05-13T22:15:25.000Z
|
2021-01-24T23:41:57.000Z
|
whisk/template/{{ cookiecutter.repo_name }}/whisk_commands/app.py
|
whisk-ml/whisk
|
55e573f837a8a5f34279fa5e83a721582fdcb24e
|
[
"MIT"
] | null | null | null |
"""
Whisk includes default cli commands for performing common tasks in an ML project.
You can add new commands to your project and override existing commands. Files in this directory are
loaded as click commands.
This is an example of adding a new command (whisk app hello) and
overriding an existing command (whisk app start). Uncomment the code below to try it.
Click docs: https://click.palletsprojects.com/
For commands to load, the file must have a click `cli` command or group.
"""
import click
import subprocess
# @click.group()
# def cli():
# pass
#
# @cli.command()
# def hello():
# """Hello!"""
# click.echo("Hello!")
#
# @cli.command()
# def start():
# """Start override!!!!"""
# click.echo("start override!!!!!!!!!!!!")
| 26.068966
| 100
| 0.686508
| 108
| 756
| 4.805556
| 0.555556
| 0.057803
| 0.057803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181217
| 756
| 28
| 101
| 27
| 0.838449
| 0.911376
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d491a4959c08774954c0c2b17ce38d22a6236220
| 3,370
|
py
|
Python
|
{{cookiecutter.project_slug}}/app/tests/crud/test_user.py
|
nunenuh/cookiecutter-fastapi-postgresql
|
8a7ffce9a128b55439a1d17e4fc4c10b21ff6130
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/app/tests/crud/test_user.py
|
nunenuh/cookiecutter-fastapi-postgresql
|
8a7ffce9a128b55439a1d17e4fc4c10b21ff6130
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/app/tests/crud/test_user.py
|
nunenuh/cookiecutter-fastapi-postgresql
|
8a7ffce9a128b55439a1d17e4fc4c10b21ff6130
|
[
"MIT"
] | null | null | null |
from fastapi.encoders import jsonable_encoder
from sqlalchemy.orm import Session
import crud
from core.security import verify_password
from schemas.user import UserCreate, UserUpdate
from tests.utils.utils import random_email, random_lower_string
def test_create_user(db: Session) -> None:
    """A newly created user keeps its email and stores a hashed password."""
    address = random_email()
    secret = random_lower_string()
    created = crud.user.create(db, obj_in=UserCreate(email=address, password=secret))
    assert created.email == address
    assert hasattr(created, "hashed_password")
def test_authenticate_user(db: Session) -> None:
    """Authenticating with the correct credentials returns the same user."""
    address = random_email()
    secret = random_lower_string()
    created = crud.user.create(db, obj_in=UserCreate(email=address, password=secret))
    found = crud.user.authenticate(db, email=address, password=secret)
    assert found
    assert created.email == found.email
def test_not_authenticate_user(db: Session) -> None:
    """Authenticating an account that was never created yields None."""
    address = random_email()
    secret = random_lower_string()
    assert crud.user.authenticate(db, email=address, password=secret) is None
def test_check_if_user_is_active(db: Session) -> None:
    """A user created without any flags is active by default."""
    payload = UserCreate(email=random_email(), password=random_lower_string())
    created = crud.user.create(db, obj_in=payload)
    assert crud.user.is_active(created) is True
def test_check_if_user_is_active_inactive(db: Session) -> None:
    """Check the activity state of a user created with ``disabled=True``.

    NOTE(review): despite the test name and the ``disabled=True`` flag,
    this asserts the user IS active.  That only makes sense if the crud
    layer ignores ``disabled`` on create — confirm against
    ``crud.user.create``; if the flag is honored, the assertion should be
    ``assert not is_active`` instead.
    """
    email = random_email()
    password = random_lower_string()
    user_in = UserCreate(email=email, password=password, disabled=True)
    user = crud.user.create(db, obj_in=user_in)
    is_active = crud.user.is_active(user)
    assert is_active
def test_check_if_user_is_superuser(db: Session) -> None:
    """A user created with is_superuser=True is reported as a superuser."""
    payload = UserCreate(
        email=random_email(), password=random_lower_string(), is_superuser=True
    )
    created = crud.user.create(db, obj_in=payload)
    assert crud.user.is_superuser(created) is True
def test_check_if_user_is_superuser_normal_user(db: Session) -> None:
    """A regular user (no superuser flag) is not reported as a superuser."""
    payload = UserCreate(email=random_email(), password=random_lower_string())
    created = crud.user.create(db, obj_in=payload)
    assert crud.user.is_superuser(created) is False
def test_get_user(db: Session) -> None:
    """Fetching a user by id returns a record equivalent to the created one."""
    secret = random_lower_string()
    address = random_email()
    created = crud.user.create(
        db, obj_in=UserCreate(email=address, password=secret, is_superuser=True)
    )
    fetched = crud.user.get(db, id=created.id)
    assert fetched
    assert created.email == fetched.email
    # Full field-by-field comparison via JSON-serializable form.
    assert jsonable_encoder(created) == jsonable_encoder(fetched)
def test_update_user(db: Session) -> None:
    """Updating the password re-hashes it so the new password verifies."""
    secret = random_lower_string()
    address = random_email()
    created = crud.user.create(
        db, obj_in=UserCreate(email=address, password=secret, is_superuser=True)
    )
    replacement = random_lower_string()
    crud.user.update(
        db, db_obj=created, obj_in=UserUpdate(password=replacement, is_superuser=True)
    )
    fetched = crud.user.get(db, id=created.id)
    assert fetched
    assert created.email == fetched.email
    # The stored hash must validate against the new plaintext password.
    assert verify_password(replacement, fetched.hashed_password)
| 35.473684
| 83
| 0.742136
| 483
| 3,370
| 4.902692
| 0.109731
| 0.045608
| 0.07897
| 0.105574
| 0.732264
| 0.732264
| 0.709882
| 0.664696
| 0.608108
| 0.608108
| 0
| 0.002827
| 0.160237
| 3,370
| 94
| 84
| 35.851064
| 0.833922
| 0
| 0
| 0.546667
| 0
| 0
| 0.004451
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.12
| false
| 0.32
| 0.08
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
d494d6d29fed6541391f4cc808c55b7026b221d6
| 47
|
py
|
Python
|
salt/tops/__init__.py
|
Noah-Huppert/salt
|
998c382f5f2c3b4cbf7d96aa6913ada6993909b3
|
[
"Apache-2.0"
] | 19
|
2016-01-29T14:37:52.000Z
|
2022-03-30T18:08:01.000Z
|
salt/tops/__init__.py
|
Noah-Huppert/salt
|
998c382f5f2c3b4cbf7d96aa6913ada6993909b3
|
[
"Apache-2.0"
] | 223
|
2016-03-02T16:39:41.000Z
|
2022-03-03T12:26:35.000Z
|
salt/tops/__init__.py
|
Noah-Huppert/salt
|
998c382f5f2c3b4cbf7d96aa6913ada6993909b3
|
[
"Apache-2.0"
] | 64
|
2016-02-04T19:45:26.000Z
|
2021-12-15T02:02:31.000Z
|
# -*- coding: utf-8 -*-
"""
Tops Directory
"""
| 9.4
| 23
| 0.489362
| 5
| 47
| 4.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0.191489
| 47
| 4
| 24
| 11.75
| 0.578947
| 0.787234
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d4abe3ef4432a2dd5061474171e1e9b94fa50dd9
| 74,437
|
py
|
Python
|
toscaparser/tests/test_toscatplvalidation.py
|
indigo-dc/tosca-parser
|
937737c8a0cf23eedd9b99327d65e2930bd29e99
|
[
"Apache-2.0"
] | 2
|
2015-12-16T11:21:32.000Z
|
2019-07-04T14:00:25.000Z
|
toscaparser/tests/test_toscatplvalidation.py
|
indigo-dc/tosca-parser
|
937737c8a0cf23eedd9b99327d65e2930bd29e99
|
[
"Apache-2.0"
] | 4
|
2016-07-21T09:02:18.000Z
|
2018-07-05T10:05:58.000Z
|
toscaparser/tests/test_toscatplvalidation.py
|
indigo-dc/tosca-parser
|
937737c8a0cf23eedd9b99327d65e2930bd29e99
|
[
"Apache-2.0"
] | 3
|
2016-05-03T16:21:46.000Z
|
2019-07-04T14:00:31.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import six
from toscaparser.common import exception
from toscaparser.imports import ImportsLoader
from toscaparser.nodetemplate import NodeTemplate
from toscaparser.parameters import Input
from toscaparser.parameters import Output
from toscaparser.policy import Policy
from toscaparser.relationship_template import RelationshipTemplate
from toscaparser.repositories import Repository
from toscaparser.tests.base import TestCase
from toscaparser.topology_template import TopologyTemplate
from toscaparser.tosca_template import ToscaTemplate
from toscaparser.triggers import Triggers
from toscaparser.utils.gettextutils import _
import toscaparser.utils.yamlparser
class ToscaTemplateValidationTest(TestCase):
def test_well_defined_template(self):
tpl_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/tosca_single_instance_wordpress.yaml")
params = {'db_name': 'my_wordpress', 'db_user': 'my_db_user',
'db_root_pwd': '12345678'}
self.assertIsNotNone(ToscaTemplate(tpl_path, params))
def test_custom_interface_allowed(self):
tpl_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/interfaces/test_custom_interface_in_template.yaml")
self.assertIsNotNone(ToscaTemplate(tpl_path))
def test_custom_interface_invalid_operation(self):
tpl_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/interfaces/test_custom_interface_invalid_operation.yaml")
self.assertRaises(exception.ValidationError,
ToscaTemplate, tpl_path)
exception.ExceptionCollector.assertExceptionMessage(
exception.UnknownFieldError,
_('"interfaces" of template "customInterfaceTest" '
'contains unknown field "CustomOp4". '
'Refer to the definition to verify valid values.'))
def test_first_level_sections(self):
tpl_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/test_tosca_top_level_error1.yaml")
self.assertRaises(exception.ValidationError, ToscaTemplate, tpl_path)
exception.ExceptionCollector.assertExceptionMessage(
exception.MissingRequiredFieldError,
_('Template is missing required field '
'"tosca_definitions_version".'))
tpl_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/test_tosca_top_level_error2.yaml")
self.assertRaises(exception.ValidationError, ToscaTemplate, tpl_path)
exception.ExceptionCollector.assertExceptionMessage(
exception.UnknownFieldError,
_('Template contains unknown field "node_template". Refer to the '
'definition to verify valid values.'))
def test_template_with_imports_validation(self):
tpl_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/tosca_imports_validation.yaml")
self.assertRaises(exception.ValidationError, ToscaTemplate, tpl_path)
exception.ExceptionCollector.assertExceptionMessage(
exception.UnknownFieldError,
_('Template custom_types/imported_sample.yaml contains unknown '
'field "descriptions". Refer to the definition'
' to verify valid values.'))
exception.ExceptionCollector.assertExceptionMessage(
exception.UnknownFieldError,
_('Template custom_types/imported_sample.yaml contains unknown '
'field "node_typess". Refer to the definition to '
'verify valid values.'))
exception.ExceptionCollector.assertExceptionMessage(
exception.UnknownFieldError,
_('Template custom_types/imported_sample.yaml contains unknown '
'field "tosca1_definitions_version". Refer to the definition'
' to verify valid values.'))
versions = '", "'.join(ToscaTemplate.VALID_TEMPLATE_VERSIONS)
exception.ExceptionCollector.assertExceptionMessage(
exception.InvalidTemplateVersion,
_('The template version "tosca_simple_yaml_1_10 in '
'custom_types/imported_sample.yaml" is invalid. '
'Valid versions are "%s".') % versions)
exception.ExceptionCollector.assertExceptionMessage(
exception.UnknownFieldError,
_('Template custom_types/imported_sample.yaml contains unknown '
'field "policy_types1". Refer to the definition to '
'verify valid values.'))
exception.ExceptionCollector.assertExceptionMessage(
exception.UnknownFieldError,
_('Nodetype"tosca.nodes.SoftwareComponent.Logstash" contains '
'unknown field "capabilities1". Refer to the definition '
'to verify valid values.'))
exception.ExceptionCollector.assertExceptionMessage(
exception.UnknownFieldError,
_('Policy "mycompany.mytypes.myScalingPolicy" contains unknown '
'field "derived1_from". Refer to the definition to '
'verify valid values.'))
exception.ExceptionCollector.assertExceptionMessage(
exception.UnknownFieldError,
_('Relationshiptype "test.relation.connects" contains unknown '
'field "derived_from4". Refer to the definition to '
'verify valid values.'))
def test_getoperation_IncorrectValue(self):
# test case 1
tpl_snippet = '''
node_templates:
front_end:
type: tosca.nodes.Compute
interfaces:
Standard:
create:
implementation: scripts/frontend/create.sh
configure:
implementation: scripts/frontend/configure.sh
inputs:
data_dir: {get_operation_output: [front_end,Standard1,
create,data_dir]}
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
err = self.assertRaises(ValueError,
TopologyTemplate, tpl, None)
expectedmessage = _('Enter a valid interface name')
self.assertEqual(expectedmessage, err.__str__())
# test case 2
tpl_snippet2 = '''
node_templates:
front_end:
type: tosca.nodes.Compute
interfaces:
Standard:
create:
implementation: scripts/frontend/create.sh
configure:
implementation: scripts/frontend/configure.sh
inputs:
data_dir: {get_operation_output: [front_end1,Standard,
create,data_dir]}
'''
tpl2 = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet2))
err2 = self.assertRaises(KeyError,
TopologyTemplate, tpl2, None)
expectedmessage2 = _('\'Node template "front_end1" was not found.\'')
self.assertEqual(expectedmessage2, err2.__str__())
# test case 3
tpl_snippet3 = '''
node_templates:
front_end:
type: tosca.nodes.Compute
interfaces:
Standard:
create:
implementation: scripts/frontend/create.sh
configure:
implementation: scripts/frontend/configure.sh
inputs:
data_dir: {get_operation_output: [front_end,Standard,
get_target,data_dir]}
'''
tpl3 = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet3))
err3 = self.assertRaises(ValueError,
TopologyTemplate, tpl3, None)
expectedmessage3 = _('Enter an operation of Standard interface')
self.assertEqual(expectedmessage3, err3.__str__())
# test case 4
tpl_snippet4 = '''
node_templates:
front_end:
type: tosca.nodes.Compute
interfaces:
Standard:
create:
implementation: scripts/frontend/create.sh
configure:
implementation: scripts/frontend/configure.sh
inputs:
data_dir: {get_operation_output: [front_end,Configure,
create,data_dir]}
'''
tpl4 = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet4))
err4 = self.assertRaises(ValueError,
TopologyTemplate, tpl4, None)
expectedmessage4 = _('Enter an operation of Configure interface')
self.assertEqual(expectedmessage4, err4.__str__())
# test case 5
tpl_snippet5 = '''
node_templates:
front_end:
type: tosca.nodes.Compute
interfaces:
Standard:
create:
implementation: scripts/frontend/create.sh
configure:
implementation: scripts/frontend/configure.sh
inputs:
data_dir: {get_operation_output: [front_end,Standard,
create]}
'''
tpl5 = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet5))
err5 = self.assertRaises(ValueError,
TopologyTemplate, tpl5, None)
expectedmessage5 = _('Illegal arguments for function'
' "get_operation_output".'
' Expected arguments: "template_name",'
'"interface_name",'
'"operation_name","output_variable_name"')
self.assertEqual(expectedmessage5, err5.__str__())
def test_unsupported_type(self):
tpl_snippet = '''
node_templates:
invalid_type:
type: tosca.test.invalidtype
properties:
size: { get_input: storage_size }
snapshot_id: { get_input: storage_snapshot_id }
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
err = self.assertRaises(exception.UnsupportedTypeError,
TopologyTemplate, tpl, None)
expectedmessage = _('Type "tosca.test.invalidtype" is valid'
' TOSCA type but not supported at this time.')
self.assertEqual(expectedmessage, err.__str__())
def test_inputs(self):
tpl_snippet1 = '''
inputs:
cpus:
type: integer
description: Number of CPUs for the server.
constraint:
- valid_values: [ 1, 2, 4 ]
required: yes
status: supported
'''
tpl_snippet2 = '''
inputs:
cpus:
type: integer
description: Number of CPUs for the server.
constraints:
- valid_values: [ 1, 2, 4 ]
required: yes
status: supported
'''
tpl_snippet3 = '''
inputs:
some_list:
type: list
description: List of items
entry_schema:
type: string
default: []
'''
inputs1 = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet1)['inputs'])
name1, attrs1 = list(inputs1.items())[0]
inputs2 = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet2)['inputs'])
name2, attrs2 = list(inputs2.items())[0]
try:
Input(name1, attrs1)
except Exception as err:
self.assertEqual(_('Input "cpus" contains unknown field '
'"constraint". Refer to the definition to '
'verify valid values.'),
err.__str__())
input2 = Input(name2, attrs2)
self.assertTrue(input2.required)
toscaparser.utils.yamlparser.simple_parse(tpl_snippet3)['inputs']
def _imports_content_test(self, tpl_snippet, path, custom_type_def):
imports = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet)['imports'])
loader = ImportsLoader(imports, path, custom_type_def)
return loader.get_custom_defs()
def test_imports_without_templates(self):
tpl_snippet = '''
imports:
# omitted here for brevity
'''
path = 'toscaparser/tests/data/tosca_elk.yaml'
errormsg = _('"imports" keyname is defined without including '
'templates.')
err = self.assertRaises(exception.ValidationError,
self._imports_content_test,
tpl_snippet,
path,
"node_types")
self.assertEqual(errormsg, err.__str__())
def test_imports_with_name_without_templates(self):
tpl_snippet = '''
imports:
- some_definitions:
'''
path = 'toscaparser/tests/data/tosca_elk.yaml'
errormsg = _('A template file name is not provided with import '
'definition "some_definitions".')
err = self.assertRaises(exception.ValidationError,
self._imports_content_test,
tpl_snippet, path, None)
self.assertEqual(errormsg, err.__str__())
def test_imports_without_import_name(self):
tpl_snippet = '''
imports:
- custom_types/paypalpizzastore_nodejs_app.yaml
- https://raw.githubusercontent.com/openstack/\
tosca-parser/master/toscaparser/tests/data/custom_types/wordpress.yaml
'''
path = 'toscaparser/tests/data/tosca_elk.yaml'
custom_defs = self._imports_content_test(tpl_snippet,
path,
"node_types")
self.assertTrue(custom_defs)
def test_imports_wth_import_name(self):
tpl_snippet = '''
imports:
- some_definitions: custom_types/paypalpizzastore_nodejs_app.yaml
- more_definitions:
file: 'https://raw.githubusercontent.com/openstack/tosca-parser\
/master/toscaparser/tests/data/custom_types/wordpress.yaml'
namespace_prefix: single_instance_wordpress
'''
path = 'toscaparser/tests/data/tosca_elk.yaml'
custom_defs = self._imports_content_test(tpl_snippet,
path,
"node_types")
self.assertTrue(custom_defs.get("single_instance_wordpress.tosca."
"nodes.WebApplication.WordPress"))
def test_imports_wth_namespace_prefix(self):
tpl_snippet = '''
imports:
- more_definitions:
file: custom_types/nested_rsyslog.yaml
namespace_prefix: testprefix
'''
path = 'toscaparser/tests/data/tosca_elk.yaml'
custom_defs = self._imports_content_test(tpl_snippet,
path,
"node_types")
self.assertTrue(custom_defs.get("testprefix.Rsyslog"))
def test_imports_with_no_main_template(self):
tpl_snippet = '''
imports:
- some_definitions: https://raw.githubusercontent.com/openstack/\
tosca-parser/master/toscaparser/tests/data/custom_types/wordpress.yaml
- some_definitions:
file: my_defns/my_typesdefs_n.yaml
'''
errormsg = _('Input tosca template is not provided.')
err = self.assertRaises(exception.ValidationError,
self._imports_content_test,
tpl_snippet, None, None)
self.assertEqual(errormsg, err.__str__())
def test_imports_duplicate_name(self):
tpl_snippet = '''
imports:
- some_definitions: https://raw.githubusercontent.com/openstack/\
tosca-parser/master/toscaparser/tests/data/custom_types/wordpress.yaml
- some_definitions:
file: my_defns/my_typesdefs_n.yaml
'''
errormsg = _('Duplicate import name "some_definitions" was found.')
path = 'toscaparser/tests/data/tosca_elk.yaml'
err = self.assertRaises(exception.ValidationError,
self._imports_content_test,
tpl_snippet, path, None)
self.assertEqual(errormsg, err.__str__())
def test_imports_missing_req_field_in_def(self):
tpl_snippet = '''
imports:
- more_definitions:
file1: my_defns/my_typesdefs_n.yaml
repository: my_company_repo
namespace_uri: http://mycompany.com/ns/tosca/2.0
namespace_prefix: mycompany
'''
errormsg = _('Import of template "more_definitions" is missing '
'required field "file".')
path = 'toscaparser/tests/data/tosca_elk.yaml'
err = self.assertRaises(exception.MissingRequiredFieldError,
self._imports_content_test,
tpl_snippet, path, None)
self.assertEqual(errormsg, err.__str__())
def test_imports_file_with_uri(self):
tpl_snippet = '''
imports:
- more_definitions:
file: https://raw.githubusercontent.com/openstack/\
tosca-parser/master/toscaparser/tests/data/custom_types/wordpress.yaml
'''
path = 'https://raw.githubusercontent.com/openstack/\
tosca-parser/master/toscaparser/tests/data/\
tosca_single_instance_wordpress_with_url_import.yaml'
custom_defs = self._imports_content_test(tpl_snippet,
path,
"node_types")
self.assertTrue(custom_defs.get("tosca.nodes."
"WebApplication.WordPress"))
def test_imports_file_namespace_fields(self):
tpl_snippet = '''
imports:
- more_definitions:
file: https://raw.githubusercontent.com/openstack/\
heat-translator/master/translator/tests/data/custom_types/wordpress.yaml
namespace_prefix: mycompany
namespace_uri: http://docs.oasis-open.org/tosca/ns/simple/yaml/1.0
'''
path = 'toscaparser/tests/data/tosca_elk.yaml'
custom_defs = self._imports_content_test(tpl_snippet,
path,
"node_types")
self.assertTrue(custom_defs.get("mycompany.tosca.nodes."
"WebApplication.WordPress"))
def test_imports_file_with_suffix_yml(self):
tpl_snippet = '''
imports:
- custom_types/wordpress.yml
'''
path = 'toscaparser/tests/data/tosca_elk.yaml'
custom_defs = self._imports_content_test(tpl_snippet,
path,
"node_types")
self.assertTrue(custom_defs.get("tosca.nodes."
"WebApplication.WordPress"))
def test_import_error_file_uri(self):
tpl_snippet = '''
imports:
- more_definitions:
file: mycompany.com/ns/tosca/2.0/toscaparser/tests/data\
/tosca_elk.yaml
namespace_prefix: mycompany
namespace_uri: http://docs.oasis-open.org/tosca/ns/simple/yaml/1.0
'''
path = 'toscaparser/tests/data/tosca_elk.yaml'
self.assertRaises(ImportError,
self._imports_content_test,
tpl_snippet, path, None)
def test_import_single_line_error(self):
tpl_snippet = '''
imports:
- some_definitions: abc.com/tests/data/tosca_elk.yaml
'''
errormsg = _('Import "abc.com/tests/data/tosca_elk.yaml" is not '
'valid.')
path = 'toscaparser/tests/data/tosca_elk.yaml'
err = self.assertRaises(ImportError,
self._imports_content_test,
tpl_snippet, path, None)
self.assertEqual(errormsg, err.__str__())
def test_outputs(self):
tpl_snippet = '''
outputs:
server_address:
description: IP address of server instance.
values: { get_property: [server, private_address] }
'''
outputs = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet)['outputs'])
name, attrs = list(outputs.items())[0]
output = Output(name, attrs)
try:
output.validate()
except Exception as err:
self.assertTrue(
isinstance(err, exception.MissingRequiredFieldError))
self.assertEqual(_('Output "server_address" is missing required '
'field "value".'), err.__str__())
tpl_snippet = '''
outputs:
server_address:
descriptions: IP address of server instance.
value: { get_property: [server, private_address] }
'''
outputs = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet)['outputs'])
name, attrs = list(outputs.items())[0]
output = Output(name, attrs)
try:
output.validate()
except Exception as err:
self.assertIsInstance(err, exception.UnknownFieldError)
self.assertEqual(_('Output "server_address" contains unknown '
'field "descriptions". Refer to the definition '
'to verify valid values.'),
err.__str__())
def _repo_content(self, path):
repositories = path['repositories']
reposit = []
for name, val in repositories.items():
reposits = Repository(name, val)
reposit.append(reposits)
return reposit
def test_repositories(self):
tpl_snippet = '''
repositories:
repo_code0: https://raw.githubusercontent.com/nandinivemula/intern
repo_code1:
description: My project's code Repository in github usercontent.
url: https://github.com/nandinivemula/intern
credential:
user: nandini
password: tcs@12345
repo_code2:
description: My Project's code Repository in github.
url: https://github.com/nandinivemula/intern
credential:
user: xyzw
password: xyz@123
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
repoobject = self._repo_content(tpl)
actualrepo_names = []
for repo in repoobject:
repos = repo.name
actualrepo_names.append(repos)
reposname = list(tpl.values())
reposnames = reposname[0]
expected_reponames = list(reposnames.keys())
self.assertEqual(expected_reponames, actualrepo_names)
def test_repositories_with_missing_required_field(self):
tpl_snippet = '''
repositories:
repo_code0: https://raw.githubusercontent.com/nandinivemula/intern
repo_code1:
description: My project's code Repository in github usercontent.
credential:
user: nandini
password: tcs@12345
repo_code2:
description: My Project's code Repository in github.
url: https://github.com/nandinivemula/intern
credential:
user: xyzw
password: xyz@123
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
err = self.assertRaises(exception.MissingRequiredFieldError,
self._repo_content, tpl)
expectedmessage = _('Repository "repo_code1" is missing '
'required field "url".')
self.assertEqual(expectedmessage, err.__str__())
def test_repositories_with_unknown_field(self):
tpl_snippet = '''
repositories:
repo_code0: https://raw.githubusercontent.com/nandinivemula/intern
repo_code1:
description: My project's code Repository in github usercontent.
url: https://github.com/nandinivemula/intern
credential:
user: nandini
password: tcs@12345
repo_code2:
descripton: My Project's code Repository in github.
url: https://github.com/nandinivemula/intern
credential:
user: xyzw
password: xyz@123
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
err = self.assertRaises(exception.UnknownFieldError,
self._repo_content, tpl)
expectedmessage = _('repositories "repo_code2" contains unknown field'
' "descripton". Refer to the definition to verify'
' valid values.')
self.assertEqual(expectedmessage, err.__str__())
def test_repositories_with_invalid_url(self):
tpl_snippet = '''
repositories:
repo_code0: https://raw.githubusercontent.com/nandinivemula/intern
repo_code1:
description: My project's code Repository in github usercontent.
url: h
credential:
user: nandini
password: tcs@12345
repo_code2:
description: My Project's code Repository in github.
url: https://github.com/nandinivemula/intern
credential:
user: xyzw
password: xyz@123
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
err = self.assertRaises(exception.URLException,
self._repo_content, tpl)
expectedmessage = _('repsositories "repo_code1" Invalid Url')
self.assertEqual(expectedmessage, err.__str__())
def test_groups(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
mysql_dbms:
type: tosca.nodes.DBMS
properties:
root_password: aaa
port: 3376
groups:
webserver_group:
type: tosca.groups.Root
members: [ server, mysql_dbms ]
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
TopologyTemplate(tpl, None)
def test_groups_with_missing_required_field(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
mysql_dbms:
type: tosca.nodes.DBMS
properties:
root_password: aaa
port: 3376
groups:
webserver_group:
members: ['server', 'mysql_dbms']
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
err = self.assertRaises(exception.MissingRequiredFieldError,
TopologyTemplate, tpl, None)
expectedmessage = _('Template "webserver_group" is missing '
'required field "type".')
self.assertEqual(expectedmessage, err.__str__())
def test_groups_with_unknown_target(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
mysql_dbms:
type: tosca.nodes.DBMS
properties:
root_password: aaa
port: 3376
groups:
webserver_group:
type: tosca.groups.Root
members: [ serv, mysql_dbms ]
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
expectedmessage = _('"Target member "serv" is not found in '
'node_templates"')
err = self.assertRaises(exception.InvalidGroupTargetException,
TopologyTemplate, tpl, None)
self.assertEqual(expectedmessage, err.__str__())
def test_groups_with_repeated_targets(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
mysql_dbms:
type: tosca.nodes.DBMS
properties:
root_password: aaa
port: 3376
groups:
webserver_group:
type: tosca.groups.Root
members: [ server, server, mysql_dbms ]
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
expectedmessage = _('"Member nodes '
'"[\'server\', \'server\', \'mysql_dbms\']" '
'should be >= 1 and not repeated"')
err = self.assertRaises(exception.InvalidGroupTargetException,
TopologyTemplate, tpl, None)
self.assertEqual(expectedmessage, err.__str__())
def test_groups_with_only_one_target(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
mysql_dbms:
type: tosca.nodes.DBMS
properties:
root_password: aaa
port: 3376
groups:
webserver_group:
type: tosca.groups.Root
members: []
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
expectedmessage = _('"Member nodes "[]" should be >= 1 '
'and not repeated"')
err = self.assertRaises(exception.InvalidGroupTargetException,
TopologyTemplate, tpl, None)
self.assertEqual(expectedmessage, err.__str__())
def _custom_types(self):
custom_types = {}
def_file = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/custom_types/wordpress.yaml")
custom_type = toscaparser.utils.yamlparser.load_yaml(def_file)
node_types = custom_type['node_types']
for name in node_types:
defintion = node_types[name]
custom_types[name] = defintion
return custom_types
def _single_node_template_content_test(self, tpl_snippet):
nodetemplates = (toscaparser.utils.yamlparser.
simple_ordered_parse(tpl_snippet))['node_templates']
name = list(nodetemplates.keys())[0]
nodetemplate = NodeTemplate(name, nodetemplates,
self._custom_types())
nodetemplate.validate()
nodetemplate.requirements
nodetemplate.get_capabilities_objects()
nodetemplate.get_properties_objects()
nodetemplate.interfaces
def test_node_templates(self):
tpl_snippet = '''
node_templates:
server:
capabilities:
host:
properties:
disk_size: 10
num_cpus: 4
mem_size: 4096
os:
properties:
architecture: x86_64
type: Linux
distribution: Fedora
version: 18.0
'''
expectedmessage = _('Template "server" is missing required field '
'"type".')
err = self.assertRaises(
exception.MissingRequiredFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_with_wrong_properties_keyname(self):
"""Node template keyname 'properties' given as 'propertiessss'."""
tpl_snippet = '''
node_templates:
mysql_dbms:
type: tosca.nodes.DBMS
propertiessss:
root_password: aaa
port: 3376
'''
expectedmessage = _('Node template "mysql_dbms" contains unknown '
'field "propertiessss". Refer to the definition '
'to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_with_wrong_requirements_keyname(self):
"""Node template keyname 'requirements' given as 'requirement'."""
tpl_snippet = '''
node_templates:
mysql_dbms:
type: tosca.nodes.DBMS
properties:
root_password: aaa
port: 3376
requirement:
- host: server
'''
expectedmessage = _('Node template "mysql_dbms" contains unknown '
'field "requirement". Refer to the definition to '
'verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_with_wrong_interfaces_keyname(self):
"""Node template keyname 'interfaces' given as 'interfac'."""
tpl_snippet = '''
node_templates:
mysql_dbms:
type: tosca.nodes.DBMS
properties:
root_password: aaa
port: 3376
requirements:
- host: server
interfac:
Standard:
configure: mysql_database_configure.sh
'''
expectedmessage = _('Node template "mysql_dbms" contains unknown '
'field "interfac". Refer to the definition to '
'verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_with_wrong_capabilities_keyname(self):
"""Node template keyname 'capabilities' given as 'capabilitiis'."""
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
properties:
db_name: { get_input: db_name }
db_user: { get_input: db_user }
db_password: { get_input: db_pwd }
capabilitiis:
database_endpoint:
properties:
port: { get_input: db_port }
'''
expectedmessage = _('Node template "mysql_database" contains unknown '
'field "capabilitiis". Refer to the definition to '
'verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_with_wrong_artifacts_keyname(self):
"""Node template keyname 'artifacts' given as 'artifactsss'."""
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
artifactsss:
db_content:
implementation: files/my_db_content.txt
type: tosca.artifacts.File
'''
expectedmessage = _('Node template "mysql_database" contains unknown '
'field "artifactsss". Refer to the definition to '
'verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_with_multiple_wrong_keynames(self):
"""Node templates given with multiple wrong keynames."""
tpl_snippet = '''
node_templates:
mysql_dbms:
type: tosca.nodes.DBMS
propertieees:
root_password: aaa
port: 3376
requirements:
- host: server
interfacs:
Standard:
configure: mysql_database_configure.sh
'''
expectedmessage = _('Node template "mysql_dbms" contains unknown '
'field "propertieees". Refer to the definition to '
'verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
properties:
name: { get_input: db_name }
user: { get_input: db_user }
password: { get_input: db_pwd }
capabilitiiiies:
database_endpoint:
properties:
port: { get_input: db_port }
requirementsss:
- host:
node: mysql_dbms
interfac:
Standard:
configure: mysql_database_configure.sh
'''
expectedmessage = _('Node template "mysql_database" contains unknown '
'field "capabilitiiiies". Refer to the definition '
'to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_type(self):
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Databases
properties:
db_name: { get_input: db_name }
db_user: { get_input: db_user }
db_password: { get_input: db_pwd }
capabilities:
database_endpoint:
properties:
port: { get_input: db_port }
requirements:
- host: mysql_dbms
interfaces:
Standard:
configure: mysql_database_configure.sh
'''
expectedmessage = _('Type "tosca.nodes.Databases" is not '
'a valid type.')
err = self.assertRaises(
exception.InvalidTypeError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_requirements(self):
tpl_snippet = '''
node_templates:
webserver:
type: tosca.nodes.WebServer
requirements:
host: server
interfaces:
Standard:
create: webserver_install.sh
start: d.sh
'''
expectedmessage = _('"requirements" of template "webserver" must be '
'of type "list".')
err = self.assertRaises(
exception.TypeMismatchError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
properties:
db_name: { get_input: db_name }
db_user: { get_input: db_user }
db_password: { get_input: db_pwd }
capabilities:
database_endpoint:
properties:
port: { get_input: db_port }
requirements:
- host: mysql_dbms
- database_endpoint: mysql_database
interfaces:
Standard:
configure: mysql_database_configure.sh
'''
expectedmessage = _('"requirements" of template "mysql_database" '
'contains unknown field "database_endpoint". '
'Refer to the definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_requirements_with_wrong_node_keyname(self):
"""Node template requirements keyname 'node' given as 'nodes'."""
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
requirements:
- host:
nodes: mysql_dbms
'''
expectedmessage = _('"requirements" of template "mysql_database" '
'contains unknown field "nodes". Refer to the '
'definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_requirements_with_wrong_capability_keyname(self):
"""Incorrect node template requirements keyname
Node template requirements keyname 'capability' given as
'capabilityy'.
"""
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
requirements:
- host:
node: mysql_dbms
- log_endpoint:
node: logstash
capabilityy: log_endpoint
relationship:
type: tosca.relationships.ConnectsTo
'''
expectedmessage = _('"requirements" of template "mysql_database" '
'contains unknown field "capabilityy". Refer to '
'the definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_requirements_with_wrong_relationship_keyname(self):
"""Incorrect node template requirements keyname
Node template requirements keyname 'relationship' given as
'relationshipppp'.
"""
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
requirements:
- host:
node: mysql_dbms
- log_endpoint:
node: logstash
capability: log_endpoint
relationshipppp:
type: tosca.relationships.ConnectsTo
'''
expectedmessage = _('"requirements" of template "mysql_database" '
'contains unknown field "relationshipppp". Refer '
'to the definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_requirements_with_wrong_occurrences_keyname(self):
"""Incorrect node template requirements keyname
Node template requirements keyname 'occurrences' given as
'occurences'.
"""
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
requirements:
- host:
node: mysql_dbms
- log_endpoint:
node: logstash
capability: log_endpoint
relationship:
type: tosca.relationships.ConnectsTo
occurences: [0, UNBOUNDED]
'''
expectedmessage = _('"requirements" of template "mysql_database" '
'contains unknown field "occurences". Refer to '
'the definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_requirements_with_multiple_wrong_keynames(self):
"""Node templates given with multiple wrong requirements keynames."""
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
requirements:
- host:
node: mysql_dbms
- log_endpoint:
nod: logstash
capabilit: log_endpoint
relationshipppp:
type: tosca.relationships.ConnectsTo
'''
expectedmessage = _('"requirements" of template "mysql_database" '
'contains unknown field "nod". Refer to the '
'definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
requirements:
- host:
node: mysql_dbms
- log_endpoint:
node: logstash
capabilit: log_endpoint
relationshipppp:
type: tosca.relationships.ConnectsTo
'''
expectedmessage = _('"requirements" of template "mysql_database" '
'contains unknown field "capabilit". Refer to the '
'definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_requirements_invalid_occurrences(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
occurrences: [0, -1]
'''
expectedmessage = _('Value of property "[0, -1]" is invalid.')
err = self.assertRaises(
exception.InvalidPropertyValueError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
occurrences: [a, w]
'''
expectedmessage = _('"a" is not an integer.')
err = self.assertRaises(
ValueError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
occurrences: -1
'''
expectedmessage = _('"-1" is not a list.')
err = self.assertRaises(
ValueError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
occurrences: [5, 1]
'''
expectedmessage = _('Value of property "[5, 1]" is invalid.')
err = self.assertRaises(
exception.InvalidPropertyValueError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
occurrences: [0, 0]
'''
expectedmessage = _('Value of property "[0, 0]" is invalid.')
err = self.assertRaises(
exception.InvalidPropertyValueError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_requirements_valid_occurrences(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
requirements:
- log_endpoint:
capability: log_endpoint
occurrences: [2, 2]
'''
self._single_node_template_content_test(tpl_snippet)
def test_node_template_capabilities(self):
tpl_snippet = '''
node_templates:
mysql_database:
type: tosca.nodes.Database
properties:
db_name: { get_input: db_name }
db_user: { get_input: db_user }
db_password: { get_input: db_pwd }
capabilities:
http_endpoint:
properties:
port: { get_input: db_port }
requirements:
- host: mysql_dbms
interfaces:
Standard:
configure: mysql_database_configure.sh
'''
expectedmessage = _('"capabilities" of template "mysql_database" '
'contains unknown field "http_endpoint". Refer to '
'the definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_properties(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
properties:
os_image: F18_x86_64
capabilities:
host:
properties:
disk_size: 10 GB
num_cpus: { get_input: cpus }
mem_size: 4096 MB
os:
properties:
architecture: x86_64
type: Linux
distribution: Fedora
version: 18.0
'''
expectedmessage = _('"properties" of template "server" contains '
'unknown field "os_image". Refer to the '
'definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_interfaces(self):
tpl_snippet = '''
node_templates:
wordpress:
type: tosca.nodes.WebApplication.WordPress
requirements:
- host: webserver
- database_endpoint: mysql_database
interfaces:
Standards:
create: wordpress_install.sh
configure:
implementation: wordpress_configure.sh
inputs:
wp_db_name: { get_property: [ mysql_database, db_name ] }
wp_db_user: { get_property: [ mysql_database, db_user ] }
wp_db_password: { get_property: [ mysql_database, \
db_password ] }
wp_db_port: { get_property: [ SELF, \
database_endpoint, port ] }
'''
expectedmessage = _('"interfaces" of template "wordpress" contains '
'unknown field "Standards". Refer to the '
'definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
tpl_snippet = '''
node_templates:
wordpress:
type: tosca.nodes.WebApplication.WordPress
requirements:
- host: webserver
- database_endpoint: mysql_database
interfaces:
Standard:
create: wordpress_install.sh
config:
implementation: wordpress_configure.sh
inputs:
wp_db_name: { get_property: [ mysql_database, db_name ] }
wp_db_user: { get_property: [ mysql_database, db_user ] }
wp_db_password: { get_property: [ mysql_database, \
db_password ] }
wp_db_port: { get_property: [ SELF, \
database_endpoint, port ] }
'''
expectedmessage = _('"interfaces" of template "wordpress" contains '
'unknown field "config". Refer to the definition '
'to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
tpl_snippet = '''
node_templates:
wordpress:
type: tosca.nodes.WebApplication.WordPress
requirements:
- host: webserver
- database_endpoint: mysql_database
interfaces:
Standard:
create: wordpress_install.sh
configure:
implementation: wordpress_configure.sh
input:
wp_db_name: { get_property: [ mysql_database, db_name ] }
wp_db_user: { get_property: [ mysql_database, db_user ] }
wp_db_password: { get_property: [ mysql_database, \
db_password ] }
wp_db_port: { get_ref_property: [ database_endpoint, \
database_endpoint, port ] }
'''
expectedmessage = _('"interfaces" of template "wordpress" contains '
'unknown field "input". Refer to the definition '
'to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_relationship_template_properties(self):
tpl_snippet = '''
relationship_templates:
storage_attachto:
type: AttachesTo
properties:
device: test_device
'''
expectedmessage = _('"properties" of template "storage_attachto" is '
'missing required field "[\'location\']".')
rel_template = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet))['relationship_templates']
name = list(rel_template.keys())[0]
rel_template = RelationshipTemplate(rel_template[name], name)
err = self.assertRaises(exception.MissingRequiredFieldError,
rel_template.validate)
self.assertEqual(expectedmessage, six.text_type(err))
def test_invalid_template_version(self):
tosca_tpl = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/test_invalid_template_version.yaml")
self.assertRaises(exception.ValidationError, ToscaTemplate, tosca_tpl)
valid_versions = '", "'.join(ToscaTemplate.VALID_TEMPLATE_VERSIONS)
exception.ExceptionCollector.assertExceptionMessage(
exception.InvalidTemplateVersion,
(_('The template version "tosca_xyz" is invalid. Valid versions '
'are "%s".') % valid_versions))
def test_import_invalid_template_version(self):
tosca_tpl = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/test_import_invalid_template_version.yaml")
self.assertRaises(exception.ValidationError, ToscaTemplate, tosca_tpl)
valid_versions = '", "'.join(ToscaTemplate.VALID_TEMPLATE_VERSIONS)
exception.ExceptionCollector.assertExceptionMessage(
exception.InvalidTemplateVersion,
(_('The template version "tosca_simple_yaml_XXX in '
'{\'invalid\': \'custom_types/invalid_template_version.yaml\'}"'
' is invalid. Valid versions are "%s".') % valid_versions))
def test_import_template_metadata(self):
tosca_tpl = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/test_import_metadata.yml")
ToscaTemplate(tosca_tpl)
def test_node_template_capabilities_properties(self):
# validating capability property values
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.WebServer
capabilities:
data_endpoint:
properties:
initiator: test
'''
expectedmessage = _('The value "test" of property "initiator" is '
'not valid. Expected a value from "[source, '
'target, peer]".')
err = self.assertRaises(
exception.ValidationError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
capabilities:
host:
properties:
disk_size: 10 GB
num_cpus: { get_input: cpus }
mem_size: 4096 MB
os:
properties:
architecture: x86_64
type: Linux
distribution: Fedora
version: 18.0
scalable:
properties:
min_instances: 1
max_instances: 3
default_instances: 5
'''
expectedmessage = _('"properties" of template "server": '
'"default_instances" value is not between '
'"min_instances" and "max_instances".')
err = self.assertRaises(
exception.ValidationError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_objectstorage_without_required_property(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.ObjectStorage
properties:
maxsize: 1 GB
'''
expectedmessage = _('"properties" of template "server" is missing '
'required field "[\'name\']".')
err = self.assertRaises(
exception.MissingRequiredFieldError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_objectstorage_with_invalid_scalar_unit(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.ObjectStorage
properties:
name: test
maxsize: -1
'''
expectedmessage = _('"-1" is not a valid scalar-unit.')
err = self.assertRaises(
ValueError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_node_template_objectstorage_with_invalid_scalar_type(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.ObjectStorage
properties:
name: test
maxsize: 1 XB
'''
expectedmessage = _('"1 XB" is not a valid scalar-unit.')
err = self.assertRaises(
ValueError,
lambda: self._single_node_template_content_test(tpl_snippet))
self.assertEqual(expectedmessage, err.__str__())
def test_special_keywords(self):
"""Test special keywords
Test that special keywords, e.g. metadata, which are not part
of specification do not throw any validation error.
"""
tpl_snippet_metadata_map = '''
node_templates:
server:
type: tosca.nodes.Compute
metadata:
name: server A
role: master
'''
self._single_node_template_content_test(tpl_snippet_metadata_map)
tpl_snippet_metadata_inline = '''
node_templates:
server:
type: tosca.nodes.Compute
metadata: none
'''
self._single_node_template_content_test(tpl_snippet_metadata_inline)
def test_policy_valid_keynames(self):
tpl_snippet = '''
policies:
- servers_placement:
type: tosca.policies.Placement
description: Apply placement policy to servers
metadata: { user1: 1001, user2: 1002 }
targets: [ serv1, serv2 ]
'''
policies = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet))['policies'][0]
name = list(policies.keys())[0]
Policy(name, policies[name], None, None)
def test_policy_invalid_keyname(self):
tpl_snippet = '''
policies:
- servers_placement:
type: tosca.policies.Placement
testkey: testvalue
'''
policies = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet))['policies'][0]
name = list(policies.keys())[0]
expectedmessage = _('Policy "servers_placement" contains '
'unknown field "testkey". Refer to the '
'definition to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: Policy(name, policies[name], None, None))
self.assertEqual(expectedmessage, err.__str__())
def test_policy_trigger_valid_keyname_senlin_resources(self):
tpl_snippet = '''
triggers:
- resize_compute:
description: trigger
event_type: tosca.events.resource.utilization
schedule:
start_time: "2015-05-07T07:00:00Z"
end_time: "2015-06-07T07:00:00Z"
target_filter:
node: master-container
requirement: host
capability: Container
condition:
constraint: { greater_than: 50 }
granularity: 60
evaluations: 1
aggregation_method : mean
action:
resize: # Operation name
inputs:
strategy: LEAST_USED
implementation: Senlin.webhook()
'''
triggers = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet))['triggers'][0]
name = list(triggers.keys())[0]
Triggers(name, triggers[name])
def test_policy_trigger_valid_keyname_heat_resources(self):
tpl_snippet = '''
triggers:
- high_cpu_usage:
description: trigger
metric: cpu_util
condition:
constraint: utilization greater_than 60%
threshold: 60
granularity: 600
evaluations: 1
aggregation_method: mean
resource_type: instance
comparison_operator: gt
metadata: SG1
action: [SP1]
'''
triggers = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet))['triggers'][0]
name = list(triggers.keys())[0]
Triggers(name, triggers[name])
def test_policy_trigger_invalid_keyname_senlin_resources(self):
tpl_snippet = '''
triggers:
- resize_compute:
description: trigger
event_type: tosca.events.resource.utilization
schedule:
start_time: "2015-05-07T07:00:00Z"
end_time: "2015-06-07T07:00:00Z"
target_filter1:
node: master-container
requirement: host
capability: Container
condition:
constraint: utilization greater_than 50%
granularity1: 60
evaluations: 1
aggregation_method: mean
resource_type: instance
action:
resize: # Operation name
inputs:
strategy: LEAST_USED
implementation: Senlin.webhook()
'''
triggers = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet))['triggers'][0]
name = list(triggers.keys())[0]
expectedmessage = _(
'Triggers "resize_compute" contains unknown field '
'"target_filter1". Refer to the definition '
'to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: Triggers(name, triggers[name]))
self.assertEqual(expectedmessage, err.__str__())
def test_policy_trigger_invalid_keyname_heat_resources(self):
tpl_snippet = '''
triggers:
- high_cpu_usage:
description: trigger
metric: cpu_util
condition:
constraint: utilization greater_than 60%
threshold: 60
granularity: 600
evaluations: 1
aggregation_method: mean
resource_type: instance
comparison_operator: gt
metadata1: SG1
action: [SP1]
'''
triggers = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet))['triggers'][0]
name = list(triggers.keys())[0]
expectedmessage = _(
'Triggers "high_cpu_usage" contains unknown field '
'"metadata1". Refer to the definition '
'to verify valid values.')
err = self.assertRaises(
exception.UnknownFieldError,
lambda: Triggers(name, triggers[name]))
self.assertEqual(expectedmessage, err.__str__())
def test_policy_missing_required_keyname(self):
tpl_snippet = '''
policies:
- servers_placement:
description: test description
'''
policies = (toscaparser.utils.yamlparser.
simple_parse(tpl_snippet))['policies'][0]
name = list(policies.keys())[0]
expectedmessage = _('Template "servers_placement" is missing '
'required field "type".')
err = self.assertRaises(
exception.MissingRequiredFieldError,
lambda: Policy(name, policies[name], None, None))
self.assertEqual(expectedmessage, err.__str__())
def test_credential_datatype(self):
tosca_tpl = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/test_credential_datatype.yaml")
self.assertIsNotNone(ToscaTemplate(tosca_tpl))
def test_invalid_default_value(self):
tpl_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/test_invalid_input_defaults.yaml")
self.assertRaises(exception.ValidationError, ToscaTemplate, tpl_path)
exception.ExceptionCollector.assertExceptionMessage(
ValueError, _('"two" is not an integer.'))
def test_invalid_capability(self):
tpl_snippet = '''
node_templates:
server:
type: tosca.nodes.Compute
capabilities:
oss:
properties:
architecture: x86_64
'''
tpl = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet))
err = self.assertRaises(exception.UnknownFieldError,
TopologyTemplate, tpl, None)
expectedmessage = _('"capabilities" of template "server" contains '
'unknown field "oss". Refer to the definition '
'to verify valid values.')
self.assertEqual(expectedmessage, err.__str__())
def test_qualified_name(self):
tpl_snippet_full_name = '''
node_templates:
supported_type:
type: tosca.nodes.Compute
'''
tpl = (
toscaparser.utils.yamlparser.simple_parse(
tpl_snippet_full_name))
TopologyTemplate(tpl, None)
tpl_snippet_short_name = '''
node_templates:
supported_type:
type: Compute
'''
tpl = (
toscaparser.utils.yamlparser.simple_parse(
tpl_snippet_short_name))
TopologyTemplate(tpl, None)
tpl_snippet_qualified_name = '''
node_templates:
supported_type:
type: tosca:Compute
'''
tpl = (
toscaparser.utils.yamlparser.simple_parse(
tpl_snippet_qualified_name))
TopologyTemplate(tpl, None)
def test_requirements_as_list(self):
"""Node template with requirements provided with or without list
Node template requirements are required to be provided as list.
"""
expectedmessage = _('"requirements" of template "my_webserver"'
' must be of type "list".')
# requirements provided as dictionary
tpl_snippet1 = '''
node_templates:
my_webserver:
type: tosca.nodes.WebServer
requirements:
host: server
server:
type: tosca.nodes.Compute
'''
err1 = self.assertRaises(
exception.TypeMismatchError,
lambda: self._single_node_template_content_test(tpl_snippet1))
self.assertEqual(expectedmessage, err1.__str__())
# requirements provided as string
tpl_snippet2 = '''
node_templates:
my_webserver:
type: tosca.nodes.WebServer
requirements: server
server:
type: tosca.nodes.Compute
'''
err2 = self.assertRaises(
exception.TypeMismatchError,
lambda: self._single_node_template_content_test(tpl_snippet2))
self.assertEqual(expectedmessage, err2.__str__())
# requirements provided as list
tpl_snippet3 = '''
node_templates:
my_webserver:
type: tosca.nodes.WebServer
requirements:
- host: server
server:
type: tosca.nodes.Compute
'''
self.assertIsNone(
self._single_node_template_content_test(tpl_snippet3))
def test_properties_override_with_flavor_and_image(self):
    """Parsing the normative-type property-override template must succeed."""
    test_dir = os.path.dirname(os.path.abspath(__file__))
    template = os.path.join(
        test_dir, "data/test_normative_type_properties_override.yaml")
    self.assertIsNotNone(ToscaTemplate(template))
def test_long_rel(self):
    """The long-form relationship template must parse without error."""
    test_dir = os.path.dirname(os.path.abspath(__file__))
    template = os.path.join(test_dir, "data/test_long_rel.yaml")
    self.assertIsNotNone(ToscaTemplate(template))
| 39.742125
| 79
| 0.565633
| 6,553
| 74,437
| 6.142988
| 0.085457
| 0.037262
| 0.020171
| 0.02504
| 0.783555
| 0.748106
| 0.715812
| 0.694274
| 0.663768
| 0.639995
| 0
| 0.008154
| 0.354125
| 74,437
| 1,872
| 80
| 39.763355
| 0.829149
| 0.024759
| 0
| 0.726044
| 0
| 0.001176
| 0.494419
| 0.053242
| 0
| 0
| 0
| 0
| 0.093474
| 1
| 0.045267
| false
| 0.016461
| 0.045855
| 0
| 0.093474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d4af5adab14d4c8b51729fe20b72eb84fa39b312
| 7,482
|
py
|
Python
|
SiriusCRM/migrations/0014_auto_20190424_1715.py
|
VladimirARodionov/sirius
|
1c901935c65d7d02cbcf1f172b1f0ebd346bea95
|
[
"MIT"
] | 10
|
2018-12-21T13:42:13.000Z
|
2022-02-08T20:27:52.000Z
|
SiriusCRM/migrations/0014_auto_20190424_1715.py
|
VladimirARodionov/sirius
|
1c901935c65d7d02cbcf1f172b1f0ebd346bea95
|
[
"MIT"
] | 14
|
2018-11-23T10:02:14.000Z
|
2022-03-11T23:35:02.000Z
|
SiriusCRM/migrations/0014_auto_20190424_1715.py
|
VladimirARodionov/sirius
|
1c901935c65d7d02cbcf1f172b1f0ebd346bea95
|
[
"MIT"
] | 2
|
2018-11-23T12:29:55.000Z
|
2020-07-01T16:33:06.000Z
|
# Generated by Django 2.1.7 on 2019-04-24 14:15
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.1.7, 2019-04-24).

    Rewrites the ForeignKey fields of the linking tables, setting
    on_delete=CASCADE and an explicit related_name on each.
    """

    dependencies = [
        ('SiriusCRM', '0013_auto_20190424_1655'),
    ]

    operations = [
        migrations.AlterField(
            model_name='contactcomment',
            name='comment',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comment_value', to='SiriusCRM.ZdravnizaComment'),
        ),
        migrations.AlterField(
            model_name='contactcomment',
            name='contact',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_value', to='SiriusCRM.Contact'),
        ),
        migrations.AlterField(
            model_name='contactmessenger',
            name='contact',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_messenger_value', to='SiriusCRM.Contact'),
        ),
        migrations.AlterField(
            model_name='contactmessenger',
            name='messenger',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_messenger_value', to='SiriusCRM.Messenger'),
        ),
        migrations.AlterField(
            model_name='contactsocial',
            name='contact',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_social', to='SiriusCRM.Contact'),
        ),
        migrations.AlterField(
            model_name='contactsocial',
            name='social',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_social_value', to='SiriusCRM.Social'),
        ),
        # NOTE(review): the 'course' FK below targets SiriusCRM.Position,
        # not a Course model — confirm this is intentional.
        migrations.AlterField(
            model_name='coursecurator',
            name='course',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course_curator', to='SiriusCRM.Position'),
        ),
        migrations.AlterField(
            model_name='coursecurator',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_course_curator', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='leadmessenger',
            name='lead',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lead_messenger_value', to='SiriusCRM.Lead'),
        ),
        migrations.AlterField(
            model_name='leadmessenger',
            name='messenger',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lead_messenger_value', to='SiriusCRM.Messenger'),
        ),
        migrations.AlterField(
            model_name='usercategory',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='category_value', to='SiriusCRM.Category'),
        ),
        migrations.AlterField(
            model_name='usercategory',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_category', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='usercompetency',
            name='competency',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='competency_value', to='SiriusCRM.Competency'),
        ),
        migrations.AlterField(
            model_name='usercompetency',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_competency', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='usercourse',
            name='course',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course_value', to='SiriusCRM.Course'),
        ),
        migrations.AlterField(
            model_name='usercourse',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_course', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='usercourseessay',
            name='course',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course_essay', to='SiriusCRM.Course'),
        ),
        migrations.AlterField(
            model_name='usercourseessay',
            name='essay',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='essay_value', to='SiriusCRM.Essay'),
        ),
        migrations.AlterField(
            model_name='usercourseessay',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_course_essay', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='userfaculty',
            name='faculty',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='faculty_value', to='SiriusCRM.Faculty'),
        ),
        migrations.AlterField(
            model_name='userfaculty',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_faculty', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='useroffline',
            name='offline',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='offline_value', to='SiriusCRM.Offline'),
        ),
        migrations.AlterField(
            model_name='useroffline',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_offline', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='userposition',
            name='position',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='position_value', to='SiriusCRM.Position'),
        ),
        migrations.AlterField(
            model_name='userposition',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_position', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='usersocial',
            name='social',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='social_value', to='SiriusCRM.Social'),
        ),
        migrations.AlterField(
            model_name='usersocial',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_social', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='userunit',
            name='unit',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='unit_value', to='SiriusCRM.Unit'),
        ),
        migrations.AlterField(
            model_name='userunit',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_unit', to=settings.AUTH_USER_MODEL),
        ),
    ]
| 46.47205
| 147
| 0.641807
| 762
| 7,482
| 6.110236
| 0.091864
| 0.053265
| 0.090206
| 0.141753
| 0.875215
| 0.870275
| 0.736254
| 0.685782
| 0.685782
| 0.545747
| 0
| 0.005428
| 0.236701
| 7,482
| 160
| 148
| 46.7625
| 0.809841
| 0.006014
| 0
| 0.694805
| 1
| 0
| 0.176059
| 0.012777
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019481
| 0
| 0.038961
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d4afb22d3c8496a127019968776f3edb121e7af3
| 96
|
py
|
Python
|
v1.3/debugger/structs.py
|
amirgeva/z80pc
|
2daaa319ad7b313abdf0c73fc3faee8d6c36ed3e
|
[
"BSD-2-Clause"
] | null | null | null |
v1.3/debugger/structs.py
|
amirgeva/z80pc
|
2daaa319ad7b313abdf0c73fc3faee8d6c36ed3e
|
[
"BSD-2-Clause"
] | null | null | null |
v1.3/debugger/structs.py
|
amirgeva/z80pc
|
2daaa319ad7b313abdf0c73fc3faee8d6c36ed3e
|
[
"BSD-2-Clause"
] | null | null | null |
from dataclasses import dataclass
@dataclass
class Location:
    """A (filename, line) location record."""
    filename: str  # name of the file
    line: int      # line number within the file
| 10.666667
| 33
| 0.729167
| 11
| 96
| 6.363636
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.229167
| 96
| 8
| 34
| 12
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
d4b56d2cb1348f7c1364957285b3d720fd7efdfb
| 147
|
py
|
Python
|
ai/lib/__init__.py
|
delwp-species-modelling/frontend
|
70b2ffeac5cd28b4d384123bf894da9361fe41e6
|
[
"MIT"
] | 1
|
2019-04-24T04:22:06.000Z
|
2019-04-24T04:22:06.000Z
|
ai/lib/__init__.py
|
delwp-species-modelling/frontend
|
70b2ffeac5cd28b4d384123bf894da9361fe41e6
|
[
"MIT"
] | 1
|
2019-10-05T06:24:41.000Z
|
2019-10-05T06:24:41.000Z
|
ai/lib/__init__.py
|
delwp-species-modelling/delwp-species-modelling
|
70b2ffeac5cd28b4d384123bf894da9361fe41e6
|
[
"MIT"
] | null | null | null |
"""
lib/__init__.py
FIT3162 - Team 10 - Final Year Computer Science Project
Copyright Luke Silva, Aichi Tsuchihira, Harsil Patel 2019
"""
| 16.333333
| 59
| 0.714286
| 19
| 147
| 5.315789
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08547
| 0.204082
| 147
| 8
| 60
| 18.375
| 0.777778
| 0.884354
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d4c0bcda65477a2d99d54ae2516856ad62185682
| 169
|
py
|
Python
|
donor/admin.py
|
RobBickel/nyt-fec
|
802df867c3b31fff8e922be00bab6f40a5db2d00
|
[
"Apache-2.0"
] | 17
|
2018-03-27T15:09:58.000Z
|
2020-05-13T11:32:43.000Z
|
donor/admin.py
|
RobBickel/nyt-fec
|
802df867c3b31fff8e922be00bab6f40a5db2d00
|
[
"Apache-2.0"
] | 59
|
2018-03-21T17:08:15.000Z
|
2021-12-13T19:47:37.000Z
|
donor/admin.py
|
RobBickel/nyt-fec
|
802df867c3b31fff8e922be00bab6f40a5db2d00
|
[
"Apache-2.0"
] | 11
|
2018-09-11T23:18:32.000Z
|
2021-12-15T08:43:58.000Z
|
from django.contrib import admin
from donor.models import *
class DonorAdmin(admin.ModelAdmin):
    """Admin options for Donor: enable changelist search on 'nyt_name'."""
    search_fields = ['nyt_name']

# Register Donor with its customized admin class.
admin.site.register(Donor, DonorAdmin)
| 24.142857
| 38
| 0.781065
| 22
| 169
| 5.909091
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118343
| 169
| 7
| 38
| 24.142857
| 0.872483
| 0
| 0
| 0
| 0
| 0
| 0.047059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d4cd628fb2b5795c7789f0f0c622c41cbca2f003
| 85
|
py
|
Python
|
smsru/apps.py
|
iredun/django-smsru
|
21766e88fcd035115a07ec84b4f1e1cee8fa1271
|
[
"BSD-3-Clause"
] | 2
|
2021-01-20T08:50:04.000Z
|
2021-07-11T10:11:36.000Z
|
smsru/apps.py
|
iredun/django-smsru
|
21766e88fcd035115a07ec84b4f1e1cee8fa1271
|
[
"BSD-3-Clause"
] | null | null | null |
smsru/apps.py
|
iredun/django-smsru
|
21766e88fcd035115a07ec84b4f1e1cee8fa1271
|
[
"BSD-3-Clause"
] | 2
|
2021-07-11T11:07:15.000Z
|
2021-08-21T20:46:41.000Z
|
from django.apps import AppConfig
class SmsruConfig(AppConfig):
    """Django application configuration for the 'smsru' app."""
    name = 'smsru'
| 14.166667
| 33
| 0.741176
| 10
| 85
| 6.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 85
| 5
| 34
| 17
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
d4ed03bb066668e03d61d06ab767e60387459ef1
| 63
|
py
|
Python
|
pythonScript.py
|
MywellHC/Automate-the-Boring-Stuff
|
3e0541dffbf57a1c43f7d5c99bdfffb55034fd26
|
[
"MIT"
] | null | null | null |
pythonScript.py
|
MywellHC/Automate-the-Boring-Stuff
|
3e0541dffbf57a1c43f7d5c99bdfffb55034fd26
|
[
"MIT"
] | null | null | null |
pythonScript.py
|
MywellHC/Automate-the-Boring-Stuff
|
3e0541dffbf57a1c43f7d5c99bdfffb55034fd26
|
[
"MIT"
] | null | null | null |
#! python
# pythonScript.py - trivial script: prints a fixed greeting.
greeting = 'Hello World'
print(greeting)
| 15.75
| 31
| 0.698413
| 9
| 63
| 4.888889
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15873
| 63
| 4
| 32
| 15.75
| 0.830189
| 0.603175
| 0
| 0
| 0
| 0
| 0.458333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
d4edb9711cbe82d5d126a2216cd1d1c44e831517
| 5,050
|
py
|
Python
|
test/test_rules.py
|
RobotNerd/proze-python-converter
|
2695904c8ef8eeed4b9f6ccc2f962daacfdaa620
|
[
"MIT"
] | null | null | null |
test/test_rules.py
|
RobotNerd/proze-python-converter
|
2695904c8ef8eeed4b9f6ccc2f962daacfdaa620
|
[
"MIT"
] | null | null | null |
test/test_rules.py
|
RobotNerd/proze-python-converter
|
2695904c8ef8eeed4b9f6ccc2f962daacfdaa620
|
[
"MIT"
] | null | null | null |
from dotmap import DotMap
from lib.rules import Rules
from lib.state import State
import unittest
class TestNames(unittest.TestCase):
    # NOTE(review): the "title" / "not_first_paragraph" / "not_first_line"
    # tests set state.is_chapter / state.is_section, while the "chapter" /
    # "section" tests set state.markup.is_chapter / state.markup.is_section.
    # Confirm which attributes Rules.first_character actually reads.

    def _create_rules_prose_first_char(self):
        """Create a rules object for inserting characters.

        @rtype: lib.rules.Rules
        @return: Rules object.
        """
        options = DotMap()
        options.compile.paragraph.mode = 'prose'
        options.compile.paragraph.tabFirst.title = False
        options.compile.paragraph.tabFirst.chapter = True
        options.compile.paragraph.tabFirst.section = True
        return Rules(options)

    def test_clean_whitespace(self):
        """Clean up whitespace."""
        options = DotMap()
        rules = Rules(options)
        # Each entry is an [input, expected-output] pair.
        # NOTE(review): several "input" strings below look identical to their
        # expected outputs (e.g. the "Multiple spaces" case) — repeated
        # whitespace may have been collapsed in transit; verify upstream.
        lines = [
            [
                ' Whitespace is removed from either end of the line. ',
                'Whitespace is removed from either end of the line.',
            ],
            [
                ' Whitespace is removed from either end of the line. ',
                'Whitespace is removed from either end of the line.',
            ],
            [
                'Multiple spaces are merged into a space.',
                'Multiple spaces are merged into a space.',
            ],
            [
                '\tTabs are merged\t\tinto single spaces.\t\t',
                'Tabs are merged into single spaces.',
            ],
            [
                '\tMixed whitespace is \t treated the same.\t\t',
                'Mixed whitespace is treated the same.',
            ],
        ]
        for line in lines:
            self.assertEqual(rules.clean_whitespace(line[0]), line[1])

    def test_first_character_title(self):
        """Don't insert tab in first paragraph in the story."""
        rules = self._create_rules_prose_first_char()
        state = State()
        state.previous_line.is_blank = True
        state.is_first_paragraph = True
        state.is_chapter = False
        state.is_section = False
        # No indent in prose mode for the story's opening paragraph.
        self.assertEqual(rules.first_character(state), '')
        self.assertEqual(rules.first_character(state, use_spaces=True), '')
        # Justified mode never indents.
        rules.options.compile.paragraph.mode = 'justified'
        self.assertEqual(rules.first_character(state), '')
        self.assertEqual(rules.first_character(state, use_spaces=True), '')

    def test_first_character_not_first_paragraph(self):
        """Insert a tab if it isn't the first paragraph."""
        rules = self._create_rules_prose_first_char()
        state = State()
        state.previous_line.is_blank = True
        state.is_first_paragraph = False
        state.is_chapter = False
        state.is_section = False
        # Prose mode indents subsequent paragraphs (tab or four spaces).
        self.assertEqual(rules.first_character(state), '\t')
        self.assertEqual(rules.first_character(state, use_spaces=True), '    ')
        rules.options.compile.paragraph.mode = 'justified'
        self.assertEqual(rules.first_character(state), '')
        self.assertEqual(rules.first_character(state, use_spaces=True), '')

    def test_first_character_not_first_line(self):
        """Don't insert a tab if the previous line isn't blank."""
        rules = self._create_rules_prose_first_char()
        state = State()
        state.previous_line.is_blank = False
        state.is_first_paragraph = False
        state.is_chapter = False
        state.is_section = False
        # Mid-paragraph lines get no indent in either mode.
        self.assertEqual(rules.first_character(state), '')
        self.assertEqual(rules.first_character(state, use_spaces=True), '')
        rules.options.compile.paragraph.mode = 'justified'
        self.assertEqual(rules.first_character(state), '')
        self.assertEqual(rules.first_character(state, use_spaces=True), '')

    def test_first_character_chapter(self):
        """Insert a tab for first paragraph of a chapter."""
        rules = self._create_rules_prose_first_char()
        state = State()
        state.previous_line.is_blank = True
        state.is_first_paragraph = True
        state.markup.is_chapter = True
        state.markup.is_section = False
        # tabFirst.chapter is True in the fixture, so prose mode indents.
        self.assertEqual(rules.first_character(state), '\t')
        self.assertEqual(rules.first_character(state, use_spaces=True), '    ')
        rules.options.compile.paragraph.mode = 'justified'
        self.assertEqual(rules.first_character(state), '')
        self.assertEqual(rules.first_character(state, use_spaces=True), '')

    def test_first_character_section(self):
        """Insert a tab for first paragraph of a section."""
        rules = self._create_rules_prose_first_char()
        state = State()
        state.previous_line.is_blank = True
        state.is_first_paragraph = True
        state.markup.is_chapter = False
        state.markup.is_section = True
        # tabFirst.section is True in the fixture, so prose mode indents.
        self.assertEqual(rules.first_character(state), '\t')
        self.assertEqual(rules.first_character(state, use_spaces=True), '    ')
        rules.options.compile.paragraph.mode = 'justified'
        self.assertEqual(rules.first_character(state), '')
        self.assertEqual(rules.first_character(state, use_spaces=True), '')
| 42.436975
| 79
| 0.632475
| 587
| 5,050
| 5.250426
| 0.132879
| 0.113563
| 0.136275
| 0.162232
| 0.711226
| 0.703115
| 0.703115
| 0.6817
| 0.6817
| 0.659637
| 0
| 0.000539
| 0.264554
| 5,050
| 118
| 80
| 42.79661
| 0.829295
| 0.071089
| 0
| 0.636364
| 0
| 0
| 0.114903
| 0
| 0
| 0
| 0
| 0
| 0.212121
| 1
| 0.070707
| false
| 0
| 0.040404
| 0
| 0.131313
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
d4eea3821cd360f61dd0b145756d4384c7e1203f
| 140
|
py
|
Python
|
filters/organisation_url.py
|
digital-land/organisation
|
907f079ff00c4d19175ac1494863d7f09055b45d
|
[
"MIT"
] | null | null | null |
filters/organisation_url.py
|
digital-land/organisation
|
907f079ff00c4d19175ac1494863d7f09055b45d
|
[
"MIT"
] | 1
|
2021-04-14T17:09:53.000Z
|
2021-04-14T17:09:53.000Z
|
filters/organisation_url.py
|
digital-land/organisation
|
907f079ff00c4d19175ac1494863d7f09055b45d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
def organisation_url_filter(org_id):
    """Turn a prefixed organisation id into its /organisation/ URL path.

    Every ':' in the id becomes a path separator.
    """
    return "/organisation/" + org_id.replace(":", "/")
| 23.333333
| 46
| 0.678571
| 19
| 140
| 4.684211
| 0.684211
| 0.11236
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008403
| 0.15
| 140
| 5
| 47
| 28
| 0.739496
| 0.15
| 0
| 0
| 0
| 0
| 0.135593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
d4ffda4f2e3b78547b6f4fcce6728fc4260a2e5f
| 188
|
py
|
Python
|
backend/workers/serializers.py
|
starmarek/organize-me-2
|
bd9b73d3e6d9a4ebc4cbb8a20c97729bdc6b1377
|
[
"MIT"
] | 1
|
2021-03-09T20:49:51.000Z
|
2021-03-09T20:49:51.000Z
|
backend/workers/serializers.py
|
starmarek/organize-me-2
|
bd9b73d3e6d9a4ebc4cbb8a20c97729bdc6b1377
|
[
"MIT"
] | 7
|
2021-05-08T11:05:15.000Z
|
2021-05-08T11:12:27.000Z
|
backend/workers/serializers.py
|
starmarek/organize-me-2
|
bd9b73d3e6d9a4ebc4cbb8a20c97729bdc6b1377
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from .models import Worker
class WorkerSerializer(serializers.ModelSerializer):
    """DRF serializer exposing every field of the Worker model."""
    class Meta:
        model = Worker
        fields = "__all__"
| 18.8
| 52
| 0.728723
| 19
| 188
| 6.947368
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.218085
| 188
| 9
| 53
| 20.888889
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0.037234
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
be00294ab6153c1b9e6ce0b23add74b796e9a97d
| 297
|
py
|
Python
|
test/simple_source/bug26/05-ret-or.py
|
gauravssnl/python-uncompyle6
|
136f42a610c0701e0770c1c278efd1107b1c6ed1
|
[
"MIT"
] | 1
|
2021-03-24T11:54:03.000Z
|
2021-03-24T11:54:03.000Z
|
test/simple_source/bug26/05-ret-or.py
|
gauravssnl/python-uncompyle6
|
136f42a610c0701e0770c1c278efd1107b1c6ed1
|
[
"MIT"
] | null | null | null |
test/simple_source/bug26/05-ret-or.py
|
gauravssnl/python-uncompyle6
|
136f42a610c0701e0770c1c278efd1107b1c6ed1
|
[
"MIT"
] | null | null | null |
# Python 2.6
# In contrast to Python 2.7 there might be no "COME_FROM" so we add rule:
# ret_or ::= expr JUMP_IF_TRUE expr
# where Python 2.7 has
# ret_or ::= expr JUMP_IF_TRUE expr COME_FROM
# Decompiler grammar fixture: the 'expr or expr' in a return statement
# exercises the ret_or rule described in the header comments.  The exact
# code shape is the point — do not restructure it.
class BufferedIncrementalEncoder(object):
    def getstate(self):
        # Falls back to 0 when self.buffer is falsy.
        return self.buffer or 0
| 29.7
| 73
| 0.707071
| 51
| 297
| 3.960784
| 0.666667
| 0.10396
| 0.079208
| 0.128713
| 0.227723
| 0.227723
| 0.227723
| 0
| 0
| 0
| 0
| 0.029915
| 0.212121
| 297
| 9
| 74
| 33
| 0.833333
| 0.62963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
076e08b08ad1ddf412bd6c1d4d34ae599abfb8e9
| 81
|
py
|
Python
|
dry/apps.py
|
fchevitarese/routecalc
|
4222d61511885acaad58595689af05eef5ef4218
|
[
"MIT"
] | null | null | null |
dry/apps.py
|
fchevitarese/routecalc
|
4222d61511885acaad58595689af05eef5ef4218
|
[
"MIT"
] | null | null | null |
dry/apps.py
|
fchevitarese/routecalc
|
4222d61511885acaad58595689af05eef5ef4218
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class DryConfig(AppConfig):
    """Django application configuration for the 'dry' app."""
    name = 'dry'
| 13.5
| 33
| 0.728395
| 10
| 81
| 5.9
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 81
| 5
| 34
| 16.2
| 0.893939
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
077b41b6ce5df653e05dd24b52f6b16a1e776fb8
| 2,080
|
py
|
Python
|
tests/check_case_conflict_test.py
|
sondrelg/pre-commit-hooks
|
0ba9ced2feeead99a04b9128eab544d3f3b5b1b8
|
[
"MIT"
] | 1
|
2022-02-24T07:16:37.000Z
|
2022-02-24T07:16:37.000Z
|
tests/check_case_conflict_test.py
|
squeaky-pl/pre-commit-hooks
|
1bfdf7f62cf821a3c6876a9ec978301187c334ae
|
[
"MIT"
] | 2
|
2018-06-21T23:29:15.000Z
|
2019-04-12T16:20:05.000Z
|
tests/check_case_conflict_test.py
|
squeaky-pl/pre-commit-hooks
|
1bfdf7f62cf821a3c6876a9ec978301187c334ae
|
[
"MIT"
] | 1
|
2016-05-06T15:27:07.000Z
|
2016-05-06T15:27:07.000Z
|
from __future__ import absolute_import
from __future__ import unicode_literals
from pre_commit_hooks.check_case_conflict import find_conflicting_filenames
from pre_commit_hooks.check_case_conflict import main
from pre_commit_hooks.util import cmd_output
def test_nothing_added(temp_git_dir):
    # Empty repo: nothing staged, so no case conflicts (returns 0).
    with temp_git_dir.as_cwd():
        assert find_conflicting_filenames(['f.py']) == 0
def test_adding_something(temp_git_dir):
    # A single staged file cannot conflict with itself.
    with temp_git_dir.as_cwd():
        temp_git_dir.join('f.py').write("print('hello world')")
        cmd_output('git', 'add', 'f.py')
        assert find_conflicting_filenames(['f.py']) == 0
def test_adding_something_with_conflict(temp_git_dir):
    # 'f.py' and 'F.py' differ only by case: a conflict is reported (1).
    with temp_git_dir.as_cwd():
        temp_git_dir.join('f.py').write("print('hello world')")
        cmd_output('git', 'add', 'f.py')
        temp_git_dir.join('F.py').write("print('hello world')")
        cmd_output('git', 'add', 'F.py')
        assert find_conflicting_filenames(['f.py', 'F.py']) == 1
def test_added_file_not_in_pre_commits_list(temp_git_dir):
    # 'g.py' does not clash (even case-insensitively) with staged 'f.py'.
    with temp_git_dir.as_cwd():
        temp_git_dir.join('f.py').write("print('hello world')")
        cmd_output('git', 'add', 'f.py')
        assert find_conflicting_filenames(['g.py']) == 0
def test_file_conflicts_with_committed_file(temp_git_dir):
    # A newly staged 'F.py' conflicts with the already-committed 'f.py'.
    with temp_git_dir.as_cwd():
        temp_git_dir.join('f.py').write("print('hello world')")
        cmd_output('git', 'add', 'f.py')
        cmd_output('git', 'commit', '--no-gpg-sign', '-n', '-m', 'Add f.py')
        temp_git_dir.join('F.py').write("print('hello world')")
        cmd_output('git', 'add', 'F.py')
        assert find_conflicting_filenames(['F.py']) == 1
def test_integration(temp_git_dir):
    # End-to-end via main(): clean repo and single file pass (0),
    # a case-conflicting second file fails (1).
    with temp_git_dir.as_cwd():
        assert main(argv=[]) == 0
        temp_git_dir.join('f.py').write("print('hello world')")
        cmd_output('git', 'add', 'f.py')
        assert main(argv=['f.py']) == 0
        temp_git_dir.join('F.py').write("print('hello world')")
        cmd_output('git', 'add', 'F.py')
        assert main(argv=['F.py']) == 1
| 32
| 76
| 0.651442
| 318
| 2,080
| 3.918239
| 0.179245
| 0.057785
| 0.160514
| 0.089888
| 0.779294
| 0.764045
| 0.764045
| 0.764045
| 0.698234
| 0.698234
| 0
| 0.004678
| 0.177885
| 2,080
| 64
| 77
| 32.5
| 0.723977
| 0
| 0
| 0.571429
| 0
| 0
| 0.1625
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 1
| 0.142857
| false
| 0
| 0.119048
| 0
| 0.261905
| 0.190476
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
077fa3d3104bc9d89f290a8f0e4f8018dc9a366d
| 674
|
py
|
Python
|
tests/extra/logic_test.py
|
cornell-brg/lizard
|
7f9a78a913e64b5cfdee3a26223539ad225bd6da
|
[
"BSD-3-Clause"
] | 50
|
2019-05-22T08:43:15.000Z
|
2022-03-21T23:58:50.000Z
|
tests/extra/logic_test.py
|
cornell-brg/lizard
|
7f9a78a913e64b5cfdee3a26223539ad225bd6da
|
[
"BSD-3-Clause"
] | 1
|
2019-07-27T18:51:52.000Z
|
2019-08-02T01:20:22.000Z
|
tests/extra/logic_test.py
|
cornell-brg/lizard
|
7f9a78a913e64b5cfdee3a26223539ad225bd6da
|
[
"BSD-3-Clause"
] | 11
|
2019-12-26T06:00:48.000Z
|
2022-03-27T02:29:35.000Z
|
import pytest
from pymtl import *
from tests.context import lizard
from lizard.model.test_model import run_test_state_machine
from lizard.util.rtl.logic import BinaryComparatorInterface, LogicOperatorInterface, Equals, And, Or
from lizard.util.fl.logic import EqualsFL, AndFL, OrFL
def test_state_machine_equals():
    # Check Equals against EqualsFL (presumably the functional-level model,
    # per the lizard.util.fl import) on a 4-bit comparator interface;
    # translate_model=True also exercises the translated model.
    run_test_state_machine(
        Equals, EqualsFL, (BinaryComparatorInterface(4)), translate_model=True)
def test_state_machine_and():
    # Check And against AndFL on a 10-port logic-operator interface.
    run_test_state_machine(
        And, AndFL, (LogicOperatorInterface(10)), translate_model=True)
def test_state_machine_or():
    # Check Or against OrFL on a 10-port logic-operator interface.
    run_test_state_machine(
        Or, OrFL, (LogicOperatorInterface(10)), translate_model=True)
| 30.636364
| 100
| 0.801187
| 88
| 674
| 5.852273
| 0.329545
| 0.12233
| 0.217476
| 0.147573
| 0.271845
| 0.143689
| 0.143689
| 0
| 0
| 0
| 0
| 0.008403
| 0.117211
| 674
| 21
| 101
| 32.095238
| 0.857143
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
078b7b6a7d93eafe9cbd62d38deed2bbeb39e173
| 181
|
py
|
Python
|
rlpyt/utils/logging/visualize.py
|
alexsax/testing_rlpyt
|
d8ddf3b1f285c8f0c94f5595922d64177782b3c0
|
[
"MIT"
] | 1
|
2020-05-28T07:42:43.000Z
|
2020-05-28T07:42:43.000Z
|
rlpyt/utils/logging/visualize.py
|
alexsax/testing_rlpyt
|
d8ddf3b1f285c8f0c94f5595922d64177782b3c0
|
[
"MIT"
] | null | null | null |
rlpyt/utils/logging/visualize.py
|
alexsax/testing_rlpyt
|
d8ddf3b1f285c8f0c94f5595922d64177782b3c0
|
[
"MIT"
] | null | null | null |
import imageio
from PIL import Image
import numpy as np
def frames_to_gif(path, frames):
    """Write *frames* (arrays convertible by Image.fromarray) to *path*
    as an animated image via imageio.mimsave."""
    images = list(map(Image.fromarray, frames))
    imageio.mimsave(path, images)
| 25.857143
| 57
| 0.756906
| 28
| 181
| 4.821429
| 0.642857
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165746
| 181
| 7
| 58
| 25.857143
| 0.89404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
07bacae131a65070cc3f858d55c0880ac63688d3
| 27
|
py
|
Python
|
dlmatpower/version.py
|
yasirroni/dlmatpower
|
5fa89e35136bd77e9c3f651549dedf8e0746369a
|
[
"MIT"
] | null | null | null |
dlmatpower/version.py
|
yasirroni/dlmatpower
|
5fa89e35136bd77e9c3f651549dedf8e0746369a
|
[
"MIT"
] | null | null | null |
dlmatpower/version.py
|
yasirroni/dlmatpower
|
5fa89e35136bd77e9c3f651549dedf8e0746369a
|
[
"MIT"
] | null | null | null |
# Package version (PEP 440 post-release of 0.0.1).
__version__ = "0.0.1.post1"
| 27
| 27
| 0.703704
| 5
| 27
| 3
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 0.074074
| 27
| 1
| 27
| 27
| 0.44
| 0
| 0
| 0
| 0
| 0
| 0.392857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
07c278d893f6638d9a222786175be02be01d1446
| 24
|
py
|
Python
|
python/codingame/practice/easy/rubgy_score/test/__init__.py
|
TGITS/programming-workouts
|
799e805ccf3fd0936ec8ac2417f7193b8e9bcb55
|
[
"MIT"
] | null | null | null |
python/codingame/practice/easy/rubgy_score/test/__init__.py
|
TGITS/programming-workouts
|
799e805ccf3fd0936ec8ac2417f7193b8e9bcb55
|
[
"MIT"
] | 16
|
2020-05-30T12:38:13.000Z
|
2022-02-19T09:23:31.000Z
|
python/codingame/practice/easy/rubgy_score/test/__init__.py
|
TGITS/programming-workouts
|
799e805ccf3fd0936ec8ac2417f7193b8e9bcb55
|
[
"MIT"
] | null | null | null |
# Re-export list honored by `from <package> import *`.  The previous
# spelling `all = [...]` merely shadowed the builtin `all` and had no
# effect on star-imports; the dunder name is required.
__all__ = ["test_rugby_score"]
| 24
| 24
| 0.791667
| 4
| 24
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 24
| 1
| 24
| 24
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
07cbf20feced87b8f3cacc65a1d07ca51956d761
| 436
|
py
|
Python
|
app/auth/forms.py
|
wang-junjian/learn-flask
|
035be466eef655e2b71258d1bd4783d630634cb5
|
[
"MIT"
] | null | null | null |
app/auth/forms.py
|
wang-junjian/learn-flask
|
035be466eef655e2b71258d1bd4783d630634cb5
|
[
"MIT"
] | null | null | null |
app/auth/forms.py
|
wang-junjian/learn-flask
|
035be466eef655e2b71258d1bd4783d630634cb5
|
[
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import DataRequired, Length, Email
class LoginForm(FlaskForm):
    """Sign-in form: email, password, remember-me flag, submit button."""
    # Email is required, limited to 1-64 characters, and validated as
    # an email address.
    email = StringField('Email', validators=[DataRequired(), Length(1,64), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember_me = BooleanField('Keep me logged in')
    submit = SubmitField('Log In')
| 43.6
| 84
| 0.761468
| 47
| 436
| 7.021277
| 0.553191
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007895
| 0.12844
| 436
| 9
| 85
| 48.444444
| 0.860526
| 0
| 0
| 0
| 0
| 0
| 0.082569
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.25
| 0.375
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
5802340f2e76deff72cda41297df9cd7a3bd0619
| 69
|
py
|
Python
|
test/data/keyword_before_parenth_form/py3.py
|
javulticat/flake8-commas
|
32647a9806e4e1d1ad4e6682e8d3229a519de43a
|
[
"MIT"
] | 97
|
2018-01-13T03:13:57.000Z
|
2022-03-28T06:18:33.000Z
|
test/data/keyword_before_parenth_form/py3.py
|
javulticat/flake8-commas
|
32647a9806e4e1d1ad4e6682e8d3229a519de43a
|
[
"MIT"
] | 28
|
2017-01-13T17:04:56.000Z
|
2018-01-03T06:15:56.000Z
|
test/data/keyword_before_parenth_form/py3.py
|
javulticat/flake8-commas
|
32647a9806e4e1d1ad4e6682e8d3229a519de43a
|
[
"MIT"
] | 9
|
2018-03-15T15:01:28.000Z
|
2022-03-01T17:50:09.000Z
|
# Syntax error in Py2: ``yield from`` is Python-3-only (lint-fixture input).
def foo():
    yield from (
        foo
    )
| 9.857143
| 21
| 0.492754
| 9
| 69
| 3.777778
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.42029
| 69
| 6
| 22
| 11.5
| 0.825
| 0.275362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
ed08224e22fb27eb18876595a542dcd3ad14bffc
| 290
|
py
|
Python
|
pypika/clickhouse/condition.py
|
YiuRULE/pypika
|
8a60f0c11664aed17f8f0279882d5097a0649cb3
|
[
"Apache-2.0"
] | 1,616
|
2016-07-11T08:09:07.000Z
|
2022-03-31T05:32:22.000Z
|
pypika/clickhouse/condition.py
|
YiuRULE/pypika
|
8a60f0c11664aed17f8f0279882d5097a0649cb3
|
[
"Apache-2.0"
] | 631
|
2016-07-15T12:24:07.000Z
|
2022-03-29T04:50:22.000Z
|
pypika/clickhouse/condition.py
|
YiuRULE/pypika
|
8a60f0c11664aed17f8f0279882d5097a0649cb3
|
[
"Apache-2.0"
] | 260
|
2016-07-09T07:44:56.000Z
|
2022-03-24T00:45:27.000Z
|
from pypika.terms import Function
class If(Function):
    """Wrapper for the ClickHouse ``if(cond, then, else)`` SQL function."""
    def __init__(self, *conditions, **kwargs):
        # Positional args become the function's SQL arguments.
        super().__init__("if", *conditions, **kwargs)
class MultiIf(Function):
    """Wrapper for the ClickHouse ``multiIf(cond1, then1, ..., else)`` SQL function."""
    def __init__(self, *conditions, **kwargs):
        # Positional args become the function's SQL arguments.
        super().__init__("multiIf", *conditions, **kwargs)
| 24.166667
| 58
| 0.665517
| 31
| 290
| 5.709677
| 0.451613
| 0.361582
| 0.169492
| 0.214689
| 0.497175
| 0.497175
| 0.497175
| 0.497175
| 0
| 0
| 0
| 0
| 0.172414
| 290
| 11
| 59
| 26.363636
| 0.7375
| 0
| 0
| 0.285714
| 0
| 0
| 0.031034
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
ed51ac0657bb4b7918a979afe6b54861740675a6
| 6,159
|
py
|
Python
|
sklego/metrics.py
|
maxibor/scikit-lego
|
b0b3095054ebedc5bfb3e5944adaae6cebd97afe
|
[
"MIT"
] | null | null | null |
sklego/metrics.py
|
maxibor/scikit-lego
|
b0b3095054ebedc5bfb3e5944adaae6cebd97afe
|
[
"MIT"
] | null | null | null |
sklego/metrics.py
|
maxibor/scikit-lego
|
b0b3095054ebedc5bfb3e5944adaae6cebd97afe
|
[
"MIT"
] | null | null | null |
import numpy as np
import warnings
def correlation_score(column):
    """Build a scorer that measures how strongly predictions correlate with *column*.

    Especially useful in "fairness" settings where the model output should be
    independent of a sensitive attribute.

    Usage:
    `correlation_score('gender')(clf, X, y)`

    :param column: column name (when X is a dataframe) or column index
        (when X is a numpy array).
    :return: a callable ``(estimator, X, y_true=None) -> float`` returning the
        negative absolute correlation between ``estimator.predict(X)`` and
        ``X[column]`` (grid search maximizes, so correlation is punished).
    """
    def correlation_metric(estimator, X, y_true=None):
        """Remember: X is the thing going *in* to your pipeline."""
        if isinstance(X, np.ndarray):
            feature = X[:, column]
        else:
            feature = X[column]
        predictions = estimator.predict(X)
        return -np.abs(np.corrcoef(predictions, feature)[1, 0])
    return correlation_metric
def p_percent_score(sensitive_column, positive_target=1):
    r"""Create a scorer for the p-percent fairness metric.

    The score is the ratio between the probability of a positive prediction
    given the sensitive attribute being one and the same probability given it
    being zero, folded so the result never exceeds one:

    .. math::
        \min \left(\frac{P(\hat{y}=1 | z=1)}{P(\hat{y}=1 | z=0)}, \frac{P(\hat{y}=1 | z=0)}{P(\hat{y}=1 | z=1)}\right)

    Especially useful in situations where "fairness" is a theme.

    Usage:
    `p_percent_score('gender')(clf, X, y)`

    source:
    - M. Zafar et al. (2017), Fairness Constraints: Mechanisms for Fair Classification

    :param sensitive_column: name (dataframe X) or index (numpy X) of the
        binary sensitive-attribute column.
    :param positive_target: the class label associated with a positive outcome.
    :return: a function (clf, X, y_true) -> float that calculates the p percent
        score for z = column.
    """
    def impl(estimator, X, y_true=None):
        """Remember: X is the thing going *in* to your pipeline."""
        if isinstance(X, np.ndarray):
            sensitive_col = X[:, sensitive_column]
        else:
            sensitive_col = X[sensitive_column]
        is_binary = np.all((sensitive_col == 0) | (sensitive_col == 1))
        if not is_binary:
            raise ValueError(
                f"p_percent_score only supports binary indicator columns for `column`. "
                f"Found values {np.unique(sensitive_col)}"
            )
        y_hat = estimator.predict(X)
        p_y1_z1 = np.mean(y_hat[sensitive_col == 1] == positive_target)
        p_y1_z0 = np.mean(y_hat[sensitive_col == 0] == positive_target)
        # A subgroup that never receives a positive prediction is by
        # definition maximally unfair, hence the hard zero.
        if p_y1_z1 == 0:
            warnings.warn(
                f"No samples with y_hat == {positive_target} for {sensitive_column} == 1, returning 0",
                RuntimeWarning,
            )
            return 0
        if p_y1_z0 == 0:
            warnings.warn(
                f"No samples with y_hat == {positive_target} for {sensitive_column} == 0, returning 0",
                RuntimeWarning,
            )
            return 0
        ratio = np.minimum(p_y1_z1 / p_y1_z0, p_y1_z0 / p_y1_z1)
        # nan happens when a subgroup is empty; treat as perfectly fair.
        return ratio if not np.isnan(ratio) else 1
    return impl
def equal_opportunity_score(sensitive_column, positive_target=1):
    r"""
    The equality opportunity score calculates the ratio between the probability of a **true positive** outcome
    given the sensitive attribute (column) being true and the same probability given the
    sensitive attribute being false.
    .. math::
        \min \left(\frac{P(\hat{y}=1 | z=1, y=1)}{P(\hat{y}=1 | z=0, y=1)},
        \frac{P(\hat{y}=1 | z=0, y=1)}{P(\hat{y}=1 | z=1, y=1)}\right)
    This is especially useful to use in situations where "fairness" is a theme.
    Usage:
    `equal_opportunity_score('gender')(clf, X, y)`
    :param sensitive_column:
        Name of the column containing the binary sensitive attribute (when X is a dataframe)
        or the index of the column (when X is a numpy array).
    :param positive_target: The name of the class which is associated with a positive outcome
    :return: a function (clf, X, y_true) -> float that calculates the equal opportunity score for z = column
    :raises ValueError: if the sensitive column contains values other than 0 and 1.
    """
    def impl(estimator, X, y_true):
        """Remember: X is the thing going *in* to your pipeline."""
        sensitive_col = (
            X[:, sensitive_column] if isinstance(X, np.ndarray) else X[sensitive_column]
        )
        if not np.all((sensitive_col == 0) | (sensitive_col == 1)):
            raise ValueError(
                f"equal_opportunity_score only supports binary indicator columns for `column`. "
                f"Found values {np.unique(sensitive_col)}"
            )
        y_hat = estimator.predict(X)
        # Restrict to samples whose *ground truth* is the positive class.
        y_given_z1_y1 = y_hat[(sensitive_col == 1) & (y_true == positive_target)]
        y_given_z0_y1 = y_hat[(sensitive_col == 0) & (y_true == positive_target)]
        # If a subgroup has no positive ground-truth samples the ratio is
        # undefined, so the score is 0.  (Fixed warning text: the emptiness
        # check is on y_true subgroups, not on predictions.)
        if len(y_given_z1_y1) == 0:
            warnings.warn(
                f"No samples with y_true == {positive_target} for {sensitive_column} == 1, returning 0",
                RuntimeWarning,
            )
            return 0
        if len(y_given_z0_y1) == 0:
            warnings.warn(
                f"No samples with y_true == {positive_target} for {sensitive_column} == 0, returning 0",
                RuntimeWarning,
            )
            return 0
        p_y1_z1 = np.mean(y_given_z1_y1 == positive_target)
        p_y1_z0 = np.mean(y_given_z0_y1 == positive_target)
        score = np.minimum(p_y1_z1 / p_y1_z0, p_y1_z0 / p_y1_z1)
        # nan arises only when a ratio is 0/0; treat as perfectly fair.
        return score if not np.isnan(score) else 1
    return impl
| 39.735484
| 118
| 0.634681
| 899
| 6,159
| 4.201335
| 0.169077
| 0.059306
| 0.01059
| 0.012709
| 0.776277
| 0.758009
| 0.732062
| 0.722796
| 0.6852
| 0.650252
| 0
| 0.022257
| 0.270498
| 6,159
| 154
| 119
| 39.993506
| 0.818384
| 0.455756
| 0
| 0.434783
| 0
| 0
| 0.17662
| 0.023825
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.028986
| 0
| 0.26087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
ed5859d22b70ba0355e359a74971f99270963220
| 674
|
py
|
Python
|
tests/conftest.py
|
sweetpay/sweetpay-python
|
d7614197740d167386c37fb540bb7be138ca3a54
|
[
"Apache-2.0"
] | null | null | null |
tests/conftest.py
|
sweetpay/sweetpay-python
|
d7614197740d167386c37fb540bb7be138ca3a54
|
[
"Apache-2.0"
] | null | null | null |
tests/conftest.py
|
sweetpay/sweetpay-python
|
d7614197740d167386c37fb540bb7be138ca3a54
|
[
"Apache-2.0"
] | null | null | null |
"""
Define the configuration of all testing.
"""
import pytest
from sweetpay import Client
@pytest.fixture()
def creditcheck_version():
return 2
@pytest.fixture()
def subscription_version():
return 1
@pytest.fixture()
def checkout_session_version():
return 1
@pytest.fixture()
def client(subscription_version, creditcheck_version, checkout_session_version):
# TODO: Create merchant and token for testing
return Client(
"NNq7Rcnb8y8jGTsU", test=True, version={
"subscription": subscription_version,
"creditcheck": creditcheck_version,
"checkout_session": checkout_session_version
}, timeout=4)
| 20.424242
| 80
| 0.706231
| 71
| 674
| 6.521127
| 0.450704
| 0.112311
| 0.138229
| 0.086393
| 0.12959
| 0.12959
| 0
| 0
| 0
| 0
| 0
| 0.013011
| 0.20178
| 674
| 32
| 81
| 21.0625
| 0.847584
| 0.126113
| 0
| 0.315789
| 0
| 0
| 0.094664
| 0
| 0
| 0
| 0
| 0.03125
| 0
| 1
| 0.210526
| false
| 0
| 0.105263
| 0.210526
| 0.526316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
ed84ea71f54d49b3811e81eabc623fd23df2e9f9
| 4,235
|
py
|
Python
|
ZUC_DDT_LAT/LAT.py
|
LinusRobot/hash_extender
|
2f558a01981abee19f5918ac1fe12c3a1dc78c96
|
[
"BSD-3-Clause"
] | 1
|
2019-11-07T11:19:46.000Z
|
2019-11-07T11:19:46.000Z
|
ZUC_DDT_LAT/LAT.py
|
LinusRobot/hash_extender
|
2f558a01981abee19f5918ac1fe12c3a1dc78c96
|
[
"BSD-3-Clause"
] | null | null | null |
ZUC_DDT_LAT/LAT.py
|
LinusRobot/hash_extender
|
2f558a01981abee19f5918ac1fe12c3a1dc78c96
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy
def print_table(table, path):
    """Write *table* (a 2-D sequence) to *path* and echo it to stdout.

    Each value is written followed by a single space; every row ends with a
    newline in the file and a blank line on stdout (matching the original
    output byte-for-byte).

    :param table: two-dimensional sequence of values.
    :param path: destination text-file path.
    """
    # ``with`` guarantees the handle is closed even on error — the original
    # implementation opened the file and never closed it (resource leak).
    with open(path, 'w') as f:
        for row_vals in table:
            last = len(row_vals) - 1
            for col, value in enumerate(row_vals):
                f.write(str(value))
                f.write(' ')
                print(value, end=' ')
                if col == last:
                    print("\n")
                    f.write('\n')
# ZUC stream-cipher substitution boxes, 16x16 bytes each.
# NOTE(review): compute_LAT indexes them as s_box[row][col] with
# col = value >> 4 and row = value & 15 — confirm that orientation
# matches the ZUC specification tables.
S0_Box = (
    (0x3e,0x72,0x5b,0x47,0xca,0xe0,0x00,0x33,0x04,0xd1,0x54,0x98,0x09,0xb9,0x6d,0xcb),
    (0x7b,0x1b,0xf9,0x32,0xaf,0x9d,0x6a,0xa5,0xb8,0x2d,0xfc,0x1d,0x08,0x53,0x03,0x90),
    (0x4d,0x4e,0x84,0x99,0xe4,0xce,0xd9,0x91,0xdd,0xb6,0x85,0x48,0x8b,0x29,0x6e,0xac),
    (0xcd,0xc1,0xf8,0x1e,0x73,0x43,0x69,0xc6,0xb5,0xbd,0xfd,0x39,0x63,0x20,0xd4,0x38),
    (0x76,0x7d,0xb2,0xa7,0xcf,0xed,0x57,0xc5,0xf3,0x2c,0xbb,0x14,0x21,0x06,0x55,0x9b),
    (0xe3,0xef,0x5e,0x31,0x4f,0x7f,0x5a,0xa4,0x0d,0x82,0x51,0x49,0x5f,0xba,0x58,0x1c),
    (0x4a,0x16,0xd5,0x17,0xa8,0x92,0x24,0x1f,0x8c,0xff,0xd8,0xae,0x2e,0x01,0xd3,0xad),
    (0x3b,0x4b,0xda,0x46,0xeb,0xc9,0xde,0x9a,0x8f,0x87,0xd7,0x3a,0x80,0x6f,0x2f,0xc8),
    (0xb1,0xb4,0x37,0xf7,0x0a,0x22,0x13,0x28,0x7c,0xcc,0x3c,0x89,0xc7,0xc3,0x96,0x56),
    (0x07,0xbf,0x7e,0xf0,0x0b,0x2b,0x97,0x52,0x35,0x41,0x79,0x61,0xa6,0x4c,0x10,0xfe),
    (0xbc,0x26,0x95,0x88,0x8a,0xb0,0xa3,0xfb,0xc0,0x18,0x94,0xf2,0xe1,0xe5,0xe9,0x5d),
    (0xd0,0xdc,0x11,0x66,0x64,0x5c,0xec,0x59,0x42,0x75,0x12,0xf5,0x74,0x9c,0xaa,0x23),
    (0x0e,0x86,0xab,0xbe,0x2a,0x02,0xe7,0x67,0xe6,0x44,0xa2,0x6c,0xc2,0x93,0x9f,0xf1),
    (0xf6,0xfa,0x36,0xd2,0x50,0x68,0x9e,0x62,0x71,0x15,0x3d,0xd6,0x40,0xc4,0xe2,0x0f),
    (0x8e,0x83,0x77,0x6b,0x25,0x05,0x3f,0x0c,0x30,0xea,0x70,0xb7,0xa1,0xe8,0xa9,0x65),
    (0x8d,0x27,0x1a,0xdb,0x81,0xb3,0xa0,0xf4,0x45,0x7a,0x19,0xdf,0xee,0x78,0x34,0x60)
)
# ZUC S1 substitution box (same layout as S0_Box).
S1_Box = (
    (0x55,0xc2,0x63,0x71,0x3b,0xc8,0x47,0x86,0x9f,0x3c,0xda,0x5b,0x29,0xaa,0xfd,0x77),
    (0x8c,0xc5,0x94,0x0c,0xa6,0x1a,0x13,0x00,0xe3,0xa8,0x16,0x72,0x40,0xf9,0xf8,0x42),
    (0x44,0x26,0x68,0x96,0x81,0xd9,0x45,0x3e,0x10,0x76,0xc6,0xa7,0x8b,0x39,0x43,0xe1),
    (0x3a,0xb5,0x56,0x2a,0xc0,0x6d,0xb3,0x05,0x22,0x66,0xbf,0xdc,0x0b,0xfa,0x62,0x48),
    (0xdd,0x20,0x11,0x06,0x36,0xc9,0xc1,0xcf,0xf6,0x27,0x52,0xbb,0x69,0xf5,0xd4,0x87),
    (0x7f,0x84,0x4c,0xd2,0x9c,0x57,0xa4,0xbc,0x4f,0x9a,0xdf,0xfe,0xd6,0x8d,0x7a,0xeb),
    (0x2b,0x53,0xd8,0x5c,0xa1,0x14,0x17,0xfb,0x23,0xd5,0x7d,0x30,0x67,0x73,0x08,0x09),
    (0xee,0xb7,0x70,0x3f,0x61,0xb2,0x19,0x8e,0x4e,0xe5,0x4b,0x93,0x8f,0x5d,0xdb,0xa9),
    (0xad,0xf1,0xae,0x2e,0xcb,0x0d,0xfc,0xf4,0x2d,0x46,0x6e,0x1d,0x97,0xe8,0xd1,0xe9),
    (0x4d,0x37,0xa5,0x75,0x5e,0x83,0x9e,0xab,0x82,0x9d,0xb9,0x1c,0xe0,0xcd,0x49,0x89),
    (0x01,0xb6,0xbd,0x58,0x24,0xa2,0x5f,0x38,0x78,0x99,0x15,0x90,0x50,0xb8,0x95,0xe4),
    (0xd0,0x91,0xc7,0xce,0xed,0x0f,0xb4,0x6f,0xa0,0xcc,0xf0,0x02,0x4a,0x79,0xc3,0xde),
    (0xa3,0xef,0xea,0x51,0xe6,0x6b,0x18,0xec,0x1b,0x2c,0x80,0xf7,0x74,0xe7,0xff,0x21),
    (0x5a,0x6a,0x54,0x1e,0x41,0x31,0x92,0x35,0xc4,0x33,0x07,0x0a,0xba,0x7e,0x0e,0x34),
    (0x88,0xb1,0x98,0x7c,0xf3,0x3d,0x60,0x6c,0x7b,0xca,0xd3,0x1f,0x32,0x65,0x04,0x28),
    (0x64,0xbe,0x85,0x9b,0x2f,0x59,0x8a,0xd7,0xb0,0x25,0xac,0xaf,0x12,0x03,0xe2,0xf2)
)
def LAT_dot(a, b):
    """Return the GF(2) inner product of *a* and *b* (parity of ``a & b``).

    Replaces the original 8-bit string-formatting loop with integer bit
    operations: identical results for byte inputs (0..255) and correct for
    any non-negative integers.

    :param a: first operand (non-negative int; originally a byte).
    :param b: second operand (non-negative int; originally a byte).
    :return: 0 or 1 — the XOR-fold of the bits shared by *a* and *b*.
    """
    return bin(a & b).count('1') & 1
# 256x256 integer accumulators for the two linear approximation tables.
LAT0 = numpy.zeros((256, 256), dtype=int)
LAT1 = numpy.zeros((256, 256), dtype=int)
def compute_LAT(s_box, LAT):
    """Fill *LAT* in place with the linear approximation table of *s_box*.

    For every pair of input/output masks (a, b) the entry counts how far the
    linear approximation ``a.x == b.S(x)`` is from balanced: the raw count of
    disagreements over all 256 inputs is folded as ``128 - count`` below.

    :param s_box: 16x16 substitution table, indexed s_box[row][col].
    :param LAT: 256x256 integer numpy array, updated in place.
    """
    # Precompute the dot-product table DOT[p1][p2] = LAT_dot(p1, p2).
    DOT = numpy.zeros( (256,256) )
    DOT = DOT.astype(int)
    sbox_val = []
    # Flatten the 16x16 box into a 256-entry lookup.
    # NOTE(review): col is taken from the high nibble and row from the low
    # nibble — confirm this transposed orientation against the ZUC spec.
    for p2 in range(256):
        col = p2 >> 4
        row = p2 & 15
        sbox_val.append( s_box[row][col] )
    for p1 in range(256):
        for p2 in range(256):
            DOT[p1][p2] = LAT_dot(p1,p2)
    # For each mask pair, count input/output parity disagreements, then
    # recentre: 256 - count - 128 == 128 - count.
    for a in range(256):
        for b in range(256):
            for i in range(256):
                LAT[a][b] += DOT[a][i]^(DOT[b,sbox_val[i]])
            LAT[a][b] = 256 - LAT[a][b]
            LAT[a][b] = LAT[a][b] - 128
# Compute the S0 linear approximation table and dump it to disk/stdout.
print('*************************ZUC S0 LAT******************')
compute_LAT(S0_Box, LAT0)
print_table(LAT0, './ZUC_S0_LAT.txt')
print('\n')
# Compute the S1 linear approximation table and dump it to disk/stdout.
print('*************************ZUC S1 LAT******************')
compute_LAT(S1_Box, LAT1)
print_table(LAT1, './ZUC_S1_LAT.txt')
| 41.930693
| 83
| 0.664463
| 764
| 4,235
| 3.655759
| 0.407068
| 0.022556
| 0.021482
| 0.017186
| 0.016112
| 0.005371
| 0
| 0
| 0
| 0
| 0
| 0.331381
| 0.112869
| 4,235
| 100
| 84
| 42.35
| 0.412031
| 0.007556
| 0
| 0.022989
| 0
| 0
| 0.039314
| 0.02335
| 0
| 1
| 0.487968
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.011494
| 0
| 0.057471
| 0.091954
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
71fd0d3441d937c5e821ea8333f8284765eca01f
| 149
|
py
|
Python
|
src/events/context_processors.py
|
kc97ble/icrew
|
6a75848b9e9c38dda5b641b54298735d3a749454
|
[
"MIT"
] | null | null | null |
src/events/context_processors.py
|
kc97ble/icrew
|
6a75848b9e9c38dda5b641b54298735d3a749454
|
[
"MIT"
] | 12
|
2019-12-31T13:18:24.000Z
|
2021-09-22T18:21:50.000Z
|
src/events/context_processors.py
|
kc97ble/icrew
|
6a75848b9e9c38dda5b641b54298735d3a749454
|
[
"MIT"
] | null | null | null |
from .models import Announcement
def announcements(request):
    """Context processor exposing all non-hidden announcements to templates."""
    visible = Announcement.objects.filter(hidden=False)
    return {'announcements': visible}
| 18.625
| 66
| 0.711409
| 14
| 149
| 7.571429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.194631
| 149
| 7
| 67
| 21.285714
| 0.883333
| 0
| 0
| 0
| 0
| 0
| 0.087248
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
9c17020ff59da1b18850f067673893736c967e2c
| 8,143
|
py
|
Python
|
fucks/fucks.py
|
ImRaviTejaG/py-foaas-cli
|
821a9ce76595283348e2660baecb0041000392a1
|
[
"MIT"
] | 5
|
2018-06-02T03:32:43.000Z
|
2018-06-02T09:39:15.000Z
|
fucks/fucks.py
|
IamRaviTejaG-zz/py-foaas-cli
|
91597d9c178bc60d705cc7cc3f67c9dc83ce4d15
|
[
"MIT"
] | 2
|
2018-06-01T20:00:11.000Z
|
2018-06-12T17:55:43.000Z
|
fucks/fucks.py
|
IamRaviTejaG-zz/py-foaas-cli
|
91597d9c178bc60d705cc7cc3f67c9dc83ce4d15
|
[
"MIT"
] | 3
|
2018-06-02T14:50:24.000Z
|
2018-06-10T16:22:32.000Z
|
import requests
from sys import stdout
def makerequest(*args) -> None:
baseUrl = "http://foaas.com/"
url = ""
if (len(args) == 1):
url = baseUrl + args[0]
elif (len(args) == 2):
url = baseUrl + args[0] + '/' + args[1]
elif (len(args) == 3):
url = baseUrl + args[0] + '/' + args[1] + '/' + args[2]
elif (len(args) == 4):
url = baseUrl + args[0] + '/' + args[1] + '/' + args[2] + '/' + args[3]
headers = {'Accept': 'text/plain'}
r = requests.get(url, headers=headers)
s = str(r.content)[2:len(str(r.content))-1]
stdout.write ('\n' + s + '\n')
class Fucks:
    """One thin wrapper per FOAAS endpoint; each method fires one request.

    Methods are invoked on the class itself (e.g. ``Fucks.version()``) and so
    deliberately take no ``self`` parameter.
    """
    def version() -> None:
        makerequest("version")
    def operations() -> None:
        makerequest("operations")
    def anyway(company: str, from_name: str) -> None:
        makerequest("anyway", company, from_name)
    def asshole(from_name: str) -> None:
        makerequest("asshole", from_name)
    def awesome(from_name: str) -> None:
        makerequest("awesome", from_name)
    def back(name: str, from_name: str) -> None:
        makerequest("back", name, from_name)
    def bag(from_name: str) -> None:
        makerequest("bag", from_name)
    def ballmer(name: str, company: str, from_name: str) -> None:
        makerequest("ballmer", name, company, from_name)
    def bday(name: str, from_name: str) -> None:
        makerequest("bday", name, from_name)
    def because(from_name: str) -> None:
        makerequest("because", from_name)
    def blackadder(name: str, from_name: str) -> None:
        makerequest("blackadder", name, from_name)
    def bm(name: str, from_name: str) -> None:
        makerequest("bm", name, from_name)
    def bucket(from_name: str) -> None:
        makerequest("bucket", from_name)
    def bus(name: str, from_name: str) -> None:
        makerequest("bus", name, from_name)
    def bye(from_name: str) -> None:
        makerequest("bye", from_name)
    def caniuse(tool: str, from_name: str) -> None:
        makerequest("caniuse", tool, from_name)
    def chainsaw(name: str, from_name: str) -> None:
        makerequest("chainsaw", name, from_name)
    def cocksplat(name: str, from_name: str) -> None:
        makerequest("cocksplat", name, from_name)
    def cool(from_name: str) -> None:
        # BUG FIX: previously requested the "bye" endpoint (copy-paste error).
        makerequest("cool", from_name)
    def cup(from_name: str) -> None:
        # BUG FIX: previously requested the "bye" endpoint (copy-paste error).
        makerequest("cup", from_name)
    def dalton(name: str, from_name: str) -> None:
        makerequest("dalton", name, from_name)
    def deraadt(name: str, from_name: str) -> None:
        makerequest("deraadt", name, from_name)
    def diabetes(from_name: str) -> None:
        makerequest("diabetes", from_name)
    def donut(name: str, from_name: str) -> None:
        makerequest("donut", name, from_name)
    def dosomething(do: str, something: str, from_name: str) -> None:
        makerequest("dosomething", do, something, from_name)
    def equity(name: str, from_name: str) -> None:
        makerequest("equity", name, from_name)
    def everyone(from_name: str) -> None:
        makerequest("everyone", from_name)
    def everything(from_name: str) -> None:
        # BUG FIX: endpoint name was misspelled "everthing".
        makerequest("everything", from_name)
    def family(from_name: str) -> None:
        makerequest("family", from_name)
    def fascinating(from_name: str) -> None:
        makerequest("fascinating", from_name)
    def field(name: str, from_name: str, reference: str) -> None:
        makerequest("field", name, from_name, reference)
    def flying(from_name: str) -> None:
        makerequest("flying", from_name)
    def fyyff(from_name: str) -> None:
        makerequest("fyyff", from_name)
    def gfy(name: str, from_name: str) -> None:
        makerequest("gfy", name, from_name)
    def give(from_name: str) -> None:
        makerequest("give", from_name)
    def greed(noun: str, from_name: str) -> None:
        makerequest("greed", noun, from_name)
    def horse(from_name: str) -> None:
        makerequest("horse", from_name)
    def immensity(from_name: str) -> None:
        makerequest("immensity", from_name)
    def ing(name: str, from_name: str) -> None:
        makerequest("ing", name, from_name)
    def life(from_name: str) -> None:
        makerequest("life", from_name)
    def keep(name: str, from_name: str) -> None:
        makerequest("keep", name, from_name)
    def keepcalm(reaction: str, from_name: str) -> None:
        makerequest("keepcalm", reaction, from_name)
    def king(name: str, from_name: str) -> None:
        makerequest("king", name, from_name)
    def linus(name: str, from_name: str) -> None:
        makerequest("linus", name, from_name)
    def look(name: str, from_name: str) -> None:
        makerequest("look", name, from_name)
    def looking(from_name: str) -> None:
        makerequest("looking", from_name)
    def madison(name: str, from_name: str) -> None:
        makerequest("madison", name, from_name)
    def maybe(from_name: str) -> None:
        makerequest("maybe", from_name)
    def me(from_name: str) -> None:
        makerequest("me", from_name)
    def mornin(from_name: str) -> None:
        makerequest("mornin", from_name)
    def no(from_name: str) -> None:
        makerequest("no", from_name)
    def nugget(name: str, from_name: str) -> None:
        makerequest("nugget", name, from_name)
    def off(name: str, from_name: str) -> None:
        makerequest("off", name, from_name)
    def offwith(behavior: str, from_name: str) -> None:
        makerequest("off-with", behavior, from_name)
    def outside(name: str, from_name: str) -> None:
        makerequest("outside", name, from_name)
    def particular(thing: str, from_name: str) -> None:
        makerequest("particular", thing, from_name)
    def pink(from_name: str) -> None:
        makerequest("pink", from_name)
    def problem(name: str, from_name: str) -> None:
        makerequest("problem", name, from_name)
    def programmer(from_name: str) -> None:
        makerequest("programmer", from_name)
    def pulp(name: str, from_name: str) -> None:
        makerequest("pulp", name, from_name)
    def question(from_name: str) -> None:
        makerequest("question", from_name)
    def retard(from_name: str) -> None:
        makerequest("retard", from_name)
    def ridiculous(from_name: str) -> None:
        makerequest("ridiculous", from_name)
    def rtfm(from_name: str) -> None:
        makerequest("rtfm", from_name)
    def sake(from_name: str) -> None:
        makerequest("sake", from_name)
    def shakespeare(name: str, from_name: str) -> None:
        makerequest("shakespeare", name, from_name)
    def shit(from_name: str) -> None:
        makerequest("shit", from_name)
    def shutup(name: str, from_name: str) -> None:
        makerequest("shutup", name, from_name)
    def single(from_name: str) -> None:
        makerequest("single", from_name)
    def thanks(from_name: str) -> None:
        makerequest("thanks", from_name)
    def that(from_name: str) -> None:
        makerequest("that", from_name)
    def think(name: str, from_name: str) -> None:
        makerequest("think", name, from_name)
    def thinking(name: str, from_name: str) -> None:
        makerequest("thinking", name, from_name)
    def this(from_name: str) -> None:
        makerequest("this", from_name)
    def thumbs(name: str, from_name: str) -> None:
        makerequest("thumbs", name, from_name)
    def too(from_name: str) -> None:
        makerequest("too", from_name)
    def tucker(from_name: str) -> None:
        makerequest("tucker", from_name)
    def what(from_name: str) -> None:
        makerequest("what", from_name)
    def xmas(name: str, from_name: str) -> None:
        makerequest("xmas", name, from_name)
    def yoda(name: str, from_name: str) -> None:
        makerequest("yoda", name, from_name)
    def you(name: str, from_name: str) -> None:
        makerequest("you", name, from_name)
    def zayn(from_name: str) -> None:
        makerequest("zayn", from_name)
    def zero(from_name: str) -> None:
        makerequest("zero", from_name)
| 30.159259
| 79
| 0.612551
| 1,036
| 8,143
| 4.658301
| 0.125483
| 0.268545
| 0.184625
| 0.248653
| 0.512018
| 0.303564
| 0.269374
| 0.035226
| 0.024865
| 0
| 0
| 0.002593
| 0.242294
| 8,143
| 269
| 80
| 30.271375
| 0.779579
| 0
| 0
| 0.016216
| 0
| 0
| 0.064104
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454054
| false
| 0
| 0.010811
| 0
| 0.47027
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9c21ac7fbe1adcc683bb6ee74a81f64d34ff4949
| 166
|
py
|
Python
|
SCA11H/commands/system/NetworkSecurityType.py
|
ihutano/SCA11H
|
ee8a38ec9e47bc015851c20fa121651d2601cde0
|
[
"MIT"
] | null | null | null |
SCA11H/commands/system/NetworkSecurityType.py
|
ihutano/SCA11H
|
ee8a38ec9e47bc015851c20fa121651d2601cde0
|
[
"MIT"
] | null | null | null |
SCA11H/commands/system/NetworkSecurityType.py
|
ihutano/SCA11H
|
ee8a38ec9e47bc015851c20fa121651d2601cde0
|
[
"MIT"
] | null | null | null |
from enum import Enum
class NetworkSecurityType(Enum):
    """Wi-Fi security modes used by the SCA11H system configuration."""
    Open = 'Open'
    WEP = 'WEP'
    WPA2_PSK = 'WPA2 PSK'
    WPA = 'WPA/WPA2 PSK'  # value covers WPA/WPA2 mixed PSK mode
    Unknown = 'Unknown'
| 16.6
| 32
| 0.614458
| 21
| 166
| 4.809524
| 0.52381
| 0.207921
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024793
| 0.271084
| 166
| 9
| 33
| 18.444444
| 0.809917
| 0
| 0
| 0
| 0
| 0
| 0.204819
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
9c3c6a796116101a4eb33cba9cc6014237ebb54c
| 372
|
py
|
Python
|
reader_usb.py
|
gzapatas/raspberry-p--pico-mrfc522
|
fc14ab125afc7e5341338ad7f0e3dda9e2c54a97
|
[
"MIT"
] | null | null | null |
reader_usb.py
|
gzapatas/raspberry-p--pico-mrfc522
|
fc14ab125afc7e5341338ad7f0e3dda9e2c54a97
|
[
"MIT"
] | null | null | null |
reader_usb.py
|
gzapatas/raspberry-p--pico-mrfc522
|
fc14ab125afc7e5341338ad7f0e3dda9e2c54a97
|
[
"MIT"
] | null | null | null |
import usb_hid
from adafruit_hid.keyboard import Keyboard
from adafruit_hid.keyboard_layout_us import KeyboardLayoutUS
class ReaderUsb:
    """Emulates a USB HID keyboard so the device can type text to the host."""

    def __init__(self) -> None:
        # US-layout keyboard backed by the board's USB HID endpoints.
        self.keyboard = Keyboard(usb_hid.devices)
        self.keyboard_layout = KeyboardLayoutUS(self.keyboard)

    def writeMessage(self, message: str):
        """Type *message* on the emulated keyboard."""
        self.keyboard_layout.write(message)
| 33.818182
| 62
| 0.755376
| 45
| 372
| 5.977778
| 0.444444
| 0.178439
| 0.111524
| 0.171004
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172043
| 372
| 11
| 63
| 33.818182
| 0.873377
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
9c693a3f9e0097725e69756ec2de726011ee9c4a
| 144
|
py
|
Python
|
dress/misc.py
|
after12am/dress
|
9b8b05d86bdf9fa9407eae70fc47540b3f1a2a25
|
[
"MIT"
] | 1
|
2015-03-12T23:58:43.000Z
|
2015-03-12T23:58:43.000Z
|
dress/misc.py
|
after12am/dress
|
9b8b05d86bdf9fa9407eae70fc47540b3f1a2a25
|
[
"MIT"
] | 7
|
2015-02-21T14:36:45.000Z
|
2015-03-16T15:32:42.000Z
|
dress/misc.py
|
after12am/ddd
|
9b8b05d86bdf9fa9407eae70fc47540b3f1a2a25
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# Convert a dictionary to an object whose attributes are the dict's keys.
class dict2obj(object):
    """Expose *dictionary* keys as instance attributes.

    NOTE(review): the instance adopts the dict object itself as its
    ``__dict__``, so attribute mutation also mutates the caller's dict.
    """
    def __init__(self, dictionary):
        self.__dict__ = dictionary
| 24
| 35
| 0.715278
| 17
| 144
| 5.588235
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017241
| 0.194444
| 144
| 6
| 36
| 24
| 0.801724
| 0.305556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
9c813cdb0edb94e66cd806450b453b35f2d2586e
| 312
|
py
|
Python
|
webauthn/__init__.py
|
andreasrs/py_webauthn
|
18c837594059bdcca354e3154231ebd468d74b41
|
[
"BSD-3-Clause"
] | 3
|
2019-03-18T02:21:10.000Z
|
2019-04-19T05:25:11.000Z
|
webauthn/__init__.py
|
jrmann100/py_webauthn
|
8944b7aa655b9eabdaa85f90be521dd02918665d
|
[
"BSD-3-Clause"
] | null | null | null |
webauthn/__init__.py
|
jrmann100/py_webauthn
|
8944b7aa655b9eabdaa85f90be521dd02918665d
|
[
"BSD-3-Clause"
] | null | null | null |
# flake8: noqa
from .webauthn import WebAuthnAssertionOptions
from .webauthn import WebAuthnAssertionResponse
from .webauthn import WebAuthnCredential
from .webauthn import WebAuthnMakeCredentialOptions
from .webauthn import WebAuthnRegistrationResponse
from .webauthn import WebAuthnUser
__version__ = '0.4.6'
| 31.2
| 51
| 0.858974
| 30
| 312
| 8.8
| 0.5
| 0.272727
| 0.409091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014235
| 0.099359
| 312
| 9
| 52
| 34.666667
| 0.925267
| 0.038462
| 0
| 0
| 0
| 0
| 0.016779
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 1
| 0
| false
| 0
| 0.857143
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
92c1f822c98bd6b8c3042f219ba26d26909a9d48
| 544
|
py
|
Python
|
autogp/losses/loss.py
|
Alwaysproblem/AutoGP
|
a1a324246ac0f053e367054e34e956a4af063f65
|
[
"Apache-2.0"
] | 1
|
2019-01-22T00:41:17.000Z
|
2019-01-22T00:41:17.000Z
|
autogp/losses/loss.py
|
Alwaysproblem/AutoGP
|
a1a324246ac0f053e367054e34e956a4af063f65
|
[
"Apache-2.0"
] | null | null | null |
autogp/losses/loss.py
|
Alwaysproblem/AutoGP
|
a1a324246ac0f053e367054e34e956a4af063f65
|
[
"Apache-2.0"
] | null | null | null |
class Loss(object):
    """Abstract base class for likelihood losses log p(Y | F)."""

    def __init__(self, dout):
        # Output dimensionality of the model.
        self.dout = dout

    def eval(self, _ytrue, _ypred):
        """Evaluate log p(Y | F); concrete subclasses must override.

        :param _ytrue: (batch_size x Dout) matrix containing true outputs
        :param _ypred: (MC x batch_size x Q) matrix of latent function
            values, usually Q=F
        :return: the evaluated loss
        """
        raise NotImplementedError("Subclass should implement this.")

    def get_name(self):
        """Return a human-readable loss name; subclasses must override."""
        raise NotImplementedError("Subclass should implement this.")
| 30.222222
| 74
| 0.626838
| 66
| 544
| 5.015152
| 0.606061
| 0.126888
| 0.208459
| 0.229607
| 0.308157
| 0.308157
| 0
| 0
| 0
| 0
| 0
| 0
| 0.284926
| 544
| 17
| 75
| 32
| 0.8509
| 0.373162
| 0
| 0.285714
| 0
| 0
| 0.217544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
92c67735e8715ce2339e2c9080d1b740f8529deb
| 487
|
py
|
Python
|
mongomantic/core/errors.py
|
techie-gg/Mongomantic
|
e404db312a3c99f0fc8d4af73d13a083653ccc2f
|
[
"MIT"
] | 18
|
2021-04-11T11:43:44.000Z
|
2022-01-02T15:55:52.000Z
|
mongomantic/core/errors.py
|
techie-gg/Mongomantic
|
e404db312a3c99f0fc8d4af73d13a083653ccc2f
|
[
"MIT"
] | 65
|
2021-04-12T16:34:46.000Z
|
2022-01-20T00:20:25.000Z
|
mongomantic/core/errors.py
|
techie-gg/Mongomantic
|
e404db312a3c99f0fc8d4af73d13a083653ccc2f
|
[
"MIT"
] | 2
|
2021-11-23T01:05:57.000Z
|
2021-12-23T06:26:29.000Z
|
# Public exception hierarchy for mongomantic.
# FIX: IndexCreationError and DuplicateKeyError were defined below but
# missing from __all__, so `from ... import *` silently dropped them.
__all__ = [
    "WriteError",
    "InvalidQueryError",
    "DoesNotExistError",
    "MultipleObjectsReturnedError",
    "FieldDoesNotExistError",
    "IndexCreationError",
    "DuplicateKeyError",
]
class WriteError(Exception):
    """Raised when a document write fails."""
class InvalidQueryError(Exception):
    """Raised when a query uses unknown fields or malformed filters."""
class DoesNotExistError(Exception):
    """Raised when a get() matches no document."""
class MultipleObjectsReturnedError(Exception):
    """Raised when a get() matches more than one document."""
class FieldDoesNotExistError(Exception):
    """Raised when a model references a field that is not defined."""
class IndexCreationError(Exception):
    """Raised when an index cannot be created."""
class DuplicateKeyError(Exception):
    """Raised when an insert violates a unique index."""
| 13.527778
| 46
| 0.731006
| 34
| 487
| 10.352941
| 0.323529
| 0.258523
| 0.306818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188912
| 487
| 35
| 47
| 13.914286
| 0.891139
| 0
| 0
| 0.333333
| 0
| 0
| 0.193018
| 0.102669
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
92c7052e709fcea008bc53868f88fe233d1db4e5
| 665
|
py
|
Python
|
wt-app/apps/wt_languages/urls.py
|
j2labs/wikitrans
|
da2ad8ec3dd91123b6c4f26b656b373183438ee1
|
[
"Apache-2.0"
] | 6
|
2015-06-07T14:07:47.000Z
|
2020-04-25T12:14:39.000Z
|
wt-app/apps/wt_languages/urls.py
|
NickRuiz/wikitrans
|
c40dc599251ea5c0778abe9df7f6ce1e340dcd9e
|
[
"Apache-2.0"
] | null | null | null |
wt-app/apps/wt_languages/urls.py
|
NickRuiz/wikitrans
|
c40dc599251ea5c0778abe9df7f6ce1e340dcd9e
|
[
"Apache-2.0"
] | 3
|
2016-07-13T12:11:30.000Z
|
2021-05-05T23:54:45.000Z
|
from django.conf.urls.defaults import *
from wt_languages import views, models
from wt_languages.forms import *
# URL routes for the language-competency CRUD views.
# NOTE(review): "competancy" is misspelled, but the spelling is baked into
# the view dotted paths and URL names below — fixing it requires a
# coordinated rename across views and templates, so it is left as-is here.
urlpatterns = patterns('',
    # list the current user's language competencies
    url(r'^$', 'wt_languages.views.language_competancy_list', name="language_competancy_list"),
    # create a new competency
    url(r'^new/$', 'wt_languages.views.language_competancy_new', name='language_competancy_new'),
    # edit an existing competency (positional pk)
    url(r'^edit/(\d+)/$', 'wt_languages.views.language_competancy_edit', name='language_competancy_edit'),
    # destroy a competency (positional pk)
    url(r'^destroy/(\d+)/$', 'wt_languages.views.language_competancy_destroy', name='language_competancy_destroy'),
)
| 31.666667
| 115
| 0.738346
| 81
| 665
| 5.790123
| 0.320988
| 0.307036
| 0.136461
| 0.204691
| 0.294243
| 0.149254
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120301
| 665
| 20
| 116
| 33.25
| 0.801709
| 0.112782
| 0
| 0
| 0
| 0
| 0.528205
| 0.464957
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
92ca413d0091d66d8d6f2a0df57a38ca3e9eaf20
| 6,081
|
py
|
Python
|
ENV/lib/python2.7/site-packages/phonenumbers/data/region_DE.py
|
jupitercl/meatme-oscar-dev
|
40583cbb3c6762640a403956e41bffac0bb2ad48
|
[
"BSD-3-Clause"
] | null | null | null |
ENV/lib/python2.7/site-packages/phonenumbers/data/region_DE.py
|
jupitercl/meatme-oscar-dev
|
40583cbb3c6762640a403956e41bffac0bb2ad48
|
[
"BSD-3-Clause"
] | null | null | null |
ENV/lib/python2.7/site-packages/phonenumbers/data/region_DE.py
|
jupitercl/meatme-oscar-dev
|
40583cbb3c6762640a403956e41bffac0bb2ad48
|
[
"BSD-3-Clause"
] | null | null | null |
"""Auto-generated file, do not edit by hand. DE metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Number-plan metadata for Germany (country code +49): per-number-type regex
# patterns plus the national formatting rules. This file is generated from
# libphonenumber's XML metadata — regenerate instead of hand-editing.
PHONE_METADATA_DE = PhoneMetadata(id='DE', country_code=49, international_prefix='00',
    general_desc=PhoneNumberDesc(national_number_pattern='[1-35-9]\\d{3,14}|4(?:[0-8]\\d{4,12}|9(?:[0-37]\\d|4(?:[1-35-8]|4\\d?)|5\\d{1,2}|6[1-8]\\d?)\\d{2,8})', possible_number_pattern='\\d{2,15}', possible_length=(4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), possible_length_local_only=(3, 4)),
    fixed_line=PhoneNumberDesc(national_number_pattern='[246]\\d{5,13}|3(?:0\\d{3,13}|2\\d{9}|[3-9]\\d{4,13})|5(?:0[2-8]|[1256]\\d|[38][0-8]|4\\d{0,2}|[79][0-7])\\d{3,11}|7(?:0[2-8]|[1-9]\\d)\\d{3,10}|8(?:0[2-9]|[1-9]\\d)\\d{3,10}|9(?:0[6-9]\\d{3,10}|1\\d{4,12}|[2-9]\\d{4,11})', example_number='30123456', possible_length=(5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), possible_length_local_only=(3, 4)),
    mobile=PhoneNumberDesc(national_number_pattern='1(?:5[0-25-9]\\d{8}|6[023]\\d{7,8}|7(?:[0-57-9]\\d?|6\\d)\\d{7})', possible_number_pattern='\\d{10,11}', example_number='15123456789', possible_length=(10, 11)),
    toll_free=PhoneNumberDesc(national_number_pattern='800\\d{7,12}', possible_number_pattern='\\d{10,15}', example_number='8001234567890', possible_length=(10, 11, 12, 13, 14, 15)),
    premium_rate=PhoneNumberDesc(national_number_pattern='137[7-9]\\d{6}|900(?:[135]\\d{6}|9\\d{7})', possible_number_pattern='\\d{10,11}', example_number='9001234567', possible_length=(10, 11)),
    shared_cost=PhoneNumberDesc(national_number_pattern='1(?:3(?:7[1-6]\\d{6}|8\\d{4})|80\\d{5,11})', possible_number_pattern='\\d{7,14}', example_number='18012345', possible_length=(7, 8, 9, 10, 11, 12, 13, 14)),
    personal_number=PhoneNumberDesc(national_number_pattern='700\\d{8}', possible_number_pattern='\\d{11}', example_number='70012345678', possible_length=(11,)),
    voip=PhoneNumberDesc(),
    pager=PhoneNumberDesc(national_number_pattern='16(?:4\\d{1,10}|[89]\\d{1,11})', possible_number_pattern='\\d{4,14}', example_number='16412345', possible_length=(4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)),
    uan=PhoneNumberDesc(national_number_pattern='18(?:1\\d{5,11}|[2-9]\\d{8})', possible_number_pattern='\\d{8,14}', example_number='18500123456', possible_length=(8, 9, 10, 11, 12, 13, 14)),
    voicemail=PhoneNumberDesc(national_number_pattern='1(?:5(?:(?:2\\d55|7\\d99|9\\d33)\\d{7}|(?:[034568]00|113)\\d{8})|6(?:013|255|399)\\d{7,8}|7(?:[015]13|[234]55|[69]33|[78]99)\\d{7,8})', possible_number_pattern='\\d{12,13}', example_number='177991234567', possible_length=(12, 13)),
    no_international_dialling=PhoneNumberDesc(),
    national_prefix='0',
    national_prefix_for_parsing='0',
    number_format=[NumberFormat(pattern='(1\\d{2})(\\d{7,8})', format='\\1 \\2', leading_digits_pattern=['1[67]'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(15\\d{3})(\\d{6})', format='\\1 \\2', leading_digits_pattern=['15[0568]'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(1\\d{3})(\\d{7})', format='\\1 \\2', leading_digits_pattern=['15'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(\\d{2})(\\d{3,11})', format='\\1 \\2', leading_digits_pattern=['3[02]|40|[68]9'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(\\d{3})(\\d{3,11})', format='\\1 \\2', leading_digits_pattern=['2(?:\\d1|0[2389]|1[24]|28|34)|3(?:[3-9][15]|40)|[4-8][1-9]1|9(?:06|[1-9]1)'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(\\d{4})(\\d{2,11})', format='\\1 \\2', leading_digits_pattern=['[24-6]|[7-9](?:\\d[1-9]|[1-9]\\d)|3(?:[3569][02-46-9]|4[2-4679]|7[2-467]|8[2-46-8])', '[24-6]|[7-9](?:\\d[1-9]|[1-9]\\d)|3(?:3(?:0[1-467]|2[127-9]|3[124578]|[46][1246]|7[1257-9]|8[1256]|9[145])|4(?:2[135]|3[1357]|4[13578]|6[1246]|7[1356]|9[1346])|5(?:0[14]|2[1-3589]|3[1357]|4[1246]|6[1-4]|7[1346]|8[13568]|9[1246])|6(?:0[356]|2[1-489]|3[124-6]|4[1347]|6[13]|7[12579]|8[1-356]|9[135])|7(?:2[1-7]|3[1357]|4[145]|6[1-5]|7[1-4])|8(?:21|3[1468]|4[1347]|6[0135-9]|7[1467]|8[136])|9(?:0[12479]|2[1358]|3[1357]|4[134679]|6[1-9]|7[136]|8[147]|9[1468]))'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(3\\d{4})(\\d{1,10})', format='\\1 \\2', leading_digits_pattern=['3'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(800)(\\d{7,12})', format='\\1 \\2', leading_digits_pattern=['800'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(\\d{3})(\\d)(\\d{4,10})', format='\\1 \\2 \\3', leading_digits_pattern=['(?:18|90)0|137', '1(?:37|80)|900[1359]'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(1\\d{2})(\\d{5,11})', format='\\1 \\2', leading_digits_pattern=['181'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(18\\d{3})(\\d{6})', format='\\1 \\2', leading_digits_pattern=['185', '1850', '18500'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(18\\d{2})(\\d{7})', format='\\1 \\2', leading_digits_pattern=['18[68]'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(18\\d)(\\d{8})', format='\\1 \\2', leading_digits_pattern=['18[2-579]'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(700)(\\d{4})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['700'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(138)(\\d{4})', format='\\1 \\2', leading_digits_pattern=['138'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(15[013-68])(\\d{2})(\\d{8})', format='\\1 \\2 \\3', leading_digits_pattern=['15[013-68]'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(15[279]\\d)(\\d{2})(\\d{7})', format='\\1 \\2 \\3', leading_digits_pattern=['15[279]'], national_prefix_formatting_rule='0\\1'),
        NumberFormat(pattern='(1[67]\\d)(\\d{2})(\\d{7,8})', format='\\1 \\2 \\3', leading_digits_pattern=['1(?:6[023]|7)'], national_prefix_formatting_rule='0\\1')],
    mobile_number_portable_region=True)
| 160.026316
| 683
| 0.635422
| 1,059
| 6,081
| 3.485364
| 0.157696
| 0.07586
| 0.039014
| 0.136548
| 0.571661
| 0.511785
| 0.441886
| 0.381739
| 0.279057
| 0.178813
| 0
| 0.183504
| 0.068903
| 6,081
| 37
| 684
| 164.351351
| 0.468386
| 0.008716
| 0
| 0
| 1
| 0.257143
| 0.363168
| 0.232979
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028571
| 0
| 0.028571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
92d6edcf983eda310d0b6fbe130f53bc91392487
| 60
|
py
|
Python
|
AlgorithmTest/BOJ_STEP_PYTHON/Step1/BOJ2557.py
|
bluesky0960/AlgorithmTest
|
35e6c01b1c25bf13d4c034c047f3dd3b67f1578e
|
[
"MIT"
] | null | null | null |
AlgorithmTest/BOJ_STEP_PYTHON/Step1/BOJ2557.py
|
bluesky0960/AlgorithmTest
|
35e6c01b1c25bf13d4c034c047f3dd3b67f1578e
|
[
"MIT"
] | null | null | null |
AlgorithmTest/BOJ_STEP_PYTHON/Step1/BOJ2557.py
|
bluesky0960/AlgorithmTest
|
35e6c01b1c25bf13d4c034c047f3dd3b67f1578e
|
[
"MIT"
] | null | null | null |
# https://www.acmicpc.net/problem/2557 -- emit the fixed greeting the judge expects.
greeting = "Hello World!"
print(greeting)
| 20
| 37
| 0.733333
| 9
| 60
| 4.888889
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 0.05
| 60
| 3
| 38
| 20
| 0.701754
| 0.6
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
92d7bdf21cc6a851a44f1444243c52f70cd698b4
| 153
|
py
|
Python
|
python/testData/inspections/AddSelfToClassmethod.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/AddSelfToClassmethod.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/AddSelfToClassmethod.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
class A:
def foo(self):
pass
@classmethod
def pop(cls):
print <error descr="Unresolved reference 'foo'">fo<caret>o</error>()
| 21.857143
| 76
| 0.588235
| 20
| 153
| 4.5
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.267974
| 153
| 7
| 76
| 21.857143
| 0.803571
| 0
| 0
| 0
| 0
| 0
| 0.168831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.166667
| 0
| null | null | 0.166667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
1346eaad4bec9869620400317feb811d3bf119d2
| 140
|
py
|
Python
|
reddit2telegram/channels/r_channels_tifu/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 187
|
2016-09-20T09:15:54.000Z
|
2022-03-29T12:22:33.000Z
|
reddit2telegram/channels/r_channels_tifu/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 84
|
2016-09-22T14:25:07.000Z
|
2022-03-19T01:26:17.000Z
|
reddit2telegram/channels/r_channels_tifu/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 172
|
2016-09-21T15:39:39.000Z
|
2022-03-16T15:15:58.000Z
|
#encoding:utf-8
subreddit = 'tifu'
t_channel = '@r_channels_tifu'
def send_post(submission, r2t):
    """Forward *submission* to Telegram through r2t's simple sender.

    Returns whatever ``r2t.send_simple`` returns.
    """
    outcome = r2t.send_simple(submission)
    return outcome
| 15.555556
| 38
| 0.742857
| 20
| 140
| 4.95
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024793
| 0.135714
| 140
| 8
| 39
| 17.5
| 0.793388
| 0.1
| 0
| 0
| 0
| 0
| 0.16
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
135269da084e13ab04530acd793efb0e153b12ae
| 150
|
py
|
Python
|
Lesson07/dictionary_masher.py
|
PacktPublishing/Python-Fundamentals
|
f24569826b1b7f97e3d54630a34ae61110ca12da
|
[
"MIT"
] | 1
|
2021-04-23T14:01:56.000Z
|
2021-04-23T14:01:56.000Z
|
Lesson07/dictionary_masher.py
|
PacktPublishing/Python-Fundamentals
|
f24569826b1b7f97e3d54630a34ae61110ca12da
|
[
"MIT"
] | null | null | null |
Lesson07/dictionary_masher.py
|
PacktPublishing/Python-Fundamentals
|
f24569826b1b7f97e3d54630a34ae61110ca12da
|
[
"MIT"
] | 4
|
2021-06-29T05:57:44.000Z
|
2021-09-02T10:14:55.000Z
|
def dictionary_masher(dict_a, dict_b):
    """Merge entries of *dict_b* into *dict_a* in place, never overwriting.

    Keys already present in dict_a keep their values; only missing keys are
    copied over. Returns the (mutated) dict_a.
    """
    for k, v in dict_b.items():
        dict_a.setdefault(k, v)  # insert only when the key is absent
    return dict_a
| 21.428571
| 38
| 0.653333
| 27
| 150
| 3.37037
| 0.518519
| 0.21978
| 0.197802
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.26
| 150
| 7
| 39
| 21.428571
| 0.81982
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
13635ec0f5cdbd0efb84d42ef2ba4490f8a851ce
| 264
|
py
|
Python
|
Arrays/703. Kth Largest Element in a Stream.py
|
thewires2/Leetcode
|
a37ff81d60dd9195ba637b970b40aabbea5f4680
|
[
"Unlicense"
] | 1
|
2021-06-30T17:51:56.000Z
|
2021-06-30T17:51:56.000Z
|
Arrays/703. Kth Largest Element in a Stream.py
|
thewires2/Leetcode
|
a37ff81d60dd9195ba637b970b40aabbea5f4680
|
[
"Unlicense"
] | null | null | null |
Arrays/703. Kth Largest Element in a Stream.py
|
thewires2/Leetcode
|
a37ff81d60dd9195ba637b970b40aabbea5f4680
|
[
"Unlicense"
] | null | null | null |
class Solution:
    """LeetCode 'Third Maximum Number' solution.

    NOTE(review): the file is named '703. Kth Largest Element in a Stream'
    but the method implements thirdMax (problem 414) -- confirm intent.
    """

    def thirdMax(self, nums: list[int]) -> int:
        """Return the third-largest distinct value of *nums*.

        Falls back to the overall maximum when there are fewer than three
        distinct values. Raises ValueError on an empty list (as before).

        Fixes vs. original: the ``List[int]`` annotation referenced an
        unimported name (NameError at class-definition time), and the old
        remove-the-max loop destructively mutated the caller's list.
        """
        distinct = set(nums)  # dedupe once; leaves the input untouched
        if len(distinct) < 3:
            return max(distinct)
        return sorted(distinct)[-3]
| 29.333333
| 50
| 0.518939
| 37
| 264
| 3.702703
| 0.567568
| 0.20438
| 0.189781
| 0.160584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011561
| 0.344697
| 264
| 8
| 51
| 33
| 0.780347
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
136a42c46154d652066d367e2ad30cb9cbd19e38
| 84
|
py
|
Python
|
django/docs/_theme/djangodocs/genindex.html.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
django/docs/_theme/djangodocs/genindex.html.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
django/docs/_theme/djangodocs/genindex.html.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
BBBBBBB BBBBBBBBBBBBBBBBBBBBB
BBBBB BBBBBBBBBBBBBBBBB
BBBBB BBBBBBBBBBBBBBBBBBBBBB
| 16.8
| 29
| 0.916667
| 6
| 84
| 12.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 84
| 4
| 30
| 21
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
1384d10ff680fb4d4d23438f8b0092f193271fe6
| 119
|
py
|
Python
|
invokefile.py
|
rgbkrk/hexview
|
65755098d3e26b75c27b50b1fe8b8757221376c8
|
[
"Apache-2.0"
] | 2
|
2017-06-05T04:47:42.000Z
|
2021-04-14T05:06:52.000Z
|
invokefile.py
|
rgbkrk/hexview
|
65755098d3e26b75c27b50b1fe8b8757221376c8
|
[
"Apache-2.0"
] | null | null | null |
invokefile.py
|
rgbkrk/hexview
|
65755098d3e26b75c27b50b1fe8b8757221376c8
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from invoke import run, task
@task
def build():
    """Invoke task: announce that the build step is running."""
    notice = "Building!"
    print(notice)
| 13.222222
| 28
| 0.613445
| 17
| 119
| 4.294118
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.184874
| 119
| 8
| 29
| 14.875
| 0.742268
| 0.352941
| 0
| 0
| 0
| 0
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
13b5e04db5f1c3f884d71fc43a000eead973629a
| 130
|
py
|
Python
|
vvpages/apps.py
|
synw/django-vvpages
|
f00f900b56d9363aef86b8515c92d23f2e13af02
|
[
"MIT"
] | 3
|
2017-04-25T10:30:39.000Z
|
2017-08-29T09:47:04.000Z
|
vvpages/apps.py
|
synw/django-vvpages
|
f00f900b56d9363aef86b8515c92d23f2e13af02
|
[
"MIT"
] | null | null | null |
vvpages/apps.py
|
synw/django-vvpages
|
f00f900b56d9363aef86b8515c92d23f2e13af02
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from django.apps import AppConfig
class VvpagesConfig(AppConfig):
    """Django application configuration for the vvpages app."""
    # App label/name used by Django's app registry (INSTALLED_APPS entry).
    name = 'vvpages'
| 16.25
| 39
| 0.792308
| 15
| 130
| 6.533333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 130
| 7
| 40
| 18.571429
| 0.890909
| 0
| 0
| 0
| 0
| 0
| 0.053846
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
13be3f8215e8718b889d6b463fb3b072b744d840
| 93
|
py
|
Python
|
movieflix/apps.py
|
shanesaravia/Movieflix2.0
|
3c29be81e634f58495c9779cce332895ffd49d5b
|
[
"MIT"
] | null | null | null |
movieflix/apps.py
|
shanesaravia/Movieflix2.0
|
3c29be81e634f58495c9779cce332895ffd49d5b
|
[
"MIT"
] | 7
|
2020-03-24T15:44:26.000Z
|
2021-06-01T22:10:38.000Z
|
movieflix/apps.py
|
shanesaravia/Movieflix2.0
|
3c29be81e634f58495c9779cce332895ffd49d5b
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class MovieflixConfig(AppConfig):
    """Django application configuration for the movieflix app."""
    # App label/name used by Django's app registry (INSTALLED_APPS entry).
    name = 'movieflix'
| 15.5
| 33
| 0.763441
| 10
| 93
| 7.1
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 93
| 5
| 34
| 18.6
| 0.910256
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
13c7285cf13c79de4a5aafc761ff9ab28193c5cf
| 360
|
py
|
Python
|
digits/utils/errors.py
|
ZeusNightBolt/DIGITS
|
3450cc683143415418af5ecdb1b17b02da3e2c79
|
[
"BSD-3-Clause"
] | 2
|
2017-04-24T10:16:15.000Z
|
2019-02-26T09:36:27.000Z
|
digits/utils/errors.py
|
ZeusNightBolt/DIGITS
|
3450cc683143415418af5ecdb1b17b02da3e2c79
|
[
"BSD-3-Clause"
] | 1
|
2016-08-30T23:48:17.000Z
|
2016-08-30T23:48:17.000Z
|
digits/utils/errors.py
|
ZeusNightBolt/DIGITS
|
3450cc683143415418af5ecdb1b17b02da3e2c79
|
[
"BSD-3-Clause"
] | 3
|
2017-04-24T10:16:15.000Z
|
2019-02-26T09:36:49.000Z
|
# Copyright (c) 2014-2015, NVIDIA CORPORATION. All rights reserved.
class DigitsError(Exception):
    """Root of the DIGITS exception hierarchy; all custom errors derive from it."""
class DeleteError(DigitsError):
    """Raised when deleting a job fails."""
class LoadImageError(DigitsError):
    """Raised when loading an image fails."""
| 17.142857
| 68
| 0.644444
| 38
| 360
| 6.105263
| 0.736842
| 0.077586
| 0.181034
| 0.224138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030075
| 0.261111
| 360
| 20
| 69
| 18
| 0.842105
| 0.472222
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
13e79fb88fab4c10b97397b6e82fbd49002a2c44
| 1,039
|
py
|
Python
|
database/schemas/user_auth.py
|
DiegoLing33/prestij.xyz-api
|
69a11a2c93dd98975f9becbc4b8f596e4941a05f
|
[
"MIT"
] | 2
|
2020-10-28T14:00:05.000Z
|
2020-10-30T11:55:27.000Z
|
database/schemas/user_auth.py
|
DiegoLing33/prestij.xyz-api
|
69a11a2c93dd98975f9becbc4b8f596e4941a05f
|
[
"MIT"
] | null | null | null |
database/schemas/user_auth.py
|
DiegoLing33/prestij.xyz-api
|
69a11a2c93dd98975f9becbc4b8f596e4941a05f
|
[
"MIT"
] | null | null | null |
# ██╗░░░░░██╗███╗░░██╗░██████╗░░░░██████╗░██╗░░░░░░█████╗░░█████╗░██╗░░██╗
# ██║░░░░░██║████╗░██║██╔════╝░░░░██╔══██╗██║░░░░░██╔══██╗██╔══██╗██║░██╔╝
# ██║░░░░░██║██╔██╗██║██║░░██╗░░░░██████╦╝██║░░░░░███████║██║░░╚═╝█████═╝░
# ██║░░░░░██║██║╚████║██║░░╚██╗░░░██╔══██╗██║░░░░░██╔══██║██║░░██╗██╔═██╗░
# ███████╗██║██║░╚███║╚██████╔╝░░░██████╦╝███████╗██║░░██║╚█████╔╝██║░╚██╗
# ╚══════╝╚═╝╚═╝░░╚══╝░╚═════╝░░░░╚═════╝░╚══════╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝
#
# Developed by Yakov V. Panov (C) Ling • Black 2020
# @site http://ling.black
from datetime import datetime
from pydantic.main import BaseModel
from .user import User
from ..core.schemas import CoreSchema
class UserAuthBase(BaseModel):
    """Common base for the user-auth schema classes; adds no fields itself."""
class UserAuthLogin(BaseModel):
    """Request payload for the login endpoint: plain login + password."""
    login: str
    password: str
class UserAuth(UserAuthBase, CoreSchema):
    """Auth-token record schema, readable straight from ORM objects."""
    id: int
    created: datetime  # when the token row was created
    token: str  # the auth token value itself
    user_id: int  # FK to the owning user row
    user: User  # nested user schema (arbitrary type allowed via Config)
    meta: str

    class Config:
        # orm_mode lets pydantic (v1-style) build this model from ORM
        # attribute access rather than dicts; arbitrary_types_allowed is
        # needed for the non-pydantic `User` field type.
        orm_mode = True
        arbitrary_types_allowed = True
| 25.341463
| 75
| 0.376323
| 79
| 1,039
| 10.379747
| 0.64557
| 0.019512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004582
| 0.159769
| 1,039
| 40
| 76
| 25.975
| 0.438717
| 0.521655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.105263
| 0.210526
| 0
| 0.842105
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
b949937c1a203f72b118f3e34eaca47520e5d089
| 110
|
py
|
Python
|
tests/grammpy_test/__init__.py
|
PatrikValkovic/grammpy
|
8308a1fd349bf9ea0d267360cc9a4ab20d1629e8
|
[
"MIT"
] | 1
|
2021-02-04T12:41:08.000Z
|
2021-02-04T12:41:08.000Z
|
tests/grammpy_test/__init__.py
|
PatrikValkovic/grammpy
|
8308a1fd349bf9ea0d267360cc9a4ab20d1629e8
|
[
"MIT"
] | 3
|
2017-07-08T16:28:52.000Z
|
2020-04-23T18:06:24.000Z
|
tests/grammpy_test/__init__.py
|
PatrikValkovic/grammpy
|
8308a1fd349bf9ea0d267360cc9a4ab20d1629e8
|
[
"MIT"
] | 1
|
2021-02-04T12:41:10.000Z
|
2021-02-04T12:41:10.000Z
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 09.04.2018 14:18
:Licence MIT
Part of grammpy
"""
| 12.222222
| 25
| 0.7
| 18
| 110
| 4.277778
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 0.145455
| 110
| 8
| 26
| 13.75
| 0.691489
| 0.9
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b955fb6150497dd133679a30fa5353f1e3c59b76
| 49
|
py
|
Python
|
pyinitials/__init__.py
|
timf-app-test/pyinitials
|
8464b39332913a26da838b3e0f1c88a002cd0f5f
|
[
"ISC"
] | 1
|
2022-03-24T12:18:00.000Z
|
2022-03-24T12:18:00.000Z
|
pyinitials/__init__.py
|
timf-app-test/pyinitials
|
8464b39332913a26da838b3e0f1c88a002cd0f5f
|
[
"ISC"
] | 1
|
2022-03-01T04:05:04.000Z
|
2022-03-01T04:05:04.000Z
|
pyinitials/__init__.py
|
timf-app-test/pyinitials
|
8464b39332913a26da838b3e0f1c88a002cd0f5f
|
[
"ISC"
] | 1
|
2022-01-24T22:42:25.000Z
|
2022-01-24T22:42:25.000Z
|
# Package version string (keep in sync with the project metadata).
__version__ = '0.1.0'

# Re-export the implementation module's public names at package level.
from .pyinitials import *
| 12.25
| 25
| 0.693878
| 7
| 49
| 4.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 0.163265
| 49
| 3
| 26
| 16.333333
| 0.658537
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
b9606618bfa08439e1731313840844e1a5577e84
| 31
|
py
|
Python
|
.history/my_classes/variables_memory/optimizations_peephole_20210605141116.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
.history/my_classes/variables_memory/optimizations_peephole_20210605141116.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
.history/my_classes/variables_memory/optimizations_peephole_20210605141116.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
def my_func():
    # The file's history name ("optimizations_peephole") suggests this is a
    # demo of CPython's peephole optimizer: 24 * 60 is presumably folded to
    # the constant 1440 at compile time (check my_func.__code__.co_consts).
    # `a` is intentionally unused.
    a = 24 * 60
| 10.333333
| 15
| 0.483871
| 6
| 31
| 2.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.354839
| 31
| 2
| 16
| 15.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b962397f2db13f28beef3878c167b76a5850e1e5
| 15,747
|
py
|
Python
|
tests/services/test_rdm_service.py
|
caltechlibrary/invenio-rdm-records
|
5f35d82a3ed7caec5aa2350d62c26a021edf2d87
|
[
"MIT"
] | null | null | null |
tests/services/test_rdm_service.py
|
caltechlibrary/invenio-rdm-records
|
5f35d82a3ed7caec5aa2350d62c26a021edf2d87
|
[
"MIT"
] | null | null | null |
tests/services/test_rdm_service.py
|
caltechlibrary/invenio-rdm-records
|
5f35d82a3ed7caec5aa2350d62c26a021edf2d87
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Graz University of Technology.
# Copyright (C) 2021 TU Wien.
#
# Invenio-RDM-Records is free software; you can redistribute it
# and/or modify it under the terms of the MIT License; see LICENSE file for
# more details.
"""Service level tests for Invenio RDM Records."""
from collections import namedtuple
import pytest
from flask_babelex import lazy_gettext as _
from invenio_pidstore.errors import PIDDoesNotExistError
from marshmallow import ValidationError
from invenio_rdm_records.proxies import current_rdm_records
from invenio_rdm_records.services.errors import EmbargoNotLiftedError
# Bundle of app/db fixtures that most tests in this module need together.
RunningApp = namedtuple("RunningApp", [
    "app", "location", "superuser_identity", "resource_type_v",
    "subject_v", "languages_v", "title_type_v"
])


@pytest.fixture
def running_app(
    app, location, superuser_identity, resource_type_v, subject_v, languages_v,
    title_type_v):
    """This fixture provides an app with the typically needed db data loaded.

    All of these fixtures are often needed together, so collecting them
    under a semantic umbrella makes sense.
    """
    return RunningApp(app, location, superuser_identity,
                      resource_type_v, subject_v, languages_v, title_type_v)
#
# PIDs
#
def test_resolve_pid(running_app, es_clear, minimal_record):
    """A published record can be resolved through its DOI pid."""
    # (docstring fixed: the old one was copy-pasted from a 'reserve' test)
    service = current_rdm_records.records_service
    superuser_identity = running_app.superuser_identity
    # create the draft
    draft = service.create(superuser_identity, minimal_record)
    # publish the record
    record = service.publish(draft.id, superuser_identity)
    doi = record.to_dict()["pids"]["doi"]["identifier"]
    # test resolution
    resolved_record = service.resolve_pid(
        id_=doi,
        identity=superuser_identity,
        pid_type="doi"
    )
    assert resolved_record.id == record.id
    assert resolved_record.to_dict()["pids"]["doi"]["identifier"] == doi
def test_resolve_non_existing_pid(running_app, es_clear, minimal_record):
    """Resolving a DOI that was never minted raises PIDDoesNotExistError."""
    # (docstring fixed: the old one was copy-pasted from a 'reserve' test)
    service = current_rdm_records.records_service
    superuser_identity = running_app.superuser_identity
    # create the draft
    draft = service.create(superuser_identity, minimal_record)
    # publish the record
    service.publish(draft.id, superuser_identity)
    # test resolution of a DOI that does not belong to any record
    fake_doi = "10.1234/client.12345-abdce"
    with pytest.raises(PIDDoesNotExistError):
        service.resolve_pid(
            id_=fake_doi,
            identity=superuser_identity,
            pid_type="doi"
        )
def test_oai_pid_default_created(running_app, es_clear, minimal_record):
    """Publishing with no pids mints an 'oai' pid (provider/client 'oai')."""
    superuser_identity = running_app.superuser_identity
    service = current_rdm_records.records_service
    minimal_record["pids"] = {}  # let the system mint everything
    # create the draft
    draft = service.create(superuser_identity, minimal_record)
    # publish the record
    record = service.publish(draft.id, superuser_identity)
    published_oai = record.to_dict()["pids"]["oai"]
    assert published_oai["identifier"]
    assert published_oai["provider"] == "oai"
    assert published_oai["client"] == "oai"
def test_pid_creation_default_required(running_app, es_clear, minimal_record):
    """Publishing with no pids mints a DOI with the default datacite provider."""
    superuser_identity = running_app.superuser_identity
    service = current_rdm_records.records_service
    minimal_record["pids"] = {}  # let the system mint everything
    # create the draft
    draft = service.create(superuser_identity, minimal_record)
    # publish the record
    record = service.publish(draft.id, superuser_identity)
    published_doi = record.to_dict()["pids"]["doi"]
    assert published_doi["identifier"]
    assert published_doi["provider"] == "datacite"  # default
    assert published_doi["client"] == "datacite"  # default
def test_pid_creation_invalid_format_value_managed(
    running_app, es_clear, minimal_record
):
    """A managed DOI with a malformed identifier is dropped from the draft."""
    superuser_identity = running_app.superuser_identity
    service = current_rdm_records.records_service
    # set the pids field with a value that is not a valid DOI
    doi = {
        "identifier": "loremipsum",
        "provider": "datacite",
        "client": "datacite"
    }
    pids = {"doi": doi}
    minimal_record["pids"] = pids
    # create the draft
    # will pass creation since validation is just reported, not hard fail
    # but it will be removed (not saved)
    draft = service.create(superuser_identity, minimal_record)
    assert draft.to_dict()["pids"] == {}
def test_pid_creation_invalid_no_value_managed(
    running_app, es_clear, minimal_record
):
    """A managed DOI entry with no identifier value is dropped from the draft."""
    # NOTE: This use case is tricky because it will spawn two exceptions
    # Because a value is missing and is also invalid. Should consider only
    # second case.
    # {
    #     'field': 'pids.doi.value.identifier',
    #     'messages': ['Missing data for required field.']
    # }
    # {
    #     'field': 'pids._schema',
    #     'messages': [_('Invalid value for scheme doi')]
    # }
    superuser_identity = running_app.superuser_identity
    service = current_rdm_records.records_service
    # set the pids field
    # no value, to get a value from the system it should not send the pid_type
    doi = {
        "provider": "datacite",
        "client": "datacite"
    }
    pids = {"doi": doi}
    minimal_record["pids"] = pids
    # create the draft
    # will pass creation since validation is just reported, not hard fail
    # but it will be removed (not saved)
    draft = service.create(superuser_identity, minimal_record)
    assert draft.to_dict()["pids"] == {}
def test_pid_creation_invalid_scheme_managed(
    running_app, es_clear, minimal_record
):
    """An unknown pid scheme ('lorem') is soft-reported and removed."""
    superuser_identity = running_app.superuser_identity
    service = current_rdm_records.records_service
    # set the pids field under a scheme key the system does not know
    lorem = {
        "identifier": "10.1234/datacite.12345",
        "provider": "datacite",
        "client": "datacite"
    }
    pids = {"lorem": lorem}
    minimal_record["pids"] = pids
    # create the draft
    # check soft validation reported the error
    draft = service.create(superuser_identity, minimal_record)
    expected_errors = [
        {'field': 'pids', 'messages': [_('Invalid value for scheme lorem')]}
    ]
    assert draft.errors == expected_errors
    # NOTE: the invalid pid got removed, so if publish it will not be there
    assert draft.to_dict()["pids"] == {}
def test_pid_creation_valid_unmanaged(running_app, es_clear, minimal_record):
    """An externally-provided DOI is preserved unchanged through publish."""
    superuser_identity = running_app.superuser_identity
    service = current_rdm_records.records_service
    # set the pids field with an external (unmanaged) DOI
    doi = {
        "identifier": "10.1234/datacite.12345",
        "provider": "external",
    }
    pids = {"doi": doi}
    minimal_record["pids"] = pids
    # create the draft
    draft = service.create(superuser_identity, minimal_record)
    # publish the record
    record = service.publish(draft.id, superuser_identity)
    published_doi = record.to_dict()["pids"]["doi"]
    assert doi["identifier"] == published_doi["identifier"]
    assert doi["provider"] == published_doi["provider"]
def test_pid_creation_invalid_format_unmanaged(
    running_app, es_clear, minimal_record
):
    """An external DOI with a malformed identifier is dropped from the draft."""
    superuser_identity = running_app.superuser_identity
    service = current_rdm_records.records_service
    # set the pids field with a value that is not a valid DOI
    doi = {
        "identifier": "loremipsum",
        "provider": "external",
    }
    pids = {"doi": doi}
    minimal_record["pids"] = pids
    # create the draft
    # will pass creation since validation is just reported, not hard fail
    # but it will be removed (not saved)
    draft = service.create(superuser_identity, minimal_record)
    assert draft.to_dict()["pids"] == {}
def test_pid_creation_invalid_scheme_unmanaged(
    running_app, es_clear, minimal_record
):
    """An external pid under an unknown scheme is soft-reported and removed."""
    superuser_identity = running_app.superuser_identity
    service = current_rdm_records.records_service
    # set the pids field under a scheme key the system does not know
    lorem = {
        "identifier": "10.1234/datacite.12345",
        "provider": "external",
    }
    pids = {"lorem": lorem}
    minimal_record["pids"] = pids
    # create the draft
    draft = service.create(superuser_identity, minimal_record)
    expected_errors = [
        {'field': 'pids', 'messages': [_('Invalid value for scheme lorem')]}
    ]
    assert draft.errors == expected_errors
    # NOTE: the invalid pid got removed, so if publish it will not be there
    assert draft.to_dict()["pids"] == {}
def _publish_record(identity, record):
    """Publish ``record`` with a service-minted PID and return its DOI dict."""
    service = current_rdm_records.records_service
    record["pids"] = {}  # no external PIDs: let the service mint the DOI

    draft = service.create(identity, record)
    published = service.publish(draft.id, identity)

    return published.to_dict()["pids"]["doi"]
def test_pid_creation_duplicated_unmanaged(
    running_app, es_clear, superuser_identity, minimal_record
):
    """Creating a draft that reuses an already published DOI fails."""
    service = current_rdm_records.records_service
    existing_doi = _publish_record(superuser_identity, minimal_record)

    # Reuse the identifier of the record published above.
    minimal_record["pids"] = {
        "doi": {
            "identifier": existing_doi["identifier"],
            "provider": "external",
        }
    }

    with pytest.raises(ValidationError):
        service.create(superuser_identity, minimal_record)
def test_pid_update_duplicated_unmanaged(
    running_app, es_clear, superuser_identity, minimal_record
):
    """Updating a draft with an already published external DOI fails."""
    service = current_rdm_records.records_service
    existing_doi = _publish_record(superuser_identity, minimal_record)

    draft = service.create(superuser_identity, minimal_record)

    # Try to point the draft at the DOI of the published record.
    payload = draft.to_dict()
    payload["pids"] = {
        "doi": {
            "identifier": existing_doi["identifier"],
            "provider": "external",
        }
    }

    with pytest.raises(ValidationError):
        service.update_draft(draft.id, superuser_identity, payload)
def test_pid_create_duplicated_managed(
    running_app, es_clear, superuser_identity, minimal_record
):
    """Creating a draft that duplicates a managed DOI fails."""
    service = current_rdm_records.records_service
    existing_doi = _publish_record(superuser_identity, minimal_record)

    # Copy the full managed PID (provider + client) of the published record.
    minimal_record["pids"] = {
        "doi": {
            "identifier": existing_doi["identifier"],
            "provider": existing_doi["provider"],
            "client": existing_doi["client"],
        }
    }

    with pytest.raises(ValidationError):
        service.create(superuser_identity, minimal_record)
def test_pid_update_duplicated_managed(
    running_app, es_clear, superuser_identity, minimal_record
):
    """Updating a draft with an already published managed DOI fails."""
    service = current_rdm_records.records_service
    existing_doi = _publish_record(superuser_identity, minimal_record)

    draft = service.create(superuser_identity, minimal_record)

    # Copy the full managed PID (provider + client) of the published record.
    payload = draft.to_dict()
    payload["pids"] = {
        "doi": {
            "identifier": existing_doi["identifier"],
            "provider": existing_doi["provider"],
            "client": existing_doi["client"],
        }
    }

    with pytest.raises(ValidationError):
        service.update_draft(draft.id, superuser_identity, payload)
def test_minimal_draft_creation(running_app, es_clear, minimal_record):
    """The resource type id is expanded with its vocabulary title."""
    identity = running_app.superuser_identity
    service = current_rdm_records.records_service

    created = service.create(identity, minimal_record).to_dict()

    assert created["metadata"]["resource_type"] == {
        "id": "image-photo",
        "title": {"en": "Photo"},
    }
def test_draft_w_languages_creation(running_app, es_clear, minimal_record):
    """Language ids are expanded with their vocabulary titles."""
    identity = running_app.superuser_identity
    service = current_rdm_records.records_service

    minimal_record["metadata"]["languages"] = [{"id": "eng"}]
    created = service.create(identity, minimal_record).to_dict()

    assert created["metadata"]["languages"] == [
        {"id": "eng", "title": {"en": "English", "da": "Engelsk"}}
    ]
#
# Embargo lift
#
def test_embargo_lift_without_draft(embargoed_record, running_app, es_clear):
    """Lifting an embargo opens the record when no draft exists."""
    service = current_rdm_records.records_service
    rec_id = embargoed_record["id"]

    service.lift_embargo(
        _id=rec_id,
        identity=running_app.superuser_identity,
    )

    lifted = service.record_cls.pid.resolve(rec_id)
    assert lifted.access.embargo.active is False
    assert lifted.access.protection.files == "public"
    assert lifted.access.protection.record == "public"
    assert lifted.access.status.value == "metadata-only"
def test_embargo_lift_with_draft(
    embargoed_record, es_clear, superuser_identity
):
    """Lifting an embargo also opens an existing (unmodified) draft."""
    service = current_rdm_records.records_service
    rec_id = embargoed_record["id"]

    # Open a draft, then lift the embargo on the published record.
    ongoing_draft = service.edit(id_=rec_id, identity=superuser_identity)
    service.lift_embargo(_id=rec_id, identity=superuser_identity)

    lifted_record = service.record_cls.pid.resolve(rec_id)
    lifted_draft = service.draft_cls.pid.resolve(ongoing_draft["id"])

    # Both the record and its draft end up fully open.
    for lifted in (lifted_record, lifted_draft):
        assert lifted.access.embargo.active is False
        assert lifted.access.protection.files == "public"
        assert lifted.access.protection.record == "public"
def test_embargo_lift_with_updated_draft(
    embargoed_record, superuser_identity, es_clear
):
    """Lifting preserves user edits made to a pre-existing draft."""
    service = current_rdm_records.records_service
    rec_id = embargoed_record["id"]

    # Simulate a user editing the draft while the record is embargoed:
    # new title, restricted status, embargo switched off.
    data = service.edit(id_=rec_id, identity=superuser_identity).data
    data["metadata"]["title"] = 'Record modified by the user'
    data["access"]["status"] = 'restricted'
    data["access"]["embargo"] = dict(active=False, until=None, reason=None)
    ongoing_draft = service.update_draft(
        id_=data["id"], identity=superuser_identity, data=data
    )

    service.lift_embargo(_id=rec_id, identity=superuser_identity)

    lifted_record = service.record_cls.pid.resolve(rec_id)
    lifted_draft = service.draft_cls.pid.resolve(ongoing_draft["id"])

    # The published record is fully opened ...
    assert lifted_record.access.embargo.active is False
    assert lifted_record.access.protection.files == "public"
    assert lifted_record.access.protection.record == "public"
    # ... while the draft keeps the user's restricted-files choice.
    assert lifted_draft.access.embargo.active is False
    assert lifted_draft.access.protection.files == "restricted"
    assert lifted_draft.access.protection.record == "public"
def test_embargo_lift_with_error(running_app, es_clear, minimal_record):
    """Lifting an embargo that has not expired raises an error."""
    identity = running_app.superuser_identity
    service = current_rdm_records.records_service

    # Publish a record embargoed far into the future.
    minimal_record["access"]["files"] = 'restricted'
    minimal_record["access"]["status"] = 'embargoed'
    minimal_record["access"]["embargo"] = dict(
        active=True, until='3220-06-01', reason=None
    )
    draft = service.create(identity, minimal_record)
    record = service.publish(id_=draft.id, identity=identity)

    # The embargo end date (year 3220) is not reached, so lifting fails.
    with pytest.raises(EmbargoNotLiftedError):
        service.lift_embargo(_id=record['id'], identity=identity)
| 34.45733
| 79
| 0.710104
| 1,899
| 15,747
| 5.630858
| 0.126382
| 0.116057
| 0.056111
| 0.070139
| 0.761994
| 0.750678
| 0.732255
| 0.709062
| 0.694567
| 0.668568
| 0
| 0.005086
| 0.18848
| 15,747
| 456
| 80
| 34.532895
| 0.831677
| 0.154379
| 0
| 0.62963
| 0
| 0
| 0.09707
| 0.006966
| 0
| 0
| 0
| 0
| 0.117845
| 1
| 0.074074
| false
| 0
| 0.023569
| 0
| 0.104377
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b985b50bfd7d6d5db56e2770252a5bf8c0236869
| 186
|
py
|
Python
|
bin/twigs/pentatwigs-staggered-rectangle-5x3.py
|
tiwo/puzzler
|
7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e
|
[
"Intel"
] | null | null | null |
bin/twigs/pentatwigs-staggered-rectangle-5x3.py
|
tiwo/puzzler
|
7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e
|
[
"Intel"
] | null | null | null |
bin/twigs/pentatwigs-staggered-rectangle-5x3.py
|
tiwo/puzzler
|
7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e
|
[
"Intel"
] | 1
|
2022-01-02T16:54:14.000Z
|
2022-01-02T16:54:14.000Z
|
#!/usr/bin/env python
# $Id$
"""
145 solutions.
"""
import puzzler
from puzzler.puzzles.pentatwigs import PentatwigsStaggeredRectangle5x3
# Entry point: delegate to the puzzler front-end with the puzzle class
# describing the staggered 5x3 pentatwigs rectangle.
puzzler.run(PentatwigsStaggeredRectangle5x3)
| 15.5
| 70
| 0.795699
| 18
| 186
| 8.222222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0.096774
| 186
| 11
| 71
| 16.909091
| 0.839286
| 0.215054
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b9d3023b56b3ea1ec16587cbb931131ff7227bfd
| 28,799
|
py
|
Python
|
release/stubs.min/GH_IO/Types.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 182
|
2017-06-27T02:26:15.000Z
|
2022-03-30T18:53:43.000Z
|
release/stubs.min/GH_IO/Types.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 28
|
2017-06-27T13:38:23.000Z
|
2022-03-15T11:19:44.000Z
|
release/stubs.min/GH_IO/Types.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 67
|
2017-06-28T09:43:59.000Z
|
2022-03-20T21:17:10.000Z
|
# encoding: utf-8
# module GH_IO.Types calls itself Types
# from GH_IO,Version=1.0.0.0,Culture=neutral,PublicKeyToken=6a29997d2e6b4f97
# by generator 1.145
""" NamespaceTracker represent a CLS namespace. """
# no imports
# no functions
# classes
class GH_BoundingBox(object):
    """
    Represents a 3D bounding box,denoted by two points.

    GH_BoundingBox(nMin: GH_Point3D,nMax: GH_Point3D)
    GH_BoundingBox(Minx: float,Miny: float,Minz: float,Maxx: float,Maxy: float,Maxz: float)
    """
    # Defect fixed: indentation was lost during extraction, making the stub
    # invalid Python; structure reconstructed with conventional formatting.
    def ToString(self):
        """
        ToString(self: GH_BoundingBox) -> str

        Converts this structure to a human-readable string.
        Returns: A string representation of the box structure.
        """
        pass

    @staticmethod
    def __new__(self, *__args):
        """
        __new__[GH_BoundingBox]() -> GH_BoundingBox
        __new__(cls: type,nMin: GH_Point3D,nMax: GH_Point3D)
        __new__(cls: type,Minx: float,Miny: float,Minz: float,Maxx: float,Maxy: float,Maxz: float)
        """
        pass

    # Struct fields exposed by the stub generator as plain attributes.
    Max = None
    Min = None
class GH_Interval1D(object):
    """
    Represents two double precision floating point values.

    GH_Interval1D(na: float,nb: float)
    """
    # Defect fixed: indentation was lost during extraction, making the stub
    # invalid Python; structure reconstructed with conventional formatting.
    def ToString(self):
        """
        ToString(self: GH_Interval1D) -> str

        Converts this structure to a human-readable string.
        Returns: A string representation of the Interval structure.
        """
        pass

    @staticmethod
    def __new__(self, na, nb):
        """
        __new__[GH_Interval1D]() -> GH_Interval1D
        __new__(cls: type,na: float,nb: float)
        """
        pass

    # Interval bounds exposed by the stub generator as plain attributes.
    a = None
    b = None
class GH_Interval2D(object):
    """
    Represents two double precision domains.

    GH_Interval2D(nu: GH_Interval1D,nv: GH_Interval1D)
    GH_Interval2D(nu0: float,nu1: float,nv0: float,nv1: float)
    """
    # Defect fixed: indentation was lost during extraction, making the stub
    # invalid Python; structure reconstructed with conventional formatting.
    def ToString(self):
        """
        ToString(self: GH_Interval2D) -> str

        Converts this structure to a human-readable string.
        Returns: A string representation of the two-dimensional Interval structure.
        """
        pass

    @staticmethod
    def __new__(self, *__args):
        """
        __new__[GH_Interval2D]() -> GH_Interval2D
        __new__(cls: type,nu: GH_Interval1D,nv: GH_Interval1D)
        __new__(cls: type,nu0: float,nu1: float,nv0: float,nv1: float)
        """
        pass

    # The two component domains, exposed as plain attributes.
    u = None
    v = None
class GH_Item(object,GH_IBinarySupport,GH_IXmlSupport):
"""
Represents a single data item in a chunk.
GH_Item(item_name: str,item_data: bool)
GH_Item(item_name: str,item_index: int,item_data: bool)
GH_Item(item_name: str,item_data: Byte)
GH_Item(item_name: str,item_index: int,item_data: Byte)
GH_Item(item_name: str,item_data: int)
GH_Item(item_name: str,item_index: int,item_data: int)
GH_Item(item_name: str,item_data: Int64)
GH_Item(item_name: str,item_index: int,item_data: Int64)
GH_Item(item_name: str,item_data: Single)
GH_Item(item_name: str,item_index: int,item_data: Single)
GH_Item(item_name: str,item_data: float)
GH_Item(item_name: str,item_index: int,item_data: float)
GH_Item(item_name: str,item_data: Decimal)
GH_Item(item_name: str,item_index: int,item_data: Decimal)
GH_Item(item_name: str,item_data: DateTime)
GH_Item(item_name: str,item_index: int,item_data: DateTime)
GH_Item(item_name: str,item_data: Guid)
GH_Item(item_name: str,item_index: int,item_data: Guid)
GH_Item(item_name: str,item_data: str)
GH_Item(item_name: str,item_index: int,item_data: str)
GH_Item(item_name: str,item_data: Array[Byte])
GH_Item(item_name: str,item_index: int,item_data: Array[Byte])
GH_Item(item_name: str,item_data: Array[float])
GH_Item(item_name: str,item_index: int,item_data: Array[float])
GH_Item(item_name: str,item_data: Point)
GH_Item(item_name: str,item_index: int,item_data: Point)
GH_Item(item_name: str,item_data: PointF)
GH_Item(item_name: str,item_index: int,item_data: PointF)
GH_Item(item_name: str,item_data: Size)
GH_Item(item_name: str,item_index: int,item_data: Size)
GH_Item(item_name: str,item_data: SizeF)
GH_Item(item_name: str,item_index: int,item_data: SizeF)
GH_Item(item_name: str,item_data: Rectangle)
GH_Item(item_name: str,item_index: int,item_data: Rectangle)
GH_Item(item_name: str,item_data: RectangleF)
GH_Item(item_name: str,item_index: int,item_data: RectangleF)
GH_Item(item_name: str,item_data: Color)
GH_Item(item_name: str,item_index: int,item_data: Color)
GH_Item(item_name: str,item_data: Bitmap)
GH_Item(item_name: str,item_index: int,item_data: Bitmap)
GH_Item(item_name: str,item_data: GH_Point2D)
GH_Item(item_name: str,item_index: int,item_data: GH_Point2D)
GH_Item(item_name: str,item_data: GH_Point3D)
GH_Item(item_name: str,item_index: int,item_data: GH_Point3D)
GH_Item(item_name: str,item_data: GH_Point4D)
GH_Item(item_name: str,item_index: int,item_data: GH_Point4D)
GH_Item(item_name: str,item_data: GH_Interval1D)
GH_Item(item_name: str,item_index: int,item_data: GH_Interval1D)
GH_Item(item_name: str,item_data: GH_Interval2D)
GH_Item(item_name: str,item_index: int,item_data: GH_Interval2D)
GH_Item(item_name: str,item_data: GH_Line)
GH_Item(item_name: str,item_index: int,item_data: GH_Line)
GH_Item(item_name: str,item_data: GH_BoundingBox)
GH_Item(item_name: str,item_index: int,item_data: GH_BoundingBox)
GH_Item(item_name: str,item_data: GH_Plane)
GH_Item(item_name: str,item_index: int,item_data: GH_Plane)
GH_Item(item_name: str,item_data: GH_Version)
GH_Item(item_name: str,item_index: int,item_data: GH_Version)
"""
@staticmethod
def CreateFrom(*__args):
"""
CreateFrom(node: XmlNode) -> GH_Item
Creates a new instance of GH_Item and sets the fields from an Xml node object.
node: Xml node object that defines the field data.
Returns: The constructed and read item.
CreateFrom(reader: BinaryReader) -> GH_Item
Creates a new instance of GH_Item and sets the fields from a reader object.
reader: Reader object that defines the field data.
Returns: The constructed and read item.
"""
pass
def Read(self,*__args):
"""
Read(self: GH_Item,node: XmlNode)
Deserialize this item from an Xml node.
node: Xml node to serialize from.
Read(self: GH_Item,reader: BinaryReader)
Deserialize this item from a binary stream.
reader: Reader to deserialize with.
"""
pass
def ToString(self):
"""
ToString(self: GH_Item) -> str
Converts the struct into a human readable format.
"""
pass
def Write(self,writer):
"""
Write(self: GH_Item,writer: XmlWriter)
Serialize this item into an Xml stream.
writer: Writer to serialize with.
Write(self: GH_Item,writer: BinaryWriter)
Serialize this item into a binary stream.
writer: Writer to serialize with.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,item_name,*__args):
"""
__new__(cls: type,item_name: str,item_data: bool)
__new__(cls: type,item_name: str,item_index: int,item_data: bool)
__new__(cls: type,item_name: str,item_data: Byte)
__new__(cls: type,item_name: str,item_index: int,item_data: Byte)
__new__(cls: type,item_name: str,item_data: int)
__new__(cls: type,item_name: str,item_index: int,item_data: int)
__new__(cls: type,item_name: str,item_data: Int64)
__new__(cls: type,item_name: str,item_index: int,item_data: Int64)
__new__(cls: type,item_name: str,item_data: Single)
__new__(cls: type,item_name: str,item_index: int,item_data: Single)
__new__(cls: type,item_name: str,item_data: float)
__new__(cls: type,item_name: str,item_index: int,item_data: float)
__new__(cls: type,item_name: str,item_data: Decimal)
__new__(cls: type,item_name: str,item_index: int,item_data: Decimal)
__new__(cls: type,item_name: str,item_data: DateTime)
__new__(cls: type,item_name: str,item_index: int,item_data: DateTime)
__new__(cls: type,item_name: str,item_data: Guid)
__new__(cls: type,item_name: str,item_index: int,item_data: Guid)
__new__(cls: type,item_name: str,item_data: str)
__new__(cls: type,item_name: str,item_index: int,item_data: str)
__new__(cls: type,item_name: str,item_data: Array[Byte])
__new__(cls: type,item_name: str,item_index: int,item_data: Array[Byte])
__new__(cls: type,item_name: str,item_data: Array[float])
__new__(cls: type,item_name: str,item_index: int,item_data: Array[float])
__new__(cls: type,item_name: str,item_data: Point)
__new__(cls: type,item_name: str,item_index: int,item_data: Point)
__new__(cls: type,item_name: str,item_data: PointF)
__new__(cls: type,item_name: str,item_index: int,item_data: PointF)
__new__(cls: type,item_name: str,item_data: Size)
__new__(cls: type,item_name: str,item_index: int,item_data: Size)
__new__(cls: type,item_name: str,item_data: SizeF)
__new__(cls: type,item_name: str,item_index: int,item_data: SizeF)
__new__(cls: type,item_name: str,item_data: Rectangle)
__new__(cls: type,item_name: str,item_index: int,item_data: Rectangle)
__new__(cls: type,item_name: str,item_data: RectangleF)
__new__(cls: type,item_name: str,item_index: int,item_data: RectangleF)
__new__(cls: type,item_name: str,item_data: Color)
__new__(cls: type,item_name: str,item_index: int,item_data: Color)
__new__(cls: type,item_name: str,item_data: Bitmap)
__new__(cls: type,item_name: str,item_index: int,item_data: Bitmap)
__new__(cls: type,item_name: str,item_data: GH_Point2D)
__new__(cls: type,item_name: str,item_index: int,item_data: GH_Point2D)
__new__(cls: type,item_name: str,item_data: GH_Point3D)
__new__(cls: type,item_name: str,item_index: int,item_data: GH_Point3D)
__new__(cls: type,item_name: str,item_data: GH_Point4D)
__new__(cls: type,item_name: str,item_index: int,item_data: GH_Point4D)
__new__(cls: type,item_name: str,item_data: GH_Interval1D)
__new__(cls: type,item_name: str,item_index: int,item_data: GH_Interval1D)
__new__(cls: type,item_name: str,item_data: GH_Interval2D)
__new__(cls: type,item_name: str,item_index: int,item_data: GH_Interval2D)
__new__(cls: type,item_name: str,item_data: GH_Line)
__new__(cls: type,item_name: str,item_index: int,item_data: GH_Line)
__new__(cls: type,item_name: str,item_data: GH_BoundingBox)
__new__(cls: type,item_name: str,item_index: int,item_data: GH_BoundingBox)
__new__(cls: type,item_name: str,item_data: GH_Plane)
__new__(cls: type,item_name: str,item_index: int,item_data: GH_Plane)
__new__(cls: type,item_name: str,item_data: GH_Version)
__new__(cls: type,item_name: str,item_index: int,item_data: GH_Version)
"""
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
def __str__(self,*args):
pass
DebuggerDisplay=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Readonly property used during Debugging.
Get: DebuggerDisplay(self: GH_Item) -> str
"""
HasIndex=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the index existence implication. The item is considered to have an index qualifier
if the index value is larger than or equal to zero.
Get: HasIndex(self: GH_Item) -> bool
"""
HasName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the name validity of this item.
The item is considered to have an invalid name if string.IsNullOrEmpty(name)
Get: HasName(self: GH_Item) -> bool
"""
HasType=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the type set validity of this item.
The item is considered to have a type if type != GH_Types.unset
Get: HasType(self: GH_Item) -> bool
"""
Index=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the index of an item.
Typically,indices are set at construction and do not change.
If you change indices after construction,you could corrupt an archive.
Get: Index(self: GH_Item) -> int
Set: Index(self: GH_Item)=value
"""
InternalData=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Retrieves the internal data of this item.
No type casting is performed.
Get: InternalData(self: GH_Item) -> object
"""
Name=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the name of this item.
Typically,names are set at construction and do not change.
If you change names after construction,you could corrupt an archive.
Get: Name(self: GH_Item) -> str
Set: Name(self: GH_Item)=value
"""
Type=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the type of this item.
Type flags are set during construction and cannot be altered.
Get: Type(self: GH_Item) -> GH_Types
"""
_bool=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Boolean.
If the data is not stored as a Boolean,a conversion exception might be thrown.
Get: _bool(self: GH_Item) -> bool
"""
_boundingbox=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a BoundingBox.
If the data is not stored as a BoundingBox,a conversion exception might be thrown.
Get: _boundingbox(self: GH_Item) -> GH_BoundingBox
"""
_byte=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Byte.
If the data is not stored as a Byte,a conversion exception might be thrown.
Get: _byte(self: GH_Item) -> Byte
"""
_bytearray=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Byte array.
If the data is not stored as a Byte array,a conversion exception might be thrown.
Get: _bytearray(self: GH_Item) -> Array[Byte]
"""
_date=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a DateTime.
If the data is not stored as a DateTime,a conversion exception might be thrown.
Get: _date(self: GH_Item) -> DateTime
"""
_decimal=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Decimal.
If the data is not stored as a Decimal,a conversion exception might be thrown.
Get: _decimal(self: GH_Item) -> Decimal
"""
_double=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Double.
If the data is not stored as a Double,a conversion exception might be thrown.
Get: _double(self: GH_Item) -> float
"""
_doublearray=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Byte array.
If the data is not stored as a Byte array,a conversion exception might be thrown.
Get: _doublearray(self: GH_Item) -> Array[float]
"""
_drawing_bitmap=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Bitmap.
If the data is not stored as a Bitmap,a conversion exception might be thrown.
Get: _drawing_bitmap(self: GH_Item) -> Bitmap
"""
_drawing_color=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Color.
If the data is not stored as a Color,a conversion exception might be thrown.
Get: _drawing_color(self: GH_Item) -> Color
"""
_drawing_point=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Point.
If the data is not stored as a Point,a conversion exception might be thrown.
Get: _drawing_point(self: GH_Item) -> Point
"""
_drawing_pointf=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a PointF.
If the data is not stored as a PointF,a conversion exception might be thrown.
Get: _drawing_pointf(self: GH_Item) -> PointF
"""
_drawing_rectangle=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Rectangle.
If the data is not stored as a Rectangle,a conversion exception might be thrown.
Get: _drawing_rectangle(self: GH_Item) -> Rectangle
"""
_drawing_rectanglef=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a RectangleF.
If the data is not stored as a RectangleF,a conversion exception might be thrown.
Get: _drawing_rectanglef(self: GH_Item) -> RectangleF
"""
_drawing_size=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Size.
If the data is not stored as a Size,a conversion exception might be thrown.
Get: _drawing_size(self: GH_Item) -> Size
"""
_drawing_sizef=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a SizeF.
If the data is not stored as a SizeF,a conversion exception might be thrown.
Get: _drawing_sizef(self: GH_Item) -> SizeF
"""
_guid=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Guid.
If the data is not stored as a Guid,a conversion exception might be thrown.
Get: _guid(self: GH_Item) -> Guid
"""
_int32=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to an Int32.
If the data is not stored as an Int32,a conversion exception might be thrown.
Get: _int32(self: GH_Item) -> int
"""
_int64=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to an Int64.
If the data is not stored as an Int64,a conversion exception might be thrown.
Get: _int64(self: GH_Item) -> Int64
"""
_interval1d=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to an Interval1D.
If the data is not stored as an Interval1D,a conversion exception might be thrown.
Get: _interval1d(self: GH_Item) -> GH_Interval1D
"""
_interval2d=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to an Interval2D.
If the data is not stored as an Interval2D,a conversion exception might be thrown.
Get: _interval2d(self: GH_Item) -> GH_Interval2D
"""
_line=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Line.
If the data is not stored as a Line,a conversion exception might be thrown.
Get: _line(self: GH_Item) -> GH_Line
"""
_plane=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Plane.
If the data is not stored as a Plane,a conversion exception might be thrown.
Get: _plane(self: GH_Item) -> GH_Plane
"""
_point2d=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Point2D.
If the data is not stored as a Point2D,a conversion exception might be thrown.
Get: _point2d(self: GH_Item) -> GH_Point2D
"""
_point3d=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Point3D.
If the data is not stored as a Point3D,a conversion exception might be thrown.
Get: _point3d(self: GH_Item) -> GH_Point3D
"""
_point4d=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Point4D.
If the data is not stored as a Point4D,a conversion exception might be thrown.
Get: _point4d(self: GH_Item) -> GH_Point4D
"""
_single=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Single.
If the data is not stored as a Single,a conversion exception might be thrown.
Get: _single(self: GH_Item) -> Single
"""
_string=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a String.
If the data is not stored as a String,a conversion exception might be thrown.
Get: _string(self: GH_Item) -> str
"""
_version=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the internal data of this item cast to a Version.
If the data is not stored as a Version,a conversion exception might be thrown.
Get: _version(self: GH_Item) -> GH_Version
"""
class GH_Line(object):
    """
    Represents a 3D line segment,denoted by start and endpoints.

    GH_Line(nA: GH_Point3D,nB: GH_Point3D)
    GH_Line(Ax: float,Ay: float,Az: float,Bx: float,By: float,Bz: float)
    """
    # Defect fixed: indentation was lost during extraction, making the stub
    # invalid Python; structure reconstructed with conventional formatting.
    def ToString(self):
        """
        ToString(self: GH_Line) -> str

        Converts this structure to a human-readable string.
        Returns: A string representation of the line structure.
        """
        pass

    @staticmethod
    def __new__(self, *__args):
        """
        __new__[GH_Line]() -> GH_Line
        __new__(cls: type,nA: GH_Point3D,nB: GH_Point3D)
        __new__(cls: type,Ax: float,Ay: float,Az: float,Bx: float,By: float,Bz: float)
        """
        pass

    # Endpoints exposed by the stub generator as plain attributes.
    A = None
    B = None
class GH_Plane(object):
    """
    Represents a 3D plane system,defined by origin point and {X,Y} axis directions.

    GH_Plane(nOrigin: GH_Point3D,nXAxis: GH_Point3D,nYAxis: GH_Point3D)
    GH_Plane(Ox: float,Oy: float,Oz: float,Xx: float,Xy: float,Xz: float,Yx: float,Yy: float,Yz: float)
    """
    # Defect fixed: indentation was lost during extraction, making the stub
    # invalid Python; structure reconstructed with conventional formatting.
    def ToString(self):
        """
        ToString(self: GH_Plane) -> str

        Converts this structure to a human-readable string.
        Returns: A string representation of the plane structure.
        """
        pass

    @staticmethod
    def __new__(self, *__args):
        """
        __new__[GH_Plane]() -> GH_Plane
        __new__(cls: type,nOrigin: GH_Point3D,nXAxis: GH_Point3D,nYAxis: GH_Point3D)
        __new__(cls: type,Ox: float,Oy: float,Oz: float,Xx: float,Xy: float,Xz: float,Yx: float,Yy: float,Yz: float)
        """
        pass

    # Plane frame components exposed as plain attributes.
    Origin = None
    XAxis = None
    YAxis = None
class GH_Point2D(object):
    """
    Represents a 2D point coordinate with double precision floating point components.

    GH_Point2D(nx: float,ny: float)
    """
    # Defect fixed: indentation was lost during extraction, making the stub
    # invalid Python; structure reconstructed with conventional formatting.
    def ToString(self):
        """
        ToString(self: GH_Point2D) -> str

        Converts this structure to a human-readable string.
        Returns: A string representation of the two-dimenionsional point structure.
        """
        pass

    @staticmethod
    def __new__(self, nx, ny):
        """
        __new__[GH_Point2D]() -> GH_Point2D
        __new__(cls: type,nx: float,ny: float)
        """
        pass

    # Coordinate components exposed as plain attributes.
    x = None
    y = None
class GH_Point3D(object):
    """
    Represents a 3D point coordinate with double precision floating point components.

    GH_Point3D(nx: float,ny: float,nz: float)
    """
    # Defect fixed: indentation was lost during extraction, making the stub
    # invalid Python; structure reconstructed with conventional formatting.
    def ToString(self):
        """
        ToString(self: GH_Point3D) -> str

        Converts this structure to a human-readable string.
        Returns: A string representation of the three-dimenionsional point structure.
        """
        pass

    @staticmethod
    def __new__(self, nx, ny, nz):
        """
        __new__[GH_Point3D]() -> GH_Point3D
        __new__(cls: type,nx: float,ny: float,nz: float)
        """
        pass

    # Coordinate components exposed as plain attributes.
    x = None
    y = None
    z = None
class GH_Point4D(object):
    """
    Represents a 4D point coordinate with double precision floating point components.

    GH_Point4D(nx: float,ny: float,nz: float,nw: float)
    """
    # Defect fixed: indentation was lost during extraction, making the stub
    # invalid Python; structure reconstructed with conventional formatting.
    def ToString(self):
        """
        ToString(self: GH_Point4D) -> str

        Converts this structure to a human-readable string.
        Returns: A string representation of the four-dimenionsional point structure.
        """
        pass

    @staticmethod
    def __new__(self, nx, ny, nz, nw):
        """
        __new__[GH_Point4D]() -> GH_Point4D
        __new__(cls: type,nx: float,ny: float,nz: float,nw: float)
        """
        pass

    # Coordinate components exposed as plain attributes.
    w = None
    x = None
    y = None
    z = None
class GH_Types(Enum, IComparable, IFormattable, IConvertible):
    """
    Contains flags for all data types currently supported by GH_IO.dll

    enum GH_Types,values: gh_bool (1),gh_boundingbox (71),gh_byte (2),gh_bytearray (20),gh_date (8),gh_decimal (7),gh_double (6),gh_doublearray (21),gh_drawing_bitmap (37),gh_drawing_color (36),gh_drawing_point (30),gh_drawing_pointf (31),gh_drawing_rectangle (34),gh_drawing_rectanglef (35),gh_drawing_size (32),gh_drawing_sizef (33),gh_guid (9),gh_int32 (3),gh_int64 (4),gh_interval1d (60),gh_interval2d (61),gh_line (70),gh_plane (72),gh_point2d (50),gh_point3d (51),gh_point4d (52),gh_single (5),gh_string (10),gh_version (80),unset (0)
    """
    # Defect fixed: indentation was lost during extraction, making the stub
    # invalid Python; structure reconstructed with conventional formatting.
    # NOTE(review): the bases (Enum, IComparable, ...) are .NET types resolved
    # by IronPython, not Python's enum module — TODO confirm in the full file.
    def __eq__(self, *args):
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass

    def __format__(self, *args):
        """ __format__(formattable: IFormattable,format: str) -> str """
        pass

    def __ge__(self, *args):
        pass

    def __gt__(self, *args):
        pass

    def __init__(self, *args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass

    def __le__(self, *args):
        pass

    def __lt__(self, *args):
        pass

    def __ne__(self, *args):
        pass

    def __reduce_ex__(self, *args):
        pass

    def __str__(self, *args):
        pass

    # Enum members; values are documented in the class docstring above.
    gh_bool = None
    gh_boundingbox = None
    gh_byte = None
    gh_bytearray = None
    gh_date = None
    gh_decimal = None
    gh_double = None
    gh_doublearray = None
    gh_drawing_bitmap = None
    gh_drawing_color = None
    gh_drawing_point = None
    gh_drawing_pointf = None
    gh_drawing_rectangle = None
    gh_drawing_rectanglef = None
    gh_drawing_size = None
    gh_drawing_sizef = None
    gh_guid = None
    gh_int32 = None
    gh_int64 = None
    gh_interval1d = None
    gh_interval2d = None
    gh_line = None
    gh_plane = None
    gh_point2d = None
    gh_point3d = None
    gh_point4d = None
    gh_single = None
    gh_string = None
    gh_version = None
    unset = None
    value__ = None
class GH_Version(object):
    """Basic version type. Contains Major, Minor and Revision fields.

    Constructors (from GH_IO.dll):
        GH_Version(v_major: int, v_minor: int, v_revision: int)
        GH_Version(other: GH_Version)

    Auto-generated interop stub; method bodies are placeholders.
    """
    def Equals(self, obj):
        """
        Equals(self: GH_Version, obj: object) -> bool

        Performs value equality comparison.

        obj: Object to compare with. If obj is a null reference or not a
            GH_Version instance, false is returned.
        Returns: True if obj is a GH_Version instance which is equal to this one.
        """
        pass
    def GetHashCode(self):
        """
        GetHashCode(self: GH_Version) -> int

        Returns the hash code for this instance.
        Returns: A hash code for the current version object.
        """
        pass
    def ToString(self):
        """
        ToString(self: GH_Version) -> str

        Default formatter for Version data: M.m.RRRR. The revision section is
        padded with zeroes until it is at least 4 digits long.
        Returns: A string representation of the Version structure.
        """
        pass
    def __eq__(self, *args):
        """ x.__eq__(y) <==> x==y """
        pass
    def __ge__(self, *args):
        pass
    def __gt__(self, *args):
        pass
    def __le__(self, *args):
        pass
    def __lt__(self, *args):
        pass
    @staticmethod
    def __new__(self, *__args):
        """
        __new__[GH_Version]() -> GH_Version
        __new__(cls: type, v_major: int, v_minor: int, v_revision: int)
        __new__(cls: type, other: GH_Version)
        """
        pass
    def __ne__(self, *args):
        pass
    # Version components; None in this stub, populated by the .NET runtime.
    major = None
    minor = None
    revision = None
| 22.820127
| 538
| 0.69235
| 4,349
| 28,799
| 4.300759
| 0.079559
| 0.050043
| 0.068221
| 0.093028
| 0.727866
| 0.710864
| 0.691189
| 0.630026
| 0.559559
| 0.493531
| 0
| 0.009474
| 0.201014
| 28,799
| 1,261
| 539
| 22.838224
| 0.80339
| 0.47526
| 0
| 0.467662
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.218905
| false
| 0.218905
| 0
| 0
| 0.726368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
b9dbf62f9c3d8025199b3066cf74a32ffbf437fb
| 156
|
py
|
Python
|
hw/andrei_bondar/test_bondar.py
|
alexander-sidorov/qap-05
|
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
|
[
"MIT"
] | 9
|
2021-12-10T21:30:07.000Z
|
2022-02-25T21:32:34.000Z
|
hw/andrei_bondar/test_bondar.py
|
alexander-sidorov/qap-05
|
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
|
[
"MIT"
] | 22
|
2021-12-11T08:46:58.000Z
|
2022-02-02T15:56:37.000Z
|
hw/andrei_bondar/test_bondar.py
|
alexander-sidorov/qap-05
|
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
|
[
"MIT"
] | 8
|
2021-12-11T09:15:45.000Z
|
2022-02-02T08:09:09.000Z
|
def test_example() -> None:
    """Smoke test: trivially true facts about arithmetic and string concatenation."""
    assert True, "not True"
    # Each pair is (actual expression, expected value); all must agree.
    cases = (
        (1 + 1, 2),
        (4 / 2, 2),
        (2 * 2, 4),
        ("ab" + "bc", "abbc"),
    )
    for actual, expected in cases:
        assert actual == expected
| 19.5
| 32
| 0.49359
| 24
| 156
| 3.166667
| 0.541667
| 0.184211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087379
| 0.339744
| 156
| 7
| 33
| 22.285714
| 0.650485
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 0
| 0
| 0
| 0
| 0
| 0.833333
| 1
| 0.166667
| true
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.