hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3ecf7d4c6334ede4964385190cb59f0b14bb17be
| 48
|
py
|
Python
|
ceem/__init__.py
|
sisl/CEEM
|
6154587fe3cdb92e8b7f70eedb1262caa1553cc8
|
[
"MIT"
] | 5
|
2020-06-21T16:50:42.000Z
|
2021-03-14T04:02:01.000Z
|
ceem/__init__.py
|
sisl/CEEM
|
6154587fe3cdb92e8b7f70eedb1262caa1553cc8
|
[
"MIT"
] | 1
|
2021-03-13T07:46:36.000Z
|
2021-03-16T05:14:47.000Z
|
ceem/__init__.py
|
sisl/CEEM
|
6154587fe3cdb92e8b7f70eedb1262caa1553cc8
|
[
"MIT"
] | 1
|
2021-03-30T12:08:20.000Z
|
2021-03-30T12:08:20.000Z
|
#
# File: __init__.py
#
from .ceem import CEEM
| 8
| 22
| 0.666667
| 7
| 48
| 4
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 48
| 5
| 23
| 9.6
| 0.736842
| 0.354167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ee0fdbcabedd5102a8044581b4408443b84bd49
| 180
|
py
|
Python
|
src/autodiff/__init__.py
|
mountain-bay/cs107-FinalProject
|
59487d7c334f5c9c8eb69d5b5b358a8e8d495dbb
|
[
"MIT"
] | null | null | null |
src/autodiff/__init__.py
|
mountain-bay/cs107-FinalProject
|
59487d7c334f5c9c8eb69d5b5b358a8e8d495dbb
|
[
"MIT"
] | 17
|
2020-11-18T18:25:58.000Z
|
2020-12-11T22:22:25.000Z
|
src/autodiff/__init__.py
|
mountain-bay/cs107-FinalProject
|
59487d7c334f5c9c8eb69d5b5b358a8e8d495dbb
|
[
"MIT"
] | null | null | null |
from .AD_Object import Var
from .AD_BasicMath import sin, cos, tan, sqrt, exp, log, ln
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| 25.714286
| 59
| 0.777778
| 28
| 180
| 4.642857
| 0.607143
| 0.253846
| 0.276923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 180
| 6
| 60
| 30
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.038889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f5ed689973aabdd26670968edae5f42f85fb73e9
| 147
|
py
|
Python
|
land_purchase_demo/land_purchase_demo/doctype/land_purchase/test_land_purchase.py
|
KaviyaPeriyasamy/land_purchase_demo
|
f1bc173fe2476fd18284e1e6a4fd015026984b16
|
[
"MIT"
] | null | null | null |
land_purchase_demo/land_purchase_demo/doctype/land_purchase/test_land_purchase.py
|
KaviyaPeriyasamy/land_purchase_demo
|
f1bc173fe2476fd18284e1e6a4fd015026984b16
|
[
"MIT"
] | null | null | null |
land_purchase_demo/land_purchase_demo/doctype/land_purchase/test_land_purchase.py
|
KaviyaPeriyasamy/land_purchase_demo
|
f1bc173fe2476fd18284e1e6a4fd015026984b16
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021, kaviya and Contributors
# See license.txt
# import frappe
import unittest
class TestLandPurchase(unittest.TestCase):
pass
| 16.333333
| 45
| 0.782313
| 18
| 147
| 6.388889
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031746
| 0.142857
| 147
| 8
| 46
| 18.375
| 0.880952
| 0.496599
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
eb139700bfde3ed461e4347dce8d3f1ec866be90
| 447
|
py
|
Python
|
tests/poligon.py
|
krzjoa/sciquence
|
6a5f758c757200fffeb0fdc9206462f1f89e2444
|
[
"MIT"
] | 8
|
2017-10-23T17:59:35.000Z
|
2021-05-10T03:01:30.000Z
|
tests/poligon.py
|
krzjoa/sciquence
|
6a5f758c757200fffeb0fdc9206462f1f89e2444
|
[
"MIT"
] | 2
|
2019-08-25T19:24:12.000Z
|
2019-09-05T12:16:10.000Z
|
tests/poligon.py
|
krzjoa/sciquence
|
6a5f758c757200fffeb0fdc9206462f1f89e2444
|
[
"MIT"
] | 2
|
2018-02-28T09:47:53.000Z
|
2019-08-25T19:24:16.000Z
|
import numpy as np
import sciquence.sequences as sq
x = np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0])
expected = [np.array([1, 1, 1]), np.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]),
np.array([1, 1, 1, 1, 1, 1, 1, 1, 1]),
np.array([0, 0, 0, 0]), np.array([1, 1, 1, 1]), np.array([0, 0, 0])]
print(sq.seq(x))
print(sq.lseq_equal_)
| 34.384615
| 83
| 0.451902
| 105
| 447
| 1.904762
| 0.152381
| 0.34
| 0.42
| 0.44
| 0.605
| 0.535
| 0.535
| 0.505
| 0.465
| 0.26
| 0
| 0.225
| 0.284116
| 447
| 12
| 84
| 37.25
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.222222
| 0
| 0.222222
| 0.222222
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
eb31f44df29db2453a7906a1e64f3ccd3a8030b3
| 20
|
py
|
Python
|
addons/lunch/populate/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/lunch/populate/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/lunch/populate/__init__.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
from . import lunch
| 10
| 19
| 0.75
| 3
| 20
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eb34ffec95888d31cb56b5e173b61cf180c68e05
| 24,017
|
py
|
Python
|
optionmodels/latticemethods.py
|
GBERESEARCH/optionmodels
|
4f2528317eb8bf38238fcf21a0fa286758385f69
|
[
"MIT"
] | 2
|
2021-02-08T22:05:12.000Z
|
2021-09-10T04:29:58.000Z
|
optionmodels/latticemethods.py
|
GBERESEARCH/optionmodels
|
4f2528317eb8bf38238fcf21a0fa286758385f69
|
[
"MIT"
] | null | null | null |
optionmodels/latticemethods.py
|
GBERESEARCH/optionmodels
|
4f2528317eb8bf38238fcf21a0fa286758385f69
|
[
"MIT"
] | 2
|
2020-12-21T08:36:45.000Z
|
2021-09-10T04:29:59.000Z
|
"""
Lattice based option pricing models
"""
import numpy as np
from scipy.special import comb
from optionmodels.utils import Utils
# pylint: disable=invalid-name
class LatticeMethods():
"""
Lattice based option pricing models
"""
@staticmethod
def european_binomial(**kwargs):
"""
European Binomial Option price.
Combinatorial function limit c1000
Parameters
----------
S : Float
Stock Price. The default is 100.
K : Float
Strike Price. The default is 100.
T : Float
Time to Maturity. The default is 0.25 (3 Months).
r : Float
Interest Rate. The default is 0.005 (50bps)
q : Float
Dividend Yield. The default is 0.
sigma : Float
Implied Volatility. The default is 0.2 (20%).
steps : Int
Number of time steps. The default is 1000.
option : Str
Type of option. 'put' or 'call'. The default is 'call'.
Returns
-------
Float
European Binomial Option Price.
"""
# Update pricing input parameters to default if not supplied
if 'refresh' in kwargs and kwargs['refresh']:
params = Utils.init_params(kwargs)
S = params['S']
K = params['K']
T = params['T']
r = params['r']
q = params['q']
sigma = params['sigma']
steps = params['steps']
option = params['option']
b = r - q
dt = T / steps
u = np.exp(sigma * np.sqrt(dt))
d = 1 / u
p = (np.exp(b * dt) - d) / (u - d)
a = int(np.log(K / (S * (d ** steps))) / np.log(u / d)) + 1
val = 0
if option == 'call':
for j in range(a, steps + 1):
val = (
val + (comb(steps, j) * (p ** j)
* ((1 - p) ** (steps - j))
* ((S * (u ** j) * (d ** (steps - j))) - K)))
if option == 'put':
for j in range(0, a):
val = (
val + (comb(steps, j) * (p ** j)
* ((1 - p) ** (steps - j))
* (K - ((S * (u ** j)) * (d ** (steps - j))))))
return np.exp(-r * T) * val
@staticmethod
def cox_ross_rubinstein_binomial(**kwargs):
"""
Cox-Ross-Rubinstein Binomial model
Parameters
----------
S : Float
Stock Price. The default is 100.
K : Float
Strike Price. The default is 100.
T : Float
Time to Maturity. The default is 0.25 (3 Months).
r : Float
Interest Rate. The default is 0.005 (50bps)
q : Float
Dividend Yield. The default is 0.
sigma : Float
Implied Volatility. The default is 0.2 (20%).
steps : Int
Number of time steps. The default is 1000.
option : Str
Type of option. 'put' or 'call'. The default is 'call'.
output_flag : Str
Whether to return 'price', 'delta', 'gamma', 'theta' or
'all'. The default is 'price'.
american : Bool
Whether the option is American. The default is False.
Returns
-------
result : Various
Depending on output flag:
'price' : Float; Option Price
'delta' : Float; Option Delta
'gamma' : Float; Option Gamma
'theta' : Float; Option Theta
'all' : Tuple; Option Price, Option Delta, Option
Gamma, Option Theta
"""
# Update pricing input parameters to default if not supplied
if 'refresh' in kwargs and kwargs['refresh']:
params = Utils.init_params(kwargs)
S = params['S']
K = params['K']
T = params['T']
r = params['r']
q = params['q']
sigma = params['sigma']
steps = params['steps']
option = params['option']
output_flag = params['output_flag']
american = params['american']
if option == 'call':
z = 1
else:
z = -1
b = r - q
dt = T / steps
u = np.exp(sigma * np.sqrt(dt))
d = 1 / u
p = (np.exp(b * dt) - d) / (u - d)
df = np.exp(-r * dt)
optionvalue = np.zeros((steps + 2))
returnvalue = np.zeros((4))
for i in range(steps + 1):
optionvalue[i] = max(
0, z * (S * (u ** i) * (d ** (steps - i)) - K))
for j in range(steps - 1, -1, -1):
for i in range(j + 1):
if american:
optionvalue[i] = (
(p * optionvalue[i + 1])
+ ((1 - p) * optionvalue[i])) * df
else:
optionvalue[i] = max(
(z * (S * (u ** i) * (d ** (j - i)) - K)),
((p * optionvalue[i + 1])
+ ((1 - p) * optionvalue[i])) * df)
if j == 2:
returnvalue[2] = (((optionvalue[2] - optionvalue[1])
/ (S * (u ** 2) - S)
- (optionvalue[1] - optionvalue[0])
/ (S - S * (d ** 2)))
/ (0.5 * (S * (u ** 2) - S * (d ** 2))))
returnvalue[3] = optionvalue[1]
if j == 1:
returnvalue[1] = ((
optionvalue[1] - optionvalue[0]) / (S * u - S * d))
returnvalue[3] = (returnvalue[3] - optionvalue[0]) / (2 * dt) / 365
returnvalue[0] = optionvalue[0]
if output_flag == 'price':
result = returnvalue[0]
if output_flag == 'delta':
result = returnvalue[1]
if output_flag == 'gamma':
result = returnvalue[2]
if output_flag == 'theta':
result = returnvalue[3]
if output_flag == 'all':
result = {'Price':returnvalue[0],
'Delta':returnvalue[1],
'Gamma':returnvalue[2],
'Theta':returnvalue[3]}
return result
@staticmethod
def leisen_reimer_binomial(**kwargs):
"""
Leisen Reimer Binomial
Parameters
----------
S : Float
Stock Price. The default is 100.
K : Float
Strike Price. The default is 100.
T : Float
Time to Maturity. The default is 0.25 (3 Months).
r : Float
Interest Rate. The default is 0.005 (50bps)
q : Float
Dividend Yield. The default is 0.
sigma : Float
Implied Volatility. The default is 0.2 (20%).
steps : Int
Number of time steps. The default is 1000.
option : Str
Type of option. 'put' or 'call'. The default is 'call'.
output_flag : Str
Whether to return 'price', 'delta', 'gamma' or 'all'. The
default is 'price'.
american : Bool
Whether the option is American. The default is False.
Returns
-------
result : Various
Depending on output flag:
'price' : Float; Option Price
'delta' : Float; Option Delta
'gamma' : Float; Option Gamma
'all' : Tuple; Option Price, Option Delta, Option Gamma
"""
# Update pricing input parameters to default if not supplied
if 'refresh' in kwargs and kwargs['refresh']:
params = Utils.init_params(kwargs)
S = params['S']
K = params['K']
T = params['T']
r = params['r']
q = params['q']
sigma = params['sigma']
steps = params['steps']
option = params['option']
output_flag = params['output_flag']
american = params['american']
if option == 'call':
z = 1
else:
z = -1
b = r - q
d1 = ((np.log(S / K) + (b + (0.5 * sigma ** 2)) * T)
/ (sigma * np.sqrt(T)))
d2 = ((np.log(S / K) + (b - (0.5 * sigma ** 2)) * T)
/ (sigma * np.sqrt(T)))
hd1 = (
0.5 + np.sign(d1) * (0.25 - 0.25 * np.exp(
-(d1 / (steps + 1 / 3 + 0.1 / (steps + 1))) ** 2
* (steps + 1 / 6))) ** (0.5))
hd2 = (
0.5 + np.sign(d2) * (0.25 - 0.25 * np.exp(
-(d2 / (steps + 1 / 3 + 0.1 / (steps + 1))) ** 2
* (steps + 1 / 6))) ** (0.5))
dt = T / steps
p = hd2
u = np.exp(b * dt) * hd1 / hd2
d = (np.exp(b * dt) - p * u) / (1 - p)
df = np.exp(-r * dt)
optionvalue = np.zeros((steps + 1))
returnvalue = np.zeros((4))
for i in range(steps + 1):
optionvalue[i] = max(0, z * (S * (u ** i) * (
d ** (steps - i)) - K))
for j in range(steps - 1, -1, -1):
for i in range(j + 1):
if american:
optionvalue[i] = (
(p * optionvalue[i + 1])
+ ((1 - p) * optionvalue[i])) * df
else:
optionvalue[i] = max(
(z * (S * (u ** i) * (d ** (j - i)) - K)),
((p * optionvalue[i + 1])
+ ((1 - p) * optionvalue[i])) * df)
if j == 2:
returnvalue[2] = (
((optionvalue[2] - optionvalue[1])
/ (S * (u ** 2) - S * u * d)
- (optionvalue[1] - optionvalue[0])
/ (S * u * d - S * (d ** 2)))
/ (0.5 * (S * (u ** 2) - S * (d ** 2))))
returnvalue[3] = optionvalue[1]
if j == 1:
returnvalue[1] = ((optionvalue[1] - optionvalue[0])
/ (S * u - S * d))
returnvalue[0] = optionvalue[0]
if output_flag == 'price':
result = returnvalue[0]
if output_flag == 'delta':
result = returnvalue[1]
if output_flag == 'gamma':
result = returnvalue[2]
if output_flag == 'all':
result = {'Price':returnvalue[0],
'Delta':returnvalue[1],
'Gamma':returnvalue[2]}
return result
@staticmethod
def trinomial_tree(**kwargs):
"""
Trinomial Tree
Parameters
----------
S : Float
Stock Price. The default is 100.
K : Float
Strike Price. The default is 100.
T : Float
Time to Maturity. The default is 0.25 (3 Months).
r : Float
Interest Rate. The default is 0.005 (50bps)
q : Float
Dividend Yield. The default is 0.
sigma : Float
Implied Volatility. The default is 0.2 (20%).
steps : Int
Number of time steps. The default is 1000.
option : Str
Type of option, 'put' or 'call'. The default is 'call'.
output_flag : Str
Whether to return 'price', 'delta', 'gamma', 'theta' or
'all'. The default is 'price'.
american : Bool
Whether the option is American. The default is False.
Returns
-------
result : Various
Depending on output flag:
'price' : Float; Option Price
'delta' : Float; Option Delta
'gamma' : Float; Option Gamma
'theta' : Float; Option Theta
'all' : Tuple; Option Price, Option Delta, Option Gamma,
Option Theta
"""
# Update pricing input parameters to default if not supplied
if 'refresh' in kwargs and kwargs['refresh']:
params = Utils.init_params(kwargs)
S = params['S']
K = params['K']
T = params['T']
r = params['r']
q = params['q']
sigma = params['sigma']
steps = params['steps']
option = params['option']
output_flag = params['output_flag']
american = params['american']
if option == 'call':
z = 1
else:
z = -1
b = r - q
dt = T / steps
u = np.exp(sigma * np.sqrt(2 * dt))
d = np.exp(-sigma * np.sqrt(2 * dt))
pu = ((np.exp(b * dt / 2) - np.exp(-sigma * np.sqrt(dt / 2)))
/ (np.exp(sigma * np.sqrt(dt / 2))
- np.exp(-sigma * np.sqrt(dt / 2)))) ** 2
pd = ((np.exp(sigma * np.sqrt(dt / 2)) - np.exp(b * dt / 2))
/ (np.exp(sigma * np.sqrt(dt / 2))
- np.exp(-sigma * np.sqrt(dt / 2)))) ** 2
pm = 1 - pu - pd
df = np.exp(-r * dt)
optionvalue = np.zeros((steps * 2 + 2))
returnvalue = np.zeros((4))
for i in range(2 * steps + 1):
optionvalue[i] = max(
0, z * (S * (u ** max(i - steps, 0))
* (d ** (max((steps - i), 0))) - K))
for j in range(steps - 1, -1, -1):
for i in range(j * 2 + 1):
optionvalue[i] = (pu * optionvalue[i + 2]
+ pm * optionvalue[i + 1]
+ pd * optionvalue[i]) * df
if american:
optionvalue[i] = max(
z * (S * (u ** max(i - j, 0))
* (d ** (max((j - i), 0))) - K), optionvalue[i])
if j == 1:
returnvalue[1] = (
(optionvalue[2] - optionvalue[0]) / (S * u - S * d))
returnvalue[2] = (
((optionvalue[2] - optionvalue[1]) / (S * u - S)
- (optionvalue[1] - optionvalue[0]) / (S - S * d ))
/ (0.5 * ((S * u) - (S * d))))
returnvalue[3] = optionvalue[0]
returnvalue[3] = (returnvalue[3] - optionvalue[0]) / dt / 365
returnvalue[0] = optionvalue[0]
if output_flag == 'price':
result = returnvalue[0]
if output_flag == 'delta':
result = returnvalue[1]
if output_flag == 'gamma':
result = returnvalue[2]
if output_flag == 'theta':
result = returnvalue[3]
if output_flag == 'all':
result = {'Price':returnvalue[0],
'Delta':returnvalue[1],
'Gamma':returnvalue[2],
'Theta':returnvalue[3]}
return result
@classmethod
def implied_trinomial_tree(cls, **kwargs):
"""
Implied Trinomial Tree
Parameters
----------
S : Float
Stock Price. The default is 100.
K : Float
Strike Price. The default is 100.
T : Float
Time to Maturity. The default is 0.25 (3 Months).
r : Float
Interest Rate. The default is 0.005 (50bps)
q : Float
Dividend Yield. The default is 0.
sigma : Float
Implied Volatility. The default is 0.2 (20%).
steps_itt : Int
Number of time steps. The default is 10.
option : Str
Type of option. 'put' or 'call'. The default is 'call'.
output_flag : Str
UPM: A matrix of implied up transition probabilities
UPni: The implied up transition probability at a single
node
DPM: A matrix of implied down transition probabilities
DPni: The implied down transition probability at a single
node
LVM: A matrix of implied local volatilities
LVni: The local volatility at a single node
ADM: A matrix of Arrow-Debreu prices at a single node
ADni: The Arrow-Debreu price at a single node (at
time step - 'step' and state - 'state')
price: The value of the European option
step : Int
Time step used for Arrow Debreu price at single node. The
default is 5.
state : Int
State position used for Arrow Debreu price at single node.
The default is 5.
skew : Float
Rate at which volatility increases (decreases) for every
one point decrease
(increase) in the strike price. The default is 0.0004.
Returns
-------
result : Various
Depending on output flag:
UPM: A matrix of implied up transition probabilities
UPni: The implied up transition probability at a single
node
DPM: A matrix of implied down transition probabilities
DPni: The implied down transition probability at a
single node
LVM: A matrix of implied local volatilities
LVni: The local volatility at a single node
ADM: A matrix of Arrow-Debreu prices at a single node
ADni: The Arrow-Debreu price at a single node (at
time step - 'step' and state - 'state')
price: The European option price.
"""
# Update pricing input parameters to default if not supplied
if 'refresh' in kwargs and kwargs['refresh']:
params = Utils.init_params(kwargs)
S = params['S']
K = params['K']
T = params['T']
r = params['r']
q = params['q']
sigma = params['sigma']
steps_itt = params['steps_itt']
option = params['option']
output_flag = params['output_flag']
step = params['step']
state = params['state']
skew = params['skew']
if option == 'call':
z = 1
else:
z = -1
optionvaluenode = np.zeros((steps_itt * 2 + 1))
# Arrow Debreu prices
ad = np.zeros((steps_itt + 1, steps_itt * 2 + 1), dtype='float')
pu = np.zeros((steps_itt, steps_itt * 2 - 1), dtype='float')
pd = np.zeros((steps_itt, steps_itt * 2 - 1), dtype='float')
localvol = np.zeros((steps_itt, steps_itt * 2 - 1), dtype='float')
dt = T / steps_itt
u = np.exp(sigma * np.sqrt(2 * dt))
d = 1 / u
df = np.exp(-r * dt)
ad[0, 0] = 1
for n in range(steps_itt):
for i in range(n * 2 + 1):
val = 0
Si1 = (S * (u ** (max(i - n, 0)))
* (d ** (max(n * 2 - n - i, 0))))
Si = Si1 * d
Si2 = Si1 * u
b = r - q
Fi = Si1 * np.exp(b * dt)
sigmai = sigma + (S - Si1) * skew
if i < (n * 2) / 2 + 1:
for j in range(i):
Fj = (S * (u ** (max(j - n, 0)))
* (d ** (max(n * 2 - n - j, 0)))
* np.exp(b * dt))
val = val + ad[n, j] * (Si1 - Fj)
optionvalue = cls.trinomial_tree(
S=S, K=Si1, T=(n + 1) * dt, r=r, q=q, sigma=sigmai,
steps=(n + 1), option='put', output_flag='price',
american=False, refresh=True)
qi = ((np.exp(r * dt) * optionvalue - val)
/ (ad[n, i] * (Si1 - Si)))
pi = (Fi + qi * (Si1 - Si) - Si1) / (Si2 - Si1)
else:
optionvalue = cls.trinomial_tree(
S=S, K=Si1, T=(n + 1) * dt, r=r, q=q, sigma=sigmai,
steps=(n + 1), option='call', output_flag='price',
american=False, refresh=True)
val = 0
for j in range(i + 1, n * 2 + 1):
Fj = (S * (u ** (max(j - n, 0)))
* (d ** (max(n * 2 - n - j, 0)))
* np.exp(b * dt))
val = val + ad[n, j] * (Fj- Si1)
pi = ((np.exp(r * dt) * optionvalue - val)
/ (ad[n, i] * (Si2 - Si1)))
qi = (Fi - pi * (Si2 - Si1) - Si1) / (Si - Si1)
# Replacing negative probabilities
if pi < 0 or pi > 1 or qi < 0 or qi > 1:
if Si2 > Fi > Si1:
pi = (1 / 2 * ((Fi - Si1) / (Si2 - Si1)
+ (Fi - Si) / (Si2 - Si)))
qi = 1 / 2 * ((Si2 - Fi) / (Si2 - Si))
elif Si1 > Fi > Si:
pi = 1 / 2 * ((Fi - Si) / (Si2 - Si))
qi = (1 / 2 * ((Si2 - Fi) / (Si2 - Si1)
+ (Si1 - Fi) / (Si1 - Si)))
pd[n, i] = qi
pu[n, i] = pi
# Calculating local volatilities
Fo = (pi * Si2 + qi * Si + (1 - pi -qi) * Si1)
localvol[n, i] = np.sqrt(
(pi * (Si2 - Fo) ** 2
+ (1 - pi - qi) * (Si1 - Fo) ** 2
+ qi * (Si - Fo) ** 2) / (Fo ** 2 * dt))
# Calculating Arrow-Debreu prices
if n == 0:
ad[n + 1, i] = qi * ad[n, i] * df
ad[n + 1, i + 1] = (1 - pi - qi) * ad[n, i] * df
ad[n + 1, i + 2] = pi * ad[n, i] * df
elif n > 0 and i == 0:
ad[n + 1, i] = qi * ad[n, i] * df
elif n > 0 and i == n * 2:
ad[n + 1, i] = (
pu[n, i - 2] * ad[n, i - 2] * df
+ (1 - pu[n, i - 1] - pd[n, i - 1])
* (ad[n, i - 1]) * df + qi * (ad[n, i] * df))
ad[n + 1, i + 1] = (
pu[n, i - 1] * (ad[n, i - 1]) * df
+ (1 - pi - qi) * (ad[n, i] * df))
ad[n + 1, i + 2] = pi * ad[n, i] * df
elif n > 0 and i == 1:
ad[n + 1, i] = (
(1 - pu[n, i - 1] - (pd[n, i - 1]))
* ad[n, i - 1] * df + (qi * ad[n, i] * df))
else:
ad[n + 1, i] = (
pu[n, i - 2] * (ad[n, i - 2]) * df
+ (1 - pu[n, i - 1] - pd[n, i - 1])
* (ad[n, i - 1]) * df + qi * (ad[n, i]) * df)
# Calculation of option price using the implied trinomial tree
for i in range(2 * steps_itt + 1):
optionvaluenode[i] = max(
0, z * (S * (u ** max(i - steps_itt, 0))
* (d ** (max((steps_itt - i), 0))) - K))
for n in range(steps_itt - 1, -1, -1):
for i in range(n * 2 + 1):
optionvaluenode[i] = (
(pu[n, i] * optionvaluenode[i + 2]
+ (1 - pu[n, i] - pd[n, i])
* (optionvaluenode[i + 1])
+ pd[n, i] * (optionvaluenode[i])) * df)
price = optionvaluenode[0]
output_dict = {
'UPM':pu,
'UPni':pu[step, state],
'DPM':pd,
'DPni':pd[step, state],
'LVM':localvol,
'LVni':localvol[step, state],
'ADM':ad,
'ADni':ad[step, state],
'price':price
}
return output_dict.get(
output_flag, "Please select a valid output flag")
| 34.756874
| 77
| 0.413041
| 2,747
| 24,017
| 3.58755
| 0.075719
| 0.049721
| 0.059665
| 0.027702
| 0.814307
| 0.797666
| 0.770675
| 0.750989
| 0.727144
| 0.703704
| 0
| 0.038279
| 0.457218
| 24,017
| 690
| 78
| 34.807246
| 0.717705
| 0.260399
| 0
| 0.600529
| 0
| 0
| 0.031188
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013228
| false
| 0
| 0.007937
| 0
| 0.037037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
eb4236ace9e6da14b37d9779a6ba431466dd90c8
| 39
|
py
|
Python
|
Utilities/__init__.py
|
emcknight/ChessEngineAASpr2022-
|
10ddba62cc9c558facdfe3c7ba65b24cc2c6d356
|
[
"MIT"
] | 2
|
2022-03-19T20:28:06.000Z
|
2022-03-25T16:35:32.000Z
|
Utilities/__init__.py
|
emcknight/ChessEngineAASpr2022-
|
10ddba62cc9c558facdfe3c7ba65b24cc2c6d356
|
[
"MIT"
] | null | null | null |
Utilities/__init__.py
|
emcknight/ChessEngineAASpr2022-
|
10ddba62cc9c558facdfe3c7ba65b24cc2c6d356
|
[
"MIT"
] | 1
|
2022-03-25T21:32:56.000Z
|
2022-03-25T21:32:56.000Z
|
from .SearchUtils import Memo, MemoNode
| 39
| 39
| 0.846154
| 5
| 39
| 6.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
de4b03f8fdd2e9e67a942340865a95af20b5df5b
| 10,294
|
py
|
Python
|
code/section_gp_fit.py
|
mrtommyb/GP_model_Kepler_data
|
a51ba4b6ab325484b47b2e594539f537cacdbb62
|
[
"MIT"
] | null | null | null |
code/section_gp_fit.py
|
mrtommyb/GP_model_Kepler_data
|
a51ba4b6ab325484b47b2e594539f537cacdbb62
|
[
"MIT"
] | null | null | null |
code/section_gp_fit.py
|
mrtommyb/GP_model_Kepler_data
|
a51ba4b6ab325484b47b2e594539f537cacdbb62
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import os
import sys
import numpy as np
import matplotlib.pyplot as plt
import george
from george.kernels import ExpSquaredKernel, RBFKernel
from ktransit import LCModel, FitTransit
from scipy import optimize
def ret_opt(params,time,flux,yerr):
period, T0, rprs, impact, noiseA, noiseW = params
M = LCModel()
M.add_star(rho=0.0073,ld1=0.5,ld2=0.4)
M.add_planet(T0=T0,period=period,
impact=impact,rprs=rprs)
M.add_data(time=time)
resid = flux - M.transitmodel
kernel = noiseA * ExpSquaredKernel(noiseW)
gp = george.GaussianProcess(kernel)
lnlike = 0.
for i in np.arange(len(time) // 1000)[0:10]:
section = np.arange(i*1000,i*1000 + 1000)
gp.compute(time[section], yerr[section])
lnlike += gp.lnlikelihood(resid[section])
return -lnlike
def ret_product(params,time,flux,yerr):
(period, T0, rprs, impact, noiseA1, noiseW1,
noiseA2, noiseW2, noiseM2) = params
M = LCModel()
M.add_star(rho=0.0073,ld1=0.5,ld2=0.4)
M.add_planet(T0=T0,period=period,
impact=impact,rprs=rprs)
M.add_data(time=time)
resid = flux - M.transitmodel
kernel = (((noiseA1 * ExpSquaredKernel(noiseW1)) *
(noiseA2 * ExpSquaredKernel(noiseW2))) + noiseM2)
gp = george.GaussianProcess(kernel)
lnlike = 0.
for i in np.arange(len(time) // 1000)[0:10]:
section = np.arange(i*1000,i*1000 + 1000)
gp.compute(time[section], yerr[section])
lnlike += gp.lnlikelihood(resid[section])
return -lnlike
def ret_sum(params,time,flux,yerr):
(period, T0, rprs, impact,
alb,occ,ell,ecosw, esinw,
noiseA1, noiseW1,
noiseA2, noiseW2 )= params
M = LCModel()
M.add_star(rho=0.0073,ld1=0.5,ld2=0.4)
M.add_planet(T0=T0,period=period,
impact=impact,rprs=rprs,
alb=alb,occ=occ,ell=ell,
ecosw=ecosw, esinw=esinw)
M.add_data(time=time)
resid = flux - M.transitmodel
kernel = ((noiseA1**2 * RBFKernel(noiseW1)) +
(noiseA2**2 * RBFKernel(noiseW2)))
gp = george.GaussianProcess(kernel)
lnlike = 0.
for i in np.arange(len(time) // 1000)[0:10]:
section = np.arange(i*1000,i*1000 + 1000)
gp.compute(time[section], yerr[section])
lnlike += gp.lnlikelihood(resid[section])
return -lnlike
def ret_sum_ln(params,time,flux,yerr):
(period, T0, rprs, impact,
alb,occ,ell,ecosw, esinw,
lnnoiseA1, lnnoiseW1,
lnnoiseA2, lnnoiseW2 )= params
M = LCModel()
M.add_star(rho=0.0073,ld1=0.5,ld2=0.4)
M.add_planet(T0=T0,period=period,
impact=impact,rprs=rprs,
alb=alb,occ=occ,ell=ell,
ecosw=ecosw, esinw=esinw)
M.add_data(time=time)
resid = flux - M.transitmodel
kernel = ((np.exp(lnnoiseA1)**2 * RBFKernel(np.exp(lnnoiseW1))) +
(np.exp(lnnoiseA2)**2 * RBFKernel(np.exp(lnnoiseW2))))
gp = george.GaussianProcess(kernel)
lnlike = 0.
for i in np.arange(len(time) // 1000)[0:10]:
section = np.arange(i*1000,i*1000 + 1000)
gp.compute(time[section], yerr[section])
lnlike += gp.lnlikelihood(resid[section])
return -lnlike
def ret_simple_ln(params, time, flux, yerr):
    """Negative chunked GP log-likelihood; single RBF kernel, log-space noise."""
    (period, T0, rprs, impact,
        alb, occ, ell, ecosw, esinw,
        lnnoiseA1, lnnoiseW1) = params
    model = LCModel()
    model.add_star(rho=0.0073, ld1=0.5, ld2=0.4)
    model.add_planet(T0=T0, period=period,
                     impact=impact, rprs=rprs,
                     alb=alb, occ=occ, ell=ell,
                     ecosw=ecosw, esinw=esinw)
    model.add_data(time=time)
    residual = flux - model.transitmodel
    kernel = np.exp(lnnoiseA1) ** 2 * RBFKernel(np.exp(lnnoiseW1))
    gp = george.GaussianProcess(kernel)
    total = 0.
    # All 1000-point chunks are used here (no chunk cap, unlike ret_sum_ln).
    for chunk in np.arange(len(time) // 1000):
        idx = np.arange(chunk * 1000, chunk * 1000 + 1000)
        gp.compute(time[idx], yerr[idx])
        total += gp.lnlikelihood(residual[idx])
    return -total
def ret_simple(params, time, flux, yerr):
    """Negative chunked GP log-likelihood; single RBF kernel, linear noise."""
    (period, T0, rprs, impact,
        alb, occ, ell, ecosw, esinw,
        noiseA1, noiseW1) = params
    model = LCModel()
    model.add_star(rho=0.0073, ld1=0.5, ld2=0.4)
    model.add_planet(T0=T0, period=period,
                     impact=impact, rprs=rprs,
                     alb=alb, occ=occ, ell=ell,
                     ecosw=ecosw, esinw=esinw)
    model.add_data(time=time)
    residual = flux - model.transitmodel
    kernel = noiseA1 ** 2 * RBFKernel(noiseW1)
    gp = george.GaussianProcess(kernel)
    total = 0.
    # Smaller 300-point chunks than the 1000-point variants above.
    for chunk in np.arange(len(time) // 300):
        idx = np.arange(chunk * 300, chunk * 300 + 300)
        gp.compute(time[idx], yerr[idx])
        total += gp.lnlikelihood(residual[idx])
    return -total
def ret_simplest(params, time, flux, yerr, fixed):
    """Negative chunked GP log-likelihood with only the noise terms free.

    The transit parameters are supplied in *fixed*; *params* carries just
    the kernel amplitude and width.
    """
    (period, T0, rprs, impact,
        alb, occ, ell, ecosw, esinw) = fixed
    noiseA1, noiseW1 = params
    model = LCModel()
    model.add_star(rho=0.0073, ld1=0.5, ld2=0.4)
    model.add_planet(T0=T0, period=period,
                     impact=impact, rprs=rprs,
                     alb=alb, occ=occ, ell=ell,
                     ecosw=ecosw, esinw=esinw)
    model.add_data(time=time)
    residual = flux - model.transitmodel
    kernel = noiseA1 ** 2 * RBFKernel(noiseW1)
    gp = george.GaussianProcess(kernel)
    total = 0.
    # 300-point chunks, same scheme as ret_simple.
    for chunk in np.arange(len(time) // 300):
        idx = np.arange(chunk * 300, chunk * 300 + 300)
        gp.compute(time[idx], yerr[idx])
        total += gp.lnlikelihood(residual[idx])
    return -total
if __name__ == '__main__':
    # Driver script: load a light curve, fit transit + GP-noise parameters
    # with L-BFGS-B under one of several kernel configurations (selected by
    # the boolean flags below), then compute the GP's predictive mean on the
    # residuals in 300-point chunks.
    # Columns after transpose: time, flux, flux-error.
    data = np.genfromtxt('/Users/tom/Projects/koi2133/data/lc.dat').T
    time = data[0] #test on shorter data set
    flux = data[1]
    # NOTE(review): error bars are divided by 4 -- presumably an ad-hoc
    # rescaling of the catalogue uncertainties; confirm against the source.
    ferr = (data[2] / 4.)
    # Model-selection flags: exactly one branch below runs. With all flags
    # False the first (simplest ExpSquared) branch runs.
    product = False
    sumkernel = False
    simple = False
    even_simpler = True
    if not product and not sumkernel and not simple and not even_simpler:
        ## vary the period, T0, rprs, b, noiseA, noiseW
        bounds = ((None,None),(None,None),
            (0.01,0.04),(0.00001,0.999),(None,None),(None,None))
        guess = (6.24658,136.3966,0.02255,0.5,0.05,0.01)
        # NOTE(review): ret_opt is defined earlier in the file (outside this
        # view); it is the objective for this 6-parameter model.
        lsqout = optimize.fmin_l_bfgs_b(ret_opt,guess,
            args=(time,flux,ferr),approx_grad=True,bounds=bounds)
        period, T0, rprs, impact, noiseA, noiseW = lsqout[0]
        kernel = noiseA * ExpSquaredKernel(noiseW)
        gp = george.GaussianProcess(kernel)
    elif product:
        ## vary the period, T0, rprs, b, noiseA1, noiseW1,
        ## noiseA2, noiseW2, noiseM2
        bounds = ((None,None),(None,None),
            (0.01,0.04),(0.00001,0.999),(None,None),(None,None),
            (None,None),(None,None),(None,None))
        guess = (6.24658,136.3966,0.02255,0.5,0.01,6.,
            0.01,0.12,0.0)
        # m / factr tune L-BFGS-B memory and convergence tolerance.
        lsqout = optimize.fmin_l_bfgs_b(ret_product,guess,
            args=(time,flux,ferr),approx_grad=True,bounds=bounds,m=100,factr=1.E6)
        (period, T0, rprs, impact, noiseA1, noiseW1,
            noiseA2, noiseW2, noiseM2) = lsqout[0]
        # Rebuild the best-fit product kernel for prediction below.
        kernel = (((noiseA1 * ExpSquaredKernel(noiseW1)) *
            (noiseA2 * ExpSquaredKernel(noiseW2))) + noiseM2)
        gp = george.GaussianProcess(kernel)
    elif sumkernel:
        ## vary the period, T0, rprs, b,
        ## alb,occ,ell,ecosw, esinw
        ## noiseA1, noiseW1,
        ## noiseA2, noiseW2
        bounds = ((None,None),(None,None),
            (0.01,0.03),(0.00001,0.999),
            (None,None),(None,None),
            (None,None),(None,None),(None,None),
            (None,None),(None,None),
            (None,None),(None,None))
        # Noise terms are optimized in log space (ret_sum_ln), hence np.log
        # on the initial guesses.
        guess = (6.24658,136.3966,0.02255,0.8,
            30., 30., 60., 0.0, 0.0,
            np.log(5.E-4), np.log(0.07),
            np.log(2.E-4), np.log(3.0))
        lsqout = optimize.fmin_l_bfgs_b(ret_sum_ln,guess,
            args=(time,flux,ferr),approx_grad=True,bounds=bounds,m=300,factr=1.E7)
        (period, T0, rprs, impact,
            alb,occ,ell,ecosw, esinw,
            lnnoiseA1, lnnoiseW1,
            lnnoiseA2, lnnoiseW2) = lsqout[0]
        # Back-transform the log noise parameters into the sum kernel.
        kernel = ((np.exp(lnnoiseA1)**2 * RBFKernel(np.exp(lnnoiseW1))) +
            (np.exp(lnnoiseA2)**2 * RBFKernel(np.exp(lnnoiseW2))))
        gp = george.GaussianProcess(kernel)
    elif simple:
        # Single RBF kernel; noise parameters in linear space (ret_simple).
        bounds = ((None,None),(None,None),
            (0.01,0.03),(0.00001,0.999),
            (None,None),(None,None),
            (None,None),(None,None),(None,None),
            (None,None),(None,None))
        guess = (6.2465796,136.39661,0.02255,0.4,
            30., 30., 60., 0.0, 0.0,
            5.E-4, 0.07)
        lsqout = optimize.fmin_l_bfgs_b(ret_simple,guess,
            args=(time,flux,ferr),approx_grad=True,bounds=bounds,
            m=300,factr=1.E7)
        (period, T0, rprs, impact,
            alb,occ,ell,ecosw, esinw,
            noiseA1, noiseW1) = lsqout[0]
        kernel = (noiseA1**2 * RBFKernel(noiseW1))
        gp = george.GaussianProcess(kernel)
    elif even_simpler:
        # Only the two GP noise parameters are optimized; all transit
        # parameters are held fixed at the values below (ret_simplest).
        bounds = (
            (None,None),(None,None))
        guess = (5.E-4, 0.07)
        fixed = (6.2465796,136.39661,0.02255,0.8,
            13., 30., 45., 0.0, 0.0)
        # Unpack the fixed values so the transit model below can use them.
        (period, T0, rprs, impact,
            alb,occ,ell,ecosw, esinw) = fixed
        lsqout = optimize.fmin_l_bfgs_b(ret_simplest,guess,
            args=(time,flux,ferr,fixed),approx_grad=True,bounds=bounds,
            m=300,factr=1.E7)
        (noiseA1, noiseW1) = lsqout[0]
        kernel = (noiseA1**2 * RBFKernel(noiseW1))
        gp = george.GaussianProcess(kernel)
    #transit fit
    # Rebuild the transit model with the best-fit (or fixed) parameters.
    # NOTE(review): the first two branches never assign alb/occ/ell/ecosw/
    # esinw, so add_planet below would raise NameError if they ran --
    # currently only the even_simpler path is enabled.
    M = LCModel()
    M.add_star(rho=0.0073,ld1=0.5,ld2=0.4)
    M.add_planet(T0=T0,period=period,
        impact=impact,rprs=rprs,
        alb=alb,occ=occ,ell=ell,
        ecosw=ecosw, esinw=esinw)
    M.add_data(time=time)
    #sample = np.array([])
    #for i in np.arange(len(time) // 1000):
    #    section = np.arange(i*1000,i*1000 + 1000)
    #    gp.compute(time[section], ferr[section])
    #    sample = np.r_[sample,gp.sample_conditional(
    #        flux[section] - M.transitmodel[section],time[section])]
    #gp.compute(time, ferr)
    #samples = gp.sample_conditional(flux, time, N=100)
    print('here')
    # Predictive mean of the GP on the transit residuals, computed in
    # 300-point chunks; gp.predict returns (mean, cov), [0] keeps the mean.
    sample = np.array([])
    for i in np.arange(len(time) // 300):
        section = np.arange(i*300,i*300 + 300)
        gp.compute(time[section], ferr[section])
        sample = np.r_[sample,gp.predict(
            flux[section] - M.transitmodel[section],time[section])[0]]
| 34.894915
| 82
| 0.59967
| 1,446
| 10,294
| 4.214385
| 0.116874
| 0.074828
| 0.094519
| 0.102396
| 0.869052
| 0.850837
| 0.834099
| 0.796357
| 0.763374
| 0.73597
| 0
| 0.081533
| 0.244609
| 10,294
| 294
| 83
| 35.013605
| 0.70216
| 0.059938
| 0
| 0.683128
| 0
| 0
| 0.005287
| 0.004043
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028807
| false
| 0
| 0.037037
| 0
| 0.09465
| 0.00823
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
de6824ba50c4c47941090f492c3461d542d0b2c8
| 49
|
py
|
Python
|
fig_html/utils/__init__.py
|
kazetof/fightml
|
5ce43735709170e50373f7b97676ac7e38ab73e5
|
[
"MIT"
] | 1
|
2019-10-05T14:28:15.000Z
|
2019-10-05T14:28:15.000Z
|
fig_html/utils/__init__.py
|
kazetof/fightml
|
5ce43735709170e50373f7b97676ac7e38ab73e5
|
[
"MIT"
] | null | null | null |
fig_html/utils/__init__.py
|
kazetof/fightml
|
5ce43735709170e50373f7b97676ac7e38ab73e5
|
[
"MIT"
] | null | null | null |
from .sample_fig_maker import SampleFiguresMaker
| 24.5
| 48
| 0.897959
| 6
| 49
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ded15aabbe4d56dca3ee52daf7f986f91583228a
| 32
|
py
|
Python
|
service_locator/decorators.py
|
alexescalonafernandez/service-locator
|
9b3887a08c55d27943e3fa9961e0377ce10295ab
|
[
"MIT"
] | 1
|
2018-02-28T13:47:27.000Z
|
2018-02-28T13:47:27.000Z
|
service_locator/decorators.py
|
alexescalonafernandez/service-locator
|
9b3887a08c55d27943e3fa9961e0377ce10295ab
|
[
"MIT"
] | null | null | null |
service_locator/decorators.py
|
alexescalonafernandez/service-locator
|
9b3887a08c55d27943e3fa9961e0377ce10295ab
|
[
"MIT"
] | 1
|
2018-02-28T13:47:29.000Z
|
2018-02-28T13:47:29.000Z
|
from .ioc import ServiceProvider
| 32
| 32
| 0.875
| 4
| 32
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09375
| 32
| 1
| 32
| 32
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
72352468b5bc72beab91c608b4016fddac3608bd
| 4,278
|
py
|
Python
|
tests/0800_builder/05_scan.py
|
sveetch/Optimus
|
983aebeccd2ada7a5a0ab96f9296d4bba1112022
|
[
"MIT"
] | 2
|
2019-05-31T00:23:15.000Z
|
2021-04-26T07:26:16.000Z
|
tests/0800_builder/05_scan.py
|
sveetch/Optimus
|
983aebeccd2ada7a5a0ab96f9296d4bba1112022
|
[
"MIT"
] | 27
|
2015-04-21T14:43:26.000Z
|
2022-01-29T00:42:53.000Z
|
tests/0800_builder/05_scan.py
|
sveetch/Optimus
|
983aebeccd2ada7a5a0ab96f9296d4bba1112022
|
[
"MIT"
] | 1
|
2017-05-21T17:32:28.000Z
|
2017-05-21T17:32:28.000Z
|
import os
import importlib
import shutil
import pytest
from optimus.setup_project import setup_project
from optimus.conf.loader import import_pages_module
from optimus.pages.builder import PageBuilder
from optimus.assets.registry import register_assets
@pytest.mark.parametrize(
"sample_fixture_name,attempted_templates",
[
(
"basic_template",
[
"index.html",
"skeleton.html",
],
),
(
"basic2_template",
[
"_metas.html",
"index.html",
"skeleton.html",
"sub/bar.html",
"sub/base.html",
"sub/foo.html",
],
),
],
)
def test_scan_item(
minimal_basic_settings,
fixtures_settings,
reset_syspath,
temp_builds_dir,
sample_fixture_name,
attempted_templates,
):
"""
Scan page templates for each page
This will only works for sample fixtures that use the same as
'basic_template'.
"""
basepath = temp_builds_dir.join("builder_scan_item_{}".format(sample_fixture_name))
project_name = sample_fixture_name
projectdir = os.path.join(basepath.strpath, project_name)
# Copy sample from fixtures dir
templatedir = os.path.join(fixtures_settings.fixtures_path, sample_fixture_name)
shutil.copytree(templatedir, projectdir)
# Setup project
setup_project(projectdir, "dummy_value")
# Get basic sample settings
settings = minimal_basic_settings(projectdir)
# Init webassets and builder
assets_env = register_assets(settings)
builder = PageBuilder(settings, assets_env=assets_env)
pages_map = import_pages_module(settings.PAGES_MAP, basedir=projectdir)
# NOTE: We need to force reloading importation else the previous import settings
# with different values, is still re-used
pages_map = importlib.reload(pages_map)
# Collect finded templates for each defined page view
knowed = set([])
for pageview in pages_map.PAGES:
found = builder.scan_item(pageview)
knowed.update(found)
# We dont really care about order, so aply default sorting
assert sorted(list(knowed)) == attempted_templates
# Cleanup sys.path for next tests
reset_syspath(projectdir)
@pytest.mark.parametrize(
"sample_fixture_name,attempted_templates",
[
(
"basic_template",
[
"index.html",
"skeleton.html",
],
),
(
"basic2_template",
[
"_metas.html",
"index.html",
"skeleton.html",
"sub/bar.html",
"sub/base.html",
"sub/foo.html",
],
),
],
)
def test_scan_bulk(
minimal_basic_settings,
fixtures_settings,
reset_syspath,
temp_builds_dir,
sample_fixture_name,
attempted_templates,
):
"""
Scan page templates all pages
This will only works for sample fixtures that use the same as
'basic_template'.
"""
basepath = temp_builds_dir.join("builder_scan_bulk_{}".format(sample_fixture_name))
project_name = sample_fixture_name
projectdir = os.path.join(basepath.strpath, project_name)
# Copy sample from fixtures dir
templatedir = os.path.join(fixtures_settings.fixtures_path, sample_fixture_name)
shutil.copytree(templatedir, projectdir)
# Setup project
setup_project(projectdir, "settings")
# Get basic sample settings
settings = minimal_basic_settings(projectdir)
# Init webassets and builder
assets_env = register_assets(settings)
builder = PageBuilder(settings, assets_env=assets_env)
pages_map = import_pages_module(settings.PAGES_MAP, basedir=projectdir)
# NOTE: We need to force reloading importation else the previous import settings
# with different values, is still re-used
pages_map = importlib.reload(pages_map)
# Collect finded templates for each defined page view
knowed = builder.scan_bulk(pages_map.PAGES)
assert sorted(list(knowed)) == attempted_templates
# Cleanup sys.path for next tests
reset_syspath(projectdir)
| 28.331126
| 87
| 0.649836
| 477
| 4,278
| 5.60587
| 0.257862
| 0.048616
| 0.063575
| 0.038893
| 0.84368
| 0.84368
| 0.84368
| 0.84368
| 0.84368
| 0.84368
| 0
| 0.00064
| 0.269285
| 4,278
| 150
| 88
| 28.52
| 0.854766
| 0.20921
| 0
| 0.693069
| 0
| 0
| 0.115361
| 0.023494
| 0
| 0
| 0
| 0
| 0.019802
| 1
| 0.019802
| false
| 0
| 0.118812
| 0
| 0.138614
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a0d9f8e377ebac212774be05590905f3cd1629ab
| 268
|
py
|
Python
|
oxpe2elib/channel/msg/protocol/__init__.py
|
cketley/oxpe2e
|
88bc33761e83ea0df7dc9cec435260058b367f8e
|
[
"Apache-2.0"
] | null | null | null |
oxpe2elib/channel/msg/protocol/__init__.py
|
cketley/oxpe2e
|
88bc33761e83ea0df7dc9cec435260058b367f8e
|
[
"Apache-2.0"
] | null | null | null |
oxpe2elib/channel/msg/protocol/__init__.py
|
cketley/oxpe2e
|
88bc33761e83ea0df7dc9cec435260058b367f8e
|
[
"Apache-2.0"
] | null | null | null |
#from oxpe2elib.channel import identity
#from oxpe2elib.listener import listener
#from oxpe2elib.channel import pingService
from oxpe2elib.channel.msg.protocol import protocol
#from oxpe2elib.received import receivedMessage
from oxpe2elib.channel.msg import message
| 33.5
| 51
| 0.854478
| 33
| 268
| 6.939394
| 0.363636
| 0.340611
| 0.349345
| 0.227074
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024793
| 0.097015
| 268
| 7
| 52
| 38.285714
| 0.921488
| 0.61194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
9d30c523adc11be39c4604e55d468d94df30842b
| 34,379
|
py
|
Python
|
etl/parsers/etw/Microsoft_Windows_VolumeSnapshot_Driver.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 104
|
2020-03-04T14:31:31.000Z
|
2022-03-28T02:59:36.000Z
|
etl/parsers/etw/Microsoft_Windows_VolumeSnapshot_Driver.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 7
|
2020-04-20T09:18:39.000Z
|
2022-03-19T17:06:19.000Z
|
etl/parsers/etw/Microsoft_Windows_VolumeSnapshot_Driver.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 16
|
2020-03-05T18:55:59.000Z
|
2022-03-01T10:19:28.000Z
|
# -*- coding: utf-8 -*-
"""
Microsoft-Windows-VolumeSnapshot-Driver
GUID : 67fe2216-727a-40cb-94b2-c02211edb34a
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=0, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_0_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"SnapshotGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=2, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_2_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=3, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_3_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=4, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_4_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"SnapshotGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=5, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_5_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"SnapshotGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=6, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_6_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=7, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_7_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=8, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_8_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=9, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_9_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=10, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_10_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"SnapshotGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=11, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_11_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"SnapshotGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=12, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_12_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"SnapshotGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=13, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_13_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"SnapshotGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=14, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_14_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=15, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_15_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=16, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_16_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=17, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_17_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=18, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_18_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=19, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_19_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=20, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_20_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=21, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_21_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=22, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_22_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=23, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_23_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=24, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_24_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=25, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_25_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=26, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_26_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=27, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_27_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=28, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_28_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=29, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_29_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=30, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_30_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=31, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_31_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=32, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_32_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=33, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_33_0(Etw):
pattern = Struct(
"RealThreadID" / Int32ul,
"VolumeGuid" / Guid
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=100, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_100_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=101, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_101_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=102, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_102_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"Error" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=103, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_103_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=104, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_104_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SnapshotCount" / Int32ul,
"CountDeleted" / Int32ul,
"CountVisible" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=105, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_105_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"Error" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=106, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_106_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SnapshotGuid" / Guid,
"Deleted" / Int8ul,
"Visible" / Int8ul,
"CommitTime" / SystemTime,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=107, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_107_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SnapshotGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=110, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_110_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=111, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_111_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"DiffAreaCount" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=112, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_112_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"Error" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=113, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_113_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=114, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_114_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=115, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_115_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=116, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_116_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=117, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_117_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=118, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_118_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"Error" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=119, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_119_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"PersistentDeleteReason" / Int16ul,
"PersistentDeleteStatus" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=120, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_120_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"TimeoutInSeconds" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=121, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_121_0(Etw):
pattern = Struct(
"VolumeNameLength" / Int16ul,
"VolumeName" / Bytes(lambda this: this.VolumeNameLength),
"DiffVolumeNameLength" / Int16ul,
"DiffVolumeName" / Bytes(lambda this: this.DiffVolumeNameLength),
"OriginalErrorLogCode" / Int32ul,
"OriginalErrorStatus" / Int32ul,
"OriginalSourceFile" / Int32ul,
"OriginalSourceLine" / Int16ul,
"OriginalSourceTag" / Int32ul,
"ErrorStatus" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=122, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_122_0(Etw):
pattern = Struct(
"TargetVolumeGuid" / Guid,
"Error" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1000, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1000_0(Etw):
pattern = Struct(
"DiagPrefixLength" / Int16ul,
"DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
"VolumeNameLength" / Int16ul,
"VolumeName" / Bytes(lambda this: this.VolumeNameLength),
"TargetVolumeGuid" / Guid,
"SnapshotGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1001, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1001_0(Etw):
pattern = Struct(
"DiagPrefixLength" / Int16ul,
"DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
"VolumeNameLength" / Int16ul,
"VolumeName" / Bytes(lambda this: this.VolumeNameLength),
"TargetVolumeGuid" / Guid,
"SnapshotGuid" / Guid,
"ExitStatus" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1002, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1002_0(Etw):
pattern = Struct(
"DiagPrefixLength" / Int16ul,
"DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
"VolumeNameLength" / Int16ul,
"VolumeName" / Bytes(lambda this: this.VolumeNameLength),
"TargetVolumeGuid" / Guid,
"SnapshotGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1003, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1003_0(Etw):
pattern = Struct(
"DiagPrefixLength" / Int16ul,
"DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
"VolumeNameLength" / Int16ul,
"VolumeName" / Bytes(lambda this: this.VolumeNameLength),
"TargetVolumeGuid" / Guid,
"SnapshotGuid" / Guid,
"ExitStatus" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1004, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1004_0(Etw):
pattern = Struct(
"DiagPrefixLength" / Int16ul,
"DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
"VolumeNameLength" / Int16ul,
"VolumeName" / Bytes(lambda this: this.VolumeNameLength),
"TargetVolumeGuid" / Guid,
"SnapshotGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1005, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1005_0(Etw):
pattern = Struct(
"DiagPrefixLength" / Int16ul,
"DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
"VolumeNameLength" / Int16ul,
"VolumeName" / Bytes(lambda this: this.VolumeNameLength),
"TargetVolumeGuid" / Guid,
"SnapshotGuid" / Guid,
"ExitStatus" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1006, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1006_0(Etw):
pattern = Struct(
"DiagPrefixLength" / Int16ul,
"DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
"VolumeNameLength" / Int16ul,
"VolumeName" / Bytes(lambda this: this.VolumeNameLength),
"TargetVolumeGuid" / Guid,
"SnapshotGuid" / Guid,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1007, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1007_0(Etw):
pattern = Struct(
"DiagPrefixLength" / Int16ul,
"DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
"VolumeNameLength" / Int16ul,
"VolumeName" / Bytes(lambda this: this.VolumeNameLength),
"TargetVolumeGuid" / Guid,
"SnapshotGuid" / Guid,
"ExitStatus" / Int32ul,
"SourceFile" / Int32ul,
"SourceLine" / Int16ul,
"SourceTag" / Int32ul
)
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1008, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1008_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1008, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1009, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1009_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1009, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1010, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1010_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1010, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1011, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1011_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1011, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1012, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1012_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1012, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1013, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1013_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1013, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1014, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1014_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1014, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1015, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1015_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1015, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1016, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1016_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1016, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1017, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1017_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1017, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1018, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1018_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1018, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1019, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1019_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1019, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1020, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1020_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1020, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1021, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1021_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1021, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1022, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1022_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1022, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1023, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1023_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1023, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1024, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1024_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1024, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1025, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1025_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1025, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1026, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1026_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1026, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1027, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1027_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1027, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1028, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1028_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1028, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1029, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1029_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1029, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1030, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1030_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1030, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1031, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1031_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1031, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1032, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1032_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1032, version 0."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
@declare(guid=guid("67fe2216-727a-40cb-94b2-c02211edb34a"), event_id=1033, version=0)
class Microsoft_Windows_VolumeSnapshot_Driver_1033_0(Etw):
    """Payload template for VolumeSnapshot-Driver ETW event 1033, version 0 (carries ExitStatus)."""
    pattern = Struct(
        "DiagPrefixLength" / Int16ul,
        "DiagPrefix" / Bytes(lambda this: this.DiagPrefixLength),
        "VolumeNameLength" / Int16ul,
        "VolumeName" / Bytes(lambda this: this.VolumeNameLength),
        "TargetVolumeGuid" / Guid,
        "SnapshotGuid" / Guid,
        "ExitStatus" / Int32ul,
        "SourceFile" / Int32ul,
        "SourceLine" / Int16ul,
        "SourceTag" / Int32ul
    )
| 33.120424
| 123
| 0.66657
| 3,363
| 34,379
| 6.655367
| 0.049063
| 0.064337
| 0.120633
| 0.144759
| 0.947592
| 0.947592
| 0.939371
| 0.744527
| 0.738361
| 0.738361
| 0
| 0.118496
| 0.212281
| 34,379
| 1,037
| 124
| 33.152363
| 0.707987
| 0.003083
| 0
| 0.661196
| 0
| 0
| 0.263746
| 0.094793
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004689
| 0
| 0.213365
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9d4e30b0e49a44ed9744c2dd45cb6de17762e5ad
| 99
|
py
|
Python
|
python/kaitai/compress/lz4.py
|
kaitaiStructCompile/kaitai_compress
|
2258028b30a422a5d37ba4fdb50da742dd895729
|
[
"MIT"
] | 7
|
2018-11-12T08:37:11.000Z
|
2022-02-27T05:12:55.000Z
|
python/kaitai/compress/lz4.py
|
kaitaiStructCompile/kaitai_compress
|
2258028b30a422a5d37ba4fdb50da742dd895729
|
[
"MIT"
] | 9
|
2019-02-02T09:55:12.000Z
|
2021-10-09T12:17:32.000Z
|
python/kaitai/compress/lz4.py
|
kaitaiStructCompile/kaitai_compress
|
2258028b30a422a5d37ba4fdb50da742dd895729
|
[
"MIT"
] | 3
|
2018-07-15T19:43:27.000Z
|
2021-02-08T01:13:49.000Z
|
import lz4.frame
class Lz4:
def decode(self, data):
return lz4.frame.decompress(data)
| 16.5
| 41
| 0.676768
| 14
| 99
| 4.785714
| 0.714286
| 0.238806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038961
| 0.222222
| 99
| 5
| 42
| 19.8
| 0.831169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
9d5e614000238960c9901e6f86b9ae1c3eee6080
| 4,532
|
py
|
Python
|
pirates/leveleditor/worldData/pvp_deathmatchArea1_jungle_c.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 81
|
2018-04-08T18:14:24.000Z
|
2022-01-11T07:22:15.000Z
|
pirates/leveleditor/worldData/pvp_deathmatchArea1_jungle_c.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 4
|
2018-09-13T20:41:22.000Z
|
2022-01-08T06:57:00.000Z
|
pirates/leveleditor/worldData/pvp_deathmatchArea1_jungle_c.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 26
|
2018-05-26T12:49:27.000Z
|
2021-09-11T09:11:59.000Z
|
from pandac.PandaModules import Point3, VBase3
objectStruct = {'Objects': {'1170793088.0jubutler': {'Type': 'Island Game Area','Name': 'pvp_deathmatchArea1_jungle_c','File': '','AdditionalData': ['JungleAreaC'],'Instanced': True,'Objects': {'1170793216.0jubutler': {'Type': 'Locator Node','Name': 'portal_interior_1','Hpr': VBase3(0.0, 0.0, 0.0),'Pos': Point3(-648.274, -263.406, 69.975),'Scale': VBase3(1.0, 1.0, 1.0)},'1170793216.0jubutler0': {'Type': 'Locator Node','Name': 'portal_interior_2','Hpr': VBase3(107.903, 0.0, 0.0),'Pos': Point3(304.679, -408.087, 115.305),'Scale': VBase3(1.0, 1.0, 1.0)},'1170793216.0jubutler1': {'Type': 'Player Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Index': -1,'Pos': Point3(263.485, -376.47, 114.252),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Team 1','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1170913647.47darren': {'Type': 'Player Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Index': -1,'Pos': Point3(364.516, -327.959, 108.82),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Team 1','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1170913737.7darren': {'Type': 'Player Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Index': -1,'Pos': Point3(239.666, -360.489, 115.196),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Team 1','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1170913749.09darren': {'Type': 'Player Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Index': -1,'Pos': Point3(349.206, -345.09, 110.111),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Team 1','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1170913790.58darren': {'Type': 'Player Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Index': -1,'Pos': Point3(-431.188, -334.827, 93.163),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Team 2','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1170913808.86darren': {'Type': 'Player Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Index': -1,'Pos': Point3(-512.732, -294.315, 85.294),'Scale': 
VBase3(1.0, 1.0, 1.0),'Spawnables': 'Team 2','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1170913819.52darren': {'Type': 'Player Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Index': -1,'Pos': Point3(-488.88, -330.341, 86.348),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Team 2','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1170913839.28darren': {'Type': 'Player Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Index': -1,'Pos': Point3(-374.602, -316.168, 101.045),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Team 2','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1172637470.97HP_Administrator': {'Type': 'Jungle_Props_large','DisableCollision': False,'Hpr': VBase3(-154.587, 0.0, 0.0),'Pos': Point3(304.863, -400.341, 115.009),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Model': 'models/props/cliff_jungle_high'}},'1172637597.41HP_Administrator': {'Type': 'Jungle_Props_large','DisableCollision': False,'Hpr': VBase3(105.176, 0.0, 0.0),'Pos': Point3(-640.059, -254.172, 71.102),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Model': 'models/props/cliff_jungle_low'}}},'Visual': {'Model': 'models/jungles/jungle_c_zero'}}},'Node Links': [],'Layers': {},'ObjectIds': {'1170793088.0jubutler': '["Objects"]["1170793088.0jubutler"]','1170793216.0jubutler': '["Objects"]["1170793088.0jubutler"]["Objects"]["1170793216.0jubutler"]','1170793216.0jubutler0': '["Objects"]["1170793088.0jubutler"]["Objects"]["1170793216.0jubutler0"]','1170793216.0jubutler1': '["Objects"]["1170793088.0jubutler"]["Objects"]["1170793216.0jubutler1"]','1170913647.47darren': '["Objects"]["1170793088.0jubutler"]["Objects"]["1170913647.47darren"]','1170913737.7darren': '["Objects"]["1170793088.0jubutler"]["Objects"]["1170913737.7darren"]','1170913749.09darren': '["Objects"]["1170793088.0jubutler"]["Objects"]["1170913749.09darren"]','1170913790.58darren': '["Objects"]["1170793088.0jubutler"]["Objects"]["1170913790.58darren"]','1170913808.86darren': 
'["Objects"]["1170793088.0jubutler"]["Objects"]["1170913808.86darren"]','1170913819.52darren': '["Objects"]["1170793088.0jubutler"]["Objects"]["1170913819.52darren"]','1170913839.28darren': '["Objects"]["1170793088.0jubutler"]["Objects"]["1170913839.28darren"]','1172637470.97HP_Administrator': '["Objects"]["1170793088.0jubutler"]["Objects"]["1172637470.97HP_Administrator"]','1172637597.41HP_Administrator': '["Objects"]["1170793088.0jubutler"]["Objects"]["1172637597.41HP_Administrator"]'}}
| 2,266
| 4,485
| 0.656884
| 671
| 4,532
| 4.402385
| 0.226528
| 0.036561
| 0.042654
| 0.040623
| 0.537238
| 0.462424
| 0.430603
| 0.41977
| 0.41977
| 0.358835
| 0
| 0.250117
| 0.058694
| 4,532
| 2
| 4,485
| 2,266
| 0.442335
| 0
| 0
| 0
| 0
| 0
| 0.566953
| 0.265167
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
19caff61941bed860cc978d63808d94995b2e698
| 62
|
py
|
Python
|
Python/hello_world_ronlek.py
|
kennethsequeira/Hello-world
|
464227bc7d9778a4a2a4044fe415a629003ea77f
|
[
"MIT"
] | 1,428
|
2018-10-03T15:15:17.000Z
|
2019-03-31T18:38:36.000Z
|
Python/hello_world_ronlek.py
|
kennethsequeira/Hello-world
|
464227bc7d9778a4a2a4044fe415a629003ea77f
|
[
"MIT"
] | 1,162
|
2018-10-03T15:05:49.000Z
|
2018-10-18T14:17:52.000Z
|
Python/hello_world_ronlek.py
|
kennethsequeira/Hello-world
|
464227bc7d9778a4a2a4044fe415a629003ea77f
|
[
"MIT"
] | 3,909
|
2018-10-03T15:07:19.000Z
|
2019-03-31T18:39:08.000Z
|
#Script to print Hello World in Python
print("Hello World!!")
| 20.666667
| 38
| 0.741935
| 10
| 62
| 4.6
| 0.7
| 0.434783
| 0.652174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145161
| 62
| 2
| 39
| 31
| 0.867925
| 0.596774
| 0
| 0
| 0
| 0
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
19f1fdfe737e72562e3bd2b7b01dcd4cb947eb36
| 26
|
py
|
Python
|
multi_tools/math/groups.py
|
Jerem2360/multitools
|
cd2c5aee72e5c2c8b60bbedd458303051b104c29
|
[
"Unlicense"
] | null | null | null |
multi_tools/math/groups.py
|
Jerem2360/multitools
|
cd2c5aee72e5c2c8b60bbedd458303051b104c29
|
[
"Unlicense"
] | null | null | null |
multi_tools/math/groups.py
|
Jerem2360/multitools
|
cd2c5aee72e5c2c8b60bbedd458303051b104c29
|
[
"Unlicense"
] | null | null | null |
from typing import Union
| 8.666667
| 24
| 0.807692
| 4
| 26
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 26
| 2
| 25
| 13
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c233cfec95d0dda54dfbd2e77140845247114dc8
| 45
|
py
|
Python
|
cqed_lib/cqed_tools/x_analysis/__init__.py
|
paulsbrookes/bistability_tools
|
6d7f20a20d9371a56e83a7ee48237fb70b831a85
|
[
"Apache-2.0"
] | null | null | null |
cqed_lib/cqed_tools/x_analysis/__init__.py
|
paulsbrookes/bistability_tools
|
6d7f20a20d9371a56e83a7ee48237fb70b831a85
|
[
"Apache-2.0"
] | null | null | null |
cqed_lib/cqed_tools/x_analysis/__init__.py
|
paulsbrookes/bistability_tools
|
6d7f20a20d9371a56e83a7ee48237fb70b831a85
|
[
"Apache-2.0"
] | null | null | null |
from .loading import *
from .fitting import *
| 22.5
| 22
| 0.755556
| 6
| 45
| 5.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 45
| 2
| 23
| 22.5
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c23a7c32b337cbee7e3a5045126ce9cee442ce52
| 27
|
py
|
Python
|
src/euler_python_package/euler_python/medium/p306.py
|
wilsonify/euler
|
5214b776175e6d76a7c6d8915d0e062d189d9b79
|
[
"MIT"
] | null | null | null |
src/euler_python_package/euler_python/medium/p306.py
|
wilsonify/euler
|
5214b776175e6d76a7c6d8915d0e062d189d9b79
|
[
"MIT"
] | null | null | null |
src/euler_python_package/euler_python/medium/p306.py
|
wilsonify/euler
|
5214b776175e6d76a7c6d8915d0e062d189d9b79
|
[
"MIT"
] | null | null | null |
def problem306():
pass
| 9
| 17
| 0.62963
| 3
| 27
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 0.259259
| 27
| 2
| 18
| 13.5
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
dff58c0ff92ae4b988bf075eee76af2e4793e944
| 90
|
py
|
Python
|
selection/models/__init__.py
|
icc2115/dl-selection
|
e39ef0e73bf631e413bac48db791aed617dd7e32
|
[
"MIT"
] | 8
|
2021-03-08T08:46:23.000Z
|
2022-02-28T12:19:50.000Z
|
selection/models/__init__.py
|
icc2115/dl-selection
|
e39ef0e73bf631e413bac48db791aed617dd7e32
|
[
"MIT"
] | null | null | null |
selection/models/__init__.py
|
icc2115/dl-selection
|
e39ef0e73bf631e413bac48db791aed617dd7e32
|
[
"MIT"
] | 2
|
2021-04-18T08:24:16.000Z
|
2022-02-28T12:19:54.000Z
|
from .mlp import MLP
from .mlp import SelectorMLP
from . import train
from . import utils
| 18
| 28
| 0.777778
| 14
| 90
| 5
| 0.428571
| 0.2
| 0.371429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 90
| 4
| 29
| 22.5
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a034738ad51dc6d50aeef69ced8a4c5dcb0844fe
| 1,897
|
py
|
Python
|
tests/test_e2e_01_0Lines.py
|
blue-monk/csv-diff-python2
|
8fbe9d149231b7d321d867497200e7c0c0118e57
|
[
"MIT"
] | null | null | null |
tests/test_e2e_01_0Lines.py
|
blue-monk/csv-diff-python2
|
8fbe9d149231b7d321d867497200e7c0c0118e57
|
[
"MIT"
] | null | null | null |
tests/test_e2e_01_0Lines.py
|
blue-monk/csv-diff-python2
|
8fbe9d149231b7d321d867497200e7c0c0118e57
|
[
"MIT"
] | null | null | null |
import sys
import textwrap
import pytest
from src.csvdiff2 import csvdiff
@pytest.mark.filterwarnings("ignore:Sniffing failed")
def test_no_lines_on_both_sides_as_no_header(lhs, rhs, capfd, args):
lhs.write(textwrap.dedent('''
''').strip())
rhs.write(textwrap.dedent('''
''').strip())
sys.argv = ['csvdiff.py', lhs.strpath, rhs.strpath, '-dc']
csvdiff.main()
out, err = capfd.readouterr()
assert err == ''
assert out == textwrap.dedent('''
============ Report ============
* Differences
-------------------------------------------
left.csv right.csv Column indices with difference
-------------------------------------------
* Count & Row number
same lines : 0
left side only (<): 0 :-- Row Numbers -->: []
right side only (>): 0 :-- Row Numbers -->: []
with differences (!): 0 :-- Row Number Pairs -->: []
''')
def test_no_lines_on_both_sides_with_header(lhs, rhs, capfd):
lhs.write(textwrap.dedent('''
head1, head2, head3, head4, head5
''').strip())
rhs.write(textwrap.dedent('''
head1, head2, head3, head4, head5
''').strip())
sys.argv = ['csvdiff.py', lhs.strpath, rhs.strpath, '-dc']
csvdiff.main()
out, err = capfd.readouterr()
assert err == ''
assert out == textwrap.dedent('''
============ Report ============
* Differences
-------------------------------------------
left.csv right.csv Column indices with difference
-------------------------------------------
* Count & Row number
same lines : 0
left side only (<): 0 :-- Row Numbers -->: []
right side only (>): 0 :-- Row Numbers -->: []
with differences (!): 0 :-- Row Number Pairs -->: []
''')
| 27.897059
| 68
| 0.474433
| 183
| 1,897
| 4.836066
| 0.333333
| 0.094915
| 0.085876
| 0.054237
| 0.79887
| 0.764972
| 0.764972
| 0.708475
| 0.708475
| 0.60339
| 0
| 0.013828
| 0.275698
| 1,897
| 67
| 69
| 28.313433
| 0.630277
| 0
| 0
| 0.857143
| 0
| 0
| 0.594509
| 0.090813
| 0
| 0
| 0
| 0
| 0.081633
| 1
| 0.040816
| false
| 0
| 0.081633
| 0
| 0.122449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a05ac77f03660eae1a994784ca02c62919be29a3
| 38
|
py
|
Python
|
geo/__init__.py
|
morysam96/geoserver-rest
|
ad66ce6877aad70f5cddb5baeccfff6d90732c84
|
[
"MIT"
] | 83
|
2020-09-14T13:01:29.000Z
|
2022-03-27T16:12:09.000Z
|
geo/__init__.py
|
morysam96/geoserver-rest
|
ad66ce6877aad70f5cddb5baeccfff6d90732c84
|
[
"MIT"
] | 46
|
2020-09-15T06:47:52.000Z
|
2022-03-03T06:16:28.000Z
|
geo/__init__.py
|
morysam96/geoserver-rest
|
ad66ce6877aad70f5cddb5baeccfff6d90732c84
|
[
"MIT"
] | 40
|
2020-09-14T12:32:55.000Z
|
2022-02-17T03:16:06.000Z
|
from . import Calculation_gdal, Style
| 19
| 37
| 0.815789
| 5
| 38
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a0759cee3612285bfa914107c209e821ebaa2145
| 23
|
py
|
Python
|
PyTBot/__init__.py
|
RRostami/PyTBot
|
a95bb9b3d3d19284b60c93b758a8aa26e3357929
|
[
"MIT"
] | null | null | null |
PyTBot/__init__.py
|
RRostami/PyTBot
|
a95bb9b3d3d19284b60c93b758a8aa26e3357929
|
[
"MIT"
] | null | null | null |
PyTBot/__init__.py
|
RRostami/PyTBot
|
a95bb9b3d3d19284b60c93b758a8aa26e3357929
|
[
"MIT"
] | null | null | null |
from .telegram import *
| 23
| 23
| 0.782609
| 3
| 23
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 1
| 23
| 23
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a0852e4d727130c9ac3e56a5fab7fdccdfb497b2
| 2,113
|
py
|
Python
|
tests/unit/test_handle_custom_resource_message.py
|
zaro0508/cfn-cr-same-region-bucket-download
|
9db66c7fd9738b8d29d2d758741541d9b069a338
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_handle_custom_resource_message.py
|
zaro0508/cfn-cr-same-region-bucket-download
|
9db66c7fd9738b8d29d2d758741541d9b069a338
|
[
"Apache-2.0"
] | 1
|
2021-02-18T20:14:52.000Z
|
2021-02-19T02:18:45.000Z
|
tests/unit/test_handle_custom_resource_message.py
|
zaro0508/cfn-cr-same-region-bucket-download
|
9db66c7fd9738b8d29d2d758741541d9b069a338
|
[
"Apache-2.0"
] | 2
|
2021-02-12T02:16:09.000Z
|
2021-02-19T18:26:22.000Z
|
from restrict_download_region import cfnresponse, restrict_region
from pytest_mock import MockerFixture
import pytest
def test_handle_custom_resource_status_message__successful_execution__not_custom_resource(mocker: MockerFixture):
mock_cfn_send = mocker.patch.object(cfnresponse, "send", autospec=True)
event = {}
context = {}
def stub_function():
pass
with restrict_region.handle_custom_resource_status_message(event, context):
stub_function()
assert not mock_cfn_send.called
def test_handle_custom_resource_status_message__failed_execution__not_custom_resource(mocker: MockerFixture):
mock_cfn_send = mocker.patch.object(cfnresponse, "send", autospec=True)
event = {""}
context = {}
def stub_function():
raise Exception("failed")
with pytest.raises(Exception):
with restrict_region.handle_custom_resource_status_message(event, context):
stub_function()
assert not mock_cfn_send.called
def test_handle_custom_resource_status_message__successful_execution__is_custom_resource(mocker: MockerFixture):
mock_cfn_send = mocker.patch.object(cfnresponse, "send", autospec=True)
event = {"RequestType": "Create"}
context = {}
def stub_function():
pass
with restrict_region.handle_custom_resource_status_message(event, context):
stub_function()
mock_cfn_send.assert_called_once_with(event, context, cfnresponse.SUCCESS, {"Data": ""})
def test_handle_custom_resource_status_message__failed_execution__is_custom_resource(mocker: MockerFixture):
    """A failing custom-resource invocation must report FAILED and re-raise.

    Also checks that the context manager yields the event's request type.
    """
    send_mock = mocker.patch.object(cfnresponse, "send", autospec=True)
    event = {"RequestType": "Create"}
    context = {}

    with pytest.raises(ValueError):
        with restrict_region.handle_custom_resource_status_message(event, context) as request_type:
            assert request_type == 'Create'
            raise ValueError("failed")

    send_mock.assert_called_once_with(event, context, cfnresponse.FAILED, {"Data": ""})
| 32.507692
| 115
| 0.751065
| 246
| 2,113
| 6.012195
| 0.195122
| 0.132522
| 0.108181
| 0.140636
| 0.81812
| 0.811359
| 0.811359
| 0.811359
| 0.811359
| 0.735632
| 0
| 0
| 0.162328
| 2,113
| 64
| 116
| 33.015625
| 0.835593
| 0
| 0
| 0.595238
| 0
| 0
| 0.035968
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 1
| 0.190476
| false
| 0.047619
| 0.071429
| 0
| 0.261905
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a092932113db1c085ee4e2be09050d83302bc79f
| 310
|
py
|
Python
|
Allinone py/Print-sumofall-odd-even-numbers.py
|
whoafridi/Python
|
4fea6f81ebfd94730b36b4d95669adcadacae5df
|
[
"MIT"
] | null | null | null |
Allinone py/Print-sumofall-odd-even-numbers.py
|
whoafridi/Python
|
4fea6f81ebfd94730b36b4d95669adcadacae5df
|
[
"MIT"
] | null | null | null |
Allinone py/Print-sumofall-odd-even-numbers.py
|
whoafridi/Python
|
4fea6f81ebfd94730b36b4d95669adcadacae5df
|
[
"MIT"
] | 1
|
2019-07-09T06:34:29.000Z
|
2019-07-09T06:34:29.000Z
|
## Print Sum of All Even Numbers
# Improvements over the original: the builtin ``sum`` is no longer
# shadowed by an accumulator variable, the input value is not reused as
# the loop variable, and the manual accumulation loop is replaced by
# ``sum(range(...))`` which computes the same arithmetic series.
n = int(input("Enter : "))
total = sum(range(2, n + 1, 2))  # even numbers 2..n
print("Result is {0}".format(total))

## Print Sum of All Odd Numbers
n = int(input("Enter : "))
total = sum(range(1, n + 1, 2))  # odd numbers 1..n
print("Result is {0}".format(total))
| 15.5
| 34
| 0.558065
| 58
| 310
| 2.982759
| 0.344828
| 0.092486
| 0.115607
| 0.150289
| 0.797688
| 0.797688
| 0.797688
| 0.797688
| 0.797688
| 0.797688
| 0
| 0.043668
| 0.26129
| 310
| 20
| 35
| 15.5
| 0.71179
| 0.187097
| 0
| 0.8
| 0
| 0
| 0.169355
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
cd007b1597b9190ac9ba0b429eb135861da2961f
| 146
|
py
|
Python
|
02-basic-python/exercise.py
|
nicholas-greger/2020-datascience-lectures
|
389d0a5b56f510677d3b09da400268a1991a5b0f
|
[
"MIT"
] | 26
|
2019-12-13T09:22:19.000Z
|
2021-03-06T05:21:41.000Z
|
02-basic-python/exercise.py
|
nicholas-greger/2020-datascience-lectures
|
389d0a5b56f510677d3b09da400268a1991a5b0f
|
[
"MIT"
] | null | null | null |
02-basic-python/exercise.py
|
nicholas-greger/2020-datascience-lectures
|
389d0a5b56f510677d3b09da400268a1991a5b0f
|
[
"MIT"
] | 43
|
2020-01-08T05:09:08.000Z
|
2021-12-19T11:23:40.000Z
|
def add_numbers(a, b):
    """Return the sum of *a* and *b* (any operands supporting ``+``)."""
    result = a + b
    return result
print(add_numbers(3, 7))
print(add_numbers(14.22, 19))
# printing a won't work because of scope
# NOTE: ``a`` exists only as a parameter inside add_numbers, so the next
# line raises NameError at runtime — a deliberate scoping demonstration
# in this lecture exercise, not a bug to fix.
print(a)
| 18.25
| 40
| 0.691781
| 29
| 146
| 3.37931
| 0.655172
| 0.306122
| 0.306122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066116
| 0.171233
| 146
| 7
| 41
| 20.857143
| 0.743802
| 0.260274
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0.2
| 0.4
| 0.6
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 6
|
cd3c15210bb5c2050ec265824b4f32bb0c68759c
| 38
|
py
|
Python
|
casbin/rbac/__init__.py
|
goodrain/pycasbin
|
1a481ba1af7619e1cc7e83896581d14976927d80
|
[
"Apache-2.0"
] | 915
|
2018-11-25T01:00:39.000Z
|
2022-03-30T11:21:34.000Z
|
casbin/rbac/__init__.py
|
goodrain/pycasbin
|
1a481ba1af7619e1cc7e83896581d14976927d80
|
[
"Apache-2.0"
] | 231
|
2019-02-13T09:29:51.000Z
|
2022-03-28T16:32:51.000Z
|
casbin/rbac/__init__.py
|
goodrain/pycasbin
|
1a481ba1af7619e1cc7e83896581d14976927d80
|
[
"Apache-2.0"
] | 173
|
2019-02-08T02:22:33.000Z
|
2022-03-10T15:16:11.000Z
|
from .role_manager import RoleManager
| 19
| 37
| 0.868421
| 5
| 38
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
26a3a3284b689f89220b550ae11b5378a6a16e6d
| 89
|
py
|
Python
|
src/models/__init__.py
|
ryanwongsa/image-inpainting
|
d20419f3260760f1deb96d2b904dd4de92eeee36
|
[
"BSD-3-Clause"
] | null | null | null |
src/models/__init__.py
|
ryanwongsa/image-inpainting
|
d20419f3260760f1deb96d2b904dd4de92eeee36
|
[
"BSD-3-Clause"
] | null | null | null |
src/models/__init__.py
|
ryanwongsa/image-inpainting
|
d20419f3260760f1deb96d2b904dd4de92eeee36
|
[
"BSD-3-Clause"
] | null | null | null |
from models.pconv_unet import PConvUNet
from models.vgg16_extractor import VGG16Extractor
| 44.5
| 49
| 0.898876
| 12
| 89
| 6.5
| 0.75
| 0.25641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 0.078652
| 89
| 2
| 49
| 44.5
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f82160a5d9d35b8c0e86c1cb736ee70e38af2795
| 35
|
py
|
Python
|
src/interfacePy/WKB/__init__.py
|
dkaramit/MiMeS
|
a3c97a4877f181b54e880d7b144271c5659291b5
|
[
"MIT"
] | 2
|
2022-01-27T20:10:19.000Z
|
2022-01-29T04:26:16.000Z
|
src/interfacePy/WKB/__init__.py
|
dkaramit/MiMeS
|
a3c97a4877f181b54e880d7b144271c5659291b5
|
[
"MIT"
] | null | null | null |
src/interfacePy/WKB/__init__.py
|
dkaramit/MiMeS
|
a3c97a4877f181b54e880d7b144271c5659291b5
|
[
"MIT"
] | null | null | null |
from .WKB import relic, getPoints
| 11.666667
| 33
| 0.771429
| 5
| 35
| 5.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171429
| 35
| 2
| 34
| 17.5
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f85359614b95b7504db7009779a48b7ae165876d
| 249
|
py
|
Python
|
api/custom_errors.py
|
chenningg/sql-query-optimizer
|
5e418989b4788378b851193533f81b9b8e6e6375
|
[
"MIT"
] | 2
|
2020-11-30T15:37:01.000Z
|
2022-03-09T14:09:52.000Z
|
api/custom_errors.py
|
tanchuanxin/cz4031_project_2_query_optimizer
|
5e418989b4788378b851193533f81b9b8e6e6375
|
[
"MIT"
] | null | null | null |
api/custom_errors.py
|
tanchuanxin/cz4031_project_2_query_optimizer
|
5e418989b4788378b851193533f81b9b8e6e6375
|
[
"MIT"
] | 3
|
2020-12-14T08:27:10.000Z
|
2022-03-09T14:09:48.000Z
|
""" ####################################################################
Allows for passing of specific errors deep in the stack trace
#################################################################### """
class CustomError(Exception):
    """Project-specific exception type.

    Lets code deep in the stack raise (and callers catch) a dedicated
    error class instead of a bare ``Exception``, as described in the
    module docstring above.
    """
    pass
| 35.571429
| 73
| 0.321285
| 15
| 249
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096386
| 249
| 7
| 74
| 35.571429
| 0.355556
| 0.803213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
f8c25968b56bd7d27d12b945b440e74871cf4b4e
| 30
|
py
|
Python
|
BLAST/mySQL.py
|
c235gsy/Sustech_Computational-Biology
|
860e6432a4854f79980002c9028c09e18f034ef7
|
[
"MIT"
] | null | null | null |
BLAST/mySQL.py
|
c235gsy/Sustech_Computational-Biology
|
860e6432a4854f79980002c9028c09e18f034ef7
|
[
"MIT"
] | null | null | null |
BLAST/mySQL.py
|
c235gsy/Sustech_Computational-Biology
|
860e6432a4854f79980002c9028c09e18f034ef7
|
[
"MIT"
] | null | null | null |
import mysql.connector as mc
| 10
| 28
| 0.8
| 5
| 30
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 30
| 2
| 29
| 15
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3e8383520f9971a63a2f5b9be0245d344251ea00
| 112
|
py
|
Python
|
FreshEverDay/df_user/views.py
|
Signss/FreshEverDay
|
e55a546010c6479ff658158cb076b7386ac0bbc5
|
[
"MIT"
] | null | null | null |
FreshEverDay/df_user/views.py
|
Signss/FreshEverDay
|
e55a546010c6479ff658158cb076b7386ac0bbc5
|
[
"MIT"
] | null | null | null |
FreshEverDay/df_user/views.py
|
Signss/FreshEverDay
|
e55a546010c6479ff658158cb076b7386ac0bbc5
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
def register(request):
    """Render the user registration page."""
    template_name = 'df_user/register.html'
    return render(request, template_name)
| 22.4
| 51
| 0.776786
| 15
| 112
| 5.733333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 112
| 4
| 52
| 28
| 0.877551
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
e40e8224e259cd0cbcc5732862c62dd4f46cf949
| 166
|
py
|
Python
|
events/admin.py
|
MikaelSantilio/aprepi-django
|
5e2b5ecffb287eab929c0759ea35ab073cc19d96
|
[
"MIT"
] | null | null | null |
events/admin.py
|
MikaelSantilio/aprepi-django
|
5e2b5ecffb287eab929c0759ea35ab073cc19d96
|
[
"MIT"
] | 9
|
2021-01-13T22:06:29.000Z
|
2021-06-16T10:33:48.000Z
|
events/admin.py
|
MikaelSantilio/aprepi-django
|
5e2b5ecffb287eab929c0759ea35ab073cc19d96
|
[
"MIT"
] | 1
|
2021-02-05T18:12:15.000Z
|
2021-02-05T18:12:15.000Z
|
from django.contrib import admin
from events import models
# Expose the event-related models in the Django admin, in the same order
# the original registered them individually.
for _model in (models.Event, models.Cost, models.Collection):
    admin.site.register(_model)
| 23.714286
| 38
| 0.831325
| 24
| 166
| 5.75
| 0.5
| 0.195652
| 0.369565
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072289
| 166
| 7
| 38
| 23.714286
| 0.896104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
e43f46ff7203e0a19b3bc55778b7924559de5c84
| 162
|
py
|
Python
|
util/__init__.py
|
kkmumu/PKURunningHelper
|
de934a805dd64c56af4537c24315c813c8ca4e80
|
[
"MIT"
] | 3
|
2019-07-13T12:13:37.000Z
|
2020-04-24T17:03:25.000Z
|
util/__init__.py
|
cbwang2016/PKURunningHelper
|
9cf3db3e10a2fd19315ec05fa3da80ffb67c39e3
|
[
"MIT"
] | null | null | null |
util/__init__.py
|
cbwang2016/PKURunningHelper
|
9cf3db3e10a2fd19315ec05fa3da80ffb67c39e3
|
[
"MIT"
] | 4
|
2020-10-15T03:16:56.000Z
|
2021-11-03T07:23:17.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# filename: util/__init__.py
from .class_ import *
from .func import *
from .compat import *
from .error import *
| 18
| 28
| 0.67284
| 23
| 162
| 4.521739
| 0.73913
| 0.288462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014815
| 0.166667
| 162
| 8
| 29
| 20.25
| 0.755556
| 0.432099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e447da85730f966568cfa77c9c9475c5f1a901c9
| 49
|
py
|
Python
|
bi_lstm_crf/model/__init__.py
|
taghizad3h/bi-lstm-crf
|
e67bb473f6c339999ad9a3e491924ad5db52f8ed
|
[
"MIT"
] | 157
|
2019-12-03T12:35:42.000Z
|
2022-03-28T08:33:35.000Z
|
bi_lstm_crf/model/__init__.py
|
taghizad3h/bi-lstm-crf
|
e67bb473f6c339999ad9a3e491924ad5db52f8ed
|
[
"MIT"
] | 12
|
2019-12-04T06:59:06.000Z
|
2021-08-14T10:16:33.000Z
|
bi_lstm_crf/model/__init__.py
|
taghizad3h/bi-lstm-crf
|
e67bb473f6c339999ad9a3e491924ad5db52f8ed
|
[
"MIT"
] | 33
|
2019-12-04T05:40:59.000Z
|
2022-03-25T02:51:13.000Z
|
from .crf import CRF
from .model import BiRnnCrf
| 16.333333
| 27
| 0.795918
| 8
| 49
| 4.875
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 49
| 2
| 28
| 24.5
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e4c0a2401fa80adf1141c222a888cdeef247f6f2
| 3,026
|
py
|
Python
|
scan/test/fetch/api_fetch/test_data/api_fetch_ports.py
|
korenlev/calipso-cvim
|
39278a5cf09c40b26a8a143ccc0c8d437961abc2
|
[
"Apache-2.0"
] | null | null | null |
scan/test/fetch/api_fetch/test_data/api_fetch_ports.py
|
korenlev/calipso-cvim
|
39278a5cf09c40b26a8a143ccc0c8d437961abc2
|
[
"Apache-2.0"
] | null | null | null |
scan/test/fetch/api_fetch/test_data/api_fetch_ports.py
|
korenlev/calipso-cvim
|
39278a5cf09c40b26a8a143ccc0c8d437961abc2
|
[
"Apache-2.0"
] | null | null | null |
###############################################################################
# Copyright (c) 2017-2020 Koren Lev (Cisco Systems), #
# Yaron Yogev (Cisco Systems), Ilia Abashin (Cisco Systems) and others #
# #
# All rights reserved. This program and the accompanying materials #
# are made available under the terms of the Apache License, Version 2.0 #
# which accompanies this distribution, and is available at #
# http://www.apache.org/licenses/LICENSE-2.0 #
###############################################################################
# Raw API payload for a single port as returned by the (mocked) ports
# endpoint; note the empty "name" field, which the scanner is expected to
# fill in (see the RESULT fixtures below).
PORTS_RESPONSE = {
    "ports": [
        {
            "id": "16620a58-c48c-4195-b9c1-779a8ba2e6f8",
            "mac_address": "fa:16:3e:d7:c5:16",
            "name": "",
            "network_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
            "tenant_id": "75c0eb79ff4a42b0ae4973c8375ddf40"
        }
    ]
}
# Expected scan result when the network is available: the empty port name
# is replaced with the MAC address, and folder/parent linkage fields are
# added.
PORTS_RESULT_WITH_NET = [
    {
        "id": "16620a58-c48c-4195-b9c1-779a8ba2e6f8",
        "mac_address": "fa:16:3e:d7:c5:16",
        "name": "fa:16:3e:d7:c5:16",
        "network_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
        "tenant_id": "75c0eb79ff4a42b0ae4973c8375ddf40",
        "type": "port",
        "master_parent_type": "network",
        "master_parent_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
        "parent_type": "ports_folder",
        "parent_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe-ports",
        "parent_text": "Ports",
    }
]
# Expected result without the network: identical except the port falls
# back to its own id as the display name instead of the MAC address.
PORTS_RESULT_WITHOUT_NET = [
    {
        "id": "16620a58-c48c-4195-b9c1-779a8ba2e6f8",
        "mac_address": "fa:16:3e:d7:c5:16",
        "name": "16620a58-c48c-4195-b9c1-779a8ba2e6f8",
        "network_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
        "tenant_id": "75c0eb79ff4a42b0ae4973c8375ddf40",
        "type": "port",
        "master_parent_type": "network",
        "master_parent_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
        "parent_type": "ports_folder",
        "parent_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe-ports",
        "parent_text": "Ports",
    }
]
# Same as the WITH_NET case, plus the resolved project name (matches
# TENANT["name"] below).
PORTS_RESULT_WITH_PROJECT = [
    {
        "id": "16620a58-c48c-4195-b9c1-779a8ba2e6f8",
        "mac_address": "fa:16:3e:d7:c5:16",
        "name": "fa:16:3e:d7:c5:16",
        "network_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
        "tenant_id": "75c0eb79ff4a42b0ae4973c8375ddf40",
        "type": "port",
        "master_parent_type": "network",
        "master_parent_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
        "parent_type": "ports_folder",
        "parent_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe-ports",
        "parent_text": "Ports",
        "project": "Calipso-project"
    }
]
# Error-path payload and auxiliary lookup fixtures used by the tests.
ERROR_PORTS_RESPONSE = {}
NETWORK = {"id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe"}
TENANT = {"id": "75c0eb79ff4a42b0ae4973c8375ddf40", "name": "Calipso-project"}
ENDPOINT = "http://10.56.20.239:9696"
REGION_NAME = "RegionOne"
| 39.815789
| 79
| 0.557832
| 304
| 3,026
| 5.391447
| 0.286184
| 0.067114
| 0.09396
| 0.120805
| 0.733984
| 0.71446
| 0.71446
| 0.71446
| 0.71446
| 0.71446
| 0
| 0.206321
| 0.247191
| 3,026
| 75
| 80
| 40.346667
| 0.513169
| 0.166557
| 0
| 0.564516
| 0
| 0
| 0.593521
| 0.325702
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9000fc17466246fad54e47a8d5cfb2399c17e132
| 106,115
|
py
|
Python
|
tests/test_all.py
|
Stewart86/aioCloudflare
|
341c0941f8f888a8b7e696e64550bce5da4949e6
|
[
"MIT"
] | 2
|
2021-09-14T13:20:55.000Z
|
2022-02-24T14:18:24.000Z
|
tests/test_all.py
|
Stewart86/aioCloudflare
|
341c0941f8f888a8b7e696e64550bce5da4949e6
|
[
"MIT"
] | 46
|
2021-09-08T08:39:45.000Z
|
2022-03-29T12:31:05.000Z
|
tests/test_all.py
|
Stewart86/aioCloudflare
|
341c0941f8f888a8b7e696e64550bce5da4949e6
|
[
"MIT"
] | 1
|
2021-12-30T23:02:23.000Z
|
2021-12-30T23:02:23.000Z
|
import httpx
import pytest
from aiocloudflare.cloudflare import Cloudflare
# NOTE(review): every test below follows one pattern — register the exact
# endpoint URL with the respx router so the outgoing HTTP GET is intercepted
# and answered with a bare 200, drive the matching attribute chain on the
# Cloudflare client, and assert on the response.  Chains wrapped in
# pytest.raises(AttributeError) are expected NOT to expose a direct .get()
# (presumably container-only endpoints — confirm against the client
# implementation).
@pytest.mark.asyncio
async def test_user(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_billing(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/billing/id1").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.user.billing.get("id1")
@pytest.mark.asyncio
async def test_user_billing_history(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/billing/history/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.billing.history.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_billing_profile(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/billing/profile/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.billing.profile.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_billing_subscriptions(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/billing/subscriptions/id1").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.user.billing.subscriptions.get("id1")
@pytest.mark.asyncio
async def test_user_billing_subscriptions_apps(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/billing/subscriptions/apps/id1"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.billing.subscriptions.apps.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_billing_subscriptions_zones(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/billing/subscriptions/zones/id1"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.billing.subscriptions.zones.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_firewall(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/firewall/id1").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.user.firewall.get("id1")
@pytest.mark.asyncio
async def test_user_firewall_access_rules(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/firewall/access_rules/id1").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.user.firewall.access_rules.get("id1")
@pytest.mark.asyncio
async def test_user_firewall_access_rules_rules(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/firewall/access_rules/rules/id1"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.firewall.access_rules.rules.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_invites(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/invites/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.invites.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_organizations(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/organizations/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.organizations.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_subscriptions(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/subscriptions/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.subscriptions.get("id1")
        assert result.status_code == 200
# NOTE(review): same mock-one-URL / call-the-chain pattern as above, for the
# /user/load_balancers and /user/workers endpoint families.  Where .get()
# takes two ids, the second is interpolated after the nested path segment.
@pytest.mark.asyncio
async def test_user_load_balancers(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/load_balancers/id1").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.user.load_balancers.get("id1")
@pytest.mark.asyncio
async def test_user_load_balancers_monitors(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/load_balancers/monitors/id1"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.load_balancers.monitors.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_load_balancers_monitors_preview(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/load_balancers/monitors/id1/preview/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.load_balancers.monitors.preview.get("id1", "id2")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_load_balancers_monitors_references(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/load_balancers/monitors/id1/references/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.load_balancers.monitors.references.get("id1", "id2")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_load_balancers_preview(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/load_balancers/preview/id1"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.load_balancers.preview.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_load_balancers_pools(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/load_balancers/pools/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.load_balancers.pools.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_load_balancers_pools_health(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/load_balancers/pools/id1/health/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.load_balancers.pools.health.get("id1", "id2")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_load_balancers_pools_preview(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/load_balancers/pools/id1/preview/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.load_balancers.pools.preview.get("id1", "id2")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_load_balancers_pools_references(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/load_balancers/pools/id1/references/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.load_balancers.pools.references.get("id1", "id2")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_workers(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/workers/id1").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.user.workers.get("id1")
# NOTE(review): same pattern — worker scripts, audit logs, load-balancing
# analytics and token endpoints under /user.
@pytest.mark.asyncio
async def test_user_workers_scripts(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/workers/scripts/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.workers.scripts.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_audit_logs(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/audit_logs/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.audit_logs.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_load_balancing_analytics(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/load_balancing_analytics/id1"
    ).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.user.load_balancing_analytics.get("id1")
@pytest.mark.asyncio
async def test_user_load_balancing_analytics_events(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/load_balancing_analytics/events/id1"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.load_balancing_analytics.events.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_tokens(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/tokens/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.tokens.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_tokens_permission_groups(respx_mock, config):
    respx_mock.get(
        "https://api.doesnotmatter.com/user/tokens/permission_groups/id1"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.user.tokens.permission_groups.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_tokens_verify(respx_mock, config):
    respx_mock.get("https://api.doesnotmatter.com/user/tokens/verify/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.tokens.verify.get("id1")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_user_tokens_value(respx_mock, config):
    # note the URL shape here: the second id lands under /value/ mid-path
    respx_mock.get("https://api.doesnotmatter.com/user/tokens/id1/value/id2").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.user.tokens.value.get("id1", "id2")
        assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones(respx_mock, config):
respx_mock.get("https://api.doesnotmatter.com/zones/id1").mock(
return_value=httpx.Response(200)
)
async with Cloudflare(config=config) as cf:
result = await cf.zones.get("id1")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_activation_check(respx_mock, config):
respx_mock.get("https://api.doesnotmatter.com/zones/id1/activation_check/id2").mock(
return_value=httpx.Response(200)
)
async with Cloudflare(config=config) as cf:
result = await cf.zones.activation_check.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_available_plans(respx_mock, config):
respx_mock.get("https://api.doesnotmatter.com/zones/id1/available_plans/id2").mock(
return_value=httpx.Response(200)
)
async with Cloudflare(config=config) as cf:
result = await cf.zones.available_plans.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_available_rate_plans(respx_mock, config):
respx_mock.get(
"https://api.doesnotmatter.com/zones/id1/available_rate_plans/id2"
).mock(return_value=httpx.Response(200))
async with Cloudflare(config=config) as cf:
result = await cf.zones.available_rate_plans.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_custom_certificates(respx_mock, config):
respx_mock.get(
"https://api.doesnotmatter.com/zones/id1/custom_certificates/id2"
).mock(return_value=httpx.Response(200))
async with Cloudflare(config=config) as cf:
result = await cf.zones.custom_certificates.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_custom_certificates_prioritize(respx_mock, config):
respx_mock.get(
"https://api.doesnotmatter.com/zones/id1/custom_certificates/prioritize/id2"
).mock(return_value=httpx.Response(200))
async with Cloudflare(config=config) as cf:
result = await cf.zones.custom_certificates.prioritize.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_custom_hostnames(respx_mock, config):
respx_mock.get("https://api.doesnotmatter.com/zones/id1/custom_hostnames/id2").mock(
return_value=httpx.Response(200)
)
async with Cloudflare(config=config) as cf:
result = await cf.zones.custom_hostnames.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_custom_hostnames_fallback_origin(respx_mock, config):
respx_mock.get(
"https://api.doesnotmatter.com/zones/id1/custom_hostnames/fallback_origin/id2"
).mock(return_value=httpx.Response(200))
async with Cloudflare(config=config) as cf:
result = await cf.zones.custom_hostnames.fallback_origin.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_custom_pages(respx_mock, config):
respx_mock.get("https://api.doesnotmatter.com/zones/id1/custom_pages/id2").mock(
return_value=httpx.Response(200)
)
async with Cloudflare(config=config) as cf:
result = await cf.zones.custom_pages.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_dns_records(respx_mock, config):
respx_mock.get("https://api.doesnotmatter.com/zones/id1/dns_records/id2").mock(
return_value=httpx.Response(200)
)
async with Cloudflare(config=config) as cf:
result = await cf.zones.dns_records.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_dns_records_export(respx_mock, config):
respx_mock.get(
"https://api.doesnotmatter.com/zones/id1/dns_records/export/id2"
).mock(return_value=httpx.Response(200))
async with Cloudflare(config=config) as cf:
result = await cf.zones.dns_records.export.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_dns_records_import(respx_mock, config):
respx_mock.get(
"https://api.doesnotmatter.com/zones/id1/dns_records/import/id2"
).mock(return_value=httpx.Response(200))
async with Cloudflare(config=config) as cf:
result = await cf.zones.dns_records.import_.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_filters(respx_mock, config):
respx_mock.get("https://api.doesnotmatter.com/zones/id1/filters/id2").mock(
return_value=httpx.Response(200)
)
async with Cloudflare(config=config) as cf:
result = await cf.zones.filters.get("id1", "id2")
assert result.status_code == 200
@pytest.mark.asyncio
async def test_zones_healthchecks(respx_mock, config):
    """zones.healthchecks resolves to /zones/{zone}/healthchecks/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/healthchecks/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.healthchecks.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_healthchecks_preview(respx_mock, config):
    """zones.healthchecks.preview resolves to .../healthchecks/preview/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/healthchecks/preview/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.healthchecks.preview.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_keyless_certificates(respx_mock, config):
    """zones.keyless_certificates resolves to /zones/{zone}/keyless_certificates/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/keyless_certificates/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.keyless_certificates.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_pagerules(respx_mock, config):
    """zones.pagerules resolves to /zones/{zone}/pagerules/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/pagerules/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.pagerules.get("id1", "id2")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_pagerules_settings(respx_mock, config):
    """zones.pagerules.settings resolves to .../pagerules/settings/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/pagerules/settings/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.pagerules.settings.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_purge_cache(respx_mock, config):
    """zones.purge_cache resolves to /zones/{zone}/purge_cache/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/purge_cache/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.purge_cache.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_railguns(respx_mock, config):
    """zones.railguns resolves to /zones/{zone}/railguns/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/railguns/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.railguns.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_railguns_diagnose(respx_mock, config):
    """Three-id path: zones.railguns.diagnose -> .../railguns/{id}/diagnose/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/railguns/id2/diagnose/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.railguns.diagnose.get("id1", "id2", "id3")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_rulesets(respx_mock, config):
    """zones.rulesets resolves to /zones/{zone}/rulesets/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/rulesets/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.rulesets.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_rulesets_versions(respx_mock, config):
    """Three-id path: zones.rulesets.versions -> .../rulesets/{id}/versions/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/rulesets/id2/versions/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.rulesets.versions.get("id1", "id2", "id3")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_security(respx_mock, config):
    """zones.security is an intermediate node with no .get: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/security/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.security.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_security_events(respx_mock, config):
    """zones.security.events resolves to .../security/events/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/security/events/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.security.events.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_subscription(respx_mock, config):
    """zones.subscription resolves to /zones/{zone}/subscription/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/subscription/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.subscription.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings(respx_mock, config):
    """zones.settings resolves to /zones/{zone}/settings/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.get("id1", "id2")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_settings_0rtt(respx_mock, config):
    """Attribute `ortt` (identifiers can't start with a digit) maps to .../settings/0rtt/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/0rtt/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.ortt.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_advanced_ddos(respx_mock, config):
    """zones.settings.advanced_ddos -> .../settings/advanced_ddos/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/advanced_ddos/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.advanced_ddos.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_always_online(respx_mock, config):
    """zones.settings.always_online -> .../settings/always_online/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/always_online/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.always_online.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_always_use_https(respx_mock, config):
    """zones.settings.always_use_https -> .../settings/always_use_https/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/always_use_https/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.always_use_https.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_automatic_https_rewrites(respx_mock, config):
    """zones.settings.automatic_https_rewrites -> .../settings/automatic_https_rewrites/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/automatic_https_rewrites/id2"  # noqa
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.automatic_https_rewrites.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_brotli(respx_mock, config):
    """zones.settings.brotli -> .../settings/brotli/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/brotli/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.brotli.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_browser_cache_ttl(respx_mock, config):
    """zones.settings.browser_cache_ttl -> .../settings/browser_cache_ttl/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/browser_cache_ttl/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.browser_cache_ttl.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_browser_check(respx_mock, config):
    """zones.settings.browser_check -> .../settings/browser_check/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/browser_check/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.browser_check.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_cache_level(respx_mock, config):
    """zones.settings.cache_level -> .../settings/cache_level/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/cache_level/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.cache_level.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_challenge_ttl(respx_mock, config):
    """zones.settings.challenge_ttl -> .../settings/challenge_ttl/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/challenge_ttl/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.challenge_ttl.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_ciphers(respx_mock, config):
    """zones.settings.ciphers -> .../settings/ciphers/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/ciphers/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.ciphers.get("id1", "id2")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_settings_development_mode(respx_mock, config):
    """zones.settings.development_mode -> .../settings/development_mode/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/development_mode/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.development_mode.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_email_obfuscation(respx_mock, config):
    """zones.settings.email_obfuscation -> .../settings/email_obfuscation/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/email_obfuscation/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.email_obfuscation.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_h2_prioritization(respx_mock, config):
    """zones.settings.h2_prioritization -> .../settings/h2_prioritization/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/h2_prioritization/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.h2_prioritization.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_hotlink_protection(respx_mock, config):
    """zones.settings.hotlink_protection -> .../settings/hotlink_protection/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/hotlink_protection/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.hotlink_protection.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_http2(respx_mock, config):
    """zones.settings.http2 -> .../settings/http2/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/http2/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.http2.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_http3(respx_mock, config):
    """zones.settings.http3 -> .../settings/http3/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/http3/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.http3.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_image_resizing(respx_mock, config):
    """zones.settings.image_resizing -> .../settings/image_resizing/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/image_resizing/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.image_resizing.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_ip_geolocation(respx_mock, config):
    """zones.settings.ip_geolocation -> .../settings/ip_geolocation/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/ip_geolocation/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.ip_geolocation.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_ipv6(respx_mock, config):
    """zones.settings.ipv6 -> .../settings/ipv6/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/ipv6/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.ipv6.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_min_tls_version(respx_mock, config):
    """zones.settings.min_tls_version -> .../settings/min_tls_version/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/min_tls_version/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.min_tls_version.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_minify(respx_mock, config):
    """zones.settings.minify -> .../settings/minify/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/minify/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.minify.get("id1", "id2")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_settings_mirage(respx_mock, config):
    """zones.settings.mirage -> .../settings/mirage/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/mirage/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.mirage.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_mobile_redirect(respx_mock, config):
    """zones.settings.mobile_redirect -> .../settings/mobile_redirect/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/mobile_redirect/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.mobile_redirect.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_opportunistic_encryption(respx_mock, config):
    """zones.settings.opportunistic_encryption -> .../settings/opportunistic_encryption/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/opportunistic_encryption/id2"  # noqa
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.opportunistic_encryption.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_opportunistic_onion(respx_mock, config):
    """zones.settings.opportunistic_onion -> .../settings/opportunistic_onion/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/opportunistic_onion/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.opportunistic_onion.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_origin_error_page_pass_thru(respx_mock, config):
    """zones.settings.origin_error_page_pass_thru -> .../settings/origin_error_page_pass_thru/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/origin_error_page_pass_thru/id2"  # noqa
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.origin_error_page_pass_thru.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_polish(respx_mock, config):
    """zones.settings.polish -> .../settings/polish/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/polish/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.polish.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_prefetch_preload(respx_mock, config):
    """zones.settings.prefetch_preload -> .../settings/prefetch_preload/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/prefetch_preload/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.prefetch_preload.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_privacy_pass(respx_mock, config):
    """zones.settings.privacy_pass -> .../settings/privacy_pass/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/privacy_pass/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.privacy_pass.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_pseudo_ipv4(respx_mock, config):
    """zones.settings.pseudo_ipv4 -> .../settings/pseudo_ipv4/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/pseudo_ipv4/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.pseudo_ipv4.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_response_buffering(respx_mock, config):
    """zones.settings.response_buffering -> .../settings/response_buffering/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/response_buffering/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.response_buffering.get("id1", "id2")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_settings_rocket_loader(respx_mock, config):
    """zones.settings.rocket_loader -> .../settings/rocket_loader/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/rocket_loader/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.rocket_loader.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_security_header(respx_mock, config):
    """zones.settings.security_header -> .../settings/security_header/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/security_header/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.security_header.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_security_level(respx_mock, config):
    """zones.settings.security_level -> .../settings/security_level/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/security_level/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.security_level.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_server_side_exclude(respx_mock, config):
    """zones.settings.server_side_exclude -> .../settings/server_side_exclude/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/server_side_exclude/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.server_side_exclude.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_sort_query_string_for_cache(respx_mock, config):
    """zones.settings.sort_query_string_for_cache -> .../settings/sort_query_string_for_cache/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/sort_query_string_for_cache/id2"  # noqa
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.sort_query_string_for_cache.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_ssl(respx_mock, config):
    """zones.settings.ssl -> .../settings/ssl/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/ssl/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.ssl.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_tls_1_3(respx_mock, config):
    """zones.settings.tls_1_3 -> .../settings/tls_1_3/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/tls_1_3/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.tls_1_3.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_tls_client_auth(respx_mock, config):
    """zones.settings.tls_client_auth -> .../settings/tls_client_auth/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/tls_client_auth/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.tls_client_auth.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_true_client_ip_header(respx_mock, config):
    """zones.settings.true_client_ip_header -> .../settings/true_client_ip_header/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/true_client_ip_header/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.true_client_ip_header.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_waf(respx_mock, config):
    """zones.settings.waf -> .../settings/waf/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/waf/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.waf.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_webp(respx_mock, config):
    """zones.settings.webp -> .../settings/webp/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/webp/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.webp.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_settings_websockets(respx_mock, config):
    """zones.settings.websockets -> .../settings/websockets/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/settings/websockets/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.settings.websockets.get("id1", "id2")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_analytics(respx_mock, config):
    """zones.analytics is an intermediate node with no .get: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/analytics/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.analytics.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_analytics_colos(respx_mock, config):
    """zones.analytics.colos -> .../analytics/colos/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/analytics/colos/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.analytics.colos.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_analytics_dashboard(respx_mock, config):
    """zones.analytics.dashboard -> .../analytics/dashboard/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/analytics/dashboard/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.analytics.dashboard.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_analytics_latency(respx_mock, config):
    """zones.analytics.latency -> .../analytics/latency/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/analytics/latency/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.analytics.latency.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_analytics_latency_colos(respx_mock, config):
    """zones.analytics.latency.colos -> .../analytics/latency/colos/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/analytics/latency/colos/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.analytics.latency.colos.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_firewall(respx_mock, config):
    """zones.firewall is an intermediate node with no .get: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.firewall.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_firewall_access_rules(respx_mock, config):
    """zones.firewall.access_rules is also an intermediate node: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/access_rules/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.firewall.access_rules.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_firewall_access_rules_rules(respx_mock, config):
    """zones.firewall.access_rules.rules -> .../firewall/access_rules/rules/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/access_rules/rules/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.firewall.access_rules.rules.get("id1", "id2")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_firewall_lockdowns(respx_mock, config):
    """zones.firewall.lockdowns -> .../firewall/lockdowns/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/lockdowns/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.firewall.lockdowns.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_firewall_rules(respx_mock, config):
    """zones.firewall.rules -> .../firewall/rules/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/rules/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.firewall.rules.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_firewall_ua_rules(respx_mock, config):
    """zones.firewall.ua_rules -> .../firewall/ua_rules/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/ua_rules/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.firewall.ua_rules.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_firewall_waf(respx_mock, config):
    """zones.firewall.waf is an intermediate node with no .get: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/waf/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.firewall.waf.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_firewall_waf_overrides(respx_mock, config):
    """zones.firewall.waf.overrides -> .../firewall/waf/overrides/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/waf/overrides/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.firewall.waf.overrides.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_firewall_waf_packages(respx_mock, config):
    """zones.firewall.waf.packages -> .../firewall/waf/packages/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/waf/packages/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.firewall.waf.packages.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_firewall_waf_packages_groups(respx_mock, config):
    """Three-id path: waf.packages.groups -> .../waf/packages/{id}/groups/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/waf/packages/id2/groups/id3"  # noqa
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.firewall.waf.packages.groups.get("id1", "id2", "id3")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_firewall_waf_packages_rules(respx_mock, config):
    """Three-id path: waf.packages.rules -> .../waf/packages/{id}/rules/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/firewall/waf/packages/id2/rules/id3"  # noqa
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.firewall.waf.packages.rules.get("id1", "id2", "id3")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_rate_limits(respx_mock, config):
    """zones.rate_limits -> /zones/{zone}/rate_limits/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/rate_limits/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.rate_limits.get("id1", "id2")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_dns_analytics(respx_mock, config):
    """zones.dns_analytics is an intermediate node with no .get: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/dns_analytics/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.dns_analytics.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_dns_analytics_report(respx_mock, config):
    """zones.dns_analytics.report -> .../dns_analytics/report/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/dns_analytics/report/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.dns_analytics.report.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_dns_analytics_report_bytime(respx_mock, config):
    """zones.dns_analytics.report.bytime -> .../dns_analytics/report/bytime/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/dns_analytics/report/bytime/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.dns_analytics.report.bytime.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_amp(respx_mock, config):
    """zones.amp is an intermediate node with no .get: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/amp/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.amp.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_amp_sxg(respx_mock, config):
    """zones.amp.sxg -> /zones/{zone}/amp/sxg/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/amp/sxg/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.amp.sxg.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_logpush(respx_mock, config):
    """zones.logpush is an intermediate node with no .get: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.logpush.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_logpush_datasets(respx_mock, config):
    """zones.logpush.datasets is also an intermediate node: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/datasets/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.logpush.datasets.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_logpush_datasets_fields(respx_mock, config):
    """Three-id path: logpush.datasets.fields -> .../logpush/datasets/{id}/fields/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/datasets/id2/fields/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.logpush.datasets.fields.get("id1", "id2", "id3")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_logpush_datasets_jobs(respx_mock, config):
    """Three-id path: logpush.datasets.jobs -> .../logpush/datasets/{id}/jobs/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/datasets/id2/jobs/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.logpush.datasets.jobs.get("id1", "id2", "id3")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_logpush_jobs(respx_mock, config):
    """zones.logpush.jobs -> .../logpush/jobs/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/jobs/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.logpush.jobs.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_logpush_ownership(respx_mock, config):
    """zones.logpush.ownership -> .../logpush/ownership/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/ownership/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.logpush.ownership.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_logpush_ownership_validate(respx_mock, config):
    """zones.logpush.ownership.validate -> .../logpush/ownership/validate/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/ownership/validate/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.logpush.ownership.validate.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_logpush_validate(respx_mock, config):
    """zones.logpush.validate is an intermediate node with no .get: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/validate/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.logpush.validate.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_logpush_validate_destination(respx_mock, config):
    """zones.logpush.validate.destination is also an intermediate node: AttributeError."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/validate/destination/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.zones.logpush.validate.destination.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_logpush_validate_destination_exists(respx_mock, config):
    """zones.logpush.validate.destination.exists -> .../logpush/validate/destination/exists/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/validate/destination/exists/id2"  # noqa
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.logpush.validate.destination.exists.get("id1", "id2")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_logpush_validate_origin(respx_mock, config):
    """zones.logpush.validate.origin -> .../logpush/validate/origin/{id}."""
    url = "https://api.doesnotmatter.com/zones/id1/logpush/validate/origin/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        response = await cf.zones.logpush.validate.origin.get("id1", "id2")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_logs(respx_mock, config):
    """`logs` is not a leaf resource; calling .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/logs/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.logs.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_logs_control(respx_mock, config):
    """Logs control endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/logs/control/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.logs.control.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_logs_control_retention(respx_mock, config):
    """`retention` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/logs/control/retention/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.logs.control.retention.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_logs_control_retention_flag(respx_mock, config):
    """Retention flag endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/logs/control/retention/flag/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.logs.control.retention.flag.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_logs_received(respx_mock, config):
    """Received logs endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/logs/received/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.logs.received.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_logs_received_fields(respx_mock, config):
    """Received-fields endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/logs/received/fields/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.logs.received.fields.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_logs_rayids(respx_mock, config):
    """Rayids endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/logs/rayids/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.logs.rayids.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_argo(respx_mock, config):
    """`argo` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/argo/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.argo.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_argo_tiered_caching(respx_mock, config):
    """Argo tiered caching endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/argo/tiered_caching/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.argo.tiered_caching.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_argo_smart_routing(respx_mock, config):
    """Argo smart routing endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/argo/smart_routing/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.argo.smart_routing.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_dnssec(respx_mock, config):
    """DNSSEC endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/dnssec/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.dnssec.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_spectrum(respx_mock, config):
    """`spectrum` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/spectrum/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.spectrum.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_spectrum_analytics(respx_mock, config):
    """`analytics` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/spectrum/analytics/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.spectrum.analytics.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_spectrum_analytics_aggregate(respx_mock, config):
    """`aggregate` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/spectrum/analytics/aggregate/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.spectrum.analytics.aggregate.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_spectrum_analytics_aggregate_current(respx_mock, config):
    """Current aggregate analytics endpoint returns the mocked 200."""
    url = (
        "https://api.doesnotmatter.com"
        "/zones/id1/spectrum/analytics/aggregate/current/id2"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.spectrum.analytics.aggregate.current.get(
            "id1", "id2"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_spectrum_analytics_events(respx_mock, config):
    """`events` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/spectrum/analytics/events/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.spectrum.analytics.events.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_spectrum_analytics_events_bytime(respx_mock, config):
    """Events-bytime endpoint returns the mocked 200."""
    url = (
        "https://api.doesnotmatter.com"
        "/zones/id1/spectrum/analytics/events/bytime/id2"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.spectrum.analytics.events.bytime.get(
            "id1", "id2"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_spectrum_analytics_events_summary(respx_mock, config):
    """Events-summary endpoint returns the mocked 200."""
    url = (
        "https://api.doesnotmatter.com"
        "/zones/id1/spectrum/analytics/events/summary/id2"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.spectrum.analytics.events.summary.get(
            "id1", "id2"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_spectrum_apps(respx_mock, config):
    """Spectrum apps endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/spectrum/apps/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.spectrum.apps.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_ssl(respx_mock, config):
    """`ssl` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/ssl/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.ssl.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_ssl_analyze(respx_mock, config):
    """SSL analyze endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/ssl/analyze/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.ssl.analyze.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_ssl_certificate_packs(respx_mock, config):
    """Certificate packs endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/ssl/certificate_packs/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.ssl.certificate_packs.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_ssl_certificate_packs_order(respx_mock, config):
    """Certificate pack order endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/ssl/certificate_packs/order/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.ssl.certificate_packs.order.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_ssl_certificate_packs_quota(respx_mock, config):
    """Certificate pack quota endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/ssl/certificate_packs/quota/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.ssl.certificate_packs.quota.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_ssl_verification(respx_mock, config):
    """SSL verification endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/ssl/verification/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.ssl.verification.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_ssl_universal(respx_mock, config):
    """`universal` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/ssl/universal/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.ssl.universal.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_ssl_universal_settings(respx_mock, config):
    """Universal SSL settings endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/ssl/universal/settings/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.ssl.universal.settings.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_origin_tls_client_auth(respx_mock, config):
    """Origin TLS client auth endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/origin_tls_client_auth/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.origin_tls_client_auth.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_origin_tls_client_auth_hostnames(respx_mock, config):
    """Per-hostname TLS client auth endpoint returns the mocked 200."""
    url = (
        "https://api.doesnotmatter.com"
        "/zones/id1/origin_tls_client_auth/hostnames/id2"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.origin_tls_client_auth.hostnames.get(
            "id1", "id2"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_origin_tls_client_auth_hostnames_certificates(respx_mock, config):
    """Hostname certificates endpoint returns the mocked 200."""
    url = (
        "https://api.doesnotmatter.com"
        "/zones/id1/origin_tls_client_auth/hostnames/certificates/id2"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.origin_tls_client_auth.hostnames.certificates.get(
            "id1", "id2"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_origin_tls_client_auth_settings(respx_mock, config):
    """TLS client auth settings endpoint returns the mocked 200."""
    url = (
        "https://api.doesnotmatter.com"
        "/zones/id1/origin_tls_client_auth/settings/id2"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.origin_tls_client_auth.settings.get(
            "id1", "id2"
        )
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_workers(respx_mock, config):
    """`workers` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/workers/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.workers.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_workers_filters(respx_mock, config):
    """Workers filters endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/workers/filters/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.workers.filters.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_workers_routes(respx_mock, config):
    """Workers routes endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/workers/routes/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.workers.routes.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_workers_script(respx_mock, config):
    """Workers script endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/workers/script/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.workers.script.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_workers_script_bindings(respx_mock, config):
    """Workers script bindings endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/workers/script/bindings/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.workers.script.bindings.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_load_balancers(respx_mock, config):
    """Zone load balancers endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/load_balancers/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.load_balancers.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_secondary_dns(respx_mock, config):
    """Secondary DNS endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/secondary_dns/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.secondary_dns.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_secondary_dns_force_axfr(respx_mock, config):
    """Force-AXFR endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/secondary_dns/force_axfr/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.secondary_dns.force_axfr.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_media(respx_mock, config):
    """Media endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/media/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.media.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_media_embed(respx_mock, config):
    """Media embed endpoint threads all three path ids."""
    url = "https://api.doesnotmatter.com/zones/id1/media/id2/embed/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.media.embed.get("id1", "id2", "id3")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_media_preview(respx_mock, config):
    """Media preview endpoint threads all three path ids."""
    url = "https://api.doesnotmatter.com/zones/id1/media/id2/preview/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.media.preview.get("id1", "id2", "id3")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_access(respx_mock, config):
    """`access` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/zones/id1/access/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.zones.access.get("id1", "id2")


@pytest.mark.asyncio
async def test_zones_access_apps(respx_mock, config):
    """Access apps endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/access/apps/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.access.apps.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_access_apps_policies(respx_mock, config):
    """Access app policies endpoint threads all three path ids."""
    url = "https://api.doesnotmatter.com/zones/id1/access/apps/id2/policies/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.access.apps.policies.get("id1", "id2", "id3")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_access_apps_revoke_tokens(respx_mock, config):
    """Access app token revocation endpoint threads all three path ids."""
    url = (
        "https://api.doesnotmatter.com"
        "/zones/id1/access/apps/id2/revoke_tokens/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.access.apps.revoke_tokens.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_access_certificates(respx_mock, config):
    """Access certificates endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/access/certificates/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.access.certificates.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_access_apps_ca(respx_mock, config):
    """Access app CA endpoint threads all three path ids."""
    url = "https://api.doesnotmatter.com/zones/id1/access/apps/id2/ca/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.access.apps.ca.get("id1", "id2", "id3")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_access_groups(respx_mock, config):
    """Access groups endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/access/groups/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.access.groups.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_access_identity_providers(respx_mock, config):
    """Access identity providers endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/access/identity_providers/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.access.identity_providers.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_access_organizations(respx_mock, config):
    """Access organizations endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/access/organizations/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.access.organizations.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_access_organizations_revoke_user(respx_mock, config):
    """Organization user revocation endpoint returns the mocked 200."""
    url = (
        "https://api.doesnotmatter.com"
        "/zones/id1/access/organizations/revoke_user/id2"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.access.organizations.revoke_user.get(
            "id1", "id2"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_access_service_tokens(respx_mock, config):
    """Access service tokens endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/access/service_tokens/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.access.service_tokens.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_zones_waiting_rooms(respx_mock, config):
    """Waiting rooms endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/waiting_rooms/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.waiting_rooms.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_waiting_rooms_status(respx_mock, config):
    """Waiting room status endpoint threads all three path ids."""
    url = "https://api.doesnotmatter.com/zones/id1/waiting_rooms/id2/status/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.waiting_rooms.status.get("id1", "id2", "id3")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_zones_waiting_rooms_preview(respx_mock, config):
    """Waiting room preview endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/zones/id1/waiting_rooms/preview/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.zones.waiting_rooms.preview.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_railguns(respx_mock, config):
    """Top-level railguns endpoint takes a single id."""
    url = "https://api.doesnotmatter.com/railguns/id1"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.railguns.get("id1")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_railguns_zones(respx_mock, config):
    """Railgun zones endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/railguns/id1/zones/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.railguns.zones.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_accounts(respx_mock, config):
    """Top-level accounts endpoint takes a single id."""
    url = "https://api.doesnotmatter.com/accounts/id1"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.get("id1")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_billing(respx_mock, config):
    """`billing` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/accounts/id1/billing/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.accounts.billing.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_billing_profile(respx_mock, config):
    """Billing profile endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/billing/profile/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.billing.profile.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_custom_pages(respx_mock, config):
    """Custom pages endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/custom_pages/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.custom_pages.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_accounts_members(respx_mock, config):
    """Account members endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/members/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.members.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_railguns(respx_mock, config):
    """Account railguns endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/railguns/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.railguns.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_railguns_connections(respx_mock, config):
    """Railgun connections endpoint threads all three path ids."""
    url = "https://api.doesnotmatter.com/accounts/id1/railguns/id2/connections/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.railguns.connections.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_registrar(respx_mock, config):
    """`registrar` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/accounts/id1/registrar/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.accounts.registrar.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_registrar_domains(respx_mock, config):
    """Registrar domains endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/registrar/domains/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.registrar.domains.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_accounts_roles(respx_mock, config):
    """Account roles endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/roles/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.roles.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_rules(respx_mock, config):
    """`rules` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/accounts/id1/rules/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.accounts.rules.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_rules_lists(respx_mock, config):
    """Rules lists endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/rules/lists/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.rules.lists.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_rules_lists_items(respx_mock, config):
    """List items endpoint threads all three path ids."""
    url = "https://api.doesnotmatter.com/accounts/id1/rules/lists/id2/items/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.rules.lists.items.get("id1", "id2", "id3")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_rules_lists_bulk_operations(respx_mock, config):
    """Bulk operations endpoint returns the mocked 200."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/rules/lists/bulk_operations/id2"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.rules.lists.bulk_operations.get(
            "id1", "id2"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_rulesets(respx_mock, config):
    """Rulesets endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/rulesets/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.rulesets.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_rulesets_versions(respx_mock, config):
    """Ruleset versions endpoint threads all three path ids."""
    url = "https://api.doesnotmatter.com/accounts/id1/rulesets/id2/versions/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.rulesets.versions.get("id1", "id2", "id3")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_rulesets_import(respx_mock, config):
    """`import` is a reserved word, exposed on the client as `import_`."""
    url = "https://api.doesnotmatter.com/accounts/id1/rulesets/import/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.rulesets.import_.get("id1", "id2")
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_accounts_storage(respx_mock, config):
    """`storage` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/accounts/id1/storage/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.accounts.storage.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_storage_analytics(respx_mock, config):
    """Storage analytics endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/storage/analytics/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.storage.analytics.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_storage_analytics_stored(respx_mock, config):
    """Stored analytics endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/storage/analytics/stored/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.storage.analytics.stored.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_storage_kv(respx_mock, config):
    """`kv` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/accounts/id1/storage/kv/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.accounts.storage.kv.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_storage_kv_namespaces(respx_mock, config):
    """KV namespaces endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/storage/kv/namespaces/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.storage.kv.namespaces.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_storage_kv_namespaces_bulk(respx_mock, config):
    """KV namespace bulk endpoint threads all three path ids."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/storage/kv/namespaces/id2/bulk/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.storage.kv.namespaces.bulk.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_storage_kv_namespaces_keys(respx_mock, config):
    """KV namespace keys endpoint threads all three path ids."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/storage/kv/namespaces/id2/keys/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.storage.kv.namespaces.keys.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_storage_kv_namespaces_values(respx_mock, config):
    """KV namespace values endpoint threads all three path ids."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/storage/kv/namespaces/id2/values/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.storage.kv.namespaces.values.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_accounts_subscriptions(respx_mock, config):
    """Subscriptions endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/subscriptions/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.subscriptions.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_tunnels(respx_mock, config):
    """Tunnels endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/tunnels/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.tunnels.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_tunnels_connections(respx_mock, config):
    """Tunnel connections endpoint threads all three path ids."""
    url = "https://api.doesnotmatter.com/accounts/id1/tunnels/id2/connections/id3"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.tunnels.connections.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_accounts_virtual_dns(respx_mock, config):
    """Virtual DNS endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/virtual_dns/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.virtual_dns.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_virtual_dns_dns_analytics(respx_mock, config):
    """`dns_analytics` is an intermediate node; .get on it must raise."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/virtual_dns/id2/dns_analytics/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.accounts.virtual_dns.dns_analytics.get("id1", "id2", "id3")


@pytest.mark.asyncio
async def test_accounts_virtual_dns_dns_analytics_report(respx_mock, config):
    """DNS analytics report endpoint threads all three path ids."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/virtual_dns/id2/dns_analytics/report/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.virtual_dns.dns_analytics.report.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_virtual_dns_dns_analytics_report_bytime(respx_mock, config):
    """DNS analytics bytime report endpoint threads all three path ids."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/virtual_dns/id2/dns_analytics/report/bytime/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.virtual_dns.dns_analytics.report.bytime.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_accounts_workers(respx_mock, config):
    """`workers` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/accounts/id1/workers/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.accounts.workers.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_workers_scripts(respx_mock, config):
    """Workers scripts endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/workers/scripts/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.workers.scripts.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_workers_scripts_schedules(respx_mock, config):
    """Script schedules endpoint threads all three path ids."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/workers/scripts/id2/schedules/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.workers.scripts.schedules.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_accounts_addressing(respx_mock, config):
    """`addressing` is an intermediate node; .get on it must raise."""
    url = "https://api.doesnotmatter.com/accounts/id1/addressing/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.accounts.addressing.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_addressing_prefixes(respx_mock, config):
    """Addressing prefixes endpoint returns the mocked 200."""
    url = "https://api.doesnotmatter.com/accounts/id1/addressing/prefixes/id2"
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.addressing.prefixes.get("id1", "id2")
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_addressing_prefixes_bgp(respx_mock, config):
    """`bgp` is an intermediate node; .get on it must raise."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/addressing/prefixes/id2/bgp/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as client:
            await client.accounts.addressing.prefixes.bgp.get("id1", "id2", "id3")


@pytest.mark.asyncio
async def test_accounts_addressing_prefixes_bgp_status(respx_mock, config):
    """BGP status endpoint threads all three path ids."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/addressing/prefixes/id2/bgp/status/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.addressing.prefixes.bgp.status.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200


@pytest.mark.asyncio
async def test_accounts_addressing_prefixes_delegations(respx_mock, config):
    """Prefix delegations endpoint threads all three path ids."""
    url = (
        "https://api.doesnotmatter.com"
        "/accounts/id1/addressing/prefixes/id2/delegations/id3"
    )
    respx_mock.get(url).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as client:
        response = await client.accounts.addressing.prefixes.delegations.get(
            "id1", "id2", "id3"
        )
        assert response.status_code == 200
@pytest.mark.asyncio
async def test_accounts_audit_logs(respx_mock, config):
    """audit_logs.get routes to /accounts/{a}/audit_logs/{x}."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/audit_logs/id2").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.audit_logs.get("id1", "id2")
    assert result.status_code == 200
@pytest.mark.asyncio
async def test_accounts_load_balancers(respx_mock, config):
    """Calling .get directly on the 'load_balancers' node raises AttributeError."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/id2"
    ).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.accounts.load_balancers.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_load_balancers_preview(respx_mock, config):
    """preview.get routes to /accounts/{a}/load_balancers/preview/{x}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/preview/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.load_balancers.preview.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_load_balancers_monitors(respx_mock, config):
    """monitors.get routes to /accounts/{a}/load_balancers/monitors/{m}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/monitors/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.load_balancers.monitors.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_load_balancers_monitors_preview(respx_mock, config):
    """monitors.preview.get routes to .../monitors/{m}/preview/{x}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/monitors/id2/preview/id3"  # noqa
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.load_balancers.monitors.preview.get(
            "id1", "id2", "id3"
        )
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_load_balancers_monitors_references(respx_mock, config):
    """monitors.references.get routes to .../monitors/{m}/references/{x}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/monitors/id2/references/id3"  # noqa
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.load_balancers.monitors.references.get(
            "id1", "id2", "id3"
        )
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_load_balancers_pools(respx_mock, config):
    """pools.get routes to /accounts/{a}/load_balancers/pools/{p}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/pools/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.load_balancers.pools.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_load_balancers_pools_health(respx_mock, config):
    """pools.health.get routes to .../pools/{p}/health/{x}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/pools/id2/health/id3"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.load_balancers.pools.health.get("id1", "id2", "id3")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_load_balancers_pools_preview(respx_mock, config):
    """pools.preview.get routes to .../pools/{p}/preview/{x}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/pools/id2/preview/id3"  # noqa
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.load_balancers.pools.preview.get("id1", "id2", "id3")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_load_balancers_pools_references(respx_mock, config):
    """pools.references.get routes to .../pools/{p}/references/{x}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/pools/id2/references/id3"  # noqa
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.load_balancers.pools.references.get(
            "id1", "id2", "id3"
        )
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_load_balancers_regions(respx_mock, config):
    """regions.get routes to /accounts/{a}/load_balancers/regions/{r}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/regions/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.load_balancers.regions.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_load_balancers_search(respx_mock, config):
    """search.get routes to /accounts/{a}/load_balancers/search/{x}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/load_balancers/search/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.load_balancers.search.get("id1", "id2")
    assert result.status_code == 200
@pytest.mark.asyncio
async def test_accounts_firewall(respx_mock, config):
    """Calling .get directly on the 'firewall' node raises AttributeError."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/firewall/id2").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.accounts.firewall.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_firewall_access_rules(respx_mock, config):
    """Calling .get directly on 'access_rules' also raises AttributeError."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/firewall/access_rules/id2"
    ).mock(return_value=httpx.Response(200))
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.accounts.firewall.access_rules.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_firewall_access_rules_rules(respx_mock, config):
    """access_rules.rules.get routes to .../firewall/access_rules/rules/{r}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/firewall/access_rules/rules/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.firewall.access_rules.rules.get("id1", "id2")
    assert result.status_code == 200
@pytest.mark.asyncio
async def test_accounts_secondary_dns(respx_mock, config):
    """Calling .get directly on 'secondary_dns' raises AttributeError."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/secondary_dns/id2").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.accounts.secondary_dns.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_secondary_dns_masters(respx_mock, config):
    """masters.get routes to /accounts/{a}/secondary_dns/masters/{m}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/secondary_dns/masters/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.secondary_dns.masters.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_secondary_dns_primaries(respx_mock, config):
    """primaries.get routes to /accounts/{a}/secondary_dns/primaries/{p}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/secondary_dns/primaries/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.secondary_dns.primaries.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_secondary_dns_tsigs(respx_mock, config):
    """tsigs.get routes to /accounts/{a}/secondary_dns/tsigs/{t}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/secondary_dns/tsigs/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.secondary_dns.tsigs.get("id1", "id2")
    assert result.status_code == 200
@pytest.mark.asyncio
async def test_accounts_stream(respx_mock, config):
    """stream.get routes to /accounts/{a}/stream/{s} (gettable leaf, unlike most parents)."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/stream/id2").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.stream.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_stream_captions(respx_mock, config):
    """captions.get routes to /accounts/{a}/stream/{s}/captions/{c}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/stream/id2/captions/id3"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.stream.captions.get("id1", "id2", "id3")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_stream_copy(respx_mock, config):
    """copy.get routes to /accounts/{a}/stream/copy/{x}."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/stream/copy/id2").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.stream.copy.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_stream_direct_upload(respx_mock, config):
    """direct_upload.get routes to /accounts/{a}/stream/direct_upload/{x}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/stream/direct_upload/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.stream.direct_upload.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_stream_embed(respx_mock, config):
    """embed.get routes to /accounts/{a}/stream/{s}/embed/{e}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/stream/id2/embed/id3"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.stream.embed.get("id1", "id2", "id3")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_stream_keys(respx_mock, config):
    """keys.get routes to /accounts/{a}/stream/keys/{k}."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/stream/keys/id2").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.stream.keys.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_stream_preview(respx_mock, config):
    """preview.get routes to /accounts/{a}/stream/preview/{x}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/stream/preview/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.stream.preview.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_stream_watermarks(respx_mock, config):
    """watermarks.get routes to /accounts/{a}/stream/watermarks/{w}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/stream/watermarks/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.stream.watermarks.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_stream_webhook(respx_mock, config):
    """webhook.get routes to /accounts/{a}/stream/webhook/{w}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/stream/webhook/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.stream.webhook.get("id1", "id2")
    assert result.status_code == 200
@pytest.mark.asyncio
async def test_accounts_access(respx_mock, config):
    """Calling .get directly on the 'access' node raises AttributeError."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/access/id2").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.accounts.access.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_access_certificates(respx_mock, config):
    """certificates.get routes to /accounts/{a}/access/certificates/{c}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/access/certificates/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.access.certificates.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_access_groups(respx_mock, config):
    """groups.get routes to /accounts/{a}/access/groups/{g}."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/access/groups/id2").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.access.groups.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_access_identity_providers(respx_mock, config):
    """identity_providers.get routes to .../access/identity_providers/{i}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/access/identity_providers/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.access.identity_providers.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_access_organizations(respx_mock, config):
    """organizations.get routes to /accounts/{a}/access/organizations/{o}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/access/organizations/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.access.organizations.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_access_organizations_revoke_user(respx_mock, config):
    """revoke_user.get routes to .../access/organizations/revoke_user/{u}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/access/organizations/revoke_user/id2"  # noqa
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.access.organizations.revoke_user.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_access_service_tokens(respx_mock, config):
    """service_tokens.get routes to /accounts/{a}/access/service_tokens/{t}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/access/service_tokens/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.access.service_tokens.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_access_logs(respx_mock, config):
    """Calling .get directly on 'access.logs' raises AttributeError."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/access/logs/id2").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.accounts.access.logs.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_access_logs_access_requests(respx_mock, config):
    """logs.access_requests.get routes to .../access/logs/access_requests/{r}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/access/logs/access_requests/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.access.logs.access_requests.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_access_apps(respx_mock, config):
    """apps.get routes to /accounts/{a}/access/apps/{x}."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/access/apps/id2").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.access.apps.get("id1", "id2")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_accounts_access_apps_revoke_tokens(respx_mock, config):
    """apps.revoke_tokens.get routes to .../access/apps/{x}/revoke_tokens/{t}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/access/apps/id2/revoke_tokens/id3"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.access.apps.revoke_tokens.get("id1", "id2", "id3")
    assert result.status_code == 200
@pytest.mark.asyncio
async def test_accounts_diagnostics(respx_mock, config):
    """Calling .get directly on 'diagnostics' raises AttributeError."""
    respx_mock.get("https://api.doesnotmatter.com/accounts/id1/diagnostics/id2").mock(
        return_value=httpx.Response(200)
    )
    with pytest.raises(AttributeError):
        async with Cloudflare(config=config) as cf:
            await cf.accounts.diagnostics.get("id1", "id2")


@pytest.mark.asyncio
async def test_accounts_diagnostics_traceroute(respx_mock, config):
    """traceroute.get routes to /accounts/{a}/diagnostics/traceroute/{t}."""
    respx_mock.get(
        "https://api.doesnotmatter.com/accounts/id1/diagnostics/traceroute/id2"
    ).mock(return_value=httpx.Response(200))
    async with Cloudflare(config=config) as cf:
        result = await cf.accounts.diagnostics.traceroute.get("id1", "id2")
    assert result.status_code == 200
@pytest.mark.asyncio
async def test_memberships(respx_mock, config):
    """Top-level memberships.get routes to /memberships/{m}."""
    respx_mock.get("https://api.doesnotmatter.com/memberships/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.memberships.get("id1")
    assert result.status_code == 200


@pytest.mark.asyncio
async def test_graphql(respx_mock, config):
    """Top-level graphql.get routes to /graphql/{x}."""
    respx_mock.get("https://api.doesnotmatter.com/graphql/id1").mock(
        return_value=httpx.Response(200)
    )
    async with Cloudflare(config=config) as cf:
        result = await cf.graphql.get("id1")
    assert result.status_code == 200
| 38.047687
| 108
| 0.716044
| 14,439
| 106,115
| 5.101669
| 0.018215
| 0.067442
| 0.063695
| 0.082429
| 0.965111
| 0.958758
| 0.955256
| 0.952785
| 0.950952
| 0.949038
| 0
| 0.029758
| 0.158903
| 106,115
| 2,788
| 109
| 38.061334
| 0.79557
| 0.000886
| 0
| 0.57226
| 0
| 0.012528
| 0.179895
| 0
| 0
| 0
| 0
| 0
| 0.103356
| 1
| 0
| false
| 0.002685
| 0.004027
| 0
| 0.004027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
90162adce242137ee488fda0a3c545313712e37c
| 46
|
py
|
Python
|
test.py
|
waynechuaa/testrepo
|
1d03238358a8724a082fca40370f6283b3ee1069
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
waynechuaa/testrepo
|
1d03238358a8724a082fca40370f6283b3ee1069
|
[
"Apache-2.0"
] | null | null | null |
test.py
|
waynechuaa/testrepo
|
1d03238358a8724a082fca40370f6283b3ee1069
|
[
"Apache-2.0"
] | null | null | null |
def add(x, y):
    """Return the sum of *x* and *y*.

    Implements the previously empty stub: a function named ``add`` that
    returned ``None`` was a defect, not a contract.
    """
    return x + y
def subtract(x, y):
    """Return *x* minus *y*.

    Implements the previously empty stub: a function named ``subtract``
    that returned ``None`` was a defect, not a contract.
    """
    return x - y
| 9.2
| 18
| 0.630435
| 10
| 46
| 2.9
| 0.6
| 0.137931
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195652
| 46
| 5
| 19
| 9.2
| 0.783784
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
901b8df7dcc6d4d97349f4b9e8462f79f9842a6e
| 87
|
py
|
Python
|
models/encoder.py
|
nameoverflow/neuro-cangjie
|
6e687e52f69a3d6e397b78d870e090f1efe00ab3
|
[
"WTFPL"
] | 22
|
2019-12-23T05:38:20.000Z
|
2022-03-25T05:56:12.000Z
|
models/encoder.py
|
nameoverflow/neuro-cangjie
|
6e687e52f69a3d6e397b78d870e090f1efe00ab3
|
[
"WTFPL"
] | 3
|
2021-09-08T01:33:45.000Z
|
2022-03-12T00:09:20.000Z
|
models/encoder.py
|
nameoverflow/neuro-cangjie
|
6e687e52f69a3d6e397b78d870e090f1efe00ab3
|
[
"WTFPL"
] | 2
|
2019-12-24T02:38:44.000Z
|
2019-12-30T00:46:39.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
| 14.5
| 31
| 0.816092
| 15
| 87
| 4.733333
| 0.466667
| 0.464789
| 0.366197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149425
| 87
| 5
| 32
| 17.4
| 0.959459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
5f5f8b47385742087e2ee3d3fc7e2ce0b2baa2c3
| 9,469
|
py
|
Python
|
src/generators/graphs/NetworkGenerator.py
|
Antonio-Cruciani/fair-group-centrality
|
edb77e4b7929004a52704eacdefd2f6d3c640b17
|
[
"MIT"
] | null | null | null |
src/generators/graphs/NetworkGenerator.py
|
Antonio-Cruciani/fair-group-centrality
|
edb77e4b7929004a52704eacdefd2f6d3c640b17
|
[
"MIT"
] | null | null | null |
src/generators/graphs/NetworkGenerator.py
|
Antonio-Cruciani/fair-group-centrality
|
edb77e4b7929004a52704eacdefd2f6d3c640b17
|
[
"MIT"
] | null | null | null |
import os
import src.generators.graphs.BarabasiAlbert as ba
import src.generators.graphs.ErdosRenyi as er
import src.generators.graphs.SBM as sbm
def create_directory_if_not_exists(dir_path):
    """Create directory *dir_path* if it does not already exist.

    :param dir_path: path of the directory to create (parent must exist).
    :raises IOError: if the directory cannot be created, or if *dir_path*
        exists but is not a directory.

    Fix: the original checked ``os.path.isdir`` and then called
    ``os.mkdir`` (LBYL), which raised spuriously if another process
    created the directory between the check and the mkdir. EAFP avoids
    the race while keeping the same IOError contract.
    """
    try:
        os.mkdir(dir_path)
    except FileExistsError:
        # Already present — fine, unless the path is a non-directory.
        if not os.path.isdir(dir_path):
            raise IOError('Cannot create directory', dir_path)
    except OSError:
        raise IOError('Cannot create directory', dir_path)
def generate_network(network_type,
                     n,
                     p=None,
                     q=None,
                     k=None,
                     communities_structure="bfs",
                     number_of_communities=None,
                     communities_size=None,
                     threshold=3):
    """Generate a synthetic network and save it under ./datasets/synthetic/.

    Builds the nested output folder hierarchy (graph model / n / model
    parameter), validates that the parameter required by the chosen model
    was supplied, then delegates community handling and the actual graph
    generation to ``_generate_network_with_community_structure``.

    :param network_type: 'BA' (Barabasi-Albert), 'ER' (Erdos-Renyi) or
        'SBM' (stochastic block model).
    :param n: size of the network (used as a folder name component).
    :param p: probability of attachment (ER) / in-attachment (SBM).
    :param q: probability of out-attachment (SBM only).
    :param k: number of attachments per node (BA only).
    :param communities_structure: community generation strategy, forwarded
        to the delegate (which validates it).
    :param number_of_communities: forwarded to the delegate.
    :param communities_size: forwarded to the delegate.
    :param threshold: forwarded to the delegate (BFS community threshold).
    :raises ValueError: if network_type is not one of BA/ER/SBM, or the
        model-specific parameter (k, p, or p and q) is missing.
    """
    # NOTE(review): folders are created relative to the current working
    # directory; callers presumably run from the project root — verify.
    save_path = os.getcwd() + '/datasets/synthetic/'
    # Check the type of graph
    if network_type == 'BA':
        # Create the folder for the type of graph
        save_path += 'barabasi_albert/'
        create_directory_if_not_exists(save_path)
        # Create the folder for the size of the network
        save_path += 'n' + str(n) + '/'
        create_directory_if_not_exists(save_path)
        # Check that the value k (number of attachments per node) was provided
        if k is None:
            raise ValueError('A Barabasi-Albert graph requires the parameter k')
        # Create the folder for k
        save_path += 'k' + str(k) + '/'
        create_directory_if_not_exists(save_path)
        _generate_network_with_community_structure(save_path, network_type, n, p, q, k, communities_structure,
                                                   number_of_communities, communities_size, threshold)
    elif network_type == 'ER':
        # Create the folder for the type of graph
        save_path += 'erdos_renyi/'
        create_directory_if_not_exists(save_path)
        # Create the folder for the size of the network
        save_path += 'n' + str(n) + '/'
        create_directory_if_not_exists(save_path)
        # Check that the value p (probability of attachment) was provided
        if p is None:
            raise ValueError('An Erdos-Renyi graph requires the parameter p')
        # Create the folder for p
        save_path += 'p' + str(p) + '/'
        create_directory_if_not_exists(save_path)
        _generate_network_with_community_structure(save_path, network_type, n, p, q, k, communities_structure,
                                                   number_of_communities, communities_size, threshold)
    elif network_type == "SBM":
        # Create the folder for the type of graph
        save_path += 'sbm/'
        create_directory_if_not_exists(save_path)
        # Create the folder for the size of the network
        save_path += 'n' + str(n) + '/'
        create_directory_if_not_exists(save_path)
        # Check that the value p (probability of in-attachment) was provided
        if p is None:
            raise ValueError('A SBM graph requires the parameter p')
        # Create the folder for p
        save_path += 'p' + str(p) + '/'
        create_directory_if_not_exists(save_path)
        # Check that the value q (probability of out-attachment) was provided
        if q is None:
            raise ValueError('A SBM graph requires the parameter q')
        # Create the folder for q
        save_path += 'q' + str(q) + '/'
        create_directory_if_not_exists(save_path)
        _generate_network_with_community_structure(save_path, network_type, n, p, q, k, communities_structure,
                                                   number_of_communities, communities_size, threshold)
    else:
        raise ValueError('network_type have to be one of the following values: BA, ER or SBM')
def _generate_network_with_community_structure(dir_path,
                                               network_type,
                                               n,
                                               p=None,
                                               q=None,
                                               k=None,
                                               communities_structure="bfs",
                                               number_of_communities=None,
                                               communities_size=None,
                                               threshold=3):
    """Create the community-structure subfolder, then build and save the graph.

    Dispatches on *communities_structure*:
      * ``'bfs'``  — needs ``threshold``; BA/ER only (SBM is rejected),
      * ``'rd'``   — random communities; needs ``number_of_communities``,
      * ``'man'``  — manual community sizes; needs ``number_of_communities``,
      * ``'auto'`` — automatic detection; BA/ER only (SBM is rejected).

    :param dir_path: base output directory prepared by the caller.
    :raises ValueError: for an unknown communities_structure, a missing
        required parameter, or an unsupported structure/model combination.
    """
    save_path = dir_path
    if communities_structure == 'bfs':
        # Create the folder for the community structure of the graph
        save_path += 'cs_bfs/'
        create_directory_if_not_exists(save_path)
        # Check that the threshold for the bfs community generation method was provided
        if threshold is None:
            raise ValueError('the BFS community structure requires a threshold')
        # Create the folder for the threshold
        save_path += 'c_threshold' + str(threshold) + '/'
        create_directory_if_not_exists(save_path)
        # Check the type of graph and save the graph
        # NOTE(review): 'treshold' (sic) matches the generator classes'
        # keyword spelling — do not "fix" it here without changing them too.
        if network_type == 'BA':
            ba_graph = ba.BarabasiAlbert(n=n, k=k,communities_structure=communities_structure,communities_size= [],treshold=threshold)
            ba_graph.run()
            ba_graph.save_graph(save_path)
        elif network_type == 'ER':
            er_graph = er.ErdosRenyi(n=n, p=p, communities_structure=communities_structure, communities_size= [],treshold=threshold)
            er_graph.run()
            er_graph.save_graph(save_path)
        elif network_type == "SBM":
            raise ValueError('bfs community structure cannot be used with SBM network generation method')
            # sbm_graph = sbm.SBM(n=n, p=p, q=q, number_of_communities=number_of_communities)
    elif communities_structure == 'rd':
        # Create the folder for the community structure of the graph
        save_path += 'cs_rd/'
        create_directory_if_not_exists(save_path)
        # Check that the community number was provided
        if number_of_communities is None:
            raise ValueError('the Random community structure requires the number of communities as parameter')
        # Create the folder for the number of communities
        save_path += 'c' + str(number_of_communities) + '/'
        create_directory_if_not_exists(save_path)
        # Check the type of graph and save the graph
        # NOTE(review): an unrecognized network_type falls through silently
        # here; generate_network validates it upstream.
        if network_type == 'BA':
            ba_graph = ba.BarabasiAlbert(n=n, k=k, communities_number=number_of_communities,
                                         communities_structure=communities_structure)
            ba_graph.run()
            ba_graph.save_graph(save_path)
        elif network_type == 'ER':
            er_graph = er.ErdosRenyi(n=n, p=p, communities_number=number_of_communities,
                                     communities_structure=communities_structure)
            er_graph.run()
            er_graph.save_graph(save_path)
        elif network_type == "SBM":
            sbm_graph = sbm.SBM(n=n, p=p, q=q, number_of_communities=number_of_communities)
            sbm_graph.run()
            sbm_graph.save_graph(save_path)
    elif communities_structure == 'man':
        # Create the folder for the community structure of the graph
        save_path += 'cs_manual/'
        create_directory_if_not_exists(save_path)
        # Check that the community number was provided
        if number_of_communities is None:
            raise ValueError('the Manual community structure requires the number of communities as parameter')
        # Create the folder for the number of communities
        save_path += 'c' + str(number_of_communities) + '/'
        create_directory_if_not_exists(save_path)
        # Check the type of graph and save the graph
        if network_type == 'BA':
            ba_graph = ba.BarabasiAlbert(n=n, k=k, communities_number=number_of_communities,
                                         communities_structure=communities_structure, communities_size=communities_size)
            ba_graph.run()
            ba_graph.save_graph(save_path)
        elif network_type == 'ER':
            er_graph = er.ErdosRenyi(n=n, p=p, communities_number=number_of_communities,
                                     communities_structure=communities_structure, communities_size=communities_size)
            er_graph.run()
            er_graph.save_graph(save_path)
        elif network_type == "SBM":
            # NOTE(review): SBM ignores communities_size here — presumably
            # block sizes come from number_of_communities; verify.
            sbm_graph = sbm.SBM(n=n, p=p, q=q, number_of_communities=number_of_communities)
            sbm_graph.run()
            sbm_graph.save_graph(save_path)
    elif communities_structure == 'auto':
        # Create the folder for the community structure of the graph
        save_path += 'cs_automatic/'
        create_directory_if_not_exists(save_path)
        # Check the type of graph and save the graph
        if network_type == 'BA':
            ba_graph = ba.BarabasiAlbert(n=n, k=k, communities_structure=communities_structure)
            ba_graph.run()
            ba_graph.save_graph(save_path)
        elif network_type == 'ER':
            er_graph = er.ErdosRenyi(n=n, p=p, communities_structure=communities_structure)
            er_graph.run()
            er_graph.save_graph(save_path)
        elif network_type == "SBM":
            raise ValueError('Automatic community detection cannot be used with SBM')
            # sbm_graph = sbm.SBM(n=n, p=p, q=q, number_of_communities=number_of_communities)
            # sbm_graph.save_graph(save_path)
    else:
        raise ValueError("The value for communities_structure have to be one of the following: bfs, rd, man, auto")
| 42.084444
| 134
| 0.608934
| 1,151
| 9,469
| 4.745439
| 0.086881
| 0.073233
| 0.080007
| 0.06591
| 0.833761
| 0.800439
| 0.797327
| 0.788173
| 0.776456
| 0.756683
| 0
| 0.00031
| 0.317985
| 9,469
| 224
| 135
| 42.272321
| 0.845463
| 0.160629
| 0
| 0.673611
| 0
| 0
| 0.10638
| 0.002653
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020833
| false
| 0
| 0.027778
| 0
| 0.048611
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5f87f6651e8c9d295f91283f2b87fa177c76d252
| 64
|
py
|
Python
|
mathchem/__init__.py
|
Pshemysuaf/mathchem-package
|
e084a838fc836325872f37e3f638a0e13fd368f8
|
[
"MIT"
] | null | null | null |
mathchem/__init__.py
|
Pshemysuaf/mathchem-package
|
e084a838fc836325872f37e3f638a0e13fd368f8
|
[
"MIT"
] | null | null | null |
mathchem/__init__.py
|
Pshemysuaf/mathchem-package
|
e084a838fc836325872f37e3f638a0e13fd368f8
|
[
"MIT"
] | null | null | null |
from mathchem.mathchem import *
from mathchem.utilities import *
| 32
| 32
| 0.828125
| 8
| 64
| 6.625
| 0.5
| 0.45283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109375
| 64
| 2
| 32
| 32
| 0.929825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5fb4bc19fbff6e67e80740ff44dc669e69ca4902
| 184
|
py
|
Python
|
monorun/ops/least_squares/__init__.py
|
minghanz/MonoRUn
|
3a575ec7826d2b95e05bc87099b152434743f104
|
[
"MIT"
] | 86
|
2021-03-24T02:10:17.000Z
|
2022-03-30T03:35:41.000Z
|
monorun/ops/least_squares/__init__.py
|
minghanz/MonoRUn
|
3a575ec7826d2b95e05bc87099b152434743f104
|
[
"MIT"
] | 5
|
2021-06-03T09:23:30.000Z
|
2022-03-30T09:13:26.000Z
|
monorun/ops/least_squares/__init__.py
|
minghanz/MonoRUn
|
3a575ec7826d2b95e05bc87099b152434743f104
|
[
"MIT"
] | 10
|
2021-05-18T04:15:39.000Z
|
2021-11-25T09:32:05.000Z
|
from .pnp_uncert_cpu import u2d_pnp_cpu
from .pnp_uncert import PnPUncert, pnp_uncert
from .builder import build_pnp
__all__ = ['u2d_pnp_cpu', 'build_pnp', 'PnPUncert', 'pnp_uncert']
| 30.666667
| 65
| 0.793478
| 29
| 184
| 4.517241
| 0.344828
| 0.274809
| 0.198473
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.108696
| 184
| 5
| 66
| 36.8
| 0.786585
| 0
| 0
| 0
| 0
| 0
| 0.211957
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3993f9a1d94887e6923c5491fce75b8bfabdde00
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/future/moves/collections.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/future/moves/collections.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/future/moves/collections.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/38/a4/3e/4df5209acff66e76519f886b7252c3a223768be1d259c8ecdc10b6898f
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.427083
| 0
| 96
| 1
| 96
| 96
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
39b1e35f1c3714e859e313fc7e2c551cd426780b
| 223
|
py
|
Python
|
src/observer.py
|
AlexMartinezSenent/mastermind_on_steriods
|
62fe41826d110478e4c41f22b7c8d17386d77981
|
[
"MIT"
] | null | null | null |
src/observer.py
|
AlexMartinezSenent/mastermind_on_steriods
|
62fe41826d110478e4c41f22b7c8d17386d77981
|
[
"MIT"
] | null | null | null |
src/observer.py
|
AlexMartinezSenent/mastermind_on_steriods
|
62fe41826d110478e4c41f22b7c8d17386d77981
|
[
"MIT"
] | null | null | null |
class Observer:
    """Base class that tracks every instance and its event subscriptions.

    Each constructed Observer registers itself in the class-level
    ``observers`` list; per-instance callbacks live in ``observables``,
    keyed by event name.
    """

    # Class-level registry of every Observer ever instantiated.
    observers = []

    def __init__(self):
        """Start with no subscriptions and register this instance."""
        self.observables = {}
        Observer.observers.append(self)

    def observe(self, event_name, callback):
        """Subscribe *callback* to *event_name* (one callback per event)."""
        self.observables[event_name] = callback
| 24.777778
| 47
| 0.64574
| 23
| 223
| 6
| 0.521739
| 0.246377
| 0.246377
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.246637
| 223
| 9
| 47
| 24.777778
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
39bea9bc911c45979fcc763479ecfa6e792b1726
| 17
|
py
|
Python
|
src/quiltz/domain/id/__init__.py
|
qwaneu/quiltz-domain
|
3b487c8396c89f653b7aa42b9d34f59baa3ace09
|
[
"MIT"
] | null | null | null |
src/quiltz/domain/id/__init__.py
|
qwaneu/quiltz-domain
|
3b487c8396c89f653b7aa42b9d34f59baa3ace09
|
[
"MIT"
] | null | null | null |
src/quiltz/domain/id/__init__.py
|
qwaneu/quiltz-domain
|
3b487c8396c89f653b7aa42b9d34f59baa3ace09
|
[
"MIT"
] | null | null | null |
from .id import *
| 17
| 17
| 0.705882
| 3
| 17
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 17
| 1
| 17
| 17
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
844af9fcc50e4485f420e379cd7eb5e26f31a997
| 75
|
py
|
Python
|
sabueso/protein/__init__.py
|
dprada/sabueso
|
14843cf3522b5b89db5b61c1541a7015f114dd53
|
[
"MIT"
] | null | null | null |
sabueso/protein/__init__.py
|
dprada/sabueso
|
14843cf3522b5b89db5b61c1541a7015f114dd53
|
[
"MIT"
] | 2
|
2022-01-31T21:22:17.000Z
|
2022-02-04T20:20:12.000Z
|
sabueso/protein/__init__.py
|
dprada/sabueso
|
14843cf3522b5b89db5b61c1541a7015f114dd53
|
[
"MIT"
] | 1
|
2021-07-20T15:01:14.000Z
|
2021-07-20T15:01:14.000Z
|
from .is_protein import is_protein
from .get_function import get_function
| 18.75
| 38
| 0.853333
| 12
| 75
| 5
| 0.5
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 75
| 3
| 39
| 25
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
084ce5238f47b8e0f671050ff3f8f28236473b3e
| 8,228
|
py
|
Python
|
selenium_youtube_crawler/tests/test_gcs_helper.py
|
Open-Speech-EkStep/data-acquisition-pipeline
|
b28df36d417010d85d3e5c5f6882eb8fe89ce5ae
|
[
"MIT"
] | 14
|
2021-06-18T17:02:31.000Z
|
2022-01-23T16:04:34.000Z
|
selenium_youtube_crawler/tests/test_gcs_helper.py
|
susmitabhatt/data-acquisition-pipeline
|
b28df36d417010d85d3e5c5f6882eb8fe89ce5ae
|
[
"MIT"
] | 2
|
2021-06-19T09:46:08.000Z
|
2021-09-10T13:57:57.000Z
|
selenium_youtube_crawler/tests/test_gcs_helper.py
|
susmitabhatt/data-acquisition-pipeline
|
b28df36d417010d85d3e5c5f6882eb8fe89ce5ae
|
[
"MIT"
] | 6
|
2021-04-12T05:03:52.000Z
|
2021-09-11T13:54:45.000Z
|
import os
from unittest import TestCase
from unittest.mock import patch
from selenium_youtube_crawler.gcs_helper import GCSHelper
class TestGCSHelper(TestCase):
    """Unit tests for GCSHelper's upload/download wiring.

    All actual GCS calls (``upload_blob`` / ``download_blob`` / ``check_blob``)
    are patched out, so only argument construction and local-file handling are
    exercised.  NOTE(review): several tests shell out via ``os.system`` with
    POSIX commands (``touch``/``rm``/``mkdir``) — they assume a POSIX shell.
    """

    def setUp(self):
        # Fresh helper per test with fixed bucket fixtures.
        self.bucket_name = "test_bucket"
        self.bucket_path = "test_bucket_path"
        self.gcs_helper = GCSHelper(self.bucket_name, self.bucket_path)

    @patch("selenium_youtube_crawler.gcs_helper.upload_blob")
    def test_upload_archive_to_bucket(self, mock_upload_blob):
        """upload_archive_to_bucket uploads archive/<source>/archive.txt under the bucket path."""
        source = "test"
        self.gcs_helper.upload_archive_to_bucket(source)
        archive_path = "archive/" + source + "/archive.txt"
        mock_upload_blob.assert_called_once_with(self.bucket_name, archive_path, self.bucket_path + "/" + archive_path)

    @patch("selenium_youtube_crawler.gcs_helper.upload_blob")
    def test_upload_token_to_bucket(self, mock_upload_blob):
        """upload_token_to_bucket uploads token.txt to <bucket_path>/token.txt."""
        token_file_name = "token.txt"
        self.gcs_helper.upload_token_to_bucket()
        mock_upload_blob.assert_called_once_with(self.bucket_name, token_file_name,
                                                 self.bucket_path + "/" + token_file_name)

    @patch("selenium_youtube_crawler.gcs_helper.check_blob")
    @patch("selenium_youtube_crawler.gcs_helper.download_blob")
    def test_download_token_from_bucket_if_present(self, mock_download_blob, mock_check_blob):
        """When the token blob exists in the bucket, it is downloaded locally."""
        token_file_name = "token.txt"
        token_path = self.bucket_path + "/" + token_file_name
        mock_check_blob.return_value = True
        self.gcs_helper.download_token_from_bucket()
        mock_check_blob.assert_called_once_with(self.bucket_name, token_path)
        mock_download_blob.assert_called_once_with(self.bucket_name, token_path, token_file_name)

    @patch("selenium_youtube_crawler.gcs_helper.check_blob")
    @patch("selenium_youtube_crawler.gcs_helper.download_blob")
    def test_download_token_from_bucket_if_not_present_in_bucket_and_local(self, mock_download_blob, mock_check_blob):
        """When the token is missing in the bucket and locally, a local token file is created."""
        token_file_name = "token.txt"
        token_path = self.bucket_path + "/" + token_file_name
        mock_check_blob.return_value = False
        self.gcs_helper.download_token_from_bucket()
        mock_check_blob.assert_called_once_with(self.bucket_name, token_path)
        mock_download_blob.assert_not_called()
        # The helper must have created the file itself (download was never called).
        self.assertTrue(os.path.exists(token_file_name))
        os.system("rm " + token_file_name)
        # call of os.system is not tested

    @patch("selenium_youtube_crawler.gcs_helper.check_blob")
    @patch("selenium_youtube_crawler.gcs_helper.download_blob")
    def test_download_token_from_bucket_if_not_present_in_bucket_but_in_local(self, mock_download_blob,
                                                                              mock_check_blob):
        """When the token is missing in the bucket but exists locally, the local file survives."""
        token_file_name = "token.txt"
        os.system("touch " + token_file_name)
        token_path = self.bucket_path + "/" + token_file_name
        mock_check_blob.return_value = False
        self.assertTrue(os.path.exists(token_file_name))
        self.gcs_helper.download_token_from_bucket()
        mock_check_blob.assert_called_once_with(self.bucket_name, token_path)
        mock_download_blob.assert_not_called()
        self.assertTrue(os.path.exists(token_file_name))
        os.system("rm " + token_file_name)

    @patch("selenium_youtube_crawler.gcs_helper.create_required_dirs_for_archive_if_not_present")
    @patch("selenium_youtube_crawler.gcs_helper.check_blob")
    def test_validate_and_download_archive_if_present_in_bucket(self, mock_check_blob, mock_create):
        """If the archive blob exists, it is fetched via download_archive_from_bucket."""
        source = "test"
        archive_file_path = self.bucket_path + "/archive/" + source + "/archive.txt"
        expected = ['hello']
        mock_check_blob.return_value = True
        # Flag proves the patch.object context bodies actually ran.
        is_method_called_flag = False
        with patch.object(self.gcs_helper, 'download_archive_from_bucket') as mock_download:
            with patch.object(self.gcs_helper, 'get_local_archive_data') as mock_get_local:
                mock_download.return_value = expected
                data = self.gcs_helper.validate_and_download_archive(source)
                self.assertEqual(expected, data)
                mock_download.assert_called_once_with(archive_file_path, "archive/" + source + "/archive.txt")
                mock_get_local.assert_not_called()
                is_method_called_flag = True
        mock_create.assert_called_once_with(source)
        self.assertTrue(is_method_called_flag)
        mock_check_blob.assert_called_once_with(self.bucket_name, archive_file_path)

    @patch("selenium_youtube_crawler.gcs_helper.create_required_dirs_for_archive_if_not_present")
    @patch("selenium_youtube_crawler.gcs_helper.check_blob")
    def test_validate_and_download_archive_if_not_present_in_bucket(self, mock_check_blob, mock_create):
        """If the archive blob is absent, local archive data is used and no download happens."""
        source = "test"
        archive_file_path = self.bucket_path + "/archive/" + source + "/archive.txt"
        expected = ['hello']
        mock_check_blob.return_value = False
        is_method_called_flag = False
        with patch.object(self.gcs_helper, 'download_archive_from_bucket') as mock_download:
            with patch.object(self.gcs_helper, 'get_local_archive_data') as mock_get_local:
                mock_get_local.return_value = expected
                data = self.gcs_helper.validate_and_download_archive(source)
                self.assertEqual(expected, data)
                mock_download.assert_not_called()
                is_method_called_flag = True
        mock_create.assert_called_once_with(source)
        self.assertTrue(is_method_called_flag)
        mock_check_blob.assert_called_once_with(self.bucket_name, archive_file_path)

    @patch("selenium_youtube_crawler.gcs_helper.download_blob")
    def test_download_archive_from_bucket(self, mock_download_blob):
        """download_archive_from_bucket downloads the blob and returns the file's lines."""
        source = "test"
        local_archive_file_path = "archive/" + source + "/archive.txt"
        os.system("mkdir archive")
        os.system("mkdir archive/" + source)
        expected = ['hello', 'world']
        # Pre-populate the file the (mocked) download would have produced.
        with open(local_archive_file_path, 'w') as f:
            f.writelines(word + "\n" for word in expected)
        archive_file_path = self.bucket_path + "/" + local_archive_file_path
        result = self.gcs_helper.download_archive_from_bucket(archive_file_path, local_archive_file_path)
        mock_download_blob.assert_called_once_with(self.bucket_name, archive_file_path, local_archive_file_path)
        self.assertTrue(os.path.exists(local_archive_file_path))
        self.assertEqual(expected, result)
        os.system("rm -rf archive")

    def test_get_local_archive_data_if_present(self):
        """get_local_archive_data returns existing file contents as a list of lines."""
        source = "test"
        local_archive_file_path = "archive/" + source + "/archive.txt"
        os.system("mkdir archive")
        os.system("mkdir archive/" + source)
        expected = ['hello', 'world']
        with open(local_archive_file_path, 'w') as f:
            f.writelines(word + "\n" for word in expected)
        result = self.gcs_helper.get_local_archive_data(local_archive_file_path)
        self.assertTrue(os.path.exists(local_archive_file_path))
        self.assertEqual(expected, result)
        os.system("rm -rf archive")

    def test_get_local_archive_data_if_not_present(self):
        """get_local_archive_data creates the file and returns [] when it is absent."""
        source = "test"
        local_archive_file_path = "archive/" + source + "/archive.txt"
        os.system("mkdir archive")
        os.system("mkdir archive/" + source)
        expected = []
        result = self.gcs_helper.get_local_archive_data(local_archive_file_path)
        self.assertTrue(os.path.exists(local_archive_file_path))
        self.assertEqual(expected, result)
        os.system("rm -rf archive")

    @patch("selenium_youtube_crawler.gcs_helper.upload_blob")
    def test_upload_file_to_bucket(self, mock_upload_blob):
        """upload_file_to_bucket uploads <file_dir>/<src> to <bucket_path>/<source>/<dest>."""
        file_dir = "downloads/"
        source_file_name = "test.mp4"
        dest_file_name = "test1.mp4"
        source = "test"
        # NOTE(review): file_dir already ends with '/' so this yields "downloads//test.mp4";
        # the helper evidently builds the same string — confirm against GCSHelper.
        file_path = file_dir + "/" + source_file_name
        bucket_path = self.bucket_path + "/" + source + "/" + dest_file_name
        self.gcs_helper.upload_file_to_bucket(source, source_file_name, dest_file_name, file_dir)
        mock_upload_blob.assert_called_once_with(self.bucket_name, file_path, bucket_path)
| 44.475676
| 119
| 0.708556
| 1,078
| 8,228
| 4.948052
| 0.07885
| 0.052306
| 0.059055
| 0.070304
| 0.871766
| 0.853018
| 0.818898
| 0.798838
| 0.792276
| 0.790026
| 0
| 0.000458
| 0.203087
| 8,228
| 184
| 120
| 44.717391
| 0.813024
| 0.003768
| 0
| 0.676259
| 0
| 0
| 0.155339
| 0.101647
| 0
| 0
| 0
| 0
| 0.215827
| 1
| 0.086331
| false
| 0
| 0.028777
| 0
| 0.122302
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f27c87f419c593ea9e802b379337cddc7eb18a44
| 22
|
py
|
Python
|
src/msys_opt/modules/__init__.py
|
willi-z/msys-opt
|
2054931737893b4ea77a4ba2dbfb6a3e2bce7779
|
[
"BSD-3-Clause"
] | null | null | null |
src/msys_opt/modules/__init__.py
|
willi-z/msys-opt
|
2054931737893b4ea77a4ba2dbfb6a3e2bce7779
|
[
"BSD-3-Clause"
] | null | null | null |
src/msys_opt/modules/__init__.py
|
willi-z/msys-opt
|
2054931737893b4ea77a4ba2dbfb6a3e2bce7779
|
[
"BSD-3-Clause"
] | null | null | null |
from .math import Math
| 22
| 22
| 0.818182
| 4
| 22
| 4.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4b3a116b1b7d51e03ec13aba1041c2ef5e8838da
| 73
|
py
|
Python
|
hug-uwsgi/app.py
|
glasnt/cloudrun-python-examples
|
7cd35932ce77f30900af4272be008f6485d5b13b
|
[
"Apache-2.0"
] | 2
|
2021-09-25T20:09:06.000Z
|
2021-11-03T11:53:30.000Z
|
hug-uwsgi/app.py
|
glasnt/cloudrun-python-examples
|
7cd35932ce77f30900af4272be008f6485d5b13b
|
[
"Apache-2.0"
] | null | null | null |
hug-uwsgi/app.py
|
glasnt/cloudrun-python-examples
|
7cd35932ce77f30900af4272be008f6485d5b13b
|
[
"Apache-2.0"
] | null | null | null |
import hug
@hug.get("/")
def hello():
    """Handle GET / and return a greeting string (exposed via the hug framework)."""
    return "👋 Hello hug - uwsgi"
| 10.428571
| 32
| 0.575342
| 11
| 73
| 3.909091
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.232877
| 73
| 6
| 33
| 12.166667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
4b68254a019a4fb23d7f69b6bcc28004de3ecfbb
| 102
|
py
|
Python
|
ntkit/__init__.py
|
m-chrome/ntkit
|
f3392518eb3061a06a9c08283e6a9f19aea7108e
|
[
"MIT"
] | null | null | null |
ntkit/__init__.py
|
m-chrome/ntkit
|
f3392518eb3061a06a9c08283e6a9f19aea7108e
|
[
"MIT"
] | null | null | null |
ntkit/__init__.py
|
m-chrome/ntkit
|
f3392518eb3061a06a9c08283e6a9f19aea7108e
|
[
"MIT"
] | null | null | null |
from . import advanced
from . import common
from . import factorization
from . import primality_tests
| 20.4
| 29
| 0.803922
| 13
| 102
| 6.230769
| 0.538462
| 0.493827
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 102
| 4
| 30
| 25.5
| 0.94186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4b7c7ae2072ac510b2f8222767741f15ad053467
| 2,869
|
py
|
Python
|
common/settings.py
|
xenomarz/deep-signature
|
f831f05971727c5d00cf3b5c556b6a8b658048df
|
[
"MIT"
] | null | null | null |
common/settings.py
|
xenomarz/deep-signature
|
f831f05971727c5d00cf3b5c556b6a8b658048df
|
[
"MIT"
] | null | null | null |
common/settings.py
|
xenomarz/deep-signature
|
f831f05971727c5d00cf3b5c556b6a8b658048df
|
[
"MIT"
] | null | null | null |
import os

# general
# Root directory for all on-disk datasets and results.
# NOTE(review): hard-coded Windows-style path — confirm before running on other machines.
data_dir = "C:/deep-signature-data"


def _data_path(relative_path):
    """Return the normalized path of *relative_path* joined under ``data_dir``.

    Factored out because every setting below repeated the same
    ``os.path.normpath(os.path.join(data_dir, ...))`` expression.
    """
    return os.path.normpath(os.path.join(data_dir, relative_path))


images_dir_path_train = _data_path("images/train")
images_dir_path_test = _data_path("images/test")

# circles
circles_dir_path_train = _data_path("circles/curves/train")
circles_dir_path_test = _data_path("circles/curves/test")
circles_section_tuplets_dir_path = _data_path("circles/datasets/tuplets/sections")
circles_triangle_tuplets_dir_path = _data_path("circles/datasets/tuplets/triangles")
circles_section_tuplets_results_dir_path = _data_path("circles/results/tuplets/sections")
circles_triangle_tuplets_results_dir_path = _data_path("circles/results/tuplets/triangles")

# level-curves
level_curves_dir_path_train = _data_path("level-curves/curves/train")
level_curves_dir_path_test = _data_path("level-curves/curves/test")
level_curves_euclidean_curvature_tuplets_dir_path = _data_path("level-curves/datasets/tuplets/curvature/euclidean")
level_curves_euclidean_curvature_tuplets_results_dir_path = _data_path("level-curves/results/tuplets/curvature/euclidean")
level_curves_equiaffine_curvature_tuplets_dir_path = _data_path("level-curves/datasets/tuplets/curvature/equiaffine")
level_curves_equiaffine_curvature_tuplets_results_dir_path = _data_path("level-curves/results/tuplets/curvature/equiaffine")
level_curves_affine_curvature_tuplets_dir_path = _data_path("level-curves/datasets/tuplets/curvature/affine")
level_curves_affine_curvature_tuplets_results_dir_path = _data_path("level-curves/results/tuplets/curvature/affine")
level_curves_euclidean_arclength_tuplets_dir_path = _data_path("level-curves/datasets/tuplets/arclength/euclidean")
level_curves_euclidean_arclength_tuplets_results_dir_path = _data_path("level-curves/results/tuplets/arclength/euclidean")
level_curves_equiaffine_arclength_tuplets_dir_path = _data_path("level-curves/datasets/tuplets/arclength/equiaffine")
level_curves_equiaffine_arclength_tuplets_results_dir_path = _data_path("level-curves/results/tuplets/arclength/equiaffine")
level_curves_affine_arclength_tuplets_dir_path = _data_path("level-curves/datasets/tuplets/arclength/affine")
level_curves_affine_arclength_tuplets_results_dir_path = _data_path("level-curves/results/tuplets/arclength/affine")
| 89.65625
| 154
| 0.838968
| 432
| 2,869
| 5.25
| 0.06713
| 0.116402
| 0.135802
| 0.155203
| 0.915785
| 0.754409
| 0.754409
| 0.754409
| 0.754409
| 0.749118
| 0
| 0
| 0.035901
| 2,869
| 31
| 155
| 92.548387
| 0.819957
| 0.00976
| 0
| 0
| 0
| 0
| 0.295735
| 0.273881
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
29a5addc5a811ee9e151dde9ee2a7f76fec7a40d
| 47
|
py
|
Python
|
src/__init__.py
|
ekondayan/micropython-logger
|
4642c96d5dc4888614d9a0450230f396a6f6c231
|
[
"BSD-3-Clause"
] | null | null | null |
src/__init__.py
|
ekondayan/micropython-logger
|
4642c96d5dc4888614d9a0450230f396a6f6c231
|
[
"BSD-3-Clause"
] | null | null | null |
src/__init__.py
|
ekondayan/micropython-logger
|
4642c96d5dc4888614d9a0450230f396a6f6c231
|
[
"BSD-3-Clause"
] | null | null | null |
from .defs import *
from .logger import Logger
| 15.666667
| 26
| 0.765957
| 7
| 47
| 5.142857
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 27
| 23.5
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
29ae3cebd39c05fe221614c3728885b122b0b038
| 213
|
py
|
Python
|
util.py
|
INRokh/movies
|
c3f0139c0eb2f1010e6ad608dcc7011e3e4e1d66
|
[
"Apache-2.0"
] | 2
|
2019-08-29T02:58:44.000Z
|
2019-12-07T06:46:50.000Z
|
util.py
|
INRokh/movies
|
c3f0139c0eb2f1010e6ad608dcc7011e3e4e1d66
|
[
"Apache-2.0"
] | null | null | null |
util.py
|
INRokh/movies
|
c3f0139c0eb2f1010e6ad608dcc7011e3e4e1d66
|
[
"Apache-2.0"
] | null | null | null |
class GracefulShutdown(object):
    """Latch that records whether a shutdown request has been observed.

    ``exit`` has the ``(signum, frame)`` shape of a ``signal`` handler so an
    instance's bound method can be installed as one; once called, the latch
    stays set.
    """

    def __init__(self):
        """Start in the running (not-shut-down) state."""
        self.is_shutdown = False

    def exit(self, signum, frame):
        """Mark the process for shutdown; *signum* and *frame* are ignored."""
        self.is_shutdown = True

    def is_exit(self):
        """Return True once a shutdown has been requested."""
        return self.is_shutdown
| 21.3
| 34
| 0.647887
| 27
| 213
| 4.814815
| 0.518519
| 0.138462
| 0.323077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.262911
| 213
| 9
| 35
| 23.666667
| 0.828025
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
29c05dcb13767356a987845e2c66ebf42d9f223a
| 25,964
|
py
|
Python
|
heaviside/tests/test_activities.py
|
BrockWester/heaviside
|
6abbcc65e46998b6964decc501f791936ef16b3d
|
[
"Apache-2.0"
] | 36
|
2017-01-27T21:28:50.000Z
|
2022-03-15T12:29:01.000Z
|
heaviside/tests/test_activities.py
|
BrockWester/heaviside
|
6abbcc65e46998b6964decc501f791936ef16b3d
|
[
"Apache-2.0"
] | 10
|
2017-05-03T17:15:30.000Z
|
2021-01-07T22:55:08.000Z
|
heaviside/tests/test_activities.py
|
BrockWester/heaviside
|
6abbcc65e46998b6964decc501f791936ef16b3d
|
[
"Apache-2.0"
] | 13
|
2017-05-18T15:02:10.000Z
|
2021-07-27T23:09:54.000Z
|
# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import types
import unittest
from io import StringIO
from botocore.exceptions import ClientError
try:
from unittest import mock
except ImportError:
import mock
from .utils import MockPath, MockSession
from heaviside.activities import TaskMixin, ActivityMixin
from heaviside.activities import fanout, fanout_nonblocking, SFN
from heaviside.exceptions import ActivityError
# Suppress "no handler found" warnings for log records emitted by the module under test.
import logging
log = logging.getLogger("heaviside.activities")
log.addHandler(logging.NullHandler())
#log.addHandler(logging.StreamHandler())  # uncomment to see heaviside log output while debugging tests
class TimeoutError(ClientError):
    """A botocore ``ClientError`` pre-filled with the ``TaskTimedOut`` error
    code, as Step Functions raises when a task token has expired.

    Used by the tests as a ready-made ``side_effect``.
    """

    def __init__(self):
        error_response = {'Error': {'Code': 'TaskTimedOut'}}
        super(TimeoutError, self).__init__(error_response, "Test")
class BossError(Exception):
    """Plain Exception subclass used by the tests as an arbitrary error type."""
class TestFanout(unittest.TestCase):
    """Tests for ``heaviside.activities.fanout`` against a mocked
    Step Functions client.

    Each test stubs the ``stepfunctions`` client on a ``MockSession`` and
    then asserts the exact sequence of boto-style calls ``fanout`` makes.
    ``time.sleep`` and ``SFN.create_name`` are patched so runs are fast and
    execution names are deterministic ('ZZZ').
    """

    @mock.patch.object(SFN, 'create_name')
    @mock.patch('heaviside.activities.time.sleep')
    def test_args_generator(self, mSleep, mCreateName):
        """fanout accepts a generator of sub-args and starts one execution per item."""
        mCreateName.return_value = 'ZZZ'
        iSession = MockSession()
        client = iSession.client('stepfunctions')
        client.list_state_machines.return_value = {
            'stateMachines': [{
                'stateMachineArn': 'XXX'
            }]
        }
        client.start_execution.return_value = {
            'executionArn': 'YYY'
        }
        client.describe_execution.return_value = {
            'status': 'SUCCEEDED',
            'output': 'null'
        }
        expected = [None]
        actual = fanout(iSession,
                        'XXX',
                        (i for i in range(0, 1)))
        self.assertEqual(actual, expected)
        calls = [
            mock.call.list_state_machines(),
            mock.call.start_execution(stateMachineArn = 'XXX',
                                      name = 'ZZZ',
                                      input = '0'),
            mock.call.list_state_machines(),
            mock.call.describe_execution(executionArn = 'YYY')
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch.object(SFN, 'create_name')
    @mock.patch('heaviside.activities.time.sleep')
    def test_args_list(self, mSleep, mCreateName):
        """fanout accepts a plain list of sub-args; same call sequence as the generator case."""
        mCreateName.return_value = 'ZZZ'
        iSession = MockSession()
        client = iSession.client('stepfunctions')
        client.list_state_machines.return_value = {
            'stateMachines': [{
                'stateMachineArn': 'XXX'
            }]
        }
        client.start_execution.return_value = {
            'executionArn': 'YYY'
        }
        client.describe_execution.return_value = {
            'status': 'SUCCEEDED',
            'output': 'null'
        }
        expected = [None]
        actual = fanout(iSession,
                        'XXX',
                        [i for i in range(0, 1)])
        self.assertEqual(actual, expected)
        calls = [
            mock.call.list_state_machines(),
            mock.call.start_execution(stateMachineArn = 'XXX',
                                      name = 'ZZZ',
                                      input = '0'),
            mock.call.list_state_machines(),
            mock.call.describe_execution(executionArn = 'YYY')
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch.object(SFN, 'create_name')
    @mock.patch('heaviside.activities.time.sleep')
    def test_gt_concurrent(self, mSleep, mCreateName):
        """With max_concurrent=1 and two inputs, executions run one after the other."""
        mCreateName.return_value = 'ZZZ'
        iSession = MockSession()
        client = iSession.client('stepfunctions')
        client.list_state_machines.return_value = {
            'stateMachines': [{
                'stateMachineArn': 'XXX'
            }]
        }
        client.start_execution.return_value = {
            'executionArn': 'YYY'
        }
        client.describe_execution.return_value = {
            'status': 'SUCCEEDED',
            'output': 'null'
        }
        expected = [None, None]
        actual = fanout(iSession,
                        'XXX',
                        [i for i in range(0, 2)],
                        max_concurrent=1)
        self.assertEqual(actual, expected)
        # Note the second start_execution only happens after the first finishes.
        calls = [
            mock.call.list_state_machines(),
            mock.call.start_execution(stateMachineArn = 'XXX',
                                      name = 'ZZZ',
                                      input = '0'),
            mock.call.list_state_machines(),
            mock.call.describe_execution(executionArn = 'YYY'),
            mock.call.start_execution(stateMachineArn = 'XXX',
                                      name = 'ZZZ',
                                      input = '1'),
            mock.call.list_state_machines(),
            mock.call.describe_execution(executionArn = 'YYY'),
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch.object(SFN, 'create_name')
    @mock.patch('heaviside.activities.time.sleep')
    def test_sfn_error(self, mSleep, mCreateName):
        """A FAILED sub-execution raises ActivityError and stops the still-running sibling."""
        mCreateName.return_value = 'ZZZ'
        iSession = MockSession()
        client = iSession.client('stepfunctions')
        client.list_state_machines.return_value = {
            'stateMachines': [{
                'stateMachineArn': 'XXX'
            }]
        }
        # First execution fails, second is still running when the failure is seen.
        client.start_execution.side_effect = [
            { 'executionArn': 'YYY' },
            { 'executionArn': 'YYYY' }
        ]
        client.describe_execution.side_effect = [
            { 'status': 'FAILED' },
            { 'status': 'RUNNING' }
        ]
        client.get_execution_history.return_value = {
            'events': [{
                'executionFailedEventDetails': {
                    'error': 'error',
                    'cause': 'cause'
                }
            }]
        }
        try:
            fanout(iSession,
                   'XXX',
                   [i for i in range(0, 2)])
            self.assertFalse(True, "fanout should result in an ActivityError")
        except ActivityError as e:
            # error/cause come from the failed execution's history event.
            self.assertEqual(e.error, 'error')
            self.assertEqual(e.cause, 'cause')
        calls = [
            mock.call.list_state_machines(),
            mock.call.start_execution(stateMachineArn = 'XXX',
                                      name = 'ZZZ',
                                      input = '0'),
            mock.call.start_execution(stateMachineArn = 'XXX',
                                      name = 'ZZZ',
                                      input = '1'),
            mock.call.list_state_machines(),
            mock.call.describe_execution(executionArn = 'YYY'),
            mock.call.get_execution_history(executionArn = 'YYY',
                                            reverseOrder = True),
            mock.call.describe_execution(executionArn = 'YYYY'),
            mock.call.stop_execution(executionArn = 'YYYY',
                                     error = "Heaviside.Fanout",
                                     cause = "Sub-process error detected")
        ]
        self.assertEqual(client.mock_calls, calls)
class TestFanoutNonBlocking(unittest.TestCase):
    """Tests for ``fanout_nonblocking``, the step-at-a-time variant of fanout
    driven by an explicit state dict (``args``) that is threaded through
    repeated calls.
    """

    @mock.patch.object(SFN, 'create_name')
    @mock.patch('heaviside.activities.time.sleep')
    def test_gt_concurrent(self, mSleep, mCreateName):
        """Three successive calls launch two executions (max_concurrent=1),
        collect both results, halve rampup_delay per launch, and finish."""
        mCreateName.return_value = 'ZZZ'
        iSession = MockSession()
        client = iSession.client('stepfunctions')
        client.list_state_machines.return_value = {
            'stateMachines': [{
                'stateMachineArn': 'XXX'
            }]
        }
        client.start_execution.return_value = {
            'executionArn': 'YYY'
        }
        client.describe_execution.return_value = {
            'status': 'SUCCEEDED',
            'output': 'null'
        }
        # State dict consumed and updated in place by fanout_nonblocking.
        args = {
            'sub_sfn': 'XXX',
            'common_sub_args': {},
            'sub_args': [i for i in range(0, 2)],
            'max_concurrent': 1,
            'rampup_delay': 10,
            'rampup_backoff': 0.5,
            'status_delay': 0,
            'finished': False,
            'running': [],
            'results': [],
        }
        # 1st call: launches execution 0; delay backs off 10 -> 5.
        args1 = fanout_nonblocking(args, iSession)
        self.assertFalse(args1['finished'])
        self.assertEqual(args1['running'], ['YYY'])
        self.assertEqual(args1['results'], [])
        self.assertEqual(args1['rampup_delay'], 5)
        # 2nd call: harvests result 0, launches execution 1; delay 5 -> 2.
        args2 = fanout_nonblocking(args, iSession)
        self.assertFalse(args2['finished'])
        self.assertEqual(args2['running'], ['YYY'])
        self.assertEqual(args2['results'], [None])
        self.assertEqual(args2['rampup_delay'], 2)
        # 3rd call: harvests result 1, nothing left to launch.
        args3 = fanout_nonblocking(args, iSession)
        self.assertTrue(args3['finished'])
        self.assertEqual(args3['running'], [])
        self.assertEqual(args3['results'], [None, None])
        self.assertEqual(args3['rampup_delay'], 2) # no processes launched
        calls = [
            mock.call.list_state_machines(),
            mock.call.start_execution(stateMachineArn = 'XXX',
                                      name = 'ZZZ',
                                      input = '0'),
            mock.call.list_state_machines(),
            mock.call.describe_execution(executionArn = 'YYY'),
            mock.call.start_execution(stateMachineArn = 'XXX',
                                      name = 'ZZZ',
                                      input = '1'),
            mock.call.list_state_machines(),
            mock.call.describe_execution(executionArn = 'YYY'),
        ]
        self.assertEqual(client.mock_calls, calls)
class TestTaskMixin(unittest.TestCase):
    """Tests for ``TaskMixin``: success/failure/heartbeat reporting and
    ``handle_task`` dispatch, with ``create_session`` patched to return a
    ``MockSession`` whose stepfunctions client records all calls.
    """

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_success(self, mCreateSession):
        """success() sends send_task_success and clears the stored token."""
        iSession = MockSession()
        client = iSession.client('stepfunctions')
        mCreateSession.return_value = (iSession, '123456')
        task = TaskMixin()
        task.token = 'token'
        self.assertEqual(task.token, 'token')
        task.success(None)
        self.assertEqual(task.token, None)
        call = mock.call.send_task_success(taskToken = 'token',
                                           output = 'null')
        self.assertEqual(client.mock_calls, [call])

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_success_no_token(self, mCreateSession):
        """success() without a token raises instead of calling AWS."""
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        task = TaskMixin()
        with self.assertRaises(Exception):
            task.success(None)
        self.assertEqual(task.token, None)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_success_timeout(self, mCreateSession):
        """A TaskTimedOut error from send_task_success is swallowed; token still cleared."""
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')
        client.send_task_success.side_effect = TimeoutError()
        task = TaskMixin()
        task.token = 'token'
        task.success(None)
        self.assertEqual(task.token, None)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_failure(self, mCreateSession):
        """failure() sends send_task_failure and clears the stored token."""
        iSession = MockSession()
        client = iSession.client('stepfunctions')
        mCreateSession.return_value = (iSession, '123456')
        task = TaskMixin()
        task.token = 'token'
        self.assertEqual(task.token, 'token')
        task.failure(None, None)
        self.assertEqual(task.token, None)
        call = mock.call.send_task_failure(taskToken = 'token',
                                           error = None,
                                           cause = None)
        self.assertEqual(client.mock_calls, [call])

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_failure_no_token(self, mCreateSession):
        """failure() without a token raises instead of calling AWS."""
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        task = TaskMixin()
        with self.assertRaises(Exception):
            task.failure(None, None)
        self.assertEqual(task.token, None)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_failure_timeout(self, mCreateSession):
        """A TaskTimedOut error from send_task_failure is swallowed; token still cleared."""
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')
        client.send_task_failure.side_effect = TimeoutError()
        task = TaskMixin()
        task.token = 'token'
        task.failure(None, None)
        self.assertEqual(task.token, None)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_heartbeat(self, mCreateSession):
        """heartbeat() sends send_task_heartbeat with the stored token."""
        iSession = MockSession()
        client = iSession.client('stepfunctions')
        mCreateSession.return_value = (iSession, '123456')
        task = TaskMixin()
        task.token = 'token'
        task.heartbeat()
        call = mock.call.send_task_heartbeat(taskToken = 'token')
        self.assertEqual(client.mock_calls, [call])

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_heartbeat_no_token(self, mCreateSession):
        """heartbeat() without a token raises."""
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        task = TaskMixin()
        with self.assertRaises(Exception):
            task.heartbeat()

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_run_function(self, mCreateSession):
        """handle_task with a plain function process reports success once."""
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')
        task = TaskMixin()
        task.handle_task('token', None)
        self.assertEqual(task.token, None)
        call = mock.call.send_task_success(taskToken = 'token',
                                           output = 'null')
        self.assertEqual(client.mock_calls, [call])

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_run_generator(self, mCreateSession):
        """handle_task with a generator process heartbeats per yield, then succeeds."""
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        def target(input_):
            yield
            yield
            return

        # Just make sure the target is actually a generator
        self.assertEqual(type(target(None)), types.GeneratorType)
        task = TaskMixin(process = target)
        task.handle_task('token', None)
        self.assertEqual(task.token, None)
        call = mock.call.send_task_success(taskToken = 'token',
                                           output = 'null')
        call_ = mock.call.send_task_heartbeat(taskToken = 'token')
        # Two yields -> two heartbeats, then the success call.
        calls = [call_, call_, call]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_run_activity_error(self, mCreateSession):
        """An ActivityError from the process maps to send_task_failure(error, cause)."""
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')
        target = mock.MagicMock()
        target.side_effect = ActivityError('error', 'cause')
        task = TaskMixin(process = target)
        task.handle_task('token', None)
        self.assertEqual(task.token, None)
        call = mock.call.send_task_failure(taskToken = 'token',
                                           error = 'error',
                                           cause = 'cause')
        self.assertEqual(client.mock_calls, [call])

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_run_exception(self, mCreateSession):
        """An arbitrary exception maps to send_task_failure with the class name as error."""
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')
        target = mock.MagicMock()
        target.side_effect = BossError('cause')
        task = TaskMixin(process = target)
        task.handle_task('token', None)
        self.assertEqual(task.token, None)
        call = mock.call.send_task_failure(taskToken = 'token',
                                           error = 'BossError',
                                           cause = 'cause')
        self.assertEqual(client.mock_calls, [call])

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_run_timeout(self, mCreateSession):
        """A TaskTimedOut error from the process is swallowed with no AWS calls made."""
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')
        target = mock.MagicMock()
        target.side_effect = TimeoutError()
        task = TaskMixin(process = target)
        task.handle_task('token', None)
        self.assertEqual(task.token, None)
        self.assertEqual(client.mock_calls, [])
class TestActivityMixin(unittest.TestCase):
    # Tests for the ActivityMixin create/delete/poll/run lifecycle.  All AWS
    # Step Functions traffic goes through MockSession, so each test asserts
    # the exact sequence of calls recorded on the mocked client.
    #
    # NOTE(review): the string literal below is a disabled test kept as the
    # class docstring (see the open question about testing the import).
    """
    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_constructor(self, mCreateSession):
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        client.list_activities.return_value = {
            'activities':[{
                'name': 'name',
                'activityArn': 'XXX'
            }]
        }

        activity = ActivityProcess('name', None)

        self.assertEqual(activity.arn, 'XXX')

        calls = [
            mock.call.list_activities()
        ]
        self.assertEqual(client.mock_calls, calls)
    """
    # DP ???: How to test the import

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_create_activity(self, mCreateSession):
        # With no ARN yet, create_activity() registers the activity and
        # stores the ARN returned by AWS.
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        client.create_activity.return_value = {
            'activityArn': 'XXX'
        }

        activity = ActivityMixin()
        activity.name = 'name'
        self.assertEqual(activity.arn, None)

        activity.create_activity()
        self.assertEqual(activity.arn, 'XXX')

        calls = [
            mock.call.create_activity(name = 'name')
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_create_activity_exists(self, mCreateSession):
        # An already-set ARN makes create_activity() a no-op.
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        activity = ActivityMixin()
        activity.arn = 'XXX'

        activity.create_activity()
        self.assertEqual(activity.arn, 'XXX')

        calls = [
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_create_activity_exception(self, mCreateSession):
        # With exception=True an existing ARN raises instead of being
        # silently kept; no AWS call is made either way.
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        activity = ActivityMixin()
        activity.arn = 'XXX'

        with self.assertRaises(Exception):
            activity.create_activity(exception=True)

        self.assertEqual(activity.arn, 'XXX')

        calls = [
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_delete_activity(self, mCreateSession):
        # delete_activity() removes the activity and clears the stored ARN.
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        activity = ActivityMixin()
        activity.arn = 'XXX'

        activity.delete_activity()
        self.assertEqual(activity.arn, None)

        calls = [
            mock.call.delete_activity(activityArn = 'XXX')
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_delete_doesnt_exist(self, mCreateSession):
        # Deleting when no ARN is set is a no-op.
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        activity = ActivityMixin()
        self.assertEqual(activity.arn, None)

        activity.delete_activity()
        self.assertEqual(activity.arn, None)

        calls = [
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_delete_activity_exception(self, mCreateSession):
        # With exception=True a missing ARN raises; still no AWS call.
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        activity = ActivityMixin()
        self.assertEqual(activity.arn, None)

        with self.assertRaises(Exception):
            activity.delete_activity(exception=True)

        self.assertEqual(activity.arn, None)

        calls = [
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_poll_task_exception(self, mCreateSession):
        # Polling without an ARN raises before any AWS call is made.
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        activity = ActivityMixin()
        self.assertEqual(activity.arn, None)

        with self.assertRaises(Exception):
            activity.poll_task(worker = 'worker')

        calls = [
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_poll_task(self, mCreateSession):
        # A successful poll returns the token and the JSON-decoded input.
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        client.get_activity_task.return_value = {
            'taskToken': 'YYY',
            'input': '{}'
        }

        activity = ActivityMixin()
        activity.arn = 'XXX'

        token, input_ = activity.poll_task('worker')

        self.assertEqual(token, 'YYY')
        self.assertEqual(input_, {})

        calls = [
            mock.call.get_activity_task(activityArn = 'XXX',
                                        workerName = 'worker')
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_poll_task_no_work(self, mCreateSession):
        # An empty taskToken means no work available: (None, None).
        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        client.get_activity_task.return_value = {
            'taskToken': ''
        }

        activity = ActivityMixin()
        activity.arn = 'XXX'

        token, input_ = activity.poll_task('worker')

        self.assertEqual(token, None)
        self.assertEqual(input_, None)

        calls = [
            mock.call.get_activity_task(activityArn = 'XXX',
                                        workerName = 'worker')
        ]
        self.assertEqual(client.mock_calls, calls)

    @mock.patch('heaviside.activities.random.sample', autospec=True)
    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_run(self, mCreateSession, mSample):
        # Full polling loop: resolve the ARN by name, poll once, hand the
        # task to the handler, then exit (the handler's side effect clears
        # the polling flag).  random.sample is pinned so the generated
        # worker name 'name-XXX' is deterministic.
        mSample.return_value = 'XXX'

        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        client.list_activities.return_value = {
            'activities':[{
                'name': 'name',
                'activityArn': 'XXX'
            }]
        }

        client.get_activity_task.return_value = {
            'taskToken': 'YYY',
            'input': '{}'
        }

        target = mock.MagicMock()
        activity = ActivityMixin(handle_task = target)

        def stop_loop(*args, **kwargs):
            # Stop after the first dispatch so the test terminates.
            activity.polling = False
            return mock.DEFAULT
        target.side_effect = stop_loop

        activity.run('name')

        calls = [
            mock.call.list_activities(),
            mock.call.get_activity_task(activityArn = 'XXX',
                                        workerName = 'name-XXX')
        ]
        self.assertEqual(client.mock_calls, calls)

        # The handler is constructed with (token, input) and started.
        calls = [
            mock.call('YYY', {}),
            mock.call().start()
        ]
        self.assertEqual(target.mock_calls, calls)

    @mock.patch('heaviside.activities.random.sample', autospec=True)
    @mock.patch('heaviside.activities.create_session', autospec=True)
    def test_run_exception(self, mCreateSession, mSample):
        # A BossError during polling is absorbed by run(); the loop still
        # exits once the polling flag is cleared.
        mSample.return_value = 'XXX'

        iSession = MockSession()
        mCreateSession.return_value = (iSession, '123456')
        client = iSession.client('stepfunctions')

        client.list_activities.return_value = {
            'activities':[{
                'name': 'name',
                'activityArn': 'XXX'
            }]
        }

        activity = ActivityMixin()

        def stop_loop(*args, **kwargs):
            # Stop the loop and simulate a polling failure at the same time.
            activity.polling = False
            raise BossError(None)
        client.get_activity_task.side_effect = stop_loop

        activity.run('name')

        calls = [
            mock.call.list_activities(),
            mock.call.get_activity_task(activityArn = 'XXX',
                                        workerName = 'name-XXX')
        ]
        self.assertEqual(client.mock_calls, calls)
| 33.54522
| 78
| 0.583808
| 2,388
| 25,964
| 6.201424
| 0.10469
| 0.06989
| 0.038895
| 0.060504
| 0.810656
| 0.799514
| 0.786616
| 0.780539
| 0.765818
| 0.747721
| 0
| 0.011143
| 0.30523
| 25,964
| 773
| 79
| 33.588616
| 0.809801
| 0.052573
| 0
| 0.678141
| 0
| 0
| 0.117546
| 0.044488
| 0
| 0
| 0
| 0
| 0.13253
| 1
| 0.056799
| false
| 0.001721
| 0.020654
| 0
| 0.091222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
29eb2cc26ad1a2bd9414576190fe12ac0bf01b7b
| 8,604
|
py
|
Python
|
tests/test_bilves_derivatives.py
|
MatKie/MT_micellization
|
bf0f6a2309b171746f6e43af5349301f586e34cd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_bilves_derivatives.py
|
MatKie/MT_micellization
|
bf0f6a2309b171746f6e43af5349301f586e34cd
|
[
"BSD-3-Clause"
] | 1
|
2021-06-28T17:11:42.000Z
|
2021-06-28T17:11:42.000Z
|
tests/test_bilves_derivatives.py
|
MatKie/MT_micellization
|
bf0f6a2309b171746f6e43af5349301f586e34cd
|
[
"BSD-3-Clause"
] | null | null | null |
from MTM.micelles.bilayer_vesicle import BilayerVesicle
from MTM.micelles._bilayer_vesicle_derivative import BilayerVesicleDerivative
from numpy.core.numeric import roll
import pytest
import sys
import numpy as np
import os
from mkutils import create_fig, save_to_file
from MTM import literature
import MTM.micelles as micelle
# Make the package root importable when tests are run from this directory.
sys.path.append("../")
# Absolute directory containing this test file.
this_path = os.path.dirname(__file__)
def absolute_difference(mic, mic_prop, mic_fun, h=1e-8):
    """Forward finite-difference derivative of ``mic_fun`` with respect to
    the perturbation applied by ``mic_prop``.

    ``mic_fun(mic)`` evaluates the property being differentiated and
    ``mic_prop(mic, h)`` mutates ``mic`` in place by step ``h`` (the object
    is left perturbed on return).  Returns ``(f(x + h) - f(x)) / h``.
    """
    baseline = mic_fun(mic)
    mic_prop(mic, h)
    return (mic_fun(mic) - baseline) / h
def update_r(mic, h):
    """Perturb the micelle's outer radius attribute in place by ``h``."""
    mic._r_out = mic._r_out + h
def update_t(mic, h):
    """Perturb the micelle's outer thickness attribute in place by ``h``."""
    mic._t_out = mic._t_out + h
class TestRodlikeMicelleDerivativeAbsoluteDifferences:
    """Check analytical derivatives from ``BilayerVesicleDerivative`` against
    forward finite differences of the corresponding ``BilayerVesicle``
    properties at a fixed geometry.

    NOTE(review): despite 'Rodlike' in the class name this suite exercises
    the bilayer vesicle geometry -- the name was presumably copied from a
    rodlike-micelle test module; verify before renaming the class.

    Fixed defect: three test methods were defined twice under the same name
    (``test_deformation_out_wrt_r_out``, ``test_deformation_out_wrt_t_out``
    and ``test_area_wrt_r_out``), so the later definitions shadowed the
    earlier ones and three tests silently never ran.  The shadowing bodies
    actually tested the *inner* deformation derivatives and the *t_out*
    area derivative, so they are renamed ``test_deformation_in_wrt_r_out``,
    ``test_deformation_in_wrt_t_out`` and ``test_area_wrt_t_out``.
    """

    def setup(self):
        # Fixed, representative geometry so every finite difference is
        # evaluated at the same well-defined point.
        self.mic = micelle.BilayerVesicle(80, 298.15, 10, throw_errors=False)
        self.mic._r_out = 1.8099053835829944
        self.mic._t_out = 0.8414754023196881
        self.d_mic = BilayerVesicleDerivative(self.mic)

    def test_deriv_r_in_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_r_in_wrt_r_out
        num_deriv = absolute_difference(self.mic, update_r, lambda x: x.radius_inner)
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_g_out_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_surfactants_number_out_wrt_r_out
        num_deriv = absolute_difference(
            self.mic, update_r, lambda x: x.surfactants_number_outer
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_g_out_wrt_t_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_surfactants_number_out_wrt_t_out
        num_deriv = absolute_difference(
            self.mic, update_t, lambda x: x.surfactants_number_outer
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_g_in_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_surfactants_number_in_wrt_r_out
        num_deriv = absolute_difference(
            self.mic, update_r, lambda x: x.surfactants_number_inner
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_g_in_wrt_t_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_surfactants_number_in_wrt_t_out
        num_deriv = absolute_difference(
            self.mic, update_t, lambda x: x.surfactants_number_inner
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_t_in_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_thickness_in_wrt_r_out
        num_deriv = absolute_difference(self.mic, update_r, lambda x: x.thickness_inner)
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_t_in_wrt_t_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_thickness_in_wrt_t_out
        num_deriv = absolute_difference(self.mic, update_t, lambda x: x.thickness_inner)
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-7)

    def test_area_out_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_area_out_wrt_r_out
        num_deriv = absolute_difference(
            self.mic, update_r, lambda x: x.area_per_surfactant_outer
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-7)

    def test_area_out_wrt_t_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_area_out_wrt_t_out
        num_deriv = absolute_difference(
            self.mic, update_t, lambda x: x.area_per_surfactant_outer
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-7)

    def test_area_in_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_area_in_wrt_r_out
        num_deriv = absolute_difference(
            self.mic, update_r, lambda x: x.area_per_surfactant_inner
        )
        # Looser tolerance: the inner-area derivative is numerically noisier.
        assert ana_deriv == pytest.approx(num_deriv, abs=2e-6)

    def test_area_in_wrt_t_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_area_in_wrt_t_out
        num_deriv = absolute_difference(
            self.mic, update_t, lambda x: x.area_per_surfactant_inner
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_deformation_out_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_deformation_nagarajan_out_wrt_r_out()
        num_deriv = absolute_difference(
            self.mic, update_r, lambda x: x._deformation_nagarajan_out()
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-7)

    def test_deformation_out_wrt_t_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_deformation_nagarajan_out_wrt_t_out()
        num_deriv = absolute_difference(
            self.mic, update_t, lambda x: x._deformation_nagarajan_out()
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-7)

    def test_deformation_in_wrt_r_out(self):
        # Renamed (was a duplicate of test_deformation_out_wrt_r_out).
        self.setup()
        ana_deriv = self.d_mic.deriv_deformation_nagarajan_in_wrt_r_out()
        num_deriv = absolute_difference(
            self.mic, update_r, lambda x: x._deformation_nagarajan_in()
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=5e-6)

    def test_deformation_in_wrt_t_out(self):
        # Renamed (was a duplicate of test_deformation_out_wrt_t_out).
        self.setup()
        ana_deriv = self.d_mic.deriv_deformation_nagarajan_in_wrt_t_out()
        num_deriv = absolute_difference(
            self.mic, update_t, lambda x: x._deformation_nagarajan_in()
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-7)

    def test_deformation_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_deformation_nagarajan_wrt_r_out()
        num_deriv = absolute_difference(
            self.mic, update_r, lambda x: x._deformation_nagarajan()
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_deformation_wrt_t_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_deformation_nagarajan_wrt_t_out()
        num_deriv = absolute_difference(
            self.mic, update_t, lambda x: x._deformation_nagarajan()
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-7)

    def test_area_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_area_per_surfactant_wrt_r_out
        num_deriv = absolute_difference(
            self.mic, update_r, lambda x: x.area_per_surfactant
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_area_wrt_t_out(self):
        # Renamed (was a duplicate of test_area_wrt_r_out).
        self.setup()
        ana_deriv = self.d_mic.deriv_area_per_surfactant_wrt_t_out
        num_deriv = absolute_difference(
            self.mic, update_t, lambda x: x.area_per_surfactant
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_interface_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_interface_free_energy_wrt_r_out()
        num_deriv = absolute_difference(
            self.mic, update_r, lambda x: x.get_interface_free_energy()
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=2e-6)

    def test_interface_wrt_t_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_interface_free_energy_wrt_t_out()
        num_deriv = absolute_difference(
            self.mic, update_t, lambda x: x.get_interface_free_energy()
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_steric_wrt_r_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_steric_vdw_wrt_r_out()
        num_deriv = absolute_difference(
            self.mic, update_r, lambda x: x.get_steric_free_energy()
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-5)

    def test_steric_wrt_t_out(self):
        self.setup()
        ana_deriv = self.d_mic.deriv_steric_vdw_wrt_t_out()
        num_deriv = absolute_difference(
            self.mic, update_t, lambda x: x.get_steric_free_energy()
        )
        assert ana_deriv == pytest.approx(num_deriv, abs=1e-6)

    def test_jacobian(self):
        self.setup()
        # Move off the reference point so the jacobian is nontrivial.
        self.mic._r_out += 0.05
        self.mic._t_out -= 0.01
        ana_rad, ana_thi = self.d_mic.jacobian()
        num_rad = absolute_difference(
            self.mic, update_r, lambda x: x.get_delta_chempot()
        )
        # Undo the h-perturbation left behind by the first finite difference
        # before differentiating with respect to the thickness.
        self.mic._r_out -= 1e-8
        num_thi = absolute_difference(
            self.mic, update_t, lambda x: x.get_delta_chempot()
        )
        assert ana_thi == pytest.approx(num_thi, abs=1e-6)
        assert ana_rad == pytest.approx(num_rad, abs=1e-6)
| 30.619217
| 88
| 0.669456
| 1,265
| 8,604
| 4.150988
| 0.082213
| 0.070082
| 0.038088
| 0.119025
| 0.84422
| 0.824414
| 0.81832
| 0.81832
| 0.81832
| 0.81832
| 0
| 0.01579
| 0.241864
| 8,604
| 280
| 89
| 30.728571
| 0.789207
| 0
| 0
| 0.360406
| 0
| 0
| 0.000349
| 0
| 0
| 0
| 0
| 0
| 0.126904
| 1
| 0.142132
| false
| 0
| 0.050761
| 0
| 0.203046
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d9be662297e4ef3fd22604d82d20644f5a586973
| 24,428
|
py
|
Python
|
src/backend/expungeservice/demo_records.py
|
april96415/recordexpungPDX
|
43ec60ddfb7fe1ec7940b2a38c6e7d7f85286506
|
[
"MIT"
] | 38
|
2019-05-09T03:13:43.000Z
|
2022-03-16T22:59:25.000Z
|
src/backend/expungeservice/demo_records.py
|
april96415/recordexpungPDX
|
43ec60ddfb7fe1ec7940b2a38c6e7d7f85286506
|
[
"MIT"
] | 938
|
2019-05-02T15:13:21.000Z
|
2022-02-27T20:59:00.000Z
|
src/backend/expungeservice/demo_records.py
|
april96415/recordexpungPDX
|
43ec60ddfb7fe1ec7940b2a38c6e7d7f85286506
|
[
"MIT"
] | 65
|
2019-05-09T03:28:12.000Z
|
2022-03-21T00:06:39.000Z
|
from typing import List, Tuple
from dateutil.relativedelta import relativedelta
from dacite import from_dict
from expungeservice.models.case import CaseSummary, OeciCase
from expungeservice.models.charge import OeciCharge, EditStatus
from expungeservice.models.record import Alias
from expungeservice.models.disposition import DispositionCreator
from expungeservice.util import DateWithFuture as date_class, LRUCache
class DemoRecords:
@staticmethod
def build_search_results(
    username: str, password: str, aliases: Tuple[Alias, ...], search_cache: LRUCache
) -> Tuple[List[OeciCase], List[str]]:
    """Return the canned demo cases matching ``aliases`` plus any errors.

    Results are memoized in ``search_cache`` keyed on the alias tuple;
    ``username``/``password`` are accepted only for signature parity with
    the real OECI search and are otherwise unused.
    """
    cached = search_cache[aliases]
    if cached:
        return cached
    errors = []
    search_results: List[OeciCase] = []
    for alias in aliases:
        # Normalize the name fields so lookups are case/whitespace
        # insensitive against the keys of DemoRecords.records.
        normalized = Alias(
            alias.first_name.lower().strip(),
            alias.last_name.lower().strip(),
            alias.middle_name.lower().strip(),
            alias.birth_date,
        )
        try:
            search_results += DemoRecords.records.get(normalized, [])
        except Exception as e:
            errors.append(str(e))
            print(e)
    # Only cache clean results; a failed lookup should be retried.
    if not errors:
        search_cache[aliases] = search_results, errors
    return search_results, errors
# Baseline OECI case fields shared by every demo case; each record below
# overrides the fields it cares about (name, case_number, location, date,
# birth_year, ...) via dict unpacking.
shared_case_data = {
    "citation_number": "something",
    "case_detail_link": "?404",
    "edit_status": EditStatus.UNCHANGED,
    "current_status": "Closed",
    "balance_due_in_cents": 0,
    "birth_year": 1995,
    "location": "Multnomah",
    "violation_type": "Offense Misdemeanor",
    "date": date_class.today(),
    "district_attorney_number": "01234567",
}

# Baseline OECI charge fields shared by every demo charge, overridden the
# same way (ambiguous_charge_id, name, statute, level, disposition, ...).
shared_charge_data = {
    "balance_due_in_cents": 0,
    "edit_status": EditStatus.UNCHANGED,
    "probation_revoked": None,
    "level": "Misdemeanor Class C",
    "statute": "166.015",
    "name": "Disorderly Conduct",
    "date": date_class.today(),
    "disposition": DispositionCreator.empty(),
}
common_name_record_1 = [
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"name": "COMMON A. NAME",
"birth_year": 1970,
"case_number": "100000",
"location": "Clackamas",
"date": date_class.today() - relativedelta(years=6, days=12, months=4),
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "100000-1",
"name": "Aggravated Theft in the First Degree",
"statute": "164.057",
"level": "Felony Class B",
"date": date_class.today() - relativedelta(years=6, days=12, months=4),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=6, days=12, months=3), ruling="Convicted"
),
},
),
),
),
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"name": "COMMON NAME",
"birth_year": 1970,
"case_number": "110000",
"location": "Baker",
"date": date_class.today() - relativedelta(years=7, days=26, months=7),
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "110000-1",
"name": "Theft in the Second Degree",
"statute": "164.057",
"level": "Misdemeanor Class A",
"date": date_class.today() - relativedelta(years=7, days=26, months=7),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=7, days=26, months=6), ruling="Convicted"
),
},
),
),
),
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"name": "COMMON A NAME",
"birth_year": 1970,
"case_number": "120000",
"location": "Baker",
"date": date_class.today() - relativedelta(years=7, days=26, months=7),
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "120000-1",
"name": "Poss under oz Marijuana",
"statute": "475.000",
"level": "violation",
"date": date_class.today() - relativedelta(years=8, days=26, months=7),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=8, days=26, months=6), ruling="Convicted"
),
},
),
),
),
]
common_name_record_2 = [
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"name": "COMMON NAME",
"birth_year": 1985,
"case_number": "200000",
"location": "Benton",
"date": date_class.today() - relativedelta(years=3, days=12, months=4),
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "200000-1",
"name": "Obstruction of search warrant",
"statute": "162.247",
"level": "Misdemeanor Class A",
"date": date_class.today() - relativedelta(years=3, days=12, months=4),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=3, days=12, months=4), ruling="Dismissed"
),
},
),
),
),
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"name": "COMMON B. NAME",
"birth_year": 1985,
"case_number": "210000",
"location": "Baker",
"date": date_class.today() - relativedelta(years=4, days=5, months=2),
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "210000-1",
"name": "Poss Controlled Sub",
"statute": "475.9924A",
"level": "Felony Unclassified",
"date": date_class.today() - relativedelta(years=4, days=5, months=2),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=4), ruling="Convicted"
),
},
),
),
),
]
# "date": date_class.today() - relativedelta(years=3, days=9, months =5),
records = {
Alias("john", "common", "", ""): common_name_record_1 + common_name_record_2,
Alias("john", "common", "", "1/1/1970"): common_name_record_1,
Alias("john", "common", "", "2/2/1985"): common_name_record_2,
Alias("single", "conviction", "", ""): [
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"name": "SINGLE OFFENSE",
"birth_year": 1995,
"case_number": "100000",
"location": "Deschutes",
"date": date_class.today() - relativedelta(years=5),
"violation_type": "Offense Felony",
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "100000-1",
"name": "Identity Theft",
"statute": "165.800",
"level": "Felony Class C",
"date": date_class.today() - relativedelta(years=5),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=4, months=9), ruling="Convicted"
),
},
),
),
),
],
Alias("multiple", "charges", "", ""): [
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"balance_due_in_cents": 100000,
"name": "MULTIPLE CHARGES",
"birth_year": 1990,
"case_number": "100000",
"location": "Baker",
"date": date_class.today() - relativedelta(years=4),
"violation_type": "Offense Misdemeanor",
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "100000-1",
"name": "Disorderly Conduct in the First Degree",
"statute": "166.223",
"level": "Misdemeanor Class A",
"date": date_class.today() - relativedelta(years=4),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=3, months=9), ruling="Convicted"
),
"balance_due_in_cents": 100000,
},
),
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "100000-2",
"name": "Disorderly Conduct in the Second Degree",
"statute": "166.2250A",
"level": "Misdemeanor Class B",
"date": date_class.today() - relativedelta(years=4),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=3, months=9), ruling="Dismissed"
),
"balance_due_in_cents": 100000,
},
),
),
),
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"name": "MULTIPLE CHARGES",
"birth_year": 1990,
"case_number": "110000",
"location": "Multnomah",
"date": date_class.today() - relativedelta(years=1),
"violation_type": "Offense Misdemeanor",
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "110000-1",
"name": "Theft in the Third Degree",
"statute": "164.043",
"level": "Misdemeanor Class C",
"date": date_class.today() - relativedelta(years=1),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(months=9), ruling="Dismissed"
),
},
),
),
),
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"name": "MULTIPLE CHARGES",
"birth_year": 1990,
"case_number": "120000",
"location": "Multnomah",
"date": date_class.today() - relativedelta(years=12),
"violation_type": "Offense Violation",
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "120000-1",
"name": "Failure to Obey Traffic Control Device",
"statute": "811.265",
"level": "Violation",
"date": date_class.today() - relativedelta(years=12),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=11, months=9), ruling="Dismissed"
),
},
),
),
),
],
Alias("portland", "protester", "", ""): [
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"current_status": "Open",
"name": "DEFUND POLICE",
"case_number": "100000",
"violation_type": "Offense Misdemeanor",
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "100000-1",
"name": "Assaulting a Public Safety Officer",
"statute": "163.208",
"level": "Felony Class C",
},
),
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "100000-3",
"name": "Interfering w/ Peace/Parole and Probation Officer",
"statute": "162.247",
"level": "Misdemeanor Class A",
"date": date_class.today() + relativedelta(days=1),
},
),
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "100000-4",
"name": "Disorderly Conduct in the First Degree",
"statute": "166.0232A",
"level": "Misdemeanor Class A",
},
),
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "100000-5",
"name": "Resisting Arrest",
"statute": "162.315",
"level": "Misdemeanor Class A",
},
),
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "100000-6",
"name": "Riot",
"statute": "166.015",
"level": "Felony Class C",
},
),
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "100000-7",
"name": "Riot While Masked",
"statute": "166.015A",
"level": "Felony Class B",
},
),
),
),
],
Alias("more", "categories", "", ""): [
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"current_status": "Closed",
"name": "John Notaperson",
"case_number": "123456",
"violation_type": "Offense Felony",
"balance_due_in_cents": 50000,
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "123456-1",
"name": "Assaulting a Public Safety Officer",
"statute": "163.208",
"level": "Felony Class C",
"date": date_class.today() - relativedelta(years=2),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=1, months=9), ruling="Convicted"
),
"balance_due_in_cents": 50000,
},
),
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "123456-2",
"name": "Felony Riot",
"statute": "111.111",
"level": "Felony Class C",
"date": date_class.today() - relativedelta(years=2),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=1, months=9), ruling="Dismissed"
),
"balance_due_in_cents": 50000,
},
),
),
),
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"current_status": "Closed",
"name": "John Notaperson",
"case_number": "234567",
"violation_type": "Offense Felony",
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "234567-1",
"name": "Assaulting a Public Safety Officer",
"statute": "163.208",
"level": "Felony Class C",
"date": date_class.today() - relativedelta(years=5),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=4, months=9), ruling="Convicted"
),
},
),
),
),
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"current_status": "Closed",
"name": "John Notaperson",
"case_number": "333333",
"violation_type": "Offense Violation",
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "333333-1",
"name": "Possession of Marijuana < 1 Ounce",
"statute": "4758643",
"level": "Violation Unclassified",
"date": date_class.today() - relativedelta(years=5),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=4, months=9), ruling="Convicted"
),
},
),
),
),
OeciCase(
summary=from_dict(
data_class=CaseSummary,
data={
**shared_case_data,
"current_status": "Closed",
"name": "John Notaperson",
"case_number": "444444",
"violation_type": "Offense Violation",
"balance_due_in_cents": 50000,
},
),
charges=(
from_dict(
data_class=OeciCharge,
data={
**shared_charge_data,
"ambiguous_charge_id": "444444-1",
"name": "Possession of Marijuana < 1 Ounce",
"statute": "4758643",
"level": "Violation Unclassified",
"date": date_class.today() - relativedelta(years=5),
"disposition": DispositionCreator.create(
date=date_class.today() - relativedelta(years=4, months=9), ruling="Convicted"
),
"balance_due_in_cents": 50000,
},
),
),
),
],
}
| 42.557491
| 115
| 0.385459
| 1,651
| 24,428
| 5.491823
| 0.132041
| 0.043675
| 0.061652
| 0.085365
| 0.747987
| 0.722841
| 0.7149
| 0.708834
| 0.659093
| 0.628543
| 0
| 0.048787
| 0.514164
| 24,428
| 573
| 116
| 42.631763
| 0.715201
| 0.002907
| 0
| 0.709964
| 0
| 0
| 0.166865
| 0.000985
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001779
| false
| 0.001779
| 0.014235
| 0
| 0.030249
| 0.001779
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d9cec30505e082d1956a14a03ace1fdec52a83fd
| 8,857
|
py
|
Python
|
Configuration/configure_BuildRule.py
|
CCSEPBVR/KVS
|
f6153b3f52aa38904cc96d38d5cd609c5dccfc59
|
[
"BSD-3-Clause"
] | null | null | null |
Configuration/configure_BuildRule.py
|
CCSEPBVR/KVS
|
f6153b3f52aa38904cc96d38d5cd609c5dccfc59
|
[
"BSD-3-Clause"
] | null | null | null |
Configuration/configure_BuildRule.py
|
CCSEPBVR/KVS
|
f6153b3f52aa38904cc96d38d5cd609c5dccfc59
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python
"""Generate per-directory build rules for a KVS source subtree.

Usage: configure_BuildRule.py <source_path>

Walks ../Source/<source_path> and writes two generated makefiles into it:

* BuildRule.mk    -- GNU make rules (forward-slash paths, .o objects)
* BuildRule.vc.mk -- NMAKE rules (backslash paths, .obj objects)

Each makefile lists the object files built from every .cpp, one pattern
rule per source directory, and an ``install::`` target that copies
headers, GLSL shaders (.vert/.geom/.frag) and TrueType fonts into
$(INSTALL_DIR)/include.
"""
import sys
import os
import string  # NOTE(review): unused, retained so module side effects stay unchanged

# Non-.cpp extensions whose containing directories feed the install target,
# in the order the original script emitted them.
_DIR_EXTS = (".h", ".vert", ".geom", ".frag", ".ttf")


def _collect(find_path):
    """Walk *find_path* and gather build inputs.

    Returns ``(object_list, object_dir_list, dir_lists)`` where
    *object_list* holds the .o paths derived from each .cpp file,
    *object_dir_list* the directories containing .cpp files (sorted,
    then reversed, matching the original emission order), and
    *dir_lists* maps each extension in ``_DIR_EXTS`` to the sorted
    directories containing at least one such file.  All paths are made
    relative by replacing the *find_path* prefix with ".".
    """
    object_list = []
    object_dir_list = []
    dir_lists = {ext: [] for ext in _DIR_EXTS}
    for root, dirs, files in os.walk(find_path):
        for filename in files:
            ext = os.path.splitext(filename)[1]
            if ext == ".cpp":
                # Same textual .cpp -> .o substitution as the original script.
                object_list.append(os.path.join(root, filename).replace(".cpp", ".o"))
                if root not in object_dir_list:
                    object_dir_list.append(root)
            elif ext in dir_lists:
                if root not in dir_lists[ext]:
                    dir_lists[ext].append(root)

    def strip_and_sort(paths):
        # Strip the search prefix so every path is relative ("./...").
        return sorted(p.replace(find_path, ".") for p in paths)

    object_list = strip_and_sort(object_list)
    object_dir_list = strip_and_sort(object_dir_list)
    object_dir_list.reverse()  # deepest directories first, as originally emitted
    dir_lists = {ext: strip_and_sort(paths) for ext, paths in dir_lists.items()}
    return object_list, object_dir_list, dir_lists


def _write_caution():
    """Emit the do-not-edit banner shared by both makefiles."""
    print( "# DON'T EDIT THIS FILE." )
    print( "# THIS IS GENERATED BY \"Configuration/configure_BuildRule.py\"." )
    print( "" )


def _write_make(find_path, source_path, object_list, object_dir_list, dir_lists):
    """Write BuildRule.mk (GNU make syntax, forward-slash paths)."""
    sys.stdout = open( find_path + "/BuildRule.mk", "w" )

    def to_slash(paths):
        return [p.replace( "\\", "/" ) for p in paths]

    object_list = to_slash(object_list)
    object_dir_list = to_slash(object_dir_list)
    dir_lists = {ext: to_slash(paths) for ext, paths in dir_lists.items()}
    _write_caution()
    # Object file list.
    print( "" )
    print( "OBJECTS := \\" )
    for filename in object_list:
        print( "$(OUTDIR)/%s \\" % filename )
    print( "" )
    print( "" )
    # One pattern rule per source directory.
    for dirname in object_dir_list:
        print( "" )
        print( "$(OUTDIR)/%s/%%.o: %s/%%.cpp %s/%%.h" % ( dirname, dirname, dirname ) )
        if dirname == ".":
            print( "\t$(MKDIR) $(OUTDIR)" )
        else:
            print( "\t$(MKDIR) $(OUTDIR)/%s" % dirname )
        print( "\t$(CPP) -c $(CPPFLAGS) $(DEFINITIONS) $(INCLUDE_PATH) -o $@ $<" )
    print( "" )
    # "install" target: copy headers, shaders and fonts.
    print( "" )
    print( "install::" )
    for ext in _DIR_EXTS:
        for dirname in dir_lists[ext]:
            print( "\t$(MKDIR) $(INSTALL_DIR)/include/%s/%s" % ( source_path, dirname ) )
            print( "\t$(INSTALL) %s/*%s $(INSTALL_DIR)/include/%s/%s" % ( dirname, ext, source_path, dirname ) )


def _write_nmake(find_path, source_path, object_list, object_dir_list, dir_lists):
    """Write BuildRule.vc.mk (NMAKE syntax, backslash paths, .obj objects)."""
    sys.stdout = open( find_path + "/BuildRule.vc.mk", "w" )

    def to_backslash(paths):
        # Normalize every separator to "\" (the original piped the make-style
        # "/" lists through a "/" -> "\" replacement).
        return [p.replace( "\\", "/" ).replace( "/", "\\" ) for p in paths]

    object_list = [p.replace( ".o", ".obj" ) for p in to_backslash(object_list)]
    object_dir_list = to_backslash(object_dir_list)
    dir_lists = {ext: to_backslash(paths) for ext, paths in dir_lists.items()}
    _write_caution()
    # Object file list.
    print( "" )
    print( "OBJECTS = \\" )
    for filename in object_list:
        print( "$(OUTDIR)\\%s \\" % filename )
    print( "" )
    print( "" )
    # One batch-mode inference rule per source directory.
    for dirname in object_dir_list:
        print( "" )
        print( "{%s\\}.cpp{$(OUTDIR)\\%s\\}.obj::" % ( dirname, dirname ) )
        print( "\tIF NOT EXIST $(OUTDIR)\\%s $(MKDIR) $(OUTDIR)\\%s" %( dirname, dirname ) )
        print( "\t$(CPP) /c $(CPPFLAGS) $(DEFINITIONS) $(INCLUDE_PATH) /Fo$(OUTDIR)\\%s\\ @<<" % dirname )
        print( "$<" )
        print( "<<" )
    print( "" )
    # "install" target: copy headers, shaders and fonts.
    print( "" )
    print( "install::" )
    for ext in _DIR_EXTS:
        for dirname in dir_lists[ext]:
            print( "\tIF NOT EXIST $(INSTALL_DIR)\\include\\%s\\%s $(MKDIR) $(INSTALL_DIR)\\include\\%s\\%s" % ( source_path, dirname, source_path, dirname ) )
            print( "\t$(INSTALL) %s\\*%s $(INSTALL_DIR)\\include\\%s\\%s" % ( dirname, ext, source_path, dirname ) )


# Silently do nothing unless exactly one argument (the source subdirectory)
# was supplied -- same behavior as the original script.
if len( sys.argv ) != 2:
    sys.exit( 0 )
source_path = sys.argv[1]
find_path = "../Source/" + source_path
_objects, _object_dirs, _dir_lists = _collect( find_path )
_write_make( find_path, source_path, _objects, _object_dirs, _dir_lists )
_write_nmake( find_path, source_path, _objects, _object_dirs, _dir_lists )
| 38.341991
| 151
| 0.578638
| 1,143
| 8,857
| 4.223972
| 0.083115
| 0.11744
| 0.105012
| 0.093206
| 0.845485
| 0.822287
| 0.786868
| 0.764913
| 0.735294
| 0.715824
| 0
| 0.001238
| 0.179519
| 8,857
| 230
| 152
| 38.508696
| 0.663135
| 0.133454
| 0
| 0.323529
| 0
| 0.044118
| 0.230407
| 0.101792
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022059
| 0
| 0.022059
| 0.389706
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d9e72417d53d18910a46923f24a1f6bb2d68be98
| 7,836
|
py
|
Python
|
tests/contrib/flask_cache/test_wrapper_safety.py
|
vijayperiasamy-eb/dd-trace-py
|
2b0d396fc7f76582e8ffedff48933245a77ebaf2
|
[
"BSD-3-Clause"
] | 1
|
2020-03-10T01:45:56.000Z
|
2020-03-10T01:45:56.000Z
|
tests/contrib/flask_cache/test_wrapper_safety.py
|
vijayperiasamy-eb/dd-trace-py
|
2b0d396fc7f76582e8ffedff48933245a77ebaf2
|
[
"BSD-3-Clause"
] | null | null | null |
tests/contrib/flask_cache/test_wrapper_safety.py
|
vijayperiasamy-eb/dd-trace-py
|
2b0d396fc7f76582e8ffedff48933245a77ebaf2
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import unittest
# project
from ddtrace.ext import net
from ddtrace.tracer import Tracer
from ddtrace.contrib.flask_cache import get_traced_cache
from ddtrace.contrib.flask_cache.tracers import CACHE_BACKEND
# 3rd party
from flask import Flask
from redis.exceptions import ConnectionError
import pytest
# testing
from ...test_tracer import DummyWriter
class FlaskCacheWrapperTest(unittest.TestCase):
    """Safety tests for the traced Flask-Cache wrapper.

    The wrapper must never mask errors raised by the underlying cache
    (wrong arguments, unreachable backends), and every failing call must
    still produce exactly one span describing the cache command.
    """

    SERVICE = 'test-flask-cache'

    def _traced_cache(self, config):
        """Build a traced cache backed by a fresh DummyWriter.

        Returns ``(writer, cache)``: *writer* captures the spans emitted
        by the tracer, *cache* is a TracedCache instance for a Flask app
        configured with *config*.
        """
        writer = DummyWriter()
        tracer = Tracer()
        tracer.writer = writer
        Cache = get_traced_cache(tracer, service=self.SERVICE)
        app = Flask(__name__)
        return writer, Cache(app, config=config)

    def _pop_single_span(self, writer, resource):
        """Pop the one span *writer* must contain and verify it describes
        the cache command *resource*. Returns the span for further checks."""
        spans = writer.pop()
        assert len(spans) == 1
        span = spans[0]
        assert span.service == self.SERVICE
        assert span.resource == resource
        assert span.name == 'flask_cache.cmd'
        assert span.span_type == 'cache'
        return span

    def _check_call_without_arguments(self, method_name):
        """Call ``cache.<method_name>()`` with no arguments and verify the
        TypeError comes from Flask-Cache itself, not from our tracer, while
        an error span is still emitted."""
        writer, cache = self._traced_cache({'CACHE_TYPE': 'simple'})
        # make a wrong call
        with pytest.raises(TypeError) as ex:
            getattr(cache, method_name)()
        # ensure that the error is not caused by our tracer
        assert '%s()' % method_name in ex.value.args[0]
        assert 'argument' in ex.value.args[0]
        # an error trace must be sent
        span = self._pop_single_span(writer, method_name)
        assert span.error == 1

    def test_cache_get_without_arguments(self):
        self._check_call_without_arguments('get')

    def test_cache_set_without_arguments(self):
        self._check_call_without_arguments('set')

    def test_cache_add_without_arguments(self):
        self._check_call_without_arguments('add')

    def test_cache_delete_without_arguments(self):
        self._check_call_without_arguments('delete')

    def test_cache_set_many_without_arguments(self):
        self._check_call_without_arguments('set_many')

    def test_redis_cache_tracing_with_a_wrong_connection(self):
        config = {
            'CACHE_TYPE': 'redis',
            'CACHE_REDIS_PORT': 2230,
            'CACHE_REDIS_HOST': '127.0.0.1'
        }
        writer, cache = self._traced_cache(config)
        # use a wrong redis connection
        with pytest.raises(ConnectionError) as ex:
            cache.get(u'á_complex_operation')
        # ensure that the error is not caused by our tracer
        assert '127.0.0.1:2230. Connection refused.' in ex.value.args[0]
        # an error trace must be sent, tagged with the backend details
        span = self._pop_single_span(writer, 'get')
        assert span.meta[CACHE_BACKEND] == 'redis'
        assert span.meta[net.TARGET_HOST] == '127.0.0.1'
        assert span.meta[net.TARGET_PORT] == '2230'
        assert span.error == 1

    def test_memcached_cache_tracing_with_a_wrong_connection(self):
        config = {
            'CACHE_TYPE': 'memcached',
            'CACHE_MEMCACHED_SERVERS': ['localhost:2230'],
        }
        writer, cache = self._traced_cache(config)
        # use a wrong memcached connection
        try:
            cache.get(u'á_complex_operation')
        except Exception:
            pass
        # ensure that the error is not caused by our tracer: exactly one
        # span must be emitted and tagged with the backend details
        span = self._pop_single_span(writer, 'get')
        assert span.meta[CACHE_BACKEND] == 'memcached'
        assert span.meta[net.TARGET_HOST] == 'localhost'
        assert span.meta[net.TARGET_PORT] == '2230'
        # the pylibmc backend raises an exception and memcached backend does
        # not, so don't test anything about the status.
| 33.775862
| 76
| 0.608729
| 976
| 7,836
| 4.76127
| 0.119877
| 0.086077
| 0.054229
| 0.030773
| 0.845277
| 0.829783
| 0.802023
| 0.788681
| 0.774048
| 0.774048
| 0
| 0.012893
| 0.297218
| 7,836
| 231
| 77
| 33.922078
| 0.830942
| 0.173813
| 0
| 0.694268
| 0
| 0
| 0.087517
| 0.003575
| 0
| 0
| 0
| 0
| 0.369427
| 1
| 0.044586
| false
| 0.006369
| 0.057325
| 0
| 0.11465
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d9fe83bde4566437fc1f020597dd20ad842a232f
| 27,905
|
py
|
Python
|
square/api/v1_transactions_api.py
|
codertjay/square-python-sdk
|
1f5f34bc792e31991db0fb2756d92c717f2dcfa4
|
[
"Apache-2.0"
] | 1
|
2022-02-28T13:18:30.000Z
|
2022-02-28T13:18:30.000Z
|
square/api/v1_transactions_api.py
|
codertjay/square-python-sdk
|
1f5f34bc792e31991db0fb2756d92c717f2dcfa4
|
[
"Apache-2.0"
] | null | null | null |
square/api/v1_transactions_api.py
|
codertjay/square-python-sdk
|
1f5f34bc792e31991db0fb2756d92c717f2dcfa4
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from deprecation import deprecated
from square.api_helper import APIHelper
from square.http.api_response import ApiResponse
from square.api.base_api import BaseApi
class V1TransactionsApi(BaseApi):
"""A Controller to access Endpoints in the square API."""
    def __init__(self, config, auth_managers, call_back=None):
        # Forward configuration, auth managers and the optional HTTP
        # call-back straight to BaseApi; this controller adds no state.
        super(V1TransactionsApi, self).__init__(config, auth_managers, call_back)
@deprecated()
def list_orders(self,
location_id,
order=None,
limit=None,
batch_token=None):
"""Does a GET request to /v1/{location_id}/orders.
Provides summary information for a merchant's online store orders.
Args:
location_id (string): The ID of the location to list online store
orders for.
order (SortOrder, optional): The order in which payments are
listed in the response.
limit (int, optional): The maximum number of payments to return in
a single response. This value cannot exceed 200.
batch_token (string, optional): A pagination cursor to retrieve
the next set of results for your original query to the
endpoint.
Returns:
ApiResponse: An object with the response value as well as other
useful information such as status codes and headers. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/v1/{location_id}/orders'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'location_id': {'value': location_id, 'encode': True}
})
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_parameters = {
'order': order,
'limit': limit,
'batch_token': batch_token
}
_query_builder = APIHelper.append_url_with_query_parameters(
_query_builder,
_query_parameters
)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.config.http_client.get(_query_url, headers=_headers)
# Apply authentication scheme on request
self.apply_auth_schemes(_request, 'global')
_response = self.execute_request(_request)
decoded = APIHelper.json_deserialize(_response.text)
if type(decoded) is dict:
_errors = decoded.get('errors')
else:
_errors = None
_result = ApiResponse(_response, body=decoded, errors=_errors)
return _result
@deprecated()
def retrieve_order(self,
location_id,
order_id):
"""Does a GET request to /v1/{location_id}/orders/{order_id}.
Provides comprehensive information for a single online store order,
including the order's history.
Args:
location_id (string): The ID of the order's associated location.
order_id (string): The order's Square-issued ID. You obtain this
value from Order objects returned by the List Orders endpoint
Returns:
ApiResponse: An object with the response value as well as other
useful information such as status codes and headers. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/v1/{location_id}/orders/{order_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'location_id': {'value': location_id, 'encode': True},
'order_id': {'value': order_id, 'encode': True}
})
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.config.http_client.get(_query_url, headers=_headers)
# Apply authentication scheme on request
self.apply_auth_schemes(_request, 'global')
_response = self.execute_request(_request)
decoded = APIHelper.json_deserialize(_response.text)
if type(decoded) is dict:
_errors = decoded.get('errors')
else:
_errors = None
_result = ApiResponse(_response, body=decoded, errors=_errors)
return _result
@deprecated()
def update_order(self,
location_id,
order_id,
body):
"""Does a PUT request to /v1/{location_id}/orders/{order_id}.
Updates the details of an online store order. Every update you perform
on an order corresponds to one of three actions:
Args:
location_id (string): The ID of the order's associated location.
order_id (string): The order's Square-issued ID. You obtain this
value from Order objects returned by the List Orders endpoint
body (V1UpdateOrderRequest): An object containing the fields to
POST for the request. See the corresponding object definition
for field details.
Returns:
ApiResponse: An object with the response value as well as other
useful information such as status codes and headers. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/v1/{location_id}/orders/{order_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'location_id': {'value': location_id, 'encode': True},
'order_id': {'value': order_id, 'encode': True}
})
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'Content-Type': 'application/json'
}
# Prepare and execute request
_request = self.config.http_client.put(_query_url, headers=_headers, parameters=APIHelper.json_serialize(body))
# Apply authentication scheme on request
self.apply_auth_schemes(_request, 'global')
_response = self.execute_request(_request)
decoded = APIHelper.json_deserialize(_response.text)
if type(decoded) is dict:
_errors = decoded.get('errors')
else:
_errors = None
_result = ApiResponse(_response, body=decoded, errors=_errors)
return _result
@deprecated()
def list_payments(self,
location_id,
order=None,
begin_time=None,
end_time=None,
limit=None,
batch_token=None,
include_partial=False):
"""Does a GET request to /v1/{location_id}/payments.
Provides summary information for all payments taken for a given
Square account during a date range. Date ranges cannot exceed 1 year
in
length. See Date ranges for details of inclusive and exclusive dates.
*Note**: Details for payments processed with Square Point of Sale
while
in offline mode may not be transmitted to Square for up to 72 hours.
Offline payments have a `created_at` value that reflects the time the
payment was originally processed, not the time it was subsequently
transmitted to Square. Consequently, the ListPayments endpoint might
list an offline payment chronologically between online payments that
were seen in a previous request.
Args:
location_id (string): The ID of the location to list payments for.
If you specify me, this endpoint returns payments aggregated
from all of the business's locations.
order (SortOrder, optional): The order in which payments are
listed in the response.
begin_time (string, optional): The beginning of the requested
reporting period, in ISO 8601 format. If this value is before
January 1, 2013 (2013-01-01T00:00:00Z), this endpoint returns
an error. Default value: The current time minus one year.
end_time (string, optional): The end of the requested reporting
period, in ISO 8601 format. If this value is more than one
year greater than begin_time, this endpoint returns an error.
Default value: The current time.
limit (int, optional): The maximum number of payments to return in
a single response. This value cannot exceed 200.
batch_token (string, optional): A pagination cursor to retrieve
the next set of results for your original query to the
endpoint.
include_partial (bool, optional): Indicates whether or not to
include partial payments in the response. Partial payments
will have the tenders collected so far, but the itemizations
will be empty until the payment is completed.
Returns:
ApiResponse: An object with the response value as well as other
useful information such as status codes and headers. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/v1/{location_id}/payments'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'location_id': {'value': location_id, 'encode': True}
})
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_parameters = {
'order': order,
'begin_time': begin_time,
'end_time': end_time,
'limit': limit,
'batch_token': batch_token,
'include_partial': include_partial
}
_query_builder = APIHelper.append_url_with_query_parameters(
_query_builder,
_query_parameters
)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.config.http_client.get(_query_url, headers=_headers)
# Apply authentication scheme on request
self.apply_auth_schemes(_request, 'global')
_response = self.execute_request(_request)
decoded = APIHelper.json_deserialize(_response.text)
if type(decoded) is dict:
_errors = decoded.get('errors')
else:
_errors = None
_result = ApiResponse(_response, body=decoded, errors=_errors)
return _result
@deprecated()
def retrieve_payment(self,
location_id,
payment_id):
"""Does a GET request to /v1/{location_id}/payments/{payment_id}.
Provides comprehensive information for a single payment.
Args:
location_id (string): The ID of the payment's associated
location.
payment_id (string): The Square-issued payment ID. payment_id
comes from Payment objects returned by the List Payments
endpoint, Settlement objects returned by the List Settlements
endpoint, or Refund objects returned by the List Refunds
endpoint.
Returns:
ApiResponse: An object with the response value as well as other
useful information such as status codes and headers. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/v1/{location_id}/payments/{payment_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'location_id': {'value': location_id, 'encode': True},
'payment_id': {'value': payment_id, 'encode': True}
})
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.config.http_client.get(_query_url, headers=_headers)
# Apply authentication scheme on request
self.apply_auth_schemes(_request, 'global')
_response = self.execute_request(_request)
decoded = APIHelper.json_deserialize(_response.text)
if type(decoded) is dict:
_errors = decoded.get('errors')
else:
_errors = None
_result = ApiResponse(_response, body=decoded, errors=_errors)
return _result
@deprecated()
def list_refunds(self,
location_id,
order=None,
begin_time=None,
end_time=None,
limit=None,
batch_token=None):
"""Does a GET request to /v1/{location_id}/refunds.
Provides the details for all refunds initiated by a merchant or any of
the merchant's mobile staff during a date range. Date ranges cannot
exceed one year in length.
Args:
location_id (string): The ID of the location to list refunds for.
order (SortOrder, optional): The order in which payments are
listed in the response.
begin_time (string, optional): The beginning of the requested
reporting period, in ISO 8601 format. If this value is before
January 1, 2013 (2013-01-01T00:00:00Z), this endpoint returns
an error. Default value: The current time minus one year.
end_time (string, optional): The end of the requested reporting
period, in ISO 8601 format. If this value is more than one
year greater than begin_time, this endpoint returns an error.
Default value: The current time.
limit (int, optional): The approximate number of refunds to return
in a single response. Default: 100. Max: 200. Response may
contain more results than the prescribed limit when refunds
are made simultaneously to multiple tenders in a payment or
when refunds are generated in an exchange to account for the
value of returned goods.
batch_token (string, optional): A pagination cursor to retrieve
the next set of results for your original query to the
endpoint.
Returns:
ApiResponse: An object with the response value as well as other
useful information such as status codes and headers. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/v1/{location_id}/refunds'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'location_id': {'value': location_id, 'encode': True}
})
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_parameters = {
'order': order,
'begin_time': begin_time,
'end_time': end_time,
'limit': limit,
'batch_token': batch_token
}
_query_builder = APIHelper.append_url_with_query_parameters(
_query_builder,
_query_parameters
)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.config.http_client.get(_query_url, headers=_headers)
# Apply authentication scheme on request
self.apply_auth_schemes(_request, 'global')
_response = self.execute_request(_request)
decoded = APIHelper.json_deserialize(_response.text)
if type(decoded) is dict:
_errors = decoded.get('errors')
else:
_errors = None
_result = ApiResponse(_response, body=decoded, errors=_errors)
return _result
@deprecated()
def create_refund(self,
location_id,
body):
"""Does a POST request to /v1/{location_id}/refunds.
Issues a refund for a previously processed payment. You must issue
a refund within 60 days of the associated payment.
You cannot issue a partial refund for a split tender payment. You
must
instead issue a full or partial refund for a particular tender, by
providing the applicable tender id to the V1CreateRefund endpoint.
Issuing a full refund for a split tender payment refunds all tenders
associated with the payment.
Issuing a refund for a card payment is not reversible. For
development
purposes, you can create fake cash payments in Square Point of Sale
and
refund them.
Args:
location_id (string): The ID of the original payment's associated
location.
body (V1CreateRefundRequest): An object containing the fields to
POST for the request. See the corresponding object definition
for field details.
Returns:
ApiResponse: An object with the response value as well as other
useful information such as status codes and headers. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/v1/{location_id}/refunds'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'location_id': {'value': location_id, 'encode': True}
})
_query_builder = self.config.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'Content-Type': 'application/json'
}
# Prepare and execute request
_request = self.config.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(body))
# Apply authentication scheme on request
self.apply_auth_schemes(_request, 'global')
_response = self.execute_request(_request)
decoded = APIHelper.json_deserialize(_response.text)
if type(decoded) is dict:
_errors = decoded.get('errors')
else:
_errors = None
_result = ApiResponse(_response, body=decoded, errors=_errors)
return _result
@deprecated()
def list_settlements(self,
                     location_id,
                     order=None,
                     begin_time=None,
                     end_time=None,
                     limit=None,
                     status=None,
                     batch_token=None):
    """Does a GET request to /v1/{location_id}/settlements.

    Provides summary information for all deposits and withdrawals
    initiated by Square to a linked bank account during a date range.
    Date ranges cannot exceed one year in length.

    **Note**: the ListSettlements endpoint does not provide entry
    information.

    Args:
        location_id (string): The ID of the location to list settlements
            for. If you specify me, this endpoint returns settlements
            aggregated from all of the business's locations.
        order (SortOrder, optional): The order in which settlements are
            listed in the response.
        begin_time (string, optional): The beginning of the requested
            reporting period, in ISO 8601 format. If this value is before
            January 1, 2013 (2013-01-01T00:00:00Z), this endpoint returns
            an error. Default value: The current time minus one year.
        end_time (string, optional): The end of the requested reporting
            period, in ISO 8601 format. If this value is more than one
            year greater than begin_time, this endpoint returns an error.
            Default value: The current time.
        limit (int, optional): The maximum number of settlements to return
            in a single response. This value cannot exceed 200.
        status (V1ListSettlementsRequestStatus, optional): Provide this
            parameter to retrieve only settlements with a particular
            status (SENT or FAILED).
        batch_token (string, optional): A pagination cursor to retrieve
            the next set of results for your original query to the
            endpoint.

    Returns:
        ApiResponse: An object with the response value as well as other
            useful information such as status codes and headers.

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    # Prepare query URL: substitute the URL-encoded location_id into the path.
    _url_path = '/v1/{location_id}/settlements'
    _url_path = APIHelper.append_url_with_template_parameters(_url_path, {
        'location_id': {'value': location_id, 'encode': True}
    })
    _query_builder = self.config.get_base_uri()
    _query_builder += _url_path
    # Optional filters are passed as query parameters; APIHelper drops Nones.
    _query_parameters = {
        'order': order,
        'begin_time': begin_time,
        'end_time': end_time,
        'limit': limit,
        'status': status,
        'batch_token': batch_token
    }
    _query_builder = APIHelper.append_url_with_query_parameters(
        _query_builder,
        _query_parameters
    )
    _query_url = APIHelper.clean_url(_query_builder)

    # Prepare headers
    _headers = {
        'accept': 'application/json'
    }

    # Prepare and execute request
    _request = self.config.http_client.get(_query_url, headers=_headers)
    # Apply authentication scheme on request
    self.apply_auth_schemes(_request, 'global')
    _response = self.execute_request(_request)

    # The `errors` field is only present when the payload deserializes to a
    # JSON object (use isinstance rather than the `type(...) is` anti-pattern).
    decoded = APIHelper.json_deserialize(_response.text)
    _errors = decoded.get('errors') if isinstance(decoded, dict) else None
    return ApiResponse(_response, body=decoded, errors=_errors)
@deprecated()
def retrieve_settlement(self,
                        location_id,
                        settlement_id):
    """Does a GET request to /v1/{location_id}/settlements/{settlement_id}.

    Provides comprehensive information for a single settlement.

    The returned `Settlement` objects include an `entries` field that
    lists the transactions that contribute to the settlement total. Most
    settlement entries correspond to a payment payout, but settlement
    entries are also generated for less common events, like refunds,
    manual adjustments, or chargeback holds.

    Square initiates its regular deposits as indicated in the
    [Deposit Options with
    Square](https://squareup.com/help/us/en/article/3807)
    help article. Details for a regular deposit are usually not available
    from Connect API endpoints before 10 p.m. PST the same day.

    Square does not know when an initiated settlement **completes**, only
    whether it has failed. A completed settlement is typically reflected
    in a bank account within 3 business days, but in exceptional cases it
    may take longer.

    Args:
        location_id (string): The ID of the settlements's associated
            location.
        settlement_id (string): The settlement's Square-issued ID. You
            obtain this value from Settlement objects returned by the
            List Settlements endpoint.

    Returns:
        ApiResponse: An object with the response value as well as other
            useful information such as status codes and headers.

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.
    """
    # Prepare query URL: both path parameters are URL-encoded.
    _url_path = '/v1/{location_id}/settlements/{settlement_id}'
    _url_path = APIHelper.append_url_with_template_parameters(_url_path, {
        'location_id': {'value': location_id, 'encode': True},
        'settlement_id': {'value': settlement_id, 'encode': True}
    })
    _query_builder = self.config.get_base_uri()
    _query_builder += _url_path
    _query_url = APIHelper.clean_url(_query_builder)

    # Prepare headers
    _headers = {
        'accept': 'application/json'
    }

    # Prepare and execute request
    _request = self.config.http_client.get(_query_url, headers=_headers)
    # Apply authentication scheme on request
    self.apply_auth_schemes(_request, 'global')
    _response = self.execute_request(_request)

    # The `errors` field is only present when the payload deserializes to a
    # JSON object (use isinstance rather than the `type(...) is` anti-pattern).
    decoded = APIHelper.json_deserialize(_response.text)
    _errors = decoded.get('errors') if isinstance(decoded, dict) else None
    return ApiResponse(_response, body=decoded, errors=_errors)
| 40.559593
| 120
| 0.612543
| 3,173
| 27,905
| 5.201702
| 0.117554
| 0.032717
| 0.013087
| 0.017328
| 0.796304
| 0.785217
| 0.773099
| 0.765223
| 0.746986
| 0.731172
| 0
| 0.007074
| 0.32625
| 27,905
| 687
| 121
| 40.618632
| 0.870805
| 0.485719
| 0
| 0.82069
| 0
| 0
| 0.088427
| 0.023064
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.013793
| 0
| 0.082759
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8a443b6df05a8e9578999021cd156e0d5e345eb4
| 88
|
py
|
Python
|
blog/admin.py
|
amanpandey-crypto/synergee
|
bf24cd08ec417eda84ffc5ad373a220e763a79eb
|
[
"MIT"
] | null | null | null |
blog/admin.py
|
amanpandey-crypto/synergee
|
bf24cd08ec417eda84ffc5ad373a220e763a79eb
|
[
"MIT"
] | 3
|
2021-06-10T20:30:44.000Z
|
2021-10-02T08:23:25.000Z
|
blog/admin.py
|
amanpandey-crypto/synergee
|
bf24cd08ec417eda84ffc5ad373a220e763a79eb
|
[
"MIT"
] | 6
|
2021-01-24T08:21:59.000Z
|
2021-10-03T11:33:02.000Z
|
# Register the blog's Post model on the shared admin_site defined in
# group.admin (instead of a default admin site), so Post is manageable there.
from group.admin import admin_site

from .models import Post

admin_site.register(Post)
| 14.666667
| 34
| 0.818182
| 14
| 88
| 5
| 0.571429
| 0.257143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 88
| 5
| 35
| 17.6
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
76d1ecb745d1da60f6150ba4d02f63c8c6bc0900
| 19,707
|
py
|
Python
|
tests/experiments/environment_runners/test_environment_runner_batch.py
|
AGI-Labs/continual_rl
|
bcf17d879e8a983340be233ff8f740c424d0f303
|
[
"MIT"
] | 19
|
2021-07-27T05:20:09.000Z
|
2022-02-27T07:12:05.000Z
|
tests/experiments/environment_runners/test_environment_runner_batch.py
|
AGI-Labs/continual_rl
|
bcf17d879e8a983340be233ff8f740c424d0f303
|
[
"MIT"
] | 2
|
2021-11-05T07:36:50.000Z
|
2022-03-11T00:21:50.000Z
|
tests/experiments/environment_runners/test_environment_runner_batch.py
|
AGI-Labs/continual_rl
|
bcf17d879e8a983340be233ff8f740c424d0f303
|
[
"MIT"
] | 3
|
2021-10-20T06:04:35.000Z
|
2022-03-06T22:59:36.000Z
|
import numpy as np
from continual_rl.experiments.environment_runners.environment_runner_batch import EnvironmentRunnerBatch
from continual_rl.experiments.tasks.task_base import TaskSpec
from tests.common_mocks.mock_policy.mock_policy import MockPolicy
from tests.common_mocks.mock_policy.mock_policy_config import MockPolicyConfig
from tests.common_mocks.mock_policy.mock_timestep_data import MockTimestepData
from tests.common_mocks.mock_preprocessor import MockPreprocessor
class MockEnv(object):
    """A minimal gym-style environment spy.

    Records every interaction (seeds, resets, actions) so tests can assert
    how the runner drove the environment. `step` reports done only when the
    action is 4, which lets a test force an episode end on demand.
    """

    def __init__(self):
        # Interaction records inspected by the tests.
        self.actions_executed = []
        self.reset_count = 0
        self.observation_space = 4
        self.action_space = 4
        self.seed_set = None

    def seed(self, seed):
        # Remember the last seed so tests can verify one was applied.
        self.seed_set = seed

    def reset(self):
        self.reset_count += 1
        return np.array([0, 1, 2])

    def step(self, action):
        self.actions_executed.append(action)
        # Action 4 is the sentinel that ends an episode; anything else continues.
        is_done = action == 4
        return np.array([12, 13, 14]), 1.5, is_done, {"info": "unused"}

    def close(self):
        pass
class TestEnvironmentRunnerBatch(object):
    """Tests for EnvironmentRunnerBatch.collect_data.

    Each test monkeypatches MockPolicy.compute_action with a closure that
    returns a fixed action vector, and uses a single shared MockEnv instance
    as a spy (the env_spec lambda returns the same object every time so the
    test can inspect reset/step/seed calls afterwards).
    """

    def test_collect_data_simple_success(self, monkeypatch):
        """
        Setup the simple happy-path for collect_data, and make sure things are being populated correctly.
        Simple: no done=True, no rewards returned, etc.
        """
        # Arrange
        def mock_compute_action(_, observation, task_id, action_space_id, last_timestep_data, eval_mode):
            # Since we're using the Batch runner, it expects a vector
            action = [3] * len(observation)
            timestep_data = MockTimestepData(data_to_store=(observation, task_id, action_space_id, eval_mode))
            # `memory` counts how many consecutive steps have passed state forward,
            # so the tests below can verify last_timestep_data is threaded through.
            if last_timestep_data is None:
                timestep_data.memory = 0
            else:
                timestep_data.memory = last_timestep_data.memory + 1
            return action, timestep_data

        # Mock the policy we're running; action_space and observation_space not used.
        mock_policy = MockPolicy(MockPolicyConfig(), action_spaces=None, observation_space=None)
        monkeypatch.setattr(MockPolicy, "compute_action", mock_compute_action)

        # The object under test
        runner = EnvironmentRunnerBatch(policy=mock_policy, num_parallel_envs=12, timesteps_per_collection=123)

        # Arguments to collect_data
        # Normally should create a new one each time, but doing this for spying
        mock_env = MockEnv()
        mock_env_spec = lambda: mock_env

        # MockEnv is used for determining that parameters are getting generated and passed correctly
        task_spec = TaskSpec(task_id=5, action_space_id=3, preprocessor=MockPreprocessor(), env_spec=mock_env_spec,
                             num_timesteps=9718, eval_mode=1817)

        # Act
        timesteps, collected_data, rewards_reported, _ = runner.collect_data(task_spec)

        # Assert
        # Basic return checks
        assert timesteps == 123 * 12, f"Number of timesteps returned inaccurate. Got {timesteps}."
        assert len(collected_data) == 1, f"Batch env only runs on one process, so return accordingly"
        collected_data = collected_data[0]  # Convenience for the rest of the assertions
        assert len(collected_data) == 123, f"Amount of collected data unexpected. Got {len(collected_data)}."
        assert len(rewards_reported) == 0, "Rewards were reported when none were expected."

        # Check that MockTimestepData is getting properly updated
        assert isinstance(collected_data[0], MockTimestepData), "Unexpected TimestepData returned."
        assert np.all(np.array([entry.reward for entry in collected_data]) == 1.5), \
            "MockTimestepData not correctly populated with reward."
        assert not np.any(np.array([entry.done for entry in collected_data])), \
            "MockTimestepData not correctly populated with done."
        assert collected_data[0].memory == 0, "compute_action not correctly receiving last_timestep_data."
        assert collected_data[1].memory == 1, "compute_action not correctly receiving last_timestep_data."
        assert collected_data[78].memory == 78, "compute_action not correctly receiving last_timestep_data."

        # Check that the observation is being created correctly
        observation_to_policy, received_task_id, received_action_space_id, observed_eval_mode = collected_data[0].data_to_store
        assert received_task_id == 5, "task_id getting intercepted somehow."
        assert received_action_space_id == 3, "action_space_id getting intercepted somehow."
        assert observation_to_policy.shape[0] == 12, "Envs not being batched correctly"
        assert observed_eval_mode == 1817, "Eval mode not passed correctly"

        # 3 is from how MockEnv is written, which returns observations of length 3
        assert observation_to_policy.shape[1] == 3, "Incorrect obs shape"
        assert mock_env.seed_set is not None, "No seed was set"

        # Use our environment spy to check it's being called correctly
        # All env params are *1 not *12 because the first env is done local to the current process, so this is only
        # one env's worth, even though technically we're returning the same env every time (for spying purposes)
        # It's odd.
        assert mock_env.reset_count == 1, f"Mock env reset an incorrect number of times: {mock_env.reset_count}"
        assert len(mock_env.actions_executed) == 123, "Mock env.step not called a sufficient number of times"
        assert np.all(np.array(mock_env.actions_executed) == 3), "Incorrect action taken"

    def test_collect_data_with_intermediate_dones(self, monkeypatch):
        """
        Setup an environment that gives "done" at some point during the run
        """
        # Arrange
        current_step = 0

        def mock_compute_action(_, observation, task_id, action_space_id, last_timestep_data, eval_mode):
            nonlocal current_step
            action = [4 if current_step == 73 else 3] * len(observation)  # 4 is the "done" action, 3 is arbitrary
            current_step += 1
            timestep_data = MockTimestepData(data_to_store=(observation, task_id, action_space_id, eval_mode))
            if last_timestep_data is None:
                timestep_data.memory = 0
            else:
                timestep_data.memory = last_timestep_data.memory + 1
            return action, timestep_data

        # Mock the policy we're running. action_space and observation_space not used.
        mock_policy = MockPolicy(MockPolicyConfig(), action_spaces=None, observation_space=None)
        monkeypatch.setattr(MockPolicy, "compute_action", mock_compute_action)

        # The object under test
        runner = EnvironmentRunnerBatch(policy=mock_policy, num_parallel_envs=12, timesteps_per_collection=123)

        # Arguments to collect_data
        # Normally should create a new one each time, but doing this for spying
        mock_env = MockEnv()
        mock_env_spec = lambda: mock_env

        # MockEnv is used for determining that parameters are getting generated and passed correctly
        task_spec = TaskSpec(task_id=2, action_space_id=7, preprocessor=MockPreprocessor(), env_spec=mock_env_spec,
                             num_timesteps=9718, eval_mode=1817)

        # Act
        timesteps, collected_data, rewards_reported, _ = runner.collect_data(task_spec)

        # Assert
        # Basic return checks
        assert timesteps == 123 * 12, f"Number of timesteps returned inaccurate. Got {timesteps}."
        assert len(collected_data) == 1, f"Batch env only runs on one process, so return accordingly"
        collected_data = collected_data[0]  # Convenience for the rest of the assertions
        assert len(collected_data) == 123, f"Amount of collected data unexpected. Got {len(collected_data)}."
        # All 12 envs end their episode on step 73, so 12 rewards of 74 * 1.5 each.
        assert len(rewards_reported) == 12, "Rewards were not reported when multiple were expected."
        assert np.all(np.array(rewards_reported) == 74 * 1.5), f"Value of reward reported unexpected {rewards_reported}"

        # Check that MockTimestepData is getting properly updated
        assert isinstance(collected_data[0], MockTimestepData), "Unexpected TimestepData returned."
        assert not np.any(np.array([entry.done for entry in collected_data[:73]])), \
            "MockTimestepData not correctly populated with done."
        assert not np.any(np.array([entry.done for entry in collected_data[74:]])), \
            "MockTimestepData not correctly populated with done."
        assert np.all(collected_data[73].done), "MockTimestepData not correctly populated with done."
        assert collected_data[78].memory == 78, "compute_action not correctly receiving last_timestep_data. " \
                                                "(Always populated, even if a done occurred.)"

        # Check that the observation is being created correctly
        observation_to_policy, received_task_id, received_action_space_id, observed_eval_mode = collected_data[0].data_to_store
        assert received_task_id == 2, "task_id getting intercepted somehow."
        assert received_action_space_id == 7, "action_space_id getting intercepted somehow."
        assert observation_to_policy.shape[0] == 12, "Envs not being batched correctly"
        assert observed_eval_mode == 1817, "Eval mode not passed correctly"

        # 3 is from how MockEnv is written, which returns observations of length 3
        assert observation_to_policy.shape[1] == 3, "Incorrect obs shape"

        # Use our environment spy to check it's being called correctly
        # All env params are *1 not *12 because the first env is done local to the current process, so this is only
        # one env's worth, even though technically we're returning the same env every time (for spying purposes)
        # It's odd.
        assert mock_env.reset_count == 2, f"Mock env reset an incorrect number of times: {mock_env.reset_count}"
        assert len(mock_env.actions_executed) == 123, "Mock env.step not called a sufficient number of times"
        assert np.all(np.array(mock_env.actions_executed[:73]) == 3), "Incorrect action taken, first half"
        assert np.all(np.array(mock_env.actions_executed[74:]) == 3), "Incorrect action taken, second half"
        assert np.array(mock_env.actions_executed)[73] == 4, "Incorrect action taken at the 'done' step."

    def test_collect_data_multi_collect_before_done(self, monkeypatch):
        """
        Run two data collections, and make sure the rewards are collected successfully.
        """
        # Arrange
        # Mock methods
        current_step = 0

        def mock_compute_action(_, observation, task_id, action_space_id, last_timestep_data, eval_mode):
            nonlocal current_step
            action = [4 if current_step == 73 else 3] * len(observation)  # 4 is the "done" action, 3 is arbitrary
            current_step += 1
            timestep_data = MockTimestepData(data_to_store=(observation, task_id, action_space_id, eval_mode))
            if last_timestep_data is None:
                timestep_data.memory = 0
            else:
                timestep_data.memory = last_timestep_data.memory + 1
            return action, timestep_data

        # Mock the policy we're running. action_space and observation_space not used.
        mock_policy = MockPolicy(MockPolicyConfig(), action_spaces=None, observation_space=None)
        monkeypatch.setattr(MockPolicy, "compute_action", mock_compute_action)

        # The object under test. The "done" step (73) falls in the second
        # 50-step collection, so rewards only appear on the second collect.
        runner = EnvironmentRunnerBatch(policy=mock_policy, num_parallel_envs=12, timesteps_per_collection=50)

        # Arguments to collect_data
        # Normally should create a new one each time, but doing this for spying
        mock_env = MockEnv()
        mock_env_spec = lambda: mock_env

        # MockEnv is used for determining that parameters are getting generated and passed correctly
        task_spec = TaskSpec(task_id=5, action_space_id=6, preprocessor=MockPreprocessor(), env_spec=mock_env_spec,
                             num_timesteps=9718, eval_mode=1817)

        # Act
        timesteps_0, collected_data_0, rewards_reported_0, _ = runner.collect_data(task_spec)
        timesteps_1, collected_data_1, rewards_reported_1, _ = runner.collect_data(task_spec)

        # Assert
        # Basic return checks
        assert timesteps_0 == timesteps_1 == 50 * 12, f"Number of timesteps returned inaccurate. " \
                                                      f"Got {(timesteps_0, timesteps_1)}."
        assert len(collected_data_0) == len(collected_data_1) == 1, f"Batch env only runs on one process, so return accordingly"
        collected_data_0 = collected_data_0[0]  # Convenience for the rest of the assertions
        collected_data_1 = collected_data_1[0]  # Convenience for the rest of the assertions
        assert len(collected_data_0) == len(collected_data_1) == 50, f"Amount of collected data unexpected. " \
                                                                     f"Got {(len(collected_data_0), len(collected_data_1))}."
        assert len(rewards_reported_0) == 0, "Rewards were reported when none were expected."
        assert len(rewards_reported_1) == 12, "Rewards were not reported when multiple were expected."
        assert np.all(np.array(rewards_reported_1) == 74 * 1.5), f"Value of reward reported unexpected {rewards_reported_1}"

        # Check that MockTimestepData is getting properly updated
        assert not np.any(np.array([entry.done for entry in collected_data_0])), \
            "MockTimestepData not correctly populated with done."
        assert not np.any(np.array([entry.done for entry in collected_data_1[:23]])), \
            "MockTimestepData not correctly populated with done."
        assert not np.any(np.array([entry.done for entry in collected_data_1[24:]])), \
            "MockTimestepData not correctly populated with done."
        assert np.all(collected_data_1[23].done), "MockTimestepData not correctly populated with done."
        assert collected_data_1[45].memory == 95, "MockTimestepData not correctly populated with done."

        # Use our environment spy to check it's being called correctly
        # All env params are *1 not *12 because the first env is done local to the current process, so this is only
        # one env's worth, even though technically we're returning the same env every time (for spying purposes)
        # It's odd. But kinda convenient I suppose.
        assert mock_env.reset_count == 2, f"Mock env reset an incorrect number of times: {mock_env.reset_count}"
        assert len(mock_env.actions_executed) == 100, "Mock env.step not called a sufficient number of times"
        assert np.all(np.array(mock_env.actions_executed[:73]) == 3), "Incorrect action taken, first half"
        assert np.all(np.array(mock_env.actions_executed[74:]) == 3), "Incorrect action taken, second half"
        assert np.array(mock_env.actions_executed)[73] == 4, "Incorrect action taken at the 'done' step."

    def test_collect_data_multi_collect_before_done_envs_finish_at_different_times(self, monkeypatch):
        """
        Run two data collections, and make sure the rewards are collected successfully even if not all envs finish together.
        """
        # Arrange
        # Mock methods
        current_step = 0

        def mock_compute_action(_, observation, task_id, action_space_id, last_timestep_data, eval_mode):
            nonlocal current_step
            action = [3] * len(observation)  # 4 is the "done" action, 3 is arbitrary

            # Make a pseudo-random environment finish, but not the others
            if current_step == 73:
                action[8] = 4
            current_step += 1
            return action, MockTimestepData(data_to_store=(observation, task_id, action_space_id, eval_mode))

        # Mock the policy we're running. action_space and observation_space not used.
        mock_policy = MockPolicy(MockPolicyConfig(), action_spaces=None, observation_space=None)
        monkeypatch.setattr(MockPolicy, "compute_action", mock_compute_action)

        # The object under test
        runner = EnvironmentRunnerBatch(policy=mock_policy, num_parallel_envs=12, timesteps_per_collection=50)

        # Arguments to collect_data
        # Normally should create a new one each time, but doing this for spying
        mock_env = MockEnv()
        mock_env_spec = lambda: mock_env

        # MockEnv is used for determining that parameters are getting generated and passed correctly
        task_spec = TaskSpec(task_id=5, action_space_id=6, preprocessor=MockPreprocessor(), env_spec=mock_env_spec,
                             num_timesteps=9718, eval_mode=1817)

        # Act
        timesteps_0, collected_data_0, rewards_reported_0, _ = runner.collect_data(task_spec)
        timesteps_1, collected_data_1, rewards_reported_1, _ = runner.collect_data(task_spec)

        # Assert
        # Basic return checks
        assert timesteps_0 == timesteps_1 == 50 * 12, f"Number of timesteps returned inaccurate. " \
                                                      f"Got {(timesteps_0, timesteps_1)}."
        assert len(collected_data_0) == len(collected_data_1) == 1, f"Batch env only runs on one process, so return accordingly"
        collected_data_0 = collected_data_0[0]  # Convenience for the rest of the assertions
        collected_data_1 = collected_data_1[0]  # Convenience for the rest of the assertions
        assert len(collected_data_0) == len(collected_data_1) == 50, f"Amount of collected data unexpected. " \
                                                                     f"Got {(len(collected_data_0), len(collected_data_1))}."
        # Only env index 8 finished (action[8] = 4), so exactly one reward.
        assert len(rewards_reported_0) == 0, "Rewards were reported when none were expected."
        assert len(rewards_reported_1) == 1, "Rewards were not reported when one was expected."
        assert rewards_reported_1[0] == 74 * 1.5, f"Value of reward reported unexpected {rewards_reported_1}"

        # Check that MockTimestepData is getting properly updated
        assert not np.any(np.array([entry.done for entry in collected_data_0])), \
            "MockTimestepData not correctly populated with done."
        assert not np.any(np.array([entry.done for entry in collected_data_1[:23]])), \
            "MockTimestepData not correctly populated with done."
        assert not np.any(np.array([entry.done for entry in collected_data_1[24:]])), \
            "MockTimestepData not correctly populated with done."
        assert not np.any(collected_data_1[23].done[:8]), "MockTimestepData not correctly populated with done."
        assert not np.any(collected_data_1[23].done[9:]), "MockTimestepData not correctly populated with done."
        assert collected_data_1[23].done[8], "MockTimestepData not correctly populated with done."

        # Use our environment spy to check it's being called correctly
        # All env params are *1 not *12 because the first env is done local to the current process, so this is only
        # one env's worth, even though technically we're returning the same env every time (for spying purposes)
        # It's odd. But kinda convenient I suppose.
        assert mock_env.reset_count == 1, f"Mock env reset an incorrect number of times: {mock_env.reset_count}"
        assert len(mock_env.actions_executed) == 100, "Mock env.step not called a sufficient number of times"
        assert np.all(np.array(mock_env.actions_executed) == 3), "Incorrect action taken by the first env"
| 58.304734
| 128
| 0.687776
| 2,619
| 19,707
| 4.976327
| 0.103475
| 0.063838
| 0.026855
| 0.045423
| 0.894422
| 0.886289
| 0.884447
| 0.881838
| 0.872631
| 0.86772
| 0
| 0.022922
| 0.236261
| 19,707
| 337
| 129
| 58.477745
| 0.843
| 0.199371
| 0
| 0.659898
| 0
| 0
| 0.229621
| 0.014612
| 0
| 0
| 0
| 0
| 0.35533
| 1
| 0.06599
| false
| 0.015228
| 0.035533
| 0
| 0.142132
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
76d6a3ff378cc98da70fbae6d26cc05f984c99a4
| 76
|
py
|
Python
|
py_tdlib/constructors/supergroup_members_filter_bots.py
|
Mr-TelegramBot/python-tdlib
|
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
|
[
"MIT"
] | 24
|
2018-10-05T13:04:30.000Z
|
2020-05-12T08:45:34.000Z
|
py_tdlib/constructors/supergroup_members_filter_bots.py
|
MrMahdi313/python-tdlib
|
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
|
[
"MIT"
] | 3
|
2019-06-26T07:20:20.000Z
|
2021-05-24T13:06:56.000Z
|
py_tdlib/constructors/supergroup_members_filter_bots.py
|
MrMahdi313/python-tdlib
|
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
|
[
"MIT"
] | 5
|
2018-10-05T14:29:28.000Z
|
2020-08-11T15:04:10.000Z
|
from ..factory import Type
class supergroupMembersFilterBots(Type):
    """Empty marker constructor subclassing the factory ``Type``.

    It carries no fields of its own; per its name it presumably selects only
    bot members when filtering supergroup members (TDLib API) — confirm
    against the TDLib schema.
    """
    pass
| 12.666667
| 40
| 0.802632
| 8
| 76
| 7.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 76
| 5
| 41
| 15.2
| 0.924242
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
76efa7fe8a671539b423fbb8bdaa65cf7346d8a4
| 63
|
py
|
Python
|
BboxToolkit/evaluation/__init__.py
|
qilei123/BboxToolkit
|
97f61ae97449009c7952f648be57a28d35c2f39b
|
[
"Apache-2.0"
] | 38
|
2021-02-05T14:39:11.000Z
|
2022-03-28T07:41:41.000Z
|
BboxToolkit/evaluation/__init__.py
|
qilei123/BboxToolkit
|
97f61ae97449009c7952f648be57a28d35c2f39b
|
[
"Apache-2.0"
] | 5
|
2021-11-01T08:17:09.000Z
|
2022-03-28T12:40:06.000Z
|
BboxToolkit/evaluation/__init__.py
|
qilei123/BboxToolkit
|
97f61ae97449009c7952f648be57a28d35c2f39b
|
[
"Apache-2.0"
] | 13
|
2021-08-20T12:26:04.000Z
|
2022-03-18T12:45:31.000Z
|
from .mean_ap import eval_map
from .recall import eval_recalls
| 21
| 32
| 0.84127
| 11
| 63
| 4.545455
| 0.727273
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 63
| 2
| 33
| 31.5
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
76f530cf392f372849c6b53a81bea36e41e73b48
| 211
|
py
|
Python
|
tests/test_exception.py
|
GouthamSiddhaarth/postcode_validator
|
eca06c1665d45d404eaa5e4e0daf2cb8dea115a3
|
[
"MIT"
] | null | null | null |
tests/test_exception.py
|
GouthamSiddhaarth/postcode_validator
|
eca06c1665d45d404eaa5e4e0daf2cb8dea115a3
|
[
"MIT"
] | null | null | null |
tests/test_exception.py
|
GouthamSiddhaarth/postcode_validator
|
eca06c1665d45d404eaa5e4e0daf2cb8dea115a3
|
[
"MIT"
] | null | null | null |
import pytest
from Exceptions.exceptions import ValidationError
def test_exception():
    """ValidationError raised with a message should be caught by pytest.raises
    when the same message is supplied as the match pattern."""
    message = "Postcode is invalid"
    with pytest.raises(ValidationError, match=message):
        raise ValidationError(message)
| 26.375
| 69
| 0.777251
| 23
| 211
| 7.086957
| 0.652174
| 0.122699
| 0.208589
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146919
| 211
| 7
| 70
| 30.142857
| 0.905556
| 0
| 0
| 0
| 0
| 0
| 0.180095
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0a334dc15cd5e85d68a37dc1f19681c58e7eedc3
| 26
|
py
|
Python
|
vogue/crud/find_plots/bioinfo/__init__.py
|
mayabrandi/vogue
|
463e6417a9168eadb0d11dea2d0f97919494bcd3
|
[
"MIT"
] | 1
|
2021-12-16T19:29:17.000Z
|
2021-12-16T19:29:17.000Z
|
vogue/crud/find_plots/bioinfo/__init__.py
|
mayabrandi/vogue
|
463e6417a9168eadb0d11dea2d0f97919494bcd3
|
[
"MIT"
] | 188
|
2018-10-25T06:13:17.000Z
|
2022-02-25T19:47:06.000Z
|
vogue/crud/find_plots/bioinfo/__init__.py
|
mayabrandi/vogue
|
463e6417a9168eadb0d11dea2d0f97919494bcd3
|
[
"MIT"
] | null | null | null |
from .covid import get_qc
| 13
| 25
| 0.807692
| 5
| 26
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 26
| 1
| 26
| 26
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0a3ca711a8aa8525dc06c0a6fadbbc3cbeeaf31c
| 10,596
|
py
|
Python
|
imcsdk/mometa/adaptor/AdaptorEthGenProfile.py
|
TetrationAnalytics/imcsdk
|
d86e47831f294dc9fa5e99b9a92abceac2502d76
|
[
"Apache-2.0"
] | null | null | null |
imcsdk/mometa/adaptor/AdaptorEthGenProfile.py
|
TetrationAnalytics/imcsdk
|
d86e47831f294dc9fa5e99b9a92abceac2502d76
|
[
"Apache-2.0"
] | null | null | null |
imcsdk/mometa/adaptor/AdaptorEthGenProfile.py
|
TetrationAnalytics/imcsdk
|
d86e47831f294dc9fa5e99b9a92abceac2502d76
|
[
"Apache-2.0"
] | 2
|
2016-05-26T02:05:46.000Z
|
2017-09-13T05:13:25.000Z
|
"""This module contains the general information for AdaptorEthGenProfile ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class AdaptorEthGenProfileConsts:
    """String constants for AdaptorEthGenProfile property values.

    These mirror the allowed-value lists declared in
    AdaptorEthGenProfile.prop_meta. For each toggle setting both the
    capitalized ("Disabled"/"Enabled") and lowercase ("disabled"/"enabled")
    spellings are accepted; the underscore-prefixed names hold the
    lowercase variants.
    """
    # ARFS (accelerated receive flow steering) toggle values.
    ARFS_DISABLED = "Disabled"
    ARFS_ENABLED = "Enabled"
    _ARFS_DISABLED = "disabled"
    _ARFS_ENABLED = "enabled"
    # Multi-queue toggle values.
    MULTI_QUEUE_DISABLED = "Disabled"
    MULTI_QUEUE_ENABLED = "Enabled"
    _MULTI_QUEUE_DISABLED = "disabled"
    _MULTI_QUEUE_ENABLED = "enabled"
    # Special values for the `order` and `rate_limit` properties.
    ORDER_ANY = "ANY"
    RATE_LIMIT_OFF = "OFF"
    # Uplink failover toggle values.
    UPLINK_FAILOVER_DISABLED = "Disabled"
    UPLINK_FAILOVER_ENABLED = "Enabled"
    _UPLINK_FAILOVER_DISABLED = "disabled"
    _UPLINK_FAILOVER_ENABLED = "enabled"
    # VLAN settings: sentinel "NONE" plus the two port modes.
    VLAN_NONE = "NONE"
    VLAN_MODE_ACCESS = "ACCESS"
    VLAN_MODE_TRUNK = "TRUNK"
    # VMQ (virtual machine queue) toggle values.
    VMQ_DISABLED = "Disabled"
    VMQ_ENABLED = "Enabled"
    _VMQ_DISABLED = "disabled"
    _VMQ_ENABLED = "enabled"
class AdaptorEthGenProfile(ManagedObject):
    """This is AdaptorEthGenProfile class.

    Managed-object model for the ``adaptorEthGenProfile`` element (rn
    ``general``), parented by ``adaptorHostEthIf``.  All metadata tables
    below are keyed by platform ("classic" / "modular"); this file appears
    to be generated from the endpoint's object model — do not hand-edit
    the table contents.
    """

    consts = AdaptorEthGenProfileConsts()
    # No naming properties: the rn is the fixed string "general".
    naming_props = set([])

    # Per-platform object metadata: name, xml tag, rn, first supported
    # version, access, write mask, parents, and supported verbs.
    mo_meta = {
        "classic": MoMeta("AdaptorEthGenProfile", "adaptorEthGenProfile", "general", VersionMeta.Version151f, "InputOutput", 0x3ffff, [], ["admin", "read-only", "user"], ['adaptorHostEthIf'], [], ["Get", "Set"]),
        "modular": MoMeta("AdaptorEthGenProfile", "adaptorEthGenProfile", "general", VersionMeta.Version2013e, "InputOutput", 0x1ffff, [], ["admin", "read-only", "user"], ['adaptorHostEthIf'], [], ["Get", "Set"])
    }

    # Per-platform property metadata keyed by python property name.
    # MoPropertyMeta args (positional): python name, xml name, type, first
    # version, access, dirty-mask bit, min/max length, regex, allowed
    # values, allowed ranges.
    prop_meta = {
        "classic": {
            "arfs": MoPropertyMeta("arfs", "arfs", "string", VersionMeta.Version202c, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
            "multi_queue": MoPropertyMeta("multi_queue", "multiQueue", "string", VersionMeta.Version402c, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "no_of_sub_vni_cs": MoPropertyMeta("no_of_sub_vni_cs", "noOfSubVNICs", "uint", VersionMeta.Version402c, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, [], ["1-64"]),
            "nvgre": MoPropertyMeta("nvgre", "nvgre", "string", VersionMeta.Version303a, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "order": MoPropertyMeta("order", "order", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x40, None, None, r"""[0-9]|1[0-7]""", ["ANY"], []),
            "rate_limit": MoPropertyMeta("rate_limit", "rateLimit", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x80, None, None, r"""(([1-9]\d?\d?\d?|10000) Mbps)""", ["OFF"], ["1-100000"]),
            "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x100, 0, 255, None, [], []),
            "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x200, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
            "trusted_class_of_service": MoPropertyMeta("trusted_class_of_service", "trustedClassOfService", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x400, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "uplink_failback_timeout": MoPropertyMeta("uplink_failback_timeout", "uplinkFailbackTimeout", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x800, None, None, r"""(0{0,2}[0-9]|0?[1-9][0-9]|[1-5][0-9][0-9]|600)""", [], []),
            "uplink_failover": MoPropertyMeta("uplink_failover", "uplinkFailover", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x1000, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "vlan": MoPropertyMeta("vlan", "vlan", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x2000, None, None, None, ["NONE"], ["1-4094"]),
            "vlan_mode": MoPropertyMeta("vlan_mode", "vlanMode", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x4000, None, None, None, ["ACCESS", "TRUNK"], []),
            "vmq": MoPropertyMeta("vmq", "vmq", "string", VersionMeta.Version202c, MoPropertyMeta.READ_WRITE, 0x8000, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "vxlan": MoPropertyMeta("vxlan", "vxlan", "string", VersionMeta.Version303a, MoPropertyMeta.READ_WRITE, 0x10000, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version151f, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
            # NOTE(review): pci_link is READ_WRITE on classic but READ_ONLY
            # on modular below — this asymmetry looks intentional (platform
            # difference), but confirm against the model definition.
            "pci_link": MoPropertyMeta("pci_link", "pciLink", "uint", VersionMeta.Version303a, MoPropertyMeta.READ_WRITE, 0x20000, None, None, None, [], ["0-1"]),
        },
        "modular": {
            "arfs": MoPropertyMeta("arfs", "arfs", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
            "multi_queue": MoPropertyMeta("multi_queue", "multiQueue", "string", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "no_of_sub_vni_cs": MoPropertyMeta("no_of_sub_vni_cs", "noOfSubVNICs", "uint", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, [], ["1-64"]),
            "nvgre": MoPropertyMeta("nvgre", "nvgre", "string", VersionMeta.Version303a, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "order": MoPropertyMeta("order", "order", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x40, None, None, r"""[0-9]|1[0-7]""", ["ANY"], []),
            "rate_limit": MoPropertyMeta("rate_limit", "rateLimit", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x80, None, None, r"""(([1-9]\d?\d?\d?|10000) Mbps)""", ["OFF"], ["1-100000"]),
            "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x100, 0, 255, None, [], []),
            "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x200, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
            "trusted_class_of_service": MoPropertyMeta("trusted_class_of_service", "trustedClassOfService", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x400, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "uplink_failback_timeout": MoPropertyMeta("uplink_failback_timeout", "uplinkFailbackTimeout", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x800, None, None, r"""(0{0,2}[0-9]|0?[1-9][0-9]|[1-5][0-9][0-9]|600)""", [], []),
            "uplink_failover": MoPropertyMeta("uplink_failover", "uplinkFailover", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x1000, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "vlan": MoPropertyMeta("vlan", "vlan", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2000, None, None, None, ["NONE"], ["1-4094"]),
            "vlan_mode": MoPropertyMeta("vlan_mode", "vlanMode", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x4000, None, None, None, ["ACCESS", "TRUNK"], []),
            "vmq": MoPropertyMeta("vmq", "vmq", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x8000, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "vxlan": MoPropertyMeta("vxlan", "vxlan", "string", VersionMeta.Version303a, MoPropertyMeta.READ_WRITE, 0x10000, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
            "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
            "pci_link": MoPropertyMeta("pci_link", "pciLink", "uint", VersionMeta.Version303a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], ["0-1"]),
        },
    }

    # Mapping from xml attribute name to python property name, per platform
    # (both platforms share the same mapping here).
    prop_map = {
        "classic": {
            "arfs": "arfs",
            "dn": "dn",
            "multiQueue": "multi_queue",
            "noOfSubVNICs": "no_of_sub_vni_cs",
            "nvgre": "nvgre",
            "order": "order",
            "rateLimit": "rate_limit",
            "rn": "rn",
            "status": "status",
            "trustedClassOfService": "trusted_class_of_service",
            "uplinkFailbackTimeout": "uplink_failback_timeout",
            "uplinkFailover": "uplink_failover",
            "vlan": "vlan",
            "vlanMode": "vlan_mode",
            "vmq": "vmq",
            "vxlan": "vxlan",
            "childAction": "child_action",
            "pciLink": "pci_link",
        },
        "modular": {
            "arfs": "arfs",
            "dn": "dn",
            "multiQueue": "multi_queue",
            "noOfSubVNICs": "no_of_sub_vni_cs",
            "nvgre": "nvgre",
            "order": "order",
            "rateLimit": "rate_limit",
            "rn": "rn",
            "status": "status",
            "trustedClassOfService": "trusted_class_of_service",
            "uplinkFailbackTimeout": "uplink_failback_timeout",
            "uplinkFailover": "uplink_failover",
            "vlan": "vlan",
            "vlanMode": "vlan_mode",
            "vmq": "vmq",
            "vxlan": "vxlan",
            "childAction": "child_action",
            "pciLink": "pci_link",
        },
    }

    def __init__(self, parent_mo_or_dn, **kwargs):
        """Initialize the MO under *parent_mo_or_dn*.

        All properties start as None; keyword arguments are forwarded to
        ManagedObject.__init__, which may assign them.
        """
        self._dirty_mask = 0
        self.arfs = None
        self.multi_queue = None
        self.no_of_sub_vni_cs = None
        self.nvgre = None
        self.order = None
        self.rate_limit = None
        self.status = None
        self.trusted_class_of_service = None
        self.uplink_failback_timeout = None
        self.uplink_failover = None
        self.vlan = None
        self.vlan_mode = None
        self.vmq = None
        self.vxlan = None
        self.child_action = None
        self.pci_link = None
        ManagedObject.__init__(self, "AdaptorEthGenProfile", parent_mo_or_dn, **kwargs)
| 67.490446
| 251
| 0.631465
| 1,048
| 10,596
| 6.194656
| 0.131679
| 0.078866
| 0.116913
| 0.04313
| 0.864295
| 0.825323
| 0.748922
| 0.736907
| 0.719039
| 0.719039
| 0
| 0.043346
| 0.192242
| 10,596
| 156
| 252
| 67.923077
| 0.715154
| 0.011325
| 0
| 0.325926
| 0
| 0.014815
| 0.279339
| 0.056193
| 0
| 0
| 0.016533
| 0
| 0
| 1
| 0.007407
| false
| 0
| 0.022222
| 0
| 0.237037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0a4e92f85dd4a5e0913fdc166c52be7817b28082
| 249
|
py
|
Python
|
neodroidvision/classification/architectures/self_attention_network/self_attention_modules/functions/__init__.py
|
sintefneodroid/vision
|
a4e66251ead99f15f4697bfe2abd00e2f388e743
|
[
"Apache-2.0"
] | 1
|
2019-07-03T04:33:51.000Z
|
2019-07-03T04:33:51.000Z
|
neodroidvision/classification/architectures/self_attention_network/self_attention_modules/functions/__init__.py
|
sintefneodroid/vision
|
a4e66251ead99f15f4697bfe2abd00e2f388e743
|
[
"Apache-2.0"
] | 5
|
2019-07-03T04:38:07.000Z
|
2021-09-10T15:40:44.000Z
|
neodroidvision/classification/architectures/self_attention_network/self_attention_modules/functions/__init__.py
|
sintefneodroid/vision
|
a4e66251ead99f15f4697bfe2abd00e2f388e743
|
[
"Apache-2.0"
] | 3
|
2019-10-03T06:14:40.000Z
|
2021-01-31T14:31:39.000Z
|
from .aggregation_refpad import *
from .aggregation_zeropad import *
from .self_attention_utilities import *
from .subtraction2_refpad import *
from .subtraction2_zeropad import *
from .subtraction_refpad import *
from .subtraction_zeropad import *
| 31.125
| 39
| 0.831325
| 29
| 249
| 6.862069
| 0.344828
| 0.301508
| 0.241206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00905
| 0.11245
| 249
| 7
| 40
| 35.571429
| 0.891403
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
0a5310e3bb5553b0ab5f280c2953232865939d41
| 23
|
py
|
Python
|
__init__.py
|
RyosukeHigo/ModernRoboticsSymPy
|
742334b4ea3260f6119c837e137c80a619fc7718
|
[
"MIT"
] | null | null | null |
__init__.py
|
RyosukeHigo/ModernRoboticsSymPy
|
742334b4ea3260f6119c837e137c80a619fc7718
|
[
"MIT"
] | null | null | null |
__init__.py
|
RyosukeHigo/ModernRoboticsSymPy
|
742334b4ea3260f6119c837e137c80a619fc7718
|
[
"MIT"
] | null | null | null |
from .mr_sympy import *
| 23
| 23
| 0.782609
| 4
| 23
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 1
| 23
| 23
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6a75e27aade5bb2e0f885c4a2ef38bed655e1995
| 66,474
|
py
|
Python
|
tests/scanner/audit/firewall_rules_engine_test.py
|
mcunha/forseti-security
|
cbf25f6173c1a25d4e43a9738eca73f927361cb8
|
[
"Apache-2.0"
] | null | null | null |
tests/scanner/audit/firewall_rules_engine_test.py
|
mcunha/forseti-security
|
cbf25f6173c1a25d4e43a9738eca73f927361cb8
|
[
"Apache-2.0"
] | null | null | null |
tests/scanner/audit/firewall_rules_engine_test.py
|
mcunha/forseti-security
|
cbf25f6173c1a25d4e43a9738eca73f927361cb8
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import json
import unittest
import parameterized
from tests.unittest_utils import ForsetiTestCase
from google.cloud.forseti.common.gcp_type import resource as resource_mod
from google.cloud.forseti.common.gcp_type.firewall_rule import FirewallRule
from google.cloud.forseti.scanner.audit import firewall_rules_engine as fre
from google.cloud.forseti.scanner.audit import rules as scanner_rules
from tests.unittest_utils import get_datafile_path
class RuleTest(ForsetiTestCase):
@parameterized.parameterized.expand([
(
{},
fre.InvalidRuleDefinition,
'Rule requires rule_id',
),
(
{'rule_id': 'id'},
fre.InvalidRuleDefinition,
'Rule requires mode',
),
(
{
'rule_id': 'id',
'mode': 'notavalidmode',
},
fre.InvalidRuleDefinition,
'Mode notavalidmode is not in valid modes',
),
])
def test_from_config_errors(self, rule_def, expected_error, regexp):
    """Invalid rule definitions must raise the expected error."""
    # Callable form of assertRaisesRegexp: the assertion invokes
    # from_config itself, which is equivalent to the context-manager form.
    # NOTE(review): assertRaisesRegexp is a deprecated alias of
    # assertRaisesRegex on Python 3; kept for this file's Py2-era style.
    self.assertRaisesRegexp(
        expected_error, regexp, fre.Rule.from_config, rule_def)
@parameterized.parameterized.expand([
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.0']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
True,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'egress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
True,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n2',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
True,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'udp', 'ports': ['22']}]),
},
True,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
False,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['23']}]),
},
True,
),
])
def test_is_whitelist_violation(self, rule_dicts, policy_dict, expected):
    """Check fre.is_whitelist_violation against the fixture above.

    Builds whitelist rules and one candidate policy from kwargs dicts,
    then compares the function's verdict with *expected*.
    """
    whitelist = [FirewallRule(**spec) for spec in rule_dicts]
    candidate = FirewallRule(**policy_dict)
    verdict = fre.is_whitelist_violation(whitelist, candidate)
    self.assertEqual(expected, verdict)
@parameterized.parameterized.expand([
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.0']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
False,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'egress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
False,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n2',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
False,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'udp', 'ports': ['22']}]),
},
False,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
True,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['23']}]),
},
False,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['2', '1', '3']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['1-3']}]),
},
True,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['2', '1', '3']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['1-100']}]),
},
True,
),
(
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['2', '1', '3']}]),
}
],
{
'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['1-100']}]),
},
True,
),
])
def test_is_blacklist_violation(self, rule_dicts, policy_dict, expected):
    """Check fre.is_blacklist_violation against the fixture above.

    Builds blacklist rules and one candidate policy from kwargs dicts,
    then compares the function's verdict with *expected*.
    """
    blacklist = [FirewallRule(**spec) for spec in rule_dicts]
    candidate = FirewallRule(**policy_dict)
    verdict = fre.is_blacklist_violation(blacklist, candidate)
    self.assertEqual(expected, verdict)
@parameterized.parameterized.expand([
(
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
[{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}],
False,
),
(
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'egress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
],
False,
),
(
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
[
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'egress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['23']}]),
},
],
True,
),
])
def test_is_rule_exists_violation(self, rule_dict, policy_dicts, expected):
    """Check fre.is_rule_exists_violation for a required rule.

    Args:
        rule_dict (dict): kwargs for the required FirewallRule.
        policy_dicts (list): kwargs dicts for the existing policies.
        expected (bool): whether a violation is expected.
    """
    rule = FirewallRule(**rule_dict)
    # Comprehension replaces the original manual append loop.
    policies = [FirewallRule(**policy_dict) for policy_dict in policy_dicts]
    self.assertEqual(expected, fre.is_rule_exists_violation(rule, policies))
@parameterized.parameterized.expand([
(
{
'rule_id': 'No 0.0.0.0/0 policy allowed',
'match_policies': [{
'direction': 'ingress',
'allowed': ['*'],
}],
'verify_policies': [{
'sourceRanges': ['0.0.0.0/0'],
'allowed': ['*'],
}],
'mode': scanner_rules.RuleMode.BLACKLIST,
},
[{
'firewall_rule_source_ranges': json.dumps(['1.1.1.1']),
'firewall_rule_direction': 'ingress',
'firewall_rule_network': 'n1',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}],
[],
),
(
{
'rule_id': 'No 0.0.0.0/0 policy allowed',
'match_policies': [{
'direction': 'ingress',
'allowed': ['*'],
}],
'verify_policies': [{
'sourceRanges': ['0.0.0.0/0'],
'allowed': ['*'],
}],
'mode': scanner_rules.RuleMode.BLACKLIST,
},
[{
'project_id': 'p1',
'firewall_rule_name': '0.0.0.0/0',
'firewall_rule_network': 'https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default',
'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']),
'firewall_rule_direction': 'ingress',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
}],
[
{
'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
'resource_id': 'p1',
'full_name': '',
'rule_id': 'No 0.0.0.0/0 policy allowed',
'violation_type': 'FIREWALL_BLACKLIST_VIOLATION',
'policy_names': ['0.0.0.0/0'],
'recommended_actions': {
'DELETE_FIREWALL_RULES': [
'0.0.0.0/0'
]
},
'resource_data': ['{"allowed": [{"IPProtocol": "tcp", "ports": ["22"]}], "direction": "INGRESS", "name": "0.0.0.0/0", "network": "https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default", "sourceRanges": ["0.0.0.0/0"]}']
},
],
),
(
{
'rule_id': 'No 0.0.0.0/0 policy allowed 2',
'match_policies': [{
'direction': 'ingress',
'allowed': ['*'],
}],
'verify_policies': [{
'sourceRanges': ['0.0.0.0/0'],
'allowed': ['*'],
}],
'mode': scanner_rules.RuleMode.BLACKLIST,
},
[
{
'project_id': 'p1',
'firewall_rule_name': '0.0.0.0/0',
'firewall_rule_network': 'https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default',
'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']),
'firewall_rule_direction': 'ingress',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'tcp', 'ports': ['22']}]),
},
{
'project_id': 'p2',
'firewall_rule_name': '0.0.0.0/0 2',
'firewall_rule_network': 'https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default',
'firewall_rule_source_ranges': json.dumps(
['1.1.1.1', '0.0.0.0/0']),
'firewall_rule_direction': 'ingress',
'firewall_rule_allowed': json.dumps(
[{'IPProtocol': 'all'}]),
}
],
[
{
'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
'resource_id': 'p1',
'full_name': '',
'rule_id': 'No 0.0.0.0/0 policy allowed 2',
'violation_type': 'FIREWALL_BLACKLIST_VIOLATION',
'policy_names': ['0.0.0.0/0'],
'recommended_actions': {
'DELETE_FIREWALL_RULES': [
'0.0.0.0/0'
]
},
'resource_data': ['{"allowed": [{"IPProtocol": "tcp", "ports": ["22"]}], "direction": "INGRESS", "name": "0.0.0.0/0", "network": "https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default", "sourceRanges": ["0.0.0.0/0"]}']
},
{
'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
'resource_id': 'p2',
'full_name': '',
'rule_id': 'No 0.0.0.0/0 policy allowed 2',
'violation_type': 'FIREWALL_BLACKLIST_VIOLATION',
'policy_names': ['0.0.0.0/0 2'],
'recommended_actions': {
'DELETE_FIREWALL_RULES': [
'0.0.0.0/0 2'
]
},
'resource_data': ['{"allowed": [{"IPProtocol": "all"}], "direction": "INGRESS", "name": "0.0.0.0/0 2", "network": "https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default", "sourceRanges": ["0.0.0.0/0", "1.1.1.1"]}']
},
],
),
])
def test_find_policy_violations_blacklist(
    self, rule_dict, policy_dicts, expected):
    """Exercise Rule.find_policy_violations for blacklist-mode rules.

    Args:
        rule_dict (dict): kwargs for the fre.Rule under test.
        policy_dicts (list): kwargs dicts for the FirewallRule policies.
        expected (list): expected violation dicts.
    """
    rule = fre.Rule(**rule_dict)
    # Comprehension replaces the original manual append loop.
    policies = [FirewallRule(**policy_dict) for policy_dict in policy_dicts]
    violations = list(rule.find_policy_violations(policies))
    self.assert_rule_violation_lists_equal(expected, violations)
@parameterized.parameterized.expand([
(
{
'rule_id': 'Only Allow 443 to tagged instances',
'match_policies': [{
'direction': 'ingress',
'allowed': [{'IPProtocol': 'tcp', 'ports': ['443']}],
}],
'verify_policies': [{
'sourceTags': ['https-server'],
'allowed': ['*'],
}],
'mode': scanner_rules.RuleMode.WHITELIST,
},
[{
'name': 'Any to 443 on https-server',
'sourceRanges': ['0.0.0.0/0'],
'direction': 'ingress',
'sourceTags': ['https-server'],
'allowed': [{'IPProtocol': 'tcp', 'ports': ['443']}],
}],
[],
),
(
{
'rule_id': 'Only Allow 443 to tagged instances',
'match_policies': [{
'direction': 'ingress',
'allowed': [{'IPProtocol': 'tcp', 'ports': ['443']}],
}],
'verify_policies': [{
'sourceTags': ['https-server'],
'allowed': ['*'],
}],
'mode': scanner_rules.RuleMode.WHITELIST,
},
[{
'project_id': 'p1',
'name': 'Any to 443 on https-server',
'network': 'https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default',
'sourceRanges': ['0.0.0.0/0'],
'direction': 'ingress',
'sourceTags': ['https-server', 'tag2'],
'allowed': [{'IPProtocol': 'tcp', 'ports': ['443']}],
}],
[
{
'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
'resource_id': 'p1',
'full_name': None,
'rule_id': 'Only Allow 443 to tagged instances',
'violation_type': 'FIREWALL_WHITELIST_VIOLATION',
'policy_names': ['Any to 443 on https-server'],
'recommended_actions': {
'DELETE_FIREWALL_RULES': [
'Any to 443 on https-server'
]
},
'resource_data': ['{"allowed": [{"IPProtocol": "tcp", "ports": ["443"]}], "direction": "INGRESS", "name": "Any to 443 on https-server", "network": "https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default", "sourceRanges": ["0.0.0.0/0"], "sourceTags": ["https-server", "tag2"]}']
},
],
),
(
{
'rule_id': 'Only Allow 443 to tagged instances',
'match_policies': [{
'direction': 'ingress',
'allowed': [{'IPProtocol': 'tcp', 'ports': ['443']}],
}],
'verify_policies': [{
'sourceTags': ['https-server'],
'allowed': ['*'],
}],
'mode': scanner_rules.RuleMode.WHITELIST,
},
[
{
'project_id': 'p1',
'name': 'Any to 443 on https-server',
'network': 'https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default',
'sourceRanges': ['0.0.0.0/0'],
'direction': 'ingress',
'sourceTags': ['tag1', 'tag2'],
'allowed': [{'IPProtocol': 'tcp', 'ports': ['443']}],
},
{
'project_id': 'p2',
'name': 'Any to 443 on https-server',
'network': 'https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default',
'sourceRanges': ['0.0.0.0/0'],
'direction': 'ingress',
'sourceTags': ['https-server'],
'allowed': [{'IPProtocol': 'tcp', 'ports': ['443']}],
},
{
'project_id': 'p3',
'name': 'Any to 80/443 to https-server and tag3',
'network': 'https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default',
'sourceRanges': ['0.0.0.0/0'],
'direction': 'ingress',
'sourceTags': ['https-server', 'tag3'],
'allowed': [{'IPProtocol': 'tcp', 'ports': ['443', '80']}],
},
],
[
{
'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
'resource_id': 'p1',
'full_name': None,
'rule_id': 'Only Allow 443 to tagged instances',
'violation_type': 'FIREWALL_WHITELIST_VIOLATION',
'policy_names': ['Any to 443 on https-server'],
'recommended_actions': {
'DELETE_FIREWALL_RULES': [
'Any to 443 on https-server'
]
},
'resource_data': ['{"allowed": [{"IPProtocol": "tcp", "ports": ["443"]}], "direction": "INGRESS", "name": "Any to 443 on https-server", "network": "https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default", "sourceRanges": ["0.0.0.0/0"], "sourceTags": ["tag1", "tag2"]}']
},
{
'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
'resource_id': 'p3',
'full_name': None,
'rule_id': 'Only Allow 443 to tagged instances',
'violation_type': 'FIREWALL_WHITELIST_VIOLATION',
'policy_names': ['Any to 80/443 to https-server and tag3'],
'recommended_actions': {
'DELETE_FIREWALL_RULES': [
'Any to 80/443 to https-server and tag3'
]
},
'resource_data': ['{"allowed": [{"IPProtocol": "tcp", "ports": ["80", "443"]}], "direction": "INGRESS", "name": "Any to 80/443 to https-server and tag3", "network": "https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default", "sourceRanges": ["0.0.0.0/0"], "sourceTags": ["https-server", "tag3"]}']
},
],
),
])
def test_find_policy_violations_whitelist(
    self, rule_dict, policy_dicts, expected):
    """Exercise Rule.find_policy_violations for whitelist-mode rules.

    Args:
        rule_dict (dict): kwargs for the fre.Rule under test.
        policy_dicts (list): raw policy dicts for FirewallRule.from_dict;
            an optional 'project_id' key becomes the policy's project.
        expected (list): expected violation dicts.
    """
    rule = fre.Rule(**rule_dict)
    # Comprehension replaces the original manual append loop; project_id
    # defaults to None when the fixture omits it, as before.
    policies = [
        FirewallRule.from_dict(
            policy_dict, project_id=policy_dict.get('project_id'))
        for policy_dict in policy_dicts
    ]
    violations = list(rule.find_policy_violations(policies))
    self.assert_rule_violation_lists_equal(expected, violations)
@parameterized.parameterized.expand([
    # Case 1: a policy equivalent to the REQUIRED rule exists, so the rule
    # produces no violations.
    (
        {
            'rule_id': 'Allow SSH to tag from 1.1.1.1',
            'match_policies': [{
                'name': 'name',
                'network': 'network',
                'direction': 'ingress',
                'action': 'allow',
                'sourceRanges': ['1.1.1.1'],
                'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}],
            }],
            'verify_policies': [],
            'mode': scanner_rules.RuleMode.REQUIRED,
        },
        [
            {
                'name': 'Any to 443',
                'sourceRanges': ['0.0.0.0/0'],
                'direction': 'ingress',
                'allowed': [{'IPProtocol': 'tcp', 'ports': ['443']}],
            },
            {
                'name': 'Allow 22 from 1.1.1.1',
                'network': 'network',
                'sourceRanges': ['1.1.1.1'],
                'direction': 'ingress',
                'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}],
            },
        ],
        [],
    ),
    # Case 2: no policy satisfies the REQUIRED rule (source range 1.1.1.2
    # instead of 1.1.1.1), so a FIREWALL_REQUIRED_VIOLATION is expected with
    # a recommendation to insert the missing rule.
    (
        {
            'rule_id': 'Allow SSH to tag from 1.1.1.1',
            'match_policies': [{
                'name': 'name',
                'network': 'network',
                'direction': 'ingress',
                'action': 'allow',
                'sourceRanges': ['1.1.1.1'],
                'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}],
            }],
            'verify_policies': [],
            'mode': scanner_rules.RuleMode.REQUIRED,
        },
        [
            {
                'project_id': 'p1',
                'name': 'Any to 443',
                'network': 'https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default',
                'sourceRanges': ['0.0.0.0/0'],
                'direction': 'ingress',
                'allowed': [{'IPProtocol': 'tcp', 'ports': ['443']}],
            },
            {
                'project_id': 'p1',
                'name': 'Allow 22 from 1.1.1.1',
                'network': 'https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default',
                'sourceRanges': ['1.1.1.2'],
                'direction': 'ingress',
                'allowed': [
                    {'IPProtocol': 'tcp', 'ports': ['22']}],
            },
        ],
        [
            {
                'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
                'resource_id': 'p1',
                'full_name': None,
                'rule_id': 'Allow SSH to tag from 1.1.1.1',
                'violation_type': 'FIREWALL_REQUIRED_VIOLATION',
                'policy_names': ['Any to 443', 'Allow 22 from 1.1.1.1'],
                'recommended_actions': {
                    'INSERT_FIREWALL_RULES': [
                        'Allow SSH to tag from 1.1.1.1: rule 0'
                    ]
                },
                'resource_data': ['{"allowed": [{"IPProtocol": "tcp", "ports": ["443"]}], "direction": "INGRESS", "name": "Any to 443", "network": "https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default", "sourceRanges": ["0.0.0.0/0"]}', '{"allowed": [{"IPProtocol": "tcp", "ports": ["22"]}], "direction": "INGRESS", "name": "Allow 22 from 1.1.1.1", "network": "https://www.googleapis.com/compute/v1/projects/yourproject/global/networks/default", "sourceRanges": ["1.1.1.2"]}']
            },
        ],
    ),
])
def test_find_policy_violations_exists(
        self, rule_dict, policy_dicts, expected):
    """Tests that REQUIRED-mode rules detect missing firewall policies.

    Args:
        rule_dict (dict): Keyword arguments used to construct the fre.Rule.
        policy_dicts (list): Dicts describing the project firewall policies.
        expected (list): Expected violation dicts (empty when compliant).
    """
    rule = fre.Rule(**rule_dict)
    policies = []
    for policy_dict in policy_dicts:
        # project_id may be absent in some fixtures; from_dict accepts None.
        project = policy_dict.get('project_id')
        policy = FirewallRule.from_dict(policy_dict, project_id=project)
        policies.append(policy)
    violations = list(rule.find_policy_violations(policies))
    self.assert_rule_violation_lists_equal(expected, violations)
@parameterized.parameterized.expand([
    # Case 1: the project policies match the golden policy exactly, so the
    # MATCHES-mode rule produces no violations.
    (
        {
            'rule_id': 'Golden Policy',
            'match_policies': [
                {
                    'name': 'name',
                    'network': 'network',
                    'direction': 'ingress',
                    'action': 'allow',
                    'sourceRanges': (['1.1.1.1']),
                    'allowed': (
                        [{'IPProtocol': 'tcp', 'ports': ['22']}]),
                },
                {
                    'name': 'name',
                    'network': 'network',
                    'direction': 'ingress',
                    'action': 'allow',
                    'sourceRanges': (['10.0.0.0/8']),
                    'allowed': (
                        [{'IPProtocol': 'tcp', 'ports': ['443']}]),
                },
            ],
            'verify_policies': [],
            'mode': scanner_rules.RuleMode.MATCHES,
        },
        [
            {
                'name': 'SSH from 1.1.1.1',
                'network': 'network',
                'direction': 'ingress',
                'action': 'allow',
                'sourceRanges': (['1.1.1.1']),
                'allowed': (
                    [{'IPProtocol': 'tcp', 'ports': ['22']}]),
            },
            {
                'name': '443 from 10.0.0.0/8',
                'network': 'network',
                'direction': 'ingress',
                'action': 'allow',
                'sourceRanges': (['10.0.0.0/8']),
                'allowed': (
                    [{'IPProtocol': 'tcp', 'ports': ['443']}]),
            },
        ],
        [],
    ),
    # Case 2: the project has one extra policy (80 from 10.0.0.0/8) beyond
    # the golden set, so a FIREWALL_MATCHES_VIOLATION recommends deleting it.
    (
        {
            'rule_id': 'Golden Policy',
            'match_policies': [
                {
                    'name': 'name',
                    'network': 'network',
                    'direction': 'ingress',
                    'action': 'allow',
                    'sourceRanges': (['1.1.1.1']),
                    'allowed': (
                        [{'IPProtocol': 'tcp', 'ports': ['22']}]),
                },
                {
                    'name': 'name',
                    'network': 'network',
                    'direction': 'ingress',
                    'action': 'allow',
                    'sourceRanges': (['10.0.0.0/8']),
                    'allowed': (
                        [{'IPProtocol': 'tcp', 'ports': ['443']}]),
                },
            ],
            'verify_policies': [],
            'mode': scanner_rules.RuleMode.MATCHES,
        },
        [
            {
                'project_id': 'p1',
                'name': 'SSH from 1.1.1.1',
                'network': 'network',
                'direction': 'ingress',
                'action': 'allow',
                'sourceRanges': (['1.1.1.1']),
                'allowed': (
                    [{'IPProtocol': 'tcp', 'ports': ['22']}]),
            },
            {
                'project_id': 'p1',
                'name': '443 from 10.0.0.0/8',
                'network': 'network',
                'direction': 'ingress',
                'action': 'allow',
                'sourceRanges': (['10.0.0.0/8']),
                'allowed': (
                    [{'IPProtocol': 'tcp', 'ports': ['443']}]),
            },
            {
                'project_id': 'p1',
                'name': '80 from 10.0.0.0/8',
                'network': 'network',
                'direction': 'ingress',
                'action': 'allow',
                'sourceRanges': (['10.0.0.0/8']),
                'allowed': (
                    [{'IPProtocol': 'tcp', 'ports': ['80']}]),
            },
        ],
        [
            {
                'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
                'resource_id': 'p1',
                'rule_id': 'Golden Policy',
                'full_name': None,
                'violation_type': 'FIREWALL_MATCHES_VIOLATION',
                'policy_names': [
                    'SSH from 1.1.1.1', '443 from 10.0.0.0/8',
                    '80 from 10.0.0.0/8'
                ],
                'recommended_actions': {
                    'INSERT_FIREWALL_RULES': [],
                    'DELETE_FIREWALL_RULES': [
                        '80 from 10.0.0.0/8'
                    ],
                    'UPDATE_FIREWALL_RULES': [],
                },
                'resource_data': ['{"allowed": [{"IPProtocol": "tcp", "ports": ["22"]}], "direction": "INGRESS", "name": "SSH from 1.1.1.1", "network": "network", "sourceRanges": ["1.1.1.1"]}', '{"allowed": [{"IPProtocol": "tcp", "ports": ["443"]}], "direction": "INGRESS", "name": "443 from 10.0.0.0/8", "network": "network", "sourceRanges": ["10.0.0.0/8"]}', '{"allowed": [{"IPProtocol": "tcp", "ports": ["80"]}], "direction": "INGRESS", "name": "80 from 10.0.0.0/8", "network": "network", "sourceRanges": ["10.0.0.0/8"]}']
            },
        ],
    ),
    # Case 3: one golden policy is missing (443) and one extra exists (80),
    # so the violation recommends both an insert and a delete.
    (
        {
            'rule_id': 'Golden Policy',
            'match_policies': [
                {
                    'name': 'name',
                    'network': 'network',
                    'direction': 'ingress',
                    'action': 'allow',
                    'sourceRanges': (['1.1.1.1']),
                    'allowed': (
                        [{'IPProtocol': 'tcp', 'ports': ['22']}]),
                },
                {
                    'name': 'name',
                    'network': 'network',
                    'direction': 'ingress',
                    'action': 'allow',
                    'sourceRanges': (['10.0.0.0/8']),
                    'allowed': (
                        [{'IPProtocol': 'tcp', 'ports': ['443']}]),
                },
            ],
            'verify_policies': [],
            'mode': scanner_rules.RuleMode.MATCHES,
        },
        [
            {
                'project_id': 'p1',
                'name': 'SSH from 1.1.1.1',
                'network': 'network',
                'direction': 'ingress',
                'action': 'allow',
                'sourceRanges': (['1.1.1.1']),
                'allowed': (
                    [{'IPProtocol': 'tcp', 'ports': ['22']}]),
            },
            {
                'project_id': 'p1',
                'name': '80 from 10.0.0.0/8',
                'network': 'network',
                'direction': 'ingress',
                'action': 'allow',
                'sourceRanges': (['10.0.0.0/8']),
                'allowed': (
                    [{'IPProtocol': 'tcp', 'ports': ['80']}]),
            },
        ],
        [
            {
                'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
                'resource_id': 'p1',
                'rule_id': 'Golden Policy',
                'full_name': None,
                'violation_type': 'FIREWALL_MATCHES_VIOLATION',
                'policy_names': ['SSH from 1.1.1.1', '80 from 10.0.0.0/8'],
                'recommended_actions': {
                    'INSERT_FIREWALL_RULES': [
                        'Golden Policy: rule 1'
                    ],
                    'DELETE_FIREWALL_RULES': [
                        '80 from 10.0.0.0/8'
                    ],
                    'UPDATE_FIREWALL_RULES': [],
                },
                'resource_data': ['{"allowed": [{"IPProtocol": "tcp", "ports": ["22"]}], "direction": "INGRESS", "name": "SSH from 1.1.1.1", "network": "network", "sourceRanges": ["1.1.1.1"]}', '{"allowed": [{"IPProtocol": "tcp", "ports": ["80"]}], "direction": "INGRESS", "name": "80 from 10.0.0.0/8", "network": "network", "sourceRanges": ["10.0.0.0/8"]}']
            },
        ],
    ),
])
def test_find_policy_violations_matches(
        self, rule_dict, policy_dicts, expected):
    """Tests MATCHES-mode rules against golden firewall policy sets.

    Args:
        rule_dict (dict): Keyword arguments used to construct the fre.Rule.
        policy_dicts (list): Dicts describing the project firewall policies.
        expected (list): Expected violation dicts (empty when compliant).
    """
    rule = fre.Rule(**rule_dict)
    policies = []
    for policy_dict in policy_dicts:
        # project_id may be absent in some fixtures; from_dict accepts None.
        project = policy_dict.get('project_id')
        policy = FirewallRule.from_dict(policy_dict, project_id=project)
        policies.append(policy)
    violations = list(rule.find_policy_violations(policies))
    self.assert_rule_violation_lists_equal(expected, violations)
def assert_rule_violation_lists_equal(self, expected, violations):
    """Asserts expected violation dicts pairwise-match actual violations.

    Args:
        expected (list): Expected violation dicts.
        violations (list): RuleViolation namedtuples from the rule engine.
    """
    # Bug fix: sorted() returns a new list and does NOT sort in place, so
    # the previous bare sorted(...) calls were no-ops and the zip below
    # compared lists in arbitrary order. Bind the sorted results.
    expected = sorted(expected, key=lambda k: k['resource_id'])
    violations = sorted(violations, key=lambda k: k.resource_id)
    # assertEqual gives a useful message on length mismatch, unlike the
    # former assertTrue(len(...) == len(...)).
    self.assertEqual(len(expected), len(violations))
    for expected_dict, violation in zip(expected, violations):
        self.assertItemsEqual(expected_dict.values(), list(violation))
class RuleBookTest(ForsetiTestCase):
@parameterized.parameterized.expand([
    # Two definitions sharing a rule_id must be rejected on the second add.
    (
        [
            {'rule_id': 'id', 'mode': 'matches', 'match_policies': ['test']},
            {'rule_id': 'id', 'mode': 'matches', 'match_policies': ['test']},
        ],
        fre.DuplicateFirewallRuleError,
        'Rule id "id" already in rules',
    ),
])
def test_add_rule_errors(self, rule_defs, expected_error, regexp):
    """Adding a rule whose id is already registered raises an error."""
    book = fre.RuleBook({})
    with self.assertRaisesRegexp(expected_error, regexp):
        for definition in rule_defs:
            book.add_rule(definition, 1)
@parameterized.parameterized.expand([
    (
        {'rule_id': 'id', 'mode': 'required', 'match_policies': ['test']},
    ),
])
def test_add_rule(self, rule_def):
    """A valid definition ends up registered under its rule id."""
    book = fre.RuleBook({})
    book.add_rule(rule_def, 1)
    self.assertIsNotNone(book.rules_map.get(rule_def.get('rule_id')))
@parameterized.parameterized.expand([
    # Missing rule_id.
    ([{}], fre.InvalidRuleDefinition, 'Rule requires rule_id'),
    # Missing mode.
    ([{'rule_id': 'id'}], fre.InvalidRuleDefinition, 'Rule requires mode'),
    # Unrecognized mode string.
    (
        [{'rule_id': 'id', 'mode': 'notavalidmode'}],
        fre.InvalidRuleDefinition,
        'Mode notavalidmode is not in valid modes',
    ),
    # Duplicate rule ids in a single batch.
    (
        [
            {'rule_id': 'id', 'mode': 'matches', 'match_policies': ['test']},
            {'rule_id': 'id', 'mode': 'matches', 'match_policies': ['test']},
        ],
        fre.DuplicateFirewallRuleError,
        'Rule id "id" already in rules',
    ),
])
def test_add_rules_errors(self, rule_defs, expected_error, regexp):
    """Invalid rule definition batches raise the matching error/message."""
    book = fre.RuleBook({})
    with self.assertRaisesRegexp(expected_error, regexp):
        book.add_rules(rule_defs)
@parameterized.parameterized.expand([
    (
        [
            {'rule_id': 'id', 'mode': 'required', 'match_policies': ['test']},
            {'rule_id': 'id2', 'mode': 'required', 'match_policies': ['test']},
        ],
    ),
])
def test_add_rules(self, rule_defs):
    """Every definition added in bulk is registered under its rule id."""
    book = fre.RuleBook({})
    book.add_rules(rule_defs)
    for definition in rule_defs:
        self.assertIsNotNone(book.rules_map.get(definition.get('rule_id')))
@parameterized.parameterized.expand([
    # A group definition must carry a group id.
    ([{}], fre.InvalidGroupDefinition, 'Group requires a group id'),
    # A group must reference at least one rule.
    (
        [{'group_id': 'id'}],
        fre.InvalidGroupDefinition,
        'Group "id" does not have any rules',
    ),
    # Referenced rules must already exist in the book.
    (
        [{'group_id': 'id', 'rule_ids': ['rid']}],
        fre.RuleDoesntExistError,
        'Rule id "rid" does not exist, cannot be in group',
    ),
])
def test_add_rule_groups_errors(self, group_defs, expected_error, regexp):
    """Invalid group definitions raise the documented errors."""
    book = fre.RuleBook({})
    with self.assertRaisesRegexp(expected_error, regexp):
        book.add_rule_groups(group_defs)
@parameterized.parameterized.expand([
    (
        [
            {'group_id': 'id', 'rule_ids': ['rid1', 'rid2', 'rid3']},
            {'group_id': 'id2', 'rule_ids': ['rid4']},
        ],
    ),
])
def test_add_rule_groups(self, group_defs):
    """Groups map onto exactly the rule ids they were defined with."""
    book = fre.RuleBook({})
    # Register the four rules the group definitions reference.
    for index in range(1, 5):
        book.rules_map['rid%s' % index] = 'rule%s' % index
    book.add_rule_groups(group_defs)
    for group in group_defs:
        group_id = group.get('group_id')
        self.assertIn(group_id, book.rule_groups_map)
        self.assertItemsEqual(
            group.get('rule_ids'), book.rule_groups_map[group_id])
@parameterized.parameterized.expand([
    # No resources key at all.
    ({}, fre.InvalidOrgDefinition, 'Org policy does not have any resources'),
    # Empty resources list.
    (
        {'resources': []},
        fre.InvalidOrgDefinition,
        'Org policy does not have any resources',
    ),
    # A resource entry without a type.
    (
        {'resources': [{}]},
        fre.resource_mod.errors.InvalidResourceTypeError,
        'Invalid resource type:',
    ),
    # Group id referenced before being defined.
    (
        {
            'resources': [
                {
                    'type': 'organization',
                    'rules': {'group_ids': ['id']},
                }
            ]
        },
        fre.GroupDoesntExistError,
        'Group "id" does not exist',
    ),
    # Rule id referenced before being defined.
    (
        {
            'resources': [
                {
                    'type': 'organization',
                    'rules': {'rule_ids': ['id']},
                }
            ]
        },
        fre.RuleDoesntExistError,
        'Rule id "id" does not exist',
    ),
])
def test_add_org_policy_errors(self, org_def, expected_error, regexp):
    """Malformed org policies raise the matching error and message."""
    book = fre.RuleBook({})
    with self.assertRaisesRegexp(expected_error, regexp):
        book.add_org_policy(org_def)
def test_add_org_policy(self):
    """Org policy resources pick up direct rules plus group members."""
    book = fre.RuleBook({})
    # Seed four rules; rule3/rule4 are reachable only through gid1.
    for index in range(1, 5):
        book.rules_map['rule%s' % index] = index
    book.rule_groups_map['gid1'] = ['rule3', 'rule4']
    org_def = {
        'resources': [
            {
                'type': 'folder',
                'resource_ids': ['res1', 'res2'],
                'rules': {
                    'rule_ids': ['rule1', 'rule2'],
                    'group_ids': ['gid1'],
                },
            },
        ],
    }
    book.add_org_policy(org_def)
    expected_rules = ['rule1', 'rule2', 'rule3', 'rule4']
    # Both folders must map to the union of direct and group rules.
    for resource_id in ('res1', 'res2'):
        folder = fre.resource_util.create_resource(
            resource_id=resource_id, resource_type='folder')
        self.assertItemsEqual(
            book.org_policy_rules_map[folder], expected_rules)
def test_find_violations(self):
    """End-to-end RuleBook test across org, folder and project scopes.

    Builds a rule book with one rule per mode (blacklist, whitelist,
    required, matches), binds them to resources through an org policy,
    then verifies that each project's policy triggers exactly its scoped
    rule and that a project with an empty rule list is exempt.
    """
    rule_defs = [
        {
            'rule_id': 'rule1',
            'mode': 'blacklist',
            'match_policies': [
                {
                    'direction': 'ingress',
                    'allowed': ['*'],
                    'targetTags': ['linux'],
                },
            ],
            'verify_policies': [
                {
                    'allowed': [{
                        'IPProtocol': 'tcp',
                        'ports': ['3389']
                    }],
                }
            ],
        },
        {
            'rule_id': 'rule2',
            'mode': 'whitelist',
            'match_policies': [
                {
                    'direction': 'ingress',
                    'allowed': ['*'],
                    'targetTags': ['test'],
                },
            ],
            'verify_policies': [
                {
                    'allowed': ['*'],
                    'sourceRanges': ['10.0.0.0/8'],
                }
            ],
        },
        {
            'rule_id': 'rule3',
            'mode': 'required',
            'match_policies': [
                {
                    'name': 'policy1',
                    'network': 'network1',
                    'direction': 'egress',
                    'denied': [{'IPProtocol': '*'}],
                    'destinationRanges': ['8.8.8.8'],
                }
            ],
        },
        {
            'rule_id': 'rule4',
            'mode': 'matches',
            'match_policies': [
                {
                    'name': 'policy1',
                    'network': 'network1',
                    'direction': 'ingress',
                    'allowed': [
                        {
                            'IPProtocol': 'tcp',
                            'ports': ['22'],
                        },
                    ],
                    'sourceRanges': ['0.0.0.0/0'],
                }
            ],
        },
    ]
    group_defs = [
        {
            'group_id': 'gid1',
            'rule_ids': ['rule1', 'rule2'],
        },
    ]
    # Scoping: rule4 on the org, gid1 (rule1+rule2) on two folders, rule3
    # on project2, and an explicit empty rule list on the exception project.
    org_def = {
        'resources': [
            {
                'type': 'organization',
                'resource_ids': ['org'],
                'rules': {
                    'rule_ids': ['rule4'],
                },
            },
            {
                'type': 'folder',
                'resource_ids': ['folder1', 'folder2'],
                'rules': {
                    'group_ids': ['gid1'],
                },
            },
            {
                'type': 'project',
                'resource_ids': ['project2'],
                'rules': {
                    'rule_ids': ['rule3'],
                },
            },
            {
                'type': 'project',
                'resource_ids': ['exception'],
                'rules': {
                    'rule_ids': [],
                },
            },
        ],
    }
    project0 = fre.resource_util.create_resource(
        resource_id='project0', resource_type='project')
    project1 = fre.resource_util.create_resource(
        resource_id='project1', resource_type='project')
    project2 = fre.resource_util.create_resource(
        resource_id='project2', resource_type='project')
    project3 = fre.resource_util.create_resource(
        resource_id='project3', resource_type='project')
    exception = fre.resource_util.create_resource(
        resource_id='exception', resource_type='project')
    folder1 = fre.resource_util.create_resource(
        resource_id='folder1', resource_type='folder')
    folder2 = fre.resource_util.create_resource(
        resource_id='folder2', resource_type='folder')
    folder3 = fre.resource_util.create_resource(
        resource_id='folder3', resource_type='folder')
    folder4 = fre.resource_util.create_resource(
        resource_id='folder4', resource_type='folder')
    org = fre.resource_util.create_resource(
        resource_id='org', resource_type='organization')
    # One policy per rule, each crafted to violate exactly that rule.
    policy_violates_rule_1 = fre.firewall_rule.FirewallRule.from_dict(
        {
            'name': 'policy1',
            'full_name': 'organization/org/folder/folder1/project/project0/firewall/policy1/',
            'network': 'network1',
            'direction': 'ingress',
            'allowed': [{'IPProtocol': 'tcp', 'ports': ['1', '3389']}],
            'sourceRanges': ['0.0.0.0/0'],
            'targetTags': ['linux'],
        },
        validate=True,
    )
    policy_violates_rule_2 = fre.firewall_rule.FirewallRule.from_dict(
        {
            'name': 'policy1',
            'full_name': 'organization/org/folder/folder2/project/project1/firewall/policy1/',
            'network': 'network1',
            'direction': 'ingress',
            'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}],
            'sourceRanges': ['11.0.0.1'],
            'targetTags': ['test'],
        },
        validate=True,
    )
    policy_violates_rule_3 = fre.firewall_rule.FirewallRule.from_dict(
        {
            'name': 'policy1',
            'full_name': 'organization/org/folder/folder3/folder/folder4/project/project2/firewall/policy1/',
            'network': 'network1',
            'direction': 'egress',
            'denied': [{'IPProtocol': 'tcp', 'ports': ['22']}],
            'destinationRanges': ['11.0.0.1'],
        },
        validate=True,
    )
    policy_violates_rule_4 = fre.firewall_rule.FirewallRule.from_dict(
        {
            'name': 'policy1',
            'full_name': 'organization/org/folder/folder3/project/project3/firewall/policy1/',
            'network': 'network1',
            'direction': 'ingress',
            'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}],
            'sourceRanges': ['0.0.0.0/1'],
        },
        validate=True,
    )
    # Fake resource-manager ancestry used by the mocked DAO below.
    ancestry = {
        project0: [folder1, org],
        project1: [folder2, org],
        project2: [folder4, folder3, org],
        project3: [folder3, org],
        exception: [folder3, org],
    }
    rule_book = fre.RuleBook(
        rule_defs=rule_defs,
        group_defs=group_defs,
        org_policy=org_def
    )
    rule_book.org_res_rel_dao = mock.Mock()
    rule_book.org_res_rel_dao.find_ancestors.side_effect = (
        lambda x,y: ancestry[x])
    project0_violations = [
        fre.RuleViolation(
            resource_type=resource_mod.ResourceType.FIREWALL_RULE,
            resource_id=None,
            full_name='organization/org/folder/folder1/project/project0/firewall/policy1/',
            rule_id='rule1',
            violation_type='FIREWALL_BLACKLIST_VIOLATION',
            policy_names=['policy1'],
            recommended_actions={'DELETE_FIREWALL_RULES': ['policy1']},
            resource_data=['{"allowed": [{"IPProtocol": "tcp", "ports": ["1", "3389"]}], "direction": "INGRESS", "name": "policy1", "network": "network1", "sourceRanges": ["0.0.0.0/0"], "targetTags": ["linux"]}']
        )
    ]
    project1_violations = [
        fre.RuleViolation(
            resource_type=resource_mod.ResourceType.FIREWALL_RULE,
            resource_id=None,
            full_name='organization/org/folder/folder2/project/project1/firewall/policy1/',
            rule_id='rule2',
            violation_type='FIREWALL_WHITELIST_VIOLATION',
            policy_names=['policy1'],
            recommended_actions={'DELETE_FIREWALL_RULES': ['policy1']},
            resource_data=['{"allowed": [{"IPProtocol": "tcp", "ports": ["22"]}], "direction": "INGRESS", "name": "policy1", "network": "network1", "sourceRanges": ["11.0.0.1"], "targetTags": ["test"]}']
        )
    ]
    project2_violations = [
        fre.RuleViolation(
            resource_type=resource_mod.ResourceType.FIREWALL_RULE,
            resource_id=None,
            full_name='organization/org/folder/folder3/folder/folder4/project/project2/firewall/policy1/',
            rule_id='rule3',
            violation_type='FIREWALL_REQUIRED_VIOLATION',
            policy_names=['policy1'],
            recommended_actions={'INSERT_FIREWALL_RULES': ['rule3: rule 0']},
            resource_data=['{"denied": [{"IPProtocol": "tcp", "ports": ["22"]}], "destinationRanges": ["11.0.0.1"], "direction": "EGRESS", "name": "policy1", "network": "network1"}']
        )
    ]
    project3_violations = [
        fre.RuleViolation(
            resource_type=resource_mod.ResourceType.FIREWALL_RULE,
            resource_id=None,
            full_name='organization/org/folder/folder3/project/project3/firewall/policy1/',
            rule_id='rule4',
            violation_type='FIREWALL_MATCHES_VIOLATION',
            policy_names=['policy1'],
            recommended_actions={
                'DELETE_FIREWALL_RULES': ['policy1'],
                'UPDATE_FIREWALL_RULES': [],
                'INSERT_FIREWALL_RULES': ['rule4: rule 0']
            },
            resource_data=['{"allowed": [{"IPProtocol": "tcp", "ports": ["22"]}], "direction": "INGRESS", "name": "policy1", "network": "network1", "sourceRanges": ["0.0.0.0/1"]}']
        )
    ]
    # The exception project reuses a violating policy but has no rules
    # bound to it, so it must produce no violations.
    resources_and_policies = (
        (project0, policy_violates_rule_1, project0_violations),
        (project1, policy_violates_rule_2, project1_violations),
        (project2, policy_violates_rule_3, project2_violations),
        (project3, policy_violates_rule_4, project3_violations),
        (exception, policy_violates_rule_1, []),
    )
    self.maxDiff=None
    for resource, policy, expected_violation in resources_and_policies:
        violations = rule_book.find_violations(resource, [policy])
        self.assert_rule_violation_lists_equal(
            expected_violation, list(violations))
def assert_rule_violation_lists_equal(self, expected, violations):
    """Asserts two RuleViolation collections contain the same elements.

    Args:
        expected (list): Expected RuleViolation namedtuples.
        violations (list): Actual RuleViolation namedtuples.
    """
    # Bug fix: sorted() returns a new list rather than sorting in place,
    # so the previous bare sorted(...) calls were dead code. Bind the
    # results (assertItemsEqual is order-insensitive, but sorting keeps
    # failure output deterministic).
    expected = sorted(expected, key=lambda k: k.resource_id)
    violations = sorted(violations, key=lambda k: k.resource_id)
    self.assertItemsEqual(expected, violations)
class RuleEngineTest(ForsetiTestCase):
def setUp(self):
    """Builds the resource hierarchy shared by the engine tests."""
    def _make(rid, rtype):
        return fre.resource_util.create_resource(
            resource_id=rid, resource_type=rtype)

    org = _make('org', 'organization')
    folder1 = _make('folder1', 'folder')
    folder2 = _make('test_instances', 'folder')
    folder3 = _make('folder3', 'folder')
    folder4 = _make('folder4', 'folder')
    # Projects keyed by id so parameterized tests can look them up.
    self.project_resource_map = {
        rid: _make(rid, 'project')
        for rid in ('test_project', 'project1', 'project2', 'project3',
                    'honeypot_exception')
    }
    projects = self.project_resource_map
    # Ancestor chains (closest first) returned by the mocked DAO.
    self.ancestry = {
        projects['test_project']: [folder1, org],
        projects['project1']: [folder2, org],
        projects['project2']: [folder4, folder3, org],
        projects['project3']: [folder3, org],
        projects['honeypot_exception']: [folder3, org],
    }
def test_build_rule_book_from_yaml(self):
    """Loading the yaml fixture yields the expected map cardinalities."""
    rules_local_path = get_datafile_path(
        __file__, 'firewall_test_rules.yaml')
    engine = fre.FirewallRulesEngine(rules_file_path=rules_local_path)
    engine.build_rule_book({})
    book = engine.rule_book
    self.assertEqual(4, len(book.rules_map))
    self.assertEqual(1, len(book.rule_groups_map))
    self.assertEqual(6, len(book.org_policy_rules_map))
@parameterized.parameterized.expand([
    # Case 1: RDP-to-linux policy trips the blacklist rule scoped via
    # folder1.
    (
        'test_project',
        {
            'name': 'policy1',
            'full_name': ('organization/org/folder/folder1/'
                          'project/project0/firewall/policy1/'),
            'network': 'network1',
            'direction': 'ingress',
            'allowed': [{'IPProtocol': 'tcp', 'ports': ['1', '3389']}],
            'sourceRanges': ['0.0.0.0/0'],
            'targetTags': ['linux'],
        },
        [
            {
                'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
                'resource_id': None,
                'full_name': ('organization/org/folder/folder1/'
                              'project/project0/firewall/policy1/'),
                'rule_id': 'no_rdp_to_linux',
                'violation_type': 'FIREWALL_BLACKLIST_VIOLATION',
                'policy_names': ['policy1'],
                'recommended_actions': {
                    'DELETE_FIREWALL_RULES': ['policy1']
                },
                'resource_data': ['{"allowed": [{"IPProtocol": "tcp", "ports": ["1", "3389"]}], "direction": "INGRESS", "name": "policy1", "network": "network1", "sourceRanges": ["0.0.0.0/0"], "targetTags": ["linux"]}']
            }
        ],
    ),
    # Case 2: SSH from outside 10.0.0.0/8 trips the test-instances
    # whitelist rule scoped via the test_instances folder.
    (
        'project1',
        {
            'name': 'policy1',
            'full_name': ('organization/org/folder/test_instances/'
                          'project/project1/firewall/policy1/'),
            'network': 'network1',
            'direction': 'ingress',
            'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}],
            'sourceRanges': ['11.0.0.1'],
            'targetTags': ['test'],
        },
        [
            {
                'resource_type': resource_mod.ResourceType.FIREWALL_RULE,
                'resource_id': None,
                'full_name': ('organization/org/folder/test_instances/'
                              'project/project1/firewall/policy1/'),
                'rule_id': 'test_instances_rule',
                'violation_type': 'FIREWALL_WHITELIST_VIOLATION',
                'policy_names': ['policy1'],
                'recommended_actions': {
                    'DELETE_FIREWALL_RULES': ['policy1']
                },
                'resource_data': ['{"allowed": [{"IPProtocol": "tcp", "ports": ["22"]}], "direction": "INGRESS", "name": "policy1", "network": "network1", "sourceRanges": ["11.0.0.1"], "targetTags": ["test"]}']
            }
        ],
    ),
    # Case 3: the honeypot exception project has no rules bound, so the
    # same offending policy yields no violations.
    (
        'honeypot_exception',
        {
            'name': 'policy1',
            'full_name': ('organization/org/folder/folder1/'
                          'project/project0/firewall/policy1/'),
            'network': 'network1',
            'direction': 'ingress',
            'allowed': [{'IPProtocol': 'tcp', 'ports': ['1', '3389']}],
            'sourceRanges': ['0.0.0.0/0'],
            'targetTags': ['linux'],
        },
        [],
    ),
])
def test_find_violations_from_yaml_rule_book(
        self, project, policy_dict, expected_violations_dicts):
    """Runs the yaml-configured engine against a single project policy.

    Args:
        project (str): Key into self.project_resource_map.
        policy_dict (dict): Firewall policy to evaluate.
        expected_violations_dicts (list): Expected violations as dicts.
    """
    rules_local_path = get_datafile_path(
        __file__, 'firewall_test_rules.yaml')
    rules_engine = fre.FirewallRulesEngine(rules_file_path=rules_local_path)
    rules_engine.build_rule_book({})
    resource = self.project_resource_map[project]
    policy = fre.firewall_rule.FirewallRule.from_dict(
        policy_dict, validate=True)
    # Replace the relationship DAO so ancestry comes from the fixture map.
    rules_engine.rule_book.org_res_rel_dao = mock.Mock()
    rules_engine.rule_book.org_res_rel_dao.find_ancestors.side_effect = (
        lambda x,y: self.ancestry[x])
    violations = rules_engine.find_policy_violations(resource, [policy])
    expected_violations = [
        fre.RuleViolation(**v) for v in expected_violations_dicts]
    self.assert_rule_violation_lists_equal(expected_violations, violations)
def assert_rule_violation_lists_equal(self, expected, violations):
    """Asserts two RuleViolation collections contain the same elements.

    Args:
        expected (list): Expected RuleViolation namedtuples.
        violations (list): Actual RuleViolation namedtuples.
    """
    # Bug fix: sorted() returns a new list rather than sorting in place,
    # so the previous bare sorted(...) calls were dead code. Bind the
    # results (assertItemsEqual is order-insensitive, but sorting keeps
    # failure output deterministic).
    expected = sorted(expected, key=lambda k: k.resource_id)
    violations = sorted(violations, key=lambda k: k.resource_id)
    self.assertItemsEqual(expected, violations)
# Standard unittest entry point so this module can be run directly.
if __name__ == '__main__':
    unittest.main()
| 39.90036
| 527
| 0.45675
| 5,618
| 66,474
| 5.174439
| 0.05162
| 0.016374
| 0.016718
| 0.012659
| 0.865428
| 0.842931
| 0.828345
| 0.792638
| 0.775748
| 0.768455
| 0
| 0.033948
| 0.392906
| 66,474
| 1,665
| 528
| 39.924324
| 0.686391
| 0.008815
| 0
| 0.627087
| 0
| 0.017316
| 0.31498
| 0.086028
| 0
| 0
| 0
| 0
| 0.018553
| 1
| 0.014224
| false
| 0
| 0.006184
| 0
| 0.022263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6a8775688455bff9cbe0075783d4d85b0c250506
| 25,772
|
py
|
Python
|
tests/models/test_DurableOrchestrationClient.py
|
pblocz/azure-functions-durable-python
|
59e77089d4d9934aff4c884b55e3df6f350d03fe
|
[
"MIT"
] | null | null | null |
tests/models/test_DurableOrchestrationClient.py
|
pblocz/azure-functions-durable-python
|
59e77089d4d9934aff4c884b55e3df6f350d03fe
|
[
"MIT"
] | null | null | null |
tests/models/test_DurableOrchestrationClient.py
|
pblocz/azure-functions-durable-python
|
59e77089d4d9934aff4c884b55e3df6f350d03fe
|
[
"MIT"
] | null | null | null |
import json
from typing import Any
import pytest
from azure.durable_functions.models.OrchestrationRuntimeStatus import OrchestrationRuntimeStatus
from azure.durable_functions.models.DurableOrchestrationClient \
import DurableOrchestrationClient
from azure.durable_functions.models.DurableOrchestrationStatus import DurableOrchestrationStatus
from tests.conftest import replace_stand_in_bits
from tests.test_utils.constants import RPC_BASE_URL
from unittest.mock import Mock
# Shared fixtures for the DurableOrchestrationClient tests.
TEST_INSTANCE_ID = '2e2568e7-a906-43bd-8364-c81733c5891e'
TEST_CREATED_TIME = '2020-01-01T05:00:00Z'
TEST_LAST_UPDATED_TIME = '2020-01-01T05:00:00Z'
# Messages returned by the extension for the corresponding HTTP status code.
MESSAGE_400 = 'instance failed or terminated'
MESSAGE_404 = 'instance not found or pending'
MESSAGE_500 = 'instance failed with unhandled exception'
MESSAGE_501 = "well we didn't expect that"
INSTANCE_ID = "2e2568e7-a906-43bd-8364-c81733c5891e"
REASON = "Stuff"
TEST_ORCHESTRATOR = "MyDurableOrchestrator"
# Canned payload pieces for the "orchestrator not found" ArgumentException.
EXCEPTION_ORCHESTRATOR_NOT_FOUND_EXMESSAGE = "The function <orchestrator> doesn't exist,"\
    " is disabled, or is not an orchestrator function. Additional info: "\
    "the following are the known orchestrator functions: <list>"
EXCEPTION_ORCHESTRATOR_NOT_FOUND_MESSAGE = "One or more of the arguments submitted is incorrect"
EXCEPTION_TYPE_ORCHESTRATOR_NOT_FOUND = "System.ArgumentException"
STACK_TRACE = "' at Microsoft.Azure.WebJobs.Extensions.DurableTask..."
class MockRequest:
    """Test double for the durable client's async HTTP helpers.

    Asserts every call targets the expected URL, returns a canned
    [status_code, payload] response, and counts GET calls so tests can
    verify polling behavior.
    """

    def __init__(self, expected_url: str, response: Any):
        # Bug fix: the original annotated `response` as the list literal
        # `[int, any]`, which is not a valid type expression (`any` is the
        # builtin function). The response is a [status_code, payload] pair.
        self._expected_url = expected_url
        self._response = response
        self._get_count = 0

    @property
    def get_count(self):
        """Number of times get() has been awaited."""
        return self._get_count

    async def get(self, url: str):
        """Simulates an HTTP GET, recording the call."""
        self._get_count += 1
        assert url == self._expected_url
        return self._response

    async def delete(self, url: str):
        """Simulates an HTTP DELETE."""
        assert url == self._expected_url
        return self._response

    async def post(self, url: str, data: Any = None):
        """Simulates an HTTP POST; the payload is accepted but ignored."""
        assert url == self._expected_url
        return self._response
def test_get_start_new_url(binding_string):
    """The start-new URL embeds the function name and the instance id."""
    client = DurableOrchestrationClient(binding_string)
    instance_id = "2e2568e7-a906-43bd-8364-c81733c5891e"
    function_name = "my_function"
    expected_url = replace_stand_in_bits(
        f"{RPC_BASE_URL}orchestrators/{function_name}/{instance_id}")
    assert client._get_start_new_url(instance_id, function_name) == expected_url
def test_get_input_returns_none_when_none_supplied():
    """_get_json_input passes a missing input through as None."""
    assert DurableOrchestrationClient._get_json_input(None) is None
def test_get_input_returns_json_string(binding_string):
    """_get_json_input serializes a decoded payload back to JSON text."""
    payload = json.loads(binding_string)
    serialized = DurableOrchestrationClient._get_json_input(payload)
    assert json.dumps(payload) == serialized
def test_get_raise_event_url(binding_string):
    """The raise-event URL carries event name, task hub and connection."""
    client = DurableOrchestrationClient(binding_string)
    instance_id = "2e2568e7-a906-43bd-8364-c81733c5891e"
    event_name = "test_event_name"
    task_hub_name = "test_task_hub"
    connection_name = "test_connection"
    expected_url = replace_stand_in_bits(
        f"{RPC_BASE_URL}instances/{instance_id}/raiseEvent/{event_name}"
        f"?taskHub={task_hub_name}&connection={connection_name}")
    actual_url = client._get_raise_event_url(
        instance_id, event_name, task_hub_name, connection_name)
    assert expected_url == actual_url
def test_create_check_status_response(binding_string):
    """Verifies the 202 management-payload response for a new instance.

    The body must carry the five instance webhook URLs and the headers must
    point pollers at the status-query endpoint with a Retry-After hint.
    """
    client = DurableOrchestrationClient(binding_string)
    instance_id = "2e2568e7-a906-43bd-8364-c81733c5891e"
    request = Mock(url="http://test_azure.net/api/orchestrators/DurableOrchestrationTrigger")
    returned_response = client.create_check_status_response(request, instance_id)
    # Expected payload with TASK_HUB_NAME / AUTH_CODE stand-ins that get
    # substituted from the binding below.
    http_management_payload = {
        "id": instance_id,
        "statusQueryGetUri":
            r"http://test_azure.net/runtime/webhooks/durabletask/instances/"
            r"2e2568e7-a906-43bd-8364-c81733c5891e?taskHub"
            r"=TASK_HUB_NAME&connection=Storage&code=AUTH_CODE",
        "sendEventPostUri":
            r"http://test_azure.net/runtime/webhooks/durabletask/instances/"
            r"2e2568e7-a906-43bd-8364-c81733c5891e/raiseEvent/{"
            r"eventName}?taskHub=TASK_HUB_NAME&connection=Storage&code=AUTH_CODE",
        "terminatePostUri":
            r"http://test_azure.net/runtime/webhooks/durabletask/instances/"
            r"2e2568e7-a906-43bd-8364-c81733c5891e/terminate"
            r"?reason={text}&taskHub=TASK_HUB_NAME&connection=Storage&code=AUTH_CODE",
        "rewindPostUri":
            r"http://test_azure.net/runtime/webhooks/durabletask/instances/"
            r"2e2568e7-a906-43bd-8364-c81733c5891e/rewind?reason"
            r"={text}&taskHub=TASK_HUB_NAME&connection=Storage&code=AUTH_CODE",
        "purgeHistoryDeleteUri":
            r"http://test_azure.net/runtime/webhooks/durabletask/instances/"
            r"2e2568e7-a906-43bd-8364-c81733c5891e"
            r"?taskHub=TASK_HUB_NAME&connection=Storage&code=AUTH_CODE"
    }
    for key, _ in http_management_payload.items():
        http_management_payload[key] = replace_stand_in_bits(http_management_payload[key])
    expected_response = {
        "status_code": 202,
        "body": json.dumps(http_management_payload),
        "headers": {
            "Content-Type": "application/json",
            "Location": http_management_payload["statusQueryGetUri"],
            "Retry-After": "10",
        },
    }
    for k, v in expected_response.get("headers").items():
        assert v == returned_response.headers.get(k)
    assert expected_response.get("status_code") == returned_response.status_code
    assert expected_response.get("body") == returned_response.get_body().decode()
@pytest.mark.asyncio
async def test_get_202_get_status_success(binding_string):
    """A 202 reply parses into a status whose runtime state is Running."""
    running_payload = dict(createdTime=TEST_CREATED_TIME,
                           lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                           runtimeStatus="Running")
    mock_request = MockRequest(
        expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
        response=[202, running_payload])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock_request.get
    status = await client.get_status(TEST_INSTANCE_ID)
    assert status is not None
    assert status.runtime_status.name == "Running"
@pytest.mark.asyncio
async def test_get_200_get_status_success(binding_string):
    """A 200 reply parses into a status whose runtime state is Completed."""
    completed_payload = dict(createdTime=TEST_CREATED_TIME,
                             lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                             runtimeStatus="Completed")
    mock_request = MockRequest(
        expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
        response=[200, completed_payload])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock_request.get
    status = await client.get_status(TEST_INSTANCE_ID)
    assert status is not None
    assert status.runtime_status.name == "Completed"
@pytest.mark.asyncio
async def test_get_500_get_status_failed(binding_string):
    """A 500 reply yields a status carrying the unhandled-exception message."""
    mock_request = MockRequest(
        expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
        response=[500, MESSAGE_500])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock_request.get
    status = await client.get_status(TEST_INSTANCE_ID)
    assert status is not None
    assert status.message == MESSAGE_500
@pytest.mark.asyncio
async def test_get_400_get_status_failed(binding_string):
    """An HTTP 400 from get_status still returns a result carrying the error message."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[400, MESSAGE_400])
    client._get_async_request = mock.get
    result = await client.get_status(TEST_INSTANCE_ID)
    assert result is not None
    assert result.message == MESSAGE_400
@pytest.mark.asyncio
async def test_get_404_get_status_failed(binding_string):
    """An HTTP 404 from get_status still returns a result carrying the error message."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[404, MESSAGE_404])
    client._get_async_request = mock.get
    result = await client.get_status(TEST_INSTANCE_ID)
    assert result is not None
    assert result.message == MESSAGE_404
@pytest.mark.asyncio
async def test_get_501_get_status_failed(binding_string):
    """An unexpected HTTP 501 from get_status propagates as an exception."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[501, MESSAGE_501])
    client._get_async_request = mock.get
    with pytest.raises(Exception):
        await client.get_status(TEST_INSTANCE_ID)
@pytest.mark.asyncio
async def test_get_200_get_status_by_success(binding_string):
    """Filtering by runtime status returns one entry per matching instance."""
    running = dict(createdTime=TEST_CREATED_TIME,
                   lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                   runtimeStatus="Running")
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/?runtimeStatus=Running",
                       response=[200, [running, dict(running)]])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock.get
    result = await client.get_status_by(runtime_status=[OrchestrationRuntimeStatus.Running])
    assert result is not None
    assert len(result) == 2
@pytest.mark.asyncio
async def test_get_500_get_status_by_failed(binding_string):
    """An HTTP 500 from get_status_by propagates as an exception."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/?runtimeStatus=Running",
                       response=[500, MESSAGE_500])
    client._get_async_request = mock.get
    with pytest.raises(Exception):
        await client.get_status_by(runtime_status=[OrchestrationRuntimeStatus.Running])
@pytest.mark.asyncio
async def test_get_200_get_status_all_success(binding_string):
    """get_status_all returns one entry per instance reported by the extension."""
    running = dict(createdTime=TEST_CREATED_TIME,
                   lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                   runtimeStatus="Running")
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/",
                       response=[200, [running, dict(running)]])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock.get
    result = await client.get_status_all()
    assert result is not None
    assert len(result) == 2
@pytest.mark.asyncio
async def test_get_500_get_status_all_failed(binding_string):
    """An HTTP 500 from get_status_all propagates as an exception."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/",
                       response=[500, MESSAGE_500])
    client._get_async_request = mock.get
    with pytest.raises(Exception):
        await client.get_status_all()
@pytest.mark.asyncio
async def test_delete_200_purge_instance_history(binding_string):
    """An HTTP 200 purge response reports one deleted instance."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[200, dict(instancesDeleted=1)])
    client._delete_async_request = mock.delete
    result = await client.purge_instance_history(TEST_INSTANCE_ID)
    assert result is not None
    assert result.instances_deleted == 1
@pytest.mark.asyncio
async def test_delete_404_purge_instance_history(binding_string):
    """Purging an unknown instance (HTTP 404) reports zero deletions, not an error."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[404, MESSAGE_404])
    client._delete_async_request = mock.delete
    result = await client.purge_instance_history(TEST_INSTANCE_ID)
    assert result is not None
    assert result.instances_deleted == 0
@pytest.mark.asyncio
async def test_delete_500_purge_instance_history(binding_string):
    """An HTTP 500 from purge_instance_history propagates as an exception."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[500, MESSAGE_500])
    client._delete_async_request = mock.delete
    with pytest.raises(Exception):
        await client.purge_instance_history(TEST_INSTANCE_ID)
@pytest.mark.asyncio
async def test_delete_200_purge_instance_history_by(binding_string):
    """An HTTP 200 filtered purge reports one deleted instance."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/?runtimeStatus=Running",
                       response=[200, dict(instancesDeleted=1)])
    client._delete_async_request = mock.delete
    result = await client.purge_instance_history_by(
        runtime_status=[OrchestrationRuntimeStatus.Running])
    assert result is not None
    assert result.instances_deleted == 1
@pytest.mark.asyncio
async def test_delete_404_purge_instance_history_by(binding_string):
    """A filtered purge matching nothing (HTTP 404) reports zero deletions."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/?runtimeStatus=Running",
                       response=[404, MESSAGE_404])
    client._delete_async_request = mock.delete
    result = await client.purge_instance_history_by(
        runtime_status=[OrchestrationRuntimeStatus.Running])
    assert result is not None
    assert result.instances_deleted == 0
@pytest.mark.asyncio
async def test_delete_500_purge_instance_history_by(binding_string):
    """An HTTP 500 from a filtered purge propagates as an exception."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/?runtimeStatus=Running",
                       response=[500, MESSAGE_500])
    client._delete_async_request = mock.delete
    with pytest.raises(Exception):
        await client.purge_instance_history_by(
            runtime_status=[OrchestrationRuntimeStatus.Running])
@pytest.mark.asyncio
async def test_post_202_terminate(binding_string):
    """Terminate succeeds silently on HTTP 202; the reason is URL-encoded."""
    raw_reason = 'stuff and things'
    reason = 'stuff%20and%20things'
    url = f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}/terminate?reason={reason}"
    mock = MockRequest(expected_url=url, response=[202, None])
    client = DurableOrchestrationClient(binding_string)
    client._post_async_request = mock.post
    result = await client.terminate(TEST_INSTANCE_ID, raw_reason)
    assert result is None
@pytest.mark.asyncio
async def test_post_410_terminate(binding_string):
    """Terminating an already-gone instance (HTTP 410) is treated as success."""
    raw_reason = 'stuff and things'
    reason = 'stuff%20and%20things'
    url = f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}/terminate?reason={reason}"
    mock = MockRequest(expected_url=url, response=[410, None])
    client = DurableOrchestrationClient(binding_string)
    client._post_async_request = mock.post
    result = await client.terminate(TEST_INSTANCE_ID, raw_reason)
    assert result is None
@pytest.mark.asyncio
async def test_post_404_terminate(binding_string):
    """Terminating an unknown instance (HTTP 404) raises."""
    raw_reason = 'stuff and things'
    reason = 'stuff%20and%20things'
    url = f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}/terminate?reason={reason}"
    mock = MockRequest(expected_url=url, response=[404, MESSAGE_404])
    client = DurableOrchestrationClient(binding_string)
    client._post_async_request = mock.post
    with pytest.raises(Exception):
        await client.terminate(TEST_INSTANCE_ID, raw_reason)
@pytest.mark.asyncio
async def test_post_500_terminate(binding_string):
    """An HTTP 500 from terminate propagates as an exception."""
    raw_reason = 'stuff and things'
    reason = 'stuff%20and%20things'
    url = f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}/terminate?reason={reason}"
    mock = MockRequest(expected_url=url, response=[500, MESSAGE_500])
    client = DurableOrchestrationClient(binding_string)
    client._post_async_request = mock.post
    with pytest.raises(Exception):
        await client.terminate(TEST_INSTANCE_ID, raw_reason)
@pytest.mark.asyncio
async def test_wait_or_response_200_completed(binding_string):
    """A Completed orchestration yields a 200 response whose body is the raw output."""
    output = 'Some output'
    status = dict(createdTime=TEST_CREATED_TIME,
                  lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                  runtimeStatus="Completed",
                  output=output)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[200, status])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock.get
    result = await client.wait_for_completion_or_create_check_status_response(
        None, TEST_INSTANCE_ID)
    assert result is not None
    assert result.status_code == 200
    assert result.mimetype == 'application/json'
    assert result.get_body().decode() == output
@pytest.mark.asyncio
async def test_wait_or_response_200_canceled(binding_string):
    """A Canceled orchestration yields 200 with the serialized status as the body."""
    status = dict(createdTime=TEST_CREATED_TIME,
                  lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                  runtimeStatus="Canceled")
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[200, status])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock.get
    result = await client.wait_for_completion_or_create_check_status_response(
        None, TEST_INSTANCE_ID)
    assert result is not None
    assert result.status_code == 200
    assert result.mimetype == 'application/json'
    expected_body = DurableOrchestrationStatus.from_json(status).to_json()
    assert json.loads(result.get_body().decode()) == expected_body
@pytest.mark.asyncio
async def test_wait_or_response_200_terminated(binding_string):
    """A Terminated orchestration yields 200 with the serialized status as the body."""
    status = dict(createdTime=TEST_CREATED_TIME,
                  lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                  runtimeStatus="Terminated")
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[200, status])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock.get
    result = await client.wait_for_completion_or_create_check_status_response(
        None, TEST_INSTANCE_ID)
    assert result is not None
    assert result.status_code == 200
    assert result.mimetype == 'application/json'
    expected_body = DurableOrchestrationStatus.from_json(status).to_json()
    assert json.loads(result.get_body().decode()) == expected_body
@pytest.mark.asyncio
async def test_wait_or_response_200_failed(binding_string):
    """A Failed orchestration yields a 500 response with the serialized status as the body."""
    status = dict(createdTime=TEST_CREATED_TIME,
                  lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                  runtimeStatus="Failed")
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[200, status])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock.get
    result = await client.wait_for_completion_or_create_check_status_response(
        None, TEST_INSTANCE_ID)
    assert result is not None
    assert result.status_code == 500
    assert result.mimetype == 'application/json'
    expected_body = DurableOrchestrationStatus.from_json(status).to_json()
    assert json.loads(result.get_body().decode()) == expected_body
@pytest.mark.asyncio
async def test_wait_or_response_check_status_response(binding_string):
    """While still Running, the client keeps polling within the timeout window."""
    running = dict(createdTime=TEST_CREATED_TIME,
                   lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                   runtimeStatus="Running")
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                       response=[200, running])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock.get
    request = Mock(url="http://test_azure.net/api/orchestrators/DurableOrchestrationTrigger")
    result = await client.wait_for_completion_or_create_check_status_response(
        request, TEST_INSTANCE_ID, timeout_in_milliseconds=2000)
    assert result is not None
    assert mock.get_count == 3
@pytest.mark.asyncio
async def test_wait_or_response_timeout_without_request_raises(binding_string):
    """Timing out on a still-Running instance with no HTTP request object raises.

    NOTE(review): this function previously reused the name
    ``test_wait_or_response_check_status_response``, silently shadowing the
    test defined immediately above so pytest never collected that one.
    Renamed so both tests run.
    """
    status = dict(createdTime=TEST_CREATED_TIME,
                  lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                  runtimeStatus="Running")
    mock_request = MockRequest(expected_url=f"{RPC_BASE_URL}instances/{TEST_INSTANCE_ID}",
                               response=[200, status])
    client = DurableOrchestrationClient(binding_string)
    client._get_async_request = mock_request.get
    # With no request to build a check-status response from, the short timeout
    # must surface as an exception.
    with pytest.raises(Exception):
        await client.wait_for_completion_or_create_check_status_response(
            None, TEST_INSTANCE_ID, timeout_in_milliseconds=500)
@pytest.mark.asyncio
async def test_start_new_orchestrator_not_found(binding_string):
    """Test that we throw the right exception when the orchestrator is not found."""
    error_payload = dict(ExceptionMessage=EXCEPTION_ORCHESTRATOR_NOT_FOUND_EXMESSAGE,
                         StackTrace=STACK_TRACE,
                         Message=EXCEPTION_ORCHESTRATOR_NOT_FOUND_MESSAGE,
                         ExceptionType=EXCEPTION_TYPE_ORCHESTRATOR_NOT_FOUND)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}orchestrators/{TEST_ORCHESTRATOR}",
                       response=[400, error_payload])
    client = DurableOrchestrationClient(binding_string)
    client._post_async_request = mock.post
    with pytest.raises(Exception) as ex:
        await client.start_new(TEST_ORCHESTRATOR)
    ex.match(EXCEPTION_ORCHESTRATOR_NOT_FOUND_EXMESSAGE)
@pytest.mark.asyncio
async def test_start_new_orchestrator_internal_exception(binding_string):
    """Test that we throw the right exception when the extension fails internally."""
    error_payload = dict(ExceptionMessage=EXCEPTION_ORCHESTRATOR_NOT_FOUND_EXMESSAGE,
                         StackTrace=STACK_TRACE,
                         Message=EXCEPTION_ORCHESTRATOR_NOT_FOUND_MESSAGE,
                         ExceptionType=EXCEPTION_TYPE_ORCHESTRATOR_NOT_FOUND)
    mock = MockRequest(expected_url=f"{RPC_BASE_URL}orchestrators/{TEST_ORCHESTRATOR}",
                       response=[500, error_payload])
    client = DurableOrchestrationClient(binding_string)
    client._post_async_request = mock.post
    expected_message = str(error_payload)
    with pytest.raises(Exception) as ex:
        await client.start_new(TEST_ORCHESTRATOR)
    ex.match(expected_message)
@pytest.mark.asyncio
async def test_rewind_works_under_200_and_202_http_codes(binding_string):
    """Tests that the rewind API works as expected under 'successful' http codes: 200, 202.

    NOTE(review): renamed from ``..._200_and_200_...`` — the old name was a
    typo; the loop below exercises codes 200 and 202.
    """
    client = DurableOrchestrationClient(binding_string)
    for code in [200, 202]:
        mock_request = MockRequest(
            expected_url=f"{RPC_BASE_URL}instances/{INSTANCE_ID}/rewind?reason={REASON}",
            response=[code, ""])
        client._post_async_request = mock_request.post
        # A successful rewind returns nothing.
        result = await client.rewind(INSTANCE_ID, REASON)
        assert result is None
@pytest.mark.asyncio
async def test_rewind_throws_exception_during_404_410_and_500_errors(binding_string):
    """Tests the behaviour of rewind under 'exception' http codes: 404, 410, 500"""
    client = DurableOrchestrationClient(binding_string)
    # Each HTTP code maps to a specific client-side error message.
    expectations = [
        (404, f"No instance with ID {INSTANCE_ID} found."),
        (410, "The rewind operation is only supported on failed orchestration instances."),
        (500, "Something went wrong"),
    ]
    for http_code, expected_exception_str in expectations:
        mock = MockRequest(
            expected_url=f"{RPC_BASE_URL}instances/{INSTANCE_ID}/rewind?reason={REASON}",
            response=[http_code, "Something went wrong"])
        client._post_async_request = mock.post
        with pytest.raises(Exception) as ex:
            await client.rewind(INSTANCE_ID, REASON)
        assert str(ex.value) == expected_exception_str
@pytest.mark.asyncio
async def test_rewind_with_no_rpc_endpoint(binding_string):
    """Tests the behaviour of rewind without an RPC endpoint / under the legacy HTTP endpoint."""
    client = DurableOrchestrationClient(binding_string)
    mock = MockRequest(
        expected_url=f"{RPC_BASE_URL}instances/{INSTANCE_ID}/rewind?reason={REASON}",
        response=[-1, ""])
    client._post_async_request = mock.post
    # Simulate the legacy (non-RPC) configuration.
    client._orchestration_bindings._rpc_base_url = None
    # NOTE(review): there is no space after the first period below — this must
    # mirror the SDK's actual message text; confirm against the client source.
    expected_exception_str = ("The Python SDK only supports RPC endpoints."
                              "Please remove the `localRpcEnabled` setting from host.json")
    with pytest.raises(Exception) as ex:
        await client.rewind(INSTANCE_ID, REASON)
    assert str(ex.value) == expected_exception_str
| 43.314286
| 97
| 0.710034
| 2,979
| 25,772
| 5.80094
| 0.088956
| 0.052659
| 0.031595
| 0.088537
| 0.812337
| 0.781378
| 0.773335
| 0.752271
| 0.743302
| 0.729067
| 0
| 0.02834
| 0.208637
| 25,772
| 594
| 98
| 43.387205
| 0.818975
| 0
| 0
| 0.634454
| 0
| 0
| 0.161208
| 0.096448
| 0
| 0
| 0
| 0
| 0.115546
| 1
| 0.014706
| false
| 0
| 0.018908
| 0.002101
| 0.044118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6aab0805eae455a2787bf3d72e7cb67bfd96ad40
| 9,324
|
py
|
Python
|
src/wellsfargo/tests/connector/test_applications.py
|
thelabnyc/django-oscar-wfrs
|
9abd4ecbdafd597407fdf60657103cb5d29c4c8b
|
[
"0BSD"
] | 1
|
2021-02-08T05:54:56.000Z
|
2021-02-08T05:54:56.000Z
|
src/wellsfargo/tests/connector/test_applications.py
|
thelabnyc/django-oscar-wfrs
|
9abd4ecbdafd597407fdf60657103cb5d29c4c8b
|
[
"0BSD"
] | 24
|
2019-12-04T21:37:01.000Z
|
2022-03-11T23:16:20.000Z
|
src/wellsfargo/tests/connector/test_applications.py
|
thelabnyc/django-oscar-wfrs
|
9abd4ecbdafd597407fdf60657103cb5d29c4c8b
|
[
"0BSD"
] | 2
|
2016-05-31T10:02:35.000Z
|
2016-12-19T11:29:37.000Z
|
from decimal import Decimal
from django.core.exceptions import ValidationError
from unittest import mock
from wellsfargo.core.exceptions import CreditApplicationDenied, CreditApplicationPending
from wellsfargo.connector.applications import CreditApplicationsAPIClient
from wellsfargo.tests.base import BaseTest
import requests_mock
import json
class CreditApplicationsAPIClientTest(BaseTest):
    """Tests for CreditApplicationsAPIClient.submit_credit_application.

    Each test mocks the Wells Fargo token and application endpoints via
    requests_mock and patches the wfrs_app_approved signal to verify when
    it fires.
    """
    @requests_mock.Mocker()
    @mock.patch("wellsfargo.core.signals.wfrs_app_approved.send")
    def test_single_application_success(self, rmock, wfrs_app_approved):
        """An approved single-applicant submission sends the expected request
        body, fires the approved signal once, and returns account details."""
        self.mock_get_api_token_request(rmock)
        def match_credit_app_request(request):
            # Check auth header
            self.assertTrue(request.headers["Authorization"].startswith("Bearer "))
            # Check data in body
            data = json.loads(request.body)
            self.assertEqual(
                data,
                {
                    "language_preference": "E",
                    "main_applicant": {
                        "address": {
                            "address_line_1": "123 Evergreen Terrace",
                            "city": "Springfield",
                            "postal_code": "10001",
                            "state_code": "NY",
                        },
                        "annual_income": 150000,
                        "date_of_birth": "1991-01-01",
                        "email_address": "foo@example.com",
                        "employer_name": "self",
                        "first_name": "Joe",
                        "home_phone": "2122091333",
                        "housing_status": "Rent",
                        "last_name": "Schmoe",
                        "mobile_phone": "2122091333",
                        "ssn": "999999990",
                        "work_phone": "2122091333",
                    },
                    "merchant_number": "1111111111111111",
                    "requested_credit_limit": 2000,
                    "transaction_code": "A6",
                },
            )
            return True
        self.mock_successful_credit_app_request(
            rmock, additional_matcher=match_credit_app_request
        )
        app = self._build_single_credit_app("999999990")
        acct_details = CreditApplicationsAPIClient().submit_credit_application(app)
        # Approval must emit the signal exactly once, keyed on the app class.
        wfrs_app_approved.assert_called_once_with(sender=app.__class__, app=app)
        self.assertEqual(acct_details.account_number, "9999999999999999")
        self.assertEqual(acct_details.main_applicant_full_name, "Schmoe, Joe")
        # Single-applicant submissions have no joint-applicant data.
        self.assertEqual(acct_details.joint_applicant_full_name, None)
        self.assertEqual(
            acct_details.main_applicant_address.address_line_1, "123 Evergreen Terrace"
        )
        self.assertEqual(acct_details.main_applicant_address.city, "Springfield")
        self.assertEqual(acct_details.main_applicant_address.postal_code, "10001")
        self.assertEqual(acct_details.main_applicant_address.state_code, "NY")
        self.assertEqual(acct_details.joint_applicant_address, None)
        self.assertEqual(acct_details.credit_limit, Decimal("7500.00"))
        self.assertEqual(acct_details.available_credit, Decimal("7500.00"))
    @requests_mock.Mocker()
    @mock.patch("wellsfargo.core.signals.wfrs_app_approved.send")
    def test_joint_application_success(self, rmock, wfrs_app_approved):
        """An approved joint-applicant submission includes both applicants in
        the request body and returns details for both."""
        self.mock_get_api_token_request(rmock)
        def match_credit_app_request(request):
            # Check auth header
            self.assertTrue(request.headers["Authorization"].startswith("Bearer "))
            # Check data in body
            data = json.loads(request.body)
            self.assertEqual(
                data,
                {
                    "language_preference": "E",
                    "main_applicant": {
                        "address": {
                            "address_line_1": "123 Evergreen Terrace",
                            "city": "Springfield",
                            "postal_code": "10001",
                            "state_code": "NY",
                        },
                        "annual_income": 150000,
                        "date_of_birth": "1991-01-01",
                        "email_address": "foo@example.com",
                        "employer_name": "self",
                        "first_name": "Joe",
                        "home_phone": "2122091333",
                        "housing_status": "Rent",
                        "last_name": "Schmoe",
                        "mobile_phone": "2122091333",
                        "ssn": "999999990",
                        "work_phone": "2122091333",
                    },
                    "joint_applicant": {
                        "address": {
                            "address_line_1": "123 Evergreen Terrace",
                            "city": "Springfield",
                            "postal_code": "10001",
                            "state_code": "NY",
                        },
                        "annual_income": 150000,
                        "date_of_birth": "1991-01-01",
                        "email_address": "foo@example.com",
                        "employer_name": "self",
                        "first_name": "Joe",
                        "home_phone": "2122091333",
                        # "housing_status": "Rent",
                        "last_name": "Schmoe",
                        "mobile_phone": "2122091333",
                        "ssn": "999999990",
                        "work_phone": "2122091333",
                    },
                    "merchant_number": "1111111111111111",
                    "requested_credit_limit": 2000,
                    "transaction_code": "A6",
                },
            )
            return True
        self.mock_successful_credit_app_request(
            rmock, additional_matcher=match_credit_app_request
        )
        app = self._build_joint_credit_app("999999990", "999999990")
        acct_details = CreditApplicationsAPIClient().submit_credit_application(app)
        wfrs_app_approved.assert_called_once_with(sender=app.__class__, app=app)
        self.assertEqual(acct_details.account_number, "9999999999999999")
        self.assertEqual(acct_details.main_applicant_full_name, "Schmoe, Joe")
        self.assertEqual(acct_details.joint_applicant_full_name, "Schmoe, Joe")
        self.assertEqual(
            acct_details.main_applicant_address.address_line_1, "123 Evergreen Terrace"
        )
        self.assertEqual(acct_details.main_applicant_address.city, "Springfield")
        self.assertEqual(acct_details.main_applicant_address.postal_code, "10001")
        self.assertEqual(acct_details.main_applicant_address.state_code, "NY")
        self.assertEqual(
            acct_details.joint_applicant_address.address_line_1, "123 Evergreen Terrace"
        )
        self.assertEqual(acct_details.joint_applicant_address.city, "Springfield")
        self.assertEqual(acct_details.joint_applicant_address.postal_code, "10001")
        self.assertEqual(acct_details.joint_applicant_address.state_code, "NY")
        self.assertEqual(acct_details.credit_limit, Decimal("7500.00"))
        self.assertEqual(acct_details.available_credit, Decimal("7500.00"))
    @requests_mock.Mocker()
    @mock.patch("wellsfargo.core.signals.wfrs_app_approved.send")
    def test_submit_denied(self, rmock, wfrs_app_approved):
        """A denied application raises CreditApplicationDenied and must not
        fire the approved signal."""
        self.mock_get_api_token_request(rmock)
        self.mock_denied_credit_app_request(rmock)
        app = self._build_single_credit_app("999999994")
        with self.assertRaises(CreditApplicationDenied):
            CreditApplicationsAPIClient().submit_credit_application(app)
        wfrs_app_approved.assert_not_called()
    @requests_mock.Mocker()
    @mock.patch("wellsfargo.core.signals.wfrs_app_approved.send")
    def test_submit_pending(self, rmock, wfrs_app_approved):
        """A pending application raises CreditApplicationPending and must not
        fire the approved signal."""
        self.mock_get_api_token_request(rmock)
        self.mock_pending_credit_app_request(rmock)
        app = self._build_single_credit_app("999999991")
        with self.assertRaises(CreditApplicationPending):
            CreditApplicationsAPIClient().submit_credit_application(app)
        wfrs_app_approved.assert_not_called()
    @requests_mock.Mocker()
    @mock.patch("wellsfargo.core.signals.wfrs_app_approved.send")
    def test_validation_error(self, rmock, wfrs_app_approved):
        """A 400 with field errors from the API raises Django's
        ValidationError and must not fire the approved signal."""
        self.mock_get_api_token_request(rmock)
        rmock.post(
            "https://api-sandbox.wellsfargo.com/credit-cards/private-label/new-accounts/v2/applications",
            status_code=400,
            json={
                "errors": [
                    {
                        "api_specification_url": "https://devstore.wellsfargo.com/store",
                        "description": "'last_name' is missing from the body of the request.",
                        "error_code": "400-008",
                        "field_name": "main_applicant.last_name",
                    }
                ]
            },
        )
        app = self._build_single_credit_app("999999990")
        with self.assertRaises(ValidationError):
            CreditApplicationsAPIClient().submit_credit_application(app)
        wfrs_app_approved.assert_not_called()
| 45.705882
| 105
| 0.577971
| 862
| 9,324
| 5.909513
| 0.185615
| 0.073616
| 0.085787
| 0.117393
| 0.839419
| 0.837848
| 0.837848
| 0.824892
| 0.813702
| 0.794464
| 0
| 0.059609
| 0.32529
| 9,324
| 203
| 106
| 45.931034
| 0.750119
| 0.010618
| 0
| 0.661111
| 0
| 0.005556
| 0.202517
| 0.034602
| 0
| 0
| 0
| 0
| 0.194444
| 1
| 0.038889
| false
| 0
| 0.044444
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0ab64e05a62655f6ec19053dc10ee4e218a53cf2
| 46,895
|
py
|
Python
|
tests/gcp/operators/test_compute.py
|
ganeshsrirams/airflow
|
b8c02632136320b8379956411134246cd2f6eb47
|
[
"Apache-2.0"
] | 1
|
2019-10-10T23:53:01.000Z
|
2019-10-10T23:53:01.000Z
|
tests/gcp/operators/test_compute.py
|
ganeshsrirams/airflow
|
b8c02632136320b8379956411134246cd2f6eb47
|
[
"Apache-2.0"
] | 2
|
2021-03-11T04:23:28.000Z
|
2021-09-29T17:44:44.000Z
|
tests/gcp/operators/test_compute.py
|
ganeshsrirams/airflow
|
b8c02632136320b8379956411134246cd2f6eb47
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint:disable=too-many-lines
import ast
import unittest
from copy import deepcopy
import httplib2
from googleapiclient.errors import HttpError
from airflow import AirflowException
from airflow.gcp.operators.compute import (
GceInstanceGroupManagerUpdateTemplateOperator, GceInstanceStartOperator, GceInstanceStopOperator,
GceInstanceTemplateCopyOperator, GceSetMachineTypeOperator,
)
from airflow.models import DAG, TaskInstance
from airflow.utils import timezone
from tests.compat import mock
EMPTY_CONTENT = b''
GCP_PROJECT_ID = 'project-id'
GCE_ZONE = 'zone'
RESOURCE_ID = 'resource-id'
GCE_SHORT_MACHINE_TYPE_NAME = 'n1-machine-type'
SET_MACHINE_TYPE_BODY = {
'machineType': 'zones/{}/machineTypes/{}'.format(GCE_ZONE, GCE_SHORT_MACHINE_TYPE_NAME)
}
DEFAULT_DATE = timezone.datetime(2017, 1, 1)
class TestGceInstanceStart(unittest.TestCase):
    """Tests for GceInstanceStartOperator."""
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_instance_start(self, mock_hook):
        """Happy path: execute() builds the hook and calls start_instance."""
        mock_hook.return_value.start_instance.return_value = True
        op = GceInstanceStartOperator(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=RESOURCE_ID,
            task_id='id'
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.start_instance.assert_called_once_with(
            zone=GCE_ZONE, resource_id=RESOURCE_ID, project_id=GCP_PROJECT_ID
        )
        self.assertTrue(result)
    # Setting all of the operator's input parameters as templated dag_ids
    # (could be anything else) just to test if the templating works for all fields
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_instance_start_with_templates(self, _):
        dag_id = 'test_dag_id'
        args = {
            'start_date': DEFAULT_DATE
        }
        self.dag = DAG(dag_id, default_args=args)  # pylint:disable=attribute-defined-outside-init
        op = GceInstanceStartOperator(
            project_id='{{ dag.dag_id }}',
            zone='{{ dag.dag_id }}',
            resource_id='{{ dag.dag_id }}',
            gcp_conn_id='{{ dag.dag_id }}',
            api_version='{{ dag.dag_id }}',
            task_id='id',
            dag=self.dag
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'project_id'))
        self.assertEqual(dag_id, getattr(op, 'zone'))
        self.assertEqual(dag_id, getattr(op, 'resource_id'))
        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
        self.assertEqual(dag_id, getattr(op, 'api_version'))
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_start_should_throw_ex_when_missing_project_id(self, mock_hook):
        """An empty project_id fails validation; the hook is never built."""
        with self.assertRaises(AirflowException) as cm:
            op = GceInstanceStartOperator(
                project_id="",
                zone=GCE_ZONE,
                resource_id=RESOURCE_ID,
                task_id='id'
            )
            op.execute(None)
        err = cm.exception
        self.assertIn("The required parameter 'project_id' is missing", str(err))
        mock_hook.assert_not_called()
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_start_should_not_throw_ex_when_project_id_none(self, mock_hook):
        """Omitting project_id is allowed; the hook receives project_id=None.

        NOTE(review): this test previously discarded the mocked hook and made
        no assertions; it now verifies the hook interaction the same way the
        equivalent GceInstanceStopOperator test does.
        """
        op = GceInstanceStartOperator(
            zone=GCE_ZONE,
            resource_id=RESOURCE_ID,
            task_id='id'
        )
        op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.start_instance.assert_called_once_with(
            zone=GCE_ZONE, resource_id=RESOURCE_ID, project_id=None
        )
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_start_should_throw_ex_when_missing_zone(self, mock_hook):
        """An empty zone fails validation; the hook is never built."""
        with self.assertRaises(AirflowException) as cm:
            op = GceInstanceStartOperator(
                project_id=GCP_PROJECT_ID,
                zone="",
                resource_id=RESOURCE_ID,
                task_id='id'
            )
            op.execute(None)
        err = cm.exception
        self.assertIn("The required parameter 'zone' is missing", str(err))
        mock_hook.assert_not_called()
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_start_should_throw_ex_when_missing_resource_id(self, mock_hook):
        """An empty resource_id fails validation; the hook is never built."""
        with self.assertRaises(AirflowException) as cm:
            op = GceInstanceStartOperator(
                project_id=GCP_PROJECT_ID,
                zone=GCE_ZONE,
                resource_id="",
                task_id='id'
            )
            op.execute(None)
        err = cm.exception
        self.assertIn("The required parameter 'resource_id' is missing", str(err))
        mock_hook.assert_not_called()
class TestGceInstanceStop(unittest.TestCase):
    """Tests for GceInstanceStopOperator."""
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_instance_stop(self, mock_hook):
        """Happy path: execute() builds the hook and calls stop_instance."""
        op = GceInstanceStopOperator(project_id=GCP_PROJECT_ID, zone=GCE_ZONE,
                                     resource_id=RESOURCE_ID, task_id='id')
        op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.stop_instance.assert_called_once_with(
            zone=GCE_ZONE, resource_id=RESOURCE_ID, project_id=GCP_PROJECT_ID)
    # Every templated operator field is fed the same rendered value to confirm
    # that templating is wired up for all of them.
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_instance_stop_with_templates(self, _):
        dag_id = 'test_dag_id'
        self.dag = DAG(dag_id, default_args={'start_date': DEFAULT_DATE})  # pylint:disable=attribute-defined-outside-init
        op = GceInstanceStopOperator(
            project_id='{{ dag.dag_id }}',
            zone='{{ dag.dag_id }}',
            resource_id='{{ dag.dag_id }}',
            gcp_conn_id='{{ dag.dag_id }}',
            api_version='{{ dag.dag_id }}',
            task_id='id',
            dag=self.dag,
        )
        TaskInstance(op, DEFAULT_DATE).render_templates()
        for field in ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version'):
            self.assertEqual(dag_id, getattr(op, field))
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_stop_should_throw_ex_when_missing_project_id(self, mock_hook):
        """An empty project_id fails validation; the hook is never built."""
        with self.assertRaises(AirflowException) as ctx:
            GceInstanceStopOperator(project_id="", zone=GCE_ZONE,
                                    resource_id=RESOURCE_ID,
                                    task_id='id').execute(None)
        self.assertIn("The required parameter 'project_id' is missing",
                      str(ctx.exception))
        mock_hook.assert_not_called()
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_stop_should_not_throw_ex_when_project_id_none(self, mock_hook):
        """Omitting project_id is allowed; the hook receives project_id=None."""
        GceInstanceStopOperator(zone=GCE_ZONE, resource_id=RESOURCE_ID,
                                task_id='id').execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.stop_instance.assert_called_once_with(
            zone=GCE_ZONE, resource_id=RESOURCE_ID, project_id=None)
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_stop_should_throw_ex_when_missing_zone(self, mock_hook):
        """An empty zone fails validation; the hook is never built."""
        with self.assertRaises(AirflowException) as ctx:
            GceInstanceStopOperator(project_id=GCP_PROJECT_ID, zone="",
                                    resource_id=RESOURCE_ID,
                                    task_id='id').execute(None)
        self.assertIn("The required parameter 'zone' is missing",
                      str(ctx.exception))
        mock_hook.assert_not_called()
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_stop_should_throw_ex_when_missing_resource_id(self, mock_hook):
        """An empty resource_id fails validation; the hook is never built."""
        with self.assertRaises(AirflowException) as ctx:
            GceInstanceStopOperator(project_id=GCP_PROJECT_ID, zone=GCE_ZONE,
                                    resource_id="",
                                    task_id='id').execute(None)
        self.assertIn("The required parameter 'resource_id' is missing",
                      str(ctx.exception))
        mock_hook.assert_not_called()
class TestGceInstanceSetMachineType(unittest.TestCase):
    # Tests for GceSetMachineTypeOperator: hook wiring, field templating,
    # parameter validation and propagation of GCE operation errors.

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_set_machine_type(self, mock_hook):
        # Happy path: the hook is built with the default connection and v1 API,
        # and set_machine_type receives exactly the operator's arguments.
        mock_hook.return_value.set_machine_type.return_value = True
        op = GceSetMachineTypeOperator(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=RESOURCE_ID,
            body=SET_MACHINE_TYPE_BODY,
            task_id='id'
        )
        op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.set_machine_type.assert_called_once_with(
            zone=GCE_ZONE,
            resource_id=RESOURCE_ID,
            body=SET_MACHINE_TYPE_BODY,
            project_id=GCP_PROJECT_ID
        )

    # Setting all of the operator's input parameters as templated dag_ids
    # (could be anything else) just to test if the templating works for all fields
    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_set_machine_type_with_templates(self, _):
        dag_id = 'test_dag_id'
        args = {
            'start_date': DEFAULT_DATE
        }
        self.dag = DAG(dag_id, default_args=args)  # pylint:disable=attribute-defined-outside-init
        op = GceSetMachineTypeOperator(
            project_id='{{ dag.dag_id }}',
            zone='{{ dag.dag_id }}',
            resource_id='{{ dag.dag_id }}',
            body={},
            gcp_conn_id='{{ dag.dag_id }}',
            api_version='{{ dag.dag_id }}',
            task_id='id',
            dag=self.dag
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'project_id'))
        self.assertEqual(dag_id, getattr(op, 'zone'))
        self.assertEqual(dag_id, getattr(op, 'resource_id'))
        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
        self.assertEqual(dag_id, getattr(op, 'api_version'))

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_set_machine_type_should_throw_ex_when_missing_project_id(self, mock_hook):
        # Empty project_id fails validation, so the hook is never constructed.
        with self.assertRaises(AirflowException) as cm:
            op = GceSetMachineTypeOperator(
                project_id="",
                zone=GCE_ZONE,
                resource_id=RESOURCE_ID,
                body=SET_MACHINE_TYPE_BODY,
                task_id='id'
            )
            op.execute(None)
        err = cm.exception
        self.assertIn("The required parameter 'project_id' is missing", str(err))
        mock_hook.assert_not_called()

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_set_machine_type_should_not_throw_ex_when_project_id_none(self, mock_hook):
        # Omitted project_id is allowed; None is forwarded to the hook call.
        op = GceSetMachineTypeOperator(
            zone=GCE_ZONE,
            resource_id=RESOURCE_ID,
            body=SET_MACHINE_TYPE_BODY,
            task_id='id'
        )
        op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.set_machine_type.assert_called_once_with(
            zone=GCE_ZONE,
            resource_id=RESOURCE_ID,
            body=SET_MACHINE_TYPE_BODY,
            project_id=None
        )

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_set_machine_type_should_throw_ex_when_missing_zone(self, mock_hook):
        with self.assertRaises(AirflowException) as cm:
            op = GceSetMachineTypeOperator(
                project_id=GCP_PROJECT_ID,
                zone="",
                resource_id=RESOURCE_ID,
                body=SET_MACHINE_TYPE_BODY,
                task_id='id'
            )
            op.execute(None)
        err = cm.exception
        self.assertIn("The required parameter 'zone' is missing", str(err))
        mock_hook.assert_not_called()

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_set_machine_type_should_throw_ex_when_missing_resource_id(self, mock_hook):
        with self.assertRaises(AirflowException) as cm:
            op = GceSetMachineTypeOperator(
                project_id=GCP_PROJECT_ID,
                zone=GCE_ZONE,
                resource_id="",
                body=SET_MACHINE_TYPE_BODY,
                task_id='id'
            )
            op.execute(None)
        err = cm.exception
        self.assertIn("The required parameter 'resource_id' is missing", str(err))
        mock_hook.assert_not_called()

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_set_machine_type_should_throw_ex_when_missing_machine_type(self, mock_hook):
        # Unlike the parameter checks above, body validation happens inside
        # execute(), after the hook has already been constructed — hence the
        # assert_called_once_with at the end instead of assert_not_called.
        with self.assertRaises(AirflowException) as cm:
            op = GceSetMachineTypeOperator(
                project_id=GCP_PROJECT_ID,
                zone=GCE_ZONE,
                resource_id=RESOURCE_ID,
                body={},
                task_id='id'
            )
            op.execute(None)
        err = cm.exception
        self.assertIn(
            "The required body field 'machineType' is missing. Please add it.", str(err))
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')

    # A literal dict-repr of a zone-operation response carrying a GCE error;
    # parsed back into a dict with ast.literal_eval in the test below.
    MOCK_OP_RESPONSE = "{'kind': 'compute#operation', 'id': '8529919847974922736', " \
                       "'name': " \
                       "'operation-1538578207537-577542784f769-7999ab71-94f9ec1d', " \
                       "'zone': 'https://www.googleapis.com/compute/v1/projects/example" \
                       "-project/zones/europe-west3-b', 'operationType': " \
                       "'setMachineType', 'targetLink': " \
                       "'https://www.googleapis.com/compute/v1/projects/example-project" \
                       "/zones/europe-west3-b/instances/pa-1', 'targetId': " \
                       "'2480086944131075860', 'status': 'DONE', 'user': " \
                       "'service-account@example-project.iam.gserviceaccount.com', " \
                       "'progress': 100, 'insertTime': '2018-10-03T07:50:07.951-07:00', "\
                       "'startTime': '2018-10-03T07:50:08.324-07:00', 'endTime': " \
                       "'2018-10-03T07:50:08.484-07:00', 'error': {'errors': [{'code': " \
                       "'UNSUPPORTED_OPERATION', 'message': \"Machine type with name " \
                       "'machine-type-1' does not exist in zone 'europe-west3-b'.\"}]}, "\
                       "'httpErrorStatusCode': 400, 'httpErrorMessage': 'BAD REQUEST', " \
                       "'selfLink': " \
                       "'https://www.googleapis.com/compute/v1/projects/example-project" \
                       "/zones/europe-west3-b/operations/operation-1538578207537" \
                       "-577542784f769-7999ab71-94f9ec1d'} "

    @mock.patch('airflow.gcp.operators.compute.GceHook'
                '._check_zone_operation_status')
    @mock.patch('airflow.gcp.operators.compute.GceHook'
                '._execute_set_machine_type')
    @mock.patch('airflow.gcp.operators.compute.GceHook.get_conn')
    def test_set_machine_type_should_handle_and_trim_gce_error(
            self, get_conn, _execute_set_machine_type, _check_zone_operation_status):
        # Patch the hook internals so the operation "completes" with the error
        # payload above, then verify the raised message is trimmed.
        get_conn.return_value = {}
        _execute_set_machine_type.return_value = {"name": "test-operation"}
        _check_zone_operation_status.return_value = ast.literal_eval(
            self.MOCK_OP_RESPONSE)
        with self.assertRaises(AirflowException) as cm:
            op = GceSetMachineTypeOperator(
                project_id=GCP_PROJECT_ID,
                zone=GCE_ZONE,
                resource_id=RESOURCE_ID,
                body=SET_MACHINE_TYPE_BODY,
                task_id='id'
            )
            op.execute(None)
        err = cm.exception
        _check_zone_operation_status.assert_called_once_with(
            {}, "test-operation", GCP_PROJECT_ID, GCE_ZONE, mock.ANY)
        _execute_set_machine_type.assert_called_once_with(
            GCE_ZONE, RESOURCE_ID, SET_MACHINE_TYPE_BODY, GCP_PROJECT_ID)
        # Checking the full message was sometimes failing due to different order
        # of keys in the serialized JSON
        self.assertIn("400 BAD REQUEST: {", str(err))  # checking the square bracket trim
        self.assertIn("UNSUPPORTED_OPERATION", str(err))
# Fixture data for the instance-template copy tests below.
GCE_INSTANCE_TEMPLATE_NAME = "instance-template-test"
GCE_INSTANCE_TEMPLATE_NEW_NAME = "instance-template-test-new"
GCE_INSTANCE_TEMPLATE_REQUEST_ID = "e12d5b48-4826-4ba9-ada6-0cff1e0b36a6"
# GET response for the source template, including server-populated fields
# ("kind", "id", timestamps, "selfLink") that must be stripped on copy.
GCE_INSTANCE_TEMPLATE_BODY_GET = {
    "kind": "compute#instanceTemplate",
    "id": "6950321349997439715",
    "creationTimestamp": "2018-10-15T06:20:12.777-07:00",
    "name": GCE_INSTANCE_TEMPLATE_NAME,
    "description": "",
    "properties": {
        "machineType": "n1-standard-1",
        "networkInterfaces": [
            {
                "kind": "compute#networkInterface",
                "network": "https://www.googleapis.com/compute/v1/"
                           "projects/project/global/networks/default",
                "accessConfigs": [
                    {
                        "kind": "compute#accessConfig",
                        "type": "ONE_TO_ONE_NAT",
                    }
                ]
            },
            {
                "network": "https://www.googleapis.com/compute/v1/"
                           "projects/project/global/networks/default",
                "accessConfigs": [
                    {
                        "kind": "compute#accessConfig",
                        "networkTier": "PREMIUM"
                    }
                ]
            }
        ],
        "disks": [
            {
                "kind": "compute#attachedDisk",
                "type": "PERSISTENT",
                "licenses": [
                    "A String",
                ]
            }
        ],
        "metadata": {
            "kind": "compute#metadata",
            "fingerprint": "GDPUYxlwHe4="
        },
    },
    "selfLink": "https://www.googleapis.com/compute/v1/projects/project"
                "/global/instanceTemplates/instance-template-test"
}
# Body the copy operator is expected to insert: same template under the new
# name, with all server-populated ("kind", "id", ...) fields removed.
GCE_INSTANCE_TEMPLATE_BODY_INSERT = {
    "name": GCE_INSTANCE_TEMPLATE_NEW_NAME,
    "description": "",
    "properties": {
        "machineType": "n1-standard-1",
        "networkInterfaces": [
            {
                "network": "https://www.googleapis.com/compute/v1/"
                           "projects/project/global/networks/default",
                "accessConfigs": [
                    {
                        "type": "ONE_TO_ONE_NAT",
                    }
                ]
            },
            {
                "network": "https://www.googleapis.com/compute/v1/"
                           "projects/project/global/networks/default",
                "accessConfigs": [
                    {
                        "networkTier": "PREMIUM"
                    }
                ]
            }
        ],
        "disks": [
            {
                "type": "PERSISTENT",
            }
        ],
        "metadata": {
            "fingerprint": "GDPUYxlwHe4="
        },
    },
}
# GET response for the freshly copied template (same as the source, renamed).
GCE_INSTANCE_TEMPLATE_BODY_GET_NEW = deepcopy(GCE_INSTANCE_TEMPLATE_BODY_GET)
GCE_INSTANCE_TEMPLATE_BODY_GET_NEW['name'] = GCE_INSTANCE_TEMPLATE_NEW_NAME
class TestGceInstanceTemplateCopy(unittest.TestCase):
    # Tests for GceInstanceTemplateCopyOperator. Most tests prime
    # get_instance_template with a 3-element side_effect sequence:
    # a 404 for the destination-existence check, the source template body,
    # and finally the body of the newly created template. The order of that
    # sequence is what each test's assertions depend on.

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_copy_template(self, mock_hook):
        mock_hook.return_value.get_instance_template.side_effect = [
            HttpError(resp=httplib2.Response({'status': 404}), content=EMPTY_CONTENT),
            GCE_INSTANCE_TEMPLATE_BODY_GET,
            GCE_INSTANCE_TEMPLATE_BODY_GET_NEW
        ]
        op = GceInstanceTemplateCopyOperator(
            project_id=GCP_PROJECT_ID,
            resource_id=GCE_INSTANCE_TEMPLATE_NAME,
            task_id='id',
            body_patch={"name": GCE_INSTANCE_TEMPLATE_NEW_NAME}
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.insert_instance_template.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            body=GCE_INSTANCE_TEMPLATE_BODY_INSERT,
            request_id=None
        )
        self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_copy_template_missing_project_id(self, mock_hook):
        # Without project_id the operator forwards None to the hook.
        mock_hook.return_value.get_instance_template.side_effect = [
            HttpError(resp=httplib2.Response({'status': 404}), content=EMPTY_CONTENT),
            GCE_INSTANCE_TEMPLATE_BODY_GET,
            GCE_INSTANCE_TEMPLATE_BODY_GET_NEW
        ]
        op = GceInstanceTemplateCopyOperator(
            resource_id=GCE_INSTANCE_TEMPLATE_NAME,
            task_id='id',
            body_patch={"name": GCE_INSTANCE_TEMPLATE_NEW_NAME}
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.insert_instance_template.assert_called_once_with(
            project_id=None,
            body=GCE_INSTANCE_TEMPLATE_BODY_INSERT,
            request_id=None
        )
        self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_idempotent_copy_template_when_already_copied(self, mock_hook):
        # The destination template already exists (no 404 in the sequence),
        # so no insert must be attempted.
        mock_hook.return_value.get_instance_template.side_effect = [
            GCE_INSTANCE_TEMPLATE_BODY_GET_NEW
        ]
        op = GceInstanceTemplateCopyOperator(
            project_id=GCP_PROJECT_ID,
            resource_id=GCE_INSTANCE_TEMPLATE_NAME,
            task_id='id',
            body_patch={"name": GCE_INSTANCE_TEMPLATE_NEW_NAME}
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.insert_instance_template.assert_not_called()
        self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_copy_template_with_request_id(self, mock_hook):
        mock_hook.return_value.get_instance_template.side_effect = [
            HttpError(resp=httplib2.Response({'status': 404}), content=EMPTY_CONTENT),
            GCE_INSTANCE_TEMPLATE_BODY_GET,
            GCE_INSTANCE_TEMPLATE_BODY_GET_NEW
        ]
        op = GceInstanceTemplateCopyOperator(
            project_id=GCP_PROJECT_ID,
            resource_id=GCE_INSTANCE_TEMPLATE_NAME,
            request_id=GCE_INSTANCE_TEMPLATE_REQUEST_ID,
            task_id='id',
            body_patch={"name": GCE_INSTANCE_TEMPLATE_NEW_NAME}
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.insert_instance_template.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            body=GCE_INSTANCE_TEMPLATE_BODY_INSERT,
            request_id=GCE_INSTANCE_TEMPLATE_REQUEST_ID,
        )
        self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_copy_template_with_description_fields(self, mock_hook):
        # body_patch may override top-level fields of the copied template.
        mock_hook.return_value.get_instance_template.side_effect = [
            HttpError(resp=httplib2.Response({'status': 404}), content=EMPTY_CONTENT),
            GCE_INSTANCE_TEMPLATE_BODY_GET,
            GCE_INSTANCE_TEMPLATE_BODY_GET_NEW
        ]
        op = GceInstanceTemplateCopyOperator(
            project_id=GCP_PROJECT_ID,
            resource_id=GCE_INSTANCE_TEMPLATE_NAME,
            request_id=GCE_INSTANCE_TEMPLATE_REQUEST_ID,
            task_id='id',
            body_patch={"name": GCE_INSTANCE_TEMPLATE_NEW_NAME,
                        "description": "New description"}
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        body_insert = deepcopy(GCE_INSTANCE_TEMPLATE_BODY_INSERT)
        body_insert["description"] = "New description"
        mock_hook.return_value.insert_instance_template.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            body=body_insert,
            request_id=GCE_INSTANCE_TEMPLATE_REQUEST_ID,
        )
        self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_copy_with_some_validation_warnings(self, mock_hook):
        # Unknown fields in body_patch are passed through (only warned about),
        # not rejected.
        mock_hook.return_value.get_instance_template.side_effect = [
            HttpError(resp=httplib2.Response({'status': 404}), content=EMPTY_CONTENT),
            GCE_INSTANCE_TEMPLATE_BODY_GET,
            GCE_INSTANCE_TEMPLATE_BODY_GET_NEW
        ]
        op = GceInstanceTemplateCopyOperator(
            project_id=GCP_PROJECT_ID,
            resource_id=GCE_INSTANCE_TEMPLATE_NAME,
            task_id='id',
            body_patch={"name": GCE_INSTANCE_TEMPLATE_NEW_NAME,
                        "some_wrong_field": "test",
                        "properties": {
                            "some_other_wrong_field": "test"
                        }}
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        body_insert = deepcopy(GCE_INSTANCE_TEMPLATE_BODY_INSERT)
        body_insert["some_wrong_field"] = "test"
        body_insert["properties"]["some_other_wrong_field"] = "test"
        mock_hook.return_value.insert_instance_template.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            body=body_insert,
            request_id=None,
        )
        self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_copy_template_with_updated_nested_fields(self, mock_hook):
        # A scalar inside "properties" can be overridden by body_patch.
        mock_hook.return_value.get_instance_template.side_effect = [
            HttpError(resp=httplib2.Response({'status': 404}), content=EMPTY_CONTENT),
            GCE_INSTANCE_TEMPLATE_BODY_GET,
            GCE_INSTANCE_TEMPLATE_BODY_GET_NEW
        ]
        op = GceInstanceTemplateCopyOperator(
            project_id=GCP_PROJECT_ID,
            resource_id=GCE_INSTANCE_TEMPLATE_NAME,
            task_id='id',
            body_patch={
                "name": GCE_INSTANCE_TEMPLATE_NEW_NAME,
                "properties": {
                    "machineType": "n1-standard-2",
                }
            }
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        body_insert = deepcopy(GCE_INSTANCE_TEMPLATE_BODY_INSERT)
        body_insert["properties"]["machineType"] = "n1-standard-2"
        mock_hook.return_value.insert_instance_template.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            body=body_insert,
            request_id=None
        )
        self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_copy_template_with_smaller_array_fields(self, mock_hook):
        # An array in body_patch replaces the source array wholesale, even when
        # it has fewer elements.
        mock_hook.return_value.get_instance_template.side_effect = [
            HttpError(resp=httplib2.Response({'status': 404}), content=EMPTY_CONTENT),
            GCE_INSTANCE_TEMPLATE_BODY_GET,
            GCE_INSTANCE_TEMPLATE_BODY_GET_NEW
        ]
        op = GceInstanceTemplateCopyOperator(
            project_id=GCP_PROJECT_ID,
            resource_id=GCE_INSTANCE_TEMPLATE_NAME,
            task_id='id',
            body_patch={
                "name": GCE_INSTANCE_TEMPLATE_NEW_NAME,
                "properties": {
                    "machineType": "n1-standard-1",
                    "networkInterfaces": [
                        {
                            "network": "https://www.googleapis.com/compute/v1/"
                                       "projects/project/global/networks/default",
                            "accessConfigs": [
                                {
                                    "type": "ONE_TO_ONE_NAT",
                                    "natIP": "8.8.8.8"
                                }
                            ]
                        }
                    ]
                }
            }
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        body_insert = deepcopy(GCE_INSTANCE_TEMPLATE_BODY_INSERT)
        body_insert["properties"]["networkInterfaces"] = [
            {
                "network": "https://www.googleapis.com/compute/v1/"
                           "projects/project/global/networks/default",
                "accessConfigs": [
                    {
                        "type": "ONE_TO_ONE_NAT",
                        "natIP": "8.8.8.8"
                    }
                ]
            }
        ]
        mock_hook.return_value.insert_instance_template.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            body=body_insert,
            request_id=None
        )
        self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_copy_template_with_bigger_array_fields(self, mock_hook):
        # An array in body_patch also replaces the source array when it has
        # more elements.
        mock_hook.return_value.get_instance_template.side_effect = [
            HttpError(resp=httplib2.Response({'status': 404}), content=EMPTY_CONTENT),
            GCE_INSTANCE_TEMPLATE_BODY_GET,
            GCE_INSTANCE_TEMPLATE_BODY_GET_NEW
        ]
        op = GceInstanceTemplateCopyOperator(
            project_id=GCP_PROJECT_ID,
            resource_id=GCE_INSTANCE_TEMPLATE_NAME,
            task_id='id',
            body_patch={
                "name": GCE_INSTANCE_TEMPLATE_NEW_NAME,
                "properties": {
                    "disks": [
                        {
                            "kind": "compute#attachedDisk",
                            "type": "SCRATCH",
                            "licenses": [
                                "Updated String",
                            ]
                        },
                        {
                            "kind": "compute#attachedDisk",
                            "type": "PERSISTENT",
                            "licenses": [
                                "Another String",
                            ]
                        }
                    ],
                }
            }
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='v1',
                                          gcp_conn_id='google_cloud_default')
        body_insert = deepcopy(GCE_INSTANCE_TEMPLATE_BODY_INSERT)
        body_insert["properties"]["disks"] = [
            {
                "kind": "compute#attachedDisk",
                "type": "SCRATCH",
                "licenses": [
                    "Updated String",
                ]
            },
            {
                "kind": "compute#attachedDisk",
                "type": "PERSISTENT",
                "licenses": [
                    "Another String",
                ]
            }
        ]
        mock_hook.return_value.insert_instance_template.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            body=body_insert,
            request_id=None,
        )
        self.assertEqual(GCE_INSTANCE_TEMPLATE_BODY_GET_NEW, result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_missing_name(self, mock_hook):
        # body_patch without a "name" is rejected before any hook call.
        mock_hook.return_value.get_instance_template.side_effect = [
            HttpError(resp=httplib2.Response({'status': 404}), content=EMPTY_CONTENT),
            GCE_INSTANCE_TEMPLATE_BODY_GET,
            GCE_INSTANCE_TEMPLATE_BODY_GET_NEW
        ]
        with self.assertRaises(AirflowException) as cm:
            op = GceInstanceTemplateCopyOperator(
                project_id=GCP_PROJECT_ID,
                resource_id=GCE_INSTANCE_TEMPLATE_NAME,
                request_id=GCE_INSTANCE_TEMPLATE_REQUEST_ID,
                task_id='id',
                body_patch={"description": "New description"}
            )
            op.execute(None)
        err = cm.exception
        self.assertIn("should contain at least name for the new operator "
                      "in the 'name' field", str(err))
        mock_hook.assert_not_called()
# Fixture data for the instance-group-manager update tests below.
GCE_INSTANCE_GROUP_MANAGER_NAME = "instance-group-test"
GCE_INSTANCE_TEMPLATE_SOURCE_URL = \
    "https://www.googleapis.com/compute/beta/projects/project" \
    "/global/instanceTemplates/instance-template-test"
GCE_INSTANCE_TEMPLATE_OTHER_URL = \
    "https://www.googleapis.com/compute/beta/projects/project" \
    "/global/instanceTemplates/instance-template-other"
GCE_INSTANCE_TEMPLATE_NON_EXISTING_URL = \
    "https://www.googleapis.com/compute/beta/projects/project" \
    "/global/instanceTemplates/instance-template-non-existing"
GCE_INSTANCE_TEMPLATE_DESTINATION_URL = \
    "https://www.googleapis.com/compute/beta/projects/project" \
    "/global/instanceTemplates/instance-template-new"
# GET response for a group manager whose main template and version "v1"
# reference the source template; version "v2" references a different one.
GCE_INSTANCE_GROUP_MANAGER_GET = {
    "kind": "compute#instanceGroupManager",
    "id": "2822359583810032488",
    "creationTimestamp": "2018-10-17T05:39:35.793-07:00",
    "name": GCE_INSTANCE_GROUP_MANAGER_NAME,
    "zone": "https://www.googleapis.com/compute/beta/projects/project/zones/zone",
    "instanceTemplate": GCE_INSTANCE_TEMPLATE_SOURCE_URL,
    "versions": [
        {
            "name": "v1",
            "instanceTemplate": GCE_INSTANCE_TEMPLATE_SOURCE_URL,
            "targetSize": {
                "calculated": 1
            }
        },
        {
            "name": "v2",
            "instanceTemplate": GCE_INSTANCE_TEMPLATE_OTHER_URL,
        }
    ],
    "instanceGroup": GCE_INSTANCE_TEMPLATE_SOURCE_URL,
    "baseInstanceName": GCE_INSTANCE_GROUP_MANAGER_NAME,
    "fingerprint": "BKWB_igCNbQ=",
    "currentActions": {
        "none": 1,
        "creating": 0,
        "creatingWithoutRetries": 0,
        "verifying": 0,
        "recreating": 0,
        "deleting": 0,
        "abandoning": 0,
        "restarting": 0,
        "refreshing": 0
    },
    "pendingActions": {
        "creating": 0,
        "deleting": 0,
        "recreating": 0,
        "restarting": 0
    },
    "targetSize": 1,
    "selfLink": "https://www.googleapis.com/compute/beta/projects/project/zones/"
                "zone/instanceGroupManagers/" + GCE_INSTANCE_GROUP_MANAGER_NAME,
    "autoHealingPolicies": [
        {
            "initialDelaySec": 300
        }
    ],
    "serviceAccount": "198907790164@cloudservices.gserviceaccount.com"
}
# Patch body the operator is expected to send: the source-template references
# swapped for the destination URL, the unrelated "v2" version untouched.
GCE_INSTANCE_GROUP_MANAGER_EXPECTED_PATCH = {
    "instanceTemplate": GCE_INSTANCE_TEMPLATE_DESTINATION_URL,
    "versions": [
        {
            "name": "v1",
            "instanceTemplate": GCE_INSTANCE_TEMPLATE_DESTINATION_URL,
            "targetSize": {
                "calculated": 1
            }
        },
        {
            "name": "v2",
            "instanceTemplate": GCE_INSTANCE_TEMPLATE_OTHER_URL,
        }
    ],
}
GCE_INSTANCE_GROUP_MANAGER_REQUEST_ID = "e12d5b48-4826-4ba9-ada6-0cff1e0b36a6"
GCE_INSTANCE_GROUP_MANAGER_UPDATE_POLICY = {
    "type": "OPPORTUNISTIC",
    "minimalAction": "RESTART",
    "maxSurge": {
        "fixed": 1
    },
    "maxUnavailable": {
        "percent": 10
    },
    "minReadySec": 1800
}
class TestGceInstanceGroupManagerUpdate(unittest.TestCase):
    # Tests for GceInstanceGroupManagerUpdateTemplateOperator: swapping a
    # group's template references from the source URL to the destination URL
    # via patch_instance_group_manager (beta API only).

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_instance_group_update(self, mock_hook):
        mock_hook.return_value.get_instance_group_manager.return_value = \
            deepcopy(GCE_INSTANCE_GROUP_MANAGER_GET)
        op = GceInstanceGroupManagerUpdateTemplateOperator(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            task_id='id',
            source_template=GCE_INSTANCE_TEMPLATE_SOURCE_URL,
            destination_template=GCE_INSTANCE_TEMPLATE_DESTINATION_URL
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='beta',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.patch_instance_group_manager.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            body=GCE_INSTANCE_GROUP_MANAGER_EXPECTED_PATCH,
            request_id=None
        )
        self.assertTrue(result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_instance_group_update_missing_project_id(self, mock_hook):
        # Without project_id the operator forwards None to the hook.
        mock_hook.return_value.get_instance_group_manager.return_value = \
            deepcopy(GCE_INSTANCE_GROUP_MANAGER_GET)
        op = GceInstanceGroupManagerUpdateTemplateOperator(
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            task_id='id',
            source_template=GCE_INSTANCE_TEMPLATE_SOURCE_URL,
            destination_template=GCE_INSTANCE_TEMPLATE_DESTINATION_URL
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='beta',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.patch_instance_group_manager.assert_called_once_with(
            project_id=None,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            body=GCE_INSTANCE_GROUP_MANAGER_EXPECTED_PATCH,
            request_id=None
        )
        self.assertTrue(result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_instance_group_update_no_instance_template_field(self, mock_hook):
        # A group manager without the top-level 'instanceTemplate' key is
        # still patched — only the 'versions' entries are rewritten.
        instance_group_manager_no_template = deepcopy(GCE_INSTANCE_GROUP_MANAGER_GET)
        del instance_group_manager_no_template['instanceTemplate']
        mock_hook.return_value.get_instance_group_manager.return_value = \
            instance_group_manager_no_template
        op = GceInstanceGroupManagerUpdateTemplateOperator(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            task_id='id',
            source_template=GCE_INSTANCE_TEMPLATE_SOURCE_URL,
            destination_template=GCE_INSTANCE_TEMPLATE_DESTINATION_URL
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='beta',
                                          gcp_conn_id='google_cloud_default')
        expected_patch_no_instance_template = \
            deepcopy(GCE_INSTANCE_GROUP_MANAGER_EXPECTED_PATCH)
        del expected_patch_no_instance_template['instanceTemplate']
        mock_hook.return_value.patch_instance_group_manager.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            body=expected_patch_no_instance_template,
            request_id=None
        )
        self.assertTrue(result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_instance_group_update_no_versions_field(self, mock_hook):
        # Symmetric case: no 'versions' key, only 'instanceTemplate' rewritten.
        instance_group_manager_no_versions = deepcopy(GCE_INSTANCE_GROUP_MANAGER_GET)
        del instance_group_manager_no_versions['versions']
        mock_hook.return_value.get_instance_group_manager.return_value = \
            instance_group_manager_no_versions
        op = GceInstanceGroupManagerUpdateTemplateOperator(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            task_id='id',
            source_template=GCE_INSTANCE_TEMPLATE_SOURCE_URL,
            destination_template=GCE_INSTANCE_TEMPLATE_DESTINATION_URL
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='beta',
                                          gcp_conn_id='google_cloud_default')
        expected_patch_no_versions = \
            deepcopy(GCE_INSTANCE_GROUP_MANAGER_EXPECTED_PATCH)
        del expected_patch_no_versions['versions']
        mock_hook.return_value.patch_instance_group_manager.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            body=expected_patch_no_versions,
            request_id=None
        )
        self.assertTrue(result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_instance_group_update_with_update_policy(self, mock_hook):
        # An explicit update_policy is added to the patch body as 'updatePolicy'.
        mock_hook.return_value.get_instance_group_manager.return_value = \
            deepcopy(GCE_INSTANCE_GROUP_MANAGER_GET)
        op = GceInstanceGroupManagerUpdateTemplateOperator(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            task_id='id',
            update_policy=GCE_INSTANCE_GROUP_MANAGER_UPDATE_POLICY,
            source_template=GCE_INSTANCE_TEMPLATE_SOURCE_URL,
            destination_template=GCE_INSTANCE_TEMPLATE_DESTINATION_URL
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='beta',
                                          gcp_conn_id='google_cloud_default')
        expected_patch_with_update_policy = \
            deepcopy(GCE_INSTANCE_GROUP_MANAGER_EXPECTED_PATCH)
        expected_patch_with_update_policy['updatePolicy'] = GCE_INSTANCE_GROUP_MANAGER_UPDATE_POLICY
        mock_hook.return_value.patch_instance_group_manager.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            body=expected_patch_with_update_policy,
            request_id=None
        )
        self.assertTrue(result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_successful_instance_group_update_with_request_id(self, mock_hook):
        mock_hook.return_value.get_instance_group_manager.return_value = \
            deepcopy(GCE_INSTANCE_GROUP_MANAGER_GET)
        op = GceInstanceGroupManagerUpdateTemplateOperator(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            task_id='id',
            source_template=GCE_INSTANCE_TEMPLATE_SOURCE_URL,
            request_id=GCE_INSTANCE_GROUP_MANAGER_REQUEST_ID,
            destination_template=GCE_INSTANCE_TEMPLATE_DESTINATION_URL
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='beta',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.patch_instance_group_manager.assert_called_once_with(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            body=GCE_INSTANCE_GROUP_MANAGER_EXPECTED_PATCH,
            request_id=GCE_INSTANCE_GROUP_MANAGER_REQUEST_ID
        )
        self.assertTrue(result)

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_try_to_use_api_v1(self, _):
        # The operator requires the beta API; v1 is rejected in the constructor.
        with self.assertRaises(AirflowException) as cm:
            GceInstanceGroupManagerUpdateTemplateOperator(
                project_id=GCP_PROJECT_ID,
                zone=GCE_ZONE,
                resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
                task_id='id',
                api_version='v1',
                source_template=GCE_INSTANCE_TEMPLATE_SOURCE_URL,
                destination_template=GCE_INSTANCE_TEMPLATE_DESTINATION_URL
            )
        err = cm.exception
        self.assertIn("Use beta api version or above", str(err))

    @mock.patch('airflow.gcp.operators.compute.GceHook')
    def test_try_to_use_non_existing_template(self, mock_hook):
        # A source template that matches nothing results in no patch call.
        mock_hook.return_value.get_instance_group_manager.return_value = \
            deepcopy(GCE_INSTANCE_GROUP_MANAGER_GET)
        op = GceInstanceGroupManagerUpdateTemplateOperator(
            project_id=GCP_PROJECT_ID,
            zone=GCE_ZONE,
            resource_id=GCE_INSTANCE_GROUP_MANAGER_NAME,
            task_id='id',
            source_template=GCE_INSTANCE_TEMPLATE_NON_EXISTING_URL,
            destination_template=GCE_INSTANCE_TEMPLATE_DESTINATION_URL
        )
        result = op.execute(None)
        mock_hook.assert_called_once_with(api_version='beta',
                                          gcp_conn_id='google_cloud_default')
        mock_hook.return_value.patch_instance_group_manager.assert_not_called()
        self.assertTrue(result)
| 41.833185
| 101
| 0.61755
| 4,986
| 46,895
| 5.416165
| 0.079623
| 0.055804
| 0.06895
| 0.031846
| 0.857434
| 0.829254
| 0.80437
| 0.788002
| 0.773597
| 0.765969
| 0
| 0.013265
| 0.292675
| 46,895
| 1,120
| 102
| 41.870536
| 0.800874
| 0.032221
| 0
| 0.657895
| 0
| 0.002924
| 0.170268
| 0.064628
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0.037037
| false
| 0
| 0.009747
| 0
| 0.052632
| 0.002924
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0ad99c2bc6bd5119fa5b2a2b117901f9ef024146
| 65
|
py
|
Python
|
back-end/src/soulsion_api/models/articles.py
|
dheerajd5/SoulSion
|
b8e6e8d14c7d732215c8f6c4bbbdf43d3bae6b82
|
[
"MIT"
] | null | null | null |
back-end/src/soulsion_api/models/articles.py
|
dheerajd5/SoulSion
|
b8e6e8d14c7d732215c8f6c4bbbdf43d3bae6b82
|
[
"MIT"
] | null | null | null |
back-end/src/soulsion_api/models/articles.py
|
dheerajd5/SoulSion
|
b8e6e8d14c7d732215c8f6c4bbbdf43d3bae6b82
|
[
"MIT"
] | null | null | null |
from soulsion_api import db
class Articles(db.Model):
    # Empty SQLAlchemy model stub — no __tablename__ or columns are defined
    # here. NOTE(review): presumably the schema is created elsewhere or still
    # to be written; confirm before relying on this model.
    pass
| 10.833333
| 27
| 0.738462
| 10
| 65
| 4.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 65
| 6
| 28
| 10.833333
| 0.903846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
0ae32c97b7ef53e6dcd3519cbc7d5a135acd06f2
| 136
|
py
|
Python
|
packageopt/services/solvers/__init__.py
|
nspostnov/for-article-optimal-position-liquidation
|
857152b0450c39cfcdb3d329e57ed07efe344356
|
[
"MIT"
] | null | null | null |
packageopt/services/solvers/__init__.py
|
nspostnov/for-article-optimal-position-liquidation
|
857152b0450c39cfcdb3d329e57ed07efe344356
|
[
"MIT"
] | null | null | null |
packageopt/services/solvers/__init__.py
|
nspostnov/for-article-optimal-position-liquidation
|
857152b0450c39cfcdb3d329e57ed07efe344356
|
[
"MIT"
] | null | null | null |
# Re-export the public API of both sub-modules at package level.
from .abstract_base_classes import *
from .implementations import *

# Combined public API; relies on each sub-module defining its own __all__
# (the sub-module names remain accessible as package attributes after import).
__all__ = abstract_base_classes.__all__ + implementations.__all__
| 22.666667
| 65
| 0.830882
| 15
| 136
| 6.466667
| 0.466667
| 0.247423
| 0.391753
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110294
| 136
| 5
| 66
| 27.2
| 0.801653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0ae3cc07812037589942da29a680d7c9eb39e2df
| 34,296
|
py
|
Python
|
intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py
|
Stienvdh/statrick
|
7b092fc42171e226718a70a285a4b323f2f395ad
|
[
"MIT"
] | null | null | null |
intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py
|
Stienvdh/statrick
|
7b092fc42171e226718a70a285a4b323f2f395ad
|
[
"MIT"
] | null | null | null |
intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py
|
Stienvdh/statrick
|
7b092fc42171e226718a70a285a4b323f2f395ad
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Dell EMC OpenManage Ansible Modules
# Version 2.1.3
# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
import json
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from io import StringIO
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_firmware_baseline_compliance_info
from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, \
AnsibleFailJSonException, Constants
@pytest.fixture
def ome_connection_mock_for_firmware_baseline_compliance_info(mocker, ome_response_mock):
    """Patch ``RestOME`` in the module under test and return the mock object
    obtained when the module enters the ``with RestOME(...)`` context.

    The returned connection mock is pre-wired so that ``invoke_request``
    yields *ome_response_mock*.
    """
    rest_ome_patch = mocker.patch(
        'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.RestOME')
    connection = rest_ome_patch.return_value.__enter__.return_value
    connection.invoke_request.return_value = ome_response_mock
    return connection
class TestOmeFirmwareCatalog(FakeAnsibleModule):
    """Unit tests for the ``ome_firmware_baseline_compliance_info`` module.

    Covers the internal helpers (device-id resolution from service tags,
    group ids and group names, baseline-id lookup, report retrieval),
    input validation, and the module ``main`` success/failure paths.
    HTTP-layer behavior is simulated through the
    ``ome_connection_mock_for_firmware_baseline_compliance_info`` fixture.
    """
    module = ome_firmware_baseline_compliance_info
    def test__get_device_id_from_service_tags_for_baseline_success_case(self, ome_response_mock,
                                                                        ome_connection_mock_for_firmware_baseline_compliance_info):
        ome_response_mock.json_data = {
            "value": [{"DeviceServiceTag": Constants.service_tag1, "Id": Constants.device_id1}]}
        ome_response_mock.status_code = 200
        ome_response_mock.success = True
        f_module = self.get_module_mock()
        data = self.module._get_device_id_from_service_tags([Constants.service_tag1],
                                                            ome_connection_mock_for_firmware_baseline_compliance_info,
                                                            f_module)
        assert data == {Constants.device_id1: Constants.service_tag1}
    def test__get_device_id_from_service_tags_empty_case(self, ome_response_mock,
                                                         ome_connection_mock_for_firmware_baseline_compliance_info):
        ome_response_mock.json_data = {"value": []}
        ome_response_mock.status_code = 200
        ome_response_mock.success = True
        f_module = self.get_module_mock()
        data = self.module._get_device_id_from_service_tags([Constants.service_tag1],
                                                            ome_connection_mock_for_firmware_baseline_compliance_info,
                                                            f_module)
        assert data == {}
    def test_get_device_id_from_service_tags_for_baseline_error_case(self,
                                                                     ome_connection_mock_for_firmware_baseline_compliance_info,
                                                                     ome_response_mock):
        ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = HTTPError(
            'http://testhost.com', 400, '', {}, None)
        ome_response_mock.json_data = {
            "value": [{"DeviceServiceTag": Constants.service_tag1, "Id": Constants.device_id1}]}
        ome_response_mock.status_code = 200
        ome_response_mock.success = True
        f_module = self.get_module_mock()
        with pytest.raises(HTTPError) as ex:
            self.module._get_device_id_from_service_tags(["INVALID"],
                                                         ome_connection_mock_for_firmware_baseline_compliance_info,
                                                         f_module)
    def test_get_device_id_from_service_tags_for_baseline_value_error_case(self,
                                                                           ome_connection_mock_for_firmware_baseline_compliance_info,
                                                                           ome_response_mock):
        ome_response_mock.json_data = {
            "value": [{"DeviceServiceTag": Constants.service_tag1, "Id": Constants.device_id1}]}
        ome_response_mock.status_code = 500
        ome_response_mock.success = False
        f_module = self.get_module_mock()
        with pytest.raises(Exception) as exc:
            self.module._get_device_id_from_service_tags(["#$%^&"],
                                                         ome_connection_mock_for_firmware_baseline_compliance_info,
                                                         f_module)
        assert exc.value.args[0] == "Failed to fetch the device information."
    def test_get_device_ids_from_group_ids_success_case(self, ome_response_mock,
                                                        ome_connection_mock_for_firmware_baseline_compliance_info):
        ome_response_mock.json_data = {
            "value": [{"DeviceServiceTag": Constants.service_tag1, "Id": Constants.device_id1}]}
        ome_response_mock.status_code = 200
        ome_response_mock.success = True
        f_module = self.get_module_mock()
        device_ids = self.module.get_device_ids_from_group_ids(f_module, ["123", "345"],
                                                               ome_connection_mock_for_firmware_baseline_compliance_info)
        # Same mocked response is returned for each of the two groups,
        # hence the duplicated device id.
        assert device_ids == [Constants.device_id1, Constants.device_id1]
    def test_get_device_ids_from_group_ids_empty_case(self, ome_response_mock,
                                                      ome_connection_mock_for_firmware_baseline_compliance_info):
        ome_response_mock.json_data = {"value": []}
        ome_response_mock.status_code = 200
        ome_response_mock.success = True
        f_module = self.get_module_mock()
        device_ids = self.module.get_device_ids_from_group_ids(f_module, ["123", "345"],
                                                               ome_connection_mock_for_firmware_baseline_compliance_info)
        assert device_ids == []
    def test_get_device_ids_from_group_ids_error_case(self, ome_connection_mock_for_firmware_baseline_compliance_info,
                                                      ome_response_mock):
        ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = HTTPError(
            'http://testhost.com', 400, '', {}, None)
        ome_response_mock.status_code = 200
        ome_response_mock.success = True
        f_module = self.get_module_mock()
        with pytest.raises(HTTPError) as ex:
            device_ids = self.module.get_device_ids_from_group_ids(f_module, ["123456"],
                                                                   ome_connection_mock_for_firmware_baseline_compliance_info)
    def test_get_device_ids_from_group_ids_value_error_case(self,
                                                            ome_connection_mock_for_firmware_baseline_compliance_info,
                                                            ome_response_mock):
        ome_response_mock.status_code = 500
        ome_response_mock.success = False
        f_module = self.get_module_mock()
        with pytest.raises(Exception) as exc:
            self.module.get_device_ids_from_group_ids(f_module, ["123456"],
                                                      ome_connection_mock_for_firmware_baseline_compliance_info)
        assert exc.value.args[0] == "Failed to fetch the device ids from specified I(device_group_names)."
    def test_get_device_ids_from_group_names_success_case(self, mocker, ome_response_mock,
                                                          ome_connection_mock_for_firmware_baseline_compliance_info):
        ome_response_mock.json_data = {"value": [{"Name": "group1", "Id": 123}]}
        ome_response_mock.status_code = 200
        ome_response_mock.success = True
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_device_ids_from_group_ids',
            return_value=[Constants.device_id1, Constants.device_id2])
        f_module = self.get_module_mock(params={"device_group_names": ["group1", "group2"]})
        device_ids = self.module.get_device_ids_from_group_names(f_module,
                                                                 ome_connection_mock_for_firmware_baseline_compliance_info)
        assert device_ids == [Constants.device_id1, Constants.device_id2]
    def test_get_device_ids_from_group_names_empty_case(self, mocker, ome_response_mock,
                                                        ome_connection_mock_for_firmware_baseline_compliance_info):
        ome_response_mock.json_data = {"value": []}
        ome_response_mock.status_code = 200
        ome_response_mock.success = True
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_device_ids_from_group_ids',
            return_value=[])
        f_module = self.get_module_mock(params={"device_group_names": ["abc", "xyz"]})
        device_ids = self.module.get_device_ids_from_group_names(f_module,
                                                                 ome_connection_mock_for_firmware_baseline_compliance_info)
        assert device_ids == []
    def test_get_device_ids_from_group_names_error_case(self, ome_connection_mock_for_firmware_baseline_compliance_info,
                                                        ome_response_mock):
        ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = HTTPError(
            'http://testhost.com', 400, '', {}, None)
        ome_response_mock.status_code = 200
        ome_response_mock.success = True
        f_module = self.get_module_mock(params={"device_group_names": ["abc", "xyz"]})
        with pytest.raises(HTTPError) as ex:
            self.module.get_device_ids_from_group_names(f_module,
                                                        ome_connection_mock_for_firmware_baseline_compliance_info)
    def test_get_device_ids_from_group_names_value_error_case(self,
                                                              ome_connection_mock_for_firmware_baseline_compliance_info,
                                                              ome_response_mock):
        ome_response_mock.status_code = 500
        ome_response_mock.success = False
        f_module = self.get_module_mock(params={"device_group_names": ["abc", "xyz"]})
        with pytest.raises(Exception) as exc:
            self.module.get_device_ids_from_group_names(f_module,
                                                        ome_connection_mock_for_firmware_baseline_compliance_info)
        assert exc.value.args[0] == "Failed to fetch the specified I(device_group_names)."
    def test_get_identifiers_with_device_ids(self, ome_connection_mock_for_firmware_baseline_compliance_info,
                                             module_mock, default_ome_args):
        """when device_ids given """
        f_module = self.get_module_mock(params={"device_ids": [Constants.device_id1, Constants.device_id2]})
        identifiers, identifiers_type = self.module.get_identifiers(
            ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
        assert identifiers == [Constants.device_id1, Constants.device_id2]
        assert identifiers_type == "device_ids"
    def test_get_identifiers_with_service_tags(self, mocker, ome_connection_mock_for_firmware_baseline_compliance_info,
                                               module_mock, default_ome_args):
        """when service tags given """
        f_module = self.get_module_mock(params={"device_service_tags": [Constants.service_tag1]})
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info._get_device_id_from_service_tags',
            return_value={Constants.device_id1: Constants.service_tag1})
        identifiers, identifiers_type = self.module.get_identifiers(
            ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
        assert identifiers == [Constants.device_id1]
        assert identifiers_type == "device_service_tags"
    def test_get_identifiers_with_group_names(self, mocker, ome_connection_mock_for_firmware_baseline_compliance_info,
                                              module_mock, default_ome_args):
        """when group names given """
        f_module = self.get_module_mock(params={"device_group_names": [Constants.service_tag1]})
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_device_ids_from_group_names',
            return_value=[123, 456])
        identifiers, identifiers_type = self.module.get_identifiers(
            ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
        assert identifiers == [123, 456]
        # BUGFIX: this line was a bare comparison (no `assert`), so the
        # identifiers_type check was silently never enforced.
        assert identifiers_type == "device_group_names"
    def test_get_identifiers_with_service_tags_empty_case(self, mocker,
                                                          ome_connection_mock_for_firmware_baseline_compliance_info,
                                                          module_mock, default_ome_args):
        """when service tags given """
        f_module = self.get_module_mock(params={"device_service_tags": [Constants.service_tag1]})
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info._get_device_id_from_service_tags',
            return_value={})
        identifiers, identifiers_type = self.module.get_identifiers(
            ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
        assert identifiers == []
        assert identifiers_type == "device_service_tags"
    def test_get_baseline_id_from_name_success_case(self, default_ome_args,
                                                    ome_connection_mock_for_firmware_baseline_compliance_info,
                                                    module_mock, ome_response_mock):
        f_module = self.get_module_mock(params={"baseline_name": "baseline_name1"})
        ome_response_mock.success = True
        ome_response_mock.json_data = {"value": [{"Name": "baseline_name1", "Id": 111}, {"Name": "baseline_name2",
                                                                                         "Id": 222}]}
        baseline_id = self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info,
                                                            f_module)
        assert baseline_id == 111
    def test_get_baseline_id_from_name_when_name_not_exists(self, default_ome_args,
                                                            ome_connection_mock_for_firmware_baseline_compliance_info,
                                                            ome_response_mock):
        ome_response_mock.success = True
        ome_response_mock.json_data = {"value": [{"Name": "baseline_name1", "Id": 111}]}
        f_module = self.get_module_mock(params={"baseline_name": "not_exits"})
        with pytest.raises(AnsibleFailJSonException) as exc:
            self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
        assert exc.value.args[0] == "Specified I(baseline_name) does not exist in the system."
    def test_get_baseline_id_from_name_when_baseline_is_empty(self, default_ome_args,
                                                              ome_connection_mock_for_firmware_baseline_compliance_info,
                                                              ome_response_mock):
        ome_response_mock.success = True
        ome_response_mock.json_data = {"value": []}
        f_module = self.get_module_mock(params={"baseline_name": "baseline_name1"})
        with pytest.raises(AnsibleFailJSonException) as exc:
            self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
        assert exc.value.args[0] == "No baseline exists in the system."
    def test_get_baseline_id_from_name_when_baselinename_is_none(self, default_ome_args,
                                                                 ome_connection_mock_for_firmware_baseline_compliance_info,
                                                                 ome_response_mock):
        ome_response_mock.success = True
        ome_response_mock.json_data = {"value": []}
        f_module = self.get_module_mock(params={"baseline_notexist": "data"})
        with pytest.raises(AnsibleFailJSonException) as exc:
            self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
        assert exc.value.args[0] == "I(baseline_name) is a mandatory option."
    def test_get_baseline_id_from_name_with_http_error_handlin_case(self,
                                                                    ome_connection_mock_for_firmware_baseline_compliance_info,
                                                                    ome_response_mock):
        ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = HTTPError(
            'http://testhost.com', 400, '', {}, None)
        ome_response_mock.status_code = 400
        ome_response_mock.success = False
        f_module = self.get_module_mock(params={"baseline_name": "baseline_name1"})
        with pytest.raises(HTTPError) as ex:
            self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
    @pytest.mark.parametrize("exc_type",
                             [URLError, SSLValidationError, ConnectionError, TypeError, ValueError, HTTPError])
    def test_get_baseline_id_from_name_failure_case_01(self, exc_type,
                                                       ome_connection_mock_for_firmware_baseline_compliance_info,
                                                       ome_response_mock):
        # HTTPError/SSLValidationError need the full urllib constructor signature.
        if exc_type not in [HTTPError, SSLValidationError]:
            ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = exc_type('test')
        else:
            ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = exc_type(
                'http://testhost.com', 400, '', {}, None)
        ome_response_mock.status_code = 400
        ome_response_mock.success = False
        f_module = self.get_module_mock(params={"baseline_name": "baseline_name1"})
        with pytest.raises(exc_type) as ex:
            self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
    def test_get_baselines_report_by_device_ids_success_case(self, mocker,
                                                             ome_connection_mock_for_firmware_baseline_compliance_info,
                                                             ome_response_mock):
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_identifiers',
            return_value=([Constants.device_id1], "device_ids"))
        ome_response_mock.json_data = {"value": []}
        ome_response_mock.success = True
        f_module = self.get_module_mock()
        self.module.get_baselines_report_by_device_ids(ome_connection_mock_for_firmware_baseline_compliance_info,
                                                       f_module)
    def test_get_baselines_report_by_device_service_tag_not_exits_case(self, mocker,
                                                                       ome_connection_mock_for_firmware_baseline_compliance_info,
                                                                       ome_response_mock):
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_identifiers',
            return_value=([], "device_service_tags"))
        ome_response_mock.json_data = {"value": []}
        ome_response_mock.success = True
        f_module = self.get_module_mock()
        with pytest.raises(AnsibleFailJSonException) as exc:
            self.module.get_baselines_report_by_device_ids(ome_connection_mock_for_firmware_baseline_compliance_info,
                                                           f_module)
        assert exc.value.args[0] == "Device details not available as the service tag(s) provided are invalid."
    def test_get_baselines_report_by_group_names_not_exits_case(self, mocker,
                                                                ome_connection_mock_for_firmware_baseline_compliance_info,
                                                                ome_response_mock):
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_identifiers',
            return_value=([], "device_group_names"))
        ome_response_mock.json_data = {"value": []}
        ome_response_mock.success = True
        f_module = self.get_module_mock()
        with pytest.raises(AnsibleFailJSonException) as exc:
            self.module.get_baselines_report_by_device_ids(ome_connection_mock_for_firmware_baseline_compliance_info,
                                                           f_module)
        assert exc.value.args[0] == "Device details not available as the group name(s) provided are invalid."
    @pytest.mark.parametrize("exc_type",
                             [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
    def test_get_baselines_report_by_device_ids_exception_handling(self, exc_type,
                                                                   ome_connection_mock_for_firmware_baseline_compliance_info,
                                                                   ome_response_mock):
        """exceptions raised by invoke_request must propagate to the caller"""
        if exc_type not in [HTTPError, SSLValidationError]:
            ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = exc_type('test')
        else:
            ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = exc_type(
                'http://testhost.com', 400, '', {}, None)
        ome_response_mock.status_code = 400
        ome_response_mock.success = False
        f_module = self.get_module_mock()
        with pytest.raises(exc_type) as ex:
            self.module.get_baselines_report_by_device_ids(
                ome_connection_mock_for_firmware_baseline_compliance_info,
                f_module)
    def test_get_baseline_compliance_reports_success_case_for_baseline_device(self, mocker, ome_response_mock,
                                                                              ome_connection_mock_for_firmware_baseline_compliance_info):
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baseline_id_from_name',
            return_value=123)
        f_module = self.get_module_mock(params={"baseline_name": "baseline1"})
        ome_response_mock.success = True
        ome_response_mock.json_data = {"value": [{"baseline_device_report1": "data"}]}
        data = self.module.get_baseline_compliance_reports(ome_connection_mock_for_firmware_baseline_compliance_info,
                                                           f_module)
        assert data == [{"baseline_device_report1": "data"}]
    @pytest.mark.parametrize("exc_type",
                             [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
    def test_get_baseline_compliance_reports_exception_handling_case(self, exc_type, mocker, ome_response_mock,
                                                                     ome_connection_mock_for_firmware_baseline_compliance_info):
        json_str = to_text(json.dumps({"data": "out"}))
        if exc_type not in [HTTPError, SSLValidationError]:
            mocker.patch(
                'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baseline_id_from_name',
                side_effect=exc_type('exception message'))
        else:
            mocker.patch(
                'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baseline_id_from_name',
                side_effect=exc_type('http://testhost.com', 400, 'http error message',
                                     {"accept-type": "application/json"}, StringIO(json_str)))
        f_module = self.get_module_mock(params={"baseline_name": "baseline1"})
        with pytest.raises(exc_type):
            self.module.get_baseline_compliance_reports(ome_connection_mock_for_firmware_baseline_compliance_info,
                                                        f_module)
    # Parameter combinations that should all fail validate_inputs: no usable
    # identifier (all empty/None).
    param_list1 = [{"baseline_name": ""},
                   {"baseline_name": None},
                   {"device_ids": []},
                   {"device_ids": None},
                   {"device_ids": [], "baseline_name": ""},
                   {"device_service_tags": []},
                   {"device_service_tags": [], "baseline_name": ""},
                   {"device_service_tags": None},
                   {"device_group_names": [], "baseline_name": ""},
                   {"device_group_names": []},
                   {"device_group_names": None},
                   {"device_ids": [], "device_service_tags": []},
                   {"device_ids": None, "device_service_tags": None},
                   {"device_ids": [], "device_service_tags": [], "device_group_names": []},
                   {"device_ids": None, "device_service_tags": None, "device_group_names": None},
                   {"device_ids": None, "device_service_tags": [], "device_group_names": None},
                   {"device_ids": [], "device_service_tags": [], "device_group_names": [], "baseline_name": ""},
                   ]
    @pytest.mark.parametrize("param", param_list1)
    def test_validate_input_error_handling_case(self, param):
        f_module = self.get_module_mock(params=param)
        with pytest.raises(Exception) as exc:
            self.module.validate_inputs(f_module)
        assert exc.value.args[0] == "one of the following is required: device_ids, " \
                                    "device_service_tags, device_group_names, baseline_name " \
                                    "to generate device based compliance report."
    # Parameter combinations that validate_inputs must accept silently.
    params_list2 = [{
        "device_ids": [Constants.device_id1],
        "device_service_tags": [Constants.service_tag1]},
        {"device_ids": [Constants.device_id1]},
        {"device_group_names": ["group1"]},
        {"device_service_tags": [Constants.service_tag1]},
        {"baseline_name": "baseline1", "device_ids": [Constants.device_id1]},
        {"baseline_name": "baseline1", "device_group_names": ["group1"]}
    ]
    @pytest.mark.parametrize("param", params_list2)
    def test_validate_input_params_without_error_handling_case(self, param):
        f_module = self.get_module_mock(params=param)
        self.module.validate_inputs(f_module)
    def test_baseline_complaince_main_success_case_01(self, mocker, ome_default_args, module_mock,
                                                      ome_connection_mock_for_firmware_baseline_compliance_info):
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.validate_inputs')
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baselines_report_by_device_ids',
            return_value=[{"device": "device_report"}])
        ome_default_args.update({"device_ids": [Constants.device_id1]})
        result = self._run_module(ome_default_args)
        assert result["changed"] is False
        assert 'baseline_compliance_info' in result
        assert 'msg' not in result
    def test_baseline_complaince_main_success_case_02(self, mocker, ome_default_args, module_mock,
                                                      ome_connection_mock_for_firmware_baseline_compliance_info):
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.validate_inputs')
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baseline_compliance_reports',
            return_value=[{"baseline_device": "baseline_device_report"}])
        ome_default_args.update({"baseline_name": "baseline_name"})
        result = self._run_module(ome_default_args)
        assert result["changed"] is False
        assert 'baseline_compliance_info' in result
        assert 'msg' not in result
    def test_baseline_complaince_main_failure_case_01(self, ome_default_args, module_mock):
        """required parameter is not passed along with specified report_type"""
        result = self._run_module_with_fail_json(ome_default_args)
        assert 'baseline_compliance_info' not in result
        assert 'msg' in result
        assert result['msg'] == "one of the following is required: device_ids, " \
                                "device_service_tags, device_group_names, baseline_name"
        assert result['failed'] is True
    # Mutually-exclusive parameter combinations rejected by the argument spec.
    param_list4 = [
        {"device_ids": [Constants.device_id1], "device_service_tags": [Constants.service_tag1]},
        {"device_service_tags": [Constants.device_id1], "device_group_names": ["group_name1"]},
        {"device_ids": [Constants.device_id1], "device_group_names": ["group_name1"]},
        {"device_ids": [Constants.device_id1], "device_service_tags": ["group_name1"]},
        {"device_ids": [Constants.device_id1], "device_service_tags": [Constants.service_tag1],
         "device_group_names": ["group_name1"]},
        {"device_ids": [Constants.device_id1], "device_service_tags": [Constants.service_tag1],
         "device_group_names": ["group_name1"], "baseline_name": "baseline1"
         },
        {"device_ids": [Constants.device_id1], "baseline_name": "baseline1"},
        {"device_service_tags": [Constants.service_tag1], "baseline_name": "baseline1"},
        {"device_group_names": ["group_name1"], "baseline_name": "baseline1"},
        {"device_ids": [], "device_service_tags": [],
         "device_group_names": [], "baseline_name": ""
         },
    ]
    @pytest.mark.parametrize("param", param_list4)
    def test_baseline_complaince_main_failure_case_02(self, param, ome_default_args, module_mock):
        """required parameter is not passed along with specified report_type"""
        ome_default_args.update(param)
        result = self._run_module_with_fail_json(ome_default_args)
        assert 'baseline_compliance_info' not in result
        assert 'msg' in result
        assert result["msg"] == "parameters are mutually exclusive: " \
                                "baseline_name|device_service_tags|device_ids|device_group_names"
        assert result['failed'] is True
    def test_baseline_complaince_main_failure_case_03(self, mocker, ome_default_args, module_mock, ome_response_mock,
                                                      ome_connection_mock_for_firmware_baseline_compliance_info):
        """when ome response return value is None"""
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.validate_inputs')
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baselines_report_by_device_ids',
            return_value=None)
        ome_default_args.update({"device_ids": [Constants.device_id1]})
        result = self._run_module_with_fail_json(ome_default_args)
        assert 'baseline_compliance_info' not in result
        assert result['msg'] == "Failed to fetch the compliance baseline information."
    @pytest.mark.parametrize("exc_type",
                             [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
    def test_baseline_complaince_main_exception_handling_case(self, exc_type, mocker, ome_default_args,
                                                              ome_connection_mock_for_firmware_baseline_compliance_info,
                                                              ome_response_mock):
        ome_default_args.update({"device_service_tags": [Constants.service_tag1]})
        mocker.patch(
            'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.validate_inputs')
        ome_response_mock.status_code = 400
        ome_response_mock.success = False
        json_str = to_text(json.dumps({"data": "out"}))
        if exc_type not in [HTTPError, SSLValidationError]:
            mocker.patch(
                'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baselines_report_by_device_ids',
                side_effect=exc_type('test'))
        else:
            mocker.patch(
                'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baselines_report_by_device_ids',
                side_effect=exc_type('http://testhost.com', 400, 'http error message',
                                     {"accept-type": "application/json"}, StringIO(json_str)))
        result = self._run_module_with_fail_json(ome_default_args)
        assert 'baseline_compliance_info' not in result
        assert 'msg' in result
        assert result['failed'] is True
        if exc_type == HTTPError:
            assert 'error_info' in result
| 64.104673
| 146
| 0.642815
| 3,727
| 34,296
| 5.419909
| 0.062517
| 0.091782
| 0.106733
| 0.136634
| 0.897673
| 0.881931
| 0.85698
| 0.840149
| 0.805
| 0.790446
| 0
| 0.00991
| 0.279158
| 34,296
| 534
| 147
| 64.224719
| 0.807176
| 0.017028
| 0
| 0.633833
| 0
| 0
| 0.173479
| 0.082551
| 0
| 0
| 0
| 0
| 0.094218
| 1
| 0.079229
| false
| 0
| 0.019272
| 0
| 0.111349
| 0.002141
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7c494746bc4f080794d5a55c478ce1c7924996fd
| 6,023
|
py
|
Python
|
tests/back_compat_test.py
|
clayne/gtirb-pprinter
|
97b6f88d527e1dffbb41c56c267ccd58ca270ba7
|
[
"MIT"
] | 36
|
2018-11-09T15:57:13.000Z
|
2022-03-02T20:07:56.000Z
|
tests/back_compat_test.py
|
clayne/gtirb-pprinter
|
97b6f88d527e1dffbb41c56c267ccd58ca270ba7
|
[
"MIT"
] | 10
|
2020-04-14T03:55:14.000Z
|
2022-02-21T19:39:27.000Z
|
tests/back_compat_test.py
|
clayne/gtirb-pprinter
|
97b6f88d527e1dffbb41c56c267ccd58ca270ba7
|
[
"MIT"
] | 14
|
2019-04-17T21:11:39.000Z
|
2022-03-02T20:07:58.000Z
|
import gtirb
from gtirb_helpers import (
create_test_module,
add_text_section,
add_symbol,
add_code_block,
)
from pprinter_helpers import (
asm_lines,
PPrinterTest,
run_asm_pprinter_with_outputput,
)
class TestMoffsetCompat(PPrinterTest):
    """Tests for the pretty printer's backward-compatibility handling of
    moffset ``mov`` instructions whose symbolic expression is recorded at
    byte offset 0 instead of at the immediate's real offset.

    "Correct" cases place the expression at the immediate's offset and must
    not trigger the warning; "compat" cases place it at offset 0 and must
    emit the warning exactly once while still printing the symbol.
    """
    # Warning the printer emits (once) when it falls back to an
    # offset-0 symbolic expression for compatibility.
    COMPAT_WARNING_MESSAGE = (
        "WARNING: using symbolic expression at offset 0 for compatibility; "
        "recreate your gtirb file with newer tools that put expressions at "
        "the correct offset. Starting in early 2022, newer versions of the "
        "pretty printer will not use expressions at offset 0."
    )
    def test_moffset_mov_ia32_correct(self):
        """IA32 moffset movs with expressions at the correct offset: no warning."""
        ir, m = create_test_module(
            gtirb.Module.FileFormat.PE, gtirb.Module.ISA.IA32
        )
        s, bi = add_text_section(m, 0x1000)
        hello_expr = gtirb.SymAddrConst(0, add_symbol(m, "hello"))
        # Symbolic-expression keys below are the byte offset of the moffset
        # immediate within each instruction (1 after a one-byte opcode,
        # 2 after a prefix + opcode).
        # mov al, byte ptr [hello]
        add_code_block(bi, b"\xA0\x00\x00\x00\x00", {1: hello_expr})
        # mov ax, word ptr [hello]
        add_code_block(bi, b"\x66\xA1\x00\x00\x00\x00", {2: hello_expr})
        # mov eax, dword ptr [hello]
        add_code_block(bi, b"\xA1\x00\x00\x00\x00", {1: hello_expr})
        # mov byte ptr [hello], al
        add_code_block(bi, b"\xA2\x00\x00\x00\x00", {1: hello_expr})
        # mov word ptr [hello], ax
        add_code_block(bi, b"\x66\xA3\x00\x00\x00\x00", {2: hello_expr})
        # mov dword ptr [hello], eax
        add_code_block(bi, b"\xA3\x00\x00\x00\x00", {1: hello_expr})
        asm, output = run_asm_pprinter_with_outputput(ir)
        self.assertNotIn(self.COMPAT_WARNING_MESSAGE, output)
        self.assertContains(
            asm_lines(asm),
            (
                "mov AL,BYTE PTR [hello]",
                "mov AX,WORD PTR [hello]",
                "mov EAX,DWORD PTR [hello]",
                "mov BYTE PTR [hello],AL",
                "mov WORD PTR [hello],AX",
                "mov DWORD PTR [hello],EAX",
            ),
        )
    def test_moffset_mov_ia32_compat(self):
        """IA32 moffset movs with offset-0 expressions: warn once, same output."""
        ir, m = create_test_module(
            gtirb.Module.FileFormat.PE, gtirb.Module.ISA.IA32
        )
        s, bi = add_text_section(m, 0x1000)
        hello_expr = gtirb.SymAddrConst(0, add_symbol(m, "hello"))
        # All expressions deliberately recorded at (wrong) offset 0.
        # mov al, byte ptr [hello]
        add_code_block(bi, b"\xA0\x00\x00\x00\x00", {0: hello_expr})
        # mov ax, word ptr [hello]
        add_code_block(bi, b"\x66\xA1\x00\x00\x00\x00", {0: hello_expr})
        # mov eax, dword ptr [hello]
        add_code_block(bi, b"\xA1\x00\x00\x00\x00", {0: hello_expr})
        # mov byte ptr [hello], al
        add_code_block(bi, b"\xA2\x00\x00\x00\x00", {0: hello_expr})
        # mov word ptr [hello], ax
        add_code_block(bi, b"\x66\xA3\x00\x00\x00\x00", {0: hello_expr})
        # mov dword ptr [hello], eax
        add_code_block(bi, b"\xA3\x00\x00\x00\x00", {0: hello_expr})
        asm, output = run_asm_pprinter_with_outputput(ir)
        self.assertIn(self.COMPAT_WARNING_MESSAGE, output)
        # Warning should be de-duplicated: once per run, not per instruction.
        self.assertEqual(output.count(self.COMPAT_WARNING_MESSAGE), 1)
        self.assertContains(
            asm_lines(asm),
            (
                "mov AL,BYTE PTR [hello]",
                "mov AX,WORD PTR [hello]",
                "mov EAX,DWORD PTR [hello]",
                "mov BYTE PTR [hello],AL",
                "mov WORD PTR [hello],AX",
                "mov DWORD PTR [hello],EAX",
            ),
        )
    def test_moffset_mov_x64_correct(self):
        """x64 moffset movs with expressions at the correct offset: no warning."""
        ir, m = create_test_module(
            gtirb.Module.FileFormat.PE, gtirb.Module.ISA.X64
        )
        s, bi = add_text_section(m, 0x1000)
        hello_expr = gtirb.SymAddrConst(0, add_symbol(m, "hello"))
        # Offset 2 skips the REX.W prefix and the opcode byte.
        # mov rax, qword ptr [hello]
        add_code_block(
            bi, b"\x48\xA1\x00\x00\x00\x00\x00\x00\x00\x00", {2: hello_expr}
        )
        # mov qword ptr [hello], rax
        add_code_block(
            bi, b"\x48\xA3\x00\x00\x00\x00\x00\x00\x00\x00", {2: hello_expr}
        )
        asm, output = run_asm_pprinter_with_outputput(ir)
        self.assertNotIn(self.COMPAT_WARNING_MESSAGE, output)
        self.assertContains(
            asm_lines(asm),
            ("mov RAX,QWORD PTR [hello]", "mov QWORD PTR [hello],RAX",),
        )
    def test_moffset_mov_x64_compat(self):
        """x64 moffset movs with offset-0 expressions: warn once, same output."""
        ir, m = create_test_module(
            gtirb.Module.FileFormat.PE, gtirb.Module.ISA.X64
        )
        s, bi = add_text_section(m, 0x1000)
        hello_expr = gtirb.SymAddrConst(0, add_symbol(m, "hello"))
        # mov rax, qword ptr [hello]
        add_code_block(
            bi, b"\x48\xA1\x00\x00\x00\x00\x00\x00\x00\x00", {0: hello_expr}
        )
        # mov qword ptr [hello], rax
        add_code_block(
            bi, b"\x48\xA3\x00\x00\x00\x00\x00\x00\x00\x00", {0: hello_expr}
        )
        asm, output = run_asm_pprinter_with_outputput(ir)
        self.assertIn(self.COMPAT_WARNING_MESSAGE, output)
        self.assertEqual(output.count(self.COMPAT_WARNING_MESSAGE), 1)
        self.assertContains(
            asm_lines(asm),
            ("mov RAX,QWORD PTR [hello]", "mov QWORD PTR [hello],RAX",),
        )
    def test_nonmoffset_mov(self):
        """Non-moffset movs never get the offset-0 compatibility fallback:
        the wrongly-placed expression prints as a literal, with no warning."""
        ir, m = create_test_module(
            gtirb.Module.FileFormat.PE, gtirb.Module.ISA.IA32
        )
        s, bi = add_text_section(m, 0x1000)
        hello_expr = gtirb.SymAddrConst(0, add_symbol(m, "hello"))
        add_code_block(
            bi,
            # mov edi, hello
            b"\x8B\x3D\x00\x00\x00\x00",
            # wrong symbolic offset
            {0: hello_expr},
        )
        add_code_block(
            bi,
            # mov edi, hello
            b"\x8B\x3D\x00\x00\x00\x00",
            # correct symbolic offset
            {2: hello_expr},
        )
        asm, output = run_asm_pprinter_with_outputput(ir)
        self.assertNotIn(self.COMPAT_WARNING_MESSAGE, output)
        self.assertContains(
            asm_lines(asm),
            ("mov EDI,DWORD PTR [0]", "mov EDI,DWORD PTR [hello]"),
        )
| 35.85119
| 76
| 0.576955
| 813
| 6,023
| 4.081181
| 0.124231
| 0.126582
| 0.141049
| 0.122966
| 0.85443
| 0.833936
| 0.833936
| 0.833635
| 0.831224
| 0.830621
| 0
| 0.068122
| 0.300515
| 6,023
| 167
| 77
| 36.065868
| 0.71944
| 0.081521
| 0
| 0.527132
| 0
| 0
| 0.212121
| 0.055162
| 0
| 0
| 0.005444
| 0
| 0.093023
| 1
| 0.03876
| false
| 0
| 0.023256
| 0
| 0.077519
| 0.062016
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7c8a7b4f4f858f37bab0c363cd54d9c2cb9b036b
| 39,381
|
py
|
Python
|
src/Main.py
|
krmnino/COVID19_DataTool
|
00f730941f2e644fbf8f4de8dcd8b1575e157e78
|
[
"MIT"
] | null | null | null |
src/Main.py
|
krmnino/COVID19_DataTool
|
00f730941f2e644fbf8f4de8dcd8b1575e157e78
|
[
"MIT"
] | null | null | null |
src/Main.py
|
krmnino/COVID19_DataTool
|
00f730941f2e644fbf8f4de8dcd8b1575e157e78
|
[
"MIT"
] | null | null | null |
from FileParser import parse_file
from Operations import difference
from Operations import print_diff_data
from Operations import print_cases
from Operations import print_deaths
from Operations import print_tests
from Operations import print_recovered
from Operations import print_hospitalized
from Operations import print_gf_data
from Operations import plot_graph
from Operations import projection
from Operations import compute_data
from Operations import list_to_csv
from Operations import plot_graph_all
from Operations import print_new_data
from Operations import update_country_data
from FetchUpdateData import fetch_data_usa
from FetchUpdateData import diff_raw_USA_data
from FetchUpdateData import fetch_data_peru
from FetchUpdateData import diff_raw_PER_data
import os
import platform
import sys
import numpy as np
from datetime import date
import csv
'''
Structure of parsed_data list after computation
index contents
0 Dates
1 Cases
2 Deaths
3 Tests
4 Recovered
5 Hospitalized
6 Days
7 New Cases
8 % Cases
9 New Deaths
10 % Deaths
11 New Recovered
12 % Recovered
13 New Hospitalized
14 % Hospitalized
15 New Tests
16 % Tests
17 Mortality Rate
18 Active Cases
'''
# ---------------------------------------------------------------------------
# Dispatch tables for command_line().
#
# parsed_data column layout (see module docstring): 0=Dates, 1=Cases,
# 2=Deaths, 3=Tests, 4=Recovered, 5=Hospitalized, 6=Days, 8=% Cases,
# 10=% Deaths, 12=% Recovered, 14=% Hospitalized, 16=% Tests.
# ---------------------------------------------------------------------------

# command -> (parsed_data column, matplotlib color code, axis/title label)
_PLOT_SPECS = {
    'plot_cases': (1, 'b', 'Cases'),
    'plot_cases_trend': (1, 'b', 'Cases'),
    'plot_cases_gf': (8, 'b', 'Cases Growth Rate (%)'),
    'plot_deaths': (2, 'r', 'Deaths'),
    'plot_deaths_gf': (10, 'r', 'Deaths Growth Rate (%)'),
    'plot_tests': (3, 'c', 'Tests'),
    'plot_tests_gf': (16, 'c', 'Tests Growth Rate (%)'),
    'plot_recovered': (4, 'g', 'Recovered'),
    'plot_recovered_gf': (12, 'g', 'Recovered Growth Rate (%)'),
    'plot_hospitalized': (5, 'm', 'Hospitalized'),
    'plot_hospitalized_gf': (14, 'm', 'Hospitalized Growth Rate (%)'),
}

# command -> (column, color, label) for logarithmic-scale plots.
# BUG FIX: plot_hospitalized_log previously plotted column 3 (Tests)
# instead of column 5 (Hospitalized).
_LOG_SPECS = {
    'plot_cases_log': (1, 'b', 'Cases'),
    'plot_deaths_log': (2, 'r', 'Deaths'),
    'plot_tests_log': (3, 'c', 'Tests'),
    'plot_recovered_log': (4, 'g', 'Recovered'),
    'plot_hospitalized_log': (5, 'm', 'Hospitalized'),
}

# (column, color, label, output file stem) for save_plots, in output order.
_SAVE_SPECS = [
    (1, 'b', 'Cases', 'cases'),
    (8, 'b', 'Cases Growth Rate (%)', 'cases_gf'),
    (2, 'r', 'Deaths', 'deaths'),
    (10, 'r', 'Deaths Growth Rate (%)', 'deaths_gf'),
    (3, 'c', 'Tests', 'tests'),
    (16, 'c', 'Tests Growth Rate (%)', 'tests_gf'),
    (4, 'g', 'Recovered', 'recovered'),
    (12, 'g', 'Recovered Growth Rate (%)', 'recovered_gf'),
    (5, 'm', 'Hospitalized', 'hospitalized'),
    (14, 'm', 'Hospitalized Growth Rate (%)', 'hospitalized_gf'),
]

# (command syntax, description) pairs printed by the "help" command.
_HELP_ENTRIES = [
    ('fetch [ISO_CODE]', 'Update raw data from external repo of selected country'),
    ('update [ISO_CODE]', 'Update parsed data of selected country'),
    ('load [ISO_CODE]', 'Load data set in memory'),
    ('show_diff', 'Display difference in cases, active, deaths, recovered, hospitalized, and tests'),
    ('show_gf', 'Display growth factor in cases, active, deaths, recovered, hospitalized, and tests'),
    ('show_cases', 'Display data related to number of cases'),
    ('show_deaths', 'Display data related to number of deaths'),
    ('show_tests', 'Display data related to number of tests'),
    ('show_recovered', 'Display data related to number of recoveries'),
    ('show_hospitalized', 'Display data related to number of hospitalizations'),
    ('delete', 'Erase data set loaded in memory'),
    ('diff', 'Shows difference of values between 2 days'),
    ('projection [next_days] [avg_previous_days]', 'Show projection for the next x days using avg growth factor from y previous days'),
    ('plot_cases [from_day] [to_day]', 'Display cases graph'),
    ('plot_cases_trend [from_day] [to_day]', 'Display cases graph with trendline'),
    ('plot_cases_log', 'Display cases in a logarithmic graph'),
    ('plot_cases_gf [from_day] [to_day]', 'Display cases growth factor graph'),
    ('plot_deaths [from_day] [to_day]', 'Display deaths graph'),
    ('plot_deaths_log', 'Display deaths in a logarithmic graph'),
    ('plot_deaths_gf [from_day] [to_day]', 'Display deaths growth factor graph'),
    ('plot_tests [from_day] [to_day]', 'Display tests graph'),
    ('plot_tests_log', 'Display tests in a logarithmic graph'),
    ('plot_tests_gf', 'Display tests growth factor graph'),
    ('plot_recovered [from_day] [to_day]', 'Display recovered graph'),
    ('plot_recovered_log', 'Display recovered in a logarithmic graph'),
    ('plot_recovered_gf', 'Display recovered growth factor graph'),
    ('plot_hospitalized [from_day] [to_day]', 'Display hospitalized graph'),
    ('plot_hospitalized_log', 'Display hospitalized in a logarithmic graph'),
    ('plot_hospitalized_gf', 'Display hospitalized growth factor graph'),
    ('plot_all [from_day] [to_day]', 'Display confirmed/active cases, deaths and recovered graphs'),
    ('save_plots [from_day] [to_day]', 'Save all graphs in PNG format'),
    ('clear', 'Clears the console'),
    ('export_csv', 'Export .csv file with computed data'),
    ('help', 'Display program manual'),
    ('exit', 'Exit the program'),
]


def _title_asof(file_name, label, parsed_data):
    """Title for a whole-history plot: '<ISO>: <label> as of <last date>'."""
    return file_name + ": " + label + " as of " + parsed_data[0][len(parsed_data[0]) - 1]


def _title_range(file_name, label, parsed_data, from_day, to_day):
    """Title for a ranged plot: '<ISO>: <label> from day <d1> to <d2>'."""
    return (file_name + ": " + label + " from day "
            + parsed_data[0][from_day] + " to " + parsed_data[0][to_day])


def _parse_day_range(parsed_input, parsed_data):
    """Validate the [from_day] [to_day] CLI arguments.

    Returns (from_day, to_day) as ints on success, or None after printing a
    diagnostic.  BUG FIX: both endpoints are now checked against the last
    day (the original only checked to_day for the plot commands), and all
    messages consistently report the last day number (column 6) rather than
    mixing in date strings or other columns.
    """
    last_day = parsed_data[6][len(parsed_data[0]) - 1]
    if not parsed_input[1].isdigit() or not parsed_input[2].isdigit():
        print("Days must be integers. Ranging 0 -", last_day)
        return None
    from_day = int(parsed_input[1])
    to_day = int(parsed_input[2])
    if from_day > last_day or to_day > last_day:
        print("range of days is invalid, days must fall between the range: 0 -", last_day)
        return None
    if from_day > to_day:
        print("Range of days is invalid, starting day must be less than ending day")
        return None
    return from_day, to_day


def command_line():
    """Interactive REPL for browsing, plotting and updating COVID-19 data.

    Reads commands from stdin in a loop until 'exit'.  Data-set commands
    (show_*, plot_*, diff, projection, export_csv, save_plots) require a
    country data set to have been loaded with 'load' first.
    """
    os.system('mode con cols=150')  # widen the Windows console for the tables
    np.set_printoptions(suppress=True)
    iso_code_countries = {'USA': 'USA_data.csv', 'PER': 'PER_data.csv'}
    header_fields = ['Date', 'Day', 'Cases', 'New Cases', '%\u0394 Cases', 'Deaths', 'New Deaths', '%\u0394 Deaths', 'Recov.', 'New Recov.', '%\u0394 Recov.',
                     'Hospit.', 'New Hospit.', '%\u0394 Hospit.', 'Tests', 'New Tests', '%\u0394 Tests', 'Mort. %', 'Active']
    instructions = ['load', 'show_diff', 'show_gf', 'show_cases', 'show_deaths', 'show_tests', 'show_recovered', 'show_hospitalized', 'delete', 'diff', 'projection',
                    'plot_cases', 'plot_cases_trend', 'plot_cases_log', 'plot_cases_gf', 'plot_deaths', 'plot_deaths_log', 'plot_deaths_gf', 'plot_tests', 'plot_tests_log', 'plot_tests_gf',
                    'plot_recovered', 'plot_recovered_log', 'plot_recovered_gf', 'plot_hospitalized', 'plot_hospitalized_log', 'plot_hospitalized_gf', 'plot_all',
                    'update', 'fetch', 'save_plots', 'export_csv', 'clear', 'exit', 'help']
    # command -> tabular print routine; all share the (header, data) signature
    show_commands = {
        'show_diff': print_diff_data,
        'show_gf': print_gf_data,
        'show_cases': print_cases,
        'show_deaths': print_deaths,
        'show_tests': print_tests,
        'show_recovered': print_recovered,
        'show_hospitalized': print_hospitalized,
    }
    new_data = []
    parsed_data = 0      # 0 == nothing loaded (parse_file also returns 0 on failure)
    file_name = ''       # ISO code of the loaded data set, used in plot titles
    while True:
        parsed_input = input('>> ').split()
        if len(parsed_input) == 0:
            continue
        cmd = parsed_input[0]
        if cmd not in instructions:
            print('Invalid command. Type "help" for instructions.')
            continue
        # --- commands that work without a loaded data set -------------------
        if cmd == 'exit':
            if len(parsed_input) != 1:
                print('Usage: exit')
                continue
            print('Exiting...')
            break
        if cmd == 'help':
            if len(parsed_input) != 1:
                print('Usage: help')
            else:
                print('usage manual:')
                for usage, description in _HELP_ENTRIES:
                    print('%-50s %s' % (usage, description))
            continue
        if cmd == 'clear':
            if len(parsed_input) != 1:
                print('Usage: clear')
            elif platform.system() == "Windows":
                os.system("cls")
            elif platform.system() == "Linux":
                os.system("clear")
            continue
        if cmd == 'load':
            if len(parsed_input) != 2:
                print("Usage: load [FILE PATH]")
            else:
                code = str(parsed_input[1])
                # BUG FIX: an unknown ISO code used to raise KeyError inside
                # the (bare) except handler; validate the code first and only
                # catch OSError from open().  The file is also opened once
                # instead of twice.
                if code not in iso_code_countries:
                    print('Invalid country ISO code')
                    continue
                path = os.path.dirname(os.path.abspath(__file__)) + '/../data/' + iso_code_countries[code]
                try:
                    input_data = open(path)
                except OSError:
                    print(iso_code_countries[code], "is not accesible")
                    continue
                file_name = code
                parsed_data = parse_file(input_data)
                input_data.close()
                if parsed_data != 0:
                    parsed_data = compute_data(parsed_data)
                    print("Loaded and computed data from", parsed_input[1])
            continue
        if cmd == 'delete':
            parsed_data = 0
            file_name = ''
            continue
        if cmd == 'fetch':
            if len(parsed_input) != 2:
                print('Usage: fetch [ISO_CODE]')
            elif parsed_input[1] not in iso_code_countries:
                print('Invalid country ISO code')
            elif parsed_input[1] == 'PER':
                fetch_data_peru()
                print('PER raw data was updated.')
            else:  # only remaining valid code is USA
                fetch_data_usa()
                print('USA raw data was updated.')
            continue
        if cmd == 'update':
            if len(parsed_input) != 2:
                print('Usage: update [ISO_CODE]')
            elif parsed_input[1] not in iso_code_countries:
                print('Invalid country ISO code')
            else:
                if parsed_input[1] == 'USA':
                    new_data = diff_raw_USA_data()
                else:
                    new_data = diff_raw_PER_data()
                if len(new_data) == 0:
                    print(iso_code_countries[parsed_input[1]], 'is up to date.')
                else:
                    print_new_data(new_data)
                    save_new = input('Save new data up to what index? (Type integer or N/n to abort): ')
                    if save_new.isdigit() and 0 <= int(save_new) < len(new_data):
                        if update_country_data(iso_code_countries[parsed_input[1]], int(save_new), new_data) == True:
                            print('Updated data in', iso_code_countries[parsed_input[1]])
                    elif save_new == 'N' or save_new == 'n':
                        new_data = []
                        print('Data discarded')  # BUG FIX: was 'discarted'
                    else:
                        print('Invalid input. Make sure the input is an integer between 0 and', len(new_data) - 1)
            continue
        # --- every remaining command requires a loaded data set -------------
        if parsed_data == 0:
            print('Data has not been loaded into memory')
            continue
        if cmd in show_commands:
            show_commands[cmd](header_fields, parsed_data)
            continue
        if cmd == 'export_csv':
            list_to_csv(parsed_data)
            continue
        if cmd == 'diff':
            if len(parsed_input) != 3:
                print("usage: diff [from_day] [to_day]")
            else:
                day_range = _parse_day_range(parsed_input, parsed_data)
                if day_range is not None:
                    difference(parsed_data, day_range[0], day_range[1])
            continue
        if cmd == 'projection':
            if len(parsed_input) != 3:
                print("Usage: projection [next_days] [avg_previous_days]")
            # BUG FIX: the second argument was tested for truthiness only
            # (missing .isdigit()), so a non-numeric value crashed int().
            elif not parsed_input[1].isdigit() or not parsed_input[2].isdigit():
                print("[next_days] and [avg_previous_days] values must be integers.")
            else:
                projection(int(parsed_input[1]), int(parsed_input[2]), parsed_data)
            continue
        if cmd in _PLOT_SPECS:
            column, color, label = _PLOT_SPECS[cmd]
            trend = cmd == 'plot_cases_trend'  # only this command draws a trendline
            if len(parsed_input) != 3 and len(parsed_input) != 1:
                print("Usage: " + cmd)
                print("       " + cmd + " [from day] [to_day]")
            elif len(parsed_input) == 1:
                plot_graph(parsed_data[6], parsed_data[column], color, "Days", label,
                           _title_asof(file_name, label, parsed_data), trend=trend)
            else:
                day_range = _parse_day_range(parsed_input, parsed_data)
                if day_range is not None:
                    from_day, to_day = day_range
                    plot_graph(parsed_data[6][from_day:to_day + 1],
                               parsed_data[column][from_day:to_day + 1],
                               color, "Days", label,
                               _title_range(file_name, label, parsed_data, from_day, to_day),
                               trend=trend)
            continue
        if cmd in _LOG_SPECS:
            column, color, label = _LOG_SPECS[cmd]
            if len(parsed_input) != 1:
                print("Usage: " + cmd)
            else:
                plot_graph(parsed_data[6], parsed_data[column], color, "Days", label,
                           _title_asof(file_name, label, parsed_data), log_view=True)
            continue
        if cmd == 'plot_all':
            if len(parsed_input) != 3 and len(parsed_input) != 1:
                print("Usage: plot_all")
                print("       plot_all [from day] [to_day]")
            elif len(parsed_input) == 1:
                plot_graph_all(parsed_data,
                               file_name + ": Cases, Deaths, Recovered, Active Cases as of " + parsed_data[0][len(parsed_data[0]) - 1],
                               0, len(parsed_data[0]) - 1)
            else:
                day_range = _parse_day_range(parsed_input, parsed_data)
                if day_range is not None:
                    from_day, to_day = day_range
                    plot_graph_all(parsed_data,
                                   file_name + ": Cases, Deaths, Recovered, Active Cases from " + parsed_data[0][from_day] + " to " + parsed_data[0][to_day],
                                   from_day, to_day)
            continue
        if cmd == 'save_plots':
            if len(parsed_input) != 3 and len(parsed_input) != 1:
                print("Usage: save_plots")
                print("       save_plots [from day] [to_day]")
            elif len(parsed_input) == 1:
                for column, color, label, stem in _SAVE_SPECS:
                    plot_graph(parsed_data[6], parsed_data[column], color, "Days", label,
                               _title_asof(file_name, label, parsed_data),
                               file_name=stem + '.png', save=True)
                print('Successfully generated graphs.')
            else:
                day_range = _parse_day_range(parsed_input, parsed_data)
                if day_range is not None:
                    from_day, to_day = day_range
                    for column, color, label, stem in _SAVE_SPECS:
                        plot_graph(parsed_data[6][from_day:to_day + 1],
                                   parsed_data[column][from_day:to_day + 1],
                                   color, "Days", label,
                                   _title_range(file_name, label, parsed_data, from_day, to_day),
                                   file_name=stem + '_from_' + parsed_data[0][from_day] + '_to_' + parsed_data[0][to_day] + '.png',
                                   save=True)
                    print('Successfully generated graphs.')
            continue
        print('Invalid input. For instructions type "help".')
#################################################################################################################################
command_line()
| 69.577739
| 208
| 0.56606
| 5,179
| 39,381
| 4.081676
| 0.040355
| 0.172241
| 0.13047
| 0.074507
| 0.817446
| 0.796206
| 0.770093
| 0.733857
| 0.721084
| 0.712002
| 0
| 0.030083
| 0.285061
| 39,381
| 565
| 209
| 69.700885
| 0.720725
| 0
| 0
| 0.437751
| 0
| 0.002008
| 0.274059
| 0.002702
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002008
| false
| 0
| 0.052209
| 0
| 0.054217
| 0.303213
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7c9a9565b343e682c2356019e7f903be9995a34e
| 1,992
|
py
|
Python
|
tests/test_project.py
|
WinVector/data_algebra
|
3d6002ddf8231d310e03537a0435df0554b62234
|
[
"BSD-3-Clause"
] | 37
|
2019-08-28T08:16:48.000Z
|
2022-03-14T21:18:39.000Z
|
tests/test_project.py
|
WinVector/data_algebra
|
3d6002ddf8231d310e03537a0435df0554b62234
|
[
"BSD-3-Clause"
] | 1
|
2019-09-02T23:13:29.000Z
|
2019-09-08T01:43:10.000Z
|
tests/test_project.py
|
WinVector/data_algebra
|
3d6002ddf8231d310e03537a0435df0554b62234
|
[
"BSD-3-Clause"
] | 3
|
2019-08-28T12:23:11.000Z
|
2020-02-08T19:22:31.000Z
|
import data_algebra
import data_algebra.test_util
import data_algebra.util
from data_algebra.data_ops import *
from data_algebra.test_util import formats_to_self
import pytest
def test_project0():
    """Projecting with only group_by columns yields the distinct groups."""
    frame = data_algebra.default_data_model.pd.DataFrame(
        {"c": [1, 1, 1, 1], "g": ["a", "b", "a", "b"], "y": [1, 2, 3, 4]}
    )
    expected = data_algebra.default_data_model.pd.DataFrame(
        {"c": [1, 1], "g": ["a", "b"]}
    )
    pipeline = describe_table(frame, "d").project(group_by=["c", "g"])
    data_algebra.test_util.check_transform(ops=pipeline, data=frame, expect=expected)
def test_project_z():
    """An aggregation with no group_by collapses the table to a single row."""
    frame = data_algebra.default_data_model.pd.DataFrame(
        {"c": [1, 1, 1, 1], "g": ["a", "b", "a", "b"], "y": [1, 2, 3, 4]}
    )
    pipeline = describe_table(frame, "d").project({"c": "c.max()"})
    expected = data_algebra.default_data_model.pd.DataFrame({"c": [1]})
    data_algebra.test_util.check_transform(
        ops=pipeline, data=frame, expect=expected, empty_produces_empty=False
    )
def test_project_zz():
    """project() with neither aggregations nor grouping must be rejected."""
    frame = data_algebra.default_data_model.pd.DataFrame(
        {"c": [1, 1, 1, 1], "g": ["a", "b", "a", "b"], "y": [1, 2, 3, 4]}
    )
    with pytest.raises(ValueError):
        describe_table(frame, "d").project()
def test_project():
    """Grouped min/max aggregations produce one row per (c, g) group."""
    frame = data_algebra.default_data_model.pd.DataFrame(
        {"c": [1, 1, 1, 1], "g": ["a", "b", "a", "b"], "y": [1, 2, 3, 4]}
    )
    pipeline = describe_table(frame, "d").project(
        {"ymax": "y.max()", "ymin": "y.min()"}, group_by=["c", "g"]
    )
    expected = data_algebra.default_data_model.pd.DataFrame(
        {"c": [1, 1], "g": ["a", "b"], "ymax": [3, 4], "ymin": [1, 2]}
    )
    data_algebra.test_util.check_transform(ops=pipeline, data=frame, expect=expected)
def test_project_catch_nonagg():
    """A non-aggregating expression inside project() raises ValueError."""
    frame = data_algebra.default_data_model.pd.DataFrame(
        {"c": [1, 1, 1, 1], "g": ["a", "b", "a", "b"], "y": [1, 2, 3, 4]}
    )
    with pytest.raises(ValueError):
        describe_table(frame, "d").project({"y": "y"}, group_by=["c", "g"])
| 28.457143
| 77
| 0.569779
| 307
| 1,992
| 3.495114
| 0.159609
| 0.031687
| 0.027959
| 0.164026
| 0.795899
| 0.728798
| 0.705499
| 0.705499
| 0.705499
| 0.705499
| 0
| 0.031546
| 0.204317
| 1,992
| 69
| 78
| 28.869565
| 0.645426
| 0
| 0
| 0.340426
| 0
| 0
| 0.047691
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.106383
| false
| 0
| 0.12766
| 0
| 0.234043
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6b4497388036481737727f81d54fcb1cb50a07c3
| 109
|
py
|
Python
|
tests/micropython/viper_const_intbig.py
|
sebi5361/micropython
|
6c054cd124bc6229bee127128264dc0829dea53c
|
[
"MIT"
] | 198
|
2017-03-24T23:23:54.000Z
|
2022-01-07T07:14:00.000Z
|
tests/micropython/viper_const_intbig.py
|
sebi5361/micropython
|
6c054cd124bc6229bee127128264dc0829dea53c
|
[
"MIT"
] | 509
|
2017-03-28T19:37:18.000Z
|
2022-03-31T20:31:43.000Z
|
tests/micropython/viper_const_intbig.py
|
sebi5361/micropython
|
6c054cd124bc6229bee127128264dc0829dea53c
|
[
"MIT"
] | 187
|
2017-03-24T23:23:58.000Z
|
2022-02-25T01:48:45.000Z
|
# check loading constants
# (constant is wider than a machine word, so it exercises the viper
# emitter's big-int constant loading path — comments only; viper code
# itself is left untouched)
@micropython.viper
def f():
    return 123456789012345678901234567890
print(f())
| 13.625
| 41
| 0.761468
| 11
| 109
| 7.545455
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.322581
| 0.146789
| 109
| 7
| 42
| 15.571429
| 0.569892
| 0.211009
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0.25
| 0.5
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
86337dd4518adddb67013605bd0977428d0e4a87
| 33
|
py
|
Python
|
samples/bulk_write/__init__.py
|
zoho/zohocrm-python-sdk-2.0
|
3a93eb3b57fed4e08f26bd5b311e101cb2995411
|
[
"Apache-2.0"
] | null | null | null |
samples/bulk_write/__init__.py
|
zoho/zohocrm-python-sdk-2.0
|
3a93eb3b57fed4e08f26bd5b311e101cb2995411
|
[
"Apache-2.0"
] | null | null | null |
samples/bulk_write/__init__.py
|
zoho/zohocrm-python-sdk-2.0
|
3a93eb3b57fed4e08f26bd5b311e101cb2995411
|
[
"Apache-2.0"
] | null | null | null |
from .bulk_write import BulkWrite
| 33
| 33
| 0.878788
| 5
| 33
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 33
| 1
| 33
| 33
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
86428789e1572f34e192f78d9a8cc1179ea1912d
| 18,914
|
py
|
Python
|
unit/test_get_control_instance.py
|
FizikRoot/ansible-cartridge
|
ad06411ec701b68fbf5b8ed5e184a47ffb0ac70f
|
[
"BSD-2-Clause"
] | null | null | null |
unit/test_get_control_instance.py
|
FizikRoot/ansible-cartridge
|
ad06411ec701b68fbf5b8ed5e184a47ffb0ac70f
|
[
"BSD-2-Clause"
] | null | null | null |
unit/test_get_control_instance.py
|
FizikRoot/ansible-cartridge
|
ad06411ec701b68fbf5b8ed5e184a47ffb0ac70f
|
[
"BSD-2-Clause"
] | null | null | null |
import sys
import unittest
import module_utils.helpers as helpers
import unit.utils as utils
from unit.instance import Instance
# Register our helpers under the name the library modules import, so the
# modules under test resolve 'ansible.module_utils.helpers' without Ansible.
sys.modules['ansible.module_utils.helpers'] = helpers
import library.cartridge_get_control_instance as get_control_instance_lib
from library.cartridge_get_control_instance import get_control_instance
# Per-test overrides: advertise URI -> twophase commit version to report.
twophase_commit_versions = {}

def get_twophase_commit_versions_mock(_, advertise_uris):
    """Stub for get_twophase_commit_versions: report the configured version
    for each URI (defaulting to 1) and no error."""
    lookup = twophase_commit_versions.get
    return [lookup(uri, 1) for uri in advertise_uris], None
# Monkey-patch the library so tests control the reported twophase commit versions.
get_control_instance_lib.get_twophase_commit_versions = get_twophase_commit_versions_mock
def call_get_control_instance(app_name, console_sock, module_hostvars=None, play_hosts=None):
    """Invoke get_control_instance with the module-params dict it expects.

    When play_hosts is omitted, every host present in module_hostvars is in play.
    """
    hostvars = {} if module_hostvars is None else module_hostvars
    hosts = hostvars.keys() if play_hosts is None else play_hosts
    params = {
        'module_hostvars': hostvars,
        'play_hosts': hosts,
        'console_sock': console_sock,
        'app_name': app_name,
    }
    return get_control_instance(params)
def get_instance_hostvars(alias, replicaset_alias=None, run_dir=None, expelled=False, http_port=None):
    """Build a one-instance hostvars mapping shaped like the role's inventory.

    The advertise URI is derived from the alias as '<alias>-uri'.
    """
    instance_vars = {
        'config': {
            'advertise_uri': '%s-uri' % alias,
            'http_port': http_port,
        },
        'replicaset_alias': replicaset_alias,
        'cartridge_run_dir': run_dir,
        'expelled': expelled,
    }
    return {alias: instance_vars}
class TestGetControlInstance(unittest.TestCase):
    """Tests for get_control_instance against a stub console instance and
    scripted membership states (set via set_membership_members)."""
    def setUp(self):
        # Fresh stub instance (provides the console socket) for every test.
        self.instance = Instance()
        self.console_sock = self.instance.console_sock
        self.cookie = self.instance.cluster_cookie
        self.instance.start()
    def test_bad_members(self):
        """Malformed membership entries are either ignored (with a warning)
        or fail the call outright."""
        hostvars = get_instance_hostvars('instance-1')
        hostvars.update(get_instance_hostvars('empty-member'))
        hostvars.update(get_instance_hostvars('empty-payload'))
        # empty membership
        self.instance.set_membership_members([])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertTrue(res.failed)
        self.assertIn("No members in membership", res.msg)
        # empty member
        helpers.WARNINGS = []
        self.instance.set_membership_members([
            utils.get_member('empty-member', empty=True),
            utils.get_member('instance-1'),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed)
        self.assertIn("Incorrect members with the following URIs ignored: empty-member-uri", helpers.WARNINGS)
        # with empty payload
        helpers.WARNINGS = []
        self.instance.set_membership_members([
            utils.get_member('empty-payload', empty_payload=True),
            utils.get_member('instance-1'),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed)
        self.assertIn("Incorrect members with the following URIs ignored: empty-payload-uri", helpers.WARNINGS)
        # without alias
        helpers.WARNINGS = []
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_alias=False),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertTrue(res.failed)
        self.assertIn("There is no alive instances in the cluster", res.msg)
    def test_one_instance_without_run_dir(self):
        """With no cartridge_run_dir, the default /var/run/tarantool path is used."""
        hostvars = get_instance_hostvars('instance-1', 'some-rpl')
        # with UUID and alias
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=True),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-1',
            'console_sock': '/var/run/tarantool/myapp.instance-1.control',
            'http_port': None,
        })
    def test_one_instance(self):
        """A single instance is selected whether or not it has a UUID,
        but never when it is dead."""
        hostvars = get_instance_hostvars('instance-1', run_dir='run-dir')
        # with UUID and alias
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=True),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-1',
            'console_sock': 'run-dir/myapp.instance-1.control',
            'http_port': None,
        })
        # without UUID, instance is dead
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=False, status='dead'),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertTrue(res.failed)
        self.assertIn("There is no alive instances in the cluster", res.msg)
        # without UUID
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=False),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-1',
            'console_sock': 'run-dir/myapp.instance-1.control',
            'http_port': None,
        })
    def test_two_instances(self):
        """Joined (UUID-bearing) instances win over unjoined ones; ties are
        broken by lexicographic URI order."""
        hostvars = {}
        hostvars.update(get_instance_hostvars('instance-1', run_dir='run-dir-1', http_port=8081))
        hostvars.update(get_instance_hostvars('instance-2', run_dir='run-dir-2', http_port=8082))
        # both with UUID and alias
        # instance-1 is selected since its URI is
        # first lexicographically
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=True),
            utils.get_member('instance-2', with_uuid=True),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-1',
            'console_sock': 'run-dir-1/myapp.instance-1.control',
            'http_port': 8081,
        })
        # one with UUID (it is selected)
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=False),
            utils.get_member('instance-2', with_uuid=True),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-2',
            'console_sock': 'run-dir-2/myapp.instance-2.control',
            'http_port': 8082,
        })
        # one with UUID, but dead
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=False),
            utils.get_member('instance-2', with_uuid=True, status='dead'),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertTrue(res.failed)
        self.assertIn("There is no alive joined instances in the cluster", res.msg)
        # one with UUID (but without alias)
        helpers.WARNINGS = []
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=False),
            utils.get_member('instance-2', with_uuid=True, with_alias=False),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed)
        self.assertIn("Incorrect members with the following URIs ignored: instance-2-uri", helpers.WARNINGS)
        # both without UUID (one is selected)
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=False),
            utils.get_member('instance-2', with_uuid=False),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-1',
            'console_sock': 'run-dir-1/myapp.instance-1.control',
            'http_port': 8081,
        })
        # both without UUID and dead
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=False, status='dead'),
            utils.get_member('instance-2', with_uuid=False, status='dead'),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertTrue(res.failed)
        self.assertIn("There is no alive instances in the cluster", res.msg)
    def test_no_joined_instances(self):
        """With no joined instances, selection falls back over play_hosts,
        skipping expelled instances and the stateboard."""
        hostvars = {}
        hostvars.update(get_instance_hostvars('instance-4', 'some-rpl', run_dir='run-dir-4', http_port=8084))
        hostvars.update(get_instance_hostvars('instance-3', 'some-rpl', run_dir='run-dir-3', http_port=8083))
        hostvars.update(get_instance_hostvars('instance-2-no-rpl', run_dir='run-dir-2', http_port=8082))
        hostvars.update(get_instance_hostvars(
            'instance-1-expelled', run_dir='run-dir-1', http_port=8081, expelled=True
        ))
        hostvars.update({'my-stateboard': {'stateboard': True}})
        self.instance.set_membership_members([
            utils.get_member('instance-4', with_uuid=False),
            utils.get_member('instance-3', with_uuid=False),
            utils.get_member('instance-2-no-rpl', with_uuid=False),
            utils.get_member('instance-1-expelled', with_uuid=False),
        ])
        # all instances are in play_hosts
        # instance-3 is selected by lexicographic order
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-3',
            'console_sock': 'run-dir-3/myapp.instance-3.control',
            'http_port': 8083,
        })
        # only instances w/o replicaset_alias, expelled and stateboard
        # are in play_hosts
        res = call_get_control_instance('myapp', self.console_sock, hostvars, play_hosts=[
            'instance-2-no-rpl', 'expelled-instance', 'my-stateboard',
        ])
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-2-no-rpl',
            'console_sock': 'run-dir-2/myapp.instance-2-no-rpl.control',
            'http_port': 8082,
        })
        # only expelled and stateboard instances are in play_hosts
        res = call_get_control_instance('myapp', self.console_sock, hostvars, play_hosts=[
            'expelled-instance', 'my-stateboard',
        ])
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-2-no-rpl',
            'console_sock': 'run-dir-2/myapp.instance-2-no-rpl.control',
            'http_port': 8082,
        })
        # instance w/o replicaset alias is dead
        self.instance.set_membership_members([
            utils.get_member('instance-4', with_uuid=False),
            utils.get_member('instance-3', with_uuid=False),
            utils.get_member('instance-2-no-rpl', with_uuid=False, status='dead'),
            utils.get_member('instance-1-expelled', with_uuid=False),
        ])
        # only expelled and stateboard instances are in play_hosts
        res = call_get_control_instance('myapp', self.console_sock, hostvars, play_hosts=[
            'expelled-instance', 'my-stateboard',
        ])
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-3',
            'console_sock': 'run-dir-3/myapp.instance-3.control',
            'http_port': 8083,
        })
    def test_instance_not_in_hostvars(self):
        """A joined member missing from the inventory fails the call."""
        hostvars = {}
        hostvars.update(get_instance_hostvars('instance-1', 'some-rpl'))
        hostvars.update(get_instance_hostvars('instance-2', 'some-rpl'))
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=False),
            utils.get_member('instance-2', with_uuid=False),
            utils.get_member('instance-3', with_uuid=True),  # has UUID but not in hostvars
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertTrue(res.failed, res.fact)
        self.assertIn("Membership contains instance instance-3 that isn't described in inventor", res.msg)
    def test_twophase_commit_versions(self):
        """The instance with the minimal twophase commit version is preferred
        over the lexicographic choice (among equally-joined candidates)."""
        hostvars = {}
        hostvars.update(get_instance_hostvars('instance-1', 'some-rpl', run_dir='run-dir-1', http_port=8081))
        hostvars.update(get_instance_hostvars('instance-2', 'some-rpl', run_dir='run-dir-2', http_port=8082))
        hostvars.update(get_instance_hostvars('instance-3', 'some-rpl', run_dir='run-dir-3', http_port=8083))
        # instance-3 has lower version of twophase commit
        global twophase_commit_versions
        twophase_commit_versions = {
            'instance-1-uri': 3,
            'instance-2-uri': 2,
            'instance-3-uri': 1,
        }
        # all with UUID and alias - instance-3 is selected
        # (instead of instance-1 by lexicographic order)
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=True),
            utils.get_member('instance-2', with_uuid=True),
            utils.get_member('instance-3', with_uuid=True),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-3',
            'console_sock': 'run-dir-3/myapp.instance-3.control',
            'http_port': 8083,
        })
        # all without UUID - instance-3 is selected
        # (instead of instance-1 by lexicographic order)
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=False),
            utils.get_member('instance-2', with_uuid=False),
            utils.get_member('instance-3', with_uuid=False),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-3',
            'console_sock': 'run-dir-3/myapp.instance-3.control',
            'http_port': 8083,
        })
        # instance-1 and instance-2 has UUIDs
        # instance-2 is chosen instead of instance-3 with minimal twophase commit version
        # because instance-2 has minimal twophase commit version between instances with UUIDS
        self.instance.set_membership_members([
            utils.get_member('instance-1', with_uuid=True),
            utils.get_member('instance-2', with_uuid=True),
            utils.get_member('instance-3', with_uuid=False),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'instance-2',
            'console_sock': 'run-dir-2/myapp.instance-2.control',
            'http_port': 8082,
        })
    def test_dead_instances(self):
        """Dead members are skipped at every stage of candidate selection."""
        # first joined instance is dead
        hostvars = {}
        hostvars.update(get_instance_hostvars('joined-1', 'some-rpl', http_port=8081))
        hostvars.update(get_instance_hostvars('joined-2', 'some-rpl', http_port=8082))
        hostvars.update(get_instance_hostvars('not-joined-1', 'some-rpl', http_port=8083))
        hostvars.update(get_instance_hostvars('not-joined-2', 'some-rpl', http_port=8084))
        self.instance.set_membership_members([
            utils.get_member('joined-1', with_uuid=True, status='dead'),
            utils.get_member('joined-2', with_uuid=True),
            utils.get_member('not-joined-1'),
            utils.get_member('not-joined-2'),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'joined-2',
            'console_sock': '/var/run/tarantool/myapp.joined-2.control',
            'http_port': 8082,
        })
        # all joined instances are dead
        hostvars = {}
        hostvars.update(get_instance_hostvars('joined-1', 'some-rpl'))
        hostvars.update(get_instance_hostvars('joined-2', 'some-rpl'))
        hostvars.update(get_instance_hostvars('not-joined-1', 'some-rpl'))
        hostvars.update(get_instance_hostvars('not-joined-2', 'some-rpl'))
        self.instance.set_membership_members([
            utils.get_member('joined-1', with_uuid=True, status='dead'),
            utils.get_member('joined-2', with_uuid=True, status='suspect'),
            utils.get_member('not-joined-1'),
            utils.get_member('not-joined-2'),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertTrue(res.failed)
        self.assertEqual(res.msg, "There is no alive joined instances in the cluster")
        # no joined, first unjoined instance is dead
        hostvars = {}
        hostvars.update(get_instance_hostvars('not-joined-1', 'some-rpl', http_port=8081))
        hostvars.update(get_instance_hostvars('not-joined-2', 'some-rpl', http_port=8082))
        self.instance.set_membership_members([
            utils.get_member('not-joined-1', status='dead'),
            utils.get_member('not-joined-2'),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'not-joined-2',
            'console_sock': '/var/run/tarantool/myapp.not-joined-2.control',
            'http_port': 8082,
        })
        # no joined, first unjoined instance is dead,
        # second doesn't have replicaset alias
        hostvars = {}
        hostvars.update(get_instance_hostvars('not-joined-1', 'some-rpl', http_port=8081))
        hostvars.update(get_instance_hostvars('not-joined-2', http_port=8082))
        hostvars.update(get_instance_hostvars('not-joined-3', 'some-rpl', http_port=8083))
        self.instance.set_membership_members([
            utils.get_member('not-joined-1', status='dead'),
            utils.get_member('not-joined-2'),
            utils.get_member('not-joined-3'),
        ])
        res = call_get_control_instance('myapp', self.console_sock, hostvars)
        self.assertFalse(res.failed, msg=res.msg)
        self.assertEqual(res.fact, {
            'name': 'not-joined-3',
            'console_sock': '/var/run/tarantool/myapp.not-joined-3.control',
            'http_port': 8083,
        })
    def tearDown(self):
        # Stop the stub and drop the reference so each test starts clean.
        self.instance.stop()
        del self.instance
| 41.387309
| 111
| 0.633076
| 2,316
| 18,914
| 4.965458
| 0.065199
| 0.037565
| 0.065739
| 0.074609
| 0.803739
| 0.780348
| 0.764
| 0.749304
| 0.730957
| 0.68487
| 0
| 0.019236
| 0.241408
| 18,914
| 456
| 112
| 41.47807
| 0.782269
| 0.071006
| 0
| 0.672316
| 0
| 0
| 0.190327
| 0.035362
| 0
| 0
| 0
| 0
| 0.146893
| 1
| 0.036723
| false
| 0
| 0.019774
| 0.002825
| 0.067797
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
86ad5976e4505e335db2fea9d7c8a57101a0ae28
| 68
|
py
|
Python
|
my_package/__init__.py
|
leowindwave/YAPT
|
ee5ec568ed746f90a18dc514836624d435a7ccdb
|
[
"CC0-1.0"
] | null | null | null |
my_package/__init__.py
|
leowindwave/YAPT
|
ee5ec568ed746f90a18dc514836624d435a7ccdb
|
[
"CC0-1.0"
] | null | null | null |
my_package/__init__.py
|
leowindwave/YAPT
|
ee5ec568ed746f90a18dc514836624d435a7ccdb
|
[
"CC0-1.0"
] | null | null | null |
# Side-effect marker: shows this package __init__ ran on first import.
print("my_package/__init__.py executed")
# Import the submodule so `import my_package` also exposes my_package.my_module.
import my_package.my_module
| 34
| 40
| 0.852941
| 11
| 68
| 4.636364
| 0.727273
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044118
| 68
| 2
| 41
| 34
| 0.784615
| 0
| 0
| 0
| 0
| 0
| 0.449275
| 0.318841
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
86d3db08974881625bebc5c7410fda8f0827f2de
| 422
|
py
|
Python
|
penaltymodel_cache/penaltymodel/cache/__init__.py
|
mcfarljm/penaltymodel
|
7776ca9697a52dcf2991177e7ef2905c3e0651f5
|
[
"Apache-2.0"
] | 15
|
2018-01-03T18:48:56.000Z
|
2021-11-05T08:24:22.000Z
|
penaltymodel_cache/penaltymodel/cache/__init__.py
|
mcfarljm/penaltymodel
|
7776ca9697a52dcf2991177e7ef2905c3e0651f5
|
[
"Apache-2.0"
] | 77
|
2017-12-15T01:23:35.000Z
|
2022-01-07T23:41:58.000Z
|
penaltymodel_cache/penaltymodel/cache/__init__.py
|
mcfarljm/penaltymodel
|
7776ca9697a52dcf2991177e7ef2905c3e0651f5
|
[
"Apache-2.0"
] | 17
|
2017-12-15T00:47:34.000Z
|
2022-01-26T06:07:13.000Z
|
from __future__ import absolute_import
import penaltymodel.cache.schema
from penaltymodel.cache.database_manager import *
import penaltymodel.cache.database_manager
from penaltymodel.cache.cache_manager import *
import penaltymodel.cache.cache_manager
from penaltymodel.cache.interface import *
import penaltymodel.cache.interface
from penaltymodel.cache.package_info import *
from penaltymodel.cache.utils import *
| 24.823529
| 49
| 0.85545
| 51
| 422
| 6.882353
| 0.254902
| 0.435897
| 0.299145
| 0.330484
| 0.205128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090047
| 422
| 16
| 50
| 26.375
| 0.914063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d48c8fb6cfa80868b2ee0b6c331449af33f5dbfb
| 28,477
|
py
|
Python
|
ibnfmeta.py
|
charlesap/ibnf
|
0efc87c7f250594d77fe400ba2f90f812ebe5aa1
|
[
"MIT",
"Unlicense"
] | 1
|
2019-07-29T11:12:53.000Z
|
2019-07-29T11:12:53.000Z
|
ibnfmeta.py
|
charlesap/ibnf
|
0efc87c7f250594d77fe400ba2f90f812ebe5aa1
|
[
"MIT",
"Unlicense"
] | null | null | null |
ibnfmeta.py
|
charlesap/ibnf
|
0efc87c7f250594d77fe400ba2f90f812ebe5aa1
|
[
"MIT",
"Unlicense"
] | null | null | null |
import sys
from binascii import *
# Read the grammar input (argv[1]) and semantics file (argv[2]); open the
# output file (argv[3]) for writing.
# NOTE: the original used the Python-2-only `file()` builtin; `open()` is
# equivalent on Python 2 and is the only spelling that exists on Python 3.
fi = open(sys.argv[1]).read()
semantics = open(sys.argv[2]).read()
fo = open(sys.argv[3], "w+")
# Global parser state: h is the memo table keyed by "rule-position" strings;
# T/F are shorthand booleans used throughout the generated parser.
h={}; registers={}; context={}; mseq=0; dseq=1; T=True; F=False
def n2z( a ):
    """Normalize: map the empty string to '0'; pass any other value through."""
    if a == '':
        return '0'
    return a
def be2le( a ):
    """Swap the byte order of an 8-hex-digit string (big- to little-endian)."""
    return ''.join(a[i:i + 2] for i in (6, 4, 2, 0))
def mark( p, s, t ):
    """Record parse result t for rule p at input position s in the global
    memo table h, then return t unchanged.

    t is a 6-tuple (value, memoizable, start, length, context, ast); only
    (value, memoizable, length, ast) is stored. An entry is written when the
    result is memoizable (t[1]) or when it is a failure (not t[0]).
    """
    ( v, m, ss, l, c, a ) = t
    if t[1]: x = p +"-" + str(s); h[x]=(v,m,l,a); return t
    else:
        if not t[0]: x = p +"-" + str(s); h[x]=(v,m,l,a); return t
    return t
def been(p, s):
    """Return the memoized 'memoizable' flag for rule p at position s, or
    False when no memo entry exists yet.

    Uses the `in` operator instead of dict.has_key(): identical behavior on
    Python 2, and has_key() was removed in Python 3. Also avoids building
    the key string twice.
    """
    key = p + "-" + str(s)
    if key in h:
        return h[key][1]
    return False
# Replay a memoized result for rule p at position s, re-packed as a full
# 6-tuple with the caller's current context c.
def was(c,p,s): (v,m,l,a) = h[p+"-"+str(s)]; return (v,m,s,l,c,a)
def cm( ch, s, c ):
    """Match the single character ch against global input fi at offset s.

    Returns the standard result tuple
    (matched, memoizable, start, length, context, ast); length is 1 on a
    match, 0 on failure or end of input.
    """
    if s < len(fi):
        if fi[s] == ch: return ( T, T, s, 1, c, ( "cm", fi[s] ) )
    return ( False, True, s, 0, c, ( "cm", "") )
def andmemo( m ):
    """Return True when every memo flag recorded in m is truthy.

    Vacuously True for an empty dict — same as the original manual flag
    loop, but expressed with the idiomatic all().
    """
    return all(m.values())
# Accumulated generated output; written out by the caller at the end.
outdata = ""
def output( s ):
    """Append str(s) to the global output buffer."""
    global outdata
    outdata = outdata + str(s)
def syntax_p( s, c):
    """Parse rule 'syntax' (-> rules) at position s; memoized via been/was/mark."""
    if been("syntax",s): return was( c, "syntax",s)
    else:
        mark("syntax",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                rules_p ( (ts+tl), tc)
        if ok:
            rv=syntax_s(a,andmemo(mem),s,ts+tl,tc,"syntax")
            return mark("syntax",s,rv)
        return mark("syntax",s,(F,T,s,0,c,("","")))
def rules_p( s, c):
    """Parse 'rules' (-> rule rules?); the recursive tail binds to nok,
    so a missing tail does not fail the rule (optional match)."""
    if been("rules",s): return was( c, "rules",s)
    else:
        mark("rules",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                rule_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                rules_p ( (ts+tl), tc)
        if ok:
            rv=rules_s(a,andmemo(mem),s,ts+tl,tc,"rules")
            return mark("rules",s,rv)
        return mark("rules",s,(F,T,s,0,c,("","")))
def rule_p( s, c):
    """Parse 'rule': first of incorp | altern | blankline that matches."""
    if been("rule",s): return was( c, "rule",s)
    else:
        mark("rule",s,(F,T,s,0,c,("","")));met = F
        if not met: (met,mem,ts,tl,tc,ta)=incorp_p(s,c)
        if not met: (met,mem,ts,tl,tc,ta)=altern_p(s,c)
        if not met: (met,mem,ts,tl,tc,ta)=blankline_p(s,c)
        if not met:
            return mark("rule",s,(F,T,s,0,c,("","")))
        else:
            return mark("rule",s,(met,mem,s,tl,tc,ta))
def blankline_p( s, c):
    """Parse 'blankline': optional whitespace (s_p, bound to nok) followed
    by a required newline."""
    if been("blankline",s): return was( c, "blankline",s)
    else:
        mark("blankline",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                cm(chr(10) ,(ts+tl), tc)
        if ok:
            rv=blankline_s(a,andmemo(mem),s,ts+tl,tc,"blankline")
            return mark("blankline",s,rv)
        return mark("blankline",s,(F,T,s,0,c,("","")))
def altern_p( s, c):
    """Parse 'altern': name '?' albody ';' newline, with optional whitespace
    (s_p steps, bound to nok) between the required pieces."""
    if been("altern",s): return was( c, "altern",s)
    else:
        mark("altern",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                name_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                cm('?',(ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                albody_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                cm(';',(ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                cm(chr(10) ,(ts+tl), tc)
        if ok:
            rv=altern_s(a,andmemo(mem),s,ts+tl,tc,"altern")
            return mark("altern",s,rv)
        return mark("altern",s,(F,T,s,0,c,("","")))
def incorp_p( s, c):
    """Parse 'incorp': name iflag inbody ';' newline, with optional
    whitespace (s_p steps, bound to nok) between the required pieces."""
    if been("incorp",s): return was( c, "incorp",s)
    else:
        mark("incorp",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                name_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                iflag_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                inbody_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                cm(';',(ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                cm(chr(10) ,(ts+tl), tc)
        if ok:
            rv=incorp_s(a,andmemo(mem),s,ts+tl,tc,"incorp")
            return mark("incorp",s,rv)
        return mark("incorp",s,(F,T,s,0,c,("","")))
def iflag_p( s, c):
    """Parse 'iflag': a single incorporation flag, '/' or '='."""
    if been("iflag",s): return was( c, "iflag",s)
    else:
        mark("iflag",s,(F,T,s,0,c,("","")));met = F
        if not met: (met,mem,ts,tl,tc,ta)=cm('/',s,c)
        if not met: (met,mem,ts,tl,tc,ta)=cm('=',s,c)
        if not met:
            return mark("iflag",s,(F,T,s,0,c,("","")))
        else:
            return mark("iflag",s,(met,mem,s,tl,tc,ta))
def name_p( s, c):
    """Parse 'name': a lowercase letter (lwr_p) followed by an optional
    recursive tail (bound to nok)."""
    if been("name",s): return was( c, "name",s)
    else:
        mark("name",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                lwr_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                name_p ( (ts+tl), tc)
        if ok:
            rv=name_s(a,andmemo(mem),s,ts+tl,tc,"name")
            return mark("name",s,rv)
        return mark("name",s,(F,T,s,0,c,("","")))
# Semantic action for 'name': succeed, with the matched input fi[s:e] as AST payload.
def name_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
def albody_p( s, c):
    """Parse 'albody': optional whitespace, a required item (nit), then an
    optional continuation (almore, bound to nok)."""
    if been("albody",s): return was( c, "albody",s)
    else:
        mark("albody",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                nit_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                almore_p ( (ts+tl), tc)
        if ok:
            rv=albody_s(a,andmemo(mem),s,ts+tl,tc,"albody")
            return mark("albody",s,rv)
        return mark("albody",s,(F,T,s,0,c,("","")))
def almore_p( s, c):
    """Parse 'almore': '|' (after optional whitespace and an optional
    newline) followed by another albody — the alternation continuation."""
    if been("almore",s): return was( c, "almore",s)
    else:
        mark("almore",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                cm('|',(ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                alnewline_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                albody_p ( (ts+tl), tc)
        if ok:
            rv=almore_s(a,andmemo(mem),s,ts+tl,tc,"almore")
            return mark("almore",s,rv)
        return mark("almore",s,(F,T,s,0,c,("","")))
def alnewline_p( s, c):
    """Parse 'alnewline': optional whitespace followed by a newline (allows
    an alternation to continue on the next line)."""
    if been("alnewline",s): return was( c, "alnewline",s)
    else:
        mark("alnewline",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                cm(chr(10) ,(ts+tl), tc)
        if ok:
            rv=alnewline_s(a,andmemo(mem),s,ts+tl,tc,"alnewline")
            return mark("alnewline",s,rv)
        return mark("alnewline",s,(F,T,s,0,c,("","")))
# Semantic action for 'alnewline': succeed, with the matched input fi[s:e] as AST payload.
def alnewline_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
def inbody_p( s, c):
    """Parse 'inbody': optional whitespace, a required item (onit), then an
    optional recursive tail (bound to nok)."""
    if been("inbody",s): return was( c, "inbody",s)
    else:
        mark("inbody",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                s_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                onit_p ( (ts+tl), tc)
        if ok:
            n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
                inbody_p ( (ts+tl), tc)
        if ok:
            rv=inbody_s(a,andmemo(mem),s,ts+tl,tc,"inbody")
            return mark("inbody",s,rv)
        return mark("inbody",s,(F,T,s,0,c,("","")))
def onit_p( s, c):
    """Parse 'onit': first of pnit | nit that matches."""
    if been("onit",s): return was( c, "onit",s)
    else:
        mark("onit",s,(F,T,s,0,c,("","")));met = F
        if not met: (met,mem,ts,tl,tc,ta)=pnit_p(s,c)
        if not met: (met,mem,ts,tl,tc,ta)=nit_p(s,c)
        if not met:
            return mark("onit",s,(F,T,s,0,c,("","")))
        else:
            return mark("onit",s,(met,mem,s,tl,tc,ta))
def pnit_p( s, c):
    """Parse 'pnit': a '.'-prefixed item ('.' followed by nit)."""
    if been("pnit",s): return was( c, "pnit",s)
    else:
        mark("pnit",s,(F,T,s,0,c,("","")))
        ok=True; ts=s; tl=0; a={0: ("","")}
        mem={0:True}; tc=c; n=0
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                cm('.',(ts+tl), tc)
        if ok:
            n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
                nit_p ( (ts+tl), tc)
        if ok:
            rv=pnit_s(a,andmemo(mem),s,ts+tl,tc,"pnit")
            return mark("pnit",s,rv)
        return mark("pnit",s,(F,T,s,0,c,("","")))
def nit_p( s, c):
    """Parse 'nit': first of name | cmatch that matches."""
    if been("nit",s): return was( c, "nit",s)
    else:
        mark("nit",s,(F,T,s,0,c,("","")));met = F
        if not met: (met,mem,ts,tl,tc,ta)=name_p(s,c)
        if not met: (met,mem,ts,tl,tc,ta)=cmatch_p(s,c)
        if not met:
            return mark("nit",s,(F,T,s,0,c,("","")))
        else:
            return mark("nit",s,(met,mem,s,tl,tc,ta))
# Parser rule "cmatch" (sequence): "'" sch "'" -- a character literal in the
# grammar, i.e. any single character wrapped in single quotes (chr(39) is ').
# Generated sequence-rule code, memoized.
def cmatch_p( s, c):
  if been("cmatch",s): return was( c, "cmatch",s)
  else:
    mark("cmatch",s,(F,T,s,0,c,("","")))
    ok=True; ts=s; tl=0; a={0: ("","")}
    mem={0:True}; tc=c; n=0
    if ok:
      # opening single quote
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
            cm(chr(39) ,(ts+tl), tc)
    if ok:
      # the quoted character itself
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
           sch_p ( (ts+tl), tc)
    if ok:
      # closing single quote
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
            cm(chr(39) ,(ts+tl), tc)
    if ok:
      rv=cmatch_s(a,andmemo(mem),s,ts+tl,tc,"cmatch")
      return mark("cmatch",s,rv)
    return mark("cmatch",s,(F,T,s,0,c,("","")))
# Parser rule "dgt" (alternation): a single decimal digit '0'..'9'.
# Generated alternation code, memoized.
def dgt_p( s, c):
  if been("dgt",s): return was( c, "dgt",s)
  else:
    mark("dgt",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=cm('0',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('1',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('2',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('3',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('4',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('5',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('6',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('7',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('8',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('9',s,c)
    if not met:
      return mark("dgt",s,(F,T,s,0,c,("","")))
    else:
      return mark("dgt",s,(met,mem,s,tl,tc,ta))
# Parser rule "upr" (alternation): a single upper-case letter 'A'..'Z'.
# Generated alternation code, memoized.
def upr_p( s, c):
  if been("upr",s): return was( c, "upr",s)
  else:
    mark("upr",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=cm('A',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('B',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('C',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('D',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('E',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('F',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('G',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('H',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('I',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('J',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('K',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('L',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('M',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('N',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('O',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('P',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('Q',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('R',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('S',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('T',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('U',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('V',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('W',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('X',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('Y',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('Z',s,c)
    if not met:
      return mark("upr",s,(F,T,s,0,c,("","")))
    else:
      return mark("upr",s,(met,mem,s,tl,tc,ta))
# Parser rule "lwr" (alternation): a single lower-case letter 'a'..'z'.
# Generated alternation code, memoized.
def lwr_p( s, c):
  if been("lwr",s): return was( c, "lwr",s)
  else:
    mark("lwr",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=cm('a',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('b',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('c',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('d',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('e',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('f',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('g',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('h',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('i',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('j',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('k',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('l',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('m',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('n',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('o',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('p',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('q',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('r',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('s',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('t',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('u',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('v',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('w',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('x',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('y',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('z',s,c)
    if not met:
      return mark("lwr",s,(F,T,s,0,c,("","")))
    else:
      return mark("lwr",s,(met,mem,s,tl,tc,ta))
# Parser rule "alp" (alternation): upr | lwr -- any single letter.
def alp_p( s, c):
  if been("alp",s): return was( c, "alp",s)
  else:
    mark("alp",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=upr_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=lwr_p(s,c)
    if not met:
      return mark("alp",s,(F,T,s,0,c,("","")))
    else:
      return mark("alp",s,(met,mem,s,tl,tc,ta))
# Parser rule "aln" (alternation): upr | lwr | dgt -- any alphanumeric char.
def aln_p( s, c):
  if been("aln",s): return was( c, "aln",s)
  else:
    mark("aln",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=upr_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=lwr_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=dgt_p(s,c)
    if not met:
      return mark("aln",s,(F,T,s,0,c,("","")))
    else:
      return mark("aln",s,(met,mem,s,tl,tc,ta))
# Parser rule "hex" (alternation): a single hexadecimal digit
# (0-9, A-F or a-f). Generated alternation code, memoized.
def hex_p( s, c):
  if been("hex",s): return was( c, "hex",s)
  else:
    mark("hex",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=dgt_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('A',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('B',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('C',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('D',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('E',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('F',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('a',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('b',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('c',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('d',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('e',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('f',s,c)
    if not met:
      return mark("hex",s,(F,T,s,0,c,("","")))
    else:
      return mark("hex",s,(met,mem,s,tl,tc,ta))
# Parser rule "smb" (alternation): a single printable symbol/punctuation
# character. Note it excludes quote characters and backslash, which are
# handled by the escape rules (sps/bsl/btk/bqt). Generated code, memoized.
def smb_p( s, c):
  if been("smb",s): return was( c, "smb",s)
  else:
    mark("smb",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=cm('-',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('_',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('+',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('=',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('`',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('~',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('!',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('@',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('#',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('$',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('%',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('^',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('&',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('|',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('/',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm(':',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm(';',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('*',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('(',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm(')',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('[',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm(']',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('{',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('}',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm(',',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('.',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('<',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('>',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('?',s,c)
    if not met:
      return mark("smb",s,(F,T,s,0,c,("","")))
    else:
      return mark("smb",s,(met,mem,s,tl,tc,ta))
# Parser rule "sps" (alternation): one backslash escape sequence --
# bsl (\\), btk (\'), bqt (\"), bnl (\n) or btb (\t).
def sps_p( s, c):
  if been("sps",s): return was( c, "sps",s)
  else:
    mark("sps",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=bsl_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=btk_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=bqt_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=bnl_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=btb_p(s,c)
    if not met:
      return mark("sps",s,(F,T,s,0,c,("","")))
    else:
      return mark("sps",s,(met,mem,s,tl,tc,ta))
# Parser rule "bsl" (sequence): backslash backslash (chr(92) is '\') --
# an escaped literal backslash. Generated sequence-rule code, memoized.
def bsl_p( s, c):
  if been("bsl",s): return was( c, "bsl",s)
  else:
    mark("bsl",s,(F,T,s,0,c,("","")))
    ok=True; ts=s; tl=0; a={0: ("","")}
    mem={0:True}; tc=c; n=0
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
            cm(chr(92) ,(ts+tl), tc)
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
            cm(chr(92) ,(ts+tl), tc)
    if ok:
      rv=bsl_s(a,andmemo(mem),s,ts+tl,tc,"bsl")
      return mark("bsl",s,rv)
    return mark("bsl",s,(F,T,s,0,c,("","")))
# Default semantic action: AST is (rule-name, matched text).
def bsl_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
# Parser rule "btk" (sequence): backslash then single quote (chr(39) is ')
# -- an escaped quote. Generated sequence-rule code, memoized.
def btk_p( s, c):
  if been("btk",s): return was( c, "btk",s)
  else:
    mark("btk",s,(F,T,s,0,c,("","")))
    ok=True; ts=s; tl=0; a={0: ("","")}
    mem={0:True}; tc=c; n=0
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
            cm(chr(92) ,(ts+tl), tc)
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
            cm(chr(39) ,(ts+tl), tc)
    if ok:
      rv=btk_s(a,andmemo(mem),s,ts+tl,tc,"btk")
      return mark("btk",s,rv)
    return mark("btk",s,(F,T,s,0,c,("","")))
# Default semantic action: AST is (rule-name, matched text).
def btk_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
# Parser rule "bqt" (sequence): backslash then double quote (chr(34) is ")
# -- an escaped double quote. Generated sequence-rule code, memoized.
def bqt_p( s, c):
  if been("bqt",s): return was( c, "bqt",s)
  else:
    mark("bqt",s,(F,T,s,0,c,("","")))
    ok=True; ts=s; tl=0; a={0: ("","")}
    mem={0:True}; tc=c; n=0
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
            cm(chr(92) ,(ts+tl), tc)
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
            cm(chr(34) ,(ts+tl), tc)
    if ok:
      rv=bqt_s(a,andmemo(mem),s,ts+tl,tc,"bqt")
      return mark("bqt",s,rv)
    return mark("bqt",s,(F,T,s,0,c,("","")))
# Default semantic action: AST is (rule-name, matched text).
def bqt_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
# Parser rule "bnl" (sequence): backslash then 'n' -- the two-character
# escape "\n". Generated sequence-rule code, memoized.
def bnl_p( s, c):
  if been("bnl",s): return was( c, "bnl",s)
  else:
    mark("bnl",s,(F,T,s,0,c,("","")))
    ok=True; ts=s; tl=0; a={0: ("","")}
    mem={0:True}; tc=c; n=0
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
            cm(chr(92) ,(ts+tl), tc)
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
           cm('n',(ts+tl), tc)
    if ok:
      rv=bnl_s(a,andmemo(mem),s,ts+tl,tc,"bnl")
      return mark("bnl",s,rv)
    return mark("bnl",s,(F,T,s,0,c,("","")))
# Default semantic action: AST is (rule-name, matched text).
def bnl_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
# Parser rule "btb" (sequence): backslash then 't' -- the two-character
# escape "\t". Generated sequence-rule code, memoized.
def btb_p( s, c):
  if been("btb",s): return was( c, "btb",s)
  else:
    mark("btb",s,(F,T,s,0,c,("","")))
    ok=True; ts=s; tl=0; a={0: ("","")}
    mem={0:True}; tc=c; n=0
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
            cm(chr(92) ,(ts+tl), tc)
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
           cm('t',(ts+tl), tc)
    if ok:
      rv=btb_s(a,andmemo(mem),s,ts+tl,tc,"btb")
      return mark("btb",s,rv)
    return mark("btb",s,(F,T,s,0,c,("","")))
# Default semantic action: AST is (rule-name, matched text).
def btb_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
# Parser rule "wsc" (alternation): one whitespace character --
# space, tab or newline.
def wsc_p( s, c):
  if been("wsc",s): return was( c, "wsc",s)
  else:
    mark("wsc",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=cm(' ',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('\t',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('\n',s,c)
    if not met:
      return mark("wsc",s,(F,T,s,0,c,("","")))
    else:
      return mark("wsc",s,(met,mem,s,tl,tc,ta))
# Parser rule "s" (sequence): sp s? -- one or more blanks/tabs (the optional
# recursive tail makes this "one-or-more"). Generated code, memoized.
def s_p( s, c):
  if been("s",s): return was( c, "s",s)
  else:
    mark("s",s,(F,T,s,0,c,("","")))
    ok=True; ts=s; tl=0; a={0: ("","")}
    mem={0:True}; tc=c; n=0
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
           sp_p ( (ts+tl), tc)
    if ok:
      # optional recursive tail; failure discarded into `nok`
      n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
           s_p ( (ts+tl), tc)
    if ok:
      rv=s_s(a,andmemo(mem),s,ts+tl,tc,"s")
      return mark("s",s,rv)
    return mark("s",s,(F,T,s,0,c,("","")))
# Default semantic action: AST is (rule-name, matched text).
def s_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
# Parser rule "sp" (alternation): a single blank or tab (no newline).
def sp_p( s, c):
  if been("sp",s): return was( c, "sp",s)
  else:
    mark("sp",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=cm(' ',s,c)
    if not met: (met,mem,ts,tl,tc,ta)=cm('\t',s,c)
    if not met:
      return mark("sp",s,(F,T,s,0,c,("","")))
    else:
      return mark("sp",s,(met,mem,s,tl,tc,ta))
# Parser rule "sch" (alternation): any single source character --
# digit | upper | lower | symbol | whitespace | escape sequence.
def sch_p( s, c):
  if been("sch",s): return was( c, "sch",s)
  else:
    mark("sch",s,(F,T,s,0,c,("","")));met = F
    if not met: (met,mem,ts,tl,tc,ta)=dgt_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=upr_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=lwr_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=smb_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=wsc_p(s,c)
    if not met: (met,mem,ts,tl,tc,ta)=sps_p(s,c)
    if not met:
      return mark("sch",s,(F,T,s,0,c,("","")))
    else:
      return mark("sch",s,(met,mem,s,tl,tc,ta))
# Parser rule "chs" (sequence): sch chs? -- one or more arbitrary characters
# (right-recursive "one-or-more"). Generated code, memoized.
def chs_p( s, c):
  if been("chs",s): return was( c, "chs",s)
  else:
    mark("chs",s,(F,T,s,0,c,("","")))
    ok=True; ts=s; tl=0; a={0: ("","")}
    mem={0:True}; tc=c; n=0
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
           sch_p ( (ts+tl), tc)
    if ok:
      # optional recursive tail; failure discarded into `nok`
      n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
           chs_p ( (ts+tl), tc)
    if ok:
      rv=chs_s(a,andmemo(mem),s,ts+tl,tc,"chs")
      return mark("chs",s,rv)
    return mark("chs",s,(F,T,s,0,c,("","")))
# Default semantic action: AST is (rule-name, matched text).
def chs_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
# Parser rule "pnt" (sequence): dgt pnt? -- one or more decimal digits
# (an unsigned integer). Generated code, memoized.
def pnt_p( s, c):
  if been("pnt",s): return was( c, "pnt",s)
  else:
    mark("pnt",s,(F,T,s,0,c,("","")))
    ok=True; ts=s; tl=0; a={0: ("","")}
    mem={0:True}; tc=c; n=0
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
           dgt_p ( (ts+tl), tc)
    if ok:
      # optional recursive tail; failure discarded into `nok`
      n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
           pnt_p ( (ts+tl), tc)
    if ok:
      rv=pnt_s(a,andmemo(mem),s,ts+tl,tc,"pnt")
      return mark("pnt",s,rv)
    return mark("pnt",s,(F,T,s,0,c,("","")))
# Default semantic action: AST is (rule-name, matched text).
def pnt_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
# Parser rule "als" (sequence): aln als? -- one or more alphanumeric
# characters (an identifier-like run). Generated code, memoized.
def als_p( s, c):
  if been("als",s): return was( c, "als",s)
  else:
    mark("als",s,(F,T,s,0,c,("","")))
    ok=True; ts=s; tl=0; a={0: ("","")}
    mem={0:True}; tc=c; n=0
    if ok:
      n=n+1; ( ok,mem[n],ts,tl,tc,a[n])=\
           aln_p ( (ts+tl), tc)
    if ok:
      # optional recursive tail; failure discarded into `nok`
      n=n+1; ( nok,mem[n],ts,tl,tc,a[n])=\
           als_p ( (ts+tl), tc)
    if ok:
      rv=als_s(a,andmemo(mem),s,ts+tl,tc,"als")
      return mark("als",s,rv)
    return mark("als",s,(F,T,s,0,c,("","")))
# Default semantic action: AST is (rule-name, matched text).
def als_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))
# Semantic action for the top-level "syntax" rule: the output program is the
# fixed prologue, the generated rule functions (a[1][1]), the user-supplied
# semantics file, and the fixed epilogue, concatenated.
def syntax_s(a,m,s,e,c,n):
  return (T,T,s,e-s,c,( "syntax", prologue + a[1][1] + semantics + epilogue ))
# Semantic action for "rules": concatenate the code generated for the first
# rule (a[1][1]) with the code for the remaining rules (a[2][1]).
def rules_s(a,m,s,e,c,n):
  return (T,T,s,e-s,c,( "rules", a[1][1] + a[2][1] ))
# Semantic action for "blankline": blank lines contribute no output code.
def blankline_s(a,m,s,e,c,n):
  return (T,T,s,e-s,c,( "blankline", "" ))
# Code generator for an alternation rule: emits a complete NAME_p function
# (a "try each branch while not met" chain, memoized via mark/been/was).
# a[2][1] is the rule name; a[5] is the parsed alternation body, rendered by
# altern_r. The template strings below ARE the output program -- keep them
# byte-identical (including trailing spaces inside the literals).
def altern_s(a,m,s,e,c,n):
  rx=altern_r ; return (T,T,s,e-s,c,( "altern", "\n" + \
  "def " + a[2][1] + "_p( s, c):\n" + \
  "  if been(\"" + a[2][1] + "\",s): return was( c, \"" + a[2][1] + "\",s)\n" + \
  "  else:\n" + \
  "    mark(\"" + a[2][1] + "\",s,(F,T,s,0,c,(\"\",\"\")));met = F \n" + \
  "" + rx(a[5],m,s,e,c,n) + " \n" + \
  "    if not met:\n" + \
  "      return mark(\"" + a[2][1] + "\",s,(F,T,s,0,c,(\"\",\"\")))\n" + \
  "    else:\n" + \
  "      return mark(\"" + a[2][1] + "\",s,(met,mem,s,tl,tc,ta)) " ))
# Recursive renderer for altern_s: walks the alternation AST and emits one
# "if not met: ... = cm(...)/NAME_p(...)" line per alternative.
# AST nodes are (tag, payload) pairs; the tag selects the branch below.
def altern_r(a,m,s,e,c,n):
  o = ""
  rx=altern_r
  if a != "":
    if a[0] =="albody":
      # recurse into the first alternative and the rest of the list
      o=o+rx(a[1][2],m,s,e,c,n) + rx(a[1][3][1],m,s,e,c,n)
    if a[0] =="cmatch":
      o=o+rx(a[1],m,s,e,c,n)
    if a[0] =="cm":
      # a literal character alternative
      o=o+"\n" + \
      "    if not met: (met,mem,ts,tl,tc,ta)=cm(\'" + a[1] + "\',s,c)"
    if a[0] =="btb":
      o=o+"\n" + \
      "    if not met: (met,mem,ts,tl,tc,ta)=cm(\'" + a[1] + "\',s,c)"
    if a[0] =="bnl":
      o=o+"\n" + \
      "    if not met: (met,mem,ts,tl,tc,ta)=cm(\'" + a[1] + "\',s,c)"
    if a[0] =="name":
      # a reference to another rule
      o=o+"\n" + \
      "    if not met: (met,mem,ts,tl,tc,ta)=" + a[1] + "_p(s,c)"
  return (o)
# Code generator for a sequence ("incorporation") rule: emits a complete
# NAME_p function that threads (ok, mem[n], ts, tl, tc, a[n]) through each
# sub-parse, then calls NAME_s on success. a[2][1] is the rule name; a[5] the
# parsed body (rendered by incorp_r). When the rule was declared with "/"
# (a[4][1] == "/"), a default NAME_s semantic function is appended too.
# The template strings ARE the output program -- keep them byte-identical.
def incorp_s(a,m,s,e,c,n):
  smfnc="_s(a,m,s,e,c,n): return(T,T,s,e-s,c,(n,fi[s:e]))"
  rx=incorp_r ; return (T,T,s,e-s,c,( "incorp", "\n" + \
  "def " + a[2][1] + "_p( s, c):\n" + \
  "  if been(\"" + a[2][1] + "\",s): return was( c, \"" + a[2][1] + "\",s)\n" + \
  "  else:\n" + \
  "    mark(\"" + a[2][1] + "\",s,(F,T,s,0,c,(\"\",\"\"))) \n" + \
  "    ok=True; ts=s; tl=0; a={0: (\"\",\"\")}\n" + \
  "    mem={0:True}; tc=c; n=0\n" + \
  "" + rx(a[5],m,s,e,c,n) + " \n" + \
  "    if ok:\n" + \
  "      rv=" + a[2][1] + "_s(a,andmemo(mem),s,ts+tl,tc,\"" + a[2][1] + "\")\n" + \
  "      return mark(\"" + a[2][1] + "\",s,rv)\n" + \
  "    return mark(\"" + a[2][1] + "\",s,(F,T,s,0,c,(\"\",\"\")))\n" + \
  "" + ("def "+a[2][1]+smfnc if a[4][1] == "/" else "") + "" ))
# Recursive renderer for incorp_s: walks the sequence-body AST and emits one
# "if ok: n=n+1; ( ok,...)= <sub-parse>" step per item. An item wrapped in
# "pnit" ('.'-prefixed, i.e. optional) gets an "n" prepended to "ok" so the
# generated step assigns into the throwaway `nok` and cannot fail the
# sequence -- this is why the generated parsers above contain `nok`.
def incorp_r(a,m,s,e,c,n):
  o = ""
  rx=incorp_r
  if a[0] =="inbody":
      # one body item plus the (possibly empty) rest of the body
      o=o+"\n" + \
      "    if ok:\n" + \
      "      n=n+1; ( " + ( "n" if a[1][2][0] == "pnit" else "" ) + "ok,mem[n],ts,tl,tc,a[n])=\\\n" + \
      "           " + rx(a[1][2][1] if a[1][2][0]=="pnit" else a[1][2],m,s,e,c,n) + "\n" + \
      "" + rx(a[1][3],m,s,e,c,n) + ""
    if a[0] =="pnit":
      o=o+"n"
    if a[0] =="cmatch":
      o=o+rx(a[1],m,s,e,c,n)
    if a[0] =="name":
      # reference to another rule
      o=o+a[1] +"_p ( (ts+tl), tc)"
    if a[0] =="cm":
      # literal character
      o=o+"cm(\'" + a[1] + "\',(ts+tl), tc)"
    if a[0] =="bsl":
      o=o+" cm(chr(92) ,(ts+tl), tc)"
    if a[0] =="btk":
      o=o+" cm(chr(39) ,(ts+tl), tc)"
    if a[0] =="bqt":
      o=o+" cm(chr(34) ,(ts+tl), tc)"
    if a[0] =="bnl":
      o=o+" cm(chr(10) ,(ts+tl), tc)"
  return (o)
# Semantic action for "almore": keep only the nested alternative (a[4]).
def almore_s(a,m,s,e,c,n):
  return (T,T,s,e-s,c,( "almore", a[4] ))
# Semantic action for "albody": pass the whole child dict through for
# altern_r to walk.
def albody_s(a,m,s,e,c,n):
  return (T,T,s,e-s,c,( "albody", a ))
# Semantic action for "inbody": pass the whole child dict through for
# incorp_r to walk.
def inbody_s(a,m,s,e,c,n):
  return (T,T,s,e-s,c,( "inbody", a ))
# Semantic action for "pnit": keep the wrapped item (a[2]); the '.' marker
# itself (a[1]) is dropped.
def pnit_s(a,m,s,e,c,n):
  return (T,T,s,e-s,c,( "pnit", a[2] ))
# Semantic action for "cmatch": keep the quoted character (a[2]); the
# surrounding quotes are dropped.
def cmatch_s(a,m,s,e,c,n):
  return (T,T,s,e-s,c,( "cmatch", a[2] ))
def build(v, m, s, l, c, a):
  """Final semantic hook: parsing finished, so simply report success.

  The six arguments mirror the standard result tuple
  (matched, memoizable, start, length, context, ast) but are unused here.
  """
  return 'success'
T=True; F=False
prologue = """import sys
from binascii import *
fi = file(sys.argv[1]).read()
semantics = file(sys.argv[2]).read()
fo = open(sys.argv[3], "w+")
h={}; registers={}; context={}; mseq=0; dseq=1; T=True; F=False
def n2z( a ):
return ( '0' if a=='' else a )
def be2le( a ):
return a[6:8]+a[4:6]+a[2:4]+a[0:2]
def mark( p, s, t ):
( v, m, ss, l, c, a ) = t
if t[1]: x = p +"-" + str(s); h[x]=(v,m,l,a); return t
else:
if not t[0]: x = p +"-" + str(s); h[x]=(v,m,l,a); return t
return t
def been(p, s):
if h.has_key( p +"-" + str(s) ): return h[p +"-" + str(s)][1]
else: return False
def was(c,p,s): (v,m,l,a) = h[p+"-"+str(s)]; return (v,m,s,l,c,a)
def cm( ch, s, c ):
if s < len(fi):
if fi[s] == ch: return ( T, T, s, 1, c, ( "cm", fi[s] ) )
return ( False, True, s, 0, c, ( "cm", "") )
def andmemo( m ):
r = True
for i in m:
if not m[i]: r = False
return r
outdata = ""
def output( s ):
global outdata
outdata = outdata + str(s)
"""
epilogue = """
(v,m,s,l,c,a) = syntax_p( 0, ({},'<1>','<0>') )
if v:
print "Parsed "+a[0]+" OK"
else: print "Failed to Parse"
print >> fo, a[1]
fo.close()"""
outdata = ""
def output( s ):
global outdata
outdata = outdata + str(s)
(v,m,s,l,c,a) = syntax_p( 0, ({},'<1>','<0>') )
if v:
print "Parsed "+a[0]+" OK"
else: print "Failed to Parse"
print >> fo, a[1]
fo.close()
| 29.058163
| 98
| 0.473856
| 6,302
| 28,477
| 2.116154
| 0.021739
| 0.090882
| 0.129124
| 0.113827
| 0.837807
| 0.80174
| 0.792741
| 0.770846
| 0.65207
| 0.627699
| 0
| 0.018253
| 0.216982
| 28,477
| 979
| 99
| 29.087845
| 0.579828
| 0
| 0
| 0.537906
| 0
| 0.01083
| 0.11806
| 0.010254
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.004813
| null | null | 0.00722
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d48cdfb3d2807f5f63dbbeff87a735449d6ebf32
| 18,214
|
py
|
Python
|
keras/initializers/initializers_v1.py
|
RakeshJarupula/keras
|
2ac6638e91d5aff77c22b45e9c8c84fb05a9e477
|
[
"Apache-2.0"
] | null | null | null |
keras/initializers/initializers_v1.py
|
RakeshJarupula/keras
|
2ac6638e91d5aff77c22b45e9c8c84fb05a9e477
|
[
"Apache-2.0"
] | null | null | null |
keras/initializers/initializers_v1.py
|
RakeshJarupula/keras
|
2ac6638e91d5aff77c22b45e9c8c84fb05a9e477
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras initializers for TF 1."""
# pylint:disable=g-classes-have-attributes
import tensorflow.compat.v2 as tf
from tensorflow.python.util.tf_export import keras_export
# Local aliases for the TF1 initializer implementations that are re-exported
# below under their legacy `keras.initializers.*` v1 API names.
_v1_zeros_initializer = tf.compat.v1.zeros_initializer
_v1_ones_initializer = tf.compat.v1.ones_initializer
_v1_constant_initializer = tf.compat.v1.constant_initializer
_v1_variance_scaling_initializer = tf.compat.v1.variance_scaling_initializer
_v1_orthogonal_initializer = tf.compat.v1.orthogonal_initializer
_v1_identity = tf.compat.v1.initializers.identity
_v1_glorot_uniform_initializer = tf.compat.v1.glorot_uniform_initializer
_v1_glorot_normal_initializer = tf.compat.v1.glorot_normal_initializer
# Register each alias under its v1 export name(s). `allow_multiple_exports`
# is needed because the underlying TF symbols are already exported elsewhere.
keras_export(v1=['keras.initializers.Zeros', 'keras.initializers.zeros'], allow_multiple_exports=True)(
    _v1_zeros_initializer)
keras_export(v1=['keras.initializers.Ones', 'keras.initializers.ones'], allow_multiple_exports=True)(
    _v1_ones_initializer)
keras_export(v1=['keras.initializers.Constant', 'keras.initializers.constant'], allow_multiple_exports=True)(
    _v1_constant_initializer)
keras_export(v1=['keras.initializers.VarianceScaling'], allow_multiple_exports=True)(
    _v1_variance_scaling_initializer)
keras_export(v1=['keras.initializers.Orthogonal',
                 'keras.initializers.orthogonal'], allow_multiple_exports=True)(_v1_orthogonal_initializer)
keras_export(v1=['keras.initializers.Identity',
                 'keras.initializers.identity'], allow_multiple_exports=True)(_v1_identity)
keras_export(v1=['keras.initializers.glorot_uniform'], allow_multiple_exports=True)(
    _v1_glorot_uniform_initializer)
keras_export(v1=['keras.initializers.glorot_normal'], allow_multiple_exports=True)(
    _v1_glorot_normal_initializer)
@keras_export(v1=['keras.initializers.RandomNormal',
                  'keras.initializers.random_normal',
                  'keras.initializers.normal'])
class RandomNormal(tf.compat.v1.random_normal_initializer):
  """Initializer that generates a normal distribution.
  Args:
    mean: a python scalar or a scalar tensor. Mean of the random values to
      generate.
    stddev: a python scalar or a scalar tensor. Standard deviation of the random
      values to generate.
    seed: A Python integer. Used to create random seeds. See
      `tf.compat.v1.set_random_seed` for behavior.
    dtype: Default data type, used if no `dtype` argument is provided when
      calling the initializer. Only floating point types are supported.
  @compatibility(TF2)
  Although it is a legacy compat.v1 api,
  `tf.compat.v1.keras.initializers.RandomNormal` is compatible with eager
  execution and `tf.function`.
  To switch to native TF2, switch to using
  `tf.keras.initializers.RandomNormal` (not from `compat.v1`) and
  if you need to change the default dtype use
  `tf.keras.backend.set_floatx(float_dtype)`
  or pass the dtype when calling the initializer, rather than passing it
  when constructing the initializer.
  Random seed behavior:
  Also be aware that if you pass a seed to the TF2 initializer
  API it will reuse that same seed for every single initialization
  (unlike the TF1 initializer)
  #### Structural Mapping to Native TF2
  Before:
  ```python
  initializer = tf.compat.v1.keras.initializers.RandomNormal(
    mean=mean,
    stddev=stddev,
    seed=seed,
    dtype=dtype)
  weight_one = tf.Variable(initializer(shape_one))
  weight_two = tf.Variable(initializer(shape_two))
  ```
  After:
  ```python
  initializer = tf.keras.initializers.RandomNormal(
    mean=mean,
    # seed=seed,  # Setting a seed in the native TF2 API
    # causes it to produce the same initializations
    # across multiple calls of the same initializer.
    stddev=stddev)
  weight_one = tf.Variable(initializer(shape_one, dtype=dtype))
  weight_two = tf.Variable(initializer(shape_two, dtype=dtype))
  ```
  #### How to Map Arguments
  | TF1 Arg Name      | TF2 Arg Name    | Note                       |
  | :---------------- | :-------------- | :------------------------- |
  | `mean`            | `mean`          | No change to defaults |
  | `stddev`          | `stddev`        | No change to defaults |
  | `seed`            | `seed`          | Different random number generation |
  :                    :        : semantics (to change in a :
  :                    :        : future version). If set, the TF2 version :
  :                    :        : will use stateless random number :
  :                    :        : generation which will produce the exact :
  :                    :        : same initialization even across multiple :
  :                    :        : calls of the initializer instance. the :
  :                    :        : `compat.v1` version will generate new :
  :                    :        : initializations each time. Do not set :
  :                    :        : a seed if you need different          :
  :                    :        : initializations each time. Instead    :
  :                    :        : either set a global tf seed with :
  :                    :        : `tf.random.set_seed` if you need :
  :                    :        : determinism, or initialize each weight:
  :                    :        : with a separate initializer instance  :
  :                    :        : and a different seed.                 :
  | `dtype`           | `dtype`  | The TF2 native api only takes it  |
  :                    :        : as a `__call__` arg, not a constructor arg. :
  | `partition_info`  | -    |  (`__call__` arg in TF1) Not supported      |
  #### Example of fixed-seed behavior differences
  `compat.v1` Fixed seed behavior:
  >>> initializer = tf.compat.v1.keras.initializers.RandomNormal(seed=10)
  >>> a = initializer(shape=(2, 2))
  >>> b = initializer(shape=(2, 2))
  >>> tf.reduce_sum(a - b) == 0
  <tf.Tensor: shape=(), dtype=bool, numpy=False>
  After:
  >>> initializer = tf.keras.initializers.RandomNormal(seed=10)
  >>> a = initializer(shape=(2, 2))
  >>> b = initializer(shape=(2, 2))
  >>> tf.reduce_sum(a - b) == 0
  <tf.Tensor: shape=(), dtype=bool, numpy=True>
  @end_compatibility
  """
  def __init__(self, mean=0.0, stddev=0.05, seed=None, dtype=tf.float32):
    # Thin v1 wrapper: all behavior lives in the TF1 base initializer.
    super().__init__(
        mean=mean, stddev=stddev, seed=seed, dtype=dtype)
@keras_export(v1=['keras.initializers.RandomUniform',
                  'keras.initializers.random_uniform',
                  'keras.initializers.uniform'])
class RandomUniform(tf.compat.v1.random_uniform_initializer):
  """Initializer that generates tensors with a uniform distribution.
  Args:
    minval: A python scalar or a scalar tensor. Lower bound of the range of
      random values to generate.
    maxval: A python scalar or a scalar tensor. Upper bound of the range of
      random values to generate. Defaults to 1 for float types.
    seed: A Python integer. Used to create random seeds. See
      `tf.compat.v1.set_random_seed` for behavior.
    dtype: Default data type, used if no `dtype` argument is provided when
      calling the initializer.
  @compatibility(TF2)
  Although it is a legacy `compat.v1` api,
  `tf.compat.v1.keras.initializers.RandomUniform` is compatible with eager
  execution and `tf.function`.
  To switch to native TF2, switch to using
  `tf.keras.initializers.RandomUniform` (not from `compat.v1`) and
  if you need to change the default dtype use
  `tf.keras.backend.set_floatx(float_dtype)`
  or pass the dtype when calling the initializer, rather than passing it
  when constructing the initializer.
  Random seed behavior:
  Also be aware that if you pass a seed to the TF2 initializer
  API it will reuse that same seed for every single initialization
  (unlike the TF1 initializer)
  #### Structural Mapping to Native TF2
  Before:
  ```python
  initializer = tf.compat.v1.keras.initializers.RandomUniform(
    minval=minval,
    maxval=maxval,
    seed=seed,
    dtype=dtype)
  weight_one = tf.Variable(initializer(shape_one))
  weight_two = tf.Variable(initializer(shape_two))
  ```
  After:
  ```python
  initializer = tf.keras.initializers.RandomUniform(
    minval=minval,
    maxval=maxval,
    # seed=seed,  # Setting a seed in the native TF2 API
    # causes it to produce the same initializations
    # across multiple calls of the same initializer.
    )
  weight_one = tf.Variable(initializer(shape_one, dtype=dtype))
  weight_two = tf.Variable(initializer(shape_two, dtype=dtype))
  ```
  #### How to Map Arguments
  | TF1 Arg Name      | TF2 Arg Name    | Note                       |
  | :---------------- | :-------------- | :------------------------- |
  | `minval`            | `minval`          | No change to defaults |
  | `maxval`          | `maxval`        | No change to defaults |
  | `seed`            | `seed`          | Different random number generation |
  :                    :        : semantics (to change in a :
  :                    :        : future version). If set, the TF2 version :
  :                    :        : will use stateless random number :
  :                    :        : generation which will produce the exact :
  :                    :        : same initialization even across multiple :
  :                    :        : calls of the initializer instance. the :
  :                    :        : `compat.v1` version will generate new :
  :                    :        : initializations each time. Do not set :
  :                    :        : a seed if you need different          :
  :                    :        : initializations each time. Instead    :
  :                    :        : either set a global tf seed with :
  :                    :        : `tf.random.set_seed` if you need :
  :                    :        : determinism, or initialize each weight :
  :                    :        : with a separate initializer instance  :
  :                    :        : and a different seed.                 :
  | `dtype`           | `dtype`  | The TF2 native api only takes it  |
  :                    :        : as a `__call__` arg, not a constructor arg. :
  | `partition_info`  | -    |  (`__call__` arg in TF1) Not supported      |
  #### Example of fixed-seed behavior differences
  `compat.v1` Fixed seed behavior:
  >>> initializer = tf.compat.v1.keras.initializers.RandomUniform(seed=10)
  >>> a = initializer(shape=(2, 2))
  >>> b = initializer(shape=(2, 2))
  >>> tf.reduce_sum(a - b) == 0
  <tf.Tensor: shape=(), dtype=bool, numpy=False>
  After:
  >>> initializer = tf.keras.initializers.RandomUniform(seed=10)
  >>> a = initializer(shape=(2, 2))
  >>> b = initializer(shape=(2, 2))
  >>> tf.reduce_sum(a - b) == 0
  <tf.Tensor: shape=(), dtype=bool, numpy=True>
  @end_compatibility
  """
  def __init__(self, minval=-0.05, maxval=0.05, seed=None,
               dtype=tf.float32):
    # Thin v1 wrapper: all behavior lives in the TF1 base initializer.
    super().__init__(
        minval=minval, maxval=maxval, seed=seed, dtype=dtype)
@keras_export(v1=['keras.initializers.TruncatedNormal',
'keras.initializers.truncated_normal'])
class TruncatedNormal(tf.compat.v1.truncated_normal_initializer):
"""Initializer that generates a truncated normal distribution.
These values are similar to values from a `random_normal_initializer`
except that values more than two standard deviations from the mean
are discarded and re-drawn. This is the recommended initializer for
neural network weights and filters.
Args:
mean: a python scalar or a scalar tensor. Mean of the random values to
generate.
stddev: a python scalar or a scalar tensor. Standard deviation of the
random values to generate.
seed: A Python integer. Used to create random seeds. See
`tf.compat.v1.set_random_seed` for behavior.
dtype: Default data type, used if no `dtype` argument is provided when
calling the initializer. Only floating point types are supported.
@compatibility(TF2)
Although it is a legacy compat.v1 api,
`tf.compat.v1.keras.initializers.TruncatedNormal` is compatible with eager
execution and `tf.function`.
To switch to native TF2, switch to using
`tf.keras.initializers.TruncatedNormal` (not from `compat.v1`) and
if you need to change the default dtype use
`tf.keras.backend.set_floatx(float_dtype)`
or pass the dtype when calling the initializer, rather than passing it
when constructing the initializer.
Random seed behavior:
Also be aware that if you pass a seed to the TF2 initializer
API it will reuse that same seed for every single initialization
(unlike the TF1 initializer)
#### Structural Mapping to Native TF2
Before:
```python
initializer = tf.compat.v1.keras.initializers.TruncatedNormal(
mean=mean,
stddev=stddev,
seed=seed,
dtype=dtype)
weight_one = tf.Variable(initializer(shape_one))
weight_two = tf.Variable(initializer(shape_two))
```
After:
```python
initializer = tf.keras.initializers.TruncatedNormal(
mean=mean,
# seed=seed, # Setting a seed in the native TF2 API
# causes it to produce the same initializations
# across multiple calls of the same initializer.
stddev=stddev)
weight_one = tf.Variable(initializer(shape_one, dtype=dtype))
weight_two = tf.Variable(initializer(shape_two, dtype=dtype))
```
#### How to Map Arguments
| TF1 Arg Name | TF2 Arg Name | Note |
| :---------------- | :-------------- | :------------------------- |
| `mean` | `mean` | No change to defaults |
| `stddev` | `stddev` | No change to defaults |
| `seed` | `seed` | Different random number generation |
: : : semantics (to change in a :
: : : future version). If set, the TF2 version :
: : : will use stateless random number :
: : : generation which will produce the exact :
: : : same initialization even across multiple :
: : : calls of the initializer instance. the :
: : : `compat.v1` version will generate new :
: : : initializations each time. Do not set :
: : : a seed if you need different :
: : : initializations each time. Instead :
: : : either set a global tf seed with
: : : `tf.random.set_seed` if you need :
: : : determinism, or initialize each weight :
: : : with a separate initializer instance :
: : : and a different seed. :
| `dtype` | `dtype` | The TF2 native api only takes it |
: : : as a `__call__` arg, not a constructor arg. :
| `partition_info` | - | (`__call__` arg in TF1) Not supported |
#### Example of fixed-seed behavior differences
`compat.v1` Fixed seed behavior:
>>> initializer = tf.compat.v1.keras.initializers.TruncatedNormal(seed=10)
>>> a = initializer(shape=(2, 2))
>>> b = initializer(shape=(2, 2))
>>> tf.reduce_sum(a - b) == 0
<tf.Tensor: shape=(), dtype=bool, numpy=False>
After:
>>> initializer = tf.keras.initializers.TruncatedNormal(seed=10)
>>> a = initializer(shape=(2, 2))
>>> b = initializer(shape=(2, 2))
>>> tf.reduce_sum(a - b) == 0
<tf.Tensor: shape=(), dtype=bool, numpy=False>
@end_compatibility
"""
def __init__(self, mean=0.0, stddev=0.05, seed=None, dtype=tf.float32):
  """Initializer that generates a truncated normal distribution.

  Args:
    mean: a python scalar or a scalar tensor. Mean of the random values to
      generate.
    stddev: a python scalar or a scalar tensor. Standard deviation of the
      random values to generate.
    seed: A Python integer. Used to create random seeds. See
      `tf.compat.v1.set_random_seed` for behavior.
    dtype: Default data type, used if no `dtype` argument is provided when
      calling the initializer. Only floating point types are supported.
  """
  # Pure pass-through: this TF1-compat subclass only pins the default
  # dtype handling; all sampling logic lives in the parent initializer.
  super().__init__(
      mean=mean, stddev=stddev, seed=seed, dtype=dtype)
@keras_export(v1=['keras.initializers.lecun_normal'])
class LecunNormal(tf.compat.v1.variance_scaling_initializer):
  """LeCun normal initializer (TF1 compat).

  Draws from a truncated normal scaled by the layer's fan-in with unit
  variance scale.
  """

  def __init__(self, seed=None):
    # LeCun normal == variance scaling with scale 1 over fan-in, sampled
    # from a truncated normal distribution.
    super().__init__(
        mode='fan_in', distribution='truncated_normal', scale=1., seed=seed)

  def get_config(self):
    """Returns the config needed to reconstruct this initializer."""
    return {'seed': self.seed}
@keras_export(v1=['keras.initializers.lecun_uniform'])
class LecunUniform(tf.compat.v1.variance_scaling_initializer):
  """LeCun uniform initializer (TF1 compat).

  Draws from a uniform distribution scaled by the layer's fan-in with unit
  variance scale.
  """

  def __init__(self, seed=None):
    # LeCun uniform == variance scaling with scale 1 over fan-in, sampled
    # from a uniform distribution.
    super().__init__(
        mode='fan_in', distribution='uniform', scale=1., seed=seed)

  def get_config(self):
    """Returns the config needed to reconstruct this initializer."""
    return {'seed': self.seed}
@keras_export(v1=['keras.initializers.he_normal'])
class HeNormal(tf.compat.v1.variance_scaling_initializer):
  """He normal initializer (TF1 compat).

  Draws from a truncated normal scaled by the layer's fan-in with a
  variance scale of 2.
  """

  def __init__(self, seed=None):
    # He normal == variance scaling with scale 2 over fan-in, sampled from
    # a truncated normal distribution.
    super().__init__(
        mode='fan_in', distribution='truncated_normal', scale=2., seed=seed)

  def get_config(self):
    """Returns the config needed to reconstruct this initializer."""
    return {'seed': self.seed}
@keras_export(v1=['keras.initializers.he_uniform'])
class HeUniform(tf.compat.v1.variance_scaling_initializer):
  """He uniform initializer (TF1 compat).

  Draws from a uniform distribution scaled by the layer's fan-in with a
  variance scale of 2.
  """

  def __init__(self, seed=None):
    # He uniform == variance scaling with scale 2 over fan-in, sampled from
    # a uniform distribution.
    super().__init__(
        mode='fan_in', distribution='uniform', scale=2., seed=seed)

  def get_config(self):
    """Returns the config needed to reconstruct this initializer."""
    return {'seed': self.seed}
| 40.118943
| 109
| 0.626441
| 2,178
| 18,214
| 5.117998
| 0.123967
| 0.067103
| 0.025119
| 0.024222
| 0.826411
| 0.805867
| 0.760922
| 0.746838
| 0.744685
| 0.713824
| 0
| 0.013608
| 0.261667
| 18,214
| 453
| 110
| 40.207506
| 0.815289
| 0.747392
| 0
| 0.306667
| 0
| 0
| 0.203098
| 0.181614
| 0
| 0
| 0
| 0
| 0
| 1
| 0.146667
| false
| 0
| 0.026667
| 0.053333
| 0.32
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d4a0e9fd5e08023b5d19fca8010c5fb963f35fe8
| 8,059
|
py
|
Python
|
guardian/tests/direct_rel_test.py
|
TabbedOut/django-guardian
|
8d8400680b198cf3c9f828f6c494193a820f83e8
|
[
"BSD-2-Clause"
] | 2
|
2015-09-26T10:46:01.000Z
|
2018-11-26T19:06:48.000Z
|
guardian/tests/direct_rel_test.py
|
yceruto/django-guardian
|
d5703a1c964f4f1145d1800468db5352dacfaa18
|
[
"BSD-2-Clause"
] | 1
|
2021-03-24T12:14:16.000Z
|
2021-03-24T12:14:16.000Z
|
guardian/tests/direct_rel_test.py
|
yceruto/django-guardian
|
d5703a1c964f4f1145d1800468db5352dacfaa18
|
[
"BSD-2-Clause"
] | 5
|
2015-09-26T12:22:16.000Z
|
2021-03-24T12:13:43.000Z
|
from __future__ import unicode_literals
from .testapp.models import Mixed
from .testapp.models import Project
from .testapp.models import ProjectGroupObjectPermission
from .testapp.models import ProjectUserObjectPermission
from django.contrib.auth.models import Group, Permission
from django.test import TestCase
from guardian.compat import get_user_model
from guardian.shortcuts import assign_perm
from guardian.shortcuts import get_groups_with_perms
from guardian.shortcuts import get_objects_for_group
from guardian.shortcuts import get_objects_for_user
from guardian.shortcuts import get_users_with_perms
from guardian.shortcuts import remove_perm
User = get_user_model()  # Resolve the active user model once for the whole module.
class TestDirectUserPermissions(TestCase):
    """Exercises per-object user permissions stored in the direct (non-generic)
    ``ProjectUserObjectPermission`` model."""

    def setUp(self):
        self.joe = User.objects.create_user('joe', 'joe@example.com', 'foobar')
        self.project = Project.objects.create(name='Foobar')

    def get_perm(self, codename):
        # Resolve a testapp Permission row by its codename.
        filters = {'content_type__app_label': 'testapp', 'codename': codename}
        return Permission.objects.get(**filters)

    def test_after_perm_is_created_without_shortcut(self):
        perm = self.get_perm('add_project')
        # we should not use assign here - if generic user obj perms model is
        # used then everything could go fine if using assign shortcut and we
        # would not be able to see any problem
        ProjectUserObjectPermission.objects.create(
            user=self.joe,
            permission=perm,
            content_object=self.project,
        )
        self.assertTrue(self.joe.has_perm('add_project', self.project))

    def test_assign_perm(self):
        # assign_perm must create exactly one row in the direct model.
        assign_perm('add_project', self.joe, self.project)
        filters = {
            'content_object': self.project,
            'permission__codename': 'add_project',
            'user': self.joe,
        }
        result = ProjectUserObjectPermission.objects.filter(**filters).count()
        self.assertEqual(result, 1)

    def test_remove_perm(self):
        assign_perm('add_project', self.joe, self.project)
        filters = {
            'content_object': self.project,
            'permission__codename': 'add_project',
            'user': self.joe,
        }
        result = ProjectUserObjectPermission.objects.filter(**filters).count()
        self.assertEqual(result, 1)
        # remove_perm must delete the direct row again.
        remove_perm('add_project', self.joe, self.project)
        result = ProjectUserObjectPermission.objects.filter(**filters).count()
        self.assertEqual(result, 0)

    def test_get_users_with_perms(self):
        # john gets no permissions and must not appear in the result.
        User.objects.create_user('john', 'john@foobar.com', 'john')
        jane = User.objects.create_user('jane', 'jane@foobar.com', 'jane')
        assign_perm('add_project', self.joe, self.project)
        assign_perm('change_project', self.joe, self.project)
        assign_perm('change_project', jane, self.project)
        self.assertEqual(get_users_with_perms(self.project, attach_perms=True),
            {
                self.joe: ['add_project', 'change_project'],
                jane: ['change_project'],
            })

    def test_get_users_with_perms_plus_groups(self):
        # joe's 'change_project' arrives via his group membership, yet it is
        # still attributed to him in the per-user result.
        User.objects.create_user('john', 'john@foobar.com', 'john')
        jane = User.objects.create_user('jane', 'jane@foobar.com', 'jane')
        group = Group.objects.create(name='devs')
        self.joe.groups.add(group)
        assign_perm('add_project', self.joe, self.project)
        assign_perm('change_project', group, self.project)
        assign_perm('change_project', jane, self.project)
        self.assertEqual(get_users_with_perms(self.project, attach_perms=True),
            {
                self.joe: ['add_project', 'change_project'],
                jane: ['change_project'],
            })

    def test_get_objects_for_user(self):
        foo = Project.objects.create(name='foo')
        bar = Project.objects.create(name='bar')
        assign_perm('add_project', self.joe, foo)
        assign_perm('add_project', self.joe, bar)
        assign_perm('change_project', self.joe, bar)
        # Only the 'add_project' objects are requested; bar's extra
        # 'change_project' permission must not duplicate it.
        result = get_objects_for_user(self.joe, 'testapp.add_project')
        self.assertEqual(sorted(p.pk for p in result), sorted([foo.pk, bar.pk]))
class TestDirectGroupPermissions(TestCase):
    """Exercises per-object group permissions stored in the direct
    (non-generic) ``ProjectGroupObjectPermission`` model."""

    def setUp(self):
        self.joe = User.objects.create_user('joe', 'joe@example.com', 'foobar')
        self.group = Group.objects.create(name='admins')
        self.joe.groups.add(self.group)
        self.project = Project.objects.create(name='Foobar')

    def get_perm(self, codename):
        # Resolve a testapp Permission row by its codename.
        filters = {'content_type__app_label': 'testapp', 'codename': codename}
        return Permission.objects.get(**filters)

    def test_after_perm_is_created_without_shortcut(self):
        perm = self.get_perm('add_project')
        # we should not use assign here - if generic user obj perms model is
        # used then everything could go fine if using assign shortcut and we
        # would not be able to see any problem
        ProjectGroupObjectPermission.objects.create(
            group=self.group,
            permission=perm,
            content_object=self.project,
        )
        # joe inherits the permission through his group membership.
        self.assertTrue(self.joe.has_perm('add_project', self.project))

    def test_assign_perm(self):
        # assign_perm must create exactly one row in the direct model.
        assign_perm('add_project', self.group, self.project)
        filters = {
            'content_object': self.project,
            'permission__codename': 'add_project',
            'group': self.group,
        }
        result = ProjectGroupObjectPermission.objects.filter(**filters).count()
        self.assertEqual(result, 1)

    def test_remove_perm(self):
        assign_perm('add_project', self.group, self.project)
        filters = {
            'content_object': self.project,
            'permission__codename': 'add_project',
            'group': self.group,
        }
        result = ProjectGroupObjectPermission.objects.filter(**filters).count()
        self.assertEqual(result, 1)
        # remove_perm must delete the direct row again.
        remove_perm('add_project', self.group, self.project)
        result = ProjectGroupObjectPermission.objects.filter(**filters).count()
        self.assertEqual(result, 0)

    def test_get_groups_with_perms(self):
        # 'managers' gets no permissions and must not appear in the result.
        Group.objects.create(name='managers')
        devs = Group.objects.create(name='devs')
        assign_perm('add_project', self.group, self.project)
        assign_perm('change_project', self.group, self.project)
        assign_perm('change_project', devs, self.project)
        self.assertEqual(get_groups_with_perms(self.project, attach_perms=True),
            {
                self.group: ['add_project', 'change_project'],
                devs: ['change_project'],
            })

    def test_get_objects_for_group(self):
        foo = Project.objects.create(name='foo')
        bar = Project.objects.create(name='bar')
        assign_perm('add_project', self.group, foo)
        assign_perm('add_project', self.group, bar)
        assign_perm('change_project', self.group, bar)
        # Only the 'add_project' objects are requested; bar's extra
        # 'change_project' permission must not duplicate it.
        result = get_objects_for_group(self.group, 'testapp.add_project')
        self.assertEqual(sorted(p.pk for p in result), sorted([foo.pk, bar.pk]))
class TestMixedDirectAndGenericObjectPermission(TestCase):
    """Checks shortcuts against the ``Mixed`` model, which combines direct
    and generic object-permission storage."""

    def setUp(self):
        self.joe = User.objects.create_user('joe', 'joe@example.com', 'foobar')
        self.group = Group.objects.create(name='admins')
        self.joe.groups.add(self.group)
        self.mixed = Mixed.objects.create(name='Foobar')

    def test_get_users_with_perms_plus_groups(self):
        # john gets no permissions and must not appear; joe's 'change_mixed'
        # arrives via group membership but is attributed to him per-user.
        User.objects.create_user('john', 'john@foobar.com', 'john')
        jane = User.objects.create_user('jane', 'jane@foobar.com', 'jane')
        group = Group.objects.create(name='devs')
        self.joe.groups.add(group)
        assign_perm('add_mixed', self.joe, self.mixed)
        assign_perm('change_mixed', group, self.mixed)
        assign_perm('change_mixed', jane, self.mixed)
        self.assertEqual(get_users_with_perms(self.mixed, attach_perms=True),
            {
                self.joe: ['add_mixed', 'change_mixed'],
                jane: ['change_mixed'],
            })
| 41.117347
| 80
| 0.659759
| 963
| 8,059
| 5.313603
| 0.110073
| 0.060192
| 0.046512
| 0.052765
| 0.854993
| 0.809459
| 0.754348
| 0.716435
| 0.706273
| 0.673637
| 0
| 0.000958
| 0.223105
| 8,059
| 195
| 81
| 41.328205
| 0.816323
| 0.042313
| 0
| 0.610063
| 0
| 0
| 0.138262
| 0.005966
| 0
| 0
| 0
| 0
| 0.08805
| 1
| 0.106918
| false
| 0
| 0.08805
| 0
| 0.226415
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d4cf8c723436852fd2f95085f0bc7960b28e3fb5
| 312
|
py
|
Python
|
airport/_constants.py
|
egemenyildiz/airport-py
|
30b6aa98d5b59f371347a19264e8030959939d5e
|
[
"MIT"
] | 3
|
2019-11-29T07:55:15.000Z
|
2021-03-27T00:04:32.000Z
|
airport/_constants.py
|
egemenyildiz/airport-py
|
30b6aa98d5b59f371347a19264e8030959939d5e
|
[
"MIT"
] | 1
|
2019-11-29T18:35:46.000Z
|
2019-11-29T18:36:24.000Z
|
airport/_constants.py
|
egemenyildiz/airport-py
|
30b6aa98d5b59f371347a19264e8030959939d5e
|
[
"MIT"
] | 1
|
2020-11-29T17:30:41.000Z
|
2020-11-29T17:30:41.000Z
|
# Regex for one row of grouped Wi-Fi scan output. Named groups:
#   ssid             - network name (greedy, up to the BSSID column)
#   bssid            - MAC address, six hex pairs separated by ':' or '-'
#   rssi             - signal strength, digits with optional '+'/'-'/space
#   channel          - channel spec (digits, ',', '-', '+')
#   high_throughput  - single 'Y' or 'N' flag
#   country_code     - 2-3 uppercase letters (or '-')
#   security         - remainder of the line up to its end
SCAN_RESULTS_GROUPED_PATTERN = (r'(?P<ssid>.+)\s+(?P<bssid>([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2}))\s+(?P<rssi>[0-9+\- '
                                r']{1,4})\s+(?P<channel>[0-9,\-+]{1,6})\s+(?P<high_throughput>[YN])\s+('
                                r'?P<country_code>[A-Z\-]{2,3})\s+(?P<security>.+)$')
| 78
| 120
| 0.410256
| 53
| 312
| 2.320755
| 0.566038
| 0.081301
| 0.081301
| 0.097561
| 0.113821
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070248
| 0.224359
| 312
| 3
| 121
| 104
| 0.438017
| 0
| 0
| 0
| 0
| 0.666667
| 0.650641
| 0.647436
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d4e6e603fec867763dd424ce49a2429f7f04d6d1
| 167
|
py
|
Python
|
djangoX/admin.py
|
nuratabanjeh/djangox
|
d3f01481fd75d909fa8e42f2b1da98518f0a589a
|
[
"MIT"
] | null | null | null |
djangoX/admin.py
|
nuratabanjeh/djangox
|
d3f01481fd75d909fa8e42f2b1da98518f0a589a
|
[
"MIT"
] | null | null | null |
djangoX/admin.py
|
nuratabanjeh/djangox
|
d3f01481fd75d909fa8e42f2b1da98518f0a589a
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# NOTE(review): `register` is imported but unused below; `admin.site.register`
# is called instead - consider removing this import.
from django.contrib.admin.decorators import register
from .models import Snack

# Register your models here.
# Expose Snack in the Django admin with the default ModelAdmin options.
admin.site.register(Snack)
| 33.4
| 52
| 0.832335
| 24
| 167
| 5.791667
| 0.5
| 0.143885
| 0.244604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101796
| 167
| 5
| 53
| 33.4
| 0.926667
| 0.155689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d4e840c1a4c50e93293f55efdd37365c11e0abd9
| 133
|
py
|
Python
|
bin/lib/list_compilers.py
|
jfalcou/infra
|
97e05039a3f4f3d69b7c50233aed5e5d60a59605
|
[
"BSD-2-Clause"
] | 135
|
2017-01-12T04:39:08.000Z
|
2020-05-08T17:08:52.000Z
|
bin/lib/list_compilers.py
|
jfalcou/infra
|
97e05039a3f4f3d69b7c50233aed5e5d60a59605
|
[
"BSD-2-Clause"
] | 229
|
2017-01-23T12:45:44.000Z
|
2020-05-13T17:36:57.000Z
|
bin/lib/list_compilers.py
|
jfalcou/infra
|
97e05039a3f4f3d69b7c50233aed5e5d60a59605
|
[
"BSD-2-Clause"
] | 106
|
2017-04-18T14:42:34.000Z
|
2020-05-07T14:24:34.000Z
|
#!/usr/bin/env python3
from lib.amazon import list_compilers


def main():
    """Print all known compilers (with extensions) as one space-separated line."""
    # NOTE(review): no `if __name__ == "__main__"` guard is visible in this
    # chunk - confirm the remainder of the file (or the caller) invokes main().
    print(" ".join(list_compilers(with_extension=True)))
| 16.625
| 56
| 0.729323
| 19
| 133
| 4.947368
| 0.894737
| 0.276596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008621
| 0.12782
| 133
| 7
| 57
| 19
| 0.801724
| 0.157895
| 0
| 0
| 0
| 0
| 0.009009
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
d4f498953f7bcf57a017ce527a65c524253a5bdf
| 144
|
py
|
Python
|
src/runtime/sys.py
|
akshanshbhatt/lpython
|
70fef49dbbb6cbb0447f7013231171e5c8b8e5df
|
[
"BSD-3-Clause"
] | 31
|
2022-01-07T23:56:33.000Z
|
2022-03-29T16:09:02.000Z
|
src/runtime/sys.py
|
akshanshbhatt/lpython
|
70fef49dbbb6cbb0447f7013231171e5c8b8e5df
|
[
"BSD-3-Clause"
] | 197
|
2021-12-29T19:01:41.000Z
|
2022-03-31T15:58:25.000Z
|
src/runtime/sys.py
|
akshanshbhatt/lpython
|
70fef49dbbb6cbb0447f7013231171e5c8b8e5df
|
[
"BSD-3-Clause"
] | 17
|
2022-01-06T15:34:36.000Z
|
2022-03-31T13:55:33.000Z
|
from ltypes import i32


def exit(error_code: i32):
    """
    Exits the program with an error code `error_code`.
    """
    # Delegates to the `quit` builtin; this intentionally shadows Python's
    # built-in `exit` because this module is the lpython runtime's `sys`.
    quit(error_code)
| 16
| 54
| 0.645833
| 21
| 144
| 4.285714
| 0.666667
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.25
| 144
| 8
| 55
| 18
| 0.796296
| 0.347222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
be1c2393cb7347ad93c642e4d0a1a5650c758fbb
| 11,626
|
py
|
Python
|
neuron_models/spiking.py
|
cyanezstange/neuron_models
|
1202ad923719edb92fe0c42773dfec628cb5febc
|
[
"MIT"
] | 1
|
2021-07-31T19:57:20.000Z
|
2021-07-31T19:57:20.000Z
|
neuron_models/spiking.py
|
cyanezstange/neuron_models
|
1202ad923719edb92fe0c42773dfec628cb5febc
|
[
"MIT"
] | null | null | null |
neuron_models/spiking.py
|
cyanezstange/neuron_models
|
1202ad923719edb92fe0c42773dfec628cb5febc
|
[
"MIT"
] | null | null | null |
import numpy as np
from neuron_models.neuron import NeuronModel
class Izhikevich(NeuronModel):
    """Izhikevich spiking-neuron model integrated with Heun's method (RK2).

    State vector ``x = [v, u]``: membrane potential v and recovery variable
    u. A spike is recorded whenever v reaches 30, after which v resets to
    ``c`` and u is incremented by ``d``.
    """

    def __init__(self, a=0.01, b=0.2, c=-65, d=2, t0=0, v0=0, u0=0, dt=0.1, i0=1):
        # Integration parameters.
        self.t0 = t0
        self.dt = dt
        # Constants.
        self.a = a
        self.b = b
        self.c = c  # Post-spike reset value for v.
        self.d = d  # Post-spike increment for u.
        # Variables.
        self.x = np.array([v0, u0])
        self.t = t0
        self.i = i0  # Injected current (constant during the run).
        # Containers.
        self.T = np.array([], dtype=np.float64)  # Time.
        self.V = np.array([], dtype=np.float64)  # Membrane potential.
        self.U = np.array([], dtype=np.float64)  # Recovery variable history.
        self.I = np.array([], dtype=np.float64)  # Current.
        self.S = np.array([], dtype=np.float64)  # Spike times.
        # Others.
        self.name = "Izhikevich"

    def _v_dot(self, v, u, t):
        # dv/dt of the Izhikevich model. Depends on v, u; and t (implicit).
        return 0.04*v*v + 5*v + 140 - u + self.i

    def _u_dot(self, v, u, t):
        # du/dt. Depends on v and u.
        return self.a*(self.b*v - u)

    def _x_dot(self, x, t):
        # Vector field for the full state [v, u].
        return np.array([
            self._v_dot(*x,t),
            self._u_dot(*x,t)
        ])

    def run(self, n):
        """Advance the simulation by n integration steps."""
        for _ in range(n):
            self.run_step()

    def run_step(self):
        """Advance one dt: record state, detect/reset spikes, integrate (RK2)."""
        # Set containers if first time.
        # NOTE(review): on the very first step the initial sample is recorded
        # twice (here and in the update below) - confirm this is intended.
        if self.T.size == 0:
            self.T = np.append(self.T, self.t)
        if self.V.size == 0:
            self.V = np.append(self.V, self.x[0])
        if self.U.size == 0:
            self.U = np.append(self.U, self.x[1])
        if self.I.size == 0:
            self.I = np.append(self.I, self.i)
        # Test for spike
        if self.x[0] >= 30:
            self.S = np.append(self.S, self.t)
            self.x[0] = self.c
            self.x[1] += self.d
        # Integration step (Heun / improved Euler, second order).
        k1 = self.dt*self._x_dot(self.x , self.t)
        k2 = self.dt*self._x_dot(self.x + k1, self.t + self.dt)
        self.x = self.x + (k1 + k2)/2
        # Update containers
        self.T = np.append(self.T, self.t)
        self.V = np.append(self.V, self.x[0])
        self.U = np.append(self.U, self.x[1])
        self.I = np.append(self.I, self.i)
        # Update time
        self.t = self.t + self.dt
class HodgkinHuxley(NeuronModel):
    """Hodgkin-Huxley conductance-based model integrated with Heun's method.

    State vector ``x = [v, n, m, h]``: membrane potential plus the K+
    activation (n), Na+ activation (m) and Na+ inactivation (h) gates.
    """

    def __init__(self, t0=0, v0=0, n0=0, m0=0, h0=0, dt=0.1, i0=1):
        # Integration parameters.
        self.t0 = t0
        self.dt = dt
        # Constants, and rate constants.
        self.C_m = 0.01  # uF/cm^2
        self.E_Na = 55.17  # mV
        self.E_K = -72.14  # mV
        self.E_l = -49.42  # mV
        self.g_Na = 1.2  # mS/cm^2
        self.g_K = 0.36  # mS/cm^2
        self.g_l = 0.003  # mS/cm^2
        # Voltage-dependent opening (alpha) and closing (beta) rates for
        # the n, m and h gates.
        self.alpha_n = lambda v: 0.01*(v+50)/(1-np.exp((-v-50)/10))
        self.beta_n = lambda v: 0.125*np.exp((-v-60)/80)
        self.alpha_m = lambda v: 0.1*(v+35)/(1-np.exp((-v-35)/10))
        self.beta_m = lambda v: 4.0*np.exp(-0.0556*(v+60))
        self.alpha_h = lambda v: 0.07*np.exp(-0.05*(v+60))
        self.beta_h = lambda v: 1/(1+np.exp(-0.1*(v+30)))
        # Variables.
        self.x = np.array([v0, n0, m0, h0])
        self.t = t0
        self.i = i0  # Injected current (constant during the run).
        # Containers.
        self.T = np.array([], dtype=np.float64)  # Time.
        self.V = np.array([], dtype=np.float64)  # Membrane potential.
        self.N = np.array([], dtype=np.float64)  # K+ activation gate history.
        self.M = np.array([], dtype=np.float64)  # Na+ activation gate history.
        self.H = np.array([], dtype=np.float64)  # Na+ inactivation gate history.
        self.I = np.array([], dtype=np.float64)  # Current.
        self.S = np.array([], dtype=np.float64)  # Spike times.
        # Others.
        self.name = "Hodgkin-Huxley"

    def _v_dot(self, v, n, m, h, t):
        # Membrane equation. Depends on v, n, m, h; and t (implicit).
        i_Na = self.g_Na*m*m*m*h*(v - self.E_Na)
        i_K = self.g_K*n*n*n*n*(v - self.E_K)
        i_l = self.g_l*(v - self.E_l)
        return (self.i - i_Na - i_K - i_l)/self.C_m

    def _n_dot(self, v, n, m, h, t):
        # Gate kinetics. Depends on v and n.
        return self.alpha_n(v)*(1-n) - self.beta_n(v)*n

    def _m_dot(self, v, n, m, h, t):
        # Gate kinetics. Depends on v and m.
        return self.alpha_m(v)*(1-m) - self.beta_m(v)*m

    def _h_dot(self, v, n, m, h, t):
        # Gate kinetics. Depends on v and h.
        return self.alpha_h(v)*(1-h) - self.beta_h(v)*h

    def _x_dot(self, x, t):
        # Vector field for the full state [v, n, m, h].
        return np.array([
            self._v_dot(*x,t),
            self._n_dot(*x,t),
            self._m_dot(*x,t),
            self._h_dot(*x,t),
        ])

    def run(self, n):
        """Advance the simulation by n integration steps."""
        for _ in range(n):
            self.run_step()

    def run_step(self):
        """Advance one dt: record state, then integrate with Heun's method."""
        # Set containers if first time.
        # NOTE(review): on the very first step the initial sample is recorded
        # twice (here and in the update below) - confirm this is intended.
        if self.T.size == 0:
            self.T = np.append(self.T, self.t)
        if self.V.size == 0:
            self.V = np.append(self.V, self.x[0])
        if self.N.size == 0:
            self.N = np.append(self.N, self.x[1])
        if self.M.size == 0:
            self.M = np.append(self.M, self.x[2])
        if self.H.size == 0:
            self.H = np.append(self.H, self.x[3])
        if self.I.size == 0:
            self.I = np.append(self.I, self.i)
        # Test for spike
        # Spike detection is currently disabled for this model.
        #if self.u > self.u_thr:
        # self.S = np.append(self.S, self.t)
        # self.u = self.u_rest
        # Integration step (Heun / improved Euler, second order).
        k1 = self.dt*self._x_dot(self.x , self.t)
        k2 = self.dt*self._x_dot(self.x + k1, self.t + self.dt)
        self.x = self.x + (k1 + k2)/2
        # Update containers
        self.T = np.append(self.T, self.t)
        self.V = np.append(self.V, self.x[0])
        self.N = np.append(self.N, self.x[1])
        self.M = np.append(self.M, self.x[2])
        self.H = np.append(self.H, self.x[3])
        self.I = np.append(self.I, self.i)
        # Update time
        self.t = self.t + self.dt
class FitzHughNagumo(NeuronModel):
    """FitzHugh-Nagumo two-variable model integrated with Heun's method.

    State vector ``x = [v, w]``: fast membrane-like variable v and slow
    recovery variable w.
    """

    def __init__(self, R=1, a=0, b=0, tau=1, t0=0, v0=0, w0=0, dt=0.1, i0=1):
        # Model parameters.
        self.R = R
        self.a = a
        self.b = b
        self.tau = tau  # Time-scale separation of the recovery variable.
        # Integration parameters.
        self.t0 = t0
        self.dt = dt
        # Variables.
        self.x = np.array([v0, w0])
        self.t = t0
        self.i = i0  # Injected current (constant during the run).
        # Containers.
        self.T = np.array([], dtype=np.float64)  # Time
        self.V = np.array([], dtype=np.float64)  # Voltage
        self.W = np.array([], dtype=np.float64)  # W (recovery variable)
        self.I = np.array([], dtype=np.float64)  # Current
        self.S = np.array([], dtype=np.float64)  # Spike times
        # Other
        self.name = "FitzHugh-Nagumo"

    def _v_dot(self, v, w, t):
        # Cubic fast dynamics. Depends of v, w; and t (implicit).
        return v - v*v*v/3 - w + self.R*self.i

    def _w_dot(self, v, w, t):
        # Linear slow dynamics. Depends of v and w.
        return (v + self.a - self.b*w)/self.tau

    def _x_dot(self, x, t):
        # Vector field for the full state [v, w].
        return np.array([
            self._v_dot(*x, t),
            self._w_dot(*x, t)
        ])

    def run(self, n):
        """Advance the simulation by n integration steps."""
        for _ in range(n):
            self.run_step()

    def run_step(self):
        """Advance one dt: record state, then integrate with Heun's method."""
        # Set containers if first time.
        # NOTE(review): on the very first step the initial sample is recorded
        # twice (here and in the update below) - confirm this is intended.
        if self.T.size == 0:
            self.T = np.append(self.T, self.t)
        if self.V.size == 0:
            self.V = np.append(self.V, self.x[0])
        if self.W.size == 0:
            self.W = np.append(self.W, self.x[1])
        if self.I.size == 0:
            self.I = np.append(self.I, self.i)
        # Test for spike
        # Spike detection is currently disabled for this model.
        #if self.u > self.u_thr:
        # self.S = np.append(self.S, self.t)
        # self.u = self.u_rest
        # Integration step (Heun / improved Euler, second order).
        k1 = self.dt*self._x_dot(self.x , self.t)
        k2 = self.dt*self._x_dot(self.x + k1, self.t + self.dt)
        self.x = self.x + (k1 + k2)/2
        # Update containers
        self.T = np.append(self.T, self.t)
        self.V = np.append(self.V, self.x[0])
        self.W = np.append(self.W, self.x[1])
        self.I = np.append(self.I, self.i)
        # Update time
        self.t = self.t + self.dt
class LeakyIntegrateAndFire(NeuronModel):
    """Leaky integrate-and-fire model with threshold reset, Heun integration.

    The membrane potential v integrates the input current through an RC
    leak; when v exceeds ``v_thr`` a spike time is recorded and v resets to
    ``v_rest``.
    """

    def __init__(self, R=1, C=1, v0=-1, v_rest=-1, v_thr=0.5, t0=0, dt=0.1, i0=1):
        # Model parameters.
        self.R = R  # Membrane resistance.
        self.C = C  # Membrane capacitance.
        self.v_rest = v_rest  # Post-spike reset potential.
        self.v_thr = v_thr  # Spike threshold.
        # Integration parameters.
        self.t0 = t0
        self.dt = dt
        # Variables. (init. cond.)
        self.t = t0
        self.v = v0
        self.i = i0  # Injected current (constant during the run).
        # Containers. (Historic)
        self.T = np.array([], dtype=np.float64)  # Time
        self.V = np.array([], dtype=np.float64)  # Potential
        self.I = np.array([], dtype=np.float64)  # Current
        self.S = np.array([], dtype=np.float64)  # Spike times
        # Other
        self.name = "Leaky integrate-and-fire"

    def _v_dot(self, v, t):
        # dv/dt = (i - v/R)/C: input current minus the leak term.
        return (self.i - v/self.R)/self.C

    def run(self, n):
        """Advance the simulation by n integration steps."""
        for _ in range(n):
            self.run_step()

    def run_step(self):
        """Advance one dt: record state, detect/reset spikes, integrate (RK2)."""
        # Set containers if first time.
        # NOTE(review): on the very first step the initial sample is recorded
        # twice (here and in the update below) - confirm this is intended.
        if self.T.size == 0:
            self.T = np.append(self.T, self.t)
        if self.V.size == 0:
            self.V = np.append(self.V, self.v)
        if self.I.size == 0:
            self.I = np.append(self.I, self.i)
        # Test for spike
        if self.v > self.v_thr:
            self.S = np.append(self.S, self.t)
            self.v = self.v_rest
        # Integration step (Heun / improved Euler, second order).
        k1 = self.dt*self._v_dot(self.v , self.t)
        k2 = self.dt*self._v_dot(self.v + k1, self.t + self.dt)
        self.v = self.v + (k1 + k2)/2
        # Update containers
        self.T = np.append(self.T, self.t)
        self.V = np.append(self.V, self.v)
        self.I = np.append(self.I, self.i)
        # Update time
        self.t = self.t + self.dt
class IntegrateAndFire(NeuronModel):
    """Perfect (non-leaky) integrate-and-fire model with threshold reset.

    Like LeakyIntegrateAndFire but without a leak term: v integrates the
    input current i/C until it exceeds ``v_thr``, then resets to ``v_rest``.
    """

    def __init__(self, C=1, v0=-1, v_rest=-1, v_thr=0.5, t0=0, dt=0.1, i0=1):
        # Model parameters.
        self.C = C  # Membrane capacitance.
        self.v_rest = v_rest  # Post-spike reset potential.
        self.v_thr = v_thr  # Spike threshold.
        # Integration parameters.
        self.t0 = t0
        self.dt = dt
        # Variables. (init. cond.)
        self.t = t0
        self.v = v0
        self.i = i0  # Injected current (constant during the run).
        # Containers (Historic)
        self.T = np.array([], dtype=np.float64)  # Time
        self.V = np.array([], dtype=np.float64)  # Potential
        self.I = np.array([], dtype=np.float64)  # Current
        self.S = np.array([], dtype=np.float64)  # Spike times
        # Other
        self.name = "Integrate-and-fire"

    def _v_dot(self, v, t):
        # dv/dt = i/C: pure integration, no leak.
        return self.i/self.C

    def run(self, n):
        """Advance the simulation by n integration steps."""
        for _ in range(n):
            self.run_step()

    def run_step(self):
        """Advance one dt: record state, detect/reset spikes, integrate (RK2)."""
        # Set containers if first time.
        # NOTE(review): on the very first step the initial sample is recorded
        # twice (here and in the update below) - confirm this is intended.
        if self.T.size == 0:
            self.T = np.append(self.T, self.t)
        if self.V.size == 0:
            self.V = np.append(self.V, self.v)
        if self.I.size == 0:
            self.I = np.append(self.I, self.i)
        # Test for spike
        if self.v > self.v_thr:
            self.S = np.append(self.S, self.t)
            self.v = self.v_rest
        # Integration step (Heun / improved Euler, second order).
        k1 = self.dt*self._v_dot(self.v , self.t)
        k2 = self.dt*self._v_dot(self.v + k1, self.t + self.dt)
        self.v = self.v + (k1 + k2)/2
        # Update containers
        self.T = np.append(self.T, self.t)
        self.V = np.append(self.V, self.v)
        self.I = np.append(self.I, self.i)
        # Update time
        self.t = self.t + self.dt
# Short aliases for the model classes.
Izh = Izhikevich
HH = HodgkinHuxley
FHN = FitzHughNagumo
LIF = LeakyIntegrateAndFire
IF = IntegrateAndFire
| 29.507614
| 82
| 0.496473
| 1,865
| 11,626
| 3.014477
| 0.074531
| 0.064924
| 0.096051
| 0.062255
| 0.803273
| 0.792245
| 0.764141
| 0.75916
| 0.744931
| 0.71487
| 0
| 0.040664
| 0.342164
| 11,626
| 394
| 83
| 29.507614
| 0.69443
| 0.130827
| 0
| 0.68
| 0
| 0
| 0.008112
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.112
| false
| 0
| 0.008
| 0.048
| 0.192
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
07882f63abc9e6fb9ff120f36e2d69ee986c9dd9
| 62
|
py
|
Python
|
app_verifications/invitation/__init__.py
|
kskarbinski/threads-api
|
c144c1cb51422095922310d278f80e4996c10ea0
|
[
"MIT"
] | null | null | null |
app_verifications/invitation/__init__.py
|
kskarbinski/threads-api
|
c144c1cb51422095922310d278f80e4996c10ea0
|
[
"MIT"
] | null | null | null |
app_verifications/invitation/__init__.py
|
kskarbinski/threads-api
|
c144c1cb51422095922310d278f80e4996c10ea0
|
[
"MIT"
] | null | null | null |
from .invitation_verifications import InvitationVerifications
| 31
| 61
| 0.919355
| 5
| 62
| 11.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 62
| 1
| 62
| 62
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
078ae43c68e6eb9a946cc7af342acf3717fe845c
| 26
|
py
|
Python
|
utilities/__init__.py
|
r3d83ard/assemblyline_
|
6d191f9e465c10da40879799e00edaa3fc12d303
|
[
"MIT"
] | 2
|
2019-05-22T15:02:02.000Z
|
2019-07-10T21:29:51.000Z
|
utilities/__init__.py
|
r3d83ard/assemblyline_daily_sample
|
6d191f9e465c10da40879799e00edaa3fc12d303
|
[
"MIT"
] | null | null | null |
utilities/__init__.py
|
r3d83ard/assemblyline_daily_sample
|
6d191f9e465c10da40879799e00edaa3fc12d303
|
[
"MIT"
] | null | null | null |
from . import logger_util
| 13
| 25
| 0.807692
| 4
| 26
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 26
| 1
| 26
| 26
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
07c10034759bc8d8516ddf5dbf10f5b72ac1ced6
| 251
|
py
|
Python
|
NBprocessing/__init__.py
|
nirbarazida/NBprocessing
|
3b020829e9c2ec0ef67b881a69637ac14e635e75
|
[
"MIT"
] | 19
|
2020-11-29T17:27:38.000Z
|
2021-06-01T17:16:00.000Z
|
NBprocessing/__init__.py
|
nirbarazida/NBprocessing
|
3b020829e9c2ec0ef67b881a69637ac14e635e75
|
[
"MIT"
] | 11
|
2020-08-10T21:03:06.000Z
|
2020-10-07T11:13:43.000Z
|
NBprocessing/__init__.py
|
nirbarazida/NBprocessing
|
3b020829e9c2ec0ef67b881a69637ac14e635e75
|
[
"MIT"
] | 9
|
2020-12-26T11:18:15.000Z
|
2022-01-20T03:47:58.000Z
|
from NBprocessing.categorical._NBcategorical_class import NBcategorical
from NBprocessing.continuous._NBcontinuous_class import NBcontinuous
from NBprocessing.general._NBgeneral_class import NBgeneral
from NBprocessing.plot._NBplot_class import NBplot
| 62.75
| 71
| 0.908367
| 28
| 251
| 7.857143
| 0.428571
| 0.290909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059761
| 251
| 4
| 72
| 62.75
| 0.932203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
07e7a6d78cfd32d882f7e5f11cfe0fda19a42a64
| 53
|
py
|
Python
|
nbt/addons/__init__.py
|
SpikeVN/pycmdblock
|
9c93e2ecf2336ed2c9d8cae9fbaec21525f90ba8
|
[
"MIT"
] | null | null | null |
nbt/addons/__init__.py
|
SpikeVN/pycmdblock
|
9c93e2ecf2336ed2c9d8cae9fbaec21525f90ba8
|
[
"MIT"
] | null | null | null |
nbt/addons/__init__.py
|
SpikeVN/pycmdblock
|
9c93e2ecf2336ed2c9d8cae9fbaec21525f90ba8
|
[
"MIT"
] | null | null | null |
from .tools_nbt import *
from .villager_nbt import *
| 17.666667
| 27
| 0.773585
| 8
| 53
| 4.875
| 0.625
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150943
| 53
| 2
| 28
| 26.5
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
07ef9f5c160c368c13025a88c815375829038dd4
| 262
|
py
|
Python
|
users/models.py
|
dentonya/python-django-sales-inventory-project
|
d0fcdf81136908a022e0f4eeca94fc0357473635
|
[
"Apache-2.0"
] | 1
|
2021-10-18T09:27:03.000Z
|
2021-10-18T09:27:03.000Z
|
users/models.py
|
dentonya/python-django-sales-inventory-project
|
d0fcdf81136908a022e0f4eeca94fc0357473635
|
[
"Apache-2.0"
] | 1
|
2021-08-04T20:11:28.000Z
|
2021-08-04T20:11:28.000Z
|
users/models.py
|
dentonya/python-django-sales-inventory-project
|
d0fcdf81136908a022e0f4eeca94fc0357473635
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import AbstractUser


class User(AbstractUser):
    """Custom user model extending AbstractUser with role flags."""

    # Role flags; every new user starts with no role assigned.
    is_buyer = models.BooleanField(default=False)
    is_supplier = models.BooleanField(default=False)
    is_admin = models.BooleanField(default=False)
| 29.111111
| 52
| 0.790076
| 33
| 262
| 6.181818
| 0.515152
| 0.264706
| 0.367647
| 0.441176
| 0.313725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125954
| 262
| 8
| 53
| 32.75
| 0.89083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
07fd00f3f89714578611ae7c12e6b401fab2b29b
| 328
|
py
|
Python
|
backend/config/config.py
|
naiiytom/cms-angular-fastapi-keycloak
|
fe9266c94404e71a74b5b9676d7283b63dc11cd3
|
[
"MIT"
] | 1
|
2022-01-29T21:20:27.000Z
|
2022-01-29T21:20:27.000Z
|
backend/config/config.py
|
naiiytom/cms-angular-fastapi-keycloak
|
fe9266c94404e71a74b5b9676d7283b63dc11cd3
|
[
"MIT"
] | null | null | null |
backend/config/config.py
|
naiiytom/cms-angular-fastapi-keycloak
|
fe9266c94404e71a74b5b9676d7283b63dc11cd3
|
[
"MIT"
] | 1
|
2022-01-29T21:20:29.000Z
|
2022-01-29T21:20:29.000Z
|
import os
class Key():
    """Holds AWS credentials read from the environment at construction time.

    Falls back to the placeholder strings 'ACCESS_KEY' / 'SECRET_KEY' when
    the corresponding environment variables are unset.
    """

    def __init__(self):
        # Snapshot the environment once; later env changes are not reflected.
        self.AWS_ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY', 'ACCESS_KEY')
        self.AWS_SECRET_KEY = os.environ.get('AWS_SECRET_KEY', 'SECRET_KEY')

    def get_access_key(self):
        """Return the configured AWS access key."""
        return self.AWS_ACCESS_KEY

    def get_secret_key(self):
        """Return the configured AWS secret key."""
        return self.AWS_SECRET_KEY
| 25.230769
| 71
| 0.67378
| 49
| 328
| 4.061224
| 0.265306
| 0.226131
| 0.180905
| 0.160804
| 0.201005
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222561
| 328
| 12
| 72
| 27.333333
| 0.780392
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.111111
| 0.222222
| 0.777778
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
ed06415f29f559b5e6263a064cf17a94040dd5d2
| 8,222
|
py
|
Python
|
cumulus_nclu.py
|
vaibhgupta157/CumulusVXNetconf
|
195e257254321bcffb45eba955c247f630710a52
|
[
"MIT"
] | null | null | null |
cumulus_nclu.py
|
vaibhgupta157/CumulusVXNetconf
|
195e257254321bcffb45eba955c247f630710a52
|
[
"MIT"
] | null | null | null |
cumulus_nclu.py
|
vaibhgupta157/CumulusVXNetconf
|
195e257254321bcffb45eba955c247f630710a52
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# Compatibility shim: expose the Python 2 names (`__builtin__`, `long`) on
# Python 3 so the generated classes below can reference them unconditionally.
if six.PY3:
    import builtins as __builtin__
    long = int
elif six.PY2:
    import __builtin__
class yc_commands_cumulus_nclu__commands(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module cumulus-nclu - based on the path /commands. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.
    """
    # Restrict instances to the pyangbind bookkeeping attributes plus the
    # storage slot for the single 'cmd' leaf-list.
    __slots__ = ('_path_helper', '_extmethods', '__cmd',)

    _yang_name = 'commands'
    _yang_namespace = 'http://example.com/cumulus-nclu'

    _pybind_generated_by = 'container'

    def __init__(self, *args, **kwargs):
        """Initialise the container; with one positional argument, copy-construct
        from that object's changed elements (pyangbind convention)."""
        self._path_helper = False
        self._extmethods = False
        # 'cmd' is a leaf-list of unique strings, modelled as a typed list.
        self.__cmd = YANGDynClass(unique=True, base=TypedListType(allowed_type=six.text_type), is_leaf=False, yang_name="cmd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://example.com/cumulus-nclu', defining_module='cumulus-nclu', yang_type='string', is_config=True)

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            # The source object must expose every element of this container.
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                # Only copy elements that were explicitly changed on the source.
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Build the element path by recursing up the parent chain when attached.
        if hasattr(self, "_parent"):
            return self._parent._path()+[self._yang_name]
        else:
            return [u'commands']

    def _get_cmd(self):
        """
        Getter method for cmd, mapped from YANG variable /commands/cmd (string)
        """
        return self.__cmd

    def _set_cmd(self, v, load=False):
        """
        Setter method for cmd, mapped from YANG variable /commands/cmd (string)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_cmd is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_cmd() directly.
        """
        # Unwrap pyangbind proxy values before revalidating against the type.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(v,unique=True, base=TypedListType(allowed_type=six.text_type), is_leaf=False, yang_name="cmd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://example.com/cumulus-nclu', defining_module='cumulus-nclu', yang_type='string', is_config=True)
        except (TypeError, ValueError):
            # Re-raise with a structured error describing the expected type.
            raise ValueError({
                'error-string': """cmd must be of a type compatible with string""",
                'defined-type': "string",
                'generated-type': """YANGDynClass(unique=True, base=TypedListType(allowed_type=six.text_type), is_leaf=False, yang_name="cmd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://example.com/cumulus-nclu', defining_module='cumulus-nclu', yang_type='string', is_config=True)""",
            })

        self.__cmd = t
        # Notify an enclosing object, if any, that this container changed.
        if hasattr(self, '_set'):
            self._set()

    def _unset_cmd(self):
        # Reset 'cmd' to a fresh, empty leaf-list instance.
        self.__cmd = YANGDynClass(unique=True, base=TypedListType(allowed_type=six.text_type), is_leaf=False, yang_name="cmd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://example.com/cumulus-nclu', defining_module='cumulus-nclu', yang_type='string', is_config=True)

    # Expose 'cmd' through the getter/setter pair; __builtin__ is aliased to
    # 'builtins' on Python 3 by the compatibility shim near the imports.
    cmd = __builtin__.property(_get_cmd, _set_cmd)

    _pyangbind_elements = OrderedDict([('cmd', cmd), ])
class cumulus_nclu(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module cumulus-nclu - based on the path /cumulus-nclu. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.
    """
    # Restrict instances to the pyangbind bookkeeping attributes plus the
    # storage slot for the 'commands' child container.
    __slots__ = ('_path_helper', '_extmethods', '__commands',)

    _yang_name = 'cumulus-nclu'

    _pybind_generated_by = 'container'

    def __init__(self, *args, **kwargs):
        """Initialise the module root; with one positional argument, copy-construct
        from that object's changed elements (pyangbind convention)."""
        self._path_helper = False
        self._extmethods = False
        # 'commands' is the single child container, backed by the generated
        # yc_commands_cumulus_nclu__commands class.
        self.__commands = YANGDynClass(base=yc_commands_cumulus_nclu__commands, is_container='container', yang_name="commands", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://example.com/cumulus-nclu', defining_module='cumulus-nclu', yang_type='container', is_config=True)

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            # The source object must expose every element of this container.
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                # Only copy elements that were explicitly changed on the source.
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # The module root has an empty path unless it is attached to a parent.
        if hasattr(self, "_parent"):
            return self._parent._path()+[self._yang_name]
        else:
            return []

    def _get_commands(self):
        """
        Getter method for commands, mapped from YANG variable /commands (container)
        """
        return self.__commands

    def _set_commands(self, v, load=False):
        """
        Setter method for commands, mapped from YANG variable /commands (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_commands is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_commands() directly.
        """
        # Unwrap pyangbind proxy values before revalidating against the type.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(v,base=yc_commands_cumulus_nclu__commands, is_container='container', yang_name="commands", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://example.com/cumulus-nclu', defining_module='cumulus-nclu', yang_type='container', is_config=True)
        except (TypeError, ValueError):
            # Re-raise with a structured error describing the expected type.
            raise ValueError({
                'error-string': """commands must be of a type compatible with container""",
                'defined-type': "container",
                'generated-type': """YANGDynClass(base=yc_commands_cumulus_nclu__commands, is_container='container', yang_name="commands", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://example.com/cumulus-nclu', defining_module='cumulus-nclu', yang_type='container', is_config=True)""",
            })

        self.__commands = t
        # Notify an enclosing object, if any, that this container changed.
        if hasattr(self, '_set'):
            self._set()

    def _unset_commands(self):
        # Reset 'commands' to a fresh, empty child container instance.
        self.__commands = YANGDynClass(base=yc_commands_cumulus_nclu__commands, is_container='container', yang_name="commands", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://example.com/cumulus-nclu', defining_module='cumulus-nclu', yang_type='container', is_config=True)

    # Expose 'commands' through the getter/setter pair; __builtin__ is aliased
    # to 'builtins' on Python 3 by the compatibility shim near the imports.
    commands = __builtin__.property(_get_commands, _set_commands)

    _pyangbind_elements = OrderedDict([('commands', commands), ])
| 42.164103
| 363
| 0.710898
| 1,079
| 8,222
| 5.174235
| 0.156627
| 0.053197
| 0.045137
| 0.037077
| 0.845961
| 0.801899
| 0.797779
| 0.780942
| 0.780942
| 0.749418
| 0
| 0.00251
| 0.176234
| 8,222
| 194
| 364
| 42.381443
| 0.821792
| 0.154707
| 0
| 0.608
| 0
| 0.016
| 0.233284
| 0.054399
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.12
| 0
| 0.352
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ed49e6a3b9f182371dd1c67217e51e5aadd72790
| 203
|
py
|
Python
|
information_sharing/apps.py
|
luke-feng/MAP
|
bc82ec20e2859dd6437d314fc12880f5e6b6aad8
|
[
"Apache-2.0"
] | 1
|
2021-11-30T19:38:55.000Z
|
2021-11-30T19:38:55.000Z
|
information_sharing/apps.py
|
luke-feng/MAP
|
bc82ec20e2859dd6437d314fc12880f5e6b6aad8
|
[
"Apache-2.0"
] | null | null | null |
information_sharing/apps.py
|
luke-feng/MAP
|
bc82ec20e2859dd6437d314fc12880f5e6b6aad8
|
[
"Apache-2.0"
] | 1
|
2022-02-25T22:48:21.000Z
|
2022-02-25T22:48:21.000Z
|
from django.apps import AppConfig
class InformationSharingConfig(AppConfig):
    """Django application configuration for the information-sharing app."""

    # App label as referenced from INSTALLED_APPS.
    name = 'information_sharing'
    # Human-readable names (singular and plural) shown in the admin UI.
    verbose_name = "information sharing"
    verbose_name_plural = "information sharing"
| 20.3
| 47
| 0.773399
| 20
| 203
| 7.65
| 0.6
| 0.352941
| 0.287582
| 0.379085
| 0.405229
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162562
| 203
| 9
| 48
| 22.555556
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0.280788
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
ed6d63a9cc428ee67b7e7a5c326cdd01bf8cde3f
| 2,278
|
py
|
Python
|
GUI/PyQt/networks/multiclass/SENets/squeeze_excitation_block.py
|
thomaskuestner/CNNArt
|
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
|
[
"Apache-2.0"
] | 22
|
2018-04-27T21:28:46.000Z
|
2021-12-24T06:44:55.000Z
|
GUI/PyQt/networks/multiclass/SENets/squeeze_excitation_block.py
|
thomaskuestner/CNNArt
|
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
|
[
"Apache-2.0"
] | 81
|
2017-11-09T17:23:15.000Z
|
2020-01-28T22:54:13.000Z
|
GUI/PyQt/networks/multiclass/SENets/squeeze_excitation_block.py
|
thomaskuestner/CNNArt
|
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
|
[
"Apache-2.0"
] | 18
|
2017-11-13T16:12:17.000Z
|
2020-08-27T10:17:34.000Z
|
'''
@author: Yannick Wilhelm
@email: yannick.wilhelm@gmx.de
@date: February 2017
Keras implementation of a Squeeze-and-Excitation-Block in accordance with the original paper
(Hu 2017, Squeeze and Excitation Networks)
'''
from keras.layers import GlobalAveragePooling2D, Reshape, Dense, multiply, Permute, GlobalAveragePooling3D
from keras import backend
def squeeze_excitation_block(inputSE, ratio=16):
    '''
    Creates a 2D squeeze-and-excitation block (Hu 2017, Squeeze-and-Excitation Networks).

    :param inputSE: input keras tensor (4D; channel axis per backend data format)
    :param ratio: reduction ratio r for the bottleneck given by the two FC layers
    :return: keras tensor of the same shape as inputSE, channel-wise rescaled
    '''
    # Channel axis position depends on the backend image data format.
    if backend.image_data_format() == 'channels_first':
        channels = 1
    else:
        channels = -1

    # Number of input filters/channels.
    inputSE_shape = backend.int_shape(inputSE)
    numChannels = inputSE_shape[channels]

    # Squeeze operation: global spatial average pooling, one value per channel.
    output = GlobalAveragePooling2D(data_format=backend.image_data_format())(inputSE)

    # Excitation operation: bottleneck FC pair. Guard against a zero-width
    # bottleneck when numChannels < ratio, which would create an invalid
    # Dense(0) layer.
    bottleneckUnits = max(numChannels // ratio, 1)
    output = Dense(bottleneckUnits, activation='relu', use_bias=True, kernel_initializer='he_normal')(output)
    output = Dense(numChannels, activation='sigmoid', use_bias=True, kernel_initializer='he_normal')(output)

    # Scale operation: reweight the input feature maps channel-wise.
    output = multiply([inputSE, output])
    return output
def squeeze_excitation_block_3D(inputSE, ratio=16):
    '''
    Creates a 3D squeeze-and-excitation block (Hu 2017, Squeeze-and-Excitation Networks).

    :param inputSE: input keras tensor (5D; channel axis per backend data format)
    :param ratio: reduction ratio r for the bottleneck given by the two FC layers
    :return: keras tensor of the same shape as inputSE, channel-wise rescaled
    '''
    # Channel axis position depends on the backend image data format.
    if backend.image_data_format() == 'channels_first':
        channels = 1
    else:
        channels = -1

    # Number of input filters/channels.
    inputSE_shape = backend.int_shape(inputSE)
    numChannels = inputSE_shape[channels]

    # Squeeze operation: global spatial average pooling, one value per channel.
    output = GlobalAveragePooling3D(data_format=backend.image_data_format())(inputSE)

    # Excitation operation: bottleneck FC pair. Guard against a zero-width
    # bottleneck when numChannels < ratio, which would create an invalid
    # Dense(0) layer.
    bottleneckUnits = max(numChannels // ratio, 1)
    output = Dense(bottleneckUnits, activation='relu', use_bias=True, kernel_initializer='he_normal')(output)
    output = Dense(numChannels, activation='sigmoid', use_bias=True, kernel_initializer='he_normal')(output)

    # Scale operation: reweight the input feature maps channel-wise.
    output = multiply([inputSE, output])
    return output
| 32.084507
| 113
| 0.704126
| 262
| 2,278
| 5.98855
| 0.301527
| 0.038241
| 0.050988
| 0.056087
| 0.772467
| 0.755895
| 0.755895
| 0.755895
| 0.755895
| 0.755895
| 0
| 0.011621
| 0.20676
| 2,278
| 71
| 114
| 32.084507
| 0.856669
| 0.310799
| 0
| 0.769231
| 0
| 0
| 0.060056
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.076923
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.