hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
54ab3096d6e5d6dec659fd27bd08ac1d4253100e
| 165
|
py
|
Python
|
core_types/list/remove.py
|
dmilos/python_tutorial
|
f2f901a68cbc696e19350455da9b7db312d1a9fa
|
[
"MIT-0"
] | null | null | null |
core_types/list/remove.py
|
dmilos/python_tutorial
|
f2f901a68cbc696e19350455da9b7db312d1a9fa
|
[
"MIT-0"
] | null | null | null |
core_types/list/remove.py
|
dmilos/python_tutorial
|
f2f901a68cbc696e19350455da9b7db312d1a9fa
|
[
"MIT-0"
] | null | null | null |
#!/usr/bin/env python
aList = [123, 'xyz', 'zara', 'abc', 'xyz'];
aList.remove('xyz');
print "List : ", aList;
aList.remove('abc');
print "List : ", aList;
| 23.571429
| 44
| 0.557576
| 22
| 165
| 4.181818
| 0.545455
| 0.23913
| 0.304348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.181818
| 165
| 7
| 45
| 23.571429
| 0.659259
| 0.121212
| 0
| 0.4
| 0
| 0
| 0.23741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.4
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
54b9e66eb2fcc5a4f78938835eeff000869ecf20
| 61
|
py
|
Python
|
django_passwords/__init__.py
|
aiakos/aiakos
|
a591e7ef13ab9e8e14b4d3569d43fce694c4150a
|
[
"BSD-2-Clause",
"MIT"
] | 4
|
2017-04-28T19:09:17.000Z
|
2018-07-03T04:43:54.000Z
|
django_passwords/__init__.py
|
aiakos/aiakos
|
a591e7ef13ab9e8e14b4d3569d43fce694c4150a
|
[
"BSD-2-Clause",
"MIT"
] | 2
|
2020-06-05T17:46:47.000Z
|
2021-06-10T17:22:58.000Z
|
django_passwords/__init__.py
|
aiakos/aiakos
|
a591e7ef13ab9e8e14b4d3569d43fce694c4150a
|
[
"BSD-2-Clause",
"MIT"
] | 2
|
2017-08-14T07:15:14.000Z
|
2019-03-04T14:02:05.000Z
|
default_app_config = 'django_passwords.apps.PasswordsConfig'
| 30.5
| 60
| 0.868852
| 7
| 61
| 7.142857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04918
| 61
| 1
| 61
| 61
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0.606557
| 0.606557
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
49add86f0354247b9b603ac3da374b45b82713dc
| 5,567
|
py
|
Python
|
src/compute_delta_var.py
|
MrinankSharma/PLD-Accountant
|
3d73e86c1f2dbe5f2ac7a349e30ba15531abbd5b
|
[
"MIT"
] | null | null | null |
src/compute_delta_var.py
|
MrinankSharma/PLD-Accountant
|
3d73e86c1f2dbe5f2ac7a349e30ba15531abbd5b
|
[
"MIT"
] | null | null | null |
src/compute_delta_var.py
|
MrinankSharma/PLD-Accountant
|
3d73e86c1f2dbe5f2ac7a349e30ba15531abbd5b
|
[
"MIT"
] | null | null | null |
'''
A code for computing exact DP guarantees.
The method is described in
A.Koskela, J.Jälkö and A.Honkela:
Computing Exact Guarantees for Differential Privacy.
arXiv preprint arXiv:1906.03049 (2019)
The code is due to Antti Koskela (@koskeant) and Joonas Jälkö (@jjalko)
'''
import numpy as np
# Parameters:
# target_delta - target delta
# sigma_t - array of sigma values
# q_t - array of q values
# nx - number of points in the discretisation grid
# L - limit for the integral
def get_delta_unbounded(sigma_t, q_t, target_eps=1.0, nx=1E6, L=20.0):
nx = int(nx)
tol_newton = 1e-10 # set this to, e.g., 0.01*target_delta
dx = 2.0 * L / nx # discretisation interval \Delta x
x = np.linspace(-L, L - dx, nx, dtype=np.complex128) # grid for the numerical integration
fx_table = []
F_prod = np.ones(x.size)
ncomp = sigma_t.size
if (q_t.size != ncomp):
print('The arrays for q and sigma are of different size!')
return float('inf')
for ij in range(ncomp):
sigma = sigma_t[ij]
q = q_t[ij]
# first ii for which x(ii)>log(1-q),
# i.e. start of the integral domain
ii = int(np.floor(float(nx * (L + np.log(1 - q)) / (2 * L))))
# Evaluate the PLD distribution,
# The case of remove/add relation (Subsection 5.1)
Linvx = (sigma ** 2) * np.log((np.exp(x[ii + 1:]) - (1 - q)) / q) + 0.5
ALinvx = (1 / np.sqrt(2 * np.pi * sigma ** 2)) * ((1 - q) * np.exp(-Linvx * Linvx / (2 * sigma ** 2)) +
q * np.exp(-(Linvx - 1) * (Linvx - 1) / (2 * sigma ** 2)));
dLinvx = (sigma ** 2 * np.exp(x[ii + 1:])) / (np.exp(x[ii + 1:]) - (1 - q));
fx = np.zeros(nx)
fx[ii + 1:] = np.real(ALinvx * dLinvx)
half = int(nx / 2)
# Flip fx, i.e. fx <- D(fx), the matrix D = [0 I;I 0]
temp = np.copy(fx[half:])
fx[half:] = np.copy(fx[:half])
fx[:half] = temp
# Compute the DFT
FF1 = np.fft.fft(fx * dx)
F_prod = F_prod * FF1
# first jj for which 1-exp(target_eps-x)>0,
# i.e. start of the integral domain
jj = int(np.floor(float(nx * (L + target_eps) / (2 * L))))
# Compute the inverse DFT
cfx = np.fft.ifft((F_prod / dx))
# Flip again, i.e. cfx <- D(cfx), D = [0 I;I 0]
temp = np.copy(cfx[half:])
cfx[half:] = cfx[:half]
cfx[:half] = temp
# Evaluate \delta(target_eps) and \delta'(target_eps)
exp_e = 1 - np.exp(target_eps - x)
integrand = exp_e * cfx
sum_int = np.sum(integrand[jj + 1:])
delta = sum_int * dx
print('Unbounded DP-delta after ' + str(int(ncomp)) + ' compositions defined by sigma and q arrays:' + str(
np.real(delta)) + ' (epsilon=' + str(target_eps) + ')')
return np.real(delta)
# Parameters:
# target_delta - target delta
# sigma_t - array of sigma values
# q_t - array of q values
# nx - number of points in the discretisation grid
# L - limit for the integral
def get_delta_bounded(sigma_t, q_t, target_eps=1.0, nx=1E6, L=20.0):
nx = int(nx)
tol_newton = 1e-10 # set this to, e.g., 0.01*target_delta
dx = 2.0 * L / nx # discretisation interval \Delta x
x = np.linspace(-L, L - dx, nx, dtype=np.complex128) # grid for the numerical integration
fx_table = []
F_prod = np.ones(x.size)
ncomp = sigma_t.size
if (q_t.size != ncomp):
print('The arrays for q and sigma are of different size!')
return float('inf')
for ij in range(ncomp):
sigma = sigma_t[ij]
q = q_t[ij]
# Evaluate the PLD distribution,
# This is the case of substitution relation (subsection 5.2)
c = q * np.exp(-1 / (2 * sigma ** 2))
ey = np.exp(x)
term1 = (-(1 - q) * (1 - ey) + np.sqrt((1 - q) ** 2 * (1 - ey) ** 2 + 4 * c ** 2 * ey)) / (2 * c)
term1 = np.maximum(term1, 1e-16)
Linvx = (sigma ** 2) * np.log(term1)
sq = np.sqrt((1 - q) ** 2 * (1 - ey) ** 2 + 4 * c ** 2 * ey)
nom1 = 4 * c ** 2 * ey - 2 * (1 - q) ** 2 * ey * (1 - ey)
term1 = nom1 / (2 * sq)
nom2 = term1 + (1 - q) * ey
nom2 = nom2 * (sq + (1 - q) * (1 - ey))
dLinvx = sigma ** 2 * nom2 / (4 * c ** 2 * ey)
ALinvx = (1 / np.sqrt(2 * np.pi * sigma ** 2)) * ((1 - q) * np.exp(-Linvx * Linvx / (2 * sigma ** 2)) +
q * np.exp(-(Linvx - 1) * (Linvx - 1) / (2 * sigma ** 2)))
fx = np.real(ALinvx * dLinvx)
half = int(nx / 2)
# Flip fx, i.e. fx <- D(fx), the matrix D = [0 I;I 0]
temp = np.copy(fx[half:])
fx[half:] = np.copy(fx[:half])
fx[:half] = temp
FF1 = np.fft.fft(fx * dx) # Compute the DFFT
F_prod = F_prod * FF1
# first jj for which 1-exp(target_eps-x)>0,
# i.e. start of the integral domain
jj = int(np.floor(float(nx * (L + np.real(target_eps)) / (2 * L))))
# Compute the inverse DFT
cfx = np.fft.ifft((F_prod / dx))
# Flip again, i.e. cfx <- D(cfx), D = [0 I;I 0]
temp = np.copy(cfx[half:])
cfx[half:] = cfx[:half]
cfx[:half] = temp
# Evaluate \delta(target_eps) and \delta'(target_eps)
exp_e = 1 - np.exp(target_eps - x)
integrand = exp_e * cfx
sum_int = np.sum(integrand[jj + 1:])
delta = sum_int * dx
print('Bounded DP-delta after ' + str(int(ncomp)) + ' compositions defined by sigma and q arrays:' + str(
np.real(delta)) + ' (epsilon=' + str(target_eps) + ')')
return np.real(delta)
| 33.335329
| 117
| 0.536914
| 895
| 5,567
| 3.27486
| 0.173184
| 0.042989
| 0.020471
| 0.028659
| 0.785397
| 0.769021
| 0.758785
| 0.734903
| 0.734903
| 0.734903
| 0
| 0.04036
| 0.301239
| 5,567
| 166
| 118
| 33.536145
| 0.713111
| 0.28651
| 0
| 0.697674
| 0
| 0
| 0.066718
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0
| 0.011628
| 0
| 0.081395
| 0.046512
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
49b34343ef5295755accba9ab4cf5a8f967b4815
| 13
|
py
|
Python
|
login.py
|
Xiacloud/test27
|
9794f194bbb6149128aeea0a86206712c634ae7a
|
[
"MIT"
] | null | null | null |
login.py
|
Xiacloud/test27
|
9794f194bbb6149128aeea0a86206712c634ae7a
|
[
"MIT"
] | null | null | null |
login.py
|
Xiacloud/test27
|
9794f194bbb6149128aeea0a86206712c634ae7a
|
[
"MIT"
] | null | null | null |
num1 = 11111
| 6.5
| 12
| 0.692308
| 2
| 13
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0.230769
| 13
| 1
| 13
| 13
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
49c7ae1dfbe5ca509f1ffe352156e6a78bd7128f
| 133
|
py
|
Python
|
plotter/__init__.py
|
kalinkinisaac/modular
|
301d26ad222a5ef3278aaf251908e0a8537bb58f
|
[
"MIT"
] | null | null | null |
plotter/__init__.py
|
kalinkinisaac/modular
|
301d26ad222a5ef3278aaf251908e0a8537bb58f
|
[
"MIT"
] | null | null | null |
plotter/__init__.py
|
kalinkinisaac/modular
|
301d26ad222a5ef3278aaf251908e0a8537bb58f
|
[
"MIT"
] | null | null | null |
from .graph_plotter import GraphPlotter
from .geodesic_plotter import GeodesicPlotter
__all__ = ['GraphPlotter', 'GeodesicPlotter']
| 26.6
| 45
| 0.827068
| 13
| 133
| 8
| 0.615385
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097744
| 133
| 4
| 46
| 33.25
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0.203008
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
49c93c68fb98613ea7b11f0c17c53ba60148bdcb
| 85
|
py
|
Python
|
sortedm2m_tests/compat.py
|
Freston2021/dfconfecciones
|
da1d62006aa958295159ae9fa58584c670ec83be
|
[
"MIT"
] | 187
|
2019-06-27T08:37:40.000Z
|
2022-03-29T12:23:19.000Z
|
sortedm2m_tests/compat.py
|
Freston2021/dfconfecciones
|
da1d62006aa958295159ae9fa58584c670ec83be
|
[
"MIT"
] | 89
|
2015-01-08T18:24:17.000Z
|
2019-06-26T13:16:09.000Z
|
sortedm2m_tests/compat.py
|
Freston2021/dfconfecciones
|
da1d62006aa958295159ae9fa58584c670ec83be
|
[
"MIT"
] | 92
|
2015-01-09T17:45:48.000Z
|
2019-06-21T08:56:10.000Z
|
def m2m_set(instance, field_name, objs):
getattr(instance, field_name).set(objs)
| 28.333333
| 43
| 0.752941
| 13
| 85
| 4.692308
| 0.615385
| 0.42623
| 0.557377
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013333
| 0.117647
| 85
| 2
| 44
| 42.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
49dee1eb9aba1dde7f48827a1efb9e7f376984fa
| 10,267
|
py
|
Python
|
python_common/global_param.py
|
ivanlevsky/cowabunga-potato
|
ab317582b7b8f99d7be3ea4f5edbe9829fc398fb
|
[
"MIT"
] | null | null | null |
python_common/global_param.py
|
ivanlevsky/cowabunga-potato
|
ab317582b7b8f99d7be3ea4f5edbe9829fc398fb
|
[
"MIT"
] | null | null | null |
python_common/global_param.py
|
ivanlevsky/cowabunga-potato
|
ab317582b7b8f99d7be3ea4f5edbe9829fc398fb
|
[
"MIT"
] | null | null | null |
from file_and_system.config_utils import ConfigUtils
import os
class GlobalParam:
project_path = os.path.dirname(os.getcwd())
conf_path = ''.join((project_path + r'\test file\cf.properties'))
# print(sys.path[0])
# print(os.path.dirname(os.getcwd()))
# print(os.path.dirname(os.path.realpath(__file__)))
# print(sys.path[1])
# conf_path = r'D:\ivanovsky\IdeaProjects\cowabunga-potato\test file\cf.properties'
section_test_path = 'test_path'
section_opencv_utils = 'opencv_utils'
section_machine_learning = 'machine_learning'
section_appium = 'appium'
section_selenium = 'selenium'
section_databases = 'databases'
section_test_reports = 'testReports'
section_gif_utils = 'image_utils'
# test_path section
@staticmethod
def get_test_image_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_test_path, 'test_image_path')[2]))
@staticmethod
def get_test_video_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_test_path, 'test_video_path')[2]))
@staticmethod
def get_test_file_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_test_path, 'test_file_path')[2]))
# opencv_utils section
@staticmethod
def get_system_font_path():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'system_font_path')[2]
@staticmethod
def get_tesseract_path():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'tesseract_path')[2]
@staticmethod
def get_image_input():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'image_input')[2]))
@staticmethod
def get_image_output():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'image_output')[2]))
@staticmethod
def get_character_output():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'character_output')[2]))
@staticmethod
def get_sentence_output():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'sentence_output')[2]))
@staticmethod
def get_video_input():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'video_input')[2]))
@staticmethod
def get_video_output():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'video_output')[2]))
@staticmethod
def get_face_detect_face_xml():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'face_detect_face_xml')[
2]))
@staticmethod
def get_face_detect_eyes_xml():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'face_detect_eyes_xml')[
2]))
# appium section
@staticmethod
def get_aapt_path():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_appium, 'aapt_path')[2]))
@staticmethod
def get_android_apk_list():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_appium, 'android_apk_list')[2]))
@staticmethod
def get_appium_screenshot_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_appium, 'appium_screenshot_path')[2]))
@staticmethod
def get_appium_screenrecord_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_appium, 'appium_screenrecord_path')[2]))
@staticmethod
def get_qr_code_image_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_appium, 'qr_code_image_path')[2]))
# machine learning section
@staticmethod
def get_ml_ch2_housing_data():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_machine_learning,
'ml_ch2_housing_data')[2]))
@staticmethod
def get_ml_ch2_housing_image():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_machine_learning,
'ml_ch2_housing_image')[2]))
@staticmethod
def get_ml_ch3_sklearn_data_home():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_machine_learning,
'ml_ch3_sklearn_data_home')[2]))
@staticmethod
def get_ml_numpy_array_save_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_machine_learning,
'ml_numpy_array_save_path')[2]))
@staticmethod
def get_ml_matplotlib_figure_save_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_machine_learning,
'ml_matplotlib_figure_save_path')[2]))
# selenium section
@staticmethod
def get_chrome_driver_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_selenium, 'chrome_driver_path')[2]))
@staticmethod
def get_ie_driver_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_selenium, 'ie_driver_path')[2]))
@staticmethod
def get_edge_driver_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_selenium, 'edge_driver_path')[2]))
@staticmethod
def get_chromium_path():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_selenium, 'chromium_path')[2]
# databases section
@staticmethod
def get_mariadb_url():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'mariadb_url')[2]
@staticmethod
def get_mariadb_user():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'mariadb_user')[2]
@staticmethod
def get_mariadb_password():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'mariadb_password')[2]
@staticmethod
def get_pgsql_url():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'pgsql_url')[2]
@staticmethod
def get_pgsql_user():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'pgsql_user')[2]
@staticmethod
def get_pgsql_password():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'pgsql_password')[2]
@staticmethod
def get_excel_datasets():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'excel_datasets')[2]))
@staticmethod
def get_csv_datasets():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_databases, 'csv_datasets')[2]))
# test_reports section
@staticmethod
def get_unittest_reports():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_test_reports, 'unittest_reports')[2]))
@staticmethod
def get_pytest_reports():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_test_reports, 'pytest_reports')[2]))
@staticmethod
def get_word_report():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_test_reports, 'word_report')[2]))
# gif_utils section
@staticmethod
def get_gif_import():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_gif_utils, 'gif_import')[2]))
@staticmethod
def get_gif_export():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_gif_utils, 'gif_export')[2]))
| 44.25431
| 141
| 0.658323
| 1,082
| 10,267
| 5.854898
| 0.089649
| 0.053039
| 0.113654
| 0.145225
| 0.827782
| 0.699132
| 0.658248
| 0.658248
| 0.658248
| 0.658248
| 0
| 0.006202
| 0.246226
| 10,267
| 232
| 142
| 44.25431
| 0.812379
| 0.034966
| 0
| 0.474576
| 0
| 0
| 0.073067
| 0.012532
| 0
| 0
| 0
| 0
| 0
| 1
| 0.225989
| false
| 0.022599
| 0.022599
| 0.225989
| 0.536723
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
b70779066cb067bb08d0687ea083315f49245112
| 106
|
py
|
Python
|
coredata/__init__.py
|
koddsson/coredata-python-client
|
b6ed8086e79a70f21bd843f7d2997546d3c4a6d3
|
[
"MIT"
] | null | null | null |
coredata/__init__.py
|
koddsson/coredata-python-client
|
b6ed8086e79a70f21bd843f7d2997546d3c4a6d3
|
[
"MIT"
] | null | null | null |
coredata/__init__.py
|
koddsson/coredata-python-client
|
b6ed8086e79a70f21bd843f7d2997546d3c4a6d3
|
[
"MIT"
] | null | null | null |
""" Import packages here for visability. """
from .coredata import CoredataClient, Entity, CoredataError
| 26.5
| 59
| 0.773585
| 11
| 106
| 7.454545
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 106
| 3
| 60
| 35.333333
| 0.891304
| 0.339623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b70e699de7074fedf8a4dbf48e09c9a093ee3cff
| 71
|
py
|
Python
|
ic3_labels/__init__.py
|
IceCubeOpenSource/ic3-labels
|
049565e1dd423115020484fca5b891afdd1f97bc
|
[
"MIT"
] | 1
|
2021-04-21T09:06:12.000Z
|
2021-04-21T09:06:12.000Z
|
ic3_labels/__init__.py
|
icecube/ic3-labels
|
049565e1dd423115020484fca5b891afdd1f97bc
|
[
"MIT"
] | null | null | null |
ic3_labels/__init__.py
|
icecube/ic3-labels
|
049565e1dd423115020484fca5b891afdd1f97bc
|
[
"MIT"
] | 2
|
2019-06-10T13:37:17.000Z
|
2019-10-21T06:16:35.000Z
|
from ic3_labels.__about__ import __version__, __description__, __url__
| 35.5
| 70
| 0.873239
| 8
| 71
| 5.625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015385
| 0.084507
| 71
| 1
| 71
| 71
| 0.676923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7176a1de82c9c00d5e104167f43c29c4da1a3df
| 58
|
py
|
Python
|
Scenario-2/gym_placement/envs/__init__.py
|
nesl/Heliot
|
dd8cbf8871d234b12ab3c31b97aa18ff1254a4c7
|
[
"BSD-3-Clause"
] | 4
|
2019-09-19T15:36:22.000Z
|
2020-02-18T09:28:54.000Z
|
Scenario-2/gym_placement/envs/__init__.py
|
nesl/Heliot
|
dd8cbf8871d234b12ab3c31b97aa18ff1254a4c7
|
[
"BSD-3-Clause"
] | null | null | null |
Scenario-2/gym_placement/envs/__init__.py
|
nesl/Heliot
|
dd8cbf8871d234b12ab3c31b97aa18ff1254a4c7
|
[
"BSD-3-Clause"
] | 2
|
2020-04-14T19:11:32.000Z
|
2022-01-08T18:59:02.000Z
|
from gym_placement.envs.placement0 import placementClass0
| 29
| 57
| 0.896552
| 7
| 58
| 7.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.068966
| 58
| 1
| 58
| 58
| 0.907407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b73a95801c8d05c78566460da2d22302837e57be
| 62
|
py
|
Python
|
network_anomaly/code/Basic_features/__init__.py
|
kidrabit/Data-Visualization-Lab-RND
|
baa19ee4e9f3422a052794e50791495632290b36
|
[
"Apache-2.0"
] | 1
|
2022-01-18T01:53:34.000Z
|
2022-01-18T01:53:34.000Z
|
network_anomaly/code/Basic_features/__init__.py
|
kidrabit/Data-Visualization-Lab-RND
|
baa19ee4e9f3422a052794e50791495632290b36
|
[
"Apache-2.0"
] | null | null | null |
network_anomaly/code/Basic_features/__init__.py
|
kidrabit/Data-Visualization-Lab-RND
|
baa19ee4e9f3422a052794e50791495632290b36
|
[
"Apache-2.0"
] | null | null | null |
from .Land import *
from .test import *
from .Urgent import *
| 15.5
| 21
| 0.709677
| 9
| 62
| 4.888889
| 0.555556
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 62
| 3
| 22
| 20.666667
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3f83859f656af546a2e31433e7f750fdfccaaf1b
| 59
|
py
|
Python
|
gridworld_basic/envs/__init__.py
|
elaitenstile/RL-basic-gridworld
|
1f781ef51661057dad2a7f680345ace94988c73c
|
[
"MIT"
] | null | null | null |
gridworld_basic/envs/__init__.py
|
elaitenstile/RL-basic-gridworld
|
1f781ef51661057dad2a7f680345ace94988c73c
|
[
"MIT"
] | null | null | null |
gridworld_basic/envs/__init__.py
|
elaitenstile/RL-basic-gridworld
|
1f781ef51661057dad2a7f680345ace94988c73c
|
[
"MIT"
] | null | null | null |
from gridworld_basic.envs.gridworld_env import GridworldEnv
| 59
| 59
| 0.915254
| 8
| 59
| 6.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050847
| 59
| 1
| 59
| 59
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3fa3125d942566f9646757785bae74cb8eb38c3e
| 118
|
py
|
Python
|
pessoa/admin.py
|
araujo88/minhaGaragem
|
31fb16a686eef2caa26e194c03a0528e43867188
|
[
"MIT"
] | null | null | null |
pessoa/admin.py
|
araujo88/minhaGaragem
|
31fb16a686eef2caa26e194c03a0528e43867188
|
[
"MIT"
] | null | null | null |
pessoa/admin.py
|
araujo88/minhaGaragem
|
31fb16a686eef2caa26e194c03a0528e43867188
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import Pessoa
admin.site.register(Pessoa)
| 19.666667
| 32
| 0.805085
| 17
| 118
| 5.588235
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127119
| 118
| 6
| 33
| 19.666667
| 0.92233
| 0.220339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3fa4ea616f0e31aa8e437a3411ca9b935d32a607
| 151
|
py
|
Python
|
{{ cookiecutter.project_slug }}/tests/test_sample.py
|
gphillips8frw/cookiecutter-docker-science
|
6392fa690f2fb3685d8149bc4ca9c42d98dcaf15
|
[
"ECL-2.0",
"Apache-2.0"
] | 308
|
2018-02-15T12:29:37.000Z
|
2022-03-05T14:14:07.000Z
|
{{ cookiecutter.project_slug }}/tests/test_sample.py
|
gphillips8frw/cookiecutter-docker-science
|
6392fa690f2fb3685d8149bc4ca9c42d98dcaf15
|
[
"ECL-2.0",
"Apache-2.0"
] | 71
|
2018-02-15T08:50:49.000Z
|
2021-08-29T14:10:33.000Z
|
{{ cookiecutter.project_slug }}/tests/test_sample.py
|
gphillips8frw/cookiecutter-docker-science
|
6392fa690f2fb3685d8149bc4ca9c42d98dcaf15
|
[
"ECL-2.0",
"Apache-2.0"
] | 100
|
2018-02-16T16:29:32.000Z
|
2022-03-25T21:05:11.000Z
|
import unittest
class TestSample(unittest.TestCase):
def setUp(self):
pass
def test_add(self):
self.assertEqual((3 + 4), 7)
| 15.1
| 36
| 0.622517
| 19
| 151
| 4.894737
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0.264901
| 151
| 9
| 37
| 16.777778
| 0.810811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
3fdc41f4f719ef0dcf4073fab3a6e7e778963f5a
| 56
|
py
|
Python
|
src/messages/__init__.py
|
IanDCarroll/Trivvy
|
2aaa68301e4dd1daaf717d98bb468cc65c8f373a
|
[
"MIT"
] | 1
|
2020-10-09T21:11:38.000Z
|
2020-10-09T21:11:38.000Z
|
src/messages/__init__.py
|
IanDCarroll/Trivvy
|
2aaa68301e4dd1daaf717d98bb468cc65c8f373a
|
[
"MIT"
] | 1
|
2020-09-05T01:29:49.000Z
|
2020-09-05T01:29:49.000Z
|
src/messages/__init__.py
|
Coding-Koans/Trivvy
|
2aaa68301e4dd1daaf717d98bb468cc65c8f373a
|
[
"MIT"
] | 2
|
2020-07-12T05:02:43.000Z
|
2020-07-16T00:27:07.000Z
|
from .terminal import Log
from .twitch_chat import Chat
| 18.666667
| 29
| 0.821429
| 9
| 56
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 56
| 2
| 30
| 28
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3fe5f6fb3821ec53eddc2eae086a63cf0bcba605
| 212
|
py
|
Python
|
watchmate_v2/app/admin.py
|
rroy11705/Rest_API_With_Django
|
6a75db2e2c3913ec9afc1cbfef67a5c9fd655e60
|
[
"CNRI-Python"
] | null | null | null |
watchmate_v2/app/admin.py
|
rroy11705/Rest_API_With_Django
|
6a75db2e2c3913ec9afc1cbfef67a5c9fd655e60
|
[
"CNRI-Python"
] | null | null | null |
watchmate_v2/app/admin.py
|
rroy11705/Rest_API_With_Django
|
6a75db2e2c3913ec9afc1cbfef67a5c9fd655e60
|
[
"CNRI-Python"
] | null | null | null |
from django.contrib import admin
from .models import WatchList, StreamPlatform, Review
# Register your models here.
admin.site.register(WatchList)
admin.site.register(StreamPlatform)
admin.site.register(Review)
| 26.5
| 53
| 0.825472
| 27
| 212
| 6.481481
| 0.481481
| 0.154286
| 0.291429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089623
| 212
| 7
| 54
| 30.285714
| 0.906736
| 0.122642
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b7668c640a12dc3ae179483e533a82850aa2f1e5
| 27
|
py
|
Python
|
tests/__init__.py
|
rajat-np/pygount
|
111ec9259e24ff69dba1848120edd60df13ecf10
|
[
"BSD-3-Clause"
] | 93
|
2016-07-07T07:23:36.000Z
|
2022-03-29T02:48:09.000Z
|
tests/__init__.py
|
rajat-np/pygount
|
111ec9259e24ff69dba1848120edd60df13ecf10
|
[
"BSD-3-Clause"
] | 84
|
2016-08-18T23:01:55.000Z
|
2022-03-20T09:31:31.000Z
|
tests/__init__.py
|
rajat-np/pygount
|
111ec9259e24ff69dba1848120edd60df13ecf10
|
[
"BSD-3-Clause"
] | 18
|
2016-09-08T07:20:34.000Z
|
2022-01-02T11:45:02.000Z
|
# Deliberately left empty.
| 13.5
| 26
| 0.777778
| 3
| 27
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.913043
| 0.888889
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b7aa94ddaa98ff36a3ff6337e14c9911fd84a648
| 20
|
py
|
Python
|
pattern/arrow.py
|
Shivams9/pythoncodecamp
|
e6cd27f4704a407ee360414a8c9236b254117a59
|
[
"MIT"
] | 6
|
2021-08-04T08:15:22.000Z
|
2022-02-02T11:15:56.000Z
|
pattern/arrow.py
|
Maurya232Abhishek/Python-repository-for-basics
|
3dcec5c529a0847df07c9dcc1424675754ce6376
|
[
"MIT"
] | 14
|
2021-08-02T06:28:00.000Z
|
2022-03-25T10:44:15.000Z
|
pattern/arrow.py
|
Maurya232Abhishek/Python-repository-for-basics
|
3dcec5c529a0847df07c9dcc1424675754ce6376
|
[
"MIT"
] | 6
|
2021-07-16T04:56:41.000Z
|
2022-02-16T04:40:06.000Z
|
print("helo \n" * 4)
| 20
| 20
| 0.55
| 4
| 20
| 2.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.15
| 20
| 1
| 20
| 20
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
b7cbc769b1723d0902d729e61b8eea00ce9c01e6
| 47
|
py
|
Python
|
deltarescdk/__init__.py
|
Deltares/deltares-cdk
|
7f6911dbb61b1b3a852effcda4c8a349047571e8
|
[
"MIT"
] | null | null | null |
deltarescdk/__init__.py
|
Deltares/deltares-cdk
|
7f6911dbb61b1b3a852effcda4c8a349047571e8
|
[
"MIT"
] | null | null | null |
deltarescdk/__init__.py
|
Deltares/deltares-cdk
|
7f6911dbb61b1b3a852effcda4c8a349047571e8
|
[
"MIT"
] | null | null | null |
from deltarescdk.kube_bucket import KubeBucket
| 23.5
| 46
| 0.893617
| 6
| 47
| 6.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b7d3496db6f1822d70348af263692591c823e6c7
| 58
|
py
|
Python
|
twiltwil/api/models/__init__.py
|
alexdlaird/twilio-taskrouter-demo
|
df5c95f8b009e70531b348e3250708723111f159
|
[
"MIT"
] | 2
|
2019-01-16T22:46:46.000Z
|
2021-03-23T06:39:21.000Z
|
twiltwil/api/models/__init__.py
|
alexdlaird/twilio-taskrouter-demo
|
df5c95f8b009e70531b348e3250708723111f159
|
[
"MIT"
] | 62
|
2018-07-06T04:45:46.000Z
|
2021-08-25T11:02:17.000Z
|
twiltwil/api/models/__init__.py
|
alexdlaird/twilio-taskrouter-demo
|
df5c95f8b009e70531b348e3250708723111f159
|
[
"MIT"
] | 1
|
2019-01-29T12:39:29.000Z
|
2019-01-29T12:39:29.000Z
|
from .contact import Contact
from .message import Message
| 19.333333
| 28
| 0.827586
| 8
| 58
| 6
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 58
| 2
| 29
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7da0e5a60810d60cc6aca28aaf8d088e1155203
| 287
|
py
|
Python
|
audio/ffmpeg/__init__.py
|
joshbarrass/spyctrum
|
d3a081f5f120a62ee06da596b2385c5bfdf45c41
|
[
"MIT"
] | null | null | null |
audio/ffmpeg/__init__.py
|
joshbarrass/spyctrum
|
d3a081f5f120a62ee06da596b2385c5bfdf45c41
|
[
"MIT"
] | null | null | null |
audio/ffmpeg/__init__.py
|
joshbarrass/spyctrum
|
d3a081f5f120a62ee06da596b2385c5bfdf45c41
|
[
"MIT"
] | null | null | null |
"""# ffmpeg wrapping functions
Provides simple bindings for calling ffmpeg
## Required Software
- ffmpeg
"""
# Optional TODO: increase usefulness of this module
from spyctrum.audio.ffmpeg.ffmpeg import call
from spyctrum.audio.ffmpeg.ffmpeg import FFMPEG_INSTALLED, FFmpegException
| 22.076923
| 74
| 0.801394
| 35
| 287
| 6.542857
| 0.685714
| 0.104803
| 0.148472
| 0.200873
| 0.305677
| 0.305677
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132404
| 287
| 12
| 75
| 23.916667
| 0.919679
| 0.54007
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7fc130394cbd81e6847b7e1c590ecfc6fdd9716
| 184
|
py
|
Python
|
ufora/FORA/python/PurePython/testModules/same_line_number/A.py
|
ufora/ufora
|
04db96ab049b8499d6d6526445f4f9857f1b6c7e
|
[
"Apache-2.0",
"CC0-1.0",
"MIT",
"BSL-1.0",
"BSD-3-Clause"
] | 571
|
2015-11-05T20:07:07.000Z
|
2022-01-24T22:31:09.000Z
|
ufora/FORA/python/PurePython/testModules/same_line_number/A.py
|
timgates42/ufora
|
04db96ab049b8499d6d6526445f4f9857f1b6c7e
|
[
"Apache-2.0",
"CC0-1.0",
"MIT",
"BSL-1.0",
"BSD-3-Clause"
] | 218
|
2015-11-05T20:37:55.000Z
|
2021-05-30T03:53:50.000Z
|
ufora/FORA/python/PurePython/testModules/same_line_number/A.py
|
timgates42/ufora
|
04db96ab049b8499d6d6526445f4f9857f1b6c7e
|
[
"Apache-2.0",
"CC0-1.0",
"MIT",
"BSL-1.0",
"BSD-3-Clause"
] | 40
|
2015-11-07T21:42:19.000Z
|
2021-05-23T03:48:19.000Z
|
from ufora.FORA.python.PurePython.testModules.same_line_number.B import B
class A(object):
def __init__(self, m):
self.m = m
def foo(self):
return B(self.m)
| 18.4
| 73
| 0.652174
| 29
| 184
| 3.931034
| 0.689655
| 0.131579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233696
| 184
| 9
| 74
| 20.444444
| 0.808511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
b7fc930ba61b14d72f2ca039c716f96d6709bee3
| 127
|
py
|
Python
|
run_scratch.py
|
ksouvik52/Skpetical_NeurIPS2021
|
cea386f0c6c7c293323e4103e6a0a62e52ce42c4
|
[
"MIT"
] | 7
|
2022-01-04T14:12:38.000Z
|
2022-03-16T21:21:38.000Z
|
run_scratch.py
|
ksouvik52/Skpetical_NeurIPS2021
|
cea386f0c6c7c293323e4103e6a0a62e52ce42c4
|
[
"MIT"
] | null | null | null |
run_scratch.py
|
ksouvik52/Skpetical_NeurIPS2021
|
cea386f0c6c7c293323e4103e6a0a62e52ce42c4
|
[
"MIT"
] | 1
|
2022-01-09T05:27:50.000Z
|
2022-01-09T05:27:50.000Z
|
import os
import sys
cmd1 = "python train_scratch.py --save_path='experiments/CIFAR10/baseline/mobilenetv2/'"
os.system(cmd1)
| 21.166667
| 88
| 0.787402
| 18
| 127
| 5.444444
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043103
| 0.086614
| 127
| 5
| 89
| 25.4
| 0.801724
| 0
| 0
| 0
| 0
| 0
| 0.622047
| 0.433071
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4d31c708a8e68cdb286e2de49bbafe59f8c4b735
| 3,935
|
py
|
Python
|
loglizer/preprocessing.py
|
nikile/loglizer
|
e37c661a7837fb30cd1dae1ba8cc2cd309c73333
|
[
"MIT"
] | null | null | null |
loglizer/preprocessing.py
|
nikile/loglizer
|
e37c661a7837fb30cd1dae1ba8cc2cd309c73333
|
[
"MIT"
] | null | null | null |
loglizer/preprocessing.py
|
nikile/loglizer
|
e37c661a7837fb30cd1dae1ba8cc2cd309c73333
|
[
"MIT"
] | null | null | null |
"""
The interface for data preprocessing.
Authors:
LogPAI Team
"""
import numpy as np
import pandas as pd
from collections import Counter
class FeatureExtractor(object):
def __init__(self):
self.idf_vec = None
self.mean_vec = None
self.events = None
def df_fit_transform(self, X_seq):
"""
Fit and transform the data matrix.
Variant of a similar to "fit_transform" function,
but with using a pandas lib for more convenient debugging.
Args:
X_seq: ndarray, log sequences matrix
Returns:
X_new: The transformed data matrix
"""
print('====== Transformed train data summary ======')
x_counts = []
for i in range(X_seq.shape[0]):
event_counts = Counter(X_seq[i])
x_counts.append(event_counts)
X_df = pd.DataFrame(x_counts)
X_df = X_df.fillna(0)
self.events = X_df.columns
num_instance, num_event = X_df.shape
#tf - idf term weighting
df_vec = np.sum(X_df > 0, axis=0)
self.idf_vec = np.log(num_instance / (df_vec + 1e-8))
idf_matrix = X_df * np.tile(self.idf_vec, (num_instance, 1))
X = idf_matrix
#zero-mean normalization
mean_vec = X.mean(axis=0)
self.mean_vec = mean_vec.values.reshape(1, num_event)
X = X - np.tile(self.mean_vec, (num_instance, 1))
X_new = X
print('Train data shape: {}-by-{}\n'.format(X_new.shape[0], X_new.shape[1]))
return X_new
def fit_transform(self, X_seq):
"""
Fit and transform the data matrix
Args:
X_seq: ndarray, log sequences matrix
Returns:
X_new: The transformed data matrix
"""
print('====== Transformed train data summary ======')
x_counts = []
for i in range(X_seq.shape[0]):
event_counts = Counter(X_seq[i])
x_counts.append(event_counts)
X_df = pd.DataFrame(x_counts)
X_df = X_df.fillna(0)
self.events = X_df.columns
X = X_df.values
num_instance, num_event = X.shape
#tf - idf term weighting
df_vec = np.sum(X > 0, axis=0)
self.idf_vec = np.log(num_instance / (df_vec + 1e-8))
idf_matrix = X * np.tile(self.idf_vec, (num_instance, 1))
X = idf_matrix
#zero-mean normalization
mean_vec = X.mean(axis=0)
self.mean_vec = mean_vec.reshape(1, num_event)
X = X - np.tile(self.mean_vec, (num_instance, 1))
X_new = X
print('Train data shape: {}-by-{}\n'.format(X_new.shape[0], X_new.shape[1]))
return X_new
def transform(self, X_seq):
"""
Transform the data matrix with trained parameters
Args:
X_seq: log sequences matrix
Returns:
X_new: The transformed data matrix
"""
print('====== Transformed test data summary ======')
X_counts = []
for i in range(X_seq.shape[0]):
event_counts = Counter(X_seq[i])
X_counts.append(event_counts)
X_df = pd.DataFrame(X_counts)
X_df = X_df.fillna(0)
empty_events = set(self.events) - set(X_df.columns)
for event in empty_events:
X_df[event] = [0] * len(X_df)
# only those events (keys) that were in the training data set are taken into account
X = X_df[self.events].values
num_instance, num_event = X.shape
# tf - idf term weighting
idf_matrix = X * np.tile(self.idf_vec, (num_instance, 1))
X = idf_matrix
# zero-mean normalization
X = X - np.tile(self.mean_vec, (num_instance, 1))
X_new = X
print('Test data shape: {}-by-{}\n'.format(X_new.shape[0], X_new.shape[1]))
return X_new
| 28.309353
| 92
| 0.563405
| 549
| 3,935
| 3.832423
| 0.189435
| 0.027091
| 0.028517
| 0.042776
| 0.75
| 0.740494
| 0.740494
| 0.740494
| 0.740494
| 0.740494
| 0
| 0.011689
| 0.326048
| 3,935
| 138
| 93
| 28.514493
| 0.781674
| 0.202541
| 0
| 0.602941
| 0
| 0
| 0.074461
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.044118
| 0
| 0.161765
| 0.088235
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d341b19dcc0129429eecaa297a15d2c0f8f46bb
| 322
|
py
|
Python
|
app1/admin.py
|
Li-Xiaobai-poem/Li-Xiaobai
|
126cc1b502e2f620c1fc883978c6a9b2ebf19bba
|
[
"MIT"
] | null | null | null |
app1/admin.py
|
Li-Xiaobai-poem/Li-Xiaobai
|
126cc1b502e2f620c1fc883978c6a9b2ebf19bba
|
[
"MIT"
] | null | null | null |
app1/admin.py
|
Li-Xiaobai-poem/Li-Xiaobai
|
126cc1b502e2f620c1fc883978c6a9b2ebf19bba
|
[
"MIT"
] | 1
|
2021-07-05T12:26:01.000Z
|
2021-07-05T12:26:01.000Z
|
from django.contrib import admin
# Register your models here.
from app1.models import Release
from app1.models import Comments
from app1.models import User
from app1.models import Collections
admin.site.register(Release )
admin.site.register(Comments)
admin.site.register(User)
admin.site.register( Collections)
| 29.272727
| 36
| 0.801242
| 45
| 322
| 5.733333
| 0.333333
| 0.124031
| 0.217054
| 0.310078
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014235
| 0.127329
| 322
| 11
| 37
| 29.272727
| 0.903915
| 0.080745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.555556
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4d94b1af851272e6532fdf4551267bbcaa07bfa5
| 100
|
py
|
Python
|
BDFunction1D/__init__.py
|
bond-anton/BDFunction1D
|
d678355c093017c592d5c33ef170f5dd728ab1e2
|
[
"Apache-2.0"
] | null | null | null |
BDFunction1D/__init__.py
|
bond-anton/BDFunction1D
|
d678355c093017c592d5c33ef170f5dd728ab1e2
|
[
"Apache-2.0"
] | null | null | null |
BDFunction1D/__init__.py
|
bond-anton/BDFunction1D
|
d678355c093017c592d5c33ef170f5dd728ab1e2
|
[
"Apache-2.0"
] | null | null | null |
from ._version import __version__
from .Function import Function
from .Functional import Functional
| 25
| 34
| 0.85
| 12
| 100
| 6.666667
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 100
| 3
| 35
| 33.333333
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4d9cb186050fdce21fcb1749232b15d0e82f0ab8
| 45
|
py
|
Python
|
Tools/peg_generator/scripts/__init__.py
|
Krrishdhaneja/cpython
|
9ae9ad8ba35cdcece7ded73cd2207e4f8cb85578
|
[
"0BSD"
] | 1
|
2020-10-25T16:33:22.000Z
|
2020-10-25T16:33:22.000Z
|
Tools/peg_generator/scripts/__init__.py
|
Krrishdhaneja/cpython
|
9ae9ad8ba35cdcece7ded73cd2207e4f8cb85578
|
[
"0BSD"
] | null | null | null |
Tools/peg_generator/scripts/__init__.py
|
Krrishdhaneja/cpython
|
9ae9ad8ba35cdcece7ded73cd2207e4f8cb85578
|
[
"0BSD"
] | null | null | null |
# This exists to let mypy find modules here
| 22.5
| 44
| 0.755556
| 8
| 45
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 45
| 1
| 45
| 45
| 0.971429
| 0.911111
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4db7cd33a212849a002e4f39602b2a16fe6e27c4
| 124
|
py
|
Python
|
supplier/admin.py
|
oteejay/lms
|
be351c8ec7aee1f81dede6fcf4292c1ecad31c60
|
[
"MIT"
] | null | null | null |
supplier/admin.py
|
oteejay/lms
|
be351c8ec7aee1f81dede6fcf4292c1ecad31c60
|
[
"MIT"
] | 11
|
2020-06-05T22:33:23.000Z
|
2022-03-11T23:56:46.000Z
|
supplier/admin.py
|
oteejay/lms
|
be351c8ec7aee1f81dede6fcf4292c1ecad31c60
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Supplier
# Register your models here.
admin.site.register(Supplier)
| 15.5
| 32
| 0.798387
| 17
| 124
| 5.823529
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137097
| 124
| 7
| 33
| 17.714286
| 0.925234
| 0.209677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4dc25b90ec49e9c2a89731c672dc7722b36a8636
| 3,968
|
py
|
Python
|
tests/unit/utils/test_generate_utils.py
|
MaxTakahashi/hammr
|
cfe593ccfdddb7f98185e561feed6a40a866b585
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/utils/test_generate_utils.py
|
MaxTakahashi/hammr
|
cfe593ccfdddb7f98185e561feed6a40a866b585
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/utils/test_generate_utils.py
|
MaxTakahashi/hammr
|
cfe593ccfdddb7f98185e561feed6a40a866b585
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2007-2017 UShareSoft SAS, All rights reserved
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import TestCase
from hammr.utils.generate_utils import *
class TestGenerateK5(TestCase):
def test_generate_k5vmdk_should_return_uncompressed_image_given_compressed_image(self):
# given
image_given = CompressedImage()
intall_profile_given = MockObject()
# when
image, install_profile = generate_k5vmdk(image_given, WhateverObject(), intall_profile_given, WhateverObject(), WhateverObject())
# then
self.assertFalse(image.compress)
self.assertEquals(intall_profile_given, install_profile)
def test_generate_k5vmdk_should_return_uncompressed_image_given_uncompressed_image(self):
# given
image_given = UncompressedImage()
intall_profile_given = MockObject()
# when
image, install_profile = generate_k5vmdk(image_given, WhateverObject(), intall_profile_given,
WhateverObject(), WhateverObject())
# then
self.assertFalse(image.compress)
self.assertEquals(intall_profile_given, install_profile)
class TestGeneratePXE(TestCase):
def test_generate_pxe_should_return_uncompressed_image_given_compressed_image(self):
# given
image_given = CompressedImage()
intall_profile_given = MockObject()
# when
image, install_profile = generate_pxe(image_given, WhateverObject(), intall_profile_given, None, None)
# then
self.assertFalse(image.compress)
self.assertEquals(intall_profile_given, install_profile)
def test_generate_pxe_should_return_uncompressed_image_given_uncompressed_image(self):
# given
image_given = UncompressedImage()
intall_profile_given = MockObject()
# when
image, install_profile = generate_pxe(image_given, WhateverObject(), intall_profile_given,
None, None)
# then
self.assertFalse(image.compress)
self.assertEquals(intall_profile_given, install_profile)
class TestGenerateOracle(TestCase):
def test_generate_oracleraw_should_return_compressed_image_given_compressed_image(self):
# given
image_given = CompressedImage()
install_profile_given = MockObject()
# when
image, install_profile = generate_oracleraw(image_given, WhateverObject(), install_profile_given, WhateverObject(), WhateverObject())
# then
self.assertTrue(image.compress)
self.assertEquals(install_profile_given, install_profile)
def test_generate_oracleraw_should_return_compressed_image_given_uncompressed_image(self):
# given
image_given = UncompressedImage()
install_profile_given = MockObject()
# when
image, install_profile = generate_oracleraw(image_given, WhateverObject(), install_profile_given,
WhateverObject(), WhateverObject())
# then
self.assertTrue(image.compress)
self.assertEquals(install_profile_given, install_profile)
class CompressedImage:
compress = True
class UncompressedImage:
compress = False
class MockObject:
first_attribute = "something"
second_attribute = "something else"
class WhateverObject:
whatever = "whatever"
| 35.428571
| 141
| 0.701865
| 410
| 3,968
| 6.492683
| 0.273171
| 0.067618
| 0.081142
| 0.042825
| 0.714125
| 0.712246
| 0.712246
| 0.712246
| 0.712246
| 0.613824
| 0
| 0.005577
| 0.231855
| 3,968
| 112
| 142
| 35.428571
| 0.867782
| 0.176663
| 0
| 0.490566
| 0
| 0
| 0.009583
| 0
| 0
| 0
| 0
| 0
| 0.226415
| 1
| 0.113208
| false
| 0
| 0.037736
| 0
| 0.377358
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1516c520f7ec8ac9c494dff385a59a48093ea59e
| 1,139
|
py
|
Python
|
pyaz/search/query_key/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/search/query_key/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/search/query_key/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
from ... pyaz_utils import _call_az
def list(resource_group, service_name):
'''
Required Parameters:
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
- service_name -- The name of the search service.
'''
return _call_az("az search query-key list", locals())
def create(name, resource_group, service_name):
'''
Required Parameters:
- name -- The name of the query key.
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
- service_name -- The name of the search service.
'''
return _call_az("az search query-key create", locals())
def delete(key_value, resource_group, service_name):
'''
Required Parameters:
- key_value -- The value of the query key.
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
- service_name -- The name of the search service.
'''
return _call_az("az search query-key delete", locals())
| 30.783784
| 128
| 0.680421
| 153
| 1,139
| 4.915033
| 0.196078
| 0.155585
| 0.058511
| 0.069149
| 0.851064
| 0.836436
| 0.668883
| 0.668883
| 0.668883
| 0.668883
| 0
| 0
| 0.218613
| 1,139
| 36
| 129
| 31.638889
| 0.844944
| 0.585601
| 0
| 0
| 0
| 0
| 0.202667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.142857
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
128c8799276a37790c5ba1be81a6ae1a6429aa2a
| 11,926
|
py
|
Python
|
src/counterfactual_explanation/flow_ssl/icnn/icnn.py
|
tridungduong16/fairCE
|
b13c72c253d875e68c0294b91aaddcbf93460d92
|
[
"MIT"
] | null | null | null |
src/counterfactual_explanation/flow_ssl/icnn/icnn.py
|
tridungduong16/fairCE
|
b13c72c253d875e68c0294b91aaddcbf93460d92
|
[
"MIT"
] | null | null | null |
src/counterfactual_explanation/flow_ssl/icnn/icnn.py
|
tridungduong16/fairCE
|
b13c72c253d875e68c0294b91aaddcbf93460d92
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn.functional as F
import torch.nn as nn
from torch.distributions.independent import Independent
from torch.distributions.normal import Normal
import numpy as np
from ..utils import export, Named, Expression
from ..conv_parts import ResBlock,conv2d
from ..invertible import SqueezeLayer,padChannels,keepChannels,NNdownsample,iAvgPool2d#iSequential2
from ..invertible import iLogits, iBN, MeanOnlyBN, iSequential, passThrough, addZslot, Join, pad_circular_nd
from ..invertible import iConv2d, iSLReLU,iConv1x1,Flatten,RandomPadChannels,iLeakyReLU,iCoordInjection,iSimpleCoords
import scipy as sp
import scipy.sparse
def iConvSelu(channels):
return iSequential(iConv2d(channels,channels),iSLReLU())
def iCoordSelu(channels):
return iSequential(iConv2d(channels,channels),iSLReLU(),iCoordInjection(channels))
def iConvBNselu(channels):
return iSequential(iConv2d(channels,channels),iBN(channels),iSLReLU())#iSLReLU())
def StandardNormal(d,device=torch.device('cuda:0')):
return Independent(Normal(torch.zeros(d).to(device),torch.ones(d).to(device)),1)
class FlowNetwork(nn.Module,metaclass=Named):
def forward(self,x):
return self.classifier_head(self.body(x))
def sample(self,bs=1):
return self.flow.inverse(self.prior(self.device).sample([bs]))
@property
def device(self):
try: return self._device
except AttributeError:
self._device = next(self.parameters()).device
return self._device
def nll(self,x):
z = self.flow(x)
logdet = self.flow.logdet()
return -1*(self.prior(x.device).log_prob(z) + logdet)
@export
class iCNN(FlowNetwork):
"""
Very small CNN
"""
def __init__(self, num_classes=10,k=16):
super().__init__()
self.num_classes = num_classes
self.k = k
self.body = iSequential(
#iLogits(),
RandomPadChannels(k-3),
*iCoordSelu(k),
*iCoordSelu(k),
*iCoordSelu(k),
NNdownsample(),
*iCoordSelu(4*k),
*iCoordSelu(4*k),
*iCoordSelu(4*k),
NNdownsample(),
*iCoordSelu(16*k),
*iCoordSelu(16*k),
iConv2d(16*k,16*k),
)
self.classifier_head = nn.Sequential(
nn.BatchNorm2d(16*k),
Expression(lambda u:u.mean(-1).mean(-1)),
nn.Linear(16*k,num_classes)
)
self.flow = iSequential(self.body,Flatten())
self.prior = StandardNormal(k*32*32)
@export
class MultiScaleiCNN(iCNN):
def __init__(self, num_classes=10,k=64):
super().__init__(num_classes,k)
self.num_classes = num_classes
self.k = k
self.body = iSequential(
iLogits(),
RandomPadChannels(k-3),
addZslot(),
passThrough(*iConvBNselu(k)),
passThrough(*iConvBNselu(k)),
passThrough(*iConvBNselu(k)),
passThrough(NNdownsample()),
passThrough(iConv1x1(4*k)),
keepChannels(2*k),
passThrough(*iConvBNselu(2*k)),
passThrough(*iConvBNselu(2*k)),
passThrough(*iConvBNselu(2*k)),
passThrough(NNdownsample()),
passThrough(iConv1x1(8*k)),
keepChannels(4*k),
passThrough(*iConvBNselu(4*k)),
passThrough(*iConvBNselu(4*k)),
passThrough(*iConvBNselu(4*k)),
passThrough(iConv2d(4*k,4*k)),
Join(),
)
self.classifier_head = nn.Sequential(
Expression(lambda z:z[-1]),
nn.BatchNorm2d(4*k),
Expression(lambda u:u.mean(-1).mean(-1)),
nn.Linear(4*k,num_classes)
)
self.flow = iSequential(self.body,Flatten())
self.prior = StandardNormal(k*32*32)
@export
class MultiScaleiCNNv2(MultiScaleiCNN):
def __init__(self, num_classes=10,k=96):
super().__init__(num_classes,k)
self.num_classes = num_classes
self.k = k
self.body = iSequential(
#iLogits(),
RandomPadChannels(k-3),
addZslot(),
passThrough(*iConvSelu(k)),
passThrough(*iConvSelu(k)),
passThrough(*iConvSelu(k)),
passThrough(NNdownsample()),
passThrough(iConv1x1(4*k)),
keepChannels(2*k),
passThrough(*iConvSelu(2*k)),
passThrough(*iConvSelu(2*k)),
#passThrough(*iConvSelu(2*k)),
passThrough(NNdownsample()),
passThrough(iConv1x1(8*k)),
keepChannels(2*k),
passThrough(*iConvSelu(2*k)),
passThrough(*iConvSelu(2*k)),
#passThrough(*iConvSelu(2*k)),
passThrough(iConv2d(2*k,2*k)),
Join(),
)
self.classifier_head = nn.Sequential(
Expression(lambda z:z[-1]),
nn.BatchNorm2d(2*k),
Expression(lambda u:u.mean(-1).mean(-1)),
nn.Linear(2*k,num_classes)
)
self.flow = iSequential(self.body,Flatten())
self.prior = StandardNormal(k*32*32)
class iCNNsup(MultiScaleiCNN):
def __init__(self, num_classes=10,k=96):
super().__init__(num_classes,k)
self.num_classes = num_classes
self.k = k
self.body = iSequential(
#iLogits(),
RandomPadChannels(k-3),
addZslot(),
passThrough(*iConvSelu(k)),
passThrough(*iConvSelu(k)),
passThrough(iAvgPool2d()),
passThrough(iConv1x1(4*k)),
keepChannels(2*k),
passThrough(*iConvSelu(2*k)),
passThrough(*iConvSelu(2*k)),
#passThrough(*iConvSelu(2*k)),
passThrough(iAvgPool2d()),
passThrough(iConv1x1(8*k)),
keepChannels(2*k),
passThrough(*iConvSelu(2*k)),
passThrough(*iConvSelu(2*k)),
Join(),
)
self.classifier_head = nn.Sequential(
Expression(lambda z:z[-1]),
nn.BatchNorm2d(2*k),
Expression(lambda u:u.mean(-1).mean(-1)),
nn.Linear(2*k,num_classes)
)
self.flow = iSequential(self.body,Flatten())
self.prior = StandardNormal(k*32*32)
class iSimpleSup(MultiScaleiCNN):
def __init__(self, num_classes=10,k=96):
super().__init__(num_classes,k)
self.num_classes = num_classes
self.k = k
self.body = iSequential(
#iLogits(),
RandomPadChannels(k-3),
addZslot(),
passThrough(*iConvSelu(k)),
passThrough(*iConvSelu(k)),
passThrough(iAvgPool2d()),
keepChannels(2*k),
passThrough(*iConvSelu(2*k)),
passThrough(*iConvSelu(2*k)),
#passThrough(*iConvSelu(2*k)),
passThrough(iAvgPool2d()),
keepChannels(2*k),
passThrough(*iConvSelu(2*k)),
passThrough(*iConvSelu(2*k)),
Join(),
)
self.classifier_head = nn.Sequential(
Expression(lambda z:z[-1]),
nn.BatchNorm2d(2*k),
Expression(lambda u:u.mean(-1).mean(-1)),
nn.Linear(2*k,num_classes)
)
self.flow = iSequential(self.body,Flatten())
self.prior = StandardNormal(k*32*32)
@export
class iCNN3d(FlowNetwork):
def __init__(self, in_channels=3, num_classes=10,res=32):
super().__init__()
self.num_classes = num_classes
self.body = iSequential(
iLogits(),
*iConvSelu(in_channels),
*iConvSelu(in_channels),
*iConvSelu(in_channels),
iAvgPool2d(),
*iConvSelu(4*in_channels),
*iConvSelu(4*in_channels),
*iConvSelu(4*in_channels),
iAvgPool2d(),
*iConvSelu(16*in_channels),
*iConvSelu(16*in_channels),
*iConvSelu(16*in_channels),
iAvgPool2d(),
*iConvSelu(64*in_channels),
*iConvSelu(64*in_channels),
*iConvSelu(64*in_channels),
iConv2d(64*in_channels,64*in_channels),
)
self.classifier_head = nn.Sequential(
Expression(lambda u:u.mean(-1).mean(-1)),
nn.Linear(64*in_channels,num_classes)
)
self.flow = iSequential(self.body,Flatten())
self.prior = StandardNormal(in_channels*res*res)
@export
class iCNN3d2(FlowNetwork):
    """Plain-CNN counterpart of iCNN3d (ordinary convs + ReLU).

    NOTE(review): the body is a regular nn.Sequential with ReLU activations,
    which are not bijective, yet self.flow still wraps it in iSequential.
    Presumably this class serves as a non-invertible baseline classifier;
    confirm before using FlowNetwork inverse / log-det functionality on it.
    """

    def __init__(self, in_channels=3, num_classes=10,res=32):
        super().__init__()
        self.num_classes = num_classes
        self.body = nn.Sequential(
            conv2d(in_channels,in_channels),
            nn.ReLU(),
            conv2d(in_channels,in_channels),
            nn.ReLU(),
            conv2d(in_channels,in_channels),
            nn.ReLU(),
            NNdownsample(),                 # downsample; 4x channel growth below suggests space-to-channel repacking -- confirm
            conv2d(4*in_channels,4*in_channels),
            nn.ReLU(),
            conv2d(4*in_channels,4*in_channels),
            nn.ReLU(),
            conv2d(4*in_channels,4*in_channels),
            nn.ReLU(),
            NNdownsample(),
            conv2d(16*in_channels,16*in_channels),
            nn.ReLU(),
            conv2d(16*in_channels,16*in_channels),
            nn.ReLU(),
            conv2d(16*in_channels,16*in_channels),
            nn.ReLU(),
            NNdownsample(),
            conv2d(64*in_channels,64*in_channels),
            nn.ReLU(),
            conv2d(64*in_channels,64*in_channels),
            nn.ReLU(),
            conv2d(64*in_channels,64*in_channels),
            nn.ReLU(),
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda u:u.mean(-1).mean(-1)),   # global average pool over spatial dims
            nn.Linear(64*in_channels,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        self.prior = StandardNormal(in_channels*res*res)
@export
class iCNN3dCoords(FlowNetwork):
    """iCNN3d variant built from coordinate-aware invertible blocks (iCoordSelu)."""

    def __init__(self, in_channels=3, num_classes=10,res=32):
        super().__init__()
        self.num_classes = num_classes
        self.body = iSequential(
            iLogits(),                      # invertible (0,1) -> logit-space pixel transform
            # Three coord-conditioned invertible blocks per resolution stage;
            # channels grow 4x at each iAvgPool2d, mirroring iCNN3d.
            *[iCoordSelu(in_channels) for i in range(3)],
            iAvgPool2d(),
            *[iCoordSelu(4*in_channels) for i in range(3)],
            iAvgPool2d(),
            *[iCoordSelu(16*in_channels) for i in range(3)],
            iAvgPool2d(),
            *[iCoordSelu(64*in_channels) for i in range(3)],
            iConv2d(64*in_channels,64*in_channels),
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda u:u.mean(-1).mean(-1)),   # global average pool over spatial dims
            nn.Linear(64*in_channels,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        # Prior dimension equals the input dimension, as a bijection requires.
        self.prior = StandardNormal(in_channels*res*res)
@export
class iLinear3d(iCNN3d):
    """Linear variant of iCNN3d: only iConv2d layers plus coordinate injection.

    No elementwise nonlinearities are stacked, so apart from iLogits the
    flow is affine. Inherits FlowNetwork plumbing via iCNN3d.
    """

    def __init__(self, num_classes=10,res=32):
        # Parent builds its modules with defaults; everything relevant is
        # overwritten below (num_classes feeds the new head, res the prior).
        super().__init__()
        self.num_classes = num_classes
        self.body = iSequential(
            iLogits(),
            iCoordInjection(3),     # presumably injects coordinate information invertibly -- confirm in iCoordInjection
            iConv2d(3,3),
            iConv2d(3,3),
            iConv2d(3,3),
            iAvgPool2d(),           # channels quadruple per downsample: 3 -> 12 -> 48 -> 192
            iCoordInjection(12),
            iConv2d(12,12),
            iConv2d(12,12),
            iConv2d(12,12),
            iAvgPool2d(),
            iCoordInjection(48),
            iConv2d(48,48),
            iConv2d(48,48),
            iConv2d(48,48),
            iAvgPool2d(),
            iCoordInjection(192),
            iConv2d(192,192),
            iConv2d(192,192),
            iConv2d(192,192),
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda u:u.mean(-1).mean(-1)),   # global average pool over spatial dims
            nn.Linear(192,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        self.prior = StandardNormal(3*res*res)
| 33.312849
| 118
| 0.558611
| 1,273
| 11,926
| 5.090338
| 0.102121
| 0.08179
| 0.048148
| 0.054321
| 0.770062
| 0.765432
| 0.73642
| 0.676852
| 0.646759
| 0.618519
| 0
| 0.043283
| 0.31033
| 11,926
| 357
| 119
| 33.406162
| 0.744559
| 0.016183
| 0
| 0.730159
| 0
| 0
| 0.000512
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053968
| false
| 0.142857
| 0.04127
| 0.019048
| 0.152381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
12a5b9562a3f57aa565d31eeaaf5edbfc389545f
| 109
|
py
|
Python
|
Python/libraries/recognizers-sequence/recognizers_sequence/sequence/config/ip_configuration.py
|
XiaoxiaoMa0815/Recognizers-Text
|
d9a4bc939348bd79b5982345255961dff5f356c6
|
[
"MIT"
] | 2
|
2017-08-22T11:21:19.000Z
|
2017-09-17T20:06:00.000Z
|
Python/libraries/recognizers-sequence/recognizers_sequence/sequence/config/ip_configuration.py
|
XiaoxiaoMa0815/Recognizers-Text
|
d9a4bc939348bd79b5982345255961dff5f356c6
|
[
"MIT"
] | 76
|
2018-11-09T18:19:44.000Z
|
2019-08-20T20:29:53.000Z
|
Python/libraries/recognizers-sequence/recognizers_sequence/sequence/config/ip_configuration.py
|
XiaoxiaoMa0815/Recognizers-Text
|
d9a4bc939348bd79b5982345255961dff5f356c6
|
[
"MIT"
] | 6
|
2017-05-04T17:24:59.000Z
|
2019-07-23T15:48:44.000Z
|
class IpConfiguration:
    """Configuration holder for the IP-address sequence recognizer.

    Stores the recognition options supplied by the caller; consumers read
    them back through the ``options`` attribute.
    """

    options: object  # opaque options value supplied by the caller -- shape not constrained here

    def __init__(self, options):
        self.options = options

    def __repr__(self) -> str:
        # Debugging aid: show the class name and the stored options.
        return f"{type(self).__name__}(options={self.options!r})"
| 15.571429
| 32
| 0.669725
| 11
| 109
| 6.272727
| 0.636364
| 0.318841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.256881
| 109
| 6
| 33
| 18.166667
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
12b05293992b33d0b272a9e9327c9b8800e9327d
| 2,217
|
py
|
Python
|
tests/test_include_parser.py
|
jeeb/EasyClangComplete
|
0d0e4338c31e8fcffd9809cbce9d8a02b0e69fe2
|
[
"MIT"
] | null | null | null |
tests/test_include_parser.py
|
jeeb/EasyClangComplete
|
0d0e4338c31e8fcffd9809cbce9d8a02b0e69fe2
|
[
"MIT"
] | null | null | null |
tests/test_include_parser.py
|
jeeb/EasyClangComplete
|
0d0e4338c31e8fcffd9809cbce9d8a02b0e69fe2
|
[
"MIT"
] | null | null | null |
"""Test compilation database flags generation."""
import imp
from unittest import TestCase
from os import path
from EasyClangComplete.plugin.utils import include_parser
imp.reload(include_parser)
class TestIncludeParser(TestCase):
    """Tests for include_parser.get_all_headers.

    The three test methods shared near-identical setup and assertion code;
    it is factored into private helpers so each test states only what
    varies (prefix, separator handling, expected results).
    """

    def _get_headers(self, prefix, force_unix):
        """Run get_all_headers over this test's folder; return (base_folder, res)."""
        base_folder = path.dirname(__file__)
        _, res = include_parser.get_all_headers(
            folders=[base_folder],
            prefix=prefix,
            force_unix_includes=force_unix,
            completion_request=None)
        return base_folder, res

    def _assert_has_completion(self, res, base_folder, local_file_path):
        """Assert res contains the completion pair for local_file_path."""
        expected_completion = [
            '{}\t{}'.format(local_file_path, base_folder), local_file_path]
        self.assertIn(expected_completion, res)

    def test_get_all_includes(self):
        """Test getting all includes."""
        base_folder, res = self._get_headers(prefix='', force_unix=False)
        self.assertEqual(len(res), 5)
        self._assert_has_completion(
            res, base_folder, path.normpath('cmake_tests/lib/a.h'))
        self._assert_has_completion(
            res, base_folder, path.normpath('makefile_files/inc/bar.h'))

    def test_get_specific_includes(self):
        """Test getting only specific includes."""
        base_folder, res = self._get_headers(prefix='cmake_', force_unix=False)
        self.assertEqual(len(res), 1)
        self._assert_has_completion(
            res, base_folder, path.normpath('cmake_tests/lib/a.h'))

    def test_get_specific_includes_force_unix(self):
        """Test getting only specific includes."""
        base_folder, res = self._get_headers(prefix='cmake_', force_unix=True)
        self.assertEqual(len(res), 1)
        # force_unix_includes=True: path stays unix-style, so no normpath.
        self._assert_has_completion(res, base_folder, 'cmake_tests/lib/a.h')
| 36.95
| 75
| 0.650429
| 255
| 2,217
| 5.286275
| 0.25098
| 0.080119
| 0.115727
| 0.059347
| 0.793769
| 0.775223
| 0.775223
| 0.775223
| 0.775223
| 0.775223
| 0
| 0.001785
| 0.241768
| 2,217
| 59
| 76
| 37.576271
| 0.800119
| 0.073072
| 0
| 0.695652
| 0
| 0
| 0.057664
| 0.011828
| 0
| 0
| 0
| 0
| 0.152174
| 1
| 0.065217
| false
| 0
| 0.086957
| 0
| 0.173913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
12be6ecfdacaff1901cdeb077ca9d25db61fca38
| 484
|
py
|
Python
|
src/patterns.py
|
AbiFranklin/Cellular-Automata
|
545baab2ea75eff1dbf8f048ee0607781528161d
|
[
"MIT"
] | null | null | null |
src/patterns.py
|
AbiFranklin/Cellular-Automata
|
545baab2ea75eff1dbf8f048ee0607781528161d
|
[
"MIT"
] | null | null | null |
src/patterns.py
|
AbiFranklin/Cellular-Automata
|
545baab2ea75eff1dbf8f048ee0607781528161d
|
[
"MIT"
] | null | null | null |
# Game-of-Life seed: a grid of blinkers stored as live-cell (x, y) coords.
# Horizontal triples lie on rows y = 1, 9, 17; vertical triples run down
# columns x = 2, 7, 12, 17. Generation order matches the original literal:
# all horizontal cells first (row-major), then vertical cells (column-major).
_H_ROWS = (1, 9, 17)
_H_XS = (1, 2, 3, 6, 7, 8, 11, 12, 13, 16, 17, 18)
_V_COLS = (2, 7, 12, 17)
_V_YS = (4, 5, 6, 12, 13, 14)
blinker = ([(x, y) for y in _H_ROWS for x in _H_XS]
           + [(x, y) for x in _V_COLS for y in _V_YS])
| 484
| 484
| 0.373967
| 121
| 484
| 1.495868
| 0.140496
| 0.022099
| 0.055249
| 0.088398
| 0.110497
| 0
| 0
| 0
| 0
| 0
| 0
| 0.411348
| 0.126033
| 484
| 1
| 484
| 484
| 0.016548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
12caf5c164cd88b0c4d134081de48e6b194f78aa
| 25,115
|
py
|
Python
|
src/rtdb/raw_models.py
|
UNINETT/django-rtdb
|
a8af13c1756581fee0a02a9da9cbb4d252d77dab
|
[
"MIT"
] | null | null | null |
src/rtdb/raw_models.py
|
UNINETT/django-rtdb
|
a8af13c1756581fee0a02a9da9cbb4d252d77dab
|
[
"MIT"
] | null | null | null |
src/rtdb/raw_models.py
|
UNINETT/django-rtdb
|
a8af13c1756581fee0a02a9da9cbb4d252d77dab
|
[
"MIT"
] | null | null | null |
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
class Acl(models.Model):
    # Access-control entry granting a principal a named right on an object.
    # Auto-generated by inspectdb against an existing database (looks like a
    # Request Tracker schema -- confirm); managed = False, so Django never
    # creates or migrates this table.
    principaltype = models.CharField(max_length=25)   # kind of principal (presumably user vs. group -- verify against the DB)
    principalid = models.IntegerField()               # id of the principal row, not a real FK here
    rightname = models.CharField(max_length=25)
    objecttype = models.CharField(max_length=25)      # polymorphic target: type name plus objectid below
    objectid = models.IntegerField()
    creator = models.IntegerField()
    created = models.DateTimeField(blank=True, null=True)
    lastupdatedby = models.IntegerField()
    lastupdated = models.DateTimeField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'acl'
class Articles(models.Model):
name = models.CharField(max_length=255)
summary = models.CharField(max_length=255)
sortorder = models.IntegerField()
class_field = models.IntegerField(db_column='class') # Field renamed because it was a Python reserved word.
parent = models.IntegerField()
uri = models.CharField(max_length=255, blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'articles'
class Attachments(models.Model):
transactionid = models.IntegerField()
parent = models.IntegerField()
messageid = models.CharField(max_length=160, blank=True, null=True)
subject = models.CharField(max_length=255, blank=True, null=True)
filename = models.CharField(max_length=255, blank=True, null=True)
contenttype = models.CharField(max_length=80, blank=True, null=True)
contentencoding = models.CharField(max_length=80, blank=True, null=True)
content = models.TextField(blank=True, null=True)
headers = models.TextField(blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
contentindex = models.TextField(blank=True, null=True) # This field type is a guess.
class Meta:
managed = False
db_table = 'attachments'
class Attributes(models.Model):
name = models.CharField(max_length=255)
description = models.CharField(max_length=255, blank=True, null=True)
content = models.TextField(blank=True, null=True)
contenttype = models.CharField(max_length=16, blank=True, null=True)
objecttype = models.CharField(max_length=64, blank=True, null=True)
objectid = models.IntegerField(blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'attributes'
class Cachedgroupmembers(models.Model):
groupid = models.IntegerField(blank=True, null=True)
memberid = models.IntegerField(blank=True, null=True)
via = models.IntegerField(blank=True, null=True)
immediateparentid = models.IntegerField(blank=True, null=True)
disabled = models.SmallIntegerField()
class Meta:
managed = False
db_table = 'cachedgroupmembers'
class Classes(models.Model):
name = models.CharField(max_length=255)
description = models.CharField(max_length=255)
sortorder = models.IntegerField()
disabled = models.SmallIntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
hotlist = models.SmallIntegerField()
class Meta:
managed = False
db_table = 'classes'
class Customfields(models.Model):
name = models.CharField(max_length=200, blank=True, null=True)
type = models.CharField(max_length=200, blank=True, null=True)
description = models.CharField(max_length=255, blank=True, null=True)
sortorder = models.IntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
disabled = models.SmallIntegerField()
lookuptype = models.CharField(max_length=255)
pattern = models.CharField(max_length=65536, blank=True, null=True)
maxvalues = models.IntegerField(blank=True, null=True)
basedon = models.IntegerField(blank=True, null=True)
rendertype = models.CharField(max_length=64, blank=True, null=True)
valuesclass = models.CharField(max_length=64, blank=True, null=True)
class Meta:
managed = False
db_table = 'customfields'
class Customfieldvalues(models.Model):
customfield = models.IntegerField()
name = models.CharField(max_length=200, blank=True, null=True)
description = models.CharField(max_length=255, blank=True, null=True)
sortorder = models.IntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
category = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'customfieldvalues'
class FmArticlecfvalues(models.Model):
article = models.IntegerField()
customfield = models.IntegerField()
content = models.TextField(blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'fm_articlecfvalues'
class FmArticles(models.Model):
name = models.CharField(max_length=255)
summary = models.CharField(max_length=255)
sortorder = models.IntegerField()
class_field = models.IntegerField(db_column='class') # Field renamed because it was a Python reserved word.
parent = models.IntegerField()
uri = models.CharField(max_length=255, blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'fm_articles'
class FmClasscustomfields(models.Model):
class_field = models.IntegerField(db_column='class') # Field renamed because it was a Python reserved word.
customfield = models.IntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
sortorder = models.SmallIntegerField()
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'fm_classcustomfields'
class FmClasses(models.Model):
name = models.CharField(max_length=255)
description = models.CharField(max_length=255)
sortorder = models.IntegerField()
disabled = models.SmallIntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
hotlist = models.SmallIntegerField()
class Meta:
managed = False
db_table = 'fm_classes'
class FmCustomfields(models.Model):
name = models.CharField(max_length=200)
type = models.CharField(max_length=200)
description = models.CharField(max_length=200)
sortorder = models.IntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'fm_customfields'
class FmCustomfieldvalues(models.Model):
customfield = models.IntegerField()
name = models.CharField(max_length=255)
description = models.CharField(max_length=255)
sortorder = models.IntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'fm_customfieldvalues'
class FmObjecttopics(models.Model):
topic = models.IntegerField()
objecttype = models.CharField(max_length=64)
objectid = models.IntegerField()
class Meta:
managed = False
db_table = 'fm_objecttopics'
class FmTopics(models.Model):
parent = models.IntegerField()
name = models.CharField(max_length=255)
description = models.CharField(max_length=255)
objecttype = models.CharField(max_length=64)
objectid = models.IntegerField()
class Meta:
managed = False
db_table = 'fm_topics'
class FmTransactions(models.Model):
article = models.IntegerField()
changelog = models.TextField()
type = models.CharField(max_length=64)
field = models.CharField(max_length=64)
oldcontent = models.TextField()
newcontent = models.TextField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'fm_transactions'
class Groupmembers(models.Model):
    # Direct group-membership edges. The sibling Cachedgroupmembers table
    # presumably stores the transitively expanded closure -- confirm.
    groupid = models.IntegerField()
    memberid = models.IntegerField()
    creator = models.IntegerField()
    created = models.DateTimeField(blank=True, null=True)
    lastupdatedby = models.IntegerField()
    lastupdated = models.DateTimeField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'groupmembers'
        # A member may appear in a given group at most once.
        unique_together = (('groupid', 'memberid'),)
class Groups(models.Model):
name = models.CharField(max_length=200, blank=True, null=True)
description = models.CharField(max_length=255, blank=True, null=True)
domain = models.CharField(max_length=64, blank=True, null=True)
type = models.CharField(max_length=64, blank=True, null=True)
instance = models.IntegerField(blank=True, null=True)
instance_int = models.IntegerField(blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'groups'
class Links(models.Model):
base = models.CharField(max_length=240, blank=True, null=True)
target = models.CharField(max_length=240, blank=True, null=True)
type = models.CharField(max_length=20)
localtarget = models.IntegerField()
localbase = models.IntegerField()
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'links'
unique_together = (('base', 'target', 'type'),)
class Objectclasses(models.Model):
class_field = models.IntegerField(db_column='class') # Field renamed because it was a Python reserved word.
objecttype = models.CharField(max_length=255)
objectid = models.IntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'objectclasses'
class Objectcustomfields(models.Model):
customfield = models.IntegerField()
objectid = models.IntegerField()
sortorder = models.IntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'objectcustomfields'
class Objectcustomfieldvalues(models.Model):
objectid = models.IntegerField()
customfield = models.IntegerField()
content = models.CharField(max_length=255, blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
objecttype = models.CharField(max_length=255)
largecontent = models.TextField(blank=True, null=True)
contenttype = models.CharField(max_length=80, blank=True, null=True)
contentencoding = models.CharField(max_length=80, blank=True, null=True)
sortorder = models.IntegerField()
disabled = models.IntegerField()
class Meta:
managed = False
db_table = 'objectcustomfieldvalues'
class Objectscrips(models.Model):
scrip = models.IntegerField()
stage = models.CharField(max_length=32)
objectid = models.IntegerField()
sortorder = models.IntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'objectscrips'
unique_together = (('objectid', 'scrip'),)
class Objecttopics(models.Model):
topic = models.IntegerField()
objecttype = models.CharField(max_length=64)
objectid = models.IntegerField()
class Meta:
managed = False
db_table = 'objecttopics'
class PgaForms(models.Model):
formname = models.CharField(max_length=64, blank=True, null=True)
formsource = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = 'pga_forms'
class PgaLayout(models.Model):
tablename = models.CharField(max_length=64, blank=True, null=True)
nrcols = models.SmallIntegerField(blank=True, null=True)
colnames = models.TextField(blank=True, null=True)
colwidth = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = 'pga_layout'
class PgaQueries(models.Model):
queryname = models.CharField(max_length=64, blank=True, null=True)
querytype = models.CharField(max_length=1, blank=True, null=True)
querycommand = models.TextField(blank=True, null=True)
querytables = models.TextField(blank=True, null=True)
querylinks = models.TextField(blank=True, null=True)
queryresults = models.TextField(blank=True, null=True)
querycomments = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = 'pga_queries'
class PgaReports(models.Model):
reportname = models.CharField(max_length=64, blank=True, null=True)
reportsource = models.TextField(blank=True, null=True)
reportbody = models.TextField(blank=True, null=True)
reportprocs = models.TextField(blank=True, null=True)
reportoptions = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = 'pga_reports'
class PgaSchema(models.Model):
schemaname = models.CharField(max_length=64, blank=True, null=True)
schematables = models.TextField(blank=True, null=True)
schemalinks = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = 'pga_schema'
class PgaScripts(models.Model):
scriptname = models.CharField(max_length=64, blank=True, null=True)
scriptsource = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = 'pga_scripts'
class Principals(models.Model):
principaltype = models.CharField(max_length=16)
objectid = models.IntegerField(blank=True, null=True)
disabled = models.SmallIntegerField()
class Meta:
managed = False
db_table = 'principals'
class Queues(models.Model):
name = models.CharField(max_length=200)
description = models.CharField(max_length=255, blank=True, null=True)
correspondaddress = models.CharField(max_length=120, blank=True, null=True)
commentaddress = models.CharField(max_length=120, blank=True, null=True)
initialpriority = models.IntegerField()
finalpriority = models.IntegerField()
defaultduein = models.IntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
disabled = models.SmallIntegerField()
subjecttag = models.CharField(max_length=120, blank=True, null=True)
lifecycle = models.CharField(max_length=32, blank=True, null=True)
class Meta:
managed = False
db_table = 'queues'
class Scripactions(models.Model):
name = models.CharField(max_length=200, blank=True, null=True)
description = models.CharField(max_length=255, blank=True, null=True)
execmodule = models.CharField(max_length=60, blank=True, null=True)
argument = models.CharField(max_length=255, blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'scripactions'
class Scripconditions(models.Model):
name = models.CharField(max_length=200, blank=True, null=True)
description = models.CharField(max_length=255, blank=True, null=True)
execmodule = models.CharField(max_length=60, blank=True, null=True)
argument = models.CharField(max_length=255, blank=True, null=True)
applicabletranstypes = models.CharField(max_length=60, blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'scripconditions'
class Scrips(models.Model):
description = models.CharField(max_length=255, blank=True, null=True)
scripcondition = models.IntegerField()
scripaction = models.IntegerField()
customisapplicablecode = models.TextField(blank=True, null=True)
custompreparecode = models.TextField(blank=True, null=True)
customcommitcode = models.TextField(blank=True, null=True)
template = models.CharField(max_length=200)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
disabled = models.SmallIntegerField()
class Meta:
managed = False
db_table = 'scrips'
class Sessions(models.Model):
    # Server-side web sessions; the session token itself is the primary key.
    id = models.CharField(primary_key=True, max_length=32)
    a_session = models.BinaryField(blank=True, null=True)  # opaque serialized session blob -- format not visible from here
    lastupdated = models.DateTimeField()

    class Meta:
        managed = False
        db_table = 'sessions'
class Templates(models.Model):
queue = models.IntegerField()
name = models.CharField(max_length=200)
description = models.CharField(max_length=255, blank=True, null=True)
type = models.CharField(max_length=16, blank=True, null=True)
content = models.TextField(blank=True, null=True)
lastupdated = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
class Meta:
managed = False
db_table = 'templates'
class Tickets(models.Model):
effectiveid = models.IntegerField()
queue = models.IntegerField()
type = models.CharField(max_length=16, blank=True, null=True)
issuestatement = models.IntegerField()
resolution = models.IntegerField()
owner = models.IntegerField()
subject = models.CharField(max_length=200, blank=True, null=True)
initialpriority = models.IntegerField()
finalpriority = models.IntegerField()
priority = models.IntegerField()
timeestimated = models.IntegerField()
timeworked = models.IntegerField()
status = models.CharField(max_length=64, blank=True, null=True)
timeleft = models.IntegerField()
told = models.DateTimeField(blank=True, null=True)
starts = models.DateTimeField(blank=True, null=True)
started = models.DateTimeField(blank=True, null=True)
due = models.DateTimeField(blank=True, null=True)
resolved = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()
lastupdated = models.DateTimeField(blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
disabled = models.SmallIntegerField()
ismerged = models.SmallIntegerField(blank=True, null=True)
class Meta:
managed = False
db_table = 'tickets'
class Topics(models.Model):
parent = models.IntegerField()
name = models.CharField(max_length=255)
description = models.CharField(max_length=255)
objecttype = models.CharField(max_length=64)
objectid = models.IntegerField()
class Meta:
managed = False
db_table = 'topics'
class Transactions(models.Model):
objectid = models.IntegerField()
timetaken = models.IntegerField()
type = models.CharField(max_length=20, blank=True, null=True)
field = models.CharField(max_length=40, blank=True, null=True)
oldvalue = models.CharField(max_length=255, blank=True, null=True)
newvalue = models.CharField(max_length=255, blank=True, null=True)
data = models.CharField(max_length=255, blank=True, null=True)
creator = models.IntegerField()
created = models.DateTimeField(blank=True, null=True)
objecttype = models.CharField(max_length=64)
referencetype = models.CharField(max_length=255, blank=True, null=True)
oldreference = models.IntegerField(blank=True, null=True)
newreference = models.IntegerField(blank=True, null=True)
class Meta:
managed = False
db_table = 'transactions'
class Users(models.Model):
name = models.CharField(max_length=200)
password = models.CharField(max_length=256, blank=True, null=True)
comments = models.TextField(blank=True, null=True)
# NOTE(review): field block of an unmanaged Django model mapped onto an existing
# `users` table (see Meta below); the model class header lies above this chunk.
# Fields with blank=True/null=True map to nullable columns of the legacy schema.
signature = models.TextField(blank=True, null=True)
emailaddress = models.CharField(max_length=120, blank=True, null=True)
freeformcontactinfo = models.TextField(blank=True, null=True)
organization = models.CharField(max_length=200, blank=True, null=True)
realname = models.CharField(max_length=120, blank=True, null=True)
nickname = models.CharField(max_length=16, blank=True, null=True)
lang = models.CharField(max_length=16, blank=True, null=True)
emailencoding = models.CharField(max_length=16, blank=True, null=True)
webencoding = models.CharField(max_length=16, blank=True, null=True)
externalcontactinfoid = models.CharField(max_length=100, blank=True, null=True)
contactinfosystem = models.CharField(max_length=30, blank=True, null=True)
externalauthid = models.CharField(max_length=100, blank=True, null=True)
authsystem = models.CharField(max_length=30, blank=True, null=True)
gecos = models.CharField(max_length=16, blank=True, null=True)
homephone = models.CharField(max_length=30, blank=True, null=True)
workphone = models.CharField(max_length=30, blank=True, null=True)
mobilephone = models.CharField(max_length=30, blank=True, null=True)
pagerphone = models.CharField(max_length=30, blank=True, null=True)
address1 = models.CharField(max_length=200, blank=True, null=True)
address2 = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
state = models.CharField(max_length=100, blank=True, null=True)
zip = models.CharField(max_length=16, blank=True, null=True)
country = models.CharField(max_length=50, blank=True, null=True)
timezone = models.CharField(max_length=50, blank=True, null=True)
pgpkey = models.TextField(blank=True, null=True)
creator = models.IntegerField()  # no null=True: NOT NULL audit column
created = models.DateTimeField(blank=True, null=True)
lastupdatedby = models.IntegerField()  # no null=True: NOT NULL audit column
lastupdated = models.DateTimeField(blank=True, null=True)
authtoken = models.CharField(max_length=16, blank=True, null=True)
smimecertificate = models.TextField(blank=True, null=True)
class Meta:
    # Unmanaged: Django migrations must never create or alter this table.
    managed = False
    db_table = 'users'
| 37.152367
| 112
| 0.715708
| 2,885
| 25,115
| 6.159792
| 0.108492
| 0.096731
| 0.139722
| 0.182713
| 0.799111
| 0.772269
| 0.708626
| 0.68775
| 0.678803
| 0.55962
| 0
| 0.015607
| 0.175951
| 25,115
| 675
| 113
| 37.207407
| 0.843061
| 0.028748
| 0
| 0.571429
| 1
| 0
| 0.022151
| 0.000943
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.001855
| 0.003711
| 0
| 0.83859
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
12e2ced74d860837a6c7fecb85b72f70c40d7266
| 10
|
py
|
Python
|
icnn_torch/__init__.py
|
kencan7749/pytorch_iCNN
|
059bf90e4e024592e2183a2bd29ee8bb3f2961a7
|
[
"MIT"
] | 6
|
2020-10-20T08:25:48.000Z
|
2022-01-09T14:03:16.000Z
|
icnn_torch/__init__.py
|
kencan7749/pytorch_iCNN
|
059bf90e4e024592e2183a2bd29ee8bb3f2961a7
|
[
"MIT"
] | null | null | null |
icnn_torch/__init__.py
|
kencan7749/pytorch_iCNN
|
059bf90e4e024592e2183a2bd29ee8bb3f2961a7
|
[
"MIT"
] | 4
|
2020-09-11T03:18:51.000Z
|
2022-01-09T14:03:36.000Z
|
"""icnn"""
| 10
| 10
| 0.4
| 1
| 10
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 1
| 10
| 10
| 0.4
| 0.4
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
12ec8d7f0f18759449943537f209607df378dde1
| 102
|
py
|
Python
|
alvinchow_backend/app/__init__.py
|
alvinchow86/python-backend-template
|
46c07d733d68bc8682afd8510a17bc2aa360c606
|
[
"MIT"
] | 6
|
2021-01-07T00:20:49.000Z
|
2022-01-13T04:53:12.000Z
|
alvinchow_backend/app/__init__.py
|
alvinchow86/python-backend-template
|
46c07d733d68bc8682afd8510a17bc2aa360c606
|
[
"MIT"
] | 4
|
2021-01-06T22:07:43.000Z
|
2021-06-02T01:52:41.000Z
|
alvinchow_backend/app/__init__.py
|
alvinchow86/python-backend-template
|
46c07d733d68bc8682afd8510a17bc2aa360c606
|
[
"MIT"
] | 1
|
2021-11-09T07:46:44.000Z
|
2021-11-09T07:46:44.000Z
|
# flake8: noqa
from .configuration import config
from .initialization import initialize, app_context
| 20.4
| 51
| 0.823529
| 12
| 102
| 6.916667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0.127451
| 102
| 4
| 52
| 25.5
| 0.921348
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
12f58ac788b5d6fa42543aefc8b1fc85f1c85976
| 48
|
py
|
Python
|
modules/bot/messages/__init__.py
|
vladpi/zenmoney-bot
|
280723a49979632811f585fb8dced3c396fe563a
|
[
"Apache-2.0"
] | null | null | null |
modules/bot/messages/__init__.py
|
vladpi/zenmoney-bot
|
280723a49979632811f585fb8dced3c396fe563a
|
[
"Apache-2.0"
] | 1
|
2022-02-16T22:29:36.000Z
|
2022-02-16T22:29:54.000Z
|
modules/bot/messages/__init__.py
|
vladpi/zenmoney-bot
|
280723a49979632811f585fb8dced3c396fe563a
|
[
"Apache-2.0"
] | null | null | null |
from . import add_expense, set_defaults # noqa
| 24
| 47
| 0.770833
| 7
| 48
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 48
| 1
| 48
| 48
| 0.875
| 0.083333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
420796cfbc007982c73f99e9f6c6855b6ce74e94
| 11,113
|
py
|
Python
|
tests/test_transforms/test_missing_values/test_impute_transform.py
|
Pacman1984/etna
|
9b3ccb980e576d56858f14aca2e06ce2957b0fa9
|
[
"Apache-2.0"
] | 96
|
2021-09-05T06:29:34.000Z
|
2021-11-07T15:22:54.000Z
|
tests/test_transforms/test_missing_values/test_impute_transform.py
|
Pacman1984/etna
|
9b3ccb980e576d56858f14aca2e06ce2957b0fa9
|
[
"Apache-2.0"
] | 188
|
2021-09-06T15:59:58.000Z
|
2021-11-17T09:34:16.000Z
|
tests/test_transforms/test_missing_values/test_impute_transform.py
|
Pacman1984/etna
|
9b3ccb980e576d56858f14aca2e06ce2957b0fa9
|
[
"Apache-2.0"
] | 8
|
2021-09-06T09:18:35.000Z
|
2021-11-11T21:18:39.000Z
|
import numpy as np
import pandas as pd
import pytest
from etna.datasets import TSDataset
from etna.models import NaiveModel
from etna.transforms.missing_values import TimeSeriesImputerTransform
from etna.transforms.missing_values.imputation import _OneSegmentTimeSeriesImputerTransform
def test_wrong_init_one_segment():
    """An unknown imputing strategy must make the one-segment imputer raise."""
    with pytest.raises(ValueError):
        _OneSegmentTimeSeriesImputerTransform(strategy="wrong_strategy")
def test_wrong_init_two_segments(all_date_present_df_two_segments):
    """An unknown imputing strategy must make the multi-segment imputer raise."""
    with pytest.raises(ValueError):
        TimeSeriesImputerTransform(strategy="wrong_strategy")
@pytest.mark.smoke
@pytest.mark.parametrize("fill_strategy", ["mean", "zero", "running_mean", "forward_fill"])
def test_all_dates_present_impute(all_date_present_df: pd.DataFrame, fill_strategy: str):
    """A gap-free series must pass through the one-segment imputer untouched."""
    transform = _OneSegmentTimeSeriesImputerTransform(strategy=fill_strategy)
    transformed = transform.fit_transform(all_date_present_df)
    np.testing.assert_array_equal(all_date_present_df["target"], transformed["target"])
@pytest.mark.smoke
@pytest.mark.parametrize("fill_strategy", ["mean", "zero", "running_mean", "forward_fill"])
def test_all_dates_present_impute_two_segments(all_date_present_df_two_segments: pd.DataFrame, fill_strategy: str):
    """A gap-free multi-segment frame must pass through the imputer untouched."""
    transform = TimeSeriesImputerTransform(strategy=fill_strategy)
    transformed = transform.fit_transform(all_date_present_df_two_segments)
    for seg in transformed.columns.get_level_values("segment"):
        np.testing.assert_array_equal(all_date_present_df_two_segments[seg]["target"], transformed[seg]["target"])
def test_all_missing_impute_zero(df_all_missing: pd.DataFrame):
    """With the "zero" strategy an all-NaN series is filled entirely with zeros."""
    transform = _OneSegmentTimeSeriesImputerTransform(strategy="zero")
    filled = transform.fit_transform(df_all_missing)
    assert np.all(filled == 0)
def test_all_missing_impute_zero_two_segments(df_all_missing_two_segments: pd.DataFrame):
    """With the "zero" strategy an all-NaN multi-segment frame becomes all zeros."""
    transform = TimeSeriesImputerTransform(strategy="zero")
    filled = transform.fit_transform(df_all_missing_two_segments)
    assert np.all(filled == 0)
@pytest.mark.parametrize("fill_strategy", ["mean", "running_mean", "forward_fill"])
def test_all_missing_impute_fail(df_all_missing: pd.DataFrame, fill_strategy: str):
    """Strategies that need observed data must fail on an all-NaN series."""
    transform = _OneSegmentTimeSeriesImputerTransform(strategy=fill_strategy)
    with pytest.raises(ValueError, match="It isn't possible to make imputation"):
        transform.fit_transform(df_all_missing)
@pytest.mark.parametrize("fill_strategy", ["mean", "running_mean", "forward_fill"])
def test_all_missing_impute_fail_two_segments(df_all_missing_two_segments: pd.DataFrame, fill_strategy: str):
    """Strategies that need observed data must fail on an all-NaN multi-segment frame."""
    transform = TimeSeriesImputerTransform(strategy=fill_strategy)
    with pytest.raises(ValueError, match="It isn't possible to make imputation"):
        transform.fit_transform(df_all_missing_two_segments)
def test_one_missing_value_zero(df_with_missing_value_x_index: pd.DataFrame):
    """Zero strategy fills a single missing point with 0."""
    frame, missing_idx = df_with_missing_value_x_index
    transform = _OneSegmentTimeSeriesImputerTransform(strategy="zero")
    filled = transform.fit_transform(frame)["target"]
    assert filled.loc[missing_idx] == 0
    assert not filled.isna().any()
def test_range_missing_zero(df_with_missing_range_x_index: pd.DataFrame):
    """Zero strategy fills a whole gap of missing points with 0."""
    frame, gap = df_with_missing_range_x_index
    transform = _OneSegmentTimeSeriesImputerTransform(strategy="zero")
    filled = transform.fit_transform(frame)["target"]
    expected = pd.Series(index=gap, data=[0] * len(gap), name="target")
    np.testing.assert_array_almost_equal(filled.loc[gap].reset_index(drop=True), expected)
    assert not filled.isna().any()
def test_one_missing_value_mean(df_with_missing_value_x_index: pd.DataFrame):
    """Mean strategy fills a single missing point with the series mean."""
    frame, missing_idx = df_with_missing_value_x_index
    transform = _OneSegmentTimeSeriesImputerTransform(strategy="mean")
    # Mean is taken before imputation, so NaNs are excluded by pandas.
    target_mean = frame["target"].mean()
    filled = transform.fit_transform(frame)["target"]
    assert filled.loc[missing_idx] == target_mean
    assert not filled.isna().any()
def test_range_missing_mean(df_with_missing_range_x_index):
    """Mean strategy fills a whole gap with the series mean."""
    frame, gap = df_with_missing_range_x_index
    transform = _OneSegmentTimeSeriesImputerTransform(strategy="mean")
    filled = transform.fit_transform(frame)["target"]
    # Keep the original ordering: the mean is read after fit_transform.
    target_mean = frame["target"].mean()
    expected = pd.Series(index=gap, data=[target_mean] * len(gap), name="target")
    np.testing.assert_array_almost_equal(filled.loc[gap].reset_index(drop=True), expected)
    assert not filled.isna().any()
def test_one_missing_value_forward_fill(df_with_missing_value_x_index):
    """Forward-fill strategy copies the previous observation into the gap."""
    frame, missing_idx = df_with_missing_value_x_index
    transform = _OneSegmentTimeSeriesImputerTransform(strategy="forward_fill")
    filled = transform.fit_transform(frame)["target"]
    ordered_ts = np.array(sorted(frame.index))
    pos = np.where(ordered_ts == missing_idx)[0][0]
    previous = frame.loc[ordered_ts[pos - 1], "target"]
    assert filled.loc[missing_idx] == previous
    assert not filled.isna().any()
def test_range_missing_forward_fill(df_with_missing_range_x_index: pd.DataFrame):
    """Forward-fill strategy propagates the last value before the gap across it."""
    frame, gap = df_with_missing_range_x_index
    transform = _OneSegmentTimeSeriesImputerTransform(strategy="forward_fill")
    filled = transform.fit_transform(frame)["target"]
    ordered_ts = np.array(sorted(frame.index))
    gap = [pd.Timestamp(t) for t in gap]
    first_gap_pos = min(np.where([t in gap for t in ordered_ts])[0])
    previous = frame.loc[ordered_ts[first_gap_pos - 1], "target"]
    expected = pd.Series(index=gap, data=[previous] * len(gap), name="target")
    np.testing.assert_array_almost_equal(filled.loc[gap], expected)
    assert not filled.isna().any()
@pytest.mark.parametrize("window", [1, -1, 2])
def test_one_missing_value_running_mean(df_with_missing_value_x_index: pd.DataFrame, window: int):
    """Running-mean strategy fills one gap with the mean of the preceding window."""
    frame, missing_idx = df_with_missing_value_x_index
    ordered_ts = np.array(sorted(frame.index))
    pos = np.where(ordered_ts == missing_idx)[0][0]
    transform = _OneSegmentTimeSeriesImputerTransform(strategy="running_mean", window=window)
    if window == -1:
        # window == -1 means "all history up to the gap".
        expected = frame.loc[: ordered_ts[pos - 1], "target"].mean()
    else:
        expected = frame.loc[ordered_ts[pos - window] : ordered_ts[pos - 1], "target"].mean()
    filled = transform.fit_transform(frame)["target"]
    assert filled.loc[missing_idx] == expected
    assert not filled.isna().any()
@pytest.mark.parametrize("window", [1, -1, 2])
def test_range_missing_running_mean(df_with_missing_range_x_index: pd.DataFrame, window: int):
    """Running-mean strategy fills each gap point from the already-filled history."""
    frame, gap = df_with_missing_range_x_index
    ordered_ts = np.array(sorted(frame.index))
    gap_positions = np.where([t in gap for t in ordered_ts])[0]
    transform = _OneSegmentTimeSeriesImputerTransform(strategy="running_mean", window=window)
    filled = transform.fit_transform(frame)["target"]
    assert not filled.isna().any()
    # Expectations are computed on the imputed series, so earlier filled
    # points feed the window of later ones.
    for pos in gap_positions:
        if window == -1:
            expected = filled.loc[: ordered_ts[pos - 1]].mean()
        else:
            expected = filled.loc[ordered_ts[pos - window] : ordered_ts[pos - 1]].mean()
        assert filled.loc[ordered_ts[pos]] == expected
@pytest.mark.parametrize("fill_strategy", ["mean", "zero", "running_mean", "forward_fill"])
def test_inverse_transform_one_segment(df_with_missing_range_x_index: pd.DataFrame, fill_strategy: str):
    """fit_transform followed by inverse_transform must restore the original frame."""
    frame, _ = df_with_missing_range_x_index
    transform = _OneSegmentTimeSeriesImputerTransform(strategy=fill_strategy)
    restored = transform.inverse_transform(transform.fit_transform(frame))
    np.testing.assert_array_equal(frame, restored)
@pytest.mark.parametrize("fill_strategy", ["mean", "zero", "running_mean", "forward_fill"])
def test_inverse_transform_many_segments(df_with_missing_range_x_index_two_segments: pd.DataFrame, fill_strategy: str):
    """fit_transform then inverse_transform must restore the two-segment frame."""
    frame, _ = df_with_missing_range_x_index_two_segments
    transform = TimeSeriesImputerTransform(strategy=fill_strategy)
    restored = transform.inverse_transform(transform.fit_transform(frame))
    np.testing.assert_array_equal(frame, restored)
@pytest.mark.parametrize("fill_strategy", ["mean", "zero", "running_mean", "forward_fill"])
def test_inverse_transform_in_forecast(df_with_missing_range_x_index_two_segments: pd.DataFrame, fill_strategy: str):
    """inverse_transform must leave forecasted values untouched."""
    frame, _ = df_with_missing_range_x_index_two_segments
    ts = TSDataset(frame, freq=pd.infer_freq(frame.index))
    transform = TimeSeriesImputerTransform(strategy=fill_strategy)
    model = NaiveModel()
    ts.fit_transform(transforms=[transform])
    model.fit(ts)
    future = ts.make_future(3)
    # The future frame starts all-NaN before forecasting.
    assert np.all(future[:, :, "target"].isna())
    forecast = model.forecast(future)
    for segment in ts.segments:
        last_observed = ts[:, segment, "target"].values[-1]
        # NaiveModel repeats the last observed value in every segment.
        assert np.all(forecast[:, segment, "target"] == last_observed)
@pytest.mark.parametrize("fill_strategy", ["mean", "zero", "running_mean", "forward_fill"])
def test_fit_transform_with_nans(fill_strategy, ts_diff_endings):
    """The imputer must clear NaNs left at the differing ends of segments."""
    transform = TimeSeriesImputerTransform(in_column="target", strategy=fill_strategy)
    ts_diff_endings.fit_transform([transform])
    assert (ts_diff_endings[:, :, "target"].isna()).sum().sum() == 0
| 51.211982
| 119
| 0.758301
| 1,505
| 11,113
| 5.314286
| 0.092359
| 0.03901
| 0.035759
| 0.043761
| 0.842461
| 0.777319
| 0.752188
| 0.730933
| 0.679295
| 0.626032
| 0
| 0.002803
| 0.133177
| 11,113
| 216
| 120
| 51.449074
| 0.827468
| 0.148205
| 0
| 0.578947
| 0
| 0
| 0.07464
| 0
| 0
| 0
| 0
| 0
| 0.164474
| 1
| 0.131579
| false
| 0
| 0.046053
| 0
| 0.177632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
421c31b5a532f664d94b87ff14da13cd6a784517
| 55
|
py
|
Python
|
baselines/ple/games/__init__.py
|
MouseHu/emdqn
|
ba907e959f21dd0b5a17117accccae9c82a79a3b
|
[
"MIT"
] | null | null | null |
baselines/ple/games/__init__.py
|
MouseHu/emdqn
|
ba907e959f21dd0b5a17117accccae9c82a79a3b
|
[
"MIT"
] | null | null | null |
baselines/ple/games/__init__.py
|
MouseHu/emdqn
|
ba907e959f21dd0b5a17117accccae9c82a79a3b
|
[
"MIT"
] | 1
|
2021-04-26T13:55:47.000Z
|
2021-04-26T13:55:47.000Z
|
from baselines.ple.games.monsterkong import MonsterKong
| 55
| 55
| 0.890909
| 7
| 55
| 7
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054545
| 55
| 1
| 55
| 55
| 0.942308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
422fbdfead8482493d16ff4fa25602e6ae1f849c
| 95
|
py
|
Python
|
util/extensions/python_extensions.py
|
kognitive/BootstrappedDQN
|
0d72e0a3e6f39c9a4e797a17911e2beec352b14a
|
[
"MIT"
] | 2
|
2020-08-08T13:21:40.000Z
|
2021-09-28T14:40:11.000Z
|
util/extensions/python_extensions.py
|
kosmitive/bootstrapped-dqn
|
0d72e0a3e6f39c9a4e797a17911e2beec352b14a
|
[
"MIT"
] | null | null | null |
util/extensions/python_extensions.py
|
kosmitive/bootstrapped-dqn
|
0d72e0a3e6f39c9a4e797a17911e2beec352b14a
|
[
"MIT"
] | null | null | null |
def set_default_val(config, key, value):
    """Insert ``value`` under ``key`` in ``config`` unless the key already exists.

    Existing entries are never overwritten; works with any mapping that
    supports ``in`` and item assignment.
    """
    if key in config:
        return
    config[key] = value
| 23.75
| 40
| 0.652632
| 15
| 95
| 4
| 0.666667
| 0.3
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.252632
| 95
| 4
| 41
| 23.75
| 0.84507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
423c53b240d2d832369168b2fc84f3596daabeae
| 73
|
py
|
Python
|
xvision/ops/__init__.py
|
jimmysue/xvision
|
bf5aa567a197b3e4c9fdd285c80b4f7512d14d7a
|
[
"MIT"
] | 3
|
2021-04-08T10:50:53.000Z
|
2021-11-15T07:26:16.000Z
|
xvision/ops/__init__.py
|
jimmysue/xvision
|
bf5aa567a197b3e4c9fdd285c80b4f7512d14d7a
|
[
"MIT"
] | 3
|
2021-08-05T07:40:52.000Z
|
2021-11-16T05:53:29.000Z
|
xvision/ops/__init__.py
|
jimmysue/xvision
|
bf5aa567a197b3e4c9fdd285c80b4f7512d14d7a
|
[
"MIT"
] | 1
|
2021-12-15T05:57:48.000Z
|
2021-12-15T05:57:48.000Z
|
from .euclidean_loss import euclidean_loss
from .emd_loss import emd_loss
| 36.5
| 42
| 0.876712
| 12
| 73
| 5
| 0.416667
| 0.433333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09589
| 73
| 2
| 43
| 36.5
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
423ec184c5045ba6b6b791f7f08adee388268c61
| 34
|
py
|
Python
|
do-cleaner.py
|
kintarowonders/archive-scripts
|
9b628b1ad6e926b6a1e376157f847388e81f8a82
|
[
"Unlicense"
] | null | null | null |
do-cleaner.py
|
kintarowonders/archive-scripts
|
9b628b1ad6e926b6a1e376157f847388e81f8a82
|
[
"Unlicense"
] | null | null | null |
do-cleaner.py
|
kintarowonders/archive-scripts
|
9b628b1ad6e926b6a1e376157f847388e81f8a82
|
[
"Unlicense"
] | null | null | null |
# Script entry point: delegate the whole cleanup run to the cleaner module.
import cleaner
cleaner.doClean()
| 8.5
| 17
| 0.794118
| 4
| 34
| 6.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 3
| 18
| 11.333333
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
423fc1a40a893cdaf544427f56605cd0ffa3181d
| 22,627
|
py
|
Python
|
tests/unit/test_cache.py
|
scorphus/holmes-api
|
6b3c76d4299fecf2d8799d7b5c3c6a6442cacd59
|
[
"MIT"
] | null | null | null |
tests/unit/test_cache.py
|
scorphus/holmes-api
|
6b3c76d4299fecf2d8799d7b5c3c6a6442cacd59
|
[
"MIT"
] | null | null | null |
tests/unit/test_cache.py
|
scorphus/holmes-api
|
6b3c76d4299fecf2d8799d7b5c3c6a6442cacd59
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import time
from gzip import GzipFile
from cStringIO import StringIO
from ujson import dumps, loads
import msgpack
from preggy import expect
from tornado.testing import gen_test
from tornado.gen import Task
from holmes.cache import Cache
from holmes.models import Domain, Limiter, Page
from tests.unit.base import ApiTestCase
from tests.fixtures import (
DomainFactory, PageFactory, ReviewFactory, LimiterFactory,
DomainsViolationsPrefsFactory, KeyFactory
)
class CacheTestCase(ApiTestCase):
    """Tests for the asynchronous (tornado gen) cache backed by redis and a DB."""

    @property
    def cache(self):
        # Shortcut to the application-wide cache instance.
        return self.server.application.cache

    def test_cache_is_in_server(self):
        expect(self.server.application.cache).to_be_instance_of(Cache)

    def test_cache_has_connection_to_redis(self):
        expect(self.server.application.cache.redis).not_to_be_null()

    def test_cache_has_connection_to_db(self):
        expect(self.server.application.cache.db).not_to_be_null()

    @gen_test
    def test_increment_active_review_count(self):
        key = 'g.com-active-review-count'
        self.cache.redis.delete(key)
        gcom = DomainFactory.create(url='http://g.com', name='g.com')
        page = PageFactory.create(domain=gcom)
        # One active review -> initial count of 1.
        ReviewFactory.create(
            is_active=True,
            is_complete=True,
            domain=gcom,
            page=page,
            number_of_violations=1
        )
        page = PageFactory.create(domain=gcom)
        # Inactive reviews must not be counted.
        ReviewFactory.create(
            is_active=False,
            is_complete=True,
            domain=gcom,
            page=page,
            number_of_violations=3
        )
        page_count = yield self.cache.get_active_review_count('g.com')
        expect(page_count).to_equal(1)
        yield self.cache.increment_active_review_count('g.com')
        page_count = yield self.cache.get_active_review_count('g.com')
        expect(page_count).to_equal(2)

    @gen_test
    def test_can_get_active_review_count_for_domain(self):
        self.db.query(Domain).delete()
        globocom = DomainFactory.create(url="http://globo.com", name="globo.com")
        DomainFactory.create(url="http://g1.globo.com", name="g1.globo.com")
        page = PageFactory.create(domain=globocom)
        ReviewFactory.create(is_active=True, is_complete=True, domain=globocom, page=page, number_of_violations=10)
        page2 = PageFactory.create(domain=globocom)
        ReviewFactory.create(is_active=True, is_complete=True, domain=globocom, page=page2, number_of_violations=10)
        ReviewFactory.create(is_active=False, is_complete=True, domain=globocom, page=page2, number_of_violations=10)
        count = yield self.cache.get_active_review_count('globo.com')
        expect(count).to_equal(2)
        # Second read must be served from cache: drop the db handle to prove it.
        self.cache.db = None
        count = yield self.cache.get_active_review_count('globo.com')
        expect(count).to_equal(2)

    @gen_test
    def test_can_store_processed_page_lock(self):
        yield self.cache.lock_page('http://www.globo.com')
        result = yield Task(self.cache.redis.get, 'http://www.globo.com-lock')
        expect(int(result)).to_equal(1)

    @gen_test
    def test_can_get_url_was_added(self):
        yield self.cache.lock_page('http://www.globo.com')
        result = yield self.cache.has_lock('http://www.globo.com')
        expect(result).to_be_true()

    @gen_test
    def test_release_lock_page(self):
        yield self.cache.lock_page('http://www.globo.com')
        result = yield self.cache.has_lock('http://www.globo.com')
        expect(result).to_be_true()
        yield self.cache.release_lock_page('http://www.globo.com')
        result = yield self.cache.has_lock('http://www.globo.com')
        expect(result).to_be_false()

    @gen_test
    def test_can_remove_domain_limiters_key(self):
        self.cache.redis.delete('domain-limiters')
        domains = yield Task(self.cache.redis.get, 'domain-limiters')
        expect(domains).to_be_null()
        yield Task(self.cache.redis.setex, 'domain-limiters', 10, 10)
        domains = yield Task(self.cache.redis.get, 'domain-limiters')
        expect(domains).to_equal('10')
        yield self.cache.remove_domain_limiters_key()
        domains = yield Task(self.cache.redis.get, 'domain-limiters')
        expect(domains).to_be_null()

    @gen_test
    def test_can_get_limit_usage(self):
        url = 'http://globo.com'
        key = 'limit-for-%s' % url
        self.cache.redis.delete(key)
        yield Task(self.cache.redis.zadd, key, {'a': 1, 'b': 2, 'c': 3})
        limit = yield Task(self.cache.redis.zcard, key)
        expect(limit).to_equal(3)
        limit = yield self.cache.get_limit_usage(url)
        expect(limit).to_equal(3)

    @gen_test
    def test_can_remove_limit_usage_by_domain(self):
        domain_url = 'http://globo.com'
        key1 = 'limit-for-%s' % domain_url
        self.cache.redis.delete(key1)
        key2 = 'limit-for-%s/sa/' % domain_url
        self.cache.redis.delete(key2)
        yield Task(self.cache.redis.zadd, key1, {'a': 1})
        yield Task(self.cache.redis.zadd, key2, {'b': 1})
        keys = yield Task(self.cache.redis.keys, 'limit-for-%s*' % domain_url)
        expect(keys).to_length(2)
        yield Task(self.cache.delete_limit_usage_by_domain, domain_url)
        keys = yield Task(self.cache.redis.keys, 'limit-for-%s*' % domain_url)
        expect(keys).to_length(0)

    @gen_test
    def test_increment_page_score(self):
        # BUG FIX: the zset exercised below is 'page-scores'; the original
        # deleted 'pages-score' (typo) and so never actually cleaned state.
        self.cache.redis.delete('page-scores')
        total = yield Task(self.cache.redis.zcard, 'page-scores')
        expect(int(total)).to_equal(0)
        yield self.cache.increment_page_score('page-1')
        score = yield Task(self.cache.redis.zscore, 'page-scores', 'page-1')
        expect(int(score)).to_equal(1)
        yield self.cache.increment_page_score('page-1')
        score = yield Task(self.cache.redis.zscore, 'page-scores', 'page-1')
        expect(int(score)).to_equal(2)

    @gen_test
    def test_can_delete_domain_violations_prefs(self):
        domain_url = 'globo.com'
        key = 'violations-prefs-%s' % domain_url
        self.cache.redis.delete(key)
        prefs = yield Task(self.cache.redis.get, key)
        expect(prefs).to_be_null()
        data = dumps([{'key': 'test', 'value': '10'}])
        yield Task(self.cache.redis.setex, key, 1, data)
        prefs = yield Task(self.cache.redis.get, key)
        expect(prefs).to_be_like(data)
        yield self.cache.delete_domain_violations_prefs(domain_url)
        prefs = yield Task(self.cache.redis.get, key)
        expect(prefs).to_be_null()

    @gen_test
    def test_add_next_job_bucket(self):
        key = 'next-job-bucket'
        self.cache.redis.delete(key)
        prefs = yield Task(self.cache.redis.get, key)
        expect(prefs).to_be_null()
        for x in range(2):
            page = PageFactory.create(uuid='%d' % x, url='http://g%d.com' % x)
            yield Task(self.cache.add_next_job_bucket, page.uuid, page.url)
        data = yield Task(self.cache.redis.zrange, key, 0, 0)
        expect(data).to_be_like([dumps({"url": "http://g0.com", "page": "0"})])
        data = yield Task(self.cache.redis.zrange, key, 1, 1)
        expect(data).to_be_like([dumps({"url": "http://g1.com", "page": "1"})])

    @gen_test
    def test_can_get_next_job_list(self):
        key = 'next-job-bucket'
        self.cache.redis.delete(key)
        for x in range(2):
            page = PageFactory.create(uuid='%d' % x, url='http://g%d.com' % x)
            yield Task(self.cache.add_next_job_bucket, page.uuid, page.url)
        data = yield Task(self.cache.get_next_job_list, 1, 10)
        expect([loads(job) for job in data]).to_equal([
            {"url": "http://g0.com", "page": "0"},
            {"url": "http://g1.com", "page": "1"}
        ])
class SyncCacheTestCase(ApiTestCase):
def setUp(self):
super(SyncCacheTestCase, self).setUp()
self.db.query(Domain).delete()
self.db.query(Page).delete()
@property
def sync_cache(self):
return self.connect_to_sync_redis()
@property
def config(self):
return self.server.application.config
def test_cache_has_connection_to_redis(self):
expect(self.sync_cache.redis).not_to_be_null()
def test_cache_has_connection_to_db(self):
expect(self.sync_cache.db).not_to_be_null()
def test_can_get_domain_limiters(self):
self.db.query(Limiter).delete()
self.sync_cache.redis.delete('domain-limiters')
domains = self.sync_cache.get_domain_limiters()
expect(domains).to_be_null()
limiter = LimiterFactory.create(url='http://test.com/')
LimiterFactory.create()
LimiterFactory.create()
domains = self.sync_cache.get_domain_limiters()
expect(domains).to_length(3)
expect(domains).to_include({limiter.url: limiter.value})
# should get from cache
self.sync_cache.db = None
domains = self.sync_cache.get_domain_limiters()
expect(domains).to_length(3)
def test_can_set_domain_limiters(self):
self.db.query(Limiter).delete()
self.sync_cache.redis.delete('domain-limiters')
domains = self.sync_cache.get_domain_limiters()
expect(domains).to_be_null()
limiters = [{u'http://test.com/': 10}]
self.sync_cache.set_domain_limiters(limiters, 120)
domains = self.sync_cache.get_domain_limiters()
expect(domains).to_length(1)
expect(domains).to_include(limiters[0])
def test_has_key(self):
self.sync_cache.redis.delete('my-key')
has_my_key = self.sync_cache.has_key('my-key')
expect(has_my_key).to_be_false()
self.sync_cache.redis.setex('my-key', 10, '')
has_my_key = self.sync_cache.has_key('my-key')
expect(has_my_key).to_be_true()
def test_get_domain_name(self):
testcom = self.sync_cache.get_domain_name('test.com')
expect(testcom).to_equal('test.com')
gcom = DomainFactory.create(url='http://g.com', name='g.com')
domain_name = self.sync_cache.get_domain_name(gcom)
expect(domain_name).to_equal('g.com')
empty_domain_name = self.sync_cache.get_domain_name('')
expect(empty_domain_name).to_equal('page')
def test_increment_active_review_count(self):
key = 'g.com-active-review-count'
self.sync_cache.redis.delete(key)
gcom = DomainFactory.create(url='http://g.com', name='g.com')
page = PageFactory.create(domain=gcom)
ReviewFactory.create(
is_active=True,
is_complete=True,
domain=gcom,
page=page,
number_of_violations=1
)
page = PageFactory.create(domain=gcom)
ReviewFactory.create(
is_active=False,
is_complete=True,
domain=gcom,
page=page,
number_of_violations=3
)
self.sync_cache.increment_active_review_count(gcom.name)
active_review_count = self.sync_cache.redis.get(key)
expect(active_review_count).to_equal('1')
self.sync_cache.increment_active_review_count(gcom.name)
active_review_count = self.sync_cache.redis.get(key)
expect(active_review_count).to_equal('2')
def test_increment_count(self):
key = 'g.com-my-key'
self.sync_cache.redis.delete(key)
gcom = DomainFactory.create(url="http://g.com", name="g.com")
PageFactory.create(domain=gcom)
self.sync_cache.increment_count(
'my-key',
gcom.name,
lambda domain: domain.get_page_count(self.db)
)
page_count = self.sync_cache.redis.get(key)
expect(page_count).to_equal('1')
self.sync_cache.increment_count(
'my-key',
gcom.name,
lambda domain: domain.get_page_count(self.db)
)
page_count = self.sync_cache.redis.get(key)
expect(page_count).to_equal('2')
def test_get_active_review_count(self):
self.sync_cache.redis.delete('g.com-active-review-count')
gcom = DomainFactory.create(url="http://g.com", name="g.com")
DomainFactory.create(url="http://g1.globo.com", name="g1.globo.com")
page = PageFactory.create(domain=gcom)
page2 = PageFactory.create(domain=gcom)
ReviewFactory.create(
is_active=True,
is_complete=True,
domain=gcom,
page=page,
number_of_violations=10
)
ReviewFactory.create(
is_active=True,
is_complete=True,
domain=gcom,
page=page2,
number_of_violations=10
)
ReviewFactory.create(
is_active=False,
is_complete=True,
domain=gcom,
page=page2,
number_of_violations=10
)
count = self.sync_cache.get_active_review_count(gcom.name)
expect(count).to_equal(2)
# should get from cache
self.sync_cache.db = None
count = self.sync_cache.get_active_review_count(gcom.name)
expect(count).to_equal(2)
def test_get_count(self):
key = 'g.com-my-key'
self.sync_cache.redis.delete(key)
gcom = DomainFactory.create(url="http://g.com", name="g.com")
PageFactory.create(domain=gcom)
count = self.sync_cache.get_count(
key,
gcom.name,
int(self.config.PAGE_COUNT_EXPIRATION_IN_SECONDS),
lambda domain: domain.get_page_count(self.db)
)
expect(count).to_equal(1)
# should get from cache
self.sync_cache.db = None
count = self.sync_cache.get_count(
key,
gcom.name,
int(self.config.PAGE_COUNT_EXPIRATION_IN_SECONDS),
lambda domain: domain.get_page_count(self.db)
)
expect(count).to_equal(1)
def test_get_request_with_url_not_cached(self):
url = 'http://g.com/test.html'
key = 'urls-%s' % url
self.sync_cache.redis.delete(key)
url, response = self.sync_cache.get_request(url)
expect(url).to_equal('http://g.com/test.html')
expect(response).to_be_null()
def test_get_request_with_url_cached(self):
url = 'http://g.com/test.html'
key = 'urls-%s' % url
self.sync_cache.redis.delete(key)
out = StringIO()
with GzipFile(fileobj=out, mode="w") as f:
f.write('')
text = out.getvalue()
value = msgpack.packb({
'url': url,
'body': text,
'status_code': 200,
'headers': None,
'cookies': None,
'effective_url': 'http://g.com/test.html',
'error': None,
'request_time': str(100)
})
self.sync_cache.redis.setex(
key,
10,
value
)
url, response = self.sync_cache.get_request(url)
expect(url).to_equal('http://g.com/test.html')
expect(response.status_code).to_equal(200)
expect(response.effective_url).to_equal(url)
expect(response.request_time).to_equal(100)
def test_set_request(self):
test_url = 'http://g.com/test.html'
key = 'urls-%s' % test_url
self.sync_cache.redis.delete(key)
url, response = self.sync_cache.get_request(test_url)
expect(url).to_equal('http://g.com/test.html')
expect(response).to_be_null()
self.sync_cache.set_request(
url=url,
status_code=200,
headers={'X-HEADER': 'test'},
cookies=None,
text='',
effective_url='http://g.com/test.html',
error=None,
request_time=100,
expiration=5
)
url, response = self.sync_cache.get_request(test_url)
expect(url).to_equal('http://g.com/test.html')
expect(response.status_code).to_equal(200)
expect(response.headers.get('X-HEADER')).to_equal('test')
expect(response.cookies).to_be_null()
expect(response.effective_url).to_equal(url)
expect(response.error).to_be_null()
expect(response.request_time).to_equal(100)
def test_set_request_with_status_code_greater_than_399(self):
test_url = 'http://g.com/test.html'
key = 'urls-%s' % test_url
self.sync_cache.redis.delete(key)
self.sync_cache.set_request(
url=test_url,
status_code=500,
headers=None,
cookies=None,
text=None,
effective_url=None,
error=None,
request_time=1,
expiration=5
)
url, response = self.sync_cache.get_request(test_url)
expect(url).to_equal('http://g.com/test.html')
expect(response).to_be_null()
def test_set_request_with_status_code_less_than_100(self):
test_url = 'http://g.com/test.html'
key = 'urls-%s' % test_url
self.sync_cache.redis.delete(key)
self.sync_cache.set_request(
url=test_url,
status_code=99,
headers=None,
cookies=None,
text=None,
effective_url=None,
error=None,
request_time=1,
expiration=5
)
url, response = self.sync_cache.get_request(test_url)
expect(url).to_equal('http://g.com/test.html')
expect(response).to_be_null()
def test_lock_next_job(self):
test_url = 'http://g.com/test.html'
key = '%s-next-job-lock' % test_url
self.sync_cache.redis.delete(key)
lock = self.sync_cache.lock_next_job(test_url, 5)
expect(lock.acquire()).to_be_true()
def test_has_next_job_lock(self):
test_url = 'http://g.com/test.html'
key = '%s-next-job-lock' % test_url
self.sync_cache.redis.delete(key)
lock = self.sync_cache.lock_next_job(test_url, 20)
expect(lock).not_to_be_null()
has_next_job_lock = self.sync_cache.has_next_job_lock(test_url, 20)
expect(has_next_job_lock).not_to_be_null()
has_next_job_lock = self.sync_cache.has_next_job_lock(test_url, 20)
expect(has_next_job_lock).to_be_null()
def test_release_next_job(self):
test_url = 'http://g.com/test.html'
key = '%s-next-job-lock' % test_url
self.sync_cache.redis.delete(key)
has_next_job_lock = self.sync_cache.has_next_job_lock(test_url, 5)
expect(has_next_job_lock).not_to_be_null()
self.sync_cache.release_next_job(has_next_job_lock)
lock = self.sync_cache.has_next_job_lock(test_url, 5)
expect(lock).not_to_be_null()
def test_increment_page_score(self):
self.sync_cache.redis.delete('page-scores')
total = self.sync_cache.redis.zcard('page-scores')
expect(total).to_equal(0)
self.sync_cache.increment_page_score('page-1')
score = self.sync_cache.redis.zscore('page-scores', 'page-1')
expect(score).to_equal(1)
self.sync_cache.increment_page_score('page-1')
score = self.sync_cache.redis.zscore('page-scores', 'page-1')
expect(score).to_equal(2)
def test_can_delete_domain_violations_prefs(self):
domain_url = 'globo.com'
key = 'violations-prefs-%s' % domain_url
self.sync_cache.redis.delete(key)
prefs = self.sync_cache.redis.get(key)
expect(prefs).to_be_null()
data = dumps([{'key': 'test', 'value': '10'}])
self.sync_cache.redis.setex(key, 10, data)
prefs = self.sync_cache.redis.get(key)
expect(prefs).to_be_like(data)
self.sync_cache.delete_domain_violations_prefs(domain_url)
prefs = self.sync_cache.redis.get(key)
expect(prefs).to_be_null()
def test_can_get_domain_violations_prefs(self):
domain = DomainFactory.create(name='globo.com')
self.sync_cache.redis.delete( 'violations-prefs-%s' % domain.name)
for i in range(3):
DomainsViolationsPrefsFactory.create(
domain=domain,
key=KeyFactory.create(name='some.random.%d' % i),
value='v%d' % i
)
prefs = self.sync_cache.get_domain_violations_prefs('globo.com')
expect(prefs).to_equal([
{'value': u'v0', 'key': u'some.random.0'},
{'value': u'v1', 'key': u'some.random.1'},
{'value': u'v2', 'key': u'some.random.2'}
])
# should get from cache
self.sync_cache.db = None
prefs = self.sync_cache.get_domain_violations_prefs('globo.com')
expect(prefs).to_equal([
{'value': u'v0', 'key': u'some.random.0'},
{'value': u'v1', 'key': u'some.random.1'},
{'value': u'v2', 'key': u'some.random.2'}
])
def test_add_next_job_bucket(self):
key = 'next-job-bucket'
self.sync_cache.redis.delete(key)
prefs = self.sync_cache.redis.get(key)
expect(prefs).to_be_null()
for x in range(2):
page = PageFactory.create(uuid='%d' %x, url='http://g%d.com' % x)
self.sync_cache.add_next_job_bucket(page.uuid, page.url)
data = self.sync_cache.redis.zrange(key, 0, 0)
expect(data).to_be_like([
dumps({"url": "http://g0.com", "page": "0"})
])
data = self.sync_cache.redis.zrange(key, 1, 1)
expect(data).to_be_like([
dumps({"url": "http://g1.com", "page": "1"})
])
def test_get_next_job_bucket(self):
key = 'next-job-bucket'
self.sync_cache.redis.delete(key)
prefs = self.sync_cache.redis.get(key)
expect(prefs).to_be_null()
for x in range(2):
page = PageFactory.create(uuid='%d' %x, url='http://g%d.com' % x)
self.sync_cache.redis.zadd(
'next-job-bucket',
time.time(),
dumps({'page': str(page.uuid), 'url': page.url})
)
data = self.sync_cache.get_next_job_bucket()
expect(data).to_be_like(
dumps({"url": "http://g0.com", "page": "0"})
)
data = self.sync_cache.get_next_job_bucket()
expect(data).to_be_like(
dumps({"url": "http://g1.com", "page": "1"})
)
data = self.sync_cache.get_next_job_bucket()
expect(data).to_be_null()
| 32.004243
| 117
| 0.614752
| 3,045
| 22,627
| 4.335632
| 0.066338
| 0.060673
| 0.086654
| 0.053174
| 0.823284
| 0.793743
| 0.735343
| 0.710953
| 0.694364
| 0.681563
| 0
| 0.011132
| 0.253591
| 22,627
| 706
| 118
| 32.049575
| 0.770561
| 0.006541
| 0
| 0.613333
| 0
| 0
| 0.091225
| 0.003337
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.022857
| 0.005714
| 0.112381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
42565dbd3b0b8e2cc95cdad859eb0379be98259b
| 38
|
py
|
Python
|
plugins/__init__.py
|
Transisto/bitHopper
|
b4a946843b340c0b90c30f60aa15976002cf686e
|
[
"MIT"
] | 1
|
2017-05-20T21:07:17.000Z
|
2017-05-20T21:07:17.000Z
|
plugins/__init__.py
|
Transisto/bitHopper
|
b4a946843b340c0b90c30f60aa15976002cf686e
|
[
"MIT"
] | null | null | null |
plugins/__init__.py
|
Transisto/bitHopper
|
b4a946843b340c0b90c30f60aa15976002cf686e
|
[
"MIT"
] | null | null | null |
"""A file so we can import plugins"""
| 19
| 37
| 0.657895
| 7
| 38
| 3.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184211
| 38
| 1
| 38
| 38
| 0.806452
| 0.815789
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4297468a67fef4463995bfaa9ea0ced35e5c30a0
| 213
|
py
|
Python
|
testing/utils.py
|
MideTechnology/idelib
|
6c73997ab7e5a8b42e6450b35f71f7aa70aa73c9
|
[
"MIT"
] | 5
|
2020-07-21T15:13:18.000Z
|
2021-10-05T01:28:39.000Z
|
testing/utils.py
|
MideTechnology/idelib
|
6c73997ab7e5a8b42e6450b35f71f7aa70aa73c9
|
[
"MIT"
] | 91
|
2020-07-21T15:51:32.000Z
|
2022-03-29T03:19:27.000Z
|
testing/utils.py
|
MideTechnology/idelib
|
6c73997ab7e5a8b42e6450b35f71f7aa70aa73c9
|
[
"MIT"
] | null | null | null |
class nullcontext:
""" A replacement for `contextlib.nullcontext` for python versions before 3.7
"""
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
pass
| 21.3
| 81
| 0.643192
| 27
| 213
| 4.666667
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012739
| 0.262911
| 213
| 9
| 82
| 23.666667
| 0.789809
| 0.342723
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
c43abc86922ba27a3c31ae2936270c416a7d0ff3
| 101
|
py
|
Python
|
hooks/hook-dash_tabulator.py
|
soerendip/ms-mint
|
bf5f5d87d07a0d2108c6cd0d92c278f2ea762e58
|
[
"MIT"
] | 1
|
2021-09-03T04:02:25.000Z
|
2021-09-03T04:02:25.000Z
|
hooks/hook-dash_tabulator.py
|
soerendip/ms-mint
|
bf5f5d87d07a0d2108c6cd0d92c278f2ea762e58
|
[
"MIT"
] | 3
|
2020-09-29T21:43:39.000Z
|
2021-07-21T22:18:27.000Z
|
hooks/hook-dash_tabulator.py
|
soerendip/ms-mint
|
bf5f5d87d07a0d2108c6cd0d92c278f2ea762e58
|
[
"MIT"
] | 4
|
2019-11-14T13:25:24.000Z
|
2021-04-30T22:08:53.000Z
|
from PyInstaller.utils.hooks import collect_data_files
datas = collect_data_files("dash_tabulator")
| 25.25
| 54
| 0.851485
| 14
| 101
| 5.785714
| 0.785714
| 0.271605
| 0.395062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079208
| 101
| 3
| 55
| 33.666667
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0.138614
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c459ff3988d44f7fdfe023c98de4d118abe1479b
| 29
|
py
|
Python
|
__init__.py
|
sharababy/pca_pkg
|
7cd7d8b8625aa03675bb2fd6704884f739966653
|
[
"BSD-3-Clause"
] | null | null | null |
__init__.py
|
sharababy/pca_pkg
|
7cd7d8b8625aa03675bb2fd6704884f739966653
|
[
"BSD-3-Clause"
] | null | null | null |
__init__.py
|
sharababy/pca_pkg
|
7cd7d8b8625aa03675bb2fd6704884f739966653
|
[
"BSD-3-Clause"
] | null | null | null |
from pcapkg.pcapkg import PCA
| 29
| 29
| 0.862069
| 5
| 29
| 5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c47495fe66a5ff1562e1e3b165380cafec2e4edc
| 143
|
py
|
Python
|
beamline/miners/__init__.py
|
beamline/core-py
|
83234116e62bf6107a812d3ebd9a964c5b601b24
|
[
"Apache-2.0"
] | null | null | null |
beamline/miners/__init__.py
|
beamline/core-py
|
83234116e62bf6107a812d3ebd9a964c5b601b24
|
[
"Apache-2.0"
] | null | null | null |
beamline/miners/__init__.py
|
beamline/core-py
|
83234116e62bf6107a812d3ebd9a964c5b601b24
|
[
"Apache-2.0"
] | null | null | null |
from beamline.web.Beamline import Beamline
from beamline.miners.DiscoveryMiner import DiscoveryMiner
Beamline.miners.append(DiscoveryMiner())
| 28.6
| 57
| 0.86014
| 16
| 143
| 7.6875
| 0.4375
| 0.195122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06993
| 143
| 4
| 58
| 35.75
| 0.924812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
670d6db67899919907083a848e52fbaaac663283
| 5,907
|
py
|
Python
|
WeOptPy/algorithms/sade.py
|
kb2623/WeOptPy
|
2e9e75acf8fedde0ae4c99da6c786a712d4f011c
|
[
"MIT"
] | 1
|
2021-05-12T10:02:21.000Z
|
2021-05-12T10:02:21.000Z
|
WeOptPy/algorithms/sade.py
|
kb2623/WeOptPy
|
2e9e75acf8fedde0ae4c99da6c786a712d4f011c
|
[
"MIT"
] | null | null | null |
WeOptPy/algorithms/sade.py
|
kb2623/WeOptPy
|
2e9e75acf8fedde0ae4c99da6c786a712d4f011c
|
[
"MIT"
] | null | null | null |
# encoding=utf8
"""Adaptive differential evolution module."""
from WeOptPy.algorithms.de import (
# CrossBest1,
# CrossRand1,
# CrossCurr2Best1,
# CrossBest2,
# CrossCurr2Rand1,
# proportional,
DifferentialEvolution
)
__all__ = [
'StrategyAdaptationDifferentialEvolution',
'StrategyAdaptationDifferentialEvolutionV1'
]
class StrategyAdaptationDifferentialEvolution(DifferentialEvolution):
r"""Implementation of Differential Evolution Algorithm With Strategy Adaptation algorihtm.
Algorithm:
Differential Evolution Algorithm With StrategyAdaptation
Date:
2019
Author:
Klemen Berkovič
License:
MIT
Reference URL:
https://ieeexplore.ieee.org/document/1554904
Reference paper:
Qin, a. Kai, and Ponnuthurai N. Suganthan. "Self-adaptive differential evolution algorithm for numerical optimization." 2005 IEEE congress on evolutionary computation. Vol. 2. IEEE, 2005.
Attributes:
Name (List[str]): List of strings representing algorithm name.
See Also:
* :class:`WeOptPy.algorithms.DifferentialEvolution`
"""
Name = ['StrategyAdaptationDifferentialEvolution', 'SADE', 'SaDE']
@staticmethod
def algorithm_info():
r"""Geg basic algorithm information.
Returns:
str: Basic algorithm information.
See Also:
* :func:`NiaPy.algorithms.interfaces.Algorithm.algorithm_info`
"""
return r"""Qin, a. Kai, and Ponnuthurai N. Suganthan. "Self-adaptive differential evolution algorithm for numerical optimization." 2005 IEEE congress on evolutionary computation. Vol. 2. IEEE, 2005."""
def set_parameters(self, **kwargs):
r"""Set the algorithm parameters.
Args:
kwargs (dict): Additional keyword arguments.
See Also:
* :func:`WeOptPy.algorithms.interfaces.Algorithm.set_parameters`
"""
DifferentialEvolution.set_parameters(self, **kwargs)
# TODO add parameters of the algorithm
def get_parameters(self):
r"""Get algorithm parameter values.
Returns:
Dict[str, Any]: TODO
"""
d = DifferentialEvolution.get_parameters(self)
# TODO add paramters values
return d
def run_iteration(self, task, pop, fpop, xb, fxb, *args, **kwargs):
r"""Core function of the algorithm.
Args:
task (Task): Optimization task.
pop (numpy.ndarray): Current population.
fpop (numpy.ndarray): Current population's fitness values.
xb (numpy.ndarray): Current global best individual.
fxb (float): Current global best individual's best fitness value.
args (list): Additional arguments.
kwargs (dict): Additional keyword arguments.
Returns:
Tuple[numpy.ndarray, numpy.ndarray, numpy.ndarray, float, list, dict]:
1. New population.
2. New population fitness/function values.
3. New global best solution.
4. New global best solutions fitness/objective value.
5. Additional arguments.
6. Additional keyword arguments.
See Also:
* :func:`WeOptPy.algorithms.DifferentialEvolution.evolve`
* :func:`WeOptPy.algorithms.DifferentialEvolution.selection`
* :func:`WeOptPy.algorithms.DifferentialEvolution.post_selection`
"""
# TODO implemnt algorithm
return pop, fpop, xb, fxb, args, kwargs
class StrategyAdaptationDifferentialEvolutionV1(DifferentialEvolution):
r"""Implementation of Differential Evolution Algorithm With Strategy Adaptation algorithm.
Algorithm:
Differential Evolution Algorithm With StrategyAdaptation
Date:
2019
Author:
Klemen Berkovič
License:
MIT
Reference URL:
https://ieeexplore.ieee.org/document/4632146
Reference paper:
Qin, a. Kai, Vicky Ling Huang, and Ponnuthurai N. Suganthan. "Differential evolution algorithm with strategy adaptation for global numerical optimization." IEEE transactions on Evolutionary Computation 13.2 (2009): 398-417.
Attributes:
Name (List[str]): List of strings representing algorithm name.
See Also:
* :class:`NiaPy.algorithms.basic.DifferentialEvolution`
"""
Name = ['StrategyAdaptationDifferentialEvolutionV1', 'SADEV1', 'SaDEV1']
@staticmethod
def algorithm_info():
r"""Get algorithm information.
Returns:
str: Get algorithm information.
See Also:
* :func:`WeOptPy.algorithms.interfaces.Algorithm.algorithm_info`
"""
return r"""Qin, a. Kai, Vicky Ling Huang, and Ponnuthurai N. Suganthan. "Differential evolution algorithm with strategy adaptation for global numerical optimization." IEEE transactions on Evolutionary Computation 13.2 (2009): 398-417."""
def set_parameters(self, **kwargs):
r"""Set algorithm parameters.
Args:
**kwargs (dict): Additional keyword arguments.
"""
DifferentialEvolution.set_parameters(self, **kwargs)
# TODO add parameters of the algorithm
def get_parameters(self):
r"""Get parameter values of the algorithm.
Returns:
Dict[str, Any]: TODO
"""
d = DifferentialEvolution.get_parameters(self)
# TODO add parameters values
return d
def run_iteration(self, task, pop, fpop, xb, fxb, *args, **kwargs):
r"""Core function of Differential Evolution algorithm.
Args:
task (Task): Optimization task.
pop (numpy.ndarray): Current population.
fpop (numpy.ndarray): Current populations fitness/function values.
xb (numpy.ndarray): Current best individual.
fxb (float): Current best individual function/fitness value.
args (list): Additional arguments.
kwargs (dict): Additional keyword arguments.
Returns:
Tuple[numpy.ndarray, numpy.ndarray, numpy.ndarray, float, list, dict]:
1. New population.
2. New population fitness/function values.
3. New global best solution.
4. New global best solutions fitness/objective value.
5. Additional arguments.
6. Additional keyword arguments.
See Also:
* :func:`WeOptPy.algorithms.DifferentialEvolution.evolve`
* :func:`WeOptPy.algorithms.DifferentialEvolution.selection`
* :func:`WeOptPy.algorithms.DifferentialEvolution.postSelection`
"""
# TODO implement algorithm
return pop, fpop, xb, fxb, args, kwargs
| 29.242574
| 239
| 0.746403
| 662
| 5,907
| 6.629909
| 0.225076
| 0.032809
| 0.061517
| 0.04648
| 0.793803
| 0.73707
| 0.73707
| 0.714969
| 0.664844
| 0.664844
| 0
| 0.018262
| 0.156425
| 5,907
| 201
| 240
| 29.38806
| 0.862533
| 0.766379
| 0
| 0.439024
| 0
| 0.04878
| 0.361298
| 0.097979
| 0
| 0
| 0
| 0.039801
| 0
| 1
| 0.195122
| false
| 0
| 0.02439
| 0
| 0.463415
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
671ec69cd27926b85cc9f9b6418aae2b4b6dea08
| 9,253
|
py
|
Python
|
unittests/test_args.py
|
pcjco/PyFuzzy-renamer
|
a8656f9d5b959a9e0d6c4e286c68e948e9cba80c
|
[
"MIT"
] | 2
|
2021-07-12T17:46:31.000Z
|
2022-03-13T23:30:08.000Z
|
unittests/test_args.py
|
pcjco/PyFuzzy-renamer
|
a8656f9d5b959a9e0d6c4e286c68e948e9cba80c
|
[
"MIT"
] | null | null | null |
unittests/test_args.py
|
pcjco/PyFuzzy-renamer
|
a8656f9d5b959a9e0d6c4e286c68e948e9cba80c
|
[
"MIT"
] | 1
|
2021-07-19T21:27:23.000Z
|
2021-07-19T21:27:23.000Z
|
import argparse
import io
import os
import shutil
import unittest
import wx
from pathlib import Path
from contextlib import redirect_stdout
from unittests import pfr
from pyfuzzyrenamer import args, config, filters, main_listctrl, main_dlg, masks
from pyfuzzyrenamer.config import get_config
from pyfuzzyrenamer.args import get_args, get_argparser
# ---------------------------------------------------------------------------
class args_Tests(pfr.PyFuzzyRenamerTestCaseCLI):
def test_args_report_match(self):
get_config()["workers"] = 1
get_config()["show_fullpath"] = False
get_config()["hide_extension"] = True
get_config()["masks"] = "+Ending Disk#\n" + r'"(\s?_disk\d)$"' + "\n"
masks.FileMasked.masks = masks.CompileMasks(get_config()["masks"])
filters.FileFiltered.filters = filters.CompileFilters(get_config()["filters"])
if os.path.exists(self.outdir):
shutil.rmtree(self.outdir)
shutil.copytree(os.path.abspath(os.path.join(os.path.dirname(__file__), "./data")), self.outdir)
sourcesDir = os.path.join(self.outdir, "sources_multimatch")
choicesDir = os.path.join(self.outdir, "choices_multimatch")
args.theArgs = args.theArgsParser.parse_args(["--sources", sourcesDir, "--choices", choicesDir, "report_match"])
with io.StringIO() as buf, redirect_stdout(buf):
frame = main_dlg.MainFrame()
shutil.rmtree(self.outdir)
output = buf.getvalue()
self.assertEqual(
"acanthe à feuilles molles --> acanthus mollis (70.00)\n"
"acanthe épineuse --> acanthus spinosus (73.00)\n"
"aconit vénéneux --> aconitum anthora (52.00)\n"
"violette cornue --> viola cornuta (71.00)\n"
"volutaire à fleurs tubulées --> volutaria tubuliflora (54.00)\n",
output,
)
def test_args_preview_rename(self):
get_config()["workers"] = 1
get_config()["masks"] = "+Ending Disk#\n" + r'"(\s?_disk\d)$"' + "\n"
masks.FileMasked.masks = masks.CompileMasks(get_config()["masks"])
filters.FileFiltered.filters = filters.CompileFilters(get_config()["filters"])
if os.path.exists(self.outdir):
shutil.rmtree(self.outdir)
shutil.copytree(os.path.abspath(os.path.join(os.path.dirname(__file__), "./data")), self.outdir)
sourcesDir = os.path.join(self.outdir, "sources_multimatch")
choicesDir = os.path.join(self.outdir, "choices_multimatch")
args.theArgs = args.theArgsParser.parse_args(["--sources", sourcesDir, "--choices", choicesDir, "preview_rename"])
with io.StringIO() as buf, redirect_stdout(buf):
frame = main_dlg.MainFrame()
shutil.rmtree(self.outdir)
output = buf.getvalue()
self.assertEqual(
"Renaming : "
+ os.path.join(sourcesDir, "Acanthe à feuilles molles_disk2.txt")
+ " --> "
+ os.path.join(sourcesDir, "Acanthus mollis_disk2.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Acanthe épineuse.txt")
+ " --> "
+ os.path.join(sourcesDir, "Acanthus spinosus_disk1.txt\n")
+ "Copying : "
+ os.path.join(sourcesDir, "Acanthus spinosus_disk1.txt")
+ " --> "
+ os.path.join(sourcesDir, "Acanthus spinosus_disk2.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Aconit vénéneux.txt")
+ " --> "
+ os.path.join(sourcesDir, "Aconitum anthora.txt\n")
+ "Copying : "
+ os.path.join(sourcesDir, "Aconitum anthora.txt")
+ " --> "
+ os.path.join(sourcesDir, "Aconitum anthora_disk2.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Aconit vénéneux_disk1.txt")
+ " --> "
+ os.path.join(sourcesDir, "Aconitum anthora_disk1.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Aconit vénéneux_disk3.txt")
+ " --> "
+ os.path.join(sourcesDir, "Aconitum anthora_disk3.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Violette cornue_disk1.txt")
+ " --> "
+ os.path.join(sourcesDir, "Viola cornuta_disk1.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Volutaire à fleurs tubulées_disk1.txt")
+ " --> "
+ os.path.join(sourcesDir, "Volutaria tubuliflora_disk1.txt\n"),
output,
)
def test_args_preview_rename_nomultirename(self):
get_config()["workers"] = 1
get_config()["source_w_multiple_choice"] = False
get_config()["masks"] = "+Ending Disk#\n" + r'"(\s?_disk\d)$"' + "\n"
masks.FileMasked.masks = masks.CompileMasks(get_config()["masks"])
filters.FileFiltered.filters = filters.CompileFilters(get_config()["filters"])
if os.path.exists(self.outdir):
shutil.rmtree(self.outdir)
shutil.copytree(os.path.abspath(os.path.join(os.path.dirname(__file__), "./data")), self.outdir)
sourcesDir = os.path.join(self.outdir, "sources_multimatch")
choicesDir = os.path.join(self.outdir, "choices_multimatch")
args.theArgs = args.theArgsParser.parse_args(["--sources", sourcesDir, "--choices", choicesDir, "preview_rename"])
with io.StringIO() as buf, redirect_stdout(buf):
frame = main_dlg.MainFrame()
shutil.rmtree(self.outdir)
output = buf.getvalue()
self.maxDiff = None
self.assertEqual(
"Renaming : "
+ os.path.join(sourcesDir, "Acanthe à feuilles molles_disk2.txt")
+ " --> "
+ os.path.join(sourcesDir, "Acanthus mollis_disk2.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Acanthe épineuse.txt")
+ " --> "
+ os.path.join(sourcesDir, "Acanthus spinosus.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Aconit vénéneux.txt")
+ " --> "
+ os.path.join(sourcesDir, "Aconitum anthora.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Aconit vénéneux_disk1.txt")
+ " --> "
+ os.path.join(sourcesDir, "Aconitum anthora_disk1.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Aconit vénéneux_disk3.txt")
+ " --> "
+ os.path.join(sourcesDir, "Aconitum anthora_disk3.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Violette cornue_disk1.txt")
+ " --> "
+ os.path.join(sourcesDir, "Viola cornuta_disk1.txt\n")
+ "Renaming : "
+ os.path.join(sourcesDir, "Volutaire à fleurs tubulées_disk1.txt")
+ " --> "
+ os.path.join(sourcesDir, "Volutaria tubuliflora_disk1.txt\n"),
output,
)
def test_args_rename(self):
get_config()["workers"] = 1
get_config()["keep_original"] = False
get_config()["masks"] = "+Ending Disk#\n" + r'"(\s?_disk\d)$"' + "\n"
masks.FileMasked.masks = masks.CompileMasks(get_config()["masks"])
filters.FileFiltered.filters = filters.CompileFilters(get_config()["filters"])
if os.path.exists(self.outdir):
shutil.rmtree(self.outdir)
shutil.copytree(os.path.abspath(os.path.join(os.path.dirname(__file__), "./data")), self.outdir)
sourcesDir = os.path.join(self.outdir, "sources_multimatch")
choicesDir = os.path.join(self.outdir, "choices_multimatch")
args.theArgs = args.theArgsParser.parse_args(["--sources", sourcesDir, "--choices", choicesDir, "rename"])
with io.StringIO() as buf, redirect_stdout(buf):
frame = main_dlg.MainFrame()
renamed = []
for f in sorted(Path(os.path.join(self.outdir, "sources_multimatch")).resolve().glob("*"), key=os.path.basename):
try:
if f.is_file():
renamed.append(f.name)
except (OSError, IOError):
pass
shutil.rmtree(self.outdir)
self.assertEqual(
[
"Acanthus mollis_disk2.txt",
"Acanthus spinosus_disk1.txt",
"Acanthus spinosus_disk2.txt",
"Aconitum anthora.txt",
"Aconitum anthora_disk1.txt",
"Aconitum anthora_disk2.txt",
"Aconitum anthora_disk3.txt",
"Viola cornuta_disk1.txt",
"Volutaria tubuliflora_disk1.txt",
],
renamed,
)
# ---------------------------------------------------------------------------
if __name__ == "__main__":
unittest.main()
| 46.265
| 125
| 0.545012
| 923
| 9,253
| 5.32286
| 0.150596
| 0.070832
| 0.091594
| 0.130267
| 0.789742
| 0.789742
| 0.789742
| 0.737024
| 0.713617
| 0.713617
| 0
| 0.008674
| 0.30228
| 9,253
| 199
| 126
| 46.497487
| 0.752323
| 0.016319
| 0
| 0.662921
| 0
| 0
| 0.241235
| 0.010001
| 0.022472
| 0
| 0
| 0
| 0.022472
| 1
| 0.022472
| false
| 0.005618
| 0.067416
| 0
| 0.095506
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6746e70616a4df84c91956e42c58622f4b7ae111
| 211
|
py
|
Python
|
tests/old-tests/test_logout.py
|
SynBioHub/synbiohub
|
57f00336714de8f0385d5d6b6053cd2ea4be297b
|
[
"BSD-2-Clause"
] | 53
|
2017-03-13T11:10:24.000Z
|
2022-03-23T00:34:24.000Z
|
tests/test_logout.py
|
danyentezari/synbiohub
|
09317e3eb3820c596502efad441031835698ad54
|
[
"BSD-2-Clause"
] | 1,049
|
2017-02-17T21:14:42.000Z
|
2022-03-22T22:57:04.000Z
|
tests/test_logout.py
|
danyentezari/synbiohub
|
09317e3eb3820c596502efad441031835698ad54
|
[
"BSD-2-Clause"
] | 24
|
2017-03-14T07:39:20.000Z
|
2021-11-04T18:51:08.000Z
|
import requests
from unittest import TestCase
from test_functions import compare_get_request, compare_post_request
class TestLogout(TestCase):
def test_logout(self):
compare_get_request("/logout")
| 23.444444
| 68
| 0.800948
| 27
| 211
| 5.962963
| 0.592593
| 0.124224
| 0.21118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14218
| 211
| 8
| 69
| 26.375
| 0.889503
| 0
| 0
| 0
| 0
| 0
| 0.033175
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
674d3c74e7944168418105cd94816bf3660d8500
| 90
|
py
|
Python
|
build/lib/jhu_primitives/adj_concat/__init__.py
|
hhelm10/primitives-interfaces
|
15766d77dae016fa699a46bade0fe66711b23459
|
[
"Apache-2.0"
] | null | null | null |
build/lib/jhu_primitives/adj_concat/__init__.py
|
hhelm10/primitives-interfaces
|
15766d77dae016fa699a46bade0fe66711b23459
|
[
"Apache-2.0"
] | 23
|
2017-09-20T08:12:13.000Z
|
2022-03-01T01:49:11.000Z
|
build/lib/jhu_primitives/adj_concat/__init__.py
|
hhelm10/primitives-interfaces
|
15766d77dae016fa699a46bade0fe66711b23459
|
[
"Apache-2.0"
] | 8
|
2018-05-14T18:44:38.000Z
|
2021-03-18T19:53:23.000Z
|
from __future__ import absolute_import
from .adj_concat import AdjacencyMatrixConcatenator
| 45
| 51
| 0.911111
| 10
| 90
| 7.6
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077778
| 90
| 2
| 51
| 45
| 0.915663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
675386fbc404743db53dfa93ca72cf683eb58c7f
| 754
|
py
|
Python
|
huq/__init__.py
|
huq-industries/airflow-plugins
|
98a9cdc8981cfbe7d28fabaeaadd19c5e2f02709
|
[
"Apache-2.0"
] | null | null | null |
huq/__init__.py
|
huq-industries/airflow-plugins
|
98a9cdc8981cfbe7d28fabaeaadd19c5e2f02709
|
[
"Apache-2.0"
] | 12
|
2019-07-08T14:49:19.000Z
|
2022-02-08T15:15:49.000Z
|
huq/__init__.py
|
huq-industries/airflow-plugins
|
98a9cdc8981cfbe7d28fabaeaadd19c5e2f02709
|
[
"Apache-2.0"
] | 1
|
2019-10-28T15:39:44.000Z
|
2019-10-28T15:39:44.000Z
|
from airflow.plugins_manager import AirflowPlugin
from huq.gcs import (
GoogleCloudStorageComposePrefixOperator,
GoogleCloudStorageToS3CopyObjectListOperator,
GoogleCloudStorageToS3CopyOperator,
GoogleCloudStorageToS3CopyPrefixOperator,
)
# Defining the plugin class
class AirflowHuqPlugin(AirflowPlugin):
name = "huq"
operators = [
GoogleCloudStorageComposePrefixOperator,
GoogleCloudStorageToS3CopyObjectListOperator,
GoogleCloudStorageToS3CopyOperator,
GoogleCloudStorageToS3CopyPrefixOperator,
]
sensors = []
hooks = []
executors = []
macros = []
admin_views = []
flask_blueprints = []
menu_links = []
appbuilder_views = []
appbuilder_menu_items = []
| 26
| 53
| 0.72679
| 43
| 754
| 12.581395
| 0.72093
| 0.306839
| 0.432532
| 0.580407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01005
| 0.208223
| 754
| 28
| 54
| 26.928571
| 0.896147
| 0.033157
| 0
| 0.333333
| 0
| 0
| 0.004127
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.583333
| 0.041667
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
675e9648febed893f4fda7d5147536bd716154a1
| 69
|
py
|
Python
|
Streamlit/pages/page2.py
|
jhockx/server-configuration
|
106bc6c0a57eaa582486701c80aac4f968ef0ba0
|
[
"MIT"
] | 1
|
2021-04-28T06:15:14.000Z
|
2021-04-28T06:15:14.000Z
|
Streamlit/pages/page2.py
|
jhockx/server-configuration
|
106bc6c0a57eaa582486701c80aac4f968ef0ba0
|
[
"MIT"
] | null | null | null |
Streamlit/pages/page2.py
|
jhockx/server-configuration
|
106bc6c0a57eaa582486701c80aac4f968ef0ba0
|
[
"MIT"
] | null | null | null |
import streamlit as st
def main():
st.title('Page 2 -- TITLE')
| 11.5
| 31
| 0.623188
| 11
| 69
| 3.909091
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0.231884
| 69
| 5
| 32
| 13.8
| 0.792453
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6771d154fc3ef17af804af19e66c2fb3dae9fc39
| 143
|
py
|
Python
|
FileUtilities/__init__.py
|
RainbowRedux/RainbowSixFileConverters
|
1f755f781ee85af068ba7bcc73d4960998363794
|
[
"MIT"
] | 6
|
2020-03-28T14:32:25.000Z
|
2022-02-03T00:41:24.000Z
|
FileUtilities/__init__.py
|
RainbowRedux/RainbowSixFileConverters
|
1f755f781ee85af068ba7bcc73d4960998363794
|
[
"MIT"
] | 46
|
2020-03-20T06:27:30.000Z
|
2022-03-11T23:36:12.000Z
|
FileUtilities/__init__.py
|
RainbowRedux/RainbowSixFileConverters
|
1f755f781ee85af068ba7bcc73d4960998363794
|
[
"MIT"
] | 4
|
2020-02-09T01:55:44.000Z
|
2020-07-22T12:52:43.000Z
|
"""This module provides many utility classes and functions related to dealing with files, especially tokenized text files and binary files."""
| 71.5
| 142
| 0.804196
| 20
| 143
| 5.75
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13986
| 143
| 1
| 143
| 143
| 0.934959
| 0.951049
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
679ac005083c4525ad369214189335440ddaaa3c
| 409
|
py
|
Python
|
pypushwoosh/exceptions.py
|
shiratamu/pushwoosh-python-lib
|
da05d7b72729ebfc65a7ab0b08c9009632a38833
|
[
"MIT"
] | 18
|
2015-01-08T19:51:42.000Z
|
2021-11-12T11:42:18.000Z
|
pypushwoosh/exceptions.py
|
shiratamu/pushwoosh-python-lib
|
da05d7b72729ebfc65a7ab0b08c9009632a38833
|
[
"MIT"
] | 7
|
2015-03-08T09:01:03.000Z
|
2017-11-13T05:26:21.000Z
|
pypushwoosh/exceptions.py
|
shiratamu/pushwoosh-python-lib
|
da05d7b72729ebfc65a7ab0b08c9009632a38833
|
[
"MIT"
] | 18
|
2015-02-17T03:40:54.000Z
|
2021-11-25T02:26:44.000Z
|
class PushwooshException(Exception):
pass
class PushwooshCommandException(PushwooshException):
pass
class PushwooshNotificationException(PushwooshException):
pass
class PushwooshFilterException(PushwooshException):
pass
class PushwooshFilterInvalidOperatorException(PushwooshFilterException):
pass
class PushwooshFilterInvalidOperandException(PushwooshFilterException):
pass
| 17.782609
| 72
| 0.828851
| 24
| 409
| 14.125
| 0.375
| 0.132743
| 0.238938
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127139
| 409
| 22
| 73
| 18.590909
| 0.94958
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
67c68c0ed446129b670948467ddb50def78948ce
| 164
|
py
|
Python
|
allauth/socialaccount/providers/yandex/urls.py
|
Yurzs/django-allauth
|
4434e8fd488a7ea01acabdfe41a011df6899d9c9
|
[
"MIT"
] | null | null | null |
allauth/socialaccount/providers/yandex/urls.py
|
Yurzs/django-allauth
|
4434e8fd488a7ea01acabdfe41a011df6899d9c9
|
[
"MIT"
] | null | null | null |
allauth/socialaccount/providers/yandex/urls.py
|
Yurzs/django-allauth
|
4434e8fd488a7ea01acabdfe41a011df6899d9c9
|
[
"MIT"
] | null | null | null |
from allauth.socialaccount.providers.oauth2.urls import default_urlpatterns
from .provider import YandexProvider
urlpatterns = default_urlpatterns(YandexProvider)
| 32.8
| 75
| 0.878049
| 17
| 164
| 8.352941
| 0.647059
| 0.253521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006579
| 0.073171
| 164
| 5
| 76
| 32.8
| 0.927632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
67d9503e659a2d1fb60ade1db6cff339bbb09b47
| 295
|
py
|
Python
|
sheetfu/config.py
|
darkroomdave/sheetfu
|
c0d638c0fbbb62a1d887b44bfba1c0cea3864e29
|
[
"MIT"
] | 893
|
2018-01-22T13:36:44.000Z
|
2022-03-28T17:51:15.000Z
|
sheetfu/config.py
|
ye-man/sheetfu
|
573b147014b6fd5ea71bf04bd130bd0c7c9be4c5
|
[
"MIT"
] | 48
|
2018-01-23T10:32:38.000Z
|
2022-03-22T09:39:54.000Z
|
sheetfu/config.py
|
ye-man/sheetfu
|
573b147014b6fd5ea71bf04bd130bd0c7c9be4c5
|
[
"MIT"
] | 48
|
2018-01-26T11:46:11.000Z
|
2021-11-09T01:43:31.000Z
|
fields_masks = {
'background': "sheets/data/rowData/values/effectiveFormat/backgroundColor",
'value': "sheets/data/rowData/values/formattedValue",
'note': "sheets/data/rowData/values/note",
'font_color': "sheets/data/rowData/values/effectiveFormat/textFormat/foregroundColor"
}
| 36.875
| 89
| 0.749153
| 30
| 295
| 7.3
| 0.533333
| 0.182648
| 0.310502
| 0.420091
| 0.347032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098305
| 295
| 7
| 90
| 42.142857
| 0.823308
| 0
| 0
| 0
| 0
| 0
| 0.77551
| 0.676871
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
67fd00e016ec4bbd3239f9c39709c41bd01f8c00
| 5,227
|
py
|
Python
|
tests/rulesets/sample_rules.py
|
wandsdn/ofsolver
|
64795a84220416b1a2e4df13198c6529995b7f53
|
[
"Apache-2.0"
] | 1
|
2020-04-10T07:22:19.000Z
|
2020-04-10T07:22:19.000Z
|
tests/rulesets/sample_rules.py
|
wandsdn/ofsolver
|
64795a84220416b1a2e4df13198c6529995b7f53
|
[
"Apache-2.0"
] | null | null | null |
tests/rulesets/sample_rules.py
|
wandsdn/ofsolver
|
64795a84220416b1a2e4df13198c6529995b7f53
|
[
"Apache-2.0"
] | null | null | null |
from ryu.ofproto import ofproto_v1_3
from ryu.ofproto import ofproto_v1_3_parser as parser
import pickle
"""
Make the rules for a simple L2 L3 pipeline
ETH_DST 1&2
+--------------+
| |
| 2 Routing |
| |
| IP_DST -> |
+-------------+ +--------------+ +---> OUTPUT |
| | | | | | SET MAC |
| 0 TCP ACL | | 1 MAC TERM | | | |
| (TCP_DST) | | ETH_DST -> | | | |
| DROP | | goto : 2 | | +--------------+
| +-------> +-+
| else | | else |
| | | goto: 3 |
| goto: 1 | | +-+
+-------------+ +--------------+ | ETH_DST 10&11&12
| +---------------+
| | |
| | 3 L2 FWD |
| | |
+--^+ ETH_DST -> |
| OUTPUT |
| |
| |
| |
| |
+---------------+
"""
flows = [
# Table 0
parser.OFPFlowStats(
table_id=0,
priority=1000,
match=parser.OFPMatch(tcp_dst=80),
instructions=[]
),
parser.OFPFlowStats(
table_id=0,
priority=1000,
match=parser.OFPMatch(tcp_dst=443),
instructions=[]
),
parser.OFPFlowStats(
table_id=0,
priority=0,
match=parser.OFPMatch(),
instructions=[parser.OFPInstructionGotoTable(1)]
),
# Table 1
parser.OFPFlowStats(
table_id=1,
priority=1000,
match=parser.OFPMatch(eth_dst=1),
instructions=[parser.OFPInstructionGotoTable(2)]
),
parser.OFPFlowStats(
table_id=1,
priority=1000,
match=parser.OFPMatch(eth_dst=2),
instructions=[parser.OFPInstructionGotoTable(2)]
),
parser.OFPFlowStats(
table_id=1,
priority=0,
match=parser.OFPMatch(),
instructions=[parser.OFPInstructionGotoTable(3)]
),
# Table 2
parser.OFPFlowStats(
table_id=2,
priority=1008,
match=parser.OFPMatch(ipv4_dst=("1.0.0.0", "255.0.0.0")),
instructions=[
parser.OFPInstructionActions(
ofproto_v1_3.OFPIT_WRITE_ACTIONS, [
parser.OFPActionSetField(eth_src=100),
parser.OFPActionSetField(eth_dst=20),
parser.OFPActionOutput(20)
]
)
]
),
parser.OFPFlowStats(
table_id=2,
priority=1008,
match=parser.OFPMatch(ipv4_dst=("10.0.0.0", "255.0.0.0")),
instructions=[
parser.OFPInstructionActions(
ofproto_v1_3.OFPIT_WRITE_ACTIONS, [
parser.OFPActionSetField(eth_src=100),
parser.OFPActionSetField(eth_dst=20),
parser.OFPActionOutput(20)
]
)
]
),
parser.OFPFlowStats(
table_id=2,
priority=1000,
match=parser.OFPMatch(ipv4_dst=("0.0.0.0", "0.0.0.0")),
instructions=[
parser.OFPInstructionActions(
ofproto_v1_3.OFPIT_WRITE_ACTIONS, [
parser.OFPActionSetField(eth_src=101),
parser.OFPActionSetField(eth_dst=21),
parser.OFPActionOutput(21)
]
)
]
),
# Table 3
parser.OFPFlowStats(
table_id=3,
priority=1000,
match=parser.OFPMatch(eth_dst=10),
instructions=[
parser.OFPInstructionActions(
ofproto_v1_3.OFPIT_WRITE_ACTIONS, [
parser.OFPActionOutput(10)
]
)
]
),
parser.OFPFlowStats(
table_id=3,
priority=1000,
match=parser.OFPMatch(eth_dst=11),
instructions=[
parser.OFPInstructionActions(
ofproto_v1_3.OFPIT_WRITE_ACTIONS, [
parser.OFPActionOutput(11)
]
)
]
),
parser.OFPFlowStats(
table_id=3,
priority=1000,
match=parser.OFPMatch(eth_dst=12),
instructions=[
parser.OFPInstructionActions(
ofproto_v1_3.OFPIT_WRITE_ACTIONS, [
parser.OFPActionOutput(12)
]
)
]
),
]
with open('sample_rules.pickle', 'wb') as f:
pickle.dump(flows, f)
| 31.871951
| 66
| 0.396786
| 381
| 5,227
| 5.275591
| 0.183727
| 0.014925
| 0.137313
| 0.149254
| 0.834826
| 0.819403
| 0.819403
| 0.762189
| 0.697512
| 0.697512
| 0
| 0.062663
| 0.487086
| 5,227
| 163
| 67
| 32.067485
| 0.687057
| 0.005931
| 0
| 0.644628
| 0
| 0
| 0.019231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.024793
| 0
| 0.024793
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
db35727308ab39b215daf0b30c555dacb1e41083
| 157
|
py
|
Python
|
player.py
|
neurotechuoft/EMGTugOfWar
|
a29daaf793e179e3d1bb8bab6391980356615be5
|
[
"MIT"
] | null | null | null |
player.py
|
neurotechuoft/EMGTugOfWar
|
a29daaf793e179e3d1bb8bab6391980356615be5
|
[
"MIT"
] | null | null | null |
player.py
|
neurotechuoft/EMGTugOfWar
|
a29daaf793e179e3d1bb8bab6391980356615be5
|
[
"MIT"
] | null | null | null |
class Player():
def __init__(self, name):
self.name = name
self.force = 0.0
def set_force(self, force):
self.force = force
| 17.444444
| 31
| 0.56051
| 21
| 157
| 3.952381
| 0.428571
| 0.325301
| 0.337349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0.324841
| 157
| 8
| 32
| 19.625
| 0.764151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e1f02855e5236bd4c27a32990bcec5882a8e0f3d
| 115
|
py
|
Python
|
Client/App/Core/Resources/__init__.py
|
Dragon-KK/ComputerProject2021
|
669431f3f2d41bda822931e6fffe661c99736dfe
|
[
"MIT"
] | null | null | null |
Client/App/Core/Resources/__init__.py
|
Dragon-KK/ComputerProject2021
|
669431f3f2d41bda822931e6fffe661c99736dfe
|
[
"MIT"
] | null | null | null |
Client/App/Core/Resources/__init__.py
|
Dragon-KK/ComputerProject2021
|
669431f3f2d41bda822931e6fffe661c99736dfe
|
[
"MIT"
] | null | null | null |
from .Storage import Storage
from .Audio import Audio
from .Images import Images
from .Networking import Networking
| 28.75
| 34
| 0.834783
| 16
| 115
| 6
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 115
| 4
| 34
| 28.75
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c032306f9a925c636529a51f759745f43b437be1
| 119
|
py
|
Python
|
cover_letter/admin.py
|
radoslawdabrowski/personal-website
|
b3d4f92ea51b40b104449259a376134aeb11766b
|
[
"MIT"
] | 1
|
2019-03-13T15:42:33.000Z
|
2019-03-13T15:42:33.000Z
|
cover_letter/admin.py
|
radoslawdabrowski/personal-website-framework
|
c33f16811caa2aafdfd84c22af8c37ee0ab97720
|
[
"Apache-2.0"
] | 92
|
2019-12-04T22:24:35.000Z
|
2022-03-12T00:11:21.000Z
|
cover_letter/admin.py
|
radoslawdabrowski/personal-website
|
b3d4f92ea51b40b104449259a376134aeb11766b
|
[
"MIT"
] | 1
|
2019-05-07T21:23:57.000Z
|
2019-05-07T21:23:57.000Z
|
from django.contrib import admin
from cover_letter.models import Reference
# Entities
admin.site.register(Reference)
| 17
| 41
| 0.831933
| 16
| 119
| 6.125
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109244
| 119
| 6
| 42
| 19.833333
| 0.924528
| 0.067227
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c0560dfe6b0f60ce54bdec16abe616a849496eb4
| 54
|
py
|
Python
|
products/forms.py
|
costamay/stock-management-system
|
c78dfd7e9a12434adee30680c9225c5aefb50a02
|
[
"MIT"
] | null | null | null |
products/forms.py
|
costamay/stock-management-system
|
c78dfd7e9a12434adee30680c9225c5aefb50a02
|
[
"MIT"
] | null | null | null |
products/forms.py
|
costamay/stock-management-system
|
c78dfd7e9a12434adee30680c9225c5aefb50a02
|
[
"MIT"
] | null | null | null |
from django import forms
from products.models import *
| 27
| 29
| 0.833333
| 8
| 54
| 5.625
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 54
| 2
| 29
| 27
| 0.957447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c068255430eef26bf9c5c5eda5a67e034c0ddf8c
| 48
|
py
|
Python
|
python/testData/inspections/RedundantParenthesesParenthesizedExpression.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/RedundantParenthesesParenthesizedExpression.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/RedundantParenthesesParenthesizedExpression.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
if ((1 and 2 == 'left'<caret>)) or (3):
pass
| 24
| 39
| 0.479167
| 9
| 48
| 2.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.25
| 48
| 2
| 40
| 24
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.5
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
fbea6076d958ecba88d51c49edeca74e6b0ac7b3
| 171
|
py
|
Python
|
examples/notepad/app/admin.py
|
payton/django-siwe-auth
|
7112cfcf088175e0533e6f01db5151d109ba6b61
|
[
"MIT"
] | 3
|
2022-02-01T04:05:12.000Z
|
2022-02-17T02:58:56.000Z
|
examples/notepad/app/admin.py
|
payton/django-siwe-auth
|
7112cfcf088175e0533e6f01db5151d109ba6b61
|
[
"MIT"
] | 2
|
2022-02-05T19:11:22.000Z
|
2022-02-05T19:59:12.000Z
|
examples/notepad/app/admin.py
|
payton/django-siwe-auth
|
7112cfcf088175e0533e6f01db5151d109ba6b61
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Notepad, SharedNotepad
# Register your models here.
admin.site.register(Notepad)
admin.site.register(SharedNotepad)
| 21.375
| 42
| 0.818713
| 22
| 171
| 6.363636
| 0.545455
| 0.128571
| 0.242857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 171
| 7
| 43
| 24.428571
| 0.915033
| 0.152047
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
220f407c8ce9c5f2d865ae246a593ac04abf0ce1
| 686
|
py
|
Python
|
EduCDM/meta.py
|
zelo2/EduCDM
|
d725dc50ec677dfe409d88a3ffea6dce8effad62
|
[
"Apache-2.0"
] | 36
|
2021-04-28T03:22:03.000Z
|
2022-03-30T16:54:44.000Z
|
EduCDM/meta.py
|
LegionKing/EduCDM
|
4d1b871e4f0c041dd86da81576621b28ebba911c
|
[
"Apache-2.0"
] | 21
|
2021-03-18T14:10:11.000Z
|
2022-01-29T14:12:45.000Z
|
EduCDM/meta.py
|
LegionKing/EduCDM
|
4d1b871e4f0c041dd86da81576621b28ebba911c
|
[
"Apache-2.0"
] | 36
|
2021-03-17T14:43:18.000Z
|
2022-03-29T07:52:26.000Z
|
# coding: utf-8
# 2021/3/17 @ tongshiwei
def etl(*args, **kwargs) -> ...: # pragma: no cover
"""
extract - transform - load
"""
pass
def train(*args, **kwargs) -> ...: # pragma: no cover
pass
def evaluate(*args, **kwargs) -> ...: # pragma: no cover
pass
class CDM(object):
def __init__(self, *args, **kwargs) -> ...:
pass
def train(self, *args, **kwargs) -> ...:
raise NotImplementedError
def eval(self, *args, **kwargs) -> ...:
raise NotImplementedError
def save(self, *args, **kwargs) -> ...:
raise NotImplementedError
def load(self, *args, **kwargs) -> ...:
raise NotImplementedError
| 19.6
| 57
| 0.549563
| 71
| 686
| 5.253521
| 0.408451
| 0.214477
| 0.187668
| 0.203753
| 0.63807
| 0.474531
| 0
| 0
| 0
| 0
| 0
| 0.015936
| 0.268222
| 686
| 34
| 58
| 20.176471
| 0.727092
| 0.167638
| 0
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.470588
| false
| 0.235294
| 0
| 0
| 0.529412
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
223847d76e3d2fd250ce0559b8c348f4a2d6b4e8
| 227
|
py
|
Python
|
{{cookiecutter.project_name}}_bdd/features/steps/step_name.py
|
marcelolleivas/template-behave-selenium
|
cbb0604d82530e3616c3ac0ab1c3372cc7ee785c
|
[
"MIT"
] | 1
|
2021-07-07T17:35:16.000Z
|
2021-07-07T17:35:16.000Z
|
{{cookiecutter.project_name}}_bdd/features/steps/step_name.py
|
marcelolleivas/template-behave-selenium
|
cbb0604d82530e3616c3ac0ab1c3372cc7ee785c
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_name}}_bdd/features/steps/step_name.py
|
marcelolleivas/template-behave-selenium
|
cbb0604d82530e3616c3ac0ab1c3372cc7ee785c
|
[
"MIT"
] | null | null | null |
from behave import given, then, when, step
@given('')
def simpl_step(context):
pass
@then('')
def simpl_step(context):
pass
@when('')
def simpl_step(context):
pass
@step('')
def simpl_step(context):
pass
| 10.809524
| 42
| 0.643172
| 31
| 227
| 4.580645
| 0.354839
| 0.225352
| 0.338028
| 0.535211
| 0.647887
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202643
| 227
| 21
| 43
| 10.809524
| 0.78453
| 0
| 0
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0.307692
| 0.076923
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
22472c697228d13763b896eb4783dda50ede6cda
| 437
|
py
|
Python
|
tests/test_psycho_embeddings.py
|
MilaNLProc/psycho-embeddings
|
2182076c1d455f8881858f0180852fe8a288f9b4
|
[
"MIT"
] | null | null | null |
tests/test_psycho_embeddings.py
|
MilaNLProc/psycho-embeddings
|
2182076c1d455f8881858f0180852fe8a288f9b4
|
[
"MIT"
] | null | null | null |
tests/test_psycho_embeddings.py
|
MilaNLProc/psycho-embeddings
|
2182076c1d455f8881858f0180852fe8a288f9b4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Tests for `psycho_embeddings` package."""
import unittest
from psycho_embeddings import psycho_embeddings
class TestPsycho_embeddings(unittest.TestCase):
"""Tests for `psycho_embeddings` package."""
def setUp(self):
"""Set up test fixtures, if any."""
def tearDown(self):
"""Tear down test fixtures, if any."""
def test_000_something(self):
"""Test something."""
| 19.863636
| 48
| 0.665904
| 52
| 437
| 5.461538
| 0.557692
| 0.225352
| 0.098592
| 0.169014
| 0.359155
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008571
| 0.199085
| 437
| 21
| 49
| 20.809524
| 0.802857
| 0.405034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
97d9d6ae0bd7fa31777140f6741859b4aa8f1503
| 280
|
py
|
Python
|
authentik/stages/authenticator_sms/apps.py
|
BeryJu/passbook
|
350f0d836580f4411524614f361a76c4f27b8a2d
|
[
"MIT"
] | 15
|
2020-01-05T09:09:57.000Z
|
2020-11-28T05:27:39.000Z
|
authentik/stages/authenticator_sms/apps.py
|
BeryJu/passbook
|
350f0d836580f4411524614f361a76c4f27b8a2d
|
[
"MIT"
] | 302
|
2020-01-21T08:03:59.000Z
|
2020-12-04T05:04:57.000Z
|
authentik/stages/authenticator_sms/apps.py
|
BeryJu/passbook
|
350f0d836580f4411524614f361a76c4f27b8a2d
|
[
"MIT"
] | 3
|
2020-03-04T08:21:59.000Z
|
2020-08-01T20:37:18.000Z
|
"""SMS"""
from django.apps import AppConfig
class AuthentikStageAuthenticatorSMSConfig(AppConfig):
"""SMS App config"""
name = "authentik.stages.authenticator_sms"
label = "authentik_stages_authenticator_sms"
verbose_name = "authentik Stages.Authenticator.SMS"
| 25.454545
| 55
| 0.757143
| 28
| 280
| 7.392857
| 0.571429
| 0.217391
| 0.405797
| 0.449275
| 0.338164
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139286
| 280
| 10
| 56
| 28
| 0.858921
| 0.064286
| 0
| 0
| 0
| 0
| 0.406375
| 0.366534
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
97ffe6c26583b22f0c02ecb9cccd85f242c3db78
| 27
|
py
|
Python
|
glad/__init__.py
|
dpethes/glad
|
d4a879a1bf0ce0c95d54b73ebcd197972977d5f4
|
[
"MIT"
] | null | null | null |
glad/__init__.py
|
dpethes/glad
|
d4a879a1bf0ce0c95d54b73ebcd197972977d5f4
|
[
"MIT"
] | null | null | null |
glad/__init__.py
|
dpethes/glad
|
d4a879a1bf0ce0c95d54b73ebcd197972977d5f4
|
[
"MIT"
] | null | null | null |
__version__ = '0.1.18a0'
| 6.75
| 24
| 0.62963
| 4
| 27
| 3.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 0.185185
| 27
| 3
| 25
| 9
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3f14ce675df3d02f7fbea45328066ea256c0593b
| 43
|
py
|
Python
|
htminify/__init__.py
|
AbhinavOmprakash/py-htminify
|
702198b1bdf3b8036705c4224cde1390a21f3b06
|
[
"BSD-3-Clause"
] | 1
|
2021-08-01T21:20:33.000Z
|
2021-08-01T21:20:33.000Z
|
htminify/__init__.py
|
AbhinavOmprakash/py-htminify
|
702198b1bdf3b8036705c4224cde1390a21f3b06
|
[
"BSD-3-Clause"
] | 5
|
2021-05-22T10:22:47.000Z
|
2021-05-27T14:11:30.000Z
|
htminify/__init__.py
|
AbhinavOmprakash/py-htminify
|
702198b1bdf3b8036705c4224cde1390a21f3b06
|
[
"BSD-3-Clause"
] | null | null | null |
from .htminify import minify
del htminify
| 10.75
| 28
| 0.813953
| 6
| 43
| 5.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 43
| 3
| 29
| 14.333333
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3f2691057f63260bdd4fc7248e1d66e1dc8bcdd9
| 2,595
|
py
|
Python
|
tests/kyu_7_tests/test_credit_card_checker.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
tests/kyu_7_tests/test_credit_card_checker.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
tests/kyu_7_tests/test_credit_card_checker.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
import unittest
from katas.kyu_7.credit_card_checker import valid_card
class ValidCardTestCase(unittest.TestCase):
def test_true_1(self):
self.assertTrue(valid_card('5457 6238 9823 4311'))
def test_true_2(self):
self.assertTrue(valid_card('2222 2222 2222 2224'))
def test_true_3(self):
self.assertTrue(valid_card('9999 9999 9999 9995'))
def test_true_4(self):
self.assertTrue(valid_card('4444 4444 4444 4448'))
def test_true_5(self):
self.assertTrue(valid_card('3333 3333 3333 3331'))
def test_true_6(self):
self.assertTrue(valid_card('6666 6666 6666 6664'))
def test_true_7(self):
self.assertTrue(valid_card('0000 0000 0000 0000'))
def test_true_8(self):
self.assertTrue(valid_card('5457 6238 9823 4311'))
def test_true_9(self):
self.assertTrue(valid_card('8888 8888 8888 8888'))
def test_true_10(self):
self.assertTrue(valid_card('1111 1111 1111 1117'))
def test_true_11(self):
self.assertTrue(valid_card('1234 5678 9012 3452'))
def test_true_12(self):
self.assertTrue(valid_card('5555 5555 5555 5557'))
def test_false_1(self):
self.assertFalse(valid_card('8895 6238 9323 4311'))
def test_false_2(self):
self.assertFalse(valid_card('5457 6238 5568 4311'))
def test_false_3(self):
self.assertFalse(valid_card('5457 6238 9323 4311'))
def test_false_4(self):
self.assertFalse(valid_card('5457 1125 9323 4311'))
def test_false_5(self):
self.assertFalse(valid_card('1252 6238 9323 4311'))
def test_false_6(self):
self.assertFalse(valid_card('0000 0300 0000 0000'))
def test_false_7(self):
self.assertFalse(valid_card('5457 6238 9323 1252'))
def test_false_8(self):
self.assertFalse(valid_card('5457 6238 1251 4311'))
def test_false_9(self):
self.assertFalse(valid_card('5457 6238 0254 4311'))
def test_false_10(self):
self.assertFalse(valid_card('5457 1111 9323 4311'))
def test_false_11(self):
self.assertFalse(valid_card('1145 6238 9323 4311'))
def test_false_12(self):
self.assertFalse(valid_card('0025 2521 9323 4311'))
def test_false_13(self):
self.assertFalse(valid_card('5457 6238 9323 4311'))
def test_false_14(self):
self.assertFalse(valid_card('5458 4444 9323 4311'))
def test_false_15(self):
self.assertFalse(valid_card('5457 6238 3333 4311'))
def test_false_16(self):
self.assertFalse(valid_card('0123 4567 8901 2345'))
| 28.833333
| 59
| 0.680925
| 380
| 2,595
| 4.418421
| 0.197368
| 0.15545
| 0.114354
| 0.228708
| 0.646814
| 0.32162
| 0.240619
| 0.154854
| 0.13103
| 0.13103
| 0
| 0.237214
| 0.208863
| 2,595
| 89
| 60
| 29.157303
| 0.580614
| 0
| 0
| 0.067797
| 0
| 0
| 0.20501
| 0
| 0
| 0
| 0
| 0
| 0.474576
| 1
| 0.474576
| false
| 0
| 0.033898
| 0
| 0.525424
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
3f40f3063041c22e1627094661d280811866f6b6
| 468
|
py
|
Python
|
actions/lib/comments.py
|
userlocalhost/stackstorm-datadog
|
6c70d6023f63e6d5d805ceb6dd3bc1edeea8123d
|
[
"Apache-2.0"
] | 164
|
2015-01-17T16:08:33.000Z
|
2021-08-03T02:34:07.000Z
|
actions/lib/comments.py
|
userlocalhost/stackstorm-datadog
|
6c70d6023f63e6d5d805ceb6dd3bc1edeea8123d
|
[
"Apache-2.0"
] | 442
|
2015-01-01T11:19:01.000Z
|
2017-09-06T23:26:17.000Z
|
actions/lib/comments.py
|
userlocalhost/stackstorm-datadog
|
6c70d6023f63e6d5d805ceb6dd3bc1edeea8123d
|
[
"Apache-2.0"
] | 202
|
2015-01-13T00:37:40.000Z
|
2020-11-07T11:30:10.000Z
|
from base import DatadogBaseAction
from datadog import api
class DatadogCreateComment(DatadogBaseAction):
def _run(self, **kwargs):
return api.Comment.create(**kwargs)
class DatadogDeleteComment(DatadogBaseAction):
def _run(self, **kwargs):
return api.Comment.delete(kwargs.pop("comment_id"))
class DatadogEditComment(DatadogBaseAction):
def _run(self, **kwargs):
return api.Comment.update(kwargs.pop("comment_id"), **kwargs)
| 26
| 69
| 0.730769
| 51
| 468
| 6.607843
| 0.411765
| 0.178042
| 0.204748
| 0.240356
| 0.436202
| 0.436202
| 0.436202
| 0.436202
| 0
| 0
| 0
| 0
| 0.153846
| 468
| 17
| 70
| 27.529412
| 0.85101
| 0
| 0
| 0.272727
| 0
| 0
| 0.042735
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.181818
| 0.272727
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
3f583af803f56d8ce950918aede295fb7872df0d
| 86
|
py
|
Python
|
ramscube/tests/test_dummy.py
|
freemansw1/ramscube
|
2f4c687e4e4ec84153b687061ea90cac8a7fbd83
|
[
"BSD-3-Clause"
] | null | null | null |
ramscube/tests/test_dummy.py
|
freemansw1/ramscube
|
2f4c687e4e4ec84153b687061ea90cac8a7fbd83
|
[
"BSD-3-Clause"
] | 1
|
2019-11-22T19:05:31.000Z
|
2019-11-22T19:05:31.000Z
|
ramscube/tests/test_dummy.py
|
freemansw1/ramscube
|
2f4c687e4e4ec84153b687061ea90cac8a7fbd83
|
[
"BSD-3-Clause"
] | 1
|
2019-11-20T19:06:04.000Z
|
2019-11-20T19:06:04.000Z
|
import os
import pytest
import ramscube
def test_dummy_function():
assert 1==1
| 9.555556
| 26
| 0.744186
| 13
| 86
| 4.769231
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028986
| 0.197674
| 86
| 8
| 27
| 10.75
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| true
| 0
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
58b9a683516a34cf0d3d7654c846937cae840274
| 1,750
|
py
|
Python
|
benchmark.py
|
briwilcox/Concurrent-Pandas
|
b759576c1dd304459a4ed3ae9c790f63cc7c888b
|
[
"Apache-2.0"
] | 10
|
2015-02-23T15:32:33.000Z
|
2020-07-19T13:41:20.000Z
|
benchmark.py
|
briwilcox/Concurrent-Pandas
|
b759576c1dd304459a4ed3ae9c790f63cc7c888b
|
[
"Apache-2.0"
] | null | null | null |
benchmark.py
|
briwilcox/Concurrent-Pandas
|
b759576c1dd304459a4ed3ae9c790f63cc7c888b
|
[
"Apache-2.0"
] | 8
|
2015-02-27T14:15:47.000Z
|
2021-11-24T18:25:59.000Z
|
__author__ = 'brian'
"""
Output in test run:
Looking up 10 keys from Google Finance
Time to download 10 stocks from Google with Multi-Threading : 6.987292528152466 seconds.
Looking up 10 keys from Google Finance
Time to download 10 stocks from Google with Multi Processing : 6.1684489250183105 seconds.
Looking up 10 keys from Google Finance
Time to download 10 stocks from Google with Single Threading : 7.67667818069458 seconds.
Process finished with exit code 0
"""
import concurrentpandas
import time
# Define your keys
finance_keys = ["aapl", "xom", "msft", "goog", "brk-b", "TSLA", "IRBT", "VTI", "VT", "VNQ"]
# Instantiate Concurrent Pandas
fast_panda = concurrentpandas.ConcurrentPandas()
# Set your data source
fast_panda.set_source_google_finance()
# Insert your keys
fast_panda.insert_keys(finance_keys)
# Choose either asynchronous threads, processes, or a single sequential download
pre = time.time()
fast_panda.consume_keys_asynchronous_threads()
post = time.time()
print("Time to download 10 stocks from Google with Multi-Threading : " + (post - pre).__str__() + " seconds.")
# Insert your keys
fast_panda.insert_keys(finance_keys)
# Choose either asynchronous threads, processes, or a single sequential download
pre = time.time()
fast_panda.consume_keys_asynchronous_processes()
post = time.time()
print("Time to download 10 stocks from Google with Multi Processing : " + (post - pre).__str__() + " seconds.")
# Insert your keys
fast_panda.insert_keys(finance_keys)
# Choose either asynchronous threads, processes, or a single sequential download
pre = time.time()
fast_panda.consume_keys()
post = time.time()
print("Time to download 10 stocks from Google with Single Threading : " + (post - pre).__str__() + " seconds.")
| 33.018868
| 111
| 0.765714
| 245
| 1,750
| 5.302041
| 0.285714
| 0.069284
| 0.064665
| 0.073903
| 0.743649
| 0.730562
| 0.730562
| 0.730562
| 0.730562
| 0.730562
| 0
| 0.044489
| 0.139429
| 1,750
| 53
| 112
| 33.018868
| 0.818061
| 0.202857
| 0
| 0.428571
| 0
| 0
| 0.274973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.095238
| 0
| 0.095238
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
58fa48818d0b4032dc1a109d9bfee80e5aedb354
| 179
|
py
|
Python
|
demography/models/__init__.py
|
The-Politico/politico-civic-demography
|
080bb964b64b06db7fd04386530e893ceed1cf98
|
[
"MIT"
] | null | null | null |
demography/models/__init__.py
|
The-Politico/politico-civic-demography
|
080bb964b64b06db7fd04386530e893ceed1cf98
|
[
"MIT"
] | null | null | null |
demography/models/__init__.py
|
The-Politico/politico-civic-demography
|
080bb964b64b06db7fd04386530e893ceed1cf98
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from .census_estimate import CensusEstimate
from .census_label import CensusLabel
from .census_table import CensusTable
from .census_variable import CensusVariable
| 29.833333
| 43
| 0.860335
| 22
| 179
| 6.818182
| 0.590909
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00625
| 0.106145
| 179
| 5
| 44
| 35.8
| 0.93125
| 0.067039
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
451beca8363788685ad7a06e2eb4271ce56bbaaa
| 110
|
py
|
Python
|
help.py
|
jendakolda/API_Calc
|
caaab1331b4016fe3ca9e18fca2d68b56ed62c7e
|
[
"MIT"
] | null | null | null |
help.py
|
jendakolda/API_Calc
|
caaab1331b4016fe3ca9e18fca2d68b56ed62c7e
|
[
"MIT"
] | null | null | null |
help.py
|
jendakolda/API_Calc
|
caaab1331b4016fe3ca9e18fca2d68b56ed62c7e
|
[
"MIT"
] | null | null | null |
from dearpygui.core import *
from dearpygui.demo import *
show_demo()
show_documentation()
start_dearpygui()
| 15.714286
| 28
| 0.8
| 14
| 110
| 6.071429
| 0.571429
| 0.305882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 110
| 6
| 29
| 18.333333
| 0.867347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
451bffe078a1799da30a879af4be52df083ad3f2
| 932
|
py
|
Python
|
samples/threads_naming.py
|
thierrydecker/learning-python
|
d67242740c33037e1ff270a8e2107f915e0fd44a
|
[
"Apache-2.0"
] | 1
|
2020-11-05T13:34:30.000Z
|
2020-11-05T13:34:30.000Z
|
samples/threads_naming.py
|
thierrydecker/learning-python
|
d67242740c33037e1ff270a8e2107f915e0fd44a
|
[
"Apache-2.0"
] | null | null | null |
samples/threads_naming.py
|
thierrydecker/learning-python
|
d67242740c33037e1ff270a8e2107f915e0fd44a
|
[
"Apache-2.0"
] | 1
|
2019-01-21T08:46:37.000Z
|
2019-01-21T08:46:37.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import threading
import time
import random
def worker_a():
    """Simulate a unit of work: log start, sleep 1-2 s, log finish.

    The current thread's name is included in both messages so the
    interleaving of concurrent workers is visible in the output.
    """
    # `.name` replaces the deprecated getName() accessor; "Thread" fixes
    # the "Thead" typo in the original log messages.
    print("Thread {} started".format(threading.current_thread().name))
    time.sleep(random.randint(1, 2))
    print("Thread {} finished".format(threading.current_thread().name))
def worker_b():
    """Simulate a unit of work: log start, sleep 1-2 s, log finish.

    Identical to worker_a; kept as a separate function so threads spawned
    from it can be distinguished by target in the demo.
    """
    # `.name` replaces the deprecated getName() accessor; "Thread" fixes
    # the "Thead" typo in the original log messages.
    print("Thread {} started".format(threading.current_thread().name))
    time.sleep(random.randint(1, 2))
    print("Thread {} finished".format(threading.current_thread().name))
def main():
    """Start two threads per worker function, then wait for all to finish."""
    threads = []
    # A single loop over (target, name-prefix) pairs replaces the two
    # copied spawn loops of the original. Spawn order is preserved:
    # Worker_A-0, Worker_A-1, Worker_B-0, Worker_B-1.
    for target, prefix in ((worker_a, 'Worker_A-'), (worker_b, 'Worker_B-')):
        for i in range(2):
            thread = threading.Thread(target=target, name=prefix + str(i))
            thread.start()
            threads.append(thread)
    # Block until every spawned worker has completed.
    for thread in threads:
        thread.join()
# Run the demo only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
| 25.189189
| 77
| 0.639485
| 118
| 932
| 4.898305
| 0.338983
| 0.069204
| 0.152249
| 0.193772
| 0.735294
| 0.735294
| 0.735294
| 0.735294
| 0.612457
| 0.612457
| 0
| 0.009321
| 0.194206
| 932
| 36
| 78
| 25.888889
| 0.76032
| 0.045064
| 0
| 0.48
| 0
| 0
| 0.103604
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12
| false
| 0
| 0.12
| 0
| 0.24
| 0.16
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1888d64b152e4a22e5f35e1731665311020e2ce2
| 17,153
|
py
|
Python
|
sudokuabc.py
|
bowespublishing/convert-123-sudoku-to-abc
|
9c66a004b92cfeb6a18164e9a4786ebe0b533220
|
[
"Unlicense"
] | 1
|
2022-03-12T18:03:13.000Z
|
2022-03-12T18:03:13.000Z
|
sudokuabc.py
|
bowespublishing/convert-123-sudoku-to-abc
|
9c66a004b92cfeb6a18164e9a4786ebe0b533220
|
[
"Unlicense"
] | null | null | null |
sudokuabc.py
|
bowespublishing/convert-123-sudoku-to-abc
|
9c66a004b92cfeb6a18164e9a4786ebe0b533220
|
[
"Unlicense"
] | null | null | null |
from pptx import Presentation
import os
from os import listdir
import PySimpleGUI as gui
from tkinter import *
from tkinter import messagebox
from tkinter import filedialog
# Global counter: number of 'Sudoku' table shapes found in the selected deck.
x = 0
def CheckforSudoku(path):
    """Scan the PowerPoint at *path* and increment the module-level
    counter ``x`` once for every table shape whose name contains
    'Sudoku'."""
    global x
    deck = Presentation(path)
    for current_slide in deck.slides:
        for current_shape in current_slide.shapes:
            if current_shape.has_table and 'Sudoku' in current_shape.name:
                x += 1
bp_base64 = b'iVBORw0KGgoAAAANSUhEUgAAANUAAAB9CAAAAAAQZHncAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyNpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDYuMC1jMDAyIDc5LjE2NDQ2MCwgMjAyMC8wNS8xMi0xNjowNDoxNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIDIxLjIgKFdpbmRvd3MpIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOjRBRUE2NEJBOTFBNTExRUM4ODNDQjgxRUY0MEFCNDg4IiB4bXBNTTpEb2N1bWVudElEPSJ4bXAuZGlkOjRBRUE2NEJCOTFBNTExRUM4ODNDQjgxRUY0MEFCNDg4Ij4gPHhtcE1NOkRlcml2ZWRGcm9tIHN0UmVmOmluc3RhbmNlSUQ9InhtcC5paWQ6NEFFQTY0Qjg5MUE1MTFFQzg4M0NCODFFRjQwQUI0ODgiIHN0UmVmOmRvY3VtZW50SUQ9InhtcC5kaWQ6NEFFQTY0Qjk5MUE1MTFFQzg4M0NCODFFRjQwQUI0ODgiLz4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz4EINMUAAALGUlEQVR42u3bd3gVVRrH8e9NgQiEIErb0GuygBGIdJASCKILAg82EMGChRaWjqssKrAooi7qIwESqqgUEQUEFEEpSlECJKFqgBBApUkSSspv/5iZe29iYkwiycPdmX8yc6blM3PPe87MeQd54oStslW2ylbZKltlq2yVrbJVtspW2SpbZas8VHVl/riHurcLDWnepd+rO9M9RZVQEtfUYpMHqfzrNGkVUtUbcLzuOaqpkpT8w0g/YKXHqN4y51f7QiuPU2kQ+B32ONUS4CuPU60GtrjW/bz1o+jP4q6bSxnpGRnOVZkZGZnOhYyMdKtNuPTth1Frj2a6n+HEhgVLv/m5GFXT4dYka2Fzr3IA3g2nX5IkJYbUC5xhrZwaWG2INb+hRv36ByRJJ4dXAfBtt855/J3dbwEo1/O74lJdCIKB5nzyE65mrO52SVILuNu6OyFQ+VdzYRgEX5OkT25z7vIvc90CX6vkjWJSne0G1ROM+ZQwoO4Ly9e82Rrw/0qSXoZyieYdAPjYmE9vDGMlaaUPtIzevXtRO8Bo+PbcAsEz1y5/uXmZY0Wtej4xPn7/+tGVoN5es/gp4LkrkqS3vKBGkklZZayehLcXTxjze73hG0lHK8BIo0ZFQNlDkjQQQi5IUlrM9aJWOby8vADKvXrFLP0SGGBtMg0YISmtIUQYRaG0bUqtFEnSG9DgqiHoYe6RdheMknStHsws7n5g2ad2m6W9oOJJa5NrTeH2JElDIVSStM+HKRFWI3CPYf6xDN7W7nrPkF6uBtOLS9Vl8oTxT99bFXCMyZ
CkU7fCM65tZgEfSFoLfkck6T+wdYVxO3TmNtgkabYrlkj7HHgfkNQKah4t1vYqZUUj4FlJWueKBZIU62sILgTCYklqQ6XLCX40SpP0CdROkTQAHjsVb0xHt5SBNZIigQrTThdrK/xbGPCRpHehRLxrm+Tq0FeSHoEnJB0syf1SC9hlxPVnJak9eHs5JwdES9JjAAH9thdn3+JwWeNn9DyUc7vAmXdCB0laDMHXpbdgjvQivGLE9fWS0oPINi2VpIxpZQB4IKn4VOoNZROliRDg9m+kN4ZwSTruj/d+KYyyCdK30EaK8ab6b5LS6kHvqHdc03/NaJMwsRZA/ePFpxoF7JbeBq8Y1zbnKmI2Tl1gjo77c5+ktMb4HdIsa1UrmJDzaa8sawT0LD7VcOAHI1oscm2zHXjVapv6azFESdIYmKM+8Kkk6SHontuJU7qCb2yxqTpA+bNSUnno5dpmDLBDkrTfhwZ6jPKJkvQ1PKxaBF60wn9AQm5n3gZ8UlyqzQ7oJkkPgPdWq/TH2+Eu8yGkKX67guktSboaRPA2Xx411hzzhyG5nfnHEs7OVpGrYmoDqyVpqxfcYT4TXetmBTRJ42BUGaPRkkbgHwHLzVVDgRnZjp5q/n0fvGOKWPWapMyL3430Bx53hY07tktSfEew7oe0GQKoeNZY2AhlqWQ9kPzaELjvy0tSxuX4NydL0tUOI2KuS+mrK0HHoo4W5f8e2rJhZQfAo2b/Nu0RwKftiNHhpYAeKc56Xxd4yGqd6wAPOg91KAigUkjLhlV9+dtFSVvAp05YeF0H3LqnuHq3Pm1d3aT0l26xiktNTnNt/ySwzFp4xu23KenckFLOY1X/SdIq5zFCbxgqR9XFIT27tG4a0rxTr4jog1neNxyZ0OR23xIVW72cJbJtHzg44rzz8X3Q4GG/ZLlG7zzSvkmzzg+9tOE3SdLp6Mc7NQ4JG7L2hj1d5X/0IOOXw0cvZN6M75hu/slW2SpbZatsla2yVbbKVtkqW2WrbJWtunGqDZFz1uRQvDFybl7vZQ9HRkWdK1rVlXOJx+KPnL6S5/7h0CaH4nshJI895xnv74tMda5f81qVy5b08r21ZtjL8YVUzW7UfmeOe0blrFrWtPn6G6LKkvXo++T5wqi+dUCT9D+vOuYP1S7cKFWFzn0e7NuhIkCTpEKoloDrBfWfUH0FlDx8o1RGMs6lZQ1xpUwURHW8EvTJxy/wUjC0TbtRKudod0dgXSHq1f5/v3spP/Xqxykzz+pGqxRX2hoP/YtjYFQRx8CsY43d4S7PU0VA1RQpOTY+LtkqS4mLj73srkqIHj943KKk36syjsbFWgkNmd+/OXTguKjDWVSXVk4YPGnDVeeeP8XFnpAk/RobmyQpccHYwZPWpWb9B6NHPTZmYZJ0/VDckYwCqUZCjVRpHQ4+t8o24TDHdMOhjX55ys9Iehp3ObsquQY8ZZQdDHcYg0e9ki3VXr0XCECIM1001Bohfxt6KnmskZsR7Fa1k540BovKTlK8D7WTC6TqDS1ljOG7qXCpOp4KdrZurc5mV1WDJyVJP1QE/AKc7VcU8P1z1o4lPjUP3dQchtYC6JzczlrvWGyde2c159kG7YAG1wuiSqpgXOzcVaGduH3MF3vWDvEHuqbnokpvDVXnJ5zeP7/ZVOsX6NebMk8sWTk1CAg8nU31AbR8EL/+C1fNuBMoZ/5w4ysDHRbu/v79MAj3pVFmQVTPAWv/UOWghZHksrc2MCsX1Sbw/kaSdD3VWa+oFyNJyfcCk7OploE3gTsk6Xp/YJhRTzu4BtBfh1yCUh6q5OFAmP5QRR0rHOwqBUFXclbNhPrp2WNgwD7zjOWhaTbVCsD3a7NnWgNqp0rSUmC8WyDjzvyoBu49EB+zZmwNoPqRPFTRzj1HABtzVs0w0ySzqEZbC/3B/0RW1Sqgn9v/73NAkrpCTWc38UQ5aJK/3q3DYX7Uc0h/rKroGgbeCbyQs+oLoPeFbCrn90
LvOFNtsqg+sNZ/aNaDE/5ul0IKy7cKAK9mc68pD1Vz154pNc20hBxiYFcg0G2YPEsrvNxJzKL6yv2EKyRtwMqRsuJzvlT/iHigb7/x0QecVSF3lXvfoj20z0WV2ATglj7f5qRalYdqi6mah5HmWkDVW9kL/5yqKzTLRaVLo0oDOIZdLbhqOvgdK3LV3dApN5V0fEowwMMFV80C3/i/UPU58Fleqqt1zfymnFVS2tJAM4+3YKqP3a+tdH9hVduAedbCGndVI1czdMDXTFbPTSXtC4AHCqyK84WJroPdVVjV4ZIw3Fp40V3l7XqemGIF6NxVehBaF1iV2RTqXLRKd3kXVpVSGxqYeWanAp3J7eFAf2ubk1WgeUYeqkfNqlcgld7A/K5GUno4hVVpAPCKJOl8R6yTqAvANDPIdQY+zKUfeOaE64r8s+Cqy42Aidck6de+OAqt2gw4nt6VeHB2XUKDrNy5TtCrBPd/fSH11KIg1337vWpKyeEbT148teoOKBVTcJW2+wMNxs+bO7wKze4ptEpDASjtA1WPhMESSVI72DQeHBVr+gN0T85N1RwoXaUMwGwVQqXNgVbHp/6JZ806WghV2lDzaJ2Oqgu8K0lqBt/pxRJmLuS/rmV/Fr5sqtLHVbCSHTfk8EY6B9XHuah0ZnQ1gFovnNcA81OBP6O6vDBqdlxOG+8Y0r5x28HrM6TNkZEHJCnzkznzz0sHJ93TJLTHawm/Hz1I+2je7G2SpAsbZkQ8PnrWzrScRg+OR0bPNZ5nVs+JNNjH51hFks7MjZ7jOnrynnXr96ZK6uEWqjxmpCejMUzyONVub7f+jseo+kHlcx6hSt7pfP0yFfPbvJtftc3RMXLf+Suppz/rBgT94hmqUYBXpVo1AwDqxsozVItrOV8/lHrmZ3mISqlfvNCrdVDjToPmnsjX6MFNMGVm5ntM5OafbJWtslW2ylbZKltlq2yVrbJVtspW2ar/N9X/AHKrCNoDTutcAAAAAElFTkSuQmCC'
bpicon_base64 = b'iVBORw0KGgoAAAANSUhEUgAAALQAAAC0CAQAAACXxM65AAAAAXNSR0IArs4c6QAADTlJREFUeNrtnXtwVNUdxz93d/NCIogib0HFFyCgbVF81AmgouPY+KigWKn1MbZq67uiVqctbdXBTu1ofRU66JShttpMdaytiaAI+GpF0GgFBF8xoCJJINkku3v6B2fPbpbdvffc3b27uXt++8ee5N7sufvJ9/7O77x+1yKdWdQykgnMYiqHMogqjGWzbtrYzDoaaaaFDkQ6pHtbNfOpZzLDCRiGWhajlfU0sJSw3alB6mhFmFdOr1bqCKaCTbaR/Jx72N9IM0cbyDkMZQMd6Q+PZQ1Ro8c8vaKsYWw6RY/hCU5K67ONuTGLMRxLE+2kOI1XjAoL8HqFkcmKDvJzzjdqLoCNJkQjIh7e1dHAvoZKQaydelbsAV3NVoYZIgWzbYwjHMBivsFcUBvGfKwAtdQbFgW2emoDjGSyIVFgm8zIAJMYbkgU2IYzKcBMM3RUcAswM8AUw8EDm2KxjQMNh4LbdouwGdb3wLqtdLMBxgrhpo0Z0Aa0MQPagDagjRnQBrQxA9qANqCNeWAhP32ZKk5nIrvZyYe8ab/O0IB2e3Mu4DYq5E/dbOB5VtJMa0mMm3k8emdxDOMJ08F2NtGd188+i78wIOV3nbzPn1lMWwmg9nCBlCVuE90iblHRLO4TJ4phIpSXT79NZLJ1YrqoLPbiMC8rO0507AWhV7wr/iBmiGDeQLeJnaI3pZYdYpHYv3xAr8iouYhYJobk+OkL5H3yExEU1WK6WCi29KnjmXIBPVpERTbbKObkpOubJOhr1W+qxCniIbFN1dAkDioaaA/j6Kk2Qft4HuNuFTXoW49qcFGRx0tcx0n8W7b4ddxPrf87LIcmlbdyFUPZh4OZyzK2EgWglmu43vUl7ZaY+/59NxuZy0P0ABbf4aaiRbSe3TyL1C3cIY4Xlvp9SEwQ94kedewGl59/gfyEG9McqxELRJcQQoiwmOdvH22JRxXoJWmO14sd8miPmJH0b3D+mi3//ta0R2vEUnn8v2Kkn310iBpZivFCmuMNzOFjACq4i0EuauiU7xVpty50cSdbAZjIuX720UEqZaldAk21RuZLX30yl7iooUuNeKS3j/g+MaCSuxjoX9ABFU+EMwz3CFZyI12AxUIO164hrBSdyVbzHAAHcIfnG3YC3lcVk7pNZ0t4XsYfV2tf2m75XpkRYoQGGQTOTd4C6DfQMaXczNbBbyWKU7VR7LRVNKzgSwBGMMuvoIXDYcI1LAHgECZq1tAua6jMcs4W2RBXcrrHzqMIirY762d0AVWcofn5EdkcVmdBKFgsSzNy6IP6QtGwkxcBOFNbc3HQ2WwtWwAYwtxyBx3lNQDGcrAr0FVZ/0ERGXnAD/zuOiwbpQo+IAxY1LkK8GpsPv8N+S8/wdPNwkVQtGXrEr6gF4BvuQI9wOasD2UajQpP9/2VoKKhU0baR7hyHXaK3i5DPLTjGt8pOibPHaW5vSYOOru1qanag/wJOuoYdIW8rEr2KYCi29klS6M8HJv2EHTcdQRT8zrtZfG4IVAQRXfxtSztbxMK9nNF24MeLDsTway9vHS+3a7DsudK4j66xvZK+rmi7W7YAyVgobm6p9OR60gefvIl6IhDRVuMkGdE1RizjuuotHUInSltgc/CO6egA6pHqAs6DnCIo3gbAh4OLHkIutcx6Ph8eYdCp6No2M/mvLjn78kyMu4L0Nl9dJDxsvQ/hyN+qUq1S0VZpa4o5mfQlk0sMYmhsvSuqy64veuoUef7WtHYRMfzlOd8vUCgh6muS48fQSe+VChrr/C7KghbqVlDtyPQNRwgS1/6E3TEEehjGCVLazVjDqeK3lctNmjxcBV+UVxHKEsMfaYqN2rXEFY9y2w2REUlW7378sUBXZFFbSfI0g5e0q4h7joG2fQ7D3DZ2PrIdYzgG7K0kU0uaog5UP
TBculuhPf9CdpJY3iR8q+r+Eq7hvjSnOyKPkW+v5LnzUolCDqT66jlh6rzvdhFQxV1oOigWsawHPwJuttG0RaXKe/5V1e3dVzR2brgs2QU3cPf/Ao6bAP6QL4nS7tY5KqGmFT0vhkHi4JcI0sb1PB/2YE+iQmy9ALvuaohKhUdzDjLchTTZWmNx/tpSwZ0LbfKceQISzRH7VIVnWk6y+I02VB2stK/oLP76F+qwG6VXLqbC+jqDN37ebLur1iLt+bhFqXM4Z3F2Vwp/ep2LkuKuPUsMfWVHvTFHCtL/+Rzj0EXpcOSGt6N41fyZo/we7kI0R3obIoex73Kid0L/lV0NG2lFuNYqZayfMDjOdSQUPTeI96V3KImBJax2XPQRe+CT2W5wtzN7XySQw2xLKCnq+HXr3P6Z/YD0NG9XEeQK1jDNPX7+2jIqYbMih7IE6oz9CZvFgF0qBiKHs23GcThnMe0pInatTyYYw0JH913Dmc/HmWMapLvVus6fA/6ZBqoZEBK/+2BnCOBTIq+krNVebH2vE2/jjr2Y5+9usk/ynlLWgJ0RVJdl7FQgd/EQg9nvovuo9PbdBar+e983agBLuJ36qcoi2gFv4Putb2UGTxlO4OtA9qijgeStiM38sci6bmkFL1H1b/J28LDALN5Kgnzf7jYw3UcJdEYJqyLJt5gKJfLvlyIy+nl2jwM+AT5MXcmzbXs4Ga1XLc45mHOipjMmBEWm0STuFvMEDUyl8cU8bhoU4msrnKZW2mS+FjlZ/qsT76mDnG1CJRPlrAu+bWfE4eIwSnHqsS5okUe/1RMzBF0X9slrshTbr1+kbwqMVAa5kO1RT5x7Gnq2YQARvFkHh+x+AW38ifXI4L9sDFMeOlMa+9e53zeAWACC233Czrzi6s4jweLjrlIoDOvJn2bm+VZF2nvm03dihGlnV9zGqtKIglsSSka4F/8lAgwgEXaT0tLgP6cZdzAVO4ooeTGHjYIH8nG6bWsZ1WJZ+V5izWzhR2lkmQuFVUl9oTwojSGlTZnPSTH187hZJeKjtn2Q33tOsKOQEOTnDodzKVa23kSoIMl9zT5ooCusj1vj5+2mMMhrkAHDGgnoOEtXgaghkc0LtEoWnlOZ64DBEvk2TM5WgN0TH2tQDmDjpuTtFHr+UyW6h1fZMwouq852Uq/RW18ONFxzudY0sb+sgYtNBS9i1WydLSav7YHHTONISTmWCocYVgh34c5zq2UrOiAAQ2WI02/Ks+3mK3to8tc0REtL72LV2VppvZlGh/tOJIGWC3fR/d5PoCzr2WiDseKhjdU6UijaHeVOVP0u2pN9RHaNZV1Y1ihCXo3X8jSYUbR7kA7Syncww5ZGm58tDvPHHIIOr5FbbDmApRQebsOXdC9aq68yjZtYOq9Y3y0BuiISm4Z0sxfXuFhRjsfKDqRji2oCa7CuA53oAOaoCvLW9EVSeGXs55kpxodsYyiC6doNJNLJYZJK4yi9UBHNEFHjaLdKbpXORGhpehKE3XogY5KwFFH2jaKdu064km3nYGOJvnoUPmCtpK+vFMMA5Wv7tVyHQEPk8qXHOjkEbWgw4sbqKKPbi3XgeZjGHwGGk1FV6uFu50OfXTMgO4L1ynoeJ7SnZpxtFG0FugBjJYlZ7vEo0muo8aAdu6jj2CELG3UVvSA8gVdpa3oC9Tlve8QdNSA7pvnyAnoQer53RHe0h4bqTGKdgp6jlpzt8FhY5i8sd8o2qGP3pc5qvys4zq609ZWZqArtar9plra2MvfXYCuMqCdjYrcolZFN9NiFI0WPOd2edID0tdqpOyOGNA6ip7GQuXFYzRoDP9HjetwDrqGXySl2n5BLUjXU3RliYEOlRroaho4Tf3UzqVa84aiZEEXKY7O3Em5PwlzD/e4zupVYRSdLXZentQIwmoecJ0Cwig6gwWZxQpmJ+Woa+I82vuDgkrterJVNZ7bOavPNrdnuDanxx1YBnQyjBpqGcMcrujzkJoOnu
SaEkpp0s9AJ3upMxEEGcJoxnFkykjbZm7n6ZLLt9GPQCffzMdxXIYOx5NckkOqqYgBbW9hVvMwz+YEK9F47jag09s7LGAVbTl+SiIpZkv5gg5n6Mvt4D0eYXlebvuN6lPXlBhoy7ukcAexlFP6eOpPeJFVNNOcs5ITHfj1HAZs5IwiPJmiREADHM+pHEYtITbzD14uQOM1iQVs41FPHwpZgqDL1wIGgQFtQBszoA1oA9qYAW1AGzOgDWgD2pg3oLsNBA+sO5C3EUpj2awtUGLDtn61zQHWGQoe2LoAjUV7lmL5WIzGAM1Fezpo+VgrzQFaWG9IFNjW0xKgI8ennhuztwY6LKCarXl8fKCxVNvGOMIBIMyFOayPNZbd2rmQcHxn5ScMZVrJrXT1R7zxMI8h4qAFGziRMYZL3u1VrtvjLeK7zDpo5FjGGjJ5tZe5mE/3FBObsttoYhqjjQPJm9NYwzw+jv8Y7OO2nyfEhJLbC9k/m8CHuV497iuNBamjFWFeOb1aqUtN4JDOUVQzn3omM9xMC2i7i1bW08DSvdcop/fIFrWMZBIzmcKhDDLOxMa6aWMzb9PEO7TQkW7h6P8BBU+G2vkWH04AAAAASUVORK5CYII#'
# Dark colour scheme for every PySimpleGUI window created below.
gui.theme('Black')
# Rows of the main window, top to bottom: banner image, instructions,
# one replacement input per digit 1-9, the import/export file pickers,
# and the Cancel/Ok buttons.
choose_powerpoint_column = [
    [gui.Push(), gui.Image(bp_base64), gui.Push()],
    [gui.Push(), gui.Text("Choose your replacements below for each number"), gui.Push()],
    [gui.Push(),
     gui.Text('1'), gui.InputText(key='-no1-', size=(4, 1)),
     gui.Text('2'), gui.InputText(key='-no2-', size=(4, 1)),
     gui.Text('3'), gui.InputText(key='-no3-', size=(4, 1)),
     gui.Text('4'), gui.InputText(key='-no4-', size=(4, 1)),
     gui.Text('5'), gui.InputText(key='-no5-', size=(4, 1)),
     gui.Text('6'), gui.InputText(key='-no6-', size=(4, 1)),
     gui.Text('7'), gui.InputText(key='-no7-', size=(4, 1)),
     gui.Text('8'), gui.InputText(key='-no8-', size=(4, 1)),
     gui.Text('9'), gui.InputText(key='-no9-', size=(4, 1)),
     gui.Push()],
    [gui.Push(), gui.Text("Choose the PowerPoint file you wish to convert into ABC Sudoku Puzzles below."), gui.Push()],
    [gui.Text("Please note you will need to choose a PowerPoint with Sudoku Puzzles already created by the Puzzle Generator inside it!", font=('Arial', 10, 'bold'))],
    [gui.Push(), gui.Text("Choose your PowerPoint File"), gui.Push()],
    [gui.Push(),
     gui.In(size=(25, 1), enable_events=True, key="-IMPORTFILE-"),
     gui.FileBrowse(file_types=(("PowerPoint files", "*.pptx"),)),
     gui.Push()],
    [gui.Push(), gui.Text("Choose where you want your ABC Sudoku Puzzles to be save to"), gui.Push()],
    [gui.Push(),
     gui.In(size=(25, 1), enable_events=True, key="-EXPORTFILE-"),
     gui.FileSaveAs(file_types=(("PowerPoint files", "*.pptx"),)),
     gui.Push()],
    [gui.Push(), gui.Button('Cancel'), gui.Button('Ok'), gui.Push()],
]
# The whole window is a single column.
layout = [
    [gui.Column(choose_powerpoint_column)],
]
# Main window: loop until the user cancels or supplies a valid
# import/export file pair plus a deck that actually contains Sudoku tables.
window = gui.Window("Convert Sudoku Puzzles Into ABC Sudoku Puzzles", layout, background_color='#000000', icon=bpicon_base64)
while True:
    event, values = window.read()
    if event == "Exit" or event == 'Cancel' or event == gui.WIN_CLOSED:
        # Hard-exit keeps the original behaviour of skipping any cleanup;
        # the unreachable `break` that followed it has been removed.
        os._exit(0)
    elif event == 'Ok':
        importfile = values["-IMPORTFILE-"]
        exportfile = values["-EXPORTFILE-"]
        # Per-digit replacement strings entered by the user (may be blank).
        replace1 = values["-no1-"]
        replace2 = values["-no2-"]
        replace3 = values["-no3-"]
        replace4 = values["-no4-"]
        replace5 = values["-no5-"]
        replace6 = values["-no6-"]
        replace7 = values["-no7-"]
        replace8 = values["-no8-"]
        replace9 = values["-no9-"]
        # Proper booleans replace the original 'true'/'false' strings.
        confirm = True
        if importfile == '':
            gui.Popup('You need to select a PowerPoint file to convert from!')
            confirm = False
        else:
            # Normalise the browse dialog's forward slashes to backslashes
            # (Windows-style path), then count Sudoku tables in the deck.
            # Bug fix: the original called CheckforSudoku() before checking
            # for an empty path, which crashed on Presentation('').
            importfile2 = '\\'.join(importfile.split('/'))
            CheckforSudoku(importfile2)
            if x == 0:
                gui.Popup('You need to select a PowerPoint file with valid Sudoku Puzzles in!')
                confirm = False
        if exportfile == '':
            gui.Popup('You need to choose where you want your ABC Sudoku Puzzles to be saved to!')
            confirm = False
        if confirm:
            break
window.close()
# Digits 1-9 as the search tokens the converter scans each table cell for.
search1, search2, search3, search4, search5, search6, search7, search8, search9 = (
    '1', '2', '3', '4', '5', '6', '7', '8', '9'
)
if __name__ == '__main__':
    # Ordered (digit, replacement) pairs. Ascending digit order plus the
    # `break` below reproduces the original 9-branch if/elif chain exactly:
    # only the first digit found in a run is considered, and a blank
    # replacement leaves the run untouched.
    substitutions = [
        (search1, replace1), (search2, replace2), (search3, replace3),
        (search4, replace4), (search5, replace5), (search6, replace6),
        (search7, replace7), (search8, replace8), (search9, replace9),
    ]
    prs = Presentation(importfile2)
    for slide in prs.slides:
        for shape in slide.shapes:
            # Only table shapes tagged as Sudoku puzzles are converted.
            if not (shape.has_table and 'Sudoku' in shape.name):
                continue
            tbl = shape.table
            for row in range(len(tbl.rows)):
                for col in range(len(tbl.columns)):
                    cell = tbl.cell(row, col)
                    for paragraph in cell.text_frame.paragraphs:
                        for run in paragraph.runs:
                            for digit, letter in substitutions:
                                if run.text.find(digit) != -1:
                                    if letter != '':
                                        run.text = run.text.replace(digit, letter)
                                    break
    # Write the converted deck to the location chosen in the GUI.
    prs.save(exportfile)
# Confirmation window shown once the converted deck has been saved.
completed_column = [
    [gui.Push(), gui.Image(bp_base64), gui.Push()],
    [gui.Push(), gui.Text("Conversion completed successfully!"), gui.Push()],
    [gui.Push(), gui.Button('Ok'), gui.Push()],
]
layout = [
    [gui.Column(completed_column)],
]
window = gui.Window("Completed!", layout, background_color='#000000', icon=bpicon_base64)
while True:
    event, values = window.read()
    if event == "Exit" or event == 'Cancel' or event == gui.WIN_CLOSED:
        # Terminate immediately; the unreachable `break` that followed
        # os._exit(0) in the original has been removed.
        os._exit(0)
    elif event == 'Ok':
        break
window.close()
os._exit(0)
| 74.90393
| 5,016
| 0.751181
| 1,000
| 17,153
| 12.847
| 0.401
| 0.014712
| 0.017125
| 0.009808
| 0.108664
| 0.081264
| 0.075893
| 0.073013
| 0.067876
| 0.06196
| 0
| 0.116724
| 0.174897
| 17,153
| 228
| 5,017
| 75.232456
| 0.790998
| 0
| 0
| 0.302326
| 0
| 0.017442
| 0.623338
| 0.567681
| 0
| 1
| 0
| 0
| 0
| 1
| 0.005814
| false
| 0
| 0.075581
| 0
| 0.081395
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
188919022bc958a69e0d7ea7ab302efc1c92b682
| 479
|
py
|
Python
|
spinup/__init__.py
|
jonberliner/spinningup
|
020977480b53d7c3ba27d33b00cf888d467ec661
|
[
"MIT"
] | null | null | null |
spinup/__init__.py
|
jonberliner/spinningup
|
020977480b53d7c3ba27d33b00cf888d467ec661
|
[
"MIT"
] | null | null | null |
spinup/__init__.py
|
jonberliner/spinningup
|
020977480b53d7c3ba27d33b00cf888d467ec661
|
[
"MIT"
] | null | null | null |
from spinup.algos.pytorch.ddpg.ddpg import ddpg as ddpg_pytorch
from spinup.algos.pytorch.ppo.ppo import ppo as ppo_pytorch
from spinup.algos.pytorch.sac.sac import sac as sac_pytorch
from spinup.algos.pytorch.td3.td3 import td3 as td3_pytorch
from spinup.algos.pytorch.trpo.trpo import trpo as trpo_pytorch
from spinup.algos.pytorch.vpg.vpg import vpg as vpg_pytorch
# Loggers
from spinup.utils.logx import Logger, EpochLogger
# Version
from spinup.version import __version__
| 36.846154
| 63
| 0.830898
| 80
| 479
| 4.85
| 0.225
| 0.206186
| 0.231959
| 0.340206
| 0.373711
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009346
| 0.106472
| 479
| 12
| 64
| 39.916667
| 0.897196
| 0.031315
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
189f83ced8367f963e91e9d91af9bc718d559168
| 82
|
py
|
Python
|
koila/interfaces/__init__.py
|
techthiyanes/koila
|
b665482ff99a02bfeeceaa1323589fb89495a30c
|
[
"MIT"
] | null | null | null |
koila/interfaces/__init__.py
|
techthiyanes/koila
|
b665482ff99a02bfeeceaa1323589fb89495a30c
|
[
"MIT"
] | null | null | null |
koila/interfaces/__init__.py
|
techthiyanes/koila
|
b665482ff99a02bfeeceaa1323589fb89495a30c
|
[
"MIT"
] | null | null | null |
from .runnable import Runnable, RunnableTensor
from .tensorlike import TensorLike
| 27.333333
| 46
| 0.853659
| 9
| 82
| 7.777778
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109756
| 82
| 2
| 47
| 41
| 0.958904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
18c63e2dcf9442d37f3ebf71b09847182f11266d
| 175
|
py
|
Python
|
test_gpu.py
|
st3107/tfhelper
|
1dc66a29be387bfdf57994c390326ade456104d4
|
[
"BSD-3-Clause"
] | null | null | null |
test_gpu.py
|
st3107/tfhelper
|
1dc66a29be387bfdf57994c390326ade456104d4
|
[
"BSD-3-Clause"
] | null | null | null |
test_gpu.py
|
st3107/tfhelper
|
1dc66a29be387bfdf57994c390326ade456104d4
|
[
"BSD-3-Clause"
] | 1
|
2021-09-08T01:16:36.000Z
|
2021-09-08T01:16:36.000Z
|
# Environment smoke test: print which Python interpreter is running and
# how many GPUs TensorFlow can detect (verifies a venv/conda + CUDA setup).
import sys
print("Hi, I am '{}'.".format(sys.executable))
# Imported after the first print so the interpreter path still appears
# even when the TensorFlow import itself fails.
import tensorflow as tf
print("I found {} GPU(s) Available.".format(len(tf.config.list_physical_devices('GPU'))))
| 21.875
| 89
| 0.702857
| 27
| 175
| 4.481481
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102857
| 175
| 7
| 90
| 25
| 0.770701
| 0
| 0
| 0
| 0
| 0
| 0.257143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
18c6e0242a011883440e7a27ce06af76db363323
| 8,201
|
py
|
Python
|
keras/legacy_tf_layers/migration_utils_test.py
|
tsheaff/keras
|
ee227dda766d769b7499a5549e8ed77b5e88105b
|
[
"Apache-2.0"
] | 37,222
|
2017-12-13T00:52:55.000Z
|
2022-03-31T22:34:35.000Z
|
keras/legacy_tf_layers/migration_utils_test.py
|
amirsadafi/keras
|
f1e9c76675981ee6683f54a3ce569212d551d12d
|
[
"Apache-2.0"
] | 7,624
|
2017-12-13T01:03:40.000Z
|
2022-03-31T23:57:24.000Z
|
keras/legacy_tf_layers/migration_utils_test.py
|
amirsadafi/keras
|
f1e9c76675981ee6683f54a3ce569212d551d12d
|
[
"Apache-2.0"
] | 14,914
|
2017-12-13T02:30:46.000Z
|
2022-03-30T14:49:16.000Z
|
"""Tests for migration_utils."""
from keras.initializers import GlorotUniform as V2GlorotUniform
from keras.legacy_tf_layers import migration_utils
import tensorflow as tf
class DeterministicRandomTestToolTest(tf.test.TestCase):
  """Tests for `migration_utils.DeterministicRandomTestTool`."""

  def test_constant_mode_no_seed(self):
    """Test random tensor generation consistency in constant mode.

    Verify that the random tensors generated without using a seed are
    consistent between graph and eager mode.
    """
    # Generate three random tensors to show how the stateful random number
    # generation and glorot_uniform_initializer match between sessions and
    # eager execution.
    random_tool = migration_utils.DeterministicRandomTestTool()
    with random_tool.scope():
      graph = tf.Graph()
      with graph.as_default(), tf.compat.v1.Session(graph=graph) as sess:
        a = tf.compat.v1.random.uniform(shape=(3, 1))
        # Adding additional computation/ops to the graph and ensuring
        # consistent random number generation.
        a = a * 3
        b = tf.compat.v1.random.uniform(shape=(3, 3))
        b = b * 3
        c = tf.compat.v1.random.uniform(shape=(3, 3))
        c = c * 3
        d = tf.compat.v1.glorot_uniform_initializer()(
            shape=(6, 6), dtype=tf.float32)
        graph_a, graph_b, graph_c, graph_d = sess.run([a, b, c, d])
      a = tf.compat.v2.random.uniform(shape=(3, 1))
      a = a * 3
      b = tf.compat.v2.random.uniform(shape=(3, 3))
      b = b * 3
      c = tf.compat.v2.random.uniform(shape=(3, 3))
      c = c * 3
      d = V2GlorotUniform()(shape=(6, 6), dtype=tf.float32)
      # Validate that the generated random tensors match.
      self.assertAllClose(graph_a, a)
      self.assertAllClose(graph_b, b)
      self.assertAllClose(graph_c, c)
      self.assertAllClose(graph_d, d)
      # In constant mode, because b and c were generated with the same seed
      # within the same scope and have the same shape, they have exactly
      # the same values; likewise for graph_b and graph_c.
      self.assertAllClose(b, c)
      self.assertAllClose(graph_b, graph_c)

  def test_constant_mode_seed_argument(self):
    """Test random tensor generation consistency in constant mode.

    Verify that the random tensors generated by setting the seed via the
    op arguments are consistent between graph and eager mode.
    """
    random_tool = migration_utils.DeterministicRandomTestTool()
    with random_tool.scope():
      graph = tf.Graph()
      with graph.as_default(), tf.compat.v1.Session(graph=graph) as sess:
        # Adding additional computation/ops to the graph and ensuring
        # consistent random number generation.
        a = tf.compat.v1.random.uniform(shape=(3, 1), seed=1234)
        a = a * 3
        b = tf.compat.v1.random.uniform(shape=(3, 3), seed=1234)
        b = b * 3
        c = tf.compat.v1.glorot_uniform_initializer(seed=1234)(
            shape=(6, 6), dtype=tf.float32)
        graph_a, graph_b, graph_c = sess.run([a, b, c])
      a = tf.compat.v2.random.uniform(shape=(3, 1), seed=1234)
      a = a * 3
      b = tf.compat.v2.random.uniform(shape=(3, 3), seed=1234)
      b = b * 3
      c = V2GlorotUniform(seed=1234)(shape=(6, 6), dtype=tf.float32)
      # Validate that the generated random tensors match.
      self.assertAllClose(graph_a, a)
      self.assertAllClose(graph_b, b)
      self.assertAllClose(graph_c, c)

  def test_num_rand_ops(self):
    """Test random tensor generation consistency in num_random_ops mode.

    Verify that the random tensors generated without using a seed are
    consistent between graph and eager mode, and that tensors generated by
    different random ops in the same scope differ from each other.
    """
    random_tool = migration_utils.DeterministicRandomTestTool(
        mode="num_random_ops")
    with random_tool.scope():
      graph = tf.Graph()
      with graph.as_default(), tf.compat.v1.Session(graph=graph) as sess:
        # Adding additional computation/ops to the graph and ensuring
        # consistent random number generation.
        a = tf.compat.v1.random.uniform(shape=(3, 1))
        a = a * 3
        b = tf.compat.v1.random.uniform(shape=(3, 3))
        b = b * 3
        c = tf.compat.v1.random.uniform(shape=(3, 3))
        c = c * 3
        d = tf.compat.v1.glorot_uniform_initializer()(
            shape=(6, 6), dtype=tf.float32)
        graph_a, graph_b, graph_c, graph_d = sess.run([a, b, c, d])
    # A fresh tool resets the operation counter, so eager execution replays
    # the same per-op seeds that the graph run above consumed.
    random_tool = migration_utils.DeterministicRandomTestTool(
        mode="num_random_ops")
    with random_tool.scope():
      a = tf.compat.v2.random.uniform(shape=(3, 1))
      a = a * 3
      b = tf.compat.v2.random.uniform(shape=(3, 3))
      b = b * 3
      c = tf.compat.v2.random.uniform(shape=(3, 3))
      c = c * 3
      d = V2GlorotUniform()(shape=(6, 6), dtype=tf.float32)
      # Validate that the generated random tensors match.
      self.assertAllClose(graph_a, a)
      self.assertAllClose(graph_b, b)
      self.assertAllClose(graph_c, c)
      self.assertAllClose(graph_d, d)
      # Unlike constant mode, each random op consumes a new seed here, so
      # tensors from different ops differ even with identical shapes.
      self.assertNotAllClose(b, c)
      self.assertNotAllClose(graph_b, graph_c)

  def test_num_rand_ops_program_order(self):
    """Test random tensor generation consistency in num_random_ops mode.

    Validate that in this mode random number generation is sensitive to
    program order, so tensors generated in a different order should not
    match.
    """
    random_tool = migration_utils.DeterministicRandomTestTool(
        mode="num_random_ops")
    with random_tool.scope():
      a = tf.random.uniform(shape=(3, 1))
      # Adding additional computation/ops to the graph and ensuring
      # consistent random number generation.
      a = a * 3
      b = tf.random.uniform(shape=(3, 3))
      b = b * 3
    random_tool = migration_utils.DeterministicRandomTestTool(
        mode="num_random_ops")
    with random_tool.scope():
      # Same ops as above but issued in the opposite order.
      b_prime = tf.random.uniform(shape=(3, 3))
      b_prime = b_prime * 3
      a_prime = tf.random.uniform(shape=(3, 1))
      a_prime = a_prime * 3
      # Validate that the tensors are different.
      self.assertNotAllClose(a, a_prime)
      self.assertNotAllClose(b, b_prime)

  def test_num_rand_ops_operation_seed(self):
    """Test random tensor generation consistency in num_random_ops mode.

    Validate that random number generation matches across two different
    program orders when the operation seed is set explicitly.
    """
    random_tool = migration_utils.DeterministicRandomTestTool(
        mode="num_random_ops")
    with random_tool.scope():
      # operation seed = 0
      a = tf.random.uniform(shape=(3, 1))
      a = a * 3
      # operation seed = 1
      b = tf.random.uniform(shape=(3, 3))
      b = b * 3
    random_tool = migration_utils.DeterministicRandomTestTool(
        mode="num_random_ops")
    with random_tool.scope():
      # Reversed program order, but the explicit operation seeds make the
      # results line up with the first run.
      random_tool.operation_seed = 1
      b_prime = tf.random.uniform(shape=(3, 3))
      b_prime = b_prime * 3
      random_tool.operation_seed = 0
      a_prime = tf.random.uniform(shape=(3, 1))
      a_prime = a_prime * 3
      self.assertAllClose(a, a_prime)
      self.assertAllClose(b, b_prime)

  def test_num_rand_ops_disallow_repeated_ops_seed(self):
    """Test random tensor generation consistency in num_random_ops mode.

    Validate that DeterministicRandomTestTool disallows reusing an
    already-used operation seed.
    """
    random_tool = migration_utils.DeterministicRandomTestTool(
        mode="num_random_ops")
    with random_tool.scope():
      random_tool.operation_seed = 1
      b_prime = tf.random.uniform(shape=(3, 3))
      b_prime = b_prime * 3
      random_tool.operation_seed = 0
      a_prime = tf.random.uniform(shape=(3, 1))
      a_prime = a_prime * 3
      # Seeds 0 and 1 were both consumed above, so the next random op —
      # which would reuse one of them — must raise. assertRaises replaces
      # the original raise/except dance and its dead `error_raised` /
      # unused `c` variables.
      with self.assertRaises(ValueError):
        tf.random.uniform(shape=(3, 1))
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
  tf.test.main()
| 37.967593
| 80
| 0.671138
| 1,153
| 8,201
| 4.631396
| 0.129228
| 0.06573
| 0.091011
| 0.096067
| 0.749813
| 0.745506
| 0.724719
| 0.722285
| 0.676779
| 0.672659
| 0
| 0.02532
| 0.229484
| 8,201
| 215
| 81
| 38.144186
| 0.81975
| 0.275698
| 0
| 0.746377
| 0
| 0
| 0.03484
| 0
| 0
| 0
| 0
| 0
| 0.144928
| 1
| 0.043478
| false
| 0
| 0.021739
| 0
| 0.072464
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
18d3ff6d51f49903b02ad3465cc97f06dcee4c77
| 92
|
py
|
Python
|
data/typing/pandas.core.groupby.grouper.py
|
vfdev-5/python-record-api
|
006faf0bba9cd4cb55fbacc13d2bbda365f5bf0b
|
[
"MIT"
] | 67
|
2020-08-17T11:53:26.000Z
|
2021-11-08T20:16:06.000Z
|
data/typing/pandas.core.groupby.grouper.py
|
vfdev-5/python-record-api
|
006faf0bba9cd4cb55fbacc13d2bbda365f5bf0b
|
[
"MIT"
] | 36
|
2020-08-17T11:09:51.000Z
|
2021-12-15T18:09:47.000Z
|
data/typing/pandas.core.groupby.grouper.py
|
pydata-apis/python-api-record
|
684cffbbb6dc6e81f9de4e02619c8b0ebc557b2b
|
[
"MIT"
] | 7
|
2020-08-19T05:06:47.000Z
|
2020-11-04T05:10:38.000Z
|
from typing import *
class Grouper:
    """Typing stub for ``pandas.core.groupby.grouper.Grouper``.

    Auto-generated by the API-recording tooling; lists only the members
    that downstream libraries were observed to use.
    """
    # usage.dask: 1  (observed once, via dask)
    __module__: ClassVar[object]
| 11.5
| 32
| 0.673913
| 11
| 92
| 5.272727
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 0.23913
| 92
| 7
| 33
| 13.142857
| 0.814286
| 0.141304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
18e968d4ba704763ae2193b9b5e8bb0a3df33be1
| 23,271
|
py
|
Python
|
code/python/ProcuretoPayProvisioning/v1/fds/sdk/ProcuretoPayProvisioning/api/user_management_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 6
|
2022-02-07T16:34:18.000Z
|
2022-03-30T08:04:57.000Z
|
code/python/ProcuretoPayProvisioning/v1/fds/sdk/ProcuretoPayProvisioning/api/user_management_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 2
|
2022-02-07T05:25:57.000Z
|
2022-03-07T14:18:04.000Z
|
code/python/ProcuretoPayProvisioning/v1/fds/sdk/ProcuretoPayProvisioning/api/user_management_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | null | null | null |
"""
FactSet Procure to Pay API
Allows for Provisioning and Entitlement of FactSet accounts. Authentication is provided via FactSet's [API Key System](https://developer.factset.com/authentication) Please note that the on-page \"Try it out\" features do not function. You must authorize against our API and make requests directly againt the endpoints. # noqa: E501
The version of the OpenAPI document: 1S
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.ProcuretoPayProvisioning.api_client import ApiClient, Endpoint as _Endpoint
from fds.sdk.ProcuretoPayProvisioning.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from fds.sdk.ProcuretoPayProvisioning.model.cancel_individual import CancelIndividual
from fds.sdk.ProcuretoPayProvisioning.model.create_individual import CreateIndividual
from fds.sdk.ProcuretoPayProvisioning.model.get_individual import GetIndividual
from fds.sdk.ProcuretoPayProvisioning.model.inline_response202 import InlineResponse202
from fds.sdk.ProcuretoPayProvisioning.model.list_individuals import ListIndividuals
from fds.sdk.ProcuretoPayProvisioning.model.modify_individual import ModifyIndividual
class UserManagementApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    def __init__(self, api_client=None):
        # Fall back to a default-configured client when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
        # POST /cancelIndividual -- cancels a serial and all its productIds.
        self.cancel_individual_post_endpoint = _Endpoint(
            settings={
                'response_type': (InlineResponse202,),
                'auth': [
                    'FactSetApiKey',
                    'FactSetOAuth2'
                ],
                'endpoint_path': '/cancelIndividual',
                'operation_id': 'cancel_individual_post',
                'http_method': 'POST',
                'servers': [
                    {
                        'url': "https://api.factset.com/procuretopay/provisioning/",
                        'description': "No description provided",
                    },
                ]
            },
            params_map={
                'all': [
                    'cancel_individual',
                ],
                'required': [],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'cancel_individual':
                        (CancelIndividual,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'cancel_individual': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json; charset=utf-8',
                    'text/plain'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /createIndividual -- provisions a new individual.
        self.create_individual_post_endpoint = _Endpoint(
            settings={
                'response_type': (InlineResponse202,),
                'auth': [
                    'FactSetApiKey',
                    'FactSetOAuth2'
                ],
                'endpoint_path': '/createIndividual',
                'operation_id': 'create_individual_post',
                'http_method': 'POST',
                'servers': [
                    {
                        'url': "https://api.factset.com/procuretopay/provisioning/",
                        'description': "No description provided",
                    },
                ]
            },
            params_map={
                'all': [
                    'create_individual',
                ],
                'required': [],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'create_individual':
                        (CreateIndividual,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'create_individual': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json; charset=utf-8',
                    'text/plain'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # GET /getIndividual -- looks a single individual up by uniqueid.
        self.get_individual_get_endpoint = _Endpoint(
            settings={
                'response_type': (GetIndividual,),
                'auth': [
                    'FactSetApiKey',
                    'FactSetOAuth2'
                ],
                'endpoint_path': '/getIndividual',
                'operation_id': 'get_individual_get',
                'http_method': 'GET',
                'servers': [
                    {
                        'url': "https://api.factset.com/procuretopay/provisioning/",
                        'description': "No description provided",
                    },
                ]
            },
            params_map={
                'all': [
                    'uniqueid',
                ],
                'required': [
                    'uniqueid',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'uniqueid':
                        (str,),
                },
                'attribute_map': {
                    'uniqueid': 'uniqueid',
                },
                'location_map': {
                    'uniqueid': 'query',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json; charset=utf-8',
                    'text/plain'
                ],
                'content_type': [],
            },
            api_client=api_client
        )
        # GET /listIndividuals -- lists every individual, optionally with productIds.
        self.list_individuals_get_endpoint = _Endpoint(
            settings={
                'response_type': (ListIndividuals,),
                'auth': [
                    'FactSetApiKey',
                    'FactSetOAuth2'
                ],
                'endpoint_path': '/listIndividuals',
                'operation_id': 'list_individuals_get',
                'http_method': 'GET',
                'servers': [
                    {
                        'url': "https://api.factset.com/procuretopay/provisioning/",
                        'description': "No description provided",
                    },
                ]
            },
            params_map={
                'all': [
                    'include_product_ids',
                ],
                'required': [],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'include_product_ids':
                        (bool,),
                },
                'attribute_map': {
                    'include_product_ids': 'includeProductIds',
                },
                'location_map': {
                    'include_product_ids': 'query',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json; charset=utf-8',
                    'text/plain'
                ],
                'content_type': [],
            },
            api_client=api_client
        )
        # POST /modifyIndividual -- updates attributes of an existing individual.
        self.modify_individual_post_endpoint = _Endpoint(
            settings={
                'response_type': (InlineResponse202,),
                'auth': [
                    'FactSetApiKey',
                    'FactSetOAuth2'
                ],
                'endpoint_path': '/modifyIndividual',
                'operation_id': 'modify_individual_post',
                'http_method': 'POST',
                'servers': [
                    {
                        'url': "https://api.factset.com/procuretopay/provisioning/",
                        'description': "No description provided",
                    },
                ]
            },
            params_map={
                'all': [
                    'modify_individual',
                ],
                'required': [],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'modify_individual':
                        (ModifyIndividual,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'modify_individual': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json; charset=utf-8',
                    'text/plain'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
    def cancel_individual_post(
        self,
        **kwargs
    ):
        """Cancels an individual's serial and all productIds  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.cancel_individual_post(async_req=True)
        >>> result = thread.get()
        Keyword Args:
            cancel_individual (CancelIndividual): [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously
        Returns:
            InlineResponse202
            If the method is called asynchronously, returns the request
            thread.
        """
        # Populate defaults for the generated request-handling options.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        return self.cancel_individual_post_endpoint.call_with_http_info(**kwargs)
    def create_individual_post(
        self,
        **kwargs
    ):
        """Provisions an individual for FactSet  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_individual_post(async_req=True)
        >>> result = thread.get()
        Keyword Args:
            create_individual (CreateIndividual): [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously
        Returns:
            InlineResponse202
            If the method is called asynchronously, returns the request
            thread.
        """
        # Populate defaults for the generated request-handling options.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        return self.create_individual_post_endpoint.call_with_http_info(**kwargs)
    def get_individual_get(
        self,
        uniqueid,
        **kwargs
    ):
        """Returns an individual's details by uniqueId  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_individual_get(uniqueid, async_req=True)
        >>> result = thread.get()
        Args:
            uniqueid (str): uniqueId to query
        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously
        Returns:
            GetIndividual
            If the method is called asynchronously, returns the request
            thread.
        """
        # Populate defaults for the generated request-handling options.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        # The only positional argument is forwarded through kwargs.
        kwargs['uniqueid'] = \
            uniqueid
        return self.get_individual_get_endpoint.call_with_http_info(**kwargs)
    def list_individuals_get(
        self,
        **kwargs
    ):
        """Lists all individuals with details at all locations.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_individuals_get(async_req=True)
        >>> result = thread.get()
        Keyword Args:
            include_product_ids (bool): <br>Optional, if =TRUE will return additional product array per object with all productIds for all returned individuals.</br>. [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously
        Returns:
            ListIndividuals
            If the method is called asynchronously, returns the request
            thread.
        """
        # Populate defaults for the generated request-handling options.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        return self.list_individuals_get_endpoint.call_with_http_info(**kwargs)
    def modify_individual_post(
        self,
        **kwargs
    ):
        """Modifies an individual's attributes as determined by the uniqueId in the body of the request. Please note that the uniqueId may not be changed. Fields not changing may be passed as NULL but never empty.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.modify_individual_post(async_req=True)
        >>> result = thread.get()
        Keyword Args:
            modify_individual (ModifyIndividual): [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously
        Returns:
            InlineResponse202
            If the method is called asynchronously, returns the request
            thread.
        """
        # Populate defaults for the generated request-handling options.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        return self.modify_individual_post_endpoint.call_with_http_info(**kwargs)
| 36.474922
| 347
| 0.505307
| 2,066
| 23,271
| 5.46273
| 0.120039
| 0.027911
| 0.023037
| 0.023923
| 0.7846
| 0.742513
| 0.733209
| 0.733209
| 0.719564
| 0.711235
| 0
| 0.004891
| 0.411284
| 23,271
| 637
| 348
| 36.532182
| 0.818905
| 0.345795
| 0
| 0.575406
| 0
| 0
| 0.244637
| 0.028051
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013921
| false
| 0
| 0.023202
| 0
| 0.051044
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7a0b795222ec5cabbeb0d8fe279c339c5cac5d73
| 3,058
|
py
|
Python
|
tests/predictors/test_state_network.py
|
fredshentu/public_model_based_controller
|
9301699bc56aa49ba5c699f7d5be299046a8aa0c
|
[
"MIT"
] | null | null | null |
tests/predictors/test_state_network.py
|
fredshentu/public_model_based_controller
|
9301699bc56aa49ba5c699f7d5be299046a8aa0c
|
[
"MIT"
] | null | null | null |
tests/predictors/test_state_network.py
|
fredshentu/public_model_based_controller
|
9301699bc56aa49ba5c699f7d5be299046a8aa0c
|
[
"MIT"
] | null | null | null |
import numpy as np
import tensorflow as tf
from railrl.misc.tf_test_case import TFTestCase
from railrl.predictors.mlp_state_network import MlpStateNetwork
class TestStateNetwork(TFTestCase):
    """Tests for MlpStateNetwork parameter copying, duplication and weight tying.

    Each test builds two networks, feeds the same random observation to both,
    and checks whether the outputs agree before/after parameter sharing.
    (The original three tests duplicated this scaffolding; it is factored
    into private helpers here.)
    """

    # Dimensions shared by every test case.
    _OBS_DIM = 7
    _OUTPUT_DIM = 3

    def _make_network(self, scope):
        """Return a fresh MlpStateNetwork under the given variable scope."""
        return MlpStateNetwork(name_or_scope=scope,
                               observation_dim=self._OBS_DIM,
                               output_dim=self._OUTPUT_DIM)

    def _forward_both(self, net1, net2, observation):
        """Run both networks on the same observation; return (out1, out2)."""
        out1 = self.sess.run(net1.output, {net1.observation_input: observation})
        out2 = self.sess.run(net2.output, {net2.observation_input: observation})
        return out1, out2

    def test_set_and_get_params(self):
        """set_param_values(get_param_values()) must make outputs identical."""
        net1 = self._make_network("qf_a")
        net2 = self._make_network("qf_b")
        observation = np.random.rand(1, self._OBS_DIM)
        self.sess.run(tf.global_variables_initializer())
        out1, out2 = self._forward_both(net1, net2, observation)
        # Independently initialized networks should (almost surely) disagree.
        self.assertFalse((out1 == out2).all())
        net2.set_param_values(net1.get_param_values())
        out1, out2 = self._forward_both(net1, net2, observation)
        self.assertTrue((out1 == out2).all())

    def test_copy(self):
        """get_copy creates a distinct network that can adopt the source params."""
        net1 = self._make_network("qf_a")
        self.sess.run(tf.global_variables_initializer())
        net2 = net1.get_copy(name_or_scope="qf_b")
        observation = np.random.rand(1, self._OBS_DIM)
        # Initialize again so the copy's freshly created variables are defined.
        self.sess.run(tf.global_variables_initializer())
        out1, out2 = self._forward_both(net1, net2, observation)
        self.assertFalse((out1 == out2).all())
        net2.set_param_values(net1.get_param_values())
        out1, out2 = self._forward_both(net1, net2, observation)
        self.assertTrue((out1 == out2).all())

    def test_get_weight_tied_copy(self):
        """A weight-tied copy shares parameters and therefore outputs."""
        net1 = self._make_network("qf_a")
        self.sess.run(tf.global_variables_initializer())
        net2_observation_input = tf.placeholder(tf.float32,
                                                [None, self._OBS_DIM])
        net2 = net1.get_weight_tied_copy(
            observation_input=net2_observation_input
        )
        # Tied copies must report the exact same internal parameter list.
        self.assertEqual(net1.get_params_internal(),
                         net2.get_params_internal())
        observation = np.random.rand(1, self._OBS_DIM)
        out1, out2 = self._forward_both(net1, net2, observation)
        self.assertTrue((out1 == out2).all())
| 31.525773
| 76
| 0.578483
| 373
| 3,058
| 4.461126
| 0.182306
| 0.067308
| 0.092548
| 0.039063
| 0.75601
| 0.730168
| 0.730168
| 0.730168
| 0.707332
| 0.707332
| 0
| 0.039942
| 0.320471
| 3,058
| 96
| 77
| 31.854167
| 0.760828
| 0
| 0
| 0.697368
| 0
| 0
| 0.006545
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 1
| 0.039474
| false
| 0
| 0.052632
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e1ce423afc0399644fdb124c096165d86d3d7e8e
| 169
|
py
|
Python
|
hackerearth/ML/will_bill_solve_it/count_rows.py
|
akshaynagpal/competitive-programming
|
0a54f43e3e0f2135c9c952400c5a628244b667d1
|
[
"MIT"
] | null | null | null |
hackerearth/ML/will_bill_solve_it/count_rows.py
|
akshaynagpal/competitive-programming
|
0a54f43e3e0f2135c9c952400c5a628244b667d1
|
[
"MIT"
] | null | null | null |
hackerearth/ML/will_bill_solve_it/count_rows.py
|
akshaynagpal/competitive-programming
|
0a54f43e3e0f2135c9c952400c5a628244b667d1
|
[
"MIT"
] | null | null | null |
"""Count and print the number of rows in submissions.csv (header included)."""
import csv

# NOTE(review): the original was Python-2-only -- it opened the file in 'rb'
# and used the `print count` statement, both of which fail on Python 3.
# csv.reader requires text mode with newline='' on Python 3.
with open('submissions.csv', 'r', newline='') as count_file:
    csv_reader = csv.reader(count_file)
    # One unit per row; avoids the manual counter loop.
    count = sum(1 for _ in csv_reader)
print(count)
| 21.125
| 49
| 0.674556
| 27
| 169
| 4.074074
| 0.592593
| 0.245455
| 0.254545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015267
| 0.224852
| 169
| 8
| 50
| 21.125
| 0.824427
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.142857
| null | null | 0.142857
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
beca8624688945bd5fe99a61802b43d4f974bdad
| 44
|
py
|
Python
|
Sergeant-RANK/DAY-2/84A.py
|
rohansaini886/Peer-Programming-Hub-CP-Winter_Camp
|
d27fb6aa7e726e6d2cb95270c9e644d38d64dd1c
|
[
"MIT"
] | 2
|
2021-12-09T18:07:46.000Z
|
2022-01-26T16:51:18.000Z
|
Sergeant-RANK/DAY-2/84A.py
|
rohansaini886/Peer-Programming-Hub-CP-Winter_Camp
|
d27fb6aa7e726e6d2cb95270c9e644d38d64dd1c
|
[
"MIT"
] | null | null | null |
Sergeant-RANK/DAY-2/84A.py
|
rohansaini886/Peer-Programming-Hub-CP-Winter_Camp
|
d27fb6aa7e726e6d2cb95270c9e644d38d64dd1c
|
[
"MIT"
] | null | null | null |
# Read a single integer n and print 2*n - floor(n / 2).
n = int(input())
result = n * 2 - n // 2
print(result)
| 14.666667
| 26
| 0.409091
| 8
| 44
| 2.25
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.25
| 44
| 2
| 27
| 22
| 0.484848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
bed1cd7695ec228dc9e7397bb404bf1611dac433
| 135
|
py
|
Python
|
python/odd_dolfinx/__init__.py
|
IgorBaratta/odd_dolfinx
|
e3ab8fb0c1100a2723e895451f903cfd70a8919b
|
[
"MIT"
] | null | null | null |
python/odd_dolfinx/__init__.py
|
IgorBaratta/odd_dolfinx
|
e3ab8fb0c1100a2723e895451f903cfd70a8919b
|
[
"MIT"
] | null | null | null |
python/odd_dolfinx/__init__.py
|
IgorBaratta/odd_dolfinx
|
e3ab8fb0c1100a2723e895451f903cfd70a8919b
|
[
"MIT"
] | null | null | null |
# `la` and `create_pum` are not referenced below -- presumably intentional
# re-exports for package users (TODO confirm).
from dolfinx.cpp import la
import dolfinx.cpp
from odd_dolfinx.utils import create_pum
# Convenience alias so callers can write `odd_dolfinx.ScatterMode`.
ScatterMode = dolfinx.cpp.common.ScatterMode
| 16.875
| 44
| 0.82963
| 20
| 135
| 5.5
| 0.55
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118519
| 135
| 7
| 45
| 19.285714
| 0.92437
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bed2e540138565d5b3bb205687612477c8ecb74d
| 53
|
py
|
Python
|
basic_find/__main__.py
|
arcseldon/basic_find
|
9318124a132a4e25a5a0ddbf6b62f76ea3adb379
|
[
"MIT"
] | null | null | null |
basic_find/__main__.py
|
arcseldon/basic_find
|
9318124a132a4e25a5a0ddbf6b62f76ea3adb379
|
[
"MIT"
] | 1
|
2021-11-15T17:48:22.000Z
|
2021-11-15T17:48:22.000Z
|
basic_find/__main__.py
|
arcseldon/basic_find
|
9318124a132a4e25a5a0ddbf6b62f76ea3adb379
|
[
"MIT"
] | 1
|
2019-12-18T00:18:02.000Z
|
2019-12-18T00:18:02.000Z
|
"""Executable entry point for ``python -m basic_find``."""
from basic_find import basic_find

# Guard the call so merely importing this module does not launch the CLI.
# (Running via `python -m basic_find` still sets __name__ to "__main__".)
if __name__ == "__main__":
    basic_find.main()
| 13.25
| 33
| 0.830189
| 9
| 53
| 4.555556
| 0.555556
| 0.658537
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113208
| 53
| 3
| 34
| 17.666667
| 0.87234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bee0c6a1bb5fe44cfe24c644a82c1ba6446b8f5b
| 31
|
py
|
Python
|
src/spacel/user/__init__.py
|
mycloudandme/spacel-provision
|
900b8ada0017f727163c5c2ae464e17d747ba0e8
|
[
"MIT"
] | 2
|
2016-05-18T11:10:27.000Z
|
2016-05-18T13:25:04.000Z
|
src/spacel/user/__init__.py
|
mycloudandme/spacel-provision
|
900b8ada0017f727163c5c2ae464e17d747ba0e8
|
[
"MIT"
] | null | null | null |
src/spacel/user/__init__.py
|
mycloudandme/spacel-provision
|
900b8ada0017f727163c5c2ae464e17d747ba0e8
|
[
"MIT"
] | null | null | null |
# Re-export SpaceSshDb so callers can import it from the package root.
from .ssh_db import SpaceSshDb
| 15.5
| 30
| 0.83871
| 5
| 31
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bee383a7eaa17114bac4c6815a137f9c3ac0b7e0
| 186
|
py
|
Python
|
tests/test_quintic_polynomials_planner.py
|
ryuichiueda/PythonRobotics
|
67d7d5c6105f6fd436435eef71651059f4ca9d54
|
[
"MIT"
] | 1
|
2021-09-14T18:08:20.000Z
|
2021-09-14T18:08:20.000Z
|
tests/test_quintic_polynomials_planner.py
|
ryuichiueda/PythonRobotics
|
67d7d5c6105f6fd436435eef71651059f4ca9d54
|
[
"MIT"
] | null | null | null |
tests/test_quintic_polynomials_planner.py
|
ryuichiueda/PythonRobotics
|
67d7d5c6105f6fd436435eef71651059f4ca9d54
|
[
"MIT"
] | null | null | null |
import conftest # Add root path to sys.path
from PathPlanning.QuinticPolynomialsPlanner import quintic_polynomials_planner as m
def test1():
    """Smoke test: run the quintic polynomials planner demo with animation off."""
    m.show_animation = False
    m.main()
| 23.25
| 83
| 0.774194
| 25
| 186
| 5.64
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006452
| 0.166667
| 186
| 7
| 84
| 26.571429
| 0.903226
| 0.134409
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
beec32a4d8a7ca31a7b41171372ceb0002062b4d
| 92
|
py
|
Python
|
album/admin.py
|
mentix02/lottery
|
7cdffd7ea88972d7590d6ca02057502170194663
|
[
"MIT"
] | null | null | null |
album/admin.py
|
mentix02/lottery
|
7cdffd7ea88972d7590d6ca02057502170194663
|
[
"MIT"
] | null | null | null |
album/admin.py
|
mentix02/lottery
|
7cdffd7ea88972d7590d6ca02057502170194663
|
[
"MIT"
] | null | null | null |
from album.models import Album
from django.contrib import admin
# Expose Album CRUD on the default Django admin site.
admin.site.register(Album)
| 18.4
| 32
| 0.826087
| 14
| 92
| 5.428571
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 92
| 4
| 33
| 23
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bef3b1dab5b7c28269ceebba9de4b562772cc3cd
| 102
|
py
|
Python
|
src/main/__init__.py
|
Fe-Nik-S/flask-ticket-management-system
|
4e6f9a81616bb8cd2fd779d2de35d797ed6fd65b
|
[
"MIT"
] | 1
|
2020-10-27T21:20:45.000Z
|
2020-10-27T21:20:45.000Z
|
src/main/__init__.py
|
Fe-Nik-S/flask-ticket-management-system
|
4e6f9a81616bb8cd2fd779d2de35d797ed6fd65b
|
[
"MIT"
] | 2
|
2019-12-26T17:39:41.000Z
|
2020-01-06T19:53:28.000Z
|
src/main/__init__.py
|
Fe-Nik-S/ticket-management-system
|
4e6f9a81616bb8cd2fd779d2de35d797ed6fd65b
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
# Blueprint for the "main" section of the application.
bp_main = Blueprint('main', __name__)
# Imported after bp_main is defined -- presumably so handlers can import
# bp_main without a circular import (TODO confirm against handlers module).
from src.main import handlers  # noqa: E402,F401
| 10.2
| 37
| 0.754902
| 14
| 102
| 5.142857
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 102
| 9
| 38
| 11.333333
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0.04
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
8304880cda655406a5ee1a7052e3e4a6ded45708
| 135
|
py
|
Python
|
tests/flytekit/unit/cli/pyflyte/test_nested_wf/a/b/c/d/wf.py
|
aeioulisa/flytekit
|
14b3a4ced183a66d0a87c06c19f71e5a1400a6a3
|
[
"Apache-2.0"
] | null | null | null |
tests/flytekit/unit/cli/pyflyte/test_nested_wf/a/b/c/d/wf.py
|
aeioulisa/flytekit
|
14b3a4ced183a66d0a87c06c19f71e5a1400a6a3
|
[
"Apache-2.0"
] | null | null | null |
tests/flytekit/unit/cli/pyflyte/test_nested_wf/a/b/c/d/wf.py
|
aeioulisa/flytekit
|
14b3a4ced183a66d0a87c06c19f71e5a1400a6a3
|
[
"Apache-2.0"
] | null | null | null |
from flytekit import task, workflow
@task
def t(m: str) -> str:
    """Task that returns its input string unchanged."""
    return m
@workflow
def wf_id(m: str) -> str:
    """Identity workflow: passes ``m`` through the ``t`` task."""
    return t(m=m)
| 11.25
| 35
| 0.622222
| 24
| 135
| 3.458333
| 0.5
| 0.048193
| 0.168675
| 0.313253
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.244444
| 135
| 11
| 36
| 12.272727
| 0.813725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.285714
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
8314b4ace1d653166f0ed7affdcefba5e5641de7
| 297
|
py
|
Python
|
DownSampling_STD/Utterance.py
|
jingyonghou/TIMIT_STD
|
743112e79115ddc31ed3ebd7c4f7d1d361dfd7e7
|
[
"MIT"
] | 3
|
2016-12-12T07:28:39.000Z
|
2018-04-12T03:07:42.000Z
|
Encode_STD_v2/Utterance.py
|
jingyonghou/TIMIT_STD
|
743112e79115ddc31ed3ebd7c4f7d1d361dfd7e7
|
[
"MIT"
] | 2
|
2020-07-28T09:20:35.000Z
|
2020-08-02T02:56:46.000Z
|
Encode_STD_v2/Utterance.py
|
jingyonghou/TIMIT_STD
|
743112e79115ddc31ed3ebd7c4f7d1d361dfd7e7
|
[
"MIT"
] | 1
|
2020-07-27T14:24:10.000Z
|
2020-07-27T14:24:10.000Z
|
from BaseEntity import BaseEntity
class Utterance(BaseEntity):
    """Utterance-level entity: thin wrapper forwarding construction to BaseEntity."""
    def __init__(self, utterance_dir, utterance_id, feature_type, phone_type="PHN39", wav_sampling_rate=16000):
        # Bug fix: the original always forwarded the literal defaults
        # (phone_type="PHN39", wav_sampling_rate=16000) to BaseEntity,
        # silently ignoring whatever the caller passed for those two
        # parameters.  Forward the actual arguments instead.
        BaseEntity.__init__(self, utterance_dir, utterance_id, feature_type,
                            phone_type=phone_type,
                            wav_sampling_rate=wav_sampling_rate)
| 59.4
| 121
| 0.804714
| 39
| 297
| 5.615385
| 0.461538
| 0.073059
| 0.155251
| 0.182648
| 0.694064
| 0.694064
| 0.694064
| 0.694064
| 0.694064
| 0.694064
| 0
| 0.052632
| 0.104377
| 297
| 4
| 122
| 74.25
| 0.770677
| 0
| 0
| 0
| 0
| 0
| 0.03367
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
835cfa5ae135b68aeccd5a82033fbfb1278fe2c9
| 1,080
|
py
|
Python
|
homework/hw08/tests/nodots.py
|
tejashah88/cs61a-self-study
|
e32d77f751af66008ff4c69ffe0b32688b275516
|
[
"MIT"
] | 6
|
2018-09-01T15:11:11.000Z
|
2022-03-23T00:34:31.000Z
|
homeworks/hw08/tests/nodots.py
|
abalone88/cs61a_2018sp
|
59d408d0961cf71faf10b77779bfc71c0c508f0c
|
[
"MIT"
] | null | null | null |
homeworks/hw08/tests/nodots.py
|
abalone88/cs61a_2018sp
|
59d408d0961cf71faf10b77779bfc71c0c508f0c
|
[
"MIT"
] | 3
|
2020-07-25T22:03:58.000Z
|
2022-01-05T18:54:52.000Z
|
test = {
'name': 'nodots',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
scm> (nodots '(1 . 2))
(1 2)
""",
'hidden': False,
'locked': False
},
{
'code': r"""
scm> (nodots '(1 2 . 3))
(1 2 3)
""",
'hidden': False,
'locked': False
},
{
'code': r"""
scm> (nodots '((1 . 2) 3))
((1 2) 3)
""",
'hidden': False,
'locked': False
},
{
'code': r"""
scm> (nodots '(1 (2 3 . 4) . 3))
(1 (2 3 4) 3)
""",
'hidden': False,
'locked': False
},
{
'code': r"""
scm> (nodots '(1 . ((2 3 . 4) . 3)))
(1 (2 3 4) 3)
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
scm> (load 'hw08)
""",
'teardown': '',
'type': 'scheme'
}
]
}
| 18.947368
| 46
| 0.271296
| 93
| 1,080
| 3.150538
| 0.258065
| 0.068259
| 0.081911
| 0.238908
| 0.730375
| 0.730375
| 0.675768
| 0.675768
| 0.675768
| 0.675768
| 0
| 0.076621
| 0.528704
| 1,080
| 56
| 47
| 19.285714
| 0.499018
| 0
| 0
| 0.410714
| 0
| 0
| 0.480556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3602b2c0dd62c599e05bc402c0f20a2025e279fd
| 46
|
py
|
Python
|
test/circle.py
|
Jahongir2007/pymetry
|
02c8e82a188700b4213fd4a70aa66a3b5e9843b8
|
[
"MIT"
] | 1
|
2021-04-04T11:38:42.000Z
|
2021-04-04T11:38:42.000Z
|
test/circle.py
|
Jahongir2007/pymetry
|
02c8e82a188700b4213fd4a70aa66a3b5e9843b8
|
[
"MIT"
] | null | null | null |
test/circle.py
|
Jahongir2007/pymetry
|
02c8e82a188700b4213fd4a70aa66a3b5e9843b8
|
[
"MIT"
] | null | null | null |
import pymetry
# Draw a circle -- arguments are presumably (radius, color, pen size);
# confirm against the pymetry API documentation.
pymetry.circle(60, "brown", 4)
| 15.333333
| 30
| 0.73913
| 7
| 46
| 4.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 0.108696
| 46
| 2
| 31
| 23
| 0.756098
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
360fecff574161124a0ab5de64a137040125dd0b
| 10,057
|
py
|
Python
|
biosys/apps/main/tests/api/test_record_serialization.py
|
florianm/biosys
|
934d06ed805b0734f3cb9a00feec6cd81a94e512
|
[
"Apache-2.0"
] | 1
|
2020-08-24T02:44:36.000Z
|
2020-08-24T02:44:36.000Z
|
biosys/apps/main/tests/api/test_record_serialization.py
|
florianm/biosys
|
934d06ed805b0734f3cb9a00feec6cd81a94e512
|
[
"Apache-2.0"
] | 19
|
2016-09-29T01:03:18.000Z
|
2021-07-02T06:54:05.000Z
|
biosys/apps/main/tests/api/test_record_serialization.py
|
florianm/biosys
|
934d06ed805b0734f3cb9a00feec6cd81a94e512
|
[
"Apache-2.0"
] | 5
|
2018-12-20T05:36:28.000Z
|
2021-09-29T00:44:31.000Z
|
import io
import re
from os import path
from django.test import override_settings
from openpyxl import load_workbook
from django.shortcuts import reverse
from rest_framework import status
from main.tests.api import helpers
import csv
class TestFieldSelection(helpers.BaseUserTestCase):
    """
    Tests for the `fields` query parameter on the record API end-points:
    the serialized records in the response must expose only the requested
    fields, and unknown field names must be silently ignored.
    """

    def _more_setup(self):
        # Create some records with a date and a geometry (lat/long columns).
        # NOTE(review): '20018-01-24' / '20017-12-24' look like typos for
        # 2018/2017; kept unchanged because these tests only assert on the
        # returned field names, never on the date values.
        self.rows = [
            ['What', 'When', 'Latitude', 'Longitude'],
            ['a big bird', '20018-01-24', -32.0, 115.75],
            ['a chubby bat ', '20017-12-24', -33.6, 116.678],
        ]
        self.dataset = self._create_dataset_and_records_from_rows(self.rows)

    def _get_records(self, url, query_params):
        """GET `url` with `query_params`, assert HTTP 200 and a JSON list,
        and return the decoded list of records."""
        resp = self.custodian_1_client.get(url, data=query_params, format='json')
        self.assertEqual(status.HTTP_200_OK, resp.status_code)
        records = resp.json()
        self.assertIsInstance(records, list)
        return records

    def _assert_record_fields(self, records, expected_fields):
        """Assert every record is a dict exposing exactly `expected_fields`."""
        for record in records:
            self.assertIsInstance(record, dict)
            self.assertEqual(sorted(list(record.keys())), sorted(expected_fields))

    def test_only_geometry(self):
        """
        Scenario: a web map user needs only the geometry field.
        Given some records with geometry are created
        And I request a get 'dataset-record' with fields=geometry
        Then it should return only the geometry field
        """
        # records are created in setup
        url = reverse('api:dataset-records', kwargs={'pk': self.dataset.pk})
        records = self._get_records(url, {'fields': 'geometry'})
        # the header row is not a record
        self.assertEqual(len(records), len(self.rows) - 1)
        self._assert_record_fields(records, ['geometry'])

    def test_geometry_and_id(self):
        """
        Scenario: a web map user needs only the geometry field and the record id to display an edit link.
        Given some records with geometry are created
        And I request a get 'dataset-record' with fields geometry and id
        Then it should return only the geometry and the id field
        """
        # records are created in setup
        url = reverse('api:dataset-records', kwargs={'pk': self.dataset.pk})
        records = self._get_records(url, {'fields': ['geometry', 'id']})
        self.assertEqual(len(records), len(self.rows) - 1)
        self._assert_record_fields(records, ['geometry', 'id'])

    def test_geometry_and_id_record_end_point(self):
        """
        Same as above but we hit the GET /records instead of GET/dataset/{pk}/records
        Scenario: a web map user needs only the geometry field and the record id to display an edit link.
        Given some records with geometry are created
        And I request a get 'record' with fields geometry and id
        Then it should return only the geometry and the id field
        """
        # records are created in setup
        query_params = {'fields': ['geometry', 'id']}
        expected_fields = ['geometry', 'id']
        records = self._get_records(reverse('api:record-list'), query_params)
        self.assertEqual(len(records), len(self.rows) - 1)
        self._assert_record_fields(records, expected_fields)
        client = self.custodian_1_client
        for record in records:
            # request record individually: the detail end-point must honour
            # the same `fields` restriction
            url = reverse('api:record-detail', kwargs={'pk': record.get('id')})
            resp = client.get(url, data=query_params, format='json')
            self.assertEqual(status.HTTP_200_OK, resp.status_code)
            self.assertEqual(sorted(list(resp.json().keys())), sorted(expected_fields))

    def test_not_existing_field(self):
        """
        Scenario: asking for a field that doesn't exist should not return an error but empty records
        Given some records with geometry are created
        And I request a get 'record' with a field 'field_with_typo'
        Then it should be successful
        And return records with no field
        """
        # records are created in setup
        records = self._get_records(reverse('api:record-list'),
                                    {'fields': ['field_with_typo']})
        self._assert_record_fields(records, [])

    def test_one_not_existing_field(self):
        """
        Scenario: asking for a field that exists and one that doesn't should not return an error but the valid field
        Given some records with geometry are created
        And I request a get 'record' with a field 'geometry' and a field 'field_with_typo'
        Then it should be successful
        And return records with the geometry field
        """
        # records are created in setup
        records = self._get_records(reverse('api:record-list'),
                                    {'fields': ['geometry', 'field_with_typo']})
        self._assert_record_fields(records, ['geometry'])
class TestExcelFormat(helpers.BaseUserTestCase):
    """Export of a dataset's records as an Excel (xlsx) attachment."""

    @override_settings(EXPORTER_CLASS='main.api.exporters.DefaultExporter')
    def test_happy_path(self):
        expected_rows = [
            ['What', 'When', 'Latitude', 'Longitude'],
            ['a big bird in Cottesloe', '20018-01-24', -32.0, 115.75],
            ['a chubby bat somewhere', '20017-12-24', -33.6, 116.678],
            ['something in the null island', '2018-05-25', 0, 0]
        ]
        dataset = self._create_dataset_and_records_from_rows(expected_rows)
        # ask for every record of the dataset, rendered as xlsx
        response = self.custodian_1_client.get(reverse('api:record-list'), {
            'dataset__id': dataset.pk,
            'output': 'xlsx'
        })
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.get('content-type'),
                         'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
        # the disposition header should look like:
        # 'attachment; filename=something.xlsx'
        disposition = response.get('content-disposition')
        match = re.match('attachment; filename=(.+)', disposition)
        self.assertIsNotNone(match)
        _, extension = path.splitext(match.group(1))
        self.assertEqual(extension, '.xlsx')
        # load the workbook straight from the response body
        workbook = load_workbook(io.BytesIO(response.content), read_only=True)
        # exactly one worksheet, named after the dataset
        self.assertEqual(1, len(workbook.sheetnames))
        self.assertEqual(workbook.sheetnames[0], dataset.name)
        worksheet = workbook[dataset.name]
        actual_rows = list(worksheet.rows)
        # same number of rows, then compare cell values row by row
        self.assertEqual(len(actual_rows), len(expected_rows))
        for expected_values, sheet_row in zip(expected_rows, actual_rows):
            self.assertEqual(expected_values, [cell.value for cell in sheet_row])
class TestCSVFormat(helpers.BaseUserTestCase):
    """Export of a dataset's records as a CSV attachment."""

    @override_settings(EXPORTER_CLASS='main.api.exporters.DefaultExporter')
    def test_happy_path(self):
        expected_rows = [
            ['What', 'When', 'Latitude', 'Longitude'],
            # Note: if you put -32.0 here the returned string will be '-32'
            ['a big bird in Cottesloe', '20018-01-24', -32, 115.75],
            ['a chubby bat somewhere', '20017-12-24', -33.6, 116.678],
            ['something in the null island', '2018-05-25', 0, 0]
        ]
        dataset = self._create_dataset_and_records_from_rows(expected_rows)
        # ask for every record of the dataset, rendered as csv
        response = self.custodian_1_client.get(reverse('api:record-list'), {
            'dataset__id': dataset.pk,
            'output': 'csv'
        })
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.get('content-type'),
                         'text/csv')
        # the disposition header should look like:
        # 'attachment; filename=something.csv'
        match = re.match('attachment; filename=(.+)',
                         response.get('content-disposition'))
        self.assertIsNotNone(match)
        _, extension = path.splitext(match.group(1))
        self.assertEqual(extension, '.csv')
        # parse the body and compare it row by row against the fixture,
        # stringifying the expected values the way csv renders them
        csv_rows = csv.reader(io.StringIO(response.content.decode('utf-8')), dialect='excel')
        for fixture_row, parsed_row in zip(expected_rows, csv_rows):
            self.assertEqual(parsed_row, [str(v) for v in fixture_row])
| 42.614407
| 121
| 0.62812
| 1,225
| 10,057
| 5.01551
| 0.173061
| 0.063477
| 0.016927
| 0.020833
| 0.786621
| 0.780111
| 0.775065
| 0.768555
| 0.755697
| 0.732422
| 0
| 0.02286
| 0.269265
| 10,057
| 235
| 122
| 42.795745
| 0.813172
| 0.197474
| 0
| 0.623457
| 0
| 0
| 0.120282
| 0.017073
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.049383
| false
| 0
| 0.055556
| 0
| 0.123457
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.