hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fadbe0ce61bc89cbf5ace8d4fef611475711ae16
| 47
|
py
|
Python
|
config/settings/__init__.py
|
hqrrylyu/drf-posts-assignment
|
6022c73d7c72400e9fb0db5b8e0daa892b4f109e
|
[
"MIT"
] | null | null | null |
config/settings/__init__.py
|
hqrrylyu/drf-posts-assignment
|
6022c73d7c72400e9fb0db5b8e0daa892b4f109e
|
[
"MIT"
] | null | null | null |
config/settings/__init__.py
|
hqrrylyu/drf-posts-assignment
|
6022c73d7c72400e9fb0db5b8e0daa892b4f109e
|
[
"MIT"
] | null | null | null |
# flake8: noqa: W403
from .production import *
| 15.666667
| 25
| 0.723404
| 6
| 47
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 0.170213
| 47
| 2
| 26
| 23.5
| 0.769231
| 0.382979
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4f0e79067249d7e3b32fb33ab45a211ea4e8eb2a
| 2,701
|
py
|
Python
|
tests/test_deploy_roles.py
|
AdamMinton/looker_deployer
|
1a57809257183234b900ad49ff30ded5c26c4f2a
|
[
"Apache-2.0"
] | 4
|
2020-01-09T04:24:19.000Z
|
2020-05-18T23:42:35.000Z
|
tests/test_deploy_roles.py
|
AdamMinton/looker_deployer
|
1a57809257183234b900ad49ff30ded5c26c4f2a
|
[
"Apache-2.0"
] | 24
|
2022-01-06T00:49:57.000Z
|
2022-03-30T00:08:36.000Z
|
tests/test_deploy_roles.py
|
AdamMinton/looker_deployer
|
1a57809257183234b900ad49ff30ded5c26c4f2a
|
[
"Apache-2.0"
] | 2
|
2021-11-24T05:48:17.000Z
|
2022-01-29T22:23:19.000Z
|
from looker_deployer.commands import deploy_roles
from looker_sdk import methods, models
class mockSettings:
base_url = "taco"
class mockAuth:
settings = mockSettings()
sdk = methods.LookerSDK(mockAuth(), "bar", "baz", "bosh", "bizz")
def test_get_filtered_roles(mocker):
role_list = [
models.Role(name="Taco"),
models.Role(name="Burrito")
]
mocker.patch.object(sdk, "all_roles")
sdk.all_roles.return_value = role_list
roles = deploy_roles.get_filtered_roles(sdk)
assert roles == role_list
def test_get_filtered_roles_filter(mocker):
role_list = [
models.Role(name="Taco"),
models.Role(name="Burrito")
]
mocker.patch.object(sdk, "all_roles")
sdk.all_roles.return_value = role_list
roles = deploy_roles.get_filtered_roles(sdk, "Burrito")
assert roles == [models.Role(name="Burrito")]
def test_write_roles_new(mocker):
permission_set = models.PermissionSet(name="P1", id=1)
model_set = models.ModelSet(name="M1", id=1)
role_list = [models.WriteRole(
name="Taco", permission_set=permission_set,
model_set=model_set, permission_set_id=1, model_set_id=1)]
mocker.patch.object(sdk, "all_roles")
mocker.patch.object(sdk, "create_role")
mocker.patch.object(sdk, "all_permission_sets")
mocker.patch.object(sdk, "all_model_sets")
sdk.all_permission_sets.return_value = [models.PermissionSet(
name="P1", id=1)]
sdk.all_model_sets.return_value = [models.ModelSet(name="M1", id=1)]
deploy_roles.write_roles(role_list, sdk)
sdk.create_role.assert_called_once_with(role_list[0])
def test_write_roles_existing(mocker):
permission_set = models.PermissionSet(name="P1", id=1)
model_set = models.ModelSet(name="M1", id=1)
role_list = [models.WriteRole(name="Taco",
permission_set=permission_set,
model_set=model_set,
permission_set_id=1,
model_set_id=1)]
mocker.patch.object(sdk, "all_roles")
mocker.patch.object(sdk, "update_role")
mocker.patch.object(sdk, "all_permission_sets")
mocker.patch.object(sdk, "all_model_sets")
sdk.all_roles.return_value = [models.Role(
name="Taco", permission_set=permission_set, model_set=model_set,
permission_set_id=1, model_set_id=1, id=1)]
sdk.all_permission_sets.return_value = [models.PermissionSet(
name="P1", id=1)]
sdk.all_model_sets.return_value = [models.ModelSet(
name="M1", id=1)]
deploy_roles.write_roles(role_list, sdk)
sdk.update_role.assert_called_once_with(1, role_list[0])
| 31.406977
| 72
| 0.672714
| 364
| 2,701
| 4.700549
| 0.159341
| 0.052601
| 0.099357
| 0.116891
| 0.800117
| 0.735827
| 0.735827
| 0.735827
| 0.735827
| 0.735827
| 0
| 0.012082
| 0.203258
| 2,701
| 85
| 73
| 31.776471
| 0.782993
| 0
| 0
| 0.42623
| 0
| 0
| 0.076268
| 0
| 0
| 0
| 0
| 0
| 0.065574
| 1
| 0.065574
| false
| 0
| 0.032787
| 0
| 0.163934
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4f0e8ce8291ba87a39d6a567b55d8d4479b51e5c
| 9,242
|
py
|
Python
|
parameter_analyse/print_figure/plot_figure.py
|
lionelkusch/compare_zerlaut
|
4e22d1fdc5889fb404187bb7a48d7847759443d6
|
[
"Apache-2.0"
] | null | null | null |
parameter_analyse/print_figure/plot_figure.py
|
lionelkusch/compare_zerlaut
|
4e22d1fdc5889fb404187bb7a48d7847759443d6
|
[
"Apache-2.0"
] | null | null | null |
parameter_analyse/print_figure/plot_figure.py
|
lionelkusch/compare_zerlaut
|
4e22d1fdc5889fb404187bb7a48d7847759443d6
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from scipy.io.matlab import loadmat
import os
from print_stability import print_stability
import matplotlib.pyplot as plt
from colorline import colorline
import matplotlib as mpl
import h5py
path = os.path.dirname(__file__) + '/../analyse_dynamic/'
H_LP = True
cmap = plt.get_cmap('jet')
norm = mpl.colors.Normalize(vmin=-100, vmax=100)
limit_circle = True
b_30 = loadmat(path + '/b_30/EQ_Low/EQ_Low.mat', chars_as_strings=True, simplify_cells=True)
b_30['x'][:2] *= 1000
b_30['x'][-1] *= 1000
b_60 = loadmat(path + '/b_60/EQ_Low/EQ_Low.mat', chars_as_strings=True, simplify_cells=True)
b_60['x'][:2] *= 1000
b_60['x'][-1] *= 1000
b_0 = loadmat(path + '/EQ_High/EQ_high.mat', chars_as_strings=True, simplify_cells=True)
b_0['x'][:2] *= 1000
b_0['x'][-1] *= 1000
if H_LP:
H_high = loadmat(path + '/b_30/H/H_high.mat', chars_as_strings=True, simplify_cells=True)
H_high['x'][:2] *= 1000
H_high['x'][-3] *= 1000
# H_low = loadmat(path + '/b_30/H/H_low_h.mat', chars_as_strings=True, simplify_cells=True)
f = h5py.File(path + '/b_30/H/H_low_h.mat')
H_low={'f': np.array(f['f']).swapaxes(0,1), 'h': np.array(f['h']).swapaxes(0,1), 'v': np.array(f['v']).swapaxes(0,1), 'x': np.array(f['x']).swapaxes(0,1)}
H_low['s'] = [{'index': int(f[f['s']['index'][i][0]][0][0]),
'label': ''.join(chr(character) for character in f[f['s']['label'][i][0]]),
'msg': ''.join(chr(character) for character in f[f['s']['msg'][i][0]]),
} for i in range(f['s']['index'].shape[0])]
H_low['x'][:2] *= 1000
H_low['x'][-3] *= 1000
LP_high = loadmat(path + '/b_30/LP/LP_high.mat', chars_as_strings=True, simplify_cells=True)
LP_high['x'][:2] *= 1000
LP_high['x'][-2] *= 1000
# LP_middle_l = loadmat(path + '/b_30/LP/LP_middle_l.mat', chars_as_strings=True, simplify_cells=True)
f = h5py.File(path + '/b_30/LP/LP_low_h.mat')
LP_middle_l={'f': np.array(f['f']).swapaxes(0,1), 'h': np.array(f['h']).swapaxes(0,1), 'v': np.array(f['v']).swapaxes(0,1), 'x': np.array(f['x']).swapaxes(0,1)}
LP_middle_l['s'] = [{'index': int(f[f['s']['index'][0][i]][0][0]),
'label': ''.join(chr(character) for character in f[f['s']['label'][0][i]]),
'msg': ''.join(chr(character) for character in f[f['s']['msg'][0][i]]),
} for i in range(f['s']['index'].shape[1])]
LP_middle_l['x'][:2] *= 1000
LP_middle_l['x'][-2] *= 1000
if limit_circle:
limit_circle = loadmat(path + '/Limit_Circle/Limit_Circle.mat', chars_as_strings=True, simplify_cells=True)
min = limit_circle['x'][:6]
max = limit_circle['x'][:6]
for i in range(1, int((limit_circle['x'].shape[0] - 2) / 6)):
min = np.min([min, limit_circle['x'][i * 6:(i + 1) * 6]], axis=0)
max = np.max([max, limit_circle['x'][i * 6:(i + 1) * 6]], axis=0)
extra = limit_circle['x'][-2:]
min[:2] *= 1000
max[:2] *= 1000
extra[1] *= 1000
fig = plt.figure()
line_b_0 = print_stability(b_0['x'], b_0['f'], b_0['s'], 6, 0, color='k')
line_b_30 = print_stability(b_30['x'], b_30['f'], b_30['s'], 6, 0, color='b')
line_b_60 = print_stability(b_60['x'], b_60['f'], b_60['s'], 6, 0, color='g')
if H_LP:
for data in [H_high, H_low, LP_high, LP_middle_l]:
colorline(data['x'][6, :], data['x'][0, :], data['x'][7, :], alpha=0.5, cmap=cmap, norm=norm)
for points in data['s'][1:-1]:
if points['msg'] != 'Zero-Hopf point: neutral saddle' and points['label'] != 'BV':
plt.plot(data['x'][6, points['index'] - 1], data['x'][0, points['index'] - 1], 'mx', markersize=10.0)
plt.text(data['x'][6, points['index'] - 1], data['x'][0, points['index'] - 1], " " + points['label'])
print(points['label'], data['x'][6, points['index'] - 1], data['x'][7, points['index'] - 1],
data['x'][0, points['index'] - 1], data['x'][1, points['index'] - 1])
if limit_circle:
plt.plot(extra[1, :], max[0, :], '--', alpha=0.5, color='orange')
plt.plot(extra[1, :], min[0, :], '--', alpha=0.5, color='orange')
plt.legend([line_b_60, line_b_30, line_b_0], ['b=60', 'b=30', 'b=0'])
plt.ylim(ymax=200.0, ymin=0.0)
plt.xlim(xmax=200.0, xmin=-30.0)
plt.xlabel("external input")
plt.ylabel("firing rate of excitatory population Hz")
cax = plt.axes([0.05, 0.1, 0.9, 0.02])
cbar = fig.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap), cax=cax, orientation='horizontal', label='Some Units')
cbar.set_label('adaptation')
plt.subplots_adjust(bottom=0.22, top=0.98)
plt.figure()
line_b_0 = print_stability(b_0['x'], b_0['f'], b_0['s'], 6, 1, color='k')
line_b_30 = print_stability(b_30['x'], b_30['f'], b_30['s'], 6, 1, color='b')
line_b_60 = print_stability(b_60['x'], b_60['f'], b_60['s'], 6, 1, color='g')
if H_LP:
for data in [H_high, H_low, LP_high, LP_middle_l]:
colorline(data['x'][6, :], data['x'][1, :], data['x'][7, :], alpha=0.5, cmap=cmap, norm=norm)
for points in data['s'][1:-1]:
if points['msg'] != 'Zero-Hopf point: neutral saddle' and points['label'] != 'BV':
plt.plot(data['x'][6, points['index'] - 1], data['x'][1, points['index'] - 1], 'mx', markersize=10.0)
plt.text(data['x'][6, points['index'] - 1], data['x'][1, points['index'] - 1], " " + points['label'])
if limit_circle:
plt.plot(extra[1, :], max[1, :], '--', alpha=0.5, color='orange')
plt.plot(extra[1, :], min[1, :], '--', alpha=0.5, color='orange')
plt.legend([line_b_60, line_b_30, line_b_0], ['b=60', 'b=30', 'b=0'])
plt.ylim(ymax=200.0, ymin=0.0)
plt.xlim(xmax=200.0, xmin=-30.0)
plt.xlabel("external input")
plt.ylabel("firing rate of inhibitory population Hz")
cax = plt.axes([0.05, 0.1, 0.9, 0.02])
cbar = fig.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap), cax=cax, orientation='horizontal', label='Some Units')
cbar.set_label('adaptation')
plt.subplots_adjust(bottom=0.22, top=0.98)
plt.figure()
line_b_0 = print_stability(b_0['x'], b_0['f'], b_0['s'], 0, 1, color='k')
line_b_30 = print_stability(b_30['x'], b_30['f'], b_30['s'], 0, 1, color='b')
line_b_60 = print_stability(b_60['x'], b_60['f'], b_60['s'], 0, 1, color='g')
if H_LP:
for data in [H_high, H_low, LP_high, LP_middle_l]:
colorline(data['x'][0, :], data['x'][1, :], data['x'][7, :], alpha=0.5, cmap=cmap, norm=norm)
for points in data['s'][1:-1]:
if points['msg'] != 'Zero-Hopf point: neutral saddle' and points['label'] != 'BV':
plt.plot(data['x'][0, points['index'] - 1], data['x'][1, points['index'] - 1], 'mx', markersize=10.0)
plt.text(data['x'][0, points['index'] - 1], data['x'][1, points['index'] - 1], " " + points['label'])
if limit_circle:
plt.plot(max[0, :], max[1, :], '--', alpha=0.5, color='orange')
plt.plot(min[0, :], min[1, :], '--', alpha=0.5, color='orange')
plt.legend([line_b_60, line_b_30, line_b_0], ['b=60', 'b=30', 'b=0'])
plt.ylim(ymax=200.0, ymin=0.0)
plt.xlim(xmax=200.0, xmin=0.0)
plt.xlabel("firing rate of excitatory population Hz")
plt.ylabel("firing rate of inhibitory population Hz")
cax = plt.axes([0.05, 0.1, 0.9, 0.02])
cbar = fig.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap), cax=cax, orientation='horizontal', label='Some Units')
cbar.set_label('adaptation')
plt.subplots_adjust(bottom=0.22, top=0.98)
plt.figure()
if H_LP:
for data in [H_high, H_low, LP_high, LP_middle_l]:
colorline(data['x'][7, :], data['x'][0, :], data['x'][6, :], alpha=0.5, cmap=cmap, norm=norm)
for points in data['s'][1:-1]:
if points['msg'] != 'Zero-Hopf point: neutral saddle' and points['label'] != 'BV':
plt.plot(data['x'][7, points['index'] - 1], data['x'][0, points['index'] - 1], 'mx', markersize=10.0)
plt.text(data['x'][7, points['index'] - 1], data['x'][0, points['index'] - 1], " " + points['label'])
plt.ylim(ymax=200.0, ymin=0.0)
plt.xlim(xmax=200.0, xmin=-100.0)
plt.xlabel("adaptation")
plt.ylabel("firing rate of excitatory population Hz")
cax = plt.axes([0.05, 0.1, 0.9, 0.02])
cbar = fig.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap), cax=cax, orientation='horizontal', label='Some Units')
cbar.set_label('external input')
plt.subplots_adjust(bottom=0.22, top=0.98)
plt.figure()
if H_LP:
for data in [H_high, H_low, LP_high, LP_middle_l]:
colorline(data['x'][7, :], data['x'][1, :], data['x'][6, :], alpha=0.5, cmap=cmap, norm=norm)
for points in data['s'][1:-1]:
if points['msg'] != 'Zero-Hopf point: neutral saddle' and points['label'] != 'BV':
plt.plot(data['x'][7, points['index'] - 1], data['x'][1, points['index'] - 1], 'mx', markersize=10.0)
plt.text(data['x'][7, points['index'] - 1], data['x'][1, points['index'] - 1], " " + points['label'])
plt.ylim(ymax=200.0, ymin=0.0)
plt.xlim(xmax=200.0, xmin=-100.0)
plt.xlabel("adaptation")
plt.ylabel("firing rate of inhibitory population Hz")
cax = plt.axes([0.05, 0.1, 0.9, 0.02])
cbar = fig.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap), cax=cax, orientation='horizontal', label='Some Units')
cbar.set_label('external input')
plt.subplots_adjust(bottom=0.22, top=0.98)
plt.show()
| 51.921348
| 164
| 0.592945
| 1,631
| 9,242
| 3.228694
| 0.088289
| 0.03703
| 0.05469
| 0.039499
| 0.860046
| 0.857007
| 0.827383
| 0.820547
| 0.790353
| 0.75997
| 0
| 0.070175
| 0.161221
| 9,242
| 177
| 165
| 52.214689
| 0.609133
| 0.020558
| 0
| 0.4375
| 0
| 0
| 0.141563
| 0.010719
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.05
| 0
| 0.05
| 0.06875
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
878a1be82473776da230c9273d8aba91c8c8cdb5
| 11,401
|
py
|
Python
|
h2o-bindings/bin/custom/python/gen_kmeans.py
|
bscan/h2o-3
|
76073175b71fa68a0ddf99671558419246b95aa6
|
[
"Apache-2.0"
] | 1
|
2019-11-02T18:01:36.000Z
|
2019-11-02T18:01:36.000Z
|
h2o-bindings/bin/custom/python/gen_kmeans.py
|
mykras/h2o-3
|
c7240c507c31aaa7ab41f6a723ccc645ce116e3a
|
[
"Apache-2.0"
] | null | null | null |
h2o-bindings/bin/custom/python/gen_kmeans.py
|
mykras/h2o-3
|
c7240c507c31aaa7ab41f6a723ccc645ce116e3a
|
[
"Apache-2.0"
] | null | null | null |
doc = dict(
__class__="""Performs k-means clustering on an H2O dataset.""",
)
examples = dict(
categorical_encoding="""
>>> prostate = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/prostate/prostate.csv")
>>> predictors = ["AGE", "RACE", "DPROS", "DCAPS", "PSA", "VOL", "GLEASON"]
>>> train, valid = prostate.split_frame(ratios = [.8], seed = 1234)
>>> encoding = "one_hot_explicit"
>>> pros_km = H2OKMeansEstimator(categorical_encoding = encoding,
... seed = 1234)
>>> pros_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> pros_km.scoring_history()
""",
estimate_k="""
>>> iris = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/iris/iris_wheader.csv")
>>> iris['class'] = iris['class'].asfactor()
>>> predictors = iris.columns[:-1]
>>> train, valid = iris.split_frame(ratios = [.8], seed = 1234)
>>> iris_kmeans = H2OKMeansEstimator(k = 10,
... estimate_k = True,
... standardize = False,
... seed = 1234)
>>> iris_kmeans.train(x = predictors,
... training_frame = train,
... validation_frame=valid)
>>> iris_kmeans.scoring_history()
""",
export_checkpoints_dir="""
>>> import tempfile
>>> from os import listdir
>>> airlines = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip", destination_frame="air.hex")
>>> predictors = ["DayofMonth", "DayOfWeek"]
>>> checkpoints_dir = tempfile.mkdtemp()
>>> air_km = H2OKMeansEstimator(export_checkpoints_dir = checkpoints_dir,
... seed = 1234)
>>> air_km.train(x = predictors, training_frame = airlines)
>>> len(listdir(checkpoints_dir))
""",
fold_assignment="""
>>> ozone = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/glm_test/ozone.csv")
>>> predictors = ["radiation","temperature","wind"]
>>> train, valid = ozone.split_frame(ratios = [.8], seed = 1234)
>>> ozone_km = H2OKMeansEstimator(fold_assignment = "Random",
... nfolds = 5,
... seed = 1234)
>>> ozone_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> ozone_km.scoring_history()
""",
fold_column="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> fold_numbers = cars.kfold_column(n_folds = 5, seed = 1234)
>>> fold_numbers.set_names(["fold_numbers"])
>>> cars = cars.cbind(fold_numbers)
>>> print(cars['fold_numbers'])
>>> cars_km = H2OKMeansEstimator(seed = 1234)
>>> cars_km.train(x = predictors,
... training_frame = cars,
... fold_column = "fold_numbers")
>>> cars_km.scoring_history()
""",
ignore_const_cols="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> cars["const_1"] = 6
>>> cars["const_2"] = 7
>>> train, valid = cars.split_frame(ratios = [.8], seed = 1234)
>>> cars_km = H2OKMeansEstimator(ignore_const_cols = True,
... seed = 1234)
>>> cars_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> cars_km.scoring_history()
""",
init="""
>>> seeds = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/flow_examples/seeds_dataset.txt")
>>> predictors = seeds.columns[0:7]
>>> train, valid = seeds.split_frame(ratios = [.8], seed = 1234)
>>> seeds_km = H2OKMeansEstimator(k = 3,
... init='Furthest',
... seed = 1234)
>>> seeds_km.train(x = predictors,
... training_frame = train,
... validation_frame= valid)
>>> seeds_km.scoring_history()
""",
k="""
>>> seeds = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/flow_examples/seeds_dataset.txt")
>>> predictors = seeds.columns[0:7]
>>> train, valid = seeds.split_frame(ratios = [.8], seed = 1234)
>>> seeds_km = H2OKMeansEstimator(k = 3, seed = 1234)
>>> seeds_km.train(x = predictors,
... training_frame = train,
... validation_frame=valid)
>>> seeds_km.scoring_history()
""",
keep_cross_validation_fold_assignment="""
>>> ozone = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/glm_test/ozone.csv")
>>> predictors = ["radiation","temperature","wind"]
>>> train, valid = ozone.split_frame(ratios = [.8], seed = 1234)
>>> ozone_km = H2OKMeansEstimator(keep_cross_validation_fold_assignment = True,
... nfolds = 5,
... seed = 1234)
>>> ozone_km.train(x = predictors,
... training_frame = train)
>>> ozone_km.scoring_history()
""",
keep_cross_validation_models="""
>>> ozone = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/glm_test/ozone.csv")
>>> predictors = ["radiation","temperature","wind"]
>>> train, valid = ozone.split_frame(ratios = [.8], seed = 1234)
>>> ozone_km = H2OKMeansEstimator(keep_cross_validation_models = True,
... nfolds = 5,
... seed = 1234)
>>> ozone_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> ozone_km.scoring_history()
""",
max_iterations="""
>>> benign = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/logreg/benign.csv")
>>> predictors = ["AGMT","FNDX","HIGD","DEG","CHK",
... "AGP1","AGMN","LIV","AGLP"]
>>> train, valid = benign.split_frame(ratios = [.8], seed = 1234)
>>> benign_km = H2OKMeansEstimator(max_iterations = 50)
>>> benign_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> benign_km.scoring_history()
""",
max_runtime_secs="""
>>> benign = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/logreg/benign.csv")
>>> predictors = ["AGMT","FNDX","HIGD","DEG","CHK",
... "AGP1","AGMN","LIV","AGLP"]
>>> train, valid = benign.split_frame(ratios = [.8], seed = 1234)
>>> benign_km = H2OKMeansEstimator(max_runtime_secs = 10,
... seed = 1234)
>>> benign_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> benign_km.scoring_history()
""",
nfolds="""
>>> benign = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/logreg/benign.csv")
>>> predictors = ["AGMT","FNDX","HIGD","DEG","CHK",
... "AGP1","AGMN","LIV","AGLP"]
>>> train, valid = benign.split_frame(ratios = [.8], seed = 1234)
>>> benign_km = H2OKMeansEstimator(nfolds = 5, seed = 1234)
>>> benign_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> benign_km.scoring_history()
""",
score_each_iteration="""
>>> benign = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/logreg/benign.csv")
>>> predictors = ["AGMT","FNDX","HIGD","DEG","CHK",
... "AGP1","AGMN","LIV","AGLP"]
>>> train, valid = benign.split_frame(ratios = [.8], seed = 1234)
>>> benign_km = H2OKMeansEstimator(score_each_iteration = True,
... seed = 1234)
>>> benign_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> benign_km.scoring_history()
""",
seed="""
>>> prostate = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/prostate/prostate.csv")
>>> predictors = ["AGE", "RACE", "DPROS", "DCAPS", "PSA", "VOL", "GLEASON"]
>>> train, valid = prostate.split_frame(ratios = [.8], seed = 1234)
>>> pros_w_seed = H2OKMeansEstimator(seed = 1234)
>>> pros_w_seed.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> pros_wo_seed = H2OKMeansEstimator()
>>> pros_wo_seed.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> pros_w_seed.scoring_history()
>>> pros_wo_seed.scoring_history()
""",
standardize="""
>>> boston = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/BostonHousing.csv")
>>> predictors = boston.columns[:-1]
>>> boston['chas'] = boston['chas'].asfactor()
>>> train, valid = boston.split_frame(ratios = [.8])
>>> boston_km = H2OKMeansEstimator(standardize = True)
>>> boston_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> boston_km.scoring_history()
""",
training_frame="""
>>> prostate = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/prostate/prostate.csv")
>>> predictors = ["AGE", "RACE", "DPROS", "DCAPS",
... "PSA", "VOL", "GLEASON"]
>>> train, valid = prostate.split_frame(ratios = [.8], seed = 1234)
>>> pros_km = H2OKMeansEstimator(seed = 1234)
>>> pros_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> pros_km.scoring_history()
""",
user_points="""
>>> iris = h2o.import_file("http://h2o-public-test-data.s3.amazonaws.com/smalldata/iris/iris_wheader.csv")
>>> iris['class'] = iris['class'].asfactor()
>>> predictors = iris.columns[:-1]
>>> train, valid = iris.split_frame(ratios = [.8], seed = 1234)
>>> point1 = [4.9,3.0,1.4,0.2]
>>> point2 = [5.6,2.5,3.9,1.1]
>>> point3 = [6.5,3.0,5.2,2.0]
>>> points = h2o.H2OFrame([point1, point2, point3])
>>> iris_km = H2OKMeansEstimator(k = 3,
... user_points = points,
... seed = 1234)
>>> iris_km.train(x=predictors,
... training_frame=iris,
... validation_frame=valid)
>>> iris_kmeans.tot_withinss(valid = True)
""",
validation_frame="""
>>> prostate = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/prostate/prostate.csv")
>>> predictors = ["AGE", "RACE", "DPROS", "DCAPS",
... "PSA", "VOL", "GLEASON"]
>>> train, valid = prostate.split_frame(ratios = [.8], seed = 1234)
>>> pros_km = H2OKMeansEstimator(seed = 1234)
>>> pros_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> pros_km.scoring_history()
""",
keep_cross_validation_predictions="""
>>> prostate = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/prostate/prostate.csv")
>>> predictors = ["AGE", "RACE", "DPROS", "DCAPS",
... "PSA", "VOL", "GLEASON"]
>>> train, valid = prostate.split_frame(ratios = [.8], seed = 1234)
>>> pros_km = H2OKMeansEstimator(keep_cross_validation_predictions = True,
... nfolds = 5,
... seed = 1234)
>>> pros_km.train(x = predictors,
... training_frame = train,
... validation_frame = valid)
>>> pros_km.scoring_history()
"""
)
| 45.422311
| 149
| 0.598983
| 1,269
| 11,401
| 5.185185
| 0.12766
| 0.043769
| 0.051064
| 0.076596
| 0.774316
| 0.758815
| 0.734498
| 0.734498
| 0.734498
| 0.717933
| 0
| 0.034525
| 0.209894
| 11,401
| 250
| 150
| 45.604
| 0.695937
| 0
| 0
| 0.654618
| 0
| 0.088353
| 0.94518
| 0.206999
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.088353
| 0
| 0.088353
| 0.004016
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
879922d1e2ccd2a937f9ec864c5868db73302362
| 12,550
|
py
|
Python
|
unit/test_needs_restart.py
|
armohamm/ansible-cartridge
|
85e2d87d61d5805844378f750c9c675e531b0015
|
[
"BSD-2-Clause"
] | null | null | null |
unit/test_needs_restart.py
|
armohamm/ansible-cartridge
|
85e2d87d61d5805844378f750c9c675e531b0015
|
[
"BSD-2-Clause"
] | null | null | null |
unit/test_needs_restart.py
|
armohamm/ansible-cartridge
|
85e2d87d61d5805844378f750c9c675e531b0015
|
[
"BSD-2-Clause"
] | null | null | null |
# Hack ansible.module_utils.helpers import
import sys
import module_utils.helpers as helpers
sys.modules['ansible.module_utils.helpers'] = helpers
import os
sys.path.append(os.path.dirname(__file__))
import unittest
from parameterized import parameterized
from instance import Instance
from helpers import set_box_cfg
import itertools
from library.cartridge_needs_restart import needs_restart
def call_needs_restart(control_sock,
restarted=None,
appname=Instance.APPNAME,
instance_conf_file=Instance.INSTANCE_CONF_PATH,
conf_section_name=Instance.CONF_SECTION,
config={},
cluster_cookie=Instance.COOKIE,
cartridge_defaults={},
stateboard=False):
return needs_restart({
'restarted': restarted,
'control_sock': control_sock,
'appname': appname,
'instance_conf_file': instance_conf_file,
'conf_section_name': conf_section_name,
'cluster_cookie': cluster_cookie,
'cartridge_defaults': cartridge_defaults,
'config': config,
'stateboard': stateboard,
})
class TestNeedsRestart(unittest.TestCase):
    """Tests for the `cartridge_needs_restart` module logic.

    A mock Tarantool instance (``Instance``) is started per test and the
    module is queried through its console socket; the assertions check the
    ``changed`` flag that tells Ansible whether a restart is required.
    """
    def setUp(self):
        # Start a fresh mock instance on a throwaway console socket.
        self.cookie = 'secret'
        self.console_sock = './tmp/x.sock'
        self.instance = Instance(self.console_sock, self.cookie)
        self.instance.start()
    def test_restart_forced(self):
        """`restarted: true` always reports changed, regardless of state."""
        res = call_needs_restart(
            control_sock=self.console_sock,
            restarted=True
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertTrue(res.changed)
    def test_restart_disabled(self):
        """`restarted: false` always reports unchanged."""
        res = call_needs_restart(
            control_sock=self.console_sock,
            restarted=False
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertFalse(res.changed)
    def test_instance_not_started(self):
        """An unreachable console socket means the instance must be started."""
        # console sock doesn't exists
        self.instance.remove_file(self.console_sock)
        res = call_needs_restart(
            control_sock=self.console_sock
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertTrue(res.changed)
        # cannot connect to console sock
        # (a regular file exists at the path, so connecting fails)
        bad_socket_path = 'bad-socket-path'
        self.instance.write_file(bad_socket_path)
        res = call_needs_restart(
            control_sock=bad_socket_path
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertTrue(res.changed)
    def test_code_was_updated(self):
        """App code newer than the running instance requires a restart."""
        # code was updated today, socket yesterday - needs restart
        self.instance.set_path_mtime(self.instance.APP_CODE_PATH, self.instance.DATE_TODAY)
        self.instance.set_path_mtime(self.console_sock, self.instance.DATE_YESTERDAY)
        res = call_needs_restart(control_sock=self.console_sock)
        self.assertTrue(res.success, msg=res.msg)
        self.assertTrue(res.changed)
    @parameterized.expand(
        itertools.product(
            ["instance", "stateboard"],
            ["memtx_memory", "vinyl_memory"],
        )
    )
    def test_config_changed(self, instance_type, memory_param_name):
        """Instance-config changes require a restart, except a memory-size
        change that is already applied on the running instance (memory can
        be increased dynamically without restarting)."""
        param_name = 'param'
        param_current_value = 'current-value'
        param_new_value = 'new-value'
        current_memory_size = 100
        memtx_memory_new_value = 200
        stateboard = instance_type == 'stateboard'
        self.instance.set_instance_config({
            param_name: param_current_value,
            memory_param_name: current_memory_size
        })
        set_box_cfg(self.instance, **{memory_param_name: current_memory_size})
        # nothing changed
        res = call_needs_restart(
            control_sock=self.console_sock,
            config={
                param_name: param_current_value,
                memory_param_name: current_memory_size
            },
            stateboard=stateboard,
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertFalse(res.changed)
        # param changed, memory size not
        res = call_needs_restart(
            control_sock=self.console_sock,
            config={
                param_name: param_new_value,
                memory_param_name: current_memory_size
            },
            stateboard=stateboard
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertTrue(res.changed)
        # param isn't changed
        # memory size is changed in config
        # but isn't changed on instance
        set_box_cfg(self.instance, **{memory_param_name: current_memory_size})
        res = call_needs_restart(
            control_sock=self.console_sock,
            config={
                param_name: param_current_value,
                memory_param_name: memtx_memory_new_value
            },
            stateboard=stateboard
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertTrue(res.changed)
        # param isn't changed
        # memory size is changed in config
        # and changed on instance
        set_box_cfg(self.instance, **{memory_param_name: memtx_memory_new_value})
        res = call_needs_restart(
            control_sock=self.console_sock,
            config={
                param_name: param_current_value,
                memory_param_name: memtx_memory_new_value
            },
            stateboard=stateboard
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertFalse(res.changed)
        # param is changed
        # memory size is changed in config
        # and changed on instance
        set_box_cfg(self.instance, **{memory_param_name: memtx_memory_new_value})
        res = call_needs_restart(
            control_sock=self.console_sock,
            config={
                param_name: param_new_value,
                memory_param_name: memtx_memory_new_value
            },
            stateboard=stateboard
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertTrue(res.changed)
    @parameterized.expand(
        itertools.product(
            ["instance", "stateboard"],
            ["memtx_memory", "vinyl_memory"],
        )
    )
    def test_app_config_changed(self, instance_type, memory_param_name):
        """App-wide defaults (cartridge_defaults) follow the same rules as
        instance config, but are ignored entirely for a stateboard instance."""
        param_name = 'param'
        param_current_value = 'current-value'
        param_new_value = 'new-value'
        current_memory_size = 100
        memtx_memory_new_value = 200
        stateboard = instance_type == 'stateboard'
        self.instance.set_app_config({
            param_name: param_current_value,
            memory_param_name: current_memory_size
        })
        set_box_cfg(self.instance, **{memory_param_name: current_memory_size})
        # nothing changed
        res = call_needs_restart(
            control_sock=self.console_sock,
            cartridge_defaults={
                param_name: param_current_value,
                memory_param_name: current_memory_size
            },
            stateboard=stateboard
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertFalse(res.changed)
        # param changed, memory size not
        res = call_needs_restart(
            control_sock=self.console_sock,
            cartridge_defaults={
                param_name: param_new_value,
                memory_param_name: current_memory_size
            },
            stateboard=stateboard
        )
        self.assertTrue(res.success, msg=res.msg)
        # stateboard doesn't use cartridge_defaults, so no restart there
        if not stateboard:
            self.assertTrue(res.changed)
        else:
            self.assertFalse(res.changed)
        # param isn't changed
        # memory size is changed in config
        # but isn't changed on instance
        set_box_cfg(self.instance, **{memory_param_name: current_memory_size})
        res = call_needs_restart(
            control_sock=self.console_sock,
            cartridge_defaults={
                param_name: param_current_value,
                memory_param_name: memtx_memory_new_value
            },
            stateboard=stateboard
        )
        self.assertTrue(res.success, msg=res.msg)
        if not stateboard:
            self.assertTrue(res.changed)
        else:
            self.assertFalse(res.changed)
        # param isn't changed
        # memory size is changed in config
        # and changed on instance
        set_box_cfg(self.instance, **{memory_param_name: memtx_memory_new_value})
        res = call_needs_restart(
            control_sock=self.console_sock,
            cartridge_defaults={
                param_name: param_current_value,
                memory_param_name: memtx_memory_new_value
            },
            stateboard=stateboard
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertFalse(res.changed)
        # param is changed
        # memory size is changed in config
        # and changed on instance
        set_box_cfg(self.instance, **{memory_param_name: memtx_memory_new_value})
        res = call_needs_restart(
            control_sock=self.console_sock,
            cartridge_defaults={
                param_name: param_new_value,
                memory_param_name: memtx_memory_new_value
            },
            stateboard=stateboard
        )
        self.assertTrue(res.success, msg=res.msg)
        if not stateboard:
            self.assertTrue(res.changed)
        else:
            self.assertFalse(res.changed)
    @parameterized.expand([
        ["memtx_memory"],
        ["vinyl_memory"],
    ])
    def test_both_app_and_instance_memtx_memory_changed(self, memory_param_name):
        """When both levels set a memory size, the instance-level value
        (config) wins over cartridge_defaults."""
        current_memory_size = 100
        new_memory_size_instance = 200
        new_memory_size_app = 300
        self.instance.set_app_config({
            memory_param_name: current_memory_size
        })
        self.instance.set_instance_config({
            memory_param_name: current_memory_size
        })
        set_box_cfg(self.instance, **{memory_param_name: current_memory_size})
        # nothing changed
        res = call_needs_restart(
            control_sock=self.console_sock,
            config={
                memory_param_name: current_memory_size
            },
            cartridge_defaults={
                memory_param_name: current_memory_size
            }
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertFalse(res.changed)
        # memory size changed only in cartridge_defaults
        # (instance-level config still pins the current size, so no restart)
        res = call_needs_restart(
            control_sock=self.console_sock,
            config={
                memory_param_name: current_memory_size
            },
            cartridge_defaults={
                memory_param_name: new_memory_size_instance
            }
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertFalse(res.changed)
        # memory size changed both in cartridge_defaults and config
        res = call_needs_restart(
            control_sock=self.console_sock,
            config={
                memory_param_name: new_memory_size_instance
            },
            cartridge_defaults={
                memory_param_name: new_memory_size_app
            }
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertTrue(res.changed)
        # memory size changed both in cartridge_defaults and config
        # memory size on instance is equal to value from cartridge_defaults
        # (config value still differs from the running instance -> restart)
        set_box_cfg(self.instance, **{memory_param_name: new_memory_size_app})
        res = call_needs_restart(
            control_sock=self.console_sock,
            config={
                memory_param_name: new_memory_size_instance
            },
            cartridge_defaults={
                memory_param_name: new_memory_size_app
            }
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertTrue(res.changed)
        # memory size changed both in cartridge_defaults and config
        # memory size on instance is equal to value from config
        set_box_cfg(self.instance, **{memory_param_name: new_memory_size_instance})
        res = call_needs_restart(
            control_sock=self.console_sock,
            config={
                memory_param_name: new_memory_size_instance
            },
            cartridge_defaults={
                memory_param_name: new_memory_size_app
            }
        )
        self.assertTrue(res.success, msg=res.msg)
        self.assertFalse(res.changed)
    def tearDown(self):
        # Stop the mock instance started in setUp.
        self.instance.stop()
| 33.289125
| 91
| 0.61259
| 1,368
| 12,550
| 5.295322
| 0.08114
| 0.064605
| 0.078686
| 0.066676
| 0.81143
| 0.792518
| 0.772225
| 0.760629
| 0.758835
| 0.758835
| 0
| 0.002434
| 0.31259
| 12,550
| 376
| 92
| 33.37766
| 0.837255
| 0.085657
| 0
| 0.638514
| 0
| 0
| 0.030941
| 0.002447
| 0
| 0
| 0
| 0
| 0.14527
| 1
| 0.033784
| false
| 0
| 0.030405
| 0.003378
| 0.070946
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
87e51f15ef719923be5b11b884bb55e8f581a965
| 102
|
py
|
Python
|
MedSemanticSearch/evaluation/evaluate_text.py
|
DanielhCarranza/info-retrieval-semantic-scholar
|
004fc0cbc7d9e6fd1301ce28430f661cfbf638ce
|
[
"MIT"
] | 4
|
2020-08-07T00:34:25.000Z
|
2022-02-16T09:36:35.000Z
|
MedSemanticSearch/evaluation/evaluate_text.py
|
DanielhCarranza/MedSearch
|
004fc0cbc7d9e6fd1301ce28430f661cfbf638ce
|
[
"MIT"
] | null | null | null |
MedSemanticSearch/evaluation/evaluate_text.py
|
DanielhCarranza/MedSearch
|
004fc0cbc7d9e6fd1301ce28430f661cfbf638ce
|
[
"MIT"
] | null | null | null |
"""Run Validation Test"""
import os
from pathlib import Path
from time import time
import unittest
| 11.333333
| 25
| 0.764706
| 15
| 102
| 5.2
| 0.666667
| 0.25641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 102
| 8
| 26
| 12.75
| 0.928571
| 0.186275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
87ff2e9d9cbd214847e1d4a9c0cd38a61d7b0a5a
| 34
|
py
|
Python
|
plasma/cli/__init__.py
|
rcallan/plasma-mvp
|
48ec5a35cb4c0c15ef4737d94bd1ff112de9d149
|
[
"Apache-2.0"
] | 590
|
2018-01-17T19:09:40.000Z
|
2020-05-29T03:28:45.000Z
|
plasma/cli/__init__.py
|
littlewhiteZ/plasma-mvp
|
fcd1e211f70ee7f7c482d1201caff7580a2951cc
|
[
"Apache-2.0"
] | 174
|
2018-01-17T19:10:53.000Z
|
2019-03-04T13:51:44.000Z
|
plasma/cli/__init__.py
|
littlewhiteZ/plasma-mvp
|
fcd1e211f70ee7f7c482d1201caff7580a2951cc
|
[
"Apache-2.0"
] | 172
|
2018-01-17T19:05:12.000Z
|
2020-03-12T01:40:12.000Z
|
from .cli import cli # Noqa F401
| 17
| 33
| 0.705882
| 6
| 34
| 4
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 0.235294
| 34
| 1
| 34
| 34
| 0.807692
| 0.264706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e21630ec38f5f5df90335735aed3f86468209534
| 255
|
py
|
Python
|
{{cookiecutter.project_slug}}/backend/core/database/__init__.py
|
devalv/cookiecutter-fastapi
|
c7cfd3caa14b40dcc5d8ff6bdb6e25cfed3c9d00
|
[
"MIT"
] | 2
|
2021-12-26T00:10:19.000Z
|
2022-01-30T21:24:31.000Z
|
{{cookiecutter.project_slug}}/backend/core/database/__init__.py
|
devalv/cookiecutter-fastapi
|
c7cfd3caa14b40dcc5d8ff6bdb6e25cfed3c9d00
|
[
"MIT"
] | 1
|
2021-10-10T17:38:30.000Z
|
2021-10-10T18:30:24.000Z
|
{{cookiecutter.project_slug}}/backend/core/database/__init__.py
|
devalv/cookiecutter-fastapi
|
c7cfd3caa14b40dcc5d8ff6bdb6e25cfed3c9d00
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Project database configuration."""
from .models import db
from .models.security import TokenInfo as TokenInfoGinoModel
from .models.security import User as UserGinoModel
# Public API of this package: the shared `db` instance plus the security
# models re-exported under their *GinoModel aliases.
__all__ = ["UserGinoModel", "TokenInfoGinoModel", "db"]
| 28.333333
| 60
| 0.752941
| 28
| 255
| 6.714286
| 0.607143
| 0.159574
| 0.191489
| 0.255319
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004464
| 0.121569
| 255
| 8
| 61
| 31.875
| 0.834821
| 0.211765
| 0
| 0
| 0
| 0
| 0.169231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3560a068f7dd8ef181800a3cc28edecd009c77ee
| 25
|
py
|
Python
|
models/__init__.py
|
EthanC/N31L
|
d757d6895333b92772d40664d6831f0e9e9fe295
|
[
"MIT"
] | 4
|
2018-02-15T08:55:04.000Z
|
2022-02-27T08:02:38.000Z
|
models/__init__.py
|
EthanC/N31L
|
d757d6895333b92772d40664d6831f0e9e9fe295
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
EthanC/N31L
|
d757d6895333b92772d40664d6831f0e9e9fe295
|
[
"MIT"
] | null | null | null |
from .state import State
| 12.5
| 24
| 0.8
| 4
| 25
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
35b0a7c50dc1706c88cfbcd929caa5adbc9279da
| 49
|
py
|
Python
|
mockup-server/__init__.py
|
ghazal-khaki/restfulpy-client.js-1
|
889fbe74dff40475f5462b74745af43cc0f44975
|
[
"MIT"
] | 16
|
2017-07-07T20:56:03.000Z
|
2018-12-19T11:09:34.000Z
|
mockup-server/__init__.py
|
ghazal-khaki/restfulpy-client.js-1
|
889fbe74dff40475f5462b74745af43cc0f44975
|
[
"MIT"
] | 73
|
2017-07-14T10:48:01.000Z
|
2022-02-17T19:37:25.000Z
|
mockup-server/__init__.py
|
ghazal-khaki/restfulpy-client.js-1
|
889fbe74dff40475f5462b74745af43cc0f44975
|
[
"MIT"
] | 5
|
2017-08-30T21:01:13.000Z
|
2020-10-10T17:18:52.000Z
|
from .simple import SimpleMockupServerSubCommand
| 24.5
| 48
| 0.897959
| 4
| 49
| 11
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.977778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
35d570cc06826ea9cf9631786960da2ab199d18c
| 96
|
py
|
Python
|
transformers/analab/plot/plot.py
|
lianapanatau/BERT-for-RRC-ABSA
|
d31d81d5f9dce594a23f256199988fc2a11ce016
|
[
"Apache-2.0"
] | 425
|
2019-03-31T02:22:29.000Z
|
2022-03-26T06:55:34.000Z
|
transformers/analab/plot/plot.py
|
lianapanatau/BERT-for-RRC-ABSA
|
d31d81d5f9dce594a23f256199988fc2a11ce016
|
[
"Apache-2.0"
] | 23
|
2019-04-27T09:26:08.000Z
|
2021-11-10T10:18:30.000Z
|
transformers/analab/plot/plot.py
|
lianapanatau/BERT-for-RRC-ABSA
|
d31d81d5f9dce594a23f256199988fc2a11ce016
|
[
"Apache-2.0"
] | 86
|
2019-04-09T06:41:29.000Z
|
2022-03-14T02:11:56.000Z
|
class Plot(object):
    """Placeholder base class for plot implementations.

    Both construction and invocation are intentional no-ops; subclasses
    are expected to supply the actual plotting behavior.
    """

    def __init__(self):
        """No state to initialize (returns None implicitly)."""

    def __call__(self):
        """Invoking an instance does nothing and returns None."""
| 12
| 23
| 0.552083
| 11
| 96
| 4.090909
| 0.727273
| 0.355556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.354167
| 96
| 7
| 24
| 13.714286
| 0.725806
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
ea35489b1d7aebc35c7f0b9fb4ff05bbe348a991
| 507
|
py
|
Python
|
src/math.py
|
mdvandamme/test-circleci
|
ac7d477afe10609413e4be34e1d5c2631d52d354
|
[
"MIT"
] | null | null | null |
src/math.py
|
mdvandamme/test-circleci
|
ac7d477afe10609413e4be34e1d5c2631d52d354
|
[
"MIT"
] | null | null | null |
src/math.py
|
mdvandamme/test-circleci
|
ac7d477afe10609413e4be34e1d5c2631d52d354
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
"""
class Math():
    """Toy integer arithmetic with input validation."""

    @staticmethod
    def addition(value1, value2):
        """Return value1 + value2, or 'Invalid input' if either operand
        is not an int.

        BUG FIX: the original guard used `and`, so a single non-int
        operand slipped past validation and `value1 + value2` raised
        TypeError (or silently concatenated). `or` rejects any non-int.
        """
        if not isinstance(value1, int) or not isinstance(value2, int):
            return 'Invalid input'
        else:
            return value1 + value2

    @staticmethod
    def soustraction(value1, value2):
        """Return value1 - value2, or 'Invalid input' if either operand
        is not an int. Same `and` -> `or` validation fix as addition().

        Marked @staticmethod (the original had no `self`, so it only
        worked when called on the class; now instance calls work too).
        """
        if not isinstance(value1, int) or not isinstance(value2, int):
            return 'Invalid input'
        else:
            return value1 - value2
| 20.28
| 71
| 0.518738
| 51
| 507
| 5.156863
| 0.45098
| 0.18251
| 0.106464
| 0.129278
| 0.768061
| 0.768061
| 0.768061
| 0.768061
| 0.768061
| 0.768061
| 0
| 0.044304
| 0.376726
| 507
| 25
| 72
| 20.28
| 0.787975
| 0.084813
| 0
| 0.545455
| 0
| 0
| 0.057143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
ea3ba50be0319f812d0096f62252186c07a28310
| 8,777
|
py
|
Python
|
front.py
|
plexixx/populationQueryService
|
8fa9ce3668b0b3ade433ee89890669082e48aad1
|
[
"MIT"
] | null | null | null |
front.py
|
plexixx/populationQueryService
|
8fa9ce3668b0b3ade433ee89890669082e48aad1
|
[
"MIT"
] | null | null | null |
front.py
|
plexixx/populationQueryService
|
8fa9ce3668b0b3ade433ee89890669082e48aad1
|
[
"MIT"
] | null | null | null |
from os import sep
import aiohttp
import asyncio
import argparse
import pprint
import json
import numpy as np
import matplotlib.pyplot as plt
# Sentinel value marking grid cells with no population measurement
# (matches the -9999 entries in the service's GridStats payload).
NO_DATA = -9999
def paintPopulation(X, Y):
    """Render a scatter plot of populated grid cells.

    Args:
        X: longitudes of the populated cells.
        Y: latitudes of the populated cells (parallel to X).

    Blocks until the interactive window is closed (plt.show()).
    """
    # The figure handle was unused in the original (`fig = ...`); the call
    # alone is enough to open a fresh figure.
    plt.figure()
    plt.scatter(X, Y, marker='o', color='darkgray')
    plt.xlabel("Longitude")
    plt.ylabel("Latitude")
    # NOTE(review): no artist has a label, so legend() emits a "no handles"
    # warning at runtime — confirm whether the legend is wanted at all.
    plt.legend(loc="best")
    plt.title("Population Distribution Diagram",fontsize=16)
    plt.show()
def getAxisList(lonMin, lonMax, latMin, latMax, ret, no_data=-9999):
    """Collect the (lon, lat) coordinates of populated grid cells.

    Walks the bounding box on the 30-arc-second grid (step = 30/3600
    degrees) and keeps every cell whose value in ``ret`` is positive and
    not the no-data sentinel.

    Args:
        lonMin, lonMax: longitude bounds in degrees (lonMax exclusive).
        latMin, latMax: latitude bounds in degrees (latMax exclusive).
        ret: 2-D grid indexed as ret[lon_index][lat_index].
        no_data: sentinel for missing cells (default -9999, the module's
            NO_DATA value; exposed as a parameter for other grids).

    Returns:
        (xList, yList): parallel lists of longitudes and latitudes.
    """
    step = 30 / 3600  # 30 arc-seconds expressed in degrees
    xList = []
    yList = []
    # BUG FIX: the original initialized i = 0 / j = 0 but never advanced
    # them, so every cell was compared against ret[0][0]. enumerate()
    # tracks the grid indices in lockstep with the coordinates.
    for i, lon in enumerate(np.arange(lonMin, lonMax, step)):
        for j, lat in enumerate(np.arange(latMin, latMax, step)):
            if ret[i][j] != no_data and ret[i][j] > 0:
                xList.append(lon)
                yList.append(lat)
    return xList, yList
async def main(host, port, type, num, co):
    """Query the population service and plot the returned grid.

    Args:
        host, port: address of the population query service.
        type, num, co: query parameters forwarded verbatim in the URL.

    NOTE(review): the HTTP response's JSON body is fetched but ignored —
    the hard-coded sample payload below shadows it (the real parse is
    commented out), presumably left over from offline debugging. Confirm
    before relying on live data.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(f'http://{host}:{port}/pop?type={type}&num={num}&co={co}') as response:
            print("Status:", response.status)
            print("Content-type:", response.headers['content-type'])
            #json_to_process = await response.json()
            json_to_process = [{'TotalPopulation': 4174.176958676027, 'lonMin': 135.0, 'latMin': 45.0, 'lonMax': 135.1, 'latMax': 45.2, 'row': 12, 'col': 25, 'GridStats': [{'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901342}, {'Data': 4.901341}, {'Data': 4.901342}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901342},
{'Data': 4.901341}, {'Data': 4.901342}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901342}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data':
4.901342}, {'Data': 4.901341}, {'Data': 4.901342}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901342}, {'Data': 4.901341}, {'Data': 4.901342}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901341}, {'Data': 4.901342}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902054}, {'Data': 4.902053}, {'Data': 4.902054}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902054}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902053},
{'Data': 4.902053}, {'Data': 4.902054}, {'Data': 4.902053}, {'Data': 4.902054}, {'Data': -9999}, {'Data': -9999},
{'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': 4.902053}, {'Data':
4.902053}, {'Data': 4.902053}, {'Data': 4.902054}, {'Data': 4.902053}, {'Data': 4.902054}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902054}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': 4.902054}, {'Data': 4.902053}, {'Data': 4.902054}, {'Data': 4.902053}, {'Data': 4.902053}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999},
{'Data': 4.903479}, {'Data': 4.903478}, {'Data': 4.903478}, {'Data': 4.903479}, {'Data': 4.903478}, {'Data': 4.903479}, {'Data': 4.903478}, {'Data': 4.903479}, {'Data': 4.903478}, {'Data': 4.903479}, {'Data': 4.903478}, {'Data': 4.903479}, {'Data': 4.903478}, {'Data': 4.903479}, {'Data': 4.903478}, {'Data': 4.903479}, {'Data': 4.903478}, {'Data': 4.903479}, {'Data': 4.903478}, {'Data': 4.903479}, {'Data': 4.903478}, {'Data': 4.903479}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': 4.904192}, {'Data': 4.904191}, {'Data': 4.904191}, {'Data': 4.904191}, {'Data': 4.904191}, {'Data': 4.904192}, {'Data': 4.904191}, {'Data': 4.904192}, {'Data': 4.904191}, {'Data': 4.904191}, {'Data': 4.904191}, {'Data': 4.904192}, {'Data': 4.904191}, {'Data': 4.904192}, {'Data': 4.904191}, {'Data': 4.904191}, {'Data': 4.904191}, {'Data': 4.904192}, {'Data': 4.904191}, {'Data': 4.904192}, {'Data': 4.904191}, {'Data': 4.904192}, {'Data': 4.904191}, {'Data': 4.904192}, {'Data': -9999}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904902}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904902}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904902},
{'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904903}, {'Data': 4.904902}, {'Data': 4.904903}, {'Data': 4.904902}, {'Data': 4.905616}, {'Data': 4.905615}, {'Data': 4.905615}, {'Data': 4.905616}, {'Data': 4.905615}, {'Data': 4.905616}, {'Data': 4.905615}, {'Data': 4.905616}, {'Data': 4.905615}, {'Data': 4.905616}, {'Data': 4.905616}, {'Data': 4.905616}, {'Data': 4.905614}, {'Data': 4.905616}, {'Data': 4.905616}, {'Data': 4.905616}, {'Data': 4.905616}, {'Data': 4.905616}, {'Data': 4.905614}, {'Data': 4.905616}, {'Data': 4.905615}, {'Data': 4.905616}, {'Data': 4.905615}, {'Data': 4.905616}, {'Data': 4.905615}, {'Data': 4.906328}, {'Data': 4.906328}, {'Data': 4.906328}, {'Data': 4.906328}, {'Data': 4.906327}, {'Data': 4.906328}, {'Data': 4.906327}, {'Data': 4.906328}, {'Data': 4.906327}, {'Data': 4.906328}, {'Data': 4.906328}, {'Data': 4.906328}, {'Data': 4.906327}, {'Data': 4.906328}, {'Data': 4.906328}, {'Data': 4.906328}, {'Data': 4.906328}, {'Data': 4.906328}, {'Data': 4.906326}, {'Data': 4.906328}, {'Data': 4.906327}, {'Data': 4.906328}, {'Data': 4.906327}, {'Data': 4.906328}, {'Data': 4.906327}, {'Data': 4.90704}, {'Data': 4.90704}, {'Data': 4.90704}, {'Data': 4.90704}, {'Data': 4.907039}, {'Data': 4.90704}, {'Data': 4.907039}, {'Data': 4.90704}, {'Data': 4.907039}, {'Data': 4.90704}, {'Data': 4.907039}, {'Data': 4.90704}, {'Data': 4.907039}, {'Data': 4.90704}, {'Data': 4.907039}, {'Data': 4.90704}, {'Data': 4.90704}, {'Data': 4.90704}, {'Data': 4.907039}, {'Data': 4.90704}, {'Data': 4.907039}, {'Data': 4.90704}, {'Data': 4.907039}, {'Data': 4.90704}, {'Data': -9999}, {'Data': 4.907752}, {'Data': 4.907751}, {'Data': 4.907751}, {'Data': 4.907752}, {'Data': 4.90775}, {'Data':
4.907752}, {'Data': 4.90775}, {'Data': 4.907752}, {'Data': 4.907751}, {'Data': 4.907751}, {'Data': 4.907751}, {'Data': 4.907752}, {'Data': 4.90775}, {'Data': 4.907752}, {'Data': 4.907751}, {'Data': 4.907751}, {'Data': 4.907751}, {'Data': 4.907752}, {'Data': 4.90775}, {'Data': 4.907752}, {'Data': 4.907751}, {'Data': 4.907751}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': 4.908464}, {'Data': 4.908463}, {'Data': 4.908463}, {'Data': 4.908464}, {'Data': 4.908463}, {'Data': 4.908464}, {'Data': 4.908462}, {'Data': 4.908464}, {'Data': 4.908463}, {'Data': 4.908464}, {'Data': 4.908463}, {'Data': 4.908464}, {'Data': 4.908463}, {'Data': 4.908464}, {'Data': 4.908463}, {'Data': 4.908463}, {'Data': 4.908463}, {'Data': 4.908464}, {'Data': 4.908463}, {'Data': 4.908464}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}, {'Data': -9999}]}]
            json_to_process = json_to_process[0]
            x_list = []
            y_list = []
            r_list = []
            stat = json_to_process.get('GridStats')
            total = json_to_process.get('TotalPopulation')
            cols = json_to_process.get('col')
            rows = json_to_process.get('row')
            lonMin = json_to_process.get('lonMin')
            lonMax = json_to_process.get('lonMax')
            latMin = json_to_process.get('latMin')
            latMax = json_to_process.get('latMax')
            print(f'Total Population: {total}')
            print(stat)
            # Rebuild the flat GridStats list into a rows x cols 2-D grid.
            # BUG FIX: the original indexed stat[i*rows+j]; for a row-major
            # flat array with `cols` entries per row, row i starts at
            # offset i*cols, so cells were read from the wrong positions
            # (silently, since i*rows+j stayed in range).
            for i in range(rows):
                l = []
                for j in range(cols):
                    l.append(stat[i*cols+j].get('Data'))
                r_list.append(l)
            x_list, y_list = getAxisList(lonMin=lonMin, lonMax=lonMax, latMin=latMin, latMax=latMax, ret=r_list)
            print('----------')
            print(len(x_list))
            print(len(y_list))
            paintPopulation(np.array(x_list), np.array(y_list))
if __name__ == '__main__':
    # CLI entry point: parse the service address and query parameters,
    # then run the async client. `host` and `port` are positional;
    # --type/--num/--co are optional query parameters.
    parser = argparse.ArgumentParser(description='world population client')
    parser.add_argument('--type', dest='type')
    parser.add_argument('--num', dest='num', type=int)
    parser.add_argument('--co', dest='co')
    parser.add_argument('host')
    parser.add_argument('port')
    args = parser.parse_args()
    print(f'{args}')
    asyncio.run(main(args.host, args.port, args.type, args.num, args.co))
| 103.258824
| 1,711
| 0.548935
| 1,223
| 8,777
| 3.897792
| 0.121014
| 0.269562
| 0.105727
| 0.114118
| 0.657017
| 0.657017
| 0.657017
| 0.657017
| 0.639186
| 0.639186
| 0
| 0.272826
| 0.163951
| 8,777
| 85
| 1,712
| 103.258824
| 0.376806
| 0.004443
| 0
| 0
| 0
| 0
| 0.18061
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025974
| false
| 0
| 0.103896
| 0
| 0.142857
| 0.116883
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ea63eaf683c423cd23925294741aaccd16445f20
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/setuptools/dist.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/setuptools/dist.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/setuptools/dist.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/cb/78/10/65ba1d6ea3931bf717157b90d8e9d3c1c88896966921c89aa713887c17
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.46875
| 0
| 96
| 1
| 96
| 96
| 0.427083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ea97a159a8f65b55b0987bf4605c4562b9855c34
| 1,968
|
py
|
Python
|
epytope/Data/pssms/epidemix/mat/B_44_9.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 7
|
2021-02-01T18:11:28.000Z
|
2022-01-31T19:14:07.000Z
|
epytope/Data/pssms/epidemix/mat/B_44_9.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 22
|
2021-01-02T15:25:23.000Z
|
2022-03-14T11:32:53.000Z
|
epytope/Data/pssms/epidemix/mat/B_44_9.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 4
|
2021-05-28T08:50:38.000Z
|
2022-03-14T11:45:32.000Z
|
# Position-specific scoring matrix for HLA-B*44, 9-mer peptides:
# maps peptide position (0-8) to a dict of per-amino-acid scores
# (one-letter residue code -> score). Generated data — do not edit by hand.
B_44_9 = {0: {'A': -0.4, 'C': -2.0, 'E': 0.8, 'D': 1.3, 'G': 0.2, 'F': 0.0, 'I': -3.0, 'H': -1.8, 'K': 0.4, 'M': -2.0, 'L': -3.6, 'N': 1.1, 'Q': -2.0, 'P': -2.8, 'S': -0.4, 'R': -2.5, 'T': 0.5, 'W': -1.8, 'V': 0.0, 'Y': -2.5}, 1: {'A': -2.9, 'C': -2.8, 'E': 2.6, 'D': -1.7, 'G': -3.5, 'F': -3.7, 'I': -4.0, 'H': -1.5, 'K': -2.1, 'M': -2.4, 'L': -3.8, 'N': -2.4, 'Q': -1.4, 'P': -2.8, 'S': -2.6, 'R': -2.3, 'T': -2.9, 'W': -1.8, 'V': -3.5, 'Y': -2.7}, 2: {'A': 0.2, 'C': -1.7, 'E': -2.8, 'D': -0.1, 'G': 0.4, 'F': 0.1, 'I': 0.2, 'H': 0.6, 'K': -0.5, 'M': -1.6, 'L': -0.9, 'N': 1.1, 'Q': 0.0, 'P': -2.9, 'S': -0.1, 'R': 0.2, 'T': -2.5, 'W': -1.5, 'V': 0.8, 'Y': 0.4}, 3: {'A': -0.1, 'C': -1.5, 'E': 0.0, 'D': 0.3, 'G': -0.3, 'F': -2.3, 'I': -0.1, 'H': -1.9, 'K': 0.6, 'M': -1.2, 'L': 0.6, 'N': -2.4, 'Q': -2.1, 'P': 0.1, 'S': 0.2, 'R': -0.2, 'T': 0.0, 'W': -1.5, 'V': 0.5, 'Y': -2.2}, 4: {'A': -2.9, 'C': -1.4, 'E': -3.2, 'D': -3.3, 'G': 0.0, 'F': 1.2, 'I': 0.3, 'H': -2.1, 'K': -2.9, 'M': -0.8, 'L': 1.4, 'N': -2.8, 'Q': 0.1, 'P': -0.1, 'S': -3.1, 'R': -0.1, 'T': -2.6, 'W': 1.3, 'V': -0.3, 'Y': 0.4}, 5: {'A': 0.0, 'C': -1.7, 'E': -2.9, 'D': -3.0, 'G': -0.4, 'F': -0.2, 'I': -0.1, 'H': 1.4, 'K': -0.5, 'M': -1.4, 'L': 0.3, 'N': 0.5, 'Q': 0.5, 'P': -0.5, 'S': -0.7, 'R': 0.4, 'T': -0.2, 'W': 1.6, 'V': 0.6, 'Y': -2.2}, 6: {'A': 0.4, 'C': -1.6, 'E': -0.5, 'D': -2.9, 'G': -3.3, 'F': 0.5, 'I': 0.2, 'H': 1.1, 'K': 0.5, 'M': -1.3, 'L': 0.1, 'N': 0.0, 'Q': 0.0, 'P': -0.1, 'S': -2.8, 'R': -0.1, 'T': -0.1, 'W': -1.4, 'V': 0.7, 'Y': 0.1}, 7: {'A': 0.3, 'C': -1.6, 'E': -0.4, 'D': -2.7, 'G': 0.0, 'F': 0.6, 'I': -2.5, 'H': -1.9, 'K': 0.7, 'M': 0.4, 'L': -3.2, 'N': -2.4, 'Q': 0.5, 'P': -2.7, 'S': 0.9, 'R': 0.3, 'T': -0.2, 'W': -1.6, 'V': 0.7, 'Y': -2.1}, 8: {'A': -3.5, 'C': -1.9, 'E': -3.8, 'D': -3.9, 'G': -4.1, 'F': 2.3, 'I': -2.3, 'H': -1.9, 'K': -3.6, 'M': 0.7, 'L': 0.6, 'N': -3.5, 'Q': -3.1, 'P': -3.6, 'S': -3.6, 'R': -3.5, 'T': -3.1, 'W': 1.7, 'V': -2.7, 'Y': 1.9}}
| 1,968
| 1,968
| 0.282012
| 552
| 1,968
| 1.001812
| 0.057971
| 0.05425
| 0.016275
| 0.0217
| 0.179024
| 0.028933
| 0.028933
| 0.028933
| 0
| 0
| 0
| 0.232791
| 0.188008
| 1,968
| 1
| 1,968
| 1,968
| 0.113267
| 0
| 0
| 0
| 0
| 0
| 0.091417
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5767950e9325c2bb11d621d91e2819cd85e86ae4
| 2,900
|
py
|
Python
|
tests/test_utils.py
|
jplsek/git-pw
|
c22ac35012ac8b61e564564150c14f436b814e61
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
jplsek/git-pw
|
c22ac35012ac8b61e564564150c14f436b814e61
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
jplsek/git-pw
|
c22ac35012ac8b61e564564150c14f436b814e61
|
[
"MIT"
] | null | null | null |
"""Unit tests for ``git_pw/utils.py``."""
import subprocess
import os
import mock
from git_pw import utils
@mock.patch.object(utils.subprocess, 'check_output', return_value=b' bar ')
def test_git_config(mock_subprocess):
    """git_config() shells out to `git config <key>` and returns the
    output decoded and stripped of surrounding whitespace."""
    value = utils.git_config('foo')
    assert value == 'bar'
    mock_subprocess.assert_called_once_with(['git', 'config', 'foo'])
@mock.patch.object(utils.subprocess, 'check_output',
                   return_value=b'\xf0\x9f\xa4\xb7')
def test_git_config_unicode(mock_subprocess):
    """git_config() decodes non-ASCII output as UTF-8
    (b'\\xf0\\x9f\\xa4\\xb7' is the UTF-8 encoding of U+1F937)."""
    value = utils.git_config('foo')
    assert value == u'\U0001f937'
    mock_subprocess.assert_called_once_with(['git', 'config', 'foo'])
@mock.patch.object(utils.subprocess, 'check_output',
                   side_effect=subprocess.CalledProcessError(1, 'xyz', '123'))
def test_git_config_error(mock_subprocess):
    """If `git config` exits non-zero (key unset), git_config() swallows
    the CalledProcessError and returns an empty string."""
    value = utils.git_config('foo')
    assert value == ''
@mock.patch.object(utils, 'git_config', return_value='bar')
@mock.patch.object(utils, '_tabulate')
@mock.patch.object(utils, '_echo_via_pager')
@mock.patch.dict(os.environ, {'GIT_PAGER': 'foo', 'PAGER': 'baz'})
def test_echo_via_pager_env_GIT_PAGER(mock_inner, mock_tabulate, mock_config):
    """GIT_PAGER has highest precedence: when it is set, git config is
    never consulted and the pager 'foo' from the env is used."""
    utils.echo_via_pager('test', ('foo',), None)
    mock_config.assert_not_called()
    mock_tabulate.assert_called_once_with('test', ('foo',), None)
    mock_inner.assert_called_once_with('foo', mock_tabulate.return_value)
@mock.patch.object(utils, 'git_config', return_value='bar')
@mock.patch.object(utils, '_tabulate')
@mock.patch.object(utils, '_echo_via_pager')
@mock.patch.dict(os.environ, {'PAGER': 'baz'})
def test_echo_via_pager_config(mock_inner, mock_tabulate, mock_config):
    """Without GIT_PAGER, the git config value ('bar') wins over PAGER.

    NOTE(review): the key asserted here is 'core.parser', which looks like
    a typo for git's 'core.pager' — presumably it mirrors the same spelling
    in utils.echo_via_pager; confirm against the implementation.
    """
    utils.echo_via_pager('test', ('foo',), None)
    mock_config.assert_called_once_with('core.parser')
    mock_tabulate.assert_called_once_with('test', ('foo',), None)
    mock_inner.assert_called_once_with('bar', mock_tabulate.return_value)
@mock.patch.object(utils, 'git_config', return_value=None)
@mock.patch.object(utils, '_tabulate')
@mock.patch.object(utils, '_echo_via_pager')
@mock.patch.dict(os.environ, {'PAGER': 'baz'})
def test_echo_via_pager_env_PAGER(mock_pager, mock_tabulate, mock_config):
    """With no git config value, ``PAGER`` from the environment is used."""
    utils.echo_via_pager('test', ('foo',), None)
    # NOTE(review): 'core.parser' looks like a typo for 'core.pager' — confirm.
    mock_config.assert_called_once_with('core.parser')
    mock_tabulate.assert_called_once_with('test', ('foo',), None)
    mock_pager.assert_called_once_with('baz', mock_tabulate.return_value)
@mock.patch.object(utils, 'git_config', return_value=None)
@mock.patch.object(utils, '_tabulate')
@mock.patch.object(utils, '_echo_via_pager')
def test_echo_via_pager_env_default(mock_pager, mock_tabulate, mock_config):
    """With neither config nor environment set, 'less' is the fallback pager."""
    utils.echo_via_pager('test', ('foo',), None)
    # NOTE(review): 'core.parser' looks like a typo for 'core.pager' — confirm.
    mock_config.assert_called_once_with('core.parser')
    mock_tabulate.assert_called_once_with('test', ('foo',), None)
    mock_pager.assert_called_once_with('less', mock_tabulate.return_value)
| 35.802469
| 78
| 0.738276
| 416
| 2,900
| 4.786058
| 0.137019
| 0.081366
| 0.113009
| 0.150678
| 0.85786
| 0.85786
| 0.846811
| 0.846811
| 0.830236
| 0.761929
| 0
| 0.005789
| 0.106552
| 2,900
| 80
| 79
| 36.25
| 0.76264
| 0.012069
| 0
| 0.581818
| 0
| 0
| 0.136062
| 0
| 0
| 0
| 0
| 0
| 0.309091
| 1
| 0.127273
| false
| 0
| 0.072727
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
57b77347325bc3c94b6e4040995e5191a602243f
| 234
|
py
|
Python
|
example-project/personal/social.py
|
OneSocial-dev/onesocial-django
|
28acfa277d9b3ba758f17fa4787360779369f8e6
|
[
"MIT"
] | 1
|
2020-10-27T15:18:40.000Z
|
2020-10-27T15:18:40.000Z
|
example-project/personal/social.py
|
OneSocial-dev/onesocial-django
|
28acfa277d9b3ba758f17fa4787360779369f8e6
|
[
"MIT"
] | null | null | null |
example-project/personal/social.py
|
OneSocial-dev/onesocial-django
|
28acfa277d9b3ba758f17fa4787360779369f8e6
|
[
"MIT"
] | null | null | null |
from django.shortcuts import redirect
from onesocial_django.models import SocialAccount
def validate_func(social_account: SocialAccount):
    """Redirect to the username-confirmation page for *social_account*."""
    token = social_account.account_token
    return redirect('personal:confirm-username', account_token=token)
| 29.25
| 92
| 0.846154
| 28
| 234
| 6.857143
| 0.642857
| 0.135417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089744
| 234
| 7
| 93
| 33.428571
| 0.901408
| 0
| 0
| 0
| 0
| 0
| 0.106838
| 0.106838
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
57be90c00e6722fb30c75e048d9773034a704d2c
| 717
|
py
|
Python
|
singer/metadata.py
|
chrisgoddard/singer-python
|
acbe4847087d5f9061440fd6cebf75c88d875c83
|
[
"Apache-2.0"
] | 1
|
2021-05-17T14:06:45.000Z
|
2021-05-17T14:06:45.000Z
|
singer/metadata.py
|
chrisgoddard/singer-python
|
acbe4847087d5f9061440fd6cebf75c88d875c83
|
[
"Apache-2.0"
] | null | null | null |
singer/metadata.py
|
chrisgoddard/singer-python
|
acbe4847087d5f9061440fd6cebf75c88d875c83
|
[
"Apache-2.0"
] | 1
|
2021-05-17T14:06:46.000Z
|
2021-05-17T14:06:46.000Z
|
def new():
    """Return a fresh, empty compiled-metadata mapping."""
    return dict()
def to_map(raw_metadata):
    """Index raw metadata entries by their breadcrumb (as a hashable tuple)."""
    compiled = {}
    for entry in raw_metadata:
        compiled[tuple(entry['breadcrumb'])] = entry['metadata']
    return compiled
def to_list(compiled_metadata):
    """Flatten a compiled-metadata mapping back into a list of entries."""
    entries = []
    for crumb, meta in compiled_metadata.items():
        entries.append({'breadcrumb': crumb, 'metadata': meta})
    return entries
def delete(compiled_metadata, breadcrumb, k):
    """Remove key ``k`` from the entry at ``breadcrumb`` (in place).

    Raises KeyError if either the breadcrumb or the key is absent.
    """
    compiled_metadata[breadcrumb].pop(k)
def write(compiled_metadata, breadcrumb, k, val):
    """Set metadata key ``k`` to ``val`` under ``breadcrumb`` and return the map.

    The entry dict for ``breadcrumb`` is created on first use. ``None`` is
    never a legal metadata value, so it is rejected loudly rather than
    silently stored.

    Raises:
        Exception: if ``val`` is None (now with a descriptive message; the
            original raised a bare, message-less ``Exception()``).
    """
    if val is None:
        raise Exception(
            'cannot write None value for metadata key {!r} at {!r}'.format(
                k, breadcrumb))
    # setdefault replaces the original get()/update() double lookup with a
    # single idiomatic insert-or-fetch.
    compiled_metadata.setdefault(breadcrumb, {})[k] = val
    return compiled_metadata
def get(compiled_metadata, breadcrumb, k):
    """Look up key ``k`` under ``breadcrumb``; None when either is missing."""
    entry = compiled_metadata.get(breadcrumb, {})
    return entry.get(k)
| 29.875
| 83
| 0.700139
| 94
| 717
| 5.180851
| 0.308511
| 0.361396
| 0.26694
| 0.277207
| 0.123203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177127
| 717
| 23
| 84
| 31.173913
| 0.825424
| 0
| 0
| 0
| 0
| 0
| 0.050209
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.222222
| 0.611111
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
aa43f5dec9ad9f95a8ea7948cfa372cc7f642db1
| 16,319
|
py
|
Python
|
tests/parsing/xpath/tei_xpath_functions_test.py
|
elifesciences/sciencebeam-judge
|
357f1b4266674611b24371224468db268ed4574e
|
[
"MIT"
] | null | null | null |
tests/parsing/xpath/tei_xpath_functions_test.py
|
elifesciences/sciencebeam-judge
|
357f1b4266674611b24371224468db268ed4574e
|
[
"MIT"
] | 189
|
2018-01-11T17:14:18.000Z
|
2022-03-28T17:30:11.000Z
|
tests/parsing/xpath/tei_xpath_functions_test.py
|
elifesciences/sciencebeam-judge
|
357f1b4266674611b24371224468db268ed4574e
|
[
"MIT"
] | null | null | null |
import logging
from lxml.builder import E
import pytest
from sciencebeam_judge.parsing.xpath.tei_xpath_functions import register_functions
# Module-level logger; kept for ad-hoc debugging of the tests below.
LOGGER = logging.getLogger(__name__)
@pytest.fixture(autouse=True)
def _register_functions():
    """Register the TEI XPath extension functions before every test (autouse)."""
    register_functions()
def _tei_with_authors(*authors):
    """Wrap *authors* in the minimal TEI header structure used by the tests."""
    analytic = E.analytic(*authors)
    return E.TEI(E.teiHeader(E.fileDesc(E.sourceDesc(E.biblStruct(analytic)))))
class TestTeiXpathFunctions:
    """Tests for the custom ``tei-*`` XPath extension functions."""
    class TestAuthors:
        """``tei-authors(.)`` returns the ``author`` element nodes."""
        def test_should_return_single_author_node(self):
            author = E.author(
                E.persName(
                    E.forename('Tom'),
                    E.surname('Thomson')
                )
            )
            xml = _tei_with_authors(author)
            assert list(xml.xpath('tei-authors(.)')) == [author]
    class TestGivenName:
        """``tei-given-name`` joins all forenames of a ``persName``."""
        def test_should_return_given_name_of_single_pers_name(self):
            author = E.author(
                E.persName(
                    E.forename('Tom'),
                    E.surname('Thomson')
                )
            )
            xml = _tei_with_authors(author)
            assert list(
                xml.xpath('tei-given-name(//persName)')
            ) == ['Tom']
        def test_should_return_given_name_of_multiple_forenames(self):
            author = E.author(
                E.persName(
                    E.forename('Tom', type='first'),
                    E.forename('T', type='middle'),
                    E.surname('Thomson')
                )
            )
            xml = _tei_with_authors(author)
            assert list(
                xml.xpath('tei-given-name(//persName)')
            ) == ['Tom T']
    class TestFullName:
        """``tei-full-name`` joins forenames and surname with single spaces."""
        def test_should_return_full_name_of_single_pers_name(self):
            author = E.author(
                E.persName(
                    E.forename('Tom'),
                    E.surname('Thomson')
                )
            )
            xml = _tei_with_authors(author)
            assert list(
                xml.xpath('tei-full-name(//persName)')
            ) == ['Tom Thomson']
        def test_should_return_full_name_of_multiple_forenames(self):
            author = E.author(
                E.persName(
                    E.forename('Tom', type='first'),
                    E.forename('T', type='middle'),
                    E.surname('Thomson')
                )
            )
            xml = _tei_with_authors(author)
            assert list(
                xml.xpath('tei-full-name(//persName)')
            ) == ['Tom T Thomson']
        def test_should_return_full_name_of_single_author(self):
            author = E.author(
                E.persName(
                    E.forename('Tom'),
                    E.surname('Thomson')
                )
            )
            xml = _tei_with_authors(author)
            assert list(
                xml.xpath('tei-full-name(//author)')
            ) == ['Tom Thomson']
        def test_should_not_add_space_if_surname_is_missing(self):
            author = E.author(
                E.persName(
                    E.forename('Tom')
                )
            )
            xml = _tei_with_authors(author)
            assert list(xml.xpath('tei-full-name(//author)')) == ['Tom']
        def test_should_not_add_space_if_surname_is_empty(self):
            author = E.author(
                E.persName(
                    E.forename('Tom'),
                    E.surname()
                )
            )
            xml = _tei_with_authors(author)
            assert list(xml.xpath('tei-full-name(//author)')) == ['Tom']
    class TestAuthorAffiliations:
        """``tei-author-affiliations`` collects and de-duplicates affiliations."""
        def test_should_return_single_affiliation_without_key(self):
            affiliation = E.affiliation(
                E.orgName('Department 1', type="department")
            )
            xml = _tei_with_authors(E.author(affiliation))
            assert list(xml.xpath('tei-author-affiliations(.)')) == [affiliation]
        def test_should_return_single_affiliation_with_key(self):
            affiliation = E.affiliation(key='aff1')
            xml = _tei_with_authors(E.author(affiliation))
            assert list(xml.xpath('tei-author-affiliations(.)')) == [affiliation]
        def test_should_return_deduplicate_affiliations_with_key(self):
            # Two affiliations sharing key='aff1' collapse to the first node.
            affiliation1 = E.affiliation(key='aff1')
            affiliation1_copy = E.affiliation(key='aff1')
            xml = _tei_with_authors(E.author(affiliation1), E.author(affiliation1_copy))
            assert list(xml.xpath('tei-author-affiliations(.)')) == [affiliation1]
    class TestAffString:
        """``tei-aff-string`` renders an affiliation as a comma-joined string,
        preferring a ``raw_affiliation`` note when one is present."""
        def test_should_return_org_name(self):
            xml = E.TEI(
                E.affiliation(
                    E.orgName('Department 1', type="department")
                )
            )
            assert list(
                xml.xpath('tei-aff-string(//affiliation)')
            ) == ['Department 1']
        def test_should_join_multiple_org_names(self):
            xml = E.TEI(
                E.affiliation(
                    E.orgName('Department 1', type="department"),
                    E.orgName('Institution 1', type="institution")
                )
            )
            assert list(
                xml.xpath('tei-aff-string(//affiliation)')
            ) == ['Department 1, Institution 1']
        def test_should_join_institution_with_postcode_settlement_country(self):
            xml = E.TEI(
                E.affiliation(
                    E.orgName('Department 1', type="department"),
                    E.address(
                        E.postCode('Post Code 1'),
                        E.settlement('Settlement 1'),
                        E.country('Country 1')
                    )
                )
            )
            assert (
                list(xml.xpath('tei-aff-string(//affiliation)')) ==
                ['Department 1, Post Code 1, Settlement 1, Country 1']
            )
        def test_should_use_raw_affiliation_note_without_label_if_available(self):
            # The raw_affiliation note wins over the structured fields.
            xml = E.TEI(
                E.affiliation(
                    E.note('raw affiliation 1', type='raw_affiliation'),
                    E.orgName('Department 1', type="department"),
                    E.address(
                        E.postCode('Post Code 1'),
                        E.settlement('Settlement 1'),
                        E.country('Country 1')
                    )
                )
            )
            assert (
                list(xml.xpath('tei-aff-string(//affiliation)')) ==
                ['raw affiliation 1']
            )
        def test_should_use_raw_affiliation_note_with_label_if_available(self):
            xml = E.TEI(
                E.affiliation(
                    E.note(
                        E.label('a'),
                        ' raw affiliation 1',
                        type='raw_affiliation'
                    ),
                    E.orgName('Department 1', type="department"),
                    E.address(
                        E.postCode('Post Code 1'),
                        E.settlement('Settlement 1'),
                        E.country('Country 1')
                    )
                )
            )
            assert (
                list(xml.xpath('tei-aff-string(//affiliation)')) ==
                ['a raw affiliation 1']
            )
        def test_should_join_institution_with_addr_line(self):
            xml = E.TEI(
                E.affiliation(
                    E.orgName('Department 1', type="department"),
                    E.address(
                        E.addrLine('Addr Line 1')
                    )
                )
            )
            assert list(
                xml.xpath('tei-aff-string(//affiliation)')
            ) == ['Department 1, Addr Line 1']
        def test_should_sort_affiliations(self):
            xml = E.TEI(
                E.affiliation(
                    E.orgName('Department 2', type="department"),
                    key='aff2'
                ),
                E.affiliation(
                    E.orgName('Department 1', type="department"),
                    key='aff1'
                )
            )
            assert list(
                xml.xpath('tei-aff-string(//affiliation)')
            ) == ['Department 1', 'Department 2']
        def test_should_sort_affiliations_natural_order(self):
            # 'aff9' sorts before 'aff10', i.e. numeric, not lexicographic.
            xml = E.TEI(
                E.affiliation(
                    E.orgName('Department 2', type="department"),
                    key='aff10'
                ),
                E.affiliation(
                    E.orgName('Department 1', type="department"),
                    key='aff9'
                )
            )
            assert list(
                xml.xpath('tei-aff-string(//affiliation)')
            ) == ['Department 1', 'Department 2']
    class TestAffText:
        """``tei-aff-text`` returns only the raw_affiliation note text
        (empty string when no such note exists)."""
        def test_should_return_emtpy_string_if_no_raw_affiliation_note_available(self):
            # NOTE(review): 'emtpy' in the test name is a typo for 'empty'.
            xml = E.TEI(
                E.affiliation(
                    E.orgName('Department 1', type="department")
                )
            )
            assert (
                list(xml.xpath('tei-aff-text(//affiliation)')) ==
                ['']
            )
        def test_should_use_raw_affiliation_note_without_label_if_available(self):
            xml = E.TEI(
                E.affiliation(
                    E.note('raw affiliation 1', type='raw_affiliation'),
                    E.orgName('Department 1', type="department")
                )
            )
            assert (
                list(xml.xpath('tei-aff-text(//affiliation)')) ==
                ['raw affiliation 1']
            )
        def test_should_use_raw_affiliation_note_with_label_if_available(self):
            xml = E.TEI(
                E.affiliation(
                    E.note(
                        E.label('a'),
                        ' raw affiliation 1',
                        type='raw_affiliation'
                    ),
                    E.orgName('Department 1', type="department")
                )
            )
            assert (
                list(xml.xpath('tei-aff-text(//affiliation)')) ==
                ['a raw affiliation 1']
            )
        def test_should_sort_affiliations(self):
            # NOTE(review): these two tests call 'tei-aff-string', not
            # 'tei-aff-text' — possibly a copy-paste left-over; confirm intent.
            xml = E.TEI(
                E.affiliation(
                    E.note('raw affiliation 2', type='raw_affiliation'),
                    key='aff2'
                ),
                E.affiliation(
                    E.note('raw affiliation 1', type='raw_affiliation'),
                    key='aff1'
                )
            )
            assert list(
                xml.xpath('tei-aff-string(//affiliation)')
            ) == ['raw affiliation 1', 'raw affiliation 2']
        def test_should_sort_affiliations_natural_order(self):
            xml = E.TEI(
                E.affiliation(
                    E.note('raw affiliation 2', type='raw_affiliation'),
                    key='aff10'
                ),
                E.affiliation(
                    E.note('raw affiliation 1', type='raw_affiliation'),
                    key='aff9'
                )
            )
            assert list(
                xml.xpath('tei-aff-string(//affiliation)')
            ) == ['raw affiliation 1', 'raw affiliation 2']
    class TestAffByLabelText:
        """``tei-aff-by-label-text`` groups raw_affiliation notes by label and
        concatenates the parts that share a label."""
        def test_should_return_emtpy_list_if_no_raw_affiliation_note_available(self):
            xml = E.TEI(
                E.affiliation(
                    E.orgName('Department 1', type="department")
                )
            )
            assert (
                list(xml.xpath('tei-aff-by-label-text(//affiliation)')) ==
                []
            )
        def test_should_use_raw_affiliation_note_without_label_if_available(self):
            xml = E.TEI(
                E.affiliation(
                    E.note('raw affiliation 1', type='raw_affiliation'),
                    E.orgName('Department 1', type="department")
                )
            )
            assert (
                list(xml.xpath('tei-aff-by-label-text(//affiliation)')) ==
                ['raw affiliation 1']
            )
        def test_should_use_raw_affiliation_note_with_label_if_available(self):
            xml = E.TEI(
                E.affiliation(
                    E.note(
                        E.label('a'),
                        ' raw affiliation 1',
                        type='raw_affiliation'
                    ),
                    E.orgName('Department 1', type="department")
                )
            )
            assert (
                list(xml.xpath('tei-aff-by-label-text(//affiliation)')) ==
                ['a raw affiliation 1']
            )
        def test_should_concatenate_raw_affiliation_notes_with_same_label(self):
            xml = E.TEI(
                E.affiliation(
                    E.note(E.label('a'), ' part 1', type='raw_affiliation')
                ),
                E.affiliation(
                    E.note(E.label('a'), ' part 2', type='raw_affiliation')
                ),
                E.affiliation(
                    E.note(E.label('b'), ' other', type='raw_affiliation')
                )
            )
            assert (
                list(xml.xpath('tei-aff-by-label-text(//affiliation)')) ==
                ['a part 1 part 2', 'b other']
            )
    class TestRefFpage:
        """``tei-ref-fpage`` reads the first page from a page biblScope:
        'from' attribute first, then element text, else empty string."""
        def test_should_return_from_attribute_if_present(self):
            xml = E.TEI(E.biblStruct(E.monogr(E.imprint(
                E.biblScope({"unit": "page", "from": "123"})
            ))))
            assert list(xml.xpath('tei-ref-fpage(//biblStruct)')) == ['123']
        def test_should_return_element_text_if_from_attribute_is_not_present(self):
            xml = E.TEI(E.biblStruct(E.monogr(E.imprint(
                E.biblScope("123", unit="page")
            ))))
            assert list(xml.xpath('tei-ref-fpage(//biblStruct)')) == ['123']
        def test_should_return_empty_string_if_from_attribute_is_not_present_and_has_no_text(self):
            xml = E.TEI(E.biblStruct(E.monogr(E.imprint(
                E.biblScope(unit="page")
            ))))
            assert list(xml.xpath('tei-ref-fpage(//biblStruct)')) == ['']
        def test_should_return_empty_string_if_there_is_no_page_element(self):
            xml = E.TEI(E.biblStruct(E.monogr(E.imprint(
                E.biblScope(unit="other")
            ))))
            assert list(xml.xpath('tei-ref-fpage(//biblStruct)')) == ['']
    class TestRefLpage:
        """``tei-ref-lpage`` reads the last page: 'to' attribute if present,
        otherwise it falls back to the first page."""
        def test_should_return_to_attribute_if_present(self):
            xml = E.TEI(E.biblStruct(E.monogr(E.imprint(
                E.biblScope({"unit": "page", "to": "123"})
            ))))
            assert list(xml.xpath('tei-ref-lpage(//biblStruct)')) == ['123']
        def test_should_return_fpage_if_there_is_no_to_page(self):
            xml = E.TEI(E.biblStruct(E.monogr(E.imprint(
                E.biblScope("123", unit="page")
            ))))
            assert list(xml.xpath('tei-ref-lpage(//biblStruct)')) == ['123']
    class TestAbstractText:
        """``tei-abstract-text`` extracts abstract text, dropping only the
        first ``head`` element."""
        def test_should_return_without_paragraph(self):
            xml = E.TEI(E.teiHeader(E.profileDesc(E.abstract(
                'abstract1'
            ))))
            assert list(xml.xpath('tei-abstract-text(//abstract)')) == ['abstract1']
        def test_should_return_with_div_and_paragraph(self):
            xml = E.TEI(E.teiHeader(E.profileDesc(E.abstract(E.div(
                E.p('abstract1')
            )))))
            assert list(xml.xpath('tei-abstract-text(//abstract)')) == ['abstract1']
        def test_should_ignore_first_head(self):
            xml = E.TEI(E.teiHeader(E.profileDesc(E.abstract(E.div(
                E.head('Abstract'),
                E.p('abstract1')
            )))))
            assert list(xml.xpath('tei-abstract-text(//abstract)')) == ['abstract1']
        def test_not_should_ignore_further_head_elements(self):
            # Only the first head ('Abstract') is dropped; 'Sub:' is kept.
            xml = E.TEI(E.teiHeader(E.profileDesc(E.abstract(
                E.div(E.head('Abstract')),
                E.div(E.head('Sub:'), E.p('abstract1'))
            ))))
            assert list(xml.xpath('tei-abstract-text(//abstract)')) == ['Sub: abstract1']
| 36.50783
| 99
| 0.482444
| 1,579
| 16,319
| 4.775174
| 0.09373
| 0.077984
| 0.065517
| 0.090716
| 0.846154
| 0.822944
| 0.805968
| 0.780769
| 0.751857
| 0.701061
| 0
| 0.011688
| 0.397083
| 16,319
| 446
| 100
| 36.589686
| 0.75465
| 0
| 0
| 0.62406
| 0
| 0
| 0.163
| 0.063852
| 0
| 0
| 0
| 0
| 0.095238
| 1
| 0.100251
| false
| 0
| 0.010025
| 0.002506
| 0.140351
| 0.015038
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1020059147180f2ae41c791b5696ab3c2fef1686
| 198
|
py
|
Python
|
server/views/api.py
|
appcypher/feature-request
|
c8c05cde18b0353e39c046405f168dcc9b90a11c
|
[
"Apache-2.0"
] | 5
|
2019-06-06T08:41:36.000Z
|
2020-08-28T18:46:57.000Z
|
server/views/api.py
|
appcypher/feature-request
|
c8c05cde18b0353e39c046405f168dcc9b90a11c
|
[
"Apache-2.0"
] | 1
|
2022-02-10T17:24:39.000Z
|
2022-02-10T17:24:39.000Z
|
server/views/api.py
|
appcypher/feature-request
|
c8c05cde18b0353e39c046405f168dcc9b90a11c
|
[
"Apache-2.0"
] | 2
|
2019-06-05T19:05:25.000Z
|
2020-08-27T14:04:28.000Z
|
""" Module that contains restplus api isntantiation. """
from flask_restplus import Api
from config.blueprints import api_v1_blueprint
# Creates a restplus api instance
api = Api(api_v1_blueprint)
| 28.285714
| 56
| 0.808081
| 28
| 198
| 5.535714
| 0.571429
| 0.141935
| 0.180645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011628
| 0.131313
| 198
| 6
| 57
| 33
| 0.889535
| 0.414141
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
105e91c445afa21c30c6daf930eb09ffc5196c1d
| 35
|
py
|
Python
|
tests/mods/nest/basic.py
|
Akm0d/pop
|
77d9f6e6de8e02aa2ee5520d0aa0052fabd53243
|
[
"Apache-2.0"
] | 48
|
2019-05-21T16:10:49.000Z
|
2021-12-04T18:02:20.000Z
|
tests/mods/nest/basic.py
|
Akm0d/pop
|
77d9f6e6de8e02aa2ee5520d0aa0052fabd53243
|
[
"Apache-2.0"
] | 43
|
2019-05-21T22:39:44.000Z
|
2020-02-07T16:37:29.000Z
|
tests/mods/nest/basic.py
|
Akm0d/pop
|
77d9f6e6de8e02aa2ee5520d0aa0052fabd53243
|
[
"Apache-2.0"
] | 18
|
2019-05-21T16:10:42.000Z
|
2019-12-13T16:28:36.000Z
|
def ret_true(hub):
    """Always return True; the *hub* argument is accepted but unused."""
    result = True
    return result
| 11.666667
| 18
| 0.685714
| 6
| 35
| 3.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228571
| 35
| 2
| 19
| 17.5
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
107fea0e86169a409d0d264e61f4cf0f7fe389a7
| 25
|
py
|
Python
|
python/hayate/store/__init__.py
|
tao12345666333/Talk-Is-Cheap
|
7b2c5959828b6d8bbbad8144b9b97f9b77c6b34c
|
[
"MIT"
] | 4
|
2016-04-14T02:11:35.000Z
|
2019-05-30T10:18:41.000Z
|
python/hayate/store/__init__.py
|
tao12345666333/Talk-Is-Cheap
|
7b2c5959828b6d8bbbad8144b9b97f9b77c6b34c
|
[
"MIT"
] | 8
|
2016-07-21T16:02:17.000Z
|
2021-09-23T02:49:34.000Z
|
python/hayate/store/__init__.py
|
tao12345666333/Talk-Is-Cheap
|
7b2c5959828b6d8bbbad8144b9b97f9b77c6b34c
|
[
"MIT"
] | 2
|
2017-02-17T05:02:02.000Z
|
2017-11-08T12:22:09.000Z
|
from modules import user
| 12.5
| 24
| 0.84
| 4
| 25
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
10a48cb9dbfa4002c6269c392e374d0c2564e82b
| 40
|
py
|
Python
|
jinahub/segmenters/PDFSegmenter/__init__.py
|
vivek2301/executors
|
8159681d68408ab8f797497bc3374be77e6ca392
|
[
"Apache-2.0"
] | null | null | null |
jinahub/segmenters/PDFSegmenter/__init__.py
|
vivek2301/executors
|
8159681d68408ab8f797497bc3374be77e6ca392
|
[
"Apache-2.0"
] | null | null | null |
jinahub/segmenters/PDFSegmenter/__init__.py
|
vivek2301/executors
|
8159681d68408ab8f797497bc3374be77e6ca392
|
[
"Apache-2.0"
] | null | null | null |
from .pdf_segmenter import PDFSegmenter
| 20
| 39
| 0.875
| 5
| 40
| 6.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 1
| 40
| 40
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
52d0d5cc35583e60b17c69d1017475bb9655e156
| 19,034
|
py
|
Python
|
test/test_embpattern.py
|
EmbroiderPy/pyembroidery
|
3d0db61e7a08bba8e51f4e5873ebfa678c12960b
|
[
"MIT"
] | null | null | null |
test/test_embpattern.py
|
EmbroiderPy/pyembroidery
|
3d0db61e7a08bba8e51f4e5873ebfa678c12960b
|
[
"MIT"
] | null | null | null |
test/test_embpattern.py
|
EmbroiderPy/pyembroidery
|
3d0db61e7a08bba8e51f4e5873ebfa678c12960b
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import unittest
from test.pattern_for_tests import *
class TestEmbpattern(unittest.TestCase):
    """Round-trip read/write tests for embroidery pattern formats."""
    def position_equals(self, stitches, j, k):
        """Assert stitches *j* and *k* share the same first coordinate (x)."""
        self.assertEqual(stitches[j][:1], stitches[k][:1])
    def test_thread_reorder(self):
        """SET_CHANGE_SEQUENCE commands swap thread order across a PES round trip."""
        test_file = "reorder.pes"
        shift = get_shift_pattern()
        shift.add_command(encode_thread_change(SET_CHANGE_SEQUENCE, thread=1, order=0))
        shift.add_command(encode_thread_change(SET_CHANGE_SEQUENCE, 0, None, 1))
        shift.add_command(encode_thread_change(SET_CHANGE_SEQUENCE, 1, None, 0))
        # Pre-write: red first, blue second (masking off any alpha byte).
        self.assertEqual(0xFFFFFF & shift.threadlist[0].color, 0xFF0000)
        self.assertEqual(0xFFFFFF & shift.threadlist[1].color, 0x0000FF)
        write_pes(shift, test_file, {"pes version": 6})
        read_pattern = read_pes(test_file)
        for thread in read_pattern.threadlist:
            print(0xFFFFFF & thread.color)
        # Post-read: colors come back in the reordered (swapped) sequence.
        self.assertEqual((0xFFFFFF & read_pattern.threadlist[0].color), 0x0000FF)
        self.assertEqual((0xFFFFFF & read_pattern.threadlist[1].color), 0xFF0000)
        self.addCleanup(os.remove, test_file)
    def test_needle_count_limited_set(self):
        """Explicit needle assignments are clamped to the writer's needle_count."""
        needle_file = "needle-ls.u01"
        shift = get_shift_pattern()
        shift.add_command(encode_thread_change(SET_CHANGE_SEQUENCE, None, 6, 0))
        shift.add_command(encode_thread_change(SET_CHANGE_SEQUENCE, 4, 6, 7))
        shift.add_command(encode_thread_change(SET_CHANGE_SEQUENCE, None, 3, 0))
        write_u01(shift, needle_file, {"needle_count": 7})
        needle_pattern = read_u01(needle_file)
        self.assertEqual(needle_pattern.count_stitch_commands(NEEDLE_SET), 16)
        first = True
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            print(cmd)
            if first:
                # self.assertEqual(cmd[2], 3)
                first = False
            # Every assigned needle must fit within the 7-needle machine.
            self.assertLessEqual(cmd[2], 7)
        self.addCleanup(os.remove, needle_file)
    # The ten tests below are near-identical: write the shift pattern with
    # needle_count = N and verify every NEEDLE_SET stays within N needles.
    # NOTE(review): a parametrized test would remove the duplication.
    def test_needle_count_limit1(self):
        """needle_count=1: no needle changes possible, so 16 STOPs instead."""
        needle_file = "needle-1.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 1})
        needle_pattern = read_u01(needle_file)
        self.assertEqual(needle_pattern.count_stitch_commands(STOP), 16)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLess(cmd[2], 1)
        self.addCleanup(os.remove, needle_file)
    def test_needle_count_limit2(self):
        needle_file = "needle.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 2})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 2)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.addCleanup(os.remove, needle_file)
    def test_needle_count_limit3(self):
        needle_file = "needle.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 3})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 3)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.addCleanup(os.remove, needle_file)
    def test_needle_count_limit4(self):
        needle_file = "needle.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 4})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 4)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.addCleanup(os.remove, needle_file)
    def test_needle_count_limit5(self):
        needle_file = "needle.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 5})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 5)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.addCleanup(os.remove, needle_file)
    def test_needle_count_limit6(self):
        needle_file = "needle.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 6})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 6)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.addCleanup(os.remove, needle_file)
    def test_needle_count_limit7(self):
        needle_file = "needle.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 7})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 7)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.addCleanup(os.remove, needle_file)
    def test_needle_count_limit8(self):
        needle_file = "needle.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 8})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 8)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.addCleanup(os.remove, needle_file)
    def test_needle_count_limit9(self):
        needle_file = "needle.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 9})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 9)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.addCleanup(os.remove, needle_file)
    def test_needle_count_limit10(self):
        needle_file = "needle.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 10})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 10)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.addCleanup(os.remove, needle_file)
    def test_u01_tie_on(self):
        """Tie-on contingency adds 4 extra stitches per block (3 small + 1 start)."""
        needle_file = "tie_on.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 10, "tie_on": CONTINGENCY_TIE_ON_THREE_SMALL})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 10)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.assertEqual(needle_pattern.count_stitch_commands(STITCH), 16 * (5 + 4))
        # 5 for the actual stitch pattern. 3 small, and 1 extra tieon, start.
        self.addCleanup(os.remove, needle_file)
    def test_u01_tie_off(self):
        """Tie-off contingency adds 4 extra stitches per block (3 small + 1 end)."""
        needle_file = "tie_on.u01"
        write_u01(get_shift_pattern(), needle_file, {"needle_count": 10, "tie_off": CONTINGENCY_TIE_OFF_THREE_SMALL})
        needle_pattern = read_u01(needle_file)
        for stitch in needle_pattern.get_match_commands(NEEDLE_SET):
            cmd = decode_embroidery_command(stitch[2])
            self.assertLessEqual(cmd[2], 10)
        self.assertEqual(needle_pattern.count_needle_sets(), 16)
        self.assertEqual(needle_pattern.count_stitch_commands(STITCH), 16 * (5 + 4))
        # 5 for the actual stitch pattern. 3 small, and 1 extra tieoff, end.
        self.addCleanup(os.remove, needle_file)
    def test_write_dst_read_dst_long_jump(self):
        """A 200-unit stitch survives a DST round trip (second stitch at y=100)."""
        file1 = "file3.dst"
        pattern = EmbPattern()
        pattern.add_block([(0, 0), (0, 200)], "red")
        write_dst(pattern, file1)
        dst_pattern = read_dst(file1)
        self.assertIsNotNone(dst_pattern)
        self.assertEqual(dst_pattern.count_stitch_commands(STITCH), 2)
        self.assertEqual(dst_pattern.stitches[1][1], 100)
        print("dst: ", dst_pattern.stitches)
        self.addCleanup(os.remove, file1)
    def test_write_dst_read_dst_random_stitch(self):
        """With max_stitch set, no consecutive STITCH pair exceeds the limit."""
        file1 = "fsmall.dst"
        for i in range(0, 12):
            # NOTE(review): 'max' shadows the builtin; consider renaming.
            max = (i * 10) + 1
            write_dst(get_random_pattern_small_halfs(), file1,
                      {"long_stitch_contingency": CONTINGENCY_LONG_STITCH_SEW_TO, "max_stitch": max})
            dst_pattern = read_dst(file1)
            xx = 0
            yy = 0
            command = NO_COMMAND
            for stitch in dst_pattern.stitches:
                # Track deltas between absolute positions to check stitch length.
                dx = stitch[0] - xx
                dy = stitch[1] - yy
                xx += dx
                yy += dy
                last_command = command
                command = stitch[2] & COMMAND_MASK
                if command == STITCH and last_command == STITCH:
                    self.assertLessEqual(dx, max)
                    self.assertLessEqual(dy, max)
            self.assertIsNotNone(dst_pattern)
        self.addCleanup(os.remove, file1)
    def test_write_dst_read_dst_long_jump_random_small(self):
        """Fuzz: many small random patterns round-trip through DST without error."""
        file1 = "file3small.dst"
        for i in range(0, 1000):
            write_dst(get_random_pattern_small_halfs(), file1,
                      {"long_stitch_contingency": CONTINGENCY_LONG_STITCH_SEW_TO})
            dst_pattern = read_dst(file1)
            self.assertIsNotNone(dst_pattern)
        self.addCleanup(os.remove, file1)
    def test_write_dst_read_dst_long_jump_random_large(self):
        """Fuzz: a few large random patterns round-trip through DST without error."""
        file1 = "file3large.dst"
        for i in range(0, 5):
            write_dst(get_random_pattern_large(), file1,
                      {"long_stitch_contingency": CONTINGENCY_LONG_STITCH_SEW_TO})
            dst_pattern = read_dst(file1)
            self.assertIsNotNone(dst_pattern)
        self.addCleanup(os.remove, file1)
    def test_write_dst_read_dst_divide(self):
        """Scaling a 2-unit stitch by 100 forces a split into 3 STITCH commands."""
        file1 = "file3.dst"
        pattern = EmbPattern()
        pattern.add_block([(0, 0), (0, 2)], "red")
        write_dst(pattern, file1, {"scale": 100, "long_stitch_contingency": CONTINGENCY_LONG_STITCH_SEW_TO})
        dst_pattern = read_dst(file1)
        self.assertIsNotNone(dst_pattern)
        self.assertEqual(dst_pattern.count_stitch_commands(STITCH), 3)
        self.assertEqual(dst_pattern.stitches[1][1], 100)
        print("dst: ", dst_pattern.stitches)
        self.addCleanup(os.remove, file1)
    def test_write_csv_read_csv_raw(self):
        """Default CSV round trip preserves COLOR_BREAK and STITCH counts."""
        file1 = "file.csv"
        write_csv(get_simple_pattern(), file1)
        csv_pattern = read_csv(file1)
        self.assertIsNotNone(csv_pattern)
        self.assertEqual(csv_pattern.count_stitch_commands(COLOR_BREAK), 3)
        self.assertEqual(csv_pattern.count_stitch_commands(STITCH), 15)
        self.position_equals(csv_pattern.stitches, 0, -1)
        print("csv: ", csv_pattern.stitches)
        self.addCleanup(os.remove, file1)
    def test_write_csv_read_csv_needle(self):
        """Encoding with NEEDLE_SET thread changes yields 3 needle sets."""
        file1 = "file2.csv"
        write_csv(get_simple_pattern(), "file2.csv", {"thread_change_command": NEEDLE_SET, "encode": True})
        csv_pattern = read_csv(file1)
        self.assertIsNotNone(csv_pattern)
        self.assertEqual(csv_pattern.count_stitch_commands(NEEDLE_SET), 3)
        self.assertEqual(csv_pattern.count_stitch_commands(STITCH), 15)
        print("csv: ", csv_pattern.stitches)
        self.addCleanup(os.remove, file1)
    def test_write_csv_read_csv_color(self):
        """Encoding with COLOR_CHANGE thread changes yields 2 color changes."""
        file1 = "file3.csv"
        write_csv(get_simple_pattern(), "file3.csv", {"thread_change_command": COLOR_CHANGE, "encode": True})
        csv_pattern = read_csv(file1)
        self.assertEqual(csv_pattern.count_stitch_commands(COLOR_CHANGE), 2)
        self.assertEqual(csv_pattern.count_stitch_commands(STITCH), 15)
        self.position_equals(csv_pattern.stitches, 0, -1)
        print("csv: ", csv_pattern.stitches)
        self.addCleanup(os.remove, file1)
    def test_write_csv_read_csv_encoded_command(self):
        """An encoded thread-change command survives the CSV round trip intact."""
        file1 = "file-encoded.csv"
        pattern = EmbPattern()
        encoded_command = encode_thread_change(SET_CHANGE_SEQUENCE, 3, 4, 1)
        pattern.add_command(encoded_command)
        write_csv(pattern, file1)
        csv_pattern = read_csv(file1)
        self.assertIsNotNone(csv_pattern)
        print("csv-encoded: ", csv_pattern.stitches)
        self.assertEqual(encoded_command, csv_pattern.stitches[-1][2])
        self.addCleanup(os.remove, file1)
    def test_issue_87(self):
        """
        Initial test raised by issue 87.

        Two added blocks come back as two color blocks of two stitches each.
        """
        pattern = EmbPattern()
        stitches_1 = [[0, 1], [2, 3]]
        stitches_2 = [[4, 5], [6, 7]]
        pattern.add_block(stitches_1, 0xFF0000)
        pattern.add_block(stitches_2, 0x0000FF)
        blocks = list(pattern.get_as_colorblocks())
        for q in blocks:
            print(q)
        self.assertEqual(len(blocks), 2)
        self.assertEqual(len(blocks[0][0]), 2)  # 0,1 and 2,3
        self.assertEqual(len(blocks[1][0]), 2)  # 4,5 and 6,7
def test_issue_87_2(self):
    """
    Tests a pattern arbitrarily starting with a color change.
    With two predefined blocks. The blocks should maintain their blockness.
    The color change should isolate 0 stitches, of an unknown color.
    :return:
    """
    pattern = EmbPattern()
    stitches_1 = [[0, 1], [2, 3]]
    stitches_2 = [[4, 5], [6, 7]]
    # Leading color change before any stitches: should isolate an empty block.
    pattern.color_change()
    pattern.add_thread('random')
    pattern.add_block(stitches_1, 0xFF0000)
    pattern.add_block(stitches_2, 0x0000FF)
    blocks = list(pattern.get_as_colorblocks())
    # for q in blocks:
    #     print(q)
    self.assertEqual(blocks[1][1].color, 0xFF0000)
    self.assertEqual(blocks[2][1].color, 0x0000FF)
    self.assertEqual(len(blocks), 3)
    # Block 0 holds only the isolated color change; the two real blocks follow.
    self.assertEqual(len(blocks[0][0]), 1)
    self.assertEqual(len(blocks[1][0]), 2)
    self.assertEqual(len(blocks[2][0]), 2)
    # COLOR_BREAK markers must never appear inside the emitted stitch blocks.
    for block in blocks:
        stitch_block = block[0]
        for stitch in stitch_block:
            self.assertNotEqual(stitch[2], COLOR_BREAK)
    # Second scenario: blocks terminated by explicit color changes.
    pattern = EmbPattern()
    pattern.add_thread('random')
    pattern.color_change()  # end block 1, empty
    pattern.add_thread(0xFF0000)
    pattern += stitches_1
    pattern.color_change()  # end block 2
    pattern.add_thread(0x0000FF)
    pattern += stitches_2
    blocks = list(pattern.get_as_colorblocks())
    # end block 3, no explicit end.
    # for q in blocks:
    #     print(q)
    self.assertEqual(blocks[0][0][-1][2], COLOR_CHANGE)  # Color change ends the block.
    self.assertEqual(blocks[1][0][-1][2], COLOR_CHANGE)  # Color change ends the block.
    self.assertEqual(blocks[1][1].color, 0xFF0000)
    self.assertEqual(blocks[2][1].color, 0x0000FF)
    self.assertEqual(len(blocks), 3)
    self.assertEqual(len(blocks[0][0]), 1)
    self.assertEqual(len(blocks[1][0]), 3)
    self.assertEqual(len(blocks[2][0]), 2)  # Final color change is part of no block.
    # A trailing color change joins the final block rather than opening a new one.
    pattern.color_change()  # end block 3
    blocks = list(pattern.get_as_colorblocks())
    self.assertEqual(len(blocks[2][0]), 3)  # Final block with colorchange.
def test_issue_87_3(self):
    """
    Tests a pattern arbitrarily starting with a needle_set.
    With two predefined blocks. The blocks should maintain their blockness.
    The needle set should not contribute a block. Initial needle_set, only
    define a starting needle.
    :return:
    """
    pattern = EmbPattern()
    pattern.needle_change()
    stitches_1 = [[0, 1], [2, 3]]
    stitches_2 = [[4, 5], [6, 7]]
    pattern.add_block(stitches_1, 0xFF0000)
    pattern.add_block(stitches_2, 0x0000FF)
    blocks = list(pattern.get_as_colorblocks())
    # for q in blocks:
    #     print(q)
    # Unlike a leading color change (see test_issue_87_2), a leading
    # needle_set contributes no extra block of its own.
    self.assertEqual(blocks[0][1], 0xFF0000)
    self.assertEqual(blocks[1][1], 0x0000FF)
    self.assertEqual(len(blocks), 2)
    # COLOR_BREAK markers must never appear inside the emitted stitch blocks.
    for block in blocks:
        stitch_block = block[0]
        for stitch in stitch_block:
            self.assertNotEqual(stitch[2], COLOR_BREAK)
    # Second scenario: blocks opened by explicit needle changes.
    pattern = EmbPattern()
    pattern.needle_change()  # start block 0
    pattern += stitches_1
    pattern.add_thread(EmbThread(0xFF0000))
    pattern.needle_change()  # start block 1
    pattern += stitches_1
    pattern.add_thread(EmbThread(0x0000FF))
    pattern.needle_change()  # start block 2
    pattern.add_thread(EmbThread('random'))
    blocks = list(pattern.get_as_colorblocks())
    for q in blocks:
        print(q)
    # Mask is required here since needle_set automatically appends extended data.
    self.assertEqual(blocks[0][0][0][2] & COMMAND_MASK, NEEDLE_SET)  # Needle_set starts the block.
    self.assertEqual(blocks[1][0][0][2] & COMMAND_MASK, NEEDLE_SET)  # Needle_set starts the block.
    self.assertEqual(blocks[0][1], 0xFF0000)
    self.assertEqual(blocks[1][1], 0x0000FF)
    self.assertEqual(len(blocks), 3)
    self.assertEqual(len(blocks[0][0]), 3)
    self.assertEqual(len(blocks[1][0]), 3)
    self.assertEqual(len(blocks[2][0]), 1)
def test_issue_87_4(self):
    """
    Tests a pattern arbitrarily starting with a color break.
    With two predefined blocks. The blocks should maintain their blockness.
    And ending with another arbitrary color break. This should give exactly
    2 blocks which were defined as prepended colorbreaks postpended color breaks
    are not to have an impact.
    :return:
    """
    pattern = EmbPattern()
    pattern += COLOR_BREAK
    stitches_1 = [[0, 1], [2, 3]]
    stitches_2 = [[4, 5], [6, 7]]
    pattern.add_block(stitches_1, 0xFF0000)
    pattern.add_block(stitches_2, 0x0000FF)
    pattern += COLOR_BREAK
    blocks = list(pattern.get_as_colorblocks())
    for q in blocks:
        print(q)
    # COLOR_BREAK markers must never appear inside the emitted stitch blocks.
    for block in blocks:
        stitch_block = block[0]
        for stitch in stitch_block:
            self.assertNotEqual(stitch[2], COLOR_BREAK)
    # Exactly the two real blocks survive; leading/trailing breaks add nothing.
    self.assertEqual(blocks[0][1], 0xFF0000)
    self.assertEqual(blocks[1][1], 0x0000FF)
    self.assertEqual(len(blocks), 2)
    self.assertEqual(len(blocks[0][0]), 2)
    self.assertEqual(len(blocks[1][0]), 2)
| 43.95843
| 117
| 0.649364
| 2,422
| 19,034
| 4.838976
| 0.084641
| 0.084471
| 0.032765
| 0.043174
| 0.821075
| 0.780375
| 0.751195
| 0.730461
| 0.713481
| 0.701365
| 0
| 0.043382
| 0.2443
| 19,034
| 432
| 118
| 44.060185
| 0.771413
| 0.075602
| 0
| 0.620397
| 0
| 0
| 0.038258
| 0.007709
| 0
| 0
| 0.014268
| 0
| 0.25779
| 1
| 0.07932
| false
| 0
| 0.008499
| 0
| 0.090652
| 0.033994
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
eae62de182c862dffd5b4f94a54228b065d014cc
| 33
|
py
|
Python
|
citrine_converters/mark10/__init__.py
|
rbuxman/new-refactor
|
c2254fa637fbe54a54e445a0a872d2d0bbe4165a
|
[
"BSD-2-Clause"
] | null | null | null |
citrine_converters/mark10/__init__.py
|
rbuxman/new-refactor
|
c2254fa637fbe54a54e445a0a872d2d0bbe4165a
|
[
"BSD-2-Clause"
] | 2
|
2017-10-04T19:38:48.000Z
|
2017-10-04T19:45:07.000Z
|
citrine_converters/mark10/__init__.py
|
rbuxman/new-refactor
|
c2254fa637fbe54a54e445a0a872d2d0bbe4165a
|
[
"BSD-2-Clause"
] | 2
|
2017-09-25T22:49:09.000Z
|
2018-07-03T16:35:05.000Z
|
from .converter import converter
| 16.5
| 32
| 0.848485
| 4
| 33
| 7
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eaffa33d98086a03a84abc510c6db50094e5ab57
| 10,892
|
py
|
Python
|
qcdb/tests/test_cbs_xtpl_gradient.py
|
loriab/qccddb
|
d9e156ef8b313ac0633211fc6b841f84a3ddde24
|
[
"BSD-3-Clause"
] | 8
|
2019-03-28T11:54:59.000Z
|
2022-03-19T03:31:37.000Z
|
qcdb/tests/test_cbs_xtpl_gradient.py
|
loriab/qccddb
|
d9e156ef8b313ac0633211fc6b841f84a3ddde24
|
[
"BSD-3-Clause"
] | 39
|
2018-10-31T23:02:18.000Z
|
2021-12-12T22:11:37.000Z
|
qcdb/tests/test_cbs_xtpl_gradient.py
|
loriab/qccddb
|
d9e156ef8b313ac0633211fc6b841f84a3ddde24
|
[
"BSD-3-Clause"
] | 9
|
2018-03-12T20:51:50.000Z
|
2022-02-28T15:18:34.000Z
|
import os
import numpy as np
import pytest
import qcdb
from .utils import *
#! Various gradients for a strained helium dimer and water molecule

# Reference SCF gradients for the He dimer: cc-pVDZ, cc-pVTZ, and the
# [DT]Z / [DTQ]Z CBS-extrapolated values, plus the MP2 [DT]Z reference.
ref_scf_dz = np.array([[0.0, 0.0, 0.01233095], [0.0, 0.0, -0.01233095]])
ref_scf_tz = np.array([[0.0, 0.0, 0.01246097], [0.0, 0.0, -0.01246097]])
ref_scf_dtz = np.array([[0.0, 0.0, 0.01249265], [0.0, 0.0, -0.01249265]])
ref_scf_dtqz = np.array([[0.0, 0.0, 0.01244412], [0.0, 0.0, -0.01244412]])
ref_mp2_dtz = np.array([[0.0, 0.0, 0.01155124], [0.0, 0.0, -0.01155124]])
# Reference SCF/cc-pVDZ gradient for the bent water molecule of system3().
ref2_scf_dz = np.array(
    [[0.0, 0.0, -0.095119035], [0.0151029614, 0.0, 0.0475595175], [-0.0151029614, 0.0, 0.0475595175]]
)
# y-axis, exchanged
# Column/row permutations mapping the z-axis references onto the
# y-oriented dimer of system2().
permuted_indices_col = [0, 2, 1]
permuted_indices_row = [1, 0]
ref_scf_dz_y = ref_scf_dz[:, permuted_indices_col][permuted_indices_row, :]
ref_mp2_dtz_y = ref_mp2_dtz[:, permuted_indices_col][permuted_indices_row, :]
# y-axis, exchanged, fixed
# ref_scf_dz_yf = np.array(
#    [[ 0.0, 0.02466190, 0.0],
#     [ 0.0, 0.0, 0.0]])
# NOTE(review): a2a presumably comes from the `from .utils import *` above —
# verify against tests/utils.
nucenergy_ref = 1.17594935242 * a2a
nucenergy_ref2 = 8.84102016483414
def system1():
    """Define the strained He dimer along z and set the shared conv SCF/MP2 options."""
    dimer = qcdb.set_molecule(
        """
He 0 0 0
He 0 0 1.8
"""
    )
    # Get a reasonable guess, to save some iterations
    common_options = {
        "scf_type": "conv",  # longstanding "pk"
        "mp2_type": "conv",
        "reference": "rhf",
    }
    qcdb.set_options(common_options)
    dimer.update_geometry()
    assert compare_values(nucenergy_ref, dimer.nuclear_repulsion_energy(), 9, "Nuclear repulsion energy")
def system2():
    """Define the He dimer along y (orientation fixed) and set the shared options."""
    dimer = qcdb.set_molecule(
        """
He 0 1.8 0
He 0 0 0
no_reorient
no_com
"""
    )
    # Get a reasonable guess, to save some iterations
    common_options = {
        "scf_type": "conv",  # longstanding "pk"
        "mp2_type": "conv",
        "reference": "rhf",
    }
    qcdb.set_options(common_options)
    dimer.update_geometry()
    assert compare_values(nucenergy_ref, dimer.nuclear_repulsion_energy(), 9, "Nuclear repulsion energy")
def system3():
    """Define a right-angle water molecule and set the shared conv SCF/MP2 options."""
    water = qcdb.set_molecule(
        """
O 0.0 0.0 0.0
H 1.0 0.0 0.0
H 0.0 1.0 0.0
"""
    )
    # Get a reasonable guess, to save some iterations
    common_options = {
        "scf_type": "conv",  # longstanding "pk"
        "mp2_type": "conv",
        "reference": "rhf",
    }
    qcdb.set_options(common_options)
    water.update_geometry()
    assert compare_values(nucenergy_ref2, water.nuclear_repulsion_energy(), 9, "Nuclear repulsion energy")
# SCF TESTS
@using("psi4")
def test_1a():
system1()
scf_dz, jrec = qcdb.gradient("SCF/cc-pVDZ", return_wfn=True)
assert compare_arrays(ref_scf_dz, scf_dz, 6, "[1a] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref_scf_dz, jrec["qcvars"]["CURRENT GRADIENT"].data, 6, "[1a] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref_scf_dz, qcdb.variable("CURRENT GRADIENT"), 6, "[1a] SCF/cc-pVDZ Gradient")
# assert compare_arrays(ref_scf_dz, jrec['qcvars']['HF/CC-PVDZ TOTAL GRADIENT'].data, 6, "[1a] SCF/cc-pVDZ Gradient")
assert "Psi4" == jrec["provenance"]["creator"], "[1a] prov"
@using("cfour")
def test_1b():
system1()
scf_dz, jrec = qcdb.gradient("c4-SCF/cc-pVDZ", return_wfn=True)
assert compare_arrays(ref_scf_dz, scf_dz, 6, "[1b] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref_scf_dz, jrec["qcvars"]["CURRENT GRADIENT"].data, 6, "[1b] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref_scf_dz, qcdb.variable("CURRENT GRADIENT"), 6, "[1b] SCF/cc-pVDZ Gradient")
# assert compare_arrays(ref_scf_dz, jrec['qcvars']['HF/CC-PVDZ TOTAL GRADIENT'].data, 6, "[1b] SCF/cc-pVDZ Gradient")
assert "CFOUR" == jrec["provenance"]["creator"], "[1b] prov"
@using("gamess")
def test_1c():
system1()
scf_dz, jrec = qcdb.gradient("gms-SCF/cc-pVDZ", return_wfn=True)
assert compare_arrays(ref_scf_dz, scf_dz, 6, "[1c] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref_scf_dz, jrec["qcvars"]["CURRENT GRADIENT"].data, 6, "[1c] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref_scf_dz, qcdb.variable("CURRENT GRADIENT"), 6, "[1c] SCF/cc-pVDZ Gradient")
# assert compare_arrays(ref_scf_dz, jrec['qcvars']['HF/CC-PVDZ TOTAL GRADIENT'].data, 6, "[1c] SCF/cc-pVDZ Gradient")
assert "GAMESS" == jrec["provenance"]["creator"], "[1c] prov" # GAMESS and/or QCDB??
@using("psi4")
def test_1d():
system2()
scf_dz, jrec = qcdb.gradient("SCF/cc-pVDZ", return_wfn=True)
assert compare_arrays(ref_scf_dz_y, scf_dz, 6, "[1d] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref_scf_dz_y, jrec["qcvars"]["CURRENT GRADIENT"].data, 6, "[1d] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref_scf_dz_y, qcdb.variable("CURRENT GRADIENT"), 6, "[1d] SCF/cc-pVDZ Gradient")
# assert compare_arrays(ref_scf_dz_y, jrec['qcvars']['HF/CC-PVDZ TOTAL GRADIENT'].data, 6, "[1d] SCF/cc-pVDZ Gradient")
# TODO provenance kill list
assert "Psi4" == jrec["provenance"]["creator"], "[1d] prov"
@using("cfour")
def test_1e():
system2()
scf_dz, jrec = qcdb.gradient("c4-SCF/cc-pVDZ", return_wfn=True)
assert compare_arrays(ref_scf_dz_y, scf_dz, 6, "[1e] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref_scf_dz_y, jrec["qcvars"]["CURRENT GRADIENT"].data, 6, "[1e] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref_scf_dz_y, qcdb.variable("CURRENT GRADIENT"), 6, "[1e] SCF/cc-pVDZ Gradient")
# assert compare_arrays(ref_scf_dz_y, jrec['qcvars']['HF/CC-PVDZ TOTAL GRADIENT'].data, 6, "[1e] SCF/cc-pVDZ Gradient")
assert "CFOUR" == jrec["provenance"]["creator"], "[1e] prov"
# this fails bc of the fixed orientation/com, not compatible with GAMESS's symmetry unique atoms
# @using("gamess")
# def test_1f():
# system2()
#
# scf_dz, jrec = qcdb.gradient('gms-SCF/cc-pVDZ', return_wfn=True)
# pp.pprint(jrec)
# assert compare_arrays(ref_scf_dz_y, scf_dz, 6, "[1f] SCF/cc-pVDZ Gradient")
# assert compare_arrays(ref_scf_dz_y, jrec['qcvars']['CURRENT GRADIENT'].data, 6, "[1f] SCF/cc-pVDZ Gradient")
# assert compare_arrays(ref_scf_dz_y, qcdb.variable('CURRENT GRADIENT'), 6, "[1f] SCF/cc-pVDZ Gradient")
# #assert compare_arrays(ref_scf_dz_y, jrec['qcvars']['HF/CC-PVDZ TOTAL GRADIENT'].data, 6, "[1f] SCF/cc-pVDZ Gradient")
# assert 'GAMESS' == jrec['provenance']['creator'], "[1f] prov"
@using("psi4")
def test_1g():
system3()
scf_dz, jrec = qcdb.gradient("SCF/cc-pVDZ", return_wfn=True)
assert compare_arrays(ref2_scf_dz, scf_dz, 6, "[1g] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref2_scf_dz, jrec["qcvars"]["CURRENT GRADIENT"].data, 6, "[1g] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref2_scf_dz, qcdb.variable("CURRENT GRADIENT"), 6, "[1g] SCF/cc-pVDZ Gradient")
# assert compare_arrays(ref_scf_dz, jrec['qcvars']['HF/CC-PVDZ TOTAL GRADIENT'].data, 6, "[1g] SCF/cc-pVDZ Gradient")
# assert ['QCDB', 'Psi4'] == [d['creator'] for d in jrec['provenance']], "[1g] prov"
assert "Psi4" == jrec["provenance"]["creator"], "[1g] prov"
@using("cfour")
def test_1h():
system3()
scf_dz, jrec = qcdb.gradient("c4-SCF/cc-pVDZ", return_wfn=True)
assert compare_arrays(ref2_scf_dz, scf_dz, 6, "[1h] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref2_scf_dz, jrec["qcvars"]["CURRENT GRADIENT"].data, 6, "[1h] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref2_scf_dz, qcdb.variable("CURRENT GRADIENT"), 6, "[1h] SCF/cc-pVDZ Gradient")
# assert compare_arrays(ref_scf_dz, jrec['qcvars']['HF/CC-PVDZ TOTAL GRADIENT'].data, 6, "[1h] SCF/cc-pVDZ Gradient")
assert "CFOUR" == jrec["provenance"]["creator"], "[1h] prov"
@using("gamess")
def test_1i():
system3()
scf_dz, jrec = qcdb.gradient("gms-SCF/cc-pVDZ", return_wfn=True)
assert compare_arrays(ref2_scf_dz, scf_dz, 6, "[1i] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref2_scf_dz, jrec["qcvars"]["CURRENT GRADIENT"].data, 6, "[1i] SCF/cc-pVDZ Gradient")
assert compare_arrays(ref2_scf_dz, qcdb.variable("CURRENT GRADIENT"), 6, "[1i] SCF/cc-pVDZ Gradient")
##assert compare_arrays(ref_scf_dz, jrec['qcvars']['HF/CC-PVDZ TOTAL GRADIENT'].data, 6, "[1i] SCF/cc-pVDZ Gradient")
assert "GAMESS" == jrec["provenance"]["creator"], "[1i] prov"
# def hide_test_2():
# system1()
#
# scf_tz = qcdb.gradient('SCF/cc-pVTZ', dertype=0)
# assert compare_arrays(ref_scf_tz, scf_tz, 6, "[2] SCF/cc-pVTZ Gradient, dertype=0")
#
#
# def hide_test_3():
# system1()
#
# scf_dtz = qcdb.gradient('SCF/cc-pV[23]Z', dertype=0)
# assert compare_arrays(ref_scf_dtz, scf_dtz, 6, "[3] SCF/cc-pV[DT]Z Gradient, dertype=0")
@pytest.mark.xfail(True, reason="Old Driver, Spring 2019", run=True)
@using("psi4")
def test_4a():
    """CBS [23]Z SCF gradient via Psi4 (xfail under the old driver)."""
    system1()
    grad, record = qcdb.gradient("HF/cc-pV[23]Z", return_wfn=True)
    assert compare_arrays(ref_scf_dtz, grad, 6, "[4] SCF/cc-pV[DT]Z Gradient, dertype=1")
    pp.pprint(record)
@pytest.mark.xfail(True, reason="Old Driver, Spring 2019", run=True)
@using("cfour")
def test_4b():
    """CBS [23]Z SCF gradient via CFOUR (xfail under the old driver)."""
    system1()
    grad, record = qcdb.gradient("c4-HF/cc-pV[23]Z", return_wfn=True)
    assert compare_arrays(ref_scf_dtz, grad, 6, "[4] SCF/cc-pV[DT]Z Gradient, dertype=1")
    pp.pprint(record)
@using("psi4")
def test_5a():
system1()
scf_dtqz = qcdb.gradient("HF/cc-pV[DTQ]Z")
assert compare_arrays(ref_scf_dtqz, scf_dtqz, 6, "[5] SCF/cc-pV[DTQ]Z Gradient")
@using("cfour")
def test_5b():
system1()
scf_dtqz = qcdb.gradient("c4-HF/cc-pV[DTQ]Z")
assert compare_arrays(ref_scf_dtqz, scf_dtqz, 6, "[5] SCF/cc-pV[DTQ]Z Gradient")
@using("gamess")
def test_5c():
system1()
scf_dtqz = qcdb.gradient("gms-HF/cc-pV[DTQ]Z")
assert compare_arrays(ref_scf_dtqz, scf_dtqz, 6, "[5] SCF/cc-pV[DTQ]Z Gradient")
@using("psi4")
def test_6a():
system1()
mp2_dtz = qcdb.gradient("MP2/cc-pV[DT]Z")
assert compare_arrays(ref_mp2_dtz, mp2_dtz, 6, "[6] MP2/cc-pV[DT]Z Gradient")
@using("cfour")
def test_6b():
system1()
mp2_dtz = qcdb.gradient("c4-MP2/cc-pV[DT]Z")
assert compare_arrays(ref_mp2_dtz, mp2_dtz, 6, "[6] MP2/cc-pV[DT]Z Gradient")
@using("gamess")
def test_6c():
system1()
mp2_dtz = qcdb.gradient("gms-MP2/cc-pV[DT]Z")
assert compare_arrays(ref_mp2_dtz, mp2_dtz, 6, "[6] MP2/cc-pV[DT]Z Gradient")
@using("psi4")
def test_6c():
system2()
mp2_dtz = qcdb.gradient("MP2/cc-pV[DT]Z")
assert compare_arrays(ref_mp2_dtz_y, mp2_dtz, 6, "[6] MP2/cc-pV[DT]Z Gradient")
@using("cfour")
def test_6d():
system2()
mp2_dtz = qcdb.gradient("c4-MP2/cc-pV[DT]Z")
assert compare_arrays(ref_mp2_dtz_y, mp2_dtz, 6, "[6] MP2/cc-pV[DT]Z Gradient")
# def hide_test_7():
# system1()
#
# mp2_dtz = qcdb.gradient('MP2/cc-pV[DT]Z', dertype='energy')
# assert compare_arrays(ref_mp2_dtz, mp2_dtz, 6, "[7] MP2/cc-pV[DT]Z Gradient, dertype=0")
| 33.931464
| 123
| 0.654517
| 1,717
| 10,892
| 3.966803
| 0.101922
| 0.02173
| 0.022904
| 0.129203
| 0.859492
| 0.813096
| 0.742622
| 0.710762
| 0.703568
| 0.634709
| 0
| 0.058055
| 0.171318
| 10,892
| 320
| 124
| 34.0375
| 0.696543
| 0.251561
| 0
| 0.473988
| 0
| 0
| 0.256274
| 0
| 0
| 0
| 0
| 0.003125
| 0.260116
| 1
| 0.121387
| false
| 0
| 0.028902
| 0
| 0.150289
| 0.011561
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d80b04cd062660df91da7fef59268fa1e8f127a7
| 2,018
|
py
|
Python
|
tests/test_training.py
|
guillaume-chevalier/SGNN-Transformer-Sentence-Model-SimilarityBXENT
|
4176ee7aad926463f240ae4b47b1bf4c547e6187
|
[
"BSD-3-Clause"
] | 3
|
2019-02-04T01:10:20.000Z
|
2019-04-16T12:19:11.000Z
|
tests/test_training.py
|
guillaume-chevalier/SGNN-Transformer-Sentence-Model-SimilarityBXENT
|
4176ee7aad926463f240ae4b47b1bf4c547e6187
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_training.py
|
guillaume-chevalier/SGNN-Transformer-Sentence-Model-SimilarityBXENT
|
4176ee7aad926463f240ae4b47b1bf4c547e6187
|
[
"BSD-3-Clause"
] | null | null | null |
import traceback
import pytest
from src.model.save_load_model import delete_model, MY_MODEL_NAME
from src.training import train_model_on_data
def test_can_train_one_epoch_of_one_step():
    """Train for a single step of a single epoch on CPU; model files are cleaned up."""
    batch_size = 10
    train_iters_per_epoch = 1
    max_epoch = 1
    cuda_device_id = None  # None for CPU, 0 for first GPU, etc.
    model_suffix = ".__unit_test_delete_this_file__.0"
    epoch_model_name = MY_MODEL_NAME + ".epoch_{}" + model_suffix
    success = False
    try:
        preproc_sgnn_sklearn_pipeline, model_trainer = train_model_on_data(
            max_epoch, train_iters_per_epoch, batch_size,
            preproc_sgnn_sklearn_pipeline=None,
            model_trainer=None,
            cuda_device_id=cuda_device_id,
            plot=False,
            epoch_model_name=epoch_model_name
        )
        success = True
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit are
        # not swallowed; any training failure still surfaces via the final assert.
        traceback.print_exc()
    finally:
        epoch_model_name = epoch_model_name.format("{}", "*")
        print(epoch_model_name)
        delete_model(epoch_model_name)
    assert success
def test_can_train_one_epoch_of_one_step_on_GPU():
    """Train for a single step of a single epoch on the first GPU; model files are cleaned up."""
    batch_size = 10
    train_iters_per_epoch = 1
    max_epoch = 1
    cuda_device_id = 0  # None for CPU, 0 for first GPU, etc.
    model_suffix = ".__unit_test_delete_this_file__.1"
    epoch_model_name = MY_MODEL_NAME + ".epoch_{}" + model_suffix
    success = False
    try:
        preproc_sgnn_sklearn_pipeline, model_trainer = train_model_on_data(
            max_epoch,
            train_iters_per_epoch,
            batch_size,
            preproc_sgnn_sklearn_pipeline=None,
            model_trainer=None,
            cuda_device_id=cuda_device_id,
            plot=False,
            epoch_model_name=epoch_model_name,
        )
        success = True
    finally:
        # Glob-style cleanup of every epoch file the run produced.
        epoch_model_name = epoch_model_name.format("{}", "*")
        print(epoch_model_name)
        delete_model(epoch_model_name)  # If this fails, the files were probably not already created.
    assert success
if __name__ == '__main__':
    # Propagate pytest's exit code; the script previously always exited 0,
    # hiding test failures from any caller checking the status.
    raise SystemExit(pytest.main([__file__]))
| 32.031746
| 101
| 0.681368
| 272
| 2,018
| 4.514706
| 0.253676
| 0.124593
| 0.159609
| 0.092834
| 0.796417
| 0.796417
| 0.796417
| 0.796417
| 0.796417
| 0.7443
| 0
| 0.008575
| 0.248761
| 2,018
| 62
| 102
| 32.548387
| 0.801451
| 0.064916
| 0
| 0.703704
| 0
| 0
| 0.052045
| 0.03505
| 0
| 0
| 0
| 0
| 0.037037
| 1
| 0.037037
| false
| 0
| 0.074074
| 0
| 0.111111
| 0.055556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dc5410f85527801650f23a1a9a88a5e498fdb1b3
| 97
|
py
|
Python
|
dw/generate/__init__.py
|
GongJunior/dw-Web
|
0a4309ce11e3404e8ae668b00e4ab522cc2b29ae
|
[
"MIT"
] | null | null | null |
dw/generate/__init__.py
|
GongJunior/dw-Web
|
0a4309ce11e3404e8ae668b00e4ab522cc2b29ae
|
[
"MIT"
] | null | null | null |
dw/generate/__init__.py
|
GongJunior/dw-Web
|
0a4309ce11e3404e8ae668b00e4ab522cc2b29ae
|
[
"MIT"
] | null | null | null |
from flask import Blueprint

# Blueprint for the "generate" section of the application.
bp = Blueprint('generate', __name__)

# Imported after ``bp`` exists so the routes module can register its views
# on the blueprint; presumably placed at the bottom to avoid a circular
# import — verify against dw/generate/routes.py.
from dw.generate import routes
| 19.4
| 36
| 0.793814
| 13
| 97
| 5.615385
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134021
| 97
| 5
| 37
| 19.4
| 0.869048
| 0
| 0
| 0
| 1
| 0
| 0.081633
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
dc84046efbf9fb44cc39d6749e9a8c17e2299902
| 58
|
py
|
Python
|
division of string.py
|
ashwin8797/python-learning
|
e260dd8380315939dd7bd2a6b2c0a44b59f709c0
|
[
"MIT"
] | null | null | null |
division of string.py
|
ashwin8797/python-learning
|
e260dd8380315939dd7bd2a6b2c0a44b59f709c0
|
[
"MIT"
] | null | null | null |
division of string.py
|
ashwin8797/python-learning
|
e260dd8380315939dd7bd2a6b2c0a44b59f709c0
|
[
"MIT"
] | null | null | null |
# NOTE(review): both print() calls below divide two *string literals*
# ('a' / 'b'), not the variables a and b, so the first print raises
# TypeError and the lines after it never run. This looks like a deliberate
# demonstration of an unsupported operation (the repo name suggests a
# learning exercise); if the intent was to divide the variables, drop the
# quotes — though string / string would still raise TypeError.
a='cdef'
b='ab'
print('a'/'b')
a=8
b='ab'
print('a'/'b')
| 7.25
| 14
| 0.465517
| 14
| 58
| 1.928571
| 0.428571
| 0.222222
| 0.592593
| 0.666667
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.12069
| 58
| 7
| 15
| 8.285714
| 0.509804
| 0
| 0
| 0.666667
| 0
| 0
| 0.206897
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dcb2209bd7f0a8c7167b8778bed9c178a80ec46e
| 116
|
py
|
Python
|
colordict/__init__.py
|
aleferna12/colordict
|
ee66c3695c755a1a8469f740980c53df0d74471d
|
[
"MIT"
] | 1
|
2020-05-30T22:01:55.000Z
|
2020-05-30T22:01:55.000Z
|
colordict/__init__.py
|
aleferna12/colordict
|
ee66c3695c755a1a8469f740980c53df0d74471d
|
[
"MIT"
] | 1
|
2021-04-19T19:16:56.000Z
|
2021-04-29T12:36:06.000Z
|
colordict/__init__.py
|
aleferna12/colordict
|
ee66c3695c755a1a8469f740980c53df0d74471d
|
[
"MIT"
] | null | null | null |
# Re-export the public API of the color, cdict and gradients submodules at
# the package top level.
from colordict.color import *
from colordict.cdict import *
from colordict.gradients import *

# Package version string; presumably kept in sync with the release
# metadata — verify on release.
__version__ = '1.2.5'
| 23.2
| 33
| 0.775862
| 16
| 116
| 5.375
| 0.625
| 0.453488
| 0.44186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029703
| 0.12931
| 116
| 5
| 34
| 23.2
| 0.821782
| 0
| 0
| 0
| 0
| 0
| 0.042735
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f4e76adfae16c6fe70fdb1aa4939999c15e99ad1
| 4,282
|
py
|
Python
|
utils/triplet.py
|
loribonna/CSSL
|
60095d873dd62bb4403a4c20f054543673a21e7a
|
[
"MIT"
] | 3
|
2022-01-04T09:21:19.000Z
|
2022-01-14T22:38:04.000Z
|
utils/triplet.py
|
loribonna/CSSL
|
60095d873dd62bb4403a4c20f054543673a21e7a
|
[
"MIT"
] | null | null | null |
utils/triplet.py
|
loribonna/CSSL
|
60095d873dd62bb4403a4c20f054543673a21e7a
|
[
"MIT"
] | null | null | null |
# Copyright 2021-present, Lorenzo Bonicelli, Pietro Buzzega, Matteo Boschini, Angelo Porrello, Simone Calderara.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import torch
def negative_only_triplet_loss(labels, embeddings, k, margin=0, margin_type='soft'):
    """Negatives-only variant of the batch triplet loss.

    For each anchor, the ``k`` nearest negatives (rows with a different
    label) are penalized; positive pairs contribute nothing to the loss.

    Args:
        labels: labels of the batch, of size (batch_size,)
        embeddings: tensor of shape (batch_size, embed_dim)
        k: number of hardest (closest) negatives kept per anchor; capped at
            the batch size.
        margin: margin added to the negated negative distance.
        margin_type: 'soft' applies log1p(exp(.)) (softplus); any other
            value applies a hard hinge via clamp(., min=0).

    Returns:
        Scalar tensor with the mean loss, or ``None`` when no anchor has a
        finite-distance negative (e.g. all labels identical).
    """
    k = min(k, labels.shape[0])

    # Pairwise squared euclidean distances, shape (batch_size, batch_size).
    pairwise_dist = (embeddings.unsqueeze(0) - embeddings.unsqueeze(1)).pow(2).sum(2)

    # Mask of same-label (anchor, positive) pairs; includes the diagonal.
    mask_anchor_positive = torch.eq(torch.unsqueeze(labels, 0), torch.unsqueeze(labels, 1)).float()

    # Invalidate positives with +inf so topk(largest=False) only sees negatives.
    anchor_negative_dist = pairwise_dist
    anchor_negative_dist[mask_anchor_positive.bool()] = float('inf')

    # k smallest negative distances per anchor, shape (batch_size, k).
    hardest_negative_dist = torch.topk(anchor_negative_dist, k=k, dim=1, largest=False)[0]

    # Drop slots that had fewer than k real negatives (still +inf).
    mask = hardest_negative_dist != float('inf')
    dneg = hardest_negative_dist[mask]
    if dneg.shape[0] == 0:
        return None

    if margin_type == 'soft':
        loss = torch.log1p(torch.exp(- dneg + float(margin)))
    else:
        loss = torch.clamp(- dneg + float(margin), min=0.0)

    # Reduce to the mean scalar loss.
    loss = torch.mean(loss)
    return loss
def batch_hard_triplet_loss(labels, embeddings, k, margin=0, margin_type='soft'):
    """Build the batch-hard triplet loss over a batch of embeddings.

    For each anchor, the ``k`` farthest positives and ``k`` nearest
    negatives form the triplets.

    Args:
        labels: labels of the batch, of size (batch_size,)
        embeddings: tensor of shape (batch_size, embed_dim)
        k: number of hardest positives/negatives kept per anchor; capped at
            the batch size.
        margin: margin added to ``d(a, p) - d(a, n)``.
        margin_type: 'soft' applies log1p(exp(.)) (softplus); any other
            value applies a hard hinge via clamp(., min=0).

    Returns:
        Scalar tensor with the mean loss, or ``None`` when no valid triplet
        exists (e.g. all labels identical).
    """
    k = min(k, labels.shape[0])

    # Pairwise squared euclidean distances, shape (batch_size, batch_size).
    pairwise_dist = (embeddings.unsqueeze(0) - embeddings.unsqueeze(1)).pow(2).sum(2)

    # Mask of same-label (anchor, positive) pairs; note the diagonal
    # (a == p, distance 0) is included, matching the original behaviour.
    mask_anchor_positive = torch.eq(torch.unsqueeze(labels, 0), torch.unsqueeze(labels, 1)).float()

    # Zero out non-positive pairs before taking the k largest distances.
    anchor_positive_dist = mask_anchor_positive * pairwise_dist

    # k largest positive distances per anchor, shape (batch_size, k).
    hardest_positive_dist = torch.topk(anchor_positive_dist, k=k, dim=1, largest=True)[0]

    # Invalidate positives with +inf so topk(largest=False) only sees negatives.
    anchor_negative_dist = pairwise_dist
    anchor_negative_dist[mask_anchor_positive.bool()] = float('inf')

    # k smallest negative distances per anchor, shape (batch_size, k).
    hardest_negative_dist = torch.topk(anchor_negative_dist, k=k, dim=1, largest=False)[0]

    # Drop slots that had fewer than k real negatives (still +inf).
    mask = hardest_negative_dist != float('inf')
    dpos = hardest_positive_dist[mask]
    dneg = hardest_negative_dist[mask]
    if dpos.shape[0] == 0 or dneg.shape[0] == 0:
        return None

    # Combine biggest d(a, p) and smallest d(a, n) into the final triplet loss.
    if margin_type == 'soft':
        loss = torch.log1p(torch.exp(dpos - dneg + float(margin)))
    else:
        loss = torch.clamp(dpos - dneg + float(margin), min=0.0)

    # Reduce to the mean scalar loss. The original ended with a leftover
    # ``import pdb; pdb.set_trace()`` guarded by ``loss < 0`` — a debugger
    # trap that would hang any non-interactive run; removed.
    loss = torch.mean(loss)
    return loss
| 37.561404
| 112
| 0.689164
| 623
| 4,282
| 4.619583
| 0.215088
| 0.045865
| 0.037526
| 0.029187
| 0.825226
| 0.825226
| 0.799166
| 0.776234
| 0.776234
| 0.776234
| 0
| 0.012246
| 0.218122
| 4,282
| 113
| 113
| 37.893805
| 0.847372
| 0.48412
| 0
| 0.65
| 0
| 0
| 0.013352
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.05
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
524974f598f6db1aa47f48b3302976bd8e7637b0
| 67
|
py
|
Python
|
nesdis_aws/__init__.py
|
hagne/nesdis_aws
|
13db768d83274320dfacbac130fe3e355b647387
|
[
"MIT"
] | null | null | null |
nesdis_aws/__init__.py
|
hagne/nesdis_aws
|
13db768d83274320dfacbac130fe3e355b647387
|
[
"MIT"
] | 2
|
2021-09-22T15:37:23.000Z
|
2021-12-23T18:36:44.000Z
|
nesdis_aws/__init__.py
|
hagne/nesdis_aws
|
13db768d83274320dfacbac130fe3e355b647387
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from nesdis_aws.nesdis_aws import AwsQuery
| 22.333333
| 42
| 0.716418
| 10
| 67
| 4.6
| 0.8
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017241
| 0.134328
| 67
| 2
| 43
| 33.5
| 0.775862
| 0.313433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
52515b6b78169ef12c353e31f3a806e39dc05655
| 632
|
py
|
Python
|
peacemakr/generated/api/__init__.py
|
peacemakr-io/peacemakr-python-sdk
|
180bbc2e480ea855dddf0e28c2f27e83a17bfb84
|
[
"Apache-2.0"
] | 3
|
2020-01-27T10:07:29.000Z
|
2021-05-17T16:45:59.000Z
|
peacemakr/generated/api/__init__.py
|
peacemakr-io/peacemakr-python-sdk
|
180bbc2e480ea855dddf0e28c2f27e83a17bfb84
|
[
"Apache-2.0"
] | 7
|
2020-06-24T03:55:36.000Z
|
2021-03-30T00:43:51.000Z
|
peacemakr/generated/api/__init__.py
|
peacemakr-io/peacemakr-python-sdk
|
180bbc2e480ea855dddf0e28c2f27e83a17bfb84
|
[
"Apache-2.0"
] | 1
|
2021-04-27T04:12:30.000Z
|
2021-04-27T04:12:30.000Z
|
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from peacemakr.generated.api.client_api import ClientApi
from peacemakr.generated.api.crypto_config_api import CryptoConfigApi
from peacemakr.generated.api.key_derivation_service_registry_api import KeyDerivationServiceRegistryApi
from peacemakr.generated.api.key_service_api import KeyServiceApi
from peacemakr.generated.api.login_api import LoginApi
from peacemakr.generated.api.org_api import OrgApi
from peacemakr.generated.api.phone_home_api import PhoneHomeApi
from peacemakr.generated.api.server_management_api import ServerManagementApi
| 45.142857
| 103
| 0.882911
| 83
| 632
| 6.481928
| 0.39759
| 0.193309
| 0.327138
| 0.371747
| 0.104089
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001709
| 0.074367
| 632
| 13
| 104
| 48.615385
| 0.917949
| 0.064873
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5260f220b0106e704783abff8ca4c9a6b941e16d
| 212
|
py
|
Python
|
meld/components/counter.py
|
kaidamasaki/teach
|
1d29d3545705edd101216c1db108a4db118090ea
|
[
"Unlicense"
] | null | null | null |
meld/components/counter.py
|
kaidamasaki/teach
|
1d29d3545705edd101216c1db108a4db118090ea
|
[
"Unlicense"
] | null | null | null |
meld/components/counter.py
|
kaidamasaki/teach
|
1d29d3545705edd101216c1db108a4db118090ea
|
[
"Unlicense"
] | null | null | null |
from flask_meld.component import Component
class Counter(Component):
    """Meld component exposing a simple integer counter with two actions."""

    count = 0

    def add(self):
        """Increment the counter by one."""
        current = int(self.count)
        self.count = current + 1

    def subtract(self):
        """Decrement the counter by one."""
        current = int(self.count)
        self.count = current - 1
| 17.666667
| 42
| 0.632075
| 29
| 212
| 4.586207
| 0.517241
| 0.270677
| 0.195489
| 0.240602
| 0.390977
| 0.390977
| 0.390977
| 0
| 0
| 0
| 0
| 0.019108
| 0.259434
| 212
| 11
| 43
| 19.272727
| 0.828025
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
8710f4f3de2c7ea447eace537c20b1c9f17c237f
| 47
|
py
|
Python
|
waterbutler/providers/s3compat/__init__.py
|
KakeruMizuno/RDM-waterbutler
|
58ecd801385a7572d1ed56568a31f701291c4e3e
|
[
"Apache-2.0"
] | null | null | null |
waterbutler/providers/s3compat/__init__.py
|
KakeruMizuno/RDM-waterbutler
|
58ecd801385a7572d1ed56568a31f701291c4e3e
|
[
"Apache-2.0"
] | 9
|
2018-04-13T04:57:46.000Z
|
2019-08-20T13:58:02.000Z
|
waterbutler/providers/s3compat/__init__.py
|
KakeruMizuno/RDM-waterbutler
|
58ecd801385a7572d1ed56568a31f701291c4e3e
|
[
"Apache-2.0"
] | 6
|
2018-09-29T13:49:26.000Z
|
2022-01-27T04:35:12.000Z
|
from .provider import S3CompatProvider # noqa
| 23.5
| 46
| 0.808511
| 5
| 47
| 7.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.148936
| 47
| 1
| 47
| 47
| 0.925
| 0.085106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
87417c7bbcb9f76e6735a0ceeac7ffd0b8e1be07
| 9,443
|
py
|
Python
|
tests/contracts/KT1TEyRcaJi39jgj4Uuz7VD6Jmn6CDDADv4x/test_michelson_coding_KT1TEy.py
|
juztin/pytezos-1
|
7e608ff599d934bdcf129e47db43dbdb8fef9027
|
[
"MIT"
] | 1
|
2020-08-11T02:31:24.000Z
|
2020-08-11T02:31:24.000Z
|
tests/contracts/KT1TEyRcaJi39jgj4Uuz7VD6Jmn6CDDADv4x/test_michelson_coding_KT1TEy.py
|
juztin/pytezos-1
|
7e608ff599d934bdcf129e47db43dbdb8fef9027
|
[
"MIT"
] | 1
|
2020-12-30T16:44:56.000Z
|
2020-12-30T16:44:56.000Z
|
tests/contracts/KT1TEyRcaJi39jgj4Uuz7VD6Jmn6CDDADv4x/test_michelson_coding_KT1TEy.py
|
tqtezos/pytezos
|
a4ac0b022d35d4c9f3062609d8ce09d584b5faa8
|
[
"MIT"
] | 1
|
2022-03-20T19:01:00.000Z
|
2022-03-20T19:01:00.000Z
|
from unittest import TestCase
from tests import get_data
from pytezos.michelson.micheline import michelson_to_micheline
from pytezos.michelson.formatter import micheline_to_michelson
class MichelsonCodingTestKT1TEy(TestCase):
    """Round-trip Michelson <-> Micheline coding tests for contract
    KT1TEyRcaJi39jgj4Uuz7VD6Jmn6CDDADv4x.

    Each fixture ``<name>`` exists as both a Michelson source (``.tz``) and
    its reference Micheline JSON (``.json``); the tests check parsing,
    formatting, and the parse(format(x)) == x inverse property.
    """

    # Directory (relative to the test data root) holding this contract's fixtures.
    CONTRACT_DIR = 'contracts/KT1TEyRcaJi39jgj4Uuz7VD6Jmn6CDDADv4x'

    def setUp(self):
        # Show full diffs on assertion failure — fixtures are large.
        self.maxDiff = None

    def _fixture(self, name, ext):
        """Load the ``<name>.<ext>`` fixture for this contract."""
        return get_data(path='{}/{}.{}'.format(self.CONTRACT_DIR, name, ext))

    def _check_parse(self, name):
        """Parsing the .tz fixture must yield the reference .json Micheline."""
        expected = self._fixture(name, 'json')
        actual = michelson_to_micheline(self._fixture(name, 'tz'))
        self.assertEqual(expected, actual)

    def _check_format(self, name):
        """Formatting the .json fixture (inline) must yield the .tz source."""
        expected = self._fixture(name, 'tz')
        actual = micheline_to_michelson(self._fixture(name, 'json'), inline=True)
        self.assertEqual(expected, actual)

    def _check_inverse(self, name):
        """michelson_to_micheline must invert micheline_to_michelson exactly."""
        expected = self._fixture(name, 'json')
        actual = michelson_to_micheline(micheline_to_michelson(expected))
        self.assertEqual(expected, actual)

    def test_michelson_parse_code_KT1TEy(self):
        self._check_parse('code_KT1TEy')

    def test_michelson_format_code_KT1TEy(self):
        self._check_format('code_KT1TEy')

    def test_michelson_inverse_code_KT1TEy(self):
        self._check_inverse('code_KT1TEy')

    def test_michelson_parse_storage_KT1TEy(self):
        self._check_parse('storage_KT1TEy')

    def test_michelson_format_storage_KT1TEy(self):
        self._check_format('storage_KT1TEy')

    def test_michelson_inverse_storage_KT1TEy(self):
        self._check_inverse('storage_KT1TEy')

    def test_michelson_parse_parameter_ooSJeE(self):
        self._check_parse('parameter_ooSJeE')

    def test_michelson_format_parameter_ooSJeE(self):
        self._check_format('parameter_ooSJeE')

    def test_michelson_inverse_parameter_ooSJeE(self):
        self._check_inverse('parameter_ooSJeE')

    def test_michelson_parse_parameter_opKSUG(self):
        self._check_parse('parameter_opKSUG')

    def test_michelson_format_parameter_opKSUG(self):
        self._check_format('parameter_opKSUG')

    def test_michelson_inverse_parameter_opKSUG(self):
        self._check_inverse('parameter_opKSUG')

    def test_michelson_parse_parameter_ootApt(self):
        self._check_parse('parameter_ootApt')

    def test_michelson_format_parameter_ootApt(self):
        self._check_format('parameter_ootApt')

    def test_michelson_inverse_parameter_ootApt(self):
        self._check_inverse('parameter_ootApt')

    def test_michelson_parse_parameter_oo7dXA(self):
        self._check_parse('parameter_oo7dXA')

    def test_michelson_format_parameter_oo7dXA(self):
        self._check_format('parameter_oo7dXA')

    def test_michelson_inverse_parameter_oo7dXA(self):
        self._check_inverse('parameter_oo7dXA')

    def test_michelson_parse_parameter_opJMM9(self):
        self._check_parse('parameter_opJMM9')

    def test_michelson_format_parameter_opJMM9(self):
        self._check_format('parameter_opJMM9')

    def test_michelson_inverse_parameter_opJMM9(self):
        self._check_inverse('parameter_opJMM9')

    def test_michelson_parse_parameter_op6KiD(self):
        self._check_parse('parameter_op6KiD')

    def test_michelson_format_parameter_op6KiD(self):
        self._check_format('parameter_op6KiD')

    def test_michelson_inverse_parameter_op6KiD(self):
        self._check_inverse('parameter_op6KiD')

    def test_michelson_parse_parameter_onkFnb(self):
        self._check_parse('parameter_onkFnb')

    def test_michelson_format_parameter_onkFnb(self):
        self._check_format('parameter_onkFnb')

    def test_michelson_inverse_parameter_onkFnb(self):
        self._check_inverse('parameter_onkFnb')
| 46.9801
| 90
| 0.733983
| 880
| 9,443
| 7.563636
| 0.05
| 0.048377
| 0.074369
| 0.135216
| 0.963341
| 0.963341
| 0.963341
| 0.963341
| 0.947416
| 0.947416
| 0
| 0.052514
| 0.191359
| 9,443
| 200
| 91
| 47.215
| 0.819146
| 0
| 0
| 0.639053
| 0
| 0
| 0.316531
| 0.316531
| 0
| 0
| 0
| 0
| 0.159763
| 1
| 0.16568
| false
| 0
| 0.023669
| 0
| 0.195266
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5ec5f8854536907f1712c481b9785dc2f47146cf
| 63
|
py
|
Python
|
vrp/__init__.py
|
israelpereira55/CVRPPLOT
|
c4e59f04cab8d6661729decacd044ec619d6d0a4
|
[
"MIT"
] | null | null | null |
vrp/__init__.py
|
israelpereira55/CVRPPLOT
|
c4e59f04cab8d6661729decacd044ec619d6d0a4
|
[
"MIT"
] | null | null | null |
vrp/__init__.py
|
israelpereira55/CVRPPLOT
|
c4e59f04cab8d6661729decacd044ec619d6d0a4
|
[
"MIT"
] | null | null | null |
from .cvrp import CVRP
from .cvrp_solution import CVRP_Solution
| 31.5
| 40
| 0.857143
| 10
| 63
| 5.2
| 0.4
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 63
| 2
| 40
| 31.5
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5ec95bf00cb761c170e9cf77a5c77cb77c53e83a
| 110
|
py
|
Python
|
jagdreisencheck/context_processors.py
|
HerbyDE/jagdreisencheck-webapp
|
9af5deda2423b787da88a0c893f3c474d8e4f73f
|
[
"BSD-3-Clause"
] | null | null | null |
jagdreisencheck/context_processors.py
|
HerbyDE/jagdreisencheck-webapp
|
9af5deda2423b787da88a0c893f3c474d8e4f73f
|
[
"BSD-3-Clause"
] | null | null | null |
jagdreisencheck/context_processors.py
|
HerbyDE/jagdreisencheck-webapp
|
9af5deda2423b787da88a0c893f3c474d8e4f73f
|
[
"BSD-3-Clause"
] | null | null | null |
import os
def testsystem(request):
    """Django context processor exposing whether this deployment is a test system.

    Reads the ``testsystem`` environment variable (default ``"True"``) and
    returns it as a boolean under the ``is_testsystem`` key.

    Security fix: the previous implementation called ``eval`` on the raw
    environment value, which executes arbitrary code taken from the process
    environment. This version parses the value explicitly; it remains
    backward-compatible with the ``"True"``/``"False"`` literals the old
    code handled, and additionally accepts common truthy spellings.
    """
    raw = os.environ.get('testsystem', 'True')
    return {'is_testsystem': raw.strip().lower() in ('true', '1', 'yes', 'on')}
| 18.333333
| 72
| 0.7
| 14
| 110
| 5.428571
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 110
| 6
| 72
| 18.333333
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0.243243
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
0d65483cbb740a43d18ccce67774236ded01c2b9
| 69,585
|
py
|
Python
|
tests/test_walker.py
|
Code-ReaQtor/latex2mathml
|
849bc9d3e6a0fed255a6eb5709b7f4f4451a9d6b
|
[
"MIT"
] | 21
|
2016-01-10T22:10:32.000Z
|
2018-12-19T12:45:19.000Z
|
tests/test_walker.py
|
Code-ReaQtor/latex2mathml
|
849bc9d3e6a0fed255a6eb5709b7f4f4451a9d6b
|
[
"MIT"
] | 23
|
2016-07-18T10:07:45.000Z
|
2018-11-11T14:12:42.000Z
|
tests/test_walker.py
|
Code-ReaQtor/latex2mathml
|
849bc9d3e6a0fed255a6eb5709b7f4f4451a9d6b
|
[
"MIT"
] | 4
|
2016-03-13T06:53:14.000Z
|
2018-12-25T05:33:04.000Z
|
import string
from typing import Any, Tuple, Union
import pytest
from latex2mathml.exceptions import (
DenominatorNotFoundError,
DoubleSubscriptsError,
DoubleSuperscriptsError,
ExtraLeftOrMissingRightError,
InvalidAlignmentError,
InvalidStyleForGenfracError,
InvalidWidthError,
LimitsMustFollowMathOperatorError,
MissingEndError,
MissingSuperScriptOrSubscriptError,
NumeratorNotFoundError,
)
from latex2mathml.walker import Node, walk
@pytest.mark.parametrize(
"latex, expected",
[
pytest.param(string.ascii_letters, [Node(token=c) for c in string.ascii_letters], id="alphabets"),
pytest.param("{{}}", [Node(token="{}", children=(Node(token="{}", children=()),))], id="empty-group"),
pytest.param(string.digits, [Node(token=string.digits)], id="numbers"),
pytest.param("12.56", [Node(token="12.56")], id="decimals"),
pytest.param("5x", [Node(token="5"), Node(token="x")], id="numbers-and-alphabets"),
pytest.param("5.8x", [Node(token="5.8"), Node(token="x")], id="decimals-and-alphabets"),
pytest.param("3 x", [Node(token="3"), Node(token="x")], id="string-with-space"),
pytest.param("+-*/=()[])]([", [Node(token=c) for c in "+-*/=()[])](["], id="operators"),
pytest.param("3 + 5x - 5y = 7", [Node(token=c) for c in "3+5x-5y=7"], id="numbers-alphabets-and-operators"),
pytest.param(r"\alpha\beta", [Node(token=r"\alpha"), Node(token=r"\beta")], id="symbols"),
pytest.param(
r"\frac2x",
[Node(token=r"\frac", children=(Node(token="2"), Node(token="x")))],
id="symbols-appended-with-number",
),
pytest.param("{a}", [Node(token="{}", children=(Node(token="a"),))], id="single-group"),
pytest.param(
"{a}{b}",
[Node(token="{}", children=(Node(token="a"),)), Node(token="{}", children=(Node(token="b"),))],
id="multiple-groups",
),
pytest.param(
"{a+{b}}",
[
Node(
token="{}",
children=(Node(token="a"), Node(token="+"), Node(token="{}", children=(Node(token="b"),))),
)
],
id="inner-group",
),
pytest.param("a_b", [Node(token="_", children=(Node(token="a"), Node(token="b")))], id="subscript-1"),
pytest.param(
"{a_b}",
[Node(token="{}", children=(Node(token="_", children=(Node(token="a"), Node(token="b"))),))],
id="subscript-2",
),
pytest.param("1_2", [Node(token="_", children=(Node(token="1"), Node(token="2")))], id="subscript-3"),
pytest.param("1.2_2", [Node(token="_", children=(Node(token="1.2"), Node(token="2")))], id="subscript-4"),
pytest.param("a^b", [Node(token="^", children=(Node(token="a"), Node(token="b")))], id="superscript-1"),
pytest.param(
"{a^b}",
[Node(token="{}", children=(Node(token="^", children=(Node(token="a"), Node(token="b"))),))],
id="superscript-2",
),
pytest.param(
"a^{i+1}_3",
[
Node(
token="_^",
children=(
Node(token="a"),
Node(token="3"),
Node(token="{}", children=(Node(token="i"), Node(token="+"), Node(token="1"))),
),
)
],
id="superscript-3",
),
pytest.param(
"a_b^c",
[Node(token="_^", children=(Node(token="a"), Node(token="b"), Node(token="c")))],
id="subscript-and-superscript-1",
),
pytest.param(
"a^b_c",
[Node(token="_^", children=(Node(token="a"), Node(token="c"), Node(token="b")))],
id="subscript-and-superscript-2",
),
pytest.param(
r"\sqrt[3]{2}",
[Node(token=r"\root", children=(Node(token="{}", children=(Node(token="2"),)), Node(token="3")))],
id="root",
),
pytest.param(
r"\frac{1}{2}",
[
Node(
token=r"\frac",
children=(
Node(token="{}", children=(Node(token="1"),)),
Node(token="{}", children=(Node(token="2"),)),
),
),
],
id="fraction-1",
),
pytest.param(
r"1 \over 2",
[Node(token=r"\frac", children=(Node(token="1"), Node(token="2")))],
id="fraction-2",
),
pytest.param(
r"{1 \over 2}",
[Node(token="{}", children=(Node(token=r"\frac", children=(Node(token="1"), Node(token="2"))),))],
id="fraction-3",
),
pytest.param(
r"\left\{\right.",
[Node(token=r"\left", children=(Node(token=r"\right", delimiter="."),), delimiter=r"\{")],
id="null-delimiter-1",
),
pytest.param(
r"\matrix{a & b \\ c & d}",
[
Node(
token=r"\matrix",
children=(
Node(token="a"),
Node(token="&"),
Node(token="b"),
Node(token=r"\\"),
Node(token="c"),
Node(token="&"),
Node(token="d"),
),
alignment="",
)
],
id="matrix-1",
),
pytest.param(
r"\begin{matrix}a & b \\ c & d \end{matrix}",
[
Node(
token=r"\matrix",
children=(
Node(token="a"),
Node(token="&"),
Node(token="b"),
Node(token=r"\\"),
Node(token="c"),
Node(token="&"),
Node(token="d"),
),
alignment="",
)
],
id="matrix-2",
),
pytest.param(
r"\left\{ \begin{array} { l } { 3x - 5y + 4z = 0} \\ { x - y + 8z = 0} \\ { 2x - 6y + z = 0} "
r"\end{array}\right.",
[
Node(
token=r"\left",
children=(
Node(
token=r"\array",
alignment="l",
children=(
Node(
token="{}",
children=(
Node(token="3"),
Node(token="x"),
Node(token="-"),
Node(token="5"),
Node(token="y"),
Node(token="+"),
Node(token="4"),
Node(token="z"),
Node(token="="),
Node(token="0"),
),
),
Node(token=r"\\"),
Node(
token="{}",
children=(
Node(token="x"),
Node(token="-"),
Node(token="y"),
Node(token="+"),
Node(token="8"),
Node(token="z"),
Node(token="="),
Node(token="0"),
),
),
Node(token=r"\\"),
Node(
token="{}",
children=(
Node(token="2"),
Node(token="x"),
Node(token="-"),
Node(token="6"),
Node(token="y"),
Node(token="+"),
Node(token="z"),
Node(token="="),
Node(token="0"),
),
),
),
),
Node(token=r"\right", delimiter="."),
),
delimiter=r"\{",
)
],
id="null-delimiter-2",
),
pytest.param(
r"\begin{matrix*}[r]a & b \\ c & d \end{matrix*}",
[
Node(
token=r"\matrix*",
children=(
Node(token="a"),
Node(token="&"),
Node(token="b"),
Node(token=r"\\"),
Node(token="c"),
Node(token="&"),
Node(token="d"),
),
alignment="r",
)
],
id="matrix-with-alignment",
),
pytest.param(
r"\begin{matrix*}[]a & b \\ c & d \end{matrix*}",
[
Node(
token=r"\matrix*",
children=(
Node(token="a"),
Node(token="&"),
Node(token="b"),
Node(token=r"\\"),
Node(token="c"),
Node(token="&"),
Node(token="d"),
),
alignment="",
)
],
id="matrix-with-empty-alignment",
),
pytest.param(
r"\begin{matrix}-a & b \\ c & d \end{matrix}",
[
Node(
token=r"\matrix",
children=(
Node(token="-"),
Node(token="a"),
Node(token="&"),
Node(token="b"),
Node(token=r"\\"),
Node(token="c"),
Node(token="&"),
Node(token="d"),
),
alignment="",
)
],
id="matrix-with-negative-sign",
),
pytest.param(
r"\begin{matrix}-\end{matrix}",
[Node(token=r"\matrix", children=(Node(token="-"),), alignment="")],
id="matrix-with-just-negative-sign-1",
),
pytest.param(
r"\begin{matrix}a_{1} & b_{2} \\ c_{3} & d_{4} \end{matrix}",
[
Node(
token=r"\matrix",
children=(
Node(token="_", children=(Node(token="a"), Node(token="{}", children=(Node(token="1"),)))),
Node(token="&"),
Node(token="_", children=(Node(token="b"), Node(token="{}", children=(Node(token="2"),)))),
Node(token=r"\\"),
Node(token="_", children=(Node(token="c"), Node(token="{}", children=(Node(token="3"),)))),
Node(token="&"),
Node(token="_", children=(Node(token="d"), Node(token="{}", children=(Node(token="4"),)))),
),
alignment="",
),
],
id="complex-matrix",
),
pytest.param(
r"\begin{array}{cc} 1 & 2 \\ 3 & 4 \end{array}",
[
Node(
token=r"\array",
children=(
Node(token="1"),
Node(token="&"),
Node(token="2"),
Node(token=r"\\"),
Node(token="3"),
Node(token="&"),
Node(token="4"),
),
alignment="cc",
)
],
id="simple-array",
),
pytest.param(
r"""\begin{bmatrix}
a_{1,1} & a_{1,2} & \cdots & a_{1,n} \\
a_{2,1} & a_{2,2} & \cdots & a_{2,n} \\
\vdots & \vdots & \ddots & \vdots \\
a_{m,1} & a_{m,2} & \cdots & a_{m,n}
\end{bmatrix}""",
[
Node(
token=r"\bmatrix",
children=(
Node(
token="_",
children=(
Node(token="a"),
Node(token="{}", children=(Node(token="1"), Node(token=","), Node(token="1"))),
),
),
Node(token="&"),
Node(
token="_",
children=(
Node(token="a"),
Node(token="{}", children=(Node(token="1"), Node(token=","), Node(token="2"))),
),
),
Node(token="&"),
Node(token=r"\cdots"),
Node(token="&"),
Node(
token="_",
children=(
Node(token="a"),
Node(token="{}", children=(Node(token="1"), Node(token=","), Node(token="n"))),
),
),
Node(token=r"\\"),
Node(
token="_",
children=(
Node(token="a"),
Node(token="{}", children=(Node(token="2"), Node(token=","), Node(token="1"))),
),
),
Node(token="&"),
Node(
token="_",
children=(
Node(token="a"),
Node(token="{}", children=(Node(token="2"), Node(token=","), Node(token="2"))),
),
),
Node(token="&"),
Node(token=r"\cdots"),
Node(token="&"),
Node(
token="_",
children=(
Node(token="a"),
Node(token="{}", children=(Node(token="2"), Node(token=","), Node(token="n"))),
),
),
Node(token=r"\\"),
Node(token=r"\vdots"),
Node(token="&"),
Node(token=r"\vdots"),
Node(token="&"),
Node(token=r"\ddots"),
Node(token="&"),
Node(token=r"\vdots"),
Node(token=r"\\"),
Node(
token="_",
children=(
Node(token="a"),
Node(token="{}", children=(Node(token="m"), Node(token=","), Node(token="1"))),
),
),
Node(token="&"),
Node(
token="_",
children=(
Node(token="a"),
Node(token="{}", children=(Node(token="m"), Node(token=","), Node(token="2"))),
),
),
Node(token="&"),
Node(token=r"\cdots"),
Node(token="&"),
Node(
token="_",
children=(
Node(token="a"),
Node(token="{}", children=(Node(token="m"), Node(token=","), Node(token="n"))),
),
),
),
alignment="",
)
],
id="issue-33",
),
pytest.param(
r"\sqrt { ( - 25 ) ^ { 2 } } = \pm 25",
[
Node(
token=r"\sqrt",
children=(
Node(
token="{}",
children=(
Node(token="("),
Node(token="-"),
Node(token="25"),
Node(
token="^", children=(Node(token=")"), Node(token="{}", children=(Node(token="2"),)))
),
),
),
),
),
Node(token="="),
Node(token=r"\pm"),
Node(token="25"),
],
id="issue-42",
),
pytest.param(
r"\left(- x^{3} + 5\right)^{5}",
[
Node(
token="^",
children=(
Node(
token=r"\left",
children=(
Node(token="-"),
Node(
token="^", children=(Node(token="x"), Node(token="{}", children=(Node(token="3"),)))
),
Node(token="+"),
Node(token="5"),
Node(token=r"\right", delimiter=")"),
),
delimiter="(",
),
Node(token="{}", children=(Node(token="5"),)),
),
)
],
id="issue-44",
),
pytest.param(
r"\begin{array}{rcl}ABC&=&a\\A&=&abc\end{array}",
[
Node(
token=r"\array",
children=(
Node(token="A"),
Node(token="B"),
Node(token="C"),
Node(token="&"),
Node(token="="),
Node(token="&"),
Node(token="a"),
Node(token=r"\\"),
Node(token="A"),
Node(token="&"),
Node(token="="),
Node(token="&"),
Node(token="a"),
Node(token="b"),
Node(token="c"),
),
alignment="rcl",
)
],
id="issue-55",
),
pytest.param(
r"\begin{array}{cr} 1 & 2 \\ 3 & 4 \\ \hline 5 & 6 \end{array}",
[
Node(
token=r"\array",
children=(
Node(token="1"),
Node(token="&"),
Node(token="2"),
Node(token=r"\\"),
Node(token="3"),
Node(token="&"),
Node(token="4"),
Node(token=r"\\"),
Node(token=r"\hline"),
Node(token="5"),
Node(token="&"),
Node(token="6"),
),
alignment="cr",
)
],
id="array-with-horizontal-line",
),
pytest.param(
r"\begin{array}{cr} 1 & 2 \\ \hline 3 & 4 \\ \hline 5 & 6 \end{array}",
[
Node(
token=r"\array",
children=(
Node(token="1"),
Node(token="&"),
Node(token="2"),
Node(token=r"\\"),
Node(token=r"\hline"),
Node(token="3"),
Node(token="&"),
Node(token="4"),
Node(token=r"\\"),
Node(token=r"\hline"),
Node(token="5"),
Node(token="&"),
Node(token="6"),
),
alignment="cr",
)
],
id="array-with-horizontal-lines",
),
pytest.param(
r"\mathrm{...}",
[
Node(
token=r"\mathrm",
children=(Node(token="{}", children=(Node(token="."), Node(token="."), Node(token="."))),),
)
],
id="issue-60",
),
pytest.param(
r"\frac{x + 4}{x + \frac{123 \left(\sqrt{x} + 5\right)}{x + 4} - 8}",
[
Node(
token=r"\frac",
children=(
Node(token="{}", children=(Node(token="x"), Node(token="+"), Node(token="4"))),
Node(
token="{}",
children=(
Node(token="x"),
Node(token="+"),
Node(
token=r"\frac",
children=(
Node(
token="{}",
children=(
Node(token="123"),
Node(
token=r"\left",
children=(
Node(
token=r"\sqrt",
children=(Node(token="{}", children=(Node(token="x"),)),),
),
Node(token="+"),
Node(token="5"),
Node(token=r"\right", delimiter=")"),
),
delimiter="(",
),
),
),
Node(token="{}", children=(Node(token="x"), Node(token="+"), Node(token="4"))),
),
),
Node(token="-"),
Node(token="8"),
),
),
),
)
],
id="issue-61",
),
pytest.param(
r"\sqrt {\sqrt {\left( x^{3}\right) + v}}",
[
Node(
token=r"\sqrt",
children=(
Node(
token="{}",
children=(
Node(
token=r"\sqrt",
children=(
Node(
token="{}",
children=(
Node(
token=r"\left",
children=(
Node(
token="^",
children=(
Node(token="x"),
Node(token="{}", children=(Node(token="3"),)),
),
),
Node(token=r"\right", delimiter=")"),
),
delimiter="(",
),
Node(token="+"),
Node(token="v"),
),
),
),
),
),
),
),
)
],
id="issue-63",
),
pytest.param(
r"\left(\left(x\right)\right)",
[
Node(
token=r"\left",
children=(
Node(
token=r"\left",
children=(Node(token="x"), Node(token=r"\right", delimiter=")")),
delimiter="(",
),
Node(token=r"\right", delimiter=")"),
),
delimiter="(",
)
],
id=r"nested-left-right",
),
pytest.param(
r"\left(x\right){5}",
[
Node(
token=r"\left",
children=(Node(token="x"), Node(token=r"\right", delimiter=")")),
delimiter="(",
),
Node(token="{}", children=(Node(token="5"),)),
],
id=r"group-after-right",
),
pytest.param(
r"\sqrt[3]{}",
[Node(token=r"\root", children=(Node(token="{}", children=()), Node(token="3")))],
id="empty-nth-root",
),
pytest.param(
r"1_{}", [Node(token="_", children=(Node(token="1"), Node(token="{}", children=())))], id="empty-subscript"
),
pytest.param(
r"\array{}",
[Node(token=r"\array", children=(Node(token="{}", children=()),), alignment="")],
id="empty-array",
),
pytest.param(
r"\array{{}}",
[Node(token=r"\array", children=(Node(token="{}", children=()),), alignment="")],
id="empty-array-with-empty-group",
),
pytest.param(
r"\left[\begin{matrix}1 & 0 & 0 & 0\\0 & 1 & 0 & 0\\0 & 0 & 1 & 0\\0 & 0 & 0 & 1\end{matrix}\right]",
[
Node(
token=r"\left",
children=(
Node(
token=r"\matrix",
children=(
Node(token="1"),
Node(token="&"),
Node(token="0"),
Node(token="&"),
Node(token="0"),
Node(token="&"),
Node(token="0"),
Node(token=r"\\"),
Node(token="0"),
Node(token="&"),
Node(token="1"),
Node(token="&"),
Node(token="0"),
Node(token="&"),
Node(token="0"),
Node(token=r"\\"),
Node(token="0"),
Node(token="&"),
Node(token="0"),
Node(token="&"),
Node(token="1"),
Node(token="&"),
Node(token="0"),
Node(token=r"\\"),
Node(token="0"),
Node(token="&"),
Node(token="0"),
Node(token="&"),
Node(token="0"),
Node(token="&"),
Node(token="1"),
),
alignment="",
),
Node(token=r"\right", delimiter="]"),
),
delimiter="[",
)
],
id="issue-77",
),
pytest.param(
r"\left({x}\right)",
[
Node(
token=r"\left",
children=(
Node(token="{}", children=(Node(token="x"),)),
Node(token=r"\right", delimiter=")"),
),
delimiter="(",
)
],
id="issue-78-1",
),
pytest.param(
r"\left(\frac{x^{x^{x}}}{x}\right)",
[
Node(
token=r"\left",
children=(
Node(
token=r"\frac",
children=(
Node(
token="{}",
children=(
Node(
token="^",
children=(
Node(token="x"),
Node(
token="{}",
children=(
Node(
token="^",
children=(
Node(token="x"),
Node(token="{}", children=(Node(token="x"),)),
),
),
),
),
),
),
),
),
Node(token="{}", children=(Node(token="x"),)),
),
),
Node(token=r"\right", delimiter=")"),
),
delimiter="(",
)
],
id="issue-78-2",
),
pytest.param(
r"x^{x^{x^{x}}} \left(x^{x^{x}} \left(x^{x} \left(\log{\left(x \right)} + 1\right) \log{\left(x \right)} + "
r"\frac{x^{x}}{x}\right) \log{\left(x \right)} + \frac{x^{x^{x}}}{x}\right)",
[
Node(
token="^",
children=(
Node(token="x"),
Node(
token="{}",
children=(
Node(
token="^",
children=(
Node(token="x"),
Node(
token="{}",
children=(
Node(
token="^",
children=(
Node(token="x"),
Node(token="{}", children=(Node(token="x"),)),
),
),
),
),
),
),
),
),
),
),
Node(
token=r"\left",
children=(
Node(
token="^",
children=(
Node(token="x"),
Node(
token="{}",
children=(
Node(
token="^",
children=(Node(token="x"), Node(token="{}", children=(Node(token="x"),))),
),
),
),
),
),
Node(
token=r"\left",
children=(
Node(
token="^", children=(Node(token="x"), Node(token="{}", children=(Node(token="x"),)))
),
Node(
token=r"\left",
children=(
Node(token=r"\log"),
Node(
token="{}",
children=(
Node(
token=r"\left",
children=(Node(token="x"), Node(token=r"\right", delimiter=")")),
delimiter="(",
),
),
),
Node(token="+"),
Node(token="1"),
Node(token=r"\right", delimiter=")"),
),
delimiter="(",
),
Node(token=r"\log"),
Node(
token="{}",
children=(
Node(
token=r"\left",
children=(Node(token="x"), Node(token=r"\right", delimiter=")")),
delimiter="(",
),
),
),
Node(token="+"),
Node(
token=r"\frac",
children=(
Node(
token="{}",
children=(
Node(
token="^",
children=(
Node(token="x"),
Node(token="{}", children=(Node(token="x"),)),
),
),
),
),
Node(token="{}", children=(Node(token="x"),)),
),
),
Node(token=r"\right", delimiter=")"),
),
delimiter="(",
),
Node(token=r"\log"),
Node(
token="{}",
children=(
Node(
token=r"\left",
children=(Node(token="x"), Node(token=r"\right", delimiter=")")),
delimiter="(",
),
),
),
Node(token="+"),
Node(
token=r"\frac",
children=(
Node(
token="{}",
children=(
Node(
token="^",
children=(
Node(token="x"),
Node(
token="{}",
children=(
Node(
token="^",
children=(
Node(token="x"),
Node(token="{}", children=(Node(token="x"),)),
),
),
),
),
),
),
),
),
Node(token="{}", children=(Node(token="x"),)),
),
),
Node(token=r"\right", delimiter=")"),
),
delimiter="(",
),
],
id="issue-78-3",
),
pytest.param(
r"\log_2{x}",
[
Node(token="_", children=(Node(token=r"\log"), Node(token="2"))),
Node(token="{}", children=(Node(token="x"),)),
],
id="logarithm-with-base",
),
pytest.param(
r"\sqrt[]{3}",
[Node(token=r"\sqrt", children=(Node(token="{}", children=(Node(token="3"),)),))],
id="issue-79-empty-root",
),
pytest.param(
r"\frac{3}{\frac{1}{2}{x}^{2}}",
[
Node(
token=r"\frac",
children=(
Node(token="{}", children=(Node(token="3"),)),
Node(
token="{}",
children=(
Node(
token=r"\frac",
children=(
Node(token="{}", children=(Node(token="1"),)),
Node(token="{}", children=(Node(token="2"),)),
),
),
Node(
token="^",
children=(
Node(token="{}", children=(Node(token="x"),)),
Node(token="{}", children=(Node(token="2"),)),
),
),
),
),
),
)
],
id="issue-79-exponent-after-fraction",
),
pytest.param(
r"\frac{3}{\frac{1}{2}{x}^{2}-\frac{3\sqrt[]{3}}{2}x+3}",
[
Node(
token=r"\frac",
children=(
Node(token="{}", children=(Node(token="3"),)),
Node(
token="{}",
children=(
Node(
token=r"\frac",
children=(
Node(token="{}", children=(Node(token="1"),)),
Node(token="{}", children=(Node(token="2"),)),
),
),
Node(
token="^",
children=(
Node(token="{}", children=(Node(token="x"),)),
Node(token="{}", children=(Node(token="2"),)),
),
),
Node(token="-"),
Node(
token=r"\frac",
children=(
Node(
token="{}",
children=(
Node(token="3"),
Node(
token=r"\sqrt",
children=(Node(token="{}", children=(Node(token="3"),)),),
),
),
),
Node(token="{}", children=(Node(token="2"),)),
),
),
Node(token="x"),
Node(token="+"),
Node(token="3"),
),
),
),
)
],
id="issue-79",
),
pytest.param(
"^3", [Node(token="^", children=(Node(token=""), Node(token="3")))], id="superscript-without-base-works"
),
pytest.param(
"_3", [Node(token="_", children=(Node(token=""), Node(token="3")))], id="subscript-without-base-works"
),
pytest.param(
r"\lim_{x \to +\infty} f(x)",
[
Node(
token="_",
children=(
Node(token=r"\lim"),
Node(
token="{}",
children=(Node(token="x"), Node(token=r"\to"), Node(token="+"), Node(token=r"\infty")),
),
),
),
Node(token="f"),
Node(token="("),
Node(token="x"),
Node(token=")"),
],
id="limit-at-plus-infinity",
),
pytest.param(
r"\inf_{x > s}f(x)",
[
Node(
token="_",
children=(
Node(token=r"\inf"),
Node(token="{}", children=(Node(token="x"), Node(token=">"), Node(token="s"))),
),
),
Node(token="f"),
Node(token="("),
Node(token="x"),
Node(token=")"),
],
id="inf",
),
pytest.param(
r"\sup_{x \in \mathbb{R}}f(x)",
[
Node(
token="_",
children=(
Node(token=r"\sup"),
Node(token="{}", children=(Node(token="x"), Node(token=r"\in"), Node(token="ℝ"))),
),
),
Node(token="f"),
Node(token="("),
Node(token="x"),
Node(token=")"),
],
id="sup",
),
pytest.param(
r"\max_{x \in [a,b]}f(x)",
[
Node(
token="_",
children=(
Node(token=r"\max"),
Node(
token="{}",
children=(
Node(token="x"),
Node(token=r"\in"),
Node(token="["),
Node(token="a"),
Node(token=","),
Node(token="b"),
Node(token="]"),
),
),
),
),
Node(token="f"),
Node(token="("),
Node(token="x"),
Node(token=")"),
],
id="max",
),
pytest.param(
r"\min_{x \in [\alpha,\beta]}f(x)",
[
Node(
token="_",
children=(
Node(token=r"\min"),
Node(
token="{}",
children=(
Node(token="x"),
Node(token=r"\in"),
Node(token="["),
Node(token=r"\alpha"),
Node(token=","),
Node(token=r"\beta"),
Node(token="]"),
),
),
),
),
Node(token="f"),
Node(token="("),
Node(token="x"),
Node(token=")"),
],
id="min",
),
pytest.param(
r"\int\limits_{0}^{\pi}",
[
Node(
token="_^",
children=(
Node(token=r"\int"),
Node(token="{}", children=(Node(token="0"),)),
Node(token="{}", children=(Node(token=r"\pi"),)),
),
modifier=r"\limits",
),
],
id="issue-76",
),
pytest.param(
r"\sum_{\substack{1\le i\le n\\ i\ne j}}",
[
Node(
token="_",
children=(
Node(token=r"\sum"),
Node(
token="{}",
children=(
Node(
token=r"\substack",
children=(
Node(token="1"),
Node(token=r"\le"),
Node(token="i"),
Node(token=r"\le"),
Node(token="n"),
Node(token=r"\\"),
Node(token="i"),
Node(token=r"\ne"),
Node(token="j"),
),
alignment="",
),
),
),
),
)
],
id="issue-75",
),
pytest.param(
r"\mathrm{AA}",
[Node(token=r"\mathrm", children=(Node(token="{}", children=(Node(token="A"), Node(token="A"))),))],
id="issue-94",
),
pytest.param(
r"(1+(x-y)^{2})",
[
Node(token="("),
Node(token="1"),
Node(token="+"),
Node(token="("),
Node(token="x"),
Node(token="-"),
Node(token="y"),
Node(token="^", children=(Node(token=")"), Node(token="{}", children=(Node(token="2"),)))),
Node(token=")"),
],
id="issue-96",
),
pytest.param(
r"p_{\max}",
[Node(token="_", children=(Node(token="p"), Node(token="{}", children=(Node(token=r"\max"),))))],
id="issue-98",
),
pytest.param(
r"\vec{AB}",
[Node(token=r"\vec", children=(Node(token="{}", children=(Node(token="A"), Node(token="B"))),))],
id="issue-103",
),
pytest.param(r"\max f", [Node(token=r"\max"), Node(token="f")], id="issue-108-1"),
pytest.param(
r"\max \{a, b, c\}",
[
Node(token=r"\max"),
Node(token=r"\{"),
Node(token="a"),
Node(token=","),
Node(token="b"),
Node(token=","),
Node(token="c"),
Node(token=r"\}"),
],
id="issue-108-2",
),
pytest.param(
r"\min{(x, y)}",
[
Node(token=r"\min"),
Node(
token="{}",
children=(Node(token="("), Node(token="x"), Node(token=","), Node(token="y"), Node(token=")")),
),
],
id="issue-108-3",
),
pytest.param(
r"x = {-b \pm \sqrt{b^2-4ac} \over 2a}",
[
Node(token="x"),
Node(token="="),
Node(
token="{}",
children=(
Node(
token=r"\frac",
children=(
Node(
token="{}",
children=(
Node(token="-"),
Node(token="b"),
Node(token=r"\pm"),
Node(
token=r"\sqrt",
children=(
Node(
token="{}",
children=(
Node(token="^", children=(Node(token="b"), Node(token="2"))),
Node(token="-"),
Node(token="4"),
Node(token="a"),
Node(token="c"),
),
),
),
),
),
),
Node(token="{}", children=(Node(token="2"), Node(token="a"))),
),
),
),
),
],
id="quadratic-equation",
),
pytest.param(
r"\binom{2}{3}",
[
Node(
token=r"\binom",
children=(
Node(token="{}", children=(Node(token="2"),)),
Node(token="{}", children=(Node(token="3"),)),
),
)
],
id="binomial",
),
pytest.param(
r"\overline{a}",
[Node(token=r"\overline", children=(Node(token="{}", children=(Node(token="a"),)),))],
id="overline",
),
pytest.param(
r"\bar{a}",
[Node(token=r"\bar", children=(Node(token="{}", children=(Node(token="a"),)),))],
id="bar",
),
pytest.param(
r"\underline{a}",
[Node(token=r"\underline", children=(Node(token="{}", children=(Node(token="a"),)),))],
id="underline",
),
pytest.param(
r"\overrightarrow{a}",
[Node(token=r"\overrightarrow", children=(Node(token="{}", children=(Node(token="a"),)),))],
id="overrightarrow",
),
pytest.param(r"\text{Let}", [Node(token=r"\text", text="Let")], id="text"),
pytest.param(
r"F(a,n)=\overset{a-a-a\cdots-a}{}ntext{个}a",
[
Node(token="F"),
Node(token="("),
Node(token="a"),
Node(token=","),
Node(token="n"),
Node(token=")"),
Node(token="="),
Node(
token=r"\overset",
children=(
Node(token="{}", children=()),
Node(
token="{}",
children=(
Node(token="a"),
Node(token="-"),
Node(token="a"),
Node(token="-"),
Node(token="a"),
Node(token=r"\cdots"),
Node(token="-"),
Node(token="a"),
),
),
),
),
Node(token="n"),
Node(token="t"),
Node(token="e"),
Node(token="x"),
Node(token="t"),
Node(token="{}", children=(Node(token="个"),)),
Node(token="a"),
],
id="issue-125-overset",
),
pytest.param(
r"|\hspace1em|\hspace{10ex}|",
[
Node(token="|"),
Node(token=r"\hspace", attributes={"width": "1em"}),
Node(token="|"),
Node(token=r"\hspace", attributes={"width": "10ex"}),
Node(token="|"),
],
id="issue-129-hspace",
),
pytest.param(
"f'(x) = 2x, f''(x) = 2",
[
Node(token="^", children=(Node(token="f"), Node(token=r"\prime"))),
Node(token="("),
Node(token="x"),
Node(token=")"),
Node(token="="),
Node(token="2"),
Node(token="x"),
Node(token=","),
Node(token="^", children=(Node(token="f"), Node(token=r"\dprime"))),
Node(token="("),
Node(token="x"),
Node(token=")"),
Node(token="="),
Node(token="2"),
],
id="prime",
),
pytest.param(
r"{a \above 1pt b} + {c \above {1.5pt} d}",
[
Node(
token="{}",
children=(
Node(
token=r"\frac",
children=(Node(token="a"), Node(token="b")),
attributes={"linethickness": "1pt"},
),
),
),
Node(token="+"),
Node(
token="{}",
children=(
Node(
token=r"\frac",
children=(Node(token="c"), Node(token="d")),
attributes={"linethickness": "1.5pt"},
),
),
),
],
id="above",
),
pytest.param(
r"a \atop {b \atopwithdelims \{ \} c}",
[
Node(
token=r"\frac",
children=(
Node(token="a"),
Node(
token="{}",
children=(
Node(
token=r"\frac",
children=(
Node(token="b"),
Node(token="c"),
),
attributes={"linethickness": "0"},
delimiter="{}",
),
),
),
),
attributes={"linethickness": "0"},
),
],
id="atop-and-atopwithdelims",
),
pytest.param(
r"{a \abovewithdelims [ ] 1pt b} + {c \abovewithdelims . . {1.5pt} d}",
[
Node(
token="{}",
children=(
Node(
token=r"\frac",
children=(
Node(token="a"),
Node(token="b"),
),
attributes={"linethickness": "1pt"},
delimiter="[]",
),
),
),
Node(token="+"),
Node(
token="{}",
children=(
Node(
token=r"\frac",
children=(
Node(token="c"),
Node(token="d"),
),
attributes={"linethickness": "1.5pt"},
delimiter="..",
),
),
),
],
id="abovewithdelims",
),
# We don't want \Huge or \huge to make its siblings as children as it breaks groupings on deep-nesting
pytest.param(
r"[{[\Huge[\huge[[}[",
[
Node(token="["),
Node(
token="{}",
children=(
Node(token="["),
Node(token=r"\Huge"),
Node(token="["),
Node(token=r"\huge"),
Node(token="["),
Node(token="["),
),
),
Node(token="["),
],
id="huge",
),
pytest.param(
r"X_\mathrm{min}",
[
Node(
token="_",
children=(
Node(token="X"),
Node(
token=r"\mathrm",
children=(Node(token="{}", children=(Node(token="m"), Node(token="i"), Node(token="n"))),),
),
),
)
],
id="issue-203-1",
),
pytest.param(
r"a\mathop{t}b\mathop{t}c",
[
Node(token="a"),
Node(token=r"\mathop", children=(Node(token="{}", children=(Node(token="t"),)),)),
Node(token="b"),
Node(token=r"\mathop", children=(Node(token="{}", children=(Node(token="t"),)),)),
Node(token="c"),
],
id="issue-203-2",
),
pytest.param(r"\hbox{E=mc^2}", [Node(token=r"\hbox", text="E=mc^2")], id="hbox"),
pytest.param(
r"\style{color:red}{x+1}",
[
Node(
token="{}",
children=(Node(token="x"), Node(token="+"), Node(token="1")),
attributes={"style": "color:red"},
)
],
id="style",
),
pytest.param(
r"\sideset{_1^2}{_3^4}\sum",
[
Node(
token=r"\sideset",
children=(
Node(
token="_^",
children=(
Node(
token=r"\vphantom",
children=(Node(token=r"\sum", attributes={"movablelimits": "false"}),),
),
Node(token="1"),
Node(token="2"),
),
),
Node(
token="_^",
children=(
Node(token=r"\sum", attributes={"movablelimits": "false"}),
Node(token="3"),
Node(token="4"),
),
),
),
)
],
id="sideset",
),
pytest.param(
r"\sideset{^2}{_3}\sum",
[
Node(
token=r"\sideset",
children=(
Node(
token="^",
children=(
Node(
token=r"\vphantom",
children=(Node(token=r"\sum", attributes={"movablelimits": "false"}),),
),
Node(token="2"),
),
),
Node(
token="_",
children=(
Node(token=r"\sum", attributes={"movablelimits": "false"}),
Node(token="3"),
),
),
),
)
],
id="sideset-2",
),
pytest.param(
r"\root 3 \of x", [Node(token=r"\root", children=(Node(token="x"), Node(token="3")))], id="root-of"
),
pytest.param(
r"\root n+1 \of x + 2",
[
Node(
token=r"\root",
children=(
Node(token="x"),
Node(token="{}", children=(Node(token="n"), Node(token="+"), Node(token="1"))),
),
),
Node(token="+"),
Node(token="2"),
],
id="root-of-multiple",
),
pytest.param(
r"\root \of x",
[Node(token=r"\root", children=(Node(token="x"), Node(token="{}", children=())))],
id="root-of-without-root",
),
pytest.param(
r"\skew7\hat a\skew{8}\hat b",
[
Node(
token=r"\skew",
children=(Node(token=r"\hat", children=(Node(token="a"),)),),
attributes={"width": "0.389em"},
),
Node(
token=r"\skew",
children=(Node(token=r"\hat", children=(Node(token="b"),)),),
attributes={"width": "0.444em"},
),
],
id="skew-hat",
),
pytest.param(r"\xleftarrow x", [Node(token=r"\xleftarrow", children=(Node(token="x"),))], id="xleftarrow"),
pytest.param(
r"\xleftarrow[y] x",
[Node(token=r"\xleftarrow", children=(Node(token="{}", children=(Node(token="y"),)), Node(token="x")))],
id="xleftarrow-with-argument",
),
pytest.param(r"\xrightarrow x", [Node(token=r"\xrightarrow", children=(Node(token="x"),))], id="xrightarrow"),
pytest.param(
r"\xrightarrow[y] x",
[Node(token=r"\xrightarrow", children=(Node(token="{}", children=(Node(token="y"),)), Node(token="x")))],
id="xrightarrow-with-argument",
),
pytest.param(
r"\not \in \not\ni \not a \not\equiv \not\operatorname{R}\not",
[
Node(token=r"\nin"),
Node(token=r"\nni"),
Node(token=r"\not"),
Node(token="a"),
Node(token=r"\nequiv"),
Node(token=r"\not"),
Node(token=r"\operatorname{R}"),
Node(token=r"\not"),
],
id="not",
),
],
)
def test_walk(latex: str, expected: list) -> None:
assert walk(latex) == expected
@pytest.mark.parametrize(
    "latex, exception",
    [
        pytest.param(r"\right)", ExtraLeftOrMissingRightError, id=r"missing-\left"),
        pytest.param(r"\left(x", ExtraLeftOrMissingRightError, id=r"missing-\right"),
        pytest.param(r"\middle|", ExtraLeftOrMissingRightError, id=r"missing-\left"),
        pytest.param(r"{ \over 2}", NumeratorNotFoundError, id="fraction-without-numerator"),
        pytest.param(r"{1 \over }", DenominatorNotFoundError, id="fraction-without-denominator"),
        pytest.param(r"1_", MissingSuperScriptOrSubscriptError, id="missing-subscript"),
        pytest.param(r"1^", MissingSuperScriptOrSubscriptError, id="missing-superscript"),
        pytest.param(r"1_2_3", DoubleSubscriptsError, id="double-subscript"),
        pytest.param(r"1^2^3", DoubleSuperscriptsError, id="double-superscript"),
        pytest.param(r"\genfrac(){1pt}4ab", InvalidStyleForGenfracError, id="invalid-style-for-genfrac"),
        pytest.param(r"\begin{array}\end{array1}", MissingEndError, id="missing-end"),
        pytest.param(r"\begin{matrix*}[xxx]\end{matrix*}", InvalidAlignmentError, id="invalid-alignment"),
        pytest.param(r"\skew{}\hat b", InvalidWidthError, id="invalid-width"),
        pytest.param(r"\skew{X}\hat b", InvalidWidthError, id="invalid-width-not-number"),
        pytest.param(r"\limits^{\pi}", LimitsMustFollowMathOperatorError, id="limits-must-follow-math-operator-blank"),
        pytest.param(r"5\limits^{\pi}", LimitsMustFollowMathOperatorError, id="limits-must-follow-math-operator"),
    ],
)
def test_error(latex: str, exception: Union[Tuple[Any, ...], Any]) -> None:
    """Each malformed LaTeX input makes walk() raise the expected parser error."""
    with pytest.raises(exception):
        walk(latex)
| 40.222543
| 120
| 0.263548
| 4,355
| 69,585
| 4.193341
| 0.064294
| 0.43418
| 0.253203
| 0.208137
| 0.761965
| 0.716406
| 0.679772
| 0.650203
| 0.584164
| 0.541233
| 0
| 0.014832
| 0.589179
| 69,585
| 1,729
| 121
| 40.245807
| 0.62399
| 0.001437
| 0
| 0.749563
| 0
| 0.006989
| 0.0931
| 0.017609
| 0.004077
| 0
| 0
| 0
| 0.000582
| 1
| 0.001165
| false
| 0
| 0.002912
| 0
| 0.004077
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0d6a24d3c064aaf944fe7c1b8f0d0379139f7a37
| 38
|
py
|
Python
|
matrevy/people/decorators.py
|
matrevy/matrevy.dk
|
abaf6bc328adf55d522a5cf0c83092949a8f7a20
|
[
"MIT"
] | null | null | null |
matrevy/people/decorators.py
|
matrevy/matrevy.dk
|
abaf6bc328adf55d522a5cf0c83092949a8f7a20
|
[
"MIT"
] | null | null | null |
matrevy/people/decorators.py
|
matrevy/matrevy.dk
|
abaf6bc328adf55d522a5cf0c83092949a8f7a20
|
[
"MIT"
] | null | null | null |
def access_required():
    """Access-control decorator factory; currently a no-op placeholder.

    Returns:
        None: no decorator is produced yet.
    """
    return None
| 7.6
| 23
| 0.578947
| 4
| 38
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.342105
| 38
| 4
| 24
| 9.5
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
0de2e9d48b297bc2b7f4ac99703dc4abf09dcea5
| 33
|
py
|
Python
|
flit/buildapi.py
|
ksunden/flit
|
5ed24842e614af813bd67ecf6ee918961fbf0ac2
|
[
"BSD-3-Clause"
] | 1,642
|
2015-03-26T18:22:25.000Z
|
2021-12-01T00:10:52.000Z
|
flit/buildapi.py
|
ksunden/flit
|
5ed24842e614af813bd67ecf6ee918961fbf0ac2
|
[
"BSD-3-Clause"
] | 427
|
2015-03-16T20:22:17.000Z
|
2021-12-01T14:15:38.000Z
|
flit/buildapi.py
|
ksunden/flit
|
5ed24842e614af813bd67ecf6ee918961fbf0ac2
|
[
"BSD-3-Clause"
] | 110
|
2015-03-28T02:50:28.000Z
|
2021-11-27T09:36:37.000Z
|
from flit_core.buildapi import *
| 16.5
| 32
| 0.818182
| 5
| 33
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0deadcc874ba4eb04f81a37b0f0d015d49fabee6
| 46
|
py
|
Python
|
aiomatrix/dispatcher/handlers/__init__.py
|
Forden/aiomatrix
|
d258076bae8eb776495b92be46ee9f4baec8d9a6
|
[
"MIT"
] | 2
|
2021-10-29T18:07:08.000Z
|
2021-11-19T00:25:43.000Z
|
aiomatrix/dispatcher/handlers/__init__.py
|
Forden/aiomatrix
|
d258076bae8eb776495b92be46ee9f4baec8d9a6
|
[
"MIT"
] | 1
|
2022-03-06T11:17:43.000Z
|
2022-03-06T11:17:43.000Z
|
aiomatrix/dispatcher/handlers/__init__.py
|
Forden/aiomatrix
|
d258076bae8eb776495b92be46ee9f4baec8d9a6
|
[
"MIT"
] | null | null | null |
from .handler import Handler, HandlerCallback
| 23
| 45
| 0.847826
| 5
| 46
| 7.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 46
| 1
| 46
| 46
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2185265c3f0c8b55a37b62d8792fcd1de74114d0
| 168
|
py
|
Python
|
tests/test_app.py
|
theia-jane/python-book-api
|
78e7be0462f3921f889e4847a0bb4737da295e82
|
[
"MIT"
] | null | null | null |
tests/test_app.py
|
theia-jane/python-book-api
|
78e7be0462f3921f889e4847a0bb4737da295e82
|
[
"MIT"
] | 8
|
2022-01-23T05:27:19.000Z
|
2022-01-28T06:13:21.000Z
|
tests/test_app.py
|
theia-jane/python-book-api
|
78e7be0462f3921f889e4847a0bb4737da295e82
|
[
"MIT"
] | null | null | null |
import pytest
def test_home(client):
assert client.get("/").status_code == 200
def test_404(client):
assert client.get("/does-not-exist").status_code == 404
| 18.666667
| 59
| 0.696429
| 25
| 168
| 4.52
| 0.6
| 0.123894
| 0.318584
| 0.371681
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.062937
| 0.14881
| 168
| 8
| 60
| 21
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
df0eeb44fa374eaae6ab36578b4375073329ed01
| 57,519
|
py
|
Python
|
winevtrc/database.py
|
benstobbs/winevt-kb
|
816817592348a564f87c834e1265cf04b0cc2a47
|
[
"Apache-2.0"
] | 9
|
2015-01-22T14:00:18.000Z
|
2022-01-08T06:20:17.000Z
|
winevtrc/database.py
|
benstobbs/winevt-kb
|
816817592348a564f87c834e1265cf04b0cc2a47
|
[
"Apache-2.0"
] | 14
|
2015-01-29T17:09:34.000Z
|
2022-01-05T07:38:52.000Z
|
winevtrc/database.py
|
benstobbs/winevt-kb
|
816817592348a564f87c834e1265cf04b0cc2a47
|
[
"Apache-2.0"
] | 4
|
2016-09-21T04:14:53.000Z
|
2021-09-28T09:35:15.000Z
|
# -*- coding: utf-8 -*-
"""Classes to read from and write to SQLite databases."""
import difflib
import logging
import re
import sqlite3
from winevtrc import definitions
from winevtrc import errors
from winevtrc import resources
class SQLite3DatabaseFile(object):
  """Class that defines a sqlite3 database file."""

  # Probe the sqlite_master catalog for a table with a specific name.
  _HAS_TABLE_QUERY = (
      'SELECT name FROM sqlite_master '
      'WHERE type = "table" AND name = "{0:s}"')

  def __init__(self):
    """Initializes the database file."""
    super(SQLite3DatabaseFile, self).__init__()
    self._connection = None
    self._cursor = None
    # Filename of the currently opened database, None when closed.
    self.filename = None
    # Read-only flag of the currently opened database, None when closed.
    self.read_only = None

  def _GetValues(self, cursor, table_names, column_names, condition):
    """Values generator function.

    Args:
      cursor (sqlite3.Cursor): SQLite database cursor.
      table_names (list[str]): table names.
      column_names (list[str]): column names.
      condition (str): condition.

    Yields:
      dict[str, object]: value.

    Raises:
      BackendError: if the database back-end raises an exception.
    """
    if condition:
      condition = ' WHERE {0:s}'.format(condition)
    # NOTE(review): the query is built by string interpolation, so table
    # names, column names and condition must come from trusted code, not
    # from external input.
    sql_query = 'SELECT {1:s} FROM {0:s}{2:s}'.format(
        ', '.join(table_names), ', '.join(column_names), condition)
    try:
      cursor.execute(sql_query)
    except sqlite3.OperationalError as exception:
      raise errors.BackendError(exception)
    # Map each row back to a dict keyed on the requested column names,
    # relying on the columns coming back in the order they were requested.
    for row in cursor:
      values = {}
      for column_index, column_name in enumerate(column_names):
        values[column_name] = row[column_index]
      yield values

  def Close(self):
    """Closes the database file.

    Raises:
      IOError: if the database is not opened.
      OSError: if the database is not opened.
    """
    if not self._connection:
      raise IOError('Cannot close database not opened.')
    # We need to run commit or not all data is stored in the database.
    self._connection.commit()
    self._connection.close()
    # Reset all state so the object can be reused with Open().
    self._connection = None
    self._cursor = None
    self.filename = None
    self.read_only = None

  def CreateTable(self, table_name, column_definitions):
    """Creates a table.

    Args:
      table_name (str): table name.
      column_definitions (list[str]): column definitions.

    Raises:
      BackendError: if the database back-end raises an exception.
      IOError: if the database is not opened or
          if the database is in read-only mode.
      OSError: if the database is not opened or
          if the database is in read-only mode.
    """
    if not self._connection:
      raise IOError('Cannot create table database not opened.')
    if self.read_only:
      raise IOError('Cannot create table database in read-only mode.')
    sql_query = 'CREATE TABLE {0:s} ( {1:s} )'.format(
        table_name, ', '.join(column_definitions))
    try:
      self._cursor.execute(sql_query)
    except sqlite3.OperationalError as exception:
      raise errors.BackendError(exception)

  def GetValues(self, table_names, column_names, condition):
    """Retrieves values from a table.

    Args:
      table_names (list[str]): table names.
      column_names (list[str]): column names.
      condition (str): condition.

    Returns:
      generator: values generator.

    Raises:
      IOError: if the database is not opened.
      OSError: if the database is not opened.
    """
    if not self._connection:
      raise IOError('Cannot retrieve values database not opened.')
    # The shared cursor is handed to the generator; interleaving other
    # queries on this object before the generator is exhausted will
    # clobber the iteration.
    return self._GetValues(self._cursor, table_names, column_names, condition)

  def HasTable(self, table_name):
    """Determines if a specific table exists.

    Args:
      table_name (str): table name.

    Returns:
      bool: True if the table exists, false otherwise.

    Raises:
      BackendError: if the database back-end raises an exception.
      IOError: if the database is not opened.
      OSError: if the database is not opened.
    """
    if not self._connection:
      raise IOError('Cannot determine if table exists database not opened.')
    sql_query = self._HAS_TABLE_QUERY.format(table_name)
    try:
      self._cursor.execute(sql_query)
      # fetchone() returns None when no row matched; bool() maps that to
      # False and any matching row to True.
      has_table = bool(self._cursor.fetchone())
    except sqlite3.OperationalError as exception:
      raise errors.BackendError(exception)
    return has_table

  def InsertValues(self, table_name, column_names, values):
    """Inserts values into a table.

    Args:
      table_name (str): table name.
      column_names (list[str]): column names.
      values (list[str]): values formatted as a string.

    Raises:
      BackendError: if the database back-end raises an exception.
      IOError: if the database is not opened or
          if the database is in read-only mode or
          if an unsupported value type is encountered.
      OSError: if the database is not opened or
          if the database is in read-only mode or
          if an unsupported value type is encountered.
    """
    if not self._connection:
      raise IOError('Cannot insert values database not opened.')
    if self.read_only:
      raise IOError('Cannot insert values database in read-only mode.')
    # Nothing to insert; silently succeed.
    if not values:
      return
    sql_values = []
    for value in values:
      # TODO: handle bool.
      # NOTE: bool is a subclass of int, so True/False currently fall into
      # the int branch below and are stored as 1/0.
      if isinstance(value, str):
        # In sqlite3 the double quote is escaped with a second double quote.
        value = '"{0:s}"'.format(re.sub('"', '""', value))
      elif isinstance(value, int):
        value = '{0:d}'.format(value)
      elif isinstance(value, float):
        value = '{0:f}'.format(value)
      elif value is None:
        value = 'NULL'
      else:
        raise IOError('Unsupported value type: {0!s}.'.format(type(value)))
      sql_values.append(value)
    sql_query = 'INSERT INTO {0:s} ( {1:s} ) VALUES ( {2:s} )'.format(
        table_name, ', '.join(column_names), ', '.join(sql_values))
    try:
      self._cursor.execute(sql_query)
    except sqlite3.OperationalError as exception:
      raise errors.BackendError(exception)

  def Open(self, filename, read_only=False):
    """Opens the database file.

    Args:
      filename (str): filename of the database.
      read_only (Optional[bool]): True if the database should be opened in
          read-only mode. Since sqlite3 does not support a real read-only
          mode we fake it by only permitting SELECT queries.

    Returns:
      bool: True if successful or False if not.

    Raises:
      BackendError: if the database back-end raises an exception.
      IOError: if the database is already opened.
      OSError: if the database is already opened.
    """
    if self._connection:
      raise IOError('Cannot open database already opened.')
    self.filename = filename
    self.read_only = read_only
    self._connection = sqlite3.connect(filename)
    # NOTE(review): sqlite3.connect() raises on failure rather than
    # returning None, so these falsiness checks appear to be defensive
    # dead code — confirm before relying on the False return path.
    if not self._connection:
      return False
    try:
      self._cursor = self._connection.cursor()
    except sqlite3.OperationalError as exception:
      raise errors.BackendError(exception)
    if not self._cursor:
      return False
    return True
class Sqlite3DatabaseReader(object):
  """Represents a reader of a sqlite3 database.

  Thin wrapper that delegates to an underlying SQLite3DatabaseFile,
  which it always opens in read-only mode.
  """

  def __init__(self):
    """Initializes a sqlite3 database reader."""
    super(Sqlite3DatabaseReader, self).__init__()
    self._database_file = SQLite3DatabaseFile()

  def Close(self):
    """Closes the underlying sqlite3 database file."""
    self._database_file.Close()

  def Open(self, filename):
    """Opens the sqlite3 database file for reading.

    Args:
      filename (str): filename of the database.

    Returns:
      bool: True if successful or False if not.
    """
    result = self._database_file.Open(filename, read_only=True)
    return result
class Sqlite3DatabaseWriter(object):
  """Represents a writer of a sqlite3 database.

  Thin wrapper that delegates to an underlying SQLite3DatabaseFile,
  opened in its default read-write mode.
  """

  def __init__(self):
    """Initializes a sqlite3 database writer."""
    super(Sqlite3DatabaseWriter, self).__init__()
    self._database_file = SQLite3DatabaseFile()

  def Close(self):
    """Closes the underlying sqlite3 database file."""
    self._database_file.Close()

  def Open(self, filename):
    """Opens the sqlite3 database file for writing.

    Args:
      filename (str): filename of the database.

    Returns:
      bool: True if successful or False if not.
    """
    result = self._database_file.Open(filename)
    return result
class EventProvidersSqlite3DatabaseReader(Sqlite3DatabaseReader):
  """Class to represent an Event Log providers sqlite3 database reader."""

  def _GetMessageFilenames(self, log_source, message_file_type):
    """Retrieves the message filenames of a specific Event Log provider.

    Args:
      log_source (str): source of the Event Log provider.
      message_file_type (str): message file type.

    Returns:
      list[str]: message filenames.
    """
    # Three-way join of providers, the per-provider link table and the
    # message files table, filtered on log source and message file type.
    table_names = [
        'event_log_providers', 'message_file_per_event_log_provider',
        'message_files']
    column_names = ['message_files.message_filename']
    condition = (
        '{0:s}.log_source == "{3:s}" AND '
        '{1:s}.message_file_type == "{4:s}" AND '
        '{0:s}.event_log_provider_key == {1:s}.event_log_provider_key AND '
        '{1:s}.message_file_key == {2:s}.message_file_key').format(
            'event_log_providers', 'message_file_per_event_log_provider',
            'message_files', log_source, message_file_type)
    message_filenames = []
    for values in self._database_file.GetValues(
        table_names, column_names, condition):
      message_filename = values['message_files.message_filename']
      message_filenames.append(message_filename)
    return message_filenames

  def GetEventLogProviders(self):
    """Retrieves the Event Log providers.

    Yields:
      EventLogProvider: event log provider.
    """
    table_names = ['event_log_providers']
    column_names = ['log_source', 'log_type', 'provider_guid']
    condition = ''
    # The providers are fully materialized into a list first; presumably
    # because _GetMessageFilenames() issues further queries on the shared
    # database cursor, which would disturb an in-progress iteration —
    # TODO confirm against SQLite3DatabaseFile.GetValues().
    event_log_providers = []
    for values in self._database_file.GetValues(
        table_names, column_names, condition):
      event_log_provider = resources.EventLogProvider(
          values['log_type'], values['log_source'], values['provider_guid'])
      event_log_providers.append(event_log_provider)
    # Second pass: attach the category, event and parameter message
    # filenames to each provider before yielding it.
    for event_log_provider in event_log_providers:
      message_filenames = self._GetMessageFilenames(
          event_log_provider.log_source,
          definitions.MESSAGE_FILE_TYPE_CATEGORY)
      event_log_provider.SetCategoryMessageFilenames(message_filenames)
      message_filenames = self._GetMessageFilenames(
          event_log_provider.log_source, definitions.MESSAGE_FILE_TYPE_EVENT)
      event_log_provider.SetEventMessageFilenames(message_filenames)
      message_filenames = self._GetMessageFilenames(
          event_log_provider.log_source,
          definitions.MESSAGE_FILE_TYPE_PARAMETER)
      event_log_provider.SetParameterMessageFilenames(message_filenames)
      yield event_log_provider

  def GetMessageFiles(self):
    """Retrieves the message filenames.

    Yields:
      tuple[str, str]: message filename and corresponding database filename.
    """
    table_names = ['message_files']
    column_names = ['message_filename', 'database_filename']
    condition = ''
    for values in self._database_file.GetValues(
        table_names, column_names, condition):
      yield values['message_filename'], values['database_filename']
class EventProvidersSqlite3DatabaseWriter(Sqlite3DatabaseWriter):
  """Class to represent an Event Log providers sqlite3 database writer."""

  def _GetEventLogProviderKey(self, event_log_provider):
    """Retrieves the key of an Event Log provider.

    Args:
      event_log_provider (EventLogProvider): event log provider.

    Returns:
      int: Event Log provider key or None if no such value.

    Raises:
      IOError: if more than one value is found in the database.
      OSError: if more than one value is found in the database.
    """
    table_names = ['event_log_providers']
    column_names = ['event_log_provider_key']
    # Providers are identified by the (log_source, log_type) pair.
    condition = 'log_source = "{0:s}" AND log_type = "{1:s}"'.format(
        event_log_provider.log_source, event_log_provider.log_type)
    values_list = list(self._database_file.GetValues(
        table_names, column_names, condition))
    number_of_values = len(values_list)
    if number_of_values == 0:
      return None
    if number_of_values == 1:
      values = values_list[0]
      return values['event_log_provider_key']
    raise IOError('More than one value found in database.')

  def _GetMessageFileKey(self, message_filename):
    """Retrieves the key of a message file.

    Args:
      message_filename (str): message filename.

    Returns:
      int: message file key or None if no such value.

    Raises:
      IOError: if more than one value is found in the database.
      OSError: if more than one value is found in the database.
    """
    table_names = ['message_files']
    column_names = ['message_file_key']
    # Case-insensitive match, since Windows filenames are case-insensitive.
    condition = 'LOWER(message_filename) = LOWER("{0:s}")'.format(
        message_filename)
    values_list = list(self._database_file.GetValues(
        table_names, column_names, condition))
    number_of_values = len(values_list)
    if number_of_values == 0:
      return None
    if number_of_values == 1:
      values = values_list[0]
      return values['message_file_key']
    raise IOError('More than one value found in database.')

  def WriteMessageFilesPerEventLogProvider(
      self, event_log_provider, message_filename, message_file_type):
    """Writes the message files used by an Event Log provider.

    Args:
      event_log_provider (EventLogProvider): event log provider.
      message_filename (str): message filename.
      message_file_type (str): message file type.
    """
    table_name = 'message_file_per_event_log_provider'
    column_names = [
        'message_file_key', 'message_file_type', 'event_log_provider_key']
    event_log_provider_key = self._GetEventLogProviderKey(event_log_provider)
    if event_log_provider_key is None:
      logging.warning('Missing event log provider key for: {0:s}'.format(
          event_log_provider.log_source))
    message_file_key = self._GetMessageFileKey(message_filename)
    if message_file_key is None:
      logging.warning('Missing message file key for: {0:s}'.format(
          message_filename))
    # NOTE(review): if either key is None and the table already exists,
    # the '{0:d}'/'{2:d}' formats in the condition below will raise —
    # consider returning early after the warnings; confirm intended
    # behavior before changing.
    has_table = self._database_file.HasTable(table_name)
    if not has_table:
      column_definitions = [
          'message_file_key INTEGER', 'message_file_type TEXT',
          'event_log_provider_key INTEGER']
      self._database_file.CreateTable(table_name, column_definitions)
      insert_values = True
    else:
      # Only insert when this exact (file, type, provider) row is absent.
      condition = (
          'message_file_key = {0:d} AND message_file_type = "{1:s}" AND '
          'event_log_provider_key = {2:d}').format(
              message_file_key, message_file_type, event_log_provider_key)
      values_list = list(self._database_file.GetValues(
          [table_name], column_names, condition))
      number_of_values = len(values_list)
      # TODO: check if more than 1 result.
      insert_values = number_of_values == 0
    if insert_values:
      values = [message_file_key, message_file_type, event_log_provider_key]
      self._database_file.InsertValues(table_name, column_names, values)

  def WriteEventLogProvider(self, event_log_provider):
    """Writes the Event Log provider.

    Args:
      event_log_provider (EventLogProvider): event log provider.
    """
    table_name = 'event_log_providers'
    column_names = ['log_source', 'log_type', 'provider_guid']
    has_table = self._database_file.HasTable(table_name)
    if not has_table:
      column_definitions = [
          'event_log_provider_key INTEGER PRIMARY KEY AUTOINCREMENT',
          'log_source TEXT', 'log_type TEXT', 'provider_guid TEXT']
      self._database_file.CreateTable(table_name, column_definitions)
      insert_values = True
    else:
      # Only insert when no provider with this (log_source, log_type)
      # pair exists yet.
      condition = 'log_source = "{0:s}" AND log_type = "{1:s}"'.format(
          event_log_provider.log_source, event_log_provider.log_type)
      values_list = list(self._database_file.GetValues(
          [table_name], column_names, condition))
      number_of_values = len(values_list)
      # TODO: check if more than 1 result.
      insert_values = number_of_values == 0
    if insert_values:
      values = [
          event_log_provider.log_source, event_log_provider.log_type,
          event_log_provider.provider_guid]
      self._database_file.InsertValues(table_name, column_names, values)

  def WriteMessageFile(self, message_filename, database_filename):
    """Writes the Windows Message Resource file.

    Args:
      message_filename (str): message filename.
      database_filename (str): database filename.
    """
    table_name = 'message_files'
    column_names = ['message_filename', 'database_filename']
    has_table = self._database_file.HasTable(table_name)
    if not has_table:
      column_definitions = [
          'message_file_key INTEGER PRIMARY KEY AUTOINCREMENT',
          'message_filename TEXT', 'database_filename TEXT']
      self._database_file.CreateTable(table_name, column_definitions)
      insert_values = True
    else:
      # Case-insensitive duplicate check, matching _GetMessageFileKey().
      condition = 'LOWER(message_filename) = LOWER("{0:s}")'.format(
          message_filename)
      values_list = list(self._database_file.GetValues(
          [table_name], column_names, condition))
      number_of_values = len(values_list)
      # TODO: check if more than 1 result.
      insert_values = number_of_values == 0
    if insert_values:
      values = [message_filename, database_filename]
      self._database_file.InsertValues(table_name, column_names, values)
class MessageFileSqlite3DatabaseReader(Sqlite3DatabaseReader):
  """Class to represent a message file sqlite3 database reader."""

  def GetMessageTables(self):
    """Retrieves the message tables.

    Yields:
      tuple[int, str]: language code identifier (LCID) and the message file
          version.
    """
    table_names = ['message_files', 'message_table_languages']
    column_names = ['file_version', 'lcid', 'identifier']
    # Join the language table back to its message file on the shared key.
    condition = (
        'message_files.message_file_key = '
        'message_table_languages.message_file_key')
    for values in self._database_file.GetValues(
        table_names, column_names, condition):
      yield values['lcid'], values['file_version']

  def GetMessages(self, lcid, file_version):
    """Retrieves the messages of a specific message table.

    Args:
      lcid (str): language code identifier (LCID).
      file_version (str): message file file version.

    Yields:
      tuple[int, str]: message identifier and message string.
    """
    # Per-language message tables are named message_table_<lcid> with the
    # file version appended when present; dots in the version are replaced
    # since they are not valid in SQL table names.
    table_name = 'message_table_{0:s}'.format(lcid)
    if file_version:
      table_name = '{0:s}_{1:s}'.format(table_name, file_version)
      table_name = re.sub(r'\.', r'_', table_name)
    column_names = ['message_identifier', 'message_string']
    condition = ''
    for values in self._database_file.GetValues(
        [table_name], column_names, condition):
      yield values['message_identifier'], values['message_string']

  def GetStringTables(self):
    """Retrieves the string tables.

    Yields:
      tuple[int, str]: language code identifier (LCID) and the message file
          version.
    """
    table_names = ['message_files', 'string_table_languages']
    column_names = ['file_version', 'lcid', 'identifier']
    condition = (
        'message_files.message_file_key = '
        'string_table_languages.message_file_key')
    for values in self._database_file.GetValues(
        table_names, column_names, condition):
      yield values['lcid'], values['file_version']

  def GetStrings(self, lcid, file_version):
    """Retrieves the strings of a specific string table.

    Args:
      lcid (str): language code identifier (LCID).
      file_version (str): message file file version.

    Yields:
      tuple[int, str]: string identifier and string.
    """
    # Unlike GetMessages(), the file version is always part of the string
    # table name here.
    table_name = 'string_table_{0:s}_{1:s}'.format(
        lcid, re.sub(r'\.', '_', file_version))
    column_names = ['string_identifier', 'string']
    condition = ''
    for values in self._database_file.GetValues(
        [table_name], column_names, condition):
      yield values['string_identifier'], values['string']
class MessageFileSqlite3DatabaseWriter(Sqlite3DatabaseWriter):
  """Class to represent a message file sqlite3 database writer."""

  def __init__(self, message_resource_file):
    """Initializes the message file database writer.

    Args:
      message_resource_file (MessageResourceFile): message resource file.
    """
    super(MessageFileSqlite3DatabaseWriter, self).__init__()
    self._message_resource_file = message_resource_file

  def _GetMessageFileKey(self, message_resource_file):
    """Retrieves the key of a message file.

    Args:
      message_resource_file (MessageResourceFile): message resource file.

    Returns:
      int: message file key or None if no such value.

    Raises:
      IOError: if more than one value is found in the database.
      OSError: if more than one value is found in the database.
    """
    table_names = ['message_files']
    column_names = ['message_file_key']

    condition = 'LOWER(path) = LOWER("{0:s}")'.format(
        message_resource_file.windows_path)

    if message_resource_file.file_version:
      condition = '{0:s} AND file_version = "{1:s}"'.format(
          condition, message_resource_file.file_version)

    if message_resource_file.product_version:
      condition = '{0:s} AND product_version = "{1:s}"'.format(
          condition, message_resource_file.product_version)

    values_list = list(self._database_file.GetValues(
        table_names, column_names, condition))

    number_of_values = len(values_list)
    if number_of_values == 0:
      return None

    if number_of_values == 1:
      values = values_list[0]
      return values['message_file_key']

    raise IOError('More than one value found in database.')

  def _WriteMessage(
      self, message_resource_file, message_table, language_identifier,
      message_index, table_name, has_table):
    """Writes a message to a specific message table.

    Args:
      message_resource_file (MessageResourceFile): message resource file.
      message_table (pywrc.message_table): message table resource.
      language_identifier (int): language identifier (LCID).
      message_index (int): message index.
      table_name (str): name of the table.
      has_table (bool): True if the table previously existed in the database.
    """
    column_names = ['message_identifier', 'message_string']

    message_identifier = message_table.get_message_identifier(
        language_identifier, message_index)
    message_identifier = '0x{0:08x}'.format(message_identifier)

    message_string = message_table.get_string(
        language_identifier, message_index)

    if not has_table:
      insert_values = True

    else:
      condition = 'message_identifier = "{0:s}"'.format(message_identifier)
      values_list = list(self._database_file.GetValues(
          [table_name], column_names, condition))

      number_of_values = len(values_list)
      if number_of_values == 1:
        values = values_list[0]
        if message_string != values['message_string']:
          differ = difflib.Differ()
          diff_list = list(differ.compare(
              [values['message_string']], [message_string]))
          logging.warning((
              'Message string mismatch for LCID: 0x{0:08x}, '
              'file version: {1:s}, message identifier: {2:s}.\n'
              '{3:s}\n').format(
                  language_identifier, message_resource_file.file_version,
                  message_identifier, '\n'.join(diff_list)))

      elif number_of_values != 0:
        logging.warning((
            'More than one message string found for LCID: 0x{0:08x}, '
            'file version: {1:s}, message identifier: {2:s}.').format(
                language_identifier, message_resource_file.file_version,
                message_identifier))

      # TODO: warn if new message has been found.
      insert_values = False

    if insert_values:
      values = [message_identifier, message_string]
      self._database_file.InsertValues(table_name, column_names, values)

  def _WriteMessageFile(self, message_resource_file):
    """Writes a message file.

    Args:
      message_resource_file (MessageResourceFile): message resource file.
    """
    table_name = 'message_files'
    column_names = ['path', 'file_version', 'product_version']

    has_table = self._database_file.HasTable(table_name)
    if not has_table:
      column_definitions = [
          'message_file_key INTEGER PRIMARY KEY AUTOINCREMENT',
          'path TEXT', 'file_version TEXT', 'product_version TEXT']
      self._database_file.CreateTable(table_name, column_definitions)

    if not has_table:
      insert_values = True

    else:
      condition = 'LOWER(path) = LOWER("{0:s}")'.format(
          message_resource_file.windows_path)

      if message_resource_file.file_version:
        condition = '{0:s} AND file_version = "{1:s}"'.format(
            condition, message_resource_file.file_version)

      if message_resource_file.product_version:
        condition = '{0:s} AND product_version = "{1:s}"'.format(
            condition, message_resource_file.product_version)

      values_list = list(self._database_file.GetValues(
          [table_name], column_names, condition))

      number_of_values = len(values_list)
      insert_values = number_of_values == 0

    if insert_values:
      values = [
          message_resource_file.windows_path,
          message_resource_file.file_version,
          message_resource_file.product_version]
      self._database_file.InsertValues(table_name, column_names, values)

  def _WriteMessageTable(
      self, message_resource_file, message_table, language_identifier):
    """Writes a message table for a specific language identifier.

    Args:
      message_resource_file (MessageResourceFile): message resource file.
      message_table (pywrc.message_table): message table resource.
      language_identifier (int): language identifier (LCID).
    """
    number_of_messages = message_table.get_number_of_messages(
        language_identifier)

    if number_of_messages > 0:
      message_file_key = self._GetMessageFileKey(message_resource_file)
      if message_file_key is None:
        logging.warning('Missing message file key for: {0:s}'.format(
            message_resource_file.windows_path))

      else:
        self._WriteMessageTableLanguage(message_file_key, language_identifier)

        table_name = 'message_table_0x{0:08x}'.format(language_identifier)
        if message_resource_file.file_version:
          table_name = '{0:s}_{1:s}'.format(
              table_name, message_resource_file.file_version)
          # Dots are not allowed in table names.
          table_name = re.sub(r'\.', r'_', table_name)

        has_table = self._database_file.HasTable(table_name)
        if not has_table:
          column_definitions = [
              'message_identifier TEXT', 'message_string TEXT']
          self._database_file.CreateTable(table_name, column_definitions)

        for message_index in range(0, number_of_messages):
          self._WriteMessage(
              message_resource_file, message_table, language_identifier,
              message_index, table_name, has_table)

  def _WriteMessageTableLanguage(self, message_file_key, language_identifier):
    """Writes a message table language.

    Args:
      message_file_key (int): message file key.
      language_identifier (int): language identifier (LCID).
    """
    table_name = 'message_table_languages'
    column_names = ['lcid', 'message_file_key', 'identifier']

    has_table = self._database_file.HasTable(table_name)
    if not has_table:
      column_definitions = [
          'lcid TEXT', 'message_file_key INT', 'identifier TEXT']
      self._database_file.CreateTable(table_name, column_definitions)

    if not has_table:
      insert_values = True

    else:
      condition = 'lcid = "0x{0:08x}" AND message_file_key = "{1:d}"'.format(
          language_identifier, message_file_key)
      values_list = list(self._database_file.GetValues(
          [table_name], column_names, condition))

      number_of_values = len(values_list)
      insert_values = number_of_values == 0

    if insert_values:
      values = [
          '0x{0:08x}'.format(language_identifier), message_file_key,
          definitions.LANGUAGES.get(language_identifier, ['', ''])[0]]
      self._database_file.InsertValues(table_name, column_names, values)

  def _WriteMessageTables(self):
    """Writes the message tables."""
    message_table = self._message_resource_file.GetMessageTableResource()
    try:
      number_of_languages = message_table.get_number_of_languages()
    except IOError as exception:
      number_of_languages = 0
      logging.warning((
          'Unable to retrieve number of languages from: {0:s} '
          'with error: {1:s}.').format(self._message_resource_file, exception))

    if number_of_languages > 0:
      for language_identifier in message_table.language_identifiers:
        self._WriteMessageTable(
            self._message_resource_file, message_table, language_identifier)

  def _WriteString(
      self, message_resource_file, string_table, language_identifier,
      string_index, table_name, has_table):
    """Writes a string to a specific string table.

    Args:
      message_resource_file (MessageResourceFile): message resource file.
      string_table (pywrc.strings): string table.
      language_identifier (int): language identifier (LCID).
      string_index (int): string index.
      table_name (str): name of the table.
      has_table (bool): True if the table previously existed in the database.
    """
    column_names = ['string_identifier', 'string']

    string_identifier = string_table.get_string_identifier(
        language_identifier, string_index)
    string_identifier = '0x{0:08x}'.format(string_identifier)

    string = string_table.get_string(language_identifier, string_index)

    if not has_table:
      insert_values = True

    else:
      condition = 'string_identifier = "{0:s}"'.format(string_identifier)
      values_list = list(self._database_file.GetValues(
          [table_name], column_names, condition))

      number_of_values = len(values_list)
      if number_of_values == 1:
        values = values_list[0]
        if string != values['string']:
          logging.warning((
              'String mismatch for LCID: 0x{0:08x}, '
              'file version: {1:s}, string identifier: {2:s}.\n'
              'Found: {3:s}\nStored: {4:s}\n').format(
                  language_identifier, message_resource_file.file_version,
                  string_identifier, string, values['string']))

      elif number_of_values != 0:
        logging.warning((
            'More than one string found for LCID: 0x{0:08x}, '
            'file version: {1:s}, string identifier: {2:s}.').format(
                language_identifier, message_resource_file.file_version,
                string_identifier))

      # TODO: warn if new string has been found.
      insert_values = False

    if insert_values:
      values = [string_identifier, string]
      self._database_file.InsertValues(table_name, column_names, values)

  def _WriteStringTable(
      self, message_resource_file, string_table, language_identifier):
    """Writes a string table for a specific language identifier.

    Args:
      message_resource_file (MessageResourceFile): message resource file.
      string_table (pywrc.strings): string table.
      language_identifier (int): language identifier (LCID).
    """
    number_of_strings = string_table.get_number_of_strings(
        language_identifier)

    if number_of_strings > 0:
      message_file_key = self._GetMessageFileKey(message_resource_file)
      if message_file_key is None:
        logging.warning('Missing message file key for: {0:s}'.format(
            message_resource_file.windows_path))

      else:
        self._WriteStringTableLanguage(message_file_key, language_identifier)

        # Note: the table name prefix must be "string_table" so that string
        # tables do not overwrite message tables and can be retrieved by
        # MessageFileSqlite3DatabaseReader.GetStrings.
        table_name = 'string_table_0x{0:08x}'.format(language_identifier)
        if message_resource_file.file_version:
          table_name = '{0:s}_{1:s}'.format(
              table_name, message_resource_file.file_version)
          # Dots are not allowed in table names.
          table_name = re.sub(r'\.', r'_', table_name)

        has_table = self._database_file.HasTable(table_name)
        if not has_table:
          column_definitions = ['string_identifier TEXT', 'string TEXT']
          self._database_file.CreateTable(table_name, column_definitions)

        for string_index in range(0, number_of_strings):
          self._WriteString(
              message_resource_file, string_table, language_identifier,
              string_index, table_name, has_table)

  def _WriteStringTableLanguage(self, message_file_key, language_identifier):
    """Writes a string table language.

    Args:
      message_file_key (int): message file key.
      language_identifier (int): language identifier (LCID).
    """
    table_name = 'string_table_languages'
    column_names = ['lcid', 'message_file_key', 'identifier']

    has_table = self._database_file.HasTable(table_name)
    if not has_table:
      column_definitions = [
          'lcid TEXT', 'message_file_key INT', 'identifier TEXT']
      self._database_file.CreateTable(table_name, column_definitions)

    if not has_table:
      insert_values = True

    else:
      condition = 'lcid = "0x{0:08x}" AND message_file_key = "{1:d}"'.format(
          language_identifier, message_file_key)
      values_list = list(self._database_file.GetValues(
          [table_name], column_names, condition))

      number_of_values = len(values_list)
      insert_values = number_of_values == 0

    if insert_values:
      values = [
          '0x{0:08x}'.format(language_identifier), message_file_key,
          definitions.LANGUAGES.get(language_identifier, ['', ''])[0]]
      self._database_file.InsertValues(table_name, column_names, values)

  def _WriteStringTables(self):
    """Writes the string tables."""
    string_table = self._message_resource_file.GetStringResource()
    if not string_table:
      return

    try:
      number_of_languages = string_table.get_number_of_languages()
    except IOError as exception:
      number_of_languages = 0
      logging.warning((
          'Unable to retrieve number of languages from: {0:s} '
          'with error: {1:s}.').format(self._message_resource_file, exception))

    if number_of_languages > 0:
      for language_identifier in string_table.language_identifiers:
        self._WriteStringTable(
            self._message_resource_file, string_table, language_identifier)

  def WriteResources(self):
    """Writes the resources."""
    self._WriteMessageFile(self._message_resource_file)
    self._WriteMessageTables()
    # TODO: only write the string resources of Event Log parameter files.
    # self._WriteStringTables()
class ResourcesSqlite3DatabaseReader(Sqlite3DatabaseReader):
  """Class to represent an Event Log resources sqlite3 database reader."""

  def _GetEventLogProviderKey(self, log_source):
    """Retrieves the Event Log provider key.

    Args:
      log_source (str): Event Log source.

    Returns:
      int: an Event Log provider key or None if not available.

    Raises:
      IOError: if more than one value is found in the database.
      OSError: if more than one value is found in the database.
    """
    table_names = ['event_log_providers']
    column_names = ['event_log_provider_key']
    condition = 'log_source == "{0:s}"'.format(log_source)

    values_list = list(self._database_file.GetValues(
        table_names, column_names, condition))

    number_of_values = len(values_list)
    if number_of_values == 0:
      return None

    if number_of_values == 1:
      values = values_list[0]
      return values['event_log_provider_key']

    raise IOError('More than one value found in database.')

  def _GetMessage(self, message_file_key, lcid, message_identifier):
    """Retrieves a specific message from a specific message table.

    Args:
      message_file_key (int): message file key.
      lcid (int): language code identifier (LCID).
      message_identifier (int): message identifier.

    Returns:
      str: the message string or None if not available.

    Raises:
      IOError: if more than one value is found in the database.
      OSError: if more than one value is found in the database.
    """
    table_name = 'message_table_{0:d}_0x{1:08x}'.format(message_file_key, lcid)

    has_table = self._database_file.HasTable(table_name)
    if not has_table:
      return None

    column_names = ['message_string']
    condition = 'message_identifier == "0x{0:08x}"'.format(message_identifier)

    values = list(self._database_file.GetValues(
        [table_name], column_names, condition))

    number_of_values = len(values)
    if number_of_values == 0:
      return None

    if number_of_values == 1:
      return values[0]['message_string']

    raise IOError('More than one value found in database.')

  def _GetMessageFileKeys(self, event_log_provider_key):
    """Retrieves the message file keys.

    Args:
      event_log_provider_key (int): the Event Log provider key.

    Yields:
      int: a message file key.
    """
    table_names = ['message_file_per_event_log_provider']
    column_names = ['message_file_key']
    condition = 'event_log_provider_key == {0:d}'.format(
        event_log_provider_key)

    generator = self._database_file.GetValues(
        table_names, column_names, condition)

    # pylint: disable=not-an-iterable
    for values in generator:
      yield values['message_file_key']

  def _GetMessageFilenames(self, log_source, message_file_type):
    """Retrieves the message filenames of a specific Event Log provider.

    Args:
      log_source (str): Event Log source.
      message_file_type (str): message file type.

    Returns:
      list[str]: message filenames.
    """
    table_names = [
        'event_log_providers', 'message_file_per_event_log_provider',
        'message_files']
    column_names = ['message_files.path']
    condition = (
        '{0:s}.log_source == "{3:s}" AND '
        '{1:s}.message_file_type == "{4:s}" AND '
        '{0:s}.event_log_provider_key == {1:s}.event_log_provider_key AND '
        '{1:s}.message_file_key == {2:s}.message_file_key').format(
            'event_log_providers', 'message_file_per_event_log_provider',
            'message_files', log_source, message_file_type)

    message_filenames = []
    for values in self._database_file.GetValues(
        table_names, column_names, condition):
      message_filename = values['message_files.path']
      message_filenames.append(message_filename)

    return message_filenames

  def _GetMessages(self, message_file_key, lcid):
    """Retrieves the messages of a specific message table.

    Args:
      message_file_key (int): message file key.
      lcid (int): language code identifier (LCID).

    Yields:
      tuple[int, str]: message identifier and message string.
    """
    table_name = 'message_table_{0:d}_0x{1:08x}'.format(message_file_key, lcid)

    has_table = self._database_file.HasTable(table_name)
    if has_table:
      column_names = ['message_identifier', 'message_string']
      condition = ''

      for values in self._database_file.GetValues(
          [table_name], column_names, condition):
        yield values['message_identifier'], values['message_string']

  def GetEventLogProviders(self):
    """Retrieves the Event Log providers.

    Yields:
      EventLogProvider: an Event Log provider.
    """
    table_names = ['event_log_providers']
    column_names = ['log_source', 'provider_guid']
    condition = ''

    # Materialize the providers first so the message filename queries below
    # do not run while the provider query cursor is still active.
    event_log_providers = []
    for values in self._database_file.GetValues(
        table_names, column_names, condition):
      event_log_provider = resources.EventLogProvider(
          None, values['log_source'], values['provider_guid'])
      event_log_providers.append(event_log_provider)

    for event_log_provider in event_log_providers:
      message_filenames = self._GetMessageFilenames(
          event_log_provider.log_source,
          definitions.MESSAGE_FILE_TYPE_CATEGORY)
      event_log_provider.SetCategoryMessageFilenames(message_filenames)

      message_filenames = self._GetMessageFilenames(
          event_log_provider.log_source, definitions.MESSAGE_FILE_TYPE_EVENT)
      event_log_provider.SetEventMessageFilenames(message_filenames)

      message_filenames = self._GetMessageFilenames(
          event_log_provider.log_source,
          definitions.MESSAGE_FILE_TYPE_PARAMETER)
      event_log_provider.SetParameterMessageFilenames(message_filenames)

      yield event_log_provider

  def GetMessage(self, log_source, lcid, message_identifier):
    """Retrieves a specific message for a specific Event Log source.

    Args:
      log_source (str): Event Log source.
      lcid (int): language code identifier (LCID).
      message_identifier (int): message identifier.

    Returns:
      str: the message string or None if not available.
    """
    event_log_provider_key = self._GetEventLogProviderKey(log_source)
    if not event_log_provider_key:
      return None

    # Return the first message string found in any of the provider's
    # message files.
    message_string = None
    for message_file_key in self._GetMessageFileKeys(event_log_provider_key):
      message_string = self._GetMessage(
          message_file_key, lcid, message_identifier)
      if message_string:
        break

    return message_string

  def GetMessages(self, log_source, lcid):
    """Retrieves the messages of a specific Event Log source.

    Args:
      log_source (str): Event Log source.
      lcid (int): language code identifier (LCID).

    Yields:
      tuple[int, str]: message identifier and message string.
    """
    event_log_provider_key = self._GetEventLogProviderKey(log_source)
    if event_log_provider_key:
      for message_file_key in self._GetMessageFileKeys(event_log_provider_key):
        for message_identifier, message_string in self._GetMessages(
            message_file_key, lcid):
          yield message_identifier, message_string

  def GetMetadataAttribute(self, attribute_name):
    """Retrieves the metadata attribute.

    Args:
      attribute_name (str): name of the metadata attribute.

    Returns:
      str: value of the metadata attribute or None.

    Raises:
      IOError: if more than one value is found in the database.
      OSError: if more than one value is found in the database.
    """
    table_name = 'metadata'

    has_table = self._database_file.HasTable(table_name)
    if not has_table:
      return None

    column_names = ['value']
    condition = 'name == "{0:s}"'.format(attribute_name)

    values = list(self._database_file.GetValues(
        [table_name], column_names, condition))

    number_of_values = len(values)
    if number_of_values == 0:
      return None

    if number_of_values == 1:
      return values[0]['value']

    raise IOError('More than one value found in database.')
class ResourcesSqlite3DatabaseWriter(Sqlite3DatabaseWriter):
  """Class to represent a sqlite3 Event Log resources database writer."""

  # Message string specifiers that are considered white space.
  # Matches %0, %b and raw CR/LF characters, which are stripped.
  _WHITE_SPACE_SPECIFIER_RE = re.compile(r'(%[0b]|[\r\n])')
  # Message string specifiers that expand to text.
  # Matches the single-character escapes %., %!, %%, %n, %r, %t and "% ".
  _TEXT_SPECIFIER_RE = re.compile(r'%([ .!%nrt])')
  # Curly brackets in a message string.
  # Used to escape { and } before converting to Python format() style.
  _CURLY_BRACKETS = re.compile(r'([\{\}])')
  # Message string specifiers that expand to a variable place holder.
  # Matches %1 through %99 with optional ! and s decorations.
  _PLACE_HOLDER_SPECIFIER_RE = re.compile(r'%([1-9][0-9]?)[!]?[s]?[!]?')
def __init__(self, string_format='wrc'):
"""Initializes the database writer.
Args:
string_format (Optional[str]): string format. The default is the Windows
Resource (wrc) format.
"""
super(ResourcesSqlite3DatabaseWriter, self).__init__()
self._string_format = string_format
def _GetEventLogProviderKey(self, event_log_provider):
"""Retrieves the key of an Event Log provider.
Args:
event_log_provider (EventLogProvider): event log provider.
Returns:
int: the Event Log provider key or None if no such value.
Raises:
IOError: if more than one value is found in the database.
OSError: if more than one value is found in the database.
"""
table_names = ['event_log_providers']
column_names = ['event_log_provider_key']
condition = 'log_source = "{0:s}"'.format(
event_log_provider.log_source)
values_list = list(self._database_file.GetValues(
table_names, column_names, condition))
number_of_values = len(values_list)
if number_of_values == 0:
return None
if number_of_values == 1:
values = values_list[0]
return values['event_log_provider_key']
raise IOError('More than one value found in database.')
def _GetMessageFileKey(self, message_file):
"""Retrieves the key of a message file.
Args:
message_file (MessageFile): message file.
Returns:
int: the message file key or None if no such value.
Raises:
IOError: if more than one value is found in the database.
OSError: if more than one value is found in the database.
"""
table_names = ['message_files']
column_names = ['message_file_key']
condition = 'LOWER(path) = LOWER("{0:s}")'.format(
message_file.windows_path)
values_list = list(self._database_file.GetValues(
table_names, column_names, condition))
number_of_values = len(values_list)
if number_of_values == 0:
return None
if number_of_values == 1:
values = values_list[0]
return values['message_file_key']
raise IOError('More than one value found in database.')
def _GetMessageFileKeyByPath(self, message_filename):
"""Retrieves the key of a message file for a specific path.
Args:
message_filename (str): message filename.
Returns:
int: the message file key or None if no such value.
Raises:
IOError: if more than one value is found in the database.
OSError: if more than one value is found in the database.
"""
table_names = ['message_files']
column_names = ['message_file_key']
condition = 'LOWER(path) = LOWER("{0:s}")'.format(message_filename)
values_list = list(self._database_file.GetValues(
table_names, column_names, condition))
number_of_values = len(values_list)
if number_of_values == 0:
return None
if number_of_values == 1:
values = values_list[0]
return values['message_file_key']
raise IOError('More than one value found in database.')
def _ReformatMessageString(self, message_string):
"""Reformats the message string.
Args:
message_string (str): message string.
Returns:
str: message string in Python format() (PEP 3103) style or None
if not available.
"""
def PlaceHolderSpecifierReplacer(match_object):
"""Replaces message string place holders into Python format() style."""
expanded_groups = []
for group in match_object.groups():
try:
place_holder_number = int(group, 10) - 1
expanded_group = '{{{0:d}:s}}'.format(place_holder_number)
except ValueError:
expanded_group = group
expanded_groups.append(expanded_group)
return ''.join(expanded_groups)
if not message_string:
return None
message_string = self._WHITE_SPACE_SPECIFIER_RE.sub(r'', message_string)
message_string = self._TEXT_SPECIFIER_RE.sub(r'\\\1', message_string)
message_string = self._CURLY_BRACKETS.sub(r'\1\1', message_string)
return self._PLACE_HOLDER_SPECIFIER_RE.sub(
PlaceHolderSpecifierReplacer, message_string)
def _WriteMessage(
self, message_file, language_identifier, message_identifier,
message_string, table_name, has_table):
"""Writes a message to a specific message table.
Args:
message_file (MessageFile): message file.
language_identifier (int): language identifier (LCID).
message_identifier (int): message identifier.
message_string (str): message string.
table_name (str): name of the table.
has_table (bool): True if the table previously existed in the database.
"""
column_names = ['message_identifier', 'message_string']
if not has_table:
insert_values = True
else:
condition = 'message_identifier = "{0:s}"'.format(message_identifier)
values_list = list(self._database_file.GetValues(
[table_name], column_names, condition))
number_of_values = len(values_list)
if number_of_values == 1:
values = values_list[0]
if message_string != values['message_string']:
logging.warning((
'Message string mismatch for LCID: {0:s}, '
'file version: {1:s}, message identifier: {2:s}.\n'
'Found: {3:s}\nStored: {4:s}\n').format(
language_identifier, message_file.file_version,
message_identifier, message_string,
values['message_string']))
elif number_of_values != 0:
logging.warning((
'More than one message string found for LCID: {0:s}, '
'file version: {1:s}, message identifier: {2:s}.').format(
language_identifier, message_file.file_version,
message_identifier))
# TODO: warn if new message has been found.
insert_values = False
if insert_values:
if self._string_format == 'pep3101':
message_string = self._ReformatMessageString(message_string)
values = [message_identifier, message_string]
self._database_file.InsertValues(table_name, column_names, values)
def _WriteMessageFile(self, message_file):
"""Writes a message file.
Args:
message_file (MessageFile): message file.
"""
table_name = 'message_files'
column_names = ['path']
has_table = self._database_file.HasTable(table_name)
if not has_table:
column_definitions = [
'message_file_key INTEGER PRIMARY KEY AUTOINCREMENT', 'path TEXT']
self._database_file.CreateTable(table_name, column_definitions)
if not has_table:
insert_values = True
else:
condition = 'LOWER(path) = LOWER("{0:s}")'.format(
message_file.windows_path)
values_list = list(self._database_file.GetValues(
[table_name], column_names, condition))
number_of_values = len(values_list)
insert_values = number_of_values == 0
if insert_values:
values = [message_file.windows_path]
self._database_file.InsertValues(table_name, column_names, values)
def _WriteMessageTable(self, message_file, message_table):
"""Writes a message table for a specific language identifier.
Args:
message_file (MessageFile): message file.
message_table (MessageTable): message table.
"""
if message_table.message_strings:
message_file_key = self._GetMessageFileKey(message_file)
if message_file_key is None:
logging.warning('Missing message file key for: {0:s}'.format(
message_file.windows_path))
table_name = 'message_table_{0:d}_{1:s}'.format(
message_file_key, message_table.lcid)
has_table = self._database_file.HasTable(table_name)
if not has_table:
column_definitions = [
'message_identifier TEXT', 'message_string TEXT']
self._database_file.CreateTable(table_name, column_definitions)
message_strings = message_table.message_strings
for message_identifier, message_string in message_strings.items():
self._WriteMessage(
message_file, message_table.lcid, message_identifier,
message_string, table_name, has_table)
self._WriteMessageTableLanguage(message_file_key, message_table.lcid)
def _WriteMessageTableLanguage(self, message_file_key, language_identifier):
"""Writes a message table language.
Args:
message_file_key (int): message file key.
language_identifier (int): language identifier (LCID).
"""
table_name = 'message_table_languages'
column_names = ['lcid', 'message_file_key']
has_table = self._database_file.HasTable(table_name)
if not has_table:
column_definitions = ['lcid TEXT', 'message_file_key INT']
self._database_file.CreateTable(table_name, column_definitions)
if not has_table:
insert_values = True
else:
condition = 'lcid = "{0:s}" AND message_file_key = "{1:d}"'.format(
language_identifier, message_file_key)
values_list = list(self._database_file.GetValues(
[table_name], column_names, condition))
number_of_values = len(values_list)
insert_values = number_of_values == 0
if insert_values:
values = [language_identifier, message_file_key]
self._database_file.InsertValues(table_name, column_names, values)
def WriteEventLogProvider(self, event_log_provider):
"""Writes the Event Log provider.
Args:
event_log_provider (EventLogProvider): event log provider.
"""
table_name = 'event_log_providers'
column_names = ['log_source', 'provider_guid']
has_table = self._database_file.HasTable(table_name)
if not has_table:
column_definitions = [
'event_log_provider_key INTEGER PRIMARY KEY AUTOINCREMENT',
'log_source TEXT', 'provider_guid TEXT']
self._database_file.CreateTable(table_name, column_definitions)
insert_values = True
else:
condition = 'log_source = "{0:s}"'.format(event_log_provider.log_source)
values_list = list(self._database_file.GetValues(
[table_name], column_names, condition))
number_of_values = len(values_list)
# TODO: check if more than 1 result.
insert_values = number_of_values == 0
if insert_values:
values = [
event_log_provider.log_source, event_log_provider.provider_guid]
self._database_file.InsertValues(table_name, column_names, values)
def WriteMessageFile(self, message_file):
"""Writes the Windows Message Resource file.
Args:
message_file (MessageFile): message file.
"""
self._WriteMessageFile(message_file)
for message_table in message_file.GetMessageTables():
# TODO track the languages in a table.
self._WriteMessageTable(message_file, message_table)
def WriteMessageFilesPerEventLogProvider(
    self, event_log_provider, message_filename, message_file_type):
  """Writes the message files used by an Event Log provider.

  Args:
    event_log_provider (EventLogProvider): event log provider.
    message_filename (str): message filename.
    message_file_type (str): message file type.
  """
  table_name = 'message_file_per_event_log_provider'
  column_names = [
      'message_file_key', 'message_file_type', 'event_log_provider_key']

  event_log_provider_key = self._GetEventLogProviderKey(event_log_provider)
  if event_log_provider_key is None:
    logging.warning('Missing event log provider key for: {0:s}'.format(
        event_log_provider.log_source))
    # Bail out: formatting None with {0:d} below would raise TypeError and
    # inserting a row without a provider key would orphan it.
    return

  message_file_key = self._GetMessageFileKeyByPath(message_filename)
  if message_file_key is None:
    logging.warning('Missing message file key for: {0:s}'.format(
        message_filename))
    return

  has_table = self._database_file.HasTable(table_name)
  if not has_table:
    column_definitions = [
        'message_file_key INTEGER', 'message_file_type TEXT',
        'event_log_provider_key INTEGER']
    self._database_file.CreateTable(table_name, column_definitions)
    insert_values = True
  else:
    condition = (
        'message_file_key = {0:d} AND message_file_type = "{1:s}" AND '
        'event_log_provider_key = {2:d}').format(
            message_file_key, message_file_type, event_log_provider_key)
    values_list = list(self._database_file.GetValues(
        [table_name], column_names, condition))
    number_of_values = len(values_list)
    # TODO: check if more than 1 result.
    insert_values = number_of_values == 0

  if insert_values:
    values = [message_file_key, message_file_type, event_log_provider_key]
    self._database_file.InsertValues(table_name, column_names, values)
def WriteMetadataAttribute(self, attribute_name, attribute_value):
  """Writes a metadata attribute.

  Creates the metadata table on first use; the attribute is inserted only
  when no row with the same name is present.

  Args:
    attribute_name (str): name of the metadata attribute.
    attribute_value (str): value of the metadata attribute.
  """
  table_name = 'metadata'
  column_names = ['name', 'value']

  if not self._database_file.HasTable(table_name):
    self._database_file.CreateTable(table_name, ['name TEXT', 'value TEXT'])
    number_of_existing_rows = 0
  else:
    condition = 'name = "{0:s}"'.format(attribute_name)
    existing_rows = list(self._database_file.GetValues(
        [table_name], column_names, condition))
    # TODO: check if more than 1 result.
    number_of_existing_rows = len(existing_rows)

  if not number_of_existing_rows:
    self._database_file.InsertValues(
        table_name, column_names, [attribute_name, attribute_value])
| 34.176471
| 79
| 0.687877
| 7,018
| 57,519
| 5.354802
| 0.045882
| 0.049761
| 0.048536
| 0.02174
| 0.821181
| 0.791166
| 0.760245
| 0.743135
| 0.721794
| 0.685471
| 0
| 0.006588
| 0.218902
| 57,519
| 1,682
| 80
| 34.19679
| 0.829861
| 0.228672
| 0
| 0.686339
| 0
| 0.001093
| 0.158272
| 0.031814
| 0
| 0
| 0
| 0.007134
| 0
| 1
| 0.066667
| false
| 0
| 0.00765
| 0
| 0.130055
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
df8ff6307f4e7b5eadb02e4fb8c766073b666241
| 21
|
py
|
Python
|
__init__.py
|
acutkosky/freerex
|
58f188db7da6e1cb3224f768e76b944b99d8c5c1
|
[
"MIT"
] | 4
|
2017-08-27T11:54:05.000Z
|
2021-02-04T14:23:35.000Z
|
__init__.py
|
acutkosky/freerex
|
58f188db7da6e1cb3224f768e76b944b99d8c5c1
|
[
"MIT"
] | null | null | null |
__init__.py
|
acutkosky/freerex
|
58f188db7da6e1cb3224f768e76b944b99d8c5c1
|
[
"MIT"
] | 3
|
2018-02-04T19:50:05.000Z
|
2021-02-04T14:23:37.000Z
|
from freerex import *
| 21
| 21
| 0.809524
| 3
| 21
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 21
| 1
| 21
| 21
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
10d007a43d3b2a438346d3b7df8b3d4debd8c562
| 4,515
|
py
|
Python
|
multigraph/features.py
|
kayoyin/scr
|
a2193817b4833da339d32d0b1a502e13998a736a
|
[
"BSD-3-Clause"
] | 1
|
2021-10-08T08:56:20.000Z
|
2021-10-08T08:56:20.000Z
|
multigraph/features.py
|
kayoyin/scr
|
a2193817b4833da339d32d0b1a502e13998a736a
|
[
"BSD-3-Clause"
] | null | null | null |
multigraph/features.py
|
kayoyin/scr
|
a2193817b4833da339d32d0b1a502e13998a736a
|
[
"BSD-3-Clause"
] | 1
|
2022-01-27T07:01:29.000Z
|
2022-01-27T07:01:29.000Z
|
import re
import numpy as np
# Glosses that refer to the signer themself (first person).
I_SIGNS = ["I1","I2"]
# Glosses that refer to the addressee (second person).
YOU_SIGNS = ["YOU", "YOU1"]
# Pointing-sign glosses used to index referents in signing space.
INDEX_SIGNS = ["INDEX1", "INDEX2", "INDEX4", "INDEX-AREA", "INDEX-ORAL"]
# Pixel-distance threshold for treating two pointing signs as co-located.
dist_threshold = 50
def normalize(gloss):
    """Return the gloss with leading/trailing '$', '^' and '*' markers removed."""
    marker_chars = "$^*"
    return gloss.strip(marker_chars)
def dist(loc1, loc2):
    """Return the Euclidean distance between two (x, y) locations."""
    dx = loc1[0] - loc2[0]
    dy = loc1[1] - loc2[1]
    return np.sqrt(dx * dx + dy * dy)
# Multigraph features
def me_or_you(anaphor, antecedent):
    """Score 0.5 when two first/second-person signs can corefer, else 0."""
    ana_gloss = normalize(anaphor.attributes["tokens"][0])
    ante_gloss = normalize(antecedent.attributes["tokens"][0])
    same_signer = (
        anaphor.attributes["speaker"][0] == antecedent.attributes["speaker"][0])
    ana_i, ana_you = ana_gloss in I_SIGNS, ana_gloss in YOU_SIGNS
    ante_i, ante_you = ante_gloss in I_SIGNS, ante_gloss in YOU_SIGNS
    if same_signer:
        # Same signer: I..I or YOU..YOU refer to the same person.
        if (ana_you and ante_you) or (ana_i and ante_i):
            return 0.5
    elif (ana_you and ante_i) or (ana_i and ante_you):
        # Different signers: person roles flip, so I..YOU / YOU..I corefer.
        return 0.5
    return 0
def not_me_or_you(anaphor, antecedent):
    """Veto (-inf) first/second-person sign pairs whose referents cannot match."""
    ana_gloss = normalize(anaphor.attributes["tokens"][0])
    ante_gloss = normalize(antecedent.attributes["tokens"][0])
    same_signer = (
        anaphor.attributes["speaker"][0] == antecedent.attributes["speaker"][0])
    ana_i, ana_you = ana_gloss in I_SIGNS, ana_gloss in YOU_SIGNS
    ante_i, ante_you = ante_gloss in I_SIGNS, ante_gloss in YOU_SIGNS
    if same_signer:
        # Same signer: I..YOU / YOU..I are incompatible.
        if (ana_you and ante_i) or (ana_i and ante_you):
            return -np.inf
    elif (ana_you and ante_you) or (ana_i and ante_i):
        # Different signers: I..I / YOU..YOU are incompatible.
        return -np.inf
    return 0
def spatially_close(anaphor, antecedent):
    """Reward index-sign pairs by the same signer that point to nearby locations.

    Applies only when both mentions are index signs, share a signer and start
    within 100 time units of each other. The reward grows linearly as the
    closest hand-landmark distance (min over MCP and fingertip) shrinks.
    """
    if (anaphor.span.begin - antecedent.span.end) < 100 \
            and anaphor.attributes["speaker"][0] == antecedent.attributes["speaker"][0] \
            and normalize(anaphor.attributes["tokens"][0]) in INDEX_SIGNS \
            and normalize(antecedent.attributes["tokens"][0]) in INDEX_SIGNS:
        distance = min(
            dist(anaphor.attributes["mcp"][0], antecedent.attributes["mcp"][0]),
            dist(anaphor.attributes["tip"][0], antecedent.attributes["tip"][0]))
        # Use the module-level dist_threshold (= 50) instead of repeating the
        # magic number; the constant was defined but never referenced.
        return max(0, 0.5 + (dist_threshold - distance) / dist_threshold)
    return 0
def prev_ante_is_noun(anaphor, antecedent):
    """Reward an index sign whose immediately preceding token looks nominal."""
    ante_gloss = normalize(antecedent.attributes["tokens"][0])
    same_signer = (
        anaphor.attributes["speaker"][0] == antecedent.attributes["speaker"][0])
    immediately_before = antecedent.span.end == anaphor.span.begin - 1
    anaphor_is_index = normalize(anaphor.attributes["tokens"][0]) in INDEX_SIGNS
    # "Nominal" here means: not a TO-/GEST- gloss and not itself an index sign.
    nominal_antecedent = (
        not ante_gloss.startswith("TO-")
        and not ante_gloss.startswith("GEST-")
        and ante_gloss not in INDEX_SIGNS)
    if same_signer and immediately_before and anaphor_is_index and nominal_antecedent:
        return 0.5
    return 0
def third_person(anaphor, antecedent):
    """Veto pairing an index (third-person) sign with a first/second-person sign."""
    ana_gloss = normalize(anaphor.attributes["tokens"][0])
    ante_gloss = normalize(antecedent.attributes["tokens"][0])
    person_signs = I_SIGNS + YOU_SIGNS
    mismatch = (
        (ana_gloss in INDEX_SIGNS and ante_gloss in person_signs)
        or (ante_gloss in INDEX_SIGNS and ana_gloss in person_signs))
    return -np.inf if mismatch else 0
def spatially_far(anaphor, antecedent):
    """Veto index-sign pairs whose pointing locations are over 100 px apart."""
    ana_is_index = normalize(anaphor.attributes["tokens"][0]) in INDEX_SIGNS
    ante_is_index = normalize(antecedent.attributes["tokens"][0]) in INDEX_SIGNS
    if ana_is_index and ante_is_index:
        mcp_distance = dist(
            anaphor.attributes["mcp"][0], antecedent.attributes["mcp"][0])
        tip_distance = dist(
            anaphor.attributes["tip"][0], antecedent.attributes["tip"][0])
        # Compare the closest landmark pair (MCP joint or fingertip).
        if min(mcp_distance, tip_distance) > 100:
            return -np.inf
    return 0
# Baseline features
def base_me_or_you(anaphor, antecedent):
    """Baseline variant of me_or_you: infinite score for compatible I/YOU pairs."""
    ana_gloss = normalize(anaphor.attributes["tokens"][0])
    ante_gloss = normalize(antecedent.attributes["tokens"][0])
    same_signer = (
        anaphor.attributes["speaker"][0] == antecedent.attributes["speaker"][0])
    ana_i, ana_you = ana_gloss in I_SIGNS, ana_gloss in YOU_SIGNS
    ante_i, ante_you = ante_gloss in I_SIGNS, ante_gloss in YOU_SIGNS
    if same_signer:
        # Same signer: I..I or YOU..YOU corefer.
        if (ana_you and ante_you) or (ana_i and ante_i):
            return np.inf
    elif (ana_you and ante_i) or (ana_i and ante_you):
        # Different signers: roles flip, so I..YOU / YOU..I corefer.
        return np.inf
    return 0
def temporally_close(anaphor, antecedent):
    """Baseline: infinite attraction for same-signer index pairs within 100 units."""
    close_in_time = (anaphor.span.begin - antecedent.span.end) < 100
    same_signer = (
        anaphor.attributes["speaker"][0] == antecedent.attributes["speaker"][0])
    both_index = (
        normalize(antecedent.attributes["tokens"][0]) in INDEX_SIGNS
        and normalize(anaphor.attributes["tokens"][0]) in INDEX_SIGNS)
    if close_in_time and same_signer and both_index:
        return np.inf
    return 0
| 46.071429
| 320
| 0.714729
| 621
| 4,515
| 5.020934
| 0.122383
| 0.062861
| 0.087235
| 0.058371
| 0.859846
| 0.822643
| 0.810135
| 0.788967
| 0.767479
| 0.665491
| 0
| 0.021497
| 0.165449
| 4,515
| 97
| 321
| 46.546392
| 0.805998
| 0.008195
| 0
| 0.635135
| 0
| 0
| 0.059021
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.135135
| false
| 0
| 0.027027
| 0.013514
| 0.445946
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
10d984760586ff43346456ef9dfb911d05efbc36
| 32
|
py
|
Python
|
bronze/5/14928.py
|
darkapplepower/baekjoon
|
4148e8e205f33c07570e52fef123f9a73014fbdb
|
[
"WTFPL"
] | 1
|
2022-03-06T03:51:06.000Z
|
2022-03-06T03:51:06.000Z
|
bronze/5/14928.py
|
darkapplepower/baekjoon
|
4148e8e205f33c07570e52fef123f9a73014fbdb
|
[
"WTFPL"
] | null | null | null |
bronze/5/14928.py
|
darkapplepower/baekjoon
|
4148e8e205f33c07570e52fef123f9a73014fbdb
|
[
"WTFPL"
] | null | null | null |
# Read one integer and print its remainder modulo 20000303 (Baekjoon 14928).
number = int(input())
print(number % 20000303)
| 16
| 17
| 0.71875
| 6
| 32
| 3.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.258065
| 0.03125
| 32
| 2
| 17
| 16
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
10de49d39d0cc08cf6c08caefa71608b618479c7
| 9,677
|
py
|
Python
|
test/test_integration_cli_ecs.py
|
mergermarket/cdflow-commands
|
d1665e668428985ec6759f8aa59580d363701bc5
|
[
"MIT"
] | null | null | null |
test/test_integration_cli_ecs.py
|
mergermarket/cdflow-commands
|
d1665e668428985ec6759f8aa59580d363701bc5
|
[
"MIT"
] | 1,626
|
2017-03-09T23:41:30.000Z
|
2022-03-31T06:15:27.000Z
|
test/test_integration_cli_ecs.py
|
mergermarket/cdflow-commands
|
d1665e668428985ec6759f8aa59580d363701bc5
|
[
"MIT"
] | 3
|
2017-03-22T17:06:26.000Z
|
2020-10-15T09:43:29.000Z
|
import json
import unittest
from datetime import datetime
from io import TextIOWrapper
from cdflow_commands import cli
from mock import MagicMock, Mock, mock_open, patch
import yaml
# NOTE: mock.patch decorators are applied bottom-up, so the test-method
# parameters below receive the mocks in reverse decorator order
# (plugins.ecs.check_call first, release._copy_platform_config last as `_`).
@patch('cdflow_commands.release._copy_platform_config')
@patch('cdflow_commands.cli.check_output')
@patch('cdflow_commands.release.os')
@patch('cdflow_commands.release.copytree')
@patch('cdflow_commands.release.check_call')
@patch('cdflow_commands.release.make_archive')
@patch('cdflow_commands.release.open', new_callable=mock_open, create=True)
@patch('cdflow_commands.cli.os')
@patch('cdflow_commands.cli.rmtree')
@patch('cdflow_commands.cli.Session')
@patch('cdflow_commands.config.Session')
@patch('cdflow_commands.config.open', new_callable=mock_open, create=True)
@patch('cdflow_commands.config.check_output')
@patch('cdflow_commands.plugins.ecs.check_call')
class TestReleaseCLI(unittest.TestCase):
    """Integration tests for `cdflow release` with the ECS (docker) plugin."""

    def test_release_is_configured_and_created(
        self, check_call, check_output, mock_open, Session_from_config,
        Session_from_cli, rmtree, mock_os, mock_open_release, make_archive,
        check_call_release, copytree, mock_os_release, check_output_cli, _
    ):
        # --- fixture: cdflow.yml metadata read via the patched open() ---
        mock_metadata_file = MagicMock(spec=TextIOWrapper)
        metadata = {
            'account-scheme-url': 's3://bucket/key',
            'team': 'your-team',
            'type': 'docker',
        }
        mock_metadata_file.read.return_value = yaml.dump(metadata)
        mock_open.return_value.__enter__.return_value = mock_metadata_file
        mock_root_session = Mock()
        mock_root_session.region_name = 'us-east-1'
        Session_from_cli.return_value = mock_root_session
        # --- fixture: account scheme fetched from S3 ---
        mock_s3_body = Mock()
        mock_s3_body.read.return_value = json.dumps({
            'accounts': {
                'foodev': {
                    'id': '123456789',
                    'role': 'admin',
                }
            },
            'release-account': 'foodev',
            'release-bucket': 'releases',
            'default-region': 'us-north-4',
            'environments': {
                'live': 'foodev',
            },
            'terraform-backend-s3-bucket': 'tfstate-bucket',
            'terraform-backend-s3-dynamodb-table': 'tflocks-table',
            'classic-metadata-handling': True,
        })
        mock_s3_resource = Mock()
        mock_s3_resource.Object.return_value.get.return_value = {
            'Body': mock_s3_body,
        }
        mock_root_session.resource.return_value = mock_s3_resource
        # --- fixture: ECR login token for docker push ---
        mock_ecr_client = Mock()
        mock_ecr_client.get_authorization_token.return_value = {
            'authorizationData': [
                {
                    'authorizationToken': 'dXNlcm5hbWU6cGFzc3dvcmQ=',
                    'proxyEndpoint': 'dummy-endpoint'
                }
            ]
        }
        mock_session = Mock()
        mock_session.client.return_value = mock_ecr_client
        Session_from_config.return_value = mock_session
        # --- fixture: STS identity and assumed release-account role ---
        mock_sts = Mock()
        user_id = 'foo'
        mock_sts.get_caller_identity.return_value = {
            u'UserId': user_id,
            'Arn': 'dummy_arn'
        }
        mock_sts.assume_role.return_value = {
            'Credentials': {
                'AccessKeyId': 'dummy-access-key-id',
                'SecretAccessKey': 'dummy-secret-access-key',
                'SessionToken': 'dummy-session-token',
                'Expiration': datetime(2015, 1, 1)
            },
            'AssumedRoleUser': {
                'AssumedRoleId': 'dummy-assumed-role-id',
                'Arn': 'dummy-arn'
            },
            'PackedPolicySize': 123
        }
        mock_root_session.client.return_value = mock_sts
        mock_release_file = MagicMock(spec=TextIOWrapper)
        mock_open_release.return_value.__enter__.return_value = \
            mock_release_file
        component_name = 'dummy-component'
        version = '1.2.3'
        make_archive.return_value = '/tmp/tmpvyzXQB/{}-{}.zip'.format(
            component_name, version
        )
        mock_os_release.environ = {'CDFLOW_IMAGE_DIGEST': 'hash'}
        check_output_cli.return_value = 'hash\n'.encode('utf-8')
        image_name = '{}.dkr.ecr.{}.amazonaws.com/{}:{}'.format(
            123456789,
            'us-north-4',
            component_name,
            version
        )
        # --- exercise: component name passed explicitly via -c ---
        cli.run([
            'release', '--platform-config', 'path/to/config',
            version, '-c', component_name
        ])
        # --- verify: git SHA lookup, role assumption, docker build/push, upload ---
        check_output_cli.assert_called_once_with(['git', 'rev-parse', 'HEAD'])
        mock_sts.assume_role.assert_called_once_with(
            DurationSeconds=14400,
            RoleArn='arn:aws:iam::123456789:role/admin',
            RoleSessionName=user_id,
        )
        check_call.assert_any_call([
            'docker', 'build',
            '-t', image_name, '.'
        ])
        check_call.assert_any_call(['docker', 'push', image_name])
        mock_session.resource.return_value.Object.assert_called_once_with(
            'releases',
            '{}/{}-{}.zip'.format(component_name, component_name, version)
        )
        mock_session.resource.return_value.Object.return_value\
            .upload_file.assert_called_once_with(
                make_archive.return_value,
                ExtraArgs={'Metadata': {'cdflow_image_digest': 'hash'}},
            )

    def test_release_uses_component_name_from_origin(
        self, check_call, check_output, mock_open, Session_from_config,
        Session_from_cli, rmtree, mock_os, mock_open_release, make_archive,
        check_call_release, copytree, mock_os_release, check_output_cli, _
    ):
        # Same wiring as above, but no -c flag: the component name must be
        # derived from the git origin URL returned by check_output.
        mock_metadata_file = MagicMock(spec=TextIOWrapper)
        metadata = {
            'account-scheme-url': 's3://bucket/key',
            'team': 'your-team',
            'type': 'docker',
        }
        mock_metadata_file.read.return_value = yaml.dump(metadata)
        mock_open.return_value.__enter__.return_value = mock_metadata_file
        mock_root_session = Mock()
        mock_root_session.region_name = 'us-east-1'
        Session_from_cli.return_value = mock_root_session
        mock_s3_body = Mock()
        mock_s3_body.read.return_value = json.dumps({
            'accounts': {
                'foodev': {
                    'id': '123456789',
                    'role': 'admin',
                }
            },
            'release-account': 'foodev',
            'release-bucket': 'releases',
            'default-region': 'us-north-4',
            'environments': {
                'live': 'foodev',
            },
            'terraform-backend-s3-bucket': 'tfstate-bucket',
            'terraform-backend-s3-dynamodb-table': 'tflocks-table',
            'classic-metadata-handling': True,
        })
        mock_s3_resource = Mock()
        mock_s3_resource.Object.return_value.get.return_value = {
            'Body': mock_s3_body,
        }
        mock_root_session.resource.return_value = mock_s3_resource
        mock_ecr_client = Mock()
        mock_ecr_client.get_authorization_token.return_value = {
            'authorizationData': [
                {
                    'authorizationToken': 'dXNlcm5hbWU6cGFzc3dvcmQ=',
                    'proxyEndpoint': 'dummy-endpoint'
                }
            ]
        }
        mock_session = Mock()
        mock_session.client.return_value = mock_ecr_client
        Session_from_config.return_value = mock_session
        mock_sts = Mock()
        mock_sts.get_caller_identity.return_value = {
            u'UserId': 'foo',
            'Arn': 'dummy_arn'
        }
        mock_sts.assume_role.return_value = {
            'Credentials': {
                'AccessKeyId': 'dummy-access-key-id',
                'SecretAccessKey': 'dummy-secret-access-key',
                'SessionToken': 'dummy-session-token',
                'Expiration': datetime(2015, 1, 1)
            },
            'AssumedRoleUser': {
                'AssumedRoleId': 'dummy-assumed-role-id',
                'Arn': 'dummy-arn'
            },
            'PackedPolicySize': 123
        }
        mock_root_session.client.return_value = mock_sts
        mock_release_file = MagicMock(spec=TextIOWrapper)
        mock_open_release.return_value.__enter__.return_value = \
            mock_release_file
        component_name = 'dummy-component'
        version = '1.2.3'
        make_archive.return_value = '/tmp/tmpvyzXQB/{}-{}.zip'.format(
            component_name, version
        )
        check_output_cli.return_value = 'hash\n'.encode('utf-8')
        # git origin URL from which the component name is inferred
        check_output.return_value = 'git@github.com:org/{}.git'.format(
            component_name
        ).encode('utf-8')
        mock_os_release.environ = {'CDFLOW_IMAGE_DIGEST': 'hash'}
        cli.run([
            'release', '--platform-config', 'path/to/config', version,
        ])
        image_name = '{}.dkr.ecr.{}.amazonaws.com/{}:{}'.format(
            123456789,
            'us-north-4',
            component_name,
            version
        )
        check_call.assert_any_call([
            'docker', 'build',
            '-t', image_name, '.'
        ])
        check_call.assert_any_call(['docker', 'push', image_name])
        mock_session.resource.return_value.Object.assert_called_once_with(
            'releases',
            '{}/{}-{}.zip'.format(component_name, component_name, version)
        )
        mock_session.resource.return_value.Object.return_value\
            .upload_file.assert_called_once_with(
                make_archive.return_value,
                ExtraArgs={'Metadata': {'cdflow_image_digest': 'hash'}},
            )
| 34.315603
| 78
| 0.583342
| 985
| 9,677
| 5.389848
| 0.174619
| 0.093238
| 0.050104
| 0.029384
| 0.846864
| 0.823507
| 0.823507
| 0.823507
| 0.808062
| 0.774911
| 0
| 0.015735
| 0.297303
| 9,677
| 281
| 79
| 34.437722
| 0.765
| 0
| 0
| 0.694561
| 0
| 0
| 0.228067
| 0.095071
| 0
| 0
| 0
| 0
| 0.041841
| 1
| 0.008368
| false
| 0
| 0.029289
| 0
| 0.041841
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8029f31fff7f36d6d14eb0eb897380a5cd43ac6d
| 142
|
py
|
Python
|
app/app/views.py
|
rajrpatel62499/recipie-app-api
|
5d92a4bb30b0b6752a98ecbf11e3b46bff0dc81b
|
[
"MIT"
] | null | null | null |
app/app/views.py
|
rajrpatel62499/recipie-app-api
|
5d92a4bb30b0b6752a98ecbf11e3b46bff0dc81b
|
[
"MIT"
] | null | null | null |
app/app/views.py
|
rajrpatel62499/recipie-app-api
|
5d92a4bb30b0b6752a98ecbf11e3b46bff0dc81b
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
def demo(request):
    """Return a minimal HTML response for smoke-testing the app.

    Args:
        request: incoming Django HttpRequest (unused).

    Returns:
        HttpResponse containing a small HTML body.
    """
    # Fixed malformed markup: the closing tag was '<h1>' instead of '</h1>'.
    return HttpResponse(content=b'<h1>bingo</h1>')
| 28.4
| 49
| 0.788732
| 20
| 142
| 5.6
| 0.75
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.112676
| 142
| 5
| 49
| 28.4
| 0.873016
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
80406760609b37abe1d25d4a04b30570e8e7b382
| 46
|
py
|
Python
|
enthought/pyface/tasks/api.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/pyface/tasks/api.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/pyface/tasks/api.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from pyface.tasks.api import *
| 15.333333
| 30
| 0.76087
| 7
| 46
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 46
| 2
| 31
| 23
| 0.897436
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
80475223a457d603e063b3c93e10132b377fbba7
| 14,930
|
py
|
Python
|
tests/test_rendering.py
|
Nikolay-Lysenko/dodecaphony
|
1a02af4b8b11785b65596b7ce14e1790436e0098
|
[
"MIT"
] | 2
|
2021-08-29T03:20:21.000Z
|
2021-11-22T01:20:55.000Z
|
tests/test_rendering.py
|
Nikolay-Lysenko/dodecaphony
|
1a02af4b8b11785b65596b7ce14e1790436e0098
|
[
"MIT"
] | null | null | null |
tests/test_rendering.py
|
Nikolay-Lysenko/dodecaphony
|
1a02af4b8b11785b65596b7ce14e1790436e0098
|
[
"MIT"
] | 1
|
2021-08-29T03:20:53.000Z
|
2021-08-29T03:20:53.000Z
|
"""
Test `dodecaphony.rendering` module.
Author: Nikolay Lysenko
"""
from typing import Any
import pretty_midi
import pytest
import yaml
from dodecaphony.fragment import Fragment, override_calculated_attributes
from dodecaphony.rendering import (
create_lilypond_file_from_fragment,
create_midi_from_fragment,
create_sinethesizer_instruments,
create_tsv_events_from_fragment,
create_wav_from_tsv_events,
create_yaml_from_fragment,
)
@pytest.mark.parametrize(
    "fragment, expected",
    [
        # Two-line fragment (one with a pause) rendered to a two-staff score.
        (
            # `fragment`
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0],
                    [2.0, 4.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'pause', 'F'],
                    ['F', 'G#', 'E', 'F#', 'A', 'D', 'C', 'D#', 'C#', 'G', 'A#', 'B'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=16,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[1, 1],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
            ),
            # `expected`
            (
                "\\version \"2.18.2\"\n"
                "\\layout {\n"
                " indent = #0\n"
                "}\n"
                "\\new StaffGroup <<\n"
                " \\new Staff <<\n"
                " \\clef treble\n"
                " \\time 4/4\n"
                " \\key c \\major\n"
                " {b'4 ais'4 g'4 cis''4 dis''2 c''2 d''4 a'4 fis'4 e'4 gis'2 r4 f'4}\n"
                " >>\n"
                " \\new Staff <<\n"
                " \\clef bass\n"
                " \\time 4/4\n"
                " \\key c \\major\n"
                " {f'2 gis2~ gis2 e'4 fis'4 a'4 d'4 c'4 dis'4 cis'4 g'4 ais4 b4}\n"
                " >>\n"
                ">>"
            )
        ),
    ]
)
def test_create_lilypond_file_from_fragment(
        path_to_tmp_file: str, fragment: Fragment, expected: str
) -> None:
    """Test `create_lilypond_file_from_fragment` function."""
    fragment = override_calculated_attributes(fragment)
    # Render to a temp file, then compare the raw Lilypond source verbatim.
    create_lilypond_file_from_fragment(fragment, path_to_tmp_file)
    with open(path_to_tmp_file) as in_file:
        result = in_file.read()
    assert result == expected
@pytest.mark.parametrize(
    "fragment, note_number, expected",
    [
        # Case 1: no pauses; note 5 is the sixth MIDI note of line 1.
        (
            # `fragment`
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0],
                    [2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                    ['F', 'G#', 'E', 'F#', 'A', 'D', 'C', 'D#', 'C#', 'G', 'A#', 'B'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=16,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[1, 1],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
            ),
            # `note_number`
            5,
            # `expected`
            {
                'pitch': 72,
                'start': 4.0,
                'end': 5.0
            }
        ),
        # Case 2: a pause shifts the note indexing, so note 5 differs.
        (
            # `fragment`
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0],
                    [2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['B', 'A#', 'G', 'C#', 'D#', 'pause', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                    ['F', 'G#', 'E', 'F#', 'A', 'D', 'C', 'D#', 'C#', 'G', 'A#', 'B'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=16,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[1, 1],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
            ),
            # `note_number`
            5,
            # `expected`
            {
                'pitch': 74,
                'start': 5.0,
                'end': 5.5
            }
        ),
    ]
)
def test_create_midi_from_fragment(
        path_to_tmp_file: str, fragment: Fragment, note_number: int, expected: dict[str, float]
) -> None:
    """Test `create_midi_from_fragment` function."""
    fragment = override_calculated_attributes(fragment)
    create_midi_from_fragment(
        fragment,
        path_to_tmp_file,
        beat_in_seconds=0.5,
        instruments={k: 0 for k in fragment.line_ids},
        velocity=100,
        opening_silence_in_seconds=1,
        trailing_silence_in_seconds=1
    )
    # Re-read the written file and inspect one note of the first instrument.
    midi_data = pretty_midi.PrettyMIDI(path_to_tmp_file)
    instrument = midi_data.instruments[0]
    midi_note = instrument.notes[note_number]
    result = {
        'pitch': midi_note.pitch,
        'start': midi_note.start,
        'end': midi_note.end
    }
    assert result == expected
@pytest.mark.parametrize(
    "fragment, expected",
    [
        # Case 1: both lines fully pitched; every event appears in the TSV.
        (
            # `fragment`
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0],
                    [2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                    ['F', 'G#', 'E', 'F#', 'A', 'D', 'C', 'D#', 'C#', 'G', 'A#', 'B'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=16,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[1, 1],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
            ),
            # `expected`
            (
                "instrument\tstart_time\tduration\tfrequency\tvelocity\teffects\tline_id\n"
                "additive_mellow_pipe\t1.0\t1.0\tF4\t1.0\t\t2\n"
                "additive_mellow_pipe\t1.0\t0.5\tB4\t1.0\t\t1\n"
                "additive_mellow_pipe\t1.5\t0.5\tA#4\t1.0\t\t1\n"
                "additive_mellow_pipe\t2.0\t1.0\tG#3\t1.0\t\t2\n"
                "additive_mellow_pipe\t2.0\t0.5\tG4\t1.0\t\t1\n"
                "additive_mellow_pipe\t2.5\t0.5\tC#5\t1.0\t\t1\n"
                "additive_mellow_pipe\t3.0\t0.5\tE4\t1.0\t\t2\n"
                "additive_mellow_pipe\t3.0\t1.0\tD#5\t1.0\t\t1\n"
                "additive_mellow_pipe\t3.5\t0.5\tF#4\t1.0\t\t2\n"
                "additive_mellow_pipe\t4.0\t0.5\tA4\t1.0\t\t2\n"
                "additive_mellow_pipe\t4.0\t1.0\tC5\t1.0\t\t1\n"
                "additive_mellow_pipe\t4.5\t0.5\tD4\t1.0\t\t2\n"
                "additive_mellow_pipe\t5.0\t1.0\tC4\t1.0\t\t2\n"
                "additive_mellow_pipe\t5.0\t0.5\tD5\t1.0\t\t1\n"
                "additive_mellow_pipe\t5.5\t0.5\tA4\t1.0\t\t1\n"
                "additive_mellow_pipe\t6.0\t1.0\tD#4\t1.0\t\t2\n"
                "additive_mellow_pipe\t6.0\t0.5\tF#4\t1.0\t\t1\n"
                "additive_mellow_pipe\t6.5\t0.5\tE4\t1.0\t\t1\n"
                "additive_mellow_pipe\t7.0\t0.5\tC#4\t1.0\t\t2\n"
                "additive_mellow_pipe\t7.0\t1.0\tG#4\t1.0\t\t1\n"
                "additive_mellow_pipe\t7.5\t0.5\tG4\t1.0\t\t2\n"
                "additive_mellow_pipe\t8.0\t0.5\tA#3\t1.0\t\t2\n"
                "additive_mellow_pipe\t8.0\t1.0\tF4\t1.0\t\t1\n"
                "additive_mellow_pipe\t8.5\t0.5\tB3\t1.0\t\t2\n"
            )
        ),
        # Case 2: line 2 starts with a pause, so its first event is absent.
        (
            # `fragment`
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0],
                    [2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                    ['pause', 'G#', 'E', 'F#', 'A', 'D', 'C', 'D#', 'C#', 'G', 'A#', 'B'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=16,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[1, 1],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
            ),
            # `expected`
            (
                "instrument\tstart_time\tduration\tfrequency\tvelocity\teffects\tline_id\n"
                "additive_mellow_pipe\t1.0\t0.5\tB4\t1.0\t\t1\n"
                "additive_mellow_pipe\t1.5\t0.5\tA#4\t1.0\t\t1\n"
                "additive_mellow_pipe\t2.0\t1.0\tG#3\t1.0\t\t2\n"
                "additive_mellow_pipe\t2.0\t0.5\tG4\t1.0\t\t1\n"
                "additive_mellow_pipe\t2.5\t0.5\tC#5\t1.0\t\t1\n"
                "additive_mellow_pipe\t3.0\t0.5\tE4\t1.0\t\t2\n"
                "additive_mellow_pipe\t3.0\t1.0\tD#5\t1.0\t\t1\n"
                "additive_mellow_pipe\t3.5\t0.5\tF#4\t1.0\t\t2\n"
                "additive_mellow_pipe\t4.0\t0.5\tA4\t1.0\t\t2\n"
                "additive_mellow_pipe\t4.0\t1.0\tC5\t1.0\t\t1\n"
                "additive_mellow_pipe\t4.5\t0.5\tD4\t1.0\t\t2\n"
                "additive_mellow_pipe\t5.0\t1.0\tC4\t1.0\t\t2\n"
                "additive_mellow_pipe\t5.0\t0.5\tD5\t1.0\t\t1\n"
                "additive_mellow_pipe\t5.5\t0.5\tA4\t1.0\t\t1\n"
                "additive_mellow_pipe\t6.0\t1.0\tD#4\t1.0\t\t2\n"
                "additive_mellow_pipe\t6.0\t0.5\tF#4\t1.0\t\t1\n"
                "additive_mellow_pipe\t6.5\t0.5\tE4\t1.0\t\t1\n"
                "additive_mellow_pipe\t7.0\t0.5\tC#4\t1.0\t\t2\n"
                "additive_mellow_pipe\t7.0\t1.0\tG#4\t1.0\t\t1\n"
                "additive_mellow_pipe\t7.5\t0.5\tG4\t1.0\t\t2\n"
                "additive_mellow_pipe\t8.0\t0.5\tA#3\t1.0\t\t2\n"
                "additive_mellow_pipe\t8.0\t1.0\tF4\t1.0\t\t1\n"
                "additive_mellow_pipe\t8.5\t0.5\tB3\t1.0\t\t2\n"
            )
        ),
    ]
)
def test_create_tsv_events_from_fragment(
        path_to_tmp_file: str, fragment: Fragment, expected: str
) -> None:
    """Test `create_tsv_events_from_fragment` function."""
    fragment = override_calculated_attributes(fragment)
    create_tsv_events_from_fragment(
        fragment,
        path_to_tmp_file,
        beat_in_seconds=0.5,
        instruments={k: 'additive_mellow_pipe' for k in fragment.line_ids},
        effects={k: '' for k in fragment.line_ids},
        velocity=1.0,
        opening_silence_in_seconds=1.0
    )
    # Compare the written TSV verbatim against the expected event listing.
    with open(path_to_tmp_file) as in_file:
        result = in_file.read()
    assert result == expected
@pytest.mark.parametrize(
    "tsv_content, trailing_silence_in_seconds",
    [
        (
            [
                'instrument\tstart_time\tduration\tfrequency\tvelocity\teffects\tline_id',
                'additive_mellow_pipe\t1\t1\tA0\t1\t\t1',
                'additive_mellow_pipe\t2\t1\t1\t1\t[{"name": "tremolo", "frequency": 1}]\t1'
            ],
            1.0
        ),
    ]
)
def test_create_wav_from_tsv_events(
        path_to_tmp_file: str, path_to_another_tmp_file: str,
        tsv_content: list[str], trailing_silence_in_seconds: float
) -> None:
    """Test `create_wav_from_tsv_events` function."""
    # Smoke test: write a small TSV score and render it; the test passes
    # when rendering completes without raising (no asserted output).
    with open(path_to_tmp_file, 'w') as tmp_tsv_file:
        for line in tsv_content:
            tmp_tsv_file.write(line + '\n')
    instruments_registry = create_sinethesizer_instruments()
    create_wav_from_tsv_events(
        path_to_tmp_file,
        path_to_another_tmp_file,
        instruments_registry,
        trailing_silence_in_seconds
    )
@pytest.mark.parametrize(
    "fragment, expected",
    [
        (
            # `fragment`
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0],
                    [2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                    ['pause', 'G#', 'E', 'F#', 'A', 'D', 'C', 'D#', 'C#', 'G', 'A#', 'B'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=16,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[1, 1],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
            ),
            # `expected`
            {
                "temporal_content": {
                    0: {'durations': [1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0]},
                    1: {'durations': [2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0]},
                },
                'sonic_content': {
                    0: {'pitch_classes': ['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F']},
                    1: {'pitch_classes': ['pause', 'G#', 'E', 'F#', 'A', 'D', 'C', 'D#', 'C#', 'G', 'A#', 'B']},
                }
            }
        ),
    ]
)
def test_create_yaml_from_fragment(
        path_to_tmp_file: str, fragment: Fragment, expected: dict[str, Any]
) -> None:
    """Test `create_yaml_from_fragment` function."""
    # Serialize the fragment, then parse the YAML back and compare structures.
    create_yaml_from_fragment(fragment, path_to_tmp_file)
    with open(path_to_tmp_file) as in_file:
        result = yaml.load(in_file, Loader=yaml.FullLoader)
    assert result == expected
| 39.39314
| 112
| 0.477294
| 2,130
| 14,930
| 3.126291
| 0.084038
| 0.036642
| 0.049106
| 0.052861
| 0.824899
| 0.777895
| 0.757771
| 0.743055
| 0.742153
| 0.67863
| 0
| 0.093886
| 0.355794
| 14,930
| 378
| 113
| 39.497355
| 0.598461
| 0.030342
| 0
| 0.635294
| 0
| 0.147059
| 0.238099
| 0.173723
| 0
| 0
| 0
| 0
| 0.011765
| 1
| 0.014706
| false
| 0
| 0.017647
| 0
| 0.032353
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
337a6112f47120b44bf4d733dc6ae70165c0ca72
| 509
|
py
|
Python
|
Codewars/7kyu/sum-of-a-sequence/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/7kyu/sum-of-a-sequence/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/7kyu/sum-of-a-sequence/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
Test.describe('Basic tests')
Test.assert_equals(sequence_sum(2, 6, 2), 12)
Test.assert_equals(sequence_sum(1, 5, 1), 15)
Test.assert_equals(sequence_sum(1, 5, 3), 5)
Test.assert_equals(sequence_sum(0, 15, 3), 45)
Test.assert_equals(sequence_sum(16, 15, 3), 0)
Test.assert_equals(sequence_sum(2, 24, 22), 26)
Test.assert_equals(sequence_sum(2, 2, 2), 2)
Test.assert_equals(sequence_sum(2, 2, 1), 2)
Test.assert_equals(sequence_sum(1, 15, 3), 35)
Test.assert_equals(sequence_sum(15, 1, 3), 0)
| 36.357143
| 47
| 0.732809
| 98
| 509
| 3.602041
| 0.234694
| 0.283286
| 0.453258
| 0.679887
| 0.8017
| 0.572238
| 0.328612
| 0
| 0
| 0
| 0
| 0.119048
| 0.092338
| 509
| 13
| 48
| 39.153846
| 0.645022
| 0.027505
| 0
| 0
| 0
| 0
| 0.022312
| 0
| 0
| 0
| 0
| 0
| 0.909091
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3389b6da4236f9b900a54e0566a6d75b3ff17774
| 106
|
py
|
Python
|
tests/functional/test_feeds.py
|
tgiardina/rpp-h
|
fece590f901b052a59c19a24acfeba52cee33c84
|
[
"BSD-2-Clause"
] | 2,103
|
2015-01-07T12:47:49.000Z
|
2022-03-29T02:38:25.000Z
|
tests/functional/test_feeds.py
|
tgiardina/rpp-h
|
fece590f901b052a59c19a24acfeba52cee33c84
|
[
"BSD-2-Clause"
] | 4,322
|
2015-01-04T17:18:01.000Z
|
2022-03-31T17:06:02.000Z
|
tests/functional/test_feeds.py
|
tgiardina/rpp-h
|
fece590f901b052a59c19a24acfeba52cee33c84
|
[
"BSD-2-Clause"
] | 389
|
2015-01-24T04:10:02.000Z
|
2022-03-28T08:00:16.000Z
|
def test_atom_feed(app):
app.get("/stream.atom")
def test_rss_feed(app):
app.get("/stream.rss")
| 15.142857
| 27
| 0.660377
| 18
| 106
| 3.666667
| 0.444444
| 0.212121
| 0.30303
| 0.393939
| 0.575758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150943
| 106
| 6
| 28
| 17.666667
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0.216981
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
33da9e76d1966f919c595b2c05916185f8d137cf
| 219
|
py
|
Python
|
api/pagination.py
|
hackoregon/neighborhoods-2018
|
2e1bf837365182a212527e1afbedddc759d0ee78
|
[
"MIT"
] | 6
|
2018-03-16T00:06:31.000Z
|
2019-06-02T21:27:03.000Z
|
api/pagination.py
|
hackoregon/neighborhoods-2018
|
2e1bf837365182a212527e1afbedddc759d0ee78
|
[
"MIT"
] | 53
|
2018-03-16T03:29:04.000Z
|
2021-06-10T20:17:11.000Z
|
api/pagination.py
|
hackoregon/neighborhoods-2018
|
2e1bf837365182a212527e1afbedddc759d0ee78
|
[
"MIT"
] | 1
|
2018-06-20T16:03:40.000Z
|
2018-06-20T16:03:40.000Z
|
from rest_framework.pagination import LimitOffsetPagination
class LargeResultSetPagination(LimitOffsetPagination):
max_limit = 5000
class VeryLargeResultSetPagination(LimitOffsetPagination):
max_limit = 50000
| 27.375
| 59
| 0.849315
| 18
| 219
| 10.166667
| 0.722222
| 0.262295
| 0.31694
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046154
| 0.109589
| 219
| 8
| 60
| 27.375
| 0.892308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
33de9874539e22dfa5d9f40390aa0b3a10d072ea
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/heuristics.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/heuristics.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/heuristics.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/04/51/87/277c90731bd98b37e8f742cb674e13fd9e574825ef168b6ba7b52cd2c7
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.447917
| 0
| 96
| 1
| 96
| 96
| 0.447917
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
33e69a55e1abbd2331d6d21f9aeff8fe3a4d205b
| 33
|
py
|
Python
|
child_test.py
|
ja-dir/demorepo
|
00ead2c4167735204f3f184d883b664e87ca725e
|
[
"MIT"
] | null | null | null |
child_test.py
|
ja-dir/demorepo
|
00ead2c4167735204f3f184d883b664e87ca725e
|
[
"MIT"
] | null | null | null |
child_test.py
|
ja-dir/demorepo
|
00ead2c4167735204f3f184d883b664e87ca725e
|
[
"MIT"
] | null | null | null |
print("Hello from child branch")
| 16.5
| 32
| 0.757576
| 5
| 33
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0.69697
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
1d197f4712ccc99b0b58aaaf5fb7597e526ff1f9
| 5,329
|
py
|
Python
|
django_presskit/migrations/0004_auto_20190202_2017.py
|
FutureProofGames/django_presskit
|
638d77f18e51f6d3a2adeaa6d6e65f62e7c5f934
|
[
"MIT"
] | null | null | null |
django_presskit/migrations/0004_auto_20190202_2017.py
|
FutureProofGames/django_presskit
|
638d77f18e51f6d3a2adeaa6d6e65f62e7c5f934
|
[
"MIT"
] | 1
|
2019-01-09T03:53:53.000Z
|
2019-01-09T03:53:53.000Z
|
django_presskit/migrations/0004_auto_20190202_2017.py
|
FutureProofGames/django_presskit
|
638d77f18e51f6d3a2adeaa6d6e65f62e7c5f934
|
[
"MIT"
] | 3
|
2019-02-18T21:21:48.000Z
|
2019-02-21T00:42:57.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-02-02 20:17
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('django_presskit', '0003_auto_20181002_1915'),
]
operations = [
migrations.AlterModelOptions(
name='additionallink',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='award',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='companyimageattachment',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='companylogoattachment',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='companyvideo',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='contact',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='credit',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='feature',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='platform',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='price',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='project',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='projectimageattachment',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='projectlogoattachment',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='quote',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='social',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AlterModelOptions(
name='trailer',
options={'ordering': ['my_order', 'datetime_updated']},
),
migrations.AddField(
model_name='additionallink',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='award',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='companyimageattachment',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='companylogoattachment',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='companyvideo',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='contact',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='credit',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='feature',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='platform',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='price',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='project',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='projectimageattachment',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='projectlogoattachment',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='quote',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='social',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='trailer',
name='my_order',
field=models.PositiveIntegerField(default=0),
),
]
| 33.30625
| 67
| 0.552261
| 406
| 5,329
| 7.068966
| 0.150246
| 0.078049
| 0.172822
| 0.122648
| 0.791638
| 0.791638
| 0.791638
| 0.775261
| 0.75784
| 0.402439
| 0
| 0.01382
| 0.321073
| 5,329
| 159
| 68
| 33.515723
| 0.779436
| 0.012948
| 0
| 0.736842
| 1
| 0
| 0.195549
| 0.037093
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013158
| 0
| 0.032895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1d4ceea77141f718fb64d5c7ed4f37998bf44db2
| 488
|
py
|
Python
|
tests/test_cache.py
|
nollety/tengen
|
e11d9796638cc7538b2c946d163784c470fcea28
|
[
"MIT"
] | null | null | null |
tests/test_cache.py
|
nollety/tengen
|
e11d9796638cc7538b2c946d163784c470fcea28
|
[
"MIT"
] | 87
|
2021-07-05T07:17:35.000Z
|
2022-03-29T07:29:27.000Z
|
tests/test_cache.py
|
nollety/tengen
|
e11d9796638cc7538b2c946d163784c470fcea28
|
[
"MIT"
] | null | null | null |
"""Test cases for the cache module."""
from tengen.cache import CACHE_DIR
from tengen.cache import init_cache
from tengen.cache import list_cache_content
def test_init_cache() -> None:
"""Cache directory exists after 'init_cache' is called."""
init_cache()
assert CACHE_DIR.exists()
def test_list_cache_content() -> None:
"""Returns a list of str."""
assert isinstance(list_cache_content(), list)
assert all([isinstance(x, str) for x in list_cache_content()])
| 28.705882
| 66
| 0.72541
| 72
| 488
| 4.694444
| 0.402778
| 0.106509
| 0.189349
| 0.186391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161885
| 488
| 16
| 67
| 30.5
| 0.826406
| 0.221311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.222222
| true
| 0
| 0.333333
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1d5e7553767f502fbd9f241478b886c59de86319
| 161
|
py
|
Python
|
src/internals/errors/missing_dependency_error.py
|
mabel-dev/salmon
|
90866d16b08925206d1f8d9b3c8290fcd35f5322
|
[
"MIT"
] | null | null | null |
src/internals/errors/missing_dependency_error.py
|
mabel-dev/salmon
|
90866d16b08925206d1f8d9b3c8290fcd35f5322
|
[
"MIT"
] | null | null | null |
src/internals/errors/missing_dependency_error.py
|
mabel-dev/salmon
|
90866d16b08925206d1f8d9b3c8290fcd35f5322
|
[
"MIT"
] | null | null | null |
"""
This exception should be used when a lazy import fails
"""
from .base_exception import BaseException
class MissingDependencyError(BaseException):
pass
| 17.888889
| 54
| 0.782609
| 19
| 161
| 6.578947
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15528
| 161
| 8
| 55
| 20.125
| 0.919118
| 0.335404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
d51a101dc0e1549373e56f29f7045808c6c3758d
| 24
|
py
|
Python
|
tensorkit/debug/__init__.py
|
lizeyan/tensorkit
|
2997a5914ec3c3ec72f91eb5906b5ee878fdc020
|
[
"MIT"
] | null | null | null |
tensorkit/debug/__init__.py
|
lizeyan/tensorkit
|
2997a5914ec3c3ec72f91eb5906b5ee878fdc020
|
[
"MIT"
] | null | null | null |
tensorkit/debug/__init__.py
|
lizeyan/tensorkit
|
2997a5914ec3c3ec72f91eb5906b5ee878fdc020
|
[
"MIT"
] | 2
|
2020-10-15T06:41:32.000Z
|
2021-01-27T12:55:11.000Z
|
from .recorder import *
| 12
| 23
| 0.75
| 3
| 24
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d537ae7f2ac0289e01caa96891e7381c94778f46
| 115
|
py
|
Python
|
tests/test_import.py
|
chrisbrake/chrisbrake
|
a5d89dead1cb2da5b23ccf7da40050865a107b7a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_import.py
|
chrisbrake/chrisbrake
|
a5d89dead1cb2da5b23ccf7da40050865a107b7a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_import.py
|
chrisbrake/chrisbrake
|
a5d89dead1cb2da5b23ccf7da40050865a107b7a
|
[
"BSD-3-Clause"
] | null | null | null |
def test_import():
""" Test to ensure we didn't fail packaging """
import chrisbrake
assert chrisbrake
| 23
| 51
| 0.678261
| 15
| 115
| 5.133333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.234783
| 115
| 4
| 52
| 28.75
| 0.875
| 0.33913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.666667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d5429a0bea2e4073e76b4dd64a45b70fd2038100
| 127
|
py
|
Python
|
conftest.py
|
giocatron/Polygon
|
b3b0e0047bf20eb91379b594fd26603fcf279880
|
[
"CC-BY-4.0"
] | null | null | null |
conftest.py
|
giocatron/Polygon
|
b3b0e0047bf20eb91379b594fd26603fcf279880
|
[
"CC-BY-4.0"
] | null | null | null |
conftest.py
|
giocatron/Polygon
|
b3b0e0047bf20eb91379b594fd26603fcf279880
|
[
"CC-BY-4.0"
] | 2
|
2020-11-24T22:23:08.000Z
|
2020-11-24T23:41:36.000Z
|
pytest_plugins = [
"polygon.tests.fixtures",
"polygon.plugins.tests.fixtures",
"polygon.graphql.tests.fixtures",
]
| 21.166667
| 37
| 0.692913
| 13
| 127
| 6.692308
| 0.461538
| 0.448276
| 0.45977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149606
| 127
| 5
| 38
| 25.4
| 0.805556
| 0
| 0
| 0
| 0
| 0
| 0.645669
| 0.645669
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d55f7170ac22d5540403bc03470e72ccf159e60d
| 32
|
py
|
Python
|
GmailWrapper_JE/je_gmail/token/__init__.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | 2
|
2020-12-30T06:37:10.000Z
|
2020-12-30T07:27:45.000Z
|
GmailWrapper_JE/je_gmail/token/__init__.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
GmailWrapper_JE/je_gmail/token/__init__.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
from je_gmail.token import *
| 10.666667
| 29
| 0.71875
| 5
| 32
| 4.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21875
| 32
| 2
| 30
| 16
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d5684a74307da6ecd65a68997ecaa5fd58d36323
| 33,685
|
py
|
Python
|
tests/unit_tests/trading_tests/test_portfolio.py
|
CryptoRichy/OctoBot
|
1ca5bd2ba4b8fc09859518fcb2a62f74a1435019
|
[
"Apache-2.0"
] | 1
|
2018-11-26T16:43:36.000Z
|
2018-11-26T16:43:36.000Z
|
tests/unit_tests/trading_tests/test_portfolio.py
|
CryptoRichy/OctoBot
|
1ca5bd2ba4b8fc09859518fcb2a62f74a1435019
|
[
"Apache-2.0"
] | null | null | null |
tests/unit_tests/trading_tests/test_portfolio.py
|
CryptoRichy/OctoBot
|
1ca5bd2ba4b8fc09859518fcb2a62f74a1435019
|
[
"Apache-2.0"
] | null | null | null |
import ccxt
from config.cst import TraderOrderType
from tests.test_utils.config import load_test_config
from tests.test_utils.order_util import fill_limit_or_stop_order, fill_market_order
from trading.exchanges.exchange_manager import ExchangeManager
from trading.trader.order import BuyMarketOrder, OrderConstants, SellLimitOrder, BuyLimitOrder, SellMarketOrder, \
StopLossOrder
from trading.trader.portfolio import Portfolio
from trading.trader.trader_simulator import TraderSimulator
class TestPortfolio:
@staticmethod
def init_default():
config = load_test_config()
exchange_manager = ExchangeManager(config, ccxt.binance, is_simulated=True)
exchange_inst = exchange_manager.get_exchange()
trader_inst = TraderSimulator(config, exchange_inst, 1)
portfolio_inst = Portfolio(config, trader_inst)
trader_inst.stop_order_manager()
return config, portfolio_inst, exchange_inst, trader_inst
def test_load_portfolio(self):
_, portfolio_inst, _, trader_inst = self.init_default()
portfolio_inst._load_portfolio()
assert portfolio_inst.portfolio == {'BTC': {'available': 10, 'total': 10},
'USD': {'available': 1000, 'total': 1000}
}
def test_get_currency_portfolio(self):
_, portfolio_inst, _, trader_inst = self.init_default()
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 10
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
assert portfolio_inst.get_currency_portfolio("NANO", Portfolio.TOTAL) == 0
def test_update_portfolio_data(self):
_, portfolio_inst, _, trader_inst = self.init_default()
portfolio_inst._update_portfolio_data("BTC", -5)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 5
portfolio_inst._update_portfolio_data("BTC", -6, total=False, available=True)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 4
portfolio_inst._update_portfolio_data("XRP", 4.5, total=True, available=True)
assert portfolio_inst.get_currency_portfolio("XRP", Portfolio.AVAILABLE) == 4.5
def test_update_portfolio_available(self):
config, portfolio_inst, _, trader_inst = self.init_default()
# Test buy order
market_buy = BuyMarketOrder(trader_inst)
market_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_MARKET],
"BTC/USD",
70,
10,
70)
# test buy order creation
portfolio_inst.update_portfolio_available(market_buy, True)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 300
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
# test buy order canceled --> return to init state and the update_portfolio will sync TOTAL with AVAILABLE
portfolio_inst.update_portfolio_available(market_buy, False)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 1000
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
# Test sell order
limit_sell = SellLimitOrder(trader_inst)
limit_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
"BTC/USD",
60,
8,
60)
# test sell order creation
portfolio_inst.update_portfolio_available(limit_sell, True)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 2
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 1000
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
# test sell order canceled --> return to init state and the update_portfolio will sync TOTAL with AVAILABLE
portfolio_inst.update_portfolio_available(limit_sell, False)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 1000
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
def test_update_portfolio(self):
config, portfolio_inst, _, trader_inst = self.init_default()
# Test buy order
limit_buy = BuyLimitOrder(trader_inst)
limit_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
"BTC/USD",
70,
10,
70)
# update portfolio with creations
portfolio_inst.update_portfolio_available(limit_buy, True)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 300
fill_limit_or_stop_order(limit_buy, 69, 71)
portfolio_inst.update_portfolio(limit_buy)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 20
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 300
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 20
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 300
# Test buy order
market_sell = SellMarketOrder(trader_inst)
market_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_MARKET],
"BTC/USD",
80,
8,
80)
# update portfolio with creations
portfolio_inst.update_portfolio_available(market_sell, True)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 12
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 300
fill_market_order(market_sell, 80)
# when filling market sell
portfolio_inst.update_portfolio(market_sell)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 12
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 940
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 12
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 940
def test_update_portfolio_with_filled_orders(self):
config, portfolio_inst, _, trader_inst = self.init_default()
# Test buy order
market_sell = SellMarketOrder(trader_inst)
market_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_MARKET],
"BTC/USD",
70,
3,
70)
fill_market_order(market_sell, 70)
# Test sell order
limit_sell = SellLimitOrder(trader_inst)
limit_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
"BTC/USD",
100,
4.2,
100)
# Test stop loss order
stop_loss = StopLossOrder(trader_inst)
stop_loss.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.STOP_LOSS],
"BTC/USD",
80,
4.2,
80)
limit_sell.add_linked_order(stop_loss)
stop_loss.add_linked_order(limit_sell)
# Test buy order
limit_buy = BuyLimitOrder(trader_inst)
limit_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
"BTC/USD",
50,
2,
50)
fill_limit_or_stop_order(limit_buy, 49, 51)
# update portfolio with creations
portfolio_inst.update_portfolio_available(market_sell, True)
portfolio_inst.update_portfolio_available(limit_sell, True)
portfolio_inst.update_portfolio_available(stop_loss, True)
portfolio_inst.update_portfolio_available(limit_buy, True)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 2.8
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 900
# when cancelling limit sell, market sell and stop orders
portfolio_inst.update_portfolio_available(stop_loss, False)
portfolio_inst.update_portfolio_available(limit_sell, False)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 7
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 900
# when filling limit buy
portfolio_inst.update_portfolio(limit_buy)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 9
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 900
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 12
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 900
# when filling market sell
portfolio_inst.update_portfolio(market_sell)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 9
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 1110
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 9
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1110
def test_update_portfolio_with_cancelled_orders(self):
config, portfolio_inst, _, trader_inst = self.init_default()
# Test buy order
market_sell = SellMarketOrder(trader_inst)
market_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_MARKET],
"BTC/USD",
80,
4.1,
80)
# Test sell order
limit_sell = SellLimitOrder(trader_inst)
limit_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
"BTC/USD",
10,
4.2,
10)
# Test stop loss order
stop_loss = StopLossOrder(trader_inst)
stop_loss.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.STOP_LOSS],
"BTC/USD",
80,
3.6,
80)
portfolio_inst.update_portfolio_available(stop_loss, True)
portfolio_inst.update_portfolio_available(limit_sell, True)
# Test buy order
limit_buy = BuyLimitOrder(trader_inst)
limit_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
"BTC/USD",
50,
4,
50)
portfolio_inst.update_portfolio_available(limit_buy, True)
portfolio_inst.update_portfolio_available(market_sell, True)
assert round(portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE), 1) == 1.7
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 800
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
# with no filled orders
portfolio_inst.update_portfolio_available(stop_loss, False)
portfolio_inst.update_portfolio_available(limit_sell, False)
portfolio_inst.update_portfolio_available(limit_buy, False)
portfolio_inst.update_portfolio_available(market_sell, False)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 1000
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
def test_update_portfolio_with_stop_loss_orders(self):
config, portfolio_inst, _, trader_inst = self.init_default()
# Test buy order
limit_sell = SellLimitOrder(trader_inst)
limit_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
"BTC/USD",
90,
4,
90)
# Test buy order
limit_buy = BuyLimitOrder(trader_inst)
limit_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
"BTC/USD",
50,
4,
50)
# Test stop loss order
stop_loss = StopLossOrder(trader_inst)
stop_loss.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.STOP_LOSS],
"BTC/USD",
60,
4,
60)
fill_limit_or_stop_order(stop_loss, 59, 61)
portfolio_inst.update_portfolio_available(stop_loss, True)
portfolio_inst.update_portfolio_available(limit_sell, True)
portfolio_inst.update_portfolio_available(limit_buy, True)
assert round(portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE), 1) == 6
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 800
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
# cancel limits
portfolio_inst.update_portfolio_available(limit_buy, False)
portfolio_inst.update_portfolio_available(limit_sell, False)
# filling stop loss
# typical stop loss behavior --> update available before update portfolio
portfolio_inst.update_portfolio(stop_loss)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 6
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 1240
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 6
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1240
def test_update_portfolio_with_some_filled_orders(self):
config, portfolio_inst, _, trader_inst = self.init_default()
# Test buy order
limit_sell = SellLimitOrder(trader_inst)
limit_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
"BTC/USD",
90,
4,
90)
# Test buy order
limit_buy = BuyLimitOrder(trader_inst)
limit_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
"BTC/USD",
60,
2,
60)
# Test buy order
limit_buy_2 = BuyLimitOrder(trader_inst)
limit_buy_2.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
"BTC/USD",
50,
4,
50)
# Test sell order
limit_sell_2 = SellLimitOrder(trader_inst)
limit_sell_2.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
"BTC/USD",
10,
2,
10)
# Test stop loss order
stop_loss_2 = StopLossOrder(trader_inst)
stop_loss_2.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.STOP_LOSS],
"BTC/USD",
10,
2,
10)
# Test sell order
limit_sell_3 = SellLimitOrder(trader_inst)
limit_sell_3.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
"BTC/USD",
20,
1,
20)
# Test stop loss order
stop_loss_3 = StopLossOrder(trader_inst)
stop_loss_3.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.STOP_LOSS],
"BTC/USD",
20,
1,
20)
portfolio_inst.update_portfolio_available(stop_loss_2, True)
portfolio_inst.update_portfolio_available(stop_loss_3, True)
portfolio_inst.update_portfolio_available(limit_sell, True)
portfolio_inst.update_portfolio_available(limit_sell_2, True)
portfolio_inst.update_portfolio_available(limit_sell_3, True)
portfolio_inst.update_portfolio_available(limit_buy, True)
portfolio_inst.update_portfolio_available(limit_buy_2, True)
assert round(portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE), 1) == 3
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 680
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
# cancels
portfolio_inst.update_portfolio_available(stop_loss_3, False)
portfolio_inst.update_portfolio_available(limit_sell_2, False)
portfolio_inst.update_portfolio_available(limit_buy, False)
# filling
fill_limit_or_stop_order(stop_loss_2, 9, 11)
fill_limit_or_stop_order(limit_sell, 89, 91)
fill_limit_or_stop_order(limit_sell_3, 19, 21)
fill_limit_or_stop_order(limit_buy_2, 49, 51)
portfolio_inst.update_portfolio(stop_loss_2)
portfolio_inst.update_portfolio(limit_sell)
portfolio_inst.update_portfolio(limit_sell_3)
portfolio_inst.update_portfolio(limit_buy_2)
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 7
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 1200
assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 7
assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1200
def test_update_portfolio_with_multiple_filled_orders(self):
    """Open many concurrent limit/stop orders, cancel a subset, fill the rest,
    and verify available/total BTC and USD balances at each stage.

    Starting portfolio (from init_default): 10 BTC and 1000 USD.
    """
    config, portfolio_inst, _, trader_inst = self.init_default()
    # sell order: 4 BTC @ 90
    limit_sell = SellLimitOrder(trader_inst)
    limit_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
                   "BTC/USD",
                   90,
                   4,
                   90)
    # buy order: 2 BTC @ 60
    limit_buy = BuyLimitOrder(trader_inst)
    limit_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
                  "BTC/USD",
                  60,
                  2,
                  60)
    # buy order: 4 BTC @ 50
    limit_buy_2 = BuyLimitOrder(trader_inst)
    limit_buy_2.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
                    "BTC/USD",
                    50,
                    4,
                    50)
    # buy order: 2 BTC @ 46
    limit_buy_3 = BuyLimitOrder(trader_inst)
    limit_buy_3.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
                    "BTC/USD",
                    46,
                    2,
                    46)
    # buy order: 1.78 BTC @ 41
    limit_buy_4 = BuyLimitOrder(trader_inst)
    limit_buy_4.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
                    "BTC/USD",
                    41,
                    1.78,
                    41)
    # buy order with non-round price/quantity to exercise float accounting
    limit_buy_5 = BuyLimitOrder(trader_inst)
    limit_buy_5.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
                    "BTC/USD",
                    0.2122427,
                    3.72448,
                    0.2122427)
    # buy order: 1.05 BTC @ 430
    limit_buy_6 = BuyLimitOrder(trader_inst)
    limit_buy_6.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
                    "BTC/USD",
                    430,
                    1.05,
                    430)
    # sell order: 2 BTC @ 10
    limit_sell_2 = SellLimitOrder(trader_inst)
    limit_sell_2.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
                     "BTC/USD",
                     10,
                     2,
                     10)
    # stop loss order: 2 BTC @ 10
    stop_loss_2 = StopLossOrder(trader_inst)
    stop_loss_2.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.STOP_LOSS],
                    "BTC/USD",
                    10,
                    2,
                    10)
    # sell order: 1 BTC @ 20
    limit_sell_3 = SellLimitOrder(trader_inst)
    limit_sell_3.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
                     "BTC/USD",
                     20,
                     1,
                     20)
    # stop loss order: 1 BTC @ 20
    stop_loss_3 = StopLossOrder(trader_inst)
    stop_loss_3.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.STOP_LOSS],
                    "BTC/USD",
                    20,
                    1,
                    20)
    # sell order: 0.2 BTC @ 50
    limit_sell_4 = SellLimitOrder(trader_inst)
    limit_sell_4.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
                     "BTC/USD",
                     50,
                     0.2,
                     50)
    # stop loss order: 0.2 BTC @ 45
    stop_loss_4 = StopLossOrder(trader_inst)
    stop_loss_4.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.STOP_LOSS],
                    "BTC/USD",
                    45,
                    0.2,
                    45)
    # sell order: 0.7 BTC @ 11
    limit_sell_5 = SellLimitOrder(trader_inst)
    limit_sell_5.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
                     "BTC/USD",
                     11,
                     0.7,
                     11)
    # stop loss order: 0.7 BTC @ 9
    stop_loss_5 = StopLossOrder(trader_inst)
    stop_loss_5.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.STOP_LOSS],
                    "BTC/USD",
                    9,
                    0.7,
                    9)
    # register every order: sells/stops lock BTC, buys lock USD
    portfolio_inst.update_portfolio_available(stop_loss_2, True)
    portfolio_inst.update_portfolio_available(stop_loss_3, True)
    portfolio_inst.update_portfolio_available(stop_loss_4, True)
    portfolio_inst.update_portfolio_available(stop_loss_5, True)
    portfolio_inst.update_portfolio_available(limit_sell, True)
    portfolio_inst.update_portfolio_available(limit_sell_2, True)
    portfolio_inst.update_portfolio_available(limit_sell_3, True)
    portfolio_inst.update_portfolio_available(limit_sell_4, True)
    portfolio_inst.update_portfolio_available(limit_sell_5, True)
    portfolio_inst.update_portfolio_available(limit_buy, True)
    portfolio_inst.update_portfolio_available(limit_buy_2, True)
    portfolio_inst.update_portfolio_available(limit_buy_3, True)
    portfolio_inst.update_portfolio_available(limit_buy_4, True)
    portfolio_inst.update_portfolio_available(limit_buy_5, True)
    portfolio_inst.update_portfolio_available(limit_buy_6, True)
    # only AVAILABLE funds move while orders are open; TOTAL is untouched
    assert round(portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE), 1) == 2.1
    assert round(portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE), 7) == 62.7295063
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
    # cancels: cancelled orders release their locked funds
    portfolio_inst.update_portfolio_available(stop_loss_3, False)
    portfolio_inst.update_portfolio_available(stop_loss_5, False)
    portfolio_inst.update_portfolio_available(limit_sell_2, False)
    portfolio_inst.update_portfolio_available(limit_buy, False)
    portfolio_inst.update_portfolio_available(limit_buy_3, False)
    portfolio_inst.update_portfolio_available(limit_buy_5, False)
    portfolio_inst.update_portfolio_available(limit_sell_4, False)
    # filling: each (low, high) range brackets the order's trigger price
    fill_limit_or_stop_order(stop_loss_2, 9, 11)
    fill_limit_or_stop_order(limit_sell, 89, 91)
    fill_limit_or_stop_order(limit_sell_3, 19, 21)
    fill_limit_or_stop_order(limit_buy_2, 49, 51)
    fill_limit_or_stop_order(limit_sell_5, 9, 12)
    fill_limit_or_stop_order(stop_loss_4, 44, 46)
    fill_limit_or_stop_order(limit_buy_4, 40, 42)
    fill_limit_or_stop_order(limit_buy_5, 0.2122426, 0.2122428)
    fill_limit_or_stop_order(limit_buy_6, 429, 431)
    portfolio_inst.update_portfolio(stop_loss_2)
    portfolio_inst.update_portfolio(limit_buy_4)
    portfolio_inst.update_portfolio(limit_sell)
    portfolio_inst.update_portfolio(limit_sell_3)
    portfolio_inst.update_portfolio(limit_buy_2)
    portfolio_inst.update_portfolio(limit_sell_5)
    portfolio_inst.update_portfolio(stop_loss_4)
    portfolio_inst.update_portfolio(limit_buy_5)
    portfolio_inst.update_portfolio(limit_buy_6)
    # after fills both AVAILABLE and TOTAL reflect the executed trades
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 12.65448
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 692.22
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 12.65448
    assert round(portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL), 7) == 691.4295063
def test_update_portfolio_with_multiple_symbols_orders(self):
    """Check portfolio accounting across several trading pairs
    (ETH/USD, LTC/BTC, XRP/BTC), including a cancelled order.

    Starting portfolio (from init_default): 10 BTC and 1000 USD.
    """
    config, portfolio_inst, _, trader_inst = self.init_default()
    # market buy: 100 ETH @ 7 USD (costs 700 USD)
    market_buy = BuyMarketOrder(trader_inst)
    market_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_MARKET],
                   "ETH/USD",
                   7,
                   100,
                   7)
    # order creation locks the USD; ETH not credited until filled
    portfolio_inst.update_portfolio_available(market_buy, True)
    assert portfolio_inst.get_currency_portfolio("ETH", Portfolio.AVAILABLE) == 0
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 300
    assert portfolio_inst.get_currency_portfolio("ETH", Portfolio.TOTAL) == 0
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
    fill_market_order(market_buy, 7)
    portfolio_inst.update_portfolio(market_buy)
    assert portfolio_inst.get_currency_portfolio("ETH", Portfolio.AVAILABLE) == 100
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 300
    assert portfolio_inst.get_currency_portfolio("ETH", Portfolio.TOTAL) == 100
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 300
    # market buy: 100 LTC @ 0.0135222 BTC (costs 1.35222 BTC)
    market_buy = BuyMarketOrder(trader_inst)
    market_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_MARKET],
                   "LTC/BTC",
                   0.0135222,
                   100,
                   0.0135222)
    # order creation locks the BTC quote currency
    portfolio_inst.update_portfolio_available(market_buy, True)
    assert portfolio_inst.get_currency_portfolio("LTC", Portfolio.AVAILABLE) == 0
    # exact float expected (10 - 1.35222 carries binary rounding residue)
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 8.647780000000001
    assert portfolio_inst.get_currency_portfolio("LTC", Portfolio.TOTAL) == 0
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
    fill_market_order(market_buy, 0.0135222)
    portfolio_inst.update_portfolio(market_buy)
    assert portfolio_inst.get_currency_portfolio("LTC", Portfolio.AVAILABLE) == 100
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 8.647780000000001
    assert portfolio_inst.get_currency_portfolio("LTC", Portfolio.TOTAL) == 100
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 8.647780000000001
    # limit buy: ~3000 XRP at a tiny BTC price
    limit_buy = BuyLimitOrder(trader_inst)
    limit_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
                  "XRP/BTC",
                  0.00012232132312312,
                  3000.1214545,
                  0.00012232132312312)
    # order creation locks the BTC quote currency
    portfolio_inst.update_portfolio_available(limit_buy, True)
    assert portfolio_inst.get_currency_portfolio("XRP", Portfolio.AVAILABLE) == 0
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 8.280801174155501
    assert portfolio_inst.get_currency_portfolio("XRP", Portfolio.TOTAL) == 0
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 8.647780000000001
    # cancel: the locked BTC must be fully restored
    portfolio_inst.update_portfolio_available(limit_buy, False)
    assert portfolio_inst.get_currency_portfolio("XRP", Portfolio.AVAILABLE) == 0
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 8.647780000000001
    assert portfolio_inst.get_currency_portfolio("XRP", Portfolio.TOTAL) == 0
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 8.647780000000001
def test_reset_portfolio_available(self):
    """Verify reset_portfolio_available() both in its reset-everything form
    and in its per-currency / partial-amount form.

    Starting portfolio (from init_default): 10 BTC and 1000 USD.
    """
    config, portfolio_inst, _, trader_inst = self.init_default()
    # sell order locks 4 BTC, then a full reset must restore everything
    limit_sell = SellLimitOrder(trader_inst)
    limit_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
                   "BTC/USD",
                   90,
                   4,
                   90)
    portfolio_inst.update_portfolio_available(limit_sell, True)
    portfolio_inst.reset_portfolio_available()
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 10
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 1000
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
    # sell order: locks 4 BTC
    limit_sell = SellLimitOrder(trader_inst)
    limit_sell.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.SELL_LIMIT],
                   "BTC/USD",
                   90,
                   4,
                   90)
    portfolio_inst.update_portfolio_available(limit_sell, True)
    # buy order on VEN/BTC: locks 4 * 0.5 = 2 BTC
    limit_buy = BuyLimitOrder(trader_inst)
    limit_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
                  "VEN/BTC",
                  0.5,
                  4,
                  0.5)
    portfolio_inst.update_portfolio_available(limit_buy, True)
    # buy order: locks 500 USD
    btc_limit_buy = BuyLimitOrder(trader_inst)
    btc_limit_buy.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
                      "BTC/USD",
                      10,
                      50,
                      10)
    portfolio_inst.update_portfolio_available(btc_limit_buy, True)
    # second buy order: locks the remaining 500 USD
    btc_limit_buy2 = BuyLimitOrder(trader_inst)
    btc_limit_buy2.new(OrderConstants.TraderOrderTypeClasses[TraderOrderType.BUY_LIMIT],
                       "BTC/USD",
                       10,
                       50,
                       10)
    portfolio_inst.update_portfolio_available(btc_limit_buy2, True)
    # reset equivalent of the ven buy order (partial amount form)
    portfolio_inst.reset_portfolio_available("BTC", 4*0.5)
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 6
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 0
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
    # reset equivalent of the btc buy orders 1 and 2 (whole-currency form)
    portfolio_inst.reset_portfolio_available("USD")
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.AVAILABLE) == 6
    assert portfolio_inst.get_currency_portfolio("BTC", Portfolio.TOTAL) == 10
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.AVAILABLE) == 1000
    assert portfolio_inst.get_currency_portfolio("USD", Portfolio.TOTAL) == 1000
| 45.829932
| 115
| 0.642601
| 3,553
| 33,685
| 5.755981
| 0.04644
| 0.141118
| 0.112464
| 0.133783
| 0.919515
| 0.881326
| 0.86651
| 0.851939
| 0.800499
| 0.75253
| 0
| 0.037965
| 0.2806
| 33,685
| 734
| 116
| 45.892371
| 0.805967
| 0.045866
| 0
| 0.686239
| 0
| 0
| 0.022046
| 0
| 0
| 0
| 0
| 0
| 0.211009
| 1
| 0.023853
| false
| 0
| 0.014679
| 0
| 0.042202
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d5a0b9e6c28afe537586038c394fef6730547648
| 94
|
py
|
Python
|
examples/import_example/import_example_server.py
|
izi-global/izir
|
d1a4bfb5c082c3de1956402ef0280564014a3bd8
|
[
"MIT"
] | null | null | null |
examples/import_example/import_example_server.py
|
izi-global/izir
|
d1a4bfb5c082c3de1956402ef0280564014a3bd8
|
[
"MIT"
] | 5
|
2021-03-18T21:01:05.000Z
|
2022-03-11T23:29:48.000Z
|
examples/import_example/import_example_server.py
|
izi-global/izir
|
d1a4bfb5c082c3de1956402ef0280564014a3bd8
|
[
"MIT"
] | null | null | null |
import example_resource
import izi
@izi.get()
def hello():
    """HTTP GET endpoint that returns whatever example_resource.hi() produces."""
    return example_resource.hi()
| 11.75
| 32
| 0.734043
| 13
| 94
| 5.153846
| 0.692308
| 0.447761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159574
| 94
| 7
| 33
| 13.428571
| 0.848101
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 6
|
6367e90c33a6766bbed82fef17fa610df4ead57b
| 12,398
|
py
|
Python
|
unet_pipeline/models/mynet.py
|
S-Altman/pneumothorax-segmentation
|
e79673d2a2b3e9ee9db0352d19f52e676ed1187f
|
[
"MIT"
] | null | null | null |
unet_pipeline/models/mynet.py
|
S-Altman/pneumothorax-segmentation
|
e79673d2a2b3e9ee9db0352d19f52e676ed1187f
|
[
"MIT"
] | null | null | null |
unet_pipeline/models/mynet.py
|
S-Altman/pneumothorax-segmentation
|
e79673d2a2b3e9ee9db0352d19f52e676ed1187f
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
import segmentation_models_pytorch as smp
from segmentation_models_pytorch.unet.decoder import DecoderBlock
from segmentation_models_pytorch.base import SegmentationHead
from segmentation_models_pytorch.base import initialization as init
def add_conv(in_ch, out_ch, ksize, stride):
    """Build a Conv2d -> BatchNorm2d -> ReLU block.

    Padding is chosen as (ksize - 1) // 2 so that, with stride 1, the
    spatial size is preserved.

    Args:
        in_ch (int): input channel count of the convolution.
        out_ch (int): output channel count of the convolution.
        ksize (int): convolution kernel size.
        stride (int): convolution stride.

    Returns:
        nn.Sequential: the three-layer block ('conv', 'batch_norm', 'relu').
    """
    padding = (ksize - 1) // 2
    block = nn.Sequential()
    conv = nn.Conv2d(in_channels=in_ch,
                     out_channels=out_ch,
                     kernel_size=ksize,
                     stride=stride,
                     padding=padding,
                     bias=False)
    block.add_module('conv', conv)
    block.add_module('batch_norm', nn.BatchNorm2d(out_ch))
    block.add_module('relu', nn.ReLU(inplace=True))
    return block
class UnetDecoder(nn.Module):
    """Two-branch U-Net decoder (segmentation + distance) whose intermediate
    stages are fused by summing both branches after every shared-skip block.

    Args:
        encoder_channels: channel counts of the encoder feature maps,
            shallow-to-deep (first entry is dropped, see __init__).
        decoder_channels: output channel counts of the decoder stages,
            deep-to-shallow.
    """
    def __init__(self, encoder_channels, decoder_channels):
        super().__init__()
        encoder_channels = encoder_channels[1:]  # remove first skip with same spatial resolution
        encoder_channels = encoder_channels[::-1]  # reverse channels to start from head of encoder
        # computing blocks input and output channels
        head_channels = encoder_channels[0]
        in_channels = [head_channels] + list(decoder_channels[:-1])
        skip_channels = list(encoder_channels[1:]) + [0]  # last stage gets no skip
        out_channels = decoder_channels
        # segmentation branch: all but the last stage participate in fusion
        seg_blocks = [
            DecoderBlock(in_ch, skip_ch, out_ch)
            for in_ch, skip_ch, out_ch in zip(in_channels, skip_channels, out_channels)
        ]
        self.seg_fusion_blocks = nn.ModuleList(seg_blocks[:-1])
        self.seg_final_block = seg_blocks[-1]
        # distance branch mirrors the segmentation branch layout
        dis_blocks = [
            DecoderBlock(in_ch, skip_ch, out_ch)
            for in_ch, skip_ch, out_ch in zip(in_channels, skip_channels, out_channels)
        ]
        self.dis_fusion_blocks = nn.ModuleList(dis_blocks[:-1])
        self.dis_final_block = dis_blocks[-1]
    def forward(self, *features):
        """Decode encoder features; returns (seg_features, dis_features)."""
        features = features[1:]  # remove first skip with same spatial resolution
        features = features[::-1]  # reverse channels to start from head of encoder
        x_seg = features[0]
        x_dis = features[0]
        skips = features[1:]
        for i, decoder_block in enumerate(zip(self.seg_fusion_blocks, self.dis_fusion_blocks)):
            seg_decoder_block, dis_decoder_block = decoder_block
            skip = skips[i] if i < len(skips) else None
            x_seg_temp = seg_decoder_block(x_seg, skip)
            x_dis_temp = dis_decoder_block(x_dis, skip)
            # NOTE(review): both branches are set to the same elementwise sum,
            # so x_seg == x_dis until the final (unfused) blocks below.
            # Confirm this is the intended fusion rather than a weighted mix.
            x_seg = x_seg_temp + x_dis_temp
            x_dis = x_seg_temp + x_dis_temp
        x_seg = self.seg_final_block(x_seg, None)
        x_dis = self.dis_final_block(x_dis, None)
        return x_seg, x_dis
class Mynet(nn.Module):
    """ResNet-50 U-Net with a dual-head decoder (segmentation + distance).

    forward() returns [seg_logits, dis_logits], each with num_classes channels.
    Extra **kwargs are forwarded to smp.Unet (e.g. encoder_weights).
    """
    def __init__(self, num_classes=1, **kwargs):
        super().__init__()
        # reuse only the encoder from an smp.Unet; the decoder is custom
        self.encoder = smp.Unet("resnet50", **kwargs).encoder
        self.decoder = UnetDecoder(self.encoder.out_channels, (256, 128, 64, 32, 16))
        self.seg_head = SegmentationHead(in_channels=16, out_channels=num_classes)
        self.dis_head = SegmentationHead(in_channels=16, out_channels=num_classes)
        self.initialize()
    def initialize(self):
        # smp's standard weight init for decoder and both heads
        init.initialize_decoder(self.decoder)
        init.initialize_head(self.seg_head)
        init.initialize_head(self.dis_head)
    def forward(self, x):
        features = self.encoder(x)
        x_seg, x_dis = self.decoder(*features)
        x_seg = self.seg_head(x_seg)
        x_dis = self.dis_head(x_dis)
        return [x_seg, x_dis]
class UnetDecoderWithoutFusion(nn.Module):
    """Two independent U-Net decoder branches (segmentation + distance)
    with no cross-branch fusion; each branch decodes on its own.
    """
    def __init__(self, encoder_channels, decoder_channels):
        super().__init__()
        encoder_channels = encoder_channels[1:]  # remove first skip with same spatial resolution
        encoder_channels = encoder_channels[::-1]  # reverse channels to start from head of encoder
        # computing blocks input and output channels
        head_channels = encoder_channels[0]
        in_channels = [head_channels] + list(decoder_channels[:-1])
        skip_channels = list(encoder_channels[1:]) + [0]  # last stage gets no skip
        out_channels = decoder_channels
        seg_blocks = [
            DecoderBlock(in_ch, skip_ch, out_ch)
            for in_ch, skip_ch, out_ch in zip(in_channels, skip_channels, out_channels)
        ]
        self.seg_blocks = nn.ModuleList(seg_blocks)
        dis_blocks = [
            DecoderBlock(in_ch, skip_ch, out_ch)
            for in_ch, skip_ch, out_ch in zip(in_channels, skip_channels, out_channels)
        ]
        self.dis_blocks = nn.ModuleList(dis_blocks)
    def forward(self, *features):
        """Decode encoder features; returns (seg_features, dis_features)."""
        features = features[1:]  # remove first skip with same spatial resolution
        features = features[::-1]  # reverse channels to start from head of encoder
        x_seg = features[0]
        x_dis = features[0]
        skips = features[1:]
        for i, decoder_block in enumerate(zip(self.seg_blocks, self.dis_blocks)):
            seg_decoder_block, dis_decoder_block = decoder_block
            skip = skips[i] if i < len(skips) else None
            x_seg = seg_decoder_block(x_seg, skip)
            x_dis = dis_decoder_block(x_dis, skip)
        return x_seg, x_dis
class MynetWithoutFusion(nn.Module):
    """Mynet variant whose two decoder branches run independently
    (UnetDecoderWithoutFusion). forward() returns [seg_logits, dis_logits].
    """
    def __init__(self, num_classes=1, **kwargs):
        super().__init__()
        self.encoder = smp.Unet("resnet50", **kwargs).encoder
        self.decoder = UnetDecoderWithoutFusion(self.encoder.out_channels, (256, 128, 64, 32, 16))
        self.seg_head = SegmentationHead(in_channels=16, out_channels=num_classes)
        self.dis_head = SegmentationHead(in_channels=16, out_channels=num_classes)
        self.initialize()
    def initialize(self):
        # smp's standard weight init for decoder and both heads
        init.initialize_decoder(self.decoder)
        init.initialize_head(self.seg_head)
        init.initialize_head(self.dis_head)
    def forward(self, x):
        features = self.encoder(x)
        x_seg, x_dis = self.decoder(*features)
        x_seg = self.seg_head(x_seg)
        x_dis = self.dis_head(x_dis)
        return [x_seg, x_dis]
class WeightedFusionModule(nn.Module):
    """Learned, softmax-weighted fusion of the segmentation and distance
    feature maps.

    Each branch is compressed to 16 channels, the compressed maps are
    concatenated, and two 1x1 convolutions predict per-pixel 2-way softmax
    weights used to blend the *original* (uncompressed) branch features.
    """

    def __init__(self, in_channel):
        super().__init__()
        self.compress_seg = add_conv(in_channel, 16, 1, 1)
        self.compress_dis = add_conv(in_channel, 16, 1, 1)
        # 32 -> 2 channels: per-pixel blending weights for each output branch
        self.weight_seg = nn.Conv2d(32, 2, 1, 1, 0)
        self.weight_dis = nn.Conv2d(32, 2, 1, 1, 0)

    def forward(self, x_seg, x_dis):
        """Return (fused_seg, fused_dis), same shapes as the inputs."""
        fused = torch.cat((self.compress_seg(x_seg), self.compress_dis(x_dis)), 1)
        seg_w = F.softmax(self.weight_seg(fused), dim=1)
        dis_w = F.softmax(self.weight_dis(fused), dim=1)
        x_seg_fusion = x_seg * seg_w[:, 0:1, :, :] + x_dis * seg_w[:, 1:, :, :]
        x_dis_fusion = x_seg * dis_w[:, 0:1, :, :] + x_dis * dis_w[:, 1:, :, :]
        return x_seg_fusion, x_dis_fusion
class OnlyOneBranchWeightedFusionModule(nn.Module):
    """Weighted fusion that updates only ONE branch; the other passes through.

    Both branches are compressed to 16 channels, concatenated, and a 1x1
    convolution predicts 2-channel per-pixel blending weights applied to the
    original (uncompressed) features of the selected branch.

    NOTE(review): unlike WeightedFusionModule, the predicted weights are NOT
    passed through softmax here, so they are unnormalized — confirm whether
    that asymmetry is intentional.

    Args:
        in_channel (int): channel count of each incoming branch.
        fusion_branch (str): 'seg' to fuse into the segmentation branch,
            'dis' to fuse into the distance branch.
    """

    def __init__(self, in_channel, fusion_branch='seg'):
        super().__init__()
        self.fusion_branch = fusion_branch
        self.compress_seg = add_conv(in_channel, 16, 1, 1)
        self.compress_dis = add_conv(in_channel, 16, 1, 1)
        self.weight_feature = nn.Conv2d(32, 2, 1, 1, 0)

    def forward(self, x_seg, x_dis):
        """Return (fused_seg, fused_dis); only the selected branch changes.

        Raises:
            ValueError: if fusion_branch is neither 'seg' nor 'dis'
                (the original code hit an UnboundLocalError instead).
        """
        x_seg_compressed = self.compress_seg(x_seg)
        x_dis_compressed = self.compress_dis(x_dis)
        x_compressed = torch.cat((x_seg_compressed, x_dis_compressed), 1)
        weight = self.weight_feature(x_compressed)
        if self.fusion_branch == 'seg':
            x_seg_fusion = x_seg * weight[:, 0:1, :, :] + x_dis * weight[:, 1:, :, :]
            x_dis_fusion = x_dis
        elif self.fusion_branch == 'dis':
            x_seg_fusion = x_seg
            x_dis_fusion = x_seg * weight[:, 0:1, :, :] + x_dis * weight[:, 1:, :, :]
        else:
            raise ValueError(
                "fusion_branch must be 'seg' or 'dis', got %r" % (self.fusion_branch,))
        return x_seg_fusion, x_dis_fusion
class UnetDecoderWeightedFusion(nn.Module):
    """Two-branch U-Net decoder whose stages are fused by learned per-pixel
    weights (WeightedFusionModule) instead of plain summation.
    """
    def __init__(self, encoder_channels, decoder_channels):
        super().__init__()
        encoder_channels = encoder_channels[1:]  # remove first skip with same spatial resolution
        encoder_channels = encoder_channels[::-1]  # reverse channels to start from head of encoder
        # computing blocks input and output channels
        head_channels = encoder_channels[0]  # 2048
        in_channels = [head_channels] + list(decoder_channels[:-1])
        skip_channels = list(encoder_channels[1:]) + [0]  # last stage gets no skip
        out_channels = decoder_channels
        seg_blocks = [
            DecoderBlock(in_ch, skip_ch, out_ch)
            for in_ch, skip_ch, out_ch in zip(in_channels, skip_channels, out_channels)
        ]
        self.seg_fusion_blocks = nn.ModuleList(seg_blocks[:-1])
        self.seg_final_block = seg_blocks[-1]
        dis_blocks = [
            DecoderBlock(in_ch, skip_ch, out_ch)
            for in_ch, skip_ch, out_ch in zip(in_channels, skip_channels, out_channels)
        ]
        self.dis_fusion_blocks = nn.ModuleList(dis_blocks[:-1])
        self.dis_final_block = dis_blocks[-1]
        # one fusion module per non-final stage, sized to that stage's output
        weight_in_channels = decoder_channels[:-1]
        weight_blocks = [
            WeightedFusionModule(in_ch) for in_ch in weight_in_channels
        ]
        self.weight_blocks = nn.ModuleList(weight_blocks)
    def forward(self, *features):
        """Decode encoder features; returns (seg_features, dis_features)."""
        features = features[1:]  # remove first skip with same spatial resolution
        features = features[::-1]  # reverse channels to start from head of encoder
        x_seg = features[0]
        x_dis = features[0]
        skips = features[1:]
        for i, decoder_block in enumerate(zip(self.seg_fusion_blocks, self.dis_fusion_blocks, self.weight_blocks)):
            seg_decoder_block, dis_decoder_block, weight_block = decoder_block
            skip = skips[i] if i < len(skips) else None
            x_seg = seg_decoder_block(x_seg, skip)
            x_dis = dis_decoder_block(x_dis, skip)
            # learned weighted mix of the two branches after each stage
            x_seg, x_dis = weight_block(x_seg, x_dis)
        x_seg = self.seg_final_block(x_seg, None)
        x_dis = self.dis_final_block(x_dis, None)
        return x_seg, x_dis
class MynetWeightedFusion(nn.Module):
    """Mynet variant using the learned weighted-fusion decoder
    (UnetDecoderWeightedFusion). forward() returns [seg_logits, dis_logits].
    """
    def __init__(self, num_classes=1, **kwargs):
        super().__init__()
        self.encoder = smp.Unet("resnet50", **kwargs).encoder
        self.decoder = UnetDecoderWeightedFusion(self.encoder.out_channels, (256, 128, 64, 32, 16))
        self.seg_head = SegmentationHead(in_channels=16, out_channels=num_classes)
        self.dis_head = SegmentationHead(in_channels=16, out_channels=num_classes)
        self.initialize()
    def initialize(self):
        # smp's standard weight init for decoder and both heads
        init.initialize_decoder(self.decoder)
        init.initialize_head(self.seg_head)
        init.initialize_head(self.dis_head)
    def forward(self, x):
        features = self.encoder(x)
        x_seg, x_dis = self.decoder(*features)
        x_seg = self.seg_head(x_seg)
        x_dis = self.dis_head(x_dis)
        return [x_seg, x_dis]
class UnetDecoderOnlySegFusion(UnetDecoderWeightedFusion):
    """UnetDecoderWeightedFusion variant whose fusion modules update only one
    branch (OnlyOneBranchWeightedFusionModule with its default
    fusion_branch='seg'); the distance branch passes through unfused.
    """
    def __init__(self, encoder_channels, decoder_channels):
        super().__init__(encoder_channels, decoder_channels)
        # replace the parent's symmetric fusion modules with one-branch ones
        weight_in_channels = decoder_channels[:-1]
        weight_blocks = [
            OnlyOneBranchWeightedFusionModule(in_ch) for in_ch in weight_in_channels
        ]
        self.weight_blocks = nn.ModuleList(weight_blocks)
class MynetOnlySegFusion(Mynet):
    """Mynet variant whose decoder fuses only into the segmentation branch
    (UnetDecoderOnlySegFusion); the distance branch is left unfused.
    """

    def __init__(self, num_classes=1, **kwargs):
        # Forward num_classes/kwargs to Mynet so the encoder and heads are
        # built once with the caller's settings. (The original called
        # super().__init__() with no arguments, then rebuilt the encoder,
        # decoder and both heads a second time.)
        super().__init__(num_classes=num_classes, **kwargs)
        # swap in the seg-only-fusion decoder and re-run weight init for it
        self.decoder = UnetDecoderOnlySegFusion(self.encoder.out_channels, (256, 128, 64, 32, 16))
        self.initialize()
| 36.464706
| 115
| 0.656396
| 1,657
| 12,398
| 4.584188
| 0.079059
| 0.026856
| 0.013165
| 0.020011
| 0.816746
| 0.796077
| 0.776461
| 0.758294
| 0.743154
| 0.73881
| 0
| 0.019319
| 0.244314
| 12,398
| 339
| 116
| 36.572271
| 0.79144
| 0.086788
| 0
| 0.693966
| 0
| 0
| 0.005238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094828
| false
| 0
| 0.030172
| 0
| 0.206897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6384552925c7ccab07ed4838cd799b76c4e180c7
| 187
|
py
|
Python
|
ddsc/__init__.py
|
Duke-GCB/DukeDSClient
|
7f119a5ee2e674e8deaff1f080caed1953c5cc61
|
[
"MIT"
] | 4
|
2020-06-18T12:30:13.000Z
|
2020-10-12T21:25:54.000Z
|
ddsc/__init__.py
|
Duke-GCB/DukeDSClient
|
7f119a5ee2e674e8deaff1f080caed1953c5cc61
|
[
"MIT"
] | 239
|
2016-02-18T14:44:08.000Z
|
2022-03-11T14:38:56.000Z
|
ddsc/__init__.py
|
Duke-GCB/DukeDSClient
|
7f119a5ee2e674e8deaff1f080caed1953c5cc61
|
[
"MIT"
] | 10
|
2016-02-22T15:01:28.000Z
|
2022-02-21T22:46:26.000Z
|
from __future__ import absolute_import
from ddsc.sdk.dukeds import DukeDS, Session, ItemNotFound, DuplicateNameError
__all__ = ['DukeDS', 'Session', 'ItemNotFound', 'DuplicateNameError']
| 46.75
| 77
| 0.807487
| 19
| 187
| 7.473684
| 0.578947
| 0.183099
| 0.352113
| 0.605634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 187
| 3
| 78
| 62.333333
| 0.835294
| 0
| 0
| 0
| 0
| 0
| 0.229947
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
63ad5efdaf21af9437e34c4171d21521a5843525
| 138
|
py
|
Python
|
tests/fixtures/helpers.py
|
ForroKulcs/bugsnag-python
|
107c1add31a2202cc08ef944aa00ab96996b247a
|
[
"MIT"
] | 76
|
2015-03-01T11:46:57.000Z
|
2022-02-18T10:57:44.000Z
|
tests/fixtures/helpers.py
|
ForroKulcs/bugsnag-python
|
107c1add31a2202cc08ef944aa00ab96996b247a
|
[
"MIT"
] | 119
|
2015-01-14T11:53:08.000Z
|
2022-03-30T08:22:50.000Z
|
tests/fixtures/helpers.py
|
ForroKulcs/bugsnag-python
|
107c1add31a2202cc08ef944aa00ab96996b247a
|
[
"MIT"
] | 46
|
2015-02-09T23:50:57.000Z
|
2022-01-06T16:04:40.000Z
|
def invoke_exception_on_other_file(config):
    """Build a bugsnag Event whose origin is this helper module rather than
    the calling test file."""
    from bugsnag.event import Event
    error = Exception("another file!")
    return Event(error, config, {})
| 27.6
| 56
| 0.746377
| 18
| 138
| 5.5
| 0.722222
| 0.20202
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144928
| 138
| 4
| 57
| 34.5
| 0.838983
| 0
| 0
| 0
| 0
| 0
| 0.094203
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
63cef1368265d98f03ad1b48449dbaec94267779
| 210
|
py
|
Python
|
Basic_Python/enamurate_and_zip.py
|
vibhgupta/python-basics
|
c898f292eccc39a4fded7d1f1a4f90084bf934ab
|
[
"MIT"
] | null | null | null |
Basic_Python/enamurate_and_zip.py
|
vibhgupta/python-basics
|
c898f292eccc39a4fded7d1f1a4f90084bf934ab
|
[
"MIT"
] | null | null | null |
Basic_Python/enamurate_and_zip.py
|
vibhgupta/python-basics
|
c898f292eccc39a4fded7d1f1a4f90084bf934ab
|
[
"MIT"
] | null | null | null |
# enumerate: pair each element with its index.
# Written as function calls (valid in both Python 2 and 3) instead of the
# original Python-2-only `print` statements; output is unchanged.
numbers = ['one', 'two', 'three', 'four']
seasons = ['fall', 'winter', 'spring', 'summer']
print(list(enumerate(numbers)))
print(list(enumerate(seasons)))

# zip: pair elements of both lists positionally.
# list() materializes the py3 iterator; a no-op on py2 where zip is a list.
print(list(zip(numbers, seasons)))
| 21
| 49
| 0.642857
| 24
| 210
| 5.625
| 0.583333
| 0.237037
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161905
| 210
| 9
| 50
| 23.333333
| 0.767045
| 0.061905
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.6
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
89370605c716a524220bcda5533303cb372a1512
| 147
|
py
|
Python
|
pyattck/__init__.py
|
ehooo/pyattck
|
99580a476eb88a8311478b9158a38c5e6f359f72
|
[
"MIT"
] | 1
|
2019-09-05T06:28:20.000Z
|
2019-09-05T06:28:20.000Z
|
pyattck/__init__.py
|
doomedraven/pyattck
|
e663824b2320db7354865e6516eb2c6b12ffadbd
|
[
"MIT"
] | null | null | null |
pyattck/__init__.py
|
doomedraven/pyattck
|
e663824b2320db7354865e6516eb2c6b12ffadbd
|
[
"MIT"
] | null | null | null |
from .pyattck import Attck
# AttckMalware was listed twice in the original import tuple; deduplicated.
from .pyattck import (AttckActor, AttckMalware, AttckMitigation,
                      AttckTactic, AttckTechnique, AttckTools)
| 49
| 119
| 0.836735
| 14
| 147
| 8.785714
| 0.714286
| 0.178862
| 0.276423
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 147
| 2
| 120
| 73.5
| 0.924812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
898f47d9bafa7127628dc95a2947820243ed5fa5
| 7,408
|
py
|
Python
|
team_9/cocos/utest/test_uniform_snippet.py
|
Donnyvdm/dojo19
|
3cf043a84e3ad6d3c4d59cd9c50b160e1ff03400
|
[
"BSD-3-Clause"
] | 1
|
2019-09-15T18:59:49.000Z
|
2019-09-15T18:59:49.000Z
|
team_9/cocos/utest/test_uniform_snippet.py
|
Donnyvdm/dojo19
|
3cf043a84e3ad6d3c4d59cd9c50b160e1ff03400
|
[
"BSD-3-Clause"
] | null | null | null |
team_9/cocos/utest/test_uniform_snippet.py
|
Donnyvdm/dojo19
|
3cf043a84e3ad6d3c4d59cd9c50b160e1ff03400
|
[
"BSD-3-Clause"
] | null | null | null |
"""tests tools\\uniform_snippet.py , use py.test to run"""
from __future__ import division, print_function, unicode_literals
import six
# make the test find the script in tools directory
import sys, os
tools_path = '../tools'
tools_abspath = os.path.abspath(tools_path)
# prepend so the in-repo tools copy wins over any installed module of the same name
sys.path.insert(0, tools_abspath)
from uniform_snippet import get_start_line, get_endplus_line, SnipetCompliance
# testing get_start_line
# better name would be get_first_match ?
# Note: target must not contain whitespace, else fails
def test_start_1st_line_match():
    """get_start_line finds a target sitting on the very first line and
    leaves the iterator positioned just past it."""
    target = 'classBackgroundLayer('
    should_match = 'class BackgroundLayer(cocos.layer.Layer)'
    filling = 'this is a line' + '\n' + 'this is another'
    lines = (should_match + '\n' + filling).split('\n')
    it = enumerate(lines)
    assert get_start_line(it, target) == 0
    # iterator resumes at the line after the match
    assert six.next(it) == (1, lines[1])
def test_start_last_line_match():
    """A match on the final line is reported and exhausts the iterator."""
    target = 'classBackgroundLayer('
    should_match = 'class BackgroundLayer('
    filling = 'this is a line' + '\n' + 'this is another'
    lines = (filling + '\n' + should_match).split('\n')
    it = enumerate(lines)
    assert get_start_line(it, target) == 2
    # the iterator must now be exhausted
    exhausted = False
    try:
        six.next(it)
    except StopIteration:
        exhausted = True
    assert exhausted
def test_start_inner_line_match():
    """A match on an interior line is found; iteration resumes after it."""
    target = 'classBackgroundLayer('
    should_match = 'class BackgroundLayer(cocos.layer.Layer)'
    filling = 'this is a line' + '\n' + 'this is another'
    lines = ('A fine first line.\n' + should_match + '\n' + filling).split('\n')
    it = enumerate(lines)
    assert get_start_line(it, target) == 1
    # iterator resumes at the line after the match
    assert six.next(it) == (2, lines[2])
def test_start_no_match():
    """No match yields None and consumes the whole iterator."""
    target = 'classBackgroundLayer('
    filling = 'this is a line' + '\n' + 'this is another'
    lines = filling.split('\n')
    it = enumerate(lines)
    assert get_start_line(it, target) is None
    # the iterator must now be exhausted
    exhausted = False
    try:
        six.next(it)
    except StopIteration:
        exhausted = True
    assert exhausted
# start tests for get_endplus_line
# better name can be : seek_end_of_indented_block
def test_endplus_with_trailing_blank_lines():
    """Whitespace-only lines after the indented block are excluded from it."""
    sample = [
        ' An indented line.',
        '\n',
        '\tLast non whitespace indented line.',
        '\n',
        '\n \t \r',
        '\n',
        'First non whitspace non indented line',
    ]
    lines = '\n'.join(sample).split('\n')
    it = enumerate(lines)
    end = get_endplus_line(it)
    # end points one past the last meaningful indented line
    assert lines[end - 1] == '\tLast non whitespace indented line.'
def test_endplus_with_no_trailing_blank_lines():
    """Block end is found when a non-indented line follows immediately."""
    sample = [
        ' An indented line.',
        '\n',
        '\tLast non whitespace indented line.',
        'First non whitspace non indented line',
    ]
    lines = '\n'.join(sample).split('\n')
    it = enumerate(lines)
    end = get_endplus_line(it)
    # end points one past the last meaningful indented line
    assert lines[end - 1] == '\tLast non whitespace indented line.'
def test_endplus_hitting_first_line():
text = '\n'.join([
'First non whitespace non indented line',
' An indented line.',
'\n',
'\tLast non whitespace indented line.',
])
lines = text.split('\n')
it = enumerate(lines)
endplus_line = get_endplus_line(it)
assert lines[endplus_line] == 'First non whitespace non indented line'
def test_endplus_hitting_EOF():
text = '\n'.join([
' An indented line.',
'\n',
'\tLast non whitespace indented line.'
])
lines = text.split('\n')
it = enumerate(lines)
endplus_line = get_endplus_line(it)
assert lines[endplus_line-1] == '\tLast non whitespace indented line.'
## if desided to run withou py.test
##test_start_1st_line_match()
##test_start_last_line_match()
##test_start_inner_line_match()
##test_start_no_match()
##test_endplus_with_trailing_blank_lines()
##test_endplus_with_no_trailing_blank_lines()
##test_endplus_hitting_EOF()
##test_endplus_hitting_first_line()
# aqui habria que empezar a hacer tests para la clase
##reference = """
##class BackgroundLayer(cocos.layer.Layer):
## def __init__(self):
## super(BackgroundLayer, self).__init__()
## self.img = pyglet.resource.image('background_image.png')
##
## def draw( self ):
## glColor4ub(255, 255, 255, 255)
## glPushMatrix()
## self.transform()
## self.img.blit(0,0)
## glPopMatrix()
##"""
##
##text_0 = """
###one up
##
##class BackgroundLayer(cocos.layer.Layer):
## def __init__(self):
## super(BackgroundLayer, self).__init__()
## self.img = pyglet.resource.image('background_image.png')
##
## def draw( self ):
## glColor4ub(255, 255, 255, 255)
## glPushMatrix()
## self.transform()
## self.img.blit(0,0)
## glPopMatrix()
##
###one down
##"""
##
##def test_midle_text_compliant():
## line_0 = '#line 0'
## line_last = '#line last'
## text_0 = line_0 + '\n' + reference + line_last
## print(text_0)
## SnipetCompliance.set_target(reference, 'classBackgroundLayer(')
## worker = SnipetCompliance('', text_0)
## worker.is_compliant()
## print('\n>>>')
## print('matched:')
## print(worker.matched())
## print('<<<\n')
##
## worker.enforce_compliance()
## print '\n>>>'
## print 'matched:'
## print worker.matched()
## print '<<<\n'
##
## print 'fixed text:'
## print worker.text
##
##
##def test_midle_text_compliant():
## line_0 = '#line 0'
## line_last = '#line last'
## text_0 = line_0 + '\n' + reference + line_last
## print(text_0)
## SnipetCompliance.set_target(reference, 'classBackgroundLayer(')
## worker = SnipetCompliance('', text_0)
## worker.is_compliant()
## assert worker.compliant
## worker.enforce_compliance()
## assert text_0 == worker.text
#### print('\n>>>')
#### print('matched:')
#### print(worker.matched())
#### print('<<<\n')
####
#### worker.enforce_compliance()
#### print('\n>>>')
#### print('matched:')
#### print(worker.matched())
#### print('<<<\n')
####
#### print('fixed text:')
#### print(worker.text)
##
##def test_trailing_text_compliant():
## line_0 = '#line 0'
## text_0 = line_0 + '\n' + reference
## print(text_0)
## SnipetCompliance.set_target(reference, 'classBackgroundLayer(')
## worker = SnipetCompliance('', text_0)
## worker.is_compliant()
## assert text_0 == worker.text
## assert worker.compliant
#### print('\n>>>')
#### print('matched:')
#### print(worker.matched())
#### print('<<<\n')
####
#### worker.enforce_compliance()
#### print('\n>>>')
#### print('matched:')
#### print(worker.matched())
#### print('<<<\n')
####
#### print('fixed text:')
#### print(worker.text)
##test_midle_text_compliant
##test_trailing_text_compliant()
| 28.274809
| 79
| 0.610151
| 869
| 7,408
| 4.970081
| 0.166858
| 0.041676
| 0.022922
| 0.027784
| 0.80852
| 0.764992
| 0.722158
| 0.712665
| 0.697384
| 0.697384
| 0
| 0.011264
| 0.232991
| 7,408
| 261
| 80
| 28.383142
| 0.748856
| 0.426566
| 0
| 0.736842
| 0
| 0
| 0.222391
| 0.038501
| 0
| 0
| 0
| 0
| 0.105263
| 1
| 0.070175
| false
| 0
| 0.035088
| 0
| 0.105263
| 0.008772
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
899f295a0a1d980958923eb85b58f50ab858ce26
| 2,925
|
py
|
Python
|
src/third_party/imageAugmentation/checks/check_elastic_transformation.py
|
Austin1999/ViolenceDetection-master
|
10f3540bb330d06a1f36efd597097d9dcb067645
|
[
"Apache-2.0"
] | 149
|
2018-04-26T16:09:45.000Z
|
2022-03-31T12:57:11.000Z
|
src/third_party/imageAugmentation/checks/check_elastic_transformation.py
|
Austin1999/ViolenceDetection-master
|
10f3540bb330d06a1f36efd597097d9dcb067645
|
[
"Apache-2.0"
] | 44
|
2018-05-07T13:57:44.000Z
|
2021-05-30T11:06:31.000Z
|
src/third_party/imageAugmentation/checks/check_elastic_transformation.py
|
Austin1999/ViolenceDetection-master
|
10f3540bb330d06a1f36efd597097d9dcb067645
|
[
"Apache-2.0"
] | 45
|
2018-06-08T10:08:45.000Z
|
2021-11-24T06:02:06.000Z
|
from __future__ import print_function, division
import imgaug as ia
from imgaug import augmenters as iaa
from scipy import misc
import numpy as np
from skimage import data
def main():
image = data.astronaut()
image = ia.imresize_single_image(image, (128, 128))
images = []
params = [
(0.25, 0.25),
(1.0, 0.25),
(2.0, 0.25),
(3.0, 0.25),
(0.25, 0.50),
(1.0, 0.50),
(2.0, 0.50),
(3.0, 0.50),
(0.25, 0.75),
(1.0, 0.75),
(2.0, 0.75),
(3.0, 0.75)
]
for (alpha, sigma) in params:
images_row = []
seqs_row = [
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="constant", cval=0, order=0),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="constant", cval=128, order=0),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="constant", cval=255, order=0),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="constant", cval=0, order=1),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="constant", cval=128, order=1),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="constant", cval=255, order=1),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="constant", cval=0, order=3),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="constant", cval=128, order=3),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="constant", cval=255, order=3),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="nearest", order=0),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="nearest", order=1),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="nearest", order=2),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="nearest", order=3),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="reflect", order=0),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="reflect", order=1),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="reflect", order=2),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="reflect", order=3),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="wrap", order=0),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="wrap", order=1),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="wrap", order=2),
iaa.ElasticTransformation(alpha=alpha, sigma=sigma, mode="wrap", order=3)
]
for seq in seqs_row:
images_row.append(
seq.augment_image(image)
)
images.append(np.hstack(images_row))
misc.imshow(np.vstack(images))
misc.imsave("elastic_transformations.jpg", np.vstack(images))
if __name__ == "__main__":
main()
| 44.318182
| 100
| 0.627692
| 363
| 2,925
| 4.997245
| 0.165289
| 0.121279
| 0.335722
| 0.393605
| 0.735391
| 0.735391
| 0.735391
| 0.735391
| 0.735391
| 0.338479
| 0
| 0.049007
| 0.225641
| 2,925
| 65
| 101
| 45
| 0.751876
| 0
| 0
| 0
| 0
| 0
| 0.061197
| 0.009231
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017241
| false
| 0
| 0.103448
| 0
| 0.12069
| 0.017241
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
982335b8f59693d558a643455a38a99073476ea2
| 30
|
wsgi
|
Python
|
website-backend/ava/ava.wsgi
|
kbladin/ava-capture
|
2fc24f4a3712f721c3a229b499631e00697209a5
|
[
"BSD-3-Clause"
] | 49
|
2017-08-18T15:25:09.000Z
|
2022-02-20T19:07:00.000Z
|
website-backend/ava/ava.wsgi
|
kbladin/ava-capture
|
2fc24f4a3712f721c3a229b499631e00697209a5
|
[
"BSD-3-Clause"
] | 50
|
2021-11-07T18:24:34.000Z
|
2022-03-19T01:16:48.000Z
|
website-backend/ava/ava.wsgi
|
samqws-marketing/electronicarts_ava-capture
|
a04e5f9a7ee817317d0d58ce800eefc6bf4bd150
|
[
"BSD-3-Clause"
] | 13
|
2017-12-04T19:33:20.000Z
|
2021-07-17T02:17:00.000Z
|
from test import application
| 10
| 28
| 0.833333
| 4
| 30
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 30
| 2
| 29
| 15
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
983948c35808ce245980ba4837199f091daa00ea
| 145
|
py
|
Python
|
helpers/cache.py
|
pythonapi/python-api-based-on-flask-restful
|
4d83db4bec9d3ad13095ad85787360d3ba64d811
|
[
"MIT"
] | 1
|
2021-04-23T21:58:07.000Z
|
2021-04-23T21:58:07.000Z
|
helpers/cache.py
|
pythonapi/python-api-based-on-flask-restful
|
4d83db4bec9d3ad13095ad85787360d3ba64d811
|
[
"MIT"
] | null | null | null |
helpers/cache.py
|
pythonapi/python-api-based-on-flask-restful
|
4d83db4bec9d3ad13095ad85787360d3ba64d811
|
[
"MIT"
] | null | null | null |
from config import config
class Cache():
def __init__(self):
return base.Client((config['cache']['host'], config['cache']['port']))
| 24.166667
| 78
| 0.641379
| 18
| 145
| 4.944444
| 0.722222
| 0.247191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165517
| 145
| 5
| 79
| 29
| 0.735537
| 0
| 0
| 0
| 0
| 0
| 0.124138
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
984788d9c028b7dc4e9f9555ec978987dd05ad96
| 55
|
py
|
Python
|
post_processing/repulsion.py
|
gwpark-git/dynamics_of_networks_and_colloids
|
0b0a3687533379ec75171ae6b906aeff5bedfbba
|
[
"MIT"
] | null | null | null |
post_processing/repulsion.py
|
gwpark-git/dynamics_of_networks_and_colloids
|
0b0a3687533379ec75171ae6b906aeff5bedfbba
|
[
"MIT"
] | null | null | null |
post_processing/repulsion.py
|
gwpark-git/dynamics_of_networks_and_colloids
|
0b0a3687533379ec75171ae6b906aeff5bedfbba
|
[
"MIT"
] | null | null | null |
def U_ij(r):
return (1./3.)*(1. - r)**2.*(2. + r)
| 13.75
| 40
| 0.381818
| 12
| 55
| 1.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 0.236364
| 55
| 3
| 41
| 18.333333
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
98a5c080813be428b1558ec0f2132b5283db2f0a
| 167
|
py
|
Python
|
maqazin_service/run.py
|
agaverdi/anbar_maqazin_service
|
9db00066defce50d31061a63d96f54ef4bf5cdc4
|
[
"MIT"
] | 1
|
2020-11-30T00:52:03.000Z
|
2020-11-30T00:52:03.000Z
|
maqazin_service/run.py
|
agaverdi/anbar_maqazin_service
|
9db00066defce50d31061a63d96f54ef4bf5cdc4
|
[
"MIT"
] | null | null | null |
maqazin_service/run.py
|
agaverdi/anbar_maqazin_service
|
9db00066defce50d31061a63d96f54ef4bf5cdc4
|
[
"MIT"
] | null | null | null |
from app.app import create_app,db
import os
settings_name = os.getenv("APP_SETTINGS")
app = create_app(settings_name)
# db.create_all(app=create_app(settings_name))
| 20.875
| 46
| 0.796407
| 28
| 167
| 4.464286
| 0.357143
| 0.216
| 0.192
| 0.32
| 0.384
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095808
| 167
| 8
| 46
| 20.875
| 0.827815
| 0.263473
| 0
| 0
| 0
| 0
| 0.098361
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
7f84ecad98e54f5946be09b30175fcdd7652cbce
| 44
|
py
|
Python
|
Webscraping/__init__.py
|
daydreamjesse/ReedTheRobot
|
cc30dae4ceddbaeb9369bb7be220adee7159b744
|
[
"MIT"
] | null | null | null |
Webscraping/__init__.py
|
daydreamjesse/ReedTheRobot
|
cc30dae4ceddbaeb9369bb7be220adee7159b744
|
[
"MIT"
] | null | null | null |
Webscraping/__init__.py
|
daydreamjesse/ReedTheRobot
|
cc30dae4ceddbaeb9369bb7be220adee7159b744
|
[
"MIT"
] | null | null | null |
print("Initializing webscraping package...")
| 44
| 44
| 0.795455
| 4
| 44
| 8.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 44
| 1
| 44
| 44
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
7fbe398a8c6b53aae5006c27b4817a92bf874daa
| 83
|
py
|
Python
|
arcli/triggers/always.py
|
guiscaranse/arcli
|
6ca8158111ab1694f2e1900dde723e3e4d86cfe8
|
[
"Apache-2.0"
] | 2
|
2019-08-07T22:33:26.000Z
|
2020-01-14T00:44:01.000Z
|
arcli/triggers/always.py
|
guiscaranse/arcli
|
6ca8158111ab1694f2e1900dde723e3e4d86cfe8
|
[
"Apache-2.0"
] | 1
|
2019-11-16T22:17:08.000Z
|
2019-11-16T22:17:08.000Z
|
arcli/triggers/always.py
|
guiscaranse/arcli
|
6ca8158111ab1694f2e1900dde723e3e4d86cfe8
|
[
"Apache-2.0"
] | null | null | null |
from arcli.triggers.base import ArcliTrigger
class Always(ArcliTrigger):
pass
| 16.6
| 44
| 0.795181
| 10
| 83
| 6.6
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144578
| 83
| 5
| 45
| 16.6
| 0.929577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
f69385c094ac3ffba4a6fbec6e04682dcf49ba53
| 19,221
|
py
|
Python
|
src/ralph/lib/custom_fields/tests/test_api.py
|
elubik/ralph
|
ac39be38cbd7c80ac64984848423fc92b8661b16
|
[
"Apache-2.0"
] | 1,668
|
2015-01-01T12:51:20.000Z
|
2022-03-29T09:05:35.000Z
|
src/ralph/lib/custom_fields/tests/test_api.py
|
elubik/ralph
|
ac39be38cbd7c80ac64984848423fc92b8661b16
|
[
"Apache-2.0"
] | 2,314
|
2015-01-02T13:26:26.000Z
|
2022-03-29T04:06:03.000Z
|
src/ralph/lib/custom_fields/tests/test_api.py
|
elubik/ralph
|
ac39be38cbd7c80ac64984848423fc92b8661b16
|
[
"Apache-2.0"
] | 534
|
2015-01-05T12:40:28.000Z
|
2022-03-29T21:10:12.000Z
|
# -*- coding: utf-8 -*-
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from ralph.accounts.models import RalphUser
from ralph.accounts.tests.factories import GroupFactory
from ralph.tests.factories import UserFactory
from ..models import CustomField, CustomFieldTypes, CustomFieldValue
from ..signals import api_post_create, api_post_update
from .models import ModelA, ModelB, SomeModel
class CustomFieldsAPITests(APITestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.a1 = ModelA.objects.create()
cls.b1 = ModelB.objects.create(a=cls.a1)
cls.sm1 = SomeModel.objects.create(name='abc', b=cls.b1)
cls.sm2 = SomeModel.objects.create(name='def')
cls.custom_field_str = CustomField.objects.create(
name='test str', type=CustomFieldTypes.STRING, default_value='xyz'
)
cls.custom_field_choices = CustomField.objects.create(
name='test choice', type=CustomFieldTypes.CHOICE,
choices='qwerty|asdfgh|zxcvbn', default_value='zxcvbn',
use_as_configuration_variable=True,
)
cls.user = get_user_model().objects.create_superuser(
username='root',
password='password',
email='email@email.pl'
)
cls.list_view_name = '{}-customfields-list'.format(
SomeModel._meta.model_name
)
cls.detail_view_name = '{}-customfields-detail'.format(
SomeModel._meta.model_name
)
def setUp(self):
self.client.force_authenticate(self.user)
self.cfv1 = CustomFieldValue.objects.create(
object=self.sm1,
custom_field=self.custom_field_str,
value='sample_value',
)
self.cfv2 = CustomFieldValue.objects.create(
object=self.sm2,
custom_field=self.custom_field_choices,
value='qwerty',
)
self.cfv3 = CustomFieldValue.objects.create(
object=self.sm2,
custom_field=self.custom_field_str,
value='sample_value2',
)
def test_get_customfields_in_object_resource(self):
url = reverse('somemodel-detail', args=(self.sm2.id,))
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['custom_fields'], {
'test_str': 'sample_value2',
'test_choice': 'qwerty',
})
def test_get_customfields_with_inheritance_in_objects_list(self):
CustomFieldValue.objects.create(
object=self.a1,
custom_field=self.custom_field_choices,
value='asdfgh',
)
CustomFieldValue.objects.create(
object=self.b1,
custom_field=self.custom_field_str,
value='sample_value_b1',
)
url = reverse('somemodel-list')
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 2)
cfvs = [obj['custom_fields'] for obj in response.data['results']]
self.assertCountEqual(cfvs, [
{
'test_str': 'sample_value',
'test_choice': 'asdfgh',
},
{
'test_str': 'sample_value2',
'test_choice': 'qwerty',
}
])
def test_get_customfields_with_inheritance_in_object_resource(self):
CustomFieldValue.objects.create(
object=self.a1,
custom_field=self.custom_field_choices,
value='asdfgh',
)
CustomFieldValue.objects.create(
object=self.b1,
custom_field=self.custom_field_str,
value='sample_value_b1',
)
url = reverse('somemodel-detail', args=(self.sm1.id,))
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['custom_fields'], {
'test_str': 'sample_value',
'test_choice': 'asdfgh',
})
def test_get_configuration_variables_in_object_resource(self):
url = reverse('somemodel-detail', args=(self.sm2.id,))
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['configuration_variables'], {
'test_choice': 'qwerty',
})
def test_get_customfields_for_single_object(self):
url = reverse(self.list_view_name, args=(self.sm1.id,))
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 1)
cfv = response.data['results'][0]
self.assertEqual(
cfv['custom_field']['name'], self.custom_field_str.name
)
self.assertEqual(cfv['value'], self.cfv1.value)
self.assertTrue(cfv['url'].endswith(reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)))
def test_get_customfields_with_inheritance_for_single_object(self):
CustomFieldValue.objects.create(
object=self.a1,
custom_field=self.custom_field_choices,
value='qwerty',
)
self.assertEqual(self.sm1.custom_fields.count(), 2)
url = reverse(self.list_view_name, args=(self.sm1.id,))
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
# besides that value of another CF is inherited, CFV assigned directly
# to this object is only one and this only one should be editable
# in context of this object
self.assertEqual(response.data['count'], 1)
def test_get_customfields_for_single_object_options(self):
url = reverse(self.list_view_name, args=(self.sm1.id,))
response = self.client.options(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIn('POST', response['allow'])
def test_get_customfields_for_wrong_object_should_return_404(self):
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.sm2.pk}
)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_get_single_customfields_for_single_object(self):
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(
response.data['custom_field']['name'], self.custom_field_str.name
)
self.assertEqual(response.data['value'], self.cfv1.value)
self.assertTrue(response.data['url'].endswith(reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)))
def test_get_single_customfields_for_single_object_options(self):
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)
response = self.client.options(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIn('PUT', response['allow'])
self.assertIn('PATCH', response['allow'])
def test_add_new_customfield_value_should_pass(self):
url = reverse(self.list_view_name, args=(self.sm1.id,))
data = {
'value': 'qwerty',
'custom_field': self.custom_field_choices.id,
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
cfv = CustomFieldValue.objects.get(pk=response.data['id'])
self.assertEqual(cfv.object, self.sm1)
self.assertEqual(cfv.custom_field, self.custom_field_choices)
self.assertEqual(cfv.value, 'qwerty')
def test_add_new_customfield_value_with_unmatching_managing_group_should_fail(self): # noqa" E501
self.custom_field_str.managing_group = GroupFactory()
self.custom_field_str.save()
some_object = SomeModel.objects.create(name='DEADBEEF')
url = reverse(self.list_view_name, args=(some_object.id,))
data = {
'value': 'qwerty',
'custom_field': self.custom_field_str.id,
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_add_new_customfield_value_with_matching_managing_group_should_succeed(self): # noqa" E501
group = GroupFactory()
self.user.groups.add(group)
self.custom_field_str.managing_group = group
self.custom_field_str.save()
self.user.save()
some_object = SomeModel.objects.create(name='DEADBEEF')
url = reverse(self.list_view_name, args=(some_object.id,))
data = {
'value': 'qwerty',
'custom_field': self.custom_field_str.id,
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_update_customfield_value_with_unmatching_managing_group_should_fail(self): # noqa: E501
self.custom_field_str.managing_group = GroupFactory()
self.custom_field_str.save()
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)
data = {
'value': 'NEW-VALUE',
'custom_field': self.custom_field_str.id,
}
response = self.client.put(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_update_customfield_value_with_matching_managing_group_should_pass(self): # noqa: E501
group = GroupFactory()
self.user.groups.add(group)
self.custom_field_str.managing_group = group
self.custom_field_str.save()
self.user.save()
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)
data = {
'value': 'NEW-VALUE',
'custom_field': self.custom_field_str.id,
}
response = self.client.put(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.cfv1.refresh_from_db()
self.assertEqual(self.cfv1.object, self.sm1)
self.assertEqual(self.cfv1.custom_field, self.custom_field_str)
self.assertEqual(self.cfv1.value, 'NEW-VALUE')
def test_delete_custom_field_value_with_unmatching_managing_group_should_fail(self): # noqa: E501
self.custom_field_str.managing_group = GroupFactory()
self.custom_field_str.save()
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)
response = self.client.delete(url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(
CustomFieldValue.objects.filter(pk=self.cfv1.pk).count(), 1
)
def test_delete_custom_field_value_with_matching_managing_group_should_pass(self): # noqa: E501
group = GroupFactory()
self.user.groups.add(group)
self.custom_field_str.managing_group = group
self.custom_field_str.save()
self.user.save()
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)
response = self.client.delete(url, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(
CustomFieldValue.objects.filter(pk=self.cfv1.pk).count(), 0
)
def test_add_new_customfield_value_should_send_api_post_create_signal(self): # noqa: E501
self._sig_called_with_instance = None
def listener(sender, instance, **kwargs):
self._sig_called_with_instance = instance
api_post_create.connect(listener)
url = reverse(self.list_view_name, args=(self.sm1.id,))
data = {
'value': 'qwerty',
'custom_field': self.custom_field_choices.id,
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertIsNotNone(self._sig_called_with_instance)
self.assertEqual(
self._sig_called_with_instance.id, response.data['id']
)
def test_add_new_customfield_value_by_attribute_name(self):
expected = 'new-value'
cf = CustomField.objects.create(
name='by-attr', type=CustomFieldTypes.STRING, default_value='v'
)
url = reverse(self.list_view_name, args=(self.sm1.id,))
data = {
'custom_field': cf.attribute_name,
'value': expected,
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
cfv = CustomFieldValue.objects.get(pk=response.data['id'])
self.assertEqual(cfv.value, expected)
def test_add_new_customfield_value_with_duplicated_customfield_should_not_pass(self): # noqa
url = reverse(self.list_view_name, args=(self.sm1.id,))
data = {
'value': 'duplicate!',
'custom_field': self.custom_field_str.id,
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(
'Custom field of the same type already exists for this object.',
response.data['__all__']
)
def test_add_new_customfield_value_with_invalid_value_should_not_pass(self):
url = reverse(self.list_view_name, args=(self.sm1.id,))
data = {
'value': 'invalid!',
'custom_field': self.custom_field_choices.id,
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(
'Select a valid choice. invalid! is not one of the available choices.', # noqa
response.data['__all__']
)
def test_update_customfield_value_should_pass(self):
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)
data = {
'value': 'ytrewq',
'custom_field': self.custom_field_str.id,
}
response = self.client.put(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.cfv1.refresh_from_db()
self.assertEqual(self.cfv1.object, self.sm1)
self.assertEqual(self.cfv1.custom_field, self.custom_field_str)
self.assertEqual(self.cfv1.value, 'ytrewq')
def test_update_customfield_value_should_send_api_post_update_signal(self):
self._sig_called_with_instance = None
def listener(sender, instance, **kwargs):
self._sig_called_with_instance = instance
api_post_update.connect(listener)
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)
data = {
'value': 'abc',
'custom_field': self.custom_field_str.id,
}
response = self.client.put(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIsNotNone(self._sig_called_with_instance)
self.assertEqual(
self._sig_called_with_instance.id, response.data['id']
)
def test_update_customfield_value_with_duplicated_customfield_should_not_pass(self): # noqa
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv2.pk, 'object_pk': self.cfv2.object_id}
)
data = {
'value': 'duplicate!',
'custom_field': self.custom_field_str.id,
}
response = self.client.put(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(
'Custom field of the same type already exists for this object.',
response.data['__all__']
)
def test_update_customfield_value_invalid_value_should_not_pass(self):
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv2.pk, 'object_pk': self.cfv2.object_id}
)
data = {
'value': 'invalid!',
'custom_field': self.custom_field_choices.id,
}
response = self.client.put(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(
'Select a valid choice. invalid! is not one of the available choices.', # noqa
response.data['__all__']
)
def test_partial_update_value_should_pass(self):
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)
data = {
'value': 'ytrewq',
}
response = self.client.patch(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.cfv1.refresh_from_db()
self.assertEqual(self.cfv1.object, self.sm1)
self.assertEqual(self.cfv1.custom_field, self.custom_field_str)
self.assertEqual(self.cfv1.value, 'ytrewq')
def test_delete_custom_field_value(self):
url = reverse(
self.detail_view_name,
kwargs={'pk': self.cfv1.pk, 'object_pk': self.cfv1.object_id}
)
response = self.client.delete(url, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(
CustomFieldValue.objects.filter(pk=self.cfv1.pk).count(), 0
)
def test_filter_by_custom_field(self):
url = '{}?{}'.format(
reverse('{}-list'.format(SomeModel._meta.model_name)),
'customfield__test_str=sample_value'
)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 1)
| 39.549383
| 103
| 0.637896
| 2,276
| 19,221
| 5.118629
| 0.093146
| 0.068927
| 0.048927
| 0.044807
| 0.838455
| 0.81176
| 0.78206
| 0.756652
| 0.743691
| 0.727639
| 0
| 0.014057
| 0.248686
| 19,221
| 485
| 104
| 39.630928
| 0.792674
| 0.014411
| 0
| 0.616114
| 0
| 0
| 0.084372
| 0.004174
| 0
| 0
| 0
| 0
| 0.165877
| 1
| 0.075829
| false
| 0.023697
| 0.026066
| 0
| 0.104265
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f69993a00d48e29fb845915d46f9225f2c4a9d53
| 32
|
py
|
Python
|
python/testData/optimizeImports/libraryRootInsideProject/main.after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/optimizeImports/libraryRootInsideProject/main.after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/optimizeImports/libraryRootInsideProject/main.after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
import b
import a
print(a, b)
| 5.333333
| 11
| 0.65625
| 7
| 32
| 3
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 32
| 5
| 12
| 6.4
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
120b209dae282cc86fe025db5d889a3cfab762a1
| 861
|
py
|
Python
|
MersennePrime/threadedmath.py
|
rmamba/pyLucasLehmer
|
14ec2496e2f64b3aae21ff2629fbbab6fc78cca8
|
[
"MIT"
] | 2
|
2020-03-18T20:28:14.000Z
|
2021-10-12T11:38:38.000Z
|
MersennePrime/threadedmath.py
|
rmamba/pyLucasLehmer
|
14ec2496e2f64b3aae21ff2629fbbab6fc78cca8
|
[
"MIT"
] | 1
|
2018-07-08T18:50:28.000Z
|
2018-08-07T19:12:05.000Z
|
MersennePrime/threadedmath.py
|
rmamba/pyLucasLehmer
|
14ec2496e2f64b3aae21ff2629fbbab6fc78cca8
|
[
"MIT"
] | null | null | null |
import threading
import math
class mul(threading.Thread):
def __init__(self, idx, n, m):
threading.Thread.__init__(self)
self.idx = idx
self.n = n
self.m = m
self.r = None
def run(self):
r = self.n * self.m
self.r = []
self.r.append(r % 256)
while r>255:
r = int(math.floor(r / 256))
self.r.append(r % 256)
class sub(threading.Thread):
def __init__(self, idx, n, m):
threading.Thread.__init__(self)
self.idx = idx
self.n = n
self.m = m
self.r = None
def run(self):
self.r = self.n - self.m
# self.r = []
# self.r.append(r % 256)
# while r>255:
# r = int(math.floor(r / 256))
# self.r.append(r % 256)
# if r>0:
# self.r.append(r)
| 23.27027
| 42
| 0.477352
| 122
| 861
| 3.237705
| 0.196721
| 0.139241
| 0.139241
| 0.151899
| 0.855696
| 0.855696
| 0.855696
| 0.855696
| 0.855696
| 0.855696
| 0
| 0.047259
| 0.385598
| 861
| 36
| 43
| 23.916667
| 0.699433
| 0.157956
| 0
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0
| 0.08
| 0
| 0.32
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
124bf3e77de4a22a5d5ee9ea9112308f1d774a4b
| 132
|
py
|
Python
|
HackerRank Solutions/Python/Numpy/Zeros and Ones.py
|
DevashishPathrabe/Competetive-Coding
|
91049459359854b7834cbfb31415682600dc9c57
|
[
"MIT"
] | null | null | null |
HackerRank Solutions/Python/Numpy/Zeros and Ones.py
|
DevashishPathrabe/Competetive-Coding
|
91049459359854b7834cbfb31415682600dc9c57
|
[
"MIT"
] | null | null | null |
HackerRank Solutions/Python/Numpy/Zeros and Ones.py
|
DevashishPathrabe/Competetive-Coding
|
91049459359854b7834cbfb31415682600dc9c57
|
[
"MIT"
] | null | null | null |
import numpy
N = tuple(map(int, input().split()))
print (numpy.zeros(N, dtype = numpy.int))
print (numpy.ones(N, dtype = numpy.int))
| 33
| 41
| 0.681818
| 22
| 132
| 4.090909
| 0.545455
| 0.222222
| 0.244444
| 0.311111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 132
| 4
| 42
| 33
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
1257f4f24158a148035737ab064220be6911ed4b
| 2,689
|
py
|
Python
|
app/slack/tests/test_slack_util.py
|
NDevox/website
|
76004e667f2295eddd79d500ba21f02a0480412f
|
[
"Apache-2.0"
] | null | null | null |
app/slack/tests/test_slack_util.py
|
NDevox/website
|
76004e667f2295eddd79d500ba21f02a0480412f
|
[
"Apache-2.0"
] | null | null | null |
app/slack/tests/test_slack_util.py
|
NDevox/website
|
76004e667f2295eddd79d500ba21f02a0480412f
|
[
"Apache-2.0"
] | null | null | null |
import pook
import pytest
from requests.exceptions import HTTPError
from app.slack.util import (
SlackException,
SlackClient,
)
class TestSlackClient:
@pook.post('https://slack.com/api/users.admin.invite', reply=400)
def test_invite_raises_on_bad_response(self, slack_client: SlackClient):
with pytest.raises(HTTPError):
slack_client.invite('foo@gmail.com', [])
assert pook.isdone()
@pook.post('https://slack.com/api/users.admin.invite', reply=200,
response_json=dict(ok=False, error='already_invited'))
def test_invite_raises_on_not_ok(self, slack_client: SlackClient):
with pytest.raises(SlackException):
slack_client.invite('foo@gmail.com', [])
assert pook.isdone()
@pook.post('https://slack.com/api/users.admin.invite', reply=200,
response_json=dict(ok=True))
def test_invite_returns_true_on_ok(self, slack_client: SlackClient):
assert slack_client.invite('foo@gmail.com', [])
assert pook.isdone()
@pook.get('https://slack.com/api/users.list', reply=400)
def test_members_raises_on_bad_response(self, slack_client: SlackClient):
with pytest.raises(HTTPError):
slack_client.members()
assert pook.isdone()
@pook.get('https://slack.com/api/users.list', reply=200,
response_json=dict(ok=False, error='something'))
def test_members_raises_on_not_ok(self, slack_client: SlackClient):
with pytest.raises(SlackException):
slack_client.members()
assert pook.isdone()
@pook.get('https://slack.com/api/users.list', reply=200,
response_json=dict(ok=True, members=[]))
def test_members_list_on_ok(self, slack_client: SlackClient):
assert isinstance(slack_client.members(), list)
assert pook.isdone()
@pook.get('https://slack.com/api/channels.list', reply=400)
def test_channels_raises_on_bad_response(self, slack_client: SlackClient):
with pytest.raises(HTTPError):
slack_client.channels()
assert pook.isdone()
@pook.get('https://slack.com/api/channels.list', reply=200,
response_json=dict(ok=False, error='something'))
def test_channels_raises_on_not_ok(self, slack_client: SlackClient):
with pytest.raises(SlackException):
slack_client.channels()
assert pook.isdone()
@pook.get('https://slack.com/api/channels.list', reply=200,
response_json=dict(ok=True, channels=[]))
def test_channels_list_on_ok(self, slack_client: SlackClient):
assert isinstance(slack_client.channels(), list)
assert pook.isdone()
| 39.544118
| 78
| 0.677203
| 341
| 2,689
| 5.140762
| 0.155425
| 0.112949
| 0.066743
| 0.082145
| 0.857387
| 0.814033
| 0.814033
| 0.791215
| 0.780947
| 0.780947
| 0
| 0.01246
| 0.194124
| 2,689
| 67
| 79
| 40.134328
| 0.796493
| 0
| 0
| 0.517857
| 0
| 0
| 0.146151
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 1
| 0.160714
| false
| 0
| 0.071429
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
89e16072b6b6f27f2fc669be7c5bb803538a74af
| 36
|
py
|
Python
|
goldsberry/team/__init__.py
|
motraor3/py-Goldsberry
|
fe238b07142cff64eda37e402bb003251af13f9b
|
[
"MIT"
] | 268
|
2015-07-28T18:49:06.000Z
|
2022-03-06T03:08:18.000Z
|
goldsberry/team/__init__.py
|
motraor3/py-Goldsberry
|
fe238b07142cff64eda37e402bb003251af13f9b
|
[
"MIT"
] | 24
|
2015-07-06T22:50:59.000Z
|
2021-07-05T05:10:26.000Z
|
goldsberry/team/__init__.py
|
motraor3/py-Goldsberry
|
fe238b07142cff64eda37e402bb003251af13f9b
|
[
"MIT"
] | 85
|
2015-08-08T17:45:28.000Z
|
2021-11-10T09:35:26.000Z
|
from goldsberry.team._Team2 import *
| 36
| 36
| 0.833333
| 5
| 36
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.083333
| 36
| 1
| 36
| 36
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
89fcf7bb68a6abfc17cec5cbcb47ee7e8093c45b
| 154
|
py
|
Python
|
restaurantapi/admin.py
|
samueldaviddelacruz/restaurant_graphql_CRUD
|
9aba57cf68308e22263143f0b3b61dcea9d81199
|
[
"MIT"
] | null | null | null |
restaurantapi/admin.py
|
samueldaviddelacruz/restaurant_graphql_CRUD
|
9aba57cf68308e22263143f0b3b61dcea9d81199
|
[
"MIT"
] | null | null | null |
restaurantapi/admin.py
|
samueldaviddelacruz/restaurant_graphql_CRUD
|
9aba57cf68308e22263143f0b3b61dcea9d81199
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from . import models
# Register your models here.
admin.site.register(models.Category)
admin.site.register(models.Dish)
| 22
| 36
| 0.805195
| 22
| 154
| 5.636364
| 0.545455
| 0.145161
| 0.274194
| 0.370968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 154
| 6
| 37
| 25.666667
| 0.898551
| 0.168831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
c3b4b93df27bc5c1ac1d3481854d98eae74fbaad
| 116
|
py
|
Python
|
esmvalcore/cmor/_fixes/cmip6/cesm2_waccm.py
|
Peter9192/ESMValCore
|
febd96a39480cc837afbf4e1f5b0ef61571af76a
|
[
"Apache-2.0"
] | 1
|
2019-11-28T13:09:42.000Z
|
2019-11-28T13:09:42.000Z
|
esmvalcore/cmor/_fixes/cmip6/cesm2_waccm.py
|
Peter9192/ESMValCore
|
febd96a39480cc837afbf4e1f5b0ef61571af76a
|
[
"Apache-2.0"
] | null | null | null |
esmvalcore/cmor/_fixes/cmip6/cesm2_waccm.py
|
Peter9192/ESMValCore
|
febd96a39480cc837afbf4e1f5b0ef61571af76a
|
[
"Apache-2.0"
] | 1
|
2019-11-29T00:50:30.000Z
|
2019-11-29T00:50:30.000Z
|
"""Fixes for CESM2-WACCM model."""
from .cesm2 import Tas as BaseTas
class Tas(BaseTas):
"""Fixes for tas."""
| 16.571429
| 34
| 0.655172
| 17
| 116
| 4.470588
| 0.647059
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021053
| 0.181034
| 116
| 6
| 35
| 19.333333
| 0.778947
| 0.37069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7f0707a1dd0239bb048230740dc812bcee02df36
| 3,225
|
py
|
Python
|
GuessTheNumberJACKPOT.py
|
yoursamlan/GuessTheOutput
|
e918d5d45122abfabbf01091ba0535054cf58f98
|
[
"MIT"
] | 1
|
2019-06-25T08:13:40.000Z
|
2019-06-25T08:13:40.000Z
|
GuessTheNumberJACKPOT.py
|
yoursamlan/GuessTheOutput
|
e918d5d45122abfabbf01091ba0535054cf58f98
|
[
"MIT"
] | null | null | null |
GuessTheNumberJACKPOT.py
|
yoursamlan/GuessTheOutput
|
e918d5d45122abfabbf01091ba0535054cf58f98
|
[
"MIT"
] | null | null | null |
print("\n\n\n\n\n")
print(" _____ _____ _____ _____ _____ _____ _____ _____ ")
print(" | __|| | || __|| __|| __| |_ _|| | || __|")
print(" | | || | || __||__ ||__ | | | | || __|")
print(" |_____||_____||_____||_____||_____| |_| |__|__||_____|")
print("\n")
print(" U ___ u _ _ _____ ____ _ _ _____ ")
print(" \/ _ \/ U | |u| | |_ _| U| _ \ u U | |u| | |_ _| ")
print(" | | | | \| |\| | | | \| |_) |/ \| |\| | | | ")
print(". -,_| |_| | | |_| | /| |\ | __/ | |_| | /| |\ ")
print(" \_)-\___/ <<\___/ u |_|U |_| <<\___/ u |_|U ")
print(" \\ (__) )( _// \\_ ||>>_ (__) )( _// \\_ ")
print(" (__) (__) (__) (__) (__)__) (__) (__) (__) ")
print("\n presents \n")
print(" /$$$$$ /$$$$$$ /$$$$$$ /$$ /$$ /$$$$$$$ /$$$$$$ /$$$$$$$$")
print(" |__ $$ /$$__ $$ /$$__ $$| $$ /$$/| $$__ $$ /$$__ $$|__ $$__/")
print(" | $$| $$ \ $$| $$ \__/| $$ /$$/ | $$ \ $$| $$ \ $$ | $$ ")
print(" | $$| $$$$$$$$| $$ | $$$$$/ | $$$$$$$/| $$ | $$ | $$ ")
print(" /$$ | $$| $$__ $$| $$ | $$ $$ | $$____/ | $$ | $$ | $$ ")
print("| $$ | $$| $$ | $$| $$ $$| $$\ $$ | $$ | $$ | $$ | $$ ")
print("| $$$$$$/| $$ | $$| $$$$$$/| $$ \ $$| $$ | $$$$$$/ | $$ ")
print(" \______/ |__/ |__/ \______/ |__/ \__/|__/ \______/ |__/ ")
print("\n R O U N D \n")
print("\n with Amlan \n\n")
import random
print('Input your name :')
name = input()
print('Hi ' + name + ' ! \n')
print('Welcome you to the JACKPOT round \n\n')
while True:
print('You have to select a range between 0 to n')
print('Remember n must be greater than 99 \n')
print('Enter the value of n :')
n = input()
if int(n) > 99:
print('Now guess a number between 0 to ' + str(n)+ ' :')
guess = input()
m = random.randint(0,int(n))
if int(guess) == int(m):
print('Voila!! you hit the JACKPOT. Congrats :)')
else:
diff = int(m)-int(guess)
if int(diff) < 0:
print('OH NO! You just have to stop only before '+str(int(diff)*-1)+' steps to hit the JACKPOT')
else:
print('OH NO! You just have to go '+str(int(diff)*+1)+' more steps to hit the JACKPOT')
print('Right guess would be ' + str(m))
#REPLAY-----------------------------------------------------------------
print('Want to play again ? (press y)')
ans = input()
if ans == 'y':
continue
else:
break
else:
print("JACKPOT round will not work if n < 99")
| 47.426471
| 121
| 0.305426
| 223
| 3,225
| 3.426009
| 0.336323
| 0.209424
| 0.235602
| 0.209424
| 0.265707
| 0.204188
| 0.16623
| 0.058901
| 0
| 0
| 0
| 0.006738
| 0.447752
| 3,225
| 67
| 122
| 48.134328
| 0.422235
| 0.022016
| 0
| 0.072727
| 0
| 0.127273
| 0.600888
| 0.011104
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018182
| 0
| 0.018182
| 0.672727
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
7f0faa5381c30abd10334b9296ec14c2418fbc2d
| 301
|
py
|
Python
|
bitmovin_api_sdk/encoding/manifests/dash/periods/adaptationsets/representations/mp4/drm/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/encoding/manifests/dash/periods/adaptationsets/representations/mp4/drm/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/encoding/manifests/dash/periods/adaptationsets/representations/mp4/drm/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.encoding.manifests.dash.periods.adaptationsets.representations.mp4.drm.drm_api import DrmApi
from bitmovin_api_sdk.encoding.manifests.dash.periods.adaptationsets.representations.mp4.drm.dash_mp4_drm_representation_list_query_params import DashMp4DrmRepresentationListQueryParams
| 100.333333
| 185
| 0.913621
| 37
| 301
| 7.135135
| 0.513514
| 0.068182
| 0.113636
| 0.136364
| 0.613636
| 0.613636
| 0.613636
| 0.613636
| 0.613636
| 0.613636
| 0
| 0.013652
| 0.026578
| 301
| 2
| 186
| 150.5
| 0.887372
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6140a6f0755c4d8977a0d2065ff56d7cb56c3beb
| 118
|
py
|
Python
|
titan/tools_pkg/pudb/resources.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
titan/tools_pkg/pudb/resources.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
titan/tools_pkg/pudb/resources.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
from dataclasses import dataclass
from titan.project_pkg.service import Tool
@dataclass
class Pudb(Tool):
pass
| 13.111111
| 42
| 0.788136
| 16
| 118
| 5.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161017
| 118
| 8
| 43
| 14.75
| 0.929293
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
61437615c52f082503a4419bbb4174110894c3ec
| 45
|
py
|
Python
|
cuda_test.py
|
rupeshshrestha123/end2end-asr-pytorch-nepali
|
f9114c38b3c5a43043ec92a20b7dfb278929d19f
|
[
"MIT"
] | null | null | null |
cuda_test.py
|
rupeshshrestha123/end2end-asr-pytorch-nepali
|
f9114c38b3c5a43043ec92a20b7dfb278929d19f
|
[
"MIT"
] | null | null | null |
cuda_test.py
|
rupeshshrestha123/end2end-asr-pytorch-nepali
|
f9114c38b3c5a43043ec92a20b7dfb278929d19f
|
[
"MIT"
] | null | null | null |
import torch
print(torch.cuda.device_count())
| 22.5
| 32
| 0.822222
| 7
| 45
| 5.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044444
| 45
| 2
| 32
| 22.5
| 0.837209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
6161b6f4c0801bb90ffe81539148d70e44baade9
| 6,616
|
py
|
Python
|
lsml/core/test/test_datasets_handler.py
|
sandeepdas05/lsm-crack-width
|
38460e514d48f3424bb8d3bd58cb3eb330153e64
|
[
"BSD-3-Clause"
] | 24
|
2020-01-30T15:53:33.000Z
|
2022-01-15T09:46:24.000Z
|
lsml/core/test/test_datasets_handler.py
|
sandeepdas05/lsm-crack-width
|
38460e514d48f3424bb8d3bd58cb3eb330153e64
|
[
"BSD-3-Clause"
] | null | null | null |
lsml/core/test/test_datasets_handler.py
|
sandeepdas05/lsm-crack-width
|
38460e514d48f3424bb8d3bd58cb3eb330153e64
|
[
"BSD-3-Clause"
] | 13
|
2019-12-05T08:32:11.000Z
|
2022-03-20T03:12:03.000Z
|
import os
import unittest
import numpy
import skfmm
from lsml.core.datasets_handler import DatasetsHandler
class TestDatasetsHandler(unittest.TestCase):
def setUp(self):
self.random_state = numpy.random.RandomState(1234)
def test_no_data_at_init(self):
with self.assertRaises(ValueError):
# If the provided h5 file doesn't exist, then data must be supplied
DatasetsHandler(h5_file='this file does not exist')
def test_data_size_mismatches(self):
n_examples = 3
n_dim = 3
# Create some fake image data
imgs = [
self.random_state.randn(
*self.random_state.randint(10, 41, size=n_dim))
for _ in range(n_examples)
]
# Create some fake segmentation data
segs = [
self.random_state.randn(
*self.random_state.randint(10, 41, size=n_dim)) > 0
for _ in range(n_examples)
]
h5_file = 'tmp.h5'
try:
with self.assertRaises(ValueError):
DatasetsHandler(h5_file=h5_file, imgs=imgs, segs=segs)
finally:
# If the test was successful and the exception was raised,
# then this file should not have been created; but, if the test
# failed, it may have been created and should be deleted.
if os.path.exists(h5_file):
os.remove(h5_file)
def test_data_wrong_img_dtype(self):
n_examples = 3
n_dim = 3
# Create some fake image data
imgs = [
self.random_state.randn(
*self.random_state.randint(10, 41, size=n_dim)) > 0
for _ in range(n_examples)
]
# Create some fake segmentation data
segs = [
imgs[i] > 0
for i in range(n_examples)
]
h5_file = 'tmp.h5'
try:
with self.assertRaises(TypeError):
DatasetsHandler(h5_file=h5_file, imgs=imgs, segs=segs)
finally:
if os.path.exists(h5_file):
os.remove(h5_file)
def test_data_wrong_seg_dtype(self):
n_examples = 3
n_dim = 3
# Create some fake image data
imgs = [
self.random_state.randn(
*self.random_state.randint(10, 41, size=n_dim))
for _ in range(n_examples)
]
# Create some fake segmentation data
segs = [
imgs[i]
for i in range(n_examples)
]
h5_file = 'tmp.h5'
try:
with self.assertRaises(TypeError):
DatasetsHandler(h5_file=h5_file, imgs=imgs, segs=segs)
finally:
if os.path.exists(h5_file):
os.remove(h5_file)
def test_wrong_img_ndim(self):
n_examples = 3
n_dim = 3
# Create some fake image data
imgs = [
self.random_state.randn(
*self.random_state.randint(10, 41, size=n_dim))
for _ in range(n_examples)
]
imgs[0] = self.random_state.randn(
*self.random_state.randint(10, 41, size=n_dim-1))
# Create some fake segmentation data
segs = [
imgs[i] > 0
for i in range(n_examples)
]
h5_file = 'tmp.h5'
try:
with self.assertRaises(ValueError):
DatasetsHandler(h5_file=h5_file, imgs=imgs, segs=segs)
finally:
if os.path.exists(h5_file):
os.remove(h5_file)
def test_wrong_seg_ndim(self):
n_examples = 3
n_dim = 3
# Create some fake image data
imgs = [
self.random_state.randn(
*self.random_state.randint(10, 41, size=n_dim))
for _ in range(n_examples)
]
# Create some fake segmentation data
segs = [
imgs[i] > 0
for i in range(n_examples)
]
segs[0] = numpy.ones((4,), dtype=numpy.bool)
h5_file = 'tmp.h5'
try:
with self.assertRaises(ValueError):
DatasetsHandler(h5_file=h5_file, imgs=imgs, segs=segs)
finally:
if os.path.exists(h5_file):
os.remove(h5_file)
def test_wrong_dx_shape(self):
n_examples = 3
n_dim = 3
# Create some fake image data
imgs = [
self.random_state.randn(
*self.random_state.randint(10, 41, size=n_dim))
for _ in range(n_examples)
]
# Create some fake segmentation data
segs = [
imgs[i] > 0
for i in range(n_examples)
]
h5_file = 'tmp.h5'
dx = self.random_state.rand(n_examples, n_dim+1)
try:
with self.assertRaises(ValueError):
DatasetsHandler(h5_file=h5_file, imgs=imgs, segs=segs, dx=dx)
finally:
if os.path.exists(h5_file):
os.remove(h5_file)
def test_convert_to_hdf5_valid(self):
n_examples = 3
n_dim = 3
# Create some fake image data
imgs = [
self.random_state.randn(
*self.random_state.randint(10, 41, size=n_dim))
for _ in range(n_examples)
]
# Create some fake segmentation data
segs = [
imgs[i] > 0
for i in range(n_examples)
]
h5_file = 'tmp.h5'
dx = self.random_state.rand(n_examples, n_dim)
try:
datasets_mgmt = DatasetsHandler(
h5_file=h5_file, imgs=imgs, segs=segs, dx=dx)
for example in datasets_mgmt.iterate_examples():
index = example.index
# Manually compute the distance transform
dist = skfmm.distance(2*segs[index].astype(float)-1,
dx=dx[index])
# Assert image integrity
self.assertLess(
numpy.linalg.norm(imgs[index] - example.img), 1e-8)
# Assert segmentation integrity
self.assertEqual(0, (segs[index] != example.seg).sum())
# Assert distance transform integrity
self.assertLess(
numpy.linalg.norm(dist - example.dist), 1e-8)
# Assert delta term integrity
self.assertLess(
numpy.linalg.norm(dx[index] - example.dx), 1e-8)
finally:
if os.path.exists(h5_file):
os.remove(h5_file)
| 27.114754
| 79
| 0.533706
| 792
| 6,616
| 4.284091
| 0.159091
| 0.065429
| 0.092838
| 0.066018
| 0.737106
| 0.737106
| 0.703507
| 0.703507
| 0.703507
| 0.703507
| 0
| 0.02929
| 0.380744
| 6,616
| 243
| 80
| 27.226337
| 0.798877
| 0.126663
| 0
| 0.701863
| 0
| 0
| 0.01147
| 0
| 0
| 0
| 0
| 0
| 0.068323
| 1
| 0.055901
| false
| 0
| 0.031056
| 0
| 0.093168
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
61a05ed5473d65aa40f015be222967e88e003bc5
| 126
|
py
|
Python
|
20_unit_testing/lectures/1_testing_equality/functions.py
|
gdia/The-Complete-Python-Course
|
ed375b65242249bc749c3e292a6149f8528b9dcf
|
[
"MIT"
] | 29
|
2019-09-02T21:15:59.000Z
|
2022-01-14T02:20:05.000Z
|
20_unit_testing/lectures/1_testing_equality/functions.py
|
gdia/The-Complete-Python-Course
|
ed375b65242249bc749c3e292a6149f8528b9dcf
|
[
"MIT"
] | 2
|
2020-08-20T05:48:36.000Z
|
2021-06-02T03:16:31.000Z
|
20_unit_testing/lectures/1_testing_equality/functions.py
|
gdia/The-Complete-Python-Course
|
ed375b65242249bc749c3e292a6149f8528b9dcf
|
[
"MIT"
] | 38
|
2019-10-20T14:29:12.000Z
|
2022-03-27T19:50:05.000Z
|
from typing import Union
def divide(dividend: Union[int, float], divisor: Union[int, float]):
return dividend / divisor
| 21
| 68
| 0.730159
| 17
| 126
| 5.411765
| 0.647059
| 0.173913
| 0.282609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 126
| 5
| 69
| 25.2
| 0.87619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 6
|
4ee7b6d3b7d85c654c05186bf0864f21a577b9fb
| 13,181
|
py
|
Python
|
tests/test_struct.py
|
pamwenlong/JceStruct
|
d2185c0b31df5eab381668e4ef6c226cf502a06f
|
[
"MIT"
] | 24
|
2021-01-05T02:31:33.000Z
|
2022-02-19T13:25:46.000Z
|
tests/test_struct.py
|
pamwenlong/JceStruct
|
d2185c0b31df5eab381668e4ef6c226cf502a06f
|
[
"MIT"
] | 2
|
2021-03-21T13:23:09.000Z
|
2021-06-15T13:01:56.000Z
|
tests/test_struct.py
|
pamwenlong/JceStruct
|
d2185c0b31df5eab381668e4ef6c226cf502a06f
|
[
"MIT"
] | 6
|
2021-06-07T09:38:46.000Z
|
2022-01-07T15:39:34.000Z
|
import unittest
from typing import List
from jce import JceField, JceStruct, types
class SsoServerInfo(JceStruct):
server: types.STRING = JceField(jce_id=1)
port: types.INT = JceField(jce_id=2)
location: types.STRING = JceField(jce_id=8)
extra: str
extra_default: str = "extra"
class ServerListResponse(JceStruct):
server_list: types.LIST[SsoServerInfo] = JceField(jce_id=2)
class TestStruct(unittest.TestCase):
def test_struct_encode(self):
byte = SsoServerInfo(
server="rcnb", port=8000, location="rcnb", extra="xxx"
).encode()
self.assertEqual(
byte, bytes.fromhex("16 04 72 63 6e 62 21 1f 40 86 04 72 63 6e 62")
)
def test_struct_decode(self):
a = SsoServerInfo.decode(
bytes.fromhex("16 04 72 63 6e 62 21 1f 40 86 04 72 63 6e 62"),
extra="xxx",
)
b = SsoServerInfo(
server="rcnb", port=8000, location="rcnb", extra="xxx"
)
self.assertEqual(a, b)
def test_struct_nested_encode(self):
raw = ServerListResponse.parse_obj(
{
"server_list": [
{
"server": "193.112.231.60",
"port": 8080,
"location": "sz",
"extra": "xxx",
},
{
"server": "42.81.172.215",
"port": 8080,
"location": "tj",
"extra": "xxx",
},
{
"server": "14.22.3.114",
"port": 8080,
"location": "sz",
"extra": "xxx",
},
{
"server": "14.215.138.110",
"port": 443,
"location": "sz",
"extra": "xxx",
},
{
"server": "42.81.169.100",
"port": 80,
"location": "tj",
"extra": "xxx",
},
{
"server": "114.221.144.76",
"port": 14000,
"location": "sh",
"extra": "xxx",
},
{
"server": "113.96.12.224",
"port": 443,
"location": "sz",
"extra": "xxx",
},
{
"server": "42.81.170.122",
"port": 8080,
"location": "tj",
"extra": "xxx",
},
{
"server": "114.221.148.67",
"port": 80,
"location": "sh",
"extra": "xxx",
},
{
"server": "msfwifi.3g.qq.com",
"port": 8080,
"location": "others",
"extra": "xxx",
},
{
"server": "42.81.172.63",
"port": 80,
"location": "tj",
"extra": "xxx",
},
]
}
)
encoded = bytes.fromhex(
"29000B0A160E3139332E3131322E3233312E3630211F908602737A0B"
"0A160D34322E38312E3137322E323135211F908602746A0B0A160B31"
"342E32322E332E313134211F908602737A0B0A160E31342E3231352E"
"3133382E3131302101BB8602737A0B0A160D34322E38312E3136392E"
"31303020508602746A0B0A160E3131342E3232312E3134342E373621"
"36B0860273680B0A160D3131332E39362E31322E3232342101BB8602"
"737A0B0A160D34322E38312E3137302E313232211F908602746A0B0A"
"160E3131342E3232312E3134382E36372050860273680B0A16116D73"
"66776966692E33672E71712E636F6D211F9086066F74686572730B0A"
"160C34322E38312E3137322E363320508602746A0B"
)
self.assertEqual(raw.encode(), encoded)
def test_struct_nested_decode(self):
raw = ServerListResponse.parse_obj(
{
"server_list": [
{
"server": "193.112.231.60",
"port": 8080,
"location": "sz",
"extra": "xxx",
},
{
"server": "42.81.172.215",
"port": 8080,
"location": "tj",
"extra": "xxx",
},
{
"server": "14.22.3.114",
"port": 8080,
"location": "sz",
"extra": "xxx",
},
{
"server": "14.215.138.110",
"port": 443,
"location": "sz",
"extra": "xxx",
},
{
"server": "42.81.169.100",
"port": 80,
"location": "tj",
"extra": "xxx",
},
{
"server": "114.221.144.76",
"port": 14000,
"location": "sh",
"extra": "xxx",
},
{
"server": "113.96.12.224",
"port": 443,
"location": "sz",
"extra": "xxx",
},
{
"server": "42.81.170.122",
"port": 8080,
"location": "tj",
"extra": "xxx",
},
{
"server": "114.221.148.67",
"port": 80,
"location": "sh",
"extra": "xxx",
},
{
"server": "msfwifi.3g.qq.com",
"port": 8080,
"location": "others",
"extra": "xxx",
},
{
"server": "42.81.172.63",
"port": 80,
"location": "tj",
"extra": "xxx",
},
]
}
)
encoded = bytes.fromhex(
"100129000B0A160E3139332E3131322E3233312E3630211F9030014C"
"5C600870018602737A96066F74686572730B0A160D34322E38312E31"
"37322E323135211F9030014C5C600870018602746A960374656C0B0A"
"160B31342E32322E332E313134211F9030014C5C600870018602737A"
"960374656C0B0A160E31342E3231352E3133382E3131302101BB3001"
"4C5C600870018602737A960374656C0B0A160D34322E38312E313639"
"2E313030205030014C5C600870018602746A960374656C0B0A160E31"
"31342E3232312E3134342E37362136B030014C5C6008700186027368"
"960374656C0B0A160D3131332E39362E31322E3232342101BB30014C"
"5C600870018602737A960374656C0B0A160D34322E38312E3137302E"
"313232211F9030014C5C600870018602746A960374656C0B0A160E31"
"31342E3232312E3134382E3637205030014C5C600870018602736896"
"0374656C0B0A16116D7366776966692E33672E71712E636F6D211F90"
"30014C5C60087C86066F746865727396066F74686572730B0A160C34"
"322E38312E3137322E3633205030014C5C600870018602746A960374"
"656C0B39000B0A160E3139332E3131322E3233312E3630211F903001"
"4C5C600870018602737A96066F74686572730B0A160D34322E38312E"
"3137322E323135211F9030014C5C600870018602746A960374656C0B"
"0A160B31342E32322E332E313134211F9030014C5C60087001860273"
"7A960374656C0B0A160E31342E3231352E3133382E3131302101BB30"
"014C5C600870018602737A960374656C0B0A160D34322E38312E3136"
"392E313030205030014C5C600870018602746A960374656C0B0A160E"
"3131342E3232312E3134342E37362136B030014C5C60087001860273"
"68960374656C0B0A160D3131332E39362E31322E3232342101BB3001"
"4C5C600870018602737A960374656C0B0A160D34322E38312E313730"
"2E313232211F9030014C5C600870018602746A960374656C0B0A160E"
"3131342E3232312E3134382E3637205030014C5C6008700186027368"
"960374656C0B0A16116D7366776966692E33672E71712E636F6D211F"
"9030014C5C60087C86066F746865727396066F74686572730B0A160C"
"34322E38312E3137322E3633205030014C5C600870018602746A9603"
"74656C0B425FE636545138406C7C80029005ACBCC900050A160E3130"
"392E3234342E3132392E3135205030014C500360087C8602737A9606"
"6F74686572730B0A160D34322E38312E3136392E313035205030014C"
"500360087C8602746A960374656C0B0A160C3131332E39362E31332E"
"3434205030014C500360087C8602737A960374656C0B0A160E313134"
"2E3232312E3134342E3232205030014C500360087C86027368960374"
"656C0B0A160D34322E38312E3136392E313035205030014C50036008"
"7C8602746A960374656C0BD900050A160E3130392E3234342E313239"
"2E3135205030014C500360087C8602737A96066F74686572730B0A16"
"0D34322E38312E3136392E313035205030014C500360087C8602746A"
"960374656C0B0A160C3131332E39362E31332E3434205030014C5003"
"60087C8602737A960374656C0B0A160E3131342E3232312E3134342E"
"3232205030014C500360087C86027368960374656C0B0A160D34322E"
"38312E3136392E313035205030014C500360087C8602746A96037465"
"6C0BED000CF90F0CF9100CF9110CF01202F113FF38F6142832303230"
"2D31322D32352032323A35383A32382064656C6976657279696E6720"
"6120706F6C696379"
)
self.assertEqual(ServerListResponse.decode(encoded, extra="xxx"), raw)
def test_sruct_list_decode(self):
encoded = bytes.fromhex(
"100129000B0A160E3139332E3131322E3233312E3630211F9030014C5C600870018602737A96066F74686572730B0A160D34322E38312E3137322E323135211F9030014C5C600870018602746A960374656C0B0A160B31342E32322E332E313134211F9030014C5C600870018602737A960374656C0B0A160E31342E3231352E3133382E3131302101BB30014C5C600870018602737A960374656C0B0A160D34322E38312E3136392E313030205030014C5C600870018602746A960374656C0B0A160E3131342E3232312E3134342E37362136B030014C5C6008700186027368960374656C0B0A160D3131332E39362E31322E3232342101BB30014C5C600870018602737A960374656C0B0A160D34322E38312E3137302E313232211F9030014C5C600870018602746A960374656C0B0A160E3131342E3232312E3134382E3637205030014C5C6008700186027368960374656C0B0A16116D7366776966692E33672E71712E636F6D211F9030014C5C60087C86066F746865727396066F74686572730B0A160C34322E38312E3137322E3633205030014C5C600870018602746A960374656C0B39000B0A160E3139332E3131322E3233312E3630211F9030014C5C600870018602737A96066F74686572730B0A160D34322E38312E3137322E323135211F9030014C5C600870018602746A960374656C0B0A160B31342E32322E332E313134211F9030014C5C600870018602737A960374656C0B0A160E31342E3231352E3133382E3131302101BB30014C5C600870018602737A960374656C0B0A160D34322E38312E3136392E313030205030014C5C600870018602746A960374656C0B0A160E3131342E3232312E3134342E37362136B030014C5C6008700186027368960374656C0B0A160D3131332E39362E31322E3232342101BB30014C5C600870018602737A960374656C0B0A160D34322E38312E3137302E313232211F9030014C5C600870018602746A960374656C0B0A160E3131342E3232312E3134382E3637205030014C5C6008700186027368960374656C0B0A16116D7366776966692E33672E71712E636F6D211F9030014C5C60087C86066F746865727396066F74686572730B0A160C34322E38312E3137322E3633205030014C5C600870018602746A960374656C0B425FE636545138406C7C80029005ACBCC900050A160E3130392E3234342E3132392E3135205030014C500360087C8602737A96066F74686572730B0A160D34322E38312E3136392E313035205030014C500360087C8602746A960374656C0B0A160C3131332E39362E31332E3434205030014C500360087C8602737A960374656C0B0A160E3131342E3232312E3134342E3232205030014C5003600
87C86027368960374656C0B0A160D34322E38312E3136392E313035205030014C500360087C8602746A960374656C0BD900050A160E3130392E3234342E3132392E3135205030014C500360087C8602737A96066F74686572730B0A160D34322E38312E3136392E313035205030014C500360087C8602746A960374656C0B0A160C3131332E39362E31332E3434205030014C500360087C8602737A960374656C0B0A160E3131342E3232312E3134342E3232205030014C500360087C86027368960374656C0B0A160D34322E38312E3136392E313035205030014C500360087C8602746A960374656C0BED000CF90F0CF9100CF9110CF01202F113FF38F61428323032302D31322D32352032323A35383A32382064656C6976657279696E67206120706F6C696379"
)
self.assertEqual(
len(SsoServerInfo.decode_list(encoded, 2, extra="xxx")), 11
)
if __name__ == "__main__":
unittest.main()
| 50.696154
| 2,606
| 0.584781
| 503
| 13,181
| 15.260437
| 0.316103
| 0.02814
| 0.036477
| 0.01876
| 0.167535
| 0.161282
| 0.161282
| 0.161282
| 0.161282
| 0.147994
| 0
| 0.606787
| 0.344966
| 13,181
| 259
| 2,607
| 50.891892
| 0.282256
| 0
| 0
| 0.402439
| 0
| 0
| 0.516198
| 0.433503
| 0
| 1
| 0
| 0
| 0.020325
| 1
| 0.020325
| false
| 0
| 0.012195
| 0
| 0.069106
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f6255b6300e9c51297f4963e202b90bb8ca1cdea
| 171
|
py
|
Python
|
singer_db/markdown_table_parser/__init__.py
|
aaronsteers/singer-index
|
84fdfa2c8d132ed22d8e250e037a6159a50f92dd
|
[
"MIT"
] | 7
|
2020-08-28T20:08:02.000Z
|
2020-10-07T08:24:09.000Z
|
singer_db/markdown_table_parser/__init__.py
|
aaronsteers/singer-index
|
84fdfa2c8d132ed22d8e250e037a6159a50f92dd
|
[
"MIT"
] | null | null | null |
singer_db/markdown_table_parser/__init__.py
|
aaronsteers/singer-index
|
84fdfa2c8d132ed22d8e250e037a6159a50f92dd
|
[
"MIT"
] | null | null | null |
"""A simple markdown table parser."""
from singer_db.markdown_table_parser.main import parse_from_string, parse_from_file
__all__ = [parse_from_string, parse_from_file]
| 28.5
| 83
| 0.824561
| 26
| 171
| 4.846154
| 0.538462
| 0.285714
| 0.301587
| 0.31746
| 0.444444
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093567
| 171
| 5
| 84
| 34.2
| 0.812903
| 0.181287
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
f62faa5e2156701b257592c1d65016e3301d8437
| 244
|
py
|
Python
|
app/public/__init__.py
|
mbugerald/MTN-MobileMoney-Python
|
ff3194e9c4e8c512c7a426c0f29ee3d2da054362
|
[
"MIT"
] | null | null | null |
app/public/__init__.py
|
mbugerald/MTN-MobileMoney-Python
|
ff3194e9c4e8c512c7a426c0f29ee3d2da054362
|
[
"MIT"
] | null | null | null |
app/public/__init__.py
|
mbugerald/MTN-MobileMoney-Python
|
ff3194e9c4e8c512c7a426c0f29ee3d2da054362
|
[
"MIT"
] | null | null | null |
# Lib imports
from flask import Blueprint
from .views.globals import GlobalViews
# Init blueprint
public_blueprint = Blueprint('public_blueprint', __name__, url_prefix='/')
# Register views to blueprint
GlobalViews.register(public_blueprint)
| 24.4
| 74
| 0.811475
| 29
| 244
| 6.551724
| 0.551724
| 0.236842
| 0.252632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110656
| 244
| 9
| 75
| 27.111111
| 0.875576
| 0.221311
| 0
| 0
| 0
| 0
| 0.091398
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
f652f30e1ae521cfff77051510050aab6089ec68
| 1,482
|
py
|
Python
|
data/train/python/f652f30e1ae521cfff77051510050aab6089ec68NaoBroker.py
|
harshp8l/deep-learning-lang-detection
|
2a54293181c1c2b1a2b840ddee4d4d80177efb33
|
[
"MIT"
] | 84
|
2017-10-25T15:49:21.000Z
|
2021-11-28T21:25:54.000Z
|
data/train/python/f652f30e1ae521cfff77051510050aab6089ec68NaoBroker.py
|
vassalos/deep-learning-lang-detection
|
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
|
[
"MIT"
] | 5
|
2018-03-29T11:50:46.000Z
|
2021-04-26T13:33:18.000Z
|
data/train/python/f652f30e1ae521cfff77051510050aab6089ec68NaoBroker.py
|
vassalos/deep-learning-lang-detection
|
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
|
[
"MIT"
] | 24
|
2017-11-22T08:31:00.000Z
|
2022-03-27T01:22:31.000Z
|
import roslib;
roslib.load_manifest( 'nao_core' );
import rospy;
import sys;
from naoqi import ALBroker;
class NaoBroker( object ):
"""
NaoBroker
"""
BROKER_IP = '0.0.0.0';
BROKER_PORT = 0;
def __init__( self, parentIp, parentPort ):
self.parentIp = parentIp;
self.parentPort = parentPort;
# Create a broker.
try:
self.broker = ALBroker( 'nao_utils_broker_{id}'.format( id = id( self ) ), self.BROKER_IP, self.BROKER_PORT, self.parentIp, self.parentPort );
except RuntimeError, e:
rospy.logerr( 'Unable to create a proxy to the NaoQi broker on {parentIp}:{parentPort}.'.format( parentIp = self.parentIp, parentPort = self.parentPort ) );
sys.exit( 1 );
'''
instance = None;
def __init__( self, parentIp, parentPort ):
if( NaoBroker.instance == None ):
self.parentIp = parentIp;
self.parentPort = parentPort;
# Create a broker.
try:
self.broker = ALBroker( 'nao_utils_broker_{id}'.format( id = id( self ) ), self.BROKER_IP, self.BROKER_PORT, self.parentIp, self.parentPort );
except RuntimeError, e:
rospy.logerr( 'Unable to create a proxy to the NaoQi broker on {parentIp}:{parentPort}.'.format( parentIp = self.parentIp, parentPort = self.parentPort ) );
sys.exit( 1 );
NaoBroker.instance = self;
'''
| 34.465116
| 172
| 0.591093
| 163
| 1,482
| 5.239264
| 0.263804
| 0.112412
| 0.103045
| 0.091335
| 0.779859
| 0.711944
| 0.711944
| 0.711944
| 0.711944
| 0.711944
| 0
| 0.006686
| 0.293522
| 1,482
| 43
| 173
| 34.465116
| 0.808978
| 0.010796
| 0
| 0
| 0
| 0
| 0.148966
| 0.062069
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.25
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f65e723772d5e03d2da8e9fcd15739bb5903035d
| 98
|
py
|
Python
|
tests/backend/test_parsers.py
|
UHH-CIW-Project2020/SMARTSexplore
|
3969e33ab0c52ab6da3ed1792696c049786d71ca
|
[
"MIT"
] | 1
|
2021-04-07T12:38:15.000Z
|
2021-04-07T12:38:15.000Z
|
tests/backend/test_parsers.py
|
UHH-CIW-Project2020/SMARTSexplore
|
3969e33ab0c52ab6da3ed1792696c049786d71ca
|
[
"MIT"
] | null | null | null |
tests/backend/test_parsers.py
|
UHH-CIW-Project2020/SMARTSexplore
|
3969e33ab0c52ab6da3ed1792696c049786d71ca
|
[
"MIT"
] | 2
|
2021-03-28T14:40:46.000Z
|
2021-04-09T09:04:39.000Z
|
from smartsexplore.parsers import parse_smartscompare
#def test_parse_smartscompare(session):
| 24.5
| 54
| 0.846939
| 11
| 98
| 7.272727
| 0.818182
| 0.45
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 98
| 3
| 55
| 32.666667
| 0.909091
| 0.387755
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9cf2df40c34d7654d2e18cbf5c4fab92dd18b947
| 110
|
py
|
Python
|
src/subscription/exceptions.py
|
nffdiogosilva/subscription
|
13f22c275a777a1a0e42a8cd8b1221be2c3ff94b
|
[
"MIT"
] | null | null | null |
src/subscription/exceptions.py
|
nffdiogosilva/subscription
|
13f22c275a777a1a0e42a8cd8b1221be2c3ff94b
|
[
"MIT"
] | 89
|
2019-09-07T13:36:38.000Z
|
2021-06-02T00:32:06.000Z
|
src/subscription/exceptions.py
|
nffdiogosilva/subscription
|
13f22c275a777a1a0e42a8cd8b1221be2c3ff94b
|
[
"MIT"
] | null | null | null |
class CustomerAddWebsitePermissionDenied(Exception):
pass
class ObjectDoesNotExist(Exception):
pass
| 15.714286
| 52
| 0.8
| 8
| 110
| 11
| 0.625
| 0.295455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 110
| 6
| 53
| 18.333333
| 0.93617
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
140685ac052e2de3d3d0b797ef46d75475ba4c8b
| 330
|
py
|
Python
|
datamontagweb/root/views.py
|
selintunr/datamongit
|
35f4f7a1ab7e6e2c66b956992e6d91cd68885a38
|
[
"MIT"
] | null | null | null |
datamontagweb/root/views.py
|
selintunr/datamongit
|
35f4f7a1ab7e6e2c66b956992e6d91cd68885a38
|
[
"MIT"
] | null | null | null |
datamontagweb/root/views.py
|
selintunr/datamongit
|
35f4f7a1ab7e6e2c66b956992e6d91cd68885a38
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponseRedirect
import requests
# Create your views here.
def index(request):
return render(request, 'index.html')
def indexen(request):
return render(request, 'index-en.html')
def member(request):
return render(request, 'member.html')
| 20.625
| 49
| 0.724242
| 41
| 330
| 5.829268
| 0.487805
| 0.16318
| 0.238494
| 0.32636
| 0.259414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178788
| 330
| 16
| 49
| 20.625
| 0.881919
| 0.069697
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
145c7a06a10ec1db0084bbef5f086b10db86af53
| 51
|
py
|
Python
|
modules/OCR/__init__.py
|
earlybackhome/You-cannot-guess
|
8674b9c089321835205c75cbc2b36ca4fd9a5b80
|
[
"MIT"
] | 21
|
2017-05-25T09:03:23.000Z
|
2021-12-27T13:02:52.000Z
|
modules/OCR/__init__.py
|
earlybackhome/You-cannot-guess
|
8674b9c089321835205c75cbc2b36ca4fd9a5b80
|
[
"MIT"
] | null | null | null |
modules/OCR/__init__.py
|
earlybackhome/You-cannot-guess
|
8674b9c089321835205c75cbc2b36ca4fd9a5b80
|
[
"MIT"
] | 4
|
2017-06-19T13:50:54.000Z
|
2020-12-12T15:51:37.000Z
|
print('successfully from %s imported'%__package__)
| 25.5
| 50
| 0.803922
| 6
| 51
| 6.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 51
| 1
| 51
| 51
| 0.787234
| 0
| 0
| 0
| 0
| 0
| 0.568627
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.