hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
c83c6a77a92db084f0d2352d15bb41b2909dab5e
19
py
Python
Programming Languages/Python/Theory/100_Python_Exercises/Exercises/Exercise 93/subdirs/level12/level121/6.py
jaswinder9051998/Resources
fd468af37bf24ca57555d153ee64693c018e822e
[ "MIT" ]
101
2021-12-20T11:57:11.000Z
2022-03-23T09:49:13.000Z
Programming Languages/Python/Theory/100_Python_Exercises/Exercises/Exercise 93/subdirs/level12/level121/6.py
jaswinder9051998/Resources
fd468af37bf24ca57555d153ee64693c018e822e
[ "MIT" ]
4
2022-01-12T11:55:56.000Z
2022-02-12T04:53:33.000Z
Programming Languages/Python/Theory/100_Python_Exercises/Exercises/Exercise 93/subdirs/level12/level121/6.py
jaswinder9051998/Resources
fd468af37bf24ca57555d153ee64693c018e822e
[ "MIT" ]
38
2022-01-12T11:56:16.000Z
2022-03-23T10:07:52.000Z
print("Hello User")
19
19
0.736842
3
19
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.052632
19
1
19
19
0.777778
0
0
0
0
0
0.5
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
c856212b1e5a00f4b0faf37002122aee01c6580b
27
py
Python
version.py
SlyryD/OoT-Randomizer
91d5c46ad9c57d2bd6dc9cf568923073686d7ac4
[ "MIT" ]
null
null
null
version.py
SlyryD/OoT-Randomizer
91d5c46ad9c57d2bd6dc9cf568923073686d7ac4
[ "MIT" ]
null
null
null
version.py
SlyryD/OoT-Randomizer
91d5c46ad9c57d2bd6dc9cf568923073686d7ac4
[ "MIT" ]
null
null
null
__version__ = '6.0.36 R-1'
13.5
26
0.62963
6
27
2.166667
1
0
0
0
0
0
0
0
0
0
0
0.217391
0.148148
27
1
27
27
0.347826
0
0
0
0
0
0.37037
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c083089233dcc5ea1de67a9a22c1029d107622d2
121
py
Python
coverage_comment/log.py
OptimeeringAS/python-coverage-comment-action
8ad51106c29cf72cea966495db6234974b611c3f
[ "MIT" ]
2
2022-02-04T09:22:47.000Z
2022-03-13T21:34:17.000Z
coverage_comment/log.py
OptimeeringAS/python-coverage-comment-action
8ad51106c29cf72cea966495db6234974b611c3f
[ "MIT" ]
4
2022-01-30T21:46:43.000Z
2022-03-28T20:32:11.000Z
coverage_comment/log.py
OptimeeringAS/python-coverage-comment-action
8ad51106c29cf72cea966495db6234974b611c3f
[ "MIT" ]
1
2022-03-18T14:44:37.000Z
2022-03-18T14:44:37.000Z
import logging logger = logging.getLogger("coverage_comment") def __getattr__(name): return getattr(logger, name)
15.125
46
0.760331
14
121
6.214286
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.140496
121
7
47
17.285714
0.836538
0
0
0
0
0
0.132231
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
c084930396e29ede4e7f9c57d0553466a6327155
275
py
Python
dashboard/urls.py
kendricktan/laice
fa61218504938710f180131dfa9d2cdafa9ca9d2
[ "MIT" ]
195
2016-10-16T06:17:35.000Z
2020-12-27T18:50:08.000Z
dashboard/urls.py
Jason-Cooke/laice
fa61218504938710f180131dfa9d2cdafa9ca9d2
[ "MIT" ]
12
2016-12-21T06:39:18.000Z
2021-06-10T19:53:07.000Z
dashboard/urls.py
Jason-Cooke/laice
fa61218504938710f180131dfa9d2cdafa9ca9d2
[ "MIT" ]
33
2016-10-24T09:30:09.000Z
2019-08-28T06:46:44.000Z
from django.conf.urls import url from django.conf import settings from django.conf.urls.static import static from . import views urlpatterns = [ url(r'^$', views.dashboard_view, name='dashboard_view'), ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
30.555556
67
0.778182
39
275
5.358974
0.410256
0.143541
0.200957
0.172249
0
0
0
0
0
0
0
0
0.112727
275
9
67
30.555556
0.856557
0
0
0
0
0
0.057971
0
0
0
0
0
0
1
0
false
0
0.571429
0
0.571429
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
c0c42705f3bbe8d8c6c20df6bca269e2213f8786
17,491
py
Python
analyze/maps_plot.py
kajoel/Simulating_quantum_systems_on_an_emulated_quantum_computer
f78d0880e1a75a147ac4d858a01edb9bea5eb625
[ "Apache-2.0" ]
1
2021-01-28T13:29:47.000Z
2021-01-28T13:29:47.000Z
analyze/maps_plot.py
kajoel/Simulating_quantum_systems_on_an_emulated_quantum_computer
f78d0880e1a75a147ac4d858a01edb9bea5eb625
[ "Apache-2.0" ]
null
null
null
analyze/maps_plot.py
kajoel/Simulating_quantum_systems_on_an_emulated_quantum_computer
f78d0880e1a75a147ac4d858a01edb9bea5eb625
[ "Apache-2.0" ]
null
null
null
""" Plotting maps from maps.py @author = Joel """ import itertools import numpy as np from matplotlib import cm import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D # noqa: F401 unused import from core import maps def ball_cube_2d(map, mapi) -> plt.Figure: """ Plots map from ball to cube and its inverse. (Colorful) :param map: Map from ball to cube. :param mapi: Map from cube to ball :return: Figure. """ # Init plot object: fig, ax = plt.subplots(2, 2) # ### Ball to cube ### num_grid = 9 n = 8 * (num_grid - 1) + 1 grid_idx = np.linspace(0, n - 1, num_grid, dtype=int) r_line = np.linspace(0, 1, n) t_line = np.linspace(0, 2*np.pi, n) r_ball, t_ball = np.meshgrid(r_line, t_line, indexing='ij') color = r_ball # Map x_cube_from_ball = np.zeros(r_ball.shape) y_cube_from_ball = np.zeros(r_ball.shape) for i, j in itertools.product(range(r_line.size), range(t_line.size)): x_cube_from_ball[i, j], y_cube_from_ball[i, j] = map( np.array([r_line[i]*np.cos(t_line[j]), r_line[i]*np.sin(t_line[j])]) ) # Plot style_r = ['--', '-', '-', '-'] style_t = [':', '--', '-.', '-'] ax[0, 0].pcolormesh(r_ball*np.cos(t_ball), r_ball*np.sin(t_ball), color, cmap='viridis') ax[0, 1].pcolormesh(x_cube_from_ball, y_cube_from_ball, color, cmap='viridis') for k, t_idx in enumerate(grid_idx): ax[0, 0].plot(r_ball[t_idx, :]*np.cos(t_ball[t_idx, :]), r_ball[t_idx, :]*np.sin(t_ball[t_idx, :]), color='r', linewidth=1, linestyle=style_t[divmult(k)]) ax[0, 1].plot(x_cube_from_ball[t_idx, :], y_cube_from_ball[t_idx, :], color='r', linewidth=1, linestyle=style_t[divmult(k)]) for k, r_idx in enumerate(grid_idx): ax[0, 0].plot(r_ball[:, r_idx] * np.cos(t_ball[:, r_idx]), r_ball[:, r_idx] * np.sin(t_ball[:, r_idx]), color='r', linewidth=1, linestyle=style_r[divmult(k)]) ax[0, 1].plot(x_cube_from_ball[:, r_idx], y_cube_from_ball[:, r_idx], color='r', linewidth=1, linestyle=style_r[divmult(k)]) ax[0, 0].axis('equal') ax[0, 0].axis(1.1*np.array([-1, 1, -1, 1])) ax[0, 1].axis('equal') ax[0, 
1].axis(1.1 * np.array([-1, 1, -1, 1])) # ### Cube to ball ### num_grid = 9 n = 8*(num_grid-1) + 1 grid_idx = np.linspace(0, n - 1, num_grid, dtype=int) x_line = np.linspace(-1, 1, n) y_line = np.linspace(-1, 1, n) x_cube, y_cube = np.meshgrid(x_line, y_line, indexing='ij') color = -(x_cube**2 + y_cube**2) # Map x_ball_from_cube = np.zeros(x_cube.shape) y_ball_from_cube = np.zeros(y_cube.shape) for i, j in itertools.product(range(x_line.size), range(y_line.size)): x_ball_from_cube[i, j], y_ball_from_cube[i, j] = mapi( np.array([x_line[i], y_line[j]]) ) # Plot style_x = [':', '--', '-.', '-'] style_y = [':', '--', '-.', '-'] ax[1, 0].pcolormesh(x_cube, y_cube, color, cmap='plasma') ax[1, 1].pcolormesh(x_ball_from_cube, y_ball_from_cube, color, cmap='plasma') for k, x_idx in enumerate(grid_idx): ax[1, 0].plot(x_cube[x_idx, :], y_cube[x_idx, :], color='g', linestyle=style_x[divmult(k)]) ax[1, 1].plot(x_ball_from_cube[x_idx, :], y_ball_from_cube[x_idx, :], color='g', linestyle=style_x[divmult(k)]) for k, y_idx in enumerate(grid_idx): ax[1, 0].plot(x_cube[:, y_idx], y_cube[:, y_idx], color='g', linestyle=style_y[divmult(k)]) ax[1, 1].plot(x_ball_from_cube[:, y_idx], y_ball_from_cube[:, y_idx], color='g', linestyle=style_y[divmult(k)]) ax[1, 0].axis('equal') ax[1, 0].axis(1.1 * np.array([-1, 1, -1, 1])) ax[1, 1].axis('equal') ax[1, 1].axis(1.1 * np.array([-1, 1, -1, 1])) return fig def ball_cube_2d_2(map, mapi) -> plt.Figure: """ Plots map from ball to cube and its inverse. (Not colorful) :param map: Map from ball to cube. :param mapi: Map from cube to ball :return: Figure. 
""" # Init plot object: fig = plt.figure() ax = np.array([[fig.add_subplot(2, 2, 1), fig.add_subplot(2, 2, 2)], [fig.add_subplot(2, 2, 3), fig.add_subplot(2, 2, 4)]]) # ### Ball to cube ### grids = 17 r_line = np.linspace(0, 1, grids) t_line = np.linspace(0, 2 * np.pi, grids) r_ball, t_ball = np.meshgrid(r_line, t_line, indexing='ij') # Map x_cube_from_ball = np.zeros(r_ball.shape) y_cube_from_ball = np.zeros(r_ball.shape) for i, j in itertools.product(range(r_line.size), range(t_line.size)): x_cube_from_ball[i, j], y_cube_from_ball[i, j] = map( np.array( [r_line[i] * np.cos(t_line[j]), r_line[i] * np.sin(t_line[j])]) ) # Plot ax[0, 0].pcolormesh(r_ball * np.cos(t_ball), r_ball * np.sin(t_ball), np.ones(r_ball.shape), cmap=cm.binary, edgecolors='C0') ax[0, 1].pcolormesh(x_cube_from_ball, y_cube_from_ball, np.ones(r_ball.shape), cmap=cm.binary, edgecolors='C0') ax[0, 0].axis('equal') ax[0, 0].axis(1.1 * np.array([-1, 1, -1, 1])) ax[0, 1].axis('equal') ax[0, 1].axis(1.1 * np.array([-1, 1, -1, 1])) # ### Cube to ball ### grids = 17 x_line = np.linspace(-1, 1, grids) y_line = np.linspace(-1, 1, grids) x_cube, y_cube = np.meshgrid(x_line, y_line, indexing='ij') # Map x_ball_from_cube = np.zeros(r_ball.shape) y_ball_from_cube = np.zeros(r_ball.shape) for i, j in itertools.product(range(x_line.size), range(y_line.size)): x_ball_from_cube[i, j], y_ball_from_cube[i, j] = mapi( np.array([x_cube[i, j], y_cube[i, j]]) ) # Plot ax[1, 0].pcolormesh(x_cube, y_cube, np.ones(x_cube.shape), cmap=cm.binary, edgecolors='C0') ax[1, 1].pcolormesh(x_ball_from_cube, y_ball_from_cube, np.ones(r_ball.shape), cmap=cm.binary, edgecolors='C0') ax[1, 0].axis('equal') ax[1, 0].axis(1.1 * np.array([-1, 1, -1, 1])) ax[1, 1].axis('equal') ax[1, 1].axis(1.1 * np.array([-1, 1, -1, 1])) return fig def sphere_ball(map, mapi) -> plt.Figure: """ Plots map from (3d) sphere to (2d) ball and its inverse. :param map: Map from sphere to ball. :param mapi: Map from ball to sphere. :return: Figure. 
""" # Init plot object: fig = plt.figure() ax = np.array([[fig.add_subplot(2, 2, 1, projection='3d'), fig.add_subplot(2, 2, 2)], [fig.add_subplot(2, 2, 3), fig.add_subplot(2, 2, 4, projection='3d')]]) # ### Sphere to ball ### grids = 16 + 1 # num_grid = 9 # m = 8 # n = m * (num_grid - 1) + 1 # grid_idx = np.linspace(0, n - 1, num_grid, dtype=int) # grid_2d = np.ix_(grid_idx, grid_idx) # TODO: idea for grid on surface t_line = np.linspace(1e-2, np.pi, grids) # TODO: change grids to n p_line = np.linspace(0, 2 * np.pi, grids) # change grids to n t_sphere, p_sphere = np.meshgrid(t_line, p_line, indexing='ij') # color = (np.pi-t_sphere) # Color map for surface # color = color/np.max(color)+0.2 # Map x_ball_from_sphere = np.zeros(t_sphere.shape) y_ball_from_sphere = np.zeros(t_sphere.shape) for i, j in itertools.product(range(t_line.size), range(p_line.size)): x_ball_from_sphere[i, j], y_ball_from_sphere[i, j] = map( np.array( [np.sin(t_line[i]) * np.cos(p_line[j]), np.sin(t_line[i]) * np.sin(p_line[j]), np.cos(t_line[i])]) ) # Plot # style_t = ['--', '-', '-', '-'] # Gridstyles # style_p = [':', '--', '-.', '-'] # cmap = cm.ScalarMappable(cmap='viridis') # Colomap # cmap.set_array(color) # cmap.autoscale() # Surface plot # ax[0, 0].plot_surface(np.sin(t_sphere) * np.cos(p_sphere), # np.sin(t_sphere) * np.sin(p_sphere), # np.cos(t_sphere), # linewidth=10, rcount=1000, ccount=1000, # edgecolor=edge_color, # antialiased=False, # facecolors=cmap.to_rgba(color)) # Heatmap corresponding to surface plot: # ax[0, 1].pcolormesh(x_ball_from_sphere, y_ball_from_sphere, color, # cmap='viridis') # Grid plot: ax[0, 0].plot_wireframe(np.sin(t_sphere) * np.cos(p_sphere), np.sin(t_sphere) * np.sin(p_sphere), np.cos(t_sphere)) ax[0, 1].pcolormesh(x_ball_from_sphere, y_ball_from_sphere, np.ones(t_sphere.shape), cmap=cm.binary, edgecolors='C0') ax[0, 0].set_aspect('equal') ax[0, 1].axis('equal') ax[0, 1].axis(1.1 * np.array([-1, 1, -1, 1])) # ### Ball to sphere ### grids = 16 + 1 r_line = 
np.linspace(0, 1-1e-2, grids) # TODO: change grids to n t_line = np.linspace(0, 2 * np.pi, grids) # change grids to n r_ball, t_ball = np.meshgrid(r_line, t_line, indexing='ij') # Map x_sphere_from_ball = np.zeros(r_ball.shape) y_sphere_from_ball = np.zeros(r_ball.shape) z_sphere_from_ball = np.zeros(r_ball.shape) for i, j in itertools.product(range(r_line.size), range(t_line.size)): x_sphere_from_ball[i, j], \ y_sphere_from_ball[i, j], \ z_sphere_from_ball[i, j] = mapi( np.array([r_line[i] * np.cos(t_line[j]), r_line[i] * np.sin(t_line[j])]) ) # Plot ax[1, 0].pcolormesh(r_ball * np.cos(t_ball), r_ball * np.sin(t_ball), np.ones(r_ball.shape), cmap=cm.binary, edgecolors='C0') ax[1, 1].plot_wireframe(x_sphere_from_ball, y_sphere_from_ball, z_sphere_from_ball) ax[1, 1].set_aspect('equal') ax[1, 0].axis('equal') ax[1, 0].axis(1.1 * np.array([-1, 1, -1, 1])) return fig def sphere_cube(map, mapi) -> plt.Figure: """ Plots map from (3d) sphere to (2d) cube and its inverse. :param map: Map from sphere to cube. :param mapi: Map from cube to sphere. :return: Figure. 
""" # Init plot object: fig = plt.figure() ax = np.array([[fig.add_subplot(2, 2, 1, projection='3d'), fig.add_subplot(2, 2, 2)], [fig.add_subplot(2, 2, 3), fig.add_subplot(2, 2, 4, projection='3d')]]) # ### Sphere to cube ### grids = 16 + 1 t_line = np.linspace(1e-2, np.pi, grids) p_line = np.linspace(0, 2 * np.pi, grids) t_sphere, p_sphere = np.meshgrid(t_line, p_line, indexing='ij') # Map x_cube_from_sphere = np.zeros(t_sphere.shape) y_cube_from_sphere = np.zeros(t_sphere.shape) for i, j in itertools.product(range(t_line.size), range(p_line.size)): x_cube_from_sphere[i, j], y_cube_from_sphere[i, j] = map( np.array( [np.sin(t_line[i]) * np.cos(p_line[j]), np.sin(t_line[i]) * np.sin(p_line[j]), np.cos(t_line[i])]) ) # Plot ax[0, 0].plot_wireframe(np.sin(t_sphere) * np.cos(p_sphere), np.sin(t_sphere) * np.sin(p_sphere), np.cos(t_sphere)) ax[0, 1].pcolormesh(x_cube_from_sphere, y_cube_from_sphere, np.ones(t_sphere.shape), cmap=cm.binary, edgecolors='C0') ax[0, 0].set_aspect('equal') ax[0, 1].axis('equal') ax[0, 1].axis(1.1 * np.array([-1, 1, -1, 1])) # ### Cube to sphere ### grids = 16 + 1 x_line = np.linspace(-1, 1, grids) # +- 1e-2 if error y_line = np.linspace(-1, 1, grids) # +- 1e-2 if error x_cube, y_cube = np.meshgrid(x_line, y_line, indexing='ij') # Map x_sphere_from_cube = np.zeros(x_cube.shape) y_sphere_from_cube = np.zeros(x_cube.shape) z_sphere_from_cube = np.zeros(x_cube.shape) for i, j in itertools.product(range(x_line.size), range(y_line.size)): x_sphere_from_cube[i, j], \ y_sphere_from_cube[i, j], \ z_sphere_from_cube[i, j] = mapi( np.array([x_line[i], y_line[j]]) ) # Plot ax[1, 0].pcolormesh(x_cube, y_cube, np.ones(x_cube.shape), cmap=cm.binary, edgecolors='C0') ax[1, 1].plot_wireframe(x_sphere_from_cube, y_sphere_from_cube, z_sphere_from_cube) ax[1, 1].set_aspect('equal') ax[1, 0].axis('equal') ax[1, 0].axis(1.1 * np.array([-1, 1, -1, 1])) return fig def sphere_plane(map, mapi) -> plt.Figure: """ Plots map from (3d) sphere to (2d) plane and its 
inverse. :param map: Map from sphere to plane. :param mapi: Map from plane to sphere. :return: Figure. """ # Init plot object: fig = plt.figure() ax = np.array([[fig.add_subplot(2, 2, 1, projection='3d'), fig.add_subplot(2, 2, 2)], [fig.add_subplot(2, 2, 3), fig.add_subplot(2, 2, 4, projection='3d')]]) # ### Sphere to plane ### grids = 16 + 1 t_line = np.linspace(1e-2, np.pi, grids) p_line = np.linspace(0, 2 * np.pi, grids) t_sphere, p_sphere = np.meshgrid(t_line, p_line, indexing='ij') # Map x_plane_from_sphere = np.zeros(t_sphere.shape) y_plane_from_sphere = np.zeros(t_sphere.shape) for i, j in itertools.product(range(t_line.size), range(p_line.size)): x_plane_from_sphere[i, j], y_plane_from_sphere[i, j] = map( np.array( [np.sin(t_line[i]) * np.cos(p_line[j]), np.sin(t_line[i]) * np.sin(p_line[j]), np.cos(t_line[i])]) ) # Plot ax[0, 0].plot_wireframe(np.sin(t_sphere) * np.cos(p_sphere), np.sin(t_sphere) * np.sin(p_sphere), np.cos(t_sphere)) ax[0, 1].pcolormesh(x_plane_from_sphere, y_plane_from_sphere, np.ones(t_sphere.shape), cmap=cm.binary, edgecolors='C0') ax[0, 0].set_aspect('equal') ax[0, 1].axis('equal') ax[0, 1].axis(1.1 * np.array([-5, 5, -5, 5])) # ### Plane to sphere ### grids = 16 + 1 r_line = np.linspace(0, 1, int(grids/2)) r_line = np.concatenate((r_line, np.logspace(0, 1, int(grids/2)))) t_line = np.linspace(0, 2 * np.pi, grids) r_plane, t_plane = np.meshgrid(r_line, t_line, indexing='ij') # Map x_sphere_from_plane = np.zeros(r_plane.shape) y_sphere_from_plane = np.zeros(r_plane.shape) z_sphere_from_plane = np.zeros(r_plane.shape) for i, j in itertools.product(range(r_line.size), range(t_line.size)): x_sphere_from_plane[i, j], \ y_sphere_from_plane[i, j], \ z_sphere_from_plane[i, j] = mapi( np.array([r_line[i] * np.cos(t_line[j]), r_line[i] * np.sin(t_line[j])]) ) # Plot ax[1, 0].pcolormesh(r_plane * np.cos(t_plane), r_plane * np.sin(t_plane), np.ones(r_plane.shape), cmap=cm.binary, edgecolors='C0') ax[1, 1].plot_wireframe(x_sphere_from_plane, 
y_sphere_from_plane, z_sphere_from_plane) ax[1, 1].set_aspect('equal') ax[1, 0].axis('equal') ax[1, 0].axis(1.1 * np.array([-1, 1, -1, 1])) return fig def divmult(n: int, m: int = 2) -> int: """ Checks how many times n is divisible by m. Returns -1 if n=0 @author = Joel :param n: Numerator :param m: Denominator :return: Multiplicity """ if n < 0: raise ValueError('Only non-negative integers are supported for n.') elif n == 0: return -1 q, r = divmod(n, m) count = 0 while not r: count += 1 q, r = divmod(q, m) return count def _main_1(): fig_1 = ball_cube_2d_2(maps.ball_to_cube_linear, maps.cube_to_ball_linear) fig_1.suptitle('2D ball to cube (linear)') fig_2 = sphere_ball(lambda x: maps.sphere_to_ball(x, pole=2), lambda x: maps.ball_to_sphere(x, pole=2)) fig_2.suptitle('3D sphere to 2D ball') fig_3 = sphere_cube(lambda x: maps.ball_to_cube_linear( maps.sphere_to_ball(x, pole=2)), lambda x: maps.ball_to_sphere( maps.cube_to_ball_linear(x), pole=2)) fig_3.suptitle('3D sphere to 2D cube') fig_4 = sphere_plane(lambda x: maps.sphere_to_plane(x, pole=2), lambda x: maps.plane_to_sphere(x, pole=2)) fig_4.suptitle('3D sphere to 2D plane') if __name__ == '__main__': # _main_1() ball_cube_2d_2(lambda x: maps.ball_to_cube_norm(x, k=1), lambda x: maps.cube_to_ball_norm(x, k=1)) # plt.show()
36.439583
80
0.545595
2,775
17,491
3.220901
0.062703
0.017006
0.008727
0.025062
0.830387
0.771202
0.745916
0.736742
0.685388
0.667487
0
0.035543
0.295466
17,491
479
81
36.515658
0.689767
0.136127
0
0.554839
0
0
0.023608
0
0
0
0
0.002088
0
1
0.022581
false
0
0.019355
0
0.064516
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
8d0374e26c91629f53ed381781a0eb22f7c3ed06
65
py
Python
python3/trim/trim1.py
jtraver/dev
c7cd2181594510a8fa27e7325566ed2d79371624
[ "MIT" ]
null
null
null
python3/trim/trim1.py
jtraver/dev
c7cd2181594510a8fa27e7325566ed2d79371624
[ "MIT" ]
null
null
null
python3/trim/trim1.py
jtraver/dev
c7cd2181594510a8fa27e7325566ed2d79371624
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 #!/usr/bin/python print(" this".strip())
13
22
0.646154
10
65
4.2
0.8
0.285714
0
0
0
0
0
0
0
0
0
0.016949
0.092308
65
4
23
16.25
0.694915
0.569231
0
0
0
0
0.192308
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
9b1ea2bbe6f8a86f01cc9b2abaedbccb89837488
33
py
Python
archivenow/__init__.py
MasterScott/archivenow
f71d52d99f9f1b3ac5a10bdc3de20f18180e0872
[ "MIT" ]
6
2017-02-09T23:59:30.000Z
2021-04-07T03:13:07.000Z
archivenow/__init__.py
MasterScott/archivenow
f71d52d99f9f1b3ac5a10bdc3de20f18180e0872
[ "MIT" ]
8
2017-02-10T15:02:52.000Z
2017-02-16T05:16:48.000Z
archivenow/__init__.py
MasterScott/archivenow
f71d52d99f9f1b3ac5a10bdc3de20f18180e0872
[ "MIT" ]
5
2017-02-10T05:02:07.000Z
2017-02-21T02:12:42.000Z
__version__ = '2019.7.27.2.35.46'
33
33
0.69697
7
33
2.714286
1
0
0
0
0
0
0
0
0
0
0
0.387097
0.060606
33
1
33
33
0.225806
0
0
0
0
0
0.5
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
f198be682e202c60e104e11275b5b70cc065ed90
4,826
py
Python
waitlist/test/test_tasks.py
rodbv/kamu
f390d91f7d7755b49176cf5d504648e3fe572237
[ "MIT" ]
70
2018-05-23T16:44:44.000Z
2021-12-05T21:48:10.000Z
waitlist/test/test_tasks.py
rodbv/kamu
f390d91f7d7755b49176cf5d504648e3fe572237
[ "MIT" ]
122
2018-10-06T21:31:24.000Z
2020-11-09T15:04:56.000Z
waitlist/test/test_tasks.py
rodbv/kamu
f390d91f7d7755b49176cf5d504648e3fe572237
[ "MIT" ]
50
2018-05-23T05:49:10.000Z
2021-11-22T07:53:42.000Z
from django.contrib.auth.models import User from django.core import mail from django.test import TestCase from django.utils import timezone from books.models import Book, BookCopy, Library from waitlist.models import WaitlistItem from waitlist.tasks import send_new_user_on_waitlist_notification, send_waitlist_book_available_notification class NewUserOnWaitListNotificationTaskTest(TestCase): def setUp(self): self.book = Book.objects.create( author="Author", title="Clean Code", subtitle="The subtitle", publication_date=timezone.now()) self.library = Library.objects.create(name="Santiago", slug="slug") self.user = User.objects.create( username="claudia", email="claudia@gmail.com", first_name="Claudia", last_name="Silva") self.waitlist_item = WaitlistItem.objects.create( book=self.book, library=self.library, user=self.user, added_date=timezone.now()) def test_sends_waitlist_email_to_borrowers_of_book(self): borrower1 = User.objects.create(username="person 1", email="person1@gmail.com") borrower2 = User.objects.create(username="person 2", email="person2@gmail.com") BookCopy.objects.create(book=self.book, library=self.library, user=borrower1) BookCopy.objects.create(book=self.book, library=self.library, user=borrower2) send_new_user_on_waitlist_notification(self.waitlist_item.id) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [borrower1.email, borrower2.email]) def test_does_not_send_waitlist_email_to_a_book_without_borrowers(self): BookCopy.objects.create(book=self.book, library=self.library, user=None) send_new_user_on_waitlist_notification(self.waitlist_item.id) self.assertEqual(len(mail.outbox), 0) def test_sets_the_subject_with_the_book_title_and_waitlist_user_name(self): borrower1 = User.objects.create(username="person 1", email="person1@gmail.com") BookCopy.objects.create(book=self.book, library=self.library, user=borrower1) send_new_user_on_waitlist_notification(self.waitlist_item.id) self.assertEqual(mail.outbox[0].subject, 'Claudia 
Silva is waiting for the book Clean Code on Kamu') def test_includes_details_in_email_body(self): borrower1 = User.objects.create(username="person 1", email="person1@gmail.com") BookCopy.objects.create(book=self.book, library=self.library, user=borrower1) send_new_user_on_waitlist_notification(self.waitlist_item.id) email_body = mail.outbox[0].body self.assertIn('Claudia Silva is waiting for the book Clean Code', email_body) self.assertIn('no other copies are available in Santiago', email_body) class BookAvailableWaitListNotificationTaskTest(TestCase): def setUp(self): self.book = Book.objects.create( author="Author", title="Clean Code", subtitle="The subtitle", publication_date=timezone.now()) self.library = Library.objects.create(name="Santiago", slug="slug") self.user = User.objects.create( username="claudia", email="claudia@gmail.com", first_name="Claudia", last_name="Silva") self.book_copy = self.book.bookcopy_set.create(library=self.library, user=None) def test_sends_email_to_users_on_waitlist_to_notify_a_book_copy_is_available(self): other_user = User.objects.create(username="person 1", email="person1@gmail.com") WaitlistItem.objects.create(book=self.book, library=self.library, user=self.user, added_date=timezone.now()) WaitlistItem.objects.create( book=self.book, library=self.library, user=other_user, added_date=timezone.now()) send_waitlist_book_available_notification(self.book_copy.id) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [self.user.email, other_user.email]) def test_does_not_send_email_to_a_book_with_empty_waitlist(self): send_waitlist_book_available_notification(self.book_copy.id) self.assertEqual(len(mail.outbox), 0) def test_sets_the_subject_with_the_book_title(self): WaitlistItem.objects.create(book=self.book, library=self.library, user=self.user, added_date=timezone.now()) send_waitlist_book_available_notification(self.book_copy.id) self.assertEqual(mail.outbox[0].subject, 'Clean Code is now available on Kamu') 
def test_includes_details_in_email_body(self): WaitlistItem.objects.create(book=self.book, library=self.library, user=self.user, added_date=timezone.now()) send_waitlist_book_available_notification(self.book_copy.id) email_body = mail.outbox[0].body self.assertIn('Clean Code, that is on your waitlist, is now available in Santiago', email_body)
49.244898
116
0.740986
650
4,826
5.281538
0.153846
0.079522
0.057676
0.070492
0.787358
0.741334
0.718322
0.707836
0.707836
0.66356
0
0.007319
0.150642
4,826
97
117
49.752577
0.830202
0
0
0.544118
0
0
0.108394
0
0
0
0
0
0.161765
1
0.147059
false
0
0.102941
0
0.279412
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
f1afdac690c753a2457d3f6ada436da59570eb7f
183
py
Python
busy_beaver/models/__init__.py
ZaxR/busy-beaver
ffe1250d0156f71d1053f37c8070ca0dd888348f
[ "MIT" ]
null
null
null
busy_beaver/models/__init__.py
ZaxR/busy-beaver
ffe1250d0156f71d1053f37c8070ca0dd888348f
[ "MIT" ]
null
null
null
busy_beaver/models/__init__.py
ZaxR/busy-beaver
ffe1250d0156f71d1053f37c8070ca0dd888348f
[ "MIT" ]
null
null
null
# Helpers from .base import BaseModel # noqa # Models from .user import User # noqa from .api_user import ApiUser # noqa # Key-Value Store from .kv_store import kv_store # noqa
18.3
38
0.73224
28
183
4.678571
0.5
0.152672
0
0
0
0
0
0
0
0
0
0
0.202186
183
9
39
20.333333
0.89726
0.273224
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
f1b383e7e3cf48a71551c51b2fc47a774578e51c
142
py
Python
extrarumps/utility.py
raphaelhuefner/allbar
594ab75bc2f7324cbdfddfd67a6d9fbfe18f08f8
[ "MIT" ]
8
2018-08-21T21:56:36.000Z
2022-03-21T23:43:44.000Z
extrarumps/utility.py
raphaelhuefner/allbar
594ab75bc2f7324cbdfddfd67a6d9fbfe18f08f8
[ "MIT" ]
2
2018-05-10T10:29:41.000Z
2018-05-10T10:30:43.000Z
extrarumps/utility.py
raphaelhuefner/allbar
594ab75bc2f7324cbdfddfd67a6d9fbfe18f08f8
[ "MIT" ]
1
2018-05-15T17:51:03.000Z
2018-05-15T17:51:03.000Z
import base64


def get_data_from_base64_data_url(data_url):
    """Return the decoded bytes carried by a base64 ``data:`` URL.

    The URL is split at the first comma; everything before it (the media-type
    header) is discarded, and the remainder is base64-decoded.
    """
    # Split once only: the payload itself may legally contain further commas.
    _, payload = data_url.split(',', 1)
    return base64.b64decode(payload)
23.666667
44
0.753521
21
142
4.761905
0.619048
0.21
0
0
0
0
0
0
0
0
0
0.07377
0.140845
142
5
45
28.4
0.745902
0
0
0
0
0
0.007042
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
f1f848db3e6b568360f7fccd36f99c54849160c9
150
py
Python
src/torch_opstractor/test.py
microsoft/opstractor
86160ae627a53b986c466694a3bb96284b670e89
[ "MIT" ]
1
2021-10-09T05:35:11.000Z
2021-10-09T05:35:11.000Z
src/torch_opstractor/test.py
microsoft/opstractor
86160ae627a53b986c466694a3bb96284b670e89
[ "MIT" ]
null
null
null
src/torch_opstractor/test.py
microsoft/opstractor
86160ae627a53b986c466694a3bb96284b670e89
[ "MIT" ]
null
null
null
# from labml_nn.capsule_networks.mnist import main from labml_nn.adaptive_computation.ponder_net.experiment import main import torch_opstractor main()
37.5
68
0.873333
22
150
5.681818
0.681818
0.144
0.176
0
0
0
0
0
0
0
0
0
0.073333
150
4
69
37.5
0.899281
0.32
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
9e2f3d2852cd1fe5f73fc79fc8e0b7909e83ef41
59
py
Python
codechef/beginner/FIRST.py
muj-programmer/cp_is_awesome
a0974bcda6bf5a3041af8237ddb04fd804ccde6d
[ "MIT" ]
3
2019-08-28T18:00:46.000Z
2019-08-30T18:38:24.000Z
codechef/beginner/FIRST.py
muj-programmer/cp_is_awesome
a0974bcda6bf5a3041af8237ddb04fd804ccde6d
[ "MIT" ]
1
2019-08-30T18:41:46.000Z
2019-09-11T20:13:01.000Z
codechef/beginner/FIRST.py
muj-programmer/cp_is_awesome
a0974bcda6bf5a3041af8237ddb04fd804ccde6d
[ "MIT" ]
null
null
null
# AC 26/08/2019 17:58:21 IST # n = int(input()) print(n)
9.833333
30
0.576271
13
59
2.615385
0.923077
0
0
0
0
0
0
0
0
0
0
0.297872
0.20339
59
5
31
11.8
0.425532
0.440678
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
9e305a1f33e5d290fac897910502ef607e94bd9d
168
py
Python
ass3-airplane_det/mmdet/models/roi_extractors/__init__.py
Rooooyy/BUAA_PR
5b4d12dc786c3fdc469ae59e0b099e8095aee550
[ "BSD-2-Clause" ]
2
2021-06-09T16:21:53.000Z
2021-08-30T02:31:56.000Z
mmdet/models/roi_extractors/__init__.py
jedibobo/S2ANet-custom-dataset
869b196d4c33713a5c61bd80064d10a453fb76ef
[ "Apache-2.0" ]
null
null
null
mmdet/models/roi_extractors/__init__.py
jedibobo/S2ANet-custom-dataset
869b196d4c33713a5c61bd80064d10a453fb76ef
[ "Apache-2.0" ]
null
null
null
from .single_level import SingleRoIExtractor
from .single_level_rotated import SingleRoIExtractorRotated

# Public API of the roi_extractors subpackage.
__all__ = ['SingleRoIExtractor', 'SingleRoIExtractorRotated']
33.6
61
0.863095
14
168
9.857143
0.571429
0.144928
0.217391
0
0
0
0
0
0
0
0
0
0.077381
168
4
62
42
0.890323
0
0
0
0
0
0.255952
0.14881
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
7b495ebce8e00e9f2e571a3d09a20bb2e0e8e94e
134
py
Python
fitnessFolks/screening/admin.py
Programming-Club-Ahmedabad-University/wellness
4cc06497ce2f2a6019c0fa4595940703605ffb0a
[ "MIT" ]
1
2020-10-07T09:51:01.000Z
2020-10-07T09:51:01.000Z
fitnessFolks/screening/admin.py
Programming-Club-Ahmedabad-University/wellness
4cc06497ce2f2a6019c0fa4595940703605ffb0a
[ "MIT" ]
1
2020-10-15T07:58:16.000Z
2020-10-15T07:58:16.000Z
fitnessFolks/screening/admin.py
Programming-Club-Ahmedabad-University/wellness
4cc06497ce2f2a6019c0fa4595940703605ffb0a
[ "MIT" ]
2
2020-10-07T07:48:18.000Z
2021-07-16T04:22:44.000Z
from django.contrib import admin

from .models import Questions, Answers

# Expose the screening models in the Django admin with the default ModelAdmin.
admin.site.register(Questions)
admin.site.register(Answers)
19.142857
38
0.820896
18
134
6.111111
0.555556
0.163636
0.309091
0
0
0
0
0
0
0
0
0
0.097015
134
7
39
19.142857
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
7b5bee56710bc8f5af9a21d26d31b3558a6fd3e3
251
py
Python
gp_kernel.py
steveli/gp-adapter
361fdfbc97f9691b1cc1b422953536b3673e5ee0
[ "MIT" ]
13
2017-08-30T02:54:04.000Z
2021-09-27T07:37:17.000Z
gp_kernel.py
steveli/gp-adapter
361fdfbc97f9691b1cc1b422953536b3673e5ee0
[ "MIT" ]
null
null
null
gp_kernel.py
steveli/gp-adapter
361fdfbc97f9691b1cc1b422953536b3673e5ee0
[ "MIT" ]
4
2017-08-30T02:54:11.000Z
2020-02-03T18:45:55.000Z
import numpy as np
import theano.tensor as T


def symbolic_kernel(t_diff, gp_params):
    """Symbolic (Theano) kernel: gp_params[0] * exp(-gp_params[1] * t_diff**2).

    ``t_diff`` is a Theano tensor of pairwise time differences; ``gp_params``
    holds the amplitude and the coefficient of the squared difference.
    """
    amplitude = gp_params[0]
    coeff = gp_params[1]
    return amplitude * T.exp(-coeff * T.sqr(t_diff))


def kernel(t_diff, gp_params):
    """Numeric (NumPy) counterpart of ``symbolic_kernel`` for concrete arrays."""
    amplitude = gp_params[0]
    coeff = gp_params[1]
    return amplitude * np.exp(-coeff * np.square(t_diff))
22.818182
67
0.709163
48
251
3.479167
0.395833
0.287425
0.131737
0.155689
0.407186
0.407186
0.407186
0.407186
0.407186
0
0
0.018779
0.151394
251
10
68
25.1
0.765258
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
5
7b71e6e76cd8eed90ffb4c7270aafdfd2aecf697
702
py
Python
app/forms.py
dpguthrie/loan-comparison
f1f6cc7f97fc204a91166c3dfaa55e5c09a49fdd
[ "MIT" ]
2
2017-11-06T17:43:49.000Z
2019-12-20T05:23:39.000Z
app/forms.py
dpguthrie/loan-comparison
f1f6cc7f97fc204a91166c3dfaa55e5c09a49fdd
[ "MIT" ]
4
2019-12-05T23:45:48.000Z
2019-12-05T23:45:50.000Z
app/forms.py
dpguthrie/Loan-Comparison
f1f6cc7f97fc204a91166c3dfaa55e5c09a49fdd
[ "MIT" ]
null
null
null
from flask_wtf import Form
from wtforms import DecimalField, IntegerField
from wtforms.validators import DataRequired


class LoanForm(Form):
    """WTForms form collecting the terms of a loan for comparison.

    NOTE(review): the field labels say "(Months)" but the validation messages
    for Term and Amort say "(in years)" -- one of the two units is wrong;
    confirm which against the loan-calculation code before changing either.
    """

    # Commitment (principal) amount.
    Amount = DecimalField("Commitment Amount", validators=[DataRequired("Please enter an amount")])
    # Loan term.
    Term = IntegerField("Term (Months)", validators=[DataRequired("Please enter the loan term (in years)")])
    # Amortization period.
    Amort = IntegerField("Amortization (Months)", validators=[DataRequired("Please enter the loan amortization (in years)")])
    # Annual interest rate.
    Rate = DecimalField("Interest Rate", validators=[DataRequired("Please enter the loan's interest rate")])
    # Month in which the rate resets.
    Reset = IntegerField("Rate Reset (Month)", validators=[DataRequired("Please enter the month the rate resets")])
58.5
125
0.757835
81
702
6.555556
0.382716
0.207156
0.263653
0.310734
0.316384
0.248588
0.173258
0
0
0
0
0
0.126781
702
11
126
63.818182
0.866232
0
0
0
0
0
0.371795
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
7b874aff539a08da9357c443415ceade030f76c0
148
py
Python
boilerplate/users/admin.py
andre-dasilva/django-boilerplate
2bce4623053a3993bf4ba897109de8ea3ce0fd0f
[ "MIT" ]
null
null
null
boilerplate/users/admin.py
andre-dasilva/django-boilerplate
2bce4623053a3993bf4ba897109de8ea3ce0fd0f
[ "MIT" ]
null
null
null
boilerplate/users/admin.py
andre-dasilva/django-boilerplate
2bce4623053a3993bf4ba897109de8ea3ce0fd0f
[ "MIT" ]
null
null
null
from django.contrib import admin # Register your models here. from django.contrib.auth import get_user_model admin.site.register(get_user_model())
24.666667
46
0.824324
23
148
5.130435
0.608696
0.169492
0.288136
0
0
0
0
0
0
0
0
0
0.101351
148
6
47
24.666667
0.887218
0.175676
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
7b9a75b894713427865776b11b48b39bfe37cb0f
259
py
Python
share/ttkwidgets/autocomplete/__init__.py
Marusoftware/Marutools
2b462ea02abaf957eb037c281b62d7efe053840e
[ "MIT" ]
null
null
null
share/ttkwidgets/autocomplete/__init__.py
Marusoftware/Marutools
2b462ea02abaf957eb037c281b62d7efe053840e
[ "MIT" ]
5
2021-01-21T09:46:12.000Z
2022-02-14T13:54:44.000Z
share/ttkwidgets/autocomplete/__init__.py
Marusoftware/Marutools
2b462ea02abaf957eb037c281b62d7efe053840e
[ "MIT" ]
2
2021-11-02T11:01:53.000Z
2022-02-14T10:11:21.000Z
# Copyright (c) The ttkwidgets authors 2017 # Available under the license found in LICENSE from .autocomplete_entry import AutocompleteEntry from .autocompletecombobox import AutocompleteCombobox from .autocomplete_entrylistbox import AutocompleteEntryListbox
51.8
63
0.872587
27
259
8.296296
0.703704
0.142857
0
0
0
0
0
0
0
0
0
0.017167
0.100386
259
5
63
51.8
0.944206
0.332046
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c86ccc1189138c33e39c41f7f70655d56c769c46
58
py
Python
conftest.py
felskrone/salt-eventsd
82acde6d1ca618809c81c662fc7d57ea8ae23712
[ "Apache-2.0" ]
27
2015-01-06T00:56:55.000Z
2020-10-16T16:32:05.000Z
conftest.py
felskrone/salt-eventsd
82acde6d1ca618809c81c662fc7d57ea8ae23712
[ "Apache-2.0" ]
26
2015-01-18T01:38:00.000Z
2020-05-30T09:59:26.000Z
conftest.py
felskrone/salt-eventsd
82acde6d1ca618809c81c662fc7d57ea8ae23712
[ "Apache-2.0" ]
11
2015-01-11T11:14:41.000Z
2019-12-17T06:11:36.000Z
# coding=utf-8 """ """ # 3rd party imports import pytest
8.285714
19
0.637931
8
58
4.625
1
0
0
0
0
0
0
0
0
0
0
0.042553
0.189655
58
6
20
9.666667
0.744681
0.517241
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c8a3beb91780d5ee12fba69f8043488b0c21de5d
3,752
py
Python
CmixAPIClient/project.py
cmodzelewski-dynata/python-cmixapi-client
ba1a7b2b9c5d8664ac94ca8122c0a5259941d888
[ "MIT" ]
1
2020-01-21T14:42:33.000Z
2020-01-21T14:42:33.000Z
CmixAPIClient/project.py
cmodzelewski-dynata/python-cmixapi-client
ba1a7b2b9c5d8664ac94ca8122c0a5259941d888
[ "MIT" ]
102
2020-01-18T08:40:42.000Z
2022-01-25T11:00:00.000Z
CmixAPIClient/project.py
cmodzelewski-dynata/python-cmixapi-client
ba1a7b2b9c5d8664ac94ca8122c0a5259941d888
[ "MIT" ]
2
2020-01-15T12:47:37.000Z
2020-07-10T22:06:45.000Z
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from .error import CmixError


class CmixProject(object):
    """Project-scoped wrapper over a CMIX API client.

    Every method hits an endpoint under ``projects/{project_id}`` via the
    injected ``client`` and returns the client's response unchanged.  The
    twelve public methods previously repeated the same three-line
    endpoint/error/response pattern; that pattern now lives in the two
    private helpers below, with the error strings built so they are
    byte-identical to the originals.
    """

    def __init__(self, client, project_id):
        """Store the API client and project id.

        :param client: API client exposing ``api_get`` / ``api_delete``.
        :param project_id: id of the CMIX project this instance operates on.
        :raises CmixError: if either argument is None.
        """
        if None in [client, project_id]:
            raise CmixError("Client and project id are required.")
        self.client = client
        self.project_id = project_id

    def _project_get(self, route, action):
        """GET ``projects/{project_id}<route>``; ``action`` names the
        operation in the standard non-200 error message."""
        endpoint = 'projects/{}'.format(self.project_id) + route
        error = 'CMIX returned a non-200 response code while ' + action
        return self.client.api_get(endpoint, error)

    def _project_delete(self, route, action):
        """DELETE ``projects/{project_id}<route>``; ``action`` names the
        operation in the standard non-200 error message."""
        endpoint = 'projects/{}'.format(self.project_id) + route
        error = 'CMIX returned a non-200 response code while ' + action
        return self.client.api_delete(endpoint, error)

    def delete_project(self):
        """Delete this project."""
        return self._project_delete('', 'deleting project')

    def delete_group(self, group_id):
        """Delete one group belonging to this project."""
        return self._project_delete('/groups/{}'.format(group_id), 'deleting group')

    def get_project(self):
        """Fetch the project itself."""
        return self._project_get('', 'getting project')

    def get_sources(self):
        """Fetch the project's sources."""
        return self._project_get('/sources', 'getting project sources')

    def get_groups(self):
        """Fetch the project's groups."""
        return self._project_get('/groups', 'getting project groups')

    def get_links(self):
        """Fetch the project's links."""
        return self._project_get('/links', 'getting project links')

    def get_full_links(self):
        """Fetch the project's full links."""
        return self._project_get('/full-links', 'getting project full links')

    def get_locales(self):
        """Fetch the project's locales."""
        return self._project_get('/locales', 'getting project locales')

    def get_markup_files(self):
        """Fetch the project's markup files."""
        return self._project_get('/markup-files', 'getting project markup files')

    def get_respondent_links(self):
        """Fetch the project's respondent links."""
        return self._project_get('/respondent-links', 'getting project respondent links')

    def get_surveys(self):
        """Fetch the project's surveys."""
        return self._project_get('/surveys', 'getting project surveys')
47.493671
102
0.722015
469
3,752
5.526652
0.113006
0.093364
0.060185
0.084877
0.799383
0.762346
0.759645
0.759645
0.729938
0.728781
0
0.011296
0.197761
3,752
78
103
48.102564
0.849834
0.005597
0
0.380952
0
0
0.260392
0.025476
0
0
0
0
0
1
0.190476
false
0
0.031746
0
0.412698
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
7402cb71101a3e2d55aa90f1c7c3fcdfdc68f04f
317
py
Python
0231_PowerOfTwo.py
yingzhuo1994/LeetCode
636eef90867d21e3439d258ec99fbb8e5ad5a742
[ "MIT" ]
null
null
null
0231_PowerOfTwo.py
yingzhuo1994/LeetCode
636eef90867d21e3439d258ec99fbb8e5ad5a742
[ "MIT" ]
null
null
null
0231_PowerOfTwo.py
yingzhuo1994/LeetCode
636eef90867d21e3439d258ec99fbb8e5ad5a742
[ "MIT" ]
null
null
null
class Solution:
    # 1st solution
    # O(1) time | O(1) space
    # Double a running power of two until it reaches or passes n;
    # n is a power of two exactly when we land on it.
    # (Shadowed by the redefinition below; kept for reference.)
    def isPowerOfTwo(self, n: int) -> bool:
        power = 1
        while power < n:
            power *= 2
        return power == n

    # 2nd solution
    # O(1) time | O(1) space
    # Bit trick: a positive power of two has exactly one set bit,
    # so n & (n - 1) clears it and leaves zero.
    def isPowerOfTwo(self, n: int) -> bool:
        return n > 0 and (n & (n - 1)) == 0
24.384615
43
0.476341
48
317
3.145833
0.4375
0.05298
0.13245
0.18543
0.635762
0.635762
0.635762
0.635762
0.635762
0.635762
0
0.052083
0.394322
317
13
44
24.384615
0.734375
0.230284
0
0.25
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0.125
0.625
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
cda01214c5f11754116056594b4c8454b49d0fdb
41
py
Python
bitmovin/errors/bitmovin_error.py
camberbridge/bitmovin-python
3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95
[ "Unlicense" ]
44
2016-12-12T17:37:23.000Z
2021-03-03T09:48:48.000Z
bitmovin/errors/bitmovin_error.py
camberbridge/bitmovin-python
3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95
[ "Unlicense" ]
38
2017-01-09T14:45:45.000Z
2022-02-27T18:04:33.000Z
bitmovin/errors/bitmovin_error.py
camberbridge/bitmovin-python
3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95
[ "Unlicense" ]
27
2017-02-02T22:49:31.000Z
2019-11-21T07:04:57.000Z
class BitmovinError(Exception):
    """Base exception for errors raised by the Bitmovin client."""
13.666667
31
0.756098
4
41
7.75
1
0
0
0
0
0
0
0
0
0
0
0
0.170732
41
2
32
20.5
0.911765
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
cdb053d908eaa8c7a36f635403d213fea4bf0a2c
3,798
py
Python
tests/mutate/test_respell_augmented_unisons.py
gilbertohasnofb/auxjad
553b7fe97221b6f378a93ade6262f024e3cbc678
[ "MIT" ]
6
2020-05-18T09:28:29.000Z
2021-12-22T00:40:54.000Z
tests/mutate/test_respell_augmented_unisons.py
gilbertohasnofb/auxjad
553b7fe97221b6f378a93ade6262f024e3cbc678
[ "MIT" ]
1
2021-04-21T20:29:38.000Z
2021-04-22T19:44:54.000Z
tests/mutate/test_respell_augmented_unisons.py
gilbertohasnofb/auxjad
553b7fe97221b6f378a93ade6262f024e3cbc678
[ "MIT" ]
1
2021-04-21T18:54:46.000Z
2021-04-21T18:54:46.000Z
# Tests for auxjad.mutate.respell_augmented_unisons(): each case builds an
# abjad.Staff, respells augmented unisons inside chords (e.g. <ef' e'> becomes
# <ds' e'>), and asserts the LilyPond output against an expected string run
# through abjad.String.normalize.  Cases cover: basic chords (01), all twelve
# chromatic semitone pairs (02), chords with more notes (03-04), the
# include_multiples flag for octave-plus unisons (05), the
# respell_by_pitch_class flag (06), enharmonic spellings like bs'/cf'' (07).
# NOTE(review): test_08 calls abjad.mutate.respell_augmented_unisons, unlike
# the auxjad.mutate used everywhere else -- auxjad presumably monkey-patches
# abjad.mutate; confirm, otherwise this is a typo.
# NOTE(review): this dump flattened the file to single lines, so the original
# line breaks inside the r""" ... """ literals are lost; the code below is
# deliberately left byte-identical rather than reformatted.
import abjad import auxjad def test_respell_augmented_unisons_01(): staff = abjad.Staff(r"c'4 r4 <ef' e'>4 g'4 <c' cs'>4 r2.") auxjad.mutate.respell_augmented_unisons(staff[:]) assert abjad.lilypond(staff) == abjad.String.normalize( r""" \new Staff { c'4 r4 <ds' e'>4 g'4 <c' df'>4 r2. } """ ) def test_respell_augmented_unisons_02(): staff = abjad.Staff() for pitch in range(12): staff.append(abjad.Chord([pitch, pitch + 1], (1, 16))) auxjad.mutate.respell_augmented_unisons(staff[:]) assert abjad.lilypond(staff) == abjad.String.normalize( r""" \new Staff { <c' df'>16 <cs' d'>16 <d' ef'>16 <ds' e'>16 <e' f'>16 <f' gf'>16 <fs' g'>16 <g' af'>16 <gs' a'>16 <a' bf'>16 <as' b'>16 <b' c''>16 } """ ) def test_respell_augmented_unisons_03(): staff = abjad.Staff(r"<a c' cs' f'>1") auxjad.mutate.respell_augmented_unisons(staff[:]) assert abjad.lilypond(staff) == abjad.String.normalize( r""" \new Staff { <a c' df' f'>1 } """ ) def test_respell_augmented_unisons_04(): staff = abjad.Staff(r"<e' cs' g' ef'>1") auxjad.mutate.respell_augmented_unisons(staff[:]) assert abjad.lilypond(staff) == abjad.String.normalize( r""" \new Staff { <cs' ds' e' g'>1 } """ ) def test_respell_augmented_unisons_05(): staff = abjad.Staff(r"<c' cs''>1") auxjad.mutate.respell_augmented_unisons(staff[:]) assert abjad.lilypond(staff) == abjad.String.normalize( r""" \new Staff { <c' cs''>1 } """ ) auxjad.mutate.respell_augmented_unisons(staff[:], include_multiples=True) assert abjad.lilypond(staff) == abjad.String.normalize( r""" \new Staff { <c' df''>1 } """ ) def test_respell_augmented_unisons_06(): staff = abjad.Staff(r"<c' cs' cs''>1") auxjad.mutate.respell_augmented_unisons(staff[:]) assert abjad.lilypond(staff) == abjad.String.normalize( r""" \new Staff { <c' df' cs''>1 } """ ) staff = abjad.Staff(r"<c' cs' cs''>1") auxjad.mutate.respell_augmented_unisons( staff[:], respell_by_pitch_class=True, ) assert abjad.lilypond(staff) == abjad.String.normalize( r""" \new Staff { <c' df' df''>1 } """ ) def 
test_respell_augmented_unisons_07(): staff = abjad.Staff( r""" <e' bs'>1 <e' b' bs'>1 <e' cf''>1 <e' cf'' c''>1 <es' c''>1 <e' es' c''>1 <ff' c''>1 <ff' f' c''>1 """ ) auxjad.mutate.respell_augmented_unisons(staff[:]) assert abjad.lilypond(staff) == abjad.String.normalize( r""" \new Staff { <e' bs'>1 <e' b' c''>1 <e' cf''>1 <e' b' c''>1 <es' c''>1 <e' f' c''>1 <ff' c''>1 <e' f' c''>1 } """ ) def test_respell_augmented_unisons_08(): staff = abjad.Staff(r"c'4 r4 <ef' e'>4 g'4 <c' cs'>4 r2.") abjad.mutate.respell_augmented_unisons(staff[:]) assert abjad.lilypond(staff) == abjad.String.normalize( r""" \new Staff { c'4 r4 <ds' e'>4 g'4 <c' df'>4 r2. } """ )
22.210526
77
0.461822
454
3,798
3.740088
0.136564
0.111896
0.243816
0.170789
0.852179
0.759717
0.682568
0.644876
0.644876
0.618963
0
0.041347
0.382306
3,798
170
78
22.341176
0.682438
0
0
0.318182
0
0.030303
0.060606
0
0
0
0
0
0.151515
1
0.121212
false
0
0.030303
0
0.151515
0
0
0
0
null
0
1
1
1
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
a82f92442b378bef302b650c32fcb80e7caa419b
162
py
Python
tests/testimage.py
rayvnekieron/regionator
ed32f43c315465402b7e18c9f374e22dd89db64e
[ "Apache-2.0" ]
null
null
null
tests/testimage.py
rayvnekieron/regionator
ed32f43c315465402b7e18c9f374e22dd89db64e
[ "Apache-2.0" ]
null
null
null
tests/testimage.py
rayvnekieron/regionator
ed32f43c315465402b7e18c9f374e22dd89db64e
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python import kml.image image = kml.image.Image('image.gif') if image.OutputFormat() != 'GIF': print 'ERROR in kml.image.Image().OutputFormat'
20.25
49
0.703704
24
162
4.75
0.541667
0.350877
0.342105
0
0
0
0
0
0
0
0
0
0.117284
162
7
50
23.142857
0.797203
0.123457
0
0
0
0
0.361702
0.212766
0
0
0
0
0
0
null
null
0
0.25
null
null
0.25
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
b52b2c9998b8237bacfaafd009ba1da9a901f368
250
py
Python
utils/config/constants.py
brunofornazari/tcc
57990d68ca196b4da7791faab717d67cfe5497d3
[ "Unlicense" ]
null
null
null
utils/config/constants.py
brunofornazari/tcc
57990d68ca196b4da7791faab717d67cfe5497d3
[ "Unlicense" ]
null
null
null
utils/config/constants.py
brunofornazari/tcc
57990d68ca196b4da7791faab717d67cfe5497d3
[ "Unlicense" ]
null
null
null
""" constants.py Constants.py é responsável por armazenar qualquer varia´vel fixa utilizada no projeto. """ WITAI_KEY = 'ZBH7LRPZAISA7VIM7CNZDG3HJJPRTVTA' PYOWM_KEY = '48250140b550af15b1537f212d815013' NEWS_KEY = '281513d9cf5f444da3c010c87cccdd62'
25
86
0.824
25
250
8.16
0.84
0.107843
0
0
0
0
0
0
0
0
0
0.217778
0.1
250
9
87
27.777778
0.684444
0.4
0
0
0
0
0.676056
0.676056
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
b5503a9d7b5e3fd51c4b7c412c18e4465c98ea1d
382
py
Python
inicio.py
ccastillo-py/BBDD_peewee
7417b00b1b13a64d9a197f2fe833f3d7b8ff931c
[ "MIT" ]
null
null
null
inicio.py
ccastillo-py/BBDD_peewee
7417b00b1b13a64d9a197f2fe833f3d7b8ff931c
[ "MIT" ]
null
null
null
inicio.py
ccastillo-py/BBDD_peewee
7417b00b1b13a64d9a197f2fe833f3d7b8ff931c
[ "MIT" ]
null
null
null
'''
============================================================================================
Final Course Project v2.0
Carlos Castillo Rodríguez
============================================================================================
'''

from herramientas.menu import menu
from base_datos.creacion_tablas import inicializar_tablas

# Entry point: create the database tables (peewee models), then show the menu.
inicializar_tablas()
menu()
31.833333
93
0.395288
25
382
5.88
0.72
0.231293
0
0
0
0
0
0
0
0
0
0.005747
0.089005
382
12
94
31.833333
0.416667
0.628272
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
b571231ef4dc7a80bf306168de613f966ed3016c
32,171
py
Python
tests/selenium/guiops/selenium_api/selenium_api_experimental.py
gholms/eucaconsole
4629c961c90e3aae27e3a869a7f157bafeda6489
[ "BSD-2-Clause" ]
null
null
null
tests/selenium/guiops/selenium_api/selenium_api_experimental.py
gholms/eucaconsole
4629c961c90e3aae27e3a869a7f157bafeda6489
[ "BSD-2-Clause" ]
10
2018-03-28T17:25:16.000Z
2021-03-05T10:15:06.000Z
tests/selenium/guiops/selenium_api/selenium_api_experimental.py
gholms/eucaconsole
4629c961c90e3aae27e3a869a7f157bafeda6489
[ "BSD-2-Clause" ]
1
2019-06-07T20:43:45.000Z
2019-06-07T20:43:45.000Z
from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select from selenium.common.exceptions import NoSuchElementException from selenium.common.exceptions import ElementNotVisibleException from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC import time class UICheckException(Exception): def __init__(self, message): raise Exception(message) class SeleniumApi_experimental(): def __init__(self, driver): """ :param driver: webdriver """ assert isinstance(driver, webdriver.Firefox) self.driver = driver retry = 400 timeout_to_locate_element_in_seconds = 30 timeout_to_determine_visibility_in_seconds = 5 timeout_to_determine_if_clickable_in_seconds = 20 def wait_for_element_present_by_id_experimental(self, element_id): """ Waits for element to be present on the page for timeout_to_locate_element_in_seconds Checks for presence every 500 milliseconds """ print "Executing wait_for_element_present_by_id("+element_id+")" print "Looking for element id = " + element_id + " in the DOM." print "Timeout is set to " + str(self.timeout_to_locate_element_in_seconds) + " seconds" # element_present = self.driver.find_element(By.ID, element_id) wait = WebDriverWait(self.driver, self.timeout_to_locate_element_in_seconds, 1, (NoSuchElementException)) def my_method(): try: self.driver.find_element(By.ID, element_id) return True except NoSuchElementException, nse: return False wait.until(my_method, message="Found element") return 0 def wait_for_element_present_by_id_experimental(self, element_id): """ Waits for element to be present on the page for timeout_to_locate_element_in_seconds Checks for presence every 500 milliseconds """ print "Executing wait_for_element_present_by_id("+element_id+")" print "Looking for element id = " + element_id + " in the DOM." 
print "Timeout is set to " + str(self.timeout_to_locate_element_in_seconds) + " seconds" # element_present = self.driver.find_element(By.ID, element_id) #wait = WebDriverWait(self.driver, self.timeout_to_locate_element_in_seconds, 1, (NoSuchElementException)) def my_method(self): try: self.driver.find_element_by_id(element_id) return True except NoSuchElementException, nse: return False WebDriverWait(self.driver, self.timeout_to_locate_element_in_seconds).until(my_method, "Found element") return 0 def wait_for_element_not_present_by_id_experimental(self, element_id): print "Executing wait_for_element_not_present_by_id("+element_id+")" print "Looking for element id = " + element_id + " in the DOM." print "Timeout is set to " + str(self.timeout_to_locate_element_in_seconds) + " seconds" wait = WebDriverWait(self.driver, self.timeout_to_locate_element_in_seconds) if wait.until_not(EC.presence_of_element_located((By.ID, element_id))): print "Verified element id = " + element_id + " not present." else: raise UICheckException return 0 def wait_for_visible_by_id_experimental(self, element_id): """ Waits for element to become visible for timeout_to_determine_visibility_in_seconds Checks for presence and visibility every 500 milliseconds :param element_id: """ print "Waiting for element id = " + element_id + " to become visible." if self.wait_for_element_present_by_id(element_id): wait = WebDriverWait(self.driver, self.timeout_to_determine_visibility_in_seconds) element = wait.until(EC.visibility_of_element_located((By.ID, element_id))) # def verify_element_present_by_id(self, element_id): # # """ # Tries to locate element by polling every 500ms until timeout_to_locate_element_in_seconds is reached. 
# :param element_id: # """ # element = WebDriverWait(self.driver, self.timeout_to_locate_element_in_seconds).until( # EC.presence_of_element_located((By.ID, element_id))) # print element def verify_element_visible_by_id_experimental(self, element_id): """ Checks for visibility of element by polling every 500ms until timeout_to_determine_visibility_in_seconds is reached. :param element_id: """ element = WebDriverWait(self.driver, self.timeout_to_determine_visibility_in_seconds).until( EC.visibility_of_element_located((By.ID, element_id)) ) print element # self.verify_element_present("ID", element_id) # self.set_implicit_wait(self.timeout_to_determine_visibility_in_seconds) # is_visible = False # try: # is_visible = self.driver.find_element_by_id(element_id).is_displayed() #except ElementNotVisibleException: # pass #finally: # if is_visible: # print "Element " + element_id + " is visible" # else: # print "Element " + element_id + " is not visible" def verify_element_clickable_by_id_experimental(self,element_id): """ Checks whether the element is clickable by polling every 500ms until timeout_to_determine_if_clickable_in_seconds is reached. :param element_id: """ element = WebDriverWait(self.driver, self.timeout_to_determine_if_clickable_in_seconds).until( EC.element_to_be_clickable((By.ID,element_id)) ) print element ################################################# def wait_for_visible(self, element_type, element): """ Checks visibility of an element. Keeps checking for visibility until max number of trials self.retry is reached. 
:param element_type: :param element: :return: :raise: """ self.check_if_element_present_by_type(element_type, element) is_visible = False for i in range(self.retry): print "Wait On Visiblity:: Trial: " + str(i) + " Element Type: " + element_type + ", Element: " + element if element_type is "LINK_TEXT": is_visible = self.driver.find_element_by_link_text(element).is_displayed() elif element_type is "ID": is_visible = self.driver.find_element_by_id(element).is_displayed() elif element_type is "CSS_SELECTOR": is_visible = self.driver.find_element_by_css_selector(element).is_displayed() elif element_type is "XPATH": is_visible = self.driver.find_element_by_xpath(element).is_displayed() elif element_type is "NAME": is_visible = self.driver.find_element_by_name(element).is_displayed() if is_visible is True: print "Element " + element + " is visible" break time.sleep(1) if is_visible is False: print "Element " + element + " is not visible!" return is_visible def click_on_visible(self, element_type, element): """ Waits for an element to become visible then clicks the element by its locator. :rtype : object :param element_type: :param element: """ self.wait_for_visible(element_type, element) if element_type is "LINK_TEXT": self.click_element_by_link_text(element) elif element_type is "ID": self.click_element_by_id(element) elif element_type is "CSS_SELECTOR": self.click_element_by_css_selector(element) elif element_type is "XPATH": self.click_element_by_xpath(element) elif element_type is "NAME": self.click_element_by_name(element) def verify_element_present(self, how, what): """ Finds element by locator. Takes as arguments element type and element locator. Will try locating element until implicit wait limit timeout_to_locate_element_in_seconds is reached. Returns NoSuchElementException if element is not found. 
:param how: :param what: """ print "Executing verify_element_present (" + str(how) + " , " + str(what) + " )" self.set_implicit_wait(self.timeout_to_locate_element_in_seconds) try: self.driver.find_element(by=how, value=what) except NoSuchElementException: return False return True def verify_element_present(self, how, what): """ Finds element by locator. Takes as arguments element type and element locator. Will try locating element until implicit wait limit timeout_to_locate_element_in_seconds is reached. Returns NoSuchElementException if element is not found. :param how: :param what: """ print "Executing verify_element_present (" + str(how) + " , " + str(what) + " )" self.set_implicit_wait(self.timeout_to_locate_element_in_seconds) try: self.driver.find_element(by=how, value=what) except NoSuchElementException: return False return True def wait_for_visible_by_id(self, element_id): """ Checks visibility of an element using its id. Keeps checking for visibility until max number of trials self.retry is reached. :param element_id: """ print "Executing wait_for_visible_by_id( "+element_id+" )" self.wait_for_element_present_by_id(element_id) is_visible = False for i in range(self.retry): is_visible = self.driver.find_element_by_id(element_id).is_displayed() if is_visible is True: print "Element " + element_id + " is visible" break time.sleep(1) if is_visible is False: print "Element " + element_id + " is not visible" def wait_for_visible_by_css_selector(self, css): """ Checks visibility of an element using its css. Keeps checking for visibility until max number of trials self.retry is reached. 
:param self: :param css: """ is_visible = False for i in range(self.retry): is_visible = self.driver.find_element_by_css_selector(css).is_displayed() if is_visible is True: print "Element " + css + " is visible" break time.sleep(1) if is_visible is False: print "Element " + css + " is not visible" def wait_for_visible_by_xpath(self, xpath): """ Checks visibility of an element using its xpath. Keeps checking for visibility until max number of trials self.retry is reached. :param xpath: """ is_visible = False for i in range(self.retry): is_visible = self.driver.find_element_by_xpath(xpath).is_displayed() if is_visible is True: print "Element " + xpath + " is visible" break time.sleep(1) if is_visible is False: print "Element " + xpath + " is not visible" def click_element_by_link_text(self, link_text): if self.check_if_element_present_by_type("LINK_TEXT", link_text) is not 0: raise UICheckException("Element by link text not present: " + link_text) if self.verify_element_visible_by_link_text(link_text) is not True: raise UICheckException("Element by link text not visible:" + link_text) print "Click: Element Type: LINK_TEXT, Element: " + link_text self.driver.find_element_by_link_text(link_text).click() time.sleep(1) return 0 def click_element_by_css_selector(self, css_selector): if self.check_if_element_present_by_type("CSS_SELECTOR", css_selector) is not 0: raise UICheckException("Element by css selector not present: " + css_selector) if self.verify_element_visible_by_css_selector(css_selector) is not True: raise UICheckException("Element by css selector not visible:" + css_selector) print "Click: Element Type: CSS_SELECTOR, Element: " + css_selector self.driver.find_element_by_css_selector(css_selector).click() time.sleep(1) return 0 def click_element_by_xpath(self, xpath): if self.check_if_element_present_by_type("XPATH", xpath) is not 0: raise UICheckException("Element by xpath not present: " + xpath) # if self.check_if_element_visible_by_type("XPATH", xpath) is 
not True: # raise UICheckException("Element by xpath not visible:" + xpath) print "Click: Element Type: XPATH, Element: " + xpath self.driver.find_element_by_xpath(xpath).click() time.sleep(1) return 0 def click_element_by_name(self, name): if self.check_if_element_present_by_type("NAME", name) is not 0: raise UICheckException("Element by name not present: " + name) if self.verify_text_not_present_by_name("NAME", name) is not True: raise UICheckException("Element by name not visible:" + name) print "Click: Element Type: NAME, Element: " + name self.driver.find_element_by_name(name).click() return 0 def click_on_visible_by_id(self, element_id): """ Waits for an element to become visible then clicks the element by its id. :param element_id: """ self.wait_for_visible_by_id(element_id) self.click_element_by_id(element_id) def click_on_visible_by_css_selector(self, css): """ Waits for an element to become visible then clicks the element by its css. :param css: """ self.wait_for_visible_by_css_selector(css) self.click_element_by_css_selector(css) def verify_element_visible_by_link_text(self, link_text): """ Checks if element is visible using link text. Does not retry. :param link_text: """ return self.driver.find_element_by_link_text(link_text).is_displayed() def verify_element_visible_by_id(self, element_id): """ Checks if element is visible using id. Does not retry. :param element_id: """ return self.driver.find_element_by_id(element_id).is_displayed() def verify_element_visible_by_css_selector(self, css): """ Checks if element is visible using css. Does not retry. :param css: """ return self.driver.find_element_by_css_selector(css).is_displayed() def verify_element_visible_by_xpath(self, xpath): """ Checks if element is visible using xpath. Does not retry. :param xpath: """ return self.driver.find_element_by_xpath(xpath).is_displayed() def verify_element_visible_by_name(self, name): """ Checks if element is visible using name. Does not retry. 
:param name: """ return self.driver.find_element_by_name(name).is_displayed() def send_keys_by_link_text(self, link_text, keys): if self.check_if_element_present_by_type("LINK_TEXT", link_text) is not 0: raise UICheckException("Element by link text not present:" + link_text) if self.verify_element_visible_by_link_text(link_text) is not True: raise UICheckException("Element by link text not visible:" + link_text) print "Set: Element Type: LINK_TEXT, Element: " + link_text + ", Keys: " + keys self.driver.find_element_by_link_text(link_text).clear() self.driver.find_element_by_link_text(link_text).send_keys(keys) return 0 def select_visible_text_by_name(self, name, visible_text): if self.check_if_element_present_by_type("NAME", name) is not 0: raise UICheckException("Element by name not present: " + name) if self.verify_element_visible_by_name(name) is not True: raise UICheckException("Element by name not visible:" + name) print "Select: Element Type: NAME, Element: " + name + ", Text: " + visible_text Select(self.driver.find_element_by_name(name)).select_by_visible_text(visible_text) return 0 def verify_element_not_present(self, element_type, element): """ Waits for the element to disappear from the page. Keeps checking until max number or retries self.retry is reached. 
:param element_type: :param element: """ this_element_type = "" if element_type is "LINK_TEXT": this_element_type = By.LINK_TEXT elif element_type is "ID": this_element_type = By.ID elif element_type is "CSS_SELECTOR": this_element_type = By.CSS_SELECTOR elif element_type is "XPATH": this_element_type = By.XPATH elif element_type is "NAME": this_element_type = By.NAME for i in range(1, self.retry, 1): print "Wait On Removal:: Trial: " + str(i) + " Element Type: " + element_type + ", Element: " + element try: self.driver.find_element(this_element_type, element) except NoSuchElementException: print print "Verified Removal:: Element type: " + element_type + ", Element: " + element return True def verify_text_not_present_by_css(self, css, text): """ Waits for the element to disappear from the page by css. Keeps checking until max number or retries self.retry is reached. :param css: :param text: """ print"Verifying that text displayed at " + css + " does not match " + text for i in range(1, self.retry, 1): displayed = self.store_visible_text_by_css_selector(css) print "Currently displayed at locator " + css + " is " + displayed if displayed != text: print "Verified " + self.store_visible_text_by_css_selector(css) + " does not match " + text return True else: print print "Trial " + str(i) + " :" def verify_text_not_present_by_id(self, element_id, text): """ Waits for the element to disappear from the page by id. Keeps checking until max number or retries self.retry is reached. :param element_id: :param text: """ print"Verifying that text displayed at " + element_id + " does not match " + text for i in range(1, self.retry, 1): if self.store_visible_text_by_id(element_id) != text: print "Verified " + self.store_visible_text_by_id(element_id) + " does not match " + text return True else: print print "Trial " + str(i) + " :" def verify_text_not_present_by_name(self, name, text): """ Waits for the element to disappear from the page by name. 
Keeps checking until max number or retries self.retry is reached. """ print"Verifying that text displayed at " + name + " does not match " + text for i in range(1, self.retry, 1): if self.store_visible_text_by_name(name) != text: print "Verified " + self.store_visible_text_by_name(name) + " does not match " + text return True else: print print "Trial " + str(i) + " :" def verify_text_not_present_by_xpath(self, xpath, text): """ Waits for the element to disappear from the page by xpath. Keeps checking until max number or retries self.retry is reached. :param xpath: :param text: """ print"Verifying that text displayed at " + xpath + " does not match " + text for i in range(1, self.retry, 1): text_on_page = self.store_visible_text_by_xpath(xpath) time.sleep(10) if text_on_page != text: print "Verified " + self.store_visible_text_by_xpath(xpath) + " does not match " + text return True else: print print "Found text: " + text_on_page + "( Waiting for " + text + " to disappear )" print print "Trial " + str(i) + " :" def verify_text_displayed_by_id(self, element_id, element_text): """ Will wait for element to become visible. Will check if text displayed at element_id matches element_text. Keeps checking until max number or retries self.retry is reached. :param element_id: :param element_text: """ #print("Verifying text " +element_text+" displayed at ID "+element_id) for i in range(self.retry): print "Wait On:: Trial: " + str(i) + " Verifying text " + element_text + " displayed at ID " + element_id self.wait_for_visible_by_id(element_id) try: if element_text == self.driver.find_element_by_id(element_id).text: print"Found text" displayed_text = self.driver.find_element_by_id(element_id).text print("Text displayed at ID " + element_id + " is " + displayed_text) break except: pass time.sleep(1) def verify_text_displayed_by_css(self, element_css, element_text): """ Will wait for element to become visible. Will check if text displayed at element_css matches element_text. 
Keeps checking until max number or retries self.retry is reached. :param element_css: :param element_text: """ #print("Verifying text " +element_text+" displayed at ID "+element_css) for i in range(self.retry): print "Wait On:: Trial: " + str(i) + " Verifying text " + element_text + " displayed at ID " + element_css self.wait_for_visible_by_css_selector(element_css) try: if element_text == self.driver.find_element_by_css_selector(element_css).text: print"Found text" break except: pass time.sleep(1) try: self.driver.find_element_by_css_selector(element_css).text except AssertionError as e: self.verificationErrors.append(str(e)) displayed_text = self.driver.find_element_by_css_selector(element_css).text print("Text displayed at ID " + element_css + " is " + displayed_text) def verify_text_displayed_by_xpath(self, xpath, element_text): """ Will wait for element to become visible. Will check if text displayed at xpath matches element_text. Keeps checking until max number or retries self.retry is reached. 
:param xpath: :param element_text: """ #print("Verifying text " +element_text+" displayed at xpath "+locator) displayed_text = None for i in range(self.retry): print "Wait On:: Trial: " + str(i) + " Verifying text " + element_text + " displayed at xpath " + xpath self.wait_for_visible_by_xpath(xpath) try: text_on_page = self.store_visible_text_by_xpath(xpath) if element_text == text_on_page: print"Found text" displayed_text = text_on_page break except: pass time.sleep(1) try: text_on_page = self.store_visible_text_by_xpath(xpath) if element_text == text_on_page: print "Found text" displayed_text = text_on_page except AssertionError as e: self.verificationErrors.append(str(e)) print("Text displayed at xpath " + xpath + " is " + displayed_text) def send_keys_by_css_selector(self, css_selector, keys): if self.check_if_element_present_by_type("CSS_SELECTOR", css_selector) is not 0: raise UICheckException("Element by css selector not present:" + css_selector) if self.verify_element_visible_by_css_selector(css_selector) is not True: raise UICheckException("Element by css selector not visible:" + css_selector) print "Set: Element Type: CSS_SELECTOR, Element: " + css_selector + ", Keys: " + keys self.driver.find_element_by_css_selector(css_selector).clear() self.driver.find_element_by_css_selector(css_selector).send_keys(keys) return 0 def send_keys_by_xpath(self, xpath, keys): if self.check_if_element_present_by_type("XPATH", xpath) is not 0: raise UICheckException("Element by xpath not found :" + xpath) # if self.check_if_element_visible_by_type("XPATH", xpath) is not True: # raise UICheckException("Element by xpath not visible:" + xpath) print "Set: Element Type: XPATH, Element: " + xpath + ", Keys: " + keys self.driver.find_element_by_xpath(xpath).clear() self.driver.find_element_by_xpath(xpath).send_keys(keys) return 0 def send_keys_by_name(self, name, keys): if self.check_if_element_present_by_type("NAME", name) is not 0: raise UICheckException("Element by name not 
found:" + name) if self.verify_element_visible_by_name(name) is not True: raise UICheckException("Element by name not visible:" + name) print "Set: Element Type: NAME, Element: " + name + ", Keys: " + keys self.driver.find_element_by_name(name).clear() return 0 def store_visible_text_by_link_text(self, link_text): if self.check_if_element_present_by_type("LINK_TEXT", link_text) is not 0: raise UICheckException("Element by link text not present:" + link_text) if self.verify_element_visible_by_link_text(link_text) is not True: raise UICheckException("Element by link text not visible:" + link_text) print "Get Text: Element Type: LINK_TEXT, Element: " + link_text return self.driver.find_element_by_link_text(link_text).text def store_visible_text_by_id(self, this_id): if self.check_if_element_present_by_type("ID", this_id) is not 0: raise UICheckException("Element by id not present:" + this_id) if self.verify_element_visible_by_id(this_id) is not True: raise UICheckException("Element by id not visible:" + this_id) print "Get Text: Element Type: ID, Element: " + this_id return self.driver.find_element_by_id(this_id).text def store_visible_text_by_css_selector(self, css_selector): if self.check_if_element_present_by_type("CSS_SELECTOR", css_selector) is not 0: raise UICheckException("Element by css selector not present:" + css_selector) if self.verify_element_visible_by_css_selector(css_selector) is not True: raise UICheckException("Element by css selector not visible:" + css_selector) print "Get Text: Element Type: CSS_SELECTOR, Element: " + css_selector return self.driver.find_element_by_css_selector(css_selector).text def store_visible_text_by_xpath(self, xpath): if self.check_if_element_present_by_type("XPATH", xpath) is not 0: raise UICheckException("Element by xpath not present: " + xpath) # if self.check_if_element_visible_by_type("XPATH", xpath) is not True: # raise UICheckException("Element by xpath not visible:" + xpath) print "Get Text: Element Type: XPATH, 
Element: " + xpath return self.driver.find_element_by_xpath(xpath).text def store_visible_text_by_name(self, name): if self.check_if_element_present_by_type("NAME", name) is not 0: raise UICheckException("Element by name not present: " + name) if self.verify_element_visible_by_name(name) is not True: raise UICheckException("Element by name not visible:" + name) print "Click: Element Type: NAME, Element: " + name return self.driver.find_element_by_name(name).text def select_visible_text_by_id(self, this_id, visible_text): if self.check_if_element_present_by_type("ID", this_id) is not 0: raise UICheckException("Element by id not present: " + this_id) if self.verify_element_visible_by_id(this_id) is not True: raise UICheckException("Element by id not visible:" + this_id) print "Select: Element Type: ID, Element: " + this_id + ", Text: " + visible_text Select(self.driver.find_element_by_id(this_id)).select_by_visible_text(visible_text) return 0 def select_visible_text_by_css_selector(self, css_selector, visible_text): if self.check_if_element_present_by_type("CSS_SELECTOR", css_selector) is not 0: raise UICheckException("Element by css selector not present: " + css_selector) if self.verify_element_visible_by_css_selector(css_selector) is not True: raise UICheckException("Element by css selector not visible:" + css_selector) print "Select: Element Type: CSS_SELECTOR, Element: " + css_selector + ", Text: " + visible_text Select(self.driver.find_element_by_css_selector(css_selector)).select_by_visible_text(visible_text) return 0 def select_visible_text_by_xpath(self, xpath, visible_text): if self.check_if_element_present_by_type("XPATH", xpath) is not 0: raise UICheckException("Element by xpath not present: " + xpath) # if self.check_if_element_visible_by_type("XPATH", xpath) is not True: # raise UICheckException("Element by xpath not visible:" + xpath) print "Select: Element Type: XPATH, Element: " + xpath + ", Text: " + visible_text 
Select(self.driver.find_element_by_xpath(xpath)).select_by_visible_text(visible_text) return 0 def check_if_element_present_by_type(self, element_type, element): """ Checks if element is present using element type and its locator. Keeps checking until max number of trials self.retry are exhausted. :param element_type: :param element: :return: :raise: """ this_element_type = "" if element_type is "LINK_TEXT": this_element_type = By.LINK_TEXT elif element_type is "ID": this_element_type = By.ID elif element_type is "CSS_SELECTOR": this_element_type = By.CSS_SELECTOR elif element_type is "XPATH": this_element_type = By.XPATH elif element_type is "NAME": this_element_type = By.NAME for i in range(self.retry): print "Wait On:: Trial: " + str(i) + " Element Type: " + element_type + ", Element: " + element try: if self.driver.find_element(this_element_type, element): break except: pass #raise UICheckException("Time out") time.sleep(1) # else: # self.fail("timed out after "+`self.retry`+" seconds") try: self.driver.find_element(this_element_type, element) except AssertionError as e: self.verificationErrors.append(str(e)) print "TEST FAILED::: Wait On:: Element Type: " + element_type + ", Element: " + element raise UICheckException("Failed to find element of type " + element_type + element + " present") print "Found:: Element type: " + element_type + ", Element: " + element return 0
43.770068
118
0.642691
4,111
32,171
4.750669
0.041596
0.0447
0.034409
0.051613
0.898976
0.857501
0.801229
0.706605
0.657399
0.586841
0
0.003606
0.275963
32,171
735
119
43.770068
0.834843
0.059215
0
0.559361
0
0
0.144788
0.006578
0
0
0
0
0.009132
0
null
null
0.009132
0.018265
null
null
0.175799
0
0
0
null
0
0
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
b578a225507c278c1a5a4d107ea30edcef6a7614
55
py
Python
tests/roots/test-ext-autodoc/bug2437/autodoc_dummy_foo.py
samdoran/sphinx
4c91c038b220d07bbdfe0c1680af42fe897f342c
[ "BSD-2-Clause" ]
4,973
2015-01-03T15:44:00.000Z
2022-03-31T03:11:51.000Z
tests/roots/test-ext-autodoc/bug2437/autodoc_dummy_foo.py
samdoran/sphinx
4c91c038b220d07bbdfe0c1680af42fe897f342c
[ "BSD-2-Clause" ]
7,850
2015-01-02T08:09:25.000Z
2022-03-31T18:57:40.000Z
tests/roots/test-ext-autodoc/bug2437/autodoc_dummy_foo.py
samdoran/sphinx
4c91c038b220d07bbdfe0c1680af42fe897f342c
[ "BSD-2-Clause" ]
2,179
2015-01-03T15:26:53.000Z
2022-03-31T12:22:44.000Z
class Foo(object): """Dummy class Foo.""" pass
13.75
26
0.563636
7
55
4.428571
0.714286
0.516129
0
0
0
0
0
0
0
0
0
0
0.254545
55
3
27
18.333333
0.756098
0.290909
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
b5a9fec11515638f6d3ebbceb4c5ec027915ce49
278
py
Python
debug_builds.py
nacitar/nx
93f533b05c125518839f3512d2380f1665893e51
[ "Apache-2.0" ]
null
null
null
debug_builds.py
nacitar/nx
93f533b05c125518839f3512d2380f1665893e51
[ "Apache-2.0" ]
null
null
null
debug_builds.py
nacitar/nx
93f533b05c125518839f3512d2380f1665893e51
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 import new_environment new_environment.main(['--debug', 'build/native']) new_environment.main(['--debug', '--clang', 'build/clang']) new_environment.main(['--debug', '--mingw32', 'build/mingw32']) new_environment.main(['--debug', '--avr', 'build/avr'])
30.888889
63
0.679856
34
278
5.411765
0.411765
0.380435
0.391304
0.5
0
0
0
0
0
0
0
0.019157
0.061151
278
8
64
34.75
0.685824
0.07554
0
0
0
0
0.367188
0
0
0
0
0
0
1
0
true
0
0.2
0
0.2
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
b5b331c52376f75d497e8fcc01710ae2ce70294e
153
py
Python
gui_automation/detector.py
marcosboggia/gui_automation
2adabfa71b00945ada04e619c1a36b124c1dda66
[ "MIT" ]
10
2019-06-23T02:34:24.000Z
2022-01-09T09:35:32.000Z
gui_automation/detector.py
marcosboggia/gui_automation
2adabfa71b00945ada04e619c1a36b124c1dda66
[ "MIT" ]
1
2020-10-18T04:32:08.000Z
2020-10-18T22:04:04.000Z
gui_automation/detector.py
marcosboggia/gui_automation
2adabfa71b00945ada04e619c1a36b124c1dda66
[ "MIT" ]
1
2021-07-23T06:09:29.000Z
2021-07-23T06:09:29.000Z
# Made by Marcos Boggia from abc import ABC, abstractmethod class Detector(ABC): @abstractmethod def detect(self, *args): pass
17
36
0.647059
18
153
5.5
0.833333
0.343434
0
0
0
0
0
0
0
0
0
0
0.281046
153
8
37
19.125
0.9
0.137255
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0.2
0.2
0
0.6
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
5
a91b9859bbffe2001f6b896a010de7edb3a7e80b
3,071
py
Python
phyluce/tests/test_alignment.py
faircloth-lab/phyluce
ae6801a7e749be2fa38513db9846046241d0fd7a
[ "BSD-3-Clause" ]
63
2015-03-16T15:10:17.000Z
2022-02-16T12:36:23.000Z
phyluce/tests/test_alignment.py
faircloth-lab/phyluce
ae6801a7e749be2fa38513db9846046241d0fd7a
[ "BSD-3-Clause" ]
253
2015-01-26T13:03:23.000Z
2022-03-15T19:03:05.000Z
phyluce/tests/test_alignment.py
faircloth-lab/phyluce
ae6801a7e749be2fa38513db9846046241d0fd7a
[ "BSD-3-Clause" ]
45
2015-01-26T13:09:50.000Z
2021-05-24T04:20:30.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- """ (c) 2021 Brant Faircloth || http://faircloth-lab.org/ All rights reserved. This code is distributed under a 3-clause BSD license. Please see LICENSE.txt for more information. Created on 2021-02-13 T16:12:15-06:00 """ import os import re import glob import shutil import subprocess import pytest from Bio import AlignIO import pdb @pytest.fixture(scope="module") def o_dir(request): directory = os.path.join( request.config.rootdir, "phyluce", "tests", "test-observed" ) os.mkdir(directory) def clean(): shutil.rmtree(directory) request.addfinalizer(clean) return directory @pytest.fixture(scope="module") def e_dir(request): directory = os.path.join( request.config.rootdir, "phyluce", "tests", "test-expected" ) return directory def test_seqcap_align_mafft_untrim(o_dir, e_dir, request): program = "bin/align/phyluce_align_seqcap_align" output = os.path.join(o_dir, "mafft") cmd = [ os.path.join(request.config.rootdir, program), "--input", os.path.join(e_dir, "taxon-set.incomplete.fasta"), "--output", output, "--taxa", "4", "--aligner", "mafft", "--output-format", "nexus", "--no-trim", "--cores", "1", ] proc = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) stdout, stderr = proc.communicate() assert proc.returncode == 0, print("""{}""".format(stderr.decode("utf-8"))) output_files = glob.glob(os.path.join(output, "*")) assert output_files, "There are no output files" for output_file in output_files: name = os.path.basename(output_file) expected_file = os.path.join(e_dir, "mafft-no-trim", name) observed = open(output_file).read() expected = open(expected_file).read() assert observed == expected def test_seqcap_align_muscle_untrim(o_dir, e_dir, request): program = "bin/align/phyluce_align_seqcap_align" output = os.path.join(o_dir, "muscle") cmd = [ os.path.join(request.config.rootdir, program), "--input", os.path.join(e_dir, "taxon-set.incomplete.fasta"), "--output", output, "--taxa", "4", "--aligner", 
"muscle", "--output-format", "nexus", "--no-trim", "--cores", "1", ] proc = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) stdout, stderr = proc.communicate() assert proc.returncode == 0, print("""{}""".format(stderr.decode("utf-8"))) output_files = glob.glob(os.path.join(output, "*")) assert output_files, "There are no output files" for output_file in output_files: name = os.path.basename(output_file) expected_file = os.path.join(e_dir, "muscle-no-trim", name) observed = open(output_file).read() expected = open(expected_file).read() assert observed == expected
27.176991
79
0.614458
380
3,071
4.855263
0.310526
0.045528
0.065041
0.036856
0.739295
0.710027
0.710027
0.710027
0.710027
0.710027
0
0.013664
0.237382
3,071
112
80
27.419643
0.774125
0.083686
0
0.659091
0
0
0.155793
0.044207
0
0
0
0
0.068182
1
0.056818
false
0
0.090909
0
0.170455
0.022727
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
a9870740497b87b334e1d726e4b4aba668ee40f3
613
py
Python
aspect/sqlalchemy/languages/aql/Command.py
jorgeromanespino/PyAspect
5aef25a2951989a57294bd86779845c1ef331a15
[ "Apache-2.0" ]
1
2019-12-12T11:11:21.000Z
2019-12-12T11:11:21.000Z
aspect/sqlalchemy/languages/aql/Command.py
jorgeromanespino/PyAspect
5aef25a2951989a57294bd86779845c1ef331a15
[ "Apache-2.0" ]
null
null
null
aspect/sqlalchemy/languages/aql/Command.py
jorgeromanespino/PyAspect
5aef25a2951989a57294bd86779845c1ef331a15
[ "Apache-2.0" ]
1
2019-12-03T08:54:20.000Z
2019-12-03T08:54:20.000Z
# from aspect.core.languages.aql.translators.Command import Command as CoreCommand # class Command(CoreCommand): # def __init__(self, value=None, result=None, symbol_table=None, children=[], parent={}, interpreter=None): super().__init__(value, result, symbol_table, children, parent, interpreter) # TODO def push_context(self, context): raise NotImplementedError # TODO def pop_context(self): raise NotImplementedError # TODO def peek_context(self, level): raise NotImplementedError # TODO def execute(self): return self.result
25.541667
109
0.683524
67
613
6.059701
0.492537
0.068966
0.206897
0.229064
0
0
0
0
0
0
0
0
0.220228
613
23
110
26.652174
0.849372
0.030995
0
0.25
0
0
0
0
0
0
0
0.043478
0
1
0.416667
false
0
0.083333
0.083333
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
1
0
0
5
a9951a03f5ac16599473ed5824ebfbdc569fbd1c
42
py
Python
wikipediabase/__main__.py
fakedrake/WikipediaBase
ab5aa92786bddcd7942ad3e3f1f4e433575ba3fb
[ "Apache-2.0" ]
1
2017-11-26T17:57:59.000Z
2017-11-26T17:57:59.000Z
wikipediabase/__main__.py
fakedrake/WikipediaBase
ab5aa92786bddcd7942ad3e3f1f4e433575ba3fb
[ "Apache-2.0" ]
34
2015-03-23T10:28:59.000Z
2021-12-13T20:16:48.000Z
wikipediabase/__main__.py
fakedrake/WikipediaBase
ab5aa92786bddcd7942ad3e3f1f4e433575ba3fb
[ "Apache-2.0" ]
2
2015-05-17T00:56:45.000Z
2015-06-27T22:10:59.000Z
from wikipediabase.cli import main main()
14
34
0.809524
6
42
5.666667
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.119048
42
2
35
21
0.918919
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
8d28d792cc85c8a157a05b595a2850130c95b662
95
py
Python
ha_config/custom_components/lan_smartthings/light.py
bogusfocused/ha_lan_smartthings
81ae2f1305e75fa418afc454095167610724ab37
[ "MIT" ]
2
2021-12-22T02:16:48.000Z
2022-01-26T07:02:03.000Z
ha_config/custom_components/lan_smartthings/light.py
bogusfocused/ha_lan_smartthings
81ae2f1305e75fa418afc454095167610724ab37
[ "MIT" ]
null
null
null
ha_config/custom_components/lan_smartthings/light.py
bogusfocused/ha_lan_smartthings
81ae2f1305e75fa418afc454095167610724ab37
[ "MIT" ]
null
null
null
"""Support for lights through the SmartThings cloud API.""" from .smartthings.light import *
31.666667
59
0.747368
12
95
5.916667
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.147368
95
2
60
47.5
0.876543
0.557895
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
8d37d4dfb991d516d9bdb65c8b809327633e532a
4,984
py
Python
Discord Bots/Robo Wizzz/cogs/mod.py
LUNA761/Code-Archive
c455ca1b4faa230fbbd86c4222c1589ddb0df964
[ "MIT" ]
1
2021-10-04T08:30:04.000Z
2021-10-04T08:30:04.000Z
Discord Bots/Robo Wizzz/cogs/mod.py
LUNA761/Code-Archive
c455ca1b4faa230fbbd86c4222c1589ddb0df964
[ "MIT" ]
null
null
null
Discord Bots/Robo Wizzz/cogs/mod.py
LUNA761/Code-Archive
c455ca1b4faa230fbbd86c4222c1589ddb0df964
[ "MIT" ]
2
2022-01-06T19:07:19.000Z
2022-01-07T14:52:15.000Z
import discord, time, asyncio, os, random, json from discord.ext import commands, tasks from discord.ext.commands import has_permissions, cooldown, MissingPermissions, check from discord.utils import get from termcolor import colored class App(commands.Cog): def __init__(self, client): self.client = client print("Moderation "+colored('Running', 'green')) @commands.command(aliases=['clear']) @has_permissions(manage_messages=True) async def purge(self, ctx, limit : int = 2): try: x = int(limit) except: await ctx.send("Thats not a number!") return await ctx.channel.purge(limit = limit) msg = await ctx.send(f"Cleared by {ctx.author.mention}") await asyncio.sleep(2) await msg.delete() @purge.error async def purge_error(self, ctx, error): if isinstance(error, commands.MissingPermissions): embed=discord.Embed(title="This command can be used only by members who moderate the server.", color=0xff0000) embed.set_author(name="Hey! You are not allowed to do that!") embed.set_thumbnail(url='https://images-ext-2.discordapp.net/external/N8LpKSOO52iNCGjQQ6yJOqlW89RJkzSbDSNjOOa_mVg/%3Fwidth%3D427%26height%3D427/https/media.discordapp.net/attachments/798968646336249928/816351655812792410/kKMxMuzufRPMRxioYgWq_XylZQ1cP0nJ89Za3Bo0VxNGypkybermjXlzOhjW-gCEuT675G4cz_CBDo5Db7Qow5sUuR70FUlXVwag.png?width=300&height=300') embed.set_footer(text="Robo Wizzz Moderation.") await ctx.send(embed=embed) else: raise error @commands.command(aliases=['k']) @has_permissions(kick_members=True) async def kick(self, ctx, member : discord.Member, *, reason = "No reason has been provided by the Moderator."): name = member.name await member.kick(reason=reason) embed=discord.Embed(title=f"Reason: {reason}", description="ID: 1828392010192", color=0x24a800) embed.set_author(name=f"{name} has been kicked from the server.") embed.set_thumbnail(url='https://media.discordapp.net/attachments/811227367635681321/816702594361458708/woman-s-boot-emoji-clipart-sm.png') embed.set_footer(text="Robo Wizzz Moderation.") await 
ctx.send(embed=embed) @kick.error async def kick_error(self, ctx, error): if isinstance(error, commands.MissingPermissions): embed=discord.Embed(title="This command can be used only by members who moderate the server.", color=0xff0000) embed.set_author(name="Hey! You are not allowed to do that!") embed.set_thumbnail(url='https://images-ext-2.discordapp.net/external/N8LpKSOO52iNCGjQQ6yJOqlW89RJkzSbDSNjOOa_mVg/%3Fwidth%3D427%26height%3D427/https/media.discordapp.net/attachments/798968646336249928/816351655812792410/kKMxMuzufRPMRxioYgWq_XylZQ1cP0nJ89Za3Bo0VxNGypkybermjXlzOhjW-gCEuT675G4cz_CBDo5Db7Qow5sUuR70FUlXVwag.png?width=300&height=300') embed.set_footer(text="Robo Wizzz Moderation.") await ctx.send(embed=embed) elif isinstance(error, commands.CommandInvokeError): await ctx.send(f"❌ Hey, {ctx.author.name} I can't cannot kick this member!") else: raise error @commands.command(aliases=['b']) @has_permissions(ban_members=True) async def ban(self, ctx, member : discord.Member, *, reason = "No reason has been provided by the Moderator."): name = member.name await member.ban(reason=reason) embed=discord.Embed(title=f"Reason: {reason}", description="ID: 1828392010192", color=0x24a800) embed.set_author(name=f"{name} has been banned from the server.") embed.set_thumbnail(url='https://media.discordapp.net/attachments/811227367635681321/816702594361458708/woman-s-boot-emoji-clipart-sm.png') embed.set_footer(text="Robo Wizzz Moderation.") await ctx.send(embed=embed) @ban.error async def ban_error(self, ctx, error): if isinstance(error, commands.MissingPermissions): embed=discord.Embed(title="This command can be used only by members who moderate the server.", color=0xff0000) embed.set_author(name="Hey! 
You are not allowed to do that!") embed.set_thumbnail(url='https://images-ext-2.discordapp.net/external/N8LpKSOO52iNCGjQQ6yJOqlW89RJkzSbDSNjOOa_mVg/%3Fwidth%3D427%26height%3D427/https/media.discordapp.net/attachments/798968646336249928/816351655812792410/kKMxMuzufRPMRxioYgWq_XylZQ1cP0nJ89Za3Bo0VxNGypkybermjXlzOhjW-gCEuT675G4cz_CBDo5Db7Qow5sUuR70FUlXVwag.png?width=300&height=300') embed.set_footer(text="Robo Wizzz Moderation.") await ctx.send(embed=embed) elif isinstance(error, commands.CommandInvokeError): await ctx.send(f"❌ Hey, {ctx.author.mention} I can't cannot ban this member!") else: raise error def setup(client): client.add_cog(App(client))
56.636364
360
0.708066
603
4,984
5.791045
0.248756
0.034364
0.030928
0.031501
0.78236
0.772623
0.752005
0.752005
0.752005
0.752005
0
0.086486
0.183387
4,984
87
361
57.287356
0.771007
0
0
0.467532
0
0.064935
0.410112
0
0
0
0.008026
0
0
1
0.025974
false
0
0.064935
0
0.116883
0.012987
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
8d63ca0a2d02b18eaa751f641fe0104af96609fc
132
py
Python
cold_ray_norilsk/cold_ray_norilsk/src/game_model/entity/person/base_person.py
DeadSonger/cold_ray_norilsk
ad6d5df4eea489b71c8224c7d3cda6c0f0b34eac
[ "CC0-1.0" ]
null
null
null
cold_ray_norilsk/cold_ray_norilsk/src/game_model/entity/person/base_person.py
DeadSonger/cold_ray_norilsk
ad6d5df4eea489b71c8224c7d3cda6c0f0b34eac
[ "CC0-1.0" ]
2
2021-04-20T09:11:43.000Z
2021-04-24T22:49:49.000Z
cold_ray_norilsk/cold_ray_norilsk/src/game_model/entity/person/base_person.py
DeadSonger/cold_ray_norilsk
ad6d5df4eea489b71c8224c7d3cda6c0f0b34eac
[ "CC0-1.0" ]
null
null
null
from abc import ABC from cold_ray_norilsk.src.game_model.entity.base_entity import BEntity class BPerson(BEntity, ABC): pass
16.5
70
0.795455
21
132
4.809524
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.143939
132
7
71
18.857143
0.893805
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.25
0.5
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
5
8d750ae281e98f737111cd5c815de36b750c2162
146
py
Python
ontario/items.py
zanadaniel/ontario-scraper
c588fcbd10e99254988eec038a32ab22c00b5882
[ "MIT" ]
null
null
null
ontario/items.py
zanadaniel/ontario-scraper
c588fcbd10e99254988eec038a32ab22c00b5882
[ "MIT" ]
null
null
null
ontario/items.py
zanadaniel/ontario-scraper
c588fcbd10e99254988eec038a32ab22c00b5882
[ "MIT" ]
null
null
null
from scrapy import Item from scrapy import Field class Member(Item): member_id = Field() member_name = Field() member_trade = Field()
20.857143
26
0.705479
20
146
5
0.5
0.2
0.32
0
0
0
0
0
0
0
0
0
0.212329
146
7
26
20.857143
0.869565
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
8d9fd2fb5d037a7c718660ae2a4a213a616f7370
108
py
Python
internal/endtoend/testdata/exec_rows/python_postgresql/python/models.py
ShivamSarodia/sqlc
194065e223a53d19219efb290a53b45fcd036a6b
[ "MIT" ]
5,153
2019-08-19T19:24:06.000Z
2022-03-31T22:26:53.000Z
internal/endtoend/testdata/exec_rows/python_postgresql/python/models.py
ShivamSarodia/sqlc
194065e223a53d19219efb290a53b45fcd036a6b
[ "MIT" ]
871
2019-08-19T02:20:57.000Z
2022-03-31T21:03:04.000Z
internal/endtoend/testdata/exec_rows/python_postgresql/python/models.py
ShivamSarodia/sqlc
194065e223a53d19219efb290a53b45fcd036a6b
[ "MIT" ]
377
2019-09-04T07:27:09.000Z
2022-03-31T21:54:45.000Z
# Code generated by sqlc. DO NOT EDIT. import dataclasses @dataclasses.dataclass() class Bar: id: int
13.5
38
0.722222
15
108
5.2
0.933333
0
0
0
0
0
0
0
0
0
0
0
0.194444
108
7
39
15.428571
0.896552
0.333333
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
5
a5e463d0cec6edf5639450a8eb67193837568a3c
78
py
Python
dlib-scikit/dlib_scikit_test.py
datmo/docker-files
adf99782fed8cd082a456913a27f7a31dd824e73
[ "MIT" ]
20
2017-08-12T23:31:45.000Z
2022-03-14T20:03:19.000Z
dlib-scikit/dlib_scikit_test.py
asuprem/docker-files
adf99782fed8cd082a456913a27f7a31dd824e73
[ "MIT" ]
null
null
null
dlib-scikit/dlib_scikit_test.py
asuprem/docker-files
adf99782fed8cd082a456913a27f7a31dd824e73
[ "MIT" ]
10
2018-11-30T03:05:49.000Z
2021-01-13T07:20:34.000Z
import dlib import sklearn print(dlib.__version__) print(sklearn.__version__)
15.6
26
0.846154
10
78
5.8
0.5
0
0
0
0
0
0
0
0
0
0
0
0.076923
78
5
26
15.6
0.805556
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
a5efd3fab26876d131f780e86c282c34862153f2
173
py
Python
python/testData/refactoring/rename/renameLocalWithComprehension_after.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/refactoring/rename/renameLocalWithComprehension_after.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/refactoring/rename/renameLocalWithComprehension_after.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
parameter_lists_copy = [bar for bar in parameter_lists] for bar in parameter_lists_copy: if param_index >= len(bar.GetParameters()): parameter_lists.remove(bar)
34.6
55
0.751445
25
173
4.92
0.48
0.455285
0.292683
0.276423
0.357724
0
0
0
0
0
0
0
0.16185
173
4
56
43.25
0.848276
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
93b6a9683ec441f726af767163bf866632ddbf4e
5,099
py
Python
TESTS.py
LRHammond/pv4dsrl
7053ea392f9ddbf3672a0ac348d61abe6b7264d9
[ "MIT" ]
null
null
null
TESTS.py
LRHammond/pv4dsrl
7053ea392f9ddbf3672a0ac348d61abe6b7264d9
[ "MIT" ]
2
2020-03-24T16:31:16.000Z
2020-03-31T00:56:57.000Z
TESTS.py
LRHammond/pv4dsrl
7053ea392f9ddbf3672a0ac348d61abe6b7264d9
[ "MIT" ]
null
null
null
from pydc import HYPE hype = HYPE("models/perfect.pl", 100) result = hype.plan_step( "[observation(x_pos(obj0)) ~= 0, observation(y_pos(obj0)) ~= 0, observation(x_size(obj0)) ~= small, observation(y_size(obj0)) ~= small, observation(colour(obj0)) ~= wall, observation(shape(obj0)) ~= square, observation(nothing(obj0)) ~= no, observation(x_pos(obj1)) ~= 1, observation(y_pos(obj1)) ~= 0, observation(x_size(obj1)) ~= small, observation(y_size(obj1)) ~= small, observation(colour(obj1)) ~= wall, observation(shape(obj1)) ~= square, observation(nothing(obj1)) ~= no, observation(x_pos(obj2)) ~= 2, observation(y_pos(obj2)) ~= 0, observation(x_size(obj2)) ~= small, observation(y_size(obj2)) ~= small, observation(colour(obj2)) ~= wall, observation(shape(obj2)) ~= square, observation(nothing(obj2)) ~= no, observation(x_pos(obj3)) ~= 3, observation(y_pos(obj3)) ~= 0, observation(x_size(obj3)) ~= small, observation(y_size(obj3)) ~= small, observation(colour(obj3)) ~= wall, observation(shape(obj3)) ~= square, observation(nothing(obj3)) ~= no, observation(x_pos(obj4)) ~= 4, observation(y_pos(obj4)) ~= 0, observation(x_size(obj4)) ~= small, observation(y_size(obj4)) ~= small, observation(colour(obj4)) ~= wall, observation(shape(obj4)) ~= square, observation(nothing(obj4)) ~= no, observation(x_pos(obj5)) ~= 0, observation(y_pos(obj5)) ~= 1, observation(x_size(obj5)) ~= small, observation(y_size(obj5)) ~= small, observation(colour(obj5)) ~= wall, observation(shape(obj5)) ~= square, observation(nothing(obj5)) ~= no, observation(x_pos(obj6)) ~= 4, observation(y_pos(obj6)) ~= 1, observation(x_size(obj6)) ~= small, observation(y_size(obj6)) ~= small, observation(colour(obj6)) ~= wall, observation(shape(obj6)) ~= square, observation(nothing(obj6)) ~= no, observation(x_pos(obj7)) ~= 0, observation(y_pos(obj7)) ~= 2, observation(x_size(obj7)) ~= small, observation(y_size(obj7)) ~= small, observation(colour(obj7)) ~= wall, observation(shape(obj7)) ~= square, observation(nothing(obj7)) ~= no, 
observation(x_pos(obj8)) ~= 3, observation(y_pos(obj8)) ~= 2, observation(x_size(obj8)) ~= small, observation(y_size(obj8)) ~= small, observation(colour(obj8)) ~= wall, observation(shape(obj8)) ~= square, observation(nothing(obj8)) ~= no, observation(x_pos(obj9)) ~= 4, observation(y_pos(obj9)) ~= 2, observation(x_size(obj9)) ~= small, observation(y_size(obj9)) ~= small, observation(colour(obj9)) ~= wall, observation(shape(obj9)) ~= square, observation(nothing(obj9)) ~= no, observation(x_pos(obj10)) ~= 0, observation(y_pos(obj10)) ~= 3, observation(x_size(obj10)) ~= small, observation(y_size(obj10)) ~= small, observation(colour(obj10)) ~= wall, observation(shape(obj10)) ~= square, observation(nothing(obj10)) ~= no, observation(x_pos(obj11)) ~= 4, observation(y_pos(obj11)) ~= 3, observation(x_size(obj11)) ~= small, observation(y_size(obj11)) ~= small, observation(colour(obj11)) ~= wall, observation(shape(obj11)) ~= square, observation(nothing(obj11)) ~= no, observation(x_pos(obj12)) ~= 0, observation(y_pos(obj12)) ~= 4, observation(x_size(obj12)) ~= small, observation(y_size(obj12)) ~= small, observation(colour(obj12)) ~= wall, observation(shape(obj12)) ~= square, observation(nothing(obj12)) ~= no, observation(x_pos(obj13)) ~= 1, observation(y_pos(obj13)) ~= 4, observation(x_size(obj13)) ~= small, observation(y_size(obj13)) ~= small, observation(colour(obj13)) ~= wall, observation(shape(obj13)) ~= square, observation(nothing(obj13)) ~= no, observation(x_pos(obj14)) ~= 2, observation(y_pos(obj14)) ~= 4, observation(x_size(obj14)) ~= small, observation(y_size(obj14)) ~= small, observation(colour(obj14)) ~= wall, observation(shape(obj14)) ~= square, observation(nothing(obj14)) ~= no, observation(x_pos(obj15)) ~= 3, observation(y_pos(obj15)) ~= 4, observation(x_size(obj15)) ~= small, observation(y_size(obj15)) ~= small, observation(colour(obj15)) ~= wall, observation(shape(obj15)) ~= square, observation(nothing(obj15)) ~= no, observation(x_pos(obj16)) ~= 4, 
observation(y_pos(obj16)) ~= 4, observation(x_size(obj16)) ~= small, observation(y_size(obj16)) ~= small, observation(colour(obj16)) ~= wall, observation(shape(obj16)) ~= square, observation(nothing(obj16)) ~= no, observation(x_pos(obj17)) ~= 1, observation(y_pos(obj17)) ~= 3, observation(x_size(obj17)) ~= small, observation(y_size(obj17)) ~= small, observation(colour(obj17)) ~= hole, observation(shape(obj17)) ~= square, observation(nothing(obj17)) ~= no, observation(x_pos(obj18)) ~= 3, observation(y_pos(obj18)) ~= 3, observation(x_size(obj18)) ~= small, observation(y_size(obj18)) ~= small, observation(colour(obj18)) ~= goal, observation(shape(obj18)) ~= square, observation(nothing(obj18)) ~= no, observation(x_pos(obj19)) ~= 1, observation(y_pos(obj19)) ~= 2, observation(x_size(obj19)) ~= small, observation(y_size(obj19)) ~= small, observation(colour(obj19)) ~= agent, observation(shape(obj19)) ~= square, observation(nothing(obj19)) ~= no]", 100, max_horizon=10, used_horizon=10, use_abstraction=False ) best_action = result["best_action"] stop = result["stop"] print best_action
364.214286
4,858
0.710335
675
5,099
5.237037
0.091852
0.135785
0.084866
0.118812
0
0
0
0
0
0
0
0.056107
0.091194
5,099
13
4,859
392.230769
0.706733
0
0
0
0
0.083333
0.957639
0.692489
0
0
0
0
0
0
null
null
0
0.083333
null
null
0.083333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
1
1
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
93cb6e16c9bfce4696850a9b7f4f766113cf7cfa
40
py
Python
nonemptystr/exceptions.py
nekonoshiri/nonemptystr
0a275c96391b1cc4cada19e5d9354b61e7b7a625
[ "MIT" ]
null
null
null
nonemptystr/exceptions.py
nekonoshiri/nonemptystr
0a275c96391b1cc4cada19e5d9354b61e7b7a625
[ "MIT" ]
2
2021-10-06T07:59:44.000Z
2021-10-06T08:39:27.000Z
nonemptystr/exceptions.py
nekonoshiri/nonemptystr
0a275c96391b1cc4cada19e5d9354b61e7b7a625
[ "MIT" ]
null
null
null
class EmptyString(ValueError): pass
13.333333
30
0.75
4
40
7.5
1
0
0
0
0
0
0
0
0
0
0
0
0.175
40
2
31
20
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
93d7356035f886f153932085ef714c53cf645283
262
py
Python
core/admin.py
remiberthoz/teachers-rubrics
355a282e8d4e91e343eef28aaa9b1214b03d3399
[ "MIT" ]
null
null
null
core/admin.py
remiberthoz/teachers-rubrics
355a282e8d4e91e343eef28aaa9b1214b03d3399
[ "MIT" ]
null
null
null
core/admin.py
remiberthoz/teachers-rubrics
355a282e8d4e91e343eef28aaa9b1214b03d3399
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Exam, GradeItem, GradedItemInReport, Report, Section admin.site.register(Exam) admin.site.register(Section) admin.site.register(GradeItem) admin.site.register(Report) admin.site.register(GradedItemInReport)
26.2
72
0.828244
33
262
6.575758
0.393939
0.207373
0.391705
0.221198
0
0
0
0
0
0
0
0
0.072519
262
9
73
29.111111
0.893004
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.285714
0
0.285714
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
93e6404afdfd81ed3ef4a714c26fb94e92aabc10
1,160
py
Python
tests/kyu_5_tests/test_first_non_repeating_letter.py
the-zebulan/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
40
2016-03-09T12:26:20.000Z
2022-03-23T08:44:51.000Z
tests/kyu_5_tests/test_first_non_repeating_letter.py
akalynych/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
null
null
null
tests/kyu_5_tests/test_first_non_repeating_letter.py
akalynych/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
36
2016-11-07T19:59:58.000Z
2022-03-31T11:18:27.000Z
import unittest from katas.kyu_5.first_non_repeating_letter import first_non_repeating_letter class FirstNonRepeatingLetterTestCase(unittest.TestCase): def test_equal_1(self): self.assertEqual(first_non_repeating_letter('a'), 'a') def test_equal_2(self): self.assertEqual(first_non_repeating_letter('stress'), 't') def test_equal_3(self): self.assertEqual(first_non_repeating_letter('moonmen'), 'e') def test_equal_4(self): self.assertEqual(first_non_repeating_letter(''), '') def test_equal_5(self): self.assertEqual(first_non_repeating_letter('abba'), '') def test_equal_6(self): self.assertEqual(first_non_repeating_letter('aa'), '') def test_equal_7(self): self.assertEqual(first_non_repeating_letter('~><#~><'), '#') def test_equal_8(self): self.assertEqual(first_non_repeating_letter('hello world, eh?'), 'w') def test_equal_9(self): self.assertEqual(first_non_repeating_letter('sTreSS'), 'T') def test_equal_10(self): self.assertEqual(first_non_repeating_letter( "Go hang a salami, I'm a lasagna hog!"), ',')
31.351351
77
0.696552
152
1,160
4.940789
0.296053
0.12783
0.271638
0.36751
0.641811
0.641811
0.641811
0.306258
0.306258
0.306258
0
0.012526
0.174138
1,160
36
78
32.222222
0.771399
0
0
0
0
0
0.07931
0
0
0
0
0
0.416667
1
0.416667
false
0
0.083333
0
0.541667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
0
0
0
0
1
0
0
5
9e2596f4630fe515b69ec4571a687fcdc8183237
5,688
py
Python
thumt/optimizer.py
vicapple22/THUMT
c852da224e4c99f5312ba6e0b9a68c2575923fb7
[ "BSD-3-Clause" ]
1
2021-04-21T05:30:06.000Z
2021-04-21T05:30:06.000Z
thumt/optimizer.py
liyc7711/THUMT
c852da224e4c99f5312ba6e0b9a68c2575923fb7
[ "BSD-3-Clause" ]
null
null
null
thumt/optimizer.py
liyc7711/THUMT
c852da224e4c99f5312ba6e0b9a68c2575923fb7
[ "BSD-3-Clause" ]
1
2017-08-31T08:25:15.000Z
2017-08-31T08:25:15.000Z
import numpy import theano import theano.tensor as tensor from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams import tools class optimizer(object): ''' The parent class of SGD algorithms ''' def __init__(self): pass class adadelta(optimizer): def __init__(self, config, params): self.config = config self.params = params self.gc = [tools.init_zeros(p.get_value().shape) for p in params] self.g2 = [tools.init_zeros(p.get_value().shape) for p in params] self.u2 = [tools.init_zeros(p.get_value().shape) for p in params] def build(self, cost, inp): grads_noclip = tensor.grad(cost, self.params) grads, grad_norm = tools.clip(grads_noclip, self.config['clip'], params = self.params) gc_up = [(gc, gr) for gc, gr in zip(self.gc, grads)] g2_up = [(g2, self.config['rho'] * g2 + (1. - self.config['rho']) * (gr ** 2.)) for g2, gr in zip(self.g2, grads)] #noclip = theano.function(inp, [cost]+grads_noclip) #noupdate_grads = theano.function(inp, [cost, grad_norm]) update_grads = theano.function(inp, [cost, grad_norm], updates = gc_up + g2_up) delta = [tensor.sqrt(u2 + self.config['epsilon']) / tensor.sqrt(g2 + self.config['epsilon']) * gr for g2, u2, gr in zip(self.g2, self.u2, self.gc)] u2_up = [(u2, self.config['rho'] * u2 + (1. 
- self.config['rho']) * (d ** 2.)) for u2, d in zip(self.u2, delta)] param_up = [(p, p - d) for p, d in zip(self.params, delta)] #update_params = theano.function([], [], updates=param_up+u2_up) update_params = theano.function([], [], updates = param_up + u2_up) return update_grads, update_params class adam(optimizer): def __init__(self, config, params): self.config = config self.params = params self.gc = [tools.init_zeros(p.get_value().shape) for p in params] self.m = [tools.init_zeros(p.get_value().shape) for p in params] self.v = [tools.init_zeros(p.get_value().shape) for p in params] self.beta1t = theano.shared(numpy.float32(config['beta1_adam'])) self.beta2t = theano.shared(numpy.float32(config['beta2_adam'])) def build(self, cost, inp): grads_noclip = tensor.grad(cost, self.params) grads, grad_norm = tools.clip(grads_noclip, self.config['clip'], params=self.params) update_ab = [(self.beta1t, self.beta1t * self.config['beta1_adam']), (self.beta2t, self.beta2t * self.config['beta2_adam'])] update_gc = [(gc, gr) for gc, gr in zip(self.gc, grads)] update_gc = [(gc, gr) for gc, gr in zip(self.gc, grads)] m_up = [(m, self.config['beta1_adam'] * m + (1. - self.config['beta1_adam']) * gr) for m, gr in zip(self.m, grads)] v_up = [(v, self.config['beta2_adam'] * v + (1. - self.config['beta2_adam']) * (gr ** 2)) for v, gr in zip(self.v, grads)] update_grads = theano.function(inp, [cost, grad_norm], updates=update_gc + m_up + v_up) param_up = [(p, p - self.config['alpha_adam'] * (m / (1. - self.beta1t)) / (tensor.sqrt(v / (1. 
- self.beta2t)) + self.config['eps_adam'])) for p, m, v in zip(self.params, self.m, self.v)] update_params = theano.function([],[], updates = update_ab + param_up) return update_grads, update_params class adam_slowstart(optimizer): ''' Adam with lowered learning rate at the beginning ''' def __init__(self, config, params): self.config = config self.params = params self.gc = [tools.init_zeros(p.get_value().shape) for p in params] self.m = [tools.init_zeros(p.get_value().shape) for p in params] self.v = [tools.init_zeros(p.get_value().shape) for p in params] self.beta1t = theano.shared(numpy.float32(config['beta1_adam'])) self.beta2t = theano.shared(numpy.float32(config['beta2_adam'])) self.alphadecayt = theano.shared(numpy.float32(config['alphadecay_adam'])) def build(self, cost, inp): grads_noclip = tensor.grad(cost, self.params) grads, grad_norm = tools.clip(grads_noclip, self.config['clip'], params=self.params) update_ab = [(self.beta1t, self.beta1t * self.config['beta1_adam']), (self.beta2t, self.beta2t * self.config['beta2_adam']), (self.alphadecayt, self.alphadecayt * self.config['alphadecay_adam'])] update_gc = [(gc, gr) for gc, gr in zip(self.gc, grads)] update_gc = [(gc, gr) for gc, gr in zip(self.gc, grads)] m_up = [(m, self.config['beta1_adam'] * m + (1. - self.config['beta1_adam']) * gr) for m, gr in zip(self.m, grads)] v_up = [(v, self.config['beta2_adam'] * v + (1. - self.config['beta2_adam']) * (gr ** 2)) for v, gr in zip(self.v, grads)] update_grads = theano.function(inp, [cost, grad_norm], updates=update_gc + m_up + v_up) self.alphat = (1.0 - self.alphadecayt) * self.config['alpha_adam'] param_up = [(p, p - self.alphat * (m / (1. - self.beta1t)) / (tensor.sqrt(v / (1. 
- self.beta2t)) + self.config['eps_adam'])) for p, m, v in zip(self.params, self.m, self.v)] update_params = theano.function([],[], updates = update_ab + param_up) return update_grads, update_params class SGD(optimizer): ''' SGD with fixed learning rate ''' def __init__(self, config, params): self.config = config self.params = params self.gc = [tools.init_zeros(p.get_value().shape) for p in params] def build(self, cost, inp): grads_noclip = tensor.grad(cost, self.params) grads, grad_norm = tools.clip(grads_noclip, self.config['clip'], square = False, params = self.params) gc_up = [(gc, gr) for gc, gr in zip(self.gc, grads)] update_grads = theano.function(inp, [cost, tensor.sqrt(grad_norm)], updates = gc_up) lr = numpy.float32(self.config['lr']) delta = [-lr * gr for gr in self.gc] params_up = [(p, p - lr * gr) for p, gr in zip(self.params, self.gc)] update_params = theano.function([], [], updates = params_up) return update_grads, update_params
44.093023
190
0.674226
907
5,688
4.077178
0.095921
0.09735
0.041374
0.03867
0.788805
0.738237
0.729854
0.713629
0.702272
0.665765
0
0.016854
0.155063
5,688
128
191
44.4375
0.752601
0.049578
0
0.591398
0
0
0.050279
0
0
0
0
0
0
1
0.096774
false
0.010753
0.053763
0
0.247312
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
f5080bd3547014da411f79d2e475276ccb5dff45
142
py
Python
deep_table/nn/encoders/embedding/__init__.py
pfnet-research/deep-table
a19c0c3048484017d5f24806604c3b3470bcf550
[ "MIT" ]
48
2021-09-30T08:14:26.000Z
2022-03-02T12:20:08.000Z
deep_table/nn/encoders/embedding/__init__.py
pfnet-research/deep-table
a19c0c3048484017d5f24806604c3b3470bcf550
[ "MIT" ]
1
2021-11-08T11:41:49.000Z
2021-11-08T11:41:49.000Z
deep_table/nn/encoders/embedding/__init__.py
pfnet-research/deep-table
a19c0c3048484017d5f24806604c3b3470bcf550
[ "MIT" ]
2
2021-12-31T03:43:48.000Z
2022-03-11T09:04:21.000Z
from .base import BaseEmbedding from .feature_embedding import FeatureEmbedding from .tabtransformer_embedding import TabTransformerEmbedding
35.5
61
0.894366
14
142
8.928571
0.642857
0.24
0
0
0
0
0
0
0
0
0
0
0.084507
142
3
62
47.333333
0.961538
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
1921c70f55e083642037e8729681c92631e89aef
134
py
Python
hyppo/__init__.py
RebeccaYin7/hyppo
fc01775d2253930e095032efd051373148dce58f
[ "Apache-2.0" ]
null
null
null
hyppo/__init__.py
RebeccaYin7/hyppo
fc01775d2253930e095032efd051373148dce58f
[ "Apache-2.0" ]
null
null
null
hyppo/__init__.py
RebeccaYin7/hyppo
fc01775d2253930e095032efd051373148dce58f
[ "Apache-2.0" ]
null
null
null
import hyppo.independence import hyppo.ksample import hyppo.time_series import hyppo.sims import hyppo.discrim __version__ = "0.1.3"
16.75
25
0.820896
20
134
5.25
0.6
0.52381
0
0
0
0
0
0
0
0
0
0.025
0.104478
134
7
26
19.142857
0.85
0
0
0
0
0
0.037313
0
0
0
0
0
0
1
0
false
0
0.833333
0
0.833333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
193150810a1857d730a24cd7db5a4c5f031e19ad
110
py
Python
src/api/v1/solutions/__init__.py
BuildForSDG/Team-115-Product
8e7c56c9a7b97970ba5ee4375928be5b28988b0b
[ "MIT" ]
null
null
null
src/api/v1/solutions/__init__.py
BuildForSDG/Team-115-Product
8e7c56c9a7b97970ba5ee4375928be5b28988b0b
[ "MIT" ]
17
2020-05-11T00:30:33.000Z
2020-06-14T13:02:44.000Z
src/api/v1/solutions/__init__.py
BuildForSDG/Team-115-Product
8e7c56c9a7b97970ba5ee4375928be5b28988b0b
[ "MIT" ]
1
2020-05-02T19:09:19.000Z
2020-05-02T19:09:19.000Z
"""Register solutions blueprint.""" from flask import Blueprint solutions = Blueprint('solutions', __name__)
22
44
0.772727
11
110
7.363636
0.636364
0.444444
0
0
0
0
0
0
0
0
0
0
0.109091
110
4
45
27.5
0.826531
0.263636
0
0
0
0
0.12
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
1
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
1
0
5
194c9e95194fde7726fd75f06fef6fb0888c0e9b
2,935
py
Python
arduino/main.py
ManasUniyal/Friday
6e1ff6541cca98f073e3fd07218b22da165a613b
[ "MIT" ]
1
2021-03-01T11:25:32.000Z
2021-03-01T11:25:32.000Z
arduino/main.py
ManasUniyal/Friday
6e1ff6541cca98f073e3fd07218b22da165a613b
[ "MIT" ]
null
null
null
arduino/main.py
ManasUniyal/Friday
6e1ff6541cca98f073e3fd07218b22da165a613b
[ "MIT" ]
null
null
null
import sys if len(sys.argv) <= 1: print("Not Enough Arguments") exit() string = """#include <SoftwareSerial.h> void(* resetFunc) (void) = 0; SoftwareSerial mySerial(9, 10); // RX, TX void setup() { mySerial.begin(9600); // the GPRS baud rate Serial.begin(9600); // the GPRS baud rate delay(2000); } void loop() { String number; int count=0; if(Serial.available()>0) { if(Serial.readString()=="call") { // resetFunc(); } else if(Serial.available()>0) { number=Serial.readString(); } Serial.println(number); mySerial.println("ATD9057261430;"); // xxxxxxxxx is the number you want to dial, Noice the ";" in the end delay(2000); while(1) { mySerial.println("AT+SPWM=2,63,100");// set PWM 2 PIN delay(100); mySerial.println("AT+SPWM=1,63,100"); delay(100); mySerial.println("AT+SGPIO=0,1,1,1");// set GPIO 1 PIN to 1 delay(100); mySerial.println("AT+SGPIO=0,2,1,1"); delay(100); mySerial.println("AT+SGPIO=0,3,1,1"); delay(100); mySerial.println("AT+SGPIO=0,4,1,1"); delay(100); mySerial.println("AT+SGPIO=0,5,1,1"); delay(100); mySerial.println("AT+SGPIO=0,6,1,1"); delay(100); mySerial.println("AT+SGPIO=0,7,1,1"); delay(100); mySerial.println("AT+SGPIO=0,8,1,1"); delay(100); mySerial.println("AT+SGPIO=0,9,1,1"); delay(100); mySerial.println("AT+SGPIO=0,10,1,1"); delay(100); mySerial.println("AT+SGPIO=0,11,1,1"); delay(100); mySerial.println("AT+SGPIO=0,12,1,1"); delay(500); mySerial.println("AT+SPWM=1,63,0"); delay(100); mySerial.println("AT+SPWM=2,63,0"); delay(100); mySerial.println("AT+SGPIO=0,1,1,0"); // set GPIO 1 PIN to 0 delay(100); mySerial.println("AT+SGPIO=0,2,1,0"); delay(100); mySerial.println("AT+SGPIO=0,3,1,0"); delay(100); mySerial.println("AT+SGPIO=0,4,1,0"); delay(100); mySerial.println("AT+SGPIO=0,5,1,0"); delay(100); mySerial.println("AT+SGPIO=0,6,1,0"); delay(100); mySerial.println("AT+SGPIO=0,7,1,0"); delay(100); mySerial.println("AT+SGPIO=0,8,1,0"); delay(100); mySerial.println("AT+SGPIO=0,9,1,0"); delay(100); mySerial.println("AT+SGPIO=0,10,1,0"); delay(100); 
mySerial.println("AT+SGPIO=0,11,1,0"); delay(100); mySerial.println("AT+SGPIO=0,12,1,0"); delay(500); count++; if(count==25) { mySerial.println("ATH"); //end the call. if(mySerial.available()) { Serial.print((unsigned char)mySerial.read()); } } } }}""" string = string.replace('9057261430',sys.argv[1],1) f = open("program.nso","w") f.write(string)
27.175926
107
0.543441
418
2,935
3.815789
0.203349
0.282132
0.298433
0.374922
0.631975
0.615674
0.533542
0.514734
0.512853
0
0
0.119816
0.260647
2,935
108
108
27.175926
0.615207
0
0
0.29703
0
0.029703
0.94891
0.449251
0
0
0
0
0
1
0
false
0
0.009901
0
0.009901
0.326733
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1976b06b42d75a8ce5ff788a6f32da428aee18bd
106
py
Python
app/recognition/haarcascade.py
RafaelGSS/PyCamClass
fbf4f5e9332fe4b8d4d0e17de2fe1025cd77b1cf
[ "Apache-2.0" ]
1
2018-07-19T05:41:02.000Z
2018-07-19T05:41:02.000Z
app/recognition/haarcascade.py
RafaelGSS/PyCam
fbf4f5e9332fe4b8d4d0e17de2fe1025cd77b1cf
[ "Apache-2.0" ]
null
null
null
app/recognition/haarcascade.py
RafaelGSS/PyCam
fbf4f5e9332fe4b8d4d0e17de2fe1025cd77b1cf
[ "Apache-2.0" ]
null
null
null
frontal_face = 'cascades/haarcascade_frontalface_default.xml' eye_cascade = 'cascades/haarcascade_eye.xml'
53
61
0.858491
13
106
6.615385
0.692308
0.44186
0
0
0
0
0
0
0
0
0
0
0.04717
106
2
62
53
0.851485
0
0
0
0
0
0.672897
0.672897
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1990e00b31ee1a928a7eb19a93676bf21eb4e56f
2,847
py
Python
src/saleor_app/tests/test_install.py
mstrumeck/saleor-app-framework-python
edaae7c35d5e4e80e98c50795c3a63d906998834
[ "BSD-3-Clause" ]
null
null
null
src/saleor_app/tests/test_install.py
mstrumeck/saleor-app-framework-python
edaae7c35d5e4e80e98c50795c3a63d906998834
[ "BSD-3-Clause" ]
null
null
null
src/saleor_app/tests/test_install.py
mstrumeck/saleor-app-framework-python
edaae7c35d5e4e80e98c50795c3a63d906998834
[ "BSD-3-Clause" ]
null
null
null
from unittest.mock import AsyncMock from saleor_app.install import install_app from saleor_app.saleor.client import SaleorClient from saleor_app.saleor.mutations import CREATE_WEBHOOK from saleor_app.schemas.core import WebhookData async def test_install_app(mocker, manifest): mock_saleor_client = AsyncMock(SaleorClient) mock_saleor_client.__aenter__.return_value.execute.return_value = { "webhookCreate": {"webhook": {"id": "123"}} } mock_get_client_for_app = mocker.patch( "saleor_app.install.get_client_for_app", return_value=mock_saleor_client ) mocker.patch("saleor_app.install.secrets.choice", return_value="A") assert ( await install_app( saleor_domain="saleor_domain", auth_token="test_token", manifest=manifest, events={"queue_1": ["TEST_EVENT_1"], "url_1": ["TEST_EVENT_2"]}, use_insecure_saleor_http=True, ) == WebhookData(webhook_id="123", webhook_secret_key="A" * 20) ) mock_get_client_for_app.assert_called_once_with( "http://saleor_domain", manifest=manifest, auth_token="test_token" ) assert mock_saleor_client.__aenter__.return_value.execute.call_count == 2 mock_saleor_client.__aenter__.return_value.execute.assert_any_await( CREATE_WEBHOOK, variables={ "input": { "targetUrl": "queue_1", "events": ["TEST_EVENT_1"], "name": f"{manifest.name}", "secretKey": "A" * 20, } }, ) mock_saleor_client.__aenter__.return_value.execute.assert_any_await( CREATE_WEBHOOK, variables={ "input": { "targetUrl": "url_1", "events": ["TEST_EVENT_2"], "name": f"{manifest.name}", "secretKey": "A" * 20, } }, ) async def test_install_app_secure_https(mocker, manifest): mock_saleor_client = AsyncMock(SaleorClient) mock_saleor_client.__aenter__.return_value.execute.return_value = { "webhookCreate": {"webhook": {"id": "123"}} } mock_get_client_for_app = mocker.patch( "saleor_app.install.get_client_for_app", return_value=mock_saleor_client ) mocker.patch("saleor_app.install.secrets.choice", return_value="A") assert ( await install_app( saleor_domain="saleor_domain", auth_token="test_token", 
manifest=manifest, events={"queue_1": ["TEST_EVENT_1"], "url_1": ["TEST_EVENT_2"]}, use_insecure_saleor_http=False, ) == WebhookData(webhook_id="123", webhook_secret_key="A" * 20) ) mock_get_client_for_app.assert_called_once_with( "https://saleor_domain", manifest=manifest, auth_token="test_token" )
34.301205
80
0.636811
325
2,847
5.129231
0.206154
0.072585
0.086383
0.053989
0.829034
0.802639
0.802639
0.743851
0.688662
0.688662
0
0.015399
0.247278
2,847
82
81
34.719512
0.762483
0
0
0.555556
0
0
0.18019
0.049175
0
0
0
0
0.097222
1
0
false
0
0.069444
0
0.069444
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
199656b309cc2aa3d14ad1028a049937680254be
241
py
Python
Scripts/square.py
annelida/stuff
699d0230d5329a5eaebc8c93f9fba3c53920925f
[ "MIT" ]
null
null
null
Scripts/square.py
annelida/stuff
699d0230d5329a5eaebc8c93f9fba3c53920925f
[ "MIT" ]
2
2021-06-01T21:52:33.000Z
2021-12-13T19:44:00.000Z
Scripts/square.py
annelida/stuff
699d0230d5329a5eaebc8c93f9fba3c53920925f
[ "MIT" ]
1
2015-10-14T12:16:07.000Z
2015-10-14T12:16:07.000Z
def square(x): """Return the square of x. >>> square(2) 4 >>> square(-2) 4 >>> square(2.0) 4 >>> square(-2.0) 4 >>> square(1j) (-1 + 0j) >>> square('2') TypeError """ return x * x
13.388889
30
0.40249
32
241
3.03125
0.40625
0.360825
0.247423
0.28866
0.412371
0.257732
0
0
0
0
0
0.094595
0.385892
241
17
31
14.176471
0.560811
0.60166
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
5fdb8e291b2a045677654ef5d42d7ed80bfa259a
74
py
Python
model/model_token.py
tobiasaditya/transaction-service
84e9739c4bcb5a17a0faa866e46d6f19e16dddcb
[ "MIT" ]
null
null
null
model/model_token.py
tobiasaditya/transaction-service
84e9739c4bcb5a17a0faa866e46d6f19e16dddcb
[ "MIT" ]
null
null
null
model/model_token.py
tobiasaditya/transaction-service
84e9739c4bcb5a17a0faa866e46d6f19e16dddcb
[ "MIT" ]
null
null
null
from pydantic import BaseModel class TokenData(BaseModel): userId:str
18.5
30
0.797297
9
74
6.555556
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.148649
74
4
31
18.5
0.936508
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
5ff3daba2a50ab0df88411e96b7ccdf2abffece5
2,668
py
Python
Bugscan_exploits-master/exp_list/exp-133.py
csadsl/poc_exp
e3146262e7403f19f49ee2db56338fa3f8e119c9
[ "MIT" ]
11
2020-05-30T13:53:49.000Z
2021-03-17T03:20:59.000Z
Bugscan_exploits-master/exp_list/exp-133.py
csadsl/poc_exp
e3146262e7403f19f49ee2db56338fa3f8e119c9
[ "MIT" ]
6
2020-05-13T03:25:18.000Z
2020-07-21T06:24:16.000Z
Bugscan_exploits-master/exp_list/exp-133.py
csadsl/poc_exp
e3146262e7403f19f49ee2db56338fa3f8e119c9
[ "MIT" ]
6
2020-05-30T13:53:51.000Z
2020-12-01T21:44:26.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- #__author__ = 'ontheway' import re ''' fckeditor版本 <= 2.4.3 ''' def fck2_4_3(host): path = "editor/filemanager/upload/php/upload.php?Type=Media" data = "------WebKitFormBoundaryba3nn74V35zAYnAT\r\n" data += "Content-Disposition: form-data; name=\"NewFile\"; filename=\"ssdlh.php\"\r\n" data += "Content-Type: image/jpeg\r\n\r\n" data += "GIF89a<?php print(md5(521521));?>\r\n" data += "------WebKitFormBoundaryba3nn74V35zAYnAT--\r\n" head = "Content-Type: multipart/form-data; boundary=----WebKitFormBoundaryba3nn74V35zAYnAT\r\n" url = host + path code, head, body, ecode, redirect_url = curl.curl('-H \'%s\' -d \'%s\' %s' % (head,data,url)) if code == 200: shell = re.findall("eted\(\d+,\"(.+?.php)\"",body) if shell: phpurl = util.urljoin(host, '../'+shell[0]) code, head, body, ecode, redirect_url = curl.curl(phpurl) if code==200 and '35fd19fbe470f0cb5581884fa700610f' in body: security_hole('upload vulnerable:%s' % phpurl) else: security_info('maybe vulnerable:%s' % phpurl) ''' fckeditor 版本 介于2.4.3与2.6.4之间(不包括2.4.3) ''' def fck2_6_4(host): path = "editor/filemanager/connectors/php/connector.php?Command=FileUpload&Type=File&CurrentFolder=ssdlh.php%00.jpg" data = "------WebKitFormBoundaryba3nn74V35zAYnAT\r\n" data += "Content-Disposition: form-data; name=\"NewFile\"; filename=\"a.jpg\"\r\n" data += "Content-Type: image/jpeg\r\n\r\n" data += "GIF89a<?php print(md5(521521));?>\r\n" data += "------WebKitFormBoundaryba3nn74V35zAYnAT--\r\n" head = "Content-Type: multipart/form-data; boundary=----WebKitFormBoundaryba3nn74V35zAYnAT\r\n" url = host + path code, head, body, ecode, redirect_url = curl.curl('-H \'%s\' -d \'%s\' %s' % (head,data,url)) if code == 200: shell = re.findall("eted\(\d+,\"(.+?\.php)",body) if shell: phpurl = util.urljoin(host, '../'+shell[0]) code, head, body, ecode, redirect_url = curl.curl(phpurl) if code==200 and '35fd19fbe470f0cb5581884fa700610f' in body: security_hole('upload vulnerable:%s' % phpurl) else: 
security_info('maybe vulnerable:%s' % phpurl) def assign(service, arg): if service == "fckeditor": return True, arg def audit(arg): fck2_4_3(arg) fck2_6_4(arg) if __name__ == '__main__': from dummy import * audit(assign('fckeditor', 'http://127.0.0.1/fckeditor2.6/')[1])
39.235294
121
0.587706
324
2,668
4.753086
0.311728
0.018182
0.031169
0.103896
0.728571
0.728571
0.728571
0.728571
0.728571
0.728571
0
0.068127
0.22976
2,668
67
122
39.820896
0.681265
0.024363
0
0.625
0
0.020833
0.409109
0.225295
0
0
0
0
0
1
0.083333
false
0
0.041667
0
0.145833
0.041667
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
271ed456f083759b7d89417c01c3723fc86cb460
3,216
py
Python
exercises/exercise1.py
mholdg16/DistributedExercisesAAU
04e9a7e88f3eb19aa05b5d0da241b027facee390
[ "MIT" ]
null
null
null
exercises/exercise1.py
mholdg16/DistributedExercisesAAU
04e9a7e88f3eb19aa05b5d0da241b027facee390
[ "MIT" ]
null
null
null
exercises/exercise1.py
mholdg16/DistributedExercisesAAU
04e9a7e88f3eb19aa05b5d0da241b027facee390
[ "MIT" ]
null
null
null
import random import time from emulators.Device import Device from emulators.Medium import Medium from emulators.MessageStub import MessageStub class GossipMessage(MessageStub): def __init__(self, sender: int, destination: int, secrets): super().__init__(sender, destination) # we use a set to keep the "secrets" here self.secrets = secrets def __str__(self): return f'{self.source} -> {self.destination} : {self.secrets}' class Gossip(Device): def __init__(self, index: int, number_of_devices: int, medium: Medium): super().__init__(index, number_of_devices, medium) # for this exercise we use the index as the "secret", but it could have been a new routing-table (for instance) # or sharing of all the public keys in a cryptographic system self._secrets = set([index]) def run(self): while True: # choose a random receiver (that is not self) p = self.index() while p == self.index(): p = random.randint(0, self.number_of_devices() - 1) while True: ingoing = self.medium().receive() if ingoing is None: break # join the received secrets with known secrets self._secrets.update(ingoing.secrets) self.medium().send(GossipMessage(self.index(), p, self._secrets)) # the following is your termination condition, but where should it be placed? 
if len(self._secrets) == self.number_of_devices(): break # wait ~[0,1] seconds to avoid livelock time.sleep(random.random()) def print_result(self): print(f'\tDevice {self.index()} got secrets: {self._secrets}') class GossipCircular(Device): def __init__(self, index: int, number_of_devices: int, medium: Medium): super().__init__(index, number_of_devices, medium) # for this exercise we use the index as the "secret", but it could have been a new routing-table (for instance) # or sharing of all the public keys in a cryptographic system self._secrets = set([index]) def run(self): while True: # choose receiver (left or right) if random.randint(0,1) == 1: p = (self.index() + 1) % self.number_of_devices() else: p = (self.index() - 1) % self.number_of_devices() while True: ingoing = self.medium().receive() if ingoing is None: break # join the received secrets with known secrets self._secrets.update(ingoing.secrets) self.medium().send(GossipMessage(self.index(), p, self._secrets)) # the following is your termination condition, but where should it be placed? if len(self._secrets) == self.number_of_devices(): break # wait ~[0,1] seconds to avoid livelock time.sleep(random.random()) def print_result(self): print(f'\tDevice {self.index()} got secrets: {self._secrets}')
34.212766
119
0.589552
387
3,216
4.749354
0.263566
0.071817
0.073449
0.051687
0.738847
0.738847
0.738847
0.738847
0.706202
0.706202
0
0.004989
0.314366
3,216
94
120
34.212766
0.828571
0.240361
0
0.615385
0
0
0.064198
0
0
0
0
0
0
1
0.153846
false
0
0.096154
0.019231
0.326923
0.076923
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
273aaee7d73b65d85187ee474b0f7189268e7e51
122
py
Python
src/httpmon_kafka_pgsql/__init__.py
JonathanThorpe/httpmon_kafka_pgsql
b11b6fad2918e47c601b1af441029441258df482
[ "MIT" ]
null
null
null
src/httpmon_kafka_pgsql/__init__.py
JonathanThorpe/httpmon_kafka_pgsql
b11b6fad2918e47c601b1af441029441258df482
[ "MIT" ]
null
null
null
src/httpmon_kafka_pgsql/__init__.py
JonathanThorpe/httpmon_kafka_pgsql
b11b6fad2918e47c601b1af441029441258df482
[ "MIT" ]
null
null
null
#Package for the httpmon_kafka_pgsql application. from .__version__ import __version__ from .__main__ import main as main
30.5
49
0.844262
17
122
5.235294
0.705882
0
0
0
0
0
0
0
0
0
0
0
0.122951
122
3
50
40.666667
0.831776
0.393443
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
273f0b096d7774648729c29000ab81166dc75d86
251
py
Python
models/__init__.py
fshp971/BIF
015f142ed293bfb73d0176c1f303c1543fb8fe86
[ "MIT" ]
13
2021-01-19T04:31:30.000Z
2022-02-11T07:40:40.000Z
models/__init__.py
FengxiangHe/BIF
2d6e997105742f235c55c396d3b6ccdac6dbfda5
[ "MIT" ]
null
null
null
models/__init__.py
FengxiangHe/BIF
2d6e997105742f235c55c396d3b6ccdac6dbfda5
[ "MIT" ]
2
2021-01-19T04:31:31.000Z
2021-05-18T08:43:27.000Z
''' for GMM simulation ''' from .gmm import VariationalGMM from .gmm import GMM ''' for deep learning ''' from .normal_models import normalMLP from .normal_models import normalLeNet from .mcmc_models import mcmcMLP from .mcmc_models import mcmcLeNet
25.1
38
0.788845
34
251
5.705882
0.441176
0.247423
0.134021
0.226804
0
0
0
0
0
0
0
0
0.139442
251
10
39
25.1
0.898148
0.071713
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
27a70d9ab32abf58139c54418a2f17a7219775f4
55
py
Python
NYPC2019/Day_2/waterpipe.py
M4ndU/algorithm_task
3ad5c3fd311d0251260249ae2cf9d8bb36e42130
[ "MIT" ]
null
null
null
NYPC2019/Day_2/waterpipe.py
M4ndU/algorithm_task
3ad5c3fd311d0251260249ae2cf9d8bb36e42130
[ "MIT" ]
null
null
null
NYPC2019/Day_2/waterpipe.py
M4ndU/algorithm_task
3ad5c3fd311d0251260249ae2cf9d8bb36e42130
[ "MIT" ]
null
null
null
''' 어렵다. 그냥 찍자. ''' print("1") #10 print("4") #10
9.166667
15
0.418182
9
55
2.555556
0.777778
0
0
0
0
0
0
0
0
0
0
0.146341
0.254545
55
5
16
11
0.414634
0.290909
0
0
0
0
0.08
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
27bfddc75c7ff8a97f1c5403e29aa15bceaee6d2
28
py
Python
src/pypi-test-amdukhan/pypi_example.py
terminal-illness/pypi-test
cd2554227165e3ed97b1bb9c38cb30c7f8fa84f0
[ "MIT" ]
null
null
null
src/pypi-test-amdukhan/pypi_example.py
terminal-illness/pypi-test
cd2554227165e3ed97b1bb9c38cb30c7f8fa84f0
[ "MIT" ]
null
null
null
src/pypi-test-amdukhan/pypi_example.py
terminal-illness/pypi-test
cd2554227165e3ed97b1bb9c38cb30c7f8fa84f0
[ "MIT" ]
null
null
null
print(input("Enter Name: "))
28
28
0.678571
4
28
4.75
1
0
0
0
0
0
0
0
0
0
0
0
0.071429
28
1
28
28
0.730769
0
0
0
0
0
0.413793
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
27e406556a592432aca2515ef19e4dbc6f5fd49b
156
py
Python
message_board/users/forms.py
robalford/message_board
744f1f524ee8a60c5b799fc7a9d911818a519bf1
[ "MIT" ]
null
null
null
message_board/users/forms.py
robalford/message_board
744f1f524ee8a60c5b799fc7a9d911818a519bf1
[ "MIT" ]
null
null
null
message_board/users/forms.py
robalford/message_board
744f1f524ee8a60c5b799fc7a9d911818a519bf1
[ "MIT" ]
null
null
null
from django.forms import modelformset_factory from message_board.posts.models import Peeve PeeveFormSet = modelformset_factory(Peeve, fields=('peeve', ))
26
62
0.820513
19
156
6.578947
0.684211
0.304
0
0
0
0
0
0
0
0
0
0
0.096154
156
5
63
31.2
0.886525
0
0
0
0
0
0.032051
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
8bee3666e4c6666a656a45d98642e3089f040837
196
py
Python
libreria/library/admin.py
kvothestrokes/libreria_implementacionweb
c193cd58f1b33fe65dd54ba8d8d8675df4d5abf2
[ "Apache-2.0" ]
null
null
null
libreria/library/admin.py
kvothestrokes/libreria_implementacionweb
c193cd58f1b33fe65dd54ba8d8d8675df4d5abf2
[ "Apache-2.0" ]
null
null
null
libreria/library/admin.py
kvothestrokes/libreria_implementacionweb
c193cd58f1b33fe65dd54ba8d8d8675df4d5abf2
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin from .models import Libro, Cliente, Prestamo # Register your models here. admin.site.register(Libro) admin.site.register(Cliente) admin.site.register(Prestamo)
19.6
44
0.80102
27
196
5.814815
0.481481
0.171975
0.324841
0
0
0
0
0
0
0
0
0
0.107143
196
9
45
21.777778
0.897143
0.132653
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
8bf1648bc2a45117a2974d06b17f7a5890a8b18f
203
py
Python
libspn_keras/losses/__init__.py
twebr/libspn-keras
b5f107899795634f011b0e0bfedce182c0e87568
[ "MIT" ]
45
2020-02-23T22:01:13.000Z
2021-09-10T19:24:40.000Z
libspn_keras/losses/__init__.py
twebr/libspn-keras
b5f107899795634f011b0e0bfedce182c0e87568
[ "MIT" ]
16
2020-03-12T06:12:44.000Z
2022-01-19T19:44:33.000Z
libspn_keras/losses/__init__.py
twebr/libspn-keras
b5f107899795634f011b0e0bfedce182c0e87568
[ "MIT" ]
9
2020-02-24T13:06:16.000Z
2021-11-09T22:59:32.000Z
from libspn_keras.losses.negative_log_joint import NegativeLogJoint from libspn_keras.losses.negative_log_likelihood import NegativeLogLikelihood __all__ = ["NegativeLogJoint", "NegativeLogLikelihood"]
40.6
77
0.876847
21
203
8
0.571429
0.119048
0.178571
0.25
0.380952
0.380952
0
0
0
0
0
0
0.064039
203
4
78
50.75
0.884211
0
0
0
0
0
0.182266
0.103448
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
e33d0c66cdd3d610d02181f9eb4157fd0640fad1
1,545
py
Python
integration-test/1279-osm-road-min_zoom_defaults.py
rinnyB/vector-datasource
024909ed8245a4ad4a25c908413ba3602de6c335
[ "MIT" ]
null
null
null
integration-test/1279-osm-road-min_zoom_defaults.py
rinnyB/vector-datasource
024909ed8245a4ad4a25c908413ba3602de6c335
[ "MIT" ]
2
2021-03-31T20:22:37.000Z
2021-12-13T20:50:11.000Z
integration-test/1279-osm-road-min_zoom_defaults.py
rinnyB/vector-datasource
024909ed8245a4ad4a25c908413ba3602de6c335
[ "MIT" ]
null
null
null
# -*- encoding: utf-8 -*- from . import FixtureTest class OSMMinZoomTests(FixtureTest): def test_motorway_min_zoom(self): import dsl z, x, y = (16, 33186, 22554) self.generate_fixtures( # https://www.openstreetmap.org/way/16108247 dsl.way(1, dsl.tile_diagonal(z, x, y), { 'source': 'openstreetmap.org', 'highway': 'motorway', }), ) self.assert_has_feature( z, x, y, 'roads', { 'id': 1, 'min_zoom': 5, }) def test_trunk_min_zoom(self): import dsl z, x, y = (16, 33186, 22554) self.generate_fixtures( # https://www.openstreetmap.org/way/16108247 dsl.way(2, dsl.tile_diagonal(z, x, y), { 'source': 'openstreetmap.org', 'highway': 'trunk', }), ) self.assert_has_feature( z, x, y, 'roads', { 'id': 2, 'min_zoom': 6, }) def test_primary_min_zoom(self): import dsl z, x, y = (16, 33186, 22554) self.generate_fixtures( # https://www.openstreetmap.org/way/16108247 dsl.way(3, dsl.tile_diagonal(z, x, y), { 'source': 'openstreetmap.org', 'highway': 'primary', }), ) self.assert_has_feature( z, x, y, 'roads', { 'id': 3, 'min_zoom': 8, })
24.919355
56
0.45178
159
1,545
4.238994
0.283019
0.026706
0.040059
0.075668
0.77003
0.77003
0.77003
0.77003
0.77003
0.636499
0
0.077008
0.41165
1,545
61
57
25.327869
0.664466
0.098382
0
0.545455
0
0
0.111671
0
0
0
0
0
0.068182
1
0.068182
false
0
0.090909
0
0.181818
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e357be4b4ea3081f114e08f963141484a5e55201
8,622
py
Python
marshmallow_recipe/fields.py
foobarbazmeow/marshmallow-recipe
5bce8abd5db1abec4d60cfa2cd1428c6c5738566
[ "MIT" ]
null
null
null
marshmallow_recipe/fields.py
foobarbazmeow/marshmallow-recipe
5bce8abd5db1abec4d60cfa2cd1428c6c5738566
[ "MIT" ]
null
null
null
marshmallow_recipe/fields.py
foobarbazmeow/marshmallow-recipe
5bce8abd5db1abec4d60cfa2cd1428c6c5738566
[ "MIT" ]
null
null
null
import datetime import decimal import uuid from typing import Any, Type import marshmallow as m from .missing import MISSING, Missing _MARSHMALLOW_VERSION_MAJOR = int(m.__version__.split(".")[0]) def str_field( *, required: bool, default: str | None | Missing = MISSING, name: str | None = None, **_: Any, ) -> m.fields.Field: if required: if default is not MISSING: raise ValueError("Default values is not supported for required fields") return m.fields.String(required=True, **data_key_fields(name)) return m.fields.Str( allow_none=True, **default_fields(None if default is MISSING else default), **data_key_fields(name), ) def bool_field( *, required: bool, default: bool | None | Missing = MISSING, name: str | None = None, **_: Any, ) -> m.fields.Field: if required: if default is not MISSING: raise ValueError("Default values is not supported for required fields") return m.fields.Boolean(required=True, **data_key_fields(name)) return m.fields.Bool( allow_none=True, **default_fields(None if default is MISSING else default), **data_key_fields(name), ) def decimal_field( *, required: bool, default: decimal.Decimal | None | Missing = MISSING, name: str | None = None, places: int = 2, as_string: bool = True, **_: Any, ) -> m.fields.Field: if required: if default is not MISSING: raise ValueError("Default values is not supported for required fields") return m.fields.Decimal(required=True, as_string=as_string, places=places, **data_key_fields(name)) return m.fields.Decimal( allow_none=True, as_string=as_string, places=places, **default_fields(None if default is MISSING else default), **data_key_fields(name), ) def int_field( *, required: bool, default: int | None | Missing = MISSING, name: str | None = None, **_: Any, ) -> m.fields.Field: if required: if default is not MISSING: raise ValueError("Default values is not supported for required fields") return m.fields.Int(required=True, **data_key_fields(name)) return m.fields.Int( allow_none=True, **default_fields(None if default 
is MISSING else default), **data_key_fields(name), ) def float_field( *, required: bool, default: float | None | Missing = MISSING, name: str | None = None, **_: Any, ) -> m.fields.Field: if required: if default is not MISSING: raise ValueError("Default values is not supported for required fields") return m.fields.Float(required=True, **data_key_fields(name)) return m.fields.Float( allow_none=True, **default_fields(None if default is MISSING else default), **data_key_fields(name), ) def uuid_field( *, required: bool, default: uuid.UUID | None | Missing = MISSING, name: str | None = None, **_: Any, ) -> m.fields.Field: if required: if default is not MISSING: raise ValueError("Default values is not supported for required fields") return m.fields.UUID(required=True, **data_key_fields(name)) return m.fields.UUID( allow_none=True, **default_fields(None if default is MISSING else default), **data_key_fields(name), ) def datetime_field( *, required: bool, default: datetime.datetime | None | Missing = MISSING, name: str | None = None, **_: Any, ) -> m.fields.Field: if required: if default is not MISSING: raise ValueError("Default values is not supported for required fields") return DateTimeField(required=True, **data_key_fields(name)) return DateTimeField( allow_none=True, **default_fields(None if default is MISSING else default), **data_key_fields(name), ) def date_field( *, required: bool, default: datetime.date | None | Missing = MISSING, name: str | None = None, **_: Any, ) -> m.fields.Field: if required: if default is not MISSING: raise ValueError("Default values is not supported for required fields") return m.fields.Date(required=True, **data_key_fields(name)) return m.fields.Date( allow_none=True, **default_fields(None if default is MISSING else default), **data_key_fields(name), ) def nested_field( nested_schema: Type[m.Schema], *, required: bool, default: Any | None | Missing = MISSING, name: str | None = None, **_: Any, ) -> m.fields.Field: if required: if 
default is not MISSING: raise ValueError("Default values is not supported for required fields") return m.fields.Nested(nested_schema, required=True, **data_key_fields(name)) if default is not MISSING and default is not None: raise ValueError("Default values is not supported for required fields") return m.fields.Nested( nested_schema, allow_none=True, **default_fields(None), **data_key_fields(name), ) def list_field( field: m.fields.Field, *, required: bool, default: Any | None | Missing = MISSING, name: str | None = None, **_: Any, ) -> m.fields.Field: if required: if default is not MISSING: raise ValueError("Default values is not supported for required fields") return m.fields.List(field, required=True, **data_key_fields(name)) if default is not MISSING and default is not None: raise ValueError("Default values is not supported for required fields") return m.fields.List( field, allow_none=True, **default_fields(None), **data_key_fields(name), ) def dict_field( *, required: bool, default: Any | None | Missing = MISSING, name: str | None = None, **_: Any, ) -> m.fields.Field: if required: if default is not MISSING: raise ValueError("Default values is not supported for required fields") return m.fields.Dict(required=True, **data_key_fields(name)) if default is not MISSING and default is not None: raise ValueError("Default values is not supported for required fields") return m.fields.Dict( allow_none=True, **default_fields(None), **data_key_fields(name), ) DateTimeField: Type[m.fields.DateTime] if _MARSHMALLOW_VERSION_MAJOR >= 3: def data_key_fields(name: str | None) -> dict[str, Any]: if name is None: return {} return dict(data_key=name) def default_fields(value: Any) -> dict[str, Any]: return dict(dump_default=value, load_default=value) class DateTimeFieldV3(m.fields.DateTime): def _deserialize(self, value: Any, attr: Any, data: Any, **kwargs: Any) -> Any: result = super()._deserialize(value, attr, data, **kwargs) if result.tzinfo is None: return 
result.replace(tzinfo=datetime.timezone.utc) return result.astimezone(datetime.timezone.utc) def _serialize(self, value: Any, attr: Any, obj: Any, **kwargs: Any) -> Any: if value is None: return None if value.tzinfo is None: value = value.replace(tzinfo=datetime.timezone.utc) return super()._serialize(value, attr, obj, **kwargs) DateTimeField = DateTimeFieldV3 else: dateutil_tz_utc_cls: Type[datetime.tzinfo] | None try: import dateutil.tz # type: ignore dateutil_tz_utc_cls = dateutil.tz.tzutc except ImportError: dateutil_tz_utc_cls = None def data_key_fields(name: str | None) -> dict[str, Any]: if name is None: return {} return dict(dump_to=name, load_from=name) def default_fields(value: Any) -> dict[str, Any]: return dict(missing=value, default=value) class DateTimeFieldV2(m.fields.DateTime): def _deserialize(self, value: Any, attr: Any, data: Any, **_: Any) -> Any: result = super()._deserialize(value, attr, data) if result.tzinfo is None: return result.replace(tzinfo=datetime.timezone.utc) if dateutil_tz_utc_cls is not None and isinstance(result.tzinfo, dateutil_tz_utc_cls): return result.replace(tzinfo=datetime.timezone.utc) return result.astimezone(datetime.timezone.utc) DateTimeField = DateTimeFieldV2
29.326531
107
0.63106
1,077
8,622
4.922934
0.076137
0.046209
0.058846
0.076952
0.783667
0.768012
0.760845
0.73029
0.714825
0.667295
0
0.001106
0.2656
8,622
293
108
29.426621
0.836229
0.001392
0
0.604167
0
0
0.083062
0
0
0
0
0
0
1
0.075
false
0
0.033333
0.008333
0.2625
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e36dae7600fb694435944266c5acdfd3e34ea4ec
164
py
Python
NSE_Data_Science_001/NSE01-venv/Lib/site-packages/nsepy/__init__.py
Stoned-KING/Python_Projects
33cd9daf5252e0458b15348268f0c244b03646a7
[ "MIT" ]
null
null
null
NSE_Data_Science_001/NSE01-venv/Lib/site-packages/nsepy/__init__.py
Stoned-KING/Python_Projects
33cd9daf5252e0458b15348268f0c244b03646a7
[ "MIT" ]
null
null
null
NSE_Data_Science_001/NSE01-venv/Lib/site-packages/nsepy/__init__.py
Stoned-KING/Python_Projects
33cd9daf5252e0458b15348268f0c244b03646a7
[ "MIT" ]
null
null
null
from .history import get_history, get_index_pe_history, get_rbi_ref_history from .live import get_quote from .derivatives import get_expiry_date __VERSION__ = 0.7
27.333333
75
0.847561
27
164
4.62963
0.592593
0.216
0
0
0
0
0
0
0
0
0
0.013699
0.109756
164
5
76
32.8
0.842466
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
8b4d27cbe08e81ece1607737cc3066bd65d53abc
166
py
Python
tests/python_parser/data/call.py
ahupp/pegen
e28fe4fb1972c55af5ddb6a7bdd9cba4ea072b81
[ "MIT" ]
106
2020-05-14T03:43:47.000Z
2022-03-26T21:46:47.000Z
tests/python_parser/data/call.py
ahupp/pegen
e28fe4fb1972c55af5ddb6a7bdd9cba4ea072b81
[ "MIT" ]
60
2020-05-07T23:17:46.000Z
2022-03-30T20:26:25.000Z
tests/python_parser/data/call.py
ahupp/pegen
e28fe4fb1972c55af5ddb6a7bdd9cba4ea072b81
[ "MIT" ]
20
2020-05-22T10:21:30.000Z
2022-03-26T21:23:44.000Z
a = () b = {} f() f(b) f(b=c) f(*a) f(c, *a) f(c=1, *b) f(*a, c=1) f(**b) f(c, *a, **b) f(c, *a, x, **b) f(c, a=1, **b) f(a := 1) f(**b, a=1) f(i for i in range(10))
9.764706
23
0.355422
51
166
1.156863
0.215686
0.237288
0.20339
0.20339
0
0
0
0
0
0
0
0.054264
0.222892
166
16
24
10.375
0.403101
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
1
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
8b556749886b41907d7c92c0e938f50d875b3dd3
621
py
Python
PyTuna/audio/notes.py
danieldutton/PyTuna
4c7e5b4f6347fc8d651da5c55f8da9709f423cc3
[ "Apache-2.0" ]
null
null
null
PyTuna/audio/notes.py
danieldutton/PyTuna
4c7e5b4f6347fc8d651da5c55f8da9709f423cc3
[ "Apache-2.0" ]
null
null
null
PyTuna/audio/notes.py
danieldutton/PyTuna
4c7e5b4f6347fc8d651da5c55f8da9709f423cc3
[ "Apache-2.0" ]
null
null
null
class Notes(object): """Description here""" @staticmethod def get_guitar_notes_six_string(): notes = ('ELow', 'A', 'D', 'G', 'B', 'EHigh') return notes @staticmethod def get_guitar_notes_twelve_string(): notes = ('ELow', 'A', 'D', 'G', 'B', 'EHigh') return notes @staticmethod def get_bass_notes_four_string(): notes = ('E', 'A', 'D', 'G') return notes @staticmethod def get_bass_notes_six_string(): notes = ('ELow', 'A', 'D', 'G', 'B', 'EHigh') return notes
20.032258
57
0.492754
67
621
4.328358
0.328358
0.206897
0.248276
0.165517
0.806897
0.686207
0.686207
0.541379
0.541379
0.541379
0
0
0.347826
621
30
58
20.7
0.716049
0.025765
0
0.647059
0
0
0.072148
0
0
0
0
0
0
1
0.235294
false
0
0
0
0.529412
0
0
0
0
null
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
8ba12f7608fe5de6166eb528117d3950b52493e2
111
py
Python
data_warehouse/read.py
jbj2505/dend_03_data_warehouse
30a898a622ede22ae360f6337e1a0b8ac22df512
[ "MIT" ]
2
2021-12-15T21:25:38.000Z
2022-02-11T14:10:01.000Z
data_warehouse/read.py
jbj2505/dend_03_data_warehouse
30a898a622ede22ae360f6337e1a0b8ac22df512
[ "MIT" ]
null
null
null
data_warehouse/read.py
jbj2505/dend_03_data_warehouse
30a898a622ede22ae360f6337e1a0b8ac22df512
[ "MIT" ]
3
2019-09-28T12:17:58.000Z
2021-02-19T11:55:44.000Z
def read_sql_query(filepath): with open(filepath, "r") as file_pointer: return file_pointer.read()
27.75
45
0.702703
16
111
4.625
0.75
0.297297
0
0
0
0
0
0
0
0
0
0
0.189189
111
3
46
37
0.822222
0
0
0
0
0
0.009009
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
8ba938daa7c4c1f2a83af63920be3963c1f0d234
24,206
py
Python
tests/test_ipc.py
pkeroulas/nmos-common
b650bad276819d794624f4ff6ea08fbdecd915d7
[ "Apache-2.0" ]
7
2017-12-08T08:05:51.000Z
2020-10-21T07:32:42.000Z
tests/test_ipc.py
pkeroulas/nmos-common
b650bad276819d794624f4ff6ea08fbdecd915d7
[ "Apache-2.0" ]
63
2017-12-13T08:46:58.000Z
2020-12-02T08:48:40.000Z
tests/test_ipc.py
pkeroulas/nmos-common
b650bad276819d794624f4ff6ea08fbdecd915d7
[ "Apache-2.0" ]
7
2017-11-22T10:49:23.000Z
2022-03-15T22:00:17.000Z
# Copyright 2017 British Broadcasting Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from __future__ import print_function from six import PY2 from six import string_types from six import iteritems import unittest import mock from nmoscommon.ipc import * if PY2: from StringIO import StringIO else: from io import StringIO class TestHost(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestHost, self).__init__(*args, **kwargs) if PY2: self.assertCountEqual = self.assertItemsEqual def setUp(self): paths = ['nmoscommon.ipc.zmq', 'os.chmod', ] patchers = { name : mock.patch(name) for name in paths } self.mocks = { name : patcher.start() for (name, patcher) in iteritems(patchers) } for (name, patcher) in iteritems(patchers): self.addCleanup(patcher.stop) self.zmq = self.mocks['nmoscommon.ipc.zmq'] def test_init(self): address = "ipc://dummy.test" UUT = Host(address) self.assertEqual(UUT.timeout, 100) self.zmq.Context.instance.assert_called_once_with() self.zmq.Context.instance.return_value.socket.assert_called_once_with(self.zmq.REP) self.zmq.Context.instance.return_value.socket.return_value.bind.assert_called_once_with(address) self.zmq.Context.instance.return_value.socket.return_value.setsockopt.assert_called_with(self.zmq.LINGER, 0) if address[:6] == "ipc://": self.mocks['os.chmod'].assert_called_with(address[6:], (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | 
stat.S_IWOTH | stat.S_IXOTH)) def test_start_starts_a_greenlet_if_not_running(self): address = "ipc://dummy.test" UUT = Host(address) with mock.patch('gevent.spawn') as spawn: UUT.start() spawn.assert_called_once_with(mock.ANY) def test_start_does_nothing_if_already_running(self): address = "ipc://dummy.test" UUT = Host(address) with mock.patch('gevent.spawn') as spawn: UUT.start() spawn.assert_called_once_with(mock.ANY) spawn.reset_mock() UUT.start() spawn.assert_not_called() def test_stop_does_nothing_if_not_running(self): address = "ipc://dummy.test" UUT = Host(address) with mock.patch('gevent.spawn') as spawn: UUT.stop() spawn.return_value.kill.assert_not_called() def test_stop_kills_greenlet_if_already_running(self): address = "ipc://dummy.test" UUT = Host(address) with mock.patch('gevent.spawn') as spawn: UUT.start() UUT.stop() spawn.return_value.kill.assert_called_once_with() def test_start_restarts_greenlet_after_stopping(self): address = "ipc://dummy.test" UUT = Host(address) greenlets = [ mock.MagicMock(name="greenlet1"), mock.MagicMock(name="greenlet2") ] greenlet_queue = [ m for m in greenlets ] # Need a copy so that elements can be popped out with mock.patch('gevent.spawn', side_effect=lambda _ : greenlet_queue.pop(0)) as spawn: UUT.start() spawn.assert_called_once_with(mock.ANY) UUT.stop() greenlets[0].kill.assert_called_once_with() UUT.start() self.assertListEqual([ call for call in spawn.mock_calls if call[0] == "" ], [ mock.call(mock.ANY), mock.call(mock.ANY) ]) UUT.stop() greenlets[1].kill.assert_called_once_with() def test_runloop_does_nothing_if_not_started(self): self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = Exception # This will throw an exception if the code gets as far as calling it address = "ipc://dummy.test" UUT = Host(address) # Make spawn just call the passed parameter, essentially a threadless launch def stop_then_call(UUT): def __inner(f): UUT.stop() return f() return __inner with 
mock.patch('gevent.spawn', side_effect=stop_then_call) as spawn: try: UUT.start() except: self.fail(msg="stop didn't prevent the runloop from running") def test_runloop_polls_for_messages_and_retries_on_timeout(self): def return_vals_then_raise(retvals, e=Exception): def __inner(*args, **kwargs): if len(retvals) > 0: return retvals.pop(0) else: raise e return __inner self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = return_vals_then_raise([ 0, 0, 0 ]) address = "ipc://dummy.test" UUT = Host(address) # Make spawn just call the passed parameter, essentially a threadless launch with mock.patch('gevent.spawn', side_effect=lambda f : f()) as spawn: with self.assertRaises(Exception): UUT.start() self.assertListEqual(self.zmq.Context.instance.return_value.socket.return_value.poll.mock_calls, [ mock.call(timeout=UUT.timeout), mock.call(timeout=UUT.timeout), mock.call(timeout=UUT.timeout), mock.call(timeout=UUT.timeout) ]) def test_runloop_sends_empty_reply_to_malformed_msg(self): def return_vals_then_raise(retvals, e=Exception): def __inner(*args, **kwargs): if len(retvals) > 0: return retvals.pop(0) else: raise e return __inner self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = return_vals_then_raise([ 1, ]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.return_value = {"foo" : "bar"} address = "ipc://dummy.test" UUT = Host(address) # Make spawn just call the passed parameter, essentially a threadless launch with mock.patch('gevent.spawn', side_effect=lambda f : f()) as spawn: with self.assertRaises(Exception): UUT.start() self.assertListEqual(self.zmq.Context.instance.return_value.socket.return_value.poll.mock_calls, [ mock.call(timeout=UUT.timeout), mock.call(timeout=UUT.timeout) ]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.assert_called_once_with() self.zmq.Context.instance.return_value.socket.return_value.send_json.assert_called_once_with({}) def 
test_runloop_sends_error_in_response_to_unknown_method_call(self): def return_vals_then_raise(retvals, e=Exception): def __inner(*args, **kwargs): if len(retvals) > 0: return retvals.pop(0) else: raise e return __inner self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = return_vals_then_raise([ 1, ]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.return_value = {"function" : "unknown_function", "args" : [], "kwargs" : {} } address = "ipc://dummy.test" UUT = Host(address) # Make spawn just call the passed parameter, essentially a threadless launch with mock.patch('gevent.spawn', side_effect=lambda f : f()) as spawn: with self.assertRaises(Exception): UUT.start() self.assertListEqual(self.zmq.Context.instance.return_value.socket.return_value.poll.mock_calls, [ mock.call(timeout=UUT.timeout), mock.call(timeout=UUT.timeout) ]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.assert_called_once_with() self.zmq.Context.instance.return_value.socket.return_value.send_json.assert_called_once_with({ 'exc' : 'AttributeError'}) def test_runloop_sends_error_in_response_to_unknown_method_call(self): def return_vals_then_raise(retvals, e=Exception): def __inner(*args, **kwargs): if len(retvals) > 0: return retvals.pop(0) else: raise e return __inner self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = return_vals_then_raise([ 1, ]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.return_value = {"function" : "unknown_function", "args" : [], "kwargs" : {} } address = "ipc://dummy.test" UUT = Host(address) # Make spawn just call the passed parameter, essentially a threadless launch with mock.patch('gevent.spawn', side_effect=lambda f : f()) as spawn: with self.assertRaises(Exception): UUT.start() self.assertListEqual(self.zmq.Context.instance.return_value.socket.return_value.poll.mock_calls, [ mock.call(timeout=UUT.timeout), mock.call(timeout=UUT.timeout) ]) 
self.zmq.Context.instance.return_value.socket.return_value.recv_json.assert_called_once_with() self.zmq.Context.instance.return_value.socket.return_value.send_json.assert_called_once_with({ 'exc' : 'AttributeError'}) def test_runloop_passes_through_calls_to_known_method(self): def return_vals_then_raise(retvals, e=Exception): def __inner(*args, **kwargs): if len(retvals) > 0: return retvals.pop(0) else: raise e return __inner args = [ "foo", "bar" ] kwargs = { "baz" : "potato" } self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = return_vals_then_raise([ 1, ]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.return_value = {"function" : "test_function", "args" : args, "kwargs" : kwargs } test_function_mock = mock.MagicMock(name="test_function_mock") def test_function(*args, **kwargs): return test_function_mock(*args, **kwargs) test_function_mock.return_value = None address = "ipc://dummy.test" UUT = Host(address) UUT.ipcmethod()(test_function) # Make spawn just call the passed parameter, essentially a threadless launch with mock.patch('gevent.spawn', side_effect=lambda f : f()) as spawn: with self.assertRaises(Exception): UUT.start() self.assertListEqual(self.zmq.Context.instance.return_value.socket.return_value.poll.mock_calls, [ mock.call(timeout=UUT.timeout), mock.call(timeout=UUT.timeout) ]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.assert_called_once_with() test_function_mock.assert_called_once_with(*args, **kwargs) self.zmq.Context.instance.return_value.socket.return_value.send_json.assert_called_once_with({}) def test_runloop_passes_through_calls_to_known_method_and_returns(self): def return_vals_then_raise(retvals, e=Exception): def __inner(*args, **kwargs): if len(retvals) > 0: return retvals.pop(0) else: raise e return __inner args = [ "foo", "bar" ] kwargs = { "baz" : "potato" } self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = return_vals_then_raise([ 1, 
]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.return_value = {"function" : "test_function", "args" : args, "kwargs" : kwargs } test_function_mock = mock.MagicMock(name="test_function_mock") def test_function(*args, **kwargs): return test_function_mock(*args, **kwargs) test_function_mock.return_value = "return_data" address = "ipc://dummy.test" UUT = Host(address) UUT.ipcmethod()(test_function) # Make spawn just call the passed parameter, essentially a threadless launch with mock.patch('gevent.spawn', side_effect=lambda f : f()) as spawn: with self.assertRaises(Exception): UUT.start() self.assertListEqual(self.zmq.Context.instance.return_value.socket.return_value.poll.mock_calls, [ mock.call(timeout=UUT.timeout), mock.call(timeout=UUT.timeout) ]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.assert_called_once_with() test_function_mock.assert_called_once_with(*args, **kwargs) self.zmq.Context.instance.return_value.socket.return_value.send_json.assert_called_once_with({ 'ret' : test_function_mock.return_value }) def test_runloop_passes_through_calls_to_known_method_and_sends_back_exception_when_one_is_raised(self): def return_vals_then_raise(retvals, e=Exception): def __inner(*args, **kwargs): if len(retvals) > 0: return retvals.pop(0) else: raise e return __inner args = [ "foo", "bar" ] kwargs = { "baz" : "potato" } self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = return_vals_then_raise([ 1, ]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.return_value = {"function" : "test_function", "args" : args, "kwargs" : kwargs } test_function_mock = mock.MagicMock(name="test_function_mock") def test_function(*args, **kwargs): return test_function_mock(*args, **kwargs) test_function_mock.side_effect = Exception("This is a test Exception") address = "ipc://dummy.test" UUT = Host(address) UUT.ipcmethod()(test_function) # Make spawn just call the passed parameter, essentially a 
threadless launch with mock.patch('gevent.spawn', side_effect=lambda f : f()) as spawn: with self.assertRaises(Exception): UUT.start() self.assertListEqual(self.zmq.Context.instance.return_value.socket.return_value.poll.mock_calls, [ mock.call(timeout=UUT.timeout), mock.call(timeout=UUT.timeout) ]) self.zmq.Context.instance.return_value.socket.return_value.recv_json.assert_called_once_with() test_function_mock.assert_called_once_with(*args, **kwargs) self.zmq.Context.instance.return_value.socket.return_value.send_json.assert_called_once_with(mock.ANY) sentval = self.zmq.Context.instance.return_value.socket.return_value.send_json.call_args[0][0] self.assertIsInstance(sentval, dict) self.assertIn("exc", sentval) self.assertIsInstance(sentval["exc"], string_types) self.assertRegexpMatches(sentval["exc"], r'Exception: This is a test Exception') def test_getmethods(self): methods = [ mock.MagicMock(__name__="foo", __doc__="foodoc"), mock.MagicMock(__name__="bar", __doc__="bardoc"), mock.MagicMock(__name__="baz", __doc__=None), ] address = "ipc://dummy.test" UUT = Host(address) for m in methods: UUT.ipcmethod()(m) self.assertEqual(UUT.getmethods(), dict([ (m.__name__, m.__doc__ if m.__doc__ is not None else "") for m in methods ] + [ ("getmethods", UUT.getmethods.__doc__) ])) class TestProxy(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestProxy, self).__init__(*args, **kwargs) if PY2: self.assertCountEqual = self.assertItemsEqual def setUp(self): paths = ['nmoscommon.ipc.zmq', 'os.path.exists', ] patchers = { name : mock.patch(name) for name in paths } self.mocks = { name : patcher.start() for (name, patcher) in iteritems(patchers) } for (name, patcher) in iteritems(patchers): self.addCleanup(patcher.stop) self.zmq = self.mocks['nmoscommon.ipc.zmq'] def test_init(self): self.mocks['os.path.exists'].return_value = True address = "ipc://dummy.test" UUT = Proxy(address) self.assertEqual(UUT.timeout, 100) self.zmq.Context.instance.assert_called_once_with() 
if address[:6] == "ipc://": self.mocks['os.path.exists'].assert_called_with(address[6:]) self.zmq.Context.instance.return_value.socket.assert_called_once_with(self.zmq.REQ) self.zmq.Context.instance.return_value.socket.return_value.connect.assert_called_once_with(address) self.assertCountEqual(self.zmq.Context.instance.return_value.socket.return_value.setsockopt.mock_calls, [ mock.call(self.zmq.LINGER, 0), mock.call(self.zmq.SNDTIMEO, 0), mock.call(self.zmq.RCVTIMEO, 0) ]) def test_fails_when_ipc_socket_nonexistant(self): self.mocks['os.path.exists'].return_value = False address = "ipc://dummy.test" with self.assertRaises(RuntimeError): UUT = Proxy(address) if address[:6] == "ipc://": self.mocks['os.path.exists'].assert_called_with(address[6:]) self.zmq.Context.instance.return_value.socket.assert_not_called() def test_remote_call(self): self.mocks['os.path.exists'].return_value = True address = "ipc://dummy.test" UUT = Proxy(address) method_name = "testmethodname" args = [ "foo", "bar", "baz" ] kwargs = { "boop" : "togethertogether" } EXPECTED_RETURN_VALUE = "ejybrvjysdlfhlyguerhli;njk7893ykj" self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = [ 1, Exception ] self.zmq.Context.instance.return_value.socket.return_value.recv_json.side_effect = [ { 'ret' : EXPECTED_RETURN_VALUE }, Exception ] with mock.patch('gevent.sleep') as sleep: try: r = getattr(UUT, method_name)(*args, **kwargs) except: self.fail(msg="Call to %s failed with unexpected exception: %s" % (method_name, traceback.format_exc(),)) self.assertEqual(r, EXPECTED_RETURN_VALUE) def test_remote_call_raises_when_socket_unconnected(self): self.mocks['os.path.exists'].return_value = True address = "ipc://dummy.test" UUT = Proxy(address) method_name = "testmethodname" args = [ "foo", "bar", "baz" ] kwargs = { "boop" : "togethertogether" } EXPECTED_RETURN_VALUE = "ejybrvjysdlfhlyguerhli;njk7893ykj" self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = [ 0, 
Exception ] self.zmq.Context.instance.return_value.socket.return_value.recv_json.side_effect = Exception with mock.patch('gevent.sleep') as sleep: with self.assertRaises(LocalException): r = getattr(UUT, method_name)(*args, **kwargs) def test_remote_call_passes_through_remote_exception(self): self.mocks['os.path.exists'].return_value = True address = "ipc://dummy.test" UUT = Proxy(address) method_name = "testmethodname" args = [ "foo", "bar", "baz" ] kwargs = { "boop" : "togethertogether" } EXPECTED_EXCEPTION_MESSAGE = "ejybrvjysdlfhlyguerhli;njk7893ykj" self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = [ 1, Exception ] self.zmq.Context.instance.return_value.socket.return_value.recv_json.side_effect = [ { 'exc' : EXPECTED_EXCEPTION_MESSAGE }, Exception ] with mock.patch('gevent.sleep') as sleep: with self.assertRaises(RemoteException) as cm: r = getattr(UUT, method_name)(*args, **kwargs) self.assertEqual(cm.exception.args, ( EXPECTED_EXCEPTION_MESSAGE, )) class TestMain(unittest.TestCase): def setUp(self): paths = ['nmoscommon.ipc.zmq', 'os.path.exists', 'os.chmod'] patchers = { name : mock.patch(name) for name in paths } self.mocks = { name : patcher.start() for (name, patcher) in iteritems(patchers) } for (name, patcher) in iteritems(patchers): self.addCleanup(patcher.stop) self.zmq = self.mocks['nmoscommon.ipc.zmq'] self.mocks['os.path.exists'].return_value = True @mock.patch('sys.argv', [ "ipc", "ipc://tmp.test", "test_func", "foo", "bar", "baz" ]) def test_main(self): EXPECTED_RETURN_VALUES = [] def set_retval(d): EXPECTED_RETURN_VALUES.append(d) def get_retval(): return { 'ret' : EXPECTED_RETURN_VALUES[0] } self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = [ 1, Exception ] self.zmq.Context.instance.return_value.socket.return_value.send_json.side_effect = set_retval self.zmq.Context.instance.return_value.socket.return_value.recv_json.side_effect = get_retval with mock.patch('sys.stdout', 
new_callable=StringIO) as mock_stdout: main() self.assertEqual(json.loads(mock_stdout.getvalue().strip()), { 'function' : 'test_func', 'args' : [ 'foo', 'bar', 'baz' ], 'kwargs' : {} }) @mock.patch('sys.argv', [ "ipc", "ipc://tmp.test", ]) def test_main_host(self): self.zmq.Context.instance.return_value.socket.return_value.poll.side_effect = [ 1, Exception ] self.zmq.Context.instance.return_value.socket.return_value.recv_json.side_effect = [{ 'function' : 'hello', 'args' : [], 'kwargs' : { 'name' : 'TestScript' } }, Exception ] with self.assertRaises(Exception): main() self.zmq.Context.instance.return_value.socket.return_value.send_json.assert_called_once_with({ 'ret' : "Hello, TestScript" })
48.509018
172
0.60043
2,763
24,206
5.025697
0.109663
0.098228
0.055452
0.087138
0.787124
0.774089
0.761918
0.742258
0.713596
0.704162
0
0.004495
0.292366
24,206
498
173
48.606426
0.806177
0.053003
0
0.697436
0
0
0.075636
0.004323
0
0
0
0
0.171795
1
0.123077
false
0.010256
0.025641
0.010256
0.207692
0.002564
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
8be97f4a3a8fb1649de50f8d8ff28616b0f3efae
57
py
Python
ima/__init__.py
Imagenipulation/ima.py
e96f681c9a299a6b47482684d256f9616819eaa7
[ "MIT" ]
null
null
null
ima/__init__.py
Imagenipulation/ima.py
e96f681c9a299a6b47482684d256f9616819eaa7
[ "MIT" ]
null
null
null
ima/__init__.py
Imagenipulation/ima.py
e96f681c9a299a6b47482684d256f9616819eaa7
[ "MIT" ]
null
null
null
from .core import __version__ from .client import Client
19
29
0.824561
8
57
5.375
0.625
0
0
0
0
0
0
0
0
0
0
0
0.140351
57
2
30
28.5
0.877551
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
47570b2e24c40344886e4c88ca83c3b9cbdec7df
155
py
Python
inferiot/__init__.py
prafulmaka/inferIoT
fdd80f39d4a3158bfb0c22ccf9b59d04d4577ced
[ "MIT" ]
3
2020-12-06T01:03:59.000Z
2021-02-11T08:20:21.000Z
inferiot/__init__.py
prafulmaka/inferIoT
fdd80f39d4a3158bfb0c22ccf9b59d04d4577ced
[ "MIT" ]
null
null
null
inferiot/__init__.py
prafulmaka/inferIoT
fdd80f39d4a3158bfb0c22ccf9b59d04d4577ced
[ "MIT" ]
1
2020-12-05T23:08:37.000Z
2020-12-05T23:08:37.000Z
"""inferiot - Python facade for connecting to Smarthub's Infer IOT center""" import inferiot.iotclient as inferiot __version__ = '1.0' import inferiot
17.222222
76
0.76129
21
155
5.428571
0.809524
0.245614
0
0
0
0
0
0
0
0
0
0.015267
0.154839
155
8
77
19.375
0.854962
0.451613
0
0
0
0
0.037975
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
477e80ed9f6ecfd7f85b09451b559e2776e308c7
53
py
Python
python-helloworld/message.py
IvolksI/DO288-apps
805839b7487fb42dc5d4eb6c559decb1a86c0ae7
[ "Apache-2.0" ]
null
null
null
python-helloworld/message.py
IvolksI/DO288-apps
805839b7487fb42dc5d4eb6c559decb1a86c0ae7
[ "Apache-2.0" ]
null
null
null
python-helloworld/message.py
IvolksI/DO288-apps
805839b7487fb42dc5d4eb6c559decb1a86c0ae7
[ "Apache-2.0" ]
null
null
null
print("buil hook output") print("Hello from script")
17.666667
26
0.735849
8
53
4.875
0.875
0
0
0
0
0
0
0
0
0
0
0
0.113208
53
2
27
26.5
0.829787
0
0
0
0
0
0.622642
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
47ae0046264094acec6b933b37cdc1ebc00d8d46
196
py
Python
bilibili/__init__.py
96bearli/bili-auto-note
51d8467cd301ef95f90bb5a31c523218d8b5393b
[ "MIT" ]
3
2022-03-21T01:51:01.000Z
2022-03-26T06:25:12.000Z
bilibili/__init__.py
96bearli/bili-auto-note
51d8467cd301ef95f90bb5a31c523218d8b5393b
[ "MIT" ]
2
2022-03-22T04:21:24.000Z
2022-03-28T03:59:01.000Z
bilibili/__init__.py
96bearli/bili-auto-note
51d8467cd301ef95f90bb5a31c523218d8b5393b
[ "MIT" ]
1
2022-03-21T02:10:25.000Z
2022-03-21T02:10:25.000Z
from .agent import BilibiliAgent from .timeline import Timeline, TimelineItem from .bilibili_note_helper import BilibiliNoteHelper, VideoPartInfo from .timeline_converter import TimelineConverter
39.2
67
0.877551
21
196
8.047619
0.619048
0.142012
0
0
0
0
0
0
0
0
0
0
0.091837
196
4
68
49
0.949438
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
47dc7d5a5e563d9645d932ece5b9e00b6403861e
58
py
Python
sandbox/basket/models.py
Bastilla123/shop2
b2a7ded5b39d0228fadfdb1c9e1fbd3ab0e4cfba
[ "BSD-3-Clause" ]
14
2018-01-08T12:50:10.000Z
2021-12-26T18:38:14.000Z
sandbox/basket/models.py
Bastilla123/shop2
b2a7ded5b39d0228fadfdb1c9e1fbd3ab0e4cfba
[ "BSD-3-Clause" ]
12
2021-12-01T11:05:47.000Z
2022-03-01T11:06:09.000Z
sandbox/basket/models.py
Bastilla123/shop2
b2a7ded5b39d0228fadfdb1c9e1fbd3ab0e4cfba
[ "BSD-3-Clause" ]
4
2019-04-09T17:29:34.000Z
2020-06-07T14:46:23.000Z
from oscar.apps.basket.models import * # noqa isort:skip
29
57
0.758621
9
58
4.888889
1
0
0
0
0
0
0
0
0
0
0
0
0.137931
58
1
58
58
0.88
0.258621
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
9a1cc18c8185cbf32d4b99240eee757f92f249d9
102
py
Python
test_dummy.py
Midnighter/travis-osx-python
8224faf7922ddf14b4c5600d1691506da6674851
[ "Apache-2.0" ]
null
null
null
test_dummy.py
Midnighter/travis-osx-python
8224faf7922ddf14b4c5600d1691506da6674851
[ "Apache-2.0" ]
null
null
null
test_dummy.py
Midnighter/travis-osx-python
8224faf7922ddf14b4c5600d1691506da6674851
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- """Dummy tests.""" def test_dummy(): assert True
11.333333
23
0.568627
14
102
4.071429
0.928571
0
0
0
0
0
0
0
0
0
0
0.012195
0.196078
102
8
24
12.75
0.682927
0.539216
0
0
0
0
0
0
0
0
0
0
0.5
1
0.5
true
0
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
0
0
0
5
9a3b3cc2c3782ef39d66272e9819864dd31b5c21
32
py
Python
tests/src/TNB/mklaren/kernel/__init__.py
bellwethers-in-se/issueCloseTime
e5e00c9625da0793dc8e7985fd88b0ca0b35f7d3
[ "MIT" ]
9
2017-07-27T10:32:48.000Z
2021-07-01T11:51:51.000Z
tests/src/TNB/mklaren/kernel/__init__.py
bellwethers-in-se/issueCloseTime
e5e00c9625da0793dc8e7985fd88b0ca0b35f7d3
[ "MIT" ]
11
2016-03-15T16:27:47.000Z
2019-09-05T02:25:08.000Z
tests/src/TNB/mklaren/kernel/__init__.py
bellwethers-in-se/issueCloseTime
e5e00c9625da0793dc8e7985fd88b0ca0b35f7d3
[ "MIT" ]
5
2017-01-28T22:45:34.000Z
2019-12-04T13:15:10.000Z
import kernel import kinterface
10.666667
17
0.875
4
32
7
0.75
0
0
0
0
0
0
0
0
0
0
0
0.125
32
2
18
16
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
9a64d2dd9b31f6b0d39c12c41183baec7fa691b5
7,219
py
Python
football/migrations/0002_auto_20200918_2149.py
mikiereed/fantaSheets
1565abf23ac7c7aa5905a3a687438d0d985a3990
[ "MIT" ]
null
null
null
football/migrations/0002_auto_20200918_2149.py
mikiereed/fantaSheets
1565abf23ac7c7aa5905a3a687438d0d985a3990
[ "MIT" ]
2
2020-10-14T02:22:00.000Z
2020-10-19T21:54:11.000Z
football/migrations/0002_auto_20200918_2149.py
mikiereed/fantaSheets
1565abf23ac7c7aa5905a3a687438d0d985a3990
[ "MIT" ]
null
null
null
# Generated by Django 3.1 on 2020-09-19 04:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('football', '0001_initial'), ] operations = [ migrations.AlterModelOptions( name='leaguesettings', options={'ordering': ('title',), 'verbose_name_plural': 'League Settings'}, ), migrations.AlterField( model_name='leaguesettings', name='league_hosting_site', field=models.CharField(choices=[('espn', 'ESPN'), ('yahoo', 'Yahoo'), ('other', 'Other')], max_length=20, verbose_name='Hosting Site'), ), migrations.AlterField( model_name='leaguesettings', name='number_of_teams', field=models.IntegerField(verbose_name='Number of Teams'), ), migrations.AlterField( model_name='leaguesettings', name='passing_interceptions', field=models.FloatField(verbose_name='Points Per Interception'), ), migrations.AlterField( model_name='leaguesettings', name='passing_touchdowns', field=models.FloatField(verbose_name='Points Per Passing Touchdown'), ), migrations.AlterField( model_name='leaguesettings', name='passing_two_point_conversions', field=models.FloatField(verbose_name='Points Per Passing Two Point Conversion'), ), migrations.AlterField( model_name='leaguesettings', name='passing_yards', field=models.FloatField(verbose_name='Points Per Passing Yard'), ), migrations.AlterField( model_name='leaguesettings', name='roster_bench_spots', field=models.IntegerField(verbose_name='Bench Spots'), ), migrations.AlterField( model_name='leaguesettings', name='roster_cornerbacks', field=models.IntegerField(verbose_name='Cornerbacks'), ), migrations.AlterField( model_name='leaguesettings', name='roster_defensive_backs', field=models.IntegerField(verbose_name='Defensive Backs'), ), migrations.AlterField( model_name='leaguesettings', name='roster_defensive_ends', field=models.IntegerField(verbose_name='Defensive Ends'), ), migrations.AlterField( model_name='leaguesettings', name='roster_defensive_lines', field=models.IntegerField(verbose_name='Defensive 
Lines'), ), migrations.AlterField( model_name='leaguesettings', name='roster_defensive_players', field=models.IntegerField(verbose_name='Defensive Players'), ), migrations.AlterField( model_name='leaguesettings', name='roster_defensive_tackles', field=models.IntegerField(default=0, verbose_name='Defensive Tackles'), ), migrations.AlterField( model_name='leaguesettings', name='roster_edge_rushers', field=models.IntegerField(verbose_name='Edge Rushers'), ), migrations.AlterField( model_name='leaguesettings', name='roster_flex_running_back_wide_receiver', field=models.IntegerField(verbose_name='Flex RB/WR'), ), migrations.AlterField( model_name='leaguesettings', name='roster_flex_running_back_wide_receiver_tight_end', field=models.IntegerField(verbose_name='Flex RB/WR/TE'), ), migrations.AlterField( model_name='leaguesettings', name='roster_flex_wide_receiver_tight_end', field=models.IntegerField(verbose_name='Flex WR/TE'), ), migrations.AlterField( model_name='leaguesettings', name='roster_head_coaches', field=models.IntegerField(verbose_name='Head Coaches'), ), migrations.AlterField( model_name='leaguesettings', name='roster_injured_reserve_spots', field=models.IntegerField(verbose_name='Injured Reserve Spots'), ), migrations.AlterField( model_name='leaguesettings', name='roster_kickers', field=models.IntegerField(verbose_name='Kickers'), ), migrations.AlterField( model_name='leaguesettings', name='roster_linebackers', field=models.IntegerField(verbose_name='Linebackers'), ), migrations.AlterField( model_name='leaguesettings', name='roster_offensive_players', field=models.IntegerField(verbose_name='Offensive Players'), ), migrations.AlterField( model_name='leaguesettings', name='roster_punters', field=models.IntegerField(verbose_name='Punters'), ), migrations.AlterField( model_name='leaguesettings', name='roster_quarterbacks', field=models.IntegerField(verbose_name='Quarterbacks'), ), migrations.AlterField( model_name='leaguesettings', name='roster_running_backs', 
field=models.IntegerField(verbose_name='Running Backs'), ), migrations.AlterField( model_name='leaguesettings', name='roster_safeties', field=models.IntegerField(verbose_name='Safeties'), ), migrations.AlterField( model_name='leaguesettings', name='roster_team_defense_special_teams', field=models.IntegerField(verbose_name='Defense / Special Teams'), ), migrations.AlterField( model_name='leaguesettings', name='roster_team_quarterbacks', field=models.IntegerField(verbose_name='Team Quarterbacks'), ), migrations.AlterField( model_name='leaguesettings', name='roster_tight_ends', field=models.IntegerField(verbose_name='Tight Ends'), ), migrations.AlterField( model_name='leaguesettings', name='roster_wide_receivers', field=models.IntegerField(verbose_name='Wide Receivers'), ), migrations.AlterField( model_name='leaguesettings', name='rushing_touchdowns', field=models.FloatField(verbose_name='Points Per Rushing Touchdown'), ), migrations.AlterField( model_name='leaguesettings', name='rushing_two_point_conversions', field=models.FloatField(verbose_name='Points Per Rushing Two Point Conversion'), ), migrations.AlterField( model_name='leaguesettings', name='rushing_yards', field=models.FloatField(verbose_name='Points Per Rushing Yard'), ), migrations.AlterField( model_name='leaguesettings', name='title', field=models.CharField(max_length=50, verbose_name='fantaSheet Name'), ), ]
38.398936
147
0.602854
618
7,219
6.809061
0.169903
0.149715
0.201996
0.234316
0.8125
0.747861
0.593869
0.42134
0.14924
0.091255
0
0.004491
0.290622
7,219
187
148
38.604278
0.817223
0.005957
0
0.569061
1
0
0.262336
0.061751
0
0
0
0
0
1
0
false
0.038674
0.005525
0
0.022099
0
0
0
0
null
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
9a6f72c8e4b99794bb422168294c0e5cad07c46d
69
py
Python
PokeUtils/__init__.py
carlossilva2/Poketext
cc1cec77460cdabe4d5400ee2a7ad5b8b0fc0d08
[ "MIT" ]
null
null
null
PokeUtils/__init__.py
carlossilva2/Poketext
cc1cec77460cdabe4d5400ee2a7ad5b8b0fc0d08
[ "MIT" ]
null
null
null
PokeUtils/__init__.py
carlossilva2/Poketext
cc1cec77460cdabe4d5400ee2a7ad5b8b0fc0d08
[ "MIT" ]
null
null
null
from .Pokemon import * from .PokeUtils import * from .PokeDB import *
23
24
0.753623
9
69
5.777778
0.555556
0.384615
0
0
0
0
0
0
0
0
0
0
0.15942
69
3
25
23
0.896552
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
d04633928d83f093fe59df34c7a5cc1ba6bafc69
23,764
py
Python
.venv/lib/python3.8/site-packages/coolname/data/__init__.py
jamesonl/hit-and-blow
232bbfd76cc4fe4a2153a26f3fed6bfa6414bb99
[ "MIT" ]
null
null
null
.venv/lib/python3.8/site-packages/coolname/data/__init__.py
jamesonl/hit-and-blow
232bbfd76cc4fe4a2153a26f3fed6bfa6414bb99
[ "MIT" ]
null
null
null
.venv/lib/python3.8/site-packages/coolname/data/__init__.py
jamesonl/hit-and-blow
232bbfd76cc4fe4a2153a26f3fed6bfa6414bb99
[ "MIT" ]
null
null
null
# THIS FILE IS AUTO-GENERATED, DO NOT EDIT config = {'all': {'comment': 'Entry point', 'type': 'nested', 'lists': ['2', '3', '4'], 'ensure_unique': True, 'ensure_unique_prefix': 4, 'max_slug_length': 50}, '2': {'comment': 'Two words (may also contain prepositions)', 'type': 'nested', 'lists': ['an']}, '3': {'comment': 'Three words (may also contain prepositions)', 'type': 'nested', 'lists': ['aan', 'ano', 'anl', 'nuo', 'as2', 's2o', 's2l', 'sl2']}, '4': {'comment': 'Four words (may also contain prepositions)', 'type': 'nested', 'lists': ['aano', 'aanl', 'anuo', 'as2o', 's2uo', 'as2l', 'asl2']}, 'an': {'comment': 'adjective-noun', 'type': 'cartesian', 'lists': ['adj_any', 'subj']}, 'aan': {'comment': 'adjective-adjective-noun', 'type': 'cartesian', 'lists': ['adj_far', 'adj_near', 'subj']}, 'ano': {'comment': 'adjective-noun-of-noun', 'type': 'cartesian', 'lists': ['adj_any', 'subj', 'of', 'of_noun_any']}, 'anl': {'comment': 'adjective-noun-from-location', 'type': 'cartesian', 'lists': ['adj_any', 'subj', 'from', 'from_noun_no_mod']}, 'nuo': {'comment': 'noun-of-adjective-noun', 'type': 'cartesian', 'lists': ['subj', 'of', 'of_modifier', 'of_noun']}, 'as2': {'comment': 'adjective-2word-subject', 'type': 'cartesian', 'lists': ['adj_far', 'subj2']}, 's2o': {'comment': '2word-subject-of-noun', 'type': 'cartesian', 'lists': ['subj2', 'of', 'of_noun_any']}, 's2l': {'comment': '2word-subject-from-location', 'type': 'cartesian', 'lists': ['subj2', 'from', 'from_noun_no_mod']}, 'sl2': {'comment': 'subject-from-some-location', 'type': 'cartesian', 'lists': ['subj', 'from', 'from2']}, 'aano': {'comment': 'adjective-adjective-noun-of-noun', 'type': 'cartesian', 'lists': ['adj_far', 'adj_near', 'subj', 'of', 'of_noun_any']}, 'aanl': {'comment': 'adjective-adjective-noun-from-location', 'type': 'cartesian', 'lists': ['adj_far', 'adj_near', 'subj', 'from', 'from_noun_no_mod']}, 'anuo': {'comment': 'adjective-noun-of-adjective-noun', 'type': 'cartesian', 'lists': ['adj_any', 
'subj', 'of', 'of_modifier', 'of_noun']}, 'as2o': {'comment': 'adjective-2word-subject-of-noun', 'type': 'cartesian', 'lists': ['adj_far', 'subj2', 'of', 'of_noun_any']}, 's2uo': {'comment': 'adjective-2word-subject-of-adjective-noun', 'type': 'cartesian', 'lists': ['subj2', 'of', 'of_modifier', 'of_noun']}, 'as2l': {'comment': 'adjective-2word-subject-from-location', 'type': 'cartesian', 'lists': ['adj_far', 'subj2', 'from', 'from_noun_no_mod']}, 'asl2': {'comment': 'adjective-subject-from-some-location', 'type': 'cartesian', 'lists': ['adj_any', 'subj', 'from', 'from2']}, 'adj_far': {'comment': 'First adjective (with more following)', 'type': 'nested', 'lists': ['adjective', 'adjective_first', 'noun_adjective', 'size']}, 'adj_near': {'comment': 'Last adjective (closest to the subject)', 'type': 'nested', 'lists': ['adjective', 'color', 'noun_adjective', 'prefix']}, 'adj_any': {'comment': 'The only adjective (includes everything)', 'type': 'nested', 'lists': ['adjective', 'color', 'noun_adjective', 'prefix', 'size']}, 'subj': {'comment': 'The subject (animal)', 'type': 'nested', 'lists': ['animal', 'animal_breed', 'animal_legendary']}, 'of': {'type': 'const', 'value': 'of'}, 'of_noun_any': {'type': 'nested', 'lists': ['of_noun', 'of_noun_no_mod']}, 'from': {'type': 'const', 'value': 'from'}, 'from_noun_no_mod': {'type': 'words', 'words': ['venus', 'mars', 'jupiter', 'ganymede', 'saturn', 'uranus', 'neptune', 'pluto', 'betelgeuse', 'sirius', 'vega', 'arcadia', 'asgard', 'atlantis', 'avalon', 'camelot', 'eldorado', 'heaven', 'hell', 'hyperborea', 'lemuria', 'nibiru', 'shambhala', 'tartarus', 'valhalla', 'wonderland']}, 'prefix': {'type': 'words', 'words': ['giga', 'mega', 'micro', 'mini', 'nano', 'pygmy', 'super', 'uber', 'ultra', 'cyber', 'mutant', 'ninja', 'space'], 'max_length': 13}, 'of_modifier': {'type': 'words', 'words': ['absolute', 'abstract', 'algebraic', 'amazing', 'amusing', 'ancient', 'angelic', 'astonishing', 'authentic', 'awesome', 'beautiful', 
'classic', 'delightful', 'demonic', 'eminent', 'enjoyable', 'eternal', 'excellent', 'exotic', 'extreme', 'fabulous', 'famous', 'fantastic', 'fascinating', 'flawless', 'fortunate', 'glorious', 'great', 'heavenly', 'holistic', 'hypothetical', 'ideal', 'illegal', 'imaginary', 'immense', 'imminent', 'immortal', 'impossible', 'impressive', 'improbable', 'incredible', 'inescapable', 'inevitable', 'infinite', 'inspiring', 'interesting', 'legal', 'magic', 'majestic', 'major', 'marvelous', 'massive', 'mysterious', 'nonconcrete', 'nonstop', 'luxurious', 'optimal', 'original', 'pastoral', 'perfect', 'perpetual', 'phenomenal', 'pleasurable', 'pragmatic', 'premium', 'radical', 'rampant', 'regular', 'remarkable', 'satisfying', 'serious', 'scientific', 'sexy', 'sheer', 'simple', 'silent', 'spectacular', 'splendid', 'stereotyped', 'stimulating', 'strange', 'striking', 'strongest', 'sublime', 'sudden', 'terrific', 'therapeutic', 'total', 'ultimate', 'uncanny', 'undeniable', 'unearthly', 'unexpected', 'unknown', 'unmatched', 'unnatural', 'unreal', 'unusual', 'utter', 'weird', 'wonderful', 'wondrous'], 'max_length': 13}, 'animal_breed': {'type': 'words', 'words': ['longhorn', 'akita', 'beagle', 'bloodhound', 'bulldog', 'chihuahua', 'collie', 'corgi', 'dalmatian', 'doberman', 'husky', 'labradoodle', 'labrador', 'mastiff', 'malamute', 'mongrel', 'poodle', 'rottweiler', 'spaniel', 'terrier', 'mule', 'mustang', 'pony', 'angora'], 'max_length': 13}, 'size': {'type': 'words', 'words': ['big', 'colossal', 'enormous', 'gigantic', 'great', 'huge', 'hulking', 'humongous', 'large', 'little', 'massive', 'miniature', 'petite', 'portable', 'small', 'tiny', 'towering'], 'max_length': 13}, 'from2': {'type': 'phrases', 'phrases': [('fancy', 'cafe'), ('prestigious', 'college'), ('prestigious', 'university'), ('big', 'city'), ('foreign', 'country'), ('small', 'town'), ('wild', 'west'), ('ancient', 'ruins'), ('another', 'dimension'), ('another', 'planet'), ('flying', 'circus'), ('secret', 'laboratory'), 
('the', 'government'), ('the', 'future'), ('the', 'past'), ('the', 'stars')], 'number_of_words': 2, 'max_length': 24}, 'of_noun_no_mod': {'type': 'words', 'words': ['chemistry', 'education', 'experiment', 'mathematics', 'psychology', 'reading', 'cubism', 'painting', 'advertising', 'agreement', 'climate', 'competition', 'effort', 'emphasis', 'foundation', 'judgment', 'memory', 'opportunity', 'perspective', 'priority', 'promise', 'teaching'], 'max_length': 13}, 'of_noun': {'type': 'words', 'words': ['anger', 'bliss', 'contentment', 'courage', 'ecstasy', 'excitement', 'faith', 'felicity', 'fury', 'gaiety', 'glee', 'glory', 'greatness', 'inspiration', 'jest', 'joy', 'happiness', 'holiness', 'love', 'merriment', 'passion', 'patience', 'peace', 'persistence', 'pleasure', 'pride', 'recreation', 'relaxation', 'romance', 'serenity', 'tranquility', 'apotheosis', 'chaos', 'energy', 'essence', 'eternity', 'excellence', 'experience', 'freedom', 'nirvana', 'order', 'perfection', 'spirit', 'variation', 'acceptance', 'brotherhood', 'criticism', 'culture', 'discourse', 'discussion', 'justice', 'piety', 'respect', 'security', 'support', 'tolerance', 'trust', 'warranty', 'abundance', 'admiration', 'assurance', 'authority', 'awe', 'certainty', 'control', 'domination', 'enterprise', 'fame', 'grandeur', 'influence', 'luxury', 'management', 'opposition', 'plenty', 'popularity', 'prestige', 'prosperity', 'reputation', 'reverence', 'reward', 'superiority', 'triumph', 'wealth', 'acumen', 'aptitude', 'art', 'artistry', 'competence', 'efficiency', 'expertise', 'finesse', 'genius', 'leadership', 'perception', 'skill', 'virtuosity', 'argument', 'debate', 'action', 'agility', 'amplitude', 'attack', 'charisma', 'chivalry', 'defense', 'defiance', 'devotion', 'dignity', 'endurance', 'exercise', 'force', 'fortitude', 'gallantry', 'health', 'honor', 'infinity', 'inquire', 'intensity', 'luck', 'mastery', 'might', 'opportunity', 'penetration', 'performance', 'pluck', 'potency', 'protection', 'prowess', 
'resistance', 'serendipity', 'speed', 'stamina', 'strength', 'swiftness', 'temperance', 'tenacity', 'valor', 'vigor', 'vitality', 'will', 'advance', 'conversion', 'correction', 'development', 'diversity', 'elevation', 'enhancement', 'enrichment', 'enthusiasm', 'focus', 'fruition', 'growth', 'improvement', 'innovation', 'modernism', 'novelty', 'proficiency', 'progress', 'promotion', 'realization', 'refinement', 'renovation', 'revolution', 'success', 'tempering', 'upgrade', 'ampleness', 'completion', 'satiation', 'saturation', 'sufficiency', 'vastness', 'wholeness', 'attraction', 'beauty', 'bloom', 'cleaning', 'courtesy', 'glamour', 'elegance', 'fascination', 'kindness', 'joviality', 'politeness', 'refinement', 'symmetry', 'sympathy', 'tact', 'calibration', 'drama', 'economy', 'engineering', 'examination', 'philosophy', 'poetry', 'research', 'science', 'democracy', 'election', 'feminism', 'champagne', 'coffee', 'cookies', 'flowers', 'fragrance', 'honeydew', 'music', 'pizza', 'aurora', 'blizzard', 'current', 'dew', 'downpour', 'drizzle', 'hail', 'hurricane', 'lightning', 'rain', 'snow', 'storm', 'sunshine', 'tempest', 'thunder', 'tornado', 'typhoon', 'weather', 'wind', 'whirlwind', 'abracadabra', 'adventure', 'atheism', 'camouflage', 'destiny', 'endeavor', 'expression', 'fantasy', 'fertility', 'imagination', 'karma', 'masquerade', 'maturity', 'radiance', 'shopping', 'sorcery', 'unity', 'witchcraft', 'wizardry', 'wonder', 'youth', 'purring'], 'max_length': 13}, 'subj2': {'type': 'phrases', 'phrases': [('atlantic', 'puffin'), ('bank', 'swallow'), ('barn', 'owl'), ('barn', 'swallow'), ('barred', 'owl'), ('chimney', 'swift'), ('cliff', 'swallow'), ('emperor', 'goose'), ('harlequin', 'duck'), ('himalayan', 'snowcock'), ('hyacinth', 'macaw'), ('mangrove', 'cuckoo'), ('mute', 'swan'), ('northern', 'cardinal'), ('peregrine', 'falcon'), ('prairie', 'falcon'), ('red', 'cardinal'), ('snow', 'goose'), ('snowy', 'owl'), ('trumpeter', 'swan'), ('tufted', 'puffin'), ('whooper', 
'swan'), ('whooping', 'crane'), ('fire', 'ant'), ('alpine', 'chipmunk'), ('beaked', 'whale'), ('bottlenose', 'dolphin'), ('clouded', 'leopard'), ('eared', 'seal'), ('elephant', 'seal'), ('feral', 'cat'), ('feral', 'dog'), ('feral', 'donkey'), ('feral', 'goat'), ('feral', 'horse'), ('feral', 'pig'), ('fur', 'seal'), ('grizzly', 'bear'), ('harbor', 'porpoise'), ('honey', 'badger'), ('humpback', 'whale'), ('killer', 'whale'), ('mountain', 'deer'), ('mountain', 'goat'), ('mountain', 'lion'), ('olympic', 'marmot'), ('pampas', 'deer'), ('pine', 'marten'), ('polynesian', 'rat'), ('rhesus', 'macaque'), ('river', 'dolphin'), ('sea', 'lion'), ('sea', 'otter'), ('snow', 'leopard'), ('sperm', 'whale'), ('spinner', 'dolphin'), ('vampire', 'bat'), ('gila', 'monster'), ('freshwater', 'crocodile'), ('saltwater', 'crocodile'), ('snapping', 'turtle'), ('walking', 'mushroom')], 'number_of_words': 2, 'max_length': 22}, 'adjective': {'type': 'words', 'words': ['acrid', 'ambrosial', 'amorphous', 'armored', 'aromatic', 'bald', 'blazing', 'boisterous', 'bouncy', 'brawny', 'bulky', 'camouflaged', 'caped', 'chubby', 'curvy', 'elastic', 'ethereal', 'fat', 'feathered', 'fiery', 'flashy', 'flat', 'fluffy', 'foamy', 'fragrant', 'furry', 'fuzzy', 'glaring', 'hairy', 'heavy', 'hissing', 'horned', 'icy', 'imaginary', 'invisible', 'lean', 'loud', 'loutish', 'lumpy', 'lush', 'masked', 'meaty', 'messy', 'misty', 'nebulous', 'noisy', 'nondescript', 'organic', 'purring', 'quiet', 'quirky', 'radiant', 'roaring', 'ruddy', 'rustling', 'screeching', 'shaggy', 'shapeless', 'shiny', 'silent', 'silky', 'singing', 'skinny', 'smooth', 'soft', 'spicy', 'spiked', 'statuesque', 'sticky', 'tacky', 'tall', 'tangible', 'tentacled', 'thick', 'thundering', 'venomous', 'warm', 'weightless', 'whispering', 'winged', 'wooden', 'adorable', 'affable', 'amazing', 'amiable', 'attractive', 'beautiful', 'calm', 'charming', 'cherubic', 'classic', 'classy', 'convivial', 'cordial', 'cuddly', 'curly', 'cute', 'debonair', 'elegant', 
'famous', 'fresh', 'friendly', 'funny', 'gorgeous', 'graceful', 'gregarious', 'grinning', 'handsome', 'hilarious', 'hot', 'interesting', 'kind', 'laughing', 'lovely', 'meek', 'mellow', 'merciful', 'neat', 'nifty', 'notorious', 'poetic', 'pretty', 'refined', 'refreshing', 'sexy', 'smiling', 'sociable', 'spiffy', 'stylish', 'sweet', 'tactful', 'whimsical', 'abiding', 'accurate', 'adamant', 'adaptable', 'adventurous', 'alluring', 'aloof', 'ambitious', 'amusing', 'annoying', 'arrogant', 'aspiring', 'belligerent', 'benign', 'berserk', 'benevolent', 'bold', 'brave', 'cheerful', 'chirpy', 'cocky', 'congenial', 'courageous', 'cryptic', 'curious', 'daft', 'dainty', 'daring', 'defiant', 'delicate', 'delightful', 'determined', 'devout', 'didactic', 'diligent', 'discreet', 'dramatic', 'dynamic', 'eager', 'eccentric', 'elated', 'encouraging', 'enigmatic', 'enthusiastic', 'evasive', 'faithful', 'fair', 'fanatic', 'fearless', 'fervent', 'festive', 'fierce', 'fine', 'free', 'gabby', 'garrulous', 'gay', 'gentle', 'glistening', 'greedy', 'grumpy', 'happy', 'honest', 'hopeful', 'hospitable', 'impetuous', 'independent', 'industrious', 'innocent', 'intrepid', 'jolly', 'jovial', 'just', 'lively', 'loose', 'loyal', 'merry', 'modest', 'mysterious', 'nice', 'obedient', 'optimistic', 'orthodox', 'outgoing', 'outrageous', 'overjoyed', 'passionate', 'perky', 'placid', 'polite', 'positive', 'proud', 'prudent', 'puzzling', 'quixotic', 'quizzical', 'rebel', 'resolute', 'rampant', 'righteous', 'romantic', 'rough', 'rousing', 'sassy', 'satisfied', 'sly', 'sincere', 'snobbish', 'spirited', 'spry', 'stalwart', 'stirring', 'swinging', 'tasteful', 'thankful', 'tidy', 'tremendous', 'truthful', 'unselfish', 'upbeat', 'uppish', 'valiant', 'vehement', 'vengeful', 'vigorous', 'vivacious', 'zealous', 'zippy', 'able', 'adept', 'analytic', 'astute', 'attentive', 'brainy', 'busy', 'calculating', 'capable', 'careful', 'cautious', 'certain', 'clever', 'competent', 'conscious', 'cooperative', 'crafty', 'crazy', 
'cunning', 'daffy', 'devious', 'discerning', 'efficient', 'expert', 'functional', 'gifted', 'helpful', 'enlightened', 'idealistic', 'impartial', 'industrious', 'ingenious', 'inquisitive', 'intelligent', 'inventive', 'judicious', 'keen', 'knowing', 'literate', 'logical', 'masterful', 'mindful', 'nonchalant', 'observant', 'omniscient', 'poised', 'practical', 'pragmatic', 'proficient', 'provocative', 'qualified', 'radical', 'rational', 'realistic', 'resourceful', 'savvy', 'sceptical', 'sensible', 'serious', 'shrewd', 'skilled', 'slick', 'slim', 'sloppy', 'smart', 'sophisticated', 'stoic', 'succinct', 'talented', 'thoughtful', 'tricky', 'unbiased', 'uptight', 'versatile', 'versed', 'visionary', 'wise', 'witty', 'accelerated', 'active', 'agile', 'athletic', 'dashing', 'deft', 'dexterous', 'energetic', 'fast', 'frisky', 'hasty', 'hypersonic', 'meteoric', 'mighty', 'muscular', 'nimble', 'nippy', 'powerful', 'prompt', 'quick', 'rapid', 'resilient', 'robust', 'rugged', 'solid', 'speedy', 'steadfast', 'steady', 'strong', 'sturdy', 'tireless', 'tough', 'unyielding', 'rich', 'wealthy', 'meticulous', 'precise', 'rigorous', 'scrupulous', 'strict', 'airborne', 'burrowing', 'crouching', 'flying', 'hidden', 'hopping', 'jumping', 'lurking', 'tunneling', 'warping', 'aboriginal', 'amphibian', 'aquatic', 'arboreal', 'polar', 'terrestrial', 'urban', 'accomplished', 'astonishing', 'authentic', 'awesome', 'delectable', 'excellent', 'exotic', 'exuberant', 'fabulous', 'fantastic', 'fascinating', 'flawless', 'fortunate', 'funky', 'godlike', 'glorious', 'groovy', 'honored', 'illustrious', 'imposing', 'important', 'impressive', 'incredible', 'invaluable', 'kickass', 'majestic', 'magnificent', 'marvellous', 'monumental', 'perfect', 'phenomenal', 'pompous', 'precious', 'premium', 'private', 'remarkable', 'spectacular', 'splendid', 'successful', 'wonderful', 'wondrous', 'offbeat', 'original', 'outstanding', 'quaint', 'unique', 'ancient', 'antique', 'prehistoric', 'primitive', 'abstract', 
'acoustic', 'angelic', 'arcane', 'archetypal', 'augmented', 'auspicious', 'axiomatic', 'beneficial', 'bipedal', 'bizarre', 'complex', 'dancing', 'dangerous', 'demonic', 'divergent', 'economic', 'electric', 'elite', 'eminent', 'enchanted', 'esoteric', 'finicky', 'fractal', 'futuristic', 'gainful', 'hallowed', 'heavenly', 'heretic', 'holistic', 'hungry', 'hypnotic', 'hysterical', 'illegal', 'imperial', 'imported', 'impossible', 'inescapable', 'juicy', 'liberal', 'ludicrous', 'lyrical', 'magnetic', 'manipulative', 'mature', 'military', 'macho', 'married', 'melodic', 'natural', 'naughty', 'nocturnal', 'nostalgic', 'optimal', 'pastoral', 'peculiar', 'piquant', 'pristine', 'prophetic', 'psychedelic', 'quantum', 'rare', 'real', 'secret', 'simple', 'spectral', 'spiritual', 'stereotyped', 'stimulating', 'straight', 'strange', 'tested', 'therapeutic', 'true', 'ubiquitous', 'uncovered', 'unnatural', 'utopian', 'vagabond', 'vague', 'vegan', 'victorious', 'vigilant', 'voracious', 'wakeful', 'wandering', 'watchful', 'wild', 'bright', 'brilliant', 'colorful', 'crystal', 'dark', 'dazzling', 'fluorescent', 'glittering', 'glossy', 'gleaming', 'light', 'mottled', 'neon', 'opalescent', 'pastel', 'smoky', 'sparkling', 'spotted', 'striped', 'translucent', 'transparent', 'vivid'], 'max_length': 13}, 'animal': {'type': 'words', 'words': ['earthworm', 'leech', 'worm', 'scorpion', 'spider', 'tarantula', 'barnacle', 'crab', 'crayfish', 'lobster', 'pillbug', 'prawn', 'shrimp', 'ant', 'bee', 'beetle', 'bug', 'bumblebee', 'butterfly', 'caterpillar', 'cicada', 'cricket', 'dragonfly', 'earwig', 'firefly', 'grasshopper', 'honeybee', 'hornet', 'inchworm', 'ladybug', 'locust', 'mantis', 'mayfly', 'mosquito', 'moth', 'sawfly', 'silkworm', 'termite', 'wasp', 'woodlouse', 'centipede', 'millipede', 'pronghorn', 'antelope', 'bison', 'buffalo', 'bull', 'chamois', 'cow', 'gazelle', 'gaur', 'goat', 'ibex', 'impala', 'kudu', 'markhor', 'mouflon', 'muskox', 'nyala', 'sheep', 'wildebeest', 'yak', 'zebu', 
'alpaca', 'camel', 'llama', 'vicugna', 'caribou', 'chital', 'deer', 'elk', 'moose', 'pudu', 'reindeer', 'sambar', 'wapiti', 'beluga', 'dolphin', 'narwhal', 'orca', 'porpoise', 'whale', 'donkey', 'horse', 'stallion', 'zebra', 'giraffe', 'okapi', 'hippo', 'rhino', 'boar', 'hog', 'pig', 'swine', 'warthog', 'peccary', 'buzzard', 'eagle', 'goshawk', 'harrier', 'hawk', 'vulture', 'duck', 'goose', 'swan', 'teal', 'bird', 'hummingbird', 'swift', 'kiwi', 'potoo', 'seriema', 'cassowary', 'emu', 'condor', 'auk', 'avocet', 'guillemot', 'kittiwake', 'puffin', 'seagull', 'skua', 'stork', 'dodo', 'dove', 'pigeon', 'kingfisher', 'tody', 'bustard', 'coua', 'coucal', 'cuckoo', 'koel', 'malkoha', 'roadrunner', 'kagu', 'caracara', 'falcon', 'kestrel', 'chachalaca', 'chicken', 'curassow', 'grouse', 'guan', 'junglefowl', 'partridge', 'peacock', 'pheasant', 'quail', 'rooster', 'turkey', 'loon', 'coot', 'crane', 'turaco', 'hoatzin', 'bullfinch', 'crow', 'jackdaw', 'jaybird', 'finch', 'lyrebird', 'magpie', 'myna', 'nightingale', 'nuthatch', 'oriole', 'oxpecker', 'raven', 'robin', 'rook', 'skylark', 'sparrow', 'starling', 'swallow', 'waxbill', 'wren', 'heron', 'ibis', 'jacamar', 'piculet', 'toucan', 'toucanet', 'woodpecker', 'flamingo', 'grebe', 'albatross', 'fulmar', 'petrel', 'spoonbill', 'ara', 'cockatoo', 'kakapo', 'lorikeet', 'macaw', 'parakeet', 'parrot', 'penguin', 'ostrich', 'boobook', 'owl', 'booby', 'cormorant', 'frigatebird', 'pelican', 'quetzal', 'trogon', 'axolotl', 'bullfrog', 'frog', 'newt', 'salamander', 'toad', 'angelfish', 'barracuda', 'carp', 'catfish', 'dogfish', 'goldfish', 'guppy', 'eel', 'flounder', 'herring', 'lionfish', 'mackerel', 'oarfish', 'perch', 'salmon', 'seahorse', 'sturgeon', 'sunfish', 'tench', 'trout', 'tuna', 'wrasse', 'sawfish', 'shark', 'stingray', 'jellyfish', 'alligator', 'caiman', 'crocodile', 'gharial', 'starfish', 'urchin', 'hedgehog', 'coyote', 'dingo', 'dog', 'fennec', 'fox', 'hound', 'jackal', 'tanuki', 'wolf', 'bobcat', 'caracal', 'cat', 
'cougar', 'jaguar', 'jaguarundi', 'leopard', 'lion', 'lynx', 'manul', 'ocelot', 'panther', 'puma', 'serval', 'smilodon', 'tiger', 'wildcat', 'aardwolf', 'binturong', 'cheetah', 'civet', 'fossa', 'hyena', 'meerkat', 'mongoose', 'badger', 'coati', 'ermine', 'ferret', 'marten', 'mink', 'otter', 'polecat', 'skunk', 'stoat', 'weasel', 'wolverine', 'seal', 'walrus', 'raccoon', 'ringtail', 'bear', 'panda', 'bat', 'armadillo', 'elephant', 'mammoth', 'mastodon', 'mole', 'hyrax', 'bandicoot', 'bettong', 'cuscus', 'kangaroo', 'koala', 'numbat', 'quokka', 'quoll', 'wallaby', 'wombat', 'echidna', 'platypus', 'tapir', 'anteater', 'sloth', 'agouti', 'beaver', 'capybara', 'chinchilla', 'chipmunk', 'degu', 'dormouse', 'gerbil', 'gopher', 'groundhog', 'jackrabbit', 'jerboa', 'hamster', 'hare', 'lemming', 'marmot', 'mouse', 'muskrat', 'porcupine', 'rabbit', 'rat', 'squirrel', 'vole', 'ape', 'baboon', 'bonobo', 'capuchin', 'chimpanzee', 'galago', 'gibbon', 'gorilla', 'lemur', 'lori', 'macaque', 'mandrill', 'marmoset', 'monkey', 'orangutan', 'tamarin', 'tarsier', 'uakari', 'dugong', 'manatee', 'shrew', 'aardwark', 'clam', 'cockle', 'mussel', 'oyster', 'scallop', 'shellfish', 'ammonite', 'cuttlefish', 'nautilus', 'octopus', 'squid', 'limpet', 'slug', 'snail', 'sponge', 'tuatara', 'agama', 'chameleon', 'dragon', 'gecko', 'iguana', 'lizard', 'pogona', 'skink', 'adder', 'anaconda', 'asp', 'boa', 'cobra', 'copperhead', 'mamba', 'python', 'rattlesnake', 'sidewinder', 'snake', 'taipan', 'viper', 'tortoise', 'turtle', 'dinosaur', 'raptor', 'mushroom'], 'max_length': 13}, 'color': {'type': 'words', 'words': ['almond', 'amaranth', 'apricot', 'artichoke', 'auburn', 'azure', 'banana', 'beige', 'black', 'blond', 'blue', 'brown', 'burgundy', 'carmine', 'carrot', 'celadon', 'cerise', 'cerulean', 'charcoal', 'cherry', 'chestnut', 'chocolate', 'cinnamon', 'copper', 'cream', 'crimson', 'cyan', 'daffodil', 'dandelion', 'denim', 'ebony', 'eggplant', 'gray', 'ginger', 'green', 'indigo', 'infrared', 
'jasmine', 'khaki', 'lavender', 'lilac', 'mauve', 'magenta', 'mahogany', 'maize', 'marigold', 'mustard', 'ochre', 'orange', 'papaya', 'peach', 'persimmon', 'pink', 'pistachio', 'pumpkin', 'purple', 'raspberry', 'red', 'rose', 'russet', 'saffron', 'sage', 'scarlet', 'sepia', 'silver', 'tan', 'tangerine', 'taupe', 'teal', 'tomato', 'turquoise', 'tuscan', 'ultramarine', 'ultraviolet', 'umber', 'vanilla', 'vermilion', 'violet', 'viridian', 'white', 'wine', 'wisteria', 'yellow', 'agate', 'amber', 'amethyst', 'aquamarine', 'asparagus', 'beryl', 'brass', 'bronze', 'cobalt', 'coral', 'cornflower', 'diamond', 'emerald', 'garnet', 'golden', 'granite', 'ivory', 'jade', 'jasper', 'lemon', 'lime', 'malachite', 'maroon', 'myrtle', 'nickel', 'olive', 'olivine', 'onyx', 'opal', 'orchid', 'pearl', 'peridot', 'platinum', 'quartz', 'ruby', 'sandy', 'sapphire', 'steel', 'thistle', 'topaz', 'tourmaline', 'tungsten', 'xanthic', 'zircon'], 'max_length': 13}, 'noun_adjective': {'type': 'words', 'words': ['fancy', 'magic', 'rainbow', 'woodoo'], 'max_length': 13}, 'animal_legendary': {'type': 'words', 'words': ['basilisk', 'chupacabra', 'dragon', 'griffin', 'pegasus', 'unicorn'], 'max_length': 13}, 'adjective_first': {'type': 'words', 'words': ['first', 'new'], 'max_length': 13}} # Python 2 compatibility - all words must be unicode # (this is to make Python 2 and 3 both work from the same __init__.py code) try: for listdef in config.values(): if listdef['type'] == 'words': listdef['words'] = [unicode(x) for x in listdef['words']] elif listdef['type'] == 'phrases': listdef['phrases'] = [tuple(unicode(y) for y in x) for x in listdef['phrases']] elif listdef['type'] == 'const': listdef['value'] = unicode(listdef['value']) except NameError: pass
1,584.266667
23,181
0.627209
2,318
23,764
6.38956
0.722606
0.014044
0.019445
0.015597
0.088718
0.079941
0.062656
0.041928
0.010128
0
0
0.003438
0.094218
23,764
14
23,182
1,697.428571
0.684646
0.006943
0
0
1
0
0.621488
0.018648
0
0
0
0
0
1
0
false
0.181818
0.090909
0
0.090909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
1
0
1
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
5
d048064ead7cf805cc801ac427670acb8dc21362
69
py
Python
siaw/__init__.py
SnailZSJ/SayItAnotherWay
73a3071c27e5aef77a412ec5a9a3aaa319785850
[ "MIT" ]
2
2020-07-23T02:19:06.000Z
2021-02-20T06:10:46.000Z
siaw/__init__.py
SnailZSJ/SayItAnotherWay
73a3071c27e5aef77a412ec5a9a3aaa319785850
[ "MIT" ]
null
null
null
siaw/__init__.py
SnailZSJ/SayItAnotherWay
73a3071c27e5aef77a412ec5a9a3aaa319785850
[ "MIT" ]
1
2020-07-23T02:19:07.000Z
2020-07-23T02:19:07.000Z
from .utils import * from .say_it_another_way import SayItAnotherWay
23
47
0.84058
10
69
5.5
0.8
0
0
0
0
0
0
0
0
0
0
0
0.115942
69
2
48
34.5
0.901639
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
d0663ade98ebca2779a0de82791c70ac255bf0b8
98
py
Python
pdns-mysql-domain-exp/lib/exceptions.py
kilgoretrout1985/pdns-mysql-domain-exp
9692971da82d625b242c740d9be8e2130a483249
[ "MIT" ]
null
null
null
pdns-mysql-domain-exp/lib/exceptions.py
kilgoretrout1985/pdns-mysql-domain-exp
9692971da82d625b242c740d9be8e2130a483249
[ "MIT" ]
null
null
null
pdns-mysql-domain-exp/lib/exceptions.py
kilgoretrout1985/pdns-mysql-domain-exp
9692971da82d625b242c740d9be8e2130a483249
[ "MIT" ]
null
null
null
class MyTooManyWhoisQuerisError(Exception): pass class MyWhoisBanError(Exception): pass
14
43
0.77551
8
98
9.5
0.625
0.342105
0
0
0
0
0
0
0
0
0
0
0.163265
98
6
44
16.333333
0.926829
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
d06bd4492f0599ac38bad796f04db03e5095ff52
54
py
Python
codewof/programming/content/en/print-codewof/solution.py
taskmaker1/codewof
92d52cd3ee91f0f311ff01a92cf6ec07e5593b8d
[ "MIT" ]
3
2019-08-29T04:11:22.000Z
2021-06-22T16:05:51.000Z
codewof/programming/content/en/print-codewof/solution.py
taskmaker1/codewof
92d52cd3ee91f0f311ff01a92cf6ec07e5593b8d
[ "MIT" ]
265
2019-05-30T03:51:46.000Z
2022-03-31T01:05:12.000Z
codewof/programming/content/en/print-codewof/solution.py
samuelsandri/codewof
c9b8b378c06b15a0c42ae863b8f46581de04fdfc
[ "MIT" ]
7
2019-06-29T12:13:37.000Z
2021-09-06T06:49:14.000Z
def print_codewof(): print("Welcome to codeWOF!")
18
32
0.685185
7
54
5.142857
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.166667
54
2
33
27
0.8
0
0
0
0
0
0.351852
0
0
0
0
0
0
1
0.5
true
0
0
0
0.5
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
1
0
5
d077e56753f9706b76a44acb0e134790df9ccc24
22,798
py
Python
dataset.py
yinmh17/semantic-segmentation-pytorch
74aac07b71a854f00b526d869bf06760ae0d40cf
[ "BSD-3-Clause" ]
null
null
null
dataset.py
yinmh17/semantic-segmentation-pytorch
74aac07b71a854f00b526d869bf06760ae0d40cf
[ "BSD-3-Clause" ]
null
null
null
dataset.py
yinmh17/semantic-segmentation-pytorch
74aac07b71a854f00b526d869bf06760ae0d40cf
[ "BSD-3-Clause" ]
null
null
null
import os import json import torch from torchvision import transforms import numpy as np from PIL import Image from lib.utils.zipreader import ZipReader import cv2 import random import collections def get_convert_label_fn(odgt): """ A function that converts labels to expected range [-1, num_classes-1] where -1 is ignored. When using custom dataset, you might want to add your own function. """ def convert_ade_label(segm): "Convert ADE labels to range [-1, 149]" return segm - 1 def convert_cityscapes_label(segm): "Convert cityscapes labels to range [-1, 18]" ignore_label = -1 label_mapping = { -1: ignore_label, 0: ignore_label, 1: ignore_label, 2: ignore_label, 3: ignore_label, 4: ignore_label, 5: ignore_label, 6: ignore_label, 7: 0, 8: 1, 9: ignore_label, 10: ignore_label, 11: 2, 12: 3, 13: 4, 14: ignore_label, 15: ignore_label, 16: ignore_label, 17: 5, 18: ignore_label, 19: 6, 20: 7, 21: 8, 22: 9, 23: 10, 24: 11, 25: 12, 26: 13, 27: 14, 28: 15, 29: ignore_label, 30: ignore_label, 31: 16, 32: 17, 33: 18} temp = segm.clone() for k, v in label_mapping.items(): segm[temp == k] = v return segm if 'cityscapes' in odgt.lower(): return convert_cityscapes_label elif 'ade' in odgt.lower(): return convert_ade_label else: return lambda x: x def imresize(im, size, interp='bilinear'): if interp == 'nearest': resample = PIL.Image.NEAREST elif interp == 'bilinear': resample = PIL.Image.BILINEAR elif interp == 'bicubic': resample = PIL.Image.BICUBIC else: raise Exception('resample method undefined!') return np.array( PIL.Image.fromarray(im).resize((size[1], size[0]), resample) ) class BaseDataset(torch.utils.data.Dataset): def __init__(self, odgt, opt, **kwargs): # parse options self.imgSizes = opt.imgSizes self.imgMaxSize = opt.imgMaxSize # max down sampling rate of network to avoid rounding during conv or pooling self.padding_constant = opt.padding_constant # parse the input list self.parse_input_list(odgt, **kwargs) # mean and std self.normalize = transforms.Normalize( 
mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) self.convert_label = get_convert_label_fn(odgt) def parse_input_list(self, odgt, max_sample=-1, start_idx=-1, end_idx=-1): if isinstance(odgt, list): self.list_sample = odgt elif isinstance(odgt, str): self.list_sample = [json.loads(x.rstrip()) for x in open(odgt, 'r')] if max_sample > 0: self.list_sample = self.list_sample[0:max_sample] if start_idx >= 0 and end_idx >= 0: # divide file list self.list_sample = self.list_sample[start_idx:end_idx] self.num_sample = len(self.list_sample) assert self.num_sample > 0 print('# samples: {}'.format(self.num_sample)) def img_transform(self, img): # 0-255 to 0-1 img = np.float32(img) / 255. img = img.transpose((2, 0, 1)) img = self.normalize(torch.from_numpy(img.copy())) return img def segm_transform_ade(self, segm): # to tensor, -1 to 149 segm = torch.from_numpy(segm).long() - 1 return segm def segm_transform_citi(self, segm): # transform segm label to tensor segm = torch.from_numpy(segm).long() # convert/map labels to expected range segm = self.convert_label(segm) return segm # Round x to the nearest multiple of p and x' >= x def round2nearest_multiple(self, x, p): return ((x - 1) // p + 1) * p class TrainDataset(BaseDataset): def __init__(self, root_dataset, odgt, opt, batch_per_gpu=1, **kwargs): super(TrainDataset, self).__init__(odgt, opt, **kwargs) self.root_dataset = root_dataset # down sampling rate of segm labe self.segm_downsampling_rate = opt.segm_downsampling_rate self.batch_per_gpu = batch_per_gpu # classify images into two classes: 1. h > w and 2. 
h <= w self.batch_record_list = [[], []] # override dataset length when trainig with batch_per_gpu > 1 self.cur_idx = 0 self.if_shuffled = False self.odgt = odgt def _get_sub_batch(self): while True: # get a sample record this_sample = self.list_sample[self.cur_idx] if this_sample['height'] > this_sample['width']: self.batch_record_list[0].append(this_sample) # h > w, go to 1st class else: self.batch_record_list[1].append(this_sample) # h <= w, go to 2nd class # update current sample pointer self.cur_idx += 1 if self.cur_idx >= self.num_sample: self.cur_idx = 0 np.random.shuffle(self.list_sample) if len(self.batch_record_list[0]) == self.batch_per_gpu: batch_records = self.batch_record_list[0] self.batch_record_list[0] = [] break elif len(self.batch_record_list[1]) == self.batch_per_gpu: batch_records = self.batch_record_list[1] self.batch_record_list[1] = [] break return batch_records def __getitem__(self, index): # NOTE: random shuffle for the first time. shuffle in __init__ is useless if not self.if_shuffled: np.random.seed(index) np.random.shuffle(self.list_sample) self.if_shuffled = True # get sub-batch candidates batch_records = self._get_sub_batch() # resize all images' short edges to the chosen size if isinstance(self.imgSizes, list) or isinstance(self.imgSizes, tuple): this_short_size = np.random.choice(self.imgSizes) else: this_short_size = self.imgSizes # calculate the BATCH's height and width # since we concat more than one samples, the batch's h and w shall be larger than EACH sample batch_widths = np.zeros(self.batch_per_gpu, np.int32) batch_heights = np.zeros(self.batch_per_gpu, np.int32) for i in range(self.batch_per_gpu): img_height, img_width = batch_records[i]['height'], batch_records[i]['width'] this_scale = min( this_short_size / min(img_height, img_width), \ self.imgMaxSize / max(img_height, img_width)) batch_widths[i] = img_width * this_scale batch_heights[i] = img_height * this_scale # Here we must pad both input image and segmentation map 
to size h' and w' so that p | h' and p | w' batch_width = np.max(batch_widths) batch_height = np.max(batch_heights) batch_width = int(self.round2nearest_multiple(batch_width, self.padding_constant)) batch_height = int(self.round2nearest_multiple(batch_height, self.padding_constant)) assert self.padding_constant >= self.segm_downsampling_rate, \ 'padding constant must be equal or large than segm downsamping rate' batch_images = torch.zeros( self.batch_per_gpu, 3, batch_height, batch_width) batch_segms = torch.zeros( self.batch_per_gpu, batch_height // self.segm_downsampling_rate, batch_width // self.segm_downsampling_rate).long() for i in range(self.batch_per_gpu): this_record = batch_records[i] # load image and label image_path = self.root_dataset+'ADEChallengeData2016.zip@/ADEChallengeData2016'+this_record['fpath_img'].lstrip('ADEChallengeData2016') segm_path = self.root_dataset+'ADEChallengeData2016.zip@/ADEChallengeData2016'+this_record['fpath_segm'].lstrip('ADEChallengeData2016') if 'cityscapes' in self.odgt.lower(): image_path = self.root_dataset+'leftImg8bit_trainvaltest.zip@/leftImg8bit/'\ +'/'.join(this_record['fpath_img'].split('/')[2:]) segm_path = self.root_dataset+'gtFine_trainvaltest.zip@/gtFine/'\ +'/'.join(this_record['fpath_segm'].split('/')[2:]) img = ZipReader.imread(image_path, 'BGR') img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) segm = ZipReader.imread(segm_path, 'P') #assert(segm.mode == "L") assert(img.shape[0] == segm.shape[0]) assert(img.shape[1] == segm.shape[1]) # random_flip flip = np.random.choice(2) * 2 - 1 img = img[:, ::flip, :] segm = segm[:, ::flip] # note that each sample within a mini batch has different scale param img = cv2.resize(img, (batch_widths[i], batch_heights[i]), cv2.INTER_LINEAR) segm = cv2.resize(segm, (batch_widths[i], batch_heights[i]), cv2.INTER_NEAREST) # further downsample seg label, need to avoid seg label misalignment segm_rounded_height = self.round2nearest_multiple(segm.shape[0], self.segm_downsampling_rate) 
segm_rounded_width = self.round2nearest_multiple(segm.shape[1], self.segm_downsampling_rate) segm_rounded = np.zeros((segm_rounded_height, segm_rounded_width), dtype='uint8') segm_rounded[:segm.shape[0], :segm.shape[1]] = segm segm = cv2.resize( segm_rounded, (segm_rounded.shape[1] // self.segm_downsampling_rate, \ segm_rounded.shape[0] // self.segm_downsampling_rate), \ cv2.INTER_NEAREST) # image transform, to torch float tensor 3xHxW img = self.img_transform(img) # segm transform, to torch long tensor HxW segm = self.segm_transform_ade(segm) if 'cityscapes' in self.odgt.lower(): segm = self.convert_label(segm) # put into batch arrays batch_images[i][:, :img.shape[1], :img.shape[2]] = img batch_segms[i][:segm.shape[0], :segm.shape[1]] = segm output = dict() output['img_data'] = batch_images output['seg_label'] = batch_segms return output def __len__(self): return int(1e10) # It's a fake length due to the trick that every loader maintains its own list #return self.num_sampleclass class ValDataset(BaseDataset): def __init__(self, root_dataset, odgt, opt, **kwargs): super(ValDataset, self).__init__(odgt, opt, **kwargs) self.root_dataset = root_dataset self.odgt = odgt def __getitem__(self, index): this_record = self.list_sample[index] # load image and label image_path = self.root_dataset + 'ADEChallengeData2016.zip@/ADEChallengeData2016' + this_record['fpath_img'].lstrip('ADEChallengeData2016') segm_path = self.root_dataset + 'ADEChallengeData2016.zip@/ADEChallengeData2016' + this_record['fpath_segm'].lstrip('ADEChallengeData2016') if 'cityscapes' in self.odgt.lower(): image_path = self.root_dataset + 'leftImg8bit_trainvaltest.zip@/leftImg8bit/' \ + '/'.join(this_record['fpath_img'].split('/')[2:]) segm_path = self.root_dataset + 'gtFine_trainvaltest.zip@/gtFine/' \ + '/'.join(this_record['fpath_segm'].split('/')[2:]) img = ZipReader.imread(image_path, 'BGR') img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) segm = ZipReader.imread(segm_path, 'P') assert(img.shape[0] == 
segm.shape[0]) assert(img.shape[1] == segm.shape[1]) ori_width, ori_height = img.shape[1], img.shape[0] img_resized_list = [] for this_short_size in self.imgSizes: # calculate target height and width scale = min(this_short_size / float(min(ori_height, ori_width)), self.imgMaxSize / float(max(ori_height, ori_width))) target_height, target_width = int(ori_height * scale), int(ori_width * scale) # to avoid rounding in network target_width = self.round2nearest_multiple(target_width, self.padding_constant) target_height = self.round2nearest_multiple(target_height, self.padding_constant) # resize images img_resized = cv2.resize(img, (target_width, target_height), cv2.INTER_LINEAR) # image transform, to torch float tensor 3xHxW img_resized = self.img_transform(img_resized) img_resized = torch.unsqueeze(img_resized, 0) img_resized_list.append(img_resized) # segm transform, to torch long tensor HxW segm = self.segm_transform_ade(segm) if 'cityscapes' in self.odgt.lower(): segm = self.convert_label(segm) batch_segms = torch.unsqueeze(segm, 0) output = dict() output['img_ori'] = np.array(img) output['img_data'] = [x.contiguous() for x in img_resized_list] output['seg_label'] = batch_segms.contiguous() output['info'] = this_record['fpath_img'] return output def __len__(self): return self.num_sample class TestDataset(BaseDataset): def __init__(self, odgt, opt, **kwargs): super(TestDataset, self).__init__(odgt, opt, **kwargs) def __getitem__(self, index): this_record = self.list_sample[index] # load image image_path = this_record['fpath_img'] img = Image.open(image_path).convert('RGB') ori_width, ori_height = img.size img_resized_list = [] for this_short_size in self.imgSizes: # calculate target height and width scale = min(this_short_size / float(min(ori_height, ori_width)), self.imgMaxSize / float(max(ori_height, ori_width))) target_height, target_width = int(ori_height * scale), int(ori_width * scale) # to avoid rounding in network target_width = 
self.round2nearest_multiple(target_width, self.padding_constant) target_height = self.round2nearest_multiple(target_height, self.padding_constant) # resize images img_resized = imresize(img, (target_width, target_height), interp='bilinear') # image transform, to torch float tensor 3xHxW img_resized = self.img_transform(img_resized) img_resized = torch.unsqueeze(img_resized, 0) img_resized_list.append(img_resized) output = dict() output['img_ori'] = np.array(img) output['img_data'] = [x.contiguous() for x in img_resized_list] output['info'] = this_record['fpath_img'] return output def __len__(self): return self.num_sample class CropTrainDataset(BaseDataset): def __init__(self, root_dataset, cropsize, odgt, opt, batch_per_gpu=1, **kwargs): super(CropTrainDataset, self).__init__(odgt, opt, **kwargs) self.root_dataset = root_dataset # down sampling rate of segm labe self.segm_downsampling_rate = opt.segm_downsampling_rate self.batch_per_gpu = batch_per_gpu # classify images into two classes: 1. h > w and 2. 
h <= w self.batch_record_list = [[], []] # override dataset length when trainig with batch_per_gpu > 1 self.cur_idx = 0 self.if_shuffled = False self.odgt = odgt self.cropsize=cropsize self.crop_h=cropsize[0] self.crop_w=cropsize[1] self.ignore_label=0 def _get_sub_batch(self): while True: # get a sample record this_sample = self.list_sample[self.cur_idx] if this_sample['height'] > this_sample['width']: self.batch_record_list[0].append(this_sample) # h > w, go to 1st class else: self.batch_record_list[1].append(this_sample) # h <= w, go to 2nd class # update current sample pointer self.cur_idx += 1 if self.cur_idx >= self.num_sample: self.cur_idx = 0 np.random.shuffle(self.list_sample) if len(self.batch_record_list[0]) == self.batch_per_gpu: batch_records = self.batch_record_list[0] self.batch_record_list[0] = [] break elif len(self.batch_record_list[1]) == self.batch_per_gpu: batch_records = self.batch_record_list[1] self.batch_record_list[1] = [] break return batch_records def __getitem__(self, index): # NOTE: random shuffle for the first time. 
shuffle in __init__ is useless if not self.if_shuffled: np.random.seed(index) np.random.shuffle(self.list_sample) self.if_shuffled = True # get sub-batch candidates batch_records = self._get_sub_batch() # resize all images' short edges to the chosen size if isinstance(self.imgSizes, list) or isinstance(self.imgSizes, tuple): this_short_size = np.random.choice(self.imgSizes) else: this_short_size = self.imgSizes # calculate the BATCH's height and width # since we concat more than one samples, the batch's h and w shall be larger than EACH sample batch_widths = np.zeros(self.batch_per_gpu, np.int32) batch_heights = np.zeros(self.batch_per_gpu, np.int32) for i in range(self.batch_per_gpu): img_height, img_width = batch_records[i]['height'], batch_records[i]['width'] this_scale = min( this_short_size / min(img_height, img_width), \ self.imgMaxSize / max(img_height, img_width)) batch_widths[i] = img_width * this_scale batch_heights[i] = img_height * this_scale # Here we must pad both input image and segmentation map to size h' and w' so that p | h' and p | w' #batch_width = np.max(batch_widths) #batch_height = np.max(batch_heights) #batch_width = int(self.round2nearest_multiple(batch_width, self.padding_constant)) #batch_height = int(self.round2nearest_multiple(batch_height, self.padding_constant)) batch_height = self.cropsize[0] batch_width = self.cropsize[1] batch_width = int(self.round2nearest_multiple(batch_width, self.padding_constant)) batch_height = int(self.round2nearest_multiple(batch_height, self.padding_constant)) assert self.padding_constant >= self.segm_downsampling_rate, \ 'padding constant must be equal or large than segm downsamping rate' batch_images = torch.zeros( self.batch_per_gpu, 3, batch_height, batch_width) batch_segms = torch.zeros( self.batch_per_gpu, batch_height // self.segm_downsampling_rate, batch_width // self.segm_downsampling_rate).long() for i in range(self.batch_per_gpu): this_record = batch_records[i] # load image and label image_path 
= self.root_dataset+'ADEChallengeData2016.zip@/ADEChallengeData2016'\ +this_record['fpath_img'].lstrip('ADEChallengeData2016') segm_path = self.root_dataset+'ADEChallengeData2016.zip@/ADEChallengeData2016'\ +this_record['fpath_segm'].lstrip('ADEChallengeData2016') img = ZipReader.imread(image_path, 'BGR') img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) segm = ZipReader.imread(segm_path, 'P') #assert(segm.mode == "L") assert(img.shape[0] == segm.shape[0]) assert(img.shape[1] == segm.shape[1]) # random_flip flip = np.random.choice(2) * 2 - 1 img = img[:, ::flip, :] segm = segm[:, ::flip] # note that each sample within a mini batch has different scale param img = cv2.resize(img, (batch_widths[i], batch_heights[i]), cv2.INTER_LINEAR) segm = cv2.resize(segm, (batch_widths[i], batch_heights[i]), cv2.INTER_NEAREST) img_h, img_w = segm.shape pad_h = max(self.crop_h - img_h, 0) pad_w = max(self.crop_w - img_w, 0) if pad_h > 0 or pad_w > 0: img_pad = cv2.copyMakeBorder(img, 0, pad_h, 0, pad_w, cv2.BORDER_CONSTANT, value=(0.0, 0.0, 0.0)) segm_pad = cv2.copyMakeBorder(segm, 0, pad_h, 0, pad_w, cv2.BORDER_CONSTANT, value=(self.ignore_label,)) else: img_pad, segm_pad = img, segm img_h, img_w = segm_pad.shape h_off = random.randint(0, img_h - self.crop_h) w_off = random.randint(0, img_w - self.crop_w) # roi = cv2.Rect(w_off, h_off, self.crop_w, self.crop_h); img = np.asarray(img_pad[h_off : h_off+self.crop_h, w_off : w_off+self.crop_w], np.float32) segm = np.asarray(segm_pad[h_off : h_off+self.crop_h, w_off : w_off+self.crop_w], np.float32) #image = image[:, :, ::-1] # change to BGR #img = img.transpose((2, 0, 1)) # further downsample seg label, need to avoid seg label misalignment segm_rounded_height = self.round2nearest_multiple(segm.shape[0], self.segm_downsampling_rate) segm_rounded_width = self.round2nearest_multiple(segm.shape[1], self.segm_downsampling_rate) segm_rounded = np.zeros((segm_rounded_height, segm_rounded_width), dtype='uint8') segm_rounded[:segm.shape[0], 
:segm.shape[1]] = segm segm = cv2.resize( segm_rounded, (segm_rounded.shape[1] // self.segm_downsampling_rate, \ segm_rounded.shape[0] // self.segm_downsampling_rate), \ cv2.INTER_NEAREST) # image transform, to torch float tensor 3xHxW img = self.img_transform(img) segm = self.segm_transform_ade(segm) batch_images[i][:, :img.shape[1], :img.shape[2]] = img batch_segms[i][:segm.shape[0], :segm.shape[1]] = segm output = dict() output['img_data'] = batch_images output['seg_label'] = batch_segms return output def __len__(self): return int(1e10) # It's a fake length due to the trick that every loader maintains its own list #return self.num_sampleclass
42.533582
147
0.613563
2,937
22,798
4.525366
0.116105
0.024377
0.019863
0.020315
0.79693
0.769468
0.750734
0.750734
0.740727
0.740727
0
0.024873
0.284016
22,798
535
148
42.613084
0.789377
0.141109
0
0.664
0
0
0.059629
0.021665
0
0
0
0
0.024
1
0.064
false
0
0.026667
0.013333
0.157333
0.002667
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
d08c56b043852f856164ed72329bc03881672030
85
py
Python
warmup-1/missing_char.py
ibLeDy/codingbat-python
246df68940f1bb3b25bdc070906ad2ba42b0c447
[ "MIT" ]
null
null
null
warmup-1/missing_char.py
ibLeDy/codingbat-python
246df68940f1bb3b25bdc070906ad2ba42b0c447
[ "MIT" ]
null
null
null
warmup-1/missing_char.py
ibLeDy/codingbat-python
246df68940f1bb3b25bdc070906ad2ba42b0c447
[ "MIT" ]
null
null
null
def missing_char(str, n): return "".join([v for i, v in enumerate(str) if i != n])
28.333333
58
0.623529
17
85
3.058824
0.764706
0
0
0
0
0
0
0
0
0
0
0
0.188235
85
2
59
42.5
0.753623
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
d0ae499fbb1b24f7fefd45aad242e5a72a1bc959
1,322
py
Python
tests/ext/test_sentry.py
olist/olist-loafer
cb3bc0a46ca5a343fbd455181045212e4d032ce6
[ "MIT" ]
11
2017-10-06T18:18:16.000Z
2021-12-23T11:34:11.000Z
tests/ext/test_sentry.py
pydrinker/pydrinker-loafer
32d2a36827f15cc2b5362ee54da7702267da24c9
[ "MIT" ]
7
2017-02-16T13:20:54.000Z
2020-11-03T13:50:46.000Z
tests/ext/test_sentry.py
pydrinker/pydrinker-loafer
32d2a36827f15cc2b5362ee54da7702267da24c9
[ "MIT" ]
2
2017-10-06T18:18:31.000Z
2020-09-10T15:09:45.000Z
from unittest import mock from loafer.ext.sentry import sentry_handler class MockScope: def __init__(self): self.set_extra = mock.Mock() def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): return None def test_sentry_handler(): mock_scope = MockScope() sdk_mocked = mock.Mock() sdk_mocked.push_scope.return_value = mock_scope handler = sentry_handler(sdk_mocked) exc = ValueError("test") exc_info = (type(exc), exc, None) delete_message = handler(exc_info, "test") assert delete_message is False assert sdk_mocked.push_scope.called mock_scope.set_extra.assert_called_once_with("message", "test") sdk_mocked.capture_exception.assert_called_once_with(exc_info) def test_sentry_handler_delete_message(): mock_scope = MockScope() sdk_mocked = mock.Mock() sdk_mocked.push_scope.return_value = mock_scope handler = sentry_handler(sdk_mocked, delete_message=True) exc = ValueError("test") exc_info = (type(exc), exc, None) delete_message = handler(exc_info, "test") assert delete_message is True assert sdk_mocked.push_scope.called mock_scope.set_extra.assert_called_once_with("message", "test") sdk_mocked.capture_exception.assert_called_once_with(exc_info)
28.12766
67
0.732224
181
1,322
4.944751
0.226519
0.100559
0.058101
0.080447
0.719553
0.719553
0.719553
0.719553
0.719553
0.719553
0
0
0.177005
1,322
46
68
28.73913
0.82261
0
0
0.545455
0
0
0.028744
0
0
0
0
0
0.242424
1
0.151515
false
0
0.060606
0.060606
0.30303
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
ef723897a071963ef285c006b581299542b0a816
189
py
Python
python/basic_course_python/exercises/bank_account/utils.py
ademilsoncarvalho/estudos
ae7b73a6154c29d54be367066803323c6eb52907
[ "MIT" ]
null
null
null
python/basic_course_python/exercises/bank_account/utils.py
ademilsoncarvalho/estudos
ae7b73a6154c29d54be367066803323c6eb52907
[ "MIT" ]
null
null
null
python/basic_course_python/exercises/bank_account/utils.py
ademilsoncarvalho/estudos
ae7b73a6154c29d54be367066803323c6eb52907
[ "MIT" ]
null
null
null
import os def clear_display(): os.system('clear') def get_header(): print("*******************") print("********Ademilson Bank***********") print("*******************")
15.75
46
0.412698
16
189
4.75
0.6875
0
0
0
0
0
0
0
0
0
0
0
0.164021
189
11
47
17.181818
0.481013
0
0
0.285714
0
0
0.402116
0
0
0
0
0
0
1
0.285714
true
0
0.142857
0
0.428571
0.428571
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
1
0
5
ef92f6fb4f799990c76dc7dd9e2f5e98a8e1962e
53
py
Python
0x09-python-everything_is_object/19-copy_list.py
ricardo1470/holbertonschool-higher_level_programming
aab73c8efee665b0215958ee7b338871f13634bc
[ "CNRI-Python" ]
1
2021-01-27T03:13:32.000Z
2021-01-27T03:13:32.000Z
0x09-python-everything_is_object/19-copy_list.py
ricardo1470/holbertonschool-higher_level_programming
aab73c8efee665b0215958ee7b338871f13634bc
[ "CNRI-Python" ]
null
null
null
0x09-python-everything_is_object/19-copy_list.py
ricardo1470/holbertonschool-higher_level_programming
aab73c8efee665b0215958ee7b338871f13634bc
[ "CNRI-Python" ]
2
2021-01-09T04:45:30.000Z
2021-07-13T04:23:47.000Z
#!/usr/bin/python3 def copy_list(l): return l[:]
13.25
18
0.622642
9
53
3.555556
0.888889
0
0
0
0
0
0
0
0
0
0
0.022727
0.169811
53
3
19
17.666667
0.704545
0.320755
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
efc46f91dadfc0f9314d746a851c981990db5473
101
py
Python
scheduled_sheet_fetch.py
Petricpwnz/Map-Pool-App
7654e427e87f81d1305c54542e0308d731b431b6
[ "MIT" ]
null
null
null
scheduled_sheet_fetch.py
Petricpwnz/Map-Pool-App
7654e427e87f81d1305c54542e0308d731b431b6
[ "MIT" ]
2
2020-02-11T23:37:07.000Z
2020-06-05T20:01:47.000Z
scheduled_sheet_fetch.py
Petricpwnz/Map-Pool-App
7654e427e87f81d1305c54542e0308d731b431b6
[ "MIT" ]
null
null
null
from mappool.extra_logic.maps import MapPool fetch_maps = MapPool() fetch_maps.fetchMapsIntoJson()
16.833333
44
0.821782
13
101
6.153846
0.615385
0.3
0.4
0
0
0
0
0
0
0
0
0
0.09901
101
5
45
20.2
0.879121
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
ef26da881e4853f9aeb236b4f3885adc11d44607
181
py
Python
tests/utils.py
pelme/freezegun
415314bbed741a20d123d0479c480d01c2ad4286
[ "Apache-2.0" ]
null
null
null
tests/utils.py
pelme/freezegun
415314bbed741a20d123d0479c480d01c2ad4286
[ "Apache-2.0" ]
null
null
null
tests/utils.py
pelme/freezegun
415314bbed741a20d123d0479c480d01c2ad4286
[ "Apache-2.0" ]
null
null
null
from freezegun.api import FakeDate, FakeDatetime def is_fake_date(obj): return obj.__class__ is FakeDate def is_fake_datetime(obj): return obj.__class__ is FakeDatetime
18.1
48
0.78453
26
181
5
0.538462
0.076923
0.138462
0.261538
0.292308
0
0
0
0
0
0
0
0.160221
181
9
49
20.111111
0.855263
0
0
0
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0.2
0.4
1
0
1
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
5
ef2c4f08596d27057489cda0654506c1b7af5dc9
20,573
py
Python
py/genetargeter/primerSelection.py
pablocarderam/genetargeter
4cd257d02e0753c1be9fa339aa505bb895315a6d
[ "MIT" ]
null
null
null
py/genetargeter/primerSelection.py
pablocarderam/genetargeter
4cd257d02e0753c1be9fa339aa505bb895315a6d
[ "MIT" ]
2
2021-06-02T15:28:52.000Z
2021-06-17T12:08:53.000Z
py/genetargeter/primerSelection.py
pablocarderam/genetargeter
4cd257d02e0753c1be9fa339aa505bb895315a6d
[ "MIT" ]
null
null
null
from builtins import str from py.utils.BioUtils import *; # Imports utils from py.utils.GenBankToolbox import *; # Imports utils from py.genetargeter.constants import *; # Imports constants """ Creates list with GenBankAnn objects for forward and reverse primers for obtaining part given. Poor design, user should check with other tools afterwards. """ def createPrimers(plasmid, part, rangeSize=[18,22,50], rangeMeltTemp=[55,62,65], maxTempDif=3): #TODO: this code is a little crap. log = ""; # init log startPF = part.index[0]; # Rev primer preferred start position endPF = part.index[0] + rangeSize[1]; # Rev primer preferred end position primFwdSeq = plasmid.origin[startPF:endPF]; # Fwd primer sequence if meltingTemp(primFwdSeq) < rangeMeltTemp[0] or meltingTemp(primFwdSeq) > rangeMeltTemp[2] or not primFwdSeq[len(primFwdSeq)-1].upper().replace("G","C") == "C": # if out of Tm range or no GC clamp endPF = part.index[0] + rangeSize[0]; # Smallest fwd primer end position primFwdSeq = plasmid.origin[startPF:endPF]; # Fwd primer sequence maxIndexes = [startPF, endPF]; # store start and end positions of best primer in search range while (meltingTemp(primFwdSeq) < rangeMeltTemp[0] or meltingTemp(primFwdSeq) > rangeMeltTemp[2] or not primFwdSeq[len(primFwdSeq)-1].upper().replace("G","C") == "C") and rangeSize[0] <= len(primFwdSeq) <= rangeSize[2]: # while still no suitable Tm found or no gc clamp and still within length parameters, endPF = endPF + 1; # shift primer start position upstream primFwdSeq = plasmid.origin[startPF:endPF]; # Fwd primer sequence if meltingTemp(primFwdSeq)-rangeMeltTemp[1] > meltingTemp(plasmid.origin[maxIndexes[0]:maxIndexes[1]])-rangeMeltTemp[1] and primFwdSeq[len(primFwdSeq)-1].upper().replace("G","C") == "C": # if this primer has Tm closer to the preferred Tm, and has gc clamp maxIndexes = [startPF, endPF]; # store start and end positions of this primer as best if meltingTemp(primFwdSeq) < rangeMeltTemp[0] or meltingTemp(primFwdSeq) > 
rangeMeltTemp[2]: # if still no use startPF = maxIndexes[0]; # Fwd primer default start position endPF = maxIndexes[1]; # Fwd primer default end position primFwdSeq = plasmid.origin[startPF:endPF]; # Fwd primer sequence log = log + "Warning: Best fwd primer for sequence " + part.label + " under given constraints has a Tm of " + str(meltingTemp(primFwdSeq)) + "\n\n"; # give warning startPR = part.index[1] - rangeSize[1]; # Rev primer start position endPR = part.index[1]; # Rev primer end position primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence if meltingTemp(primRevSeq) < rangeMeltTemp[0] or meltingTemp(primFwdSeq)-meltingTemp(primRevSeq) > maxTempDif or not primRevSeq[len(primRevSeq)-1].upper().replace("G","C") == "C": # if out of Tm range or no gc clamp startPR = part.index[1] - rangeSize[0]; # Smallest rev primer end position primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence maxIndexes = [startPR, endPR]; # store start and end positions of best primer in search range while (meltingTemp(primRevSeq) < rangeMeltTemp[0] or meltingTemp(primFwdSeq)-meltingTemp(primRevSeq) > maxTempDif or not primRevSeq[len(primRevSeq)-1].upper().replace("G","C") == "C") and rangeSize[0] <= len(primRevSeq) <= rangeSize[2]: # while still no suitable Tm found o no gc clamp, and still within length parameters, startPR = startPR - 1; # shift primer start position upstream primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence if meltingTemp(primFwdSeq)-meltingTemp(primRevSeq) < meltingTemp(primFwdSeq)-meltingTemp(plasmid.origin[maxIndexes[0]:maxIndexes[1]]) and primRevSeq[len(primRevSeq)-1].upper().replace("G","C") == "C": # if this primer has Tm closer to the fwd primer's, maxIndexes = [startPR, endPR]; # store start and end positions of this primer as best if meltingTemp(primRevSeq) < rangeMeltTemp[0] or meltingTemp(primRevSeq) > rangeMeltTemp[2]: # if still no use and it's the reverse primer's fault, startPR = 
maxIndexes[0]; # Rev primer default start position endPR = maxIndexes[1]; # Rev primer default end position primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence log = log + "Warning: Best rev primer for sequence " + part.label + " under given constraints has a Tm of " + str(meltingTemp(primRevSeq)) + "\n\n"; # give warning if meltingTemp(primFwdSeq)-meltingTemp(primRevSeq) > maxTempDif: # if temp difference exceeds specs startPR = maxIndexes[0]; # Rev primer default start position endPR = maxIndexes[1]; # Rev primer default end position primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence lastBase = len(primFwdSeq); # stores possible end points of fwd primer primFwdSeq = primFwdSeq.upper(); while meltingTemp(primFwdSeq[0:lastBase])-meltingTemp(primRevSeq) > maxTempDif and meltingTemp(primFwdSeq[0:lastBase]) > rangeMeltTemp[0] and lastBase > rangeSize[0]*2: # while T diff is still out of bounds and still within bounds of Fwd primer, lastBase -= 1; while not primFwdSeq[lastBase-1].upper().replace("G","C") == "C" and lastBase > rangeSize[0]: # find next G or C upstream lastBase -= 1; if meltingTemp(primFwdSeq[0:lastBase])-meltingTemp(primRevSeq) < maxTempDif and meltingTemp(primFwdSeq[0:lastBase]) > rangeMeltTemp[0]: # while T diff is still out of bounds and still within bounds of Fwd primer, endPF = endPF - (len(primFwdSeq)-lastBase); primFwdSeq = plasmid.origin[startPF:endPF]; else: # if temp difference still exceeds specs log = log + "Warning: Primers for sequence " + part.label + " under given constraints have a Tm difference of " + str(meltingTemp(primFwdSeq)-meltingTemp(primRevSeq)) + ", above the given threshold of " + str(maxTempDif) + "\n\n"; # give warning # if isTricky( primFwdSeq ): # true if this terminus contains homopolymers or AT repeats: # log = log + "Warning: Forward primer for sequence " + part.label + " may be hard to synthesize.\n\n"; # give warning # if isTricky( primRevSeq ): # true if this terminus 
contains homopolymers or AT repeats: # log = log + "Warning: Reverse primer for sequence " + part.label + " may be hard to synthesize.\n\n"; # give warning annPrimFwd = GenBankAnn(part.label + " Primer (Fwd)", "misc_feature", primFwdSeq, False, [startPF,endPF], annColors['primerColor']); # creates GenBankAnn object to hold fwd primer annPrimRev = GenBankAnn(part.label + " Primer (Rev)", "misc_feature", primRevSeq, True, [startPR,endPR], annColors['primerColor']); # creates GenBankAnn object to hold rev primer log = log + "Primers for part " + part.label + " selected.\n\n"; # logs this process finished return {"out":[annPrimFwd, annPrimRev], "log":log}; # return primer annotation objects """ Creates list with GenBankAnn objects for forward and reverse primers for obtaining part for Gibson Assembly in plasmid given. The plasmid must have the part as an annotation. rangeHom gives the length of homology on each side of the primer [min,preferred,max]. """ def createGibsonPrimers(plasmid, part, rangeHom=[30,40,50], minMeltTemp=68, maxTempDif=5): #TODO: debug. 
log = ""; # init log startPF = max(part.index[0] - rangeHom[1],0); # Fwd primer preferred start position endPF = min(part.index[0] + rangeHom[1],len(plasmid.origin)); # Fwd primer preferred end position primFwdSeq = plasmid.origin[startPF:endPF]; # Fwd primer sequence if meltingTemp(plasmid.origin[part.index[0]:endPF]) < minMeltTemp or not primFwdSeq[len(primFwdSeq)-1].upper().replace("G","C") == "C": # if still no use startPF = max(part.index[0] - rangeHom[0],0); # Smallest fwd primer start position endPF = min(part.index[0] + rangeHom[0],len(plasmid.origin)); # Smallest fwd primer end position primFwdSeq = plasmid.origin[startPF:endPF]; # Fwd primer sequence maxIndexes = [startPF, endPF]; # store start and end positions of best primer in search range while (meltingTemp(plasmid.origin[part.index[0]:endPF]) < minMeltTemp or not primFwdSeq[len(primFwdSeq)-1].upper().replace("G","C") == "C") and len(primFwdSeq) <= rangeHom[2]*2: # while still no suitable Tm found and still within length parameters, # startPF = startPF - 1; # shift primer start position upstream endPF = endPF + 1; # shift primer start position upstream primFwdSeq = plasmid.origin[startPF:endPF]; # Fwd primer sequence if (meltingTemp(plasmid.origin[part.index[0]:endPF]) > meltingTemp(plasmid.origin[part.index[0]:maxIndexes[1]]) or not plasmid.origin[maxIndexes[1]-1].upper().replace("G","C") == "C") and primFwdSeq[len(primFwdSeq)-1].upper().replace("G","C") == "C" and len(primFwdSeq) <= rangeHom[2]*2: # if this primer has higher Tm than the max or the current max has no gc clamp, and this one does have a gc clamp, and within length constraints, maxIndexes = [startPF, endPF]; # store start and end positions of this primer startPF = maxIndexes[0]; # Fwd primer default start position endPF = maxIndexes[1]; # Fwd primer default end position primFwdSeq = plasmid.origin[startPF:endPF]; # Fwd primer sequence if meltingTemp(plasmid.origin[part.index[0]:endPF]) < minMeltTemp: # if still no use log = log + 
"Warning: Best Gibson fwd primer for sequence " + part.label + " under given constraints has a Tm of " + str(meltingTemp(plasmid.origin[part.index[0]:endPF])) + ", below the given threshold of " + str(minMeltTemp) + "\n\n"; # give warning startPR = max(part.index[1] - rangeHom[1],0); # Rev primer start position endPR = min(part.index[1] + rangeHom[1],len(plasmid.origin)); # Rev primer end position primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence if meltingTemp(plasmid.origin[startPR:part.index[1]]) < minMeltTemp or meltingTemp(plasmid.origin[startPR:part.index[1]])-meltingTemp(plasmid.origin[startPR:part.index[1]]) > maxTempDif or not primRevSeq[len(primRevSeq)-1].upper().replace("G","C") == "C": # if still no use startPR = max(part.index[1] - rangeHom[0],1); # Smallest fwd primer start position endPR = min(part.index[1] + rangeHom[0],len(plasmid.origin)); # Smallest fwd primer end position primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence maxIndexes = [startPR, endPR]; # store start and end positions of best primer in search range while (meltingTemp(plasmid.origin[startPR:part.index[1]]) < minMeltTemp or meltingTemp(plasmid.origin[startPR:part.index[1]])-meltingTemp(plasmid.origin[startPR:part.index[1]]) > maxTempDif or not primRevSeq[len(primRevSeq)-1].upper().replace("G","C") == "C") and rangeHom[0]*2 <= len(primRevSeq) <= rangeHom[2]*2: # while still no suitable Tm found and still within length parameters, startPR = startPR - 1; # shift primer start position upstream # endPR = endPR + 1; # shift primer start position upstream primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence if meltingTemp(plasmid.origin[startPR:part.index[1]]) > meltingTemp(plasmid.origin[maxIndexes[0]:part.index[1]]) and primRevSeq[len(primRevSeq)-1].upper().replace("G","C") == "C" and rangeHom[0]*2 <= len(primRevSeq) <= rangeHom[2]*2: # if this primer has higher Tm than the max and has gc clamp and within max length, 
maxIndexes = [startPR, endPR]; # store start and end positions of this primer startPR = maxIndexes[0]; # Rev primer default start position endPR = maxIndexes[1]; # Rev primer default end position primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence if meltingTemp(plasmid.origin[startPR:part.index[1]]) < minMeltTemp: # if still no use log = log + "Warning: Best Gibson rev primer for sequence " + part.label + " under given constraints has a Tm of " + str(meltingTemp(plasmid.origin[startPR:part.index[1]])) + ", below the given threshold of " + str(minMeltTemp) + "\n\n"; # give warning elif meltingTemp(plasmid.origin[part.index[0]:endPF])-meltingTemp(plasmid.origin[startPR:part.index[1]]) > maxTempDif: # if temp difference exceeds specs startPR = maxIndexes[0]; # Rev primer default start position endPR = maxIndexes[1]; # Rev primer default end position primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence primFwdSeq = primFwdSeq.upper(); # to uppercase lastBase = endPF-2; # stores possible end points of fwd primer while not plasmid.origin[lastBase-1].upper().replace("G","C") == "C" and lastBase-part.index[0] > rangeHom[0]: # find next G or C upstream lastBase -= 1; while meltingTemp(plasmid.origin[part.index[0]:lastBase])-meltingTemp(plasmid.origin[startPR:part.index[1]]) > maxTempDif and meltingTemp(plasmid.origin[part.index[0]:lastBase]) > minMeltTemp and lastBase-part.index[0] > rangeHom[0]: # while T diff is still out of bounds and still within bounds of Fwd primer, lastBase -= 1; while not plasmid.origin[lastBase-1].upper().replace("G","C") == "C" and lastBase-part.index[0] > rangeHom[0]: # find next G or C upstream lastBase -= 1; if meltingTemp(plasmid.origin[part.index[0]:lastBase])-meltingTemp(plasmid.origin[startPR:part.index[1]]) < maxTempDif and meltingTemp(plasmid.origin[0:lastBase]) > minMeltTemp: # while T diff is still out of bounds and still within bounds of Fwd primer, endPF = lastBase; primFwdSeq = 
plasmid.origin[startPF:endPF]; else: # if temp difference still exceeds specs log = log + "Warning: Gibson primers for sequence " + part.label + " under given constraints have a Tm difference of " + str(meltingTemp(plasmid.origin[part.index[0]:endPF])-meltingTemp(plasmid.origin[startPR:part.index[1]])) + ", above the given threshold of " + str(maxTempDif) + "\n\n"; # give warning # if isTricky( primFwdSeq ): # true if this terminus contains homopolymers or AT repeats: # log = log + "Warning: Forward Gibson primer for sequence " + part.label + " may be hard to synthesize.\n\n"; # give warning # if isTricky( primRevSeq ): # true if this terminus contains homopolymers or AT repeats: # log = log + "Warning: Reverse Gibson primer for sequence " + part.label + " may be hard to synthesize.\n\n"; # give warning annPrimFwd = GenBankAnn(part.label + " Gibson Primer (Fwd)", "misc_feature", primFwdSeq, False, [startPF,endPF], annColors['primerColor']); # creates GenBankAnn object to hold fwd primer annPrimRev = GenBankAnn(part.label + " Gibson Primer (Rev)", "misc_feature", primRevSeq, True, [startPR,endPR], annColors['primerColor']); # creates GenBankAnn object to hold rev primer log = log + "Gibson primers for part " + part.label + " selected.\n\n"; # logs this process finished return {"out":[annPrimFwd, annPrimRev],"log":log}; # return list of primers """ Creates list with GenBankAnn objects for forward and reverse oligos for obtaining part for Gibson assembly in plasmid given via Klenow reaction. The plasmid must have part as an annotation. lengthHom gives the length of homology on each side of the part sequence. """ def createKlenowOligos(plasmid, part, lengthHom=40): #TODO: debug. 
log = ""; # init log startPF = part.index[0] - lengthHom; # Fwd primer preferred start position endPF = part.index[1]; # Fwd primer preferred end position primFwdSeq = plasmid.origin[startPF:endPF]; # Fwd primer sequence startPR = part.index[0]; # Rev primer start position endPR = part.index[1] + lengthHom; # Rev primer end position primRevSeq = revComp(plasmid.origin[startPR:endPR]); # Rev primer sequence # if isTricky( primFwdSeq ): # true if this terminus contains homopolymers or AT repeats: # log = log + "Warning: Forward Klenow oligo for sequence " + part.label + " may be hard to synthesize.\n\n"; # give warning # if isTricky( primRevSeq ): # true if this terminus contains homopolymers or AT repeats: # log = log + "Warning: Reverse Klenow oligo for sequence " + part.label + " may be hard to synthesize.\n\n"; # give warning annPrimFwd = GenBankAnn(part.label + " Klenow oligo (Fwd)", "misc_feature", primFwdSeq, False, [startPF,endPF], annColors['primerColor']); # creates GenBankAnn object to hold fwd primer annPrimRev = GenBankAnn(part.label + " Klenow oligo (Rev)", "misc_feature", primRevSeq, True, [startPR,endPR], annColors['primerColor']); # creates GenBankAnn object to hold rev primer log = log + "Klenow oligos for part " + part.label + " selected.\n\n"; # logs this process finished return {"out":[annPrimFwd, annPrimRev], "log":log}; # return list of primers """ Returns gBlock GenBankAnn object in plasmid given for part (a GenBankAnn object) to be synthesized and inserted via Gibson in plasmid. Will give a warning if it suspects the gBlock won't be able to be synthesized by IDT, reverse-engineering from the IDT gene synthesis webpage. 
""" def createGBlock(plasmid, part, overlapSize): log = ""; # init log startGBlock = part.index[0] - overlapSize; # gBlock start position endGBlock = part.index[1] + overlapSize; # gBlock end position gBlockSeq = plasmid.origin[startGBlock:endGBlock]; # gBlock sequence tricky = isTricky(gBlockSeq) > -1; # True if suspected to be hard to synthesize if tricky: # if sequence seems tricky log = log + "Warning: I suspect synthesis company might reject the gene fragment created for part \n" +part.label+ ": \n" + gBlockSeq + "\n\n"; # warn user annGBlock = GenBankAnn(part.label + " gBlock", "misc_feature", gBlockSeq, False, [startGBlock,endGBlock], annColors['gBlockColor']); # creates GenBankAnn object to hold gBlock log = log + "gBlock for part " + part.label + " selected.\n\n"; # logs this process finished return {"out":annGBlock, "log":log}; # returns annotation """ Abbreviates primer names to fit on commercial tube labels with the format: Seven Digit Gene Identifier_Oligo type_Orientation The seven digit gene identifier code follows "PF3D7_". Oligo types include: LHR (Gibson overhang PCR primers) RHR (Gibson overhang PCR primers) gRNA (Klenow oligos for gRNA sequence) gBlock (gBlock sequencing primer) RecAE (Anneal-extension oligos for recoded region, if the region is small enough) Orientation refers to forward (F) and reverse (R) primers. """ def shortenOligoNames(primerString,prefix): mat = primerString.split("\n"); # split string into lines mat = [l.split(",") for l in mat]; # split lines into cells. 
mat is now 2D array for primer in mat: # iterates across all primers (rows in array) name = primer[0][len(prefix):]; # gets primer name if name[0:6] == "PF3D7_": # if primer format start is correct newName = name[6:13] + "_"; # Adds numerical identifier to new name # Add oligo type to new name: if name.find("Recoded region") > -1: newName = newName + "RecR" + "_"; elif name.find("LHR") > -1: newName = newName + "LHR" + "_"; elif name.find("RHR") > -1: newName = newName + "RHR" + "_"; elif name.find("sgRNA cassette") > -1: newName = newName + "gRNAgB" + "_"; elif name.find("gRNA") > -1: newName = newName + "gRNAAE" + "_"; elif name.find("Recoded region Klenow") > -1: newName = newName + "RecAE" + "_"; # Add oligo orientation if name.find("fwd") > -1: newName = newName + "F"; elif name.find("rev") > -1: newName = newName + "R"; primer[0] = prefix + newName; # replace name mat = [",".join(l) for l in mat]; # join cells into lines outStr = "\n".join(mat); # join lines into string return outStr;
76.196296
445
0.680552
2,684
20,573
5.209762
0.111028
0.053923
0.048058
0.016019
0.779733
0.752843
0.731889
0.701924
0.682901
0.664164
0
0.012006
0.20245
20,573
269
446
76.479554
0.8402
0.290526
0
0.380952
0
0
0.101568
0
0
0
0
0.011152
0
1
0.029762
false
0
0.02381
0
0.083333
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
5
322d6805d455b06b58ee44e9a931560a562bfef4
313
py
Python
core/models/program.py
CodeForPhilly/prevention-point
f5dfa136d192f96f859351a6a2ebd28618d6a093
[ "MIT" ]
35
2019-03-12T23:59:10.000Z
2021-04-05T15:07:38.000Z
core/models/program.py
CodeForPhilly/prevention-point
f5dfa136d192f96f859351a6a2ebd28618d6a093
[ "MIT" ]
365
2019-03-12T23:40:39.000Z
2022-02-10T11:07:26.000Z
core/models/program.py
CodeForPhilly/prevention-point
f5dfa136d192f96f859351a6a2ebd28618d6a093
[ "MIT" ]
20
2019-03-12T23:36:25.000Z
2021-12-30T00:05:42.000Z
from django.db import models class Program(models.Model): name = models.CharField(max_length=100) is_closed = models.BooleanField(default=False) is_frozen = models.BooleanField(default=False) has_queue = models.BooleanField(default=True) def __str__(self): return '%s' % (self.name)
28.454545
50
0.715655
40
313
5.4
0.675
0.25
0.347222
0.277778
0
0
0
0
0
0
0
0.011583
0.172524
313
10
51
31.3
0.822394
0
0
0
0
0
0.00639
0
0
0
0
0
0
1
0.125
false
0
0.125
0.125
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
5
323c22847f3c8c3dad9dc0a311f4006ab6fab541
17
py
Python
thonny/test/plugins/__init__.py
shreyas202/thonny
ef894c359200b0591cf98451907243395b817c63
[ "MIT" ]
3
2019-05-08T16:12:07.000Z
2019-09-04T20:13:46.000Z
Thonny/Lib/site-packages/thonny/test/plugins/__init__.py
Pydiderot/pydiderotIDE
a42fcde3ea837ae40c957469f5d87427e8ce46d3
[ "MIT" ]
30
2019-01-04T10:14:56.000Z
2020-10-12T14:00:31.000Z
Thonny/Lib/site-packages/thonny/test/plugins/__init__.py
Pydiderot/pydiderotIDE
a42fcde3ea837ae40c957469f5d87427e8ce46d3
[ "MIT" ]
6
2018-04-28T11:33:56.000Z
2022-03-06T15:36:52.000Z
# package marker
8.5
16
0.764706
2
17
6.5
1
0
0
0
0
0
0
0
0
0
0
0
0.176471
17
1
17
17
0.928571
0.823529
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
32414c9a3af92a10efeb2af729ad876634d6ba35
39
py
Python
project/root/__init__.py
renanlage/flask-boilerplate
1b0d4297d8bcdd2722a6c6365582f8e5f8c5dfe2
[ "MIT" ]
null
null
null
project/root/__init__.py
renanlage/flask-boilerplate
1b0d4297d8bcdd2722a6c6365582f8e5f8c5dfe2
[ "MIT" ]
null
null
null
project/root/__init__.py
renanlage/flask-boilerplate
1b0d4297d8bcdd2722a6c6365582f8e5f8c5dfe2
[ "MIT" ]
null
null
null
from project.root.views import root_bp
19.5
38
0.846154
7
39
4.571429
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.102564
39
1
39
39
0.914286
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
3271284d477cfe4fb4a9c521603a9a112ea4c4bb
181
py
Python
src/contra_goapp/maindrive.py
OrdoD/ContraGO2021
7e739880278900675f9b51b9bfe2c7edf1681bb0
[ "MIT" ]
null
null
null
src/contra_goapp/maindrive.py
OrdoD/ContraGO2021
7e739880278900675f9b51b9bfe2c7edf1681bb0
[ "MIT" ]
null
null
null
src/contra_goapp/maindrive.py
OrdoD/ContraGO2021
7e739880278900675f9b51b9bfe2c7edf1681bb0
[ "MIT" ]
null
null
null
import contra_goapp.CONTRAGO_GUI.contra_go_log_in_GUI def main(): contra_goapp.CONTRAGO_GUI.contra_go_log_in_GUI.contrago_log_in() if __name__ == "__main__": main()
22.625
69
0.762431
28
181
4.142857
0.428571
0.12931
0.327586
0.37931
0.655172
0.655172
0.655172
0.655172
0.655172
0
0
0
0.143646
181
7
70
25.857143
0.748387
0
0
0
0
0
0.044199
0
0
0
0
0
0
1
0.2
true
0
0.2
0
0.4
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
3277642f821b7f1605ff063980b49d0873489f58
13
py
Python
Basic/dic_and_set.py
CrazyBBer/Python-Learn-Sample
3bd0694327db6c662c6cc3bdf91c6261daa4b6cf
[ "MIT" ]
2
2020-05-02T11:24:37.000Z
2020-05-02T13:49:18.000Z
Basic/dic_and_set.py
crazybber/pythontrip
062ba71dfe6729ecc606eff7260b1c39497b6456
[ "MIT" ]
null
null
null
Basic/dic_and_set.py
crazybber/pythontrip
062ba71dfe6729ecc606eff7260b1c39497b6456
[ "MIT" ]
null
null
null
#dic and set
6.5
12
0.692308
3
13
3
1
0
0
0
0
0
0
0
0
0
0
0
0.230769
13
1
13
13
0.9
0.846154
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
32a3a201db693dc0b60f15586e86b7a0cc0f936a
63
py
Python
code/hello-world-norwegian.py
nyamako/hacktoberfest-2018
bf7939d4b0cfb57a854a1644dbbae7ddf8af3c4f
[ "MIT" ]
67
2018-09-25T21:37:23.000Z
2020-11-03T02:03:22.000Z
code/hello-world-norwegian.py
nyamako/hacktoberfest-2018
bf7939d4b0cfb57a854a1644dbbae7ddf8af3c4f
[ "MIT" ]
245
2018-09-18T10:07:28.000Z
2020-09-30T19:00:11.000Z
code/hello-world-norwegian.py
nyamako/hacktoberfest-2018
bf7939d4b0cfb57a854a1644dbbae7ddf8af3c4f
[ "MIT" ]
1,192
2018-09-18T11:27:55.000Z
2021-10-17T10:24:37.000Z
print('Hei, verden') print('It is "Hello world" in norwegian')
21
41
0.698413
10
63
4.4
0.9
0
0
0
0
0
0
0
0
0
0
0
0.126984
63
2
42
31.5
0.8
0
0
0
0
0
0.68254
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
32ac3d420eef9eab27ece715852e1bb68af5505a
40
py
Python
homeassistant/components/ws980wifi/__init__.py
eXtgmA/core
58e8e88225a2e5268ddddfaa93c41e8b92e9480e
[ "Apache-2.0" ]
null
null
null
homeassistant/components/ws980wifi/__init__.py
eXtgmA/core
58e8e88225a2e5268ddddfaa93c41e8b92e9480e
[ "Apache-2.0" ]
null
null
null
homeassistant/components/ws980wifi/__init__.py
eXtgmA/core
58e8e88225a2e5268ddddfaa93c41e8b92e9480e
[ "Apache-2.0" ]
null
null
null
"""The WS980WiFi sensor integration."""
20
39
0.725
4
40
7.25
1
0
0
0
0
0
0
0
0
0
0
0.083333
0.1
40
1
40
40
0.722222
0.825
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
0885c85ecc0170f53f4042f6e822f3a9d0d8632f
216
py
Python
stable_nalu/functional/golden_ratio_base.py
bmistry4/nalm-benchmark
273c95cc75241f56e48bcd0b18b043969ef82004
[ "MIT" ]
null
null
null
stable_nalu/functional/golden_ratio_base.py
bmistry4/nalm-benchmark
273c95cc75241f56e48bcd0b18b043969ef82004
[ "MIT" ]
null
null
null
stable_nalu/functional/golden_ratio_base.py
bmistry4/nalm-benchmark
273c95cc75241f56e48bcd0b18b043969ef82004
[ "MIT" ]
null
null
null
import math import torch golden_ratio = (1 + math.sqrt(5)) / 2. tanh = lambda x: (torch.pow(golden_ratio, 2 * x) - 1) / (torch.pow(golden_ratio, 2 * x) + 1) sigmoid = lambda x: 1 / (1 + torch.pow(golden_ratio, -x))
30.857143
92
0.638889
38
216
3.526316
0.368421
0.328358
0.313433
0.425373
0.477612
0.328358
0.328358
0
0
0
0
0.050847
0.180556
216
6
93
36
0.706215
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
08abd608932b659a5491e1e71a4a114946b42c9e
214
py
Python
k_anonymity/__init__.py
yzj2019/k_anonymity
fcce9fef0e1705c7cc5d1d18083b5860dcab8551
[ "MIT" ]
5
2021-06-09T06:45:01.000Z
2021-12-14T05:41:04.000Z
k_anonymity/__init__.py
yzj2019/k_anonymity
fcce9fef0e1705c7cc5d1d18083b5860dcab8551
[ "MIT" ]
null
null
null
k_anonymity/__init__.py
yzj2019/k_anonymity
fcce9fef0e1705c7cc5d1d18083b5860dcab8551
[ "MIT" ]
null
null
null
''' k匿名两种算法的python3.6.9实现\\ 依赖:pandas、numpy\\ 实现了samarati算法、mondrian算法,并提供了加载.data数据的方法 ''' from k_anonymity.samarati import samarati from k_anonymity.mondrian import mondrian from k_anonymity.load import loaddata
23.777778
41
0.827103
28
214
6.214286
0.642857
0.086207
0.241379
0
0
0
0
0
0
0
0
0.015228
0.079439
214
8
42
26.75
0.86802
0.378505
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
08bb9710b615c44971fba26db8da439ab78cdc8b
95
py
Python
vk_mutual_friends_finder/__init__.py
stepuncius/vk_mutual_friends_finder
4bba576ea951deaf3faeaa96c2d4ab0d2e6bc55f
[ "BSD-2-Clause" ]
null
null
null
vk_mutual_friends_finder/__init__.py
stepuncius/vk_mutual_friends_finder
4bba576ea951deaf3faeaa96c2d4ab0d2e6bc55f
[ "BSD-2-Clause" ]
null
null
null
vk_mutual_friends_finder/__init__.py
stepuncius/vk_mutual_friends_finder
4bba576ea951deaf3faeaa96c2d4ab0d2e6bc55f
[ "BSD-2-Clause" ]
null
null
null
import friends_getter import get_names_of_users import intersect_friends import get_id_by_link
19
25
0.915789
16
95
4.9375
0.6875
0.227848
0
0
0
0
0
0
0
0
0
0
0.084211
95
4
26
23.75
0.908046
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5