hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
956d800944722fca803c54cb47c4a6b7ddb48ac6
| 112
|
py
|
Python
|
villas/controller/__init__.py
|
VILLASframework/VILLAScontroller
|
e672439797f209afdd5bc62078f7d49c60269aa4
|
[
"Apache-2.0"
] | null | null | null |
villas/controller/__init__.py
|
VILLASframework/VILLAScontroller
|
e672439797f209afdd5bc62078f7d49c60269aa4
|
[
"Apache-2.0"
] | null | null | null |
villas/controller/__init__.py
|
VILLASframework/VILLAScontroller
|
e672439797f209afdd5bc62078f7d49c60269aa4
|
[
"Apache-2.0"
] | null | null | null |
import pkg_resources # part of setuptools
__version__ = pkg_resources.require('villas-controller')[0].version
| 28
| 67
| 0.803571
| 14
| 112
| 6
| 0.785714
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009901
| 0.098214
| 112
| 3
| 68
| 37.333333
| 0.821782
| 0.160714
| 0
| 0
| 0
| 0
| 0.184783
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
95832b0b672551b187e10a73b519be000369ff7a
| 373
|
py
|
Python
|
problem20.py
|
hubenjm/projecteuler
|
544596a6693a2e64a88e1d4606ce85480486dab5
|
[
"MIT"
] | null | null | null |
problem20.py
|
hubenjm/projecteuler
|
544596a6693a2e64a88e1d4606ce85480486dab5
|
[
"MIT"
] | null | null | null |
problem20.py
|
hubenjm/projecteuler
|
544596a6693a2e64a88e1d4606ce85480486dab5
|
[
"MIT"
] | null | null | null |
def sum_digits(n):
s=0
while n:
s += n % 10
n /= 10
return s
def factorial(n):
if n == 0:
return 1
else:
return n*factorial(n-1)
def sum_factorial_digits(n):
return sum_digits(factorial(n))
def main():
print(sum_factorial_digits(10))
print(sum_factorial_digits(100))
print(sum_factorial_digits(500))
if __name__ == "__main__":
#test()
main()
| 14.346154
| 33
| 0.664879
| 61
| 373
| 3.770492
| 0.311475
| 0.208696
| 0.313043
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053156
| 0.193029
| 373
| 25
| 34
| 14.92
| 0.710963
| 0.016086
| 0
| 0
| 0
| 0
| 0.021858
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0
| 0.052632
| 0.421053
| 0.157895
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9599e0e8eb50c219052e77f540ae699c9a360ee4
| 378
|
py
|
Python
|
src/UQpy/stochastic_process/supportive/__init__.py
|
SURGroup/UncertaintyQuantification
|
a94c8db47d07134ea2b3b0a3ca53ca818532c3e6
|
[
"MIT"
] | null | null | null |
src/UQpy/stochastic_process/supportive/__init__.py
|
SURGroup/UncertaintyQuantification
|
a94c8db47d07134ea2b3b0a3ca53ca818532c3e6
|
[
"MIT"
] | null | null | null |
src/UQpy/stochastic_process/supportive/__init__.py
|
SURGroup/UncertaintyQuantification
|
a94c8db47d07134ea2b3b0a3ca53ca818532c3e6
|
[
"MIT"
] | null | null | null |
"""Collection of baseclasses"""
from UQpy.stochastic_process.supportive.inverse_wiener_khinchin_transform import (
inverse_wiener_khinchin_transform,
)
from UQpy.stochastic_process.supportive.wiener_khinchin_transform import (
wiener_khinchin_transform,
)
from UQpy.stochastic_process.supportive.scaling_correlation_function import (
scaling_correlation_function,
)
| 34.363636
| 82
| 0.849206
| 41
| 378
| 7.414634
| 0.390244
| 0.184211
| 0.302632
| 0.246711
| 0.496711
| 0.381579
| 0.381579
| 0.381579
| 0
| 0
| 0
| 0
| 0.087302
| 378
| 10
| 83
| 37.8
| 0.881159
| 0.066138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
95d0988277f57f3342c8b6a40810f83dd1e3f9a6
| 4,425
|
py
|
Python
|
search_profile/templatetags/user_filters.py
|
fattybobcat/git_profile_search
|
c87683e53e7149221f5f4efb1114cb6ead06e9af
|
[
"MIT"
] | null | null | null |
search_profile/templatetags/user_filters.py
|
fattybobcat/git_profile_search
|
c87683e53e7149221f5f4efb1114cb6ead06e9af
|
[
"MIT"
] | null | null | null |
search_profile/templatetags/user_filters.py
|
fattybobcat/git_profile_search
|
c87683e53e7149221f5f4efb1114cb6ead06e9af
|
[
"MIT"
] | null | null | null |
from django import template
register = template.Library()
@register.filter
def addrowtable(open, closed):
table = ""
while open != {} or closed != {}:
if open != {} and closed != {}:
key, value = closed.popitem()
table += ("<td><a href='{0}' target='_blank''>{0}"
"</a></td><td>{1}</td>").format(value["url"],
value["comment"])
key, value = open.popitem()
table += ("<td><a href='{0}' target='_blank'>{0}"
"</a></td><td>{1}</td>").format(value["url"],
value["comment"])
table += "</tr>"
if open != {} or closed != {}:
table += "<tr><td></td><td></td><td></td><td></td>"
elif closed != {}:
key, value = closed.popitem()
table += ("<td><a href='{0}'target='_blank'>{0}"
"</a></td><td>{1}</td>").format(value["url"],
value["comment"])
table += "<td></td><td></td></tr>"
if closed != {}:
table += "<tr><td></td><td></td><td></td><td></td>"
else:
key, value = open.popitem()
table += ("<td><a href='{0}'target='_blank'>{0}"
"</a></td><td>{1}</td>").format(value["url"],
value["comment"])
table += "<td></td><td</td></tr>"
if open != {}:
table += "<tr><td></td><td></td><td>" \
"</td><td></td><td></td><td></td>"
return table
@register.filter
def page_re(link):
if link.split("=")[-1].isdigit():
return int(link.split("=")[-1])
else:
return link.split("=")[-1]
@register.filter
def page_build(page, user_name):
paginator_html = "<ul class='pagination justify-content-center'>"
if "prev" in page:
paginator_html += ('<li class="page-item"><a class="page-link" href="'
'?q={}?page={}">Previous</a></li>').format(
user_name,
page_re(page["prev"])
)
else:
paginator_html += ('<li class="page-item disabled">'
'<a class="page-link" >Previous</a></li>')
if "first" in page:
paginator_html += ('<li class="page-item"><a class="page-link" '
'href="?q={}?page=1">1</a></li>').format(user_name)
if "next" in page:
paginator_html += ('<li class="page-item active"><a class="page-link"'
' href="?q={0}?page={1}">{1}</a></li>').format(
user_name,
page_re(page["next"])-1,
)
if page_re(page["last"]) != page_re(page["next"]):
paginator_html += ('<li class="page-item"><a class="page-link" '
'href="?q={0}?page={1}">{1}</a></li>').format(
user_name,
page_re(page["next"]),
)
elif "prev" in page:
paginator_html += ('<li class="page-item"><a class="page-link" href="'
'?q={0}?page={1}">{1}</a></li>').format(
user_name,
page_re(page["prev"]),
)
paginator_html += ('<li class="page-item active"><a class="page-link"'
' href="?q={0}?page={1}">{1}</a></li>').format(
user_name,
page_re(page["prev"])+1,
)
if "last" in page:
paginator_html += ('<li class="page-item"><a class="page-link" '
'href="?q={0}?page={1}">{1}</a></li>').format(
user_name,
page_re(page["last"]),
)
if "next" in page:
paginator_html += ('<li class="page-item"><a class="page-link" href="?'
'q={}?page={}">Next</a></li>').format(
user_name,
page_re(page["next"]))
else:
paginator_html += ('<li class="page-item disabled"><a class="page-'
'link">Next</a></li>')
paginator_html += "</ul>"
return paginator_html
@register.filter
def last_pr(page):
s = ""
if page.split("=")[-1].isdigit():
s = (str((int(page.split("=")[-1]) - 1) * 100) + " - " +
str(int(page.split("=")[-1]) * 100 - 1))
return s
| 39.159292
| 79
| 0.420565
| 501
| 4,425
| 3.638723
| 0.129741
| 0.076796
| 0.085573
| 0.092156
| 0.723533
| 0.705979
| 0.705979
| 0.705979
| 0.695557
| 0.661547
| 0
| 0.015784
| 0.355706
| 4,425
| 112
| 80
| 39.508929
| 0.623641
| 0
| 0
| 0.436893
| 0
| 0
| 0.305085
| 0.137175
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038835
| false
| 0
| 0.009709
| 0
| 0.097087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
95e16da927493eb5d7b2ffc4e2a4b2a5ae98e6a7
| 14,393
|
py
|
Python
|
integration_methods.py
|
LudvigLindblad/DM-rate-calculation-in-graphene
|
de36b6207f7412e6ac71d3a88b7d10c55952a819
|
[
"MIT"
] | 3
|
2020-05-12T19:13:28.000Z
|
2020-09-16T07:26:34.000Z
|
integration_methods.py
|
LudvigLindblad/DM-rate-calculation-in-graphene
|
de36b6207f7412e6ac71d3a88b7d10c55952a819
|
[
"MIT"
] | null | null | null |
integration_methods.py
|
LudvigLindblad/DM-rate-calculation-in-graphene
|
de36b6207f7412e6ac71d3a88b7d10c55952a819
|
[
"MIT"
] | null | null | null |
import Rate_calculation
#import constants as ct
from mpmath import mp
from mpmath import fp
import numpy as np
import scipy.integrate as spint
import time
methods=["mp-gl", "mp-ts", "fp-gl", "fp-ts", "sp-quad", "sp-gauss", "monte-carlo", "w-cumsum", "sp-simps", "romberg"];
#cumtrapz relative error tolerance
err_rel=1e-1;
def_nodes=1e2; #default number of nodes
maxloop=100;
# general integration method
# syntax is mpmath like, set the limits you dont want to integrate over to 0
#
def integrate(f, limx, limy, limz, method):
if method=="mp-gl":
if limz!=0:
return mp.quad(f, limx, limy, limz, method="gauss-legendre");
else:
if limy!=0:
return mp.quad(f, limx, limy, method="gauss-legendre");
else:
return mp.quad(f, limx, method="gauss-legendre");
elif method=="mp-ts":
if limz!=0:
return mp.quad(f, limx, limy, limz, method="tanh-sinh");
else:
if limy!=0:
return mp.quad(f, limx, limy, method="tanh-sinh");
else:
return mp.quad(f, limx, method="tanh-sinh");
elif method=="fp-gl":
if limz!=0:
return fp.quad(f, limx, limy, limz, method="gauss-legendre");
else:
if limy!=0:
return fp.quad(f, limx, limy, method="gauss-legendre");
else:
return fp.quad(f, limx, method="gauss-legendre");
elif method=="fp-ts":
if limz!=0:
return fp.quad(f, limx, limy, limz, method="tanh-sinh");
else:
if limy!=0:
return fp.quad(f, limx, limy, method="tanh-sinh");
else:
return fp.quad(f, limx, method="tanh-sinh");
elif method=="sp-quad":
if limz!=0:
return spint.tplquad(f, limz[0], limz[1], limy[0], limy[1], limx[0], limx[1])[0];
else:
if limy!=0:
return spint.dblquad(f, limy[0], limy[1], limx[0], limx[1])[0];
else:
return spint.quad(f, limx[0], limx[1])[0];
elif method=="romberg":
if not np.ndim(limx)==0:
limx=[float(limx[0]), float(limx[1])];
if not np.ndim(limy)==0:
limy=[float(limy[0]), float(limy[1])];
if not np.ndim(limz)==0:
limz=[float(limz[0]), float(limz[1])];
reltol=1e-16;
abstol=1e-16;
if limz!=0:
return spint.romberg(lambda z: spint.romberg(lambda y: spint.romberg(lambda x: float(f(x,y,z)), limx[0], limx[1], tol=abstol, rtol=reltol), limy[0], limy[1], tol=abstol, rtol=reltol), limz[0], limz[1], tol=abstol, rtol=reltol);
else:
if limy!=0:
return spint.romberg(lambda y: spint.romberg(lambda x: float(f(x,y)), limx[0], limy[1], tol=abstol, rtol=reltol), limy[0], limy[1], tol=abstol, rtol=reltol);
else:
return spint.romberg(lambda x: float(f(x)), limx[0], limx[1], tol=abstol, rtol=reltol);
#currently broken, but slow so unused
elif method=="sp-gauss":
if not np.ndim(limx)==0:
limx=[float(limx[0]), float(limx[1])];
if not np.ndim(limy)==0:
limy=[float(limy[0]), float(limy[1])];
if not np.ndim(limz)==0:
limz=[float(limz[0]), float(limz[1])];
order=7;
if limz!=0:
return spint.fixed_quad(lambda z: spint.fixed_quad(lambda y: spint.fixed_quad(lambda x: f(x,y,z), limx[0], limx[1], n=order)[0], limy[0], limy[1], n=order)[0], limz[0], limz[1], n=order)[0];
else:
if limy!=0:
return spint.fixed_quad(lambda y: spint.romberg(lambda x: f(x,y), limx[0], limy[1], n=order)[0], limy[0], limy[1], n=order)[0];
else:
return spint.fixed_quad(lambda x: f(x), limx[0], limx[1], n=order)[0];
elif method=="w-cumsum":
if not np.ndim(limx)==0:
limx=[float(limx[0]), float(limx[1])];
if not np.ndim(limy)==0:
limy=[float(limy[0]), float(limy[1])];
if not np.ndim(limz)==0:
limz=[float(limz[0]), float(limz[1])];
if limz!=0:
dx=(limx[1]-limx[0])/def_nodes;
dy=(limy[1]-limy[0])/def_nodes;
dz=(limz[1]-limz[0])/def_nodes;
loop=0;
lastres=0;
while True:
xl=np.arange(limx[0], limx[1], dx);
yl=np.arange(limy[0], limy[1], dy);
zl=np.arange(limz[0], limz[1], dz);
X, Y, Z=np.meshgrid(xl, yl, zl);
fx=[];
for i in range(0, len(X)):
fy=[];
for j in range(0, len(Y)):
fz=[];
for k in range(0, len(Z)):
fz.append(f(X[i][j][k], Y[i][j][k], zl[k]));
fy.append(spint.simps(fz, dx=dz));
fx.append(spint.simps(fy, dx=dy));
res=spint.simps(fx, dx=dx);
if loop!=0:
if np.abs(res-lastres)/res < err_rel:
return res;
else:
ad=(1/2)**loop; #linear to begin with
dx=dx*ad;
dy=dy*ad;
dz=dz*ad;
lastres=res;
if loop > maxloop:
break;
loop+=1;
else:
if limy!=0:
dx=def_dx;
dy=def_dx;
loop=0;
lastres=0;
while True:
xl=np.arange(limx[0], limx[1], dx);
yl=np.arange(limy[0], limy[1], dy);
X, Y=np.meshgrid(xl, yl);
fx=[];
for i in range(0, len(X)):
fy=[];
for j in range(0, len(Y)):
fy.append(f(X[i][j], yl[j]));
fx.append(spint.simps(fy, dx=dy));
res=spint.simps(fx, dx=dx);
if loop!=0:
if np.abs(res-lastres)/res < err_rel:
return res;
else:
ad=(1/2)**loop; #linear to begin with
dx=dx*ad;
dy=dy*ad;
lastres=res;
if loop > maxloop:
break;
loop+=1;
else:
dx=def_dx;
loop=0;
lastres=0;
while True:
xl=np.arange(limx[0], limx[1], dx);
fx=[];
for i in range(0, len(X)):
fx.append(f(xl[i]));
res=spint.simps(fx, dx=dx);
if loop!=0:
if np.abs(res-lastres)/res < err_rel:
return res;
else:
ad=(1/2)**loop; #linear to begin with
dx=dx*ad;
lastres=res;
if loop > maxloop:
break;
loop+=1;
#still a bit broken but proved slower than mp-gl
elif method=="monte-carlo":
N=int(1e6);
if limz!=0:
N=int(round(N**(1/3)));
x=np.random.rand(N)*(limx[1]-limx[0])+limx[0];
y=np.random.rand(N)*(limy[1]-limy[0])+limy[0];
z=np.random.rand(N)*(limz[1]-limy[0])+limz[0];
X,Y,Z=np.meshgrid(x,y,z);
fxyz=[];
for i in range(0, len(X)):
fxy=[];
for j in range(0, len(Y)):
fx=[];
for k in range(0, len(Z)):
fx.append(f(X[i][j][k], Y[i][j][k], Z[i][j][k]));
fxy.append(fx);
fxyz.append(fxy);
wmax=np.max(fxyz);
wmin=np.min(fxyz);
W=np.random.rand(N, N, N)*(wmax-wmin)+wmin;
est=0;
for i in range(0, len(fxyz)):
for j in range(0, len(fxyz[i])):
for k in range(0, len(fxyz[i][j])):
if W[i][j][k] > 0 and W[i][j][k] < fxyz[i][j][k]:
est=est+fxyz[i][j][k];
elif W[i][j][k] < 0 and W[i][j][k] > fxyz[i][j][k]:
est=est+fxyz[i][j][k];
return (est/(N**3))*(limx[1]-limx[0])*(limy[1]-limy[0])*(limz[1]-limz[0])*(wmax-wmin);
else:
if limy!=0:
N=int(round(N**(1/2)));
x=np.random.rand(N)*(limx[1]-limx[0])+limx[0];
y=np.random.rand(N)*(limy[1]-limy[0])+limy[0];
X,Y=np.meshgrid(x,y);
fxy=[];
for i in range(0, len(X)):
fx=[];
for j in range(0, len(Y)):
fx.append(f(X[i][j], Y[i][j]));
fxy.append(fx);
zmax=np.max(fxy);
zmin=np.min(fxy);
Z=np.random.rand(N, N)*(zmax-zmin)+zmin;
est=0;
for i in range(0, len(fxy)):
for j in range(0, len(fxy[i])):
if Z[i][j] > 0 and Z[i][j] < fxy[i][j]:
est=est+fxy[i][j];
elif Z[i][j] < 0 and Z[i][j] > fxy[i][j]:
est=est+fxy[i][j];
return (est/(N**2))*(limx[1]-limx[0])*(limy[1]-limy[0])*(zmax-zmin);
else:
X=np.random.rand(N)*(limx[1]-limx[0])+limx[0];
fx=[];
for i in range(0, len(X)):
fx.append(f(X[i]));
ymax=np.max(fx);
ymin=np.min(fx);
Y=np.random.rand(N)*(ymax-ymin)+ymin;
est=0;
for i in range(0, len(fx)):
if Y[i] > 0 and Y[i] < fx[i]:
est=est+fx[i];
elif Y[i] < 0 and Y[i] > fx[i]:
est=est+fx[i];
return (est/N)*(limx[1]-limx[0])*(ymax-ymin);
#preallocated, expected to be slow
elif method=="sp-simps":
if limz!=0:
dx=(limx[1]-limx[0])/def_nodes;
dy=(limy[1]-limy[0])/def_nodes;
dz=(limz[1]-limz[0])/def_nodes;
loop=0;
lastres=0;
while True:
xl=np.arange(limx[0], limx[1], dx);
yl=np.arange(limy[0], limy[1], dy);
zl=np.arange(limz[0], limz[1], dz);
X, Y, Z=np.meshgrid(xl, yl, zl);
fx=[];
for i in range(0, len(X)):
fy=[];
for j in range(0, len(Y)):
fz=[];
for k in range(0, len(Z)):
fz.append(f(X[i][j][k], Y[i][j][k], zl[k]));
fy.append(spint.simps(fz, dx=dz));
fx.append(spint.simps(fy, dx=dy));
res=spint.simps(fx, dx=dx);
if loop!=0:
if np.abs(res-lastres)/res < err_rel:
return res;
else:
ad=(1/2)**loop; #linear to begin with
dx=dx*ad;
dy=dy*ad;
dz=dz*ad;
lastres=res;
if loop > maxloop:
break;
loop+=1;
else:
if limy!=0:
dx=def_dx;
dy=def_dx;
loop=0;
lastres=0;
while True:
xl=np.arange(limx[0], limx[1], dx);
yl=np.arange(limy[0], limy[1], dy);
X, Y=np.meshgrid(xl, yl);
fx=[];
for i in range(0, len(X)):
fy=[];
for j in range(0, len(Y)):
fy.append(f(X[i][j], yl[j]));
fx.append(spint.simps(fy, dx=dy));
res=spint.simps(fx, dx=dx);
if loop!=0:
if np.abs(res-lastres)/res < err_rel:
return res;
else:
ad=(1/2)**loop; #linear to begin with
dx=dx*ad;
dy=dy*ad;
lastres=res;
if loop > maxloop:
break;
loop+=1;
else:
dx=def_dx;
loop=0;
lastres=0;
while True:
xl=np.arange(limx[0], limx[1], dx);
fx=[];
for i in range(0, len(X)):
fx.append(f(xl[i]));
res=spint.simps(fx, dx=dx);
if loop!=0:
if np.abs(res-lastres)/res < err_rel:
return res;
else:
ad=(1/2)**loop; #linear to begin with
dx=dx*ad;
lastres=res;
if loop > maxloop:
break;
loop+=1;
return res;
#benchmarking
def benchmark():
f=lambda x,y,z: x**2*y**2-x**2*z;
for m in methods:
t=time.time();
r=integrate(f, [0, 1], 0, 0, m);
print("Method: "+m+" Time: "+str(time.time()-t)+" Result: "+str(r));
if __name__=="__main__":
benchmark();
| 36.62341
| 239
| 0.379907
| 1,806
| 14,393
| 3.008859
| 0.089701
| 0.032205
| 0.035333
| 0.048583
| 0.793522
| 0.756717
| 0.719912
| 0.681266
| 0.617777
| 0.602687
| 0
| 0.03349
| 0.466824
| 14,393
| 392
| 240
| 36.716837
| 0.674616
| 0.029737
| 0
| 0.728659
| 0
| 0
| 0.02208
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006098
| false
| 0
| 0.018293
| 0
| 0.118902
| 0.003049
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
95fe0d9334ca21b5999c423705bf101b4db22828
| 388
|
py
|
Python
|
db.py
|
danalex93/ingsoft1-ayudantia5
|
ccbec871fffe59664ed060be9b962830a0ea8f30
|
[
"MIT"
] | null | null | null |
db.py
|
danalex93/ingsoft1-ayudantia5
|
ccbec871fffe59664ed060be9b962830a0ea8f30
|
[
"MIT"
] | null | null | null |
db.py
|
danalex93/ingsoft1-ayudantia5
|
ccbec871fffe59664ed060be9b962830a0ea8f30
|
[
"MIT"
] | null | null | null |
import pymysql.cursors
class DatabaseManager(object):
def __init__(self):
super(DatabaseManager, self).__init__()
self.connection = pymysql.connect(host='localhost', port=8889, user='ayudante', password='holamundo', db='ayudantia_5', cursorclass=pymysql.cursors.DictCursor)
def get_connection(self):
return self.connection
def close(self):
self.connection.close()
| 32.333333
| 163
| 0.75
| 45
| 388
| 6.244444
| 0.622222
| 0.149466
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014663
| 0.121134
| 388
| 12
| 164
| 32.333333
| 0.809384
| 0
| 0
| 0
| 0
| 0
| 0.095116
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.111111
| 0.111111
| 0.111111
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
250b12af701491e3829416603aa989a42bec10f7
| 873
|
py
|
Python
|
bad_channel_lhc15o_mineAll.py
|
dhruvdixit/alice_calib_emcal
|
26357206518511842c047c78c9b784acae6203e8
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2017-06-06T19:28:08.000Z
|
2017-06-06T19:28:08.000Z
|
bad_channel_lhc15o_mineAll.py
|
dhruvdixit/alice_calib_emcal
|
26357206518511842c047c78c9b784acae6203e8
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
bad_channel_lhc15o_mineAll.py
|
dhruvdixit/alice_calib_emcal
|
26357206518511842c047c78c9b784acae6203e8
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2018-10-11T20:07:25.000Z
|
2018-10-11T20:07:25.000Z
|
hot = [10, 326, 871, 1056, 1160, 1162, 1163, 1382, 1682, 1683, 1685, 1686, 1760, 1851, 1961, 2071, 2273, 3544, 3691, 3849, 3858, 3878, 3884, 4019, 4025, 4048, 4051, 4089, 4101, 4103, 4110, 4245, 4566, 4599, 4797, 5410, 5836, 6111, 6296, 6800, 6801, 6802, 6804, 6805, 6806, 6807, 6809, 7089, 7104, 7874, 8811, 8812, 8850, 8856, 8857, 8897, 8903, 8906, 8909, 8955, 8956, 8958, 8959, 8992, 8994, 8996, 9056, 9246, 11088, 11089, 11101, 11142, 11143, 11144, 11146, 11188, 11195, 11373, 11393, 11839, 11871, 11937, 11966, 12861, 12917, 13073, 13077, 13253, 13484, 13861, 14376, 14382, 14506, 14644, 14653, 14666, 14718, 15018, 15304, 15316, 15317, 15344, 15403, 16297, 16303, 16309, 16340, 16346, 16523, 16768, 16835, 16852, 16858, 16916, 16961, 16988, 17003, 17032, 17070, 17114, 17128, 17151, 17287, 17300, 17310, 17311, 17413, 17418, 17451, 17457, 17461, 17470, 17577, 17627]
| 436.5
| 872
| 0.688431
| 135
| 873
| 4.451852
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.811398
| 0.155785
| 873
| 1
| 873
| 873
| 0.004071
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
251e296e01f36d9a852dceab4a644be72c1f2b3a
| 180
|
py
|
Python
|
example_repo/tests/test_fail.py
|
oaao/monoe-ci
|
760b1d5027628e95607659eda91bc2c7183f528a
|
[
"MIT"
] | null | null | null |
example_repo/tests/test_fail.py
|
oaao/monoe-ci
|
760b1d5027628e95607659eda91bc2c7183f528a
|
[
"MIT"
] | null | null | null |
example_repo/tests/test_fail.py
|
oaao/monoe-ci
|
760b1d5027628e95607659eda91bc2c7183f528a
|
[
"MIT"
] | null | null | null |
# https://github.com/malini/500lines/tree/master/ci/tests
import unittest
class TestFail(unittest.TestCase):
def test_fail(self):
self.fail('destined for greatness')
| 22.5
| 57
| 0.733333
| 24
| 180
| 5.458333
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019355
| 0.138889
| 180
| 7
| 58
| 25.714286
| 0.825806
| 0.305556
| 0
| 0
| 0
| 0
| 0.178862
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
c2848bc6d1ffd1ee9447f32eba506bbea0d39b81
| 156
|
py
|
Python
|
backend/cdEditor/apps.py
|
ArsalanShahid116/cd
|
01e1f9b038cbda11ae61ce0f9d9cae6a83464a11
|
[
"MIT"
] | null | null | null |
backend/cdEditor/apps.py
|
ArsalanShahid116/cd
|
01e1f9b038cbda11ae61ce0f9d9cae6a83464a11
|
[
"MIT"
] | null | null | null |
backend/cdEditor/apps.py
|
ArsalanShahid116/cd
|
01e1f9b038cbda11ae61ce0f9d9cae6a83464a11
|
[
"MIT"
] | 3
|
2019-03-18T11:22:41.000Z
|
2019-03-22T18:38:43.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class CdeditorConfig(AppConfig):
name = 'cdEditor'
| 17.333333
| 39
| 0.737179
| 18
| 156
| 6.111111
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007634
| 0.160256
| 156
| 8
| 40
| 19.5
| 0.832061
| 0.134615
| 0
| 0
| 0
| 0
| 0.06015
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
c2ad656fee2935c4df44cf071dba52ac1411494c
| 94
|
py
|
Python
|
billing/tests.py
|
lamhai1401/decoupled
|
27aa180ebbb0aa038832a0f19e7d806be8075500
|
[
"Apache-2.0"
] | null | null | null |
billing/tests.py
|
lamhai1401/decoupled
|
27aa180ebbb0aa038832a0f19e7d806be8075500
|
[
"Apache-2.0"
] | null | null | null |
billing/tests.py
|
lamhai1401/decoupled
|
27aa180ebbb0aa038832a0f19e7d806be8075500
|
[
"Apache-2.0"
] | null | null | null |
"""
Billing tests docstring
"""
# from django.test import TestCase
# Create your tests here.
| 13.428571
| 34
| 0.723404
| 12
| 94
| 5.666667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 94
| 6
| 35
| 15.666667
| 0.871795
| 0.861702
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c2c5953c900d707c9ec640a4f2ff053d963f9288
| 87
|
py
|
Python
|
python/testData/refactoring/introduceField/inConstructor.after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/refactoring/introduceField/inConstructor.after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/refactoring/introduceField/inConstructor.after.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
class B():
def __init__(self):
self.a = "some string"
print(self.a)
| 21.75
| 30
| 0.528736
| 12
| 87
| 3.5
| 0.75
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.321839
| 87
| 4
| 31
| 21.75
| 0.711864
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c2edbbf6cd691237b941937d5f66b48bca280f1e
| 174
|
py
|
Python
|
Chapter 05/ch5_16.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
Chapter 05/ch5_16.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
Chapter 05/ch5_16.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
x = int(input('Enter an integer:'))
y = int(input('Enter an integer:'))
if x ^ y!=0:
print(x,' and ', y, ' are different')
else:
print(x,' and ', y, ' are same')
| 29
| 42
| 0.54023
| 29
| 174
| 3.241379
| 0.517241
| 0.170213
| 0.276596
| 0.319149
| 0.744681
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007519
| 0.235632
| 174
| 6
| 43
| 29
| 0.699248
| 0
| 0
| 0
| 0
| 0
| 0.394118
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6c03c7aa8a2168cfe38c3a5341f70a474fa14b3e
| 86
|
py
|
Python
|
odp/publish/catalogue/__init__.py
|
SAEON/Open-Data-Platform
|
8509c39c6f65ba18518e825e2359213ec4c67af5
|
[
"MIT"
] | 2
|
2021-03-04T07:09:47.000Z
|
2022-01-02T19:23:41.000Z
|
odp/publish/catalogue/__init__.py
|
SAEON/Open-Data-Platform
|
8509c39c6f65ba18518e825e2359213ec4c67af5
|
[
"MIT"
] | 18
|
2020-09-16T09:16:45.000Z
|
2022-01-25T14:17:42.000Z
|
odp/publish/catalogue/__init__.py
|
SAEON/Open-Data-Platform
|
8509c39c6f65ba18518e825e2359213ec4c67af5
|
[
"MIT"
] | 1
|
2021-06-25T13:02:57.000Z
|
2021-06-25T13:02:57.000Z
|
class Catalogue:
    """Interface for catalogues that can be synchronized with a source."""

    def synchronize(self) -> None:
        """Bring the catalogue up to date; concrete subclasses must override."""
        raise NotImplementedError
| 21.5
| 34
| 0.697674
| 8
| 86
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.232558
| 86
| 3
| 35
| 28.666667
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
6c11f2c9ccc7e429664a58803cc40dcf749808ca
| 152
|
py
|
Python
|
Suggestion/SupervisedLearning/__init__.py
|
QuasiLegendre/GraSPyPipeline
|
b8f0986cf3bbfa75e2a7c3669ff11a449e3c61bb
|
[
"Apache-2.0"
] | null | null | null |
Suggestion/SupervisedLearning/__init__.py
|
QuasiLegendre/GraSPyPipeline
|
b8f0986cf3bbfa75e2a7c3669ff11a449e3c61bb
|
[
"Apache-2.0"
] | null | null | null |
Suggestion/SupervisedLearning/__init__.py
|
QuasiLegendre/GraSPyPipeline
|
b8f0986cf3bbfa75e2a7c3669ff11a449e3c61bb
|
[
"Apache-2.0"
] | null | null | null |
from .base import SupervisedLearningPipeline
from .MASEPipeline import MASEPipeline
__all__ = [
"SupervisedLearningPipeline",
"MASEPipeline",
]
| 21.714286
| 44
| 0.782895
| 11
| 152
| 10.454545
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144737
| 152
| 6
| 45
| 25.333333
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.171053
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6c17a5d022456ad9a1544455d5c7528405e98a8e
| 463
|
py
|
Python
|
dist/py/reflectedlight.py
|
microsoft/jacdac
|
2c6548b7e55ac34141e5152c664ca268e873cf09
|
[
"CC-BY-4.0",
"MIT"
] | 31
|
2020-07-24T14:49:32.000Z
|
2022-03-20T12:20:56.000Z
|
dist/py/reflectedlight.py
|
QPC-database/jacdac
|
74e9f7cebdb1db4c24f211aceb657b5125d0fd40
|
[
"CC-BY-4.0",
"MIT"
] | 747
|
2020-07-31T22:05:45.000Z
|
2022-03-31T23:27:35.000Z
|
dist/py/reflectedlight.py
|
QPC-database/jacdac
|
74e9f7cebdb1db4c24f211aceb657b5125d0fd40
|
[
"CC-BY-4.0",
"MIT"
] | 17
|
2020-07-31T10:49:01.000Z
|
2022-03-15T03:21:43.000Z
|
# Autogenerated file for Reflected light
# Add missing from ... import const
# Jacdac service-class identifier for the "Reflected light" service.
_JD_SERVICE_CLASS_REFLECTED_LIGHT = const(0x126c4cb2)
# Hardware-variant codes reported by the sensor.
_JD_REFLECTED_LIGHT_VARIANT_INFRARED_DIGITAL = const(0x1)
_JD_REFLECTED_LIGHT_VARIANT_INFRARED_ANALOG = const(0x2)
# Register numbers aliased to the shared Jacdac register constants
# (JD_REG_* are defined elsewhere in the generated package).
_JD_REFLECTED_LIGHT_REG_BRIGHTNESS = const(JD_REG_READING)
_JD_REFLECTED_LIGHT_REG_VARIANT = const(JD_REG_VARIANT)
# Event codes; presumably dark maps to "inactive" and light to "active"
# per the shared JD_EV_* constants — confirm against the service spec.
_JD_REFLECTED_LIGHT_EV_DARK = const(JD_EV_INACTIVE)
_JD_REFLECTED_LIGHT_EV_LIGHT = const(JD_EV_ACTIVE)
| 51.444444
| 58
| 0.87257
| 69
| 463
| 5.202899
| 0.391304
| 0.311978
| 0.267409
| 0.128134
| 0.172702
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023256
| 0.071274
| 463
| 9
| 59
| 51.444444
| 0.811628
| 0.155508
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0.041131
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6c39a020d2fa8fc95bf3d94bc3b6cdab9143bcc3
| 207
|
py
|
Python
|
scripts/item/consume_2438086.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 54
|
2019-04-16T23:24:48.000Z
|
2021-12-18T11:41:50.000Z
|
scripts/item/consume_2438086.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 3
|
2019-05-19T15:19:41.000Z
|
2020-04-27T16:29:16.000Z
|
scripts/item/consume_2438086.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 49
|
2020-11-25T23:29:16.000Z
|
2022-03-26T16:20:24.000Z
|
# Created by MechAviv
# Nyen Damage Skin | (2438086)
# `sm` is presumably the script manager injected by the game-server
# scripting runtime — confirm against the host engine.
if sm.addDamageSkin(2438086):
    sm.chat("'Nyen Damage Skin' Damage Skin has been added to your account's damage skin collection.")
# NOTE(review): the item is consumed even when addDamageSkin() returns a
# falsy value — confirm that failure should still consume the item.
sm.consumeItem()
| 41.4
| 103
| 0.729469
| 30
| 207
| 5.033333
| 0.666667
| 0.264901
| 0.18543
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081395
| 0.169082
| 207
| 5
| 104
| 41.4
| 0.796512
| 0.231884
| 0
| 0
| 0
| 0
| 0.56051
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6c4541a3d18186afe353469626c1a491e330f82a
| 66
|
py
|
Python
|
Chapter08/festival_test.py
|
PacktPublishing/Learning-Robotics-using-Python
|
d03a5ed459d3c2e05118e8b9345abb5a9650ced9
|
[
"MIT"
] | 8
|
2019-06-04T13:32:35.000Z
|
2021-11-08T13:10:20.000Z
|
Chapter08/festival_test.py
|
davidseowccc/Learning-Robotics-using-Python
|
d03a5ed459d3c2e05118e8b9345abb5a9650ced9
|
[
"MIT"
] | null | null | null |
Chapter08/festival_test.py
|
davidseowccc/Learning-Robotics-using-Python
|
d03a5ed459d3c2e05118e8b9345abb5a9650ced9
|
[
"MIT"
] | 9
|
2019-07-22T03:27:40.000Z
|
2021-07-18T01:00:59.000Z
|
#!/usr/bin/env python
# Smoke test for the festival text-to-speech bindings: speaks one phrase.
import festival
festival.say("Hello World")
| 16.5
| 27
| 0.757576
| 10
| 66
| 5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 66
| 3
| 28
| 22
| 0.833333
| 0.30303
| 0
| 0
| 0
| 0
| 0.244444
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6c5841ba097355b2108b8ddd7a5ef6483c8b259c
| 88
|
py
|
Python
|
03_Day_Operators/9.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
03_Day_Operators/9.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
03_Day_Operators/9.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
# Slope of the line through (x1, y1) and (x2, y2): rise over run.
x1, y1 = 2, 2
x2, y2 = 6, 10
m2 = (y2 - y1) / (x2 - x1)
print(f'Slope = {m2:.2f}')  # 9
| 12.571429
| 31
| 0.443182
| 19
| 88
| 2.052632
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.278689
| 0.306818
| 88
| 6
| 32
| 14.666667
| 0.360656
| 0.011364
| 0
| 0
| 0
| 0
| 0.188235
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6c6823999faa48c52506417b6bbf981460c1fb51
| 974
|
py
|
Python
|
physics/__init__.py
|
fedorpashin/physics
|
d587d50679fb5ad3994a8b992806a30053ed45e1
|
[
"MIT"
] | 2
|
2021-09-06T16:20:25.000Z
|
2021-09-06T16:21:17.000Z
|
physics/__init__.py
|
fedorpashin/physics
|
d587d50679fb5ad3994a8b992806a30053ed45e1
|
[
"MIT"
] | 30
|
2021-09-06T16:20:29.000Z
|
2021-11-27T22:41:03.000Z
|
physics/__init__.py
|
fedorpashin/physics
|
d587d50679fb5ad3994a8b992806a30053ed45e1
|
[
"MIT"
] | null | null | null |
from .interval import *
from .boundary_conditions.boundary_condition import *
from .equations.any_equation import *
from .grids.any_grid import *
from .equations.any_algorithm import *
from .solution import *
from .boundary_conditions.first_type_boundary_condition import *
from .boundary_conditions.second_type_boundary_condition import *
from .boundary_conditions.third_type_boundary_condition import *
from .equations.heat.heat_equation import *
from .equations.heat.integro_interpolation_method import *
from .equations.heat.heat_equation_algorithm import *
from .equations.heat.default_heat_equation_algorithm import *
from .grids.any_common_grid import *
from .grids.any_uniform_grid import *
from .grids.common_grid import *
from .grids.uniform_grid import *
from .grids.common_auxiliary_grid import *
from .grids.uniform_auxiliary_grid import *
from .grids.default_grid import *
from .factories.auxiliary_grid import *
from .factories.default_algorithm import *
| 36.074074
| 65
| 0.835729
| 128
| 974
| 6.0625
| 0.203125
| 0.270619
| 0.162371
| 0.146907
| 0.574742
| 0.298969
| 0.126289
| 0
| 0
| 0
| 0
| 0
| 0.094456
| 974
| 26
| 66
| 37.461538
| 0.879819
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
6c7134f355a2489e6feaef1e58c306ad7e5a70e4
| 265
|
py
|
Python
|
alento_bot/__init__.py
|
alentoghostflame/StupidEveAppraisalBot
|
a78adb206efd3a4dc41cbbfb45ee0b8b61aea248
|
[
"MIT"
] | null | null | null |
alento_bot/__init__.py
|
alentoghostflame/StupidEveAppraisalBot
|
a78adb206efd3a4dc41cbbfb45ee0b8b61aea248
|
[
"MIT"
] | null | null | null |
alento_bot/__init__.py
|
alentoghostflame/StupidEveAppraisalBot
|
a78adb206efd3a4dc41cbbfb45ee0b8b61aea248
|
[
"MIT"
] | null | null | null |
from alento_bot.storage_module import StorageManager, ConfigData, guild_data_transformer, user_data_transformer, \
cache_transformer, BaseCache, BaseGuildCache, BaseUserCache
from alento_bot.core_module import DiscordBot
from alento_bot.core_cog import CoreCog
| 53
| 114
| 0.867925
| 33
| 265
| 6.636364
| 0.606061
| 0.136986
| 0.178082
| 0.155251
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090566
| 265
| 4
| 115
| 66.25
| 0.908714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
66c2ef4213580ea381cb0b4d15d8f3d5e9664322
| 358
|
py
|
Python
|
users/admin.py
|
intelligems/stolos
|
ca658aeea92b841f89992948a136214519dffcc7
|
[
"MIT"
] | 5
|
2019-08-27T10:33:14.000Z
|
2021-11-09T10:57:40.000Z
|
users/admin.py
|
intelligems/stolos
|
ca658aeea92b841f89992948a136214519dffcc7
|
[
"MIT"
] | null | null | null |
users/admin.py
|
intelligems/stolos
|
ca658aeea92b841f89992948a136214519dffcc7
|
[
"MIT"
] | 3
|
2019-05-23T14:37:40.000Z
|
2020-12-14T18:43:16.000Z
|
from django.contrib import admin
from guardian.admin import GuardedModelAdmin
from users import models
# Guarded admin registrations: GuardedModelAdmin (django-guardian) adds
# per-object permission management on top of the standard ModelAdmin.
@admin.register(models.SSHPublicKey)
class SSHPublicKeyAdmin(GuardedModelAdmin):
    pass
@admin.register(models.DockerCert)
# NOTE(review): shares its name with models.DockerCert and lacks the
# *Admin suffix used by SSHPublicKeyAdmin — consider renaming (renaming
# here could break external importers, so only flagged).
class DockerCert(GuardedModelAdmin):
    pass
@admin.register(models.APIToken)
class APIToken(GuardedModelAdmin):
    pass
| 17.9
| 44
| 0.807263
| 38
| 358
| 7.605263
| 0.421053
| 0.134948
| 0.197232
| 0.235294
| 0.276817
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120112
| 358
| 19
| 45
| 18.842105
| 0.91746
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
dd5b5672c9ca37a33e9ea130f80a22eedc10fb00
| 233
|
py
|
Python
|
river/tests/admin.py
|
JohnieBraaf/django-river
|
58f49624c37e1e5d1bdb4c7680810126454e36a6
|
[
"BSD-3-Clause"
] | 705
|
2015-07-23T16:09:06.000Z
|
2022-03-05T03:52:31.000Z
|
river/tests/admin.py
|
JohnieBraaf/django-river
|
58f49624c37e1e5d1bdb4c7680810126454e36a6
|
[
"BSD-3-Clause"
] | 188
|
2015-08-28T21:58:38.000Z
|
2021-12-15T06:39:37.000Z
|
river/tests/admin.py
|
JohnieBraaf/django-river
|
58f49624c37e1e5d1bdb4c7680810126454e36a6
|
[
"BSD-3-Clause"
] | 101
|
2015-08-30T04:22:57.000Z
|
2022-03-29T06:58:20.000Z
|
from django.contrib import admin
from django.contrib.admin import ModelAdmin
from river.tests.models import BasicTestModel
# Plain admin for the test model; no customisation needed.
class BasicTestModelAdmin(ModelAdmin):
    pass
# Expose BasicTestModel in the admin so the test suite can exercise it.
admin.site.register(BasicTestModel, BasicTestModelAdmin)
| 19.416667
| 56
| 0.832618
| 26
| 233
| 7.461538
| 0.576923
| 0.103093
| 0.175258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111588
| 233
| 11
| 57
| 21.181818
| 0.937198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.166667
| 0.5
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 4
|
dd606344a082df4a803d75a5752528318fdd82fc
| 136
|
py
|
Python
|
laundryapp/apps.py
|
chrillux/laundrybooking
|
fa58e460454ce8b22de0b3b2d1b697742aa0f140
|
[
"MIT"
] | 1
|
2017-12-06T07:18:59.000Z
|
2017-12-06T07:18:59.000Z
|
laundryapp/apps.py
|
chrillux/laundrybooking
|
fa58e460454ce8b22de0b3b2d1b697742aa0f140
|
[
"MIT"
] | 3
|
2020-02-11T23:47:22.000Z
|
2021-06-10T19:24:46.000Z
|
laundryapp/apps.py
|
chrillux/laundrybooking
|
fa58e460454ce8b22de0b3b2d1b697742aa0f140
|
[
"MIT"
] | 3
|
2017-03-25T19:24:34.000Z
|
2019-09-14T07:00:50.000Z
|
from __future__ import unicode_literals
from django.apps import AppConfig
class LaundryappConfig(AppConfig):
    """Django application configuration for the laundryapp package."""

    name = 'laundryapp'
| 17
| 39
| 0.801471
| 15
| 136
| 6.933333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 136
| 7
| 40
| 19.428571
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0.073529
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
dd844e25a03ca578a56a655c145f5eccc06ecfb1
| 97
|
py
|
Python
|
project/partnersapp/apps.py
|
ObukhovVladislav/site-store
|
a016fd69cecfd39d15169e3fae99b14d4806b2bf
|
[
"Apache-2.0"
] | null | null | null |
project/partnersapp/apps.py
|
ObukhovVladislav/site-store
|
a016fd69cecfd39d15169e3fae99b14d4806b2bf
|
[
"Apache-2.0"
] | null | null | null |
project/partnersapp/apps.py
|
ObukhovVladislav/site-store
|
a016fd69cecfd39d15169e3fae99b14d4806b2bf
|
[
"Apache-2.0"
] | null | null | null |
from django.apps import AppConfig
class PartnersappConfig(AppConfig):
    """Django application configuration for the partnersapp package."""

    name = 'partnersapp'
| 16.166667
| 35
| 0.773196
| 10
| 97
| 7.5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154639
| 97
| 5
| 36
| 19.4
| 0.914634
| 0
| 0
| 0
| 0
| 0
| 0.113402
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
06c068e14cd832b5533778d992a4229418ea1ff2
| 226
|
py
|
Python
|
app/resources/cache.py
|
thehyve/rest_api
|
e60aa7064dc8946e96ee3937692887e3a5e40459
|
[
"Apache-2.0"
] | 15
|
2017-04-03T22:11:02.000Z
|
2021-03-16T11:49:25.000Z
|
app/resources/cache.py
|
thehyve/rest_api
|
e60aa7064dc8946e96ee3937692887e3a5e40459
|
[
"Apache-2.0"
] | 123
|
2017-01-05T14:01:01.000Z
|
2019-10-21T16:56:48.000Z
|
app/resources/cache.py
|
thehyve/rest_api
|
e60aa7064dc8946e96ee3937692887e3a5e40459
|
[
"Apache-2.0"
] | 9
|
2017-04-03T22:20:14.000Z
|
2021-05-28T16:38:54.000Z
|
__author__ = 'andreap'
from flask import current_app
from flask_restful import Resource
class ClearCache(Resource):
    """Clear the application cache.

    Fixes the docstring typo ("aplication") and the stray space in the
    ``get`` signature; behavior is unchanged.
    """

    def get(self):
        # current_app is Flask's proxy for the active application; its
        # cache extension exposes clear(). GET returns whatever clear()
        # yields (typically a truthy/None status from the cache backend).
        return current_app.cache.clear()
| 15.066667
| 40
| 0.69469
| 27
| 226
| 5.555556
| 0.703704
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.221239
| 226
| 14
| 41
| 16.142857
| 0.852273
| 0.115044
| 0
| 0
| 0
| 0
| 0.037234
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
06f1535314c56fdccd452a964379c01902b29201
| 274
|
py
|
Python
|
keras/applications/xception.py
|
DNAdeveloper/keras
|
d81a590da23b63aec6949f1da118e504d04726ea
|
[
"MIT"
] | 9
|
2018-06-28T12:52:00.000Z
|
2020-09-25T10:32:43.000Z
|
keras/applications/xception.py
|
DNAdeveloper/keras
|
d81a590da23b63aec6949f1da118e504d04726ea
|
[
"MIT"
] | null | null | null |
keras/applications/xception.py
|
DNAdeveloper/keras
|
d81a590da23b63aec6949f1da118e504d04726ea
|
[
"MIT"
] | 7
|
2018-07-02T08:55:46.000Z
|
2020-04-25T20:59:49.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras_applications import xception
# Re-export the Xception API from keras_applications so that the
# historical keras.applications.xception names keep working.
Xception = xception.Xception
decode_predictions = xception.decode_predictions
preprocess_input = xception.preprocess_input
| 27.4
| 48
| 0.879562
| 32
| 274
| 6.9375
| 0.4375
| 0.135135
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09854
| 274
| 9
| 49
| 30.444444
| 0.898785
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.571429
| 0
| 0.571429
| 0.142857
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
660659de6edc70b7b8680360299c26304c6fec8c
| 689
|
py
|
Python
|
ogusa/__init__.py
|
benrpage1/OG-USA
|
f0229fe863d0f0e6f1af223b8c903ac97093fe93
|
[
"CC0-1.0"
] | null | null | null |
ogusa/__init__.py
|
benrpage1/OG-USA
|
f0229fe863d0f0e6f1af223b8c903ac97093fe93
|
[
"CC0-1.0"
] | 2
|
2020-09-02T22:58:36.000Z
|
2020-09-03T19:29:46.000Z
|
ogusa/__init__.py
|
prrathi/OG-USA
|
2e5c116bb8656ab190a59e431a8d57415fe26b08
|
[
"CC0-1.0"
] | null | null | null |
"""
Specify what is available to import from the ogusa package.
"""
from ogusa.SS import *
from ogusa.TPI import *
from ogusa.aggregates import *
from ogusa.constants import *
from ogusa.demographics import *
from ogusa.elliptical_u_est import *
from ogusa.execute import *
from ogusa.firm import *
from ogusa.fiscal import *
from ogusa.get_micro_data import *
from ogusa.household import *
from ogusa.income import *
from ogusa.output_plots import *
from ogusa.output_tables import *
from ogusa.parameter_plots import *
from ogusa.parameter_tables import *
from ogusa.parameters import *
from ogusa.tax import *
from ogusa.txfunc import *
from ogusa.utils import *
__version__ = '0.0.0'
| 26.5
| 59
| 0.785196
| 102
| 689
| 5.186275
| 0.352941
| 0.378072
| 0.538752
| 0.079395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005059
| 0.139332
| 689
| 25
| 60
| 27.56
| 0.887015
| 0.085631
| 0
| 0
| 0
| 0
| 0.008039
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.952381
| 0
| 0.952381
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
661b322d9a2530eb6d0698cf2ee1fa416d3856db
| 59
|
py
|
Python
|
galaxy_api/contrib/__init__.py
|
nixocio/galaxy-api
|
12dcacdc0b093f3051b5dc87095324fb63bf6af3
|
[
"Apache-2.0"
] | null | null | null |
galaxy_api/contrib/__init__.py
|
nixocio/galaxy-api
|
12dcacdc0b093f3051b5dc87095324fb63bf6af3
|
[
"Apache-2.0"
] | null | null | null |
galaxy_api/contrib/__init__.py
|
nixocio/galaxy-api
|
12dcacdc0b093f3051b5dc87095324fb63bf6af3
|
[
"Apache-2.0"
] | null | null | null |
"""A package for Insights specific modules and plugins."""
| 29.5
| 58
| 0.745763
| 8
| 59
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 59
| 1
| 59
| 59
| 0.862745
| 0.881356
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
663f3be8ccc64695e9ecee40c4ca4ac3693c20a8
| 217
|
py
|
Python
|
samples/MyCustomException.py
|
bogdan9898/flaskvel
|
b3ab27d050870ea5fe6a3cd2ed3833220b18e089
|
[
"MIT"
] | 3
|
2020-08-13T19:39:53.000Z
|
2022-01-03T18:02:58.000Z
|
samples/MyCustomException.py
|
bogdan9898/flaskvel
|
b3ab27d050870ea5fe6a3cd2ed3833220b18e089
|
[
"MIT"
] | null | null | null |
samples/MyCustomException.py
|
bogdan9898/flaskvel
|
b3ab27d050870ea5fe6a3cd2ed3833220b18e089
|
[
"MIT"
] | null | null | null |
from flask import jsonify
from flaskvel import ValidationException
class MyCustomException(ValidationException):
    """Validation error rendered as a JSON failure response."""

    def pretty_print(self):
        # self._message carries the reasons collected by the validator.
        payload = {
            "validation": "failed",
            "reasons": self._message,
        }
        return jsonify(payload)
| 24.111111
| 45
| 0.769585
| 22
| 217
| 7.5
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138249
| 217
| 9
| 46
| 24.111111
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0.105505
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.25
| 0.125
| 0.625
| 0.125
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
b07eb37b000ee567034ae0aa04fa240ea173e015
| 360
|
py
|
Python
|
python/tintin/auth.py
|
FootprintAI/tintin-sdk
|
cc84a1cfeacd0374c098e4f745d8e19815832366
|
[
"Apache-2.0"
] | null | null | null |
python/tintin/auth.py
|
FootprintAI/tintin-sdk
|
cc84a1cfeacd0374c098e4f745d8e19815832366
|
[
"Apache-2.0"
] | null | null | null |
python/tintin/auth.py
|
FootprintAI/tintin-sdk
|
cc84a1cfeacd0374c098e4f745d8e19815832366
|
[
"Apache-2.0"
] | null | null | null |
from requests.auth import AuthBase
class MinioAuth(AuthBase):
    """Attaches HTTP Minio Authentication to the given Request object."""

    def __init__(self, project_token):
        # Keep the token so __call__ can stamp it onto outgoing requests.
        self.project_token = project_token

    def __call__(self, r):
        # requests invokes auth objects with the prepared request; add the
        # MinioToken header and return the (mutated) request.
        r.headers['MinioToken'] = self.project_token
        return r
| 30
| 73
| 0.691667
| 45
| 360
| 5.266667
| 0.688889
| 0.202532
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225
| 360
| 11
| 74
| 32.727273
| 0.849462
| 0.269444
| 0
| 0
| 0
| 0
| 0.038911
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
b0d5092a20ac792be462b6c71154204739960a39
| 8,462
|
py
|
Python
|
app.py
|
5qc/Textwiki
|
9a08a49bb9c914553a3bbad14f71250306715fef
|
[
"MIT"
] | null | null | null |
app.py
|
5qc/Textwiki
|
9a08a49bb9c914553a3bbad14f71250306715fef
|
[
"MIT"
] | null | null | null |
app.py
|
5qc/Textwiki
|
9a08a49bb9c914553a3bbad14f71250306715fef
|
[
"MIT"
] | null | null | null |
import glob, os, re
from flask import Flask, render_template, redirect
# Explicit template folder (matches Flask's default location).
app = Flask(__name__, template_folder="templates")
# Directory holding the wiki's .txt page sources, relative to the CWD.
pagesDir = "pages"
@app.errorhandler(400)
def error400(error):
    """Handle bad-request errors.

    Flask calls error handlers with the exception instance; the original
    handler took no arguments, so any actual 400 raised a TypeError
    instead of rendering. NOTE(review): this renders the 404 template
    with a 404 status for a 400 error — confirm that is intentional.
    """
    return render_template("404.html"), 404
@app.route("/")
def index():
return redirect("/index", 302)
@app.route("/<string:page>")
def getPage(page):
for file in os.listdir(pagesDir):
if page in file:
with open(f"{pagesDir}/{page}.txt") as f:
content = f.read()
content = content.replace("\n", "<br />")
# Escpaing Characters
escAsterik = re.compile(r"\\\*")
escUnderline = re.compile(r"\\\_")
escHyphen = re.compile(r"\\\-")
escHashtag = re.compile(r"\\\#")
escOpenBracket = re.compile(r"\\\[")
escCloseBracket = re.compile(r"\\\]")
escOpenBrace = re.compile(r"\\\{")
escCloseBrace = re.compile(r"\\\}")
escBacktick = re.compile(r"\\\`")
content = re.sub(escAsterik, "*", content)
content = re.sub(escUnderline, "_", content)
content = re.sub(escHyphen, "-", content)
content = re.sub(escHashtag, "#", content)
content = re.sub(escOpenBracket, "[", content)
content = re.sub(escCloseBracket, "]", content)
content = re.sub(escOpenBrace, "{", content)
content = re.sub(escCloseBrace, "}", content)
content = re.sub(escBacktick, "`", content)
# Main Syntax
italic = re.compile(r"\*(.*?)\*")
bold = re.compile(r"\*\*(.*?)\*\*")
underline = re.compile(r"\_(.*?)\_")
strike = re.compile(r"\-(.*?)\-")
sub = re.compile(r"\^(.*?)\^")
sup = re.compile(r"\^\^(.*?)\^\^")
heading1 = re.compile(r"#(.*?)#")
heading2 = re.compile(r"##(.*?)##")
heading3 = re.compile(r"###(.*?)###")
heading4 = re.compile(r"####(.*?)####")
heading5 = re.compile(r"#####(.*?)#####")
heading6 = re.compile(r"######(.*?)######")
code = re.compile(r"`(.*?)`")
content = re.sub(code, r"<code>\1</code>", content)
content = re.sub(bold, r"<b>\1</b>", content)
content = re.sub(italic, r"<i>\1</i>", content)
content = re.sub(underline, r"<u>\1</u>", content)
content = re.sub(strike, r"<s>\1</s>", content)
content = re.sub(sup, r"<sup>\1</sup>", content)
content = re.sub(sub, r"<sub>\1</sub>", content)
content = re.sub(heading6, r"<h6>\1</h6>", content)
content = re.sub(heading5, r"<h5>\1</h5>", content)
content = re.sub(heading4, r"<h4>\1</h4>", content)
content = re.sub(heading3, r"<h3>\1</h3>", content)
content = re.sub(heading2, r"<h2>\1</h2>", content)
content = re.sub(heading1, r"<h1>\1</h1>", content)
# Links
link = re.compile(r"\[(.+?)\]\((.*?)\)")
content = re.sub(link, r'<a href="\2">\1</a>', content)
# Images
image = re.compile(r"\{(.*?)\}")
image2 = re.compile(r"\{(.*?)\}(.*?)(?= )")
image3 = re.compile(r"\{(.*?)\}(.*?),(.*?)(?= )")
content = re.sub(image3, r'<img src="/static/img/\1" class="\2" width="\3px" alt="\1" />', content)
content = re.sub(image2, r'<img src="/static/img/\1" class="\2" alt="\1" />', content)
content = re.sub(image, r'<img src="/static/img/\1" alt="\1" />', content)
# Return Content
return render_template("page.html", title=page, content=content)
@app.route("/<string:dir>:<string:page>")
def page2(dir, page):
for file in os.listdir(f"{pagesDir}/{dir}"):
if page in file:
with open(f"{pagesDir}/{dir}/{page}.txt") as f:
content = f.read()
content = content.replace("\n", "<br />")
# Escpaing Characters
escAsterik = re.compile(r"\\\*")
escUnderline = re.compile(r"\\\_")
escHyphen = re.compile(r"\\\-")
escHashtag = re.compile(r"\\\#")
escOpenBracket = re.compile(r"\\\[")
escCloseBracket = re.compile(r"\\\]")
escOpenBrace = re.compile(r"\\\{")
escCloseBrace = re.compile(r"\\\}")
escBacktick = re.compile(r"\\\`")
content = re.sub(escAsterik, "*", content)
content = re.sub(escUnderline, "_", content)
content = re.sub(escHyphen, "-", content)
content = re.sub(escHashtag, "#", content)
content = re.sub(escOpenBracket, "[", content)
content = re.sub(escCloseBracket, "]", content)
content = re.sub(escOpenBrace, "{", content)
content = re.sub(escCloseBrace, "}", content)
content = re.sub(escBacktick, "`", content)
# Main Syntax
italic = re.compile(r"(?<!)\*(.*?)\*(?!)")
bold = re.compile(r"(?<!)\*\*(.*?)\*\*(?!)")
underline = re.compile(r"(?<!)\_(.*?)\_(?!)")
strike = re.compile(r"(?<!)\-(.*?)\-(?!)")
sub = re.compile(r"(?<!)\^(.*?)\^(?!)")
sup = re.compile(r"(?<!)\^\^(.*?)\^\^(?!)")
heading1 = re.compile(r"(?<!)#(.*?)#(?!)")
heading2 = re.compile(r"(?<!)##(.*?)##(?!)")
heading3 = re.compile(r"(?<!)###(.*?)###(?!)")
heading4 = re.compile(r"(?<!)####(.*?)####(?!)")
heading5 = re.compile(r"(?<!)#####(.*?)#####(?!)")
heading6 = re.compile(r"(?<!)######(.*?)######(?!)")
content = re.sub(bold, r"<b>\1</b>", content)
content = re.sub(italic, r"<i>\1</i>", content)
content = re.sub(underline, r"<u>\1</u>", content)
content = re.sub(strike, r"<s>\1</s>", content)
content = re.sub(sup, r"<sup>\1</sup>", content)
content = re.sub(sub, r"<sub>\1</sub>", content)
content = re.sub(heading6, r"<h6>\1</h6>", content)
content = re.sub(heading5, r"<h5>\1</h5>", content)
content = re.sub(heading4, r"<h4>\1</h4>", content)
content = re.sub(heading3, r"<h3>\1</h3>", content)
content = re.sub(heading2, r"<h2>\1</h2>", content)
content = re.sub(heading1, r"<h1>\1</h1>", content)
# Links
link = re.compile(r"(?<!)\[(.+?)\]\((.*?)\)(?!)")
content = re.sub(link, r'<a href="\2">\1</a>', content)
# Images
image = re.compile(r"(?<!)\{(.*?)\}(?!)")
image2 = re.compile(r"(?<!)\{(.*?)\}(.*?)(?= )(?!)")
image3 = re.compile(r"(?<!)\{(.*?)\}(.*?),(.*?)(?= )(?!)")
content = re.sub(image3, r'<img src="/static/img/\1" class="\2" width="\3px" alt="\1" />', content)
content = re.sub(image2, r'<img src="/static/img/\1" class="\2" alt="\1" />', content)
content = re.sub(image, r'<img src="/static/img/\1" alt="\1" />', content)
code = re.compile(r"(?<!)`(.*?)`(?!)")
content = re.sub(code, r"<code>\1</code>", content)
return render_template("page.html", title=f"{dir}:{page}", content=content)
@app.route("/s:create", methods=["POST"])
def createPage():
return render_template("create-page.html")
@app.route("/s:long") # WIP
def longPages():
for file in os.listdir(pagesDir):
files = ""
files += str(file)
# fileNew = re.sub(r"\.txt$", "", file)
# fileSize = f"{fileNew} ({os.path.getsize(f'{pagesDir}/{file}')} bytes)"
return render_template("page.html", title="s:long", content=os.listdir(pagesDir))
@app.route("/s:testing")
def testing():
return render_template("page.html", title="s:testing", content=glob.glob("**/*"))
| 48.079545
| 115
| 0.442449
| 916
| 8,462
| 4.122271
| 0.127729
| 0.07018
| 0.137712
| 0.216367
| 0.778867
| 0.729078
| 0.703125
| 0.665784
| 0.64036
| 0.622881
| 0
| 0.020779
| 0.317537
| 8,462
| 175
| 116
| 48.354286
| 0.624069
| 0.025762
| 0
| 0.549296
| 0
| 0.014085
| 0.194144
| 0.039728
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049296
| false
| 0
| 0.014085
| 0.028169
| 0.112676
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b0fcaf469ac0162df1e38abf84594c8ee8d7237c
| 192
|
py
|
Python
|
api/admin.py
|
david30907d/avancevl-interview
|
fda7b049661de29b66cd4f73939bed12269d3bed
|
[
"MIT"
] | null | null | null |
api/admin.py
|
david30907d/avancevl-interview
|
fda7b049661de29b66cd4f73939bed12269d3bed
|
[
"MIT"
] | 3
|
2020-04-30T15:08:29.000Z
|
2021-05-11T05:39:42.000Z
|
api/admin.py
|
david30907d/avancevl-interview
|
fda7b049661de29b66cd4f73939bed12269d3bed
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from api.models import Restaurant
# Changelist shows only the restaurant's name column.
class RestaurantAdmin(admin.ModelAdmin):
    list_display = ('name', )
# Make Restaurant editable in the admin with the config above.
admin.site.register(Restaurant, RestaurantAdmin)
| 27.428571
| 48
| 0.78125
| 22
| 192
| 6.772727
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130208
| 192
| 7
| 48
| 27.428571
| 0.892216
| 0
| 0
| 0
| 0
| 0
| 0.020725
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
9fe1981013059de94e65e9b4fd6504056868341d
| 237
|
py
|
Python
|
backend/app/schemas/user.py
|
Asma-Alghamdi/CREstimator_website
|
7c4dc07e9ed15cbfa4981ceba8e3115b2b9dabad
|
[
"MIT"
] | null | null | null |
backend/app/schemas/user.py
|
Asma-Alghamdi/CREstimator_website
|
7c4dc07e9ed15cbfa4981ceba8e3115b2b9dabad
|
[
"MIT"
] | null | null | null |
backend/app/schemas/user.py
|
Asma-Alghamdi/CREstimator_website
|
7c4dc07e9ed15cbfa4981ceba8e3115b2b9dabad
|
[
"MIT"
] | null | null | null |
# Normal way
def userEntity(item) -> dict:
    """Project a user record onto its public fields.

    Raises KeyError if any of the expected keys is missing, exactly as
    the original explicit dict literal did.
    """
    fields = ("fname", "lname", "email")
    return {field: item[field] for field in fields}
def usersEntity(entity) -> list:
    """Apply userEntity to every record in *entity*."""
    return list(map(userEntity, entity))
| 23.7
| 48
| 0.578059
| 27
| 237
| 5.074074
| 0.555556
| 0.20438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.248945
| 237
| 10
| 48
| 23.7
| 0.769663
| 0.042194
| 0
| 0
| 0
| 0
| 0.132743
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
b03b71125c925ab16c151b5f092d5646dbfd2ffa
| 120
|
py
|
Python
|
shop/admin.py
|
Debanjan2001/Ecommerce-WebApp
|
beebbdf036302036b6b3ff03b1a9f0dfe95a9c13
|
[
"MIT"
] | 4
|
2021-04-21T17:58:11.000Z
|
2021-04-25T06:56:42.000Z
|
shop/admin.py
|
Debanjan2001/Ecommerce-WebApp
|
beebbdf036302036b6b3ff03b1a9f0dfe95a9c13
|
[
"MIT"
] | null | null | null |
shop/admin.py
|
Debanjan2001/Ecommerce-WebApp
|
beebbdf036302036b6b3ff03b1a9f0dfe95a9c13
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from . models import Product
# Register your models here.
# Product uses the default ModelAdmin; no custom admin options.
admin.site.register(Product)
| 24
| 32
| 0.808333
| 17
| 120
| 5.705882
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 120
| 5
| 33
| 24
| 0.92381
| 0.216667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
b0497b7cb430a53a07653a60d303c9c44f7f60b9
| 572
|
py
|
Python
|
example/books/models.py
|
spapas/django-generic-scaffold
|
dbc819455c077f56b0a2202c8a073f2e9e7ca80c
|
[
"MIT"
] | 106
|
2016-01-07T15:01:16.000Z
|
2022-03-31T15:00:17.000Z
|
example/books/models.py
|
spapas/django-generic-scaffold
|
dbc819455c077f56b0a2202c8a073f2e9e7ca80c
|
[
"MIT"
] | 4
|
2016-03-07T07:14:55.000Z
|
2020-04-24T14:29:34.000Z
|
example/books/models.py
|
spapas/django-generic-scaffold
|
dbc819455c077f56b0a2202c8a073f2e9e7ca80c
|
[
"MIT"
] | 15
|
2016-03-06T18:32:57.000Z
|
2022-03-30T16:29:35.000Z
|
from __future__ import unicode_literals
try:
from django.core.urlresolvers import reverse
except ModuleNotFoundError:
from django.urls import reverse
from django.db import models
import generic_scaffold
class Book(models.Model):
title = models.CharField(max_length=128)
author = models.CharField(max_length=128)
category = models.CharField(max_length=32)
def get_absolute_url(self):
return reverse(self.detail_url_name, args=[self.id])
def __str__(self):
return '{0} {1} {2}'.format(self.title, self.author, self.category)
| 27.238095
| 75
| 0.73951
| 77
| 572
| 5.272727
| 0.558442
| 0.073892
| 0.133005
| 0.17734
| 0.133005
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023013
| 0.164336
| 572
| 20
| 76
| 28.6
| 0.82636
| 0
| 0
| 0
| 0
| 0
| 0.019231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.333333
| 0.133333
| 0.866667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
b04bc9f7a805f1ba2a951fdf14dd9f74e8a7baf8
| 25
|
py
|
Python
|
docknv/tests/__init__.py
|
sharingcloud/docknv
|
6eec6a576a32cb05278b7af045f90859066c9f1d
|
[
"MIT"
] | null | null | null |
docknv/tests/__init__.py
|
sharingcloud/docknv
|
6eec6a576a32cb05278b7af045f90859066c9f1d
|
[
"MIT"
] | null | null | null |
docknv/tests/__init__.py
|
sharingcloud/docknv
|
6eec6a576a32cb05278b7af045f90859066c9f1d
|
[
"MIT"
] | null | null | null |
"""docknv tests init."""
| 12.5
| 24
| 0.6
| 3
| 25
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 25
| 1
| 25
| 25
| 0.681818
| 0.72
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b053484c5bf8a015af6cdc0ce5bb0b0df5c9b29a
| 148
|
py
|
Python
|
asterocr/text_rec/loss/__init__.py
|
ankur6ue/aster-ocr
|
c4503bb19c843d519a36f0e5b8bebd6809800e04
|
[
"MIT"
] | 605
|
2019-07-13T08:55:47.000Z
|
2022-03-27T13:54:33.000Z
|
asterocr/text_rec/loss/__init__.py
|
ankur6ue/aster-ocr
|
c4503bb19c843d519a36f0e5b8bebd6809800e04
|
[
"MIT"
] | 81
|
2019-07-23T10:17:46.000Z
|
2022-03-29T11:27:41.000Z
|
asterocr/text_rec/loss/__init__.py
|
ankur6ue/aster-ocr
|
c4503bb19c843d519a36f0e5b8bebd6809800e04
|
[
"MIT"
] | 171
|
2019-07-18T06:32:38.000Z
|
2022-03-24T06:44:05.000Z
|
from __future__ import absolute_import
from .sequenceCrossEntropyLoss import SequenceCrossEntropyLoss
__all__ = [
'SequenceCrossEntropyLoss',
]
| 18.5
| 62
| 0.837838
| 11
| 148
| 10.454545
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114865
| 148
| 8
| 63
| 18.5
| 0.877863
| 0
| 0
| 0
| 0
| 0
| 0.161074
| 0.161074
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
b0566025d71afa1ff067e5caeccf913b4a4d215a
| 744
|
py
|
Python
|
checkout_sdk/payments/responses/payment_processed.py
|
emanuele-cesari-cko/checkout-sdk-python
|
e1e0485881cfeb0e994c9852fa04ca36aec45bc3
|
[
"MIT"
] | 13
|
2018-08-29T09:09:11.000Z
|
2021-11-26T08:30:58.000Z
|
checkout_sdk/payments/responses/payment_processed.py
|
emanuele-cesari-cko/checkout-sdk-python
|
e1e0485881cfeb0e994c9852fa04ca36aec45bc3
|
[
"MIT"
] | 17
|
2018-08-30T07:39:15.000Z
|
2022-03-31T16:09:38.000Z
|
checkout_sdk/payments/responses/payment_processed.py
|
emanuele-cesari-cko/checkout-sdk-python
|
e1e0485881cfeb0e994c9852fa04ca36aec45bc3
|
[
"MIT"
] | 13
|
2018-09-11T13:00:55.000Z
|
2021-05-19T15:19:30.000Z
|
from checkout_sdk.payments.responses import Payment
class PaymentProcessed(Payment):
def __init__(self, api_response):
super().__init__(api_response, is_pending=False)
@property
def actions_link(self):
return self.get_link('actions')
@property
def can_capture(self):
return self.has_link('capture')
@property
def capture_link(self):
return self.get_link('capture')
@property
def can_void(self):
return self.has_link('void')
@property
def void_link(self):
return self.get_link('void')
@property
def can_refund(self):
return self.has_link('refund')
@property
def refund_link(self):
return self.get_link('refund')
| 21.257143
| 56
| 0.653226
| 92
| 744
| 5
| 0.304348
| 0.167391
| 0.213043
| 0.156522
| 0.354348
| 0.217391
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240591
| 744
| 34
| 57
| 21.882353
| 0.814159
| 0
| 0
| 0.28
| 0
| 0
| 0.055108
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.32
| false
| 0
| 0.04
| 0.28
| 0.68
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
c6625b242e30d298ad252f1d6703e2d8f6af9901
| 3,353
|
py
|
Python
|
velkozz_web_api/apps/geography_api/migrations/0006_auto_20210925_0716.py
|
velkoz-data-ingestion/velkozz_web_api
|
519a6a90e5fdf5bab8ba2daf637768c5fd424a12
|
[
"MIT"
] | null | null | null |
velkozz_web_api/apps/geography_api/migrations/0006_auto_20210925_0716.py
|
velkoz-data-ingestion/velkozz_web_api
|
519a6a90e5fdf5bab8ba2daf637768c5fd424a12
|
[
"MIT"
] | null | null | null |
velkozz_web_api/apps/geography_api/migrations/0006_auto_20210925_0716.py
|
velkoz-data-ingestion/velkozz_web_api
|
519a6a90e5fdf5bab8ba2daf637768c5fd424a12
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.4 on 2021-09-25 07:16
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('geography_api', '0005_auto_20210925_0702'),
]
operations = [
migrations.AlterField(
model_name='country',
name='alpha2Code',
field=models.CharField(max_length=2, null=True),
),
migrations.AlterField(
model_name='country',
name='alpha3Code',
field=models.CharField(max_length=3, null=True),
),
migrations.AlterField(
model_name='country',
name='altSpellings',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=150), null=True, size=None),
),
migrations.AlterField(
model_name='country',
name='area',
field=models.FloatField(null=True),
),
migrations.AlterField(
model_name='country',
name='callingCodes',
field=models.JSONField(null=True),
),
migrations.AlterField(
model_name='country',
name='currencies',
field=models.JSONField(null=True),
),
migrations.AlterField(
model_name='country',
name='demonym',
field=models.JSONField(null=True),
),
migrations.AlterField(
model_name='country',
name='independent',
field=models.BooleanField(null=True),
),
migrations.AlterField(
model_name='country',
name='landlocked',
field=models.BooleanField(null=True),
),
migrations.AlterField(
model_name='country',
name='languages',
field=models.JSONField(null=True),
),
migrations.AlterField(
model_name='country',
name='latlng',
field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), null=True, size=None),
),
migrations.AlterField(
model_name='country',
name='numericCode',
field=models.IntegerField(null=True),
),
migrations.AlterField(
model_name='country',
name='region',
field=models.CharField(max_length=100, null=True),
),
migrations.AlterField(
model_name='country',
name='status',
field=models.CharField(max_length=50, null=True),
),
migrations.AlterField(
model_name='country',
name='subregion',
field=models.CharField(max_length=100, null=True),
),
migrations.AlterField(
model_name='country',
name='topLevelDomain',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=24), null=True, size=None),
),
migrations.AlterField(
model_name='country',
name='translations',
field=models.JSONField(null=True),
),
migrations.AlterField(
model_name='country',
name='unMember',
field=models.BooleanField(null=True),
),
]
| 31.933333
| 127
| 0.556218
| 299
| 3,353
| 6.130435
| 0.237458
| 0.196399
| 0.245499
| 0.284779
| 0.788871
| 0.707583
| 0.685761
| 0.685761
| 0.554828
| 0.523732
| 0
| 0.021239
| 0.325977
| 3,353
| 104
| 128
| 32.240385
| 0.789823
| 0.013421
| 0
| 0.653061
| 1
| 0
| 0.099516
| 0.006957
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.020408
| 0
| 0.05102
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c699e0de6acba01eb90d1e9d11eddcab32c1286e
| 295
|
py
|
Python
|
p1/meuapp/models.py
|
mentoriacompartilhada/helenamagaldi-v0
|
d26e5b4ae887382b02c00092c2487437bc9ffb78
|
[
"MIT"
] | 1
|
2021-08-19T02:38:47.000Z
|
2021-08-19T02:38:47.000Z
|
p1/meuapp/models.py
|
mentoriacompartilhada/helenamagaldi-v0
|
d26e5b4ae887382b02c00092c2487437bc9ffb78
|
[
"MIT"
] | null | null | null |
p1/meuapp/models.py
|
mentoriacompartilhada/helenamagaldi-v0
|
d26e5b4ae887382b02c00092c2487437bc9ffb78
|
[
"MIT"
] | null | null | null |
from django.db import models
class Client(models.Model):
name = models.CharField(max_length= 240)
family_name = models.CharField(max_length= 300)
age = models.IntegerField(max_length=3)
nickname = models.CharField(max_length=10)
password = models.CharField(max_length=7)
| 24.583333
| 51
| 0.738983
| 40
| 295
| 5.3
| 0.55
| 0.212264
| 0.339623
| 0.45283
| 0.264151
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040323
| 0.159322
| 295
| 11
| 52
| 26.818182
| 0.814516
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.142857
| 0.142857
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
c6b40d24bf82a2bf86acb56f47332d9fa1aeaa0e
| 1,581
|
py
|
Python
|
server/routing.py
|
lluo5779/Robo-Adviser
|
43aa4b73bfc96e55ed664328330a930975596124
|
[
"MIT"
] | null | null | null |
server/routing.py
|
lluo5779/Robo-Adviser
|
43aa4b73bfc96e55ed664328330a930975596124
|
[
"MIT"
] | 3
|
2021-03-31T19:24:03.000Z
|
2021-12-13T20:26:39.000Z
|
server/routing.py
|
lluo5779/Robo-Adviser
|
43aa4b73bfc96e55ed664328330a930975596124
|
[
"MIT"
] | null | null | null |
from flask import render_template, flash, redirect, url_for, request
from flask_login import login_user, logout_user, current_user, login_required
# from app.forms import LoginForm, RegistrationForm
# Render home page
# @app.route('/')
def home():
return render_template('home.jinja2')
def login():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.username.data).first()
if user is None or not user.check_password(form.password.data):
flash('Invalid username or password')
return redirect(url_for('login'))
login_user(user, remember=form.remember_me.data)
next_page = request.args.get('next')
if not next_page or url_parse(next_page).netloc != '':
next_page = url_for('index')
return redirect(next_page)
return render_template('login.jinja2', title='Sign In', form=form)
def logout():
logout_user()
return redirect(url_for('/'))
def sender():
return render_template('sender.jinja2', title='Sign In')
def receiver(fname, lname):
listA = ['a', 'b', 'c', 'd', 'ewrqrewafr']
return render_template('receiver.jinja2', title='Sign In', fname=fname, lname=lname, listA=listA)
def contact():
return render_template('Contact.jinja2', title='Contact Us')
def sendcontact():
return render_template('ContactSent.jinja2')
def aboutus():
return render_template('About.jinja2')
| 31
| 102
| 0.665402
| 200
| 1,581
| 5.095
| 0.355
| 0.109912
| 0.13739
| 0.058881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005596
| 0.208729
| 1,581
| 50
| 103
| 31.62
| 0.808953
| 0.051866
| 0
| 0
| 0
| 0
| 0.130104
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.242424
| false
| 0.060606
| 0.060606
| 0.151515
| 0.636364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
c6cf5de80cf1f90e66d348c052f85b5224e1d27a
| 540
|
py
|
Python
|
sovi/api/awards/views.py
|
husseinm/Saly
|
e656e4e30e696cc7b425f272424dc3c1db2ae92a
|
[
"MIT"
] | null | null | null |
sovi/api/awards/views.py
|
husseinm/Saly
|
e656e4e30e696cc7b425f272424dc3c1db2ae92a
|
[
"MIT"
] | null | null | null |
sovi/api/awards/views.py
|
husseinm/Saly
|
e656e4e30e696cc7b425f272424dc3c1db2ae92a
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from sovi.utils import jsonSerializer
from sovi.api.awards.models import Award, AwardType
@login_required
def getAllAwards(request):
return HttpResponse(jsonSerializer.serialize(Award.objects.all()),
content_type="application/json")
@login_required
def getAllAwardTypes(request):
return HttpResponse(jsonSerializer.serialize(AwardType.objects.all()),
content_type="application/json")
| 30
| 74
| 0.75
| 58
| 540
| 6.896552
| 0.517241
| 0.0975
| 0.08
| 0.195
| 0.42
| 0.18
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 540
| 17
| 75
| 31.764706
| 0.888889
| 0
| 0
| 0.333333
| 0
| 0
| 0.059259
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
c6e9511bcfded6e45d799c06ddfe39dbde5408b7
| 800
|
py
|
Python
|
tests/test_34.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_34.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_34.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 34. Find First and Last Position of Element in Sorted Array
"""
@pytest.fixture(scope="session")
def init_variables_34():
from src.leetcode_34_find_first_and_last_position_of_element_in_sorted_array import (
Solution,
)
solution = Solution()
def _init_variables_34():
return solution
yield _init_variables_34
class TestClass34:
def test_solution_0(self, init_variables_34):
assert init_variables_34().searchRange([5, 7, 7, 8, 8, 10], 8) == [3, 4]
def test_solution_1(self, init_variables_34):
assert init_variables_34().searchRange([5, 7, 7, 8, 8, 10], 6) == [-1, -1]
def test_solution_2(self, init_variables_34):
assert init_variables_34().searchRange([], 0) == [-1, -1]
| 24.242424
| 89
| 0.68
| 116
| 800
| 4.37069
| 0.387931
| 0.230769
| 0.266272
| 0.112426
| 0.518738
| 0.518738
| 0.518738
| 0.518738
| 0.518738
| 0.418146
| 0
| 0.077882
| 0.1975
| 800
| 32
| 90
| 25
| 0.711838
| 0.025
| 0
| 0
| 0
| 0
| 0.009901
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 1
| 0.294118
| false
| 0
| 0.117647
| 0.058824
| 0.529412
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
05a1581e3ca9120962935c2b7091ad8ad3671640
| 3,582
|
py
|
Python
|
tests/testapp/home/migrations/0001_initial.py
|
springload/wagtailblocks
|
2226597db2fad52e661eed14e5fc8de0a1a8b347
|
[
"MIT"
] | 25
|
2015-11-27T09:22:50.000Z
|
2018-01-28T23:42:51.000Z
|
tests/testapp/home/migrations/0001_initial.py
|
springload/wagtailcommonblocks
|
2226597db2fad52e661eed14e5fc8de0a1a8b347
|
[
"MIT"
] | 7
|
2018-03-21T04:46:51.000Z
|
2021-04-14T05:15:33.000Z
|
tests/testapp/home/migrations/0001_initial.py
|
springload/wagtailblocks
|
2226597db2fad52e661eed14e5fc8de0a1a8b347
|
[
"MIT"
] | 6
|
2016-11-12T20:11:34.000Z
|
2018-03-12T15:49:07.000Z
|
# Generated by Django 2.0.3 on 2018-03-21 04:12
import commonblocks.blocks
import commonblocks.fields
from django.db import migrations, models
import django.db.models.deletion
try:
import wagtail.core.blocks as core_blocks
import wagtail.core.fields as core_fields
import wagtail.embeds.blocks as embeds_blocks
import wagtail.images.blocks as images_blocks
except ImportError:
import wagtail.wagtailcore.blocks as core_blocks
import wagtail.wagtailcore.fields as core_fields
import wagtail.wagtailembeds.blocks as embeds_blocks
import wagtail.wagtailimages.blocks as images_blocks
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtailcore', '0032_add_bulk_delete_page_permission'),
]
operations = [
migrations.CreateModel(
name='TestPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('text_field', commonblocks.fields.SimpleRichTextField(blank=True)),
('body_blocks', core_fields.StreamField((('text', commonblocks.blocks.SimpleRichTextBlock()), ('quote', core_blocks.StructBlock((('quote', commonblocks.blocks.SimpleRichTextBlock(required=True)), ('author', core_blocks.CharBlock(required=False)), ('author_title', core_blocks.CharBlock(required=False)), ('image', images_blocks.ImageChooserBlock(required=False))))), ('image', core_blocks.StructBlock((('image', images_blocks.ImageChooserBlock(required=True)), ('alternative_title', core_blocks.CharBlock(required=False)), ('caption', commonblocks.blocks.SimpleRichTextBlock(required=False)), ('attribution', core_blocks.CharBlock(required=False)), ('license_url', core_blocks.URLBlock(required=False)), ('license_name', core_blocks.CharBlock(required=False))))), ('heading', core_blocks.StructBlock((('size', core_blocks.ChoiceBlock(choices=[('', 'Choose your heading'), ('h2', 'h2'), ('h3', 'h3'), ('h4', 'h4'), ('h5', 'h5')], help_text='Heading Size')), ('title', core_blocks.CharBlock(required=True)), ('subtitle', core_blocks.CharBlock(required=False))))), ('video', core_blocks.StructBlock((('video', embeds_blocks.EmbedBlock(help_text='Paste your video URL ie: https://www.youtube.com/watch?v=05GKqTZGRXU', required=True)), ('caption', commonblocks.blocks.SimpleRichTextBlock(required=False))))), ('internal', core_blocks.StructBlock((('link', core_blocks.PageChooserBlock(required=True)), ('title', core_blocks.CharBlock(required=False))))), ('external', core_blocks.StructBlock((('link', core_blocks.URLBlock(required=True)), ('title', core_blocks.CharBlock(required=True)), ('target', core_blocks.ChoiceBlock(choices=[('', 'Open link in'), ('_self', 'Same window'), ('_blank', 'New window')], help_text='Open link in'))))), ('links', core_blocks.StreamBlock((('internal_link', core_blocks.StructBlock((('link', core_blocks.PageChooserBlock(required=True)), ('title', core_blocks.CharBlock(required=False))), label='Internal page')), ('external_link', core_blocks.StructBlock((('link', 
core_blocks.URLBlock(required=True)), ('title', core_blocks.CharBlock(required=True)), ('target', core_blocks.ChoiceBlock(choices=[('', 'Open link in'), ('_self', 'Same window'), ('_blank', 'New window')], help_text='Open link in'))), label='External Page')))))), blank=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
| 85.285714
| 2,288
| 0.714685
| 409
| 3,582
| 6.0978
| 0.290954
| 0.124298
| 0.083801
| 0.119086
| 0.479551
| 0.394547
| 0.228549
| 0.225341
| 0.225341
| 0.225341
| 0
| 0.009163
| 0.116415
| 3,582
| 41
| 2,289
| 87.365854
| 0.778831
| 0.012563
| 0
| 0
| 1
| 0
| 0.174823
| 0.010184
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.393939
| 0
| 0.515152
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
05ab785bf7383bc474487869606451d920c812c8
| 95
|
py
|
Python
|
{{ cookiecutter.project_name }}/{{ cookiecutter.project_name }}/apps/core/api/__init__.py
|
marksweb/cookiecutter-django
|
f8ea97593f768f3d1cc260838d2913bdb13026c4
|
[
"MIT"
] | null | null | null |
{{ cookiecutter.project_name }}/{{ cookiecutter.project_name }}/apps/core/api/__init__.py
|
marksweb/cookiecutter-django
|
f8ea97593f768f3d1cc260838d2913bdb13026c4
|
[
"MIT"
] | null | null | null |
{{ cookiecutter.project_name }}/{{ cookiecutter.project_name }}/apps/core/api/__init__.py
|
marksweb/cookiecutter-django
|
f8ea97593f768f3d1cc260838d2913bdb13026c4
|
[
"MIT"
] | null | null | null |
from lib.core.helpers import ModuleImport
imports = {"v1\.0": ModuleImport("apis.v1_0.urls")}
| 23.75
| 51
| 0.736842
| 14
| 95
| 4.928571
| 0.785714
| 0.086957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046512
| 0.094737
| 95
| 3
| 52
| 31.666667
| 0.755814
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
05ba3794bc0e23b5344f404c4660b97899046749
| 111
|
py
|
Python
|
print_ops.py
|
cgsdfc/operator
|
1e23aab962bb3613960702b66eb39d0fe5f10e06
|
[
"MIT"
] | 2
|
2019-06-27T02:28:07.000Z
|
2019-06-30T02:11:19.000Z
|
print_ops.py
|
cgsdfc/operator
|
1e23aab962bb3613960702b66eb39d0fe5f10e06
|
[
"MIT"
] | null | null | null |
print_ops.py
|
cgsdfc/operator
|
1e23aab962bb3613960702b66eb39d0fe5f10e06
|
[
"MIT"
] | null | null | null |
import operator
from pprint import pprint
names = dir(operator)
if __name__ == '__main__':
pprint(names)
| 13.875
| 26
| 0.72973
| 14
| 111
| 5.214286
| 0.642857
| 0.30137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18018
| 111
| 7
| 27
| 15.857143
| 0.802198
| 0
| 0
| 0
| 0
| 0
| 0.072072
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0.4
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
05c6604a382a86bfe1e1129a037d444a7d262493
| 22
|
py
|
Python
|
dogse/python/antchain_sdk_dog_test/__init__.py
|
sdk-team/antchain-openapi-prod-sdk
|
4af1da6f11a2771e373f4a8904904427f06f1887
|
[
"MIT"
] | null | null | null |
dogse/python/antchain_sdk_dog_test/__init__.py
|
sdk-team/antchain-openapi-prod-sdk
|
4af1da6f11a2771e373f4a8904904427f06f1887
|
[
"MIT"
] | null | null | null |
dogse/python/antchain_sdk_dog_test/__init__.py
|
sdk-team/antchain-openapi-prod-sdk
|
4af1da6f11a2771e373f4a8904904427f06f1887
|
[
"MIT"
] | null | null | null |
__version__ = '1.1.23'
| 22
| 22
| 0.681818
| 4
| 22
| 2.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.090909
| 22
| 1
| 22
| 22
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
05e7ce4a9357e481edc7e66fedb1259b6a6a5d93
| 260
|
py
|
Python
|
models/__init__.py
|
Abhishek-Aditya-bs/Streaming-Spark-For-Machine-Learning
|
76f9c97e66d6171bc83d1183fadc30bd492422a7
|
[
"MIT"
] | 1
|
2021-12-10T13:14:53.000Z
|
2021-12-10T13:14:53.000Z
|
models/__init__.py
|
iVishalr/SSML-spark-streaming-for-machine-learning
|
ba95a7d2d6bb15bacfbbf5b3c95317310b36d54f
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
iVishalr/SSML-spark-streaming-for-machine-learning
|
ba95a7d2d6bb15bacfbbf5b3c95317310b36d54f
|
[
"MIT"
] | null | null | null |
from .MLP import MLP
from .svm import SVM
from .ann import ANN
from .kmeansClustering import Kmeans
from .deepImageMLP import DeepImageMLP
from .deepImageSVM import DeepImageSVM
from .deepImageFeaturizer import DeepImage
from .deepImageKmeans import DeepKmeans
| 32.5
| 42
| 0.85
| 32
| 260
| 6.90625
| 0.40625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119231
| 260
| 8
| 43
| 32.5
| 0.965066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
af37acfda4b315866bd40675fa35cc85e59fd0fc
| 486
|
py
|
Python
|
dygraph/ppdet/modeling/__init__.py
|
syyxsxx/PaddleDetection
|
da0e1eed6472d492abc9db1db324569f9be62a5f
|
[
"Apache-2.0"
] | 2
|
2021-09-07T08:59:53.000Z
|
2022-03-17T03:00:40.000Z
|
dygraph/ppdet/modeling/__init__.py
|
syyxsxx/PaddleDetection
|
da0e1eed6472d492abc9db1db324569f9be62a5f
|
[
"Apache-2.0"
] | null | null | null |
dygraph/ppdet/modeling/__init__.py
|
syyxsxx/PaddleDetection
|
da0e1eed6472d492abc9db1db324569f9be62a5f
|
[
"Apache-2.0"
] | 1
|
2021-06-10T09:48:42.000Z
|
2021-06-10T09:48:42.000Z
|
from . import ops
from . import bbox
from . import mask
from . import backbones
from . import necks
from . import heads
from . import losses
from . import architectures
from . import post_process
from . import layers
from . import utils
from .ops import *
from .bbox import *
from .mask import *
from .backbones import *
from .necks import *
from .heads import *
from .losses import *
from .architectures import *
from .post_process import *
from .layers import *
from .utils import *
| 20.25
| 28
| 0.748971
| 68
| 486
| 5.323529
| 0.205882
| 0.303867
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183128
| 486
| 23
| 29
| 21.130435
| 0.911839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
af45ff3cb9fd9d347e147e662a53b764aa58852c
| 608
|
py
|
Python
|
jesse/strategies/Test38/__init__.py
|
noenfugler/jesse
|
217a3168620a755c1a9576d9deb27105db7dccf8
|
[
"MIT"
] | 3,999
|
2018-11-09T10:38:51.000Z
|
2022-03-31T12:29:12.000Z
|
jesse/strategies/Test38/__init__.py
|
noenfugler/jesse
|
217a3168620a755c1a9576d9deb27105db7dccf8
|
[
"MIT"
] | 172
|
2020-04-16T16:19:08.000Z
|
2022-03-28T13:28:55.000Z
|
jesse/strategies/Test38/__init__.py
|
noenfugler/jesse
|
217a3168620a755c1a9576d9deb27105db7dccf8
|
[
"MIT"
] | 495
|
2019-03-01T21:48:53.000Z
|
2022-03-30T15:35:19.000Z
|
from jesse.strategies import Strategy
# test_average_take_profit_exception
class Test38(Strategy):
def should_long(self) -> bool:
return self.index == 0
def should_short(self) -> bool:
return False
def go_long(self):
qty = 1
self.buy = qty, 2
self.stop_loss = qty, 1
def go_short(self):
pass
def should_cancel(self):
return False
def filters(self):
return [self.filter_1]
def filter_1(self):
# trying to access average_take_profit without setting it first
return self.average_take_profit > 1
| 20.965517
| 71
| 0.633224
| 82
| 608
| 4.5
| 0.487805
| 0.089431
| 0.138211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020882
| 0.291118
| 608
| 28
| 72
| 21.714286
| 0.835267
| 0.157895
| 0
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.388889
| false
| 0.055556
| 0.055556
| 0.277778
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
af473677f6ca8e73947668d1a6fb837a7ffa12ed
| 31
|
py
|
Python
|
login.py
|
ChenJnHui/git_demo
|
f818b1dd2e36c1210857f2ca82c03a772deb11c8
|
[
"MIT"
] | null | null | null |
login.py
|
ChenJnHui/git_demo
|
f818b1dd2e36c1210857f2ca82c03a772deb11c8
|
[
"MIT"
] | null | null | null |
login.py
|
ChenJnHui/git_demo
|
f818b1dd2e36c1210857f2ca82c03a772deb11c8
|
[
"MIT"
] | null | null | null |
asdasdasd
asdasdasdsa
asdasd
| 5.166667
| 11
| 0.83871
| 3
| 31
| 8.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 31
| 5
| 12
| 6.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
af56de4497ade50be667a01643d4f79a84646e97
| 93
|
py
|
Python
|
interactive/mechanism/apps.py
|
mattldawson/music-box-interactive
|
6b2610b4f0f255f0e78e23628dc7ba6cc844d0f4
|
[
"Apache-2.0"
] | 4
|
2020-07-20T17:18:51.000Z
|
2022-03-17T19:08:24.000Z
|
interactive/mechanism/apps.py
|
mattldawson/music-box-interactive
|
6b2610b4f0f255f0e78e23628dc7ba6cc844d0f4
|
[
"Apache-2.0"
] | 174
|
2020-07-30T16:47:53.000Z
|
2022-03-29T17:54:54.000Z
|
interactive/mechanism/apps.py
|
mattldawson/music-box-interactive
|
6b2610b4f0f255f0e78e23628dc7ba6cc844d0f4
|
[
"Apache-2.0"
] | 1
|
2021-01-05T22:42:55.000Z
|
2021-01-05T22:42:55.000Z
|
from django.apps import AppConfig
class MechanismConfig(AppConfig):
name = 'mechanism'
| 15.5
| 33
| 0.763441
| 10
| 93
| 7.1
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 93
| 5
| 34
| 18.6
| 0.910256
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
af7f1ded1897be10162ddcf1d243241fd7786382
| 1,698
|
py
|
Python
|
tools/imports/shockabsorber/model/cast.py
|
Brian151/OpenShockwave
|
152c9c8899a2d949b3b769ee8204a3cef8df1b13
|
[
"Apache-2.0"
] | 49
|
2017-02-21T05:35:49.000Z
|
2022-03-24T19:49:13.000Z
|
tools/imports/shockabsorber/model/cast.py
|
Brian151/OpenShockwave
|
152c9c8899a2d949b3b769ee8204a3cef8df1b13
|
[
"Apache-2.0"
] | 17
|
2017-02-23T08:59:51.000Z
|
2019-05-10T03:02:42.000Z
|
tools/imports/shockabsorber/model/cast.py
|
Brian151/OpenShockwave
|
152c9c8899a2d949b3b769ee8204a3cef8df1b13
|
[
"Apache-2.0"
] | 11
|
2017-05-17T20:21:50.000Z
|
2021-12-14T19:01:41.000Z
|
class CastLibraryTable: #------------------------------
def __init__(self, castlibs):
self.by_nr = {}
self.by_assoc_id = {}
for cl in castlibs:
self.by_nr[cl.nr] = cl
if cl.assoc_id>0:
self.by_assoc_id[cl.assoc_id] = cl
def iter_by_nr(self):
return self.by_nr.itervalues()
def get_cast_library(self, lib_nr):
return self.by_nr[lib_nr]
def get_cast_member(self, lib_nr, member_nr):
cast_lib = self.by_nr[lib_nr]
return cast_lib.get_cast_member(member_nr) if cast_lib != None else None
#--------------------------------------------------
class CastLibrary: #------------------------------
def __init__(self, nr, name, path, assoc_id, idx_range, self_idx):
self.nr = nr
self.name = name
self.path = path
self.assoc_id = assoc_id
self.idx_range = idx_range
self.self_idx = self_idx
self.castmember_table = None
def __repr__(self):
return "<CastLibrary #%d name=\"%s\" size=%d>" % (self.nr, self.name,
len(self.castmember_table) if self.castmember_table != None else -1)
def get_path(self): return self.path
def castmember_table_is_set(self): return self.castmember_table != None
def get_castmember_table(self): return self.castmember_table
def set_castmember_table(self,table):
self.castmember_table = table
def get_cast_member(self, member_nr):
if self.castmember_table == None: return None # TODO: Ensure loaded
return self.castmember_table[member_nr-1]
#--------------------------------------------------
| 36.913043
| 126
| 0.567138
| 217
| 1,698
| 4.119816
| 0.198157
| 0.184564
| 0.170022
| 0.102908
| 0.231544
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00236
| 0.251472
| 1,698
| 45
| 127
| 37.733333
| 0.701023
| 0.106007
| 0
| 0
| 0
| 0
| 0.02115
| 0
| 0
| 0
| 0
| 0.022222
| 0
| 1
| 0.314286
| false
| 0
| 0
| 0.171429
| 0.514286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
af7f4d8af83778a73414805fadc0ae843d4a6c07
| 364
|
py
|
Python
|
fastdl/__init__.py
|
r-salas/fastdl
|
8bb63c8b9cf87b0ae7987ffd4b3ae25816007b43
|
[
"MIT"
] | 3
|
2021-08-25T09:47:41.000Z
|
2021-09-27T03:05:00.000Z
|
fastdl/__init__.py
|
r-salas/fastdl
|
8bb63c8b9cf87b0ae7987ffd4b3ae25816007b43
|
[
"MIT"
] | null | null | null |
fastdl/__init__.py
|
r-salas/fastdl
|
8bb63c8b9cf87b0ae7987ffd4b3ae25816007b43
|
[
"MIT"
] | 1
|
2021-09-27T03:05:10.000Z
|
2021-09-27T03:05:10.000Z
|
from .__version__ import __version__, __author__, __url__, __license__, __description__
from . import utils # noqa: F401
from .config import conf # noqa: F401
from .extractor import extract_file # noqa: F401
from .downloader import download # noqa: F401
from .hasher import hash_file, validate_file # noqa: F401
from .parallel import Parallel # noqa: F401
| 36.4
| 87
| 0.771978
| 47
| 364
| 5.404255
| 0.446809
| 0.188976
| 0.23622
| 0.125984
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059016
| 0.162088
| 364
| 9
| 88
| 40.444444
| 0.77377
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
af8edfa35d04a73bc66ed4878a8ac419b2a337fc
| 134
|
py
|
Python
|
student_management/students/api/throttling.py
|
nidhisha123/student-management
|
ef2df94d8daf29ed7b0b0e023d73ac2d8ec835c5
|
[
"MIT"
] | null | null | null |
student_management/students/api/throttling.py
|
nidhisha123/student-management
|
ef2df94d8daf29ed7b0b0e023d73ac2d8ec835c5
|
[
"MIT"
] | 22
|
2021-12-15T23:24:09.000Z
|
2022-03-31T23:26:33.000Z
|
student_management/students/api/throttling.py
|
nidhisha123/student-management
|
ef2df94d8daf29ed7b0b0e023d73ac2d8ec835c5
|
[
"MIT"
] | null | null | null |
from rest_framework.throttling import UserRateThrottle
class StudentDetailThrottling(UserRateThrottle):
    """Per-user rate limiting for the student-detail endpoint.

    The actual rate is configured under the "student-detail" scope in the
    project's DRF throttle settings.
    """
    scope = "student-detail"
| 26.8
| 54
| 0.835821
| 12
| 134
| 9.25
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104478
| 134
| 5
| 55
| 26.8
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0.103704
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
af918273cb0941b3f862e7b6f34758efc4d5c7ce
| 299
|
py
|
Python
|
main/break-numbers/break-numbers.py
|
EliahKagan/old-practice-snapshot
|
1b53897eac6902f8d867c8f154ce2a489abb8133
|
[
"0BSD"
] | null | null | null |
main/break-numbers/break-numbers.py
|
EliahKagan/old-practice-snapshot
|
1b53897eac6902f8d867c8f154ce2a489abb8133
|
[
"0BSD"
] | null | null | null |
main/break-numbers/break-numbers.py
|
EliahKagan/old-practice-snapshot
|
1b53897eac6902f8d867c8f154ce2a489abb8133
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/env python3
def read_val():
    """Read one line from stdin and return it as an int."""
    return int(input())
def count_three_addend_decompositions(n):
    """Count ordered triples of non-negative integers summing to n.

    Closed form of sum_{k=0}^{n} (n - k + 1), i.e. the binomial C(n+2, 2).
    https://www.wolframalpha.com/input/?i=sum_k%3D0%5En+(n+-+k+%2B+1)
    """
    pair_product = (n + 2) * (n + 1)
    return pair_product // 2
# First input line gives the number of test cases; each case is one integer n.
for _ in range(read_val()):
    print(count_three_addend_decompositions(read_val()))
| 24.916667
| 71
| 0.665552
| 48
| 299
| 3.916667
| 0.645833
| 0.111702
| 0.170213
| 0.319149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035294
| 0.147157
| 299
| 11
| 72
| 27.181818
| 0.701961
| 0.29097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
afa35a23ef98ce040ab2e39d40966f4dadc041f3
| 108
|
py
|
Python
|
rideshare/rideshare_profile/apps.py
|
RideQuest/ridequest
|
a2f51d99109f9767c5d2abe7f6fb3f160e5212da
|
[
"MIT"
] | null | null | null |
rideshare/rideshare_profile/apps.py
|
RideQuest/ridequest
|
a2f51d99109f9767c5d2abe7f6fb3f160e5212da
|
[
"MIT"
] | 12
|
2016-04-30T02:36:13.000Z
|
2016-04-30T02:39:45.000Z
|
rideshare/rideshare_profile/apps.py
|
RideQuest/ridequest
|
a2f51d99109f9767c5d2abe7f6fb3f160e5212da
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class RideshareProfileConfig(AppConfig):
    """Django application configuration for the rideshare_profile app."""
    name = 'rideshare_profile'
| 18
| 40
| 0.796296
| 11
| 108
| 7.727273
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 108
| 5
| 41
| 21.6
| 0.913978
| 0
| 0
| 0
| 0
| 0
| 0.157407
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
bb6490997a8de45b640803f4c4967ad9580a3e15
| 174,397
|
py
|
Python
|
unittests/test_process.py
|
Software-Factory-Labs/OpenPIVT
|
9ee144076b870ef2bc2b23a41ae90d63b2f9081f
|
[
"Apache-2.0"
] | 1
|
2020-02-07T22:32:13.000Z
|
2020-02-07T22:32:13.000Z
|
unittests/test_process.py
|
the-aerospace-corporation/pivt
|
5f2ea999ee47b726ccf5bad4be984606901afb36
|
[
"Apache-2.0"
] | 1
|
2020-04-30T14:41:54.000Z
|
2020-04-30T14:41:54.000Z
|
unittests/test_process.py
|
the-aerospace-corporation/pivt
|
5f2ea999ee47b726ccf5bad4be984606901afb36
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2019 The Aerospace Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pivt import process
import unittest
import os
from unittest.mock import patch
from unittest.mock import call
from unittest.mock import MagicMock
import json
import csv
import zipfile
import copy
import tempfile
from copy import deepcopy
from pathlib import Path
from pivt.util import util
from pivt.util import Constants
from pivt.conf_manager import ConfManager
# Originals of the module-level values patched in setUpModule; restored in
# tearDownModule.
orig_conf_load = ConfManager.load
orig_ci_to_ss = None
orig_ci_subs = None
def setUpModule():
    """Module fixture: point PIVT_HOME at a fresh temp dir, stub config
    loading, and shrink the CI lookup tables to small predictable values
    (originals saved in the module globals for tearDownModule)."""
    # Normalize Windows backslashes to forward slashes.
    os.environ['PIVT_HOME'] = tempfile.mkdtemp().replace('\\', '/')
    ConfManager.load = MagicMock()
    util.setup()
    global orig_ci_to_ss
    orig_ci_to_ss = deepcopy(util.ci_to_ss)
    util.ci_to_ss = {
        'ci2': 'ss5',
        'ci3': 'ss6'
    }
    global orig_ci_subs
    orig_ci_subs = deepcopy(process.CI_SUBS)
    process.CI_SUBS = {
        'ci3-sub': 'ci3'
    }
def tearDownModule():
    """Undo setUpModule: tear down util state, remove the temp PIVT_HOME,
    and restore the patched module-level values."""
    util.teardown()
    util.rmtree(os.environ['PIVT_HOME'], no_exist_ok=True)
    # pop() replaces the membership-check + `del(...)` call-style pair;
    # `del` is a statement, not a function.
    os.environ.pop('PIVT_HOME', None)
    ConfManager.load = orig_conf_load
    util.ci_to_ss = orig_ci_to_ss
    process.CI_SUBS = orig_ci_subs
def find_file(filename):
    """Locate *filename* under the current directory.

    Returns the first walked path that contains the normalized name, or ''
    if nothing matches. This lets the unit tests run from an arbitrary
    directory rather than assuming the utility directory as cwd.
    """
    target = os.path.normpath(filename)
    for root, _dirs, names in os.walk('.'):
        for candidate in (os.path.join(root, n).replace('.\\', '') for n in names):
            if target in candidate:
                return candidate
    return ''
# NOTE(review): this guard sits above the TestCase classes, so running the
# module directly executes unittest.main() before any tests are defined
# (and main() exits the process). Consider moving it to the end of the file.
if __name__ == '__main__':
    unittest.main()
"""
Processor
"""
class TestMain(unittest.TestCase):
    """Placeholder for tests of the processor entry point (not yet written)."""
    # TODO
    pass
class TestParseArgs(unittest.TestCase):
    """parse_args: --reverse defaults to off and can be switched on."""

    def setUp(self):
        self.processor = process.Processor([])

    def test_no_args(self):
        parsed = self.processor.parse_args([])
        self.assertFalse(parsed.reverse)

    def test_reverse(self):
        parsed = self.processor.parse_args(['--reverse'])
        self.assertTrue(parsed.reverse)
class TestProcessorDeleteIndex(unittest.TestCase):
    """Smoke test: delete_index runs with HTTP calls and sleeping patched out."""

    # Decorators apply bottom-up, so mock arguments arrive in reverse order.
    @patch('time.sleep')
    @patch('requests.get')
    @patch('requests.delete')
    def test(self, mock_requests_delete, mock_requests_get, mock_sleep):
        # NOTE(review): this sets status_code on the mock *function*, not on
        # its return value; requests.get(...).status_code is still a Mock.
        # Probably meant mock_requests_get.return_value.status_code = 200.
        mock_requests_get.status_code = 200
        process.Processor.delete_index('derp_index', 'derp_app')
class TestProcessorCreateIndex(unittest.TestCase):
    """Placeholder for create_index tests (not yet written)."""
    pass
class TestProcessorRefreshApp(unittest.TestCase):
    """Placeholder for refresh_app tests; the sketch below would need a
    Processor instance and a live endpoint."""
    # def test(self):
    #     self.processor.refresh_app('pivt', 'https://localhost:8089')
    pass
class TestProcessorGetIndexPath(unittest.TestCase):
    """_get_index_path builds the servicesNS REST path for one index."""

    def test(self):
        # assertEqual instead of a bare assert: not stripped under -O and
        # reports both values on failure.
        self.assertEqual(
            '/servicesNS/nobody/pivt/data/indexes/pivt_cq',
            process.Processor._get_index_path('pivt_cq', 'pivt'))
class TestProcessorGetIndexesPath(unittest.TestCase):
    """_get_indexes_path builds the servicesNS REST path for all indexes."""

    def test(self):
        # assertEqual instead of a bare assert: not stripped under -O and
        # reports both values on failure.
        self.assertEqual(
            '/servicesNS/nobody/pivt/data/indexes',
            process.Processor._get_indexes_path('pivt'))
class TestProcessorGetAppPath(unittest.TestCase):
    """_get_app_path builds the servicesNS REST path for the app itself."""

    def test(self):
        # assertEqual instead of a bare assert: not stripped under -O and
        # reports both values on failure.
        self.assertEqual(
            '/servicesNS/nobody/pivt',
            process.Processor._get_app_path('pivt'))
"""
Source
"""
class TestSourceSetup(unittest.TestCase):
    """Source.setup must create the data dir whether or not it already exists."""

    def setUp(self):
        self.name = 'name'
        self.data_dir = util.data_dir / 'test_dir'
        self.source = process.Source(self.name, self.data_dir)

    def tearDown(self):
        util.rmtree(util.data_dir, no_exist_ok=True)

    def test_dir_no_exist(self):
        self.source.setup()
        self.assertTrue(self.data_dir.exists())

    def test_dir_exists(self):
        # Pre-create the directory; setup() must tolerate that.
        self.data_dir.mkdir(parents=True)
        self.source.setup()
        self.assertTrue(self.data_dir.exists())
"""
JenkinsSource
"""
class TestJenkinsSourceSetup(unittest.TestCase):
    """JenkinsSource.setup: creates the data dir and loads event keys from
    the plain files (not subdirectories) already present in it."""

    def setUp(self):
        self.name = 'name'
        self.data_dir = util.data_dir / 'test_dir'
        self.source = process.JenkinsSource(self.name, self.data_dir)
        # Scenario knobs; each test overrides what it needs before do_it().
        self.files = []
        self.dirs = []
        self.expected_files = []

    def tearDown(self):
        util.rmtree(util.data_dir, no_exist_ok=True)

    def do_it(self):
        # Build the fixture tree: empty files plus subdirectories.
        self.data_dir.mkdir(parents=True)
        for filename in self.files:
            file_path = self.data_dir / filename
            file_path.open('w').close()
        for dir_name in self.dirs:
            dir_path = self.data_dir / dir_name
            dir_path.mkdir()
        with patch.object(process.JenkinsSource, '_load_event_keys') as mock_load_keys:
            self.source.setup()
            self.make_asserts(mock_load_keys)

    def make_asserts(self, mock_load_keys):
        self.assertTrue(self.data_dir.exists())
        # Filesystem listing order is not guaranteed, so compare as sets.
        files_called_with = mock_load_keys.call_args[0][0]
        self.assertEqual(set(self.expected_files), set(files_called_with))

    def test_no_files(self):
        self.do_it()

    def test_some_files(self):
        self.files = ['f1.txt', 'f2.txt']
        # Directories must not be passed to _load_event_keys.
        self.dirs = ['some_dir']
        self.expected_files = [self.data_dir / filename for filename in self.files]
        self.do_it()
class TestJenkinsSourceLoadEventKeys(unittest.TestCase):
    """_load_event_keys: builds event_keys per file basename, with the keys
    returned by _load_db_file_event_keys as the 'existing' set."""

    def setUp(self):
        self.name = 'name'
        self.data_dir = util.data_dir / 'test_dir'
        self.source = process.JenkinsSource(self.name, self.data_dir)
        self.files = []
        self.key_return_vals = []
        self.expected_event_keys = {}

    def do_it(self):
        with patch.object(process.JenkinsSource, '_load_db_file_event_keys') as mock_load_file_keys:
            self.set_mocks(mock_load_file_keys)
            self.source._load_event_keys(self.files)
            self.make_asserts()

    def set_mocks(self, mock_load_file_keys):
        # side_effect list: one return value per file, in order.
        mock_load_file_keys.side_effect = self.key_return_vals

    def make_asserts(self):
        self.assertEqual(self.expected_event_keys, self.source.event_keys)

    def test_no_files(self):
        self.do_it()

    def test(self):
        self.files = [Path('path/f1.json'), Path('path/f2.json')]
        self.key_return_vals = ['keys1', 'keys2']
        self.expected_event_keys = {}
        for i in range(len(self.files)):
            # Keys are stored per basename; the 'new' set starts empty.
            file_path = util.basename(self.files[i])
            keys = self.key_return_vals[i]
            self.expected_event_keys[file_path] = {'new': set(), 'existing': keys}
        self.do_it()
class TestJenkinsSourceLoadDbFileEventKeys(unittest.TestCase):
    """_load_db_file_event_keys: collects one key per JSON line of an
    existing db file; a missing file yields the empty set."""

    def setUp(self):
        self.name = 'name'
        self.data_dir = util.data_dir / 'test_dir'
        self.source = process.JenkinsSource(self.name, self.data_dir)
        self.events = None
        self.expected_keys = set()

    def tearDown(self):
        util.rmtree(util.data_dir, no_exist_ok=True)

    def do_it(self):
        file_path = self.data_dir/'test.json'
        if self.events is not None:
            # Write one JSON event per line, mirroring the db file format.
            self.data_dir.mkdir(parents=True)
            with file_path.open('w') as file:
                for event in self.events:
                    file.write(json.dumps(event) + '\n')
        # Key extraction is stubbed to simply read the event's 'id' field.
        with patch.object(process.JenkinsSource, '_get_event_key', side_effect=lambda e: json.loads(e)['id']):
            keys = self.source._load_db_file_event_keys(file_path)
            self.make_asserts(keys)

    def make_asserts(self, keys):
        self.assertEqual(self.expected_keys, keys)

    def test_no_file(self):
        self.do_it()

    def test(self):
        self.events = [
            {'herp': 'derp', 'id': 1, 'ci': 'ci1'},
            {'herp': 'derp', 'id': 2, 'ci': 'ci1'},
            {'herp': 'derp', 'id': 3, 'ci': 'ci1'}
        ]
        self.expected_keys = {event['id'] for event in self.events}
        self.do_it()
class TestJenkinsSourceLoadNewData(unittest.TestCase):
    """load_new_data: processes every file returned by _load_new_files and
    logs the total added/skipped event counts."""

    def setUp(self):
        self.name = 'name'
        self.data_dir = util.data_dir / 'test_dir'
        self.pull_source_path = 'cool_path'
        self.source = process.JenkinsSource(self.name, self.data_dir)
        self.file_names = []
        self.expected_file_process_calls = None
        self.expected_added = 0
        self.expected_skipped = 0
        self.expected_logger_statement = 'INFO:JenkinsSource:Added: 0, skipped: 0'

    def do_it(self):
        # DataFile.process is mocked, so these files never touch disk.
        files = (process.DataFile(filename) for filename in self.file_names)
        with patch.object(process.JenkinsSource, '_load_new_files') as mock_load_files, patch.object(process.DataFile, 'process') as mock_file_process, self.assertLogs('JenkinsSource', 'INFO') as logger:
            self.set_mocks(files, 2, 1, mock_load_files, mock_file_process)
            self.source.load_new_data(self.pull_source_path)
            self.make_asserts(mock_load_files, mock_file_process, logger)

    @staticmethod
    def set_mocks(files, added, skipped, mock_load_files, mock_file_process):
        mock_load_files.return_value = files
        # Every processed file reports (added, skipped) == (2, 1).
        mock_file_process.return_value = added, skipped

    def make_asserts(self, mock_load_files, mock_file_process, logger):
        mock_load_files.assert_called_once_with(self.pull_source_path)
        if self.expected_file_process_calls is None:
            mock_file_process.assert_not_called()
        else:
            self.assertEqual(self.expected_file_process_calls, mock_file_process.call_args_list)
        self.assertEqual([self.expected_logger_statement], logger.output)

    def test_no_files(self):
        self.do_it()

    def test(self):
        self.file_names = ['f1', 'f2', 'f3']
        # Each file is processed with the same (data_dir, stats, keys) args.
        self.expected_file_process_calls = [call(self.data_dir, self.source.file_stats, self.source.event_keys) for _ in self.file_names]
        self.expected_added = len(self.file_names) * 2
        self.expected_skipped = len(self.file_names)
        self.expected_logger_statement = 'INFO:JenkinsSource:Added: {0}, skipped: {1}'.format(self.expected_added, self.expected_skipped)
        self.do_it()
class TestJenkinsSourceLoadNewFiles(unittest.TestCase):
    """_load_new_files: wraps the non-empty pulled files as DataFiles and
    loads their events (empty and 'ins' files are expected to be filtered
    out — see test())."""

    def setUp(self):
        self.name = 'name'
        self.data_dir = util.data_dir / 'test_dir'
        self.files_path = util.data_dir / 'cool_files'
        self.source = process.JenkinsSource(self.name, self.data_dir)
        self.file_names = []
        self.expected_files = []
        self.expected_file_load_events_call_count = 0

    def tearDown(self):
        util.rmtree(util.data_dir, no_exist_ok=True)

    @staticmethod
    def get_file(file_path, **kwargs):
        # Stand-in for _get_data_file: always wrap the path in a DataFile.
        return process.DataFile(file_path)

    def do_it(self):
        self.files_path.mkdir(parents=True)
        for filename in self.file_names:
            file_path = self.files_path / filename
            if 'empty' in filename:
                # Zero-byte file: should be skipped by _load_new_files.
                file_path.open('w').close()
            else:
                with file_path.open('w') as file:
                    file.write('dummy data')
        with patch.object(process.JenkinsSource, '_get_data_file') as mock_get_file, patch.object(process.DataFile, 'load_events') as mock_file_load_events:
            self.set_mocks(mock_get_file)
            files = self.source._load_new_files(self.files_path)
            self.make_asserts(files, mock_file_load_events)

    def set_mocks(self, mock_get_file):
        mock_get_file.side_effect = self.get_file

    def make_asserts(self, actual_files, mock_file_load_events):
        # Compare by path as sets — directory listing order is unspecified.
        self.expected_file_paths = [file.path for file in self.expected_files]
        self.actual_file_paths = [file.path for file in actual_files]
        self.assertEqual(set(self.expected_file_paths), set(self.actual_file_paths))
        self.assertEqual(self.expected_file_load_events_call_count, mock_file_load_events.call_count)

    def test_no_files(self):
        self.do_it()

    def test(self):
        self.file_names = ['f1.json', 'f2.json', 'ins.json', 'empty.json']
        # Only f1/f2 survive: the 'ins' file and the empty file are dropped.
        self.expected_files = [process.DataFile(self.files_path / 'f1.json'), process.DataFile(self.files_path / 'f2.json')]
        self.expected_file_load_events_call_count = 2
        self.do_it()
class TestJenkinsSourcePrintFileStats(unittest.TestCase):
    """Placeholder for print_file_stats tests (not yet written)."""
    pass
"""
ProductSource
"""
class TestProductSourceInit(unittest.TestCase):
    """ProductSource wires up the 'jenkins' name and Jenkins data dir."""

    def test(self):
        self.source = process.ProductSource()
        built = self.source
        self.assertEqual('jenkins', built.name)
        self.assertEqual(util.jenkins_data_dir, built.data_dir)
class TestProductSourceSetup(unittest.TestCase):
    """ProductSource.setup must create the Jenkins FT data dir, existing or not."""

    def setUp(self):
        self.source = process.ProductSource()

    def tearDown(self):
        util.rmtree(util.data_dir, no_exist_ok=True)

    def test_dir_no_exist(self):
        self.source.setup()
        self.assertTrue(util.jenkins_ft_data_dir.exists())

    def test_dir_exists(self):
        # Pre-create the directory; setup() must tolerate that.
        util.jenkins_ft_data_dir.mkdir(parents=True, exist_ok=True)
        self.source.setup()
        self.assertTrue(util.jenkins_ft_data_dir.exists())
class TestProductSourceGetDataFile(unittest.TestCase):
    """_get_data_file forwards the path and default_instance to the DataFile."""

    def setUp(self):
        self.source = process.ProductSource()

    def test(self):
        src_path = Path('path/hi.txt')
        fallback = 'derp'
        data_file = self.source._get_data_file(src_path, default_instance=fallback)
        self.assertEqual(src_path, data_file.path)
        self.assertEqual(fallback, data_file.default_instance)
"""
InsSource
"""
class TestInsSourceInit(unittest.TestCase):
    """InsSource binds the 'ins' name and the INS data directory."""

    def test(self):
        built = process.InsSource()
        self.assertEqual('ins', built.name)
        self.assertEqual(util.ins_data_dir, built.data_dir)
class TestInsSourceGetDataFile(unittest.TestCase):
    """_get_data_file simply wraps the given path."""

    def setUp(self):
        self.source = process.InsSource()

    def test(self):
        target = Path('path/hi.txt')
        result = self.source._get_data_file(target)
        self.assertEqual(target, result.path)
"""
VicSource
"""
class TestVicSourceInit(unittest.TestCase):
    """VicSource binds the 'vic' name and the VIC data directory."""

    def test(self):
        built = process.VicSource()
        self.assertEqual('vic', built.name)
        self.assertEqual(util.vic_data_dir, built.data_dir)
class TestVicSourceGetDataFile(unittest.TestCase):
    """VicSource only accepts the AWS-VIC-Manager data file."""

    def setUp(self):
        self.source = process.VicSource()

    def test_incorrect_file(self):
        rejected = self.source._get_data_file(Path('path/hi.txt'))
        self.assertIsNone(rejected)

    def test(self):
        accepted_path = Path('path/AWS-VIC-Manager.json')
        accepted = self.source._get_data_file(accepted_path)
        self.assertEqual(accepted_path, accepted.path)
"""
CqSource
"""
class TestCqSourceInit(unittest.TestCase):
    """CqSource binds the 'cq' name and the CQ data directory."""

    def test(self):
        built = process.CqSource()
        self.assertEqual('cq', built.name)
        self.assertEqual(util.cq_data_dir, built.data_dir)
class TestCqSourceSetup(unittest.TestCase):
    """CqSource.setup: loads existing DRs from the CSV data file into
    source.drs, keyed by id, with later rows overwriting earlier ones."""

    def setUp(self):
        self.source = process.CqSource()
        self.source.data_dir.mkdir(parents=True)
        self.existing_cq_data = None
        self.expected_drs = {}

    def tearDown(self):
        util.rmtree(util.data_dir, no_exist_ok=True)

    def do_it(self):
        if self.existing_cq_data is not None:
            with util.cq_data_path.open('w', newline='') as file:
                writer = csv.DictWriter(file, fieldnames=list(self.existing_cq_data[0].keys()))
                writer.writeheader()
                writer.writerows(self.existing_cq_data)
        with patch.object(process.CqSource, '_load_event_keys'):
            self.source.setup()
            self.assertEqual(self.expected_drs, self.source.drs)

    def test_no_data_path(self):
        self.do_it()

    def test_empty_file(self):
        self.existing_cq_data = [{}]
        self.do_it()

    def test(self):
        self.existing_cq_data = [
            {'id': 'id1', 'some_field': 1},
            {'id': 'id2', 'some_field': 2},
            {'id': 'id1', 'some_field': 3}
        ]
        # csv round-trips values as strings; the duplicate 'id1' row wins.
        self.expected_drs = {
            'id1': {'id': 'id1', 'some_field': '3'},
            'id2': {'id': 'id2', 'some_field': '2'}
        }
        self.do_it()
class TestCqSourceLoadEventKeys(unittest.TestCase):
    """CqSource._load_event_keys: 'existing' keys come from the events file
    (one JSON event per line); 'new' always starts empty."""

    def setUp(self):
        self.source = process.CqSource()
        self.source.data_dir.mkdir(parents=True)
        self.existing_events = None
        self.expected_existing_keys = set()

    def tearDown(self):
        util.rmtree(util.data_dir, no_exist_ok=True)

    def do_it(self):
        keys = []
        if self.existing_events is not None:
            with util.cq_events_path.open('w') as file:
                for event in self.existing_events:
                    file.write(json.dumps(event) + '\n')
                    keys.append(event['key'])
        # get_key is stubbed to replay the keys written above, in order.
        with patch.object(process.CqCookedEvent, 'get_key') as mock_get_key:
            mock_get_key.side_effect = keys
            self.source._load_event_keys()
            self.assertEqual(self.source.event_keys, {'new': set(), 'existing': self.expected_existing_keys})

    def test_no_data_path(self):
        self.do_it()

    def test_empty_file(self):
        self.existing_events = []
        self.do_it()

    def test(self):
        self.existing_events = [
            {'key': 'event1', 'derp': 'herp'},
            {'key': 'event2', 'hello': 'hi'}
        ]
        self.expected_existing_keys = {'event1', 'event2'}
        self.do_it()
class TestCqSourceLoadNewData(unittest.TestCase):
    """CqSource.load_new_data: reads added_modified.csv, loads each DR, and
    writes the collected events exactly once."""

    def setUp(self):
        self.source = process.CqSource()
        self.pull_source_path = Path(tempfile.mkdtemp())
        self.new_data = None
        self.expected_event_ids = []

    def tearDown(self):
        util.rmtree(self.pull_source_path)

    @staticmethod
    def load_dr(dr, events):
        # Stand-in for _load_dr: record the DR id as the "event".
        events.append(dr['id'])

    def do_it(self):
        if self.new_data is not None:
            with (self.pull_source_path / 'added_modified.csv').open('w', newline='') as file:
                if self.new_data:
                    writer = csv.DictWriter(file, fieldnames=self.new_data[0].keys())
                    writer.writeheader()
                    writer.writerows(self.new_data)
        with patch.object(process.CqSource, '_load_dr') as mock_load_dr, patch.object(process.CqSource, '_write_events') as mock_write_events:
            mock_load_dr.side_effect = self.load_dr
            self.source.load_new_data(self.pull_source_path)
            mock_write_events.assert_called_once_with(self.expected_event_ids)

    def test_no_file(self):
        self.do_it()

    def test_empty_file(self):
        self.new_data = []
        self.do_it()

    def test(self):
        self.new_data = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'}
        ]
        # csv round-trips the ids as strings.
        self.expected_event_ids = ['1', '2']
        self.do_it()
class TestCqSourceLoadDr(unittest.TestCase):
    """CqSource._load_dr: one DR row either adds a new DR, is skipped (older
    timestamp than the stored DR), or modifies an existing DR (emitting one
    'modify' event per changed field). Also tracks dr_stats and the union of
    header fields seen."""

    def setUp(self):
        self.source = process.CqSource()
        self.dr = None
        self.events = []
        self.skipped = 0
        self.expected_drs = {}
        self.expected_events = []
        self.expected_stats = {'added': 0, 'skipped': 0, 'modified_drs': set()}
        self.expected_header_fields = set()

    def do_it(self):
        # Seed dr_stats consistently with the pre-existing self.events so the
        # expected deltas below are easy to state.
        # NOTE(review): events carry 'dr_id', not 'id'; the 'modify'
        # comprehension would KeyError if a modify event were ever pre-seeded
        # (none of these tests do).
        self.source.dr_stats = {'skipped': self.skipped}
        self.source.dr_stats['added'] = len([e for e in self.events if e['type'] == 'add'])
        self.source.dr_stats['modified_drs'] = set([e['id'] for e in self.events if e['type'] == 'modify'])
        self.source._load_dr(self.dr, self.events)
        self.assertEqual(self.expected_drs, self.source.drs)
        self.assertEqual(self.expected_events, self.events)
        self.assertEqual(self.expected_stats, self.source.dr_stats)
        self.assertEqual(self.expected_header_fields, self.source.header_fields)

    def test_add_no_existing(self):
        # 'history.action_timestamp' is stored as 'last_changed'.
        self.dr = {
            'history.action_timestamp': 5,
            'id': 3
        }
        self.expected_drs = {
            3: {
                'last_changed': 5,
                'id': 3
            }
        }
        self.expected_events = [
            {'type': 'add', 'dr_id': 3, 'timestamp': 5}
        ]
        self.expected_stats = {
            'added': 1,
            'skipped': 0,
            'modified_drs': set()
        }
        self.expected_header_fields = {'last_changed', 'id'}
        self.do_it()

    def test_add_existing(self):
        # New DR id alongside unrelated existing DRs: plain add.
        self.dr = {
            'history.action_timestamp': 5,
            'id': 3
        }
        self.source.drs = {
            1: {
                'last_changed': 1,
                'id': 1
            },
            2: {
                'last_changed': 3,
                'id': 2
            }
        }
        self.events = [
            {'type': 'add', 'dr_id': 2, 'timestamp': 3}
        ]
        self.expected_drs = {
            1: {
                'last_changed': 1,
                'id': 1
            },
            2: {
                'last_changed': 3,
                'id': 2
            },
            3: {
                'last_changed': 5,
                'id': 3
            }
        }
        self.expected_events = [
            {'type': 'add', 'dr_id': 2, 'timestamp': 3},
            {'type': 'add', 'dr_id': 3, 'timestamp': 5}
        ]
        self.expected_stats = {
            'added': 2,
            'skipped': 0,
            'modified_drs': set()
        }
        self.expected_header_fields = {'last_changed', 'id'}
        self.do_it()

    def test_skip(self):
        # Incoming timestamp (5) older than stored DR 3 (6): row is skipped,
        # nothing changes (header_fields stays empty too).
        self.dr = {
            'history.action_timestamp': 5,
            'id': 3
        }
        self.source.drs = {
            2: {
                'last_changed': 3,
                'id': 2
            },
            3: {
                'last_changed': 6,
                'id': 3
            }
        }
        self.events = [
            {'type': 'add', 'dr_id': 2, 'timestamp': 3},
            {'type': 'add', 'dr_id': 3, 'timestamp': 6}
        ]
        self.expected_drs = {
            2: {
                'last_changed': 3,
                'id': 2
            },
            3: {
                'last_changed': 6,
                'id': 3
            }
        }
        self.expected_events = [
            {'type': 'add', 'dr_id': 2, 'timestamp': 3},
            {'type': 'add', 'dr_id': 3, 'timestamp': 6}
        ]
        self.expected_stats = {
            'added': 2,
            'skipped': 1,
            'modified_drs': set()
        }
        self.do_it()

    def test_modify_same_timestamp(self):
        # Equal timestamp still counts as a modify when a field changed.
        self.dr = {
            'history.action_timestamp': 5,
            'id': 3,
            'hello': 'hola'
        }
        self.source.drs = {
            2: {
                'last_changed': 3,
                'id': 2
            },
            3: {
                'last_changed': 5,
                'id': 3,
                'hello': 'hi'
            }
        }
        self.events = [
            {'type': 'add', 'dr_id': 2, 'timestamp': 3},
            {'type': 'add', 'dr_id': 3, 'timestamp': 5}
        ]
        self.expected_drs = {
            2: {
                'last_changed': 3,
                'id': 2
            },
            3: {
                'last_changed': 5,
                'id': 3,
                'hello': 'hola'
            }
        }
        self.expected_events = [
            {'type': 'add', 'dr_id': 2, 'timestamp': 3},
            {'type': 'add', 'dr_id': 3, 'timestamp': 5},
            {'type': 'modify', 'dr_id': 3, 'timestamp': 5, 'change_field': 'hello', 'before': 'hi', 'after': 'hola'}
        ]
        self.expected_stats = {
            'added': 2,
            'skipped': 0,
            'modified_drs': {3}
        }
        self.expected_header_fields = {'last_changed', 'id', 'hello'}
        self.do_it()

    def test_modify_later_timestamp(self):
        # Newer timestamp: DR is updated and a modify event is emitted.
        self.dr = {
            'history.action_timestamp': 6,
            'id': 3,
            'hello': 'hola'
        }
        self.source.drs = {
            2: {
                'last_changed': 3,
                'id': 2
            },
            3: {
                'last_changed': 5,
                'id': 3,
                'hello': 'hi'
            }
        }
        self.events = [
            {'type': 'add', 'dr_id': 2, 'timestamp': 3},
            {'type': 'add', 'dr_id': 3, 'timestamp': 5}
        ]
        self.expected_drs = {
            2: {
                'last_changed': 3,
                'id': 2
            },
            3: {
                'last_changed': 6,
                'id': 3,
                'hello': 'hola'
            }
        }
        self.expected_events = [
            {'type': 'add', 'dr_id': 2, 'timestamp': 3},
            {'type': 'add', 'dr_id': 3, 'timestamp': 5},
            {'type': 'modify', 'dr_id': 3, 'timestamp': 6, 'change_field': 'hello', 'before': 'hi', 'after': 'hola'}
        ]
        self.expected_stats = {
            'added': 2,
            'skipped': 0,
            'modified_drs': {3}
        }
        self.expected_header_fields = {'last_changed', 'id', 'hello'}
        self.do_it()

    def test_modify_no_changes(self):
        # Newer timestamp but identical fields: last_changed is advanced,
        # yet no modify event is emitted and modified_drs stays empty.
        self.dr = {
            'history.action_timestamp': 6,
            'id': 3,
            'hello': 'hi'
        }
        self.source.drs = {
            2: {
                'last_changed': 3,
                'id': 2
            },
            3: {
                'last_changed': 5,
                'id': 3,
                'hello': 'hi'
            }
        }
        self.events = [
            {'type': 'add', 'dr_id': 2, 'timestamp': 3},
            {'type': 'add', 'dr_id': 3, 'timestamp': 5}
        ]
        self.expected_drs = {
            2: {
                'last_changed': 3,
                'id': 2
            },
            3: {
                'last_changed': 6,
                'id': 3,
                'hello': 'hi'
            }
        }
        self.expected_events = [
            {'type': 'add', 'dr_id': 2, 'timestamp': 3},
            {'type': 'add', 'dr_id': 3, 'timestamp': 5}
        ]
        self.expected_stats = {
            'added': 2,
            'skipped': 0,
            'modified_drs': set()
        }
        self.expected_header_fields = {'last_changed', 'id', 'hello'}
        self.do_it()
class TestCqSourceGetChanges(unittest.TestCase):
    """CqSource._get_changes: diff two DR dicts into per-field change records.
    Absent fields are represented by the '%%NONE%%' sentinel."""

    def setUp(self):
        self.old_dr = {}
        self.new_dr = {}
        self.expected_changes = []

    def do_it(self):
        actual_changes = process.CqSource._get_changes(self.old_dr, self.new_dr)
        # Order-insensitive comparison: same count, each expected change present.
        self.assertEqual(len(self.expected_changes), len(actual_changes))
        for change in self.expected_changes:
            self.assertIn(change, actual_changes)

    def test_empty_drs(self):
        self.do_it()

    def test_new_empty(self):
        self.old_dr = {'derp': 'herp', 'hi': 'hello'}
        self.expected_changes = [
            {'change_field': 'derp', 'before': 'herp', 'after': '%%NONE%%'},
            {'change_field': 'hi', 'before': 'hello', 'after': '%%NONE%%'}
        ]
        self.do_it()

    def test_old_empty(self):
        self.new_dr = {'derp': 'herp', 'hi': 'hello'}
        self.expected_changes = [
            {'change_field': 'derp', 'before': '%%NONE%%', 'after': 'herp'},
            {'change_field': 'hi', 'before': '%%NONE%%', 'after': 'hello'}
        ]
        self.do_it()

    def test_same(self):
        self.old_dr = {'derp': 'herp', 'hi': 'hello'}
        self.new_dr = {'derp': 'herp', 'hi': 'hello'}
        self.do_it()

    def test_added(self):
        self.old_dr = {'derp': 'herp', 'hi': 'hello'}
        self.new_dr = {'derp': 'herp', 'hi': 'hello', 'omg': 'ya'}
        self.expected_changes = [
            {'change_field': 'omg', 'before': '%%NONE%%', 'after': 'ya'}
        ]
        self.do_it()

    def test_removed(self):
        self.old_dr = {'derp': 'herp', 'hi': 'hello', 'omg': 'ya'}
        self.new_dr = {'derp': 'herp', 'hi': 'hello'}
        self.expected_changes = [
            {'change_field': 'omg', 'before': 'ya', 'after': '%%NONE%%'}
        ]
        self.do_it()

    def test_added_removed_changed(self):
        self.old_dr = {'derp': 'herp', 'hi': 'hello', 'omg': 'ya'}
        self.new_dr = {'derp': 'herp', 'hi': 'hola', 'wow': 'cool'}
        self.expected_changes = [
            {'change_field': 'omg', 'before': 'ya', 'after': '%%NONE%%'},
            {'change_field': 'wow', 'before': '%%NONE%%', 'after': 'cool'},
            {'change_field': 'hi', 'before': 'hello', 'after': 'hola'}
        ]
        self.do_it()
class TestCqSourceWriteEvents(unittest.TestCase):
    """CqSource._write_events: appends only events whose key is not already
    in the 'existing' or 'new' key sets, updates the 'new' set, and logs
    added/skipped counts."""

    def setUp(self):
        self.source = process.CqSource()
        util.cq_data_dir.mkdir(parents=True)
        # Scenario knobs; tests override subsets before do_it().
        self.existing_event_keys = set()
        self.new_event_keys = set()
        self.existing_events = None
        self.events = []
        self.expected_new_event_keys = set()
        self.expected_events = []
        self.expected_logged_added_events = 0
        self.expected_logged_skipped_events = 0

    def tearDown(self):
        util.rmtree(util.data_dir)

    def load_events_file(self):
        # Read back the events file (JSON-per-line) for verification.
        if not util.cq_events_path.exists():
            return []
        with util.cq_events_path.open() as file:
            return [json.loads(line) for line in file]

    def do_it(self):
        if self.existing_events is not None:
            with util.cq_events_path.open('w') as file:
                for event in self.existing_events:
                    file.write(json.dumps(event) + '\n')
        self.source.event_keys['existing'] = self.existing_event_keys
        self.source.event_keys['new'] = self.new_event_keys
        # get_key is stubbed to return each event's 'id' in order.
        get_key_values = [event['id'] for event in self.events]
        with patch.object(process.CqCookedEvent, 'get_key') as mock_get_key, self.assertLogs('CqSource', 'INFO') as logger:
            mock_get_key.side_effect = get_key_values
            self.source._write_events(self.events)
            self.assertEqual(self.expected_new_event_keys, self.source.event_keys['new'])
            self.assertEqual(self.expected_events, self.load_events_file())
            self.assertEqual('INFO:CqSource:{} added events'.format(self.expected_logged_added_events), logger.output[0])
            self.assertEqual('INFO:CqSource:{} skipped events'.format(self.expected_logged_skipped_events), logger.output[1])

    def test_no_existing_events_no_new_events(self):
        self.do_it()

    def test_no_existing_events_no_new_event_keys_some_new_events(self):
        self.events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'}
        ]
        self.expected_new_event_keys = {1, 2}
        self.expected_events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'}
        ]
        self.expected_logged_added_events = 2
        self.do_it()

    def test_no_existing_events_no_new_event_keys_some_new_events_one_duplicate(self):
        # The second id-2 event is deduplicated within the same batch.
        self.events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'},
            {'id': 2, 'greeting': 'hola'}
        ]
        self.expected_new_event_keys = {1, 2}
        self.expected_events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'}
        ]
        self.expected_logged_added_events = 2
        self.do_it()

    def test_no_existing_events_one_new_event_key_some_new_events(self):
        self.new_event_keys = {1}
        self.existing_events = [
            {'id': 1, 'greeting': 'hi'}
        ]
        self.events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'},
            {'id': 3, 'greeting': 'hola'}
        ]
        self.expected_new_event_keys = {1, 2, 3}
        self.expected_events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'},
            {'id': 3, 'greeting': 'hola'}
        ]
        self.expected_logged_added_events = 2
        self.do_it()

    def test_one_existing_event_no_new_event_keys_some_new_events(self):
        # An event already in 'existing' is skipped (and logged as such).
        self.existing_event_keys = {1}
        self.existing_events = [
            {'id': 1, 'greeting': 'hi'}
        ]
        self.events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'},
            {'id': 3, 'greeting': 'hola'}
        ]
        self.expected_new_event_keys = {2, 3}
        self.expected_events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'},
            {'id': 3, 'greeting': 'hola'}
        ]
        self.expected_logged_added_events = 2
        self.expected_logged_skipped_events = 1
        self.do_it()

    def test_one_existing_event_one_new_event_key_one_new_event(self):
        self.existing_event_keys = {1}
        self.new_event_keys = {2}
        self.existing_events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'}
        ]
        self.events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'},
            {'id': 3, 'greeting': 'hola'}
        ]
        self.expected_new_event_keys = {2, 3}
        self.expected_events = [
            {'id': 1, 'greeting': 'hi'},
            {'id': 2, 'greeting': 'hello'},
            {'id': 3, 'greeting': 'hola'}
        ]
        self.expected_logged_added_events = 1
        self.expected_logged_skipped_events = 1
        self.do_it()
class TestCqSourceWriteDrs(unittest.TestCase):
    """CqSource._write_drs: rewrites the CSV data file from source.drs
    (index delete/create are patched out)."""

    def setUp(self):
        self.source = process.CqSource()
        util.cq_data_dir.mkdir(parents=True)
        self.existing_drs = []
        self.expected_drs = []

    def tearDown(self):
        util.rmtree(util.data_dir)

    def do_it(self):
        if self.existing_drs:
            with util.cq_data_path.open('w', newline='') as file:
                writer = csv.DictWriter(file, fieldnames=self.existing_drs[0].keys())
                writer.writeheader()
                writer.writerows(self.existing_drs)
        with patch.object(process.Processor, 'delete_index'), patch.object(process.Processor, 'create_index'):
            self.source._write_drs()
        # Read back what was written and compare order-insensitively.
        with util.cq_data_path.open('r', newline='') as file:
            reader = csv.DictReader(file)
            actual_drs = [row for row in reader]
        self.assertEqual(len(self.expected_drs), len(actual_drs))
        for dr in self.expected_drs:
            self.assertIn(dr, actual_drs)

    def test_no_drs(self):
        self.do_it()

    def test_no_existing_file(self):
        self.source.header_fields = ['id', 'greeting']
        self.source.drs = {
            '1': {
                'id': '1',
                'greeting': 'hi'
            },
            '2': {
                'id': '2',
                'greeting': 'hello'
            }
        }
        self.expected_drs = [
            {
                'id': '1',
                'greeting': 'hi'
            },
            {
                'id': '2',
                'greeting': 'hello'
            }
        ]
        self.do_it()

    def test_existing_file(self):
        # The pre-existing file is fully replaced by the current drs dict.
        self.existing_drs = [
            {
                'id': '1',
                'greeting': 'hi'
            },
            {
                'id': '2',
                'greeting': 'hello'
            }
        ]
        self.source.header_fields = ['id', 'greeting']
        self.source.drs = {
            '1': {
                'id': '1',
                'greeting': 'hi'
            },
            '2': {
                'id': '2',
                'greeting': 'hello'
            },
            '3': {
                'id': '3',
                'greeting': 'hola'
            }
        }
        self.expected_drs = [
            {
                'id': '1',
                'greeting': 'hi'
            },
            {
                'id': '2',
                'greeting': 'hello'
            },
            {
                'id': '3',
                'greeting': 'hola'
            }
        ]
        self.do_it()
"""
CqSourceOld
"""
class TestCqSourceOldInit(unittest.TestCase):
    """CqSourceOld's constructor wires up the expected name and data directory."""

    def test(self):
        old_source = process.CqSourceOld()
        self.assertEqual('cq_old', old_source.name)
        self.assertEqual(util.cq_data_dir, old_source.data_dir)
class TestCqSourceOldLoadExistingData(unittest.TestCase):
    """Tests for CqSourceOld.load_existing_data: existing DR and changed-files
    CSVs are read back into orig_data / orig_changed_files."""
    def setUp(self):
        self.source = process.CqSourceOld()
        # None means "do not create the corresponding CSV file at all",
        # as opposed to [] which would create an empty (header-only) file.
        self.drs = None
        self.changed_files = None
        self.expected_orig_cq_data = {}
        self.expected_orig_cq_changed_files = {}
    def tearDown(self):
        util.rmtree(util.data_dir, no_exist_ok=True)
    def do_it(self):
        """Write the fixture CSVs (if provided), load them, then assert."""
        util.cq_data_dir.mkdir(parents=True)
        if self.drs is not None:
            with util.cq_data_path_old.open('w', newline='') as file:
                writer = csv.DictWriter(file, fieldnames=['id', 'greeting'])
                writer.writeheader()
                writer.writerows(self.drs)
        if self.changed_files is not None:
            with util.cq_changed_files_path.open('w', newline='') as file:
                writer = csv.DictWriter(file, fieldnames=['id', 'file'])
                writer.writeheader()
                writer.writerows(self.changed_files)
        self.source.load_existing_data()
        self.make_asserts()
    def make_asserts(self):
        self.assertEqual(self.expected_orig_cq_data, self.source.orig_data)
        self.assertEqual(self.expected_orig_cq_changed_files, self.source.orig_changed_files)
    def test_no_data_files(self):
        """Neither CSV exists: both loaded structures stay empty."""
        self.do_it()
    def test_cq_data(self):
        """DR rows are keyed by their 'id' column."""
        self.drs = [
            {'id': '0', 'greeting': 'hello'},
            {'id': '1', 'greeting': 'hi'},
            {'id': '2', 'greeting': 'hola'}
        ]
        self.expected_orig_cq_data = {
            '0': {'id': '0', 'greeting': 'hello'},
            '1': {'id': '1', 'greeting': 'hi'},
            '2': {'id': '2', 'greeting': 'hola'}
        }
        self.do_it()
    def test_cq_changed_files(self):
        """Changed-file rows are grouped per id; the duplicate ('2', 'herp')
        row shows the loader is expected to de-duplicate file names."""
        self.changed_files = [
            {'id': '0', 'file': 'hello'},
            {'id': '0', 'file': 'derp'},
            {'id': '1', 'file': 'hi'},
            {'id': '2', 'file': 'hola'},
            {'id': '2', 'file': 'como estas'},
            {'id': '2', 'file': 'herp'},
            {'id': '2', 'file': 'herp'}
        ]
        self.expected_orig_cq_changed_files = {
            '0': ['hello', 'derp'],
            '1': ['hi'],
            '2': ['hola', 'como estas', 'herp']
        }
        self.do_it()
    def test_both(self):
        """Both CSVs present: both structures are populated independently."""
        self.drs = [
            {'id': '0', 'greeting': 'hello'},
            {'id': '1', 'greeting': 'hi'},
            {'id': '2', 'greeting': 'hola'}
        ]
        self.changed_files = [
            {'id': '0', 'file': 'hello'},
            {'id': '0', 'file': 'derp'},
            {'id': '1', 'file': 'hi'},
            {'id': '2', 'file': 'hola'},
            {'id': '2', 'file': 'como estas'},
            {'id': '2', 'file': 'herp'}
        ]
        self.expected_orig_cq_data = {
            '0': {'id': '0', 'greeting': 'hello'},
            '1': {'id': '1', 'greeting': 'hi'},
            '2': {'id': '2', 'greeting': 'hola'}
        }
        self.expected_orig_cq_changed_files = {
            '0': ['hello', 'derp'],
            '1': ['hi'],
            '2': ['hola', 'como estas', 'herp']
        }
        self.do_it()
class TestCqSourceOldLoadNewData(unittest.TestCase):
    """Tests for CqSourceOld.load_new_data: a pull CSV is merged into
    new_data / new_changed_files, accumulating across calls."""

    def setUp(self):
        self.source = process.CqSourceOld()
        util.data_dir.mkdir(parents=True)
        self.file_path = util.data_dir / 'text.csv'
        self.file_path.touch()
        self.drs = []
        self.expected_new_data = {}
        self.expected_new_changed_files = {}

    def tearDown(self):
        util.rmtree(util.data_dir)

    def do_it(self):
        """Write self.drs to the fixture CSV, load it, then assert the results."""
        if self.drs:
            fields = self.drs[0].keys()
            with self.file_path.open('w', newline='') as file:
                writer = csv.DictWriter(file, fieldnames=fields)
                writer.writeheader()
                writer.writerows(self.drs)
        self.source.load_new_data(self.file_path)
        # BUG FIX: make_asserts() was defined but never invoked, so every test
        # in this class passed without checking expected_new_data or
        # expected_new_changed_files (compare TestCqSourceOldLoadExistingData.do_it,
        # which does call it).
        self.make_asserts()

    def make_asserts(self):
        self.assertEqual(self.expected_new_data, self.source.new_data)
        self.assertEqual(self.expected_new_changed_files, self.source.new_changed_files)

    def test_no_new_no_old(self):
        """Empty CSV, empty prior state: nothing loaded."""
        self.do_it()

    def test_no_new_some_old(self):
        """Empty CSV leaves previously-loaded state untouched."""
        self.source.new_data = {
            'id0': {'id': 'id0'},
            'id1': {'id': 'id1'}
        }
        self.source.new_changed_files = {
            'id0': ['f1', 'f2'],
            'id1': ['f3', 'f4']
        }
        self.expected_new_data = copy.deepcopy(self.source.new_data)
        self.expected_new_changed_files = copy.deepcopy(self.source.new_changed_files)
        self.do_it()

    def test_some_new_no_old(self):
        """Rows sharing an id collapse to one DR; their filenames accumulate."""
        self.drs = [
            {'id': 'id0', 'RTCC_ChangeSet.FileList.Filename': 'f1'},
            {'id': 'id0', 'RTCC_ChangeSet.FileList.Filename': 'f2'},
            {'id': 'id1', 'RTCC_ChangeSet.FileList.Filename': 'f3'}
        ]
        self.expected_new_data = {
            'id0': {'id': 'id0'},
            'id1': {'id': 'id1'}
        }
        self.expected_new_changed_files = {
            'id0': ['f1', 'f2'],
            'id1': ['f3']
        }
        self.do_it()

    def test_some_new_some_old(self):
        """New rows merge into existing state; duplicate (id1, f4) is not repeated."""
        self.source.new_data = {
            'id0': {'id': 'id0'},
            'id1': {'id': 'id1'}
        }
        self.source.new_changed_files = {
            'id0': ['f1', 'f2'],
            'id1': ['f3', 'f4']
        }
        self.drs = [
            {'id': 'id1', 'RTCC_ChangeSet.FileList.Filename': 'f4'},
            {'id': 'id1', 'RTCC_ChangeSet.FileList.Filename': 'f5'},
            {'id': 'id2', 'RTCC_ChangeSet.FileList.Filename': 'f6'},
            {'id': 'id2', 'RTCC_ChangeSet.FileList.Filename': 'f7'},
            {'id': 'id3', 'RTCC_ChangeSet.FileList.Filename': 'f8'}
        ]
        self.expected_new_data = {
            'id0': {'id': 'id0'},
            'id1': {'id': 'id1'},
            'id2': {'id': 'id2'},
            'id3': {'id': 'id3'}
        }
        self.expected_new_changed_files = {
            'id0': ['f1', 'f2'],
            'id1': ['f3', 'f4', 'f5'],
            'id2': ['f6', 'f7'],
            'id3': ['f8']
        }
        self.do_it()
class TestCqSourceOldProcess(unittest.TestCase):
    """Placeholder: tests for CqSourceOld.process have not been written yet."""
    # TODO
    pass
class TestCqSourceOldGetUpdatedData(unittest.TestCase):
    """Tests for CqSourceOld._get_updated_data: new_data is merged over
    orig_data, counting added / updated / skipped (unchanged) rows."""
    def setUp(self):
        self.source = process.CqSourceOld()
        self.expected_added = 0
        self.expected_updated = 0
        self.expected_skipped = 0
        self.expected_updated_data = {}
    def do_it(self):
        """Run the merge and verify counts plus the merged mapping."""
        added_rows, updated_rows, skipped_rows, updated_data = self.source._get_updated_data()
        self.make_asserts(added_rows, updated_rows, skipped_rows, updated_data)
    def make_asserts(self, added, updated, skipped, updated_data):
        self.assertEqual(self.expected_added, added)
        self.assertEqual(self.expected_updated, updated)
        self.assertEqual(self.expected_skipped, skipped)
        self.assertEqual(self.expected_updated_data, updated_data)
    def test_empty_cq_data(self):
        """No new data: the original mapping comes back unchanged."""
        self.source.orig_data = {'derp': 'herp'}
        self.expected_updated_data = copy.deepcopy(self.source.orig_data)
        self.do_it()
    def test_no_new(self):
        """Every new row matches an identical original row: all skipped."""
        self.source.orig_data = {
            'id0': {'id': 'id0', 'greeting': 'hi'},
            'id1': {'id': 'id1', 'greeting': 'hello'},
            'id2': {'id': 'id2', 'greeting': 'hola'}
        }
        self.source.new_data = {
            'id1': {'id': 'id1', 'greeting': 'hello'},
            'id2': {'id': 'id2', 'greeting': 'hola'}
        }
        self.expected_skipped = 2
        self.expected_updated_data = copy.deepcopy(self.source.orig_data)
        self.do_it()
    def test_one_updated(self):
        """id1 differs from its original row: counted as updated, value replaced."""
        self.source.orig_data = {
            'id0': {'id': 'id0', 'greeting': 'hi'},
            'id1': {'id': 'id1', 'greeting': 'hello'},
            'id2': {'id': 'id2', 'greeting': 'hola'}
        }
        self.source.new_data = {
            'id1': {'id': 'id1', 'greeting': 'derp'},
            'id2': {'id': 'id2', 'greeting': 'hola'}
        }
        self.expected_updated = 1
        self.expected_skipped = 1
        self.expected_updated_data = {
            'id0': {'id': 'id0', 'greeting': 'hi'},
            'id1': {'id': 'id1', 'greeting': 'derp'},
            'id2': {'id': 'id2', 'greeting': 'hola'}
        }
        self.do_it()
    def test_one_added(self):
        """id3 is absent from orig_data: counted as added."""
        self.source.orig_data = {
            'id0': {'id': 'id0', 'greeting': 'hi'},
            'id1': {'id': 'id1', 'greeting': 'hello'},
            'id2': {'id': 'id2', 'greeting': 'hola'}
        }
        self.source.new_data = {
            'id1': {'id': 'id1', 'greeting': 'hello'},
            'id2': {'id': 'id2', 'greeting': 'hola'},
            'id3': {'id': 'id3', 'greeting': 'herp'}
        }
        self.expected_added = 1
        self.expected_skipped = 2
        self.expected_updated_data = {
            'id0': {'id': 'id0', 'greeting': 'hi'},
            'id1': {'id': 'id1', 'greeting': 'hello'},
            'id2': {'id': 'id2', 'greeting': 'hola'},
            'id3': {'id': 'id3', 'greeting': 'herp'}
        }
        self.do_it()
    def test_one_updated_one_added(self):
        """Combination: id1 updated, id3 added, id2 unchanged (skipped)."""
        self.source.orig_data = {
            'id0': {'id': 'id0', 'greeting': 'hi'},
            'id1': {'id': 'id1', 'greeting': 'hello'},
            'id2': {'id': 'id2', 'greeting': 'hola'}
        }
        self.source.new_data = {
            'id1': {'id': 'id1', 'greeting': 'derp'},
            'id2': {'id': 'id2', 'greeting': 'hola'},
            'id3': {'id': 'id3', 'greeting': 'herp'}
        }
        self.expected_added = 1
        self.expected_updated = 1
        self.expected_skipped = 1
        self.expected_updated_data = {
            'id0': {'id': 'id0', 'greeting': 'hi'},
            'id1': {'id': 'id1', 'greeting': 'derp'},
            'id2': {'id': 'id2', 'greeting': 'hola'},
            'id3': {'id': 'id3', 'greeting': 'herp'}
        }
        self.do_it()
class TestCqSourceOldDictCompare(unittest.TestCase):
    """CqSourceOld._dict_compare: equality of key/value sets, ignoring insertion order."""

    def test_both_empty(self):
        self.assertTrue(process.CqSourceOld._dict_compare({}, {}))

    def test_first_empty(self):
        self.assertFalse(process.CqSourceOld._dict_compare({'derp': 'herp'}, {}))

    def test_second_empty(self):
        self.assertFalse(process.CqSourceOld._dict_compare({}, {'derp': 'herp'}))

    def test_same_order(self):
        self.assertTrue(process.CqSourceOld._dict_compare({'derp': 1, 'herp': 2}, {'derp': 1, 'herp': 2}))

    def test_different_values(self):
        self.assertFalse(process.CqSourceOld._dict_compare({'derp': 1, 'herp': 2}, {'derp': 1, 'herp': 5}))

    def test_different_order(self):
        # Key order must not matter.
        self.assertTrue(process.CqSourceOld._dict_compare({'derp': 1, 'herp': 2}, {'herp': 2, 'derp': 1}))

    def test_more(self):
        # An extra key in either dict makes them unequal.
        self.assertFalse(process.CqSourceOld._dict_compare({'derp': 1, 'herp': 2}, {'derp': 1, 'herp': 2, 'lerp': 3}))
class TestCqSourceOldWriteData(unittest.TestCase):
    """Tests for CqSourceOld._write_data: events are appended to the old CQ CSV,
    tolerating different key orders and late-appearing fields."""

    def setUp(self):
        self.source = process.CqSourceOld()
        self.source.data_dir.mkdir(parents=True)

    def tearDown(self):
        util.rmtree(util.data_dir)

    def _assert_id_time_rows(self):
        """Every data row carries a myid* value under 'id' and mytime* under 'time',
        wherever those columns ended up in the header."""
        # Single pass: read the header and then the data rows from one open file
        # (the original test re-opened the file a second time just to skip the header).
        with util.cq_data_path_old.open() as file:
            header_fields = file.readline().strip().split(',')
            id_index = header_fields.index('id')
            time_index = header_fields.index('time')
            for line in file:
                fields = line.split(',')
                self.assertIn('myid', fields[id_index])
                self.assertIn('mytime', fields[time_index])

    def _assert_merged_rows(self):
        """Header is exactly {id, time, derp}; rows lacking 'derp' have an empty cell."""
        with util.cq_data_path_old.open() as file:
            header = set(file.readline().strip().split(','))
            self.assertEqual({'id', 'time', 'derp'}, header)
            lines = [set(line.strip().split(',')) for line in file]
            self.assertEqual([
                {'myid1', 'mytime15', ''},
                {'myid2', 'mytime20', ''},
                {'myid3', 'mytime50', 'herp'}
            ], lines)

    def test_different_header_order_one_write(self):
        events = [
            {'id': 'myid1', 'time': 'mytime15'},
            {'id': 'myid2', 'time': 'mytime20'},
            {'time': 'mytime50', 'id': 'myid3'},
            {'time': 'mytime55', 'id': 'myid4'},
            {'id': 'myid5', 'time': 'mytime62'}
        ]
        self.source._write_data(events)
        self._assert_id_time_rows()

    def test_different_header_order_multiple_writes(self):
        events = [
            {'id': 'myid1', 'time': 'mytime15'},
            {'id': 'myid2', 'time': 'mytime20'}
        ]
        self.source._write_data(events)
        events2 = [
            {'id': 'myid1', 'time': 'mytime15'},
            {'id': 'myid2', 'time': 'mytime20'},
            {'time': 'mytime50', 'id': 'myid3'},
            {'id': 'myid4', 'time': 'mytime55'}
        ]
        self.source._write_data(events2)
        self._assert_id_time_rows()

    def test_more_fields_one_write(self):
        events = [
            {'id': 'myid1', 'time': 'mytime15'},
            {'id': 'myid2', 'time': 'mytime20'},
            {'time': 'mytime50', 'id': 'myid3', 'derp': 'herp'}
        ]
        self.source._write_data(events)
        self._assert_merged_rows()

    def test_more_fields_multiple_writes(self):
        events = [
            {'id': 'myid1', 'time': 'mytime15'},
            {'id': 'myid2', 'time': 'mytime20'}
        ]
        self.source._write_data(events)
        events2 = [
            {'id': 'myid1', 'time': 'mytime15'},
            {'id': 'myid2', 'time': 'mytime20'},
            {'time': 'mytime50', 'id': 'myid3', 'derp': 'herp'}
        ]
        self.source._write_data(events2)
        self._assert_merged_rows()
class TestCqSourceOldWriteChangedFilesData(unittest.TestCase):
    """Tests for CqSourceOld._write_changed_files_data: the (id, file) CSV keeps
    a stable header across repeated appends."""

    def setUp(self):
        self.source = process.CqSourceOld()
        self.source.data_dir.mkdir(parents=True)

    def tearDown(self):
        util.rmtree(util.data_dir)

    def test_different_header_order(self):
        self.source.new_changed_files = {
            'myid1': [
                'myfile1',
                'myfile2'
            ],
            'myid2': [
                'myfile3'
            ]
        }
        self.source._write_changed_files_data()
        with util.cq_changed_files_path.open() as file:
            header = file.readline().strip()
            fields = header.split(',')
            # Was a bare `assert`; unittest assertions survive `python -O` and
            # match the rest of this module's style.
            self.assertEqual(['id', 'file'], fields)
        # Two further appends must reuse the same header and column order.
        self.source.new_changed_files = {
            'myid3': [
                'myfile4',
                'myfile5'
            ]
        }
        self.source._write_changed_files_data()
        self.source.new_changed_files = {
            'myid4': [
                'myfile6'
            ]
        }
        self.source._write_changed_files_data()
        with util.cq_changed_files_path.open() as file:
            file.readline()
            for line in file:
                fields = line.split(',')
                self.assertIn('myid', fields[0])
                self.assertIn('myfile', fields[1])
"""
VicStatusSource
"""
class TestVicStatusSourceInit(unittest.TestCase):
    """VicStatusSource's constructor wires up the expected name and data directory."""

    def test(self):
        status_source = process.VicStatusSource()
        self.assertEqual('vic_status', status_source.name)
        self.assertEqual(util.vic_status_data_dir, status_source.data_dir)
class TestVicStatusSourceGetDataFile(unittest.TestCase):
    """VicStatusSource._get_data_file wraps a path in a data-file object."""

    def setUp(self):
        self.source = process.VicStatusSource()

    def test(self):
        input_path = Path('path/hi.txt')
        data_file = self.source._get_data_file(input_path)
        self.assertEqual(input_path, data_file.path)
"""
Archive
"""
class TestArchiveGetDefaultInstance(unittest.TestCase):
    """Archives dated before the first production date default to Development;
    on/after it they default to Production."""
    # first default prod date is 2018-01-23

    def test_before_date(self):
        archive_names = ['17-12-05_NewData.zip', '18-01-18_NewData.zip', '18-01-22_NewData.zip']
        for archive_name in archive_names:
            # subTest so a failure names the offending archive instead of
            # silently aborting the loop on the first bad name.
            with self.subTest(archive_name=archive_name):
                archive = process.Archive(archive_name, None)
                self.assertEqual(archive.default_instance, 'Development')

    def test_after_date(self):
        archive_names = ['18-01-23_NewData.zip', '18-02-01_NewData.zip', '19-01-10_NewData.zip']
        for archive_name in archive_names:
            with self.subTest(archive_name=archive_name):
                archive = process.Archive(archive_name, None)
                self.assertEqual(archive.default_instance, 'Production')
class TestArchiveLoad(unittest.TestCase):
    """Tests for Archive.load: components are extracted, pull dirs processed
    (optionally in reverse), FT info and CQ data loaded, and the temp dir removed."""
    def setUp(self):
        self.path = Path('archive/path.zip')
        self.sources = {'cq_old': process.CqSourceOld()}
        self.default_instance = 'def_ins'
        self.pull_dir_1 = 'p1'
        self.pull_dir_2 = 'p2'
        # NOTE(review): overwritten with None a few lines below — this
        # assignment appears dead; confirm before removing.
        self.cq_file_path = 'cq_file_path'
        with patch.object(process.Archive, '_get_default_instance', return_value=self.default_instance):
            self.archive = process.Archive(self.path, self.sources)
        util.collected_dir.mkdir(parents=True)
        util.archive_dir.mkdir(parents=True)
        self.archives = []
        self.pull_dir_paths = None
        self.cq_file_path = None
        self.archive_temp_dir = None
        self.reverse = False
        self.expected_get_components_args = []
        self.expected_pull_dirs_processed = []
        self.expected_ft_info_process_called = False
        self.expected_cq_load_data_args = None
        self.expected_rmtree_args = None
    def tearDown(self):
        util.rmtree(util.data_dir)
    def do_it(self):
        """Run Archive.load with all collaborators mocked, then assert call patterns."""
        with patch.object(process.Archive, '_get_components') as mock_get_components, \
                patch.object(process.Archive, '_process_pull_dir') as mock_process_pull_dir, \
                patch.object(process.FtInfo, 'process') as mock_ft_info_process, \
                patch.object(process.CqSourceOld, 'load_new_data') as mock_cq_load_new_data, \
                patch.object(Path, 'replace'), \
                patch.object(util.__class__, 'rmtree') as mock_rmtree:
            self.set_mocks(mock_get_components)
            self.archive.load(self.archives, self.reverse)
            self.make_asserts(mock_get_components, mock_process_pull_dir, mock_ft_info_process, mock_cq_load_new_data, mock_rmtree)
    def set_mocks(self, mock_get_components):
        mock_get_components.return_value = self.pull_dir_paths, self.cq_file_path, self.archive_temp_dir
    def make_asserts(self, mock_get_components, mock_process_pull_dir, mock_ft_info_process, mock_cq_load_new_data, mock_rmtree):
        mock_get_components.assert_called_once_with(self.expected_get_components_args)
        # First positional arg of each _process_pull_dir call, in call order.
        processed_pull_dirs = [args_list[0][0] for args_list in mock_process_pull_dir.call_args_list]
        self.assertEqual(self.expected_pull_dirs_processed, processed_pull_dirs)
        if self.expected_ft_info_process_called:
            self.assertEqual(1, mock_ft_info_process.call_count)
        else:
            mock_ft_info_process.assert_not_called()
        if self.expected_cq_load_data_args is not None:
            mock_cq_load_new_data.assert_called_once_with(self.expected_cq_load_data_args)
        else:
            mock_cq_load_new_data.assert_not_called()
        if self.expected_rmtree_args is not None:
            mock_rmtree.assert_called_once_with(self.expected_rmtree_args[0], no_exist_ok=self.expected_rmtree_args[1])
        else:
            mock_rmtree.assert_not_called()
    def test_none(self):
        """No components: nothing processed, loaded, or removed."""
        self.do_it()
    def test(self):
        """Full pipeline in forward pull-dir order."""
        self.archives = 'cool archives, dude'
        self.pull_dir_paths = ['pull_dir1', 'pull_dir2']
        self.cq_file_path = 'cq_path.csv'
        self.archive_temp_dir = 'archive_temp_dir'
        self.expected_get_components_args = self.archives
        self.expected_pull_dirs_processed = self.pull_dir_paths
        self.expected_ft_info_process_called = True
        self.expected_cq_load_data_args = self.cq_file_path
        self.expected_rmtree_args = (self.archive_temp_dir, True)
        self.do_it()
    def test_reverse(self):
        """reverse=True processes pull dirs in reversed order."""
        self.archives = 'cool archives, dude'
        self.pull_dir_paths = ['pull_dir1', 'pull_dir2']
        self.cq_file_path = 'cq_path.csv'
        self.archive_temp_dir = 'archive_temp_dir'
        self.reverse = True
        self.expected_get_components_args = self.archives
        self.expected_pull_dirs_processed = ['pull_dir2', 'pull_dir1']
        self.expected_ft_info_process_called = True
        self.expected_cq_load_data_args = self.cq_file_path
        self.expected_rmtree_args = (self.archive_temp_dir, True)
        self.do_it()
class TestArchiveGetComponents(unittest.TestCase):
    """Tests for Archive._get_components: plain folders are cleaned up when a
    matching .zip exists; archives are extracted and partitioned into pull
    dirs and the CQ file."""
    def setUp(self):
        self.path = Path('dummy_path')
        self.default_instance = 'def_ins'
        with patch.object(process.Archive, '_get_default_instance', return_value=self.default_instance):
            self.archive = process.Archive(self.path, None)
        self.archives = []
        self.archive_temp_dir = None
        self.pull_dir_names = None
        self.cq_file_name = None
        self.expected_rmtree_args = None
        self.expected_extract_archive_args = None
        self.expected_get_paths_args = None
        self.expected_pull_dir_paths = None
        self.expected_cq_file_path = None
        self.expected_archive_temp_dir = None
    def tearDown(self):
        util.rmtree(util.data_dir, no_exist_ok=True)
    def do_it(self):
        """Lay out the fixture directory tree, run _get_components with its
        collaborators mocked, then assert results and call patterns."""
        if self.archive_temp_dir:
            self.archive_temp_dir.mkdir(parents=True)
            if self.pull_dir_names:
                for pull_dir_name in self.pull_dir_names:
                    pull_dir_path = self.archive_temp_dir / pull_dir_name
                    pull_dir_path.mkdir()
            if self.cq_file_name:
                cq_file_path = self.archive_temp_dir / self.cq_file_name
                cq_file_path.open('w').close()
        with patch.object(util.__class__, 'rmtree') as mock_rmtree, \
                patch.object(process.Archive, '_extract') as mock_extract, \
                patch.object(process.Archive, '_get_pull_data_paths') as mock_get_paths:
            self.set_mocks(mock_extract, mock_get_paths)
            pull_dir_paths, cq_file_path, archive_temp_dir = self.archive._get_components(self.archives)
        self.make_asserts(pull_dir_paths, cq_file_path, archive_temp_dir, mock_rmtree, mock_extract, mock_get_paths)
    def set_mocks(self, mock_extract, mock_get_paths):
        mock_extract.return_value = self.archive_temp_dir
        mock_get_paths.return_value = self.pull_dir_names, self.cq_file_name
    def make_asserts(self, pull_dir_paths, cq_file_path, archive_temp_dir, mock_rmtree, mock_extract, mock_get_paths):
        self.assertEqual(self.expected_pull_dir_paths, pull_dir_paths)
        self.assertEqual(self.expected_cq_file_path, cq_file_path)
        self.assertEqual(self.expected_archive_temp_dir, archive_temp_dir)
        if self.expected_rmtree_args is not None:
            mock_rmtree.assert_called_once_with(self.expected_rmtree_args)
        else:
            mock_rmtree.assert_not_called()
        if self.expected_extract_archive_args is not None:
            mock_extract.assert_called_once_with(self.expected_extract_archive_args)
        else:
            mock_extract.assert_not_called()
        if self.expected_get_paths_args is not None:
            self.assertEqual(1, mock_get_paths.call_count)
            # Directory listing order is filesystem-dependent, so compare as sets.
            args = mock_get_paths.call_args[0][0]
            self.assertEqual(set(self.expected_get_paths_args), set(args))
        else:
            mock_get_paths.assert_not_called()
    def test_folder_with_no_matching_archive(self):
        """A plain folder with no sibling .zip is left alone and yields nothing."""
        self.archive.path = util.collected_dir / 'a1'
        self.archive.name = 'a1'
        self.archives = [
            util.collected_dir / 'a1',
            util.collected_dir / 'a2.zip'
        ]
        self.do_it()
    def test_folder_with_matching_archive(self):
        """A plain folder shadowed by a matching .zip is removed."""
        self.archive.path = util.collected_dir / 'a1'
        self.archive.name = 'a1'
        self.archives = [
            util.collected_dir / 'a1',
            util.collected_dir / 'a1.zip',
            util.collected_dir / 'a2.zip'
        ]
        self.expected_rmtree_args = self.archive.path
        self.do_it()
    def test_archive(self):
        """A .zip is extracted and its contents split into pull dirs + CQ file."""
        self.pull_dir_names = ['d1', 'd2']
        self.cq_file_name = 'cq.csv'
        self.archive.path = util.collected_dir / 'a1.zip'
        self.archive.name = 'a1.zip'
        self.archive_temp_dir = util.collected_dir / 'a1'
        self.archives = [
            util.collected_dir / 'a1.zip',
            util.collected_dir / 'a2.zip'
        ]
        self.expected_extract_archive_args = self.archives
        self.expected_get_paths_args = [self.archive_temp_dir / dir_name for dir_name in self.pull_dir_names] + [self.archive_temp_dir / self.cq_file_name]
        self.expected_pull_dir_paths = self.pull_dir_names
        self.expected_cq_file_path = self.cq_file_name
        self.expected_archive_temp_dir = self.archive_temp_dir
        self.do_it()
class TestArchiveExtract(unittest.TestCase):
    """Tests for Archive._extract: the archive is unzipped next to itself and
    the (mutated) archives list ends up holding only the .zip paths."""

    def setUp(self):
        self.path = Path('dummy_path')
        self.default_instance = 'def_ins'
        with patch.object(process.Archive, '_get_default_instance', return_value=self.default_instance):
            self.archive = process.Archive(self.path, None)
        util.collected_dir.mkdir(parents=True)
        self.archives = []
        self.expected_archive_temp_path = None
        self.expected_temp_path_exists = False
        self.expected_archive_contents = []
        self.expected_archives = []

    def tearDown(self):
        util.rmtree(util.data_dir)

    def do_it(self):
        """Build two real one-member zips, run _extract, then assert."""
        # pathlib instead of str concatenation / open().close() / os.remove;
        # ZipFile accepts path-like objects directly.
        scratch = Path('test.txt')
        scratch.touch()
        with zipfile.ZipFile(util.collected_dir / 'a1.zip', mode='w') as archive:
            archive.write('test.txt')
        with zipfile.ZipFile(util.collected_dir / 'a2.zip', mode='w') as archive:
            archive.write('test.txt')
        scratch.unlink()
        archive_temp_path = self.archive._extract(self.archives)
        self.make_asserts(archive_temp_path)

    def make_asserts(self, archive_temp_path):
        self.assertEqual(self.expected_archive_temp_path, archive_temp_path)
        self.assertEqual(self.expected_temp_path_exists, self.expected_archive_temp_path.exists())
        self.assertEqual(self.expected_archive_contents, util.listdir(archive_temp_path))
        self.assertEqual(self.expected_archives, self.archives)

    def test(self):
        """Plain extraction with no pre-existing temp dir."""
        self.archive.path = util.collected_dir / 'a1.zip'
        self.archive.name = 'a1.zip'
        self.archives = [
            util.collected_dir / 'a1.zip',
            util.collected_dir / 'a2.zip'
        ]
        self.expected_archive_temp_path = util.collected_dir / 'a1'
        self.expected_temp_path_exists = True
        self.expected_archive_contents = ['test.txt']
        self.expected_archives = [
            util.collected_dir / 'a1.zip',
            util.collected_dir / 'a2.zip'
        ]
        self.do_it()

    def test_dir_exists_not_in_archives(self):
        """The temp dir already exists but is not tracked in archives."""
        self.archive.path = util.collected_dir / 'a1.zip'
        self.archive.name = 'a1.zip'
        self.archives = [
            util.collected_dir / 'a1.zip',
            util.collected_dir / 'a2.zip'
        ]
        self.expected_archive_temp_path = util.collected_dir / 'a1'
        self.expected_temp_path_exists = True
        self.expected_archive_contents = ['test.txt']
        self.expected_archives = [
            util.collected_dir / 'a1.zip',
            util.collected_dir / 'a2.zip'
        ]
        self.expected_archive_temp_path.mkdir()
        self.do_it()

    def test_dir_exists_in_archives(self):
        """The temp dir exists and is listed in archives: it is dropped from the list."""
        self.archive.path = util.collected_dir / 'a1.zip'
        self.archive.name = 'a1.zip'
        self.archives = [
            util.collected_dir / 'a1',
            util.collected_dir / 'a1.zip',
            util.collected_dir / 'a2.zip'
        ]
        self.expected_archive_temp_path = util.collected_dir / 'a1'
        self.expected_temp_path_exists = True
        self.expected_archive_contents = ['test.txt']
        self.expected_archives = [
            util.collected_dir / 'a1.zip',
            util.collected_dir / 'a2.zip'
        ]
        self.expected_archive_temp_path.mkdir()
        self.do_it()

    def test_dir_not_exists_in_archives(self):
        """The temp dir is listed in archives but absent on disk: still dropped."""
        self.archive.path = util.collected_dir / 'a1.zip'
        self.archive.name = 'a1.zip'
        self.archives = [
            util.collected_dir / 'a1',
            util.collected_dir / 'a1.zip',
            util.collected_dir / 'a2.zip'
        ]
        self.expected_archive_temp_path = util.collected_dir / 'a1'
        self.expected_temp_path_exists = True
        self.expected_archive_contents = ['test.txt']
        self.expected_archives = [
            util.collected_dir / 'a1.zip',
            util.collected_dir / 'a2.zip'
        ]
        self.do_it()
class TestArchiveGetPullDataPaths(unittest.TestCase):
    """Archive._get_pull_data_paths splits archive contents into pull
    directories and the single CQ data file."""

    def setUp(self):
        self.path = util.collected_dir / 'archive'
        self.default_instance = 'def_ins'
        with patch.object(process.Archive, '_get_default_instance', return_value=self.default_instance):
            self.archive = process.Archive(self.path, None)
        self.path.mkdir(parents=True)
        self.pull_dir_names = []
        self.cq_file_name = None
        self.expected_pull_dirs = []
        self.expected_cq_file_path = None

    def tearDown(self):
        util.rmtree(util.data_dir)

    def do_it(self):
        """Materialize the fixture contents on disk, then run and verify."""
        contents = []
        for dir_name in self.pull_dir_names:
            dir_path = self.archive.path / dir_name
            dir_path.mkdir()
            contents.append(dir_path)
        if self.cq_file_name:
            cq_path = self.archive.path / self.cq_file_name
            cq_path.open('w').close()
            contents.append(cq_path)
        pull_dirs, cq_file_path = self.archive._get_pull_data_paths(contents)
        self.make_asserts(pull_dirs, cq_file_path)

    def make_asserts(self, pull_dirs, cq_file_path):
        self.assertEqual(self.expected_pull_dirs, pull_dirs)
        self.assertEqual(self.expected_cq_file_path, cq_file_path)

    def test_no_contents(self):
        self.do_it()

    def test_one_pull_dir_no_cq(self):
        self.pull_dir_names = ['d1']
        self.expected_pull_dirs = [self.archive.path / name for name in self.pull_dir_names]
        self.do_it()

    def test_two_pull_dir_no_cq(self):
        self.pull_dir_names = ['d1', 'd2']
        self.expected_pull_dirs = [self.archive.path / name for name in self.pull_dir_names]
        self.do_it()

    def test_no_pull_dir_one_cq(self):
        self.cq_file_name = 'CQ_Data.csv'
        self.expected_cq_file_path = self.archive.path / self.cq_file_name
        self.do_it()

    def test_two_pull_dir_one_cq(self):
        self.pull_dir_names = ['d1', 'd2']
        self.cq_file_name = 'CQ_Data.csv'
        self.expected_pull_dirs = [self.archive.path / name for name in self.pull_dir_names]
        self.expected_cq_file_path = self.archive.path / self.cq_file_name
        self.do_it()
class TestArchiveProcessPullDir(unittest.TestCase):
    """Tests for Archive._process_pull_dir: each known source subdirectory is
    handed to its source's load_new_data with source-specific kwargs."""
    def setUp(self):
        self.path = util.collected_dir / 'archive'
        self.default_instance = 'def_ins'
        self.archive_sources = {'jenkins': process.ProductSource(), 'ins': process.InsSource(), 'vic': process.VicSource(), 'cq_old': process.CqSourceOld(), 'vic_status': process.VicStatusSource()}
        # Pull dir names encode a timestamp (YYMMDDHHMMSS).
        self.pull_dir_name = '181203182605'
        self.pull_dir_path = self.path / self.pull_dir_name
        with patch.object(process.Archive, '_get_default_instance', return_value=self.default_instance):
            self.archive = process.Archive(self.path, self.archive_sources)
        self.pull_dir_path.mkdir(parents=True)
        self.pull_sources = []
        self.expected_source_load_new_data_arg_list = None
    def tearDown(self):
        util.rmtree(util.data_dir)
    def do_it(self):
        """Create the source subdirectories, run _process_pull_dir, then assert.

        Patching JenkinsSource.load_new_data intercepts the calls for every
        source here — presumably all these source classes inherit it from
        JenkinsSource; verify against process.py.
        """
        for source in self.pull_sources:
            source_path = self.pull_dir_path / source
            source_path.mkdir()
        with patch.object(process.JenkinsSource, 'load_new_data') as mock_source_load_new_data:
            self.archive._process_pull_dir(self.pull_dir_path, None)
        self.make_asserts(mock_source_load_new_data)
    def make_asserts(self, mock_source_load_new_data):
        if self.expected_source_load_new_data_arg_list is not None:
            self.assertEqual(len(self.expected_source_load_new_data_arg_list), mock_source_load_new_data.call_count)
            for expected_call in self.expected_source_load_new_data_arg_list:
                self.assertIn(expected_call, mock_source_load_new_data.call_args_list)
        else:
            mock_source_load_new_data.assert_not_called()
    def test_no_sources(self):
        """Empty pull dir: no source is loaded."""
        self.do_it()
    def test(self):
        """Known sources get loaded with per-source kwargs; 'unknown' is ignored.

        1543861565.0 is presumably the epoch for pull dir '181203182605'
        (2018-12-03 18:26:05) — NOTE(review): likely timezone-sensitive; confirm
        how the timestamp is parsed.
        """
        self.pull_sources = ['jenkins', 'ins', 'vic', 'vic_status', 'unknown']
        self.expected_source_load_new_data_arg_list = [
            call(self.pull_dir_path / 'jenkins', default_instance=self.default_instance, ft_info=None),
            call(self.pull_dir_path / 'ins'),
            call(self.pull_dir_path / 'vic'),
            call(self.pull_dir_path / 'vic_status', timestamp=1543861565.0)]
        self.do_it()
"""
FtInfo
"""
class TestFtInfoProcessFile(unittest.TestCase):
    """Tests for FtInfo._process_file: feature/scenario rows are merged into
    per-(filename, table) CSVs, keeping existing rows whose keys are absent
    from the new content."""

    def setUp(self):
        self.ft_info = process.FtInfo()
        self.ft_dir = util.jenkins_data_dir / 'ft'
        self.ft_dir.mkdir(parents=True)

    def tearDown(self):
        util.rmtree(util.data_dir)

    @staticmethod
    def _id_key(a_row):
        """Key function stub: key rows by their 'id' field."""
        return a_row['id']

    def _write_existing(self, full_filename, rows):
        """Seed an existing CSV in the ft dir with the given rows."""
        file_path = self.ft_dir / full_filename
        with file_path.open('w', newline='') as file:
            writer = csv.DictWriter(file, fieldnames=rows[0].keys())
            writer.writeheader()
            writer.writerows(rows)

    def _read_rows(self, full_filename):
        """Read a CSV from the ft dir back as an {id: row_dict} mapping."""
        rows = {}
        file_path = self.ft_dir / full_filename
        with file_path.open(newline='') as file:
            for row in csv.DictReader(file):
                rows[row['id']] = dict(row)
        return rows

    def test_no_content(self):
        """Empty content writes no file at all."""
        self.ft_info._process_file('Production_ci1', 'features', {})
        self.assertEqual(util.listdir(self.ft_dir), [])

    def test_bad_table_name(self):
        """An unrecognized table name raises."""
        with self.assertRaises(Exception):
            self.ft_info._process_file('Production_ci1', 'derp', {'hi'})

    def test_features_no_existing(self):
        filename = 'Production_ci1_1.7.1.0'
        table_name = 'features'
        content = {
            '1': {'id': '1', 'result': 'passed', 'job_timestamp': '1'},
            '2': {'id': '2', 'result': 'failed', 'job_timestamp': '2'},
            '3': {'id': '3', 'result': 'passed', 'job_timestamp': '3'}
        }
        self.ft_info._process_file(filename, table_name, content)
        full_filename = '{0}_{1}.csv'.format(filename, table_name)
        self.assertEqual(util.listdir(self.ft_dir), [full_filename])
        self.assertEqual(self._read_rows(full_filename), content)

    def test_scenarios_no_existing(self):
        filename = 'Production_ci1_1.7.1.0'
        table_name = 'scenarios'
        content = {
            '1': {'id': '1', 'result': 'passed', 'job_timestamp': '1'},
            '2': {'id': '2', 'result': 'failed', 'job_timestamp': '2'},
            '3': {'id': '3', 'result': 'passed', 'job_timestamp': '3'}
        }
        self.ft_info._process_file(filename, table_name, content)
        full_filename = '{0}_{1}.csv'.format(filename, table_name)
        self.assertEqual(util.listdir(self.ft_dir), [full_filename])
        self.assertEqual(self._read_rows(full_filename), content)

    @patch('pivt.process.FtInfo._gen_ft_feature_key')
    def test_features_one_existing_inclusive(self, mock_key_func):
        """The existing row's key is also in the new content: new content wins."""
        mock_key_func.side_effect = self._id_key
        filename = 'Production_ci1_1.7.1.0'
        table_name = 'features'
        content = {
            '1': {'id': '1', 'result': 'passed', 'job_timestamp': '1'},
            '2': {'id': '2', 'result': 'failed', 'job_timestamp': '2'},
            '3': {'id': '3', 'result': 'passed', 'job_timestamp': '3'}
        }
        full_filename = '{0}_{1}.csv'.format(filename, table_name)
        self._write_existing(full_filename, [
            {'id': '2', 'result': 'failed', 'job_timestamp': '2'}
        ])
        self.ft_info._process_file(filename, table_name, content)
        self.assertEqual(util.listdir(self.ft_dir), [full_filename])
        self.assertEqual(self._read_rows(full_filename), content)

    @patch('pivt.process.FtInfo._gen_ft_scenario_key')
    def test_scenarios_one_existing_exclusive(self, mock_key_func):
        """The existing row's key is absent from the new content: it is kept."""
        mock_key_func.side_effect = self._id_key
        filename = 'Production_ci1_1.7.1.0'
        table_name = 'scenarios'
        content = {
            '1': {'id': '1', 'result': 'passed', 'job_timestamp': '1'},
            '3': {'id': '3', 'result': 'passed', 'job_timestamp': '3'}
        }
        full_filename = '{0}_{1}.csv'.format(filename, table_name)
        existing_row = {'id': '2', 'result': 'failed', 'job_timestamp': '2'}
        self._write_existing(full_filename, [existing_row])
        self.ft_info._process_file(filename, table_name, content)
        self.assertEqual(util.listdir(self.ft_dir), [full_filename])
        # The untouched existing row must survive alongside the new content.
        content['2'] = existing_row
        self.assertEqual(self._read_rows(full_filename), content)

    @patch('pivt.process.FtInfo._gen_ft_feature_key')
    def test_no_new_no_existing(self, mock_key_func):
        mock_key_func.side_effect = self._id_key
        self.ft_info._process_file('Production_ci1_1.7.1.0', 'features', {})
        self.assertEqual(util.listdir(self.ft_dir), [])

    @patch('pivt.process.FtInfo._gen_ft_feature_key')
    def test_no_new_one_existing(self, mock_key_func):
        """New content identical to the existing row: file is unchanged."""
        mock_key_func.side_effect = self._id_key
        filename = 'Production_ci1_1.7.1.0'
        table_name = 'features'
        content = {
            '2': {'id': '2', 'result': 'failed', 'job_timestamp': '2'}
        }
        full_filename = '{0}_{1}.csv'.format(filename, table_name)
        self._write_existing(full_filename, [
            {'id': '2', 'result': 'failed', 'job_timestamp': '2'}
        ])
        self.ft_info._process_file(filename, table_name, content)
        self.assertEqual(util.listdir(self.ft_dir), [full_filename])
        self.assertEqual(self._read_rows(full_filename), content)
class TestFtInfoLoadFtInfo(unittest.TestCase):
    """Tests for FtInfo.load_ft_info, which parses a job event's cucumber
    JSON reports into two dicts: features keyed by
    'instance:ci:number:timestamp:report:feature_id' and scenarios keyed by
    'instance:ci:number:timestamp:feature_id:scenario_id'.
    """
    def test_no_reports(self):
        """An event without a 'reports' key yields empty features/scenarios."""
        event = {'no': 'reports'}
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {}
        expected_scenarios = {}
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_empty_reports(self):
        """An empty 'reports' dict yields empty features/scenarios."""
        event = {
            'id': 1,
            'reports': {}
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {}
        expected_scenarios = {}
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_null_report(self):
        """A report with empty content contributes nothing."""
        event = {
            'id': 1,
            'reports': {'r1': {}}
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {}
        expected_scenarios = {}
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_invalid(self):
        """The invalid.json fixture produces no features or scenarios."""
        with open(find_file('resources/cucumber_reports/invalid.json'), 'r') as file:
            report = json.loads(file.read())
        event = {
            'instance': 'Production',
            'ci': 'ci1',
            'number': 5,
            'timestamp': 10,
            'reports': {
                'invalid.json': report
            }
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {}
        expected_scenarios = {}
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_invalid_report(self):
        """The invalid-report.json fixture produces no features or scenarios."""
        with open(find_file('resources/cucumber_reports/invalid-report.json'), 'r') as file:
            report = json.loads(file.read())
        event = {
            'instance': 'Production',
            'ci': 'ci1',
            'number': 5,
            'timestamp': 10,
            'reports': {
                'invalid-report.json': report
            }
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {}
        expected_scenarios = {}
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_invalid_report_2(self):
        """The invalid-report-2.json fixture yields one feature but no scenarios."""
        with open(find_file('resources/cucumber_reports/invalid-report-2.json'), 'r') as file:
            report = json.loads(file.read())
        event = {
            'instance': 'Production',
            'ci': 'ci1',
            'number': 5,
            'timestamp': 10,
            'release': '1.7.1.0',
            'reports': {
                'invalid-report-2.json': report
            }
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {
            'Production:ci1:5:10:invalid-report-2.json:simpleId': {
                # 'duration': 123456789,
                'id': 'simpleId',
                'job_ci': 'ci1',
                'job_instance': 'Production',
                'job_number': 5,
                'job_timestamp': 10,
                'job_release': '1.7.1.0',
                # implicit string concatenation: 'Simple feature'
                'name': 'Simple '
                        'feature',
                'report_name': 'invalid-report-2.json',
                'result': 'passed',
                'tags': ''
            }
        }
        expected_scenarios = {}
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_simple(self):
        """A minimal valid report yields one feature and one scenario."""
        with open(find_file('resources/cucumber_reports/simple.json'), 'r') as file:
            report = json.loads(file.read())
        event = {
            'instance': 'Production',
            'ci': 'ci1',
            'number': 5,
            'timestamp': 10,
            'release': '1.7.1.0',
            'reports': {
                'simple.json': report
            }
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {
            'Production:ci1:5:10:simple.json:simpleId': {
                # 'duration': 123456789,
                'id': 'simpleId',
                'job_ci': 'ci1',
                'job_instance': 'Production',
                'job_number': 5,
                'job_timestamp': 10,
                'job_release': '1.7.1.0',
                # implicit string concatenation: 'Simple feature'
                'name': 'Simple '
                        'feature',
                'report_name': 'simple.json',
                'result': 'passed',
                'tags': ''
            }
        }
        expected_scenarios = {
            'Production:ci1:5:10:simpleId:Simple scenario': {
                # 'duration': 123456789,
                'feature_id': 'simpleId',
                'id': 'Simple '
                      'scenario',
                'job_ci': 'ci1',
                'job_instance': 'Production',
                'job_number': 5,
                'job_timestamp': 10,
                'job_release': '1.7.1.0',
                'name': 'Simple '
                        'scenario',
                'report_name': 'simple.json',
                'result': 'passed',
                'tags': ''
            }
        }
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_simple_artifact_in_report_name(self):
        """A report keyed by an .../artifact/ URL is reduced to its basename."""
        with open(find_file('resources/cucumber_reports/simple.json'), 'r') as file:
            report = json.loads(file.read())
        event = {
            'instance': 'Production',
            'ci': 'ci1',
            'number': 5,
            'timestamp': 10,
            'release': '1.7.1.0',
            'reports': {
                'http://derp/artifact/simple.json': report
            }
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        # Expected report_name is 'simple.json', not the full URL.
        expected_features = {
            'Production:ci1:5:10:simple.json:simpleId': {
                # 'duration': 123456789,
                'id': 'simpleId',
                'job_ci': 'ci1',
                'job_instance': 'Production',
                'job_number': 5,
                'job_timestamp': 10,
                'job_release': '1.7.1.0',
                'name': 'Simple '
                        'feature',
                'report_name': 'simple.json',
                'result': 'passed',
                'tags': ''
            }
        }
        expected_scenarios = {
            'Production:ci1:5:10:simpleId:Simple scenario': {
                # 'duration': 123456789,
                'feature_id': 'simpleId',
                'id': 'Simple '
                      'scenario',
                'job_ci': 'ci1',
                'job_instance': 'Production',
                'job_number': 5,
                'job_timestamp': 10,
                'job_release': '1.7.1.0',
                'name': 'Simple '
                        'scenario',
                'report_name': 'simple.json',
                'result': 'passed',
                'tags': ''
            }
        }
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_complex(self):
        """The full sample.json fixture: multiple features/scenarios with tags,
        mixed results, a failed background, and a step with no result."""
        with open(find_file('resources/cucumber_reports/sample.json'), 'r', encoding='utf-8') as file:
            report = json.loads(file.read())
        event = {
            'instance': 'Production',
            'ci': 'ci1',
            'number': 5,
            'timestamp': 10,
            'release': '1.7.1.0',
            'reports': {
                'sample.json': report
            }
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {
            "Production:ci1:5:10:sample.json:account-holder-withdraws-cash": {
                "name": "1st feature",
                "id": "account-holder-withdraws-cash",
                "result": "passed",
                # "duration": 99263122889,
                "tags": {'featureTag'},
                "report_name": "sample.json",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:sample.json:account-holder-withdraws-more-cash": {
                "name": "Second feature",
                "id": "account-holder-withdraws-more-cash",
                "result": "failed",
                # "duration": 92610000,
                "tags": {''},
                "report_name": "sample.json",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:sample.json:failed-background": {
                "name": "some feature",
                "id": "failed-background",
                "result": "failed",
                # "duration": 99124118111,
                "tags": {'featureTag'},
                "report_name": "sample.json",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            }
        }
        expected_scenarios = {
            "Production:ci1:5:10:account-holder-withdraws-cash:account-holder-withdraws-cash;account-has-'sufficient-funds';;2": {
                "name": "Account has <sufficient funds>",
                "id": "account-holder-withdraws-cash;account-has-'sufficient-funds';;2",
                "result": "passed",
                # "duration": 139004778,
                "tags": {"featureTag", "fast", "checkout"},
                "report_name": "sample.json",
                "feature_id": "account-holder-withdraws-cash",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:account-holder-withdraws-more-cash:account-holder-withdraws-more-cash;account-has-sufficient-funds;;2": {
                "name": "Account may not have sufficient funds",
                "id": "account-holder-withdraws-more-cash;account-has-sufficient-funds;;2",
                "result": "failed",
                # "duration": 92050000,
                "tags": {"checkout"},
                "report_name": "sample.json",
                "feature_id": "account-holder-withdraws-more-cash",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:account-holder-withdraws-more-cash:account-holder-withdraws-more-cash;clean-up": {
                "name": "Clean-up",
                "id": "account-holder-withdraws-more-cash;clean-up",
                "result": "passed",
                # "duration": 560000,
                "tags": {''},
                "report_name": "sample.json",
                "feature_id": "account-holder-withdraws-more-cash",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:account-holder-withdraws-more-cash:undefined-result": {
                "name": "This step has no result...",
                "id": "undefined-result",
                "result": "skipped",
                # "duration": 0,
                "tags": {''},
                "report_name": "sample.json",
                "feature_id": "account-holder-withdraws-more-cash",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:failed-background:failed-background;account-has-'sufficient-funds';;2": {
                "name": "Account has <sufficient funds>",
                "id": "failed-background;account-has-'sufficient-funds';;2",
                "result": "failed",
                # "duration": 0,
                "tags": {"featureTag", "fast", "checkout"},
                "report_name": "sample.json",
                "feature_id": "failed-background",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            }
        }
        # load_ft_info returns 'tags' as a ':'-joined string; split each into
        # a set so the comparison against the expected tag sets is
        # order-independent.
        for name, item in features.items():
            if 'tags' in item:
                item['tags'] = set(item['tags'].split(':'))
        for name, item in scenarios.items():
            if 'tags' in item:
                item['tags'] = set(item['tags'].split(':'))
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_duplicate_scenarios_one_report(self):
        """Two features sharing a scenario name within one report stay distinct
        because the feature id is part of the scenario key."""
        with open(find_file('resources/cucumber_reports/duplicate-scenarios-1.json'), 'r', encoding='utf-8') as file:
            report = json.loads(file.read())
        event = {
            'instance': 'Production',
            'ci': 'ci1',
            'number': 5,
            'timestamp': 10,
            'release': '1.7.1.0',
            'reports': {
                'sample.json': report
            }
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {
            "Production:ci1:5:10:sample.json:1st-feature": {
                "name": "1st feature",
                "id": "1st-feature",
                "result": "passed",
                "tags": '',
                "report_name": "sample.json",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:sample.json:2nd-feature": {
                "name": "2nd feature",
                "id": "2nd-feature",
                "result": "failed",
                "tags": '',
                "report_name": "sample.json",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            }
        }
        expected_scenarios = {
            "Production:ci1:5:10:1st-feature:1st-feature;1st-scenario": {
                "name": "1st scenario",
                "id": "1st-feature;1st-scenario",
                "result": "passed",
                "tags": '',
                "report_name": "sample.json",
                "feature_id": "1st-feature",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:2nd-feature:2nd-feature;1st-scenario": {
                "name": "1st scenario",
                "id": "2nd-feature;1st-scenario",
                "result": "failed",
                "tags": '',
                "report_name": "sample.json",
                "feature_id": "2nd-feature",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:2nd-feature:2nd-feature;2nd-scenario": {
                "name": "2nd scenario",
                "id": "2nd-feature;2nd-scenario",
                "result": "skipped",
                "tags": '',
                "report_name": "sample.json",
                "feature_id": "2nd-feature",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
        }
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_duplicate_scenarios_multiple_reports(self):
        """When two reports carry the same scenario key, the later report's
        entry wins: '2nd-feature;2nd-scenario' ends up 'passed' from
        sample2.json instead of 'skipped' from sample.json."""
        with open(find_file('resources/cucumber_reports/duplicate-scenarios-1.json'), 'r', encoding='utf-8') as file:
            report1 = json.loads(file.read())
        with open(find_file('resources/cucumber_reports/duplicate-scenarios-2.json'), 'r', encoding='utf-8') as file:
            report2 = json.loads(file.read())
        event = {
            'instance': 'Production',
            'ci': 'ci1',
            'number': 5,
            'timestamp': 10,
            'release': '1.7.1.0',
            'reports': {
                'sample.json': report1,
                'sample2.json': report2
            }
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {
            "Production:ci1:5:10:sample.json:1st-feature": {
                "name": "1st feature",
                "id": "1st-feature",
                "result": "passed",
                "tags": '',
                "report_name": "sample.json",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:sample.json:2nd-feature": {
                "name": "2nd feature",
                "id": "2nd-feature",
                "result": "failed",
                "tags": '',
                "report_name": "sample.json",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            }
        }
        expected_scenarios = {
            "Production:ci1:5:10:1st-feature:1st-feature;1st-scenario": {
                "name": "1st scenario",
                "id": "1st-feature;1st-scenario",
                "result": "passed",
                "tags": '',
                "report_name": "sample.json",
                "feature_id": "1st-feature",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:2nd-feature:2nd-feature;1st-scenario": {
                "name": "1st scenario",
                "id": "2nd-feature;1st-scenario",
                "result": "failed",
                "tags": '',
                "report_name": "sample.json",
                "feature_id": "2nd-feature",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:2nd-feature:2nd-feature;2nd-scenario": {
                "name": "2nd scenario",
                "id": "2nd-feature;2nd-scenario",
                "result": "passed",
                "tags": '',
                "report_name": "sample2.json",
                "feature_id": "2nd-feature",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
        }
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
    def test_simple_2(self):
        """simple-2.json: two features, four scenarios, mixed results."""
        with open(find_file('resources/cucumber_reports/simple-2.json'), 'r', encoding='utf-8') as file:
            report = json.loads(file.read())
        event = {
            'instance': 'Production',
            'ci': 'ci1',
            'number': 5,
            'timestamp': 10,
            'release': '1.7.1.0',
            'reports': {
                'sample.json': report
            }
        }
        features, scenarios = process.FtInfo.load_ft_info(event)
        expected_features = {
            "Production:ci1:5:10:sample.json:1st-feature": {
                "name": "1st feature",
                "id": "1st-feature",
                "result": "failed",
                "tags": '',
                "report_name": "sample.json",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:sample.json:2nd-feature": {
                "name": "2nd feature",
                "id": "2nd-feature",
                "result": "failed",
                "tags": '',
                "report_name": "sample.json",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            }
        }
        expected_scenarios = {
            "Production:ci1:5:10:1st-feature:1st-feature;1st-scenario": {
                "name": "1st scenario",
                "id": "1st-feature;1st-scenario",
                "result": "passed",
                "tags": '',
                "report_name": "sample.json",
                "feature_id": "1st-feature",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:1st-feature:1st-feature;2nd-scenario": {
                "name": "2nd scenario",
                "id": "1st-feature;2nd-scenario",
                "result": "failed",
                "tags": '',
                "report_name": "sample.json",
                "feature_id": "1st-feature",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:2nd-feature:2nd-feature;1st-scenario": {
                "name": "1st scenario",
                "id": "2nd-feature;1st-scenario",
                "result": "failed",
                "tags": '',
                "report_name": "sample.json",
                "feature_id": "2nd-feature",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
            "Production:ci1:5:10:2nd-feature:2nd-feature;2nd-scenario": {
                "name": "2nd scenario",
                "id": "2nd-feature;2nd-scenario",
                "result": "skipped",
                "tags": '',
                "report_name": "sample.json",
                "feature_id": "2nd-feature",
                "job_instance": "Production",
                "job_ci": "ci1",
                "job_number": 5,
                "job_timestamp": 10,
                'job_release': '1.7.1.0'
            },
        }
        self.assertEqual(expected_features, features)
        self.assertEqual(expected_scenarios, scenarios)
class TestFtTagsGetFtTags(unittest.TestCase):
    """Tests for FtInfo._get_ft_tags tag extraction."""
    def test_no_tags(self):
        """An element without a 'tags' key yields an empty list."""
        self.assertEqual(process.FtInfo._get_ft_tags({'no': 'tags'}), [])
    def test_empty_tags(self):
        """An empty tag list yields an empty list."""
        self.assertEqual(process.FtInfo._get_ft_tags({'id': 1, 'tags': []}), [])
    def test(self):
        """Dict tags ({'name': ...}) and plain-string tags are both accepted,
        with any leading '@' stripped; compare as a set since the duplicate
        dict tag collapses."""
        tag_list = [
            {'name': '@dict_with_at', 'type': 'derp'},
            {'name': 'dict_no_at', 'type': 'herp'},
            '@str_with_at',
            'str_no_at',
            {'name': '@dict_with_at', 'type': 'derp'}
        ]
        actual = set(process.FtInfo._get_ft_tags({'id': 1, 'tags': tag_list}))
        self.assertEqual(actual, {'dict_with_at', 'dict_no_at', 'str_with_at', 'str_no_at'})
    def test_unknown_tag_type(self):
        """A tag that is neither a dict nor a string (here an int) raises."""
        bad_element = {
            'id': 1,
            'tags': [
                {'name': '@dict', 'type': 'derp'},
                '@str',
                5
            ]
        }
        with self.assertRaises(Exception):
            process.FtInfo._get_ft_tags(bad_element)
class TestFtInfoGenFtScenarioKey(unittest.TestCase):
    """Test for FtInfo._gen_ft_scenario_key."""
    def test(self):
        """Key is instance:ci:number:timestamp:feature_id:id; unrelated fields
        (duration) are ignored."""
        scenario = {
            'job_instance': 'Production',
            'job_ci': 'ci1',
            'job_number': 5,
            'job_timestamp': 3,
            'feature_id': 'f1',
            'id': 's1',
            'duration': 7
        }
        expected_key = 'Production:ci1:5:3:f1:s1'
        self.assertEqual(process.FtInfo._gen_ft_scenario_key(scenario), expected_key)
class TestFtInfoGenFtFeatureKey(unittest.TestCase):
    """Test for FtInfo._gen_ft_feature_key."""
    def test(self):
        """Key is instance:ci:number:timestamp:report_name:id; unrelated fields
        (duration) are ignored."""
        feature = {
            'job_instance': 'Production',
            'job_ci': 'ci1',
            'job_number': 5,
            'job_timestamp': 3,
            'report_name': 'report1',
            'id': 'f1',
            'duration': 7
        }
        expected_key = 'Production:ci1:5:3:report1:f1'
        self.assertEqual(process.FtInfo._gen_ft_feature_key(feature), expected_key)
class TestFtInfoUpdate(unittest.TestCase):
    """Test FtInfo.update: per-file, per-table merge of another ft_info dict.
    Expected semantics (from the literals below): for each file key, existing
    'features'/'scenarios' entries are kept, same-key entries from the
    argument overwrite, and new entries are added; files only in the argument
    are added whole."""
    def test(self):
        ft_info = process.FtInfo()
        ft_info.ft_info = {
            'f1': {
                'features': {
                    'feat1': {'id': 'feat1'},
                    'feat2': {'id': 'feat2'}
                },
                'scenarios': {
                    'scen1': {'id': 'scen1'},
                    'scen2': {'id': 'scen2'}
                }
            },
            'f2': {
                'features': {
                    'feat3': {'id': 'feat3'},
                    'feat4': {'id': 'feat4'}
                }
            },
            'f3': {
                'scenarios': {
                    'scen3': {'id': 'scen3'},
                    'scen4': {'id': 'scen4'}
                }
            }
        }
        ft_info2 = {
            'f2': {
                'features': {
                    'feat4': {'id': 'feat4'},
                    'feat5': {'id': 'feat5'}
                },
                'scenarios': {
                    'scen5': {'id': 'scen5'},
                    'scen6': {'id': 'scen6'}
                }
            },
            'f3': {
                'features': {
                    'feat6': {'id': 'feat6'}
                },
                'scenarios': {
                    'scen7': {'id': 'scen7'}
                }
            },
            'f4': {
                'features': {
                    'feat7': {'id': 'feat7'}
                },
                'scenarios': {
                    'scen8': {'id': 'scen8'}
                }
            }
        }
        # Merge ft_info2 into ft_info.ft_info in place.
        ft_info.update(ft_info2)
        self.assertEqual({
            # f1: untouched (not present in ft_info2).
            'f1': {
                'features': {
                    'feat1': {'id': 'feat1'},
                    'feat2': {'id': 'feat2'}
                },
                'scenarios': {
                    'scen1': {'id': 'scen1'},
                    'scen2': {'id': 'scen2'}
                }
            },
            # f2: features merged (feat4 overwritten, feat5 added);
            # scenarios table added whole.
            'f2': {
                'features': {
                    'feat3': {'id': 'feat3'},
                    'feat4': {'id': 'feat4'},
                    'feat5': {'id': 'feat5'}
                },
                'scenarios': {
                    'scen5': {'id': 'scen5'},
                    'scen6': {'id': 'scen6'}
                }
            },
            # f3: features table added; existing scenarios kept with scen7 added.
            'f3': {
                'features': {
                    'feat6': {'id': 'feat6'}
                },
                'scenarios': {
                    'scen3': {'id': 'scen3'},
                    'scen4': {'id': 'scen4'},
                    'scen7': {'id': 'scen7'}
                }
            },
            # f4: new file, added whole.
            'f4': {
                'features': {
                    'feat7': {'id': 'feat7'}
                },
                'scenarios': {
                    'scen8': {'id': 'scen8'}
                }
            }
        }, ft_info.ft_info)
"""
DataFile
"""
class TestDataFileIsEmpty(unittest.TestCase):
    """Tests for DataFile.is_empty against real files under util.data_dir."""
    def setUp(self):
        util.data_dir.mkdir()
    def tearDown(self):
        util.rmtree(util.data_dir)
    @patch('pivt.process.DataFile._normalize_filename')
    def test_empty(self, mock_normalize):
        """A zero-byte file is reported empty."""
        mock_normalize.return_value = None
        target = util.data_dir / 'test.txt'
        target.touch()
        self.assertTrue(process.DataFile(target).is_empty())
    @patch('pivt.process.DataFile._normalize_filename')
    def test_not_empty(self, mock_normalize):
        """A file with any content is not empty."""
        mock_normalize.return_value = None
        target = util.data_dir / 'test.txt'
        with target.open('w') as out_file:
            out_file.write('hi!')
        self.assertFalse(process.DataFile(target).is_empty())
class TestDataFileConstructDbPath(unittest.TestCase):
    """Test for DataFile._construct_db_path."""
    def test(self):
        """The db path is the data dir joined with the source file's basename."""
        data_file = process.DataFile(Path('/path/omg/hi.txt'))
        result = data_file._construct_db_path(Path('/data'))
        self.assertEqual(Path('/data/hi.txt'), result)
"""
JsonDataFile
"""
class TestJsonDataFileLoadEvents(unittest.TestCase):
    """Test that JsonDataFile.load_events feeds each raw line to _load_event."""
    @patch('pivt.process.JsonDataFile._load_event')
    @patch('pivt.process.DataFile._normalize_filename')
    def test(self, mock_normalize, mock_load_event):
        mock_normalize.return_value = None
        util.data_dir.mkdir()
        # Fix: register cleanup immediately so the directory is removed even
        # when an assertion below fails. The original called
        # util.rmtree(util.data_dir) at the end of the test body, which was
        # skipped on failure and leaked the directory into later tests.
        self.addCleanup(util.rmtree, util.data_dir)
        file_path = util.data_dir / 'test.txt'
        with file_path.open('w') as file:
            file.write('hi!\n')
            file.write('hello\n')
            file.write('world\n')
        data_file = process.JsonDataFile(file_path)
        data_file.load_events()
        # Each raw line (newline included) is forwarded to _load_event.
        mock_load_event.assert_has_calls([
            call('hi!\n'),
            call('hello\n'),
            call('world\n')
        ])
class TestJsonFileProcess(unittest.TestCase):
    """Tests for JsonDataFile.process: appending new events to the db file,
    skipping events whose keys are already recorded, and accumulating
    per-file added/skipped stats. Tests fill in the fixture/expectation
    attributes from setUp and then call do_it()."""
    def setUp(self):
        self.file_path = util.data_dir / 'test.json'
        self.data_file = process.JsonDataFile(self.file_path)
        self.data_dir = util.data_dir / 'test_dir'
        self.data_dir.mkdir(parents=True)
        self.db_file = self.data_dir / util.basename(self.file_path)
        # Inputs passed to process(); tests pre-populate as needed.
        self.file_stats = {}
        self.event_keys = {}
        # Expectations checked by make_asserts().
        self.expected_file_stats = {}
        self.expected_events = {}
        self.expected_added = 0
        self.expected_skipped = 0
    def tearDown(self):
        util.rmtree(util.data_dir)
    def make_db_files(self):
        """Seed db files with one {'id': key} JSON line per key in self.event_keys."""
        for filename, stats in self.event_keys.items():
            for status, event_keys in stats.items():
                if not event_keys:
                    continue
                file_path = self.data_dir / filename
                with file_path.open('a') as file:
                    for key in event_keys:
                        event = {'id': key}
                        file.write(json.dumps(event) + '\n')
    def load_events(self):
        """Read back the db file as a list of parsed JSON events."""
        events = []
        with self.db_file.open() as file:
            for line in file:
                events.append(json.loads(line))
        return events
    def do_it(self):
        """Seed db files, run process(), and assert stats/events/counts."""
        self.make_db_files()
        self.set_mocks()
        added, skipped = self.data_file.process(self.data_dir, self.file_stats, self.event_keys)
        self.make_asserts(added, skipped)
    def set_mocks(self):
        """Hook for tests that need mocks; no-op by default."""
        pass
    def make_asserts(self, added, skipped):
        """Compare stats, db file contents (order-insensitive), and counts."""
        self.assertEqual(self.expected_file_stats, self.file_stats)
        actual_events = self.load_events()
        self.assertEqual(len(self.expected_events), len(actual_events))
        for expected_event in self.expected_events:
            self.assertIn(expected_event, actual_events)
        self.assertEqual(self.expected_added, added)
        self.assertEqual(self.expected_skipped, skipped)
    def test_all_new(self):
        """No prior keys: every event is added."""
        self.data_file.events = {
            '1': {'id': '1'},
            '2': {'id': '2'}
        }
        self.expected_file_stats = {
            'test.json': {
                'added': 2,
                'skipped': 0
            }
        }
        self.expected_events = [
            {'id': '1'},
            {'id': '2'}
        ]
        self.expected_added = 2
        self.expected_skipped = 0
        self.do_it()
    def test_no_new(self):
        """All keys already exist: nothing added, both skipped."""
        self.data_file.events = {
            '1': {'id': '1'},
            '2': {'id': '2'}
        }
        self.event_keys['test.json'] = {
            'existing': {'1', '2'},
            'new': set()
        }
        self.expected_file_stats = {
            'test.json': {
                'added': 0,
                'skipped': 2
            }
        }
        self.expected_events = [
            {'id': '1'},
            {'id': '2'}
        ]
        self.expected_added = 0
        self.expected_skipped = 2
        self.do_it()
    def test_mix(self):
        """One 'existing' key, one 'new' key, one truly new event.
        Note (from the expectations below, cf. test_no_new): a key listed
        under 'new' counts toward the returned skipped total (2) but not the
        per-file stats (skipped: 1)."""
        self.data_file.events = {
            '1': {'id': '1'},
            '2': {'id': '2'},
            '3': {'id': '3'}
        }
        self.event_keys['test.json'] = {
            'existing': {'1'},
            'new': {'2'}
        }
        self.expected_file_stats['test.json'] = {
            'added': 1,
            'skipped': 1
        }
        self.expected_events = [
            {'id': '1'},
            {'id': '2'},
            {'id': '3'}
        ]
        self.expected_added = 1
        self.expected_skipped = 2
        self.do_it()
    def test_existing_stats(self):
        """Pre-existing file stats are incremented, not replaced."""
        self.data_file.events = {
            '1': {'id': '1'},
            '2': {'id': '2'},
            '3': {'id': '3'}
        }
        self.file_stats['test.json'] = {
            'added': 3,
            'skipped': 2
        }
        self.event_keys['test.json'] = {
            'existing': {'1'},
            'new': {'2'}
        }
        self.expected_file_stats['test.json'] = {
            'added': 4,
            'skipped': 3
        }
        self.expected_events = [
            {'id': '1'},
            {'id': '2'},
            {'id': '3'}
        ]
        self.expected_added = 1
        self.expected_skipped = 2
        self.do_it()
"""
ProductDataFile
"""
class TestProductDataFileNormalizeFilename(unittest.TestCase):
    """ProductDataFile filename normalization: an instance prefix is prepended
    from the default when missing (but kept if present), and a '-sub' CI
    suffix is dropped."""
    def do_it(self, path_name, default_instance, expected):
        """Build a ProductDataFile and compare its normalized name."""
        normalized = process.ProductDataFile(Path(path_name), default_instance).name
        self.assertEqual(expected, normalized)
    def test_no_instance_default_dev_no_sub(self):
        self.do_it('jenkins/ci2_Build.json', 'Development', 'Development_ci2_Build.json')
    def test_no_instance_default_prod_no_sub(self):
        self.do_it('jenkins/ci2_Build.json', 'Production', 'Production_ci2_Build.json')
    def test_instance_dev_default_dev_no_sub(self):
        self.do_it('jenkins/Development_ci2_Build.json', 'Development', 'Development_ci2_Build.json')
    def test_instance_dev_default_prod_no_sub(self):
        self.do_it('jenkins/Development_ci2_Build.json', 'Production', 'Development_ci2_Build.json')
    def test_instance_prod_default_dev_no_sub(self):
        self.do_it('jenkins/Production_ci2_Build.json', 'Development', 'Production_ci2_Build.json')
    def test_instance_prod_default_prod_no_sub(self):
        self.do_it('jenkins/Production_ci2_Build.json', 'Production', 'Production_ci2_Build.json')
    def test_no_instance_default_dev_with_sub(self):
        self.do_it('jenkins/ci3-sub_Build.json', 'Development', 'Development_ci3_Build.json')
    def test_no_instance_default_prod_with_sub(self):
        self.do_it('jenkins/ci3-sub_Build.json', 'Production', 'Production_ci3_Build.json')
    def test_instance_dev_default_dev_with_sub(self):
        self.do_it('jenkins/Development_ci3-sub_Build.json', 'Development', 'Development_ci3_Build.json')
    def test_instance_dev_default_prod_with_sub(self):
        self.do_it('jenkins/Development_ci3-sub_Build.json', 'Production', 'Development_ci3_Build.json')
    def test_instance_prod_default_dev_with_sub(self):
        self.do_it('jenkins/Production_ci3-sub_Build.json', 'Development', 'Production_ci3_Build.json')
    def test_instance_prod_default_prod_with_sub(self):
        self.do_it('jenkins/Production_ci3-sub_Build.json', 'Production', 'Production_ci3_Build.json')
class TestProductDataFileLoadEvent(unittest.TestCase):
    """Tests for ProductDataFile._load_event: building events are dropped;
    finished events are cooked, keyed, and stored in self.events; events in
    the AWS_FunctionalTest stage additionally trigger _load_ft_info."""
    def setUp(self):
        self.path = Path('dummy_path')
        self.default_instance = 'default_instance'
        self.data_file = process.ProductDataFile(self.path, self.default_instance)
        # Per-test fixtures/expectations, overridden before do_it().
        self.event = {}
        self.event_key = 'coolkey'
        self.expected_strip_called = False
        self.expected_load_ft_info_called = False
        self.expected_events = {}
    def do_it(self):
        """Run _load_event on the JSON-encoded self.event with cook,
        _load_ft_info, and get_key patched, then assert expectations."""
        raw_event = json.dumps(self.event)
        with patch.object(process.ProductRawEvent, 'cook') as mock_event_cook, \
                patch.object(process.ProductDataFile, '_load_ft_info') as mock_load_ft_info, \
                patch.object(process.ProductCookedEvent, 'get_key') as mock_event_get_key:
            self.set_mocks(raw_event, mock_event_cook, mock_event_get_key)
            self.data_file._load_event(raw_event)
            self.make_asserts(mock_event_cook, mock_load_ft_info)
    def set_mocks(self, raw_event, mock_event_cook, mock_event_get_key):
        # cook returns a real cooked event wrapping the raw JSON; the key is fixed.
        mock_event_cook.return_value = process.ProductCookedEvent(raw_event)
        mock_event_get_key.return_value = self.event_key
    def make_asserts(self, mock_event_cook, mock_load_ft_info):
        self.assertEqual(self.expected_strip_called, mock_event_cook.called)
        self.assertEqual(self.expected_load_ft_info_called, mock_load_ft_info.called)
        self.assertEqual(self.expected_events, self.data_file.events)
    def test_building_event(self):
        """An event with building=True is ignored: nothing cooked or stored."""
        self.event = {'cool': 'word', 'building': True}
        self.do_it()
    def test_basic_event(self):
        """A finished Build-stage event is cooked and stored under its key."""
        self.event = {'cool': 'word', 'id': 2, 'stage': 'Build'}
        self.expected_strip_called = True
        self.expected_events = {
            'coolkey': self.event
        }
        self.do_it()
    def test_basic_event_overwrite(self):
        """A new event with the same key replaces the stored one."""
        self.event = {'cool': 'word', 'id': 2, 'stage': 'Build'}
        self.data_file.events = {
            'coolkey': {'cool': 'word', 'id': 1, 'stage': 'Build'}
        }
        self.expected_strip_called = True
        self.expected_events = {
            'coolkey': self.event
        }
        self.do_it()
    def test_ft_event(self):
        """An AWS_FunctionalTest-stage event is stored and feeds _load_ft_info."""
        self.event = {'id': 2, 'stage': 'AWS_FunctionalTest'}
        self.expected_strip_called = True
        self.expected_load_ft_info_called = True
        self.expected_events = {
            'coolkey': self.event
        }
        self.do_it()
class TestProductDataFileLoadFtInfo(unittest.TestCase):
    """Tests for ProductDataFile._load_ft_info: FtInfo.load_ft_info results
    are merged into self.ft_info_dict under an 'instance_ci_release' key, and
    the 'reports' entry is removed from the event."""
    def setUp(self):
        self.path = Path('dummy_path')
        self.default_instance = 'default_instance'
        self.data_file = process.ProductDataFile(self.path, self.default_instance)
        # Per-test fixtures/expectations, overridden before do_it().
        self.event = {}
        self.features = {}
        self.scenarios = {}
        self.expected_ft_info = {}
    def do_it(self):
        """Run _load_ft_info with FtInfo.load_ft_info patched to return the
        test's features/scenarios, then assert expectations."""
        with patch.object(process.FtInfo, 'load_ft_info') as mock_ft_info_load:
            self.set_mocks(mock_ft_info_load)
            self.data_file._load_ft_info(self.event)
            self.make_asserts()
    def set_mocks(self, mock_ft_info_load):
        mock_ft_info_load.return_value = self.features, self.scenarios
    def make_asserts(self):
        self.assertEqual(self.expected_ft_info, self.data_file.ft_info_dict)
        # _load_ft_info must strip the (potentially large) reports payload.
        self.assertTrue('reports' not in self.event)
    def test_no_ft_info(self):
        """An event without reports still creates an empty entry for the file."""
        instance = 'Production'
        ci = 'ci1'
        release = '1.7.2.0'
        ft_info_filename = '{0}_{1}_{2}'.format(instance, ci, release)
        self.event = {'instance': instance, 'ci': ci, 'release': release}
        self.expected_ft_info = {
            ft_info_filename: {
                'features': {},
                'scenarios': {}
            }
        }
        self.do_it()
    def test_fresh(self):
        """With no prior entries, the new file entry holds the loaded data."""
        instance = 'Production'
        ci = 'ci1'
        release = '1.7.2.0'
        ft_info_filename = '{0}_{1}_{2}'.format(instance, ci, release)
        self.event = {'instance': instance, 'ci': ci, 'release': release, 'reports': 'cool reports'}
        self.features = {
            'f1': 'hi'
        }
        self.scenarios = {
            's1': 'hola',
            's2': 'hello'
        }
        self.expected_ft_info = {
            ft_info_filename: {
                'features': {
                    'f1': 'hi'
                },
                'scenarios': {
                    's1': 'hola',
                    's2': 'hello'
                }
            }
        }
        self.do_it()
    def test_new_file(self):
        """Entries for other files are preserved when a new file is added."""
        instance = 'Production'
        ci = 'ci1'
        release = '1.7.2.0'
        ft_info_filename = '{0}_{1}_{2}'.format(instance, ci, release)
        self.event = {'instance': instance, 'ci': ci, 'release': release, 'reports': 'cool reports'}
        self.features = {
            'f1': 'hi'
        }
        self.scenarios = {
            's1': 'hola',
            's2': 'hello'
        }
        self.data_file.ft_info_dict = {
            'other_file': {
                'features': 'derp',
                'scenarios': 'herp'
            }
        }
        self.expected_ft_info = {
            'other_file': {
                'features': 'derp',
                'scenarios': 'herp'
            },
            ft_info_filename: {
                'features': {
                    'f1': 'hi'
                },
                'scenarios': {
                    's1': 'hola',
                    's2': 'hello'
                }
            }
        }
        self.do_it()
    def test_existing_file(self):
        """New data for an existing file merges into its features/scenarios."""
        instance = 'Production'
        ci = 'ci1'
        release = '1.7.2.0'
        ft_info_filename = '{0}_{1}_{2}'.format(instance, ci, release)
        self.event = {'instance': instance, 'ci': ci, 'release': release, 'reports': 'cool reports'}
        self.features = {
            'f2': 'hi'
        }
        self.scenarios = {
            's2': 'hola',
            's3': 'hello'
        }
        self.data_file.ft_info_dict = {
            ft_info_filename: {
                'features': {
                    'f1': 'derp'
                },
                'scenarios': {
                    's1': 'herp',
                    's2': 'hola'
                }
            }
        }
        self.expected_ft_info = {
            ft_info_filename: {
                'features': {
                    'f1': 'derp',
                    'f2': 'hi'
                },
                'scenarios': {
                    's1': 'herp',
                    's2': 'hola',
                    's3': 'hello'
                }
            }
        }
        self.do_it()
"""
InsDataFile
"""
class TestInsDataFileNormalizeFilename(unittest.TestCase):
    """InsDataFile filename normalization: a '_develop' branch suffix is
    appended when the filename carries no branch; existing suffixes are kept."""
    def do_it(self, path_name, expected):
        """Build an InsDataFile and compare its normalized name."""
        data_file = process.InsDataFile(Path(path_name))
        self.assertEqual(expected, data_file.name)
    def test_no_instance(self):
        # Fix: renamed from test_no_instance_default_dev_no_sub -- that name
        # was copied from the Product tests and referenced concepts (default
        # instance, '-sub' suffix) that do not apply to InsDataFile.
        self.do_it('ins/Core1.json', 'Core1_develop.json')
    def test_dev_instance(self):
        self.do_it('ins/Core1_develop.json', 'Core1_develop.json')
    def test_master_instance(self):
        self.do_it('ins/Core1_master.json', 'Core1_master.json')
class TestInsDataFileLoadEvent(unittest.TestCase):
    """Tests for InsDataFile._load_event: IN_PROGRESS events are dropped;
    other events are cooked, keyed, and stored in self.events."""
    def setUp(self):
        self.path = Path('dummy_path')
        self.data_file = process.InsDataFile(self.path)
        # Per-test fixtures/expectations, overridden before do_it().
        self.event = {}
        self.event_key = 'coolkey'
        self.expected_strip_called = False
        self.expected_events = {}
    def do_it(self):
        """Run _load_event on the JSON-encoded self.event with cook and
        get_key patched, then assert expectations."""
        raw_event = json.dumps(self.event)
        with patch.object(process.InsRawEvent, 'cook') as mock_event_cook, \
                patch.object(process.InsCookedEvent, 'get_key') as mock_event_get_key:
            self.set_mocks(raw_event, mock_event_cook, mock_event_get_key)
            self.data_file._load_event(raw_event)
            self.make_asserts(mock_event_cook)
    def set_mocks(self, raw_event, mock_event_cook, mock_event_get_key):
        # cook returns a real cooked event wrapping the raw JSON; the key is fixed.
        mock_event_cook.return_value = process.InsCookedEvent(raw_event)
        mock_event_get_key.return_value = self.event_key
    def make_asserts(self, mock_event_cook):
        self.assertEqual(self.expected_strip_called, mock_event_cook.called)
        self.assertEqual(self.expected_events, self.data_file.events)
    def test_building_event(self):
        """An IN_PROGRESS event is ignored: nothing cooked or stored."""
        self.event = {'cool': 'word', 'status': 'IN_PROGRESS'}
        self.do_it()
    def test_basic_event(self):
        """A DONE event is cooked and stored under its key."""
        self.event = {'cool': 'word', 'id': 2, 'stage': 'Build', 'status': 'DONE'}
        self.expected_strip_called = True
        self.expected_events = {
            'coolkey': self.event
        }
        self.do_it()
    def test_basic_event_overwrite(self):
        """A new event with the same key replaces the stored one."""
        self.event = {'cool': 'word', 'id': 2, 'stage': 'Build'}
        self.data_file.events = {
            'coolkey': {'cool': 'word', 'id': 1, 'stage': 'Build'}
        }
        self.expected_strip_called = True
        self.expected_events = {
            'coolkey': self.event
        }
        self.do_it()
"""
VicDataFile
"""
class TestVicDataFileNormalizeFilename(unittest.TestCase):
    """VicDataFile name normalization: a file with no instance prefix
    defaults to 'Production_'; explicit prefixes are kept unchanged.

    Test names previously said dev/master (copy-pasted from the Ins
    tests) while the fixtures actually exercise Production/Development;
    they are renamed to describe what they test.
    """

    def do_it(self, path_name, expected):
        """Construct a VicDataFile for *path_name* and check its name."""
        data_file = process.VicDataFile(Path(path_name))
        self.assertEqual(expected, data_file.name)

    def test_no_instance_defaults_to_production(self):
        self.do_it('vic/AWS-VIC-Manager.json', 'Production_AWS-VIC-Manager.json')

    def test_production_instance(self):
        self.do_it('vic/Production_AWS-VIC-Manager.json', 'Production_AWS-VIC-Manager.json')

    def test_development_instance(self):
        self.do_it('vic/Development_AWS-VIC-Manager.json', 'Development_AWS-VIC-Manager.json')
class TestVicDataFileLoadEvent(unittest.TestCase):
    """VicDataFile._load_event: in-progress builds are ignored; finished
    builds are cooked, keyed, and stored (replacing older entries)."""

    def setUp(self):
        self.path = Path('dummy_path')
        self.data_file = process.VicDataFile(self.path)
        self.event = {}
        self.event_key = 'coolkey'
        self.expected_cooked_called = False
        self.expected_events = {}

    def do_it(self):
        """Feed the JSON form of self.event through _load_event with
        cook()/get_key() mocked, then check the expectations."""
        raw_event = json.dumps(self.event)
        cook_patcher = patch.object(process.VicRawEvent, 'cook')
        key_patcher = patch.object(process.VicCookedEvent, 'get_key')
        with cook_patcher as mock_event_cook, key_patcher as mock_event_get_key:
            self.set_mocks(raw_event, mock_event_cook, mock_event_get_key)
            self.data_file._load_event(raw_event)
            self.make_asserts(mock_event_cook)

    def set_mocks(self, raw_event, mock_event_cook, mock_event_get_key):
        # cook() passes the event straight through; get_key() is fixed.
        mock_event_cook.return_value = process.VicCookedEvent(raw_event)
        mock_event_get_key.return_value = self.event_key

    def make_asserts(self, mock_event_cook):
        self.assertEqual(self.expected_cooked_called, mock_event_cook.called)
        self.assertEqual(self.expected_events, self.data_file.events)

    def test_building1_event(self):
        # Explicit 'building' flag: event is dropped, cook() never runs.
        self.event = {'cool': 'word', 'building': True}
        self.do_it()

    def test_building2_event(self):
        # IN_PROGRESS status: likewise dropped.
        self.event = {'cool': 'word', 'status': 'IN_PROGRESS'}
        self.do_it()

    def test_basic_event(self):
        self.event = {'cool': 'word', 'id': 2, 'stage': 'Build', 'status': 'DONE'}
        self.expected_cooked_called = True
        self.expected_events = {'coolkey': self.event}
        self.do_it()

    def test_basic_event_overwrite(self):
        # A later event under the same key replaces the stored one.
        self.event = {'cool': 'word', 'id': 2, 'stage': 'Build'}
        self.data_file.events = {'coolkey': {'cool': 'word', 'id': 1, 'stage': 'Build'}}
        self.expected_cooked_called = True
        self.expected_events = {'coolkey': self.event}
        self.do_it()
"""
VicStatusDataFile
"""
class TestVicStatusDataFileLoadEvent(unittest.TestCase):
    """VicStatusDataFile._load_event: one raw blob cooks into several
    events, each stored under its own key."""

    def setUp(self):
        self.path = Path('dummy_path')
        self.data_file = process.VicStatusDataFile(self.path)
        self.event = {}
        self.cooked_events = []
        self.cooked_event_keys = []
        self.expected_events = {}

    def do_it(self):
        """Run _load_event on the JSON form of self.event and verify."""
        raw_data = json.dumps(self.event)
        with patch.object(process.VicStatusRawEvent, 'cook') as mock_event_cook, \
                patch.object(process.VicStatusCookedEvent, 'get_key') as mock_event_get_key:
            self.set_mocks(mock_event_cook, mock_event_get_key)
            self.data_file._load_event(raw_data)
            self.make_asserts()

    def set_mocks(self, mock_event_cook, mock_event_get_key):
        # cook() yields the prepared list; get_key() hands out keys in order.
        mock_event_cook.return_value = self.cooked_events
        mock_event_get_key.side_effect = self.cooked_event_keys

    def make_asserts(self):
        self.assertEqual(self.expected_events, self.data_file.events)

    def test(self):
        self.event = {'derp': 'herp'}
        self.cooked_events = [process.VicStatusCookedEvent({'id': n}) for n in (1, 2, 3)]
        self.cooked_event_keys = ['k1', 'k2', 'k3']
        # Each cooked event ends up under its corresponding key.
        self.expected_events = dict(zip(self.cooked_event_keys, self.cooked_events))
        self.do_it()
"""
JsonDictEvent
"""
class TestJsonDictEventInit(unittest.TestCase):
    """JsonDictEvent construction from JSON strings and plain dicts."""

    def setUp(self):
        self.default_keys = [1, 2, 3, 4, 5]
        self.default_values = ['a', 'b', 'c', 'd', 'e']

    def create_dict_event(self, keys, values):
        # Pair keys with values positionally.
        return dict(zip(keys, values))

    def create_string_event(self, keys, values):
        # Same mapping, serialized to a JSON string.
        return json.dumps(self.create_dict_event(keys, values))

    def create_test_criteria(self, keys, values,
                             test_case_generator, expected_generator):
        """Return (actual JsonDictEvent, expected inner-stringified dict)."""
        test_case = test_case_generator(keys, values)
        expected = util.inner_stringify(expected_generator(keys, values))
        return process.JsonDictEvent(test_case), expected

    def _run_case(self, keys, values, test_case_generator):
        # Shared driver: the event must be a dict equal to the expectation.
        json_event, expected = self.create_test_criteria(
            keys=keys,
            values=values,
            test_case_generator=test_case_generator,
            expected_generator=self.create_dict_event)
        self.assertIsInstance(json_event, dict)
        self.assertEqual(json_event, expected)

    def test_string_empty(self):
        self._run_case([], [], self.create_string_event)

    def test_string_instance(self):
        self._run_case(self.default_keys, self.default_values, self.create_string_event)

    def test_dict_empty(self):
        self._run_case([], [], self.create_dict_event)

    def test_dict_instance(self):
        self._run_case(self.default_keys, self.default_values, self.create_dict_event)

    def test_integrity(self):
        """Construction must neither mutate nor alias its input."""
        str_input = self.create_string_event(self.default_keys, self.default_values)
        dict_input = self.create_dict_event(self.default_keys, self.default_values)
        str_snapshot = copy.deepcopy(str_input)
        dict_snapshot = copy.deepcopy(dict_input)
        from_str = process.JsonDictEvent(str_input)
        from_dict = process.JsonDictEvent(dict_input)
        self.assertIsNot(from_str, str_input)
        self.assertEqual(str_input, str_snapshot)
        self.assertIsNot(from_dict, dict_input)
        self.assertEqual(dict_input, dict_snapshot)

    def test_bad_instance(self):
        # Lists are rejected for the dict flavour.
        with self.assertRaises(TypeError):
            _ = process.JsonDictEvent([])
"""
JsonListEvent
"""
class TestJsonListEventInit(unittest.TestCase):
    """JsonListEvent construction from JSON strings and plain lists."""

    def setUp(self):
        self.default_values = ['a', 'b', 'c', 'd', 'e']

    @staticmethod
    def create_list_event(values):
        # Identity: a plain-list test case.
        return values

    @staticmethod
    def create_string_event(values):
        # The same values, serialized to a JSON string.
        return json.dumps(values)

    def create_test_criteria(self, values, test_case_generator, expected_generator):
        """Return (actual JsonListEvent, expected inner-stringified list)."""
        test_case = test_case_generator(values)
        expected = util.inner_stringify(expected_generator(values))
        return process.JsonListEvent(test_case), expected

    def test_string_empty(self):
        values = []
        json_event, expected = self.create_test_criteria(
            values=values,
            test_case_generator=self.create_string_event,
            expected_generator=self.create_list_event)
        self.assertIsInstance(json_event, list)
        self.assertEqual(json_event, expected)

    def test_string_instance(self):
        json_event, expected = self.create_test_criteria(
            values=self.default_values,
            test_case_generator=self.create_string_event,
            expected_generator=self.create_list_event)
        self.assertIsInstance(json_event, list)
        self.assertEqual(json_event, expected)

    def test_list_empty(self):
        values = []
        json_event, expected = self.create_test_criteria(
            values=values,
            test_case_generator=self.create_list_event,
            expected_generator=self.create_list_event)
        self.assertIsInstance(json_event, list)
        self.assertEqual(json_event, expected)

    def test_list_instance(self):
        # Renamed from test_dict_instance: this case feeds a list, not a
        # dict (the old name was copy-pasted from the JsonDictEvent tests).
        json_event, expected = self.create_test_criteria(
            values=self.default_values,
            test_case_generator=self.create_list_event,
            expected_generator=self.create_list_event)
        self.assertIsInstance(json_event, list)
        self.assertEqual(json_event, expected)

    def test_integrity(self):
        """Construction must neither mutate nor alias its input."""
        test_case_str = self.create_string_event(self.default_values)
        test_case_list = self.create_list_event(self.default_values)
        test_case_str_copy = copy.deepcopy(test_case_str)
        test_case_list_copy = copy.deepcopy(test_case_list)
        json_event_str = process.JsonListEvent(test_case_str)
        json_event_list = process.JsonListEvent(test_case_list)
        self.assertIsNot(json_event_str, test_case_str)
        self.assertEqual(test_case_str, test_case_str_copy)
        self.assertIsNot(json_event_list, test_case_list)
        self.assertEqual(test_case_list, test_case_list_copy)

    def test_bad_instance(self):
        # Dicts are rejected for the list flavour.
        with self.assertRaises(TypeError):
            _ = process.JsonListEvent({})
"""
JenkinsRawEvent
"""
class TestGetParameters(unittest.TestCase):
    """JenkinsRawEvent._get_parameters: flatten every
    hudson.model.ParametersAction in 'actions' into one {name: value} dict."""

    def _check(self, event, expected):
        self.assertEqual(expected, process.JenkinsRawEvent(event)._get_parameters())

    @staticmethod
    def _params_action(*pairs):
        # Build a ParametersAction fixture from (name, value) pairs.
        return {
            '_class': 'hudson.model.ParametersAction',
            'parameters': [{'name': name, 'value': value} for name, value in pairs],
        }

    def test_empty_event(self):
        self._check({}, {})

    def test_empty_actions(self):
        self._check({'actions': []}, {})

    def test_no_parameters(self):
        # A CauseAction alone contributes nothing.
        event = {'actions': [{'_class': 'hudson.model.CauseAction', 'causes': []}]}
        self._check(event, {})

    def test_empty_parameters(self):
        self._check({'actions': [self._params_action()]}, {})

    def test_one_parameter(self):
        event = {'actions': [self._params_action(('PIPELINE_VERSION', '1.2.3.4.434'))]}
        self._check(event, {'PIPELINE_VERSION': '1.2.3.4.434'})

    def test_two_parameters(self):
        event = {'actions': [self._params_action(
            ('CI', 'ci4'), ('PIPELINE_VERSION', '1.2.3.4.434'))]}
        self._check(event, {'CI': 'ci4', 'PIPELINE_VERSION': '1.2.3.4.434'})

    def test_empty_action_with_parameters(self):
        # An empty action dict before the ParametersAction is skipped.
        event = {'actions': [{}, self._params_action(
            ('CI', 'ci4'), ('PIPELINE_VERSION', '1.2.3.4.434'))]}
        self._check(event, {'CI': 'ci4', 'PIPELINE_VERSION': '1.2.3.4.434'})

    def test_non_interesting_action_with_parameters(self):
        # Non-parameter actions are ignored; the ParametersAction still counts.
        event = {'actions': [
            {'_class': 'hudson.model.CauseAction', 'causes': []},
            self._params_action(('CI', 'ci4'), ('PIPELINE_VERSION', '1.2.3.4.434')),
        ]}
        self._check(event, {'CI': 'ci4', 'PIPELINE_VERSION': '1.2.3.4.434'})

    def test_multiple_parameter_actions(self):
        # Parameters from every ParametersAction are merged together.
        event = {'actions': [
            self._params_action(
                ('CI', 'ci4'),
                ('BASELINE_VERSION', '1.2.3.4'),
                ('CLEARCASE_VIEW', 'gpsbuild_ci4_RelCandidate_int_dev')),
            self._params_action(('PIPELINE_VERSION', '1.2.3.4.434')),
        ]}
        self._check(event, {
            'CI': 'ci4',
            'BASELINE_VERSION': '1.2.3.4',
            'CLEARCASE_VIEW': 'gpsbuild_ci4_RelCandidate_int_dev',
            'PIPELINE_VERSION': '1.2.3.4.434',
        })
"""
ProductRawEvent
"""
class TestProductRawEventInit(unittest.TestCase):
    """Placeholder tests for ProductRawEvent.__init__.

    The originals were bare ``pass`` bodies, which unittest reports as
    passing and thereby hides the missing coverage.  They are marked as
    skipped instead, so the gap is visible in test output.
    """

    def test_instance(self):
        self.skipTest('not implemented')

    def test_assignment(self):
        self.skipTest('not implemented')

    def test_integrity(self):
        self.skipTest('not implemented')
class TestProductRawEventCook(unittest.TestCase):
    """ProductRawEvent.cook() with every helper lookup stubbed out.

    The repeated six-deep @patch decorator stacks are replaced by a single
    context-manager helper that applies the same patches.
    """

    def _cook(self, event, parameters=None, release=None, iteration=None,
              upstream=(None, None), derived_cause='Not Assigned'):
        """Run cook() on *event* (instance 'Production') with helpers stubbed.

        Returns (cooked_event, parse_ut_mock) so callers can assert on both
        the result and whether the unit-test parser ran.
        """
        with patch('pivt.process.JenkinsRawEvent._get_parameters',
                   return_value=parameters if parameters is not None else {}), \
                patch('pivt.process.ProductRawEvent._get_release', return_value=release), \
                patch('pivt.process.ProductRawEvent._get_iteration', return_value=iteration), \
                patch('pivt.process.ProductRawEvent._get_upstream', return_value=upstream), \
                patch('pivt.process.ProductRawEvent._get_derived_cause', return_value=derived_cause), \
                patch('pivt.process.ProductRawEvent._parse_unit_test_event') as mock_parse_ut:
            cooked = process.ProductRawEvent(event, 'Production').cook()
        return cooked, mock_parse_ut

    def test_most_basic_build(self):
        cooked, parse_ut = self._cook({'ci': 'ci2', 'stage': 'Build'})
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'Build', 'iteration': None, 'release': None, 'ss': 'ss5', 'instance': 'Production', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned'})
        self.assertFalse(parse_ut.called)

    def test_basic_build_no_upstream_project(self):
        # An upstream build number without a project is not recorded.
        cooked, parse_ut = self._cook({'ci': 'ci2', 'stage': 'Build'}, upstream=(None, 5))
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'Build', 'iteration': None, 'release': None, 'ss': 'ss5', 'instance': 'Production', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned'})
        self.assertFalse(parse_ut.called)

    def test_basic_build_no_upstream_build(self):
        # An upstream project without a build number is not recorded.
        cooked, parse_ut = self._cook({'ci': 'ci2', 'stage': 'Build'}, upstream=('ci2-Pipeline', None))
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'Build', 'iteration': None, 'release': None, 'ss': 'ss5', 'instance': 'Production', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned'})
        self.assertFalse(parse_ut.called)

    def test_basic_build(self):
        # Full upstream info plus parameters: everything is merged in.
        cooked, parse_ut = self._cook(
            {'ci': 'ci2', 'stage': 'Build'},
            parameters={'p1': 'derp', 'p2': 'herp'},
            release='1.6.2.3',
            iteration='1.6',
            upstream=('ci2-Pipeline', 5))
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'Build', 'iteration': '1.6', 'release': '1.6.2.3', 'ss': 'ss5', 'instance': 'Production', 'upstreamProject': 'ci2-Pipeline', 'upstreamBuild': 5, 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned', 'p1': 'derp', 'p2': 'herp'})
        self.assertFalse(parse_ut.called)

    def test_basic_build_ci_sub(self):
        # A '-sub' CI suffix is stripped; the ss mapping follows the base CI.
        cooked, parse_ut = self._cook({'ci': 'ci3-sub', 'stage': 'Build'})
        self.assertEqual(cooked, {'ci': 'ci3', 'stage': 'Build', 'iteration': None, 'release': None, 'ss': 'ss6', 'instance': 'Production', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned'})
        self.assertFalse(parse_ut.called)

    def test_less_basic_build(self):
        # instance/ss already present on the event are carried through.
        cooked, parse_ut = self._cook({'instance': 'Production', 'ss': 'ss5', 'ci': 'ci2', 'stage': 'Build'})
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'Build', 'iteration': None, 'release': None, 'ss': 'ss5', 'instance': 'Production', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned'})
        self.assertFalse(parse_ut.called)

    def test_advanced_build(self):
        # duration/result survive; unknown keys like 'derp' are dropped;
        # the event's own instance wins over the constructor argument.
        cooked, parse_ut = self._cook({'instance': 'Development', 'ss': 'ss5', 'ci': 'ci2', 'stage': 'Build', 'duration': 10, 'result': 'FAILED', 'derp': 'herp'})
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'Build', 'iteration': None, 'release': None, 'ss': 'ss5', 'instance': 'Development', 'duration': 10, 'result': 'FAILED', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned'})
        self.assertFalse(parse_ut.called)

    def test_basic_deploy(self):
        cooked, parse_ut = self._cook({'ci': 'ci2', 'stage': 'Deploy'})
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'Deploy', 'iteration': None, 'release': None, 'ss': 'ss5', 'instance': 'Production', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned'})
        self.assertFalse(parse_ut.called)

    def test_basic_ut(self):
        # UnitTest stage is the only one that invokes the unit-test parser.
        cooked, parse_ut = self._cook({'ci': 'ci2', 'stage': 'UnitTest'})
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'UnitTest', 'iteration': None, 'release': None, 'ss': 'ss5', 'instance': 'Production', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned'})
        self.assertTrue(parse_ut.called)

    def test_basic_ft_no_reports(self):
        cooked, parse_ut = self._cook({'ci': 'ci2', 'stage': 'FunctionalTest'})
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'FunctionalTest', 'iteration': None, 'release': None, 'ss': 'ss5', 'instance': 'Production', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned'})
        self.assertFalse(parse_ut.called)

    def test_basic_ft_with_reports(self):
        # 'reports' on a FunctionalTest event is preserved.
        cooked, parse_ut = self._cook({'ci': 'ci2', 'stage': 'FunctionalTest', 'reports': 'derp'})
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'FunctionalTest', 'iteration': None, 'release': None, 'ss': 'ss5', 'instance': 'Production', 'reports': 'derp', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned'})
        self.assertFalse(parse_ut.called)

    def test_basic_build_with_cause(self):
        # The raw cause string is kept and the derived cause sits beside it.
        cooked, parse_ut = self._cook(
            {'ci': 'ci2', 'stage': 'Build', 'cause': 'Weekly-Builds'},
            parameters={'p1': 'derp', 'p2': 'herp'},
            release='1.6.2.3',
            iteration='1.6',
            upstream=('ci2-Pipeline', 5),
            derived_cause='Weekly')
        self.assertEqual(cooked,
                         {'ci': 'ci2', 'stage': 'Build', 'iteration': '1.6', 'release': '1.6.2.3', 'ss': 'ss5', 'instance': 'Production', 'upstreamProject': 'ci2-Pipeline',
                          'upstreamBuild': 5, 'p1': 'derp', 'p2': 'herp', 'cause': 'Weekly-Builds', 'derived_cause': 'Weekly'})
        self.assertFalse(parse_ut.called)

    def test_aws_stage(self):
        # AWS-stage events are renamed using the job name in fullDisplayName.
        cooked, parse_ut = self._cook({'ci': 'ci2', 'stage': 'AWS', 'fullDisplayName': 'ci2-FuncTest #5'})
        self.assertEqual(cooked, {'ci': 'ci2', 'stage': 'AWS_FunctionalTest', 'iteration': None, 'release': None, 'ss': 'ss5', 'instance': 'Production', 'cause': 'Not Assigned', 'derived_cause': 'Not Assigned', 'fullDisplayName': 'ci2-FuncTest #5'})
        self.assertFalse(parse_ut.called)
class TestProductRawEventGetRelease(unittest.TestCase):
    """_get_release: derive a four-part release from BASELINE_VERSION or
    PIPELINE_VERSION, falling back to the not-assigned sentinel."""

    def _check(self, event, expected):
        self.assertEqual(process.ProductRawEvent._get_release(event), expected)

    def test_empty_event(self):
        self._check({}, Constants.VERSION_NOT_ASSIGNED)

    def test_baseline_version(self):
        self._check({'BASELINE_VERSION': '1.2.3.4'}, '1.2.3.4')

    def test_pipeline_version(self):
        # PIPELINE_VERSION carries a trailing build number; release drops it.
        self._check({'PIPELINE_VERSION': '1.2.3.4.434'}, '1.2.3.4')

    def test_baseline_and_pipeline_version(self):
        self._check({'BASELINE_VERSION': '5.6.7.8', 'PIPELINE_VERSION': '5.6.7.8.28'}, '5.6.7.8')

    def test_pipeline_version_wrong_format(self):
        # Malformed pipeline versions yield the sentinel, not a partial parse.
        self._check({'PIPELINE_VERSION': '1.2.3.4.434.derp'}, Constants.VERSION_NOT_ASSIGNED)
class TestProductRawEventGetIteration(unittest.TestCase):
    """_get_iteration: take the first two components of the event's
    release, or the not-assigned sentinel when that is impossible."""

    def _check(self, event, expected):
        self.assertEqual(process.ProductRawEvent._get_iteration(event), expected)

    def test_empty_event(self):
        self._check({}, Constants.ITERATION_NOT_ASSIGNED)

    def test_baseline_version(self):
        self._check({'release': '1.2.3.4'}, '1.2')

    def test_baseline_and_pipeline_version(self):
        self._check({'release': '5.6.7.8'}, '5.6')

    def test_invalid_baseline_version(self):
        # A release that is not dotted numbers cannot produce an iteration.
        self._check({'release': 'derp'}, Constants.ITERATION_NOT_ASSIGNED)

    def test_release_not_assigned(self):
        # The release sentinel maps to the iteration sentinel.
        self._check({'release': Constants.VERSION_NOT_ASSIGNED}, Constants.ITERATION_NOT_ASSIGNED)
class TestProductRawEventGetUpstream(unittest.TestCase):
    """_get_upstream scans the event's hudson.model.CauseAction entries
    and returns (upstreamProject, upstreamBuild), or (None, None) when
    no upstream cause is present.

    Fixture helpers build the same event dicts the original literal
    fixtures spelled out, including the noise actions that must be
    ignored.
    """

    @staticmethod
    def _noise_actions():
        # Actions _get_upstream must skip over.
        return [
            {
                '_class': 'some weird action',
                'derp': 'herp'
            },
            {
                'kind': 'action with no class'
            },
        ]

    def _event(self, causes=None):
        """Build a minimal build event; *causes* (if given) go into a
        CauseAction sandwiched between the noise actions."""
        actions = self._noise_actions()
        if causes is not None:
            actions.append({'_class': 'hudson.model.CauseAction', 'causes': causes})
        actions.append({'_class': 'another action', })
        return {'number': 5, 'actions': actions, 'result': 'SUCCESS'}

    @staticmethod
    def _upstream_causes(project, build):
        return [{
            '_class': 'hudson.model.Cause$UpstreamCause',
            'upstreamProject': project,
            'upstreamBuild': build
        }]

    def _check(self, event, expected):
        self.assertEqual(process.ProductRawEvent(event, None)._get_upstream(), expected)

    def test_no_actions(self):
        self._check({'number': 5, 'result': 'SUCCESS'}, (None, None))

    def test_no_cause_action(self):
        self._check(self._event(), (None, None))

    def test_unknown_cause(self):
        self._check(self._event([{'_class': 'unknown cause class'}]), (None, None))

    def test_user_cause(self):
        # Manually-triggered builds have no upstream.
        self._check(self._event([{'_class': 'hudson.model.Cause$UserIdCause'}]), (None, None))

    def test_rebuild_user_cause(self):
        causes = [
            {'_class': 'hudson.model.Cause$RebuildCause'},
            {'_class': 'hudson.model.Cause$UserIdCause', 'userName': 'Herpy Derp'},
        ]
        self._check(self._event(causes), (None, None))

    def test_self_service_cause(self):
        self._check(self._event(self._upstream_causes('Self-Service-Pipeline', 55)),
                    ('Self-Service-Pipeline', 55))

    def test_nightly_cause(self):
        self._check(self._event(self._upstream_causes('Nightly-Builds', 34)),
                    ('Nightly-Builds', 34))

    def test_weekly_cause(self):
        self._check(self._event(self._upstream_causes('Weekly-Builds', 34)),
                    ('Weekly-Builds', 34))

    def test_upstream_cause(self):
        self._check(self._event(self._upstream_causes('ci2-Pipeline', 73)),
                    ('ci2-Pipeline', 73))
class TestProductRawEventParseUnitTestEvent(unittest.TestCase):
    """_parse_unit_test_event: dispatch to the report-based counter when a
    'report' is present, else to the action-based counter — but only when
    an action carries 'totalCount'."""

    def _parse(self, event, new_event):
        """Run _parse_unit_test_event with both counters mocked.

        The action counter mimics the real one by annotating *new_event*
        in place (adds 'me': 'gusta').  Returns (report_mock, action_mock).
        """
        with patch('pivt.process.ProductRawEvent._get_ut_counts_with_report') as mock_report, \
                patch('pivt.process.ProductRawEvent._get_ut_counts_with_action') as mock_action:
            def annotate(*args):
                args[1]['me'] = 'gusta'
            mock_action.side_effect = annotate
            process.ProductRawEvent(event, None)._parse_unit_test_event(new_event)
        return mock_report, mock_action

    def test_no_report_no_actions(self):
        new_event = {'herp': 'derp'}
        mock_report, mock_action = self._parse({'derp': 'herp'}, new_event)
        assert new_event == {'herp': 'derp'}
        assert not mock_report.called
        assert not mock_action.called

    def test_with_report_no_actions(self):
        event = {
            'derp': 'herp',
            'report': {
                'hi': 'hello'
            }
        }
        new_event = {'herp': 'derp'}
        mock_report, mock_action = self._parse(event, new_event)
        assert new_event == {'herp': 'derp'}
        assert mock_report.called
        assert not mock_action.called

    def test_no_report_with_actions_no_total_count(self):
        # Actions without 'totalCount' never trigger the action counter.
        event = {
            'derp': 'herp',
            'actions': [
                {'hi': 'hello'},
                {'hello': 'hi'}
            ]
        }
        new_event = {'herp': 'derp'}
        mock_report, mock_action = self._parse(event, new_event)
        assert new_event == {'herp': 'derp'}
        assert not mock_report.called
        assert not mock_action.called

    def test_no_report_with_actions(self):
        # The action carrying 'totalCount' triggers the counter, which
        # (per the mocked side effect) annotates the new event.
        event = {
            'derp': 'herp',
            'actions': [
                {'hi': 'hello'},
                {'hello': 'hi'},
                {'totalCount': 5}
            ]
        }
        new_event = {'herp': 'derp'}
        mock_report, mock_action = self._parse(event, new_event)
        assert new_event == {'herp': 'derp', 'me': 'gusta'}
        assert not mock_report.called
        assert mock_action.called
class TestProductRawEventGetUtCountsWithReport(unittest.TestCase):
    """Tests for _get_ut_counts_with_report: counts copied only for runs that executed tests."""

    def _run(self, report, new_event, expected):
        # Invoke the static extractor, then compare the (possibly mutated) event.
        process.ProductRawEvent._get_ut_counts_with_report(report, new_event)
        self.assertEqual(new_event, expected)

    def test_no_tests(self):
        report = {'failCount': 0, 'skipCount': 0, 'passCount': 0, 'derp': 'herp'}
        self._run(report, {'hi': 'hello', 'result': 'SUCCESS'}, {'hi': 'hello', 'result': 'SUCCESS'})

    def test_tests_aborted(self):
        report = {'failCount': 5, 'skipCount': 7, 'passCount': 8, 'derp': 'herp'}
        self._run(report, {'hi': 'hello', 'result': 'ABORTED'}, {'hi': 'hello', 'result': 'ABORTED'})

    def test_no_tests_aborted(self):
        report = {'failCount': 0, 'skipCount': 0, 'passCount': 0, 'derp': 'herp'}
        self._run(report, {'hi': 'hello', 'result': 'ABORTED'}, {'hi': 'hello', 'result': 'ABORTED'})

    def test_tests(self):
        report = {'failCount': 5, 'skipCount': 7, 'passCount': 8, 'derp': 'herp'}
        expected = {'hi': 'hello', 'result': 'SUCCESS', 'failCount': 5, 'skipCount': 7, 'passCount': 8, 'totalCount': 20}
        self._run(report, {'hi': 'hello', 'result': 'SUCCESS'}, expected)
class TestProductRawEventGetUtCountsWithAction(unittest.TestCase):
    """Tests for _get_ut_counts_with_action: passCount is derived from the totals."""

    def test(self):
        action = {'failCount': 5, 'skipCount': 7, 'totalCount': 20, 'derp': 'herp'}
        cooked = {'hi': 'hello'}
        process.ProductRawEvent._get_ut_counts_with_action(action, cooked)
        # passCount = totalCount - failCount - skipCount = 8
        self.assertEqual(cooked, {'hi': 'hello', 'failCount': 5, 'skipCount': 7, 'passCount': 8, 'totalCount': 20})
class TestProductRawEventGetDerivedCause(unittest.TestCase):
    """Checks the mapping from upstream job names to derived cause labels."""

    def _assert_cause(self, raw, expected):
        # Single place that invokes the static mapping helper.
        self.assertEqual(expected, process.ProductRawEvent._get_derived_cause(raw))

    def test_self_service(self):
        self._assert_cause('Self-Service-Pipeline', 'Self-Service')

    def test_nightly_2nd_wave(self):
        self._assert_cause('Nightly-Builds-2nd-wave', 'Nightly-2nd-Wave')

    def test_nightly(self):
        self._assert_cause('Nightly-Builds', 'Nightly')

    def test_weekly(self):
        self._assert_cause('Weekly-Builds', 'Weekly')

    def test_user(self):
        self._assert_cause('user', 'User')

    def test_something_else(self):
        # Unrecognized causes pass through unchanged.
        self._assert_cause('something_else', 'something_else')
class TestProductRawEventIsBuilding(unittest.TestCase):
    """Checks is_building() against the variants of the 'building' flag."""

    @staticmethod
    def _is_building(event):
        return process.ProductRawEvent(event, None).is_building()

    def test_building(self):
        self.assertTrue(self._is_building({'id': 5, 'building': True}))

    def test_not_building(self):
        self.assertFalse(self._is_building({'id': 5, 'building': False}))

    def test_no_building(self):
        # A missing flag counts as not building.
        self.assertFalse(self._is_building({'id': 5}))

    def test_something_else(self):
        # A non-True value counts as not building.
        self.assertFalse(self._is_building({'id': 5, 'building': 'something_else'}))
"""
ProductCookedEvent
"""
class TestProductCookedEventGetKey(unittest.TestCase):
    """Checks the ci:stage:number:timestamp composite key of a cooked product event."""

    def test(self):
        fields = {'ci': 'DERP', 'stage': 'Build', 'number': 666, 'timestamp': 123456789, 'other1': 'herpy', 'other2': 'derpy'}
        key = process.ProductCookedEvent(fields).get_key()
        self.assertEqual(key, 'DERP:Build:666:123456789', 'Should return DERP:Build:666:123456789')
"""
InsRawEvent
"""
class TestInsRawEventCook(unittest.TestCase):
    """Tests for InsRawEvent.cook(): branch defaulting and stage scrubbing."""

    def _assert_cooks_to(self, raw, expected_fields):
        # Cook the raw event and compare against the expected cooked event.
        self.assertEqual(process.InsCookedEvent(expected_fields), process.InsRawEvent(raw).cook())

    def test_no_instance(self):
        # A missing 'branch' defaults to 'develop'.
        self._assert_cooks_to(
            {'id': 1, 'derp': 'herp'},
            {'id': 1, 'derp': 'herp', 'branch': 'develop'})

    def test_with_instance_dev(self):
        self._assert_cooks_to(
            {'id': 1, 'derp': 'herp', 'branch': 'develop'},
            {'id': 1, 'derp': 'herp', 'branch': 'develop'})

    def test_with_instance_prod(self):
        self._assert_cooks_to(
            {'id': 1, 'derp': 'herp', 'branch': 'production'},
            {'id': 1, 'derp': 'herp', 'branch': 'production'})

    def test_with_stage_flow_nodes(self):
        # 'stageFlowNodes' entries are stripped from each stage.
        raw = {
            'id': 1,
            'derp': 'herp',
            'branch': 'production',
            'stages': [
                {'name': 'stage1'},
                {'name': 'stage2', 'stageFlowNodes': ['derp', 'herp']}
            ]
        }
        expected = {
            'id': 1,
            'derp': 'herp',
            'branch': 'production',
            'stages': [
                {'name': 'stage1'},
                {'name': 'stage2'}
            ]
        }
        self._assert_cooks_to(raw, expected)
"""
InsCookedEvent
"""
class TestInsCookedEventGetKey(unittest.TestCase):
    """Checks the pipeline:branch:id:timestamp key of a cooked INS event."""

    def test(self):
        fields = {'pipeline': 'Core1', 'branch': 'master', 'id': 666, 'timestamp': 123456789, 'other1': 'herpy', 'other2': 'derpy'}
        self.assertEqual(process.InsCookedEvent(fields).get_key(), 'Core1:master:666:123456789')
"""
VicRawEvent
"""
class TestVicRawEventCook(unittest.TestCase):
    # TODO: tests for VicRawEvent.cook() have not been written yet.
    pass
class TestVicRawEventIsBuilding(unittest.TestCase):
    # TODO: tests for VicRawEvent.is_building() have not been written yet.
    pass
"""
VicCookedEvent
"""
class TestVicCookedEventGetKey(unittest.TestCase):
    """Checks the id:timestamp key of a cooked VIC event."""

    def test(self):
        fields = {'id': 666, 'timestamp': 123456789, 'other1': 'herpy', 'other2': 'derpy'}
        self.assertEqual(process.VicCookedEvent(fields).get_key(), '666:123456789')
"""
VicStatusRawEvent
"""
class TestVicStatusRawEventCook(unittest.TestCase):
    """Tests for VicStatusRawEvent.cook(): timestamp stamping of status entries."""

    def test(self):
        raw = [
            {'id': 1, 'ci_allocation': 'derp (1 of 2)'},
            {'id': 2, 'ci_allocation': 'derp (2 of 2)', 'timestamp': 'derp'}
        ]
        # Entries without a timestamp get the supplied one; existing values win.
        expected = [
            {'id': 1, 'ci_allocation': 'derp (1 of 2)', 'timestamp': 123},
            {'id': 2, 'ci_allocation': 'derp (2 of 2)', 'timestamp': 'derp'}
        ]
        self.assertEqual(expected, process.VicStatusRawEvent(raw).cook(timestamp=123))
"""
VicStatusCookedEvent
"""
class TestVicStatusCookedEventGetKey(unittest.TestCase):
    """Checks the timestamp:ci_allocation key of a cooked VIC status event."""

    def test(self):
        fields = {'id': 666, 'timestamp': 123456789, 'ci_allocation': 'derpy (0 of 12)', 'other1': 'herpy', 'other2': 'derpy'}
        self.assertEqual('123456789:derpy (0 of 12)', process.VicStatusCookedEvent(fields).get_key())
"""
CqCookedEvent
"""
class TestCqCookedEventGetKey(unittest.TestCase):
    """Checks CqCookedEvent key composition for each change-record type."""

    def test_add(self):
        fields = {'type': 'add', 'dr_id': 1, 'timestamp': 2}
        self.assertEqual('add:1:2', process.CqCookedEvent(fields).get_key())

    def test_other(self):
        # Non-'modify' types ignore the change_field/before/after extras.
        fields = {'type': 'derp', 'dr_id': 1, 'timestamp': 2, 'change_field': 'greeting', 'before': 'hi', 'after': 'hello', 'derp': 'herp'}
        self.assertEqual('derp:1:2', process.CqCookedEvent(fields).get_key())

    def test_modify(self):
        # 'modify' events append the field name plus before/after values.
        fields = {'type': 'modify', 'dr_id': 1, 'timestamp': 2, 'change_field': 'greeting', 'before': 'hi', 'after': 'hello'}
        self.assertEqual('modify:1:2:greeting:hi:hello', process.CqCookedEvent(fields).get_key())
| 33.544335
| 290
| 0.56255
| 19,003
| 174,397
| 4.895227
| 0.038941
| 0.038184
| 0.009374
| 0.024327
| 0.81566
| 0.763384
| 0.720835
| 0.683566
| 0.645436
| 0.618099
| 0
| 0.018021
| 0.302849
| 174,397
| 5,198
| 291
| 33.550789
| 0.747099
| 0.006743
| 0
| 0.606286
| 0
| 0.001485
| 0.174546
| 0.056633
| 0
| 0
| 0
| 0.000385
| 0.08191
| 1
| 0.09948
| false
| 0.009651
| 0.003959
| 0.002227
| 0.128681
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
bbad43026e4413699de8b53d21ce53ffe9a4ce2d
| 70
|
py
|
Python
|
waitlist/blueprints/api/fittings/blueprint.py
|
kimnnmadsen/eve-inc-waitlist
|
c3e4853c5563a95edbf105c11e73d481595fb3ab
|
[
"MIT"
] | null | null | null |
waitlist/blueprints/api/fittings/blueprint.py
|
kimnnmadsen/eve-inc-waitlist
|
c3e4853c5563a95edbf105c11e73d481595fb3ab
|
[
"MIT"
] | 1
|
2020-02-18T05:11:20.000Z
|
2020-02-18T05:29:10.000Z
|
waitlist/blueprints/api/fittings/blueprint.py
|
kimnnmadsen/eve-inc-waitlist
|
c3e4853c5563a95edbf105c11e73d481595fb3ab
|
[
"MIT"
] | null | null | null |
from flask import Blueprint

# Blueprint grouping the fittings API endpoints; registered by the application factory.
bp = Blueprint('api_fittings', __name__)
| 17.5
| 40
| 0.785714
| 9
| 70
| 5.555556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128571
| 70
| 3
| 41
| 23.333333
| 0.819672
| 0
| 0
| 0
| 0
| 0
| 0.171429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 4
|
bbae9cddd1a46d201d7326b9c52304b9ac769e60
| 181
|
py
|
Python
|
optimism/test/testJaxConfig.py
|
btalamini/optimism
|
023e1b2a0b137900a7517e4c7ac5056255cf7bbe
|
[
"MIT"
] | null | null | null |
optimism/test/testJaxConfig.py
|
btalamini/optimism
|
023e1b2a0b137900a7517e4c7ac5056255cf7bbe
|
[
"MIT"
] | 1
|
2022-03-12T00:01:12.000Z
|
2022-03-12T00:01:12.000Z
|
optimism/test/testJaxConfig.py
|
btalamini/optimism
|
023e1b2a0b137900a7517e4c7ac5056255cf7bbe
|
[
"MIT"
] | 3
|
2021-12-23T19:53:31.000Z
|
2022-03-27T23:12:03.000Z
|
from optimism.JaxConfig import *
from optimism.test.TestFixture import *
class TestDebugIsOff(TestFixture):
    """Verify that the global jaxDebug flag is disabled by default."""

    def test_debug_if_off(self):
        # assertFalse reports the offending value on failure, unlike
        # assertTrue(not ...) which only reports 'False is not true'.
        self.assertFalse(jaxDebug)
| 22.625
| 39
| 0.767956
| 22
| 181
| 6.181818
| 0.727273
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154696
| 181
| 7
| 40
| 25.857143
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
bbd22d0535b981da2ebb8a40ff270588b1d82203
| 174
|
py
|
Python
|
wiki/forms.py
|
MariomcgeeArt/makewiki
|
69226910b6c136ecd3277faa4095384336fef406
|
[
"MIT"
] | null | null | null |
wiki/forms.py
|
MariomcgeeArt/makewiki
|
69226910b6c136ecd3277faa4095384336fef406
|
[
"MIT"
] | 6
|
2020-06-06T01:51:57.000Z
|
2022-02-10T11:26:02.000Z
|
wiki/forms.py
|
MariomcgeeArt/makewiki
|
69226910b6c136ecd3277faa4095384336fef406
|
[
"MIT"
] | null | null | null |
from django import forms
from wiki.models import Page
class PageForm(forms.ModelForm):
    """ModelForm exposing the user-editable fields of a wiki Page."""

    class Meta:
        model = Page
        # Only these model fields are rendered and validated by the form.
        fields = ['title', 'author', 'content']
| 19.333333
| 47
| 0.655172
| 21
| 174
| 5.428571
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241379
| 174
| 8
| 48
| 21.75
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
bbe9d4f86ad71ae1a8da11868d53403988f897d5
| 997
|
py
|
Python
|
pava/implementation/natives/sun/management/OperatingSystemImpl.py
|
laffra/pava
|
54d10cf7f8def2f96e254c0356623d08f221536f
|
[
"MIT"
] | 4
|
2017-03-30T16:51:16.000Z
|
2020-10-05T12:25:47.000Z
|
pava/implementation/natives/sun/management/OperatingSystemImpl.py
|
laffra/pava
|
54d10cf7f8def2f96e254c0356623d08f221536f
|
[
"MIT"
] | null | null | null |
pava/implementation/natives/sun/management/OperatingSystemImpl.py
|
laffra/pava
|
54d10cf7f8def2f96e254c0356623d08f221536f
|
[
"MIT"
] | null | null | null |
def add_native_methods(clazz):
    """Attach stub implementations of OperatingSystemImpl's native methods.

    Every generated stub raises NotImplementedError when invoked, exactly
    like the original hand-written placeholders. The duplicated
    define-then-assign pairs are replaced with a single loop over the
    method names, so adding a new native method is a one-line change.

    Args:
        clazz: the class object to receive the stub attributes.
    """
    # Names of the JVM native methods this module must provide.
    native_names = (
        'getTotalSwapSpaceSize____',
        'getFreeSwapSpaceSize____',
        'getProcessCpuTime____',
        'getFreePhysicalMemorySize____',
        'getTotalPhysicalMemorySize____',
        'getSystemCpuLoad____',
        'getProcessCpuLoad____',
    )

    def _make_stub(name):
        # Build one unimplemented native method; 'a0' matches the original arity.
        def stub(a0):
            raise NotImplementedError()
        stub.__name__ = name
        return stub

    for name in native_names:
        setattr(clazz, name, _make_stub(name))
| 32.16129
| 73
| 0.776329
| 61
| 997
| 11.278689
| 0.245902
| 0.071221
| 0.264535
| 0.252907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008475
| 0.171515
| 997
| 30
| 74
| 33.233333
| 0.824455
| 0
| 0
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0
| 0
| 0
| 0.363636
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
bbf3dd2205a7a7f5b4ed1af44af8e62336f27a25
| 248
|
py
|
Python
|
desafios/Ex002.py
|
FelipeAlafy/Python
|
da2374e55e8aa84e4ca6d9c7bf8dafeb546a4742
|
[
"MIT"
] | null | null | null |
desafios/Ex002.py
|
FelipeAlafy/Python
|
da2374e55e8aa84e4ca6d9c7bf8dafeb546a4742
|
[
"MIT"
] | null | null | null |
desafios/Ex002.py
|
FelipeAlafy/Python
|
da2374e55e8aa84e4ca6d9c7bf8dafeb546a4742
|
[
"MIT"
] | null | null | null |
# Prompt for the user's birth day, month, and year (prompts are in Portuguese).
dia = input("Em que dia você nasceu? ")
mes = input("Em que mês você nasceu? ")
ano = input("Em que ano você nasceu? ")
# Echo the date back, wrapping each part in ANSI color escapes (\033[..m).
print("Você nasceu no dia \033[32m", dia, "\033[m no mês de \033[34m", mes, "\033[m e no ano de \033[33m", ano, "\033[m😆️😆️😆️")
| 49.6
| 127
| 0.612903
| 49
| 248
| 3.22449
| 0.387755
| 0.253165
| 0.189873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 0.177419
| 248
| 4
| 128
| 62
| 0.627451
| 0
| 0
| 0
| 0
| 0
| 0.657258
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
bbf4e04df68bdc02fd718db4669374fc11669974
| 7,866
|
py
|
Python
|
tests/test_search_views.py
|
Kinto/kinto-algolia
|
f7bfdf795dc2073a69b7c31df1e650e2b51d05d7
|
[
"Apache-2.0"
] | null | null | null |
tests/test_search_views.py
|
Kinto/kinto-algolia
|
f7bfdf795dc2073a69b7c31df1e650e2b51d05d7
|
[
"Apache-2.0"
] | 35
|
2018-04-11T15:14:42.000Z
|
2021-06-25T15:18:04.000Z
|
tests/test_search_views.py
|
Kinto/kinto-algolia
|
f7bfdf795dc2073a69b7c31df1e650e2b51d05d7
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from unittest import mock
from algoliasearch.exceptions import AlgoliaException
from kinto.core.testing import get_user_headers
from . import BaseWebTest
class SearchView(BaseWebTest, unittest.TestCase):
    """Tests for the /buckets/.../collections/.../search endpoint backed by Algolia."""

    def setUp(self):
        # Each test runs against a freshly created bucket/collection pair.
        self.app.put("/buckets/bid", headers=self.headers)
        self.app.put("/buckets/bid/collections/cid", headers=self.headers)

    def test_search_response_error_400_indexer_fails(self):
        """An AlgoliaException raised by the indexer client surfaces as HTTP 400."""
        with mock.patch.object(self.app.app.registry.indexer, "client") as client:
            client.init_index.return_value.search.side_effect = AlgoliaException
            self.app.post("/buckets/bid/collections/cid/search", headers=self.headers, status=400)

    def test_search_response_error_400_with_wrong_body(self):
        """A non-JSON request body is rejected with HTTP 400."""
        self.app.post("/buckets/bid/collections/cid/search", 'blah',
                      headers=self.headers, status=400)

    def test_invalid_search_query(self):
        """Unknown Algolia query parameters are named in the error message."""
        body = {"whatever": {"wrong": "bad"}}
        resp = self.app.post_json("/buckets/bid/collections/cid/search",
                                  body,
                                  headers=self.headers,
                                  status=400)
        assert resp.json["message"] == "Algolia error in body: Unknown parameter: whatever"

    def test_search_on_empty_collection_returns_empty_list(self):
        """Searching an empty collection yields zero hits, not an error."""
        resp = self.app.post("/buckets/bid/collections/cid/search",
                             headers=self.headers)
        result = resp.json
        assert len(result["hits"]) == 0

    def test_querystring_search_is_supported(self):
        """GET with ?filters=... narrows the hits; indexer.join() waits for indexing."""
        self.app.post_json("/buckets/bid/collections/cid/records",
                           {"data": {"age": 12}}, headers=self.headers)
        self.app.post_json("/buckets/bid/collections/cid/records",
                           {"data": {"age": 21}}, headers=self.headers)
        self.indexer.join()
        resp = self.app.get("/buckets/bid/collections/cid/search?filters=age<15",
                            headers=self.headers)
        result = resp.json
        assert len(result["hits"]) == 1
        assert result["hits"][0]["age"] == 12

    def test_empty_querystring_returns_all_results(self):
        """GET without filters returns every indexed record."""
        self.app.post_json("/buckets/bid/collections/cid/records",
                           {"data": {"age": 12}}, headers=self.headers)
        self.app.post_json("/buckets/bid/collections/cid/records",
                           {"data": {"age": 21}}, headers=self.headers)
        self.indexer.join()
        resp = self.app.get("/buckets/bid/collections/cid/search",
                            headers=self.headers)
        result = resp.json
        assert len(result["hits"]) == 2
# ALGOLIA SEARCH DOESN'T SUPPORT LIMITING YET
# https://github.com/algolia/algoliasearch-client-python/issues/365
# class LimitedResults(BaseWebTest, unittest.TestCase):
# def get_app(self, settings):
# app = self.make_app(settings=settings)
# app.put("/buckets/bid", headers=self.headers)
# app.put_json("/buckets/bid/collections/cid",
# {"data": {"algolia:settings": {}}},
# headers=self.headers)
# requests = [{
# "method": "POST",
# "path": "/buckets/bid/collections/cid/records",
# "body": {"data": {"age": i}}
# } for i in range(5)]
# app.post_json("/batch", {"requests": requests}, headers=self.headers)
# sleep(1) # Wait for indexing
# return app
#
# def test_the_number_of_responses_is_limited_by_paginate_by_setting(self):
# app = self.get_app({"paginate_by": 2})
# resp = app.get("/buckets/bid/collections/cid/search", headers=self.headers)
# result = resp.json
# assert len(result["hits"]) == 2
#
# def test_the_number_of_responses_is_limited_by_max_fetch_size_setting(self):
# app = self.get_app({"storage_max_fetch_size": 2})
# resp = app.get("/buckets/bid/collections/cid/search", headers=self.headers)
# result = resp.json
# assert len(result["hits"]) == 2
#
# def test_the_number_of_responses_is_limited_by_smaller_limit(self):
# app = self.get_app({"paginate_by": 4, "storage_max_fetch_size": 2})
# resp = app.get("/buckets/bid/collections/cid/search", headers=self.headers)
# result = resp.json
# assert len(result["hits"]) == 2
#
# def test_the_number_of_responses_is_limited_by_only_defined_limit(self):
# app = self.get_app({"paginate_by": 0, "storage_max_fetch_size": 2})
# resp = app.get("/buckets/bid/collections/cid/search", headers=self.headers)
# result = resp.json
# assert len(result["hits"]) == 2
#
# def test_size_specified_in_query_is_taken_into_account(self):
# app = self.get_app({"paginate_by": 3})
# query = {
# "hitsPerPage": 2
# }
# resp = app.post_json("/buckets/bid/collections/cid/search", query,
# headers=self.headers)
# result = resp.json
# assert len(result["hits"]) == 2
#
# def test_size_specified_in_query_is_caped_by_setting(self):
# app = self.get_app({"paginate_by": 3})
# query = {
# "hitsPerPage": 4
# }
# resp = app.post_json("/buckets/bid/collections/cid/search", query,
# headers=self.headers)
# result = resp.json
# assert len(result["hits"]) == 3
class PermissionsCheck(BaseWebTest, unittest.TestCase):
    """Access-control tests for the per-collection search endpoint."""

    def _grant(self, perms, on_collection=False):
        # Create /buckets/bid and its collection, attaching the permission
        # body either to the bucket or to the collection.
        if on_collection:
            self.app.put("/buckets/bid", headers=self.headers)
            self.app.put_json("/buckets/bid/collections/cid", {"permissions": perms},
                              headers=self.headers)
        else:
            self.app.put_json("/buckets/bid", {"permissions": perms}, headers=self.headers)
            self.app.put("/buckets/bid/collections/cid", headers=self.headers)

    def test_search_is_allowed_if_write_on_bucket(self):
        self._grant({"write": ["system.Everyone"]})
        self.app.post("/buckets/bid/collections/cid/search", status=200)

    def test_search_is_allowed_if_read_on_bucket(self):
        self._grant({"read": ["system.Everyone"]})
        self.app.post("/buckets/bid/collections/cid/search", status=200)

    def test_search_is_allowed_if_write_on_collection(self):
        self._grant({"write": ["system.Everyone"]}, on_collection=True)
        self.app.post("/buckets/bid/collections/cid/search", status=200)

    def test_search_is_allowed_if_read_on_collection(self):
        self._grant({"read": ["system.Everyone"]}, on_collection=True)
        self.app.post("/buckets/bid/collections/cid/search", status=200)

    def test_search_is_not_allowed_by_default(self):
        self.app.put("/buckets/bid", headers=self.headers)
        self.app.put("/buckets/bid/collections/cid", headers=self.headers)
        # Anonymous callers get 401; an authenticated stranger gets 403.
        self.app.post("/buckets/bid/collections/cid/search", status=401)
        other = get_user_headers("cual", "quiera")
        self.app.post("/buckets/bid/collections/cid/search", status=403, headers=other)

    def test_search_is_not_allowed_if_only_read_on_certain_records(self):
        self._grant({"record:create": ["system.Authenticated"]}, on_collection=True)
        other = get_user_headers("toto")
        self.app.post_json("/buckets/bid/collections/cid/records", {"data": {"pi": 42}},
                           headers=other)
        self.app.post("/buckets/bid/collections/cid/search", status=403, headers=other)
| 46
| 98
| 0.627511
| 960
| 7,866
| 4.95625
| 0.164583
| 0.108659
| 0.124842
| 0.166456
| 0.742119
| 0.725725
| 0.702606
| 0.677806
| 0.638714
| 0.621269
| 0
| 0.012331
| 0.226799
| 7,866
| 170
| 99
| 46.270588
| 0.769977
| 0.338037
| 0
| 0.47619
| 0
| 0
| 0.242224
| 0.164463
| 0
| 0
| 0
| 0
| 0.059524
| 1
| 0.154762
| false
| 0
| 0.059524
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a5259ebc7f918353a34189a75b2eb75858babbaf
| 173
|
py
|
Python
|
src/colorise/error.py
|
MisanthropicBit/colorise
|
c7a7e3d4b224e80f39761edfc10e5676b610ba41
|
[
"BSD-3-Clause"
] | 2
|
2016-02-07T19:58:46.000Z
|
2022-03-28T12:26:57.000Z
|
src/colorise/error.py
|
MisanthropicBit/colorise
|
c7a7e3d4b224e80f39761edfc10e5676b610ba41
|
[
"BSD-3-Clause"
] | 5
|
2018-05-25T04:36:11.000Z
|
2021-01-18T19:08:04.000Z
|
src/colorise/error.py
|
MisanthropicBit/colorise
|
c7a7e3d4b224e80f39761edfc10e5676b610ba41
|
[
"BSD-3-Clause"
] | 2
|
2018-03-04T21:57:03.000Z
|
2022-03-28T12:25:54.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Custom colorise exceptions."""
class NotSupportedError(Exception):
    """Signal that a requested piece of functionality is unavailable."""
| 19.222222
| 53
| 0.676301
| 19
| 173
| 6.157895
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006757
| 0.144509
| 173
| 8
| 54
| 21.625
| 0.783784
| 0.65896
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
a527c83fa3ccf5cbfdf63ae4fb1d8ed50121da00
| 43
|
py
|
Python
|
imutils/ml/optimizer/__init__.py
|
JacobARose/image-utils
|
aa0e005c0b4df5198d188b074f4e21f8d8f97962
|
[
"MIT"
] | null | null | null |
imutils/ml/optimizer/__init__.py
|
JacobARose/image-utils
|
aa0e005c0b4df5198d188b074f4e21f8d8f97962
|
[
"MIT"
] | null | null | null |
imutils/ml/optimizer/__init__.py
|
JacobARose/image-utils
|
aa0e005c0b4df5198d188b074f4e21f8d8f97962
|
[
"MIT"
] | null | null | null |
"""
imutils/ml/optimizer/__init__.py
"""
| 7.166667
| 32
| 0.651163
| 5
| 43
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 43
| 5
| 33
| 8.6
| 0.631579
| 0.744186
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a53a220e8b1a9ecb7ef773cdf896dcc1cac11f87
| 89
|
py
|
Python
|
samples/generic/apps.py
|
falinwang/dj4e-samples
|
f55f021a2901195975a7375000e9f2b7564f82ad
|
[
"MIT"
] | null | null | null |
samples/generic/apps.py
|
falinwang/dj4e-samples
|
f55f021a2901195975a7375000e9f2b7564f82ad
|
[
"MIT"
] | 5
|
2020-06-05T23:22:56.000Z
|
2022-02-10T10:19:46.000Z
|
samples/generic/apps.py
|
falinwang/dj4e-samples
|
f55f021a2901195975a7375000e9f2b7564f82ad
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig


class GenericConfig(AppConfig):
    """Django application configuration for the 'generic' sample app."""

    # App label used by Django's app registry.
    name = 'generic'
| 14.833333
| 33
| 0.752809
| 10
| 89
| 6.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168539
| 89
| 5
| 34
| 17.8
| 0.905405
| 0
| 0
| 0
| 0
| 0
| 0.078652
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
a559e1b92a72fda3626b6284ff4891dda2fbf822
| 520
|
py
|
Python
|
textacy/math_utils.py
|
ckot/textacy
|
b5d372650f22feae8e2d945d2ee1b8e1aa93441f
|
[
"Apache-2.0"
] | 2
|
2021-04-13T16:00:30.000Z
|
2022-03-15T00:36:48.000Z
|
textacy/math_utils.py
|
ckot/textacy
|
b5d372650f22feae8e2d945d2ee1b8e1aa93441f
|
[
"Apache-2.0"
] | null | null | null |
textacy/math_utils.py
|
ckot/textacy
|
b5d372650f22feae8e2d945d2ee1b8e1aa93441f
|
[
"Apache-2.0"
] | null | null | null |
"""
Set of small utility functions that do mathy stuff.
"""
from __future__ import division
import numpy as np
# TODO: make this module actually good and useful
# UPDATE: this module is still an orphan. Burton, get on it!
def cosine_similarity(vec1, vec2):
    """Compute the cosine of the angle between two vectors.

    Args:
        vec1 (:class:`numpy.array`)
        vec2 (:class:`numpy.array`)

    Returns:
        float: dot(vec1, vec2) divided by the product of the vector norms.
    """
    numerator = np.dot(vec1, vec2)
    denominator = np.linalg.norm(vec1) * np.linalg.norm(vec2)
    return numerator / denominator
| 21.666667
| 77
| 0.659615
| 72
| 520
| 4.694444
| 0.708333
| 0.059172
| 0.088757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.230769
| 520
| 23
| 78
| 22.608696
| 0.825
| 0.576923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
a55c96a04bb04ebdcfd12e6f046c0ee15f77d69e
| 181
|
py
|
Python
|
src/livestreamer/packages/flashmedia/error.py
|
jaccarmac/livestreamer
|
ab80dbd6560f6f9835865b2fc9f9c6015aee5658
|
[
"BSD-2-Clause",
"MIT"
] | 3,614
|
2015-01-01T08:07:27.000Z
|
2022-03-20T00:31:07.000Z
|
src/livestreamer/packages/flashmedia/error.py
|
kviktor/livestreamer
|
ab80dbd6560f6f9835865b2fc9f9c6015aee5658
|
[
"BSD-2-Clause",
"MIT"
] | 1,028
|
2015-01-02T03:38:38.000Z
|
2021-08-06T16:17:48.000Z
|
src/livestreamer/packages/flashmedia/error.py
|
kviktor/livestreamer
|
ab80dbd6560f6f9835865b2fc9f9c6015aee5658
|
[
"BSD-2-Clause",
"MIT"
] | 795
|
2015-01-02T06:12:04.000Z
|
2022-03-27T23:41:53.000Z
|
#!/usr/bin/env python
class FLVError(Exception):
    """Raised on malformed or unexpected FLV data."""


class F4VError(Exception):
    """Raised on malformed or unexpected F4V data."""


class AMFError(Exception):
    """Raised on malformed or unexpected AMF data."""


__all__ = ["FLVError", "F4VError", "AMFError"]
| 13.923077
| 46
| 0.685083
| 20
| 181
| 6
| 0.55
| 0.325
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013423
| 0.176796
| 181
| 12
| 47
| 15.083333
| 0.791946
| 0.110497
| 0
| 0.428571
| 0
| 0
| 0.15
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.428571
| 0
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
a583e3384501cd867af7c09558d58b254861daa2
| 168
|
py
|
Python
|
my_classes/.history/ModulesPackages_PackageNamespaces/ImportingModules_20210725180951.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
my_classes/.history/ModulesPackages_PackageNamespaces/ImportingModules_20210725180951.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
my_classes/.history/ModulesPackages_PackageNamespaces/ImportingModules_20210725180951.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
""" Importing modules
When we run a statement such as
import fractions
what is python doing
The first thing to note is that Python
"""
| 21
| 45
| 0.619048
| 23
| 168
| 4.521739
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.345238
| 168
| 8
| 46
| 21
| 0.945455
| 0.803571
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a59ca8663434cc482993707d749a06bd34ddb9c9
| 97
|
py
|
Python
|
profilesapi/apps.py
|
diyajaiswal11/Profile-rest-api
|
b5acbd97be7b1186c9ee9063a96efd1148419229
|
[
"MIT"
] | 3
|
2020-06-04T05:22:39.000Z
|
2020-09-23T19:44:07.000Z
|
profilesapi/apps.py
|
diyajaiswal11/Profile-rest-api
|
b5acbd97be7b1186c9ee9063a96efd1148419229
|
[
"MIT"
] | null | null | null |
profilesapi/apps.py
|
diyajaiswal11/Profile-rest-api
|
b5acbd97be7b1186c9ee9063a96efd1148419229
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig


class ProfilesapiConfig(AppConfig):
    """Django application configuration for the 'profilesapi' app."""

    # App label used by Django's app registry.
    name = 'profilesapi'
| 16.166667
| 35
| 0.773196
| 10
| 97
| 7.5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154639
| 97
| 5
| 36
| 19.4
| 0.914634
| 0
| 0
| 0
| 0
| 0
| 0.113402
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
a5a5c8419e43a7a96e837a01dc1aa815921ceae8
| 495
|
py
|
Python
|
tests/yodahelpers/hydratk/extensions/testenv/helpers.py
|
hydratk/hydratk-ext-testenv
|
90eea9c460cc206781154cb541ed0fb8b2b292f3
|
[
"BSD-3-Clause"
] | null | null | null |
tests/yodahelpers/hydratk/extensions/testenv/helpers.py
|
hydratk/hydratk-ext-testenv
|
90eea9c460cc206781154cb541ed0fb8b2b292f3
|
[
"BSD-3-Clause"
] | null | null | null |
tests/yodahelpers/hydratk/extensions/testenv/helpers.py
|
hydratk/hydratk-ext-testenv
|
90eea9c460cc206781154cb541ed0fb8b2b292f3
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""This code is part of TestEnv extension
.. module:: yodahelpers.testenv.helpers
:platform: Unix
:synopsis: TestEnv helpers
.. moduleauthor:: Petr Rašek <bowman@hydratk.org>
"""
from yodalib.hydratk.extensions.testenv.db_int import DB_INT as db
from yodalib.hydratk.extensions.testenv.rest_int import REST_INT as rest
from yodalib.hydratk.extensions.testenv.soap_int import SOAP_INT as soap
from yodalib.hydratk.extensions.testenv.gui_int import GUI_INT as gui
| 33
| 72
| 0.781818
| 73
| 495
| 5.191781
| 0.438356
| 0.116095
| 0.189974
| 0.295515
| 0.369393
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002294
| 0.119192
| 495
| 14
| 73
| 35.357143
| 0.866972
| 0.406061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3c1ebe1880b5702bed7a1fc425a215730358e5af
| 852
|
py
|
Python
|
apps/admin/serializers/conference.py
|
panla/kesousou
|
df6751b0a2e8eeec5122418cd29fe1f99e23f39e
|
[
"MIT"
] | 1
|
2021-04-27T01:18:45.000Z
|
2021-04-27T01:18:45.000Z
|
apps/admin/serializers/conference.py
|
panla/kesousou
|
df6751b0a2e8eeec5122418cd29fe1f99e23f39e
|
[
"MIT"
] | 5
|
2020-08-26T14:47:47.000Z
|
2021-03-22T08:56:43.000Z
|
apps/custom/serializers/conference.py
|
panla/kesousou
|
df6751b0a2e8eeec5122418cd29fe1f99e23f39e
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from model.models import Conference
class ConferenceListSerializer(serializers.ModelSerializer):
sponsors = serializers.ListField(required=False)
classifications = serializers.ListField(required=False)
class Meta:
model = Conference
fields = [
'id', 'title', 'first_creator', 'sponsors', 'classifications', 'experts'
]
class ConferenceDetailSerializer(serializers.ModelSerializer):
keywords = serializers.ListField(required=False)
creators = serializers.ListField()
organizations = serializers.ListField()
sponsors = serializers.ListField(required=False)
classifications = serializers.ListField(required=False)
class Meta:
model = Conference
exclude = [
'original_id', 'created_at', 'updated_at'
]
| 29.37931
| 84
| 0.704225
| 72
| 852
| 8.263889
| 0.444444
| 0.235294
| 0.235294
| 0.277311
| 0.379832
| 0.379832
| 0.379832
| 0.379832
| 0.379832
| 0.379832
| 0
| 0
| 0.205399
| 852
| 28
| 85
| 30.428571
| 0.878877
| 0
| 0
| 0.380952
| 0
| 0
| 0.09507
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.095238
| 0
| 0.619048
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
3c254cf982c779713820f4b14026e6d4d8af8e01
| 146
|
py
|
Python
|
QCDAnalysis/Skimming/python/QCDHLT1jetPath_cff.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 6
|
2017-09-08T14:12:56.000Z
|
2022-03-09T23:57:01.000Z
|
QCDAnalysis/Skimming/python/QCDHLT1jetPath_cff.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 545
|
2017-09-19T17:10:19.000Z
|
2022-03-07T16:55:27.000Z
|
QCDAnalysis/Skimming/python/QCDHLT1jetPath_cff.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 14
|
2017-10-04T09:47:21.000Z
|
2019-10-23T18:04:45.000Z
|
import FWCore.ParameterSet.Config as cms
from QCDAnalysis.Skimming.QCDHLT1jetSkim_cfi import *
QCDHLT1jetSkimpath = cms.Path(QCDHLT1jetTrigger)
| 24.333333
| 53
| 0.849315
| 16
| 146
| 7.6875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022556
| 0.089041
| 146
| 5
| 54
| 29.2
| 0.902256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
3c2c7a626f57aef532ceed0db6bf40e89ce2bfef
| 46
|
py
|
Python
|
000818CoursPyGusto/Coursera000818PyBasicsHSEw01TASK007_firstNum_20200508.py
|
SafonovMikhail/python_000577
|
739f764e80f1ca354386f00b8e9db1df8c96531d
|
[
"Apache-2.0"
] | null | null | null |
000818CoursPyGusto/Coursera000818PyBasicsHSEw01TASK007_firstNum_20200508.py
|
SafonovMikhail/python_000577
|
739f764e80f1ca354386f00b8e9db1df8c96531d
|
[
"Apache-2.0"
] | null | null | null |
000818CoursPyGusto/Coursera000818PyBasicsHSEw01TASK007_firstNum_20200508.py
|
SafonovMikhail/python_000577
|
739f764e80f1ca354386f00b8e9db1df8c96531d
|
[
"Apache-2.0"
] | null | null | null |
firstNum = int(input())
print(firstNum // 10)
| 15.333333
| 23
| 0.673913
| 6
| 46
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.130435
| 46
| 2
| 24
| 23
| 0.725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
3c7cea873f8d63f8786c51259ff90088d28bd244
| 179
|
py
|
Python
|
Security/md5Rand.py
|
CicadaMikoto/Projects
|
ccc3de5184a8dc9fcd108c3ddbe6fd72d6aa380a
|
[
"MIT"
] | 1
|
2021-01-22T07:50:30.000Z
|
2021-01-22T07:50:30.000Z
|
Security/md5Rand.py
|
CicadaMikoto/Projects
|
ccc3de5184a8dc9fcd108c3ddbe6fd72d6aa380a
|
[
"MIT"
] | null | null | null |
Security/md5Rand.py
|
CicadaMikoto/Projects
|
ccc3de5184a8dc9fcd108c3ddbe6fd72d6aa380a
|
[
"MIT"
] | null | null | null |
import string
import md5
for i in xrange(0,2000):
m = md5.new()
m.update('dfksdfkasdjfa' + str(i))
print str(m.hexdigest()) + ",program" + str(i) + "," + str(i) + "source"
| 19.888889
| 74
| 0.603352
| 28
| 179
| 3.857143
| 0.642857
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047945
| 0.184358
| 179
| 8
| 75
| 22.375
| 0.691781
| 0
| 0
| 0
| 0
| 0
| 0.156425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.166667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3c85dc9b0fc41db3b248270790497d5ecdfb6c5f
| 174
|
py
|
Python
|
venv/bin/django-admin.py
|
RodrigoMachado9/django-api
|
65d61f5cd8e2ca4552af8d5bec9c862ce49b07b8
|
[
"MIT"
] | 1
|
2020-06-29T21:14:43.000Z
|
2020-06-29T21:14:43.000Z
|
venv/bin/django-admin.py
|
RodrigoMachado9/django_api
|
65d61f5cd8e2ca4552af8d5bec9c862ce49b07b8
|
[
"MIT"
] | 1
|
2020-06-22T01:54:45.000Z
|
2020-06-22T01:54:45.000Z
|
venv/bin/django-admin.py
|
RodrigoMachado9/django_api
|
65d61f5cd8e2ca4552af8d5bec9c862ce49b07b8
|
[
"MIT"
] | null | null | null |
#!/home/rmachado/PycharmProjects/pontos_turisticos/venv/bin/python3
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| 29
| 67
| 0.810345
| 21
| 174
| 6.142857
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006289
| 0.086207
| 174
| 5
| 68
| 34.8
| 0.805031
| 0.37931
| 0
| 0
| 0
| 0
| 0.074766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3c9755c0350ed6bc8db44ed80c5fbc23dcb16f67
| 22
|
py
|
Python
|
aliyun-python-sdk-cloudphoto/aliyunsdkcloudphoto/__init__.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-cloudphoto/aliyunsdkcloudphoto/__init__.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-cloudphoto/aliyunsdkcloudphoto/__init__.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
__version__ = "1.1.19"
| 22
| 22
| 0.681818
| 4
| 22
| 2.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.090909
| 22
| 1
| 22
| 22
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b1cbcff7705cd351504c5f7948aaebbb50f7ba6f
| 246
|
py
|
Python
|
ansys_corba/omniORB/COS/CosReference__POA/__init__.py
|
pyansys/ansys_corba
|
91e4e66a48143c827f56cf1113145bb48d5f4d6a
|
[
"MIT"
] | 6
|
2021-04-26T09:25:48.000Z
|
2022-03-26T05:09:38.000Z
|
ansys_corba/omniORB/COS/CosReference__POA/__init__.py
|
pyansys/ansys_corba
|
91e4e66a48143c827f56cf1113145bb48d5f4d6a
|
[
"MIT"
] | 3
|
2022-03-14T08:17:21.000Z
|
2022-03-17T20:07:23.000Z
|
ansys_corba/omniORB/COS/CosReference__POA/__init__.py
|
pyansys/pymapdl-corba
|
91e4e66a48143c827f56cf1113145bb48d5f4d6a
|
[
"MIT"
] | 1
|
2020-11-11T11:10:19.000Z
|
2020-11-11T11:10:19.000Z
|
# DO NOT EDIT THIS FILE!
#
# Python module CosReference__POA generated by omniidl
import omniORB
omniORB.updateModule("CosReference__POA")
# ** 1. Stub files contributing to this module
import CosReference_idl
# ** 2. Sub-modules
# ** 3. End
| 17.571429
| 54
| 0.747967
| 33
| 246
| 5.424242
| 0.787879
| 0.167598
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014563
| 0.162602
| 246
| 13
| 55
| 18.923077
| 0.854369
| 0.601626
| 0
| 0
| 1
| 0
| 0.186813
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b1ee38230915a65d7f1a3fb4afbac250cc50ae10
| 151
|
py
|
Python
|
psyrun/tests/tasks/task_square_h5.py
|
jgosmann/psyrun
|
42f58223f864005c9ba427fcc0ccf44bd94df1e8
|
[
"MIT"
] | 2
|
2017-07-03T20:03:08.000Z
|
2018-04-19T20:36:17.000Z
|
psyrun/tests/tasks/task_square_h5.py
|
jgosmann/psyrun
|
42f58223f864005c9ba427fcc0ccf44bd94df1e8
|
[
"MIT"
] | 67
|
2015-02-19T02:11:14.000Z
|
2018-04-20T15:51:57.000Z
|
psyrun/tests/tasks/task_square_h5.py
|
jgosmann/psyrun
|
42f58223f864005c9ba427fcc0ccf44bd94df1e8
|
[
"MIT"
] | 1
|
2015-11-13T16:54:16.000Z
|
2015-11-13T16:54:16.000Z
|
from psyrun import Param
from psyrun.store.h5 import H5Store
pspace = Param(x=range(4))
store = H5Store()
def execute(x):
return {'y': x ** 2}
| 13.727273
| 35
| 0.668874
| 24
| 151
| 4.208333
| 0.666667
| 0.19802
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040984
| 0.192053
| 151
| 10
| 36
| 15.1
| 0.786885
| 0
| 0
| 0
| 0
| 0
| 0.006623
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
b1eee9447700dad420a5ef52a73e8993522ff99c
| 111
|
py
|
Python
|
PA6/sport.py
|
tordisuna/SC-T-201-GSKI
|
1e89e5b31e7d74aeecae3dffe2df7ac9e8bb40f2
|
[
"MIT"
] | null | null | null |
PA6/sport.py
|
tordisuna/SC-T-201-GSKI
|
1e89e5b31e7d74aeecae3dffe2df7ac9e8bb40f2
|
[
"MIT"
] | null | null | null |
PA6/sport.py
|
tordisuna/SC-T-201-GSKI
|
1e89e5b31e7d74aeecae3dffe2df7ac9e8bb40f2
|
[
"MIT"
] | 1
|
2021-02-12T11:36:53.000Z
|
2021-02-12T11:36:53.000Z
|
from dataclasses import dataclass, field, asdict
@dataclass(compare=True)
class Sport(object):
name: str
| 15.857143
| 48
| 0.756757
| 14
| 111
| 6
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153153
| 111
| 6
| 49
| 18.5
| 0.893617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
5920a642019ecf310d062feef1490df676193336
| 84
|
py
|
Python
|
code-snippet/python/src/com/codesnippet/ml/__init__.py
|
zhoujiagen/ml_hacks
|
6e68e5227c472562f1c6330fba726a92bf4a7499
|
[
"MIT"
] | null | null | null |
code-snippet/python/src/com/codesnippet/ml/__init__.py
|
zhoujiagen/ml_hacks
|
6e68e5227c472562f1c6330fba726a92bf4a7499
|
[
"MIT"
] | null | null | null |
code-snippet/python/src/com/codesnippet/ml/__init__.py
|
zhoujiagen/ml_hacks
|
6e68e5227c472562f1c6330fba726a92bf4a7499
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
@author: zhoujiagen
Created on 2020/8/21 5:14 PM
"""
| 9.333333
| 28
| 0.559524
| 13
| 84
| 3.615385
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164179
| 0.202381
| 84
| 8
| 29
| 10.5
| 0.537313
| 0.845238
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
3cc6c6f451673d8acff6f063ab61b3f8472ee9af
| 121
|
py
|
Python
|
zmonitor/views.py
|
ozelentok/ZMonitor
|
e203798025435cfa4c9f5bc65568c6fea161088e
|
[
"MIT"
] | null | null | null |
zmonitor/views.py
|
ozelentok/ZMonitor
|
e203798025435cfa4c9f5bc65568c6fea161088e
|
[
"MIT"
] | null | null | null |
zmonitor/views.py
|
ozelentok/ZMonitor
|
e203798025435cfa4c9f5bc65568c6fea161088e
|
[
"MIT"
] | null | null | null |
from django.views import generic
class MonitorView(generic.TemplateView):
template_name = 'zmonitor/zmonitor.html'
| 20.166667
| 44
| 0.793388
| 14
| 121
| 6.785714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123967
| 121
| 5
| 45
| 24.2
| 0.896226
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
a702f721396f5b4abf01c9d77f476b58f2b0e4e5
| 49
|
py
|
Python
|
aws_profile_manager/rotate_keys/__init__.py
|
99stealth/aws-profile-switcher
|
bd8f27a31d1dfeb22cd57c00ee196f72cc753ba1
|
[
"MIT"
] | 31
|
2020-04-01T09:05:23.000Z
|
2020-09-30T16:27:18.000Z
|
aws_profile_manager/rotate_keys/__init__.py
|
99stealth/aws-profile-switcher
|
bd8f27a31d1dfeb22cd57c00ee196f72cc753ba1
|
[
"MIT"
] | 1
|
2020-04-01T13:25:09.000Z
|
2020-04-04T12:38:20.000Z
|
aws_profile_manager/rotate_keys/__init__.py
|
99stealth/aws-profile-switcher
|
bd8f27a31d1dfeb22cd57c00ee196f72cc753ba1
|
[
"MIT"
] | 1
|
2020-05-26T09:55:07.000Z
|
2020-05-26T09:55:07.000Z
|
from .rotate import Rotate
__all__ = ('Rotate',)
| 16.333333
| 26
| 0.714286
| 6
| 49
| 5.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 49
| 3
| 27
| 16.333333
| 0.738095
| 0
| 0
| 0
| 0
| 0
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
a709d0ebc17b6c51bbc27a5be7395b52a7c8db39
| 208
|
py
|
Python
|
Three_Part_Moudule/Selenium/test.py
|
QAlexBall/Learning_Py
|
8a5987946928a9d86f6807555ed435ac604b2c44
|
[
"MIT"
] | 2
|
2019-01-24T15:06:59.000Z
|
2019-01-25T07:34:45.000Z
|
Three_Part_Moudule/Selenium/test.py
|
QAlexBall/Learning_Py
|
8a5987946928a9d86f6807555ed435ac604b2c44
|
[
"MIT"
] | 1
|
2019-12-23T09:45:11.000Z
|
2019-12-23T09:45:11.000Z
|
Three_Part_Moudule/Selenium/test.py
|
QAlexBall/Learning_Py
|
8a5987946928a9d86f6807555ed435ac604b2c44
|
[
"MIT"
] | 1
|
2019-07-18T14:21:35.000Z
|
2019-07-18T14:21:35.000Z
|
from selenium import webdriver
import time
driver = webdriver.Chrome(executable_path="/home/alex/Downloads/SoftWare/chromedriver/chromedriver")
# driver = webdriver.Chrome()
driver.get('https://python.org')
| 29.714286
| 100
| 0.798077
| 25
| 208
| 6.6
| 0.72
| 0.181818
| 0.254545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072115
| 208
| 6
| 101
| 34.666667
| 0.854922
| 0.129808
| 0
| 0
| 0
| 0
| 0.407821
| 0.307263
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
a72345742b2ee3e0f3fc56e5055b67bd3a67592d
| 28
|
py
|
Python
|
app/api/api_v1/endpoints/__init__.py
|
fergusdixon/fastapi-example
|
9e47e6f26dd5db86882f11b9e8eac0971925c019
|
[
"MIT"
] | null | null | null |
app/api/api_v1/endpoints/__init__.py
|
fergusdixon/fastapi-example
|
9e47e6f26dd5db86882f11b9e8eac0971925c019
|
[
"MIT"
] | null | null | null |
app/api/api_v1/endpoints/__init__.py
|
fergusdixon/fastapi-example
|
9e47e6f26dd5db86882f11b9e8eac0971925c019
|
[
"MIT"
] | null | null | null |
"""Routes for V1 router."""
| 14
| 27
| 0.607143
| 4
| 28
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0.142857
| 28
| 1
| 28
| 28
| 0.666667
| 0.75
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
5951a652a18ecb7114615b274d58a3247f57a8b0
| 202
|
py
|
Python
|
cached_contingency/__init__.py
|
MrTomRod/cached-contingency
|
951431218c47ed246ba1fb655b581a99a5cfde00
|
[
"MIT"
] | null | null | null |
cached_contingency/__init__.py
|
MrTomRod/cached-contingency
|
951431218c47ed246ba1fb655b581a99a5cfde00
|
[
"MIT"
] | null | null | null |
cached_contingency/__init__.py
|
MrTomRod/cached-contingency
|
951431218c47ed246ba1fb655b581a99a5cfde00
|
[
"MIT"
] | null | null | null |
from .KeyValueStore import KeyValueStore
from .CachedContingency import CachedContingency
from .CachedBoschloo import CachedBoschloo
from .CachedFisher import CachedFisher
from .utils import odds_ratio
| 33.666667
| 48
| 0.876238
| 21
| 202
| 8.380952
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09901
| 202
| 5
| 49
| 40.4
| 0.967033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
595cab70a4b4bb151be060156881ac34eb2ca9ac
| 166
|
py
|
Python
|
test/integration/assets/mock_data/fixture_beaxy.py
|
vinhtran91/beaxy-bot
|
b07cc148f7eeea11694c762be8933b19d8167bd1
|
[
"Apache-2.0"
] | 2
|
2020-08-09T18:08:17.000Z
|
2020-11-19T03:21:31.000Z
|
test/integration/assets/mock_data/fixture_beaxy.py
|
vinhtran91/beaxy-bot
|
b07cc148f7eeea11694c762be8933b19d8167bd1
|
[
"Apache-2.0"
] | null | null | null |
test/integration/assets/mock_data/fixture_beaxy.py
|
vinhtran91/beaxy-bot
|
b07cc148f7eeea11694c762be8933b19d8167bd1
|
[
"Apache-2.0"
] | 1
|
2020-09-07T14:16:46.000Z
|
2020-09-07T14:16:46.000Z
|
import json
class FixtureBeaxy:
TRADE_WS_DATA = json.dumps({"symbol": "BTCUSDC", "price": 9462.28, "size": 0.001, "side": "SELL", "timestamp": 1590694156771})
| 23.714286
| 130
| 0.668675
| 21
| 166
| 5.190476
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160839
| 0.138554
| 166
| 6
| 131
| 27.666667
| 0.601399
| 0
| 0
| 0
| 0
| 0
| 0.23494
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
59b1afab7d18063ead94ae87e9b184d2f1ffd209
| 528
|
py
|
Python
|
layers/poky/meta-selftest/lib/oeqa/selftest/cases/external-layer.py
|
dtischler/px30-test
|
55dce0b7aff1c4a7dea3ac94f94cc9c67fba7c9f
|
[
"Apache-2.0"
] | 53
|
2018-02-28T08:51:32.000Z
|
2022-02-28T06:49:23.000Z
|
layers/poky/meta-selftest/lib/oeqa/selftest/cases/external-layer.py
|
dtischler/px30-test
|
55dce0b7aff1c4a7dea3ac94f94cc9c67fba7c9f
|
[
"Apache-2.0"
] | 27
|
2018-01-25T00:26:53.000Z
|
2020-08-09T05:20:04.000Z
|
layers/poky/meta-selftest/lib/oeqa/selftest/cases/external-layer.py
|
dtischler/px30-test
|
55dce0b7aff1c4a7dea3ac94f94cc9c67fba7c9f
|
[
"Apache-2.0"
] | 51
|
2018-02-21T04:46:08.000Z
|
2022-03-02T04:20:41.000Z
|
#from oeqa.selftest.base import oeSelfTest
from oeqa.selftest.case import OESelftestTestCase
#from oeqa.utils.decorators import testcase
class ImportedTests(OESelftestTestCase):
def test_unconditional_pass(self):
"""
Summary: Doesn't check anything, used to check import test from other layers.
Expected: 1. Pass unconditionally
Product: oe-core
Author: Mariano Lopez <mariano.lopez@intel.com
"""
self.assertEqual(True, True, msg = "Impossible to fail this test")
| 31.058824
| 85
| 0.708333
| 63
| 528
| 5.904762
| 0.698413
| 0.064516
| 0.086022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00241
| 0.214015
| 528
| 16
| 86
| 33
| 0.893976
| 0.49053
| 0
| 0
| 0
| 0
| 0.127273
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0.25
| 0.5
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
ab79e6da8c3175ede93b3cfd1ecdeb15c18e931d
| 354
|
py
|
Python
|
pycryptax/util.py
|
mgalushka/PyCryptax
|
246117c011a8e9699e178435395a4a38ea024fb5
|
[
"MIT"
] | 8
|
2020-02-10T00:36:52.000Z
|
2021-10-10T22:57:57.000Z
|
pycryptax/util.py
|
mgalushka/PyCryptax
|
246117c011a8e9699e178435395a4a38ea024fb5
|
[
"MIT"
] | null | null | null |
pycryptax/util.py
|
mgalushka/PyCryptax
|
246117c011a8e9699e178435395a4a38ea024fb5
|
[
"MIT"
] | 1
|
2021-11-06T18:08:54.000Z
|
2021-11-06T18:08:54.000Z
|
import datetime, copy
def dateFromString(s):
try:
return datetime.datetime.strptime(s, "%Y-%m-%d")
except ValueError:
return datetime.datetime.strptime(s, "%d %b %Y")
def getPrettyDate(d):
return d.strftime("%d/%m/%Y")
def addToDictKey(d, k, v):
if k in d:
d[k] += v
else:
d[k] = copy.deepcopy(v)
| 18.631579
| 56
| 0.576271
| 52
| 354
| 3.923077
| 0.461538
| 0.029412
| 0.215686
| 0.294118
| 0.303922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259887
| 354
| 18
| 57
| 19.666667
| 0.778626
| 0
| 0
| 0
| 0
| 0
| 0.067989
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.230769
| false
| 0
| 0.076923
| 0.076923
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
ab8bd73c745defb8cd11256f2293b4136ccae44c
| 162
|
py
|
Python
|
rekcurd/console_scripts/errors.py
|
rekcurd/rekcurd-python
|
71e1ecd1f7bf8394563e682308fa7b404492e6da
|
[
"Apache-2.0"
] | 22
|
2019-03-07T12:04:55.000Z
|
2020-08-18T12:34:46.000Z
|
rekcurd/console_scripts/errors.py
|
rekcurd/python-rekcurd
|
71e1ecd1f7bf8394563e682308fa7b404492e6da
|
[
"Apache-2.0"
] | 69
|
2019-02-01T05:53:28.000Z
|
2022-03-15T19:14:20.000Z
|
rekcurd/console_scripts/errors.py
|
rekcurd/python-rekcurd
|
71e1ecd1f7bf8394563e682308fa7b404492e6da
|
[
"Apache-2.0"
] | 8
|
2020-03-05T12:39:52.000Z
|
2022-01-26T10:34:21.000Z
|
# -*- coding: utf-8 -*-
class CommandError(Exception):
"""
Exception class indicating a problem while executing a console
scripts.
"""
pass
| 16.2
| 66
| 0.623457
| 17
| 162
| 5.941176
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008333
| 0.259259
| 162
| 9
| 67
| 18
| 0.833333
| 0.580247
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
abafcc968dc4d82e14329fcff78592be69107073
| 76
|
py
|
Python
|
6 kyu/Arraydiff.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 6
|
2020-09-03T09:32:25.000Z
|
2020-12-07T04:10:01.000Z
|
6 kyu/Arraydiff.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 1
|
2021-12-13T15:30:21.000Z
|
2021-12-13T15:30:21.000Z
|
6 kyu/Arraydiff.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | null | null | null |
def array_diff(a, b):
return [int for int in a if int not in b]
| 25.333333
| 45
| 0.578947
| 16
| 76
| 2.6875
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.342105
| 76
| 3
| 46
| 25.333333
| 0.86
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
abcb51f8a5f3341d1194307425df0991c7aa9931
| 11,167
|
py
|
Python
|
RUM.py
|
jingli9111/RUM-Tensorflow
|
6a2fec4ee36a44e8d54f5a25493d2202bc43166a
|
[
"MIT"
] | 29
|
2018-01-07T05:36:42.000Z
|
2021-06-14T20:02:14.000Z
|
RUM.py
|
jingli9111/RUM
|
6a2fec4ee36a44e8d54f5a25493d2202bc43166a
|
[
"MIT"
] | 1
|
2018-06-29T02:59:45.000Z
|
2018-07-29T04:56:13.000Z
|
RUM.py
|
ekcole/RUM-Tensorflow
|
6a2fec4ee36a44e8d54f5a25493d2202bc43166a
|
[
"MIT"
] | 9
|
2018-03-02T21:19:38.000Z
|
2020-06-30T13:52:32.000Z
|
#
import tensorflow as tf
import numpy as np
import auxiliary as aux
from termcolor import colored
from tensorflow.python.util.tf_export import tf_export
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops.rnn_cell_impl import RNNCell
from tensorflow.contrib.layers import fully_connected
sigmoid = math_ops.sigmoid
tanh = math_ops.tanh
matm = math_ops.matmul
mul = math_ops.multiply
relu = nn_ops.relu
sign = math_ops.sign
def rotation_operator(x, y, n_hidden, eps = 1e-12):
"""Rotation between two tensors: R(x,y) is unitary and takes x to y.
Args:
x: a tensor from where we want to start
y: a tensor at which we want to finish
eps: the cutoff for the normalizations (avoiding division by zero)
Returns:
a tensor, which is the orthogonal rotation operator R(x,y)
"""
size_batch = tf.shape(x)[0]
hidden_size = n_hidden
#construct the 2x2 rotation
u = tf.nn.l2_normalize(x, 1, epsilon = eps)
costh = tf.reduce_sum(u * tf.nn.l2_normalize(y, 1, epsilon = eps), 1)
sinth = tf.sqrt(1 - costh ** 2)
step1 = tf.reshape(costh, [size_batch, 1])
step2 = tf.reshape(sinth, [size_batch, 1])
Rth = tf.reshape(tf.concat([step1, -step2, step2, step1], axis = 1), [size_batch, 2, 2])
#get v and concatenate u and v
v = tf.nn.l2_normalize(y - tf.reshape(tf.reduce_sum(u * y, 1), [size_batch,1]) * u, 1, epsilon = eps)
step3 = tf.concat([tf.reshape(u, [size_batch, 1, hidden_size]),
tf.reshape(v, [size_batch, 1, hidden_size])],
axis = 1)
#do the batch matmul
step4 = tf.reshape(u, [size_batch, hidden_size, 1])
step5 = tf.reshape(v, [size_batch, hidden_size, 1])
return (tf.eye(hidden_size, batch_shape = [size_batch]) -
tf.matmul(step4, tf.transpose(step4, [0,2,1])) -
tf.matmul(step5, tf.transpose(step5, [0,2,1])) +
tf.matmul(tf.matmul(tf.transpose(step3, [0,2,1]), Rth), step3))
def rotation_components(x, y, eps = 1e-12):
"""Components for the operator R(x,y)
Together with `rotate` achieves best memory complexity: O(N_batch * N_hidden)
Args:
x: a tensor from where we want to start
y: a tensor at which we want to finish
eps: the cutoff for the normalizations (avoiding division by zero)
Returns:
Four components: u, v, [u,v] and R'(theta)
"""
size_batch = tf.shape(x)[0]
hidden_size = tf.shape(x)[1]
#construct the 2x2 rotation
u = tf.nn.l2_normalize(x, 1, epsilon = eps)
costh = tf.reduce_sum(u * tf.nn.l2_normalize(y, 1, epsilon = eps), 1)
sinth = tf.sqrt(1 - costh ** 2)
step1 = tf.reshape(costh, [size_batch, 1])
step2 = tf.reshape(sinth, [size_batch, 1])
Rth = tf.reshape(tf.concat([step1, -step2, step2, step1], axis = 1), [size_batch, 2, 2])
#get v and concatenate u and v
v = tf.nn.l2_normalize(y - tf.reshape(tf.reduce_sum(u * y, 1), [size_batch,1]) * u, 1, epsilon = eps)
step3 = tf.concat([tf.reshape(u, [size_batch, 1, hidden_size]),
tf.reshape(v, [size_batch, 1, hidden_size])],
axis = 1)
#do the batch matmul
step4 = tf.reshape(u, [size_batch, hidden_size, 1])
step5 = tf.reshape(v, [size_batch, hidden_size, 1])
return step4, step5, step3, Rth
def rotate(v1, v2, v):
"""Rotates v via the rotation R(v1,v2)
Args:
v: a tensor, which is the vector we want to rotate
== to define R(v1,v2) ==
v1: a tensor from where we want to start
v2: a tensor at which we want to finish
Returns:
A tensor: the vector R(v1,v2)[v]
"""
size_batch = tf.shape(v1)[0]
hidden_size = tf.shape(v1)[1]
U = rotation_components(v1, v2)
h = tf.reshape(v, [size_batch, hidden_size, 1])
return (v + tf.reshape(
- tf.matmul(U[0], tf.matmul(tf.transpose(U[0], [0,2,1]), h))
- tf.matmul(U[1], tf.matmul(tf.transpose(U[1], [0,2,1]), h))
+ tf.matmul(tf.transpose(U[2], [0,2,1]), tf.matmul(U[3], tf.matmul(U[2], h))),
[size_batch, hidden_size]
))
class RUMCell(RNNCell):
"""Rotational Unit of Memory
lambda = 0;
uses `rotate` to implement the `Rotation` efficiently.
"""
def __init__(self,
hidden_size,
activation = None,
reuse = None,
kernel_initializer = None,
bias_initializer = None,
T_norm = None,
eps = 1e-12,
use_zoneout = False,
zoneout_keep_h = 0.9,
use_layer_norm = False,
is_training = False
):
"""Initialization of the RUM cell.
Args:
hidden_size: number of neurons in hidden state
acitvation_tmp: activation of the temporary new state
activation_tar: activation of the target
activation_emb: activation of the embedded input
T_norm: norm for time normalization, `eta` in the paper
eps: the cutoff for the normalizations
use_zoneout: zoneout, True or False
use_layer_norm: batch normalization, True or False
is_training: marker for the zoneout
"""
super(RUMCell, self).__init__(_reuse = reuse)
self._hidden_size = hidden_size
self._activation = activation or relu
self._T_norm = T_norm
self._kernel_initializer = kernel_initializer or aux.orthogonal_initializer(1.0)
self._bias_initializer = bias_initializer
self._eps = eps
self._use_zoneout = use_zoneout
self._zoneout_keep_h = zoneout_keep_h
self._use_layer_norm = use_layer_norm
self._is_training = is_training
@property
def state_size(self):
return self._hidden_size
@property
def output_size(self):
return self._hidden_size
def call(self, inputs, state):
with vs.variable_scope("gates"):
bias_ones = self._bias_initializer
if self._bias_initializer is None:
dtype = [a.dtype for a in [inputs, state]][0]
bias_ones = init_ops.constant_initializer(1.0, dtype = dtype)
value = fully_connected(inputs=tf.concat([inputs, state],axis=1),
num_outputs=2*self._hidden_size,
activation_fn=None,
biases_initializer=bias_ones,
weights_initializer=aux.rum_ortho_initializer())
r, u = array_ops.split(value = value, num_or_size_splits = 2, axis = 1)
u = sigmoid(u)
if self._use_layer_norm:
concat = tf.concat([r, u], 1)
concat = aux.layer_norm_all(concat, 2, self._hidden_size, "LN_r_u")
r, u = tf.split(concat, 2, 1)
with vs.variable_scope("candidate"):
x_emb = fully_connected(inputs=inputs,
num_outputs=self._hidden_size,
activation_fn=None,
biases_initializer=self._bias_initializer,
weights_initializer=self._kernel_initializer)
state_new = rotate(x_emb, r, state)
if self._use_layer_norm:
c = self._activation(aux.layer_norm(x_emb + state_new, "LN_c"))
else:
c = self._activation(x_emb + state_new)
new_h = u * state + (1 - u) * c
if self._T_norm != None:
new_h = tf.nn.l2_normalize(new_h, 1, epsilon = self._eps) * self._T_norm
if self._use_zoneout:
new_h = aux.rum_zoneout(new_h, state, self._zoneout_keep_h, self._is_training)
return new_h, new_h
def zero_state(self, batch_size, dtype):
h = tf.zeros([batch_size, self._hidden_size], dtype=dtype)
return h
class ARUMCell(RNNCell):
    """(Associative) Rotational Unit of Memory.

    lambda = 1 (tuning in the associative memory);
    uses `rotation_operator` to update the rotation recursively.
    """

    def __init__(self,
                 hidden_size,
                 activation=None,
                 reuse=None,
                 kernel_initializer=None,
                 bias_initializer=None,
                 T_norm=None,
                 eps=1e-12,
                 use_zoneout=False,
                 zoneout_keep_h=0.9,
                 use_layer_norm=False,
                 is_training=False,
                 lambda_pow=0):
        """Initialization of the Associative RUM cell.

        Args:
            hidden_size: number of neurons in the hidden state.
            activation: activation of the candidate state (defaults to relu).
            reuse: variable-scope reuse flag forwarded to the base RNNCell.
            kernel_initializer: initializer for the embedding weights
                (defaults to an orthogonal initializer).
            bias_initializer: initializer for the biases.
            T_norm: norm for time normalization, `eta` in the paper.
            eps: the cutoff for the normalizations.
            use_zoneout: zoneout, True or False.
            zoneout_keep_h: keep probability for zoneout.
            use_layer_norm: layer normalization, True or False
                (fixed doc: this is layer norm, not batch norm).
            is_training: marker for the zoneout.
            lambda_pow: the power for the associative memory (an integer).
        """
        super(ARUMCell, self).__init__(_reuse=reuse)
        self._hidden_size = hidden_size
        self._activation = activation or relu
        self._T_norm = T_norm
        self._kernel_initializer = kernel_initializer or aux.orthogonal_initializer(1.0)
        self._bias_initializer = bias_initializer
        self._eps = eps
        self._use_zoneout = use_zoneout
        self._zoneout_keep_h = zoneout_keep_h
        self._use_layer_norm = use_layer_norm
        self._is_training = is_training
        self._lambda_pow = lambda_pow

    @property
    def state_size(self):
        # The packed state holds the flattened (hidden x hidden) associative
        # rotation matrix followed by the hidden vector itself.
        return self._hidden_size * (self._hidden_size + 1)

    @property
    def output_size(self):
        return self._hidden_size

    def call(self, inputs, state):
        """Run one associative RUM step.

        Args:
            inputs: input tensor, shape (batch, input_dim).
            state: packed state of shape (batch, hidden*(hidden+1)): the
                flattened associative memory followed by the hidden vector.

        Returns:
            (new_h, new_state): the new hidden vector and the repacked state.
        """
        # Extract the associative memory matrix and the hidden state.
        size_batch = tf.shape(state)[0]
        assoc_mem, state = tf.split(
            state, [self._hidden_size * self._hidden_size, self._hidden_size], 1)
        assoc_mem = tf.reshape(
            assoc_mem, [size_batch, self._hidden_size, self._hidden_size])
        with vs.variable_scope("gates"):
            bias_ones = self._bias_initializer
            if self._bias_initializer is None:
                # Default the gate bias to ones so the update gate starts open.
                bias_ones = init_ops.constant_initializer(1.0, dtype=inputs.dtype)
            value = fully_connected(inputs=tf.concat([inputs, state], axis=1),
                                    num_outputs=2 * self._hidden_size,
                                    activation_fn=None,
                                    biases_initializer=bias_ones,
                                    weights_initializer=aux.rum_ortho_initializer())
            r, u = array_ops.split(value=value, num_or_size_splits=2, axis=1)
            u = sigmoid(u)
            if self._use_layer_norm:
                concat = tf.concat([r, u], 1)
                concat = aux.layer_norm_all(concat, 2, self._hidden_size, "LN_r_u")
                r, u = tf.split(concat, 2, 1)
        with vs.variable_scope("candidate"):
            x_emb = fully_connected(inputs=inputs,
                                    num_outputs=self._hidden_size,
                                    activation_fn=None,
                                    biases_initializer=self._bias_initializer,
                                    weights_initializer=self._kernel_initializer)
            # Accumulate the rotation into the associative memory, then rotate
            # the previous hidden state by the accumulated rotation.
            tmp_rotation = rotation_operator(x_emb, r, self._hidden_size)
            Rt = tf.matmul(assoc_mem, tmp_rotation)
            state_new = tf.reshape(
                tf.matmul(Rt, tf.reshape(state, [size_batch, self._hidden_size, 1])),
                [size_batch, self._hidden_size])
            if self._use_layer_norm:
                c = self._activation(aux.layer_norm(x_emb + state_new, "LN_c"))
            else:
                c = self._activation(x_emb + state_new)
        # GRU-style convex combination of old state and candidate.
        new_h = u * state + (1 - u) * c
        if self._T_norm is not None:  # fixed: was `!= None`
            new_h = tf.nn.l2_normalize(new_h, 1, epsilon=self._eps) * self._T_norm
        if self._use_zoneout:
            new_h = aux.rum_zoneout(
                new_h, state, self._zoneout_keep_h, self._is_training)
        # Repack the (flattened) rotation matrix and hidden vector.
        Rt = tf.reshape(Rt, [size_batch, self._hidden_size * self._hidden_size])
        new_state = tf.concat([Rt, new_h], 1)
        return new_h, new_state

    def zero_state(self, batch_size, dtype):
        """Identity associative memory concatenated with a zero hidden vector."""
        # Fixed: pass `dtype` to tf.eye as well; previously the eye defaulted
        # to float32, so concat with a non-float32 `c` would fail.
        e = tf.eye(self._hidden_size, batch_shape=[batch_size], dtype=dtype)
        e = tf.reshape(e, [batch_size, self._hidden_size * self._hidden_size])
        c = tf.zeros([batch_size, self._hidden_size], dtype=dtype)
        return tf.concat([e, c], 1)
| 34.896875
| 128
| 0.696875
| 1,760
| 11,167
| 4.183523
| 0.122159
| 0.065191
| 0.057042
| 0.022002
| 0.769931
| 0.731903
| 0.709901
| 0.690887
| 0.66549
| 0.637784
| 0
| 0.019301
| 0.188054
| 11,167
| 320
| 129
| 34.896875
| 0.792765
| 0.228978
| 0
| 0.70283
| 0
| 0
| 0.005508
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061321
| false
| 0
| 0.056604
| 0.018868
| 0.179245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
abd9b76fb74b0c304c6d54275452d1b621067430
| 664
|
py
|
Python
|
Kelas Rabu/Nilai Hash/1310651134 Dharma/1134_dharma.py
|
umjembersoft/TI20151-Keamanan-Komputer
|
6e8a4e75fafa59149f5e96b71eb83de935642c38
|
[
"MIT"
] | null | null | null |
Kelas Rabu/Nilai Hash/1310651134 Dharma/1134_dharma.py
|
umjembersoft/TI20151-Keamanan-Komputer
|
6e8a4e75fafa59149f5e96b71eb83de935642c38
|
[
"MIT"
] | null | null | null |
Kelas Rabu/Nilai Hash/1310651134 Dharma/1134_dharma.py
|
umjembersoft/TI20151-Keamanan-Komputer
|
6e8a4e75fafa59149f5e96b71eb83de935642c38
|
[
"MIT"
] | null | null | null |
__author__ = 'Dharma Trias Brata Swasono'

import hashlib

# Simple demo: compute and display the MD5 hash of a student name/ID string.
# Modernized from Python 2 to Python 3: `print` is a function and
# hashlib digests consume bytes, not str.

print()
print("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -")
print("Program Sederhana Untuk Melakukan Generate Terhadap Nilai Hash dari MD5")
print("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -")
print()

NamaString = "1310651134_Dharma Trias Brata Swasono"  # the name and student ID (NIM)
hasher = hashlib.md5()  # renamed from `hash` to avoid shadowing the builtin
hasher.update(NamaString.encode("utf-8"))  # md5 requires bytes in Python 3
test = hasher.hexdigest()

# Fixed: the message claimed "SHA 256" but the digest computed above is MD5.
print("Nilai hash MD5 dari String " + NamaString + " adalah : " + test.upper())
print()
print("Penghitungan Nilai Hash Selesai")
| 27.666667
| 98
| 0.578313
| 68
| 664
| 5.573529
| 0.573529
| 0.105541
| 0.084433
| 0.121372
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031621
| 0.237952
| 664
| 23
| 99
| 28.869565
| 0.717391
| 0.112952
| 0
| 0.357143
| 1
| 0.142857
| 0.613288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.071429
| null | null | 0.571429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
2806c24584eab63acd8af9533ca5401acc6c7c3d
| 363
|
py
|
Python
|
app/server/admin.py
|
SigmaNewsVenturesGroupLtd/doccano
|
c6ca1e8c17291ad42ef97aae4c0b45c7d322af34
|
[
"MIT"
] | 2
|
2021-03-24T07:42:24.000Z
|
2021-05-17T09:35:31.000Z
|
app/server/admin.py
|
SigmaNewsVenturesGroupLtd/doccano
|
c6ca1e8c17291ad42ef97aae4c0b45c7d322af34
|
[
"MIT"
] | 94
|
2020-01-15T07:26:36.000Z
|
2021-07-27T05:54:10.000Z
|
app/server/admin.py
|
SigmaNewsVenturesGroupLtd/doccano
|
c6ca1e8c17291ad42ef97aae4c0b45c7d322af34
|
[
"MIT"
] | 2
|
2020-01-30T10:05:09.000Z
|
2020-08-07T23:52:46.000Z
|
from django.contrib import admin
from .models import Label, Document, Project
from .models import DocumentAnnotation, SequenceAnnotation, Seq2seqAnnotation
# Register the project's models with the Django admin site so they can be
# browsed and edited through the admin UI (same registration order as before).
for _model in (
        DocumentAnnotation,
        SequenceAnnotation,
        Seq2seqAnnotation,
        Label,
        Document,
        Project,
):
    admin.site.register(_model)
| 30.25
| 77
| 0.848485
| 41
| 363
| 7.512195
| 0.341463
| 0.175325
| 0.331169
| 0.220779
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0059
| 0.066116
| 363
| 11
| 78
| 33
| 0.902655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e602d02bd47c2566e482d012031b4cf2dd7f115a
| 168
|
py
|
Python
|
diagram.py
|
Open-Harvest/CallForCode2021
|
122e935dc7e133c08e34eda5865d5f750e459deb
|
[
"Apache-2.0"
] | 2
|
2021-07-06T18:48:24.000Z
|
2021-11-08T09:33:47.000Z
|
diagram.py
|
Philipsty/CallForCode2021
|
122e935dc7e133c08e34eda5865d5f750e459deb
|
[
"Apache-2.0"
] | 20
|
2021-06-09T02:12:52.000Z
|
2021-06-18T04:53:30.000Z
|
diagram.py
|
Philipsty/CallForCode2021
|
122e935dc7e133c08e34eda5865d5f750e459deb
|
[
"Apache-2.0"
] | 1
|
2021-06-15T02:22:16.000Z
|
2021-06-15T02:22:16.000Z
|
from diagrams import Diagram, Cluster
from diagrams.ibm.general import Cloudant
with Diagram("Open Harvest", show=True, direction="TB"):
with Cluster("Backend"):
| 24
| 56
| 0.755952
| 22
| 168
| 5.772727
| 0.727273
| 0.188976
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130952
| 168
| 6
| 57
| 28
| 0.869863
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
e62ac222ff9182649f0f6094c7eb17b738214fe3
| 68
|
py
|
Python
|
Eager/tests/samples/policy1.py
|
UCSB-CS-RACELab/eager-appscale
|
d58fe64bb867ef58af19c1d84a5e1ec68ecddd3d
|
[
"Apache-2.0"
] | 3
|
2016-06-12T01:18:49.000Z
|
2018-07-16T18:20:23.000Z
|
Eager/tests/samples/policy1.py
|
UCSB-CS-RACELab/eager-appscale
|
d58fe64bb867ef58af19c1d84a5e1ec68ecddd3d
|
[
"Apache-2.0"
] | null | null | null |
Eager/tests/samples/policy1.py
|
UCSB-CS-RACELab/eager-appscale
|
d58fe64bb867ef58af19c1d84a5e1ec68ecddd3d
|
[
"Apache-2.0"
] | 1
|
2020-05-25T02:59:15.000Z
|
2020-05-25T02:59:15.000Z
|
"""
This is a comment.
"""
assert_app_dependency(app, 'Foo', '1.0')
| 13.6
| 40
| 0.632353
| 11
| 68
| 3.727273
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033898
| 0.132353
| 68
| 5
| 40
| 13.6
| 0.661017
| 0.264706
| 0
| 0
| 0
| 0
| 0.139535
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
052555af2d28f4ca391968946dba864020a83949
| 515
|
py
|
Python
|
tests/mocks.py
|
jmuddappa/DeepClassificationBot
|
70aaa6787cf02e8a6b49a913af6496bc0f288b35
|
[
"MIT"
] | null | null | null |
tests/mocks.py
|
jmuddappa/DeepClassificationBot
|
70aaa6787cf02e8a6b49a913af6496bc0f288b35
|
[
"MIT"
] | null | null | null |
tests/mocks.py
|
jmuddappa/DeepClassificationBot
|
70aaa6787cf02e8a6b49a913af6496bc0f288b35
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import collections
from functools import partial
from StringIO import StringIO
import json
def mock_get(content):
    """Build a stand-in for `requests.get` that always returns *content*.

    The returned callable ignores all positional and keyword arguments and
    wraps the fixed payload in a MockResponse.
    """
    return lambda *args, **kwargs: MockResponse(content)
class MockResponse(collections.namedtuple('Response', 'content')):
    """Minimal stand-in for an HTTP response backed by a fixed string payload."""

    def iter_content(self, chunk_size=1):
        """Return an iterator yielding the payload in *chunk_size*-char chunks."""
        buf = StringIO(self.content)
        # iter(callable, sentinel): keep reading until read() returns ''.
        return iter(lambda: buf.read(chunk_size), '')

    def close(self):
        """No-op; present only to satisfy the response interface."""
        pass

    def json(self):
        """Decode the payload as JSON."""
        decoded = json.loads(self.content)
        return decoded
| 22.391304
| 73
| 0.68932
| 64
| 515
| 5.421875
| 0.46875
| 0.060519
| 0.057637
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004831
| 0.196117
| 515
| 22
| 74
| 23.409091
| 0.833333
| 0.040777
| 0
| 0
| 0
| 0
| 0.030488
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.066667
| 0.266667
| 0.2
| 0.933333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 4
|
0548c9c1eb7135b7894549d630bf31c91b96ea86
| 20
|
py
|
Python
|
src/test/resources/files/caretAfterString_after.py
|
rendner/py-prefix-fstring-plugin
|
c2e2ca7cca1b3833e988543fda5bce05c6860309
|
[
"MIT"
] | null | null | null |
src/test/resources/files/caretAfterString_after.py
|
rendner/py-prefix-fstring-plugin
|
c2e2ca7cca1b3833e988543fda5bce05c6860309
|
[
"MIT"
] | null | null | null |
src/test/resources/files/caretAfterString_after.py
|
rendner/py-prefix-fstring-plugin
|
c2e2ca7cca1b3833e988543fda5bce05c6860309
|
[
"MIT"
] | 1
|
2021-05-24T09:32:06.000Z
|
2021-05-24T09:32:06.000Z
|
x = f'test {'<caret>
| 20
| 20
| 0.55
| 4
| 20
| 2.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 20
| 1
| 20
| 20
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
0557a132cfe7715d80692fa19eb494fb7ba5c3d8
| 2,638
|
py
|
Python
|
components/ability.py
|
averyhiebert/vim-roguelike-challenge
|
3decdc13ffcbdc893493c6d454e5ecaffb07094e
|
[
"Vim",
"MIT"
] | null | null | null |
components/ability.py
|
averyhiebert/vim-roguelike-challenge
|
3decdc13ffcbdc893493c6d454e5ecaffb07094e
|
[
"Vim",
"MIT"
] | null | null | null |
components/ability.py
|
averyhiebert/vim-roguelike-challenge
|
3decdc13ffcbdc893493c6d454e5ecaffb07094e
|
[
"Vim",
"MIT"
] | null | null | null |
"""
Basic architectural idea:
Actors (basically just the player) can have "intrinsic" Abilities.
Additionally, Items can have abilities.
An actor has access to the abilities of any items that they have equipped
in their inventory, in addition to any "intrinsic" abilities that they have.
In general, when the player executes a command some TBD function will check
for the requisite abilities. A requirement string will be passed to the
Ability, and it will return True/False regarding whether it satisfies the
requirement string. Requirement string could be something simple like "d",
or more complex, like a regex search.
In retrospect, these are really more like status effects. If I were to
do this again, these would be "effects" with various subclasses,
and players/inventories would have "fulfills(effect)" method. But for 7drl
this system is good enough.
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from components.base_component import BaseComponent
class Ability(BaseComponent):
    """Base class for abilities granted intrinsically or by equipped items."""

    def fulfills(self, requirement: str) -> bool:
        """ Return true if this Ability is sufficient to meet the
        given requirement.
        """
        raise NotImplementedError()

    @property
    def ability_string(self) -> str:
        """ Return a (short) string summary of the ability, which can be
        displayed to the user in a list of currently-active abilities.
        """
        # Fixed: every subclass exposes `ability_string` as a @property, but
        # the base class declared it as a plain method, so polymorphic access
        # on a bare Ability yielded a bound method instead of a string.
        # TODO This may cause confusion in the future
        return ""
class Omnipotent(Ability):
    """Ability that satisfies every possible requirement."""

    def fulfills(self, requirement: str) -> bool:
        """Every requirement is met, unconditionally."""
        return True

    @property
    def ability_string(self) -> str:
        """Human-readable summary shown in the active-abilities list."""
        return "All abilities"
class AllCommands(Ability):
    """Ability granting all short commands and ex-style (`:`-prefixed) commands."""

    def fulfills(self, requirement: str) -> bool:
        """ Return true for any short command (i.e. 1 or 2 characters).

        TODO Improve this if I add short status effects that aren't commands
        """
        # `:`-prefixed requirements and 1-2 character ones are all granted.
        return requirement.startswith(":") or len(requirement) <= 2

    @property
    def ability_string(self) -> str:
        """Human-readable summary shown in the active-abilities list."""
        return "all commands"
class SimpleAbility(Ability):
    """Ability that fulfills exactly one fixed requirement string."""

    def __init__(self, requirement_string: str):
        """ Fulfills only the requirement specified by requirement_string.

        E.G. if requirement_string = "H", that presumably means that this
        Ability allows the player to use the "H" command.
        """
        super().__init__()
        self.requirement_string = requirement_string

    def fulfills(self, requirement: str) -> bool:
        # Fixed: return annotation was `Bool` (an undefined name); it only
        # avoided a NameError because of `from __future__ import annotations`.
        return requirement == self.requirement_string

    @property
    def ability_string(self) -> str:
        """Human-readable summary: the requirement string itself."""
        return self.requirement_string
| 33.392405
| 77
| 0.700531
| 346
| 2,638
| 5.268786
| 0.433526
| 0.093253
| 0.032913
| 0.057049
| 0.179923
| 0.179923
| 0.164015
| 0.122326
| 0.054855
| 0
| 0
| 0.002947
| 0.228203
| 2,638
| 78
| 78
| 33.820513
| 0.892436
| 0.537908
| 0
| 0.344828
| 0
| 0
| 0.023572
| 0
| 0
| 0
| 0
| 0.025641
| 0
| 1
| 0.310345
| false
| 0
| 0.103448
| 0.172414
| 0.793103
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.