hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
2d053276d0c4ff141f20054258ea511f884eed15
71
py
Python
deepy/__init__.py
popura/deepy-pytorch
71d87a82e937d82b9b149041280a392cc24b7299
[ "MIT" ]
1
2021-07-19T09:38:26.000Z
2021-07-19T09:38:26.000Z
deepy/__init__.py
popura/deepy-pytorch
71d87a82e937d82b9b149041280a392cc24b7299
[ "MIT" ]
1
2021-07-26T06:47:45.000Z
2021-07-26T06:47:45.000Z
deepy/__init__.py
popura/deepy-pytorch
71d87a82e937d82b9b149041280a392cc24b7299
[ "MIT" ]
null
null
null
import deepy.nn import deepy.train import deepy.data import deepy.util
14.2
18
0.830986
12
71
4.916667
0.5
0.745763
0
0
0
0
0
0
0
0
0
0
0.112676
71
4
19
17.75
0.936508
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
2d13c0f8ab49f31f3cc673f47c329c592afa55c9
41
py
Python
SoftwareDev/Python/AnushkaSomavanshi.py
Anushka272602/OpenOctober
980d5e60bf0ef25018088888360a75f477533aef
[ "Apache-2.0" ]
null
null
null
SoftwareDev/Python/AnushkaSomavanshi.py
Anushka272602/OpenOctober
980d5e60bf0ef25018088888360a75f477533aef
[ "Apache-2.0" ]
null
null
null
SoftwareDev/Python/AnushkaSomavanshi.py
Anushka272602/OpenOctober
980d5e60bf0ef25018088888360a75f477533aef
[ "Apache-2.0" ]
null
null
null
print(Jet fuel doesn't melt steel beams)
20.5
40
0.780488
8
41
4
1
0
0
0
0
0
0
0
0
0
0
0
0.146341
41
1
41
41
0.914286
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
5
74862363d4729b44878881f5ea01cbeaa1e2c38e
40
py
Python
todobackend/todo/domain/exceptions.py
zhangcheng/todobackend_python
4ad56c6874ff4460087236d03515a0a0611e95e4
[ "MIT" ]
null
null
null
todobackend/todo/domain/exceptions.py
zhangcheng/todobackend_python
4ad56c6874ff4460087236d03515a0a0611e95e4
[ "MIT" ]
null
null
null
todobackend/todo/domain/exceptions.py
zhangcheng/todobackend_python
4ad56c6874ff4460087236d03515a0a0611e95e4
[ "MIT" ]
null
null
null
class TodoNotFound(Exception): pass
13.333333
30
0.75
4
40
7.5
1
0
0
0
0
0
0
0
0
0
0
0
0.175
40
2
31
20
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
776ee5c34c04457208ce4691575019e53f6d4902
87
py
Python
minotaur/message/__init__.py
csmith49/minotaur
982e128b440e2c8fe96c450505dfdac9a37f9551
[ "MIT" ]
null
null
null
minotaur/message/__init__.py
csmith49/minotaur
982e128b440e2c8fe96c450505dfdac9a37f9551
[ "MIT" ]
null
null
null
minotaur/message/__init__.py
csmith49/minotaur
982e128b440e2c8fe96c450505dfdac9a37f9551
[ "MIT" ]
null
null
null
from .message import Message, Enter, Exit, Emit from .context_graph import ContextGraph
43.5
47
0.827586
12
87
5.916667
0.75
0
0
0
0
0
0
0
0
0
0
0
0.114943
87
2
48
43.5
0.922078
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
778347dd52233a457be82cd0cd1bb84a46f58d6f
269
py
Python
napari_myfirstplugintest/__init__.py
justinelarsen/test-myfirstnapariplugin
f4b4b3dd8b74464fdfa947322a6eb78e766c7a8d
[ "BSD-3-Clause" ]
null
null
null
napari_myfirstplugintest/__init__.py
justinelarsen/test-myfirstnapariplugin
f4b4b3dd8b74464fdfa947322a6eb78e766c7a8d
[ "BSD-3-Clause" ]
null
null
null
napari_myfirstplugintest/__init__.py
justinelarsen/test-myfirstnapariplugin
f4b4b3dd8b74464fdfa947322a6eb78e766c7a8d
[ "BSD-3-Clause" ]
null
null
null
try: from ._version import version as __version__ except ImportError: __version__ = "unknown" from ._reader import napari_get_reader from ._dock_widget import napari_experimental_provide_dock_widget from ._function import napari_experimental_provide_function
26.9
65
0.840149
33
269
6.181818
0.484848
0.176471
0.235294
0.303922
0
0
0
0
0
0
0
0
0.126394
269
9
66
29.888889
0.868085
0
0
0
0
0
0.026022
0
0
0
0
0
0
1
0
false
0
0.714286
0
0.714286
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
779eba42b8122fd18d59f3b3f16fc138d9b7205f
209
py
Python
ytdlmusic/log.py
jeanphibaconnais/ytdlmusic
6e96a385400b6d7a852970fe867e28043e5db068
[ "MIT" ]
9
2021-04-10T23:11:14.000Z
2021-11-28T11:21:02.000Z
ytdlmusic/log.py
jeanphibaconnais/ytdlmusic
6e96a385400b6d7a852970fe867e28043e5db068
[ "MIT" ]
1
2021-10-16T11:09:31.000Z
2021-10-16T11:09:31.000Z
ytdlmusic/log.py
jeanphibaconnais/ytdlmusic
6e96a385400b6d7a852970fe867e28043e5db068
[ "MIT" ]
2
2021-04-16T07:15:49.000Z
2021-10-20T09:07:01.000Z
""" log utils """ from ytdlmusic.params import is_verbose def print_debug(message): """ print "[debug] " + message only if --verbose """ if is_verbose(): print("[debug] " + message)
14.928571
48
0.588517
24
209
5
0.583333
0.25
0.425
0
0
0
0
0
0
0
0
0
0.253589
209
13
49
16.076923
0.769231
0.258373
0
0
0
0
0.060606
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.5
0.5
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
1
0
5
77a7012c71452f15cd080771113b3a8719722fec
24,963
py
Python
Draft/newData.py
HillaPeter/FinalProject
f42849483a2e898a3198bb539c22bbfdf4308cc9
[ "MIT" ]
null
null
null
Draft/newData.py
HillaPeter/FinalProject
f42849483a2e898a3198bb539c22bbfdf4308cc9
[ "MIT" ]
null
null
null
Draft/newData.py
HillaPeter/FinalProject
f42849483a2e898a3198bb539c22bbfdf4308cc9
[ "MIT" ]
1
2021-06-24T09:10:10.000Z
2021-06-24T09:10:10.000Z
import pandas as pd import matplotlib.pyplot as plt import numpy as np #-------------read csv--------------------- df_2010_2011 = pd.read_csv("/mnt/nadavrap-students/STS/data/data_Shapira_20200911_2010_2011.csv") df_2012_2013 = pd.read_csv("/mnt/nadavrap-students/STS/data/data_Shapira_20200911_2012_2013.csv") df_2014_2015 = pd.read_csv("/mnt/nadavrap-students/STS/data/data_Shapira_20200911_2014_2015.csv") df_2016_2017 = pd.read_csv("/mnt/nadavrap-students/STS/data/data_Shapira_20200911_2016_2017.csv") df_2018_2019 = pd.read_csv("/mnt/nadavrap-students/STS/data/data_Shapira_20200911_2018_2019.csv") df_2010_2011['prcab'].fillna(0) df_2012_2013['prcab'].fillna(0) df_2014_2015['prcab'].fillna(0) df_2016_2017['prcab'].fillna(0) df_2018_2019['prcab'].fillna(0) print(df_2018_2019['prcab']) mask = df_2010_2011['surgyear'] != 2010 df_2011 = df_2010_2011[mask] df_2010 = df_2010_2011[~mask] mask2 = df_2012_2013['surgyear'] != 2012 df_2013 = df_2012_2013[mask2] df_2012 = df_2012_2013[~mask2] mask3 = df_2014_2015['surgyear'] != 2014 df_2015 = df_2014_2015[mask3] df_2014 = df_2014_2015[~mask3] mask4 = df_2016_2017['surgyear'] != 2016 df_2017 = df_2016_2017[mask4] df_2016 = df_2016_2017[~mask4] mask5 = df_2018_2019['surgyear'] != 2018 df_2019 = df_2018_2019[mask5] df_2018 = df_2018_2019[~mask5] avg_siteid = pd.DataFrame() avg_surgid = pd.DataFrame() # #tmpHilla=df_2018_2019.columns # tmpHilla=pd.DataFrame(df_2018_2019.columns.values.tolist()) # tmpHilla.to_csv("/tmp/pycharm_project_355/columns.csv") # my_list = df_2010_2011.columns.values.tolist() # print (my_list) # print() # my_list = df_2012_2013.columns.values.tolist() # print (my_list) # print() # my_list = df_2014_2015.columns.values.tolist() # print (my_list) # print() # my_list = df_2016_2017.columns.values.tolist() # print (my_list) # print() # my_list = df_2018_2019.columns.values.tolist() # print (my_list) # print() #-------------------merge all csv-------------------------- # dfMerge1 = pd.merge(df_2010_2011, 
df_2012_2013, on='surgorder') # dfMerge2 = pd.merge(dfMerge1, df_2014_2015, on='surgorder') # dfMerge = pd.merge(dfMerge2, df_2016_2017, on='surgorder') #dfMerge = pd.merge(df_2010_2011, df_2012_2013, on='SiteID') #count distinc #table.groupby('YEARMONTH').CLIENTCODE.nunique() def groupby_siteid(): df_2010 = df_2010_2011.groupby('siteid')['surgyear'].apply(lambda x: (x== 2010 ).sum()).reset_index(name='2010') df_2011 = df_2010_2011.groupby('siteid')['surgyear'].apply(lambda x: (x== 2011 ).sum()).reset_index(name='2011') df_2012 = df_2012_2013.groupby('siteid')['surgyear'].apply(lambda x: (x== 2012 ).sum()).reset_index(name='2012') df_2013 = df_2012_2013.groupby('siteid')['surgyear'].apply(lambda x: (x== 2013 ).sum()).reset_index(name='2013') df_2014 = df_2014_2015.groupby('siteid')['surgyear'].apply(lambda x: (x== 2014 ).sum()).reset_index(name='2014') df_2015 = df_2014_2015.groupby('siteid')['surgyear'].apply(lambda x: (x== 2015 ).sum()).reset_index(name='2015') df_2016 = df_2016_2017.groupby('siteid')['surgyear'].apply(lambda x: (x== 2016 ).sum()).reset_index(name='2016') df_2017 = df_2016_2017.groupby('siteid')['surgyear'].apply(lambda x: (x== 2017 ).sum()).reset_index(name='2017') df_2018 = df_2018_2019.groupby('siteid')['surgyear'].apply(lambda x: (x== 2018 ).sum()).reset_index(name='2018') df_2019 = df_2018_2019.groupby('siteid')['surgyear'].apply(lambda x: (x== 2019 ).sum()).reset_index(name='2019') df1 =pd.merge(df_2010, df_2011, on='siteid') df2 =pd.merge(df1, df_2012, on='siteid') df3 =pd.merge(df2, df_2013, on='siteid') df4 =pd.merge(df3, df_2014, on='siteid') df5 =pd.merge(df4, df_2015, on='siteid') df6 =pd.merge(df5, df_2016, on='siteid') df7 =pd.merge(df6, df_2017, on='siteid') df8 =pd.merge(df7, df_2018, on='siteid') df_sum_all_Years =pd.merge(df8, df_2019, on='siteid') cols = df_sum_all_Years.columns.difference(['siteid']) df_sum_all_Years['Distinct_years'] = df_sum_all_Years[cols].gt(0).sum(axis=1) cols_sum = 
df_sum_all_Years.columns.difference(['siteid','Distinct_years']) df_sum_all_Years['Year_sum'] =df_sum_all_Years.loc[:,cols_sum].sum(axis=1) df_sum_all_Years['Year_avg'] = df_sum_all_Years['Year_sum']/df_sum_all_Years['Distinct_years'] df_sum_all_Years.to_csv("total op sum all years siteid.csv") print("details on site id dist:") print ("num of all sites: ", len(df_sum_all_Years)) less_8 =df_sum_all_Years[df_sum_all_Years['Distinct_years'] !=10] less_8.to_csv("total op less 10 years siteid.csv") print("num of sites with less years: ", len(less_8)) x = np.array(less_8['Distinct_years']) print(np.unique(x)) avg_siteid['siteid'] = df_sum_all_Years['siteid'] avg_siteid['total_year_avg'] = df_sum_all_Years['Year_avg'] def groupby_surgid(): df_2010 = df_2010_2011.groupby('surgid')['surgyear'].apply(lambda x: (x== 2010 ).sum()).reset_index(name='2010') df_2011 = df_2010_2011.groupby('surgid')['surgyear'].apply(lambda x: (x== 2011 ).sum()).reset_index(name='2011') df_2012 = df_2012_2013.groupby('surgid')['surgyear'].apply(lambda x: (x== 2012 ).sum()).reset_index(name='2012') df_2013 = df_2012_2013.groupby('surgid')['surgyear'].apply(lambda x: (x== 2013 ).sum()).reset_index(name='2013') df_2014 = df_2014_2015.groupby('surgid')['surgyear'].apply(lambda x: (x== 2014 ).sum()).reset_index(name='2014') df_2015 = df_2014_2015.groupby('surgid')['surgyear'].apply(lambda x: (x== 2015 ).sum()).reset_index(name='2015') df_2016 = df_2016_2017.groupby('surgid')['surgyear'].apply(lambda x: (x== 2016 ).sum()).reset_index(name='2016') df_2017 = df_2016_2017.groupby('surgid')['surgyear'].apply(lambda x: (x== 2017 ).sum()).reset_index(name='2017') df_2018 = df_2018_2019.groupby('surgid')['surgyear'].apply(lambda x: (x== 2018 ).sum()).reset_index(name='2018') df_2019 = df_2018_2019.groupby('surgid')['surgyear'].apply(lambda x: (x== 2019 ).sum()).reset_index(name='2019') df1 =pd.merge(df_2010, df_2011, on='surgid') df2 =pd.merge(df1, df_2012, on='surgid') df3 =pd.merge(df2, df_2013, on='surgid') 
df4 =pd.merge(df3, df_2014, on='surgid') df5 =pd.merge(df4, df_2015, on='surgid') df6 =pd.merge(df5, df_2016, on='surgid') df7 =pd.merge(df6, df_2017, on='surgid') df8 =pd.merge(df7, df_2018, on='surgid') df_sum_all_Years =pd.merge(df8, df_2019, on='surgid') cols = df_sum_all_Years.columns.difference(['surgid']) df_sum_all_Years['Distinct_years'] = df_sum_all_Years[cols].gt(0).sum(axis=1) cols_sum = df_sum_all_Years.columns.difference(['surgid','Distinct_years']) df_sum_all_Years['Year_sum'] =df_sum_all_Years.loc[:,cols_sum].sum(axis=1) df_sum_all_Years['Year_avg'] = df_sum_all_Years['Year_sum']/df_sum_all_Years['Distinct_years'] df_sum_all_Years.to_csv("sum all years surgid.csv") print() print("details of surgid dist:") print("num of all surgid: ", len(df_sum_all_Years)) less_8 =df_sum_all_Years[df_sum_all_Years['Distinct_years'] !=10] less_8.to_csv("less 10 years surgid.csv") print("num of doctors with less years: ", len(less_8)) x = np.array(less_8['Distinct_years']) print(np.unique(x)) avg_surgid['surgid'] = df_sum_all_Years['surgid'] avg_surgid['total_year_avg'] = df_sum_all_Years['Year_avg'] def groupby_hospid(): df_2010 = df_2010_2011.groupby('hospid')['surgyear'].apply(lambda x: (x== 2010 ).sum()).reset_index(name='2010') df_2011 = df_2010_2011.groupby('hospid')['surgyear'].apply(lambda x: (x== 2011 ).sum()).reset_index(name='2011') df_2012 = df_2012_2013.groupby('hospid')['surgyear'].apply(lambda x: (x== 2012 ).sum()).reset_index(name='2012') df_2013 = df_2012_2013.groupby('hospid')['surgyear'].apply(lambda x: (x== 2013 ).sum()).reset_index(name='2013') df_2014 = df_2014_2015.groupby('hospid')['surgyear'].apply(lambda x: (x== 2014 ).sum()).reset_index(name='2014') df_2015 = df_2014_2015.groupby('hospid')['surgyear'].apply(lambda x: (x== 2015 ).sum()).reset_index(name='2015') df_2016 = df_2016_2017.groupby('hospid')['surgyear'].apply(lambda x: (x== 2016 ).sum()).reset_index(name='2016') df_2017 = df_2016_2017.groupby('hospid')['surgyear'].apply(lambda x: 
(x== 2017 ).sum()).reset_index(name='2017') df_2018 = df_2018_2019.groupby('hospid')['surgyear'].apply(lambda x: (x== 2018 ).sum()).reset_index(name='2018') df_2019 = df_2018_2019.groupby('hospid')['surgyear'].apply(lambda x: (x== 2019 ).sum()).reset_index(name='2019') df1 =pd.merge(df_2010, df_2011, on='hospid') df2 =pd.merge(df1, df_2012, on='hospid') df3 =pd.merge(df2, df_2013, on='hospid') df4 =pd.merge(df3, df_2014, on='hospid') df5 =pd.merge(df4, df_2015, on='hospid') df6 =pd.merge(df5, df_2016, on='hospid') df7 =pd.merge(df6, df_2017, on='hospid') df8 =pd.merge(df7, df_2018, on='hospid') df_sum_all_Years =pd.merge(df8, df_2019, on='hospid') cols = df_sum_all_Years.columns.difference(['hospid']) df_sum_all_Years['Distinct_years'] = df_sum_all_Years[cols].gt(0).sum(axis=1) cols_sum = df_sum_all_Years.columns.difference(['hospid','Distinct_years']) df_sum_all_Years['Year_sum'] =df_sum_all_Years.loc[:,cols_sum].sum(axis=1) df_sum_all_Years['Year_avg'] = df_sum_all_Years['Year_sum']/df_sum_all_Years['Distinct_years'] df_sum_all_Years.to_csv("sum all years hospid.csv") print(df_sum_all_Years) print ("num of all sites: ", len(df_sum_all_Years)) less_8 =df_sum_all_Years[df_sum_all_Years['Distinct_years'] !=10] less_8.to_csv("less 10 years hospid.csv") print("num of hospital with less years: ", len(less_8)) x = np.array(less_8['Distinct_years']) print(np.unique(x)) return df_sum_all_Years def draw_hist(data,num_of_bins,title,x_title,y_title,color): plt.hist(data, bins=num_of_bins, color=color,ec="black") plt.title(title) plt.xlabel(x_title) plt.ylabel(y_title) plt.show() def group_by_count(group_by_value,name): df_2010_2011_gb = df_2010_2011.groupby(group_by_value)[group_by_value].count().reset_index(name=name) df_2012_2013_gb = df_2012_2013.groupby(group_by_value)[group_by_value].count().reset_index(name=name) df_2014_2015_gb = df_2014_2015.groupby(group_by_value)[group_by_value].count().reset_index(name=name) df_2016_2017_gb = 
df_2016_2017.groupby(group_by_value)[group_by_value].count().reset_index(name=name) df_2018_2019_gb = df_2018_2019.groupby(group_by_value)[group_by_value].count().reset_index(name=name) df_merge_1=pd.merge(df_2010_2011_gb,df_2012_2013_gb, on=group_by_value) df_merge_2=pd.merge(df_merge_1,df_2014_2015_gb, on=group_by_value) df_merge_3=pd.merge(df_merge_2,df_2016_2017_gb, on=group_by_value) df_merge_4=pd.merge(df_merge_3,df_2018_2019_gb, on=group_by_value) cols = df_merge_4.columns.difference([group_by_value]) df_merge_4[name] = df_merge_4.loc[:,cols].sum(axis=1) df_new=pd.DataFrame() df_new[group_by_value] = df_merge_4[group_by_value] df_new[name] = df_merge_4[name] return df_new def groupby_siteid_prcab(): df2010 = df_2010.groupby('siteid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2010_reop') df2011 = df_2011.groupby('siteid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2011_reop') df2012 = df_2012.groupby('siteid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2012_reop') df2013 = df_2013.groupby('siteid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2013_reop') df2014 = df_2014.groupby('siteid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2014_reop') df2015 = df_2015.groupby('siteid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2015_reop') df2016 = df_2016.groupby('siteid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2016_reop') df2017 = df_2017.groupby('siteid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2017_reop') df2018 = df_2018.groupby('siteid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2018_reop') df2019 = df_2019.groupby('siteid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2019_reop') df1 = pd.merge(df2010, df2011, on='siteid') df2 = pd.merge(df1, df2012, on='siteid') df3 = pd.merge(df2, df2013, on='siteid') df4 = pd.merge(df3, df2014, on='siteid') df5 = pd.merge(df4, df2015, on='siteid') df6 = 
pd.merge(df5, df2016, on='siteid') df7 = pd.merge(df6, df2017, on='siteid') df8 = pd.merge(df7, df2018, on='siteid') df_sum_all_Years = pd.merge(df8, df2019, on='siteid') cols = df_sum_all_Years.columns.difference(['siteid']) df_sum_all_Years['Distinct_years_reop'] = df_sum_all_Years[cols].gt(0).sum(axis=1) cols_sum = df_sum_all_Years.columns.difference(['siteid', 'Distinct_years_reop']) df_sum_all_Years['Year_sum_reop'] = df_sum_all_Years.loc[:, cols_sum].sum(axis=1) df_sum_all_Years['Year_avg_reop'] = df_sum_all_Years['Year_sum_reop'] / df_sum_all_Years['Distinct_years_reop'] df_sum_all_Years.to_csv("sum all years siteid reop.csv") less_8 = df_sum_all_Years[df_sum_all_Years['Distinct_years_reop'] != 10] less_8.to_csv("less 10 years reop siteid.csv") print("num of sites with less years: ", len(less_8)) x = np.array(less_8['Distinct_years_reop']) print(np.unique(x)) df_10 = df_2010.groupby('siteid')['prcab'].apply(lambda x:((x==2) | (x==0)).sum()).reset_index(name='2010_Firstop') df_11 = df_2011.groupby('siteid')['prcab'].apply(lambda x:((x==2) | (x==0)).sum()).reset_index(name='2011_Firstop') df_12 = df_2012.groupby('siteid')['prcab'].apply(lambda x:((x==2) | (x==0)).sum()).reset_index(name='2012_Firstop') df_13 = df_2013.groupby('siteid')['prcab'].apply(lambda x:((x==2) | (x==0)).sum()).reset_index(name='2013_Firstop') df_14 = df_2014.groupby('siteid')['prcab'].apply(lambda x:((x==2) | (x==0)).sum()).reset_index(name='2014_Firstop') df_15 = df_2015.groupby('siteid')['prcab'].apply(lambda x:((x==2) | (x==0)).sum()).reset_index(name='2015_Firstop') df_16 = df_2016.groupby('siteid')['prcab'].apply(lambda x:((x==2) | (x==0)).sum()).reset_index(name='2016_Firstop') df_17 = df_2017.groupby('siteid')['prcab'].apply(lambda x:((x==2) | (x==0)).sum()).reset_index(name='2017_Firstop') df_18 = df_2018.groupby('siteid')['prcab'].apply(lambda x:((x==2) | (x==0)).sum()).reset_index(name='2018_Firstop') df_19 = df_2019.groupby('siteid')['prcab'].apply(lambda x:((x==2) | 
(x==0)).sum()).reset_index(name='2019_Firstop') d1 = pd.merge(df_10, df_11, on='siteid') d2 = pd.merge(d1, df_12, on='siteid') d3 = pd.merge(d2, df_13, on='siteid') d4 = pd.merge(d3, df_14, on='siteid') d5 = pd.merge(d4, df_15, on='siteid') d6 = pd.merge(d5, df_16, on='siteid') d7 = pd.merge(d6, df_17, on='siteid') d8 = pd.merge(d7, df_18, on='siteid') df_sum_all_Years_total = pd.merge(d8, df_19, on='siteid') cols = df_sum_all_Years_total.columns.difference(['siteid']) df_sum_all_Years_total['Distinct_years'] = df_sum_all_Years_total[cols].gt(0).sum(axis=1) cols_sum = df_sum_all_Years_total.columns.difference(['siteid', 'Distinct_years']) df_sum_all_Years_total['Year_sum'] = df_sum_all_Years_total.loc[:, cols_sum].sum(axis=1) df_sum_all_Years_total['Year_avg'] = df_sum_all_Years_total['Year_sum'] / df_sum_all_Years_total['Distinct_years'] df_sum_all_Years_total.to_csv("First op sum all years siteid.csv") # df_sum_all_Years.to_csv("sum all years siteid.csv") # print(df_sum_all_Years) # print("num of all sites: ", len(df_sum_all_Years)) # less = df_sum_all_Years_total[df_sum_all_Years_total['Distinct_years'] != 10] less.to_csv("First op less 10 years siteid.csv") print("First op num of sites with less years: ", len(less)) x = np.array(less['Distinct_years']) print(np.unique(x)) temp_first = pd.DataFrame() temp_first['siteid'] = df_sum_all_Years_total['siteid'] temp_first['Year_avg_Firstop'] = df_sum_all_Years_total['Year_avg'] temp_reop = pd.DataFrame() temp_reop['siteid'] = df_sum_all_Years['siteid'] temp_reop['Year_avg_reop'] = df_sum_all_Years['Year_avg_reop'] df20 = pd.merge(avg_siteid, temp_first, on='siteid', how='left') total_avg_site_id = pd.merge(df20, temp_reop,on='siteid', how='left' ) total_avg_site_id['firstop/total'] = (total_avg_site_id['Year_avg_Firstop']/total_avg_site_id['total_year_avg'])*100 total_avg_site_id['reop/total'] = (total_avg_site_id['Year_avg_reop']/total_avg_site_id['total_year_avg'])*100 
total_avg_site_id.to_csv('total_avg_site_id.csv') # avg_siteid['Year_avg_Firstop'] = df_sum_all_Years_total['Year_avg'] # avg_siteid['Year_avg_reop'] = df_sum_all_Years['Year_avg_reop'] def groupby_surgid_prcab(): df2010 = df_2010.groupby('surgid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2010_reop') df2011 = df_2011.groupby('surgid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2011_reop') df2012 = df_2012.groupby('surgid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2012_reop') df2013 = df_2013.groupby('surgid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2013_reop') df2014 = df_2014.groupby('surgid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2014_reop') df2015 = df_2015.groupby('surgid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2015_reop') df2016 = df_2016.groupby('surgid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2016_reop') df2017 = df_2017.groupby('surgid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2017_reop') df2018 = df_2018.groupby('surgid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2018_reop') df2019 = df_2019.groupby('surgid')['prcab'].apply(lambda x: (x == 1).sum()).reset_index(name='2019_reop') df1 = pd.merge(df2010, df2011, on='surgid') df2 = pd.merge(df1, df2012, on='surgid') df3 = pd.merge(df2, df2013, on='surgid') df4 = pd.merge(df3, df2014, on='surgid') df5 = pd.merge(df4, df2015, on='surgid') df6 = pd.merge(df5, df2016, on='surgid') df7 = pd.merge(df6, df2017, on='surgid') df8 = pd.merge(df7, df2018, on='surgid') df_sum_all_Years = pd.merge(df8, df2019, on='surgid') cols = df_sum_all_Years.columns.difference(['surgid']) df_sum_all_Years['Distinct_years_reop'] = df_sum_all_Years[cols].gt(0).sum(axis=1) cols_sum = df_sum_all_Years.columns.difference(['surgid', 'Distinct_years_reop']) df_sum_all_Years['Year_sum_reop'] = df_sum_all_Years.loc[:, cols_sum].sum(axis=1) df_sum_all_Years['Year_avg_reop'] = 
df_sum_all_Years['Year_sum_reop'] / df_sum_all_Years['Distinct_years_reop'] df_sum_all_Years.to_csv("sum all years surgid reop.csv") less_8 = df_sum_all_Years[df_sum_all_Years['Distinct_years_reop'] != 10] less_8.to_csv("less 10 years reop surgid.csv") print("num of surgid with less years: ", len(less_8)) x = np.array(less_8['Distinct_years_reop']) print(np.unique(x)) df_10 = df_2010.groupby('surgid')['prcab'].apply(lambda x: ((x==2) | (x==0)).sum()).reset_index(name='2010_Firstop') df_11 = df_2011.groupby('surgid')['prcab'].apply(lambda x: ((x==2) | (x==0)).sum()).reset_index(name='2011_Firstop') df_12 = df_2012.groupby('surgid')['prcab'].apply(lambda x: ((x==2) | (x==0)).sum()).reset_index(name='2012_Firstop') df_13 = df_2013.groupby('surgid')['prcab'].apply(lambda x: ((x==2) | (x==0)).sum()).reset_index(name='2013_Firstop') df_14 = df_2014.groupby('surgid')['prcab'].apply(lambda x: ((x==2) | (x==0)).sum()).reset_index(name='2014_Firstop') df_15 = df_2015.groupby('surgid')['prcab'].apply(lambda x: ((x==2) | (x==0)).sum()).reset_index(name='2015_Firstop') df_16 = df_2016.groupby('surgid')['prcab'].apply(lambda x: ((x==2) | (x==0)).sum()).reset_index(name='2016_Firstop') df_17 = df_2017.groupby('surgid')['prcab'].apply(lambda x: ((x==2) | (x==0)).sum()).reset_index(name='2017_Firstop') df_18 = df_2018.groupby('surgid')['prcab'].apply(lambda x: ((x==2) | (x==0)).sum()).reset_index(name='2018_Firstop') df_19 = df_2019.groupby('surgid')['prcab'].apply(lambda x: ((x==2) | (x==0)).sum()).reset_index(name='2019_Firstop') print(df_18) d1 = pd.merge(df_10, df_11, on='surgid') d2 = pd.merge(d1, df_12, on='surgid') d3 = pd.merge(d2, df_13, on='surgid') d4 = pd.merge(d3, df_14, on='surgid') d5 = pd.merge(d4, df_15, on='surgid') d6 = pd.merge(d5, df_16, on='surgid') d7 = pd.merge(d6, df_17, on='surgid') d8 = pd.merge(d7, df_18, on='surgid') df_sum_all_Years_total = pd.merge(d8, df_19, on='surgid') cols = df_sum_all_Years_total.columns.difference(['surgid']) 
df_sum_all_Years_total['Distinct_years'] = df_sum_all_Years_total[cols].gt(0).sum(axis=1) cols_sum = df_sum_all_Years_total.columns.difference(['surgid', 'Distinct_years']) df_sum_all_Years_total['Year_sum'] = df_sum_all_Years_total.loc[:, cols_sum].sum(axis=1) df_sum_all_Years_total['Year_avg'] = df_sum_all_Years_total['Year_sum'] / df_sum_all_Years_total['Distinct_years'] df_sum_all_Years_total.to_csv("First op sum all years surgid.csv") # df_sum_all_Years.to_csv("sum all years siteid.csv") # print(df_sum_all_Years) # print("num of all sites: ", len(df_sum_all_Years)) # less = df_sum_all_Years_total[df_sum_all_Years_total['Distinct_years'] != 10] less.to_csv("First op less 10 years surgid.csv") print("First op num of surgid with less years: ", len(less)) x = np.array(less['Distinct_years']) print(np.unique(x)) temp_first = pd.DataFrame() temp_first['surgid'] = df_sum_all_Years_total['surgid'] temp_first['Year_avg_Firstop'] = df_sum_all_Years_total['Year_avg'] temp_reop = pd.DataFrame() temp_reop['surgid'] = df_sum_all_Years['surgid'] temp_reop['Year_avg_reop'] = df_sum_all_Years['Year_avg_reop'] df20 = pd.merge(avg_surgid, temp_first, on='surgid', how='left') total_avg_surgid = pd.merge(df20, temp_reop, on='surgid', how='left') total_avg_surgid['firstop/total'] = (total_avg_surgid['Year_avg_Firstop']/total_avg_surgid['total_year_avg'])*100 total_avg_surgid['reop/total'] = (total_avg_surgid['Year_avg_reop']/total_avg_surgid['total_year_avg'])*100 total_avg_surgid.to_csv('total_avg_surgid.csv') groupby_siteid() # groupby_hospid() groupby_siteid_prcab() groupby_surgid() groupby_surgid_prcab() # path="/tmp/pycharm_project_355/" # # # avg_surgid['firstop/total'] = (avg_surgid['Year_avg_Firstop']/avg_surgid['total_year_avg'])*100 # avg_surgid['reop/total'] = (avg_surgid['Year_avg_reop']/avg_surgid['total_year_avg'])*100 # # # avg_siteid['firstop/total'] = (avg_siteid['Year_avg_Firstop']/avg_siteid['total_year_avg'])*100 # avg_siteid['reop/total'] = 
(avg_siteid['Year_avg_reop']/avg_siteid['total_year_avg'])*100 # # avg_siteid.to_csv('total_avg_site_id.csv') # avg_surgid.to_csv('total_avg_surgid.csv') df_avg_siteid = pd.read_csv("total_avg_site_id.csv") df_avg_surgid = pd.read_csv("total_avg_surgid.csv") # # df_sum_hospid= pd.read_csv(path+"sum all years hospid.csv") # # draw_hist(df_avg_siteid['total_year_avg'],40,"siteid Histogram of yearly avg operation",'avg of Operation',"count of siteid",'skyblue') draw_hist(df_avg_siteid['Year_avg_Firstop'].dropna(),40,"siteid Histogram of yearly avg First operation",'avg of First Operation',"count of siteid",'skyblue') draw_hist(df_avg_siteid['Year_avg_reop'].dropna(),40,"siteid Histogram of yearly avg reOperation",'avg of reOperation',"count of siteid",'skyblue') draw_hist(df_avg_siteid['firstop/total'].dropna(),40,"siteid Histogram of yearly avg First operation/Total operation",'% of First Operation',"count of siteid",'palegreen') draw_hist(df_avg_siteid['reop/total'].dropna(),40,"siteid Histogram of yearly avg reOperation/Total operation",'% of reOperation',"count of siteid",'palegreen') # draw_hist(df_sum_surgid['Year_avg'],20,"surgid Histogram of yearly avg operation",'avg of Operation',"count of surgid") draw_hist(df_avg_surgid['total_year_avg'],40,"surgid Histogram of yearly avg operation",'avg of Operation',"count of surgid",'plum') draw_hist(df_avg_surgid['Year_avg_Firstop'].dropna(),40,"surgid Histogram of yearly avg First operation",'avg of First Operation',"count of surgid",'plum') draw_hist(df_avg_surgid['Year_avg_reop'].dropna(),40,"surgid Histogram of yearly avg reOperation",'avg of reOperation',"count of surgid",'plum') draw_hist(df_avg_surgid['firstop/total'].dropna(),40,"surgid Histogram of yearly avg First operation/Total operation",'% of First Operation',"count of surgid",'bisque') draw_hist(df_avg_surgid['reop/total'].dropna(),40,"surgid Histogram of yearly avg reOperation/Total operation",'% of reOperation',"count of surgid",'bisque')
55.10596
171
0.700597
4,136
24,963
3.917311
0.043279
0.046661
0.085545
0.093075
0.88594
0.854833
0.786384
0.719849
0.691458
0.673621
0
0.098849
0.112887
24,963
453
172
55.10596
0.632784
0.077875
0
0.18125
0
0
0.231004
0.017505
0
0
0
0
0
1
0.021875
false
0
0.009375
0
0.0375
0.071875
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
77c6b0a16082ca393eb363db777ca924e8878d87
52
py
Python
pype9/simulate/common/code_gen/__init__.py
tclose/Pype9
23f96c0885fd9df12d9d11ff800f816520e4b17a
[ "MIT" ]
null
null
null
pype9/simulate/common/code_gen/__init__.py
tclose/Pype9
23f96c0885fd9df12d9d11ff800f816520e4b17a
[ "MIT" ]
null
null
null
pype9/simulate/common/code_gen/__init__.py
tclose/Pype9
23f96c0885fd9df12d9d11ff800f816520e4b17a
[ "MIT" ]
1
2021-04-08T12:46:21.000Z
2021-04-08T12:46:21.000Z
from .base import BaseCodeGenerator, BASE_BUILD_DIR
26
51
0.865385
7
52
6.142857
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.096154
52
1
52
52
0.914894
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
77f1556273e1a00a3a4562e127dc715e9a6f32e3
175
py
Python
open_seq2seq/parts/centaur/__init__.py
gioannides/OpenSeq2Seq
9e13da51eda2d239539cad6a56f8db2bf11492d6
[ "Apache-2.0" ]
1,459
2017-09-11T17:58:19.000Z
2022-03-27T06:42:04.000Z
open_seq2seq/parts/centaur/__init__.py
gioannides/OpenSeq2Seq
9e13da51eda2d239539cad6a56f8db2bf11492d6
[ "Apache-2.0" ]
307
2017-09-14T05:52:16.000Z
2021-06-05T16:08:53.000Z
open_seq2seq/parts/centaur/__init__.py
gioannides/OpenSeq2Seq
9e13da51eda2d239539cad6a56f8db2bf11492d6
[ "Apache-2.0" ]
422
2017-09-11T19:13:21.000Z
2022-03-31T23:43:10.000Z
# Copyright (c) 2019 NVIDIA Corporation from .conv_block import ConvBlock from .attention import AttentionBlock from .batch_norm import BatchNorm1D from .prenet import Prenet
29.166667
39
0.834286
23
175
6.26087
0.695652
0
0
0
0
0
0
0
0
0
0
0.03268
0.125714
175
5
40
35
0.908497
0.211429
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
77fbb351af6929222ccbdff5415833bad52818e8
9,131
py
Python
tests/test_aiobaro.py
bassory99/aiobaro
fbb8fef4f40a66b72e28436a93aaff3cf50f1ece
[ "MIT" ]
null
null
null
tests/test_aiobaro.py
bassory99/aiobaro
fbb8fef4f40a66b72e28436a93aaff3cf50f1ece
[ "MIT" ]
null
null
null
tests/test_aiobaro.py
bassory99/aiobaro
fbb8fef4f40a66b72e28436a93aaff3cf50f1ece
[ "MIT" ]
null
null
null
import uuid import pytest from aiobaro import __version__ def test_version(): assert __version__ == "0.1.0" @pytest.mark.asyncio async def test_login_info(matrix_client): result = await matrix_client.login_info() assert result.ok @pytest.mark.asyncio async def test_register(matrix_client): result = await matrix_client.register( "test_user", password="test_password" ) assert result.ok @pytest.mark.asyncio async def test_login(matrix_client): result = await matrix_client.login("test_user", password="test_password") assert result.ok @pytest.mark.asyncio async def test_room_create(matrix_client): await test_register(matrix_client) await test_login(matrix_client) room_alias_name = None name = "Room" topic = None room_version = None federate = True is_direct = False preset = None invite = None initial_state = None power_level_override = None result = await matrix_client.room_create( name=name, room_alias_name=room_alias_name, topic=topic, room_version=room_version, federate=federate, is_direct=is_direct, preset=preset, invite=invite, initial_state=initial_state, power_level_override=power_level_override, ) assert result.ok assert result.json().get("room_id") @pytest.mark.asyncio async def test_sync(matrix_client): since = None timeout = None data_filter = None full_state = None set_presence = None result = await matrix_client.sync( since=since, timeout=timeout, data_filter=data_filter, full_state=full_state, set_presence=set_presence, ) assert result.ok @pytest.mark.asyncio async def test_room_send(matrix_client): room_id = None event_type = None body = None tx_id = None result = await matrix_client.room_send( room_id, event_type, body, tx_id, ) assert result.ok @pytest.mark.asyncio async def test_room_get_event(matrix_client): room_id, event_id = None, None result = await matrix_client.room_get_event(room_id, event_id) assert result.ok async def test_room_put_state(matrix_client): args, kwargs = [], {} result = await matrix_client.room_put_state(*args, **kwargs) assert 
result.ok async def test_room_get_state_event(matrix_client): args, kwargs = [], {} result = await matrix_client.room_get_state_event(*args, **kwargs) assert result.ok async def test_room_get_state(matrix_client): args, kwargs = [], {} result = await matrix_client.room_get_state(*args, **kwargs) assert result.ok async def test_room_redact(matrix_client): args, kwargs = [], {} result = await matrix_client.room_redact(*args, **kwargs) assert result.ok async def test_room_kick(matrix_client): args, kwargs = [], {} result = await matrix_client.room_kick(*args, **kwargs) assert result.ok async def test_room_ban(matrix_client): args, kwargs = [], {} result = await matrix_client.room_ban(*args, **kwargs) assert result.ok async def test_room_unban(matrix_client): args, kwargs = [], {} result = await matrix_client.room_unban(*args, **kwargs) assert result.ok async def test_room_invite(matrix_client): args, kwargs = [], {} result = await matrix_client.room_invite(*args, **kwargs) assert result.ok async def test_join(matrix_client): args, kwargs = [], {} result = await matrix_client.join(*args, **kwargs) assert result.ok async def test_room_leave(matrix_client): args, kwargs = [], {} result = await matrix_client.room_leave(*args, **kwargs) assert result.ok async def test_room_forget(matrix_client): args, kwargs = [], {} result = await matrix_client.room_forget(*args, **kwargs) assert result.ok async def test_room_messages(matrix_client): args, kwargs = [], {} result = await matrix_client.room_messages(*args, **kwargs) assert result.ok async def test_keys_upload(matrix_client): args, kwargs = [], {} result = await matrix_client.keys_upload(*args, **kwargs) assert result.ok async def test_keys_query(matrix_client): args, kwargs = [], {} result = await matrix_client.keys_query(*args, **kwargs) assert result.ok async def test_keys_claim(matrix_client): args, kwargs = [], {} result = await matrix_client.keys_claim(*args, **kwargs) assert result.ok async def 
test_to_device(matrix_client): args, kwargs = [], {} result = await matrix_client.to_device(*args, **kwargs) assert result.ok async def test_devices(matrix_client): args, kwargs = [], {} result = await matrix_client.devices(*args, **kwargs) assert result.ok async def test_update_device(matrix_client): args, kwargs = [], {} result = await matrix_client.update_device(*args, **kwargs) assert result.ok async def test_delete_devices(matrix_client): args, kwargs = [], {} result = await matrix_client.delete_devices(*args, **kwargs) assert result.ok async def test_joined_members(matrix_client): args, kwargs = [], {} result = await matrix_client.joined_members(*args, **kwargs) assert result.ok async def test_joined_rooms(matrix_client): args, kwargs = [], {} result = await matrix_client.joined_rooms(*args, **kwargs) assert result.ok async def test_room_resolve_alias(matrix_client): args, kwargs = [], {} result = await matrix_client.room_resolve_alias(*args, **kwargs) assert result.ok async def test_room_typing(matrix_client): args, kwargs = [], {} result = await matrix_client.room_typing(*args, **kwargs) assert result.ok async def test_update_receipt_marker(matrix_client): args, kwargs = [], {} result = await matrix_client.update_receipt_marker(*args, **kwargs) assert result.ok async def test_room_read_markers(matrix_client): args, kwargs = [], {} result = await matrix_client.room_read_markers(*args, **kwargs) assert result.ok async def test_content_repository_config(matrix_client): args, kwargs = [], {} result = await matrix_client.content_repository_config(*args, **kwargs) assert result.ok async def test_upload(matrix_client): args, kwargs = [], {} result = await matrix_client.upload(*args, **kwargs) assert result.ok async def test_download(matrix_client): args, kwargs = [], {} result = await matrix_client.download(*args, **kwargs) assert result.ok async def test_thumbnail(matrix_client): args, kwargs = [], {} result = await matrix_client.thumbnail(*args, **kwargs) assert 
result.ok async def test_profile_get(matrix_client): args, kwargs = [], {} result = await matrix_client.profile_get(*args, **kwargs) assert result.ok async def test_profile_get_displayname(matrix_client): args, kwargs = [], {} result = await matrix_client.profile_get_displayname(*args, **kwargs) assert result.ok async def test_profile_set_displayname(matrix_client): args, kwargs = [], {} result = await matrix_client.profile_set_displayname(*args, **kwargs) assert result.ok async def test_profile_get_avatar(matrix_client): args, kwargs = [], {} result = await matrix_client.profile_get_avatar(*args, **kwargs) assert result.ok async def test_profile_set_avatar(matrix_client): args, kwargs = [], {} result = await matrix_client.profile_set_avatar(*args, **kwargs) assert result.ok async def test_get_presence(matrix_client): args, kwargs = [], {} result = await matrix_client.get_presence(*args, **kwargs) assert result.ok async def test_set_presence(matrix_client): args, kwargs = [], {} result = await matrix_client.set_presence(*args, **kwargs) assert result.ok async def test_whoami(matrix_client): args, kwargs = [], {} result = await matrix_client.whoami(*args, **kwargs) assert result.ok async def test_room_context(matrix_client): args, kwargs = [], {} result = await matrix_client.room_context(*args, **kwargs) assert result.ok async def test_upload_filter(matrix_client): args, kwargs = [], {} result = await matrix_client.upload_filter(*args, **kwargs) assert result.ok async def test_set_pushrule(matrix_client): args, kwargs = [], {} result = await matrix_client.set_pushrule(*args, **kwargs) assert result.ok async def test_delete_pushrule(matrix_client): args, kwargs = [], {} result = await matrix_client.delete_pushrule(*args, **kwargs) assert result.ok async def test_enable_pushrule(matrix_client): args, kwargs = [], {} result = await matrix_client.enable_pushrule(*args, **kwargs) assert result.ok async def test_set_pushrule_actions(matrix_client): args, kwargs = [], {} 
result = await matrix_client.set_pushrule_actions(*args, **kwargs) assert result.ok @pytest.mark.asyncio async def test_logout(matrix_client): result = await matrix_client.logout() assert result.ok
25.016438
77
0.694338
1,180
9,131
5.100847
0.085593
0.207343
0.101678
0.194883
0.794318
0.787672
0.755275
0.73052
0.669214
0.218641
0
0.000409
0.196474
9,131
364
78
25.085165
0.819954
0
0
0.398438
0
0
0.006571
0
0
0
0
0
0.207031
1
0.003906
false
0.007813
0.011719
0
0.015625
0
0
0
0
null
1
0
1
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
7ac7c297747b810de6978d3d7dbd261230c0d8c2
180
py
Python
allure-robotframework/examples/label/labels_library.py
bhumikapaharia/allure-python
b571b9bfc80af6f0431062ee83425e62d90163e4
[ "Apache-2.0" ]
558
2015-03-14T18:26:56.000Z
2022-02-21T00:09:49.000Z
allure-robotframework/examples/label/labels_library.py
bhumikapaharia/allure-python
b571b9bfc80af6f0431062ee83425e62d90163e4
[ "Apache-2.0" ]
448
2015-01-09T10:00:47.000Z
2022-03-24T15:25:02.000Z
allure-robotframework/examples/label/labels_library.py
bhumikapaharia/allure-python
b571b9bfc80af6f0431062ee83425e62d90163e4
[ "Apache-2.0" ]
244
2015-01-26T08:03:11.000Z
2022-03-07T17:06:30.000Z
import allure @allure.label('layer', 'UI') def open_browser_with_ui_layer(): pass def add_custom_label(label_type, *labels): allure.dynamic.label(label_type, *labels)
15
45
0.733333
26
180
4.769231
0.576923
0.16129
0.225806
0.322581
0
0
0
0
0
0
0
0
0.138889
180
11
46
16.363636
0.8
0
0
0
0
0
0.039106
0
0
0
0
0
0
1
0.333333
false
0.166667
0.166667
0
0.5
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
7acd1b475ea59c196f814cbf4d687735c8f331e9
84
py
Python
model/__init__.py
Wliubei/Multiple-Resolutionse
3d8a9a725a00e5f9a2ac3b2b9659bc22b3876f75
[ "Unlicense" ]
null
null
null
model/__init__.py
Wliubei/Multiple-Resolutionse
3d8a9a725a00e5f9a2ac3b2b9659bc22b3876f75
[ "Unlicense" ]
null
null
null
model/__init__.py
Wliubei/Multiple-Resolutionse
3d8a9a725a00e5f9a2ac3b2b9659bc22b3876f75
[ "Unlicense" ]
null
null
null
from .senet import se_resnet34, se_resnet12, se_resnet50 from .lcnn import lcnn_net
28
56
0.833333
14
84
4.714286
0.642857
0
0
0
0
0
0
0
0
0
0
0.081081
0.119048
84
2
57
42
0.810811
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7adaa99b46fee8cb79c46397e704e72c03c3f21d
61
py
Python
examples/oval.py
LettError/drawbot
dce9af449d429af3f10827654d8b9d3bb8bb8efe
[ "BSD-2-Clause" ]
2
2015-09-17T01:27:02.000Z
2020-11-26T12:07:13.000Z
examples/oval.py
LettError/drawbot
dce9af449d429af3f10827654d8b9d3bb8bb8efe
[ "BSD-2-Clause" ]
null
null
null
examples/oval.py
LettError/drawbot
dce9af449d429af3f10827654d8b9d3bb8bb8efe
[ "BSD-2-Clause" ]
null
null
null
# draw an oval # x y w h oval(100, 100, 200, 200)
20.333333
24
0.491803
12
61
2.5
0.75
0
0
0
0
0
0
0
0
0
0
0.324324
0.393443
61
3
24
20.333333
0.486486
0.52459
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
7af2de3f7f8cc84373ee9d37f2f8fd29ef5581ce
39
py
Python
tests/__init__.py
djfrancesco/PyInspire
52019cfc4d70a5c4537b5d46619b889739c7bc75
[ "MIT" ]
null
null
null
tests/__init__.py
djfrancesco/PyInspire
52019cfc4d70a5c4537b5d46619b889739c7bc75
[ "MIT" ]
11
2020-02-25T10:31:07.000Z
2020-02-27T12:52:05.000Z
tests/__init__.py
djfrancesco/PyInspire
52019cfc4d70a5c4537b5d46619b889739c7bc75
[ "MIT" ]
null
null
null
"""Unit test package for pyinspire."""
19.5
38
0.692308
5
39
5.4
1
0
0
0
0
0
0
0
0
0
0
0
0.128205
39
1
39
39
0.794118
0.820513
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
7aff1b7b6ae451eac4e9d69d0cb35eb04b0c843d
39
py
Python
coursera/python_programming_basics/1_week_06_task.py
anklav24/Python-Education
49ebcfabda1376390ee71e1fe321a51e33831f9e
[ "Apache-2.0" ]
null
null
null
coursera/python_programming_basics/1_week_06_task.py
anklav24/Python-Education
49ebcfabda1376390ee71e1fe321a51e33831f9e
[ "Apache-2.0" ]
null
null
null
coursera/python_programming_basics/1_week_06_task.py
anklav24/Python-Education
49ebcfabda1376390ee71e1fe321a51e33831f9e
[ "Apache-2.0" ]
null
null
null
print('Hello, ', input(), '!', sep='')
19.5
38
0.461538
4
39
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.128205
39
1
39
39
0.529412
0
0
0
0
0
0.205128
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
bb55dfba3c5e871595225c38e20180c5babcee50
196
py
Python
gsea_api/gsea/test_gsea_java.py
krassowski/gsea-api
deb562ea55871b799eb501a798dd49a881ff9523
[ "MIT" ]
8
2020-03-06T02:03:40.000Z
2022-01-22T15:57:17.000Z
gsea_api/gsea/test_gsea_java.py
krassowski/gsea-api
deb562ea55871b799eb501a798dd49a881ff9523
[ "MIT" ]
3
2020-03-06T01:48:53.000Z
2021-10-06T04:15:55.000Z
gsea_api/gsea/test_gsea_java.py
krassowski/gsea-api
deb562ea55871b799eb501a798dd49a881ff9523
[ "MIT" ]
2
2019-12-01T18:41:07.000Z
2020-07-15T14:52:17.000Z
from pytest import raises from gsea_api.gsea.java import GSEADesktop def test_gsea_java(): with raises(Exception, match='Could not find GSEADesktop installation in'): GSEADesktop()
21.777778
79
0.755102
26
196
5.576923
0.692308
0.110345
0
0
0
0
0
0
0
0
0
0
0.173469
196
8
80
24.5
0.895062
0
0
0
0
0
0.214286
0
0
0
0
0
0
1
0.2
true
0
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
246f23afbb58618ff1328d31123e867d30cf54ac
83
py
Python
nlp_basictasks/tasks/__init__.py
xianghuisun/nlp-basictasks
55c8d2e09f7543dc533ee3938e6613d040942e61
[ "MIT" ]
24
2021-08-25T13:24:11.000Z
2022-03-22T12:07:19.000Z
build/lib/nlp_basictasks/tasks/__init__.py
xianghuisun/nlp-basictasks
55c8d2e09f7543dc533ee3938e6613d040942e61
[ "MIT" ]
2
2021-11-29T06:17:49.000Z
2021-12-25T06:34:49.000Z
build/lib/nlp_basictasks/tasks/__init__.py
xianghuisun/nlp-basictasks
55c8d2e09f7543dc533ee3938e6613d040942e61
[ "MIT" ]
4
2021-12-30T13:14:21.000Z
2022-03-03T08:47:32.000Z
from .cls import cls from .ner import Ner from .sts import sts from .mrc import mrc
20.75
20
0.771084
16
83
4
0.375
0
0
0
0
0
0
0
0
0
0
0
0.180723
83
4
21
20.75
0.941176
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
247d321ce0c16d5bb207856247ce1707ddf9bd17
15,527
py
Python
qa327_frontend_test/test_r1.py
RF0606/CISC327_PROJECT
b0e5839fdc1b6f754bbf05ce174feca9dac54a69
[ "MIT" ]
null
null
null
qa327_frontend_test/test_r1.py
RF0606/CISC327_PROJECT
b0e5839fdc1b6f754bbf05ce174feca9dac54a69
[ "MIT" ]
null
null
null
qa327_frontend_test/test_r1.py
RF0606/CISC327_PROJECT
b0e5839fdc1b6f754bbf05ce174feca9dac54a69
[ "MIT" ]
null
null
null
from importlib import reload import pytest import os import io import sys import qa327.app as app path = os.path.dirname(os.path.abspath(__file__)) '''test case for R1.1: Test if user is logged in''' def test_loggedIn(capsys): if app.status: terminal_input = ['logout', 'exit'] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'your balance: 1000', 'type your choice:', 'sell buy update logout', 'logout successfully', 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.2: Test if user is not logged in''' def test_notlogged(capsys): if not app.status: terminal_input = ["exit"] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.3.1: enter buy can go to buy session when user is logged in''' def test_goBuy_logged(capsys): if app.status: terminal_input = ["buy", 'logout', 'exit'] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'your balance: 1000', 'type your choice:', 'sell buy update logout', 'buying session started successfully', 'please type ticket name, quantity:', 'please retype', 'the number of inputs should be 2', 'type your choice:', 'register login exit', 'exit' ] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.3.2: enter sell can go to sell session when user is logged in''' def test_goSell_logged(capsys): if app.status: terminal_input = ["sell", 'logout', 'exit'] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'your balance: 1000', 'type your choice:', 'sell buy update logout', 'selling session started successfully', 'please type ticket name, price, quantity, date:', 'please retype', 'the number of inputs should be 4', 'type your choice:', 'register login exit', 'exit'] helper(capsys, 
terminal_input, expected_tail_of_terminal_output) '''test case for R1.3.3: enter update can go to update session when user is logged in''' def test_goUpdate_logged(capsys): if app.status: terminal_input = ["update", 'logout', 'exit'] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'your balance: 1000', 'type your choice:', 'sell buy update logout', 'updating session started successfully', 'please type ticket name, price, quantity, date:', 'please retype', 'the number of inputs should be 4', 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.3.4: enter logout can go to out session when user is logged in''' def test_logout_successfully(capsys): if app.status: terminal_input = ["logout", 'exit'] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'your balance: 1000', 'type your choice:', 'sell buy update logout', 'please retype', 'the number of inputs should be 2' 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.3.5: enter login can go to login session when user is not logged in''' def test_login_whenNotLoggedIn(capsys): if not app.status: terminal_input = ["login", "logout", "exit"] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'type your choice:', 'register login exit', 'login session started successfully', 'please type your email and password:', 'please retype', 'the number of inputs should be 2', 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.3.6: enter register can go to register session when user is not logged in''' def test_register_successfully(capsys): if not app.status: terminal_input = ["register", 'logout', 'exit', 'exit'] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'type 
your choice:', 'register login exit', 'register session started successfully', 'please enter your email, user name, password and ' 'confirm your password:', 'please retype', 'the number of inputs should be 4 or exit', 'do you want to exit register session(type exit to leave):type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.3.7: enter exit can exit the program when user is not logged in''' def test_exit_successfully(capsys): if not app.status: terminal_input = ["exit"] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.4.1: when user is not logged in, buy command are not accepted''' def test_goBuy_notLogged(capsys): if not app.status: terminal_input = ["buy", 'exit'] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'type your choice:', 'register login exit', 'invalid command', 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.4.2: when user is not logged in, sell command are not accepted''' def test_goSell_notLogged(capsys): if not app.status: terminal_input = ["sell", 'exit'] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'type your choice:', 'register login exit', 'invalid command', 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.4.3: when user is not logged in, update command are not accepted''' def test_goUpdate_notLogged(capsys): if not app.status: terminal_input = ["update", 'exit'] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'type your choice:', 'register login exit', 'invalid command', 'type your choice:', 'register login exit', 'exit'] 
helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.4.4: when user is not logged in, logout command are not accepted''' def test_logout_fail(capsys): if not app.status: terminal_input = ["logout", 'exit'] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'type your choice:', 'register login exit', 'invalid command', 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.5.1: when user is logged in, login command are not accepted''' def test_login_fail(capsys): if app.status: terminal_input = ["login", "logout", "exit"] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'your balance: 1000', 'type your choice:', 'sell buy update logout', 'invalid command' 'your balance: 1000', 'type your choice:', 'sell buy update logout', "logout successfully", 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.5.2: when user is logged in, register command are not accepted''' def test_register_fail(capsys): if app.status: terminal_input = ["register", "logout", "exit"] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'your balance: 1000', 'type your choice:', 'sell buy update logout', 'invalid command' 'your balance: 1000', 'type your choice:', 'sell buy update logout', "logout successfully", 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output) '''test case for R1.5.3: when user is logged in, exit command are not accepted''' def test_exit_fail(capsys): if app.status: terminal_input = ["exit", "logout", "exit"] expected_tail_of_terminal_output = ['Welcome the Queens ticket trade machine', 'your balance: 1000', 'type your choice:', 'sell buy update logout', 'invalid command' 'your balance: 1000', 'type your choice:', 'sell buy update 
logout', "logout successfully", 'type your choice:', 'register login exit', 'exit'] helper(capsys, terminal_input, expected_tail_of_terminal_output, ) def helper( capsys, terminal_input, expected_tail_of_terminal_output): """Helper function for testing Arguments: capsys -- object created by pytest to capture stdout and stderr terminal_input -- list of string for terminal input expected_tail_of_terminal_output list of expected string at the tail of terminal intput_valid_accounts -- list of valid accounts in the valid_account_list_file expected_output_transactions -- list of expected output transactions """ # cleanup package reload(app) # set terminal input sys.stdin = io.StringIO( '\n'.join(terminal_input)) # run the program with pytest.raises(SystemExit): app.main() # capture terminal output / errors # assuming that in this case we don't use stderr out, err = capsys.readouterr() # split terminal output in lines out_lines = out.splitlines() # compare terminal outputs at the end.` for i in range(1, len(expected_tail_of_terminal_output) + 1): index = i * -1 assert expected_tail_of_terminal_output[index] == out_lines[index]
44.236467
121
0.448702
1,370
15,527
4.923358
0.111679
0.078873
0.076798
0.11742
0.813047
0.786953
0.742328
0.690882
0.639288
0.613047
0
0.012729
0.483931
15,527
350
122
44.362857
0.829028
0.038964
0
0.724891
0
0
0.24359
0
0
0
0
0
0.004367
1
0.074236
false
0.0131
0.026201
0
0.100437
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
24878eadcf4199f79eaad54d477db1fc21c9b788
62
py
Python
test_django_app/migrations/__init__.py
avendesora/django-scripture-index
9877e74f9864d3c7d300409e8b1be9a1c3cabcf4
[ "MIT" ]
1
2020-10-10T18:24:08.000Z
2020-10-10T18:24:08.000Z
test_django_app/migrations/__init__.py
avendesora/django-scripture-index
9877e74f9864d3c7d300409e8b1be9a1c3cabcf4
[ "MIT" ]
1
2021-02-23T11:45:23.000Z
2021-02-24T10:20:41.000Z
test_django_app/migrations/__init__.py
avendesora/django-scripture-index
9877e74f9864d3c7d300409e8b1be9a1c3cabcf4
[ "MIT" ]
1
2020-10-27T18:02:37.000Z
2020-10-27T18:02:37.000Z
"""Database migrations for the test_django_app Django app."""
31
61
0.774194
9
62
5.111111
0.777778
0.391304
0
0
0
0
0
0
0
0
0
0
0.112903
62
1
62
62
0.836364
0.887097
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
24ab9b32f5b8b978d6f2a8407edf3cb8c7d8322f
189
py
Python
chibi_dl_elasticsearch/site/nhentai/site.py
dem4ply/chibi_dl_elasticsearch
68adfdb8a7e7a26d56ee6a7f4dffcc3324504c92
[ "WTFPL" ]
null
null
null
chibi_dl_elasticsearch/site/nhentai/site.py
dem4ply/chibi_dl_elasticsearch
68adfdb8a7e7a26d56ee6a7f4dffcc3324504c92
[ "WTFPL" ]
null
null
null
chibi_dl_elasticsearch/site/nhentai/site.py
dem4ply/chibi_dl_elasticsearch
68adfdb8a7e7a26d56ee6a7f4dffcc3324504c92
[ "WTFPL" ]
null
null
null
from chibi_dl.site.nhentai import Nhentai as Nhentai_base from .episodes import Episode class Nhentai( Nhentai_base ): @property def episode_class( self ): return Episode
21
57
0.740741
25
189
5.44
0.6
0.161765
0
0
0
0
0
0
0
0
0
0
0.206349
189
8
58
23.625
0.906667
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.833333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
700f2db2823d1ce6dda5357c9413d105d7fe0893
96
py
Python
kill2.py
laughface809/CPU-Stress
c592e6989a466bf4920092e8cda2017f464adf35
[ "MIT" ]
null
null
null
kill2.py
laughface809/CPU-Stress
c592e6989a466bf4920092e8cda2017f464adf35
[ "MIT" ]
null
null
null
kill2.py
laughface809/CPU-Stress
c592e6989a466bf4920092e8cda2017f464adf35
[ "MIT" ]
null
null
null
import os os.system(' ps aux| grep gnome-panel | awk \'{if($3>80) print $2}\' |xargs kill -9 ')
32
85
0.614583
18
96
3.277778
0.944444
0
0
0
0
0
0
0
0
0
0
0.0625
0.166667
96
3
85
32
0.675
0
0
0
0
0
0.505155
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
703c9b5f0067cc42e732569b715802aef2bb9bce
22
py
Python
__int__.py
CYB3R-G0D/Comicsru
e4dfa6513bd4262d0e8e3d9b7b0e0a72f0c801bb
[ "MIT" ]
null
null
null
__int__.py
CYB3R-G0D/Comicsru
e4dfa6513bd4262d0e8e3d9b7b0e0a72f0c801bb
[ "MIT" ]
null
null
null
__int__.py
CYB3R-G0D/Comicsru
e4dfa6513bd4262d0e8e3d9b7b0e0a72f0c801bb
[ "MIT" ]
null
null
null
from . import comicsru
22
22
0.818182
3
22
6
1
0
0
0
0
0
0
0
0
0
0
0
0.136364
22
1
22
22
0.947368
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
7062a7483e56e14b0b46830852aa0f2b23b2c9f0
66
py
Python
easyTX/__init__.py
VDHARV/easyTX-2.0
90cc9fcdccfd4ff267d13c14e5417d87df0475cc
[ "MIT" ]
null
null
null
easyTX/__init__.py
VDHARV/easyTX-2.0
90cc9fcdccfd4ff267d13c14e5417d87df0475cc
[ "MIT" ]
null
null
null
easyTX/__init__.py
VDHARV/easyTX-2.0
90cc9fcdccfd4ff267d13c14e5417d87df0475cc
[ "MIT" ]
null
null
null
from easyTX.client import Client from easyTX.server import Server
22
32
0.848485
10
66
5.6
0.5
0.357143
0
0
0
0
0
0
0
0
0
0
0.121212
66
2
33
33
0.965517
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
707ce8453d8eb13fa0f3d6ec9c22892f0c842657
24
py
Python
omg.py
ck4xa/cs3240-labdemo
390a486b843569777b16ac5e27cc14429349eecb
[ "MIT" ]
null
null
null
omg.py
ck4xa/cs3240-labdemo
390a486b843569777b16ac5e27cc14429349eecb
[ "MIT" ]
null
null
null
omg.py
ck4xa/cs3240-labdemo
390a486b843569777b16ac5e27cc14429349eecb
[ "MIT" ]
null
null
null
print("This is OMG!!!")
12
23
0.583333
4
24
3.5
1
0
0
0
0
0
0
0
0
0
0
0
0.125
24
1
24
24
0.666667
0
0
0
0
0
0.583333
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
707e98ef8e3a17129ee62f530f57c55a56df5a84
62
py
Python
inspiretools/__init__.py
DavidMStraub/inspiretools
34eb76a732587c60ac2355a95cd9828151cd5810
[ "MIT" ]
10
2016-01-05T13:25:23.000Z
2019-09-26T12:40:33.000Z
inspiretools/__init__.py
DavidMStraub/inspiretools
34eb76a732587c60ac2355a95cd9828151cd5810
[ "MIT" ]
7
2016-01-09T13:06:30.000Z
2021-07-05T21:51:19.000Z
inspiretools/__init__.py
DavidMStraub/inspiretools
34eb76a732587c60ac2355a95cd9828151cd5810
[ "MIT" ]
7
2015-12-22T19:11:26.000Z
2020-11-08T20:23:44.000Z
"""Main module for inspiretools.""" from .functions import *
15.5
35
0.709677
7
62
6.285714
1
0
0
0
0
0
0
0
0
0
0
0
0.145161
62
3
36
20.666667
0.830189
0.467742
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
561b60a0e35134459d157ef3dc70080a44f0286b
86
wsgi
Python
sprint_challenge/web_app/web_app.wsgi
macr/DS-Unit-3-Sprint-3-Productization-and-Cloud
510bed71b23c77b60972a0215df5cc6c7b7b78bb
[ "MIT" ]
null
null
null
sprint_challenge/web_app/web_app.wsgi
macr/DS-Unit-3-Sprint-3-Productization-and-Cloud
510bed71b23c77b60972a0215df5cc6c7b7b78bb
[ "MIT" ]
null
null
null
sprint_challenge/web_app/web_app.wsgi
macr/DS-Unit-3-Sprint-3-Productization-and-Cloud
510bed71b23c77b60972a0215df5cc6c7b7b78bb
[ "MIT" ]
null
null
null
import sys sys.path.insert(0, '/web_app') from aq_dashboard import app as application
21.5
43
0.790698
15
86
4.4
0.8
0
0
0
0
0
0
0
0
0
0
0.013158
0.116279
86
3
44
28.666667
0.855263
0
0
0
0
0
0.093023
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
56606f398d50ad66000483ef205b26d426d943ad
21
py
Python
main.py
us-upal/pyFiveDomailSpacialization
6253b7cbb8072c8a8b56a7ffd4eecf7e41b30638
[ "MIT" ]
null
null
null
main.py
us-upal/pyFiveDomailSpacialization
6253b7cbb8072c8a8b56a7ffd4eecf7e41b30638
[ "MIT" ]
null
null
null
main.py
us-upal/pyFiveDomailSpacialization
6253b7cbb8072c8a8b56a7ffd4eecf7e41b30638
[ "MIT" ]
1
2021-02-08T04:55:29.000Z
2021-02-08T04:55:29.000Z
print("this is main")
21
21
0.714286
4
21
3.75
1
0
0
0
0
0
0
0
0
0
0
0
0.095238
21
1
21
21
0.789474
0
0
0
0
0
0.545455
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
568c612ccec22cc4fde775d590d3ab2dc429dce7
16
py
Python
hub/tasks.py
westerncapelabs/uopboh-hub
10c36026e5588d1490dfa3396745db5b9a94e875
[ "BSD-3-Clause" ]
null
null
null
hub/tasks.py
westerncapelabs/uopboh-hub
10c36026e5588d1490dfa3396745db5b9a94e875
[ "BSD-3-Clause" ]
2
2016-01-18T16:23:53.000Z
2016-02-22T08:50:56.000Z
hub/tasks.py
westerncapelabs/uopboh-hub
10c36026e5588d1490dfa3396745db5b9a94e875
[ "BSD-3-Clause" ]
null
null
null
# tasks go here
8
15
0.6875
3
16
3.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.25
16
1
16
16
0.916667
0.8125
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
3b1b57ab99ce84b8424058fd0fd0574a1b257823
122
py
Python
aiosnow/__init__.py
michaeldcanady/aiosnow
db515b1560d651fc7696a184990c2a2d68db8961
[ "MIT" ]
38
2020-08-03T17:58:48.000Z
2022-03-30T19:39:24.000Z
aiosnow/__init__.py
michaeldcanady/aiosnow
db515b1560d651fc7696a184990c2a2d68db8961
[ "MIT" ]
34
2020-01-20T10:11:46.000Z
2020-06-05T21:25:23.000Z
aiosnow/__init__.py
michaeldcanady/aiosnow
db515b1560d651fc7696a184990c2a2d68db8961
[ "MIT" ]
5
2021-03-26T19:35:20.000Z
2022-01-23T20:09:55.000Z
from .client import Client from .models import ModelSchema, Pluck, TableModel, fields from .query import Selector, select
30.5
58
0.811475
16
122
6.1875
0.6875
0
0
0
0
0
0
0
0
0
0
0
0.131148
122
3
59
40.666667
0.933962
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
3b510c6c371d8dd382701c208f759a9682ed0a59
34
py
Python
src/allinux/coreutils/__init__.py
andrewliqw/python-linux-command
e3286fdbae6cf19a03a577a202343154d79a480d
[ "MIT" ]
null
null
null
src/allinux/coreutils/__init__.py
andrewliqw/python-linux-command
e3286fdbae6cf19a03a577a202343154d79a480d
[ "MIT" ]
null
null
null
src/allinux/coreutils/__init__.py
andrewliqw/python-linux-command
e3286fdbae6cf19a03a577a202343154d79a480d
[ "MIT" ]
null
null
null
from .system_context import uname
17
33
0.852941
5
34
5.6
1
0
0
0
0
0
0
0
0
0
0
0
0.117647
34
1
34
34
0.933333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
8ed72570edc9601b6427463171c024154261c3ab
5,506
py
Python
models/transformer/m2_transformer.py
CurryYuan/X-Trans2Cap
c78a27209f14fcbbec74fe8b5edc06faea2e7d44
[ "Apache-2.0" ]
11
2022-03-08T13:05:59.000Z
2022-03-30T02:13:33.000Z
models/transformer/m2_transformer.py
CurryYuan/X-Trans2Cap
c78a27209f14fcbbec74fe8b5edc06faea2e7d44
[ "Apache-2.0" ]
1
2022-03-25T15:27:09.000Z
2022-03-25T15:27:09.000Z
models/transformer/m2_transformer.py
CurryYuan/X-Trans2Cap
c78a27209f14fcbbec74fe8b5edc06faea2e7d44
[ "Apache-2.0" ]
null
null
null
from typing import Tuple import torch import torch.nn as nn import torch.nn.functional as F from icecream import ic from .containers import Module from .decoders import MeshedDecoder from .encoders import MemoryAugmentedEncoder, DualPathMemoryAugmentedEncoder from .attention import ScaledDotProductAttentionMemory, ScaledDotProductAttention from .beam_search import BeamSearch from .utils import TensorOrSequence, get_batch_size, get_device class M2Transformer(Module): def __init__(self, vocab, max_seq_len, object_latent_dim, padding_idx): super(M2Transformer, self).__init__() self.padding_idx = padding_idx self.bos_idx = vocab['word2idx']['sos'] self.eos_idx = vocab['word2idx']['eos'] self.vocab = vocab self.encoder = MemoryAugmentedEncoder(3, 0, d_in=object_latent_dim, attention_module=ScaledDotProductAttentionMemory, attention_module_kwargs={'m': 40} ) self.decoder = MeshedDecoder(len(vocab["word2idx"]), max_seq_len, 1, padding_idx) self.register_state('enc_output', None) self.register_state('mask_enc', None) def forward(self, objects_features, tokens): # input (b_s, seq_len, d_in) mask_enc = (torch.sum(objects_features, -1) == self.padding_idx).unsqueeze(1).unsqueeze( 1) # (b_s, 1, 1, seq_len) objects_features = self.encoder(objects_features, mask_enc) # (B, 3, n_object, 512) dec_outputs, intermediate_feats = self.decoder(tokens, objects_features, mask_enc) # (B, max_len, vocab_size) return dec_outputs, intermediate_feats, objects_features def step(self, t, prev_output, visual, seq, mode='teacher_forcing', **kwargs): it = None if mode == 'teacher_forcing': raise NotImplementedError elif mode == 'feedback': if t == 0: self.mask_enc = (torch.sum(visual, -1) == self.padding_idx).unsqueeze(1).unsqueeze( 1) # (b_s, 1, 1, seq_len) self.enc_output = self.encoder(visual, self.mask_enc) if isinstance(visual, torch.Tensor): it = visual.data.new_full((visual.shape[0], 1), self.bos_idx).long() else: it = visual[0].data.new_full((visual[0].shape[0], 1), self.bos_idx).long() 
else: it = prev_output output = self.decoder(it, self.enc_output, self.mask_enc)[0] return F.log_softmax(output, dim=-1) def beam_search(self, visual: TensorOrSequence, max_len: int, beam_size: int, out_size=1, return_probs=False, **kwargs): bs = BeamSearch(self, max_len, self.eos_idx, beam_size) return bs.apply(visual, out_size, return_probs, **kwargs) class DualM2Transformer(Module): def __init__(self, vocab, max_seq_len, object_latent_dim, padding_idx): super(DualM2Transformer, self).__init__() self.padding_idx = padding_idx self.bos_idx = vocab['word2idx']['sos'] self.eos_idx = vocab['word2idx']['eos'] self.vocab = vocab self.encoder = DualPathMemoryAugmentedEncoder(3, 0, d_in=object_latent_dim, attention_module=ScaledDotProductAttentionMemory, attention_module_kwargs={'m': 40}) # self.decoder_t = MeshedDecoder(len(vocab["word2idx"]), max_seq_len, 1, padding_idx) self.decoder = MeshedDecoder(len(vocab["word2idx"]), max_seq_len, 1, padding_idx) self.register_state('enc_output', None) self.register_state('mask_enc', None) def forward(self, feats, extra_feats, tokens): # input (b_s, seq_len, d_in) mask_enc = (torch.sum(feats, -1) == self.padding_idx).unsqueeze(1).unsqueeze( 1) # (b_s, 1, 1, seq_len) feats = self.encoder(feats, extra_feats, mask_enc) # (B, 3, n_object, 512) dec_outputs, intermediate_feats = self.decoder(tokens, feats, mask_enc) # (B, max_len, vocab_size) return dec_outputs, intermediate_feats def step(self, t, prev_output, visual, seq, mode='teacher_forcing', **kwargs): it = None if mode == 'teacher_forcing': raise NotImplementedError elif mode == 'feedback': if t == 0: self.mask_enc = (torch.sum(visual[0], -1) == self.padding_idx).unsqueeze(1).unsqueeze( 1) # (b_s, 1, 1, seq_len) self.enc_output = self.encoder(visual[0], visual[1], self.mask_enc) if isinstance(visual, torch.Tensor): it = visual.data.new_full((visual.shape[0], 1), self.bos_idx).long() else: it = visual[0].data.new_full((visual[0].shape[0], 1), self.bos_idx).long() else: it = 
prev_output output = self.decoder(it, self.enc_output, self.mask_enc)[0] return F.log_softmax(output, dim=-1) def beam_search(self, visual: TensorOrSequence, max_len: int, beam_size: int, out_size=1, return_probs=False, **kwargs): bs = BeamSearch(self, max_len, self.eos_idx, beam_size) return bs.apply(visual, out_size, return_probs, **kwargs)
46.268908
119
0.602979
665
5,506
4.748872
0.168421
0.031032
0.026599
0.018999
0.797973
0.788474
0.788474
0.788474
0.788474
0.788474
0
0.018561
0.285688
5,506
119
120
46.268908
0.784389
0.05721
0
0.630435
0
0
0.034374
0
0
0
0
0
0
1
0.086957
false
0
0.119565
0
0.293478
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
d6fdf7298e468d7fbe8529f8b09f338be4d1f0c8
175
py
Python
tracker-ui/cgi-bin/testcgi.py
gitrust/timetracker
497a668f9cd7e89a1921b4a4e6989064cf10b1bc
[ "MIT" ]
4
2019-11-06T07:39:13.000Z
2021-02-02T02:03:56.000Z
tracker-ui/cgi-bin/testcgi.py
gitrust/timetracker
497a668f9cd7e89a1921b4a4e6989064cf10b1bc
[ "MIT" ]
null
null
null
tracker-ui/cgi-bin/testcgi.py
gitrust/timetracker
497a668f9cd7e89a1921b4a4e6989064cf10b1bc
[ "MIT" ]
null
null
null
#!c:\Python36\python.exe print ("Content-Type: text/html") # Header print () print ("<html><head><title>Server test</title></head>") print ("<b>Hello</b>") print ("</html>")
25
55
0.634286
25
175
4.44
0.64
0.162162
0
0
0
0
0
0
0
0
0
0.012579
0.091429
175
7
56
25
0.685535
0.171429
0
0
0
0
0.604167
0.173611
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
d95f6ebdb89b5e877a2dff8c98244c83df3d498e
275
py
Python
built-in/TensorFlow/Official/cv/image_classification/ResnetVariant_for_TensorFlow/automl/vega/core/metrics/pytorch/__init__.py
Huawei-Ascend/modelzoo
df51ed9c1d6dbde1deef63f2a037a369f8554406
[ "Apache-2.0" ]
12
2020-12-13T08:34:24.000Z
2022-03-20T15:17:17.000Z
built-in/TensorFlow/Official/cv/image_classification/ResnetVariant_for_TensorFlow/automl/vega/core/metrics/pytorch/__init__.py
Huawei-Ascend/modelzoo
df51ed9c1d6dbde1deef63f2a037a369f8554406
[ "Apache-2.0" ]
3
2021-03-31T20:15:40.000Z
2022-02-09T23:50:46.000Z
built-in/TensorFlow/Research/cv/image_classification/Darts_for_TensorFlow/automl/vega/core/metrics/pytorch/__init__.py
Huawei-Ascend/modelzoo
df51ed9c1d6dbde1deef63f2a037a369f8554406
[ "Apache-2.0" ]
2
2021-07-10T12:40:46.000Z
2021-12-17T07:55:15.000Z
from .classifier_metric import * from .detection_metric import * from .auc_metrics import * from .recall_eval import * from .segmentation_metric import * from .sr_metric import * from .jdd_psnr_metric import * from .metrics import Metrics from .lane_metric import LaneMetric
27.5
35
0.810909
38
275
5.631579
0.394737
0.327103
0.373832
0
0
0
0
0
0
0
0
0
0.130909
275
9
36
30.555556
0.895397
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
d98924b380e33f88315b7a57029d49919421b79e
11,577
py
Python
src/environments.py
ds4dm/branch-search-trees
b0cec513c026b52a00b666f7471d6f4abaf6e1ce
[ "MIT" ]
36
2020-12-24T07:09:09.000Z
2022-03-27T07:01:37.000Z
src/environments.py
ds4dm/branch-search-trees
b0cec513c026b52a00b666f7471d6f4abaf6e1ce
[ "MIT" ]
7
2021-03-04T07:58:39.000Z
2021-12-31T02:43:54.000Z
src/environments.py
ds4dm/branch-search-trees
b0cec513c026b52a00b666f7471d6f4abaf6e1ce
[ "MIT" ]
13
2021-01-25T07:35:23.000Z
2022-03-27T07:01:36.000Z
""" Environment classes, to manage the interface between learning and solver. """ import numpy as np import os import time import torch import pyscipopt as scip from collections import OrderedDict import src.utilities as utilities from .branchers import * class ILEvalEnv: """ Environment to evaluate a trained Imitation Learning policy, using ILEvalBrancher. The specified branching policy is a trained IL policy. """ def __init__(self, device): self.device = device def run_episode(self, instance, name, policy, policy_name, state_dims, scip_seed, cutoff_value, scip_limits, scip_params, verbose, brancher_name='ILEvalBrancher'): """ :param instance: str, pathway to instance.mps.gz :param name: str, name of the instance (w/o extension) :param policy: a trained IL policy :param policy_name: str, name of the policy :param state_dims: dict, of state dimensionalities :param scip_seed: int, SCIP solver seed :param cutoff_value: float, cutoff :param scip_limits: dict, specifying SCIP parameter limits :param scip_params: dict, specifying SCIP parameter setting :param verbose: bool, verbosity :param brancher_name: str, name of the brancher to be defined :return: exp_dict: dict, containing basic statistics on the experiment (run) """ print("\nRunning IL evaluation on instance {}".format(name)) m = scip.Model() # set static solver setting (scip seed and cutoff are set below) utilities.init_params(m, scip_limits, scip_params) # set scip parameters as needed (wrt the current episode setting) m.setBoolParam('randomization/permutevars', True) m.setIntParam('randomization/permutationseed', scip_seed) # SCIP default at 0 m.readProblem(instance) if scip_params['cutoff']: assert cutoff_value is not None m.setObjlimit(cutoff_value) # define brancher brancher = ILEvalBrancher( model=m, device=self.device, policy=policy, state_dims=state_dims, verbose=verbose, ) m.includeBranchrule( brancher, name=brancher_name, desc="bla", priority=999999, maxdepth=-1, maxbounddist=1 ) # perform the episode 
try: t0 = time.time() t0_process = time.process_time() m.optimize() t1_process = time.process_time() t1 = time.time() print("\tInstance: {}. Nnodes: {}. Branch count: {}. Status: {}. Gap: {:.4f}".format( name, m.getNNodes(), brancher.branch_count, m.getStatus(), m.getGap()) ) except: print("\tSCIP exception or error.") t0 = time.time() t0_process = time.process_time() t1 = t0 t1_process = t0_process # update exp_dict exp_dict = { 'name': name, 'policy': policy_name, 'seed': scip_seed, 'nnodes': m.getNNodes(), 'fair_nnodes': m.getFairNNodes(bytes(brancher_name, 'utf-8')), # needs bytes encoding 'nnodes_left': m.getNNodesLeft(), 'nLP_iterations': m.getNLPIterations(), 'max_depth': m.getMaxDepth(), 'status': m.getStatus(), 'gap': m.getGap(), 'primal_bound': m.getPrimalbound(), 'dual_bound': m.getDualbound(), 'primaldualintegral': m.getPrimalDualIntegral(), 'scip_solve_time': m.getSolvingTime(), 'scip_presolve_time': m.getPresolvingTime(), 'opt_time_process': t1_process - t0_process, 'opt_time_wallclock': t1 - t0, } m.freeProb() return exp_dict class SCIPCollectEnv: """ Environment to run SCIP data collection for imitation learning, with SCIPCollectBrancher class. Instead of a single policy, 'explorer' and 'expert' rules are specified (each should be a string corresponding to a SCIP branching rule). The explorer policy runs for the top k branching decisions, then the expert takes over. Data is collected from expert decisions only. 
""" def __init__(self): pass def run_episode(self, instance, name, explorer, expert, k, state_dims, scip_seed, cutoff_value, scip_limits, scip_params, verbose, brancher_name='SCIPCollectBrancher'): """ :param instance: str, pathway to instance.mps.gz :param name: str, name of the instance (w/o extension) :param explorer: str, SCIP branching rule to be used as explorer :param expert: str, SCIP branching rule to be used as expert :param k: int, number of branching decision to be explored before data collection :param state_dims: dict, of state dimensionalities :param scip_seed: int, SCIP solver seed :param cutoff_value: float, cutoff :param scip_limits: dict, specifying SCIP parameter limits :param scip_params: dict, specifying SCIP parameter setting :param verbose: bool, verbosity :param brancher_name: str, name of the brancher to be defined :return: exp_dict: dict, containing basic statistics on the experiment (run) brancher.collect_dict: dict, of data (states, labels) collected by the expert """ print("\nRunning data collection on instance {}".format(name)) m = scip.Model() # set static solver setting (scip seed and cutoff are set below) utilities.init_params(m, scip_limits, scip_params) # set scip parameters as needed (wrt the current episode setting) m.setBoolParam('randomization/permutevars', True) m.setIntParam('randomization/permutationseed', scip_seed) # SCIP default at 0 m.readProblem(instance) if scip_params['cutoff']: assert cutoff_value is not None m.setObjlimit(cutoff_value) brancher = SCIPCollectBrancher( model=m, explorer=explorer, expert=expert, k=k, state_dims=state_dims, verbose=verbose ) m.includeBranchrule( brancher, name=brancher_name, desc="bla", priority=999999, maxdepth=-1, maxbounddist=1 ) # optimize, i.e., perform the solve t0 = time.time() t0_process = time.process_time() m.optimize() t1_process = time.process_time() t1 = time.time() print("\tInstance {}. SCIP time: {} (wall-clock: {}). Nnodes: {}. FairNNodes: {}. 
Collected: {}".format( name, m.getSolvingTime(), t1 - t0, m.getNNodes(), m.getFairNNodes(bytes(brancher_name, 'utf-8')), brancher.collect_count )) # store episode_data exp_dict = { 'name': name, 'explorer': explorer, 'expert': expert, 'k': k, 'seed': scip_seed, 'nnodes': m.getNNodes(), 'fair_nnodes': m.getFairNNodes(bytes(brancher_name, 'utf-8')), # needs bytes encoding 'nnodes_left': m.getNNodesLeft(), 'nLP_iterations': m.getNLPIterations(), 'max_depth': m.getMaxDepth(), 'status': m.getStatus(), 'gap': m.getGap(), 'primal_bound': m.getPrimalbound(), 'dual_bound': m.getDualbound(), 'primaldualintegral': m.getPrimalDualIntegral(), 'scip_solve_time': m.getSolvingTime(), 'scip_presolve_time': m.getPresolvingTime(), 'opt_time_process': t1_process - t0_process, 'opt_time_wallclock': t1 - t0, 'nnodes_list': brancher.nnodes_list, 'nnodesleft_list': brancher.nnodesleft_list, } m.freeProb() return exp_dict, brancher.collect_dict class SCIPEvalEnv: """ Environment for SCIP evaluation runs, with SCIPEvalBrancher class. A single branching policy is specified (a string corresponding to a SCIP branching rule). 
""" def __init__(self): pass def run_episode(self, instance, name, policy, scip_seed, cutoff_value, scip_limits, scip_params, verbose, brancher_name='SCIPEvalBrancher'): """ :param instance: str, pathway to instance.mps.gz :param name: str, name of the instance (w/o extension) :param policy: str, SCIP branching rule to be used :param scip_seed: int, SCIP solver seed :param cutoff_value: float, cutoff :param scip_limits: dict, specifying SCIP parameter limits :param scip_params: dict, specifying SCIP parameter setting :param verbose: bool, verbosity :param brancher_name: str, name of the brancher to be defined :return: exp_dict: dict, containing basic statistics on the experiment (run) """ print("\nRunning SCIP evaluation on instance {}".format(name)) m = scip.Model() # set static solver setting (scip seed and cutoff are set below) utilities.init_params(m, scip_limits, scip_params) # set scip parameters as needed (wrt the current episode setting) m.setBoolParam('randomization/permutevars', True) m.setIntParam('randomization/permutationseed', scip_seed) # SCIP default at 0 m.readProblem(instance) if scip_params['cutoff']: assert cutoff_value is not None m.setObjlimit(cutoff_value) brancher = SCIPEvalBrancher( model=m, policy=policy, verbose=verbose ) m.includeBranchrule( brancher, name=brancher_name, desc="bla", priority=999999, maxdepth=-1, maxbounddist=1 ) # optimize, i.e., perform the solve t0 = time.time() t0_process = time.process_time() m.optimize() t1_process = time.process_time() t1 = time.time() print("\tInstance {}. SCIP time: {} (wall-clock: {}). Nnodes: {}. 
FairNNodes: {}".format( name, m.getSolvingTime(), t1 - t0, m.getNNodes(), m.getFairNNodes(bytes(brancher_name, 'utf-8')) )) # store episode_data exp_dict = { 'name': name, 'policy': policy, 'seed': scip_seed, 'nnodes': m.getNNodes(), 'fair_nnodes': m.getFairNNodes(bytes(brancher_name, 'utf-8')), # needs bytes encoding 'nnodes_left': m.getNNodesLeft(), 'nLP_iterations': m.getNLPIterations(), 'max_depth': m.getMaxDepth(), 'status': m.getStatus(), 'gap': m.getGap(), 'primal_bound': m.getPrimalbound(), 'dual_bound': m.getDualbound(), 'primaldualintegral': m.getPrimalDualIntegral(), 'scip_solve_time': m.getSolvingTime(), 'scip_presolve_time': m.getPresolvingTime(), 'opt_time_process': t1_process - t0_process, 'opt_time_wallclock': t1 - t0, 'nnodes_list': brancher.nnodes_list, 'nnodesleft_list': brancher.nnodesleft_list, } m.freeProb() return exp_dict
36.40566
117
0.594023
1,254
11,577
5.3437
0.172249
0.030443
0.011491
0.01358
0.790479
0.78451
0.769736
0.750187
0.72422
0.72422
0
0.008372
0.308716
11,577
317
118
36.520505
0.828939
0.288071
0
0.688442
0
0.005025
0.159286
0.020793
0
0
0
0
0.015075
1
0.030151
false
0.01005
0.040201
0
0.100503
0.035176
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
7962db470eee969c38aff0040ca419b806560327
44
py
Python
the_game/exceptions.py
gcmac16/the_game
07c4b3a280eb358ee23d8ce8f70cf490e1813215
[ "Apache-2.0" ]
null
null
null
the_game/exceptions.py
gcmac16/the_game
07c4b3a280eb358ee23d8ce8f70cf490e1813215
[ "Apache-2.0" ]
null
null
null
the_game/exceptions.py
gcmac16/the_game
07c4b3a280eb358ee23d8ce8f70cf490e1813215
[ "Apache-2.0" ]
null
null
null
class NoValidMoveError(Exception): pass
14.666667
34
0.772727
4
44
8.5
1
0
0
0
0
0
0
0
0
0
0
0
0.159091
44
2
35
22
0.918919
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
7985665d577b88c484af1f856d02d9e8e3ac157c
83
py
Python
data/operator/bbox/transform/rasterize/half_pixel_offset.py
zhangzhengde0225/SwinTrack
526be17f8ef266cb924c6939bd8dda23e9b73249
[ "MIT" ]
143
2021-12-03T02:33:36.000Z
2022-03-29T00:01:48.000Z
data/operator/bbox/transform/rasterize/half_pixel_offset.py
zhangzhengde0225/SwinTrack
526be17f8ef266cb924c6939bd8dda23e9b73249
[ "MIT" ]
33
2021-12-03T10:32:05.000Z
2022-03-31T02:13:55.000Z
data/operator/bbox/transform/rasterize/half_pixel_offset.py
zhangzhengde0225/SwinTrack
526be17f8ef266cb924c6939bd8dda23e9b73249
[ "MIT" ]
24
2021-12-04T06:46:42.000Z
2022-03-30T07:57:47.000Z
def bbox_rasterize_half_pixel_offset(bbox): return tuple(int(v) for v in bbox)
27.666667
43
0.771084
15
83
4
0.8
0
0
0
0
0
0
0
0
0
0
0
0.144578
83
2
44
41.5
0.84507
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
79a443969df459c8458b32070068e8648f52834c
4,880
py
Python
src/fortrace/utility/dummy_virtinst_util.py
dasec/ForTrace
b8187522a2c83fb661e5a1a5f403da8f40a31ead
[ "MIT" ]
1
2022-03-31T14:01:51.000Z
2022-03-31T14:01:51.000Z
src/fortrace/utility/dummy_virtinst_util.py
dasec/ForTrace
b8187522a2c83fb661e5a1a5f403da8f40a31ead
[ "MIT" ]
null
null
null
src/fortrace/utility/dummy_virtinst_util.py
dasec/ForTrace
b8187522a2c83fb661e5a1a5f403da8f40a31ead
[ "MIT" ]
1
2022-03-31T14:02:30.000Z
2022-03-31T14:02:30.000Z
"""A dummy module to prevent import errors from Guest. """ from __future__ import absolute_import import random def default_route(nic=None): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def default_bridge(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def default_network(conn): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def default_connection(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def get_cpu_flags(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def is_pae_capable(conn=None): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def is_hvm_capable(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def is_kqemu_capable(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def is_kvm_capable(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def is_blktap_capable(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def get_default_arch(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") # this function is directly from xend/server/netif.py and is thus # available under the LGPL, # Copyright 2004, 2005 Mike Wray <mike.wray@hp.com> # Copyright 2005 XenSource Ltd def randomMAC(type="xen"): """Generate a random MAC address. 00-16-3E allocated to xensource 52-54-00 used by qemu/kvm The OUI list is available at http://standards.ieee.org/regauth/oui/oui.txt. The remaining 3 fields are random, with the first bit of the first random field set 0. 
>>> randomMAC().startswith("00:16:3E") True >>> randomMAC("foobar").startswith("00:16:3E") True >>> randomMAC("xen").startswith("00:16:3E") True >>> randomMAC("qemu").startswith("52:54:00") True @return: MAC address string """ ouis = { 'xen': [ 0x00, 0x16, 0x3E ], 'qemu': [ 0x52, 0x54, 0x00 ] } try: oui = ouis[type] except KeyError: oui = ouis['xen'] mac = oui + [ random.randint(0x00, 0xff), random.randint(0x00, 0xff), random.randint(0x00, 0xff)] return ':'.join(["%02x" % x for x in mac]) def randomUUID(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def uuidToString(u): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def uuidFromString(s): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") # the following function quotes from python2.5/uuid.py def get_host_network_devices(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def get_max_vcpus(conn, type=None): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def get_phy_cpus(conn): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def system(cmd): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def xml_escape(str): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def compareMAC(p, q): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def _xorg_keymap(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def _console_setup_keymap(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def default_keymap(): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def pygrub_path(conn=None): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def uri_split(uri): raise RuntimeError("virtinst.utils not loaded. 
tried to access dummy method") def is_uri_remote(uri): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def get_uri_hostname(uri): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def get_uri_transport(uri): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def get_uri_driver(uri): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def is_storage_capable(conn): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def get_xml_path(xml, path=None, func=None): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def lookup_pool_by_path(conn, path): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method") def check_keytable(kt): raise RuntimeError("virtinst.utils not loaded. tried to access dummy method")
27.885714
81
0.726434
686
4,880
5.087464
0.236152
0.160745
0.23639
0.283668
0.7149
0.7149
0.689971
0.689971
0.67192
0.67192
0
0.019642
0.17582
4,880
174
82
28.045977
0.848086
0.153484
0
0.443038
0
0
0.452258
0
0
0
0.011843
0
0
1
0.43038
false
0
0.025316
0
0.468354
0
0
0
0
null
0
1
1
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
79b3c24af6010be65b186477b7bb2c57039bc314
622
py
Python
python/query/typeql_insert.py
typedb-osi/typeql-lang-python
c5de68365bd9de41f162e02d2be095e57c2218f9
[ "Apache-2.0" ]
10
2021-05-11T20:54:34.000Z
2022-02-13T16:01:46.000Z
python/query/typeql_insert.py
typedb-osi/typeql-python
c5de68365bd9de41f162e02d2be095e57c2218f9
[ "Apache-2.0" ]
null
null
null
python/query/typeql_insert.py
typedb-osi/typeql-python
c5de68365bd9de41f162e02d2be095e57c2218f9
[ "Apache-2.0" ]
3
2021-05-18T09:25:55.000Z
2022-02-13T16:02:14.000Z
from common.exception.typeql_exception import TypeQLException from common.typeql_token import TypeQLToken from query.typeql_writable import TypeQLWritable class TypeQLInsert(TypeQLWritable.InsertOrDelete): def __init__(self, match, variables): #super(TypeQLToken.Command.INSERT, match, #TO SET) self._match = match def validate_insert_vars(self, match, variables): if match != None: #TODO pass return variables @property def variables(self): return self._variables @property def match(self): return self.__match
23.037037
61
0.680064
66
622
6.212121
0.484848
0.087805
0.087805
0
0
0
0
0
0
0
0
0
0.252412
622
27
62
23.037037
0.88172
0.083601
0
0.125
0
0
0
0
0
0
0
0.037037
0
1
0.25
false
0.0625
0.1875
0.125
0.6875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
0
1
1
0
0
5
79dbffd0f23028829225581e7a0e79001299c0d2
190
py
Python
examples/config.py
jsbain/gittle
557077d488cf1699ad4ee022cb9f4d09252ab95c
[ "Apache-2.0" ]
4
2020-01-30T03:25:43.000Z
2021-02-24T02:36:18.000Z
examples/config.py
justecorruptio/gittle
e046fe4731ebe4168884e51ac5baa26c79f0567d
[ "Apache-2.0" ]
1
2019-01-24T05:04:07.000Z
2019-01-24T05:04:07.000Z
examples/config.py
justecorruptio/gittle
e046fe4731ebe4168884e51ac5baa26c79f0567d
[ "Apache-2.0" ]
7
2016-01-29T23:52:54.000Z
2020-07-27T02:29:43.000Z
# Constants repo_path = '/Users/aaron/git/gittle' repo_url = 'git@friendco.de:friendcode/gittle.git' # RSA private key key_file = open('/Users/aaron/git/friendcode-conf/rsa/friendcode_rsa')
31.666667
70
0.768421
29
190
4.896552
0.586207
0.140845
0.183099
0
0
0
0
0
0
0
0
0
0.078947
190
6
70
31.666667
0.811429
0.131579
0
0
0
0
0.680982
0.680982
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
79debe9d6138ae7593db799310111c39bfb17873
104
py
Python
comprex/utility.py
InzamamRahaman/CompreX
1bf5c23bd5759a3f535f4b080ef95c53a1010f72
[ "BSD-3-Clause" ]
7
2018-09-01T22:53:44.000Z
2022-03-10T09:36:40.000Z
comprex/utility.py
InzamamRahaman/CompreX
1bf5c23bd5759a3f535f4b080ef95c53a1010f72
[ "BSD-3-Clause" ]
1
2019-06-19T06:42:34.000Z
2019-06-19T06:42:34.000Z
comprex/utility.py
InzamamRahaman/CompreX
1bf5c23bd5759a3f535f4b080ef95c53a1010f72
[ "BSD-3-Clause" ]
6
2018-04-05T15:38:52.000Z
2021-10-12T15:47:54.000Z
import itertools import collections import time import logging import numpy as np import pandas as pd
11.555556
19
0.826923
16
104
5.375
0.625
0
0
0
0
0
0
0
0
0
0
0
0.173077
104
8
20
13
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
8dbd6962128156bd84973b88cf0363b834ec7dd5
21
py
Python
formalizr/models.py
krasnoperov/django-formalizr
57a4ebe94efd8687ff25537f56471004cbefc6d1
[ "BSD-3-Clause" ]
1
2019-06-27T13:24:04.000Z
2019-06-27T13:24:04.000Z
formalizr/models.py
krasnoperov/django-formalizr
57a4ebe94efd8687ff25537f56471004cbefc6d1
[ "BSD-3-Clause" ]
null
null
null
formalizr/models.py
krasnoperov/django-formalizr
57a4ebe94efd8687ff25537f56471004cbefc6d1
[ "BSD-3-Clause" ]
null
null
null
# There are no models
21
21
0.761905
4
21
4
1
0
0
0
0
0
0
0
0
0
0
0
0.190476
21
1
21
21
0.941176
0.904762
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
8dcc01206c11d249e69a531ecae840ee2b1cbb0a
31
py
Python
cv/photomontage/__init__.py
ShkalikovOleh/cv-labs
dda27a4f19b7e86c774397d7cc8de39461f34ff1
[ "MIT" ]
null
null
null
cv/photomontage/__init__.py
ShkalikovOleh/cv-labs
dda27a4f19b7e86c774397d7cc8de39461f34ff1
[ "MIT" ]
1
2022-02-15T14:06:22.000Z
2022-02-15T14:06:22.000Z
cv/photomontage/__init__.py
ShkalikovOleh/cv-labs
dda27a4f19b7e86c774397d7cc8de39461f34ff1
[ "MIT" ]
1
2021-11-04T16:30:57.000Z
2021-11-04T16:30:57.000Z
from .Merge import merge, g, q
15.5
30
0.709677
6
31
3.666667
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.193548
31
1
31
31
0.88
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
5c00ae4d87d47903c5d3bf44b53a7d7746880842
234
py
Python
dropbox_erpnext_broker/dropbox_erpnext_broker/doctype/site_dropbox_token/test_site_dropbox_token.py
frappe/dropbox_erpnext_broker
b10399668369eb73f0929e3865f1872c83916d8a
[ "MIT" ]
2
2019-02-17T22:53:58.000Z
2021-05-09T11:33:30.000Z
dropbox_erpnext_broker/dropbox_erpnext_broker/doctype/site_dropbox_token/test_site_dropbox_token.py
frappe/dropbox_erpnext_broker
b10399668369eb73f0929e3865f1872c83916d8a
[ "MIT" ]
null
null
null
dropbox_erpnext_broker/dropbox_erpnext_broker/doctype/site_dropbox_token/test_site_dropbox_token.py
frappe/dropbox_erpnext_broker
b10399668369eb73f0929e3865f1872c83916d8a
[ "MIT" ]
9
2017-06-19T16:15:24.000Z
2022-03-15T07:23:35.000Z
# -*- coding: utf-8 -*- # Copyright (c) 2017, Frappe Technologies Pvt Ltd and Contributors # See license.txt from __future__ import unicode_literals import frappe import unittest class TestSiteDropboxToken(unittest.TestCase): pass
21.272727
66
0.782051
29
234
6.137931
0.862069
0
0
0
0
0
0
0
0
0
0
0.024752
0.136752
234
10
67
23.4
0.856436
0.435897
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.2
0.6
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
5
5c14deb0600912d8f27d60d85f29a06e7e89e66e
94
py
Python
bitmovin/utils/serialization/__init__.py
camberbridge/bitmovin-python
3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95
[ "Unlicense" ]
44
2016-12-12T17:37:23.000Z
2021-03-03T09:48:48.000Z
bitmovin/utils/serialization/__init__.py
camberbridge/bitmovin-python
3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95
[ "Unlicense" ]
38
2017-01-09T14:45:45.000Z
2022-02-27T18:04:33.000Z
bitmovin/utils/serialization/__init__.py
camberbridge/bitmovin-python
3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95
[ "Unlicense" ]
27
2017-02-02T22:49:31.000Z
2019-11-21T07:04:57.000Z
from .bitmovin_json_encoder import BitmovinJSONEncoder from .serializable import Serializable
31.333333
54
0.893617
10
94
8.2
0.7
0
0
0
0
0
0
0
0
0
0
0
0.085106
94
2
55
47
0.953488
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
a500a383b1bef6202989346118d5a40a43d9eda2
99
py
Python
apps/parties/admin.py
PartyGwam/api
f580e29762990eabdb3bb5e317dee22c6c441696
[ "MIT" ]
1
2018-06-24T08:10:12.000Z
2018-06-24T08:10:12.000Z
apps/parties/admin.py
PartyGwam/api
f580e29762990eabdb3bb5e317dee22c6c441696
[ "MIT" ]
48
2018-06-24T12:30:15.000Z
2022-01-13T00:48:24.000Z
apps/parties/admin.py
PartyGwam/api
f580e29762990eabdb3bb5e317dee22c6c441696
[ "MIT" ]
null
null
null
from django.contrib import admin from apps.parties.models import Party admin.site.register(Party)
19.8
37
0.828283
15
99
5.466667
0.733333
0
0
0
0
0
0
0
0
0
0
0
0.10101
99
4
38
24.75
0.921348
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
a504358f5b3323789f1e172e5ab12d7c6161dad2
165
py
Python
tests/web_platform/css_flexbox_1/test_justify_content_flex_end.py
fletchgraham/colosseum
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
[ "BSD-3-Clause" ]
null
null
null
tests/web_platform/css_flexbox_1/test_justify_content_flex_end.py
fletchgraham/colosseum
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
[ "BSD-3-Clause" ]
null
null
null
tests/web_platform/css_flexbox_1/test_justify_content_flex_end.py
fletchgraham/colosseum
77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f
[ "BSD-3-Clause" ]
1
2020-01-16T01:56:41.000Z
2020-01-16T01:56:41.000Z
from tests.utils import W3CTestCase class TestJustifyContent_FlexEnd(W3CTestCase): vars().update(W3CTestCase.find_tests(__file__, 'justify-content_flex-end'))
27.5
79
0.812121
19
165
6.684211
0.842105
0
0
0
0
0
0
0
0
0
0
0.019868
0.084848
165
5
80
33
0.821192
0
0
0
0
0
0.146341
0.146341
0
0
0
0
0
1
0
true
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ebcfad73566f55fdcd3fcac8c9aa49301bad66b7
29
py
Python
pprogramming/__init__.py
erv4gen/Tools-DataProcessing
12d956b9757bfcde4a24e453779671b8daa7e74a
[ "MIT" ]
null
null
null
pprogramming/__init__.py
erv4gen/Tools-DataProcessing
12d956b9757bfcde4a24e453779671b8daa7e74a
[ "MIT" ]
null
null
null
pprogramming/__init__.py
erv4gen/Tools-DataProcessing
12d956b9757bfcde4a24e453779671b8daa7e74a
[ "MIT" ]
null
null
null
from . import pprogramming.py
29
29
0.827586
4
29
6
1
0
0
0
0
0
0
0
0
0
0
0
0.103448
29
1
29
29
0.923077
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
1
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
5
ebd9a2454e2219a46279ad347bb2af6de258acc1
36,680
py
Python
roboverse/assets/bullet-objects/ShapeNetCore/metadata.py
VentusYue/roboverse
bd19e0ef7bdcae1198aa768bfe9fc18c51878b6d
[ "MIT" ]
null
null
null
roboverse/assets/bullet-objects/ShapeNetCore/metadata.py
VentusYue/roboverse
bd19e0ef7bdcae1198aa768bfe9fc18c51878b6d
[ "MIT" ]
null
null
null
roboverse/assets/bullet-objects/ShapeNetCore/metadata.py
VentusYue/roboverse
bd19e0ef7bdcae1198aa768bfe9fc18c51878b6d
[ "MIT" ]
null
null
null
obj_path_map = { "long_sofa": "04256520/68fce005fd18b5af598a453fd9fbd988", "l_sofa": "04256520/575876c91251e1923d6e282938a47f9e", "conic_bin": "02747177/cf158e768a6c9c8a17cab8b41d766398", "square_prism_bin": "02747177/9fe4d78e7d4085c2f1b010366bb60ce8", "faucet": "03325088/1f223cac61e3679ef235ab3c41aeb5b6", "bunsen_burner": "03325088/7ade4c8d7724e56b76d20e73c57c9a03", "wide_circular_vase": "02880940/53f5240e1e82c96e2d20e9f11baa5f8f", "flat_circular_basket": "02880940/6a772d12b98ab61dc26651d9d35b77ca", "pitcher": "02876657/3ba7dd61736e7a96270c0e719fe4ed97", "gatorade": "02876657/d9aee510fd5e8afb93fb5c975e8de2b7", "narrow_tray": "02801938/d224635923b9ec4637dc91749a7c4915", "jar": "03593526/6e44adec0854dfaa93fb5c975e8de2b7", "horn_vase": "03593526/dd37047caaffb379f9215f842248bc25", "fountain_vase": "03593526/4cd2f74d052b561952e2d18963a75b4d", "narrow_top_vase": "03593526/c5beb87c12986f859f3323e6dbd51308", "long_vase": "03593526/1a9dade4e4a4c47589035c25f0dfeb63", "conic_cup": "03593526/7c1303d3a19a1cef51f77a6d7299806", "camera": "02942699/235a6dd25c0a6f7b66f19f26ac490096", "mug": "03797390/1d18255a04d22794e521eeb8bb14c5b3", "conic_bowl": "03991062/3152bd6fb7bf09d625ebd1cd0b422e32", "ball": "03991062/8d87b950d8e192f5f51f77a6d7299806", "hex_deep_bowl": "03991062/1c4257c50d27388525ebd1cd0b422e32", "square_deep_bowl": "03991062/eb8d2e7e18906c7f25ebd1cd0b422e32", "bird": "02691156/886942791e830bf1d32b1717fde97410", "pillow": "03938244/3b5e274f653258a737f437b479038901", "shed": "02843684/7dd57384aa290a835821cea205f2a4bb", "oblong_scooper": "04530566/9472a24df8372cd42e436d38f27146ec", "beer_bottle": "02876657/5ad47181a9026fc728cc22dce7529b69", "sack_vase": "03593526/a258544f4f27cb7fbbdc99ec57ef9c40", "smushed_dumbbell": "03593526/c8d8974642d4e88e906c240b881af04f", "grill_bench": "04379243/17624075a52c9b15cab01e89f60c9290", "circular_picnic_table": "04379243/24d1d32aa33c38716a97150bb2a72733", "circular_table": "04379243/d6b61af7935d36a6f0aeabfdcb4e1dd9", # Below: mostly 
colorless objects. Added June 19 2020. "short_handle_cup": "03797390/3143a4accdc23349cac584186c95ce9b", "curved_handle_cup": "03797390/c39fb75015184c2a0c7f097b1a1f7a5", "flowery_half_donut": "02880940/e4c871d1d5e3c49844b2fa2cac0778f5", "semi_golf_ball_bowl": "02880940/45603bffc6a2866b5d1ac0d5489f7d84", "passenger_airplane": "02691156/d605a53c0917acada80799ffaf21ea7d", "open_top_rect_box": "03991062/9da456e7bae24501ffc6e457221b9271", "cookie_circular_lidless_tin": "03991062/5563324c9902f243a2c59a4d90e63212", "chipotle_bowl": "02808440/302618414a9991de3321831d2245cf06", "toilet_bowl": "02808440/fd34f47eb1b816139a625c8b524db46e", "buffet_food_tray": "02808440/1dc36bb29702c28b3321831d2245cf06", "keyhole": "02808440/ea7913efbe22abed412deb0d2b0f3dcd", "bathtub": "02808440/cca20dbad3faf4343321831d2245cf06", "crooked_rim_capsule_container": "02818832/4954953090ca7442527e7f2c027f7469", "colunnade_top": "03593526/de673ddf9df03b8278cf1a714198918", "pacifier_vase": "03593526/1ca73dffe31553efcb349a60fd15aa15", "square_rod_embellishment": "03593526/bcf5a4b764ddeac759f9892433e1b1f4", "bongo_drum_bowl": "03593526/a791d4fa1d840408c5beea20858a99d5", "flat_bottom_sack_vase": "03593526/9097ce398b9700a27e561f865dc44fc5", "stalagcite_chunk": "03593526/6a13375f8fce3142e6597d391ab6fcc1", "pear_ringed_vase": "03593526/c1be3d580b4088bf4cc80585c0d3d970", "two_handled_vase": "03593526/2ff5347d6ee079855337cdc4b758988c", "goblet": "03593526/cf65c7fb56850194490ad276cd2af3a4", "ringed_cup_oversized_base": "03593526/aa638e39c28f3a184800dfbcda5cce3", "t_cup": "03593526/ffc7e98720e71017b3878cedd8c8fe6c", "teepee": "03593526/af61c129521b9a36ad56360b1d2e97b8", "bullet_vase": "03593526/ce2508ec4476b2cbf51f77a6d7299806", "haystack_sofa": "04256520/e74d866f44f857e77b5d58e18a4bdcae", "box_wood_frame": "04256520/5171a910435f4c949a502993c14408e4", "rect_spotted_hollow_bottom_sofa": "04256520/c8108de19d8a3005c5beea20858a99d5", "box_sofa": "04256520/ae4f28a7c4e22d9535dda488a4bbb1e1", "earmuff": 
"03261776/51245ad5a0571188992fd3dac901d508", "l_automatic_faucet": "03325088/13cdb5df9a944fcbb7a867e9b35a1295", "double_l_faucet": "03325088/af708fe6eac1bc9450da8b99982a3057", "box_crank": "03325088/4477714b35747609f34d244dc02df229", "glass_half_gallon": "02876657/8ea8ced5529a3ffa7ae0a62f7ca532f1", "pepsi_bottle": "02876657/cdeccf2f410846d0e0155f56493d36bc", "two_layered_lampshade": "03636649/23eaba9bdd51a5b0dfe9cab879fd37e8", "beehive_funnel": "03636649/ed323758d0f61cfe6085a0a38e2f255", "rabbit_lamp": "03636649/a82af4e7e81334f8876b399a99a15c0f", "elliptical_capsule": "03636649/11913615a1b732d435836c728d324152", "trapezoidal_bin": "02801938/98c3ddee85264efd7cd51b1084c649a6", "staple_table": "04379243/ec316148b4cdd446b6068c62e84866a1", "grill_park_bench": "02828884/3e0694b77418eb25d2b12aa6a0f050b3", "thick_wood_chair": "02828884/6f0723826537010c870f22c94729669b", "park_grill_chair": "02828884/5b50871735c5cce2d2b12aa6a0f050b3", "long_half_pipe_smooth_park_chair": "02828884/8d074f177479ac42628516f95b4691f", "flat_boat_dish": "04530566/80c6a14accb189a9c2c2c81e2232aa95", "modern_canoe": "04530566/31a41e6a73c5d019efffdb45d12d0585", "vintage_canoe": "04530566/de010f7468ceefc6fcfb3ae2df2f7efd", "oil_tanker": "04530566/6c1cfb2fe245b969c2e818a707fdb3e0", "x_curved_modern_bookshelf": "02871439/43731a6a4bd64ae5492d9da2668ec34c", "pitchfork_shelf": "02871439/cc38bc7db90f43d214b86d5282eb8301", "baseball_cap": "02954340/e823673c1edd73fb97c426435543a860", "tongue_chair": "03001627/ddfe96c6ec86b8752cbb5ed9636a4451", "grill_trash_can": "02747177/8fff3a4ba8db098bd2b12aa6a0f050b3", "crooked_lid_trash_can": "02747177/e7682974949a6aadea9a778eef212687", "aero_cylinder": "02747177/4dbbece412ef64b6d2b12aa6a0f050b3", } path_scaling_map = { '02880940/36ca3b684dbb9c159599371049c32d38': 0.45965730943522365, '02880940/a95e0d8b37f8ca436a3309b77df3f951': 0.36703896426587984, '02880940/4eefe941048189bdb8046e84ebdc62d2': 0.45971293864088836, '02880940/be3c2533130dd3da55f46d55537192b6': 
0.338349532841341, '02880940/13e879cb517784a63a4b07a265cff347': 0.3405685000353549, '02880940/a0ac0c76dbb4b7685430c0f7a585e679': 0.4220359444118124, '02880940/c1bad5cc2d9050e48aee29ee398d36ed': 0.43360284129832355, '02880940/468b9b34d07eb9211c75d484f9069623': 0.4207776647482278, '02880940/4845731dbf7522b07492cbf7d8bec255': 0.5089325517195429, '02880940/3f6a6718d729b77bed2eab6efdeec5f8': 0.4056270812581569, '02880940/429a622eac559887bbe43d356df0e955': 0.43217701074761466, '02880940/53f5240e1e82c96e2d20e9f11baa5f8f': 0.3890823579750363, '02880940/cfac22c8ca3339b83ce5cb00b21d9584': 0.3777578051365221, '02880940/4fdb0bd89c490108b8c8761d8f1966ba': 0.408533026934377, '02880940/6a772d12b98ab61dc26651d9d35b77ca': 0.3844594937560873, '02880940/e816066ac8281e2ecf70f9641eb97702': 0.39804690559902695, '02880940/ce48ffb418b99996912a38ce5826ebb8': 0.45889903239684793, '03593526/5c2079f8089b0419ca2de9a00262030f': 0.27694626342895723, '03593526/763474ce228585bf687ad2cd85bde80a': 0.3445859686627363, '03593526/dd37047caaffb379f9215f842248bc25': 0.2990776122148093, '03593526/c444d4f782c68dc9140cbb818dee6c': 0.16974940177530076, '03593526/9bbc7da5313433c4af93a86670701266': 0.3330951252015812, '03593526/c673160553979cebd37947b3ce04a083': 0.23171297743164307, '03593526/6e44adec0854dfaa93fb5c975e8de2b7': 0.19166065022588555, '03593526/8c1d8325da907adf51f77a6d7299806': 0.2382718975304827, '03593526/21efadc7372648164b3c42e318f3affc': 0.21373601333644607, '03593526/7d71fc51ce793a44befd8bfd61b07f5': 0.33623146350607347, '03593526/7c1303d3a19a1cef51f77a6d7299806': 0.2520942787595406, '03593526/1d4a469bdb53d3f77a3f900e0a6f2d83': 0.4047709455873128, '03593526/6f2c815565cfdb97a1c637e821f12a67': 0.2362220644831522, '03593526/ac95989d0e196b4a98d5fc0473d00a1c': 0.2757251510707145, '03593526/9998f783571e10a7d680196133d8b70f': 0.3599957530831075, '03593526/2fc5fff977ac930050b92125e5fcb8ac': 0.1777251222989879, '03593526/a258544f4f27cb7fbbdc99ec57ef9c40': 0.23117363410603475, 
'03593526/275cc21fad6aa1b6bfa78a7d067c469c': 0.24055120706716254, '03593526/ee425a7b654bcac8bbdc99ec57ef9c40': 0.24435537419787864, '03593526/4cd2f74d052b561952e2d18963a75b4d': 0.3118095112250866, '03593526/c5beb87c12986f859f3323e6dbd51308': 0.2644849674672597, '03593526/4b574003434d2ba041034a9ebee74b0c': 0.2871417452802942, '03593526/930e0e91518dedaf8b5a342cfa6159b4': 0.2689400951946232, '03593526/9e03b1cd20644a638c37cfe791015e2f': 0.40401051854908365, '03593526/9fe7e6a7bf8ca964efad53eb3f0b36fa': 0.3181704164311424, '03593526/d67ac9e710ba445089035c25f0dfeb63': 0.3255950049034956, '03593526/99f6281962f4b4e598910e50f05b8001': 0.2006863623667775, '03593526/81a4a5f10f2f759fcee8a4975a4efb00': 0.2518933552228041, '03593526/c5fda0e7168d23ed89035c25f0dfeb63': 0.253426781500805, '03593526/1a9dade4e4a4c47589035c25f0dfeb63': 0.35625682654873153, '03593526/a2da98dc3d78788435836c728d324152': 0.23115324449174063, '03593526/5b13716dfa70014c726dbbf7bc5e4df3': 0.2675489566377274, '03593526/c8d8974642d4e88e906c240b881af04f': 0.19730288914781574, '03593526/d6b6a7a860cfeeda2f2318fdd66be40a': 0.3408394459498629, '03001627/fa7347547e290732bf65e1af50b5b7d4': 0.44174088068908574, '03001627/bea846f692c8bdc8ce6fb1d4c6089968': 0.37984540054562693, '03001627/9d7d7607e1ba099bd98e59dfd5823115': 0.36890936771015886, '03001627/1d99f74a7903b34bd56bda2fb2008f9d': 0.32225328850799767, '03001627/8cedc8e684d60ff42a06d8c81262ef96': 0.3345418252491842, '03001627/f19e8da9d8f369c531e63f1270e2b445': 0.3423358235487716, '03001627/87afe5137d675efb73418f9a8c25ad1a': 0.2924477558270245, '03001627/ca84b42ab1cfc37be25dfc1bbeae5325': 0.28999262928033687, '03001627/d9156f5552178de2713decb1a0563b12': 0.29057178751468143, '03001627/73b7d6df845221fa9a2041f674671d05': 0.25093906579617997, '03001627/26aa22bd1da8b8c5b1a5c6ecbc81953c': 0.3532808881414112, '03001627/61f71cc002f6da561c81652b127a0ec9': 0.26922978140659054, '03001627/5822ae77b06bea3091da37ff8bdd2524': 0.29812108391771996, 
'03001627/8b5f8b83715a378e473f10e6caaeca56': 0.2178062649967445, '03001627/3e2375ff9e7af8002861ed753d5b88a1': 0.30838812435176965, '03001627/4a12589099b05c51e13b3410f3683610': 0.19656299929858378, '03001627/98d1fb12c3354d341e67ee2399c63faa': 0.23064667769159686, '03001627/c236deaff8c6fb0d29c9a7a92b0a566d': 0.25955749038420006, '03001627/56e51afd9d5c5fa38b7a92edf72424a7': 0.2653214767854839, '03001627/23c4d774910c9ce03c832f0140db42bc': 0.2848197975362282, '03001627/c4a73db5b3503ffa86abe5555a3b447d': 0.31817389272680696, '03001627/b24ed89d85b74771216fff6094e6695c': 0.3474201455202717, '03001627/f4e5698718f3d4494311a07b696e63e3': 0.23414835541380372, '03001627/e6b2017501b20ce1eff1a662025674bf': 0.34979327241007196, '03001627/884341d10af51df9737a00f007529fbf': 0.36661618843465416, '03001627/e31d71ed32273fede42ac999db581f5e': 0.3038644502901682, '03001627/4a24652fbf2bed7e93583c67df8faf1': 0.3053681526027573, '03001627/8c629a89e570c8776a9cd58b3a6e8ee5': 0.33791394151487303, '03001627/387dc2c22bdf6d2a6df42853f67b5836': 0.30843791497069656, '03001627/bbcdf9d0ecf02e7e9fce07ae6c046b8c': 0.3046758783137524, '03001627/657790bc7fd16326c132086242d50af2': 0.30215088243823013, '03001627/7eabd19312bde1dc9335750905007562': 0.30359652809652504, '03001627/2af09bd8df40506c9e646678ef50aa3d': 0.28369787834571647, '03001627/b66a32bed74e203591f74997d435672d': 0.25416238360382204, '03001627/701551efa4f1f4fe3e478b26cb10ebe4': 0.27453734380407513, '03001627/43d13e139d0eb78668007dfca4077105': 0.27567694928973235, '03001627/6d6a92847f46d8e27b57eb4fc830f67b': 0.318986046718708, '03001627/645022ea9ce898648b442b160bcfb7fd': 0.25912855840530813, '03001627/5b1d0dad82acd6e5893104fdda835c64': 0.23678792372336271, '03001627/248e014f31771b31d3ddfaaa242f81a1': 0.3651945242931118, '03001627/1d828c69106609f8cd783766d090e665': 0.1939593525697592, '03001627/26c9e85dfa18af9fcf004563556ddb36': 0.28223431900241897, '03001627/f128d707527eb10cb04cb542e2c50eb4': 0.27235836056782664, 
'03001627/c9f5c127b44d0538cb340854b82a069f': 0.2174103981897043, '03001627/84767939783aade4611ea9b20dcb5c83': 0.3401630539464854, '03001627/c666bd15aaebd5b02de0bc4fc4d02dd6': 0.28120595854420705, '03001627/11d4f2a09184ec972b9f810ad7f5cbd2': 0.23375956249891147, '03001627/ea7be2b97e78d5b35a4480134e0cdd21': 0.3188617699173143, '03001627/408631c881e3b33cefb90719a33cc920': 0.22722047311141674, '03001627/2de1bd62aef66dc9bf65e1af50b5b7d4': 0.2745380517401244, '03001627/8a2a0cad888b871eaa84c578b771896d': 0.24530224003108347, '03001627/e3479f55f5894bb3c7f1f7c0570e288d': 0.26829807926252974, '03001627/3f7417590f1bcfded5c89ecb06d1099b': 0.27541307534205783, '03001627/cc2930e7ceb24691febad4f49b26ec52': 0.29027606028299086, '03001627/459bef9cabed55cc593ebeeedbff73b': 0.35610420394204634, '03001627/dfc85456795d943bbadc820495ddb59': 0.26337556029685144, '03001627/697cfbe6e043136b737a00f007529fbf': 0.2664178263673437, '03001627/7ad134826824de98d0bef5e87b92b95e': 0.2601930783769818, '03001627/aa2242ae4ea1074bad0881e4ef1ff29c': 0.30656229936543483, '03001627/fba62693a28b2e4c43f1c519d66bb167': 0.2357822472615468, '03001627/58ef4177c711f38fe302d4da760c718f': 0.28806227179224914, '03001627/ca2294ffc664a55afab1bffbdecd7709': 0.3316280255736537, '03001627/8abd5158ec94dfd8924bf081da6f024c': 0.2950230189760522, '03001627/d6075b23895c7d0880e85c92fa2351f7': 0.2151012911938732, '03001627/80fab0c55a60abb7dafb0be26f6b45d5': 0.27971451323632546, '03001627/d8774646afed0312732375ced502498': 0.41581939838154913, '03001627/5a5b11daa1b5344fb516c05d046e8e45': 0.3041330293650493, '03001627/6e50f19c52a760e3cc1159c3b443c932': 0.3252561166689675, '03001627/10c08a28cae054e53a762233fffc49ea': 0.25475012547626047, '03001627/117c0e0aafc0c3f81015cdff13e6d9f3': 0.21739340593931009, '03001627/3eef51c1ba49a32ef73a9267136cfb76': 0.17817059766632765, '03001627/665bfb42a0362f71d577f4b88a77dd38': 0.38938660739695746, '03001627/80bad2242183a77df69c1bc654d8fbbd': 0.20359793853296182, 
'03001627/3ef60b4e28c22b3bc7dd78af359f0fc6': 0.21303222935366029, '03001627/d4d9b991ff7d31e8c8687ff9b0b4e4ac': 0.217424365078993, '03001627/6fa2db75b28cc1375c728bbce49718a0': 0.25921945942587865, '03001627/17ab0917e215e4fcfd300048280f015a': 0.23867134749608981, '03001627/5d681a38c0de5545205884f75aba3a': 0.31490510594129895, '03001627/4a4c7abae3929595184740798d03a659': 0.2182130936737045, '03001627/e6ec608ccfb38d6247928239f46b6ef1': 0.3050868625191195, '03001627/5f2441ed2a9ec8fab5d55ded7962c792': 0.2559297705987723, '03001627/a11592a10d32207fd2c7b63cf34a2108': 0.2822276040448187, '03001627/ff2333f528efd790fc93ece3545739c4': 0.2731275648475552, '03001627/d020eee9e094050ad776c08b6a3d0a38': 0.2641135229501191, '03001627/fbca73a2c226a86a593a4d04856c4691': 0.32377871019330906, '03001627/99ae1b3f970c61fd5b56aadec5c0be6b': 0.31312686763915576, '03001627/6a28919186eb55ecf69d0cf4fdc89b12': 0.2435398494658779, '03001627/a10e8dc1cc9522b67a80d424f0f4074d': 0.2481898690823798, '03001627/b856a62e23ef65324b87db09ac4cfa73': 0.20867775472429062, '03001627/bf7e8e0dc4f4038cc2567be77cb7ab45': 0.2529760031622193, '03001627/48429b3467c7185060fcaed6cc231482': 0.2953046773573473, '03001627/f04698af574cb9847edf3a3d7d1bacae': 0.30901759154433295, '03001627/e6b0b43093f105277997a53584de8fa7': 0.27940372641019035, '03001627/5f2d4c625595dc2499b025797420aa58': 0.27403111911589173, '03001627/f1fc7d26395549ba5ad8ce80f1a173ac': 0.29077568193155956, '03001627/bf3f14225e8f899db62f9fb4b7f0626': 0.25514016844534354, '03001627/ed56af61297594bf1c4300651205adf3': 0.2633978736900763, '03001627/8ab6783b1dfbf3a8a5d9ad16964840ab': 0.2384376870991261, '03001627/f23ecf3348299cf743e99e0cae970928': 0.25320484958739403, '03001627/d764960666572084b1ea4e06e88051f3': 0.29691118760802526, '03001627/78261b526d28a436cc786970133d7717': 0.22535548069821643, '03001627/5ef3e4abd4386c8871bc6030acc85f1e': 0.27348474325347316, '03046257/58507d92c9c0a4f07b79156a61ad4c01': 0.24210892812233792, 
'03046257/f41e23b98991d0f535836c728d324152': 0.34256659449554905, '02942699/235a6dd25c0a6f7b66f19f26ac490096': 0.18864112080609208, '02871439/711126944f5bb83e1933ffef19678834': 0.24535517064666781, '02871439/c91abdb369f0e2a1933ffef19678834': 0.20444373083700307, '02871439/58b97051fb1efb3b4bd9e0690b0b191': 0.2693240097730432, '02871439/b8eb9918623a90fe14b86d5282eb8301': 0.2724581705609199, '02871439/ec882f5717b0f405b2bf4f773fe0e622': 0.22183038041095535, '02871439/f7b93826de3020f3a5f3ebd90cb33bd6': 0.20629757974095397, '02871439/3ba1563f381785bc6739a7caa0c577bd': 0.3319441024123248, '02871439/e6f306e6f391ace7b035d20a1a3ca345': 0.30570540465844603, '03991062/2dcd625ed44bbf1625ebd1cd0b422e32': 0.26290574140137735, '03991062/3fd59dd13de9ccfd703ecb6aac9c5d3c': 0.34491983565252426, '03991062/5e825f4aa6a916c544cd688b4bc0d629': 0.24881291955706472, '03991062/23c2a637319a07b425ebd1cd0b422e32': 0.3028944173888938, '03991062/5afc3b1bd57bf6482c2c0fe8f75ba056': 0.3121985865836113, '03991062/5cc4660eebade12d25ebd1cd0b422e32': 0.2688076515212309, '03991062/6dbdd4270e16cb9425ebd1cd0b422e32': 0.3196504941106921, '03991062/9d7710e65ad393114b3c42e318f3affc': 0.3278343896568729, '03991062/7c86cdecd3b2d29125ebd1cd0b422e32': 0.32430321389876954, '03991062/7c1303d3a19a1cef51f77a6d7299806': 0.2520942787595406, '03991062/6c9a25f120cdcacc25ebd1cd0b422e32': 0.3395011383119723, '03991062/eb8d2e7e18906c7f25ebd1cd0b422e32': 0.22079872149072213, '03991062/48862d7ed8b28f5425ebd1cd0b422e32': 0.25961522352098926, '03991062/870c7ddd93d5b7c524042e14aca574d2': 0.255164939873058, '03991062/3152bd6fb7bf09d625ebd1cd0b422e32': 0.3404096994165607, '03991062/4e5172f357714b7f78caa162a41a851e': 0.1690462268076837, '03991062/1c4257c50d27388525ebd1cd0b422e32': 0.2781578416882425, '03991062/f1c17606d5952d9225ebd1cd0b422e32': 0.26845688968899994, '03991062/d1ed787e654dd0ff25ebd1cd0b422e32': 0.31137676021742655, '03991062/433fedbf96f140fb25ebd1cd0b422e32': 0.3153347275354074, 
'03991062/8d87b950d8e192f5f51f77a6d7299806': 0.1530489868639284, '03991062/44aea02b6852ce98910e50f05b8001': 0.23012717284932138, '03991062/8c3c0ec3779a163098910e50f05b8001': 0.25625403952921166, '03991062/490e2e25da735cfd3df324363ca0723f': 0.2615222376903661, '03991062/b89de6e29a5a1d6425ebd1cd0b422e32': 0.29648423795558465, '03991062/8111c4fa78c69d7925ebd1cd0b422e32': 0.21184056364198547, '03991062/c0ed2720d248d4e125ebd1cd0b422e32': 0.2995133790991576, '03938244/4691e0947b1e2a6b685998681d42efb8': 0.3351977172094138, '03938244/71dd20123ef5505d5931970d29212910': 0.24779505452759512, '03938244/31db3f4dd9d5944c3628a80a75ee02dc': 0.23649248582473217, '03938244/b422f9f038fc1f4da3149acda85b1964': 0.33714581576381975, '03938244/3b5e274f653258a737f437b479038901': 0.28889512107477777, '03938244/f3833476297f19c664b3b9b23ddfcbc': 0.2439988495809625, '03938244/c665ecfac697faa1222129b9e83640a7': 0.3348988182355991, '04379243/57c21a71a3518b6a1af550e7b4aa14c': 0.2326588450676866, '04379243/6b9b672041acc540e61062b89cc2de3b': 0.2496724106673462, '04379243/81c8ec54ab47bb86b04cb542e2c50eb4': 0.27718908429031197, '04379243/8ccbd2949fd8809b82cdf8854f156846': 0.31485364797288695, '04379243/7e215b6386f3fd4156d1d06c447a736': 0.2602163173495344, '04379243/586edb4eba5c3c7557ab4b593540354': 0.27037760340016304, '04379243/67584a2261e175ccfbed972ae4fd63af': 0.2701806915855507, '04379243/37726dbb4357739bded526a7be77b30e': 0.25165761297169303, '04379243/ead93856b735ec90f0aeabfdcb4e1dd9': 0.24984458042497326, '04379243/d51c7bcf851a368f90193fd5f5187893': 0.28516799314617236, '04379243/9ff56887e5f00cff412a0eaf6d0f1809': 0.3698821821898761, '04379243/ab1d67b6f09b35424ea2d70ab68cd1d2': 0.13518324276871893, '04379243/87af702a9a5370aceea6a5a0ebf81e97': 0.3923480809940565, '04379243/d477a17223129fec53227dcd0d547ba6': 0.24348670670694347, '04379243/199d183157f213e0da7c128b58fc7554': 0.21732957493140945, '04379243/524af53b7863f506e227c1bcfe5b1fc6': 0.3166806699750222, 
'04379243/b5bc21c92dc997cb7209833c7512d6a2': 0.4417089777648633, '04379243/61a898c20ddb028dfebad4f49b26ec52': 0.3337971678222898, '04379243/d6b61af7935d36a6f0aeabfdcb4e1dd9': 0.2977388571512857, '04379243/970e70ae46244887c35d3c5d3b1fcf7': 0.2856490080009466, '04379243/8bb3a7e1cb24fe6febad4f49b26ec52': 0.1919261225285961, '04379243/ea45019340b754c155f46d55537192b6': 0.293290544285145, '04379243/1f748bcf0ee8eea7da9c49a653a829eb': 0.3016119040918398, '04379243/254bf8d40be1fcb025a517a55e2a2141': 0.33341580297995393, '04379243/95301825e69b3b2db04cb542e2c50eb4': 0.3050147317690796, '04379243/aa118e3ed06f00a85c886bf880a258e': 0.3155906296226216, '04379243/a0fd031270822841febad4f49b26ec52': 0.2595015020315732, '04379243/9feefc5eb43adb4fb7db0056a767efc7': 0.26675671786282573, '04379243/b9c756b2ff5d66ddfebad4f49b26ec52': 0.29386733825711986, '04379243/4d14547b54611e9bcf1ee9bc9708f08c': 0.29290679088342814, '04379243/758df6055459cdf6cf58a1b90d479c9': 0.20695377136760879, '04379243/649cea3b17ffb31bfebad4f49b26ec52': 0.2809006470442715, '04379243/8c67fd5a15e8d9defebad4f49b26ec52': 0.36061045406314834, '04379243/9a60b3b87a457c73f522eecffc49e6a3': 0.2188862660860571, '04379243/7fc3bc8542f4c17ce4511d9a59e40339': 0.2720439981367353, '04379243/1b82432d7a959b8dfebad4f49b26ec52': 0.2474085528720443, '04379243/4a9a73e93f19ece06652506d959dc71d': 0.3287455336192209, '04379243/2e3e46e427b45207765ee729adbdf968': 0.2746370781541623, '04379243/7d358a01c9467815a9505c473725122e': 0.2991201440375514, '04379243/b796639ea7368f3bec11953b27b8a03a': 0.3044683089068464, '04379243/843713faa2ee00cba5d9ad16964840ab': 0.2767530424005882, '04379243/4cd11ae56eba48684733824eae5cd9ae': 0.3051850919872057, '04379243/17624075a52c9b15cab01e89f60c9290': 0.33657782004595815, '04379243/882d74e3afe42d0b651fbe0e01830a4a': 0.26990644052240753, '04379243/b7a0dda52974fa642250bf58700b4d8f': 0.34973596424652276, '04379243/4a0db050c8703a8d6e3c8a33c4ddf2ef': 0.4204919670117183, '04379243/580373e581fa155d3ec45bd2bc895504': 
0.32828506185554235, '04379243/a0864018495ae55cdef39da7703174e8': 0.32956389648922424, '04379243/d9994cf6d5d444379dbfd5cfd194350d': 0.28193506164777776, '04379243/7ee773e031400d09b4fc0a2b20c3cddd': 0.3069476693905946, '04379243/188ce43d9c8caabc5213169cc9897a9d': 0.26152773259820306, '04379243/aca4c523f999de86febad4f49b26ec52': 0.23992752198547573, '04379243/1408914f71c66166febad4f49b26ec52': 0.3265484013649964, '04379243/324f0d772a7b728c36350d50e191a45': 0.19920863940387673, '04379243/77b57f3eebab844707cdefe012d0353': 0.26701723803216243, '04379243/93f94ca2abb0e6aeda9c49a653a829eb': 0.3100057547321361, '04379243/3ce4b963a4869248febad4f49b26ec52': 0.3168479119720382, '04379243/74b8222078ba776c661673811de66400': 0.2955727474109995, '04379243/1ef6c2b9d413fb7c681404257d94ad9': 0.26978985777486764, '04379243/8aaca7e2c1b0ec549eea323f522c6486': 0.2854242861634302, '04379243/1aed00532eb4311049ba300375be3b4': 0.32786782760419925, '04379243/2425d3befad0440febad4f49b26ec52': 0.23990003257943382, '04379243/c6442db6d5fc94a62744bf8869518694': 0.24716063565259672, '04379243/b0abbb1a540e4b3431540522caac8407': 0.3212309247961766, '04379243/669a8114b9a602c2febad4f49b26ec52': 0.31333730710610846, '04379243/24d1d32aa33c38716a97150bb2a72733': 0.294917849057276, '04379243/7249c3e41c4807c0f7e0e05bae6131': 0.2583356349071718, '04379243/98fe480bea8f8f0486abe5555a3b447d': 0.238914969916438, '04379243/58160ac529c37aef1f0f01a76c5ff040': 0.29527257482634756, '04379243/cae4f0f8b87db72dbbdc99ec57ef9c40': 0.20142031774210906, '04379243/c733e81695923586754784b56fb4c23b': 0.30608729541372426, '04379243/d45385e0a60f71e1427fcd6e404d0cf5': 0.23662820476539395, '04379243/bc644d8f492e3c25febad4f49b26ec52': 0.31527377148767793, '04379243/4572e2658d6e6cfe531eb43ec132817f': 0.2826694475191026, '04379243/fe5e1df0653804d6ce4670b160b81e9': 0.2507756406842348, '04379243/3d4399c54a60ac26febad4f49b26ec52': 0.2893301179330152, '04379243/8d45802ef679d08a1a3b40747093a35e': 0.19964923548493466, 
'04379243/d40ba4b29c5ae69dae14646a8c8ddd34': 0.31617103538262104, '04379243/472796909612bf1f1353dc45068d6f44': 0.4166608208023914, '04379243/4116d19d60fc24f037a346dba83c013b': 0.27200633657904416, '04379243/db454c99849016f8febad4f49b26ec52': 0.2927663815172217, '04379243/b9c5de845a1f5ccf23f93d9b8d14f53c': 0.2793957476316928, '04379243/41d280b7db61ebddfebad4f49b26ec52': 0.36292739570345595, '04379243/6f03a6f024145fc9febad4f49b26ec52': 0.29524535899871696, '04379243/3dd217a06e76292b372b6139ac78b39e': 0.2753278554897106, '04379243/cb71e1cf52531981593ebeeedbff73b': 0.32003333412440893, '04379243/9f76504d9b551e548c37cfe791015e2f': 0.37208169535456526, '04379243/45c5ee611c73b90a509330ce00eb0b20': 0.3143074959461117, '04379243/a2561614d015f2fdfebad4f49b26ec52': 0.27860111156104383, '04379243/fe0ac2e334ad4d844fb315ce917a9ec2': 0.29047481002574754, '04379243/9c9554e0883818c9febad4f49b26ec52': 0.283230710701327, '04379243/c85ba9a3e1896eb254adaad15f0d584e': 0.2731296128697195, '04379243/797ecd23342e744bbff15b656f256f05': 0.25518793181235333, '04379243/915855afcc5f8918ab27cc93fdc68c94': 0.2926077219467744, '04379243/75ddfe6d71b14184134155606601dcb2': 0.2755089304802004, '04379243/7dc6c6f96b77b7d3febad4f49b26ec52': 0.36865491387379157, '04379243/f38a18709e55e4647ee217c21e683487': 0.27079920711465555, '04379243/3931ce39e77a25a9dfefa992cb59ea0': 0.28075712730308033, '04379243/575b467b6ebb2f234eaa3180e8182d9e': 0.30778843491305785, '04379243/f6f3b8e08af617e44733824eae5cd9ae': 0.2429340462478592, '04379243/cd224ca2a8aa04b11362d127df6d94eb': 0.3310462718560743, '04379243/dace4e7f0ec285abcaa22a10624245b6': 0.27985688874477954, '04379243/da8ec638b64227066d767b6d0313d349': 0.29539314886207646, '04379243/952da8ad85350267b9b072e1f62798f5': 0.2941584181754032, '04379243/51cfb783895a8af9febad4f49b26ec52': 0.32665656735452714, '04379243/ff2b5b315173f3244fb315ce917a9ec2': 0.3016606149934463, '04379243/4c4675bc602b2a95febad4f49b26ec52': 0.2634548622886725, 
'04379243/425544b66203da392ebeb1e6a8111f53': 0.4946576827599186, '04379243/fe82d64b0268ba75febad4f49b26ec52': 0.2987631193368364, '04379243/87ebd707ca90700d8b424343280aeccb': 0.2036957111597284, '04379243/4f7f8af38b1a13f67c1b348241918030': 0.2972472116502579, '04379243/1cd6a00b71f02b06430c2c15987e4cd': 0.26448328287181005, '03211117/1d1cd29446bff16090adfc5ef6476a6e': 0.4758888022737482, '03691459/52e827d2f969a2b61f2b6130e0fe93a6': 0.2513604491570652, '03691459/f0f9a2082454542751dfe6844b6e8393': 0.28919258291611305, '03691459/bbda555f0874e9d5b35234ceed4dc815': 0.41396553720047535, '04256520/b44d152534373752febad4f49b26ec52': 0.34255735611908317, '04256520/d3425554004d834f6dbc9d74bad392c': 0.1819656454192386, '04256520/ae9d32ee01af191a32dc1e76c3474bc': 0.30077427871314677, '04256520/388aebe52bbe88757143b902ce4e435d': 0.2695690914357143, '04256520/575876c91251e1923d6e282938a47f9e': 0.2465198488048127, '04256520/bd3cb48163e43810f29b3e56ea45251a': 0.2398063826531157, '04256520/df7cced6f0e5e65c26e55d59015dabc6': 0.31393154755875186, '04256520/cd249bd432c4bc75b82cf928f6ed5338': 0.24875773372794194, '04256520/248e014f31771b31d3ddfaaa242f81a1': 0.3651945242931118, '04256520/3f8523f11a622d8d6983f351200ac6a': 0.28358734579175243, '04256520/d6d69d04e3c34465e9fa215d22832290': 0.3012059086959642, '04256520/3f8aba017afa6d94f78aa2d67f081607': 0.25134347285239617, '04256520/68fce005fd18b5af598a453fd9fbd988': 0.25739880072023846, '03207941/69b0a23eb87e1c396694e76612a795a6': 0.24804073889257744, '03325088/1f223cac61e3679ef235ab3c41aeb5b6': 0.47440583274796444, '03325088/7ade4c8d7724e56b76d20e73c57c9a03': 0.3191524524851414, '02801938/bcc429595319716b726dbbf7bc5e4df3': 0.2443209756271703, '02801938/5208bc4450a16d0e4b3c42e318f3affc': 0.2528763876051636, '02801938/be3c2533130dd3da55f46d55537192b6': 0.388349532841341, '02801938/97c3dff51452522814513156cf2b8d0d': 0.17695602924172202, '02801938/9e4a936285f32194e1a03d0bf111d109': 0.22784618116173397, 
'02801938/d224635923b9ec4637dc91749a7c4915': 0.28039080824377316, '02801938/acfe521c412fcd04564c0afd61663476': 0.2505751267753218, '03624134/b80c5d63962c04b41395331ebe4786cd': 0.386817271713328, '03624134/66955a3156556f0d1395331ebe4786cd': 0.3517663207978639, '03624134/3a4f0118a57093cbf7c4ed45ce654123': 0.27549847612973255, '03624134/2e9a0e216c08293d1395331ebe4786cd': 0.3579041034522509, '03624134/c141abaec55e87e691687e259c21528d': 0.4682564356182648, '03624134/1897adf19953756d91687e259c21528d': 0.4654626701659226, '03624134/a73a2d9584b2cbc81395331ebe4786cd': 0.34481705702888404, '03624134/2f74196bd5cb462727c767f081f1365a': 0.3530325758763523, '04090263/d16ba2810dd8489cfcace4d823343363': 0.36580559352527237, '04090263/1fa5a9170bb276e7fcace4d823343363': 0.38827163553233524, '04090263/9642b6d77734a1c351cfdb4c9f126c12': 0.33421556265558916, '02933112/8e1a1e2879cfa2d6fe395915d44df772': 0.29216781907355655, '02933112/6343efcc64b331b3e3f7a74e12a274ef': 0.34009363924766967, '02933112/58b97051fb1efb3b4bd9e0690b0b191': 0.21932400977304323, '02933112/1f4ccbdbd0162e9be3f7a74e12a274ef': 0.23687889216033253, '02933112/33ebdfbed1aff9fb12d532e9deb7e02b': 0.2234013755553408, '02933112/85502157d9e253d411fc2b865c2a185b': 0.37097950991290135, '02933112/cd251287fd34d7e0e3f7a74e12a274ef': 0.3569388794982453, '02933112/bfdb60bd61d083536739a7caa0c577bd': 0.3684828163688074, '02691156/886942791e830bf1d32b1717fde97410': 0.4260345405467737, '02773838/f5800755a78fc83957be02cb1dc1e62': 0.37005492349238794, '02828884/7e8caf5bf2eb1a61ecaa3c66b0328b42': 0.39552529436277406, '02828884/444e7c5709eca2496f61afd58e50ae2': 0.30553663124975367, '02828884/7769891a8af54054bfde7937440ef438': 0.26328581076194074, '02828884/7e73d9c7082453987b019ecf3e106a55': 0.272857621789694, '02828884/39d8fdb56b0e160dbcceec49967c0de7': 0.30249821911866226, '02828884/137fdd05ae4e69c7a68359455a0ffe24': 0.3636460238628736, '02828884/d6075b23895c7d0880e85c92fa2351f7': 0.2651012911938732, 
'02828884/302c64b3f9d0e0a3961c690e3d679ac7': 0.37262511223866135, '02828884/19bb2f65f3de8f5fbdc7943e19c9bdf7': 0.29692137931425705, '02828884/10cfa696ba2259ccbbba142d6df53ce': 0.29593628678247086, '02828884/dbd0698df1623b0391da37ff8bdd2524': 0.25482850396661644, '03797390/c51b79493419eccdc1584fff35347dc6': 0.39853867076304234, '03797390/1d18255a04d22794e521eeb8bb14c5b3': 0.2082370476854743, '02876657/6b810dbc89542fd8a531220b48579115': 0.2410093794578824, '02876657/d9aee510fd5e8afb93fb5c975e8de2b7': 0.23366564984081412, '02876657/1df41477bce9915e362078f6fc3b29f5': 0.3265737295181812, '02876657/56c23ba1699f6294435b5a0263ddd2e2': 0.23235115245198346, '02876657/7746997b8cfd8d11f4718731863dd64d': 0.35992130001177347, '02876657/78d707f4d87b5bfd730ff85f0d8004ee': 0.42442472527435726, '02876657/9fe7e6a7bf8ca964efad53eb3f0b36fa': 0.3181704164311424, '02876657/dc0926ce09d6ce78eb8e919b102c6c08': 0.3959517942378955, '02876657/1b64b36bf7ddae3d7ad11050da24bb12': 0.3339083914873065, '02876657/5ad47181a9026fc728cc22dce7529b69': 0.2819320476806692, '02876657/621e786d6343d3aa2c96718b14a4add9': 0.3096872077369119, '02876657/c3767df815e0e43e4c3a35cee92bb95b': 0.23282710160597733, '02876657/ab6792cddc7c4c83afbf338b16b43f53': 0.2824057726614718, '02876657/3ba7dd61736e7a96270c0e719fe4ed97': 0.33701174377576115, '02876657/e8b48d395d3d8744e53e6e0633163da8': 0.3077651947589247, '03337140/7d4abd821dfbc7dfcc786970133d7717': 0.23166213599879573, '02747177/cf158e768a6c9c8a17cab8b41d766398': 0.2733196973552894, '02747177/9fe4d78e7d4085c2f1b010366bb60ce8': 0.17589323878114485, '02747177/9ee464a3fb9d3e8e57cd6640bbeb736d': 0.23439602186310232, '02747177/8026b11c2f66d77ed5a4ff0c97164231': 0.2098441653314233, '02747177/acfe521c412fcd04564c0afd61663476': 0.2505751267753218, '02843684/7dd57384aa290a835821cea205f2a4bb': 0.19103898957992915, '04530566/907c179103304ce8efcba30d0f49b70': 0.34159289712768015, '04530566/9472a24df8372cd42e436d38f27146ec': 0.4465681610786845, # Below: mostly colorless objects. 
Added June 19 2020. '03797390/3143a4accdc23349cac584186c95ce9b': 0.25, '03797390/c39fb75015184c2a0c7f097b1a1f7a5': 0.25, '02880940/e4c871d1d5e3c49844b2fa2cac0778f5': 0.25, '02880940/45603bffc6a2866b5d1ac0d5489f7d84': 0.25, '02691156/d605a53c0917acada80799ffaf21ea7d': 0.25, '03991062/9da456e7bae24501ffc6e457221b9271': 0.25, '03991062/5563324c9902f243a2c59a4d90e63212': 0.25, '02808440/302618414a9991de3321831d2245cf06': 0.25, '02808440/fd34f47eb1b816139a625c8b524db46e': 0.25, '02808440/1dc36bb29702c28b3321831d2245cf06': 0.25, '02808440/ea7913efbe22abed412deb0d2b0f3dcd': 0.25, '02808440/cca20dbad3faf4343321831d2245cf06': 0.25, '02818832/4954953090ca7442527e7f2c027f7469': 0.25, '03593526/de673ddf9df03b8278cf1a714198918': 0.25, '03593526/1ca73dffe31553efcb349a60fd15aa15': 0.25, '03593526/bcf5a4b764ddeac759f9892433e1b1f4': 0.25, '03593526/a791d4fa1d840408c5beea20858a99d5': 0.25, '03593526/9097ce398b9700a27e561f865dc44fc5': 0.25, '03593526/6a13375f8fce3142e6597d391ab6fcc1': 0.25, '03593526/c1be3d580b4088bf4cc80585c0d3d970': 0.25, '03593526/2ff5347d6ee079855337cdc4b758988c': 0.25, '03593526/cf65c7fb56850194490ad276cd2af3a4': 0.25, '03593526/aa638e39c28f3a184800dfbcda5cce3': 0.25, '03593526/ffc7e98720e71017b3878cedd8c8fe6c': 0.25, '03593526/af61c129521b9a36ad56360b1d2e97b8': 0.25, '03593526/ce2508ec4476b2cbf51f77a6d7299806': 0.25, '04256520/e74d866f44f857e77b5d58e18a4bdcae': 0.25, '04256520/5171a910435f4c949a502993c14408e4': 0.25, '04256520/c8108de19d8a3005c5beea20858a99d5': 0.25, '04256520/ae4f28a7c4e22d9535dda488a4bbb1e1': 0.25, '03261776/51245ad5a0571188992fd3dac901d508': 0.25, '03325088/13cdb5df9a944fcbb7a867e9b35a1295': 0.25, '03325088/af708fe6eac1bc9450da8b99982a3057': 0.25, '03325088/4477714b35747609f34d244dc02df229': 0.25, '02876657/8ea8ced5529a3ffa7ae0a62f7ca532f1': 0.25, '02876657/cdeccf2f410846d0e0155f56493d36bc': 0.25, '03636649/23eaba9bdd51a5b0dfe9cab879fd37e8': 0.25, '03636649/ed323758d0f61cfe6085a0a38e2f255': 0.25, '03636649/a82af4e7e81334f8876b399a99a15c0f': 
0.25, '03636649/11913615a1b732d435836c728d324152': 0.25, '02801938/98c3ddee85264efd7cd51b1084c649a6': 0.25, '04379243/ec316148b4cdd446b6068c62e84866a1': 0.25, '02828884/3e0694b77418eb25d2b12aa6a0f050b3': 0.25, '02828884/6f0723826537010c870f22c94729669b': 0.25, '02828884/5b50871735c5cce2d2b12aa6a0f050b3': 0.25, '02828884/8d074f177479ac42628516f95b4691f': 0.25, '04530566/80c6a14accb189a9c2c2c81e2232aa95': 0.25, '04530566/31a41e6a73c5d019efffdb45d12d0585': 0.25, '04530566/de010f7468ceefc6fcfb3ae2df2f7efd': 0.25, '04530566/6c1cfb2fe245b969c2e818a707fdb3e0': 0.25, '02871439/43731a6a4bd64ae5492d9da2668ec34c': 0.25, '02871439/cc38bc7db90f43d214b86d5282eb8301': 0.25, '02954340/e823673c1edd73fb97c426435543a860': 0.25, '03001627/ddfe96c6ec86b8752cbb5ed9636a4451': 0.25, '02747177/8fff3a4ba8db098bd2b12aa6a0f050b3': 0.25, '02747177/e7682974949a6aadea9a778eef212687': 0.25, '02747177/4dbbece412ef64b6d2b12aa6a0f050b3': 0.25, }
66.690909
83
0.818893
2,223
36,680
13.454791
0.45704
0.005717
0.004781
0.001805
0.002808
0.002808
0.002808
0.002808
0
0
0
0.664211
0.089613
36,680
549
84
66.812386
0.231486
0.002863
0
0
0
0
0.639533
0.613694
0
0
0
0
0
1
0
false
0.001832
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
ccda970cdcf432d2fd2a126755d2876b6106824b
150
py
Python
Other groups/Owlolf/YuYuYu/yuyuyu_common/__init__.py
Ichunjo/encode-script
389a9f497e637eaade6f99acee816636856961d4
[ "MIT" ]
36
2019-11-08T20:50:07.000Z
2022-03-23T05:43:55.000Z
Other groups/Owlolf/YuYuYu/yuyuyu_common/__init__.py
Ichunjo/encode-script
389a9f497e637eaade6f99acee816636856961d4
[ "MIT" ]
1
2019-11-08T21:26:16.000Z
2019-11-08T21:26:16.000Z
Other groups/Owlolf/YuYuYu/yuyuyu_common/__init__.py
Ichunjo/encode-script
389a9f497e637eaade6f99acee816636856961d4
[ "MIT" ]
7
2019-11-08T21:10:47.000Z
2022-03-28T21:57:04.000Z
# flake8: noqa from .constants import graigasm_args from .filter import Denoise, Mask, Thr, Scale from .config import Encoding, EncodingWeb, YuYuYuYu
30
51
0.8
20
150
5.95
0.8
0
0
0
0
0
0
0
0
0
0
0.007692
0.133333
150
4
52
37.5
0.907692
0.08
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
693eaad2d07bc257e005eeed15febcc666a6ee9b
77
py
Python
base/resources/__init__.py
anchalghale/auto_disenchanter
4edab1b72538b15bf8d665629f951db1612fa825
[ "Apache-2.0" ]
7
2021-04-07T17:44:42.000Z
2022-02-13T05:47:11.000Z
base/resources/__init__.py
anchalghale/auto_disenchanter
4edab1b72538b15bf8d665629f951db1612fa825
[ "Apache-2.0" ]
1
2021-08-20T09:11:38.000Z
2022-02-11T12:54:38.000Z
base/resources/__init__.py
anchalghale/auto_disenchanter
4edab1b72538b15bf8d665629f951db1612fa825
[ "Apache-2.0" ]
3
2019-11-22T06:21:17.000Z
2020-06-16T07:25:23.000Z
'''Module for parsing resources''' from .images import * from .json import *
19.25
34
0.714286
10
77
5.5
0.8
0
0
0
0
0
0
0
0
0
0
0
0.155844
77
3
35
25.666667
0.846154
0.363636
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6950f743cf003af43b1373527c80a580ce929f0f
30,558
py
Python
pytest_docker_squid_fixtures/fixtures.py
crashvb/pytest-docker-squid-fixtures
bdc1bc5473b730f1f9d7e824320a49b3bfc8a2d3
[ "Apache-2.0" ]
null
null
null
pytest_docker_squid_fixtures/fixtures.py
crashvb/pytest-docker-squid-fixtures
bdc1bc5473b730f1f9d7e824320a49b3bfc8a2d3
[ "Apache-2.0" ]
null
null
null
pytest_docker_squid_fixtures/fixtures.py
crashvb/pytest-docker-squid-fixtures
bdc1bc5473b730f1f9d7e824320a49b3bfc8a2d3
[ "Apache-2.0" ]
1
2022-02-15T06:27:06.000Z
2022-02-15T06:27:06.000Z
#!/usr/bin/env python # pylint: disable=redefined-outer-name,too-many-arguments,too-many-locals """The actual fixtures, you found them ;).""" import logging import itertools from base64 import b64encode from functools import partial from pathlib import Path from ssl import create_default_context, SSLContext from string import Template from time import sleep, time from typing import Dict, Generator, List, NamedTuple import pytest from lovely.pytest.docker.compose import Services from _pytest.tmpdir import TempPathFactory from .utils import ( check_proxy, generate_cacerts, generate_htpasswd, generate_keypair, get_docker_compose_user_defined, get_embedded_file, get_user_defined_file, SQUID_PORT_INSECURE, SQUID_PORT_SECURE, SQUID_SERVICE, SQUID_SERVICE_PATTERN, start_service, ) # Caching is needed, as singular-fixtures and list-fixtures will conflict at scale_factor=1 # This appears to only matter when attempting to start the docker secure squid service # for the second time. CACHE = {} LOGGER = logging.getLogger(__name__) class SquidCerts(NamedTuple): # pylint: disable=missing-class-docstring ca_certificate: Path ca_private_key: Path certificate: Path private_key: Path class SquidInsecure(NamedTuple): # pylint: disable=missing-class-docstring docker_compose: Path endpoint: str endpoint_name: str service_name: str # Note: NamedTuple does not support inheritance :( class SquidSecure(NamedTuple): # pylint: disable=missing-class-docstring auth_header: Dict[str, str] cacerts: Path certs: SquidCerts docker_compose: Path endpoint: str endpoint_name: str htpasswd: Path password: str service_name: str ssl_context: SSLContext username: str def _pdsf_docker_compose_insecure( *, docker_compose_files: List[str], scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """ Provides the location of the docker-compose configuration file containing the insecure squid service. 
""" cache_key = _pdsf_docker_compose_insecure.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue service_name = SQUID_SERVICE_PATTERN.format("insecure", i) chain = itertools.chain( get_docker_compose_user_defined(docker_compose_files, service_name), # TODO: lovely-docker-compose uses the file for teardown ... get_embedded_file( tmp_path_factory, delete_after=False, name="docker-compose.yml" ), ) for path in chain: result.append(path) break else: LOGGER.warning("Unable to find docker compose for: %s", service_name) result.append("-unknown-") CACHE[cache_key] = result yield result @pytest.fixture(scope="session") def pdsf_docker_compose_insecure( docker_compose_files: List[str], tmp_path_factory: TempPathFactory ) -> Generator[Path, None, None]: """ Provides the location of the docker-compose configuration file containing the insecure squid service. """ for lst in _pdsf_docker_compose_insecure( docker_compose_files=docker_compose_files, scale_factor=1, tmp_path_factory=tmp_path_factory, ): yield lst[0] @pytest.fixture(scope="session") def pdsf_docker_compose_insecure_list( docker_compose_files: List[str], pdsf_scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """ Provides the location of the docker-compose configuration file containing the insecure squid service. """ yield from _pdsf_docker_compose_insecure( docker_compose_files=docker_compose_files, scale_factor=pdsf_scale_factor, tmp_path_factory=tmp_path_factory, ) def _pdsf_docker_compose_secure( *, docker_compose_files: List[str], scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """ Provides the location of the templated docker-compose configuration file containing the secure squid service. 
""" cache_key = _pdsf_docker_compose_secure.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue service_name = SQUID_SERVICE_PATTERN.format("secure", i) chain = itertools.chain( get_docker_compose_user_defined(docker_compose_files, service_name), get_embedded_file( tmp_path_factory, delete_after=False, name="docker-compose.yml" ), ) for path in chain: result.append(path) break else: LOGGER.warning("Unable to find docker compose for: %s", service_name) result.append("-unknown-") CACHE[cache_key] = result yield result @pytest.fixture(scope="session") def pdsf_docker_compose_secure( docker_compose_files: List[str], tmp_path_factory: TempPathFactory ) -> Generator[Path, None, None]: """ Provides the location of the templated docker-compose configuration file containing the secure squid service. """ for lst in _pdsf_docker_compose_secure( docker_compose_files=docker_compose_files, scale_factor=1, tmp_path_factory=tmp_path_factory, ): yield lst[0] @pytest.fixture(scope="session") def pdsf_docker_compose_secure_list( docker_compose_files: List[str], pdsf_scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """ Provides the location of the templated docker-compose configuration file containing the secure squid service. 
""" yield from _pdsf_docker_compose_secure( docker_compose_files=docker_compose_files, scale_factor=pdsf_scale_factor, tmp_path_factory=tmp_path_factory, ) @pytest.fixture(scope="session") def pdsf_scale_factor() -> int: """Provides the number enumerated instances to be instantiated.""" return 1 def _squid_auth_header( *, squid_password_list: List[str], squid_username_list: List[str], scale_factor: int, ) -> List[Dict[str, str]]: """Provides an HTTP basic authentication header containing credentials for the secure squid service.""" cache_key = _squid_auth_header.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue auth = b64encode( f"{squid_username_list[i]}:{squid_password_list[i]}".encode("utf-8") ).decode("utf-8") result.append({"Proxy-Authorization": f"Basic {auth}"}) CACHE[cache_key] = result return result @pytest.fixture(scope="session") def squid_auth_header(squid_password: str, squid_username: str) -> Dict[str, str]: """Provides an HTTP basic authentication header containing credentials for the secure squid service.""" return _squid_auth_header( squid_password_list=[squid_password], squid_username_list=[squid_username], scale_factor=1, )[0] @pytest.fixture(scope="session") def squid_auth_header_list( squid_password_list: List[str], squid_username_list: List[str], pdsf_scale_factor: int, ) -> List[Dict[str, str]]: """Provides an HTTP basic authentication header containing credentials for the secure squid service.""" return _squid_auth_header( squid_password_list=squid_password_list, squid_username_list=squid_username_list, scale_factor=pdsf_scale_factor, ) def _squid_cacerts( *, squid_certs_list: List[SquidCerts], pytestconfig: "_pytest.config.Config", scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """ Provides the location of a temporary CA certificate trust store that contains the certificate of the secure squid service. 
""" cache_key = _squid_cacerts.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue chain = itertools.chain( get_user_defined_file(pytestconfig, "cacerts"), generate_cacerts( tmp_path_factory, certificate=squid_certs_list[i].ca_certificate, ), ) for path in chain: result.append(path) break else: LOGGER.warning("Unable to find or generate cacerts!") result.append("-unknown-") CACHE[cache_key] = result yield result @pytest.fixture(scope="session") def squid_cacerts( squid_certs: SquidCerts, pytestconfig: "_pytest.config.Config", tmp_path_factory: TempPathFactory, ) -> Generator[Path, None, None]: """ Provides the location of a temporary CA certificate trust store that contains the certificate of the secure squid service. """ for lst in _squid_cacerts( squid_certs_list=[squid_certs], pytestconfig=pytestconfig, scale_factor=1, tmp_path_factory=tmp_path_factory, ): yield lst[0] @pytest.fixture(scope="session") def squid_cacerts_list( squid_certs_list: List[SquidCerts], pdsf_scale_factor: int, pytestconfig: "_pytest.config.Config", tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """ Provides the location of a temporary CA certificate trust store that contains the certificate of the secure squid service. """ yield from _squid_cacerts( squid_certs_list=squid_certs_list, pytestconfig=pytestconfig, scale_factor=pdsf_scale_factor, tmp_path_factory=tmp_path_factory, ) def _squid_certs( *, scale_factor: int, tmp_path_factory: TempPathFactory ) -> Generator[List[SquidCerts], None, None]: """Provides the location of temporary certificate and private key files for the secure squid service.""" # TODO: Augment to allow for reading certificates from /test ... 
cache_key = _squid_certs.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue tmp_path = tmp_path_factory.mktemp(__name__) service_name = SQUID_SERVICE_PATTERN.format("secure", i) keypair = generate_keypair(service_name=service_name) squid_cert = SquidCerts( ca_certificate=tmp_path.joinpath(f"{SQUID_SERVICE}-ca-{i}.crt"), ca_private_key=tmp_path.joinpath(f"{SQUID_SERVICE}-ca-{i}.key"), certificate=tmp_path.joinpath(f"{SQUID_SERVICE}-{i}.crt"), private_key=tmp_path.joinpath(f"{SQUID_SERVICE}-{i}.key"), ) squid_cert.ca_certificate.write_bytes(keypair.ca_certificate) squid_cert.ca_private_key.write_bytes(keypair.ca_private_key) squid_cert.certificate.write_bytes(keypair.certificate) squid_cert.private_key.write_bytes(keypair.private_key) result.append(squid_cert) CACHE[cache_key] = result yield result for squid_cert in result: squid_cert.ca_certificate.unlink(missing_ok=True) squid_cert.ca_private_key.unlink(missing_ok=True) squid_cert.certificate.unlink(missing_ok=True) squid_cert.private_key.unlink(missing_ok=True) @pytest.fixture(scope="session") def squid_certs( tmp_path_factory: TempPathFactory, ) -> Generator[SquidCerts, None, None]: """Provides the location of temporary certificate and private key files for the secure squid service.""" for lst in _squid_certs(scale_factor=1, tmp_path_factory=tmp_path_factory): yield lst[0] @pytest.fixture(scope="session") def squid_certs_list( pdsf_scale_factor: int, tmp_path_factory: TempPathFactory ) -> Generator[List[SquidCerts], None, None]: """Provides the location of temporary certificate and private key files for the secure squid service.""" yield from _squid_certs( scale_factor=pdsf_scale_factor, tmp_path_factory=tmp_path_factory ) def _squid_htpasswd( *, pytestconfig: "_pytest.config.Config", scale_factor: int, squid_password_list: List[str], squid_username_list: List[str], tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """Provides the 
location of the htpasswd file for the secure squid service.""" cache_key = _squid_htpasswd.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue chain = itertools.chain( get_user_defined_file(pytestconfig, "htpasswd"), generate_htpasswd( tmp_path_factory, username=squid_username_list[i], password=squid_password_list[i], ), ) for path in chain: result.append(path) break else: LOGGER.warning("Unable to find or generate htpasswd!") result.append("-unknown-") CACHE[cache_key] = result yield result @pytest.fixture(scope="session") def squid_htpasswd( pytestconfig: "_pytest.config.Config", squid_password: str, squid_username: str, tmp_path_factory: TempPathFactory, ) -> Generator[Path, None, None]: """Provides the location of the htpasswd file for the secure squid service.""" for lst in _squid_htpasswd( pytestconfig=pytestconfig, scale_factor=1, squid_password_list=[squid_password], squid_username_list=[squid_username], tmp_path_factory=tmp_path_factory, ): yield lst[0] @pytest.fixture(scope="session") def squid_htpasswd_list( pdsf_scale_factor: int, pytestconfig: "_pytest.config.Config", squid_password_list: List[str], squid_username_list: List[str], tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """Provides the location of the htpasswd file for the secure squid service.""" yield from _squid_htpasswd( pytestconfig=pytestconfig, scale_factor=pdsf_scale_factor, squid_username_list=squid_username_list, squid_password_list=squid_password_list, tmp_path_factory=tmp_path_factory, ) def _squid_insecure( *, docker_compose_insecure_list: List[Path], docker_services: Services, squid_squidcfg_insecure_list: List[Path], scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[SquidInsecure], None, None]: """Provides the endpoint of a local, insecure, squid.""" cache_key = _squid_insecure.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue 
service_name = SQUID_SERVICE_PATTERN.format("insecure", i) tmp_path = tmp_path_factory.mktemp(__name__) # Create a secure squid service from the docker compose template ... path_docker_compose = tmp_path.joinpath(f"docker-compose-{i}.yml") template = Template(docker_compose_insecure_list[i].read_text("utf-8")) path_docker_compose.write_text( template.substitute( { "CONTAINER_NAME": service_name, # Note: Needed to correctly populate the embedded, consolidated, service template ... "PATH_CERTIFICATE": "/dev/null", "PATH_HTPASSWD": "/dev/null", "PATH_KEY": "/dev/null", "PATH_SQUIDCFG": squid_squidcfg_insecure_list[i], } ), "utf-8", ) LOGGER.debug("Starting insecure squid service [%d] ...", i) LOGGER.debug(" docker-compose : %s", path_docker_compose) LOGGER.debug(" service name : %s", service_name) LOGGER.debug(" squidcfg : %s", squid_squidcfg_insecure_list[i]) check_server = partial(check_proxy, protocol="http") endpoint = start_service( docker_services, check_server=check_server, docker_compose=path_docker_compose, private_port=SQUID_PORT_INSECURE, service_name=service_name, ) LOGGER.debug("Insecure squid endpoint [%d]: %s", i, endpoint) result.append( SquidInsecure( docker_compose=path_docker_compose, endpoint=endpoint, endpoint_name=f"{service_name}:{SQUID_PORT_INSECURE}", service_name=service_name, ) ) CACHE[cache_key] = result yield result @pytest.fixture(scope="session") def squid_insecure( docker_services: Services, squid_squidcfg_insecure: Path, pdsf_docker_compose_insecure: Path, tmp_path_factory: TempPathFactory, ) -> Generator[SquidInsecure, None, None]: """Provides the endpoint of a local, insecure, squid.""" for lst in _squid_insecure( docker_compose_insecure_list=[pdsf_docker_compose_insecure], docker_services=docker_services, squid_squidcfg_insecure_list=[squid_squidcfg_insecure], scale_factor=1, tmp_path_factory=tmp_path_factory, ): yield lst[0] @pytest.fixture(scope="session") def squid_insecure_list( docker_services: Services, 
squid_squidcfg_insecure_list: List[Path], pdsf_docker_compose_insecure_list: List[Path], pdsf_scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[SquidInsecure], None, None]: """Provides the endpoint of a local, insecure, squid.""" yield from _squid_insecure( docker_compose_insecure_list=pdsf_docker_compose_insecure_list, docker_services=docker_services, squid_squidcfg_insecure_list=squid_squidcfg_insecure_list, scale_factor=pdsf_scale_factor, tmp_path_factory=tmp_path_factory, ) def _squid_password(*, scale_factor: int) -> List[str]: """Provides the password to use for authentication to the secure squid service.""" cache_key = _squid_password.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue result.append(f"pytest.password.{time()}") sleep(0.05) CACHE[cache_key] = result return result @pytest.fixture(scope="session") def squid_password() -> str: """Provides the password to use for authentication to the secure squid service.""" return _squid_password(scale_factor=1)[0] @pytest.fixture(scope="session") def squid_password_list(pdsf_scale_factor: int) -> List[str]: """Provides the password to use for authentication to the secure squid service.""" return _squid_password(scale_factor=pdsf_scale_factor) def _squid_secure( *, docker_compose_secure_list: List[Path], docker_services: Services, squid_auth_header_list: List[Dict[str, str]], squid_cacerts_list: List[Path], squid_certs_list: List[SquidCerts], squid_htpasswd_list: List[Path], squid_password_list: List[str], squid_squidcfg_secure_list: List[Path], squid_ssl_context_list: List[SSLContext], squid_username_list: List[str], scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[SquidSecure], None, None]: """Provides the endpoint of a local, secure, squid.""" cache_key = _squid_secure.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue service_name = 
SQUID_SERVICE_PATTERN.format("secure", i) tmp_path = tmp_path_factory.mktemp(__name__) # Create a secure squid service from the docker compose template ... path_docker_compose = tmp_path.joinpath(f"docker-compose-{i}.yml") template = Template(docker_compose_secure_list[i].read_text("utf-8")) path_docker_compose.write_text( template.substitute( { "CONTAINER_NAME": service_name, "PATH_CERTIFICATE": squid_certs_list[i].certificate, "PATH_HTPASSWD": squid_htpasswd_list[i], "PATH_KEY": squid_certs_list[i].private_key, "PATH_SQUIDCFG": squid_squidcfg_secure_list[i], } ), "utf-8", ) LOGGER.debug("Starting secure squid service [%d] ...", i) LOGGER.debug(" docker-compose : %s", path_docker_compose) LOGGER.debug(" ca certificate : %s", squid_certs_list[i].ca_certificate) LOGGER.debug(" certificate : %s", squid_certs_list[i].certificate) LOGGER.debug(" squidcfg : %s", squid_squidcfg_secure_list[i]) LOGGER.debug(" private key : %s", squid_certs_list[i].private_key) LOGGER.debug(" password : %s", squid_password_list[i]) LOGGER.debug(" service name : %s", service_name) LOGGER.debug(" username : %s", squid_username_list[i]) check_server = partial( check_proxy, auth_header=squid_auth_header_list[i], protocol="https", ssl_context=squid_ssl_context_list[i], ) endpoint = start_service( docker_services, check_server=check_server, docker_compose=path_docker_compose, private_port=SQUID_PORT_SECURE, service_name=service_name, ) LOGGER.debug("Secure squid endpoint [%d]: %s", i, endpoint) result.append( SquidSecure( auth_header=squid_auth_header_list[i], cacerts=squid_cacerts_list[i], certs=squid_certs_list[i], docker_compose=path_docker_compose, endpoint=endpoint, endpoint_name=f"{service_name}:{SQUID_PORT_SECURE}", htpasswd=squid_htpasswd_list[i], password=squid_password_list[i], service_name=service_name, ssl_context=squid_ssl_context_list[i], username=squid_username_list[i], ) ) CACHE[cache_key] = result yield result @pytest.fixture(scope="session") def squid_secure( docker_services: 
Services, squid_auth_header, squid_cacerts: Path, squid_certs: SquidCerts, squid_htpasswd: Path, squid_password: str, squid_squidcfg_secure: Path, squid_ssl_context: SSLContext, squid_username: str, pdsf_docker_compose_secure: Path, tmp_path_factory: TempPathFactory, ) -> Generator[SquidSecure, None, None]: """Provides the endpoint of a local, secure, squid.""" for lst in _squid_secure( docker_compose_secure_list=[pdsf_docker_compose_secure], squid_auth_header_list=[squid_auth_header], squid_cacerts_list=[squid_cacerts], squid_certs_list=[squid_certs], squid_htpasswd_list=[squid_htpasswd], squid_password_list=[squid_password], squid_squidcfg_secure_list=[squid_squidcfg_secure], squid_ssl_context_list=[squid_ssl_context], squid_username_list=[squid_username], docker_services=docker_services, scale_factor=1, tmp_path_factory=tmp_path_factory, ): yield lst[0] @pytest.fixture(scope="session") def squid_secure_list( docker_services: Services, squid_auth_header_list, squid_cacerts_list: List[Path], squid_certs_list: List[SquidCerts], squid_htpasswd_list: List[Path], squid_password_list: List[str], squid_squidcfg_secure_list: List[Path], squid_ssl_context_list: List[SSLContext], squid_username_list: List[str], pdsf_docker_compose_secure_list: List[Path], pdsf_scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[SquidSecure], None, None]: """Provides the endpoint of a local, secure, squid.""" yield from _squid_secure( docker_compose_secure_list=pdsf_docker_compose_secure_list, squid_auth_header_list=squid_auth_header_list, squid_cacerts_list=squid_cacerts_list, squid_certs_list=squid_certs_list, squid_htpasswd_list=squid_htpasswd_list, squid_password_list=squid_password_list, squid_squidcfg_secure_list=squid_squidcfg_secure_list, squid_ssl_context_list=squid_ssl_context_list, squid_username_list=squid_username_list, docker_services=docker_services, scale_factor=pdsf_scale_factor, tmp_path_factory=tmp_path_factory, ) def _squid_squidcfg_insecure( *, 
pytestconfig: "_pytest.config.Config", scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """Provides the location of the squid configuration file for the insecure squid service.""" cache_key = _squid_squidcfg_insecure.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue chain = itertools.chain( get_user_defined_file(pytestconfig, "squid.insecure.cfg"), get_embedded_file( tmp_path_factory, delete_after=False, name="squid.insecure.cfg" ), ) for path in chain: result.append(path) break else: LOGGER.warning("Unable to find insecure squid.cfg!") result.append("-unknown-") CACHE[cache_key] = result yield result @pytest.fixture(scope="session") def squid_squidcfg_insecure( pytestconfig: "_pytest.config.Config", tmp_path_factory: TempPathFactory, ) -> Generator[Path, None, None]: """Provides the location of the squid configuration file for the insecure squid service.""" for lst in _squid_squidcfg_insecure( pytestconfig=pytestconfig, scale_factor=1, tmp_path_factory=tmp_path_factory, ): yield lst[0] @pytest.fixture(scope="session") def squid_squidcfg_insecure_list( pdsf_scale_factor: int, pytestconfig: "_pytest.config.Config", tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """Provides the location of the squid configuration file for the insecure squid service.""" yield from _squid_squidcfg_insecure( pytestconfig=pytestconfig, scale_factor=pdsf_scale_factor, tmp_path_factory=tmp_path_factory, ) def _squid_squidcfg_secure( *, pytestconfig: "_pytest.config.Config", scale_factor: int, tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """Provides the location of the squid configuration file for the secure squid service.""" cache_key = _squid_squidcfg_secure.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue chain = itertools.chain( get_user_defined_file(pytestconfig, 
"squid.secure.cfg"), get_embedded_file( tmp_path_factory, delete_after=False, name="squid.secure.cfg" ), ) for path in chain: result.append(path) break else: LOGGER.warning("Unable to find secure squid.cfg!") result.append("-unknown-") CACHE[cache_key] = result yield result @pytest.fixture(scope="session") def squid_squidcfg_secure( pytestconfig: "_pytest.config.Config", tmp_path_factory: TempPathFactory, ) -> Generator[Path, None, None]: """Provides the location of the squid configuration file for the secure squid service.""" for lst in _squid_squidcfg_secure( pytestconfig=pytestconfig, scale_factor=1, tmp_path_factory=tmp_path_factory, ): yield lst[0] @pytest.fixture(scope="session") def squid_squidcfg_secure_list( pdsf_scale_factor: int, pytestconfig: "_pytest.config.Config", tmp_path_factory: TempPathFactory, ) -> Generator[List[Path], None, None]: """Provides the location of the squid configuration file for the secure squid service.""" yield from _squid_squidcfg_secure( pytestconfig=pytestconfig, scale_factor=pdsf_scale_factor, tmp_path_factory=tmp_path_factory, ) def _squid_ssl_context( *, squid_cacerts_list: List[Path], scale_factor: int ) -> List[SSLContext]: """ Provides an SSLContext referencing the temporary CA certificate trust store that contains the certificate of the secure squid service. """ cache_key = _squid_ssl_context.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue result.append(create_default_context(cafile=str(squid_cacerts_list[i]))) CACHE[cache_key] = result return result @pytest.fixture(scope="session") def squid_ssl_context(squid_cacerts: Path) -> SSLContext: """ Provides an SSLContext referencing the temporary CA certificate trust store that contains the certificate of the secure squid service. 
""" return _squid_ssl_context(squid_cacerts_list=[squid_cacerts], scale_factor=1)[0] @pytest.fixture(scope="session") def squid_ssl_context_list( squid_cacerts_list: List[Path], pdsf_scale_factor: int, ) -> List[SSLContext]: """ Provides an SSLContext referencing the temporary CA certificate trust store that contains the certificate of the secure squid service. """ return _squid_ssl_context( squid_cacerts_list=squid_cacerts_list, scale_factor=pdsf_scale_factor, ) def _squid_username(*, scale_factor: int) -> List[str]: """Retrieve the name of the user to use for authentication to the secure squid service.""" cache_key = _squid_username.__name__ result = CACHE.get(cache_key, []) for i in range(scale_factor): if i < len(result): continue result.append(f"pytest.username.{time()}") sleep(0.05) CACHE[cache_key] = result return result @pytest.fixture(scope="session") def squid_username() -> str: """Retrieve the name of the user to use for authentication to the secure squid service.""" return _squid_username(scale_factor=1)[0] @pytest.fixture(scope="session") def squid_username_list( pdsf_scale_factor: int, ) -> List[str]: """Retrieve the name of the user to use for authentication to the secure squid service.""" return _squid_username(scale_factor=pdsf_scale_factor)
33.654185
117
0.678448
3,683
30,558
5.315504
0.060005
0.053788
0.051489
0.039996
0.87189
0.839506
0.777749
0.696174
0.667518
0.65107
0
0.001953
0.229302
30,558
907
118
33.69129
0.829307
0.144512
0
0.634424
0
0
0.06846
0.021772
0
0
0
0.002205
0
1
0.056899
false
0.065434
0.018492
0
0.125178
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
5
6952efb20e61551e9c2bdcce69824ad4218d080a
148
py
Python
udfs/describe/describe.py
Mo-Gul/python-for-excel
4512bcd3ac369c8a5c9de750296b163dd6142a2a
[ "MIT" ]
186
2020-07-29T01:15:48.000Z
2022-03-31T13:23:10.000Z
udfs/describe/describe.py
Svadilfari83/python-for-excel
b3bcc0561bad028024f0f90dbd88b59aced0d2e8
[ "MIT" ]
7
2021-03-08T01:11:02.000Z
2021-12-20T09:48:39.000Z
udfs/describe/describe.py
Svadilfari83/python-for-excel
b3bcc0561bad028024f0f90dbd88b59aced0d2e8
[ "MIT" ]
98
2020-07-27T05:27:07.000Z
2022-03-14T18:05:54.000Z
import xlwings as xw
import pandas as pd


@xw.func
@xw.arg("df", pd.DataFrame, index=True, header=True)
def describe(df):
    """Excel UDF returning summary statistics for the selected range.

    ``@xw.arg`` converts the incoming Excel range into a DataFrame, treating
    the first row as the header and the first column as the index; the result
    of ``DataFrame.describe()`` is written back to the sheet by xlwings.
    """
    return df.describe()
16.444444
52
0.709459
25
148
4.2
0.64
0
0
0
0
0
0
0
0
0
0
0
0.155405
148
8
53
18.5
0.84
0
0
0
0
0
0.013514
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
695c1c439e626b7a3579d384255d5c495b31650a
9,855
py
Python
console/django_scantron/user/views.py
RishiKumarRay/scantron
554ebebde1137eeba2ec38e83a59aca4f2f537ef
[ "Apache-2.0" ]
684
2018-08-21T03:38:03.000Z
2022-03-28T17:35:32.000Z
console/django_scantron/user/views.py
RishiKumarRay/scantron
554ebebde1137eeba2ec38e83a59aca4f2f537ef
[ "Apache-2.0" ]
154
2018-08-22T20:07:09.000Z
2021-11-19T08:51:14.000Z
console/django_scantron/user/views.py
RishiKumarRay/scantron
554ebebde1137eeba2ec38e83a59aca4f2f537ef
[ "Apache-2.0" ]
129
2018-08-21T08:54:50.000Z
2022-03-24T11:05:45.000Z
"""Class-based CRUD views for the ``User`` model.

Most method overrides below are pure pass-throughs to the parent
implementation and exist only as extension points; the behavioural ones
are noted inline.
"""
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.views.generic.list import ListView

# ``django.core.urlresolvers`` was removed in Django 2.0; prefer the modern
# ``django.urls`` location and fall back for older installations.
try:
    from django.urls import reverse, reverse_lazy
except ImportError:  # Django < 2.0
    from django.core.urlresolvers import reverse, reverse_lazy

from django.http import Http404

from django_scantron.models import User
from django_scantron.user.forms import UserForm


class UserListView(LoginRequiredMixin, ListView):
    """Paginated list of users; login required."""

    model = User
    template_name = "django_scantron/user_list.html"
    paginate_by = 20
    context_object_name = "user_list"
    allow_empty = True
    page_kwarg = "page"
    paginate_orphans = 0
    # Field names exposed to the template via ``get_context_data``.
    fields = [
        "password",
        "last_login",
        "is_superuser",
        "username",
        "first_name",
        "last_name",
        "email",
        "is_staff",
        "is_active",
        "date_joined",
        "groups",
        "user_permissions",
    ]

    def __init__(self, **kwargs):
        return super().__init__(**kwargs)

    def dispatch(self, *args, **kwargs):
        return super().dispatch(*args, **kwargs)

    def get(self, request, *args, **kwargs):
        return super().get(request, *args, **kwargs)

    def get_queryset(self):
        return super().get_queryset()

    def get_allow_empty(self):
        return super().get_allow_empty()

    def get_context_data(self, *args, **kwargs):
        # Behavioural override: expose the field list to the template.
        ret = super().get_context_data(*args, **kwargs)
        ret["fields"] = self.fields
        return ret

    def get_paginate_by(self, queryset):
        return super().get_paginate_by(queryset)

    def get_context_object_name(self, object_list):
        return super().get_context_object_name(object_list)

    def paginate_queryset(self, queryset, page_size):
        return super().paginate_queryset(queryset, page_size)

    def get_paginator(self, queryset, per_page, orphans=0, allow_empty_first_page=True):
        # Bug fix: the previous implementation hard-coded ``orphans=0`` and
        # ``allow_empty_first_page=True``, silently ignoring the caller's
        # arguments (and the ``paginate_orphans`` setting).
        return super().get_paginator(
            queryset,
            per_page,
            orphans=orphans,
            allow_empty_first_page=allow_empty_first_page,
        )

    def render_to_response(self, context, **response_kwargs):
        return super().render_to_response(context, **response_kwargs)

    def get_template_names(self):
        return super().get_template_names()


class UserDetailView(DetailView):
    """Read-only detail page for a single user."""

    model = User
    template_name = "django_scantron/user_detail.html"
    context_object_name = "user"
    slug_field = "slug"
    slug_url_kwarg = "slug"
    pk_url_kwarg = "pk"

    def __init__(self, **kwargs):
        return super().__init__(**kwargs)

    def dispatch(self, *args, **kwargs):
        return super().dispatch(*args, **kwargs)

    def get(self, request, *args, **kwargs):
        return super().get(request, *args, **kwargs)

    def get_object(self, queryset=None):
        return super().get_object(queryset)

    def get_queryset(self):
        return super().get_queryset()

    def get_slug_field(self):
        return super().get_slug_field()

    def get_context_data(self, **kwargs):
        ret = super().get_context_data(**kwargs)
        return ret

    def get_context_object_name(self, obj):
        return super().get_context_object_name(obj)

    def render_to_response(self, context, **response_kwargs):
        return super().render_to_response(context, **response_kwargs)

    def get_template_names(self):
        return super().get_template_names()


class UserCreateView(CreateView):
    """Create a new user via ``UserForm``."""

    model = User
    form_class = UserForm
    # NOTE: declaring both ``form_class`` and ``fields`` makes Django's
    # default ``get_form_class`` raise ImproperlyConfigured; that path is
    # never hit here because ``get_form`` always passes ``UserForm``
    # explicitly.  Kept for backward compatibility.
    fields = [
        "password",
        "last_login",
        "is_superuser",
        "username",
        "first_name",
        "last_name",
        "email",
        "is_staff",
        "is_active",
        "date_joined",
        "groups",
        "user_permissions",
    ]
    template_name = "django_scantron/user_create.html"
    success_url = reverse_lazy("user_list")

    def __init__(self, **kwargs):
        return super().__init__(**kwargs)

    def dispatch(self, request, *args, **kwargs):
        return super().dispatch(request, *args, **kwargs)

    def get(self, request, *args, **kwargs):
        return super().get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return super().post(request, *args, **kwargs)

    def get_form_class(self):
        return super().get_form_class()

    def get_form(self, form_class=UserForm):
        return super().get_form(form_class)

    def get_form_kwargs(self, **kwargs):
        return super().get_form_kwargs(**kwargs)

    def get_initial(self):
        return super().get_initial()

    def form_invalid(self, form):
        return super().form_invalid(form)

    def form_valid(self, form):
        # Behavioural override: save the object before delegating (the
        # parent saves again; kept as-is to preserve original behaviour).
        obj = form.save(commit=False)
        obj.save()
        return super().form_valid(form)

    def get_context_data(self, **kwargs):
        ret = super().get_context_data(**kwargs)
        return ret

    def render_to_response(self, context, **response_kwargs):
        return super().render_to_response(context, **response_kwargs)

    def get_template_names(self):
        return super().get_template_names()

    def get_success_url(self):
        return self.success_url


class UserUpdateView(UpdateView):
    """Edit an existing user via ``UserForm``."""

    model = User
    form_class = UserForm
    # See the note on ``UserCreateView.fields`` above.
    fields = [
        "password",
        "last_login",
        "is_superuser",
        "username",
        "first_name",
        "last_name",
        "email",
        "is_staff",
        "is_active",
        "date_joined",
        "groups",
        "user_permissions",
    ]
    template_name = "django_scantron/user_update.html"
    # Class-level mutable default is safe here: FormMixin.get_initial()
    # returns a copy.
    initial = {}
    slug_field = "slug"
    slug_url_kwarg = "slug"
    pk_url_kwarg = "pk"
    context_object_name = "user"

    def __init__(self, **kwargs):
        return super().__init__(**kwargs)

    def dispatch(self, *args, **kwargs):
        return super().dispatch(*args, **kwargs)

    def get(self, request, *args, **kwargs):
        return super().get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return super().post(request, *args, **kwargs)

    def get_object(self, queryset=None):
        return super().get_object(queryset)

    def get_queryset(self):
        return super().get_queryset()

    def get_slug_field(self):
        return super().get_slug_field()

    def get_form_class(self):
        return super().get_form_class()

    def get_form(self, form_class=UserForm):
        return super().get_form(form_class)

    def get_form_kwargs(self, **kwargs):
        return super().get_form_kwargs(**kwargs)

    def get_initial(self):
        return super().get_initial()

    def form_invalid(self, form):
        return super().form_invalid(form)

    def form_valid(self, form):
        # Behavioural override: save the object before delegating (the
        # parent saves again; kept as-is to preserve original behaviour).
        obj = form.save(commit=False)
        obj.save()
        return super().form_valid(form)

    def get_context_data(self, **kwargs):
        ret = super().get_context_data(**kwargs)
        return ret

    def get_context_object_name(self, obj):
        return super().get_context_object_name(obj)

    def render_to_response(self, context, **response_kwargs):
        return super().render_to_response(context, **response_kwargs)

    def get_template_names(self):
        return super().get_template_names()

    def get_success_url(self):
        return reverse("user_list")


class UserDeleteView(DeleteView):
    """Delete a user.  POST only; GET is explicitly disabled."""

    model = User
    template_name = "django_scantron/user_delete.html"
    slug_field = "slug"
    slug_url_kwarg = "slug"
    pk_url_kwarg = "pk"
    context_object_name = "user"

    def __init__(self, **kwargs):
        return super().__init__(**kwargs)

    def dispatch(self, *args, **kwargs):
        return super().dispatch(*args, **kwargs)

    def get(self, request, *args, **kwargs):
        # Behavioural override: deletion must not be reachable via GET.
        raise Http404

    def post(self, request, *args, **kwargs):
        return super().post(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        return super().delete(request, *args, **kwargs)

    def get_object(self, queryset=None):
        return super().get_object(queryset)

    def get_queryset(self):
        return super().get_queryset()

    def get_slug_field(self):
        return super().get_slug_field()

    def get_context_data(self, **kwargs):
        ret = super().get_context_data(**kwargs)
        return ret

    def get_context_object_name(self, obj):
        return super().get_context_object_name(obj)

    def render_to_response(self, context, **response_kwargs):
        return super().render_to_response(context, **response_kwargs)

    def get_template_names(self):
        return super().get_template_names()

    def get_success_url(self):
        return reverse("user_list")
31.893204
114
0.677118
1,167
9,855
5.457584
0.089117
0.100173
0.066729
0.072853
0.841419
0.761972
0.661799
0.624274
0.584393
0.584393
0
0.00141
0.208524
9,855
308
115
31.996753
0.815128
0
0
0.556522
0
0
0.059056
0.016032
0
0
0
0
0
1
0.291304
false
0.013043
0.034783
0.256522
0.782609
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
15de4c38fd8d75dc47eebaf3b7b2767d1147475d
48
py
Python
web/src/tv.py
javadan/bucket
69214cf5657eeb715f7ead01736a2c1c2a00260b
[ "MIT" ]
1
2020-04-22T09:05:09.000Z
2020-04-22T09:05:09.000Z
web/src/tv.py
javadan/bucket
69214cf5657eeb715f7ead01736a2c1c2a00260b
[ "MIT" ]
5
2018-10-15T13:33:32.000Z
2018-10-24T15:15:19.000Z
web/src/tv.py
javadan/bucket
69214cf5657eeb715f7ead01736a2c1c2a00260b
[ "MIT" ]
2
2018-09-16T19:09:22.000Z
2020-12-09T10:39:44.000Z
# https://www.youtube.com/watch?v=7qn7VnXZb8I
12
45
0.729167
7
48
5
1
0
0
0
0
0
0
0
0
0
0
0.068182
0.083333
48
3
46
16
0.727273
0.895833
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
15e0d8dfde6b78cbd17d55f7963e8c78ccb99644
178
py
Python
tests/conftest.py
xingkong0113/bookworm
7214067f48e7a951198806a1f9170e3fd8fc0cce
[ "MIT" ]
36
2020-11-15T03:21:39.000Z
2022-03-05T01:11:26.000Z
tests/conftest.py
xingkong0113/bookworm
7214067f48e7a951198806a1f9170e3fd8fc0cce
[ "MIT" ]
90
2020-10-06T14:46:07.000Z
2022-03-31T03:03:34.000Z
tests/conftest.py
xingkong0113/bookworm
7214067f48e7a951198806a1f9170e3fd8fc0cce
[ "MIT" ]
20
2020-09-30T17:40:44.000Z
2022-03-17T19:59:53.000Z
import pytest
from pathlib import Path


@pytest.fixture(scope="function", autouse=True)
def asset():
    """Yield a helper that maps an asset file name to its absolute path string."""
    assets_dir = Path(__file__).parent / "assets"

    def _resolve(filename):
        return str(assets_dir / filename)

    yield _resolve
22.25
75
0.741573
23
178
5.565217
0.826087
0
0
0
0
0
0
0
0
0
0
0
0.134831
178
7
76
25.428571
0.831169
0
0
0
0
0
0.078652
0
0
0
0
0
0
1
0.2
true
0
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
15f10d0186cf91dde1118d5211ba845a80e507d4
127
py
Python
sources/cython_setup.py
ousttrue/PMCAplus
e1a0cec3c6a51b791599a19818826a6bbb442b64
[ "Info-ZIP" ]
1
2019-09-21T06:33:21.000Z
2019-09-21T06:33:21.000Z
sources/cython_setup.py
ousttrue/PMCAplus
e1a0cec3c6a51b791599a19818826a6bbb442b64
[ "Info-ZIP" ]
null
null
null
sources/cython_setup.py
ousttrue/PMCAplus
e1a0cec3c6a51b791599a19818826a6bbb442b64
[ "Info-ZIP" ]
null
null
null
# Build script: compiles the ``PMCA.pyx`` Cython source into a C extension
# module (typically run as ``python cython_setup.py build_ext``).
from distutils.core import setup, Extension
from Cython.Build import cythonize

# ``cythonize`` translates the .pyx file and returns the Extension list for
# distutils.  ``Extension`` is imported but unused here -- presumably kept
# for manually defined extensions; left untouched.
setup(ext_modules=cythonize("PMCA.pyx"))
21.166667
44
0.779528
17
127
5.764706
0.764706
0
0
0
0
0
0
0
0
0
0
0
0.133858
127
5
45
25.4
0.890909
0
0
0
0
0
0.066116
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c6331a953e68f83c6cade89f45630a6702e60ff0
4,689
py
Python
pywick/optimizers/sign_internal_decay.py
ashishpatel26/pywick
1afffd1c21c2b188836d3599e802146182757bb5
[ "MIT" ]
2
2020-11-28T07:56:09.000Z
2021-11-08T09:30:39.000Z
pywick/optimizers/sign_internal_decay.py
ashishpatel26/pywick
1afffd1c21c2b188836d3599e802146182757bb5
[ "MIT" ]
null
null
null
pywick/optimizers/sign_internal_decay.py
ashishpatel26/pywick
1afffd1c21c2b188836d3599e802146182757bb5
[ "MIT" ]
null
null
null
# Source: https://github.com/cydonia999/AddSign_PowerSign_in_PyTorch/tree/master/torch/optim
import math


class _SignInternalDecay(object):
    """Base class for internal decays for PowerSign and AddSign optimizers.

    Arguments:
        T_max (int): the total number of training steps used to compute
            internal decays.

    Raises:
        ValueError: if ``T_max`` is less than 1.
    """

    def __init__(self, T_max):
        if T_max < 1:
            raise ValueError('T_max should be >= 1.')
        self.T_max = T_max


class LinearInternalDecay(_SignInternalDecay):
    """Linear decay used internally by PowerSign and AddSign optimizers.

    Proposed in `Neural Optimizer Search with Reinforcement Learning`_.

    Arguments:
        T_max (int): the total number of training steps used to compute
            internal decays.

    .. _Neural Optimizer Search with Reinforcement Learning:
        https://arxiv.org/abs/1709.07417
    """

    def __init__(self, T_max):
        super(LinearInternalDecay, self).__init__(T_max)

    def __call__(self, step):
        """Return the linear decay ``1 - step / T_max`` at training step *step*."""
        if step is None:
            raise ValueError("step is required for linear_decay.")
        if step < 0:
            raise ValueError("step should be >= 0.")
        # Steps past T_max are clamped, so the decay bottoms out at 0.
        clipped = min(step, self.T_max)
        return 1 - float(clipped) / float(self.T_max)


class CosineInternalDecay(_SignInternalDecay):
    """Cyclical (cosine) decay used internally by PowerSign and AddSign.

    Proposed in `Neural Optimizer Search with Reinforcement Learning`_.

    Arguments:
        T_max (int): the total number of training steps used to compute
            internal decays.
        num_periods: number of periods of cosine from 0 to T_max
            (default: 0.5).
        zero_after: if not None, number after which 0 is returned.

    .. _Neural Optimizer Search with Reinforcement Learning:
        https://arxiv.org/abs/1709.07417
    """

    def __init__(self, T_max, num_periods=0.5, zero_after=None):
        super(CosineInternalDecay, self).__init__(T_max)
        if zero_after is not None and zero_after < 0:
            raise ValueError("zero_after should be >= 0.")
        self.num_periods = num_periods
        self.zero_after = zero_after

    def __call__(self, step):
        """Return ``0.5 * (1 + cos(2 * pi * num_periods * step / T_max))`` at *step*."""
        if step is None:
            raise ValueError("step is required for cosine_decay.")
        if step < 0:
            raise ValueError("step should be >= 0.")
        clipped = min(step, self.T_max)
        frac = 2.0 * self.num_periods * clipped / float(self.T_max)
        # After ``zero_after`` full periods the decay is forced to zero.
        if self.zero_after is not None and frac >= 2 * self.zero_after:
            return 0.0
        return 0.5 * (1 + math.cos(math.pi * frac))


class RestartCosineInternalDecay(_SignInternalDecay):
    """Restarting cosine decay used internally by PowerSign and AddSign.

    Proposed in `Neural Optimizer Search with Reinforcement Learning`_.

    Arguments:
        T_max (int): the total number of training steps used to compute
            internal decays.
        num_periods: number of half periods of cosine from 0 to T_max
            (default: 1).
        zero_after: if not None, number after which 0 is returned.

    .. _Neural Optimizer Search with Reinforcement Learning:
        https://arxiv.org/abs/1709.07417
    """

    def __init__(self, T_max, num_periods=1, zero_after=None):
        super(RestartCosineInternalDecay, self).__init__(T_max)
        if zero_after is not None and zero_after < 0:
            raise ValueError("zero_after should be >= 0.")
        self.num_periods = num_periods
        self.zero_after = zero_after

    def __call__(self, step):
        """Return ``0.5 * (1 + cos(pi * (num_periods * step) % T_max / T_max))`` at *step*."""
        if step is None:
            raise ValueError("step is required for cosine_decay.")
        if step < 0:
            raise ValueError("step should be >= 0.")
        clipped = min(step, self.T_max)
        # Modulo restarts the cosine each time num_periods * step wraps T_max.
        frac = (self.num_periods * clipped) % self.T_max / float(self.T_max)
        if self.zero_after is not None and frac >= 2 * self.zero_after:
            return 0.0
        return 0.5 * (1 + math.cos(math.pi * frac))
36.632813
92
0.635956
632
4,689
4.537975
0.15981
0.039052
0.033473
0.052301
0.783124
0.770572
0.741283
0.741283
0.741283
0.718271
0
0.022216
0.280017
4,689
127
93
36.92126
0.82731
0.443165
0
0.679245
0
0
0.099745
0
0
0
0
0
0
1
0.132075
false
0
0.018868
0
0.320755
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c638691d1791359175ecc194c4d429c24a2e085c
168
py
Python
someip_fuzzer/types.py
cfanatic/someip-protocol-fuzzer
5977f62580f02a95568c0715dd6bb2eb804d0a81
[ "MIT" ]
null
null
null
someip_fuzzer/types.py
cfanatic/someip-protocol-fuzzer
5977f62580f02a95568c0715dd6bb2eb804d0a81
[ "MIT" ]
null
null
null
someip_fuzzer/types.py
cfanatic/someip-protocol-fuzzer
5977f62580f02a95568c0715dd6bb2eb804d0a81
[ "MIT" ]
null
null
null
# Custom exception types for the SOME/IP fuzzer.  The semantics below are
# inferred from the class names only -- confirm against the raise sites
# elsewhere in the package.


class NoHostError(Exception):
    """Presumably raised when the target host is unavailable -- confirm at raise sites."""
    pass


class NoHeartbeatError(Exception):
    """Presumably raised when an expected heartbeat is not received -- confirm at raise sites."""
    pass


class NoSudoError(Exception):
    """Presumably raised when required superuser privileges are missing -- confirm at raise sites."""
    pass


class ServiceShutdown(Exception):
    """Presumably signals that the service is shutting down -- confirm at raise sites."""
    pass
14
34
0.744048
16
168
7.8125
0.4375
0.416
0.432
0
0
0
0
0
0
0
0
0
0.184524
168
11
35
15.272727
0.912409
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
d69a02be1158575290d7cea87767e9c96aa3e52c
65
py
Python
cookbook_se/recipes/01_basic/lp_scheduling.py
nuclyde-io/flytesnacks
d0349be8bf3c66ecdcbf4f71ea12c8575a6c4cc0
[ "Apache-2.0" ]
null
null
null
cookbook_se/recipes/01_basic/lp_scheduling.py
nuclyde-io/flytesnacks
d0349be8bf3c66ecdcbf4f71ea12c8575a6c4cc0
[ "Apache-2.0" ]
null
null
null
cookbook_se/recipes/01_basic/lp_scheduling.py
nuclyde-io/flytesnacks
d0349be8bf3c66ecdcbf4f71ea12c8575a6c4cc0
[ "Apache-2.0" ]
null
null
null
""" 04: Scheduling Launch Plans --------------------------- """
10.833333
27
0.353846
4
65
5.75
1
0
0
0
0
0
0
0
0
0
0
0.035088
0.123077
65
5
28
13
0.368421
0.846154
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
d6c38cb764dc59c24b2a8cd5b8df267c32e9be09
12,676
py
Python
sdk/python/pulumi_alicloud/cloudfirewall/control_policy_order.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
42
2019-03-18T06:34:37.000Z
2022-03-24T07:08:57.000Z
sdk/python/pulumi_alicloud/cloudfirewall/control_policy_order.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
152
2019-04-15T21:03:44.000Z
2022-03-29T18:00:57.000Z
sdk/python/pulumi_alicloud/cloudfirewall/control_policy_order.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
3
2020-08-26T17:30:07.000Z
2021-07-05T01:37:45.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ = ['ControlPolicyOrderArgs', 'ControlPolicyOrder'] @pulumi.input_type class ControlPolicyOrderArgs: def __init__(__self__, *, acl_uuid: pulumi.Input[str], direction: pulumi.Input[str], order: Optional[pulumi.Input[int]] = None): """ The set of arguments for constructing a ControlPolicyOrder resource. :param pulumi.Input[str] acl_uuid: The unique ID of the access control policy. :param pulumi.Input[str] direction: Direction. Valid values: `in`, `out`. :param pulumi.Input[int] order: The priority of the access control policy. The priority value starts from 1. A small priority value indicates a high priority. **NOTE:** The value of -1 indicates the lowest priority. """ pulumi.set(__self__, "acl_uuid", acl_uuid) pulumi.set(__self__, "direction", direction) if order is not None: pulumi.set(__self__, "order", order) @property @pulumi.getter(name="aclUuid") def acl_uuid(self) -> pulumi.Input[str]: """ The unique ID of the access control policy. """ return pulumi.get(self, "acl_uuid") @acl_uuid.setter def acl_uuid(self, value: pulumi.Input[str]): pulumi.set(self, "acl_uuid", value) @property @pulumi.getter def direction(self) -> pulumi.Input[str]: """ Direction. Valid values: `in`, `out`. """ return pulumi.get(self, "direction") @direction.setter def direction(self, value: pulumi.Input[str]): pulumi.set(self, "direction", value) @property @pulumi.getter def order(self) -> Optional[pulumi.Input[int]]: """ The priority of the access control policy. The priority value starts from 1. A small priority value indicates a high priority. **NOTE:** The value of -1 indicates the lowest priority. 
""" return pulumi.get(self, "order") @order.setter def order(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "order", value) @pulumi.input_type class _ControlPolicyOrderState: def __init__(__self__, *, acl_uuid: Optional[pulumi.Input[str]] = None, direction: Optional[pulumi.Input[str]] = None, order: Optional[pulumi.Input[int]] = None): """ Input properties used for looking up and filtering ControlPolicyOrder resources. :param pulumi.Input[str] acl_uuid: The unique ID of the access control policy. :param pulumi.Input[str] direction: Direction. Valid values: `in`, `out`. :param pulumi.Input[int] order: The priority of the access control policy. The priority value starts from 1. A small priority value indicates a high priority. **NOTE:** The value of -1 indicates the lowest priority. """ if acl_uuid is not None: pulumi.set(__self__, "acl_uuid", acl_uuid) if direction is not None: pulumi.set(__self__, "direction", direction) if order is not None: pulumi.set(__self__, "order", order) @property @pulumi.getter(name="aclUuid") def acl_uuid(self) -> Optional[pulumi.Input[str]]: """ The unique ID of the access control policy. """ return pulumi.get(self, "acl_uuid") @acl_uuid.setter def acl_uuid(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "acl_uuid", value) @property @pulumi.getter def direction(self) -> Optional[pulumi.Input[str]]: """ Direction. Valid values: `in`, `out`. """ return pulumi.get(self, "direction") @direction.setter def direction(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "direction", value) @property @pulumi.getter def order(self) -> Optional[pulumi.Input[int]]: """ The priority of the access control policy. The priority value starts from 1. A small priority value indicates a high priority. **NOTE:** The value of -1 indicates the lowest priority. 
""" return pulumi.get(self, "order") @order.setter def order(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "order", value) class ControlPolicyOrder(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, acl_uuid: Optional[pulumi.Input[str]] = None, direction: Optional[pulumi.Input[str]] = None, order: Optional[pulumi.Input[int]] = None, __props__=None): """ Provides a Cloud Firewall Control Policy resource. For information about Cloud Firewall Control Policy Order and how to use it, see [What is Control Policy Order](https://www.alibabacloud.com/help/doc-detail/138867.htm). > **NOTE:** Available in v1.130.0+. ## Example Usage Basic Usage ```python import pulumi import pulumi_alicloud as alicloud example1 = alicloud.cloudfirewall.ControlPolicy("example1", application_name="ANY", acl_action="accept", description="example", destination_type="net", destination="100.1.1.0/24", direction="out", proto="ANY", source="1.2.3.0/24", source_type="net") example2 = alicloud.cloudfirewall.ControlPolicyOrder("example2", acl_uuid=example1.acl_uuid, direction=example1.direction, order=1) ``` ## Import Cloud Firewall Control Policy Order can be imported using the id, e.g. ```sh $ pulumi import alicloud:cloudfirewall/controlPolicyOrder:ControlPolicyOrder example <acl_uuid>:<direction> ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] acl_uuid: The unique ID of the access control policy. :param pulumi.Input[str] direction: Direction. Valid values: `in`, `out`. :param pulumi.Input[int] order: The priority of the access control policy. The priority value starts from 1. A small priority value indicates a high priority. **NOTE:** The value of -1 indicates the lowest priority. """ ... 
@overload def __init__(__self__, resource_name: str, args: ControlPolicyOrderArgs, opts: Optional[pulumi.ResourceOptions] = None): """ Provides a Cloud Firewall Control Policy resource. For information about Cloud Firewall Control Policy Order and how to use it, see [What is Control Policy Order](https://www.alibabacloud.com/help/doc-detail/138867.htm). > **NOTE:** Available in v1.130.0+. ## Example Usage Basic Usage ```python import pulumi import pulumi_alicloud as alicloud example1 = alicloud.cloudfirewall.ControlPolicy("example1", application_name="ANY", acl_action="accept", description="example", destination_type="net", destination="100.1.1.0/24", direction="out", proto="ANY", source="1.2.3.0/24", source_type="net") example2 = alicloud.cloudfirewall.ControlPolicyOrder("example2", acl_uuid=example1.acl_uuid, direction=example1.direction, order=1) ``` ## Import Cloud Firewall Control Policy Order can be imported using the id, e.g. ```sh $ pulumi import alicloud:cloudfirewall/controlPolicyOrder:ControlPolicyOrder example <acl_uuid>:<direction> ``` :param str resource_name: The name of the resource. :param ControlPolicyOrderArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(ControlPolicyOrderArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, acl_uuid: Optional[pulumi.Input[str]] = None, direction: Optional[pulumi.Input[str]] = None, order: Optional[pulumi.Input[int]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = ControlPolicyOrderArgs.__new__(ControlPolicyOrderArgs) if acl_uuid is None and not opts.urn: raise TypeError("Missing required property 'acl_uuid'") __props__.__dict__["acl_uuid"] = acl_uuid if direction is None and not opts.urn: raise TypeError("Missing required property 'direction'") __props__.__dict__["direction"] = direction __props__.__dict__["order"] = order super(ControlPolicyOrder, __self__).__init__( 'alicloud:cloudfirewall/controlPolicyOrder:ControlPolicyOrder', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, acl_uuid: Optional[pulumi.Input[str]] = None, direction: Optional[pulumi.Input[str]] = None, order: Optional[pulumi.Input[int]] = None) -> 'ControlPolicyOrder': """ Get an existing ControlPolicyOrder resource's state with the given name, id, and optional extra properties used to qualify the lookup. 
:param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] acl_uuid: The unique ID of the access control policy. :param pulumi.Input[str] direction: Direction. Valid values: `in`, `out`. :param pulumi.Input[int] order: The priority of the access control policy. The priority value starts from 1. A small priority value indicates a high priority. **NOTE:** The value of -1 indicates the lowest priority. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _ControlPolicyOrderState.__new__(_ControlPolicyOrderState) __props__.__dict__["acl_uuid"] = acl_uuid __props__.__dict__["direction"] = direction __props__.__dict__["order"] = order return ControlPolicyOrder(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="aclUuid") def acl_uuid(self) -> pulumi.Output[str]: """ The unique ID of the access control policy. """ return pulumi.get(self, "acl_uuid") @property @pulumi.getter def direction(self) -> pulumi.Output[str]: """ Direction. Valid values: `in`, `out`. """ return pulumi.get(self, "direction") @property @pulumi.getter def order(self) -> pulumi.Output[Optional[int]]: """ The priority of the access control policy. The priority value starts from 1. A small priority value indicates a high priority. **NOTE:** The value of -1 indicates the lowest priority. """ return pulumi.get(self, "order")
40.113924
223
0.629931
1,456
12,676
5.302198
0.130495
0.061269
0.050777
0.032642
0.767876
0.744041
0.729663
0.702073
0.683549
0.681477
0
0.008518
0.268381
12,676
315
224
40.24127
0.823916
0.414247
0
0.59589
1
0
0.087244
0.012617
0
0
0
0
0
1
0.150685
false
0.006849
0.034247
0
0.273973
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
d6e97dfd31cc3d2e555d8768beb1f13e90979183
150
py
Python
app/thing/__init__.py
MashSoftware/flask-ui-template
524c8e6e06e591081855f54a2bcb4eaee0c56a97
[ "MIT" ]
1
2021-03-03T10:21:10.000Z
2021-03-03T10:21:10.000Z
app/thing/__init__.py
MashSoftware/flask-ui-template
524c8e6e06e591081855f54a2bcb4eaee0c56a97
[ "MIT" ]
null
null
null
app/thing/__init__.py
MashSoftware/flask-ui-template
524c8e6e06e591081855f54a2bcb4eaee0c56a97
[ "MIT" ]
null
null
null
from flask import Blueprint

# Blueprint for the "thing" section; its templates live one directory up,
# in ``../templates/thing`` relative to this package.
bp = Blueprint("thing", __name__, template_folder="../templates/thing")

# Imported at the bottom, after ``bp`` exists, because ``routes`` needs the
# blueprint object at import time -- the standard Flask pattern for avoiding
# a circular import.  NOTE(review): ``routes`` itself is not visible here;
# confirm it registers handlers on ``bp``.
from app.thing import routes  # noqa: E402,F401
25
71
0.753333
20
150
5.4
0.75
0
0
0
0
0
0
0
0
0
0
0.045802
0.126667
150
5
72
30
0.778626
0.1
0
0
0
0
0.172932
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
5
ba4f38b366f3e635b69e0b50ae9b3308fd0f9e40
82
py
Python
tests/b314.py
cclauss/sentry-flake8
862cde9c07ce08a1aca212e59a7d31f286e300a0
[ "MIT" ]
null
null
null
tests/b314.py
cclauss/sentry-flake8
862cde9c07ce08a1aca212e59a7d31f286e300a0
[ "MIT" ]
null
null
null
tests/b314.py
cclauss/sentry-flake8
862cde9c07ce08a1aca212e59a7d31f286e300a0
[ "MIT" ]
null
null
null
# Lint-rule fixture (``tests/b314.py``): a minimal module whose statements are
# the test input for check B314.  Do not "fix" this code -- the linter is
# expected to flag it.  NOTE(review): intent inferred from the test-suite
# path and the message text; confirm against the checker implementation.
from __future__ import absolute_import

print("print statements are not allowed")
20.5
41
0.829268
11
82
5.727273
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.121951
82
3
42
27.333333
0.875
0
0
0
0
0
0.390244
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
ba51e110f8979c8e167232f9f4a8ad4bc7748fe1
84
py
Python
src/sandbox/print_error.py
ospiper/Sandbox-Runner
d6a463fa7744ea2a88553eef197b6f8a9f4d91f0
[ "MIT" ]
null
null
null
src/sandbox/print_error.py
ospiper/Sandbox-Runner
d6a463fa7744ea2a88553eef197b6f8a9f4d91f0
[ "MIT" ]
null
null
null
src/sandbox/print_error.py
ospiper/Sandbox-Runner
d6a463fa7744ea2a88553eef197b6f8a9f4d91f0
[ "MIT" ]
null
null
null
import sys def error(*args, **kwargs): print(*args, file=sys.stderr, **kwargs)
16.8
43
0.654762
12
84
4.583333
0.75
0
0
0
0
0
0
0
0
0
0
0
0.154762
84
5
43
16.8
0.774648
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
0.666667
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
0
0
0
5
ba814d6c5cd024f4b71948615b093a236d8bab76
137
py
Python
src/abaqus/Material/TestData/UniaxialTestDataArray.py
Haiiliin/PyAbaqus
f20db6ebea19b73059fe875a53be370253381078
[ "MIT" ]
7
2022-01-21T09:15:45.000Z
2022-02-15T09:31:58.000Z
src/abaqus/Material/TestData/UniaxialTestDataArray.py
Haiiliin/PyAbaqus
f20db6ebea19b73059fe875a53be370253381078
[ "MIT" ]
null
null
null
src/abaqus/Material/TestData/UniaxialTestDataArray.py
Haiiliin/PyAbaqus
f20db6ebea19b73059fe875a53be370253381078
[ "MIT" ]
null
null
null
from .UniaxialTestData import UniaxialTestData class UniaxialTestDataArray(list[UniaxialTestData]): def findAt(self): pass
19.571429
52
0.766423
12
137
8.75
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.167883
137
6
53
22.833333
0.921053
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0.25
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
ba9aa252bbd7bf77fe945acc9cacbee53b68dfd8
170
py
Python
core/sentry_processors.py
thibaudcolas/great-international-ui
a5b05edeb3e16b01ef379b239dfbd5d10e2fc533
[ "MIT" ]
null
null
null
core/sentry_processors.py
thibaudcolas/great-international-ui
a5b05edeb3e16b01ef379b239dfbd5d10e2fc533
[ "MIT" ]
183
2018-06-26T09:23:59.000Z
2019-08-01T11:22:42.000Z
core/sentry_processors.py
thibaudcolas/great-international-ui
a5b05edeb3e16b01ef379b239dfbd5d10e2fc533
[ "MIT" ]
1
2019-03-09T11:21:28.000Z
2019-03-09T11:21:28.000Z
from raven.processors import SanitizePasswordsProcessor class SanitizeEmailMessagesProcessor(SanitizePasswordsProcessor): KEYS = frozenset([ 'body', ])
21.25
65
0.758824
11
170
11.727273
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.170588
170
7
66
24.285714
0.914894
0
0
0
0
0
0.023529
0
0
0
0
0
0
1
0
false
0.4
0.2
0
0.6
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
5
ba9cda60d63dba3068cb547742aad23b9f92ff10
90
py
Python
basic-data-types/Number.py
thecomputerguy/full-speed-python
6ef6e5006e5a89e67c430cb5320e088e8a3410b0
[ "MIT" ]
null
null
null
basic-data-types/Number.py
thecomputerguy/full-speed-python
6ef6e5006e5a89e67c430cb5320e088e8a3410b0
[ "MIT" ]
null
null
null
basic-data-types/Number.py
thecomputerguy/full-speed-python
6ef6e5006e5a89e67c430cb5320e088e8a3410b0
[ "MIT" ]
null
null
null
a = 5 print(type(a)) b = 5.5 print(type(b)) print(a+b) print((a+b) * 2) print(2+2+4-2/3)
10
16
0.555556
24
90
2.083333
0.333333
0.12
0.4
0.32
0
0
0
0
0
0
0
0.118421
0.155556
90
9
17
10
0.539474
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.714286
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
ba9d5e584ec6b87f106dcc132d714f36c568f0dd
264
py
Python
form_schema_generator/settings.py
catveloper/dynamic_form_generator
be2704cff5ee0f93461cf6c82e47dc1a39b9a98e
[ "MIT" ]
null
null
null
form_schema_generator/settings.py
catveloper/dynamic_form_generator
be2704cff5ee0f93461cf6c82e47dc1a39b9a98e
[ "MIT" ]
null
null
null
form_schema_generator/settings.py
catveloper/dynamic_form_generator
be2704cff5ee0f93461cf6c82e47dc1a39b9a98e
[ "MIT" ]
null
null
null
from django.conf import settings form_schema_generator_settings = { 'MODEL_CHOICES_API': 'api:form_schema:model_choices' # TODO: 초이스 기본값 셀렉트, 라디오 변경가능하도록 옵션제공하기 } form_schema_generator_settings.update(getattr(settings, 'FORM_SCHEMA_GENERATOR_SETTINGS'))
29.333333
90
0.80303
35
264
5.685714
0.571429
0.201005
0.286432
0.407035
0.351759
0
0
0
0
0
0
0
0.113636
264
8
91
33
0.850427
0.140152
0
0
0
0
0.337778
0.262222
0
0
0
0.125
0
1
0
false
0
0.2
0
0.2
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
5
baa49bc249f7a285e87b9dd3bb36a48c9eb0e69a
359
py
Python
card.py
lucasege/221project
f1f0b6d5a7ac46a4633bc7446934d14a86fd9ef0
[ "MIT" ]
null
null
null
card.py
lucasege/221project
f1f0b6d5a7ac46a4633bc7446934d14a86fd9ef0
[ "MIT" ]
null
null
null
card.py
lucasege/221project
f1f0b6d5a7ac46a4633bc7446934d14a86fd9ef0
[ "MIT" ]
null
null
null
class Card: def __init__(self, suit, value): self.suit = suit self.value = value def getSuit(self): return self.suit def getValue(self): return self.value def __repr__(self): return str(self.suit) + ", " + str(self.value) def __str__(self): return str(self.suit) + ", " + str(self.value)
22.4375
54
0.568245
45
359
4.266667
0.266667
0.208333
0.145833
0.177083
0.34375
0.34375
0.34375
0.34375
0
0
0
0
0.303621
359
16
55
22.4375
0.768
0
0
0.166667
0
0
0.011111
0
0
0
0
0
0
1
0.416667
false
0
0
0.333333
0.833333
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
bab542828070eb779b9a49ccd9874977f46b8083
6,497
py
Python
snakemake/wrappers/.snakemake.jhwozzn7.merge_bams_wrapper.py
saketkc/EE-546-project
fb7eacd90f6c0a2cb3061837ec5427a14f521aa5
[ "BSD-2-Clause" ]
1
2020-11-02T07:05:09.000Z
2020-11-02T07:05:09.000Z
snakemake/wrappers/.snakemake.jhwozzn7.merge_bams_wrapper.py
saketkc/EE-546-project
fb7eacd90f6c0a2cb3061837ec5427a14f521aa5
[ "BSD-2-Clause" ]
null
null
null
snakemake/wrappers/.snakemake.jhwozzn7.merge_bams_wrapper.py
saketkc/EE-546-project
fb7eacd90f6c0a2cb3061837ec5427a14f521aa5
[ "BSD-2-Clause" ]
null
null
null
######## Snakemake header ######## import sys; sys.path.append("/home/cmb-panasas2/skchoudh/software_frozen/anaconda27/envs/riboraptor/lib/python3.5/site-packages"); import pickle; snakemake = pickle.loads(b'\x80\x03csnakemake.script\nSnakemake\nq\x00)\x81q\x01}q\x02(X\x05\x00\x00\x00inputq\x03csnakemake.io\nInputFiles\nq\x04)\x81q\x05(X\x17\x00\x00\x00bams_srr/SRR1062639.bamq\x06X\x17\x00\x00\x00bams_srr/SRR1062640.bamq\x07X\x17\x00\x00\x00bams_srr/SRR1062641.bamq\x08X\x17\x00\x00\x00bams_srr/SRR1062642.bamq\tX\x17\x00\x00\x00bams_srr/SRR1062643.bamq\nX\x17\x00\x00\x00bams_srr/SRR1062644.bamq\x0bX\x17\x00\x00\x00bams_srr/SRR1062645.bamq\x0cX\x17\x00\x00\x00bams_srr/SRR1062646.bamq\rX\x17\x00\x00\x00bams_srr/SRR1062647.bamq\x0eX\x17\x00\x00\x00bams_srr/SRR1062648.bamq\x0fX\x17\x00\x00\x00bams_srr/SRR1062649.bamq\x10X\x17\x00\x00\x00bams_srr/SRR1062650.bamq\x11X\x17\x00\x00\x00bams_srr/SRR1062651.bamq\x12X\x17\x00\x00\x00bams_srr/SRR1062652.bamq\x13X\x17\x00\x00\x00bams_srr/SRR1062653.bamq\x14X\x17\x00\x00\x00bams_srr/SRR1062654.bamq\x15X\x17\x00\x00\x00bams_srr/SRR1062655.bamq\x16X\x17\x00\x00\x00bams_srr/SRR1062656.bamq\x17X\x17\x00\x00\x00bams_srr/SRR1062657.bamq\x18X\x17\x00\x00\x00bams_srr/SRR1062658.bamq\x19X\x17\x00\x00\x00bams_srr/SRR1062659.bamq\x1aX\x17\x00\x00\x00bams_srr/SRR1062660.bamq\x1bX\x17\x00\x00\x00bams_srr/SRR1062661.bamq\x1cX\x17\x00\x00\x00bams_srr/SRR1062662.bamq\x1dX\x17\x00\x00\x00bams_srr/SRR1062663.bamq\x1eX\x17\x00\x00\x00bams_srr/SRR1062664.bamq\x1fX\x17\x00\x00\x00bams_srr/SRR1062665.bamq 
X\x17\x00\x00\x00bams_srr/SRR1062666.bamq!X\x17\x00\x00\x00bams_srr/SRR1062667.bamq"X\x17\x00\x00\x00bams_srr/SRR1062668.bamq#X\x17\x00\x00\x00bams_srr/SRR1062669.bamq$X\x17\x00\x00\x00bams_srr/SRR1062670.bamq%X\x17\x00\x00\x00bams_srr/SRR1062671.bamq&X\x17\x00\x00\x00bams_srr/SRR1062672.bamq\'X\x17\x00\x00\x00bams_srr/SRR1062673.bamq(X\x17\x00\x00\x00bams_srr/SRR1062674.bamq)X\x17\x00\x00\x00bams_srr/SRR1062675.bamq*X\x17\x00\x00\x00bams_srr/SRR1062676.bamq+X\x17\x00\x00\x00bams_srr/SRR1062677.bamq,X\x17\x00\x00\x00bams_srr/SRR1062678.bamq-X\x17\x00\x00\x00bams_srr/SRR1062679.bamq.X\x17\x00\x00\x00bams_srr/SRR1062680.bamq/X\x17\x00\x00\x00bams_srr/SRR1062681.bamq0X\x17\x00\x00\x00bams_srr/SRR1062682.bamq1X\x17\x00\x00\x00bams_srr/SRR1062683.bamq2X\x17\x00\x00\x00bams_srr/SRR1062684.bamq3X\x17\x00\x00\x00bams_srr/SRR1062685.bamq4X\x17\x00\x00\x00bams_srr/SRR1062686.bamq5X\x17\x00\x00\x00bams_srr/SRR1062687.bamq6X\x17\x00\x00\x00bams_srr/SRR1062688.bamq7X\x17\x00\x00\x00bams_srr/SRR1062689.bamq8X\x17\x00\x00\x00bams_srr/SRR1062690.bamq9X\x17\x00\x00\x00bams_srr/SRR1062691.bamq:X\x17\x00\x00\x00bams_srr/SRR1062692.bamq;X\x17\x00\x00\x00bams_srr/SRR1062693.bamq<X\x17\x00\x00\x00bams_srr/SRR1062694.bamq=X\x17\x00\x00\x00bams_srr/SRR1062695.bamq>X\x17\x00\x00\x00bams_srr/SRR1062696.bamq?X\x17\x00\x00\x00bams_srr/SRR1062697.bamq@X\x17\x00\x00\x00bams_srr/SRR1062698.bamqAX\x17\x00\x00\x00bams_srr/SRR1062699.bamqBX\x17\x00\x00\x00bams_srr/SRR1062700.bamqCX\x17\x00\x00\x00bams_srr/SRR1062701.bamqDX\x17\x00\x00\x00bams_srr/SRR1062702.bamqEX\x17\x00\x00\x00bams_srr/SRR1062703.bamqFX\x17\x00\x00\x00bams_srr/SRR1062704.bamqGX\x17\x00\x00\x00bams_srr/SRR1062705.bamqHX\x17\x00\x00\x00bams_srr/SRR1062706.bamqIX\x17\x00\x00\x00bams_srr/SRR1062707.bamqJX\x17\x00\x00\x00bams_srr/SRR1062708.bamqKX\x17\x00\x00\x00bams_srr/SRR1062709.bamqLX\x17\x00\x00\x00bams_srr/SRR1062710.bamqMX\x17\x00\x00\x00bams_srr/SRR1062711.bamqNX\x17\x00\x00\x00bams_srr/SRR1062712.bamqOX\x17\x00\x00\x00bams_srr
/SRR1062713.bamqPX\x17\x00\x00\x00bams_srr/SRR1062714.bamqQX\x17\x00\x00\x00bams_srr/SRR1062715.bamqRX\x17\x00\x00\x00bams_srr/SRR1062716.bamqSX\x17\x00\x00\x00bams_srr/SRR1062717.bamqTX\x17\x00\x00\x00bams_srr/SRR1062718.bamqUX\x17\x00\x00\x00bams_srr/SRR1062719.bamqVX\x17\x00\x00\x00bams_srr/SRR1062720.bamqWX\x17\x00\x00\x00bams_srr/SRR1062721.bamqXX\x17\x00\x00\x00bams_srr/SRR1062722.bamqYX\x17\x00\x00\x00bams_srr/SRR1062723.bamqZX\x17\x00\x00\x00bams_srr/SRR1062724.bamq[X\x17\x00\x00\x00bams_srr/SRR1062725.bamq\\X\x17\x00\x00\x00bams_srr/SRR1062726.bamq]X\x17\x00\x00\x00bams_srr/SRR1062727.bamq^X\x17\x00\x00\x00bams_srr/SRR1062728.bamq_X\x17\x00\x00\x00bams_srr/SRR1062729.bamq`X\x17\x00\x00\x00bams_srr/SRR1062730.bamqaX\x17\x00\x00\x00bams_srr/SRR1062731.bamqbX\x17\x00\x00\x00bams_srr/SRR1062732.bamqcX\x17\x00\x00\x00bams_srr/SRR1062733.bamqdX\x17\x00\x00\x00bams_srr/SRR1062734.bamqeX\x17\x00\x00\x00bams_srr/SRR1062735.bamqfX\x17\x00\x00\x00bams_srr/SRR1062736.bamqgX\x17\x00\x00\x00bams_srr/SRR1062737.bamqhX\x17\x00\x00\x00bams_srr/SRR1062738.bamqie}qjX\x06\x00\x00\x00_namesqk}qlsbX\t\x00\x00\x00wildcardsqmcsnakemake.io\nWildcards\nqn)\x81qoX\t\x00\x00\x00SRX399824qpa}qq(hk}qrX\x06\x00\x00\x00sampleqsK\x00N\x86qtsX\x06\x00\x00\x00samplequhpubX\x07\x00\x00\x00threadsqvK\x01X\x06\x00\x00\x00configqw}qxX\x0b\x00\x00\x00config_pathqyX\x1b\x00\x00\x00configs/GRCz10_SRP034750.pyqzsX\x03\x00\x00\x00logq{csnakemake.io\nLog\nq|)\x81q}}q~hk}q\x7fsbX\x06\x00\x00\x00outputq\x80csnakemake.io\nOutputFiles\nq\x81)\x81q\x82X\x12\x00\x00\x00bams/SRX399824.bamq\x83a}q\x84hk}q\x85sbX\x04\x00\x00\x00ruleq\x86X\n\x00\x00\x00merge_bamsq\x87X\x06\x00\x00\x00paramsq\x88csnakemake.io\nParams\nq\x89)\x81q\x8aX\x04\x00\x00\x00/tmpq\x8ba}q\x8c(X\x07\x00\x00\x00tmp_dirq\x8dh\x8bhk}q\x8eh\x8dK\x00N\x86q\x8fsubX\t\x00\x00\x00resourcesq\x90csnakemake.io\nResources\nq\x91)\x81q\x92(K\x01K\x01e}q\x93(X\x06\x00\x00\x00_coresq\x94K\x01X\x06\x00\x00\x00_nodesq\x95K\x01hk}q\x96(h\x94K\x00N\x86q\x97h
\x95K\x01N\x86q\x98uubub.'); from snakemake.logging import logger; logger.printshellcmds = True ######## Original script ######### import os import tempfile from snakemake.shell import shell if len(snakemake.input) > 1: with tempfile.TemporaryDirectory(dir=snakemake.params.tmp_dir) as temp_dir: cmd = ' -in '.join(snakemake.input) shell(r'''bamtools merge -in {cmd} -out {snakemake.output}.unsorted \ && samtools sort -@ {snakemake.threads} \ -T {temp_dir}/{snakemake.wildcards.sample}_merge_bam \ -o {snakemake.output} {snakemake.output}.unsorted \ && samtools index {snakemake.output} \ && yes | rm -rf {snakemake.output}.unsorted''') elif len(snakemake.input) == 1: source = os.path.abspath(str(snakemake.input[0])) destination = os.path.abspath(str(snakemake.output)) shell('''cp {source} {destination} && cp {source}.bai {destination}.bai''')
295.318182
5,604
0.808989
1,087
6,497
4.730451
0.312787
0.145858
0.255348
0.311163
0.426293
0.217036
0.135356
0
0
0
0
0.291905
0.030322
6,497
21
5,605
309.380952
0.524286
0.005079
0
0
0
0.117647
0.335408
0.288892
0
0
0
0
0
1
0
false
0
0.235294
0
0.235294
0.058824
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
baea98dfb55d6af67ac976f15456d6ab0ab1fb58
107
py
Python
src/models/__init__.py
sudaraka/learn-flask-mongoengine
d4bcae3e4a956f544b7d087a955b18edab3b4e0f
[ "BSD-2-Clause" ]
null
null
null
src/models/__init__.py
sudaraka/learn-flask-mongoengine
d4bcae3e4a956f544b7d087a955b18edab3b4e0f
[ "BSD-2-Clause" ]
null
null
null
src/models/__init__.py
sudaraka/learn-flask-mongoengine
d4bcae3e4a956f544b7d087a955b18edab3b4e0f
[ "BSD-2-Clause" ]
null
null
null
""" Models modules """ from .post import Post, BlogPost, Video, Image, Quote from .comment import Comment
21.4
53
0.728972
14
107
5.571429
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.158879
107
4
54
26.75
0.866667
0.130841
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
baf1843415058c898dd3d4f8fa99755c6095b180
13
py
Python
examples/bad.py
orsinium-labs/mypy-test
7ab0fa440dee37b441824eb24ac9b0af2ebde9c5
[ "MIT" ]
3
2022-01-19T10:46:48.000Z
2022-03-20T18:44:07.000Z
examples/bad.py
orsinium-labs/mypy-test
7ab0fa440dee37b441824eb24ac9b0af2ebde9c5
[ "MIT" ]
null
null
null
examples/bad.py
orsinium-labs/mypy-test
7ab0fa440dee37b441824eb24ac9b0af2ebde9c5
[ "MIT" ]
1
2022-01-19T10:45:37.000Z
2022-01-19T10:45:37.000Z
a = 1 a = ""
4.333333
6
0.230769
3
13
1
0.666667
0
0
0
0
0
0
0
0
0
0
0.142857
0.461538
13
2
7
6.5
0.285714
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
246a168d5e0cc167d538e475a7f56d774c078e90
591
py
Python
Desafios/desafio_009.py
romulogoleniesky/Python_C_E_V
2dcf5fb3505a20443788a284c52114c6434118ce
[ "MIT" ]
null
null
null
Desafios/desafio_009.py
romulogoleniesky/Python_C_E_V
2dcf5fb3505a20443788a284c52114c6434118ce
[ "MIT" ]
null
null
null
Desafios/desafio_009.py
romulogoleniesky/Python_C_E_V
2dcf5fb3505a20443788a284c52114c6434118ce
[ "MIT" ]
null
null
null
# DESAFIO 009 - CRIANDO UMA TABUADA: n = float(input('Digite um número para ver a sua tabuada: ')) print('='*10) print('{:.0f} x 1 = {:.0f}'.format(n, (n*1))) print('{:.0f} x 2 = {:.0f}'.format(n, (n*2))) print('{:.0f} x 3 = {:.0f}'.format(n, (n*3))) print('{:.0f} x 4 = {:.0f}'.format(n, (n*4))) print('{:.0f} x 5 = {:.0f}'.format(n, (n*5))) print('{:.0f} x 6 = {:.0f}'.format(n, (n*6))) print('{:.0f} x 7 = {:.0f}'.format(n, (n*7))) print('{:.0f} x 8 = {:.0f}'.format(n, (n*8))) print('{:.0f} x 9 = {:.0f}'.format(n, (n*9))) print('{:.0f} x 10 = {:.0f}'.format(n, (n*10))) print('='*10)
34.764706
61
0.483926
110
591
2.6
0.263636
0.244755
0.27972
0.34965
0
0
0
0
0
0
0
0.096267
0.138748
591
16
62
36.9375
0.465619
0.05753
0
0.153846
0
0
0.421622
0
0
0
0
0
0
1
0
false
0
0
0
0
0.923077
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
79edc348979e3ff4e8057ad61c60aff40daa9124
37,580
py
Python
sdk/python/pulumi_google_native/cloudkms/v1/outputs.py
AaronFriel/pulumi-google-native
75d1cda425e33d4610348972cd70bddf35f1770d
[ "Apache-2.0" ]
44
2021-04-18T23:00:48.000Z
2022-02-14T17:43:15.000Z
sdk/python/pulumi_google_native/cloudkms/v1/outputs.py
AaronFriel/pulumi-google-native
75d1cda425e33d4610348972cd70bddf35f1770d
[ "Apache-2.0" ]
354
2021-04-16T16:48:39.000Z
2022-03-31T17:16:39.000Z
sdk/python/pulumi_google_native/cloudkms/v1/outputs.py
AaronFriel/pulumi-google-native
75d1cda425e33d4610348972cd70bddf35f1770d
[ "Apache-2.0" ]
8
2021-04-24T17:46:51.000Z
2022-01-05T10:40:21.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities from . import outputs from ._enums import * __all__ = [ 'AuditConfigResponse', 'AuditLogConfigResponse', 'BindingResponse', 'CertificateChainsResponse', 'CryptoKeyVersionResponse', 'CryptoKeyVersionTemplateResponse', 'ExprResponse', 'ExternalProtectionLevelOptionsResponse', 'KeyOperationAttestationResponse', 'WrappingPublicKeyResponse', ] @pulumi.output_type class AuditConfigResponse(dict): """ Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { "audit_configs": [ { "service": "allServices", "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:aliya@example.com" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts jose@example.com from DATA_READ logging, and aliya@example.com from DATA_WRITE logging. 
""" @staticmethod def __key_warning(key: str): suggest = None if key == "auditLogConfigs": suggest = "audit_log_configs" if suggest: pulumi.log.warn(f"Key '{key}' not found in AuditConfigResponse. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AuditConfigResponse.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AuditConfigResponse.__key_warning(key) return super().get(key, default) def __init__(__self__, *, audit_log_configs: Sequence['outputs.AuditLogConfigResponse'], service: str): """ Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { "audit_configs": [ { "service": "allServices", "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:aliya@example.com" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts jose@example.com from DATA_READ logging, and aliya@example.com from DATA_WRITE logging. :param Sequence['AuditLogConfigResponse'] audit_log_configs: The configuration for logging of each type of permission. :param str service: Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. 
`allServices` is a special value that covers all services. """ pulumi.set(__self__, "audit_log_configs", audit_log_configs) pulumi.set(__self__, "service", service) @property @pulumi.getter(name="auditLogConfigs") def audit_log_configs(self) -> Sequence['outputs.AuditLogConfigResponse']: """ The configuration for logging of each type of permission. """ return pulumi.get(self, "audit_log_configs") @property @pulumi.getter def service(self) -> str: """ Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special value that covers all services. """ return pulumi.get(self, "service") @pulumi.output_type class AuditLogConfigResponse(dict): """ Provides the configuration for logging a type of permissions. Example: { "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" } ] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from DATA_READ logging. """ @staticmethod def __key_warning(key: str): suggest = None if key == "exemptedMembers": suggest = "exempted_members" elif key == "logType": suggest = "log_type" if suggest: pulumi.log.warn(f"Key '{key}' not found in AuditLogConfigResponse. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: AuditLogConfigResponse.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: AuditLogConfigResponse.__key_warning(key) return super().get(key, default) def __init__(__self__, *, exempted_members: Sequence[str], log_type: str): """ Provides the configuration for logging a type of permissions. Example: { "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" } ] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from DATA_READ logging. 
:param Sequence[str] exempted_members: Specifies the identities that do not cause logging for this type of permission. Follows the same format of Binding.members. :param str log_type: The log type that this config enables. """ pulumi.set(__self__, "exempted_members", exempted_members) pulumi.set(__self__, "log_type", log_type) @property @pulumi.getter(name="exemptedMembers") def exempted_members(self) -> Sequence[str]: """ Specifies the identities that do not cause logging for this type of permission. Follows the same format of Binding.members. """ return pulumi.get(self, "exempted_members") @property @pulumi.getter(name="logType") def log_type(self) -> str: """ The log type that this config enables. """ return pulumi.get(self, "log_type") @pulumi.output_type class BindingResponse(dict): """ Associates `members`, or principals, with a `role`. """ def __init__(__self__, *, condition: 'outputs.ExprResponse', members: Sequence[str], role: str): """ Associates `members`, or principals, with a `role`. :param 'ExprResponse' condition: The condition that is associated with this binding. If the condition evaluates to `true`, then this binding applies to the current request. If the condition evaluates to `false`, then this binding does not apply to the current request. However, a different role binding might grant the same role to one or more of the principals in this binding. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). :param Sequence[str] members: Specifies the principals requesting access for a Cloud Platform resource. `members` can have the following values: * `allUsers`: A special identifier that represents anyone who is on the internet; with or without a Google account. * `allAuthenticatedUsers`: A special identifier that represents anyone who is authenticated with a Google account or a service account. 
* `user:{emailid}`: An email address that represents a specific Google account. For example, `alice@example.com` . * `serviceAccount:{emailid}`: An email address that represents a service account. For example, `my-other-app@appspot.gserviceaccount.com`. * `group:{emailid}`: An email address that represents a Google group. For example, `admins@example.com`. * `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a user that has been recently deleted. For example, `alice@example.com?uid=123456789012345678901`. If the user is recovered, this value reverts to `user:{emailid}` and the recovered user retains the role in the binding. * `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a service account that has been recently deleted. For example, `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If the service account is undeleted, this value reverts to `serviceAccount:{emailid}` and the undeleted service account retains the role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a Google group that has been recently deleted. For example, `admins@example.com?uid=123456789012345678901`. If the group is recovered, this value reverts to `group:{emailid}` and the recovered group retains the role in the binding. * `domain:{domain}`: The G Suite domain (primary) that represents all the users of that domain. For example, `google.com` or `example.com`. :param str role: Role that is assigned to the list of `members`, or principals. For example, `roles/viewer`, `roles/editor`, or `roles/owner`. """ pulumi.set(__self__, "condition", condition) pulumi.set(__self__, "members", members) pulumi.set(__self__, "role", role) @property @pulumi.getter def condition(self) -> 'outputs.ExprResponse': """ The condition that is associated with this binding. 
If the condition evaluates to `true`, then this binding applies to the current request. If the condition evaluates to `false`, then this binding does not apply to the current request. However, a different role binding might grant the same role to one or more of the principals in this binding. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). """ return pulumi.get(self, "condition") @property @pulumi.getter def members(self) -> Sequence[str]: """ Specifies the principals requesting access for a Cloud Platform resource. `members` can have the following values: * `allUsers`: A special identifier that represents anyone who is on the internet; with or without a Google account. * `allAuthenticatedUsers`: A special identifier that represents anyone who is authenticated with a Google account or a service account. * `user:{emailid}`: An email address that represents a specific Google account. For example, `alice@example.com` . * `serviceAccount:{emailid}`: An email address that represents a service account. For example, `my-other-app@appspot.gserviceaccount.com`. * `group:{emailid}`: An email address that represents a Google group. For example, `admins@example.com`. * `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a user that has been recently deleted. For example, `alice@example.com?uid=123456789012345678901`. If the user is recovered, this value reverts to `user:{emailid}` and the recovered user retains the role in the binding. * `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a service account that has been recently deleted. For example, `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If the service account is undeleted, this value reverts to `serviceAccount:{emailid}` and the undeleted service account retains the role in the binding. 
* `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a Google group that has been recently deleted. For example, `admins@example.com?uid=123456789012345678901`. If the group is recovered, this value reverts to `group:{emailid}` and the recovered group retains the role in the binding. * `domain:{domain}`: The G Suite domain (primary) that represents all the users of that domain. For example, `google.com` or `example.com`. """ return pulumi.get(self, "members") @property @pulumi.getter def role(self) -> str: """ Role that is assigned to the list of `members`, or principals. For example, `roles/viewer`, `roles/editor`, or `roles/owner`. """ return pulumi.get(self, "role") @pulumi.output_type class CertificateChainsResponse(dict): """ Certificate chains needed to verify the attestation. Certificates in chains are PEM-encoded and are ordered based on https://tools.ietf.org/html/rfc5246#section-7.4.2. """ @staticmethod def __key_warning(key: str): suggest = None if key == "caviumCerts": suggest = "cavium_certs" elif key == "googleCardCerts": suggest = "google_card_certs" elif key == "googlePartitionCerts": suggest = "google_partition_certs" if suggest: pulumi.log.warn(f"Key '{key}' not found in CertificateChainsResponse. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: CertificateChainsResponse.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: CertificateChainsResponse.__key_warning(key) return super().get(key, default) def __init__(__self__, *, cavium_certs: Sequence[str], google_card_certs: Sequence[str], google_partition_certs: Sequence[str]): """ Certificate chains needed to verify the attestation. Certificates in chains are PEM-encoded and are ordered based on https://tools.ietf.org/html/rfc5246#section-7.4.2. :param Sequence[str] cavium_certs: Cavium certificate chain corresponding to the attestation. 
:param Sequence[str] google_card_certs: Google card certificate chain corresponding to the attestation. :param Sequence[str] google_partition_certs: Google partition certificate chain corresponding to the attestation. """ pulumi.set(__self__, "cavium_certs", cavium_certs) pulumi.set(__self__, "google_card_certs", google_card_certs) pulumi.set(__self__, "google_partition_certs", google_partition_certs) @property @pulumi.getter(name="caviumCerts") def cavium_certs(self) -> Sequence[str]: """ Cavium certificate chain corresponding to the attestation. """ return pulumi.get(self, "cavium_certs") @property @pulumi.getter(name="googleCardCerts") def google_card_certs(self) -> Sequence[str]: """ Google card certificate chain corresponding to the attestation. """ return pulumi.get(self, "google_card_certs") @property @pulumi.getter(name="googlePartitionCerts") def google_partition_certs(self) -> Sequence[str]: """ Google partition certificate chain corresponding to the attestation. """ return pulumi.get(self, "google_partition_certs") @pulumi.output_type class CryptoKeyVersionResponse(dict): """ A CryptoKeyVersion represents an individual cryptographic key, and the associated key material. An ENABLED version can be used for cryptographic operations. For security reasons, the raw cryptographic key material represented by a CryptoKeyVersion can never be viewed or exported. It can only be used to encrypt, decrypt, or sign data when an authorized user or application invokes Cloud KMS. 
""" @staticmethod def __key_warning(key: str): suggest = None if key == "createTime": suggest = "create_time" elif key == "destroyEventTime": suggest = "destroy_event_time" elif key == "destroyTime": suggest = "destroy_time" elif key == "externalProtectionLevelOptions": suggest = "external_protection_level_options" elif key == "generateTime": suggest = "generate_time" elif key == "importFailureReason": suggest = "import_failure_reason" elif key == "importJob": suggest = "import_job" elif key == "importTime": suggest = "import_time" elif key == "protectionLevel": suggest = "protection_level" elif key == "reimportEligible": suggest = "reimport_eligible" if suggest: pulumi.log.warn(f"Key '{key}' not found in CryptoKeyVersionResponse. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: CryptoKeyVersionResponse.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: CryptoKeyVersionResponse.__key_warning(key) return super().get(key, default) def __init__(__self__, *, algorithm: str, attestation: 'outputs.KeyOperationAttestationResponse', create_time: str, destroy_event_time: str, destroy_time: str, external_protection_level_options: 'outputs.ExternalProtectionLevelOptionsResponse', generate_time: str, import_failure_reason: str, import_job: str, import_time: str, name: str, protection_level: str, reimport_eligible: bool, state: str): """ A CryptoKeyVersion represents an individual cryptographic key, and the associated key material. An ENABLED version can be used for cryptographic operations. For security reasons, the raw cryptographic key material represented by a CryptoKeyVersion can never be viewed or exported. It can only be used to encrypt, decrypt, or sign data when an authorized user or application invokes Cloud KMS. :param str algorithm: The CryptoKeyVersionAlgorithm that this CryptoKeyVersion supports. 
:param 'KeyOperationAttestationResponse' attestation: Statement that was generated and signed by the HSM at key creation time. Use this statement to verify attributes of the key as stored on the HSM, independently of Google. Only provided for key versions with protection_level HSM. :param str create_time: The time at which this CryptoKeyVersion was created. :param str destroy_event_time: The time this CryptoKeyVersion's key material was destroyed. Only present if state is DESTROYED. :param str destroy_time: The time this CryptoKeyVersion's key material is scheduled for destruction. Only present if state is DESTROY_SCHEDULED. :param 'ExternalProtectionLevelOptionsResponse' external_protection_level_options: ExternalProtectionLevelOptions stores a group of additional fields for configuring a CryptoKeyVersion that are specific to the EXTERNAL protection level. :param str generate_time: The time this CryptoKeyVersion's key material was generated. :param str import_failure_reason: The root cause of the most recent import failure. Only present if state is IMPORT_FAILED. :param str import_job: The name of the ImportJob used in the most recent import of this CryptoKeyVersion. Only present if the underlying key material was imported. :param str import_time: The time at which this CryptoKeyVersion's key material was most recently imported. :param str name: The resource name for this CryptoKeyVersion in the format `projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*`. :param str protection_level: The ProtectionLevel describing how crypto operations are performed with this CryptoKeyVersion. :param bool reimport_eligible: Whether or not this key version is eligible for reimport, by being specified as a target in ImportCryptoKeyVersionRequest.crypto_key_version. :param str state: The current state of the CryptoKeyVersion. 
""" pulumi.set(__self__, "algorithm", algorithm) pulumi.set(__self__, "attestation", attestation) pulumi.set(__self__, "create_time", create_time) pulumi.set(__self__, "destroy_event_time", destroy_event_time) pulumi.set(__self__, "destroy_time", destroy_time) pulumi.set(__self__, "external_protection_level_options", external_protection_level_options) pulumi.set(__self__, "generate_time", generate_time) pulumi.set(__self__, "import_failure_reason", import_failure_reason) pulumi.set(__self__, "import_job", import_job) pulumi.set(__self__, "import_time", import_time) pulumi.set(__self__, "name", name) pulumi.set(__self__, "protection_level", protection_level) pulumi.set(__self__, "reimport_eligible", reimport_eligible) pulumi.set(__self__, "state", state) @property @pulumi.getter def algorithm(self) -> str: """ The CryptoKeyVersionAlgorithm that this CryptoKeyVersion supports. """ return pulumi.get(self, "algorithm") @property @pulumi.getter def attestation(self) -> 'outputs.KeyOperationAttestationResponse': """ Statement that was generated and signed by the HSM at key creation time. Use this statement to verify attributes of the key as stored on the HSM, independently of Google. Only provided for key versions with protection_level HSM. """ return pulumi.get(self, "attestation") @property @pulumi.getter(name="createTime") def create_time(self) -> str: """ The time at which this CryptoKeyVersion was created. """ return pulumi.get(self, "create_time") @property @pulumi.getter(name="destroyEventTime") def destroy_event_time(self) -> str: """ The time this CryptoKeyVersion's key material was destroyed. Only present if state is DESTROYED. """ return pulumi.get(self, "destroy_event_time") @property @pulumi.getter(name="destroyTime") def destroy_time(self) -> str: """ The time this CryptoKeyVersion's key material is scheduled for destruction. Only present if state is DESTROY_SCHEDULED. 
""" return pulumi.get(self, "destroy_time") @property @pulumi.getter(name="externalProtectionLevelOptions") def external_protection_level_options(self) -> 'outputs.ExternalProtectionLevelOptionsResponse': """ ExternalProtectionLevelOptions stores a group of additional fields for configuring a CryptoKeyVersion that are specific to the EXTERNAL protection level. """ return pulumi.get(self, "external_protection_level_options") @property @pulumi.getter(name="generateTime") def generate_time(self) -> str: """ The time this CryptoKeyVersion's key material was generated. """ return pulumi.get(self, "generate_time") @property @pulumi.getter(name="importFailureReason") def import_failure_reason(self) -> str: """ The root cause of the most recent import failure. Only present if state is IMPORT_FAILED. """ return pulumi.get(self, "import_failure_reason") @property @pulumi.getter(name="importJob") def import_job(self) -> str: """ The name of the ImportJob used in the most recent import of this CryptoKeyVersion. Only present if the underlying key material was imported. """ return pulumi.get(self, "import_job") @property @pulumi.getter(name="importTime") def import_time(self) -> str: """ The time at which this CryptoKeyVersion's key material was most recently imported. """ return pulumi.get(self, "import_time") @property @pulumi.getter def name(self) -> str: """ The resource name for this CryptoKeyVersion in the format `projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*`. """ return pulumi.get(self, "name") @property @pulumi.getter(name="protectionLevel") def protection_level(self) -> str: """ The ProtectionLevel describing how crypto operations are performed with this CryptoKeyVersion. """ return pulumi.get(self, "protection_level") @property @pulumi.getter(name="reimportEligible") def reimport_eligible(self) -> bool: """ Whether or not this key version is eligible for reimport, by being specified as a target in ImportCryptoKeyVersionRequest.crypto_key_version. 
""" return pulumi.get(self, "reimport_eligible") @property @pulumi.getter def state(self) -> str: """ The current state of the CryptoKeyVersion. """ return pulumi.get(self, "state") @pulumi.output_type class CryptoKeyVersionTemplateResponse(dict): """ A CryptoKeyVersionTemplate specifies the properties to use when creating a new CryptoKeyVersion, either manually with CreateCryptoKeyVersion or automatically as a result of auto-rotation. """ @staticmethod def __key_warning(key: str): suggest = None if key == "protectionLevel": suggest = "protection_level" if suggest: pulumi.log.warn(f"Key '{key}' not found in CryptoKeyVersionTemplateResponse. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: CryptoKeyVersionTemplateResponse.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: CryptoKeyVersionTemplateResponse.__key_warning(key) return super().get(key, default) def __init__(__self__, *, algorithm: str, protection_level: str): """ A CryptoKeyVersionTemplate specifies the properties to use when creating a new CryptoKeyVersion, either manually with CreateCryptoKeyVersion or automatically as a result of auto-rotation. :param str algorithm: Algorithm to use when creating a CryptoKeyVersion based on this template. For backwards compatibility, GOOGLE_SYMMETRIC_ENCRYPTION is implied if both this field is omitted and CryptoKey.purpose is ENCRYPT_DECRYPT. :param str protection_level: ProtectionLevel to use when creating a CryptoKeyVersion based on this template. Immutable. Defaults to SOFTWARE. """ pulumi.set(__self__, "algorithm", algorithm) pulumi.set(__self__, "protection_level", protection_level) @property @pulumi.getter def algorithm(self) -> str: """ Algorithm to use when creating a CryptoKeyVersion based on this template. For backwards compatibility, GOOGLE_SYMMETRIC_ENCRYPTION is implied if both this field is omitted and CryptoKey.purpose is ENCRYPT_DECRYPT. 
""" return pulumi.get(self, "algorithm") @property @pulumi.getter(name="protectionLevel") def protection_level(self) -> str: """ ProtectionLevel to use when creating a CryptoKeyVersion based on this template. Immutable. Defaults to SOFTWARE. """ return pulumi.get(self, "protection_level") @pulumi.output_type class ExprResponse(dict): """ Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information. """ def __init__(__self__, *, description: str, expression: str, location: str, title: str): """ Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. 
Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information. :param str description: Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI. :param str expression: Textual representation of an expression in Common Expression Language syntax. :param str location: Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file. :param str title: Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression. """ pulumi.set(__self__, "description", description) pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "location", location) pulumi.set(__self__, "title", title) @property @pulumi.getter def description(self) -> str: """ Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI. 
""" return pulumi.get(self, "description") @property @pulumi.getter def expression(self) -> str: """ Textual representation of an expression in Common Expression Language syntax. """ return pulumi.get(self, "expression") @property @pulumi.getter def location(self) -> str: """ Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file. """ return pulumi.get(self, "location") @property @pulumi.getter def title(self) -> str: """ Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression. """ return pulumi.get(self, "title") @pulumi.output_type class ExternalProtectionLevelOptionsResponse(dict): """ ExternalProtectionLevelOptions stores a group of additional fields for configuring a CryptoKeyVersion that are specific to the EXTERNAL protection level. """ @staticmethod def __key_warning(key: str): suggest = None if key == "externalKeyUri": suggest = "external_key_uri" if suggest: pulumi.log.warn(f"Key '{key}' not found in ExternalProtectionLevelOptionsResponse. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: ExternalProtectionLevelOptionsResponse.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: ExternalProtectionLevelOptionsResponse.__key_warning(key) return super().get(key, default) def __init__(__self__, *, external_key_uri: str): """ ExternalProtectionLevelOptions stores a group of additional fields for configuring a CryptoKeyVersion that are specific to the EXTERNAL protection level. :param str external_key_uri: The URI for an external resource that this CryptoKeyVersion represents. """ pulumi.set(__self__, "external_key_uri", external_key_uri) @property @pulumi.getter(name="externalKeyUri") def external_key_uri(self) -> str: """ The URI for an external resource that this CryptoKeyVersion represents. 
""" return pulumi.get(self, "external_key_uri") @pulumi.output_type class KeyOperationAttestationResponse(dict): """ Contains an HSM-generated attestation about a key operation. For more information, see [Verifying attestations] (https://cloud.google.com/kms/docs/attest-key). """ @staticmethod def __key_warning(key: str): suggest = None if key == "certChains": suggest = "cert_chains" if suggest: pulumi.log.warn(f"Key '{key}' not found in KeyOperationAttestationResponse. Access the value via the '{suggest}' property getter instead.") def __getitem__(self, key: str) -> Any: KeyOperationAttestationResponse.__key_warning(key) return super().__getitem__(key) def get(self, key: str, default = None) -> Any: KeyOperationAttestationResponse.__key_warning(key) return super().get(key, default) def __init__(__self__, *, cert_chains: 'outputs.CertificateChainsResponse', content: str, format: str): """ Contains an HSM-generated attestation about a key operation. For more information, see [Verifying attestations] (https://cloud.google.com/kms/docs/attest-key). :param 'CertificateChainsResponse' cert_chains: The certificate chains needed to validate the attestation :param str content: The attestation data provided by the HSM when the key operation was performed. :param str format: The format of the attestation data. """ pulumi.set(__self__, "cert_chains", cert_chains) pulumi.set(__self__, "content", content) pulumi.set(__self__, "format", format) @property @pulumi.getter(name="certChains") def cert_chains(self) -> 'outputs.CertificateChainsResponse': """ The certificate chains needed to validate the attestation """ return pulumi.get(self, "cert_chains") @property @pulumi.getter def content(self) -> str: """ The attestation data provided by the HSM when the key operation was performed. """ return pulumi.get(self, "content") @property @pulumi.getter def format(self) -> str: """ The format of the attestation data. 
""" return pulumi.get(self, "format") @pulumi.output_type class WrappingPublicKeyResponse(dict): """ The public key component of the wrapping key. For details of the type of key this public key corresponds to, see the ImportMethod. """ def __init__(__self__, *, pem: str): """ The public key component of the wrapping key. For details of the type of key this public key corresponds to, see the ImportMethod. :param str pem: The public key, encoded in PEM format. For more information, see the [RFC 7468](https://tools.ietf.org/html/rfc7468) sections for [General Considerations](https://tools.ietf.org/html/rfc7468#section-2) and [Textual Encoding of Subject Public Key Info] (https://tools.ietf.org/html/rfc7468#section-13). """ pulumi.set(__self__, "pem", pem) @property @pulumi.getter def pem(self) -> str: """ The public key, encoded in PEM format. For more information, see the [RFC 7468](https://tools.ietf.org/html/rfc7468) sections for [General Considerations](https://tools.ietf.org/html/rfc7468#section-2) and [Textual Encoding of Subject Public Key Info] (https://tools.ietf.org/html/rfc7468#section-13). """ return pulumi.get(self, "pem")
56.596386
1,947
0.69702
4,526
37,580
5.648255
0.10738
0.011501
0.017798
0.026013
0.760014
0.73005
0.71319
0.68616
0.669848
0.661242
0
0.006453
0.2124
37,580
663
1,948
56.68175
0.857254
0.55133
0
0.425134
1
0.018717
0.197973
0.062309
0
0
0
0
0
1
0.176471
false
0
0.093583
0
0.427807
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
03009a9b790a589b6fb338017666b6130b828aab
82
py
Python
LPBv2/client/__init__.py
TierynnB/LeaguePyBot
2e96230b9dc24d185ddc0c6086d79f7d01e7a643
[ "MIT" ]
45
2020-11-28T04:45:45.000Z
2022-03-31T05:53:37.000Z
LPBv2/client/__init__.py
TierynnB/LeaguePyBot
2e96230b9dc24d185ddc0c6086d79f7d01e7a643
[ "MIT" ]
13
2021-01-15T00:50:10.000Z
2022-02-02T15:16:49.000Z
LPBv2/client/__init__.py
TierynnB/LeaguePyBot
2e96230b9dc24d185ddc0c6086d79f7d01e7a643
[ "MIT" ]
14
2020-12-21T10:03:31.000Z
2021-11-22T04:03:03.000Z
from .client import Client from .connection import * from .http_requests import *
20.5
28
0.792683
11
82
5.818182
0.545455
0
0
0
0
0
0
0
0
0
0
0
0.146341
82
3
29
27.333333
0.914286
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
034a2de32855dfd2fe530a9546ebf7c56c7de5d5
1,231
py
Python
svm.py
Jhilbertxtu/JDComments_Analyze
9a93c7cfc572509fce5e0f82702d8d55d029ef8f
[ "MIT" ]
2
2021-03-01T13:32:22.000Z
2021-07-28T13:37:43.000Z
svm.py
Jhilbertxtu/JDComments_Analyze
9a93c7cfc572509fce5e0f82702d8d55d029ef8f
[ "MIT" ]
null
null
null
svm.py
Jhilbertxtu/JDComments_Analyze
9a93c7cfc572509fce5e0f82702d8d55d029ef8f
[ "MIT" ]
null
null
null
import pandas as pd from sklearn.decomposition import PCA from sklearn import svm ''' 10条数据,但模型是100维的,所以复制够100,只取前10 第1次 c=2 完全正确4,完全错误2,模糊4 [1. 0. 0. 1. 1. 0. 0. 0. 1. 1. 1. 0. 0. 1. 1. 0. 0. 0. 1. 1. 1. 0. 0. 1. 1. 0. 0. 0. 1. 1. 1. 0. 0. 1. 1. 0. 0. 0. 1. 1. 1. 0. 0. 1. 1. 0. 0. 0. 1. 1. 1. 0. 0. 1. 1. 0. 0. 0. 1. 1. 1. 0. 0. 1. 1. 0. 0. 0. 1. 1. 1. 0. 0. 1. 1. 0. 0. 0. 1. 1. 1. 0. 0. 1. 1. 0. 0. 0. 1. 1. 1. 0. 0. 1. 1. 0. 0. 0. 1. 1.] 第2次 c=1 完全正确5,完全错误1,模糊4 [0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1. 0. 0. 0. 1. 1.] ''' # 读取测试数据 test=pd.read_csv('datas/test.csv') tx = test.iloc[:,:] # 建立测试PCA,维度需要根据模型优化情况及测试数据量调整 pcax=PCA(n_components=100) pcax.fit(tx) low_x=pcax.transform(tx) # 获取模型数据 df = pd.read_csv('datas/phone_pos_neg.csv') # y为结果,x为向量值 y = df.iloc[:,1] x = df.iloc[:,2:] #原始数据400维,降维到100 pca = PCA(n_components = 100).fit_transform(x) #调整c值,以期最优 clf = svm.SVC(C = 1, probability = True) #训练 clf.fit(pca,y) #预测结果 result = clf.predict(low_x) print(result)
28.627907
73
0.525589
307
1,231
2.078176
0.201954
0.219436
0.188088
0.250784
0.31348
0.31348
0.31348
0.31348
0.31348
0.31348
0
0.247368
0.22827
1,231
42
74
29.309524
0.424211
0.067425
0
0
0
0
0.084475
0.052511
0
0
0
0
0
1
0
false
0
0.1875
0
0.1875
0.0625
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
036b10b6b64067dc5dbd1af73b4559f639cd4b58
156
py
Python
LegacyCode/quantmark/hello.py
QuantMarkFramework/LibMark
1fcf2107d97e11c9b91be7e59bfa2f78cf953a0e
[ "MIT" ]
1
2021-03-04T13:00:07.000Z
2021-03-04T13:00:07.000Z
LegacyCode/quantmark/hello.py
QuantMarkFramework/LibMark
1fcf2107d97e11c9b91be7e59bfa2f78cf953a0e
[ "MIT" ]
13
2021-02-25T13:42:33.000Z
2021-05-10T16:22:07.000Z
LegacyCode/quantmark/hello.py
QuantMarkFramework/LibMark
1fcf2107d97e11c9b91be7e59bfa2f78cf953a0e
[ "MIT" ]
1
2021-05-19T10:23:45.000Z
2021-05-19T10:23:45.000Z
def hello() -> str: """ Used to test that the library is installed successfully. Returns ---------- The string 'world'. """ return "world"
15.6
58
0.576923
18
156
5
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.25
156
9
59
17.333333
0.769231
0.615385
0
0
0
0
0.131579
0
0
0
0
0
0
1
0.5
true
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
1
0
0
5
cef4a2d4c9ca6c0a167e16adafa8debb4d00cf71
43
py
Python
exercises/problem08.py
Dmendoza3/Phyton
e6c563609724b2dadcd767d2bfc291090ac2f58e
[ "MIT" ]
null
null
null
exercises/problem08.py
Dmendoza3/Phyton
e6c563609724b2dadcd767d2bfc291090ac2f58e
[ "MIT" ]
null
null
null
exercises/problem08.py
Dmendoza3/Phyton
e6c563609724b2dadcd767d2bfc291090ac2f58e
[ "MIT" ]
null
null
null
x = [(7,4), (1,2), (5,6)] x.sort() print(x)
14.333333
25
0.418605
11
43
1.636364
0.818182
0
0
0
0
0
0
0
0
0
0
0.162162
0.139535
43
3
26
14.333333
0.324324
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
cef60922ee674f53eb97165b2267fcf878cb2fbb
445
py
Python
tg/configurator/__init__.py
sergiobrr/tg2
401d77d82bd9daacb9444150c63bb039bf003436
[ "MIT" ]
812
2015-01-16T22:57:52.000Z
2022-03-27T04:49:40.000Z
tg/configurator/__init__.py
sergiobrr/tg2
401d77d82bd9daacb9444150c63bb039bf003436
[ "MIT" ]
74
2015-02-18T17:55:31.000Z
2021-12-13T10:41:08.000Z
tg/configurator/__init__.py
sergiobrr/tg2
401d77d82bd9daacb9444150c63bb039bf003436
[ "MIT" ]
72
2015-06-10T06:02:45.000Z
2022-03-27T08:37:24.000Z
# -*- coding: utf-8 -*- from .base import Configurator, ConfigurationComponent from .base import (BeforeConfigConfigurationAction, ConfigReadyConfigurationAction, AppReadyConfigurationAction, EnvironmentLoadedConfigurationAction) from .application import ApplicationConfigurator from .minimal import MinimalApplicationConfigurator from .fullstack import FullStackApplicationConfigurator
40.454545
56
0.759551
27
445
12.518519
0.666667
0.047337
0.08284
0
0
0
0
0
0
0
0
0.002793
0.195506
445
10
57
44.5
0.941341
0.047191
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.625
0
0.625
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
306d42f9438e936c993e3b1915a2820e54f026b1
147
py
Python
src/0171.excel-sheet-column-number/excel-sheet-column-number.py
lyphui/Just-Code
e0c3c3ecb67cb805080ff686e88522b2bffe7741
[ "MIT" ]
782
2019-11-19T08:20:49.000Z
2022-03-25T06:59:09.000Z
src/0171.excel-sheet-column-number/excel-sheet-column-number.py
Heitao5200/Just-Code
5bb3ee485a103418e693b7ec8e26dc84f3691c79
[ "MIT" ]
1
2021-03-04T12:21:01.000Z
2021-03-05T01:23:54.000Z
src/0171.excel-sheet-column-number/excel-sheet-column-number.py
Heitao5200/Just-Code
5bb3ee485a103418e693b7ec8e26dc84f3691c79
[ "MIT" ]
155
2019-11-20T08:20:42.000Z
2022-03-19T07:28:09.000Z
from functools import reduce class Solution: def titleToNumber(self, s: str) -> int: return reduce(lambda r, c: 26*r + ord(c)-64, s, 0)
36.75
58
0.653061
24
147
4
0.833333
0
0
0
0
0
0
0
0
0
0
0.043478
0.217687
147
4
58
36.75
0.791304
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
0658ed17625be521b56bddbca760b703fea646b2
92
py
Python
django/config/heroku_settings.py
andreyvpng/askme
65139c347a6b80f0a660ca24d6dd864e4531903a
[ "Apache-2.0" ]
2
2018-10-29T09:37:47.000Z
2019-11-28T14:11:12.000Z
django/config/heroku_settings.py
andreyvpng/askme
65139c347a6b80f0a660ca24d6dd864e4531903a
[ "Apache-2.0" ]
null
null
null
django/config/heroku_settings.py
andreyvpng/askme
65139c347a6b80f0a660ca24d6dd864e4531903a
[ "Apache-2.0" ]
2
2018-09-18T14:09:46.000Z
2019-11-28T14:11:14.000Z
from config.common_settings import * import django_heroku django_heroku.settings(locals())
18.4
36
0.836957
12
92
6.166667
0.666667
0.324324
0
0
0
0
0
0
0
0
0
0
0.086957
92
4
37
23
0.880952
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
0681bdb828157994453138f14bd0536476c819c5
45
py
Python
hatch/exceptions.py
kapb14/hatch
e7f7e094571780d6499d41960999134966ae699d
[ "Apache-2.0", "MIT" ]
2,549
2017-09-05T06:44:17.000Z
2022-03-31T23:21:02.000Z
hatch/exceptions.py
anmolsrivastava05/hatch
df2c9d46ee7713a1bc156c361cfd0f78e5935297
[ "Apache-2.0" ]
97
2017-06-07T23:14:12.000Z
2022-03-30T14:22:34.000Z
hatch/exceptions.py
anmolsrivastava05/hatch
df2c9d46ee7713a1bc156c361cfd0f78e5935297
[ "Apache-2.0" ]
140
2017-06-10T14:16:47.000Z
2022-03-23T09:25:01.000Z
class InvalidVirtualEnv(Exception): pass
15
35
0.777778
4
45
8.75
1
0
0
0
0
0
0
0
0
0
0
0
0.155556
45
2
36
22.5
0.921053
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
068d8c1352eab1b4c16f7fb8a9d8d1986ab28c38
179
py
Python
edx_rest_api_client/exceptions.py
regisb/edx-rest-api-client
130b5aa1285cd45118becc5021285fdc03e2d56a
[ "Apache-2.0" ]
14
2016-02-15T03:32:26.000Z
2021-10-14T19:14:25.000Z
edx_rest_api_client/exceptions.py
regisb/edx-rest-api-client
130b5aa1285cd45118becc5021285fdc03e2d56a
[ "Apache-2.0" ]
40
2015-10-20T16:51:13.000Z
2021-08-16T13:27:46.000Z
edx_rest_api_client/exceptions.py
regisb/edx-rest-api-client
130b5aa1285cd45118becc5021285fdc03e2d56a
[ "Apache-2.0" ]
10
2016-01-04T18:51:10.000Z
2021-06-22T12:41:14.000Z
# noinspection PyUnresolvedReferences from requests.exceptions import Timeout # pylint: disable=unused-import from slumber.exceptions import * # pylint: disable=wildcard-import
44.75
72
0.826816
19
179
7.789474
0.631579
0.216216
0
0
0
0
0
0
0
0
0
0
0.106145
179
3
73
59.666667
0.925
0.541899
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
ebef03ee8c03c1392776fa51dc7df3923265b4cb
56
py
Python
slide_07/main.py
lordjack/aula_python_slides
38ad45ac1843fc83c3349addb9d49f7d182a574f
[ "MIT" ]
null
null
null
slide_07/main.py
lordjack/aula_python_slides
38ad45ac1843fc83c3349addb9d49f7d182a574f
[ "MIT" ]
null
null
null
slide_07/main.py
lordjack/aula_python_slides
38ad45ac1843fc83c3349addb9d49f7d182a574f
[ "MIT" ]
null
null
null
if(10 <5): print("Teste IF") print("Teste fora IF")
14
22
0.589286
10
56
3.3
0.6
0.606061
0
0
0
0
0
0
0
0
0
0.066667
0.196429
56
4
22
14
0.666667
0
0
0
0
0
0.375
0
0
0
0
0
0
1
0
true
0
0
0
0
0.666667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
231cbf170f152963ab547f1ebfa8d3ec30f1a686
38
py
Python
src/signals/StopSignal.py
AutoDash/AutoDash
3924795a04159f80ea3b65b2172747babd15f35f
[ "Apache-2.0" ]
3
2020-02-12T01:24:46.000Z
2020-02-13T00:50:46.000Z
src/signals/StopSignal.py
AutoDash/AutoDash
3924795a04159f80ea3b65b2172747babd15f35f
[ "Apache-2.0" ]
32
2020-02-20T10:20:56.000Z
2022-02-10T01:42:46.000Z
src/signals/StopSignal.py
AutoDash/AutoDash
3924795a04159f80ea3b65b2172747babd15f35f
[ "Apache-2.0" ]
1
2020-02-22T02:47:19.000Z
2020-02-22T02:47:19.000Z
class StopSignal(Exception): pass
12.666667
28
0.736842
4
38
7
1
0
0
0
0
0
0
0
0
0
0
0
0.184211
38
3
29
12.666667
0.903226
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
23353eb4f6571e7f2991c1821c9a9a7e45ed154d
93
py
Python
app/model/__init__.py
tomhaoye/crawler.toutiao
ceadabfec3caf3f88fe01df1e7bf39199256c7be
[ "MIT" ]
2
2019-09-02T05:36:59.000Z
2019-12-04T01:46:20.000Z
app/model/__init__.py
tomhaoye/crawler.toutiao
ceadabfec3caf3f88fe01df1e7bf39199256c7be
[ "MIT" ]
null
null
null
app/model/__init__.py
tomhaoye/crawler.toutiao
ceadabfec3caf3f88fe01df1e7bf39199256c7be
[ "MIT" ]
1
2019-12-04T01:46:23.000Z
2019-12-04T01:46:23.000Z
from util.orm import Base, engine from .topic import Topic Base.metadata.create_all(engine)
18.6
33
0.806452
15
93
4.933333
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.11828
93
4
34
23.25
0.902439
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
23381d382e176d80e1bbf1b3dc2396c4ddba98b0
61
py
Python
pm4pymdl/objects/xoc/exporter/__init__.py
dorian1000/pm4py-mdl
71e0c2425abb183da293a58d31e25e50137c774f
[ "MIT" ]
5
2021-01-31T22:45:29.000Z
2022-02-22T14:26:06.000Z
pm4pymdl/objects/xoc/exporter/__init__.py
Javert899/pm4py-mdl
4cc875999100f3f1ad60b925a20e40cf52337757
[ "MIT" ]
3
2021-07-07T15:32:55.000Z
2021-07-07T16:15:36.000Z
pm4pymdl/objects/xoc/exporter/__init__.py
dorian1000/pm4py-mdl
71e0c2425abb183da293a58d31e25e50137c774f
[ "MIT" ]
9
2020-09-23T15:34:11.000Z
2022-03-17T09:15:40.000Z
from pm4pymdl.objects.xoc.exporter import exporter, versions
30.5
60
0.852459
8
61
6.5
0.875
0
0
0
0
0
0
0
0
0
0
0.017857
0.081967
61
1
61
61
0.910714
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
2338a6f17caf2c4bde70d9155d45f4a5aa304d58
206
py
Python
colorschemes/variables.css.py
Rainbow-Spike/dotfiles
92361f9a54ee77a02f8826f0c2ea125f1f5be18e
[ "MIT" ]
null
null
null
colorschemes/variables.css.py
Rainbow-Spike/dotfiles
92361f9a54ee77a02f8826f0c2ea125f1f5be18e
[ "MIT" ]
null
null
null
colorschemes/variables.css.py
Rainbow-Spike/dotfiles
92361f9a54ee77a02f8826f0c2ea125f1f5be18e
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 import _theme as theme print(":root {") for var_name, color in theme.css_variables.items(): print(" --{}{}: {};".format(theme.css_variables_prefix, var_name, color)) print("}")
20.6
76
0.674757
29
206
4.586207
0.655172
0.105263
0.180451
0
0
0
0
0
0
0
0
0.005525
0.121359
206
9
77
22.888889
0.729282
0.101942
0
0
0
0
0.11413
0
0
0
0
0
0
1
0
true
0
0.2
0
0.2
0.6
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
88c499fae475f8ce4183b9360bb5d02b4b41efc5
198
py
Python
Codewars_Python/evens_and_odds.py
nlantau/Codewars_2020_2021
055fbf8785ddd52b9f8e8c2b59294ead01852467
[ "MIT" ]
null
null
null
Codewars_Python/evens_and_odds.py
nlantau/Codewars_2020_2021
055fbf8785ddd52b9f8e8c2b59294ead01852467
[ "MIT" ]
null
null
null
Codewars_Python/evens_and_odds.py
nlantau/Codewars_2020_2021
055fbf8785ddd52b9f8e8c2b59294ead01852467
[ "MIT" ]
null
null
null
# nlantau, 2020-11-09 def evens_and_odds(n): return f"{n:b}" if n % 2 == 0 else f"{n:x}" print(evens_and_odds(1)) print(evens_and_odds(2)) print(evens_and_odds(3)) print(evens_and_odds(13))
16.5
47
0.681818
41
198
3.04878
0.512195
0.32
0.48
0.544
0
0
0
0
0
0
0
0.087719
0.136364
198
11
48
18
0.643275
0.09596
0
0
0
0
0.056497
0
0
0
0
0
0
1
0.166667
false
0
0
0.166667
0.333333
0.666667
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
1
0
5
88d232fc98c6414f6b9a88638419cee864589600
40
py
Python
1.py
BenjamimBorges/ProgramsPy
878801c108ba264bae547cecba909fe266536649
[ "MIT" ]
1
2020-08-29T02:39:31.000Z
2020-08-29T02:39:31.000Z
1.py
BenjamimBorges/ProgramsPy
878801c108ba264bae547cecba909fe266536649
[ "MIT" ]
null
null
null
1.py
BenjamimBorges/ProgramsPy
878801c108ba264bae547cecba909fe266536649
[ "MIT" ]
null
null
null
a = 5 b = a print(a,b) a = 3 print(a,b)
6.666667
10
0.5
12
40
1.666667
0.416667
0.2
0.7
0
0
0
0
0
0
0
0
0.068966
0.275
40
5
11
8
0.62069
0
0
0.4
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
1
1
1
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
00305eb8d8ff8ef2a227aa90c4fbd21930de37b4
58
py
Python
implicitresnet/solvers/__init__.py
vreshniak/ImplicitResNet
62e3c2f047f2572a0d0a0ee7cd3c8dd6e340080e
[ "MIT" ]
2
2021-01-01T00:42:17.000Z
2021-01-01T17:32:01.000Z
implicitresnet/solvers/__init__.py
vreshniak/ImplicitResNet
62e3c2f047f2572a0d0a0ee7cd3c8dd6e340080e
[ "MIT" ]
null
null
null
implicitresnet/solvers/__init__.py
vreshniak/ImplicitResNet
62e3c2f047f2572a0d0a0ee7cd3c8dd6e340080e
[ "MIT" ]
null
null
null
from .linear import linsolve from .nonlinear import nsolve
29
29
0.844828
8
58
6.125
0.75
0
0
0
0
0
0
0
0
0
0
0
0.12069
58
2
29
29
0.960784
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
003cbc28d02b0b48e8594becd3fd461f736c3e64
156
py
Python
PYTHON CODES/InfiniteLoop.py
Pavan1199/PURE-PYTHON-CODES
f0b9823e264e67a498a742eb66ab569cc1861b5e
[ "MIT" ]
2
2019-03-31T14:10:44.000Z
2019-05-03T17:19:00.000Z
PYTHON CODES/InfiniteLoop.py
Pavan1199/PURE-PYTHON-CODES
f0b9823e264e67a498a742eb66ab569cc1861b5e
[ "MIT" ]
null
null
null
PYTHON CODES/InfiniteLoop.py
Pavan1199/PURE-PYTHON-CODES
f0b9823e264e67a498a742eb66ab569cc1861b5e
[ "MIT" ]
null
null
null
#Example of an infinite loop print"Example of infinite loop" n=input("Enter a number: ") i=6969 while(i<>n): print i, while(i==n): print i,
17.333333
32
0.628205
27
156
3.62963
0.518519
0.183673
0.142857
0.244898
0.265306
0
0
0
0
0
0
0.033333
0.230769
156
8
33
19.5
0.783333
0.173077
0
0.285714
0
0
0.333333
0
0
0
0
0
0
0
null
null
0
0
null
null
0.428571
1
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
5
cc8b27c64555b9c805f0e45e5c02ba92033b4034
1,048
py
Python
src/admin_toolbelt/contrib/login_records/models.py
Elemnir/admin_toolbelt
8d03841a3676f6477931a202e95b45dc9e86cfa5
[ "BSD-3-Clause" ]
null
null
null
src/admin_toolbelt/contrib/login_records/models.py
Elemnir/admin_toolbelt
8d03841a3676f6477931a202e95b45dc9e86cfa5
[ "BSD-3-Clause" ]
null
null
null
src/admin_toolbelt/contrib/login_records/models.py
Elemnir/admin_toolbelt
8d03841a3676f6477931a202e95b45dc9e86cfa5
[ "BSD-3-Clause" ]
null
null
null
import random from django.db import models def generate_token(): return ''.join( [ random.choice('abcdefghijfklmnopqrstuvwxyz0123456789') for i in range(24) ] ) class LoginRecordToken(models.Model): created = models.DateTimeField(auto_now_add=True) last_used = models.DateTimeField(null=True, blank=True) expires = models.DateTimeField(null=True, blank=True) name = models.CharField(max_length=32) token = models.CharField(max_length=32, unique=True, default=generate_token) def __str__(self): return '{} - {}'.format(self.name, self.created) class LoginRecord(models.Model): when = models.DateTimeField() host = models.CharField(max_length=64) service = models.CharField(max_length=32) method = models.CharField(max_length=32, blank=True, null=True) user = models.CharField(max_length=32) fromhost = models.CharField(max_length=256) def __str__(self): return '{} - {}'.format(self.user, self.when)
31.757576
86
0.667939
122
1,048
5.57377
0.418033
0.154412
0.185294
0.247059
0.373529
0.182353
0
0
0
0
0
0.032767
0.21374
1,048
32
87
32.75
0.792476
0
0
0.086957
1
0
0.048664
0.035305
0
0
0
0
0
1
0.130435
false
0
0.086957
0.130435
0.913043
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
5
aeaa61ca88722be162d4bb1c6e28d61969335068
132
py
Python
ppe_tools/__init__.py
djk2120/CLM5PPE
0e657ea125f3455fe4084d1b5c0848d1b4bb20d1
[ "MIT" ]
5
2020-04-10T23:04:51.000Z
2022-02-04T14:50:00.000Z
ppe_tools/__init__.py
djk2120/CLM5PPE
0e657ea125f3455fe4084d1b5c0848d1b4bb20d1
[ "MIT" ]
null
null
null
ppe_tools/__init__.py
djk2120/CLM5PPE
0e657ea125f3455fe4084d1b5c0848d1b4bb20d1
[ "MIT" ]
5
2020-04-14T00:28:55.000Z
2021-11-12T22:53:53.000Z
from .member import Member from .paraminfo import ParamInfo from .utils import get_default,parse_val from .ensemble import Ensemble
26.4
40
0.840909
19
132
5.736842
0.526316
0
0
0
0
0
0
0
0
0
0
0
0.121212
132
4
41
33
0.939655
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
aeac0247a171e643124ae32e5d01d4dd0ea42da3
93
py
Python
backend/contact/admin.py
RA-MPR/mpr
a2e6f320af916d318da7c68c0764662c3d146974
[ "MIT" ]
null
null
null
backend/contact/admin.py
RA-MPR/mpr
a2e6f320af916d318da7c68c0764662c3d146974
[ "MIT" ]
91
2021-02-24T08:25:47.000Z
2021-05-05T10:14:21.000Z
backend/contact/admin.py
RA-MPR/mpr
a2e6f320af916d318da7c68c0764662c3d146974
[ "MIT" ]
1
2022-01-07T14:56:34.000Z
2022-01-07T14:56:34.000Z
from django.contrib import admin from . import models admin.site.register(models.Contact)
13.285714
35
0.795699
13
93
5.692308
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.129032
93
6
36
15.5
0.91358
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
aeb2641fa8d2956d9d22de81cd4047f4db4a2005
33
py
Python
kloppy/datasets.py
benoitblanc/kloppy
5c3f94ff8806f9e23f8bad095a948a403a06a54c
[ "BSD-3-Clause" ]
null
null
null
kloppy/datasets.py
benoitblanc/kloppy
5c3f94ff8806f9e23f8bad095a948a403a06a54c
[ "BSD-3-Clause" ]
null
null
null
kloppy/datasets.py
benoitblanc/kloppy
5c3f94ff8806f9e23f8bad095a948a403a06a54c
[ "BSD-3-Clause" ]
null
null
null
from .infra.datasets import load
16.5
32
0.818182
5
33
5.4
1
0
0
0
0
0
0
0
0
0
0
0
0.121212
33
1
33
33
0.931034
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
4e13469e74f074acde540fd09f30db3a27a38c92
53
py
Python
vlab_jumpbox_api/lib/__init__.py
willnx/vlab_jumpbox
a323e1d04039990f198a1f3483b71625365846a7
[ "Apache-2.0" ]
1
2019-04-10T16:17:18.000Z
2019-04-10T16:17:18.000Z
vlab_router_api/lib/__init__.py
willnx/vlab_router
2428042c2c1aded430d91ff2a9d411bf338a610a
[ "Apache-2.0" ]
6
2018-05-23T03:55:51.000Z
2018-09-19T16:50:29.000Z
vlab_router_api/lib/__init__.py
willnx/vlab_router
2428042c2c1aded430d91ff2a9d411bf338a610a
[ "Apache-2.0" ]
1
2018-06-04T16:56:37.000Z
2018-06-04T16:56:37.000Z
# -*- coding: UTF-8 -*- from .constants import const
17.666667
28
0.641509
7
53
4.857143
1
0
0
0
0
0
0
0
0
0
0
0.022727
0.169811
53
2
29
26.5
0.75
0.396226
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
9d6cb6d0a57d540664a114f235a9931a22a665f4
135
py
Python
Statistics/Sample.py
melaniemercado/CalculatorProject
3fec4cb33283b3c078a6050e403d0cb4fff0d6d9
[ "MIT" ]
null
null
null
Statistics/Sample.py
melaniemercado/CalculatorProject
3fec4cb33283b3c078a6050e403d0cb4fff0d6d9
[ "MIT" ]
null
null
null
Statistics/Sample.py
melaniemercado/CalculatorProject
3fec4cb33283b3c078a6050e403d0cb4fff0d6d9
[ "MIT" ]
null
null
null
import random def Getsample(data, sample_size): random_values = random.choices(data, k=sample_size - 1) return random_values
19.285714
59
0.748148
19
135
5.105263
0.631579
0.206186
0
0
0
0
0
0
0
0
0
0.008929
0.17037
135
6
60
22.5
0.857143
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
9d7626410cfa6c276d011cf3dace77debdca8603
469
py
Python
app/forms/login_form.py
pedroferronato/gerenciamento-rural
5ed873caf9fdf1da2a26938b8cee57b55e7636f0
[ "MIT" ]
null
null
null
app/forms/login_form.py
pedroferronato/gerenciamento-rural
5ed873caf9fdf1da2a26938b8cee57b55e7636f0
[ "MIT" ]
null
null
null
app/forms/login_form.py
pedroferronato/gerenciamento-rural
5ed873caf9fdf1da2a26938b8cee57b55e7636f0
[ "MIT" ]
null
null
null
from flask_wtf import FlaskForm from wtforms import StringField, PasswordField from wtforms.validators import DataRequired, Length class LoginForm(FlaskForm): usuario = StringField('Usuário:', validators=[DataRequired(message='Insira seu usuário')]) senha = PasswordField('Senha:', validators=[DataRequired(message='Insira sua senha'), Length(min=3, message='Senha muito curta')])
39.083333
80
0.65032
44
469
6.909091
0.545455
0.072368
0.190789
0.230263
0
0
0
0
0
0
0
0.002865
0.255864
469
11
81
42.636364
0.868195
0
0
0
0
0
0.138593
0
0
0
0
0
0
1
0
false
0.222222
0.333333
0
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
5
9dd81d5ce290cb7a20d643dce682f7dacc7a8666
3,765
py
Python
ansatz_list.py
iitis/variational_channel_fidelity
b69f5f412d642dc8a5630c8ddcc5d992bb87675b
[ "Apache-1.1" ]
null
null
null
ansatz_list.py
iitis/variational_channel_fidelity
b69f5f412d642dc8a5630c8ddcc5d992bb87675b
[ "Apache-1.1" ]
null
null
null
ansatz_list.py
iitis/variational_channel_fidelity
b69f5f412d642dc8a5630c8ddcc5d992bb87675b
[ "Apache-1.1" ]
null
null
null
from qiskit import QuantumCircuit # TODO: include description and rationale for using each of the ansatzes def ansatz_4q(n,theta, ansatz_no): circ = QuantumCircuit(n) if ansatz_no == 1: for t in theta: circ.ry(t[0], [0]) circ.rz(t[1], [0]) circ.ry(t[2], [0]) circ.ry(t[3], [1]) circ.rz(t[4], [1]) circ.ry(t[5], [1]) circ.ry(t[6], [2]) circ.rz(t[7], [2]) circ.ry(t[0], [2]) circ.ry(t[1], [3]) circ.rz(t[2], [3]) circ.ry(t[3], [3]) circ.cx([0],[1]) circ.cx([1],[2]) circ.cx([2],[3]) circ.cx([3],[0]) if ansatz_no == 2: for t in theta: circ.rz(t[0], [0]) circ.rz(t[1], [1]) circ.rz(t[2], [2]) circ.rz(t[3], [3]) circ.rx(t[4], [0]) circ.rx(t[5], [1]) circ.rx(t[6], [2]) circ.rx(t[7], [3]) circ.cz([0],[1]) circ.cz([1],[2]) circ.cz([2],[3]) circ.cz([3],[0]) if ansatz_no == 3: for t in theta: circ.ry(t[0], [0]) circ.ry(t[1], [1]) circ.ry(t[2], [2]) circ.ry(t[3], [3]) circ.rz(t[4], [0]) circ.rz(t[5], [1]) circ.rz(t[6], [2]) circ.rz(t[7], [3]) circ.cx([0],[1]) circ.cx([1],[2]) circ.cx([2],[3]) circ.cx([3],[0]) if ansatz_no == 4: for t in theta: circ.ry(t[0], [0]) circ.ry(t[1], [1]) circ.ry(t[2], [2]) circ.ry(t[3], [3]) circ.ry(t[4], [0]) circ.ry(t[5], [1]) circ.ry(t[6], [2]) circ.ry(t[7], [3]) circ.cx([0],[1]) circ.cx([1],[2]) circ.cx([2],[3]) circ.cx([3],[0]) circ.ry(t[0], [0]) circ.ry(t[1], [1]) circ.ry(t[2], [2]) circ.ry(t[3], [3]) circ.ry(t[4], [0]) circ.ry(t[5], [1]) circ.ry(t[6], [2]) circ.ry(t[7], [3]) return circ def ansatz_2q(n, theta, ansatz_no): """" Return one of the the considered 2-qubit anasatzes as a quantum circuit with fixed angels. 
""" circ = QuantumCircuit(n) if ansatz_no == 1: for t in theta: circ.rz(t[0], [0]) circ.ry(t[1], [0]) circ.rz(t[2], [1]) circ.ry(t[3], [1]) circ.cx([0], [1]) elif ansatz_no == 2: for t in theta: circ.rx(t[0], [0]) circ.rz(t[1], [0]) circ.rx(t[2], [1]) circ.rz(t[3], [1]) circ.cx([0], [1]) elif ansatz_no == 3: for t in theta: circ.rz(t[0], [0]) circ.ry(t[1], [0]) circ.rz(t[2], [1]) circ.ry(t[3], [1]) circ.cz([0], [1]) elif ansatz_no == 4: for t in theta: circ.rx(t[0], [0]) circ.rz(t[1], [0]) circ.rx(t[2], [1]) circ.rz(t[3], [1]) circ.cz([0], [1]) elif ansatz_no == 5: for t in theta: circ.ry(t[0], [0]) circ.rz(t[1], [0]) circ.ry(t[2], [0]) circ.ry(t[3], [1]) circ.rz(t[0], [1]) circ.ry(t[1], [1]) circ.cz([0], [1]) elif ansatz_no == 6: for t in theta: circ.ry(t[0], [0]) circ.rz(t[1], [0]) circ.ry(t[2], [0]) circ.ry(t[3], [1]) circ.rz(t[0], [1]) circ.ry(t[1], [1]) circ.cx([0], [1]) return circ
27.086331
94
0.355644
583
3,765
2.272727
0.089194
0.181132
0.211321
0.084528
0.720755
0.714717
0.708679
0.695849
0.673208
0.673208
0
0.093142
0.426826
3,765
138
95
27.282609
0.520853
0.043559
0
0.752066
0
0
0
0
0
0
0
0.007246
0
1
0.016529
false
0
0.008264
0
0.041322
0
0
0
0
null
0
1
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
d19bbd7b2169e9da3c9ecc44d59485e6b6cfcdc5
186
py
Python
data_models/AdaBoostClassifier.py
alexjmeyer92/ml-kit
225b3c45910c274bfa56e1927215d7aadd503b77
[ "MIT" ]
null
null
null
data_models/AdaBoostClassifier.py
alexjmeyer92/ml-kit
225b3c45910c274bfa56e1927215d7aadd503b77
[ "MIT" ]
null
null
null
data_models/AdaBoostClassifier.py
alexjmeyer92/ml-kit
225b3c45910c274bfa56e1927215d7aadd503b77
[ "MIT" ]
null
null
null
from pydantic import BaseModel from typing import Any, List class AdaBoostClassifierTrainingInput(BaseModel): targets: List[Any] samples: List[List[Any]] project_name: str
20.666667
49
0.763441
22
186
6.409091
0.636364
0.099291
0
0
0
0
0
0
0
0
0
0
0.166667
186
8
50
23.25
0.909677
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5