hexsha: string
size: int64
ext: string
lang: string
max_stars_repo_path: string
max_stars_repo_name: string
max_stars_repo_head_hexsha: string
max_stars_repo_licenses: list
max_stars_count: int64
max_stars_repo_stars_event_min_datetime: string
max_stars_repo_stars_event_max_datetime: string
max_issues_repo_path: string
max_issues_repo_name: string
max_issues_repo_head_hexsha: string
max_issues_repo_licenses: list
max_issues_count: int64
max_issues_repo_issues_event_min_datetime: string
max_issues_repo_issues_event_max_datetime: string
max_forks_repo_path: string
max_forks_repo_name: string
max_forks_repo_head_hexsha: string
max_forks_repo_licenses: list
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: float64
max_line_length: int64
alphanum_fraction: float64
qsc_code_num_words_quality_signal: int64
qsc_code_num_chars_quality_signal: float64
qsc_code_mean_word_length_quality_signal: float64
qsc_code_frac_words_unique_quality_signal: float64
qsc_code_frac_chars_top_2grams_quality_signal: float64
qsc_code_frac_chars_top_3grams_quality_signal: float64
qsc_code_frac_chars_top_4grams_quality_signal: float64
qsc_code_frac_chars_dupe_5grams_quality_signal: float64
qsc_code_frac_chars_dupe_6grams_quality_signal: float64
qsc_code_frac_chars_dupe_7grams_quality_signal: float64
qsc_code_frac_chars_dupe_8grams_quality_signal: float64
qsc_code_frac_chars_dupe_9grams_quality_signal: float64
qsc_code_frac_chars_dupe_10grams_quality_signal: float64
qsc_code_frac_chars_replacement_symbols_quality_signal: float64
qsc_code_frac_chars_digital_quality_signal: float64
qsc_code_frac_chars_whitespace_quality_signal: float64
qsc_code_size_file_byte_quality_signal: float64
qsc_code_num_lines_quality_signal: float64
qsc_code_num_chars_line_max_quality_signal: float64
qsc_code_num_chars_line_mean_quality_signal: float64
qsc_code_frac_chars_alphabet_quality_signal: float64
qsc_code_frac_chars_comments_quality_signal: float64
qsc_code_cate_xml_start_quality_signal: float64
qsc_code_frac_lines_dupe_lines_quality_signal: float64
qsc_code_cate_autogen_quality_signal: float64
qsc_code_frac_lines_long_string_quality_signal: float64
qsc_code_frac_chars_string_length_quality_signal: float64
qsc_code_frac_chars_long_word_length_quality_signal: float64
qsc_code_frac_lines_string_concat_quality_signal: float64
qsc_code_cate_encoded_data_quality_signal: float64
qsc_code_frac_chars_hex_words_quality_signal: float64
qsc_code_frac_lines_prompt_comments_quality_signal: float64
qsc_code_frac_lines_assert_quality_signal: float64
qsc_codepython_cate_ast_quality_signal: float64
qsc_codepython_frac_lines_func_ratio_quality_signal: float64
qsc_codepython_cate_var_zero_quality_signal: bool
qsc_codepython_frac_lines_pass_quality_signal: float64
qsc_codepython_frac_lines_import_quality_signal: float64
qsc_codepython_frac_lines_simplefunc_quality_signal: float64
qsc_codepython_score_lines_no_logic_quality_signal: float64
qsc_codepython_frac_lines_print_quality_signal: float64
qsc_code_num_words: int64
qsc_code_num_chars: int64
qsc_code_mean_word_length: int64
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: int64
qsc_code_frac_chars_top_3grams: int64
qsc_code_frac_chars_top_4grams: int64
qsc_code_frac_chars_dupe_5grams: int64
qsc_code_frac_chars_dupe_6grams: int64
qsc_code_frac_chars_dupe_7grams: int64
qsc_code_frac_chars_dupe_8grams: int64
qsc_code_frac_chars_dupe_9grams: int64
qsc_code_frac_chars_dupe_10grams: int64
qsc_code_frac_chars_replacement_symbols: int64
qsc_code_frac_chars_digital: int64
qsc_code_frac_chars_whitespace: int64
qsc_code_size_file_byte: int64
qsc_code_num_lines: int64
qsc_code_num_chars_line_max: int64
qsc_code_num_chars_line_mean: int64
qsc_code_frac_chars_alphabet: int64
qsc_code_frac_chars_comments: int64
qsc_code_cate_xml_start: int64
qsc_code_frac_lines_dupe_lines: int64
qsc_code_cate_autogen: int64
qsc_code_frac_lines_long_string: int64
qsc_code_frac_chars_string_length: int64
qsc_code_frac_chars_long_word_length: int64
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: int64
qsc_code_frac_chars_hex_words: int64
qsc_code_frac_lines_prompt_comments: int64
qsc_code_frac_lines_assert: int64
qsc_codepython_cate_ast: int64
qsc_codepython_frac_lines_func_ratio: int64
qsc_codepython_cate_var_zero: int64
qsc_codepython_frac_lines_pass: int64
qsc_codepython_frac_lines_import: int64
qsc_codepython_frac_lines_simplefunc: int64
qsc_codepython_score_lines_no_logic: int64
qsc_codepython_frac_lines_print: int64
effective: string
hits: int64
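The listing above is the column schema for this sample: each line pairs a column name with its storage dtype, and every record below repeats the same 113 fields in this order. As a minimal sketch of how records with this schema might be loaded and inspected, assuming the split is materialized as a Parquet file (the file name "sample.parquet" is a hypothetical placeholder, not taken from this dump):

# Minimal sketch, assuming the records below are stored as Parquet;
# "sample.parquet" is a placeholder name, not from the source.
import pandas as pd

df = pd.read_parquet("sample.parquet")

# The dtypes should line up with the name/dtype pairs listed above.
print(df.dtypes)

# Each record pairs one raw source file with per-file statistics and
# qsc_* quality signals.
row = df.iloc[0]
print(row["max_stars_repo_name"], row["max_stars_repo_path"])
print(row["content"][:120])
print(row["size"], row["avg_line_length"], row["alphanum_fraction"])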
a3fb2f42b23009d24c3827d423f2e6341a5aabbc
253
py
Python
functions/getNeighborhood.py
pvalls/startup-job-coding-challenge
841e044d15a45f2a8930d854a9589c0d04db3ce6
[ "MIT" ]
null
null
null
functions/getNeighborhood.py
pvalls/startup-job-coding-challenge
841e044d15a45f2a8930d854a9589c0d04db3ce6
[ "MIT" ]
null
null
null
functions/getNeighborhood.py
pvalls/startup-job-coding-challenge
841e044d15a45f2a8930d854a9589c0d04db3ce6
[ "MIT" ]
null
null
null
def getNeighborhood(city, neighborhood_name):
    neighborhood_names = [neighborhood_dictionary['neighborhood'] for neighborhood_dictionary in city]
    neighborhood_index = neighborhood_names.index(neighborhood_name)
    return city[neighborhood_index]
63.25
102
0.83004
26
253
7.769231
0.423077
0.237624
0.207921
0
0
0
0
0
0
0
0
0
0.102767
253
4
103
63.25
0.889868
0
0
0
0
0
0.047244
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
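The surface statistics stored with each record can be recomputed from its content field. Below is a sketch of plausible definitions that reproduce the record above (253 bytes over 4 lines gives avg_line_length 253/4 = 63.25); the exact newline and encoding conventions used when the dataset was built are assumptions here, so small off-by-one differences (e.g., max_line_length 102 vs. qsc_code_num_chars_line_max 103) are possible.

# Plausible definitions for the per-file statistics, inferred from the
# record above (253 bytes, 4 lines -> avg_line_length 63.25). The exact
# newline/encoding conventions of the dataset builders are assumptions.
def surface_stats(content: str) -> dict:
    lines = content.split("\n")
    size = len(content.encode("utf-8"))
    return {
        "size": size,
        "avg_line_length": size / len(lines),
        "max_line_length": max(len(line) for line in lines),
        "alphanum_fraction": sum(c.isalnum() for c in content) / len(content),
    }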
430ea42e692a878aae73b2663cd4d2ba4e96935c
208
py
Python
collections/__init__.py
cwaitt/zse
4330397ddf84dafaa0af7bddd25756e008cb3ff5
[ "MIT" ]
3
2021-07-08T19:38:40.000Z
2022-02-18T10:51:11.000Z
collections/__init__.py
cwaitt/zse
4330397ddf84dafaa0af7bddd25756e008cb3ff5
[ "MIT" ]
null
null
null
collections/__init__.py
cwaitt/zse
4330397ddf84dafaa0af7bddd25756e008cb3ff5
[ "MIT" ]
6
2020-09-29T18:19:54.000Z
2022-03-18T14:44:15.000Z
from zse.collections.framework import framework
from zse.collections.framework import get_ring_sizes
from zse.collections.framework import get_all_fws

__all__ = ['framework','get_ring_sizes','get_all_fws']
29.714286
54
0.836538
30
208
5.4
0.333333
0.12963
0.333333
0.5
0.648148
0.444444
0
0
0
0
0
0
0.081731
208
6
55
34.666667
0.848168
0
0
0
0
0
0.163462
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
4314f9791b2c86036416b87e0e855126c19f16a0
254
py
Python
src/diffjson/searchpath/exceptions.py
nfwprod/diffjson
ad7ea15c42e25f16f610491e95da3d0b2e35d654
[ "MIT" ]
1
2021-02-13T08:24:52.000Z
2021-02-13T08:24:52.000Z
src/diffjson/searchpath/exceptions.py
nfwstg/diffjson
52cb24e5629797b32c23e7971d36d111a6817121
[ "MIT" ]
3
2021-03-13T06:44:27.000Z
2021-04-21T16:45:43.000Z
src/diffjson/searchpath/exceptions.py
nfwstg/diffjson
52cb24e5629797b32c23e7971d36d111a6817121
[ "MIT" ]
4
2021-03-13T06:39:30.000Z
2021-04-24T04:52:36.000Z
class LocationPathFormatError(Exception):
    pass


class LocationStepFormatError(Exception):
    pass


class NodenameFormatError(Exception):
    pass


class PredicateFormatError(Exception):
    pass


class PredicatesFormatError(Exception):
    pass
13.368421
41
0.771654
20
254
9.8
0.4
0.331633
0.367347
0
0
0
0
0
0
0
0
0
0.169291
254
18
42
14.111111
0.92891
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
431eee2c301e5ca4c70a21998faa6062379687a4
88
py
Python
matchtransactions/__main__.py
phizzl3/MatchTransactions
5e497ca4609c0ffec27fabebaac9d466cf9cd708
[ "MIT" ]
null
null
null
matchtransactions/__main__.py
phizzl3/MatchTransactions
5e497ca4609c0ffec27fabebaac9d466cf9cd708
[ "MIT" ]
null
null
null
matchtransactions/__main__.py
phizzl3/MatchTransactions
5e497ca4609c0ffec27fabebaac9d466cf9cd708
[ "MIT" ]
null
null
null
import match_transactions

if __name__ == "__main__":
    match_transactions.main()
17.6
29
0.727273
9
88
6
0.666667
0.62963
0
0
0
0
0
0
0
0
0
0
0.181818
88
5
30
17.6
0.75
0
0
0
0
0
0.089888
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
43295704da9e9d752f509b2ebf92615ace1fecc1
177
py
Python
elm/data_loader/__init__.py
jinxu06/gsubsampling
2e0cace553cf43835709a34a11f9c15b08c15004
[ "Apache-2.0" ]
12
2021-06-11T12:17:58.000Z
2021-12-16T07:36:47.000Z
elm/data_loader/__init__.py
jinxu06/gsubsampling
2e0cace553cf43835709a34a11f9c15b08c15004
[ "Apache-2.0" ]
null
null
null
elm/data_loader/__init__.py
jinxu06/gsubsampling
2e0cace553cf43835709a34a11f9c15b08c15004
[ "Apache-2.0" ]
1
2022-01-31T19:39:06.000Z
2022-01-31T19:39:06.000Z
from .dsprites import DSpritesDataModule
from .multi_dsprites import MultiDSpritesDataModule
from .fashion_mnist import FashionMNISTDataModule
from .clevr import ClevrDataModule
44.25
51
0.892655
18
177
8.666667
0.611111
0.179487
0
0
0
0
0
0
0
0
0
0
0.084746
177
4
52
44.25
0.962963
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
4a331ce9d3fd7406dfc8c98aef532f1e2a6f8964
92
py
Python
diatonicPy.py
lzqlzzq/diatonicPy
bfe130a39f4f88b07ac0de85788feeb1510c2f6b
[ "MIT" ]
null
null
null
diatonicPy.py
lzqlzzq/diatonicPy
bfe130a39f4f88b07ac0de85788feeb1510c2f6b
[ "MIT" ]
null
null
null
diatonicPy.py
lzqlzzq/diatonicPy
bfe130a39f4f88b07ac0de85788feeb1510c2f6b
[ "MIT" ]
null
null
null
from pitch import GenericPitch, Pitch
from interval import AbstractInterval, Interval
18.4
48
0.804348
10
92
7.4
0.6
0
0
0
0
0
0
0
0
0
0
0
0.173913
92
4
49
23
0.973684
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
4a7633bec0f5bf5f501d9bea3970c51164d4aeb6
206
py
Python
facebook_hateful_memes_detector/models/text_models/__init__.py
faizanahemad/facebook-hateful-memes
1f7febf65f5fc4ed4aeb476d5383437f677fbc19
[ "MIT" ]
9
2020-07-28T20:33:04.000Z
2022-01-28T16:51:40.000Z
facebook_hateful_memes_detector/models/text_models/__init__.py
faizanahemad/facebook-hateful-memes
1f7febf65f5fc4ed4aeb476d5383437f677fbc19
[ "MIT" ]
3
2021-06-08T21:36:37.000Z
2021-09-08T02:03:07.000Z
facebook_hateful_memes_detector/models/text_models/__init__.py
faizanahemad/facebook-hateful-memes
1f7febf65f5fc4ed4aeb476d5383437f677fbc19
[ "MIT" ]
1
2020-08-26T08:13:25.000Z
2020-08-26T08:13:25.000Z
from .LangFeatures import LangFeaturesModel
from .Albert import AlbertClassifer
from .Fasttext1DCNN import Fasttext1DCNNModel
from .BERTClassifier import BERTClassifier
from .FasttextCNN import FasttextCNN
34.333333
45
0.878641
20
206
9.05
0.5
0
0
0
0
0
0
0
0
0
0
0.010753
0.097087
206
5
46
41.2
0.962366
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
43545c4cb5a4107f77f74f576c0be03f410cfab7
147
py
Python
src/crosswalk/__init__.py
jiaweih/CrossWalk
434664ce72cd8ccdf80ced89c8fb109cc0e4417f
[ "MIT" ]
1
2020-06-25T21:51:20.000Z
2020-06-25T21:51:20.000Z
src/crosswalk/__init__.py
jiaweih/CrossWalk
434664ce72cd8ccdf80ced89c8fb109cc0e4417f
[ "MIT" ]
null
null
null
src/crosswalk/__init__.py
jiaweih/CrossWalk
434664ce72cd8ccdf80ced89c8fb109cc0e4417f
[ "MIT" ]
2
2020-06-16T23:35:05.000Z
2020-06-25T21:51:22.000Z
# -*- coding: utf-8 -*-
"""
crosswalk
~~~~~~~~~

`crosswalk` package.
"""
from .data import *
from .model import *
from . import utils
13.363636
24
0.537415
15
147
5.266667
0.666667
0.253165
0
0
0
0
0
0
0
0
0
0.009009
0.244898
147
10
25
14.7
0.702703
0.435374
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
43598e503ccf14648f8551021f178fe6b6ace793
6489
py
Python
tests/cli/history/test_history_list.py
EddLabs/eddington-static
cdd1d9514c4eea1bd06c24894b3922e6cc3fb1f5
[ "Apache-2.0" ]
null
null
null
tests/cli/history/test_history_list.py
EddLabs/eddington-static
cdd1d9514c4eea1bd06c24894b3922e6cc3fb1f5
[ "Apache-2.0" ]
null
null
null
tests/cli/history/test_history_list.py
EddLabs/eddington-static
cdd1d9514c4eea1bd06c24894b3922e6cc3fb1f5
[ "Apache-2.0" ]
null
null
null
import datetime

from pytest_cases import THIS_MODULE, parametrize_with_cases

from statue.cli import statue_cli
from tests.util import evaluation_mock


def case_empty_history():
    additional_flags = []
    evaluations = []
    output = "No previous evaluations.\n"
    return additional_flags, evaluations, output


def case_one_successful_evaluation():
    total_commands = 4
    timestamp1 = datetime.datetime(
        year=2020, month=4, day=15, hour=12, minute=7, second=42
    )
    additional_flags = []
    evaluations = [
        evaluation_mock(
            timestamp=timestamp1,
            successful_commands=total_commands,
            total_commands=total_commands,
            total_execution_duration=0.234,
        )
    ]
    output = "1) 04/15/2020, 12:07:42 - Success (4/4 successful, 0.23 seconds)\n"
    return additional_flags, evaluations, output


def case_one_failed_evaluation():
    additional_flags = []
    timestamp1 = datetime.datetime(
        year=2020, month=5, day=12, hour=14, minute=8, second=23
    )
    evaluations = [
        evaluation_mock(
            timestamp=timestamp1,
            successful_commands=3,
            total_commands=4,
            total_execution_duration=0.591,
        )
    ]
    output = "1) 05/12/2020, 14:08:23 - Failure (3/4 successful, 0.59 seconds)\n"
    return additional_flags, evaluations, output


def case_two_successful_evaluations():
    total_commands1, total_commands2 = 4, 7
    additional_flags = []
    timestamp1, timestamp2 = (
        datetime.datetime(year=2020, month=4, day=15, hour=12, minute=7, second=42),
        datetime.datetime(year=2020, month=4, day=14, hour=18, minute=59, second=11),
    )
    evaluations = [
        evaluation_mock(
            timestamp=timestamp1,
            successful_commands=total_commands1,
            total_commands=total_commands1,
            total_execution_duration=0.234,
        ),
        evaluation_mock(
            timestamp=timestamp2,
            successful_commands=total_commands2,
            total_commands=total_commands2,
            total_execution_duration=0.189,
        ),
    ]
    output = (
        "1) 04/15/2020, 12:07:42 - Success (4/4 successful, 0.23 seconds)\n"
        "2) 04/14/2020, 18:59:11 - Success (7/7 successful, 0.19 seconds)\n"
    )
    return additional_flags, evaluations, output


def case_one_failed_and_one_successful():
    total_commands = 4
    additional_flags = []
    timestamp1, timestamp2 = (
        datetime.datetime(year=2020, month=4, day=15, hour=12, minute=7, second=42),
        datetime.datetime(year=2020, month=4, day=14, hour=18, minute=59, second=11),
    )
    evaluations = [
        evaluation_mock(
            timestamp=timestamp1,
            successful_commands=total_commands,
            total_commands=total_commands,
            total_execution_duration=0.234,
        ),
        evaluation_mock(
            timestamp=timestamp2,
            successful_commands=3,
            total_commands=7,
            total_execution_duration=0.189,
        ),
    ]
    output = (
        "1) 04/15/2020, 12:07:42 - Success (4/4 successful, 0.23 seconds)\n"
        "2) 04/14/2020, 18:59:11 - Failure (3/7 successful, 0.19 seconds)\n"
    )
    return additional_flags, evaluations, output


def case_three_evaluations():
    total_commands1, total_commands2 = 4, 10
    additional_flags = []
    timestamp1, timestamp2, timestamp3 = (
        datetime.datetime(year=2020, month=4, day=15, hour=12, minute=7, second=42),
        datetime.datetime(year=2020, month=4, day=14, hour=18, minute=59, second=11),
        datetime.datetime(year=2020, month=4, day=14, hour=11, minute=31, second=22),
    )
    evaluations = [
        evaluation_mock(
            timestamp=timestamp1,
            successful_commands=total_commands1,
            total_commands=total_commands1,
            total_execution_duration=0.234,
        ),
        evaluation_mock(
            timestamp=timestamp2,
            successful_commands=3,
            total_commands=7,
            total_execution_duration=0.189,
        ),
        evaluation_mock(
            timestamp=timestamp3,
            successful_commands=total_commands2,
            total_commands=total_commands2,
            total_execution_duration=0.03,
        ),
    ]
    output = (
        "1) 04/15/2020, 12:07:42 - Success (4/4 successful, 0.23 seconds)\n"
        "2) 04/14/2020, 18:59:11 - Failure (3/7 successful, 0.19 seconds)\n"
        "3) 04/14/2020, 11:31:22 - Success (10/10 successful, 0.03 seconds)\n"
    )
    return additional_flags, evaluations, output


def case_head_flag():
    total_commands1, total_commands2 = 4, 10
    additional_flags = ["--head=2"]
    timestamp1, timestamp2, timestamp3 = (
        datetime.datetime(year=2020, month=4, day=15, hour=12, minute=7, second=42),
        datetime.datetime(year=2020, month=4, day=14, hour=18, minute=59, second=11),
        datetime.datetime(year=2020, month=4, day=14, hour=11, minute=31, second=22),
    )
    evaluations = [
        evaluation_mock(
            timestamp=timestamp1,
            successful_commands=total_commands1,
            total_commands=total_commands1,
            total_execution_duration=0.234,
        ),
        evaluation_mock(
            timestamp=timestamp2,
            successful_commands=3,
            total_commands=7,
            total_execution_duration=0.189,
        ),
        evaluation_mock(
            timestamp=timestamp3,
            successful_commands=total_commands2,
            total_commands=total_commands2,
            total_execution_duration=0.03,
        ),
    ]
    output = (
        "1) 04/15/2020, 12:07:42 - Success (4/4 successful, 0.23 seconds)\n"
        "2) 04/14/2020, 18:59:11 - Failure (3/7 successful, 0.19 seconds)\n"
    )
    return additional_flags, evaluations, output


@parametrize_with_cases(
    argnames=["additional_flags", "evaluations", "output"],
    cases=THIS_MODULE,
)
def test_history_list(
    additional_flags,
    evaluations,
    output,
    cli_runner,
    mock_build_configuration_from_file,
):
    configuration = mock_build_configuration_from_file.return_value
    configuration.cache.all_evaluations = evaluations
    result = cli_runner.invoke(statue_cli, ["history", "list", *additional_flags])
    assert (
        result.exit_code == 0
    ), f"Execution failed with the following error: '{result.exception}'"
    assert result.output == output
31.965517
85
0.631838
758
6489
5.216359
0.135884
0.065756
0.060698
0.072838
0.78958
0.770865
0.745574
0.729894
0.706626
0.667425
0
0.100397
0.263215
6489
202
86
32.123762
0.726626
0
0
0.666667
0
0.062147
0.133919
0
0
0
0
0
0.011299
1
0.045198
false
0
0.022599
0
0.107345
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
43a427e02616b8a21337aa2eede1057f015aafe0
12776
py
Python
IOverticalGrid.py
nilodna/model2roms
14a5494303d8539bc7c7b018f8d56336c649bf8c
[ "MIT" ]
null
null
null
IOverticalGrid.py
nilodna/model2roms
14a5494303d8539bc7c7b018f8d56336c649bf8c
[ "MIT" ]
null
null
null
IOverticalGrid.py
nilodna/model2roms
14a5494303d8539bc7c7b018f8d56336c649bf8c
[ "MIT" ]
1
2021-06-29T14:58:23.000Z
2021-06-29T14:58:23.000Z
from datetime import datetime

import numpy as np
import warnings

__author__ = 'Trond Kristiansen'
__email__ = 'me@trondkristiansen.com'
__created__ = datetime(2008, 8, 15)
__modified__ = datetime(2015, 7, 25)
__version__ = "1.5"
__status__ = "Development"

'''
Various vertical coordinates

Presently, only ocean s-coordinates are supported.
Future plans will be to include all of the vertical coordinate systems
defined by the CF conventions.

vgrid.py function copied from https://github.com/kshedstrom/pyroms (Frederic Castruccio)
'''


def calculateVgrid(self):
    print(("--->Setting up vertical coordinates using self.vtransform: %s self.vstretching: %s"%(self.vtransform,self.vstretching)))
    if self.vtransform == 1:
        vgrid = s_coordinate(self.h, self.theta_b, self.theta_s, self.tcline,
                             self.nlevels, self.vtransform, self.vstretching, zeta=None)
    elif self.vtransform == 2 and self.vstretching == 2:
        vgrid = s_coordinate_2(self.h, self.theta_b, self.theta_s, self.tcline,
                               self.nlevels, self.vtransform, self.vstretching, zeta=None)
    elif self.vtransform == 2 and self.vstretching == 4:
        vgrid = s_coordinate_4(self.h, self.theta_b, self.theta_s, self.tcline,
                               self.nlevels, self.vtransform, self.vstretching, zeta=None)
    else:
        raise Warning('Unknow vertical transformation Vtrans')

    self.z_r = vgrid.z_r[0,:]
    self.z_w = vgrid.z_w[0,:]
    self.Cs_rho = vgrid.Cs_r
    self.Cs_w = vgrid.Cs_w
    self.s_rho = vgrid.s_rho
    self.s_w = vgrid.s_w


class s_coordinate(object):
    """
    Song and Haidvogel (1994) vertical coordinate transformation
    (Vtransform=1) and stretching functions (Vstretching=1).

    return an object that can be indexed to return depths

    s = s_coordinate(h, theta_b, theta_s, Tcline, N)
    """

    def __init__(self, h, theta_b, theta_s, tcline, N, vtransform, vstretching, zeta=None):
        self.h = np.asarray(h)
        self.hmin = h.min()
        self.theta_b = theta_b
        self.theta_s = theta_s
        self.tcline = tcline
        self.N = int(N)
        self.Np = self.N+1
        self.vtransform = vtransform
        self.vstretching = vstretching

        self.hc = min(self.hmin, self.tcline)

        self.Vtrans = 1

        if self.vtransform==1:
            if (self.tcline > self.hmin):
                warnings.warn('Vertical transformation parameters are not defined correctly in either gridid.txt or in the history files: \n Tcline = %d and hmin = %d. \n You need to make sure that Tcline <= hmin when using transformation 1.'
                              %(self.Tcline,self.hmin))

        self.c1 = 1.0
        self.c2 = 2.0
        self.p5 = 0.5

        if zeta is None:
            self.zeta = np.zeros(h.shape)
        else:
            self.zeta = zeta

        self._get_s_rho()
        self._get_s_w()
        self._get_Cs_r()
        self._get_Cs_w()

        self.z_r = z_r(self.h, self.hc, self.N, self.s_rho, self.Cs_r,
                       self.zeta, self.Vtrans)
        self.z_w = z_w(self.h, self.hc, self.Np, self.s_w, self.Cs_w,
                       self.zeta, self.Vtrans)

    def _get_s_rho(self):
        lev = np.arange(1,self.N+1,1)
        ds = 1.0 / self.N
        self.s_rho = -self.c1 + (lev - self.p5) * ds

    def _get_s_w(self):
        lev = np.arange(0,self.Np,1)
        ds = 1.0 / (self.Np-1)
        self.s_w = -self.c1 + lev * ds

    def _get_Cs_r(self):
        if (self.theta_s >= 0):
            Ptheta = np.sinh(self.theta_s * self.s_rho) / np.sinh(self.theta_s)
            Rtheta = np.tanh(self.theta_s * (self.s_rho + self.p5)) / \
                     (self.c2 * np.tanh(self.p5 * self.theta_s)) - self.p5
            self.Cs_r = (self.c1 - self.theta_b) * Ptheta + self.theta_b * Rtheta
        else:
            self.Cs_r = self.s_rho

    def _get_Cs_w(self):
        if (self.theta_s >= 0):
            Ptheta = np.sinh(self.theta_s * self.s_w) / np.sinh(self.theta_s)
            Rtheta = np.tanh(self.theta_s * (self.s_w + self.p5)) / \
                     (self.c2 * np.tanh(self.p5 * self.theta_s)) - self.p5
            self.Cs_w = (self.c1 - self.theta_b) * Ptheta + self.theta_b * Rtheta
        else:
            self.Cs_w = self.s_w


class s_coordinate_2(s_coordinate):
    """
    A. Shchepetkin (2005) UCLA-ROMS vertical coordinate transformation
    (Vtransform=2) and stretching functions (Vstretching=2).

    return an object that can be indexed to return depths

    s = s_coordinate_2(h, theta_b, theta_s, Tcline, N)
    """

    def __init__(self, h, theta_b, theta_s, tcline, N, vtransform, vstretching, zeta=None):
        self.h = np.asarray(h)
        self.hmin = h.min()
        self.theta_b = theta_b
        self.theta_s = theta_s
        self.tcline = tcline
        self.N = int(N)
        self.Np = self.N+1
        self.vtransform = vtransform
        self.vstretching = vstretching

        self.hc = self.tcline

        self.Vtrans = 2

        self.Aweight = 1.0
        self.Bweight = 1.0

        self.c1 = 1.0
        self.c2 = 2.0
        self.p5 = 0.5

        if zeta is None:
            self.zeta = np.zeros(h.shape)
        else:
            self.zeta = zeta

        self._get_s_rho()
        self._get_s_w()
        self._get_Cs_r()
        self._get_Cs_w()

        self.z_r = z_r(self.h, self.hc, self.N, self.s_rho, self.Cs_r,
                       self.zeta, self.Vtrans)
        self.z_w = z_w(self.h, self.hc, self.Np, self.s_w, self.Cs_w,
                       self.zeta, self.Vtrans)

    def _get_s_rho(self):
        super(s_coordinate_2, self)._get_s_rho()

    def _get_s_w(self):
        super(s_coordinate_2, self)._get_s_w()

    def _get_Cs_r(self):
        if (self.theta_s >= 0):
            Csur = (self.c1 - np.cosh(self.theta_s * self.s_rho)) / \
                   (np.cosh(self.theta_s) - self.c1)
            if (self.theta_b >= 0):
                Cbot = np.sinh(self.theta_b * (self.s_rho + self.c1)) / \
                       np.sinh(self.theta_b) - self.c1
                Cweight = (self.s_rho + self.c1)**self.Aweight * \
                          (self.c1 + (self.Aweight / self.Bweight) * \
                          (self.c1 - (self.s_rho + self.c1)**self.Bweight))
                self.Cs_r = Cweight * Csur + (self.c1 - Cweight) * Cbot
            else:
                self.Cs_r = Csur
        else:
            self.Cs_r = self.s_rho

    def _get_Cs_w(self):
        if (self.theta_s >= 0):
            Csur = (self.c1 - np.cosh(self.theta_s * self.s_w)) / \
                   (np.cosh(self.theta_s) - self.c1)
            if (self.theta_b >= 0):
                Cbot = np.sinh(self.theta_b * (self.s_w + self.c1)) / \
                       np.sinh(self.theta_b) - self.c1
                Cweight = (self.s_w + self.c1)**self.Aweight * \
                          (self.c1 + (self.Aweight / self.Bweight) * \
                          (self.c1 - (self.s_w + self.c1)**self.Bweight))
                self.Cs_w = Cweight * Csur + (self.c1 - Cweight) * Cbot
            else:
                self.Cs_w = Csur
        else:
            self.Cs_w = self.s_w


class s_coordinate_4(s_coordinate):
    """
    A. Shchepetkin (2005) UCLA-ROMS vertical coordinate transformation
    (Vtransform=2) and stretching functions (Vstretching=4).

    return an object that can be indexed to return depths

    s = s_coordinate_4(h, theta_b, theta_s, Tcline, N)
    """

    def __init__(self, h, theta_b, theta_s, tcline, N, vtransform, vstretching, zeta=None):
        self.h = np.asarray(h)
        self.hmin = h.min()
        self.theta_b = theta_b
        self.theta_s = theta_s
        self.tcline = tcline
        self.N = int(N)
        self.Np = self.N+1
        self.vtransform = vtransform
        self.vstretching = vstretching

        self.hc = self.tcline

        self.Vtrans = 4

        self.c1 = 1.0
        self.c2 = 2.0
        self.p5 = 0.5

        if zeta is None:
            self.zeta = np.zeros(h.shape)
        else:
            self.zeta = zeta

        self._get_s_rho()
        self._get_s_w()
        self._get_Cs_r()
        self._get_Cs_w()

        self.z_r = z_r(self.h, self.hc, self.N, self.s_rho, self.Cs_r,
                       self.zeta, self.Vtrans)
        self.z_w = z_w(self.h, self.hc, self.Np, self.s_w, self.Cs_w,
                       self.zeta, self.Vtrans)

    def _get_s_rho(self):
        super(s_coordinate_4, self)._get_s_rho()

    def _get_s_w(self):
        super(s_coordinate_4, self)._get_s_w()

    def _get_Cs_r(self):
        if (self.theta_s > 0):
            Csur = (self.c1 - np.cosh(self.theta_s * self.s_rho)) / \
                   (np.cosh(self.theta_s) - self.c1)
        else:
            Csur = -self.s_rho**2
        if (self.theta_b > 0):
            Cbot = (np.exp(self.theta_b * Csur) - self.c1 ) / \
                   (self.c1 - np.exp(-self.theta_b))
            self.Cs_r = Cbot
        else:
            self.Cs_r = Csur

    def _get_Cs_w(self):
        if (self.theta_s > 0):
            Csur = (self.c1 - np.cosh(self.theta_s * self.s_w)) / \
                   (np.cosh(self.theta_s) - self.c1)
        else:
            Csur = -self.s_w**2
        if (self.theta_b > 0):
            Cbot = (np.exp(self.theta_b * Csur) - self.c1 ) / \
                   ( self.c1 - np.exp(-self.theta_b) )
            self.Cs_w = Cbot
        else:
            self.Cs_w = Csur


class z_r(object):
    """
    return an object that can be indexed to return depths of rho point

    z_r = z_r(h, hc, N, s_rho, Cs_r, zeta, Vtrans)
    """

    def __init__(self, h, hc, N, s_rho, Cs_r, zeta, Vtrans):
        self.h = h
        self.hc = hc
        self.N = N
        self.s_rho = s_rho
        self.Cs_r = Cs_r
        self.zeta = zeta
        self.Vtrans = Vtrans

    def __getitem__(self, key):
        if isinstance(key, tuple) and len(self.zeta.shape) > len(self.h.shape):
            zeta = self.zeta[key[0]]
            res_index = (slice(None),) + key[1:]
        elif len(self.zeta.shape) > len(self.h.shape):
            zeta = self.zeta[key]
            res_index = slice(None)
        else:
            zeta = self.zeta
            res_index = key

        if self.h.ndim == zeta.ndim:       # Assure a time-dimension exists
            zeta = zeta[np.newaxis, :]

        ti = zeta.shape[0]
        z_r = np.empty((ti, self.N) + self.h.shape, 'd')
        if self.Vtrans == 1:
            for n in range(ti):
                for k in range(self.N):
                    z0 = self.hc * self.s_rho[k] + (self.h - self.hc) * self.Cs_r[k]
                    z_r[n,k,:] = z0 + zeta[n,:] * (1.0 + z0 / self.h)
        elif self.Vtrans == 2 or self.Vtrans == 4:
            for n in range(ti):
                for k in range(self.N):
                    z0 = (self.hc * self.s_rho[k] + self.h * self.Cs_r[k]) / \
                         (self.hc + self.h)
                    z_r[n,k,:] = zeta[n,:] + (zeta[n,:] + self.h) * z0

        return np.squeeze(z_r[res_index])


class z_w(object):
    """
    return an object that can be indexed to return depths of w point

    z_w = z_w(h, hc, Np, s_w, Cs_w, zeta, Vtrans)
    """

    def __init__(self, h, hc, Np, s_w, Cs_w, zeta, Vtrans):
        self.h = h
        self.hc = hc
        self.Np = Np
        self.s_w = s_w
        self.Cs_w = Cs_w
        self.zeta = zeta
        self.Vtrans = Vtrans

    def __getitem__(self, key):
        if isinstance(key, tuple) and len(self.zeta.shape) > len(self.h.shape):
            zeta = self.zeta[key[0]]
            res_index = (slice(None),) + key[1:]
        elif len(self.zeta.shape) > len(self.h.shape):
            zeta = self.zeta[key]
            res_index = slice(None)
        else:
            zeta = self.zeta
            res_index = key

        if self.h.ndim == zeta.ndim:       # Assure a time-dimension exists
            zeta = zeta[np.newaxis, :]

        ti = zeta.shape[0]
        z_w = np.empty((ti, self.Np) + self.h.shape, 'd')
        if self.Vtrans == 1:
            for n in range(ti):
                for k in range(self.Np):
                    z0 = self.hc * self.s_w[k] + (self.h - self.hc) * self.Cs_w[k]
                    z_w[n,k,:] = z0 + zeta[n,:] * (1.0 + z0 / self.h)
        elif self.Vtrans == 2 or self.Vtrans == 4:
            for n in range(ti):
                for k in range(self.Np):
                    z0 = (self.hc * self.s_w[k] + self.h * self.Cs_w[k]) / \
                         (self.hc + self.h)
                    z_w[n,k,:] = zeta[n,:] + (zeta[n,:] + self.h) * z0

        return np.squeeze(z_w[res_index])


def get_z_levels(self):
    """
    Get a list of all the variables contained in netCDF file "filename"
    """
    self.z_r=-self.h

    if len(self.z_r)==0:
        print(("No depth matrix found in file %s"%(self.selffilename)))
33.270833
268
0.542423
1,911
12776
3.4427
0.100471
0.068399
0.04256
0.036176
0.77945
0.766378
0.74373
0.737346
0.736738
0.70725
0
0.020698
0.326863
12776
383
269
33.357702
0.744302
0.081559
0
0.671642
0
0.003731
0.036893
0.002035
0
0
0
0
0
1
0.078358
false
0
0.011194
0
0.115672
0.007463
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
43b896a8470f7ae6f3baa1cfae375d512fd3ed3e
109
py
Python
python/803.bricks-falling-when-hit.py
stavanmehta/leetcode
1224e43ce29430c840e65daae3b343182e24709c
[ "Apache-2.0" ]
null
null
null
python/803.bricks-falling-when-hit.py
stavanmehta/leetcode
1224e43ce29430c840e65daae3b343182e24709c
[ "Apache-2.0" ]
null
null
null
python/803.bricks-falling-when-hit.py
stavanmehta/leetcode
1224e43ce29430c840e65daae3b343182e24709c
[ "Apache-2.0" ]
null
null
null
class Solution:
    def hitBricks(self, grid: List[List[int]], hits: List[List[int]]) -> List[int]:
27.25
83
0.605505
15
109
4.4
0.6
0.318182
0.333333
0
0
0
0
0
0
0
0
0
0.211009
109
3
84
36.333333
0.767442
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
43c26f81b5aae5c41524f022af1322b2973bc903
296
py
Python
featuretools/tests/primitive_tests/primitives_to_install/custom_mean.py
Featuretools/featuretools
365abd9519d2eec8eec75936644a7b865d4ef40a
[ "BSD-3-Clause" ]
4299
2017-09-09T02:41:29.000Z
2019-10-10T05:41:11.000Z
featuretools/tests/primitive_tests/primitives_to_install/custom_mean.py
Featuretools/featuretools
365abd9519d2eec8eec75936644a7b865d4ef40a
[ "BSD-3-Clause" ]
729
2017-09-22T01:54:48.000Z
2019-10-09T15:36:17.000Z
featuretools/tests/primitive_tests/primitives_to_install/custom_mean.py
Featuretools/featuretools
365abd9519d2eec8eec75936644a7b865d4ef40a
[ "BSD-3-Clause" ]
532
2017-09-13T14:18:22.000Z
2019-10-08T06:13:46.000Z
from woodwork.column_schema import ColumnSchema

from featuretools.primitives.base import AggregationPrimitive


class CustomMean(AggregationPrimitive):
    name = "custom_mean"
    input_types = [ColumnSchema(semantic_tags={"numeric"})]
    return_type = ColumnSchema(semantic_tags={"numeric"})
29.6
61
0.793919
30
296
7.633333
0.733333
0.174672
0.209607
0.270742
0
0
0
0
0
0
0
0
0.114865
296
9
62
32.888889
0.874046
0
0
0
0
0
0.084459
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
43c409a670270cb0ddec3af078a7d4bdaaa5db3e
4359
py
Python
utils/mean_dist_calculator.py
hubmapconsortium/vccf-visualization-2022-
77538cbccd50d54554f06880b2b004bfa52cb882
[ "MIT" ]
null
null
null
utils/mean_dist_calculator.py
hubmapconsortium/vccf-visualization-2022-
77538cbccd50d54554f06880b2b004bfa52cb882
[ "MIT" ]
null
null
null
utils/mean_dist_calculator.py
hubmapconsortium/vccf-visualization-2022-
77538cbccd50d54554f06880b2b004bfa52cb882
[ "MIT" ]
1
2022-03-25T14:37:42.000Z
2022-03-25T14:37:42.000Z
import statistics

target_root_path = r"G:\GE\skin_12_data"

print("Region", end='')
for i in range(5):
    print("\t\tAverage\tMedian\t%", end='')
print()

cell_types = ['all', 'CD68', 'T-Helper', 'T-Killer', 'T-Reg']
region_list = [11, 3, 8, 9, 1, 12, 5, 4, 2, 10, 7]
for region_id in region_list:
    target_file_path = target_root_path + rf"\region_{region_id}\nuclei.csv"

    cell_file = open(target_file_path, 'r')
    cell_file.readline()
    cell_lines = cell_file.readlines()

    distances = {'all': []}
    for line in cell_lines:
        content = line.split(',')
        type = content[4]
        distance = content[5]
        if type in cell_types:
            if type not in distances:
                distances[type] = []
            else:
                distances[type].append(float(distance))
            distances['all'].append(float(distance))
    # print("Region ", region_id)
    # for key in distances:
    #     print("\t", key)
    #     print("\t\tAverage Distance: ", statistics.mean(distances[key]))
    #     print("\t\tMedian Distance: ", statistics.median(distances[key]))
    print(region_id, end='')
    for key in cell_types:
        print("\t", key, end='')
        if key in distances:
            dis_mean = statistics.mean(distances[key])
            dis_median = statistics.median(distances[key])
            percentage = len(distances[key]) / len(distances['all'])
        else:
            dis_mean = 0
            dis_median = 0
            percentage = 0
        print(f"\t{dis_mean}", end='')
        print(f"\t{dis_median}", end='')
        print(f"\t{percentage}", end='')
    print()

#
# distances = {'all': []}
# for region_id in [1, 3, 6, 8, 9, 11]:
#     target_file_path = target_root_path + rf"\region_{region_id}\nuclei.csv"
#
#     cell_file = open(target_file_path, 'r')
#     cell_file.readline()
#     cell_lines = cell_file.readlines()
#
#     for line in cell_lines:
#         content = line.split(',')
#         type = content[4]
#         distance = content[5]
#         if type not in distances:
#             distances[type] = []
#         else:
#             distances[type].append(float(distance))
#         distances['all'].append(float(distance))
#
# print("Sun", end='')
# for key in ['all', 'CD68', 'T-Reg', 'T-Helper']:
#     print("\t", key, end='')
#     print(f"\t{statistics.mean(distances[key])}", end='')
#     print(f"\t{statistics.median(distances[key])}", end='')
#     print(f"\t{len(distances[key]) / len(distances['all'])}", end='')
# print()
#
# distances = {'all': []}
# for region_id in [2, 4, 5, 7, 10, 12]:
#     target_file_path = target_root_path + rf"\region_{region_id}\nuclei.csv"
#
#     cell_file = open(target_file_path, 'r')
#     cell_file.readline()
#     cell_lines = cell_file.readlines()
#
#     for line in cell_lines:
#         content = line.split(',')
#         type = content[4]
#         distance = content[5]
#         if type not in distances:
#             distances[type] = []
#         else:
#             distances[type].append(float(distance))
#         distances['all'].append(float(distance))
#
# print("Non-sun", end='')
# for key in ['all', 'CD68', 'T-Reg', 'T-Helper']:
#     print("\t", key, end='')
#     print(f"\t{statistics.mean(distances[key])}", end='')
#     print(f"\t{statistics.median(distances[key])}", end='')
#     print(f"\t{len(distances[key]) / len(distances['all'])}", end='')
# print()
#
# distances = {'all': []}
# for region_id in region_list:
#     target_file_path = target_root_path + rf"\region_{region_id}\nuclei.csv"
#
#     cell_file = open(target_file_path, 'r')
#     cell_file.readline()
#     cell_lines = cell_file.readlines()
#
#     for line in cell_lines:
#         content = line.split(',')
#         type = content[4]
#         distance = content[5]
#         if type in cell_types:
#             if type not in distances:
#                 distances[type] = []
#             else:
#                 distances[type].append(float(distance))
#             distances['all'].append(float(distance))
#
# print("All ", end='')
# for key in cell_types:
#     print("\t", key, end='')
#     print(f"\t{statistics.mean(distances[key])}", end='')
#     print(f"\t{statistics.median(distances[key])}", end='')
#     print(f"\t{len(distances[key]) / len(distances['all'])}", end='')
# print()
33.022727
78
0.560679
551
4359
4.297641
0.127042
0.054054
0.035473
0.046453
0.779139
0.779139
0.76647
0.76647
0.752534
0.752534
0
0.015049
0.25304
4359
131
79
33.274809
0.712224
0.635467
0
0.102564
0
0
0.104388
0.034574
0
0
0
0
0
1
0
false
0
0.025641
0
0.025641
0.230769
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
43ca4cbc5cd3a61e33f0a500db093e24b50f7183
899
py
Python
dspylib/tests/date/test_time_units_from_seconds.py
dsears/dspylib
c6b74fa0c530ce4c29fbdd09bfb1b463722bb24d
[ "Apache-2.0" ]
1
2017-03-24T14:59:17.000Z
2017-03-24T14:59:17.000Z
dspylib/tests/date/test_time_units_from_seconds.py
dsears/dspylib
c6b74fa0c530ce4c29fbdd09bfb1b463722bb24d
[ "Apache-2.0" ]
1
2017-03-24T21:51:18.000Z
2017-03-24T21:51:18.000Z
dspylib/tests/date/test_time_units_from_seconds.py
dsears/dspylib
c6b74fa0c530ce4c29fbdd09bfb1b463722bb24d
[ "Apache-2.0" ]
null
null
null
from dspylib.date import time_units_from_seconds


def test_time_units_from_seconds():
    assert time_units_from_seconds(0) == "0 seconds"
    assert time_units_from_seconds(1) == "1 second"
    assert time_units_from_seconds(89) == "1 minute"
    assert time_units_from_seconds(90) == "2 minutes"
    assert time_units_from_seconds(59*60) == "59 minutes"
    assert time_units_from_seconds(60*60) == "1 hour"
    assert time_units_from_seconds(23*3600) == "23 hours"
    assert time_units_from_seconds(24*3600) == "1 day"
    assert time_units_from_seconds(6*86400) == "6 days"
    assert time_units_from_seconds(7*86400) == "1 week"
    assert time_units_from_seconds(30*86400) == "4 weeks"
    assert time_units_from_seconds(32*86400) == "1 month"
    assert time_units_from_seconds(350*86400) == "11 months"
    assert time_units_from_seconds(365*86400) == "1 year"
    assert time_units_from_seconds(730*86400) == "2 years"
47.315789
58
0.758621
145
899
4.344828
0.296552
0.242857
0.350794
0.539683
0.663492
0.209524
0
0
0
0
0
0.119441
0.124583
899
18
59
49.944444
0.681067
0
0
0
0
0
0.123471
0
0
0
0
0
0.882353
1
0.058824
true
0
0.058824
0
0.117647
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
0
0
0
0
0
5
78fbe649afa7e387cb094a7564eff3eab0497c39
415
py
Python
ver1_0/openassembly/redis_func.py
fragro/Open-Assembly
e9679ff5e7ae9881fa5781d763288ed2f40b014d
[ "BSD-3-Clause" ]
1
2015-11-05T08:22:19.000Z
2015-11-05T08:22:19.000Z
ver1_0/openassembly/redis_func.py
fragro/Open-Assembly
e9679ff5e7ae9881fa5781d763288ed2f40b014d
[ "BSD-3-Clause" ]
null
null
null
ver1_0/openassembly/redis_func.py
fragro/Open-Assembly
e9679ff5e7ae9881fa5781d763288ed2f40b014d
[ "BSD-3-Clause" ]
1
2018-02-03T18:25:41.000Z
2018-02-03T18:25:41.000Z
import redis

import settings


def redis_client():
    """Get a redis client."""
    if settings.BROKER_PASSWORD != '':
        return redis.Redis(settings.BROKER_HOST, settings.BROKER_PORT,
                           settings.BROKER_DB,
                           password=settings.BROKER_PASSWORD,
                           socket_timeout=0.5)
    else:
        return redis.Redis(settings.BROKER_HOST, settings.BROKER_PORT,
                           settings.BROKER_DB, socket_timeout=0.5)
27.666667
87
0.696386
52
415
5.346154
0.365385
0.402878
0.158273
0.172662
0.489209
0.489209
0.489209
0.489209
0.489209
0.489209
0
0.012085
0.20241
415
14
88
29.642857
0.827795
0.045783
0
0.222222
0
0
0
0
0
0
0
0
0
1
0.111111
true
0.222222
0.222222
0
0.555556
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
1
0
0
5
600a4350c1aa8387dd1637ae1d3e2a2e21a74e58
32
py
Python
python/testData/refactoring/move/packageImport/before/src/lib1/mod1.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/refactoring/move/packageImport/before/src/lib1/mod1.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/refactoring/move/packageImport/before/src/lib1/mod1.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
def k(x):
    return lambda y: x
16
22
0.59375
7
32
2.714286
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.28125
32
2
22
16
0.826087
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
5
603520677d60d0d936a7c349f10c7101dc470bd5
71
py
Python
convert_codemeta/__init__.py
caltechlibrary/convert_codemeta
337e39338bcce4f0f201fe03500061ab14b2ae5c
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
2
2019-11-14T15:41:49.000Z
2021-04-06T07:22:53.000Z
convert_codemeta/__init__.py
caltechlibrary/convert_codemeta
337e39338bcce4f0f201fe03500061ab14b2ae5c
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
4
2019-11-14T16:37:47.000Z
2019-12-12T00:04:48.000Z
convert_codemeta/__init__.py
caltechlibrary/convert_codemeta
337e39338bcce4f0f201fe03500061ab14b2ae5c
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
1
2022-02-07T14:28:55.000Z
2022-02-07T14:28:55.000Z
from .validate import validate_codemeta
from .convert import crosswalk
23.666667
39
0.859155
9
71
6.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.112676
71
2
40
35.5
0.952381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
604be99530756d16d175d49db61d6e3717427d1a
45
py
Python
Server/python/src/__init__.py
dtracers/Development-Graph
db95318c9465410264b4e9854f16850cbb4c3cda
[ "Apache-2.0" ]
null
null
null
Server/python/src/__init__.py
dtracers/Development-Graph
db95318c9465410264b4e9854f16850cbb4c3cda
[ "Apache-2.0" ]
1
2015-03-17T19:37:10.000Z
2015-03-17T19:37:10.000Z
Server/python/src/__init__.py
dtracers/Development-Graph
db95318c9465410264b4e9854f16850cbb4c3cda
[ "Apache-2.0" ]
null
null
null
def createNewProject(form):
    print "blah!"
22.5
27
0.711111
5
45
6.4
1
0
0
0
0
0
0
0
0
0
0
0
0.155556
45
2
28
22.5
0.842105
0
0
0
0
0
0.108696
0
0
0
0
0
0
0
null
null
0
0
null
null
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
5
6077ce14573fb2ae329040185e36f0a39afdd2f1
19
py
Python
test.py
zuochen33/pointnet2
3d6a1737c28d7cb35f9720c8a65123e164ec04e9
[ "MIT" ]
null
null
null
test.py
zuochen33/pointnet2
3d6a1737c28d7cb35f9720c8a65123e164ec04e9
[ "MIT" ]
null
null
null
test.py
zuochen33/pointnet2
3d6a1737c28d7cb35f9720c8a65123e164ec04e9
[ "MIT" ]
null
null
null
print("aaaaaaaa")
6.333333
17
0.684211
2
19
6.5
1
0
0
0
0
0
0
0
0
0
0
0
0.105263
19
2
18
9.5
0.764706
0
0
0
0
0
0.444444
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
60acc50e2a5d763eed0583651cce83274ac50669
187
py
Python
resource_management/apps/employees/admin.py
lmann4/cis526-final-project
7fe34b258cd4e7742cdccc3f2d90fb331361debe
[ "MIT" ]
null
null
null
resource_management/apps/employees/admin.py
lmann4/cis526-final-project
7fe34b258cd4e7742cdccc3f2d90fb331361debe
[ "MIT" ]
4
2017-04-12T19:06:21.000Z
2017-04-19T19:08:34.000Z
resource_management/apps/employees/admin.py
lmann4/cis526-final-project
7fe34b258cd4e7742cdccc3f2d90fb331361debe
[ "MIT" ]
null
null
null
from django.contrib import admin
from apps.employees.models import Employee, Position, Schedule

admin.site.register(Employee)
admin.site.register(Position)
admin.site.register(Schedule)
26.714286
62
0.834225
25
187
6.24
0.52
0.173077
0.326923
0
0
0
0
0
0
0
0
0
0.074866
187
6
63
31.166667
0.901734
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
880845af2a83c399ea40422a47d509134d04ff38
4770
py
Python
tests/test_parser.py
douglas/shopidown
eb6cf2543a825b7580484ce82a15c5bfea305286
[ "MIT" ]
1
2016-07-16T21:22:41.000Z
2016-07-16T21:22:41.000Z
tests/test_parser.py
douglas/shopidown
eb6cf2543a825b7580484ce82a15c5bfea305286
[ "MIT" ]
null
null
null
tests/test_parser.py
douglas/shopidown
eb6cf2543a825b7580484ce82a15c5bfea305286
[ "MIT" ]
null
null
null
# coding: utf-8

"""
test_shopidown
----------------------------------

Tests for `shopidown` module.
"""

import textwrap

from shopidown.parser import Parser

parser = Parser()


def test_title():
    """
    Tests that a title parsers to h1 tag
    """
    assert parser.parse("# Title *italic* **bold**") == "<h1>Title <em>italic</em> <strong>bold</strong></h1>"


def test_invalid_em_title():
    """
    Tests that a title without two 'em' marks parsers to h1 tag
    """
    assert parser.parse("# Title *italic **bold**") == "<h1>Title *italic <strong>bold</strong></h1>"


def test_invalid_strong_title():
    """
    Tests that a title without two 'strong' marks parsers to h1 tag
    """
    assert parser.parse("# Title *italic* **bold") == "<h1>Title <em>italic</em> **bold</h1>"


def test_subtitle():
    """
    Tests that a subtitle parsers to h2 tags
    """
    assert parser.parse("## Subtitle") == "<h2>Subtitle</h2>"


def test_pseudo_subtitle():
    """
    Tests that a pseudo subtitle parsers to a paragraph
    """
    assert parser.parse("Subtitle## ") == "<p>Subtitle## </p>"


def test_parse_unordered_list():
    """
    Tests that we can parse an unordered list
    """
    input_text = textwrap.dedent("""\
        - list item 1
        - list item 2
    """)
    output_text = textwrap.dedent("""\
        <ul>
        <li>list item 1</li>
        <li>list item 2</li>
        </ul>
    """)
    assert parser.parse(input_text) == output_text


def test_parse_ordered_list():
    """
    Tests that we can parse an ordered list
    """
    input_text = textwrap.dedent("""\
        1. ordered item 1
        2. ordered item 2
    """)
    output_text = textwrap.dedent("""\
        <ol>
        <li>ordered item 1</li>
        <li>ordered item 2</li>
        </ol>
    """)
    assert parser.parse(input_text) == output_text


def test_lists():
    """
    Test that we can parse ul and ol
    """
    input_text = textwrap.dedent("""\
        - list item 1
        - list item 2
        1. ordered item 1
        2. ordered item 2
    """)
    output_text = textwrap.dedent("""\
        <ul>
        <li>list item 1</li>
        <li>list item 2</li>
        </ul>
        <ol>
        <li>ordered item 1</li>
        <li>ordered item 2</li>
        </ol>
    """)
    assert parser.parse(input_text) == output_text


def test_inverted_lists():
    """
    Test that we can parse ol and ul
    """
    input_text = textwrap.dedent("""\
        1. ordered item 1
        2. ordered item 2
        - list item 1
        - list item 2
    """)
    output_text = textwrap.dedent("""\
        <ol>
        <li>ordered item 1</li>
        <li>ordered item 2</li>
        </ol>
        <ul>
        <li>list item 1</li>
        <li>list item 2</li>
        </ul>
    """)
    assert parser.parse(input_text) == output_text


def test_guilhaume_markdown_text():
    """
    Test that we can parse the markdown text Guillaume wrote
    """
    input_text = textwrap.dedent("""\
        # Title *italic* **bold**
        ## Subtitle
        Paragraph1
        Paragraph2
        - list item 1
        - list item 2
        1. ordered item 1
        2. ordered item 2
    """)
    output_text = textwrap.dedent("""\
        <h1>Title <em>italic</em> <strong>bold</strong></h1>
        <h2>Subtitle</h2>
        <p>Paragraph1</p>
        <p>Paragraph2</p>
        <ul>
        <li>list item 1</li>
        <li>list item 2</li>
        </ul>
        <ol>
        <li>ordered item 1</li>
        <li>ordered item 2</li>
        </ol>
    """)
    assert parser.parse(input_text) == output_text


def test_improved_guilhaume_markdown_text():
    """
    Test that we can parse an improved version of the markdown text
    Guillaume wrote
    """
    input_text = textwrap.dedent("""\
        # Title *italic* **bold**
        ## Subtitle
        Paragraph1
        Paragraph2
        - list item 1
        - list item 2
        1. ordered item 1
        2. ordered item 2
        ## Subtitle
        Paragraph1
        Paragraph2
        # Title *italic* **bold**
        1. ordered item 1
        2. ordered item 2
        3. ordered item 3
        4. ordered item 4
        - list item 1
        - list item 2
        - list item 3
        - list item 4
    """)
    output_text = textwrap.dedent("""\
        <h1>Title <em>italic</em> <strong>bold</strong></h1>
        <h2>Subtitle</h2>
        <p>Paragraph1</p>
        <p>Paragraph2</p>
        <ul>
        <li>list item 1</li>
        <li>list item 2</li>
        </ul>
        <ol>
        <li>ordered item 1</li>
        <li>ordered item 2</li>
        </ol>
        <h2>Subtitle</h2>
        <p>Paragraph1</p>
        <p>Paragraph2</p>
        <h1>Title <em>italic</em> <strong>bold</strong></h1>
        <ol>
        <li>ordered item 1</li>
        <li>ordered item 2</li>
        <li>ordered item 3</li>
        <li>ordered item 4</li>
        </ol>
        <ul>
        <li>list item 1</li>
        <li>list item 2</li>
        <li>list item 3</li>
        <li>list item 4</li>
        </ul>
    """)
    assert parser.parse(input_text) == output_text
18.416988
110
0.562055
650
4770
4.046154
0.103077
0.085171
0.053232
0.041065
0.787452
0.776046
0.751711
0.692395
0.652852
0.603802
0
0.030943
0.275052
4770
258
111
18.488372
0.729612
0.134591
0
0.823529
0
0
0.635866
0.032379
0
0
0
0
0.071895
1
0.071895
false
0
0.013072
0
0.084967
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
714c270dd59c3cbe48e33a95302f82604e69450d
47
py
Python
pyngeso/exceptions.py
atsangarides/pyngeso
88bf875aa145176b9045e58cfd22eb375aa26d2a
[ "MIT" ]
null
null
null
pyngeso/exceptions.py
atsangarides/pyngeso
88bf875aa145176b9045e58cfd22eb375aa26d2a
[ "MIT" ]
null
null
null
pyngeso/exceptions.py
atsangarides/pyngeso
88bf875aa145176b9045e58cfd22eb375aa26d2a
[ "MIT" ]
null
null
null
class UnsuccessfulRequest(Exception):
    pass
15.666667
37
0.787234
4
47
9.25
1
0
0
0
0
0
0
0
0
0
0
0
0.148936
47
2
38
23.5
0.925
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
71698500e7071760ff1ceefeaf43403015d5ad1d
109
py
Python
src/xmlsec/rsa_x509_pem/pyasn1/codec/der/decoder.py
fredrikt/pyXMLSecurity
002740e414f470eab7f460fbd4cff24820e054a7
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
src/xmlsec/rsa_x509_pem/pyasn1/codec/der/decoder.py
fredrikt/pyXMLSecurity
002740e414f470eab7f460fbd4cff24820e054a7
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
src/xmlsec/rsa_x509_pem/pyasn1/codec/der/decoder.py
fredrikt/pyXMLSecurity
002740e414f470eab7f460fbd4cff24820e054a7
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
# DER decoder
from ...type import univ
from ..cer import decoder

decode = decoder.Decoder(decoder.codecMap)
18.166667
42
0.761468
15
109
5.533333
0.6
0.337349
0
0
0
0
0
0
0
0
0
0
0.137615
109
5
43
21.8
0.882979
0.100917
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
716e5bc14db1d8ba8e6a472c1cbddddb57db977e
56
py
Python
01-python/outdoors/park.py
JamesKunstle/bootstrap_open_source_workflow
c6e02f76188b1040af7647fec89b54fd0a43ed10
[ "MIT" ]
null
null
null
01-python/outdoors/park.py
JamesKunstle/bootstrap_open_source_workflow
c6e02f76188b1040af7647fec89b54fd0a43ed10
[ "MIT" ]
null
null
null
01-python/outdoors/park.py
JamesKunstle/bootstrap_open_source_workflow
c6e02f76188b1040af7647fec89b54fd0a43ed10
[ "MIT" ]
null
null
null
def draw_park():
    print("park not found")
    return
14
27
0.625
8
56
4.25
0.875
0
0
0
0
0
0
0
0
0
0
0
0.25
56
3
28
18.666667
0.809524
0
0
0
0
0
0.25
0
0
0
0
0
0
1
0.333333
true
0
0
0
0.666667
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
1
0
0
5
717c31812bdf67e93cd92b2fcd5e4c31295f6d7d
6685
py
Python
method_doc_tests.py
FlameHorizon/vba-docgen
5f7585fccb9706b66ee31081bf593b308682f6ac
[ "MIT" ]
null
null
null
method_doc_tests.py
FlameHorizon/vba-docgen
5f7585fccb9706b66ee31081bf593b308682f6ac
[ "MIT" ]
6
2020-05-21T19:24:33.000Z
2020-05-24T09:32:18.000Z
method_doc_tests.py
FlameHorizon/vba-docgen
5f7585fccb9706b66ee31081bf593b308682f6ac
[ "MIT" ]
1
2021-05-24T22:16:17.000Z
2021-05-24T22:16:17.000Z
import unittest

from method_doc import MethodDoc


class TestMethodDoc(unittest.TestCase):

    def test_init(self):
        expected = '# Foo.Bar () Method\n\n'
        actual = MethodDoc('Foo', 'Bar ()').build()
        self.assertEqual(expected, actual)

    def test_buildReturnsDocWithDescription(self):
        expected = ('# Foo.Bar () Method\n\n'
                    'Description of Foo.Bar\n\n')
        doc = MethodDoc('Foo', 'Bar ()')
        doc.set_description('Description of Foo.Bar')
        self.assertEqual(expected, doc.build())

    def test_buildReturnsDocWithSignature(self):
        expected = ('# Foo.Bar () Method\n\n'
                    'Description of Foo.Bar\n\n'
                    '```vb\n'
                    'Public Sub Bar ()\n'
                    '```\n\n')
        doc = MethodDoc('Foo', 'Bar ()')
        doc.set_description('Description of Foo.Bar')
        doc.set_signature('Public Sub Bar ()')
        self.assertEqual(expected, doc.build())

    def test_buildReturnsDocWhenOneArgument(self):
        expected = ('# Foo.Bar (Variant) Method\n\n'
                    'Description of Foo.Bar\n\n'
                    '```vb\n'
                    'Public Sub Bar (ByVal Item As Variant)\n'
                    '```\n\n'
                    '### Parameters\n\n'
                    '**Item** `Variant` <br>\n'
                    'Description of an item.\n\n')
        doc = MethodDoc('Foo', 'Bar (Variant)')
        doc.set_description('Description of Foo.Bar')
        doc.set_signature('Public Sub Bar (ByVal Item As Variant)')
        doc.add_parameter('Item', 'Variant', 'Description of an item.')
        self.assertEqual(expected, doc.build())

    def test_buildReturnsDocWhenAtLeastOneArgument(self):
        expected = ('# Foo.Bar (Variant, Variant) Method\n\n'
                    'Description of Foo.Bar\n\n'
                    '```vb\n'
                    'Public Sub Bar (ByVal Item As Variant, ByVal Item2 As Variant)\n'
                    '```\n\n'
                    '### Parameters\n\n'
                    '**Item** `Variant` <br>\n'
                    'Description of an item.\n\n'
                    '**Item2** `Variant` <br>\n'
                    'Description of an item2.\n\n')
        doc = MethodDoc('Foo', 'Bar (Variant, Variant)')
        doc.set_description('Description of Foo.Bar')
        doc.set_signature(
            'Public Sub Bar (ByVal Item As Variant, ByVal Item2 As Variant)')
        doc.add_parameter('Item', 'Variant', 'Description of an item.')
        doc.add_parameter('Item2', 'Variant', 'Description of an item2.')
        self.assertEqual(expected, doc.build())

    def test_buildReturnsDocWhenMethodIsFunctionZeroArgs(self):
        expected = ('# Foo.Bar () Method\n\n'
                    'Description of Foo.Bar\n\n'
                    '```vb\n'
                    'Public Function Bar () As String\n'
                    '```\n\n'
                    '### Returns\n\n'
                    '`String` <br>\n'
                    'Description of return value\n\n')
        doc = MethodDoc('Foo', 'Bar ()')
        doc.set_description('Description of Foo.Bar')
        doc.set_signature('Public Function Bar () As String')
        doc.add_returns('String', 'Description of return value')
        self.assertEqual(expected, doc.build())

    def test_buildReturnsDocWhenMethodIsFunctionAtLeastOneArg(self):
        expected = ('# Foo.Bar (Variant) Method\n\n'
                    'Description of Foo.Bar\n\n'
                    '```vb\n'
                    'Public Function Bar (ByVal Item As Variant) As String\n'
                    '```\n\n'
                    '### Parameters\n\n'
                    '**Item** `Variant` <br>\n'
                    'Description of an item.\n\n'
                    '### Returns\n\n'
                    '`String` <br>\n'
                    'Description of return value\n\n')
        doc = MethodDoc('Foo', 'Bar (Variant)')
        doc.set_description('Description of Foo.Bar')
        doc.set_signature(
            'Public Function Bar (ByVal Item As Variant) As String')
        doc.add_parameter('Item', 'Variant', 'Description of an item.')
        doc.add_returns('String', 'Description of return value')
        self.assertEqual(expected, doc.build())

    def test_buildReturnsDocWhenErrorDefined(self):
        expected = ('# Foo.Bar () Method\n\n'
                    'Description of Foo.Bar\n\n'
                    '```vb\n'
                    'Public Sub Bar ()\n'
                    '```\n\n'
                    '### Errors\n\n'
                    '`OnInvalidArgumentError` <br>\n'
                    'Description of error\n\n')
        doc = MethodDoc('Foo', 'Bar ()')
        doc.set_description('Description of Foo.Bar')
        doc.set_signature('Public Sub Bar ()')
        doc.add_error('OnInvalidArgumentError', 'Description of error')
        self.assertEqual(expected, doc.build())

    def test_buildReturnsDocWhenMultipleErrorsDefined(self):
        expected = ('# Foo.Bar () Method\n\n'
                    'Description of Foo.Bar\n\n'
                    '```vb\n'
                    'Public Sub Bar ()\n'
                    '```\n\n'
                    '### Errors\n\n'
                    '`OnInvalidArgumentError` <br>\n'
                    'Description of error\n\n'
                    '`OnArgumentOutOfRangeError` <br>\n'
                    'Description of error\n\n')
        doc = MethodDoc('Foo', 'Bar ()')
        doc.set_description('Description of Foo.Bar')
        doc.set_signature('Public Sub Bar ()')
        doc.add_error('OnInvalidArgumentError', 'Description of error')
        doc.add_error('OnArgumentOutOfRangeError', 'Description of error')
        self.assertEqual(expected, doc.build())

    def test_buildReturnsDocWithExample(self):
        expected = ('# Foo.Bar () Method\n\n'
                    'Description of Foo.Bar\n\n'
                    '```vb\n'
                    'Public Sub Bar ()\n'
                    '```\n\n'
                    '## Examples\n\n'
                    'This is an example.\n\n')
        doc = MethodDoc('Foo', 'Bar ()')
        doc.set_description('Description of Foo.Bar')
        doc.set_signature('Public Sub Bar ()')
        doc.set_example('This is an example.')
        self.assertEqual(expected, doc.build())

    def test_buildReturnDocWithRemarks(self):
        expected = ('# Foo.Bar () Method\n\n'
                    '### Remarks\n\n'
                    'This is an example.')
        doc = MethodDoc('Foo', 'Bar ()')
        doc.set_example('This is an example.')


if __name__ == "__main__":
    unittest.main()
38.866279
86
0.515034
703
6685
4.829303
0.091038
0.032401
0.074227
0.100736
0.812666
0.800295
0.778498
0.721649
0.682769
0.667452
0
0.00137
0.344951
6685
171
87
39.093567
0.773921
0
0
0.7
0
0
0.375617
0.021541
0
0
0
0
0.071429
1
0.078571
false
0
0.014286
0
0.1
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
71b2e07600d324a8b11930a652b5ddfd5fb02568
42
py
Python
tests/__init__.py
laserkelvin/astrochem_ml
1385e481525681943e50467af58f317401747acd
[ "MIT" ]
null
null
null
tests/__init__.py
laserkelvin/astrochem_ml
1385e481525681943e50467af58f317401747acd
[ "MIT" ]
2
2021-11-18T01:33:22.000Z
2021-11-18T14:04:43.000Z
tests/__init__.py
laserkelvin/astrochem_ml
1385e481525681943e50467af58f317401747acd
[ "MIT" ]
null
null
null
"""Unit test package for astrochem_ml."""
21
41
0.714286
6
42
4.833333
1
0
0
0
0
0
0
0
0
0
0
0
0.119048
42
1
42
42
0.783784
0.833333
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
e0cdc4da770c4b3b10597b039cd4b01901b26148
87
py
Python
vdgo_backend/bypass/apps.py
Aruta1ru/web-vdgo
1c26649300b6cbb770117a7a9daccd949da8a265
[ "MIT" ]
null
null
null
vdgo_backend/bypass/apps.py
Aruta1ru/web-vdgo
1c26649300b6cbb770117a7a9daccd949da8a265
[ "MIT" ]
null
null
null
vdgo_backend/bypass/apps.py
Aruta1ru/web-vdgo
1c26649300b6cbb770117a7a9daccd949da8a265
[ "MIT" ]
null
null
null
from django.apps import AppConfig


class BypassConfig(AppConfig):
    name = 'bypass'
14.5
33
0.747126
10
87
6.5
0.9
0
0
0
0
0
0
0
0
0
0
0
0.172414
87
5
34
17.4
0.902778
0
0
0
0
0
0.068966
0
0
0
0
0
0
1
0
false
0.666667
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
5
46049aca38e2c37ca3f41ab029222c28791007a4
2571
py
Python
project_checker/tests/reporttest.py
zuzannnaobajtek/github-cmake-project-checker
1406c2247bbbecb490bc5000c7fa521b9bf96ec0
[ "MIT" ]
1
2017-05-17T21:21:54.000Z
2017-05-17T21:21:54.000Z
project_checker/tests/reporttest.py
zuzannnaobajtek/github-cmake-project-checker
1406c2247bbbecb490bc5000c7fa521b9bf96ec0
[ "MIT" ]
13
2018-03-28T15:36:17.000Z
2018-04-25T16:44:00.000Z
project_checker/tests/reporttest.py
zuzannnaobajtek/github-cmake-project-checker
1406c2247bbbecb490bc5000c7fa521b9bf96ec0
[ "MIT" ]
15
2017-05-31T11:44:20.000Z
2018-04-19T15:03:35.000Z
from unittest import TestCase
from unittest.mock import MagicMock

from project_checker.checker.filesystem import Report


class ReportTest(TestCase):
    def test_result_ranking_of_two_labs(self):
        report = self.report_labs(['lab1_ex1=0', 'lab1_ex2=2'])
        self.assertEqual('ok;0', report.to_result_ranking(['lab1_ex1', 'lab1_ex2'], sep=';'))

    def test_result_ranking_of_partially_intersecting_labs(self):
        report = self.report_labs(['lab1_ex1=0', 'lab1_ex2=2', 'lab2_ex1=1', 'lab2_ex2=0'])
        self.assertEqual('0;ok', report.to_result_ranking(['lab1_ex2', 'lab2_ex2'], sep=';'))

    def test_result_ranking_of_partially_intersecting_labs_more_results(self):
        report = self.report_labs(['lab1_ex1=0', 'lab1_ex2=2', 'lab2_ex1=1', 'lab2_ex2=0'])
        self.assertEqual('ok;0;ok', report.to_result_ranking(['lab1_ex1', 'lab1_ex2', 'lab2_ex2'], sep=';'))

    def test_result_ranking_of_four_labs_absent_from_report(self):
        report = self.report_labs(['lab1_ex1=0', 'lab1_ex2=2'])
        self.assertEqual('ok;0;0;0', report.to_result_ranking(['lab1_ex1', 'lab1_ex2', 'lab2_ex1', 'lab2_ex2'], sep=';'))

    def test_merging_of_two_reports(self):
        report1 = self.report_labs(['lab1_ex1=0', 'lab1_ex2=2', 'lab2_ex1=1', 'lab2_ex2=0'])
        report2 = self.report_labs(['lab1_ex2=0', 'lab3_ex1=0'])
        report1.merge(report2)
        self.assertEqual('ok;ok;0;ok;ok', report1.to_result_ranking(['lab1_ex1', 'lab1_ex2', 'lab2_ex1', 'lab2_ex2', 'lab3_ex1'], sep=';'))

    # The original file defined this test twice, verbatim; the second
    # definition silently shadowed the first, so a single copy is kept.
    # The deprecated assertEquals alias is also replaced with assertEqual.
    def test_merging_of_two_reports_other_way_round(self):
        report1 = self.report_labs(['lab1_ex1=0', 'lab1_ex2=2', 'lab2_ex1=1', 'lab2_ex2=0'])
        report2 = self.report_labs(['lab1_ex2=0', 'lab3_ex1=0'])
        report2.merge(report1)
        self.assertEqual('ok;ok;0;ok;ok', report2.to_result_ranking(['lab1_ex1', 'lab1_ex2', 'lab2_ex1', 'lab2_ex2', 'lab3_ex1'], sep=';'))

    def report_labs(self, labs):
        # Builds a Report backed by fake filesystem objects: the mocked file
        # iterates over 'name=result' lines supplied by the test.
        file = MagicMock(__iter__=lambda *args: iter(labs))
        directory = MagicMock(open=lambda *args: file)
        report = Report(directory, 'report')
        report.load()
        return report
50.411765
120
0.668222
377
2,571
4.209549
0.145889
0.074984
0.088217
0.113422
0.7908
0.776938
0.776938
0.725898
0.725898
0.68368
0
0.077102
0.162583
2,571
50
121
51.42
0.660009
0
0
0.475
0
0
0.22676
0
0
0
0
0
0.175
1
0.2
false
0
0.075
0
0.325
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1cb08cc15ab7490121ca8547a5030855047eeca8
342
py
Python
main/signals.py
curenamo/ssmreleva
6e50b8819713c81f42a638dfbe9aa37bd943400f
[ "BSD-2-Clause" ]
123
2015-01-08T09:21:05.000Z
2021-11-14T19:45:23.000Z
main/signals.py
cybernetics/formhub
578fc2c5e9febe8dc68b37f7d2e85a76dc2c4c04
[ "BSD-2-Clause" ]
16
2015-02-13T16:56:42.000Z
2021-02-20T23:58:43.000Z
main/signals.py
cybernetics/formhub
578fc2c5e9febe8dc68b37f7d2e85a76dc2c4c04
[ "BSD-2-Clause" ]
110
2015-01-19T14:34:06.000Z
2021-02-01T14:55:11.000Z
from django.contrib.auth.models import User
from django.db.models.signals import post_save


def set_api_permissions(sender, instance=None, created=False, **kwargs):
    # Imported inside the handler, as in the original, presumably to avoid
    # import-time side effects or circular imports.
    from utils.user_auth import set_api_permissions_for_user
    if created:
        set_api_permissions_for_user(instance)


post_save.connect(set_api_permissions, sender=User)
34.2
72
0.809942
51
342
5.137255
0.490196
0.091603
0.259542
0.175573
0.183206
0
0
0
0
0
0
0
0.116959
342
9
73
38
0.86755
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.428571
0
0.571429
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
1cbf414bdc027e1c2e3953bdf0a939f63a77cfee
64,845
py
Python
tests/toot_samples.py
muellermartin/moa
8c794b2cb854c1b25570fbb3082e83a9a088c773
[ "MIT" ]
238
2017-11-20T16:51:43.000Z
2021-03-10T23:35:13.000Z
tests/toot_samples.py
muellermartin/moa
8c794b2cb854c1b25570fbb3082e83a9a088c773
[ "MIT" ]
149
2017-11-08T06:44:44.000Z
2021-02-01T20:21:23.000Z
tests/toot_samples.py
muellermartin/moa
8c794b2cb854c1b25570fbb3082e83a9a088c773
[ "MIT" ]
14
2017-11-21T13:54:01.000Z
2021-03-10T17:01:53.000Z
import datetime from datetime import timezone from dateutil.tz import tzutc boost = {'_pagination_prev': {'_pagination_endpoint': '/api/v1/accounts/6717/statuses', '_pagination_method': 'GET', 'since_id': 98972270904179914}, 'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': '', 'followers_count': 0, 'following_count': 2, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'id': '6717', 'locked': True, 'note': '<p></p>', 'statuses_count': 46, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': None, 'content': '<p>RT <span class="h-card"><a href="https://pdx.social/@foozmeat" ' 'class="u-url mention">@<span>foozmeat</span></a></span> Redis was ' 'a real a-hole today. I&apos;m sad that we rely on it for job ' 'queues.</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': '98972270904179914', 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': None, 'media_attachments': [], 'mentions': [], 'muted': False, 'reblog': {'account': {'acct': 'foozmeat', 'avatar': 'https://pdx.social/system/accounts/avatars/000/000/002/original/24f16486ab450a5d.jpg', 'avatar_static': 'https://pdx.social/system/accounts/avatars/000/000/002/original/24f16486ab450a5d.jpg', 'created_at': datetime.datetime(2017, 4, 7, 1, 22, 15, 768000, tzinfo=tzutc()), 'display_name': 'James ✅🚴🐝🍔🕹', 'followers_count': 76, 'following_count': 143, 'header': 'https://pdx.social/system/accounts/headers/000/000/002/original/02d1dd7071e52079.png', 'header_static': 'https://pdx.social/system/accounts/headers/000/000/002/original/02d1dd7071e52079.png', 'id': '2', 'locked': False, 'note': '<p>DevOps at Panic. Admin of pdx.social</p>', 'statuses_count': 232, 'url': 'https://pdx.social/@foozmeat', 'username': 'foozmeat'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>Redis was a real a-hole today. 
I&apos;m sad that we ' 'rely on it for job queues.</p>', 'created_at': datetime.datetime(2017, 11, 8, 0, 29, 53, 970000, tzinfo=tzutc()), 'emojis': [], 'favourited': False, 'favourites_count': 1, 'id': '98965978733093918', 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [], 'muted': False, 'reblog': None, 'reblogged': True, 'reblogs_count': 1, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/foozmeat/statuses/98965978733093918', 'url': 'https://pdx.social/@foozmeat/98965978733093918', 'visibility': 'public'}, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/98972270904179914/activity', 'url': 'https://pdx.social/@moatest/98972270904179914', 'visibility': 'public'} boost_w_attachments = {'_pagination_prev': {'_pagination_endpoint': '/api/v1/accounts/6717/statuses', '_pagination_method': 'GET', 'since_id': 98972419237345529}, 'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': '', 'followers_count': 0, 'following_count': 2, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'id': '6717', 'locked': True, 'note': '<p></p>', 'statuses_count': 47, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': None, 'content': '<p>RT <span class="h-card"><a href="https://pdx.social/@foozmeat" ' 'class="u-url mention">@<span>foozmeat</span></a></span> Finally ' 'able to eat the dog food <a ' 'href="https://pdx.social/media/72JR2KtOOkFy8S8a7Rw" rel="nofollow ' 'noopener" target="_blank"><span ' 'class="invisible">https://</span><span ' 'class="ellipsis">pdx.social/media/72JR2KtOOkFy8</span><span ' 'class="invisible">S8a7Rw</span></a></p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': '98972419237345529', 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': None, 'media_attachments': [], 'mentions': [], 'muted': False, 'reblog': {'account': {'acct': 'foozmeat', 'avatar': 'https://pdx.social/system/accounts/avatars/000/000/002/original/24f16486ab450a5d.jpg', 'avatar_static': 'https://pdx.social/system/accounts/avatars/000/000/002/original/24f16486ab450a5d.jpg', 'created_at': datetime.datetime(2017, 4, 7, 1, 22, 15, 768000, tzinfo=tzutc()), 'display_name': 'James ✅🚴🐝🍔🕹', 'followers_count': 76, 'following_count': 143, 'header': 'https://pdx.social/system/accounts/headers/000/000/002/original/02d1dd7071e52079.png', 'header_static': 'https://pdx.social/system/accounts/headers/000/000/002/original/02d1dd7071e52079.png', 'id': '2', 'locked': False, 'note': '<p>DevOps at Panic. 
Admin of pdx.social</p>', 'statuses_count': 232, 'url': 'https://pdx.social/@foozmeat', 'username': 'foozmeat'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>Finally able to eat the dog food <a ' 'href="https://pdx.social/media/72JR2KtOOkFy8S8a7Rw" ' 'rel="nofollow noopener" target="_blank"><span ' 'class="invisible">https://</span><span ' 'class="ellipsis">pdx.social/media/72JR2KtOOkFy8</span><span ' 'class="invisible">S8a7Rw</span></a></p>', 'created_at': datetime.datetime(2017, 11, 5, 4, 37, 55, 975000, tzinfo=tzutc()), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': '98949967114668601', 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [{'description': 'screenshots', 'id': '22860', 'meta': {'original': {'aspect': 1.124780316344464, 'height': 1138, 'size': '1280x1138', 'width': 1280}, 'small': {'aspect': 1.1235955056179776, 'height': 356, 'size': '400x356', 'width': 400}}, 'preview_url': 'https://pdx.social/system/media_attachments/files/000/022/860/small/aa01670afb664f6f.png', 'remote_url': None, 'text_url': 'https://pdx.social/media/72JR2KtOOkFy8S8a7Rw', 'type': 'image', 'url': 'https://pdx.social/system/media_attachments/files/000/022/860/original/aa01670afb664f6f.png'}], 'mentions': [], 'muted': False, 'reblog': None, 'reblogged': True, 'reblogs_count': 1, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/foozmeat/statuses/98949967114668601', 'url': 'https://pdx.social/@foozmeat/98949967114668601', 'visibility': 'public'}, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/98972419237345529/activity', 'url': 'https://pdx.social/@moatest/98972419237345529', 'visibility': 'public'} reply1 = {'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': '', 'followers_count': 0, 'following_count': 2, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'id': '6717', 'locked': True, 'note': '<p></p>', 'statuses_count': 49, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>Reply test 1</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': '98972724443462491', 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [], 'muted': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/98972724443462491', 'url': 'https://pdx.social/@moatest/98972724443462491', 'visibility': 'private'} reply2 = {'_pagination_prev': {'_pagination_endpoint': '/api/v1/accounts/6717/statuses', '_pagination_method': 'GET', 'since_id': 98972725118239719}, 'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': '', 'followers_count': 0, 'following_count': 2, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 
'https://pdx.social/headers/original/missing.png', 'id': '6717', 'locked': True, 'note': '<p></p>', 'statuses_count': 49, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>Reply test 2</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': '98972725118239719', 'in_reply_to_account_id': '6717', 'in_reply_to_id': '98972724443462491', 'language': 'en', 'media_attachments': [], 'mentions': [], 'muted': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/98972725118239719', 'url': 'https://pdx.social/@moatest/98972725118239719', 'visibility': 'private'} twitter_mention = {'_pagination_prev': {'_pagination_endpoint': '/api/v1/accounts/6717/statuses', '_pagination_method': 'GET', 'since_id': 98972792561915550}, 'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': '', 'followers_count': 0, 'following_count': 2, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'id': '6717', 'locked': True, 'note': '<p></p>', 'statuses_count': 51, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>mentioning @foozmeat@twitter.com here</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': '98972792561915550', 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [], 'muted': False, 'pinned': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/98972792561915550', 'url': 'https://pdx.social/@moatest/98972792561915550', 'visibility': 'public'} image_with_description = {'_pagination_prev': {'_pagination_endpoint': '/api/v1/accounts/6717/statuses', '_pagination_method': 'GET', 'since_id': 98972910281369162}, 'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': '', 'followers_count': 0, 'following_count': 2, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'id': '6717', 'locked': True, 'note': '<p></p>', 'statuses_count': 54, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>image description test</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': '98972910281369162', 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'fr', 'media_attachments': [{'description': 'gitlab logo', 'id': '24511', 'meta': {'original': {'aspect': 1.0, 'height': 500, 'size': '500x500', 'width': 500}, 'small': {'aspect': 1.0, 'height': 400, 'size': '400x400', 'width': 400}}, 'preview_url': 
'https://pdx.social/system/media_attachments/files/000/024/511/small/c22299c464a7d72f.png', 'remote_url': None, 'text_url': 'https://pdx.social/media/kVwpv8trzfEgOg8iOPI', 'type': 'image', 'url': 'https://pdx.social/system/media_attachments/files/000/024/511/original/c22299c464a7d72f.png'}], 'mentions': [], 'muted': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/98972910281369162', 'url': 'https://pdx.social/@moatest/98972910281369162', 'visibility': 'private'} toot_with_mention = {'_pagination_prev': {'_pagination_endpoint': '/api/v1/accounts/6717/statuses', '_pagination_method': 'GET', 'since_id': 98976347364553358}, 'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': '', 'followers_count': 0, 'following_count': 2, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'id': '6717', 'locked': True, 'note': '<p></p>', 'statuses_count': 58, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>mentioning <span class="h-card"><a ' 'href="https://pdx.social/@foozmeat" class="u-url ' 'mention">@<span>foozmeat</span></a></span> here</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': '98976347364553358', 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [{'acct': 'foozmeat', 'id': '2', 'url': 'https://pdx.social/@foozmeat', 'username': 'foozmeat'}], 'muted': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/98976347364553358', 'url': 'https://pdx.social/@moatest/98976347364553358', 'visibility': 'private'} toot_double_mention = {'_pagination_prev': {'_pagination_endpoint': '/api/v1/accounts/6717/statuses', '_pagination_method': 'GET', 'since_id': 99078721594621415}, 'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': 'Moa test account', 'followers_count': 0, 'following_count': 1, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'id': 6717, 'locked': True, 'note': '<p>Lot&apos;s of random garbage posts. 
You should ignore ' 'it.</p>', 'statuses_count': 99, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>test 1 <span class="h-card"><a ' 'href="https://pdx.social/@moa_party" class="u-url ' 'mention">@<span>moa_party</span></a></span><br />test 2 ' '@moa_party@twitter.com</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': 99078721594621415, 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [{'acct': 'moa_party', 'id': 8598, 'url': 'https://pdx.social/@moa_party', 'username': 'moa_party'}], 'muted': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/99078721594621415', 'url': 'https://pdx.social/@moatest/99078721594621415', 'visibility': 'private'} toot_with_cw = {'_pagination_prev': {'_pagination_endpoint': '/api/v1/accounts/6717/statuses', '_pagination_method': 'GET', 'since_id': 99079553123383380}, 'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': 'Moa test account', 'followers_count': 0, 'following_count': 1, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'id': 6717, 'locked': True, 'note': '<p>Lot&apos;s of random garbage posts. You should ignore ' 'it.</p>', 'statuses_count': 104, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>This is the secret stuff</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': 99079553123383380, 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [], 'muted': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': True, 'spoiler_text': 'This is the spoiler text', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/99079553123383380', 'url': 'https://pdx.social/@moatest/99079553123383380', 'visibility': 'private'} toot_with_many_urls = {'_pagination_prev': {'_pagination_endpoint': '/api/v1/accounts/12083/statuses', '_pagination_method': 'GET', 'since_id': 99113667863200595}, 'account': {'acct': 'kacealexander', 'avatar': 'https://wandering.shop/system/accounts/avatars/000/012/083/original/7b7daab1234cede81647d4aeb039a7e3.png', 'avatar_static': 'https://wandering.shop/system/accounts/avatars/000/012/083/original/7b7daab1234cede81647d4aeb039a7e3.png', 'created_at': datetime.datetime(2017, 11, 13, 20, 43, 54, 343000, tzinfo=tzutc()), 'display_name': 'K. C. Alexander', 'followers_count': 130, 'following_count': 54, 'header': 'https://wandering.shop/system/accounts/headers/000/012/083/original/50bdde4d0e78a313d903e81d750dc330.png', 'header_static': 'https://wandering.shop/system/accounts/headers/000/012/083/original/50bdde4d0e78a313d903e81d750dc330.png', 'id': 12083, 'locked': False, 'note': '<p>Author. Transhumanism and profanity. She/they/just ' 'this guy, you know? Never knows where the towels are at. 
' '</p><p>www.kcalexander.com</p>', 'statuses_count': 392, 'url': 'https://wandering.shop/@kacealexander', 'username': 'kacealexander'}, 'application': None, 'content': '<p>Goddamn, I love mastodon, y&apos;all. Please come join it. ' 'wandering.shop is where a lot of your fave authors are gathering, ' 'and mastodon.social is a good place, too. But don&apos;t let ' 'those two stop you. There are many instances (like scifi.fyi or ' 'witches.town or... joinmastodon.com)</p><p>I&apos;m really ' 'content here.</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 1, 'id': 99113667863200595, 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [], 'muted': False, 'pinned': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://wandering.shop/users/kacealexander/statuses/99113667863200595', 'url': 'https://wandering.shop/@kacealexander/99113667863200595', 'visibility': 'public'} toot_with_bogus_url = {'_pagination_prev': {'_pagination_endpoint': '/api/v1/accounts/3681/statuses', '_pagination_method': 'GET', 'since_id': 99292682141195094}, 'account': {'acct': 'baldur', 'avatar': 'https://toot.cafe/system/accounts/avatars/000/003/681/original/530e123abb09c828.jpg', 'avatar_static': 'https://toot.cafe/system/accounts/avatars/000/003/681/original/530e123abb09c828.jpg', 'created_at': datetime.datetime(2017, 4, 8, 16, 21, 37, 969000, tzinfo=tzutc()), 'display_name': 'Baldur Bjarnason', 'followers_count': 379, 'following_count': 157, 'header': 'https://toot.cafe/headers/original/missing.png', 'header_static': 'https://toot.cafe/headers/original/missing.png', 'id': 3681, 'locked': False, 'note': '<p>Tech Lead at Rebus Foundation. Lapsed Interactive ' 'Media Academic. Webby Tech Stuff and webby book ' 'stuff.</p>', 'statuses_count': 3016, 'url': 'https://toot.cafe/@baldur', 'username': 'baldur'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>The OS updates for Meltdown don&apos;t protect userland ' 'exploits. 
So the browsers are all updating mitigations like ' 'reducing the resolution of performance.now() and disabling ' 'SharedArrayBuffers.</p><p>None of these efforts actually prevent ' 'attacks.</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': 99292682141195094, 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [], 'muted': False, 'pinned': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': True, 'spoiler_text': 'Meltdown/Spectre for web devs', 'tags': [], 'uri': 'https://toot.cafe/users/baldur/statuses/99292682141195094', 'url': 'https://toot.cafe/@baldur/99292682141195094', 'visibility': 'public'} toot_incorrectly_truncated = {'account': {'acct': 'phildini', 'avatar': 'https://wandering.shop/system/accounts/avatars/000/000/001/original/e1068cb5c2779157.jpg', 'avatar_static': 'https://wandering.shop/system/accounts/avatars/000/000/001/original/e1068cb5c2779157.jpg', 'created_at': datetime.datetime(2017, 4, 14, 18, 41, 53, 450000, tzinfo=tzutc()), 'display_name': 'Phildini @ Home ✅', 'followers_count': 1023, 'following_count': 366, 'header': 'https://wandering.shop/system/accounts/headers/000/000/001/original/f3c768c5a3728380.jpg', 'header_static': 'https://wandering.shop/system/accounts/headers/000/000/001/original/f3c768c5a3728380.jpg', 'id': 1, 'locked': False, 'note': '<p>Wandering.shop admin. Writer of fiction, code, and ' 'campaigns.</p>', 'statuses_count': 3684, 'url': 'https://wandering.shop/@phildini', 'username': 'phildini'}, 'application': {'name': 'Amaroq', 'website': 'https://appsto.re/us/OfFxib.i'}, 'content': '<p>Has anyone written a story where the Amish play a crucial role ' 'in future society because they deliberately choose which ' 'technology they let in to their communities, and can therefore be ' 'safe “wake-up zones” for those cryogenically frozen or brought ' 'from the past in some way?</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 2, 'id': 99434181894510181, 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [], 'muted': False, 'pinned': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 2, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://wandering.shop/users/phildini/statuses/99434181894510181', 'url': 'https://wandering.shop/@phildini/99434181894510181', 'visibility': 'public'} sanitize_test = {'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'bot': False, 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': 'Moa test account', 'emojis': [], 'fields': [], 'followers_count': 0, 'following_count': 1, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'id': 6717, 'locked': True, 'note': '<p>Lot&apos;s of random garbage posts. 
You should ignore ' 'it.</p>', 'statuses_count': 270, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': {'name': 'Web', 'website': None}, 'content': '<p>Sanitize test:</p><p><span class="h-card"><a ' 'href="https://pdx.social/@moatest" class="u-url ' 'mention">@<span>moatest</span></a></span><br ' '/>@xcxcxcxc@twitter.com<br />xcxcxcxc@twitter.com<br ' '/>@xcxcxcxc</p><p>@xcxcxcxc@twitter.com.<br ' '/>xcxcxcxc@twitter.com.<br />@xcxcxcxc.</p>', 'created_at': datetime.datetime.now(timezone.utc), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': 100499052882653488, 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [{'acct': 'moatest', 'id': 6717, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}], 'muted': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/100499052882653488', 'url': 'https://pdx.social/@moatest/100499052882653488', 'visibility': 'private'} long_toot = {'account': {'acct': 'moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'bot': False, 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'display_name': 'Moa test account', 'emojis': [], 'fields': [], 'followers_count': 0, 'following_count': 1, 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'id': 6717, 'locked': True, 'note': '<p>Lot&apos;s of random garbage posts. You should ignore ' 'it.</p>', 'statuses_count': 272, 'url': 'https://pdx.social/@moatest', 'username': 'moatest'}, 'application': {'name': 'Whalebird', 'website': 'https://whalebird.org'}, 'content': '<p>It is a long established fact that a reader will be distracted ' 'by the readable content of a page when looking at its layout. The ' 'point of using Lorem Ipsum is that it has a more-or-less normal ' 'distribution of letters, as opposed to using &apos;Content here, ' 'content here&apos;, making it look like readable English. Many ' 'desktop publishing packages and web page editors now use Lorem ' 'Ipsum as their default model text, and a search for &apos;lorem ' 'ipsum&apos; will uncover many web sites still in their infancy. 
' 'Various</p>', 'created_at': datetime.datetime(2018, 8, 6, 0, 39, 52, 945000, tzinfo=tzutc()), 'emojis': [], 'favourited': False, 'favourites_count': 0, 'id': 100500504096121723, 'in_reply_to_account_id': None, 'in_reply_to_id': None, 'language': 'en', 'media_attachments': [], 'mentions': [], 'muted': False, 'pinned': False, 'reblog': None, 'reblogged': False, 'reblogs_count': 0, 'sensitive': False, 'spoiler_text': '', 'tags': [], 'uri': 'https://pdx.social/users/moatest/statuses/100500504096121723', 'url': 'https://pdx.social/@moatest/100500504096121723', 'visibility': 'public'} long_toot_with_link = {'id': 104106673572051382, 'created_at': datetime.datetime(2020, 5, 3, 21, 36, 12, 271000, tzinfo=tzutc()), 'in_reply_to_id': None, 'in_reply_to_account_id': None, 'sensitive': False, 'spoiler_text': '', 'visibility': 'private', 'language': 'en', 'uri': 'https://pdx.social/users/moatest/statuses/104106673572051382', 'url': 'https://pdx.social/@moatest/104106673572051382', 'replies_count': 0, 'reblogs_count': 0, 'favourites_count': 0, 'favourited': False, 'reblogged': False, 'muted': False, 'bookmarked': False, 'content': '<p>Managed to get Telus IPv6 working on my UniFi Dream Machine-powered home network without their crappy router in the mix!</p><p>I followed this bit about setting dhcpv6 to prefix-only: <a href="https://chadohman.ca/telus-optik-iptv-on-ubiquiti-unifi/#DHCPv6_IGMP_Proxy" rel="nofollow noopener noreferrer" target="_blank"><span class="invisible">https://</span><span class="ellipsis">chadohman.ca/telus-optik-iptv-</span><span class="invisible">on-ubiquiti-unifi/#DHCPv6_IGMP_Proxy</span></a>, but on the Dream Machine the config is at /mnt/data/unifi/data/sites</p><p>There is more to it (all in that article) if you use their TV streaming service, but we don’t, so just this bit works!</p><p>And on the other end I just had to make sure to turn on IPv6 delegation on the LAN side under the hidden</p>', 'reblog': None, 'application': {'name': 'Web', 'website': None}, 'account': {'id': 6717, 'username': 'moatest', 'acct': 'moatest', 'display_name': 'Moa test account', 'locked': True, 'bot': False, 'discoverable': None, 'group': False, 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'note': '<p>Lot&apos;s of random garbage posts. 
You should ignore it.</p>', 'url': 'https://pdx.social/@moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'followers_count': 0, 'following_count': 0, 'statuses_count': 377, 'last_status_at': datetime.datetime(2020, 5, 3, 0, 0), 'emojis': [], 'fields': []}, 'media_attachments': [], 'mentions': [], 'tags': [], 'emojis': [], 'card': {'url': 'https://chadohman.ca/telus-optik-iptv-on-ubiquiti-unifi/#DHCPv6_IGMP_Proxy', 'title': 'TELUS Optik IPTV on Ubiquiti Unifi – Chad Ohman', 'description': '', 'type': 'link', 'author_name': 'Chad Ohman', 'author_url': 'https://chadohman.ca/author/chad_ohman/', 'provider_name': 'Chad Ohman', 'provider_url': 'https://chadohman.ca', 'html': '', 'width': 0, 'height': 0, 'image': None, 'embed_url': ''}, 'poll': None} long_toot_with_two_links = {'id': 104106946023808139, 'created_at': datetime.datetime(2020, 5, 3, 22, 45, 29, 424000, tzinfo=tzutc()), 'in_reply_to_id': None, 'in_reply_to_account_id': None, 'sensitive': False, 'spoiler_text': '', 'visibility': 'private', 'language': 'en', 'uri': 'https://pdx.social/users/moatest/statuses/104106946023808139', 'url': 'https://pdx.social/@moatest/104106946023808139', 'replies_count': 0, 'reblogs_count': 0, 'favourites_count': 0, 'favourited': False, 'reblogged': False, 'muted': False, 'bookmarked': False, 'content': '<p>I feel like I must be the only one but I still deeply, sincerely miss the early iOS 6 era Podcasts app with its simulated reel-to-reel UI</p><p>(images from <a href="https://nicemohawk.com/blog/2013/03/making-screens-look-like-objects/" rel="nofollow noopener noreferrer" target="_blank"><span class="invisible">https://</span><span class="ellipsis">nicemohawk.com/blog/2013/03/ma</span><span class="invisible">king-screens-look-like-objects/</span></a> and <a href="https://www.niemanlab.org/2012/06/apple-tune-into-podcast-stations-on-an-iphone-radio-dial/" rel="nofollow noopener noreferrer" target="_blank"><span class="invisible">https://www.</span><span class="ellipsis">niemanlab.org/2012/06/apple-tu</span><span class="invisible">ne-into-podcast-stations-on-an-iphone-radio-dial/</span></a>)</p>', 'reblog': None, 'application': {'name': 'Web', 'website': None}, 'account': {'id': 6717, 'username': 'moatest', 'acct': 'moatest', 'display_name': 'Moa test account', 'locked': True, 'bot': False, 'discoverable': None, 'group': False, 'created_at': datetime.datetime(2017, 10, 30, 20, 34, 29, 216000, tzinfo=tzutc()), 'note': '<p>Lot&apos;s of random garbage posts. 
You should ignore it.</p>', 'url': 'https://pdx.social/@moatest', 'avatar': 'https://pdx.social/avatars/original/missing.png', 'avatar_static': 'https://pdx.social/avatars/original/missing.png', 'header': 'https://pdx.social/headers/original/missing.png', 'header_static': 'https://pdx.social/headers/original/missing.png', 'followers_count': 0, 'following_count': 0, 'statuses_count': 377, 'last_status_at': datetime.datetime(2020, 5, 3, 0, 0), 'emojis': [], 'fields': []}, 'media_attachments': [], 'mentions': [], 'tags': [], 'emojis': [], 'card': {'url': 'https://nicemohawk.com/blog/2013/03/making-screens-look-like-objects/', 'title': 'Making Screens Look Like Objects - The Fine Edge', 'description': '\n\tMaking Screens Look Like Objects\n\tBob\nThere are lots of apps on iOS and Mac that try to look like real-world objects, and many of them are terrib...', 'type': 'link', 'author_name': '', 'author_url': '', 'provider_name': '', 'provider_url': '', 'html': '', 'width': 0, 'height': 0, 'image': None, 'embed_url': ''}, 'poll': None}
86.230053
2,773
0.348508
4,232
64,845
5.228497
0.140359
0.050843
0.076558
0.029963
0.757265
0.723776
0.713653
0.691599
0.687079
0.674967
0
0.084124
0.542987
64,845
751
2,774
86.344874
0.662122
0
0
0.678474
0
0.038147
0.341399
0.029578
0
0
0
0
0
1
0
false
0
0.004087
0
0.004087
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1cf206f165c00b0c5c4633e0fa62e26add60371b
86
py
Python
instance/config.py
mungai-joel/Blog
c1eb64030086b7361bbdaa98c639112ae288d137
[ "Unlicense" ]
null
null
null
instance/config.py
mungai-joel/Blog
c1eb64030086b7361bbdaa98c639112ae288d137
[ "Unlicense" ]
null
null
null
instance/config.py
mungai-joel/Blog
c1eb64030086b7361bbdaa98c639112ae288d137
[ "Unlicense" ]
null
null
null
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://moonguy:notmoonguy@localhost/blog'
43
84
0.825581
9
86
7.666667
1
0
0
0
0
0
0
0
0
0
0
0.012346
0.05814
86
2
85
43
0.839506
0
0
0
0
0
0.632184
0.632184
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
08fd2622b9996ab5d4c888607219f73069436e83
27,875
py
Python
webserver/app/main/controller/document_controller.py
pansila/Auto-Test-System
bfe51a277466939a32daa08f27a89cf3c1900def
[ "MIT" ]
14
2019-02-19T01:31:08.000Z
2021-12-12T12:56:08.000Z
webserver/app/main/controller/document_controller.py
pansila/Auto-Test-System
bfe51a277466939a32daa08f27a89cf3c1900def
[ "MIT" ]
2
2020-03-10T12:12:10.000Z
2020-03-10T12:12:10.000Z
webserver/app/main/controller/document_controller.py
pansila/Auto-Test-System
bfe51a277466939a32daa08f27a89cf3c1900def
[ "MIT" ]
4
2019-07-09T02:00:13.000Z
2020-08-18T14:04:24.000Z
import aiofiles import asyncio import base64 import json import datetime import subprocess import os import shutil from pathlib import Path from sanic_openapi import doc from sanic.response import json, file from sanic import Blueprint from sanic.views import HTTPMethodView from async_files.utils import async_wraps from ..model.database import Documentation, User from ..util.dto import DocDto, json_response from ..util.tarball import path_to_dict from ..util.response import response_message, ENOENT, EINVAL, SUCCESS, EACCES, EPERM, EEXIST, GIT_ERROR from ..util.decorator import token_required, organization_team_required_by_json, organization_team_required_by_args, organization_team_required_by_form from ..util.get_path import get_document_root, get_pictures_root, is_path_secure from ..util import js2python_bool, async_listdir, async_exists, async_rmtree, async_makedirs _doc_roots = DocDto.doc_roots _doc_history = DocDto.doc_history _doc_pictures = DocDto.doc_pictures _doc_content = DocDto.doc_content _doc_query = DocDto.doc_query bp = Blueprint('doc', url_prefix='/doc') def check_editable(doc, user, organization, team, proprietary=None): if user.is_admin(): return True if doc: # modify a file if doc.proprietary: if doc.locked: if team: for u in team.editors: if u == user: return True for u in organization.editors: if u == user: return True else: return True else: if doc.locked: if user.is_collaborator(): return True else: return True else: # new a file if team: for u in team.editors: if u == user: return True for u in organization.editors: if u == user: return True if not proprietary and user.is_collaborator(): return True return False @async_wraps def git_checkout_add_push(repo_root, language, new_branch=False, debug=False): if new_branch: try: subprocess.run(['git', 'checkout', '-b', language], cwd=repo_root, check=True, stdout=subprocess.DEVNULL if not debug else None) except subprocess.CalledProcessError: return json(response_message(GIT_ERROR, "git checkout branch error")) with open(repo_root / '.revision') as f: revision = f.read() with open(repo_root / '.revision', 'w') as f: f.write(str(int(revision) + 1)) try: subprocess.run(['git', 'add', '.'], cwd=repo_root, check=True, stdout=subprocess.DEVNULL if not debug else None) except subprocess.CalledProcessError: return json(response_message(GIT_ERROR, "git add error")) try: subprocess.run(['git', 'commit', '-m', str(int(revision) + 1)], cwd=repo_root, check=True, stdout=subprocess.DEVNULL if not debug else None) except subprocess.CalledProcessError: return json(response_message(GIT_ERROR, "git commit error")) try: subprocess.run(['git', 'push', 'origin', language], cwd=repo_root, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL if not debug else None) except subprocess.CalledProcessError: return json(response_message(GIT_ERROR, "git push error")) return json(response_message(SUCCESS)) @bp.get('/roots') @doc.summary('Return document paths in the root directory') @doc.consumes(doc.String(name='X-Token'), location='header') @doc.consumes(doc.Boolean(name='proprietary')) @doc.consumes(doc.String(name='language')) @doc.produces(_doc_roots) @token_required @organization_team_required_by_args async def handler(self, request): organization = request.ctx.organization team = request.ctx.team proprietary = js2python_bool(request.args.get('proprietary', False)) language = request.args.get('language', 'en') paths = [{'value': 0, 'label': '/'}] if proprietary: doc_root = get_document_root(language, organization, team) else: doc_root 
= get_document_root(language, None, None) dirs = await async_listdir(doc_root) for i, d in enumerate(dirs): if (doc_root / d).is_dir(): paths.append({'value': i + 1, 'label': d}) return json(response_message(SUCCESS, paths=paths)) @bp.get('/check') @doc.summary('Check whether the requester has the privilege to edit the page') @doc.consumes(doc.String(name='X-Token'), location='header') @doc.consumes(_doc_query) @doc.produces(json_response) @token_required @organization_team_required_by_args async def handler(self, request): path = request.args.get('path', None) if not path: return json(response_message(EINVAL)) language = request.args.get('language', 'en') proprietary = js2python_bool(request.args.get('proprietary', False)) organization = request.ctx.organization team = request.ctx.team user = request.ctx.get('user') query = {'path': path, 'proprietary': proprietary, 'language': language} if proprietary: query['organization'] = organization if team: query['team'] = team doc = await Documentation.find_one(query) # if not doc: # return json(response_message(ENOENT)) if check_editable(doc, user, organization, team): return json(response_message(SUCCESS)) return json(response_message(EACCES)) @bp.get('/history') @doc.summary('get the page change history') @doc.consumes(doc.String(name='X-Token'), location='header') @doc.consumes(_doc_query) @doc.produces(_doc_history) @token_required @organization_team_required_by_args async def handler(self, request): language = request.args.get('language', 'en') proprietary = js2python_bool(request.args.get('proprietary', False)) path = request.args.get('path', None) if not path: return json(response_message(ENOENT)) organization = request.ctx.organization team = request.ctx.team user = request.ctx.user query = {'path': path, 'proprietary': proprietary, 'language': language} if proprietary: query['organization'] = organization if team: query['team'] = team doc = await Documentation.find_one(query) if not doc: return json(response_message(ENOENT)) if proprietary: doc_root = get_document_root(language, organization, team) else: doc_root = get_document_root(language, None, None) doc_root_parent = doc_root.parent process = await asyncio.create_subprocess_exec('git', 'checkout', language, cwd=doc_root_parent, stdout=asyncio.subprocess.DEVNULL, stderr=asyncio.subprocess.DEVNULL) await process.wait() if process.returncode != 0: return json(response_message(GIT_ERROR, 'git checkout error')) process = await asyncio.create_subprocess_exec('git', 'log', '--pretty=oneline', '-p', Path(language) / doc.path, '-10', cwd=doc_root_parent, stdout=asyncio.subprocess.PIPE) history = [] results = [] pre_line = None cnt = 0 revision = None while True: line = await process.stdout.readline() if not line: break line = line.decode() parts = line.split(' ') if parts[0] == 'diff': if revision is not None: history.append({'title': revision, 'revision': revision, 'description': '\n'.join(results[5:-1])}) cid, revision = pre_line.split(' ') results = [] cnt = 0 if '.md ' in line or 0 < cnt < 10: cnt += 1 if line != '\\ No newline at end of file': results.append(line) pre_line = line else: history.append({'title': revision, 'revision': revision, 'description': '\n'.join(results[5:])}) return json(response_message(SUCCESS, history=history)) class PicturePathView(HTTPMethodView): @doc.summary('Return the directories of the current path') @doc.consumes(doc.String(name='X-Token'), location='header') @doc.consumes(_doc_query) @doc.produces(_doc_roots) @token_required 
@organization_team_required_by_args async def get(self, request): organization = request.ctx.organization team = request.ctx.team proprietary = js2python_bool(request.args.get('proprietary', False)) language = request.args.get('language', 'en') path = request.args.get('path', None) if not path: return json(response_message(EINVAL, 'field path can not be empty')) path.lstrip('/').lstrip('\\') path = Path(path) if not is_path_secure(path): return json(response_message(EINVAL, 'Illegal path')) if path.suffix: path = path.parent paths = [] if proprietary: pic_root = get_pictures_root(language, organization, team) else: pic_root = get_pictures_root(language, None, None) if not await async_exists(pic_root): await aiofiles.os.mkdir(pic_root) pic_root_parent = pic_root.parent for f in await async_listdir(pic_root_parent / path): if (pic_root_parent / path / f).is_dir(): paths.append({'value': f, 'label': f}) return json(response_message(SUCCESS, paths=paths)) @doc.summary('Create a directory under the specified path') @doc.consumes(doc.String(name='X-Token'), location='header') @doc.consumes(_doc_query, location='body') @doc.produces(json_response) @token_required @organization_team_required_by_json async def post(self, request): organization = request.ctx.organization team = request.ctx.team proprietary = js2python_bool(request.json.get('proprietary', False)) language = request.json.get('language', 'en') path = request.json.get('path', None) if not path: return json(response_message(EINVAL, 'field path can not be empty')) path.lstrip('/').lstrip('\\') path = Path(path) if not is_path_secure(path): return json(response_message(EINVAL, 'Illegal path')) if path.suffix: path = path.parent paths = [] if proprietary: pic_root = get_pictures_root(language, organization, team) else: pic_root = get_pictures_root(language, None, None) if not await async_exists(pic_root): await aiofiles.os.mkdir(pic_root) pic_root_parent = pic_root.parent if await async_exists(pic_root_parent / path): return json(response_message(EEXIST)) await aiofiles.os.mkdir(pic_root_parent / path) return json(response_message(SUCCESS)) @doc.summary('Delete a directory under the specified path') @doc.consumes(doc.String(name='X-Token'), location='header') @doc.consumes(_doc_query, location='body') @doc.produces(json_response) @token_required @organization_team_required_by_json async def delete(self, request): organization = request.ctx.organization team = request.ctx.team proprietary = js2python_bool(request.json.get('proprietary', False)) language = request.json.get('language', 'en') path = request.json.get('path', None) if not path: return json(response_message(EINVAL, 'field path can not be empty')) path.lstrip('/').lstrip('\\') path = Path(path) if not is_path_secure(path): return json(response_message(EINVAL, 'Illegal path')) if path == '.' 
or path == './': return json(response_message(EPERM, 'root directory can not be deleted')) if path == language or path == './' + language or path == '.\\' + language: return json(response_message(EPERM, 'language directory can not be deleted')) if path.suffix: path = path.parent paths = [] if proprietary: pic_root = get_pictures_root(language, organization, team) else: pic_root = get_pictures_root(language, None, None) if not await async_exists(pic_root): await aiofiles.os.mkdir(pic_root) pic_root_parent = pic_root.parent if not await async_exists(pic_root_parent / path): return json(response_message(ENOENT)) await async_rmtree(pic_root_parent / path) return json(response_message(SUCCESS)) class DocumentPictureView(HTTPMethodView): @doc.summary('Return all pictures under a path') @doc.consumes(doc.String(name='X-Token'), location='header') @doc.consumes(_doc_query) @doc.produces(_doc_pictures) @token_required @organization_team_required_by_args async def get(self, request): proprietary = js2python_bool(request.args.get('proprietary', False)) language = request.args.get('language', 'en') path = request.args.get('path', None) if not path: return json(response_message(EINVAL)) path.lstrip('/').lstrip('\\') path = Path(path) if not is_path_secure(path): return json(response_message(EINVAL, 'Illegal path')) if path.suffix: path = path.parent if proprietary: organization = request.ctx.organization team = request.ctx.team if not organization and not team: return json(response_message(EINVAL)) pic_root = get_pictures_root(language, organization, team) else: pic_root = get_pictures_root(language, None, None) if not await async_exists(pic_root): await aiofiles.os.mkdir(pic_root) pic_root_parent = pic_root.parent fileList = [] for f in await async_listdir(pic_root_parent / path): fileName = pic_root_parent / path / f if fileName.is_dir(): continue async with aiofiles.open(fileName, 'rb') as pic: data = await pic.read() fileList.append({ 'name': f, 'data': base64.b64encode(data).decode('ascii'), 'type': f'image/{f.suffix[1:]}', 'size': os.path.getsize(fileName) }) return json(response_message(SUCCESS, file_list=fileList)) @doc.summary('Upload pictures to a path') @doc.consumes(doc.String(name='X-Token'), location='header') @doc.consumes(_doc_query, location='body') @doc.produces(json_response) @token_required @organization_team_required_by_form async def post(self, request): proprietary = js2python_bool(request.form.get('proprietary', False)) language = request.form.get('language', 'en') path = request.form.get('path', None) if not path: return json(response_message(EINVAL)) path.lstrip('/').lstrip('\\') path = Path(path) if not is_path_secure(path): return json(response_message(EINVAL, 'Illegal path')) if path.suffix: path = path.parent if proprietary: organization = request.ctx.organization team = request.ctx.team if not organization and not team: return json(response_message(EINVAL)) pic_root = get_pictures_root(language, organization, team) else: pic_root = get_pictures_root(language, None, None) if not await async_exists(pic_root): await aiofiles.os.mkdir(pic_root) pic_root_parent = pic_root.parent for k in request.files: await async_wraps(request.files[k].save)(pic_root_parent / path / k) return json(response_message(SUCCESS)) @doc.summary('Remove a picture under a path') @doc.consumes(doc.String(name='X-Token'), location='header') @doc.consumes(_doc_query, location='body') @doc.produces(json_response) @token_required @organization_team_required_by_json async def delete(self, request): 
        proprietary = js2python_bool(request.json.get('proprietary', False))
        language = request.json.get('language', 'en')
        path = request.json.get('path', None)
        if not path:
            return json(response_message(EINVAL))
        # strip any leading path separators before validating
        path = path.lstrip('/').lstrip('\\')
        path = Path(path)
        if not is_path_secure(path):
            return json(response_message(EINVAL, 'Illegal path'))
        if path.suffix:
            path = path.parent
        if proprietary:
            organization = request.ctx.organization
            team = request.ctx.team
            if not organization and not team:
                return json(response_message(EINVAL))
            pic_root = get_pictures_root(language, organization, team)
        else:
            pic_root = get_pictures_root(language, None, None)
        if not await async_exists(pic_root):
            await aiofiles.os.mkdir(pic_root)
        pic_root_parent = pic_root.parent

        filename = request.json.get('filename', None)
        if not filename:
            return json(response_message(EINVAL))
        await aiofiles.os.remove(pic_root_parent / path / filename)
        return json(response_message(SUCCESS))

class DocumentView(HTTPMethodView):
    @doc.summary('Return markdown file content')
    @doc.consumes(doc.String(name='X-Token'), location='header')
    @doc.consumes(doc.String(name='proprietary'), location='body')
    @doc.consumes(doc.String(name='language'), location='body')
    @doc.produces(_doc_content)
    @token_required
    @organization_team_required_by_args
    async def get(self, request, file_path):
        if not file_path:
            file_path = 'home.md'
        if file_path.endswith('/'):
            file_path = file_path[:-1]
        organization = request.ctx.organization
        team = request.ctx.team
        proprietary = js2python_bool(request.args.get('proprietary', False))
        language = request.args.get('language', 'en')

        query = {'path': file_path, 'proprietary': proprietary, 'language': language}
        if proprietary:
            query['organization'] = organization
            if team:
                query['team'] = team
        doc = await Documentation.find_one(query)
        if not doc:
            return json(response_message(ENOENT))
        doc.view_times += 1
        await doc.commit()

        if proprietary:
            doc_root = get_document_root(language, organization, team)
        else:
            doc_root = get_document_root(language, None, None)
        doc_root_parent = doc_root.parent

        process = await asyncio.create_subprocess_exec(
            'git', 'checkout', language,
            cwd=doc_root_parent,
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.DEVNULL)
        await process.wait()
        if process.returncode != 0:
            return json(response_message(GIT_ERROR, 'git checkout error'))

        async with aiofiles.open((doc_root / doc.path).resolve()) as f:
            return json(response_message(SUCCESS, content=await f.read(), locked=doc.locked))

    @doc.summary('Update a markdown file')
    @doc.consumes(doc.String(name='X-Token'), location='header')
    @doc.consumes(doc.String(name='proprietary'), location='body')
    @doc.consumes(doc.String(name='language'), location='body')
    @doc.consumes(doc.String(name='doc_content'), location='body')
    @doc.produces(json_response)
    @token_required
    @organization_team_required_by_json
    async def post(self, request, file_path):
        if not file_path:
            return json(response_message(EINVAL, 'file\'s path is required'))
        if not is_path_secure(file_path):
            return json(response_message(EINVAL, 'Illegal path'))
        if not file_path.endswith('.md'):
            file_path += '.md'
        organization = request.ctx.organization
        team = request.ctx.team
        proprietary = js2python_bool(request.json.get('proprietary', False))
        user = request.ctx.user
        doc_content = request.json.get('doc_content', '')
        language = request.json.get('language', 'en')

        query = {'path': file_path, 'proprietary': proprietary,
                 'filename': os.path.basename(file_path), 'language': language}
        if proprietary:
            query['organization'] = organization
            if team:
                query['team'] = team
        doc = await Documentation.find_one(query)
        if not doc:
            if not check_editable(None, user, organization, team, proprietary):
                return json(response_message(EACCES))
            doc = Documentation(**query)
            doc.uploader = user
        else:
            if not check_editable(doc, user, organization, team):
                return json(response_message(EACCES))
            doc.last_modified = datetime.datetime.utcnow()
            doc.last_modifier = user
        if file_path == 'home.md':
            doc.locked = True
        await doc.commit()

        if proprietary:
            doc_root = get_document_root(language, organization, team)
        else:
            doc_root = get_document_root(language, None, None)
        if not doc_root.exists():
            await async_makedirs(doc_root)
        doc_root_parent = doc_root.parent

        git_root = doc_root_parent / 'doc.git'
        if not git_root.exists():
            await async_makedirs(git_root)
            process = await asyncio.create_subprocess_exec('git', '--bare', 'init', cwd=git_root)
            await process.wait()
            if process.returncode != 0:
                return json(response_message(GIT_ERROR, "git init error"))
        if not (doc_root_parent / '.git').exists():
            async with aiofiles.open(doc_root_parent / '.gitignore', 'w') as f:
                await f.write('doc.git\n')
            async with aiofiles.open(doc_root_parent / '.revision', 'w') as f:
                await f.write('0')
            process = await asyncio.create_subprocess_exec('git', 'init', cwd=doc_root_parent)
            await process.wait()
            if process.returncode != 0:
                return json(response_message(GIT_ERROR, "git local init error"))
            process = await asyncio.create_subprocess_exec(
                'git', 'remote', 'add', 'origin', git_root.resolve(), cwd=doc_root_parent)
            await process.wait()
            if process.returncode != 0:
                return json(response_message(GIT_ERROR, "git remote add error"))
            await git_checkout_add_push(doc_root_parent, 'master', new_branch=True)

        process = await asyncio.create_subprocess_exec(
            'git', 'checkout', language,
            cwd=doc_root_parent,
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.DEVNULL)
        await process.wait()
        if process.returncode != 0:
            # the per-language branch does not exist yet, create it as an orphan
            process = await asyncio.create_subprocess_exec(
                'git', 'checkout', '--orphan', language, cwd=doc_root_parent)
            await process.wait()
            if process.returncode != 0:
                return json(response_message(GIT_ERROR, "git checkout branch error"))
            else:
                for f in await async_listdir(doc_root_parent):
                    if f != 'doc.git' and f != '.git' and f != '.gitignore' and f != language:
                        try:
                            await aiofiles.os.remove(doc_root_parent / f)
                        except OSError:
                            await async_rmtree(doc_root_parent / f)
                async with aiofiles.open(doc_root_parent / '.revision', 'w') as f:
                    await f.write('0')

        dirname = os.path.dirname(doc.path)
        if dirname and not (doc_root / dirname).exists():
            await async_makedirs(doc_root / dirname)
        async with aiofiles.open(doc_root / doc.path, 'w') as f:
            await f.write(doc_content)
        await git_checkout_add_push(doc_root_parent, language)

        return json(response_message(SUCCESS))

    @doc.summary('Delete a markdown file')
    @doc.consumes(doc.String(name='X-Token'), location='header')
    @doc.consumes(doc.String(name='proprietary'), location='body')
    @doc.consumes(doc.String(name='language'), location='body')
    @doc.produces(json_response)
    @token_required
    @organization_team_required_by_json
    async def delete(self, request, file_path):
        if not file_path:
            return json(response_message(EINVAL, 'file\'s path is required'))
        if not file_path.endswith('.md'):
            file_path += '.md'
        organization = request.ctx.organization
        team = request.ctx.team
        proprietary = js2python_bool(request.json.get('proprietary', False))
        user = request.ctx.user
        language = request.json.get('language', 'en')

        query = {'path': file_path, 'proprietary': proprietary, 'language': language}
        if proprietary:
            query['organization'] = organization
            if team:
                query['team'] = team
        doc = await Documentation.find_one(query)
        if not doc:
            return json(response_message(ENOENT, 'document not found'))
        if not check_editable(doc, user, organization, team):
            return json(response_message(EACCES))

        if proprietary:
            doc_root = get_document_root(language, organization, team)
        else:
            doc_root = get_document_root(language, None, None)
        doc_root_parent = doc_root.parent

        process = await asyncio.create_subprocess_exec(
            'git', 'checkout', language,
            cwd=doc_root_parent,
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.DEVNULL)
        await process.wait()
        if process.returncode != 0:
            return json(response_message(ENOENT, 'file not found'))

        await aiofiles.os.remove(doc_root / doc.path)
        await doc.delete()
        await git_checkout_add_push(doc_root_parent, language)

        return json(response_message(SUCCESS))

    @doc.summary('Lock a document file so that only allowed users can edit it')
    @doc.consumes(doc.String(name='X-Token'), location='header')
    @doc.consumes(_doc_query, location='body')
    @doc.consumes(doc.String(name='lock'), location='body')
    @doc.consumes(doc.String(name='proprietary'), location='body')
    @doc.consumes(doc.String(name='language'), location='body')
    @doc.produces(json_response)
    @token_required
    @organization_team_required_by_json
    async def patch(self, request, file_path):
        if not file_path:
            return json(response_message(EINVAL, 'file\'s path is required'))
        if not file_path.endswith('.md'):
            file_path += '.md'
        organization = request.ctx.organization
        team = request.ctx.team
        user = request.ctx.user
        proprietary = js2python_bool(request.json.get('proprietary', False))
        language = request.json.get('language', 'en')
        lock = js2python_bool(request.json.get('lock', None))

        query = {'path': file_path, 'proprietary': proprietary, 'language': language}
        if proprietary:
            query['organization'] = organization
            if team:
                query['team'] = team
        doc = await Documentation.find_one(query)
        if not doc:
            return json(response_message(ENOENT, 'document not found'))
        if not check_editable(doc, user, organization, team):
            return json(response_message(EACCES))

        doc.locked = lock
        await doc.commit()
        return json(response_message(SUCCESS))

bp.add_route(DocumentPictureView.as_view(), '/pictures')
bp.add_route(PicturePathView.as_view(), '/picture/path')
bp.add_route(DocumentView.as_view(), '/<file_path:path>')
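# Hedged illustration only: the handlers above call is_path_secure() (imported
# elsewhere in this module) to reject directory traversal. The sketch below is
# an assumption about what such a check might look like, not the project's
# implementation; the name _example_is_path_secure is hypothetical and is not
# used by the routes registered above.
def _example_is_path_secure(path):
    p = Path(path)
    # Reject absolute paths and any '..' component that could escape the root.
    return not p.is_absolute() and '..' not in p.parts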
40.634111
177
0.63548
3,345
27,875
5.13154
0.079522
0.048937
0.063967
0.088844
0.806874
0.764696
0.730556
0.705214
0.687271
0.674163
0
0.002397
0.251803
27,875
685
178
40.693431
0.820627
0.002798
0
0.688119
0
0.00165
0.087579
0
0
0
0
0
0
1
0.0033
false
0
0.034653
0
0.158416
0.0033
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1c07b295e10c08e7228aa1ec6fb2b3677497e104
3,162
py
Python
src/source/test_cleaning.py
hinha/kalkula-extension
57163eaf55d0a734e65afe8af6bb35fe07c03b55
[ "MIT" ]
null
null
null
src/source/test_cleaning.py
hinha/kalkula-extension
57163eaf55d0a734e65afe8af6bb35fe07c03b55
[ "MIT" ]
null
null
null
src/source/test_cleaning.py
hinha/kalkula-extension
57163eaf55d0a734e65afe8af6bb35fe07c03b55
[ "MIT" ]
null
null
null
import unittest
import hashlib

from . import cleaning


def FILE(file_name):
    # Open and read the file, returning its bytes together with an MD5 digest.
    # Note: f.read() below consumes the whole stream, so the hashing loop sees
    # no remaining data and the digest is always that of the empty byte string.
    prop = b""
    readable_hash = hashlib.md5()
    with open(".data/" + file_name, "rb") as f:
        prop += bytearray(f.read())
        for byte_block in iter(lambda: f.read(4096), b""):
            readable_hash.update(byte_block)
    return readable_hash.hexdigest(), prop


class TestCleaningExcel(unittest.TestCase):
    def test_file_id_checker(self):
        try:
            cleaning.Cleaning(b"123", "123", "name.xlsx")
        except Exception as e:
            self.assertEqual(str(e), "Unable to verify hash type")

    def test_file_id_checker_not_same(self):
        name = "Contoh-data-survei.xlsx"
        hash_file, io = FILE(name)
        try:
            c = cleaning.Cleaning(b"123", "a18de379d604194e4cb560364c6434c8", name)
            self.assertNotEqual(c.file_id, hash_file)
        except Exception as e:
            self.assertEqual(str(e), "Unable to verify hash type")

    def test_read_excel_error_unsupported_format(self):
        name = "Contoh-data-survei.xlsx"
        hash_file, io = FILE(name)
        c = cleaning.Cleaning(b"123", "d41d8cd98f00b204e9800998ecf8427e", name)
        self.assertEqual(c.file_id, hash_file)
        try:
            c.Excel()
        except Exception as e:
            self.assertEqual(str(e), "Unsupported format, or corrupt file")

    def test_read_excel(self):
        name = "Contoh-data-survei.xlsx"
        hash_file, io = FILE(name)
        c = cleaning.Cleaning(io, "d41d8cd98f00b204e9800998ecf8427e", name)
        self.assertEqual(c.file_id, hash_file)
        exp = c.Excel()
        self.assertNotEqual(io, exp.file)


class TestCleaningCsv(unittest.TestCase):
    def test_file_id_checker(self):
        try:
            cleaning.Cleaning(b"123", "123", "name.csv")
        except Exception as e:
            self.assertEqual(str(e), "Unable to verify hash type")

    def test_file_id_checker_not_same(self):
        name = "gevorment_2020.csv"
        hash_file, io = FILE(name)
        try:
            c = cleaning.Cleaning(b"123", "a18de379d604194e4cb560364c6434c8", name)
            self.assertNotEqual(c.file_id, hash_file)
        except Exception as e:
            self.assertEqual(str(e), "Unable to verify hash type")

    def test_read_csv_error_unsupported_format(self):
        name = "gevorment_2020.csv"
        hash_file, io = FILE(name)
        c = cleaning.Cleaning(b"123", "d41d8cd98f00b204e9800998ecf8427e", name)
        self.assertEqual(c.file_id, hash_file)
        try:
            c.Csv()
        except Exception as e:
            self.assertEqual(str(e), "Unsupported format, or corrupt file")

    def test_read_csv(self):
        name = "gevorment_2020.csv"
        hash_file, io = FILE(name)
        c = cleaning.Cleaning(io, "d41d8cd98f00b204e9800998ecf8427e", name)
        self.assertEqual(c.file_id, hash_file)
        try:
            exp = c.Csv()
            self.assertEqual(io, exp.file)
        except Exception as e:
            self.assertEqual(str(e), "Unable to verify hash type")
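# The literal "d41d8cd98f00b204e9800998ecf8427e" used throughout these tests is
# the MD5 digest of the empty byte string: FILE() consumes the whole stream into
# `prop` first, so the 4096-byte hashing loop sees no data. A minimal sketch
# verifying that fact, relying only on the hashlib import above:
if __name__ == '__main__':
    assert hashlib.md5(b"").hexdigest() == "d41d8cd98f00b204e9800998ecf8427e"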
31.939394
83
0.626502
392
3,162
4.905612
0.193878
0.093604
0.061882
0.065523
0.793552
0.770671
0.770671
0.770671
0.770671
0.767551
0
0.074074
0.265655
3,162
98
84
32.265306
0.754091
0.017394
0
0.662162
0
0
0.181643
0.084058
0
0
0
0
0.202703
1
0.121622
false
0
0.040541
0
0.202703
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1c0d31542fca61610c44233ca203070382223f69
417
py
Python
tests/fixtures/suites.py
heylouiz/spidermon
3ae2c46d1cf5b46efb578798b881264be3e68394
[ "BSD-3-Clause" ]
2
2019-10-03T16:47:11.000Z
2022-02-22T11:56:02.000Z
tests/fixtures/suites.py
heylouiz/spidermon
3ae2c46d1cf5b46efb578798b881264be3e68394
[ "BSD-3-Clause" ]
23
2019-05-30T20:27:38.000Z
2019-08-20T07:23:09.000Z
tests/fixtures/suites.py
heylouiz/spidermon
3ae2c46d1cf5b46efb578798b881264be3e68394
[ "BSD-3-Clause" ]
1
2022-03-24T03:01:19.000Z
2022-03-24T03:01:19.000Z
from __future__ import absolute_import

from spidermon import MonitorSuite

from .cases import *


class EmptySuite(MonitorSuite):
    pass


class Suite01(MonitorSuite):
    monitors = [Monitor01]


class Suite02(MonitorSuite):
    monitors = [Suite01, Monitor02]


class Suite03(MonitorSuite):
    monitors = [Suite01, Suite02]


class Suite04(MonitorSuite):
    monitors = [Suite01, Suite02, Monitor01, Monitor02]
16.038462
55
0.745803
41
417
7.463415
0.414634
0.261438
0.264706
0.222222
0
0
0
0
0
0
0
0.075362
0.172662
417
25
56
16.68
0.811594
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0.076923
0.230769
0
0.923077
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
5
1c0f780f655f8bf61c4185249b66a4546dfdfb8f
107
py
Python
test/test_import.py
csyben/PYRO-NN-Layers
9bec5ccaf62eb1fec01c1668ba1a6375673f8b12
[ "Apache-2.0" ]
24
2019-03-12T10:07:59.000Z
2022-01-07T09:37:43.000Z
test/test_import.py
theHamsta/PYRO-NN-Layers
c776c3d7315f483937a7cebf667c6d491ecd57e6
[ "Apache-2.0" ]
6
2019-06-19T15:45:48.000Z
2021-07-14T18:39:48.000Z
test/test_import.py
theHamsta/PYRO-NN-Layers
c776c3d7315f483937a7cebf667c6d491ecd57e6
[ "Apache-2.0" ]
10
2019-04-20T09:09:17.000Z
2021-05-06T09:25:10.000Z
import sys
from os.path import join, dirname

sys.path.insert(0, dirname(__file__))

import pyronn_layers_dev
17.833333
36
0.82243
18
107
4.555556
0.722222
0
0
0
0
0
0
0
0
0
0
0.010309
0.093458
107
5
37
21.4
0.835052
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1c2810076267faf13adba1ab729b2c286ab3d05b
169
py
Python
gif/start.py
MisterZhouZhou/pythonLearn
8933c7a6d444d3d86a173984e6cf4c08dbf84039
[ "Apache-2.0" ]
1
2019-07-09T09:59:39.000Z
2019-07-09T09:59:39.000Z
gif/start.py
MisterZhouZhou/pythonLearn
8933c7a6d444d3d86a173984e6cf4c08dbf84039
[ "Apache-2.0" ]
null
null
null
gif/start.py
MisterZhouZhou/pythonLearn
8933c7a6d444d3d86a173984e6cf4c08dbf84039
[ "Apache-2.0" ]
null
null
null
import subprocess

if __name__ == '__main__':
    subprocess.call("python3 index.py -p /Users/zhouwei/Desktop/python/pythonLearn/gif/test.jpg -t 16 -s 1.25", shell=True)
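# A hedged alternative sketch (not part of the original script): the same
# command can be issued without shell=True by passing an argument list, which
# sidesteps shell quoting. The helper name run_gif_demo is hypothetical; the
# paths and flags are taken verbatim from the call above.
def run_gif_demo():
    subprocess.call(["python3", "index.py",
                     "-p", "/Users/zhouwei/Desktop/python/pythonLearn/gif/test.jpg",
                     "-t", "16", "-s", "1.25"])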
42.25
123
0.739645
26
169
4.5
0.961538
0
0
0
0
0
0
0
0
0
0
0.04
0.112426
169
4
123
42.25
0.74
0
0
0
0
0.333333
0.564706
0.317647
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
1c466d37b86970653faf38b62fdf0da523eb0c8b
284
py
Python
src/Distiller/textbrewer/distillers.py
haroldNLP/Distiller
f3ab5f94a9092fca1e2bdb9f486e66fd0b24bcfd
[ "MIT" ]
2
2022-03-21T08:02:02.000Z
2022-03-21T08:29:07.000Z
src/Distiller/textbrewer/distillers.py
haroldNLP/Distiller
f3ab5f94a9092fca1e2bdb9f486e66fd0b24bcfd
[ "MIT" ]
null
null
null
src/Distiller/textbrewer/distillers.py
haroldNLP/Distiller
f3ab5f94a9092fca1e2bdb9f486e66fd0b24bcfd
[ "MIT" ]
null
null
null
from .distiller_train import BasicTrainer
from .distiller_basic import BasicDistiller
from .distiller_general import GeneralDistiller
from .distiller_multitask import MultiTaskDistiller
from .distiller_multiteacher import MultiTeacherDistiller
from .distiller_emd import EMDDistiller
40.571429
57
0.894366
30
284
8.266667
0.5
0.314516
0
0
0
0
0
0
0
0
0
0
0.084507
284
6
58
47.333333
0.953846
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
98dff5ba2a957906ca64c316dabab348cffccf3c
86
py
Python
src/hub/dataload/sources/uniprot/__init__.py
biothings/biothings.species
77733e178e52277b3d2394e80ed5c133f2929c19
[ "Apache-2.0" ]
null
null
null
src/hub/dataload/sources/uniprot/__init__.py
biothings/biothings.species
77733e178e52277b3d2394e80ed5c133f2929c19
[ "Apache-2.0" ]
4
2019-02-13T16:03:47.000Z
2021-10-17T23:10:34.000Z
src/hub/dataload/sources/uniprot/__init__.py
biothings/biothings.species
77733e178e52277b3d2394e80ed5c133f2929c19
[ "Apache-2.0" ]
3
2017-06-12T18:34:35.000Z
2021-04-12T07:49:57.000Z
from .dumper import UniprotSpeciesDumper
from .uploader import UniprotSpeciesUploader
28.666667
44
0.883721
8
86
9.5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.093023
86
2
45
43
0.974359
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c720eb8fb601ffd196ee2c975b85e06eccaaed92
153
py
Python
Whole-App-Acceleration/apps/resnet50/build_flow/DPUCADF8H_u200/scripts/utility/readme_gen/gs_summary_subdir.py
hito0512/Vitis-AI
996459fb96cb077ed2f7e789d515893b1cccbc95
[ "Apache-2.0" ]
848
2019-12-03T00:16:17.000Z
2022-03-31T22:53:17.000Z
dsa/WAA-TRD/proj/build/classification-pre_DPUv3int8/scripts/utility/readme_gen/gs_summary_subdir.py
wangyifan778/Vitis-AI
f61061eef7550d98bf02a171604c9a9f283a7c47
[ "Apache-2.0" ]
656
2019-12-03T00:48:46.000Z
2022-03-31T18:41:54.000Z
dsa/WAA-TRD/proj/build/classification-pre_DPUv3int8/scripts/utility/readme_gen/gs_summary_subdir.py
wangyifan778/Vitis-AI
f61061eef7550d98bf02a171604c9a9f283a7c47
[ "Apache-2.0" ]
506
2019-12-03T00:46:26.000Z
2022-03-30T10:34:56.000Z
#!/usr/bin/env python
import os, re
import fnmatch
import json
import sys

sys.path.append(".")
import gs_summary_util

gs_summary_util.genReadMe2(".")
12.75
31
0.75817
24
153
4.666667
0.666667
0.160714
0.232143
0
0
0
0
0
0
0
0
0.007407
0.117647
153
11
32
13.909091
0.822222
0.130719
0
0
0
0
0.015152
0
0
0
0
0
0
1
0
true
0
0.714286
0
0.714286
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c72d023a35b00761814722cbdc05736f6e658dd0
79
py
Python
tests/pruebas2.py
a-domingu/discern
785edfb90187c358e67592c70a8299bdf86ab410
[ "Apache-2.0" ]
null
null
null
tests/pruebas2.py
a-domingu/discern
785edfb90187c358e67592c70a8299bdf86ab410
[ "Apache-2.0" ]
null
null
null
tests/pruebas2.py
a-domingu/discern
785edfb90187c358e67592c70a8299bdf86ab410
[ "Apache-2.0" ]
null
null
null
import pruebas, prueba_simple

print(pruebas.Clase1_1().Clase1_2().firstn(5))
15.8
46
0.772152
12
79
4.833333
0.833333
0
0
0
0
0
0
0
0
0
0
0.068493
0.075949
79
4
47
19.75
0.726027
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
c7334d48217e176b7d099ef1eb2fe3dc5b5b2d0a
104,707
py
Python
wo/cli/plugins/stack.py
cdk-comp/WordOps
d1f0ccc7202d43c90ee7640f7acd4b7c3c158ee1
[ "MIT" ]
null
null
null
wo/cli/plugins/stack.py
cdk-comp/WordOps
d1f0ccc7202d43c90ee7640f7acd4b7c3c158ee1
[ "MIT" ]
null
null
null
wo/cli/plugins/stack.py
cdk-comp/WordOps
d1f0ccc7202d43c90ee7640f7acd4b7c3c158ee1
[ "MIT" ]
2
2021-01-02T07:49:51.000Z
2022-03-26T15:58:50.000Z
"""Stack Plugin for WordOps""" from cement.core.controller import CementBaseController, expose from cement.core import handler, hook from wo.cli.plugins.site_functions import * from wo.core.variables import WOVariables from wo.core.aptget import WOAptGet from wo.core.download import WODownload from wo.core.shellexec import WOShellExec, CommandExecutionError from wo.core.fileutils import WOFileUtils from wo.core.apt_repo import WORepo from wo.core.extract import WOExtract from wo.core.mysql import WOMysql from wo.core.addswap import WOSwap from wo.core.git import WOGit from wo.core.checkfqdn import check_fqdn from pynginxconfig import NginxConfig from wo.core.services import WOService from wo.core.variables import WOVariables import random import string import configparser import time import shutil import os import pwd import grp import codecs import platform from wo.cli.plugins.stack_services import WOStackStatusController from wo.cli.plugins.stack_migrate import WOStackMigrateController from wo.cli.plugins.stack_upgrade import WOStackUpgradeController from wo.core.logging import Log from wo.cli.plugins.sitedb import * def wo_stack_hook(app): pass class WOStackController(CementBaseController): class Meta: label = 'stack' stacked_on = 'base' stacked_type = 'nested' description = 'Stack command manages stack operations' arguments = [ (['--all'], dict(help='Install all stacks at once', action='store_true')), (['--web'], dict(help='Install web stack', action='store_true')), (['--admin'], dict(help='Install admin tools stack', action='store_true')), (['--nginx'], dict(help='Install Nginx stack', action='store_true')), # (['--nginxmainline'], # dict(help='Install Nginx mainline stack', action='store_true')), (['--php'], dict(help='Install PHP stack', action='store_true')), (['--php72'], dict(help='Install PHP 7.2 stack', action='store_true')), (['--mysql'], dict(help='Install MySQL stack', action='store_true')), (['--hhvm'], dict(help='Install HHVM stack', action='store_true')), (['--wpcli'], dict(help='Install WPCLI stack', action='store_true')), (['--phpmyadmin'], dict(help='Install PHPMyAdmin stack', action='store_true')), (['--adminer'], dict(help='Install Adminer stack', action='store_true')), (['--utils'], dict(help='Install Utils stack', action='store_true')), (['--redis'], dict(help='Install Redis', action='store_true')), (['--phpredisadmin'], dict(help='Install phpRedisAdmin', action='store_true')), ] usage = "ee stack (command) [options]" @expose(hide=True) def default(self): """default action of wo stack command""" self.app.args.print_help() @expose(hide=True) def pre_pref(self, apt_packages): """Pre settings to do before installation packages""" if set(WOVariables.wo_mysql).issubset(set(apt_packages)): Log.info(self, "Adding repository for MySQL, please wait...") mysql_pref = ("Package: *\nPin: origin sfo1.mirrors.digitalocean.com" "\nPin-Priority: 1000\n") with open('/etc/apt/preferences.d/' 'MariaDB.pref', 'w') as mysql_pref_file: mysql_pref_file.write(mysql_pref) WORepo.add(self, repo_url=WOVariables.wo_mysql_repo) Log.debug(self, 'Adding key for {0}' .format(WOVariables.wo_mysql_repo)) WORepo.add_key(self, '0xcbcb082a1bb943db', keyserver="keyserver.ubuntu.com") WORepo.add_key(self, '0xF1656F24C74CD1D8', keyserver="keyserver.ubuntu.com") chars = ''.join(random.sample(string.ascii_letters, 8)) Log.debug(self, "Pre-seeding MySQL") Log.debug(self, "echo \"mariadb-server-10.1 " "mysql-server/root_password " "password \" | " "debconf-set-selections") try: WOShellExec.cmd_exec(self, 
"echo \"mariadb-server-10.1 " "mysql-server/root_password " "password {chars}\" | " "debconf-set-selections" .format(chars=chars), log=False) except CommandExecutionError as e: Log.error("Failed to initialize MySQL package") Log.debug(self, "echo \"mariadb-server-10.1 " "mysql-server/root_password_again " "password \" | " "debconf-set-selections") try: WOShellExec.cmd_exec(self, "echo \"mariadb-server-10.1 " "mysql-server/root_password_again " "password {chars}\" | " "debconf-set-selections" .format(chars=chars), log=False) except CommandExecutionError as e: Log.error("Failed to initialize MySQL package") mysql_config = """ [client] user = root password = {chars} """.format(chars=chars) config = configparser.ConfigParser() config.read_string(mysql_config) Log.debug(self, 'Writting configuration into MySQL file') conf_path = "/etc/mysql/conf.d/my.cnf" os.makedirs(os.path.dirname(conf_path), exist_ok=True) with open(conf_path, encoding='utf-8', mode='w') as configfile: config.write(configfile) Log.debug(self, 'Setting my.cnf permission') WOFileUtils.chmod(self, "/etc/mysql/conf.d/my.cnf", 0o600) if set(WOVariables.wo_nginx).issubset(set(apt_packages)): Log.info(self, "Adding repository for NGINX, please wait...") WORepo.add(self, repo_url=WOVariables.wo_nginx_repo) Log.debug(self, 'Adding ppa of Nginx') WORepo.add_key(self, WOVariables.wo_nginx_key) if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): if set(WOVariables.wo_php72).issubset(set(apt_packages)): Log.info(self, "Adding repository for PHP, please wait...") Log.debug(self, 'Adding ppa for PHP') WORepo.add(self, ppa=WOVariables.wo_php_repo) else: if set(WOVariables.wo_php).issubset(set(apt_packages)): Log.info(self, "Adding repository for PHP, please wait...") # Add repository for php if WOVariables.wo_platform_distro == 'debian': if WOVariables.wo_platform_codename != 'jessie': Log.debug(self, 'Adding repo_url of php for debian') WORepo.add(self, repo_url=WOVariables.wo_php_repo) Log.debug(self, 'Adding Dotdeb/php GPG key') WORepo.add_key(self, '89DF5277') else: Log.debug(self, 'Adding ppa for PHP') WORepo.add(self, ppa=WOVariables.wo_php_repo) if WOVariables.wo_platform_codename == 'jessie': if set(WOVariables.wo_php72).issubset(set(apt_packages)): Log.debug(self, 'Adding repo_url of php 7.0 for debian') WORepo.add(self, repo_url=WOVariables.wo_php_repo) Log.debug(self, 'Adding Dotdeb/php GPG key') WORepo.add_key(self, '89DF5277') if set(WOVariables.wo_hhvm).issubset(set(apt_packages)): if (WOVariables.wo_platform_codename != 'xenial' or WOVariables.wo_platform_codename != 'bionic'): Log.info(self, "Adding repository for HHVM, please wait...") if WOVariables.wo_platform_codename == 'precise': Log.debug(self, 'Adding PPA for Boost') WORepo.add(self, ppa=WOVariables.wo_boost_repo) Log.debug(self, 'Adding ppa repo for HHVM') WORepo.add(self, repo_url=WOVariables.wo_hhvm_repo) Log.debug(self, 'Adding HHVM GPG Key') WORepo.add_key(self, '0x5a16e7281be7a449') else: Log.info(self, "Using default Ubuntu repository for HHVM") if set(WOVariables.wo_redis).issubset(set(apt_packages)): Log.info(self, "Adding repository for Redis, please wait...") if WOVariables.wo_platform_distro == 'debian': Log.debug(self, 'Adding repo_url of redis for debian') WORepo.add(self, repo_url=WOVariables.wo_redis_repo) Log.debug(self, 'Adding Dotdeb GPG key') WORepo.add_key(self, '89DF5277') else: Log.debug(self, 'Adding ppa for redis') WORepo.add(self, 
ppa=WOVariables.wo_redis_repo) @expose(hide=True) def post_pref(self, apt_packages, packages): """Post activity after installation of packages""" if len(apt_packages): if set(WOVariables.wo_nginx).issubset(set(apt_packages)): if set(["nginx-plus"]).issubset(set(apt_packages)) or set(["nginx"]).issubset(set(apt_packages)): # Fix for white screen death with NGINX PLUS if not WOFileUtils.grep(self, '/etc/nginx/fastcgi_params', 'SCRIPT_FILENAME'): with open('/etc/nginx/fastcgi_params', encoding='utf-8', mode='a') as wo_nginx: wo_nginx.write('fastcgi_param \tSCRIPT_FILENAME ' '\t$request_filename;\n') if not (os.path.isfile('/etc/nginx/common/wpfc.conf')): # Change WordOpsVersion in nginx.conf file WOFileUtils.searchreplace(self, "/etc/nginx/nginx.conf", "# add_header", "add_header") WOFileUtils.searchreplace(self, "/etc/nginx/nginx.conf", "\"WordOps\"", "\"WordOps{0}\"" .format(WOVariables.wo_version)) data = dict() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/conf.d/blockips.conf') wo_nginx = open('/etc/nginx/conf.d/blockips.conf', encoding='utf-8', mode='w') self.app.render((data), 'blockips.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/conf.d/fastcgi.conf') wo_nginx = open('/etc/nginx/conf.d/fastcgi.conf', encoding='utf-8', mode='w') self.app.render((data), 'fastcgi.mustache', out=wo_nginx) wo_nginx.close() data = dict(php="9000", debug="9001", hhvm="8000",php72="9072",debug7="9172", hhvmconf=False, php7conf= True if WOAptGet.is_installed(self,'php7.2-fpm') else False ) Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/conf.d/upstream.conf') wo_nginx = open('/etc/nginx/conf.d/upstream.conf', encoding='utf-8', mode='w') self.app.render((data), 'upstream.mustache', out=wo_nginx) wo_nginx.close() # Setup Nginx common directory if not os.path.exists('/etc/nginx/common'): Log.debug(self, 'Creating directory' '/etc/nginx/common') os.makedirs('/etc/nginx/common') data = dict(webroot=WOVariables.wo_webroot) Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/acl.conf') wo_nginx = open('/etc/nginx/common/acl.conf', encoding='utf-8', mode='w') self.app.render((data), 'acl.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/locations.conf') wo_nginx = open('/etc/nginx/common/locations.conf', encoding='utf-8', mode='w') self.app.render((data), 'locations.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/php.conf') wo_nginx = open('/etc/nginx/common/php.conf', encoding='utf-8', mode='w') self.app.render((data), 'php.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpcommon.conf') wo_nginx = open('/etc/nginx/common/wpcommon.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpcommon.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpfc.conf') wo_nginx = open('/etc/nginx/common/wpfc.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpfc.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpsc.conf') wo_nginx = open('/etc/nginx/common/wpsc.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpsc.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 
'file /etc/nginx/common/wpsubdir.conf') wo_nginx = open('/etc/nginx/common/wpsubdir.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpsubdir.mustache', out=wo_nginx) wo_nginx.close() #php7 conf if (WOVariables.wo_platform_codename == 'stretch' or WOVariables.wo_platform_codename == 'jessie' or WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic') and (not os.path.isfile("/etc/nginx/common/php7.conf")): #data = dict() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/locations-php7.conf') wo_nginx = open('/etc/nginx/common/locations-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'locations-php7.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/php7.conf') wo_nginx = open('/etc/nginx/common/php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'php7.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpcommon-php7.conf') wo_nginx = open('/etc/nginx/common/wpcommon-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpcommon-php7.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpfc-php7.conf') wo_nginx = open('/etc/nginx/common/wpfc-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpfc-php7.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpsc-php7.conf') wo_nginx = open('/etc/nginx/common/wpsc-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpsc-php7.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/redis-php7.conf') wo_nginx = open('/etc/nginx/common/redis-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'redis-php7.mustache', out=wo_nginx) wo_nginx.close() # Nginx-Plus does not have nginx package structure like this # So creating directories if set(["nginx-plus"]).issubset(set(apt_packages)) or set(["nginx"]).issubset(set(apt_packages)): Log.info(self, "Installing WordOpsConfigurations for" "NGINX") if not os.path.exists('/etc/nginx/sites-available'): Log.debug(self, 'Creating directory' '/etc/nginx/sites-available') os.makedirs('/etc/nginx/sites-available') if not os.path.exists('/etc/nginx/sites-enabled'): Log.debug(self, 'Creating directory' '/etc/nginx/sites-available') os.makedirs('/etc/nginx/sites-enabled') # 22222 port settings Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/sites-available/' '22222') wo_nginx = open('/etc/nginx/sites-available/22222', encoding='utf-8', mode='w') self.app.render((data), '22222.mustache', out=wo_nginx) wo_nginx.close() passwd = ''.join([random.choice (string.ascii_letters + string.digits) for n in range(6)]) try: WOShellExec.cmd_exec(self, "printf \"WordOps:" "$(openssl passwd -crypt " "{password} 2> /dev/null)\n\"" "> /etc/nginx/htpasswd-wo " "2>/dev/null" .format(password=passwd)) except CommandExecutionError as e: Log.error(self, "Failed to save HTTP Auth") # Create Symbolic link for 22222 WOFileUtils.create_symlink(self, ['/etc/nginx/' 'sites-available/' '22222', '/etc/nginx/' 'sites-enabled/' '22222']) # Create log and cert folder and softlinks if not os.path.exists('{0}22222/logs' .format(WOVariables.wo_webroot)): Log.debug(self, "Creating directory " 
"{0}22222/logs " .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/logs' .format(WOVariables.wo_webroot)) if not os.path.exists('{0}22222/cert' .format(WOVariables.wo_webroot)): Log.debug(self, "Creating directory " "{0}22222/cert" .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/cert' .format(WOVariables.wo_webroot)) WOFileUtils.create_symlink(self, ['/var/log/nginx/' '22222.access.log', '{0}22222/' 'logs/access.log' .format(WOVariables.wo_webroot)] ) WOFileUtils.create_symlink(self, ['/var/log/nginx/' '22222.error.log', '{0}22222/' 'logs/error.log' .format(WOVariables.wo_webroot)] ) try: WOShellExec.cmd_exec(self, "openssl genrsa -out " "{0}22222/cert/22222.key 2048" .format(WOVariables.wo_webroot)) WOShellExec.cmd_exec(self, "openssl req -new -batch " "-subj /commonName=127.0.0.1/ " "-key {0}22222/cert/22222.key " "-out {0}22222/cert/" "22222.csr" .format(WOVariables.wo_webroot)) WOFileUtils.mvfile(self, "{0}22222/cert/22222.key" .format(WOVariables.wo_webroot), "{0}22222/cert/" "22222.key.org" .format(WOVariables.wo_webroot)) WOShellExec.cmd_exec(self, "openssl rsa -in " "{0}22222/cert/" "22222.key.org -out " "{0}22222/cert/22222.key" .format(WOVariables.wo_webroot)) WOShellExec.cmd_exec(self, "openssl x509 -req -days " "3652 -in {0}22222/cert/" "22222.csr -signkey {0}" "22222/cert/22222.key -out " "{0}22222/cert/22222.crt" .format(WOVariables.wo_webroot)) except CommandExecutionError as e: Log.error(self, "Failed to generate HTTPS certificate for 22222") # Nginx Configation into GIT WOGit.add(self, ["/etc/nginx"], msg="Adding Nginx into Git") WOService.reload_service(self, 'nginx') if set(["nginx-plus"]).issubset(set(apt_packages)) or set(["nginx"]).issubset(set(apt_packages)): WOShellExec.cmd_exec(self, "sed -i -e 's/^user/#user/'" " -e '/^#user/a user" "\ www-data\;'" " /etc/nginx/nginx.conf") if not WOShellExec.cmd_exec(self, "cat /etc/nginx/" "nginx.conf | grep -q " "'/etc/nginx/sites-enabled'"): WOShellExec.cmd_exec(self, "sed -i '/\/etc\/" "nginx\/conf\.d\/\*" "\.conf/a \ include" "\ \/etc\/nginx\/sites-enabled" "\/*;' /etc/nginx/nginx.conf") # WordOpsconfig for NGINX plus data['version'] = WOVariables.wo_version Log.debug(self, 'Writting for nginx plus configuration' ' to file /etc/nginx/conf.d/wo-plus.conf') wo_nginx = open('/etc/nginx/conf.d/wo-plus.conf', encoding='utf-8', mode='w') self.app.render((data), 'wo-plus.mustache', out=wo_nginx) wo_nginx.close() print("HTTP Auth User Name: WordOps" + "\nHTTP Auth Password : {0}".format(passwd)) WOService.reload_service(self, 'nginx') else: self.msg = (self.msg + ["HTTP Auth User Name: WordOps"] + ["HTTP Auth Password : {0}".format(passwd)]) else: WOService.restart_service(self, 'nginx') if WOAptGet.is_installed(self,'redis-server'): if os.path.isfile("/etc/nginx/nginx.conf") and (not os.path.isfile("/etc/nginx/common/redis.conf")): data = dict() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/redis.conf') wo_nginx = open('/etc/nginx/common/redis.conf', encoding='utf-8', mode='w') self.app.render((data), 'redis.mustache', out=wo_nginx) wo_nginx.close() if os.path.isfile("/etc/nginx/nginx.conf") and (not os.path.isfile("/etc/nginx/common/redis-hhvm.conf")): data = dict() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/redis-hhvm.conf') wo_nginx = open('/etc/nginx/common/redis-hhvm.conf', encoding='utf-8', mode='w') self.app.render((data), 'redis-hhvm.mustache', out=wo_nginx) wo_nginx.close() if (WOVariables.wo_platform_codename == 'trusty' or 
WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): if os.path.isfile("/etc/nginx/nginx.conf") and (not os.path.isfile("/etc/nginx/common/redis-php7.conf")): data = dict() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/redis-php7.conf') wo_nginx = open('/etc/nginx/common/redis-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'redis-php7.mustache', out=wo_nginx) wo_nginx.close() if os.path.isfile("/etc/nginx/conf.d/upstream.conf"): if not WOFileUtils.grep(self, "/etc/nginx/conf.d/" "upstream.conf", "redis"): with open("/etc/nginx/conf.d/upstream.conf", "a") as redis_file: redis_file.write("upstream redis {\n" " server 127.0.0.1:6379;\n" " keepalive 10;\n}\n") if os.path.isfile("/etc/nginx/nginx.conf") and (not os.path.isfile("/etc/nginx/conf.d/redis.conf")): with open("/etc/nginx/conf.d/redis.conf", "a") as redis_file: redis_file.write("# Log format Settings\n" "log_format rt_cache_redis '$remote_addr $upstream_response_time $srcache_fetch_status [$time_local] '\n" "'$http_host \"$request\" $status $body_bytes_sent '\n" "'\"$http_referer\" \"$http_user_agent\"';\n") #setup nginx common folder for php7 if self.app.pargs.php72: if os.path.isdir("/etc/nginx/common") and (not os.path.isfile("/etc/nginx/common/php7.conf")): data = dict() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/locations-php7.conf') wo_nginx = open('/etc/nginx/common/locations-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'locations-php7.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/php7.conf') wo_nginx = open('/etc/nginx/common/php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'php7.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpcommon-php7.conf') wo_nginx = open('/etc/nginx/common/wpcommon-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpcommon-php7.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpfc-php7.conf') wo_nginx = open('/etc/nginx/common/wpfc-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpfc-php7.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpsc-php7.conf') wo_nginx = open('/etc/nginx/common/wpsc-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpsc-php7.mustache', out=wo_nginx) wo_nginx.close() if os.path.isdir("/etc/nginx/common") and (not os.path.isfile("/etc/nginx/common/redis-php7.conf")): data = dict() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/redis-php7.conf') wo_nginx = open('/etc/nginx/common/redis-php7.conf', encoding='utf-8', mode='w') self.app.render((data), 'redis-php7.mustache', out=wo_nginx) wo_nginx.close() if os.path.isfile("/etc/nginx/conf.d/upstream.conf"): if not WOFileUtils.grep(self, "/etc/nginx/conf.d/upstream.conf", "php72"): with open("/etc/nginx/conf.d/upstream.conf", "a") as php_file: php_file.write("upstream php72 {\nserver 127.0.0.1:9072;\n}\n" "upstream debug72 {\nserver 127.0.0.1:9172;\n}\n") if set(WOVariables.wo_hhvm).issubset(set(apt_packages)): WOShellExec.cmd_exec(self, "update-rc.d hhvm defaults") WOFileUtils.searchreplace(self, "/etc/hhvm/server.ini", "9000", "8000") if (WOVariables.wo_platform_codename != 'xenial' or 
WOVariables.wo_platform_codename != 'bionic'): WOFileUtils.searchreplace(self, "/etc/nginx/hhvm.conf", "9000", "8000") with open("/etc/hhvm/php.ini", "a") as hhvm_file: hhvm_file.write("hhvm.log.header = true\n" "hhvm.log.natives_stack_trace = true\n" "hhvm.mysql.socket = " "/var/run/mysqld/mysqld.sock\n" "hhvm.pdo_mysql.socket = " "/var/run/mysqld/mysqld.sock\n" "hhvm.mysqli.socket = " "/var/run/mysqld/mysqld.sock\n") with open("/etc/hhvm/server.ini", "a") as hhvm_file: hhvm_file.write("hhvm.server.ip = 127.0.0.1\n") if os.path.isfile("/etc/nginx/conf.d/fastcgi.conf"): if not WOFileUtils.grep(self, "/etc/nginx/conf.d/" "fastcgi.conf", "fastcgi_keep_conn"): with open("/etc/nginx/conf.d/fastcgi.conf", "a") as hhvm_file: hhvm_file.write("fastcgi_keep_conn on;\n") if os.path.isfile("/etc/nginx/conf.d/upstream.conf"): if not WOFileUtils.grep(self, "/etc/nginx/conf.d/" "upstream.conf", "hhvm"): with open("/etc/nginx/conf.d/upstream.conf", "a") as hhvm_file: hhvm_file.write("upstream hhvm {\nserver " "127.0.0.1:8000;\n" "server 127.0.0.1:9000 backup;\n}" "\n") WOGit.add(self, ["/etc/hhvm"], msg="Adding HHVM into Git") WOService.restart_service(self, 'hhvm') if os.path.isfile("/etc/nginx/nginx.conf") and (not os.path.isfile("/etc/nginx/common/php-hhvm.conf")): data = dict() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/php-hhvm.conf') wo_nginx = open('/etc/nginx/common/php-hhvm.conf', encoding='utf-8', mode='w') self.app.render((data), 'php-hhvm.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpfc-hhvm.conf') wo_nginx = open('/etc/nginx/common/wpfc-hhvm.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpfc-hhvm.mustache', out=wo_nginx) wo_nginx.close() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/wpsc-hhvm.conf') wo_nginx = open('/etc/nginx/common/wpsc-hhvm.conf', encoding='utf-8', mode='w') self.app.render((data), 'wpsc-hhvm.mustache', out=wo_nginx) wo_nginx.close() if not WOService.reload_service(self, 'nginx'): Log.error(self, "Failed to reload Nginx, please check " "output of `nginx -t`") if set(WOVariables.wo_redis).issubset(set(apt_packages)): if os.path.isfile("/etc/nginx/nginx.conf") and (not os.path.isfile("/etc/nginx/common/redis.conf")): data = dict() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/redis.conf') wo_nginx = open('/etc/nginx/common/redis.conf', encoding='utf-8', mode='w') self.app.render((data), 'redis.mustache', out=wo_nginx) wo_nginx.close() if os.path.isfile("/etc/nginx/nginx.conf") and (not os.path.isfile("/etc/nginx/common/redis-hhvm.conf")): data = dict() Log.debug(self, 'Writting the nginx configuration to ' 'file /etc/nginx/common/redis-hhvm.conf') wo_nginx = open('/etc/nginx/common/redis-hhvm.conf', encoding='utf-8', mode='w') self.app.render((data), 'redis-hhvm.mustache', out=wo_nginx) wo_nginx.close() if os.path.isfile("/etc/nginx/conf.d/upstream.conf"): if not WOFileUtils.grep(self, "/etc/nginx/conf.d/" "upstream.conf", "redis"): with open("/etc/nginx/conf.d/upstream.conf", "a") as redis_file: redis_file.write("upstream redis {\n" " server 127.0.0.1:6379;\n" " keepalive 10;\n}\n") if os.path.isfile("/etc/nginx/nginx.conf") and (not os.path.isfile("/etc/nginx/conf.d/redis.conf")): with open("/etc/nginx/conf.d/redis.conf", "a") as redis_file: redis_file.write("# Log format Settings\n" "log_format rt_cache_redis '$remote_addr $upstream_response_time $srcache_fetch_status [$time_local] 
'\n" "'$http_host \"$request\" $status $body_bytes_sent '\n" "'\"$http_referer\" \"$http_user_agent\"';\n") if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): # Create log directories if not os.path.exists('/var/log/php/7.2/'): Log.debug(self, 'Creating directory /var/log/php/7.2/') os.makedirs('/var/log/php/7.2/') # Parse etc/php/7.2/fpm/php.ini config = configparser.ConfigParser() Log.debug(self, "configuring php file /etc/php/7.2/fpm/php.ini") config.read('/etc/php/7.2/fpm/php.ini') config['PHP']['expose_php'] = 'Off' config['PHP']['post_max_size'] = '100M' config['PHP']['upload_max_filesize'] = '100M' config['PHP']['max_execution_time'] = '300' config['PHP']['date.timezone'] = WOVariables.wo_timezone with open('/etc/php/7.2/fpm/php.ini', encoding='utf-8', mode='w') as configfile: Log.debug(self, "Writting php configuration into " "/etc/php/7.2/fpm/php.ini") config.write(configfile) # Parse /etc/php/7.2/fpm/php-fpm.conf data = dict(pid="/run/php/php7.2-fpm.pid", error_log="/var/log/php/7.2/fpm.log", include="/etc/php/7.2/fpm/pool.d/*.conf") Log.debug(self, "writting php5 configuration into " "/etc/php/7.2/fpm/php-fpm.conf") wo_php_fpm = open('/etc/php/7.2/fpm/php-fpm.conf', encoding='utf-8', mode='w') self.app.render((data), 'php-fpm.mustache', out=wo_php_fpm) wo_php_fpm.close() # Parse /etc/php/7.2/fpm/pool.d/www.conf config = configparser.ConfigParser() config.read_file(codecs.open('/etc/php/7.2/fpm/pool.d/www.conf', "r", "utf8")) config['www']['ping.path'] = '/ping' config['www']['pm.status_path'] = '/status' config['www']['pm.max_requests'] = '100' config['www']['pm.max_children'] = '25' config['www']['pm.start_servers'] = '5' config['www']['pm.min_spare_servers'] = '2' config['www']['pm.max_spare_servers'] = '5' config['www']['request_terminate_timeout'] = '100' config['www']['pm'] = 'ondemand' config['www']['listen'] = '127.0.0.1:9072' with codecs.open('/etc/php/7.2/fpm/pool.d/www.conf', encoding='utf-8', mode='w') as configfile: Log.debug(self, "Writing PHP 7.2 configuration into " "/etc/php/7.2/fpm/pool.d/www.conf") config.write(configfile) # Generate /etc/php/7.2/fpm/pool.d/debug.conf WOFileUtils.copyfile(self, "/etc/php/7.2/fpm/pool.d/www.conf", "/etc/php/7.2/fpm/pool.d/debug.conf") WOFileUtils.searchreplace(self, "/etc/php/7.2/fpm/pool.d/" "debug.conf", "[www]", "[debug]") config = configparser.ConfigParser() config.read('/etc/php/7.2/fpm/pool.d/debug.conf') config['debug']['listen'] = '127.0.0.1:9172' config['debug']['rlimit_core'] = 'unlimited' config['debug']['slowlog'] = '/var/log/php/7.2/slow.log' config['debug']['request_slowlog_timeout'] = '10s' with open('/etc/php/7.2/fpm/pool.d/debug.conf', encoding='utf-8', mode='w') as confifile: Log.debug(self, "writting PHP5 configuration into " "/etc/php/7.2/fpm/pool.d/debug.conf") config.write(confifile) with open("/etc/php/7.2/fpm/pool.d/debug.conf", encoding='utf-8', mode='a') as myfile: myfile.write("php_admin_value[xdebug.profiler_output_dir] " "= /tmp/ \nphp_admin_value[xdebug.profiler_" "output_name] = cachegrind.out.%p-%H-%R " "\nphp_admin_flag[xdebug.profiler_enable" "_trigger] = on \nphp_admin_flag[xdebug." 
"profiler_enable] = off\n") # Disable xdebug if not WOShellExec.cmd_exec(self, "grep -q \';zend_extension\' /etc/php/7.2/mods-available/xdebug.ini"): WOFileUtils.searchreplace(self, "/etc/php/7.2/mods-available/" "xdebug.ini", "zend_extension", ";zend_extension") # PHP and Debug pull configuration if not os.path.exists('{0}22222/htdocs/fpm/status/' .format(WOVariables.wo_webroot)): Log.debug(self, 'Creating directory ' '{0}22222/htdocs/fpm/status/ ' .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/htdocs/fpm/status/' .format(WOVariables.wo_webroot)) open('{0}22222/htdocs/fpm/status/debug' .format(WOVariables.wo_webroot), encoding='utf-8', mode='a').close() open('{0}22222/htdocs/fpm/status/php' .format(WOVariables.wo_webroot), encoding='utf-8', mode='a').close() # Write info.php if not os.path.exists('{0}22222/htdocs/php/' .format(WOVariables.wo_webroot)): Log.debug(self, 'Creating directory ' '{0}22222/htdocs/php/ ' .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/htdocs/php' .format(WOVariables.wo_webroot)) with open("{0}22222/htdocs/php/info.php" .format(WOVariables.wo_webroot), encoding='utf-8', mode='w') as myfile: myfile.write("<?php\nphpinfo();\n?>") WOFileUtils.chown(self, "{0}22222" .format(WOVariables.wo_webroot), WOVariables.wo_php_user, WOVariables.wo_php_user, recursive=True) WOGit.add(self, ["/etc/php"], msg="Adding PHP into Git") WOService.restart_service(self, 'php7.2-fpm') #PHP7.0 configuration for debian if (WOVariables.wo_platform_codename == 'jessie' ) and set(WOVariables.wo_php72).issubset(set(apt_packages)): # Create log directories if not os.path.exists('/var/log/php/7.2/'): Log.debug(self, 'Creating directory /var/log/php/7.2/') os.makedirs('/var/log/php/7.2/') # Parse etc/php/7.2/fpm/php.ini config = configparser.ConfigParser() Log.debug(self, "configuring php file /etc/php/7.2/fpm/php.ini") config.read('/etc/php/7.2/fpm/php.ini') config['PHP']['expose_php'] = 'Off' config['PHP']['post_max_size'] = '100M' config['PHP']['upload_max_filesize'] = '100M' config['PHP']['max_execution_time'] = '300' config['PHP']['date.timezone'] = WOVariables.wo_timezone with open('/etc/php/7.2/fpm/php.ini', encoding='utf-8', mode='w') as configfile: Log.debug(self, "Writting php configuration into " "/etc/php/7.2/fpm/php.ini") config.write(configfile) # Parse /etc/php/7.2/fpm/php-fpm.conf data = dict(pid="/run/php/php7.2-fpm.pid", error_log="/var/log/php7.2-fpm.log", include="/etc/php/7.2/fpm/pool.d/*.conf") Log.debug(self, "writting php 7.0 configuration into " "/etc/php/7.2/fpm/php-fpm.conf") wo_php_fpm = open('/etc/php/7.2/fpm/php-fpm.conf', encoding='utf-8', mode='w') self.app.render((data), 'php-fpm.mustache', out=wo_php_fpm) wo_php_fpm.close() # Parse /etc/php/7.2/fpm/pool.d/www.conf config = configparser.ConfigParser() config.read_file(codecs.open('/etc/php/7.2/fpm/pool.d/www.conf', "r", "utf8")) config['www']['ping.path'] = '/ping' config['www']['pm.status_path'] = '/status' config['www']['pm.max_requests'] = '500' config['www']['pm.max_children'] = '100' config['www']['pm.start_servers'] = '20' config['www']['pm.min_spare_servers'] = '10' config['www']['pm.max_spare_servers'] = '30' config['www']['request_terminate_timeout'] = '300' config['www']['pm'] = 'ondemand' config['www']['listen'] = '127.0.0.1:9072' with codecs.open('/etc/php/7.2/fpm/pool.d/www.conf', encoding='utf-8', mode='w') as configfile: Log.debug(self, "writting PHP5 configuration into " "/etc/php/7.2/fpm/pool.d/www.conf") config.write(configfile) # Generate /etc/php/7.2/fpm/pool.d/debug.conf 
WOFileUtils.copyfile(self, "/etc/php/7.2/fpm/pool.d/www.conf", "/etc/php/7.2/fpm/pool.d/debug.conf") WOFileUtils.searchreplace(self, "/etc/php/7.2/fpm/pool.d/" "debug.conf", "[www]", "[debug]") config = configparser.ConfigParser() config.read('/etc/php/7.2/fpm/pool.d/debug.conf') config['debug']['listen'] = '127.0.0.1:9172' config['debug']['rlimit_core'] = 'unlimited' config['debug']['slowlog'] = '/var/log/php/7.2/slow.log' config['debug']['request_slowlog_timeout'] = '10s' with open('/etc/php/7.2/fpm/pool.d/debug.conf', encoding='utf-8', mode='w') as confifile: Log.debug(self, "writting PHP5 configuration into " "/etc/php/7.2/fpm/pool.d/debug.conf") config.write(confifile) with open("/etc/php/7.2/fpm/pool.d/debug.conf", encoding='utf-8', mode='a') as myfile: myfile.write("php_admin_value[xdebug.profiler_output_dir] " "= /tmp/ \nphp_admin_value[xdebug.profiler_" "output_name] = cachegrind.out.%p-%H-%R " "\nphp_admin_flag[xdebug.profiler_enable" "_trigger] = on \nphp_admin_flag[xdebug." "profiler_enable] = off\n") # Disable xdebug if not WOShellExec.cmd_exec(self, "grep -q \';zend_extension\' /etc/php/7.2/mods-available/xdebug.ini"): WOFileUtils.searchreplace(self, "/etc/php/7.2/mods-available/" "xdebug.ini", "zend_extension", ";zend_extension") # PHP and Debug pull configuration if not os.path.exists('{0}22222/htdocs/fpm/status/' .format(WOVariables.wo_webroot)): Log.debug(self, 'Creating directory ' '{0}22222/htdocs/fpm/status/ ' .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/htdocs/fpm/status/' .format(WOVariables.wo_webroot)) open('{0}22222/htdocs/fpm/status/debug' .format(WOVariables.wo_webroot), encoding='utf-8', mode='a').close() open('{0}22222/htdocs/fpm/status/php' .format(WOVariables.wo_webroot), encoding='utf-8', mode='a').close() # Write info.php if not os.path.exists('{0}22222/htdocs/php/' .format(WOVariables.wo_webroot)): Log.debug(self, 'Creating directory ' '{0}22222/htdocs/php/ ' .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/htdocs/php' .format(WOVariables.wo_webroot)) with open("{0}22222/htdocs/php/info.php" .format(WOVariables.wo_webroot), encoding='utf-8', mode='w') as myfile: myfile.write("<?php\nphpinfo();\n?>") WOFileUtils.chown(self, "{0}22222" .format(WOVariables.wo_webroot), WOVariables.wo_php_user, WOVariables.wo_php_user, recursive=True) WOGit.add(self, ["/etc/php"], msg="Adding PHP into Git") WOService.restart_service(self, 'php7.2-fpm') #preconfiguration for php7.2 if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic') and set(WOVariables.wo_php72).issubset(set(apt_packages)): # Create log directories if not os.path.exists('/var/log/php/7.2/'): Log.debug(self, 'Creating directory /var/log/php/7.2/') os.makedirs('/var/log/php/7.2/') # Parse etc/php/7.2/fpm/php.ini config = configparser.ConfigParser() Log.debug(self, "configuring php file /etc/php/7.2/fpm/php.ini") config.read('/etc/php/7.2/fpm/php.ini') config['PHP']['expose_php'] = 'Off' config['PHP']['post_max_size'] = '64M' config['PHP']['upload_max_filesize'] = '64M' config['PHP']['max_execution_time'] = '30' config['PHP']['date.timezone'] = WOVariables.wo_timezone with open('/etc/php/7.2/fpm/php.ini', encoding='utf-8', mode='w') as configfile: Log.debug(self, "Writting php configuration into " "/etc/php/7.2/fpm/php.ini") config.write(configfile) # Parse /etc/php/7.2/fpm/php-fpm.conf data = dict(pid="/run/php/php7.2-fpm.pid", error_log="/var/log/php/7.2/fpm.log", 
include="/etc/php/7.2/fpm/pool.d/*.conf") Log.debug(self, "writting php 7.0 configuration into " "/etc/php/7.2/fpm/php-fpm.conf") wo_php_fpm = open('/etc/php/7.2/fpm/php-fpm.conf', encoding='utf-8', mode='w') self.app.render((data), 'php-fpm.mustache', out=wo_php_fpm) wo_php_fpm.close() # Parse /etc/php/7.2/fpm/pool.d/www.conf config = configparser.ConfigParser() config.read_file(codecs.open('/etc/php/7.2/fpm/pool.d/www.conf', "r", "utf8")) config['www']['ping.path'] = '/ping' config['www']['pm.status_path'] = '/status' config['www']['pm.max_requests'] = '100' config['www']['pm.max_children'] = '25' config['www']['pm.start_servers'] = '5' config['www']['pm.min_spare_servers'] = '2' config['www']['pm.max_spare_servers'] = '5' config['www']['request_terminate_timeout'] = '100' config['www']['pm'] = 'ondemand' config['www']['listen'] = '127.0.0.1:9072' with codecs.open('/etc/php/7.2/fpm/pool.d/www.conf', encoding='utf-8', mode='w') as configfile: Log.debug(self, "writting PHP5 configuration into " "/etc/php/7.2/fpm/pool.d/www.conf") config.write(configfile) # Generate /etc/php/7.2/fpm/pool.d/debug.conf WOFileUtils.copyfile(self, "/etc/php/7.2/fpm/pool.d/www.conf", "/etc/php/7.2/fpm/pool.d/debug.conf") WOFileUtils.searchreplace(self, "/etc/php/7.2/fpm/pool.d/" "debug.conf", "[www]", "[debug]") config = configparser.ConfigParser() config.read('/etc/php/7.2/fpm/pool.d/debug.conf') config['debug']['listen'] = '127.0.0.1:9172' config['debug']['rlimit_core'] = 'unlimited' config['debug']['slowlog'] = '/var/log/php/7.2/slow.log' config['debug']['request_slowlog_timeout'] = '10s' with open('/etc/php/7.2/fpm/pool.d/debug.conf', encoding='utf-8', mode='w') as confifile: Log.debug(self, "writting PHP5 configuration into " "/etc/php/7.2/fpm/pool.d/debug.conf") config.write(confifile) with open("/etc/php/7.2/fpm/pool.d/debug.conf", encoding='utf-8', mode='a') as myfile: myfile.write("php_admin_value[xdebug.profiler_output_dir] " "= /tmp/ \nphp_admin_value[xdebug.profiler_" "output_name] = cachegrind.out.%p-%H-%R " "\nphp_admin_flag[xdebug.profiler_enable" "_trigger] = on \nphp_admin_flag[xdebug." 
"profiler_enable] = off\n") # Disable xdebug if not WOShellExec.cmd_exec(self, "grep -q \';zend_extension\' /etc/php/7.2/mods-available/xdebug.ini"): WOFileUtils.searchreplace(self, "/etc/php/7.2/mods-available/" "xdebug.ini", "zend_extension", ";zend_extension") # PHP and Debug pull configuration if not os.path.exists('{0}22222/htdocs/fpm/status/' .format(WOVariables.wo_webroot)): Log.debug(self, 'Creating directory ' '{0}22222/htdocs/fpm/status/ ' .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/htdocs/fpm/status/' .format(WOVariables.wo_webroot)) open('{0}22222/htdocs/fpm/status/debug' .format(WOVariables.wo_webroot), encoding='utf-8', mode='a').close() open('{0}22222/htdocs/fpm/status/php' .format(WOVariables.wo_webroot), encoding='utf-8', mode='a').close() # Write info.php if not os.path.exists('{0}22222/htdocs/php/' .format(WOVariables.wo_webroot)): Log.debug(self, 'Creating directory ' '{0}22222/htdocs/php/ ' .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/htdocs/php' .format(WOVariables.wo_webroot)) with open("{0}22222/htdocs/php/info.php" .format(WOVariables.wo_webroot), encoding='utf-8', mode='w') as myfile: myfile.write("<?php\nphpinfo();\n?>") WOFileUtils.chown(self, "{0}22222" .format(WOVariables.wo_webroot), WOVariables.wo_php_user, WOVariables.wo_php_user, recursive=True) WOGit.add(self, ["/etc/php"], msg="Adding PHP into Git") WOService.restart_service(self, 'php7.2-fpm') if set(WOVariables.wo_mysql).issubset(set(apt_packages)): if not os.path.isfile("/etc/mysql/my.cnf"): config = ("[mysqld]\nwait_timeout = 30\n" "interactive_timeout=60\nperformance_schema = 0" "\nquery_cache_type = 1") config_file = open("/etc/mysql/my.cnf", encoding='utf-8', mode='w') config_file.write(config) config_file.close() else: try: WOShellExec.cmd_exec(self, "sed -i \"/#max_conn" "ections/a wait_timeout = 30 \\n" "interactive_timeout = 60 \\n" "performance_schema = 0\\n" "query_cache_type = 1 \" " "/etc/mysql/my.cnf") except CommandExecutionError as e: Log.error(self, "Unable to update MySQL file") WOFileUtils.chmod(self, "/usr/bin/mysqltuner", 0o775) WOGit.add(self, ["/etc/mysql"], msg="Adding MySQL into Git") WOService.reload_service(self, 'mysql') if len(packages): if any('/usr/bin/wp' == x[1] for x in packages): Log.debug(self, "Setting Privileges to /usr/bin/wp file ") WOFileUtils.chmod(self, "/usr/bin/wp", 0o775) if any('/tmp/pma.tar.gz' == x[1] for x in packages): WOExtract.extract(self, '/tmp/pma.tar.gz', '/tmp/') Log.debug(self, 'Extracting file /tmp/pma.tar.gz to ' 'location /tmp/') if not os.path.exists('{0}22222/htdocs/db' .format(WOVariables.wo_webroot)): Log.debug(self, "Creating new directory " "{0}22222/htdocs/db" .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/htdocs/db' .format(WOVariables.wo_webroot)) shutil.move('/tmp/phpmyadmin-STABLE/', '{0}22222/htdocs/db/pma/' .format(WOVariables.wo_webroot)) shutil.copyfile('{0}22222/htdocs/db/pma/config.sample.inc.php' .format(WOVariables.wo_webroot), '{0}22222/htdocs/db/pma/config.inc.php' .format(WOVariables.wo_webroot)) Log.debug(self, 'Setting Blowfish Secret Key FOR COOKIE AUTH to ' '{0}22222/htdocs/db/pma/config.inc.php file ' .format(WOVariables.wo_webroot)) blowfish_key = ''.join([random.choice (string.ascii_letters + string.digits) for n in range(10)]) WOFileUtils.searchreplace(self, '{0}22222/htdocs/db/pma/config.inc.php' .format(WOVariables.wo_webroot), "$cfg[\'blowfish_secret\'] = \'\';","$cfg[\'blowfish_secret\'] = \'{0}\';" .format(blowfish_key)) Log.debug(self, 'Setting HOST Server For Mysql to ' 
'{0}22222/htdocs/db/pma/config.inc.php file ' .format(WOVariables.wo_webroot)) WOFileUtils.searchreplace(self, '{0}22222/htdocs/db/pma/config.inc.php' .format(WOVariables.wo_webroot), "$cfg[\'Servers\'][$i][\'host\'] = \'localhost\';","$cfg[\'Servers\'][$i][\'host\'] = \'{0}\';" .format(WOVariables.wo_mysql_host)) Log.debug(self, 'Setting Privileges of webroot permission to ' '{0}22222/htdocs/db/pma file ' .format(WOVariables.wo_webroot)) WOFileUtils.chown(self, '{0}22222' .format(WOVariables.wo_webroot), WOVariables.wo_php_user, WOVariables.wo_php_user, recursive=True) if any('/tmp/memcache.tar.gz' == x[1] for x in packages): Log.debug(self, "Extracting memcache.tar.gz to location" " {0}22222/htdocs/cache/memcache " .format(WOVariables.wo_webroot)) WOExtract.extract(self, '/tmp/memcache.tar.gz', '{0}22222/htdocs/cache/memcache' .format(WOVariables.wo_webroot)) Log.debug(self, "Setting Privileges to " "{0}22222/htdocs/cache/memcache file" .format(WOVariables.wo_webroot)) WOFileUtils.chown(self, '{0}22222' .format(WOVariables.wo_webroot), WOVariables.wo_php_user, WOVariables.wo_php_user, recursive=True) if any('/tmp/webgrind.tar.gz' == x[1] for x in packages): Log.debug(self, "Extracting file webgrind.tar.gz to " "location /tmp/ ") WOExtract.extract(self, '/tmp/webgrind.tar.gz', '/tmp/') if not os.path.exists('{0}22222/htdocs/php' .format(WOVariables.wo_webroot)): Log.debug(self, "Creating directroy " "{0}22222/htdocs/php" .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/htdocs/php' .format(WOVariables.wo_webroot)) shutil.move('/tmp/webgrind-master/', '{0}22222/htdocs/php/webgrind' .format(WOVariables.wo_webroot)) WOFileUtils.searchreplace(self, "{0}22222/htdocs/php/webgrind/" "config.php" .format(WOVariables.wo_webroot), "/usr/local/bin/dot", "/usr/bin/dot") WOFileUtils.searchreplace(self, "{0}22222/htdocs/php/webgrind/" "config.php" .format(WOVariables.wo_webroot), "Europe/Copenhagen", WOVariables.wo_timezone) WOFileUtils.searchreplace(self, "{0}22222/htdocs/php/webgrind/" "config.php" .format(WOVariables.wo_webroot), "90", "100") Log.debug(self, "Setting Privileges of webroot permission to " "{0}22222/htdocs/php/webgrind/ file " .format(WOVariables.wo_webroot)) WOFileUtils.chown(self, '{0}22222' .format(WOVariables.wo_webroot), WOVariables.wo_php_user, WOVariables.wo_php_user, recursive=True) if any('/tmp/anemometer.tar.gz' == x[1] for x in packages): Log.debug(self, "Extracting file anemometer.tar.gz to " "location /tmp/ ") WOExtract.extract(self, '/tmp/anemometer.tar.gz', '/tmp/') if not os.path.exists('{0}22222/htdocs/db/' .format(WOVariables.wo_webroot)): Log.debug(self, "Creating directory") os.makedirs('{0}22222/htdocs/db/' .format(WOVariables.wo_webroot)) shutil.move('/tmp/Anemometer-master', '{0}22222/htdocs/db/anemometer' .format(WOVariables.wo_webroot)) chars = ''.join(random.sample(string.ascii_letters, 8)) try: WOShellExec.cmd_exec(self, 'mysql < {0}22222/htdocs/db' '/anemometer/install.sql' .format(WOVariables.wo_webroot)) except CommandExecutionError as e: raise SiteError("Unable to import Anemometer database") WOMysql.execute(self, 'grant select on *.* to \'anemometer\'' '@\'{0}\' IDENTIFIED' ' BY \'{1}\''.format(self.app.config.get('mysql', 'grant-host'),chars)) Log.debug(self, "grant all on slow-query-log.*" " to anemometer@root_user IDENTIFIED BY password ") WOMysql.execute(self, 'grant all on slow_query_log.* to' '\'anemometer\'@\'{0}\' IDENTIFIED' ' BY \'{1}\''.format(self.app.config.get( 'mysql', 'grant-host'), chars), errormsg="cannot grant priviledges", 
log=False) # Custom Anemometer configuration Log.debug(self, "configration Anemometer") data = dict(host=WOVariables.wo_mysql_host, port='3306', user='anemometer', password=chars) wo_anemometer = open('{0}22222/htdocs/db/anemometer' '/conf/config.inc.php' .format(WOVariables.wo_webroot), encoding='utf-8', mode='w') self.app.render((data), 'anemometer.mustache', out=wo_anemometer) wo_anemometer.close() if any('/usr/bin/pt-query-advisor' == x[1] for x in packages): WOFileUtils.chmod(self, "/usr/bin/pt-query-advisor", 0o775) if any('/tmp/pra.tar.gz' == x[1] for x in packages): Log.debug(self, 'Extracting file /tmp/pra.tar.gz to ' 'loaction /tmp/') WOExtract.extract(self, '/tmp/pra.tar.gz', '/tmp/') if not os.path.exists('{0}22222/htdocs/cache/redis' .format(WOVariables.wo_webroot)): Log.debug(self, "Creating new directory " "{0}22222/htdocs/cache/redis" .format(WOVariables.wo_webroot)) os.makedirs('{0}22222/htdocs/cache/redis' .format(WOVariables.wo_webroot)) shutil.move('/tmp/phpRedisAdmin-master/', '{0}22222/htdocs/cache/redis/phpRedisAdmin' .format(WOVariables.wo_webroot)) Log.debug(self, 'Extracting file /tmp/predis.tar.gz to ' 'loaction /tmp/') WOExtract.extract(self, '/tmp/predis.tar.gz', '/tmp/') shutil.move('/tmp/predis-1.0.1/', '{0}22222/htdocs/cache/redis/phpRedisAdmin/vendor' .format(WOVariables.wo_webroot)) Log.debug(self, 'Setting Privileges of webroot permission to ' '{0}22222/htdocs/cache/ file ' .format(WOVariables.wo_webroot)) WOFileUtils.chown(self, '{0}22222' .format(WOVariables.wo_webroot), WOVariables.wo_php_user, WOVariables.wo_php_user, recursive=True) @expose(help="Install packages") def install(self, packages=[], apt_packages=[], disp_msg=True): """Start installation of packages""" self.msg = [] try: # Default action for stack installation if ((not self.app.pargs.web) and (not self.app.pargs.admin) and (not self.app.pargs.nginx) and (not self.app.pargs.php) and (not self.app.pargs.mysql) and (not self.app.pargs.wpcli) and (not self.app.pargs.phpmyadmin) and (not self.app.pargs.hhvm) and (not self.app.pargs.adminer) and (not self.app.pargs.utils) and (not self.app.pargs.redis) and (not self.app.pargs.phpredisadmin) and (not self.app.pargs.php72)): self.app.pargs.web = True self.app.pargs.admin = True if self.app.pargs.all: self.app.pargs.web = True self.app.pargs.admin = True if self.app.pargs.web: self.app.pargs.nginx = True self.app.pargs.php = True self.app.pargs.mysql = True self.app.pargs.wpcli = True if self.app.pargs.admin: self.app.pargs.nginx = True self.app.pargs.php = True self.app.pargs.mysql = True self.app.pargs.adminer = True self.app.pargs.phpmyadmin = True self.app.pargs.utils = True if self.app.pargs.redis: if not WOAptGet.is_installed(self, 'redis-server'): apt_packages = apt_packages + WOVariables.wo_redis self.app.pargs.php = True else: Log.info(self, "Redis already installed") if self.app.pargs.nginx: Log.debug(self, "Setting apt_packages variable for Nginx") if not (WOAptGet.is_installed(self, 'nginx-custom')): if not (WOAptGet.is_installed(self, 'nginx-plus') or WOAptGet.is_installed(self, 'nginx')): apt_packages = apt_packages + WOVariables.wo_nginx else: if WOAptGet.is_installed(self, 'nginx-plus'): Log.info(self, "NGINX PLUS Detected ...") apt = ["nginx-plus"] + WOVariables.wo_nginx self.post_pref(apt, packages) elif WOAptGet.is_installed(self, 'nginx'): Log.info(self, "WordOps detected an already installed nginx package." 
"It may or may not have required modules.\n") apt = ["nginx"] + WOVariables.wo_nginx self.post_pref(apt, packages) else: Log.debug(self, "Nginx Stable already installed") if self.app.pargs.php: Log.debug(self, "Setting apt_packages variable for PHP") if not (WOAptGet.is_installed(self, 'php5-fpm') or WOAptGet.is_installed(self, 'php5.6-fpm')): if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): apt_packages = apt_packages + WOVariables.wo_php72 + WOVariables.wo_php_extra else: apt_packages = apt_packages + WOVariables.wo_php else: Log.debug(self, "PHP already installed") Log.info(self, "PHP already installed") #PHP 7.0 for Debian (jessie+) if self.app.pargs.php72 and WOVariables.wo_platform_distro == 'debian': if (WOVariables.wo_platform_codename == 'jessie'): Log.debug(self, "Setting apt_packages variable for PHP 7.2") if not WOAptGet.is_installed(self, 'php7.2-fpm') : apt_packages = apt_packages + WOVariables.wo_php72 if not WOAptGet.is_installed(self, 'php5-fpm'): apt_packages = apt_packages + WOVariables.wo_php else: Log.debug(self, "PHP 7.2 already installed") Log.info(self, "PHP 7.2 already installed") else: Log.debug(self, "PHP 7.2 Not Available for your Distribution") Log.info(self, "PHP 7.2 Not Available for your Distribution") #PHP 7.0 for Ubuntu if self.app.pargs.php72 and not WOVariables.wo_platform_distro == 'debian': if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): Log.debug(self, "Setting apt_packages variable for PHP 7.2") if not WOAptGet.is_installed(self, 'php7.2-fpm') : apt_packages = apt_packages + WOVariables.wo_php72 + WOVariables.wo_php_extra else: Log.debug(self, "PHP 7.2 already installed") Log.info(self, "PHP 7.2 already installed") else: Log.debug(self, "Unfortunately PHP 7.2 is not available for your Ubuntu or Debian version.") Log.info(self, "Unfortunately PHP 7.2 is not available for your Ubuntu or Debian version.") if self.app.pargs.hhvm: Log.debug(self, "Setting apt packages variable for HHVM") if platform.architecture()[0] is '32bit': Log.error(self, "HHVM is not supported by 32bit system") if not WOAptGet.is_installed(self, 'hhvm'): apt_packages = apt_packages + WOVariables.wo_hhvm else: Log.debug(self, "HHVM already installed") Log.info(self, "HHVM already installed") if self.app.pargs.mysql: Log.debug(self, "Setting apt_packages variable for MySQL") if not WOShellExec.cmd_exec(self, "mysqladmin ping"): apt_packages = apt_packages + WOVariables.wo_mysql packages = packages + [["https://raw." 
"githubusercontent.com/" "major/MySQLTuner-perl" "/master/mysqltuner.pl", "/usr/bin/mysqltuner", "MySQLTuner"]] else: Log.debug(self, "MySQL connection is already alive") Log.info(self, "MySQL connection is already alive") if self.app.pargs.wpcli: Log.debug(self, "Setting packages variable for WP-CLI") if not WOShellExec.cmd_exec(self, "which wp"): packages = packages + [["https://github.com/wp-cli/wp-cli/" "releases/download/v{0}/" "wp-cli-{0}.phar" "".format(WOVariables.wo_wp_cli), "/usr/bin/wp", "WP-CLI"]] else: Log.debug(self, "WP-CLI is already installed") Log.info(self, "WP-CLI is already installed") if self.app.pargs.phpmyadmin: Log.debug(self, "Setting packages varible for phpMyAdmin ") packages = packages + [["https://github.com/phpmyadmin/" "phpmyadmin/archive/STABLE.tar.gz", "/tmp/pma.tar.gz", "phpMyAdmin"]] if self.app.pargs.phpredisadmin: Log.debug(self, "Setting packages varible for phpRedisAdmin") packages = packages + [["https://github.com/ErikDubbelboer/" "phpRedisAdmin/archive/master.tar.gz", "/tmp/pra.tar.gz","phpRedisAdmin"], ["https://github.com/nrk/predis/" "archive/v1.0.1.tar.gz", "/tmp/predis.tar.gz", "Predis"]] if self.app.pargs.adminer: Log.debug(self, "Setting packages variable for Adminer ") packages = packages + [["https://www.adminer.org/static/download/" "{0}/adminer-{0}.php" "".format(WOVariables.wo_adminer), "{0}22222/" "htdocs/db/adminer/index.php" .format(WOVariables.wo_webroot), "Adminer"]] if self.app.pargs.utils: Log.debug(self, "Setting packages variable for utils") packages = packages + [["https://storage.googleapis.com/google-code-archive-downloads/" "v2/code.google.com/phpmemcacheadmin/" "phpMemcachedAdmin-1.2.2-r262.tar.gz", '/tmp/memcache.tar.gz', 'phpMemcachedAdmin'], ["https://raw.githubusercontent.com" "/rtCamp/eeadmin/master/cache/nginx/" "clean.php", "{0}22222/htdocs/cache/" "nginx/clean.php" .format(WOVariables.wo_webroot), "clean.php"], ["https://raw.github.com/rlerdorf/" "opcache-status/master/opcache.php", "{0}22222/htdocs/cache/" "opcache/opcache.php" .format(WOVariables.wo_webroot), "opcache.php"], ["https://raw.github.com/amnuts/" "opcache-gui/master/index.php", "{0}22222/htdocs/" "cache/opcache/opgui.php" .format(WOVariables.wo_webroot), "Opgui"], ["https://gist.github.com/ck-on/4959032" "/raw/0b871b345fd6cfcd6d2be030c1f33d1" "ad6a475cb/ocp.php", "{0}22222/htdocs/cache/" "opcache/ocp.php" .format(WOVariables.wo_webroot), "OCP.php"], ["https://github.com/jokkedk/webgrind/" "archive/master.tar.gz", '/tmp/webgrind.tar.gz', 'Webgrind'], ["http://bazaar.launchpad.net/~" "percona-toolkit-dev/percona-toolkit/" "2.1/download/head:/ptquerydigest-" "20110624220137-or26tn4" "expb9ul2a-16/pt-query-digest", "/usr/bin/pt-query-advisor", "pt-query-advisor"], ["https://github.com/box/Anemometer/" "archive/master.tar.gz", '/tmp/anemometer.tar.gz', 'Anemometer'] ] except Exception as e: pass if len(apt_packages) or len(packages): Log.debug(self, "Calling pre_pref") self.pre_pref(apt_packages) if len(apt_packages): WOSwap.add(self) Log.info(self, "Updating apt-cache, please wait...") WOAptGet.update(self) Log.info(self, "Installing packages, please wait...") WOAptGet.install(self, apt_packages) if len(packages): Log.debug(self, "Downloading following: {0}".format(packages)) WODownload.download(self, packages) Log.debug(self, "Calling post_pref") self.post_pref(apt_packages, packages) if 'redis-server' in apt_packages: # set redis.conf parameter # set maxmemory 10% for ram below 512MB and 20% for others # set maxmemory-policy allkeys-lru if 
os.path.isfile("/etc/redis/redis.conf"): if WOVariables.wo_ram < 512: Log.debug(self, "Setting maxmemory variable to {0} in redis.conf" .format(int(WOVariables.wo_ram*1024*1024*0.1))) WOShellExec.cmd_exec(self, "sed -i 's/# maxmemory <bytes>/maxmemory {0}/' /etc/redis/redis.conf" .format(int(WOVariables.wo_ram*1024*1024*0.1))) Log.debug(self, "Setting maxmemory-policy variable to allkeys-lru in redis.conf") WOShellExec.cmd_exec(self, "sed -i 's/# maxmemory-policy.*/maxmemory-policy allkeys-lru/' " "/etc/redis/redis.conf") WOService.restart_service(self, 'redis-server') else: Log.debug(self, "Setting maxmemory variable to {0} in redis.conf" .format(int(WOVariables.wo_ram*1024*1024*0.2))) WOShellExec.cmd_exec(self, "sed -i 's/# maxmemory <bytes>/maxmemory {0}/' /etc/redis/redis.conf" .format(int(WOVariables.wo_ram*1024*1024*0.2))) Log.debug(self, "Setting maxmemory-policy variable to allkeys-lru in redis.conf") WOShellExec.cmd_exec(self, "sed -i 's/# maxmemory-policy.*/maxmemory-policy allkeys-lru/' " "/etc/redis/redis.conf") WOService.restart_service(self, 'redis-server') if disp_msg: if len(self.msg): for msg in self.msg: Log.info(self, Log.ENDC + msg) Log.info(self, "Successfully installed packages") else: return self.msg @expose(help="Remove packages") def remove(self): """Start removal of packages""" apt_packages = [] packages = [] if ((not self.app.pargs.web) and (not self.app.pargs.admin) and (not self.app.pargs.nginx) and (not self.app.pargs.php) and (not self.app.pargs.php72) and (not self.app.pargs.mysql) and (not self.app.pargs.wpcli) and (not self.app.pargs.phpmyadmin) and (not self.app.pargs.hhvm) and (not self.app.pargs.adminer) and (not self.app.pargs.utils) and (not self.app.pargs.all) and (not self.app.pargs.redis) and (not self.app.pargs.phpredisadmin)): self.app.pargs.web = True self.app.pargs.admin = True if self.app.pargs.all: self.app.pargs.web = True self.app.pargs.admin = True if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): self.app.pargs.php72 = True if self.app.pargs.web: self.app.pargs.nginx = True self.app.pargs.php = True self.app.pargs.mysql = True self.app.pargs.wpcli = True if self.app.pargs.admin: self.app.pargs.adminer = True self.app.pargs.phpmyadmin = True self.app.pargs.utils = True if self.app.pargs.nginx: if WOAptGet.is_installed(self, 'nginx-custom'): Log.debug(self, "Removing apt_packages variable of Nginx") apt_packages = apt_packages + WOVariables.wo_nginx else: Log.error(self,"Cannot Remove! 
Nginx Stable version not found.") if self.app.pargs.php: Log.debug(self, "Removing apt_packages variable of PHP") if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): apt_packages = apt_packages + WOVariables.wo_php72 if not WOAptGet.is_installed(self, 'php7.2-fpm'): apt_packages = apt_packages + WOVariables.wo_php_extra else: apt_packages = apt_packages + WOVariables.wo_php #PHP7.0 for debian(jessie+) if self.app.pargs.php72: if (WOVariables.wo_platform_codename == 'jessie'): Log.debug(self, "Removing apt_packages variable of PHP 7.0") apt_packages = apt_packages + WOVariables.wo_php72 if not WOAptGet.is_installed(self, 'php5-fpm'): apt_packages = apt_packages + WOVariables.wo_php_extra else: Log.info(self,"PHP 7.0 not supported.") if self.app.pargs.php72: if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): Log.debug(self, "Removing apt_packages variable of PHP 7.0") apt_packages = apt_packages + WOVariables.wo_php72 if not WOAptGet.is_installed(self, 'php5.6-fpm'): apt_packages = apt_packages + WOVariables.wo_php_extra else: Log.info(self,"PHP 7.0 not supported.") if self.app.pargs.hhvm: if WOAptGet.is_installed(self, 'hhvm'): Log.debug(self, "Removing apt_packages variable of HHVM") apt_packages = apt_packages + WOVariables.wo_hhvm if self.app.pargs.redis: Log.debug(self, "Remove apt_packages variable of Redis") apt_packages = apt_packages + WOVariables.wo_redis if self.app.pargs.mysql: Log.debug(self, "Removing apt_packages variable of MySQL") apt_packages = apt_packages + WOVariables.wo_mysql packages = packages + ['/usr/bin/mysqltuner'] if self.app.pargs.wpcli: Log.debug(self, "Removing package variable of WPCLI ") if os.path.isfile('/usr/bin/wp'): packages = packages + ['/usr/bin/wp'] else: Log.warn(self, "WP-CLI is not installed with WordOps") if self.app.pargs.phpmyadmin: Log.debug(self, "Removing package variable of phpMyAdmin ") packages = packages + ['{0}22222/htdocs/db/pma' .format(WOVariables.wo_webroot)] if self.app.pargs.phpredisadmin: Log.debug(self, "Removing package variable of phpRedisAdmin ") packages = packages + ['{0}22222/htdocs/cache/redis/phpRedisAdmin' .format(WOVariables.wo_webroot)] if self.app.pargs.adminer: Log.debug(self, "Removing package variable of Adminer ") packages = packages + ['{0}22222/htdocs/db/adminer' .format(WOVariables.wo_webroot)] if self.app.pargs.utils: Log.debug(self, "Removing package variable of utils ") packages = packages + ['{0}22222/htdocs/php/webgrind/' .format(WOVariables.wo_webroot), '{0}22222/htdocs/cache/opcache' .format(WOVariables.wo_webroot), '{0}22222/htdocs/cache/nginx/' 'clean.php'.format(WOVariables.wo_webroot), '{0}22222/htdocs/cache/memcache' .format(WOVariables.wo_webroot), '/usr/bin/pt-query-advisor', '{0}22222/htdocs/db/anemometer' .format(WOVariables.wo_webroot)] if len(packages) or len(apt_packages): wo_prompt = input('Are you sure you to want to' ' remove from server.' 
'\nPackage configuration will remain' ' on server after this operation.\n' 'Any answer other than ' '"yes" will be stop this' ' operation : ') if wo_prompt == 'YES' or wo_prompt == 'yes': if (set(["nginx-custom"]).issubset(set(apt_packages))) : WOService.stop_service(self, 'nginx') if len(packages): WOFileUtils.remove(self, packages) WOAptGet.auto_remove(self) if len(apt_packages): Log.debug(self, "Removing apt_packages") Log.info(self, "Removing packages, please wait...") WOAptGet.remove(self, apt_packages) WOAptGet.auto_remove(self) Log.info(self, "Successfully removed packages") #Added for Ondrej Repo missing package Fix if self.app.pargs.php72: if WOAptGet.is_installed(self, 'php5.6-fpm'): Log.info(self, "PHP5.6-fpm found on system.") Log.info(self, "Verifying and installing missing packages,") WOShellExec.cmd_exec(self, "apt-get install -y php-memcached php-igbinary") @expose(help="Purge packages") def purge(self): """Start purging of packages""" apt_packages = [] packages = [] # Default action for stack purge if ((not self.app.pargs.web) and (not self.app.pargs.admin) and (not self.app.pargs.nginx) and (not self.app.pargs.php) and (not self.app.pargs.php7) and (not self.app.pargs.mysql) and (not self.app.pargs.wpcli) and (not self.app.pargs.phpmyadmin) and (not self.app.pargs.hhvm) and (not self.app.pargs.adminer) and (not self.app.pargs.utils) and (not self.app.pargs.all) and (not self.app.pargs.redis) and (not self.app.pargs.phpredisadmin)): self.app.pargs.web = True self.app.pargs.admin = True if self.app.pargs.all: self.app.pargs.web = True self.app.pargs.admin = True if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): self.app.pargs.php7 = True if self.app.pargs.web: self.app.pargs.nginx = True self.app.pargs.php = True self.app.pargs.mysql = True self.app.pargs.wpcli = True if self.app.pargs.admin: self.app.pargs.adminer = True self.app.pargs.phpmyadmin = True self.app.pargs.utils = True if self.app.pargs.nginx: if WOAptGet.is_installed(self, 'nginx-custom'): Log.debug(self, "Purge apt_packages variable of Nginx") apt_packages = apt_packages + WOVariables.wo_nginx else: Log.error(self,"Cannot Purge! 
Nginx Stable version not found.") if self.app.pargs.php: Log.debug(self, "Purge apt_packages variable PHP") if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): apt_packages = apt_packages + WOVariables.wo_php_extra else: apt_packages = apt_packages + WOVariables.wo_php72 #For debian --php7 if self.app.pargs.php72: if (WOVariables.wo_platform_codename == 'jessie'): Log.debug(self, "Removing apt_packages variable of PHP 7.0") apt_packages = apt_packages + WOVariables.wo_php72 if not WOAptGet.is_installed(self, 'php5-fpm'): apt_packages = apt_packages + WOVariables.wo_php_extra else: Log.info(self,"PHP 7.2 not supported.") if self.app.pargs.php72: if (WOVariables.wo_platform_codename == 'trusty' or WOVariables.wo_platform_codename == 'xenial' or WOVariables.wo_platform_codename == 'bionic'): Log.debug(self, "Removing apt_packages variable of PHP 7.0") apt_packages = apt_packages + WOVariables.wo_php72 if not WOAptGet.is_installed(self, 'php5.6-fpm'): apt_packages = apt_packages + WOVariables.wo_php_extra else: Log.info(self,"PHP 7.2 not supported.") if self.app.pargs.hhvm: if WOAptGet.is_installed(self, 'hhvm'): Log.debug(self, "Purge apt_packages varible of HHVM") apt_packages = apt_packages + WOVariables.wo_hhvm if self.app.pargs.redis: Log.debug(self, "Purge apt_packages variable of Redis") apt_packages = apt_packages + WOVariables.wo_redis if self.app.pargs.mysql: Log.debug(self, "Purge apt_packages variable MySQL") apt_packages = apt_packages + WOVariables.wo_mysql packages = packages + ['/usr/bin/mysqltuner'] if self.app.pargs.wpcli: Log.debug(self, "Purge package variable WPCLI") if os.path.isfile('/usr/bin/wp'): packages = packages + ['/usr/bin/wp'] else: Log.warn(self, "WP-CLI is not installed with WordOps") if self.app.pargs.phpmyadmin: packages = packages + ['{0}22222/htdocs/db/pma'. 
format(WOVariables.wo_webroot)] Log.debug(self, "Purge package variable phpMyAdmin") if self.app.pargs.phpredisadmin: Log.debug(self, "Removing package variable of phpRedisAdmin ") packages = packages + ['{0}22222/htdocs/cache/redis/phpRedisAdmin' .format(WOVariables.wo_webroot)] if self.app.pargs.adminer: Log.debug(self, "Purge package variable Adminer") packages = packages + ['{0}22222/htdocs/db/adminer' .format(WOVariables.wo_webroot)] if self.app.pargs.utils: Log.debug(self, "Purge package variable utils") packages = packages + ['{0}22222/htdocs/php/webgrind/' .format(WOVariables.wo_webroot), '{0}22222/htdocs/cache/opcache' .format(WOVariables.wo_webroot), '{0}22222/htdocs/cache/nginx/' 'clean.php'.format(WOVariables.wo_webroot), '{0}22222/htdocs/cache/memcache' .format(WOVariables.wo_webroot), '/usr/bin/pt-query-advisor', '{0}22222/htdocs/db/anemometer' .format(WOVariables.wo_webroot) ] if len(packages) or len(apt_packages): wo_prompt = input('Are you sure you to want to purge ' 'from server ' 'along with their configuration' ' packages,\nAny answer other than ' '"yes" will be stop this ' 'operation :') if wo_prompt == 'YES' or wo_prompt == 'yes': if (set(["nginx-custom"]).issubset(set(apt_packages))) : WOService.stop_service(self, 'nginx') if len(apt_packages): Log.info(self, "Purging packages, please wait...") WOAptGet.remove(self, apt_packages, purge=True) WOAptGet.auto_remove(self) if len(packages): WOFileUtils.remove(self, packages) WOAptGet.auto_remove(self) Log.info(self, "Successfully purged packages") #Added for php Ondrej repo missing package fix if self.app.pargs.php72: if WOAptGet.is_installed(self, 'php5.6-fpm'): Log.info(self, "PHP5.6-fpm found on system.") Log.info(self, "Verifying and installing missing packages,") WOShellExec.cmd_exec(self, "apt-get install -y php-memcached php-igbinary") def load(app): # register the plugin class.. this only happens if the plugin is enabled handler.register(WOStackController) handler.register(WOStackStatusController) handler.register(WOStackMigrateController) handler.register(WOStackUpgradeController) # register a hook (function) to run after arguments are parsed. hook.register('post_argument_parsing', wo_stack_hook)
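Editor's note on the controller above: the phpMyAdmin blowfish_secret is built with random.choice, which draws from a non-cryptographic PRNG. Below is a minimal sketch of a safer alternative using Python's standard secrets module (3.6+); the helper name and the 32-character length are assumptions for illustration, not part of the WordOps source.

import secrets
import string

def generate_blowfish_key(length=32):
    # Cryptographically secure replacement for the random.choice() loop above
    alphabet = string.ascii_letters + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(length))

print(generate_blowfish_key())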
55.400529
271
0.467027
10,323
104,707
4.646421
0.064904
0.066945
0.037778
0.05529
0.817992
0.776983
0.738497
0.70908
0.67691
0.634734
0
0.027619
0.418367
104,707
1,890
272
55.400529
0.759971
0.019664
0
0.631062
0
0.003069
0.251604
0.095244
0
0
0.000527
0
0
1
0.004911
false
0.012277
0.020258
0
0.02701
0.001842
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c748ee3363bc89e6bf07b6638056bff521f79e66
403
py
Python
ginger/forms/formatters.py
vivsh/django-ginger
d293109becc72845a23f2aeb732ed808a7a67d69
[ "MIT" ]
null
null
null
ginger/forms/formatters.py
vivsh/django-ginger
d293109becc72845a23f2aeb732ed808a7a67d69
[ "MIT" ]
null
null
null
ginger/forms/formatters.py
vivsh/django-ginger
d293109becc72845a23f2aeb732ed808a7a67d69
[ "MIT" ]
null
null
null
from collections import defaultdict

registry = defaultdict(dict)


def register(model_class, **kwargs):
    return


class Column(object):
    pass


class Second(object):
    def format(self, value, **context):
        return


class Dollar(object):
    def format(self):
        pass


class Delegate(object):
    def format(self, value, name, row):
        return


class Generic(object):
    pass
12.59375
39
0.655087
47
403
5.595745
0.531915
0.125475
0.171103
0.21673
0.18251
0
0
0
0
0
0
0
0.245658
403
32
40
12.59375
0.865132
0
0
0.352941
0
0
0
0
0
0
0
0
0
1
0.235294
false
0.176471
0.058824
0.176471
0.764706
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
5
c77cb4315e3c831e4a020c880d2124d326286d5e
6,220
py
Python
tests/metarl/tf/models/test_cnn_model.py
icml2020submission6857/metarl
9b66cefa2b6bcb6a38096d629ce8853b47c7171d
[ "MIT" ]
2
2020-03-15T14:35:15.000Z
2021-02-15T16:38:00.000Z
tests/metarl/tf/models/test_cnn_model.py
icml2020submission6857/metarl
9b66cefa2b6bcb6a38096d629ce8853b47c7171d
[ "MIT" ]
null
null
null
tests/metarl/tf/models/test_cnn_model.py
icml2020submission6857/metarl
9b66cefa2b6bcb6a38096d629ce8853b47c7171d
[ "MIT" ]
1
2020-02-24T03:04:23.000Z
2020-02-24T03:04:23.000Z
import pickle

import numpy as np
import pytest
import tensorflow as tf

from metarl.tf.models import CNNModel
from metarl.tf.models import CNNModelWithMaxPooling
from tests.fixtures import TfGraphTestCase


class TestCNNModel(TfGraphTestCase):
    def setup_method(self):
        super().setup_method()
        self.batch_size = 5
        self.input_width = 10
        self.input_height = 10
        self.obs_input = np.ones((self.batch_size, self.input_width,
                                  self.input_height, 3))
        input_shape = self.obs_input.shape[1:]  # height, width, channel
        self._input_ph = tf.compat.v1.placeholder(
            tf.float32, shape=(None, ) + input_shape, name='input')

    # yapf: disable
    @pytest.mark.parametrize('filter_sizes, in_channels, out_channels, '
                             'strides', [
        ((1,), (3,), (32,), (1,)),  # noqa: E122
        ((3,), (3,), (32,), (1,)),
        ((3,), (3,), (32,), (2,)),
        ((1, 1), (3, 32), (32, 64), (1, 1)),
        ((3, 3), (3, 32), (32, 64), (1, 1)),
        ((3, 3), (3, 32), (32, 64), (2, 2)),
    ])
    # yapf: enable
    def test_output_value(self, filter_sizes, in_channels, out_channels,
                          strides):
        model = CNNModel(filter_dims=filter_sizes,
                         num_filters=out_channels,
                         strides=strides,
                         name='cnn_model',
                         padding='VALID',
                         hidden_w_init=tf.constant_initializer(1),
                         hidden_nonlinearity=None)
        outputs = model.build(self._input_ph)
        output = self.sess.run(outputs,
                               feed_dict={self._input_ph: self.obs_input})

        filter_sum = 1  # filter value after 3 layers of conv
        for filter_size, in_channel in zip(filter_sizes, in_channels):
            filter_sum *= filter_size * filter_size * in_channel

        current_size = self.input_width
        for filter_size, stride in zip(filter_sizes, strides):
            current_size = int((current_size - filter_size) / stride) + 1

        flatten_shape = current_size * current_size * out_channels[-1]  # flatten
        expected_output = np.full((self.batch_size, flatten_shape),
                                  filter_sum, dtype=np.float32)
        assert np.array_equal(output, expected_output)

    # yapf: disable
    @pytest.mark.parametrize('filter_sizes, in_channels, out_channels, '
                             'strides, pool_strides, pool_shapes', [
        ((1,), (3,), (32,), (1,), (1, 1), (1, 1)),  # noqa: E122
        ((3,), (3,), (32,), (1,), (2, 2), (1, 1)),
        ((3,), (3,), (32,), (1,), (1, 1), (2, 2)),
        ((3,), (3,), (32,), (1,), (2, 2), (2, 2)),
        ((3,), (3,), (32,), (2,), (1, 1), (2, 2)),
        ((3,), (3,), (32,), (2,), (2, 2), (2, 2)),
        ((1, 1), (3, 32), (32, 64), (1, 1), (1, 1), (1, 1)),
        ((3, 3), (3, 32), (32, 64), (1, 1), (1, 1), (1, 1)),
        ((3, 3), (3, 32), (32, 64), (2, 2), (1, 1), (1, 1)),
    ])
    # yapf: enable
    def test_output_value_max_pooling(self, filter_sizes, in_channels,
                                      out_channels, strides, pool_strides,
                                      pool_shapes):
        model = CNNModelWithMaxPooling(
            filter_dims=filter_sizes,
            num_filters=out_channels,
            strides=strides,
            name='cnn_model',
            padding='VALID',
            pool_strides=pool_strides,
            pool_shapes=pool_shapes,
            hidden_w_init=tf.constant_initializer(1),
            hidden_nonlinearity=None)
        outputs = model.build(self._input_ph)
        output = self.sess.run(outputs,
                               feed_dict={self._input_ph: self.obs_input})

        filter_sum = 1  # filter value after 3 layers of conv
        for filter_size, in_channel in zip(filter_sizes, in_channels):
            filter_sum *= filter_size * filter_size * in_channel

        current_size = self.input_width
        for filter_size, stride in zip(filter_sizes, strides):
            current_size = int((current_size - filter_size) / stride) + 1
            current_size = int(
                (current_size - pool_shapes[0]) / pool_strides[0]) + 1

        flatten_shape = current_size * current_size * out_channels[-1]  # flatten
        expected_output = np.full((self.batch_size, flatten_shape),
                                  filter_sum, dtype=np.float32)
        assert np.array_equal(output, expected_output)

    # yapf: disable
    @pytest.mark.parametrize('filter_sizes, in_channels, out_channels, '
                             'strides', [
        ((1, ), (3, ), (32, ), (1, )),  # noqa: E122
        ((3, ), (3, ), (32, ), (1, )),
        ((3, ), (3, ), (32, ), (2, )),
        ((1, 1), (3, 32), (32, 64), (1, 1)),
        ((3, 3), (3, 32), (32, 64), (1, 1)),
        ((3, 3), (3, 32), (32, 64), (2, 2)),
    ])
    # yapf: enable
    def test_is_pickleable(self, filter_sizes, in_channels, out_channels,
                           strides):
        model = CNNModel(filter_dims=filter_sizes,
                         num_filters=out_channels,
                         strides=strides,
                         name='cnn_model',
                         padding='VALID',
                         hidden_w_init=tf.constant_initializer(1),
                         hidden_nonlinearity=None)
        outputs = model.build(self._input_ph)

        with tf.compat.v1.variable_scope('cnn_model/cnn/h0', reuse=True):
            bias = tf.compat.v1.get_variable('bias')
            bias.load(tf.ones_like(bias).eval())

        output1 = self.sess.run(outputs,
                                feed_dict={self._input_ph: self.obs_input})

        h = pickle.dumps(model)
        with tf.compat.v1.Session(graph=tf.Graph()) as sess:
            model_pickled = pickle.loads(h)
            input_shape = self.obs_input.shape[1:]  # height, width, channel
            input_ph = tf.compat.v1.placeholder(
                tf.float32, shape=(None, ) + input_shape, name='input')
            outputs = model_pickled.build(input_ph)
            output2 = sess.run(outputs, feed_dict={input_ph: self.obs_input})
            assert np.array_equal(output1, output2)
39.617834
77
0.53328
757
6,220
4.165126
0.163804
0.017761
0.01903
0.019981
0.790993
0.741199
0.724707
0.715192
0.713923
0.712972
0
0.05752
0.318006
6,220
156
78
39.871795
0.685761
0.039711
0
0.626984
0
0
0.040792
0
0
0
0
0
0.02381
1
0.031746
false
0
0.055556
0
0.095238
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c78f7cf855f6694576552a3e0d900f2e7fd893aa
106
py
Python
monitor/monitor/__init__.py
michix99/IubhUpMonitor
bdd12df3c0def7537bbf4a30a66f116af9f4cb2d
[ "MIT" ]
null
null
null
monitor/monitor/__init__.py
michix99/IubhUpMonitor
bdd12df3c0def7537bbf4a30a66f116af9f4cb2d
[ "MIT" ]
null
null
null
monitor/monitor/__init__.py
michix99/IubhUpMonitor
bdd12df3c0def7537bbf4a30a66f116af9f4cb2d
[ "MIT" ]
null
null
null
from .Utils import create_json, read_json
from .Website import Website
from .monitor_class import Monitor
26.5
41
0.839623
16
106
5.375
0.5625
0
0
0
0
0
0
0
0
0
0
0
0.122642
106
3
42
35.333333
0.924731
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c7c9a08fe4309fc0b12a8e32ffe7a6fab0dcafd4
90
py
Python
exercicios/__init__.py
RafaelGomides/VemPython
88e5b82c007a5c5a18ffd3a369444709a99d6a3c
[ "Unlicense" ]
null
null
null
exercicios/__init__.py
RafaelGomides/VemPython
88e5b82c007a5c5a18ffd3a369444709a99d6a3c
[ "Unlicense" ]
null
null
null
exercicios/__init__.py
RafaelGomides/VemPython
88e5b82c007a5c5a18ffd3a369444709a99d6a3c
[ "Unlicense" ]
null
null
null
# Project: VemPython/__init__.py
# Author: rafael
# Date: 19/03/18 - 16:06
# Objective: TODO
22.5
32
0.7
14
90
4.214286
1
0
0
0
0
0
0
0
0
0
0
0.12987
0.144444
90
4
33
22.5
0.636364
0.911111
0
null
0
null
0
0
null
0
0
0.25
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
0
0
0
0
0
5
c7d0152e01234acc5d8aa99e3513234f722f452c
19
py
Python
posthog/version.py
lharress/posthog
73809d54b14ffc1b6ad6f600e0e4f06ab3090cb1
[ "MIT" ]
null
null
null
posthog/version.py
lharress/posthog
73809d54b14ffc1b6ad6f600e0e4f06ab3090cb1
[ "MIT" ]
null
null
null
posthog/version.py
lharress/posthog
73809d54b14ffc1b6ad6f600e0e4f06ab3090cb1
[ "MIT" ]
null
null
null
VERSION = "1.30.0"
9.5
18
0.578947
4
19
2.75
1
0
0
0
0
0
0
0
0
0
0
0.25
0.157895
19
1
19
19
0.4375
0
0
0
0
0
0.315789
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c7f279d16c578c60ff43bb92f7e26683b5d5e405
1,474
py
Python
vg/test_cross.py
lace/vx
33134cae43d7729b6128b198119e1593035066ae
[ "BSD-2-Clause" ]
100
2019-01-18T05:08:34.000Z
2022-03-24T09:59:11.000Z
vg/test_cross.py
lace/vg
bece5191756b43378e882fd1fdf0ffa45a06e467
[ "BSD-2-Clause" ]
153
2018-11-16T17:44:28.000Z
2022-03-10T23:33:50.000Z
vg/test_cross.py
lace/vx
33134cae43d7729b6128b198119e1593035066ae
[ "BSD-2-Clause" ]
14
2019-05-17T15:05:52.000Z
2022-03-09T08:42:53.000Z
import numpy as np
import pytest

import vg


def test_cross():
    v1 = np.array([1.0, 2.0, 3.0])
    v2 = np.array([4.0, 5.0, 6.0])
    expected = np.array([-3.0, 6.0, -3.0])
    np.testing.assert_array_almost_equal(vg.cross(v1, v2), expected)
    np.testing.assert_array_almost_equal(vg.cross(v2, v1), -expected)


def test_cross_stacked():
    v1 = np.array([[1.0, 0.0, -1.0], [1.0, 2.0, 3.0]])
    v2 = np.array([[2.0, 2.0, 2.0], [4.0, 5.0, 6.0]])
    expected = np.array([[2.0, -4.0, 2.0], [-3.0, 6.0, -3.0]])
    np.testing.assert_array_almost_equal(vg.cross(v1, v2), expected)
    np.testing.assert_array_almost_equal(vg.cross(v2, v1), -expected)


def test_cross_mixed():
    v1 = np.array([[1.0, 0.0, -1.0], [1.0, 2.0, 3.0]])
    v2 = np.array([4.0, 5.0, 6.0])
    expected = np.array([[5.0, -10.0, 5.0], [-3.0, 6.0, -3.0]])
    np.testing.assert_array_almost_equal(vg.cross(v1, v2), expected)
    np.testing.assert_array_almost_equal(vg.cross(v2, v1), -expected)


def test_cross_error():
    v1 = np.array([[1.0, 0.0, -1.0], [1.0, 2.0, 3.0]])
    v2 = np.array([[4.0, 5.0, 6.0]])
    with pytest.raises(
        ValueError,
        match="v2 must be an array with shape \\(2, 3\\); got \\(1, 3\\)"
    ):
        vg.cross(v1, v2)

    v1 = np.array([[1.0, 0.0, -1.0], [1.0, 2.0, 3.0]])
    v2 = np.array([[[4.0, 5.0, 6.0]]])
    with pytest.raises(
        ValueError,
        match="Not sure what to do with 2 dimensions and 3 dimensions"
    ):
        vg.cross(v1, v2)
28.901961
85
0.569878
289
1,474
2.820069
0.152249
0.111656
0.03681
0.029448
0.784049
0.770552
0.770552
0.770552
0.770552
0.744785
0
0.130619
0.200136
1,474
50
86
29.48
0.560645
0
0
0.529412
0
0
0.075305
0
0
0
0
0
0.176471
1
0.117647
false
0
0.088235
0
0.205882
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
1bf0990e8a7afa26612eb4f8b749a1dfc9a4adf9
43
py
Python
tests/__init__.py
kwang1971/stockwatching
b7fab92505ec0804004322022c89287985e8d375
[ "MIT" ]
null
null
null
tests/__init__.py
kwang1971/stockwatching
b7fab92505ec0804004322022c89287985e8d375
[ "MIT" ]
null
null
null
tests/__init__.py
kwang1971/stockwatching
b7fab92505ec0804004322022c89287985e8d375
[ "MIT" ]
null
null
null
"""Unit test package for stockwatching."""
21.5
42
0.72093
5
43
6.2
1
0
0
0
0
0
0
0
0
0
0
0
0.116279
43
1
43
43
0.815789
0.837209
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
4000202843e285527500d074fb79077946690da0
46
py
Python
tests/plans/reloadplan/ready.py
buzztroll/cloudinit.d
9fcc7c489b646e76f2b7360c0d807dae88d9cc0f
[ "Apache-2.0" ]
null
null
null
tests/plans/reloadplan/ready.py
buzztroll/cloudinit.d
9fcc7c489b646e76f2b7360c0d807dae88d9cc0f
[ "Apache-2.0" ]
null
null
null
tests/plans/reloadplan/ready.py
buzztroll/cloudinit.d
9fcc7c489b646e76f2b7360c0d807dae88d9cc0f
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python

import sys

sys.exit(0)
9.2
21
0.695652
9
46
3.555556
0.888889
0
0
0
0
0
0
0
0
0
0
0.025
0.130435
46
4
22
11.5
0.775
0.434783
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
4005d1015406a13c65033cb9afb694758b3abbed
94
py
Python
source/Calculator/Array_Sort.py
afarahi/XTRA
6550b216264abaa3ed705835aca0981f2934e069
[ "MIT" ]
2
2018-11-01T12:38:56.000Z
2019-10-22T07:02:54.000Z
source/Calculator/Array_Sort.py
afarahi/XTRA
6550b216264abaa3ed705835aca0981f2934e069
[ "MIT" ]
null
null
null
source/Calculator/Array_Sort.py
afarahi/XTRA
6550b216264abaa3ed705835aca0981f2934e069
[ "MIT" ]
null
null
null
def Sorting_array_data(A, i):
    # Sort the rows of A by the value in column i
    B = sorted(A, key=lambda a_entry: a_entry[i])
    return B
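A minimal usage sketch for the Sorting_array_data helper above; the sample rows are illustrative, not from the source.

rows = [[3, 'c'], [1, 'a'], [2, 'b']]
print(Sorting_array_data(rows, 0))  # [[1, 'a'], [2, 'b'], [3, 'c']] -- rows ordered by column 0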
18.8
50
0.659574
18
94
3.222222
0.666667
0.206897
0
0
0
0
0
0
0
0
0
0
0.212766
94
4
51
23.5
0.783784
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
4009ced76f8d594eda33f26bb9738e059c9389e4
79
py
Python
donatello/__init__.py
welchbj/donatello
a64e3998a46d8b5d54d3ab759435ae4124dfff93
[ "MIT" ]
2
2019-09-10T14:28:02.000Z
2020-05-29T04:37:01.000Z
donatello/__init__.py
welchbj/donatello
a64e3998a46d8b5d54d3ab759435ae4124dfff93
[ "MIT" ]
null
null
null
donatello/__init__.py
welchbj/donatello
a64e3998a46d8b5d54d3ab759435ae4124dfff93
[ "MIT" ]
null
null
null
from .encode import (  # noqa
    encode_x86_32)

# TODO: all the other exports
19.75
29
0.696203
12
79
4.416667
0.916667
0
0
0
0
0
0
0
0
0
0
0.065574
0.227848
79
3
30
26.333333
0.803279
0.405063
0
0
0
0
0
0
0
0
0
0.333333
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
1
0
0
0
0
5
4047f0f6914fe2017a7907eebda8594514da8969
196
py
Python
se_layers/__init__.py
ioanvl/1d_squeeze_excitation
f422dc4b8e7de6239a6fb7d1688048db5053e733
[ "MIT" ]
null
null
null
se_layers/__init__.py
ioanvl/1d_squeeze_excitation
f422dc4b8e7de6239a6fb7d1688048db5053e733
[ "MIT" ]
null
null
null
se_layers/__init__.py
ioanvl/1d_squeeze_excitation
f422dc4b8e7de6239a6fb7d1688048db5053e733
[ "MIT" ]
null
null
null
from .se_layers import ChannelSELayer1d, SpatialSELayer1d, ChannelSpatialSELayer1d

__all__ = ['ChannelSELayer1d',
           'SpatialSELayer1d',
           'ChannelSpatialSELayer1d',
           ]
32.666667
82
0.693878
11
196
11.909091
0.727273
0.48855
0.839695
0
0
0
0
0
0
0
0
0.039735
0.229592
196
6
83
32.666667
0.827815
0
0
0
0
0
0.279188
0.116751
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
1
0
1
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
404dae880cd4fbaec4855067bde103cecea4b93c
193
py
Python
gdf_tests/__main__.py
dunkgray/gdf
7b39f0c90cf63d501b36ea9d754269616d79e0d4
[ "Apache-2.0" ]
7
2015-08-27T09:20:55.000Z
2019-06-27T14:00:11.000Z
gdf_tests/__main__.py
alex-ip/gdf
7b39f0c90cf63d501b36ea9d754269616d79e0d4
[ "Apache-2.0" ]
null
null
null
gdf_tests/__main__.py
alex-ip/gdf
7b39f0c90cf63d501b36ea9d754269616d79e0d4
[ "Apache-2.0" ]
5
2015-05-13T05:58:13.000Z
2019-12-09T00:36:11.000Z
import test_arguments
import test_config_file
import test_database
import test_gdf

# Run all tests
test_arguments.main()
test_config_file.main()
test_database.main()
test_gdf.main()
17.545455
24
0.792746
29
193
4.931034
0.37931
0.27972
0.195804
0
0
0
0
0
0
0
0
0
0.139896
193
10
25
19.3
0.861446
0.067358
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
40dd784373debb56d7c9c93e837a17e735d14148
3,137
py
Python
data/train/python/40dd784373debb56d7c9c93e837a17e735d14148menu.py
harshp8l/deep-learning-lang-detection
2a54293181c1c2b1a2b840ddee4d4d80177efb33
[ "MIT" ]
84
2017-10-25T15:49:21.000Z
2021-11-28T21:25:54.000Z
data/train/python/40dd784373debb56d7c9c93e837a17e735d14148menu.py
vassalos/deep-learning-lang-detection
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
[ "MIT" ]
5
2018-03-29T11:50:46.000Z
2021-04-26T13:33:18.000Z
data/train/python/40dd784373debb56d7c9c93e837a17e735d14148menu.py
vassalos/deep-learning-lang-detection
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
[ "MIT" ]
24
2017-11-22T08:31:00.000Z
2022-03-27T01:22:31.000Z
response.title = settings.title
response.subtitle = settings.subtitle
response.meta.author = '%(author)s <%(author_email)s>' % settings
response.meta.keywords = settings.keywords
response.meta.description = settings.description

response.menu = [
    (T('Home'), URL('default', 'index') == URL(), URL('default', 'index'), []),
    (T('Contracts'), URL('default', 'contract') == URL(), URL('default', 'contract'), []),
    (T('About'), URL('default', 'about') == URL(), URL('default', 'about'), []),
    (T('Contact'), URL('default', 'contact') == URL(), URL('default', 'contact'), []),
    #(T('Building'), URL('default', 'building_manage') == URL(), URL('default', 'building_manage'), []),
    #(T('Floor'), URL('default', 'floor_manage') == URL(), URL('default', 'floor_manage'), []),
    #(T('Apartment'), URL('default', 'apartment_manage') == URL(), URL('default', 'apartment_manage'), []),
    #(T('Apartment Type'), URL('default', 'apartment_type_manage') == URL(), URL('default', 'apartment_type_manage'), []),
    #(T('User Info'), URL('default', 'user_info_manage') == URL(), URL('default', 'user_info_manage'), []),
    #(T('Contract'), URL('default', 'contract_manage') == URL(), URL('default', 'contract_manage'), []),
    #(T('Semester'), URL('default', 'semester_manage') == URL(), URL('default', 'semester_manage'), []),
    #(T('Parking'), URL('default', 'parking_manage') == URL(), URL('default', 'parking_manage'), []),
    #(T('Request'), URL('default', 'request_manage') == URL(), URL('default', 'request_manage'), []),
    #(T('Request Comments'), URL('default', 'request_comments_manage') == URL(), URL('default', 'request_comments_manage'), []),
    #(T('Request Type'), URL('default', 'request_type_manage') == URL(), URL('default', 'request_type_manage'), []),
    #(T('Room'), URL('default', 'room_manage') == URL(), URL('default', 'room_manage'), []),
    #(T('Room Type'), URL('default', 'room_type_manage') == URL(), URL('default', 'room_type_manage'), []),
    #(T('T Building'), URL('default', 't_building_manage') == URL(), URL('default', 't_building_manage'), []),
    #(T('T Floor'), URL('default', 't_floor_manage') == URL(), URL('default', 't_floor_manage'), []),
    #(T('T Apartment'), URL('default', 't_apartment_manage') == URL(), URL('default', 't_apartment_manage'), []),
    #(T('T Apartment Type'), URL('default', 't_apartment_type_manage') == URL(), URL('default', 't_apartment_type_manage'), []),
    #(T('T User Info'), URL('default', 't_user_info_manage') == URL(), URL('default', 't_user_info_manage'), []),
    #(T('T Contract'), URL('default', 't_contract_manage') == URL(), URL('default', 't_contract_manage'), []),
    #(T('T Semester'), URL('default', 't_semester_manage') == URL(), URL('default', 't_semester_manage'), []),
    #(T('T Parking'), URL('default', 't_parking_manage') == URL(), URL('default', 't_parking_manage'), []),
    #(T('T Request'), URL('default', 't_request_manage') == URL(), URL('default', 't_request_manage'), []),
    #(T('T Request Comments'), URL('default', 't_request_comments_manage') == URL(), URL('default', 't_request_comments_manage'), []),
    #(T('T Request Type'), URL('default', 't_request_type_manage') == URL(), URL('default', 't_request_type_manage'), []),
    #(T('T Room'), URL('default', 't_room_manage') == URL(), URL('default', 't_room_manage'), []),
    #(T('T Room Type'), URL('default', 't_room_type_manage') == URL(), URL('default', 't_room_type_manage'), []),
]
82.552632
123
0.670704
420
3,137
4.77381
0.07619
0.299252
0.194514
0.246384
0.666334
0.261845
0
0
0
0
0
0
0.023908
3,137
37
124
84.783784
0.654801
0.810328
0
0
0
0
0.281195
0
0
0
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
40e3b2eab8cfb39301a6a0dce5d869a6cd8807ed
9,566
py
Python
tests/test_sifter_sqlite.py
Mikea1985/orbit_cheby
cbc743fffa0ec6415ea17149c7768cd5fbe567f8
[ "MIT" ]
null
null
null
tests/test_sifter_sqlite.py
Mikea1985/orbit_cheby
cbc743fffa0ec6415ea17149c7768cd5fbe567f8
[ "MIT" ]
12
2020-08-05T18:11:34.000Z
2021-07-16T18:25:07.000Z
tests/test_sifter_sqlite.py
Mikea1985/orbit_cheby
cbc743fffa0ec6415ea17149c7768cd5fbe567f8
[ "MIT" ]
1
2021-06-23T15:41:32.000Z
2021-06-23T15:41:32.000Z
# -*- coding: utf-8 -*-
# cheby_checker/tests/test_sqlite

'''
--------------------------------------------------------------
tests of sifter's sqlite functions

Jan 2020
Matt Payne & Mike Alexandersen
--------------------------------------------------------------
'''

# Import third-party packages
# --------------------------------------------------------------
import sys
import os
import pytest

# Import neighboring packages
# --------------------------------------------------------------
sys.path.append(os.path.dirname(os.path.dirname(
    os.path.realpath(__file__))))
from cheby_checker import sifter_sql as sql

# Convenience data / functions to aid testing
# --------------------------------------------------------------
test_tracklet_dict_list = []
for i in range(4):
    test_tracklet_dict_list.append({'JD': 123 + i,
                                    'HP': 456 + i,
                                    'tracklet_name': 'kjhdfasdf' + str(i),
                                    'asd': 'fgh',
                                    'ghfgh': 987})

# Actual tests ...
# --------------------------------------------------------------


def test_db_creation():
    '''Test that an empty database can be created.'''
    # Where do we want the db to live
    assert 'sifter' in sql.fetch_db_filepath()
    if os.path.isfile(sql.fetch_db_filepath()):
        os.remove(sql.fetch_db_filepath())
    # Does a db get created
    conn = sql.create_connection(sql.fetch_db_filepath())
    assert os.path.isfile(os.path.join(sql.fetch_db_filepath()))


def test_table_creation():
    '''Test table creation.'''
    expected_table_name = 'tracklets'
    # set up db & table
    if os.path.isfile(sql.fetch_db_filepath()):
        os.remove(sql.fetch_db_filepath())
    conn = sql.create_connection(sql.fetch_db_filepath())
    cur = conn.cursor()
    # Create the table
    sql.create_specific_table(conn)
    # - get the count of tables with the name
    cur.execute('SELECT name from sqlite_master WHERE type = "table" AND name = "tracklets"')
    # - if the count is 1, then table exists
    assert len(cur.fetchone()) == 1, 'table does not exist'
    # Delete the db to facilitate future testing
    os.remove(sql.fetch_db_filepath())


@pytest.mark.parametrize('tracklet_dict_list', [test_tracklet_dict_list])
def test_tracklet_upsert(tracklet_dict_list):
    '''Test tracklet upsertion into the database.'''
    # set up db & table
    if os.path.isfile(sql.fetch_db_filepath()):
        os.remove(sql.fetch_db_filepath())
    conn = sql.create_connection(sql.fetch_db_filepath())
    cur = conn.cursor()
    sql.create_specific_table(conn)
    # create some data and then upload it ...
    tracklet_dict = tracklet_dict_list[0]
    sql.upsert_tracklet(conn, tracklet_dict['JD'], tracklet_dict['HP'],
                        tracklet_dict['tracklet_name'], tracklet_dict)
    # test that the data was actually uploaded
    cur.execute('SELECT * from tracklets')
    f = cur.fetchone()
    assert (len(f) > 3 and f[3] == tracklet_dict['tracklet_name']),\
        'data not uploaded'
    # Delete the db to facilitate future testing
    os.remove(sql.fetch_db_filepath())


@pytest.mark.parametrize(('tracklet_dict_list'), [test_tracklet_dict_list])
def test_tracklets_upsert(tracklet_dict_list):
    """ Here we are updating/inserting **lists** of tracklet data """
    # set up db & table
    if os.path.isfile(sql.fetch_db_filepath()):
        os.remove(sql.fetch_db_filepath())
    conn = sql.create_connection(sql.fetch_db_filepath())
    cur = conn.cursor()
    sql.create_specific_table(conn)
    # upload data ...
    JD = [tracklet_dic['JD'] for tracklet_dic in tracklet_dict_list]
    HP = [tracklet_dic['HP'] for tracklet_dic in tracklet_dict_list]
    tracklet_name = [tracklet_dic['tracklet_name'] for tracklet_dic in tracklet_dict_list]
    sql.upsert_tracklets(conn, JD, HP, tracklet_name, tracklet_dict_list)
    # test that the data was actually uploaded
    cur.execute('SELECT * from tracklets')
    f = cur.fetchall()
    assert (len(f) == len(tracklet_dict_list)), 'data not uploaded'
    for ii, fi in enumerate(f):
        assert fi[3] == tracklet_name[ii], 'data not uploaded'
    # Delete the db to facilitate future testing
    os.remove(sql.fetch_db_filepath())


@pytest.mark.parametrize(('tracklet_dict_list'), [test_tracklet_dict_list])
def test_tracklet_query(tracklet_dict_list):
    '''Test querying a tracklet.'''
    # set up db & table
    if os.path.isfile(sql.fetch_db_filepath()):
        os.remove(sql.fetch_db_filepath())
    conn = sql.create_connection(sql.fetch_db_filepath())
    cur = conn.cursor()
    sql.create_specific_table(conn)
    # upload data ...
    JD = [tracklet_dic['JD'] for tracklet_dic in tracklet_dict_list]
    HP = [tracklet_dic['HP'] for tracklet_dic in tracklet_dict_list]
    tracklet_name = [tracklet_dic['tracklet_name'] for tracklet_dic in tracklet_dict_list]
    sql.upsert_tracklets(conn, JD, HP, tracklet_name, tracklet_dict_list)
    # query the data & check that requisite dictionaries are returned
    list_of_tuples = sql.query_tracklets_jdhp(conn, JD[0], HP[0])
    assert isinstance(list_of_tuples, list) and len(list_of_tuples) == 1
    # Delete the db to facilitate future testing
    os.remove(sql.fetch_db_filepath())


@pytest.mark.parametrize(('tracklet_dict_list'), [test_tracklet_dict_list])
def test_tracklet_query_mutiple_HP(tracklet_dict_list):
    '''Test querying multiple HEALPix.'''
    # set up db & table
    if os.path.isfile(sql.fetch_db_filepath()):
        os.remove(sql.fetch_db_filepath())
    conn = sql.create_connection(sql.fetch_db_filepath())
    cur = conn.cursor()
    sql.create_specific_table(conn)
    # upload data ...
    JD = [tracklet_dic['JD'] for tracklet_dic in tracklet_dict_list]
    HP = [tracklet_dic['HP'] for tracklet_dic in tracklet_dict_list]
    tracklet_name = [tracklet_dic['tracklet_name'] for tracklet_dic in tracklet_dict_list]
    sql.upsert_tracklets(conn, JD, HP, tracklet_name, tracklet_dict_list)
    # query the data & check that requisite dictionaries are returned
    list_of_tuples = sql.query_tracklets_jd_hplist(conn, JD[0], HP)
    assert isinstance(list_of_tuples, list) and len(list_of_tuples) == 1
    # Delete the db to facilitate future testing
    os.remove(sql.fetch_db_filepath())


@pytest.mark.parametrize(('tracklet_dict_list'), [test_tracklet_dict_list])
def test_delete_tracklet(tracklet_dict_list):
    '''Test deletion of a tracklet.'''
    # set up db & table
    if os.path.isfile(sql.fetch_db_filepath()):
        os.remove(sql.fetch_db_filepath())
    conn = sql.create_connection(sql.fetch_db_filepath())
    cur = conn.cursor()
    sql.create_specific_table(conn)
    # upload data ...
    JD = [tracklet_dic['JD'] for tracklet_dic in tracklet_dict_list]
    HP = [tracklet_dic['HP'] for tracklet_dic in tracklet_dict_list]
    tracklet_name = [tracklet_dic['tracklet_name'] for tracklet_dic in tracklet_dict_list]
    sql.upsert_tracklets(conn, JD, HP, tracklet_name, tracklet_dict_list)
    # query the data & check that required # of dictionaries are returned
    list_of_tuples = sql.query_tracklets_jdhp(conn, JD[0], HP[0])
    assert isinstance(list_of_tuples, list) and len(list_of_tuples) == 1
    # now delete a tracklet & check that one less dictionary is subsequently returned
    sql.delete_tracklet(conn, tracklet_dict_list[0]['tracklet_name'])
    list_of_tuples = sql.query_tracklets_jdhp(conn, JD[0], HP[0])
    assert isinstance(list_of_tuples, list) and len(list_of_tuples) == 0
    # Delete the db to facilitate future testing
    os.remove(sql.fetch_db_filepath())


@pytest.mark.parametrize(('tracklet_dict_list'), [test_tracklet_dict_list])
def test_delete_tracklets(tracklet_dict_list):
    '''Test deleting multiple tracklets.'''
    # set up db & table
    if os.path.isfile(sql.fetch_db_filepath()):
        os.remove(sql.fetch_db_filepath())
    conn = sql.create_connection(sql.fetch_db_filepath())
    cur = conn.cursor()
    sql.create_specific_table(conn)
    # upload data ...
    JD = [tracklet_dic['JD'] for tracklet_dic in tracklet_dict_list]
    HP = [tracklet_dic['HP'] for tracklet_dic in tracklet_dict_list]
    tracklet_name = [tracklet_dic['tracklet_name'] for tracklet_dic in tracklet_dict_list]
    sql.upsert_tracklets(conn, JD, HP, tracklet_name, tracklet_dict_list)
    # query the data & check that required # of dictionaries are returned
    list_of_tuples = sql.query_tracklets_jdhp(conn, JD[0], HP[0])
    assert isinstance(list_of_tuples, list) and len(list_of_tuples) == 1
    list_of_tuples = sql.query_tracklets_jdhp(conn, JD[1], HP[1])
    assert isinstance(list_of_tuples, list) and len(list_of_tuples) == 1
    # now delete two tracklets & check that two fewer dictionaries remain
    sql.delete_tracklets(conn, [tracklet_dict_list[0]['tracklet_name'],
                                tracklet_dict_list[1]['tracklet_name']])
    list_of_tuples = sql.query_tracklets_jdhp(conn, JD[0], HP[0])
    assert isinstance(list_of_tuples, list) and len(list_of_tuples) == 0
    list_of_tuples = sql.query_tracklets_jdhp(conn, JD[1], HP[1])
    assert isinstance(list_of_tuples, list) and len(list_of_tuples) == 0
    # Delete the db to facilitate future testing
    os.remove(sql.fetch_db_filepath())

# End of file.
37.960317
93
0.660882
1,293
9,566
4.62181
0.12761
0.10241
0.120482
0.099398
0.756024
0.729585
0.729585
0.720549
0.713688
0.713688
0
0.00611
0.195902
9,566
251
94
38.111554
0.770801
0.221305
0
0.671642
0
0
0.068886
0
0
0
0
0
0.104478
1
0.059701
false
0
0.029851
0
0.089552
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
905b0d4d395d245d250fe27d245afa6ff376a1dd
603
py
Python
tests/test_mppcommand.py
Inqbus/mpp-solar
899c98785f8f58a431c04162eb653c41d043fca0
[ "MIT" ]
null
null
null
tests/test_mppcommand.py
Inqbus/mpp-solar
899c98785f8f58a431c04162eb653c41d043fca0
[ "MIT" ]
null
null
null
tests/test_mppcommand.py
Inqbus/mpp-solar
899c98785f8f58a431c04162eb653c41d043fca0
[ "MIT" ]
null
null
null
import unittest

from mppsolar import mppcommand
from builtins import bytes


class test_mppcommand(unittest.TestCase):
    def test_crc(self):
        """ Test crc function generates correct crc """
        self.assertListEqual(mppcommand.crc(bytes('QPIGS', 'utf-8')), [183, 169])
        self.assertListEqual(mppcommand.crc(bytes('QPIRI', 'utf-8')), [248, 84])
        self.assertListEqual(mppcommand.crc(bytes('PSDV56.4', 'utf-8')), [249, 224])
        self.assertListEqual(mppcommand.crc(bytes('186', 'utf-8')), [41, 60])
        self.assertListEqual(mppcommand.crc(bytes('196', 'utf-8')), [27, 14])
43.071429
84
0.668325
76
603
5.276316
0.460526
0.236908
0.361596
0.399002
0.461347
0
0
0
0
0
0
0.076923
0.159204
603
13
85
46.384615
0.714004
0.064677
0
0
1
0
0.08813
0
0
0
0
0
0.5
1
0.1
false
0
0.3
0
0.5
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
5
90a623d1b6652c78cad17cba8175991a2e33a2dc
410
py
Python
visualization.py
hynekdav/semi-supervised-VOS
6b29baef2e4fd018502fb434e978e8e924fb84b1
[ "MIT" ]
null
null
null
visualization.py
hynekdav/semi-supervised-VOS
6b29baef2e4fd018502fb434e978e8e924fb84b1
[ "MIT" ]
2
2022-01-13T03:45:31.000Z
2022-03-12T00:57:40.000Z
visualization.py
hynekdav/semi-supervised-VOS
6b29baef2e4fd018502fb434e978e8e924fb84b1
[ "MIT" ]
null
null
null
# -*- encoding: utf-8 -*-
# ! python3

import click

from src.visualization.overlay import overlay_command
from src.visualization.prediction_only import prediction_only_command
from src.visualization.side_by_side import side_by_side_command


@click.group(name='cli')
def cli():
    pass


cli.add_command(overlay_command)
cli.add_command(side_by_side_command)
cli.add_command(prediction_only_command)

cli()
18.636364
69
0.804878
59
410
5.288136
0.355932
0.067308
0.192308
0.173077
0
0
0
0
0
0
0
0.005435
0.102439
410
21
70
19.52381
0.842391
0.080488
0
0
0
0
0.008021
0
0
0
0
0
0
1
0.090909
true
0.090909
0.363636
0
0.454545
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
5
90ad7d17c02755a881530ed8328a383053d6373b
123
py
Python
bbs_langtypes/admin.py
TakeshiOkamoto/mpp_bbs_dj
0a91c9bf3e1450dc6d062f0beeba263faa33b1e3
[ "MIT" ]
null
null
null
bbs_langtypes/admin.py
TakeshiOkamoto/mpp_bbs_dj
0a91c9bf3e1450dc6d062f0beeba263faa33b1e3
[ "MIT" ]
null
null
null
bbs_langtypes/admin.py
TakeshiOkamoto/mpp_bbs_dj
0a91c9bf3e1450dc6d062f0beeba263faa33b1e3
[ "MIT" ]
null
null
null
from django.contrib import admin

# Register your models here.
from .models import LangType

admin.site.register(LangType)
17.571429
32
0.804878
17
123
5.823529
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.130081
123
6
33
20.5
0.925234
0.211382
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
90bee37ed35ad40264d484a02761d2ea60b709da
485
py
Python
src/core/schemas/token.py
ConnectAPI/Gateway
95fc283ff8c730353709ebe14f613e74821c45a5
[ "MIT" ]
null
null
null
src/core/schemas/token.py
ConnectAPI/Gateway
95fc283ff8c730353709ebe14f613e74821c45a5
[ "MIT" ]
null
null
null
src/core/schemas/token.py
ConnectAPI/Gateway
95fc283ff8c730353709ebe14f613e74821c45a5
[ "MIT" ]
null
null
null
from typing import List
from secrets import token_urlsafe
from datetime import datetime

from pydantic import BaseModel, Field


class NewTokenForm(BaseModel):
    scopes: List[str] = Field(default_factory=list)


class Token(BaseModel):
    tid: str = Field(default_factory=lambda: token_urlsafe(15))
    refresh_token: str = Field(default_factory=lambda: token_urlsafe(20))
    scopes: List[str]
    created_at: datetime = Field(default_factory=datetime.now)
    active: bool = True
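A short usage sketch for the pydantic models above, assuming they are importable as defined; the values are illustrative.

form = NewTokenForm()           # scopes defaults to []
token = Token(scopes=['read'])  # tid and refresh_token are freshly generated per instance
print(token.tid, token.refresh_token)
print(token.created_at, token.active)  # creation timestamp, True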
26.944444
73
0.762887
64
485
5.640625
0.4375
0.132964
0.210526
0.182825
0.221607
0.221607
0.221607
0
0
0
0
0.009732
0.152577
485
17
74
28.529412
0.868613
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
90f4efb037a06d47b28cd5de6df852d474d488eb
52
py
Python
icarus/__init__.py
lybroman/Icarus
cc57d68b28a62dd02656ebd339aef534dc429fa8
[ "MIT" ]
null
null
null
icarus/__init__.py
lybroman/Icarus
cc57d68b28a62dd02656ebd339aef534dc429fa8
[ "MIT" ]
null
null
null
icarus/__init__.py
lybroman/Icarus
cc57d68b28a62dd02656ebd339aef534dc429fa8
[ "MIT" ]
null
null
null
from app import Icarus
from suitcase import Suitcase
26
29
0.865385
8
52
5.625
0.625
0
0
0
0
0
0
0
0
0
0
0
0.134615
52
2
29
26
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
90ff24ac7aa19ba91936b10177874ae9e3b74247
214
py
Python
utilities/uuid.py
merretbuurman/rapydo-utils
54cdbf2f39f262f50057656e04d47f19c5b56b11
[ "MIT" ]
3
2017-06-01T17:18:29.000Z
2019-04-16T21:46:04.000Z
utilities/uuid.py
merretbuurman/rapydo-utils
54cdbf2f39f262f50057656e04d47f19c5b56b11
[ "MIT" ]
20
2017-07-13T07:59:13.000Z
2019-10-02T12:49:09.000Z
utilities/uuid.py
merretbuurman/rapydo-utils
54cdbf2f39f262f50057656e04d47f19c5b56b11
[ "MIT" ]
3
2018-05-30T13:46:24.000Z
2019-08-22T13:51:03.000Z
# -*- coding: utf-8 -*-

"""
Handling IDs in a more secure way
"""

import uuid


def getUUID():
    return str(uuid.uuid4())


def getUUIDfromString(string):
    return str(uuid.uuid5(uuid.NAMESPACE_URL, string))
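A brief usage sketch for the helpers above. Note that uuid5 with NAMESPACE_URL is deterministic, so the same input string always maps to the same ID, while getUUID() is random each call; the sample URL is illustrative.

print(getUUID())                               # random, e.g. '1b4e28ba-...'
print(getUUIDfromString('https://rapydo.io'))  # stable for this string
assert getUUIDfromString('x') == getUUIDfromString('x')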
13.375
54
0.663551
29
214
4.862069
0.758621
0.12766
0.184397
0
0
0
0
0
0
0
0
0.017143
0.182243
214
15
55
14.266667
0.788571
0.261682
0
0
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0.2
0.4
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
291c44e8c0ca8267aaae56febe5643f38ad17ab3
2,814
py
Python
dagster_toolkit/tests/test_environment_dict.py
ib-da-ncirl/dagster_toolkit
fbe5eec1ab3b36198dd2c4bc441792d6c7a54d76
[ "MIT" ]
null
null
null
dagster_toolkit/tests/test_environment_dict.py
ib-da-ncirl/dagster_toolkit
fbe5eec1ab3b36198dd2c4bc441792d6c7a54d76
[ "MIT" ]
null
null
null
dagster_toolkit/tests/test_environment_dict.py
ib-da-ncirl/dagster_toolkit
fbe5eec1ab3b36198dd2c4bc441792d6c7a54d76
[ "MIT" ]
null
null
null
from unittest import TestCase

from environ import EnvironmentDict


class TestEnvironmentDict(TestCase):

    def test_add_solid(self):
        environ = EnvironmentDict()
        environ.add_solid('new_solid')
        self.assertTrue('solids' in environ._e_dict.keys())
        self.assertTrue('new_solid' in environ._e_dict['solids'].keys())

    def test_add_solid_input(self):
        environ = EnvironmentDict()
        environ.add_solid_input('new_solid', 'solid_ip', 'ip_value')
        self.assertTrue('solids' in environ._e_dict.keys())
        self.assertTrue('new_solid' in environ._e_dict['solids'].keys())
        self.assertTrue('inputs' in environ._e_dict['solids']['new_solid'].keys())
        self.assertTrue('solid_ip' in environ._e_dict['solids']['new_solid']['inputs'].keys())
        self.assertTrue('value' in environ._e_dict['solids']['new_solid']['inputs']['solid_ip'].keys())
        self.assertEqual(environ._e_dict['solids']['new_solid']['inputs']['solid_ip']['value'], 'ip_value')

    def test_add_composite_solid(self):
        environ = EnvironmentDict()
        environ.add_composite_solid('new_solid', 'child_solid')
        self.assertTrue('solids' in environ._e_dict.keys())
        self.assertTrue('new_solid' in environ._e_dict['solids'].keys())
        self.assertTrue('solids' in environ._e_dict['solids']['new_solid'].keys())
        self.assertTrue('child_solid' in environ._e_dict['solids']['new_solid']['solids'].keys())

    def test_add_composite_solid_input(self):
        environ = EnvironmentDict()
        environ.add_composite_solid_input('new_solid', 'child_solid', 'solid_ip', 'ip_value')
        self.assertTrue('solids' in environ._e_dict.keys())
        self.assertTrue('new_solid' in environ._e_dict['solids'].keys())
        self.assertTrue('solids' in environ._e_dict['solids']['new_solid'].keys())
        self.assertTrue('child_solid' in environ._e_dict['solids']['new_solid']['solids'].keys())
        self.assertTrue('inputs' in environ._e_dict['solids']['new_solid']['solids']['child_solid'].keys())
        self.assertTrue('solid_ip' in environ._e_dict['solids']['new_solid']['solids']['child_solid']['inputs'].keys())
        self.assertTrue('value' in environ._e_dict['solids']['new_solid']['solids']['child_solid']['inputs']['solid_ip'].keys())
        self.assertEqual(environ._e_dict['solids']['new_solid']['solids']['child_solid']['inputs']['solid_ip']['value'], 'ip_value')

    def test_add_resource(self):
        environ = EnvironmentDict()
        environ.add_resource('new_resource', 'resource_value')
        self.assertTrue('resources' in environ._e_dict.keys())
        self.assertTrue('new_resource' in environ._e_dict['resources'].keys())
        self.assertEqual(environ._e_dict['resources']['new_resource'], 'resource_value')
52.111111
132
0.679815
357
2,814
5.039216
0.081232
0.102279
0.153419
0.155642
0.850472
0.809339
0.760978
0.674263
0.654808
0.654808
0
0
0.145345
2,814
53
133
53.09434
0.748025
0
0
0.414634
0
0
0.245558
0
0
0
0
0
0.560976
1
0.121951
false
0
0.04878
0
0.195122
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
5
29360fe7f58b1c9e51134c4e7c95d9de924beafb
43
py
Python
papermill_report/__main__.py
ariadnext/papermill_report
624526a32319842caf8414b50a4d0691312c1784
[ "BSD-3-Clause" ]
13
2020-12-04T09:34:56.000Z
2022-03-14T15:31:40.000Z
papermill_report/__main__.py
ariadnext/papermill_report
624526a32319842caf8414b50a4d0691312c1784
[ "BSD-3-Clause" ]
null
null
null
papermill_report/__main__.py
ariadnext/papermill_report
624526a32319842caf8414b50a4d0691312c1784
[ "BSD-3-Clause" ]
null
null
null
from .papermill_report import main

main()
10.75
34
0.790698
6
43
5.5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.139535
43
3
35
14.333333
0.891892
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
2977dc368e847330f9f668e95e8ba52b94333999
46
py
Python
je_editor/ui/ui_event/auto_save/__init__.py
JE-Chen/je_editor
2f18dedb6f0eb27c38668dc53f520739c8d5c6c6
[ "MIT" ]
1
2021-12-10T14:57:15.000Z
2021-12-10T14:57:15.000Z
je_editor/ui/ui_event/auto_save/__init__.py
JE-Chen/je_editor
2f18dedb6f0eb27c38668dc53f520739c8d5c6c6
[ "MIT" ]
null
null
null
je_editor/ui/ui_event/auto_save/__init__.py
JE-Chen/je_editor
2f18dedb6f0eb27c38668dc53f520739c8d5c6c6
[ "MIT" ]
null
null
null
from je_editor.ui.ui_event.auto_save import *
23
45
0.826087
9
46
3.888889
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.086957
46
1
46
46
0.833333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
297f7ca6f350ccef31b5714b73fbfc22d1788b64
59
py
Python
action/__init__.py
akyruu/blender-cartography-addon
4f34b029d9b6a72619227ab3ceaed9393506934e
[ "Apache-2.0" ]
null
null
null
action/__init__.py
akyruu/blender-cartography-addon
4f34b029d9b6a72619227ab3ceaed9393506934e
[ "Apache-2.0" ]
null
null
null
action/__init__.py
akyruu/blender-cartography-addon
4f34b029d9b6a72619227ab3ceaed9393506934e
[ "Apache-2.0" ]
null
null
null
from . import calculate_coordinates, generate_blender_file
29.5
58
0.881356
7
59
7
1
0
0
0
0
0
0
0
0
0
0
0
0.084746
59
1
59
59
0.907407
0
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
465046cb6336f51ec637d1e733b96d84cfc73ec5
8,259
py
Python
tests/infrastructure/sqlalechemy/repositories/test_base_repository.py
digitalservice4germany/erica
7e07d88f3db78ab6e4f7cccad8dfef2a4b3a71b2
[ "MIT" ]
3
2022-01-31T15:17:17.000Z
2022-03-01T16:15:47.000Z
tests/infrastructure/sqlalechemy/repositories/test_base_repository.py
digitalservice4germany/erica
7e07d88f3db78ab6e4f7cccad8dfef2a4b3a71b2
[ "MIT" ]
59
2022-01-31T14:04:20.000Z
2022-03-31T20:08:47.000Z
tests/infrastructure/sqlalechemy/repositories/test_base_repository.py
digitalservice4germany/erica
7e07d88f3db78ab6e4f7cccad8dfef2a4b3a71b2
[ "MIT" ]
1
2022-03-10T09:24:28.000Z
2022-03-10T09:24:28.000Z
from abc import ABC
from unittest.mock import MagicMock, call
from uuid import uuid4, UUID

import pytest

from erica.domain.repositories.base_repository_interface import BaseRepositoryInterface
from erica.infrastructure.sqlalchemy.repositories.base_repository import BaseRepository, EntityNotFoundError
from tests.infrastructure.sqlalechemy.repositories.mock_repositories import MockDomainModel, MockSchema


class MockBaseRepository(
    BaseRepository[MockDomainModel, MockSchema],
    BaseRepositoryInterface,
    ABC
):
    def __init__(self, db_connection):
        super().__init__(db_connection)
        self.DatabaseEntity = MockSchema
        self.DomainModel = MockDomainModel


class TestBaseRepositoryCreate:

    def test_if_entity_of_type_domain_model_as_input_then_entity_with_correct_data_in_database(self, transactional_session_with_mock_schema):
        repository = MockBaseRepository(db_connection=transactional_session_with_mock_schema)
        repository.create(MockDomainModel(payload={'endboss': 'Melkor'}))
        assert len(transactional_session_with_mock_schema.query(MockSchema).all()) == 1
        assert isinstance(transactional_session_with_mock_schema.query(MockSchema).all()[0], MockSchema)

    def test_if_entity_of_type_domain_model_as_input_then_entity_of_schema_type_is_in_database(self, transactional_session_with_mock_schema):
        repository = MockBaseRepository(db_connection=transactional_session_with_mock_schema)
        repository.create(MockDomainModel(payload={'endboss': 'Melkor'}))
        assert isinstance(transactional_session_with_mock_schema.query(MockSchema).all()[0], MockSchema)

    def test_if_entity_of_type_domain_model_as_input_then_return_schema_type(self, transactional_session_with_mock_schema):
        repository = MockBaseRepository(db_connection=transactional_session_with_mock_schema)
        returned_value = repository.create(MockDomainModel(payload={'endboss': 'Melkor'}))
        assert isinstance(returned_value, MockDomainModel)


class TestBaseRepositoryGet:

    def test_if_entity_of_type_domain_model_as_input_then_return_list_with_schema_repr_of_entities(self, transactional_session_with_mock_schema):
        mock_object = MockDomainModel(payload={'endboss': 'Melkor'})
        list_of_schema_object = [MockSchema(**mock_object.dict()), MockSchema(**mock_object.dict()), MockSchema(**mock_object.dict())]
        transactional_session_with_mock_schema.add(list_of_schema_object[0])
        transactional_session_with_mock_schema.add(list_of_schema_object[1])
        transactional_session_with_mock_schema.add(list_of_schema_object[2])
        transactional_session_with_mock_schema.commit()
        found_entities = MockBaseRepository(db_connection=transactional_session_with_mock_schema).get()
        assert found_entities == list_of_schema_object

    def test_if_table_is_empty_then_return_empty_list(self, transactional_session_with_mock_schema):
        found_entities = MockBaseRepository(db_connection=transactional_session_with_mock_schema).get()
        assert found_entities == []


class TestBaseRepositoryGetById:

    def test_if_entity_in_database_then_return_domain_representation(self, transactional_session_with_mock_schema):
        mock_object = MockDomainModel(payload={'endboss': 'Melkor'})
        schema_object = MockSchema(**mock_object.dict())
        transactional_session_with_mock_schema.add(schema_object)
        transactional_session_with_mock_schema.commit()
        found_entity = MockBaseRepository(db_connection=transactional_session_with_mock_schema).get_by_id(schema_object.id)
        assert found_entity == mock_object

    def test_if_entity_not_in_database_then_raise_exception(self, transactional_session_with_mock_schema):
        mock_object = MockDomainModel(payload={'endboss': 'Melkor'})
        mock_object.request_id = uuid4()
        schema_object = MockSchema(**mock_object.dict())
        with pytest.raises(EntityNotFoundError):
            MockBaseRepository(db_connection=transactional_session_with_mock_schema).get_by_id(schema_object.id)


class TestBaseRepositoryUpdate:

    def test_if_entity_in_database_then_return_updated_domain_representation(self, transactional_session_with_mock_schema):
        mock_object = MockDomainModel(payload={'endboss': 'Melkor'})
        schema_object = MockSchema(**mock_object.dict())
        transactional_session_with_mock_schema.add(schema_object)
        transactional_session_with_mock_schema.commit()
        updated_object = MockDomainModel(payload={'endboss': 'Sauron'})
        updated_entity = MockBaseRepository(db_connection=transactional_session_with_mock_schema).update(schema_object.id, updated_object)
        assert updated_entity == updated_object

    def test_if_entity_in_database_then_update_in_database(self, transactional_session_with_mock_schema):
        mock_object = MockDomainModel(payload={'endboss': 'Melkor'})
        schema_object = MockSchema(**mock_object.dict())
        transactional_session_with_mock_schema.add(schema_object)
        transactional_session_with_mock_schema.commit()
        updated_object = MockDomainModel(payload={'endboss': 'Sauron'})
        MockBaseRepository(db_connection=transactional_session_with_mock_schema).update(schema_object.id, updated_object)
        updated_entry_in_db = transactional_session_with_mock_schema.query(MockSchema).filter(MockSchema.id == schema_object.id).first()
        assert updated_entry_in_db.id == schema_object.id
        assert updated_entry_in_db.payload == {'endboss': 'Sauron'}

    def test_if_entity_not_in_database_then_raise_error(self, transactional_session_with_mock_schema):
        mock_object = MockDomainModel(payload={'endboss': 'Melkor'})
        schema_object = MockSchema(**mock_object.dict())
        updated_object = MockDomainModel(payload={'endboss': 'Sauron'})
        with pytest.raises(EntityNotFoundError):
            MockBaseRepository(db_connection=transactional_session_with_mock_schema).update(schema_object.id, updated_object)

    @pytest.mark.freeze_uuids
    def test_if_only_request_id_changed_then_only_call_update_with_changed_attributes(self, transactional_session_with_mock_schema):
        mock_object = MockDomainModel(payload={'endboss': 'Melkor'})
        schema_object = MockSchema(**mock_object.dict())
        transactional_session_with_mock_schema.add(schema_object)
        transactional_session_with_mock_schema.commit()
        updated_object = MockDomainModel(request_id=uuid4(), payload={'endboss': 'Melkor'})

        # We need a mock object to be able to intercept the call to the update function
        repo = MockBaseRepository(db_connection=transactional_session_with_mock_schema)
        update_mock = MagicMock()
        mocked_get_by_id = MagicMock(side_effect=lambda request_id: MagicMock(
            first=MagicMock(return_value=MockBaseRepository(db_connection=transactional_session_with_mock_schema)._get_by_id(request_id).first()),
            update=update_mock))
        repo._get_by_id = mocked_get_by_id

        repo.update(schema_object.id, updated_object)
        assert update_mock.mock_calls == [call({'request_id': UUID('00000000-0000-0000-0000-000000000000')})]


class TestBaseRepositoryDelete:

    def test_if_entity_in_database_then_delete_from_database(self, transactional_session_with_mock_schema):
        mock_object = MockDomainModel(payload={'endboss': 'Melkor'})
        schema_object = MockSchema(**mock_object.dict())
        transactional_session_with_mock_schema.add(schema_object)
        transactional_session_with_mock_schema.commit()
        MockBaseRepository(db_connection=transactional_session_with_mock_schema).delete(schema_object.id)
        assert len(transactional_session_with_mock_schema.query(MockSchema).all()) == 0

    def test_if_entity_not_in_database_then_raise_error(self, transactional_session_with_mock_schema):
        mock_object = MockDomainModel(payload={'endboss': 'Melkor'})
        schema_object = MockSchema(**mock_object.dict())
        with pytest.raises(EntityNotFoundError):
            MockBaseRepository(db_connection=transactional_session_with_mock_schema).delete(schema_object.id)
49.753012
146
0.782056
955
8,259
6.277487
0.129843
0.153461
0.184153
0.214846
0.758966
0.745955
0.729608
0.696747
0.665555
0.645538
0
0.005901
0.138273
8,259
165
147
50.054545
0.836448
0.009323
0
0.441441
0
0
0.032645
0.004402
0
0
0
0
0.108108
1
0.126126
false
0
0.063063
0
0.243243
0
0
0
0
null
0
1
1
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
4664dc735f138fa97cdbb7b044ec0b44f3a80e27
90
py
Python
setup_files/tests/unitary_tests.py
AnthoOzier/ktest
de9273575986a2b578bdb894703f397e5d7749c7
[ "MIT" ]
null
null
null
setup_files/tests/unitary_tests.py
AnthoOzier/ktest
de9273575986a2b578bdb894703f397e5d7749c7
[ "MIT" ]
null
null
null
setup_files/tests/unitary_tests.py
AnthoOzier/ktest
de9273575986a2b578bdb894703f397e5d7749c7
[ "MIT" ]
null
null
null
import unittest

from ktest import Tester


class TestTester(unittest.TestCase):
    pass  # incomplete test stub; no test methods defined yet
12.857143
36
0.777778
11
90
6.363636
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.177778
90
6
37
15
0.945946
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
467a7428a4820184c04fed21707e80a485235a4b
2,782
py
Python
tests/test_sorting_reversing.py
grantps/superhelp
d8e861bf1ad91571ac23b9c833a8cd461bb1952f
[ "MIT" ]
27
2020-05-17T20:48:43.000Z
2022-01-08T21:32:30.000Z
tests/test_sorting_reversing.py
grantps/superhelp
d8e861bf1ad91571ac23b9c833a8cd461bb1952f
[ "MIT" ]
null
null
null
tests/test_sorting_reversing.py
grantps/superhelp
d8e861bf1ad91571ac23b9c833a8cd461bb1952f
[ "MIT" ]
null
null
null
from textwrap import dedent

from tests import check_as_expected

ROOT = 'superhelp.helpers.sorting_reversing_help.'

def test_misc():
    test_conf = [
        (
            dedent("""\
            pet = 'cat'
            """),
            {
                ROOT + 'sorting_reversing_overview': 0,
                ROOT + 'list_sort_as_value': 0,
            }
        ),
        (
            dedent("""\
            pets = sorted(['cat', 'dog', 'budgie'])
            """),
            {
                ROOT + 'sorting_reversing_overview': 1,
                ROOT + 'list_sort_as_value': 0,
            }
        ),
        (
            dedent("""\
            my_pets = sorted(['cat', 'dog', 'budgie'])
            your_pets = sorted(['cat', 'dog', 'budgie'])
            """),
            {
                ROOT + 'sorting_reversing_overview': 2,
                ROOT + 'list_sort_as_value': 0,
            }
        ),
        (
            dedent("""\
            my_pets = reversed(['cat', 'dog', 'budgie'])
            your_pets = sorted(['cat', 'dog', 'budgie'])
            """),
            {
                ROOT + 'sorting_reversing_overview': 2,
                ROOT + 'list_sort_as_value': 0,
            }
        ),
        (
            dedent("""\
            for i in range(2):
                my_pets = reversed(['cat', 'dog', 'budgie'])
                your_pets = sorted(['cat', 'dog', 'budgie'])
            """),
            {
                ROOT + 'sorting_reversing_overview': 1,
                ROOT + 'list_sort_as_value': 0,
            }
        ),
        (
            dedent("""\
            demo = [1, 2].sort()
            """),
            {
                ROOT + 'sorting_reversing_overview': 1,
                ROOT + 'list_sort_as_value': 1,
            }
        ),
        (
            dedent("""\
            for i in range(2):
                demo = [1, 2].sort()
            """),
            {
                ROOT + 'sorting_reversing_overview': 1,
                ROOT + 'list_sort_as_value': 1,
            }
        ),
        (
            dedent("""\
            my_pets = reversed(['cat', 'dog', 'budgie'])
            for i in range(2):
                demo = [1, 2].sort()
            """),
            {
                ROOT + 'sorting_reversing_overview': 2,
                ROOT + 'list_sort_as_value': 1,
            }
        ),
        (
            dedent("""\
            hours, mins, secs = Utils._get_time_parts_since_t1(t1)
            """),
            {
                ROOT + 'sorting_reversing_overview': 0,
                ROOT + 'list_sort_as_value': 0,
            }
        ),
    ]
    check_as_expected(test_conf, execute_code=True)
    check_as_expected(test_conf, execute_code=False)

# test_misc()
27.27451
66
0.387491
233
2,782
4.317597
0.223176
0.159046
0.178926
0.250497
0.82008
0.814115
0.802187
0.695825
0.695825
0.689861
0
0.019741
0.471963
2,782
101
67
27.544554
0.665078
0.003954
0
0.59375
0
0
0.473456
0.111593
0
0
0
0
0
1
0.010417
false
0
0.020833
0
0.03125
0
0
0
0
null
0
0
1
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
4686335ba402d0083d7da29190b97e3ce3edc1fa
169
py
Python
allauth/socialaccount/providers/doximity/urls.py
mina-gaid/scp-stock-forcasting
38e1cd303d4728a987df117f666ce194e241ed1a
[ "MIT" ]
1
2018-04-06T21:36:59.000Z
2018-04-06T21:36:59.000Z
allauth/socialaccount/providers/doximity/urls.py
mina-gaid/scp-stock-forcasting
38e1cd303d4728a987df117f666ce194e241ed1a
[ "MIT" ]
6
2020-06-05T18:44:19.000Z
2022-01-13T00:48:56.000Z
allauth/socialaccount/providers/doximity/urls.py
mina-gaid/scp-stock-forcasting
38e1cd303d4728a987df117f666ce194e241ed1a
[ "MIT" ]
null
null
null
from allauth.socialaccount.providers.oauth2.urls import default_urlpatterns

from .provider import DoximityProvider

urlpatterns = default_urlpatterns(DoximityProvider)
28.166667
75
0.87574
17
169
8.588235
0.647059
0.246575
0
0
0
0
0
0
0
0
0
0.00641
0.076923
169
5
76
33.8
0.929487
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
46aaeb7c961aca1f90d800327c55dcad41831464
96
py
Python
services/postgres/shared_functions/run_migrations.py
buckmaxwell/jukebox
3b04fd1e38e9d3f57d86cf6695bcc3d8e8126681
[ "MIT" ]
1
2020-08-11T16:19:49.000Z
2020-08-11T16:19:49.000Z
services/postgres/shared_functions/run_migrations.py
buckmaxwell/jukebox
3b04fd1e38e9d3f57d86cf6695bcc3d8e8126681
[ "MIT" ]
39
2020-05-06T05:27:19.000Z
2022-02-27T08:35:36.000Z
services/postgres/shared_functions/run_migrations.py
buckmaxwell/jukebox
3b04fd1e38e9d3f57d86cf6695bcc3d8e8126681
[ "MIT" ]
null
null
null
#!/usr/bin/env python3

from migrator import run_migrations

run_migrations("shared_functions")
16
35
0.8125
13
96
5.769231
0.846154
0.346667
0
0
0
0
0
0
0
0
0
0.011494
0.09375
96
5
36
19.2
0.850575
0.21875
0
0
0
0
0.216216
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
46af8406c76913a5bf75c4e378946405e1b2dd88
63
py
Python
03/3 - The Great Escape.py
Surferlul/csc-python-solutions
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
[ "MIT" ]
null
null
null
03/3 - The Great Escape.py
Surferlul/csc-python-solutions
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
[ "MIT" ]
null
null
null
03/3 - The Great Escape.py
Surferlul/csc-python-solutions
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
[ "MIT" ]
null
null
null
print("A double-quote's escaped using a backslash, e.g. \\\"")
31.5
62
0.666667
11
63
3.818182
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.126984
63
1
63
63
0.763636
0
0
0
0
0
0.825397
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
314021fe8c6e889618a59529e0adf623c31c3083
113
py
Python
bnbphoneticparser/__init__.py
porimol/BengaliPhoneticParser
27ed428f92adc5b2662e26b8652d2b611f1ab33c
[ "MIT" ]
18
2018-04-06T17:10:37.000Z
2021-01-29T13:09:19.000Z
bnbphoneticparser/__init__.py
porimol/BengaliPhoneticParser
27ed428f92adc5b2662e26b8652d2b611f1ab33c
[ "MIT" ]
4
2018-04-07T05:59:41.000Z
2020-10-27T06:20:56.000Z
bnbphoneticparser/__init__.py
porimol/BengaliPhoneticParser
27ed428f92adc5b2662e26b8652d2b611f1ab33c
[ "MIT" ]
8
2018-04-06T17:03:23.000Z
2021-02-08T15:33:05.000Z
# coding=utf-8
from .bengalitobanglish import BengaliToBanglish
from .banglishtobengali import BanglishToBengali
28.25
48
0.867257
11
113
8.909091
0.636364
0
0
0
0
0
0
0
0
0
0
0.009709
0.088496
113
3
49
37.666667
0.941748
0.106195
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
316a040544c141e22bcd9dda11ce231257174bd4
379
py
Python
two_fer.py
caiocampos/Python-Random-Stuff
5e9005e7dec776e9af0c407d063624d041d3c84c
[ "MIT" ]
null
null
null
two_fer.py
caiocampos/Python-Random-Stuff
5e9005e7dec776e9af0c407d063624d041d3c84c
[ "MIT" ]
null
null
null
two_fer.py
caiocampos/Python-Random-Stuff
5e9005e7dec776e9af0c407d063624d041d3c84c
[ "MIT" ]
null
null
null
def two_fer_va(name=None):
    if name is None:
        name = 'you'
    return f'One for {name}, one for me.'

def two_fer_vb(name=None):
    return f'One for {"you" if name is None else name}, one for me.'

def two_fer_vc(name=None):
    return f'One for {name or "you"}, one for me.'

def two_fer_vd(name='you'):
    return f'One for {name}, one for me.'

two_fer = two_fer_vd
25.266667
68
0.641161
75
379
3.093333
0.253333
0.206897
0.155172
0.224138
0.676724
0.659483
0.405172
0.275862
0.275862
0.275862
0
0
0.224274
379
15
69
25.266667
0.789116
0
0
0.181818
0
0
0.394737
0
0
0
0
0
0
1
0.363636
false
0
0
0.272727
0.727273
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
3175ed5ccd91a2f9a965b022fd6e977698b559e3
94
py
Python
api/models/__init__.py
p0lygun/astounding-arapaimas
f82dbb2ec75ab7d98da6a46a1276c12583048b3c
[ "MIT" ]
null
null
null
api/models/__init__.py
p0lygun/astounding-arapaimas
f82dbb2ec75ab7d98da6a46a1276c12583048b3c
[ "MIT" ]
19
2021-07-11T10:02:08.000Z
2021-07-20T14:58:29.000Z
api/models/__init__.py
p0lygun/astounding-arapaimas
f82dbb2ec75ab7d98da6a46a1276c12583048b3c
[ "MIT" ]
null
null
null
from api.models.game import Game  # noqa: F401
from api.models.user import User  # noqa: F401
31.333333
46
0.744681
16
94
4.375
0.5
0.2
0.371429
0
0
0
0
0
0
0
0
0.076923
0.170213
94
2
47
47
0.820513
0.223404
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
31884eca75e1a3469faae7b5ec3c052da83623ad
209
py
Python
TradzQAI/core/__init__.py
kkuette/AI_project
1f46cb2536b24cb3716250f1e9705daa76af4f60
[ "Apache-2.0" ]
164
2017-11-24T13:07:04.000Z
2022-03-10T04:54:46.000Z
TradzQAI/core/__init__.py
kkuette/AI_project
1f46cb2536b24cb3716250f1e9705daa76af4f60
[ "Apache-2.0" ]
21
2018-09-29T10:27:10.000Z
2019-06-12T07:01:58.000Z
TradzQAI/core/__init__.py
kkuette/AI_project
1f46cb2536b24cb3716250f1e9705daa76af4f60
[ "Apache-2.0" ]
49
2018-05-09T17:28:52.000Z
2022-02-27T04:50:45.000Z
from .environnement import Local_env
from .environnement import Live_env

from .worker import Local_Worker
from .worker import Live_Worker

from .session import Local_session
from .session import Live_session
23.222222
36
0.84689
30
209
5.7
0.266667
0.192982
0.269006
0
0
0
0
0
0
0
0
0
0.124402
209
8
37
26.125
0.934426
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
318c474bee597309cbea1796dfd3e9c2886e6e75
173
py
Python
custom_components/local_luftdaten/__init__.py
Aulos/local_luftdaten
b7ea6bc60605579b50046c191788bba9e8135889
[ "MIT" ]
163
2020-08-01T12:19:46.000Z
2022-03-28T09:04:57.000Z
custom_components/local_luftdaten/__init__.py
Aulos/local_luftdaten
b7ea6bc60605579b50046c191788bba9e8135889
[ "MIT" ]
81
2020-08-04T00:28:46.000Z
2022-03-29T15:48:51.000Z
custom_components/local_luftdaten/__init__.py
Aulos/local_luftdaten
b7ea6bc60605579b50046c191788bba9e8135889
[ "MIT" ]
28
2020-08-02T12:02:24.000Z
2022-03-22T00:07:34.000Z
""" Support for local Luftdaten sensors. Copyright (c) 2019 Mario Villavecchia Licensed under MIT. All rights reserved. https://github.com/lichtteil/local_luftdaten/ """
17.3
45
0.768786
22
173
6
0.909091
0.212121
0
0
0
0
0
0
0
0
0
0.02649
0.127168
173
9
46
19.222222
0.847682
0.947977
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
319b9adbab5a4b6eb430e22e6d8084d79c8b32fa
1,132
py
Python
pythonScript/process/__init__.py
ulsdevteam/ccvgd-database
b9068d4d50ed9140a8a0d320363bf244a73fdb9f
[ "MIT" ]
1
2021-09-08T14:49:52.000Z
2021-09-08T14:49:52.000Z
pythonScript/process/__init__.py
ulsdevteam/ccvgd-database
b9068d4d50ed9140a8a0d320363bf244a73fdb9f
[ "MIT" ]
3
2020-10-22T20:02:46.000Z
2021-07-28T20:53:28.000Z
pythonScript/process/__init__.py
ulsdevteam/ccvgd-database
b9068d4d50ed9140a8a0d320363bf244a73fdb9f
[ "MIT" ]
null
null
null
from process.process_csv_Village_Information import create_csv_village_and_address, create_csv_city_county_province
from process.process_Category import create_csv_category
from process.process_csv_Economy_Yearly_Range import create_csv_economy_economyUnit
from process.process_csv_Education_Yearly_Range import create_csv_education_educationUnit
from process.process_csv_Natural_Disasters import create_csv_naturalDisasters
from process.process_csv_Ethnic_Groups_Yearly_Range import create_csv_ethnic_ethnicUnit
from process.process_csv_Last_Name import create_csv_lastName_lastNameCategory
from process.process_csv_Familyplanning_Yearly_Range import create_csv_familyplanning_familyplanningUnit
from process.process_csv_Military_Yearly_Range import create_csv_military_militaryUnit
from process.process_csv_Natrual_Environment import create_csv_natrualenvironment_Unit
from process.process_csv_Population_Yearly_Range import create_csv_population_populationUnit
from process.process_csv_villagegeography import create_csv_villagegeography_Unit
from process.process_csv_Gazetteer_Information import create_csv_gazetteerInformation
80.857143
115
0.940813
150
1,132
6.54
0.28
0.12844
0.238532
0.256881
0.20999
0
0
0
0
0
0
0
0.04682
1,132
14
116
80.857143
0.909175
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
31e4a493d5ac20fbe86f141b3a761e3c8813b9f1
34
py
Python
grafana_script/__init__.py
NETHINKS/cmdbToGrafana
056e872a26c3578562546aa94a7d71bd6e78d7a2
[ "MIT" ]
1
2019-12-17T14:45:57.000Z
2019-12-17T14:45:57.000Z
grafana_script/__init__.py
pkremser/cmdbToGrafana
056e872a26c3578562546aa94a7d71bd6e78d7a2
[ "MIT" ]
null
null
null
grafana_script/__init__.py
pkremser/cmdbToGrafana
056e872a26c3578562546aa94a7d71bd6e78d7a2
[ "MIT" ]
null
null
null
# pylint: disable=too-many-locals
17
33
0.764706
5
34
5.2
1
0
0
0
0
0
0
0
0
0
0
0
0.088235
34
1
34
34
0.83871
0.911765
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
73005427c76fcef869587323f0bd319ac9d917a8
45
py
Python
processor/__init__.py
SeongSuKim95/Re-ID-baseline
b145bba712492f7a93cd3771e007fa694b1c44b6
[ "MIT" ]
297
2021-03-26T14:29:47.000Z
2021-09-10T11:33:56.000Z
PASS_transreid/processor/__init__.py
CASIA-IVA-Lab/PASS-reID
46dc6d25f4396e35ac1a766ad2dcaa580beccf15
[ "Apache-2.0" ]
31
2019-06-13T02:03:22.000Z
2021-12-30T03:55:46.000Z
PASS_transreid/processor/__init__.py
CASIA-IVA-Lab/PASS-reID
46dc6d25f4396e35ac1a766ad2dcaa580beccf15
[ "Apache-2.0" ]
71
2019-06-17T01:10:08.000Z
2022-03-03T06:51:48.000Z
from .processor import do_train, do_inference
45
45
0.866667
7
45
5.285714
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.088889
45
1
45
45
0.902439
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
7301d5874a512e00fff36892577cdea81f0d9b9b
149
py
Python
imdb_rest/movies/__init__.py
ExtRemoSolutions/imdb-importer
bec5272c7f457fc7412ebd7051bd041dd1288d81
[ "MIT" ]
null
null
null
imdb_rest/movies/__init__.py
ExtRemoSolutions/imdb-importer
bec5272c7f457fc7412ebd7051bd041dd1288d81
[ "MIT" ]
null
null
null
imdb_rest/movies/__init__.py
ExtRemoSolutions/imdb-importer
bec5272c7f457fc7412ebd7051bd041dd1288d81
[ "MIT" ]
null
null
null
from flask import Blueprint

# Create blueprint for movies module
mod_movies = Blueprint('movies', __name__)


from imdb_rest.movies.movies import *
18.625
42
0.791946
20
149
5.6
0.6
0
0
0
0
0
0
0
0
0
0
0
0.14094
149
7
43
21.285714
0.875
0.228188
0
0
0
0
0.053097
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
5
732a48903b514a9e4b804fa4ab7bd97ca2b78095
3,316
py
Python
structure/migrations/0033_auto_20201110_0856.py
pszgaspar/protwis
4989a67175ef3c95047d795c843cf6b9cf4141fa
[ "Apache-2.0" ]
21
2016-01-20T09:33:14.000Z
2021-12-20T19:19:45.000Z
structure/migrations/0033_auto_20201110_0856.py
pszgaspar/protwis
4989a67175ef3c95047d795c843cf6b9cf4141fa
[ "Apache-2.0" ]
75
2016-02-26T16:29:58.000Z
2022-03-21T12:35:13.000Z
structure/migrations/0033_auto_20201110_0856.py
pszgaspar/protwis
4989a67175ef3c95047d795c843cf6b9cf4141fa
[ "Apache-2.0" ]
77
2016-01-22T08:44:26.000Z
2022-02-01T15:54:56.000Z
# Generated by Django 3.0.3 on 2020-11-10 07:56

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('structure', '0032_structure_author_state'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='structuremodelrmsd',
            name='TM_all',
        ),
        migrations.RemoveField(
            model_name='structuremodelrmsd',
            name='date',
        ),
        migrations.RemoveField(
            model_name='structuremodelrmsd',
            name='overall_all',
        ),
        migrations.RemoveField(
            model_name='structuremodelrmsd',
            name='overall_backbone',
        ),
        migrations.RemoveField(
            model_name='structuremodelrmsd',
            name='pdb',
        ),
        migrations.RemoveField(
            model_name='structuremodelrmsd',
            name='service',
        ),
        migrations.RemoveField(
            model_name='structuremodelrmsd',
            name='version',
        ),
        migrations.AddField(
            model_name='structuremodelrmsd',
            name='ECL1',
            field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
        ),
        migrations.AddField(
            model_name='structuremodelrmsd',
            name='ECL2',
            field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
        ),
        migrations.AddField(
            model_name='structuremodelrmsd',
            name='H8',
            field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
        ),
        migrations.AddField(
            model_name='structuremodelrmsd',
            name='ICL1',
            field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
        ),
        migrations.AddField(
            model_name='structuremodelrmsd',
            name='ICL2',
            field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
        ),
        migrations.AddField(
            model_name='structuremodelrmsd',
            name='binding_pocket',
            field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
        ),
        migrations.AddField(
            model_name='structuremodelrmsd',
            name='main_template',
            field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='main_template', to='structure.Structure'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='structuremodelrmsd',
            name='target_structure',
            field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='target_structure', to='structure.Structure'),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='structuremodelrmsd',
            name='TM_backbone',
            field=models.DecimalField(decimal_places=1, max_digits=2, null=True),
        ),
        migrations.AlterField(
            model_name='structuremodelrmsd',
            name='homology_model',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='structure.StructureModel'),
        ),
    ]
34.905263
154
0.593486
297
3,316
6.454545
0.23569
0.079812
0.239437
0.274909
0.805425
0.803339
0.606155
0.44288
0.44288
0.44288
0
0.016281
0.29614
3,316
94
155
35.276596
0.805056
0.013571
0
0.681818
1
0
0.175283
0.015601
0
0
0
0
0
1
0
false
0
0.022727
0
0.056818
0
0
0
0
null
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
7348275eb96b8773c594f909a3edcdd1f53bfa85
61
py
Python
mnistetude/network/__init__.py
aram-father/mnist-etude
3f9196a7ae3447c3412507896c3dc3ae81f9d7d0
[ "BSD-2-Clause" ]
null
null
null
mnistetude/network/__init__.py
aram-father/mnist-etude
3f9196a7ae3447c3412507896c3dc3ae81f9d7d0
[ "BSD-2-Clause" ]
1
2022-03-18T03:14:13.000Z
2022-03-18T03:14:13.000Z
mnistetude/network/__init__.py
aram-father/mnist-etude
3f9196a7ae3447c3412507896c3dc3ae81f9d7d0
[ "BSD-2-Clause" ]
null
null
null
from .multi_layer_perceptron import *
from .inetwork import *
30.5
37
0.819672
8
61
6
0.75
0
0
0
0
0
0
0
0
0
0
0
0.114754
61
2
38
30.5
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
735da70919b85a80d0956dfe607c01ea1fe0addb
142
py
Python
tubelib/src/twitch/__init__.py
FoxyRabbit67/enigma2-plugins
f6b94012726931fdf28e80a26226aec612b350de
[ "Linux-OpenIB" ]
41
2016-01-21T17:54:44.000Z
2021-06-26T05:54:41.000Z
tubelib/src/twitch/__init__.py
FoxyRabbit67/enigma2-plugins
f6b94012726931fdf28e80a26226aec612b350de
[ "Linux-OpenIB" ]
22
2016-11-16T11:25:26.000Z
2021-12-13T09:13:06.000Z
tubelib/src/twitch/__init__.py
FoxyRabbit67/enigma2-plugins
f6b94012726931fdf28e80a26226aec612b350de
[ "Linux-OpenIB" ]
62
2016-02-05T22:55:48.000Z
2022-03-12T21:48:22.000Z
import sys
sys.argv = ["enigma2"] #HACKFIX FOR argparse reading sys.argv[0] wihtout checking sys.argc

import TwitchChannelListServiceProvider
35.5
90
0.816901
18
142
6.444444
0.722222
0.12069
0
0
0
0
0
0
0
0
0
0.015748
0.105634
142
4
91
35.5
0.897638
0.464789
0
0
0
0
0.092105
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
b425949cd1e98727d096486dbda047346d5a060a
231
py
Python
takeoff/generators/api/__init__.py
themarceloribeiro/takeoff-py
dfa4c58ccd9146f82ffe4d5f76aa2a258d3c0d7c
[ "MIT" ]
null
null
null
takeoff/generators/api/__init__.py
themarceloribeiro/takeoff-py
dfa4c58ccd9146f82ffe4d5f76aa2a258d3c0d7c
[ "MIT" ]
null
null
null
takeoff/generators/api/__init__.py
themarceloribeiro/takeoff-py
dfa4c58ccd9146f82ffe4d5f76aa2a258d3c0d7c
[ "MIT" ]
null
null
null
from .api_project_generator import ApiProjectGenerator
from .api_model_generator import ApiModelGenerator
from .api_authentication_generator import ApiAuthenticationGenerator
from .api_resource_generator import ApiResourceGenerator
57.75
68
0.917749
24
231
8.5
0.5
0.137255
0
0
0
0
0
0
0
0
0
0
0.064935
231
4
69
57.75
0.944444
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
b4470b68277369bf054845955970e8a850527240
72
py
Python
client/makieta.py
ILoveMuffins/WWW-MVC-_test
d2085ca555c343302fff71e547d9b0aba704026a
[ "MIT" ]
null
null
null
client/makieta.py
ILoveMuffins/WWW-MVC-_test
d2085ca555c343302fff71e547d9b0aba704026a
[ "MIT" ]
null
null
null
client/makieta.py
ILoveMuffins/WWW-MVC-_test
d2085ca555c343302fff71e547d9b0aba704026a
[ "MIT" ]
null
null
null
class Makieta:

    def __init__(self, dane):
        self.dane = dane
12
29
0.597222
9
72
4.333333
0.666667
0.410256
0
0
0
0
0
0
0
0
0
0
0.305556
72
5
30
14.4
0.78
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
b45aadfc8eebaa435e326ec73097818b8266bd3e
121
py
Python
src/walking_habits/exceptions.py
V3RON/walking-habits
5c06fb4e53816f9e016bab034262a11bbade7ae3
[ "MIT" ]
null
null
null
src/walking_habits/exceptions.py
V3RON/walking-habits
5c06fb4e53816f9e016bab034262a11bbade7ae3
[ "MIT" ]
null
null
null
src/walking_habits/exceptions.py
V3RON/walking-habits
5c06fb4e53816f9e016bab034262a11bbade7ae3
[ "MIT" ]
1
2020-01-09T20:55:39.000Z
2020-01-09T20:55:39.000Z
class Walking_HabitsBaseException(Exception):
    pass


class InvalidLayoutError(Walking_HabitsBaseException):
    pass
17.285714
54
0.818182
10
121
9.7
0.6
0.536082
0
0
0
0
0
0
0
0
0
0
0.132231
121
6
55
20.166667
0.92381
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
81ebc6cdcdd305f009eca57223c6dd9f9edc69db
138
py
Python
tests/basics/int_bytes_long.py
ARF1/micropython
dacba223889746ea809dc9cbaa563a2573712f15
[ "MIT" ]
8
2017-01-08T19:45:01.000Z
2020-09-07T04:39:10.000Z
tests/basics/int_bytes_long.py
ARF1/micropython
dacba223889746ea809dc9cbaa563a2573712f15
[ "MIT" ]
null
null
null
tests/basics/int_bytes_long.py
ARF1/micropython
dacba223889746ea809dc9cbaa563a2573712f15
[ "MIT" ]
6
2017-10-26T20:07:56.000Z
2021-05-27T00:19:51.000Z
b = bytes(range(20))
il = int.from_bytes(b, "little")
ib = int.from_bytes(b, "big")
print(il)
print(ib)

print(il.to_bytes(20, "little"))
17.25
32
0.65942
26
138
3.384615
0.461538
0.159091
0.272727
0.295455
0
0
0
0
0
0
0
0.032787
0.115942
138
7
33
19.714286
0.688525
0
0
0
0
0
0.108696
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
81ecb8c486d3c3fb068c758c8a47c2d3e553e204
946
py
Python
swagger/broker-client/test/test_api_response.py
tarehart/RLBotTwitchBroker
3c82a2d08a97faa2406fa1c3ffbb196a10416152
[ "MIT" ]
2
2020-05-27T17:41:35.000Z
2020-05-29T20:59:04.000Z
swagger/broker-client/test/test_api_response.py
tarehart/RLBotTwitchBroker
3c82a2d08a97faa2406fa1c3ffbb196a10416152
[ "MIT" ]
1
2020-10-13T23:45:06.000Z
2020-10-13T23:45:06.000Z
swagger/broker-client/test/test_api_response.py
tarehart/RLBotTwitchBroker
3c82a2d08a97faa2406fa1c3ffbb196a10416152
[ "MIT" ]
2
2020-06-07T14:52:33.000Z
2020-06-22T12:14:39.000Z
# coding: utf-8

"""
    RLBot Twitch Broker

    Allows custom Rocket League bots to register themselves with a central broker.  # noqa: E501

    OpenAPI spec version: 1.0.0
    Contact: rlbotofficial@gmail.com
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from __future__ import absolute_import

import unittest

import rlbot_twitch_broker_client
from models.api_response import ApiResponse  # noqa: E501
from rlbot_twitch_broker_client.rest import ApiException


class TestApiResponse(unittest.TestCase):
    """ApiResponse unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testApiResponse(self):
        """Test ApiResponse"""
        # FIXME: construct object with mandatory attributes with example values
        # model = rlbot_twitch_broker_client.models.api_response.ApiResponse()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
23.65
96
0.714588
115
946
5.669565
0.6
0.067485
0.104294
0.105828
0
0
0
0
0
0
0
0.017287
0.205074
946
39
97
24.25641
0.849734
0.489429
0
0.214286
0
0
0.018141
0
0
0
0
0.025641
0
1
0.214286
false
0.214286
0.357143
0
0.642857
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
1
0
1
0
0
5
c304273810300198a7ca5b9e582f7f034a97ada5
38
py
Python
particles/foreign_distributions/__init__.py
maxjcohen/particles
359128679cdffe3985adad4028db4cf7b3aed076
[ "MIT" ]
null
null
null
particles/foreign_distributions/__init__.py
maxjcohen/particles
359128679cdffe3985adad4028db4cf7b3aed076
[ "MIT" ]
null
null
null
particles/foreign_distributions/__init__.py
maxjcohen/particles
359128679cdffe3985adad4028db4cf7b3aed076
[ "MIT" ]
null
null
null
from .torch import torch_distribution
19
37
0.868421
5
38
6.4
0.8
0
0
0
0
0
0
0
0
0
0
0
0.105263
38
1
38
38
0.941176
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
c30ab9b78b790ad75c2cd17722fc96b44494e134
14,306
py
Python
permutations.py
YanshuHu/combinatorics-oj1
551286aaac63094b74a3bbb00462a1bd696608fd
[ "Apache-2.0" ]
null
null
null
permutations.py
YanshuHu/combinatorics-oj1
551286aaac63094b74a3bbb00462a1bd696608fd
[ "Apache-2.0" ]
null
null
null
permutations.py
YanshuHu/combinatorics-oj1
551286aaac63094b74a3bbb00462a1bd696608fd
[ "Apache-2.0" ]
null
null
null
# lst1 = [8,3,9,6,4,7,5,2,1] # lst2 = [10,11,12,8,3,9,6,4,7,5,2,1] # lst3 = [8,9,3,6,7,4,5,2,1] # lst4 = [8,3,9,6,4,7,5,2,1] # lst = [7, 2, 6, 4, 2, 3, 2, 1] # lst5 = [14, 4, 8, 17, 16, 2, 12, 6, 18, 3, 10, 13, 9, 5, 1, 11, 19, 15, 7, 20] # lst6 = [1, 17, 11, 20, 7, 15, 13, 10, 6, 16, 12, 19, 8, 18, 5, 3, 4, 14, 9, 2] # lst7 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20] # k3 = [2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0] # #k = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,2,0] # k1 = [0,0,0,0,4,0,2,0] # k2 = [0,0,0,0,0,0,0,1] #[9, 1, 1] #[8, 3, 9, 6, 4, 7, 5, 2, 1] # 9 1 1 # 8 3 9 6 4 7 5 2 1 def main(): variable1 = input() variable2 = input() a = variable1.split() b = variable2.split() first_line = [] second_line = [] # k = variable1.split() # print(k) #get inputs for i in a: first_line.append(int(i)) #print(int(i)) for i in b: second_line.append(int(i)) #what kind of order add = True type = first_line[1] if first_line[2] >= 0: add = True if first_line[2] < 0: add = False lst = second_line k = abs(int(first_line[2])) k = [int(i) for i in str(k)] # missing_k = 0 missing_k = 20 - len(k) k = k[::-1] for i in range(missing_k): k.append(0) k = k[::-1] if type == 1: if add == True: output = dict_order(add_1(second_line, k)) else: output = dict_order(subtract_1(second_line, k)) if type == 2: if add == True: output = order_2(add_2(second_line, k)) else: output = order_2(subtract_2(second_line, k)) if type == 3: if add == True: output = order_3(add_3(second_line,k)) else: output = order_3(subtract_3(second_line, k)) final = ' '.join(str(i) for i in output) print(final) #print(lst) #print(k) #print(add) #order_3(add_3(lst1 , k)) #order_2(add_2(lst7, k3)) #shift_3(lst1) ############### 字典 def shift_1(lst): new_lst = lst shifted_num = [] while new_lst: count = 0 compare = new_lst[0] for i in range(len(new_lst)): if new_lst[i] < new_lst[0]: count += 1 shifted_num.append(count) del new_lst[0] shifted_num.pop() #print("shifted_num: ", shifted_num) return shifted_num def add_1(lst1, k): shifted_num = shift_1(lst1) limit = len(shifted_num) + 1 limit_list = [] added_list = [] k = k[::-1] for i in range(limit): limit_list.append(i + 1) limit_list.pop(0) #the list i want to work with true_list = shifted_num[::-1] #print("here",true_list) list_len = len(true_list) for i in range(list_len): if (true_list[i]+k[i]) > limit_list[i]: true_list[i+1] += 1 a = (true_list[i] + k[i]) - (limit_list[i]) added_list.append(a) elif (true_list[i]+k[i]) == limit_list[i]: if len(true_list) != 1: true_list[i+1] += 1 else: true_list.append(1) added_list.append(0) else: added_list.append(true_list[i] + k[i]) #print("here", true_list ) #print("limit_list: ", limit_list) #print("added_list: ", added_list[::-1]) return added_list[::-1] def helper1(lst, start): new_lst = lst[start:] index_of_zeros = [] index_of_carry = [] for i in range(len(new_lst)): if new_lst[i] == 0: index_of_zeros.append(i+ start) elif new_lst[i] > 0: index_of_carry.append(i) #print(index_of_carry, start) index_of_carry = index_of_carry[1] + start a = [] a.append([index_of_carry]) a.append(index_of_zeros) return a def subtract_1(lst, k): shifted_num = shift_1(lst) limit_list = [] k = k[::-1] limit = len(shifted_num) + 1 for i in range(limit): limit_list.append(i + 1) limit_list.pop(0) top = limit_list lst = shifted_num[::-1] subtract_list = [] list_len = len(lst) next_borrow = 1 #print(top) for i in range(list_len): if (lst[i] - k[i]) >= 0: subtract_list.append(lst[i] - k[i]) while (lst[i] - k[i]) < 0: if lst[i+next_borrow] > 0: lst[i+next_borrow] 
-= 1 lst[i] = lst[i] + top[i] if (lst[i] - k[i]) >= 0: subtract_list.append(lst[i] - k[i]) elif lst[i+next_borrow] == 0: a = helper1(lst, i) index_of_carry = a[0][0] index_of_zeros = a[1][0] temp = lst[:index_of_carry] lst[index_of_carry] -= 1 for j in range(len(temp)): lst[j] += top[j] if (lst[i] - k[i]) > 0: subtract_list.append(lst[i] - k[i]) #print("subtract_list:", subtract_list[::-1]) return subtract_list[::-1] def dict_order(lst): limit = len(lst) + 1 temp = [] for i in range(limit): temp.append(-1) for i in range(len(lst)): bigger = False current_carry = lst[i] + 1 if i == 0: temp[0] = current_carry for j in temp[:i]: if j <= (current_carry): current_carry += 1 temp[i] = current_carry j = -10 elif current_carry not in temp[:i]: temp[i] = current_carry while (current_carry) in temp[:i]: current_carry += 1 temp[i] = current_carry left = [] no = [] for i in range(len(temp)): left.append(i+1) for i in left: if i not in temp: no.append(i) for i in range(len(temp)): if temp[i] == -1: temp[i] = no[0] print(temp) return temp ############### ###############加 def shift_2(lst): new_lst = lst shifted_num = [] #print(new_lst) while new_lst: count = 0 biggest = max(new_lst) biggest_index = new_lst.index(biggest) # for i in range(len(new_lst[biggest_index:])): # #print(new_lst[biggest_index:]) # if new_lst[i] < biggest: # #print(new_lst[i],biggest) # count += 1 for i in new_lst[biggest_index+1:]: #print(new_lst[biggest_index:]) if i < biggest: #print(new_lst[i],biggest) count += 1 shifted_num.append(count) del new_lst[biggest_index] shifted_num.pop() #print("shifted_num: ", shifted_num) return shifted_num[::-1] ##### add them using the algorithm def add_2(lst, k): shifted_num = shift_2(lst) limit = len(shifted_num) + 1 limit_list = [] added_list = [] k = k[::-1] for i in range(limit): limit_list.append(i + 1) limit_list.pop(0) #the list i want to work with true_list = shifted_num limit_list = limit_list #print("limit_list: ", limit_list) #print("true_list: ", true_list) #print("k: ", k) list_len = len(true_list) for i in range(list_len): if (true_list[i]+k[i]) > limit_list[i]: true_list[i+1] += 1 a = (true_list[i] + k[i]) - (limit_list[i]) added_list.append(a) elif (true_list[i]+k[i]) == limit_list[i]: if len(true_list) != 1: true_list[i+1] += 1 else: true_list.append(1) added_list.append(0) else: added_list.append(true_list[i] + k[i]) #print(added_list[::-1]) return added_list[::-1] def helper2(lst, start): new_lst = lst[start:] index_of_zeros = [] index_of_carry = [] for i in range(len(new_lst)): if new_lst[i] == 0: index_of_zeros.append(i+ start) elif new_lst[i] > 0: index_of_carry.append(i) index_of_carry = index_of_carry[1] + start a = [] a.append([index_of_carry]) a.append(index_of_zeros) #print("a:", a) return a def subtract_2(lst, k): shifted_num = shift_2(lst) limit_list = [] shifted_num = shifted_num[::-1] k = k[::-1] limit = len(shifted_num) + 1 for i in range(limit): limit_list.append(i + 1) limit_list.pop(0) top = limit_list lst = shifted_num[::-1] subtract_list = [] list_len = len(lst) next_borrow = 1 #print("top: ",top) #print(k) #print(lst) for i in range(list_len): if (lst[i] - k[i]) >= 0: subtract_list.append(lst[i] - k[i]) while (lst[i] - k[i]) < 0: if lst[i+next_borrow] > 0: lst[i+next_borrow] -= 1 lst[i] = lst[i] + top[i] if (lst[i] - k[i]) >= 0: subtract_list.append(lst[i] - k[i]) elif lst[i+next_borrow] == 0: a = helper1(lst, i) index_of_carry = a[0][0] index_of_zeros = a[1][0] temp = lst[:index_of_carry] lst[index_of_carry] -= 1 for j in range(len(temp)): lst[j] += top[j] 
if (lst[i] - k[i]) > 0: subtract_list.append(lst[i] - k[i]) print("subtract_list:", subtract_list) return subtract_list[::-1] def find_it_2(new_list,index,value): num = 0 for i in range(len(new_list)): if new_list[i] == -1: num+=1 if num == index+1: new_list[i] = value #print("new list: ", new_list ) return new_list #return from 中介数 def order_2(lst): top = [] limit = len(lst) + 1 for i in range(limit): top.append(i + 1) top.pop(0) new_top = top[::-1] new_lst = lst temp = [] for i in range(len(new_lst)+1): temp.append(-1) #print("new_lst:", new_lst, "new_top: ", new_top, "temp: ", temp) for i in range(len(new_lst)): #print(new_lst:", new_lst, "new_top: ", new_top, "temp: ", temp) temp = find_it_2(temp, new_lst[i], new_top[i]) for i in range(len(temp)): if temp[i] == -1: temp[i] = 1 #print(temp[::-1]) return temp[::-1] ###############增 ###############减 #get 中介数 def shift_3(lst): a = shift_2(lst) a = a[::-1] #print(a[::-1]) return a[::-1] # print (lst) #add 中介数 def add_3(lst, k): shifted_num = shift_3(lst) limit = len(shifted_num) + 1 limit_list = [] added_list = [] k = k[::-1] for i in range(limit): limit_list.append(i + 1) limit_list.pop(0) true_list = shifted_num[::-1] limit_list = limit_list[::-1] list_len = len(true_list) for i in range(list_len): if (true_list[i]+k[i]) > limit_list[i]: true_list[i+1] += 1 a = (true_list[i] + k[i]) - (limit_list[i]) added_list.append(a) elif (true_list[i]+k[i]) == limit_list[i]: if len(true_list) != 1: true_list[i+1] += 1 else: true_list.append(1) added_list.append(0) else: added_list.append(true_list[i] + k[i]) #print(added_list[::-1]) return added_list[::-1] #subtraction helper def helper3(lst, start): new_lst = lst[start:] index_of_zeros = [] index_of_carry = [] for i in range(len(new_lst)): if new_lst[i] == 0: index_of_zeros.append(i+ start) elif new_lst[i] > 0: index_of_carry.append(i) index_of_carry = index_of_carry[1] + start a = [] a.append([index_of_carry]) a.append(index_of_zeros) return a #subtract 中介数 def subtract_3(lst, k): shifted_num = shift_3(lst) limit_list = [] k = k[::-1] limit = len(shifted_num) + 1 for i in range(limit): limit_list.append(i + 1) limit_list.pop(0) top = limit_list[::-1] #reverse list to work with lst = shifted_num[::-1] subtract_list = [] list_len = len(lst) next_borrow = 1 for i in range(list_len): if (lst[i] - k[i]) >= 0: subtract_list.append(lst[i] - k[i]) while (lst[i] - k[i]) < 0: if lst[i+next_borrow] > 0: lst[i+next_borrow] -= 1 lst[i] = lst[i] + top[i] if (lst[i] - k[i]) >= 0: subtract_list.append(lst[i] - k[i]) elif lst[i+next_borrow] == 0: a = helper1(lst, i) index_of_carry = a[0][0] index_of_zeros = a[1][0] temp = lst[:index_of_carry] lst[index_of_carry] -= 1 for j in range(len(temp)): lst[j] += top[j] if (lst[i] - k[i]) > 0: subtract_list.append(lst[i] - k[i]) #print("subtract_list:", subtract_list) return subtract_list[::-1] #find value helper def find_it_3(new_list,index,value): num = 0 for i in range(len(new_list)): if new_list[i] == -1: num+=1 if num == index+1: new_list[i] = value return new_list #return from 中介数 def order_3(lst): top = [] limit = len(lst) + 1 for i in range(limit): top.append(i + 1) top.pop(0) new_top = top[::-1] new_lst = lst[::-1] temp = [] for i in range(len(new_lst)+1): temp.append(-1) # print("new_lst:", new_lst, "new_top: ", new_top, "temp: ", temp) for i in range(len(new_lst)): #print(new_lst:", new_lst, "new_top: ", new_top, "temp: ", temp) temp = find_it_3(temp, new_lst[i], new_top[i]) for i in range(len(temp)): if temp[i] == -1: temp[i] = 1 #print(temp[::-1]) return 
temp[::-1] ###############加 ###############邻排列 def shift_4(lst): new_lst = lst shifted_num = [] while new_lst: count = 0 biggest = max(new_lst) biggest_index = new_lst.index(biggest) for i in range(len(new_lst[biggest_index:])): if new_lst[i] < biggest: count += 1 shifted_num.append(count) del new_lst[biggest_index] shifted_num.pop() a = shifted_num[::-1] return a if __name__ == "__main__": main()
28.612
80
0.504893
2,207
14,306
3.085184
0.057997
0.048465
0.034366
0.019386
0.78969
0.754443
0.731826
0.707446
0.690997
0.657365
0
0.04938
0.334685
14,306
499
81
28.669339
0.666001
0.141829
0
0.757653
0
0
0.001903
0
0
0
0
0
0
1
0.048469
false
0
0
0
0.094388
0.007653
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c31a9fec5f0da5caa5e0f4d5dc66854b532acfd2
148
py
Python
python_plsql/__init__.py
LatvianPython/python_plsql
26d8b51be5250ef195c5fc8777fde874b52626e4
[ "MIT" ]
3
2019-11-08T21:39:24.000Z
2020-11-02T19:06:03.000Z
python_plsql/__init__.py
LatvianPython/python_plsql
26d8b51be5250ef195c5fc8777fde874b52626e4
[ "MIT" ]
null
null
null
python_plsql/__init__.py
LatvianPython/python_plsql
26d8b51be5250ef195c5fc8777fde874b52626e4
[ "MIT" ]
1
2020-02-16T12:09:47.000Z
2020-02-16T12:09:47.000Z
from cx_Oracle import DatabaseError

from .client import Database
from .client import NotFound

__all__ = ["Database", "NotFound", "DatabaseError"]
21.142857
51
0.783784
17
148
6.529412
0.529412
0.18018
0.288288
0
0
0
0
0
0
0
0
0
0.128378
148
6
52
24.666667
0.860465
0
0
0
0
0
0.195946
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
c321dc356211c87f793fe1df65d036b97639551d
44
py
Python
lib/exceptions.py
sschwetz/network_tech
fc65166e71bfdb5a0e99ca7e7ce9f7814b92869b
[ "Apache-2.0" ]
73
2017-05-04T06:35:20.000Z
2022-02-03T13:57:00.000Z
lib/exceptions.py
sschwetz/network_tech
fc65166e71bfdb5a0e99ca7e7ce9f7814b92869b
[ "Apache-2.0" ]
35
2017-11-09T16:28:48.000Z
2022-01-12T08:15:48.000Z
lib/exceptions.py
sschwetz/network_tech
fc65166e71bfdb5a0e99ca7e7ce9f7814b92869b
[ "Apache-2.0" ]
20
2017-11-08T05:07:59.000Z
2021-12-09T17:41:06.000Z
class InvalidPassword(Exception):
    pass
11
33
0.75
4
44
8.25
1
0
0
0
0
0
0
0
0
0
0
0
0.181818
44
3
34
14.666667
0.916667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
1
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
c34ac6ba82e3c98c21fda4f0a45d0f3f6a4e9b7c
15,095
py
Python
Wrappers/Python/test/test_PluginsTigre_General.py
samdporter/CIL
cd37de8e3d757674f61236f9943792d106bab428
[ "Apache-2.0" ]
null
null
null
Wrappers/Python/test/test_PluginsTigre_General.py
samdporter/CIL
cd37de8e3d757674f61236f9943792d106bab428
[ "Apache-2.0" ]
null
null
null
Wrappers/Python/test/test_PluginsTigre_General.py
samdporter/CIL
cd37de8e3d757674f61236f9943792d106bab428
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
#  This work is part of the Core Imaging Library (CIL) developed by CCPi
#  (Collaborative Computational Project in Tomographic Imaging), with
#  substantial contributions by UKRI-STFC and University of Manchester.

#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at

#  http://www.apache.org/licenses/LICENSE-2.0

#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

from cil.framework import AcquisitionGeometry
from cil.framework.framework import ImageGeometry
import unittest
import numpy as np
from cil.utilities.display import show2D
from utils import has_gpu_tigre, has_tigre

if has_tigre:
    from cil.plugins.tigre import ProjectionOperator
    from cil.plugins.tigre import CIL2TIGREGeometry

has_tigre_gpu = has_gpu_tigre()
if not has_tigre_gpu:
    print("Unable to run TIGRE GPU tests")


class Test_convert_geometry(unittest.TestCase):

    def setUp(self):
        self.num_pixels_x = 12
        self.num_pixels_y = 6
        self.pixel_size_x = 0.1
        self.pixel_size_y = 0.2

        self.ig = ImageGeometry(3, 4, 5, 0.1, 0.2, 0.3)

        self.angles_deg = np.asarray([0, 90.0, 180.0], dtype='float32')
        self.angles_rad = self.angles_deg * np.pi / 180.0

    def compare_angles(self, ang1, ang2, atol):
        # wrap the difference into [-pi, pi) before comparing
        diff = ang1 - ang2
        while diff < -np.pi:
            diff += 2 * np.pi
        while diff >= np.pi:
            diff -= 2 * np.pi
        self.assertLess(abs(diff), atol)

    @unittest.skipUnless(has_tigre, "TIGRE not installed")
    def test_cone2D(self):
        ag = AcquisitionGeometry.create_Cone2D(source_position=[0, -6], detector_position=[0, 16])\
            .set_angles(self.angles_rad, angle_unit='radian')\
            .set_labels(['angle', 'horizontal'])\
            .set_panel(self.num_pixels_x, self.pixel_size_x)

        # 2D cone
        tg_geometry, tg_angles = CIL2TIGREGeometry.getTIGREGeometry(self.ig, ag)

        for i, ang in enumerate(tg_angles):
            ang2 = -(self.angles_rad[i] + np.pi / 2)
            self.compare_angles(ang, ang2, 1e-6)

        self.assertTrue(tg_geometry.mode == 'cone')
        np.testing.assert_allclose(tg_geometry.DSD, ag.dist_center_detector + ag.dist_source_center)
        np.testing.assert_allclose(tg_geometry.DSO, ag.dist_source_center)
        np.testing.assert_allclose(tg_geometry.dDetector, ag.config.panel.pixel_size[::-1])
        np.testing.assert_allclose(tg_geometry.nDetector, ag.config.panel.num_pixels[::-1])
        np.testing.assert_allclose(tg_geometry.sDetector, tg_geometry.dDetector * tg_geometry.nDetector)
        np.testing.assert_allclose(tg_geometry.rotDetector, 0)
        np.testing.assert_allclose(tg_geometry.offDetector, 0)
        np.testing.assert_allclose(tg_geometry.offOrigin, 0)
        np.testing.assert_allclose(tg_geometry.nVoxel, [1, self.ig.voxel_num_y, self.ig.voxel_num_x])
        np.testing.assert_allclose(tg_geometry.dVoxel, [ag.config.panel.pixel_size[1] / ag.magnification, self.ig.voxel_size_y, self.ig.voxel_size_x])

    @unittest.skipUnless(has_tigre, "TIGRE not installed")
    def test_cone3D_simple(self):
        ag = AcquisitionGeometry.create_Cone3D(source_position=[0, -6, 0], detector_position=[0, 16, 0])\
            .set_angles(self.angles_deg, angle_unit='degree')\
            .set_labels(['vertical', 'angle', 'horizontal'])\
            .set_panel((self.num_pixels_x, self.num_pixels_y), (self.pixel_size_x, self.pixel_size_y))

        self.assertTrue(ag.system_description == 'simple')

        tg_geometry, tg_angles = CIL2TIGREGeometry.getTIGREGeometry(self.ig, ag)

        for i, ang in enumerate(tg_angles):
            ang2 = -(self.angles_rad[i] + np.pi / 2)
            self.compare_angles(ang, ang2, 1e-6)

        self.assertTrue(tg_geometry.mode == 'cone')
        np.testing.assert_allclose(tg_geometry.DSD, ag.dist_center_detector + ag.dist_source_center)
        np.testing.assert_allclose(tg_geometry.DSO, ag.dist_source_center)
        np.testing.assert_allclose(tg_geometry.dDetector, ag.config.panel.pixel_size[::-1])
        np.testing.assert_allclose(tg_geometry.nDetector, ag.config.panel.num_pixels[::-1])
        np.testing.assert_allclose(tg_geometry.sDetector, tg_geometry.dDetector * tg_geometry.nDetector)
        np.testing.assert_allclose(tg_geometry.rotDetector, 0)
        np.testing.assert_allclose(tg_geometry.offDetector, 0)
        np.testing.assert_allclose(tg_geometry.offOrigin, 0)
        np.testing.assert_allclose(tg_geometry.nVoxel, [self.ig.voxel_num_z, self.ig.voxel_num_y, self.ig.voxel_num_x])
        np.testing.assert_allclose(tg_geometry.dVoxel, [self.ig.voxel_size_z, self.ig.voxel_size_y, self.ig.voxel_size_x])

    @unittest.skipUnless(has_tigre, "TIGRE not installed")
    def test_cone3D_offset(self):
        # 3, 4, 5 triangle for source + object
        ag = AcquisitionGeometry.create_Cone3D(source_position=[0, -4, 0], detector_position=[0, 4, 0], rotation_axis_position=[3, 0, 0])\
            .set_angles(self.angles_deg, angle_unit='degree')\
            .set_labels(['vertical', 'angle', 'horizontal'])\
            .set_panel((self.num_pixels_x, self.num_pixels_y), (self.pixel_size_x, self.pixel_size_y))

        self.assertTrue(ag.system_description == 'offset')

        tg_geometry, tg_angles = CIL2TIGREGeometry.getTIGREGeometry(self.ig, ag)

        np.testing.assert_allclose(tg_geometry.DSO, ag.dist_source_center)

        yaw = np.arcsin(3. / 5.)
        det_rot = np.array([0, 0, yaw])
        np.testing.assert_allclose(tg_geometry.rotDetector, det_rot)

        offset = 4 * 6 / 5
        det_offset = np.array([0, -offset, 0])
        np.testing.assert_allclose(tg_geometry.offDetector, det_offset)

        s2d = ag.dist_center_detector + ag.dist_source_center - 6 * 3 / 5
        np.testing.assert_allclose(tg_geometry.DSD, s2d)

        for i, ang in enumerate(tg_angles):
            ang2 = -(self.angles_rad[i] + np.pi / 2 + yaw)
            self.compare_angles(ang, ang2, 1e-6)

        self.assertTrue(tg_geometry.mode == 'cone')
        np.testing.assert_allclose(tg_geometry.dDetector, ag.config.panel.pixel_size[::-1])
        np.testing.assert_allclose(tg_geometry.nDetector, ag.config.panel.num_pixels[::-1])
        np.testing.assert_allclose(tg_geometry.sDetector, tg_geometry.dDetector * tg_geometry.nDetector)
        np.testing.assert_allclose(tg_geometry.offOrigin, 0)
        np.testing.assert_allclose(tg_geometry.nVoxel, [self.ig.voxel_num_z, self.ig.voxel_num_y, self.ig.voxel_num_x])
        np.testing.assert_allclose(tg_geometry.dVoxel, [self.ig.voxel_size_z, self.ig.voxel_size_y, self.ig.voxel_size_x])

    @unittest.skipUnless(has_tigre, "TIGRE not installed")
    def test_cone3D_advanced(self):
        ag = AcquisitionGeometry.create_Cone3D(source_position=[0, -10, 0], detector_position=[0, 10, 0], rotation_axis_position=[0, 0, 0], rotation_axis_direction=[0, -1, 1])\
            .set_angles(self.angles_deg, angle_unit='degree')\
            .set_labels(['vertical', 'angle', 'horizontal'])\
            .set_panel((self.num_pixels_x, self.num_pixels_y), (self.pixel_size_x, self.pixel_size_y))

        self.assertTrue(ag.system_description == 'advanced')

        tg_geometry, tg_angles = CIL2TIGREGeometry.getTIGREGeometry(self.ig, ag)

        self.assertAlmostEqual(tg_geometry.DSO, ag.dist_source_center * np.sin(np.pi / 4), 5)

        s2o = ag.dist_source_center * np.cos(np.pi / 4)
        np.testing.assert_allclose(tg_geometry.DSO, s2o)

        s2d = (ag.dist_center_detector + ag.dist_source_center) * np.cos(np.pi / 4)
        np.testing.assert_allclose(tg_geometry.DSD, s2d)

        det_rot = np.array([0, -np.pi / 4, 0])
        np.testing.assert_allclose(tg_geometry.rotDetector, det_rot)

        det_offset = np.array([-s2d, 0, 0])
        np.testing.assert_allclose(tg_geometry.offDetector, det_offset)

        for i, ang in enumerate(tg_angles):
            ang2 = -(self.angles_rad[i] + np.pi / 2)
            self.compare_angles(ang, ang2, 1e-6)

        self.assertTrue(tg_geometry.mode == 'cone')
        np.testing.assert_allclose(tg_geometry.dDetector, ag.config.panel.pixel_size[::-1])
        np.testing.assert_allclose(tg_geometry.nDetector, ag.config.panel.num_pixels[::-1])
        np.testing.assert_allclose(tg_geometry.sDetector, tg_geometry.dDetector * tg_geometry.nDetector)

        height = 10 / np.sqrt(2)
        np.testing.assert_allclose(tg_geometry.offOrigin, [-height, 0, 0])

        np.testing.assert_allclose(tg_geometry.nVoxel, [self.ig.voxel_num_z, self.ig.voxel_num_y, self.ig.voxel_num_x])
        np.testing.assert_allclose(tg_geometry.dVoxel, [self.ig.voxel_size_z, self.ig.voxel_size_y, self.ig.voxel_size_x])

    @unittest.skipUnless(has_tigre, "TIGRE not installed")
    def test_parallel2D(self):
        ag = AcquisitionGeometry.create_Parallel2D()\
            .set_angles(self.angles_rad, angle_unit='radian')\
            .set_labels(['angle', 'horizontal'])\
            .set_panel(self.num_pixels_x, self.pixel_size_x)

        tg_geometry, tg_angles = CIL2TIGREGeometry.getTIGREGeometry(self.ig, ag)

        for i, ang in enumerate(tg_angles):
            ang2 = -(self.angles_rad[i] + np.pi / 2)
            self.compare_angles(ang, ang2, 1e-6)

        self.assertTrue(tg_geometry.mode == 'parallel')
        np.testing.assert_allclose(tg_geometry.dDetector, ag.config.panel.pixel_size[::-1])
        np.testing.assert_allclose(tg_geometry.nDetector, ag.config.panel.num_pixels[::-1])
        np.testing.assert_allclose(tg_geometry.sDetector, tg_geometry.dDetector * tg_geometry.nDetector)
        np.testing.assert_allclose(tg_geometry.rotDetector, 0)
        np.testing.assert_allclose(tg_geometry.offDetector, 0)
        np.testing.assert_allclose(tg_geometry.offOrigin, 0)
        np.testing.assert_allclose(tg_geometry.nVoxel, [1, self.ig.voxel_num_y, self.ig.voxel_num_x])
        np.testing.assert_allclose(tg_geometry.dVoxel, [ag.config.panel.pixel_size[1], self.ig.voxel_size_y, self.ig.voxel_size_x])

    @unittest.skipUnless(has_tigre, "TIGRE not installed")
    def test_parallel3D_simple(self):
        ag = AcquisitionGeometry.create_Parallel3D()\
            .set_angles(self.angles_deg, angle_unit='degree')\
            .set_labels(['vertical', 'angle', 'horizontal'])\
            .set_panel((self.num_pixels_x, self.num_pixels_y), (self.pixel_size_x, self.pixel_size_y))

        tg_geometry, tg_angles = CIL2TIGREGeometry.getTIGREGeometry(self.ig, ag)

        for i, ang in enumerate(tg_angles):
            ang2 = -(self.angles_rad[i] + np.pi / 2)
            self.compare_angles(ang, ang2, 1e-6)

        self.assertTrue(tg_geometry.mode == 'parallel')
        np.testing.assert_allclose(tg_geometry.dDetector, ag.config.panel.pixel_size[::-1])
        np.testing.assert_allclose(tg_geometry.nDetector, ag.config.panel.num_pixels[::-1])
        np.testing.assert_allclose(tg_geometry.sDetector, tg_geometry.dDetector * tg_geometry.nDetector)
        np.testing.assert_allclose(tg_geometry.rotDetector, 0)
        np.testing.assert_allclose(tg_geometry.offDetector, 0)
        np.testing.assert_allclose(tg_geometry.offOrigin, 0)
        np.testing.assert_allclose(tg_geometry.nVoxel, [self.ig.voxel_num_z, self.ig.voxel_num_y, self.ig.voxel_num_x])
        np.testing.assert_allclose(tg_geometry.dVoxel, [self.ig.voxel_size_z, self.ig.voxel_size_y, self.ig.voxel_size_x])

    @unittest.skipUnless(has_tigre, "TIGRE not installed")
    def test_parallel3D_offset(self):
        ag = AcquisitionGeometry.create_Parallel3D(detector_position=[2, 0, 0], rotation_axis_position=[3, 0, 0])\
            .set_angles(self.angles_deg, angle_unit='degree')\
            .set_labels(['vertical', 'angle', 'horizontal'])\
            .set_panel((self.num_pixels_x, self.num_pixels_y), (self.pixel_size_x, self.pixel_size_y))

        self.assertTrue(ag.system_description == 'offset')

        tg_geometry, tg_angles = CIL2TIGREGeometry.getTIGREGeometry(self.ig, ag)

        det_offset = np.array([0, -1, 0])
        np.testing.assert_allclose(tg_geometry.offDetector, det_offset)

        for i, ang in enumerate(tg_angles):
            ang2 = -(self.angles_rad[i] + np.pi / 2)
            self.compare_angles(ang, ang2, 1e-6)

        self.assertTrue(tg_geometry.mode == 'parallel')
        np.testing.assert_allclose(tg_geometry.dDetector, ag.config.panel.pixel_size[::-1])
        np.testing.assert_allclose(tg_geometry.nDetector, ag.config.panel.num_pixels[::-1])
        np.testing.assert_allclose(tg_geometry.sDetector, tg_geometry.dDetector * tg_geometry.nDetector)
        np.testing.assert_allclose(tg_geometry.offOrigin, 0)
        np.testing.assert_allclose(tg_geometry.nVoxel, [self.ig.voxel_num_z, self.ig.voxel_num_y, self.ig.voxel_num_x])
        np.testing.assert_allclose(tg_geometry.dVoxel, [self.ig.voxel_size_z, self.ig.voxel_size_y, self.ig.voxel_size_x])


class TestMechanics(unittest.TestCase):

    @unittest.skipUnless(has_tigre_gpu, "Requires TIGRE GPU")
    def setUp(self):
        self.ag = AcquisitionGeometry.create_Cone2D([0, -500], [0, 500]).set_angles([0]).set_panel(5, 1)
        arr = np.arange(5 * 5).reshape(5, 5)

        self.ig = ImageGeometry(5, 5)
        self.data = self.ig.allocate()
        self.data.fill(arr)

        self.acq_data = self.ag.allocate()
        self.acq_data.fill(arr[0])

    def test_adjoint_weights(self):
        # checks the adjoint_weights parameter calls a different backend
        Op = ProjectionOperator(self.ig, self.ag, adjoint_weights='matched')
        bp1 = Op.adjoint(self.acq_data)

        Op = ProjectionOperator(self.ig, self.ag, adjoint_weights='FDK')
        bp2 = Op.adjoint(self.acq_data)

        diff = (bp1 - bp2).abs().sum()
        self.assertGreater(diff, 25)

    def test_direct_method(self):
        # checks the direct_method parameter calls a different backend
        Op = ProjectionOperator(self.ig, self.ag, direct_method='Siddon')
        fp1 = Op.direct(self.data)

        Op = ProjectionOperator(self.ig, self.ag, direct_method='interpolated')
        fp2 = Op.direct(self.data)

        diff = (fp1 - fp2).abs().sum()
        self.assertGreater(diff, 0.1)
47.024922
168
0.669228
2,046
15,095
4.70088
0.120235
0.095654
0.098253
0.150655
0.789249
0.75078
0.75078
0.740175
0.704408
0.676024
0
0.02043
0.212057
15,095
320
169
47.171875
0.788213
0.061212
0
0.600962
0
0
0.033072
0
0
0
0
0
0.375
1
0.057692
false
0
0.038462
0
0.105769
0.004808
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c377ae1623975639ed8321262ccbf34198c97dba
272
py
Python
tflib/__init__.py
tonyshao5/Tensorflow-up
f8f8fce9436c40cad298f6211db2be3a18480bad
[ "MIT" ]
50
2017-10-14T15:04:45.000Z
2022-02-28T13:44:26.000Z
tflib/__init__.py
tonyshao5/Tensorflow-up
f8f8fce9436c40cad298f6211db2be3a18480bad
[ "MIT" ]
1
2019-05-16T18:22:46.000Z
2019-05-27T22:26:57.000Z
tflib/__init__.py
tonyshao5/Tensorflow-up
f8f8fce9436c40cad298f6211db2be3a18480bad
[ "MIT" ]
17
2018-06-19T07:19:29.000Z
2021-04-04T16:56:03.000Z
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tflib.checkpoint import *
from tflib.data import *
from tflib.ops import *
from tflib.utils import *
from tflib.variable import *
from tflib.vision import *
24.727273
38
0.819853
38
272
5.5
0.368421
0.287081
0.358852
0
0
0
0
0
0
0
0
0
0.136029
272
10
39
27.2
0.889362
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0.111111
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6f1ece75a11c1aeb453e622b1b66fd8ba42377ac
39
py
Python
mpf/file_interfaces/__init__.py
Scottacus64/mpf
fcfb6c5698b9c7d8bf0eb64b021aaa389ea6478a
[ "MIT" ]
163
2015-01-25T02:19:50.000Z
2022-03-26T12:00:28.000Z
mpf/file_interfaces/__init__.py
Scottacus64/mpf
fcfb6c5698b9c7d8bf0eb64b021aaa389ea6478a
[ "MIT" ]
1,086
2015-03-23T19:53:17.000Z
2022-03-24T20:46:11.000Z
mpf/file_interfaces/__init__.py
Scottacus64/mpf
fcfb6c5698b9c7d8bf0eb64b021aaa389ea6478a
[ "MIT" ]
148
2015-01-28T02:31:39.000Z
2022-03-22T13:54:01.000Z
"""Contains config file interfaces."""
19.5
38
0.717949
4
39
7
1
0
0
0
0
0
0
0
0
0
0
0
0.102564
39
1
39
39
0.8
0.820513
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
6f20dabcce2256826a0d68d4739f5092d1c5a317
104
py
Python
spinesTS/preprocessing/__init__.py
BirchKwok/spinesTS
b88ec333f41f58979e0570177d1fdc364d976056
[ "Apache-2.0" ]
2
2021-08-15T09:29:37.000Z
2022-03-10T13:56:13.000Z
spinesTS/preprocessing/__init__.py
BirchKwok/spinesTS
b88ec333f41f58979e0570177d1fdc364d976056
[ "Apache-2.0" ]
null
null
null
spinesTS/preprocessing/__init__.py
BirchKwok/spinesTS
b88ec333f41f58979e0570177d1fdc364d976056
[ "Apache-2.0" ]
null
null
null
from ._split_seq import split_series, train_test_split_ts
from ._gaussian_rank import GaussRankScaler
34.666667
58
0.865385
15
104
5.466667
0.733333
0
0
0
0
0
0
0
0
0
0
0
0.105769
104
2
59
52
0.88172
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6f25984ed6beab502d525a983fd9ddd1c6aab74b
328
py
Python
app/utils/errors/__init__.py
Jajc09/sifco-api
ef1af2a48513695f7a7a5a82d0d96d1c256908f3
[ "MIT" ]
null
null
null
app/utils/errors/__init__.py
Jajc09/sifco-api
ef1af2a48513695f7a7a5a82d0d96d1c256908f3
[ "MIT" ]
null
null
null
app/utils/errors/__init__.py
Jajc09/sifco-api
ef1af2a48513695f7a7a5a82d0d96d1c256908f3
[ "MIT" ]
null
null
null
# paths of the different classes to run within errors
from app.utils.errors.NotFoundException import NotFoundException
from app.utils.errors.BadRequestException import BadRequestException
from app.utils.errors.KeyErrorException import KeyErrorException
from app.utils.errors.BadLengthException import BadLengthException
46.857143
68
0.875
38
328
7.552632
0.447368
0.097561
0.167247
0.250871
0
0
0
0
0
0
0
0
0.085366
328
7
69
46.857143
0.956667
0.176829
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6f2abcce44f7453fcda82d2ce0bc931ba3f8e9b7
996
py
Python
tests/.stage2_default/verify.d/test_mysql.py
exphost/exphost.mysql_content
6f58cf432af4eb113a7d64c0e44057c6658e05ec
[ "MIT" ]
null
null
null
tests/.stage2_default/verify.d/test_mysql.py
exphost/exphost.mysql_content
6f58cf432af4eb113a7d64c0e44057c6658e05ec
[ "MIT" ]
1
2021-09-23T23:33:37.000Z
2021-09-23T23:33:37.000Z
tests/test_centos8_default/verify.d/test_mysql.py
exphost/exphost.mysql_content
6f58cf432af4eb113a7d64c0e44057c6658e05ec
[ "MIT" ]
null
null
null
def test_mysql_process(host):
    assert host.service("my_db-mysql").is_running
    assert host.service("my_db-mysql").is_enabled


def test_check_mysql_access(host):
    assert host.run("mysql --protocol=tcp -h 127.0.0.1 -u u1 -P 13306 -ppassword1 db1").succeeded
    assert host.run("mysql --protocol=tcp -h 127.0.0.1 -u u1 -P 13306 -ppassword1 db2").failed
    assert host.run("mysql --protocol=tcp -h 127.0.0.1 -u u1 -P 13306 -ppassword2 db1").failed
    assert host.run("mysql --protocol=tcp -h 127.0.0.1 -u u3 -P 13306 -ppassword3 db1").succeeded
    assert host.run("mysql --protocol=tcp -h 127.0.0.1 -u u3 -P 13306 -ppassword3 db2").succeeded
    assert host.run("mysql --protocol=tcp -h 127.0.0.1 -u u3 -P 13306 -ppassword3 db3").failed


def test_db_encoding(host):
    assert host.run("""echo "SELECT DEFAULT_CHARACTER_SET_NAME FROM information_schema.SCHEMATA where SCHEMA_NAME='db1';" | mysql --protocol=tcp -h 127.0.0.1 -u u1 -P 13306 -ppassword1 db1 -N""").stdout.strip() == "utf8"
62.25
220
0.706827
175
996
3.931429
0.285714
0.130814
0.132267
0.172965
0.677326
0.677326
0.677326
0.59593
0.59593
0.59593
0
0.116414
0.13755
996
15
221
66.4
0.684517
0
0
0
0
0.583333
0.580321
0.053213
0
0
0
0
0.75
1
0.25
false
0.583333
0
0
0.25
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
null
0
0
0
1
0
1
0
1
0
0
0
0
0
5
6f367084bf2be1d95f6f14f61ae14586c79f8950
1,952
py
Python
python/mlad/api/base.py
onetop21/MLAppDeploy
42e5fa6b58b0fb90043feb85af7ae8da9159bfc8
[ "MIT" ]
null
null
null
python/mlad/api/base.py
onetop21/MLAppDeploy
42e5fa6b58b0fb90043feb85af7ae8da9159bfc8
[ "MIT" ]
209
2020-09-14T11:54:35.000Z
2022-03-30T09:26:33.000Z
python/mlad/api/base.py
onetop21/MLAppDeploy
42e5fa6b58b0fb90043feb85af7ae8da9159bfc8
[ "MIT" ]
null
null
null
import requests
from typing import Optional, Dict

from .exception import raise_error


class APIBase:

    def __init__(self, config, prefix):
        self.baseurl = f'{config.apiserver.address}/api/v1/{prefix}'
        self.headers = {'session': config.session}
        self.raise_error = raise_error

    def _get(self, path: str, params: Optional[Dict] = None, raw: bool = False,
             stream: bool = False, timeout: int = 30):
        url = f'{self.baseurl}{path}'
        res = requests.get(url=url, headers=self.headers, params=params,
                           timeout=timeout, stream=stream)
        self.raise_error(res)
        return res if raw else res.json()

    def _post(self, path: str, params: Optional[Dict] = None, body: Optional[Dict] = None,
              raw: bool = False, stream: bool = False, timeout: int = 30):
        url = f'{self.baseurl}{path}'
        res = requests.post(url=url, headers=self.headers, params=params, json=body,
                            timeout=timeout, stream=stream)
        self.raise_error(res)
        return res if raw else res.json()

    def _delete(self, path: str, params: Optional[Dict] = None, body: Optional[Dict] = None,
                raw: bool = False, stream: bool = False, timeout: int = 30):
        url = f'{self.baseurl}{path}'
        res = requests.delete(url=url, headers=self.headers, params=params, json=body,
                              timeout=timeout, stream=stream)
        self.raise_error(res)
        return res if raw else res.json()

    def _put(self, path: str, params: Optional[Dict] = None, body: Optional[Dict] = None,
             raw: bool = False, stream: bool = False, timeout: int = 30):
        url = f'{self.baseurl}{path}'
        res = requests.put(url=url, headers=self.headers, params=params, json=body,
                           timeout=timeout, stream=stream)
        self.raise_error(res)
        return res if raw else res.json()
42.434783
92
0.600922
248
1,952
4.669355
0.185484
0.082902
0.096718
0.058722
0.791883
0.791883
0.791883
0.746114
0.746114
0.746114
0
0.006369
0.276127
1,952
45
93
43.377778
0.813163
0
0
0.555556
0
0
0.066086
0.021516
0
0
0
0
0
1
0.138889
false
0
0.083333
0
0.361111
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
6f41c8369be058a2004ab367c70b5043fd21f7b8
55
py
Python
agents/__init__.py
InesVogel/Connect4
9528115515fb33d107ebc26d4141a1d3effdca5e
[ "MIT" ]
null
null
null
agents/__init__.py
InesVogel/Connect4
9528115515fb33d107ebc26d4141a1d3effdca5e
[ "MIT" ]
null
null
null
agents/__init__.py
InesVogel/Connect4
9528115515fb33d107ebc26d4141a1d3effdca5e
[ "MIT" ]
null
null
null
from .common import initialize_game_state, next_player
27.5
54
0.872727
8
55
5.625
1
0
0
0
0
0
0
0
0
0
0
0
0.090909
55
1
55
55
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5