hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
31c411f06ad582e4a0fa3fab2a675d3cd66653a8
88
py
Python
md_autogen/__init__.py
vfdev-5/markdown-apidocs
711c15e9cd93a2291501cc49e072786757d76386
[ "MIT" ]
null
null
null
md_autogen/__init__.py
vfdev-5/markdown-apidocs
711c15e9cd93a2291501cc49e072786757d76386
[ "MIT" ]
null
null
null
md_autogen/__init__.py
vfdev-5/markdown-apidocs
711c15e9cd93a2291501cc49e072786757d76386
[ "MIT" ]
null
null
null
from __future__ import absolute_import from .md_autogen import * __version__ = "0.5"
12.571429
38
0.772727
12
88
4.833333
0.75
0
0
0
0
0
0
0
0
0
0
0.027027
0.159091
88
6
39
14.666667
0.756757
0
0
0
1
0
0.034091
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
31eb67a6115f58b56d7f9e7ee853b1a3ec47352e
14,459
py
Python
test/wecall_acceptance/single_sample_diploid/test_quality_recalibration.py
dylex/wecall
35d24cefa4fba549e737cd99329ae1b17dd0156b
[ "MIT" ]
8
2018-10-08T15:47:21.000Z
2021-11-09T07:13:05.000Z
test/wecall_acceptance/single_sample_diploid/test_quality_recalibration.py
dylex/wecall
35d24cefa4fba549e737cd99329ae1b17dd0156b
[ "MIT" ]
4
2018-11-05T09:16:27.000Z
2020-04-09T12:32:56.000Z
test/wecall_acceptance/single_sample_diploid/test_quality_recalibration.py
dylex/wecall
35d24cefa4fba549e737cd99329ae1b17dd0156b
[ "MIT" ]
4
2019-09-03T15:46:39.000Z
2021-06-04T07:28:33.000Z
# All content Copyright (C) 2018 Genomics plc from unittest import expectedFailure from wecall_test_drivers.ascii_quality_recalibration_runner import AsciiQualityRecalibrationTest from wecall_test_drivers.ascii_wecall_runner import AsciiWecallRunnerTest class TestQualityRecalibrationDeletion(AsciiQualityRecalibrationTest): def test_should_not_recalibrate_good_read_data_for_deletion(self): reference = "ATCTAATAGCTATCAGCAATATCGCGCGTATTATTTATTTAT" bam_spec = [" ,,,,,,,,,,*,,,,,,,,,,,,,,,,,,,,,, ", "..............*............. ", " ,,,,,,,,*,,,,,,,,,,,,,,,,,,,, ", "..............*....................... "] self.assert_quality_recalibrated_in_output_bam(reference, bam_spec, bam_spec) class TestQualityRecalibrationInsertion(AsciiQualityRecalibrationTest): def test_should_not_recalibrate_good_read_data_for_deletion(self): reference = "ATCTAATAGCTATC*GCAATATCGCGCGTATTATTTATTTAT" bam_spec = [" ,,,,,,,,,,a,,,,,,,,,,,,,,,,,,,,,, ", "..............A............. ", " ,,,,,,,,a,,,,,,,,,,,,,,,,,,,, ", "..............A....................... "] self.assert_quality_recalibrated_in_output_bam(reference, bam_spec, bam_spec) class TestQualityRecalibrationBespokeQualities(AsciiQualityRecalibrationTest): def test_should_not_recalibrate_good_read_data_with_snp_1(self): reference = "ATCTAATAGCTATCAGCAATATCGCGCGTATTATTTATTTAT" bam_spec = [" ,,,,,,,,,,t,,,,,,,,,,,,,,,,,,,,,, ", " 0 ", "..............T............. ", " 0 ", " ,,,,,,,,t,,,,,,,,,,,,,,,,,,,, ", " 0 ", "..............T....................... ", " 0 "] self.assert_quality_recalibrated_in_output_bam(reference, bam_spec, bam_spec) def test_should_not_recalibrate_good_read_data_with_snp_2(self): reference = "ATCTAATAGCTATCAGCAATATCGCGCGTATTATTTATTTAT" bam_spec = [" ,,,,,,,,,,t,,,,,,,,,,,,,,,,,,,,,, ", " 1 ", "..............T............. ", " 1 ", " ,,,,,,,,t,,,,,,,,,,,,,,,,,,,, ", " 1 ", "..............T....................... 
", " 1 "] self.assert_quality_recalibrated_in_output_bam(reference, bam_spec, bam_spec) class TestQualityRecalibrationSingleSNP(AsciiQualityRecalibrationTest): def test_should_not_recalibrate_region_snp_on_two_forward_strands_out_of_eight(self): reference = "ATCTAATAGCATCTAATAGCTAGCATCCGTAACAGCAATATCGCGCGTATTATTTATTTAT" bam_spec = [ "..............................T..............................", " .....................T..............................", " ....................................................", " ....................................................", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, "] self.assert_quality_recalibrated_in_output_bam(reference, bam_spec, bam_spec) def test_should_not_recalibrate_snp_on_one_forward_and_reverse_strand_out_of_eight(self): reference = "ATCTAATAGCATCTAATAGCTAGCATCCGTAACAGCAATATCGCGCGTATTATTTATTTAT" bam_spec = [ "..............................T.......................... 
", " ....................................................", " ....................................................", " ....................................................", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,t,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, "] self.assert_quality_recalibrated_in_output_bam(reference, bam_spec, bam_spec) def test_should_recalibrate_around_snp_on_two_forward_strands_out_of_nine(self): reference = "ATCTAATAGCATCTAATAGCTAGCATCCGTAACAGCAATATCGCGCGTATTATTTATTTAT" input_bam = [ "..............................T..............................", " .....................T..............................", " ....................................................", " ....................................................", " ....................................................", " ....................................................", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, "] output_bam = [ "..............................T..............................", " 000000000000000000000000000000000000000000", " .....................T..............................", " 0000000000000000000000000000000000000000", " ....................................................", " ....................................................", " ....................................................", " ....................................................", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, "] self.assert_quality_recalibrated_in_output_bam(reference, input_bam, output_bam) def 
test_should_recalibrate_snp_on_one_forward_and_reverse_strand_out_of_nine(self): reference = "ATCTAATAGCATCTAATAGCTAGCATCCGTAACAGCAATATCGCGCGTATTATTTATTTAT" input_bam = [ "..............................T..............................", " ....................................................", " ....................................................", " ....................................................", " ....................................................", " ....................................................", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,t,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, "] output_bam = [ "..............................T..............................", " 000000000000000000000000000000000000000000", " ....................................................", " ....................................................", " ....................................................", " ....................................................", " ....................................................", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,t,,,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " 0000000000000000000000000000000000000000 ", " ,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, "] self.assert_quality_recalibrated_in_output_bam(reference, input_bam, output_bam) class TestRealDataExamplesFromNA12878(AsciiWecallRunnerTest): def calls_variants_without_recalibration(self, ref, sequence_list, expected_ascii_haplotypes): self.calls_variants( ref, sequence_list, config_dict={"recalibrateBaseQs": False, "overwrite": True}, expected_ascii_haplotypes=expected_ascii_haplotypes ) def calls_variants_with_recalibration(self, ref, sequence_list, expected_ascii_haplotypes): self.calls_variants( ref, sequence_list, config_dict={"recalibrateBaseQs": True, "overwrite": True}, expected_ascii_haplotypes=expected_ascii_haplotypes ) def 
calls_variants_with_and_without_recalibration(self, ref, sequence_list, expected_ascii_haplotypes): self.calls_variants_with_recalibration( ref, sequence_list, expected_ascii_haplotypes) self.calls_variants_without_recalibration( ref, sequence_list, expected_ascii_haplotypes) def test_calls_two_good_snps_with_and_without_recalibration(self): self.calls_variants_with_and_without_recalibration( "ACGCCCCCTGCAAAAACTACTAAAAA", [".T........................", ".T........................", "...........C..............", "...........C.............."], [".T........................", # Expected calls "...........C.............."] ) @expectedFailure def test_calls_false_positive_snp_with_and_without_recalibration(self): self.calls_variants_without_recalibration( "AGTGCCTGTTGCAAACTTAAAGTAT**********AA**********TAAAATAAA**********ATAAATAAAAAAAAATAAAAAAAAGAATA", [",,,,,,,,,,, ..........**********..**********.........**********.............................", "................. ......**********..**********.........**********.............................", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,,, ...**********.............................", ".........................**********..**********..... ..**********.............................", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,,,,,,,, ,,,,,,,,,,,,,,,,,,,,,,,", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,,,,,,,,,**********,,,,, ..................", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,,,,,,,,,**********,,,,,,,, .................", "...............T.........**********G.**********.........**********.............. 
...........", " 1 1 ", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,,,,,,,,,**********,,,,,,,,,,,,,,,,, ..........", ",,,t,,,,,,,,,,,,,,,,,,,,,**********,,**********,,,,,,,,,**********,,,,, ................", " 1 ", ",,,,,,,,,,,,,,,,,,,,,,,,,aataaaataa,,**********,,,,,,,,,**********,,,,,,,,,,,,, ........", " 3333333333 ", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,, ,,,,,", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,,,,,,,,,**********,,,,,,,,,,,,,,,,,,,,,,,,,,,,,", ",,,,,,,,,,,,,,,,,,,,,,,,,aataaaataa,,**********,,,,,,,,,**********,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " 3333333333 ", ",,,,,,,,,,,,,,,,,,,,,,,,,aataaaataa,,**********,,,,,,,,,**********,,,,,,,,,,,,,,,,,,,,,,,,,,, ", " 3333333333 ", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,,,,,,,,,**********,,,,,,,,,,,,,,,,,,,,,,,,,,,,,", ",,aa,,,,,,,,,,,,,,,,,a,,,aataaaataa,,**********,,,,,,,,,**********,,,,,g,,,,,,,,,,,,,,,,,,,,,, ", " 11 1 3333333333 1 ", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,taaaataaac,,,,,,,,,**********,,,,,,,,,,,,,,,,,,,,,,,,,,,,,", " 3333333333 ", ",,,,,,,,,,,,,,,,,,,,,,,,,aataaaataa,,**********,,,,,,,,,**********,,,,,,,,,,,,,,,,,,,,,,,,,,,,,", " 3333333333 ", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,,,,,,,,,**********,,,,,,,,,,,,,,,,,,,,,,,,,,,,,", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,,,,,,,,,**********,,,,,,,,,,,,,,,,,,,,,,,,,,,,,", ",,,,,,,,,,,,,,,,,,,,,,,,,**********,,**********,,,,,,,,,ataaaataaa,,,,,,,,,,,,,,,,,,,,,,,,,,,,,", " 3333333333 ", ".........................**********..**********.........**********.....AT....T............ 
,,", " 1 ", ".........................**********..**********.........**********.............................", ".........................**********..**********.........**********.....AT....T................."], [".........................**********..**********.........**********.....AT......................", # Expected calls # noqa ".........................AATAAAATAA..**********.........**********.....AT......................"] # Expected calls # noqa )
65.722727
137
0.254859
504
14,459
6.861111
0.198413
0.036437
0.059861
0.067091
0.747831
0.710816
0.64546
0.614228
0.586466
0.562175
0
0.02575
0.298983
14,459
219
138
66.022831
0.31541
0.007054
0
0.631016
0
0
0.604878
0.455749
0
0
0
0
0.042781
1
0.069519
false
0
0.016043
0
0.112299
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
9eca174bbb96c3698dc1e23f38d14eee2c1a29a5
1,699
py
Python
Dragan_Dorin_Alexandru/api/app.py
RazvanBalau/parallel-2020
bd9c0dea6cc70e167320f64632d7a235522dfdb3
[ "MIT" ]
null
null
null
Dragan_Dorin_Alexandru/api/app.py
RazvanBalau/parallel-2020
bd9c0dea6cc70e167320f64632d7a235522dfdb3
[ "MIT" ]
null
null
null
Dragan_Dorin_Alexandru/api/app.py
RazvanBalau/parallel-2020
bd9c0dea6cc70e167320f64632d7a235522dfdb3
[ "MIT" ]
23
2020-01-15T15:02:39.000Z
2020-01-15T17:23:03.000Z
from flask import Flask from worker import celery import celery.states as states import csv import time app = Flask(__name__) def chunker_list(seq, size): return (seq[i::size] for i in range(size)) @app.route('/correct_addresses/<int:number_of_tasks>') def correct_addresses(number_of_tasks: int) -> str: dataset = ["Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catel", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", "Catels", ] dataset = list(chunker_list(dataset, number_of_tasks)) celery.send_task('tasks.correct_addresses', args=[number_of_tasks, dataset], kwargs={}) return str(number_of_tasks) + " tasks started !" @app.route('/check/<string:task_id>') def check_task(task_id: str) -> str: res = celery.AsyncResult(task_id) if res.state == states.PENDING: return res.state else: return str(res.result)
65.346154
966
0.637434
209
1,699
5.066986
0.22488
0.424929
0.566572
0.661001
0.519358
0.519358
0.519358
0.519358
0.519358
0.519358
0
0
0.130665
1,699
25
967
67.96
0.716994
0
0
0
0
0
0.383755
0.050618
0
0
0
0
0
1
0.142857
false
0
0.238095
0.047619
0.571429
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
9edebf441b9d33147c13ec2faedae7abbe689c00
96
py
Python
tests/test_intervalmap.py
marciogameiro/intervalmap
2ae87100dc3954a67e62d30f1f7882ea40a0e326
[ "MIT" ]
null
null
null
tests/test_intervalmap.py
marciogameiro/intervalmap
2ae87100dc3954a67e62d30f1f7882ea40a0e326
[ "MIT" ]
null
null
null
tests/test_intervalmap.py
marciogameiro/intervalmap
2ae87100dc3954a67e62d30f1f7882ea40a0e326
[ "MIT" ]
null
null
null
# test_intervalmap.py # Marcio Gameiro # 2021-01-05 # MIT LICENSE import intervalmap as intmap
13.714286
28
0.770833
14
96
5.214286
0.928571
0
0
0
0
0
0
0
0
0
0
0.098765
0.15625
96
6
29
16
0.802469
0.59375
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
730191f687faf0fb84d6fd6f958f9a55ee8c94ac
93
py
Python
src/sql_alchemy/02_final_svc5_data_auto_service/restful_auto_service/data/sqlalchemy_base.py
turing4ever/restful-services-in-pyramid
953d8415f37c0d3d0040f35f9b139f45339939bc
[ "MIT" ]
58
2017-02-16T18:17:00.000Z
2021-11-12T02:18:32.000Z
src/sql_alchemy/02_final_svc5_data_auto_service/restful_auto_service/data/sqlalchemy_base.py
imbi7py/restful-services-in-pyramid
8521dbdb186513f498752048699bc9379bba64f5
[ "MIT" ]
7
2017-08-31T00:37:28.000Z
2019-09-08T16:04:45.000Z
src/sql_alchemy/02_final_svc5_data_auto_service/restful_auto_service/data/sqlalchemy_base.py
imbi7py/restful-services-in-pyramid
8521dbdb186513f498752048699bc9379bba64f5
[ "MIT" ]
36
2017-08-24T19:53:32.000Z
2021-11-12T02:18:33.000Z
from sqlalchemy.ext.declarative import declarative_base SqlAlchemyBase = declarative_base()
23.25
55
0.860215
10
93
7.8
0.7
0.384615
0
0
0
0
0
0
0
0
0
0
0.086022
93
3
56
31
0.917647
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
731281ec640ebe7f98b3e4310429e5853297d28c
244
py
Python
rippl/legislature/admin.py
gnmerritt/dailyrippl
9a0f9615ba597a475dbd6305b589827cb2d97b03
[ "MIT" ]
6
2016-12-03T20:30:43.000Z
2017-01-10T01:50:09.000Z
rippl/legislature/admin.py
gnmerritt/dailyrippl
9a0f9615ba597a475dbd6305b589827cb2d97b03
[ "MIT" ]
24
2016-11-30T02:31:13.000Z
2020-02-25T22:47:27.000Z
rippl/legislature/admin.py
gnmerritt/dailyrippl
9a0f9615ba597a475dbd6305b589827cb2d97b03
[ "MIT" ]
1
2016-12-25T21:42:31.000Z
2016-12-25T21:42:31.000Z
from django.contrib import admin from . import models admin.site.register(models.State) admin.site.register(models.District) admin.site.register(models.Representative) admin.site.register(models.Term) admin.site.register(models.ContactInfo)
22.181818
42
0.82377
33
244
6.090909
0.393939
0.223881
0.422886
0.572139
0
0
0
0
0
0
0
0
0.065574
244
10
43
24.4
0.881579
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.285714
0
0.285714
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
733ac65472342a53c51bb203028ce20ee9757d52
3,537
py
Python
python/sparkdl/transformers/keras_applications.py
alonsoir/spark-deep-learning
3f668d9b4a0aa2ef6fe05df5bf5c1d705cd2530d
[ "Apache-2.0" ]
54
2017-10-12T04:42:18.000Z
2021-08-24T08:47:03.000Z
python/sparkdl/transformers/keras_applications.py
alonsoir/spark-deep-learning
3f668d9b4a0aa2ef6fe05df5bf5c1d705cd2530d
[ "Apache-2.0" ]
null
null
null
python/sparkdl/transformers/keras_applications.py
alonsoir/spark-deep-learning
3f668d9b4a0aa2ef6fe05df5bf5c1d705cd2530d
[ "Apache-2.0" ]
17
2017-10-12T07:34:10.000Z
2020-03-12T12:25:25.000Z
# Copyright 2017 Databricks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from abc import ABCMeta, abstractmethod import keras.backend as K from keras.applications import inception_v3, xception import tensorflow as tf from sparkdl.transformers.utils import (imageInputPlaceholder, InceptionV3Constants) """ Essentially a factory function for getting the correct KerasApplicationModel class for the network name. """ def getKerasApplicationModel(name): try: return KERAS_APPLICATION_MODELS[name]() except KeyError: raise ValueError("%s is not a supported model. Supported models: %s" % (name, ', '.join(KERAS_APPLICATION_MODELS.keys()))) class KerasApplicationModel: __metaclass__ = ABCMeta def getModelData(self, featurize): sess = tf.Session() with sess.as_default(): K.set_learning_phase(0) inputImage = imageInputPlaceholder(nChannels=3) preprocessed = self.preprocess(inputImage) model = self.model(preprocessed, featurize) return dict(inputTensorName=inputImage.name, outputTensorName=model.output.name, session=sess, inputTensorSize=self.inputShape(), outputMode="vector") @abstractmethod def preprocess(self, inputImage): pass @abstractmethod def model(self, preprocessed, featurize): pass @abstractmethod def inputShape(self): pass def _testPreprocess(self, inputImage): """ For testing only. The preprocess function to be called before kerasModel.predict(). """ return self.preprocess(inputImage) @abstractmethod def _testKerasModel(self, include_top): """ For testing only. 
The keras model object to compare to. """ pass class InceptionV3Model(KerasApplicationModel): def preprocess(self, inputImage): return inception_v3.preprocess_input(inputImage) def model(self, preprocessed, featurize): return inception_v3.InceptionV3(input_tensor=preprocessed, weights="imagenet", include_top=(not featurize)) def inputShape(self): return InceptionV3Constants.INPUT_SHAPE def _testKerasModel(self, include_top): return inception_v3.InceptionV3(weights="imagenet", include_top=include_top) class XceptionModel(KerasApplicationModel): def preprocess(self, inputImage): return xception.preprocess_input(inputImage) def model(self, preprocessed, featurize): return xception.Xception(input_tensor=preprocessed, weights="imagenet", include_top=(not featurize)) def inputShape(self): return (299, 299) def _testKerasModel(self, include_top): return xception.Xception(weights="imagenet", include_top=include_top) KERAS_APPLICATION_MODELS = { "InceptionV3": InceptionV3Model, "Xception": XceptionModel }
31.300885
91
0.685327
373
3,537
6.404826
0.404826
0.037673
0.036836
0.041859
0.255337
0.228548
0.123064
0.123064
0.123064
0.069485
0
0.009989
0.235793
3,537
112
92
31.580357
0.873844
0.195646
0
0.349206
0
0
0.040419
0
0
0
0
0
0
1
0.238095
false
0.063492
0.079365
0.126984
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
733f51061eb2713e3c023fe22c3f5f6e897b10a9
213
py
Python
listener/normal/stats/urls.py
andymckay/arecibo
eb6787ea0a276047ef5add2df67a4dd051e5c961
[ "Apache-2.0" ]
6
2016-01-26T04:47:52.000Z
2022-01-24T19:55:04.000Z
listener/normal/stats/urls.py
andymckay/arecibo
eb6787ea0a276047ef5add2df67a4dd051e5c961
[ "Apache-2.0" ]
6
2017-02-12T05:11:25.000Z
2017-02-12T05:12:15.000Z
listener/normal/stats/urls.py
andymckay/arecibo
eb6787ea0a276047ef5add2df67a4dd051e5c961
[ "Apache-2.0" ]
2
2015-12-09T22:37:58.000Z
2021-09-09T17:04:33.000Z
from django.conf.urls.defaults import * urlpatterns = patterns('', url(r'^$', 'stats.views.stats_view', name="stats-view"), url(r'^view/(?P<key>[\w-]+)/$', 'stats.views.stats_view', name="stats-view"), )
30.428571
81
0.629108
30
213
4.4
0.566667
0.272727
0.227273
0.287879
0.484848
0.484848
0.484848
0
0
0
0
0
0.107981
213
6
82
35.5
0.694737
0
0
0
0
0
0.41784
0.314554
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
b43daadc65f2076c7fc8d0b65301e47cddc4b0e8
186
py
Python
argyle/tests/__init__.py
mlavin/argyle
92cc6e1dd9b8e7cb41c5098a79d05e14b8243d72
[ "BSD-2-Clause" ]
6
2015-11-05T08:53:00.000Z
2020-03-11T14:27:00.000Z
argyle/tests/__init__.py
mlavin/argyle
92cc6e1dd9b8e7cb41c5098a79d05e14b8243d72
[ "BSD-2-Clause" ]
1
2017-12-18T07:50:47.000Z
2017-12-18T07:50:47.000Z
argyle/tests/__init__.py
mlavin/argyle
92cc6e1dd9b8e7cb41c5098a79d05e14b8243d72
[ "BSD-2-Clause" ]
null
null
null
import os from .utils import unittest def main(): suite = unittest.loader.defaultTestLoader.discover(os.path.dirname(__file__)) unittest.TextTestRunner(verbosity=2).run(suite)
23.25
81
0.768817
23
186
6.043478
0.782609
0
0
0
0
0
0
0
0
0
0
0.006098
0.11828
186
7
82
26.571429
0.841463
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
b44bc64c35e9214f1257cb65b012e32cd37b0036
51,098
py
Python
tests/_andre/test_perf.py
mehdisadeghi/saga-python
19950ec1a4a0409b3c102eb60fc18ec91d578ca6
[ "MIT" ]
null
null
null
tests/_andre/test_perf.py
mehdisadeghi/saga-python
19950ec1a4a0409b3c102eb60fc18ec91d578ca6
[ "MIT" ]
null
null
null
tests/_andre/test_perf.py
mehdisadeghi/saga-python
19950ec1a4a0409b3c102eb60fc18ec91d578ca6
[ "MIT" ]
null
null
null
__author__ = "Andre Merzky" __copyright__ = "Copyright 2012-2013, The SAGA Project" __license__ = "MIT" # import gc import os import sys import time import saga import pprint import cProfile import subprocess import threading def create_service (url, services) : service = saga.job.Service (url) # print "%s : %s" % (service.get_url (), id(service)) services.append (service) def run_jobs (service, n_jobs) : jd = saga.job.Description () jd.executable = '/bin/sleep' jd.arguments = ['1'] for id in range (1, n_jobs+1) : tmp_j = service.create_job (jd) tmp_j.run () # print "id: %5d : %s [%s]" % (id, tmp_j.id, tmp_j.get_state ()) # time.sleep (5) # time.sleep (1) # sys.stdout.write ('\n') # del (service) def perf (n_jobs, tuples) : try : s = saga.Session () targets = "" urls = [] threads = [] services = [] n_services = 0 for (n, url) in tuples : for i in range (0, n) : urls.append (url) n_services += n targets += "%d*%s " % (n, url) for url in urls : thread = threading.Thread (target=create_service, args=[url, services]) thread.start () threads.append (thread) for thread in threads : thread.join () threads = [] start = time.time () for service in services : thread = threading.Thread (target=run_jobs, args=[service, n_jobs // n_services]) thread.start () threads.append (thread) for thread in threads : thread.join () stop = time.time () seconds = stop - start rate = n_jobs / (seconds) print "%10s %5s %5.2f %7.2f %s" % (n_services, n_jobs, seconds, rate, targets) except saga.exceptions.SagaException as e : print "Exception: ==========\n%s" % e.get_message () print "%s=====================" % e.get_traceback () # "xxxxxxxxxx xxxxxx xxxxxx xxxxxxxx xxxxxx xxxxxxx..." 
print "n_services n_jobs time jobs/sec memory targets" def main () : perf (100, [(1, 'fork://localhost/')]) # cProfile.run('main()', 'test_perf.prof') # perf (10000, [(10, 'ssh://merzky@localhost/')] + \ # [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')] + \ # [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf (100, [(2, 'fork://localhost/')]) # perf (100, [(2, 'ssh://merzky@localhost/')]) # perf (1000, [( 1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1000, [( 2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1000, [( 3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1000, [( 4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1000, [( 5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1000, [( 6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1000, [( 7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1000, [( 8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1000, [( 9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1000, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (1, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf (1, [(1, 'gsissh://trestles-login.sdsc.edu/')]) # perf (1, [(1, 'ssh://india.futuregrid.org/')]) # perf (1, [(1, 'ssh://sierra.futuregrid.org/')]) perf (100, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) perf (100, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/'), (10, 'ssh://repex1.tacc.utexas.edu/')]) perf (100, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/'), (10, 'ssh://repex1.tacc.utexas.edu/'), (10, 'gsissh://trestles-login.sdsc.edu/')]) perf (100, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/'), (10, 'ssh://repex1.tacc.utexas.edu/'), (10, 'gsissh://trestles-login.sdsc.edu/'), (10, 'ssh://india.futuregrid.org/')]) perf (100, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/'), (10, 'ssh://repex1.tacc.utexas.edu/'), (10, 'gsissh://trestles-login.sdsc.edu/'), (10, 'ssh://india.futuregrid.org/'), (10, 'ssh://sierra.futuregrid.org/')]) # perf ( 100, [(1, 'fork://localhost/')]) # perf ( 100, [(1, 'ssh://merzky@localhost/')]) # perf ( 100, 
[(10, 'fork://localhost/')]) # perf ( 100, [(10, 'ssh://merzky@localhost/')]) # perf ( 1, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(1, 'gsissh://stampede.tacc.utexas.edu/')]) # perf ( 10, [(10, 'fork://localhost/')]) # perf ( 10, [(10, 'fork://localhost/')]) # perf ( 10, [(10, 'fork://localhost/')]) # # perf ( 10, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 10, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 10, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # # perf ( 10, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 10, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 10, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # # perf ( 10, [(10, 'ssh://localhost/')]) # perf ( 10, [(10, 'ssh://localhost/')]) # perf ( 10, [(10, 'ssh://localhost/')]) # # perf ( 10, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 10, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 10, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 100, [(1, 'ssh://merzky@localhost/')]) # perf ( 0, [(1, 'fork://localhost/')]) # perf ( 1, [(1, 'fork://localhost/')]) # perf ( 2, [(1, 'fork://localhost/')]) # perf ( 4, [(1, 'fork://localhost/')]) # perf ( 8, [(1, 'fork://localhost/')]) # perf ( 16, [(1, 'fork://localhost/')]) # perf ( 32, [(1, 'fork://localhost/')]) # perf ( 64, [(1, 'fork://localhost/')]) # perf ( 128, [(1, 'fork://localhost/')]) # perf ( 256, [(1, 'fork://localhost/')]) # perf ( 512, [(1, 'fork://localhost/')]) # perf ( 1024, [(1, 'fork://localhost/')]) # perf ( 2048, [(1, 'fork://localhost/')]) # perf ( 4096, [(1, 'fork://localhost/')]) # perf ( 8192, [(1, 'fork://localhost/')]) # perf (16384, [(1, 'fork://localhost/')]) # perf (32768, [(1, 'fork://localhost/')]) # perf ( 0, [(2, 'fork://localhost/')]) # perf ( 1, [(2, 'fork://localhost/')]) # perf ( 2, [(2, 'fork://localhost/')]) # perf ( 4, [(2, 'fork://localhost/')]) # perf ( 8, [(2, 'fork://localhost/')]) # perf ( 16, [(2, 'fork://localhost/')]) 
# perf ( 32, [(2, 'fork://localhost/')]) # perf ( 64, [(2, 'fork://localhost/')]) # perf ( 128, [(2, 'fork://localhost/')]) # perf ( 256, [(2, 'fork://localhost/')]) # perf ( 512, [(2, 'fork://localhost/')]) # perf ( 1024, [(2, 'fork://localhost/')]) # perf ( 2048, [(2, 'fork://localhost/')]) # perf ( 4096, [(2, 'fork://localhost/')]) # perf ( 8192, [(2, 'fork://localhost/')]) # perf (16384, [(2, 'fork://localhost/')]) # perf (32768, [(2, 'fork://localhost/')]) # perf ( 0, [(3, 'fork://localhost/')]) # perf ( 1, [(3, 'fork://localhost/')]) # perf ( 2, [(3, 'fork://localhost/')]) # perf ( 4, [(3, 'fork://localhost/')]) # perf ( 8, [(3, 'fork://localhost/')]) # perf ( 16, [(3, 'fork://localhost/')]) # perf ( 32, [(3, 'fork://localhost/')]) # perf ( 64, [(3, 'fork://localhost/')]) # perf ( 128, [(3, 'fork://localhost/')]) # perf ( 256, [(3, 'fork://localhost/')]) # perf ( 512, [(3, 'fork://localhost/')]) # perf ( 1024, [(3, 'fork://localhost/')]) # perf ( 2048, [(3, 'fork://localhost/')]) # perf ( 4096, [(3, 'fork://localhost/')]) # perf ( 8192, [(3, 'fork://localhost/')]) # perf (16384, [(3, 'fork://localhost/')]) # perf (32768, [(3, 'fork://localhost/')]) # perf ( 0, [(4, 'fork://localhost/')]) # perf ( 1, [(4, 'fork://localhost/')]) # perf ( 2, [(4, 'fork://localhost/')]) # perf ( 4, [(4, 'fork://localhost/')]) # perf ( 8, [(4, 'fork://localhost/')]) # perf ( 16, [(4, 'fork://localhost/')]) # perf ( 32, [(4, 'fork://localhost/')]) # perf ( 64, [(4, 'fork://localhost/')]) # perf ( 128, [(4, 'fork://localhost/')]) # perf ( 256, [(4, 'fork://localhost/')]) # perf ( 512, [(4, 'fork://localhost/')]) # perf ( 1024, [(4, 'fork://localhost/')]) # perf ( 2048, [(4, 'fork://localhost/')]) # perf ( 4096, [(4, 'fork://localhost/')]) # perf ( 8192, [(4, 'fork://localhost/')]) # perf (16384, [(4, 'fork://localhost/')]) # perf (32768, [(4, 'fork://localhost/')]) # perf ( 0, [(5, 'fork://localhost/')]) # perf ( 1, [(5, 'fork://localhost/')]) # perf ( 2, [(5, 'fork://localhost/')]) 
# perf ( 4, [(5, 'fork://localhost/')]) # perf ( 8, [(5, 'fork://localhost/')]) # perf ( 16, [(5, 'fork://localhost/')]) # perf ( 32, [(5, 'fork://localhost/')]) # perf ( 64, [(5, 'fork://localhost/')]) # perf ( 128, [(5, 'fork://localhost/')]) # perf ( 256, [(5, 'fork://localhost/')]) # perf ( 512, [(5, 'fork://localhost/')]) # perf ( 1024, [(5, 'fork://localhost/')]) # perf ( 2048, [(5, 'fork://localhost/')]) # perf ( 4096, [(5, 'fork://localhost/')]) # perf ( 8192, [(5, 'fork://localhost/')]) # perf (16384, [(5, 'fork://localhost/')]) # perf (32768, [(5, 'fork://localhost/')]) # perf ( 0, [(6, 'fork://localhost/')]) # perf ( 1, [(6, 'fork://localhost/')]) # perf ( 2, [(6, 'fork://localhost/')]) # perf ( 4, [(6, 'fork://localhost/')]) # perf ( 8, [(6, 'fork://localhost/')]) # perf ( 16, [(6, 'fork://localhost/')]) # perf ( 32, [(6, 'fork://localhost/')]) # perf ( 64, [(6, 'fork://localhost/')]) # perf ( 128, [(6, 'fork://localhost/')]) # perf ( 256, [(6, 'fork://localhost/')]) # perf ( 512, [(6, 'fork://localhost/')]) # perf ( 1024, [(6, 'fork://localhost/')]) # perf ( 2048, [(6, 'fork://localhost/')]) # perf ( 4096, [(6, 'fork://localhost/')]) # perf ( 8192, [(6, 'fork://localhost/')]) # perf (16384, [(6, 'fork://localhost/')]) # perf (32768, [(6, 'fork://localhost/')]) # perf ( 0, [(7, 'fork://localhost/')]) # perf ( 1, [(7, 'fork://localhost/')]) # perf ( 2, [(7, 'fork://localhost/')]) # perf ( 4, [(7, 'fork://localhost/')]) # perf ( 8, [(7, 'fork://localhost/')]) # perf ( 16, [(7, 'fork://localhost/')]) # perf ( 32, [(7, 'fork://localhost/')]) # perf ( 64, [(7, 'fork://localhost/')]) # perf ( 128, [(7, 'fork://localhost/')]) # perf ( 256, [(7, 'fork://localhost/')]) # perf ( 512, [(7, 'fork://localhost/')]) # perf ( 1024, [(7, 'fork://localhost/')]) # perf ( 2048, [(7, 'fork://localhost/')]) # perf ( 4096, [(7, 'fork://localhost/')]) # perf ( 8192, [(7, 'fork://localhost/')]) # perf (16384, [(7, 'fork://localhost/')]) # perf (32768, [(7, 
'fork://localhost/')]) # perf ( 0, [(8, 'fork://localhost/')]) # perf ( 1, [(8, 'fork://localhost/')]) # perf ( 2, [(8, 'fork://localhost/')]) # perf ( 4, [(8, 'fork://localhost/')]) # perf ( 8, [(8, 'fork://localhost/')]) # perf ( 16, [(8, 'fork://localhost/')]) # perf ( 32, [(8, 'fork://localhost/')]) # perf ( 64, [(8, 'fork://localhost/')]) # perf ( 128, [(8, 'fork://localhost/')]) # perf ( 256, [(8, 'fork://localhost/')]) # perf ( 512, [(8, 'fork://localhost/')]) # perf ( 1024, [(8, 'fork://localhost/')]) # perf ( 2048, [(8, 'fork://localhost/')]) # perf ( 4096, [(8, 'fork://localhost/')]) # perf ( 8192, [(8, 'fork://localhost/')]) # perf (16384, [(8, 'fork://localhost/')]) # perf (32768, [(8, 'fork://localhost/')]) # perf ( 0, [(9, 'fork://localhost/')]) # perf ( 1, [(9, 'fork://localhost/')]) # perf ( 2, [(9, 'fork://localhost/')]) # perf ( 4, [(9, 'fork://localhost/')]) # perf ( 8, [(9, 'fork://localhost/')]) # perf ( 16, [(9, 'fork://localhost/')]) # perf ( 32, [(9, 'fork://localhost/')]) # perf ( 64, [(9, 'fork://localhost/')]) # perf ( 128, [(9, 'fork://localhost/')]) # perf ( 256, [(9, 'fork://localhost/')]) # perf ( 512, [(9, 'fork://localhost/')]) # perf ( 1024, [(9, 'fork://localhost/')]) # perf ( 2048, [(9, 'fork://localhost/')]) # perf ( 4096, [(9, 'fork://localhost/')]) # perf ( 8192, [(9, 'fork://localhost/')]) # perf (16384, [(9, 'fork://localhost/')]) # perf (32768, [(9, 'fork://localhost/')]) # perf ( 0, [(10, 'fork://localhost/')]) # perf ( 1, [(10, 'fork://localhost/')]) # perf ( 2, [(10, 'fork://localhost/')]) # perf ( 4, [(10, 'fork://localhost/')]) # perf ( 8, [(10, 'fork://localhost/')]) # perf ( 16, [(10, 'fork://localhost/')]) # perf ( 32, [(10, 'fork://localhost/')]) # perf ( 64, [(10, 'fork://localhost/')]) # perf ( 128, [(10, 'fork://localhost/')]) # perf ( 256, [(10, 'fork://localhost/')]) # perf ( 512, [(10, 'fork://localhost/')]) # perf ( 1024, [(10, 'fork://localhost/')]) # perf ( 2048, [(10, 'fork://localhost/')]) # perf ( 4096, 
[(10, 'fork://localhost/')]) # perf ( 8192, [(10, 'fork://localhost/')]) # perf (16384, [(10, 'fork://localhost/')]) # perf (32768, [(10, 'fork://localhost/')]) # # perf ( 0, [(1, 'ssh://merzky@localhost/')]) # perf ( 1, [(1, 'ssh://merzky@localhost/')]) # perf ( 2, [(1, 'ssh://merzky@localhost/')]) # perf ( 4, [(1, 'ssh://merzky@localhost/')]) # perf ( 8, [(1, 'ssh://merzky@localhost/')]) # perf ( 16, [(1, 'ssh://merzky@localhost/')]) # perf ( 32, [(1, 'ssh://merzky@localhost/')]) # perf ( 64, [(1, 'ssh://merzky@localhost/')]) # perf ( 128, [(1, 'ssh://merzky@localhost/')]) # perf ( 256, [(1, 'ssh://merzky@localhost/')]) # perf ( 512, [(1, 'ssh://merzky@localhost/')]) # perf ( 1024, [(1, 'ssh://merzky@localhost/')]) # perf ( 2048, [(1, 'ssh://merzky@localhost/')]) # perf ( 4096, [(1, 'ssh://merzky@localhost/')]) # perf ( 8192, [(1, 'ssh://merzky@localhost/')]) # perf (16384, [(1, 'ssh://merzky@localhost/')]) # perf (32768, [(1, 'ssh://merzky@localhost/')]) # perf ( 0, [(2, 'ssh://merzky@localhost/')]) # perf ( 1, [(2, 'ssh://merzky@localhost/')]) # perf ( 2, [(2, 'ssh://merzky@localhost/')]) # perf ( 4, [(2, 'ssh://merzky@localhost/')]) # perf ( 8, [(2, 'ssh://merzky@localhost/')]) # perf ( 16, [(2, 'ssh://merzky@localhost/')]) # perf ( 32, [(2, 'ssh://merzky@localhost/')]) # perf ( 64, [(2, 'ssh://merzky@localhost/')]) # perf ( 128, [(2, 'ssh://merzky@localhost/')]) # perf ( 256, [(2, 'ssh://merzky@localhost/')]) # perf ( 512, [(2, 'ssh://merzky@localhost/')]) # perf ( 1024, [(2, 'ssh://merzky@localhost/')]) # perf ( 2048, [(2, 'ssh://merzky@localhost/')]) # perf ( 4096, [(2, 'ssh://merzky@localhost/')]) # perf ( 8192, [(2, 'ssh://merzky@localhost/')]) # perf (16384, [(2, 'ssh://merzky@localhost/')]) # perf (32768, [(2, 'ssh://merzky@localhost/')]) # perf ( 0, [(3, 'ssh://merzky@localhost/')]) # perf ( 1, [(3, 'ssh://merzky@localhost/')]) # perf ( 2, [(3, 'ssh://merzky@localhost/')]) # perf ( 4, [(3, 'ssh://merzky@localhost/')]) # perf ( 8, [(3, 
'ssh://merzky@localhost/')]) # perf ( 16, [(3, 'ssh://merzky@localhost/')]) # perf ( 32, [(3, 'ssh://merzky@localhost/')]) # perf ( 64, [(3, 'ssh://merzky@localhost/')]) # perf ( 128, [(3, 'ssh://merzky@localhost/')]) # perf ( 256, [(3, 'ssh://merzky@localhost/')]) # perf ( 512, [(3, 'ssh://merzky@localhost/')]) # perf ( 1024, [(3, 'ssh://merzky@localhost/')]) # perf ( 2048, [(3, 'ssh://merzky@localhost/')]) # perf ( 4096, [(3, 'ssh://merzky@localhost/')]) # perf ( 8192, [(3, 'ssh://merzky@localhost/')]) # perf (16384, [(3, 'ssh://merzky@localhost/')]) # perf (32768, [(3, 'ssh://merzky@localhost/')]) # perf ( 0, [(4, 'ssh://merzky@localhost/')]) # perf ( 1, [(4, 'ssh://merzky@localhost/')]) # perf ( 2, [(4, 'ssh://merzky@localhost/')]) # perf ( 4, [(4, 'ssh://merzky@localhost/')]) # perf ( 8, [(4, 'ssh://merzky@localhost/')]) # perf ( 16, [(4, 'ssh://merzky@localhost/')]) # perf ( 32, [(4, 'ssh://merzky@localhost/')]) # perf ( 64, [(4, 'ssh://merzky@localhost/')]) # perf ( 128, [(4, 'ssh://merzky@localhost/')]) # perf ( 256, [(4, 'ssh://merzky@localhost/')]) # perf ( 512, [(4, 'ssh://merzky@localhost/')]) # perf ( 1024, [(4, 'ssh://merzky@localhost/')]) # perf ( 2048, [(4, 'ssh://merzky@localhost/')]) # perf ( 4096, [(4, 'ssh://merzky@localhost/')]) # perf ( 8192, [(4, 'ssh://merzky@localhost/')]) # perf (16384, [(4, 'ssh://merzky@localhost/')]) # perf (32768, [(4, 'ssh://merzky@localhost/')]) # perf ( 0, [(5, 'ssh://merzky@localhost/')]) # perf ( 1, [(5, 'ssh://merzky@localhost/')]) # perf ( 2, [(5, 'ssh://merzky@localhost/')]) # perf ( 4, [(5, 'ssh://merzky@localhost/')]) # perf ( 8, [(5, 'ssh://merzky@localhost/')]) # perf ( 16, [(5, 'ssh://merzky@localhost/')]) # perf ( 32, [(5, 'ssh://merzky@localhost/')]) # perf ( 64, [(5, 'ssh://merzky@localhost/')]) # perf ( 128, [(5, 'ssh://merzky@localhost/')]) # perf ( 256, [(5, 'ssh://merzky@localhost/')]) # perf ( 512, [(5, 'ssh://merzky@localhost/')]) # perf ( 1024, [(5, 'ssh://merzky@localhost/')]) # perf ( 2048, 
[(5, 'ssh://merzky@localhost/')]) # perf ( 4096, [(5, 'ssh://merzky@localhost/')]) # perf ( 8192, [(5, 'ssh://merzky@localhost/')]) # perf (16384, [(5, 'ssh://merzky@localhost/')]) # perf (32768, [(5, 'ssh://merzky@localhost/')]) # perf ( 0, [(6, 'ssh://merzky@localhost/')]) # perf ( 1, [(6, 'ssh://merzky@localhost/')]) # perf ( 2, [(6, 'ssh://merzky@localhost/')]) # perf ( 4, [(6, 'ssh://merzky@localhost/')]) # perf ( 8, [(6, 'ssh://merzky@localhost/')]) # perf ( 16, [(6, 'ssh://merzky@localhost/')]) # perf ( 32, [(6, 'ssh://merzky@localhost/')]) # perf ( 64, [(6, 'ssh://merzky@localhost/')]) # perf ( 128, [(6, 'ssh://merzky@localhost/')]) # perf ( 256, [(6, 'ssh://merzky@localhost/')]) # perf ( 512, [(6, 'ssh://merzky@localhost/')]) # perf ( 1024, [(6, 'ssh://merzky@localhost/')]) # perf ( 2048, [(6, 'ssh://merzky@localhost/')]) # perf ( 4096, [(6, 'ssh://merzky@localhost/')]) # perf ( 8192, [(6, 'ssh://merzky@localhost/')]) # perf (16384, [(6, 'ssh://merzky@localhost/')]) # perf (32768, [(6, 'ssh://merzky@localhost/')]) # perf ( 0, [(7, 'ssh://merzky@localhost/')]) # perf ( 1, [(7, 'ssh://merzky@localhost/')]) # perf ( 2, [(7, 'ssh://merzky@localhost/')]) # perf ( 4, [(7, 'ssh://merzky@localhost/')]) # perf ( 8, [(7, 'ssh://merzky@localhost/')]) # perf ( 16, [(7, 'ssh://merzky@localhost/')]) # perf ( 32, [(7, 'ssh://merzky@localhost/')]) # perf ( 64, [(7, 'ssh://merzky@localhost/')]) # perf ( 128, [(7, 'ssh://merzky@localhost/')]) # perf ( 256, [(7, 'ssh://merzky@localhost/')]) # perf ( 512, [(7, 'ssh://merzky@localhost/')]) # perf ( 1024, [(7, 'ssh://merzky@localhost/')]) # perf ( 2048, [(7, 'ssh://merzky@localhost/')]) # perf ( 4096, [(7, 'ssh://merzky@localhost/')]) # perf ( 8192, [(7, 'ssh://merzky@localhost/')]) # perf (16384, [(7, 'ssh://merzky@localhost/')]) # perf (32768, [(7, 'ssh://merzky@localhost/')]) # perf ( 0, [(8, 'ssh://merzky@localhost/')]) # perf ( 1, [(8, 'ssh://merzky@localhost/')]) # perf ( 2, [(8, 'ssh://merzky@localhost/')]) # perf ( 4, 
[(8, 'ssh://merzky@localhost/')]) # perf ( 8, [(8, 'ssh://merzky@localhost/')]) # perf ( 16, [(8, 'ssh://merzky@localhost/')]) # perf ( 32, [(8, 'ssh://merzky@localhost/')]) # perf ( 64, [(8, 'ssh://merzky@localhost/')]) # perf ( 128, [(8, 'ssh://merzky@localhost/')]) # perf ( 256, [(8, 'ssh://merzky@localhost/')]) # perf ( 512, [(8, 'ssh://merzky@localhost/')]) # perf ( 1024, [(8, 'ssh://merzky@localhost/')]) # perf ( 2048, [(8, 'ssh://merzky@localhost/')]) # perf ( 4096, [(8, 'ssh://merzky@localhost/')]) # perf ( 8192, [(8, 'ssh://merzky@localhost/')]) # perf (16384, [(8, 'ssh://merzky@localhost/')]) # perf (32768, [(8, 'ssh://merzky@localhost/')]) # perf ( 0, [(9, 'ssh://merzky@localhost/')]) # perf ( 1, [(9, 'ssh://merzky@localhost/')]) # perf ( 2, [(9, 'ssh://merzky@localhost/')]) # perf ( 4, [(9, 'ssh://merzky@localhost/')]) # perf ( 8, [(9, 'ssh://merzky@localhost/')]) # perf ( 16, [(9, 'ssh://merzky@localhost/')]) # perf ( 32, [(9, 'ssh://merzky@localhost/')]) # perf ( 64, [(9, 'ssh://merzky@localhost/')]) # perf ( 128, [(9, 'ssh://merzky@localhost/')]) # perf ( 256, [(9, 'ssh://merzky@localhost/')]) # perf ( 512, [(9, 'ssh://merzky@localhost/')]) # perf ( 1024, [(9, 'ssh://merzky@localhost/')]) # perf ( 2048, [(9, 'ssh://merzky@localhost/')]) # perf ( 4096, [(9, 'ssh://merzky@localhost/')]) # perf ( 8192, [(9, 'ssh://merzky@localhost/')]) # perf (16384, [(9, 'ssh://merzky@localhost/')]) # perf (32768, [(9, 'ssh://merzky@localhost/')]) # perf ( 0, [(10, 'ssh://merzky@localhost/')]) # perf ( 1, [(10, 'ssh://merzky@localhost/')]) # perf ( 2, [(10, 'ssh://merzky@localhost/')]) # perf ( 4, [(10, 'ssh://merzky@localhost/')]) # perf ( 8, [(10, 'ssh://merzky@localhost/')]) # perf ( 16, [(10, 'ssh://merzky@localhost/')]) # perf ( 32, [(10, 'ssh://merzky@localhost/')]) # perf ( 64, [(10, 'ssh://merzky@localhost/')]) # perf ( 128, [(10, 'ssh://merzky@localhost/')]) # perf ( 256, [(10, 'ssh://merzky@localhost/')]) # perf ( 512, [(10, 'ssh://merzky@localhost/')]) # 
perf ( 1024, [(10, 'ssh://merzky@localhost/')]) # perf ( 2048, [(10, 'ssh://merzky@localhost/')]) # perf ( 4096, [(10, 'ssh://merzky@localhost/')]) # perf ( 8192, [(10, 'ssh://merzky@localhost/')]) # perf (16384, [(10, 'ssh://merzky@localhost/')]) # perf (32768, [(10, 'ssh://merzky@localhost/')]) # # # perf ( 0, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 16, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 32, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 64, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 128, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 256, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 512, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1024, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2048, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4096, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8192, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (16384, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (32768, [(1, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 0, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 16, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 32, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 64, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 128, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 256, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 512, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1024, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2048, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # 
perf ( 4096, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8192, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (16384, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (32768, [(2, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 0, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 16, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 32, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 64, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 128, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 256, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 512, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1024, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2048, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4096, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8192, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (16384, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (32768, [(3, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 0, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 16, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 32, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 64, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 128, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 256, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 512, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1024, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2048, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4096, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8192, [(4, 
'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (16384, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (32768, [(4, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 0, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 16, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 32, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 64, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 128, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 256, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 512, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1024, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2048, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4096, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8192, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (16384, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (32768, [(5, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 0, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 16, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 32, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 64, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 128, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 256, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 512, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1024, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2048, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4096, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8192, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (16384, [(6, 
'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (32768, [(6, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 0, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 16, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 32, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 64, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 128, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 256, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 512, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1024, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2048, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4096, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8192, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (16384, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (32768, [(7, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 0, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 16, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 32, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 64, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 128, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 256, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 512, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1024, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2048, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4096, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8192, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (16384, [(8, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (32768, [(8, 
'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 0, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 16, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 32, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 64, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 128, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 256, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 512, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1024, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2048, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4096, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8192, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (16384, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (32768, [(9, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 0, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 16, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 32, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 64, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 128, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 256, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 512, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 1024, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 2048, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 4096, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf ( 8192, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (16384, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # perf (32768, [(10, 'ssh://amerzky@cyder.cct.lsu.edu/')]) # # # perf ( 0, [(1, 
'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 16, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 32, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 64, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 128, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 256, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 512, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1024, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2048, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4096, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8192, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf (16384, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf (32768, [(1, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 0, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 16, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 32, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 64, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 128, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 256, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 512, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1024, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2048, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4096, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8192, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf (16384, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf (32768, [(2, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 0, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4, [(3, 
'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 16, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 32, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 64, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 128, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 256, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 512, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1024, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2048, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4096, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8192, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf (16384, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf (32768, [(3, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 0, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 16, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 32, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 64, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 128, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 256, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 512, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1024, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2048, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4096, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8192, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf (16384, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf (32768, [(4, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 0, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 16, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 32, [(5, 
'ssh://repex1.tacc.utexas.edu/')]) # perf ( 64, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 128, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 256, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 512, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1024, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2048, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4096, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8192, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf (16384, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf (32768, [(5, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 0, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 16, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 32, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 64, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 128, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 256, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 512, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1024, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2048, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4096, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8192, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf (16384, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf (32768, [(6, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 0, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 16, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 32, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 64, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 128, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 256, [(7, 
'ssh://repex1.tacc.utexas.edu/')]) # perf ( 512, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1024, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2048, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4096, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8192, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf (16384, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf (32768, [(7, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 0, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 16, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 32, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 64, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 128, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 256, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 512, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1024, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2048, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4096, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8192, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf (16384, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf (32768, [(8, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 0, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 16, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 32, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 64, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 128, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 256, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 512, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1024, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2048, [(9, 
'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4096, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8192, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf (16384, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf (32768, [(9, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 0, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 16, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 32, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 64, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 128, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 256, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 512, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 1024, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 2048, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 4096, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf ( 8192, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf (16384, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # perf (32768, [(10, 'ssh://repex1.tacc.utexas.edu/')]) # # perf ( 0, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 16, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 32, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 64, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 128, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 256, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 512, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1024, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2048, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4096, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8192, [(1, 
'gsissh://ranger.tacc.utexas.edu/')]) # perf (16384, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (32768, [(1, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 0, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 16, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 32, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 64, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 128, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 256, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 512, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1024, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2048, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4096, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8192, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (16384, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (32768, [(2, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 0, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 16, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 32, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 64, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 128, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 256, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 512, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1024, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2048, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4096, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8192, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (16384, [(3, 
'gsissh://ranger.tacc.utexas.edu/')]) # perf (32768, [(3, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 0, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 16, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 32, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 64, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 128, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 256, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 512, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1024, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2048, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4096, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8192, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (16384, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (32768, [(4, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 0, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 16, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 32, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 64, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 128, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 256, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 512, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1024, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2048, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4096, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8192, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (16384, [(5, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (32768, [(5, 
'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 0, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 16, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 32, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 64, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 128, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 256, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 512, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1024, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2048, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4096, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8192, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (16384, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (32768, [(6, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 0, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 16, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 32, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 64, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 128, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 256, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 512, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1024, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2048, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4096, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8192, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (16384, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (32768, [(7, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 0, [(8, 
'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 16, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 32, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 64, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 128, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 256, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 512, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1024, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2048, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4096, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8192, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (16384, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (32768, [(8, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 0, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 16, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 32, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 64, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 128, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 256, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 512, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1024, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2048, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4096, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8192, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (16384, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (32768, [(9, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 0, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1, [(10, 
'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 16, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 32, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 64, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 128, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 256, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 512, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 1024, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 2048, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 4096, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf ( 8192, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (16384, [(10, 'gsissh://ranger.tacc.utexas.edu/')]) # perf (32768, [(10, 'gsissh://ranger.tacc.utexas.edu/')])
49.465634
93
0.525461
6,814
51,098
3.93484
0.019665
0.14046
0.17164
0.221916
0.826235
0.662614
0.655975
0.258466
0.049083
0.047106
0
0.085463
0.16304
51,098
1,032
94
49.513566
0.541469
0.908959
0
0.263889
0
0
0.178629
0.128548
0
0
0
0
0
0
null
null
0
0.111111
null
null
0.069444
0
0
0
null
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
b45cbc2310b2fe5212db3beb0e669c9f10d913b3
60
py
Python
Lesson_5/Plays/lets_to_play.py
Mike030668/Python--learning--UII
4b0a3fe32b1bb4e6f98130aab09b3ab55eca2df4
[ "Apache-2.0" ]
null
null
null
Lesson_5/Plays/lets_to_play.py
Mike030668/Python--learning--UII
4b0a3fe32b1bb4e6f98130aab09b3ab55eca2df4
[ "Apache-2.0" ]
null
null
null
Lesson_5/Plays/lets_to_play.py
Mike030668/Python--learning--UII
4b0a3fe32b1bb4e6f98130aab09b3ab55eca2df4
[ "Apache-2.0" ]
null
null
null
from Lesson_5.Plays import victorina victorina.start_play()
20
36
0.85
9
60
5.444444
0.888889
0
0
0
0
0
0
0
0
0
0
0.018182
0.083333
60
3
37
20
0.872727
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
b486b3ec9fdd13c49a29c9a48bd55d3a962a9108
1,204
py
Python
src/SpiderPi/HiwonderSDK/BuzzerControlDemo.py
peteh/spiderpi
19c4e254979a756acb641b67e944568c7c79520c
[ "MIT" ]
null
null
null
src/SpiderPi/HiwonderSDK/BuzzerControlDemo.py
peteh/spiderpi
19c4e254979a756acb641b67e944568c7c79520c
[ "MIT" ]
null
null
null
src/SpiderPi/HiwonderSDK/BuzzerControlDemo.py
peteh/spiderpi
19c4e254979a756acb641b67e944568c7c79520c
[ "MIT" ]
null
null
null
import time import Board print(''' ********************************************************** ********Function:Hiwonder SpiderPi expansion board, serial servo control routine,buzzer control routine********* ********************************************************** ---------------------------------------------------------- Official website:http://www.hiwonder.com Online mall:https://huaner.tmall.com/ ---------------------------------------------------------- The following commands need to be used in the LX terminal, which can be opened by ctrl+alt+t, or click Click the black LX terminal icon in the upper bar ---------------------------------------------------------- Usage: sudo python3 BuzzerControlDemo.py ---------------------------------------------------------- Version: --V1.1 2020/11/07 ---------------------------------------------------------- Tips: * Press Ctrl+C to close the program, if it fails, please try multiple times! ---------------------------------------------------------- ''') Board.setBuzzer(0) # close Board.setBuzzer(1) # open time.sleep(0.1) # delay Board.setBuzzer(0) # close time.sleep(1) # delay Board.setBuzzer(1) time.sleep(0.5) Board.setBuzzer(0)
33.444444
112
0.446013
117
1,204
4.589744
0.641026
0.130354
0.083799
0.074488
0
0
0
0
0
0
0
0.019248
0.093854
1,204
35
113
34.4
0.472961
0.023256
0
0.433333
0
0.033333
0.833333
0.417949
0
0
0
0
0
1
0
true
0
0.066667
0
0.066667
0.033333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
1
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
b488066a0925814a00202a70c47ee90f52971945
135
py
Python
backend/src/applications/user/get_user_request.py
Seina88/attendance-system
afa7ba64c7fd99623a1c5dd3b09151ade759d715
[ "MIT" ]
2
2021-05-12T14:09:44.000Z
2021-06-19T12:38:33.000Z
backend/src/applications/user/get_user_request.py
Seina88/attendance-system
afa7ba64c7fd99623a1c5dd3b09151ade759d715
[ "MIT" ]
10
2021-05-13T12:09:47.000Z
2021-06-07T13:28:17.000Z
backend/src/applications/user/get_user_request.py
Seina88/attendance-system
afa7ba64c7fd99623a1c5dd3b09151ade759d715
[ "MIT" ]
1
2021-06-17T00:54:04.000Z
2021-06-17T00:54:04.000Z
class GetUserRequest: def __init__(self, id: str, api_token: str) -> None: self.id = id self.api_token = api_token
27
56
0.62963
19
135
4.105263
0.526316
0.307692
0
0
0
0
0
0
0
0
0
0
0.266667
135
4
57
33.75
0.787879
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
81f6bc873588c44cb452c2efc36feed33e7dba7b
2,102
py
Python
pacu/models/awsapi/ec2-instance-connect.py
RyanJarv/Pacu2
27df4bcf296fc8f467d3dc671a47bf9519ce7a24
[ "MIT" ]
1
2022-03-09T14:51:54.000Z
2022-03-09T14:51:54.000Z
pacu/models/awsapi/ec2-instance-connect.py
RyanJarv/Pacu2
27df4bcf296fc8f467d3dc671a47bf9519ce7a24
[ "MIT" ]
null
null
null
pacu/models/awsapi/ec2-instance-connect.py
RyanJarv/Pacu2
27df4bcf296fc8f467d3dc671a47bf9519ce7a24
[ "MIT" ]
null
null
null
# generated by datamodel-codegen: # filename: openapi.yaml # timestamp: 2021-12-31T02:48:35+00:00 from __future__ import annotations from typing import Annotated, Any, Optional from pydantic import BaseModel, Field class AuthException(BaseModel): __root__: Any class InvalidArgsException(AuthException): pass class ServiceException(AuthException): pass class ThrottlingException(AuthException): pass class EC2InstanceNotFoundException(AuthException): pass class SerialConsoleAccessDisabledException(AuthException): pass class EC2InstanceTypeInvalidException(AuthException): pass class SerialConsoleSessionLimitExceededException(AuthException): pass class SerialConsoleSessionUnavailableException(AuthException): pass class AvailabilityZone(BaseModel): __root__: Annotated[ str, Field(max_length=32, min_length=6, regex='^(\\w+-){2,3}\\d+\\w+$') ] class InstanceId(BaseModel): __root__: Annotated[str, Field(max_length=32, min_length=10, regex='^i-[a-f0-9]+$')] class InstanceOSUser(BaseModel): __root__: Annotated[ str, Field( max_length=32, min_length=1, regex='^[A-Za-z_][A-Za-z0-9\\@\\._-]{0,30}[A-Za-z0-9\\$_-]?$', ), ] class RequestId(BaseModel): __root__: str class SSHPublicKey(BaseModel): __root__: Annotated[str, Field(max_length=4096, min_length=256)] class Success(BaseModel): __root__: bool class SerialPort(BaseModel): __root__: Annotated[int, Field(ge=0.0, le=0.0)] class SendSSHPublicKeyResponse(BaseModel): RequestId: Optional[RequestId] = None Success: Optional[Success] = None class SendSSHPublicKeyRequest(BaseModel): InstanceId: InstanceId InstanceOSUser: InstanceOSUser SSHPublicKey: SSHPublicKey AvailabilityZone: AvailabilityZone class SendSerialConsoleSSHPublicKeyResponse(SendSSHPublicKeyResponse): pass class SendSerialConsoleSSHPublicKeyRequest(BaseModel): InstanceId: InstanceId SerialPort: Optional[SerialPort] = None SSHPublicKey: SSHPublicKey
20.019048
88
0.725975
204
2,102
7.25
0.377451
0.054767
0.118999
0.067613
0.127789
0.127789
0.127789
0.10142
0.10142
0.10142
0
0.029868
0.171741
2,102
104
89
20.211538
0.819644
0.045671
0
0.258621
1
0.017241
0.043956
0.037463
0
0
0
0
0
1
0
true
0.155172
0.051724
0
0.689655
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
1
0
0
1
0
0
4
5ee34ca0f9dd76add60d238d407a47dadcff64cf
185
py
Python
run.py
ASHWINISINHA/pi-oled-intelligent-clock
1452079bdd9c152eac6a20722de5601fb455dc78
[ "MIT" ]
null
null
null
run.py
ASHWINISINHA/pi-oled-intelligent-clock
1452079bdd9c152eac6a20722de5601fb455dc78
[ "MIT" ]
null
null
null
run.py
ASHWINISINHA/pi-oled-intelligent-clock
1452079bdd9c152eac6a20722de5601fb455dc78
[ "MIT" ]
null
null
null
import subprocess from time import sleep y=(0.1) subprocess.Popen(["python", 'rd1.py']) sleep(y) subprocess.Popen(["python", 'rs1.py']) sleep(y) subprocess.Popen(["python", 'rl1.py'])
18.5
38
0.691892
28
185
4.571429
0.5
0.140625
0.492188
0.28125
0.453125
0.453125
0
0
0
0
0
0.029586
0.086486
185
9
39
20.555556
0.727811
0
0
0.25
0
0
0.194595
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
5eeaa0320e60e8cb1e770189fadf3c208691cdae
174
py
Python
Compete/Class of Code/Reverse Mode/reverse1.py
H2u-Hwng/CodinGame
cbef4000b30b25475dfbf002f575d62ac6a61ce1
[ "MIT" ]
null
null
null
Compete/Class of Code/Reverse Mode/reverse1.py
H2u-Hwng/CodinGame
cbef4000b30b25475dfbf002f575d62ac6a61ce1
[ "MIT" ]
null
null
null
Compete/Class of Code/Reverse Mode/reverse1.py
H2u-Hwng/CodinGame
cbef4000b30b25475dfbf002f575d62ac6a61ce1
[ "MIT" ]
null
null
null
drink = input() straw = int(input()) if straw == 1: print('____|____') else: print('_________') print('\\ /') print(' \\ ' + drink + ' /') print(' \\___/')
15.818182
30
0.488506
15
174
4.333333
0.533333
0.307692
0
0
0
0
0
0
0
0
0
0.007634
0.247126
174
10
31
17.4
0.48855
0
0
0
0
0
0.252874
0
0
0
0
0
0
1
0
false
0
0
0
0
0.555556
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
6f2f7c4e8d8fb33a0c93291ce64b2977ac6d5cc9
196
py
Python
minigest/contabilita/urls/bilancio.py
ctrlmaniac/minigest
2bfceb57e41c872e4112e24d0e6991164846888b
[ "MIT" ]
null
null
null
minigest/contabilita/urls/bilancio.py
ctrlmaniac/minigest
2bfceb57e41c872e4112e24d0e6991164846888b
[ "MIT" ]
1
2021-09-22T19:10:20.000Z
2021-09-22T19:10:20.000Z
minigest/contabilita/urls/bilancio.py
ctrlmaniac/minigest
2bfceb57e41c872e4112e24d0e6991164846888b
[ "MIT" ]
null
null
null
from django.urls import path from ..views import BilancioView urlpatterns = [ path("<int:azienda>/", BilancioView.as_view()), path("<int:azienda>/<periodo>/", BilancioView.as_view()), ]
21.777778
61
0.688776
23
196
5.782609
0.565217
0.105263
0.210526
0
0
0
0
0
0
0
0
0
0.132653
196
8
62
24.5
0.782353
0
0
0
0
0
0.193878
0.122449
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
6f3235501d00dede1c66cda9ae6fab069df84e56
277
py
Python
vsparsedvd/utils/utils.py
Setsugennoao/vs-parsedvd
fe89ff51b85758dca32e73b3c884acfccf583451
[ "MIT" ]
2
2021-12-06T05:48:14.000Z
2022-02-10T09:17:18.000Z
vsparsedvd/utils/utils.py
Setsugennoao/vs-parsedvd
fe89ff51b85758dca32e73b3c884acfccf583451
[ "MIT" ]
null
null
null
vsparsedvd/utils/utils.py
Setsugennoao/vs-parsedvd
fe89ff51b85758dca32e73b3c884acfccf583451
[ "MIT" ]
null
null
null
from __future__ import annotations from typing import Sequence def opt_int(val: str | int | None) -> int | None: return int(val) if val is not None else None def opt_ints(vals: Sequence[str | int | None]) -> Sequence[int | None]: return [opt_int(x) for x in vals]
23.083333
71
0.689531
46
277
4
0.478261
0.152174
0.108696
0
0
0
0
0
0
0
0
0
0.209386
277
11
72
25.181818
0.840183
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
6f6fccf217ecb38f32e0056202da74b90bfbc6e0
683
py
Python
ipymd/formats/tests/test_utils.py
nathanfdunn/ipymd
cca5f98e34a024396e21ab7a3f322bbe2e3f37d1
[ "BSD-3-Clause" ]
521
2015-01-01T09:22:05.000Z
2022-02-06T23:54:10.000Z
ipymd/formats/tests/test_utils.py
nathanfdunn/ipymd
cca5f98e34a024396e21ab7a3f322bbe2e3f37d1
[ "BSD-3-Clause" ]
79
2015-01-04T21:17:29.000Z
2018-08-28T16:39:21.000Z
ipymd/formats/tests/test_utils.py
nathanfdunn/ipymd
cca5f98e34a024396e21ab7a3f322bbe2e3f37d1
[ "BSD-3-Clause" ]
56
2015-03-01T08:48:26.000Z
2022-01-29T03:11:13.000Z
# -*- coding: utf-8 -*- """Test utils.""" #------------------------------------------------------------------------------ # Imports #------------------------------------------------------------------------------ import os.path as op from ._utils import _test_file_path, _exec_test_file #------------------------------------------------------------------------------ # Test Markdown parser #------------------------------------------------------------------------------ def test_file_path(): filename = 'ex1' assert op.exists(_test_file_path(filename, 'markdown')) def test_exec_test_file(): filename = 'ex1' assert isinstance(_exec_test_file(filename), list)
27.32
79
0.373353
51
683
4.647059
0.45098
0.202532
0.151899
0.168776
0
0
0
0
0
0
0
0.004862
0.096633
683
24
80
28.458333
0.379254
0.549048
0
0.25
0
0
0.047297
0
0
0
0
0
0.25
1
0.25
false
0
0.25
0
0.5
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
6f7775072bcc1c947a80de9ca39c577d0a29f71e
224
py
Python
workalendar/usa/new_jersey.py
taiyeoguns/workalendar
35ef15b0fe166ab2e73e419c83ad45738a62ac8d
[ "MIT" ]
405
2017-11-21T14:33:58.000Z
2021-06-03T01:58:55.000Z
workalendar/usa/new_jersey.py
taiyeoguns/workalendar
35ef15b0fe166ab2e73e419c83ad45738a62ac8d
[ "MIT" ]
236
2017-11-20T16:11:50.000Z
2021-05-28T08:02:53.000Z
workalendar/usa/new_jersey.py
taiyeoguns/workalendar
35ef15b0fe166ab2e73e419c83ad45738a62ac8d
[ "MIT" ]
126
2017-12-12T14:04:25.000Z
2021-05-29T14:27:11.000Z
from ..registry_tools import iso_register from .core import UnitedStates @iso_register('US-NJ') class NewJersey(UnitedStates): """New Jersey""" include_good_friday = True include_election_day_every_year = True
22.4
42
0.763393
29
224
5.586207
0.758621
0.135802
0
0
0
0
0
0
0
0
0
0
0.147321
224
9
43
24.888889
0.848168
0.044643
0
0
0
0
0.024038
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.833333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
48b04817aba5557350700788058f6de5ce4b787d
29,778
py
Python
pytests/test_cphd_consistency.py
pressler-vsc/sarpy
fa6c951c42b9a7d9df2edfa53c771494cb0246fb
[ "MIT" ]
null
null
null
pytests/test_cphd_consistency.py
pressler-vsc/sarpy
fa6c951c42b9a7d9df2edfa53c771494cb0246fb
[ "MIT" ]
null
null
null
pytests/test_cphd_consistency.py
pressler-vsc/sarpy
fa6c951c42b9a7d9df2edfa53c771494cb0246fb
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # # Copyright 2020-2021 Valkyrie Systems Corporation # # Licensed under MIT License. See LICENSE. # import copy import os import re import shutil import tempfile from lxml import etree import numpy as np import pytest from sarpy.consistency import cphd_consistency from sarpy.io.phase_history.cphd_schema import get_schema_path GOOD_CPHD = os.path.join(os.environ['SARPY_TEST_PATH'], 'cphd', 'spotlight_example.cphd') DEFAULT_SCHEMA = get_schema_path(version='1.0.1') def make_elem(tag, text=None, children=None, namespace=None, attributes=None, **attrib): """ Creates described element. Creates the Element with tag name, text, and attributes given. Attributes can be specified as either a dictionary or keyword arguments. Parameters ---------- tag : str A string that will become the tag name. text : str A string that will become the text in the element. (Default: ``None``) parent : lxml.etree.ElementTree.Element The parent element. (Default: ``None``) children : lxml.etree.ElementTree The children elements. (Default: ``None``) namespace : str The string containing the namespace. (Default: ``None``) attributes : dict A dictionary mapping attribute names to values. (Default: ``None``) **attrib : list Keyword arguments that map to attributes. 
(Default: ``None``) Returns ------- lxml.etree.ElementTree.Element """ if attributes is None: attributes = {} if text is not None: if isinstance(text, bool): text = str(text).lower() if not isinstance(text, str): text = repr(text) attrib = copy.copy(attrib) attrib.update(attributes) attrib = {key: str(value) for key, value in attrib.items()} if namespace is not None: tag = '{{{namespace}}}{tag}'.format(namespace=namespace, tag=tag) retval = etree.Element(tag, attrib) if text is not None: retval.text = str(text) if children is not None: retval.extend([child for child in children if child is not None]) return retval @pytest.fixture def tmpdir(): dirname = tempfile.mkdtemp() yield dirname shutil.rmtree(dirname) @pytest.fixture(scope='module') def good_xml_str(): with open(GOOD_CPHD, 'rb') as fid: header = cphd_consistency.read_header(fid) fid.seek(header['XML_BLOCK_BYTE_OFFSET'], 0) xml_block_size = header['XML_BLOCK_SIZE'] return fid.read(xml_block_size).decode() @pytest.fixture def good_xml(good_xml_str): good_xml_root = etree.fromstring(good_xml_str) good_xml_root_no_ns = cphd_consistency.strip_namespace(etree.fromstring(good_xml_str)) yield {'with_ns': good_xml_root, 'without_ns': good_xml_root_no_ns, 'nsmap': {'ns': re.match(r'\{(.*)\}', good_xml_root.tag).group(1)}} @pytest.fixture def good_header(): with open(GOOD_CPHD, 'rb') as fid: return cphd_consistency.read_header(fid) def remove_nodes(*nodes): for node in nodes: node.getparent().remove(node) def copy_xml(elem): return etree.fromstring(etree.tostring(elem)) def test_from_file_cphd(): cphdcon = cphd_consistency.CphdConsistency.from_file(str(GOOD_CPHD), DEFAULT_SCHEMA, True) assert isinstance(cphdcon, cphd_consistency.CphdConsistency) cphdcon.check() assert not cphdcon.failures() def test_from_file_xml(good_xml_str, tmpdir): xml_file = os.path.join(tmpdir, 'cphd.xml') with open(xml_file, 'w') as fid: fid.write(good_xml_str) cphdcon = cphd_consistency.CphdConsistency.from_file(str(xml_file), DEFAULT_SCHEMA, 
False) assert isinstance(cphdcon, cphd_consistency.CphdConsistency) cphdcon.check() assert not cphdcon.failures() def test_main(good_xml_str, tmpdir): assert not cphd_consistency.main([str(GOOD_CPHD), '--schema', DEFAULT_SCHEMA, '--signal-data']) assert not cphd_consistency.main([str(GOOD_CPHD), '--noschema']) assert not cphd_consistency.main([str(GOOD_CPHD)]) xml_file = os.path.join(tmpdir, 'cphd.xml') with open(xml_file, 'w') as fid: fid.write(good_xml_str) assert not cphd_consistency.main([str(xml_file), '-v']) def test_xml_schema_error(good_xml): bad_xml = copy_xml(good_xml['with_ns']) remove_nodes(*bad_xml.xpath('./ns:Global/ns:DomainType', namespaces=good_xml['nsmap'])) cphd_con = cphd_consistency.CphdConsistency(bad_xml, pvps={}, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) cphd_con.check('check_against_schema') assert cphd_con.failures() def test_check_classification_and_release_info_error(good_xml, good_header): bad_xml = copy_xml(good_xml['without_ns']) bad_xml.find('./CollectionID/ReleaseInfo').text += '-make-bad' cphd_con = cphd_consistency.CphdConsistency(bad_xml, pvps={}, header=good_header, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) cphd_con.check('check_classification_and_release_info') assert cphd_con.failures() def test_error_in_check(good_xml): bad_xml = copy_xml(good_xml['with_ns']) remove_nodes(*bad_xml.xpath('./ns:Channel/ns:Parameters/ns:DwellTimes/ns:CODId', namespaces=good_xml['nsmap'])) cphd_con = cphd_consistency.CphdConsistency(bad_xml, pvps={}, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = [] for chan_id in bad_xml.findall('./ns:Data/ns:Channel/ns:Identifier', namespaces=good_xml['nsmap']): tocheck.append('check_channel_dwell_exist_{}'.format(chan_id.text)) cphd_con.check(tocheck) assert cphd_con.failures() def test_polygon_size_error(good_xml): bad_xml = copy_xml(good_xml['with_ns']) ia_polygon_node = 
bad_xml.find('./ns:SceneCoordinates/ns:ImageArea/ns:Polygon', namespaces=good_xml['nsmap']) ia_polygon_node.attrib['size'] = "12345678890" cphd_con = cphd_consistency.CphdConsistency(bad_xml, pvps={}, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) cphd_con.check('check_global_imagearea_polygon') assert cphd_con.failures() def test_polygon_winding_error(good_xml): bad_xml = copy_xml(good_xml['with_ns']) ia_polygon_node = bad_xml.find('./ns:SceneCoordinates/ns:ImageArea/ns:Polygon', namespaces=good_xml['nsmap']) size = int(ia_polygon_node.attrib['size']) # Reverse the order of the vertices for vertex in ia_polygon_node: vertex.attrib['index'] = str(size - int(vertex.attrib['index']) + 1) cphd_con = cphd_consistency.CphdConsistency(bad_xml, pvps={}, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) cphd_con.check('check_global_imagearea_polygon') assert cphd_con.failures() @pytest.fixture def xml_with_signal_normal(good_xml): root = copy_xml(good_xml['with_ns']) pvps = {} for channel_node in root.findall('./ns:Data/ns:Channel', namespaces=good_xml['nsmap']): chan_id = channel_node.findtext('./ns:Identifier', namespaces=good_xml['nsmap']) num_vect = int(channel_node.findtext('./ns:NumVectors', namespaces=good_xml['nsmap'])) pvps[chan_id] = np.ones(num_vect, dtype=[('SIGNAL', 'i8')]) chan_param_node = root.xpath('./ns:Channel/ns:Parameters/ns:Identifier[text()="{}"]/..'.format(chan_id), namespaces=good_xml['nsmap'])[0] chan_param_node.append(make_elem('SignalNormal', 'true', namespace=good_xml['nsmap']['ns'])) return pvps, root, good_xml['nsmap'] def test_signalnormal(xml_with_signal_normal): pvps, root, nsmap = xml_with_signal_normal cphd_con = cphd_consistency.CphdConsistency(root, pvps=pvps, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_signalnormal_{}'.format(key) for key in pvps.keys()] cphd_con.check(tocheck) assert not cphd_con.failures() def 
test_signalnormal_bad_pvp(xml_with_signal_normal): pvps, root, nsmap = xml_with_signal_normal for idx, pvp in enumerate(pvps.values()): pvp['SIGNAL'][idx] = 0 cphd_con = cphd_consistency.CphdConsistency(root, pvps=pvps, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_signalnormal_{}'.format(key) for key in pvps.keys()] cphd_con.check(tocheck) assert len(cphd_con.failures()) == len(pvps) for norm_node in root.findall('./ns:Channel/ns:Parameters/ns:SignalNormal', namespaces=nsmap): norm_node.text = 'false' cphd_con = cphd_consistency.CphdConsistency(root, pvps=pvps, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) cphd_con.check(tocheck) assert not cphd_con.failures() no_sig_pvp = {name: np.zeros(pvp.shape, dtype=[('notsignal', 'i8')]) for name, pvp in pvps.items()} cphd_con = cphd_consistency.CphdConsistency(root, pvps=no_sig_pvp, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) cphd_con.check(tocheck) assert cphd_con.failures() @pytest.fixture def xml_without_fxfixed(good_xml): root = copy_xml(good_xml['with_ns']) pvps = {} for channel_node in root.findall('./ns:Data/ns:Channel', namespaces=good_xml['nsmap']): chan_id = channel_node.findtext('./ns:Identifier', namespaces=good_xml['nsmap']) num_vect = int(channel_node.findtext('./ns:NumVectors', namespaces=good_xml['nsmap'])) pvps[chan_id] = np.zeros(num_vect, dtype=[('FX1', 'f8'), ('FX2', 'f8')]) pvps[chan_id]['FX1'] = np.linspace(1.0, 1.1, num_vect) pvps[chan_id]['FX2'] = np.linspace(2.0, 2.2, num_vect) chan_param_node = root.xpath('./ns:Channel/ns:Parameters/ns:Identifier[text()="{}"]/..'.format(chan_id), namespaces=good_xml['nsmap'])[0] chan_param_node.find('./ns:FXFixed', namespaces=good_xml['nsmap']).text = 'false' root.find('./ns:Channel/ns:FXFixedCPHD', namespaces=good_xml['nsmap']).text = 'false' return pvps, root, good_xml['nsmap'] def test_fxfixed(xml_without_fxfixed): pvps, root, nsmap = 
xml_without_fxfixed cphd_con = cphd_consistency.CphdConsistency(root, pvps=pvps, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_fxfixed_{}'.format(key) for key in pvps.keys()] tocheck.append('check_file_fxfixed') cphd_con.check(tocheck) assert not cphd_con.failures() @pytest.fixture def xml_without_toafixed(good_xml): root = copy_xml(good_xml['with_ns']) pvps = {} for channel_node in root.findall('./ns:Data/ns:Channel', namespaces=good_xml['nsmap']): chan_id = channel_node.findtext('./ns:Identifier', namespaces=good_xml['nsmap']) num_vect = int(channel_node.findtext('./ns:NumVectors', namespaces=good_xml['nsmap'])) pvps[chan_id] = np.zeros(num_vect, dtype=[('TOA1', 'f8'), ('TOA2', 'f8')]) pvps[chan_id]['TOA1'] = np.linspace(1.0, 1.1, num_vect) pvps[chan_id]['TOA2'] = np.linspace(2.0, 2.2, num_vect) chan_param_node = root.xpath('./ns:Channel/ns:Parameters/ns:Identifier[text()="{}"]/..'.format(chan_id), namespaces=good_xml['nsmap'])[0] chan_param_node.find('./ns:TOAFixed', namespaces=good_xml['nsmap']).text = 'false' root.find('./ns:Channel/ns:TOAFixedCPHD', namespaces=good_xml['nsmap']).text = 'false' return pvps, root, good_xml['nsmap'] def test_channel_toafixed(xml_without_toafixed): pvps, root, nsmap = xml_without_toafixed cphd_con = cphd_consistency.CphdConsistency(root, pvps=pvps, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_toafixed_{}'.format(key) for key in pvps.keys()] tocheck.append('check_file_toafixed') cphd_con.check(tocheck) assert not cphd_con.failures() @pytest.fixture def xml_without_srpfixed(good_xml): root = copy_xml(good_xml['with_ns']) pvps = {} for channel_node in root.findall('./ns:Data/ns:Channel', namespaces=good_xml['nsmap']): chan_id = channel_node.findtext('./ns:Identifier', namespaces=good_xml['nsmap']) num_vect = int(channel_node.findtext('./ns:NumVectors', namespaces=good_xml['nsmap'])) pvps[chan_id] = np.zeros(num_vect, 
dtype=[('SRPPos', 'f8', 3)]) pvps[chan_id]['SRPPos'][:, 0] = np.linspace(1.0, 10, num_vect) pvps[chan_id]['SRPPos'][:, 1] = np.linspace(2.0, 20, num_vect) pvps[chan_id]['SRPPos'][:, 2] = np.linspace(3.0, 30, num_vect) chan_param_node = root.xpath('./ns:Channel/ns:Parameters/ns:Identifier[text()="{}"]/..'.format(chan_id), namespaces=good_xml['nsmap'])[0] chan_param_node.find('./ns:SRPFixed', namespaces=good_xml['nsmap']).text = 'false' root.find('./ns:Channel/ns:SRPFixedCPHD', namespaces=good_xml['nsmap']).text = 'false' return pvps, root, good_xml['nsmap'] def test_channel_srpfixed(xml_without_srpfixed): pvps, root, nsmap = xml_without_srpfixed cphd_con = cphd_consistency.CphdConsistency(root, pvps=pvps, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_srpfixed_{}'.format(key) for key in pvps.keys()] tocheck.append('check_file_srpfixed') cphd_con.check(tocheck) assert not cphd_con.failures() @pytest.fixture def xml_with_txrcv(good_xml): root = copy_xml(good_xml['with_ns']) root.append(make_elem('TxRcv', namespace=good_xml['nsmap']['ns'], children=[ make_elem('NumTxWFs', 2, namespace=good_xml['nsmap']['ns']), make_elem('TxWFParameters', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Identifier', 'wf_unit_test_1', namespace=good_xml['nsmap']['ns']), ]), make_elem('TxWFParameters', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Identifier', 'wf_unit_test_2', namespace=good_xml['nsmap']['ns']), ]), make_elem('NumRcvs', 2, namespace=good_xml['nsmap']['ns']), make_elem('RcvParameters', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Identifier', 'rcv_unit_test_1', namespace=good_xml['nsmap']['ns']), ]), make_elem('RcvParameters', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Identifier', 'rcv_unit_test_2', namespace=good_xml['nsmap']['ns']), ]) ])) chan_param_node = root.xpath('./ns:Channel/ns:Parameters', namespaces=good_xml['nsmap'])[0] chan_param_node.append(make_elem('TxRcv', 
namespace=good_xml['nsmap']['ns'], children=[ make_elem('TxWFId', 'wf_unit_test_1', namespace=good_xml['nsmap']['ns']), make_elem('TxWFId', 'wf_unit_test_2', namespace=good_xml['nsmap']['ns']), make_elem('RcvId', 'rcv_unit_test_1', namespace=good_xml['nsmap']['ns']), make_elem('RcvId', 'rcv_unit_test_2', namespace=good_xml['nsmap']['ns']), ])) chan_ids = [chan_param_node.findtext('./ns:Identifier', namespaces=good_xml['nsmap'])] return chan_ids, root, good_xml['nsmap'] def test_txrcv(xml_with_txrcv): chan_ids, root, nsmap = xml_with_txrcv cphd_con = cphd_consistency.CphdConsistency(root, pvps=None, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_txrcv_exist_{}'.format(key) for key in chan_ids] cphd_con.check(tocheck) assert not cphd_con.failures() def test_txrcv_bad_txwfid(xml_with_txrcv): chan_ids, root, nsmap = xml_with_txrcv chan_param_node = root.xpath('./ns:Channel/ns:Parameters/ns:Identifier[text()="{}"]/..'.format(chan_ids[0]), namespaces=nsmap)[0] chan_param_node.xpath('./ns:TxRcv/ns:TxWFId', namespaces=nsmap)[-1].text = 'missing' cphd_con = cphd_consistency.CphdConsistency(root, pvps=None, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_txrcv_exist_{}'.format(key) for key in chan_ids] cphd_con.check(tocheck) assert cphd_con.failures() def test_txrcv_bad_rcvid(xml_with_txrcv): chan_ids, root, nsmap = xml_with_txrcv chan_param_node = root.xpath('./ns:Channel/ns:Parameters/ns:Identifier[text()="{}"]/..'.format(chan_ids[0]), namespaces=nsmap)[0] chan_param_node.xpath('./ns:TxRcv/ns:RcvId', namespaces=nsmap)[-1].text = 'missing' cphd_con = cphd_consistency.CphdConsistency(root, pvps=None, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_txrcv_exist_{}'.format(key) for key in chan_ids] cphd_con.check(tocheck) assert cphd_con.failures() @pytest.fixture def xml_with_fxbwnoise(good_xml): root = 
copy_xml(good_xml['with_ns']) pvps = {} for channel_node in root.findall('./ns:Data/ns:Channel', namespaces=good_xml['nsmap']): chan_id = channel_node.findtext('./ns:Identifier', namespaces=good_xml['nsmap']) num_vect = int(channel_node.findtext('./ns:NumVectors', namespaces=good_xml['nsmap'])) pvps[chan_id] = np.zeros(num_vect, dtype=[('FXN1', 'f8'), ('FXN2', 'f8')]) pvps[chan_id]['FXN1'] = np.linspace(1, 2, num_vect) pvps[chan_id]['FXN2'] = pvps[chan_id]['FXN1'] * 1.1 pvps[chan_id]['FXN1'][10] = np.nan pvps[chan_id]['FXN2'][10] = np.nan chan_param_node = root.xpath('./ns:Channel/ns:Parameters/ns:Identifier[text()="{}"]/..'.format(chan_id), namespaces=good_xml['nsmap'])[0] chan_param_node.append(make_elem('FxBWNoise', 1.2, namespace=good_xml['nsmap']['ns'])) return pvps, root, good_xml['nsmap'] def test_fxbwnoise(xml_with_fxbwnoise): pvps, root, nsmap = xml_with_fxbwnoise cphd_con = cphd_consistency.CphdConsistency(root, pvps=pvps, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_fxbwnoise_{}'.format(key) for key in pvps.keys()] cphd_con.check(tocheck) assert not cphd_con.failures() def test_fxbwnoise_bad_domain(xml_with_fxbwnoise): pvps, root, nsmap = xml_with_fxbwnoise root.find('./ns:Global/ns:DomainType', namespaces=nsmap).text = 'TOA' cphd_con = cphd_consistency.CphdConsistency(root, pvps=pvps, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_fxbwnoise_{}'.format(key) for key in pvps.keys()] cphd_con.check(tocheck) assert cphd_con.failures() def test_fxbwnoise_bad_value(xml_with_fxbwnoise): pvps, root, nsmap = xml_with_fxbwnoise chan_id = list(pvps.keys())[-1] pvps[chan_id]['FXN1'][0] = 0.5 cphd_con = cphd_consistency.CphdConsistency(root, pvps=pvps, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = ['check_channel_fxbwnoise_{}'.format(key) for key in pvps.keys()] cphd_con.check(tocheck) assert cphd_con.failures() def 
test_geoinfo_polygons(good_xml): root = copy_xml(good_xml['with_ns']) root.append(make_elem('GeoInfo', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Polygon', size='3', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Vertex', index='1', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Lat', 0.0, namespace=good_xml['nsmap']['ns']), make_elem('Lon', 0.0, namespace=good_xml['nsmap']['ns']), ]), make_elem('Vertex', index='2', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Lat', 1.0, namespace=good_xml['nsmap']['ns']), make_elem('Lon', 0.0, namespace=good_xml['nsmap']['ns']), ]), make_elem('Vertex', index='3', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Lat', 1.0, namespace=good_xml['nsmap']['ns']), make_elem('Lon', 1.0, namespace=good_xml['nsmap']['ns']), ]), ]) ])) cphd_con = cphd_consistency.CphdConsistency(root, pvps=None, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) cphd_con.check('check_geoinfo_polygons') assert not cphd_con.failures() def test_geoinfo_polygons_bad_order(good_xml): root = copy_xml(good_xml['with_ns']) root.append(make_elem('GeoInfo', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Polygon', size='3', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Vertex', index='1', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Lat', 0.0, namespace=good_xml['nsmap']['ns']), make_elem('Lon', 0.0, namespace=good_xml['nsmap']['ns']), ]), make_elem('Vertex', index='2', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Lat', 0.0, namespace=good_xml['nsmap']['ns']), make_elem('Lon', 1.0, namespace=good_xml['nsmap']['ns']), ]), make_elem('Vertex', index='3', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Lat', 1.0, namespace=good_xml['nsmap']['ns']), make_elem('Lon', 1.0, namespace=good_xml['nsmap']['ns']), ]), ]) ])) cphd_con = cphd_consistency.CphdConsistency(root, pvps=None, header=None, filename=None, schema=DEFAULT_SCHEMA, 
check_signal_data=False) cphd_con.check('check_geoinfo_polygons') assert cphd_con.failures() @pytest.fixture def xml_with_channel_imagearea(good_xml): root = copy_xml(good_xml['with_ns']) for chan_param_node in root.xpath('./ns:Channel/ns:Parameters', namespaces=good_xml['nsmap']): chan_param_node.append(make_elem('ImageArea', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X1Y1', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', -50, namespace=good_xml['nsmap']['ns']), make_elem('Y', -50, namespace=good_xml['nsmap']['ns']), ]), make_elem('X2Y2', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', 50, namespace=good_xml['nsmap']['ns']), make_elem('Y', 50, namespace=good_xml['nsmap']['ns']), ]), make_elem('Polygon', size='4', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Vertex', index='1', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', -50.0, namespace=good_xml['nsmap']['ns']), make_elem('Y', 0.0, namespace=good_xml['nsmap']['ns']), ]), make_elem('Vertex', index='2', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', 0.0, namespace=good_xml['nsmap']['ns']), make_elem('Y', 50.0, namespace=good_xml['nsmap']['ns']), ]), make_elem('Vertex', index='3', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', 50.0, namespace=good_xml['nsmap']['ns']), make_elem('Y', 0.0, namespace=good_xml['nsmap']['ns']), ]), make_elem('Vertex', index='4', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', 0.0, namespace=good_xml['nsmap']['ns']), make_elem('Y', -50.0, namespace=good_xml['nsmap']['ns']), ]), ]) ])) return root, good_xml['nsmap'] def test_channel_image_area(xml_with_channel_imagearea): root, nsmap = xml_with_channel_imagearea cphd_con = cphd_consistency.CphdConsistency(root, pvps=None, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) tocheck = [] for chan_id in root.findall('./ns:Data/ns:Channel/ns:Identifier', namespaces=nsmap): 
tocheck.append('check_channel_imagearea_x1y1_{}'.format(chan_id.text)) tocheck.append('check_channel_imagearea_polygon_{}'.format(chan_id.text)) cphd_con.check(tocheck) assert not cphd_con.failures() @pytest.fixture def xml_with_extendedarea(good_xml): root = copy_xml(good_xml['with_ns']) scene = root.find('./ns:SceneCoordinates', namespaces=good_xml['nsmap']) scene.append(make_elem('ExtendedArea', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X1Y1', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', -1000, namespace=good_xml['nsmap']['ns']), make_elem('Y', -1000, namespace=good_xml['nsmap']['ns']), ]), make_elem('X2Y2', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', 1000, namespace=good_xml['nsmap']['ns']), make_elem('Y', 1000, namespace=good_xml['nsmap']['ns']), ]), make_elem('Polygon', size='4', namespace=good_xml['nsmap']['ns'], children=[ make_elem('Vertex', index='1', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', -1000.0, namespace=good_xml['nsmap']['ns']), make_elem('Y', 0.0, namespace=good_xml['nsmap']['ns']), ]), make_elem('Vertex', index='2', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', 0.0, namespace=good_xml['nsmap']['ns']), make_elem('Y', 1000.0, namespace=good_xml['nsmap']['ns']), ]), make_elem('Vertex', index='3', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', 1000.0, namespace=good_xml['nsmap']['ns']), make_elem('Y', 0.0, namespace=good_xml['nsmap']['ns']), ]), make_elem('Vertex', index='4', namespace=good_xml['nsmap']['ns'], children=[ make_elem('X', 0.0, namespace=good_xml['nsmap']['ns']), make_elem('Y', -1000.0, namespace=good_xml['nsmap']['ns']), ]), ]) ])) return root, good_xml['nsmap'] def test_extended_imagearea(xml_with_extendedarea): root, nsmap = xml_with_extendedarea cphd_con = cphd_consistency.CphdConsistency(root, pvps=None, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) cphd_con.check(['check_extended_imagearea_polygon', 
'check_extended_imagearea_x1y1_x2y2']) assert not cphd_con.failures() def test_extended_imagearea_polygon_bad_extent(xml_with_extendedarea): root, nsmap = xml_with_extendedarea root.find('./ns:SceneCoordinates/ns:ExtendedArea/ns:X2Y2/ns:X', namespaces=nsmap).text = '2000' cphd_con = cphd_consistency.CphdConsistency(root, pvps=None, header=None, filename=None, schema=DEFAULT_SCHEMA, check_signal_data=False) cphd_con.check('check_extended_imagearea_polygon') assert cphd_con.failures()
45.741935
115
0.597119
3,585
29,778
4.705439
0.076709
0.069299
0.087498
0.099591
0.792815
0.759974
0.741478
0.717826
0.69684
0.67467
0
0.011525
0.25989
29,778
650
116
45.812308
0.753891
0.031433
0
0.607884
0
0
0.131682
0.055401
0
0
0
0
0.064315
1
0.080913
false
0
0.020747
0.002075
0.126556
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
48c7b95e25db83b298727eceeb0bb5aecb735fac
16,616
py
Python
venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_Salsa20.py
JagahOrg/Voting
893fbbda0584c63ac5b86328fffb44744a0e1239
[ "Apache-2.0" ]
4
2021-07-12T16:37:36.000Z
2021-08-06T09:42:37.000Z
venv/Lib/site-packages/Cryptodome/SelfTest/Cipher/test_Salsa20.py
JagahOrg/Voting
893fbbda0584c63ac5b86328fffb44744a0e1239
[ "Apache-2.0" ]
20
2021-05-03T18:02:23.000Z
2022-03-12T12:01:04.000Z
Lib/site-packages/Cryptodome/SelfTest/Cipher/test_Salsa20.py
fochoao/cpython
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
[ "bzip2-1.0.6", "0BSD" ]
2
2021-03-16T12:41:29.000Z
2021-03-16T14:50:08.000Z
# -*- coding: utf-8 -*- # # SelfTest/Cipher/Salsa20.py: Self-test for the Salsa20 stream cipher # # Written in 2013 by Fabrizio Tarizzo <fabrizio@fabriziotarizzo.org> # # =================================================================== # The contents of this file are dedicated to the public domain. To # the extent that dedication to the public domain is not available, # everyone is granted a worldwide, perpetual, royalty-free, # non-exclusive license to exercise all rights associated with the # contents of this file for any purpose whatsoever. # No rights are reserved. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # =================================================================== """Self-test suite for Cryptodome.Cipher.Salsa20""" import unittest from Cryptodome.Util.py3compat import bchr from Cryptodome.SelfTest.st_common import list_test_cases from Cryptodome.Cipher import Salsa20 from .common import make_stream_tests # This is a list of (plaintext, ciphertext, key[, description[, params]]) # tuples. 
test_data = [ # Test vectors are taken from # http://www.ecrypt.eu.org/stream/svn/viewcvs.cgi/ecrypt/trunk/submissions/salsa20/full/verified.test-vectors ( '00' * 512, '4dfa5e481da23ea09a31022050859936da52fcee218005164f267cb65f5cfd7f' + '2b4f97e0ff16924a52df269515110a07f9e460bc65ef95da58f740b7d1dbb0aa' + 'd64cec189c7eb8c6bbf3d7376c80a481d43e628701f6a27afb9fe23919f24114' + '8db44f70d7063efcc3dd55a0893a613c3c6fe1c127bd6f59910589293bb6ef9e' + 'e24819066dee1a64f49b0bbad5988635272b169af861f85df881939f29ada6fd' + '0241410e8d332ae4798d929434a2630de451ec4e0169694cbaa7ebb121ea6a2b' + 'da9c1581f429e0a00f7d67e23b730676783b262e8eb43a25f55fb90b3e753aef' + '8c6713ec66c51881111593ccb3e8cb8f8de124080501eeeb389c4bcb6977cf95' + '7d5789631eb4554400e1e025935dfa7b3e9039d61bdc58a8697d36815bf1985c' + 'efdf7ae112e5bb81e37ecf0616ce7147fc08a93a367e08631f23c03b00a8da2f' + 'aa5024e5c8d30aca43fc2d5082067b21b234bc741d68fb292c6012c3764ccee3' + '1e364a5403e00cfee338a21a01e7d3cefd5a770ca0ab48c435ea6116435f7ad8' + '30b217b49f978a68e207ed9f462af7fb195b2115fe8f24f152e4ddc32202d6f2' + 'b52fafbcfbc202d8a259a611e901d3f62d065eb13f09bbc45cd45119b843efaa' + 'b375703739daced4dd4059fd71c3c47fc2f9939670fad4a46066adcc6a564578' + '3308b90ffb72be04a6b147cbe38cc0c3b9267c296a92a7c69873f9f263be9703', '80000000000000000000000000000000', '128 bits key, set 1, vector 0', dict (iv='00'*8)), ( '00' * 512, 'e3be8fdd8beca2e3ea8ef9475b29a6e7003951e1097a5c38d23b7a5fad9f6844' + 'b22c97559e2723c7cbbd3fe4fc8d9a0744652a83e72a9c461876af4d7ef1a117' + '8da2b74eef1b6283e7e20166abcae538e9716e4669e2816b6b20c5c356802001' + 'cc1403a9a117d12a2669f456366d6ebb0f1246f1265150f793cdb4b253e348ae' + '203d89bc025e802a7e0e00621d70aa36b7e07cb1e7d5b38d5e222b8b0e4b8407' + '0142b1e29504767d76824850320b5368129fdd74e861b498e3be8d16f2d7d169' + '57be81f47b17d9ae7c4ff15429a73e10acf250ed3a90a93c711308a74c6216a9' + 'ed84cd126da7f28e8abf8bb63517e1ca98e712f4fb2e1a6aed9fdc73291faa17' + '958211c4ba2ebd5838c635edb81f513a91a294e194f1c039aeec657dce40aa7e' + 
'7c0af57cacefa40c9f14b71a4b3456a63e162ec7d8d10b8ffb1810d71001b618' + '2f9f73da53b85405c11f7b2d890fa8ae0c7f2e926d8a98c7ec4e91b65120e988' + '349631a700c6facec3471cb0413656e75e309456584084d7e12c5b43a41c43ed' + '9a048abd9b880da65f6a665a20fe7b77cd292fe62cae644b7f7df69f32bdb331' + '903e6505ce44fdc293920c6a9ec7057e23df7dad298f82ddf4efb7fdc7bfc622' + '696afcfd0cddcc83c7e77f11a649d79acdc3354e9635ff137e929933a0bd6f53' + '77efa105a3a4266b7c0d089d08f1e855cc32b15b93784a36e56a76cc64bc8477', '8000000000000000000000000000000000000000000000000000000000000000', '256 bits key, set 1, vector 0', dict (iv='00'*8)), ( '00' * 512, '169060ccb42bea7bee4d8012a02f3635eb7bca12859fa159cd559094b3507db8' + '01735d1a1300102a9c9415546829cbd2021ba217b39b81d89c55b13d0c603359' + '3f84159a3c84f4b4f4a0edcd9d38ff261a737909e0b66d68b5cac496f3a5be99' + 'cb12c321ab711afaab36cc0947955e1a9bb952ed54425e7711279fbc81bb83f5' + '6e55cea44e6daddb05858a153ea6213b3350c12aa1a83ef2726f09485fa71790' + 'f9b9f922c7dda1113b1f9d56658ed3402803f511bc1f122601d5e7f0ff036e23' + '23ef24bb24195b9fd574823cd8a40c29d86bd35c191e2038779ff696c712b6d8' + '2e7014dbe1ac5d527af076c088c4a8d44317958189f6ef54933a7e0816b5b916' + 'd8f12ed8afe9422b85e5cc9b8adec9d6cfabe8dbc1082bccc02f5a7266aa074c' + 'a284e583a35837798cc0e69d4ce937653b8cdd65ce414b89138615ccb165ad19' + '3c6b9c3d05eef4be921a10ea811fe61d11c6867600188e065daff90b509ec56b' + 'd41e7e8968c478c78d590c2d2ee24ea009c8f49bc3d81672cfc47895a9e21c9a' + '471ebf8e294bee5d2de436ac8d052bf31111b345f1da23c3a4d13b9fc5f0900a' + 'a298f98f538973b8fad40d4d159777de2cfe2a3dead1645ddb49794827dba040' + 'f70a0ff4ecd155e0f033604693a51e2363880e2ecf98699e7174af7c2c6b0fc6' + '59ae329599a3949272a37b9b2183a0910922a3f325ae124dcbdd735364055ceb', '09090909090909090909090909090909', '128 bits key, set 2, vector 9', dict (iv='00'*8)), ( '00' * 512, '7041e747ceb22ed7812985465f50333124f971da1c5d6efe5ca201b886f31046' + 'e757e5c3ec914f60ed1f6bce2819b6810953f12b8ba1199bf82d746a8b8a88f1' + 
'142002978ec4c35b95dc2c82990f9e847a0ab45f2ca72625f5190c820f29f3aa' + 'f5f0b5572b06b70a144f2a240c3b3098d4831fa1ce1459f8d1df226a6a79b0ab' + '41e91799ef31b5ff3d756c19126b19025858ee70fbd69f2be955cb011c005e31' + '32b271b378f39b0cb594e95c99ce6ff17735a541891845bbf0450afcb4a850b9' + '4ee90afb713ae7e01295c74381180a3816d7020d5a396c0d97aaa783eaabb6ec' + '44d5111157f2212d1b1b8fca7893e8b520cd482418c272ab119b569a2b9598eb' + '355624d12e79adab81153b58cd22eaf1b2a32395dedc4a1c66f4d274070b9800' + 'ea95766f0245a8295f8aadb36ddbbdfa936417c8dbc6235d19494036964d3e70' + 'b125b0f800c3d53881d9d11e7970f827c2f9556935cd29e927b0aceb8cae5fd4' + '0fd88a8854010a33db94c96c98735858f1c5df6844f864feaca8f41539313e7f' + '3c0610214912cd5e6362197646207e2d64cd5b26c9dfe0822629dcbeb16662e8' + '9ff5bf5cf2e499138a5e27bd5027329d0e68ddf53103e9e409523662e27f61f6' + '5cf38c1232023e6a6ef66c315bcb2a4328642faabb7ca1e889e039e7c444b34b' + 'b3443f596ac730f3df3dfcdb343c307c80f76e43e8898c5e8f43dc3bb280add0', '0909090909090909090909090909090909090909090909090909090909090909', '256 bits key, set 2, vector 9', dict (iv='00'*8)), ( '00' * 1024, '71daee5142d0728b41b6597933ebf467e43279e30978677078941602629cbf68' + 'b73d6bd2c95f118d2b3e6ec955dabb6dc61c4143bc9a9b32b99dbe6866166dc0' + '8631b7d6553050303d7252c264d3a90d26c853634813e09ad7545a6ce7e84a5d' + 'fc75ec43431207d5319970b0faadb0e1510625bb54372c8515e28e2accf0a993' + '0ad15f431874923d2a59e20d9f2a5367dba6051564f150287debb1db536ff9b0' + '9ad981f25e5010d85d76ee0c305f755b25e6f09341e0812f95c94f42eead346e' + '81f39c58c5faa2c88953dc0cac90469db2063cb5cdb22c9eae22afbf0506fca4' + '1dc710b846fbdfe3c46883dd118f3a5e8b11b6afd9e71680d8666557301a2daa' + 'fb9496c559784d35a035360885f9b17bd7191977deea932b981ebdb29057ae3c' + '92cfeff5e6c5d0cb62f209ce342d4e35c69646ccd14e53350e488bb310a32f8b' + '0248e70acc5b473df537ced3f81a014d4083932bedd62ed0e447b6766cd2604b' + '706e9b346c4468beb46a34ecf1610ebd38331d52bf33346afec15eefb2a7699e' + '8759db5a1f636a48a039688e39de34d995df9f27ed9edc8dd795e39e53d9d925' + 
'b278010565ff665269042f05096d94da3433d957ec13d2fd82a0066283d0d1ee' + 'b81bf0ef133b7fd90248b8ffb499b2414cd4fa003093ff0864575a43749bf596' + '02f26c717fa96b1d057697db08ebc3fa664a016a67dcef8807577cc3a09385d3' + 'f4dc79b34364bb3b166ce65fe1dd28e3950fe6fa81063f7b16ce1c0e6daac1f8' + '188455b77752045e863c9b256ad92bc6e2d08314c5bba191c274f42dfbb3d652' + 'bb771956555e880f84cd8b827a4c5a52f3a099fa0259bd4aac3efd541f191170' + '4412d6e85fbcc628b335875b9fef24807f6e1bc66c3186159e1e7f5a13913e02' + 'd241ce2efdbcaa275039fb14eac5923d17ffbc7f1abd3b45e92127575bfbabf9' + '3a257ebef0aa1437b326e41b585af572f7239c33b32981a1577a4f629b027e1e' + 'b49d58cc497e944d79cef44357c2bf25442ab779651e991147bf79d6fd3a8868' + '0cd3b1748e07fd10d78aceef6db8a5e563570d40127f754146c34a440f2a991a' + '23fa39d365141f255041f2135c5cba4373452c114da1801bacca38610e3a6524' + '2b822d32de4ab5a7d3cf9b61b37493c863bd12e2cae10530cddcda2cb7a5436b' + 'ef8988d4d24e8cdc31b2d2a3586340bc5141f8f6632d0dd543bfed81eb471ba1' + 'f3dc2225a15ffddcc03eb48f44e27e2aa390598adf83f15c6608a5f18d4dfcf0' + 'f547d467a4d70b281c83a595d7660d0b62de78b9cca023cca89d7b1f83484638' + '0e228c25f049184a612ef5bb3d37454e6cfa5b10dceda619d898a699b3c8981a' + '173407844bb89b4287bf57dd6600c79e352c681d74b03fa7ea0d7bf6ad69f8a6' + '8ecb001963bd2dd8a2baa0083ec09751cd9742402ad716be16d5c052304cfca1', '0F62B5085BAE0154A7FA4DA0F34699EC', '128 bits key, Set 6, vector# 3', dict (iv='288FF65DC42B92F9')), ( '00' * 1024, '5e5e71f90199340304abb22a37b6625bf883fb89ce3b21f54a10b81066ef87da' + '30b77699aa7379da595c77dd59542da208e5954f89e40eb7aa80a84a6176663f' + 'd910cde567cf1ff60f7040548d8f376bfd1f44c4774aac37410ede7d5c3463fc' + '4508a603201d8495ad257894e5eb1914b53e8da5e4bf2bc83ac87ce55cc67df7' + '093d9853d2a83a9c8be969175df7c807a17156df768445dd0874a9271c6537f5' + 'ce0466473582375f067fa4fcdaf65dbc0139cd75e8c21a482f28c0fb8c3d9f94' + '22606cc8e88fe28fe73ec3cb10ff0e8cc5f2a49e540f007265c65b7130bfdb98' + '795b1df9522da46e48b30e55d9f0d787955ece720205b29c85f3ad9be33b4459' + 
'7d21b54d06c9a60b04b8e640c64e566e51566730e86cf128ab14174f91bd8981' + 'a6fb00fe587bbd6c38b5a1dfdb04ea7e61536fd229f957aa9b070ca931358e85' + '11b92c53c523cb54828fb1513c5636fa9a0645b4a3c922c0db94986d92f314ff' + '7852c03b231e4dceea5dd8cced621869cff818daf3c270ff3c8be2e5c74be767' + 'a4e1fdf3327a934fe31e46df5a74ae2021cee021d958c4f615263d99a5ddae7f' + 'eab45e6eccbafefe4761c57750847b7e75ee2e2f14333c0779ce4678f47b1e1b' + '760a03a5f17d6e91d4b42313b3f1077ee270e432fe04917ed1fc8babebf7c941' + '42b80dfb44a28a2a3e59093027606f6860bfb8c2e5897078cfccda7314c70035' + 'f137de6f05daa035891d5f6f76e1df0fce1112a2ff0ac2bd3534b5d1bf4c7165' + 'fb40a1b6eacb7f295711c4907ae457514a7010f3a342b4427593d61ba993bc59' + '8bd09c56b9ee53aac5dd861fa4b4bb53888952a4aa9d8ca8671582de716270e1' + '97375b3ee49e51fa2bf4ef32015dd9a764d966aa2ae541592d0aa650849e99ca' + '5c6c39beebf516457cc32fe4c105bff314a12f1ec94bdf4d626f5d9b1cbbde42' + 'e5733f0885765ba29e2e82c829d312f5fc7e180679ac84826c08d0a644b326d0' + '44da0fdcc75fa53cfe4ced0437fa4df5a7ecbca8b4cb7c4a9ecf9a60d00a56eb' + '81da52adc21f508dbb60a9503a3cc94a896616d86020d5b0e5c637329b6d396a' + '41a21ba2c4a9493cf33fa2d4f10f77d5b12fdad7e478ccfe79b74851fc96a7ca' + '6320c5efd561a222c0ab0fb44bbda0e42149611d2262bb7d1719150fa798718a' + '0eec63ee297cad459869c8b0f06c4e2b56cbac03cd2605b2a924efedf85ec8f1' + '9b0b6c90e7cbd933223ffeb1b3a3f9677657905829294c4c70acdb8b0891b47d' + '0875d0cd6c0f4efe2917fc44b581ef0d1e4280197065d07da34ab33283364552' + 'efad0bd9257b059acdd0a6f246812feb69e7e76065f27dbc2eee94da9cc41835' + 'bf826e36e5cebe5d4d6a37a6a666246290ce51a0c082718ab0ec855668db1add' + 'a658e5f257e0db39384d02e6145c4c00eaa079098f6d820d872de711b6ed08cf', '0F62B5085BAE0154A7FA4DA0F34699EC3F92E5388BDE3184D72A7DD02376C91C', '256 bits key, Set 6, vector# 3', dict (iv='288FF65DC42B92F9')), ] class KeyLength(unittest.TestCase): def runTest(self): nonce = bchr(0) * 8 for key_length in (15, 30, 33): key = bchr(1) * key_length self.assertRaises(ValueError, Salsa20.new, key, nonce) class 
NonceTests(unittest.TestCase): def test_invalid_nonce_length(self): key = bchr(1) * 16 self.assertRaises(ValueError, Salsa20.new, key, bchr(0) * 7) self.assertRaises(ValueError, Salsa20.new, key, bchr(0) * 9) def test_default_nonce(self): cipher1 = Salsa20.new(bchr(1) * 16) cipher2 = Salsa20.new(bchr(1) * 16) self.assertEqual(len(cipher1.nonce), 8) self.assertNotEqual(cipher1.nonce, cipher2.nonce) class ByteArrayTest(unittest.TestCase): """Verify we can encrypt or decrypt bytearrays""" def runTest(self): data = b"0123" key = b"9" * 32 nonce = b"t" * 8 # Encryption data_ba = bytearray(data) key_ba = bytearray(key) nonce_ba = bytearray(nonce) cipher1 = Salsa20.new(key=key, nonce=nonce) ct = cipher1.encrypt(data) cipher2 = Salsa20.new(key=key_ba, nonce=nonce_ba) key_ba[:1] = b'\xFF' nonce_ba[:1] = b'\xFF' ct_test = cipher2.encrypt(data_ba) self.assertEqual(ct, ct_test) self.assertEqual(cipher1.nonce, cipher2.nonce) # Decryption key_ba = bytearray(key) nonce_ba = bytearray(nonce) ct_ba = bytearray(ct) cipher3 = Salsa20.new(key=key_ba, nonce=nonce_ba) key_ba[:1] = b'\xFF' nonce_ba[:1] = b'\xFF' pt_test = cipher3.decrypt(ct_ba) self.assertEqual(data, pt_test) class MemoryviewTest(unittest.TestCase): """Verify we can encrypt or decrypt bytearrays""" def runTest(self): data = b"0123" key = b"9" * 32 nonce = b"t" * 8 # Encryption data_mv = memoryview(bytearray(data)) key_mv = memoryview(bytearray(key)) nonce_mv = memoryview(bytearray(nonce)) cipher1 = Salsa20.new(key=key, nonce=nonce) ct = cipher1.encrypt(data) cipher2 = Salsa20.new(key=key_mv, nonce=nonce_mv) key_mv[:1] = b'\xFF' nonce_mv[:1] = b'\xFF' ct_test = cipher2.encrypt(data_mv) self.assertEqual(ct, ct_test) self.assertEqual(cipher1.nonce, cipher2.nonce) # Decryption key_mv = memoryview(bytearray(key)) nonce_mv = memoryview(bytearray(nonce)) ct_mv = memoryview(bytearray(ct)) cipher3 = Salsa20.new(key=key_mv, nonce=nonce_mv) key_mv[:1] = b'\xFF' nonce_mv[:1] = b'\xFF' pt_test = cipher3.decrypt(ct_mv) 
self.assertEqual(data, pt_test) class TestOutput(unittest.TestCase): def runTest(self): # Encrypt/Decrypt data and test output parameter key = b'4' * 32 nonce = b'5' * 8 cipher = Salsa20.new(key=key, nonce=nonce) pt = b'5' * 16 ct = cipher.encrypt(pt) output = bytearray(16) cipher = Salsa20.new(key=key, nonce=nonce) res = cipher.encrypt(pt, output=output) self.assertEqual(ct, output) self.assertEqual(res, None) cipher = Salsa20.new(key=key, nonce=nonce) res = cipher.decrypt(ct, output=output) self.assertEqual(pt, output) self.assertEqual(res, None) output = memoryview(bytearray(16)) cipher = Salsa20.new(key=key, nonce=nonce) cipher.encrypt(pt, output=output) self.assertEqual(ct, output) cipher = Salsa20.new(key=key, nonce=nonce) cipher.decrypt(ct, output=output) self.assertEqual(pt, output) cipher = Salsa20.new(key=key, nonce=nonce) self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) cipher = Salsa20.new(key=key, nonce=nonce) self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) shorter_output = bytearray(7) cipher = Salsa20.new(key=key, nonce=nonce) self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) cipher = Salsa20.new(key=key, nonce=nonce) self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) def get_tests(config={}): tests = make_stream_tests(Salsa20, "Salsa20", test_data) tests.append(KeyLength()) tests += list_test_cases(NonceTests) tests.append(ByteArrayTest()) tests.append(MemoryviewTest()) tests.append(TestOutput()) return tests if __name__ == '__main__': import unittest suite = lambda: unittest.TestSuite(get_tests()) unittest.main(defaultTest='suite') # vim:set ts=4 sw=4 sts=4 expandtab:
45.152174
113
0.768897
1,028
16,616
12.358949
0.339494
0.015742
0.018418
0.01889
0.175049
0.160882
0.152853
0.147501
0.126013
0.094687
0
0.399575
0.150518
16,616
367
114
45.275204
0.500531
0.093885
0
0.243446
0
0
0.584955
0.565032
0
1
0
0.002725
0.078652
1
0.026217
false
0
0.022472
0
0.071161
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
4
48e039205b6042c1203a6f6039adda6bce80c257
278
py
Python
milkviz/utils/__init__.py
Mr-Milk/milkviz
f2ebef805776301ddfe2ea1c778f9dbdd8edb420
[ "MIT" ]
null
null
null
milkviz/utils/__init__.py
Mr-Milk/milkviz
f2ebef805776301ddfe2ea1c778f9dbdd8edb420
[ "MIT" ]
4
2021-09-17T10:56:25.000Z
2022-03-19T09:17:02.000Z
milkviz/utils/__init__.py
Mr-Milk/milkviz
f2ebef805776301ddfe2ea1c778f9dbdd8edb420
[ "MIT" ]
null
null
null
from .doc import doc from .fig import adaptive_figsize, norm_arr, set_cbar, set_size_legend, set_category_legend, \ set_ticks, set_spines, color_mapper_cat, color_mapper_val, mask_triu, get_cmap_colors, create_cmap, get_render_size from .geo import rotate_points, normalize
55.6
119
0.830935
45
278
4.688889
0.666667
0.085308
0
0
0
0
0
0
0
0
0
0
0.107914
278
4
120
69.5
0.850806
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
48e9d3da231af6fee11d152800a73020aaa605cb
156
py
Python
page278-c/question-1.py
CodyAaTherf/Grade9-ComputerSolutions-Python
828648269a079e4b187ef9cdbd3c14262962d9d2
[ "MIT" ]
null
null
null
page278-c/question-1.py
CodyAaTherf/Grade9-ComputerSolutions-Python
828648269a079e4b187ef9cdbd3c14262962d9d2
[ "MIT" ]
null
null
null
page278-c/question-1.py
CodyAaTherf/Grade9-ComputerSolutions-Python
828648269a079e4b187ef9cdbd3c14262962d9d2
[ "MIT" ]
null
null
null
x = float(input("Enter first number: ")) y = float(input("Enter second number: ")) q = x + y r = x - y print(f"{x} + {y} = {q}") print(f"{x} - {y} = {r}")
19.5
41
0.50641
28
156
2.821429
0.428571
0.101266
0.379747
0.202532
0
0
0
0
0
0
0
0
0.211538
156
8
42
19.5
0.642276
0
0
0
0
0
0.452229
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
48f5360c26913d9d95e28ea8fefe16b18bea0fb0
27
py
Python
muser/muser/__init__.py
briankim-pitt/muser
55be568ff58c40517f53f555da59f1acdad57ae3
[ "MIT" ]
1
2021-10-01T02:35:22.000Z
2021-10-01T02:35:22.000Z
muser/muser/__init__.py
briankim-pitt/muser
55be568ff58c40517f53f555da59f1acdad57ae3
[ "MIT" ]
null
null
null
muser/muser/__init__.py
briankim-pitt/muser
55be568ff58c40517f53f555da59f1acdad57ae3
[ "MIT" ]
null
null
null
""" Package for muser. """
6.75
18
0.555556
3
27
5
1
0
0
0
0
0
0
0
0
0
0
0
0.185185
27
3
19
9
0.681818
0.666667
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
48ff59ae8aaec2febd81a3a4cfa7a9cb57a188c0
327
py
Python
src/utils/io.py
yasamangs/Anonymous-Telegram-bot
dfcc9115403ed85dab6c4b651fb12824a3e11376
[ "MIT" ]
null
null
null
src/utils/io.py
yasamangs/Anonymous-Telegram-bot
dfcc9115403ed85dab6c4b651fb12824a3e11376
[ "MIT" ]
null
null
null
src/utils/io.py
yasamangs/Anonymous-Telegram-bot
dfcc9115403ed85dab6c4b651fb12824a3e11376
[ "MIT" ]
null
null
null
import json def read_json_file(file_path): with open(file_path, 'r') as f: json.load(f) def write_json_file(data, file_path, indent=4): with open(file_path, 'a') as f: json.dump(data, f, indent=indent) def write_file(data, file_path): with open(file_path, 'w') as f: f.write(data)
19.235294
47
0.633028
56
327
3.5
0.339286
0.244898
0.183673
0.244898
0.244898
0.244898
0
0
0
0
0
0.004
0.235474
327
16
48
20.4375
0.78
0
0
0
0
0
0.009174
0
0
0
0
0
0
1
0.3
false
0
0.1
0
0.4
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
5b197ba86dff41ae0a9405f0648bcf80eebce69e
320
py
Python
qaseio/src/qaseio/client/models/__init__.py
aleksandr-kotlyar/qase-python
3e6916eb4bf3518651e0a8e2e62281bfe0bfa464
[ "Apache-2.0" ]
null
null
null
qaseio/src/qaseio/client/models/__init__.py
aleksandr-kotlyar/qase-python
3e6916eb4bf3518651e0a8e2e62281bfe0bfa464
[ "Apache-2.0" ]
null
null
null
qaseio/src/qaseio/client/models/__init__.py
aleksandr-kotlyar/qase-python
3e6916eb4bf3518651e0a8e2e62281bfe0bfa464
[ "Apache-2.0" ]
null
null
null
# flake8: noqa from .attachments import * from .base import * from .cases import * from .custom_fields import * from .defects import * from .milestones import * from .plans import * from .projects import * from .results import * from .runs import * from .shared_steps import * from .suites import * from .users import *
21.333333
28
0.740625
43
320
5.465116
0.44186
0.510638
0
0
0
0
0
0
0
0
0
0.003774
0.171875
320
14
29
22.857143
0.883019
0.0375
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
5b1a14142cca0e0c2c2f0b27bf6b99e5490d946d
5,197
gyp
Python
third_party/css_parser/css_parser.gyp
PeterDaveHello/incubator-pagespeed-mod
885f4653e204e1152cb3928f0755d93ec5fdceae
[ "Apache-2.0" ]
2
2019-11-02T07:54:17.000Z
2020-04-16T09:26:51.000Z
third_party/css_parser/css_parser.gyp
PeterDaveHello/incubator-pagespeed-mod
885f4653e204e1152cb3928f0755d93ec5fdceae
[ "Apache-2.0" ]
12
2017-03-14T18:26:11.000Z
2021-10-01T15:33:50.000Z
third_party/css_parser/css_parser.gyp
PeterDaveHello/incubator-pagespeed-mod
885f4653e204e1152cb3928f0755d93ec5fdceae
[ "Apache-2.0" ]
1
2020-04-16T09:28:30.000Z
2020-04-16T09:28:30.000Z
# Copyright 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. { 'variables': { # chromium_code indicates that the code is not # third-party code and should be subjected to strict compiler # warnings/errors in order to catch programming mistakes. 'chromium_code': 1, 'css_parser_root': 'src', 'instaweb_root': '../..', }, 'targets': [ { 'variables': { 'chromium_code': 0, }, 'target_name': 'utf', 'type': '<(library)', 'dependencies': [ '<(DEPTH)/base/base.gyp:base', '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', ], 'include_dirs': [ '<(css_parser_root)', '<(DEPTH)', ], 'cflags': ['-funsigned-char', '-Wno-sign-compare', '-Wno-return-type'], 'sources': [ '<(css_parser_root)/third_party/utf/rune.c', '<(css_parser_root)/third_party/utf/runestrcat.c', '<(css_parser_root)/third_party/utf/runestrchr.c', '<(css_parser_root)/third_party/utf/runestrcmp.c', '<(css_parser_root)/third_party/utf/runestrcpy.c', '<(css_parser_root)/third_party/utf/runestrecpy.c', '<(css_parser_root)/third_party/utf/runestrlen.c', '<(css_parser_root)/third_party/utf/runestrncat.c', '<(css_parser_root)/third_party/utf/runestrncmp.c', '<(css_parser_root)/third_party/utf/runestrncpy.c', '<(css_parser_root)/third_party/utf/runestrrchr.c', '<(css_parser_root)/third_party/utf/runestrstr.c', '<(css_parser_root)/third_party/utf/runetype.c', '<(css_parser_root)/third_party/utf/utf.h', '<(css_parser_root)/third_party/utf/utfdef.h', '<(css_parser_root)/third_party/utf/utfecpy.c', 
'<(css_parser_root)/third_party/utf/utflen.c', '<(css_parser_root)/third_party/utf/utfnlen.c', '<(css_parser_root)/third_party/utf/utfrrune.c', '<(css_parser_root)/third_party/utf/utfrune.c', '<(css_parser_root)/third_party/utf/utfutf.c', ], }, { 'target_name': 'css_parser_gperf', 'variables': { 'instaweb_gperf_subdir': 'third_party/css_parser/src/webutil/css', }, 'dependencies': [ '<(DEPTH)/base/base.gyp:base', '<(DEPTH)/pagespeed/kernel.gyp:proto_util', '<(DEPTH)/pagespeed/kernel.gyp:util', '<(DEPTH)/third_party/google-sparsehash/google-sparsehash.gyp:include', ], 'sources': [ '<(css_parser_root)/webutil/css/identifier.gperf', '<(css_parser_root)/webutil/css/property.gperf', ], 'include_dirs': [ '<(css_parser_root)', '<(DEPTH)', ], 'includes': [ '../../net/instaweb/gperf.gypi', ], }, { 'target_name': 'css_parser', 'type': '<(library)', 'dependencies': [ '<(DEPTH)/base/base.gyp:base', '<(DEPTH)/third_party/gflags/gflags.gyp:gflags', '<(DEPTH)/third_party/google-sparsehash/google-sparsehash.gyp:include', 'css_parser_gperf', 'utf', ], 'export_dependent_settings': [ '<(DEPTH)/third_party/google-sparsehash/google-sparsehash.gyp:include', ], 'include_dirs': [ '<(css_parser_root)', '<(DEPTH)', ], 'cflags': ['-funsigned-char', '-Wno-sign-compare', '-Wno-return-type'], 'sources': [ '<(css_parser_root)/string_using.h', '<(css_parser_root)/webutil/css/media.cc', '<(css_parser_root)/webutil/css/parser.cc', '<(css_parser_root)/webutil/css/selector.cc', '<(css_parser_root)/webutil/css/string_util.cc', '<(css_parser_root)/webutil/css/tostring.cc', '<(css_parser_root)/webutil/css/util.cc', '<(css_parser_root)/webutil/css/value.cc', #'<(css_parser_root)/webutil/css/parse_arg.cc', # Tests #'<(css_parser_root)/webutil/css/gtest_main.cc', #'<(css_parser_root)/webutil/css/identifier_test.cc', #'<(css_parser_root)/webutil/css/parser_unittest.cc', #'<(css_parser_root)/webutil/css/property_test.cc', #'<(css_parser_root)/webutil/css/tostring_test.cc', 
#'<(css_parser_root)/webutil/css/util_test.cc', '<(css_parser_root)/webutil/html/htmlcolor.cc', '<(css_parser_root)/webutil/html/htmltagenum.cc', '<(css_parser_root)/webutil/html/htmltagindex.cc', # UnicodeText '<(css_parser_root)/util/utf8/internal/unicodetext.cc', '<(css_parser_root)/util/utf8/internal/unilib.cc', # Supporting interfaces. '<(css_parser_root)/strings/ascii_ctype.cc', '<(css_parser_root)/strings/stringpiece_utils.cc', ], }, ], }
37.121429
79
0.620165
628
5,197
4.882166
0.278662
0.161448
0.207763
0.123288
0.573386
0.558382
0.431507
0.169276
0.169276
0.113503
0
0.002922
0.209736
5,197
139
80
37.388489
0.743608
0.210506
0
0.392523
0
0
0.672718
0.552012
0
0
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
5b1d1d0d618a7cef45f8eb019e394a33d379e8c4
687
py
Python
neurokit2/ecg/__init__.py
TiagoTostas/NeuroKit
664350463fc1c03eb81f0bba37296762be7c81ae
[ "MIT" ]
1
2020-05-26T09:46:57.000Z
2020-05-26T09:46:57.000Z
neurokit2/ecg/__init__.py
TiagoTostas/NeuroKit
664350463fc1c03eb81f0bba37296762be7c81ae
[ "MIT" ]
null
null
null
neurokit2/ecg/__init__.py
TiagoTostas/NeuroKit
664350463fc1c03eb81f0bba37296762be7c81ae
[ "MIT" ]
1
2020-10-27T06:47:51.000Z
2020-10-27T06:47:51.000Z
"""Submodule for NeuroKit.""" from .ecg_simulate import ecg_simulate from .ecg_clean import ecg_clean from .ecg_findpeaks import ecg_findpeaks from .ecg_fixpeaks import ecg_fixpeaks from .ecg_peaks import ecg_peaks from .ecg_rate import ecg_rate from .ecg_segment import ecg_segment from .ecg_process import ecg_process from .ecg_plot import ecg_plot from .ecg_delineate import ecg_delineate from .ecg_rsp import ecg_rsp from .ecg_hrv import ecg_hrv from .ecg_phase import ecg_phase from .ecg_rsa import ecg_rsa from .ecg_quality import ecg_quality from .ecg_eventrelated import ecg_eventrelated from .ecg_intervalrelated import ecg_intervalrelated from .ecg_analyze import ecg_analyze
32.714286
52
0.852984
111
687
4.954955
0.216216
0.229091
0
0
0
0
0
0
0
0
0
0
0.110626
687
20
53
34.35
0.900164
0.033479
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
d29ec8bb169267d50e52ac323768bca2dbf55690
3,658
py
Python
dev_nb/nb_002b.py
lesscomfortable/fastai_v1
bbc5c37329cf45f59bd2daaa2f56723cb7565643
[ "Apache-2.0" ]
1
2018-10-23T20:45:41.000Z
2018-10-23T20:45:41.000Z
dev_nb/nb_002b.py
lesscomfortable/fastai_v1
bbc5c37329cf45f59bd2daaa2f56723cb7565643
[ "Apache-2.0" ]
null
null
null
dev_nb/nb_002b.py
lesscomfortable/fastai_v1
bbc5c37329cf45f59bd2daaa2f56723cb7565643
[ "Apache-2.0" ]
null
null
null
################################################# ### THIS FILE WAS AUTOGENERATED! DO NOT EDIT! ### ################################################# from nb_002 import * import typing from typing import Dict, Any, AnyStr, List, Sequence, TypeVar, Tuple, Optional, Union def normalize(x, mean,std): return (x-mean[...,None,None]) / std[...,None,None] def denormalize(x, mean,std): return x*std[...,None,None] + mean[...,None,None] def normalize_batch(b, mean, std, do_y=False): x,y = b x = normalize(x,mean,std) if do_y: y = normalize(y,mean,std) return x,y def normalize_funcs(mean, std, do_y=False, device=None): if device is None: device=default_device return (partial(normalize_batch, mean=mean.to(device),std=std.to(device)), partial(denormalize, mean=mean, std=std)) @dataclass class DeviceDataLoader(): dl: DataLoader device: torch.device tfms: List[Callable]=None def __len__(self): return len(self.dl) def proc_batch(self,b): b = to_device(self.device,b) return b if self.tfms is None else self.tfms(b) def __iter__(self): self.gen = map(self.proc_batch, self.dl) return iter(self.gen) @classmethod def create(cls, *args, device=default_device, tfms=tfms, **kwargs): return cls(DataLoader(*args, **kwargs), device=device, tfms=tfms) class DataBunch(): def __init__(self, train_dl:DataLoader, valid_dl:DataLoader, device:torch.device=None, tfms=None): self.device = default_device if device is None else device self.train_dl = DeviceDataLoader(train_dl, self.device, tfms=tfms) self.valid_dl = DeviceDataLoader(valid_dl, self.device, tfms=tfms) @classmethod def create(cls, train_ds, valid_ds, bs=64, train_tfm=None, valid_tfm=None, num_workers=4, tfms=None, device=None, **kwargs): if train_tfm or not isinstance(train_ds, DatasetTfm): train_ds = DatasetTfm(train_ds,train_tfm, **kwargs) if valid_tfm or not isinstance(valid_ds, DatasetTfm): valid_ds = DatasetTfm(valid_ds,valid_tfm, **kwargs) return cls(DataLoader(train_ds, bs, shuffle=True, num_workers=num_workers), DataLoader(valid_ds, 
bs*2, shuffle=False, num_workers=num_workers), device=device, tfms=tfms) @property def train_ds(self): return self.train_dl.dl.dataset @property def valid_ds(self): return self.valid_dl.dl.dataset @property def c(self): return self.train_ds.c def conv_layer(ni, nf, ks=3, stride=1): return nn.Sequential( nn.Conv2d(ni, nf, kernel_size=ks, bias=False, stride=stride, padding=ks//2), nn.BatchNorm2d(nf), nn.LeakyReLU(negative_slope=0.1, inplace=True)) class ResLayer(nn.Module): def __init__(self, ni): super().__init__() self.conv1=conv_layer(ni, ni//2, ks=1) self.conv2=conv_layer(ni//2, ni, ks=3) def forward(self, x): return x + self.conv2(self.conv1(x)) class Darknet(nn.Module): def make_group_layer(self, ch_in, num_blocks, stride=1): return [conv_layer(ch_in, ch_in*2,stride=stride) ] + [(ResLayer(ch_in*2)) for i in range(num_blocks)] def __init__(self, num_blocks, num_classes, nf=32): super().__init__() layers = [conv_layer(3, nf, ks=3, stride=1)] for i,nb in enumerate(num_blocks): layers += self.make_group_layer(nf, nb, stride=2-(i==1)) nf *= 2 layers += [nn.AdaptiveAvgPool2d(1), Flatten(), nn.Linear(nf, num_classes)] self.layers = nn.Sequential(*layers) def forward(self, x): return self.layers(x)
38.914894
113
0.639967
527
3,658
4.263757
0.233397
0.021807
0.031153
0.018692
0.155763
0
0
0
0
0
0
0.012003
0.202843
3,658
94
114
38.914894
0.758573
0.011208
0
0.098592
1
0
0
0
0
0
0
0
0
1
0.267606
false
0
0.042254
0.15493
0.521127
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
d2ff39f2333f671356abe0fb9bfa6f02265efda9
18,507
py
Python
vseq/modules/hmlstm.py
JakobHavtorn/vseq
bdd0258738b5f43d6f0f6c3df4b8b270f06d0aea
[ "MIT" ]
7
2021-03-25T12:33:53.000Z
2022-03-23T13:10:31.000Z
vseq/modules/hmlstm.py
JakobHavtorn/vseq
bdd0258738b5f43d6f0f6c3df4b8b270f06d0aea
[ "MIT" ]
null
null
null
vseq/modules/hmlstm.py
JakobHavtorn/vseq
bdd0258738b5f43d6f0f6c3df4b8b270f06d0aea
[ "MIT" ]
null
null
null
from typing import List, Optional, Union, Tuple import math import torch import torch.nn as nn from torch import sigmoid, tanh from torch.nn import Parameter from torchtyping import TensorType from vseq.modules.straight_through import BernoulliSTE, BinaryThresholdSTE def hard_sigmoid(x, slope: float = 1): temp = torch.div(torch.add(torch.mul(x, slope), 1), 2.0) output = torch.clamp(temp, min=0, max=1) return output class HMLSTMCell(nn.Module): def __init__( self, hidden_size: int, below_size: int, above_size: Optional[int] = None, threshold_fn: str = "threshold", ): """Hierarchical Multilevel LSTM cell (HM-LSTM) as described in [1]. Parameters: W_10 is the state transition parameters from layer l-1 (bottom layer) to layer l U_11 is the state transition parameters from layer l (current layer) to layer l U_21 is the state transition parameters from layer l+1 (top layer) to layer l Internal representations are (D, B), i.e. batch last and dimension first. This saves a transpose. Args: hidden_size (int): Dimensionality of hidden layer (transition from `l` to `l`). above_size (int): Dimensionality of above layer (transition from `l+1` to `l`). Defaults to None. below_size (int): Dimensionality of below layer (transition frmo `l-1` to `l`). [1] Hierarchical Multiscale Recurrent Neural Networks. 
http://arxiv.org/abs/1609.01704 """ super().__init__() self.below_size = below_size self.hidden_size = hidden_size self.above_size = above_size self.threshold_fn = threshold_fn self.is_top_layer = above_size is None self.gates_size = 4 * self.hidden_size + 1 self.W_10 = Parameter(torch.FloatTensor(self.gates_size, self.below_size)) self.U_11 = Parameter(torch.FloatTensor(self.gates_size, self.hidden_size)) if not self.is_top_layer: self.U_21 = Parameter(torch.FloatTensor(self.gates_size, self.above_size)) self.bias = Parameter(torch.FloatTensor(self.gates_size)) if threshold_fn == "threshold": self.threshold_ste = BinaryThresholdSTE(threshold=0.5) elif threshold_fn == "bernoulli": self.threshold_ste = BernoulliSTE() elif threshold_fn == "soft": self.threshold_ste = None else: raise ValueError(f"Unknown threshold function `{threshold_fn}`") self.reset_parameters() def reset_parameters(self): stdv = 1.0 / math.sqrt(self.hidden_size) for par in self.parameters(): par.data.uniform_(-stdv, stdv) def forward( self, c: TensorType["D", "B"], h_below: TensorType["D", "B"], h: TensorType["D", "B"], h_above: TensorType["D", "B"], z: TensorType[1, "B"], z_below: TensorType[1, "B"], a: float = 1, ) -> Tuple[TensorType["D", "B"], TensorType["D", "B"], TensorType["D", 1]]: """Perform HM-LSTM forward pass. 
Update logic: if z == 1: (FLUSH) c_new = i * g h_new = o * F.tanh(c_new) elif z_below == 0: (COPY) c_new = c h_new = h else: (UPDATE) c_new = f * c + i * g h_new = o * F.tanh(c_new) Update logic alternative (but seemingly slower) implementation: c_new = torch.zeros_like(f) z = z.expand_as(f) flush = z == 1 update = torch.logical_and(z == 0, z_below == 1) copy = torch.logical_and(z == 0, z_below == 0) c_new[:, flush] = (i[:, flush] * g[:, flush]) c_new[:, update] = (f[:, update] * c[:, update] + i[:, update] * g[:, update]) c_new[:, copy] = c[:, copy] Args: c (torch.Tensor): Previous time step cell state for this layer h_below (torch.Tensor): Current time step hidden state from layer below h (torch.Tensor): Previous time step hidden state for this layer h_above (torch.Tensor): Previous time step hidden state for layer above (if any, otherwise ignored) z (torch.Tensor): Previous time step boundary detector from this layer z_below (torch.Tensor): Current time step boundary detector from layer below a (float, optional): Slope of hard sigmoid activation for boundary detector. Defaults to 1. 
Returns: tuple: (h, c, z) for this time step """ s_recurrent = torch.mm(self.U_11, h) if self.is_top_layer: s_topdown = torch.zeros_like(s_recurrent) else: s_topdown = z * torch.mm(self.U_21, h_above) s_bottomup = z_below * torch.mm(self.W_10, h_below) f_slice = s_recurrent + s_topdown + s_bottomup + self.bias.unsqueeze(1) forgetgate, ingate, outgate, cellgate = f_slice[:-1, :].chunk(chunks=4, dim=0) z_gate = f_slice[self.hidden_size * 4 : self.hidden_size * 4 + 1] f = sigmoid(forgetgate) i = sigmoid(ingate) o = sigmoid(outgate) g = tanh(cellgate) z_hat = hard_sigmoid(z_gate, slope=a) one = torch.ones_like(f) c_new = z * (i * g) + (one - z) * (one - z_below) * c + (one - z) * z_below * (f * c + i * g) h_new = ( z * o * tanh(c_new) + (one - z) * (one - z_below) * h + (one - z) * z_below * o * tanh(c_new) ) z_new = self.threshold_ste(z_hat) return h_new, c_new, z_new def extra_repr(self) -> str: return f"below_size={self.below_size}, hidden_size={self.hidden_size}, above_size={self.above_size}" class LayerNormHMLSTMCell(nn.Module): def __init__( self, hidden_size: int, below_size: int, above_size: Optional[int] = None, threshold_fn: str = "threshold", elementwise_affine: bool = True, ): """Hierarchical Multilevel LSTM cell (HM-LSTM) as described in [1]. Parameters: W_10 is the state transition parameters from layer l-1 (bottom layer) to layer l U_11 is the state transition parameters from layer l (current layer) to layer l U_21 is the state transition parameters from layer l+1 (top layer) to layer l Internal representations are (D, B), i.e. batch last and dimension first. This saves a transpose. Args: hidden_size (int): Dimensionality of hidden layer (transition from `l` to `l`). above_size (int): Dimensionality of above layer (transition from `l+1` to `l`). Defaults to None. below_size (int): Dimensionality of below layer (transition frmo `l-1` to `l`). [1] Hierarchical Multiscale Recurrent Neural Networks. 
http://arxiv.org/abs/1609.01704 """ super().__init__() self.below_size = below_size self.hidden_size = hidden_size self.above_size = above_size self.threshold_fn = threshold_fn self.is_top_layer = above_size is None self.gates_size = 4 * self.hidden_size + 1 self.W_10 = Parameter(torch.FloatTensor(self.gates_size, self.below_size)) self.U_11 = Parameter(torch.FloatTensor(self.gates_size, self.hidden_size)) if not self.is_top_layer: self.U_21 = Parameter(torch.FloatTensor(self.gates_size, self.above_size)) self.ln_10 = nn.LayerNorm(self.gates_size, elementwise_affine=elementwise_affine) self.ln_11 = nn.LayerNorm(self.gates_size, elementwise_affine=elementwise_affine) self.ln_21 = nn.LayerNorm(self.gates_size, elementwise_affine=elementwise_affine) if threshold_fn == "threshold": self.threshold_ste = BinaryThresholdSTE() elif threshold_fn == "bernoulli": self.threshold_ste = BernoulliSTE() elif threshold_fn == "soft": self.threshold_ste = None else: raise ValueError(f"Unknown threshold function `{threshold_fn}`") self.reset_parameters() def reset_parameters(self): stdv = 1.0 / math.sqrt(self.hidden_size) for par in self.parameters(): par.data.uniform_(-stdv, stdv) def forward( self, c: TensorType["D", "B"], h_below: TensorType["D", "B"], h: TensorType["D", "B"], h_above: TensorType["D", "B"], z: TensorType[1, "B"], z_below: TensorType[1, "B"], a: float = 1, ) -> Tuple[TensorType["D", "B"], TensorType["D", "B"], TensorType["D", 1]]: """Perform HM-LSTM forward pass. 
Update logic: if z == 1: (FLUSH) c_new = i * g h_new = o * F.tanh(c_new) elif z_below == 0: (COPY) c_new = c h_new = h else: (UPDATE) c_new = f * c + i * g h_new = o * F.tanh(c_new) Update logic alternative (but seemingly slower) implementation: c_new = torch.zeros_like(f) z = z.expand_as(f) flush = z == 1 update = torch.logical_and(z == 0, z_below == 1) copy = torch.logical_and(z == 0, z_below == 0) c_new[:, flush] = (i[:, flush] * g[:, flush]) c_new[:, update] = (f[:, update] * c[:, update] + i[:, update] * g[:, update]) c_new[:, copy] = c[:, copy] Args: c (torch.Tensor): Previous time step cell state for this layer h_below (torch.Tensor): Current time step hidden state from layer below h (torch.Tensor): Previous time step hidden state for this layer h_above (torch.Tensor): Previous time step hidden state for layer above (if any, otherwise ignored) z (torch.Tensor): Previous time step boundary detector from this layer z_below (torch.Tensor): Current time step boundary detector from layer below a (float, optional): Slope of hard sigmoid activation for boundary detector. Defaults to 1. 
Returns: tuple: (h, c, z) for this time step """ s_recurrent = self.ln_11(torch.mm(self.U_11, h).T).T if self.is_top_layer: s_topdown = torch.zeros_like(s_recurrent) else: s_topdown = z * self.ln_21(torch.mm(self.U_21, h_above).T).T s_bottomup = z_below * self.ln_10(torch.mm(self.W_10, h_below).T).T f_slice = s_recurrent + s_topdown + s_bottomup forgetgate, ingate, outgate, cellgate = f_slice[:-1, :].chunk(chunks=4, dim=0) z_gate = f_slice[self.hidden_size * 4 : self.hidden_size * 4 + 1] f = sigmoid(forgetgate) i = sigmoid(ingate) o = sigmoid(outgate) g = tanh(cellgate) z_hat = hard_sigmoid(z_gate, slope=a) one = torch.ones_like(f) c_new = z * (i * g) + (one - z) * (one - z_below) * c + (one - z) * z_below * (f * c + i * g) h_new = (z * o * tanh(c_new) + (one - z) * (one - z_below) * h + (one - z) * z_below * o * tanh(c_new)) z_new = self.threshold_ste(z_hat) return h_new, c_new, z_new def extra_repr(self) -> str: return f"below_size={self.below_size}, hidden_size={self.hidden_size}, above_size={self.above_size}" class HMLSTM(nn.Module): def __init__( self, input_size: int, sizes: Union[int, List[int]], num_layers: Optional[int] = None, layer_norm: bool = False ): super().__init__() assert ( (isinstance(sizes, list) and num_layers is None) or (isinstance(sizes, int) and num_layers is not None), "Must give `sizes` as list and not `num_layers` OR `sizes` as int along with a number of layers", ) self.input_size = input_size self.sizes = sizes self.num_layers = len(sizes) if num_layers is None else num_layers self.layer_norm = layer_norm cell = LayerNormHMLSTMCell if layer_norm else HMLSTMCell sizes = [input_size, *sizes, None] cells = torch.nn.ModuleList() for l in range(self.num_layers): cells.append(cell(below_size=sizes[l], hidden_size=sizes[l + 1], above_size=sizes[l + 2])) self.cells = cells def forward( self, x: TensorType["B", "T", "D"], h_init: Optional[List[TensorType["B", "T", "D"]]] = None, c_init: Optional[List[TensorType["B", "T", "D"]]] = None, z_init: 
Optional[List[TensorType["B", "T", 1]]] = None, a: float = 1, ): # x.size = (B, T, D) time_steps = x.size(1) batch_size = x.size(0) device = x.device if h_init is None: h = [[torch.zeros(self.sizes[l], batch_size, device=device)] for l in range(self.num_layers)] else: h = [[h.permute(2, 1, 0)] for h in h_init] # (B, T, D) to (D, T, B) if c_init is None: c = [[torch.zeros(self.sizes[l], batch_size, device=device)] for l in range(self.num_layers)] else: c = [[c.permute(2, 1, 0)] for c in c_init] # (B, T, D) to (D, T, B) if z_init is None: z = [[torch.zeros(1, batch_size, device=device)] for l in range(self.num_layers)] else: z = [[z.permute(2, 1, 0)] for z in z_init] # (B, T, D) to (D, T, B) # create a fictive top layer that gives `h=None` for all time steps. # used as `h_above` input for the actual top layer. # h.append([torch.zeros(1, batch_size, device=device)] * time_steps) h.append([None] * time_steps) # z_below for layer 0 z_one = torch.ones(1, batch_size, device=device) x = x.permute(2, 1, 0) # (B, T, D) to (D, T, B) for t in range(time_steps): # input layer l = 0 h_tl, c_tl, z_tl = self.cells[l]( c=c[l][t], h=h[l][t], h_below=x[:, t, :], h_above=h[l + 1][t], z=z[l][t], z_below=z_one, # Never skip input by copying (forces UPDATE or FLUSH) a=a, ) h[l].append(h_tl) c[l].append(c_tl) z[l].append(z_tl) # additional layers for l in range(1, self.num_layers): h_tl, c_tl, z_tl = self.cells[l]( c=c[l][t], h=h[l][t], h_below=h[l - 1][t + 1], h_above=h[l + 1][t], z=z[l][t], z_below=z[l - 1][t + 1], a=a, ) h[l].append(h_tl) c[l].append(c_tl) z[l].append(z_tl) h = [hl[1:] for hl in h[:-1]] # Remove initial value and fictive layer c = [cl[1:] for cl in c] # Remove initial value z = [zl[1:] for zl in z] # Remove initial value # collect final timestep per layer h_out = [h[l][-1] for l in range(self.num_layers)] c_out = [c[l][-1] for l in range(self.num_layers)] z_out = [z[l][-1] for l in range(self.num_layers)] return ( [torch.stack(hl, dim=1).permute(2, 1, 0) for hl in h], 
# (B, T, D) [torch.stack(cl, dim=1).permute(2, 1, 0) for cl in c], # (B, T, D) [torch.stack(zl, dim=1).permute(2, 1, 0) for zl in z], # (B, T, 1) (h_out, c_out, z_out), # (B, D), (B, D), (B, 1) ) def realized_operations( self, z: List[TensorType["B", "T", 1]], x_sl: TensorType["B", int], seq_mask: TensorType["B", "T", bool] ): """Return the boolean masks incidating where the different operations took place and compute the clockrates""" update_ops, copy_ops, flush_ops = [], [], [] update_rates, copy_rates, flush_rates = [], [], [] x_sl = x_sl.to(z[0].device) for l, z_l in enumerate(z): z_below = torch.ones_like(z[0]) if l == 0 else z[l - 1] update_ops.append(((z_l[:, :-1, :] == 0) * (z_below[:, 1:, :] == 1)).squeeze()) copy_ops.append(((z_l[:, :-1, :] == 0) * (z_below[:, 1:, :] == 0)).squeeze()) flush_ops.append((z_l[:, :-1, :] == 1).squeeze()) update_rates.append((update_ops[l] * seq_mask[:, 1:]).sum(1) / x_sl) copy_rates.append((copy_ops[l] * seq_mask[:, 1:]).sum(1) / x_sl) flush_rates.append((flush_ops[l] * seq_mask[:, 1:]).sum(1) / x_sl) return update_ops, copy_ops, flush_ops, update_rates, copy_rates, flush_rates if __name__ == "__main__": import timeit import numpy as np device = "cuda" if torch.cuda.is_available() else "cpu" batch = 32 hidden_size = 256 below_size = 128 above_size = 512 cell = HMLSTMCell( hidden_size=hidden_size, below_size=below_size, above_size=above_size, threshold_fn="threshold", ).to(device) lncell = HMLSTMCell( hidden_size=hidden_size, below_size=below_size, above_size=above_size, threshold_fn="threshold", ).to(device) lstm = nn.LSTMCell( input_size=hidden_size, hidden_size=hidden_size, ).to(device) c = torch.randn(hidden_size, batch).to(device) h = torch.randn(hidden_size, batch).to(device) z = torch.randint(low=0, high=2, size=(1, batch)).to(device) h_above = torch.randn(above_size, batch).to(device) h_below = torch.randn(below_size, batch).to(device) z_below = torch.randint(low=0, high=2, size=(1, batch)).to(device) cell(c, h_below, 
h, h_above, z, z_below) timer = timeit.Timer("lstm(h, (h, c))", globals=dict(lstm=lstm, h=h.T, c=c.T)) number, time_taken = timer.autorange() timings = timer.repeat(repeat=10, number=number) print(f"LSTM: {number=:d}, {min(timings):.3e} +- {np.std(timings):.3e} s") timer = timeit.Timer("cell(c, h_below, h, h_above, z, z_below)", globals=globals()) number, time_taken = timer.autorange() timings = timer.repeat(repeat=10, number=number) print(f"HM-LSTM: {number=:d}, {min(timings):.3e} +- {np.std(timings):.3e} s") timer = timeit.Timer("lncell(c, h_below, h, h_above, z, z_below)", globals=globals()) number, time_taken = timer.autorange() timings = timer.repeat(repeat=10, number=number) print(f"HM-LSTM LayerNorm: {number=:d}, {min(timings):.3e} +- {np.std(timings):.3e} s")
39.044304
119
0.573026
2,651
18,507
3.834025
0.108638
0.032468
0.022039
0.018103
0.77804
0.757576
0.748131
0.72137
0.696183
0.670504
0
0.017793
0.295456
18,507
473
120
39.12685
0.761715
0.261739
0
0.554007
0
0.013937
0.062201
0.018444
0
0
0
0
0.003484
1
0.041812
false
0
0.034843
0.006969
0.111498
0.010453
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
826aafa7cf07e63c0a0fd1c82f2280ac3515e8bc
100
py
Python
flask_app/common/__init__.py
Jarrettluo/flask-restful-quick-start
50b923baa80e769f07a89fba71745d9333cf9e88
[ "MIT" ]
6
2021-11-15T04:39:41.000Z
2022-03-03T09:59:24.000Z
flask_app/common/__init__.py
Jarrettluo/flask-restful-quick-start
50b923baa80e769f07a89fba71745d9333cf9e88
[ "MIT" ]
null
null
null
flask_app/common/__init__.py
Jarrettluo/flask-restful-quick-start
50b923baa80e769f07a89fba71745d9333cf9e88
[ "MIT" ]
null
null
null
# encoding: utf-8 """ @version: 1.0 @author: Jarrett @file: __init__.py @time: 2021/11/10 15:50 """
12.5
23
0.64
17
100
3.529412
1
0
0
0
0
0
0
0
0
0
0
0.174419
0.14
100
7
24
14.285714
0.523256
0.9
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
8273790a5ce59a9fcfc206bec5e557c7014c8ce0
3,940
py
Python
tests/charts-out/test_graphics_charts_piecharts_sample2.py
debragail/reportlab-mirror
1e5814e1313ed50d5abb65487b207711cb4f7595
[ "BSD-3-Clause" ]
1
2020-05-21T23:34:55.000Z
2020-05-21T23:34:55.000Z
tests/charts-out/test_graphics_charts_piecharts_sample2.py
debragail/reportlab-mirror
1e5814e1313ed50d5abb65487b207711cb4f7595
[ "BSD-3-Clause" ]
null
null
null
tests/charts-out/test_graphics_charts_piecharts_sample2.py
debragail/reportlab-mirror
1e5814e1313ed50d5abb65487b207711cb4f7595
[ "BSD-3-Clause" ]
null
null
null
#Autogenerated by ReportLab guiedit do not edit from reportlab.graphics.shapes import _DrawingEditorMixin, Drawing, Group, Wedge, String from reportlab.lib.colors import Color, CMYKColor, PCMYKColor class ExplodedDrawing_Drawing(_DrawingEditorMixin,Drawing): def __init__(self,width=400,height=200,*args,**kw): Drawing.__init__(self,width,height,*args,**kw) self.transform = (1,0,0,1,0,0) self.add(Wedge(200,100,75,-21.6,90,yradius=75,annular=False,fillColor=Color(.27451,.509804,.705882,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=1,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) self.add(Wedge(200,100,75,-74.88,-21.6,yradius=75,annular=False,fillColor=Color(.847059,.74902,.847059,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=1,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) self.add(Wedge(200,100,75,-113.76,-74.88,yradius=75,annular=False,fillColor=Color(.392157,.584314,.929412,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=1,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) self.add(Wedge(200,100,75,-141.12,-113.76,yradius=75,annular=False,fillColor=Color(.690196,.768627,.870588,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=1,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) self.add(Wedge(200,100,75,-153,-141.12,yradius=75,annular=False,fillColor=Color(.498039,1,.831373,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=1,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) self.add(Wedge(200,100,75,-163.8,-153,yradius=75,annular=False,fillColor=Color(.372549,.619608,.627451,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=1,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) 
self.add(Wedge(200,100,75,-170.64,-163.8,yradius=75,annular=False,fillColor=Color(.941176,.501961,.501961,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=1,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) self.add(Wedge(200,100,75,-216,-170.64,yradius=75,annular=False,fillColor=Color(.823529,.705882,.54902,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=1,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) self.add(Wedge(200,100,75,-270,-216,yradius=75,annular=False,fillColor=Color(.560784,.737255,.545098,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=1,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) self.add(String(274.4373,150.5875,'1',textAnchor='middle',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) self.add(String(259.9411,32.8653,'2',textAnchor='middle',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) self.add(String(193.2206,10.2557,'3',textAnchor='middle',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) self.add(String(145.2863,28.54086,'4',textAnchor='middle',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) self.add(String(124.4684,51.06156,'5',textAnchor='middle',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) self.add(String(116.3201,66.86879,'6',textAnchor='middle',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) self.add(String(112.2296,80.09127,'7',textAnchor='middle',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) self.add(String(112.4211,120.735,'8',textAnchor='middle',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) self.add(String(159.1409,180.1906,'X',textAnchor='middle',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) if __name__=="__main__": #NORUNTESTS ExplodedDrawing_Drawing().save(formats=['pdf'],outDir='.',fnRoot=None)
127.096774
263
0.784264
609
3,940
5.041051
0.239737
0.024756
0.018567
0.046906
0.762215
0.762215
0.653094
0.653094
0.653094
0.653094
0
0.153746
0.024365
3,940
30
264
131.333333
0.644901
0.014213
0
0
1
0
0.044822
0
0
0
0
0
0
1
0.038462
false
0
0.076923
0
0.153846
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
1
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
8277c5f90252eb63190632587c07ff681043bb78
4,151
py
Python
python/src/drop_one_align_feature.py
antlr/groom
909c04b386c6d384344cd0d060dd1e3b4bde77a2
[ "BSD-2-Clause" ]
408
2016-04-21T09:40:08.000Z
2022-03-22T02:05:29.000Z
python/src/drop_one_align_feature.py
antlr/groom
909c04b386c6d384344cd0d060dd1e3b4bde77a2
[ "BSD-2-Clause" ]
25
2016-01-24T17:28:49.000Z
2021-05-05T19:17:55.000Z
python/src/drop_one_align_feature.py
antlr/groom
909c04b386c6d384344cd0d060dd1e3b4bde77a2
[ "BSD-2-Clause" ]
78
2016-02-14T07:22:21.000Z
2022-02-10T08:23:12.000Z
# # AUTO-GENERATED FILE. DO NOT EDIT # CodeBuff 1.4.13 'Fri May 13 12:46:16 PDT 2016' # import matplotlib.pyplot as plt fig = plt.figure() ax = plt.subplot(111) N = 19 featureIndexes = range(0,N) java = [0.0, 0.0, 0.0, 0.042016808, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] ax.plot(featureIndexes, java, label="java") sqlite = [0.25, 0.25, 0.2777778, 0.31034482, 0.25283018, 0.25925925, 0.25, 0.25, 0.25, 0.25, 0.26792452, 0.25, 0.26923078, 0.25283018, 0.2413793, 0.25, 0.25283018, 0.25, 0.25660378] ax.plot(featureIndexes, sqlite, label="sqlite") java8 = [0.0, 0.0, 0.0, 0.014814815, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] ax.plot(featureIndexes, java8, label="java8") quorum = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10526316, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] ax.plot(featureIndexes, quorum, label="quorum") antlr = [0.016200295, 0.016200295, 0.016200295, 0.041666668, 0.016200295, 0.02503682, 0.040257648, 0.016200295, 0.016200295, 0.016200295, 0.025, 0.016200295, 0.016200295, 0.016200295, 0.016200295, 0.016200295, 0.016200295, 0.016200295, 0.016200295] ax.plot(featureIndexes, antlr, label="antlr") tsql = [0.1875, 0.17460318, 0.22222222, 0.23569024, 0.1875, 0.18855219, 0.1875, 0.19865319, 0.1875, 0.1875, 0.1923077, 0.1875, 0.17391305, 0.1875, 0.17460318, 0.1875, 0.1875, 0.1875, 0.18855219] ax.plot(featureIndexes, tsql, label="tsql") labels = ['curated', 'LT(-1) right ancestor', ' LT(1)', 'Strt line', 'Big list', 'List elem.', 'token child index', 'LT(1) left ancestor', 'ancestor child index', ' parent', 'parent child index', ' parent^2', 'parent^2 child index', ' parent^3', 'parent^3 child index', ' parent^4', 'parent^4 child index', ' parent^5', 'parent^5 child index', 'LT(-1) right ancestor', 'LT(1)', 'Strt line', 'Big list', 'List elem.', 'token child index', 'LT(1) left ancestor', 'ancestor child index', 'parent', 'parent child index', 'parent^2', 'parent^2 child index', 'parent^3', 
'parent^3 child index', 'parent^4', 'parent^4 child index', 'parent^5', 'parent^5 child index', 'LT(-1) right ancestor', 'LT(1)', 'Strt line', 'Big list', 'List elem.', 'token child index', 'LT(1) left ancestor', 'ancestor child index', 'parent', 'parent child index', 'parent^2', 'parent^2 child index', 'parent^3', 'parent^3 child index', 'parent^4', 'parent^4 child index', 'parent^5', 'parent^5 child index', 'LT(-1) right ancestor', 'LT(1)', 'Strt line', 'Big list', 'List elem.', 'token child index', 'LT(1) left ancestor', 'ancestor child index', 'parent', 'parent child index', 'parent^2', 'parent^2 child index', 'parent^3', 'parent^3 child index', 'parent^4', 'parent^4 child index', 'parent^5', 'parent^5 child index', 'LT(-1) right ancestor', 'LT(1)', 'Strt line', 'Big list', 'List elem.', 'token child index', 'LT(1) left ancestor', 'ancestor child index', 'parent', 'parent child index', 'parent^2', 'parent^2 child index', 'parent^3', 'parent^3 child index', 'parent^4', 'parent^4 child index', 'parent^5', 'parent^5 child index', 'LT(-1) right ancestor', 'LT(1)', 'Strt line', 'Big list', 'List elem.', 'token child index', 'LT(1) left ancestor', 'ancestor child index', 'parent', 'parent child index', 'parent^2', 'parent^2 child index', 'parent^3', 'parent^3 child index', 'parent^4', 'parent^4 child index', 'parent^5', 'parent^5 child index', 'LT(-1) right ancestor', 'LT(1)', 'Strt line', 'Big list', 'List elem.', 'token child index', 'LT(1) left ancestor', 'ancestor child index', 'parent', 'parent child index', 'parent^2', 'parent^2 child index', 'parent^3', 'parent^3 child index', 'parent^4', 'parent^4 child index', 'parent^5', 'parent^5 child index'] ax.set_xticklabels(labels, rotation=60, fontsize=8) plt.xticks(featureIndexes, labels, rotation=60) ax.yaxis.grid(True, linestyle='-', which='major', color='lightgrey', alpha=0.5) ax.set_xlabel("Alignment Feature") ax.set_ylabel("Median Error rate") ax.set_title("Effect of Dropping One Feature on Alignment 
Decision\nMedian Leave-one-out Validation Error Rate") plt.legend() plt.tight_layout() fig.savefig("images/drop_one_align_feature.pdf", format='pdf') plt.show()
115.305556
2,245
0.668755
730
4,151
3.791781
0.173973
0.075867
0.107298
0.134393
0.666546
0.643786
0.643786
0.611633
0.611633
0.611633
0
0.184354
0.119248
4,151
35
2,246
118.6
0.572757
0.019032
0
0
1
0
0.476764
0.008114
0
0
0
0
0
1
0
false
0
0.035714
0
0.035714
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
1
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
827f584a8724a0574ee052fa46cdf97997fceb51
231
py
Python
intro/part03-31_print_many_times/src/print_many_times.py
Hannah-Abi/python-pro-21
2ce32c4bf118054329d19afdf83c50561be1ada8
[ "MIT" ]
null
null
null
intro/part03-31_print_many_times/src/print_many_times.py
Hannah-Abi/python-pro-21
2ce32c4bf118054329d19afdf83c50561be1ada8
[ "MIT" ]
null
null
null
intro/part03-31_print_many_times/src/print_many_times.py
Hannah-Abi/python-pro-21
2ce32c4bf118054329d19afdf83c50561be1ada8
[ "MIT" ]
null
null
null
# Write your solution here def print_many_times(text, times): print((text + "\n") * int(times) ) # You can test your function by calling it within the following block if __name__ == "__main__": print_many_times("python", 5)
38.5
69
0.709957
35
231
4.342857
0.771429
0.118421
0.184211
0
0
0
0
0
0
0
0
0.005236
0.17316
231
6
70
38.5
0.790576
0.398268
0
0
0
0
0.116788
0
0
0
0
0
0
1
0.25
false
0
0
0
0.25
0.75
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
1
0
4
82c6a6219f1368afbca59065c9a922bdbd956dea
205
py
Python
text/_cascade/block_format_context.py
jedhsu/text
8525b602d304ac571a629104c48703443244545c
[ "Apache-2.0" ]
null
null
null
text/_cascade/block_format_context.py
jedhsu/text
8525b602d304ac571a629104c48703443244545c
[ "Apache-2.0" ]
null
null
null
text/_cascade/block_format_context.py
jedhsu/text
8525b602d304ac571a629104c48703443244545c
[ "Apache-2.0" ]
null
null
null
# [TODO] place more preicsely from dataclasses import dataclass @dataclass class Overflow: anchor: type block: type clip_margin: type inline: type wrap: type x: type y: type
13.666667
33
0.663415
26
205
5.192308
0.730769
0
0
0
0
0
0
0
0
0
0
0
0.278049
205
14
34
14.642857
0.912162
0.131707
0
0
0
0
0
0
0
0
0
0.071429
0
1
0
true
0
0.1
0
0.9
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
0
0
1
0
0
4
82d49e9b4bbcb41faa8653654b49687ef26ae5f2
113
py
Python
src/config.py
jowtro/jx-flask-temp1
4d3d998469658c884ba706316381ac59628cfc5c
[ "MIT" ]
null
null
null
src/config.py
jowtro/jx-flask-temp1
4d3d998469658c884ba706316381ac59628cfc5c
[ "MIT" ]
null
null
null
src/config.py
jowtro/jx-flask-temp1
4d3d998469658c884ba706316381ac59628cfc5c
[ "MIT" ]
null
null
null
import os from dotenv import load_dotenv # load env vars from .env load_dotenv() TEST = os.getenv("test_var")
12.555556
30
0.743363
19
113
4.263158
0.526316
0.246914
0
0
0
0
0
0
0
0
0
0
0.168142
113
8
31
14.125
0.861702
0.20354
0
0
0
0
0.090909
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
7d5eb8d8bd72a42336471a51607777e70d6b8a0c
135
py
Python
django_misaka/apps.py
chiehtu/django-misaka
943404ae68b5c70af1915b33693dbb7d891a3906
[ "MIT" ]
3
2015-03-09T22:37:18.000Z
2017-10-18T19:27:35.000Z
django_misaka/apps.py
chiehtu/django-misaka
943404ae68b5c70af1915b33693dbb7d891a3906
[ "MIT" ]
8
2017-02-10T01:35:31.000Z
2022-01-07T10:06:17.000Z
django_misaka/apps.py
chiehtu/django-misaka
943404ae68b5c70af1915b33693dbb7d891a3906
[ "MIT" ]
2
2017-01-26T23:02:55.000Z
2019-09-25T13:36:28.000Z
from django.apps import AppConfig class DjangoMisakaConfig(AppConfig): name = 'django_misaka' verbose_name = 'Django Misaka'
19.285714
36
0.755556
15
135
6.666667
0.666667
0.2
0.32
0
0
0
0
0
0
0
0
0
0.17037
135
6
37
22.5
0.892857
0
0
0
0
0
0.192593
0
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
7d67765f1fa1c229aef2a0cdf1a0c0f69960f80f
132
py
Python
djangosige/apps/cadastro/apps.py
limeiragabriel/sige
c7c1728aee1ed134cab1841db945223234e9a40e
[ "MIT" ]
330
2017-07-03T08:41:24.000Z
2022-03-31T04:34:17.000Z
djangosige/apps/cadastro/apps.py
limeiragabriel/sige
c7c1728aee1ed134cab1841db945223234e9a40e
[ "MIT" ]
107
2017-07-03T22:21:35.000Z
2022-03-30T08:10:24.000Z
djangosige/apps/cadastro/apps.py
limeiragabriel/sige
c7c1728aee1ed134cab1841db945223234e9a40e
[ "MIT" ]
258
2017-06-27T20:11:46.000Z
2022-03-20T21:46:34.000Z
from __future__ import unicode_literals from django.apps import AppConfig class CadastroConfig(AppConfig): name = 'cadastro'
16.5
39
0.795455
15
132
6.666667
0.8
0
0
0
0
0
0
0
0
0
0
0
0.151515
132
7
40
18.857143
0.892857
0
0
0
0
0
0.060606
0
0
0
0
0
0
1
0
false
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
7d67943b13b730875ff3da0d4948c019b04394f1
19,520
py
Python
geosoft/gxapi/GXFFT.py
fearaschiarrai/gxpy
4c5e7594b24e530a8cd94df1eef562c5c6ce3e92
[ "BSD-2-Clause" ]
25
2017-07-14T06:39:37.000Z
2022-03-09T21:39:51.000Z
geosoft/gxapi/GXFFT.py
fearaschiarrai/gxpy
4c5e7594b24e530a8cd94df1eef562c5c6ce3e92
[ "BSD-2-Clause" ]
100
2016-12-13T17:30:41.000Z
2021-08-01T20:21:13.000Z
geosoft/gxapi/GXFFT.py
fearaschiarrai/gxpy
4c5e7594b24e530a8cd94df1eef562c5c6ce3e92
[ "BSD-2-Clause" ]
28
2016-12-12T17:34:40.000Z
2022-03-16T15:39:39.000Z
### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXFFT(gxapi_cy.WrapFFT): """ GXFFT class. This class allows for the application of predefined filters to data in an OASIS database. The system uses the Winograd algorithm to transform data in the spatial domain to the wavenumber or Fourier domain. """ def __init__(self, handle=0): super(GXFFT, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXFFT <geosoft.gxapi.GXFFT>` :returns: A null `GXFFT <geosoft.gxapi.GXFFT>` :rtype: GXFFT """ return GXFFT() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def add_white_noise(self, amp, option): """ Add white noise to the power spectrum of an FFT object. :param amp: The value added to the real part of all non-DC components of the current power spectrum :param option: :ref:`FFT_WHITE_NOISE` :type amp: float :type option: int .. versionadded:: 9.9 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._add_white_noise(amp, option) def app_dens(self, thick, dens): """ Appparent density filter :param thick: Thickness (meters) of the earth model :param dens: Background density (g/cm3) (default = 0) :type thick: float :type dens: float .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._app_dens(thick, dens) def app_susc(self, strength): """ Apparent susceptiblity filter :param strength: Total magnetic field strength :type strength: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Reduction to magnetic pole (`red_pol <geosoft.gxapi.GXFFT.red_pol>`) and downward continuation (`contin <geosoft.gxapi.GXFFT.contin>`) should be called BEFORE using `app_susc <geosoft.gxapi.GXFFT.app_susc>`. """ self._app_susc(strength) def band_pass(self, llen, hlen, pass_defined): """ Bandpass filter (using low and high wavelength cutoffs) :param llen: Low Cutoff wavelength (meters) :param hlen: High Cutoff wavelength (meter) :param pass_defined: 1= Pass the defined band (default); 0= Reject the band :type llen: float :type hlen: float :type pass_defined: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._band_pass(llen, hlen, pass_defined) def b_worth(self, clen, degree, filter_type): """ Butterworth filter :param clen: Central cutoff wavelength (meter) :param degree: Degree of the filter function (default = 8.0) :param filter_type: Filter type: 1= Low-pass (regional) filter (default) 0= High-pass (residual) filter :type clen: float :type degree: float :type filter_type: int .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._b_worth(clen, degree, filter_type) def rc_filter(self, clen, filter_type): """ RC filter :param clen: Central cutoff wavelength (meter) :param filter_type: Filter type: 1= Low-pass (regional) filter (default) 0= High-pass (residual) filter :type clen: float :type filter_type: int .. versionadded:: 8.5 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._rc_filter(clen, filter_type) def contin(self, dist): """ Upward/Downward continuation filter :param dist: Distance to continue; positive = downwards negative = upwards :type dist: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._contin(dist) def cos_roll(self, llen, hlen, degree, type): """ Cosine roll-off filter :param llen: Low wavelength start point (meters) :param hlen: High wavelength end point (meters) :param degree: Degree of the filter function (default = 2.0) :param type: Filter type: 1= Low-pass (regional) filter (default) 0= High-pass (residual) filter :type llen: float :type hlen: float :type degree: float :type type: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._cos_roll(llen, hlen, degree, type) @classmethod def create(cls, gvv, interv, trend): """ Create a New `GXFFT <geosoft.gxapi.GXFFT>` with detrend options. :param gvv: `GXVV <geosoft.gxapi.GXVV>` to transform. :param interv: Element space interval :param trend: :ref:`FFT_DETREND` :type gvv: GXVV :type interv: float :type trend: int :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object :rtype: GXFFT .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The detrending options control the removal of a trend from the data before the `GXFFT <geosoft.gxapi.GXFFT>` is applied. The default data expansion is 10% before `GXFFT <geosoft.gxapi.GXFFT>`. """ ret_val = gxapi_cy.WrapFFT._create(GXContext._get_tls_geo(), gvv, interv, trend) return GXFFT(ret_val) @classmethod def create_ex(cls, gvv, interv, trend, expansion): """ Create a New `GXFFT <geosoft.gxapi.GXFFT>` with detrend and expansion options. :param gvv: `GXVV <geosoft.gxapi.GXVV>` to transform. :param interv: Element space interval :param trend: :ref:`FFT_DETREND` :param expansion: Minimum expansion % :type gvv: GXVV :type interv: float :type trend: int :type expansion: float :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object :rtype: GXFFT .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The detrending options control the removal of a trend from the data before the `GXFFT <geosoft.gxapi.GXFFT>` is applied. The expansion options control the minimum data expansion before the `GXFFT <geosoft.gxapi.GXFFT>` is applied. """ ret_val = gxapi_cy.WrapFFT._create_ex(GXContext._get_tls_geo(), gvv, interv, trend, expansion) return GXFFT(ret_val) @classmethod def create_ref(cls, gvv, interv, trend): """ Create `GXFFT <geosoft.gxapi.GXFFT>` object with detrend options from reference (original) channel, but no `GXFFT <geosoft.gxapi.GXFFT>` process. :param gvv: `GXVV <geosoft.gxapi.GXVV>` contains channel data to perform `GXFFT <geosoft.gxapi.GXFFT>` operations upon. 
:param interv: Element space interval, should be the same as in `create_ex <geosoft.gxapi.GXFFT.create_ex>` call :param trend: :ref:`FFT_DETREND` :type gvv: GXVV :type interv: float :type trend: int :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object :rtype: GXFFT .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This just creates an object. It is intended to be called immediately after with `set_vv <geosoft.gxapi.GXFFT.set_vv>`. """ ret_val = gxapi_cy.WrapFFT._create_ref(GXContext._get_tls_geo(), gvv, interv, trend) return GXFFT(ret_val) @classmethod def create_ref_ex(cls, gvv, interv, trend, expansion, d_cmult): """ Create `GXFFT <geosoft.gxapi.GXFFT>` object with detrend and expansion options from reference (original) channel, but no `GXFFT <geosoft.gxapi.GXFFT>` process. :param gvv: `GXVV <geosoft.gxapi.GXVV>` contains channel data to perform `GXFFT <geosoft.gxapi.GXFFT>` operations upon. :param interv: Element space interval, should be the same as in `create_ex <geosoft.gxapi.GXFFT.create_ex>` call :param trend: :ref:`FFT_DETREND` :param expansion: Minimum expansion %, should be the same as in `create_ex <geosoft.gxapi.GXFFT.create_ex>` call :param d_cmult: DC level multiple :type gvv: GXVV :type interv: float :type trend: int :type expansion: float :type d_cmult: float :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object :rtype: GXFFT .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This just creates an object. It is intended to be called immediately after with `set_vv <geosoft.gxapi.GXFFT.set_vv>`. 
""" ret_val = gxapi_cy.WrapFFT._create_ref_ex(GXContext._get_tls_geo(), gvv, interv, trend, expansion, d_cmult) return GXFFT(ret_val) def gaus(self, dev, type): """ Gaussian filter :param dev: Standard deviation cutoff of function (meters) :param type: Filter type: 1= Low-pass (residual) filter (default) 0= High-pass (regional) filter :type dev: float :type type: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._gaus(dev, type) def get_vv(self, gv_vr, gv_vi): """ Copies real and imaginary `GXVV <geosoft.gxapi.GXVV>`'s to user `GXVV <geosoft.gxapi.GXVV>`'s. :param gv_vr: Real component :param gv_vi: Imaginary component :type gv_vr: GXVV :type gv_vi: GXVV .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._get_vv(gv_vr, gv_vi) def h_drv(self, order): """ Horizontal derivative :param order: Order of differentiation (default = 1) :type order: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._h_drv(order) def high_pass(self, wlen, fid_int): """ High bandpass filter :param wlen: Cutoff wavelength (meter) :param fid_int: Fiducial increment of the `GXFFT <geosoft.gxapi.GXFFT>`'s channel data :type wlen: float :type fid_int: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._high_pass(wlen, fid_int) def h_int(self): """ Horizontal integration .. 
versionadded:: 5.1.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._h_int() def inverse(self, gvv, gv_vm): """ Inverse the `GXFFT <geosoft.gxapi.GXFFT>` from wave number domain to space domain :param gvv: Output `GXVV <geosoft.gxapi.GXVV>` :param gv_vm: Original `GXVV <geosoft.gxapi.GXVV>` which was used to create `GXFFT <geosoft.gxapi.GXFFT>` (will be used as mask for output `GXVV <geosoft.gxapi.GXVV>`; no masking if this parameter is NULL) :type gvv: GXVV :type gv_vm: GXVV .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._inverse(gvv, gv_vm) def low_pass(self, wlen): """ Low bandpass filter :param wlen: Cutoff wavelength (meters) :type wlen: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._low_pass(wlen) def red_pol(self, inc, dec, incp, dir): """ Reduction to magnetic pole :param inc: Geomagnetic inclination (degrees) :param dec: Geomagnetic declination (degrees) :param incp: Inclination (degrees) for amplitude correction (default = 20.0) :param dir: Direction (degrees) of Line from North :type inc: float :type dec: float :type incp: float :type dir: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._red_pol(inc, dec, incp, dir) def nyquist(self): """ Gets the Nyquist frequency (wavenumbers/sample unit). :returns: Nyquist frequency (wavenumbers/sample unit). :rtype: float .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._nyquist() return ret_val def samp_incr(self): """ Gets the original sample increment. :returns: Original sample increment. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._samp_incr() return ret_val def wave_incr(self): """ Get the wave number increment. :returns: Wave number increment :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._wave_incr() return ret_val def set_vv(self, gv_vr, gv_vi): """ Sets real and imaginary VVs in `GXFFT <geosoft.gxapi.GXFFT>`. :param gv_vr: Real component :param gv_vi: Imaginary component :type gv_vr: GXVV :type gv_vi: GXVV .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The `GXVV <geosoft.gxapi.GXVV>` must have been obtained from the same `GXFFT <geosoft.gxapi.GXFFT>` using the `set_vv <geosoft.gxapi.GXFFT.set_vv>` method. """ self._set_vv(gv_vr, gv_vi) def spectrum(self, gvv): """ Calculates a power spectrum :param gvv: Output power spectrum `GXVV <geosoft.gxapi.GXVV>` :type gvv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._spectrum(gvv) def v_drv(self, order): """ Vertical derivative :param order: Order of differentiation (default = 1) :type order: float .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._v_drv(order) def v_int(self): """ Vertical integration .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._v_int() def write_spectrum(self, gvv, out_file): """ Writes a power spectrum to a file :param gvv: Output power spectrum `GXVV <geosoft.gxapi.GXVV>` :param out_file: File name for output spectrum :type gvv: GXVV :type out_file: str .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._write_spectrum(gvv, out_file.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer
30.837283
214
0.606557
2,343
19,520
4.959454
0.135723
0.033735
0.046816
0.060241
0.684079
0.650861
0.625732
0.610671
0.580981
0.552926
0
0.019969
0.28166
19,520
633
215
30.837283
0.808729
0.688678
0
0.155844
1
0
0
0
0
0
0
0
0
1
0.402597
false
0.077922
0.025974
0
0.558442
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
7dc2e4a999b96350f22e02f0f9ac5d6623fb8c94
55
py
Python
ensconce/webapp/__init__.py
netwrkr/ensconce
eda938c67eb0af8fb7d3ccf668e07d2f76485aa5
[ "BSD-3-Clause" ]
1
2021-05-05T13:52:44.000Z
2021-05-05T13:52:44.000Z
ensconce/webapp/__init__.py
netwrkr/ensconce
eda938c67eb0af8fb7d3ccf668e07d2f76485aa5
[ "BSD-3-Clause" ]
null
null
null
ensconce/webapp/__init__.py
netwrkr/ensconce
eda938c67eb0af8fb7d3ccf668e07d2f76485aa5
[ "BSD-3-Clause" ]
null
null
null
""" The top-level package for the cherrypy webapp. """
18.333333
47
0.690909
8
55
4.75
0.875
0
0
0
0
0
0
0
0
0
0
0
0.163636
55
3
48
18.333333
0.826087
0.836364
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
7deccc78170f97c1f308154f730928b6045f3b60
152
py
Python
Max_Waelbers/Code/Source_Data/data.py
ArtezGDA/MappingTheCity-Maps
a29377af7878907d30b4199d0859f007ba08b5e6
[ "MIT" ]
null
null
null
Max_Waelbers/Code/Source_Data/data.py
ArtezGDA/MappingTheCity-Maps
a29377af7878907d30b4199d0859f007ba08b5e6
[ "MIT" ]
null
null
null
Max_Waelbers/Code/Source_Data/data.py
ArtezGDA/MappingTheCity-Maps
a29377af7878907d30b4199d0859f007ba08b5e6
[ "MIT" ]
null
null
null
#!/usr/bin/pyhton #scrape brandstofelektrisch.json import urllib import json def main(): with open("brandstofelektrisch.json"), 'r') as inputfile
15.2
57
0.743421
19
152
5.947368
0.789474
0.40708
0
0
0
0
0
0
0
0
0
0
0.131579
152
10
58
15.2
0.856061
0
0
0
0
0
0.240385
0.230769
0
0
0
0
0
0
null
null
0
0.5
null
null
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
814a3241a7119f0f80b0e55dd020d4fc69437423
132
py
Python
HIS_void/patient/signals.py
YuanchenZhu2020/HIS_void
7289bf537e9fc4b09750bbca76a4cc8354dc770f
[ "MIT" ]
null
null
null
HIS_void/patient/signals.py
YuanchenZhu2020/HIS_void
7289bf537e9fc4b09750bbca76a4cc8354dc770f
[ "MIT" ]
null
null
null
HIS_void/patient/signals.py
YuanchenZhu2020/HIS_void
7289bf537e9fc4b09750bbca76a4cc8354dc770f
[ "MIT" ]
null
null
null
from django.dispatch import Signal patient_logged_in = Signal() patient_login_failed = Signal() patient_logged_out = Signal()
22
35
0.780303
17
132
5.705882
0.647059
0.402062
0.391753
0
0
0
0
0
0
0
0
0
0.143939
132
5
36
26.4
0.858407
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
8151676fbd111d73aaf820188ecadf414d67b40e
55
py
Python
tests/__init__.py
Prior99/go2
837dc91717cd875b207ac4ebd44cad3f6ffdfadb
[ "FSFAP" ]
null
null
null
tests/__init__.py
Prior99/go2
837dc91717cd875b207ac4ebd44cad3f6ffdfadb
[ "FSFAP" ]
null
null
null
tests/__init__.py
Prior99/go2
837dc91717cd875b207ac4ebd44cad3f6ffdfadb
[ "FSFAP" ]
null
null
null
import config config.database_uri = 'sqlite:///memory'
18.333333
40
0.763636
7
55
5.857143
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.090909
55
2
41
27.5
0.82
0
0
0
0
0
0.290909
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
815bd59f2daf5e53f1fb4671188b22e365b14bde
51
py
Python
src/models/wgangp_g_model.py
universuen/RVGAN-TL
d370673063a3dfd9cc4a20bfd1c18bc95aadabca
[ "MIT" ]
null
null
null
src/models/wgangp_g_model.py
universuen/RVGAN-TL
d370673063a3dfd9cc4a20bfd1c18bc95aadabca
[ "MIT" ]
null
null
null
src/models/wgangp_g_model.py
universuen/RVGAN-TL
d370673063a3dfd9cc4a20bfd1c18bc95aadabca
[ "MIT" ]
null
null
null
from .gan_g_model import GANGModel as WGANGPGModel
25.5
50
0.862745
8
51
5.25
1
0
0
0
0
0
0
0
0
0
0
0
0.117647
51
1
51
51
0.933333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
8160498c3c38cd7845e475c12a1285069bb88303
147
py
Python
gym_missile_command/__init__.py
ElieKadoche/gym_missile_command
e729721024005d2adcabc759768756516c70bc06
[ "MIT" ]
4
2021-02-22T11:07:11.000Z
2022-03-30T04:14:46.000Z
gym_missile_command/__init__.py
ElieKadoche/gym_missile_command
e729721024005d2adcabc759768756516c70bc06
[ "MIT" ]
null
null
null
gym_missile_command/__init__.py
ElieKadoche/gym_missile_command
e729721024005d2adcabc759768756516c70bc06
[ "MIT" ]
1
2021-06-04T04:02:49.000Z
2021-06-04T04:02:49.000Z
from gym.envs.registration import register register( id="missile-command-v0", entry_point="gym_missile_command.envs:MissileCommandEnv", )
21
61
0.77551
18
147
6.166667
0.722222
0.252252
0
0
0
0
0
0
0
0
0
0.007692
0.115646
147
6
62
24.5
0.846154
0
0
0
0
0
0.408163
0.285714
0
0
0
0
0
1
0
true
0
0.2
0
0.2
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
817143745dff70101296903798e901861afafeab
334
py
Python
tests/test_site.py
GabrielIFPB/little-notes
cd1176275e9a2523801626a6c22b6218867bd21b
[ "MIT" ]
null
null
null
tests/test_site.py
GabrielIFPB/little-notes
cd1176275e9a2523801626a6c22b6218867bd21b
[ "MIT" ]
null
null
null
tests/test_site.py
GabrielIFPB/little-notes
cd1176275e9a2523801626a6c22b6218867bd21b
[ "MIT" ]
null
null
null
# def test_site_index(client, captured_templates): # response = client.get("/") # assert response.status_code == 200 # assert len(captured_templates) == 1 # template, context = captured_templates[0] # assert template.name == "index.html" # assert response.headers["content-Type"] == "text/html; charset=utf-8"
41.75
75
0.679641
40
334
5.525
0.675
0.230769
0
0
0
0
0
0
0
0
0
0.021739
0.173653
334
7
76
47.714286
0.778986
0.95509
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
8187b1f711a0d3747d35e21360ef5beff63489f6
130
py
Python
utils/single_out.py
JBarmentlo/Piscine-Math-42
6d94d382e95010f0daf910fa9da712e878869351
[ "MIT" ]
null
null
null
utils/single_out.py
JBarmentlo/Piscine-Math-42
6d94d382e95010f0daf910fa9da712e878869351
[ "MIT" ]
null
null
null
utils/single_out.py
JBarmentlo/Piscine-Math-42
6d94d382e95010f0daf910fa9da712e878869351
[ "MIT" ]
null
null
null
def single_out(a, index): ''' returns a with all bits but the index bit set to 0 ''' return (a & (1 << index))
26
58
0.546154
21
130
3.333333
0.809524
0
0
0
0
0
0
0
0
0
0
0.022727
0.323077
130
5
59
26
0.772727
0.384615
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
8192ace4c343bdf09ab21619981b7c91ee219fcc
204
py
Python
tests/simple_class.py
ssarangi/PyVyM
f96c46e7b8d38f938345ca915c5356b4d9c86d64
[ "MIT" ]
3
2017-09-24T17:35:29.000Z
2021-02-14T21:53:03.000Z
tests/simple_class.py
ssarangi/PyVyM
f96c46e7b8d38f938345ca915c5356b4d9c86d64
[ "MIT" ]
null
null
null
tests/simple_class.py
ssarangi/PyVyM
f96c46e7b8d38f938345ca915c5356b4d9c86d64
[ "MIT" ]
1
2019-08-22T01:09:15.000Z
2019-08-22T01:09:15.000Z
class Foo: def __init__(self): self.member1 = 1 def print(self): self.member1 += 5 print("Member1: %s" % self.member1) def main(): foo = Foo() foo.print() main()
15.692308
43
0.529412
26
204
4
0.423077
0.317308
0.288462
0
0
0
0
0
0
0
0
0.043165
0.318627
204
13
44
15.692308
0.705036
0
0
0
0
0
0.053659
0
0
0
0
0
0
1
0.3
false
0
0
0
0.4
0.3
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
81af4087dfc40937c5e9a5d883a350420adf9436
93
py
Python
HealthKit/healthapp/apps.py
Koushik-Sarker-Seemanto/DatabaseProject
d936d83643242942b903381ef263dc10f8fbc177
[ "MIT" ]
null
null
null
HealthKit/healthapp/apps.py
Koushik-Sarker-Seemanto/DatabaseProject
d936d83643242942b903381ef263dc10f8fbc177
[ "MIT" ]
null
null
null
HealthKit/healthapp/apps.py
Koushik-Sarker-Seemanto/DatabaseProject
d936d83643242942b903381ef263dc10f8fbc177
[ "MIT" ]
null
null
null
from django.apps import AppConfig class HealthappConfig(AppConfig): name = 'healthapp'
15.5
33
0.763441
10
93
7.1
0.9
0
0
0
0
0
0
0
0
0
0
0
0.16129
93
5
34
18.6
0.910256
0
0
0
0
0
0.096774
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
81c65d327337882fa0595cb517fbaf5a266fd0e0
44
py
Python
__init__.py
vyouzhis/energy
c9c9b0c7dc2e85a093fb531f80c3aa6f458e8b6e
[ "Apache-2.0" ]
null
null
null
__init__.py
vyouzhis/energy
c9c9b0c7dc2e85a093fb531f80c3aa6f458e8b6e
[ "Apache-2.0" ]
null
null
null
__init__.py
vyouzhis/energy
c9c9b0c7dc2e85a093fb531f80c3aa6f458e8b6e
[ "Apache-2.0" ]
1
2019-07-19T03:03:43.000Z
2019-07-19T03:03:43.000Z
__version__ = '0.0.1' __author__ = 'EtoMC2'
14.666667
21
0.681818
6
44
3.666667
0.833333
0
0
0
0
0
0
0
0
0
0
0.105263
0.136364
44
2
22
22
0.473684
0
0
0
0
0
0.25
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
81c77558a514adeba4130477ed1fc1b7f429e070
24
py
Python
caller/textractcaller/_version.py
aws-samples/amazon-textract-textractor
35da7d662b06f03d1fc9db5fbabb3d1f45b1edd6
[ "Apache-2.0" ]
187
2019-05-07T20:20:33.000Z
2022-03-26T11:29:50.000Z
caller/textractcaller/_version.py
aws-samples/amazon-textract-textractor
35da7d662b06f03d1fc9db5fbabb3d1f45b1edd6
[ "Apache-2.0" ]
37
2019-05-15T13:51:50.000Z
2022-03-25T21:57:37.000Z
caller/textractcaller/_version.py
aws-samples/amazon-textract-textractor
35da7d662b06f03d1fc9db5fbabb3d1f45b1edd6
[ "Apache-2.0" ]
84
2019-05-28T01:28:00.000Z
2022-03-10T01:59:21.000Z
__version__ = '0.0.15'
8
22
0.625
4
24
2.75
0.75
0
0
0
0
0
0
0
0
0
0
0.2
0.166667
24
2
23
12
0.35
0
0
0
0
0
0.26087
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
81d1b538fa5a380347b8c66fb4e24794c1d2ab00
21
py
Python
certbot_dns_duckdns/__init__.py
chaptergy/certbot_dns_duckdns
13daa19a06e6cfbf8ca1c5cc7f0a31e9142d1da6
[ "MIT" ]
null
null
null
certbot_dns_duckdns/__init__.py
chaptergy/certbot_dns_duckdns
13daa19a06e6cfbf8ca1c5cc7f0a31e9142d1da6
[ "MIT" ]
null
null
null
certbot_dns_duckdns/__init__.py
chaptergy/certbot_dns_duckdns
13daa19a06e6cfbf8ca1c5cc7f0a31e9142d1da6
[ "MIT" ]
null
null
null
__version__ = "v0.5"
10.5
20
0.666667
3
21
3.333333
1
0
0
0
0
0
0
0
0
0
0
0.111111
0.142857
21
1
21
21
0.444444
0
0
0
0
0
0.190476
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
81d29f9a18034cf0d9190547ac37fad69fe6780e
142
py
Python
simf/common/__init__.py
MartinJakomin/SIMF
e04110ddcaed887abc58084686d00f84fdc6a8c8
[ "MIT" ]
1
2021-01-22T05:15:30.000Z
2021-01-22T05:15:30.000Z
simf/common/__init__.py
MartinJakomin/SIMF
e04110ddcaed887abc58084686d00f84fdc6a8c8
[ "MIT" ]
null
null
null
simf/common/__init__.py
MartinJakomin/SIMF
e04110ddcaed887abc58084686d00f84fdc6a8c8
[ "MIT" ]
null
null
null
from .data import build_test_set, split_streams, build_sparse_matrix __all__ = ['build_sparse_matrix', 'split_streams', 'build_test_set']
35.5
69
0.795775
20
142
4.95
0.55
0.181818
0.242424
0
0
0
0
0
0
0
0
0
0.105634
142
3
70
47.333333
0.779528
0
0
0
0
0
0.330935
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
c4b128ac557de255d8f01c1bb376ec1b5cedffbb
140
py
Python
python/pangram/pangram.py
cjcbusatto/exercism-solved
51cfe3d0134899b043d5668bc270bf480e6d9644
[ "MIT" ]
null
null
null
python/pangram/pangram.py
cjcbusatto/exercism-solved
51cfe3d0134899b043d5668bc270bf480e6d9644
[ "MIT" ]
null
null
null
python/pangram/pangram.py
cjcbusatto/exercism-solved
51cfe3d0134899b043d5668bc270bf480e6d9644
[ "MIT" ]
1
2018-12-25T22:14:40.000Z
2018-12-25T22:14:40.000Z
def is_pangram(sentence): if len(set(sentence.lower()) & set("abcdefghijklmnopqrstuvwxyz")) == 26: return True return False
28
76
0.671429
16
140
5.8125
0.8125
0
0
0
0
0
0
0
0
0
0
0.017857
0.2
140
4
77
35
0.8125
0
0
0
0
0
0.185714
0.185714
0
0
0
0
0
1
0.25
false
0
0
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
c4f37043eaa55b7d635818bedd81a249aff9fc87
83
py
Python
captain_hook/services/scrutinizer/__init__.py
brantje/captain_hook
dde076a96afffa2235b7d8d01d47c4e61099c6b6
[ "Apache-2.0" ]
1
2017-01-07T16:22:05.000Z
2017-01-07T16:22:05.000Z
captain_hook/services/scrutinizer/__init__.py
brantje/captain_hook
dde076a96afffa2235b7d8d01d47c4e61099c6b6
[ "Apache-2.0" ]
3
2017-02-27T00:34:19.000Z
2017-02-27T14:25:44.000Z
captain_hook/services/scrutinizer/__init__.py
brantje/telegram-github-bot
dde076a96afffa2235b7d8d01d47c4e61099c6b6
[ "Apache-2.0" ]
null
null
null
from __future__ import absolute_import from .scrutinizer import ScrutinizerService
27.666667
43
0.891566
9
83
7.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.096386
83
2
44
41.5
0.92
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
f201efd0e1a3771cdd775b1c1d1798d4da78978a
130
py
Python
lib/plugs/snowflake.py
bankova-gabriella/layer
87feadbca7bcb935b91e1a4b29fd15ea26103075
[ "MIT" ]
null
null
null
lib/plugs/snowflake.py
bankova-gabriella/layer
87feadbca7bcb935b91e1a4b29fd15ea26103075
[ "MIT" ]
null
null
null
lib/plugs/snowflake.py
bankova-gabriella/layer
87feadbca7bcb935b91e1a4b29fd15ea26103075
[ "MIT" ]
null
null
null
import snowflake class Snowflake: def __init__(self, layer): self.layer = layer self.snowflake = snowflake
14.444444
34
0.653846
14
130
5.785714
0.5
0.222222
0
0
0
0
0
0
0
0
0
0
0.276923
130
8
35
16.25
0.861702
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.6
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
f207144b351ffd741359857d5183bbc812a5bb2f
102
py
Python
7_kyu/Triangle_area.py
UlrichBerntien/Codewars-Katas
bbd025e67aa352d313564d3862db19fffa39f552
[ "MIT" ]
null
null
null
7_kyu/Triangle_area.py
UlrichBerntien/Codewars-Katas
bbd025e67aa352d313564d3862db19fffa39f552
[ "MIT" ]
null
null
null
7_kyu/Triangle_area.py
UlrichBerntien/Codewars-Katas
bbd025e67aa352d313564d3862db19fffa39f552
[ "MIT" ]
null
null
null
def t_area(t_str: str) -> float: rows = sum(c == '\n' for c in t_str) - 2 return (rows*rows)/2
34
44
0.568627
21
102
2.619048
0.619048
0.145455
0
0
0
0
0
0
0
0
0
0.025974
0.245098
102
3
45
34
0.688312
0
0
0
0
0
0.019417
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
f207bef41b000bbce592a8e78f6aacb879188ea2
623
py
Python
src/compas_occ/geometry/__init__.py
jf---/compas_occ
8f8a4d7f38984d06e6c32c851625275735503611
[ "MIT" ]
1
2021-06-15T22:49:40.000Z
2021-06-15T22:49:40.000Z
src/compas_occ/geometry/__init__.py
jf---/compas_occ
8f8a4d7f38984d06e6c32c851625275735503611
[ "MIT" ]
null
null
null
src/compas_occ/geometry/__init__.py
jf---/compas_occ
8f8a4d7f38984d06e6c32c851625275735503611
[ "MIT" ]
null
null
null
""" ******************************************************************************** compas_occ.geometry ******************************************************************************** .. currentmodule:: compas_occ.geometry Curves ====== .. autosummary:: :toctree: generated/ :nosignatures: Curve NurbsCurve Surfaces ======== .. autosummary:: :toctree: generated/ :nosignatures: Surface NurbsSurface """ from .curves import Curve # noqa: F401 from .curves import NurbsCurve # noqa: F401 from .surfaces import Surface # noqa: F401 from .surfaces import NurbsSurface # noqa: F401
18.878788
80
0.495987
45
623
6.822222
0.4
0.104235
0.117264
0.254072
0.169381
0
0
0
0
0
0
0.022857
0.157303
623
32
81
19.46875
0.561905
0.770465
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
f20aeb65920da97fa582a49f9f3ecd700ffacafa
2,304
py
Python
parlai/mturk/core/worlds.py
ysglh/ParlAI
e0f16e9168839be12f72d3431b9819cf3d51fe10
[ "BSD-3-Clause" ]
2
2017-09-30T23:23:44.000Z
2021-07-08T17:12:58.000Z
parlai/mturk/core/worlds.py
ysglh/ParlAI
e0f16e9168839be12f72d3431b9819cf3d51fe10
[ "BSD-3-Clause" ]
1
2018-03-08T20:44:39.000Z
2018-03-08T23:49:29.000Z
parlai/mturk/core/worlds.py
ysglh/ParlAI
e0f16e9168839be12f72d3431b9819cf3d51fe10
[ "BSD-3-Clause" ]
1
2018-03-08T20:42:57.000Z
2018-03-08T20:42:57.000Z
# Copyright (c) 2017-present, Facebook, Inc. # All rights reserved. # This source code is licensed under the BSD-style license found in the # LICENSE file in the root directory of this source tree. An additional grant # of patent rights can be found in the PATENTS file in the same directory. from parlai.core.worlds import World class MTurkOnboardWorld(World): """Generic world for onboarding a Turker and collecting information from them.""" def __init__(self, opt, mturk_agent): self.mturk_agent = mturk_agent self.episodeDone = False def parley(self): self.episodeDone = True def episode_done(self): return self.episodeDone def shutdown(self): pass class MTurkTaskWorld(World): """Generic world for MTurk tasks.""" def __init__(self, opt, mturk_agent): self.mturk_agent = mturk_agent self.episodeDone = False def parley(self): self.episodeDone = True def episode_done(self): return self.episodeDone def report(self): pass def shutdown(self): self.mturk_agent.shutdown() """ Use the following code if there are multiple MTurk agents: global shutdown_agent def shutdown_agent(mturk_agent): mturk_agent.shutdown() Parallel( n_jobs=len(self.mturk_agents), backend='threading' )(delayed(shutdown_agent)(agent) for agent in self.mturk_agents) """ def review_work(self): """Programmatically approve/reject the turker's work. For example: .. code-block:: python if self.turker_response == '0': self.mturk_agent.reject_work( 'You rated our model's response as a 0/10 but we ' 'know we\'re better than that' ) else: if self.turker_response == '10': self.mturk_agent.pay_bonus(1, 'Thanks for a great rating!') self.mturk_agent.approve_work() """ # self.mturk_agent.approve_work() # self.mturk_agent.reject_work() # self.mturk_agent.pay_bonus(1000) # Pay $1000 as bonus # self.mturk_agent.block_worker() # Block this worker from future HITs pass
30.72
79
0.619792
284
2,304
4.880282
0.419014
0.11544
0.10101
0.04329
0.307359
0.251082
0.251082
0.251082
0.204906
0.204906
0
0.011714
0.296007
2,304
74
80
31.135135
0.842787
0.43967
0
0.76
0
0
0
0
0
0
0
0
0
1
0.4
false
0.12
0.04
0.08
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
48524b96a63c4f9c7f28c9f360015d66badfb200
68
py
Python
__main__.py
lwhjon/repo-labels-cli
5a16638f45f0b13e9fdddc59b1a0952f3ba51287
[ "MIT" ]
1
2021-07-14T06:32:50.000Z
2021-07-14T06:32:50.000Z
__main__.py
lwhjon/repo-labels-cli
5a16638f45f0b13e9fdddc59b1a0952f3ba51287
[ "MIT" ]
2
2021-08-31T18:14:03.000Z
2021-12-29T18:14:25.000Z
__main__.py
lwhjon/repo-labels-cli
5a16638f45f0b13e9fdddc59b1a0952f3ba51287
[ "MIT" ]
null
null
null
import repolabels if __name__ == "__main__": repolabels.main()
13.6
26
0.705882
7
68
5.714286
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.176471
68
4
27
17
0.714286
0
0
0
0
0
0.117647
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
4871e17eb202394f9905e405eff685389769acab
12,132
py
Python
tests/unit/test_config.py
neuro-inc/platform-reports
161c18733370235af0b63a772de49343e956c35c
[ "Apache-2.0" ]
null
null
null
tests/unit/test_config.py
neuro-inc/platform-reports
161c18733370235af0b63a772de49343e956c35c
[ "Apache-2.0" ]
9
2021-12-23T03:10:40.000Z
2022-03-31T03:15:52.000Z
tests/unit/test_config.py
neuro-inc/platform-reports
161c18733370235af0b63a772de49343e956c35c
[ "Apache-2.0" ]
null
null
null
from pathlib import Path from yarl import URL from platform_reports.config import ( EnvironConfigFactory, GrafanaProxyConfig, KubeClientAuthType, KubeConfig, MetricsConfig, PlatformAuthConfig, PlatformServiceConfig, PrometheusProxyConfig, SentryConfig, ServerConfig, ZipkinConfig, ) class TestEnvironConfigFactory: def test_create_metrics_defaults(self) -> None: env = { "NP_CONFIG_URL": "http://dev.neu.ro", "NP_API_URL": "http://dev.neu.ro/api/v1", "NP_TOKEN": "token", "NP_CLUSTER_NAME": "default", "NP_NODE_NAME": "node", "NP_KUBE_URL": "https://kubernetes.default.svc", } result = EnvironConfigFactory(env).create_metrics() assert result == MetricsConfig( server=ServerConfig(), platform_config=PlatformServiceConfig( url=URL("http://dev.neu.ro"), token="token" ), platform_api=PlatformServiceConfig( url=URL("http://dev.neu.ro/api/v1"), token="token" ), kube=KubeConfig( url=URL("https://kubernetes.default.svc"), auth_type=KubeClientAuthType.NONE, ), cluster_name="default", node_name="node", ) def test_create_metrics_custom(self) -> None: env = { "NP_METRICS_API_SCHEME": "http", "NP_METRICS_API_HOST": "metrics", "NP_METRICS_API_PORT": "9500", "NP_CONFIG_URL": "http://dev.neu.ro", "NP_API_URL": "http://dev.neu.ro/api/v1", "NP_TOKEN": "token", "NP_CLUSTER_NAME": "default", "NP_NODE_NAME": "node", "NP_CLOUD_PROVIDER": "aws", "NP_REGION": "us-east-1", "NP_GCP_SERVICE_ACCOUNT_KEY_PATH": "sa.json", "NP_AZURE_PRICES_URL": "https://azure-prices", "NP_JOBS_NAMESPACE": "platform-jobs", "NP_NODE_POOL_LABEL": "node-pool", "NP_NODE_PREEMPTIBLE_LABEL": "preemptible", "NP_JOB_LABEL": "job", "NP_ZIPKIN_URL": "https://zipkin:9411", "NP_SENTRY_DSN": "https://sentry", "NP_SENTRY_CLUSTER_NAME": "test", "NP_KUBE_URL": "https://kubernetes.default.svc", } result = EnvironConfigFactory(env).create_metrics() assert result == MetricsConfig( server=ServerConfig(scheme="http", host="metrics", port=9500), platform_config=PlatformServiceConfig( url=URL("http://dev.neu.ro"), token="token" ), 
platform_api=PlatformServiceConfig( url=URL("http://dev.neu.ro/api/v1"), token="token" ), kube=KubeConfig( url=URL("https://kubernetes.default.svc"), auth_type=KubeClientAuthType.NONE, ), cluster_name="default", node_name="node", cloud_provider="aws", region="us-east-1", gcp_service_account_key_path=Path("sa.json"), azure_prices_url=URL("https://azure-prices"), jobs_namespace="platform-jobs", node_pool_label="node-pool", node_preemptible_label="preemptible", job_label="job", zipkin=ZipkinConfig( url=URL("https://zipkin:9411"), app_name="platform-metrics-exporter" ), sentry=SentryConfig( dsn=URL("https://sentry"), app_name="platform-metrics-exporter", cluster_name="test", ), ) def test_create_prometheus_proxy_defaults(self) -> None: env = { "NP_CLUSTER_NAME": "default", "NP_AUTH_ACCESS_TOKEN_COOKIE_NAMES": "sat,dat", "NP_PROMETHEUS_HOST": "prometheus", "NP_PROMETHEUS_PORT": "9090", "NP_AUTH_URL": "-", "NP_TOKEN": "token", "NP_API_URL": "https://dev.neu.ro/api/v1", } result = EnvironConfigFactory(env).create_prometheus_proxy() assert result == PrometheusProxyConfig( cluster_name="default", access_token_cookie_names=["sat", "dat"], server=ServerConfig(), prometheus_server=ServerConfig(host="prometheus", port=9090), platform_auth=PlatformAuthConfig(url=None, token="token"), platform_api=PlatformServiceConfig( url=URL("https://dev.neu.ro/api/v1"), token="token" ), ) def test_create_prometheus_proxy_custom(self) -> None: env = { "NP_CLUSTER_NAME": "default", "NP_AUTH_ACCESS_TOKEN_COOKIE_NAMES": "sat,dat", "NP_REPORTS_API_SCHEME": "https", "NP_REPORTS_API_HOST": "platform-reports", "NP_REPORTS_API_PORT": "80", "NP_PROMETHEUS_SCHEME": "https", "NP_PROMETHEUS_HOST": "prometheus", "NP_PROMETHEUS_PORT": "9090", "NP_AUTH_URL": "https://dev.neu.ro", "NP_TOKEN": "token", "NP_API_URL": "https://dev.neu.ro/api/v1", "NP_ZIPKIN_URL": "https://zipkin:9411", "NP_SENTRY_DSN": "https://sentry", "NP_SENTRY_CLUSTER_NAME": "test", } result = 
EnvironConfigFactory(env).create_prometheus_proxy() assert result == PrometheusProxyConfig( cluster_name="default", access_token_cookie_names=["sat", "dat"], server=ServerConfig(scheme="https", host="platform-reports", port=80), prometheus_server=ServerConfig( scheme="https", host="prometheus", port=9090 ), platform_auth=PlatformAuthConfig( url=URL("https://dev.neu.ro"), token="token" ), platform_api=PlatformServiceConfig( url=URL("https://dev.neu.ro/api/v1"), token="token" ), zipkin=ZipkinConfig( url=URL("https://zipkin:9411"), app_name="platform-prometheus-proxy" ), sentry=SentryConfig( dsn=URL("https://sentry"), app_name="platform-prometheus-proxy", cluster_name="test", ), ) def test_create_grafana_proxy_defaults(self) -> None: env = { "NP_CLUSTER_NAME": "default", "NP_AUTH_ACCESS_TOKEN_COOKIE_NAMES": "sat,dat", "NP_GRAFANA_HOST": "grafana", "NP_GRAFANA_PORT": "3000", "NP_AUTH_URL": "-", "NP_TOKEN": "token", "NP_API_URL": "https://dev.neu.ro/api/v1", } result = EnvironConfigFactory(env).create_grafana_proxy() assert result == GrafanaProxyConfig( cluster_name="default", access_token_cookie_names=["sat", "dat"], server=ServerConfig(), grafana_server=ServerConfig(host="grafana", port=3000), platform_auth=PlatformAuthConfig(url=None, token="token"), platform_api=PlatformServiceConfig( url=URL("https://dev.neu.ro/api/v1"), token="token" ), ) def test_create_grafana_proxy_custom(self) -> None: env = { "NP_CLUSTER_NAME": "default", "NP_AUTH_ACCESS_TOKEN_COOKIE_NAMES": "sat,dat", "NP_REPORTS_API_SCHEME": "https", "NP_REPORTS_API_HOST": "platform-reports", "NP_REPORTS_API_PORT": "80", "NP_GRAFANA_SCHEME": "https", "NP_GRAFANA_HOST": "grafana", "NP_GRAFANA_PORT": "3000", "NP_AUTH_URL": "https://dev.neu.ro", "NP_TOKEN": "token", "NP_API_URL": "https://dev.neu.ro/api/v1", "NP_ZIPKIN_URL": "https://zipkin:9411", "NP_SENTRY_DSN": "https://sentry", "NP_SENTRY_CLUSTER_NAME": "test", } result = EnvironConfigFactory(env).create_grafana_proxy() assert result == GrafanaProxyConfig( 
cluster_name="default", access_token_cookie_names=["sat", "dat"], server=ServerConfig(scheme="https", host="platform-reports", port=80), grafana_server=ServerConfig(scheme="https", host="grafana", port=3000), platform_auth=PlatformAuthConfig( url=URL("https://dev.neu.ro"), token="token" ), platform_api=PlatformServiceConfig( url=URL("https://dev.neu.ro/api/v1"), token="token" ), zipkin=ZipkinConfig( url=URL("https://zipkin:9411"), app_name="platform-grafana-proxy" ), sentry=SentryConfig( dsn=URL("https://sentry"), app_name="platform-grafana-proxy", cluster_name="test", ), ) def test_create_zipkin_none(self) -> None: result = EnvironConfigFactory({}).create_zipkin(default_app_name="app") assert result is None def test_create_zipkin_default(self) -> None: env = {"NP_ZIPKIN_URL": "https://zipkin:9411"} result = EnvironConfigFactory(env).create_zipkin(default_app_name="app") assert result == ZipkinConfig(url=URL("https://zipkin:9411"), app_name="app") def test_create_zipkin_custom(self) -> None: env = { "NP_ZIPKIN_URL": "https://zipkin:9411", "NP_ZIPKIN_APP_NAME": "api", "NP_ZIPKIN_SAMPLE_RATE": "1", } result = EnvironConfigFactory(env).create_zipkin(default_app_name="app") assert result == ZipkinConfig( url=URL("https://zipkin:9411"), app_name="api", sample_rate=1 ) def test_create_sentry_none(self) -> None: result = EnvironConfigFactory({}).create_sentry(default_app_name="app") assert result is None def test_create_sentry_default(self) -> None: env = { "NP_SENTRY_DSN": "https://sentry", "NP_SENTRY_CLUSTER_NAME": "test", } result = EnvironConfigFactory(env).create_sentry(default_app_name="app") assert result == SentryConfig( dsn=URL("https://sentry"), app_name="app", cluster_name="test" ) def test_create_sentry_custom(self) -> None: env = { "NP_SENTRY_DSN": "https://sentry", "NP_SENTRY_APP_NAME": "api", "NP_SENTRY_CLUSTER_NAME": "test", "NP_SENTRY_SAMPLE_RATE": "1", } result = EnvironConfigFactory(env).create_sentry(default_app_name="app") assert result == 
SentryConfig( dsn=URL("https://sentry"), app_name="api", cluster_name="test", sample_rate=1, ) def test_create_kube(self) -> None: env = { "NP_KUBE_URL": "https://kubernetes.default.svc", "NP_KUBE_AUTH_TYPE": "token", "NP_KUBE_TOKEN": "k8s-token", "NP_KUBE_TOKEN_PATH": "k8s-token-path", "NP_KUBE_CERT_AUTHORITY_DATA": "k8s-ca-data", "NP_KUBE_CERT_AUTHORITY_PATH": "k8s-ca-path", "NP_KUBE_CLIENT_CERT_PATH": "k8s-client-cert-path", "NP_KUBE_CLIENT_KEY_PATH": "k8s-client-key-path", "NP_KUBE_CONN_TIMEOUT": "100", "NP_KUBE_READ_TIMEOUT": "200", "NP_KUBE_CONN_POOL_SIZE": "300", "NP_KUBE_CONN_KEEP_ALIVE_TIMEOUT": "400", } result = EnvironConfigFactory(env).create_kube() assert result == KubeConfig( url=URL("https://kubernetes.default.svc"), auth_type=KubeClientAuthType.TOKEN, token="k8s-token", token_path="k8s-token-path", cert_authority_data_pem="k8s-ca-data", cert_authority_path="k8s-ca-path", client_cert_path="k8s-client-cert-path", client_key_path="k8s-client-key-path", conn_timeout_s=100, read_timeout_s=200, conn_pool_size=300, conn_keep_alive_timeout_s=400, )
37.329231
85
0.561161
1,252
12,132
5.128594
0.091853
0.043607
0.024918
0.020558
0.800498
0.745678
0.705653
0.667653
0.651145
0.631833
0
0.016805
0.303495
12,132
324
86
37.444444
0.743077
0
0
0.588235
0
0
0.286515
0.057781
0
0
0
0
0.044983
1
0.044983
false
0
0.010381
0
0.058824
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
6f9000a11b2cc5bd11480c338dedf03621015a05
183
py
Python
viocetotext/setup.py
ASaiun/voice_to_text
6afcf3403c6d4365e7d1c7586fe250d0ade83b12
[ "Apache-2.0" ]
null
null
null
viocetotext/setup.py
ASaiun/voice_to_text
6afcf3403c6d4365e7d1c7586fe250d0ade83b12
[ "Apache-2.0" ]
null
null
null
viocetotext/setup.py
ASaiun/voice_to_text
6afcf3403c6d4365e7d1c7586fe250d0ade83b12
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # @Time : 11/28/2017 11:41 AM # @Author : Siyuan(Saiun) # @Site : # @File : setup.py.py # @Software: PyCharm Community Edition
26.142857
38
0.590164
26
183
4.153846
0.923077
0
0
0
0
0
0
0
0
0
0
0.090278
0.213115
183
7
38
26.142857
0.659722
0.928962
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
6f90d64459bbbda4b0e6e2e16202d5046a2ed99b
31,520
py
Python
college/models.py
dwillis/fumblerooski
68a1d4f759ac7c23023ccdcca558a0fbcfb1e48c
[ "BSD-3-Clause" ]
6
2015-06-30T14:11:07.000Z
2021-01-13T12:11:36.000Z
college/models.py
dwillis/fumblerooski
68a1d4f759ac7c23023ccdcca558a0fbcfb1e48c
[ "BSD-3-Clause" ]
null
null
null
college/models.py
dwillis/fumblerooski
68a1d4f759ac7c23023ccdcca558a0fbcfb1e48c
[ "BSD-3-Clause" ]
4
2017-06-23T00:14:34.000Z
2022-03-03T20:10:43.000Z
from django.db import models from django import forms import datetime from django.template.defaultfilters import slugify from django.conf import settings CURRENT_SEASON = getattr(settings, 'CURRENT_SEASON', datetime.date.today().year) STATUS_CHOICES = ( ('FR', 'Freshman'), ('SO', 'Sophomore'), ('JR', 'Junior'), ('SR', 'Senior'), ) POSITION_TYPE_CHOICES = ( ('O', 'Offense'), ('D', 'Defense'), ('S', 'Special Teams'), ) SIDE_CHOICES = ( ('O', 'Own'), ('P', 'Opponents'), ) RESULT_CHOICES = ( ('W', 'Win'), ('L', 'Loss'), ('T', 'Tie'), ) GAME_TYPE_CHOICES = ( ('H', 'Home'), ('A', 'Away'), ('N', 'Neutral Site'), ) PLAY_CHOICES = ( ('R', 'Run'), ('P', 'Pass'), ('F', 'Field Goal'), ('X', 'Extra Point'), ('N', 'Penalty'), ('K', 'Kickoff'), ('U', 'Punt'), ('T', 'Turnover'), ) DIVISION_CHOICES = ( ('B', 'Bowl Subdivision'), ('C', 'Championship Subdivision'), ('D', 'Division II'), ('T', 'Division III'), ) class State(models.Model): id = models.CharField(max_length=2, editable=False, primary_key=True) name = models.CharField(max_length=50) def __unicode__(self): return self.name def get_absolute_url(self): return "/states/%s/" % self.id.lower() class StateForm(forms.Form): name = forms.ModelChoiceField(queryset=State.objects.all().order_by('name')) class City(models.Model): name = models.CharField(max_length=75) slug = models.SlugField(max_length=75) state = models.ForeignKey(State, null=True, blank=True) def __unicode__(self): if self.state: return "%s, %s" % (self.name, self.state.id) else: return self.name def get_absolute_url(self): return "/college/states/%s/%s/" % (self.state.id.lower(), self.slug) class Meta: verbose_name_plural = 'cities' class Week(models.Model): season = models.IntegerField() week_num = models.IntegerField() end_date = models.DateField() def __unicode__(self): return "Week %s, %s" % (self.week_num, self.season) def week_games_url(self): return "/college/seasons/%s/week/%s/" % (self.season, self.week_num) class Conference(models.Model): abbrev = 
models.CharField(max_length=10) name = models.CharField(max_length=90) def __unicode__(self): return self.name def get_absolute_url(self): return '/college/conferences/%s/' % self.abbrev.lower() class College(models.Model): name = models.CharField(max_length=90) slug = models.SlugField(max_length=90) drive_slug = models.CharField(max_length=90) # city = models.ForeignKey(City, blank=True) # state = models.ForeignKey(State, blank=True) official_url = models.CharField(max_length=120, blank=True) official_rss = models.CharField(max_length=120, blank=True) updated = models.BooleanField() def __unicode__(self): return self.name def get_absolute_url(self): return '/college/teams/%s/' % self.slug def current_record(self): current_season = self.collegeyear_set.get(season=datetime.date.today()).year return "(%d-%d)" % (current_season.wins, current_season.losses) class Meta: ordering = ['name', 'state'] class CollegeYear(models.Model): college = models.ForeignKey(College) season = models.IntegerField() wins = models.IntegerField(default=0) losses = models.IntegerField(default=0) ties = models.IntegerField(default=0) conference_wins = models.IntegerField(default=0) conference_losses = models.IntegerField(default=0) conference_ties = models.IntegerField(default=0) freshmen = models.IntegerField(default=0) sophomores = models.IntegerField(default=0) juniors = models.IntegerField(default=0) seniors = models.IntegerField(default=0) conference = models.ForeignKey(Conference, null=True, blank=True) division = models.CharField(max_length=1, choices=DIVISION_CHOICES) def __unicode__(self): return "%s - %s" % (self.college.name, str(self.season)) def game_count(self): return self.wins+self.losses+self.ties def get_ncaa_week_url(self): return 'http://web1.ncaa.org/football/exec/rankingSummary?year=%d&org=%d&week=' % (self.season, self.college.id) def get_absolute_url(self): return "/college/teams/%s/%s/" % (self.college.slug, self.season) def get_conference_url(self): if 
self.conference: return "/college/conferences/%s/%s/" % (self.conference.abbrev, self.season) def coaching_staff_url(self): return self.get_absolute_url()+'coaches/' def record(self): if self.ties: return "%s-%s-%s" % (self.wins, self.losses, self.ties) else: return "%s-%s" % (self.wins, self.losses) def conference_record(self): if self.conference_ties: return "%s-%s-%s" % (self.conference_wins, self.conference_losses, self.conference_ties) else: return "%s-%s" % (self.conference_wins, self.conference_losses) def coach_total(self): return len(self.collegecoach_set.filter(end_date__isnull=True)) class Meta: ordering = ['college', '-season'] class Coach(models.Model): ncaa_name = models.CharField(max_length=90) first_name = models.CharField(max_length=75) last_name = models.CharField(max_length=75) slug = models.CharField(max_length=75, editable=False) college = models.ForeignKey(College, null=True, blank=True, related_name='School') grad_year = models.IntegerField(null=True, blank=True) birth_date = models.DateField(null=True, blank=True) years = models.IntegerField(default=0, blank=True) wins = models.IntegerField(default=0, blank=True) losses = models.IntegerField(default=0, blank=True) ties = models.IntegerField(default=0, blank=True) def __unicode__(self): return self.first_name + " " + self.last_name def save(self): super(Coach, self).save() self.slug = '%s-%s-%s' % (str(self.id), slugify(self.first_name), slugify(self.last_name)) super(Coach, self).save() def get_absolute_url(self): return '/coaches/detail/%s/' % self.slug def full_name(self): return self.first_name + " " + self.last_name def current_school(self): try: current_school = self.collegecoach_set.get(collegeyear__season__exact = CURRENT_SEASON, end_date = None).collegeyear.college except: current_school = None return current_school def seasons_at_school(self,school): return [sorted([cy.collegeyear.season for cy in self.collegecoach_set.all() if cy.collegeyear.college == school])] def 
seasons_at_current_school(self): return len([cy.collegeyear.college.id for cy in self.collegecoach_set.all() if cy.collegeyear.college.id == self.current_school().id]) def current_job(self): if self.current_school(): cy = self.collegecoach_set.filter(collegeyear__college=self.current_school).order_by('start_date')[0].jobs_display() return cy else: return None def head_coach_experience(self): if 1 in sum([[j.id for j in job.jobs.all() if j.id == 1] for job in self.collegecoach_set.all()],[]): return "Yes" else: return "No" def years_since_2000(self): return self.collegecoach_set.all().count() def years_at_alma_mater_since_2000(self): return len([a for a in self.collegecoach_set.all() if self.college == a.collegeyear.college]) def states_coached_in(self): states = {} state_list = [s.collegeyear.college.state.id for s in self.collegecoach_set.all()] [states.setdefault(e,500) for e in state_list if e not in states] return states def coaching_peers(self): from django.db import connection cursor = connection.cursor() year_ids = [str(c.collegeyear.id) for c in self.collegecoach_set.all()] cursor.execute("SELECT distinct college_coach.id FROM college_coach INNER JOIN college_collegecoach ON college_coach.id=college_collegecoach.coach_id WHERE college_collegecoach.collegeyear_id IN (%s)" % ','.join(year_ids)) results = cursor.fetchall() ids = [c[0] for c in results] return Coach.objects.filter(id__in=ids).exclude(id=self.id) class Meta: ordering = ['last_name', 'first_name'] verbose_name_plural = 'Coaches' class CoachForm(forms.Form): name = forms.CharField(max_length=50, initial='Last name') class CoachDetailForm(forms.Form): coaches = forms.ModelChoiceField(queryset=Coach.objects.none()) def __init__(self, coaches, *args, **kwargs): super(CoachDetailForm, self).__init__(*args, **kwargs) self.fields["coaches"].queryset = coaches class CoachingJob(models.Model): name = models.CharField(max_length=75) slug = models.SlugField(max_length=75) def __unicode__(self): return 
self.name class CollegeCoach(models.Model): coach = models.ForeignKey(Coach) collegeyear = models.ForeignKey(CollegeYear) jobs = models.ManyToManyField(CoachingJob) start_date = models.DateField(null=True, blank=True) end_date = models.DateField(null=True, blank=True) is_head_coach = models.BooleanField(default=False) def __unicode__(self): return "%s: %s" % (self.coach, self.collegeyear) def get_absolute_url(self): return self.coach.get_absolute_url() def jobs_display(self): return ", ".join([x.name for x in self.jobs.all()]) def is_current_job(self): if self.collegeyear.season == CURRENT_SEASON and self.end_date == None: return True else: return False def partial_season(self): if end_date: return True else: return False def feed_date(self): if self.start_date and self.end_date: return self.end_date elif self.start_date: return self.start_date elif self.end_date: return self.end_date def feed_action(self): if self.start_date and self.end_date: return "Departed" elif self.start_date: return "Hired" elif self.end_date: return "Departed" class Meta: ordering = ['coach__last_name','-collegeyear__season'] verbose_name_plural = 'College coaches' class CollegeTotal(models.Model): college = models.ForeignKey(College) season = models.IntegerField() third_down_attempts = models.IntegerField(default=0) third_down_conversions = models.IntegerField(default=0) fourth_down_attempts = models.IntegerField(default=0) fourth_down_conversions = models.IntegerField(default=0) first_downs_rushing = models.IntegerField(default=0) first_downs_passing = models.IntegerField(default=0) first_downs_penalty = models.IntegerField(default=0) first_downs_total = models.IntegerField(default=0) penalties = models.IntegerField(default=0) penalty_yards = models.IntegerField(default=0) fumbles = models.IntegerField(default=0) fumbles_lost = models.IntegerField(default=0) rushes = models.IntegerField(default=0) rush_gain = models.IntegerField(default=0) rush_loss = models.IntegerField(default=0) 
rush_net = models.IntegerField(default=0) rush_touchdowns = models.IntegerField(default=0) total_plays = models.IntegerField(default=0) total_yards = models.IntegerField(default=0) pass_attempts = models.IntegerField(default=0) pass_completions = models.IntegerField(default=0) pass_interceptions = models.IntegerField(default=0) pass_yards = models.IntegerField(default=0) pass_touchdowns = models.IntegerField(default=0) receptions = models.IntegerField(default=0) receiving_yards = models.IntegerField(default=0) receiving_touchdowns = models.IntegerField(default=0) punts = models.IntegerField(default=0) punt_yards = models.IntegerField(default=0) punt_returns = models.IntegerField(default=0) punt_return_yards = models.IntegerField(default=0) punt_return_touchdowns = models.IntegerField(default=0) kickoff_returns = models.IntegerField(default=0) kickoff_return_yards = models.IntegerField(default=0) kickoff_return_touchdowns = models.IntegerField(default=0) touchdowns = models.IntegerField(default=0) pat_attempts = models.IntegerField(default=0) pat_made = models.IntegerField(default=0) two_point_conversion_attempts = models.IntegerField(default=0) two_point_conversions = models.IntegerField(default=0) field_goal_attempts = models.IntegerField(default=0) field_goals_made = models.IntegerField(default=0) points = models.IntegerField(default=0) class Position(models.Model): abbrev = models.CharField(max_length=5) name = models.CharField(max_length=25) plural_name = models.CharField(max_length=25) position_type = models.CharField(max_length=1, choices=POSITION_TYPE_CHOICES) def __unicode__(self): return self.abbrev def get_absolute_url(self): return '/recruits/positions/%s/' % self.abbrev.lower() class BowlGame(models.Model): name = models.CharField(max_length=75) slug = models.CharField(max_length=75) city = models.ForeignKey(City) def __unicode__(self): return self.name def get_absolute_url(self): return '/college/bowl-games/%s/' % self.slug class Game(models.Model): 
season = models.IntegerField() team1 = models.ForeignKey(CollegeYear, related_name='team1') coach1 = models.ForeignKey(Coach, null=True, related_name='first_coach') team2 = models.ForeignKey(CollegeYear, related_name='team2') coach2 = models.ForeignKey(Coach, null=True, related_name='second_coach') date = models.DateField() week = models.ForeignKey(Week) t1_game_type = models.CharField(max_length=1, choices=GAME_TYPE_CHOICES) t1_result = models.CharField(max_length=1, choices=RESULT_CHOICES, blank=True) team1_score = models.IntegerField(null=True, blank=True) team2_score = models.IntegerField(null=True, blank=True) site = models.CharField(max_length=90, blank=True) attendance = models.IntegerField(null=True, blank=True) overtime = models.CharField(max_length=5, blank=True) ncaa_xml = models.CharField(max_length=120, blank=True) duration = models.TimeField(null=True, blank=True) has_drives = models.BooleanField() has_stats = models.BooleanField() has_player_stats = models.BooleanField() is_conference_game = models.BooleanField() is_bowl_game = models.BooleanField() bowl_game = models.ForeignKey(BowlGame, null=True, blank=True) def __unicode__(self): return '%s vs. 
%s, %s' % (self.team1, self.team2, self.date) def get_absolute_url(self): return '/college/teams/%s/vs/%s/%s/%s/%s/' % (self.team1.college.slug, self.team2.college.slug, self.date.year, self.date.month, self.date.day) def get_matchup_url(self): return '/college/teams/%s/vs/%s/' % (self.team1.college.slug, self.team2.college.slug) def get_reverse_url(self): return '/college/teams/%s/vs/%s/%s/%s/%s/' % (self.team2.college.slug, self.team1.college.slug, self.date.year, self.date.month, self.date.day) def get_ncaa_xml_url(self): return 'http://web1.ncaa.org/d1mfb/%s/Internet/worksheets/%s.xml' % (self.season, self.ncaa_xml.strip()) def get_ncaa_drive_url(self): return "http://web1.ncaa.org/mfb/driveSummary.jsp?acadyr=%s&h=%s&v=%s&date=%s&game=%s" % (self.season, self.team1.college.id, self.team2.college.id, self.date.strftime("%d-%b-%y").upper(), self.ncaa_xml.strip()) def get_play_by_play_url(self): return "http://web1.ncaa.org/mfb/driveSummary.jsp?expand=A&acadyr=%s&h=%s&v=%s&date=%s&game=%s" % (self.season, self.team1.college.id, self.team2.college.id, self.date.strftime("%d-%b-%y").upper(), self.ncaa_xml.strip()) def margin(self): return self.team1_score-self.team2_score def display(self): if self.margin() > 0: return "%s %s, %s %s" % (self.team1.college, self.team1_score, self.team2.college, self.team2_score) else: return "%s %s, %s %s" % (self.team2.college, self.team2_score, self.team1.college, self.team1_score) class QuarterScore(models.Model): "Represents a team's scoring during a quarter of a game. OT periods begin with 5." "Not implemented yet." 
game = models.ForeignKey(Game) team = models.ForeignKey(CollegeYear) season = models.IntegerField() quarter = models.IntegerField(default=CURRENT_SEASON) points = models.PositiveIntegerField(default=0) def __unicode__(self): return "%s - %s" (self.team, self.quarter) class DriveOutcome(models.Model): abbrev = models.CharField(max_length=10) name = models.CharField(max_length=50, null=True) slug = models.SlugField(max_length=50, null=True) def __unicode__(self): return self.name class GameDrive(models.Model): season = models.IntegerField() game = models.ForeignKey(Game) team = models.ForeignKey(CollegeYear) drive = models.IntegerField() quarter = models.PositiveSmallIntegerField() start_how = models.CharField(max_length=25) start_time = models.TimeField() start_position = models.IntegerField() start_side = models.CharField(max_length=1, choices=SIDE_CHOICES) end_result = models.ForeignKey(DriveOutcome) end_time = models.TimeField() end_position = models.IntegerField(null=True) end_side = models.CharField(max_length=1, choices=SIDE_CHOICES) plays = models.IntegerField() yards = models.IntegerField() time_of_possession = models.TimeField() def __unicode__(self): return "%s: %s drive %s" % (self.game, self.team, self.drive) class GameOffense(models.Model): game = models.ForeignKey(Game) team = models.ForeignKey(CollegeYear) season = models.IntegerField() third_down_attempts = models.IntegerField(default=0) third_down_conversions = models.IntegerField(default=0) fourth_down_attempts = models.IntegerField(default=0) fourth_down_conversions = models.IntegerField(default=0) time_of_possession = models.TimeField(null=True) first_downs_rushing = models.IntegerField(default=0) first_downs_passing = models.IntegerField(default=0) first_downs_penalty = models.IntegerField(default=0) first_downs_total = models.IntegerField(default=0) penalties = models.IntegerField(default=0) penalty_yards = models.IntegerField(default=0) fumbles = models.IntegerField(default=0) fumbles_lost = 
models.IntegerField(default=0) rushes = models.IntegerField(default=0) rush_gain = models.IntegerField(default=0) rush_loss = models.IntegerField(default=0) rush_net = models.IntegerField(default=0) rush_touchdowns = models.IntegerField(default=0) total_plays = models.IntegerField(default=0) total_yards = models.IntegerField(default=0) pass_attempts = models.IntegerField(default=0) pass_completions = models.IntegerField(default=0) pass_interceptions = models.IntegerField(default=0) pass_yards = models.IntegerField(default=0) pass_touchdowns = models.IntegerField(default=0) receptions = models.IntegerField(default=0) receiving_yards = models.IntegerField(default=0) receiving_touchdowns = models.IntegerField(default=0) punts = models.IntegerField(default=0) punt_yards = models.IntegerField(default=0) punt_returns = models.IntegerField(default=0) punt_return_yards = models.IntegerField(default=0) punt_return_touchdowns = models.IntegerField(default=0) kickoff_returns = models.IntegerField(default=0) kickoff_return_yards = models.IntegerField(default=0) kickoff_return_touchdowns = models.IntegerField(default=0) touchdowns = models.IntegerField(default=0) pat_attempts = models.IntegerField(default=0) pat_made = models.IntegerField(default=0) two_point_conversion_attempts = models.IntegerField(default=0) two_point_conversions = models.IntegerField(default=0) field_goal_attempts = models.IntegerField(default=0) field_goals_made = models.IntegerField(default=0) points = models.IntegerField(default=0) def __unicode__(self): return '%s - %s' % (self.game, self.team) def third_down_rate(self): return float(self.third_down_conversions)/float(self.third_down_attempts) def field_goal_rate(self): return float(self.field_goals_made)/float(self.field_goal_attempts) def penalty_yard_ratio(self): return float(self.penalty_yards)/float(self.total_yards) def yards_per_reception(self): return float(self.receiving_yards)/float(self.receptions) def yards_per_pass_attempt(self): return 
float(self.receiving_yards)/(self.pass_attempts) def rushing_first_downs_pct(self): return float(self.first_downs_rushing)/float(self.first_downs_total)*100 """ Returns a floating-point number representing the number of touchdowns per rushing attempt for a single game. """ def touchdowns_per_rushes(self): return float(self.rush_touchdowns)/float(self.rushes)*100 """ Returns the opponent for a team's given Game Offense record. """ def opponent(self): if self.team == self.game.team2: return self.game.team1 else: return self.game.team2 class GameDefense(models.Model): game = models.ForeignKey(Game) team = models.ForeignKey(CollegeYear) season = models.IntegerField() safeties = models.IntegerField(default=0) unassisted_tackles = models.IntegerField(default=0) assisted_tackles = models.IntegerField(default=0) unassisted_tackles_for_loss = models.IntegerField(default=0) assisted_tackles_for_loss = models.IntegerField(default=0) tackles_for_loss_yards = models.IntegerField(default=0) unassisted_sacks = models.IntegerField(default=0) assisted_sacks = models.IntegerField(default=0) sack_yards = models.IntegerField(default=0) defensive_interceptions = models.IntegerField(default=0) defensive_interception_yards = models.IntegerField(default=0) defensive_interception_touchdowns = models.IntegerField(default=0) pass_breakups = models.IntegerField(default=0) fumbles_forced = models.IntegerField(default=0) fumbles_number = models.IntegerField(default=0) fumbles_yards = models.IntegerField(default=0) fumbles_touchdowns = models.IntegerField(default=0) def __unicode__(self): return '%s - %s' % (self.game, self.team) class Player(models.Model): name = models.CharField(max_length=120) slug = models.SlugField(max_length=120) team = models.ForeignKey(CollegeYear) season = models.IntegerField() position = models.ForeignKey(Position) number = models.CharField(max_length=4) games_played = models.PositiveIntegerField(default=0) status = models.CharField(max_length=2, 
choices=STATUS_CHOICES) def __unicode__(self): return u"%s - %s" % (self.name, self.team) def get_absolute_url(self): return '/college/teams/%s/%s/players/%s/' % (self.team.college.slug, self.season, self.slug) def get_team_position_url(self): return '/college/teams/%s/%s/players/positions/%s/' % (self.team.college.slug, self.season, self.position.abbrev.lower()) def get_team_class_url(self): return '/college/teams/%s/%s/players/class/%s/' % (self.team.college.slug, self.season, self.status.lower()) class Meta: ordering = ['id'] class PlayerCollegeCareer(models.Model): player = models.ForeignKey(Player) first_season = models.ForeignKey(CollegeYear, related_name='first_season') last_season = models.ForeignKey(CollegeYear, related_name='last_season') total_games = models.IntegerField(null=True, blank=True) def __unicode__(self): return self.player.name.full_name() class PlayerGame(models.Model): player = models.ForeignKey(Player) game = models.ForeignKey(Game) played = models.BooleanField() starter = models.BooleanField() total_plays = models.IntegerField() total_yards = models.IntegerField() def __unicode__(self): return self.player.name class PlayerRush(models.Model): player = models.ForeignKey(Player) game = models.ForeignKey(Game) rushes = models.IntegerField(default=0) gain = models.IntegerField(default=0) loss = models.IntegerField(default=0) net = models.IntegerField(default=0) td = models.IntegerField(default=0) long_yards = models.IntegerField(default=0) average = models.FloatField(default=0) total_plays = models.IntegerField(default=0) total_yards = models.IntegerField(default=0) def __unicode__(self): return "%s - %s" % (self.player.name, self.game) class Meta: verbose_name_plural = "player rushing" class PlayerPass(models.Model): player = models.ForeignKey(Player) game = models.ForeignKey(Game) attempts = models.IntegerField(default=0) completions = models.IntegerField(default=0) interceptions = models.IntegerField(default=0) yards = 
models.IntegerField(default=0) td = models.IntegerField(default=0) conversions = models.IntegerField(default=0) total_plays = models.IntegerField(default=0) total_yards = models.IntegerField(default=0) pass_efficiency = models.FloatField(default=0) def __unicode__(self): return "%s - %s" % (self.player.name, self.game) def comp_att(self): return "%d of %d" % (self.completions, self.attempts) class Meta: verbose_name_plural = 'player passing' class PlayerReceiving(models.Model): player = models.ForeignKey(Player) game = models.ForeignKey(Game) receptions = models.IntegerField(default=0) yards = models.IntegerField(default=0) td = models.IntegerField(default=0) long_yards = models.IntegerField(default=0) average = models.FloatField(default=0) def __unicode__(self): return "%s - %s" % (self.player.name, self.game) class PlayerScoring(models.Model): player = models.ForeignKey(Player) game = models.ForeignKey(Game) td = models.IntegerField(default=0) fg_att = models.IntegerField(default=0) fg_made = models.IntegerField(default=0) pat_att = models.IntegerField(default=0) pat_made = models.IntegerField(default=0) two_pt_att = models.IntegerField(default=0) two_pt_made = models.IntegerField(default=0) def_pat_att = models.IntegerField(default=0) def_pat_made = models.IntegerField(default=0) def_two_pt_att = models.IntegerField(default=0) def_two_pt_made = models.IntegerField(default=0) safeties = models.IntegerField(default=0) points = models.IntegerField(default=0) def __unicode__(self): return "%s - %s" % (self.player.name, self.game) class PlayerTackle(models.Model): player = models.ForeignKey(Player) game = models.ForeignKey(Game) unassisted_tackles = models.IntegerField(default=0) assisted_tackles = models.IntegerField(default=0) def __unicode__(self): return "%s - %s" % (self.player.name, self.game) def total_tackles(self): return self.unassisted_tackles+self.assisted_tackles class PlayerTacklesLoss(models.Model): player = models.ForeignKey(Player) game = 
models.ForeignKey(Game) unassisted_tackles_for_loss = models.IntegerField(default=0) assisted_tackles_for_loss = models.IntegerField(default=0) tackles_for_loss_yards = models.IntegerField(default=0) unassisted_sacks = models.IntegerField(default=0) assisted_sacks = models.IntegerField(default=0) sack_yards = models.IntegerField(default=0) def __unicode__(self): return "%s - %s" % (self.player.name, self.game) def total_sacks(self): return self.unassisted_sacks+self.assisted_sacks def total_tackles_for_loss(self): return self.unassisted_tackles_for_loss+self.assisted_tackles_for_loss class Meta: verbose_name_plural = 'player tackles for loss' class PlayerPassDefense(models.Model): player = models.ForeignKey(Player) game = models.ForeignKey(Game) interceptions = models.IntegerField(default=0) interception_yards = models.IntegerField(default=0) interception_td = models.IntegerField(default=0) pass_breakups = models.IntegerField(default=0) def __unicode__(self): return "%s - %s" % (self.player.name, self.game) class PlayerFumble(models.Model): player = models.ForeignKey(Player) game = models.ForeignKey(Game) fumbles_forced = models.IntegerField(default=0) fumbles_number = models.IntegerField(default=0) fumbles_yards = models.IntegerField(default=0) fumbles_td = models.IntegerField(default=0) def __unicode__(self): return "%s - %s" % (self.player.name, self.game) class PlayerReturn(models.Model): player = models.ForeignKey(Player) game = models.ForeignKey(Game) punt_returns = models.IntegerField(default=0) punt_return_yards = models.IntegerField(default=0) punt_return_td = models.IntegerField(default=0) kickoff_returns = models.IntegerField(default=0) kickoff_return_yards = models.IntegerField(default=0) kickoff_return_td = models.IntegerField(default=0) def __unicode__(self): return "%s - %s" % (self.player.name, self.game) class PlayerSummary(models.Model): player = models.ForeignKey(Player) rushes = models.IntegerField(null=True) rush_gain = 
models.IntegerField(null=True) rush_loss = models.IntegerField(null=True) rush_net = models.IntegerField(null=True) rush_td = models.IntegerField(null=True) pass_attempts = models.IntegerField(null=True) pass_complete = models.IntegerField(null=True) pass_intercept = models.IntegerField(null=True) pass_yards = models.IntegerField(null=True) pass_td = models.IntegerField(null=True) conversions = models.IntegerField(null=True) offense_plays = models.IntegerField(null=True) offense_yards = models.IntegerField(null=True) receptions = models.IntegerField(null=True) reception_yards = models.IntegerField(null=True) reception_td = models.IntegerField(null=True) def __unicode__(self): return "%s - %s" % (self.player.name, self.player.season) class Poll(models.Model): name = models.CharField(max_length=50) slug = models.SlugField(max_length=50) def __unicode__(self): return self.name class PollResults(models.Model): poll = models.ForeignKey(Poll) week = models.ForeignKey(Week) team = models.ForeignKey(College) rank = models.IntegerField() def __unicode__(self): return "%s: %s %s" % (self.poll, self.week, self.team)
37.568534
230
0.692259
3,900
31,520
5.414615
0.101538
0.180707
0.204811
0.211773
0.669792
0.565516
0.492731
0.436094
0.416489
0.395369
0
0.01243
0.188325
31,520
838
231
37.613365
0.812969
0.004029
0
0.427515
0
0.005917
0.057377
0.014424
0
0
0
0
0
1
0.133136
false
0.038462
0.008876
0.106509
0.85355
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
4
6fb04c6b934dbc2c2a8f1050502568403cd8b120
142
py
Python
reddit2telegram/channels/azurlane_sub/app.py
mainyordle/reddit2telegram
1163e15aed3b6ff0fba65b222d3d9798f644c386
[ "MIT" ]
187
2016-09-20T09:15:54.000Z
2022-03-29T12:22:33.000Z
reddit2telegram/channels/azurlane_sub/app.py
mainyordle/reddit2telegram
1163e15aed3b6ff0fba65b222d3d9798f644c386
[ "MIT" ]
84
2016-09-22T14:25:07.000Z
2022-03-19T01:26:17.000Z
reddit2telegram/channels/azurlane_sub/app.py
mainyordle/reddit2telegram
1163e15aed3b6ff0fba65b222d3d9798f644c386
[ "MIT" ]
172
2016-09-21T15:39:39.000Z
2022-03-16T15:15:58.000Z
#encoding:utf-8 subreddit = 'AzureLane' t_channel = '@AzurLane_sub' def send_post(submission, r2t): return r2t.send_simple(submission)
15.777778
38
0.746479
19
142
5.368421
0.842105
0
0
0
0
0
0
0
0
0
0
0.02439
0.133803
142
8
39
17.75
0.804878
0.098592
0
0
0
0
0.173228
0
0
0
0
0
0
1
0.25
false
0
0
0.25
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
6fc618b732031598e6c69fb80b53fe55b2821831
11,533
py
Python
tests/gdk/common/test_model_actions.py
timmattison/aws-greengrass-gdk-cli
60a002f0f2fee84b79022662ba0cae9e0246b6f8
[ "Apache-2.0" ]
10
2022-01-15T09:50:32.000Z
2022-03-26T16:39:49.000Z
tests/gdk/common/test_model_actions.py
timmattison/aws-greengrass-gdk-cli
60a002f0f2fee84b79022662ba0cae9e0246b6f8
[ "Apache-2.0" ]
46
2021-11-30T19:49:16.000Z
2022-03-31T07:14:23.000Z
tests/gdk/common/test_model_actions.py
timmattison/aws-greengrass-gdk-cli
60a002f0f2fee84b79022662ba0cae9e0246b6f8
[ "Apache-2.0" ]
7
2021-11-30T19:49:42.000Z
2022-03-17T16:25:34.000Z
from pathlib import Path from unittest.mock import mock_open, patch import gdk.common.consts as consts import gdk.common.model_actions as model_actions import pytest def test_get_validated_model_file_not_exists(mocker): mock_get_static_file_path = mocker.patch("gdk.common.utils.get_static_file_path", return_value=None) mock_is_valid_model = mocker.patch("gdk.common.model_actions.is_valid_model", return_value=False) with pytest.raises(Exception) as e_info: model_actions.get_validated_model() expected_err_message = "expected str, bytes or os.PathLike object, not NoneType" assert e_info.value.args[0] == expected_err_message assert not mock_is_valid_model.called assert mock_get_static_file_path.call_count == 1 def test_get_validated_model_file_exists(mocker): file_path = Path("path/to/open") mock_get_static_file_path = mocker.patch("gdk.common.utils.get_static_file_path", return_value=file_path) mock_is_valid_model = mocker.patch("gdk.common.model_actions.is_valid_model", return_value=True) with patch("builtins.open", mock_open(read_data="{}")) as mock_file: model_actions.get_validated_model() assert open(file_path).read() == "{}" mock_file.assert_called_with(file_path) assert not mock_is_valid_model.called assert mock_get_static_file_path.call_count == 1 def test_get_validated_model_with_valid_model(mocker): # Should return model when the model is valid mocker.patch("gdk.common.model_actions.is_valid_model", return_value=True) command_model = model_actions.get_validated_model() assert command_model def test_get_validated_model_with_invalid_model(mocker): # Should raise an exception when the model is invalid mock_is_valid_model = mocker.patch("gdk.common.model_actions.is_valid_model", return_value=False) model_actions.get_validated_model() assert not mock_is_valid_model.called def test_is_valid_argument_model_valid(): # Valid argument that contains both name and help. 
valid_arg = {"name": ["-l", "--lang"], "help": "language help", "choices": ["p", "j"]} assert model_actions.is_valid_argument_model(valid_arg) def test_is_valid_argument_model_without_name(): # Invalid arg without name. invalid_arg_without_name = {"names": ["-l", "--lang"], "help": "help"} assert not model_actions.is_valid_model(invalid_arg_without_name, consts.cli_tool_name) def test_is_valid_argument_model_without_help(): # Invalid arg without help. invalid_arg_without_help = {"name": ["-l", "--lang"], "helper": "help"} assert not model_actions.is_valid_model(invalid_arg_without_help, consts.cli_tool_name) def test_is_valid_subcommand_model_valid(): # Valid subcommand with valid commmand key in the cli model. model = { "gdk": {"sub-commands": ["component"], "help": "help"}, "component": {"help": "help", "sub-commands": ["init", "build"]}, "build": {"help": "help"}, "init": {"help": "help"}, } valid_model_subcommands = ["component"] assert model_actions.is_valid_subcommand_model(model, valid_model_subcommands) def test_is_valid_subcommand_model_valid_without_help(): # Valid subcommand without help in the cli model. model = { "gdk": {"sub-commands": ["component"]}, "component": {"sub-commands": ["init", "build"]}, "build": {}, "init": {}, } valid_model_subcommands = ["component"] assert not model_actions.is_valid_subcommand_model(model, valid_model_subcommands) def test_is_valid_subcommand_model_invalid(): # Invalid subcommand with no key in the cli model. 
model = { "gdk": {"sub-commands": ["component"]}, "component": {"sub-commands": ["init", "build"]}, "init": {}, "build": {}, } invalid_model_subcommands = ["component", "invalid-subcommand-that-is-not-present-as-key"] assert not model_actions.is_valid_subcommand_model(model, invalid_model_subcommands) def test_is_valid_model_call_counts(mocker): valid_model = { "gdk": {"sub-commands": ["component"], "help": "help"}, "component": {"sub-commands": ["init", "build"], "help": "help"}, "init": { "arguments": { "lang": {"name": ["-l", "--lang"], "help": "help"}, "temp": {"name": ["-t", "--temp"], "help": "help"}, }, "arg_groups": [ { "title": "Greengrass component templates.", "args": ["lang"], "description": "description", } ], "help": "help", }, "build": {"help": "help"}, } spy_is_valid_argument_model = mocker.spy(model_actions, "is_valid_argument_model") spy_is_valid_argument_group_model = mocker.spy(model_actions, "is_valid_argument_group_model") spy_is_valid_sub_command = mocker.spy(model_actions, "is_valid_subcommand_model") assert model_actions.is_valid_model(valid_model, consts.cli_tool_name) assert spy_is_valid_argument_model.call_count == 2 assert spy_is_valid_argument_group_model.call_count == 1 assert spy_is_valid_sub_command.call_count == 2 def test_is_valid_model_invalid_argument_model(mocker): invalid_model = { "gdk": {"sub-commands": ["component"], "help": "help"}, "component": {"sub-commands": ["init", "build"], "help": "help"}, "init": { "arguments": { "lang": {"name": ["-l", "--lang"]}, "temp": {"name": ["-t", "--temp"]}, }, "arg_groups": [ { "title": "Greengrass component templates.", "args": ["lang"], "description": "description", } ], "help": "help", }, "build": {"help": "help"}, } spy_is_valid_argument_model = mocker.spy(model_actions, "is_valid_argument_model") spy_is_valid_argument_group_model = mocker.spy(model_actions, "is_valid_argument_group_model") spy_is_valid_sub_command = mocker.spy(model_actions, "is_valid_subcommand_model") assert not 
model_actions.is_valid_model(invalid_model, consts.cli_tool_name) assert spy_is_valid_argument_model.call_count == 1 assert spy_is_valid_argument_group_model.call_count == 0 assert spy_is_valid_sub_command.call_count == 2 # gdk, component def test_is_valid_model_invalid_argument_group_model(mocker): valid_model = { "gdk": {"sub-commands": ["init"], "help": "help"}, "init": { "arguments": { "lang": {"name": ["-l", "--lang"], "help": "help"}, "temp": {"name": ["-t", "--temp"], "help": "help"}, }, "arg_groups": [ { "title": "Greengrass component templates.", "args": ["lang", "template"], "description": "description", } ], "help": "help", }, "build": {"help": "help"}, } spy_is_valid_argument_model = mocker.spy(model_actions, "is_valid_argument_model") spy_is_valid_argument_group_model = mocker.spy(model_actions, "is_valid_argument_group_model") spy_is_valid_sub_command = mocker.spy(model_actions, "is_valid_subcommand_model") assert not model_actions.is_valid_model(valid_model, consts.cli_tool_name) assert spy_is_valid_argument_model.call_count == 2 assert spy_is_valid_argument_group_model.call_count == 1 assert spy_is_valid_sub_command.call_count == 1 # gdk def test_is_valid_model_invalid_sub_commands(mocker): valid_model = { "gdk": {"sub-commands": ["component"], "help": "help"}, "component": {"sub-commands": ["init", "not-valid"], "help": "help"}, "init": { "arguments": { "lang": {"name": ["-l", "--lang"], "help": "help"}, "temp": {"name": ["-t", "--temp"], "help": "help"}, }, "arg_groups": [ { "title": "Greengrass component templates.", "args": ["lang", "temp"], "description": "description", } ], "help": "help", }, "build": {"help": "help"}, } spy_is_valid_argument_model = mocker.spy(model_actions, "is_valid_argument_model") spy_is_valid_argument_group_model = mocker.spy(model_actions, "is_valid_argument_group_model") spy_is_valid_sub_command = mocker.spy(model_actions, "is_valid_subcommand_model") assert not model_actions.is_valid_model(valid_model, 
consts.cli_tool_name) assert spy_is_valid_argument_model.call_count == 2 assert spy_is_valid_argument_group_model.call_count == 1 assert spy_is_valid_sub_command.call_count == 2 # gdk, component def test_is_valid_argument_group_valid(): # Valid argument group model with correct arguments t_arg_group = {"title": "Greengrass component templates.", "args": ["language", "template"], "description": "description"} t_args = { "language": {"name": ["-l", "--language"], "help": "help", "choices": ["p", "j"]}, "template": {"name": ["-t", "--template"], "help": "help"}, "repository": {"name": ["-r", "--repository"], "help": "help"}, } assert model_actions.is_valid_argument_group_model(t_arg_group, t_args) def test_is_valid_argument_group_invalid_group(): # Invalid argument group model without title t_arg_group = {"args": ["language", "template"], "description": "description"} t_args = { "language": {"name": ["-l", "--language"], "help": "help", "choices": ["p", "j"]}, "template": {"name": ["-t", "--template"], "help": "help"}, "repository": {"name": ["-r", "--repository"], "help": "help"}, } assert not model_actions.is_valid_argument_group_model(t_arg_group, t_args) # Invalid argument group model without args t_arg_group = {"title": "title", "description": "description"} t_args = { "language": {"name": ["-l", "--language"], "help": "help", "choices": ["p", "j"]}, "template": {"name": ["-t", "--template"], "help": "help"}, "repository": {"name": ["-r", "--repository"], "help": "help"}, } assert not model_actions.is_valid_argument_group_model(t_arg_group, t_args) # Invalid argument group model without description t_arg_group = {"title": "title", "args": ["language", "template"]} t_args = { "language": {"name": ["-l", "--language"], "help": "help", "choices": ["p", "j"]}, "template": {"name": ["-t", "--template"], "help": "help"}, "repository": {"name": ["-r", "--repository"], "help": "help"}, } assert not model_actions.is_valid_argument_group_model(t_arg_group, t_args) def 
test_is_valid_argument_group_invalid_with_arg_not_in_arguments(): # Invalid argument group model with arg not in arguments t_arg_group = { "args": ["this-arg-not-in-arguments", "template"], "description": "description", "title": "title", } t_args = { "language": {"name": ["-l", "--language"], "help": "help", "choices": ["p", "j"]}, "template": {"name": ["-t", "--template"], "help": "help"}, "repository": {"name": ["-r", "--repository"], "help": "help"}, } assert not model_actions.is_valid_argument_group_model(t_arg_group, t_args)
42.091241
126
0.62759
1,381
11,533
4.877625
0.081825
0.077939
0.080166
0.087441
0.821704
0.779394
0.717191
0.692844
0.679335
0.664489
0
0.001658
0.215729
11,533
273
127
42.245421
0.743062
0.054192
0
0.557604
0
0
0.24433
0.055826
0
0
0
0
0.165899
1
0.078341
false
0
0.023041
0
0.101382
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
6fd04634351c4906a1efba5cd00441acf0488bd7
207
py
Python
guet/steps/action/_print.py
AbhishekMashetty/pairprogrammingmasetty
0528d4999b472ec6d94058193275a505eaf2c762
[ "Apache-2.0" ]
13
2018-12-21T22:47:28.000Z
2021-12-17T14:27:35.000Z
guet/steps/action/_print.py
chiptopher/guet
1099ee623311ba1d052237612efc9b06b7ff68bb
[ "Apache-2.0" ]
63
2018-08-30T11:19:12.000Z
2021-05-13T12:11:08.000Z
guet/steps/action/_print.py
chiptopher/guet
1099ee623311ba1d052237612efc9b06b7ff68bb
[ "Apache-2.0" ]
7
2019-05-21T13:52:37.000Z
2022-01-30T22:57:21.000Z
from .action import Action class PrintAction(Action): def __init__(self, message: str): super().__init__() self.message = message def execute(self, _): print(self.message)
18.818182
37
0.63285
23
207
5.304348
0.565217
0.270492
0.245902
0
0
0
0
0
0
0
0
0
0.256039
207
10
38
20.7
0.792208
0
0
0
0
0
0
0
0
0
0
0
0
1
0.285714
false
0
0.142857
0
0.571429
0.142857
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
6ff86ddd5ce0c454281e6568c628bd3c49ea5024
394
py
Python
examples/MMPT/mmpt/__init__.py
Shiguang-Guo/fairseq
c9d3df5679d0829cda8fc3c818b6cab52b78dc37
[ "MIT" ]
16,259
2018-05-02T02:31:30.000Z
2022-03-31T21:50:23.000Z
examples/MMPT/mmpt/__init__.py
Shiguang-Guo/fairseq
c9d3df5679d0829cda8fc3c818b6cab52b78dc37
[ "MIT" ]
3,863
2018-05-02T13:42:39.000Z
2022-03-31T19:03:32.000Z
examples/MMPT/mmpt/__init__.py
Shiguang-Guo/fairseq
c9d3df5679d0829cda8fc3c818b6cab52b78dc37
[ "MIT" ]
4,796
2018-05-02T07:55:51.000Z
2022-03-31T14:46:45.000Z
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. try: # fairseq user dir from .datasets import FairseqMMDataset from .losses import FairseqCriterion from .models import FairseqMMModel from .tasks import FairseqMMTask except ImportError: pass
30.307692
65
0.751269
52
394
5.692308
0.769231
0.067568
0
0
0
0
0
0
0
0
0
0
0.205584
394
12
66
32.833333
0.945687
0.469543
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.142857
0.714286
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
4
b50d441499382a84f41458396af0f4f8d532fb26
2,559
py
Python
project/server/middlewares/auth.py
ecralx/rotten-potatoes-api
7061eaff272cab04469777716dda3a620776ed1e
[ "MIT" ]
null
null
null
project/server/middlewares/auth.py
ecralx/rotten-potatoes-api
7061eaff272cab04469777716dda3a620776ed1e
[ "MIT" ]
null
null
null
project/server/middlewares/auth.py
ecralx/rotten-potatoes-api
7061eaff272cab04469777716dda3a620776ed1e
[ "MIT" ]
null
null
null
# project/server/middlewares/auth.py from flask import request, make_response, jsonify from functools import wraps from project.server.models import User def with_authorization_middleware(fn): """ Checks if there's an Authorization header in the request and supplies the valid user to the function (view) """ @wraps(fn) def wrapper(*args, **kwargs): # get the auth token auth_header = request.headers.get('Authorization') if auth_header: try: auth_token = auth_header.split(" ")[1] except IndexError: return fn(*args, **kwargs, user=None) else: auth_token = '' if auth_token: resp = User.decode_auth_token(auth_token) if not isinstance(resp, str): user = User.query.filter_by(id=resp).first() return fn(*args, **kwargs, user=user) return fn(*args, **kwargs, user=None) else: return fn(*args, **kwargs, user=None) return wrapper def auth_middleware(fn): """ Checks if there's an Authorization header in the request and supplies the valid user to the function (view) Aborts if there's any problem with the header (user must be logged in) """ @wraps(fn) def wrapper(*args, **kwargs): # get the auth token auth_header = request.headers.get('Authorization') if auth_header: try: auth_token = auth_header.split(" ")[1] except IndexError: response_object = { 'status': 'fail', 'status_code': 401, 'message': 'Bearer token malformed.' } return make_response(jsonify(response_object)), 401 else: auth_token = '' if auth_token: resp = User.decode_auth_token(auth_token) if not isinstance(resp, str): user = User.query.filter_by(id=resp).first() return fn(*args, **kwargs, user=user) response_object = { 'status': 'fail', 'status_code': 401, 'message': resp } return make_response(jsonify(response_object)), 401 else: response_object = { 'status': 'fail', 'status_code': 401, 'message': 'Provide a valid auth token.' } return make_response(jsonify(response_object)), 401 return wrapper
35.054795
111
0.550215
281
2,559
4.882562
0.266904
0.085277
0.056851
0.065598
0.794461
0.794461
0.77551
0.704082
0.543732
0.543732
0
0.01207
0.352481
2,559
73
112
35.054795
0.815932
0.14068
0
0.810345
0
0
0.074896
0
0
0
0
0
0
1
0.068966
false
0
0.051724
0
0.293103
0
0
0
0
null
0
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
82f97c32f939d4327706890dff7a052ac9a87566
3,151
py
Python
mpvr/test/test.py
nearj/mpvr-motionfiltering
478304391e031a11bd15a604a272017ce8e48abf
[ "MIT" ]
null
null
null
mpvr/test/test.py
nearj/mpvr-motionfiltering
478304391e031a11bd15a604a272017ce8e48abf
[ "MIT" ]
null
null
null
mpvr/test/test.py
nearj/mpvr-motionfiltering
478304391e031a11bd15a604a272017ce8e48abf
[ "MIT" ]
1
2019-07-14T01:32:04.000Z
2019-07-14T01:32:04.000Z
import numpy as np import pandas as pd import cv2 from numba import jit from . import datamanager class ThreeDI(datamanager.DataManager): """data manager for 3DI :param _scenarios: list of scenario :type _scenarios: dict :param _sensored: sensored axis list in 3di experiment :type _sensored: list :param _unsensored: sensored axis list in 3di experiment :type _unsensored: list :param _time_column: time column name in 3di.csv :type _time_column: str :param _start_index: start index of experiment :type _start_index: int :param _end_index: end index of experiment :type _end_index: int :param _step_min: minimum time step size of time stamps :type _step_min: int :param _step_max: maximum time step size of time stamps :type _step_max: int """ def __init__(self, *args, **kwargs): """Constructor for 3DI data manager""" super(ThreeDI, self).__init__(*args, **kwargs) # to make 6 axis motion, e.g.) (1.1, 2.2, 3.3) > (1.1, 2.2, 3.3, 0, 0, 0) # def _load_with_preset(self): # """Implementation method of :meth:'mpvr.datamanager.Datamanager.load()' # :returns: Generator of classified motion and video process in experiment # :rtype: Iterator[(int, list)] # """ # motion_load_directory = self._load_motion_dir + self._scenario + self._extension['csv'] # video_load_directory = self._load_video_dir + self._scenario + '/' # df = pd.read_csv(motion_load_directory) # times = pd.to_datetime(df[self._time_column].str.split().str[1]).astype(int) / 10 ** 9 # times -= times[self._start_index] # to set timestamps 0 at start index # sampled_indices, sampled_deltatimes = self._sampling_time(times) # # to make sampling rate approximately about 3hz as previous works # sensored_motion_vectors = np.diff(df[self._sensored.values()].values, axis = 0) # # order: pitch, yaw, roll # motion_vectors = np.hstack(( # sensored_motion_vectors, np.zeros(( # sensored_motion_vectors.shape[0], len(self._unsensored))))) # # to make 6 axis motion, e.g.) 
(1.1, 2.2, 3.3) > (1.1, 2.2, 3.3, 0, 0, 0) # sampled_motion = self._sampling_motion(sampled_deltatimes, # sampled_indices, # motion_vectors) # sampled_video = self._sampling_visual_from_png(sampled_deltatimes, # sampled_indices, # video_load_directory) # motion_bins = self._make_motion_bins(sampled_motion) # # TODO: stands for rewind... > # sampled_motion = self._sampling_motion(sampled_deltatimes, # sampled_indices, # motion_vectors) # for sample in zip(*(sampled_motion, sampled_video)): # yield self._classification_motion(sample[0], motion_bins), \ # self._classification_video(sample[1])
42.581081
97
0.606791
381
3,151
4.745407
0.32021
0.043142
0.006637
0.00885
0.19469
0.19469
0.19469
0.155973
0.120575
0.120575
0
0.021592
0.29451
3,151
73
98
43.164384
0.791723
0.827674
0
0
0
0
0
0
0
0
0
0.013699
0
1
0.125
false
0
0.625
0
0.875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
1
0
0
4
d227a55a3a1ec41066cf45c9a1652ae46dbb46c9
103
py
Python
BasicProject/HelloDjangoApp/apps.py
chimmilisrinivas/python-sample-vs-learning-django
30ce250b187507a99bf35e2691d483ebf03aa7f8
[ "MIT" ]
13
2018-07-19T04:05:17.000Z
2019-03-19T22:35:27.000Z
BasicProject/HelloDjangoApp/apps.py
chimmilisrinivas/python-sample-vs-learning-django
30ce250b187507a99bf35e2691d483ebf03aa7f8
[ "MIT" ]
4
2018-10-02T04:39:11.000Z
2018-11-29T01:06:30.000Z
BasicProject/HelloDjangoApp/apps.py
chimmilisrinivas/python-sample-vs-learning-django
30ce250b187507a99bf35e2691d483ebf03aa7f8
[ "MIT" ]
16
2019-11-03T23:14:50.000Z
2022-03-16T06:12:38.000Z
from django.apps import AppConfig class HelloDjangoAppConfig(AppConfig): name = 'HelloDjangoApp'
17.166667
38
0.786408
10
103
8.1
0.9
0
0
0
0
0
0
0
0
0
0
0
0.145631
103
5
39
20.6
0.920455
0
0
0
0
0
0.135922
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
d22bd14dc917a4fe741f630af22e4772f91714c4
175
py
Python
importance/download_pageviews.py
halfak/Article-importance-in-Wikipedia
43dbb04e320fe2aed2bd72aa08ad6b7085ba84e9
[ "MIT" ]
2
2015-01-31T15:32:20.000Z
2022-01-23T22:23:53.000Z
importance/download_pageviews.py
Seanpm2001-research/Article-importance-in-Wikipedia
43dbb04e320fe2aed2bd72aa08ad6b7085ba84e9
[ "MIT" ]
null
null
null
importance/download_pageviews.py
Seanpm2001-research/Article-importance-in-Wikipedia
43dbb04e320fe2aed2bd72aa08ad6b7085ba84e9
[ "MIT" ]
4
2015-01-21T14:34:13.000Z
2022-01-23T22:24:37.000Z
""" Downloads hourly pageview files for a directory. Usage: download_pageviews <web directory> <output directory> """ import docopt def main(): args = docopt.docopt
15.909091
57
0.72
21
175
5.952381
0.809524
0
0
0
0
0
0
0
0
0
0
0
0.182857
175
10
58
17.5
0.874126
0.651429
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
4
d242a1b004c13331b65295d59929b3ce78ff0756
1,243
py
Python
verbcalc/tests/test_calculating.py
syedatif4118/VerbCalc
16834cbf1a78036b386ce1020bfd705fde7a0833
[ "BSD-3-Clause" ]
null
null
null
verbcalc/tests/test_calculating.py
syedatif4118/VerbCalc
16834cbf1a78036b386ce1020bfd705fde7a0833
[ "BSD-3-Clause" ]
null
null
null
verbcalc/tests/test_calculating.py
syedatif4118/VerbCalc
16834cbf1a78036b386ce1020bfd705fde7a0833
[ "BSD-3-Clause" ]
null
null
null
""" Tests calculating. """ import unittest import verbcalc class TestCalculate(unittest.TestCase): """ Tests calculate function. """ def test_calculations(self): self.assertEqual(verbcalc.calculate('2 plus 2'), 'The result is 4') self.assertEqual(verbcalc.calculate ('what is 2 minus 2'), 'The result is 0') self.assertEqual(verbcalc.calculate ('calculate 2 times 2'), 'The result is 4') self.assertEqual(verbcalc.calculate ('2 divided by 2'), 'The result is 1') self.assertEqual(verbcalc.calculate ('2 to the power of 2'), 'The result is 4') self.assertEqual(verbcalc.calculate ('Absolute value of -2'), 'The result is 2') self.assertEqual(verbcalc.calculate ('2 mod 2'), 'The result is 0') self.assertEqual(verbcalc.calculate('2 root of 4'), 'The result is 2') self.assertEqual(verbcalc.calculate('3 root of 27'), 'The result is 3') self.assertEqual(verbcalc.calculate ('2 divided by 0'), 'You cannot divide by zero!') if __name__ == '__main__': unittest.main()
36.558824
79
0.572808
144
1,243
4.881944
0.305556
0.213371
0.327169
0.455192
0.623044
0.519203
0.519203
0.445235
0.320057
0
0
0.033998
0.313757
1,243
33
80
37.666667
0.790152
0.035398
0
0.304348
0
0
0.263605
0
0
0
0
0
0.434783
1
0.043478
false
0
0.086957
0
0.173913
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
d24608d5ddc47d0a079c9d18aaab34a2d9d7dd27
8,331
py
Python
{{cookiecutter.project_slug}}/settings/base.py
yunior-dev/django-vue-cookie
74f04a3aaaa4f40070c816d855a165c7ecc2d12e
[ "BSD-3-Clause" ]
3
2020-10-10T20:08:08.000Z
2021-03-26T05:46:25.000Z
{{cookiecutter.project_slug}}/settings/base.py
yunior-dev/django-cookie
74f04a3aaaa4f40070c816d855a165c7ecc2d12e
[ "BSD-3-Clause" ]
null
null
null
{{cookiecutter.project_slug}}/settings/base.py
yunior-dev/django-cookie
74f04a3aaaa4f40070c816d855a165c7ecc2d12e
[ "BSD-3-Clause" ]
1
2021-11-19T21:25:45.000Z
2021-11-19T21:25:45.000Z
""" Base settings to build other settings file upon. """ from pathlib import Path from django.conf.locale.en import formats as en_formats import environ ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent # apps/ directory APPS_DIR = ROOT_DIR / 'apps' # resources/ directory RESOURCES_DIR = ROOT_DIR / 'resources' env = environ.Env() READ_DOT_ENV_FILE = env.bool('APP_READ_DOT_ENV_FILE', default=False) if READ_DOT_ENV_FILE: # OS environment variables take precedence over variables from .env env.read_env(str(ROOT_DIR / '.env')) # GENERAL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = env.bool('APP_DEBUG', False) # Local time zone. Choices are # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # though not all of them may be available with every OS. # In Windows, this must be set to your system time zone. TIME_ZONE = 'UTC' # https://docs.djangoproject.com/en/dev/ref/settings/#language-code LANGUAGE_CODE = 'en-us' # https://docs.djangoproject.com/en/dev/ref/settings/#site-id SITE_ID = 1 # https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n USE_I18N = True # https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n USE_L10N = True # https://docs.djangoproject.com/en/dev/ref/settings/#use-tz USE_TZ = True # https://docs.djangoproject.com/en/dev/ref/settings/#locale-paths LOCALE_PATHS = [str(ROOT_DIR / 'locale')] # URLS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf ROOT_URLCONF = 'config.urls' # https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application WSGI_APPLICATION = 'config.wsgi.application' # APPS # ------------------------------------------------------------------------------ DJANGO_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 
'django.contrib.messages', 'django.contrib.staticfiles', # 'django.contrib.humanize', # Handy template tags ] VENDOR_APPS = [ 'djangomix', # Third party apps go here. ] LOCAL_APPS = [ {%- if cookiecutter.use_vuejs == "y" %} 'apps.client', {%- endif %} 'apps.core', # 'apps.users', # Your apps: custom apps go here. ] # https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps INSTALLED_APPS = DJANGO_APPS + VENDOR_APPS + LOCAL_APPS # PASSWORDS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers PASSWORD_HASHERS = [ # https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', 'django.contrib.auth.hashers.BCryptSHA256PasswordHasher', ] # https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator' }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator' }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator' }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator' }, ] # MIDDLEWARE # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#middleware MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', {%- if cookiecutter.use_whitenoise == "y" %} 'whitenoise.middleware.WhiteNoiseMiddleware', {%- endif %} 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 
'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.common.BrokenLinkEmailsMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] # STATIC # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#static-root STATIC_ROOT = str(ROOT_DIR / 'staticfiles') # https://docs.djangoproject.com/en/dev/ref/settings/#static-url STATIC_URL = '/static/' # https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS STATICFILES_DIRS = [str(ROOT_DIR / 'static')] # https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders STATICFILES_FINDERS = [ 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ] STATIC_URL = '/static/' {%- if cookiecutter.use_whitenoise == "y" %} # WHITENOISE STORAGE # http://whitenoise.evans.io/en/stable/ STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' {%- endif %} # MEDIA # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#media-root MEDIA_ROOT = str(ROOT_DIR / "media") # https://docs.djangoproject.com/en/dev/ref/settings/#media-url MEDIA_URL = "/media/" # TEMPLATES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#templates TEMPLATES = [ { # https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND 'BACKEND': 'django.template.backends.django.DjangoTemplates', # https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs 'DIRS': [str('templates')], 'OPTIONS': { # https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types 'loaders': [ 'django.template.loaders.filesystem.Loader', 
'django.template.loaders.app_directories.Loader', ], # https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.i18n', 'django.template.context_processors.media', 'django.template.context_processors.static', 'django.template.context_processors.tz', 'django.contrib.messages.context_processors.messages', ], }, } ] # SECURITY # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-httponly SESSION_COOKIE_HTTPONLY = True # https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-httponly CSRF_COOKIE_HTTPONLY = True # https://docs.djangoproject.com/en/dev/ref/settings/#secure-browser-xss-filter SECURE_BROWSER_XSS_FILTER = True # https://docs.djangoproject.com/en/dev/ref/settings/#x-frame-options X_FRAME_OPTIONS = 'DENY' # ADMIN # ------------------------------------------------------------------------------ # Django Admin URL. # https://docs.djangoproject.com/en/dev/ref/settings/#admins ADMIN_URL = 'admin/' # DATE FORMATS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/3.1/ref/settings/#date-input-formats DATE_INPUT_FORMATS = ['%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y'] en_formats.DATE_FORMAT = 'N j, Y' # AUTO PRIMARY KEY FIELD # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/3.2/releases/3.2/# DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' # Replace the default user model with the custom model. # AUTH_USER_MODEL = 'core.User' # Your stuff... # ------------------------------------------------------------------------------
35.909483
93
0.625375
880
8,331
5.8125
0.245455
0.058065
0.141935
0.16129
0.358358
0.358358
0.296188
0.289736
0.216422
0.084066
0
0.003425
0.123875
8,331
231
94
36.064935
0.697356
0.474733
0
0.094828
0
0
0.481948
0.411876
0
0
0
0
0
0
null
null
0.086207
0.025862
null
null
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
1
0
0
1
0
0
0
0
0
4
d24cef7075a55a780520530a7ea876fcd40efd6b
2,649
py
Python
test.py
santaclose/noose
e963138b81f380ca0f46369941cf65c4349bd4fb
[ "Apache-2.0" ]
8
2021-01-31T11:30:05.000Z
2021-09-01T07:48:34.000Z
test.py
santaclose/noose
e963138b81f380ca0f46369941cf65c4349bd4fb
[ "Apache-2.0" ]
4
2021-09-01T08:17:18.000Z
2021-09-24T22:32:24.000Z
test.py
santaclose/noose
e963138b81f380ca0f46369941cf65c4349bd4fb
[ "Apache-2.0" ]
1
2021-09-01T07:49:58.000Z
2021-09-01T07:49:58.000Z
import os import time import sys defines = ["LINUX", "TEST"] links = "-lsfml-graphics -lsfml-window -lsfml-system -lgtest -pthread" output = "test" defineString = "" first = True for item in defines: if not first: defineString += " " else: first = False defineString += f"-D{item}" os.chdir("test") print('---------------------------') print('-- graph operations test --') print('---------------------------') cppFiles = "../noose/nodeProvider/nodeFunctionality.cpp ../noose/nodeProvider/nodeProvider.cpp ../noose/logic/connectionSystem.cpp ../noose/logic/graphOperations.cpp ../noose/logic/node.cpp ../noose/logic/nodeSystem.cpp graphOperationsTest.cpp" finalString = f"g++ -o {output} {cppFiles} {links} {defineString} -w" os.system(finalString) os.system("./test") print('--------------------------') print('-- node provider test --') print('--------------------------') cppFiles = "../noose/nodeProvider/nodeFunctionality.cpp ../noose/nodeProvider/nodeProvider.cpp ../noose/logic/connectionSystem.cpp ../noose/logic/graphOperations.cpp ../noose/logic/node.cpp ../noose/logic/nodeSystem.cpp nodeProviderTest.cpp" finalString = f"g++ -o {output} {cppFiles} {links} {defineString} -w" os.system(finalString) os.system("./test") print('--------------------------') print('-- searcher test --') print('--------------------------') cppFiles = "../noose/nodeProvider/nodeFunctionality.cpp ../noose/nodeProvider/nodeProvider.cpp ../noose/logic/connectionSystem.cpp ../noose/logic/graphOperations.cpp ../noose/logic/node.cpp ../noose/logic/nodeSystem.cpp ../noose/searcher.cpp searcherTest.cpp" finalString = f"g++ -o {output} {cppFiles} {links} {defineString} -w" os.system(finalString) os.system("./test") print('--------------------------') print('-- logic component test --') print('--------------------------') cppFiles = "../noose/nodeProvider/nodeFunctionality.cpp ../noose/nodeProvider/nodeProvider.cpp ../noose/logic/connectionSystem.cpp ../noose/logic/graphOperations.cpp 
../noose/logic/node.cpp ../noose/logic/nodeSystem.cpp logicComponentTest.cpp" finalString = f"g++ -o {output} {cppFiles} {links} {defineString} -w" os.system(finalString) os.system("./test") ''' ''' ''' ''' if 'ui' not in sys.argv: exit() print('--------------------------') print('-- ui node system test --') print('--------------------------') os.chdir("../noose/") cppFiles = " ".join(["../noose" + file[1:] for file in os.popen("find . | grep cpp").read().split("\n") if len(file) > 0]) os.chdir("../test/") finalString = f"g++ -o {output} {cppFiles} {links} {defineString} -w -DTEST" os.system(finalString) os.system("./test")
32.703704
259
0.624764
291
2,649
5.687285
0.230241
0.101511
0.12568
0.042296
0.700302
0.700302
0.681571
0.681571
0.681571
0.653776
0
0.000831
0.091733
2,649
81
260
32.703704
0.687032
0
0
0.444444
0
0.074074
0.66692
0.430956
0
0
0
0
0
1
0
false
0
0.055556
0
0.055556
0.277778
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
d26ce16c5b304988a6b5d84632bb238d29a3ef83
337
py
Python
sentiment/utils.py
SinanTang/simple-sentiment-analyser.lambda
3830b9c96dc4d436ff8d99e3aad049b03ebda34e
[ "MIT" ]
1
2020-11-29T12:50:41.000Z
2020-11-29T12:50:41.000Z
sentiment/utils.py
SinanTang/simple-sentiment-analyser.lambda
3830b9c96dc4d436ff8d99e3aad049b03ebda34e
[ "MIT" ]
null
null
null
sentiment/utils.py
SinanTang/simple-sentiment-analyser.lambda
3830b9c96dc4d436ff8d99e3aad049b03ebda34e
[ "MIT" ]
null
null
null
import os from pathlib import Path def get_project_root(): return Path(__file__).parent def get_training_data_path(): return os.path.join(get_project_root(), 'data/train') def load_stop_word_list(lang): fp = os.path.join(get_project_root(), 'data/stopwords/{}'.format(lang)) return open(fp, 'r').read().split('\n')
21.0625
75
0.706231
52
337
4.269231
0.557692
0.135135
0.189189
0.117117
0.252252
0.252252
0.252252
0
0
0
0
0
0.136499
337
15
76
22.466667
0.762887
0
0
0
0
0
0.089021
0
0
0
0
0
0
1
0.333333
false
0
0.222222
0.222222
0.888889
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
964919d97c14fc18d854e30c3b2fc87b564be138
359
py
Python
elemental/admin.py
shawnadelic/django-elemental
5c09e3add921c9a7f661c29a4871846fb4c28f22
[ "MIT" ]
null
null
null
elemental/admin.py
shawnadelic/django-elemental
5c09e3add921c9a7f661c29a4871846fb4c28f22
[ "MIT" ]
null
null
null
elemental/admin.py
shawnadelic/django-elemental
5c09e3add921c9a7f661c29a4871846fb4c28f22
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Link, Menu, Page class LinkAdmin(admin.ModelAdmin): pass class MenuAdmin(admin.ModelAdmin): pass class PageAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("title",)} admin.site.register(Link, MenuAdmin) admin.site.register(Menu, MenuAdmin) admin.site.register(Page, PageAdmin)
17.95
46
0.749304
44
359
6.090909
0.477273
0.16791
0.190299
0.179104
0
0
0
0
0
0
0
0
0.133705
359
19
47
18.894737
0.861736
0
0
0.181818
0
0
0.02507
0
0
0
0
0
0
1
0
false
0.181818
0.181818
0
0.545455
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
9650c0a4124b207d1bb0afc39614d847e36d7bd8
825
py
Python
python/space-age/space_age.py
jjkeyser/exercism
c2c21eb04a6564d9d41c8b317ebec53d63e5a3e8
[ "MIT" ]
null
null
null
python/space-age/space_age.py
jjkeyser/exercism
c2c21eb04a6564d9d41c8b317ebec53d63e5a3e8
[ "MIT" ]
null
null
null
python/space-age/space_age.py
jjkeyser/exercism
c2c21eb04a6564d9d41c8b317ebec53d63e5a3e8
[ "MIT" ]
null
null
null
class SpaceAge: EARTH_YEAR_IN_SECONDS = 31557600 def __init__(self, seconds): self.seconds = seconds self.age_on_earth = self.seconds / self.EARTH_YEAR_IN_SECONDS def on_earth(self): return round(self.age_on_earth, 2) def on_mercury(self): return round(self.age_on_earth / 0.2408467, 2) def on_venus(self): return round(self.age_on_earth / 0.61519726, 2) def on_mars(self): return round(self.age_on_earth / 1.8808158, 2) def on_jupiter(self): return round(self.age_on_earth / 11.862615, 2) def on_saturn(self): return round(self.age_on_earth / 29.447498, 2) def on_uranus(self): return round(self.age_on_earth / 84.016846, 2) def on_neptune(self): return round(self.age_on_earth / 164.79132, 2)
25.78125
69
0.65697
127
825
3.984252
0.275591
0.13834
0.160079
0.249012
0.462451
0.462451
0.462451
0.118577
0
0
0
0.1168
0.242424
825
31
70
26.612903
0.6928
0
0
0
0
0
0
0
0
0
0
0
0
1
0.428571
false
0
0
0.380952
0.904762
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
9682eb99d1cd900ae2b2a969a51a352adce579b7
35
py
Python
settings.py
softwaresaved/reposearch
cdf6422f9b674052e1867c02216cd01d64fe95f2
[ "BSD-3-Clause" ]
null
null
null
settings.py
softwaresaved/reposearch
cdf6422f9b674052e1867c02216cd01d64fe95f2
[ "BSD-3-Clause" ]
2
2017-07-20T09:51:33.000Z
2017-07-20T15:32:56.000Z
settings.py
softwaresaved/reposearch
cdf6422f9b674052e1867c02216cd01d64fe95f2
[ "BSD-3-Clause" ]
null
null
null
MONGODATABASENAME = 'figsharedata'
17.5
34
0.828571
2
35
14.5
1
0
0
0
0
0
0
0
0
0
0
0
0.085714
35
1
35
35
0.90625
0
0
0
0
0
0.342857
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
96962c7303052c39c4919d316c9072a143ffa973
121
py
Python
Python/Unsorted/112a.py
LittleEndu/Codeforces
82c49b10702c58bc5ce062801d740a2f5f600062
[ "MIT" ]
null
null
null
Python/Unsorted/112a.py
LittleEndu/Codeforces
82c49b10702c58bc5ce062801d740a2f5f600062
[ "MIT" ]
null
null
null
Python/Unsorted/112a.py
LittleEndu/Codeforces
82c49b10702c58bc5ce062801d740a2f5f600062
[ "MIT" ]
null
null
null
aa = input().lower() bb = input().lower() if aa == bb: print("0") elif aa > bb: print("1") else: print("-1")
13.444444
20
0.495868
19
121
3.157895
0.526316
0.333333
0.3
0
0
0
0
0
0
0
0
0.032967
0.247934
121
8
21
15.125
0.626374
0
0
0
0
0
0.033058
0
0
0
0
0
0
1
0
false
0
0
0
0
0.375
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
96b4dddfcd1a2da469fb76efebc1d459fa832367
127
py
Python
FUNDADESK/APPS/HELPDESK/admin.py
flopezcpam/fundadesk-fmlc
66275055828f6b6173155418d745f093d9e1fa2f
[ "CC0-1.0" ]
null
null
null
FUNDADESK/APPS/HELPDESK/admin.py
flopezcpam/fundadesk-fmlc
66275055828f6b6173155418d745f093d9e1fa2f
[ "CC0-1.0" ]
null
null
null
FUNDADESK/APPS/HELPDESK/admin.py
flopezcpam/fundadesk-fmlc
66275055828f6b6173155418d745f093d9e1fa2f
[ "CC0-1.0" ]
null
null
null
from django.contrib import admin from .models import Incidencia # Register your models here. admin.site.register(Incidencia)
21.166667
32
0.811024
17
127
6.058824
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.125984
127
6
33
21.166667
0.927928
0.204724
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
736ae0cab9222b017a1c6bfd6552e32df5d97870
129
py
Python
submissions/abc080/a.py
m-star18/atcoder
08e475810516602fa088f87daf1eba590b4e07cc
[ "Unlicense" ]
1
2021-05-10T01:16:28.000Z
2021-05-10T01:16:28.000Z
submissions/abc080/a.py
m-star18/atcoder
08e475810516602fa088f87daf1eba590b4e07cc
[ "Unlicense" ]
3
2021-05-11T06:14:15.000Z
2021-06-19T08:18:36.000Z
submissions/abc080/a.py
m-star18/atcoder
08e475810516602fa088f87daf1eba590b4e07cc
[ "Unlicense" ]
null
null
null
# sys.stdin.readline() import sys import math input = sys.stdin.readline n, a, b = map(int, input().split()) print(min(n*a, b))
16.125
35
0.666667
23
129
3.73913
0.608696
0.186047
0.372093
0
0
0
0
0
0
0
0
0
0.139535
129
7
36
18.428571
0.774775
0.155039
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0.2
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
7370d6319aed0136ee694d5d67e4ac2834e1d6c0
54
py
Python
mtg_ssm/scryfall/third_party/__init__.py
suniahk/mtg_ssm
66912ff1a8d3532683d303b8d5d0c13287c28b32
[ "MIT" ]
29
2016-03-18T12:10:36.000Z
2022-02-20T17:32:06.000Z
mtg_ssm/scryfall/third_party/__init__.py
gwax/mtgcdb
f45b45052f34bebd600c8be0c4fb787856971162
[ "MIT" ]
6
2016-04-26T08:25:01.000Z
2021-02-22T11:56:27.000Z
mtg_ssm/scryfall/third_party/__init__.py
gwax/mtgcdb
f45b45052f34bebd600c8be0c4fb787856971162
[ "MIT" ]
8
2016-06-12T09:44:57.000Z
2021-11-05T01:17:59.000Z
"""Establish mtg_ssm.scryfall.third_party package."""
27
53
0.777778
7
54
5.714286
1
0
0
0
0
0
0
0
0
0
0
0
0.055556
54
1
54
54
0.784314
0.87037
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
737462f1355d5a240da40c16a10d16abce218a06
63
py
Python
airtech/apps/tickets/__init__.py
sam-karis/airtech
8e1cd7a9821719d27db046218625d70daaa46139
[ "MIT" ]
null
null
null
airtech/apps/tickets/__init__.py
sam-karis/airtech
8e1cd7a9821719d27db046218625d70daaa46139
[ "MIT" ]
4
2021-03-18T23:42:26.000Z
2022-02-10T12:36:23.000Z
airtech/apps/tickets/__init__.py
sam-karis/airtech
8e1cd7a9821719d27db046218625d70daaa46139
[ "MIT" ]
null
null
null
default_app_config = 'airtech.apps.tickets.apps.TicketsConfig'
31.5
62
0.84127
8
63
6.375
0.875
0
0
0
0
0
0
0
0
0
0
0
0.047619
63
1
63
63
0.85
0
0
0
0
0
0.619048
0.619048
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
7389a6131435fe7fc464918852f62d1c95ba54d3
146
py
Python
publicart_watcher/tests/test_command_line.py
rememberlenny/publicart-watcher
1bf880355f675eca2c0e6fbb48aa7d57ff39e515
[ "MIT" ]
null
null
null
publicart_watcher/tests/test_command_line.py
rememberlenny/publicart-watcher
1bf880355f675eca2c0e6fbb48aa7d57ff39e515
[ "MIT" ]
null
null
null
publicart_watcher/tests/test_command_line.py
rememberlenny/publicart-watcher
1bf880355f675eca2c0e6fbb48aa7d57ff39e515
[ "MIT" ]
null
null
null
from unittest import TestCase from publicart_watcher.command_line import main class TestCmd(TestCase): def test_basic(self): main()
18.25
47
0.753425
19
146
5.631579
0.789474
0
0
0
0
0
0
0
0
0
0
0
0.184932
146
8
48
18.25
0.89916
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.4
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
73a7b3a759250b8bb558898fe67d793d8c626091
485
py
Python
src/ikazuchi/tests/data/rst/api_call_text_with_indent.py
t2y/ikazuchi
7023111e92fa47360c50cfefd1398c554475f2c6
[ "Apache-2.0" ]
null
null
null
src/ikazuchi/tests/data/rst/api_call_text_with_indent.py
t2y/ikazuchi
7023111e92fa47360c50cfefd1398c554475f2c6
[ "Apache-2.0" ]
null
null
null
src/ikazuchi/tests/data/rst/api_call_text_with_indent.py
t2y/ikazuchi
7023111e92fa47360c50cfefd1398c554475f2c6
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- DATA_SET = [ ( u"text line", (u"", u"text line") ), ( u" ", (u" ", u" ") ), ( u" \t ", (u" \t", u" ") ), ( u" text line", (u" ", u"text line") ), ( u"\ttext line", (u"\t", u"text line") ), ( u"\t text line", (u"\t ", u"text line") ), ( u"\t \ntext line", (u"\t \n", u"text line") ), ]
14.69697
33
0.265979
54
485
2.37037
0.222222
0.351563
0.492188
0.46875
0.59375
0.59375
0.578125
0.578125
0
0
0
0.004132
0.501031
485
32
34
15.15625
0.524793
0.043299
0
0.5
0
0
0.281385
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
73a8cea2d222e9e117fbd8fe7aaa76df18700d0f
91
py
Python
intask_api/subtasks/apps.py
KirovVerst/intask
4bdec6f49fa2873cca1354d7d3967973f5bcadc3
[ "MIT" ]
null
null
null
intask_api/subtasks/apps.py
KirovVerst/intask
4bdec6f49fa2873cca1354d7d3967973f5bcadc3
[ "MIT" ]
7
2016-08-17T23:08:31.000Z
2022-03-02T02:23:08.000Z
intask_api/subtasks/apps.py
KirovVerst/intask
4bdec6f49fa2873cca1354d7d3967973f5bcadc3
[ "MIT" ]
null
null
null
from django.apps import AppConfig class SubtasksConfig(AppConfig): name = 'subtasks'
15.166667
33
0.758242
10
91
6.9
0.9
0
0
0
0
0
0
0
0
0
0
0
0.164835
91
5
34
18.2
0.907895
0
0
0
0
0
0.087912
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
73c80160f8e80e22732257c8bf5bc870cf6196ae
758
py
Python
p99/python3/p31.py
jlou2u/katas
bbdeef5f2bf3c26d9764c173793724df3f01341f
[ "MIT" ]
null
null
null
p99/python3/p31.py
jlou2u/katas
bbdeef5f2bf3c26d9764c173793724df3f01341f
[ "MIT" ]
null
null
null
p99/python3/p31.py
jlou2u/katas
bbdeef5f2bf3c26d9764c173793724df3f01341f
[ "MIT" ]
null
null
null
def is_prime(n): if n <= 3: return True return not any([n % i == 0 for i in range(2, n)]) def test_is_prime(): assert is_prime(1) assert is_prime(2) assert is_prime(3) assert is_prime(5) assert is_prime(7) assert is_prime(11) assert is_prime(13) assert is_prime(17) assert is_prime(19) assert is_prime(23) assert is_prime(29) assert is_prime(31) assert is_prime(37) assert is_prime(41) assert is_prime(43) assert is_prime(43) assert is_prime(97) assert not is_prime(4) assert not is_prime(6) assert not is_prime(8) assert not is_prime(9) assert not is_prime(10) assert not is_prime(12) assert not is_prime(14) assert not is_prime(96)
19.435897
53
0.639842
133
758
3.43609
0.300752
0.413567
0.483589
0.280088
0.094092
0.094092
0.094092
0
0
0
0
0.079422
0.269129
758
38
54
19.947368
0.745487
0
0
0.066667
0
0
0
0
0
0
0
0
0.833333
1
0.066667
false
0
0
0
0.133333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
73d31f64b5fd041e2478f9b98b095c6eef171d5b
735
py
Python
thredo/queue.py
dabeaz/thredo
bd17c885bdad514fa2729998fe8b9388800b5fc2
[ "MIT" ]
340
2018-07-23T18:21:56.000Z
2021-12-11T05:50:58.000Z
thredo/queue.py
dabeaz/thredo
bd17c885bdad514fa2729998fe8b9388800b5fc2
[ "MIT" ]
6
2018-07-31T11:52:56.000Z
2019-11-25T19:52:32.000Z
thredo/queue.py
dabeaz/thredo
bd17c885bdad514fa2729998fe8b9388800b5fc2
[ "MIT" ]
25
2018-07-27T06:09:05.000Z
2022-03-13T12:53:23.000Z
# queue.py # # A basic queue __all__ = [ 'Queue' ] import curio # -- Thredo from .thr import TAWAIT as AWAIT class Queue(object): def __init__(self, maxsize=0): self._queue = curio.Queue(maxsize) def empty(self): return self._queue.empty() def full(self): return self._queue.full() def get(self): return AWAIT(self._queue.get) def join(self): return AWAIT(self._queue.join) def put(self, item): return AWAIT(self._queue.put, item) def qsize(self): return self._queue.qsize() def task_done(self): return AWAIT(self._queue.task_done) def __iter__(self): return self def __next__(self): return self.get()
17.5
43
0.608163
97
735
4.340206
0.329897
0.171021
0.166271
0.190024
0.171021
0
0
0
0
0
0
0.001876
0.27483
735
41
44
17.926829
0.787993
0.043537
0
0
0
0
0.007163
0
0
0
0
0
0
1
0.416667
false
0
0.083333
0.375
0.916667
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
fb62541cdfbdf522f087d08126892db91d803f20
21
py
Python
singer_tap_amazon_mws/cache.py
pys933/singer-tap-amazon-mws
f36649ee0127c41ebde3cb3797469a736c59680f
[ "Apache-2.0" ]
4
2019-09-10T15:24:44.000Z
2020-06-18T18:36:10.000Z
singer_tap_amazon_mws/cache.py
pys933/singer-tap-amazon-mws
f36649ee0127c41ebde3cb3797469a736c59680f
[ "Apache-2.0" ]
1
2019-11-01T20:50:19.000Z
2020-05-25T16:57:52.000Z
singer_tap_amazon_mws/cache.py
pys933/singer-tap-amazon-mws
f36649ee0127c41ebde3cb3797469a736c59680f
[ "Apache-2.0" ]
6
2019-09-10T15:24:45.000Z
2021-03-30T23:51:49.000Z
InventoryCache = {}
7
19
0.666667
1
21
14
1
0
0
0
0
0
0
0
0
0
0
0
0.190476
21
2
20
10.5
0.823529
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
fb7c6d61e4eee6e1471882d1ec776feeceefe5aa
57
py
Python
models/synapses/PCDCNnMFDCN2015cSudhakar/__init__.py
HarshKhilawala/cerebmodels
d2a2f2ef947ef9dc23ddce6e55159240cd3233cb
[ "BSD-3-Clause" ]
null
null
null
models/synapses/PCDCNnMFDCN2015cSudhakar/__init__.py
HarshKhilawala/cerebmodels
d2a2f2ef947ef9dc23ddce6e55159240cd3233cb
[ "BSD-3-Clause" ]
9
2020-03-24T17:09:03.000Z
2021-05-17T16:11:17.000Z
models/synapses/PCDCNnMFDCN2015cSudhakar/__init__.py
myHBPwork/cerebmodels
371ea7f1bbe388f1acade17c7128b8ca6ab8fb7a
[ "BSD-3-Clause" ]
1
2021-05-21T03:08:41.000Z
2021-05-21T03:08:41.000Z
# ~/models/synapses/PCDCNnMFDCN2015cSudhakar/__init__.py
28.5
56
0.842105
5
57
8.8
1
0
0
0
0
0
0
0
0
0
0
0.072727
0.035088
57
1
57
57
0.727273
0.947368
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
fb7e349445e31e2c6f83c3028f877ba44b231032
8,837
py
Python
tests/test_decorators.py
simonw/django-sharding
06258973963eae1112ac4bc572592f729f6957fc
[ "BSD-3-Clause" ]
null
null
null
tests/test_decorators.py
simonw/django-sharding
06258973963eae1112ac4bc572592f729f6957fc
[ "BSD-3-Clause" ]
null
null
null
tests/test_decorators.py
simonw/django-sharding
06258973963eae1112ac4bc572592f729f6957fc
[ "BSD-3-Clause" ]
null
null
null
import unittest from django.conf import settings from django.db import models from django_sharding_library.id_generation_strategies import TableStrategyModel from django_sharding_library.decorators import model_config from django_sharding_library.exceptions import NonExistentDatabaseException, ShardedModelInitializationException from django_sharding_library.fields import PostgresShardGeneratedIDField, TableShardedIDField from django.test import TestCase from django_sharding_library.manager import ShardManager from django_sharding_library.constants import Backends class ModelConfigDecoratorTestCase(TestCase): def test_model_cannot_be_both_sharded_and_marked_for_a_specific_db(self): with self.assertRaises(ShardedModelInitializationException): @model_config(shard_group='default', database='default') class TestModelTwo(models.Model): id = TableShardedIDField(primary_key=True, source_table_name="blah") random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField() def get_shard(self): pass def test_sharded_model_requires_a_get_shard_method(self): with self.assertRaises(ShardedModelInitializationException): @model_config(shard_group='default') class TestModelTwo(models.Model): id = TableShardedIDField(primary_key=True, source_table_name="blah") random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField() def test_sharded_id_field_must_be_primary_key(self): with self.assertRaises(ShardedModelInitializationException): @model_config(shard_group='default') class TestModelTwo(models.Model): id = TableShardedIDField(source_table_name="blah") random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField(primary_key=True) def get_shard(self): pass def test_sharded_model_must_have_sharded_id_fied(self): with self.assertRaises(ShardedModelInitializationException): @model_config(shard_group='default') class TestModelTwo(models.Model): random_string = models.CharField(max_length=120) user_pk = 
models.PositiveIntegerField() def get_shard(self): from django.contrib.auth import get_user_model return get_user_model().objects.get(pk=self.user_pk).shard def test_puts_shard_group_on_the_model_class(self): @model_config(shard_group='testing') class TestModelThree(models.Model): id = TableShardedIDField(source_table_name="blah", primary_key=True) random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField() def get_shard(self): from django.contrib.auth import get_user_model return get_user_model().objects.get(pk=self.user_pk).shard self.assertEqual(getattr(TestModelThree, 'django_sharding__shard_group', None), 'testing') @unittest.skipIf(settings.DATABASES['default']['ENGINE'] not in Backends.POSTGRES, "Not a postgres backend") def test_two_postgres_sharded_id_generator_fields(self): @model_config(shard_group='testing') class TestModelThree(models.Model): id = PostgresShardGeneratedIDField(primary_key=True) something = PostgresShardGeneratedIDField() random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField() def get_shard(self): from django.contrib.auth import get_user_model return get_user_model().objects.get(pk=self.user_pk).shard self.assertEqual(getattr(TestModelThree, 'django_sharding__shard_group', None), 'testing') def test_cannot_place_database_on_replica_db(self): with self.assertRaises(NonExistentDatabaseException): @model_config(database='app_shard_001_replica_001') class ShardedTestModelIDsTwo(TableStrategyModel): pass def test_cannot_place_database_on_non_existant_db(self): with self.assertRaises(NonExistentDatabaseException): @model_config(database='i_do_not_exist') class ShardedTestModelIDsTwo(TableStrategyModel): pass def test_puts_database_name_on_model_stored_on_another_database(self): @model_config(database='app_shard_002') class ShardedTestModelIDsThree(TableStrategyModel): pass self.assertEqual(getattr(ShardedTestModelIDsThree, 'django_sharding__database', None), 'app_shard_002') def 
test_abstract_model_with_defined_manager_raises_exception_if_not_instance_of_shard_manager(self): # Manager is defined and not shard model, should raise an exception with self.assertRaises(ShardedModelInitializationException): @model_config(shard_group='default', sharded_by_field="user_pk") class TestModelOne(models.Model): id = TableShardedIDField(primary_key=True, source_table_name="blah") random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField() objects = models.Manager() class Meta: abstract = True def get_shard(self): pass @staticmethod def get_shard_from_id(id): pass # Manager is not defined, should NOT raise an exception @model_config(shard_group='default', sharded_by_field="user_pk") class TestModelTwo(models.Model): id = TableShardedIDField(primary_key=True, source_table_name="blah") random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField() class Meta: abstract = True def get_shard(self): pass @staticmethod def get_shard_from_id(id): pass # Manager is defines but is a shardmanager, should not raise an exception @model_config(shard_group='default', sharded_by_field="user_pk") class TestModelThree(models.Model): id = TableShardedIDField(primary_key=True, source_table_name="blah") random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField() objects = ShardManager() class Meta: abstract = True def get_shard(self): pass @staticmethod def get_shard_from_id(id): pass def test_decorator_raises_exception_when_sharded_by_field_is_defined_with_no_get_shard_from_id_function(self): with self.assertRaises(ShardedModelInitializationException): @model_config(shard_group='default', sharded_by_field="user_pk") class TestModelOne(models.Model): id = TableShardedIDField(primary_key=True, source_table_name="blah") random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField() def get_shard(self): pass def 
test_decorator_raises_exception_when_using_sharded_by_field_and_custom_manager_is_not_shard_manager_instance(self): class CustomManager(models.Manager): pass with self.assertRaises(ShardedModelInitializationException): @model_config(shard_group='default', sharded_by_field="user_pk") class TestModelOne(models.Model): id = TableShardedIDField(primary_key=True, source_table_name="blah") random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField() objects = CustomManager() def get_shard(self): pass @staticmethod def get_shard_from_id(id): pass def test_decorator_raises_exception_when_no_arguments_passed_in(self): with self.assertRaises(ShardedModelInitializationException): @model_config() class TestModelOne(models.Model): id = TableShardedIDField(primary_key=True, source_table_name="blah") random_string = models.CharField(max_length=120) user_pk = models.PositiveIntegerField() def get_shard(self): pass @staticmethod def get_shard_from_id(id): pass
42.690821
123
0.665724
910
8,837
6.128571
0.148352
0.021517
0.031558
0.058096
0.720459
0.713825
0.681729
0.664156
0.657343
0.622198
0
0.007381
0.264117
8,837
206
124
42.898058
0.850223
0.021614
0
0.740506
0
0
0.040963
0.012266
0
0
0
0
0.082278
1
0.183544
false
0.113924
0.082278
0
0.411392
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
fb99f9ca00b3ff6d12f0884a7212dcf42fdfda5b
402
py
Python
KlasaDecyzyjna.py
Grundi1410/uwm-ssi
522e573203858f85d9cbe5e89daa7c0a5cdac7db
[ "MIT" ]
null
null
null
KlasaDecyzyjna.py
Grundi1410/uwm-ssi
522e573203858f85d9cbe5e89daa7c0a5cdac7db
[ "MIT" ]
null
null
null
KlasaDecyzyjna.py
Grundi1410/uwm-ssi
522e573203858f85d9cbe5e89daa7c0a5cdac7db
[ "MIT" ]
null
null
null
class KlasaDecyzyjna: def __init__(self, klasaDecyzyjna=0, attributes=0): self.klasaDecyzyjna = "" self.attributes = [] def setKlasaDecyzyjna(self, a): self.klasaDecyzyjna = a def setAttributes(self, a): self.attributes = a def getKlasaDecyzyjna(self): return self.klasaDecyzyjna def getAttributes(self): return self.attributes
22.333333
55
0.646766
39
402
6.564103
0.333333
0.28125
0.070313
0
0
0
0
0
0
0
0
0.006757
0.263682
402
17
56
23.647059
0.858108
0
0
0
0
0
0
0
0
0
0
0
0
1
0.416667
false
0
0
0.166667
0.666667
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
fba4a3752567cdbbfd0894624db7edce08ab87da
85
py
Python
flash/apps.py
mdrichar/speedymc
525dccf1f7629b50b3a7275991fd6d8efcf18701
[ "MIT" ]
null
null
null
flash/apps.py
mdrichar/speedymc
525dccf1f7629b50b3a7275991fd6d8efcf18701
[ "MIT" ]
null
null
null
flash/apps.py
mdrichar/speedymc
525dccf1f7629b50b3a7275991fd6d8efcf18701
[ "MIT" ]
null
null
null
from django.apps import AppConfig class FlashConfig(AppConfig): name = 'flash'
14.166667
33
0.741176
10
85
6.3
0.9
0
0
0
0
0
0
0
0
0
0
0
0.176471
85
5
34
17
0.9
0
0
0
0
0
0.058824
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
fbb4b49cd4c8e4b50a3868ea30cc9e09b860d880
6,612
py
Python
pymars/tests/test_reduce_model.py
tsikes/pyMARS
ca393697f3bcc78d93c9f9d0254ea3f1dd049524
[ "MIT" ]
39
2016-03-23T19:52:11.000Z
2022-02-17T12:50:44.000Z
pymars/tests/test_reduce_model.py
tsikes/pyMARS
ca393697f3bcc78d93c9f9d0254ea3f1dd049524
[ "MIT" ]
62
2017-06-16T23:24:17.000Z
2021-07-23T03:01:34.000Z
pymars/tests/test_reduce_model.py
tsikes/pyMARS
ca393697f3bcc78d93c9f9d0254ea3f1dd049524
[ "MIT" ]
41
2016-02-25T18:38:12.000Z
2022-03-15T02:57:06.000Z
""" Tests the create trimmed model unit used by pyMARS """ import os import pkg_resources import pytest import cantera as ct from ..reduce_model import trim def relative_location(file): file_path = os.path.join(file) return pkg_resources.resource_filename(__name__, file_path) class TestTrim: def test_GRI_minus_three(self): """Tests removal of three species from GRI Mech 3.0. """ # Original model to remove things from initial_model = 'gri30.cti' # Create exclusion list for test case exclusion_list = ["CH4", "O2", "N2"] # Run trim unit reduced_model = trim(initial_model, exclusion_list, 'gri30.cti') # Expected answer expected_species_num = 50 expected_reactions_num = 237 # Make sure number matches what is expected assert reduced_model.n_species == expected_species_num assert reduced_model.n_reactions == expected_reactions_num # Make sure removed species are not included assert "CH4" not in reduced_model.species_names assert "O2" not in reduced_model.species_names assert "N2" not in reduced_model.species_names def test_GRI_minus_zero(self): """Tests removal of zero species from GRI Mech 3.0. """ # Original model to remove things from initial_model = 'gri30.cti' # Create exclusion list for test case exclusion_list = [] # Run trim unit reduced_model = trim(initial_model, exclusion_list, 'reduced_gri30.cti') # Expected answer expected_species_num = 53 expected_reactions_num = 325 # Make sure number matches what is expected assert reduced_model.n_species == expected_species_num assert reduced_model.n_reactions == expected_reactions_num assert reduced_model.name == 'reduced_gri30' def test_artificial_minus_one(self): """Test removing one species from artificial model. 
""" # Original model to remove things from initial_model = relative_location(os.path.join('assets', 'artificial-mechanism.cti')) # Create exclusion list for test case exclusion_list = ['H'] # Run trim unit reduced_model = trim(initial_model, exclusion_list, 'a-m.cti') # Expected answer expected_species_num = 3 expected_reactions_num = 1 # Make sure number matches what is expected assert reduced_model.n_species == expected_species_num assert reduced_model.n_reactions == expected_reactions_num # Make sure removed species are not included assert 'H' not in reduced_model.species_names for sp in exclusion_list: assert all([sp not in {**rxn.reactants, **rxn.products} for rxn in reduced_model.reactions()]) def testArtRemoveAll(self): """Test removing all four species in an artificial model. Raises exception because Cantera will not produce a Solution with no species/reactions. """ # Original model to remove things from initial_model = relative_location(os.path.join('assets', 'artificial-mechanism.cti')) # Create exclusion list for test case exclusion_list = ["H", "H2", "O2", "H2O"] with pytest.raises(ValueError): reduced_model = trim(initial_model, exclusion_list, "a-m.cti") def testArtRemoveInvalid(self): """Test removing species not present in model. """ # Original model to remove things from initial_model = relative_location(os.path.join('assets', 'artificial-mechanism.cti')) # Create exclusion list for test case exclusion_list = ['CH4'] # Run trim unit reduced_model = trim(initial_model, exclusion_list, 'a-m.cti') # Expected answer expected_species_num = 4 expected_reactions_num = 2 # Make sure number matches what is expected assert reduced_model.n_species == expected_species_num assert reduced_model.n_reactions == expected_reactions_num for sp in exclusion_list: assert all([sp not in {**rxn.reactants, **rxn.products} for rxn in reduced_model.reactions()]) def testArtRemoveInvalidAnd1(self): """Test removing mixture of species both in and not in artificial model. 
""" # Original model to remove things from initial_model = relative_location(os.path.join('assets', 'artificial-mechanism.cti')) # Create exclusion list for test case exclusion_list = ["H", "CH4"] # Run trim unit reduced_model = trim(initial_model, exclusion_list, "a-m.cti") # Expected answer expected_species_num = 3 expected_reactions_num = 1 # Make sure number matches what is expected assert reduced_model.n_species == expected_species_num assert reduced_model.n_reactions == expected_reactions_num # Make sure removed species are not included assert "H" not in reduced_model.species_names for sp in exclusion_list: assert all([sp not in {**rxn.reactants, **rxn.products} for rxn in reduced_model.reactions()]) def test_GRI_minus_10(self): """Test removing 10 species from GRI Mech 3.0 """ # Original model to remove things from initial_model = 'gri30.cti' # Create exclusion list for test case exclusion_list = ["CH4", "O2", "N2", "H", "OH", "H2O", "CH2", "CH3", "CO", "AR"] # Run trim unit reduced_model = trim(initial_model, exclusion_list, 'reduced_gri30.cti') # Expected answer expected_species_num = 43 expected_reactions_num = 14 # Make sure number matches what is expected assert reduced_model.n_species == expected_species_num assert reduced_model.n_reactions == expected_reactions_num # Make sure removed species are not included for sp in exclusion_list: assert sp not in reduced_model.species_names assert all([sp not in {**rxn.reactants, **rxn.products} for rxn in reduced_model.reactions()]) def test_remove_explicit_third_bodies(self): """Tests appropriate removal of reactions with explicit third body species. """ initial_model = relative_location(os.path.join('assets', 'model-third-bodies.cti')) reduced_model = trim(initial_model, ['ar', 'he'], 'test.cti') assert reduced_model.n_species == 4 assert reduced_model.n_reactions == 1
36.131148
106
0.660012
831
6,612
5.046931
0.158845
0.094421
0.064378
0.063424
0.768479
0.748927
0.735813
0.70267
0.692179
0.692179
0
0.013093
0.260738
6,612
182
107
36.32967
0.844926
0.26588
0
0.45122
0
0
0.068559
0.024816
0
0
0
0
0.304878
1
0.109756
false
0
0.060976
0
0.195122
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
83734422d52fcc66a3ec353f44ee1bb102e69dda
77
py
Python
gpcrawler/entrypoint.py
fst034356/crawler
98f0c1a129b3b5b77fe88971f4f0c6aae5a8964f
[ "MIT" ]
92
2017-03-06T06:32:49.000Z
2020-04-05T02:14:56.000Z
gpcrawler/entrypoint.py
GeraldDoubleJ/crawler
ab068b1b4d8d00336bb11cea0860244e0817e472
[ "MIT" ]
1
2018-06-03T10:38:26.000Z
2018-09-04T09:17:40.000Z
gpcrawler/entrypoint.py
GeraldDoubleJ/crawler
ab068b1b4d8d00336bb11cea0860244e0817e472
[ "MIT" ]
58
2017-04-09T11:18:40.000Z
2020-03-23T11:20:17.000Z
"""Entry point that launches the `googleplay` spider via Scrapy's CLI."""
from scrapy.cmdline import execute

if __name__ == '__main__':
    # Guard the side effect so importing this module does not start a crawl.
    # Equivalent to running `scrapy crawl googleplay` from the shell.
    execute(['scrapy', 'crawl', 'googleplay'])
38.5
42
0.753247
9
77
6.444444
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.077922
77
2
42
38.5
0.816901
0
0
0
0
0
0.269231
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
83a3a1112068729b380736efe81c6a6773f992fa
241
py
Python
pyleecan/Methods/Simulation/OPslip/get_slip.py
Eomys/Pyleecan
4d7f0cbabf0311006963e7a2f435db2ecd901118
[ "Apache-2.0" ]
4
2017-11-27T10:14:34.000Z
2018-09-20T11:30:32.000Z
pyleecan/Methods/Simulation/OPslip/get_slip.py
Eomys/Pyleecan
4d7f0cbabf0311006963e7a2f435db2ecd901118
[ "Apache-2.0" ]
null
null
null
pyleecan/Methods/Simulation/OPslip/get_slip.py
Eomys/Pyleecan
4d7f0cbabf0311006963e7a2f435db2ecd901118
[ "Apache-2.0" ]
null
null
null
def get_slip(self):
    """Return the rotor mechanical slip stored on this operating point.

    Parameters
    ----------
    self : OPslip
        An OPslip object.

    Returns
    -------
    slip : float
        Rotor mechanical slip.
    """
    slip = self.slip_ref
    return slip
15.0625
40
0.543568
25
241
5.16
0.6
0.232558
0.294574
0
0
0
0
0
0
0
0
0
0.3361
241
15
41
16.066667
0.80625
0.609959
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
83a618abd558f910856d16b1c240006ee4587c42
176
py
Python
Functions and modules/lambda_function.py
UgainJain/LearnPythonByDoing
4784c334d7f485223a29592ab47c6c017ec67145
[ "MIT" ]
5
2018-11-06T11:15:35.000Z
2020-07-29T21:54:28.000Z
Functions and modules/lambda_function.py
UgainJain/LearnPythonByDoing
4784c334d7f485223a29592ab47c6c017ec67145
[ "MIT" ]
1
2018-11-13T13:22:11.000Z
2018-11-13T13:22:11.000Z
Functions and modules/lambda_function.py
UgainJain/LearnPythonByDoing
4784c334d7f485223a29592ab47c6c017ec67145
[ "MIT" ]
11
2018-11-06T11:12:21.000Z
2019-07-12T11:43:05.000Z
def cube(a):
    """Return the cube of *a*."""
    # PEP 8 (E731): use a def instead of assigning a lambda to a name;
    # also drops the stray trailing semicolon from the original.
    return a * a * a


print("cube of 6 :", cube(6))
print("cube of 9 :", cube(9))
print("cube of 11 :", cube(11))
35.2
83
0.522727
27
176
3.407407
0.37037
0.065217
0.358696
0
0
0
0
0
0
0
0
0.065574
0.306818
176
5
84
35.2
0.688525
0.153409
0
0
0
0
0.234483
0
0
0
0
0
0
1
0
false
0
0
0
0
0.75
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
83ce5331863c9703df63c671f015a407cc6f3ce3
340
py
Python
Instagram_Web_Scraper/public_account.py
rahulnair502/Projects
168572d99bb4c266b3f57a325edd8e10bc2e8344
[ "MIT" ]
null
null
null
Instagram_Web_Scraper/public_account.py
rahulnair502/Projects
168572d99bb4c266b3f57a325edd8e10bc2e8344
[ "MIT" ]
null
null
null
Instagram_Web_Scraper/public_account.py
rahulnair502/Projects
168572d99bb4c266b3f57a325edd8e10bc2e8344
[ "MIT" ]
null
null
null
## used for accounts that are not yours
class Public_account:
    """Represents a public (non-owned) Instagram account."""

    def __init__(self, username):
        # Handle of the account whose page will be visited.
        self.username = username

    def insta_page2(self):
        """Print and open the account's profile page, then pause briefly."""
        url = f"https://www.instagram.com/{self.username}/?hl=en"
        print(url)
        # NOTE(review): `driver` is assumed to be a module-level Selenium
        # webdriver defined elsewhere in the original file — confirm.
        driver.get(url)
        # Randomized delay, presumably to look less bot-like.
        time.sleep(random.uniform(.75, 1.5))
34
71
0.658824
49
340
4.44898
0.673469
0.220183
0.082569
0.165138
0.33945
0.33945
0.33945
0.33945
0.33945
0
0
0.017986
0.182353
340
9
72
37.777778
0.766187
0.105882
0
0
0
0
0.317881
0
0
0
0
0
0
1
0.285714
false
0
0
0
0.428571
0.142857
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
83e047444793f7e0c02754f221c13cf66c3776bc
28
py
Python
data/studio21_generated/introductory/4208/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/4208/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/4208/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
def ipsubnet2list(subnet):
14
26
0.785714
3
28
7.333333
1
0
0
0
0
0
0
0
0
0
0
0.04
0.107143
28
2
27
14
0.84
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4