hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5b60c2073338e8261d845aeff4ff120ded2d4d2b
| 107
|
py
|
Python
|
twitter_nlp_toolkit/twitter-nlp-toolkit.py
|
Moe520/twitter-toolbox
|
5e4b7881923394392619a5ced22857772cccb08b
|
[
"MIT"
] | 25
|
2020-04-15T00:52:24.000Z
|
2021-08-23T14:53:28.000Z
|
twitter_nlp_toolkit/twitter-nlp-toolkit.py
|
Moe520/twitter-toolbox
|
5e4b7881923394392619a5ced22857772cccb08b
|
[
"MIT"
] | 3
|
2020-04-26T22:09:09.000Z
|
2022-03-26T21:20:32.000Z
|
twitter_nlp_toolkit/twitter-nlp-toolkit.py
|
Moe520/twitter-toolbox
|
5e4b7881923394392619a5ced22857772cccb08b
|
[
"MIT"
] | 3
|
2020-07-10T20:20:12.000Z
|
2021-07-26T12:04:27.000Z
|
class twitter_nlp_toolkit:
def __init__(self):
print("twitter_nlp_toolkit initialized")
| 21.4
| 48
| 0.691589
| 12
| 107
| 5.5
| 0.75
| 0.30303
| 0.515152
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233645
| 107
| 5
| 49
| 21.4
| 0.804878
| 0
| 0
| 0
| 0
| 0
| 0.287037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
5b8c97551e188eef494a74b02d0bdeb7ea77507f
| 90
|
py
|
Python
|
curso-py-coder/tipos/basicos.py
|
JoaoMvchvdo22/Python
|
4578f0d38c5f9849821a8999fb96f70a124a785a
|
[
"MIT"
] | null | null | null |
curso-py-coder/tipos/basicos.py
|
JoaoMvchvdo22/Python
|
4578f0d38c5f9849821a8999fb96f70a124a785a
|
[
"MIT"
] | null | null | null |
curso-py-coder/tipos/basicos.py
|
JoaoMvchvdo22/Python
|
4578f0d38c5f9849821a8999fb96f70a124a785a
|
[
"MIT"
] | null | null | null |
print(type(1))
print(type(1.1))
print(type('texto'))
print(type(False))
print(type(True))
| 15
| 20
| 0.688889
| 16
| 90
| 3.875
| 0.375
| 0.725806
| 0.322581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035294
| 0.055556
| 90
| 5
| 21
| 18
| 0.694118
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
5bb78f71032af6db965c86b2c448c49cc222924b
| 46
|
py
|
Python
|
B3LOB/__init__.py
|
efbrasil/B3LOB
|
228962f66f8b3118c1a6aac703960c7c21ace097
|
[
"MIT"
] | null | null | null |
B3LOB/__init__.py
|
efbrasil/B3LOB
|
228962f66f8b3118c1a6aac703960c7c21ace097
|
[
"MIT"
] | null | null | null |
B3LOB/__init__.py
|
efbrasil/B3LOB
|
228962f66f8b3118c1a6aac703960c7c21ace097
|
[
"MIT"
] | null | null | null |
from .lob import Lob
from .types import Order
| 15.333333
| 24
| 0.782609
| 8
| 46
| 4.5
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 25
| 23
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5bc825b838bc5587ec1bd5538e391a15106f38fd
| 40
|
py
|
Python
|
passenger_wsgi.py
|
divyesh1099/badboystyle
|
f4fec0858b43e14f0e1f173261f363d4262c28ea
|
[
"MIT"
] | null | null | null |
passenger_wsgi.py
|
divyesh1099/badboystyle
|
f4fec0858b43e14f0e1f173261f363d4262c28ea
|
[
"MIT"
] | null | null | null |
passenger_wsgi.py
|
divyesh1099/badboystyle
|
f4fec0858b43e14f0e1f173261f363d4262c28ea
|
[
"MIT"
] | null | null | null |
from badboystyle.wsgi import application
| 40
| 40
| 0.9
| 5
| 40
| 7.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 40
| 1
| 40
| 40
| 0.972973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5bf1ca819b42ca2e1c33074a014fada3d11925e1
| 83
|
py
|
Python
|
desktop/core/ext-py/nose-1.3.7/unit_tests/test_issue_064.py
|
kokosing/hue
|
2307f5379a35aae9be871e836432e6f45138b3d9
|
[
"Apache-2.0"
] | 5,079
|
2015-01-01T03:39:46.000Z
|
2022-03-31T07:38:22.000Z
|
desktop/core/ext-py/nose-1.3.7/unit_tests/test_issue_064.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 1,623
|
2015-01-01T08:06:24.000Z
|
2022-03-30T19:48:52.000Z
|
desktop/core/ext-py/nose-1.3.7/unit_tests/test_issue_064.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 2,033
|
2015-01-04T07:18:02.000Z
|
2022-03-28T19:55:47.000Z
|
def test_is_generator_alias():
from nose.util import is_generator, isgenerator
| 27.666667
| 51
| 0.807229
| 12
| 83
| 5.25
| 0.833333
| 0.349206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 83
| 2
| 52
| 41.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
f3ad4512465eb8a584f437af83d8ebca59cfc603
| 250
|
py
|
Python
|
Cometarios.py
|
FreyderUrbano/Python_Programas
|
8a11729d1148c319d8fa145ad18038cc7d63f0d9
|
[
"MIT"
] | null | null | null |
Cometarios.py
|
FreyderUrbano/Python_Programas
|
8a11729d1148c319d8fa145ad18038cc7d63f0d9
|
[
"MIT"
] | null | null | null |
Cometarios.py
|
FreyderUrbano/Python_Programas
|
8a11729d1148c319d8fa145ad18038cc7d63f0d9
|
[
"MIT"
] | null | null | null |
#TODOS LOS METODOS QUE EXISTEN PARA HACER COMENTARIOS EN PYTHON
#ESTE ES UN EJEMPLO DE COMENTARIO
print('COMETARIOS EN EL CODIGO')
"ESTE ES OTRO EJEMPLO DE COMO HACER COMENTARIOS"
'''HOLA
ESTO ES EJEMPLO DE COMO HACER
COMENTARIOS MULTILINEA
'''
| 17.857143
| 63
| 0.772
| 38
| 250
| 5.078947
| 0.657895
| 0.248705
| 0.134715
| 0.186529
| 0.300518
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172
| 250
| 13
| 64
| 19.230769
| 0.932367
| 0.376
| 0
| 0
| 0
| 0
| 0.784091
| 0
| 0
| 0
| 0
| 0.076923
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
f3bed3680391e83a7846bea24627fbeae018f29f
| 27
|
py
|
Python
|
deeploy/middlewares/__init__.py
|
ukor/deeploy
|
eb83ecffe671db19deccdb8d02204b208c87bb61
|
[
"MIT"
] | null | null | null |
deeploy/middlewares/__init__.py
|
ukor/deeploy
|
eb83ecffe671db19deccdb8d02204b208c87bb61
|
[
"MIT"
] | 12
|
2019-07-29T03:38:10.000Z
|
2019-08-16T03:08:14.000Z
|
deeploy/middlewares/__init__.py
|
ukor/deeploy
|
eb83ecffe671db19deccdb8d02204b208c87bb61
|
[
"MIT"
] | 1
|
2021-02-12T14:17:28.000Z
|
2021-02-12T14:17:28.000Z
|
from .json_parser import *
| 13.5
| 26
| 0.777778
| 4
| 27
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f3caec0a86a6f632435fc3426455e434f84899ae
| 31,072
|
py
|
Python
|
Excel/ictc_report_section_i_report.py
|
2amitprakash/Python_Codes
|
c7e915c0da87fb7438777be5945f81d4126fea42
|
[
"MIT"
] | null | null | null |
Excel/ictc_report_section_i_report.py
|
2amitprakash/Python_Codes
|
c7e915c0da87fb7438777be5945f81d4126fea42
|
[
"MIT"
] | null | null | null |
Excel/ictc_report_section_i_report.py
|
2amitprakash/Python_Codes
|
c7e915c0da87fb7438777be5945f81d4126fea42
|
[
"MIT"
] | null | null | null |
from openpyxl import Workbook
from openpyxl import load_workbook
import datetime
import common.connect_soch as conn
import pandas as pd
def fetch_data():
sql = 'Select \
table3."Received_Month", \
table3."Received_Year",\
SUM(table3."Number_of_individuals_received_pre-test_counseling/information")"Number_of_individuals_received_pre-test_counseling/information",\
SUM(table3."Number_of_individuals_receiving_post-test_counseling_and_given_results")"Number_of_individuals_receiving_post-test_counseling_and_given_results",\
SUM(table3."Number_of_individuals_with_High_Risk_Behavior_received_follow-up_counseling")"Number_of_individuals_with_High_Risk_Behavior_received_follow-up_counseling",\
SUM(table3."Number_of_individuals_tested_for_HIV")"Number_of_individuals_tested_for_HIV",\
SUM(table3."Number_of_individuals_received_result_within_7_days_of_HIV_Test")"Number_of_individuals_received_result_within_7_days_of_HIV_Test",\
SUM(table3."Number_of_HIV_positive_individuals_having_HIV-I_infection")"Number_of_HIV_positive_individuals_having_HIV-I_infection",\
SUM(table3."Number_of_HIV_positive_individuals_having_HIV-II_infection")"Number_of_HIV_positive_individuals_having_HIV-II_infection",\
SUM(table3."Number_of_HIV_positive_individuals_having_both_HIV-I_&_II_infections")"Number_of_HIV_positive_individuals_having_both_HIV-I_&_II_infections",\
SUM(table3."Number_of_individuals_tested_for_HIV_and_found_Negative")"Number_of_individuals_tested_for_HIV_and_found_Negative",\
SUM(table3."Number_of_Self-initiated_Individuals_tested_for_HIV")"Number_of_Self-initiated_Individuals_tested_for_HIV",\
SUM(table3."Number_of_Self-initiated_individuals_diagnosed_HIV_positive")"Number_of_Self-initiated_individuals_diagnosed_HIV_positive",\
SUM(table3."Number_of_provider_initiated_Individuals_tested_for_HIV")"Number_of_provider_initiated_Individuals_tested_for_HIV",\
SUM(table3."Number_of_provider_initiated_individuals_diagnosed_HIV_positive")"Number_of_provider_initiated_individuals_diagnosed_HIV_positive",\
SUM(table3."Total_number_of_individuals_turned_Indeterminate_for_HIV_at_SA_ICTC")"Total_number_of_individuals_turned_Indeterminate_for_HIV_at_SA_ICTC" \
from( \
select \
table2."SACS_ID" ,\
table2."SACS",\
table2."Received_Month",\
table2."Received_Year",\
SUM(table2."Number_of_individuals_received_pre-test_counseling/information")"Number_of_individuals_received_pre-test_counseling/information",\
SUM(table2."Number_of_individuals_receiving_post-test_counseling_and_given_results")"Number_of_individuals_receiving_post-test_counseling_and_given_results",\
SUM(table2."Number_of_individuals_with_High_Risk_Behavior_received_follow-up_counseling")"Number_of_individuals_with_High_Risk_Behavior_received_follow-up_counseling",\
SUM(table2."Number_of_individuals_tested_for_HIV")"Number_of_individuals_tested_for_HIV",\
SUM(table2."Number_of_individuals_received_result_within_7_days_of_HIV_Test")"Number_of_individuals_received_result_within_7_days_of_HIV_Test",\
SUM(table2."Number_of_HIV_positive_individuals_having_HIV-I_infection")"Number_of_HIV_positive_individuals_having_HIV-I_infection",\
SUM(table2."Number_of_HIV_positive_individuals_having_HIV-II_infection")"Number_of_HIV_positive_individuals_having_HIV-II_infection",\
SUM(table2."Number_of_HIV_positive_individuals_having_both_HIV-I_&_II_infections")"Number_of_HIV_positive_individuals_having_both_HIV-I_&_II_infections",\
SUM(table2."Number_of_individuals_tested_for_HIV_and_found_Negative")"Number_of_individuals_tested_for_HIV_and_found_Negative",\
SUM(table2."Number_of_Self-initiated_Individuals_tested_for_HIV")"Number_of_Self-initiated_Individuals_tested_for_HIV",\
SUM(table2."Number_of_Self-initiated_individuals_diagnosed_HIV_positive")"Number_of_Self-initiated_individuals_diagnosed_HIV_positive",\
SUM(table2."Number_of_provider_initiated_Individuals_tested_for_HIV")"Number_of_provider_initiated_Individuals_tested_for_HIV",\
SUM(table2."Number_of_provider_initiated_individuals_diagnosed_HIV_positive")"Number_of_provider_initiated_individuals_diagnosed_HIV_positive",\
SUM(table2."Total_number_of_individuals_turned_Indeterminate_for_HIV_at_SA_ICTC")"Total_number_of_individuals_turned_Indeterminate_for_HIV_at_SA_ICTC"\
\
from\
(\
Select \
T2.ID,\
T2."SACS",\
T2."ICTC_center",\
T2."SACS_ID", \
CASE WHEN T2."Received_Month" = 1 THEN '"'January'"' \
WHEN T2."Received_Month" = 2 THEN '"'February'"' \
WHEN T2."Received_Month" = 3 THEN '"'March'"' \
WHEN T2."Received_Month" = 4 THEN '"'April'"' \
WHEN T2."Received_Month" = 5 THEN '"'May'"' \
WHEN T2."Received_Month" = 6 THEN '"'June'"' \
WHEN T2."Received_Month" = 7 THEN '"'July'"' \
WHEN T2."Received_Month" = 8 THEN '"'August'"' \
WHEN T2."Received_Month" = 9 THEN '"'September'"' \
WHEN T2."Received_Month" = 10 THEN '"'October'"' \
WHEN T2."Received_Month" = 11 THEN '"'November'"' \
WHEN T2."Received_Month" = 12 THEN '"'December'"' \
END as "Received_Month",\
T2."Received_Year",\
SUM(T2."Number_of_individuals_received_pre-test_counseling/information")"Number_of_individuals_received_pre-test_counseling/information",\
SUM(T2."Number_of_individuals_receiving_post-test_counseling_and_given_results")"Number_of_individuals_receiving_post-test_counseling_and_given_results",\
SUM(T2."Number_of_individuals_with_High_Risk_Behavior_received_follow-up_counseling")"Number_of_individuals_with_High_Risk_Behavior_received_follow-up_counseling",\
SUM(T3."Number_of_individuals_tested_for_HIV")"Number_of_individuals_tested_for_HIV",\
SUM(T4."Number_of_individuals_received_result_within_7_days_of_HIV_Test")"Number_of_individuals_received_result_within_7_days_of_HIV_Test",\
SUM(T5."Number_of_HIV_positive_individuals_having_HIV-I_infection")"Number_of_HIV_positive_individuals_having_HIV-I_infection",\
SUM(T5."Number_of_HIV_positive_individuals_having_HIV-II_infection")"Number_of_HIV_positive_individuals_having_HIV-II_infection",\
SUM(T5."Number_of_HIV_positive_individuals_having_both_HIV-I_&_II_infections")"Number_of_HIV_positive_individuals_having_both_HIV-I_&_II_infections",\
SUM(T6."Number_of_individuals_tested_for_HIV_and_found_Negative")"Number_of_individuals_tested_for_HIV_and_found_Negative",\
SUM(T7."Number_of_Self-initiated_Individuals_tested_for_HIV")"Number_of_Self-initiated_Individuals_tested_for_HIV",\
SUM(T8."Number_of_Self-initiated_individuals_diagnosed_HIV_positive")"Number_of_Self-initiated_individuals_diagnosed_HIV_positive",\
SUM(T9."Number_of_provider_initiated_Individuals_tested_for_HIV")"Number_of_provider_initiated_Individuals_tested_for_HIV",\
SUM(T10."Number_of_provider_initiated_individuals_diagnosed_HIV_positive")"Number_of_provider_initiated_individuals_diagnosed_HIV_positive",\
SUM(T11."Total_number_of_individuals_turned_Indeterminate_for_HIV_at_SA_ICTC")"Total_number_of_individuals_turned_Indeterminate_for_HIV_at_SA_ICTC"\
from(\
select \
f.ID, \
f_sacs.name as "SACS",\
f.name as "ICTC_center",\
f_sacs.id as "SACS_ID",\
case When iv.BENEFICIARY_STATUS=1 Then \
(cast(count(iben.BENEFICIARY_ID)as numeric)) Else 0 End as "Number_of_individuals_received_pre-test_counseling/information",\
case When iv.BENEFICIARY_STATUS=4 Then \
(cast(count(iben.BENEFICIARY_ID)as numeric)) Else 0 End as "Number_of_individuals_receiving_post-test_counseling_and_given_results",\
case When iv.BENEFICIARY_STATUS=5 Then \
(cast(count(iben.BENEFICIARY_ID)as numeric)) Else 0 End as "Number_of_individuals_with_High_Risk_Behavior_received_follow-up_counseling",\
\
extract(month from iben.registration_date) as "Received_Month",\
extract(year from iben.registration_date) as "Received_Year"\
FROM ICTC_BENEFICIARY as iben \
JOIN BENEFICIARY as b on (iben.BENEFICIARY_ID = b.ID)\
JOIN ICTC_SAMPLE_COLLECTION as isc on (iben.ID = isc.ICTC_BENEFICIARY_ID)\
JOIN FACILITY as f on (iben.FACILITY_ID = f.ID)\
JOIN FACILITY as f_sacs on (f_sacs.id=f.sacs_id)\
JOIN ICTC_VISIT as iv on (isc.VISIT_ID = iv.ID)\
JOIN FACILITY_TYPE as ft on (f.FACILITY_TYPE_ID = ft.ID)\
JOIN ICTC_TEST_RESULT as itr on (iv.ID = itr.VISIT_ID)\
where f.facility_type_id in (11,13) and f_sacs.facility_type_id in (2) and \
iv.BENEFICIARY_STATUS in (1,4,5)\
and iv.IS_PREGNANT = '"'true'"'\
and b.gender in ('"'female'"')\
and iben.is_active = '"'true'"'\
and b.is_active = '"'true'"' \
and isc.is_active = '"'true'"' \
and f.is_active = '"'true'"' \
and f_sacs.is_active = '"'true'"' \
and iv.is_active = '"'true'"' \
and itr.is_active = '"'true'"' \
and ft.is_active = '"'true'"' \
group by\
f.id, b.gender,f_sacs.name,f_sacs.id,\
f.name,iv.BENEFICIARY_STATUS,\
extract(month from iben.registration_date),\
extract(year from iben.registration_date))T2\
\
full outer join(\
select \
f.ID, \
f_sacs.name as "SACS",\
f.name as "ICTC_center",\
f_sacs.id as "SACS_ID",\
cast(count(iben.BENEFICIARY_ID)as numeric) as "Number_of_individuals_tested_for_HIV",\
extract(month from iben.registration_date) as "Received_Month",\
extract(year from iben.registration_date) as "Received_Year"\
FROM ICTC_BENEFICIARY as iben \
JOIN BENEFICIARY as b on (iben.BENEFICIARY_ID = b.ID)\
JOIN ICTC_SAMPLE_COLLECTION as isc on (iben.ID = isc.ICTC_BENEFICIARY_ID)\
JOIN FACILITY as f on (iben.FACILITY_ID = f.ID)\
JOIN FACILITY as f_sacs on (f_sacs.id=f.sacs_id)\
JOIN ICTC_VISIT as iv on (isc.VISIT_ID = iv.ID)\
JOIN FACILITY_TYPE as ft on (f.FACILITY_TYPE_ID = ft.ID)\
JOIN ICTC_TEST_RESULT as itr on (iv.ID = itr.VISIT_ID)\
where f.facility_type_id in (11,13) and f_sacs.facility_type_id in (2) and itr.tested_date is not null\
and iv.IS_PREGNANT = '"'true'"'\
and b.gender in ('"'female'"')\
and iben.is_active = '"'true'"'\
and b.is_active = '"'true'"' \
and isc.is_active = '"'true'"'\
and f.is_active = '"'true'"'\
and f_sacs.is_active = '"'true'"'\
and iv.is_active = '"'true'"' \
and itr.is_active = '"'true'"'\
and ft.is_active = '"'true'"'\
group by\
f.id,\
b.gender,f_sacs.name,f_sacs.id,\
f.name,iv.BENEFICIARY_STATUS,\
extract(month from iben.registration_date),\
extract(year from iben.registration_date))T3 on (T2.ID=T3.ID and T2."SACS_ID"=T3."SACS_ID" and T2."Received_Month"=T3."Received_Month" and T2."Received_Year"=T3."Received_Year")\
full outer join \
(select \
f.ID, \
f_sacs.name as "SACS",\
f.name as "ICTC_center",\
f_sacs.id as "SACS_ID",\
cast(count(iben.BENEFICIARY_ID)as numeric)as "Number_of_individuals_received_result_within_7_days_of_HIV_Test",\
extract(month from iben.registration_date) as "Received_Month",\
extract(year from iben.registration_date) as "Received_Year"\
FROM ICTC_BENEFICIARY as iben \
JOIN BENEFICIARY as b on (iben.BENEFICIARY_ID = b.ID)\
JOIN ICTC_SAMPLE_COLLECTION as isc on (iben.ID = isc.ICTC_BENEFICIARY_ID)\
JOIN FACILITY as f on (iben.FACILITY_ID = f.ID)\
JOIN FACILITY as f_sacs on (f_sacs.id=f.sacs_id)\
JOIN ICTC_VISIT as iv on (isc.VISIT_ID = iv.ID)\
JOIN FACILITY_TYPE as ft on (f.FACILITY_TYPE_ID = ft.ID)\
JOIN ICTC_TEST_RESULT as itr on (iv.ID = itr.VISIT_ID)\
where f.facility_type_id in (11,13) and f_sacs.facility_type_id in (2) and itr.tested_date is not null\
and iv.IS_PREGNANT = '"'true'"'\
and b.gender in ('"'female'"')\
and cast((cast(isc.sample_collection_date AS DATE) - cast(itr.report_received_date AS DATE))day as numeric) <=7\
and iben.is_active = '"'true'"'\
and b.is_active = '"'true'"' \
and isc.is_active = '"'true'"'\
and f.is_active = '"'true'"'\
and f_sacs.is_active = '"'true'"'\
and iv.is_active = '"'true'"' \
and itr.is_active = '"'true'"'\
and ft.is_active = '"'true'"'\
group by\
f.id,b.gender,\
f.name,iv.BENEFICIARY_STATUS,f_sacs.name,f_sacs.id,\
extract(month from iben.registration_date),\
extract(year from iben.registration_date),isc.sample_collection_date,\
itr.report_received_date\
)T4 on (T2.ID=T4.ID and T2."SACS_ID"=T4."SACS_ID" and T2."Received_Month"=T4."Received_Month" and T2."Received_Year"=T4."Received_Year")\
\
full outer join (\
select \
f.ID, \
f_sacs.name as "SACS",\
f.name as "ICTC_center",\
f_sacs.id as "SACS_ID",\
\
Case When itr.hiv_type=1 then (cast(count(iben.BENEFICIARY_ID)as numeric))Else 0 End as "Number_of_HIV_positive_individuals_having_HIV-I_infection",\
Case When itr.hiv_type=2 then (cast(count(iben.BENEFICIARY_ID)as numeric))Else 0 End as "Number_of_HIV_positive_individuals_having_HIV-II_infection",\
Case When itr.hiv_type=3 then (cast(count(iben.BENEFICIARY_ID)as numeric))Else 0 End as "Number_of_HIV_positive_individuals_having_both_HIV-I_&_II_infections",\
\
extract(month from iben.registration_date) as "Received_Month",\
extract(year from iben.registration_date) as "Received_Year"\
FROM ICTC_BENEFICIARY as iben \
JOIN BENEFICIARY as b on (iben.BENEFICIARY_ID = b.ID)\
JOIN ICTC_SAMPLE_COLLECTION as isc on (iben.ID = isc.ICTC_BENEFICIARY_ID)\
JOIN FACILITY as f on (iben.FACILITY_ID = f.ID)\
JOIN FACILITY as f_sacs on (f_sacs.id=f.sacs_id)\
JOIN ICTC_VISIT as iv on (isc.VISIT_ID = iv.ID)\
JOIN FACILITY_TYPE as ft on (f.FACILITY_TYPE_ID = ft.ID)\
JOIN ICTC_TEST_RESULT as itr on (iv.ID = itr.VISIT_ID)\
where f.facility_type_id in (11,13) and f_sacs.facility_type_id in (2) and itr.hiv_type in (1,2,3)\
and iv.IS_PREGNANT = '"'true'"'\
and b.gender in ('"'female'"')\
and iben.is_active = '"'true'"'\
and b.is_active = '"'true'"' \
and isc.is_active = '"'true'"'\
and f.is_active = '"'true'"'\
and f_sacs.is_active = '"'true'"'\
and iv.is_active = '"'true'"' \
and itr.is_active = '"'true'"'\
and ft.is_active = '"'true'"'\
group by \
f.id,b.gender,f_sacs.name,f_sacs.id,\
f.name,iv.BENEFICIARY_STATUS,\
extract(month from iben.registration_date),\
extract(year from iben.registration_date),itr.hiv_type)T5 on (T2.ID=T5.ID and T2."SACS_ID"=T5."SACS_ID" and T2."Received_Month"=T5."Received_Month" and T2."Received_Year"=T5."Received_Year")\
\
full outer join \
(select \
f.ID, \
f_sacs.name as "SACS",\
f.name as "ICTC_center",\
f_sacs.id as "SACS_ID",\
Case When itr.hiv_status=1 then (cast(count(iben.BENEFICIARY_ID)as numeric))Else 0 End as "Number_of_individuals_tested_for_HIV_and_found_Negative",\
extract(month from iben.registration_date) as "Received_Month",\
extract(year from iben.registration_date) as "Received_Year"\
FROM ICTC_BENEFICIARY as iben \
JOIN BENEFICIARY as b on (iben.BENEFICIARY_ID = b.ID)\
JOIN ICTC_SAMPLE_COLLECTION as isc on (iben.ID = isc.ICTC_BENEFICIARY_ID)\
JOIN FACILITY as f on (iben.FACILITY_ID = f.ID)\
JOIN FACILITY as f_sacs on (f_sacs.id=f.sacs_id)\
JOIN ICTC_VISIT as iv on (isc.VISIT_ID = iv.ID)\
JOIN FACILITY_TYPE as ft on (f.FACILITY_TYPE_ID = ft.ID)\
JOIN ICTC_TEST_RESULT as itr on (iv.ID = itr.VISIT_ID)\
where f.facility_type_id in (11,13) and f_sacs.facility_type_id in (2) and itr.hiv_status in (1)\
and iv.IS_PREGNANT = '"'true'"'\
and b.gender in ('"'female'"')\
and iben.is_active = '"'true'"'\
and b.is_active = '"'true'"' \
and isc.is_active = '"'true'"'\
and f.is_active = '"'true'"'\
and f_sacs.is_active = '"'true'"'\
and iv.is_active = '"'true'"' \
and itr.is_active = '"'true'"'\
and ft.is_active = '"'true'"'\
group by\
f.id,itr.hiv_status,b.gender,\
f.name,iv.BENEFICIARY_STATUS,f_sacs.name,f_sacs.id,\
extract(month from iben.registration_date),\
extract(year from iben.registration_date),itr.hiv_type)T6 on (T2.ID=T6.ID and T2."SACS_ID"=T6."SACS_ID" and T2."Received_Month"=T6."Received_Month" and T2."Received_Year"=T6."Received_Year")\
\
full outer join (\
select \
f.ID, \
f_sacs.name as "SACS",\
f.name as "ICTC_center",\
f_sacs.id as "SACS_ID",\
cast(count(iben.BENEFICIARY_ID)as numeric) as "Number_of_Self-initiated_Individuals_tested_for_HIV",\
extract(month from iben.registration_date) as "Received_Month",\
extract(year from iben.registration_date) as "Received_Year"\
FROM ICTC_BENEFICIARY as iben \
JOIN BENEFICIARY as b on (iben.BENEFICIARY_ID = b.ID)\
JOIN ICTC_SAMPLE_COLLECTION as isc on (iben.ID = isc.ICTC_BENEFICIARY_ID)\
JOIN FACILITY as f on (iben.FACILITY_ID = f.ID)\
JOIN FACILITY as f_sacs on (f_sacs.id=f.sacs_id)\
JOIN ICTC_VISIT as iv on (isc.VISIT_ID = iv.ID)\
JOIN FACILITY_TYPE as ft on (f.FACILITY_TYPE_ID = ft.ID)\
JOIN ICTC_TEST_RESULT as itr on (iv.ID = itr.VISIT_ID)\
where f.facility_type_id in (11,13) and f_sacs.facility_type_id in (2) and iben.referred_by is null\
and iv.IS_PREGNANT = '"'true'"'\
and b.gender in ('"'female'"')\
and iben.is_active = '"'true'"'\
and b.is_active = '"'true'"' \
and isc.is_active = '"'true'"'\
and f.is_active = '"'true'"'\
and f_sacs.is_active = '"'true'"'\
and iv.is_active = '"'true'"' \
and itr.is_active = '"'true'"'\
and ft.is_active = '"'true'"' \
group by\
f.id,b.gender,\
f.name,iv.BENEFICIARY_STATUS,f_sacs.name,f_sacs.id,\
extract(month from iben.registration_date),\
extract(year from iben.registration_date),itr.hiv_type)T7 on (T2.ID=T7.ID and T2."SACS_ID"=T7."SACS_ID" and T2."Received_Month"=T7."Received_Month" and T2."Received_Year"=T7."Received_Year")\
\
full outer join (select \
f.ID, \
f_sacs.name as "SACS",\
f.name as "ICTC_center",\
f_sacs.id as "SACS_ID",\
cast(count(iben.BENEFICIARY_ID)as numeric) as "Number_of_Self-initiated_individuals_diagnosed_HIV_positive",\
extract(month from iben.registration_date) as "Received_Month",\
extract(year from iben.registration_date) as "Received_Year"\
FROM ICTC_BENEFICIARY as iben \
JOIN BENEFICIARY as b on (iben.BENEFICIARY_ID = b.ID)\
JOIN ICTC_SAMPLE_COLLECTION as isc on (iben.ID = isc.ICTC_BENEFICIARY_ID)\
JOIN FACILITY as f on (iben.FACILITY_ID = f.ID)\
JOIN FACILITY as f_sacs on (f_sacs.id=f.sacs_id)\
JOIN ICTC_VISIT as iv on (isc.VISIT_ID = iv.ID)\
JOIN FACILITY_TYPE as ft on (f.FACILITY_TYPE_ID = ft.ID)\
JOIN ICTC_TEST_RESULT as itr on (iv.ID = itr.VISIT_ID)\
where f.facility_type_id in (11,13) and f_sacs.facility_type_id in (2) and iben.referred_by is null and itr.hiv_status in (1)\
and iv.IS_PREGNANT = '"'true'"'\
and b.gender in ('"'female'"')\
and iben.is_active = '"'true'"'\
and b.is_active = '"'true'"' \
and isc.is_active = '"'true'"'\
and f.is_active = '"'true'"'\
and f_sacs.is_active = '"'true'"'\
and iv.is_active = '"'true'"' \
and itr.is_active = '"'true'"'\
and ft.is_active = '"'true'"'\
group by\
f.id,b.gender,\
f.name,iv.BENEFICIARY_STATUS,f_sacs.name,f_sacs.id,\
extract(month from iben.registration_date),\
extract(year from iben.registration_date),itr.hiv_type)T8 on (T2.ID=T8.ID and T2."SACS_ID"=T8."SACS_ID" and T2."Received_Month"=T8."Received_Month" and T2."Received_Year"=T8."Received_Year")\
\
full outer join \
(select \
f.ID, \
f_sacs.name as "SACS",\
f.name as "ICTC_center",\
f_sacs.id as "SACS_ID",\
cast(count(iben.BENEFICIARY_ID)as numeric) as "Number_of_provider_initiated_Individuals_tested_for_HIV",\
extract(month from iben.registration_date) as "Received_Month",\
extract(year from iben.registration_date) as "Received_Year"\
FROM ICTC_BENEFICIARY as iben \
JOIN BENEFICIARY as b on (iben.BENEFICIARY_ID = b.ID)\
JOIN ICTC_SAMPLE_COLLECTION as isc on (iben.ID = isc.ICTC_BENEFICIARY_ID)\
JOIN FACILITY as f on (iben.FACILITY_ID = f.ID)\
JOIN FACILITY as f_sacs on (f_sacs.id=f.sacs_id)\
JOIN ICTC_VISIT as iv on (isc.VISIT_ID = iv.ID)\
JOIN FACILITY_TYPE as ft on (f.FACILITY_TYPE_ID = ft.ID)\
JOIN ICTC_TEST_RESULT as itr on (iv.ID = itr.VISIT_ID)\
where f.facility_type_id in (11,13) and f_sacs.facility_type_id in (2) and iben.referred_by is not null\
and iv.IS_PREGNANT = '"'true'"'\
and b.gender in ('"'female'"')\
and iben.is_active = '"'true'"'\
and b.is_active = '"'true'"' \
and isc.is_active = '"'true'"'\
and f.is_active = '"'true'"'\
and f_sacs.is_active = '"'true'"'\
and iv.is_active = '"'true'"' \
and itr.is_active = '"'true'"'\
and ft.is_active = '"'true'"'\
group by\
f.id,b.gender,\
f.name,iv.BENEFICIARY_STATUS,f_sacs.name,f_sacs.id,\
extract(month from iben.registration_date),\
extract(year from iben.registration_date),itr.hiv_type)T9 on (T2.ID=T9.ID and T2."SACS_ID"=T9."SACS_ID" and T2."Received_Month"=T9."Received_Month" and T2."Received_Year"=T9."Received_Year")\
full outer join \
(select \
f.ID, \
f_sacs.name as "SACS",\
f.name as "ICTC_center",\
f_sacs.id as "SACS_ID",\
cast(count(iben.BENEFICIARY_ID)as numeric) as "Number_of_provider_initiated_individuals_diagnosed_HIV_positive",\
extract(month from iben.registration_date) as "Received_Month",\
extract(year from iben.registration_date) as "Received_Year"\
FROM ICTC_BENEFICIARY as iben \
JOIN BENEFICIARY as b on (iben.BENEFICIARY_ID = b.ID)\
JOIN ICTC_SAMPLE_COLLECTION as isc on (iben.ID = isc.ICTC_BENEFICIARY_ID)\
JOIN FACILITY as f on (iben.FACILITY_ID = f.ID)\
JOIN FACILITY as f_sacs on (f_sacs.id=f.sacs_id)\
JOIN ICTC_VISIT as iv on (isc.VISIT_ID = iv.ID)\
JOIN FACILITY_TYPE as ft on (f.FACILITY_TYPE_ID = ft.ID)\
JOIN ICTC_TEST_RESULT as itr on (iv.ID = itr.VISIT_ID)\
where f.facility_type_id in (11,13) and f_sacs.facility_type_id in (2) and iben.referred_by is not null and itr.hiv_status in (1)\
and iv.IS_PREGNANT = '"'true'"'\
and b.gender in ('"'female'"')\
and iben.is_active = '"'true'"'\
and b.is_active = '"'true'"' \
and isc.is_active = '"'true'"'\
and f.is_active = '"'true'"'\
and f_sacs.is_active = '"'true'"'\
and iv.is_active = '"'true'"' \
and itr.is_active = '"'true'"'\
and ft.is_active = '"'true'"'\
group by\
f.id,b.gender,\
f.name,iv.BENEFICIARY_STATUS,f_sacs.name,f_sacs.id,\
extract(month from iben.registration_date),\
extract(year from iben.registration_date),itr.hiv_type)T10 on (T2.ID=T10.ID and T2."SACS_ID"=T10."SACS_ID" and T2."Received_Month"=T10."Received_Month" and T2."Received_Year"=T10."Received_Year")\
full outer join \
(select \
f.ID, \
f_sacs.name as "SACS",\
f.name as "ICTC_center",\
f_sacs.id as "SACS_ID",\
cast(count(iben.BENEFICIARY_ID)as numeric) as "Total_number_of_individuals_turned_Indeterminate_for_HIV_at_SA_ICTC",\
extract(month from iben.registration_date) as "Received_Month",\
extract(year from iben.registration_date) as "Received_Year"\
FROM ICTC_BENEFICIARY as iben \
JOIN BENEFICIARY as b on (iben.BENEFICIARY_ID = b.ID)\
JOIN ICTC_SAMPLE_COLLECTION as isc on (iben.ID = isc.ICTC_BENEFICIARY_ID)\
JOIN FACILITY as f on (iben.FACILITY_ID = f.ID)\
JOIN FACILITY as f_sacs on (f_sacs.id=f.sacs_id)\
JOIN ICTC_VISIT as iv on (isc.VISIT_ID = iv.ID)\
JOIN FACILITY_TYPE as ft on (f.FACILITY_TYPE_ID = ft.ID)\
JOIN ICTC_TEST_RESULT as itr on (iv.ID = itr.VISIT_ID)\
where f.facility_type_id in (10,11,13) and f_sacs.facility_type_id in (2) and itr.hiv_status in (3) \
and iv.IS_PREGNANT = '"'true'"'\
and b.gender in ('"'female'"')\
and iben.is_active = '"'true'"'\
and b.is_active = '"'true'"' \
and isc.is_active = '"'true'"'\
and f.is_active = '"'true'"'\
and f_sacs.is_active = '"'true'"'\
and iv.is_active = '"'true'"' \
and itr.is_active = '"'true'"'\
and ft.is_active = '"'true'"'\
group by\
f.id,b.gender,\
f.name,iv.BENEFICIARY_STATUS,f_sacs.name,f_sacs.id,\
extract(month from iben.registration_date),\
extract(year from iben.registration_date),itr.hiv_type)T11 \
on (T2.ID=T11.ID and T2."SACS_ID"=T11."SACS_ID" and T2."Received_Month"=T11."Received_Month" and T2."Received_Year"=T11."Received_Year")\
group by \
T2.ID,\
T2."SACS",\
T2."ICTC_center",\
T2."SACS_ID",\
T2."Received_Month",\
T2."Received_Year"\
\
)table2\
group by\
table2."SACS_ID" ,\
table2."SACS",\
table2."Received_Month", \
table2."Received_Year" \
)table3\
group by \
\
table3."Received_Month", \
table3."Received_Year"'
#Execute query
xl_df = pd.read_sql(sql, conn.connect())
return xl_df
def create_report():
    """Create the ICTC section-I (national, pregnant) Excel report.

    Pulls the aggregated indicator dataframe from ``fetch_data`` and writes
    each indicator into its cell in the section-I template workbook, then
    saves a year/month-prefixed copy under ``reports``.
    """
    # Get dataframe -- aggregated by Received_Month/Received_Year in SQL.
    df = fetch_data()
    # Start by opening the spreadsheet and selecting the main sheet.
    workbook = load_workbook(filename='templates\\ictc_report_section_i_template.xlsx')
    sheet = workbook.active
    # Check if DF is empty
    if df.empty:
        print('DataFrame is empty!')
    else:
        print(df)
        # Template cell -> dataframe column.  The scalar is extracted below
        # with .iloc[0]: openpyxl cannot store a pandas Series in a cell
        # (the original assigned the whole Series, which raises ValueError).
        cell_to_column = {
            "H9": 'Number_of_individuals_received_pre-test_counseling/information',
            "H10": 'Number_of_individuals_tested_for_HIV',
            "H11": 'Number_of_individuals_receiving_post-test_counseling_and_given_results',
            "H12": 'Number_of_individuals_received_result_within_7_days_of_HIV_Test',
            "H14": 'Number_of_HIV_positive_individuals_having_HIV-I_infection',
            "H15": 'Number_of_HIV_positive_individuals_having_HIV-II_infection',
            "H16": 'Number_of_HIV_positive_individuals_having_both_HIV-I_&_II_infections',
            "H17": 'Number_of_individuals_tested_for_HIV_and_found_Negative',
            "H18": 'Number_of_individuals_with_High_Risk_Behavior_received_follow-up_counseling',
            "H19": 'Number_of_Self-initiated_Individuals_tested_for_HIV',
            "H20": 'Number_of_Self-initiated_individuals_diagnosed_HIV_positive',
            "H21": 'Number_of_provider_initiated_Individuals_tested_for_HIV',
            "H22": 'Number_of_provider_initiated_individuals_diagnosed_HIV_positive',
            # NOTE(review): "H123" looks like a typo for "H23" -- kept as in
            # the original; confirm against the template layout.
            "H123": 'Total_number_of_individuals_turned_Indeterminate_for_HIV_at_SA_ICTC',
        }
        sheet["H13"] = 0  # once mapping available, update it.
        for cell, column in cell_to_column.items():
            sheet[cell] = df[column].iloc[0]
    # Save the spreadsheet with a YYYY_Mon_ prefix, e.g. 2024_Jan_.
    now = datetime.datetime.now()
    pref = now.strftime('%Y_%b_')
    workbook.save(filename='reports\\ictc_report_' + pref + '_section_i national pregnant.xlsx')
    print('*** Excel report created.')


# Test the Function
if __name__ == "__main__":
    create_report()
| 63.154472
| 209
| 0.628379
| 4,211
| 31,072
| 4.289717
| 0.050819
| 0.049601
| 0.053144
| 0.058127
| 0.914914
| 0.899192
| 0.863651
| 0.85961
| 0.841287
| 0.826838
| 0
| 0.013251
| 0.26165
| 31,072
| 491
| 210
| 63.283096
| 0.774126
| 0.007177
| 0
| 0.664583
| 0
| 0.054167
| 0.284655
| 0.22342
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004167
| false
| 0
| 0.010417
| 0
| 0.016667
| 0.00625
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
45fdf713e879afa97bc2678c0e2df6a54ce1b9fb
| 71
|
py
|
Python
|
pyssian/tests/__init__.py
|
maserasgroup-repo/pyssian
|
b80b216e24391db807a09518a237f94fa894a648
|
[
"MIT"
] | 15
|
2021-07-07T16:25:26.000Z
|
2022-03-19T11:58:23.000Z
|
pyssian/tests/__init__.py
|
maserasgroup-repo/pyssian
|
b80b216e24391db807a09518a237f94fa894a648
|
[
"MIT"
] | 9
|
2021-07-28T22:09:07.000Z
|
2022-01-17T08:03:15.000Z
|
pyssian/tests/__init__.py
|
maserasgroup-repo/pyssian
|
b80b216e24391db807a09518a237f94fa894a648
|
[
"MIT"
] | null | null | null |
from .test_chemistryutils import *
from .test_linkjobparsers import *
| 23.666667
| 35
| 0.816901
| 8
| 71
| 7
| 0.625
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126761
| 71
| 2
| 36
| 35.5
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
341102a2a0f534d0559abd38574d973c63b2f2cf
| 32
|
py
|
Python
|
src/task/__init__.py
|
qihongl/dlstm-demo
|
1f2edc02708d1226224c76396548b40caafef76e
|
[
"MIT"
] | 30
|
2019-06-14T11:22:42.000Z
|
2022-03-06T11:11:22.000Z
|
src/task/__init__.py
|
qihongl/dlstm-demo
|
1f2edc02708d1226224c76396548b40caafef76e
|
[
"MIT"
] | 4
|
2019-04-23T22:08:25.000Z
|
2021-11-18T16:41:59.000Z
|
src/task/__init__.py
|
qihongl/dlstm-demo
|
1f2edc02708d1226224c76396548b40caafef76e
|
[
"MIT"
] | 8
|
2019-10-23T20:44:31.000Z
|
2022-03-23T15:36:07.000Z
|
from .ContextualChoice import *
| 16
| 31
| 0.8125
| 3
| 32
| 8.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
342609da2a0d7d678b017b44a76a2d810ab489bc
| 4,850
|
py
|
Python
|
tests/test_op_naming_convention.py
|
limodou/uliweb-alembic
|
177ff67441a0f07d464df196caa43f894a0bd6aa
|
[
"MIT"
] | null | null | null |
tests/test_op_naming_convention.py
|
limodou/uliweb-alembic
|
177ff67441a0f07d464df196caa43f894a0bd6aa
|
[
"MIT"
] | null | null | null |
tests/test_op_naming_convention.py
|
limodou/uliweb-alembic
|
177ff67441a0f07d464df196caa43f894a0bd6aa
|
[
"MIT"
] | null | null | null |
from sqlalchemy import Integer, Column, ForeignKey, \
Table, String, Boolean, MetaData, CheckConstraint
from sqlalchemy.sql import column, func, text
from sqlalchemy import event
from alembic import op
from . import op_fixture, assert_raises_message, requires_094
@requires_094
def test_add_check_constraint():
    """An explicitly named CHECK constraint is run through the "ck" template."""
    context = op_fixture(naming_convention={
        "ck": "ck_%(table_name)s_%(constraint_name)s"
    })
    op.create_check_constraint(
        "foo",
        "user_table",
        func.len(column('name')) > 5
    )
    # The given name "foo" is expanded to ck_user_table_foo.
    context.assert_(
        "ALTER TABLE user_table ADD CONSTRAINT ck_user_table_foo "
        "CHECK (len(name) > 5)"
    )
@requires_094
def test_add_check_constraint_name_is_none():
    """name=None still works when the "ck" template needs no name token."""
    context = op_fixture(naming_convention={
        "ck": "ck_%(table_name)s_foo"
    })
    op.create_check_constraint(
        None,
        "user_table",
        func.len(column('name')) > 5
    )
    context.assert_(
        "ALTER TABLE user_table ADD CONSTRAINT ck_user_table_foo "
        "CHECK (len(name) > 5)"
    )
@requires_094
def test_add_unique_constraint_name_is_none():
    """A UNIQUE constraint with name=None gets its name from the "uq" template."""
    context = op_fixture(naming_convention={
        "uq": "uq_%(table_name)s_foo"
    })
    op.create_unique_constraint(
        None,
        "user_table",
        'x'
    )
    context.assert_(
        "ALTER TABLE user_table ADD CONSTRAINT uq_user_table_foo UNIQUE (x)"
    )
@requires_094
def test_add_index_name_is_none():
    """An index with name=None gets its name from the "ix" template."""
    context = op_fixture(naming_convention={
        "ix": "ix_%(table_name)s_foo"
    })
    op.create_index(
        None,
        "user_table",
        'x'
    )
    context.assert_(
        "CREATE INDEX ix_user_table_foo ON user_table (x)"
    )
@requires_094
def test_add_check_constraint_already_named_from_schema():
    """A constraint already named by a MetaData convention keeps that name.

    The CheckConstraint is first attached to table 't' under m1's
    convention, so it arrives pre-named ck_t_cc1 and is not renamed for
    some_table (the expected DDL below uses ck_t_cc1, not ck_some_table_*).
    """
    m1 = MetaData(
        naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"})
    ck = CheckConstraint("im a constraint", name="cc1")
    Table('t', m1, Column('x'), ck)
    context = op_fixture(
        naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"})
    op.create_table(
        "some_table",
        Column('x', Integer, ck),
    )
    context.assert_(
        "CREATE TABLE some_table "
        "(x INTEGER CONSTRAINT ck_t_cc1 CHECK (im a constraint))"
    )
@requires_094
def test_add_check_constraint_inline_on_table():
    """A table-level CheckConstraint in create_table is named via the convention."""
    context = op_fixture(
        naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"})
    op.create_table(
        "some_table",
        Column('x', Integer),
        CheckConstraint("im a constraint", name="cc1")
    )
    context.assert_(
        "CREATE TABLE some_table "
        "(x INTEGER, CONSTRAINT ck_some_table_cc1 CHECK (im a constraint))"
    )
@requires_094
def test_add_check_constraint_inline_on_table_w_f():
    """A name wrapped in op.f() is emitted verbatim, not re-templated."""
    context = op_fixture(
        naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"})
    op.create_table(
        "some_table",
        Column('x', Integer),
        CheckConstraint("im a constraint", name=op.f("ck_some_table_cc1"))
    )
    context.assert_(
        "CREATE TABLE some_table "
        "(x INTEGER, CONSTRAINT ck_some_table_cc1 CHECK (im a constraint))"
    )
@requires_094
def test_add_check_constraint_inline_on_column():
    """A column-level CheckConstraint is named via the "ck" convention."""
    context = op_fixture(
        naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"})
    op.create_table(
        "some_table",
        Column('x', Integer, CheckConstraint("im a constraint", name="cc1"))
    )
    context.assert_(
        "CREATE TABLE some_table "
        "(x INTEGER CONSTRAINT ck_some_table_cc1 CHECK (im a constraint))"
    )
@requires_094
def test_add_check_constraint_inline_on_column_w_f():
    """op.f() on a column-level constraint name is used verbatim (ck_q_cc1)."""
    context = op_fixture(
        naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"})
    op.create_table(
        "some_table",
        Column(
            'x', Integer,
            CheckConstraint("im a constraint", name=op.f("ck_q_cc1")))
    )
    context.assert_(
        "CREATE TABLE some_table "
        "(x INTEGER CONSTRAINT ck_q_cc1 CHECK (im a constraint))"
    )
@requires_094
def test_add_column_schema_type():
    """Boolean(name='foo') emits its CHECK constraint named via the convention."""
    context = op_fixture(naming_convention={
        "ck": "ck_%(table_name)s_%(constraint_name)s"
    })
    op.add_column('t1', Column('c1', Boolean(name='foo'), nullable=False))
    # Two statements expected: the column add, then the templated constraint.
    context.assert_(
        'ALTER TABLE t1 ADD COLUMN c1 BOOLEAN NOT NULL',
        'ALTER TABLE t1 ADD CONSTRAINT ck_t1_foo CHECK (c1 IN (0, 1))'
    )
@requires_094
def test_add_column_schema_type_w_f():
    """op.f() on the Boolean's constraint name bypasses the "ck" template."""
    context = op_fixture(naming_convention={
        "ck": "ck_%(table_name)s_%(constraint_name)s"
    })
    op.add_column(
        't1', Column('c1', Boolean(name=op.f('foo')), nullable=False))
    context.assert_(
        'ALTER TABLE t1 ADD COLUMN c1 BOOLEAN NOT NULL',
        'ALTER TABLE t1 ADD CONSTRAINT foo CHECK (c1 IN (0, 1))'
    )
| 27.714286
| 76
| 0.646598
| 630
| 4,850
| 4.62381
| 0.112698
| 0.036045
| 0.041195
| 0.067971
| 0.856162
| 0.856162
| 0.79437
| 0.767937
| 0.719876
| 0.69516
| 0
| 0.018558
| 0.233402
| 4,850
| 174
| 77
| 27.873563
| 0.764927
| 0
| 0
| 0.577181
| 0
| 0
| 0.319381
| 0.081649
| 0
| 0
| 0
| 0
| 0.080537
| 1
| 0.073826
| false
| 0
| 0.033557
| 0
| 0.107383
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
343f4c316df3eb94e2a093e054414bbe03136825
| 215
|
py
|
Python
|
learn-to-code-with-python/06-Strings-The-Basics/length-concatenation-and-immutability.py
|
MaciejZurek/python_practicing
|
0a426f2aed151573e1f8678e0239ff596d92bbde
|
[
"MIT"
] | null | null | null |
learn-to-code-with-python/06-Strings-The-Basics/length-concatenation-and-immutability.py
|
MaciejZurek/python_practicing
|
0a426f2aed151573e1f8678e0239ff596d92bbde
|
[
"MIT"
] | null | null | null |
learn-to-code-with-python/06-Strings-The-Basics/length-concatenation-and-immutability.py
|
MaciejZurek/python_practicing
|
0a426f2aed151573e1f8678e0239ff596d92bbde
|
[
"MIT"
] | null | null | null |
# len() counts every character in the string, including spaces.
print(len("Python"))
print(len("programming"))
print(len(" "))
# ``+`` joins strings exactly as given -- no implicit space is inserted.
print("Maciej" + "Zurek")
print("Maciej " + "Zurek")
print("Maciej" + " Zurek")
print("Maciej" + " " + "Zurek")
# Adjacent string literals are concatenated by the parser itself.
print("a" "b" "c")
# ``*`` repeats a string the given number of times.
print("---" * 10)
| 16.538462
| 31
| 0.55814
| 26
| 215
| 4.615385
| 0.384615
| 0.366667
| 0.533333
| 0.7
| 0.575
| 0.575
| 0.575
| 0.575
| 0.575
| 0.575
| 0
| 0.010811
| 0.139535
| 215
| 12
| 32
| 17.916667
| 0.637838
| 0
| 0
| 0.333333
| 0
| 0
| 0.330233
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
344305657381aa397d4c83394bfe167385f5900d
| 66
|
py
|
Python
|
test/utils.py
|
gadalang/gadalang-lang
|
16cc1ea403a31667632bb4055257bb7754f0d15d
|
[
"MIT"
] | null | null | null |
test/utils.py
|
gadalang/gadalang-lang
|
16cc1ea403a31667632bb4055257bb7754f0d15d
|
[
"MIT"
] | null | null | null |
test/utils.py
|
gadalang/gadalang-lang
|
16cc1ea403a31667632bb4055257bb7754f0d15d
|
[
"MIT"
] | null | null | null |
import unittest
class TestCaseBase(unittest.TestCase):
    """Common base class for this project's test cases.

    Currently adds no behavior over unittest.TestCase -- presumably a
    shared extension point for future fixtures.
    """
    pass
| 11
| 38
| 0.772727
| 7
| 66
| 7.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 66
| 5
| 39
| 13.2
| 0.927273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
344976bb5cdd0e0335c4f00e9f675ace6eaf5192
| 117
|
py
|
Python
|
autolog/views.py
|
ReneBrals/RESS-AutoLog
|
344650797c00298c701bf8c7d086f4bd3fafc978
|
[
"MIT"
] | null | null | null |
autolog/views.py
|
ReneBrals/RESS-AutoLog
|
344650797c00298c701bf8c7d086f4bd3fafc978
|
[
"MIT"
] | null | null | null |
autolog/views.py
|
ReneBrals/RESS-AutoLog
|
344650797c00298c701bf8c7d086f4bd3fafc978
|
[
"MIT"
] | null | null | null |
from django.http import HttpResponseRedirect
def redir(request):
    """Redirect any incoming request to the autolog backend URL."""
    backend_url = 'autologbackend/'
    return HttpResponseRedirect(backend_url)
| 23.4
| 50
| 0.811966
| 11
| 117
| 8.636364
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 117
| 4
| 51
| 29.25
| 0.913462
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
caba3c87902297ff01448b03d46dc126efaf2182
| 23
|
py
|
Python
|
AcsServer/__init__.py
|
FurmanCenter/ACSDownloader
|
918afc0c7baa8814da98c2e3ee11352af68c027e
|
[
"Apache-2.0"
] | 1
|
2020-04-15T15:40:18.000Z
|
2020-04-15T15:40:18.000Z
|
AcsServer/__init__.py
|
FurmanCenter/ACSDownloader
|
918afc0c7baa8814da98c2e3ee11352af68c027e
|
[
"Apache-2.0"
] | null | null | null |
AcsServer/__init__.py
|
FurmanCenter/ACSDownloader
|
918afc0c7baa8814da98c2e3ee11352af68c027e
|
[
"Apache-2.0"
] | null | null | null |
from AcsServer import *
| 23
| 23
| 0.826087
| 3
| 23
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 1
| 23
| 23
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cabe0e106067c1e1d947b5b24c5fd3a032171d11
| 32
|
py
|
Python
|
bm25fast/__init__.py
|
timattwell/bm25-rust
|
4b73cacaf36cdbf1e9e5c78d01c8e34a82e74566
|
[
"MIT"
] | null | null | null |
bm25fast/__init__.py
|
timattwell/bm25-rust
|
4b73cacaf36cdbf1e9e5c78d01c8e34a82e74566
|
[
"MIT"
] | null | null | null |
bm25fast/__init__.py
|
timattwell/bm25-rust
|
4b73cacaf36cdbf1e9e5c78d01c8e34a82e74566
|
[
"MIT"
] | null | null | null |
from MyLib.bm25_rust import BM25
| 32
| 32
| 0.875
| 6
| 32
| 4.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 0.09375
| 32
| 1
| 32
| 32
| 0.793103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1b131e12dd79e015fe42981dafd55209124444b0
| 34
|
py
|
Python
|
nanoevent/__init__.py
|
berry-langerak/nanoevent
|
65ebd1bee82816b4067f3c33bb302b1ee5ac1931
|
[
"MIT"
] | null | null | null |
nanoevent/__init__.py
|
berry-langerak/nanoevent
|
65ebd1bee82816b4067f3c33bb302b1ee5ac1931
|
[
"MIT"
] | null | null | null |
nanoevent/__init__.py
|
berry-langerak/nanoevent
|
65ebd1bee82816b4067f3c33bb302b1ee5ac1931
|
[
"MIT"
] | null | null | null |
from .dispatcher import Dispatcher
| 34
| 34
| 0.882353
| 4
| 34
| 7.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1b3a75eaea1aa134df3f6ce8dd9ad7ed0cb05aa6
| 103
|
py
|
Python
|
experiments/__init__.py
|
srikarym/torchrl
|
fee98e78ac1657a2c9a4063dd8d63ba207a121e2
|
[
"Apache-2.0"
] | 3
|
2019-02-27T19:00:32.000Z
|
2020-07-19T03:18:28.000Z
|
experiments/__init__.py
|
srikarym/torchrl
|
fee98e78ac1657a2c9a4063dd8d63ba207a121e2
|
[
"Apache-2.0"
] | null | null | null |
experiments/__init__.py
|
srikarym/torchrl
|
fee98e78ac1657a2c9a4063dd8d63ba207a121e2
|
[
"Apache-2.0"
] | null | null | null |
from . import a2c
from . import ddpg
from . import dqn
from . import ppo
from . import prioritized_dqn
| 17.166667
| 29
| 0.757282
| 16
| 103
| 4.8125
| 0.4375
| 0.649351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012048
| 0.194175
| 103
| 5
| 30
| 20.6
| 0.915663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
94021390ad6156157defede94424552ab99e054d
| 4,329
|
py
|
Python
|
tests/open_alchemy/models_file/model/test_source/test_typed_dict.py
|
MihailMiller/OpenAlchemy
|
55b751c58ca50706ebc46262f50addb7dec34278
|
[
"Apache-2.0"
] | 40
|
2019-11-05T06:50:35.000Z
|
2022-03-09T01:34:57.000Z
|
tests/open_alchemy/models_file/model/test_source/test_typed_dict.py
|
MihailMiller/OpenAlchemy
|
55b751c58ca50706ebc46262f50addb7dec34278
|
[
"Apache-2.0"
] | 178
|
2019-11-03T04:10:38.000Z
|
2022-03-31T00:07:17.000Z
|
tests/open_alchemy/models_file/model/test_source/test_typed_dict.py
|
MihailMiller/OpenAlchemy
|
55b751c58ca50706ebc46262f50addb7dec34278
|
[
"Apache-2.0"
] | 17
|
2019-11-04T07:22:46.000Z
|
2022-03-23T05:29:49.000Z
|
"""Tests for model."""
import pytest
from open_alchemy.models_file import types
from open_alchemy.models_file.model import source as model_source
_ColumnArtifacts = types.ColumnArtifacts
_TypedDictArtifacts = types.TypedDictArtifacts
_TypedDictClassArtifacts = types.TypedDictClassArtifacts
# Cases: one required property, then two required properties; each pairs the
# artifacts with the exact TypedDict source expected from the generator.
# NOTE(review): the expected-source literals are whitespace-sensitive and the
# leading indentation inside them may have been lost in this copy -- verify
# against the upstream file before relying on them.
@pytest.mark.parametrize(
    "artifacts, expected_source",
    [
        (
            _TypedDictArtifacts(
                required=_TypedDictClassArtifacts(
                    props=[_ColumnArtifacts(name="column_1", type="type_1")],
                    empty=False,
                    name="ModelRequiredDict",
                    parent_class="RequiredParentClass",
                ),
                not_required=None,  # type: ignore
            ),
            '''
class ModelRequiredDict(RequiredParentClass, total=True):
"""TypedDict for properties that are required."""
column_1: type_1''',
        ),
        (
            _TypedDictArtifacts(
                required=_TypedDictClassArtifacts(
                    props=[
                        _ColumnArtifacts(name="column_1", type="type_1"),
                        _ColumnArtifacts(name="column_2", type="type_2"),
                    ],
                    empty=False,
                    name="ModelRequiredDict",
                    parent_class="RequiredParentClass",
                ),
                not_required=None,  # type: ignore
            ),
            '''
class ModelRequiredDict(RequiredParentClass, total=True):
"""TypedDict for properties that are required."""
column_1: type_1
column_2: type_2''',
        ),
    ],
    ids=["single property", "multiple properties"],
)
@pytest.mark.models_file
def test_typed_dict_required(artifacts, expected_source):
    """
    GIVEN model artifacts
    WHEN typed_dict_required is called with the artifacts
    THEN the source code for the typed dict class is returned.
    """
    source = model_source.typed_dict_required(artifacts=artifacts)
    assert source == expected_source
# Cases: no properties (empty class), one property, two properties for the
# *not-required* TypedDict (total=False).
# NOTE(review): as above, the expected-source literals are
# whitespace-sensitive; indentation inside them may have been lost in this
# copy -- verify against the upstream file.
@pytest.mark.parametrize(
    "artifacts, expected_source",
    [
        (
            _TypedDictArtifacts(
                required=None,  # type: ignore
                not_required=_TypedDictClassArtifacts(
                    props=[],
                    empty=True,
                    name="ModelNotRequiredDict",
                    parent_class="NotRequiredParentClass",
                ),
            ),
            '''
class ModelNotRequiredDict(NotRequiredParentClass, total=False):
"""TypedDict for properties that are not required."""''',
        ),
        (
            _TypedDictArtifacts(
                required=None,  # type: ignore
                not_required=_TypedDictClassArtifacts(
                    props=[_ColumnArtifacts(name="column_1", type="type_1")],
                    empty=False,
                    name="ModelNotRequiredDict",
                    parent_class="NotRequiredParentClass",
                ),
            ),
            '''
class ModelNotRequiredDict(NotRequiredParentClass, total=False):
"""TypedDict for properties that are not required."""
column_1: type_1''',
        ),
        (
            _TypedDictArtifacts(
                required=None,  # type: ignore
                not_required=_TypedDictClassArtifacts(
                    props=[
                        _ColumnArtifacts(name="column_1", type="type_1"),
                        _ColumnArtifacts(name="column_2", type="type_2"),
                    ],
                    empty=False,
                    name="ModelNotRequiredDict",
                    parent_class="NotRequiredParentClass",
                ),
            ),
            '''
class ModelNotRequiredDict(NotRequiredParentClass, total=False):
"""TypedDict for properties that are not required."""
column_1: type_1
column_2: type_2''',
        ),
    ],
    ids=["empty", "single property", "multiple properties"],
)
@pytest.mark.models_file
def test_typed_dict_not_required(artifacts, expected_source):
    """
    GIVEN model artifacts
    WHEN typed_dict_not_required is called with the artifacts
    THEN the source code for the typed dict class is returned.
    """
    source = model_source.typed_dict_not_required(artifacts=artifacts)
    assert source == expected_source
| 31.369565
| 77
| 0.571957
| 356
| 4,329
| 6.716292
| 0.174157
| 0.050606
| 0.036805
| 0.046006
| 0.912171
| 0.888749
| 0.888749
| 0.845253
| 0.808448
| 0.775408
| 0
| 0.008339
| 0.335181
| 4,329
| 137
| 78
| 31.59854
| 0.822446
| 0.082236
| 0
| 0.695652
| 0
| 0
| 0.128838
| 0.020893
| 0
| 0
| 0
| 0
| 0.021739
| 1
| 0.021739
| false
| 0
| 0.032609
| 0
| 0.054348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
942301af5d2bc9404b9018b84a0677d63eec961c
| 79
|
py
|
Python
|
PyFSM/pyfsm/exceptions/null_exception.py
|
wafec/wafec-py-fsm
|
444b7b797411daa3186cf812535a660404951d00
|
[
"MIT"
] | null | null | null |
PyFSM/pyfsm/exceptions/null_exception.py
|
wafec/wafec-py-fsm
|
444b7b797411daa3186cf812535a660404951d00
|
[
"MIT"
] | null | null | null |
PyFSM/pyfsm/exceptions/null_exception.py
|
wafec/wafec-py-fsm
|
444b7b797411daa3186cf812535a660404951d00
|
[
"MIT"
] | null | null | null |
from .base import ExceptionBase
class NullException(ExceptionBase):
    """Exception type for null-related errors.

    Adds no behavior beyond ExceptionBase; the exact raising conditions are
    defined by callers elsewhere in the package -- TODO confirm.
    """
    pass
| 13.166667
| 35
| 0.78481
| 8
| 79
| 7.75
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164557
| 79
| 5
| 36
| 15.8
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
942d624c760d53cedec0c81607d9639db147ec0d
| 43
|
py
|
Python
|
api/__init__.py
|
satyap54/synced-player
|
7dda4150762393ed431a62d779a8e3b7051bc9ef
|
[
"MIT"
] | null | null | null |
api/__init__.py
|
satyap54/synced-player
|
7dda4150762393ed431a62d779a8e3b7051bc9ef
|
[
"MIT"
] | null | null | null |
api/__init__.py
|
satyap54/synced-player
|
7dda4150762393ed431a62d779a8e3b7051bc9ef
|
[
"MIT"
] | null | null | null |
from . import schemas, models, dependencies
| 43
| 43
| 0.813953
| 5
| 43
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 43
| 1
| 43
| 43
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9432bd961e4352b4d8019b837f853b2bfb7c4a5d
| 251
|
py
|
Python
|
torch3d/metrics/metric.py
|
zhangmozhe/torch3d
|
d47e9b243e520f9c0c72a26c271d2c7ad242cb65
|
[
"BSD-3-Clause"
] | 2
|
2020-08-28T08:57:38.000Z
|
2021-12-13T06:17:11.000Z
|
torch3d/metrics/metric.py
|
murari023/torch3d
|
9a451f876fe7ea23f6251ad783f505ec363ee884
|
[
"BSD-3-Clause"
] | null | null | null |
torch3d/metrics/metric.py
|
murari023/torch3d
|
9a451f876fe7ea23f6251ad783f505ec363ee884
|
[
"BSD-3-Clause"
] | 1
|
2020-06-03T15:19:25.000Z
|
2020-06-03T15:19:25.000Z
|
class Metric(object):
    """Abstract base interface for evaluation metrics.

    Every method is a stub raising NotImplementedError; concrete metrics
    must override all four.  (Names suggest the usual accumulate/report
    pattern -- reset state, update with a batch, report score/mean --
    TODO confirm against concrete subclasses.)
    """

    def reset(self):
        # Abstract: must be overridden.
        raise NotImplementedError

    def update(self, x, y):
        # Abstract: must be overridden; takes a pair (x, y) per call.
        raise NotImplementedError

    def score(self):
        # Abstract: must be overridden.
        raise NotImplementedError

    def mean(self):
        # Abstract: must be overridden.
        raise NotImplementedError
| 19.307692
| 33
| 0.649402
| 25
| 251
| 6.52
| 0.52
| 0.588957
| 0.515337
| 0.380368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.282869
| 251
| 12
| 34
| 20.916667
| 0.905556
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0
| 0
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
9437515fe5f6b31da8aed3a53d5e17456b7c97be
| 155
|
py
|
Python
|
app/api/__init__.py
|
riipandi/flask-blueprint
|
bb4511391d9d27c0ef1017173c36864aef2441aa
|
[
"MIT"
] | 1
|
2020-07-06T05:32:16.000Z
|
2020-07-06T05:32:16.000Z
|
app/api/__init__.py
|
riipandi/flask-blueprint
|
bb4511391d9d27c0ef1017173c36864aef2441aa
|
[
"MIT"
] | null | null | null |
app/api/__init__.py
|
riipandi/flask-blueprint
|
bb4511391d9d27c0ef1017173c36864aef2441aa
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
# Define the Blueprint module for this package, mounted under /api.
api = Blueprint('api', __name__, url_prefix='/api')
# Load the module's components last -- presumably so the views in .index
# can attach themselves to ``api`` above; confirm against .index.
from .index import *
| 19.375
| 51
| 0.754839
| 20
| 155
| 5.6
| 0.65
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148387
| 155
| 7
| 52
| 22.142857
| 0.848485
| 0.303226
| 0
| 0
| 0
| 0
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
8481c7b23e5c04f71e76d28dbeebdbb0529c5eb6
| 43
|
py
|
Python
|
run_5117.py
|
mpi3d/goodix-fp-dump
|
039940845bd5eeb98cd92d72f267e3be77feb156
|
[
"MIT"
] | 136
|
2021-05-05T14:16:17.000Z
|
2022-03-31T09:04:18.000Z
|
run_5117.py
|
tsunekotakimoto/goodix-fp-dump
|
b88ecbababd3766314521fe30ee943c4bd1810df
|
[
"MIT"
] | 14
|
2021-08-20T09:49:39.000Z
|
2022-03-20T13:18:05.000Z
|
run_5117.py
|
tsunekotakimoto/goodix-fp-dump
|
b88ecbababd3766314521fe30ee943c4bd1810df
|
[
"MIT"
] | 11
|
2021-08-02T15:49:11.000Z
|
2022-02-06T22:06:42.000Z
|
from driver_51x7 import main
# Run the shared 51x7 driver entry point; 0x5117 is presumably the device
# product id (it matches this script's name) -- confirm in driver_51x7.main.
main(0x5117)
| 10.75
| 28
| 0.813953
| 7
| 43
| 4.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 0.139535
| 43
| 3
| 29
| 14.333333
| 0.702703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
849c9ddf3843def486a01f8f7dea0026893a242f
| 30
|
py
|
Python
|
isingpy/__init__.py
|
kamran-haider/isingpy
|
cd9c98f7702922f539c3a9cc1dc31cec17279f98
|
[
"MIT"
] | null | null | null |
isingpy/__init__.py
|
kamran-haider/isingpy
|
cd9c98f7702922f539c3a9cc1dc31cec17279f98
|
[
"MIT"
] | null | null | null |
isingpy/__init__.py
|
kamran-haider/isingpy
|
cd9c98f7702922f539c3a9cc1dc31cec17279f98
|
[
"MIT"
] | null | null | null |
from .ising import IsingModel
| 15
| 29
| 0.833333
| 4
| 30
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 1
| 30
| 30
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
84f29ef38ed5c526be58faf5d67e17cf667526f8
| 38
|
py
|
Python
|
lcs/agents/yacs/__init__.py
|
GodspeedYouBlackEmperor/pyalcs
|
9811bc5cde935e04e0fd87fb5930bd1b9170e73a
|
[
"MIT"
] | 11
|
2018-02-13T05:37:26.000Z
|
2022-02-02T13:33:18.000Z
|
lcs/agents/yacs/__init__.py
|
GodspeedYouBlackEmperor/pyalcs
|
9811bc5cde935e04e0fd87fb5930bd1b9170e73a
|
[
"MIT"
] | 40
|
2017-09-07T07:15:43.000Z
|
2021-06-09T15:42:27.000Z
|
lcs/agents/yacs/__init__.py
|
GodspeedYouBlackEmperor/pyalcs
|
9811bc5cde935e04e0fd87fb5930bd1b9170e73a
|
[
"MIT"
] | 14
|
2017-10-31T09:01:14.000Z
|
2022-01-02T09:38:29.000Z
|
from .yacs import Configuration, YACS
| 19
| 37
| 0.815789
| 5
| 38
| 6.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ca05e1083a00680da2d8dee945cd06cbf7cea681
| 32
|
py
|
Python
|
hello.py
|
smartCampusBPHC/website
|
cf6d3b58c40234aaa4207a4d8f43b37ec5ee88d8
|
[
"MIT"
] | null | null | null |
hello.py
|
smartCampusBPHC/website
|
cf6d3b58c40234aaa4207a4d8f43b37ec5ee88d8
|
[
"MIT"
] | null | null | null |
hello.py
|
smartCampusBPHC/website
|
cf6d3b58c40234aaa4207a4d8f43b37ec5ee88d8
|
[
"MIT"
] | null | null | null |
import os
# Fixed: the original used the Python 2 print statement
# (``print "hello world"``), which is a SyntaxError under Python 3.
print("hello world")
| 6.4
| 19
| 0.71875
| 5
| 32
| 4.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21875
| 32
| 4
| 20
| 8
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
ca250e241ea1d1435f1a762c202dcc9751b56af7
| 83
|
py
|
Python
|
docs/papers/wpmvp14/experiments/np0.py
|
davidbrochart/pythran
|
24b6c8650fe99791a4091cbdc2c24686e86aa67c
|
[
"BSD-3-Clause"
] | 1,647
|
2015-01-13T01:45:38.000Z
|
2022-03-28T01:23:41.000Z
|
docs/papers/wpmvp14/experiments/np0.py
|
davidbrochart/pythran
|
24b6c8650fe99791a4091cbdc2c24686e86aa67c
|
[
"BSD-3-Clause"
] | 1,116
|
2015-01-01T09:52:05.000Z
|
2022-03-18T21:06:40.000Z
|
docs/papers/wpmvp14/experiments/np0.py
|
davidbrochart/pythran
|
24b6c8650fe99791a4091cbdc2c24686e86aa67c
|
[
"BSD-3-Clause"
] | 180
|
2015-02-12T02:47:28.000Z
|
2022-03-14T10:28:18.000Z
|
#pythran export np0(float [])
import numpy as np
def np0(a):
    """Return ``1/a + a``, elementwise when ``a`` is an array."""
    reciprocal = 1. / a
    return reciprocal + a
| 16.6
| 29
| 0.626506
| 15
| 83
| 3.466667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0.240964
| 83
| 4
| 30
| 20.75
| 0.777778
| 0.337349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
ca6ce9a1db78175a65cfc475b00311aac6bbe308
| 23
|
py
|
Python
|
awis/__init__.py
|
whistlebee/awis-py
|
01793c72b369e5e41c4d11b7ba67f71e47cee3ef
|
[
"Apache-2.0"
] | 1
|
2020-09-04T18:50:32.000Z
|
2020-09-04T18:50:32.000Z
|
awis/__init__.py
|
whistlebee/awis-py
|
01793c72b369e5e41c4d11b7ba67f71e47cee3ef
|
[
"Apache-2.0"
] | 1
|
2020-09-06T05:51:43.000Z
|
2020-09-19T09:27:56.000Z
|
awis/__init__.py
|
whistlebee/awis-py
|
01793c72b369e5e41c4d11b7ba67f71e47cee3ef
|
[
"Apache-2.0"
] | null | null | null |
from .awis import AWIS
| 11.5
| 22
| 0.782609
| 4
| 23
| 4.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
047f51e09dd6b939aecf2a54f7aa3191167785b2
| 63
|
py
|
Python
|
hypertuner/__init__.py
|
hypertuner/hypertuner-pythonlib
|
416cb48b75967409b5b77272fdebc0500cc2a7d9
|
[
"MIT"
] | 1
|
2019-08-19T22:29:25.000Z
|
2019-08-19T22:29:25.000Z
|
hypertuner/__init__.py
|
hypertuner/hypertuner-pythonlib
|
416cb48b75967409b5b77272fdebc0500cc2a7d9
|
[
"MIT"
] | null | null | null |
hypertuner/__init__.py
|
hypertuner/hypertuner-pythonlib
|
416cb48b75967409b5b77272fdebc0500cc2a7d9
|
[
"MIT"
] | null | null | null |
from .args import *
from .logger import *
from .saver import *
| 15.75
| 21
| 0.714286
| 9
| 63
| 5
| 0.555556
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 63
| 3
| 22
| 21
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
04e40a0301e97ade0fd6211c6694eb2a184120ad
| 4,297
|
py
|
Python
|
python/PD_motor_controller_plot.py
|
Tbarkin121/CubeBot_IsaacGym
|
1f2539d63576be83bdb6ca38455f5656562ad11d
|
[
"MIT"
] | null | null | null |
python/PD_motor_controller_plot.py
|
Tbarkin121/CubeBot_IsaacGym
|
1f2539d63576be83bdb6ca38455f5656562ad11d
|
[
"MIT"
] | null | null | null |
python/PD_motor_controller_plot.py
|
Tbarkin121/CubeBot_IsaacGym
|
1f2539d63576be83bdb6ca38455f5656562ad11d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 13 14:19:24 2022
@author: tyler

Visualizes a PD motor controller's torque output as 3D surfaces.

Cell 1 plots clipped PD torque over a (position error, velocity error) grid.
Cells 2 and 3 plot the torque actually delivered once a speed-dependent
available-torque envelope is applied to a commanded torque target.
Each cell re-renders the surface from many azimuth angles, producing a
crude rotation animation when run cell-by-cell in an interactive IDE.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.ticker import LinearLocator
import numpy as np
#%% Cell 1: Torque = clip(Stiffness*posError + Damping*velError, -maxTorque, maxTorque)
maxSpeed = 200
maxTorque = 2
Stiffness = 0 #Scales posError
Damping = 0.1 #Scales velError
num_points = 25
# Column of ones used to replicate a 1-D sweep into a square grid (outer product).
ones = np.ones((num_points, 1))
posError = np.linspace(-10, 10, num=num_points, endpoint=True, retstep=False, dtype=None, axis=0)
posError = np.expand_dims(posError, 1)
# (num_points, num_points) grid; transpose makes posError vary along the other axis than velError.
posError = np.transpose(np.dot(posError, np.transpose(ones)))
posTorque = posError*Stiffness
velError = np.linspace(-maxSpeed, maxSpeed, num=num_points, endpoint=True, retstep=False, dtype=None, axis=0)
velError = np.expand_dims(velError, 1)
velError = np.dot(velError, np.transpose(ones))
velTorque = velError*Damping
# Saturate the PD output at the actuator's torque limit.
Torque = np.clip(posTorque + velTorque, -maxTorque, maxTorque)
# Render the same surface from 50 azimuths (one figure per angle).
azim_setpoints = np.linspace(0, 360, num=50, endpoint=True, retstep=False, dtype=None, axis=0)
for azim in azim_setpoints:
    fig, ax = plt.subplots(subplot_kw={"projection": "3d"})
    ax.view_init(elev=45, azim=azim)
    # Camera distance. NOTE(review): Axes3D.dist is deprecated in newer matplotlib.
    ax.dist = 10
    surf = ax.plot_surface(posError, velError, Torque, cmap=cm.coolwarm, linewidth=0, antialiased=False)
    # Add a color bar which maps values to colors.
    fig.colorbar(surf)
    ax.set_xlabel('Pos Error')
    ax.set_ylabel('Vel Error')
    ax.set_zlabel('Torque')
    plt.show()
#%% 2-D view: each grid column is drawn as one line of Torque vs. posError.
plt.plot(posError, Torque)
plt.ylabel('Torque')
plt.xlabel('Error')
plt.show()
#%% Cell 2: torque target vs. speed-limited available torque.
maxSpeed = 200
maxTorque = 2
Stiffness = 0 #Scales posError
Damping = 0.01 #Scales velError
num_points = 25
ones = np.ones((num_points, 1))
dof_vel = np.linspace(-maxSpeed, maxSpeed, num=num_points, endpoint=True, retstep=False, dtype=None, axis=0)
dof_vel = np.expand_dims(dof_vel, 1)
dof_vel = np.transpose(np.dot(dof_vel, np.transpose(ones)))
# Normalized action in [-1, 1] scaled to a commanded torque.
action = np.linspace(-1, 1, num=num_points, endpoint=True, retstep=False, dtype=None, axis=0)
action = np.expand_dims(action, 1)
action = np.dot(action, np.transpose(ones))
TorqueTarget = action*maxTorque
# Available torque shrinks linearly with speed (motor-like speed/torque derating).
max_available_torque = np.clip(maxTorque - dof_vel*maxTorque/maxSpeed, -maxTorque, maxTorque)
min_available_torque = np.clip(-maxTorque - dof_vel*maxTorque/maxSpeed, -maxTorque, maxTorque)
# Delivered torque is the target clamped into the available envelope.
Torque = np.clip(TorqueTarget, min_available_torque, max_available_torque)
azim_setpoints = np.linspace(0, 45, num=100, endpoint=True, retstep=False, dtype=None, axis=0)
for azim in azim_setpoints:
    fig, ax = plt.subplots(subplot_kw={"projection": "3d"})
    ax.view_init(elev=45, azim=azim)
    ax.dist = 10
    surf = ax.plot_surface(dof_vel, TorqueTarget, Torque, cmap=cm.coolwarm, linewidth=0, antialiased=False)
    # Add a color bar which maps values to colors.
    fig.colorbar(surf)
    ax.set_xlabel('DOF Vel')
    ax.set_ylabel('Torque Target')
    ax.set_zlabel('Torque Actual')
    plt.show()
#%% Cell 3: same plot, but with an `offset` factor steepening/shifting the derating line.
maxSpeed = 200
maxTorque = 2
Stiffness = 0 #Scales posError
Damping = 0.01 #Scales velError
num_points = 25
ones = np.ones((num_points, 1))
dof_vel = np.linspace(-maxSpeed, maxSpeed, num=num_points, endpoint=True, retstep=False, dtype=None, axis=0)
dof_vel = np.expand_dims(dof_vel, 1)
dof_vel = np.transpose(np.dot(dof_vel, np.transpose(ones)))
action = np.linspace(-1, 1, num=num_points, endpoint=True, retstep=False, dtype=None, axis=0)
action = np.expand_dims(action, 1)
action = np.dot(action, np.transpose(ones))
TorqueTarget = action*maxTorque
# Slope multiplier for the derating line; offset=1 reproduces Cell 2's envelope.
offset = 2
max_available_torque = np.clip(maxTorque - (offset*dof_vel/maxSpeed + (1-offset))*maxTorque, -maxTorque, maxTorque)
min_available_torque = np.clip(-maxTorque - (offset*dof_vel/maxSpeed - (1-offset))*maxTorque, -maxTorque, maxTorque)
Torque = np.clip(TorqueTarget, min_available_torque, max_available_torque)
azim_setpoints = np.linspace(0, 45, num=100, endpoint=True, retstep=False, dtype=None, axis=0)
for azim in azim_setpoints:
    fig, ax = plt.subplots(subplot_kw={"projection": "3d"})
    ax.view_init(elev=45, azim=azim)
    ax.dist = 10
    surf = ax.plot_surface(dof_vel, TorqueTarget, Torque, cmap=cm.coolwarm, linewidth=0, antialiased=False)
    # Add a color bar which maps values to colors.
    fig.colorbar(surf)
    ax.set_xlabel('DOF Vel')
    ax.set_ylabel('Torque Target')
    ax.set_zlabel('Torque Actual')
    plt.show()
| 33.053846
| 116
| 0.726321
| 646
| 4,297
| 4.716718
| 0.178019
| 0.035445
| 0.056121
| 0.070889
| 0.813259
| 0.805382
| 0.803413
| 0.803413
| 0.801444
| 0.801444
| 0
| 0.029126
| 0.137072
| 4,297
| 129
| 117
| 33.310078
| 0.792611
| 0.076332
| 0
| 0.696629
| 0
| 0
| 0.034719
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.044944
| 0
| 0.044944
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b6eddaaa0c2ce84a2557c0b8ade5ba320bc09c5b
| 24
|
py
|
Python
|
rgpy/standard/__init__.py
|
jqhoogland/rgpy
|
a2a45d3347e55554efe16b4f68bdc03ee19cc9c8
|
[
"MIT"
] | 4
|
2019-06-25T11:34:58.000Z
|
2020-12-08T17:49:20.000Z
|
rgpy/standard/__init__.py
|
jqhoogland/rgpy
|
a2a45d3347e55554efe16b4f68bdc03ee19cc9c8
|
[
"MIT"
] | null | null | null |
rgpy/standard/__init__.py
|
jqhoogland/rgpy
|
a2a45d3347e55554efe16b4f68bdc03ee19cc9c8
|
[
"MIT"
] | null | null | null |
from .block_rg import *
| 12
| 23
| 0.75
| 4
| 24
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6d0a6db34ad80c02dbc4918f8b572a28df957de2
| 36
|
py
|
Python
|
src/bgwork/__init__.py
|
realprocrastinator/DIMY-grp
|
40fcf111ed8f6c525a5df74ddf84deff0c1b7736
|
[
"MIT"
] | null | null | null |
src/bgwork/__init__.py
|
realprocrastinator/DIMY-grp
|
40fcf111ed8f6c525a5df74ddf84deff0c1b7736
|
[
"MIT"
] | null | null | null |
src/bgwork/__init__.py
|
realprocrastinator/DIMY-grp
|
40fcf111ed8f6c525a5df74ddf84deff0c1b7736
|
[
"MIT"
] | null | null | null |
from .bgmgr import BackgroundManager
| 36
| 36
| 0.888889
| 4
| 36
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 1
| 36
| 36
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6d4b17960d768468c41b4ed2ba763d4e4aa9dc27
| 16,117
|
py
|
Python
|
tests/functional/source_overrides/fixtures.py
|
tomasfarias/dbt-core
|
ed5df342ca5d99c5e6971ee6d11c8cf3e6e263b3
|
[
"Apache-2.0"
] | 799
|
2021-10-13T21:40:33.000Z
|
2022-03-31T16:19:31.000Z
|
tests/functional/source_overrides/fixtures.py
|
tomasfarias/dbt-core
|
ed5df342ca5d99c5e6971ee6d11c8cf3e6e263b3
|
[
"Apache-2.0"
] | 939
|
2021-10-13T17:45:24.000Z
|
2022-03-31T22:09:58.000Z
|
tests/functional/source_overrides/fixtures.py
|
tomasfarias/dbt-core
|
ed5df342ca5d99c5e6971ee6d11c8cf3e6e263b3
|
[
"Apache-2.0"
] | 175
|
2021-10-14T18:59:06.000Z
|
2022-03-31T16:17:32.000Z
|
import pytest
dupe_models__schema2_yml = """
version: 2
sources:
- name: my_source
overrides: localdep
schema: "{{ target.schema }}"
database: "{{ target.database }}"
freshness:
error_after: {count: 3, period: day}
tables:
- name: my_table
identifier: my_real_seed
# on the override, the "color" column is only unique, it can be null!
columns:
- name: id
tests:
- not_null
- unique
- name: color
tests:
- unique
- name: my_other_table
identifier: my_real_other_seed
- name: snapshot_freshness
identifier: snapshot_freshness_base
freshness:
error_after: {count: 1, period: day}
"""
dupe_models__schema1_yml = """
version: 2
sources:
- name: my_source
overrides: localdep
schema: "{{ target.schema }}"
database: "{{ target.database }}"
freshness:
error_after: {count: 3, period: day}
tables:
- name: my_table
identifier: my_real_seed
# on the override, the "color" column is only unique, it can be null!
columns:
- name: id
tests:
- not_null
- unique
- name: color
tests:
- unique
- name: my_other_table
identifier: my_real_other_seed
- name: snapshot_freshness
identifier: snapshot_freshness_base
freshness:
error_after: {count: 1, period: day}
"""
local_dependency__dbt_project_yml = """
config-version: 2
name: localdep
version: '1.0'
profile: 'default'
seeds:
quote_columns: False
seed-paths: ['seeds']
"""
local_dependency__models__schema_yml = """
version: 2
sources:
- name: my_source
schema: invalid_schema
database: invalid_database
freshness:
error_after: {count: 3, period: hour}
tables:
- name: my_table
identifier: my_seed
columns:
- name: id
tests:
- unique
- not_null
- name: color
tests:
- unique
- not_null
- name: my_other_table
identifier: my_other_seed
columns:
- name: id
tests:
- unique
- not_null
- name: letter
tests:
- unique
- not_null
- name: snapshot_freshness
identifier: snapshot_freshness_base
loaded_at_field: updated_at
freshness:
error_after: {count: 1, period: hour}
- name: my_other_source
schema: "{{ target.schema }}"
database: "{{ target.database }}"
freshness:
error_after: {count: 1, period: day}
tables:
- name: never_fresh
loaded_at_field: updated_at
"""
local_dependency__models__my_model_sql = """
{{ config(materialized="table") }}
with colors as (
select id, color from {{ source('my_source', 'my_table') }}
),
letters as (
select id, letter from {{ source('my_source', 'my_other_table') }}
)
select letter, color from colors join letters using (id)
"""
local_dependency__seeds__my_other_seed_csv = """id,letter
1,r
2,g
3,b
"""
local_dependency__seeds__my_seed_csv = """id,color
1,red
2,green
3,blue
"""
local_dependency__seeds__keep__never_fresh_csv = """favorite_color,id,first_name,email,ip_address,updated_at
blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31
blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14
blue,3,Anna,amontgomery2@miitbeian.gov.cn,'168.104.64.114',2011-10-16 04:07:57
blue,4,Sandra,sgeorge3@livejournal.com,'229.235.252.98',1973-07-19 10:52:43
blue,5,Fred,fwoods4@google.cn,'78.229.170.124',2012-09-30 16:38:29
blue,6,Stephen,shanson5@livejournal.com,'182.227.157.105',1995-11-07 21:40:50
blue,7,William,wmartinez6@upenn.edu,'135.139.249.50',1982-09-05 03:11:59
blue,8,Jessica,jlong7@hao123.com,'203.62.178.210',1991-10-16 11:03:15
blue,9,Douglas,dwhite8@tamu.edu,'178.187.247.1',1979-10-01 09:49:48
blue,10,Lisa,lcoleman9@nydailynews.com,'168.234.128.249',2011-05-26 07:45:49
blue,11,Ralph,rfieldsa@home.pl,'55.152.163.149',1972-11-18 19:06:11
blue,12,Louise,lnicholsb@samsung.com,'141.116.153.154',2014-11-25 20:56:14
blue,13,Clarence,cduncanc@sfgate.com,'81.171.31.133',2011-11-17 07:02:36
blue,14,Daniel,dfranklind@omniture.com,'8.204.211.37',1980-09-13 00:09:04
blue,15,Katherine,klanee@auda.org.au,'176.96.134.59',1997-08-22 19:36:56
blue,16,Billy,bwardf@wikia.com,'214.108.78.85',2003-10-19 02:14:47
blue,17,Annie,agarzag@ocn.ne.jp,'190.108.42.70',1988-10-28 15:12:35
blue,18,Shirley,scolemanh@fastcompany.com,'109.251.164.84',1988-08-24 10:50:57
blue,19,Roger,rfrazieri@scribd.com,'38.145.218.108',1985-12-31 15:17:15
blue,20,Lillian,lstanleyj@goodreads.com,'47.57.236.17',1970-06-08 02:09:05
blue,21,Aaron,arodriguezk@nps.gov,'205.245.118.221',1985-10-11 23:07:49
blue,22,Patrick,pparkerl@techcrunch.com,'19.8.100.182',2006-03-29 12:53:56
blue,23,Phillip,pmorenom@intel.com,'41.38.254.103',2011-11-07 15:35:43
blue,24,Henry,hgarcian@newsvine.com,'1.191.216.252',2008-08-28 08:30:44
blue,25,Irene,iturnero@opera.com,'50.17.60.190',1994-04-01 07:15:02
blue,26,Andrew,adunnp@pen.io,'123.52.253.176',2000-11-01 06:03:25
blue,27,David,dgutierrezq@wp.com,'238.23.203.42',1988-01-25 07:29:18
blue,28,Henry,hsanchezr@cyberchimps.com,'248.102.2.185',1983-01-01 13:36:37
blue,29,Evelyn,epetersons@gizmodo.com,'32.80.46.119',1979-07-16 17:24:12
blue,30,Tammy,tmitchellt@purevolume.com,'249.246.167.88',2001-04-03 10:00:23
blue,31,Jacqueline,jlittleu@domainmarket.com,'127.181.97.47',1986-02-11 21:35:50
blue,32,Earl,eortizv@opera.com,'166.47.248.240',1996-07-06 08:16:27
blue,33,Juan,jgordonw@sciencedirect.com,'71.77.2.200',1987-01-31 03:46:44
blue,34,Diane,dhowellx@nyu.edu,'140.94.133.12',1994-06-11 02:30:05
blue,35,Randy,rkennedyy@microsoft.com,'73.255.34.196',2005-05-26 20:28:39
blue,36,Janice,jriveraz@time.com,'22.214.227.32',1990-02-09 04:16:52
blue,37,Laura,lperry10@diigo.com,'159.148.145.73',2015-03-17 05:59:25
blue,38,Gary,gray11@statcounter.com,'40.193.124.56',1970-01-27 10:04:51
blue,39,Jesse,jmcdonald12@typepad.com,'31.7.86.103',2009-03-14 08:14:29
blue,40,Sandra,sgonzalez13@goodreads.com,'223.80.168.239',1993-05-21 14:08:54
blue,41,Scott,smoore14@archive.org,'38.238.46.83',1980-08-30 11:16:56
blue,42,Phillip,pevans15@cisco.com,'158.234.59.34',2011-12-15 23:26:31
blue,43,Steven,sriley16@google.ca,'90.247.57.68',2011-10-29 19:03:28
blue,44,Deborah,dbrown17@hexun.com,'179.125.143.240',1995-04-10 14:36:07
blue,45,Lori,lross18@ow.ly,'64.80.162.180',1980-12-27 16:49:15
blue,46,Sean,sjackson19@tumblr.com,'240.116.183.69',1988-06-12 21:24:45
blue,47,Terry,tbarnes1a@163.com,'118.38.213.137',1997-09-22 16:43:19
blue,48,Dorothy,dross1b@ebay.com,'116.81.76.49',2005-02-28 13:33:24
blue,49,Samuel,swashington1c@house.gov,'38.191.253.40',1989-01-19 21:15:48
blue,50,Ralph,rcarter1d@tinyurl.com,'104.84.60.174',2007-08-11 10:21:49
"""
local_dependency__seeds__keep__snapshot_freshness_base_csv = """favorite_color,id,first_name,email,ip_address,updated_at
blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31
blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14
blue,3,Anna,amontgomery2@miitbeian.gov.cn,'168.104.64.114',2011-10-16 04:07:57
blue,4,Sandra,sgeorge3@livejournal.com,'229.235.252.98',1973-07-19 10:52:43
blue,5,Fred,fwoods4@google.cn,'78.229.170.124',2012-09-30 16:38:29
blue,6,Stephen,shanson5@livejournal.com,'182.227.157.105',1995-11-07 21:40:50
blue,7,William,wmartinez6@upenn.edu,'135.139.249.50',1982-09-05 03:11:59
blue,8,Jessica,jlong7@hao123.com,'203.62.178.210',1991-10-16 11:03:15
blue,9,Douglas,dwhite8@tamu.edu,'178.187.247.1',1979-10-01 09:49:48
blue,10,Lisa,lcoleman9@nydailynews.com,'168.234.128.249',2011-05-26 07:45:49
blue,11,Ralph,rfieldsa@home.pl,'55.152.163.149',1972-11-18 19:06:11
blue,12,Louise,lnicholsb@samsung.com,'141.116.153.154',2014-11-25 20:56:14
blue,13,Clarence,cduncanc@sfgate.com,'81.171.31.133',2011-11-17 07:02:36
blue,14,Daniel,dfranklind@omniture.com,'8.204.211.37',1980-09-13 00:09:04
blue,15,Katherine,klanee@auda.org.au,'176.96.134.59',1997-08-22 19:36:56
blue,16,Billy,bwardf@wikia.com,'214.108.78.85',2003-10-19 02:14:47
blue,17,Annie,agarzag@ocn.ne.jp,'190.108.42.70',1988-10-28 15:12:35
blue,18,Shirley,scolemanh@fastcompany.com,'109.251.164.84',1988-08-24 10:50:57
blue,19,Roger,rfrazieri@scribd.com,'38.145.218.108',1985-12-31 15:17:15
blue,20,Lillian,lstanleyj@goodreads.com,'47.57.236.17',1970-06-08 02:09:05
blue,21,Aaron,arodriguezk@nps.gov,'205.245.118.221',1985-10-11 23:07:49
blue,22,Patrick,pparkerl@techcrunch.com,'19.8.100.182',2006-03-29 12:53:56
blue,23,Phillip,pmorenom@intel.com,'41.38.254.103',2011-11-07 15:35:43
blue,24,Henry,hgarcian@newsvine.com,'1.191.216.252',2008-08-28 08:30:44
blue,25,Irene,iturnero@opera.com,'50.17.60.190',1994-04-01 07:15:02
blue,26,Andrew,adunnp@pen.io,'123.52.253.176',2000-11-01 06:03:25
blue,27,David,dgutierrezq@wp.com,'238.23.203.42',1988-01-25 07:29:18
blue,28,Henry,hsanchezr@cyberchimps.com,'248.102.2.185',1983-01-01 13:36:37
blue,29,Evelyn,epetersons@gizmodo.com,'32.80.46.119',1979-07-16 17:24:12
blue,30,Tammy,tmitchellt@purevolume.com,'249.246.167.88',2001-04-03 10:00:23
blue,31,Jacqueline,jlittleu@domainmarket.com,'127.181.97.47',1986-02-11 21:35:50
blue,32,Earl,eortizv@opera.com,'166.47.248.240',1996-07-06 08:16:27
blue,33,Juan,jgordonw@sciencedirect.com,'71.77.2.200',1987-01-31 03:46:44
blue,34,Diane,dhowellx@nyu.edu,'140.94.133.12',1994-06-11 02:30:05
blue,35,Randy,rkennedyy@microsoft.com,'73.255.34.196',2005-05-26 20:28:39
blue,36,Janice,jriveraz@time.com,'22.214.227.32',1990-02-09 04:16:52
blue,37,Laura,lperry10@diigo.com,'159.148.145.73',2015-03-17 05:59:25
blue,38,Gary,gray11@statcounter.com,'40.193.124.56',1970-01-27 10:04:51
blue,39,Jesse,jmcdonald12@typepad.com,'31.7.86.103',2009-03-14 08:14:29
blue,40,Sandra,sgonzalez13@goodreads.com,'223.80.168.239',1993-05-21 14:08:54
blue,41,Scott,smoore14@archive.org,'38.238.46.83',1980-08-30 11:16:56
blue,42,Phillip,pevans15@cisco.com,'158.234.59.34',2011-12-15 23:26:31
blue,43,Steven,sriley16@google.ca,'90.247.57.68',2011-10-29 19:03:28
blue,44,Deborah,dbrown17@hexun.com,'179.125.143.240',1995-04-10 14:36:07
blue,45,Lori,lross18@ow.ly,'64.80.162.180',1980-12-27 16:49:15
blue,46,Sean,sjackson19@tumblr.com,'240.116.183.69',1988-06-12 21:24:45
blue,47,Terry,tbarnes1a@163.com,'118.38.213.137',1997-09-22 16:43:19
blue,48,Dorothy,dross1b@ebay.com,'116.81.76.49',2005-02-28 13:33:24
blue,49,Samuel,swashington1c@house.gov,'38.191.253.40',1989-01-19 21:15:48
blue,50,Ralph,rcarter1d@tinyurl.com,'104.84.60.174',2007-08-11 10:21:49
green,51,Wayne,whudson1e@princeton.edu,'90.61.24.102',1983-07-03 16:58:12
green,52,Rose,rjames1f@plala.or.jp,'240.83.81.10',1995-06-08 11:46:23
green,53,Louise,lcox1g@theglobeandmail.com,'105.11.82.145',2016-09-19 14:45:51
green,54,Kenneth,kjohnson1h@independent.co.uk,'139.5.45.94',1976-08-17 11:26:19
green,55,Donna,dbrown1i@amazon.co.uk,'19.45.169.45',2006-05-27 16:51:40
green,56,Johnny,jvasquez1j@trellian.com,'118.202.238.23',1975-11-17 08:42:32
green,57,Patrick,pramirez1k@tamu.edu,'231.25.153.198',1997-08-06 11:51:09
green,58,Helen,hlarson1l@prweb.com,'8.40.21.39',1993-08-04 19:53:40
green,59,Patricia,pspencer1m@gmpg.org,'212.198.40.15',1977-08-03 16:37:27
green,60,Joseph,jspencer1n@marriott.com,'13.15.63.238',2005-07-23 20:22:06
green,61,Phillip,pschmidt1o@blogtalkradio.com,'177.98.201.190',1976-05-19 21:47:44
green,62,Joan,jwebb1p@google.ru,'105.229.170.71',1972-09-07 17:53:47
green,63,Phyllis,pkennedy1q@imgur.com,'35.145.8.244',2000-01-01 22:33:37
green,64,Katherine,khunter1r@smh.com.au,'248.168.205.32',1991-01-09 06:40:24
green,65,Laura,lvasquez1s@wiley.com,'128.129.115.152',1997-10-23 12:04:56
green,66,Juan,jdunn1t@state.gov,'44.228.124.51',2004-11-10 05:07:35
green,67,Judith,jholmes1u@wiley.com,'40.227.179.115',1977-08-02 17:01:45
green,68,Beverly,bbaker1v@wufoo.com,'208.34.84.59',2016-03-06 20:07:23
green,69,Lawrence,lcarr1w@flickr.com,'59.158.212.223',1988-09-13 06:07:21
green,70,Gloria,gwilliams1x@mtv.com,'245.231.88.33',1995-03-18 22:32:46
green,71,Steven,ssims1y@cbslocal.com,'104.50.58.255',2001-08-05 21:26:20
green,72,Betty,bmills1z@arstechnica.com,'103.177.214.220',1981-12-14 21:26:54
green,73,Mildred,mfuller20@prnewswire.com,'151.158.8.130',2000-04-19 10:13:55
green,74,Donald,dday21@icq.com,'9.178.102.255',1972-12-03 00:58:24
green,75,Eric,ethomas22@addtoany.com,'85.2.241.227',1992-11-01 05:59:30
green,76,Joyce,jarmstrong23@sitemeter.com,'169.224.20.36',1985-10-24 06:50:01
green,77,Maria,mmartinez24@amazonaws.com,'143.189.167.135',2005-10-05 05:17:42
green,78,Harry,hburton25@youtube.com,'156.47.176.237',1978-03-26 05:53:33
green,79,Kevin,klawrence26@hao123.com,'79.136.183.83',1994-10-12 04:38:52
green,80,David,dhall27@prweb.com,'133.149.172.153',1976-12-15 16:24:24
green,81,Kathy,kperry28@twitter.com,'229.242.72.228',1979-03-04 02:58:56
green,82,Adam,aprice29@elegantthemes.com,'13.145.21.10',1982-11-07 11:46:59
green,83,Brandon,bgriffin2a@va.gov,'73.249.128.212',2013-10-30 05:30:36
green,84,Henry,hnguyen2b@discovery.com,'211.36.214.242',1985-01-09 06:37:27
green,85,Eric,esanchez2c@edublogs.org,'191.166.188.251',2004-05-01 23:21:42
green,86,Jason,jlee2d@jimdo.com,'193.92.16.182',1973-01-08 09:05:39
green,87,Diana,drichards2e@istockphoto.com,'19.130.175.245',1994-10-05 22:50:49
green,88,Andrea,awelch2f@abc.net.au,'94.155.233.96',2002-04-26 08:41:44
green,89,Louis,lwagner2g@miitbeian.gov.cn,'26.217.34.111',2003-08-25 07:56:39
green,90,Jane,jsims2h@seesaa.net,'43.4.220.135',1987-03-20 20:39:04
green,91,Larry,lgrant2i@si.edu,'97.126.79.34',2000-09-07 20:26:19
green,92,Louis,ldean2j@prnewswire.com,'37.148.40.127',2011-09-16 20:12:14
green,93,Jennifer,jcampbell2k@xing.com,'38.106.254.142',1988-07-15 05:06:49
green,94,Wayne,wcunningham2l@google.com.hk,'223.28.26.187',2009-12-15 06:16:54
green,95,Lori,lstevens2m@icq.com,'181.250.181.58',1984-10-28 03:29:19
green,96,Judy,jsimpson2n@marriott.com,'180.121.239.219',1986-02-07 15:18:10
green,97,Phillip,phoward2o@usa.gov,'255.247.0.175',2002-12-26 08:44:45
green,98,Gloria,gwalker2p@usa.gov,'156.140.7.128',1997-10-04 07:58:58
green,99,Paul,pjohnson2q@umn.edu,'183.59.198.197',1991-11-14 12:33:55
green,100,Frank,fgreene2r@blogspot.com,'150.143.68.121',2010-06-12 23:55:39
"""
models__schema_yml = """
version: 2
sources:
- name: my_source
overrides: localdep
schema: "{{ target.schema }}"
database: "{{ target.database }}"
freshness:
error_after: {count: 3, period: day}
tables:
- name: my_table
identifier: my_real_seed
# on the override, the "color" column is only unique, it can be null!
columns:
- name: id
tests:
- not_null
- unique
- name: color
tests:
- unique
- name: my_other_table
identifier: my_real_other_seed
- name: snapshot_freshness
identifier: snapshot_freshness_base
freshness:
error_after: {count: 1, period: day}
"""
seeds__expected_result_csv = """letter,color
c,cyan
m,magenta
y,yellow
k,key
"""
seeds__my_real_other_seed_csv = """id,letter
1,c
2,m
3,y
4,k
"""
seeds__my_real_seed_csv = """id,color
1,cyan
2,magenta
3,yellow
4,key
5,NULL
"""
@pytest.fixture(scope="class")
def local_dependency():
    """Return the local dependency's project tree, keyed by relative path."""
    models = {
        "schema.yml": local_dependency__models__schema_yml,
        "my_model.sql": local_dependency__models__my_model_sql,
    }
    seeds = {
        "my_other_seed.csv": local_dependency__seeds__my_other_seed_csv,
        "my_seed.csv": local_dependency__seeds__my_seed_csv,
        "keep": {
            "never_fresh.csv": local_dependency__seeds__keep__never_fresh_csv,
            "snapshot_freshness_base.csv": local_dependency__seeds__keep__snapshot_freshness_base_csv,
        },
    }
    return {
        "dbt_project.yml": local_dependency__dbt_project_yml,
        "models": models,
        "seeds": seeds,
    }
| 42.637566
| 120
| 0.70224
| 3,018
| 16,117
| 3.675282
| 0.220013
| 0.020285
| 0.015417
| 0.019473
| 0.694104
| 0.670483
| 0.647764
| 0.626127
| 0.617743
| 0.605842
| 0
| 0.28792
| 0.116027
| 16,117
| 377
| 121
| 42.750663
| 0.49063
| 0
| 0
| 0.644699
| 0
| 0.429799
| 0.927778
| 0.598933
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002865
| false
| 0
| 0.002865
| 0.002865
| 0.008596
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ed96403cf8c630fdd360330bd91b886abb9c94d3
| 28
|
py
|
Python
|
neuralcoref/cli/__init__.py
|
lukovnikov/neuralcoref
|
12874fece046eb513530d778766366a3d606158d
|
[
"MIT"
] | 1
|
2019-04-03T12:08:48.000Z
|
2019-04-03T12:08:48.000Z
|
neuralcoref/cli/__init__.py
|
Sivaneshmsc/neuralcoref
|
8c266c4ec9a5532c4998a85a21fbf2bbfc3b1485
|
[
"MIT"
] | null | null | null |
neuralcoref/cli/__init__.py
|
Sivaneshmsc/neuralcoref
|
8c266c4ec9a5532c4998a85a21fbf2bbfc3b1485
|
[
"MIT"
] | null | null | null |
from .package import package
| 28
| 28
| 0.857143
| 4
| 28
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 1
| 28
| 28
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eda264c65932fd537503cd16e4b0e61c1db215e6
| 24
|
py
|
Python
|
i8_ae/__init__.py
|
ShahabCypher/i8.ae
|
1b24556138e6dea80e9d0a7979b96f297748bf8b
|
[
"MIT"
] | null | null | null |
i8_ae/__init__.py
|
ShahabCypher/i8.ae
|
1b24556138e6dea80e9d0a7979b96f297748bf8b
|
[
"MIT"
] | null | null | null |
i8_ae/__init__.py
|
ShahabCypher/i8.ae
|
1b24556138e6dea80e9d0a7979b96f297748bf8b
|
[
"MIT"
] | null | null | null |
from i8_ae.i8 import i8
| 12
| 23
| 0.791667
| 6
| 24
| 3
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 0.166667
| 24
| 1
| 24
| 24
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6120343d1f0cdd74a39b580c9ce5d6e0d8f85397
| 433
|
py
|
Python
|
drone_api_client/model/template.py
|
Slamnlc/drone-api-client
|
ffd4cee4b118144f89621a2be1ee3a75938851e7
|
[
"MIT"
] | null | null | null |
drone_api_client/model/template.py
|
Slamnlc/drone-api-client
|
ffd4cee4b118144f89621a2be1ee3a75938851e7
|
[
"MIT"
] | null | null | null |
drone_api_client/model/template.py
|
Slamnlc/drone-api-client
|
ffd4cee4b118144f89621a2be1ee3a75938851e7
|
[
"MIT"
] | null | null | null |
class DroneTemplate:
    """Read-only wrapper over a Drone template API payload (a dict)."""

    __attr__ = ('id', 'name', 'namespace', 'data')

    def __init__(self, data: dict):
        # Keep the raw payload; properties read from it on demand.
        self._data = data

    def _field(self, key):
        # Missing keys yield None rather than raising.
        return self._data.get(key)

    @property
    def id(self):
        return self._field('id')

    @property
    def name(self):
        return self._field('name')

    @property
    def namespace(self):
        return self._field('namespace')

    @property
    def data(self):
        return self._field('data')
| 19.681818
| 50
| 0.579677
| 51
| 433
| 4.666667
| 0.27451
| 0.201681
| 0.235294
| 0.302521
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.277136
| 433
| 21
| 51
| 20.619048
| 0.760383
| 0
| 0
| 0.25
| 0
| 0
| 0.08776
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3125
| false
| 0
| 0
| 0.25
| 0.6875
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
b6281082c5a191405d276cea774802cc099841b8
| 103
|
py
|
Python
|
thor/spdy/__init__.py
|
alex-stefa/thor
|
a4f9ad7b35b824639a836e27eda7e6c1c6f34e22
|
[
"Unlicense"
] | null | null | null |
thor/spdy/__init__.py
|
alex-stefa/thor
|
a4f9ad7b35b824639a836e27eda7e6c1c6f34e22
|
[
"Unlicense"
] | null | null | null |
thor/spdy/__init__.py
|
alex-stefa/thor
|
a4f9ad7b35b824639a836e27eda7e6c1c6f34e22
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python
from thor.spdy.client import SpdyClient
from thor.spdy.server import SpdyServer
| 20.6
| 39
| 0.805825
| 16
| 103
| 5.1875
| 0.75
| 0.192771
| 0.289157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106796
| 103
| 4
| 40
| 25.75
| 0.902174
| 0.194175
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b65dca70c6acc9250b92fb72679ac6ecc3e8263a
| 4,432
|
py
|
Python
|
tests/test_format.py
|
knowark/validark
|
bab931768671ab92f3d495a9254801e42af621a8
|
[
"MIT"
] | null | null | null |
tests/test_format.py
|
knowark/validark
|
bab931768671ab92f3d495a9254801e42af621a8
|
[
"MIT"
] | null | null | null |
tests/test_format.py
|
knowark/validark
|
bab931768671ab92f3d495a9254801e42af621a8
|
[
"MIT"
] | null | null | null |
from validark import camel_to_snake, snake_to_camel, normalize
def test_camel_to_snake():
    """camel_to_snake handles camelCase, PascalCase and already-snake input."""
    for original in ('theBrownFox', 'TheBrownFox', 'the_brown_fox'):
        assert camel_to_snake(original) == 'the_brown_fox'
def test_snake_to_camel():
    """snake_to_camel maps any casing of the words to lower camelCase."""
    for original in ('the_brown_fox', 'the_Brown_fox', 'The_Brown_Fox', 'theBrownFox'):
        assert snake_to_camel(original) == 'theBrownFox'
def test_snake_normalize_simple():
    """normalize(..., 'snake') rewrites top-level camelCase keys to snake_case."""
    given = {
        'userId': '001',
        'name': 'Jhon Doe',
        'accountId': '7890'
    }
    expected = {
        'user_id': '001',
        'name': 'Jhon Doe',
        'account_id': '7890'
    }
    assert normalize(given, 'snake') == expected
def test_snake_normalize_nested():
    """Camel keys inside nested mappings are converted as well."""
    given = {
        'userId': '001',
        'address': {
            'city': 'Popayán',
            'zipCode': 190002
        },
        'name': 'Jhon Doe',
        'accountId': '7890'
    }
    expected = {
        'user_id': '001',
        'address': {
            'city': 'Popayán',
            'zip_code': 190002
        },
        'name': 'Jhon Doe',
        'account_id': '7890'
    }
    assert normalize(given, 'snake') == expected
def test_snake_normalize_deep_nested():
    """Conversion recurses through multiple levels of nested mappings."""
    given = {
        'userId': '001',
        'address': {
            'city': 'Popayán',
            'zipCode': 190002,
            'neighborhood': {
                'zone': 'A',
                'hubLocality': 'Lake Ville'
            }
        },
        'name': 'Jhon Doe',
        'accountId': '7890'
    }
    expected = {
        'user_id': '001',
        'address': {
            'city': 'Popayán',
            'zip_code': 190002,
            'neighborhood': {
                'zone': 'A',
                'hub_locality': 'Lake Ville'
            }
        },
        'name': 'Jhon Doe',
        'account_id': '7890'
    }
    assert normalize(given, 'snake') == expected
def test_snake_normalize_list():
    """Mappings held inside list values are normalized element-wise."""
    given = {
        'userId': '001',
        'name': 'Jhon Doe',
        'accountId': '7890',
        'contacts': [
            {'userId': '002', 'name': 'Mark', 'phoneNumber': '2222'},
            {'userId': '003', 'name': 'Brad', 'phoneNumber': '3333'},
            {'userId': '004', 'name': 'Carl', 'phoneNumber': '4444'},
        ]
    }
    expected = {
        'user_id': '001',
        'name': 'Jhon Doe',
        'account_id': '7890',
        'contacts': [
            {'user_id': '002', 'name': 'Mark', 'phone_number': '2222'},
            {'user_id': '003', 'name': 'Brad', 'phone_number': '3333'},
            {'user_id': '004', 'name': 'Carl', 'phone_number': '4444'}
        ]
    }
    assert normalize(given, 'snake') == expected
def test_camel_normalize_complex():
    """With no mode argument, normalize converts snake_case keys to camelCase."""
    given = {
        'user_id': '001',
        'name': 'Jhon Doe',
        'account_id': '7890',
        'contacts': [
            {'user_id': '002', 'name': 'Mark', 'phone_number': '2222'},
            {'user_id': '003', 'name': 'Brad', 'phone_number': '3333'},
            {'user_id': '004', 'name': 'Carl', 'phone_number': '4444'}
        ]
    }
    expected = {
        'userId': '001',
        'name': 'Jhon Doe',
        'accountId': '7890',
        'contacts': [
            {'userId': '002', 'name': 'Mark', 'phoneNumber': '2222'},
            {'userId': '003', 'name': 'Brad', 'phoneNumber': '3333'},
            {'userId': '004', 'name': 'Carl', 'phoneNumber': '4444'}
        ]
    }
    assert normalize(given) == expected
def test_camel_normalize_list():
    """A top-level list of mappings is normalized to camelCase element-wise."""
    given = [
        {'user_id': '002', 'name': 'Mark', 'phone_number': '2222'},
        {'user_id': '003', 'name': 'Brad', 'phone_number': '3333'},
        {'user_id': '004', 'name': 'Carl', 'phone_number': '4444'}
    ]
    expected = [
        {'userId': '002', 'name': 'Mark', 'phoneNumber': '2222'},
        {'userId': '003', 'name': 'Brad', 'phoneNumber': '3333'},
        {'userId': '004', 'name': 'Carl', 'phoneNumber': '4444'}
    ]
    assert normalize(given) == expected
def test_camel_normalize_primitives():
    """Non-mapping list items (strings, numbers) pass through unchanged."""
    given = [
        {'user_id': '002', 'name': 'Mark', 'phone_number': '2222'},
        "admin",
        5
    ]
    expected = [
        {'userId': '002', 'name': 'Mark', 'phoneNumber': '2222'},
        "admin",
        5
    ]
    assert normalize(given) == expected
def test_camel_normalize_none():
    """None is returned as-is rather than raising."""
    assert normalize(None) is None
| 27.02439
| 71
| 0.497744
| 435
| 4,432
| 4.813793
| 0.144828
| 0.04298
| 0.052531
| 0.040115
| 0.844795
| 0.817574
| 0.817574
| 0.817574
| 0.817574
| 0.795129
| 0
| 0.077099
| 0.309341
| 4,432
| 163
| 72
| 27.190184
| 0.606991
| 0
| 0
| 0.634328
| 0
| 0
| 0.320171
| 0
| 0
| 0
| 0
| 0
| 0.11194
| 1
| 0.074627
| false
| 0
| 0.007463
| 0
| 0.08209
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fcf88cb4e95665505f0495a1ab3bcfef74045c8c
| 2,078
|
py
|
Python
|
tries/test/plots/get_keys_prefix.py
|
hpaucar/datastructures-ii-repo
|
203dbafcd4bb82a4214f93e21f15b3be89cea76c
|
[
"MIT"
] | null | null | null |
tries/test/plots/get_keys_prefix.py
|
hpaucar/datastructures-ii-repo
|
203dbafcd4bb82a4214f93e21f15b3be89cea76c
|
[
"MIT"
] | null | null | null |
tries/test/plots/get_keys_prefix.py
|
hpaucar/datastructures-ii-repo
|
203dbafcd4bb82a4214f93e21f15b3be89cea76c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2.7
"""Plot 'get keys prefix' timing curves for several data structures.

Reads the benchmark output files and writes .eps comparison figures for
different subsets of the structures (ALL, TRMU, RMU, TTR).
"""
from numpy import *
from pylab import *
from matplotlib import rc, rcParams

trie = genfromtxt('../data/trie_get_keys_prefix.output')
tst = genfromtxt('../data/tst_get_keys_prefix.output')
radix = genfromtxt('../data/radix_get_keys_prefix.output')
_map = genfromtxt('../data/map_get_keys_prefix.output')
umap = genfromtxt('../data/umap_get_keys_prefix.output')

######## TIME ########
def plot_time_comparison(series, outfile):
    """Plot column 1 vs column 0 of each (label, data) pair and save.

    Relies on pylab's deprecated global hold() state (removed in
    matplotlib >= 3.0): hold(False) makes the first plot() clear the
    axes left over from the previous figure, hold(True) then lets the
    remaining curves accumulate on the same axes.
    """
    hold(False)
    first_label, first_data = series[0]
    plot(first_data[:,0], first_data[:,1], '-o', label=first_label)
    hold(True)
    for name, data in series[1:]:
        plot(data[:,0], data[:,1], '-o', label=name)
    xlabel('Max length of the string')
    ylabel('Time(ms)')
    title('Get keys prefix')
    legend(loc='best')
    grid(True)
    savefig(outfile)

plot_time_comparison(
    [('Trie', trie), ('Ternary Search Tree', tst), ('Radix Tree', radix),
     ('STL ordered Map', _map), ('STL unordered Map', umap)],
    '../images/get_keys_prefix/random/get_keys_prefix_time_ALL.eps')
plot_time_comparison(
    [('Ternary Search Tree', tst), ('Radix Tree', radix),
     ('STL ordered Map', _map), ('STL unordered Map', umap)],
    '../images/get_keys_prefix/random/get_keys_prefix_time_TRMU.eps')
plot_time_comparison(
    [('Radix Tree', radix), ('STL ordered Map', _map),
     ('STL unordered Map', umap)],
    '../images/get_keys_prefix/random/get_keys_prefix_time_RMU.eps')
plot_time_comparison(
    [('Trie', trie), ('Ternary Search Tree', tst), ('Radix Tree', radix)],
    '../images/get_keys_prefix/random/get_keys_prefix_time_TTR.eps')
| 30.115942
| 73
| 0.670356
| 333
| 2,078
| 4.045045
| 0.177177
| 0.088344
| 0.164068
| 0.044543
| 0.738679
| 0.738679
| 0.738679
| 0.738679
| 0.738679
| 0.715664
| 0
| 0.016887
| 0.088065
| 2,078
| 68
| 74
| 30.558824
| 0.693931
| 0.013474
| 0
| 0.777778
| 0
| 0
| 0.420276
| 0.206201
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1e1e5a064f3f9bf48c1fab2d48ec8d7553ebf746
| 16,330
|
py
|
Python
|
tensorflow_checkpoint_reader/pb/tensorflow/core/framework/api_def_pb2.py
|
shawwn/tensorflow-checkpoint-reader
|
f0e65548411e3bd66a07e36bb1850907a05952d0
|
[
"MIT"
] | 1
|
2021-12-02T15:06:09.000Z
|
2021-12-02T15:06:09.000Z
|
tensorflow_checkpoint_reader/pb/tensorflow/core/framework/api_def_pb2.py
|
shawwn/tensorflow-checkpoint-reader
|
f0e65548411e3bd66a07e36bb1850907a05952d0
|
[
"MIT"
] | null | null | null |
tensorflow_checkpoint_reader/pb/tensorflow/core/framework/api_def_pb2.py
|
shawwn/tensorflow-checkpoint-reader
|
f0e65548411e3bd66a07e36bb1850907a05952d0
|
[
"MIT"
] | null | null | null |
'Generated protocol buffer code.'
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
from ....tensorflow.core.framework import attr_value_pb2 as tensorflow_dot_core_dot_framework_dot_attr__value__pb2
DESCRIPTOR = _descriptor.FileDescriptor(name='tensorflow/core/framework/api_def.proto', package='tensorflow', syntax='proto3', serialized_options=b'\n\x18org.tensorflow.frameworkB\x0cApiDefProtosP\x01ZNgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/api_def_go_proto\xf8\x01\x01', create_key=_descriptor._internal_create_key, serialized_pb=b'\n\'tensorflow/core/framework/api_def.proto\x12\ntensorflow\x1a*tensorflow/core/framework/attr_value.proto"\xe1\x05\n\x06ApiDef\x12\x15\n\rgraph_op_name\x18\x01 \x01(\t\x12\x1b\n\x13deprecation_message\x18\x0c \x01(\t\x12\x1b\n\x13deprecation_version\x18\r \x01(\x05\x121\n\nvisibility\x18\x02 \x01(\x0e2\x1d.tensorflow.ApiDef.Visibility\x12-\n\x08endpoint\x18\x03 \x03(\x0b2\x1b.tensorflow.ApiDef.Endpoint\x12&\n\x06in_arg\x18\x04 \x03(\x0b2\x16.tensorflow.ApiDef.Arg\x12\'\n\x07out_arg\x18\x05 \x03(\x0b2\x16.tensorflow.ApiDef.Arg\x12\x11\n\targ_order\x18\x0b \x03(\t\x12%\n\x04attr\x18\x06 \x03(\x0b2\x17.tensorflow.ApiDef.Attr\x12\x0f\n\x07summary\x18\x07 \x01(\t\x12\x13\n\x0bdescription\x18\x08 \x01(\t\x12\x1a\n\x12description_prefix\x18\t \x01(\t\x12\x1a\n\x12description_suffix\x18\n \x01(\t\x1aI\n\x08Endpoint\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ndeprecated\x18\x03 \x01(\x08\x12\x1b\n\x13deprecation_version\x18\x04 \x01(\x05\x1a;\n\x03Arg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\trename_to\x18\x02 \x01(\t\x12\x13\n\x0bdescription\x18\x03 \x01(\t\x1aj\n\x04Attr\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\trename_to\x18\x02 \x01(\t\x12,\n\rdefault_value\x18\x03 \x01(\x0b2\x15.tensorflow.AttrValue\x12\x13\n\x0bdescription\x18\x04 \x01(\t"G\n\nVisibility\x12\x16\n\x12DEFAULT_VISIBILITY\x10\x00\x12\x0b\n\x07VISIBLE\x10\x01\x12\x08\n\x04SKIP\x10\x02\x12\n\n\x06HIDDEN\x10\x03")\n\x07ApiDefs\x12\x1e\n\x02op\x18\x01 \x03(\x0b2\x12.tensorflow.ApiDefB}\n\x18org.tensorflow.frameworkB\x0cApiDefProtosP\x01ZNgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/api_def_go_proto\xf8\x01\x01b\x06proto3', 
dependencies=[tensorflow_dot_core_dot_framework_dot_attr__value__pb2.DESCRIPTOR])
_APIDEF_VISIBILITY = _descriptor.EnumDescriptor(name='Visibility', full_name='tensorflow.ApiDef.Visibility', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='DEFAULT_VISIBILITY', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='VISIBLE', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='SKIP', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='HIDDEN', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=766, serialized_end=837)
_sym_db.RegisterEnumDescriptor(_APIDEF_VISIBILITY)
_APIDEF_ENDPOINT = _descriptor.Descriptor(name='Endpoint', full_name='tensorflow.ApiDef.Endpoint', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='name', full_name='tensorflow.ApiDef.Endpoint.name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='deprecated', full_name='tensorflow.ApiDef.Endpoint.deprecated', index=1, number=3, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='deprecation_version', full_name='tensorflow.ApiDef.Endpoint.deprecation_version', index=2, number=4, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=522, serialized_end=595)
_APIDEF_ARG = _descriptor.Descriptor(name='Arg', full_name='tensorflow.ApiDef.Arg', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='name', full_name='tensorflow.ApiDef.Arg.name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='rename_to', full_name='tensorflow.ApiDef.Arg.rename_to', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='description', full_name='tensorflow.ApiDef.Arg.description', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=597, serialized_end=656)
_APIDEF_ATTR = _descriptor.Descriptor(name='Attr', full_name='tensorflow.ApiDef.Attr', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='name', full_name='tensorflow.ApiDef.Attr.name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='rename_to', full_name='tensorflow.ApiDef.Attr.rename_to', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='default_value', full_name='tensorflow.ApiDef.Attr.default_value', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='description', full_name='tensorflow.ApiDef.Attr.description', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=658, serialized_end=764)
_APIDEF = _descriptor.Descriptor(name='ApiDef', full_name='tensorflow.ApiDef', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='graph_op_name', full_name='tensorflow.ApiDef.graph_op_name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='deprecation_message', full_name='tensorflow.ApiDef.deprecation_message', index=1, number=12, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='deprecation_version', full_name='tensorflow.ApiDef.deprecation_version', index=2, number=13, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='visibility', full_name='tensorflow.ApiDef.visibility', index=3, number=2, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='endpoint', full_name='tensorflow.ApiDef.endpoint', index=4, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='in_arg', full_name='tensorflow.ApiDef.in_arg', index=5, number=4, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='out_arg', full_name='tensorflow.ApiDef.out_arg', index=6, number=5, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='arg_order', full_name='tensorflow.ApiDef.arg_order', index=7, number=11, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='attr', full_name='tensorflow.ApiDef.attr', index=8, number=6, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='summary', full_name='tensorflow.ApiDef.summary', index=9, number=7, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='description', 
full_name='tensorflow.ApiDef.description', index=10, number=8, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='description_prefix', full_name='tensorflow.ApiDef.description_prefix', index=11, number=9, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='description_suffix', full_name='tensorflow.ApiDef.description_suffix', index=12, number=10, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[_APIDEF_ENDPOINT, _APIDEF_ARG, _APIDEF_ATTR], enum_types=[_APIDEF_VISIBILITY], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=100, serialized_end=837)
_APIDEFS = _descriptor.Descriptor(name='ApiDefs', full_name='tensorflow.ApiDefs', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='op', full_name='tensorflow.ApiDefs.op', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=839, serialized_end=880)
# --- Generated protocol buffer wiring (machine-generated; do not edit) ---
# Link nested message/enum descriptors to their containing message and
# resolve field references that cross descriptor objects.
_APIDEF_ENDPOINT.containing_type = _APIDEF
_APIDEF_ARG.containing_type = _APIDEF
# default_value is a message field whose type lives in attr_value.proto.
_APIDEF_ATTR.fields_by_name['default_value'].message_type = tensorflow_dot_core_dot_framework_dot_attr__value__pb2._ATTRVALUE
_APIDEF_ATTR.containing_type = _APIDEF
_APIDEF.fields_by_name['visibility'].enum_type = _APIDEF_VISIBILITY
_APIDEF.fields_by_name['endpoint'].message_type = _APIDEF_ENDPOINT
_APIDEF.fields_by_name['in_arg'].message_type = _APIDEF_ARG
_APIDEF.fields_by_name['out_arg'].message_type = _APIDEF_ARG
_APIDEF.fields_by_name['attr'].message_type = _APIDEF_ATTR
_APIDEF_VISIBILITY.containing_type = _APIDEF
_APIDEFS.fields_by_name['op'].message_type = _APIDEF
# Expose the top-level message types on the file descriptor and register it.
DESCRIPTOR.message_types_by_name['ApiDef'] = _APIDEF
DESCRIPTOR.message_types_by_name['ApiDefs'] = _APIDEFS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Build concrete message classes from the descriptors via the
# GeneratedProtocolMessageType metaclass (nested Endpoint/Arg/Attr classes
# included), then register each with the symbol database.
ApiDef = _reflection.GeneratedProtocolMessageType('ApiDef', (_message.Message,), {'Endpoint': _reflection.GeneratedProtocolMessageType('Endpoint', (_message.Message,), {'DESCRIPTOR': _APIDEF_ENDPOINT, '__module__': 'tensorflow.core.framework.api_def_pb2'}), 'Arg': _reflection.GeneratedProtocolMessageType('Arg', (_message.Message,), {'DESCRIPTOR': _APIDEF_ARG, '__module__': 'tensorflow.core.framework.api_def_pb2'}), 'Attr': _reflection.GeneratedProtocolMessageType('Attr', (_message.Message,), {'DESCRIPTOR': _APIDEF_ATTR, '__module__': 'tensorflow.core.framework.api_def_pb2'}), 'DESCRIPTOR': _APIDEF, '__module__': 'tensorflow.core.framework.api_def_pb2'})
_sym_db.RegisterMessage(ApiDef)
_sym_db.RegisterMessage(ApiDef.Endpoint)
_sym_db.RegisterMessage(ApiDef.Arg)
_sym_db.RegisterMessage(ApiDef.Attr)
ApiDefs = _reflection.GeneratedProtocolMessageType('ApiDefs', (_message.Message,), {'DESCRIPTOR': _APIDEFS, '__module__': 'tensorflow.core.framework.api_def_pb2'})
_sym_db.RegisterMessage(ApiDefs)
# File-level serialized options were consumed above; clear them.
DESCRIPTOR._options = None
| 418.717949
| 5,356
| 0.823148
| 2,331
| 16,330
| 5.435006
| 0.083226
| 0.05178
| 0.085484
| 0.074592
| 0.753256
| 0.715842
| 0.69011
| 0.652932
| 0.644644
| 0.628305
| 0
| 0.036322
| 0.042376
| 16,330
| 38
| 5,357
| 429.736842
| 0.77382
| 0.001898
| 0
| 0
| 1
| 1.324324
| 0.187519
| 0.142568
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.135135
| 0
| 0.135135
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1e3ef1151e539719b27f7e56b56d8a3e9e0a31ad
| 35
|
py
|
Python
|
febio_python/xplt/__init__.py
|
Nobregaigor/febio-python
|
6895b4e2f8959444a66b6f77a000ce193c3357a7
|
[
"MIT"
] | null | null | null |
febio_python/xplt/__init__.py
|
Nobregaigor/febio-python
|
6895b4e2f8959444a66b6f77a000ce193c3357a7
|
[
"MIT"
] | null | null | null |
febio_python/xplt/__init__.py
|
Nobregaigor/febio-python
|
6895b4e2f8959444a66b6f77a000ce193c3357a7
|
[
"MIT"
] | null | null | null |
from .xplt_parser import read_xplt
| 17.5
| 34
| 0.857143
| 6
| 35
| 4.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1e6a65aa837865df4b7fe5df02dc4c990e224da1
| 23,910
|
py
|
Python
|
world/views.py
|
Manzanero/bardo-web
|
ce60f3590501adccfa6444a6b2abf0a9d141fbf7
|
[
"MIT"
] | null | null | null |
world/views.py
|
Manzanero/bardo-web
|
ce60f3590501adccfa6444a6b2abf0a9d141fbf7
|
[
"MIT"
] | null | null | null |
world/views.py
|
Manzanero/bardo-web
|
ce60f3590501adccfa6444a6b2abf0a9d141fbf7
|
[
"MIT"
] | null | null | null |
import json
from datetime import datetime
from json.decoder import JSONDecodeError
from django.db.models import Q
from django.http import JsonResponse, Http404
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from utils.decorators import require_basic_auth, redirect_preflight
from utils.exceptions import get_stacktrace_str
from world.models import Campaign, Map, Action, CampaignProperty, MapProperty
@redirect_preflight
@require_basic_auth
@require_http_methods(["GET"])
def load_world(request):
    """Return every campaign (name + id), ordered by name, as JSON.

    Always responds with a body of shape {'status', 'message', ...};
    the HTTP status code mirrors response['status'].
    """
    try:
        campaigns = Campaign.objects.all().order_by('name')
        response = {
            'status': 200,
            'message': 'World loaded',  # was an f-string with no placeholders
            'world': {
                'campaigns': [{'name': x.name, 'id': x.campaign_id} for x in campaigns]
            }
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e)}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        # Last-resort handler: report the stacktrace instead of crashing.
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@redirect_preflight
@require_basic_auth
@require_http_methods(["GET"])
def load_campaign(request, campaign_id):
    """Return one campaign's properties, maps and players as JSON.

    The caller's own properties take precedence over campaign-wide
    defaults (rows with user=NULL) of the same name. Players are the
    users holding an IS_PLAYER property; the master is identified by
    the single IS_MASTER property.
    """
    try:
        campaign = get_object_or_404(Campaign, campaign_id=campaign_id)
        user_properties = CampaignProperty.objects.filter(campaign=campaign, user=request.user)
        properties = [{'name': x.name, 'value': x.value} for x in user_properties]
        # Set instead of list: O(1) membership test per default property.
        property_names = {p.name for p in user_properties}
        default_properties = CampaignProperty.objects.filter(campaign=campaign, user__isnull=True)
        for p in default_properties:
            if p.name not in property_names:
                properties.append({'name': p.name, 'value': p.value})
        players = [x.user for x in CampaignProperty.objects.filter(campaign=campaign, name='IS_PLAYER')]
        master_name = get_object_or_404(CampaignProperty, campaign=campaign, name='IS_MASTER').user.username
        response = {
            'status': 200,
            'message': f'Campaign loaded (name={campaign.name}, id={campaign.campaign_id})',
            'date': timezone.localtime(campaign.updated).isoformat(timespec='microseconds'),
            'campaign': {
                'properties': properties,
                'maps': [{'name': x.name, 'id': x.map_id} for x in campaign.map_set.order_by('name')],
                'players': [{'name': x.username, 'id': x.id, 'master': x.username == master_name} for x in players],
            }
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e)}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@redirect_preflight
@require_basic_auth
@require_http_methods(["GET"])
def load_campaign_property(request, campaign_id, property_name):
    """Return a single campaign property as JSON, preferring the
    caller's own value over the campaign-wide (user=NULL) default.
    """
    try:
        campaign = get_object_or_404(Campaign, campaign_id=campaign_id)
        props = CampaignProperty.objects.filter(campaign=campaign, user=request.user, name=property_name)
        # Fall back to the user-less default (404s if neither exists).
        prop = props[0] if props else get_object_or_404(CampaignProperty,
                                                        campaign=campaign, user=None, name=property_name)
        response = {
            'status': 200,
            'message': f'Campaign Property loaded (campaign_name={campaign.name}, name={property_name})',
            'campaign': {'properties': [{'name': prop.name, 'value': prop.value}]}
        }
    except Http404 as e:
        # Bug fix: the detail suffix was missing its closing parenthesis.
        response = {'status': 404, 'message': str(e) + f' (campaign={campaign_id}, property={property_name})'}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["POST"])
def save_campaign_property(request, campaign_id, property_name):
    """Create or overwrite the caller's property on the given campaign.

    The raw request body is stored verbatim (UTF-8 decoded) as the
    property value.
    """
    try:
        campaign = get_object_or_404(Campaign, campaign_id=campaign_id)
        prop = CampaignProperty.objects.get_or_create(campaign=campaign, user=request.user, name=property_name)[0]
        # name is already set by get_or_create's lookup kwargs; the
        # redundant reassignment from the original was removed.
        prop.value = request.body.decode('utf-8')
        prop.save()
        response = {
            'status': 200,
            'message': f'Campaign Property saved (campaign_name={campaign.name}, '
                       f'name={property_name}, value={prop.value})',
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e) + f' (campaign={campaign_id})'}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["POST"])
def default_campaign_property(request, campaign_id, property_name):
    """Create or overwrite the campaign-wide default (user=NULL) value
    of a property. The raw request body becomes the property value.
    """
    try:
        campaign = get_object_or_404(Campaign, campaign_id=campaign_id)
        prop = CampaignProperty.objects.get_or_create(campaign=campaign, user=None, name=property_name)[0]
        # name is already set by get_or_create's lookup kwargs; the
        # redundant reassignment from the original was removed.
        prop.value = request.body.decode('utf-8')
        prop.save()
        response = {
            'status': 200,
            'message': f'Campaign Property default (campaign_name={campaign.name}, '
                       f'name={property_name}, value={prop.value})',
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e) + f' (campaign={campaign_id})'}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["DELETE"])
def delete_campaign_property(request, campaign_id, property_name):
    """Delete the caller's own property on the given campaign (404 if
    the campaign or the property does not exist)."""
    try:
        campaign = get_object_or_404(Campaign, campaign_id=campaign_id)
        target = get_object_or_404(CampaignProperty, campaign=campaign,
                                   user=request.user, name=property_name)
        target.delete()
        message = f'Campaign Property deleted (campaign_name={campaign.name})'
        response = {'status': 200, 'message': message}
    except Http404 as e:
        detail = f' (campaign={campaign_id}, name={property_name})'
        response = {'status': 404, 'message': str(e) + detail}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["POST"])
def save_map_property(request, campaign_id, map_id, property_name):
    """Create or overwrite the caller's property on the given map; the
    raw request body is stored as the value."""
    try:
        tile_map = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        prop, _created = MapProperty.objects.get_or_create(
            map=tile_map, user=request.user, name=property_name)
        prop.value = request.body.decode('utf-8')
        prop.save()
        message = (f'Map Property saved (map_name={tile_map.name}, '
                   f'name={property_name}, value={prop.value})')
        response = {'status': 200, 'message': message}
    except Http404 as e:
        detail = f' (campaign={campaign_id}, map={map_id})'
        response = {'status': 404, 'message': str(e) + detail}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["POST"])
def default_map_property(request, campaign_id, map_id, property_name):
    """Create or overwrite the map-wide default (user=NULL) value of a
    property; the raw request body is stored as the value."""
    try:
        tile_map = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        prop, _created = MapProperty.objects.get_or_create(
            map=tile_map, user=None, name=property_name)
        prop.value = request.body.decode('utf-8')
        prop.save()
        message = (f'Map Property default (map_name={tile_map.name}, '
                   f'name={property_name}, value={prop.value})')
        response = {'status': 200, 'message': message}
    except Http404 as e:
        detail = f' (campaign={campaign_id}, map={map_id})'
        response = {'status': 404, 'message': str(e) + detail}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["DELETE"])
def delete_map_property(request, campaign_id, map_id, property_name):
    """Delete the caller's own property on the given map (404 if the
    map or the property does not exist)."""
    try:
        tile_map = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        prop = get_object_or_404(MapProperty, map=tile_map, user=request.user, name=property_name)
        prop.delete()
        response = {
            'status': 200,
            'message': f'Map Property deleted (map_name={tile_map.name}, name={property_name})',
        }
    except Http404 as e:
        # Bug fix: the detail suffix was missing its closing parenthesis.
        response = {'status': 404, 'message': str(e) + f' (campaign={campaign_id}, map={map_id}, name={property_name})'}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@redirect_preflight
@require_basic_auth
@require_http_methods(["GET"])
def load_map(request, campaign_id, map_id):
    """Return a map's stored JSON data plus its last-saved timestamp."""
    try:
        tile_map = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        saved_at = timezone.localtime(tile_map.saved)
        response = {
            'status': 200,
            'message': f'Map loaded (name={tile_map.name}, id={map_id})',
            'date': saved_at.isoformat(timespec='microseconds'),
            'map': json.loads(tile_map.data)
        }
    except Http404 as e:
        detail = f' (campaign={campaign_id}, map={map_id})'
        response = {'status': 404, 'message': str(e) + detail}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@redirect_preflight
@require_basic_auth
@require_http_methods(["GET"])
def load_map_properties(request, campaign_id, map_id):
    """Return the caller's map properties together with the map-wide
    (user=NULL) defaults, fetched in a single OR query.

    Note: unlike load_campaign, no de-duplication is applied — when a
    name exists both as a user override and as a default, both rows are
    returned.
    """
    try:
        tile_map = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        user_properties = MapProperty.objects.filter(Q(map=tile_map, user=request.user) |
                                                     Q(map=tile_map, user__isnull=True))
        properties = [{'name': x.name, 'value': x.value} for x in user_properties]
        response = {
            'status': 200,
            'message': f'Map Properties loaded (len={len(properties)})',
            'date': timezone.localtime(tile_map.saved).isoformat(timespec='microseconds'),
            'properties': properties
        }
    except Http404 as e:
        # Bug fix: the detail suffix was missing its closing parenthesis.
        response = {'status': 404, 'message': str(e) + f' (campaign={campaign_id}, map={map_id})'}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@redirect_preflight
@require_basic_auth
@require_http_methods(["GET"])
def load_map_properties_for_user(request, campaign_id, map_id, user_id):
    """Return the map properties visible to a specific user (by id).

    Same contract as load_map_properties, but the target user is given
    explicitly instead of being taken from the request.
    """
    try:
        tile_map = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        # The target user's rows plus the default (user NULL) rows.
        user_properties = MapProperty.objects.filter(Q(map=tile_map, user_id=user_id) |
                                                     Q(map=tile_map, user__isnull=True))
        properties = [{'name': x.name, 'value': x.value} for x in user_properties]
        response = {
            'status': 200,
            'message': f'Map Properties loaded (len={len(properties)})',
            'date': timezone.localtime(tile_map.saved).isoformat(timespec='microseconds'),
            'properties': properties
        }
    except Http404 as e:
        # Fixed: the context suffix was missing its closing parenthesis.
        response = {'status': 404, 'message': str(e) + f' (campaign={campaign_id}, map={map_id})'}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
# Property names used to record per-entity sharing grants as MapProperty rows.
# The permission endpoints below filter on this list when resetting or
# rewriting what a player may see or control for a given entity.
PERMISSIONS = [
    "SHARED_NAME",
    "SHARED_POSITION",
    "SHARED_VISION",
    "SHARED_CONTROL",
    "SHARED_HEALTH",
    "SHARED_STAMINA",
    "SHARED_MANA",
]
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["POST"])
def reset_permissions(request, campaign_id, map_id):
    """Delete every sharing-permission row for the listed entities on a map.

    Body: {"entities": [...]}; rows for any user (including defaults) whose
    value matches one of the entities are removed.
    """
    try:
        the_map = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        payload = json.loads(request.body.decode('utf-8'))
        entities = payload['entities']
        # One bulk delete over all permission names for the given entities.
        MapProperty.objects.filter(map=the_map, name__in=PERMISSIONS, value__in=entities).delete()
        response = {
            'status': 200,
            'message': f'Map Permissions reset for entities (id__in={entities})',
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e) + f' (campaign={campaign_id}, map={map_id})'}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["POST"])
def default_permissions(request, campaign_id, map_id):
    """Remove permission overrides so the listed entities revert to defaults.

    Body: {"entities": [...], "players": [...]}.  With a non-empty player
    list only those players' override rows are deleted; otherwise the
    campaign-wide default rows (user NULL) are deleted instead.
    """
    try:
        the_map = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        payload = json.loads(request.body.decode('utf-8'))
        entities = payload['entities']
        players = payload['players']
        matching = MapProperty.objects.filter(map=the_map, name__in=PERMISSIONS,
                                              value__in=entities)
        if players:
            matching.filter(user_id__in=players).delete()
        else:
            matching.filter(user__isnull=True).delete()
        response = {
            'status': 200,
            'message': f'Map Permissions default for entities (id__in={entities})',
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e) + f' (campaign={campaign_id}, map={map_id})'}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["POST"])
def map_permissions(request, campaign_id, map_id):
    """Rewrite sharing permissions for (player, entity) pairs on a map.

    Body: {"permissions": [{"entity": ..., "player": <user id or falsy>,
    "permission": "name"|"position"|"vision"|"control"|"health"|"stamina"|
    "mana"}, ...]}.  On a player's first appearance in the list, that
    player's existing permission rows for the named entity are cleared;
    the requested rows are then (re)created.  A falsy player targets the
    campaign-wide default rows (user NULL).
    """
    # Request keyword -> stored MapProperty name (hoisted out of the loop).
    name_by_permission = {
        'name': 'SHARED_NAME',
        'position': 'SHARED_POSITION',
        'vision': 'SHARED_VISION',
        'control': 'SHARED_CONTROL',
        'health': 'SHARED_HEALTH',
        'stamina': 'SHARED_STAMINA',
        'mana': 'SHARED_MANA',
    }
    try:
        tile_map = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        permissions = json.loads(request.body.decode('utf-8'))['permissions']
        players_reset = []
        for permission in permissions:
            entity = permission['entity']
            player = permission['player']
            property_name = name_by_permission[permission['permission']]
            # user_id=None filters/stores a NULL owner, which works for both
            # the lookup and the create path.  The previous code used
            # user__isnull=True in get_or_create, which breaks on create
            # because __isnull is a query lookup, not a field value.
            owner = player if player else None
            if player not in players_reset:
                MapProperty.objects.filter(map=tile_map, user_id=owner,
                                           name__in=PERMISSIONS, value=entity).delete()
                players_reset.append(player)
            # get_or_create already persists the row; the former extra
            # [0].save() call was redundant.
            MapProperty.objects.get_or_create(map=tile_map, user_id=owner,
                                              name=property_name, value=entity)
        response = {
            'status': 200,
            'message': f'Map Permissions updates (len={len(permissions)})',
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e) + f' (campaign={campaign_id}, map={map_id})'}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["POST"])
def save_map(request, campaign_id, map_id):
    """Create or overwrite a map with the JSON document in the request body."""
    try:
        campaign = get_object_or_404(Campaign, campaign_id=campaign_id)
        tile_map, _created = Map.objects.get_or_create(campaign=campaign, map_id=map_id)
        raw_body = request.body.decode('utf-8')
        parsed = json.loads(raw_body)
        tile_map.name = parsed['name']
        # The raw document (not a re-serialization) is stored verbatim.
        tile_map.data = raw_body
        tile_map.saved = timezone.now()
        tile_map.save()
        response = {
            'status': 200,
            'message': f'Map saved (name={tile_map.name}, id={map_id})',
            'date': timezone.localtime(tile_map.saved).isoformat(timespec='microseconds'),
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e)}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["DELETE"])
def delete_map(request, campaign_id, map_id):
    """Delete a single map from a campaign."""
    try:
        doomed = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        # The in-memory instance keeps its attributes after delete(), so
        # the name can still be reported below.
        doomed.delete()
        response = {
            'status': 200,
            'message': f'Map deleted (name={doomed.name}, id={map_id})',
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e)}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@redirect_preflight
@require_basic_auth
@require_http_methods(["GET"])
def map_actions(request, campaign_id, map_id):
    """Return the actions recorded for a map since its last save.

    'date' is the newest action's creation time, or the map's save time
    when no newer actions exist.
    """
    try:
        tile_map = get_object_or_404(Map, campaign__campaign_id=campaign_id, map_id=map_id)
        pending = tile_map.action_set.filter(created__gt=tile_map.saved)
        latest = pending.last().created if pending else tile_map.saved
        response = {
            'status': 200,
            'message': f'Actions loaded (len={len(pending)})',
            'date': timezone.localtime(latest).isoformat(timespec='microseconds'),
            'actions': [json.loads(a.data) for a in pending],
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e)}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["POST"])
def update_actions(request, campaign_id, datetime_iso: str):
    """Store posted actions, then return actions newer than datetime_iso.

    Posted actions are persisted for the requesting user (attached to a
    map when the action names one).  The reply contains actions created
    after datetime_iso by OTHER users, plus the newest creation timestamp
    (or datetime_iso itself when nothing newer exists).
    """
    try:
        campaign = get_object_or_404(Campaign, campaign_id=campaign_id)
        raw = request.body.decode('utf-8')
        incoming = json.loads(raw)['actions'] if raw else []
        for action in incoming:
            serialized = json.dumps(action)
            map_ref = action.get('map')
            if map_ref:
                tile_map = get_object_or_404(Map, campaign=campaign, map_id=map_ref)
                Action.objects.create(campaign=campaign, map=tile_map, user=request.user, data=serialized)
            else:
                Action.objects.create(campaign=campaign, user=request.user, data=serialized)
        since = datetime.fromisoformat(datetime_iso)
        newer = Action.objects.filter(campaign=campaign, created__gt=since)
        date = timezone.localtime(newer.last().created).isoformat(timespec='microseconds') if newer else datetime_iso
        outgoing = list(newer.exclude(user=request.user))
        response = {
            'status': 200,
            'message': f'Actions loaded (len={len(incoming)}). Actions downloaded (len={len(outgoing)})',
            'date': date,
            'actions': [json.loads(x.data) for x in outgoing],
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e)}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
@csrf_exempt
@redirect_preflight
@require_basic_auth
@require_http_methods(["DELETE"])
def reset_actions(request, campaign_id):
    """Delete every recorded action for a campaign."""
    try:
        campaign = get_object_or_404(Campaign, campaign_id=campaign_id)
        stored = list(campaign.action_set.all())
        # Delete one-by-one (as before) so per-instance delete() semantics
        # (signals, overrides) still apply.
        removed = 0
        for item in stored:
            item.delete()
            removed += 1
        response = {
            'status': 200,
            'message': f'Actions deleted (len={removed})',
        }
    except Http404 as e:
        response = {'status': 404, 'message': str(e)}
    except JSONDecodeError as e:
        response = {'status': 400, 'message': "JSONDecodeError: " + str(e)}
    except Exception as e:
        response = {'status': 500, 'message': get_stacktrace_str(e)}
    return JsonResponse(response, safe=False, status=response['status'])
| 41.655052
| 121
| 0.640067
| 2,840
| 23,910
| 5.174296
| 0.048944
| 0.095271
| 0.044913
| 0.069411
| 0.847023
| 0.837428
| 0.806261
| 0.767744
| 0.743178
| 0.726301
| 0
| 0.02146
| 0.230197
| 23,910
| 573
| 122
| 41.727749
| 0.776921
| 0.017231
| 0
| 0.670683
| 0
| 0
| 0.167561
| 0.038101
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040161
| false
| 0
| 0.024096
| 0
| 0.104418
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1ed38d1a331f5d5bc1ed93f54d27d24efb44d805
| 178
|
py
|
Python
|
ermaket/api/generation/templates/__init__.tmpl.py
|
SqrtMinusOne/ERMaket_Experiment
|
c4a7b61651edd15a619d9b690e2aaeaab4de282d
|
[
"Apache-2.0"
] | null | null | null |
ermaket/api/generation/templates/__init__.tmpl.py
|
SqrtMinusOne/ERMaket_Experiment
|
c4a7b61651edd15a619d9b690e2aaeaab4de282d
|
[
"Apache-2.0"
] | null | null | null |
ermaket/api/generation/templates/__init__.tmpl.py
|
SqrtMinusOne/ERMaket_Experiment
|
c4a7b61651edd15a619d9b690e2aaeaab4de282d
|
[
"Apache-2.0"
] | null | null | null |
{# Package __init__ template: re-exports the generated model modules. #}
{% if import_base %}
from .base import *
{% endif %}
{# System tables are only re-exported when system models were generated. #}
{% if import_system %}
from .system_user import *
from .system_user_has_role import *
from .system_role import *
{% endif %}
| 19.777778
| 35
| 0.696629
| 25
| 178
| 4.68
| 0.36
| 0.25641
| 0.239316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168539
| 178
| 8
| 36
| 22.25
| 0.790541
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.75
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
94a1054b9d186cc98f5f64916dff280cbd10810f
| 14,864
|
py
|
Python
|
src/colorviews/views.py
|
ju-sh/colorviews
|
b9757dd3a799d68bd89966852f36f06f21e36072
|
[
"MIT"
] | 5
|
2021-06-10T21:12:16.000Z
|
2022-01-14T05:04:03.000Z
|
src/colorviews/views.py
|
ju-sh/colorviews
|
b9757dd3a799d68bd89966852f36f06f21e36072
|
[
"MIT"
] | null | null | null |
src/colorviews/views.py
|
ju-sh/colorviews
|
b9757dd3a799d68bd89966852f36f06f21e36072
|
[
"MIT"
] | null | null | null |
"""
ColorView classes for Color and AlphaColor objects
For Color:
- ColorViewRGB
- ColorViewHSL
- ColorViewHSV
For AlphaColor:
- ColorViewRGBA
- ColorViewHSLA
- ColorViewHSVA
"""
import colorsys
from typing import cast, List, Sequence, TYPE_CHECKING
import colorviews.utils as utils
if TYPE_CHECKING:
import colorviews.colors as colors
class BaseColorView:
    """
    Common ancestor of every ColorView; simply holds the viewed color.
    """
    __slots__ = ["color"]

    def __init__(self, color: "colors.BaseColor"):
        # The view is a thin facade: all reads/writes go through this object.
        self.color = color
class BaseAlphaColorView(BaseColorView):
    """
    Shared alpha-channel handling for the *A colorview variants.
    """
    __slots__: List[str] = []

    @property
    def a(self) -> float:
        """Alpha component as a float value"""
        self.color = cast("colors.AlphaColor", self.color)
        return self.color._a

    @a.setter
    def a(self, value: float):
        utils.validate(value)
        self.color = cast("colors.AlphaColor", self.color)
        self.color._a = value
class BaseColorViewRGB(BaseColorView):
    """
    Shared red/green/blue accessors for the RGB(A) colorviews.
    """
    __slots__: List[str] = []

    @property
    def r(self) -> float:
        """Red component as a float value"""
        return self.color._r

    @r.setter
    def r(self, value: float):
        utils.validate(value)
        self.color._r = value

    @property
    def g(self) -> float:
        """Green component as a float value"""
        return self.color._g

    @g.setter
    def g(self, value: float):
        utils.validate(value)
        self.color._g = value

    @property
    def b(self) -> float:
        """Blue component as a float value"""
        return self.color._b

    @b.setter
    def b(self, value: float):
        utils.validate(value)
        self.color._b = value
class ColorViewRGB(BaseColorViewRGB):
    """
    RGB view of Color objects in float values
    """
    __slots__: List[str] = []

    def __iter__(self):
        yield self.r
        yield self.g
        yield self.b

    @property
    def vals(self) -> Sequence[float]:
        """Tuple of RGB values as floats."""
        return tuple(self)

    @vals.setter
    def vals(self, values: Sequence[float]):
        if len(values) != 3:
            raise ValueError("Needs exactly 3 floats")
        for component in values:
            utils.validate(component)
        self.r, self.g, self.b = values

    def replace(self, r=None, g=None, b=None) -> "colors.Color":
        """
        Create a new Color object, overriding some RGB components.

        Arguments:
          r: Red component as a float in the range [0, 1] (optional).
          g: Green component as a float in the range [0, 1] (optional).
          b: Blue component as a float in the range [0, 1] (optional).
        Returns:
          Color object with modified RGB values.
        """
        r = self.r if r is None else r
        g = self.g if g is None else g
        b = self.b if b is None else b
        self.color = cast("colors.Color", self.color)
        return self.color.from_rgb(r, g, b)
class ColorViewRGBA(BaseColorViewRGB, BaseAlphaColorView):
    """
    RGBA view of Color objects in float values
    """
    __slots__: List[str] = []

    def __iter__(self):
        yield self.r
        yield self.g
        yield self.b
        yield self.a

    @property
    def vals(self) -> Sequence[float]:
        """Tuple of RGBA values as floats."""
        return tuple(self)

    @vals.setter
    def vals(self, values: Sequence[float]):
        if len(values) != 4:
            raise ValueError("Needs exactly 4 floats")
        for component in values:
            utils.validate(component)
        self.r, self.g, self.b, self.a = values

    def replace(self, r=None, g=None, b=None, a=None) -> "colors.AlphaColor":
        """
        Create a new AlphaColor object, overriding some RGBA components.

        Arguments:
          r: Red component as a float in the range [0, 1] (optional).
          g: Green component as a float in the range [0, 1] (optional).
          b: Blue component as a float in the range [0, 1] (optional).
          a: Alpha component as a float in the range [0, 1] (optional).
        Returns:
          AlphaColor object with modified RGBA values.
        """
        r = self.r if r is None else r
        g = self.g if g is None else g
        b = self.b if b is None else b
        a = self.a if a is None else a
        self.color = cast("colors.AlphaColor", self.color)
        return self.color.from_rgba(r, g, b, a)
class BaseColorViewHSL(BaseColorView):
    """
    Base class of ColorViewHSL and ColorViewHSLA classes.

    colorsys uses HLS ordering: rgb_to_hls() returns (h, l, s) and
    hls_to_rgb() expects (h, l, s).  Each setter below keeps the two
    untouched components and substitutes the new value in its slot.
    """
    __slots__: List[str] = []

    @property
    def h(self):
        """Hue component as a float value"""
        rgb = (self.color._r, self.color._g, self.color._b)
        hls = colorsys.rgb_to_hls(*rgb)
        return hls[0]

    @h.setter
    def h(self, val: float):
        utils.validate(val)
        rgb = (self.color._r, self.color._g, self.color._b)
        hls = colorsys.rgb_to_hls(*rgb)
        rgb = colorsys.hls_to_rgb(val, hls[1], hls[2])
        self.color._r, self.color._g, self.color._b = rgb

    @property
    def s(self):
        """Saturation component as a float value"""
        rgb = (self.color._r, self.color._g, self.color._b)
        hls = colorsys.rgb_to_hls(*rgb)
        return hls[2]

    @s.setter
    def s(self, val: float):
        utils.validate(val)
        rgb = (self.color._r, self.color._g, self.color._b)
        hls = colorsys.rgb_to_hls(*rgb)
        # Fixed: lightness (hls[1]) belongs in the middle slot; the previous
        # code passed the old saturation (hls[2]) as lightness.
        rgb = colorsys.hls_to_rgb(hls[0], hls[1], val)
        self.color._r, self.color._g, self.color._b = rgb

    @property
    def l(self):
        """Lightness component as a float value"""
        rgb = (self.color._r, self.color._g, self.color._b)
        hls = colorsys.rgb_to_hls(*rgb)
        return hls[1]

    @l.setter
    def l(self, val: float):
        utils.validate(val)
        rgb = (self.color._r, self.color._g, self.color._b)
        hls = colorsys.rgb_to_hls(*rgb)
        # Fixed: saturation (hls[2]) belongs in the last slot; the previous
        # code passed the old lightness (hls[1]) as saturation.
        rgb = colorsys.hls_to_rgb(hls[0], val, hls[2])
        self.color._r, self.color._g, self.color._b = rgb
class ColorViewHSL(BaseColorViewHSL):
    """
    HSL view of Color objects in float values
    """
    __slots__: List[str] = []

    def __iter__(self):
        # colorsys orders components (h, l, s); expose them as (h, s, l).
        hue, lightness, saturation = colorsys.rgb_to_hls(*self.color.rgb.vals)
        yield from (hue, saturation, lightness)

    @property
    def vals(self) -> Sequence[float]:
        """Tuple of HSL values as floats."""
        return tuple(self)

    @vals.setter
    def vals(self, values: Sequence[float]):
        if len(values) != 3:
            raise ValueError("Needs exactly 3 floats")
        for component in values:
            utils.validate(component)
        hue, saturation, lightness = values
        self.color = cast("colors.Color", self.color)
        self.color.rgb.vals = colorsys.hls_to_rgb(hue, lightness, saturation)

    def replace(self, h=None, s=None, l=None) -> "colors.Color":
        """
        Create a new Color object, overriding some HSL components.

        Arguments:
          h: Hue as a float in the range [0, 1] (optional).
          s: Saturation as a float in the range [0, 1] (optional).
          l: Lightness as a float in the range [0, 1] (optional).
        Returns:
          Color object with modified HSL values.
        """
        current = list(self)
        h = current[0] if h is None else h
        s = current[1] if s is None else s
        l = current[2] if l is None else l
        self.color = cast("colors.Color", self.color)
        return self.color.from_hsl(h, s, l)
class ColorViewHSLA(BaseColorViewHSL, BaseAlphaColorView):
    """
    HSLA view of Color objects in float values
    """
    __slots__: List[str] = []

    def __iter__(self):
        yield self.h
        yield self.s
        yield self.l
        yield self.a

    @property
    def vals(self) -> Sequence[float]:
        """Tuple of HSLA values as floats."""
        return tuple(self)

    @vals.setter
    def vals(self, values: Sequence[float]):
        if len(values) != 4:
            raise ValueError("Needs exactly 4 floats")
        for component in values:
            utils.validate(component)
        hue, saturation, lightness, alpha = values
        # colorsys expects (h, l, s) ordering.
        rgb = colorsys.hls_to_rgb(hue, lightness, saturation)
        self.color = cast("colors.AlphaColor", self.color)
        self.color.rgba.vals = rgb + (alpha, )

    def replace(self, h=None, s=None, l=None, a=None) -> "colors.AlphaColor":
        """
        Create a new AlphaColor object, overriding some HSLA components.

        Arguments:
          h: Hue as a float in the range [0, 1] (optional).
          s: Saturation as a float in the range [0, 1] (optional).
          l: Lightness as a float in the range [0, 1] (optional).
          a: Alpha as a float in the range [0, 1] (optional).
        Returns:
          AlphaColor object with modified HSLA values.
        """
        current = list(self)
        h = current[0] if h is None else h
        s = current[1] if s is None else s
        l = current[2] if l is None else l
        a = current[3] if a is None else a
        self.color = cast("colors.AlphaColor", self.color)
        return self.color.from_hsla(h, s, l, a)
class BaseColorViewHSV(BaseColorView):
    """
    Shared hue/saturation/value accessors for the HSV(A) colorviews.

    colorsys.rgb_to_hsv()/hsv_to_rgb() both use (h, s, v) ordering.
    """
    __slots__: List[str] = []

    @property
    def h(self):
        """Hue component as a float value"""
        hsv = colorsys.rgb_to_hsv(self.color._r, self.color._g, self.color._b)
        return hsv[0]

    @h.setter
    def h(self, value: float):
        utils.validate(value)
        hsv = colorsys.rgb_to_hsv(self.color._r, self.color._g, self.color._b)
        new_rgb = colorsys.hsv_to_rgb(value, hsv[1], hsv[2])
        self.color._r, self.color._g, self.color._b = new_rgb

    @property
    def s(self):
        """Saturation component as a float value"""
        hsv = colorsys.rgb_to_hsv(self.color._r, self.color._g, self.color._b)
        return hsv[1]

    @s.setter
    def s(self, value: float):
        utils.validate(value)
        hsv = colorsys.rgb_to_hsv(self.color._r, self.color._g, self.color._b)
        new_rgb = colorsys.hsv_to_rgb(hsv[0], value, hsv[2])
        self.color._r, self.color._g, self.color._b = new_rgb

    @property
    def v(self):
        """Value component as a float value"""
        hsv = colorsys.rgb_to_hsv(self.color._r, self.color._g, self.color._b)
        return hsv[2]

    @v.setter
    def v(self, value: float):
        utils.validate(value)
        hsv = colorsys.rgb_to_hsv(self.color._r, self.color._g, self.color._b)
        new_rgb = colorsys.hsv_to_rgb(hsv[0], hsv[1], value)
        self.color._r, self.color._g, self.color._b = new_rgb
class ColorViewHSV(BaseColorViewHSV):
    """
    HSV view of Color objects in float values
    """
    __slots__: List[str] = []

    def __iter__(self):
        yield from colorsys.rgb_to_hsv(*self.color.rgb.vals)

    @property
    def vals(self) -> Sequence[float]:
        """Tuple of HSV values as floats."""
        return tuple(self)

    @vals.setter
    def vals(self, values: Sequence[float]):
        if len(values) != 3:
            raise ValueError("Needs exactly 3 floats")
        for component in values:
            utils.validate(component)
        self.color = cast("colors.Color", self.color)
        self.color.rgb.vals = colorsys.hsv_to_rgb(values[0], values[1], values[2])

    def replace(self, h=None, s=None, v=None) -> "colors.Color":
        """
        Create a new Color object, overriding some HSV components.

        Arguments:
          h: Hue as a float in the range [0, 1] (optional).
          s: Saturation as a float in the range [0, 1] (optional).
          v: Value as a float in the range [0, 1] (optional).
        Returns:
          Color object with modified HSV values.
        """
        current = list(self)
        h = current[0] if h is None else h
        s = current[1] if s is None else s
        v = current[2] if v is None else v
        self.color = cast("colors.Color", self.color)
        return self.color.from_hsv(h, s, v)
class ColorViewHSVA(BaseColorViewHSV, BaseAlphaColorView):
    """
    HSVA view of Color objects in float values
    """
    __slots__: List[str] = []

    def __iter__(self):
        yield self.h
        yield self.s
        yield self.v
        yield self.a

    @property
    def vals(self) -> Sequence[float]:
        """Tuple of HSVA values as floats."""
        return tuple(self)

    @vals.setter
    def vals(self, values: Sequence[float]):
        if len(values) != 4:
            raise ValueError("Needs exactly 4 floats")
        for component in values:
            utils.validate(component)
        rgb = colorsys.hsv_to_rgb(values[0], values[1], values[2])
        self.color = cast("colors.AlphaColor", self.color)
        self.color.rgba.vals = rgb + (values[3], )

    def replace(self, h=None, s=None, v=None, a=None) -> "colors.AlphaColor":
        """
        Create a new AlphaColor object, overriding some HSVA components.

        Arguments:
          h: Hue as a float in the range [0, 1] (optional).
          s: Saturation as a float in the range [0, 1] (optional).
          v: Value as a float in the range [0, 1] (optional).
          a: Alpha as a float in the range [0, 1] (optional).
        Returns:
          AlphaColor object with modified HSVA values.
        """
        current = list(self)
        h = current[0] if h is None else h
        s = current[1] if s is None else s
        v = current[2] if v is None else v
        a = current[3] if a is None else a
        self.color = cast("colors.AlphaColor", self.color)
        return self.color.from_hsva(h, s, v, a)
| 28.918288
| 79
| 0.572793
| 2,054
| 14,864
| 4.05258
| 0.051607
| 0.108121
| 0.029793
| 0.032797
| 0.844426
| 0.833253
| 0.808626
| 0.789644
| 0.740509
| 0.727895
| 0
| 0.014139
| 0.310078
| 14,864
| 513
| 80
| 28.974659
| 0.797562
| 0.251749
| 0
| 0.657343
| 0
| 0
| 0.041018
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157343
| false
| 0
| 0.013986
| 0
| 0.325175
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a20e0fee2f6a078183368ce88da7a4a167353b7a
| 358
|
py
|
Python
|
funboost/concurrent_pool/single_thread_executor.py
|
DJMIN/funboost
|
7570ca2909bb0b44a1080f5f98aa96c86d3da9d4
|
[
"Apache-2.0"
] | 333
|
2019-08-08T10:25:27.000Z
|
2022-03-30T07:32:04.000Z
|
funboost/concurrent_pool/single_thread_executor.py
|
mooti-barry/funboost
|
2cd9530e2c4e5a52fc921070d243d402adbc3a0e
|
[
"Apache-2.0"
] | 38
|
2020-04-24T01:47:51.000Z
|
2021-12-20T07:22:15.000Z
|
funboost/concurrent_pool/single_thread_executor.py
|
mooti-barry/funboost
|
2cd9530e2c4e5a52fc921070d243d402adbc3a0e
|
[
"Apache-2.0"
] | 84
|
2019-08-09T11:51:14.000Z
|
2022-03-02T06:29:09.000Z
|
from typing import Callable
class SoloExecutor:
    """Executor-shaped shim that runs submitted callables synchronously."""

    # noinspection PyUnusedLocal
    def __init__(self, max_workers: int = 1):
        # max_workers is accepted only for pool-API compatibility; ignored.
        pass

    # noinspection PyMethodMayBeStatic
    def submit(self, fn: Callable, *args, **kwargs):
        """Invoke fn immediately in the caller's thread; return its result."""
        return fn(*args, **kwargs)

    # noinspection PyMethodMayBeStatic
    def shutdown(self, wait=True):
        """No-op: a synchronous executor has nothing to tear down."""
        pass
| 22.375
| 52
| 0.670391
| 37
| 358
| 6.351351
| 0.675676
| 0.26383
| 0.289362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00369
| 0.243017
| 358
| 15
| 53
| 23.866667
| 0.863469
| 0.256983
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0.25
| 0.125
| 0.125
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
bf663fff316b9b193b327a4c7a05f1f3511336d7
| 2,170
|
py
|
Python
|
core/test_middleware.py
|
uktrade/pir-api
|
79747ceab042c42c287e2b7471f6dade70f68693
|
[
"MIT"
] | 1
|
2021-02-02T19:08:55.000Z
|
2021-02-02T19:08:55.000Z
|
core/test_middleware.py
|
uktrade/invest-pir-api
|
be56efddf9dfdf81c8557441a9a54d9a4dd4bab1
|
[
"MIT"
] | 21
|
2018-07-10T10:20:47.000Z
|
2022-03-24T09:36:29.000Z
|
core/test_middleware.py
|
uktrade/pir-api
|
79747ceab042c42c287e2b7471f6dade70f68693
|
[
"MIT"
] | 1
|
2021-02-04T11:28:37.000Z
|
2021-02-04T11:28:37.000Z
|
from django.http import HttpResponse
from .middleware import AdminIpRestrictionMiddleware
def test_ip_restriction_middleware_is_enabled(client, settings):
    """Admin requests are rejected with 401 when RESTRICT_ADMIN is on."""
    settings.RESTRICT_ADMIN = True
    response = client.get('/admin/')
    assert response.status_code == 401
def test_ip_restriction_applies_to_admin_only(rf, settings):
    """Non-admin paths pass through even with RESTRICT_ADMIN enabled."""
    settings.RESTRICT_ADMIN = True
    request = rf.get('/api/pir/')
    middleware = AdminIpRestrictionMiddleware(lambda _: HttpResponse(status=200))
    assert middleware(request).status_code == 200
def test_ip_restriction_enabled_false(rf, settings):
    """With RESTRICT_ADMIN off, admin requests pass straight through."""
    settings.RESTRICT_ADMIN = False
    request = rf.get('/admin/', HTTP_X_FORWARDED_FOR='')
    middleware = AdminIpRestrictionMiddleware(lambda _: HttpResponse(status=200))
    assert middleware(request).status_code == 200
def test_ip_restriction_missing_x_forwarded_header(rf, settings):
    """A request with no X-Forwarded-For header at all must be rejected.

    Fixed: this test previously set HTTP_X_FORWARDED_FOR='1.1.1.1', making
    it a byte-for-byte duplicate of the 'invalid header' test and never
    exercising the missing-header path its name describes.
    """
    settings.RESTRICT_ADMIN = True
    request = rf.get('/admin/')
    assert AdminIpRestrictionMiddleware(lambda _: HttpResponse(status=200))(request).status_code == 401  # noqa
def test_ip_restriction_invalid_x_forwarded_header(rf, settings):
    """An unusable X-Forwarded-For value results in a 401."""
    settings.RESTRICT_ADMIN = True
    request = rf.get('/admin/', HTTP_X_FORWARDED_FOR='1.1.1.1')
    middleware = AdminIpRestrictionMiddleware(lambda _: HttpResponse(status=200))
    assert middleware(request).status_code == 401
def test_ip_restriction_valid_ip(rf, settings):
    """A forwarded chain containing an allowed admin IP is accepted."""
    settings.RESTRICT_ADMIN = True
    settings.ALLOWED_ADMIN_IPS = ['2.2.2.2']
    request = rf.get('/admin/',
                     HTTP_X_FORWARDED_FOR='1.1.1.1, 2.2.2.2, 3.3.3.3')
    middleware = AdminIpRestrictionMiddleware(lambda _: HttpResponse(status=200))
    assert middleware(request).status_code == 200
def test_ip_restriction_invalid_ip(rf, settings):
    """Allowlisted IPs at the wrong position in the chain are rejected."""
    settings.RESTRICT_ADMIN = True
    request = rf.get('/admin/',
                     HTTP_X_FORWARDED_FOR='1.1.1.1, 2.2.2.2, 3.3.3.3')

    def status_for_current_settings():
        middleware = AdminIpRestrictionMiddleware(lambda _: HttpResponse(status=200))
        return middleware(request).status_code

    settings.ALLOWED_ADMIN_IPS = ['1.1.1.1']
    assert status_for_current_settings() == 401
    settings.ALLOWED_ADMIN_IPS = ['3.3.3.3']
    assert status_for_current_settings() == 401
| 33.384615
| 111
| 0.731797
| 285
| 2,170
| 5.298246
| 0.161404
| 0.019868
| 0.019868
| 0.092715
| 0.82649
| 0.784106
| 0.774834
| 0.774834
| 0.752318
| 0.721192
| 0
| 0.048108
| 0.147465
| 2,170
| 64
| 112
| 33.90625
| 0.768108
| 0.015668
| 0
| 0.542857
| 0
| 0.057143
| 0.06391
| 0
| 0
| 0
| 0
| 0
| 0.228571
| 1
| 0.2
| false
| 0
| 0.057143
| 0
| 0.257143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bf8d6e0498d5afe67689cd33ce48ec68fb6d801b
| 153
|
py
|
Python
|
client/starwhale/cluster/__init__.py
|
goldenxinxing/starwhale
|
2fbb72b0ce5e135b432120c779440e53942be3b5
|
[
"Apache-2.0"
] | 1
|
2022-03-24T02:03:48.000Z
|
2022-03-24T02:03:48.000Z
|
client/starwhale/cluster/__init__.py
|
goldenxinxing/starwhale
|
2fbb72b0ce5e135b432120c779440e53942be3b5
|
[
"Apache-2.0"
] | null | null | null |
client/starwhale/cluster/__init__.py
|
goldenxinxing/starwhale
|
2fbb72b0ce5e135b432120c779440e53942be3b5
|
[
"Apache-2.0"
] | null | null | null |
from .view import ClusterView
from .model import DEFAULT_PAGE_NUM, DEFAULT_PAGE_SIZE
# Public API of the cluster package.
__all__ = ["ClusterView", "DEFAULT_PAGE_NUM", "DEFAULT_PAGE_SIZE"]
| 30.6
| 66
| 0.810458
| 21
| 153
| 5.333333
| 0.47619
| 0.392857
| 0.25
| 0.375
| 0.517857
| 0.517857
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098039
| 153
| 4
| 67
| 38.25
| 0.811594
| 0
| 0
| 0
| 0
| 0
| 0.287582
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
44aaddfba5d3ff7d2ca00be36fd5c24300f7e10e
| 8,193
|
py
|
Python
|
categories/acid3/handlers.py
|
image72/browserscope
|
44a63558ee376704d996851099bc7703128201cc
|
[
"Apache-2.0"
] | 22
|
2015-10-26T15:20:37.000Z
|
2022-03-11T06:38:17.000Z
|
categories/acid3/handlers.py
|
image72/browserscope
|
44a63558ee376704d996851099bc7703128201cc
|
[
"Apache-2.0"
] | 10
|
2016-01-22T18:46:19.000Z
|
2019-07-19T12:49:51.000Z
|
categories/acid3/handlers.py
|
mcauer/browserscope
|
a9c0e1a250774f14689e06f93ad274d0b9d725e4
|
[
"Apache-2.0"
] | 12
|
2015-10-17T09:40:44.000Z
|
2019-06-08T19:54:36.000Z
|
#!/usr/bin/python2.5
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License')
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for Acid3 Tests.
Example beacon request:
http://localhost:8080/beacon?category=acid3&results=score=99
"""
__author__ = 'jacobm@google.com (Jacob Moon)'
import time
from categories import all_test_sets
from base import decorators
from base import util
from django import http
from django.template import Context, loader
CATEGORY = 'acid3'
def About(request):
    """Render the Acid3 category's about page."""
    test_set = all_test_sets.GetTestSet(CATEGORY)
    params = {
        'page_title': 'What is the Acid3 Test?',
        'tests': test_set.tests,
    }
    return util.Render(request, 'templates/about.html', params, CATEGORY)
def SupportAPng(request):
# AppEngine does not allow disk operations, so the content of support-a.png file is written here.
content = '\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x96\x00\x00\x00\x96\x08\x02\x00\x00\x00\xb3c\xe6\xb5\x00\x00\x00\x01sRGB\x00\xae\xce\x1c\xe9\x00\x00\x00\x04gAMA\x00\x00\xb1\x8f\x0b\xfca\x05\x00\x00\x00 cHRM\x00\x00z&\x00\x00\x80\x84\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17p\x9c\xbaQ<\x00\x00\x08\x86IDATx^\xed\x9a\xe1\x8e%+\x08\x84\xf7\xbe\xf9\xbe\xf9\xde\x93\x9c\xa4cZ\x85*@\x85\x19\'\xf3\x13\x11\xea\x03\xa4\'\xf3\xdf\xbf?\xf7\xa7\xb8\x02\x1f\x84\xf7\xb7\xb4\x02\x97_y\x05\xca\'P\xba\x81B\x82\xbf\x08\xcb+P>\x81\x90B.\xed\xe4",\xaf@\xf9\x04J7PH\xf0\x17ay\x05\xca\'\x10R\xc8\xa5\x9d\\\x84\xe5\x15(\x9f@\xe9\x06\n\t\xfe",\xaf@\xf9\x04B\n\xb9\xb4\x93\x8b\xb0\xbc\x02\xe5\x13(\xdd@!\xc1_\x84\xe5\x15(\x9f@H!\x97vr\x11\x96W\xa0|\x02\xa5\x1b($\xf8\x8b\xb0\xbc\x02\xe5\x13\x08)d\xd0\xc9\xdf\x94\xffgt\x11\xa2\n|\xf8]\x84\xa8X\xb3\x9e8(\xe2\xf7\xea\x83\x01\x08s\xc2++8\x82\xfcf\x07\x15l\xf9%\xa4X\x03\xe1KD\x7fA\xe0\x1ez~\xd9\xc6i\x01\x84\x07\x9b`\xc8\xef"\xe4\x8a\xe6\xa0\x88\xb3\xab\xb3\xcdRNP|\xfe\x84X\x1e\x14Q\xbe:\x15\xc5\xbc\x08U\x11\xc1*1\xcc=\xf5\xea\x8bP\xaf\x9b\x10\x11{\xa1\x11\xe9\xc1\xab\r\x95\x01\xd6\x1ck\xa6\xab\xc9z\xf4\xdb\xe3"\xcet\x1c~\x81\xbc\xf8\t6x\x00\xfed\xfd\x1e\xd2!\xc4\xe5\x1b\xb6T{\xbcU\x07\xe1\xf7\xb1w\xde\xee\xe7a\xf0\x90\x0ba\xdb\x19\xb8\x9aO\xda\x81\xfc@\x9c\x06\xc5\xc3\x8f$B\xf8\x9al,B\x0f\xbf\xfe.\x10a\x86\x171\x0b\xc2!\x00\x9cb8\xbfo\xaf\x80\x01\x847\x16\xe50\x05\xc2\x19\x00\\\xc4\xd8\xfd\xa5\x7fDe\x96\x94\xe2\xe1\xc6?\n!\xbb\xbf<;\x8e\xbf\x86\xc2\xc1\xe0\x0e\xcf#\x14\xe4\x8b\x9afr\x8f\xca\x01\x80\xb3\xf4\xe0\xa3x\x18\xe1K\xa0Y\xe9\xa9:\n5+|{\xf4%2\xf4\xa3\xde>\xbc\x02o#\xa7\xe5O@\xc8J\xd0*\xae\xce\x00\xf0=fc\x08\xb4?\x89\x10lAy\x9c\xca\x12\xf7J\x19ZJ=\x12\xc8\xc3\xe0\xea\x18\xc2\xe1\xa7\x98:\x0fU5\x87\x06\x8f[\xc3q\xf5=\x16v(\x03\x0f\xc3\x91,\x08\x91\xd0\r\x00\xdaE\xc6|\\\xa0x\x9c\xdf\'\x803\x08\xa9\x11\xca~\xa5\x99Q\t\x07\x87\xe3:\x03\xbf\x14\x08\x91\xfe\xf3L\xc2\x10\xa2\xf2\xee\xfa\xeb6Rd\t\xf4,\xf7!
\xccfOuO\xeb,\xbf3]8\xfc\xd0V{q\x11\x18\xdc\xadZUj\n\x8b\x0cv\xbf\x85E\xf9\t\x7f\x1f\xf8u]hHX\xdd}\xf0N\xfaXz\x96\xa3\xd9\xd9W{\x19r\xf44\xe8\xee.dc\xc5?\x1f\x11\x90\xb3\xdb\x91\xb3-~\xe19\xdf\xcc\xef\xcc[\x88S\x1c*k\xfe\xfc\x97\xefE(\xca{\xe9\xf3\xe1\x81\'\x18b\x99\xb7\x0bg\x9a.B8\xfc\xf2\xebgx\xaa\xfe\xfbJq\x00\xe1\xebAb\x87\x9b\xb9\x99\xd4\x92\x97\x1b\xf1\xc5x\xf8\xfe\x81\xa9\xa9\x91P\x06\xbb\x11\xe2I\xb2]\xf8\xf5,`\x98}\x15\x80\x0bN\xeb<\x0f\xbf\xad]H\xbd\xf3jC\xccD\xa4\x10\xf6!Q\xc7\xd5\xa7\x91j&\xb3\xf1\xa6.\xa4\xf8\xa9\xcf\x92\x81_\xdf\xfd\xb3]\x97\xda\xa1\x84\xa7\xd1\x8c\x84=\xb8\x03!\xcbOE8\xdb\xef\xf1\xde\x15\xa64\x8e0\x03\xbf\x1d\x83\xd4\xc0\x0fA\xa8\xbe|\xb3&\x03\x9d#x\x1e\x1ba#c[\xca`\xbf\xb6\x0bm\xfc\x0c*\xcb\xfd\xd7j\xadv\xea\xcb@]\x98e\x03\x03\x12\xf6\xc8B\x84f~\xeb\x10\xaa\xfc\xe4/\x87G\\\xa4\xff\xfa\xd7\x97e\x03\xda\xafB8\xe4\xa7f\xdek\x04\xb6\x97\xdf\x8cZ/\xe5\xea\xf4\xd4.\x88\xad5[\x82\xd0\xc9o\x7f\x17\xe2\xed\xf5\xc4v|~>\x01\xc4#\xf4\xf3\xeb\x1b\x02l\xb2(\xf6\x86Vx\xc5l\xf6`8\x18\x8c0\x8a\x9f\r\x86\xed\x14\xb8\xbf\xa8\xe2\xe2\xcfD\xec\x06\x1b\x890\x90\x9f\xb9\x11\xc1~\x15\xbe\x0bUTC\x03\'?\xcf\xf3\x19\x89\xb0\xcf\xcd\x99XHW\xe1Pm\xf0\xda 
_\x1e\xd4\xa5\x14\xf9\xfaT\xa3Z\x88\xd0\xcf\xaf\x8d\x1e\'a\xb3T\x95\x92\xf7\x97\x9e\x96\xdaX!\xfc>Q\xadBx\xf9\t-\x18\xf5\xfa~\xabj\tB\xb5\x00\xa9\x92\xb7u\x15~\x8a\nf8\x18\xfa\xf9\xf9\xe2\'\x04c\xbe}\xf7G\x853P\x9c\x07e\x19\x12\xd5\x90_\x8bp)?o\x17R\x0f\x80\xb95)*\xa0\xb1\x13\x9e\xfa.>\xca\xac\xe6\xe7BH!\xa1\x8c\x9d[\xccs|\x83|\xe6P\x03k\xc8\xf8\x16R\xdb\x8a\x99\x9f\xfaQ!\x0b1C\x18(\x1fR.\xb1\xcbK\x1f\xbc\x05!\x15\x93\x87\xdf\n\x84\xa7\xf8\xad\xb8\xd7\xb8\x91R\xa5\xed\xe4\x97\r\xe1\xec;A}\x80\xd7\xf1\xa3\xdfB*V?\xbfp\x84\x1e)\x87o\x07%\x88\xe7v\xe1,1H\xd5p{\x03\x7f\xd0\xea\xa5\xc2\x15\xd4\xc0\xc7\x9fU\xf9\xfd\xf3\xa7\xccz@\x11\xaaR\x0e\r\xd8hz{\xf5^\x10\xa1\'\x92a)\xa8\xf9\xce\xa6\xae\'\x92\xe1Y\x08\xa1\xaa\xe3\xba\xf5]\xbd\x1aA\xe8QM\r@\x18\xb0\x9e{\xf1\xb3\xcb\x11:\xa7\x19\xa2\xe00[\xea\xb3g\xa6\x17r{\xdfm!K@^\x84\x86\xf4@\x1d\xd5\xc9\x86\x8b\xf2\xb5D\xee\x9dM~\xf6.\x8f=\xd4\x85`>H\xce\x86\xa64\xb85\x14\xcaKD\xf9R\xb9k=<\x0cgQ\x84xa\x9a\x15\x9f\xe9nvh\x90\x03I3\x15\xbfO0\x1cB$C\xb3\xe2\xfdA\xe7u\xdf\xe1a\x00ih\xc1\xfe\xe9\xb5]m\x88\xd6\x92\xe1sM \xadu\xae(Q\x900f\xef_\xcb\xcc\xf0^Pq\xb6\xc6.\x84\xce.A\xf4\xf2\xdb\x80\xd2\xf8/\xf2{\x00C}\x99\x05 \\\xb7\xec\xf8EA\xa6Y\xc8-\x8b\x9c Pc\x10f\x1e\xad\xb2\n\x8b\xa4\x0fw+d\x11\x89\xf0\xb5R\x86\xa7\xd1?0\xe0\x15\xf2\x0e\t:9h&Wa$B\xea\xd3\xca\xa9\x08\xf5\x0c\x0f%p\x06\xb0\xed\xb8:K9\x84\xdb\xe2\xc6\xd7z$$a\x87D\x8e\x1f\xb4Q\xf9q\xdf\x85\x073i\xaf6\xf4\xbap\x84\xea\xe6\xcd\n \xfc\x08\x84\x9b\xa3\x9f]g\xeb\xa7\xf6\xd4\xe39\xf3\n\x86,\xd2O\xfc\xd0 
M\xcb\x0f\xef\xa1\xd6rH\x14\x1f\xddC\xf6`\xc7\x80\x1f`\xb87\xa8\x0b\x93\xf3\x03E\x19\xd65\x9b\xda\x8c=\xa5\xb8:HXoJ\x17\xb2IR\xf6\xa0\xfajJ\xf8\xa5/\x06x\x13\x0b\xdf3jx\xab\r$\x84\xb84\x94%;\xc7\x10\t\xf0\x00\x0c\xdb\x90\xe0\xfc5T\x91P\xc3m\xa6\x08qQ\x90\xd5\xa3_"\xa8\x0eP\xd3\xf6G\xeb\xf7\xa0\x06\xb9\xc8\x00Zg\x9cw\xcf\xf8\x05R\xf4\x03\x88\xf2\xe0\xd4\xcap|9\xc2\x10~\xb2\x13\xf0M\x8d\x82\x84\xf81\x900\x1fY\x8b\xf0\x9b\xed08D\x88\x9facf\x03\x1e\\\x88p\xc6/\'\x98=\xad\x0cR\xa1\xcc\x16",\xd4|\xec\x92\x1cR\x85\x14\'\xc1x\x1f\xc2a\xda\xd4F\xa3\ng\xf36SG\xbd.\xdc\xc0\x06u\x07B\xea\xab\xc3\xac\x0b\xdeI\xacR\xe6\x90\xa8\x83lT\x8f\xfd*\x84j\xf4\xb8\xe2\xaa\xab\xd9\xc6d\x16\xa5?\x88\xc4\xe0\xb1\xf1\x84\x1a\x8c\x10Lc\xc8\xcf\xbcPx\xf2G\xce\xb6I\xd9f\xb5,\x0b\x12\xc3\xf2\xb7\x10$\xd7\xef\xa8\xc3\x0f>\xc4\x9b3m\xfcx\xcf\xef9\x8b\xc4\xa9\xda\xe0\x91\xcc,\x03\xbaP\x8dR\xfd\xbaP\x17\n\x7f\x9e6\x0f\x02\xbf\x97CP\x84\x97\x99-\xaa\xd7)/BdD\x0c\xdf\xaaa\xff\xcd\x06\xac\xc0\x98RaVLC\'j\x84j\xe5!\xe2P\xf1\x0f\x8d]\x08\xcd!\xaa\xeax\x0cf\xdb\x8d\xca\xaf\xbdT\r\x00\x94~&\x11x\x1c1\xb3#\x14\xf8\xc9\x17SR\xb2\xfd\x91\x8a\x1f\x02\xc0ocD\xe8)\xae\x99\xca\xafeO\x1dS\xbd\xc1/\xe4\xf7\x11\xc1\x82p\xc5\x9b\x1c\xc2Oxt\x85b_1?\xfd\xbd\x85{\xa0\x11F=\x12\xc3\x1e\x92g\xacp\xb5\xbc\xf4\x82\xfc>f\xea\x90\xc7\x95\xddf\xc9!\\\x97\xa1\xea\xf9\xf2\x9b\xd5\x04\x87pQee\xe0\xb7(\xb5\rnS D6X\xea\x9d\xf3\xd4\xc4\x06\xd1c\xaf\xc8\x8e\xd06?\x91\x9a\x88\xd5\xf1\xa0\xb7\xd4\x08W\xf0\xfb\xee,\x07\x15\x0f\xbf:o2\x8b\xf8\x85+x\xdcav\x84\xb3o\x8f\xe3\xc2\xe5\t 
/\xc2<\x1a%\x8f\xe4",\xaf@\xf9\x04\x92\xb7\xc8\x86\xf0.\xc2\xf2\n\x94O`C\x99\'\xbf\xe2",\xaf@\xf9\x04\x92\xb7\xc8\x86\xf0.\xc2\xf2\n\x94O`C\x99\'\xbf\xe2",\xaf@\xf9\x04\x92\xb7\xc8\x86\xf0.\xc2\xf2\n\x94O`C\x99\'\xbf\xe2",\xaf@\xf9\x04\x92\xb7\xc8\x86\xf0.\xc2\xf2\n\x94O`C\x99\'\xbf\xe2",\xaf@\xf9\x04\x92\xb7\xc8\x86\xf0.\xc2\xf2\n\x94O`C\x99\'\xbf\xe2",\xaf@\xf9\x04\x92\xb7\xc8\x86\xf0.\xc2\xf2\n\x94O`C\x99\'\xbf\xe2",\xaf@\xf9\x04\x92\xb7\xc8\x86\xf0\xfe\x07\xae,i\x82\xf4Is\xb4\x00\x00\x00\x00IEND\xaeB`\x82'
status = 404
content_type = 'image/png'
return http.HttpResponse(
content=content, status=status, content_type=content_type)
| 151.722222
| 6,739
| 0.730135
| 1,764
| 8,193
| 3.379819
| 0.344104
| 0.025159
| 0.013586
| 0.014089
| 0.055351
| 0.055351
| 0.049312
| 0.049312
| 0.049312
| 0.049312
| 0
| 0.212236
| 0.030392
| 8,193
| 53
| 6,740
| 154.584906
| 0.538268
| 0.096302
| 0
| 0
| 0
| 0.65
| 0.601464
| 0.583842
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.3
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
44e4c385640a4090e5625d2d2b3dad2c17ee9a95
| 28
|
py
|
Python
|
hello_world.py
|
Ram431/profiles-rest-api1
|
ea3bf503378184e42ead99ef56de3ad47b2994be
|
[
"MIT"
] | null | null | null |
hello_world.py
|
Ram431/profiles-rest-api1
|
ea3bf503378184e42ead99ef56de3ad47b2994be
|
[
"MIT"
] | null | null | null |
hello_world.py
|
Ram431/profiles-rest-api1
|
ea3bf503378184e42ead99ef56de3ad47b2994be
|
[
"MIT"
] | null | null | null |
# Smoke-test script: greet the user on stdout.
print("Welcome to Python")
| 14
| 27
| 0.714286
| 4
| 28
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.607143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
780238180aa229c6126919ed1ed5cedca0ec3757
| 170
|
py
|
Python
|
apps/mpm/views/__init__.py
|
gustavofoa/pympm
|
a7d2d3a396e7ebf77ec6f213ee04093e790bf901
|
[
"Apache-2.0"
] | 2
|
2017-04-26T10:21:57.000Z
|
2021-04-02T08:36:24.000Z
|
apps/mpm/views/__init__.py
|
gustavofoa/pympm
|
a7d2d3a396e7ebf77ec6f213ee04093e790bf901
|
[
"Apache-2.0"
] | null | null | null |
apps/mpm/views/__init__.py
|
gustavofoa/pympm
|
a7d2d3a396e7ebf77ec6f213ee04093e790bf901
|
[
"Apache-2.0"
] | null | null | null |
from .view_starratings import starratings_ajax
from .view_datas import datas
from .view_search import search
from .views import *
from .view_errors import page_not_found
| 28.333333
| 46
| 0.847059
| 26
| 170
| 5.269231
| 0.461538
| 0.233577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 170
| 5
| 47
| 34
| 0.913333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
78596fc2fa65f198665f1bae6bc7427fbb67c1b4
| 34
|
py
|
Python
|
src/pyclassifier/__init__.py
|
rahmansharifi/pydictionaryclassifier
|
ab5471b61f1502c55d09d0caf6d1ee3a7fd273b0
|
[
"MIT"
] | null | null | null |
src/pyclassifier/__init__.py
|
rahmansharifi/pydictionaryclassifier
|
ab5471b61f1502c55d09d0caf6d1ee3a7fd273b0
|
[
"MIT"
] | null | null | null |
src/pyclassifier/__init__.py
|
rahmansharifi/pydictionaryclassifier
|
ab5471b61f1502c55d09d0caf6d1ee3a7fd273b0
|
[
"MIT"
] | null | null | null |
from .classifier import dictionary
| 34
| 34
| 0.882353
| 4
| 34
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
785e6f6cb1b274a7b7ea23b3a3d281c7b33042e8
| 2,820
|
py
|
Python
|
Q2/q2.py
|
Xascoria/AdventOfCode2021
|
869d6f292f6444fe6bb26bf37049a0949bf49019
|
[
"MIT"
] | null | null | null |
Q2/q2.py
|
Xascoria/AdventOfCode2021
|
869d6f292f6444fe6bb26bf37049a0949bf49019
|
[
"MIT"
] | null | null | null |
Q2/q2.py
|
Xascoria/AdventOfCode2021
|
869d6f292f6444fe6bb26bf37049a0949bf49019
|
[
"MIT"
] | null | null | null |
# Advent of Code 2021, Day 2.
# Part 1: `c` is the horizontal position, `d` the depth; "forward" moves
# horizontally, "down"/"up" change the depth directly.
f = open("Q2/inputs.txt","r")
z = f.readlines()
c = d = 0
for i in z:
    #print(i)
    a,b = i.split()
    if a == "forward":
        c += int(b)
    elif a == "down":
        d += int(b)
    else:
        d -= int(b)
print(c*d)
# Part 2: "down"/"up" now adjust `aim`; "forward" moves horizontally and
# changes depth by aim * distance.
c = d = 0
aim = 0
for i in z:
    #print(i)
    a,b = i.split()
    if a == "forward":
        c += int(b)
        d += aim*int(b)
    elif a == "down":
        #d += int(b)
        aim += int(b)
    else:
        #d -= int(b)
        aim -= int(b)
print(c*d)
# ASCII-art submarine sprite (unused first draft; replaced below).
submarine = [
    " |_ ",
    " _____|~ |____ ",
    " ( -- ~~~~--_ ",
    " ~~~~~~~~~~~~~~~~~~~'`"
]
f = open("Q2/inputs2.txt","r")
commands = f.readlines()
# Final submarine sprite used for the animation.
submarine = [
    " _| ",
    " ____| ~|_____ ",
    " _------ -- )",
    "/'------------------- ",
]
# `ocean` is a 10x50 grid of '~' characters the sprite is drawn onto.
ocean = [[*"~"*50]for _ in range(10)]
current_cord = [0,0]
# Draw the sprite once at the origin and print the frame.
for i in range(len(submarine)):
    for j in range(len(submarine[i])):
        ocean[i][j] = submarine[i][j]
for i in ocean:
    print("".join(i))
ocean = [[*"~"*50]for _ in range(10)]
# Replay the commands, redrawing the submarine at its updated coordinates
# after each one — a crude terminal animation.  Note: no bounds check, so
# coordinates outside the 10x50 grid would raise IndexError.
for cmd in commands:
    print(cmd)
    action, num = cmd.split()
    if action == "forward":
        current_cord[1] += int(num)
    elif action == "up":
        current_cord[0] += int(num)
    else:
        current_cord[0] -= int(num)
    for i in range(len(submarine)):
        for j in range(len(submarine[i])):
            ocean[i+current_cord[0]][j+current_cord[1]] = submarine[i][j]
    for i in ocean:
        print("".join(i))
    ocean = [[*"~"*50]for _ in range(10)]
# Code-golf one-liners re-solving part 1 (walrus operator, Python 3.8+):
# each command becomes a (horizontal, depth) delta pair, summed with zip.
f = open("Q2/inputs.txt","r")
commands = f.readlines()
a = [((j[0][0]=="f")*int(j[1]), (j[0][0]!="f")*([-1,1][j[0][0]=="d"]*int(j[1]))) for i in commands if (j:=i.split())]
print(__import__("math").prod([sum(i) for i in zip(*a)]))
# Same idea again, fully inlined (note: these two read "inputs.txt" from the
# current directory, not "Q2/inputs.txt" like the lines above).
a = [((i.split()[0][0]=="f")*int(i.split()[1]),(i.split()[0][0]!="f")*([-1,1][i.split()[0][0]=="d"]*int(i.split()[1])))for i in open("inputs.txt").readlines()]
print(__import__("math").prod([sum(i)for i in zip(*[((i.split()[0][0]=="f")*int(i.split()[1]),(i.split()[0][0]!="f")*([-1,1][i.split()[0][0]=="d"]*int(i.split()[1])))for i in open("inputs.txt").readlines()])]))
# Commented-out copy of the animation loop, kept by the author.
# submarine = [
# " _| ",
# " ____| ~|_____ ",
# " _------ -- )",
# "/'------------------- ",]
# ocean = [[*"~"*50]for _ in range(10)]
# for cmd in commands:
# print(cmd)
# action, num = cmd.split()
# if action == "forward":
# current_cord[1] += int(num)
# elif action == "up":
# current_cord[0] += int(num)
# else:
# current_cord[0] -= int(num)
# for i in range(len(submarine)):
# for j in range(len(submarine[i])):
# ocean[i+current_cord[0]][j+current_cord[1]] = submarine[i][j]
# for i in ocean:
# print("".join(i))
# ocean = [[*"~"*50]for _ in range(10)]
# ocean = [[*"~"*50]for _ in range(10)]
| 27.378641
| 210
| 0.454255
| 409
| 2,820
| 2.992665
| 0.114914
| 0.042484
| 0.063725
| 0.093137
| 0.914216
| 0.873366
| 0.819444
| 0.819444
| 0.755719
| 0.755719
| 0
| 0.033495
| 0.269504
| 2,820
| 103
| 211
| 27.378641
| 0.56068
| 0.238652
| 0
| 0.507463
| 0
| 0
| 0.140367
| 0.019783
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029851
| 0
| 0.029851
| 0.104478
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
78c2c84531d337622b13586bcac5b465874151fd
| 1,902
|
py
|
Python
|
tests/pymcell4/3000_intermembrane_rxns/geometry.py
|
mcellteam/mcell-tests
|
34d2d967b75d56edbae999bf0090641850f4f4fe
|
[
"MIT"
] | 1
|
2021-08-13T20:40:54.000Z
|
2021-08-13T20:40:54.000Z
|
tests/pymcell4/3000_intermembrane_rxns/geometry.py
|
mcellteam/mcell_tests
|
34d2d967b75d56edbae999bf0090641850f4f4fe
|
[
"MIT"
] | null | null | null |
tests/pymcell4/3000_intermembrane_rxns/geometry.py
|
mcellteam/mcell_tests
|
34d2d967b75d56edbae999bf0090641850f4f4fe
|
[
"MIT"
] | null | null | null |
# WARNING: This is an automatically generated file and will be overwritten
# by CellBlender on the next model export.
import mcell as m
# Two box-shaped membranes ("up" and "bottom") separated by a small gap,
# used by the intermembrane-reactions test.
# ---- up ----
# Vertices as [x, y, z] coordinates; the box spans z = 0.020 .. 0.100.
up_vertex_list = [
    [-0.205813646316528, -0.236602157354355, 0.020],
    [-0.205813646316528, -0.236602157354355, 0.100],
    [-0.205813646316528, 0.163397818803787, 0.020],
    [-0.205813646316528, 0.163397818803787, 0.100],
    [0.194186329841614, -0.236602157354355, 0.020],
    [0.194186329841614, -0.236602157354355, 0.100],
    [0.194186329841614, 0.163397818803787, 0.020],
    [0.194186329841614, 0.163397818803787, 0.100]
] # up_vertex_list
# Each wall is a triple of indices into up_vertex_list (12 walls = 6 box
# faces, two triangles per face).
up_wall_list = [
    [1, 2, 0],
    [3, 6, 2],
    [7, 4, 6],
    [5, 0, 4],
    [6, 0, 2],
    [3, 5, 7],
    [1, 3, 2],
    [3, 7, 6],
    [7, 5, 4],
    [5, 1, 0],
    [6, 4, 0],
    [3, 1, 5]
] # up_wall_list
up = m.GeometryObject(
    name = 'up',
    vertex_list = up_vertex_list,
    wall_list = up_wall_list,
    surface_regions = []
)
# ^^^^ up ^^^^
# ---- bottom ----
# The bottom box spans z = -0.02 .. 0.018, leaving a 0.002 gap below "up".
bottom_vertex_list = [
    [-0.199999988079071, -0.199999988079071, -0.02],
    [-0.199999988079071, -0.199999988079071, 0.018], # the gap is 2nm
    [-0.199999988079071, 0.199999988079071, -0.02],
    [-0.199999988079071, 0.199999988079071, 0.018],
    [0.199999988079071, -0.199999988079071, -0.02],
    [0.199999988079071, -0.199999988079071, 0.018],
    [0.199999988079071, 0.199999988079071, -0.02],
    [0.199999988079071, 0.199999988079071, 0.018]
] # bottom_vertex_list
# Same triangulation pattern as up_wall_list.
bottom_wall_list = [
    [1, 2, 0],
    [3, 6, 2],
    [7, 4, 6],
    [5, 0, 4],
    [6, 0, 2],
    [3, 5, 7],
    [1, 3, 2],
    [3, 7, 6],
    [7, 5, 4],
    [5, 1, 0],
    [6, 4, 0],
    [3, 1, 5]
] # bottom_wall_list
bottom = m.GeometryObject(
    name = 'bottom',
    vertex_list = bottom_vertex_list,
    wall_list = bottom_wall_list,
    surface_regions = []
)
# ^^^^ bottom ^^^^
| 24.384615
| 74
| 0.569401
| 256
| 1,902
| 4.113281
| 0.195313
| 0.243115
| 0.25831
| 0.243115
| 0.623932
| 0.353276
| 0.353276
| 0.353276
| 0.353276
| 0.353276
| 0
| 0.449791
| 0.246057
| 1,902
| 77
| 75
| 24.701299
| 0.284519
| 0.13775
| 0
| 0.42623
| 1
| 0
| 0.00492
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016393
| 0
| 0.016393
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1535a191023144e7168c2fcd11753b139f3601a7
| 165
|
py
|
Python
|
Level5/flaskr/views.py
|
oswaldo-patino/python-bootcamp
|
a8527ca4b71bafc58a813c92288d0fc2fd083230
|
[
"MIT"
] | null | null | null |
Level5/flaskr/views.py
|
oswaldo-patino/python-bootcamp
|
a8527ca4b71bafc58a813c92288d0fc2fd083230
|
[
"MIT"
] | null | null | null |
Level5/flaskr/views.py
|
oswaldo-patino/python-bootcamp
|
a8527ca4b71bafc58a813c92288d0fc2fd083230
|
[
"MIT"
] | null | null | null |
from flask import Flask, request
from flask.templating import render_template
from . import app
@app.route("/")
def home():
    """Serve the site root by rendering the home page template."""
    return render_template("home.html")
| 20.625
| 44
| 0.751515
| 23
| 165
| 5.304348
| 0.565217
| 0.147541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139394
| 165
| 8
| 45
| 20.625
| 0.859155
| 0
| 0
| 0
| 0
| 0
| 0.060241
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.5
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
15511b184d34230a2e6b2df1e02c9437457f1fd4
| 37
|
py
|
Python
|
ebp/__init__.py
|
arokem/elastic_basis_pursuit
|
780b9ce3912c8bad15b581d9199b364059086387
|
[
"MIT"
] | null | null | null |
ebp/__init__.py
|
arokem/elastic_basis_pursuit
|
780b9ce3912c8bad15b581d9199b364059086387
|
[
"MIT"
] | null | null | null |
ebp/__init__.py
|
arokem/elastic_basis_pursuit
|
780b9ce3912c8bad15b581d9199b364059086387
|
[
"MIT"
] | null | null | null |
from .elastic_basis_pursuit import *
| 18.5
| 36
| 0.837838
| 5
| 37
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
15bd15cef7bbec8ed012df5793ffc716491e3aa4
| 18
|
py
|
Python
|
app/extensions/__init__.py
|
cupskeee/App-MFE
|
4f546a9e6a475f3937f1a77406e612e3354af2b7
|
[
"Apache-2.0"
] | null | null | null |
app/extensions/__init__.py
|
cupskeee/App-MFE
|
4f546a9e6a475f3937f1a77406e612e3354af2b7
|
[
"Apache-2.0"
] | 2
|
2022-02-13T15:05:55.000Z
|
2022-02-27T06:00:16.000Z
|
api/database/models.py
|
roycechua23/react-flask-rest-crud-sample
|
13fe823ec3ac1ea2ed7b90127f07eb92507a5244
|
[
"MIT"
] | null | null | null |
from .db import db
| 18
| 18
| 0.777778
| 4
| 18
| 3.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 18
| 1
| 18
| 18
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ec65c42a2fe369549f971120945c8fc997d9ba65
| 145
|
py
|
Python
|
todo/admin.py
|
ruslan-ok/ServerApps
|
541aa12f1933054a12f590ce78544178be374669
|
[
"MIT"
] | 1
|
2021-06-07T02:14:13.000Z
|
2021-06-07T02:14:13.000Z
|
todo/admin.py
|
ruslan-ok/ServerApps
|
541aa12f1933054a12f590ce78544178be374669
|
[
"MIT"
] | 9
|
2021-08-14T07:53:47.000Z
|
2022-03-18T19:07:22.000Z
|
todo/admin.py
|
ruslan-ok/ServerApps
|
541aa12f1933054a12f590ce78544178be374669
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Grp, Lst, Task
# Expose the todo models (groups, lists, tasks) in the Django admin with
# the default ModelAdmin options.
admin.site.register(Grp)
admin.site.register(Lst)
admin.site.register(Task)
| 20.714286
| 34
| 0.793103
| 23
| 145
| 5
| 0.478261
| 0.234783
| 0.443478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096552
| 145
| 6
| 35
| 24.166667
| 0.877863
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
ec689a2001af71d5afbe53b8d21ef02693d5b366
| 192
|
py
|
Python
|
chainladder/tails/__init__.py
|
AragondaJyosna/chainladder-python
|
45f51365279d6a30eac6d74f5d3ea492d7b7e1d8
|
[
"MIT"
] | 1
|
2019-03-03T06:01:26.000Z
|
2019-03-03T06:01:26.000Z
|
chainladder/tails/__init__.py
|
AragondaJyosna/chainladder-python
|
45f51365279d6a30eac6d74f5d3ea492d7b7e1d8
|
[
"MIT"
] | null | null | null |
chainladder/tails/__init__.py
|
AragondaJyosna/chainladder-python
|
45f51365279d6a30eac6d74f5d3ea492d7b7e1d8
|
[
"MIT"
] | null | null | null |
""" tails should store all tail methodologies
"""
from chainladder.tails.base import TailBase
from chainladder.tails.constant import TailConstant
from chainladder.tails.curve import TailCurve
| 32
| 51
| 0.833333
| 24
| 192
| 6.666667
| 0.625
| 0.28125
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 192
| 5
| 52
| 38.4
| 0.930233
| 0.213542
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
ece6238b4264cbdba9be4a816d3aa1110598bc9e
| 9,138
|
py
|
Python
|
o3seespy/command/uniaxial_material/pytz.py
|
vijaypolimeru/o3seespy
|
c9ef0c27f685de705721b10eb1ea81c3a3c24c4e
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
o3seespy/command/uniaxial_material/pytz.py
|
vijaypolimeru/o3seespy
|
c9ef0c27f685de705721b10eb1ea81c3a3c24c4e
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2021-06-25T15:33:31.000Z
|
2021-06-25T15:33:31.000Z
|
o3seespy/command/uniaxial_material/pytz.py
|
millen1m/o3seespy
|
7eead6aef8055f73af39b969e0d3499a67e1737f
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2020-12-12T21:01:42.000Z
|
2020-12-12T21:01:42.000Z
|
from o3seespy.command.uniaxial_material.base_material import UniaxialMaterialBase
class PySimple1(UniaxialMaterialBase):
    """The PySimple1 uniaxial material (nonlinear p-y spring).

    Wraps the OpenSees ``PySimple1`` command for lateral soil-pile
    interaction.
    """
    op_type = 'PySimple1'

    def __init__(self, osi, soil_type, pult, y50, cd, c=0.0):
        """Define a PySimple1 material and register it with *osi*.

        Parameters
        ----------
        osi: obj
            OpenSees instance wrapper; supplies the material tag counter
            (``n_mat``) and receives the built command.
        soil_type: int
            1 -> backbone approximates Matlock (1970) soft clay relation;
            2 -> backbone approximates API (1993) sand relation.
        pult: float
            Ultimate capacity of the p-y material.  Note "p"/"pult" are
            distributed loads [force per length of pile] in common design
            equations, but are both loads here (distributed load times the
            tributary pile length).
        y50: float
            Displacement at which 50% of pult is mobilized in monotonic
            loading.
        cd: float
            Drag resistance within a fully-mobilized gap, as cd * pult.
        c: float, optional
            Viscous damping (dashpot) on the far-field (elastic) component
            of the displacement rate (velocity); non-zero values represent
            radiation damping effects.  Default 0.0.
        """
        # Coerce the numeric inputs before storing them on the instance.
        self.soil_type = int(soil_type)
        self.pult = float(pult)
        self.y50 = float(y50)
        self.cd = float(cd)
        self.c = float(c)
        # Claim the next free material tag from the OpenSees instance.
        osi.n_mat = osi.n_mat + 1
        self._tag = osi.n_mat
        params = [self.op_type, self._tag, self.soil_type, self.pult,
                  self.y50, self.cd, self.c]
        self._parameters = params
        self.to_process(osi)
class TzSimple1(UniaxialMaterialBase):
    """The TzSimple1 uniaxial material (nonlinear t-z spring).

    Wraps the OpenSees ``TzSimple1`` command for pile shaft response.
    """
    op_type = 'TzSimple1'

    def __init__(self, osi, soil_type, tult, z50, c=0.0):
        """Define a TzSimple1 material and register it with *osi*.

        Parameters
        ----------
        osi: obj
            OpenSees instance wrapper; supplies the material tag counter
            (``n_mat``) and receives the built command.
        soil_type: int
            1 -> backbone approximates Reese and O'Neill (1987);
            2 -> backbone approximates Mosher (1984) relation.
        tult: float
            Ultimate capacity of the t-z material.
        z50: float
            Displacement at which 50% of tult is mobilized in monotonic
            loading.
        c: float, optional
            Viscous damping (dashpot) on the far-field (elastic) component
            of the displacement rate (velocity).  Default 0.0.
        """
        # Coerce the numeric inputs before storing them on the instance.
        self.soil_type = int(soil_type)
        self.tult = float(tult)
        self.z50 = float(z50)
        self.c = float(c)
        # Claim the next free material tag from the OpenSees instance.
        osi.n_mat = osi.n_mat + 1
        self._tag = osi.n_mat
        params = [self.op_type, self._tag, self.soil_type, self.tult,
                  self.z50, self.c]
        self._parameters = params
        self.to_process(osi)
class QzSimple1(UniaxialMaterialBase):
    """The QzSimple1 uniaxial material (nonlinear q-z spring).

    Wraps the OpenSees ``QzSimple1`` command for pile tip bearing.
    """
    op_type = 'QzSimple1'

    def __init__(self, osi, qz_type, qult, z50, suction=0.0, c=0.0):
        """Define a QzSimple1 material and register it with *osi*.

        Parameters
        ----------
        osi: obj
            OpenSees instance wrapper; supplies the material tag counter
            (``n_mat``) and receives the built command.
        qz_type: int
            1 -> backbone approximates Reese and O'Neill's (1987) relation
            for drilled shafts in clay; 2 -> backbone approximates
            Vijayvergiya's (1977) relation for piles in sand.
        qult: float
            Ultimate capacity of the q-z material.
        z50: float
            Displacement at which 50% of qult is mobilized in monotonic
            loading.
        suction: float, optional
            Uplift resistance, equal to suction * qult.  Must be in the
            range 0.0 to 0.1.  Default 0.0.
        c: float, optional
            Viscous damping (dashpot) on the far-field (elastic) component
            of the displacement rate (velocity); non-zero values represent
            radiation damping effects.  Default 0.0.
        """
        # Coerce the numeric inputs before storing them on the instance.
        self.qz_type = int(qz_type)
        self.qult = float(qult)
        self.z50 = float(z50)
        self.suction = float(suction)
        self.c = float(c)
        # Claim the next free material tag from the OpenSees instance.
        osi.n_mat = osi.n_mat + 1
        self._tag = osi.n_mat
        params = [self.op_type, self._tag, self.qz_type, self.qult,
                  self.z50, self.suction, self.c]
        self._parameters = params
        self.to_process(osi)
class PyLiq1(UniaxialMaterialBase):
    """The PyLiq1 uniaxial material (p-y spring with liquefaction effects)."""
    op_type = 'PyLiq1'

    def __init__(self, osi, soil_type, pult, y50, cd, c, p_res, ele1, ele2, time_series=None):
        """Define a PyLiq1 material and register it with *osi*.

        Parameters
        ----------
        osi: obj
            OpenSees instance wrapper; supplies the material tag counter
            (``n_mat``) and receives the built command.
        soil_type: int
            1 -> backbone approximates Matlock (1970) soft clay relation;
            2 -> backbone approximates API (1993) sand relation.
        pult: float
            Ultimate capacity of the p-y material.  Note "p"/"pult" are
            distributed loads [force per length of pile] in common design
            equations, but are both loads here (distributed load times the
            tributary pile length).
        y50: float
            Displacement at which 50% of pult is mobilized in monotonic
            loading.
        cd: float
            Drag resistance within a fully-mobilized gap, as cd * pult.
        c: float
            Viscous damping (dashpot) on the far-field (elastic) component
            of the displacement rate (velocity); non-zero values represent
            radiation damping effects.
        p_res: float
            Minimum (residual) peak resistance retained as the adjacent
            solid soil elements liquefy.
        ele1: float
            Element tag of the first solid element from which PyLiq1 obtains
            mean effective stresses and excess pore pressures.
        ele2: float
            Element tag of the second solid element (same purpose as ele1).
        time_series: obj, optional
            Alternatively, mean effective stress can be supplied by a time
            series; when given, ``'-timeSeries'`` and the series tag are
            appended to the command.
        """
        # Coerce the numeric inputs before storing them on the instance.
        self.soil_type = int(soil_type)
        self.pult = float(pult)
        self.y50 = float(y50)
        self.cd = float(cd)
        self.c = float(c)
        self.p_res = float(p_res)
        self.ele1 = float(ele1)
        self.ele2 = float(ele2)
        self.time_series = time_series
        # Claim the next free material tag from the OpenSees instance.
        osi.n_mat = osi.n_mat + 1
        self._tag = osi.n_mat
        params = [self.op_type, self._tag, self.soil_type, self.pult,
                  self.y50, self.cd, self.c, self.p_res, self.ele1,
                  self.ele2]
        if self.time_series is not None:
            params += ['-timeSeries', self.time_series.tag]
        self._parameters = params
        self.to_process(osi)
class TzLiq1(UniaxialMaterialBase):
    """The TzLiq1 uniaxial material (t-z spring with liquefaction effects)."""
    op_type = 'TzLiq1'

    def __init__(self, osi, tz_type, tult, z50, c, ele1, ele2, time_series=None):
        """Define a TzLiq1 material and register it with *osi*.

        Parameters
        ----------
        osi: obj
            OpenSees instance wrapper; supplies the material tag counter
            (``n_mat``) and receives the built command.
        tz_type: int
            1 -> backbone approximates Reese and O'Neill (1987);
            2 -> backbone approximates Mosher (1984) relation.
        tult: float
            Ultimate capacity of the t-z material.
        z50: float
            Displacement at which 50% of tult is mobilized in monotonic
            loading.
        c: float
            Viscous damping (dashpot) on the far-field (elastic) component
            of the displacement rate (velocity).
        ele1: float
            Element tag of the first solid element from which the material
            obtains mean effective stresses and excess pore pressures.
        ele2: float
            Element tag of the second solid element (same purpose as ele1).
        time_series: obj, optional
            Alternatively, mean effective stress can be supplied by a time
            series; when given, ``'-timeSeries'`` and the series tag are
            appended to the command.
        """
        # Coerce the numeric inputs before storing them on the instance.
        self.tz_type = int(tz_type)
        self.tult = float(tult)
        self.z50 = float(z50)
        self.c = float(c)
        self.ele1 = float(ele1)
        self.ele2 = float(ele2)
        self.time_series = time_series
        # Claim the next free material tag from the OpenSees instance.
        osi.n_mat = osi.n_mat + 1
        self._tag = osi.n_mat
        params = [self.op_type, self._tag, self.tz_type, self.tult,
                  self.z50, self.c, self.ele1, self.ele2]
        if self.time_series is not None:
            params += ['-timeSeries', self.time_series.tag]
        self._parameters = params
        self.to_process(osi)
| 41.162162
| 140
| 0.623988
| 1,216
| 9,138
| 4.601974
| 0.155428
| 0.021444
| 0.012509
| 0.012509
| 0.818442
| 0.788599
| 0.767513
| 0.747677
| 0.721051
| 0.694246
| 0
| 0.029499
| 0.295141
| 9,138
| 221
| 141
| 41.348416
| 0.839311
| 0.562486
| 0
| 0.628571
| 0
| 0
| 0.027276
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.014286
| 0
| 0.228571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
01aadc588ce09495954e0fe8d52dc83fb3a7833d
| 197
|
py
|
Python
|
juc2/art/__init__.py
|
vesche/juc2
|
3484175988bbf0c8f188c876641f1dd39b3c4af0
|
[
"MIT"
] | null | null | null |
juc2/art/__init__.py
|
vesche/juc2
|
3484175988bbf0c8f188c876641f1dd39b3c4af0
|
[
"MIT"
] | null | null | null |
juc2/art/__init__.py
|
vesche/juc2
|
3484175988bbf0c8f188c876641f1dd39b3c4af0
|
[
"MIT"
] | null | null | null |
"""juc2.art"""
from juc2.art.animals import Animals
from juc2.art.objects import Objects
from juc2.art.shapes import Shapes
from juc2.art.symbols import Symbols
from juc2.art.tools import Tools
| 28.142857
| 36
| 0.791878
| 32
| 197
| 4.875
| 0.28125
| 0.269231
| 0.352564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034682
| 0.121827
| 197
| 7
| 37
| 28.142857
| 0.867052
| 0.040609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
01c8c4fd22eeeb77ae4d2e295133270125fc11f4
| 44
|
py
|
Python
|
tests/test_example.py
|
Turtledash/BattlegroundsSimulation
|
7a93efaee90646f808e7272f1c8f636bb3095382
|
[
"Apache-2.0"
] | null | null | null |
tests/test_example.py
|
Turtledash/BattlegroundsSimulation
|
7a93efaee90646f808e7272f1c8f636bb3095382
|
[
"Apache-2.0"
] | null | null | null |
tests/test_example.py
|
Turtledash/BattlegroundsSimulation
|
7a93efaee90646f808e7272f1c8f636bb3095382
|
[
"Apache-2.0"
] | null | null | null |
def test_numbers():
    """Sanity-check that integer multiplication works as expected."""
    product = 3 * 4
    assert product == 12
| 14.666667
| 23
| 0.568182
| 7
| 44
| 3.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 0.295455
| 44
| 2
| 24
| 22
| 0.645161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
01ca12270ce14f23629bd24516ee64ad6c2fed83
| 22,465
|
py
|
Python
|
edbdeploy/cloud.py
|
jt-edb/postgres-deployment
|
871cf517379152f096f44d682dd03d93971715a2
|
[
"BSD-3-Clause"
] | null | null | null |
edbdeploy/cloud.py
|
jt-edb/postgres-deployment
|
871cf517379152f096f44d682dd03d93971715a2
|
[
"BSD-3-Clause"
] | 1
|
2021-07-01T18:02:00.000Z
|
2021-07-01T18:02:00.000Z
|
edbdeploy/cloud.py
|
jt-edb/postgres-deployment
|
871cf517379152f096f44d682dd03d93971715a2
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
import json
import os
import re
import time
from subprocess import CalledProcessError
from .system import exec_shell
class CloudCliError(Exception):
    """Raised when a cloud vendor CLI invocation fails or returns bad data."""
class AWSCli:
    """
    Thin wrapper around the aws command line tool.

    Provides version checking and a handful of EC2 lookups used to validate
    a deployment configuration before provisioning.
    """

    def __init__(self, bin_path=None):
        """
        :param bin_path: optional directory to look the aws executable up in.
            It is honoured only if the directory exists and contains a file
            named ``aws``; otherwise the executable is resolved via PATH.
        """
        # aws CLI supported versions interval
        self.min_version = (0, 0, 0)
        self.max_version = (1, 19, 18)
        # Path to look up for executable
        self.bin_path = None
        # Force aws CLI binary path if bin_path exists and contains
        # aws file.
        if bin_path is not None and os.path.exists(bin_path):
            if os.path.exists(os.path.join(bin_path, 'aws')):
                self.bin_path = bin_path
        # NOTE: a stray trailing `pass` was removed here; it was dead code.

    def check_version(self):
        """
        Verify aws CLI version, based on the interval formed by min_version and
        max_version.
        aws CLI version is fetched using the command: aws --version

        :raises Exception: if the CLI is missing, its version cannot be
            parsed, or the version is outside the supported interval.
        """
        try:
            output = exec_shell([self.bin("aws"), "--version"])
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise Exception(
                "aws CLI executable seems to be missing. Please install it or "
                "check your PATH variable"
            )

        version = None
        # Parse command output and extract the version number
        pattern = re.compile(r"^aws-cli\/([0-9]+)\.([0-9]+)\.([0-9]+) ")
        for line in output.decode("utf-8").split("\n"):
            m = pattern.search(line)
            if m:
                version = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
                break

        if version is None:
            raise Exception("Unable to parse aws CLI version")

        logging.info("aws CLI version: %s", '.'.join(map(str, version)))

        # Verify if the version fetched is supported.
        # (Locals renamed lo/hi: the originals shadowed the min/max builtins.)
        for i in range(0, 3):
            lo = self.min_version[i]
            hi = self.max_version[i]

            if version[i] < hi:
                # If current digit is below the maximum value, no need to
                # check others digits, we are good
                break

            if not (lo <= version[i] <= hi):
                raise Exception(
                    ("aws CLI version %s not supported, must be between %s and"
                     " %s") % (
                        '.'.join(map(str, version)),
                        '.'.join(map(str, self.min_version)),
                        '.'.join(map(str, self.max_version)),
                    )
                )

    def bin(self, binary):
        """
        Return binary's path
        """
        if self.bin_path is not None:
            return os.path.join(self.bin_path, binary)
        else:
            return binary

    def check_instance_type_availability(self, instance_type, region):
        """
        Verify that an EC2 instance type is offered in the given region.

        :raises CloudCliError: if the type is unavailable or the command
            fails / returns unparseable JSON.
        """
        try:
            output = exec_shell([
                self.bin("aws"),
                "ec2",
                "describe-instance-type-offerings",
                "--location-type availability-zone",
                "--filters Name=instance-type,Values=%s" % instance_type,
                "--region %s" % region,
                "--output json"
            ])
            result = json.loads(output.decode("utf-8"))
            logging.debug("Command output: %s", result)
            if len(result["InstanceTypeOfferings"]) == 0:
                raise CloudCliError(
                    "Instance type %s not available in region %s"
                    % (instance_type, region)
                )
        except ValueError:
            # JSON decoding error
            logging.error("Failed to decode JSON data")
            logging.error("Output: %s", output.decode("utf-8"))
            raise CloudCliError(
                "Failed to decode JSON data, please check the logs for details"
            )
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise CloudCliError(
                "Failed to execute the following command, please check the "
                "logs for details: %s" % e.cmd
            )

    def get_image_id(self, image, region):
        """
        Return the ImageId of the newest AMI matching the given name pattern
        in the region, or None if no matching image is in the 'available'
        state.

        :raises CloudCliError: on command failure or unparseable JSON.
        """
        try:
            output = exec_shell([
                self.bin("aws"),
                "ec2",
                "describe-images",
                "--filters Name=name,Values=\"%s\"" % image,
                "--query 'sort_by(Images, &Name)[-1]'",
                "--region %s" % region,
                "--output json"
            ])
            result = json.loads(output.decode("utf-8"))
            logging.debug("Command output: %s", result)
            if result.get('State') == 'available':
                return result.get('ImageId')
        except ValueError:
            # JSON decoding error
            logging.error("Failed to decode JSON data")
            logging.error("Output: %s", output.decode("utf-8"))
            raise CloudCliError(
                "Failed to decode JSON data, please check the logs for details"
            )
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise CloudCliError(
                "Failed to execute the following command, please check the "
                "logs for details: %s" % e.cmd
            )

    def check_instances_availability(self, region):
        """
        Block until all EC2 instances in the region pass status checks
        (delegates the waiting to `aws ec2 wait instance-status-ok`).

        :raises CloudCliError: on command failure.
        """
        try:
            output = exec_shell([
                self.bin("aws"),
                "ec2",
                "wait",
                "instance-status-ok",
                "--region %s" % region
            ])
            logging.debug("Command output: %s", output.decode("utf-8"))
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise CloudCliError(
                "Failed to execute the following command, please check the "
                "logs for details: %s" % e.cmd
            )
class AWSRDSCli(AWSCli):
    """aws CLI wrapper specialised for RDS deployments."""

    def check_instance_type_availability(self, instance_type, region):
        """Raise CloudCliError if no RDS offering exists for the DB instance
        class in the given region, or if the lookup itself fails."""
        command = [
            self.bin("aws"),
            "rds",
            "describe-reserved-db-instances-offerings",
            "--product-description postgresql",
            "--region %s" % region,
            "--db-instance-class %s" % instance_type,
            "--output json",
        ]
        try:
            raw = exec_shell(command)
            offerings = json.loads(raw.decode("utf-8"))
            logging.debug("Command output: %s", offerings)
            if not offerings["ReservedDBInstancesOfferings"]:
                raise CloudCliError(
                    "Instance type %s not available in region %s"
                    % (instance_type, region)
                )
        except ValueError:
            # JSON decoding error
            logging.error("Failed to decode JSON data")
            logging.error("Output: %s", raw.decode("utf-8"))
            raise CloudCliError(
                "Failed to decode JSON data, please check the logs for details"
            )
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise CloudCliError(
                "Failed to execute the following command, please check the "
                "logs for details: %s" % e.cmd
            )
class AWSRDSAuroraCli(AWSRDSCli):
    """aws CLI wrapper for RDS Aurora; identical behaviour to AWSRDSCli."""
class AzureCli:
    """
    Thin wrapper around the azure (az) command line tool.

    Provides version checking and VM/image lookups used to validate a
    deployment configuration before provisioning.
    """

    def __init__(self, bin_path=None):
        """
        :param bin_path: optional directory to look the az executable up in.
            It is honoured only if the directory exists and contains a file
            named ``az``; otherwise the executable is resolved via PATH.
        """
        # azure CLI supported versions interval
        self.min_version = (0, 0, 0)
        self.max_version = (2, 20, 0)
        # Path to look up for executable
        self.bin_path = None
        # Force azure CLI binary path if bin_path exists and contains
        # az file.
        if bin_path is not None and os.path.exists(bin_path):
            if os.path.exists(os.path.join(bin_path, 'az')):
                self.bin_path = bin_path

    def check_version(self):
        """
        Verify azure CLI version, based on the interval formed by min_version and
        max_version.
        azure CLI version is fetched using the command: az --version

        :raises Exception: if the CLI is missing, its version cannot be
            parsed, or the version is outside the supported interval.
        """
        try:
            output = exec_shell([self.bin("az"), "--version"])
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise Exception(
                "azure CLI executable seems to be missing. Please install it or "
                "check your PATH variable"
            )

        version = None
        # Parse command output and extract the version number
        pattern = re.compile(r"^azure-cli\s+([0-9]+)\.([0-9]+)\.([0-9]+)")
        for line in output.decode("utf-8").split("\n"):
            m = pattern.search(line)
            if m:
                version = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
                break

        if version is None:
            raise Exception("Unable to parse azure CLI version")

        logging.info("azure CLI version: %s", '.'.join(map(str, version)))

        # Verify if the version fetched is supported.
        # (Locals renamed lo/hi: the originals shadowed the min/max builtins.)
        for i in range(0, 3):
            lo = self.min_version[i]
            hi = self.max_version[i]

            if version[i] < hi:
                # If current digit is below the maximum value, no need to
                # check others digits, we are good
                break

            if not (lo <= version[i] <= hi):
                raise Exception(
                    ("azure CLI version %s not supported, must be between %s and"
                     " %s") % (
                        '.'.join(map(str, version)),
                        '.'.join(map(str, self.min_version)),
                        '.'.join(map(str, self.max_version)),
                    )
                )

    def bin(self, binary):
        """
        Return binary's path
        """
        if self.bin_path is not None:
            return os.path.join(self.bin_path, binary)
        else:
            return binary

    def check_instance_type_availability(self, instance_type, region):
        """
        Verify that a VM size is available in the given Azure location.

        :raises CloudCliError: if the size is unavailable or the command
            fails / returns unparseable JSON.
        """
        try:
            output = exec_shell([
                self.bin("az"),
                "vm",
                "list-sizes",
                "--location %s" % region,
                "--query \"[?name == '%s']\"" % instance_type,
                "--output json"
            ])
            result = json.loads(output.decode("utf-8"))
            logging.debug("Command output: %s", result)
            if len(result) == 0:
                raise CloudCliError(
                    "Instance type %s not available in region %s"
                    % (instance_type, region)
                )
        except ValueError:
            # JSON decoding error
            logging.error("Failed to decode JSON data")
            logging.error("Output: %s", output.decode("utf-8"))
            raise CloudCliError(
                "Failed to decode JSON data, please check the logs for details"
            )
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise CloudCliError(
                "Failed to execute the following command, please check the "
                "logs for details: %s" % e.cmd
            )

    def check_image_availability(self, publisher, offer, sku, region):
        """
        Verify that a VM image (publisher/offer/sku) exists in the given
        Azure location.

        :raises CloudCliError: if the image is unavailable or the command
            fails / returns unparseable JSON.
        """
        try:
            output = exec_shell([
                self.bin("az"),
                "vm",
                "image",
                "list",
                "--all",
                "-p \"%s\"" % publisher,
                "-f \"%s\"" % offer,
                "-s \"%s\"" % sku,
                "-l %s" % region,
                "--query",
                "\"[?offer == '%s' && sku =='%s']\"" % (offer, sku),
                "--output json"
            ])
            result = json.loads(output.decode("utf-8"))
            logging.debug("Command output: %s", result)
            if len(result) == 0:
                raise CloudCliError(
                    "Image %s:%s:%s not available in region %s"
                    % (publisher, offer, sku, region)
                )
        except ValueError:
            # JSON decoding error
            logging.error("Failed to decode JSON data")
            logging.error("Output: %s", output.decode("utf-8"))
            raise CloudCliError(
                "Failed to decode JSON data, please check the logs for details"
            )
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise CloudCliError(
                "Failed to execute the following command, please check the "
                "logs for details: %s" % e.cmd
            )

    def check_instances_availability(self, project_name):
        """
        Block until all VMs in the project's resource group reach the
        'created' state (delegates the waiting to `az vm wait`).

        :raises CloudCliError: on command failure.
        """
        try:
            output = exec_shell([
                self.bin("az"),
                "vm",
                "wait",
                "--ids",
                # The inner $() is expanded by the shell to the list of VM ids
                # in the project's resource group.
                "$(%s vm list -g \"%s_edb_resource_group\" --query \"[].id\" -o tsv)"
                % (self.bin("az"), project_name),
                "--created"
            ])
            logging.debug("Command output: %s", output.decode("utf-8"))
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise CloudCliError(
                "Failed to execute the following command, please check the "
                "logs for details: %s" % e.cmd
            )
class GCloudCli:
    """
    Thin wrapper around the gcloud command line tool.

    Provides version checking and Compute Engine lookups used to validate a
    deployment configuration before provisioning.
    """

    def __init__(self, bin_path=None):
        """
        :param bin_path: optional directory to look the gcloud executable up
            in. It is honoured only if the directory exists and contains a
            file named ``gcloud``; otherwise the executable is resolved via
            PATH.
        """
        # gcloud CLI supported versions interval
        self.min_version = (0, 0, 0)
        self.max_version = (329, 0, 0)
        # Path to look up for executable
        self.bin_path = None
        # Force gcloud CLI binary path if bin_path exists and contains
        # gcloud file.
        if bin_path is not None and os.path.exists(bin_path):
            if os.path.exists(os.path.join(bin_path, 'gcloud')):
                self.bin_path = bin_path

    def check_version(self):
        """
        Verify gcloud CLI version, based on the interval formed by min_version and
        max_version.
        gcloud CLI version is fetched using the command: gcloud --version

        :raises Exception: if the CLI is missing, its version cannot be
            parsed, or the version is outside the supported interval.
        """
        try:
            output = exec_shell([self.bin("gcloud"), "--version"])
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise Exception(
                "gcloud CLI executable seems to be missing. Please install it or "
                "check your PATH variable"
            )

        version = None
        # Parse command output and extract the version number
        pattern = re.compile(r"^Google Cloud SDK ([0-9]+)\.([0-9]+)\.([0-9]+)")
        for line in output.decode("utf-8").split("\n"):
            m = pattern.search(line)
            if m:
                version = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
                break

        if version is None:
            raise Exception("Unable to parse gcloud CLI version")

        logging.info("gcloud CLI version: %s", '.'.join(map(str, version)))

        # Verify if the version fetched is supported.
        # (Locals renamed lo/hi: the originals shadowed the min/max builtins.)
        for i in range(0, 3):
            lo = self.min_version[i]
            hi = self.max_version[i]

            if version[i] < hi:
                # If current digit is below the maximum value, no need to
                # check others digits, we are good
                break

            if not (lo <= version[i] <= hi):
                raise Exception(
                    ("gcloud CLI version %s not supported, must be between %s and"
                     " %s") % (
                        '.'.join(map(str, version)),
                        '.'.join(map(str, self.min_version)),
                        '.'.join(map(str, self.max_version)),
                    )
                )

    def bin(self, binary):
        """
        Return binary's path
        """
        if self.bin_path is not None:
            return os.path.join(self.bin_path, binary)
        else:
            return binary

    def check_instance_type_availability(self, instance_type, region):
        """
        Verify that a machine type is available in zones of the given region.

        :raises CloudCliError: if the type is unavailable or the command
            fails / returns unparseable JSON.
        """
        try:
            output = exec_shell([
                self.bin("gcloud"),
                "compute",
                "machine-types",
                "list",
                "--filter=\"name=%s zone:%s*\"" % (instance_type, region),
                "--format=json"
            ])
            result = json.loads(output.decode("utf-8"))
            logging.debug("Command output: %s", result)
            if len(result) == 0:
                raise CloudCliError(
                    "Instance type %s not available in region %s"
                    % (instance_type, region)
                )
        except ValueError:
            # JSON decoding error
            logging.error("Failed to decode JSON data")
            logging.error("Output: %s", output.decode("utf-8"))
            raise CloudCliError(
                "Failed to decode JSON data, please check the logs for details"
            )
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise CloudCliError(
                "Failed to execute the following command, please check the "
                "logs for details: %s" % e.cmd
            )

    def check_image_availability(self, image):
        """
        Verify that the image family exists and its latest image is READY.

        :raises CloudCliError: if the image is unavailable or the command
            fails / returns unparseable JSON.
        """
        try:
            output = exec_shell([
                self.bin("gcloud"),
                "compute",
                "images",
                "list",
                "--filter=\"family=%s\"" % image,
                "--format=json"
            ])
            result = json.loads(output.decode("utf-8"))
            logging.debug("Command output: %s", result)
            if len(result) == 0 or result[0]['status'] != 'READY':
                raise CloudCliError("Image %s not available" % image)
        except ValueError:
            # JSON decoding error
            logging.error("Failed to decode JSON data")
            logging.error("Output: %s", output.decode("utf-8"))
            raise CloudCliError(
                "Failed to decode JSON data, please check the logs for details"
            )
        except CalledProcessError as e:
            logging.error("Failed to execute the command: %s", e.cmd)
            logging.error("Return code is: %s", e.returncode)
            logging.error("Output: %s", e.output)
            raise CloudCliError(
                "Failed to execute the following command, please check the "
                "logs for details: %s" % e.cmd
            )

    def check_instances_availability(self, project_name, region, node_count):
        """
        Poll until at least node_count instances of the project are RUNNING
        in the region, retrying a bounded number of times.

        :raises CloudCliError: if the instances are not all running after the
            retry budget is exhausted, or on command failure.
        """
        try_count = 0
        try_max = 5
        try_nap_time = 2
        while True:
            if try_count >= try_max:
                # Bug fix: the error message previously misspelled "tries".
                raise CloudCliError(
                    "Unable to check instances availability after %s tries"
                    % try_count
                )

            try_count += 1
            try:
                output = exec_shell([
                    self.bin("gcloud"),
                    "compute",
                    "instances",
                    "list",
                    "--filter=\"name:%s-* zone ~ %s-[a-z] status=RUNNING\""
                    % (project_name, region),
                    "--format=json"
                ])
                result = json.loads(output.decode("utf-8"))
                logging.debug("Command output: %s", result)
                if (len(result) >= node_count):
                    # Number of ready instances is good, just break the loop
                    break
                time.sleep(try_nap_time)
            except ValueError:
                # JSON decoding error
                logging.error("Failed to decode JSON data")
                logging.error("Output: %s", output.decode("utf-8"))
                raise CloudCliError(
                    "Failed to decode JSON data, please check the logs for "
                    "details"
                )
            except CalledProcessError as e:
                logging.error("Failed to execute the command: %s", e.cmd)
                logging.error("Return code is: %s", e.returncode)
                logging.error("Output: %s", e.output)
                raise CloudCliError(
                    "Failed to execute the following command, please check the"
                    " logs for details: %s" % e.cmd
                )
class CloudCli:
    """
    Facade that dispatches to the vendor-specific CLI wrapper for the
    requested cloud ('aws', 'aws-rds', 'aws-rds-aurora', 'azure', 'gcloud').
    """

    def __init__(self, cloud, bin_path):
        """
        :param cloud: cloud vendor identifier selecting the wrapper.
        :param bin_path: forwarded to the vendor wrapper's constructor.
        :raises Exception: when the cloud identifier is unknown.
        """
        self.cloud = cloud
        if self.cloud == 'aws':
            self.cli = AWSCli(bin_path)
        elif self.cloud == 'aws-rds':
            self.cli = AWSRDSCli(bin_path)
        elif self.cloud == 'aws-rds-aurora':
            self.cli = AWSRDSAuroraCli(bin_path)
        elif self.cloud == 'azure':
            self.cli = AzureCli(bin_path)
        elif self.cloud == 'gcloud':
            self.cli = GCloudCli(bin_path)
        else:
            # Bug fix: the cloud name was previously passed as a second
            # positional argument to Exception instead of being %-formatted
            # into the message, so str(exc) rendered as a tuple.
            raise Exception("Unknown cloud %s" % self.cloud)

    def check_instance_type_availability(self, instance_type, region):
        """Delegate the instance-type availability check to the wrapper."""
        return self.cli.check_instance_type_availability(instance_type, region)

    def check_version(self):
        """Delegate the CLI version check to the wrapper."""
        self.cli.check_version()
| 37.756303
| 85
| 0.50897
| 2,473
| 22,465
| 4.55924
| 0.090578
| 0.058537
| 0.030599
| 0.036718
| 0.844967
| 0.837605
| 0.826253
| 0.803104
| 0.792639
| 0.770732
| 0
| 0.006822
| 0.380102
| 22,465
| 594
| 86
| 37.819865
| 0.802815
| 0.075406
| 0
| 0.692632
| 0
| 0
| 0.222411
| 0.013478
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046316
| false
| 0.006316
| 0.014737
| 0.002105
| 0.092632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1762b4840bc82ae01d6dcb35cb217712100dc3ae
| 42,149
|
py
|
Python
|
pirates/leveleditor/worldData/port_royal_area_jungle_c_1.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 81
|
2018-04-08T18:14:24.000Z
|
2022-01-11T07:22:15.000Z
|
pirates/leveleditor/worldData/port_royal_area_jungle_c_1.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 4
|
2018-09-13T20:41:22.000Z
|
2022-01-08T06:57:00.000Z
|
pirates/leveleditor/worldData/port_royal_area_jungle_c_1.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 26
|
2018-05-26T12:49:27.000Z
|
2021-09-11T09:11:59.000Z
|
from pandac.PandaModules import Point3, VBase3, Vec4
objectStruct = {'Interact Links': [['1175892736.0dxschafe', '1165197469.59Shochet', 'Bi-directional'], ['1165197301.95Shochet', '1165197288.56Shochet', 'Bi-directional'], ['1175901440.0dxschafe', '1175892736.0dxschafe2', 'Bi-directional'], ['1175901568.0dxschafe', '1165197257.5Shochet', 'Bi-directional'], ['1175892864.0dxschafe', '1175901952.0dxschafe', 'Bi-directional'], ['1175892352.0dxschafe0', '1175902080.0dxschafe', 'Bi-directional']],'Objects': {'1164141722.61sdnaik': {'Type': 'Island Game Area','Name': 'port_royal_area_jungle_c_1','File': '','Environment': 'Jungle','AdditionalData': ['JungleAreaC'],'Footstep Sound': 'Sand','Instanced': True,'Minimap': False,'Objects': {'1164141948.44sdnaik': {'Type': 'Locator Node','Name': 'portal_interior_1','Hpr': VBase3(-4.256, 0.0, 0.0),'Pos': Point3(-632.715, -263.407, 75.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1164141948.45sdnaik': {'Type': 'Locator Node','Name': 'portal_interior_2','Hpr': VBase3(107.903, 0.0, 0.0),'Pos': Point3(304.679, -408.087, 115.611),'Scale': VBase3(1.0, 1.0, 1.0)},'1164939070.28Shochet': {'Type': 'Spawn Node','Aggro Radius': '12.0000','AnimSet': 'default','Hpr': VBase3(128.928, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-405.761, -124.137, 102.347),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Model': 'models/misc/smiley'}},'1164939086.73Shochet': {'Type': 'Spawn Node','Aggro Radius': '12.0000','AnimSet': 'default','Hpr': VBase3(-67.644, 0.0, 0.0),'Min Population': '3','Patrol Radius': '12.0000','Pause Chance': '100','Pause Duration': '30','Pos': Point3(-372.829, -294.251, 101.077),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start 
State': 'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Model': 'models/misc/smiley'}},'1164939103.3Shochet': {'Type': 'Spawn Node','Aggro Radius': '12.0000','AnimSet': 'default','Hpr': VBase3(93.878, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-268.592, -126.948, 117.51),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Model': 'models/misc/smiley'}},'1164939260.28Shochet': {'Type': 'Spawn Node','Aggro Radius': '12.0000','AnimSet': 'default','Hpr': Point3(0.0, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': '0','Pause Duration': '5','Pos': Point3(356.371, -315.345, 113.48),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Bat T4','Start State': 'Patrol','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Model': 'models/misc/smiley'}},'1164939309.61Shochet': {'Type': 'Spawn Node','Aggro Radius': '12.0000','AnimSet': 'default','Hpr': VBase3(83.792, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-495.787, -282.083, 87.745),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Model': 'models/misc/smiley'}},'1165197257.5Shochet': {'Type': 'Object Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-312.947, -311.051, 107.246),'Priority': '1','Scale': VBase3(1.0, 1.0, 1.0),'SpawnDelay': 
'300','Spawnables': 'Buried Treasure','VisSize': '','Visual': {'Color': (0.8, 0.2, 0.65, 1),'Model': 'models/misc/smiley'},'startingDepth': '12'},'1165197288.56Shochet': {'Type': 'Object Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(101.459, 2.186, 117.559),'Priority': '1','Scale': VBase3(1.0, 1.0, 1.0),'SpawnDelay': '300','Spawnables': 'Buried Treasure','VisSize': '','Visual': {'Color': (0.8, 0.2, 0.65, 1),'Model': 'models/misc/smiley'},'startingDepth': '12'},'1165197301.95Shochet': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': VBase3(-27.089, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(120.252, 15.244, 116.711),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Patrol','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0.0, 0.0, 0.65, 1.0),'Model': 'models/misc/smiley'}},'1165197323.8Shochet': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': VBase3(143.13, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(205.686, 112.289, 111.874),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Gator T4','Start State': 'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1165197469.59Shochet': {'Type': 'Object Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-122.368, -172.88, 128.692),'Priority': '1','Scale': VBase3(1.0, 1.0, 1.0),'SpawnDelay': '20','Spawnables': 'Buried Treasure','VisSize': '','Visual': {'Color': (0.8, 0.2, 0.65, 1.0),'Model': 'models/misc/smiley'},'startingDepth': '12'},'1175891840.0dxschafe': {'Type': 'Spawn Node','Aggro Radius': '12.0000','AnimSet': 
'default','Hpr': VBase3(158.581, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': '100','Pause Duration': '30','Pos': Point3(211.261, 154.38, 106.955),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Gator T4','Start State': 'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1175892224.0dxschafe': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': VBase3(56.444, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': '0','Pause Duration': '5','Pos': Point3(-44.099, -38.298, 123.644),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Bat T4','Start State': 'Patrol','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1175892352.0dxschafe0': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': Point3(0.0, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(27.319, 152.447, 117.1),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Ambush','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1175892352.0dxschafe1': {'Type': 'Spawn Node','Aggro Radius': '12.0000','AnimSet': 'default','Hpr': Point3(0.0, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(18.93, 55.016, 119.339),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 
'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1175892736.0dxschafe': {'Type': 'Spawn Node','Aggro Radius': '15.9639','AnimSet': 'default','Hpr': Point3(0.0, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-111.044, -187.834, 128.609),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Wasp T3','Start State': 'Patrol','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1175892736.0dxschafe1': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': Point3(0.0, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-390.404, -374.448, 96.8),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Wasp T4','Start State': 'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1175892736.0dxschafe2': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': Point3(0.0, 0.0, 0.0),'Min Population': '1','Patrol Radius': '1.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-464.324, -399.697, 87.853),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Ambush','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1175892864.0dxschafe': {'Type': 'Spawn Node','Aggro Radius': '12.0000','AnimSet': 'default','Hpr': VBase3(-163.473, 0.0, 0.0),'Min Population': 
'1','Patrol Radius': '12.0000','Pause Chance': '0','Pause Duration': '5','Pos': Point3(-463.553, -95.888, 96.742),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Bat T4','Start State': 'Patrol','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1175901184.0dxschafe': {'Type': 'Object Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(265.836, 31.799, 111.406),'Priority': '1','Scale': VBase3(1.0, 1.0, 1.0),'SpawnDelay': '20','Spawnables': 'Buried Treasure','VisSize': '','Visual': {'Color': (0.8, 0.2, 0.65, 1),'Model': 'models/misc/smiley'},'startingDepth': '12'},'1175901312.0dxschafe': {'Type': 'Object Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-28.718, -99.13, 124.045),'Priority': '1','Scale': VBase3(1.0, 1.0, 1.0),'SpawnDelay': '20','Spawnables': 'Buried Treasure','VisSize': '','Visual': {'Color': (0.8, 0.2, 0.65, 1),'Model': 'models/misc/smiley'},'startingDepth': '12'},'1175901440.0dxschafe': {'Type': 'Object Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-509.896, -361.984, 83.883),'Priority': '1','Scale': VBase3(1.0, 1.0, 1.0),'SpawnDelay': '20','Spawnables': 'Buried Treasure','VisSize': '','Visual': {'Color': (0.8, 0.2, 0.65, 1),'Model': 'models/misc/smiley'},'startingDepth': '12'},'1175901568.0dxschafe': {'Type': 'Spawn Node','Aggro Radius': '12.0000','AnimSet': 'default','Hpr': VBase3(100.387, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-314.675, -287.615, 107.733),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Wasp T4','Start State': 'Patrol','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 
'models/misc/smiley'}},'1175901696.0dxschafe': {'Type': 'Spawn Node','Aggro Radius': '14.1566','AnimSet': 'default','Hpr': Point3(0.0, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-143.198, -72.187, 127.375),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Model': 'models/misc/smiley'}},'1175901696.0dxschafe0': {'Type': 'Spawn Node','Aggro Radius': '13.8554','AnimSet': 'default','Hpr': VBase3(113.394, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(33.63, 114.009, 117.659),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Model': 'models/misc/smiley'}},'1175901696.0dxschafe1': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': VBase3(159.441, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': '0','Pause Duration': '5','Pos': Point3(210.848, -47.225, 114.902),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Patrol','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Model': 'models/misc/smiley'}},'1175901696.0dxschafe2': {'Type': 'Spawn Node','Aggro Radius': '11.1446','AnimSet': 'default','Hpr': VBase3(175.144, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': '0','Pause Duration': '5','Pos': Point3(272.21, -163.32, 115.157),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 
1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Patrol','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Model': 'models/misc/smiley'}},'1175901696.0dxschafe3': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': VBase3(125.192, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(329.874, -354.521, 114.541),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Scorp T3','Start State': 'Idle','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Model': 'models/misc/smiley'}},'1175901952.0dxschafe': {'Type': 'Object Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(-497.893, -114.69, 92.381),'Priority': '1','Scale': VBase3(1.0, 1.0, 1.0),'SpawnDelay': '20','Spawnables': 'Buried Treasure','VisSize': '','Visual': {'Color': (0.8, 0.2, 0.65, 1),'Model': 'models/misc/smiley'},'startingDepth': '12'},'1175902080.0dxschafe': {'Type': 'Object Spawn Node','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(27.056, 182.359, 116.509),'Priority': '1','Scale': VBase3(1.0, 1.0, 1.0),'SpawnDelay': '20','Spawnables': 'Buried Treasure','VisSize': '','Visual': {'Color': (0.8, 0.2, 0.65, 1),'Model': 'models/misc/smiley'},'startingDepth': '12'},'1179265791.47Aholdun': {'Type': 'Player Spawn Node','Hpr': VBase3(-56.324, 0.0, 0.0),'Index': -1,'Pos': Point3(-205.82, -74.384, 126.01),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1179265841.94Aholdun': {'Type': 'Player Spawn Node','Hpr': VBase3(-70.028, 0.0, 0.0),'Index': -1,'Pos': Point3(-511.784, -195.934, 88.483),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1179265866.19Aholdun': {'Type': 'Player Spawn Node','Hpr': VBase3(-68.126, 0.0, 
0.0),'Index': -1,'Pos': Point3(-303.287, -101.963, 114.378),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1179265884.3Aholdun': {'Type': 'Player Spawn Node','Hpr': VBase3(-61.773, 0.0, 0.0),'Index': -1,'Pos': Point3(-101.443, 8.502, 124.344),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1179265965.55Aholdun': {'Type': 'Player Spawn Node','Hpr': VBase3(-37.707, 0.0, 0.0),'Index': -1,'Pos': Point3(-361.817, -234.733, 104.026),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1188441856.0dxschafe': {'Type': 'Player Spawn Node','Hpr': VBase3(28.132, 0.0, 0.0),'Index': -1,'Pos': Point3(284.681, -333.714, 115.999),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1188441856.0dxschafe0': {'Type': 'Player Spawn Node','Hpr': VBase3(-160.871, 0.0, 0.0),'Index': -1,'Pos': Point3(151.044, 73.064, 114.509),'Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'All','Visual': {'Color': (0.5, 0.5, 0.5, 1),'Model': 'models/misc/smiley'}},'1190846720.0dxschafe': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(366.65, -267.765, 112.937),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1190846720.0dxschafe0': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(317.426, -250.676, 114.547),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1190846720.0dxschafe1': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(287.599, -303.44, 115.776),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.65, 0, 0, 
1),'Model': 'models/misc/smiley'}},'1190847360.0dxschafe': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(-533.511, -249.458, 84.517),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1190847360.0dxschafe0': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(-424.114, -319.024, 94.658),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1190847360.0dxschafe1': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(-339.944, -229.384, 106.61),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1190847488.0dxschafe': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(62.21, 124.587, 116.479),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1190847488.0dxschafe0': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(165.604, -4.669, 115.577),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1190847616.0dxschafe': {'Type': 'Spawn Node','Aggro Radius': '12.0000','AnimSet': 'default','Hpr': VBase3(176.71, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': '100','Pause Duration': '30','Pos': Point3(168.411, 140.387, 112.571),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Gator T4','Start State': 'Patrol','StartFrame': '0','Team': '1','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1192645760.0dxschafe': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 
0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(255.886, 44.895, 111.53),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192645760.0dxschafe0': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(113.373, 14.589, 116.957),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192645888.0dxschafe': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(221.687, -85.284, 115.299),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192645888.0dxschafe0': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(250.07, -148.329, 115.605),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192645888.0dxschafe1': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(246.683, -204.326, 116.771),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192646016.0dxschafe': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(204.477, -332.773, 118.728),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192646016.0dxschafe0': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(305.498, -272.237, 115.041),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192646144.0dxschafe': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(235.126, 51.284, 112.104),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': 
{'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192646144.0dxschafe0': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(63.87, 96.958, 116.978),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192646144.0dxschafe1': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(209.347, 178.181, 106.955),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192646400.0dxschafe0': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(-1.619, 92.0, 119.292),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192646400.0dxschafe1': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(-163.425, -120.076, 129.02),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1192646400.0dxschafe2': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': '0','Pause Duration': '5','Pos': Point3(-504.682, -169.48, 90.039),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1230751967.29kmuller': {'Type': 'Collision Barrier','DisableCollision': False,'Holiday': '','Hpr': VBase3(50.574, 0.0, 0.0),'Pos': Point3(-41.362, 131.752, 119.192),'Scale': VBase3(1.0, 1.0, 2.237),'VisSize': '','Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}},'1230752192.42kmuller': {'Type': 'Collision Barrier','DisableCollision': False,'Holiday': '','Hpr': VBase3(86.59, 0.0, 0.0),'Pos': Point3(-515.671, -361.698, 81.901),'Scale': VBase3(1.0, 1.0, 3.117),'VisSize': '','Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}},'1230752342.47kmuller': {'Type': 'Bush','DisableCollision': True,'Holiday': 
'','Hpr': VBase3(58.152, 0.0, 0.0),'Pos': Point3(380.918, -284.854, 112.761),'Scale': VBase3(1.0, 1.0, 0.848),'VisSize': '','Visual': {'Model': 'models/vegetation/bush_c'}},'1230752387.67kmuller': {'Type': 'Collision Barrier','DisableCollision': False,'Holiday': '','Hpr': VBase3(-44.786, 0.0, 0.0),'Pos': Point3(385.341, -287.334, 111.74),'Scale': VBase3(4.762, 2.112, 2.112),'VisSize': '','Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_cube'}},'1230752456.97kmuller': {'Type': 'Collision Barrier','DisableCollision': False,'Holiday': '','Hpr': VBase3(-80.499, 0.0, 0.0),'Pos': Point3(387.309, -301.839, 111.848),'Scale': VBase3(1.461, 1.327, 2.416),'VisSize': '','Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}},'1230752640.42kmuller': {'Type': 'Collision Barrier','DisableCollision': False,'Holiday': '','Hpr': VBase3(170.035, 0.0, 0.0),'Pos': Point3(191.843, -342.681, 118.168),'Scale': VBase3(1.675, 1.675, 2.78),'VisSize': '','Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}},'1230752671.32kmuller': {'Type': 'Collision Barrier','DisableCollision': False,'Holiday': '','Hpr': VBase3(104.622, 0.0, 0.0),'Pos': Point3(201.062, -363.66, 118.336),'Scale': VBase3(0.763, 1.0, 2.724),'VisSize': '','Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}},'1240954760.37piwanow': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': Point3(0.0, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': '100','Pause Duration': '30','Pos': Point3(-467.452, -368.183, 88.418),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Wasp T4','Start State': 'Patrol','StartFrame': '0','Team': 'default','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1240954777.44piwanow': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': 100,'Pause Duration': 30,'Pos': 
Point3(-494.747, -326.253, 86.6),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1240954779.93piwanow': {'Type': 'Movement Node','Hpr': Point3(0.0, 0.0, 0.0),'Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-430.449, -341.382, 93.307),'Scale': VBase3(1.0, 1.0, 1.0),'VisSize': '','Visual': {'Color': (0.65, 0, 0, 1),'Model': 'models/misc/smiley'}},'1240954868.15piwanow': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': VBase3(-45.221, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(181.07, -90.795, 116.783),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Wasp T4','Start State': 'Idle','StartFrame': '0','Team': 'default','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1240961337.65piwanow': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': VBase3(299.655, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-158.77, -138.719, 129.237),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Wasp T3','Start State': 'Patrol','StartFrame': '0','Team': 'default','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1240961889.02piwanow': {'Type': 'Spawn Node','Aggro Radius': '14.1566','AnimSet': 'default','Hpr': VBase3(40.914, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-332.404, -330.502, 104.52),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Wasp T4','Start State': 'Idle','StartFrame': '0','Team': 'default','TrailFX': 
'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1240962134.4piwanow': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': Point3(0.0, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(77.034, -31.858, 119.119),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Bat T4','Start State': 'Idle','StartFrame': '0','Team': 'default','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1240962151.72piwanow': {'Type': 'Spawn Node','Aggro Radius': '14.7590','AnimSet': 'default','Hpr': VBase3(-80.897, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(128.509, -48.108, 117.704),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Wasp T3','Start State': 'Idle','StartFrame': '0','Team': 'default','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1240962222.66piwanow': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': VBase3(-68.894, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(178.959, -321.47, 119.553),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Wasp T4','Start State': 'Idle','StartFrame': '0','Team': 'default','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1240962281.96piwanow': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': VBase3(131.795, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause 
Duration': 30,'Pos': Point3(369.213, -243.718, 112.753),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Bat T4','Start State': 'Idle','StartFrame': '0','Team': 'default','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}},'1248385280.0jloehrle': {'Type': 'Spawn Node','AnimSet': 'default','Hpr': Point3(0.0, 0.0, 0.0),'Min Population': '1','Patrol Radius': '12.0000','Pause Chance': 100,'Pause Duration': 30,'Pos': Point3(-67.519, -132.104, 126.019),'PoseAnim': '','PoseFrame': '','PropLeft': 'None','PropRight': 'None','Scale': VBase3(1.0, 1.0, 1.0),'Spawnables': 'Wasp T3','Start State': 'Patrol','StartFrame': '0','Team': 'default','TrailFX': 'None','TrailLeft': 'None','TrailRight': 'None','VisSize': '','Visual': {'Color': (0, 0, 0.65, 1),'Model': 'models/misc/smiley'}}},'Visibility': 'Grid','Visual': {'Model': 'models/jungles/jungle_c_zero'}}},'TodSettings': {'AmbientColors': {0: Vec4(0.45, 0.53, 0.65, 1),2: Vec4(1, 1, 1, 1),4: Vec4(0.4, 0.45, 0.5, 1),6: Vec4(0.44, 0.45, 0.56, 1),8: Vec4(0.39, 0.42, 0.54, 1),12: Vec4(0.34, 0.28, 0.41, 1),13: Vec4(0.34, 0.28, 0.41, 1),14: Vec4(0.66, 0.76, 0.41, 1),15: Vec4(0.66, 0.76, 0.41, 1),16: Vec4(0.25, 0.25, 0.25, 1),17: Vec4(0.66, 0.76, 0.41, 1)},'DirectionalColors': {0: Vec4(0.55, 0.46, 0.35, 1),2: Vec4(1, 1, 0.984314, 1),4: Vec4(0.6, 0.34, 0.1, 1),6: Vec4(0.46, 0.48, 0.45, 1),8: Vec4(0.42, 0.42, 0.4, 1),12: Vec4(0.66, 0.76, 0.05, 1),13: Vec4(0.66, 0.76, 0.05, 1),14: Vec4(0.3, 0.2, 0.53, 1),15: Vec4(0.3, 0.2, 0.53, 1),16: Vec4(0, 0, 0, 1),17: Vec4(0.3, 0.2, 0.53, 1)},'FogColors': {0: Vec4(0.3, 0.2, 0.15, 0),2: Vec4(0.6, 0.694118, 0.894118, 1),4: Vec4(0.3, 0.18, 0.15, 0),6: Vec4(0.15, 0.2, 0.35, 0),8: Vec4(0.05, 0.06, 0.17, 0),12: Vec4(0.1, 0.12, 0.03, 0),13: Vec4(0.1, 0.12, 0.03, 0),14: Vec4(0.1, 0.12, 0.03, 0),15: Vec4(0.1, 0.12, 0.03, 0),16: Vec4(0.25, 0.25, 0.25, 1),17: 
Vec4(0.1, 0.12, 0.03, 0)},'FogRanges': {0: 0.0001,2: 9.999999747378752e-05,4: 0.0001,6: 0.0001,8: 0.0002,12: 0.00025,13: 0.00025,14: 0.00025,15: 0.00025,16: 0.0001,17: 0.005},'LinearFogRanges': {0: (0.0, 100.0),2: (0.0, 100.0),4: (0.0, 100.0),6: (0.0, 100.0),8: (0.0, 100.0),12: (0.0, 100.0),13: (0.0, 100.0),14: (0.0, 100.0),15: (0.0, 100.0),16: (0.0, 100.0),17: (0.0, 100.0)}},'Node Links': [['1190846720.0dxschafe', '1164939260.28Shochet', 'Bi-directional'], ['1190846720.0dxschafe', '1190846720.0dxschafe0', 'Bi-directional'], ['1190846720.0dxschafe1', '1190846720.0dxschafe0', 'Bi-directional'], ['1190846720.0dxschafe1', '1164939260.28Shochet', 'Bi-directional'], ['1190847360.0dxschafe', '1175892864.0dxschafe', 'Bi-directional'], ['1190847360.0dxschafe', '1190847360.0dxschafe0', 'Bi-directional'], ['1190847360.0dxschafe1', '1190847360.0dxschafe0', 'Bi-directional'], ['1190847360.0dxschafe1', '1175892864.0dxschafe', 'Bi-directional'], ['1175892224.0dxschafe', '1190847488.0dxschafe0', 'Bi-directional'], ['1190847488.0dxschafe0', '1190847488.0dxschafe', 'Bi-directional'], ['1175892224.0dxschafe', '1190847488.0dxschafe', 'Bi-directional'], ['1192645760.0dxschafe0', '1175901696.0dxschafe1', 'Bi-directional'], ['1192645760.0dxschafe0', '1192645760.0dxschafe', 'Bi-directional'], ['1192645760.0dxschafe', '1175901696.0dxschafe1', 'Bi-directional'], ['1192646016.0dxschafe', '1175901696.0dxschafe2', 'Bi-directional'], ['1192646016.0dxschafe', '1192646016.0dxschafe0', 'Bi-directional'], ['1192646016.0dxschafe0', '1175901696.0dxschafe2', 'Bi-directional'], ['1192646144.0dxschafe', '1175891840.0dxschafe', 'Bi-directional'], ['1192646144.0dxschafe', '1192646144.0dxschafe1', 'Bi-directional'], ['1192646144.0dxschafe0', '1192646144.0dxschafe1', 'Bi-directional'], ['1192646144.0dxschafe0', '1192646144.0dxschafe', 'Bi-directional'], ['1192646144.0dxschafe0', '1190847616.0dxschafe', 'Bi-directional'], ['1164939086.73Shochet', '1190847360.0dxschafe0', 'Bi-directional'], 
['1192646400.0dxschafe1', '1192646400.0dxschafe2', 'Bi-directional'], ['1192646400.0dxschafe0', '1192646400.0dxschafe1', 'Bi-directional'], ['1192645888.0dxschafe0', '1192645888.0dxschafe', 'Bi-directional'], ['1192645888.0dxschafe1', '1192645888.0dxschafe0', 'Bi-directional'], ['1240954760.37piwanow', '1240954777.44piwanow', 'Bi-directional'], ['1240954760.37piwanow', '1240954779.93piwanow', 'Bi-directional'], ['1240954777.44piwanow', '1240954779.93piwanow', 'Bi-directional']],'Layers': {},'ObjectIds': {'1164141722.61sdnaik': '["Objects"]["1164141722.61sdnaik"]','1164141948.44sdnaik': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164141948.44sdnaik"]','1164141948.45sdnaik': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164141948.45sdnaik"]','1164939070.28Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164939070.28Shochet"]','1164939086.73Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164939086.73Shochet"]','1164939103.3Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164939103.3Shochet"]','1164939260.28Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164939260.28Shochet"]','1164939309.61Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164939309.61Shochet"]','1165197257.5Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1165197257.5Shochet"]','1165197288.56Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1165197288.56Shochet"]','1165197301.95Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1165197301.95Shochet"]','1165197323.8Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1165197323.8Shochet"]','1165197469.59Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1165197469.59Shochet"]','1175891840.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175891840.0dxschafe"]','1175892224.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892224.0dxschafe"]','1175892352.0dxschafe0': 
'["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892352.0dxschafe0"]','1175892352.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892352.0dxschafe1"]','1175892736.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892736.0dxschafe"]','1175892736.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892736.0dxschafe1"]','1175892736.0dxschafe2': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892736.0dxschafe2"]','1175892864.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892864.0dxschafe"]','1175901184.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901184.0dxschafe"]','1175901312.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901312.0dxschafe"]','1175901440.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901440.0dxschafe"]','1175901568.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901568.0dxschafe"]','1175901696.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901696.0dxschafe"]','1175901696.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901696.0dxschafe0"]','1175901696.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901696.0dxschafe1"]','1175901696.0dxschafe2': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901696.0dxschafe2"]','1175901696.0dxschafe3': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901696.0dxschafe3"]','1175901952.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901952.0dxschafe"]','1175902080.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175902080.0dxschafe"]','1179265791.47Aholdun': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1179265791.47Aholdun"]','1179265841.94Aholdun': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1179265841.94Aholdun"]','1179265866.19Aholdun': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1179265866.19Aholdun"]','1179265884.3Aholdun': 
'["Objects"]["1164141722.61sdnaik"]["Objects"]["1179265884.3Aholdun"]','1179265965.55Aholdun': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1179265965.55Aholdun"]','1188441856.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1188441856.0dxschafe"]','1188441856.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1188441856.0dxschafe0"]','1190846720.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190846720.0dxschafe"]','1190846720.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190846720.0dxschafe0"]','1190846720.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190846720.0dxschafe1"]','1190847360.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847360.0dxschafe"]','1190847360.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847360.0dxschafe0"]','1190847360.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847360.0dxschafe1"]','1190847488.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847488.0dxschafe"]','1190847488.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847488.0dxschafe0"]','1190847616.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847616.0dxschafe"]','1192645760.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192645760.0dxschafe"]','1192645760.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192645760.0dxschafe0"]','1192645888.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192645888.0dxschafe"]','1192645888.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192645888.0dxschafe0"]','1192645888.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192645888.0dxschafe1"]','1192646016.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646016.0dxschafe"]','1192646016.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646016.0dxschafe0"]','1192646144.0dxschafe': 
'["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646144.0dxschafe"]','1192646144.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646144.0dxschafe0"]','1192646144.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646144.0dxschafe1"]','1192646400.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646400.0dxschafe0"]','1192646400.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646400.0dxschafe1"]','1192646400.0dxschafe2': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646400.0dxschafe2"]','1230751967.29kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230751967.29kmuller"]','1230752192.42kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752192.42kmuller"]','1230752342.47kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752342.47kmuller"]','1230752387.67kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752387.67kmuller"]','1230752456.97kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752456.97kmuller"]','1230752640.42kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752640.42kmuller"]','1230752671.32kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752671.32kmuller"]','1240954760.37piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240954760.37piwanow"]','1240954777.44piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240954777.44piwanow"]','1240954779.93piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240954779.93piwanow"]','1240954868.15piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240954868.15piwanow"]','1240961337.65piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240961337.65piwanow"]','1240961889.02piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240961889.02piwanow"]','1240962134.4piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240962134.4piwanow"]','1240962151.72piwanow': 
'["Objects"]["1164141722.61sdnaik"]["Objects"]["1240962151.72piwanow"]','1240962222.66piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240962222.66piwanow"]','1240962281.96piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240962281.96piwanow"]','1248385280.0jloehrle': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1248385280.0jloehrle"]'}}
extraInfo = {'camPos': Point3(-569.134, -428.277, 1468.57),'camHpr': VBase3(-44.3142, -57.7401, 3.19909e-06),'focalLength': 1.39999997616,'skyState': -2,'fog': 0}
| 14,049.666667
| 41,933
| 0.645519
| 5,909
| 42,149
| 4.597394
| 0.109325
| 0.029375
| 0.029044
| 0.023853
| 0.700214
| 0.6108
| 0.578517
| 0.561584
| 0.53832
| 0.531915
| 0
| 0.223154
| 0.063228
| 42,149
| 3
| 41,934
| 14,049.666667
| 0.464872
| 0
| 0
| 0
| 0
| 0
| 0.576418
| 0.176038
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
179727300141671a39f39fdafafc3a9f8036910f
| 74
|
py
|
Python
|
trypython/wargame/__init__.py
|
devlights/try-python
|
67d1d26476794da81c8c76662486108ce03f8fb4
|
[
"MIT"
] | 4
|
2019-10-21T11:42:11.000Z
|
2020-03-12T16:35:51.000Z
|
trypython/wargame/__init__.py
|
devlights/try-python
|
67d1d26476794da81c8c76662486108ce03f8fb4
|
[
"MIT"
] | 80
|
2017-02-08T07:55:37.000Z
|
2021-10-06T06:30:30.000Z
|
trypython/wargame/__init__.py
|
devlights/try-python
|
67d1d26476794da81c8c76662486108ce03f8fb4
|
[
"MIT"
] | 1
|
2020-03-12T04:37:17.000Z
|
2020-03-12T04:37:17.000Z
|
def play():
from trypython.wargame.game import Game
Game().play()
| 18.5
| 43
| 0.662162
| 10
| 74
| 4.9
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202703
| 74
| 3
| 44
| 24.666667
| 0.830508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bdd55ab08810c57545e169e1489ad0e371f49f6f
| 191
|
py
|
Python
|
mayan/apps/autoadmin/tests/mixins.py
|
nattangwiwat/Mayan-EDMS-recitation
|
fcf16afb56eae812fb99144d65ae1ae6749de0b7
|
[
"Apache-2.0"
] | 343
|
2015-01-05T14:19:35.000Z
|
2018-12-10T19:07:48.000Z
|
mayan/apps/autoadmin/tests/mixins.py
|
nattangwiwat/Mayan-EDMS-recitation
|
fcf16afb56eae812fb99144d65ae1ae6749de0b7
|
[
"Apache-2.0"
] | 191
|
2015-01-03T00:48:19.000Z
|
2018-11-30T09:10:25.000Z
|
mayan/apps/autoadmin/tests/mixins.py
|
nattangwiwat/Mayan-EDMS-recitation
|
fcf16afb56eae812fb99144d65ae1ae6749de0b7
|
[
"Apache-2.0"
] | 257
|
2019-05-14T10:26:37.000Z
|
2022-03-30T03:37:36.000Z
|
from mayan.apps.common.settings import setting_home_view
class AutoAdminViewMixing:
def _request_home_view(self):
return self.get(viewname=setting_home_view.value, follow=True)
| 27.285714
| 70
| 0.795812
| 26
| 191
| 5.576923
| 0.769231
| 0.165517
| 0.206897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13089
| 191
| 6
| 71
| 31.833333
| 0.873494
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
da4aeaa3cdf0a7bc278491c6ced7ec852d748ece
| 546
|
py
|
Python
|
ravel/manifest/exceptions.py
|
gigaquads/pybiz
|
e9654592246be06a777934e889e03407c5c1673e
|
[
"MIT"
] | 2
|
2021-02-26T15:30:44.000Z
|
2021-05-22T14:06:17.000Z
|
ravel/manifest/exceptions.py
|
gigaquads/ravel
|
e9654592246be06a777934e889e03407c5c1673e
|
[
"MIT"
] | null | null | null |
ravel/manifest/exceptions.py
|
gigaquads/ravel
|
e9654592246be06a777934e889e03407c5c1673e
|
[
"MIT"
] | null | null | null |
from ravel.exceptions import ManifestError
class ManifestFileNotFound(ManifestError):
pass
class UnrecognizedManifestFileFormat(ManifestError):
pass
class ManifestValidationError(ManifestError):
pass
class ManifestInheritanceError(ManifestError):
pass
class StoreClassNotFound(ManifestError):
pass
class ResourceClassNotFound(ManifestError):
pass
class DuplicateResourceClass(ManifestError):
pass
class DuplicateStoreClass(ManifestError):
pass
class FilesystemScanTimeout(ManifestError):
pass
| 14.756757
| 52
| 0.794872
| 41
| 546
| 10.585366
| 0.390244
| 0.352535
| 0.40553
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 546
| 37
| 53
| 14.756757
| 0.939394
| 0
| 0
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.473684
| 0.052632
| 0
| 0.526316
| 0
| 0
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
e50c64f8e007dae5fa6b4b2ffb673f19e4db1570
| 7,849
|
py
|
Python
|
tests/test_domination.py
|
somacdivad/grinpy
|
597f9109b84f1c1aa8c8dd2ac5b572a05ba474de
|
[
"BSD-3-Clause"
] | 12
|
2019-08-27T11:04:09.000Z
|
2022-03-03T07:38:42.000Z
|
tests/test_domination.py
|
somacdivad/grinpy
|
597f9109b84f1c1aa8c8dd2ac5b572a05ba474de
|
[
"BSD-3-Clause"
] | 18
|
2017-12-03T20:20:11.000Z
|
2019-07-07T18:04:54.000Z
|
tests/test_domination.py
|
somacdivad/grinpy
|
597f9109b84f1c1aa8c8dd2ac5b572a05ba474de
|
[
"BSD-3-Clause"
] | 5
|
2017-11-28T22:43:05.000Z
|
2021-07-02T08:48:43.000Z
|
import grinpy as gp
import pytest
def test_non_integral_value_for_k_raises_error_in_is_k_dom_set():
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.is_k_dominating_set(G, [0], 1.5)
def test_0_value_for_k_raises_error_in_is_k_dom_set():
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.is_k_dominating_set(G, [0], 0)
def test_non_int_value_for_k_raises_error_in_min_k_dom_set():
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.min_k_dominating_set(G, 1.5)
def test_0_value_for_k_raises_error_in_min_k_dom_set():
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.min_k_dominating_set(G, 0)
def test_non_int_value_for_k_raises_error_in_k_dom_num():
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.k_domination_number(G, 1.5)
def test_0_value_for_k_raises_error_in_k_dom_num():
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.k_domination_number(G, 0)
def test_integral_float_for_k_works():
G = gp.star_graph(2)
assert gp.is_k_dominating_set(G, [0], 1.0) is True
def test_max_degree_vertex_is_dominating_set_of_star():
for i in range(1, 9):
G = gp.star_graph(i)
assert gp.is_k_dominating_set(G, [0], 1) is True
def test_min_degree_vertex_is_not_dominating_set_of_star():
for i in range(2, 9):
G = gp.star_graph(i)
assert gp.is_k_dominating_set(G, [1], 1) is False
def test_dominating_set_with_nodes_not_in_graph():
G = gp.star_graph(3)
assert gp.is_k_dominating_set(G, [4], 1) is False
assert gp.is_k_dominating_set(G, [0, 4], 1) is True
def test_max_degree_vertex_is_not_2_dominating_set_of_star():
for i in range(1, 9):
G = gp.star_graph(i)
assert gp.is_k_dominating_set(G, [0], 2) is False
def test_min_degree_vertices_are_2_dominating_set_of_star():
for i in range(2, 9):
G = gp.star_graph(i)
nodes = [i for i in range(1, i + 2)]
assert gp.is_k_dominating_set(G, nodes, 2) is True
def test_2_dominating_set_with_nodes_not_in_graph():
G = gp.star_graph(3)
nodes = [1, 2, 3, 4]
assert gp.is_k_dominating_set(G, [4], 1) is False
assert gp.is_k_dominating_set(G, nodes, 1) is True
def test_no_single_node_is_total_dominating_set_of_star():
G = gp.star_graph(3)
for v in gp.nodes(G):
assert gp.is_total_dominating_set(G, [v]) is False
def test_adjacent_vertices_are_total_dominating_set_of_star():
G = gp.star_graph(3)
for v in gp.nodes(G):
for u in gp.nodes(G):
if gp.are_neighbors(G, u, v):
assert gp.is_total_dominating_set(G, [u, v]) is True
def test_non_adjacent_vertices_not_total_dominating_set_of_star():
G = gp.star_graph(3)
for v in gp.nodes(G):
for u in gp.nodes(G):
if not gp.are_neighbors(G, u, v):
assert gp.is_total_dominating_set(G, [u, v]) is False
def test_center_vertex_of_star_is_connected_dominating_set():
G = gp.star_graph(3)
assert gp.is_connected_dominating_set(G, [0]) is True
def test_leaves_of_star_are_not_connected_dominating_set():
G = gp.star_graph(3)
D = [1, 2, 3]
assert gp.is_connected_dominating_set(G, D) is False
def test_3_adjacent_vertice_is_connected_2_dominating_set_of_4_cycle():
G = gp.cycle_graph(4)
assert gp.is_connected_k_dominating_set(G, [0, 1, 2], 2) is True
def test_non_adjacent_vertices_not_connected_2_dom_set_of_4_cycle():
G = gp.cycle_graph(4)
assert gp.is_connected_k_dominating_set(G, [0, 2], 2) is False
def test_connected_domination_number_of_star_is_1():
G = gp.star_graph(3)
assert gp.connected_domination_number(G) == 1
def test_connected_domination_number_of_P5_is_3():
G = gp.path_graph(5)
assert gp.connected_domination_number(G) == 3
def leaves_of_star_is_independent_dominating_set():
G = gp.star_graph(3)
D = [1, 2, 3]
assert gp.is_independent_dominating_set(G, D) is True
def center_node_and_leaf_is_not_ind_dom_set_of_star():
G = gp.star_graph(3)
assert gp.is_independent_dominating_set(G, [0, 1]) is False
def test_independent_domination_num_of_monster_is_3():
G = gp.star_graph(3)
G.add_edge(3, 4)
G.add_edge(3, 5)
assert gp.independent_domination_number(G, method="bf") == 3
assert gp.independent_domination_number(G, method="ilp") == 3
def test_non_int_value_for_k_raises_error_in_connected_k_dom_set():
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.is_connected_k_dominating_set(G, [0], 1.5)
def test_0_value_for_k_raises_error_in_connected_k_dom_set():
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.is_connected_k_dominating_set(G, [0], 0)
def test_non_int_value_for_k_raises_error_in_min_connected_k_dom_set():
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.min_connected_k_dominating_set(G, 1.5)
def test_0_value_for_k_raises_error_in_min_connected_k_dom_set():
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.min_connected_k_dominating_set(G, 0)
def test_non_int_value_for_k_raises_error_in_connected_k_dom_num():
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.connected_k_domination_number(G, 1.5)
def test_0_value_for_k_raises_error_in_connected_k_dom_num():
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.connected_k_domination_number(G, 0)
def test_non_int_value_for_k_raises_error_in_ind_k_dom_set():
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.is_independent_k_dominating_set(G, [0], 1.5)
def test_0_value_for_k_raises_error_in_ind_k_dom_set():
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.is_independent_k_dominating_set(G, [0], 0)
def test_non_int_value_for_k_raises_error_in_min_ind_k_dom_set():
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.min_independent_k_dominating_set(G, 1.5)
def test_0_value_for_k_raises_error_in_min_ind_k_dom_set():
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.min_independent_k_dominating_set(G, 0)
def test_min_ind_dom_set_ip_returns_same_as_bf_for_peterson_graph():
G = gp.petersen_graph()
bf = len(gp.min_independent_dominating_set(G, method="bf"))
ip = len(gp.min_independent_dominating_set(G, method="ilp"))
assert bf == ip
def test_non_int_value_for_k_raises_error_in_ind_k_dom_num():
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.independent_k_domination_number(G, 1.5)
def test_0_value_for_k_raises_error_in_ind_k_dom_num():
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.independent_k_domination_number(G, 0)
def test_min_conn_dominating_for_disconnected_graph_is_0():
G = gp.Graph()
G.add_edge(1, 2)
G.add_edge(3, 4)
assert gp.connected_domination_number(G) == 0
def test_tot_dom_for_graph_with_isolates_is_0():
G = gp.empty_graph(5)
assert gp.total_domination_number(G, method="bf") == 0
assert gp.total_domination_number(G, method="ilp") == 0
def test_domination_number_of_star_is_1():
for i in range(1, 9):
G = gp.star_graph(i)
assert gp.domination_number(G, method="bf") == 1
assert gp.domination_number(G, method="ilp") == 1
def test_2_domination_number_of_star_is_order_minus_1():
for i in range(2, 9):
G = gp.star_graph(i)
assert gp.k_domination_number(G, 2) == G.order() - 1
def test_total_domination_number_of_star_is_2():
for i in range(1, 9):
G = gp.star_graph(i)
assert gp.total_domination_number(G, method="bf") == 2
assert gp.total_domination_number(G, method="ilp") == 2
| 29.507519
| 71
| 0.710664
| 1,393
| 7,849
| 3.567839
| 0.066045
| 0.117706
| 0.052113
| 0.089336
| 0.877264
| 0.818109
| 0.76499
| 0.733199
| 0.618109
| 0.611871
| 0
| 0.02646
| 0.186266
| 7,849
| 265
| 72
| 29.618868
| 0.751683
| 0
| 0
| 0.418994
| 0
| 0
| 0.003185
| 0
| 0
| 0
| 0
| 0
| 0.173184
| 1
| 0.240223
| false
| 0
| 0.011173
| 0
| 0.251397
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e50e7a47bc251e5601bcde26793c71746abb7417
| 26
|
py
|
Python
|
example_project/source/divisions/__init__.py
|
patRyserWelch8/demo_python_testing
|
120fd44ee8150f239f94ffc4bd387533269cef17
|
[
"MIT"
] | null | null | null |
example_project/source/divisions/__init__.py
|
patRyserWelch8/demo_python_testing
|
120fd44ee8150f239f94ffc4bd387533269cef17
|
[
"MIT"
] | null | null | null |
example_project/source/divisions/__init__.py
|
patRyserWelch8/demo_python_testing
|
120fd44ee8150f239f94ffc4bd387533269cef17
|
[
"MIT"
] | null | null | null |
from .divide import divide
| 26
| 26
| 0.846154
| 4
| 26
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 1
| 26
| 26
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e52fac916cb851dd27c9b25c10c9640dbec0dec1
| 27
|
py
|
Python
|
investigate/__init__.py
|
pranali139/pyinvestigate
|
a182e73a750f03e906d9b25842d556db8d2fd54f
|
[
"MIT"
] | 63
|
2015-01-26T20:47:50.000Z
|
2021-12-09T05:58:11.000Z
|
investigate/__init__.py
|
pranali139/pyinvestigate
|
a182e73a750f03e906d9b25842d556db8d2fd54f
|
[
"MIT"
] | 16
|
2016-04-21T17:54:40.000Z
|
2021-04-19T10:06:13.000Z
|
investigate/__init__.py
|
pranali139/pyinvestigate
|
a182e73a750f03e906d9b25842d556db8d2fd54f
|
[
"MIT"
] | 32
|
2015-07-22T17:30:33.000Z
|
2021-09-16T21:12:49.000Z
|
from .investigate import *
| 13.5
| 26
| 0.777778
| 3
| 27
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e533405ff3957b411b97684313ece8efaee24dea
| 151
|
py
|
Python
|
tests/assets/pipeline-sql-products-in-source/config.py
|
MarcoJHB/ploomber
|
4849ef6915572f7934392443b4faf138172b9596
|
[
"Apache-2.0"
] | 2,141
|
2020-02-14T02:34:34.000Z
|
2022-03-31T22:43:20.000Z
|
tests/assets/pipeline-sql-products-in-source/config.py
|
MarcoJHB/ploomber
|
4849ef6915572f7934392443b4faf138172b9596
|
[
"Apache-2.0"
] | 660
|
2020-02-06T16:15:57.000Z
|
2022-03-31T22:55:01.000Z
|
tests/assets/pipeline-sql-products-in-source/config.py
|
MarcoJHB/ploomber
|
4849ef6915572f7934392443b4faf138172b9596
|
[
"Apache-2.0"
] | 122
|
2020-02-14T18:53:05.000Z
|
2022-03-27T22:33:24.000Z
|
from ploomber.clients import SQLAlchemyClient
def get_uri():
return 'sqlite:///my.db'
def get_client():
return SQLAlchemyClient(get_uri())
| 15.1
| 45
| 0.721854
| 19
| 151
| 5.578947
| 0.684211
| 0.113208
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15894
| 151
| 9
| 46
| 16.777778
| 0.834646
| 0
| 0
| 0
| 0
| 0
| 0.099338
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
e5e2f2792be30a9df3ccb748133efc2d0eaa9dd4
| 43
|
py
|
Python
|
test/gallery/fitnessprivacy/__init__.py
|
jeanqasaur/jeeves
|
1b5783a98f88fa2fc9e4cae7e005d2c9242cfea4
|
[
"MIT"
] | 253
|
2015-01-02T01:54:27.000Z
|
2022-03-10T01:44:02.000Z
|
test/gallery/fitnessprivacy/__init__.py
|
jeanqasaur/jeeves
|
1b5783a98f88fa2fc9e4cae7e005d2c9242cfea4
|
[
"MIT"
] | 4
|
2015-06-09T03:36:28.000Z
|
2017-08-11T15:54:24.000Z
|
test/gallery/fitnessprivacy/__init__.py
|
jeanqasaur/jeeves
|
1b5783a98f88fa2fc9e4cae7e005d2c9242cfea4
|
[
"MIT"
] | 29
|
2015-02-14T02:24:08.000Z
|
2021-12-16T02:46:16.000Z
|
import macropy.activate
import testFitness
| 14.333333
| 23
| 0.883721
| 5
| 43
| 7.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 2
| 24
| 21.5
| 0.974359
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
008b4a9a2ae53d654495d42bc5715eb63ea42866
| 73
|
py
|
Python
|
neobox/cmd/pause.py
|
nnnewb/neobox
|
985ca7ae32cf198ac69184d2b69ed818a60e3953
|
[
"MIT"
] | null | null | null |
neobox/cmd/pause.py
|
nnnewb/neobox
|
985ca7ae32cf198ac69184d2b69ed818a60e3953
|
[
"MIT"
] | null | null | null |
neobox/cmd/pause.py
|
nnnewb/neobox
|
985ca7ae32cf198ac69184d2b69ed818a60e3953
|
[
"MIT"
] | null | null | null |
import click
@click.command()
def pause():
"""TODO
"""
pass
| 9.125
| 16
| 0.534247
| 8
| 73
| 4.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.287671
| 73
| 8
| 17
| 9.125
| 0.75
| 0.054795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 1
| 0.25
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
00dbf583793e826fe879d0b4effdb1e696dd842d
| 27
|
py
|
Python
|
pyLruCache/__init__.py
|
rfyiambest/PyLruCache
|
652a110b4ecb1578f1e35937e92e8bd4dc4240f4
|
[
"MIT"
] | 37
|
2015-04-23T15:38:15.000Z
|
2021-04-14T14:45:09.000Z
|
pyLruCache/__init__.py
|
rfyiambest/PyLruCache
|
652a110b4ecb1578f1e35937e92e8bd4dc4240f4
|
[
"MIT"
] | 1
|
2015-11-01T11:15:25.000Z
|
2015-11-03T07:51:29.000Z
|
pyLruCache/__init__.py
|
rfyiambest/PyLruCache
|
652a110b4ecb1578f1e35937e92e8bd4dc4240f4
|
[
"MIT"
] | 10
|
2015-02-10T09:52:30.000Z
|
2017-06-30T05:27:43.000Z
|
from .pyLruCache import *
| 13.5
| 26
| 0.740741
| 3
| 27
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 27
| 1
| 27
| 27
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9757267214a94287ac468949014f9f672820a32f
| 23,755
|
py
|
Python
|
Draft/FirstModelReg.py
|
HillaPeter/FinalProject
|
f42849483a2e898a3198bb539c22bbfdf4308cc9
|
[
"MIT"
] | null | null | null |
Draft/FirstModelReg.py
|
HillaPeter/FinalProject
|
f42849483a2e898a3198bb539c22bbfdf4308cc9
|
[
"MIT"
] | null | null | null |
Draft/FirstModelReg.py
|
HillaPeter/FinalProject
|
f42849483a2e898a3198bb539c22bbfdf4308cc9
|
[
"MIT"
] | 1
|
2021-06-24T09:10:10.000Z
|
2021-06-24T09:10:10.000Z
|
import pandas as pd
from sklearn import linear_model
import statsmodels.api as sm
import numpy as np
from scipy import stats
df_all = pd.read_csv("/mnt/nadavrap-students/STS/data/imputed_data2.csv")
# print(df_all.head())
#
print(df_all.columns.tolist())
print (df_all.info())
df_all = df_all.replace({'MtOpD':{False:0, True:1}})
df_all = df_all.replace({'Complics':{False:0, True:1}})
mask_reop = df_all['Reoperation'] == 'Reoperation'
df_reop = df_all[mask_reop]
mask = df_all['surgyear'] == 2010
df_2010 = df_all[mask]
mask = df_all['surgyear'] == 2011
df_2011 = df_all[mask]
mask = df_all['surgyear'] == 2012
df_2012 = df_all[mask]
mask = df_all['surgyear'] == 2013
df_2013 = df_all[mask]
mask = df_all['surgyear'] == 2014
df_2014 = df_all[mask]
mask = df_all['surgyear'] == 2015
df_2015 = df_all[mask]
mask = df_all['surgyear'] == 2016
df_2016 = df_all[mask]
mask = df_all['surgyear'] == 2017
df_2017 = df_all[mask]
mask = df_all['surgyear'] == 2018
df_2018 = df_all[mask]
mask = df_all['surgyear'] == 2019
df_2019 = df_all[mask]
avg_siteid = pd.DataFrame()
avg_surgid = pd.DataFrame()
df_siteid_reg = pd.DataFrame()
df_surgid_reg = pd.DataFrame()
def groupby_siteid():
df2010 = df_2010.groupby('SiteID')['SiteID'].count().reset_index(name='2010_total')
df2011 = df_2011.groupby('SiteID')['SiteID'].count().reset_index(name='2011_total')
df2012 = df_2012.groupby('SiteID')['SiteID'].count().reset_index(name='2012_total')
df2013 = df_2013.groupby('SiteID')['SiteID'].count().reset_index(name='2013_total')
df2014 = df_2014.groupby('SiteID')['SiteID'].count().reset_index(name='2014_total')
df2015 = df_2015.groupby('SiteID')['SiteID'].count().reset_index(name='2015_total')
df2016 = df_2016.groupby('SiteID')['SiteID'].count().reset_index(name='2016_total')
df2017 = df_2017.groupby('SiteID')['SiteID'].count().reset_index(name='2017_total')
df2018 = df_2018.groupby('SiteID')['SiteID'].count().reset_index(name='2018_total')
df2019 = df_2019.groupby('SiteID')['SiteID'].count().reset_index(name='2019_total')
df1 =pd.merge(df2010, df2011, on='SiteID', how='outer')
df2 =pd.merge(df1, df2012, on='SiteID', how='outer')
df3 =pd.merge(df2, df2013, on='SiteID', how='outer')
df4 =pd.merge(df3, df2014, on='SiteID', how='outer')
df5 =pd.merge(df4, df2015, on='SiteID', how='outer')
df6 =pd.merge(df5, df2016, on='SiteID', how='outer')
df7 =pd.merge(df6, df2017, on='SiteID', how='outer')
df8 =pd.merge(df7, df2018, on='SiteID', how='outer')
df_sum_all_Years =pd.merge(df8, df2019, on='SiteID', how='outer')
df_sum_all_Years.fillna(0,inplace=True)
cols = df_sum_all_Years.columns.difference(['SiteID'])
df_sum_all_Years['Distinct_years'] = df_sum_all_Years[cols].gt(0).sum(axis=1)
cols_sum = df_sum_all_Years.columns.difference(['SiteID','Distinct_years'])
df_sum_all_Years['Year_sum'] =df_sum_all_Years.loc[:,cols_sum].sum(axis=1)
df_sum_all_Years['Year_avg'] = df_sum_all_Years['Year_sum']/df_sum_all_Years['Distinct_years']
df_sum_all_Years.to_csv("/tmp/pycharm_project_723/files/total op sum all years siteid.csv")
# print("details on site id dist:")
# # print("num of all sites: ", len(df_sum_all_Years))
#
# less_8 =df_sum_all_Years[df_sum_all_Years['Distinct_years'] !=10]
# less_8.to_csv("total op less 10 years siteid.csv")
# print("num of sites with less years: ", len(less_8))
#
# x = np.array(less_8['Distinct_years'])
# print(np.unique(x))
avg_siteid['SiteID'] = df_sum_all_Years['SiteID']
avg_siteid['total_year_sum'] = df_sum_all_Years['Year_sum']
avg_siteid['total_year_avg'] = df_sum_all_Years['Year_avg']
avg_siteid['num_of_years'] = df_sum_all_Years['Distinct_years']
def groupby_siteid_reop():
    """Aggregate yearly reoperation / first-operation counts per SiteID.

    Builds two per-site tables for 2010-2019 (reoperations and first
    operations), writes each to CSV, then merges them with the mortality and
    complication summaries into ``total_avg_site_id.csv`` and seeds the
    module-level ``df_siteid_reg`` frame.

    Relies on the module-level yearly frames ``df_2010`` .. ``df_2019`` and on
    ``avg_siteid`` already being populated by ``groupby_siteid``.
    """
    yearly_frames = ((2010, df_2010), (2011, df_2011), (2012, df_2012),
                     (2013, df_2013), (2014, df_2014), (2015, df_2015),
                     (2016, df_2016), (2017, df_2017), (2018, df_2018),
                     (2019, df_2019))

    def _counts_by_year(label, suffix):
        # One '<year>_<suffix>' column per year counting rows whose
        # Reoperation field equals `label`; outer-merged on SiteID, NaN -> 0.
        merged = None
        for year, frame in yearly_frames:
            counts = frame.groupby('SiteID')['Reoperation'].apply(
                lambda x: (x == label).sum()).reset_index(name='{}_{}'.format(year, suffix))
            merged = counts if merged is None else pd.merge(merged, counts, on='SiteID', how='outer')
        merged.fillna(0, inplace=True)
        return merged

    df_sum_all_Years = _counts_by_year('Reoperation', 'reop')
    cols = df_sum_all_Years.columns.difference(['SiteID'])
    df_sum_all_Years['Distinct_years_reop'] = df_sum_all_Years[cols].gt(0).sum(axis=1)
    cols_sum = df_sum_all_Years.columns.difference(['SiteID', 'Distinct_years_reop'])
    df_sum_all_Years['Year_sum_reop'] = df_sum_all_Years.loc[:, cols_sum].sum(axis=1)
    # NOTE(review): this division relies on index alignment with avg_siteid;
    # both frames are assumed to list sites in the same order -- confirm.
    df_sum_all_Years['Year_avg_reop'] = df_sum_all_Years['Year_sum_reop'] / avg_siteid['num_of_years']
    df_sum_all_Years.to_csv("/tmp/pycharm_project_723/files/sum all years siteid reop.csv")

    # -----------------------first op------------------------------------
    df_sum_all_Years_total = _counts_by_year('First Time', 'FirstOperation')
    cols = df_sum_all_Years_total.columns.difference(['SiteID'])
    df_sum_all_Years_total['Distinct_years'] = df_sum_all_Years_total[cols].gt(0).sum(axis=1)
    cols_sum = df_sum_all_Years_total.columns.difference(['SiteID', 'Distinct_years'])
    df_sum_all_Years_total['Year_sum'] = df_sum_all_Years_total.loc[:, cols_sum].sum(axis=1)
    df_sum_all_Years_total['Year_avg'] = df_sum_all_Years_total['Year_sum'] / avg_siteid['num_of_years']
    df_sum_all_Years_total.to_csv("/tmp/pycharm_project_723/files/First Operation sum all years siteid.csv")

    # ---------------------------merge------------------------
    temp_first = pd.DataFrame()
    temp_first['SiteID'] = df_sum_all_Years_total['SiteID']
    temp_first['Year_sum_Firstop'] = df_sum_all_Years_total['Year_sum']
    temp_first['Year_avg_Firstop'] = df_sum_all_Years_total['Year_avg']
    temp_reop = pd.DataFrame()
    temp_reop['SiteID'] = df_sum_all_Years['SiteID']
    temp_reop['Year_avg_reop'] = df_sum_all_Years['Year_avg_reop']
    temp_reop['Year_sum_reop'] = df_sum_all_Years['Year_sum_reop']
    # Fold every per-site summary into one wide frame (outer join keeps
    # sites that appear in only some of the sources).
    total_avg_site_id = avg_siteid
    for piece in (temp_first, temp_reop, groupby_mortality_siteid(),
                  groupby_mortality_siteid_reop(), groupby_complics_siteid()):
        total_avg_site_id = pd.merge(total_avg_site_id, piece, on='SiteID', how='outer')
    total_avg_site_id['firstop/total'] = (total_avg_site_id['Year_sum_Firstop'] / total_avg_site_id['total_year_sum']) * 100
    total_avg_site_id['reop/total'] = (total_avg_site_id['Year_sum_reop'] / total_avg_site_id['total_year_sum']) * 100
    total_avg_site_id['mortalty_rate'] = (total_avg_site_id['Mortality'] / total_avg_site_id['total_year_sum']) * 100
    total_avg_site_id['mortalty_reop_rate'] = (total_avg_site_id['Mortality_reop'] / total_avg_site_id['Year_sum_reop']) * 100
    total_avg_site_id['Complics_reop_rate'] = (total_avg_site_id['Complics_reop'] / total_avg_site_id['Year_sum_reop']) * 100
    total_avg_site_id.fillna(0, inplace=True)
    total_avg_site_id.to_csv('total_avg_site_id.csv')
    # Seed the module-level regression input frame.
    df_siteid_reg['SiteID'] = total_avg_site_id['SiteID']
    df_siteid_reg['total_year_avg'] = total_avg_site_id['total_year_avg']
def groupby_surgid():
    """Count yearly operations per surgeon id (2010-2019).

    Writes the per-surgeon totals table to CSV and populates the
    module-level ``avg_surgid`` frame with totals, per-active-year average,
    and the number of years each surgeon appears in.
    """
    yearly_frames = ((2010, df_2010), (2011, df_2011), (2012, df_2012),
                     (2013, df_2013), (2014, df_2014), (2015, df_2015),
                     (2016, df_2016), (2017, df_2017), (2018, df_2018),
                     (2019, df_2019))
    # One '<year>_total' count column per year, outer-merged on surgid.
    df_sum_all_Years = None
    for year, frame in yearly_frames:
        counts = frame.groupby('surgid')['surgid'].count().reset_index(name='{}_total'.format(year))
        if df_sum_all_Years is None:
            df_sum_all_Years = counts
        else:
            df_sum_all_Years = pd.merge(df_sum_all_Years, counts, on='surgid', how='outer')
    df_sum_all_Years.fillna(0, inplace=True)
    # Years with a non-zero count = years the surgeon was active.
    cols = df_sum_all_Years.columns.difference(['surgid'])
    df_sum_all_Years['Distinct_years'] = df_sum_all_Years[cols].gt(0).sum(axis=1)
    cols_sum = df_sum_all_Years.columns.difference(['surgid', 'Distinct_years'])
    df_sum_all_Years['Year_sum'] = df_sum_all_Years.loc[:, cols_sum].sum(axis=1)
    df_sum_all_Years['Year_avg'] = df_sum_all_Years['Year_sum'] / df_sum_all_Years['Distinct_years']
    df_sum_all_Years.to_csv("/tmp/pycharm_project_723/files/total op sum all years surgid.csv")
    # Publish the per-surgeon summary used by the *_reop aggregations.
    avg_surgid['surgid'] = df_sum_all_Years['surgid']
    avg_surgid['total_year_avg'] = df_sum_all_Years['Year_avg']
    avg_surgid['total_year_count'] = df_sum_all_Years['Year_sum']
    avg_surgid['num_of_years'] = df_sum_all_Years['Distinct_years']
def groupby_surgid_reop():
    """Aggregate yearly reoperation / first-operation counts per surgeon id.

    Mirrors ``groupby_siteid_reop`` but keyed on ``surgid``: builds the reop
    and first-op tables for 2010-2019, writes them to CSV, then merges them
    with the mortality/complication summaries into ``total_avg_surgid.csv``.
    Requires ``avg_surgid`` to be populated by ``groupby_surgid`` first.
    """
    yearly_frames = ((2010, df_2010), (2011, df_2011), (2012, df_2012),
                     (2013, df_2013), (2014, df_2014), (2015, df_2015),
                     (2016, df_2016), (2017, df_2017), (2018, df_2018),
                     (2019, df_2019))

    def _counts_by_year(label, suffix):
        # One '<year>_<suffix>' column per year counting rows whose
        # Reoperation field equals `label`; outer-merged on surgid, NaN -> 0.
        merged = None
        for year, frame in yearly_frames:
            counts = frame.groupby('surgid')['Reoperation'].apply(
                lambda x: (x == label).sum()).reset_index(name='{}_{}'.format(year, suffix))
            merged = counts if merged is None else pd.merge(merged, counts, on='surgid', how='outer')
        merged.fillna(0, inplace=True)
        return merged

    df_sum_all_Years = _counts_by_year('Reoperation', 'reop')
    cols = df_sum_all_Years.columns.difference(['surgid'])
    df_sum_all_Years['Distinct_years_reop'] = df_sum_all_Years[cols].gt(0).sum(axis=1)
    cols_sum = df_sum_all_Years.columns.difference(['surgid', 'Distinct_years_reop'])
    df_sum_all_Years['Year_sum_reop'] = df_sum_all_Years.loc[:, cols_sum].sum(axis=1)
    # NOTE(review): this division relies on index alignment with avg_surgid;
    # both frames are assumed to list surgeons in the same order -- confirm.
    df_sum_all_Years['Year_avg_reop'] = df_sum_all_Years['Year_sum_reop'] / avg_surgid['num_of_years']
    df_sum_all_Years.to_csv("/tmp/pycharm_project_723/files/sum all years surgid reop.csv")

    # -----------------------first op------------------------------------
    df_sum_all_Years_total = _counts_by_year('First Time', 'FirstOperation')
    cols = df_sum_all_Years_total.columns.difference(['surgid'])
    df_sum_all_Years_total['Distinct_years'] = df_sum_all_Years_total[cols].gt(0).sum(axis=1)
    cols_sum = df_sum_all_Years_total.columns.difference(['surgid', 'Distinct_years'])
    df_sum_all_Years_total['Year_sum'] = df_sum_all_Years_total.loc[:, cols_sum].sum(axis=1)
    df_sum_all_Years_total['Year_avg'] = df_sum_all_Years_total['Year_sum'] / avg_surgid['num_of_years']
    df_sum_all_Years_total.to_csv("/tmp/pycharm_project_723/files/First op sum all years surgid.csv")

    # ---------------------------merge------------------------
    temp_first = pd.DataFrame()
    temp_first['surgid'] = df_sum_all_Years_total['surgid']
    temp_first['Year_avg_Firstop'] = df_sum_all_Years_total['Year_avg']
    temp_first['Year_sum_Firstop'] = df_sum_all_Years_total['Year_sum']
    temp_reop = pd.DataFrame()
    temp_reop['surgid'] = df_sum_all_Years['surgid']
    temp_reop['Year_avg_reop'] = df_sum_all_Years['Year_avg_reop']
    temp_reop['Year_sum_reop'] = df_sum_all_Years['Year_sum_reop']
    # Fold every per-surgeon summary into one wide frame.
    total_avg_surgid = avg_surgid
    for piece in (temp_first, temp_reop, groupby_mortality_surgid(),
                  groupby_mortality_surgid_reop(), groupby_complics_surgid()):
        total_avg_surgid = pd.merge(total_avg_surgid, piece, on='surgid', how='outer')
    total_avg_surgid['firstop/total'] = (total_avg_surgid['Year_sum_Firstop'] / total_avg_surgid['total_year_count']) * 100
    total_avg_surgid['reop/total'] = (total_avg_surgid['Year_sum_reop'] / total_avg_surgid['total_year_count']) * 100
    total_avg_surgid['mortalty_rate'] = (total_avg_surgid['Mortality'] / total_avg_surgid['total_year_count']) * 100
    total_avg_surgid['mortalty_reop_rate'] = (total_avg_surgid['Mortality_reop'] / total_avg_surgid['Year_sum_reop']) * 100
    total_avg_surgid['Complics_reop_rate'] = (total_avg_surgid['Complics_reop'] / total_avg_surgid['Year_sum_reop']) * 100
    total_avg_surgid.fillna(0, inplace=True)
    total_avg_surgid.to_csv('total_avg_surgid.csv')
def groupby_mortality_siteid():
    """Tally operative deaths (MtOpD == 1) per site over the whole dataset."""
    deaths_per_site = df_all.groupby('SiteID')['MtOpD'].apply(lambda flags: (flags == 1).sum())
    result = deaths_per_site.reset_index(name='Mortality')
    result.to_csv("/tmp/pycharm_project_723/files/mortality siteid.csv")
    return result
def groupby_mortality_siteid_reop():
    """Tally operative deaths (MtOpD == 1) per site among reoperations only."""
    deaths_per_site = df_reop.groupby('SiteID')['MtOpD'].apply(lambda flags: (flags == 1).sum())
    result = deaths_per_site.reset_index(name='Mortality_reop')
    result.to_csv("/tmp/pycharm_project_723/files/mortality siteid reop.csv")
    return result
def groupby_complics_siteid():
    """Per-site complication counts: overall and reoperation-only, outer-joined."""
    def count_ones(flags):
        return (flags == 1).sum()
    overall = df_all.groupby('SiteID')['Complics'].apply(count_ones).reset_index(name='Complics')
    reop_only = df_reop.groupby('SiteID')['Complics'].apply(count_ones).reset_index(name='Complics_reop')
    combined = pd.merge(overall, reop_only, on='SiteID', how='outer')
    combined.to_csv("/tmp/pycharm_project_723/files/Complics siteid.csv")
    return combined
def groupby_mortality_surgid():
    """Tally operative deaths (MtOpD == 1) per surgeon over the whole dataset."""
    deaths_per_surgeon = df_all.groupby('surgid')['MtOpD'].apply(lambda flags: (flags == 1).sum())
    result = deaths_per_surgeon.reset_index(name='Mortality')
    result.to_csv("/tmp/pycharm_project_723/files/mortality surgid.csv")
    return result
def groupby_mortality_surgid_reop():
    """Tally operative deaths (MtOpD == 1) per surgeon among reoperations only."""
    deaths_per_surgeon = df_reop.groupby('surgid')['MtOpD'].apply(lambda flags: (flags == 1).sum())
    result = deaths_per_surgeon.reset_index(name='Mortality_reop')
    result.to_csv("/tmp/pycharm_project_723/files/mortality surgid reop.csv")
    return result
def groupby_complics_surgid():
    """Per-surgeon complication counts: overall and reoperation-only, outer-joined."""
    def count_ones(flags):
        return (flags == 1).sum()
    overall = df_all.groupby('surgid')['Complics'].apply(count_ones).reset_index(name='Complics')
    reop_only = df_reop.groupby('surgid')['Complics'].apply(count_ones).reset_index(name='Complics_reop')
    combined = pd.merge(overall, reop_only, on='surgid', how='outer')
    combined.to_csv("/tmp/pycharm_project_723/files/Complics surgid.csv")
    return combined
def launch_reg_siteid():
    """Regress per-site mortality rate on volume/reoperation covariates.

    Fits the same linear model twice: with scikit-learn (intercept and
    coefficients) and with statsmodels OLS (full summary table), printing
    the results of both.
    """
    sites = pd.read_csv("total_avg_site_id.csv")
    print("---------------------site id reg:--------------------------")
    print()
    features = sites[['total_year_avg', 'Year_avg_Firstop', 'Year_avg_reop', 'reop/total']]
    outcome = sites['mortalty_rate']
    # scikit-learn fit: quick access to intercept/coefficients.
    sk_model = linear_model.LinearRegression()
    sk_model.fit(features, outcome)
    print('Intercept: \n', sk_model.intercept_)
    print('Coefficients: \n', sk_model.coef_)
    # statsmodels fit: detailed diagnostics via summary().
    design = sm.add_constant(features)
    ols_fit = sm.OLS(outcome, design).fit()
    ols_fit.predict(design)
    print(ols_fit.summary())
    print()
    print()
def launch_reg_surgid():
    """Regress per-surgeon mortality rate on volume/reoperation covariates.

    Same two-way fit as ``launch_reg_siteid`` (scikit-learn coefficients plus
    a statsmodels OLS summary), read from ``total_avg_surgid.csv``.
    """
    surgeons = pd.read_csv("total_avg_surgid.csv")
    print("------------------------ surg id reg:-------------------------------")
    print()
    features = surgeons[['total_year_avg', 'Year_avg_Firstop', 'Year_avg_reop', 'reop/total']]
    outcome = surgeons['mortalty_rate']
    # scikit-learn fit: quick access to intercept/coefficients.
    sk_model = linear_model.LinearRegression()
    sk_model.fit(features, outcome)
    print('Intercept: \n', sk_model.intercept_)
    print('Coefficients: \n', sk_model.coef_)
    # statsmodels fit: detailed diagnostics via summary().
    design = sm.add_constant(features)
    ols_fit = sm.OLS(outcome, design).fit()
    ols_fit.predict(design)
    print(ols_fit.summary())
# Build all per-site and per-surgeon aggregate tables. Each call writes its
# CSV outputs (under /tmp/pycharm_project_723/files and the working dir) as a
# side effect; the *_reop calls require their non-reop counterpart to have
# run first, so keep this order.
groupby_siteid()
groupby_siteid_reop()
groupby_surgid()
groupby_surgid_reop()
# Regression entry points are opt-in; they read the CSVs produced above.
# launch_reg_siteid()
# launch_reg_surgid()
| 55.894118
| 320
| 0.693033
| 3,595
| 23,755
| 4.278164
| 0.055077
| 0.039792
| 0.072952
| 0.081144
| 0.91671
| 0.890507
| 0.84987
| 0.799805
| 0.786281
| 0.755722
| 0
| 0.057674
| 0.119007
| 23,755
| 425
| 321
| 55.894118
| 0.677227
| 0.067691
| 0
| 0.329114
| 0
| 0
| 0.255721
| 0.029625
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037975
| false
| 0
| 0.015823
| 0
| 0.072785
| 0.050633
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8ae6176cbe231bf89a30d9b90cb5abe3b1282aad
| 159
|
py
|
Python
|
politics/api.py
|
microstack/backend
|
40eb6b06fc44c6511ba71b89aff2eb19ff251f83
|
[
"MIT"
] | null | null | null |
politics/api.py
|
microstack/backend
|
40eb6b06fc44c6511ba71b89aff2eb19ff251f83
|
[
"MIT"
] | 5
|
2016-08-01T14:31:44.000Z
|
2016-09-03T15:28:56.000Z
|
politics/api.py
|
microstack/backend
|
40eb6b06fc44c6511ba71b89aff2eb19ff251f83
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
from flask import Flask
from flask_restful import Resource
from flask_restful import Api
from settings import app

# Flask-RESTful API wrapper around the shared Flask app from settings.
# NOTE(review): Flask and Resource are imported but not used in this file;
# presumably re-exported for other modules -- confirm before removing.
api = Api(app)
| 14.454545
| 34
| 0.748428
| 24
| 159
| 4.875
| 0.458333
| 0.230769
| 0.273504
| 0.376068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007634
| 0.176101
| 159
| 10
| 35
| 15.9
| 0.885496
| 0.144654
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8aeb87dc3f61093276959b37144abd091af8a19c
| 2,254
|
py
|
Python
|
staff_models/staff_groups/migrations/0001_initial.py
|
reimibeta/django-staff-models
|
ec42cb4ba30e87efeff34f152a5dc68b469f4784
|
[
"Apache-2.0"
] | null | null | null |
staff_models/staff_groups/migrations/0001_initial.py
|
reimibeta/django-staff-models
|
ec42cb4ba30e87efeff34f152a5dc68b469f4784
|
[
"Apache-2.0"
] | null | null | null |
staff_models/staff_groups/migrations/0001_initial.py
|
reimibeta/django-staff-models
|
ec42cb4ba30e87efeff34f152a5dc68b469f4784
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-05-27 13:42
from django_datetime.datetime import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the staff_groups app.

    Creates four role tables -- StaffWorker, StaffSeller, StaffManager and
    StaffDeliver -- each a one-to-one extension of ``staffs.Staff``.
    """

    initial = True

    dependencies = [
        # The staffs base table must exist before the one-to-one links below.
        ('staffs', '0001_initial'),
    ]

    # NOTE(review): every ``assigned_date`` default is ``datetime.dnow()`` --
    # the function is CALLED once when this module is imported, so every row
    # created by a process shares that single timestamp. Django convention is
    # to pass the callable itself (``default=datetime.dnow``); confirm intent.
    # ``datetime`` here comes from the project's ``django_datetime`` package,
    # not the stdlib, so ``dnow`` semantics are not visible from this file.
    operations = [
        migrations.CreateModel(
            name='StaffWorker',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('assigned_date', models.DateField(default=datetime.dnow())),
                ('is_active', models.BooleanField(default=True)),
                ('staff', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='staffs.staff')),
            ],
        ),
        migrations.CreateModel(
            name='StaffSeller',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('assigned_date', models.DateField(default=datetime.dnow())),
                ('is_active', models.BooleanField(default=True)),
                ('staff', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='staffs.staff')),
            ],
        ),
        migrations.CreateModel(
            name='StaffManager',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('assigned_date', models.DateField(default=datetime.dnow())),
                ('is_active', models.BooleanField(default=True)),
                ('staff', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='staffs.staff')),
            ],
        ),
        migrations.CreateModel(
            name='StaffDeliver',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('assigned_date', models.DateField(default=datetime.dnow())),
                ('is_active', models.BooleanField(default=True)),
                ('staff', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='staffs.staff')),
            ],
        ),
    ]
| 41.740741
| 114
| 0.59228
| 220
| 2,254
| 5.95
| 0.254545
| 0.036669
| 0.053476
| 0.084034
| 0.778457
| 0.778457
| 0.778457
| 0.778457
| 0.778457
| 0.778457
| 0
| 0.011487
| 0.266193
| 2,254
| 53
| 115
| 42.528302
| 0.779927
| 0.019965
| 0
| 0.695652
| 1
| 0
| 0.106932
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.065217
| 0
| 0.152174
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c1552a24d6b393762d65739b3527e84b98e20f8a
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/setuptools/_distutils/version.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/setuptools/_distutils/version.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/setuptools/_distutils/version.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/b3/24/6f/3f1b8c4319dfb69b8829e444fb610b43f64c0b0309fa8f227be971759a
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4375
| 0
| 96
| 1
| 96
| 96
| 0.458333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c19ee6a9218fb82eda5b51a8f8ca378c863a1b0e
| 37
|
py
|
Python
|
hrm_api/community/factories/generators/__init__.py
|
unknowncoder05/HRM
|
2a0ad62373fdaefafe533727b2d586d8f6327e87
|
[
"MIT"
] | null | null | null |
hrm_api/community/factories/generators/__init__.py
|
unknowncoder05/HRM
|
2a0ad62373fdaefafe533727b2d586d8f6327e87
|
[
"MIT"
] | null | null | null |
hrm_api/community/factories/generators/__init__.py
|
unknowncoder05/HRM
|
2a0ad62373fdaefafe533727b2d586d8f6327e87
|
[
"MIT"
] | null | null | null |
from .full_feed import feed_generator
| 37
| 37
| 0.891892
| 6
| 37
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 37
| 1
| 37
| 37
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c1de12bdcc31444de2314272889e55489e2aebb8
| 123
|
py
|
Python
|
helloworld/views.py
|
serverlessplus/django-example
|
9508c05723d7c05e6b697b8b573e1054e5cdb2e5
|
[
"Apache-2.0"
] | 2
|
2019-11-30T14:23:08.000Z
|
2019-12-03T01:42:10.000Z
|
helloworld/views.py
|
serverlessplus/django-example
|
9508c05723d7c05e6b697b8b573e1054e5cdb2e5
|
[
"Apache-2.0"
] | null | null | null |
helloworld/views.py
|
serverlessplus/django-example
|
9508c05723d7c05e6b697b8b573e1054e5cdb2e5
|
[
"Apache-2.0"
] | 1
|
2019-04-29T04:29:54.000Z
|
2019-04-29T04:29:54.000Z
|
from django.http import HttpResponse
# Create your views here.
def index(request):
    """Respond to any request with a plain 'hello world' body."""
    body = 'hello world'
    return HttpResponse(body)
| 20.5
| 38
| 0.764228
| 16
| 123
| 5.875
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154472
| 123
| 5
| 39
| 24.6
| 0.903846
| 0.186992
| 0
| 0
| 0
| 0
| 0.112245
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
c1fce4a1bf7bfeaa9908a6debb5869f6f4324785
| 41
|
py
|
Python
|
app/notification/__init__.py
|
tradaviahe1982/labman-master
|
0648410e1b449e8c21574a5bbbc0bcd9c38f1634
|
[
"MIT"
] | 10
|
2016-08-18T07:12:37.000Z
|
2019-10-13T06:35:27.000Z
|
app/notification/__init__.py
|
PSOdAz/labman
|
591f698a12e474df08ccba2503174655cc6a2265
|
[
"MIT"
] | null | null | null |
app/notification/__init__.py
|
PSOdAz/labman
|
591f698a12e474df08ccba2503174655cc6a2265
|
[
"MIT"
] | 4
|
2018-09-06T15:49:59.000Z
|
2020-09-29T13:16:21.000Z
|
from .controller import mod_notification
| 20.5
| 40
| 0.878049
| 5
| 41
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
de0a63023e9153deb05b9dd458eaefb721553aa1
| 7,334
|
py
|
Python
|
src/unittest/python/index_tests.py
|
donaldmcdougal/britecore-test
|
5cfed0c2259396aa10dbd1d0b9155a592c8007af
|
[
"Apache-2.0"
] | null | null | null |
src/unittest/python/index_tests.py
|
donaldmcdougal/britecore-test
|
5cfed0c2259396aa10dbd1d0b9155a592c8007af
|
[
"Apache-2.0"
] | null | null | null |
src/unittest/python/index_tests.py
|
donaldmcdougal/britecore-test
|
5cfed0c2259396aa10dbd1d0b9155a592c8007af
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import requests
import json
import sys
from database import FeatureRequest
from datetime import date
from index import app
class IndexTest(unittest.TestCase):
    """End-to-end tests for the feature-request REST API.

    These tests issue real HTTP requests, so a server must already be
    listening at ``base_url``; the Flask test client built in ``setUp`` only
    serves to disable CSRF checking.

    Fix: replaced the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual``, and extracted the repeated
    fixture/request boilerplate into private helpers.
    """

    base_url = 'http://localhost:5000'

    def setUp(self):
        app.config['TESTING'] = True
        app.config['WTF_CSRF_METHODS'] = []  # This is the magic
        self.app = app.test_client()

    # ---- helpers ----------------------------------------------------

    def _new_fr(self, description='Test FR'):
        """Build the canonical FeatureRequest fixture used by every test."""
        return FeatureRequest(title='FR', description=description,
                              client_id=1, client_priority=1,
                              target_date='2018-06-30', product_area_id=1)

    def _post_fr(self, fr):
        """POST *fr* as JSON to the collection endpoint; return the response."""
        return requests.post(self.base_url + '/feature_request',
                             data=json.dumps(fr.to_json()),
                             headers={'Content-Type': 'application/json'})

    def _put_fr(self, fr_id, fr):
        """PUT *fr* as JSON to the item endpoint for *fr_id*."""
        return requests.put('%s/feature_request/%d' % (self.base_url, fr_id),
                            data=json.dumps(fr.to_json()),
                            headers={'Content-Type': 'application/json'})

    def _delete_fr(self, fr_id):
        """DELETE the feature request with id *fr_id*."""
        return requests.delete('%s/feature_request/%d' % (self.base_url, fr_id))

    # ---- tests ------------------------------------------------------

    def test_server_should_get_home_page(self):
        page = requests.get(self.base_url)
        self.assertEqual(200, page.status_code)

    def test_server_should_get_three_clients(self):
        clients = requests.get(self.base_url + '/client')
        self.assertEqual(3, len(json.loads(clients.content)))
        self.assertEqual(200, clients.status_code)

    def test_server_should_get_four_product_areas(self):
        pas = requests.get(self.base_url + '/product_area')
        self.assertEqual(4, len(json.loads(pas.content)))
        self.assertEqual(200, pas.status_code)

    def test_server_should_get_no_frs(self):
        frs = requests.get(self.base_url + '/feature_request')
        self.assertEqual(0, len(json.loads(frs.content)))
        self.assertEqual(200, frs.status_code)

    def test_server_should_create_fr(self):
        saved = self._post_fr(self._new_fr())
        self.assertEqual(200, saved.status_code)
        self.assertEqual(1, json.loads(saved.content)['id'])
        # now delete the feature request
        deleted = self._delete_fr(1)
        self.assertEqual(200, deleted.status_code)

    def test_server_should_get_all_frs(self):
        self._post_fr(self._new_fr())
        frs = requests.get(self.base_url + '/feature_request')
        self.assertEqual(1, len(json.loads(frs.content)))
        self.assertEqual(200, frs.status_code)
        # now delete the feature request
        deleted = self._delete_fr(1)
        self.assertEqual(200, deleted.status_code)

    def test_server_should_get_one_fr(self):
        self._post_fr(self._new_fr())
        returned = requests.get(self.base_url + '/feature_request/1')
        self.assertEqual(1, json.loads(returned.content)['id'])
        self.assertEqual(200, returned.status_code)
        # now delete the feature request
        deleted = self._delete_fr(1)
        self.assertEqual(200, deleted.status_code)

    def test_server_should_not_update_non_existent_fr(self):
        saved = self._put_fr(1, self._new_fr())
        self.assertEqual(404, saved.status_code)

    def test_server_should_update_fr(self):
        fr = self._new_fr()
        self._post_fr(fr)
        returned = requests.get(self.base_url + '/feature_request/1')
        self.assertEqual(1, json.loads(returned.content)['id'])
        self.assertEqual(200, returned.status_code)
        fr.description = 'Test FR 2'
        saved = self._put_fr(1, fr)
        self.assertEqual(200, saved.status_code)
        # now delete the feature request
        deleted = self._delete_fr(1)
        self.assertEqual(200, deleted.status_code)

    def test_server_should_not_delete_non_existent_fr(self):
        deleted = self._delete_fr(1)
        self.assertEqual(404, deleted.status_code)

    def test_server_should_not_allow_delete_without_id(self):
        deleted = requests.delete(self.base_url + '/feature_request')
        self.assertEqual(405, deleted.status_code)

    def test_server_should_reassign_priorities(self):
        fr1 = self._new_fr()
        self._post_fr(fr1)
        returned = requests.get(self.base_url + '/feature_request/1')
        self.assertEqual(1, json.loads(returned.content)['id'])
        self.assertEqual(200, returned.status_code)
        # a second request at the same priority should bump the first one down
        saved = self._post_fr(self._new_fr(description='Test FR 2'))
        self.assertEqual(200, saved.status_code)
        returned = requests.get(self.base_url + '/feature_request/1')
        self.assertEqual(2, json.loads(returned.content)['client_priority'])
        self.assertEqual(200, returned.status_code)
        # re-submitting fr1 at priority 1 should restore its priority
        saved = self._put_fr(1, fr1)
        self.assertEqual(200, saved.status_code)
        returned = requests.get(self.base_url + '/feature_request/1')
        self.assertEqual(1, json.loads(returned.content)['client_priority'])
        self.assertEqual(200, returned.status_code)
        # now delete the feature requests
        deleted = self._delete_fr(1)
        self.assertEqual(200, deleted.status_code)
        deleted = self._delete_fr(2)
        self.assertEqual(200, deleted.status_code)
| 47.012821
| 110
| 0.673302
| 923
| 7,334
| 5.145179
| 0.114843
| 0.111181
| 0.062539
| 0.090967
| 0.836808
| 0.813856
| 0.788166
| 0.760371
| 0.742472
| 0.71594
| 0
| 0.031266
| 0.206299
| 7,334
| 155
| 111
| 47.316129
| 0.784573
| 0.023589
| 0
| 0.621212
| 0
| 0
| 0.115739
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.098485
| false
| 0
| 0.05303
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e70637f323320c5430e7f38f82bb7a15b6d99441
| 185
|
py
|
Python
|
backend/src/awattprice/__init__.py
|
sp4c38/AWattPrice
|
a2eb20ac44b7c60a4072d23ae4bfc4f4c21ff2d6
|
[
"BSD-3-Clause"
] | 8
|
2020-10-22T14:47:54.000Z
|
2022-01-23T20:17:51.000Z
|
backend/src/awattprice/__init__.py
|
sp4c38/AwattarApp
|
b914e8042e5cdcb84485d6d45133a00244662bda
|
[
"BSD-3-Clause"
] | 75
|
2020-11-16T16:13:28.000Z
|
2022-03-27T09:45:56.000Z
|
backend/src/awattprice/__init__.py
|
sp4c38/AwattarApp
|
b914e8042e5cdcb84485d6d45133a00244662bda
|
[
"BSD-3-Clause"
] | 4
|
2020-11-10T21:21:08.000Z
|
2021-10-20T12:35:33.000Z
|
from . import configurator
from . import database
from . import defaults
from . import exceptions
from . import notifications
from . import orm
from . import prices
from . import utils
| 20.555556
| 27
| 0.783784
| 24
| 185
| 6.041667
| 0.416667
| 0.551724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172973
| 185
| 8
| 28
| 23.125
| 0.947712
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e70b0b4f6c6483258e8ae147b9fb6dd38581695f
| 1,361
|
py
|
Python
|
alpyro_msgs/trajectory_msgs/multidofjointtrajectorypoint.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | 1
|
2020-12-13T13:07:10.000Z
|
2020-12-13T13:07:10.000Z
|
alpyro_msgs/trajectory_msgs/multidofjointtrajectorypoint.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | null | null | null |
alpyro_msgs/trajectory_msgs/multidofjointtrajectorypoint.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | null | null | null |
from typing import List
from typing_extensions import Annotated
from alpyro_msgs import RosMessage, duration
from alpyro_msgs.geometry_msgs.transform import Transform
from alpyro_msgs.geometry_msgs.twist import Twist
class MultiDOFJointTrajectoryPoint(RosMessage):
__msg_typ__ = "trajectory_msgs/MultiDOFJointTrajectoryPoint"
__msg_def__ = "Z2VvbWV0cnlfbXNncy9UcmFuc2Zvcm1bXSB0cmFuc2Zvcm1zCiAgZ2VvbWV0cnlfbXNncy9WZWN0b3IzIHRyYW5zbGF0aW9uCiAgICBmbG9hdDY0IHgKICAgIGZsb2F0NjQgeQogICAgZmxvYXQ2NCB6CiAgZ2VvbWV0cnlfbXNncy9RdWF0ZXJuaW9uIHJvdGF0aW9uCiAgICBmbG9hdDY0IHgKICAgIGZsb2F0NjQgeQogICAgZmxvYXQ2NCB6CiAgICBmbG9hdDY0IHcKZ2VvbWV0cnlfbXNncy9Ud2lzdFtdIHZlbG9jaXRpZXMKICBnZW9tZXRyeV9tc2dzL1ZlY3RvcjMgbGluZWFyCiAgICBmbG9hdDY0IHgKICAgIGZsb2F0NjQgeQogICAgZmxvYXQ2NCB6CiAgZ2VvbWV0cnlfbXNncy9WZWN0b3IzIGFuZ3VsYXIKICAgIGZsb2F0NjQgeAogICAgZmxvYXQ2NCB5CiAgICBmbG9hdDY0IHoKZ2VvbWV0cnlfbXNncy9Ud2lzdFtdIGFjY2VsZXJhdGlvbnMKICBnZW9tZXRyeV9tc2dzL1ZlY3RvcjMgbGluZWFyCiAgICBmbG9hdDY0IHgKICAgIGZsb2F0NjQgeQogICAgZmxvYXQ2NCB6CiAgZ2VvbWV0cnlfbXNncy9WZWN0b3IzIGFuZ3VsYXIKICAgIGZsb2F0NjQgeAogICAgZmxvYXQ2NCB5CiAgICBmbG9hdDY0IHoKZHVyYXRpb24gdGltZV9mcm9tX3N0YXJ0Cgo="
__md5_sum__ = "3ebe08d1abd5b65862d50e09430db776"
transforms: Annotated[List[Transform], 0, 0]
velocities: Annotated[List[Twist], 0, 0]
accelerations: Annotated[List[Twist], 0, 0]
time_from_start: duration
| 80.058824
| 814
| 0.923586
| 67
| 1,361
| 18.402985
| 0.432836
| 0.024331
| 0.034063
| 0.035685
| 0.074615
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097843
| 0.046289
| 1,361
| 16
| 815
| 85.0625
| 0.85208
| 0
| 0
| 0
| 0
| 0
| 0.640705
| 0.640705
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.384615
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e7588d97f65da25b70b968112d6976b62a987aad
| 107
|
py
|
Python
|
handler/__init__.py
|
jiss-software/jiss-rendering-service
|
f6f61898f2bbd36eb6fd7a387260fa2978f5b767
|
[
"Apache-2.0"
] | null | null | null |
handler/__init__.py
|
jiss-software/jiss-rendering-service
|
f6f61898f2bbd36eb6fd7a387260fa2978f5b767
|
[
"Apache-2.0"
] | null | null | null |
handler/__init__.py
|
jiss-software/jiss-rendering-service
|
f6f61898f2bbd36eb6fd7a387260fa2978f5b767
|
[
"Apache-2.0"
] | null | null | null |
from HealthCheck import *
from Img import *
from Pdf import *
from Resize import *
from Watermark import *
| 17.833333
| 25
| 0.766355
| 15
| 107
| 5.466667
| 0.466667
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186916
| 107
| 5
| 26
| 21.4
| 0.942529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e7ac0b08c6c277fc45dcc92d9dcbc6f0820174dd
| 22,960
|
py
|
Python
|
tests/cli_test.py
|
kiwi-bop/zap-cli
|
55d3341622074f65af287fe07d43196a55c515f1
|
[
"MIT"
] | 196
|
2015-06-22T06:23:28.000Z
|
2022-03-23T08:54:10.000Z
|
tests/cli_test.py
|
kiwi-bop/zap-cli
|
55d3341622074f65af287fe07d43196a55c515f1
|
[
"MIT"
] | 89
|
2015-12-02T17:07:57.000Z
|
2022-02-03T10:20:50.000Z
|
tests/cli_test.py
|
kiwi-bop/zap-cli
|
55d3341622074f65af287fe07d43196a55c515f1
|
[
"MIT"
] | 65
|
2015-12-14T16:27:59.000Z
|
2022-02-21T22:59:52.000Z
|
"""
Tests for the ZAP CLI.
.. moduleauthor:: Daniel Grunwell (grunny)
"""
import unittest
from click.testing import CliRunner
from ddt import ddt
from mock import PropertyMock, Mock, MagicMock, patch
import zapv2
from zapcli import zap_helper, cli
from zapcli.exceptions import ZAPError
@ddt
class ZAPCliTestCase(unittest.TestCase):
"""Test ZAP CLI methods."""
def setUp(self):
self.runner = CliRunner()
cli.console = Mock()
@patch('zapcli.zap_helper.ZAPHelper.start')
def test_start_zap_daemon(self, helper_mock):
"""Test command to start ZAP daemon."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'start'])
helper_mock.assert_called_with(options=None)
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.start')
def test_start_zap_daemon_with_options(self, helper_mock):
"""Test command to start ZAP daemon."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'start',
'--start-options', '-config api.key=12345'])
helper_mock.assert_called_with(options='-config api.key=12345')
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.start')
def test_start_zap_daemon_exception(self, helper_mock):
"""Test command to start ZAP daemon has an exit code of 1 when an exception is raised."""
helper_mock.side_effect = ZAPError('error')
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'start'])
helper_mock.assert_called_with(options=None)
self.assertEqual(result.exit_code, 2)
@patch('zapcli.zap_helper.ZAPHelper.shutdown')
def test_shutdown_zap_daemon(self, helper_mock):
"""Test command to shutdown ZAP daemon."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'shutdown'])
helper_mock.assert_called_with()
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.shutdown')
def test_shutdown_zap_daemon_exception(self, helper_mock):
"""Test command to shutdown ZAP daemon has an exit code of 1 when an exception is raised."""
helper_mock.side_effect = ZAPError('error')
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'shutdown'])
helper_mock.assert_called_with()
self.assertEqual(result.exit_code, 2)
@patch('zapcli.zap_helper.ZAPHelper.is_running')
def test_check_status_running(self, helper_mock):
"""Test the status command."""
helper_mock.return_value = True
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'status'])
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.is_running')
def test_check_status_not_running(self, helper_mock):
"""Test the status command when ZAP is not running."""
helper_mock.return_value = False
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'status'])
self.assertEqual(result.exit_code, 2)
@patch('zapcli.zap_helper.ZAPHelper.wait_for_zap')
@patch('zapcli.zap_helper.ZAPHelper.is_running')
def test_check_status_timeout(self, running_mock, wait_mock):
"""Test the status command with a timeout."""
running_mock.return_value = False
wait_mock.side_effect = ZAPError('error')
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'status', '-t', '0'])
self.assertEqual(result.exit_code, 2)
@patch('zapcli.zap_helper.ZAPHelper.wait_for_zap')
@patch('zapcli.zap_helper.ZAPHelper.is_running')
def test_check_status_timeout_success(self, running_mock, wait_mock):
"""Test the status command with a successful wait for ZAP to start."""
running_mock.return_value = False
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'status', '-t', '0'])
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.open_url')
def test_open_url(self, helper_mock):
"""Test open URL method."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'open-url', 'http://localhost/'])
helper_mock.assert_called_with('http://localhost/')
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.open_url')
def test_open_url_no_url(self, helper_mock):
"""Test open URL method isn't called and an error status raised when no URL provided."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'open-url'])
self.assertFalse(helper_mock.called)
self.assertEqual(result.exit_code, 2)
@patch('zapcli.zap_helper.ZAPHelper.run_spider')
def test_spider_url(self, helper_mock):
"""Test spider URL method."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'spider', 'http://localhost/'])
helper_mock.assert_called_with('http://localhost/', None, None)
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.run_spider')
def test_spider_url_no_url(self, helper_mock):
"""Test spider URL method isn't called and an error status raised when no URL provided."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'spider'])
self.assertFalse(helper_mock.called)
self.assertEqual(result.exit_code, 2)
@patch('zapcli.zap_helper.ZAPHelper.run_ajax_spider')
def test_ajax_spider_url(self, helper_mock):
"""Test AJAX Spider URL method."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'ajax-spider', 'http://localhost/'])
helper_mock.assert_called_with('http://localhost/')
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.run_ajax_spider')
def test_ajax_spider_url_no_url(self, helper_mock):
"""Test AJAX Spider URL method isn't called and an error status raised when no URL provided."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'ajax-spider'])
self.assertFalse(helper_mock.called)
self.assertEqual(result.exit_code, 2)
@patch('zapcli.cli.ZAPHelper')
def test_quick_scan(self, helper_mock):
"""Testing quick scan."""
instance = helper_mock.return_value
instance.scanner_groups = ['xss']
instance.scanner_group_map = {'xss': ['40012', '40014', '40016', '40017']}
instance.alerts.return_value = []
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'quick-scan',
'http://localhost/', '--self-contained', '--scanners', 'xss',
'--spider', '--exclude', 'pattern'])
self.assertEqual(result.exit_code, 0)
@patch('zapcli.cli.ZAPHelper')
def test_quick_scan_issues_found(self, helper_mock):
"""Testing quick scan."""
instance = helper_mock.return_value
instance.scanner_groups = ['xss']
instance.scanner_group_map = {'xss': ['40012', '40014', '40016', '40017']}
instance.alerts.return_value = [{
'url': 'http://localhost/?test=%3C%2Fspan%3E%3Cscript%3Ealert%281%29%3B%3C%2Fscript%3E%3Cspan%3E',
'alert': 'Cross Site Scripting (Reflected)',
'cweid': '79',
'risk': 'High',
}]
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'quick-scan',
'http://localhost/', '--self-contained', '--scanners', 'xss',
'--spider', '--exclude', 'pattern'])
self.assertEqual(result.exit_code, 1)
@patch('zapcli.zap_helper.ZAPHelper.start')
def test_quick_scan_start_error(self, helper_mock):
"""Testing quick scan."""
helper_mock.side_effect = ZAPError('error')
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'quick-scan',
'http://localhost/', '--self-contained'])
self.assertEqual(result.exit_code, 2)
@patch('zapcli.cli.ZAPHelper')
def test_quick_scan_shutdown_error(self, helper_mock):
"""Testing quick scan."""
instance = helper_mock.return_value
instance.alerts.return_value = []
instance.shutdown.side_effect = ZAPError('error')
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'quick-scan',
'http://localhost/', '--self-contained'])
self.assertEqual(result.exit_code, 2)
@patch('zapcli.cli.ZAPHelper')
def test_quick_scan_enable_scanners_error(self, helper_mock):
"""Testing quick scan."""
instance = helper_mock.return_value
instance.alerts.return_value = []
instance.scanner_groups = ['xss']
instance.scanner_group_map = {'xss': ['40012', '40014', '40016', '40017']}
instance.set_enabled_scanners.side_effect = ZAPError('error')
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'quick-scan',
'http://localhost/', '--scanners', 'xss'])
self.assertEqual(result.exit_code, 2)
@patch('zapcli.cli.ZAPHelper')
def test_quick_scan_exclude_from_all_error(self, helper_mock):
"""Testing quick scan."""
instance = helper_mock.return_value
instance.alerts.return_value = []
instance.exclude_from_all.side_effect = ZAPError('error')
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'quick-scan',
'http://localhost/', '--exclude', 'pattern'])
self.assertEqual(result.exit_code, 2)
@patch('zapv2.ascan')
def test_active_scanners_enable(self, ascan_mock):
"""Test enabling active scanners."""
self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'scanners', 'enable',
'--scanners', '1,2,3'])
ascan_mock.return_value.enable_scanners.assert_called_with('1,2,3')
@patch('zapv2.ascan')
def test_active_scanners_disable(self, ascan_mock):
"""Test enabling active scanners."""
self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'scanners', 'disable',
'--scanners', '1,2,3'])
ascan_mock.return_value.disable_scanners.assert_called_with('1,2,3')
@patch('zapv2.ascan')
def test_active_scan_policies_enable(self, ascan_mock):
"""Test enabling active scan policies method."""
self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'policies', 'enable',
'--policy-ids', '1,2,3'])
ascan_mock.return_value.set_enabled_policies.assert_called_with('1,2,3')
@patch('zapcli.zap_helper.ZAPHelper.exclude_from_all')
def test_exclude_from_scanners(self, helper_mock):
"""Test exclude from scanners command."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'exclude', 'pattern'])
helper_mock.assert_called_with('pattern')
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.exclude_from_all')
def test_exclude_from_scanners_error(self, helper_mock):
"""Test exclude from scanners command with error raised."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'exclude', '['])
self.assertFalse(helper_mock.called)
self.assertEqual(result.exit_code, 2)
@patch('zapv2.script.enable')
def test_enable_script(self, enable_mock):
"""Test command to enable a script."""
enable_mock.return_value = 'OK'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'scripts', 'enable', 'Foo.js'])
enable_mock.assert_called_with('Foo.js')
self.assertEqual(result.exit_code, 0)
@patch('zapv2.script.enable')
def test_enable_script_error(self, enable_mock):
"""Test command to enable a script with error raised."""
enable_mock.return_value = 'Does Not Exist'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'scripts', 'enable', 'Foo.js'])
enable_mock.assert_called_with('Foo.js')
self.assertEqual(result.exit_code, 2)
@patch('zapv2.script.disable')
def test_disable_script(self, disable_mock):
"""Test command to disable a script."""
disable_mock.return_value = 'OK'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'scripts', 'disable', 'Foo.js'])
disable_mock.assert_called_with('Foo.js')
self.assertEqual(result.exit_code, 0)
@patch('zapv2.script.disable')
def test_disable_script_error(self, disable_mock):
"""Test command to disable a script with error raised."""
disable_mock.return_value = 'Does Not Exist'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'scripts', 'disable', 'Foo.js'])
disable_mock.assert_called_with('Foo.js')
self.assertEqual(result.exit_code, 2)
@patch('zapv2.script.remove')
def test_remove_script(self, remove_mock):
"""Test command to remove a script."""
remove_mock.return_value = 'OK'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'scripts', 'remove', 'Foo.js'])
remove_mock.assert_called_with('Foo.js')
self.assertEqual(result.exit_code, 0)
@patch('zapv2.script.remove')
def test_remove_script_error(self, remove_mock):
"""Test command to remove a script with error raised."""
remove_mock.return_value = 'Does Not Exist'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'scripts', 'remove', 'Foo.js'])
remove_mock.assert_called_with('Foo.js')
self.assertEqual(result.exit_code, 2)
@patch('zapv2.script')
@patch('os.path.isfile')
def test_load_script(self, isfile_mock, script_mock):
"""Test command to load a script."""
script_name = 'Foo.js'
script_type = 'proxy'
engine = 'Oracle Nashorn'
valid_engines = ['ECMAScript : Oracle Nashorn']
isfile_mock.return_value = True
class_mock = MagicMock()
class_mock.load.return_value = 'OK'
engines = PropertyMock(return_value=valid_engines)
type(class_mock).list_engines = engines
script_mock.return_value = class_mock
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'scripts', 'load',
'--name', script_name, '--script-type', script_type,
'--engine', engine, '--file-path', script_name])
class_mock.load.assert_called_with(script_name, script_type, engine, script_name, scriptdescription='')
self.assertEqual(result.exit_code, 0)
@patch('zapv2.script')
@patch('os.path.isfile')
def test_load_script_file_error(self, isfile_mock, script_mock):
"""Testing that an error is raised when an invalid file is provided."""
isfile_mock.return_value = False
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'scripts', 'load',
'--name', 'Foo.js', '--script-type', 'proxy',
'--engine', 'Oracle Nashorn', '--file-path', 'Foo.js'])
self.assertEqual(result.exit_code, 2)
self.assertFalse(script_mock.return_value.load.called)
@patch('zapv2.script')
@patch('os.path.isfile')
def test_load_script_engine_error(self, isfile_mock, script_mock):
"""Testing that an error is raised when an invalid engine is provided."""
isfile_mock.return_value = True
valid_engines = ['ECMAScript : Oracle Nashorn']
class_mock = MagicMock()
class_mock.load.return_value = 'OK'
engines = PropertyMock(return_value=valid_engines)
type(class_mock).list_engines = engines
script_mock.return_value = class_mock
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'scripts', 'load',
'--name', 'Foo.js', '--script-type', 'proxy',
'--engine', 'Invalid Engine', '--file-path', 'Foo.js'])
self.assertEqual(result.exit_code, 2)
self.assertFalse(class_mock.load.called)
@patch('zapv2.script')
@patch('os.path.isfile')
def test_load_script_unknown_error(self, isfile_mock, script_mock):
"""Testing that an error is raised when an erro response is received from the API."""
script_name = 'Foo.js'
script_type = 'proxy'
engine = 'Oracle Nashorn'
valid_engines = ['ECMAScript : Oracle Nashorn']
isfile_mock.return_value = True
class_mock = MagicMock()
class_mock.load.return_value = 'Internal Error'
engines = PropertyMock(return_value=valid_engines)
type(class_mock).list_engines = engines
script_mock.return_value = class_mock
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', 'scripts', 'load',
'--name', script_name, '--script-type', script_type,
'--engine', engine, '--file-path', script_name])
self.assertEqual(result.exit_code, 2)
class_mock.load.assert_called_with(script_name, script_type, engine, script_name, scriptdescription='')
@patch('zapcli.zap_helper.ZAPHelper.xml_report')
def test_xml_report(self, report_mock):
"""Testing XML report."""
result = self.runner.invoke(cli.cli,
['report', '-o', 'foo.xml', '-f', 'xml'])
report_mock.assert_called_with('foo.xml')
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.md_report')
def test_md_report(self, report_mock):
"""Testing MD report."""
result = self.runner.invoke(cli.cli,
['report', '-o', 'foo.md', '-f', 'md'])
report_mock.assert_called_with('foo.md')
self.assertEqual(result.exit_code, 0)
@patch('zapcli.zap_helper.ZAPHelper.html_report')
def test_html_report(self, report_mock):
"""Testing HTML report."""
result = self.runner.invoke(cli.cli,
['report', '-o', 'foo.html', '-f', 'html'])
report_mock.assert_called_with('foo.html')
self.assertEqual(result.exit_code, 0)
@patch('zapv2.context.include_in_context')
def test_context_include(self, context_mock):
"""Testing including a regex in a given context."""
context_mock.return_value = 'OK'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'context',
'include', '--name', 'Test', '--pattern', 'zap-cli'])
context_mock.assert_called_with(contextname='Test', regex='zap-cli')
self.assertEqual(result.exit_code, 0)
@patch('zapv2.context.include_in_context')
def test_context_include_error(self, context_mock):
"""Testing that an error is reported when an invalid response is received from the API."""
context_mock.return_value = 'Error'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'context',
'include', '--name', 'Test', '--pattern', 'zap-cli'])
context_mock.assert_called_with(contextname='Test', regex='zap-cli')
self.assertEqual(result.exit_code, 2)
def test_context_include_regex_error(self):
"""Testing that an error is reported when providing an invalid regex."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'context',
'include', '--name', 'Test', '--pattern', '['])
self.assertEqual(result.exit_code, 2)
@patch('zapv2.context.exclude_from_context')
def test_context_exclude(self, context_mock):
"""Testing excluding a regex from a given context."""
context_mock.return_value = 'OK'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'context',
'exclude', '--name', 'Test', '--pattern', 'zap-cli'])
context_mock.assert_called_with(contextname='Test', regex='zap-cli')
self.assertEqual(result.exit_code, 0)
@patch('zapv2.context.exclude_from_context')
def test_context_exclude_error(self, context_mock):
"""Testing that an error is reported when an invalid response is received from the API."""
context_mock.return_value = 'Error'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'context',
'exclude', '--name', 'Test', '--pattern', 'zap-cli'])
context_mock.assert_called_with(contextname='Test', regex='zap-cli')
self.assertEqual(result.exit_code, 2)
def test_context_exclude_regex_error(self):
"""Testing that an error is reported when providing an invalid regex."""
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'context',
'exclude', '--name', 'Test', '--pattern', '['])
self.assertEqual(result.exit_code, 2)
@patch('zapv2.core.load_session')
@patch('os.path.isfile')
def test_load_session(self, isfile_mock, session_mock):
"""Test loading a session from a file."""
isfile_mock.return_value = True
file_path = '/path/to/zap'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'session',
'load', file_path])
self.assertEqual(result.exit_code, 0)
session_mock.assert_called_with(file_path)
@patch('zapv2.core.load_session')
@patch('os.path.isfile')
def test_load_session_error(self, isfile_mock, session_mock):
"""Testing that an error is reported when providing an invalid file path."""
isfile_mock.return_value = False
file_path = 'invalid'
result = self.runner.invoke(cli.cli, ['--boring', '--api-key', '', '--verbose', 'session',
'load', file_path])
self.assertEqual(result.exit_code, 2)
self.assertFalse(session_mock.called)
if __name__ == '__main__':
unittest.main()
| 48.851064
| 111
| 0.610845
| 2,731
| 22,960
| 4.935189
| 0.072867
| 0.035614
| 0.055795
| 0.066256
| 0.903843
| 0.872014
| 0.855246
| 0.843152
| 0.801232
| 0.758792
| 0
| 0.009883
| 0.233188
| 22,960
| 469
| 112
| 48.955224
| 0.755651
| 0.098214
| 0
| 0.646377
| 0
| 0.002899
| 0.212348
| 0.051327
| 0
| 0
| 0
| 0
| 0.228986
| 1
| 0.13913
| false
| 0
| 0.02029
| 0
| 0.162319
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e7bcf9a298b487b0de8264b2bea7b7ff24d60dcc
| 376
|
py
|
Python
|
spelling_bee/__init__.py
|
JEHoctor/spelling-bee
|
f06559e22efff6cddd531cc7ee1e96ce7242aad5
|
[
"MIT"
] | null | null | null |
spelling_bee/__init__.py
|
JEHoctor/spelling-bee
|
f06559e22efff6cddd531cc7ee1e96ce7242aad5
|
[
"MIT"
] | null | null | null |
spelling_bee/__init__.py
|
JEHoctor/spelling-bee
|
f06559e22efff6cddd531cc7ee1e96ce7242aad5
|
[
"MIT"
] | null | null | null |
import spelling_bee.archive as archive
import spelling_bee.data_model as data_model
import spelling_bee.dictionary as dictionary
import spelling_bee.folders as folders
import spelling_bee.markov_search as markov_search
import spelling_bee.scrape as scrape
from spelling_bee.dictionary_search import dictionary_search
from spelling_bee.nyt_scraping import HintData, PuzzleData
| 41.777778
| 60
| 0.888298
| 56
| 376
| 5.696429
| 0.303571
| 0.275862
| 0.319749
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087766
| 376
| 8
| 61
| 47
| 0.930029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
99c2da1b103ecac4956b3c0b8162f07233b0c33a
| 71
|
py
|
Python
|
pprint_blocks.py
|
f-prime/Severus
|
9edfab52ecfb9d8db3095a74feef12de1b24d75f
|
[
"MIT"
] | null | null | null |
pprint_blocks.py
|
f-prime/Severus
|
9edfab52ecfb9d8db3095a74feef12de1b24d75f
|
[
"MIT"
] | 1
|
2021-04-30T20:52:55.000Z
|
2021-04-30T20:52:55.000Z
|
pprint_blocks.py
|
f-prime/Severus
|
9edfab52ecfb9d8db3095a74feef12de1b24d75f
|
[
"MIT"
] | null | null | null |
import pprint
import json
pprint.pprint(json.load(open("blocks.db")))
| 14.2
| 43
| 0.760563
| 11
| 71
| 4.909091
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084507
| 71
| 4
| 44
| 17.75
| 0.830769
| 0
| 0
| 0
| 0
| 0
| 0.126761
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
99d581393e75bd8e7dd86b9d9e45aaaa84d8a38b
| 30
|
py
|
Python
|
on_excel/workbook/__init__.py
|
yuyuko-C/pyworkkit
|
7785356bcbc93f56c81f3d78362598d1a6ba10c2
|
[
"Apache-2.0"
] | null | null | null |
on_excel/workbook/__init__.py
|
yuyuko-C/pyworkkit
|
7785356bcbc93f56c81f3d78362598d1a6ba10c2
|
[
"Apache-2.0"
] | null | null | null |
on_excel/workbook/__init__.py
|
yuyuko-C/pyworkkit
|
7785356bcbc93f56c81f3d78362598d1a6ba10c2
|
[
"Apache-2.0"
] | null | null | null |
from .workbook import Workbook
| 30
| 30
| 0.866667
| 4
| 30
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 1
| 30
| 30
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
99e1436302ccca34f9f1f38c42aad7e885a6824e
| 2,770
|
py
|
Python
|
app/validators.py
|
gtindo/Opus
|
ae093b47eaeac935cac0a78b558a36a95d7bbb69
|
[
"MIT"
] | 2
|
2019-10-25T19:11:08.000Z
|
2020-10-29T12:48:08.000Z
|
app/validators.py
|
gtindo/Opus
|
ae093b47eaeac935cac0a78b558a36a95d7bbb69
|
[
"MIT"
] | 6
|
2019-11-07T11:31:22.000Z
|
2021-06-02T00:31:50.000Z
|
app/validators.py
|
gtindo/Opus
|
ae093b47eaeac935cac0a78b558a36a95d7bbb69
|
[
"MIT"
] | null | null | null |
from jsonschema import validate, ValidationError
def validate_compare_input(message):
"""
validate input for function that compare two songs
:param message: message received by microservice
:type message: `dict`
:return: tuple with status and error message
:rtype: `tuple`
"""
input_schema = {
"type": "object",
"required": ["action", "song_1", "song_2"],
"properties": {
"action": {"type": "string", "description": "Function to call"},
"song_1": {
"type": "object",
"required": ["file_id", "name", "content", "extension"],
"properties": {
"name": {"type": "string"},
"file_id": {"type": "string", "description": "file_id as saved in a database"},
"content": {"type": "string", "description": "file converted to base64"},
"extension": {"type": "string"}
}
},
"song_2": {
"type": "object",
"required": ["file_id", "name", "content", "extension"],
"properties": {
"name": {"type": "string"},
"file_id": {"type": "string", "description": "file_id as saved in a database"},
"content": {"type": "string", "description": "file converted to base64"},
"extension": {"type": "string"}
}
}
}
}
status = False
error = ""
try:
status = validate(message, input_schema)
except ValidationError as e:
error = "Validation Error: " + str(e)
return status, error
def validate_fingerprint_input(message):
    """Validate the input message for the fingerprint-generation action.

    :param message: message received by the microservice
    :type message: `dict`
    :return: tuple ``(status, error)`` where ``status`` is True when the
        message matches the expected schema, and ``error`` holds the
        validation error message (empty string on success)
    :rtype: `tuple`
    """
    input_schema = {
        "type": "object",
        "required": ["action", "song"],
        "properties": {
            "action": {"type": "string", "description": "Function to call"},
            "song": {
                "type": "object",
                "required": ["file_id", "name", "content", "extension"],
                "properties": {
                    "name": {"type": "string"},
                    "file_id": {"type": "string", "description": "file_id as saved in a database"},
                    "content": {"type": "string", "description": "file converted to base64"},
                    "extension": {"type": "string"}
                }
            }
        }
    }
    status = False
    error = ""
    try:
        # jsonschema.validate() returns None on success and raises on
        # failure; record success explicitly (the previous assignment
        # left status as None even for valid input).
        validate(message, input_schema)
        status = True
    except ValidationError as e:
        error = "Validation Error: " + str(e)
    return status, error
| 32.97619
| 99
| 0.486643
| 241
| 2,770
| 5.506224
| 0.248963
| 0.105501
| 0.126601
| 0.113037
| 0.814619
| 0.814619
| 0.814619
| 0.754333
| 0.754333
| 0.597589
| 0
| 0.005634
| 0.359206
| 2,770
| 83
| 100
| 33.373494
| 0.741972
| 0.103971
| 0
| 0.704918
| 0
| 0
| 0.336773
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032787
| false
| 0
| 0.016393
| 0
| 0.081967
| 0.016393
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
820c5d0f6e34d6f8a09fa2d5a16ffe7d5e8bd5c4
| 184
|
py
|
Python
|
tests/test_build_undefined.py
|
FunTimeCoding/python-utility
|
e91df316684a07161aae33576329f9092d2e97e6
|
[
"MIT"
] | null | null | null |
tests/test_build_undefined.py
|
FunTimeCoding/python-utility
|
e91df316684a07161aae33576329f9092d2e97e6
|
[
"MIT"
] | null | null | null |
tests/test_build_undefined.py
|
FunTimeCoding/python-utility
|
e91df316684a07161aae33576329f9092d2e97e6
|
[
"MIT"
] | null | null | null |
from python_utility.build_undefined import Build
def test_build_undefined() -> None:
    """Ensure every generated Build constant is non-empty."""
    for attribute in ("GIT_TAG", "GIT_HASH", "BUILD_DATE"):
        assert getattr(Build, attribute) != ''
| 23
| 48
| 0.701087
| 24
| 184
| 5.083333
| 0.583333
| 0.270492
| 0.229508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184783
| 184
| 7
| 49
| 26.285714
| 0.813333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
821ed1bd79a32dc4c720ccfdb9246254d39c5c3d
| 249
|
py
|
Python
|
deep_recommenders/estimator/models/ranking/__init__.py
|
LongmaoTeamTf/deep_recommenders
|
168dabe4ef3a38cc582d019766cf3de576bc8af1
|
[
"Apache-2.0"
] | 143
|
2021-02-04T11:28:07.000Z
|
2022-03-28T09:02:00.000Z
|
deep_recommenders/estimator/models/ranking/__init__.py
|
LongmaoTeamTf/Deep-NLP
|
168dabe4ef3a38cc582d019766cf3de576bc8af1
|
[
"Apache-2.0"
] | 7
|
2021-03-04T23:59:31.000Z
|
2022-01-27T05:13:02.000Z
|
deep_recommenders/estimator/models/ranking/__init__.py
|
LongmaoTeamTf/deep_recommenders
|
168dabe4ef3a38cc582d019766cf3de576bc8af1
|
[
"Apache-2.0"
] | 40
|
2021-02-08T15:26:53.000Z
|
2022-03-29T08:41:14.000Z
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from deep_recommenders.estimator.models.ranking.fnn import FNN
from deep_recommenders.estimator.models.ranking.wide_and_deep import WDL
from deep_recommenders.estimator.models.ranking.deepfm import DeepFM
| 35.571429
| 72
| 0.819277
| 35
| 249
| 5.685714
| 0.514286
| 0.120603
| 0.301508
| 0.437186
| 0.633166
| 0.633166
| 0
| 0
| 0
| 0
| 0
| 0.008696
| 0.076305
| 249
| 6
| 73
| 41.5
| 0.856522
| 0.156627
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
822f35cc44a04d1aa50051a5fe4804400405632c
| 25,249
|
py
|
Python
|
karrio/api/api_api.py
|
karrioapi/karrio-python
|
7b7e3b386016a138a5668644884a7a9fc497b15c
|
[
"MIT"
] | 1
|
2018-12-28T18:32:37.000Z
|
2018-12-28T18:32:37.000Z
|
karrio/api/api_api.py
|
karrioapi/karrio-python
|
7b7e3b386016a138a5668644884a7a9fc497b15c
|
[
"MIT"
] | null | null | null |
karrio/api/api_api.py
|
karrioapi/karrio-python
|
7b7e3b386016a138a5668644884a7a9fc497b15c
|
[
"MIT"
] | null | null | null |
"""
Karrio API
## API Reference Karrio is an open source multi-carrier shipping API that simplifies the integration of logistic carrier services. The Karrio API is organized around REST. Our API has predictable resource-oriented URLs, accepts JSON-encoded request bodies, returns JSON-encoded responses, and uses standard HTTP response codes, authentication, and verbs. The Karrio API differs for every account as we release new versions. These docs are customized to your version of the API. ## Versioning When backwards-incompatible changes are made to the API, a new, dated version is released. The current version is `2022.4`. Read our API changelog and to learn more about backwards compatibility. As a precaution, use API versioning to check a new API version before committing to an upgrade. ## Pagination All top-level API resources have support for bulk fetches via \"list\" API methods. For instance, you can list addresses, list shipments, and list trackers. These list API methods share a common structure, taking at least these two parameters: limit, and offset. Karrio utilizes offset-based pagination via the offset and limit parameters. Both parameters take a number as value (see below) and return objects in reverse chronological order. The offset parameter returns objects listed after an index. The limit parameter take a limit on the number of objects to be returned from 1 to 100. ```json { \"next\": \"/v1/shipments?limit=25&offset=25\", \"previous\": \"/v1/shipments?limit=25&offset=25\", \"results\": [ ] } ``` ## Environments The Karrio API offer the possibility to create and retrieve certain objects in `test_mode`. In development, it is therefore possible to add carrier connections, get live rates, buy labels, create trackers and schedule pickups in `test_mode`. # noqa: E501
The version of the OpenAPI document: 2022.4
Contact:
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from karrio.api_client import ApiClient, Endpoint as _Endpoint
from karrio.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from karrio.model.metadata import Metadata
from karrio.model.references import References
from karrio.model.token_obtain_pair import TokenObtainPair
from karrio.model.token_pair import TokenPair
from karrio.model.token_refresh import TokenRefresh
from karrio.model.token_verify import TokenVerify
class APIApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    def __init__(self, api_client=None):
        # api_client: a configured karrio ApiClient; a default instance is
        # created when none is supplied.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
        # POST /api/token — exchange credentials for an access/refresh token pair.
        self.authenticate_endpoint = _Endpoint(
            settings={
                'response_type': (TokenPair,),
                'auth': [
                    'Token'
                ],
                'endpoint_path': '/api/token',
                'operation_id': 'authenticate',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'data',
                ],
                'required': [
                    'data',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'data':
                        (TokenObtainPair,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'data': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # GET /v1/references — fetch the instance's data references.
        self.data_endpoint = _Endpoint(
            settings={
                'response_type': (References,),
                'auth': [
                    'Token'
                ],
                'endpoint_path': '/v1/references',
                'operation_id': 'data',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                ],
                'required': [],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                },
                'attribute_map': {
                },
                'location_map': {
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [],
            },
            api_client=api_client
        )
        # GET / — instance metadata ("ping").
        self.ping_endpoint = _Endpoint(
            settings={
                'response_type': (Metadata,),
                'auth': [
                    'Token'
                ],
                'endpoint_path': '/',
                'operation_id': 'ping',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                ],
                'required': [],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                },
                'attribute_map': {
                },
                'location_map': {
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [],
            },
            api_client=api_client
        )
        # POST /api/token/refresh — obtain a new token pair from a refresh token.
        self.refresh_token_endpoint = _Endpoint(
            settings={
                'response_type': (TokenPair,),
                'auth': [
                    'Token'
                ],
                'endpoint_path': '/api/token/refresh',
                'operation_id': 'refresh_token',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'data',
                ],
                'required': [
                    'data',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'data':
                        (TokenRefresh,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'data': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
        # POST /api/token/verify — check that an existing token is still valid.
        self.verify_token_endpoint = _Endpoint(
            settings={
                'response_type': ({str: ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},)},),
                'auth': [
                    'Token'
                ],
                'endpoint_path': '/api/token/verify',
                'operation_id': 'verify_token',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'data',
                ],
                'required': [
                    'data',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'data':
                        (TokenVerify,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'data': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
    def authenticate(
        self,
        data,
        **kwargs
    ):
        """Obtain auth token pair  # noqa: E501
        Authenticate the user and return a token pair  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.authenticate(data, async_req=True)
        >>> result = thread.get()
        Args:
            data (TokenObtainPair):
        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done one the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done one the data received from the server.
                Default is True.
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _content_type (str/None): force body content-type.
                Default is None and content-type will be predicted by allowed
                content-types and body.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously
        Returns:
            TokenPair
            If the method is called asynchronously, returns the request
            thread.
        """
        # Fill in the generator's standard request-option defaults, then
        # delegate the actual HTTP call to the pre-built endpoint object.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_spec_property_naming'] = kwargs.get(
            '_spec_property_naming', False
        )
        kwargs['_content_type'] = kwargs.get(
            '_content_type')
        kwargs['_host_index'] = kwargs.get('_host_index')
        kwargs['data'] = \
            data
        return self.authenticate_endpoint.call_with_http_info(**kwargs)
    def data(
        self,
        **kwargs
    ):
        """Data References  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.data(async_req=True)
        >>> result = thread.get()
        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done one the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done one the data received from the server.
                Default is True.
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _content_type (str/None): force body content-type.
                Default is None and content-type will be predicted by allowed
                content-types and body.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously
        Returns:
            References
            If the method is called asynchronously, returns the request
            thread.
        """
        # Fill in the generator's standard request-option defaults, then
        # delegate the actual HTTP call to the pre-built endpoint object.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_spec_property_naming'] = kwargs.get(
            '_spec_property_naming', False
        )
        kwargs['_content_type'] = kwargs.get(
            '_content_type')
        kwargs['_host_index'] = kwargs.get('_host_index')
        return self.data_endpoint.call_with_http_info(**kwargs)
    def ping(
        self,
        **kwargs
    ):
        """Instance Metadata  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.ping(async_req=True)
        >>> result = thread.get()
        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done one the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done one the data received from the server.
                Default is True.
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _content_type (str/None): force body content-type.
                Default is None and content-type will be predicted by allowed
                content-types and body.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously
        Returns:
            Metadata
            If the method is called asynchronously, returns the request
            thread.
        """
        # Fill in the generator's standard request-option defaults, then
        # delegate the actual HTTP call to the pre-built endpoint object.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_spec_property_naming'] = kwargs.get(
            '_spec_property_naming', False
        )
        kwargs['_content_type'] = kwargs.get(
            '_content_type')
        kwargs['_host_index'] = kwargs.get('_host_index')
        return self.ping_endpoint.call_with_http_info(**kwargs)
    def refresh_token(
        self,
        data,
        **kwargs
    ):
        """Refresh auth token  # noqa: E501
        Authenticate the user and return a token pair  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.refresh_token(data, async_req=True)
        >>> result = thread.get()
        Args:
            data (TokenRefresh):
        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done one the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done one the data received from the server.
                Default is True.
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _content_type (str/None): force body content-type.
                Default is None and content-type will be predicted by allowed
                content-types and body.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously
        Returns:
            TokenPair
            If the method is called asynchronously, returns the request
            thread.
        """
        # Fill in the generator's standard request-option defaults, then
        # delegate the actual HTTP call to the pre-built endpoint object.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_spec_property_naming'] = kwargs.get(
            '_spec_property_naming', False
        )
        kwargs['_content_type'] = kwargs.get(
            '_content_type')
        kwargs['_host_index'] = kwargs.get('_host_index')
        kwargs['data'] = \
            data
        return self.refresh_token_endpoint.call_with_http_info(**kwargs)
    def verify_token(
        self,
        data,
        **kwargs
    ):
        """Verify auth token  # noqa: E501
        Verify an existent authentication token  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.verify_token(data, async_req=True)
        >>> result = thread.get()
        Args:
            data (TokenVerify):
        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done one the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done one the data received from the server.
                Default is True.
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _content_type (str/None): force body content-type.
                Default is None and content-type will be predicted by allowed
                content-types and body.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously
        Returns:
            {str: ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},)}
            If the method is called asynchronously, returns the request
            thread.
        """
        # Fill in the generator's standard request-option defaults, then
        # delegate the actual HTTP call to the pre-built endpoint object.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_spec_property_naming'] = kwargs.get(
            '_spec_property_naming', False
        )
        kwargs['_content_type'] = kwargs.get(
            '_content_type')
        kwargs['_host_index'] = kwargs.get('_host_index')
        kwargs['data'] = \
            data
        return self.verify_token_endpoint.call_with_http_info(**kwargs)
| 37.854573
| 1,831
| 0.521248
| 2,502
| 25,249
| 5.064748
| 0.126299
| 0.03196
| 0.020518
| 0.021307
| 0.788747
| 0.781329
| 0.771307
| 0.757418
| 0.757418
| 0.75434
| 0
| 0.004339
| 0.3976
| 25,249
| 666
| 1,832
| 37.911411
| 0.828797
| 0.442275
| 0
| 0.638955
| 0
| 0
| 0.220611
| 0.042872
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014252
| false
| 0
| 0.023753
| 0
| 0.052257
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8247bd402137c14b2c62b430fdb29985351cc97b
| 13,424
|
py
|
Python
|
tests/statisticslearning_tests.py
|
anish-lu-yihe/abcpy
|
be58367c4d7e38ee696238e3d8405e8abe2defb7
|
[
"BSD-3-Clause-Clear"
] | 1
|
2021-08-24T10:40:55.000Z
|
2021-08-24T10:40:55.000Z
|
tests/statisticslearning_tests.py
|
anish-lu-yihe/abcpy
|
be58367c4d7e38ee696238e3d8405e8abe2defb7
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
tests/statisticslearning_tests.py
|
anish-lu-yihe/abcpy
|
be58367c4d7e38ee696238e3d8405e8abe2defb7
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
import unittest
import numpy as np
from abcpy.backends import BackendDummy as Backend
from abcpy.continuousmodels import Normal
from abcpy.continuousmodels import Uniform
from abcpy.statistics import Identity
from abcpy.statisticslearning import Semiautomatic, SemiautomaticNN, TripletDistanceLearning, \
ContrastiveDistanceLearning
try:
import torch
except ImportError:
has_torch = False
else:
has_torch = True
class SemiautomaticTests(unittest.TestCase):
    """Tests for the linear-regression based Semiautomatic statistics learning."""

    def setUp(self):
        # Prior and model used to generate the training samples.
        prior_sigma = Uniform([[10], [20]])
        prior_mu = Normal([0, 1])
        Y = Normal([prior_mu, prior_sigma])
        # Dummy backend: everything runs sequentially in-process.
        self.backend = Backend()
        # Polynomial statistics of degree 3 without cross terms.
        self.statistics_cal = Identity(degree=3, cross=False)
        # Fit the semiautomatic summary-statistics transformation.
        self.statisticslearning = Semiautomatic([Y], self.statistics_cal, self.backend, n_samples=1000,
                                                n_samples_per_param=1, seed=1)

    def test_transformation(self):
        """The learned calculator maps one observation to a (1, 2) statistic."""
        self.new_statistics_calculator = self.statisticslearning.get_statistics()
        # Simulate a single observed dataset from a different Normal model.
        observation_model = Normal([2, 4])
        y_obs = observation_model.forward_simulate(observation_model.get_input_values(), 1)[0].tolist()
        extracted = self.new_statistics_calculator.statistics(y_obs)
        self.assertEqual(np.shape(extracted), (1, 2))
        # NOTE: exact values cannot be asserted here — the linear regression
        # relies on a random number generator in C that we cannot seed or
        # access, so the fitted coefficients differ between runs/platforms.
        # self.assertLess(extracted[0,0] - 0.00215507052338, 10e-2)
        # self.assertLess(extracted[0,1] - (-0.0058023274456), 10e-2)
class SemiautomaticNNTests(unittest.TestCase):
    """Tests for the neural-network based SemiautomaticNN statistics learning."""

    def setUp(self):
        # Prior and model used to generate the training samples.
        prior_sigma = Uniform([[10], [20]])
        prior_mu = Normal([0, 1])
        self.Y = Normal([prior_mu, prior_sigma])
        # Dummy backend: everything runs sequentially in-process.
        self.backend = Backend()
        # Polynomial statistics of degree 3 without cross terms.
        self.statistics_cal = Identity(degree=3, cross=False)
        if has_torch:
            # Train one network without and one with sample scaling.
            self.statisticslearning = SemiautomaticNN([self.Y], self.statistics_cal, self.backend, n_samples=100,
                                                      n_samples_per_param=1, seed=1, n_epochs=10,
                                                      scale_samples=False, use_tqdm=False)
            self.statisticslearning_with_scaler = SemiautomaticNN([self.Y], self.statistics_cal, self.backend,
                                                                  n_samples=100, n_samples_per_param=1, seed=1,
                                                                  n_epochs=10, scale_samples=True, use_tqdm=False)

    def test_initialization(self):
        """Without torch installed, construction must raise ImportError."""
        if not has_torch:
            self.assertRaises(ImportError, SemiautomaticNN, [self.Y], self.statistics_cal, self.backend)

    def test_transformation(self):
        """Both learned calculators produce (1, 2) statistics and reject malformed input."""
        if has_torch:
            self.new_statistics_calculator = self.statisticslearning.get_statistics()
            self.new_statistics_calculator_with_scaler = self.statisticslearning_with_scaler.get_statistics()
            # Simulate a single observed dataset from a different Normal model.
            observation_model = Normal([2, 4])
            y_obs = observation_model.forward_simulate(observation_model.get_input_values(), 1)[0].tolist()
            # Unscaled calculator raises RuntimeError on bad input, the
            # scaled one raises ValueError.
            for calculator, bad_input_error in ((self.new_statistics_calculator, RuntimeError),
                                                (self.new_statistics_calculator_with_scaler, ValueError)):
                extracted = calculator.statistics(y_obs)
                self.assertEqual(np.shape(extracted), (1, 2))
                self.assertRaises(bad_input_error, calculator.statistics, [np.array([1, 2])])

    def test_errors(self):
        """Malformed training-data keyword arguments are rejected at construction."""
        if has_torch:
            # Each kwargs combination must make the constructor raise the
            # paired exception type.
            bad_cases = [
                (RuntimeError, dict(parameters=np.ones((100, 1)))),
                (RuntimeError, dict(simulations=np.ones((100, 1)))),
                (RuntimeError, dict(simulations=np.ones((100, 1, 3)))),
                (RuntimeError, dict(parameters=np.ones((100, 1, 2)))),
                (RuntimeError, dict(simulations=np.ones((100, 1)), parameters=np.zeros((99, 1)))),
                (RuntimeError, dict(parameters_val=np.ones((100, 1)))),
                (RuntimeError, dict(simulations_val=np.ones((100, 1)))),
                (RuntimeError, dict(simulations_val=np.ones((100, 1, 3)))),
                (RuntimeError, dict(parameters_val=np.ones((100, 1, 2)))),
                (RuntimeError, dict(simulations_val=np.ones((100, 1)), parameters_val=np.zeros((99, 1)))),
                (TypeError, dict(parameters=[i for i in range(10)], simulations=[i for i in range(10)])),
                (TypeError, dict(parameters_val=[i for i in range(10)], simulations_val=[i for i in range(10)])),
            ]
            for expected_error, extra_kwargs in bad_cases:
                with self.assertRaises(expected_error):
                    self.statisticslearning = SemiautomaticNN([self.Y], self.statistics_cal, self.backend,
                                                              n_samples=1000, n_samples_per_param=1, seed=1,
                                                              **extra_kwargs)
class ContrastiveDistanceLearningTests(unittest.TestCase):
    """Tests for the contrastive-loss based distance learning of summary statistics."""
    def setUp(self):
        # define prior and model
        sigma = Uniform([[10], [20]])
        mu = Normal([0, 1])
        self.Y = Normal([mu, sigma])
        # define backend
        self.backend = Backend()
        # define statistics
        self.statistics_cal = Identity(degree=3, cross=False)
        if has_torch:
            # Initialize statistics learning
            self.statisticslearning = ContrastiveDistanceLearning([self.Y], self.statistics_cal, self.backend,
                                                                  n_samples=100, n_samples_per_param=1, seed=1,
                                                                  n_epochs=10, scale_samples=False, use_tqdm=False)
            # with sample scaler:
            self.statisticslearning_with_scaler = ContrastiveDistanceLearning([self.Y], self.statistics_cal,
                                                                              self.backend, n_samples=100,
                                                                              n_samples_per_param=1, seed=1,
                                                                              n_epochs=10, scale_samples=True, use_tqdm=False)
    def test_initialization(self):
        """Without torch installed, construction must raise ImportError."""
        if not has_torch:
            self.assertRaises(ImportError, ContrastiveDistanceLearning, [self.Y], self.statistics_cal,
                              self.backend)
    def test_transformation(self):
        """Both learned calculators produce (1, 2) statistics and reject malformed input."""
        if has_torch:
            # Transform statistics extraction
            self.new_statistics_calculator = self.statisticslearning.get_statistics()
            self.new_statistics_calculator_with_scaler = self.statisticslearning_with_scaler.get_statistics()
            # Simulate observed data
            Obs = Normal([2, 4])
            y_obs = Obs.forward_simulate(Obs.get_input_values(), 1)[0].tolist()
            extracted_statistics = self.new_statistics_calculator.statistics(y_obs)
            self.assertEqual(np.shape(extracted_statistics), (1, 2))
            # Unscaled calculator rejects wrongly-shaped input with RuntimeError.
            self.assertRaises(RuntimeError, self.new_statistics_calculator.statistics, [np.array([1, 2])])
            extracted_statistics = self.new_statistics_calculator_with_scaler.statistics(y_obs)
            self.assertEqual(np.shape(extracted_statistics), (1, 2))
            # Scaled calculator rejects the same input with ValueError instead.
            self.assertRaises(ValueError, self.new_statistics_calculator_with_scaler.statistics, [np.array([1, 2])])
class TripletDistanceLearningTests(unittest.TestCase):
    """Tests for the triplet-loss based distance learning of summary statistics."""
    def setUp(self):
        # define prior and model
        sigma = Uniform([[10], [20]])
        mu = Normal([0, 1])
        self.Y = Normal([mu, sigma])
        # define backend
        self.backend = Backend()
        # define statistics
        self.statistics_cal = Identity(degree=3, cross=False)
        if has_torch:
            # Initialize statistics learning
            self.statisticslearning = TripletDistanceLearning([self.Y], self.statistics_cal, self.backend,
                                                              scale_samples=False, use_tqdm=False,
                                                              n_samples=100, n_samples_per_param=1, seed=1, n_epochs=10)
            # with sample scaler:
            self.statisticslearning_with_scaler = TripletDistanceLearning([self.Y], self.statistics_cal, self.backend,
                                                                          scale_samples=True, use_tqdm=False,
                                                                          n_samples=100, n_samples_per_param=1, seed=1,
                                                                          n_epochs=10)
    def test_initialization(self):
        """Without torch installed, construction must raise ImportError."""
        if not has_torch:
            self.assertRaises(ImportError, TripletDistanceLearning, [self.Y], self.statistics_cal, self.backend)
    def test_transformation(self):
        """Both learned calculators produce (1, 2) statistics and reject malformed input."""
        if has_torch:
            # Transform statistics extraction
            self.new_statistics_calculator = self.statisticslearning.get_statistics()
            self.new_statistics_calculator_with_scaler = self.statisticslearning_with_scaler.get_statistics()
            # Simulate observed data
            Obs = Normal([2, 4])
            y_obs = Obs.forward_simulate(Obs.get_input_values(), 1)[0].tolist()
            extracted_statistics = self.new_statistics_calculator.statistics(y_obs)
            self.assertEqual(np.shape(extracted_statistics), (1, 2))
            # Unscaled calculator rejects wrongly-shaped input with RuntimeError.
            self.assertRaises(RuntimeError, self.new_statistics_calculator.statistics, [np.array([1, 2])])
            extracted_statistics = self.new_statistics_calculator_with_scaler.statistics(y_obs)
            self.assertEqual(np.shape(extracted_statistics), (1, 2))
            # Scaled calculator rejects the same input with ValueError instead.
            self.assertRaises(ValueError, self.new_statistics_calculator_with_scaler.statistics, [np.array([1, 2])])
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
| 53.696
| 190
| 0.565256
| 1,324
| 13,424
| 5.52568
| 0.10423
| 0.041553
| 0.060416
| 0.054128
| 0.898305
| 0.879442
| 0.876299
| 0.857983
| 0.849098
| 0.849098
| 0
| 0.032811
| 0.348406
| 13,424
| 249
| 191
| 53.911647
| 0.80359
| 0.07062
| 0
| 0.662791
| 0
| 0
| 0.000643
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 1
| 0.069767
| false
| 0
| 0.069767
| 0
| 0.162791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
41a3a1958195d57d91bf89d38e0b980591bc5284
| 6,044
|
py
|
Python
|
BotTelegram/tests/test_comando_search.py
|
manuggz/memes_telegram_bot
|
2ed73aac099923d08c89616ec35c965204cac119
|
[
"Apache-2.0"
] | null | null | null |
BotTelegram/tests/test_comando_search.py
|
manuggz/memes_telegram_bot
|
2ed73aac099923d08c89616ec35c965204cac119
|
[
"Apache-2.0"
] | null | null | null |
BotTelegram/tests/test_comando_search.py
|
manuggz/memes_telegram_bot
|
2ed73aac099923d08c89616ec35c965204cac119
|
[
"Apache-2.0"
] | null | null | null |
from django.test import TestCase
import json
# Create your tests here.
from django.test import override_settings
@override_settings(DEBUG=True)
class TestSearch(TestCase):
    """End-to-end tests for the Telegram bot's /search command webhook.

    Each test sets the incoming message text and POSTs the Telegram-style
    update payload to the bot's webhook URL, expecting HTTP 200 regardless
    of how mangled the command string is.
    """

    # Webhook endpoint: Telegram bakes the bot token into the URL path.
    WEBHOOK_URL = '/BotTelegram/119646075:AAFsQGgw8IaLwvRZX-IBO9mgV3k048NpuMg/'

    @classmethod
    def setUpTestData(cls):
        # Set up data for the whole TestCase: a fake sender, chat, and update.
        cls.user_from = {
            "first_name": "Manuel",
            "last_name": "Gonzalez",
            "username": "manuggz",
            "id": "109518141",
        }
        cls.chat = {
            "first_name": "Manuel",
            "last_name": "Gonzalez",
            "username": "manuggz",
            "type": "private",
            "id": "109518141",
        }
        cls.consulta = {u'message': {u'text': "", u'from': cls.user_from, u'chat': cls.chat,
                                     u'message_id': 905475, u'date': 1475391962},
                        u'update_id': 25256647, u'debug': True}

    def _post_search(self, text):
        """Set the update's message text to *text* and POST it to the webhook.

        Returns the Django test-client response. Centralises the POST so the
        twelve variants below don't each repeat the URL and content type.
        """
        self.consulta[u'message'][u'text'] = text
        return self.client.post(self.WEBHOOK_URL, json.dumps(self.consulta),
                                content_type="text/json", secure=True)

    def test_search_sin_comandos(self):
        response = self._post_search(u"/search")
        # Check that the response is 200 OK.
        self.assertEqual(response.status_code, 200)

    def test_simple_search_space(self):
        response = self._post_search(u"/search ")
        self.assertEqual(response.status_code, 200)

    def test_simple_space_search(self):
        response = self._post_search(u" /search")
        self.assertEqual(response.status_code, 200)

    def test_simple_space_search_space(self):
        response = self._post_search(u" /search ")
        self.assertEqual(response.status_code, 200)

    def test_simple_spacex100_search_spacex100(self):
        response = self._post_search(" " * 100 + u"/search " + " " * 100)
        self.assertEqual(response.status_code, 200)

    def test_simple_spacex100_search_spacex100_create(self):
        response = self._post_search(" " * 100 + u"/search " + " " * 100 + " create")
        self.assertEqual(response.status_code, 200)

    def test_simple_spacex100_search_spacex100_create_space(self):
        response = self._post_search(" " * 100 + u"/search " + " " * 100 + " create ")
        self.assertEqual(response.status_code, 200)

    def test_simple_spacex100_search_spacex100_yao_spacex100(self):
        response = self._post_search(" " * 100 + u"/search " + " " * 100 + " yao " + " " * 100)
        self.assertEqual(response.status_code, 200)

    def test_simple_spacex100_search_spacex100_create_spacex100_search(self):
        response = self._post_search(" " * 100 + u"/search " + " " * 100 + " create " + " " * 100 + " search")
        self.assertEqual(response.status_code, 200)

    def test_search_forever_alone(self):
        response = self._post_search(u"/search Forever Alone")
        self.assertEqual(response.status_code, 200)

    def test_search_no_existe(self):
        # A query that should match no meme still returns 200.
        response = self._post_search(u"/search 123123123123189876761009123781238712989912992")
        self.assertEqual(response.status_code, 200)

    def test_double_search(self):
        response = self._post_search(u"/search /search")
        self.assertEqual(response.status_code, 200)
| 45.787879
| 115
| 0.636168
| 687
| 6,044
| 5.47016
| 0.117904
| 0.076637
| 0.055349
| 0.058808
| 0.831293
| 0.831293
| 0.825439
| 0.825439
| 0.822512
| 0.803353
| 0
| 0.089173
| 0.237426
| 6,044
| 131
| 116
| 46.137405
| 0.726188
| 0.079087
| 0
| 0.525
| 0
| 0
| 0.24013
| 0.135749
| 0.0625
| 0
| 0
| 0
| 0.15
| 1
| 0.1625
| false
| 0
| 0.0375
| 0
| 0.2125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
68c949dda5d8d59ae87d1ed88d56f53bf5dee507
| 150
|
py
|
Python
|
backend/apps/music/urls.py
|
daojunL/Art-Event-Gallery
|
33fb51f78a9815f6ef29422291247908ec7008ef
|
[
"MIT"
] | null | null | null |
backend/apps/music/urls.py
|
daojunL/Art-Event-Gallery
|
33fb51f78a9815f6ef29422291247908ec7008ef
|
[
"MIT"
] | null | null | null |
backend/apps/music/urls.py
|
daojunL/Art-Event-Gallery
|
33fb51f78a9815f6ef29422291247908ec7008ef
|
[
"MIT"
] | null | null | null |
from django.contrib import admin  # NOTE(review): unused here — confirm before removing
from django.urls import path  # was imported twice; duplicate removed

from . import views  # NOTE(review): unused while urlpatterns is empty

# URL namespace for the music app (used as 'music:<name>' in reverse()).
app_name = 'music'

# No routes registered yet.
urlpatterns = [
]
| 13.636364
| 32
| 0.766667
| 22
| 150
| 5.181818
| 0.545455
| 0.263158
| 0.245614
| 0.350877
| 0.45614
| 0.45614
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 150
| 11
| 33
| 13.636364
| 0.912
| 0
| 0
| 0.285714
| 0
| 0
| 0.033113
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.571429
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ec2925a7ff60e2ec9b23a8378944b9f6d293a682
| 16,160
|
py
|
Python
|
alibaba/spiders/alibaba_spiders.py
|
PandorAstrum/alibaba_skrapy
|
1548a354785578be1850015eeb439c368f5be4f2
|
[
"MIT"
] | null | null | null |
alibaba/spiders/alibaba_spiders.py
|
PandorAstrum/alibaba_skrapy
|
1548a354785578be1850015eeb439c368f5be4f2
|
[
"MIT"
] | null | null | null |
alibaba/spiders/alibaba_spiders.py
|
PandorAstrum/alibaba_skrapy
|
1548a354785578be1850015eeb439c368f5be4f2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import re
from scrapy import Request
from scrapy import Spider
from bs4 import BeautifulSoup
from alibaba.items import AlibabaItem
class AlibabaSpidersSpider(Spider):
    """Spider for alibaba.com with two modes, selected by ``_category_check``:

    * category mode — scrape the category / sub-category tree and yield
      ``{"Category": ..., "Sub Category": [...]}`` dicts;
    * product mode — follow every product link (and pagination) and yield
      populated ``AlibabaItem``s.
    """
    name = 'alibaba_spiders'
    allowed_domains = ['alibaba.com']

    def __init__(self, **kwargs):
        """All runtime options arrive as crawler kwargs (``_``-prefixed keys)."""
        super(AlibabaSpidersSpider, self).__init__(**kwargs)
        self.start_urls = [kwargs.get('_start_urls')]
        self.headers = kwargs.get('_headers')            # User-Agent string
        self.category_check = kwargs.get('_category_check')
        self.prev = kwargs.get('_prev')                  # truthy -> skip already-seen URLs
        self.previous_list = kwargs.get('_previous_list')
        self.tmp_links = []                              # product links queued this run

    def start_requests(self):
        for url in self.start_urls:
            yield Request(url, headers={'User-Agent': self.headers})

    def parse(self, response):
        """Dispatch to category-tree scraping or product-list crawling."""
        if self.category_check:
            # --- category mode: only scrape the categories ---
            categories = response.xpath('//div[@class="mod-content"]//ul//li//a/text()').extract()
            all_sub_cat = response.xpath('//div[@module-title="productGroups"]/@module-data').extract_first()
            # Crude tokenisation: turn '%' and every digit into commas before splitting.
            for ch in '%0123456789':
                all_sub_cat = all_sub_cat.replace(ch, ',')
            all_sub_cat_spit = all_sub_cat.split('Fproductgrouplist')
            all_sub_cat_spit.pop(0)
            for c in categories:
                subs = []
                cat_ = []
                # Normalise the display name to the token used in the group URLs.
                if '&' in c:
                    matching_category = c.replace('&', '_').replace(' ', '')
                elif " " in c:
                    matching_category = c.replace(' ', '_')
                elif ',' in c:
                    matching_category = c.replace(',', '').replace(' ', '_')
                elif '/' in c:
                    matching_category = c.replace('&', '').replace(',', '').replace(' ', '_')
                else:
                    matching_category = c
                # Collect sub-category tokens listed before this category's own entry.
                for indx, sub in enumerate(all_sub_cat_spit):
                    sub_cat = re.findall(r'([A-Z]\w+\.html)', sub)
                    sub_cat[0] = sub_cat[0].replace('.html', '').replace('F', '', 1)
                    if sub_cat[0] == matching_category:
                        cat_.append(sub_cat[0])
                        break
                    else:
                        subs.append(sub_cat[0])
                # Remove consumed entries so the next category starts fresh
                # (iterate a copy because we mutate the list).
                for s in all_sub_cat_spit[:]:
                    sub_cat = re.findall(r'([A-Z]\w+\.html)', s)
                    sub_cat[0] = sub_cat[0].replace('.html', '').replace('F', '', 1)
                    if sub_cat[0] in subs:
                        all_sub_cat_spit.remove(s)
                    if sub_cat[0] in cat_:
                        all_sub_cat_spit.remove(s)
                yield {
                    "Category": matching_category,
                    "Sub Category": subs
                }
        else:
            # --- product mode: follow every new product link, then paginate ---
            _div = response.xpath('//div[@class="module-product-list"]')
            links = _div.xpath('.//div[@class="product-info"]/div[@class="title"]/a/@href').extract()
            for link in links:
                abs_link = response.urljoin(link)
                # bugfix: compare the absolute URL (original compared the raw
                # href against stored absolute URLs, so dedup never matched).
                if abs_link not in self.tmp_links:
                    self.tmp_links.append(abs_link)
                    yield Request(url=abs_link, callback=self.parse_item,
                                  headers={'User-Agent': self.headers})
            # Follow pagination links.
            for next_page in response.xpath('//div[@class="next-pagination-list"]/a/@href').extract():
                if next_page:
                    yield Request(response.urljoin(next_page), callback=self.parse,
                                  headers={'User-Agent': self.headers})

    def parse_item(self, response):
        """Extract one product page into an AlibabaItem.

        When ``self.prev`` is set, pages whose URL already appears in
        ``self.previous_list`` are skipped. (The original duplicated the whole
        extraction body across the two branches; it is now shared.)
        """
        soup = BeautifulSoup(response.text, 'lxml')
        if self.prev and response.url in self.previous_list:
            return
        yield self._build_item(response, soup)

    def _build_item(self, response, soup):
        """Populate and return the AlibabaItem for a single product page."""
        item = AlibabaItem()
        item['url'] = response.url
        item['title'] = response.xpath('//h1[@class="ma-title"]/text()').extract_first()
        item['price'] = response.xpath('//span[@class="ma-ref-price"]/span/text()').extract_first()
        item['min_order'] = response.xpath('//span[@class="ma-min-order"]/text()').extract_first()
        item['short_description'] = self._extract_quick_details(response)
        item['supply_ability'] = response.xpath(
            '//div[contains(text(), "Supply Ability")]/following-sibling::div/dl/dd/text()').extract_first()
        packaging = self._extract_packaging(soup)
        if packaging is not None:
            # Field only set when the "Packaging & Delivery" section exists,
            # matching the original behaviour.
            item['packaging_delivery'] = packaging
        item['description'] = self._extract_description(soup)
        item['images_links'] = self._extract_images(soup)
        return item

    def _extract_quick_details(self, response):
        """Return the "Quick Details" table as a list of "key value" strings."""
        rows = response.xpath('//div[contains(text(), "Quick Details")]/following-sibling::div/dl')
        keys = rows.xpath('.//dt/span/text()').extract()
        values = rows.xpath('.//dd/div/text()').extract()
        details = ["\n" + k + " " + v for k, v in zip(keys, values)]
        if details:  # guard: original raised IndexError on an empty table
            details[0] = details[0].replace("\n", "")  # first line needs no leading break
        return details

    def _extract_packaging(self, soup):
        """Return "Packaging & Delivery" rows, truncated before "Lead Time".

        Returns None when the section is absent.
        """
        header = soup.find('div', string='Packaging & Delivery')
        if not header:
            return None
        entries = []
        for row in header.findNext('div').findAll('dl'):
            # bugfix: the original called .strip() and discarded the result.
            key = row.find('dt').text.strip()
            val = row.find('dd').text.strip()
            entries.append(key + ": " + val)
        # bugfix: the original overwrote the index each iteration, so it only
        # truncated when the *last* row was "Lead Time"; truncate at the first.
        for i, entry in enumerate(entries):
            if "Lead Time" in entry:
                return entries[:i]
        return entries

    def _extract_description(self, soup):
        """Return the product description as one ASCII-only HTML string.

        Prefers the "Product Description" section when present, drops
        image-bearing paragraphs and non-ASCII characters, and keeps only
        content before any "Related Products" heading.
        """
        desc = soup.find('div', {'id': 'J-rich-text-description'})
        section = desc.find('div', {'data-section-title': "Product Description"}) or desc
        paragraphs = section.findAll('p')
        if not paragraphs:
            # No <p> markup: dump the whole block. (bugfix: on this path the
            # original left the joined description unbound -> NameError.)
            text = desc.prettify().replace('\n', '')
            return ''.join(c for c in text if ord(c) < 128)
        cleaned = []
        for paragraph in paragraphs:
            html = paragraph.prettify().replace('\n', '')
            if "<img" not in html:  # dump any paragraph containing an image
                cleaned.append(''.join(c for c in html if ord(c) < 128))
        # Slice off everything from the "Related products" marker onward.
        for i, chunk in enumerate(cleaned):
            if re.search(r'[rR]elated [pP]roducts', chunk):
                cleaned = cleaned[:i]
                break
        return ''.join(cleaned)

    def _extract_images(self, soup):
        """Return full-size image URLs; the leading non-product image is dropped."""
        pics = soup.find('div', {'class': 'module-detailBoothImage'}).findAll('img')
        pics.pop(0)  # first <img> is not a product photo
        links = []
        for pic in pics:
            src = "https:" + pic['src']
            head, _sep, _tail = src.partition('_50x50')  # strip thumbnail suffix
            links.append(head)
        return links
| 48.969697
| 117
| 0.482735
| 1,659
| 16,160
| 4.39783
| 0.131404
| 0.021382
| 0.030702
| 0.048246
| 0.775493
| 0.770011
| 0.764529
| 0.744655
| 0.744655
| 0.743147
| 0
| 0.009139
| 0.404146
| 16,160
| 329
| 118
| 49.118541
| 0.748572
| 0.048453
| 0
| 0.737828
| 0
| 0.007491
| 0.10938
| 0.051366
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014981
| false
| 0.007491
| 0.018727
| 0
| 0.044944
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6bae9d4ecf11391a47e1d3c81488525b58c47c7f
| 140
|
py
|
Python
|
lesson5/while_loop.py
|
vinaymayar/python-game-workshop
|
e990f51815c2080a0d702c9d90dac8e8c2a35d45
|
[
"MIT"
] | 1
|
2016-10-11T19:27:08.000Z
|
2016-10-11T19:27:08.000Z
|
lesson5/while_loop.py
|
vinaymayar/python-game-workshop
|
e990f51815c2080a0d702c9d90dac8e8c2a35d45
|
[
"MIT"
] | null | null | null |
lesson5/while_loop.py
|
vinaymayar/python-game-workshop
|
e990f51815c2080a0d702c9d90dac8e8c2a35d45
|
[
"MIT"
] | null | null | null |
# Exercise 1:
# Using a while loop, print the numbers 0 to 10
# Exercise 2:
# Using a while loop, print all even numbers from 0 to 100
| 23.333333
| 60
| 0.692857
| 26
| 140
| 3.730769
| 0.653846
| 0.123711
| 0.226804
| 0.309278
| 0.412371
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 0.25
| 140
| 5
| 61
| 28
| 0.838095
| 0.928571
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d40c50d0c860f6e7a38c2f88f0f2ed413c112449
| 97
|
py
|
Python
|
aztools/__init__.py
|
abduzoghbi/aztools
|
949cc2ec0dbb4426be0d39c5c9832243c4dbde43
|
[
"MIT"
] | null | null | null |
aztools/__init__.py
|
abduzoghbi/aztools
|
949cc2ec0dbb4426be0d39c5c9832243c4dbde43
|
[
"MIT"
] | null | null | null |
aztools/__init__.py
|
abduzoghbi/aztools
|
949cc2ec0dbb4426be0d39c5c9832243c4dbde43
|
[
"MIT"
] | null | null | null |
from .simlc import SimLC
from .lcurve import LCurve
from . import misc
from . import data_tools
| 16.166667
| 26
| 0.783505
| 15
| 97
| 5
| 0.466667
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175258
| 97
| 6
| 27
| 16.166667
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d40cee1482a67cf83be03e1924b123c37b588310
| 204
|
py
|
Python
|
dist-sys/deliveries/admin.py
|
joehalloran/dist-sys
|
8317eb1bf802f991f0bf4eb78470e05b36b10ba6
|
[
"MIT"
] | null | null | null |
dist-sys/deliveries/admin.py
|
joehalloran/dist-sys
|
8317eb1bf802f991f0bf4eb78470e05b36b10ba6
|
[
"MIT"
] | 3
|
2020-02-11T23:55:06.000Z
|
2021-06-10T21:16:58.000Z
|
dist-sys/deliveries/admin.py
|
joehalloran/dist-sys
|
8317eb1bf802f991f0bf4eb78470e05b36b10ba6
|
[
"MIT"
] | null | null | null |
from django.contrib import admin

from .models import Customer, Address, Product, Order

# Expose each deliveries model in the Django admin with default ModelAdmin options.
for _model in (Customer, Address, Product, Order):
    admin.site.register(_model)
| 22.666667
| 53
| 0.813725
| 28
| 204
| 5.928571
| 0.428571
| 0.216867
| 0.409639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 204
| 8
| 54
| 25.5
| 0.887701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.