hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
243b41e4fe63085b30eb5cf8b63cd67590a2a0b6 | 81,270 | py | Python | intrepyd/tests/A7E_requirements.py | bobosoft/intrepyd | 13f0912b31f86f9bcc50f52ef4ad870e33f0cf65 | [
"BSD-3-Clause"
] | 2 | 2021-04-25T17:38:03.000Z | 2022-03-20T20:48:50.000Z | intrepyd/tests/A7E_requirements.py | bobosoft/intrepyd | 13f0912b31f86f9bcc50f52ef4ad870e33f0cf65 | [
"BSD-3-Clause"
] | 1 | 2016-11-30T22:25:00.000Z | 2017-01-16T22:43:39.000Z | intrepyd/tests/A7E_requirements.py | bobosoft/intrepyd | 13f0912b31f86f9bcc50f52ef4ad870e33f0cf65 | [
"BSD-3-Clause"
] | null | null | null | import intrepyd as ip
import intrepyd.scr
import intrepyd.circuit
import collections
from . import from_fixture_path
class SimulinkCircuit(ip.circuit.Circuit):
def __init__(self, ctx, name):
    """Create the A7E requirements circuit inside context *ctx* under *name*."""
    super().__init__(ctx, name)
def _mk_naked_circuit_impl(self, inputs):
    """Build the netlist of the A7E requirements model.

    *inputs* is an ordered mapping of the 42 circuit inputs (names and
    types as declared in ``_mk_inputs``).  Inputs are consumed
    positionally via ``list(inputs)``, so the iteration order of
    *inputs* must match the declaration order — TODO confirm callers
    guarantee this.  Every created net is registered in ``self.nets``
    keyed by its ``A7E_requirements/...`` path; three assumptions are
    posted on the context; returns an OrderedDict with the three mode
    outputs (ANT_MODE, NU_MODE, WD_MODE).

    NOTE(review): this body is machine-generated (Simulink translation);
    net numbering (n1..n135, tn0..tn14) mirrors the generator's output
    and is kept verbatim.
    """
    input_keys = list(inputs)
    # presentPositionEntered -> n1
    n1 = inputs[input_keys[0]]
    # ACAIRB -> n2
    n2 = inputs[input_keys[1]]
    # IMSAUTOC -> n3
    n3 = inputs[input_keys[2]]
    # Desig -> n4
    n4 = inputs[input_keys[3]]
    # Data23=Sea -> n5
    n5 = inputs[input_keys[4]]
    # CAstageComplete -> n6
    n6 = inputs[input_keys[5]]
    # CLstageComplete -> n7
    n7 = inputs[input_keys[6]]
    # NDstageComplete -> n8
    n8 = inputs[input_keys[7]]
    # HSstageComplete -> n9
    n9 = inputs[input_keys[8]]
    # PNLTEST=TEST -> n10
    n10 = inputs[input_keys[9]]
    # IMSup -> n11
    n11 = inputs[input_keys[10]]
    # latitude -> n12
    n12 = inputs[input_keys[11]]
    # DopplerUp -> n13
    n13 = inputs[input_keys[12]]
    # DopplerCoupled -> n14
    n14 = inputs[input_keys[13]]
    # IMSMODE -> n15
    n15 = inputs[input_keys[14]]
    # AirVelocityTestPassed -> n16
    n16 = inputs[input_keys[15]]
    # PitchSmall AND RollSmall -> n17
    n17 = inputs[input_keys[16]]
    # SINSup -> n18
    n18 = inputs[input_keys[17]]
    # SINSvelocityTestPassed -> n19
    n19 = inputs[input_keys[18]]
    # LandVelocityTestPassed -> n20
    n20 = inputs[input_keys[19]]
    # NonInterveningTakeoff -> n21
    n21 = inputs[input_keys[20]]
    # GroundTestFinished -> n22
    n22 = inputs[input_keys[21]]
    # UPDATTW -> n23
    n23 = inputs[input_keys[22]]
    # MODEROT -> n24
    n24 = inputs[input_keys[23]]
    # PRESPOS -> n25
    n25 = inputs[input_keys[24]]
    # GUNNSEL -> n26
    n26 = inputs[input_keys[25]]
    # MSFW -> n27
    n27 = inputs[input_keys[26]]
    # NonZeroDigitEntered -> n28
    n28 = inputs[input_keys[27]]
    # ENTERSW -> n29
    n29 = inputs[input_keys[28]]
    # FLYTOchanged -> n30
    n30 = inputs[input_keys[29]]
    # HUDREL -> n31
    n31 = inputs[input_keys[30]]
    # AnyDestEntered -> n32
    n32 = inputs[input_keys[31]]
    # HighDrag -> n33
    n33 = inputs[input_keys[32]]
    # LowDrag -> n34
    n34 = inputs[input_keys[33]]
    # OverflownExit -> n35
    n35 = inputs[input_keys[34]]
    # Overflown>42 -> n36
    n36 = inputs[input_keys[35]]
    # FLYTOTOG=Dest -> n37
    n37 = inputs[input_keys[36]]
    # FLYTOTW -> n38
    n38 = inputs[input_keys[37]]
    # WEAPTYPE -> n39
    n39 = inputs[input_keys[38]]
    # Station_Selected -> n40
    n40 = inputs[input_keys[39]]
    # TD -> n41
    n41 = inputs[input_keys[40]]
    # NU=AflyUpd -> n42
    n42 = inputs[input_keys[41]]
    # Latitude thresholds (degrees) for the ANT mode logic.
    # A7E_requirements/Constant
    n43 = self.context.mk_number('70.0', self.context.mk_real_type())
    self.nets['A7E_requirements/Constant'] = n43
    # A7E_requirements/Relational Operator -> n44
    n44 = self.context.mk_gt(n12, n43)
    self.nets['A7E_requirements/Relational Operator'] = n44
    # A7E_requirements/Constant1
    n45 = self.context.mk_number('80.0', self.context.mk_real_type())
    self.nets['A7E_requirements/Constant1'] = n45
    # A7E_requirements/Relational Operator1 -> n46
    n46 = self.context.mk_gt(n12, n45)
    self.nets['A7E_requirements/Relational Operator1'] = n46
    # IMSMODE enumeration literals (int8 codes 0..4) and their comparisons.
    # A7E_requirements/Gndal
    n47 = self.context.mk_number('0', self.context.mk_int8_type())
    self.nets['A7E_requirements/Gndal'] = n47
    # A7E_requirements/ro -> n48
    n48 = self.context.mk_eq(n15, n47)
    self.nets['A7E_requirements/ro'] = n48
    # A7E_requirements/Norm
    n49 = self.context.mk_number('1', self.context.mk_int8_type())
    self.nets['A7E_requirements/Norm'] = n49
    # A7E_requirements/ro1 -> n50
    n50 = self.context.mk_eq(n15, n49)
    self.nets['A7E_requirements/ro1'] = n50
    # A7E_requirements/Iner
    n51 = self.context.mk_number('2', self.context.mk_int8_type())
    self.nets['A7E_requirements/Iner'] = n51
    # A7E_requirements/ro2 -> n52
    n52 = self.context.mk_eq(n15, n51)
    self.nets['A7E_requirements/ro2'] = n52
    # A7E_requirements/MagSl
    n53 = self.context.mk_number('3', self.context.mk_int8_type())
    self.nets['A7E_requirements/MagSl'] = n53
    # A7E_requirements/ro3 -> n54
    n54 = self.context.mk_eq(n15, n53)
    self.nets['A7E_requirements/ro3'] = n54
    # A7E_requirements/Grid
    n55 = self.context.mk_number('4', self.context.mk_int8_type())
    self.nets['A7E_requirements/Grid'] = n55
    # A7E_requirements/ro4 -> n56
    n56 = self.context.mk_eq(n15, n55)
    self.nets['A7E_requirements/ro4'] = n56
    # ANT subsystem (defined elsewhere in this class) computes the ANT mode.
    n57_1 = self.ANT(n1, n2, n3, n4, n5, n6, n7, n8, n9, n10, n11, n44, n46, n13, n14, n48, n50, n52, n54, n56, n16, n17, n18, n19, n20, n21, n22)
    # UPDATTW enumeration literals (int8 codes 0..4) and their comparisons.
    # A7E_requirements/FLYOVER
    n59 = self.context.mk_number('0', self.context.mk_int8_type())
    self.nets['A7E_requirements/FLYOVER'] = n59
    # A7E_requirements/ro5 -> n60
    n60 = self.context.mk_eq(n23, n59)
    self.nets['A7E_requirements/ro5'] = n60
    # A7E_requirements/HUD
    n61 = self.context.mk_number('1', self.context.mk_int8_type())
    self.nets['A7E_requirements/HUD'] = n61
    # A7E_requirements/ro6 -> n62
    n62 = self.context.mk_eq(n23, n61)
    self.nets['A7E_requirements/ro6'] = n62
    # A7E_requirements/RADAR
    n63 = self.context.mk_number('2', self.context.mk_int8_type())
    self.nets['A7E_requirements/RADAR'] = n63
    # A7E_requirements/ro7 -> n64
    n64 = self.context.mk_eq(n23, n63)
    self.nets['A7E_requirements/ro7'] = n64
    # A7E_requirements/TACLL
    n65 = self.context.mk_number('3', self.context.mk_int8_type())
    self.nets['A7E_requirements/TACLL'] = n65
    # A7E_requirements/ro8 -> n66
    n66 = self.context.mk_eq(n23, n65)
    self.nets['A7E_requirements/ro8'] = n66
    # A7E_requirements/Other
    n67 = self.context.mk_number('4', self.context.mk_int8_type())
    self.nets['A7E_requirements/Other'] = n67
    # A7E_requirements/ro9 -> n68
    n68 = self.context.mk_eq(n23, n67)
    self.nets['A7E_requirements/ro9'] = n68
    # MSFW weapon-delivery enumeration literals (codes 2..6) compared below.
    # A7E_requirements/BOC_
    n69 = self.context.mk_number('2', self.context.mk_int8_type())
    self.nets['A7E_requirements/BOC_'] = n69
    # A7E_requirements/ro12 -> n70
    n70 = self.context.mk_eq(n27, n69)
    self.nets['A7E_requirements/ro12'] = n70
    # A7E_requirements/BOCOFF
    n71 = self.context.mk_number('3', self.context.mk_int8_type())
    self.nets['A7E_requirements/BOCOFF'] = n71
    # A7E_requirements/ro13 -> n72
    n72 = self.context.mk_eq(n27, n71)
    self.nets['A7E_requirements/ro13'] = n72
    # A7E_requirements/CCIP
    n73 = self.context.mk_number('4', self.context.mk_int8_type())
    self.nets['A7E_requirements/CCIP'] = n73
    # A7E_requirements/ro14 -> n74
    n74 = self.context.mk_eq(n27, n73)
    self.nets['A7E_requirements/ro14'] = n74
    # A7E_requirements/NATT
    n75 = self.context.mk_number('5', self.context.mk_int8_type())
    self.nets['A7E_requirements/NATT'] = n75
    # A7E_requirements/ro15 -> n76
    n76 = self.context.mk_eq(n27, n75)
    self.nets['A7E_requirements/ro15'] = n76
    # A7E_requirements/NATOFF
    n77 = self.context.mk_number('6', self.context.mk_int8_type())
    self.nets['A7E_requirements/NATOFF'] = n77
    # A7E_requirements/ro16 -> n78
    n78 = self.context.mk_eq(n27, n77)
    self.nets['A7E_requirements/ro16'] = n78
    # WDMFS = BOC_ | BOCOFF | CCIP | NATT | NATOFF (n-ary OR built as a chain).
    # A7E_requirements/WDMFS -> n79
    tn2 = self.context.mk_or(n76, n78)
    tn1 = self.context.mk_or(n74, tn2)
    tn0 = self.context.mk_or(n72, tn1)
    n79 = self.context.mk_or(n70, tn0)
    self.nets['A7E_requirements/WDMFS'] = n79
    # A7E_requirements/BOC
    n80 = self.context.mk_number('30', self.context.mk_int8_type())
    self.nets['A7E_requirements/BOC'] = n80
    # A7E_requirements/00
    n81 = self.context.mk_number('0', self.context.mk_int8_type())
    self.nets['A7E_requirements/00'] = n81
    # A7E_requirements/ro20 -> n82
    n82 = self.context.mk_eq(n81, n39)
    self.nets['A7E_requirements/ro20'] = n82
    # A7E_requirements/not2 -> n83
    n83 = self.context.mk_not(n82)
    self.nets['A7E_requirements/not2'] = n83
    # Weapon-class enumeration literals (codes 0..12) fed to weapon_class.
    # A7E_requirements/UN
    n84 = self.context.mk_number('12', self.context.mk_int8_type())
    self.nets['A7E_requirements/UN'] = n84
    # A7E_requirements/GN
    n85 = self.context.mk_number('0', self.context.mk_int8_type())
    self.nets['A7E_requirements/GN'] = n85
    # A7E_requirements/RK
    n86 = self.context.mk_number('1', self.context.mk_int8_type())
    self.nets['A7E_requirements/RK'] = n86
    # A7E_requirements/WL
    n87 = self.context.mk_number('2', self.context.mk_int8_type())
    self.nets['A7E_requirements/WL'] = n87
    # A7E_requirements/SK
    n88 = self.context.mk_number('3', self.context.mk_int8_type())
    self.nets['A7E_requirements/SK'] = n88
    # A7E_requirements/MF
    n89 = self.context.mk_number('4', self.context.mk_int8_type())
    self.nets['A7E_requirements/MF'] = n89
    # A7E_requirements/SOD
    n90 = self.context.mk_number('5', self.context.mk_int8_type())
    self.nets['A7E_requirements/SOD'] = n90
    # A7E_requirements/SSH
    n91 = self.context.mk_number('6', self.context.mk_int8_type())
    self.nets['A7E_requirements/SSH'] = n91
    # A7E_requirements/SL
    n92 = self.context.mk_number('7', self.context.mk_int8_type())
    self.nets['A7E_requirements/SL'] = n92
    # A7E_requirements/MD
    n93 = self.context.mk_number('8', self.context.mk_int8_type())
    self.nets['A7E_requirements/MD'] = n93
    # A7E_requirements/OD
    n94 = self.context.mk_number('9', self.context.mk_int8_type())
    self.nets['A7E_requirements/OD'] = n94
    # A7E_requirements/SM
    n95 = self.context.mk_number('10', self.context.mk_int8_type())
    self.nets['A7E_requirements/SM'] = n95
    # A7E_requirements/OR
    n96 = self.context.mk_number('11', self.context.mk_int8_type())
    self.nets['A7E_requirements/OR'] = n96
    # Map WEAPTYPE (n39) to its weapon class via the weapon_class subsystem.
    n97_1 = self.weapon_class(n39, n84, n85, n86, n87, n88, n89, n90, n91, n92, n93, n94, n95, n96)
    # A7E_requirements/diff -> n98
    n98 = self.context.mk_neq(n84, n97_1)
    self.nets['A7E_requirements/diff'] = n98
    # Ready_Station: station selected, WEAPTYPE nonzero, class known (!= UN).
    # A7E_requirements/Ready_Station -> n99
    tn3 = self.context.mk_and(n83, n98)
    n99 = self.context.mk_and(n40, tn3)
    self.nets['A7E_requirements/Ready_Station'] = n99
    # A7E_requirements/eq1 -> n100
    n100 = self.context.mk_eq(n90, n97_1)
    self.nets['A7E_requirements/eq1'] = n100
    # A7E_requirements/eq2 -> n101
    n101 = self.context.mk_eq(n91, n97_1)
    self.nets['A7E_requirements/eq2'] = n101
    # A7E_requirements/Special -> n102
    n102 = self.context.mk_or(n100, n101)
    self.nets['A7E_requirements/Special'] = n102
    # A7E_requirements/Walleye -> n103
    n103 = self.context.mk_eq(n97_1, n87)
    self.nets['A7E_requirements/Walleye'] = n103
    # A7E_requirements/Guns -> n104
    n104 = self.context.mk_eq(n97_1, n85)
    self.nets['A7E_requirements/Guns'] = n104
    # A7E_requirements/Rockets -> n105
    n105 = self.context.mk_eq(n97_1, n86)
    self.nets['A7E_requirements/Rockets'] = n105
    # Reserved_Weapon = Special | Walleye | Guns | Rockets.
    # A7E_requirements/Reserved_Weapon -> n106
    tn5 = self.context.mk_or(n104, n105)
    tn4 = self.context.mk_or(n103, tn5)
    n106 = self.context.mk_or(n102, tn4)
    self.nets['A7E_requirements/Reserved_Weapon'] = n106
    # A7E_requirements/Shrike -> n107
    n107 = self.context.mk_eq(n97_1, n88)
    self.nets['A7E_requirements/Shrike'] = n107
    # A7E_requirements/not3 -> n108
    n108 = self.context.mk_not(n107)
    self.nets['A7E_requirements/not3'] = n108
    # A7E_requirements/not4 -> n109
    n109 = self.context.mk_not(n106)
    self.nets['A7E_requirements/not4'] = n109
    # Other_Weapon = Ready_Station & !Shrike & !Reserved_Weapon.
    # A7E_requirements/Other_Weapon -> n110
    tn6 = self.context.mk_and(n108, n109)
    n110 = self.context.mk_and(n99, tn6)
    self.nets['A7E_requirements/Other_Weapon'] = n110
    # A7E_requirements/not1 -> n111
    n111 = self.context.mk_not(n26)
    self.nets['A7E_requirements/not1'] = n111
    # FLYTOTW enumeration literals (0 = zero, 1 = reset).
    # A7E_requirements/zero
    n112 = self.context.mk_number('0', self.context.mk_int8_type())
    self.nets['A7E_requirements/zero'] = n112
    # A7E_requirements/ro17 -> n113
    n113 = self.context.mk_eq(n38, n112)
    self.nets['A7E_requirements/ro17'] = n113
    # A7E_requirements/reset
    n114 = self.context.mk_number('1', self.context.mk_int8_type())
    self.nets['A7E_requirements/reset'] = n114
    # A7E_requirements/ro18 -> n115
    n115 = self.context.mk_eq(n38, n114)
    self.nets['A7E_requirements/ro18'] = n115
    # A7E_requirements/or2 -> n116
    n116 = self.context.mk_or(n28, n41)
    self.nets['A7E_requirements/or2'] = n116
    # Redesignate = (NU=AflyUpd) & (NonZeroDigitEntered | TD).
    # A7E_requirements/Redesignate -> n117
    n117 = self.context.mk_and(n42, n116)
    self.nets['A7E_requirements/Redesignate'] = n117
    # WD subsystem (defined elsewhere in this class) computes the WD mode.
    n118_1 = self.WD(n99, n31, n106, n102, n105, n104, n103, n107, n110, n111, n113, n115, n37, n79, n70, n72, n74, n76, n78, n4, n117, n32, n33, n34, n35, n36, n2)
    # A7E_requirements/ro19 -> n119
    n119 = self.context.mk_eq(n80, n118_1)
    self.nets['A7E_requirements/ro19'] = n119
    # Remaining MSFW literals (0 = None, 1 = TF).
    # A7E_requirements/None
    n120 = self.context.mk_number('0', self.context.mk_int8_type())
    self.nets['A7E_requirements/None'] = n120
    # A7E_requirements/ro10 -> n121
    n121 = self.context.mk_eq(n27, n120)
    self.nets['A7E_requirements/ro10'] = n121
    # A7E_requirements/TF
    n122 = self.context.mk_number('1', self.context.mk_int8_type())
    self.nets['A7E_requirements/TF'] = n122
    # A7E_requirements/ro11 -> n123
    n123 = self.context.mk_eq(n27, n122)
    self.nets['A7E_requirements/ro11'] = n123
    # NU subsystem (defined elsewhere in this class) computes the NU mode.
    n124_1 = self.NU(n40, n2, n24, n25, n60, n62, n64, n66, n68, n26, n79, n5, n119, n121, n123, n28, n29, n30)
    # A7E_requirements/or1 -> n127
    n127 = self.context.mk_or(n40, n82)
    self.nets['A7E_requirements/or1'] = n127
    # Assumption: a station is selected or WEAPTYPE is zero.
    # A7E_requirements/Assumption
    self.context.mk_assumption(n127)
    # Assumption: MSFW takes one of its enumerated values (None..NATOFF).
    # A7E_requirements/Enum1 -> n129
    tn11 = self.context.mk_or(n76, n78)
    tn10 = self.context.mk_or(n74, tn11)
    tn9 = self.context.mk_or(n72, tn10)
    tn8 = self.context.mk_or(n70, tn9)
    tn7 = self.context.mk_or(n123, tn8)
    n129 = self.context.mk_or(n121, tn7)
    self.nets['A7E_requirements/Enum1'] = n129
    # A7E_requirements/Assumption1
    self.context.mk_assumption(n129)
    # Assumption: UPDATTW takes one of its enumerated values (FLYOVER..Other).
    # A7E_requirements/Enum2 -> n131
    tn14 = self.context.mk_or(n66, n68)
    tn13 = self.context.mk_or(n64, tn14)
    tn12 = self.context.mk_or(n62, tn13)
    n131 = self.context.mk_or(n60, tn12)
    self.nets['A7E_requirements/Enum2'] = n131
    # A7E_requirements/Assumption2
    self.context.mk_assumption(n131)
    # A7E_requirements/AflyUpd
    n133 = self.context.mk_number('22', self.context.mk_int8_type())
    self.nets['A7E_requirements/AflyUpd'] = n133
    # A7E_requirements/ro21 -> n134
    n134 = self.context.mk_eq(n124_1, n133)
    self.nets['A7E_requirements/ro21'] = n134
    # Assumption: the NU=AflyUpd input agrees with the computed NU mode.
    # A7E_requirements/and2 -> n135
    n135 = self.context.mk_and(n134, n42)
    self.nets['A7E_requirements/and2'] = n135
    # A7E_requirements/Assumption3
    self.context.mk_assumption(n135)
    # n57 -> ANT_MODE
    # n124 -> NU_MODE
    # n118 -> WD_MODE
    outputs = collections.OrderedDict()
    outputs['A7E_requirements/ANT_MODE'] = n57_1
    self.nets['A7E_requirements/ANT_MODE'] = n57_1
    outputs['A7E_requirements/NU_MODE'] = n124_1
    self.nets['A7E_requirements/NU_MODE'] = n124_1
    outputs['A7E_requirements/WD_MODE'] = n118_1
    self.nets['A7E_requirements/WD_MODE'] = n118_1
    return outputs
def _mk_inputs(self):
    """Declare the 42 inputs of the A7E requirements model.

    Each input is created in declaration order and registered in
    ``self.inputs`` under its Simulink port name.  Most inputs are
    boolean; ``latitude`` is real and the mode/switch words
    (IMSMODE, UPDATTW, MSFW, FLYTOTW, WEAPTYPE) are int8.
    """
    bool_t = self.context.mk_boolean_type
    int8_t = self.context.mk_int8_type
    real_t = self.context.mk_real_type
    # (port name, type constructor) in the exact original declaration order.
    port_specs = [
        ('presentPositionEntered', bool_t),
        ('ACAIRB', bool_t),
        ('IMSAUTOC', bool_t),
        ('Desig', bool_t),
        ('Data23=Sea', bool_t),
        ('CAstageComplete', bool_t),
        ('CLstageComplete', bool_t),
        ('NDstageComplete', bool_t),
        ('HSstageComplete', bool_t),
        ('PNLTEST=TEST', bool_t),
        ('IMSup', bool_t),
        ('latitude', real_t),
        ('DopplerUp', bool_t),
        ('DopplerCoupled', bool_t),
        ('IMSMODE', int8_t),
        ('AirVelocityTestPassed', bool_t),
        ('PitchSmall AND RollSmall', bool_t),
        ('SINSup', bool_t),
        ('SINSvelocityTestPassed', bool_t),
        ('LandVelocityTestPassed', bool_t),
        ('NonInterveningTakeoff', bool_t),
        ('GroundTestFinished', bool_t),
        ('UPDATTW', int8_t),
        ('MODEROT', bool_t),
        ('PRESPOS', bool_t),
        ('GUNNSEL', bool_t),
        ('MSFW', int8_t),
        ('NonZeroDigitEntered', bool_t),
        ('ENTERSW', bool_t),
        ('FLYTOchanged', bool_t),
        ('HUDREL', bool_t),
        ('AnyDestEntered', bool_t),
        ('HighDrag', bool_t),
        ('LowDrag', bool_t),
        ('OverflownExit', bool_t),
        ('Overflown>42', bool_t),
        ('FLYTOTOG=Dest', bool_t),
        ('FLYTOTW', int8_t),
        ('WEAPTYPE', int8_t),
        ('Station_Selected', bool_t),
        ('TD', bool_t),
        ('NU=AflyUpd', bool_t),
    ]
    # mk_type() is invoked per input, mirroring the original per-line
    # type-construction calls.
    for port_name, mk_type in port_specs:
        self.inputs[port_name] = self.context.mk_input(port_name, mk_type())
def weapon_class(self, n137, n138, n139, n140, n141, n142, n143, n144, n145, n146, n147, n148, n149, n150):
# WEAPTYPE -> n137
# UN_ -> n138
# GN_ -> n139
# RK_ -> n140
# WL_ -> n141
# SK_ -> n142
# MF_ -> n143
# SOD_ -> n144
# SSH_ -> n145
# SL_ -> n146
# MD_ -> n147
# OD_ -> n148
# SM_ -> n149
# OR_ -> n150
# A7E_requirements/weapon_class/v54
n151 = self.context.mk_number('94', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v54'] = n151
# A7E_requirements/weapon_class/eq52 -> n152
n152 = self.context.mk_eq(n137, n151)
self.nets['A7E_requirements/weapon_class/eq52'] = n152
# A7E_requirements/weapon_class/v53
n153 = self.context.mk_number('99', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v53'] = n153
# A7E_requirements/weapon_class/eq53 -> n154
n154 = self.context.mk_eq(n137, n153)
self.nets['A7E_requirements/weapon_class/eq53'] = n154
# A7E_requirements/weapon_class/Logical Operator9 -> n155
n155 = self.context.mk_or(n152, n154)
self.nets['A7E_requirements/weapon_class/Logical Operator9'] = n155
# A7E_requirements/weapon_class/v52
n156 = self.context.mk_number('59', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v52'] = n156
# A7E_requirements/weapon_class/eq50 -> n157
n157 = self.context.mk_eq(n137, n156)
self.nets['A7E_requirements/weapon_class/eq50'] = n157
# A7E_requirements/weapon_class/v51
n158 = self.context.mk_number('75', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v51'] = n158
# A7E_requirements/weapon_class/eq51 -> n159
n159 = self.context.mk_eq(n137, n158)
self.nets['A7E_requirements/weapon_class/eq51'] = n159
# A7E_requirements/weapon_class/Logical Operator8 -> n160
n160 = self.context.mk_or(n157, n159)
self.nets['A7E_requirements/weapon_class/Logical Operator8'] = n160
# A7E_requirements/weapon_class/v50
n161 = self.context.mk_number('63', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v50'] = n161
# A7E_requirements/weapon_class/eq48 -> n162
n162 = self.context.mk_eq(n137, n161)
self.nets['A7E_requirements/weapon_class/eq48'] = n162
# A7E_requirements/weapon_class/v49
n163 = self.context.mk_number('67', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v49'] = n163
# A7E_requirements/weapon_class/eq49 -> n164
n164 = self.context.mk_eq(n137, n163)
self.nets['A7E_requirements/weapon_class/eq49'] = n164
# A7E_requirements/weapon_class/Logical Operator7 -> n165
n165 = self.context.mk_or(n162, n164)
self.nets['A7E_requirements/weapon_class/Logical Operator7'] = n165
# A7E_requirements/weapon_class/v46
n166 = self.context.mk_number('56', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v46'] = n166
# A7E_requirements/weapon_class/eq44 -> n167
n167 = self.context.mk_eq(n137, n166)
self.nets['A7E_requirements/weapon_class/eq44'] = n167
# A7E_requirements/weapon_class/v45
n168 = self.context.mk_number('62', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v45'] = n168
# A7E_requirements/weapon_class/eq43 -> n169
n169 = self.context.mk_eq(n137, n168)
self.nets['A7E_requirements/weapon_class/eq43'] = n169
# A7E_requirements/weapon_class/v44
n170 = self.context.mk_number('66', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v44'] = n170
# A7E_requirements/weapon_class/eq42 -> n171
n171 = self.context.mk_eq(n137, n170)
self.nets['A7E_requirements/weapon_class/eq42'] = n171
# A7E_requirements/weapon_class/v43
n172 = self.context.mk_number('90', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v43'] = n172
# A7E_requirements/weapon_class/eq41 -> n173
n173 = self.context.mk_eq(n137, n172)
self.nets['A7E_requirements/weapon_class/eq41'] = n173
# A7E_requirements/weapon_class/v42
n174 = self.context.mk_number('91', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v42'] = n174
# A7E_requirements/weapon_class/eq40 -> n175
n175 = self.context.mk_eq(n137, n174)
self.nets['A7E_requirements/weapon_class/eq40'] = n175
# A7E_requirements/weapon_class/v41
n176 = self.context.mk_number('93', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v41'] = n176
# A7E_requirements/weapon_class/eq45 -> n177
n177 = self.context.mk_eq(n137, n176)
self.nets['A7E_requirements/weapon_class/eq45'] = n177
# A7E_requirements/weapon_class/v47
n178 = self.context.mk_number('97', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v47'] = n178
# A7E_requirements/weapon_class/eq46 -> n179
n179 = self.context.mk_eq(n137, n178)
self.nets['A7E_requirements/weapon_class/eq46'] = n179
# A7E_requirements/weapon_class/v48
n180 = self.context.mk_number('98', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v48'] = n180
# A7E_requirements/weapon_class/eq47 -> n181
n181 = self.context.mk_eq(n137, n180)
self.nets['A7E_requirements/weapon_class/eq47'] = n181
# A7E_requirements/weapon_class/Logical Operator6 -> n182
tn20 = self.context.mk_or(n179, n181)
tn19 = self.context.mk_or(n177, tn20)
tn18 = self.context.mk_or(n175, tn19)
tn17 = self.context.mk_or(n173, tn18)
tn16 = self.context.mk_or(n171, tn17)
tn15 = self.context.mk_or(n169, tn16)
n182 = self.context.mk_or(n167, tn15)
self.nets['A7E_requirements/weapon_class/Logical Operator6'] = n182
# A7E_requirements/weapon_class/v30
n183 = self.context.mk_number('50', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v30'] = n183
# A7E_requirements/weapon_class/eq28 -> n184
n184 = self.context.mk_eq(n137, n183)
self.nets['A7E_requirements/weapon_class/eq28'] = n184
# A7E_requirements/weapon_class/v29
n185 = self.context.mk_number('53', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v29'] = n185
# A7E_requirements/weapon_class/eq27 -> n186
n186 = self.context.mk_eq(n137, n185)
self.nets['A7E_requirements/weapon_class/eq27'] = n186
# A7E_requirements/weapon_class/v28
n187 = self.context.mk_number('60', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v28'] = n187
# A7E_requirements/weapon_class/eq26 -> n188
n188 = self.context.mk_eq(n137, n187)
self.nets['A7E_requirements/weapon_class/eq26'] = n188
# A7E_requirements/weapon_class/v27
n189 = self.context.mk_number('61', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v27'] = n189
# A7E_requirements/weapon_class/eq25 -> n190
n190 = self.context.mk_eq(n137, n189)
self.nets['A7E_requirements/weapon_class/eq25'] = n190
# A7E_requirements/weapon_class/v26
n191 = self.context.mk_number('64', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v26'] = n191
# A7E_requirements/weapon_class/eq24 -> n192
n192 = self.context.mk_eq(n137, n191)
self.nets['A7E_requirements/weapon_class/eq24'] = n192
# A7E_requirements/weapon_class/v25
n193 = self.context.mk_number('65', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v25'] = n193
# A7E_requirements/weapon_class/eq29 -> n194
n194 = self.context.mk_eq(n137, n193)
self.nets['A7E_requirements/weapon_class/eq29'] = n194
# A7E_requirements/weapon_class/v31
n195 = self.context.mk_number('68', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v31'] = n195
# A7E_requirements/weapon_class/eq30 -> n196
n196 = self.context.mk_eq(n137, n195)
self.nets['A7E_requirements/weapon_class/eq30'] = n196
# A7E_requirements/weapon_class/v32
n197 = self.context.mk_number('69', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v32'] = n197
# A7E_requirements/weapon_class/eq31 -> n198
n198 = self.context.mk_eq(n137, n197)
self.nets['A7E_requirements/weapon_class/eq31'] = n198
# A7E_requirements/weapon_class/v33
n199 = self.context.mk_number('70', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v33'] = n199
# A7E_requirements/weapon_class/eq32 -> n200
n200 = self.context.mk_eq(n137, n199)
self.nets['A7E_requirements/weapon_class/eq32'] = n200
# A7E_requirements/weapon_class/v34
n201 = self.context.mk_number('72', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v34'] = n201
# A7E_requirements/weapon_class/eq33 -> n202
n202 = self.context.mk_eq(n137, n201)
self.nets['A7E_requirements/weapon_class/eq33'] = n202
# A7E_requirements/weapon_class/v35
n203 = self.context.mk_number('73', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v35'] = n203
# A7E_requirements/weapon_class/eq34 -> n204
n204 = self.context.mk_eq(n137, n203)
self.nets['A7E_requirements/weapon_class/eq34'] = n204
# A7E_requirements/weapon_class/v36
n205 = self.context.mk_number('74', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v36'] = n205
# A7E_requirements/weapon_class/eq35 -> n206
n206 = self.context.mk_eq(n137, n205)
self.nets['A7E_requirements/weapon_class/eq35'] = n206
# A7E_requirements/weapon_class/v37
n207 = self.context.mk_number('77', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v37'] = n207
# A7E_requirements/weapon_class/eq36 -> n208
n208 = self.context.mk_eq(n137, n207)
self.nets['A7E_requirements/weapon_class/eq36'] = n208
# A7E_requirements/weapon_class/v38
n209 = self.context.mk_number('78', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v38'] = n209
# A7E_requirements/weapon_class/eq37 -> n210
n210 = self.context.mk_eq(n137, n209)
self.nets['A7E_requirements/weapon_class/eq37'] = n210
# A7E_requirements/weapon_class/v39
n211 = self.context.mk_number('79', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v39'] = n211
# A7E_requirements/weapon_class/eq38 -> n212
n212 = self.context.mk_eq(n137, n211)
self.nets['A7E_requirements/weapon_class/eq38'] = n212
# A7E_requirements/weapon_class/v40
n213 = self.context.mk_number('95', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v40'] = n213
# A7E_requirements/weapon_class/eq39 -> n214
n214 = self.context.mk_eq(n137, n213)
self.nets['A7E_requirements/weapon_class/eq39'] = n214
# A7E_requirements/weapon_class/Logical Operator5 -> n215
tn34 = self.context.mk_or(n212, n214)
tn33 = self.context.mk_or(n210, tn34)
tn32 = self.context.mk_or(n208, tn33)
tn31 = self.context.mk_or(n206, tn32)
tn30 = self.context.mk_or(n204, tn31)
tn29 = self.context.mk_or(n202, tn30)
tn28 = self.context.mk_or(n200, tn29)
tn27 = self.context.mk_or(n198, tn28)
tn26 = self.context.mk_or(n196, tn27)
tn25 = self.context.mk_or(n194, tn26)
tn24 = self.context.mk_or(n192, tn25)
tn23 = self.context.mk_or(n190, tn24)
tn22 = self.context.mk_or(n188, tn23)
tn21 = self.context.mk_or(n186, tn22)
n215 = self.context.mk_or(n184, tn21)
self.nets['A7E_requirements/weapon_class/Logical Operator5'] = n215
# A7E_requirements/weapon_class/v23
n216 = self.context.mk_number('43', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v23'] = n216
# A7E_requirements/weapon_class/eq22 -> n217
n217 = self.context.mk_eq(n137, n216)
self.nets['A7E_requirements/weapon_class/eq22'] = n217
# A7E_requirements/weapon_class/v22
n218 = self.context.mk_number('45', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v22'] = n218
# A7E_requirements/weapon_class/eq23 -> n219
n219 = self.context.mk_eq(n137, n218)
self.nets['A7E_requirements/weapon_class/eq23'] = n219
# A7E_requirements/weapon_class/Logical Operator4 -> n220
n220 = self.context.mk_or(n217, n219)
self.nets['A7E_requirements/weapon_class/Logical Operator4'] = n220
# A7E_requirements/weapon_class/v21
n221 = self.context.mk_number('41', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v21'] = n221
# A7E_requirements/weapon_class/eq19 -> n222
n222 = self.context.mk_eq(n137, n221)
self.nets['A7E_requirements/weapon_class/eq19'] = n222
# A7E_requirements/weapon_class/v20
n223 = self.context.mk_number('42', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v20'] = n223
# A7E_requirements/weapon_class/eq18 -> n224
n224 = self.context.mk_eq(n137, n223)
self.nets['A7E_requirements/weapon_class/eq18'] = n224
# A7E_requirements/weapon_class/v19
n225 = self.context.mk_number('44', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v19'] = n225
# A7E_requirements/weapon_class/eq17 -> n226
n226 = self.context.mk_eq(n137, n225)
self.nets['A7E_requirements/weapon_class/eq17'] = n226
# A7E_requirements/weapon_class/v18
n227 = self.context.mk_number('46', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v18'] = n227
# A7E_requirements/weapon_class/eq16 -> n228
n228 = self.context.mk_eq(n137, n227)
self.nets['A7E_requirements/weapon_class/eq16'] = n228
# A7E_requirements/weapon_class/v17
n229 = self.context.mk_number('47', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v17'] = n229
# A7E_requirements/weapon_class/eq15 -> n230
n230 = self.context.mk_eq(n137, n229)
self.nets['A7E_requirements/weapon_class/eq15'] = n230
# A7E_requirements/weapon_class/v16
n231 = self.context.mk_number('48', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v16'] = n231
# A7E_requirements/weapon_class/eq20 -> n232
n232 = self.context.mk_eq(n137, n231)
self.nets['A7E_requirements/weapon_class/eq20'] = n232
# A7E_requirements/weapon_class/v24
n233 = self.context.mk_number('55', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v24'] = n233
# A7E_requirements/weapon_class/eq21 -> n234
n234 = self.context.mk_eq(n137, n233)
self.nets['A7E_requirements/weapon_class/eq21'] = n234
# A7E_requirements/weapon_class/Logical Operator3 -> n235
tn39 = self.context.mk_or(n232, n234)
tn38 = self.context.mk_or(n230, tn39)
tn37 = self.context.mk_or(n228, tn38)
tn36 = self.context.mk_or(n226, tn37)
tn35 = self.context.mk_or(n224, tn36)
n235 = self.context.mk_or(n222, tn35)
self.nets['A7E_requirements/weapon_class/Logical Operator3'] = n235
# A7E_requirements/weapon_class/v15
n236 = self.context.mk_number('58', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v15'] = n236
# A7E_requirements/weapon_class/eq14 -> n237
n237 = self.context.mk_eq(n137, n236)
self.nets['A7E_requirements/weapon_class/eq14'] = n237
# A7E_requirements/weapon_class/v14
n238 = self.context.mk_number('57', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v14'] = n238
# A7E_requirements/weapon_class/eq13 -> n239
n239 = self.context.mk_eq(n137, n238)
self.nets['A7E_requirements/weapon_class/eq13'] = n239
# A7E_requirements/weapon_class/v13
n240 = self.context.mk_number('33', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v13'] = n240
# A7E_requirements/weapon_class/eq12 -> n241
n241 = self.context.mk_eq(n137, n240)
self.nets['A7E_requirements/weapon_class/eq12'] = n241
# A7E_requirements/weapon_class/v12
n242 = self.context.mk_number('32', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v12'] = n242
# A7E_requirements/weapon_class/eq11 -> n243
n243 = self.context.mk_eq(n137, n242)
self.nets['A7E_requirements/weapon_class/eq11'] = n243
# A7E_requirements/weapon_class/v11
n244 = self.context.mk_number('31', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v11'] = n244
# A7E_requirements/weapon_class/eq10 -> n245
n245 = self.context.mk_eq(n137, n244)
self.nets['A7E_requirements/weapon_class/eq10'] = n245
# A7E_requirements/weapon_class/v10
n246 = self.context.mk_number('30', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v10'] = n246
# A7E_requirements/weapon_class/eq6 -> n247
n247 = self.context.mk_eq(n137, n246)
self.nets['A7E_requirements/weapon_class/eq6'] = n247
# A7E_requirements/weapon_class/v9
n248 = self.context.mk_number('24', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v9'] = n248
# A7E_requirements/weapon_class/eq7 -> n249
n249 = self.context.mk_eq(n137, n248)
self.nets['A7E_requirements/weapon_class/eq7'] = n249
# A7E_requirements/weapon_class/v8
n250 = self.context.mk_number('22', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v8'] = n250
# A7E_requirements/weapon_class/eq8 -> n251
n251 = self.context.mk_eq(n137, n250)
self.nets['A7E_requirements/weapon_class/eq8'] = n251
# A7E_requirements/weapon_class/v7
n252 = self.context.mk_number('21', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v7'] = n252
# A7E_requirements/weapon_class/eq9 -> n253
n253 = self.context.mk_eq(n137, n252)
self.nets['A7E_requirements/weapon_class/eq9'] = n253
# A7E_requirements/weapon_class/Logical Operator2 -> n254
tn46 = self.context.mk_or(n251, n253)
tn45 = self.context.mk_or(n249, tn46)
tn44 = self.context.mk_or(n247, tn45)
tn43 = self.context.mk_or(n245, tn44)
tn42 = self.context.mk_or(n243, tn43)
tn41 = self.context.mk_or(n241, tn42)
tn40 = self.context.mk_or(n239, tn41)
n254 = self.context.mk_or(n237, tn40)
self.nets['A7E_requirements/weapon_class/Logical Operator2'] = n254
# A7E_requirements/weapon_class/v6
n255 = self.context.mk_number('17', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v6'] = n255
# A7E_requirements/weapon_class/eq5 -> n256
n256 = self.context.mk_eq(n137, n255)
self.nets['A7E_requirements/weapon_class/eq5'] = n256
# A7E_requirements/weapon_class/v5
n257 = self.context.mk_number('10', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v5'] = n257
# A7E_requirements/weapon_class/eq4 -> n258
n258 = self.context.mk_eq(n137, n257)
self.nets['A7E_requirements/weapon_class/eq4'] = n258
# A7E_requirements/weapon_class/v4
n259 = self.context.mk_number('2', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v4'] = n259
# A7E_requirements/weapon_class/eq2 -> n260
n260 = self.context.mk_eq(n137, n259)
self.nets['A7E_requirements/weapon_class/eq2'] = n260
# A7E_requirements/weapon_class/v3
n261 = self.context.mk_number('3', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v3'] = n261
# A7E_requirements/weapon_class/eq3 -> n262
n262 = self.context.mk_eq(n137, n261)
self.nets['A7E_requirements/weapon_class/eq3'] = n262
# A7E_requirements/weapon_class/Logical Operator1 -> n263
n263 = self.context.mk_or(n260, n262)
self.nets['A7E_requirements/weapon_class/Logical Operator1'] = n263
# A7E_requirements/weapon_class/v1
n264 = self.context.mk_number('0', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v1'] = n264
# A7E_requirements/weapon_class/eq -> n265
n265 = self.context.mk_eq(n137, n264)
self.nets['A7E_requirements/weapon_class/eq'] = n265
# A7E_requirements/weapon_class/v2
n266 = self.context.mk_number('13', self.context.mk_int8_type())
self.nets['A7E_requirements/weapon_class/v2'] = n266
# A7E_requirements/weapon_class/eq1 -> n267
n267 = self.context.mk_eq(n137, n266)
self.nets['A7E_requirements/weapon_class/eq1'] = n267
# A7E_requirements/weapon_class/Logical Operator -> n268
n268 = self.context.mk_or(n265, n267)
self.nets['A7E_requirements/weapon_class/Logical Operator'] = n268
# A7E_requirements/weapon_class/Switch
n269 = self.context.mk_ite(n268, n139, n138)
self.nets['A7E_requirements/weapon_class/Switch'] = n269
# A7E_requirements/weapon_class/Switch1
n270 = self.context.mk_ite(n263, n140, n269)
self.nets['A7E_requirements/weapon_class/Switch1'] = n270
# A7E_requirements/weapon_class/Switch2
n271 = self.context.mk_ite(n258, n141, n270)
self.nets['A7E_requirements/weapon_class/Switch2'] = n271
# A7E_requirements/weapon_class/Switch3
n272 = self.context.mk_ite(n256, n142, n271)
self.nets['A7E_requirements/weapon_class/Switch3'] = n272
# A7E_requirements/weapon_class/Switch4
n273 = self.context.mk_ite(n254, n143, n272)
self.nets['A7E_requirements/weapon_class/Switch4'] = n273
# A7E_requirements/weapon_class/Switch5
n274 = self.context.mk_ite(n235, n144, n273)
self.nets['A7E_requirements/weapon_class/Switch5'] = n274
# A7E_requirements/weapon_class/Switch6
n275 = self.context.mk_ite(n220, n145, n274)
self.nets['A7E_requirements/weapon_class/Switch6'] = n275
# A7E_requirements/weapon_class/Switch7
n276 = self.context.mk_ite(n215, n146, n275)
self.nets['A7E_requirements/weapon_class/Switch7'] = n276
# A7E_requirements/weapon_class/Switch8
n277 = self.context.mk_ite(n182, n147, n276)
self.nets['A7E_requirements/weapon_class/Switch8'] = n277
# A7E_requirements/weapon_class/Switch9
n278 = self.context.mk_ite(n165, n148, n277)
self.nets['A7E_requirements/weapon_class/Switch9'] = n278
# A7E_requirements/weapon_class/Switch10
n279 = self.context.mk_ite(n160, n149, n278)
self.nets['A7E_requirements/weapon_class/Switch10'] = n279
# A7E_requirements/weapon_class/Switch11
n280 = self.context.mk_ite(n155, n150, n279)
self.nets['A7E_requirements/weapon_class/Switch11'] = n280
# n280 -> weapon_class
return n280
def InitialState(self, n282, n283, n284, n285, n286, n287, n288, n289, n290, n291, n292, n293):
# Purpose: build the combinational network for the
# A7E_requirements/ANT/InitialState subsystem.  Selects an initial mode code
# (an int8 net) based on the IMS status/mode inputs, via a cascade of
# if-then-else "Switch" nets that defaults to the Error code 255.
#
# Boolean condition inputs:
# IMSup -> n282
# IMSMODE=Gndal -> n283
# IMSMODE=Iner -> n284
# IMSMODE=Norm -> n285
# IMSMODE=MagSl -> n286
# IMSMODE=Grid -> n287
# Data23=Sea -> n288
# Int8 mode-code inputs (the values returned when the matching condition
# holds; at the call site in ANT() these are the Landaln/OLB/MagSl/Grid/
# IMSfail constants):
# Landaln -> n289
# OLB -> n290
# MagSl -> n291
# Grid -> n292
# IMSfail -> n293
# A7E_requirements/ANT/InitialState/not2 -> n294
# n294 = NOT IMSup
n294 = self.context.mk_not(n282)
self.nets['A7E_requirements/ANT/InitialState/not2'] = n294
# A7E_requirements/ANT/InitialState/and5 -> n295
# n295 = IMSup AND IMSMODE=Grid
n295 = self.context.mk_and(n282, n287)
self.nets['A7E_requirements/ANT/InitialState/and5'] = n295
# A7E_requirements/ANT/InitialState/and4 -> n296
# n296 = IMSup AND IMSMODE=MagSl
n296 = self.context.mk_and(n282, n286)
self.nets['A7E_requirements/ANT/InitialState/and4'] = n296
# A7E_requirements/ANT/InitialState/and3 -> n297
# n297 = IMSMODE=Gndal AND Data23=Sea
n297 = self.context.mk_and(n283, n288)
self.nets['A7E_requirements/ANT/InitialState/and3'] = n297
# A7E_requirements/ANT/InitialState/or1 -> n298
# n298 = IMSMODE=Iner OR IMSMODE=Norm OR (IMSMODE=Gndal AND Data23=Sea)
tn47 = self.context.mk_or(n285, n297)
n298 = self.context.mk_or(n284, tn47)
self.nets['A7E_requirements/ANT/InitialState/or1'] = n298
# A7E_requirements/ANT/InitialState/and2 -> n299
n299 = self.context.mk_and(n282, n298)
self.nets['A7E_requirements/ANT/InitialState/and2'] = n299
# A7E_requirements/ANT/InitialState/not1 -> n300
n300 = self.context.mk_not(n288)
self.nets['A7E_requirements/ANT/InitialState/not1'] = n300
# A7E_requirements/ANT/InitialState/and1 -> n301
# n301 = IMSup AND IMSMODE=Gndal AND NOT Data23=Sea
tn48 = self.context.mk_and(n283, n300)
n301 = self.context.mk_and(n282, tn48)
self.nets['A7E_requirements/ANT/InitialState/and1'] = n301
# A7E_requirements/ANT/InitialState/Error
# Fallback mode code when no condition below holds.
n302 = self.context.mk_number('255', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/InitialState/Error'] = n302
# Switch cascade: later switches override earlier ones, so the effective
# priority (highest first) is:
#   NOT IMSup                     -> IMSfail  (n293)
#   IMSup AND Grid                -> Grid     (n292)
#   IMSup AND MagSl               -> MagSl    (n291)
#   IMSup AND (Iner|Norm|Gndal&Sea) -> OLB    (n290)
#   IMSup AND Gndal AND NOT Sea   -> Landaln  (n289)
#   otherwise                     -> Error (255)
# A7E_requirements/ANT/InitialState/Switch
n303 = self.context.mk_ite(n301, n289, n302)
self.nets['A7E_requirements/ANT/InitialState/Switch'] = n303
# A7E_requirements/ANT/InitialState/Switch1
n304 = self.context.mk_ite(n299, n290, n303)
self.nets['A7E_requirements/ANT/InitialState/Switch1'] = n304
# A7E_requirements/ANT/InitialState/Switch2
n305 = self.context.mk_ite(n296, n291, n304)
self.nets['A7E_requirements/ANT/InitialState/Switch2'] = n305
# A7E_requirements/ANT/InitialState/Switch3
n306 = self.context.mk_ite(n295, n292, n305)
self.nets['A7E_requirements/ANT/InitialState/Switch3'] = n306
# A7E_requirements/ANT/InitialState/Switch4
n307 = self.context.mk_ite(n294, n293, n306)
self.nets['A7E_requirements/ANT/InitialState/Switch4'] = n307
# n307 -> InitialState
return n307
def ANT(self, n309, n310, n311, n312, n313, n314, n315, n316, n317, n318, n319, n320, n321, n322, n323, n324, n325, n326, n327, n328, n329, n330, n331, n332, n333, n334, n335):
# Purpose: build the A7E_requirements/ANT mode machine.  Creates one-step
# delay latches for every boolean input plus the previous mode code, computes
# the initial mode combinationally (InitialState), and drives a table-based
# mode-transition network (ip.scr.mk_scr, loaded from the 'ANT' fixture) with
# the current inputs, the previous-cycle inputs, the set of mode-code
# constants, and the previous mode.  Returns the int8 net holding the current
# mode code.
#
# Boolean inputs (current cycle):
# presentPositionEntered -> n309
# ACAIRB=Yes -> n310
# IMSAUTOC=On -> n311
# Desig -> n312
# Data23=Sea -> n313
# CAstageComplete -> n314
# CLstageComplete -> n315
# NDstageComplete -> n316
# HSstageComplete -> n317
# PNLTEST=TEST -> n318
# IMSup -> n319
# latitude>70 -> n320
# latitude>80 -> n321
# DopplerUp -> n322
# DopplerCoupled -> n323
# IMSMODE=Gndal -> n324
# IMSMODE=Norm -> n325
# IMSMODE=Iner -> n326
# IMSMODE=MagSl -> n327
# IMSMODE=Grid -> n328
# AirVelocityTestPassed -> n329
# PitchSmall AND RollSmall -> n330
# SINSup -> n331
# SINSvelocityTestPassed -> n332
# LandVelocityTestPassed -> n333
# NoInterveningTakeoff -> n334
# GroundTestFinished -> n335
# Delay registers.  Init (n336) is initialized true and steps to the constant
# false net, so it is true only on the very first cycle.  Each Past* latch is
# wired (in the init/next section at the bottom) to step to one of the inputs,
# so it holds that input's previous-cycle value.  Past(Mode) holds the
# previous mode code.  Note the generated numbering is permuted relative to
# the input order (e.g. Past2 <- n311, Past3 <- n312).
n336 = self.context.mk_latch('A7E_requirements/ANT/Init', self.context.mk_boolean_type())
n337 = self.context.mk_latch('A7E_requirements/ANT/Past', self.context.mk_boolean_type())
n338 = self.context.mk_latch('A7E_requirements/ANT/Past1', self.context.mk_boolean_type())
n339 = self.context.mk_latch('A7E_requirements/ANT/Past10', self.context.mk_boolean_type())
n340 = self.context.mk_latch('A7E_requirements/ANT/Past11', self.context.mk_boolean_type())
n341 = self.context.mk_latch('A7E_requirements/ANT/Past12', self.context.mk_boolean_type())
n342 = self.context.mk_latch('A7E_requirements/ANT/Past13', self.context.mk_boolean_type())
n343 = self.context.mk_latch('A7E_requirements/ANT/Past14', self.context.mk_boolean_type())
n344 = self.context.mk_latch('A7E_requirements/ANT/Past15', self.context.mk_boolean_type())
n345 = self.context.mk_latch('A7E_requirements/ANT/Past16', self.context.mk_boolean_type())
n346 = self.context.mk_latch('A7E_requirements/ANT/Past17', self.context.mk_boolean_type())
n347 = self.context.mk_latch('A7E_requirements/ANT/Past18', self.context.mk_boolean_type())
n348 = self.context.mk_latch('A7E_requirements/ANT/Past19', self.context.mk_boolean_type())
n349 = self.context.mk_latch('A7E_requirements/ANT/Past7', self.context.mk_boolean_type())
n350 = self.context.mk_latch('A7E_requirements/ANT/Past6', self.context.mk_boolean_type())
n351 = self.context.mk_latch('A7E_requirements/ANT/Past9', self.context.mk_boolean_type())
n352 = self.context.mk_latch('A7E_requirements/ANT/Past8', self.context.mk_boolean_type())
n353 = self.context.mk_latch('A7E_requirements/ANT/Past3', self.context.mk_boolean_type())
n354 = self.context.mk_latch('A7E_requirements/ANT/Past2', self.context.mk_boolean_type())
n355 = self.context.mk_latch('A7E_requirements/ANT/Past5', self.context.mk_boolean_type())
n356 = self.context.mk_latch('A7E_requirements/ANT/Past4', self.context.mk_boolean_type())
n357 = self.context.mk_latch('A7E_requirements/ANT/Past(Mode)', self.context.mk_int8_type())
n358 = self.context.mk_latch('A7E_requirements/ANT/Past20', self.context.mk_boolean_type())
n359 = self.context.mk_latch('A7E_requirements/ANT/Past21', self.context.mk_boolean_type())
n360 = self.context.mk_latch('A7E_requirements/ANT/Past22', self.context.mk_boolean_type())
n361 = self.context.mk_latch('A7E_requirements/ANT/Past23', self.context.mk_boolean_type())
n362 = self.context.mk_latch('A7E_requirements/ANT/Past24', self.context.mk_boolean_type())
n363 = self.context.mk_latch('A7E_requirements/ANT/Past25', self.context.mk_boolean_type())
n364 = self.context.mk_latch('A7E_requirements/ANT/Past26', self.context.mk_boolean_type())
# A7E_requirements/ANT/false
n365 = self.context.mk_false()
self.nets['A7E_requirements/ANT/false'] = n365
# Int8 mode-code constants for this subsystem.
# A7E_requirements/ANT/Landaln
n366 = self.context.mk_number('2', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/Landaln'] = n366
# A7E_requirements/ANT/OLB
n367 = self.context.mk_number('11', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/OLB'] = n367
# A7E_requirements/ANT/MagSl
n368 = self.context.mk_number('12', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/MagSl'] = n368
# A7E_requirements/ANT/Grid
n369 = self.context.mk_number('13', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/Grid'] = n369
# A7E_requirements/ANT/IMSfail
n370 = self.context.mk_number('14', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/IMSfail'] = n370
# Initial mode, computed combinationally from the current IMS inputs.
n371_1 = self.InitialState(n319, n324, n326, n325, n327, n328, n313, n366, n367, n368, n369, n370)
# Bus Creator
# Current-cycle inputs, in declared order n309..n335.
n372 = [n309, n310, n311, n312, n313, n314, n315, n316, n317, n318, n319, n320, n321, n322, n323, n324, n325, n326, n327, n328, n329, n330, n331, n332, n333, n334, n335]
# Bus Creator1
# Previous-cycle inputs in the SAME order as n372: given the latch wiring
# below, this list is the previous values of n309..n335 element-for-element.
n373 = [n337, n338, n354, n353, n356, n355, n350, n349, n352, n351, n339, n340, n341, n342, n343, n344, n345, n346, n347, n348, n358, n359, n360, n361, n362, n363, n364]
# Remaining mode-code constants.
# A7E_requirements/ANT/Lautocal
n374 = self.context.mk_number('0', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/Lautocal'] = n374
# A7E_requirements/ANT/Sautocal
n375 = self.context.mk_number('1', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/Sautocal'] = n375
# A7E_requirements/ANT/SINSaln
n376 = self.context.mk_number('3', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/SINSaln'] = n376
# A7E_requirements/ANT/01Update
n377 = self.context.mk_number('4', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/01Update'] = n377
# A7E_requirements/ANT/HUDaln
n378 = self.context.mk_number('5', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/HUDaln'] = n378
# A7E_requirements/ANT/Airaln
n379 = self.context.mk_number('6', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/Airaln'] = n379
# A7E_requirements/ANT/DIG
n380 = self.context.mk_number('7', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/DIG'] = n380
# A7E_requirements/ANT/DI
n381 = self.context.mk_number('8', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/DI'] = n381
# A7E_requirements/ANT/I
n382 = self.context.mk_number('9', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/I'] = n382
# A7E_requirements/ANT/UDI
n383 = self.context.mk_number('10', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/UDI'] = n383
# A7E_requirements/ANT/PolarDI
n384 = self.context.mk_number('15', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/PolarDI'] = n384
# A7E_requirements/ANT/PolarI
n385 = self.context.mk_number('16', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/PolarI'] = n385
# A7E_requirements/ANT/Grtest
n386 = self.context.mk_number('17', self.context.mk_int8_type())
self.nets['A7E_requirements/ANT/Grtest'] = n386
# Bus Creator2
# All 18 ANT mode codes (values 0..17) handed to the transition table.
n387 = [n374, n375, n366, n376, n377, n378, n379, n380, n381, n382, n383, n367, n368, n369, n370, n384, n385, n386]
# A7E_requirements/ANT/Mode1
# Previous mode seen by the table: the initial mode on the first cycle
# (Init true), the Past(Mode) latch afterwards.
n388 = self.context.mk_ite(n336, n371_1, n357)
self.nets['A7E_requirements/ANT/Mode1'] = n388
# Mode-transition network built from the 'ANT' fixture.
# NOTE(review): mk_scr argument roles (current inputs, previous inputs, mode
# codes, previous mode) are inferred from this call pattern; confirm against
# ip.scr.mk_scr's definition.
n389_1 = ip.scr.mk_scr(self.context, from_fixture_path('ANT'), n372, n373, n387, n388)
# A7E_requirements/ANT/Mode
# Output: initial mode on the first cycle, table result afterwards.
n390 = self.context.mk_ite(n336, n371_1, n389_1)
self.nets['A7E_requirements/ANT/Mode'] = n390
# Latch init/next wiring.  Every latch is initialized (first-cycle value) and
# given its next-state net.  Init: starts true, steps to false.
in336 = self.context.mk_true()
self.context.set_latch_init_next(n336, in336, n365)
in337 = self.context.mk_true()
self.context.set_latch_init_next(n337, in337, n309)
in338 = self.context.mk_true()
self.context.set_latch_init_next(n338, in338, n310)
in339 = self.context.mk_true()
self.context.set_latch_init_next(n339, in339, n319)
in340 = self.context.mk_true()
self.context.set_latch_init_next(n340, in340, n320)
in341 = self.context.mk_true()
self.context.set_latch_init_next(n341, in341, n321)
in342 = self.context.mk_true()
self.context.set_latch_init_next(n342, in342, n322)
in343 = self.context.mk_true()
self.context.set_latch_init_next(n343, in343, n323)
in344 = self.context.mk_true()
self.context.set_latch_init_next(n344, in344, n324)
in345 = self.context.mk_true()
self.context.set_latch_init_next(n345, in345, n325)
in346 = self.context.mk_true()
self.context.set_latch_init_next(n346, in346, n326)
in347 = self.context.mk_true()
self.context.set_latch_init_next(n347, in347, n327)
in348 = self.context.mk_true()
self.context.set_latch_init_next(n348, in348, n328)
in349 = self.context.mk_true()
self.context.set_latch_init_next(n349, in349, n316)
in350 = self.context.mk_true()
self.context.set_latch_init_next(n350, in350, n315)
in351 = self.context.mk_true()
self.context.set_latch_init_next(n351, in351, n318)
in352 = self.context.mk_true()
self.context.set_latch_init_next(n352, in352, n317)
in353 = self.context.mk_true()
self.context.set_latch_init_next(n353, in353, n312)
in354 = self.context.mk_true()
self.context.set_latch_init_next(n354, in354, n311)
in355 = self.context.mk_true()
self.context.set_latch_init_next(n355, in355, n314)
in356 = self.context.mk_true()
self.context.set_latch_init_next(n356, in356, n313)
# Past(Mode): initialized to code '1' (= Sautocal above), tracks the output.
in357 = self.context.mk_number('1', self.context.mk_int8_type())
self.context.set_latch_init_next(n357, in357, n390)
in358 = self.context.mk_true()
self.context.set_latch_init_next(n358, in358, n329)
in359 = self.context.mk_true()
self.context.set_latch_init_next(n359, in359, n330)
in360 = self.context.mk_true()
self.context.set_latch_init_next(n360, in360, n331)
in361 = self.context.mk_true()
self.context.set_latch_init_next(n361, in361, n332)
in362 = self.context.mk_true()
self.context.set_latch_init_next(n362, in362, n333)
in363 = self.context.mk_true()
self.context.set_latch_init_next(n363, in363, n334)
in364 = self.context.mk_true()
self.context.set_latch_init_next(n364, in364, n335)
# n390 -> ANT
return n390
def NU(self, n392, n393, n394, n395, n396, n397, n398, n399, n400, n401, n402, n403, n404, n405, n406, n407, n408, n409):
# Purpose: build the A7E_requirements/NU (update) mode machine, following the
# same generated pattern as ANT(): one-step delay latches for every boolean
# input plus the previous mode code, a table-based mode-transition network
# (ip.scr.mk_scr from the 'NU' fixture), and first-cycle initialization.
# Unlike ANT there is no combinational InitialState block: the first-cycle
# mode is simply the UNone constant.  Returns the int8 net holding the
# current NU mode code.
#
# Boolean inputs (current cycle):
# StationSelected -> n392
# ACAIRB=Yes -> n393
# MODEROT=PRESPOS -> n394
# PRESPOS=UPDATE -> n395
# UPDATTW=FLYOVER -> n396
# UPDATTW=HUD -> n397
# UPDATTW=RADAR -> n398
# UPDATTW=TACLL -> n399
# UPDATTW=Other -> n400
# GUNNSEL=Yes -> n401
# WDMFS -> n402
# Data23=Sea -> n403
# WeaponMode=BOC -> n404
# MSFW=None -> n405
# MSFW=TF -> n406
# NonZeroDigitEntered -> n407
# ENTERSW=On -> n408
# FLYTOchanged -> n409
# Delay registers.  Init (n410) starts true and steps to false, so it is true
# only on the first cycle.  Each Past* latch steps to one of the inputs (see
# the init/next wiring below) and therefore holds its previous-cycle value;
# Past(Mode) holds the previous mode code.
n410 = self.context.mk_latch('A7E_requirements/NU/Init', self.context.mk_boolean_type())
n411 = self.context.mk_latch('A7E_requirements/NU/Past', self.context.mk_boolean_type())
n412 = self.context.mk_latch('A7E_requirements/NU/Past1', self.context.mk_boolean_type())
n413 = self.context.mk_latch('A7E_requirements/NU/Past10', self.context.mk_boolean_type())
n414 = self.context.mk_latch('A7E_requirements/NU/Past11', self.context.mk_boolean_type())
n415 = self.context.mk_latch('A7E_requirements/NU/Past12', self.context.mk_boolean_type())
n416 = self.context.mk_latch('A7E_requirements/NU/Past13', self.context.mk_boolean_type())
n417 = self.context.mk_latch('A7E_requirements/NU/Past14', self.context.mk_boolean_type())
n418 = self.context.mk_latch('A7E_requirements/NU/Past15', self.context.mk_boolean_type())
n419 = self.context.mk_latch('A7E_requirements/NU/Past16', self.context.mk_boolean_type())
n420 = self.context.mk_latch('A7E_requirements/NU/Past17', self.context.mk_boolean_type())
n421 = self.context.mk_latch('A7E_requirements/NU/Past7', self.context.mk_boolean_type())
n422 = self.context.mk_latch('A7E_requirements/NU/Past6', self.context.mk_boolean_type())
n423 = self.context.mk_latch('A7E_requirements/NU/Past9', self.context.mk_boolean_type())
n424 = self.context.mk_latch('A7E_requirements/NU/Past8', self.context.mk_boolean_type())
n425 = self.context.mk_latch('A7E_requirements/NU/Past3', self.context.mk_boolean_type())
n426 = self.context.mk_latch('A7E_requirements/NU/Past2', self.context.mk_boolean_type())
n427 = self.context.mk_latch('A7E_requirements/NU/Past5', self.context.mk_boolean_type())
n428 = self.context.mk_latch('A7E_requirements/NU/Past4', self.context.mk_boolean_type())
n429 = self.context.mk_latch('A7E_requirements/NU/Past(Mode)', self.context.mk_int8_type())
# A7E_requirements/NU/false
n430 = self.context.mk_false()
self.nets['A7E_requirements/NU/false'] = n430
# Int8 mode-code constants for this subsystem (values 18..24).
# A7E_requirements/NU/UNone
n431 = self.context.mk_number('18', self.context.mk_int8_type())
self.nets['A7E_requirements/NU/UNone'] = n431
# Bus Creator
# Current-cycle inputs, in declared order n392..n409.
n432 = [n392, n393, n394, n395, n396, n397, n398, n399, n400, n401, n402, n403, n404, n405, n406, n407, n408, n409]
# Bus Creator1
# Previous-cycle inputs in the SAME order as n432: given the latch wiring
# below, this is the previous values of n392..n409 element-for-element.
n433 = [n411, n412, n426, n425, n428, n427, n422, n421, n424, n423, n413, n414, n415, n416, n417, n418, n419, n420]
# A7E_requirements/NU/HUDUpd
n434 = self.context.mk_number('19', self.context.mk_int8_type())
self.nets['A7E_requirements/NU/HUDUpd'] = n434
# A7E_requirements/NU/RadarUpd
n435 = self.context.mk_number('20', self.context.mk_int8_type())
self.nets['A7E_requirements/NU/RadarUpd'] = n435
# A7E_requirements/NU/FlyUpd
n436 = self.context.mk_number('21', self.context.mk_int8_type())
self.nets['A7E_requirements/NU/FlyUpd'] = n436
# A7E_requirements/NU/AflyUpd
n437 = self.context.mk_number('22', self.context.mk_int8_type())
self.nets['A7E_requirements/NU/AflyUpd'] = n437
# A7E_requirements/NU/MapUpd
n438 = self.context.mk_number('23', self.context.mk_int8_type())
self.nets['A7E_requirements/NU/MapUpd'] = n438
# A7E_requirements/NU/TacUpd
n439 = self.context.mk_number('24', self.context.mk_int8_type())
self.nets['A7E_requirements/NU/TacUpd'] = n439
# Bus Creator2
# The 7 NU mode codes handed to the transition table.
n440 = [n431, n434, n435, n436, n437, n438, n439]
# A7E_requirements/NU/Mode1
# Previous mode seen by the table: UNone on the first cycle (Init true),
# the Past(Mode) latch afterwards.
n441 = self.context.mk_ite(n410, n431, n429)
self.nets['A7E_requirements/NU/Mode1'] = n441
# Mode-transition network built from the 'NU' fixture.
# NOTE(review): mk_scr argument roles (current inputs, previous inputs, mode
# codes, previous mode) are inferred from this call pattern; confirm against
# ip.scr.mk_scr's definition.
n442_1 = ip.scr.mk_scr(self.context, from_fixture_path('NU'), n432, n433, n440, n441)
# A7E_requirements/NU/Mode
# Output: UNone on the first cycle, table result afterwards.
n443 = self.context.mk_ite(n410, n431, n442_1)
self.nets['A7E_requirements/NU/Mode'] = n443
# Latch init/next wiring.  Init: starts true, steps to false.
in410 = self.context.mk_true()
self.context.set_latch_init_next(n410, in410, n430)
in411 = self.context.mk_true()
self.context.set_latch_init_next(n411, in411, n392)
in412 = self.context.mk_true()
self.context.set_latch_init_next(n412, in412, n393)
in413 = self.context.mk_true()
self.context.set_latch_init_next(n413, in413, n402)
in414 = self.context.mk_true()
self.context.set_latch_init_next(n414, in414, n403)
in415 = self.context.mk_true()
self.context.set_latch_init_next(n415, in415, n404)
in416 = self.context.mk_true()
self.context.set_latch_init_next(n416, in416, n405)
in417 = self.context.mk_true()
self.context.set_latch_init_next(n417, in417, n406)
in418 = self.context.mk_true()
self.context.set_latch_init_next(n418, in418, n407)
in419 = self.context.mk_true()
self.context.set_latch_init_next(n419, in419, n408)
in420 = self.context.mk_true()
self.context.set_latch_init_next(n420, in420, n409)
in421 = self.context.mk_true()
self.context.set_latch_init_next(n421, in421, n399)
in422 = self.context.mk_true()
self.context.set_latch_init_next(n422, in422, n398)
in423 = self.context.mk_true()
self.context.set_latch_init_next(n423, in423, n401)
in424 = self.context.mk_true()
self.context.set_latch_init_next(n424, in424, n400)
in425 = self.context.mk_true()
self.context.set_latch_init_next(n425, in425, n395)
in426 = self.context.mk_true()
self.context.set_latch_init_next(n426, in426, n394)
in427 = self.context.mk_true()
self.context.set_latch_init_next(n427, in427, n397)
in428 = self.context.mk_true()
self.context.set_latch_init_next(n428, in428, n396)
# Past(Mode): initialized to code '1', tracks the output.
# NOTE(review): '1' is not one of the NU mode codes (18..24); likely a global
# initial code shared across subsystems -- confirm against the generator.
in429 = self.context.mk_number('1', self.context.mk_int8_type())
self.context.set_latch_init_next(n429, in429, n443)
# n443 -> NU
return n443
def WD(self, n445, n446, n447, n448, n449, n450, n451, n452, n453, n454, n455, n456, n457, n458, n459, n460, n461, n462, n463, n464, n465, n466, n467, n468, n469, n470, n471):
    """Build the A7E 'Weapon Delivery' (WD) mode logic.

    Inputs n445..n471 are, in order: ReadyStation, HUDREL=Yes,
    ReservedWeapon, Special, Rockets, Guns, OnWalleye, Shrike,
    OtherWeapon, GUNSSEL=No, FLYTOTW=0, FLYTOTW=reset, FLYTOTOG=Dest,
    WDMFS, MFSW=BOC, MFSW=BOCOFF, MFSW=CCIP, MFSW=NATT, MFSW=NATOFF,
    Desig, Redesignate, AnyDestEntered, HighDrag, LowDrag, OverflownExit,
    Overflown>42, ACAIRB=Yes.

    Returns the 'A7E_requirements/WD/Mode' net.  Every context node is
    created in exactly the same order as the generated original.
    """
    ctx = self.context
    prefix = 'A7E_requirements/WD/'
    # One boolean latch per delayed ("Past") signal, created in the exact
    # original order; n472 is the one-shot Init flag.
    (n472, n473, n474, n475, n476, n477, n478, n479, n480, n481, n482,
     n483, n484, n485, n486, n487, n488, n489, n490, n491, n492) = [
        ctx.mk_latch(prefix + suffix, ctx.mk_boolean_type())
        for suffix in ('Init', 'Past', 'Past1', 'Past10', 'Past11',
                       'Past12', 'Past13', 'Past14', 'Past15', 'Past16',
                       'Past17', 'Past18', 'Past19', 'Past7', 'Past6',
                       'Past9', 'Past8', 'Past3', 'Past2', 'Past5',
                       'Past4')]
    # Previous-step mode value (an int8 code).
    n493 = ctx.mk_latch(prefix + 'Past(Mode)', ctx.mk_int8_type())
    (n494, n495, n496, n497, n498, n499, n500, n501, n502) = [
        ctx.mk_latch(prefix + ('Past%d' % i), ctx.mk_boolean_type())
        for i in range(20, 29)]
    n503 = ctx.mk_false()
    self.nets[prefix + 'false'] = n503
    # WNone (code 25): the "no weapon mode" constant and initial mode.
    n504 = ctx.mk_number('25', ctx.mk_int8_type())
    self.nets[prefix + 'WNone'] = n504
    # Mode1: previous mode, forced to WNone on the very first step.
    n505 = ctx.mk_ite(n472, n504, n493)
    self.nets[prefix + 'Mode1'] = n505
    n506 = ctx.mk_number('26', ctx.mk_int8_type())
    self.nets[prefix + 'OFF_MFS'] = n506
    n507 = ctx.mk_eq(n505, n506)
    self.nets[prefix + 'Past(In OFF_MFSW)'] = n507
    n508 = ctx.mk_number('27', ctx.mk_int8_type())
    self.nets[prefix + 'WD_MFS'] = n508
    n509 = ctx.mk_eq(n505, n508)
    self.nets[prefix + 'Past(In WD_MFSW)'] = n509
    # Bus Creator: current inputs plus the two derived mode predicates.
    n510 = [n445, n446, n447, n448, n449, n450, n451, n452, n453, n454,
            n455, n456, n457, n458, n459, n460, n461, n462, n463, n464,
            n465, n466, n467, n468, n469, n470, n471, n507, n509]
    # Bus Creator1: the matching previous-step values, same signal order.
    n511 = [n473, n474, n490, n489, n492, n491, n486, n485, n488, n487,
            n475, n476, n477, n478, n479, n480, n481, n482, n483, n484,
            n494, n495, n496, n497, n498, n499, n500, n501, n502]
    # Mode constants 28..45, registered under their subsystem names.
    mode_consts = []
    for code, name in enumerate(('Nattack', 'Noffset', 'BOC', 'BOCFlyto0',
                                 'BOCoffset', 'CCIP', 'HUDdown1',
                                 'HUDdown2', 'AG_Guns', 'AA_Guns',
                                 'Manrip', 'AA_Manrip', 'Snattack',
                                 'Snoffset', 'SBOC', 'SBOCFlyto0',
                                 'SBOCoffset', 'Walleye'), 28):
        const = ctx.mk_number(str(code), ctx.mk_int8_type())
        self.nets[prefix + name] = const
        mode_consts.append(const)
    # Bus Creator2: every mode constant the SCR table may output.
    n530 = [n504, n506, n508] + mode_consts
    n531_1 = ip.scr.mk_scr(ctx, from_fixture_path('WD'),
                           n510, n511, n530, n505)
    # Mode: WNone while initialising, afterwards the SCR table output.
    n532 = ctx.mk_ite(n472, n504, n531_1)
    self.nets[prefix + 'Mode'] = n532
    # Init starts true and latches to false after the first step.
    ctx.set_latch_init_next(n472, ctx.mk_true(), n503)
    # Each boolean Past* latch starts true and tracks its input signal.
    for latch, signal in ((n473, n445), (n474, n446), (n475, n455),
                          (n476, n456), (n477, n457), (n478, n458),
                          (n479, n459), (n480, n460), (n481, n461),
                          (n482, n462), (n483, n463), (n484, n464),
                          (n485, n452), (n486, n451), (n487, n454),
                          (n488, n453), (n489, n448), (n490, n447),
                          (n491, n450), (n492, n449)):
        ctx.set_latch_init_next(latch, ctx.mk_true(), signal)
    # Past(Mode) starts at 1 and tracks the computed mode.
    ctx.set_latch_init_next(n493, ctx.mk_number('1', ctx.mk_int8_type()),
                            n532)
    for latch, signal in ((n494, n465), (n495, n466), (n496, n467),
                          (n497, n468), (n498, n469), (n499, n470),
                          (n500, n471), (n501, n507), (n502, n509)):
        ctx.set_latch_init_next(latch, ctx.mk_true(), signal)
    # n532 -> WD
    return n532
| 53.117647 | 189 | 0.65542 | 10,617 | 81,270 | 4.789865 | 0.107092 | 0.19143 | 0.205018 | 0.129803 | 0.666516 | 0.585716 | 0.367306 | 0.259567 | 0.249872 | 0.249872 | 0 | 0.127423 | 0.217436 | 81,270 | 1,529 | 190 | 53.152387 | 0.67218 | 0.174529 | 0 | 0 | 0 | 0 | 0.177711 | 0.151478 | 0 | 0 | 0 | 0 | 0 | 1 | 0.007744 | false | 0.005808 | 0.00484 | 0 | 0.019361 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
243cee6a2d485166a72cb4091ffd65db1d0e6244 | 6,379 | py | Python | app/urls.py | silop4all/iam | 691316ca9ea7d3d10a3197b4d028e44e333c7060 | [
"Apache-2.0"
] | 2 | 2018-02-27T20:51:08.000Z | 2019-11-25T10:11:59.000Z | app/urls.py | silop4all/iam | 691316ca9ea7d3d10a3197b4d028e44e333c7060 | [
"Apache-2.0"
] | null | null | null | app/urls.py | silop4all/iam | 691316ca9ea7d3d10a3197b4d028e44e333c7060 | [
"Apache-2.0"
] | null | null | null | from django.conf.urls import patterns, url, include
from app.views import *
# Endpoints for views
# HTML view routes: signup/login, profile management, application
# management, and the OAuth2 user-consent page.
# NOTE(review): django.conf.urls.patterns() was removed in Django 1.10 --
# fine for the Django version this project pins, but replace with a plain
# list of url() entries when upgrading.
endpoints = patterns(
    '',
    # Account signup and session handling.
    url(r'^signup-request/$', EmailForm.as_view(), name='signup_request_form'),
    url(r'^signup/$', RegistrationView.as_view(), name='signup_form'),
    url(r'^login/$', LoginView.as_view(), name='login_form'),
    url(r'^index/$', DashboardView.as_view(), name='dashboard_form'),
    # Profile pages (view, password change, edit).
    url(r'^profile/$', ProfileView.as_view(), name='profile_preview'),
    url(r'^profile/change-password/$', ProfileChangePwdView.as_view(), name='profile_change_pwd'),
    url(r'^profile/edit/$', ProfileUpdateView.as_view(), name='profile_update'),
    # Application management; clientId is alphanumeric plus dashes.
    url(r'^applications/$', ApplicationListView.as_view(), name='applications_form'),
    url(r'^applications/register/$', ApplicationCreateView.as_view(), name='application_form'),
    url(r'^applications/(?P<clientId>[0-9a-zA-Z-]+)/$', ApplicationPreView.as_view(), name='application_info_form'),
    url(r'^applications/(?P<clientId>[0-9a-zA-Z-]+)/edit/$', ApplicationUpdateView.as_view(), name='application_edit_form'),
    url(r'^applications/(?P<clientId>[0-9a-zA-Z-]+)/roles/$', ApplicationRolesView.as_view(), name='application_role_form'),
    url(r'^applications/(?P<clientId>[0-9a-zA-Z-]+)/members/$', ApplicationAuthMembersView.as_view(), name='application_auth_members_form'),
    # Applications the user has authorized.
    url(r'^authorized-applications/overview/$', ApplicationsListView.as_view(), name='list_applications_form'),
    url(r'^authorized-applications/(?P<clientId>[0-9a-zA-Z-]+)/roles/$',AuthorizedApplicationRolesView.as_view(), name='authorized_application_role_form'),
    url(r'^authorized-applications/access/$', AuthorizedApplicationsView.as_view(), name='authorized_applications_form'),
    # OAuth2 authorization (consent) page.
    url(r'^oauth2/authorize/$', AuthorizeApplicationView.as_view(), name='authorize_app_form'),
)
# Private API
private_api = patterns(
'',
url(r'^iam/openam/authenticate$', MemberAuthentication.as_view({"post": "create"}), name='authenticate_url'),
url(r'^iam/openam/prepare/register$', PrepareRegistration.as_view({'post': 'create'}), name='pre_registration_url'),
url(r'^iam/openam/users$', MembersListAPIView.as_view({'post': 'create', "get": "retrieve"}), name='users_url'),
url(r'^iam/openam/users/(?P<username>[a-zA-Z0-9]+)$', Members.as_view({"put": 'update'}), name='user_url'),
url(r'^iam/openam/users/(?P<username>[a-zA-Z0-9]+)/logo$', UploadUserLogo.as_view(), name="user_logo"),
url(r'^iam/openam/users/(?P<username>[a-zA-Z0-9]+)/password$', MemberChangePasswordAPIView.as_view({"put": 'update'}), name='change_password_url'),
url(r'^iam/openam/requests$', Requests.as_view({"get": "retrieve"}), name='requests_url'),
url(r'^iam/openam/logout$', Logout.as_view({"get": "retrieve"}), name='logout_url'),
url(r'^iam/openam/authorize$', ClientAuthorization.as_view({"get": "retrieve"}), name='authorize_url'),
url(r'^iam/openam/roles$', GenericRolesListAPIView.as_view({"post": "create", "get": "retrieve"}), name='add_role_url'),
url(r'^iam/openam/applications$', ClientsList.as_view({"post": "create", "get": "retrieve"}), name='application_url'),
url(r'^iam/openam/applications/(?P<clientId>[0-9a-zA-Z-]+)$', ApplicationAPIView.as_view({"put":"update", "delete": "destroy"}), name='update_application_url'),
url(r'^iam/openam/applications/(?P<clientId>[0-9a-zA-Z-]+)/roles$', ApplicationRolesAPIView.as_view({"put":"update"}), name='set_application_role_url'),
url(r'^iam/openam/authorized-applications/(?P<clientId>[0-9a-zA-Z-]+)/users/(?P<username>[a-zA-Z0-9]+)$', ApplicationMemberAPIView.as_view({"post":"create", "delete": "destroy"}), name='set_application_member_url'),
url(r'^iam/openam/authorized-applications/(?P<clientId>[0-9a-zA-Z-]+)/users/(?P<username>[a-zA-Z0-9]+)/roles$',ApplicationMemberHasRoleAPIView.as_view({"put":"update"}), name='set_member_application_roles_url'),
url(r'^iam/openam/authorized-applications/tokens$', ActiveAccessTokenListAPIView.as_view({"get": "retrieve"}), name='access_tokens_url'),
url(r'^iam/openam/authorized-applications/tokens/(?P<id>[0-9a-zA-Z-]+)$',ActiveAccessTokenAPIView.as_view({"delete": "destroy"}), name='access_token_url'),
url(r'^iam/oauth2/authorize$', DirectClientAuthorization.as_view({"post": "create"}), name='authorize_direct_app_url'),
# endpoints
url(r'^roles$', RolesList.as_view(), name='roles_url'),
url(r'^users/(?P<username>[a-zA-Z0-9]+)/applications$', MyApplicationsList.as_view(), name='my_applications_url'),
) | 118.12963 | 234 | 0.50196 | 589 | 6,379 | 5.26146 | 0.195246 | 0.047757 | 0.064537 | 0.071313 | 0.391417 | 0.27525 | 0.242659 | 0.206196 | 0.16102 | 0.16102 | 0 | 0.008074 | 0.339865 | 6,379 | 54 | 235 | 118.12963 | 0.727856 | 0.006427 | 0 | 0.044444 | 0 | 0.133333 | 0.347182 | 0.227794 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.044444 | 0.044444 | 0 | 0.044444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
243d81dcf344a2faba2df31a69da9d81b705581d | 394 | py | Python | workouts/migrations/0009_auto_20180417_0936.py | patcurry/patcurryworks.com | 1b8aac688ca0925ca843683bba97e8ec59dea18a | [
"MIT"
] | null | null | null | workouts/migrations/0009_auto_20180417_0936.py | patcurry/patcurryworks.com | 1b8aac688ca0925ca843683bba97e8ec59dea18a | [
"MIT"
] | null | null | null | workouts/migrations/0009_auto_20180417_0936.py | patcurry/patcurryworks.com | 1b8aac688ca0925ca843683bba97e8ec59dea18a | [
"MIT"
] | null | null | null | # Generated by Django 2.0.4 on 2018-04-17 07:36
from django.db import migrations, models
# Auto-generated by Django 2.0.4 (see header comment): adds a UNIQUE
# constraint to Exercise.exercise_slug.
class Migration(migrations.Migration):
    # Must run after the previous workouts migration.
    dependencies = [
        ('workouts', '0008_auto_20180417_0934'),
    ]
    operations = [
        # Redefine the field so the slug column becomes unique.
        migrations.AlterField(
            model_name='exercise',
            name='exercise_slug',
            field=models.SlugField(unique=True),
        ),
    ]
| 20.736842 | 48 | 0.606599 | 42 | 394 | 5.571429 | 0.833333 | 0.102564 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.109541 | 0.281726 | 394 | 18 | 49 | 21.888889 | 0.717314 | 0.114213 | 0 | 0 | 1 | 0 | 0.149856 | 0.066282 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
243e1ab1c1e6e1aac758f59036bc58edcc0b399c | 5,027 | py | Python | scienz/covid/the_racial_covid_data_tracker.py | Vibrant-Planet/aorist | 067e119ef4d0d40802ce74a8e47d882e557ce195 | [
"MIT"
] | 16 | 2021-08-14T10:20:16.000Z | 2022-03-31T04:19:26.000Z | hub/covid/the_racial_covid_data_tracker.py | scie-nz/aorist | ac1e31251af7d851c4491a310b417de880b79d09 | [
"MIT"
] | 5 | 2021-08-15T23:19:10.000Z | 2021-09-26T20:50:41.000Z | scienz/covid/the_racial_covid_data_tracker.py | Vibrant-Planet/aorist | 067e119ef4d0d40802ce74a8e47d882e557ce195 | [
"MIT"
] | 1 | 2022-01-06T01:26:24.000Z | 2022-01-06T01:26:24.000Z | from aorist import (
Attribute,
NaturalNumber,
StringIdentifier,
DateString,
POSIXTimestamp,
PositiveFloat,
default_tabular_schema,
RowStruct,
StaticDataTable,
DataSchema,
StorageSetup,
RemoteStorageSetup,
Storage,
RemoteStorage,
RemoteLocation,
CSVEncoding,
Encoding,
DataSet,
DatumTemplate,
Asset,
WebLocation,
FileBasedStorageLayout,
CSVHeader,
FileHeader,
APIOrFileLayout,
SingleFileLayout,
FreeText,
Empty,
FIPSStateCode,
IntegerNumber,
)
# Column list for the tracker: the Date/State keys followed by one
# NaturalNumber column per (measure, demographic group) pair.  The nested
# comprehension reproduces the original explicit list, in the same order:
# all Cases_* columns, then Deaths_*, Hosp_* and Tests_*.
attributes = [
    Attribute(DateString("Date")),
    Attribute(StringIdentifier("State")),
] + [
    Attribute(NaturalNumber(measure + "_" + group))
    for measure in ("Cases", "Deaths", "Hosp", "Tests")
    for group in (
        "Total", "White", "Black", "Latinx", "Asian", "AIAN", "NHPI",
        "Multiracial", "Other", "Unknown", "Ethnicity_Hispanic",
        "Ethnicity_NonHispanic", "Ethnicity_Unknown",
    )
]
# Row template: one tracker record with the full attribute set above.
trcdt_datum = RowStruct(name="the_racial_covid_data_tracker_datum",
                        attributes=attributes)
# Tabular schema derived from the datum template over the same attributes.
trcdt_schema = default_tabular_schema(DatumTemplate(trcdt_datum), attributes)
# Static table asset: the tracker data is a single remote CSV (a published
# Google Sheets export) described by trcdt_schema.
table = Asset(StaticDataTable(
    name="the_racial_covid_data_tracker_table",
    schema=DataSchema(trcdt_schema),
    # Remote storage: the file is fetched from the web location below.
    setup=StorageSetup(RemoteStorageSetup(
        remote=Storage(RemoteStorage(
            location=RemoteLocation(
                WebLocation(
                    address=("https://docs.google.com/spreadsheets/d/e/2PACX-1vS8SzaERcKJOD"
                             "_EzrtCDK1dX1zkoMochlA9iHoHg_RSw3V8bkpfk1mpw4pfL5RdtSOyx_oScsUt"
                             "yXyk/pub?gid=43720681&single=true&output=csv"),
                )
            ),
            # The whole dataset lives in one file (no sharding/partitioning).
            layout=APIOrFileLayout(
                FileBasedStorageLayout(
                    SingleFileLayout()
                ),
            ),
            # CSV encoding with exactly one header line.
            encoding=Encoding(CSVEncoding(header=FileHeader(
                CSVHeader(num_lines=1)
            ))),
        )),
    )),
    tag="the_racial_covid_data_tracker",
))
trcdt_dataset = DataSet(
name="The-covid-racial-data-tracker",
description="""
The COVID Racial Data Tracker is a collaboration between the COVID
Tracking Project and the Boston University Center for Antiracist
Research. Together, they’re gathering the most complete and up-to
-date race and ethnicity data on COVID-19 in the United States.
""",
source_path=__file__,
datum_templates=[DatumTemplate(trcdt_datum)],
assets={
"The COVID Racial Data Tracker data": table,
},
access_policies=[]
)
| 35.907143 | 101 | 0.67217 | 417 | 5,027 | 7.865707 | 0.309353 | 0.355488 | 0.107012 | 0.047561 | 0.048171 | 0.017683 | 0 | 0 | 0 | 0 | 0 | 0.005591 | 0.217227 | 5,027 | 139 | 102 | 36.165468 | 0.827954 | 0 | 0 | 0.037313 | 0 | 0 | 0.279292 | 0.105431 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.007463 | 0 | 0.007463 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
244274aed34752d9995f8d45a9df657835220bb7 | 10,287 | py | Python | ensemble/management/commands/seeddata.py | evanlouie/activelearning | 7ee6e9d2d795f85a441ad70e70ac0d8de9c25e31 | [
"MIT"
] | null | null | null | ensemble/management/commands/seeddata.py | evanlouie/activelearning | 7ee6e9d2d795f85a441ad70e70ac0d8de9c25e31 | [
"MIT"
] | null | null | null | ensemble/management/commands/seeddata.py | evanlouie/activelearning | 7ee6e9d2d795f85a441ad70e70ac0d8de9c25e31 | [
"MIT"
] | 1 | 2019-01-03T18:03:18.000Z | 2019-01-03T18:03:18.000Z | """
Test data scaffolding.
Read: https://docs.djangoproject.com/en/dev/howto/custom-management-commands/
"""
import json
from random import randint
from django.core.management.base import BaseCommand, CommandError
from ensemble.models import (
Classification,
Model,
ModelVersion,
MediaFile,
VideoPrediction,
AudioPrediction,
)
class Command(BaseCommand):
    """Seed demo data for the ensemble app.

    Creates a GUNSHOT classification, an "audioset" model with one trained
    version, and then, for every sample movie in ``__movie_json``, a
    MediaFile plus 1000 random video and 1000 random audio predictions.
    Runs with plain ``save()`` calls (no get_or_create), so invoking the
    command twice inserts a second copy of everything.
    """

    help = "Scaffold some test data for ensemble"

    def handle(self, *args, **options):
        """Entry point: create the seed records."""
        gunshot = Classification(name="GUNSHOT")
        gunshot.save()
        audioset_model = Model(name="audioset")
        audioset_model.save()
        audioset_model.classifications.add(gunshot)
        audioset_model_trained = ModelVersion(
            model=audioset_model, version="0.01alpha2"
        )
        audioset_model_trained.save()
        # strict=False lets json accept the control characters embedded in
        # the movie descriptions below (\n escapes inside the literal).
        for movie in json.loads(self.__movie_json, strict=False):
            media_file = MediaFile(
                name=movie["title"],
                url=movie["sources"][0],
                description=movie["description"],
            )
            media_file.save()
            video_predictions = [
                self.__generate_random_video_prediction(
                    media_file, gunshot, audioset_model_trained
                )
                for _ in range(1000)
            ]
            audio_predictions = [
                self.__generate_random_audio_prediction(
                    media_file, gunshot, audioset_model_trained
                )
                for _ in range(1000)
            ]
            # Saved one-by-one (not bulk_create) so per-object save
            # behavior (PK assignment, signals) stays normal; slow but
            # acceptable for seed data.
            for prediction in video_predictions + audio_predictions:
                prediction.save()

    def __generate_random_video_prediction(self, media_file, classification, model):
        """Return an unsaved random gunshot VideoPrediction for the video.

        The bounding box is kept fully inside a 1280x720 frame, i.e.
        x + width <= 1280 and y + height <= 720.
        """
        # randint is inclusive on both ends, so x may be at most 1279 to
        # leave room for a box of width >= 1.  (The previous bounds,
        # randint(0, 1280) with width randint(1, 1280 - x + 1), allowed
        # boxes to spill one pixel past the frame edge.)
        x = randint(0, 1279)
        y = randint(0, 719)
        width = randint(1, 1280 - x)
        height = randint(1, 720 - y)
        return VideoPrediction(
            media_file=media_file,
            classification=classification,
            confidence=randint(0, 100),
            model_version=model,
            time=randint(0, 600000),  # offset within the first 10 min (ms)
            x=x,
            y=y,
            width=width,
            height=height,
        )

    def __generate_random_audio_prediction(self, media_file, classification, model):
        """Return an unsaved random 10-second gunshot AudioPrediction."""
        return AudioPrediction(
            media_file=media_file,
            classification=classification,
            confidence=randint(0, 100),
            model_version=model,
            time=randint(0, 600000),  # offset within the first 10 min (ms)
            duration="10000",  # 10 seconds, in ms
        )

    # Sample catalogue (Google's public test videos); parsed in handle().
    __movie_json = """
    [
        {
            "description": "Big Buck Bunny tells the story of a giant rabbit with a heart bigger than himself. When one sunny day three rodents rudely harass him, something snaps... and the rabbit ain't no bunny anymore! In the typical cartoon tradition he prepares the nasty rodents a comical revenge.\n\nLicensed under the Creative Commons Attribution license\nhttp://www.bigbuckbunny.org",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4"
            ],
            "subtitle": "By Blender Foundation",
            "thumb": "images/BigBuckBunny.jpg",
            "title": "Big Buck Bunny"
        },
        {
            "description": "The first Blender Open Movie from 2006",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4"
            ],
            "subtitle": "By Blender Foundation",
            "thumb": "images/ElephantsDream.jpg",
            "title": "Elephant Dream"
        },
        {
            "description": "HBO GO now works with Chromecast -- the easiest way to enjoy online video on your TV. For when you want to settle into your Iron Throne to watch the latest episodes. For $35.\nLearn how to use Chromecast with HBO GO and more at google.com/chromecast.",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerBlazes.mp4"
            ],
            "subtitle": "By Google",
            "thumb": "images/ForBiggerBlazes.jpg",
            "title": "For Bigger Blazes"
        },
        {
            "description": "Introducing Chromecast. The easiest way to enjoy online video and music on your TV—for when Batman's escapes aren't quite big enough. For $35. Learn how to use Chromecast with Google Play Movies and more at google.com/chromecast.",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerEscapes.mp4"
            ],
            "subtitle": "By Google",
            "thumb": "images/ForBiggerEscapes.jpg",
            "title": "For Bigger Escape"
        },
        {
            "description": "Introducing Chromecast. The easiest way to enjoy online video and music on your TV. For $35. Find out more at google.com/chromecast.",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerFun.mp4"
            ],
            "subtitle": "By Google",
            "thumb": "images/ForBiggerFun.jpg",
            "title": "For Bigger Fun"
        },
        {
            "description": "Introducing Chromecast. The easiest way to enjoy online video and music on your TV—for the times that call for bigger joyrides. For $35. Learn how to use Chromecast with YouTube and more at google.com/chromecast.",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerJoyrides.mp4"
            ],
            "subtitle": "By Google",
            "thumb": "images/ForBiggerJoyrides.jpg",
            "title": "For Bigger Joyrides"
        },
        {
            "description": "Introducing Chromecast. The easiest way to enjoy online video and music on your TV—for when you want to make Buster's big meltdowns even bigger. For $35. Learn how to use Chromecast with Netflix and more at google.com/chromecast.",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerMeltdowns.mp4"
            ],
            "subtitle": "By Google",
            "thumb": "images/ForBiggerMeltdowns.jpg",
            "title": "For Bigger Meltdowns"
        },
        {
            "description": "Sintel is an independently produced short film, initiated by the Blender Foundation as a means to further improve and validate the free/open source 3D creation suite Blender. With initial funding provided by 1000s of donations via the internet community, it has again proven to be a viable development model for both open 3D technology as for independent animation film.\nThis 15 minute film has been realized in the studio of the Amsterdam Blender Institute, by an international team of artists and developers. In addition to that, several crucial technical and creative targets have been realized online, by developers and artists and teams all over the world.\nwww.sintel.org",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/Sintel.mp4"
            ],
            "subtitle": "By Blender Foundation",
            "thumb": "images/Sintel.jpg",
            "title": "Sintel"
        },
        {
            "description": "Smoking Tire takes the all-new Subaru Outback to the highest point we can find in hopes our customer-appreciation Balloon Launch will get some free T-shirts into the hands of our viewers.",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/SubaruOutbackOnStreetAndDirt.mp4"
            ],
            "subtitle": "By Garage419",
            "thumb": "images/SubaruOutbackOnStreetAndDirt.jpg",
            "title": "Subaru Outback On Street And Dirt"
        },
        {
            "description": "Tears of Steel was realized with crowd-funding by users of the open source 3D creation tool Blender. Target was to improve and test a complete open and free pipeline for visual effects in film - and to make a compelling sci-fi film in Amsterdam, the Netherlands. The film itself, and all raw material used for making it, have been released under the Creatieve Commons 3.0 Attribution license. Visit the tearsofsteel.org website to find out more about this, or to purchase the 4-DVD box with a lot of extras. (CC) Blender Foundation - http://www.tearsofsteel.org",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/TearsOfSteel.mp4"
            ],
            "subtitle": "By Blender Foundation",
            "thumb": "images/TearsOfSteel.jpg",
            "title": "Tears of Steel"
        },
        {
            "description": "The Smoking Tire heads out to Adams Motorsports Park in Riverside, CA to test the most requested car of 2010, the Volkswagen GTI. Will it beat the Mazdaspeed3's standard-setting lap time? Watch and see...",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/VolkswagenGTIReview.mp4"
            ],
            "subtitle": "By Garage419",
            "thumb": "images/VolkswagenGTIReview.jpg",
            "title": "Volkswagen GTI Review"
        },
        {
            "description": "The Smoking Tire is going on the 2010 Bullrun Live Rally in a 2011 Shelby GT500, and posting a video from the road every single day! The only place to watch them is by subscribing to The Smoking Tire or watching at BlackMagicShine.com",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/WeAreGoingOnBullrun.mp4"
            ],
            "subtitle": "By Garage419",
            "thumb": "images/WeAreGoingOnBullrun.jpg",
            "title": "We Are Going On Bullrun"
        },
        {
            "description": "The Smoking Tire meets up with Chris and Jorge from CarsForAGrand.com to see just how far $1,000 can go when looking for a car.The Smoking Tire meets up with Chris and Jorge from CarsForAGrand.com to see just how far $1,000 can go when looking for a car.",
            "sources": [
                "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/WhatCarCanYouGetForAGrand.mp4"
            ],
            "subtitle": "By Garage419",
            "thumb": "images/WhatCarCanYouGetForAGrand.jpg",
            "title": "What care can you get for a grand?"
        }
    ]
    """
| 48.523585 | 704 | 0.652085 | 1,220 | 10,287 | 5.451639 | 0.32377 | 0.021501 | 0.054729 | 0.074275 | 0.384303 | 0.377237 | 0.334837 | 0.310179 | 0.289581 | 0.225831 | 0 | 0.017818 | 0.252552 | 10,287 | 211 | 705 | 48.753555 | 0.846794 | 0.031788 | 0 | 0.278947 | 0 | 0.131579 | 0.756285 | 0.056638 | 0 | 0 | 0 | 0 | 0 | 1 | 0.015789 | false | 0 | 0.021053 | 0 | 0.063158 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2443028bd1f3dc0b8fea3a8e6493e8138104f5b7 | 5,088 | py | Python | module1-introduction-to-sql/rpg_queries.py | moviedatascience/DS-Unit-3-Sprint-2-SQL-and-Databases | 28ba92de2213b595dd693892f3cf04bbf029571d | [
"MIT"
] | null | null | null | module1-introduction-to-sql/rpg_queries.py | moviedatascience/DS-Unit-3-Sprint-2-SQL-and-Databases | 28ba92de2213b595dd693892f3cf04bbf029571d | [
"MIT"
] | null | null | null | module1-introduction-to-sql/rpg_queries.py | moviedatascience/DS-Unit-3-Sprint-2-SQL-and-Databases | 28ba92de2213b595dd693892f3cf04bbf029571d | [
"MIT"
] | null | null | null | import sqlite3 as sql
import pandas as pd
from tabulate import tabulate
# Shared connection/cursor to the assignment database; every query helper
# below reuses these module-level handles (the file is expected to sit in
# the working directory).
connect = sql.connect("rpg_db.sqlite3")
cursor = connect.cursor()
def total_char_count():
    """Print the total number of distinct characters in the game."""
    query = '''SELECT COUNT(distinct character_id)
    FROM charactercreator_character;'''
    result = pd.read_sql_query(query, connect)
    print(result)
def sub_class():
    """Print how many characters exist in each character subclass."""
    # One labelled COUNT per subclass table, stitched together with UNION.
    query = '''SELECT
    "mages", COUNT(*)
    From charactercreator_mage
    UNION
    SELECT "clerics", COUNT(*)
    from charactercreator_cleric
    UNION
    SELECT "fighter", COUNT(*)
    FROM charactercreator_fighter
    UNION
    SELECT "thieves", COUNT(*)
    FROM charactercreator_thief;'''
    counts = pd.read_sql_query(query, connect)
    print(counts)
def total_items():
    """Print the total number of distinct items in the armory."""
    query = '''SELECT COUNT(distinct item_id)
    FROM armory_item;'''
    result = pd.read_sql_query(query, connect)
    print(result)
def weapon_items():
    """Print the total number of distinct weapon items."""
    query = '''SELECT COUNT(distinct item_ptr_id)
    FROM armory_weapon;'''
    result = pd.read_sql_query(query, connect)
    print(result)
def weapon_category():
    """Print the count of weapon items and the count of non-weapon items.

    Fix: the original counted non-weapons with the magic predicate
    ``item_id < 138``, which silently breaks as soon as item ids shift.
    An item is a non-weapon iff it has no matching row in armory_weapon,
    so we classify with an anti-join instead.
    """
    # Weapons: every row of armory_weapon is a weapon item.
    print(pd.read_sql_query('''SELECT COUNT(distinct item_ptr_id)
    FROM armory_weapon;''', connect))
    # Non-weapons: items with no matching armory_weapon row.
    print(pd.read_sql_query(
        '''SELECT COUNT(distinct ai.item_id)
        FROM armory_item AS ai
        LEFT JOIN armory_weapon AS aw
        ON ai.item_id = aw.item_ptr_id
        WHERE aw.item_ptr_id IS NULL;''', connect))
def character_items():
    """Print, for the first 20 characters, how many items each one holds."""
    query = '''SELECT cc.character_id, cc.name, COUNT(cci.item_id) AS item_count
    FROM charactercreator_character AS cc
    INNER JOIN charactercreator_character_inventory AS cci
    ON cc.character_id = cci.character_id
    GROUP BY cc.character_id
    LIMIT 20'''
    rows = cursor.execute(query).fetchall()
    table = tabulate(rows, headers=['ID', 'Character Name', 'Item Count'])
    print(table)
def character_weapons():
    """Print, for the first 20 characters, how many weapons each one holds.

    Fix: the original joined armory_item with ``ON cc.item_id = ai.item_id``,
    but charactercreator_character has no item_id column -- the item key
    lives on the inventory table.  Join on ``cci.item_id`` instead, matching
    the (correct) join used by character_weapon_avg below.  COUNT() is also
    given an explicit target for clarity; with inner joins the result is
    identical to counting rows.
    """
    total_weapons_character_query = ('''SELECT cc.character_id, cc.name, COUNT(aw.item_ptr_id)
    FROM charactercreator_character AS cc
    INNER JOIN charactercreator_character_inventory AS cci
    ON cc.character_id = cci.character_id
    INNER JOIN armory_item as ai
    ON cci.item_id = ai.item_id
    INNER JOIN armory_weapon as aw
    ON ai.item_id = aw.item_ptr_id
    GROUP BY cc.character_id
    LIMIT 20''')
    total_weapons_character = cursor.execute(total_weapons_character_query).fetchall()
    print(tabulate(total_weapons_character,
                   headers=['ID', 'Character Name', 'Weapon Count']))
def character_item_avg():
    """Print the average number of items held per character."""
    query = (
        '''SELECT AVG(item_count) FROM
        (SELECT cc.character_id, COUNT(cci.item_id) AS item_count
        FROM charactercreator_character AS cc
        LEFT JOIN charactercreator_character_inventory AS cci
        ON cc.character_id = cci.character_id
        GROUP BY cc.character_id
        );''')
    average = cursor.execute(query).fetchone()[0]
    print(f'The average of items per player is {average:.2f}')
def character_weapon_avg():
    """Print the average number of weapons held per character."""
    # Inner query: one weapon count per character; the LEFT JOIN keeps
    # characters without weapons (COUNT of NULLs yields 0 for them).
    query = ('''
    SELECT AVG(weapon_count) FROM
    (
    SELECT cc.character_id, COUNT(aw.item_ptr_id) AS weapon_count
    FROM charactercreator_character AS cc
    INNER JOIN charactercreator_character_inventory as cci
    ON cc.character_id = cci.character_id
    INNER JOIN armory_item as ai
    ON cci.item_id = ai.item_id
    LEFT JOIN armory_weapon as aw
    ON ai.item_id = aw.item_ptr_id
    GROUP BY cc.character_id
    );''')
    average = cursor.execute(query).fetchone()[0]
    print(f'The average of weapons per player is {average:.2f}')
| 45.026549 | 103 | 0.553656 | 534 | 5,088 | 5.026217 | 0.153558 | 0.069672 | 0.058122 | 0.031297 | 0.628912 | 0.541356 | 0.496274 | 0.456781 | 0.406855 | 0.368852 | 0 | 0.004064 | 0.371266 | 5,088 | 112 | 104 | 45.428571 | 0.834948 | 0.070755 | 0 | 0.373134 | 0 | 0 | 0.625066 | 0.077737 | 0 | 0 | 0 | 0 | 0 | 1 | 0.134328 | false | 0 | 0.044776 | 0 | 0.179104 | 0.149254 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24519ef2668459d9f777877b65690f1c784b5bd4 | 8,070 | py | Python | single_synthetic_comparison.py | wflynny/miseq-analysis | 960467933c11ea4dea949f3043e4e92a00767401 | [
"MIT"
] | null | null | null | single_synthetic_comparison.py | wflynny/miseq-analysis | 960467933c11ea4dea949f3043e4e92a00767401 | [
"MIT"
] | null | null | null | single_synthetic_comparison.py | wflynny/miseq-analysis | 960467933c11ea4dea949f3043e4e92a00767401 | [
"MIT"
] | null | null | null | import os
import sys
import csv
import glob
import sqlite3
import numpy as np
import pandas as pd
from itertools import combinations, izip_longest
from utils.wildtype import WILDTYPE_PRO
from utils.mut_freqs import DB_LOCATION
#MUT_PAIRS = ['30-88', '54-82', '73-90', '46-82', '24-74', '35-36', '69-84',
# '24-46', '24-82', '13-33', '10-93', '12-19', '33-66', '10-46',
# '32-82', '24-64', '37-63', '33-60', '41-93', '30-35', '35-88',
# '32-46', '20-62', '63-93']
#MUT_POSITIONS = list(set([s.split('-')[0] for s in MUT_PAIRS] + [s.split('-')[1] for s in MUT_PAIRS]))
#MUT_POSITIONS = sorted(map(int, MUT_POSITIONS))
seq2ind = dict(zip('MW.', range(3)))
"""
Given pair(s) of positions,
1. calculate the bivariate counts in each sample
- calculate the univariate marginals, bivariate marginals, and upper/lower bounds
2. calculate the aggregate bivariate count
- calculate aggregate univariate marginals, bivariate marginals, and upper/lower bounds
3. calculate average univariate marginals, bivariate marginals, upper/lower bounds
"""
def get_pair_counts(pair_list, seqfile):
    """Tally joint state counts for each 1-based position pair over a FASTA file.

    Returns an array of shape (len(pair_list), 3, 3); the last two axes
    index the characters 'M', 'W', '.' (via the module-level seq2ind map)
    observed at the first and second position of each pair.
    """
    # assumes we're dealing with fasta: records are exactly two lines
    # (header, sequence), so iterate the file two lines at a time.
    pair_counts = np.zeros((len(pair_list), 3, 3), dtype=float)
    with open(seqfile, 'r') as fin:
        for name, seq in izip_longest(*[fin]*2):
            seq = seq.strip()
            if not seq: break  # blank/odd trailing line ends the scan
            for k, (p1, p2) in enumerate(pair_list):
                i1, i2 = p1 - 1, p2 - 1  # 1-based positions -> 0-based indices
                pair_counts[k, seq2ind[seq[i1]], seq2ind[seq[i2]]] += 1
    return pair_counts
def get_bounds(uni):
    """Return the (lower, upper) Frechet-bound 2x2 tables for given marginals.

    `uni[0]` and `uni[2]` are the first-state marginal probabilities at the
    two positions; each returned length-4 array is a flattened 2x2 joint
    table that sums to 1.
    """
    p, q = uni[0], uni[2]

    def _table(joint):
        # The (0,0) joint probability fixes the other three cells.
        return np.array([joint, p - joint, q - joint, 1 + joint - p - q])

    lower_table = _table(max(0, p + q - 1))
    upper_table = _table(min(p, q))
    assert lower_table[0] <= upper_table[0] or np.allclose(lower_table[0], upper_table[0]), (lower_table[0], upper_table[0])
    assert abs(sum(lower_table) - 1) < 1e-6
    assert abs(sum(upper_table) - 1) < 1e-6
    return lower_table, upper_table
def osjoin(iterable):
    """Join path components with the platform path separator."""
    separator = os.sep
    return separator.join(iterable)
def sample_main(pair_list, remake=False):
    """Compute (or reload) per-sample pair statistics for every sample directory.

    For each sample FASTA under /u2/scripps/samples, caches the 2x2 joint
    counts, marginals, and lower/upper bound tables for each pair in
    `pair_list` as text files next to the FASTA.  When `remake` is True,
    cached files are ignored and everything is recomputed.
    Returns (aggregate_counts, lower_tables, upper_tables) stacked over
    samples as numpy arrays.
    """
    aggregate_counts, lower_tables, upper_tables = [], [], []
    for f in glob.glob('/u2/scripps/samples/*/*_pro_reads_MW.fasta'):
        basepath = osjoin(f.split(os.sep)[:-1])
        print basepath.split(os.sep)[-1]
        # A 'skip' marker file excludes a sample directory from the run.
        if os.path.exists(osjoin((basepath, 'skip'))): continue
        count_files = [osjoin((basepath, 'pair_counts_%i-%i'%pair)) for pair in pair_list]
        bimarg_files = [osjoin((basepath, 'pair_bimarg_%i-%i'%pair)) for pair in pair_list]
        unimarg_files = [osjoin((basepath, 'pair_unimarg_%i-%i'%pair)) for pair in pair_list]
        lower_files = [osjoin((basepath, 'pair_lower_%i-%i'%pair)) for pair in pair_list]
        upper_files = [osjoin((basepath, 'pair_upper_%i-%i'%pair)) for pair in pair_list]
        # get counts (recompute when any cache file is missing or remake is set)
        if not all(map(os.path.exists, count_files)) or remake:
            bivariate_counts = get_pair_counts(pair_list, f)
            # get rid of '.' counts: keep only the 2x2 M/W sub-table
            bivariate_counts = bivariate_counts[:, :-1, :-1]
            for k, pair in enumerate(pair_list):
                np.savetxt(count_files[k], bivariate_counts[k].reshape(1,4))
        else:
            bivariate_counts = np.array(map(np.loadtxt, count_files)).reshape(-1, 2, 2)
        aggregate_counts.append(bivariate_counts)
        # get marginals (bivariate tables normalized per pair, then the two
        # univariate marginals obtained by summing out either axis)
        if not all(map(os.path.exists, bimarg_files + unimarg_files)) or remake:
            bivariate_marginals = bivariate_counts / bivariate_counts.sum(axis=-1).sum(axis=-1)[:, None, None]
            univariate_marginals = np.hstack((bivariate_marginals.sum(axis=-1), bivariate_marginals.sum(axis=-2)))
            # sanity: each bivariate table and each univariate pair sums to 1
            assert np.allclose(bivariate_marginals.sum(axis=-1).sum(axis=-1), np.ones(len(pair_list))), bivariate_marginals.sum(axis=-1).sum(axis=-1)
            assert np.allclose(univariate_marginals[:,:2].sum(axis=-1), np.ones(len(pair_list)))
            assert np.allclose(univariate_marginals[:,2:].sum(axis=-1), np.ones(len(pair_list)))
            for k, pair in enumerate(pair_list):
                np.savetxt(bimarg_files[k], bivariate_marginals[k].reshape(1,4))
                np.savetxt(unimarg_files[k], univariate_marginals[k].reshape(1,4))
        else:
            bivariate_marginals = np.array(map(np.loadtxt, bimarg_files)).reshape(-1, 2, 2)
            univariate_marginals = np.array(map(np.loadtxt, unimarg_files)).reshape(-1, 2, 2)
        # get bounds (lower/upper tables consistent with the univariate marginals)
        if not all(map(os.path.exists, lower_files + upper_files)) or remake:
            lowers = []
            uppers = []
            for k, pair in enumerate(pair_list):
                lower, upper = get_bounds(univariate_marginals[k])
                np.savetxt(lower_files[k], lower.reshape(1,4))
                np.savetxt(upper_files[k], upper.reshape(1,4))
                lowers.append(lower)
                uppers.append(upper)
            lowers, uppers = map(np.array, (lowers, uppers))
        else:
            lowers = np.array(map(np.loadtxt, lower_files)).reshape(-1, 2, 2)
            uppers = np.array(map(np.loadtxt, upper_files)).reshape(-1, 2, 2)
        lower_tables.append(lowers)
        upper_tables.append(uppers)
        # finished with this file. move to next one
    #aggregate_counts = np.concatenate([x[None,...] for x in aggregate_counts], axis=0)
    aggregate_counts = np.array(aggregate_counts)
    lower_tables, upper_tables = map(np.array, (lower_tables, upper_tables))
    assert lower_tables.shape[0] == upper_tables.shape[0]
    return aggregate_counts, lower_tables, upper_tables
def aggregate_main(pair_list, aggregate_counts, lower_tables, upper_tables):
    """Write pooled and per-sample-averaged statistics for each pair.

    Emits agg_* files (counts pooled over samples, then normalized) and
    avg_* files (per-sample marginals/bounds averaged over samples) into
    the current working directory.
    """
    # aggregate marginals: pool counts over all samples, then normalize
    aggregate_summed = aggregate_counts.sum(axis=0)
    aggregate_bivariates = aggregate_summed / aggregate_summed.sum(axis=-1).sum(axis=-1)[...,None,None]
    aggregate_univariates = np.hstack((aggregate_bivariates.sum(axis=-1), aggregate_bivariates.sum(axis=-2)))
    for k, pair in enumerate(pair_list):
        np.savetxt('agg_bimarg_%i-%i'%pair, aggregate_bivariates[k].reshape(1,4))
        np.savetxt('agg_unimarg_%i-%i'%pair, aggregate_univariates[k].reshape(1,4))
    # aggregate bounds derived from the pooled univariate marginals
    for k, pair in enumerate(pair_list):
        lower, upper = get_bounds(aggregate_univariates[k])
        np.savetxt('agg_lower_%i-%i'%pair, lower.reshape(1,4))
        np.savetxt('agg_upper_%i-%i'%pair, upper.reshape(1,4))
    # average marginals: normalize per sample first, then average samples
    print aggregate_counts.shape
    average_bivariates = aggregate_counts / aggregate_counts.sum(axis=-1).sum(axis=-1)[...,None,None]
    average_bivariates = average_bivariates.mean(axis=0)
    print average_bivariates.shape
    average_univariates = np.hstack((average_bivariates.sum(axis=-1), average_bivariates.sum(axis=-2)))
    print average_univariates.shape
    for k, pair in enumerate(pair_list):
        np.savetxt('avg_bimarg_%i-%i'%pair, average_bivariates[k].reshape(1,4))
        np.savetxt('avg_unimarg_%i-%i'%pair, average_univariates[k].reshape(1,4))
    # average bounds: mean of the per-sample bound tables
    average_lower = lower_tables.mean(axis=0)
    average_upper = upper_tables.mean(axis=0)
    for k, pair in enumerate(pair_list):
        np.savetxt('avg_lower_%i-%i'%pair, average_lower[k].reshape(1,4))
        np.savetxt('avg_upper_%i-%i'%pair, average_upper[k].reshape(1,4))
# Position pairs (1-based, as 'a-b' strings) whose joint mutation
# statistics are compared.
MUT_PAIRS = ['30-88', '54-82', '73-90', '46-82', '24-74', '35-36', '69-84',
             '24-46', '24-82', '13-33', '10-93', '12-19', '33-66', '10-46',
             '32-82', '24-64', '37-63', '33-60', '41-93', '30-35', '35-88',
             '32-46', '20-62', '63-93']

def main():
    """Run the per-sample pass, then the aggregate/average pass."""
    # Parse 'a-b' strings into (int, int) tuples.
    pair_list = [tuple(map(int, p.split('-'))) for p in MUT_PAIRS]
    print pair_list
    #pair_list = [(30, 88), (54, 82), (73, 90)]
    args = sample_main(pair_list, remake=True)
    aggregate_main(pair_list, *args)

if __name__ == "__main__":
    main()
| 48.323353 | 149 | 0.640397 | 1,177 | 8,070 | 4.216653 | 0.159728 | 0.04191 | 0.024179 | 0.018134 | 0.389885 | 0.313319 | 0.232924 | 0.202297 | 0.134596 | 0.100746 | 0 | 0.051366 | 0.20632 | 8,070 | 166 | 150 | 48.614458 | 0.723497 | 0.089839 | 0 | 0.083333 | 0 | 0 | 0.056028 | 0.006049 | 0 | 0 | 0 | 0 | 0.058333 | 0 | null | null | 0 | 0.083333 | null | null | 0.041667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2451b7ca11d5bb732c09f8c5ec0112a32409448a | 4,424 | py | Python | Inventario/migrations/0001_initial.py | yorlysoro/INCOLARA | ce09cac91a2fc0d3afa6a238369c59a0ae32f245 | [
"BSD-3-Clause"
] | null | null | null | Inventario/migrations/0001_initial.py | yorlysoro/INCOLARA | ce09cac91a2fc0d3afa6a238369c59a0ae32f245 | [
"BSD-3-Clause"
] | null | null | null | Inventario/migrations/0001_initial.py | yorlysoro/INCOLARA | ce09cac91a2fc0d3afa6a238369c59a0ae32f245 | [
"BSD-3-Clause"
] | null | null | null | # Generated by Django 2.2.13 on 2020-07-08 18:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated migration for the Inventario app.

    Creates the Atributo, Producto, Valores_Atributo and Variante models,
    then wires up the many-to-many links between them.  Auto-generated by
    ``makemigrations``; avoid hand-editing the operations.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # Attribute definitions (e.g. color, size) attachable to products.
        migrations.CreateModel(
            name='Atributo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=255, verbose_name='Nombre del Atributo')),
            ],
            options={
                'verbose_name': 'Atributo',
                'verbose_name_plural': 'Atributos',
            },
        ),
        # Core product record with pricing, stock and classification fields.
        migrations.CreateModel(
            name='Producto',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre_producto', models.CharField(max_length=255, verbose_name='Nombre del Producto')),
                ('vender', models.BooleanField(default=True, verbose_name='Se puede vender')),
                ('comprar', models.BooleanField(default=True, verbose_name='Se puede comprar')),
                ('tipo_producto', models.CharField(choices=[('Co', 'Consumible'), ('Se', 'Servicio'), ('Al', 'Almacenable')], default='Al', max_length=255, verbose_name='Tipo de Producto')),
                ('codigo_barras', models.CharField(blank=True, max_length=255, null=True, verbose_name='Codigo de Barras')),
                ('precio_venta', models.DecimalField(decimal_places=2, default=0.0, max_digits=10, verbose_name='Precio de Venta')),
                ('coste', models.DecimalField(decimal_places=2, default=0.0, max_digits=10, verbose_name='Coste')),
                ('peso', models.DecimalField(decimal_places=2, default=0.0, max_digits=4, verbose_name='Peso')),
                ('volumen', models.DecimalField(decimal_places=2, default=0.0, max_digits=4, verbose_name='Volumen')),
                ('descripcion', models.TextField(blank=True, null=True, verbose_name='Descripcion')),
                ('foto', models.ImageField(blank=True, null=True, upload_to='fotos/producto/', verbose_name='Imagen del Producto')),
                ('cantidad', models.PositiveIntegerField(default=0, verbose_name='Cantidad a Mano')),
                ('capacidad', models.PositiveIntegerField(default=0, verbose_name='Capacidad de Produccion')),
                ('caducidad', models.DateField(blank=True, null=True, verbose_name='Fecha de Caducidad')),
                ('atributo', models.ManyToManyField(blank=True, related_name='AtributoProducto', to='Inventario.Atributo')),
            ],
            options={
                'verbose_name': 'Producto',
                'verbose_name_plural': 'Productos',
            },
        ),
        # Concrete values an attribute can take (e.g. "red" for color).
        migrations.CreateModel(
            name='Valores_Atributo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('valor', models.CharField(max_length=255)),
            ],
            options={
                'verbose_name': 'Valor de Atributo',
                'verbose_name_plural': 'Valores de Atributos',
            },
        ),
        # Product variant: a product combined with an attribute value and
        # its own sale price.
        migrations.CreateModel(
            name='Variante',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('precio_venta', models.DecimalField(decimal_places=2, default=0.0, max_digits=10, verbose_name='Precio de Venta')),
                ('producto', models.ManyToManyField(to='Inventario.Producto')),
                ('valor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ValorVariante', to='Inventario.Valores_Atributo')),
            ],
            options={
                'verbose_name': 'Variante',
                'verbose_name_plural': 'Variantes',
            },
        ),
        # M2M links added after Valores_Atributo exists (forward reference).
        migrations.AddField(
            model_name='producto',
            name='valores',
            field=models.ManyToManyField(blank=True, related_name='ValoresProducto', to='Inventario.Valores_Atributo'),
        ),
        migrations.AddField(
            model_name='atributo',
            name='valor',
            field=models.ManyToManyField(related_name='ValorAtributo', to='Inventario.Valores_Atributo'),
        ),
    ]
| 51.44186 | 190 | 0.597423 | 439 | 4,424 | 5.856492 | 0.252847 | 0.119798 | 0.023337 | 0.060288 | 0.455465 | 0.436017 | 0.347336 | 0.347336 | 0.310774 | 0.274212 | 0 | 0.017167 | 0.262658 | 4,424 | 85 | 191 | 52.047059 | 0.770999 | 0.010398 | 0 | 0.384615 | 1 | 0 | 0.211152 | 0.01851 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.025641 | 0 | 0.076923 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
24552b505f579c23274eb1a3f24409d591f02721 | 1,834 | py | Python | backend/algorithms/ls_backend/engine.py | AroMorin/DNNOP | 271e65811fe7cadcffc8155049e256fa78c0c5c6 | [
"MIT"
] | 6 | 2020-01-14T00:01:34.000Z | 2021-12-28T14:31:05.000Z | backend/algorithms/ls_backend/engine.py | AroMorin/DNNOP | 271e65811fe7cadcffc8155049e256fa78c0c5c6 | [
"MIT"
] | null | null | null | backend/algorithms/ls_backend/engine.py | AroMorin/DNNOP | 271e65811fe7cadcffc8155049e256fa78c0c5c6 | [
"MIT"
] | 1 | 2020-09-06T10:44:29.000Z | 2020-09-06T10:44:29.000Z | """base class for pool
The pool object will contain the models under optimization.
"""
from .noise import Noise
from .analysis import Analysis
import torch
class Engine(object):
    """Local-search engine over a flattened copy of a model's parameters.

    Keeps a working `vector`, perturbs a subset of its entries (selected by
    the Noise helper), and promotes accepted perturbations into an `elite`
    vector according to the Analysis helper's accept/reject decision.
    NOTE(review): `self.noise.direction.value` is used as an index into the
    parameter vector — presumably an index tensor selecting the perturbed
    entries; confirm against the Noise implementation.
    """
    def __init__(self, model, hyper_params):
        # Flatten all model parameters into a single 1-D tensor.
        self.vector = torch.nn.utils.parameters_to_vector(model.parameters())
        self.noise = Noise(hyper_params, self.vector)
        self.analyzer = Analysis(hyper_params)
        # Best parameter vector seen so far; starts as a copy of the model's.
        self.elite = self.vector.clone()
        # True when the current perturbation was just promoted to the elite.
        self.jumped = False

    def analyze(self, score, top_score):
        """Feed the latest score and best-so-far score to the analyzer."""
        score = score.float()
        top_score = top_score.float()
        self.analyzer.analyze(score, top_score)
        #self.frustration.update(self.analyzer.improved)

    def set_elite(self):
        """Promote the perturbed entries into the elite if accepted."""
        self.jumped = False
        if self.analyzer.replace:
            # Only the entries touched by the current noise direction move.
            self.elite[self.noise.direction.value] = self.vector[self.noise.direction.value]
            #self.elite.clamp_(-0.9, 0.9)
            #self.elite.copy_(self.vector)
            self.jumped = True
            #self.frustration.reset_state()

    def update_state(self):
        """Prepares the new pool based on the scores of the current generation
        and the results of the analysis (such as value of integrity).
        """
        self.noise.update_state(self.analyzer.replace)

    def set_vector(self):
        """Revert the perturbed entries to the elite values unless promoted."""
        if not self.jumped:
            #self.vector.copy_(self.elite)
            elite_vals = self.elite[self.noise.direction.value]
            self.vector[self.noise.direction.value] = elite_vals

    def generate(self):
        """Apply the next noise perturbation to the selected entries."""
        noise_vals = self.vector[self.noise.direction.value]+self.noise.magnitude
        self.vector[self.noise.direction.value] = noise_vals
        #self.vector.add_(self.noise.vector)

    def update_weights(self, model):
        """Write the working vector back into the model's parameters."""
        torch.nn.utils.vector_to_parameters(self.vector, model.parameters())
| 33.962963 | 92 | 0.660851 | 236 | 1,834 | 5.012712 | 0.300847 | 0.092984 | 0.071006 | 0.116653 | 0.172443 | 0.172443 | 0.144548 | 0.10989 | 0.10989 | 0.10989 | 0 | 0.002837 | 0.231189 | 1,834 | 53 | 93 | 34.603774 | 0.83617 | 0.22301 | 0 | 0.066667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.233333 | false | 0 | 0.1 | 0 | 0.366667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
245a8d9d9c67b8e67660a53952f4baf20840c230 | 1,127 | py | Python | pyteleport/tests/_test_teleport_try_w_stack.py | pulkin/pyteleport | d0291486b82a6f21f9886f920428ccc9b0f36da4 | [
"BSD-2-Clause"
] | 9 | 2021-06-21T09:17:42.000Z | 2022-01-28T10:34:36.000Z | pyteleport/tests/_test_teleport_try_w_stack.py | pulkin/pyteleport | d0291486b82a6f21f9886f920428ccc9b0f36da4 | [
"BSD-2-Clause"
] | null | null | null | pyteleport/tests/_test_teleport_try_w_stack.py | pulkin/pyteleport | d0291486b82a6f21f9886f920428ccc9b0f36da4 | [
"BSD-2-Clause"
] | 1 | 2021-09-25T02:03:41.000Z | 2021-09-25T02:03:41.000Z | """
[True] loop 0
[True] try
[True] teleport
[True] vstack [!<class 'range_iterator'>, !<class 'range_iterator'>]
[True] bstack [122/1, 122/1]
[False] vstack [!<class 'range_iterator'>, !<class 'range_iterator'>]
[False] bstack [122/1, 122/1]
[False] raise
[False] CustomException('hello')
[False] handle
[False] finally
[False] loop 1
[False] loop 2
[False] done
"""
from pyteleport import tp_dummy
from pyteleport.tests.helpers import setup_verbose_logging, print_stack_here, print_, get_tp_args
setup_verbose_logging()
class CustomException(Exception):
    """Sentinel exception raised by this fixture to exercise except blocks."""
# Exercise teleporting from inside nested try/for blocks.  The module
# docstring above pins the exact expected log: the parent runs up to
# tp_dummy(), the child resumes with the same value/block stacks, raises,
# and unwinds through except/finally before finishing the outer loop.
for j in range(3):
    print_(f"loop {j}")
    if j == 0:
        print_(f"try")
        try:
            for i in range(3, 6):
                print_("teleport")
                print_stack_here(print_)
                # Teleport with live try and for blocks on the block stack.
                tp_dummy(**get_tp_args())
                print_stack_here(print_)
                print_("raise")
                raise CustomException("hello")
            print_("unreachable")  # never reached: first iteration raises
        except CustomException as e:
            print_(repr(e))
            print_("handle")
        finally:
            print_("finally")
print_("done")
| 23.978723 | 97 | 0.601597 | 137 | 1,127 | 4.715328 | 0.364964 | 0.06192 | 0.111455 | 0.088235 | 0.188854 | 0.188854 | 0.130031 | 0 | 0 | 0 | 0 | 0.027811 | 0.266193 | 1,127 | 46 | 98 | 24.5 | 0.753325 | 0.318545 | 0 | 0.083333 | 0 | 0 | 0.075 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.041667 | 0.083333 | 0 | 0.125 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
245b45ab62fe8c710268f382c976ce972894f666 | 1,690 | py | Python | w3/python/core/exception.py | Hepheir/Python-HTML-parser | 038098c198f5a34178e1e7d1b54da01e42e51178 | [
"MIT"
] | 4 | 2021-05-29T06:39:32.000Z | 2021-06-04T01:47:02.000Z | w3/python/core/exception.py | Hepheir/Python-HTML-parser | 038098c198f5a34178e1e7d1b54da01e42e51178 | [
"MIT"
] | 25 | 2021-05-22T12:29:51.000Z | 2021-06-11T02:38:09.000Z | w3/python/core/exception.py | Hepheir/Python-HTML-parser | 038098c198f5a34178e1e7d1b54da01e42e51178 | [
"MIT"
] | 1 | 2021-05-29T19:41:23.000Z | 2021-05-29T19:41:23.000Z | from ctypes import c_ushort
class DOMException(Exception):
    """Exception `DOMException`

    Raised only in "exceptional" circumstances, i.e. when a DOM operation
    is impossible to perform (for logical reasons, because data is lost,
    or because the implementation has become unstable).  Ordinary error
    situations (such as out-of-bound access on a `NodeList`) are reported
    through specific return values instead.
    Implementations may raise other exceptions under other circumstances,
    e.g. an implementation-dependent exception for a `None` argument.
    Languages without exceptions may map these conditions onto their
    native error-reporting mechanisms, e.g. method error codes matching
    the codes listed below.
    """

    # Definition group `ExceptionCode`: an integer identifying the type of
    # error generated, per the W3C DOM Level 1 specification.
    INDEX_SIZE_ERR: c_ushort = c_ushort(1)
    DOMSTRING_SIZE_ERR: c_ushort = c_ushort(2)
    HIERARCHY_REQUEST_ERR: c_ushort = c_ushort(3)
    WRONG_DOCUMENT_ERR: c_ushort = c_ushort(4)
    INVALID_CHARACTER_ERR: c_ushort = c_ushort(5)
    NO_DATA_ALLOWED_ERR: c_ushort = c_ushort(6)
    NO_MODIFICATION_ALLOWED_ERR: c_ushort = c_ushort(7)
    NOT_FOUND_ERR: c_ushort = c_ushort(8)
    NOT_SUPPORTED_ERR: c_ushort = c_ushort(9)
    INUSE_ATTRIBUTE_ERR: c_ushort = c_ushort(10)

    def __init__(self, error_code: c_ushort, *args: object) -> None:
        # Record the DOM error code, then defer message handling to Exception.
        self.code: c_ushort = error_code
        super().__init__(*args)
| 48.285714 | 141 | 0.756805 | 242 | 1,690 | 5.057851 | 0.533058 | 0.131536 | 0.081699 | 0.089869 | 0.156863 | 0.073529 | 0 | 0 | 0 | 0 | 0 | 0.007948 | 0.181065 | 1,690 | 34 | 142 | 49.705882 | 0.876445 | 0.560947 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.066667 | 0 | 0.866667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
245b7bfcc5e3acb7390210b448e0a83c48b769bc | 14,020 | py | Python | libica/openapi/libgds/models/aws_s3_temporary_upload_credentials.py | umccr-illumina/libica | 916d27eea499f29bee590268b84208effb0cc576 | [
"MIT"
] | null | null | null | libica/openapi/libgds/models/aws_s3_temporary_upload_credentials.py | umccr-illumina/libica | 916d27eea499f29bee590268b84208effb0cc576 | [
"MIT"
] | 4 | 2021-11-15T10:47:51.000Z | 2022-02-22T04:43:20.000Z | libica/openapi/libgds/models/aws_s3_temporary_upload_credentials.py | umccr-illumina/libica | 916d27eea499f29bee590268b84208effb0cc576 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Genomic Data Store Service
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from libica.openapi.libgds.configuration import Configuration
class AwsS3TemporaryUploadCredentials(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'access_key_id': 'str',
'secret_access_key': 'str',
'session_token': 'str',
'region': 'str',
'bucket_name': 'str',
'key_prefix': 'str',
'expiration_date': 'datetime',
'service_url': 'str',
'server_side_encryption_algorithm': 'str',
'server_side_encryption_key': 'str'
}
attribute_map = {
'access_key_id': 'access_Key_Id',
'secret_access_key': 'secret_Access_Key',
'session_token': 'session_Token',
'region': 'region',
'bucket_name': 'bucketName',
'key_prefix': 'keyPrefix',
'expiration_date': 'expirationDate',
'service_url': 'serviceUrl',
'server_side_encryption_algorithm': 'serverSideEncryptionAlgorithm',
'server_side_encryption_key': 'serverSideEncryptionKey'
}
def __init__(self, access_key_id=None, secret_access_key=None, session_token=None, region=None, bucket_name=None, key_prefix=None, expiration_date=None, service_url=None, server_side_encryption_algorithm=None, server_side_encryption_key=None, local_vars_configuration=None): # noqa: E501
"""AwsS3TemporaryUploadCredentials - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._access_key_id = None
self._secret_access_key = None
self._session_token = None
self._region = None
self._bucket_name = None
self._key_prefix = None
self._expiration_date = None
self._service_url = None
self._server_side_encryption_algorithm = None
self._server_side_encryption_key = None
self.discriminator = None
if access_key_id is not None:
self.access_key_id = access_key_id
if secret_access_key is not None:
self.secret_access_key = secret_access_key
if session_token is not None:
self.session_token = session_token
if region is not None:
self.region = region
if bucket_name is not None:
self.bucket_name = bucket_name
if key_prefix is not None:
self.key_prefix = key_prefix
if expiration_date is not None:
self.expiration_date = expiration_date
if service_url is not None:
self.service_url = service_url
if server_side_encryption_algorithm is not None:
self.server_side_encryption_algorithm = server_side_encryption_algorithm
if server_side_encryption_key is not None:
self.server_side_encryption_key = server_side_encryption_key
@property
def access_key_id(self):
"""Gets the access_key_id of this AwsS3TemporaryUploadCredentials. # noqa: E501
Access key for use with AWS S3 # noqa: E501
:return: The access_key_id of this AwsS3TemporaryUploadCredentials. # noqa: E501
:rtype: str
"""
return self._access_key_id
@access_key_id.setter
def access_key_id(self, access_key_id):
"""Sets the access_key_id of this AwsS3TemporaryUploadCredentials.
Access key for use with AWS S3 # noqa: E501
:param access_key_id: The access_key_id of this AwsS3TemporaryUploadCredentials. # noqa: E501
:type: str
"""
self._access_key_id = access_key_id
@property
def secret_access_key(self):
"""Gets the secret_access_key of this AwsS3TemporaryUploadCredentials. # noqa: E501
Secret key for use with AWS S3 # noqa: E501
:return: The secret_access_key of this AwsS3TemporaryUploadCredentials. # noqa: E501
:rtype: str
"""
return self._secret_access_key
@secret_access_key.setter
def secret_access_key(self, secret_access_key):
"""Sets the secret_access_key of this AwsS3TemporaryUploadCredentials.
Secret key for use with AWS S3 # noqa: E501
:param secret_access_key: The secret_access_key of this AwsS3TemporaryUploadCredentials. # noqa: E501
:type: str
"""
self._secret_access_key = secret_access_key
@property
def session_token(self):
"""Gets the session_token of this AwsS3TemporaryUploadCredentials. # noqa: E501
Token for use with AWS S3 # noqa: E501
:return: The session_token of this AwsS3TemporaryUploadCredentials. # noqa: E501
:rtype: str
"""
return self._session_token
@session_token.setter
def session_token(self, session_token):
"""Sets the session_token of this AwsS3TemporaryUploadCredentials.
Token for use with AWS S3 # noqa: E501
:param session_token: The session_token of this AwsS3TemporaryUploadCredentials. # noqa: E501
:type: str
"""
self._session_token = session_token
@property
def region(self):
"""Gets the region of this AwsS3TemporaryUploadCredentials. # noqa: E501
AWS region the folder will/does reside in # noqa: E501
:return: The region of this AwsS3TemporaryUploadCredentials. # noqa: E501
:rtype: str
"""
return self._region
@region.setter
def region(self, region):
"""Sets the region of this AwsS3TemporaryUploadCredentials.
AWS region the folder will/does reside in # noqa: E501
:param region: The region of this AwsS3TemporaryUploadCredentials. # noqa: E501
:type: str
"""
self._region = region
@property
def bucket_name(self):
"""Gets the bucket_name of this AwsS3TemporaryUploadCredentials. # noqa: E501
AWS bucket the folder will/does reside in # noqa: E501
:return: The bucket_name of this AwsS3TemporaryUploadCredentials. # noqa: E501
:rtype: str
"""
return self._bucket_name
@bucket_name.setter
def bucket_name(self, bucket_name):
"""Sets the bucket_name of this AwsS3TemporaryUploadCredentials.
AWS bucket the folder will/does reside in # noqa: E501
:param bucket_name: The bucket_name of this AwsS3TemporaryUploadCredentials. # noqa: E501
:type: str
"""
self._bucket_name = bucket_name
@property
def key_prefix(self):
"""Gets the key_prefix of this AwsS3TemporaryUploadCredentials. # noqa: E501
AWS upload location for this folder # noqa: E501
:return: The key_prefix of this AwsS3TemporaryUploadCredentials. # noqa: E501
:rtype: str
"""
return self._key_prefix
@key_prefix.setter
def key_prefix(self, key_prefix):
"""Sets the key_prefix of this AwsS3TemporaryUploadCredentials.
AWS upload location for this folder # noqa: E501
:param key_prefix: The key_prefix of this AwsS3TemporaryUploadCredentials. # noqa: E501
:type: str
"""
self._key_prefix = key_prefix
@property
def expiration_date(self):
"""Gets the expiration_date of this AwsS3TemporaryUploadCredentials. # noqa: E501
expiration for temporary credentials # noqa: E501
:return: The expiration_date of this AwsS3TemporaryUploadCredentials. # noqa: E501
:rtype: datetime
"""
return self._expiration_date
@expiration_date.setter
def expiration_date(self, expiration_date):
"""Sets the expiration_date of this AwsS3TemporaryUploadCredentials.
expiration for temporary credentials # noqa: E501
:param expiration_date: The expiration_date of this AwsS3TemporaryUploadCredentials. # noqa: E501
:type: datetime
"""
self._expiration_date = expiration_date
@property
def service_url(self):
"""Gets the service_url of this AwsS3TemporaryUploadCredentials. # noqa: E501
Service endpoint for accessing S3. This is optional for AWS S3, but mandatory for other services like Taiwan Computing Cloud. # noqa: E501
:return: The service_url of this AwsS3TemporaryUploadCredentials. # noqa: E501
:rtype: str
"""
return self._service_url
@service_url.setter
def service_url(self, service_url):
"""Sets the service_url of this AwsS3TemporaryUploadCredentials.
Service endpoint for accessing S3. This is optional for AWS S3, but mandatory for other services like Taiwan Computing Cloud. # noqa: E501
:param service_url: The service_url of this AwsS3TemporaryUploadCredentials. # noqa: E501
:type: str
"""
self._service_url = service_url
@property
def server_side_encryption_algorithm(self):
"""Gets the server_side_encryption_algorithm of this AwsS3TemporaryUploadCredentials. # noqa: E501
Used to specify the type of server-side encryption (SSE) to be used on the object provider. This value is used to determine the Amazon S3 header \"x-amz-server-side-encryption\" value. For example, specify \"AES256\" for SSE-S3, or \"AWS:KMS\" for SSE-KMS. By default if none is specified, \"AES256\" will be used. # noqa: E501
:return: The server_side_encryption_algorithm of this AwsS3TemporaryUploadCredentials. # noqa: E501
:rtype: str
"""
return self._server_side_encryption_algorithm
@server_side_encryption_algorithm.setter
def server_side_encryption_algorithm(self, server_side_encryption_algorithm):
"""Sets the server_side_encryption_algorithm of this AwsS3TemporaryUploadCredentials.
Used to specify the type of server-side encryption (SSE) to be used on the object provider. This value is used to determine the Amazon S3 header \"x-amz-server-side-encryption\" value. For example, specify \"AES256\" for SSE-S3, or \"AWS:KMS\" for SSE-KMS. By default if none is specified, \"AES256\" will be used. # noqa: E501
:param server_side_encryption_algorithm: The server_side_encryption_algorithm of this AwsS3TemporaryUploadCredentials. # noqa: E501
:type: str
"""
self._server_side_encryption_algorithm = server_side_encryption_algorithm
@property
def server_side_encryption_key(self):
"""Gets the server_side_encryption_key of this AwsS3TemporaryUploadCredentials. # noqa: E501
Used to specify the serve-side encryption key that might be associated with the specified server-side encryption algorithm This value can be the AWS KMS arn key, to be used for the Amazon S3 header \"x-amz-server-side-encryption-aws-kms-key-id\" value Value will be ignored if encryption is \"AES256\" # noqa: E501
:return: The server_side_encryption_key of this AwsS3TemporaryUploadCredentials. # noqa: E501
:rtype: str
"""
return self._server_side_encryption_key
@server_side_encryption_key.setter
def server_side_encryption_key(self, server_side_encryption_key):
"""Sets the server_side_encryption_key of this AwsS3TemporaryUploadCredentials.
Used to specify the serve-side encryption key that might be associated with the specified server-side encryption algorithm This value can be the AWS KMS arn key, to be used for the Amazon S3 header \"x-amz-server-side-encryption-aws-kms-key-id\" value Value will be ignored if encryption is \"AES256\" # noqa: E501
:param server_side_encryption_key: The server_side_encryption_key of this AwsS3TemporaryUploadCredentials. # noqa: E501
:type: str
"""
self._server_side_encryption_key = server_side_encryption_key
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AwsS3TemporaryUploadCredentials):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, AwsS3TemporaryUploadCredentials):
return True
return self.to_dict() != other.to_dict()
| 37.386667 | 339 | 0.66826 | 1,673 | 14,020 | 5.381351 | 0.107591 | 0.047095 | 0.102188 | 0.136621 | 0.693436 | 0.614795 | 0.54193 | 0.481173 | 0.355881 | 0.242142 | 0 | 0.023551 | 0.257989 | 14,020 | 374 | 340 | 37.486631 | 0.841873 | 0.463338 | 0 | 0.089744 | 1 | 0 | 0.079375 | 0.02625 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.025641 | 0 | 0.320513 | 0.012821 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
245de81ee3fb2c3848f1921251df3e1e4b581648 | 1,240 | py | Python | playlists/migrations/0006_auto_20210316_2355.py | KibetRonoh/Movie_Zone_-Django | 901b211540943a98b531db2bc77d6b3a483a694a | [
"MIT"
] | 58 | 2021-03-13T21:00:01.000Z | 2022-03-29T05:59:39.000Z | playlists/migrations/0006_auto_20210316_2355.py | KibetRonoh/Movie_Zone_-Django | 901b211540943a98b531db2bc77d6b3a483a694a | [
"MIT"
] | 1 | 2021-05-21T16:40:00.000Z | 2021-05-21T16:40:00.000Z | playlists/migrations/0006_auto_20210316_2355.py | KibetRonoh/Movie_Zone_-Django | 901b211540943a98b531db2bc77d6b3a483a694a | [
"MIT"
] | 35 | 2021-03-17T12:04:30.000Z | 2022-03-18T02:06:31.000Z | # Generated by Django 3.2b1 on 2021-03-16 23:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the PlaylistItem through-model and re-add Playlist.videos as a
    ManyToManyField routed through it (the plain M2M was removed in 0005)."""

    dependencies = [
        ('videos', '0012_alter_video_video_id'),
        ('playlists', '0005_remove_playlist_videos'),
    ]

    operations = [
        migrations.CreateModel(
            name='PlaylistItem',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Manual position of the video within the playlist.
                ('order', models.IntegerField(default=1)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('playlist', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='playlists.playlist')),
                ('video', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='videos.video')),
            ],
            options={
                # Default ordering: by explicit position, newest first on ties.
                'ordering': ['order', '-timestamp'],
            },
        ),
        migrations.AddField(
            model_name='playlist',
            name='videos',
            field=models.ManyToManyField(blank=True, related_name='playlist_item', through='playlists.PlaylistItem', to='videos.Video'),
        ),
    ]
| 36.470588 | 136 | 0.600806 | 122 | 1,240 | 5.967213 | 0.532787 | 0.043956 | 0.057692 | 0.090659 | 0.151099 | 0.151099 | 0.151099 | 0.151099 | 0.151099 | 0.151099 | 0 | 0.026172 | 0.260484 | 1,240 | 33 | 137 | 37.575758 | 0.767721 | 0.03629 | 0 | 0.074074 | 1 | 0 | 0.187762 | 0.062029 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.074074 | 0 | 0.185185 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
245f4bc38e13de9cf166aa2c56f878ad96394a84 | 826 | py | Python | ProjectSimulator/Labels.py | awzdevelopers/SimulatorOfAWZ | 772108ed086066095dcd0c895b82b9b0bcb43a77 | [
"MIT"
] | null | null | null | ProjectSimulator/Labels.py | awzdevelopers/SimulatorOfAWZ | 772108ed086066095dcd0c895b82b9b0bcb43a77 | [
"MIT"
] | null | null | null | ProjectSimulator/Labels.py | awzdevelopers/SimulatorOfAWZ | 772108ed086066095dcd0c895b82b9b0bcb43a77 | [
"MIT"
] | null | null | null | import pygame as py
def TitleOfSim(title, displayGame):
    """Draw the simulation title text at (50, 50) on the given surface."""
    heading_font = py.font.SysFont("B-Zar", 25)
    rendered = heading_font.render(title, True, (50, 200, 155))
    displayGame.blit(rendered, (50, 50))
def Messages(title, displayGame):
    """Draw a message string at (300, 300) on the given surface."""
    message_font = py.font.SysFont("B-Zar", 25)
    rendered = message_font.render(title, True, (50, 0, 155))
    displayGame.blit(rendered, (300, 300))
# def drawCTR(position,radius,color,displaygame,Surface1):
# Surface2=py.Surface((300,300))
# Surface1.fill((0,0,200))
# py.draw.circle(Surface1, color, position, radius)
# py.blit
def showLabelACFT(info, displayGame, x, y):
    """Draw an aircraft (ACFT) info label in black at (x, y)."""
    label_font = py.font.SysFont("B-Zar", 25)
    rendered = label_font.render(info, True, (0, 0, 0))
    displayGame.blit(rendered, (x, y))
def showLabelRWY(info, displayGame, x, y):
    """Draw a runway (RWY) info label in black at (x, y).

    NOTE(review): identical to showLabelACFT; kept separate to preserve the
    module's public API.
    """
    label_font = py.font.SysFont("B-Zar", 25)
    rendered = label_font.render(info, True, (0, 0, 0))
    displayGame.blit(rendered, (x, y))
| 30.592593 | 58 | 0.680387 | 130 | 826 | 4.323077 | 0.292308 | 0.017794 | 0.071174 | 0.120996 | 0.533808 | 0.533808 | 0.533808 | 0.533808 | 0.533808 | 0.533808 | 0 | 0.073611 | 0.128329 | 826 | 26 | 59 | 31.769231 | 0.706944 | 0.225182 | 0 | 0.470588 | 0 | 0 | 0.031546 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.235294 | false | 0 | 0.058824 | 0 | 0.294118 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2462bb9b8ce0bfd343822e1f4af979fc397548f6 | 781 | py | Python | converted.py | Bmillidgework/Misc-Maths | 4e186920fbc6e3592bee57a64d56ad3a7c5bd178 | [
"MIT"
] | null | null | null | converted.py | Bmillidgework/Misc-Maths | 4e186920fbc6e3592bee57a64d56ad3a7c5bd178 | [
"MIT"
] | null | null | null | converted.py | Bmillidgework/Misc-Maths | 4e186920fbc6e3592bee57a64d56ad3a7c5bd178 | [
"MIT"
] | null | null | null | from kaffe.tensorflow import Network
class ConvertedNet(Network):
    """Model emitted by the kaffe Caffe-to-TensorFlow converter.

    The layer sizes (11x11/96 stride-4 conv, grouped 5x5/256, 384/384/256
    convs, 4096-wide fc layers) appear to follow the AlexNet topology --
    confirm against the source .prototxt.

    FIX: the converter emitted ``class (Network):`` with no class name,
    which is a SyntaxError and made this module unimportable; a name had
    to be added.  The layer chain itself is unchanged.
    """

    def setup(self):
        (self.feed('input')
             .conv(1, 1, 3, 1, 1, group=3, relu=False, name='data_lab')
             .conv(11, 11, 96, 4, 4, padding='VALID', name='conv1')
             .max_pool(3, 3, 2, 2, padding='VALID', name='pool1')
             .conv(5, 5, 256, 1, 1, group=2, name='conv2')
             .max_pool(3, 3, 2, 2, padding='VALID', name='pool2')
             .conv(3, 3, 384, 1, 1, name='conv3')
             .conv(3, 3, 384, 1, 1, group=2, name='conv4')
             .conv(3, 3, 256, 1, 1, group=2, name='conv5')
             .max_pool(3, 3, 2, 2, padding='VALID', name='pool5')
             .conv(6, 6, 4096, 1, 1, padding=None, name='fc6')
             .conv(1, 1, 4096, 1, 1, name='fc7'))
24671278c6a6905c5e07554b85ebe9c03e3cdb21 | 618 | py | Python | pynex/hello_msg.py | Pixelsuft/pynex | c129244d4b8148a4b155425c1f7207892d51aaeb | [
"MIT"
] | null | null | null | pynex/hello_msg.py | Pixelsuft/pynex | c129244d4b8148a4b155425c1f7207892d51aaeb | [
"MIT"
] | null | null | null | pynex/hello_msg.py | Pixelsuft/pynex | c129244d4b8148a4b155425c1f7207892d51aaeb | [
"MIT"
] | null | null | null | import os
import sys
# Mirror pygame's own support prompt, crediting PyNex, unless the user asked
# for it to be hidden or pygame is already imported.
is_already_inited = sys.modules.get('pygame') is not None
hide_msg = os.getenv('PYGAME_HIDE_SUPPORT_PROMPT') is not None
if is_already_inited:
    if not hide_msg:
        print('pynex integrated')
elif not hide_msg:
    # Temporarily suppress pygame's built-in prompt so we can print our own.
    os.environ['PYGAME_HIDE_SUPPORT_PROMPT'] = 'True'
    import pygame  # type: ignore
    # FIX: os.unsetenv() does not update the os.environ mapping, so the
    # temporary marker stayed visible to later os.getenv() calls and would
    # wrongly suppress the prompt elsewhere.  Deleting the key from
    # os.environ removes it from both the mapping and the real environment.
    del os.environ['PYGAME_HIDE_SUPPORT_PROMPT']
    print('pygame {} (SDL {}.{}.{}, Python {}.{}.{}, PyNex)'.format(
        pygame.ver, *pygame.get_sdl_version() + sys.version_info[0:3]
    ))
    print('Hello from the pygame community. https://www.pygame.org/contribute.html')
__all__ = ()
| 32.526316 | 84 | 0.687702 | 87 | 618 | 4.62069 | 0.505747 | 0.052239 | 0.126866 | 0.171642 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003884 | 0.166667 | 618 | 18 | 85 | 34.333333 | 0.776699 | 0.019417 | 0 | 0 | 0 | 0 | 0.369205 | 0.129139 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1875 | 0 | 0.1875 | 0.1875 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
246ac2c1a7d374c87b6d65d1fb436d4605f74dfd | 444 | py | Python | users/migrations/0002_endworker_group.py | heolin123/funcrowd | 20167783de208394c09ed0429a5f02ec6dd79c42 | [
"MIT"
] | null | null | null | users/migrations/0002_endworker_group.py | heolin123/funcrowd | 20167783de208394c09ed0429a5f02ec6dd79c42 | [
"MIT"
] | 11 | 2019-11-12T23:26:45.000Z | 2021-06-10T17:37:23.000Z | users/migrations/0002_endworker_group.py | heolin123/funcrowd | 20167783de208394c09ed0429a5f02ec6dd79c42 | [
"MIT"
] | null | null | null | # Generated by Django 2.0.8 on 2018-12-16 12:59
from django.db import migrations, models
import users.models.utils.utils
class Migration(migrations.Migration):
    """Add EndWorker.group, an IntegerField whose default is computed by
    users.models.utils.utils.get_group_number (Django calls the callable to
    produce the default for each new row)."""

    dependencies = [
        ('users', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='endworker',
            name='group',
            field=models.IntegerField(default=users.models.utils.utils.get_group_number),
        ),
    ]
| 22.2 | 89 | 0.630631 | 50 | 444 | 5.52 | 0.68 | 0.07971 | 0.115942 | 0.152174 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.057402 | 0.254505 | 444 | 19 | 90 | 23.368421 | 0.776435 | 0.101351 | 0 | 0 | 1 | 0 | 0.078086 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.153846 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79e21e3228c6415468cb2f001a97551f2eaf8633 | 780 | py | Python | Object oriented programming.py | fatimatswanya/fatimaCSC102 | cab70bd696d39a9e16bcb57e0180e872be4f49bc | [
"MIT"
] | null | null | null | Object oriented programming.py | fatimatswanya/fatimaCSC102 | cab70bd696d39a9e16bcb57e0180e872be4f49bc | [
"MIT"
] | null | null | null | Object oriented programming.py | fatimatswanya/fatimaCSC102 | cab70bd696d39a9e16bcb57e0180e872be4f49bc | [
"MIT"
] | null | null | null | class Coffee:
coffeecupcounter =0
def __init__(self, themilk, thesugar, thecoffeemate):
self.milk = themilk
self.sugar = thesugar
self.coffeemate = thecoffeemate
Coffee.coffeecupcounter=Coffee.coffeecupcounter+1
print(f'You now have your coffee with {self.milk} milk, {self.sugar} sugar, {self.coffeemate} coffeemate')
# Demo: the counter is a class attribute, so it is shared by every instance
# and reachable through the class or through any instance.
# FIX: the last line carried garbled non-Python tokens (extraction artifact)
# fused after the print call; they have been removed.
mysugarfreecoffee = Coffee(2, 0, 1)
print(mysugarfreecoffee.sugar)
mysweetcoffee = Coffee(2, 100, 1)
print(mysweetcoffee.sugar)
print(f'We have made {Coffee.coffeecupcounter} coffee cups so far!')
print(f'We have made {mysugarfreecoffee.coffeecupcounter} coffee cups so far!')
# NOTE(review): the next line says "coffee cups" but interpolates .milk --
# it prints the milk amount, kept as-is to preserve output.
print(f'We have made {mysweetcoffee.milk} coffee cups so far!')
print(f'We have made {mysweetcoffee.coffeecupcounter} coffee cups so far!')
79f17b0dc466449847bea66546af6c9d958315f5 | 876 | py | Python | Python 3 exercises/44 python answers/Ordem 3) Par Impar.py | Matheus-R-Sena/Python | c047288ad126e8d14c3186a912f1a1aa5a8a55d6 | [
"MIT"
] | 2 | 2022-03-18T00:04:42.000Z | 2022-03-25T11:45:37.000Z | Python 3 exercises/44 python answers/Ordem 3) Par Impar.py | Matheus-R-Sena/Python | c047288ad126e8d14c3186a912f1a1aa5a8a55d6 | [
"MIT"
] | null | null | null | Python 3 exercises/44 python answers/Ordem 3) Par Impar.py | Matheus-R-Sena/Python | c047288ad126e8d14c3186a912f1a1aa5a8a55d6 | [
"MIT"
] | null | null | null | n = int(input())
P = [] #pares
I = [] #ímpares
V = [] #Vetor principal
t = 0
for i in range (n):
V.append(int(input()))
for i in range(len(V)): #dividindo o vetor em 2 vetores menores (Par e Impar)
if V[i]%2==0:
P.append(V[i]) #dividindo para par
else:
I.append(V[i]) #dividindo para impar
V=[] #Zerando o Vetor principal
for i in range(len(P)-1): #Ordenando os pares
for j in range(i+1,len(P)):
if P[i] > P[j]:
t = P[i]
P[i] = P[j]
P[j] = t
for i in range(len(I)-1): #Ordenando os ímpares
for j in range(i+1,len(I)):
if I[i] < I[j]:
t = I[i]
I[i] = I[j]
I[j] = t
for i in range (len(P)):
V.append(P[i])
for i in range (len(I)):
V.append(I[i])
print(V)
| 20.372093 | 77 | 0.449772 | 150 | 876 | 2.626667 | 0.233333 | 0.142132 | 0.091371 | 0.167513 | 0.385787 | 0.243655 | 0.162437 | 0 | 0 | 0 | 0 | 0.014898 | 0.386986 | 876 | 43 | 78 | 20.372093 | 0.718808 | 0.206621 | 0 | 0.066667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.033333 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79f218067fdd93862616a413ed28a8ec76a9e160 | 5,555 | py | Python | streaming/jobs/twitter-in.py | andrewreece/cs205-final-project | adcbf6f6e40b8eafa92d81f1adcb68864cf0a226 | [
"Unlicense"
] | null | null | null | streaming/jobs/twitter-in.py | andrewreece/cs205-final-project | adcbf6f6e40b8eafa92d81f1adcb68864cf0a226 | [
"Unlicense"
] | null | null | null | streaming/jobs/twitter-in.py | andrewreece/cs205-final-project | adcbf6f6e40b8eafa92d81f1adcb68864cf0a226 | [
"Unlicense"
] | null | null | null |
from kafka import SimpleProducer, KafkaClient
from os.path import expanduser
import requests
from requests_oauthlib import OAuth1
import urllib, datetime, time, json, sys, boto3
import creds # we made this module for importing twitter api creds
# AWS handles and the S3 locations of the runtime configuration files.
client = boto3.client('emr')
s3res = boto3.resource('s3')
bucket_name = 'cs205-final-project'
settings_key = 'setup/bake-defaults.json'
terms_key = 'scripts/search-terms.txt'
# how many minutes should the stream stay open?
settings = json.loads(s3res.Object(bucket_name,settings_key).get()['Body'].read())
minutes_forward = int(settings['Stream_Duration']['val'])
#minutes_forward = 10 # for testing
#print settings
print 'minutes forward',minutes_forward
''' list_clusters() is used here to find the current cluster ID
WARNING: this is a little shaky, as there may be >1 clusters running in production
better to search by cluster name as well as state
'''
''' We need to know if we're on an EMR cluster or a local machine.
- If we are on a cluster:
* We can't set 'localhost' for the kafka hostname, because other
nodes will have their own localhosts.
* We can determine the private IP address of the master node (where Kafka runs), and
use that instead of localhost.
* We set the location for search-terms.txt on s3
- If we are on a local machine, no cluster:
* We set Kafka's hostname to localhost.
* We set the location for search-terms.txt in our local directory
'''
# Heuristic: EMR nodes run as the "hadoop" user, so that home dir means we
# are on a cluster and must use the master node's private IP for Kafka.
on_cluster = (expanduser("~") == "/home/hadoop")
if on_cluster:
    clusters = client.list_clusters(ClusterStates=['RUNNING','WAITING','BOOTSTRAPPING'])['Clusters']
    # NOTE(review): assumes the first listed cluster is ours -- see the
    # warning comment above about multiple running clusters.
    cid = clusters[0]['Id']
    master_instance = client.list_instances(ClusterId=cid,InstanceGroupTypes=['MASTER'])
    hostname = master_instance['Instances'][0]['PrivateIpAddress']
else:
    import findspark
    findspark.init()
    hostname = 'localhost'
# kafka can have multiple ports if multiple producers, be careful
kafka_port = '9092'
kafka_host = ':'.join([hostname,kafka_port])
kafka = KafkaClient(kafka_host)
producer = SimpleProducer(kafka)
# OAuth1 credentials for the Twitter streaming API (loaded from our creds module).
APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET = creds.get_twitter_creds()
config_token = OAuth1(APP_KEY,
                client_secret=APP_SECRET,
                resource_owner_key=OAUTH_TOKEN,
                resource_owner_secret=OAUTH_TOKEN_SECRET)
stream_url = 'https://stream.twitter.com/1.1/statuses/filter.json'
# Load nested JSON of search terms
search_terms = s3res.Object(bucket_name,terms_key).get()['Body'].read().replace("\n",",")
# some search terms have apostrophes, maybe other chars? need to be url-encoded for query string
search_terms = urllib.urlencode({"track":search_terms}).split("=")[1]
# Query parameters to Twitter Stream API
data = [('language', 'en'), ('track', search_terms)]
query_url = stream_url + '?' + '&'.join([str(t[0]) + '=' + str(t[1]) for t in data])
# Use stream=True switch on requests.get() to pull in stream indefinitely
response = requests.get(query_url, auth=config_token, stream=True)
def set_end_time(minutes_forward=minutes_forward):
    ''' Return the local wall-clock time `minutes_forward` minutes from now
        as a dict of {year, month, day, hour, minute}.

        FIX: the original hand-rolled minute arithmetic was wrong when
        minutes_forward >= 60 (the hour only ever advanced by one), produced
        a minute > 59 when minutes_forward was an exact multiple of 60, and
        never handled hour/day/month rollover.  datetime + timedelta covers
        all of these cases; datetime.now() matches the original's use of
        local time via time.localtime().
    '''
    end = datetime.datetime.now() + datetime.timedelta(minutes=minutes_forward)
    return {"year": end.year, "month": end.month, "day": end.day,
            "hour": end.hour, "minute": end.minute}
end_time = set_end_time()
''' Twitter API returns a number of different status codes.
We want status_code == 200.
See https://dev.twitter.com/streaming/overview/connecting for more.
NOTE: Twitter has 'etiquette' guidelines for how to handle 503, 401, etc. We should follow them!
Right now we don't do anything about this, other than to report the error to stdout.
'''
print "END TIME:",end_time
if response.status_code == 200:
print "Reponse Code = 200"
ct = 0
''' We will almost certainly not keep this code.
timesup just picks an end point (currently 2 minutes ahead) to stop ingesting tweets.
In production, we'd keep ingesting until either an error was thrown or the debate ended.
For that matter, we also need better error handling here, like how long to wait before
reconnecting if the stream drops or rate limits out?
'''
timesup = datetime.datetime(end_time['year'],
end_time['month'],
end_time['day'],
end_time['hour'],
end_time['minute']).strftime('%s')
for line in response.iter_lines(): # Iterate over streaming tweets
if int(timesup) > time.time():
#print(line.decode('utf8'))
try:
producer.send_messages('tweets', line)
except:
time.sleep(1)
producer.send_messages('tweets', line)
ct+=1
else:
break
else:
print("ERROR Response code:{}".format(response.status_code))
try:
producer.send_messages('tweets', "ERROR Response code:{}".format(response.status_code))
except:
time.sleep(1)
producer.send_messages('tweets', "ERROR Response code:{}".format(response.status_code))
print "END twitter-in.py"
response.close()
# restore duration default if changed
#if minutes_forward != DURATION_DEFAULT:
# s3res.Object(bucket_name,settings_key).put(Body=json.dumps(settings))
| 35.158228 | 102 | 0.725833 | 817 | 5,555 | 4.831089 | 0.390453 | 0.03547 | 0.019002 | 0.026349 | 0.11173 | 0.096529 | 0.080314 | 0.069927 | 0.055232 | 0.03395 | 0 | 0.011593 | 0.161476 | 5,555 | 157 | 103 | 35.382166 | 0.835766 | 0.127273 | 0 | 0.17284 | 0 | 0 | 0.150907 | 0.015544 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.08642 | null | null | 0.061728 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
79fcf62a1f352b071844ce1495996eda7af8f693 | 4,386 | py | Python | ahserver/server/protocol/__init__.py | ifplusor/ahserver | 26a6315d7a3b781d424c81bb3985016d9233e1b7 | [
"BSD-3-Clause"
] | 1 | 2019-10-08T07:57:32.000Z | 2019-10-08T07:57:32.000Z | ahserver/server/protocol/__init__.py | ifplusor/ahserver | 26a6315d7a3b781d424c81bb3985016d9233e1b7 | [
"BSD-3-Clause"
] | null | null | null | ahserver/server/protocol/__init__.py | ifplusor/ahserver | 26a6315d7a3b781d424c81bb3985016d9233e1b7 | [
"BSD-3-Clause"
] | null | null | null | # encoding=utf-8
__all__ = [
"HttpMethod",
"HttpVersion",
"HttpStatus",
"HttpHeader",
"PopularHeaders",
]
from enum import Enum
from ahserver.util.parser import FieldNameEnumParser, IntPairEnumParser
from . import httpheader as HttpHeader
@FieldNameEnumParser("http_method")
class HttpMethod(Enum):
    """HTTP request methods; each member's value is the on-the-wire token.

    PRI is the method token of the HTTP/2 connection preface.
    """

    OPTIONS = "OPTIONS"
    GET = "GET"
    HEAD = "HEAD"
    POST = "POST"
    PUT = "PUT"
    DELETE = "DELETE"
    TRACE = "TRACE"
    CONNECT = "CONNECT"
    PRI = "PRI"

    def __str__(self) -> str:
        # str(HttpMethod.GET) == "GET", suitable for request lines.
        return self.value
class HttpVersion(Enum):
    """Supported HTTP protocol versions; the value is the version string."""

    V10 = "1.0"
    V11 = "1.1"
    V20 = "2.0"

    def __str__(self):
        return self.value

    @staticmethod
    def parse(version: str) -> "HttpVersion":
        """Parse a version string such as "1.1" into an HttpVersion member.

        :raises ValueError: if the string is not a known version.
            (FIX: was a bare ``Exception``; ValueError is more precise and is
            still caught by any caller handling ``Exception``.  The Enum
            by-value lookup replaces the if/elif chain.)
        """
        try:
            return HttpVersion(version)
        except ValueError:
            raise ValueError("Unknown http version.") from None
@IntPairEnumParser("http_status")
class HttpStatus(Enum):
    """HTTP response status codes; each value is a (code, reason-phrase) pair."""

    # 1xx: Informational - request received, continuing process
    CONTINUE = (100, "Continue")
    SWITCHING_PROTOCOL = (101, "Switching Protocol")
    EARLY_HINT = (103, "Early Hints")
    # 2xx: Success - the action was successfully received, understood and accepted
    OK = (200, "OK")
    CREATED = (201, "Created")
    ACCEPTED = (202, "Accepted")
    NON_AUTHORITATIVE_INFORMATION = (203, "Non-Authoritative Information")
    NO_CONTENT = (204, "No Content")
    RESET_CONTENT = (205, "Reset Content")
    PARTIAL_CONTENT = (206, "Partial Content")
    # 3xx: Redirection - further action must be taken to complete the request
    MOVE_PERMANENTLY = (301, "Moved Permanently")
    FOUND = (302, "Found")
    SEE_OTHER = (303, "See Other")
    NOT_MODIFIED = (304, "Not Modified")
    USE_PROXY = (305, "Use Proxy")
    TEMPORARY_REDIRECT = (307, "Temporary Redirect")
    PERMANENT_REDIRECT = (308, "Permanent Redirect")
    # 4xx: Client Error - the request contains bad syntax or cannot be fulfilled
    BAD_REQUEST = (400, "Bad Request")
    UNAUTHORIZED = (401, "Unauthorized")
    PAYMENT_REQUIRED = (402, "Payment Required")
    FORBIDDEN = (403, "Forbidden")
    NOT_FOUND = (404, "Not Found")
    METHOD_NOT_ALLOWED = (405, "Method Not Allowed")
    NOT_ACCEPTABLE = (406, "Not Acceptable")
    PROXY_AUTHENTICATION_REQUIRED = (407, "Proxy Authentication Required")
    REQUEST_TIMEOUT = (408, "Request Timeout")
    CONFLICT = (409, "Conflict")
    GONE = (410, "Gone")
    LENGTH_REQUIRED = (411, "Length Required")
    PRECONDITION_FAILED = (412, "Precondition Failed")
    PAYLOAD_TOO_LARGE = (413, "Payload Too Large")
    URI_TOO_LONG = (414, "URI Too Long")
    UNSUPPORTED_MEDIA_TYPE = (415, "Unsupported Media Type")
    RANGE_NOT_SATISFIABLE = (416, "Range Not Satisfiable")
    EXPECTATION_FAILED = (417, "Expectation Failed")
    I_M_A_TEAPOT = (418, "I'm a teapot")
    UNPROCESSABLE_ENTITY = (422, "Unprocessable Entity")
    TOO_EARLY = (425, "Too Early")
    UPGRADE_REQUIRED = (426, "Upgrade Required")
    PRECONDITION_REQUIRED = (428, "Precondition Required")
    TOO_MANY_REQUESTS = (429, "Too Many Requests")
    REQUEST_HEADER_FIELDS_TOO_LARGE = (431, "Request Header Fields Too Large")
    UNAVAILABLE_FOR_LEGAL_REASONS = (451, "Unavailable For Legal Reasons")
    # 5xx: Server Error - the server failed to fulfill an apparently valid request
    INTERNAL_SERVER_ERROR = (500, "Internal Server Error")
    NOT_IMPLEMENTED = (501, "Not Implemented")
    BAD_GATEWAY = (502, "Bad Gateway")
    SERVICE_UNAVAILABLE = (503, "Service Unavailable")
    GATEWAY_TIMEOUT = (504, "Gateway Time-out")
    # NOTE(review): member name has a double negative ("NOT_UNSUPPORTED");
    # kept as-is because renaming would break callers.
    HTTP_VERSION_NOT_UNSUPPORTED = (505, "HTTP Version not supported")
    VARIANT_ALSO_NEGOTIATES = (506, "Variant Also Negotiates")
    INSUFFICIENT_STORAGE = (507, "Insufficient Storage")
    LOOP_DETECTED = (508, "Loop Detected")
    NETWORK_AUTHENTICATION_REQUIRED = (511, "Network Authentication Required")

    def __str__(self):
        # Status-line form, e.g. "404 Not Found".
        return "{} {}".format(self.value[0], self.value[1])
class PopularHeaders:
    """Pre-built header dictionaries for common responses."""

    CONNECTION_CLOSE = {HttpHeader.CONNECTION: "close"}
    CONTENT_EMPTY = {HttpHeader.CONTENT_LENGTH: "0"}
    TYPE_PLAIN = {HttpHeader.CONTENT_TYPE: "text/plain"}
    TYPE_HTML = {HttpHeader.CONTENT_TYPE: "text/html"}
    TYPE_JSON = {HttpHeader.CONTENT_TYPE: "application/json"}
    UPGRADE_H2C = {HttpHeader.CONNECTION: "Upgrade", HttpHeader.UPGRADE: "h2c"}

    @staticmethod
    def union(*args):
        """Merge the given header dicts left-to-right into a new dict.

        Later dicts win on key collisions; the inputs are not modified.
        """
        merged = {}
        for mapping in args:
            merged.update(mapping)
        return merged
| 31.328571 | 79 | 0.654811 | 484 | 4,386 | 5.743802 | 0.46281 | 0.01295 | 0.010791 | 0.011511 | 0.019424 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056775 | 0.22093 | 4,386 | 139 | 80 | 31.553957 | 0.756804 | 0.032376 | 0 | 0.055046 | 0 | 0 | 0.245929 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045872 | false | 0 | 0.027523 | 0.027523 | 0.825688 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
79fd8c6ce233d4c7c58007694fcc63d3fd30ed18 | 949 | py | Python | examples/HelloAPI/app/__init__.py | neo1218/rest | 8bf7369aaa9da5cc4a300c625e4d7fea21f52681 | [
"MIT"
] | 3 | 2015-12-05T20:57:04.000Z | 2017-04-26T03:02:43.000Z | examples/HelloAPI/app/__init__.py | misakar/rest | 8bf7369aaa9da5cc4a300c625e4d7fea21f52681 | [
"MIT"
] | 1 | 2015-12-09T18:27:26.000Z | 2015-12-10T16:19:30.000Z | examples/HelloAPI/app/__init__.py | misakar/rest | 8bf7369aaa9da5cc4a300c625e4d7fea21f52681 | [
"MIT"
] | 4 | 2015-12-11T03:20:27.000Z | 2016-02-03T04:47:52.000Z | # coding: utf-8
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from config import config
app = Flask(__name__)
"""
config
-- 'default': DevelopmentConfig
-- 'develop': DevelopmentConfig
-- 'testing': TestingConfig
-- 'production': ProductionConfig
you can edit this in config.py
"""
config_name = 'default'
app.config.from_object(config[config_name])
config[config_name].init_app(app)
db = SQLAlchemy(app)
login_manager = LoginManager(app)
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
# admin site
from admin import views
"""
blueprint
you can register a <blueprint> by run:
-- mana blueprint <blueprint>
under app folder
"""
from main import main
app.register_blueprint(main, url_prefix='/main')
from auth import auth
app.register_blueprint(auth, url_prefix="/auth")
from api import api
app.register_blueprint(api, url_prefix="/api")
| 19.770833 | 48 | 0.765016 | 127 | 949 | 5.543307 | 0.385827 | 0.038352 | 0.085227 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001211 | 0.12961 | 949 | 47 | 49 | 20.191489 | 0.85109 | 0.02529 | 0 | 0 | 0 | 0 | 0.057994 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.421053 | 0 | 0.421053 | 0.157895 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
030555cbf9426dbf0f5ea08ed1283d06552e2fca | 2,220 | py | Python | application.py | dyphen12/vibra | 48d71958ab0498f2fa9d8b6633f5fc212b07879c | [
"MIT"
] | null | null | null | application.py | dyphen12/vibra | 48d71958ab0498f2fa9d8b6633f5fc212b07879c | [
"MIT"
] | null | null | null | application.py | dyphen12/vibra | 48d71958ab0498f2fa9d8b6633f5fc212b07879c | [
"MIT"
] | null | null | null | # Made by @dyphen12
from flask import Flask, request
from flask_cors import CORS
from flask_restful import reqparse, abort, Api, Resource
import json
import os
from vibra.api.core import api_version
from vibra.api.users import handler as uhd
# Flask app wrapped by flask-restful; CORS enabled for all routes.
app = Flask(__name__)
api = Api(app)
CORS(app)
class Hello(Resource):
    # Root endpoint: GET / returns whatever vibra.api.core.api_version()
    # produces (presumably version info -- confirm against that module).
    def get(self):
        return api_version()
api.add_resource(Hello, '/')
################# Login Api #######################
# SECURITY (review): credentials live in process memory in plain text with a
# hard-coded admin account, and Login.post appends every posted user/pass to
# this dict.  Replace with a real credential store before production use.
CREDENTIAL = {
    'token1':{'user': "admin",
            'pass': "admin1"}
}
def abort_if_credential_doesnt_exist(token_id):
    """Abort the request with 404 when *token_id* is not a known credential."""
    if token_id in CREDENTIAL:
        return
    abort(404, message="Token {} doesn't exist".format(token_id))
# Request parser for the login credentials posted to /auth.
parserauth = reqparse.RequestParser()
parserauth.add_argument('user')
parserauth.add_argument('pass')
class Login(Resource):
    """POST /auth -- validate the posted credentials and return the user id,
    or the string 'fail' when the lookup finds no matching user."""

    def post(self):
        args = parserauth.parse_args()
        # FIX: max(CREDENTIAL.keys()) compared the key *strings*
        # lexicographically, so 'token9' > 'token10' and once ten entries
        # existed the "next" id stopped advancing and entries were silently
        # overwritten.  Compare the numeric suffixes instead.
        next_id = max(int(key.lstrip('token')) for key in CREDENTIAL) + 1
        token_id = 'token%i' % next_id
        # SECURITY (review): every attempt is stored in plain text in the
        # in-memory CREDENTIAL dict -- see the note at its definition.
        CREDENTIAL[token_id] = {'user': args['user'],
                                'pass': args['pass']}
        token = CREDENTIAL[token_id]
        x, auth = uhd.user_login_ryzen(token['user'], token['pass'])
        try:
            # Assumes x behaves like a pandas DataFrame row set with an 'id'
            # column -- TODO confirm against vibra.api.users.handler.
            ids = x['id'].values[0]
            return int(ids)
        except TypeError:
            # Lookup returned nothing subscriptable: bad credentials.
            print('auth failed')
            return 'fail'

api.add_resource(Login, '/auth')
class getuserName(Resource):
    """GET /user/<id> -- look up a user's name by numeric id."""

    def get(self, todo_id):
        record = uhd.get_username_ryzen(int(todo_id))
        return record.values[0]

api.add_resource(getuserName, '/user/<string:todo_id>')
class SignUp(Resource):
    """GET /signup/<json> -- register a user from a JSON document embedded in
    the URL path.

    NOTE(review): carrying a JSON payload (including a password) inside a GET
    path is unusual and leaks credentials into access logs; consider POST.
    """

    def get(self, todo_id):
        payload = json.loads(todo_id)['results']
        return uhd.user_signup_ryzen(payload['name'], payload['lastname'],
                                     payload['password'], payload['email'])

api.add_resource(SignUp, '/signup/<string:todo_id>')
if __name__ == '__main__':
    # For deployment, bind host/port from the environment instead, e.g.:
    # app.run(host=os.getenv('IP', '0.0.0.0'), port=int(os.getenv('PORT', 8080)))
    app.run()
0309ca2de7206405c79a52c8d1fa62aa60e1cd29 | 1,974 | py | Python | jyserver/Flask.py | ftrias/jyserver | 84e96672686020dc5e75d44b5b29f33a24773a37 | [
"MIT"
] | 10 | 2020-05-11T11:36:29.000Z | 2021-09-14T06:00:06.000Z | jyserver/Flask.py | ftrias/jyserver | 84e96672686020dc5e75d44b5b29f33a24773a37 | [
"MIT"
] | 1 | 2021-11-25T13:20:11.000Z | 2021-11-25T13:20:11.000Z | jyserver/Flask.py | ftrias/jyserver | 84e96672686020dc5e75d44b5b29f33a24773a37 | [
"MIT"
] | 3 | 2020-05-11T11:36:34.000Z | 2022-03-09T06:17:31.000Z | '''
Module for using jyserver in Flask. This module provides to new
decorators.
Decorators
-----------
* @use
Link an application object to the Flask app
* @task
Helper that wraps a function inside a separate thread so that
it can execute concurrently.
Example
-------------
```html
<p id="time">TIME</p>
<button id="reset" onclick="server.reset()">Reset</button>
```
```python
import jyserver.Flask as js
import time
from flask import Flask, render_template, request
app = Flask(__name__)
@js.use(app)
class App():
def reset(self):
self.start0 = time.time()
self.js.dom.time.innerHTML = "{:.1f}".format(0)
@js.task
def main(self):
self.start0 = time.time()
while True:
t = "{:.1f}".format(time.time() - self.start0)
self.js.dom.time.innerHTML = t
time.sleep(0.1)
@app.route('/')
def index_page(name=None):
App.main()
    return App.render(render_template('flask-simple.html'))
'''
from flask import Flask, request
import json
import jyserver
import threading
def task(func):
    '''
    Decorator wrapping *func* so each call runs in its own daemon thread,
    letting it execute concurrently with the caller.

    The call returns immediately after the thread is started; the wrapped
    function's return value is discarded.  Daemon threads do not block
    interpreter shutdown.

    Improvement over the original: keyword arguments are now forwarded to
    *func* as well (the original silently dropped them).
    '''
    def wrapper(*args, **kwargs):
        server_thread = threading.Thread(target=func, args=args,
                                         kwargs=kwargs, daemon=True)
        server_thread.start()
    return wrapper
def use(flaskapp):
    '''
    Link a class to an app object. Pass Flask's `app` object.

    Registers the `/_process_srv0` endpoint on *flaskapp*, through which the
    browser-side jyserver code exchanges JSON commands, and returns a
    `jyserver.ClientContext` wrapping the decorated class.
    '''
    def decorator(appClass):
        global context
        context = jyserver.ClientContext(appClass)

        @flaskapp.route('/_process_srv0', methods=['GET', 'POST'])
        def process():
            if request.method == 'POST':
                req = json.loads(request.data)
                result = context.processCommand(req)
                if result is None:
                    return ''
                return result
            else:
                # Browser-side polling uses POST exclusively.
                # BUG FIX: corrected the typo "reqeust" in this message.
                return "GET request not allowed"
        return context
    return decorator
030fd2dfb8a4fa6185ed55acabcf1eed72bf0ff0 | 2,347 | py | Python | rbk_shares_grab.py | adamrfox/rbk_nas_bulk_add | 4784e0671a7abcb2b03df4f25ee8977eb2666636 | [
"MIT"
] | 3 | 2019-04-29T20:54:49.000Z | 2022-03-23T18:11:53.000Z | rbk_shares_grab.py | adamrfox/rbk_nas_bulk_add | 4784e0671a7abcb2b03df4f25ee8977eb2666636 | [
"MIT"
] | null | null | null | rbk_shares_grab.py | adamrfox/rbk_nas_bulk_add | 4784e0671a7abcb2b03df4f25ee8977eb2666636 | [
"MIT"
] | null | null | null | #!/usr/bin/python
from __future__ import print_function
import sys
import rubrik_cdm
import getopt
import getpass
import urllib3
urllib3.disable_warnings()
def usage():
    """Print command-line help to stderr and terminate with exit status 0."""
    help_lines = (
        "Usage: rbk_share_grab.py [-h] [-c creds] [-p protocol] [-t token] [-o outfile] rubrik\n",
        "-h | --help: Prints this message\n",
        "-c | --creds : Enter cluster credentials on the CLI [user:password]\n",
        "-p | --protocol : Only grab shares of the given protocol [NFS | SMB]\n",
        "-t | --token : Authenticate via token\n",
        "-o | --output : Write output to a file\n",
        "rubrik : Hostname or IP of a Rubrik Cluster\n",
    )
    for line in help_lines:
        sys.stderr.write(line)
    exit(0)
def python_input(message):
    """Prompt the user with *message*; portable across Python 2 and 3.

    Python 2's ``input`` evaluates the typed text, so ``raw_input`` is used
    there instead.
    """
    if int(sys.version[0]) > 2:
        return input(message)
    return raw_input(message)
if __name__ == "__main__":
    """Grab NAS shares from a Rubrik cluster and print/save host:export lines."""
    user = ""
    password = ""
    token = ""
    protocol = ""
    outfile = ""
    timeout = 60
    optlist, args = getopt.getopt(sys.argv[1:], 'c:t:p:ho:',
                                  ['creds=', 'token=', 'protocol=', 'help', 'output='])
    for opt, a in optlist:
        if opt in ('-c', '--creds'):
            (user, password) = a.split(':')
        if opt in ('-t', '--token'):
            token = a
        if opt in ('-p', '--protocol'):
            protocol = a.upper()
        if opt in ('-h', '--help'):
            usage()
        if opt in ('-o', '--output'):
            outfile = a
    try:
        rubrik_node = args[0]
    except IndexError:  # BUG FIX: was a bare except; only a missing argument is expected
        usage()
    if token != "":
        # BUG FIX: with token auth the original still prompted for a
        # user/password that were never used; skip the prompts instead.
        rubrik = rubrik_cdm.Connect(rubrik_node, api_token=token)
    else:
        if not user:
            user = python_input("User: ")
        if not password:
            password = getpass.getpass("Password: ")
        rubrik = rubrik_cdm.Connect(rubrik_node, user, password)
    hs_data = rubrik.get('internal', '/host/share', timeout=timeout)
    fp = open(outfile, "w") if outfile else None
    try:
        for hs in hs_data['data']:
            # Optional protocol filter (NFS/SMB); replication targets are skipped.
            if protocol != "" and protocol != hs['shareType']:
                continue
            if hs['status'] == "REPLICATION_TARGET":
                continue
            line = hs['hostname'] + ":" + hs['exportPoint']
            if fp:
                fp.write(line + "\n")
            else:
                print(line)
    finally:
        # BUG FIX: ensure the output file is closed even if a write fails.
        if fp:
            fp.close()
| 30.089744 | 114 | 0.558585 | 296 | 2,347 | 4.331081 | 0.364865 | 0.049142 | 0.076443 | 0.070203 | 0.049922 | 0.049922 | 0 | 0 | 0 | 0 | 0 | 0.005294 | 0.275671 | 2,347 | 77 | 115 | 30.480519 | 0.748824 | 0.006817 | 0 | 0.121212 | 0 | 0.015152 | 0.256664 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030303 | false | 0.106061 | 0.090909 | 0 | 0.121212 | 0.030303 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0310a3f412e38098f32e0ef6e645cc58b02cafe4 | 440 | py | Python | Pacote/Python/teste.py | Rezende31/Primeiros-programas-em-Python | 69b4cdde36fdb2fa9193eac7c0f279fbeea21350 | [
"MIT"
] | null | null | null | Pacote/Python/teste.py | Rezende31/Primeiros-programas-em-Python | 69b4cdde36fdb2fa9193eac7c0f279fbeea21350 | [
"MIT"
] | null | null | null | Pacote/Python/teste.py | Rezende31/Primeiros-programas-em-Python | 69b4cdde36fdb2fa9193eac7c0f279fbeea21350 | [
"MIT"
] | null | null | null | maior = 0
menor = 0
totalPessoas = 10
for pessoa in range(1, 11):
idade = int(input("Digite a idade: "))
if idade >= 18:
maior += 1
else:
menor += 1
print("Quantidade de pessoas maior de idade: ", maior)
print("Quantidade de pessoas menor de idade: ", menor)
print("Porcentagem de pessoas menores de idade = ", (menor*100)/totalPessoas, "%")
print("Porcentagem de pessoas maiores de idade = ", (maior*100)/totalPessoas, "%") | 29.333333 | 82 | 0.670455 | 62 | 440 | 4.758065 | 0.435484 | 0.122034 | 0.115254 | 0.162712 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.047486 | 0.186364 | 440 | 15 | 83 | 29.333333 | 0.776536 | 0 | 0 | 0 | 0 | 0 | 0.403628 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.307692 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
03238ca23c3c7570a1e23790a0fd110cb3b831d7 | 407 | py | Python | discord/types/enums/stickerformattype.py | AA1999/Disthon | fcf4e550750938b87a1fb48ff3006bdeeecd73e3 | [
"MIT"
] | 14 | 2021-09-20T10:50:03.000Z | 2022-03-01T16:01:08.000Z | discord/types/enums/stickerformattype.py | Rashaad1268/Disthon | 50c191fc2750bcb28280006e1f9bbd1fa65bac33 | [
"MIT"
] | 25 | 2021-12-12T15:52:19.000Z | 2022-03-31T19:45:46.000Z | discord/types/enums/stickerformattype.py | Rashaad1268/Disthon | 50c191fc2750bcb28280006e1f9bbd1fa65bac33 | [
"MIT"
] | 12 | 2021-09-22T10:40:50.000Z | 2021-11-25T15:47:42.000Z | from __future__ import annotations
from enum import IntEnum
class StickerFormatType(IntEnum):
    """Sticker file formats used by the Discord API."""

    png = 1
    apng = 2
    lottie = 3

    @property
    def file_extension(self) -> str:
        """File extension used when saving a sticker of this format.

        APNG stickers are saved with a plain ``png`` extension; Lottie
        stickers are JSON documents.
        """
        # Perf: avoid rebuilding a lookup dict on every property access
        # (the original constructed one per call).
        return "json" if self is StickerFormatType.lottie else "png"
| 21.421053 | 48 | 0.611794 | 39 | 407 | 6.25641 | 0.615385 | 0.163934 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010526 | 0.299754 | 407 | 18 | 49 | 22.611111 | 0.845614 | 0 | 0 | 0 | 0 | 0 | 0.02457 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.142857 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0326a1642c11c61dec31a86e0466b9ade9a04445 | 69,686 | py | Python | run_pasta_gui.py | koditaraszka/pasta | 2f14d5ae5b256ce1cd6bc49e44cc84a7b25be7f9 | [
"Python-2.0",
"BSD-Source-Code",
"OLDAP-2.7"
] | 1 | 2018-06-22T03:27:55.000Z | 2018-06-22T03:27:55.000Z | run_pasta_gui.py | kodicollins/pasta | 2f14d5ae5b256ce1cd6bc49e44cc84a7b25be7f9 | [
"Python-2.0",
"BSD-Source-Code",
"OLDAP-2.7"
] | null | null | null | run_pasta_gui.py | kodicollins/pasta | 2f14d5ae5b256ce1cd6bc49e44cc84a7b25be7f9 | [
"Python-2.0",
"BSD-Source-Code",
"OLDAP-2.7"
] | null | null | null | """Main script for PASTA GUI on Windows/Mac/Linux
"""
# This file is part of PASTA which is forked from SATe
# PASTA, like SATe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Jiaye Yu and Mark Holder, University of Kansas
import os
import platform
import subprocess
import tempfile
import sys
import time
import wx
import string
from pasta import PROGRAM_AUTHOR
from pasta import PROGRAM_INSTITUTE
from pasta import PROGRAM_DESCRIPTION
from pasta import PROGRAM_LICENSE
from pasta import PROGRAM_NAME
from pasta import PROGRAM_VERSION
from pasta import PROGRAM_WEBSITE
from pasta import PROGRAM_YEAR
from pasta import GLOBAL_DEBUG
from pasta import DEFAULT_MAX_MB
try:
from configparser import RawConfigParser
except:
from ConfigParser import RawConfigParser
from pasta import pasta_is_frozen
from pasta import pasta_home_dir
from pasta.configure import get_invoke_run_pasta_command
from pasta.tools import AlignerClasses
from pasta.tools import MergerClasses
from pasta.tools import TreeEstimatorClasses
from pasta.tools import get_aligner_classes, get_merger_classes, get_tree_estimator_classes
from pasta import filemgr
from pasta.usersettingclasses import get_list_of_seq_filepaths_from_dir
from pasta.alignment import summary_stats_from_parse
from pasta.mainpasta import get_auto_defaults_from_summary_stats
# Banner text shown at the top of the GUI log window.
WELCOME_MESSAGE = "%s %s, %s\n\n"% (PROGRAM_NAME, PROGRAM_VERSION, PROGRAM_YEAR)
# Vertical/horizontal gaps (pixels) used by the wx grid sizers in this module.
GRID_VGAP = 8
GRID_HGAP = 8
# When True, the GUI parses the input files itself to auto-tune settings.
PARSING_FILES_IN_GUI = True
# Upper bound offered in the "CPU(s) Available" combo box.
MAX_NUM_CPU = 16
# Debug hook: when the env var is '1', print the generated config instead of running.
PASTA_GUI_ONLY_PRINTS_CONFIG = os.environ.get('PASTA_GUI_ONLY_PRINTS_CONFIG') == '1'
def is_valid_int_str(s, min_v, max_v):
    """Return True when *s* is the canonical string form of an int in [min_v, max_v].

    Either bound may be None, meaning unbounded on that side.  Non-canonical
    spellings such as "05" or " 5" are rejected because they do not
    round-trip through str(int(s)).
    """
    try:
        i = int(s)
    except (TypeError, ValueError):  # BUG FIX: was a bare except
        return False
    if str(i) != s:
        return False
    if min_v is not None and i < min_v:
        return False
    if max_v is not None and i > max_v:
        return False
    return True
class RangedIntValidator(wx.PyValidator):
    """wx validator restricting a text control to integers in [min_v, max_v]."""

    def __init__(self, min_v, max_v):
        wx.PyValidator.__init__(self)
        self.min_v = min_v
        self.max_v = max_v
        self.Bind(wx.EVT_CHAR, self.OnChar)

    def Clone(self):
        # wx requires validators to be clonable.
        return RangedIntValidator(self.min_v, self.max_v)

    def is_valid_str(self, s):
        return is_valid_int_str(s, self.min_v, self.max_v)

    def Validate(self, win):
        return self.is_valid_str(win.GetValue())

    def OnChar(self, event):
        key = event.GetKeyCode()
        textCtrl = self.GetWindow()
        # Editing keys, control characters / special keys, and digits are all
        # passed through; chr(key) is only reached for keys in [SPACE, 255].
        acceptable = (
            key == wx.WXK_BACK
            or key == wx.WXK_DELETE
            or key < wx.WXK_SPACE
            or key > 255
            or chr(key) in string.digits
        )
        if acceptable:
            textCtrl.SetBackgroundColour("white")
            event.Skip()
            return
        if not wx.Validator_IsSilent():
            wx.Bell()
        # Returning without calling event.Skip eats the event before it
        # gets to the text control
        return

    def TransferToWindow(self):
        return True

    def TransferFromWindow(self):
        return True
class PastaFrame(wx.Frame):
    def __init__(self, size):
        """Build the main PASTA window: settings panels, start button and log."""
        wx.Frame.__init__(self, None, -1, "PASTA - Practical Alignment using SATe and TraAnsitivity", size=(640,480), style=wx.DEFAULT_FRAME_STYLE)
        self.SetBackgroundColour(wx.LIGHT_GREY)
        self.statusbar = self.CreateStatusBar()
        self.statusbar.SetStatusText("PASTA Ready!")
        # On Windows/Mac, decode the base64-embedded icon string for the frame.
        if wx.Platform == "__WXMSW__" or wx.Platform == "__WXMAC__":
            import base64
            import io
            icon = wx.EmptyIcon()
            icon.CopyFromBitmap(wx.BitmapFromImage(wx.ImageFromStream(io.BytesIO(base64.b64decode(ICO_STR)))))
            self.SetIcon(icon)
        # Controls registered here can be bulk enabled/disabled by handlers.
        self.ctrls = []
        sizer_all = wx.BoxSizer(wx.VERTICAL)
        self.sizer_tool_settings = self._create_tools_sizer()
        self.sizer_data = self._create_data_sizer()
        self.sizer_pasta_settings = self._create_pasta_settings_sizer()
        self.sizer_job_settings = self._create_job_settings_sizer()
        self.sizer_workflow_settings = self._create_workflow_settings_sizer()
        # Left column: external tools, input data, workflow switches.
        sizer1 = wx.BoxSizer(wx.VERTICAL)
        sizer1.Add(self.sizer_tool_settings, 0, wx.EXPAND|wx.BOTTOM|wx.RIGHT, 5)
        sizer1.Add(self.sizer_data, 0, wx.EXPAND|wx.TOP|wx.RIGHT, 5)
        sizer1.Add(self.sizer_workflow_settings, 0, wx.EXPAND|wx.TOP|wx.RIGHT, 5)
        self.sizer_settings = wx.BoxSizer(wx.HORIZONTAL)
        self.sizer_settings.Add(sizer1, 0, wx.EXPAND|wx.ALL, 0)
        # Right column: job settings above the PASTA algorithm settings.
        sizer2 = wx.BoxSizer(wx.VERTICAL)
        sizer2.Add(self.sizer_job_settings, 0, wx.EXPAND|wx.ALL, 0)
        sizer2.Add(self.sizer_pasta_settings, 0, wx.EXPAND|wx.ALL, 0)
        self.sizer_settings.Add(sizer2, 0, wx.EXPAND|wx.ALL, 0)
        sizer_all.Add(self.sizer_settings, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 10)
        self.button = wx.Button(self, label="Start")
        self.log = wx.TextCtrl(self, -1, "", size=(200,120),style=wx.TE_MULTILINE|wx.TE_READONLY|wx.TE_RICH2)
        self.log.AppendText(WELCOME_MESSAGE)
        self.log.AppendText("Running Log (%s %s)\n\n" % (time.strftime("%Y-%m-%d %H:%M:%S"), time.tzname[0]))
        sizer_all.Add(self.button, 0, wx.BOTTOM|wx.ALIGN_CENTER, 10)
        sizer_all.Add(self.log, 4, wx.EXPAND)
        self.SetAutoLayout(True)
        self.Layout()
        self.SetSizerAndFit(sizer_all)
        self._create_menu()
        # Handle to the running PASTA subprocess and its temporary config file
        # (set when a job is launched; see the button/idle/process handlers).
        self.process = None
        self.process_cfg_file = None
        self.Bind(wx.EVT_IDLE, self.OnIdle)
        self.Bind(wx.EVT_END_PROCESS, self.OnProcessEnded)
        self.Bind(wx.EVT_BUTTON, self.OnButton, self.button)
        self.set_char_model() # this fixes the model based on the current default tree estimator
    def _create_job_settings_sizer(self):
        """Build the "Job Settings" box: job name, output dir, CPU count, memory cap."""
        staticboxsizer = wx.StaticBoxSizer(wx.StaticBox(self, -1, "Job Settings"), wx.VERTICAL)
        sizer = wx.GridBagSizer(GRID_VGAP, GRID_HGAP)
        # cr tracks the current grid row as widgets are appended.
        cr = 0
        sizer.Add(wx.StaticText(self, -1, "Job Name"),(cr,0), flag=wx.ALIGN_LEFT )
        self.txt_jobname = wx.TextCtrl(self,-1,"pastajob")
        sizer.Add(self.txt_jobname, (cr,1), flag=wx.EXPAND)
        cr += 1
        self.outputdir_btn = wx.Button(self, label="Output Dir." )
        sizer.Add(self.outputdir_btn,(cr,0), flag=wx.ALIGN_LEFT )
        self.txt_outputdir = wx.TextCtrl(self, -1, "", size=(250,9))
        sizer.Add(self.txt_outputdir, (cr,1), flag=wx.EXPAND)
        cr += 1
        sizer.Add(wx.StaticText(self, -1, "CPU(s) Available"), (cr,0), flag=wx.ALIGN_LEFT )
        self.cb_ncpu = wx.ComboBox(self, -1, "1", choices=list(map(str, list(range(1, MAX_NUM_CPU + 1)))), style=wx.CB_READONLY)
        sizer.Add(self.cb_ncpu, (cr,1), flag=wx.EXPAND)
        cr += 1
        sizer.Add(wx.StaticText(self, -1, "Max. Memory (MB)"), (cr,0), flag=wx.ALIGN_LEFT )
        self.txt_maxmb = wx.TextCtrl(self, -1, str(DEFAULT_MAX_MB))
        sizer.Add(self.txt_maxmb, (cr,1), flag=wx.EXPAND)
        staticboxsizer.Add(sizer, 0, wx.CENTER, 0)
        self.Bind(wx.EVT_BUTTON, self.OnChooseOutputDir, self.outputdir_btn)
        return staticboxsizer
def validate_max_mb(self, value):
try:
mb = int(value)
if mb <= 0:
raise ValueError
return True
except ValueError:
wx.MessageBox("Invalid value for Maximum MB: '" + value + "': require positive integer value.",
"Invalid Value for Maximum MB",
wx.OK|wx.ICON_EXCLAMATION)
return False
def OnChooseOutputDir(self, event):
dialog = wx.DirDialog(None, "Choose directory for output", style=wx.FD_OPEN)
dialog.ShowModal()
self.txt_outputdir.SetValue( dialog.GetPath() )
def _set_custom_pasta_settings(self, event):
#self.cb_sate_presets.SetValue("(custom)")
pass
    def _create_tools_sizer(self):
        """Build the "External Tools" box: aligner, merger, tree estimator, model.

        Only tools whose executables exist on disk (per the configuration)
        are offered in the combo boxes.
        """
        from pasta.configure import get_configuration
        cfg = get_configuration()
        staticboxsizer = wx.StaticBoxSizer(wx.StaticBox(self, -1, "External Tools"), wx.VERTICAL)
        sizer = wx.FlexGridSizer(0, 2, GRID_VGAP, GRID_HGAP)
        items = ["Aligner", "Merger", "TreeEstimator"]
        tool_list_list = [get_aligner_classes(), get_merger_classes(), get_tree_estimator_classes()]
        # Model menus per estimator/datatype; RAxML protein models are the
        # cross product of rate categories and substitution matrices (+F forms).
        self.raxml_dna_models = ["GTRCAT", "GTRGAMMA", "GTRGAMMAI"]
        self.fasttree_dna_models = ["GTR+G20", "GTR+CAT", "JC+G20", "JC+CAT"]
        prot_matrix = ["DAYHOFF", "DCMUT", "JTT", "MTREV", "WAG", "RTREV", "CPREV", "VT", "BLOSUM62", "MTMAM", "LG"]
        prot_type = ["PROTCAT", "PROTCATI", "PROTGAMMA", "PROTGAMMAI"]
        self.raxml_prot_models = [j+i for i in prot_matrix for j in prot_type]
        self.raxml_prot_models.extend([j+i+"F" for i in prot_matrix for j in prot_type])
        self.fasttree_prot_models = ["JTT+G20", "JTT+CAT", "WAG+G20", "WAG+CAT"]
        if GLOBAL_DEBUG:
            defaults = {"Aligner":"PADALIGNER", "Merger":"PADALIGNER", "TreeEstimator":"RANDTREE"}
        else:
            defaults = {"Aligner":"MAFFT", "Merger":"MUSCLE", "TreeEstimator":"FASTTREE"}
        self.cb_tools = {}
        for item_idx, item in enumerate(items):
            text = wx.StaticText(self, -1, "Tree Estimator") if item == "TreeEstimator" else wx.StaticText(self, -1, item)
            sizer.Add(text, 0, wx.LEFT)
            tool_list = tool_list_list[item_idx]
            active_tool_name_list = []
            for tool in tool_list:
                try:
                    # Offer the tool only when its configured binary exists.
                    tool_attr_name = tool.section_name.split()[0].lower()
                    tool_path = getattr(cfg, tool_attr_name).path
                    if os.path.exists(tool_path):
                        active_tool_name_list.append(tool_attr_name.upper())
                except :
                    raise
            combobox = wx.ComboBox(self, -1, defaults[item], (-1,-1), (-1,-1), active_tool_name_list, wx.CB_READONLY)
            self.cb_tools[item.lower()] = combobox
            self.ctrls.append(self.cb_tools[item.lower()])
            sizer.Add(combobox, 0, wx.EXPAND)
        self.Bind(wx.EVT_COMBOBOX, self.OnTreeEstimatorChange, self.cb_tools["treeestimator"])
        # The model combo's contents are kept in sync by set_char_model().
        combobox = wx.ComboBox(self, -1, "GTRCAT", (-1,-1), (-1,-1), self.raxml_dna_models, wx.CB_READONLY)
        self.cb_tools["model"] = combobox
        self.ctrls.append(self.cb_tools["model"])
        sizer.Add(wx.StaticText(self, -1, "Model"), wx.LEFT)
        sizer.Add(combobox, 0, wx.EXPAND)
        staticboxsizer.Add(sizer, 0, wx.CENTER, 0)
        return staticboxsizer
    def _create_data_sizer(self):
        """Build the "Sequences and Tree" box: input files, data type, options."""
        staticboxsizer = wx.StaticBoxSizer(wx.StaticBox(self, -1, "Sequences and Tree"), wx.VERTICAL)
        sizer = wx.FlexGridSizer(0, 2, GRID_VGAP, GRID_HGAP)
        self.datatype = wx.ComboBox(self, -1, "DNA", (-1, -1), (-1, -1), ["DNA", "RNA", "Protein"], wx.CB_READONLY)
        self.seq_btn = wx.Button(self, label="Sequence file ..." )
        self.tree_btn = wx.Button(self, label="Tree file (optional) ..." )
        self.txt_seqfn = wx.TextCtrl(self,-1)
        self.txt_treefn = wx.TextCtrl(self,-1)
        # Multi-locus mode and the "aligned" checkbox start disabled; they are
        # enabled elsewhere once suitable input has been chosen.
        self.cb_multilocus = wx.CheckBox(self, -1, "Multi-Locus Data")
        self.cb_multilocus.Disable()
        self.checkbox_aligned = wx.CheckBox(self, -1, "Use for inital tree")
        self.checkbox_aligned.SetValue(False)
        self._could_be_aligned = False
        self.checkbox_aligned.Disable()
        sizer.AddMany([ (self.seq_btn, 0, wx.LEFT|wx.EXPAND),
                        (self.txt_seqfn, 0),
                        (wx.StaticText(self, -1, ""), 0, wx.EXPAND),
                        (self.cb_multilocus, 1, wx.EXPAND),
                        (wx.StaticText(self, -1, "Data Type"), 0, wx.ALIGN_RIGHT),
                        (self.datatype, 0),
                        (wx.StaticText(self, -1, "Initial Alignment"), 0, wx.ALIGN_RIGHT),
                        (self.checkbox_aligned, 0),
                        (self.tree_btn, 0, wx.LEFT|wx.EXPAND),
                        (self.txt_treefn, 0),
                        ])
        self.ctrls.extend([self.seq_btn,
                           self.txt_seqfn,
                           self.tree_btn,
                           self.txt_treefn,
                           self.datatype])
        staticboxsizer.Add(sizer, 0, wx.CENTER, 0)
        self.Bind(wx.EVT_BUTTON, self.OnChooseSeq, self.seq_btn)
        self.Bind(wx.EVT_BUTTON, self.OnChooseTree, self.tree_btn)
        self.Bind(wx.EVT_COMBOBOX, self.OnDataType, self.datatype)
        self.Bind(wx.EVT_CHECKBOX, self.OnMultiLocus, self.cb_multilocus)
        return staticboxsizer
    def _create_workflow_settings_sizer(self):
        """
        returns a wx.StaticBoxSizer with the widgets that control pre and post
        processing of PASTA output.
        """
        staticboxsizer = wx.StaticBoxSizer(wx.StaticBox(self, -1, "Workflow Settings"), wx.VERTICAL)
        sizer = wx.GridBagSizer(GRID_VGAP, GRID_HGAP)
        # Both switches default off: run the PASTA algorithm, no extra RAxML search.
        self.two_phase = wx.CheckBox(self, -1, "Two-Phase (not PASTA)")
        self.two_phase.Value = False
        self.raxml_after = wx.CheckBox(self, -1, "Extra RAxML Search")
        self.raxml_after.Value = False
        #self.trusted_data = wx.CheckBox(self, -1, "Trusted Data")
        #self.trusted_data.Value = True
        self.ctrls.extend([self.two_phase,
                           ])
        # cr tracks the current grid row.
        cr = 0
        sizer.Add(wx.StaticText(self, -1, "Algorithm"), (cr,0), flag=wx.ALIGN_LEFT )
        sizer.Add(self.two_phase, (cr,1), flag=wx.EXPAND)
        cr += 1
        sizer.Add(wx.StaticText(self, -1, "Post-Processing"), (cr,0), flag=wx.ALIGN_LEFT )
        sizer.Add(self.raxml_after, (cr,1), flag=wx.EXPAND)
        #cr += 1
        #sizer.Add(wx.StaticText(self, -1, "Input Validation"), (cr,0), flag=wx.ALIGN_LEFT )
        #sizer.Add(self.trusted_data, (cr,1), flag=wx.EXPAND)
        self.Bind(wx.EVT_CHECKBOX, self.OnTwoPhase, self.two_phase)
        staticboxsizer.Add(sizer, 0, wx.ALL, 0)
        return staticboxsizer
    def _create_pasta_settings_sizer(self):
        """Build the "PASTA Settings" box: subproblem size, decomposition,
        stopping rules (time/iteration limits) and the returned tree choice.
        """
        staticboxsizer = wx.StaticBoxSizer(wx.StaticBox(self, -1, "PASTA Settings"), wx.VERTICAL)
        sizer = wx.GridBagSizer(GRID_VGAP, GRID_HGAP)
        # preset_choices = ["SATe-II-fast", "SATe-II-ML", "SATe-II-simple", "(Custom)",]
        # self.cb_sate_presets = wx.ComboBox(self,
        #         -1,
        #         "SATe-II-ML",
        #         choices=preset_choices,
        #         style=wx.CB_READONLY)
        tree_and_alignment_choices = ["Final", "Best"]
        self.cb_tree_and_alignment = wx.ComboBox(self,
                -1,
                tree_and_alignment_choices[0],
                choices=tree_and_alignment_choices,
                style=wx.CB_READONLY)
        # Time limits run from 0.01 h up to 72 h; iteration limits are preset steps.
        timelimit_list = list(map(str, [i/100.0 for i in range(1,10)] + [i/10.0 for i in range(1,10)] + list(range(1,73))))
        iterlimit_list = list(map(str, [1, 5, 10, 20, 50, 100, 200, 500, 1000]))
        # Max-subproblem size: either a percentage of taxa or an absolute count.
        self.rb_maxsub1 = wx.RadioButton(self, -1, "Percentage", name="frac", style=wx.RB_GROUP)
        self.rb_maxsub2 = wx.RadioButton(self, -1, "Size", name="size")
        self.cb_maxsub1 = wx.ComboBox(self, -1, "50", choices=list(map(str, list(range(1,51)))), style=wx.CB_READONLY)
        self.cb_maxsub2 = wx.ComboBox(self, -1, "200", choices=list(map(str, list(range(1,201)))), style=wx.CB_READONLY)
        self.ctrls.extend([self.rb_maxsub1,
                           self.cb_maxsub1,
                           self.rb_maxsub2,
                           self.cb_maxsub2
                           ])
        self.checkbox_stop_time = wx.CheckBox(self, -1, "Time Limit (hr)")
        self.checkbox_stop_iter = wx.CheckBox(self, -1, "Iteration Limit")
        self.cb_stop1 = wx.ComboBox(self, -1, "24", choices=timelimit_list, style=wx.CB_READONLY)
        # Iteration limit must be a positive integer (no upper bound).
        self._iter_limits = [1, None]
        riv = RangedIntValidator(self._iter_limits[0], self._iter_limits[1])
        self.text_stop2 = wx.TextCtrl(self, -1, "8", validator=riv)
        # self.text_stop2.Bind(wx.EVT_KILL_FOCUS, lambda event : self.validate_iter_limit_text() and event.Skip())
        # self.blindmode = wx.CheckBox(self, -1, "Blind Mode Enabled")
        # self.blindmode.Value = True
        self.ctrls.extend([self.checkbox_stop_time,
                           self.cb_stop1,
                           self.checkbox_stop_iter,
                           self.text_stop2,
                           ])
        strategy_list = ["Centroid", "Longest"]
        self.cb_decomp = wx.ComboBox(self, -1, "Centroid", choices=strategy_list, style=wx.CB_READONLY)
        self.ctrls.append(self.cb_decomp)
        # Controls toggled as a group when two-phase mode is switched (OnTwoPhase).
        self.pasta_settings_ctrl_list = []
        # cr tracks the current grid row.
        cr = 0
        # sizer.Add(wx.StaticText(self, -1, "Quick Set"), (cr, 0), flag=wx.ALIGN_LEFT )
        # sizer.Add(self.cb_sate_presets, (cr, 1), flag=wx.EXPAND)
        # self.pasta_settings_ctrl_list.append(self.cb_sate_presets)
        cr += 1
        sizer.Add(wx.StaticText(self, -1, "Max. Subproblem"), (cr,0), flag=wx.ALIGN_LEFT )
        sizer.Add(self.rb_maxsub1, (cr,1), flag=wx.ALIGN_LEFT)
        sizer.Add(self.cb_maxsub1, (cr,2), flag=wx.EXPAND)
        self.pasta_settings_ctrl_list.extend([self.rb_maxsub1, self.cb_maxsub1])
        cr += 1
        sizer.Add(self.rb_maxsub2, (cr,1), flag=wx.ALIGN_LEFT)
        sizer.Add(self.cb_maxsub2, (cr,2), flag=wx.EXPAND)
        self.pasta_settings_ctrl_list.extend([self.rb_maxsub2, self.cb_maxsub2])
        cr += 1
        sizer.Add(wx.StaticText(self, -1, "Decomposition"), (cr,0), flag=wx.ALIGN_LEFT )
        sizer.Add(self.cb_decomp, (cr,1), flag=wx.EXPAND)
        self.pasta_settings_ctrl_list.extend([self.cb_decomp])
        # cr += 1
        # sizer.Add(wx.StaticText(self, -1, "Apply Stop Rule"), (cr,0), flag=wx.ALIGN_LEFT )
        # sizer.Add(self.cb_apply_stop_rule, (cr,1), flag=wx.EXPAND)
        # self.pasta_settings_ctrl_list.extend([self.cb_apply_stop_rule])
        # cr += 1
        # sizer.Add(wx.StaticText(self, -1, "Stopping Rule"), (cr,0), flag=wx.ALIGN_LEFT )
        # sizer.Add(self.blindmode, (cr,1), flag=wx.EXPAND)
        # self.pasta_settings_ctrl_list.extend([self.blindmode])
        cr += 1
        sizer.Add(self.checkbox_stop_time, (cr,1), flag=wx.ALIGN_LEFT)
        sizer.Add(self.cb_stop1, (cr,2), flag=wx.EXPAND)
        self.pasta_settings_ctrl_list.extend([self.checkbox_stop_time, self.cb_stop1])
        cr += 1
        sizer.Add(self.checkbox_stop_iter, (cr,1), flag=wx.ALIGN_LEFT)
        sizer.Add(self.text_stop2, (cr,2), flag=wx.EXPAND)
        self.pasta_settings_ctrl_list.extend([self.checkbox_stop_iter, self.text_stop2])
        cr += 1
        sizer.Add(wx.StaticText(self, -1, "Return"), (cr, 0), flag=wx.ALIGN_LEFT )
        sizer.Add(self.cb_tree_and_alignment, (cr, 1), flag=wx.EXPAND)
        self.pasta_settings_ctrl_list.extend([self.cb_tree_and_alignment])
        # Initial state: percentage-based subproblem, iteration-limit stop rule.
        self.cb_maxsub1.Disable()
        self.cb_maxsub2.Disable()
        self.rb_maxsub1.Value = True
        self.cb_maxsub1.Enable()
        self.checkbox_stop_time.Value = False
        self.cb_stop1.Disable()
        self.text_stop2.Enable()
        self.checkbox_stop_iter.Value = True
        self.text_stop2.Value = "100"
        #self.Bind(wx.EVT_COMBOBOX, self.OnSatePresets, self.cb_sate_presets)
        #self.OnSatePresets(self.cb_sate_presets)
        #self.Bind(wx.EVT_CHECKBOX, self.OnBlindMode, self.blindmode)
        self.Bind(wx.EVT_RADIOBUTTON, self.OnMaxSubproblem, self.rb_maxsub1)
        self.Bind(wx.EVT_RADIOBUTTON, self.OnMaxSubproblem, self.rb_maxsub2)
        self.Bind(wx.EVT_CHECKBOX, self.OnTimeRuleCheckbox, self.checkbox_stop_time)
        self.Bind(wx.EVT_CHECKBOX, self.OnIterRuleCheckbox, self.checkbox_stop_iter)
        self.Bind(wx.EVT_COMBOBOX, self._set_custom_pasta_settings, self.cb_decomp)
        #self.Bind(wx.EVT_COMBOBOX, self._set_custom_pasta_settings, self.cb_apply_stop_rule)
        self.Bind(wx.EVT_COMBOBOX, self._set_custom_pasta_settings, self.cb_stop1)
        self.Bind(wx.EVT_COMBOBOX, self._set_custom_pasta_settings, self.text_stop2)
        self.Bind(wx.EVT_COMBOBOX, self._set_custom_pasta_settings, self.cb_tree_and_alignment)
        #cr += 1
        #presets = wx.ComboBox(self, -1, "1", choices=map(str, range(1,9)), style=wx.CB_READONLY)
        #sizer.Add(wx.StaticText(self, -1, "Preset Configuration"), (cr,0), flag=wx.ALIGN_LEFT )
        #sizer.Add(presets, (cr,1), flag=wx.EXPAND)
        staticboxsizer.Add(sizer, 0, wx.ALL, 0)
        return staticboxsizer
def validate_iter_limit_text(self):
field=self.text_stop2
t = field.GetValue()
if is_valid_int_str(t, self._iter_limits[0], self._iter_limits[1]):
return True
field.SetBackgroundColour("red")
#wx.MessageBox(message='"Iteration Limit" must contain a positive integer',
# caption='Input Error', style=wx.OK|wx.ICON_ERROR)
field.SetFocus()
field.Refresh()
return False
def _create_menu(self):
self.menuBar = wx.MenuBar()
self.menuFile = wx.Menu()
self.menuHelp = wx.Menu()
self.menuFileSaveLog = self.menuFile.Append(-1, "&Save Log...\tCtrl+S")
self.menuFileExit = self.menuFile.Append(wx.ID_EXIT, "&Quit PASTA\tCtrl+Q")
self.menuHelpHelp = self.menuHelp.Append( -1, "&Help")
self.menuHelpAbout = self.menuHelp.Append(wx.ID_ABOUT, "&About PASTA")
self.menuBar.Append(self.menuFile, "&File")
self.menuBar.Append(self.menuHelp, "&Help")
self.SetMenuBar(self.menuBar)
self.Bind(wx.EVT_MENU, self.OnSaveLog, self.menuFileSaveLog)
self.Bind(wx.EVT_MENU, self.OnExit, self.menuFileExit)
self.Bind(wx.EVT_MENU, self.OnHelp, self.menuHelpHelp)
self.Bind(wx.EVT_MENU, self.OnAbout, self.menuHelpAbout)
self.Bind(wx.EVT_CLOSE, self.OnExit)
    def OnTreeEstimatorChange(self, event):
        """A different tree estimator offers a different set of models."""
        self.set_char_model()
    def OnDataType(self, event):
        """A different data type (DNA/RNA/Protein) needs a different model menu."""
        self.set_char_model()
def set_char_model(self):
if self.datatype.Value == "DNA" or self.datatype.Value == "RNA":
self.cb_tools["model"].Clear()
if self.cb_tools["treeestimator"].Value.lower() == "raxml":
self.raxml_after.Value = False
for model in self.raxml_dna_models:
self.cb_tools["model"].Append(model)
self.cb_tools["model"].SetStringSelection("GTRCAT")
elif self.cb_tools["treeestimator"].Value.lower() == "fasttree":
for model in self.fasttree_dna_models:
self.cb_tools["model"].Append(model)
self.cb_tools["model"].SetStringSelection("GTR+G20")
elif self.datatype.Value == "Protein":
self.cb_tools["model"].Clear()
if self.cb_tools["treeestimator"].Value.lower() == "raxml":
self.raxml_after.Value = False
for model in self.raxml_prot_models:
self.cb_tools["model"].Append(model)
self.cb_tools["model"].SetStringSelection("PROTCATWAGF")
elif self.cb_tools["treeestimator"].Value.lower() == "fasttree":
for model in self.fasttree_prot_models:
self.cb_tools["model"].Append(model)
self.cb_tools["model"].SetStringSelection("JTT+G20")
def OnSaveLog(self, event):
dialog = wx.FileDialog(None, "Save Log", defaultFile=self.txt_jobname.Value, wildcard = "Log files (*.log)|*.log", style=wx.FD_OVERWRITE_PROMPT|wx.FD_SAVE)
dialog.ShowModal()
fn = dialog.GetPath()
if len(fn) > 4:
if not fn[-4:] == ".log":
fn += ".log"
else:
fn += ".log"
fc = open(fn, "w")
fc.write(self.log.GetValue())
fc.close()
def OnMaxSubproblem(self, event):
self._set_custom_pasta_settings(event)
radio_selected = event.GetEventObject()
if radio_selected.GetName() == "frac":
self.cb_maxsub1.Enable()
self.cb_maxsub2.Disable()
elif radio_selected.GetName() == "size":
self.cb_maxsub2.Enable()
self.cb_maxsub1.Disable()
def OnTwoPhase(self, event):
"""
Called every time the 'Two-Phase' checkbox is clicked. The main action
that needs to occur is the Disabling/Enabling of the PASTA settings
controls
"""
if self.two_phase.Value:
for c in self.pasta_settings_ctrl_list:
c.Disable()
self.cb_tools["merger"].Disable()
self.tree_btn.Disable()
self.txt_treefn.Disable()
self.raxml_after.Disable()
else:
fragile_list = [self.cb_maxsub1, self.cb_maxsub2, self.cb_stop1, self.text_stop2]
for c in self.pasta_settings_ctrl_list:
if c not in fragile_list:
c.Enable()
if self.rb_maxsub1.Value:
self.cb_maxsub1.Enable()
else:
self.cb_maxsub2.Enable()
if self.checkbox_stop_time.Value:
self.cb_stop1.Enable()
else:
self.text_stop2.Enable()
self.cb_tools["merger"].Enable()
self.tree_btn.Enable()
self.txt_treefn.Enable()
self.raxml_after.Enable()
def OnTimeRuleCheckbox(self, event):
self._set_custom_pasta_settings(event)
if self.checkbox_stop_time.Value:
self.cb_stop1.Enable()
else:
self.cb_stop1.Disable()
def OnIterRuleCheckbox(self, event):
self._set_custom_pasta_settings(event)
if self.checkbox_stop_iter.Value:
self.text_stop2.Enable()
else:
self.text_stop2.Disable()
    def OnExit(self, event):
        """Kill any running PASTA subprocess, clean up, and destroy the window."""
        if self.process is not None:
            # NOTE(review): kills by self.pid, which is presumably set where
            # the subprocess is launched (outside this view) — confirm.
            wx.Process.Kill(self.pid, wx.SIGKILL)
        self._remove_config_file()
        self.Destroy()
    def OnHelp(self, event):
        """Open the bundled zipped HTML help book in a wx help window."""
        import wx.html
        # The help book ships as a zip archive, so register the zip handler.
        wx.FileSystem.AddHandler(wx.ZipFSHandler())
        def _addBook(filename):
            # Surface a dialog rather than failing silently if the book is missing.
            if not self.help.AddBook(filename, True):
                wx.MessageBox("Unable to open: " + filename, "Error", wx.OK|wx.ICON_EXCLAMATION)
        self.help = wx.html.HtmlHelpController(style = wx.html.HF_DEFAULT_STYLE^wx.html.HF_BOOKMARKS^wx.html.HF_INDEX)
        _addBook("help.zip")
        self.help.DisplayContents()
def OnAbout(self, event):
    """Show the standard wx About box populated with the program metadata."""
    from wx.lib.wordwrap import wordwrap
    info = wx.AboutDialogInfo()
    info.SetName(PROGRAM_NAME)
    info.SetVersion(PROGRAM_VERSION)
    info.SetCopyright("Copyright (C) %s" % PROGRAM_YEAR)
    info.SetWebSite((PROGRAM_WEBSITE, "%s Homepage" % PROGRAM_NAME))
    info.SetLicense(PROGRAM_LICENSE)
    info.SetDescription(PROGRAM_DESCRIPTION)
    # Idiom fix: plain loop instead of a list comprehension used only for
    # its side effects (the original built and discarded a throwaway list).
    for author in PROGRAM_AUTHOR:
        info.AddDeveloper(author)
    wx.AboutBox(info)
def _show_error_dialog(self, error_msg, caption):
    """
    Puts up a modal dialog with `error_msg` and `caption`, and destroys
    the dialog after the user clicks `OK`.
    """
    dlg = wx.MessageDialog(parent=self,
                           message=error_msg,
                           caption=caption,
                           style=wx.OK | wx.ICON_ERROR)
    dlg.ShowModal()
    dlg.Destroy()
def OnChooseSeq(self, event):
    """Let the user pick input sequence data and (optionally) pre-parse it.

    Single-locus mode opens a file dialog; multilocus mode opens a
    directory dialog.  If PARSING_FILES_IN_GUI is set and a path was
    chosen, the user may have PASTA read the data immediately so that
    several settings (datatype, tree estimator/model, decomposition size,
    stop rules) are auto-configured from the data's summary statistics.
    """
    filepath = None
    parse_as_multilocus = self.cb_multilocus.Value
    if not parse_as_multilocus:
        dialog = wx.FileDialog(None, "Choose sequences...", wildcard = "FASTA files (*.fasta)|*.fasta|FASTA files (*.fas)|*.fas|FASTA files (*)|*", style=wx.FD_OPEN)
        dialog.ShowModal()
        self.txt_seqfn.SetValue( dialog.GetPath() )
        filepath = self._encode_arg(self.txt_seqfn.GetValue())
        # Default the output directory to the input file's directory.
        if filepath and not self.txt_outputdir.GetValue():
            self.txt_outputdir.SetValue(os.path.dirname(os.path.abspath(filepath)))
    else:
        dialog = wx.DirDialog(None, "Choose directory for multiple sequence files", style=wx.FD_OPEN)
        dialog.ShowModal()
        self.txt_seqfn.SetValue( dialog.GetPath() )
        filepath = self._encode_arg(self.txt_seqfn.GetValue())
    if PARSING_FILES_IN_GUI and filepath:
        confirm_parse_dlg = wx.MessageDialog(parent=self,
                message="Do you want PASTA to read the data now? (this causes PASTA to customize some of the settings for your data).",
                caption="Read input data now?",
                style=wx.OK|wx.CANCEL|wx.ICON_QUESTION)
        result = confirm_parse_dlg.ShowModal()
        confirm_parse_dlg.Destroy()
        if result == wx.ID_OK:
            progress_dialog = wx.ProgressDialog(title="Reading input data",
                    message="Parsing data files ",
                    maximum=100,
                    parent=self,
                    style=wx.PD_AUTO_HIDE|wx.PD_APP_MODAL)
            progress_dialog.Update(1, "Beginning Parse")
            error_msg = None
            try:
                if parse_as_multilocus:
                    fn_list = get_list_of_seq_filepaths_from_dir(filepath)
                else:
                    fn_list = [filepath]
                # if self.datatype.Value == "Protein":
                # datatype_list = ["PROTEIN"]
                # else:
                datatype_list = ["DNA", "RNA", "PROTEIN"]
                careful_parse = False
                summary_stats = summary_stats_from_parse(fn_list,
                        datatype_list,
                        None,
                        careful_parse=careful_parse)
                progress_dialog.Update(100, "Done")
            except Exception as x:
                try:
                    # NOTE(review): `x.message` does not exist on Python 3
                    # exceptions; the inner bare except then supplies the
                    # generic message — confirm intent.
                    error_msg = "Problem reading the data:\n" + str(x.message)
                except:
                    error_msg = "Unknown error encountered while reading the data."
            except:
                # Catches non-Exception raises (e.g. BaseException subclasses).
                error_msg = "Unknown error encountered while reading the data."
            if error_msg:
                self._show_error_dialog(error_msg, caption="Input parsing error")
                filepath = None
                self._could_be_aligned = False
                self.refresh_aligned_checkbox()
            else:
                # Parse succeeded: push auto-detected defaults into the GUI.
                read_type = summary_stats[0]
                if read_type == "PROTEIN":
                    self.datatype.SetValue("Protein")
                else:
                    self.datatype.SetValue(read_type)
                # Set defaults from "auto_defaults"
                auto_opts = get_auto_defaults_from_summary_stats(summary_stats[0], summary_stats[1], summary_stats[2])
                self._could_be_aligned = summary_stats[3]
                self.refresh_aligned_checkbox()
                auto_pasta_opts = auto_opts["sate"]
                te_str = auto_pasta_opts["tree_estimator"].upper()
                self.cb_tools["treeestimator"].SetStringSelection(te_str)
                self.set_char_model()
                if te_str == "FASTTREE":
                    te_opts = auto_opts['fasttree']
                    self.cb_tools["model"].SetStringSelection(te_opts["GUI_model"])
                self.cb_tools["merger"].SetStringSelection(auto_pasta_opts["merger"].upper())
                self.cb_tools["aligner"].SetStringSelection(auto_pasta_opts["aligner"].upper())
                self.cb_ncpu.SetStringSelection(str(min(MAX_NUM_CPU, auto_pasta_opts["num_cpus"])))
                # Set max decomposition based on data set size (always move to actual # here)
                self.rb_maxsub1.Value = False
                self.cb_maxsub1.Disable()
                self.rb_maxsub2.Value = True
                self.cb_maxsub2.SetStringSelection(str(max(1, auto_pasta_opts["max_subproblem_size"])))
                self.cb_maxsub2.Enable()
                bs = auto_pasta_opts["break_strategy"]
                # Normalize e.g. "centroid" -> "Centroid" for the combo box.
                bs = bs[0].upper() + bs[1:].lower()
                self.cb_decomp.SetValue(bs)
                self.cb_stop1.Disable()
                self.checkbox_stop_iter.Value = True
                # NOTE(review): the following two lines duplicate the
                # cb_maxsub2 setup a few lines above — likely redundant.
                self.cb_maxsub2.SetStringSelection(str(max(1, auto_pasta_opts["max_subproblem_size"])))
                self.cb_maxsub2.Enable()
                if auto_pasta_opts['move_to_blind_on_worse_score']:
                    #self.blindmode.Value = True
                    t_l = auto_pasta_opts['after_blind_time_without_imp_limit']
                else:
                    #self.blindmode.Value = False
                    t_l = auto_pasta_opts['time_limit']
                # A non-positive limit means "no time-based stop rule".
                if t_l <= 0:
                    self.checkbox_stop_time.Value = False
                else:
                    self.checkbox_stop_time.Value = True
                # self.cb_apply_stop_rule.SetValue("After Last Improvement")
                after_blind_it_lim = auto_pasta_opts['iter_limit']
                self.text_stop2.SetValue(str(after_blind_it_lim))
                if self._could_be_aligned:
                    a_tag = "aligned"
                else:
                    a_tag = "unaligned"
                self.log.AppendText("Read %d file(s) with %s %s data. Total of %d taxa found.\n" % (len(fn_list), a_tag, read_type, summary_stats[2]))
                # Per-file summary: (num_sequences, sequence_length) tuples.
                by_file = summary_stats[1]
                for n, fn in enumerate(fn_list):
                    t_c_tuple = by_file[n]
                    if self._could_be_aligned:
                        self.log.AppendText(' Parsing of the file "%s" returned %d sequences of length = %d\n' % (fn, t_c_tuple[0], t_c_tuple[1]))
                    else:
                        self.log.AppendText(' Parsing of the file "%s" returned %d sequences with longest length = %d\n' % (fn, t_c_tuple[0], t_c_tuple[1]))
            progress_dialog.Destroy()
        else:
            # User declined pre-parsing: optimistically assume aligned input.
            self._could_be_aligned = True
            self.refresh_aligned_checkbox()
        if filepath:
            # Default the output directory from the (possibly re-parsed) input.
            if not parse_as_multilocus:
                if filepath and not self.txt_outputdir.GetValue():
                    self.txt_outputdir.SetValue(os.path.dirname(os.path.abspath(filepath)))
            else:
                if filepath and not self.txt_outputdir.GetValue():
                    self.txt_outputdir.SetValue(os.path.abspath(filepath))
    else:
        # No usable selection: clear the sequence-file text box.
        self.txt_seqfn.SetValue("")
def refresh_aligned_checkbox(self):
    """Sync the 'aligned' checkbox with the cached flag and decide whether
    the user may still toggle it (not when a starting tree file is set)."""
    self.checkbox_aligned.SetValue(self._could_be_aligned)
    if not self._could_be_aligned:
        self.checkbox_aligned.Disable()
        return
    treefilename = self.txt_treefn.GetValue()
    tree_present = bool(treefilename) and os.path.isfile(treefilename)
    # With a starting tree the aligned state is fixed; otherwise user-editable.
    self.checkbox_aligned.Enable(not tree_present)
def OnChooseTree(self, event):
    """Prompt for a starting-tree file and record the chosen path.

    Bug fix: the original ignored ShowModal()'s return code, so cancelling
    the dialog overwrote any previously chosen tree path with "".  The
    dialog is now also explicitly destroyed.
    """
    dialog = wx.FileDialog(None, "Choose tree...",
                           wildcard="Tree files (*.tre)|*.tre|Tree files (*.tree)|*.tree|Tree files (*.phy)|*.phy",
                           style=wx.FD_OPEN)
    if dialog.ShowModal() == wx.ID_OK:
        self.txt_treefn.SetValue(dialog.GetPath())
    dialog.Destroy()
    # The aligned checkbox's editability depends on whether a tree is set.
    self.refresh_aligned_checkbox()
def OnIdle(self, evt):
    """Drain any pending stdout/stderr from the running child process into the log."""
    if self.process is None:
        return
    # Treat both redirected streams identically.
    for stream in (self.process.GetInputStream(), self.process.GetErrorStream()):
        if stream is not None and stream.CanRead():
            self.log.AppendText(stream.read())
def OnProcessEnded(self, evt):
    """Flush remaining child output, tear down the process object, reset the UI."""
    # Read whatever output is left on both redirected streams.
    for stream in (self.process.GetInputStream(), self.process.GetErrorStream()):
        if stream.CanRead():
            self.log.AppendText(stream.read())
    self.process.Destroy()
    self.process = None
    self.log.AppendText("Job %s is finished.\n" % self.txt_jobname.GetValue())
    self._remove_config_file()
    self._ReactivateOptions()
    self.statusbar.SetStatusText("PASTA Ready!")
    self.button.SetLabel("Start")
def OnButton(self, event):
    """Dispatch the Start/Stop button according to its current label."""
    label = self.button.GetLabel()
    if label == "Start":
        self._OnStart()
    elif label == "Stop":
        self._OnStop()
    else:
        raise ValueError("Button label %s not recognized.\n" % label)
def OnMultiLocus(self, event):
    """Relabel the chooser button for single/multi-locus mode and clear the selection."""
    multi = self.cb_multilocus.Value
    self.seq_btn.SetLabel("Sequence files ..." if multi else "Sequence file ...")
    # A mode switch invalidates whatever path was previously chosen.
    self.txt_seqfn.SetValue("")
def _FreezeOptions(self):
    """Snapshot each control's enabled state, then disable all of them
    while a job is running (restored later by _ReactivateOptions)."""
    self.prev_ctrls_status = [ctrl.IsEnabled() for ctrl in self.ctrls]
    for ctrl in self.ctrls:
        ctrl.Disable()
def _ReactivateOptions(self):
    """Restore each control to the enabled state recorded by _FreezeOptions()."""
    # Idiom fix: iterate the pairs directly instead of `for i in range(len(...))`.
    for ctrl, was_enabled in zip(self.ctrls, self.prev_ctrls_status):
        ctrl.Enable(was_enabled)
def _OnStart(self):
    """Validate the GUI settings, write the temp config file, launch run_pasta.

    Fixes relative to the original:
    - the "does not exist" dialog now interpolates the filename (the
      original passed the literal, unformatted '%s' string to the dialog)
    - a leftover debug `print(type(input_filename))` was removed
    - the config file is opened with mode 'r' ('rU' was removed in
      Python 3.11; universal newlines are the default)
    """
    if self.process is None:
        # --- validation -------------------------------------------------
        if (not self.checkbox_stop_time.Value) and (not self.checkbox_stop_iter.Value):
            self._show_error_dialog("Termination conditions are not set correctly. Either a time limit, an iteration limit, or both must be used.\n", caption="PASTA Settings Error")
            return
        if self.checkbox_stop_iter.Value and (not self.validate_iter_limit_text()):
            self._show_error_dialog("Iteration limit is not set correctly. Enter a positive integer in that field.\n", caption="PASTA Settings Error")
            return
        input_filename = self._encode_arg(self.txt_seqfn.GetValue())
        if not input_filename:
            self._show_error_dialog("Input sequence file(s) are required.\n", caption="PASTA Settings Error")
            return
        if not os.path.exists(input_filename):
            self._show_error_dialog('Input sequence file "%s" does not exist!\n' % input_filename, caption="PASTA Settings Error")
            return
        if self.cb_multilocus.Value:
            if not os.path.isdir(input_filename):
                self._show_error_dialog('Input sequence file specification should be a directory when multilocus model is used.\n', caption="PASTA Settings Error")
                return
        elif not os.path.isfile(input_filename):
            self._show_error_dialog('Input sequence file must be a file when single-locus mode used.\n', caption="PASTA Settings Error")
            return
        cfg_success = self._create_config_file()
        if not cfg_success:
            return
        # --- build the command line -------------------------------------
        #command = [filemgr.quoted_file_path(x) for x in get_invoke_run_pasta_command()]
        command = get_invoke_run_pasta_command()
        treefilename = self._encode_arg(self.txt_treefn.GetValue())
        jobname = self._encode_arg(self.txt_jobname.GetValue())
        if not jobname:
            wx.MessageBox("Job name cannot be empty, it is REQUIRED by PASTA!", "WARNING", wx.OK | wx.ICON_WARNING)
            self._remove_config_file()
            return
        command.extend(["-i", filemgr.quoted_file_path(input_filename)])
        if treefilename and os.path.isfile(treefilename):
            command.extend(["-t", filemgr.quoted_file_path(treefilename)])
        command.extend(["-j", filemgr.quoted_file_path(jobname)])
        if self.datatype.Value == "DNA":
            dt = "dna"
        elif self.datatype.Value == "RNA":
            dt = "rna"
        else:
            dt = "protein"
        command.extend(["-d", dt])
        # The config file path is the final positional argument.
        command.extend(["%s" % filemgr.quoted_file_path(self.process_cfg_file)])
        # --- launch (or emulate) ----------------------------------------
        if PASTA_GUI_ONLY_PRINTS_CONFIG:
            self.log.AppendText("Command is:\n '%s'\n" % "' '".join(command))
            self.log.AppendText("config_file:\n#############################################################\n")
            for line in open(self.process_cfg_file, 'r'):
                self.log.AppendText(line)
            self.log.AppendText("#############################################################\n")
            self._remove_config_file()
            self.statusbar.SetStatusText("\n\nRun emulated!\n\n")
        else:
            self.process = wx.Process(self)
            self.process.Redirect()
            self.pid = wx.Execute(" ".join(command), wx.EXEC_ASYNC, self.process)
            self.button.SetLabel("Stop")
            self.statusbar.SetStatusText("PASTA Running!")
            self._FreezeOptions()
    else:
        self.log.AppendText("Job %s is still running!\n" % self.txt_jobname.GetValue())
def _OnStop(self):
    """Kill the running PASTA job (if any) and restore the idle UI state."""
    if self.process is None:
        self.log.AppendText("No active PASTA jobs to terminate!\n")
        return
    self.log.AppendText("Job %s is terminated early.\n" % self.txt_jobname.GetValue())
    self.process.Kill(self.pid, wx.SIGKILL)
    self._remove_config_file()
    self._ReactivateOptions()
    self.button.SetLabel("Start")
    self.statusbar.SetStatusText("PASTA Ready!")
def _encode_arg(self, arg, encoding='utf-8'):
    """Return *arg* unchanged.

    Historically this encoded unicode arguments to bytes (see VCS
    history); under Python 3 text is passed through as-is.  The
    ``encoding`` parameter is kept for backward compatibility with
    existing call sites.
    """
    return arg
def _create_config_file(self):
    """Translate the GUI widget state into a PASTA configuration and write
    it to a uniquely named temp file (path stored in self.process_cfg_file).

    Returns True on success, False if the max-memory field fails validation.
    """
    from pasta.configure import get_configuration
    cfg = get_configuration()
    #if self.txt_resultdir.Value:
    # basefilename = os.path.basename(self.txt_seqfn.GetValue())
    # jobname = self.txt_jobname.GetValue()
    # resultdir = self.txt_resultdir.Value
    # cfg.commandline.output = os.path.join(resultdir, basefilename+"_%s.aln" % jobname )
    # cfg.commandline.result = os.path.join(resultdir, basefilename+"_%s.tre" % jobname )
    cfg.sate.aligner = self.cb_tools["aligner"].Value
    cfg.sate.tree_estimator = self.cb_tools["treeestimator"].Value
    if self.cb_tools["treeestimator"].Value.lower() == "raxml":
        cfg.raxml.model = self.cb_tools["model"].Value
    else:
        # FastTree: map the GUI model label to FastTree command-line flags.
        # (JC and JTT are FastTree defaults, hence the empty/"-gamma"-only
        # strings for those entries.)
        model_desc = self.cb_tools["model"].Value
        if model_desc == "GTR+G20":
            cfg.fasttree.model = "-gtr -gamma"
        elif model_desc == "GTR+CAT":
            cfg.fasttree.model = "-gtr"
        elif model_desc == "JC+G20":
            cfg.fasttree.model = "-gamma"
        elif model_desc == "JC+CAT":
            cfg.fasttree.model = ""
        elif model_desc == "JTT+G20":
            cfg.fasttree.model = "-gamma"
        elif model_desc == "JTT+CAT":
            cfg.fasttree.model = ""
        elif model_desc == "WAG+G20":
            cfg.fasttree.model = "-wag -gamma"
        elif model_desc == "WAG+CAT":
            cfg.fasttree.model = "-wag"
        else:
            raise Exception("Unrecognized model: %s" % model_desc)
    cfg.commandline.keeptemp = True
    cfg.commandline.keepalignmenttemps = True
    if self.checkbox_aligned.Value:
        cfg.commandline.aligned = True
    #cfg.commandline.untrusted = not bool(self.trusted_data.Value)
    if self.cb_multilocus.Value:
        cfg.commandline.multilocus = True
    if self.two_phase.Value:
        cfg.commandline.two_phase = True
        cfg.commandline.raxml_search_after = False
    else:
        cfg.commandline.two_phase = False
        cfg.commandline.raxml_search_after = bool(self.raxml_after.Value)
    cfg.sate.merger = self.cb_tools["merger"].Value
    cfg.sate.break_strategy = self.cb_decomp.Value
    cfg.sate.start_tree_search_from_current = True
    # The two max-subproblem radio choices are mutually exclusive; the
    # unused criterion is zeroed out.
    if self.rb_maxsub1.Value:
        cfg.sate.max_subproblem_frac = float(self.cb_maxsub1.Value)/100.0
        cfg.sate.max_subproblem_size = 0
    elif self.rb_maxsub2.Value:
        cfg.sate.max_subproblem_size = self.cb_maxsub2.Value
        cfg.sate.max_subproblem_frac = 0.0
    # -1 means "limit disabled"; enabled limits are filled in below.
    cfg.sate.time_limit = -1
    cfg.sate.iter_limit = -1
    cfg.sate.after_blind_time_without_imp_limit = -1
    cfg.sate.after_blind_iter_without_imp_limit = -1
    # if True:
    # cfg.pasta.move_to_blind_on_worse_score = True
    # if self.cb_apply_stop_rule.GetValue() == "After Last Improvement":
    # if self.checkbox_stop_time.Value:
    # cfg.pasta.after_blind_time_without_imp_limit = float(self.cb_stop1.Value)*3600
    # if self.checkbox_stop_iter.Value:
    # cfg.pasta.after_blind_iter_without_imp_limit = int(self.text_stop2.Value)
    # else:
    # if self.checkbox_stop_time.Value:
    # cfg.pasta.time_limit = float(self.cb_stop1.Value)*3600
    # if self.checkbox_stop_iter.Value:
    # cfg.pasta.iter_limit = int(self.text_stop2.Value)
    # else:
    if self.checkbox_stop_time.Value:
        # GUI control is in hours; config expects seconds.
        cfg.sate.time_limit = float(self.cb_stop1.Value)*3600
    if self.checkbox_stop_iter.Value:
        cfg.sate.iter_limit = int(self.text_stop2.Value)
    cfg.sate.return_final_tree_and_alignment = self.cb_tree_and_alignment.GetValue() == "Final"
    cfg.sate.output_directory = self._encode_arg(self.txt_outputdir.GetValue())
    cfg.sate.num_cpus = self.cb_ncpu.Value
    max_mb = self.txt_maxmb.GetValue()
    if not self.validate_max_mb(max_mb):
        return False
    cfg.sate.max_mem_mb = max_mb
    # this creates a file that cannot be deleted while the Python
    # process is running (under the mess that is called "Windows")
    #tf, self.process_cfg_file = tempfile.mkstemp(dir=pasta_home_dir(),
    # suffix="_internal.cfg")
    # NOTE(review): NamedTemporaryFile is used only to obtain a unique
    # name — close() deletes the file, and save_to_filepath() then
    # recreates it at that path (small TOCTOU window, tolerated here).
    tf = tempfile.NamedTemporaryFile(suffix="_internal.cfg", dir=pasta_home_dir())
    self.process_cfg_file = tf.name
    tf.close()
    cfg.save_to_filepath(self.process_cfg_file)
    return True
def _remove_config_file(self):
    """Delete the temporary config file, unless running in GUI dev mode."""
    if "PASTA_GUIDEVMODE" in os.environ:
        # Dev mode: keep the generated config around for inspection.
        return
    if self.process_cfg_file and os.path.exists(self.process_cfg_file):
        try:
            os.remove(self.process_cfg_file)
        except OSError:
            # On Windows the config file sometimes cannot be deleted
            # ("...because it is being used by another process...").
            # Narrowed from a bare `except:` so unrelated errors (e.g.
            # KeyboardInterrupt) are no longer silently swallowed.
            pass
class PastaApp(wx.PySimpleApp):
    # NOTE(review): wx.PySimpleApp belongs to classic wxPython and was
    # removed in wxPython 4 ("Phoenix") — confirm the supported wx version.
    def OnInit(self):
        """Create and show the main frame sized to the usable desktop area."""
        self.frame = PastaFrame(size=wx.Display().GetClientArea())
        self.frame.Show(True)
        self.SetTopWindow(self.frame)
        return True
def main_gui():
    """Entry point: construct the wx application and run its event loop."""
    PastaApp().MainLoop()
ICO_STR = """AAABAAMAEBAAAAAAIABoBAAANgAAACAgAAAAACAAqBAAAJ6EAAAwMAAAAAAgAKglAABGFQAAKAAA\nABAAAAAgAAAAAQAgAAAAAABABAAAAAAAAAAAAAAAAAAAAAAAAAAAAGsAAADvAAAAqQAAAEUAAACX\n////Af///wEAAAC3AAAAJQAAAGsAAABv////Af///wEAAACHAAAA8QAAAGkAAAD1AAAA/wAAAP8A\nAABpAAAA4f///wEAAAALAAAA/wAAABkAAACPAAAAk////wEAAAAVAAAA+wAAAP8AAADXAAAA8wAA\nALsAAAD7AAAAgQAAAPsAAAAlAAAAQwAAAPkAAAADAAAAjwAAAJP///8BAAAAUQAAAO0AAABfAAAA\ntwAAAGv///8BAAAAsQAAAHsAAAD/AAAA/wAAAP8AAADd////AQAAAI8AAACT////AQAAAHUAAACl\n////AQAAAB3///8B////AQAAAKcAAAB7AAAA6QAAAP8AAAD/AAAAv////wEAAACPAAAAk////wEA\nAACHAAAAsQAAAFMAAABT////AQAAABcAAADlAAAAcwAAAM0AAACvAAAAwQAAAKP///8BAAAAjwAA\nAJP///8BAAAAjwAAAP8AAAD/AAAA/wAAAB0AAADfAAAA/wAAAFsAAACvAAAAawAAAJMAAACH////\nAQAAAI8AAACT////AQAAAIsAAADnAAAAzwAAAPkAAACZAAAA/wAAAP0AAAAhAAAAkwAAAIUAAACv\nAAAAaf///wEAAACPAAAAk////wEAAAB7AAAAmQAAACMAAADrAAAA2QAAAP8AAACF////AQAAAHUA\nAAChAAAAyQAAAE3///8BAAAAjwAAAJP///8BAAAAWwAAANUAAABjAAAAzQAAAPMAAABv////Af//\n/wEAAABZAAAAvQAAAOUAAAAv////AQAAAI8AAACT////AQAAACMAAAD/AAAA/wAAAJUAAAD9AAAA\nE////wH///8BAAAAOwAAANsAAAD7AAAAEf///wEAAACPAAAAk////wH///8BAAAAtwAAAP0AAAAz\nAAAA9QAAACsAAAA3AAAAKQAAAB8AAAD9AAAA8wAAAAMAAAA3AAAApwAAAKkAAAA3AAAAAwAAAAsA\nAAAp////AQAAANkAAADvAAAA/QAAADMAAAAFAAAA+wAAANcAAAAJAAAA/wAAAP8AAAD/AAAA/wAA\nAAsAAAAFAAAAXf///wEAAACdAAAA/wAAAP8AAAAz////AQAAAOMAAAC5AAAACQAAAP8AAAD/AAAA\n/wAAAP8AAAAL////AQAAAKf///8BAAAAJQAAAMUAAACPAAAAC////wEAAAB3AAAAXwAAAAUAAACV\nAAAAlQAAAJUAAACVAAAAB////wEAAACZAAAAIf///wH///8B////Af///wH///8B////Af///wH/\n//8B////Af///wH///8B////Af///wH///8BAAAAXQAAAGsAAP//AAD//wAA//8AAP//AAD//wAA\n//8AAP//AAD//wAA//8AAP//AAD//wAA//8AAP//AAD//wAA//8AAP//KAAAACAAAABAAAAAAQAg\nAAAAAACAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAkAAABTAAAAyQAAAPkAAAC7AAAAMwAAAAcAAABb\nAAAAgQAAAEX///8B////Af///wH///8BAAAAbQAAAIEAAAA3////AQAAAB0AAABzAAAAdQAAAB//\n//8B////Af///wH///8BAAAAFwAAAJUAAAD3AAAA0QAAAFUAAAAJAAAAZwAAAOcAAAD/AAAA/wAA\nAP0AAAC1AAAABQAAAK0AAAD/AAAAmf///wH///8B////Af///wEAAADrAAAA/wAAAF3///8BAAAA\nOwAAAOUAAADnAAAAP////wH///8B////Af///
wEAAAB5AAAA9wAAAP8AAAD/AAAA5wAAAF8AAADp\nAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAAxAAAAkQAAAP8AAAC3////Af///wH///8BAAAACwAAAP8A\nAAD/AAAAPf///wEAAAA7AAAA5QAAAOcAAAA/////Af///wH///8BAAAADQAAAO0AAAD/AAAA/wAA\nAP8AAAD/AAAArwAAAOkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAHEAAAB3AAAA/wAAAM////8B////\nAf///wEAAAAjAAAA/wAAAP8AAAAj////AQAAADsAAADlAAAA5wAAAD////8B////Af///wEAAABF\nAAAA/wAAAP8AAAD/AAAA/wAAAP8AAACvAAAA6QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAApwAAAFkA\nAAD/AAAA7////wH///8B////AQAAAEEAAAD/AAAA+wAAAAf///8BAAAAOwAAAOUAAADnAAAAP///\n/wH///8B////AQAAAI0AAAD/AAAA+wAAAKsAAAC3AAAA9QAAAK8AAADpAAAA+QAAAIUAAABpAAAA\n8QAAAP8AAAC5AAAASwAAAP8AAAD9AAAASQAAAEcAAABHAAAAgwAAAP8AAADp////Af///wEAAAA7\nAAAA5QAAAOcAAAA/////Af///wEAAAAHAAAArQAAAP8AAAC/AAAACQAAABMAAACPAAAAqwAAANUA\nAABX////Af///wEAAAB7AAAA/wAAAMUAAAA3AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAA\nAMn///8B////AQAAADsAAADlAAAA5wAAAD////8B////AQAAABsAAADFAAAA/QAAAGP///8B////\nAQAAABMAAABXAAAAeQAAAAv///8B////AQAAAFMAAAD5AAAAywAAACcAAAD7AAAA/wAAAP8AAAD/\nAAAA/wAAAP8AAAD/AAAArf///wH///8BAAAAOwAAAOUAAADnAAAAP////wH///8BAAAAJQAAAM8A\nAADvAAAARf///wH///8B////AQAAAAkAAAAF////Af///wH///8BAAAATQAAAPcAAADPAAAAJwAA\nAOEAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAACN////Af///wEAAAA7AAAA5QAAAOcAAAA/////\nAf///wEAAAAxAAAA2wAAAOMAAAA5////Af///wH///8B////Af///wH///8B////Af///wEAAABd\nAAAA/QAAANEAAAAnAAAAxwAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAHP///8B////AQAAADsA\nAADlAAAA5wAAAD////8B////AQAAADUAAADfAAAA8wAAALkAAACnAAAApwAAAKcAAACl////Af//\n/wH///8BAAAAAwAAAJ8AAAD/AAAAywAAACEAAACnAAAA/wAAAPEAAADbAAAA3wAAAPkAAAD9AAAA\nVf///wH///8BAAAAOwAAAOUAAADnAAAAP////wH///8BAAAAOQAAAOMAAAD/AAAA/wAAAP8AAAD/\nAAAA/wAAAP////8B////AQAAAAMAAABTAAAA9QAAAP8AAADFAAAAGwAAAI0AAAD/AAAAuwAAADMA\nAABTAAAA3QAAAPEAAABJ////Af///wEAAAA7AAAA5QAAAOcAAAA/////Af///wEAAAA5AAAA4wAA\nAP8AAAD/AAAA/wAAAP8AAAD/AAAA/////wEAAAAHAAAAjwAAAPUAAAD/AAAA/wAAALkAAAARAAAA\nawAAAP8AAAC5AAAADwAAADkAAADhAAAA4QAAADf///8B////AQAAADsAAADlAAAA5wAAAD////8B\n////AQAAADcAAADhAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD3AAAAAwAAAGkAAAD5AAAA/wAAAP8A\nAAD/AAAAo
QAAAAMAAABVAAAA+wAAAMUAAAAbAAAARQAAAO0AAADVAAAAK////wH///8BAAAAOwAA\nAOUAAADnAAAAP////wH///8BAAAAMwAAAN0AAADxAAAAqwAAAJUAAACrAAAA/wAAAPEAAAAvAAAA\n1wAAAP8AAAD/AAAA/wAAAP8AAABn////AQAAAEUAAADtAAAA1QAAACsAAABVAAAA+wAAAMMAAAAb\n////Af///wEAAAA7AAAA5QAAAOcAAAA/////Af///wEAAAArAAAA0wAAAOcAAAA/////AQAAADsA\nAAD/AAAA3wAAAGUAAAD7AAAA/wAAAP8AAAD/AAAA9wAAAB3///8BAAAANwAAAOEAAADhAAAANwAA\nAGkAAAD/AAAAtwAAAA3///8B////AQAAADsAAADlAAAA5wAAAD////8B////AQAAACEAAADLAAAA\n9QAAAEv///8BAAAATwAAAP8AAADPAAAAnwAAAP8AAAD/AAAA/wAAAPUAAAB/////Af///wEAAAAn\nAAAA0QAAAPEAAABHAAAAhwAAAP8AAACj////Af///wH///8BAAAAOwAAAOUAAADnAAAAP////wH/\n//8BAAAADwAAALkAAAD/AAAAfwAAAAMAAACFAAAA/wAAAKsAAADLAAAA/wAAAP8AAAD/AAAAiQAA\nABP///8B////AQAAABsAAADDAAAA+wAAAFMAAACfAAAA/wAAAIv///8B////Af///wEAAAA7AAAA\n5QAAAOcAAAA/////Af///wEAAAADAAAAowAAAP8AAADXAAAAPQAAAMcAAAD/AAAAhwAAAOMAAAD/\nAAAA4wAAAFv///8B////Af///wH///8BAAAACQAAALMAAAD/AAAAbQAAAL0AAAD/AAAAa////wH/\n//8B////AQAAADsAAADlAAAA5wAAAD////8B////Af///wEAAABlAAAA/wAAAP8AAAD/AAAA/wAA\nAPcAAABTAAAA7QAAAP8AAAB3AAAAB////wH///8B////Af///wH///8BAAAAowAAAP8AAACHAAAA\n1QAAAP8AAABR////Af///wH///8BAAAAOwAAAOUAAADnAAAAP////wH///8B////AQAAACUAAAD/\nAAAA/wAAAP8AAAD/AAAA2wAAADEAAAD7AAAA/wAAAC////8B////Af///wH///8B////Af///wEA\nAACDAAAA/wAAAKUAAADzAAAA/wAAAC////8B////Af///wEAAAA7AAAA5QAAAOcAAAA/////Af//\n/wH///8B////AQAAALEAAAD/AAAA/wAAAP8AAACZAAAABwAAAPsAAAD/AAAAGf///wH///8B////\nAf///wH///8B////AQAAAGkAAAD/AAAAywAAAP8AAAD/AAAAFf///wH///8B////AQAAADsAAADl\nAAAA5wAAAD////8B////Af///wH///8BAAAARwAAAOUAAAD/AAAA8wAAAC3///8BAAAA9QAAAP8A\nAAAx////Af///wEAAAAdAAAAPf///wH///8BAAAASQAAAP8AAAD7AAAA/wAAAPX///8B////Af//\n/wH///8BAAAAOwAAAOUAAADnAAAAP////wH///8B////Af///wH///8BAAAAKQAAAHMAAAAx////\nAf///wEAAADlAAAA/wAAAHUAAAADAAAAGwAAAKUAAABn////Af///wEAAAAvAAAA/wAAAP8AAAD/\nAAAA2////wEAAAAHAAAAawAAAGsAAACNAAAA7wAAAPEAAACPAAAAawAAAGsAAAAJ////Af///wH/\n//8B////Af///wH///8B////AQAAAMcAAAD/AAAA9QAAAMkAAAD1AAAA/wAAAGf///8B////AQAA\nAA8AAAD/AAAA/wAAAP8AAAC7////AQAAAA8AAAD/AAAA/wAAAP8AAAD/AAA
A/wAAAP8AAAD/AAAA\n/wAAABf///8B////AQAAAAkAAABDAAAAD////wH///8BAAAAowAAAP8AAAD/AAAA/wAAAP8AAAD/\nAAAAZ////wH///8B////AQAAAPMAAAD/AAAA/wAAAKH///8BAAAADwAAAP8AAAD/AAAA/wAAAP8A\nAAD/AAAA/wAAAP8AAAD/AAAAF////wH///8BAAAACwAAAN8AAABB////Af///wEAAABlAAAA+wAA\nAP8AAAD/AAAA/wAAAP8AAABn////Af///wH///8BAAAA0wAAAP8AAAD/AAAAf////wEAAAAPAAAA\n/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAAX////Af///wH///8BAAAAwwAAAIP///8B\n////AQAAADcAAADfAAAA/wAAAP8AAAD/AAAA/wAAAGf///8B////Af///wEAAAC5AAAA/wAAAP8A\nAABl////AQAAAA8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAABf///8B////Af//\n/wEAAACPAAAAyf///wH///8BAAAABQAAAH0AAAD9AAAA/wAAAPkAAADFAAAAKf///wH///8B////\nAQAAAI0AAADrAAAA5QAAAEn///8BAAAADwAAAOsAAADrAAAA6wAAAOsAAADrAAAA6wAAAOsAAADr\nAAAAFf///wH///8B////AQAAAFUAAAD5AAAAIf///wH///8BAAAAEQAAAHsAAACbAAAAYQAAAB//\n//8B////Af///wH///8BAAAAJQAAAEEAAAA9AAAAE////wEAAAAFAAAAQQAAAEEAAABBAAAAQQAA\nAEEAAABBAAAAQQAAAEEAAAAH////Af///wH///8BAAAANwAAAOEAAABj////Af///wH///8B////\nAf///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B\n////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wEAAAAXAAAAwQAAAK8A\nAAAL////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af//\n/wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////\nAQAAAAMAAACXAAAAxwAAACcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACgAAAAwAAAAYAAAAAEAIAAAAAAAgCUAAAAA\nAAAAAAAAAAAAAAAAAAD///8B////AQAAAC0AAACTAAAA4QAAAP8AAADNAAAAOf///wH///8BAAAA\nCwAAAEEAAABBAAAAQQAAABH///8B////Af///wH///8B////Af///wEAAAAvAAAAQQAAAEEAAAAt\n////Af///wH///8BAAAAKwAAAEEAAABBAAAAL////wH///8B////Af///wH///8B////Af///wH/\n//8BAAAACwAAAJsAAAD3AAAA7QAAAJ8AAAAx////Af///wH///8BAAAARwAAAPUAAAD/AAAA/wAA\nAP8AAAD/AAAA9QAAADH///8BAAAAHQAAAP8AAAD/AAAA/wAAAFP///8B////Af///wH///8B////\nAf///wEAAADPAAAA/wAAAP8AAACf////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B\n////Af///wH///8B////Af///wH///8
BAAAAtwAAAP8AAAD/AAAA/wAAAP8AAAD1AAAARf///wEA\nAABZAAAA+wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAMH///8BAAAAAwAAAPsAAAD/AAAA/wAA\nAG////8B////Af///wH///8B////Af///wEAAADrAAAA/wAAAP8AAACD////Af///wH///8BAAAA\nrwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8B////Af///wEAAABjAAAA/wAAAP8AAAD/\nAAAA/wAAAP8AAAD/AAAA+QAAAEcAAADfAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8A\nAAAv////AQAAAOEAAAD/AAAA/wAAAIn///8B////Af///wH///8B////AQAAAAcAAAD/AAAA/wAA\nAP8AAABl////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8B////\nAf///wEAAADVAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAIcAAADfAAAA/wAAAP8AAAD/\nAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAB7////AQAAAMUAAAD/AAAA/wAAAKX///8B////Af///wH/\n//8B////AQAAACEAAAD/AAAA/wAAAP8AAABJ////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf//\n/wH///8B////Af///wH///8B////AQAAADkAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA\n/wAAAIcAAADfAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAC/////AQAAAKcAAAD/\nAAAA/wAAAMH///8B////Af///wH///8B////AQAAAD0AAAD/AAAA/wAAAP8AAAAr////Af///wH/\n//8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8B////AQAAAH8AAAD/AAAA/wAA\nAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAIcAAADfAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA\n/wAAAP8AAADv////AQAAAIsAAAD/AAAA/wAAAN3///8B////Af///wH///8B////AQAAAFkAAAD/\nAAAA/wAAAP8AAAAN////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH/\n//8B////AQAAAMMAAAD/AAAA/wAAAP8AAAD9AAAA5QAAAP8AAAD/AAAA/wAAAIcAAADfAAAA/wAA\nAP8AAAD/AAAA/QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAHQAAAG0AAAD/AAAA/wAAAPf///8B////\nAf///wH///8B////AQAAAHUAAAD/AAAA/wAAAPH///8B////Af///wH///8BAAAArwAAAP8AAAD/\nAAAAuf///wH///8B////Af///wH///8B////AQAAAPMAAAD/AAAA/wAAAO8AAAAhAAAAAwAAAFMA\nAADVAAAA/wAAAIcAAADfAAAA/wAAAPMAAABlAAAACwAAACcAAADhAAAA/wAAAP8AAAD/AAAAOQAA\nAE8AAAD/AAAA/wAAAP8AAABxAAAAaQAAAGkAAABpAAAAaQAAALkAAAD/AAAA/wAAANP///8B////\nAf///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8BAAAAHQAAAP8AAAD/\nAAAA/wAAAHX///8B////Af///wEAAAAfAAAA7wAAAIcAAADfAAAA9QAAADX///8B////Af///wEA\nAABTAAAA/wAAAP8AAAD/AAAASwAAADMAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAA\nAP8
AAAD/AAAA/wAAALf///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////\nAf///wH///8BAAAARwAAAP8AAAD/AAAA/wAAACX///8B////Af///wH///8BAAAAUQAAAIcAAADf\nAAAAVf///wH///8B////Af///wEAAAALAAAA+wAAAP8AAAD/AAAAXwAAABUAAAD/AAAA/wAAAP8A\nAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAJn///8B////Af///wH///8BAAAArwAA\nAP8AAAD/AAAAuf///wH///8B////Af///wH///8BAAAAYQAAAP8AAAD/AAAA5////wH///8B////\nAf///wH///8B////AQAAACkAAACL////Af///wH///8B////Af///wH///8BAAAA6QAAAP8AAAD/\nAAAAZ////wEAAAD3AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAHv/\n//8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8BAAAAdwAA\nAP8AAAD/AAAAw////wH///8B////Af///wH///8B////Af///wEAAAAJ////Af///wH///8B////\nAf///wH///8BAAAA3QAAAP8AAAD/AAAAb////wEAAADbAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/\nAAAA/wAAAP8AAAD/AAAA/wAAAF////8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH/\n//8B////Af///wH///8BAAAAiwAAAP8AAAD/AAAAr////wH///8B////Af///wH///8B////Af//\n/wH///8B////Af///wH///8B////Af///wH///8BAAAA8wAAAP8AAAD/AAAAd////wEAAAC9AAAA\n/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAEH///8B////Af///wH///8B\nAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8BAAAAmwAAAP8AAAD/AAAAm////wH/\n//8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wEAAAAVAAAA/wAA\nAP8AAAD/AAAAc////wEAAAChAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA\n/wAAACX///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8B\nAAAAoQAAAP8AAAD/AAAA/QAAAPkAAAD5AAAA+QAAAPkAAAD5AAAA+QAAAPf///8B////Af///wH/\n//8B////Af///wEAAAA/AAAA/wAAAP8AAAD/AAAAZf///wEAAACDAAAA/wAAAP8AAAD/AAAA/wAA\nAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAAf///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAA\nuf///wH///8B////Af///wH///8BAAAApwAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/\nAAAA/wAAAP////8B////Af///wH///8B////AQAAAA0AAADTAAAA/wAAAP8AAAD/AAAAV////wEA\nAABnAAAA/wAAAP8AAAD7AAAAkQAAAJEAAACRAAAAwQAAAP8AAAD/AAAA6f///wH///8B////Af//\n/wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8BAAAArQAAAP8AAAD/AAAA\n/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP////8B////Af///
wH///8BAAAABwAAALEAAAD/\nAAAA/wAAAP8AAAD/AAAAS////wEAAABJAAAA/wAAAP8AAAD/AAAACf///wH///8BAAAAgwAAAP8A\nAAD/AAAAzf///wH///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af//\n/wH///8BAAAAqwAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP3///8B////\nAf///wEAAAAlAAAA0wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAO////wEAAAArAAAA/wAAAP8AAAD/\nAAAAI////wH///8BAAAAnwAAAP8AAAD/AAAAr////wH///8B////Af///wH///8BAAAArwAAAP8A\nAAD/AAAAuf///wH///8B////Af///wH///8BAAAApQAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAA\nAP8AAAD/AAAA/wAAAPX///8B////AQAAAB8AAADjAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA\nFf///wEAAAAPAAAA/wAAAP8AAAD/AAAAP////wH///8BAAAAuQAAAP8AAAD/AAAAk////wH///8B\n////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8BAAAAnwAAAP8A\nAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAO////8BAAAABwAAANsAAAD/AAAA/wAA\nAP8AAAD/AAAA/wAAAP8AAADj////Af///wH///8BAAAA8QAAAP8AAAD/AAAAW////wH///8BAAAA\n1QAAAP8AAAD/AAAAdf///wH///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B\n////Af///wH///8BAAAAlwAAAP8AAAD/AAAAwwAAAGEAAABhAAAAYQAAAI8AAAD/AAAA/wAAAOf/\n//8BAAAAYwAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAACx////Af///wH///8BAAAA1QAA\nAP8AAAD/AAAAd////wH///8BAAAA8QAAAP8AAAD/AAAAV////wH///8B////Af///wH///8BAAAA\nrwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8BAAAAhQAAAP8AAAD/AAAAsf///wH///8B\n////AQAAAFUAAAD/AAAA/wAAANf///8BAAAA1wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8A\nAABv////Af///wH///8BAAAAtwAAAP8AAAD/AAAAkf///wEAAAALAAAA/wAAAP8AAAD/AAAAO///\n/wH///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8BAAAA\nbwAAAP8AAAD/AAAAyf///wH///8B////AQAAAGEAAAD/AAAA/wAAAMMAAAAtAAAA/wAAAP8AAAD/\nAAAA/wAAAP8AAAD/AAAA/wAAAOsAAAAL////Af///wH///8BAAAAmQAAAP8AAAD/AAAArf///wEA\nAAAnAAAA/wAAAP8AAAD/AAAAHf///wH///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf//\n/wH///8B////Af///wH///8BAAAAWQAAAP8AAAD/AAAA6////wH///8B////AQAAAH8AAAD/AAAA\n/wAAAK8AAABfAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAHf///8B////Af///wH///8B\nAAAAfQAAAP8AAAD/AAAAyf///wEAAABBAAAA/wAAAP8AAAD9AAAAA////wH///8B////Af///wH/\n//8BAAAArwAAAP8AAAD/AAAAu
f///wH///8B////Af///wH///8BAAAAOQAAAP8AAAD/AAAA/wAA\nACX///8B////AQAAALEAAAD/AAAA/wAAAI8AAACRAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA\nnwAAAAP///8B////Af///wH///8BAAAAXwAAAP8AAAD/AAAA5f///wEAAABdAAAA/wAAAP8AAADj\n////Af///wH///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH/\n//8BAAAAEQAAAP8AAAD/AAAA/wAAAHf///8BAAAACwAAAO8AAAD/AAAA/wAAAGcAAAC/AAAA/wAA\nAP8AAAD/AAAA/wAAAP8AAACN////Af///wH///8B////Af///wH///8BAAAAQwAAAP8AAAD/AAAA\n/QAAAAMAAAB3AAAA/wAAAP8AAADF////Af///wH///8B////Af///wH///8BAAAArwAAAP8AAAD/\nAAAAuf///wH///8B////Af///wH///8B////AQAAAOsAAAD/AAAA/wAAAOsAAAA/AAAAkQAAAP8A\nAAD/AAAA/wAAAD8AAADRAAAA/wAAAP8AAAD/AAAA7QAAAEn///8B////Af///wH///8B////Af//\n/wH///8BAAAAJQAAAP8AAAD/AAAA/wAAABsAAACTAAAA/wAAAP8AAACp////Af///wH///8B////\nAf///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8B////AQAAAK8AAAD/\nAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA+wAAAAsAAADdAAAA/wAAAP8AAAD3AAAAO////wH/\n//8B////Af///wH///8B////Af///wH///8BAAAACQAAAP8AAAD/AAAA/wAAADcAAACvAAAA/wAA\nAP8AAACL////Af///wH///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////\nAf///wH///8B////AQAAAGsAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAAxf///wEAAADp\nAAAA/wAAAP8AAACB////Af///wH///8B////Af///wH///8B////Af///wH///8B////AQAAAOsA\nAAD/AAAA/wAAAFMAAADJAAAA/wAAAP8AAABv////Af///wH///8B////Af///wH///8BAAAArwAA\nAP8AAAD/AAAAuf///wH///8B////Af///wH///8B////AQAAAB8AAAD9AAAA/wAAAP8AAAD/AAAA\n/wAAAP8AAAD/AAAAef///wEAAAD3AAAA/wAAAP8AAABT////Af///wH///8B////Af///wH///8B\n////Af///wH///8B////AQAAAM0AAAD/AAAA/wAAAG8AAADlAAAA/wAAAP8AAABR////Af///wH/\n//8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8B////Af//\n/wEAAACzAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD7AAAAGf///wEAAAD9AAAA/wAAAP8AAAAt////\nAf///wH///8B////Af///wH///8B////Af///wH///8B////AQAAAK8AAAD/AAAA/wAAAI0AAAD9\nAAAA/wAAAP8AAAAz////Af///wH///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH/\n//8B////Af///wH///8B////Af///wEAAAA5AAAA/QAAAP8AAAD/AAAA/wAAAP8AAACf////Af//\n/wEAAAD3AAAA/wAAAP8AAAAj////Af///wH///8B////Af///wH///8B////Af///wH///8B///
/\nAQAAAJMAAAD/AAAA/wAAAMEAAAD/AAAA/wAAAP8AAAAX////Af///wH///8B////Af///wH///8B\nAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8B////Af///wH///8BAAAAiQAAAP8A\nAAD/AAAA/wAAAOUAAAAV////Af///wEAAADzAAAA/wAAAP8AAAA3////Af///wH///8B////Af//\n/wEAAAA7////Af///wH///8B////AQAAAHUAAAD/AAAA/wAAAPUAAAD/AAAA/wAAAPn///8B////\nAf///wH///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAAuf///wH///8B////Af///wH///8B\n////Af///wH///8B////AQAAAF0AAACzAAAAnQAAAB3///8B////Af///wEAAADnAAAA/wAAAP8A\nAABr////Af///wH///8B////AQAAAHsAAACZ////Af///wH///8B////AQAAAFkAAAD/AAAA/wAA\nAP8AAAD/AAAA/wAAAN3///8B////Af///wH///8B////Af///wH///8BAAAArwAAAP8AAAD/AAAA\nuf///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B\n////Af///wEAAADPAAAA/wAAAP8AAADPAAAACf///wEAAAAFAAAAbQAAAP8AAACZ////Af///wH/\n//8B////AQAAADsAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAL////8B////AQAAAA0AAAChAAAAoQAA\nAKEAAAChAAAA4QAAAP8AAAD/AAAA5QAAAKEAAAChAAAAoQAAAKEAAAAV////Af///wH///8B////\nAf///wH///8B////Af///wH///8B////Af///wEAAAC3AAAA/wAAAP8AAAD/AAAA0wAAAJ0AAADp\nAAAA/wAAAP8AAACZ////Af///wH///8B////AQAAAB0AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAKH/\n//8B////AQAAABcAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAA\nAP8AAAAh////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wEAAACTAAAA\n/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAACZ////Af///wH///8B////AQAAAAUAAAD9\nAAAA/wAAAP8AAAD/AAAA/wAAAIX///8B////AQAAABcAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8A\nAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAAh////Af///wH///8B////AQAAACkAAADpAAAAhf//\n/wH///8B////Af///wEAAABlAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAACZ////\nAf///wH///8B////Af///wEAAADjAAAA/wAAAP8AAAD/AAAA/wAAAGf///8B////AQAAABcAAAD/\nAAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAAh////Af///wH/\n//8B////AQAAAAMAAADxAAAA3f///wH///8B////Af///wEAAAAtAAAA/wAAAP8AAAD/AAAA/wAA\nAP8AAAD/AAAA/wAAAP8AAACZ////Af///wH///8B////Af///wEAAADHAAAA/wAAAP8AAAD/AAAA\n/wAAAEn///8B////AQAAABcAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/\nAAAA/wAAAP8AAAAh////Af///wH///8B////Af///wEAAAC
5AAAA/wAAACv///8B////Af///wH/\n//8BAAAA4QAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAACZ////Af///wH///8B////Af//\n/wEAAACpAAAA/wAAAP8AAAD/AAAA/wAAAC3///8B////AQAAABcAAAD/AAAA/wAAAP8AAAD/AAAA\n/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAAh////Af///wH///8B////Af///wEAAAB/\nAAAA/wAAAHn///8B////Af///wH///8BAAAAgQAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8A\nAACZ////Af///wH///8B////Af///wEAAACLAAAA/wAAAP8AAAD/AAAA/wAAAA////8B////AQAA\nABcAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAAh////\nAf///wH///8B////Af///wEAAABDAAAA/wAAAMf///8B////Af///wH///8BAAAAEwAAAO0AAAD/\nAAAA/wAAAP8AAAD/AAAA/wAAAPcAAABZ////Af///wH///8B////Af///wEAAABvAAAA/wAAAP8A\nAAD/AAAA8////wH///8B////AQAAABcAAAD/AAAA/wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAA\nAP8AAAD/AAAA/wAAAP8AAAAh////Af///wH///8B////Af///wEAAAALAAAA/QAAAP0AAAAX////\nAf///wH///8B////AQAAAEUAAAD1AAAA/wAAAP8AAAD/AAAAxQAAACf///8B////Af///wH///8B\n////Af///wEAAAA/AAAAwQAAAMEAAADBAAAAo////wH///8B////AQAAABEAAADBAAAAwQAAAMEA\nAADBAAAAwQAAAMEAAADBAAAAwQAAAMEAAADBAAAAwQAAAMEAAAAZ////Af///wH///8B////Af//\n/wH///8BAAAAzQAAAP8AAABh////Af///wH///8B////Af///wEAAAAhAAAAcQAAAGcAAAAn////\nAf///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B\n////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH/\n//8B////Af///wH///8B////Af///wH///8BAAAAkQAAAP8AAACv////Af///wH///8B////Af//\n/wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////\nAf///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B\n////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8BAAAAVwAAAP8A\nAAD1AAAACf///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af//\n/wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////\nAf///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B\n////Af///wH///8BAAAAGwAAAP8AAAD/AAAAS////wH///8B////Af///wH///8B////Af///wH/\n//8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////Af//\n/wH///8B////Af///wH
///8B////Af///wH///8B////Af///wH///8B////Af///wH///8B////\nAf///wH///8B////Af///wH///8B////Af///wH///8B////AQAAAM0AAADpAAAAh////wEAAAAA\nAAD//wAAAAAAAP//AAAAAAAA//8AAAAAAAD//wAAAAAAAP//AAAAAAAA//8AAAAAAAD//wAAAAAA\nAP//AAAAAAAA//8AAAAAAAD//wAAAAAAAP//AAAAAAAA//8AAAAAAAD//wAAAAAAAP//AAAAAAAA\n//8AAAAAAAD//wAAAAAAAP//AAAAAAAA//8AAAAAAAD//wAAAAAAAP//AAAAAAAA//8AAAAAAAD/\n/wAAAAAAAP//AAAAAAAA//8AAAAAAAD//wAAAAAAAP//AAAAAAAA//8AAAAAAAD//wAAAAAAAP//\nAAAAAAAA//8AAAAAAAD//wAAAAAAAP//AAAAAAAA//8AAAAAAAD//wAAAAAAAP//AAAAAAAA//8A\nAAAAAAD//wAAAAAAAP//AAAAAAAA//8AAAAAAAD//wAAAAAAAP//AAAAAAAA//8AAAAAAAD//wAA\nAAAAAP//AAAAAAAA//8AAAAAAAD//wAAAAAAAP//AAAAAAAA//8=\n"""
if __name__ == "__main__":
    # Launch the GUI; turn any uncaught error into a clean exit message
    # instead of a raw traceback.
    try:
        main_gui()
    except Exception as gui_error:
        sys.exit("PASTA GUI is exiting because of an error:\n%s " % str(gui_error))
| 64.464385 | 20,662 | 0.632939 | 8,305 | 69,686 | 5.165563 | 0.15593 | 0.022284 | 0.023916 | 0.024056 | 0.381072 | 0.307366 | 0.238392 | 0.192657 | 0.164079 | 0.138578 | 0 | 0.026561 | 0.205809 | 69,686 | 1,080 | 20,663 | 64.524074 | 0.748591 | 0.077433 | 0 | 0.302381 | 0 | 0.004762 | 0.381803 | 0.326098 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057143 | false | 0.002381 | 0.044048 | 0.005952 | 0.147619 | 0.00119 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
03381189e2ef61ec33adddf77a23ff4319aaca5b | 2,341 | py | Python | install.py | Trick-17/arch-installer | 7759da8198ec11e695964253217a879d7455ab5f | [
"Unlicense"
] | 9 | 2017-02-18T17:52:31.000Z | 2021-03-20T16:42:19.000Z | install.py | Trick-17/arch-installer | 7759da8198ec11e695964253217a879d7455ab5f | [
"Unlicense"
] | 62 | 2017-01-01T20:39:52.000Z | 2018-02-16T09:39:16.000Z | install.py | GPMueller/arch-installer | 7759da8198ec11e695964253217a879d7455ab5f | [
"Unlicense"
] | 4 | 2017-06-12T15:12:55.000Z | 2020-08-06T04:05:36.000Z | """
Installs Arch-Linux when called from a live-iso
"""
import argparse
from pyscripts import s000_detect_hardware as hardware
from pyscripts import s00_user_input as user_input
from pyscripts import s01_partitions as partitions
from pyscripts import s02_basic_arch as basic_arch
from pyscripts import s03_package_manager as package_manager
from pyscripts import s04_packages as packages
from pyscripts import s05_languages as languages
from pyscripts import s06_bootloader as bootloader
from pyscripts import s07_fstab as fstab
from pyscripts import s08_timezone as timezone
from pyscripts import s09_hostname as hostname
from pyscripts import s10_desktop as desktop
from pyscripts import s11_autostart as autostart
from pyscripts import s12_shell as shell
from pyscripts import s13_pacman_reflector_hook as pacman_reflector_hook
from pyscripts import s14_users as users
import pyscripts.utilities as install_utilities
print(">>>> ARCH INSTALLER STARTED <<<<")

# Allow for additional info being printed during setup
parser = argparse.ArgumentParser()
parser.add_argument('--debug', action='store_true',
                    help='Print additional info during setup.')
args = parser.parse_args()
install_utilities.DEBUG = args.debug
if args.debug:
    print('--- Debug info-printing enabled ---')

# Try to auto-detect the hardware currently installed
print(' >> Autodecting hardware...')
detected_hardware = {}
detected_hardware['cpu'] = hardware.get_cpu_vendor_id()
detected_hardware['gpu'] = hardware.get_gpu_vendor()
print(' >> Detected:')
print(' >> Graphics card vendor: ', detected_hardware['gpu'])
print(' >> Processor vendor: ', detected_hardware['cpu'])
print('')

# Collect the remaining configuration interactively (detected hardware is
# offered as a default).
ui = user_input.get_user_input(detected_hardware)

# Go through all the installation functions
partitions.create_and_mount()
basic_arch.install_basic_arch()
# NOTE(review): reconstructed nesting - only the two steps that take `user`
# are assumed to run inside the fake_install_user context; confirm against
# the original indentation.
with install_utilities.fake_install_user() as user:
    package_manager.install_package_manager(user)
    packages.install_packages(ui, user)
languages.setup_languages(ui)
bootloader.configure_bootloader()
fstab.generate_fstab()
timezone.setup_timezone(ui)
hostname.setup_hostname(ui)
desktop.configure_desktop(ui)
autostart.autostart_add_services(ui)
shell.configure_shell()
users.configure_users(ui)
pacman_reflector_hook.configure_pacman_reflector_hook()
print(">>>> ARCH INSTALLER FINISHED <<<<")
033b815efd46e0d1a664d3f11c268118691a5667 | 315 | py | Python | wol/views/pages.py | JleMyP/wol | 02693fe79df1628e9fa773bd677bf79114fd4868 | [
"WTFPL"
] | null | null | null | wol/views/pages.py | JleMyP/wol | 02693fe79df1628e9fa773bd677bf79114fd4868 | [
"WTFPL"
] | 1 | 2021-05-09T02:52:10.000Z | 2021-05-09T03:35:07.000Z | wol/views/pages.py | JleMyP/wol | 02693fe79df1628e9fa773bd677bf79114fd4868 | [
"WTFPL"
] | null | null | null | from flask import Blueprint, render_template
from ..logic.crud import get_all_targets
pages = Blueprint('web', __name__, template_folder='../templates')


@pages.route('/targets/', methods=['GET'])
def get_web_targets():
    """Render the targets page with every target returned by the CRUD layer."""
    return render_template('targets.html', targets=get_all_targets())
| 26.25 | 66 | 0.746032 | 40 | 315 | 5.55 | 0.525 | 0.126126 | 0.117117 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114286 | 315 | 11 | 67 | 28.636364 | 0.795699 | 0 | 0 | 0 | 0 | 0 | 0.12381 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.285714 | 0 | 0.571429 | 0.285714 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
034052e3e3d54baafeafaa4d2da6b4e8c641a704 | 3,867 | py | Python | osvc_python/osvc_python_connect.py | rajangdavis/osc_python | c67000d4df246764b0125a06734c926a23952693 | [
"MIT"
] | 7 | 2018-06-27T06:42:45.000Z | 2021-04-13T00:43:17.000Z | osvc_python/osvc_python_connect.py | rajangdavis/osc_python | c67000d4df246764b0125a06734c926a23952693 | [
"MIT"
] | 1 | 2018-05-27T09:38:37.000Z | 2018-05-27T09:38:37.000Z | osvc_python/osvc_python_connect.py | rajangdavis/osc_python | c67000d4df246764b0125a06734c926a23952693 | [
"MIT"
] | 1 | 2018-05-27T09:37:22.000Z | 2018-05-27T09:37:22.000Z | import requests
import json
from .osvc_python_file_handling import OSvCPythonFileHandler
from .osvc_python_config import OSvCPythonConfig
from .osvc_python_validations import OSvCPythonValidations
from .osvc_python_examples import CLIENT_NOT_DEFINED,CLIENT_NO_INTERFACE_SET_EXAMPLE,CLIENT_NO_USERNAME_SET_EXAMPLE,CLIENT_NO_PASSWORD_SET_EXAMPLE
class OSvCPythonConnect:
    """HTTP layer for the OSvC REST API: wraps ``requests`` and routes every
    verb through a single generic request pipeline."""

    def __init__(self):
        pass

    def get(self, **kwargs):
        """GET a resource; also handles file downloads for '?download' URLs."""
        if "url" not in kwargs:
            kwargs["url"] = ""  # empty URL targets the API root
        kwargs['verb'] = "get"
        return self.__generic_http_request(kwargs)

    def post(self, **kwargs):
        """POST (create) a resource."""
        kwargs['verb'] = "post"
        return self.__generic_http_request(kwargs)

    def patch(self, **kwargs):
        """PATCH (update) a resource."""
        kwargs['verb'] = "patch"
        return self.__generic_http_request(kwargs)

    def delete(self, **kwargs):
        """DELETE a resource."""
        kwargs['verb'] = "delete"
        return self.__generic_http_request(kwargs)

    def options(self, **kwargs):
        """OPTIONS request; returns only the response headers."""
        kwargs['verb'] = "options"
        return self.__generic_http_request(kwargs)

    def build_request_data(self, kwargs):
        """Build the keyword arguments passed to ``requests.request``
        (url, headers, SSL verification, optional basic auth)."""
        client = self.__check_client(kwargs)
        request_data = {
            "verify": not client.no_ssl_verify,
            "url": OSvCPythonConfig().url_format(kwargs),
            "headers": OSvCPythonConfig().headers_check(kwargs)
        }
        # Attach basic auth only when a username is configured.
        if client.username != "":
            request_data["auth"] = (client.username, client.password)
        return request_data

    def __generic_http_request(self, kwargs):
        """Perform the actual HTTP call and hand the response to
        __print_response. Connection errors are printed, not raised."""
        final_request_data = self.build_request_data(kwargs)
        download_local = None
        if kwargs['verb'] == "get":
            # Stream the response when the URL requests a file download.
            download_local = self.__download_check(kwargs)
            final_request_data["stream"] = download_local["stream"]
        elif kwargs['verb'] in ["post", "patch"]:
            # PATCH is tunneled through POST; presumably an override header is
            # added by OSvCPythonConfig.headers_check - confirm there.
            kwargs['original_verb'] = kwargs['verb']
            kwargs['verb'] = "post"
            final_request_data["data"] = json.dumps(OSvCPythonFileHandler().upload_check(kwargs))
        kwargs['download'] = download_local
        try:
            return self.__print_response(requests.request(kwargs['verb'], **final_request_data), kwargs)
        except requests.exceptions.ConnectionError as e:
            # Best-effort error reporting; implicitly returns None on failure.
            print("\n\033[31mError: Cannot connect to %s \033[0m" % final_request_data["url"])
            print("\n\nYou should check the 'interface' value set in the OSvCPythonClient\nor check your internet connection\n\n")

    def __print_response(self, response, kwargs):
        """Convert the raw ``requests`` response to the caller-facing result."""
        # File downloads are written to disk instead of being returned.
        if kwargs['verb'] == "get" and "download" in kwargs and kwargs["download"]["stream"] == True:
            return OSvCPythonFileHandler().download_file(response, kwargs["download"])
        if kwargs.get("debug") == True:
            return response  # raw response object for debugging
        if kwargs['verb'] == "options":
            return response.headers
        # DELETE and (tunneled) PATCH responses are returned as raw content.
        if kwargs['verb'] == "delete" or ('original_verb' in kwargs and kwargs['original_verb'] == "patch"):
            return response.content
        else:
            return response.json()

    def __download_check(self, kwargs):
        """Detect the '?download' URL suffix and resolve the target filename."""
        if kwargs.get("url").find("?download") > -1:
            resource_url = kwargs.get("url").replace("?download", "")
            # Fetch the resource metadata first to derive the file name.
            file_data = self.get(client=kwargs.get("client"), url=resource_url)
            file_name = OSvCPythonFileHandler().set_file_name(file_data)
            return {"file_name": file_name, "stream": True}
        else:
            return {"file_name": None, "stream": False}

    def __check_client(self, kwargs):
        """Require a `client` keyword argument; report an error otherwise."""
        if 'client' in kwargs:
            return self.__check_client_props(kwargs.get('client'))
        else:
            return OSvCPythonValidations().custom_error("Client must be defined in keyword arguments", CLIENT_NOT_DEFINED)

    def __check_client_props(self, client):
        """Validate that the client has an interface and matching credentials."""
        if client.interface == None:
            return OSvCPythonValidations().custom_error("Client interface cannot be undefined.", CLIENT_NO_INTERFACE_SET_EXAMPLE)
        if client.username == None and client.password != None:
            return OSvCPythonValidations().custom_error("Password is set but username is not.", CLIENT_NO_USERNAME_SET_EXAMPLE)
        if client.password == None and client.username != None:
            return OSvCPythonValidations().custom_error("Username is set but password is not.", CLIENT_NO_PASSWORD_SET_EXAMPLE)
        return client
0346123d4e8b36b1a22364f3527038880f8016ac | 231 | py | Python | src/apis/image/image/restoration.py | theunifai/unifai-apis-core | 1f2a9051c1e3df1bd19a96f22e4a07767ef3973a | [
"MIT"
] | 2 | 2021-11-09T07:18:06.000Z | 2022-01-04T19:37:17.000Z | src/apis/image/image/restoration.py | theunifai/unifai-apis-core | 1f2a9051c1e3df1bd19a96f22e4a07767ef3973a | [
"MIT"
] | 4 | 2021-11-04T08:28:59.000Z | 2021-11-07T05:59:59.000Z | src/apis/image/image/restoration.py | theunifai/unifai-apis-core | 1f2a9051c1e3df1bd19a96f22e4a07767ef3973a | [
"MIT"
] | 1 | 2022-01-07T09:12:22.000Z | 2022-01-07T09:12:22.000Z | from fastapi import APIRouter
from gladia_api_utils.submodules import TaskRouter
router = APIRouter()

# TaskRouter wires this task's generic endpoints onto `router`
# (see gladia_api_utils.submodules); both input and output are images.
TaskRouter(
    router=router,
    input="image",
    output="image",
    default_model="bringing-old-photos-back-to-life",
)
| 17.769231 | 53 | 0.735931 | 28 | 231 | 5.964286 | 0.75 | 0.191617 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.155844 | 231 | 12 | 54 | 19.25 | 0.85641 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 0.138528 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.222222 | 0 | 0.222222 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0347d78cdbcf07e87eb4636ff1c0bcb164181fcc | 3,393 | py | Python | views/views_users.py | SideShowBoBGOT/EPAM-project | f96ce88c81a8c075daafc6f37b4df47186395320 | [
"Unlicense"
] | 1 | 2021-12-04T21:46:12.000Z | 2021-12-04T21:46:12.000Z | views/views_users.py | SideShowBoBGOT/EPAM-project | f96ce88c81a8c075daafc6f37b4df47186395320 | [
"Unlicense"
] | null | null | null | views/views_users.py | SideShowBoBGOT/EPAM-project | f96ce88c81a8c075daafc6f37b4df47186395320 | [
"Unlicense"
] | null | null | null | """
Module contains all functions working on users page.
Functions:
users_page()
edit_user(id)
delete_user(id)
check_session()
"""
import os
import sys
import urllib.parse
from flask_login import login_user, login_required
from flask import render_template, request, redirect, Blueprint, session
# Make the parent directory importable so `models` resolves from this package.
sys.path.append(os.path.abspath(os.path.join('..')))
from models.users import User

# The login of the user with id 1 is treated as the administrator account.
ADMIN = User.query.get(1).login
# Base URL of the local development server.
BASE_URL = 'http://127.0.0.1:5000/'

api_users = Blueprint('api_users', __name__)
@api_users.route('/users', methods=['POST', 'GET'])
@login_required
def users_page():
    """
    Serve the users page.

    An admin session can add new users via POST (delegated to the REST API)
    and sees the full users table; any other session sees only its own page.
    :return: rendered users template, or a redirect to the add-user API
    """
    is_admin = session.get('user') and session.get('user')[0] == ADMIN
    if not is_admin:
        # Regular user: show only their own account page.
        user = User.query.filter_by(login=session.get('user')[0]).first()
        return render_template('users.html', user=user)
    if request.method == 'POST':
        # Delegate the actual creation to the REST API endpoint.
        login = request.form.get('login')
        password = request.form.get('password')
        data = f'?login={session["user"][0]}&password={session["user"][1]}' \
               f'&new_login={urllib.parse.quote(login)}&new_password={urllib.parse.quote(password)}&page=True'
        return redirect('/api/users/add' + data)
    return render_template('users_for_admin.html', users=User.query.all())
@api_users.route('/users/<int:id>/edit', methods=['GET', 'POST'])
@login_required
def edit_user(id):
    """
    Edit a specific user's credentials (admin only).

    :param id: id of the user being edited
    :return: rendered admin users template, or a redirect to the users page
    """
    if not (session.get('user') and session.get('user')[0] == ADMIN):
        return redirect('/users')
    if not User.query.get(id):
        # Unknown user id - nothing to edit.
        return redirect('/users')
    if request.method == 'POST':
        # Delegate the actual update to the REST API endpoint.
        login = request.form.get('new_login')
        password = request.form.get('new_password')
        data = f'?login={session["user"][0]}&password={session["user"][1]}' \
               f'&id={id}&new_login={urllib.parse.quote(login)}&new_password={urllib.parse.quote(password)}&page=True'
        return redirect('/api/users/edit' + data)
    return render_template('users_for_admin.html', id=id, users=User.query.all())
@api_users.route('/users/<int:id>/del')
@login_required
def delete_user(id):
    """
    Delete a specific user by its id (admin only).

    :param id: id of the user the admin wants to delete
    :return: redirect to the REST delete API (admin) or back to the users page
    """
    if session.get('user') and session.get('user')[0] == ADMIN:
        data = f'?login={session["user"][0]}&password={session["user"][1]}' \
               f'&id={id}&page=True'
        return redirect('/api/users/del' + data)
    # Previously this fell through and returned None, which makes Flask raise
    # a 500 ("view function did not return a response") for non-admin users.
    return redirect('/users')
@api_users.before_request
def check_session():
    """
    Log the session's user in before each request if a session already
    exists; otherwise redirect to the main page.
    :return: None (user logged in) or a redirect to '/'
    """
    if not session.get('user'):
        return redirect('/')
    users = User.query.filter_by(login=session.get('user')[0]).all()
    login_user(users[0])
    session.permanent = False
| 35.715789 | 126 | 0.639257 | 463 | 3,393 | 4.591793 | 0.224622 | 0.033866 | 0.059266 | 0.035278 | 0.450611 | 0.425212 | 0.389464 | 0.389464 | 0.316087 | 0.281279 | 0 | 0.009377 | 0.214265 | 3,393 | 94 | 127 | 36.095745 | 0.788072 | 0.2402 | 0 | 0.245283 | 0 | 0.037736 | 0.263137 | 0.146726 | 0 | 0 | 0 | 0 | 0 | 1 | 0.075472 | false | 0.132075 | 0.113208 | 0 | 0.339623 | 0.037736 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0348b3151492f1bf80185687c781237ee59205a5 | 1,722 | py | Python | polls/migrations/0001_initial.py | zzZ5/compost | 00cc2cbc74df6626e07072c8c1e638ffd9bac8f1 | [
"MIT"
] | 1 | 2020-11-28T00:06:06.000Z | 2020-11-28T00:06:06.000Z | polls/migrations/0001_initial.py | zzZ5/compost | 00cc2cbc74df6626e07072c8c1e638ffd9bac8f1 | [
"MIT"
] | null | null | null | polls/migrations/0001_initial.py | zzZ5/compost | 00cc2cbc74df6626e07072c8c1e638ffd9bac8f1 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.8 on 2020-11-23 11:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the polls app: Equipment and Data models."""

    initial = True

    dependencies = [
        ('account', '0003_confirmstring'),
    ]

    operations = [
        # Equipment: a named device owned by one or more account.User rows.
        migrations.CreateModel(
            name='Equipment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128, unique=True)),
                ('descript', models.CharField(max_length=256)),
                ('created_time', models.DateTimeField(auto_now_add=True)),
                ('user', models.ManyToManyField(to='account.User')),
            ],
            options={
                'verbose_name': 'Equipment',
                'verbose_name_plural': 'Equipments',
                'ordering': ['-created_time'],
            },
        ),
        # Data: a key/value measurement recorded for one Equipment.
        migrations.CreateModel(
            name='Data',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('key', models.CharField(max_length=256)),
                ('value', models.FloatField()),
                ('descript', models.CharField(max_length=256)),
                ('created_time', models.DateTimeField(auto_now_add=True)),
                ('equipment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Equipment')),
            ],
            options={
                'verbose_name': 'Data',
                'verbose_name_plural': 'Datas',
                'ordering': ['-created_time'],
            },
        ),
    ]
| 35.875 | 116 | 0.542973 | 158 | 1,722 | 5.753165 | 0.411392 | 0.072607 | 0.079208 | 0.105611 | 0.377338 | 0.347635 | 0.347635 | 0.347635 | 0.347635 | 0.347635 | 0 | 0.026316 | 0.315912 | 1,722 | 47 | 117 | 36.638298 | 0.745331 | 0.026132 | 0 | 0.45 | 1 | 0 | 0.161194 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.05 | 0 | 0.15 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0348d60d31e7478967314ca6fb318726da997b7d | 2,644 | py | Python | models/python/hypothalamus/dynamical/miro_experiments/recurrent_3_mot.py | ABRG-Models/MammalBot | 0b153232b94197c7a65156c1c3451ab2b9f725ae | [
"MIT"
] | null | null | null | models/python/hypothalamus/dynamical/miro_experiments/recurrent_3_mot.py | ABRG-Models/MammalBot | 0b153232b94197c7a65156c1c3451ab2b9f725ae | [
"MIT"
] | null | null | null | models/python/hypothalamus/dynamical/miro_experiments/recurrent_3_mot.py | ABRG-Models/MammalBot | 0b153232b94197c7a65156c1c3451ab2b9f725ae | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
# Network size and number of simulation steps.
N = 100
T = 20000

# Parameters of the postsynaptic sigmoid g.
qg = 0.950997659088
xg = 26.594968159566278
bg = 0.2818239551304378
# Weight amplitude used when building the connectivity matrix J.
A = 3.5505937843746906
# Parameters of the presynaptic sigmoid f.
qf = 0.8274882807912485
xf = 26.594968159566278
bf = 0.2818239551304378


def f(eta):
    """Presynaptic nonlinearity: shifted and scaled tanh sigmoid."""
    return 0.5*(2*qf-1.+np.tanh(bf*(eta-xf)))


def g(eta):
    """Postsynaptic nonlinearity: shifted and scaled tanh sigmoid."""
    return 0.5*(2*qg-1.+np.tanh(bg*(eta-xg)))


# Firing-rate transfer function parameters.
beta = 0.82
rm = 76.0
h0 = 2.46
tau = 10.0

# Logistic firing-rate function saturating at rm.
phi = lambda x: rm/(1.0 + np.exp(-beta*(x - h0)))
# Stored patterns: p random Gaussian vectors, one per column.
p = 10
xi = np.random.normal(0.0, 1.0, (N, p))
dt = 0.01

# Drive variables over time for the three motivations a, b, c.
ka = np.zeros(T)
kb = np.zeros(T)
kc = np.zeros(T)
ka[0] = 0.5
kb[0] = 0.4
kc[0] = 0.1

# The first three patterns double as the external input currents.
Ia = xi[:,0]
Ib = xi[:,1]
Ic = xi[:,2]
rho = np.zeros(N)  # network firing-rate state

# Training
c = 0.001  # connection probability (sparsity)
J = np.zeros((N,N))
for i in range(N):
    for j in range(N):
        # Each synapse exists with probability c; existing synapses are
        # trained with a Hebbian-style rule over all p stored patterns.
        cij = 1 if np.random.random() < c else 0
        for k in range(p):
            J[i,j] += (cij*A/(c*N))*f(phi(xi[i,k]))*g(phi(xi[j,k]))
# Parenthesized call form works on both Python 2 and 3 (the bare
# `print "..."` statement is a SyntaxError on Python 3).
print("Presenting stimuli")
# Presentation
# I = phi(Ia)
# I = Ia
ta = np.zeros(T)  # similarity ("tendency") trace for pattern a
tb = np.zeros(T)
tc = np.zeros(T)
alpha = 0.2  # drive decay rate
rrate = lambda x: (x > 0.0)  # reward: 1 when the tendency is positive
# rrate = 1.0
# Mean overlap between a pattern's transformed input and the current rates.
simi = lambda a, r: np.mean(np.multiply(g(phi(a)), r))
for i in range(T-1):
    # I = np.heaviside(1.-ka[i]-0.5,0)*Ia + np.heaviside(1.-kb[i]-0.5,0)*Ib + np.heaviside(1.-kc[i]-0.5,0)*Ic
    # Input is the drive-weighted mixture of the three pattern currents.
    I = ka[i]*Ia + kb[i]*Ib + kc[i]*Ic
    # rho[:,i+1] = rho[:,i] + dt*(-rho[:,i] + phi(I + J.dot(rho[:,i])))/tau
    # Rate dynamics: leaky integration of input plus recurrent feedback.
    rho = rho + dt*(-rho + phi(I + J.dot(rho)))/tau
    ta[i+1] = simi(Ia, rho)
    tb[i+1] = simi(Ib, rho)
    tc[i+1] = simi(Ic, rho)
    max_ac = np.argmax(np.array([ta[i+1], tb[i+1], tc[i+1]]))  # NOTE(review): unused
    # Drives decay at rate alpha and grow while their tendency is rewarded.
    ka[i+1] = ka[i] + dt*(-alpha*ka[i] + rrate(ta[i+1]))
    kb[i+1] = kb[i] + dt*(-alpha*kb[i] + rrate(tb[i+1]))
    kc[i+1] = kc[i] + dt*(-alpha*kc[i] + rrate(tc[i+1]))
    # Clamp drives at 1.
    if ka[i+1] > 1.0:
        ka[i+1] = 1.0
    if kb[i+1] > 1.0:
        kb[i+1] = 1.0
    if kc[i+1] > 1.0:
        kc[i+1] = 1.0
# Earlier diagnostics, kept for reference:
# simi = lambda a, b: a.dot(b)/(np.linalg.norm(a)*np.linalg.norm(b))
# pa = xi[:,0]#phi(xi[:,0])
# pb = xi[:,1]#phi(xi[:,1])
# pc = xi[:,2]#phi(xi[:,2])
# print( "similar a: ", simi(pa, rho[:,-1]) )
# print( "similar b: ", simi(pb, rho[:,-1]) )
# print( "similar c: ", simi(pc, rho[:,-1]) )
# fig = plt.figure()
# plt.imshow(J)

# Plot the tendencies (top) and drives (bottom) over time.
fig, ax = plt.subplots(2, 1)
xs = np.arange(N)  # NOTE(review): unused
ax[0].plot(ta, label="Tendency a")
ax[0].plot(tb, label="Tendency b")
# Fixed label: the third trace was mislabeled "Tendency b".
ax[0].plot(tc, label="Tendency c")
ax[1].plot(ka, label="Drive a")
ax[1].plot(kb, label="Drive b")
ax[1].plot(kc, label="Drive c")
ax[0].legend()
ax[1].legend()
plt.show()
| 22.793103 | 109 | 0.541982 | 551 | 2,644 | 2.598911 | 0.212341 | 0.026536 | 0.03352 | 0.01676 | 0.074022 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116169 | 0.202345 | 2,644 | 115 | 110 | 22.991304 | 0.562826 | 0.262481 | 0 | 0 | 0 | 0 | 0.035863 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.027027 | null | null | 0.013514 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
034b34fdf1049f91f02d84b8a1eb633f22e1bc51 | 2,044 | py | Python | fabfile.py | clemsos/fabric-node-deploy | 496ea7373db4303d41c9da9b0d5bee9c76fdfc94 | [
"CC0-1.0"
] | null | null | null | fabfile.py | clemsos/fabric-node-deploy | 496ea7373db4303d41c9da9b0d5bee9c76fdfc94 | [
"CC0-1.0"
] | null | null | null | fabfile.py | clemsos/fabric-node-deploy | 496ea7373db4303d41c9da9b0d5bee9c76fdfc94 | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from fabric.api import *
from fabric.contrib import files
import os
from settings import *
# create
RUN_DIR=os.path.join(HOME_DIR, "run")
LOG_DIR=os.path.join(HOME_DIR, "log")
REMOTE_REPO_DIR = os.path.join(HOME_DIR, APP_NAME)
OUT_LOG_FILE = os.path.join(LOG_DIR, 'out.log')
ERROR_LOG_FILE = os.path.join(LOG_DIR, 'error.log')
def uptime():
    """Show the remote server's uptime and load averages (runs `uptime`)."""
    run('uptime')
def remote_info():
    """Show kernel/host information of the remote host (runs `uname -a`)."""
    run('uname -a')
def local_info():
    """Show kernel/host information of the local machine (runs `uname -a`)."""
    local('uname -a')
def create_dirs():
    """Ensure the run/ and log/ directories exist on the remote host."""
    for directory in (RUN_DIR, LOG_DIR):
        run("mkdir -p %s" % directory)
def update_code_from_git():
    """ Download latest version of the code from git """
    # Clone on first deploy, then pull the latest changes.
    # NOTE(review): reconstructed nesting - the pull is assumed to run
    # unconditionally after the optional clone; confirm original indentation.
    if not files.exists(REMOTE_REPO_DIR):
        with cd(HOME_DIR):
            run("git clone %s" % MAIN_GITHUB_REP)
    with cd(REMOTE_REPO_DIR):
        run("git pull")
def update_requirements():
    """Install/refresh the app's node dependencies on the remote host."""
    with cd(REMOTE_REPO_DIR):
        run('npm install')
def start():
    """Start the app under forever, logging to the out/error log files."""
    with cd(REMOTE_REPO_DIR):
        run("export PORT=%s && forever start -o %s -e %s -a index.js -p %s" % (
            APP_PORT, OUT_LOG_FILE, ERROR_LOG_FILE, RUN_DIR))
def stop():
    """Stop the forever-managed app process."""
    # NOTE(review): APP_PORT is passed to forever's -l (logfile) flag here,
    # which looks suspicious - confirm the intended flag.
    with cd(REMOTE_REPO_DIR):
        run("forever stop -l %s -a index.js -p %s" % (APP_PORT, RUN_DIR))
def restart():
    """Restart the forever-managed app process."""
    # NOTE(review): APP_PORT is passed to forever's -l (logfile) flag here,
    # which looks suspicious - confirm the intended flag.
    with cd(REMOTE_REPO_DIR):
        run("forever restart -l %s -a index.js -p %s" % (APP_PORT, RUN_DIR))
def error_log():
    """Show the last 200 lines of the remote error log."""
    run("tail -200 %s" % ERROR_LOG_FILE)
def out_log():
    """Show the last 200 lines of the remote stdout log."""
    run("tail -200 %s" % OUT_LOG_FILE)
def init():
    """ Init setup of the project """
    # No one-time provisioning steps implemented yet.
    pass
def deploy():
    """ Update the project """
    # Full deployment: directories, code, dependencies, then restart.
    create_dirs()
    update_code_from_git()
    update_requirements()
    restart()
| 25.55 | 128 | 0.624266 | 318 | 2,044 | 3.823899 | 0.308176 | 0.034539 | 0.074836 | 0.065789 | 0.325658 | 0.274671 | 0.15625 | 0.070724 | 0.055921 | 0.055921 | 0 | 0.004403 | 0.222114 | 2,044 | 79 | 129 | 25.873418 | 0.760377 | 0.201076 | 0 | 0.156863 | 0 | 0.019608 | 0.163188 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.254902 | false | 0.019608 | 0.078431 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
035243c41a2fb6bdd10ac6c5768361efd5db9457 | 1,147 | py | Python | komoog/paths.py | benmaier/komoog | 5b6613a5ba31c71be28f2642681cb01c2cb67e41 | [
"MIT"
] | 2 | 2021-10-05T20:03:42.000Z | 2021-10-07T12:13:19.000Z | komoog/paths.py | benmaier/komoog | 5b6613a5ba31c71be28f2642681cb01c2cb67e41 | [
"MIT"
] | null | null | null | komoog/paths.py | benmaier/komoog | 5b6613a5ba31c71be28f2642681cb01c2cb67e41 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Path handling
"""
import pathlib
from pathlib import Path
import simplejson as json
customdir = Path.home() / ".komoog"
def _prepare():
    """Create ``~/.komoog`` and a skeleton ``komoot.json`` if missing."""
    customdir.mkdir(exist_ok=True)
    cred_file = customdir / "komoot.json"
    if cred_file.exists():
        return
    # Write an empty credentials template for the user to fill in.
    empty_credentials = {
        "email": "",
        "password": "",
        "clientid": "",
    }
    with open(cred_file, 'w') as f:
        json.dump(empty_credentials, f)
def get_credentials():
    """
    Returns credentials for komoot login in structure

    .. code:: python

        {
            "email" : "",
            "password" : "",
            "clientid" : ""
        }

    from the file ``~/.komoog/komoot.json``
    """
    _prepare()
    cred_file = customdir / "komoot.json"
    with open(cred_file, 'r') as f:
        credentials = json.load(f)

    # Validate that all keys exist and none is left empty.
    # NOTE(review): these asserts are stripped under `python -O`; consider
    # raising an explicit exception instead.
    assert(all([ k in credentials.keys() for k in ["email", "password", "clientid"]]))
    assert(not any([ credentials[k] == '' for k in ["email", "password", "clientid"]]))

    return credentials
if __name__ == "__main__":
    # Manual smoke test: load and validate the stored credentials.
    get_credentials()
| 20.854545 | 87 | 0.529207 | 120 | 1,147 | 4.908333 | 0.458333 | 0.067912 | 0.142615 | 0.078098 | 0.183362 | 0.091681 | 0 | 0 | 0 | 0 | 0 | 0.001276 | 0.316478 | 1,147 | 54 | 88 | 21.240741 | 0.75 | 0.199651 | 0 | 0.08 | 0 | 0 | 0.117647 | 0 | 0 | 0 | 0 | 0 | 0.08 | 1 | 0.08 | false | 0.12 | 0.12 | 0 | 0.24 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0358bad369ac30bcb876f6ad3eac9566fac65812 | 40,713 | py | Python | rest_gae/rest_gae.py | emarinizquierdo/xentinels | 596b51c332543bae1fb8ce2d1d271ebe05a117dc | [
"Apache-2.0"
] | 56 | 2015-02-25T02:04:59.000Z | 2020-06-10T06:00:04.000Z | rest_gae/rest_gae.py | emarinizquierdo/xentinels | 596b51c332543bae1fb8ce2d1d271ebe05a117dc | [
"Apache-2.0"
] | 5 | 2016-01-25T06:55:53.000Z | 2016-08-27T12:29:03.000Z | rest_gae/rest_gae.py | emarinizquierdo/xentinels | 596b51c332543bae1fb8ce2d1d271ebe05a117dc | [
"Apache-2.0"
] | 13 | 2015-01-11T15:25:03.000Z | 2018-02-27T19:27:27.000Z | """
Wraps NDB models and provided REST APIs (GET/POST/PUT/DELETE) arounds them. Fully supports permissions.
Some code is taken from: https://github.com/abahgat/webapp2-user-accounts
"""
import importlib
import json
import re
from urlparse import urlparse
from datetime import datetime, time, date
from urllib import urlencode
import webapp2
from google.appengine.ext import ndb
from google.appengine.ext.ndb import Cursor
from google.appengine.ext.db import BadValueError, BadRequestError
from webapp2_extras import auth
from webapp2_extras import sessions
from webapp2_extras.routes import NamePrefixRoute
from google.appengine.ext import blobstore
from google.appengine.ext.webapp import blobstore_handlers
from google.appengine.api import app_identity
from google.net.proto.ProtocolBuffer import ProtocolBufferDecodeError
# dateutil is optional; `dateutil` is None when the package is unavailable.
try:
    import dateutil.parser
except ImportError as e:
    dateutil = None

# The REST permissions
PERMISSION_ANYONE = 'anyone'
PERMISSION_LOGGED_IN_USER = 'logged_in_user'
PERMISSION_OWNER_USER = 'owner_user'
PERMISSION_ADMIN = 'admin'
PERMISSION_ADMIN = 'admin'
class NDBEncoder(json.JSONEncoder):
    """JSON encoding for NDB models and properties"""

    def _decode_key(self, key):
        # Models that opt in via RESTMeta.use_input_id are identified by their
        # string id; all others by the opaque urlsafe key representation.
        model_class = ndb.Model._kind_map.get(key.kind())
        if getattr(model_class, 'RESTMeta', None) and getattr(model_class.RESTMeta, 'use_input_id', False):
            return key.string_id()
        else:
            return key.urlsafe()

    def default(self, obj):
        """Serialize NDB models/keys and date/time values to JSON types."""
        if isinstance(obj, ndb.Model):
            obj_dict = obj.to_dict()

            # Each BlobKeyProperty is represented as a dict of upload_url/download_url
            for (name, prop) in obj._properties.iteritems():
                if isinstance(prop, ndb.BlobKeyProperty):
                    server_host = app_identity.get_default_version_hostname()
                    blob_property_url = 'http://%s%s/%s/%s' % (server_host, obj.RESTMeta.base_url, self._decode_key(obj.key), name) # e.g. /api/my_model/<SOME_KEY>/blob_prop

                    obj_dict[name] = {
                        'upload_url': blob_property_url,
                        'download_url': blob_property_url if getattr(obj, name) else None # Display as null if the blob property is not set
                    }

            # Filter the properties that will be returned to user
            included_properties = get_included_properties(obj, 'output')
            obj_dict = dict((k, v) for k, v in obj_dict.iteritems() if k in included_properties)

            # Translate the property names
            obj_dict = translate_property_names(obj_dict, obj, 'output')

            # Always expose the entity's key as 'id'.
            obj_dict['id'] = self._decode_key(obj.key)

            return obj_dict

        elif isinstance(obj, datetime) or isinstance(obj, date) or isinstance(obj, time):
            return obj.isoformat()
        elif isinstance(obj, ndb.Key):
            return self._decode_key(obj)
        elif isinstance(obj, ndb.GeoPt):
            return str(obj)
        else:
            # Fall back to the default encoder (raises TypeError if unknown).
            return json.JSONEncoder.default(self, obj)
class RESTException(Exception):
    """REST methods exception - raised to signal an API-level error."""
    pass
class NoResponseResult(object):
    """A class representing a non-response - used by rest_method_wrapper to detect when we shouldn't print any data with response.write.
    Used when serving blobs (for BlobKeyProperty)"""
    pass
#
# Utility functions
#
def get_translation_table(model, input_type):
    """Returns the translation table for a given `model` with a given `input_type`.

    Merges the generic RESTMeta.translate_property_names mapping with the
    direction-specific RESTMeta.translate_<input_type>_property_names one.
    Returns {} when the model has no RESTMeta class.
    """
    meta_class = getattr(model, 'RESTMeta', None)
    if not meta_class:
        return {}

    # Copy before merging: calling .update() on the attribute itself would
    # permanently pollute the model's class-level translation table across
    # calls (the previous behavior).
    translation_table = dict(getattr(meta_class, 'translate_property_names', {}))
    translation_table.update(getattr(meta_class, 'translate_%s_property_names' % input_type, {}))
    return translation_table
def translate_property_names(data, model, input_type):
    """Translates property names in the `data` dict in-place according to the
    model's RESTMeta translation tables (see `get_translation_table`).

    For 'output', original property names are renamed to their public names;
    for 'input', public names are converted back to the original ones.
    Returns the (mutated) `data` dict.
    """
    translation_table = get_translation_table(model, input_type)
    if not translation_table:
        return data

    # Rename keys in-place; entries absent from `data` are skipped.
    # (.items() instead of the Python-2-only .iteritems() - identical
    # behavior on Python 2, and it also works on Python 3.)
    for old_name, new_name in translation_table.items():
        if input_type == 'output' and old_name in data:
            data[new_name] = data.pop(old_name)
        elif input_type == 'input' and new_name in data:
            data[old_name] = data.pop(new_name)

    return data
def get_included_properties(model, input_type):
    """Gets the properties of a `model` class to use for input/output (`input_type`).

    Uses the model's RESTMeta class (if any) to determine the included/excluded
    properties; without one, every model property is included.
    """
    meta_class = getattr(model, 'RESTMeta', None)

    included = set()
    excluded = set()
    if meta_class:
        # Direction-specific lists plus the shared ones
        included.update(getattr(meta_class, 'included_%s_properties' % input_type, []))
        included.update(getattr(meta_class, 'included_properties', []))
        excluded.update(getattr(meta_class, 'excluded_%s_properties' % input_type, []))
        excluded.update(getattr(meta_class, 'excluded_properties', []))

    if not included:
        # No Meta class (or no included properties defined) - assume all properties are included
        included = set(model._properties.keys())

    # Built-in exclusions/additions per direction
    if input_type == 'input':
        excluded.update(BaseRESTHandler.DEFAULT_EXCLUDED_INPUT_PROPERTIES)
        if meta_class and getattr(meta_class, 'use_input_id', False):
            included.add('id')
    elif input_type == 'output':
        excluded.update(BaseRESTHandler.DEFAULT_EXCLUDED_OUTPUT_PROPERTIES)

    # The final set of properties to expose
    return included - excluded
def import_class(input_cls):
    """Imports a class (if given as a dotted-path string) or returns it as-is (if given as a class).

    Raises ValueError when the string cannot be resolved to a module attribute.
    """
    if not isinstance(input_cls, str):
        # It's already a class - return as-is
        return input_cls
    try:
        (module_name, class_name) = input_cls.rsplit('.', 1)
        # fromlist makes __import__ return the leaf module instead of the top package
        module = __import__(module_name, fromlist=[class_name])
        return getattr(module, class_name)
    except Exception as exc:  # 'as' syntax is valid on Python 2.6+ and 3 (the old comma form is 2-only)
        # Couldn't import the class (bad path, missing module, or missing attribute)
        raise ValueError("Couldn't import the model class '%s'" % input_cls)
class BaseRESTHandler(webapp2.RequestHandler):
    """Base request handler class for REST handlers (used by RESTHandlerClass and UserRESTHandlerClass)"""

    # The default number of results to return for a query in case `limit` parameter wasn't provided by the user
    DEFAULT_MAX_QUERY_RESULTS = 1000

    # The names of properties that should be excluded from input/output
    DEFAULT_EXCLUDED_INPUT_PROPERTIES = [ 'class_' ] # 'class_' is a PolyModel attribute
    DEFAULT_EXCLUDED_OUTPUT_PROPERTIES = [ ]

    #
    # Session related methods/properties
    #

    def dispatch(self):
        """Needed in order for the webapp2 sessions to work"""
        # Get a session store for this request.
        self.session_store = sessions.get_store(request=self.request)

        try:
            if getattr(self, 'allow_http_method_override', False) and ('X-HTTP-Method-Override' in self.request.headers):
                # User wants to override method type
                overridden_method_name = self.request.headers['X-HTTP-Method-Override'].upper().strip()
                if overridden_method_name not in ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS']:
                    return self.method_not_allowed()
                self.request.method = overridden_method_name

            if getattr(self, 'allowed_origin', None):
                allowed_origin = self.allowed_origin
                if 'Origin' in self.request.headers:
                    # See if the origin matches
                    origin = self.request.headers['Origin']
                    if (origin != allowed_origin) and (allowed_origin != '*'):
                        return self.permission_denied('Origin not allowed')

            # Dispatch the request.
            response = webapp2.RequestHandler.dispatch(self)
        except:
            # NOTE(review): bare except that immediately re-raises - its only effect is
            # that sessions are saved (in the else branch) exclusively on success.
            raise
        else:
            # Save all sessions.
            self.session_store.save_sessions(response)

        return response

    @webapp2.cached_property
    def session(self):
        """Shortcut to access the current session."""
        # Session backend defaults to the datastore unless overridden in the app config
        backend = self.app.config.get("session_backend", "datastore")
        return self.session_store.get_session(backend=backend)

    #
    # Authentication methods/properties
    #

    @webapp2.cached_property
    def auth(self):
        """Shortcut to access the auth instance as a property."""
        return auth.get_auth()

    @webapp2.cached_property
    def user_info(self):
        """Shortcut to access a subset of the user attributes that are stored
        in the session.

        The list of attributes to store in the session is specified in
        config['webapp2_extras.auth']['user_attributes'].

        :returns
            A dictionary with most user information
        """
        return self.auth.get_user_by_session()

    @webapp2.cached_property
    def user_model(self):
        """Returns the implementation of the user model.

        It is consistent with config['webapp2_extras.auth']['user_model'], if set.
        """
        return self.auth.store.user_model

    @webapp2.cached_property
    def user(self):
        """Shortcut to access the current logged in user.

        Unlike user_info, it fetches information from the persistence layer and
        returns an instance of the underlying model.

        :returns
            The instance of the user model associated to the logged in user.
        """
        u = self.user_info
        return self.user_model.get_by_id(u['user_id']) if u else None

    #
    # HTTP response helper methods
    #

    def get_response(self, status, content):
        """Returns an HTTP status message with JSON-encoded content (and appropriate HTTP response headers)"""
        # Create the JSON-encoded response
        response = webapp2.Response(json.dumps(content, cls=NDBEncoder))
        response.status = status
        response.headers['Content-Type'] = 'application/json'
        # Advertise the allowed methods (the keys of the per-handler `permissions` dict)
        response.headers['Access-Control-Allow-Methods'] = ', '.join(self.permissions.keys())
        if getattr(self, 'allowed_origin', None):
            response.headers['Access-Control-Allow-Origin'] = self.allowed_origin
        return response

    def success(self, content):
        # 200 OK with a JSON body
        return self.get_response(200, content)

    def error(self, exception):
        # 400 Bad Request carrying the exception text
        return self.get_response(400, {'error': str(exception)})

    def method_not_allowed(self):
        # 405 Method Not Allowed (empty JSON body)
        return self.get_response(405, {})

    def permission_denied(self, reason=None):
        # 403 Forbidden, optionally explaining why
        return self.get_response(403, { 'reason': reason})

    def unauthorized(self):
        # 401 Unauthorized - authentication required
        return self.get_response(401, {})

    def redirect(self, url, **kwd):
        # Thin wrapper over webapp2.redirect (returns the redirect response)
        return webapp2.redirect(url, **kwd)

    #
    # Utility methods
    #

    def _model_id_to_model(self, model_id):
        """Returns the model according to the model_id; raises an exception if invalid ID / model not found"""
        if not model_id:
            return None

        try:
            if getattr(self.model, 'RESTMeta', None) and getattr(self.model.RESTMeta, 'use_input_id', False):
                # The model uses caller-supplied IDs (key names) instead of urlsafe keys
                model = ndb.Key(self.model, model_id).get()
            else:
                model = ndb.Key(urlsafe=model_id).get()
            if not model: raise Exception()
        except Exception, exc:
            # Invalid key name
            raise RESTException('Invalid model id - %s' % model_id)

        return model

    def _build_next_query_url(self, cursor):
        """Returns the next URL to fetch results for - used when paging. Returns none if no more results"""
        if not cursor:
            return None

        # Use all of the original query arguments - just override the cursor argument
        # NOTE(review): this mutates self.request.GET in place - presumably harmless
        # since the response is built right after; confirm against webapp2's MultiDict.
        params = self.request.GET
        params['cursor'] = cursor.urlsafe()
        return self.request.path_url + '?' + urlencode(params)

    def _filter_query(self):
        """Filters the query results for given property filters (if provided by user)."""
        if not self.request.GET.get('q'):
            # No query given - return as-is
            return self.model.query()

        try:
            # Translate any property names
            translation_table = get_translation_table(self.model, 'input')

            query = self.request.GET.get('q')
            for original_name, new_name in translation_table.iteritems():
                # Replace any references to the new property name with the old (original) one
                query = re.sub(r'\b%s\s*(<=|>=|=|<|>|!=|(\s+IN\s+))' % new_name, r'%s \1' % original_name, query, flags=re.IGNORECASE)
            # Let GQL parse the (translated) filter expression
            return self.model.gql('WHERE ' + query)
        except Exception, exc:
            # Invalid query
            raise RESTException('Invalid query param - "%s"' % self.request.GET.get('q'))

    def _fetch_query(self, query):
        """Fetches the query results for a given limit (if provided by user) and for a specific results page (if given by user).
        Returns a tuple of (results, cursor_for_next_fetch). cursor_for_next_fetch will be None is no more results are available."""

        if not self.request.GET.get('limit'):
            # No limit given - use default limit
            limit = BaseRESTHandler.DEFAULT_MAX_QUERY_RESULTS
        else:
            try:
                limit = int(self.request.GET.get('limit'))
                if limit <= 0: raise ValueError('Limit cannot be zero or less')
            except ValueError, exc:
                # Invalid limit value
                raise RESTException('Invalid "limit" parameter - %s' % self.request.GET.get('limit'))

        if not self.request.GET.get('cursor'):
            # Fetch results from scratch
            cursor = None
        else:
            # Continue a previous query
            try:
                cursor = Cursor(urlsafe=self.request.GET.get('cursor'))
            except BadValueError, exc:
                raise RESTException('Invalid "cursor" argument - %s' % self.request.GET.get('cursor'))

        try:
            (results, cursor, more_available) = query.fetch_page(limit, start_cursor=cursor)
        except BadRequestError, exc:
            # This happens when we're using an existing cursor and the other query arguments were messed with
            raise RESTException('Invalid "cursor" argument - %s' % self.request.GET.get('cursor'))

        if not more_available:
            # Signal "no next page" to the caller
            cursor = None

        return (results, cursor)

    def _order_query(self, query):
        """Orders the query if input given by user. Returns the modified, sorted query"""
        if not self.request.GET.get('order'):
            # No order given
            orders = []
        else:
            try:
                # The order parameter is formatted as 'col1, -col2, col3'
                orders = [o.strip() for o in self.request.GET.get('order').split(',')]
                # Normalize: every column gets an explicit '+' or '-' direction prefix
                orders = ['+'+o if not o.startswith('-') and not o.startswith('+') else o for o in orders]

                # Translate property names (if it's defined for the current model) - e.g. input 'col1' is actually 'my_col1' in MyModel
                translated_orders = dict([order.lstrip('-+'), order[0]] for order in orders)
                translated_orders = translate_property_names(translated_orders, self.model, 'input')

                # Map to NDB ordering expressions (negated property = descending)
                orders = [-getattr(self.model, order) if direction == '-' else getattr(self.model, order) for order,direction in translated_orders.iteritems()]
            except AttributeError, exc:
                # Invalid column name
                raise RESTException('Invalid "order" parameter - %s' % self.request.GET.get('order'))

        # Always use a sort-by-key order at the end - this solves the case where the query uses IN or != operators - since we're using a cursor
        # to fetch results - there is a requirement for this solution in order for the fetch_page to work. See "Query cursors" at
        # https://developers.google.com/appengine/docs/python/ndb/queries
        orders.append(self.model.key)

        # Return the ordered query
        return query.order(*orders)

    def _build_model_from_data(self, data, cls, model=None):
        """Builds a model instance (according to `cls`) from user input and returns it. Updates an existing model instance if given.
        Raises exceptions if input data is invalid."""

        # Translate the property names (this is done before the filtering in order to get the original property names by which the filtering is done)
        data = translate_property_names(data, cls, 'input')

        # Transform any raw input data into appropriate NDB properties - write all transformed properties
        # into another dict (so any other unauthorized properties will be ignored).
        input_properties = { }

        for (name, prop) in cls._properties.iteritems():
            if name not in data: continue # Input not given by user

            if prop._repeated:
                # This property is repeated (i.e. an array of values)
                input_properties[name] = [self._value_to_property(value, prop) for value in data[name]]
            else:
                input_properties[name] = self._value_to_property(data[name], prop)

        if not model and getattr(cls, 'RESTMeta', None) and getattr(cls.RESTMeta, 'use_input_id', False):
            # Caller-supplied IDs are mandatory when creating models with use_input_id
            if 'id' not in data:
                raise RESTException('id field is required')
            input_properties['id'] = data['id']

        # Filter the input properties
        included_properties = get_included_properties(cls, 'input')
        input_properties = dict((k,v) for k,v in input_properties.iteritems() if k in included_properties)

        # Set the user owner property to the currently logged-in user (if it's defined for the model class) - note that we're doing this check on the input `cls` parameter
        # and not the self.model class, since we need to support when a model has an inner StructuredProperty, and that model has its own RESTMeta definition.
        if hasattr(cls, 'RESTMeta') and hasattr(cls.RESTMeta, 'user_owner_property'):
            if not model and self.user:
                # Only perform this update when creating a new model - otherwise, each update might change this (very problematic in case an
                # admin updates another user's model instance - it'll change model ownership from that user to the admin)
                input_properties[cls.RESTMeta.user_owner_property] = self.user.key

        if not model:
            # Create a new model instance
            model = cls(**input_properties)
        else:
            # Update an existing model instance
            model.populate(**input_properties)

        return model

    def _value_to_property(self, value, prop):
        """Converts raw data value into an appropriate NDB property"""
        if isinstance(prop, ndb.KeyProperty):
            if value is None:
                return None
            try:
                return ndb.Key(urlsafe=value)
            except ProtocolBufferDecodeError as e:
                # Not a urlsafe key - maybe the referenced model uses caller-supplied
                # IDs (RESTMeta.use_input_id); try building a key from the raw value.
                if prop._kind is not None:
                    model_class = ndb.Model._kind_map.get(prop._kind)
                    if getattr(model_class, 'RESTMeta', None) and getattr(model_class.RESTMeta, 'use_input_id', False):
                        return ndb.Key(model_class, value)
                raise RESTException('invalid key: {}'.format(value) )
        elif isinstance(prop, ndb.TimeProperty):
            if dateutil is None:
                # dateutil not installed - fall back to a strict ISO 8601 time format
                try:
                    return datetime.strptime(value, "%H:%M:%S").time()
                except ValueError as e:
                    raise RESTException("Invalid time. Must be in ISO 8601 format.")
            else:
                return dateutil.parser.parse(value).time()
        elif isinstance(prop, ndb.DateProperty):
            if dateutil is None:
                try:
                    return datetime.strptime(value, "%Y-%m-%d").date()
                except ValueError as e:
                    raise RESTException("Invalid date. Must be in ISO 8601 format.")
            else:
                return dateutil.parser.parse(value).date()
        elif isinstance(prop, ndb.DateTimeProperty):
            if dateutil is None:
                try:
                    return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S")
                except ValueError as e:
                    raise RESTException("Invalid datetime. Must be in ISO 8601 format.")
            else:
                return dateutil.parser.parse(value)
        elif isinstance(prop, ndb.GeoPtProperty):
            # Convert from string (formatted as '52.37, 4.88') to GeoPt
            return ndb.GeoPt(value)
        elif isinstance(prop, ndb.StructuredProperty):
            # It's a structured property - the input data is a dict - recursively parse it as well
            return self._build_model_from_data(value, prop._modelclass)
        else:
            # Return as-is (no need for further manipulation)
            return value
def get_rest_class(ndb_model, base_url, **kwd):
    """Returns a RESTHandlerClass with the ndb_model and permissions set according to input"""

    class RESTHandlerClass(BaseRESTHandler, blobstore_handlers.BlobstoreUploadHandler, blobstore_handlers.BlobstoreDownloadHandler):

        # Resolve the model (accepts either a class or a dotted-path string)
        model = import_class(ndb_model)

        # Save the base API URL for the model (used for BlobKeyProperty)
        if not hasattr(model, 'RESTMeta'):
            class NewRESTMeta: pass
            model.RESTMeta = NewRESTMeta
        model.RESTMeta.base_url = base_url

        # OPTIONS is always allowed; everything else comes from the caller
        permissions = { 'OPTIONS': PERMISSION_ANYONE }
        permissions.update(kwd.get('permissions', {}))
        allow_http_method_override = kwd.get('allow_http_method_override', True)
        allowed_origin = kwd.get('allowed_origin', None)

        # Wrapping in a list so the functions won't be turned into bound methods
        after_get_callback = [kwd.get('after_get_callback', None)]
        before_post_callback = [kwd.get('before_post_callback', None)]
        after_post_callback = [kwd.get('after_post_callback', None)]
        before_put_callback = [kwd.get('before_put_callback', None)]
        after_put_callback = [kwd.get('after_put_callback', None)]
        before_delete_callback = [kwd.get('before_delete_callback', None)]
        after_delete_callback = [kwd.get('after_delete_callback', None)]

        # Validate arguments (we do this at this stage in order to raise exceptions immediately rather than while the app is running)
        if PERMISSION_OWNER_USER in permissions.values():
            if not hasattr(model, 'RESTMeta') or not hasattr(model.RESTMeta, 'user_owner_property'):
                raise ValueError('Must define a RESTMeta.user_owner_property for the model class %s if user-owner permission is used' % (model))
            if not hasattr(model, model.RESTMeta.user_owner_property):
                raise ValueError('The user_owner_property "%s" (defined in RESTMeta.user_owner_property) does not exist in the given model %s' % (model.RESTMeta.user_owner_property, model))

        def __init__(self, request, response):
            # Initialize all three webapp2 base handlers for this request
            self.initialize(request, response)
            blobstore_handlers.BlobstoreUploadHandler.__init__(self, request, response)
            blobstore_handlers.BlobstoreDownloadHandler.__init__(self, request, response)

            # Unwrap the callbacks from their single-item lists (see the class-level comment above)
            self.after_get_callback = self.after_get_callback[0]
            self.before_post_callback = self.before_post_callback[0]
            self.after_post_callback = self.after_post_callback[0]
            self.before_put_callback = self.before_put_callback[0]
            self.after_put_callback = self.after_put_callback[0]
            self.before_delete_callback = self.before_delete_callback[0]
            self.after_delete_callback = self.after_delete_callback[0]

        def rest_method_wrapper(func):
            """Wraps GET/POST/PUT/DELETE methods and adds standard functionality"""
            def inner_f(self, model_id, property_name=None):
                # See if method type is supported
                # NOTE: func.func_name is the Python 2 spelling (py3 uses __name__)
                method_name = func.func_name.upper()
                if method_name not in self.permissions:
                    return self.method_not_allowed()

                # Verify permissions
                permission = self.permissions[method_name]

                if (permission in [PERMISSION_LOGGED_IN_USER, PERMISSION_OWNER_USER, PERMISSION_ADMIN]) and (not self.user):
                    # User not logged-in as required
                    return self.unauthorized()
                elif permission == PERMISSION_ADMIN and not self.is_user_admin:
                    # User is not an admin
                    return self.permission_denied()

                try:
                    # Call original method
                    if model_id:
                        model = self._model_id_to_model(model_id.lstrip('/')) # Get rid of '/' at the beginning

                        if (permission == PERMISSION_OWNER_USER) and (self.get_model_owner(model) != self.user.key):
                            # The currently logged-in user is not the owner of the model
                            return self.permission_denied()

                        if property_name and model:
                            # Get the original name of the property
                            # NOTE: .keys()[0] relies on Python 2's list-returning dict.keys()
                            property_name = translate_property_names({ property_name: True }, model, 'input').keys()[0]

                        result = func(self, model, property_name)
                    else:
                        result = func(self, None, None)

                    if isinstance(result, webapp2.Response):
                        # webapp2.Response instance - no need for further manipulation (return as-is)
                        return result
                    elif not isinstance(result, NoResponseResult):
                        # Only return a result (i.e. write to the response object) if it's not a NoResponseResult (used when serving blobs - BlobKeyProperty)
                        return self.success(result)
                except RESTException, exc:
                    return self.error(exc)

            return inner_f

        #
        # REST endpoint methods
        #

        @rest_method_wrapper
        def options(self, model, property_name=None):
            """OPTIONS endpoint - doesn't return anything (only returns options in the HTTP response headers)"""
            return ''

        @rest_method_wrapper
        def get(self, model, property_name=None):
            """GET endpoint - retrieves a single model instance (by ID) or a list of model instances by query"""
            if not model:
                # Return a query with multiple results
                query = self._filter_query() # Filter the results

                if self.permissions['GET'] == PERMISSION_OWNER_USER:
                    # Return only models owned by currently logged-in user
                    query = query.filter(getattr(self.model, self.user_owner_property) == self.user.key)

                query = self._order_query(query) # Order the results
                (results, cursor) = self._fetch_query(query) # Fetch them (with a limit / specific page, if provided)

                if self.after_get_callback:
                    # Additional processing required
                    results = self.after_get_callback(results)

                return {
                    'results': results,
                    'next_results_url': self._build_next_query_url(cursor)
                    }
            else:
                if property_name:
                    # Return a specific property value - currently supported only for BlobKeyProperty
                    if not hasattr(model, property_name):
                        raise RESTException('Invalid property name "%s"' % property_name)
                    blob_key = getattr(model, property_name)
                    if not blob_key:
                        raise RESTException('"%s" is not set' % property_name)
                    if not isinstance(blob_key, blobstore.BlobKey):
                        raise RESTException('"%s" is not a BlobKeyProperty' % property_name)

                    # Send the blob contents
                    self.send_blob(blob_key)

                    # Make sure we don't return a value (i.e. not write to self.response) - so self.send_blob will work properly
                    return NoResponseResult()

                # Return a single item (query by ID)
                if self.after_get_callback:
                    # Additional processing required
                    model = self.after_get_callback(model)
                return model

        @rest_method_wrapper
        def post(self, model, property_name=None):
            """POST endpoint - adds a new model instance"""
            if model and not property_name:
                # Invalid usage of the endpoint
                raise RESTException('Cannot POST to a specific model ID')

            if model and property_name:
                # POST to a BlobKeyProperty
                if not hasattr(model, property_name):
                    raise RESTException('Invalid property name "%s"' % property_name)
                if not isinstance(model._properties[property_name], ndb.BlobKeyProperty):
                    raise RESTException('"%s" is not a BlobKeyProperty' % property_name)

                # Next, get the created blob
                upload_files = self.get_uploads()
                if not upload_files:
                    # No upload data - this happens when the user POSTS for the first time - we need to create an upload URL and redirect
                    # the user to it (the BlobstoreUploadHandler will handle self.get_uploads() for us and we'll get to the same point).
                    # We do it this way and not simply refer the user directly to create_upload_url, so we won't call create_upload_url
                    # every time the user GETs to /my_model - since each create_upload_url call creates more DB garbage.
                    upload_url = blobstore.create_upload_url(self.request.url)
                    return self.redirect(upload_url, code=307) # We use a 307 redirect in order to tell the client (e.g. browser) to use the same method type (POST) and keep its POST data

                blob_info = upload_files[0]

                if getattr(model, property_name):
                    # The property already has a previous value - delete the older blob
                    blobstore.delete(getattr(model, property_name))

                # Set the blob reference
                setattr(model, property_name, blob_info.key())
                model.put()

                # Everything was OK
                return { 'status': True }

            try:
                # Parse POST data as JSON
                json_data = json.loads(self.request.body)
            except ValueError as exc:
                raise RESTException('Invalid JSON POST data')

            # Accept both a single object and a list of objects
            if not isinstance(json_data, list):
                json_data = [json_data]

            models = []

            for model_to_create in json_data:
                try:
                    # Any exceptions raised due to invalid/missing input will be caught
                    model = self._build_model_from_data(model_to_create, self.model)
                    models.append(model)
                except Exception as exc:
                    raise RESTException('Invalid JSON POST data - %s' % exc)

            if self.before_post_callback:
                models = self.before_post_callback(models, json_data)

            # Commit all models in a transaction
            created_keys = ndb.put_multi(models)

            if self.after_post_callback:
                models = self.after_post_callback(created_keys, models)

            # Return the newly-created model instance(s)
            return models

        @rest_method_wrapper
        def put(self, model, property_name=None):
            """PUT endpoint - updates an existing model instance"""
            models = []

            try:
                # Parse PUT data as JSON
                json_data = json.loads(self.request.body)
            except ValueError as exc:
                raise RESTException('Invalid JSON PUT data')

            if model:
                # Update just one model
                model = self._build_model_from_data(json_data, self.model, model)
                json_data = [json_data]
                models.append(model)
            else:
                # Update several models at once - each item must carry its own 'id'
                if not isinstance(json_data, list):
                    raise RESTException('Invalid JSON PUT data')

                for model_to_update in json_data:
                    model_id = model_to_update.pop('id', None)
                    if model_id is None:
                        raise RESTException('Missing "id" argument for model')

                    model = self._model_id_to_model(model_id)
                    model = self._build_model_from_data(model_to_update, self.model, model)
                    models.append(model)

            if self.before_put_callback:
                models = self.before_put_callback(models, json_data)

            # Commit all models in a transaction
            updated_keys = ndb.put_multi(models)

            if self.after_put_callback:
                models = self.after_put_callback(updated_keys, models)

            return models

        def _delete_model_blobs(self, model):
            """Deletes all blobs associated with the model (finds all BlobKeyProperty)"""
            for (name, prop) in model._properties.iteritems():
                if isinstance(prop, ndb.BlobKeyProperty):
                    if getattr(model, name):
                        blobstore.delete(getattr(model, name))

        @rest_method_wrapper
        def delete(self, model, property_name=None):
            """DELETE endpoint - deletes an existing model instance"""
            models = []

            if model:
                models.append(model)
            else:
                # Delete multiple model instances
                if self.permissions['DELETE'] == PERMISSION_OWNER_USER:
                    # Delete all models owned by the currently logged-in user
                    query = self.model.query().filter(getattr(self.model, self.user_owner_property) == self.user.key)
                else:
                    # Delete all models
                    query = self.model.query()

                # Delete the models (we might need to fetch several pages in case of many results)
                cursor = None
                more_available = True

                while more_available:
                    results, cursor, more_available = query.fetch_page(BaseRESTHandler.DEFAULT_MAX_QUERY_RESULTS, start_cursor=cursor)
                    if results:
                        models.extend(results)

            if self.before_delete_callback:
                models = self.before_delete_callback(models)

            for m in models:
                self._delete_model_blobs(m) # No easy way to delete blobstore entries in a transaction

            deleted_keys = ndb.delete_multi(m.key for m in models)

            if self.after_delete_callback:
                self.after_delete_callback(deleted_keys, models)

            # Return the deleted models
            return models

        #
        # Utility methods/properties
        #

        @webapp2.cached_property
        def is_user_admin(self):
            """Determines if the currently logged-in user is an admin or not (relies on the user class RESTMeta.admin_property)"""
            if not hasattr(self.user, 'RESTMeta') or not hasattr(self.user.RESTMeta, 'admin_property'):
                # This is caused due to a misconfiguration by the coder (didn't define a proper RESTMeta.admin_property) - we raise an exception so
                # it'll trigger a 500 internal server error. This specific argument validation is done here instead of the class definition (where the
                # rest of the arguments are being validated) since at that stage we can't see the webapp2 auth configuration to determine the User model.
                raise ValueError('The user model class %s must include a RESTMeta class with `admin_property` defined' % (self.user.__class__))

            admin_property = self.user.RESTMeta.admin_property
            if not hasattr(self.user, admin_property):
                raise ValueError('The user model class %s does not have the property %s as defined in its RESTMeta.admin_property' % (self.user.__class__, admin_property))

            return getattr(self.user, admin_property)

        @webapp2.cached_property
        def user_owner_property(self):
            """Returns the name of the user_owner_property"""
            return self.model.RESTMeta.user_owner_property

        def get_model_owner(self, model):
            """Returns the user owner of the given `model` (relies on RESTMeta.user_owner_property)"""
            return getattr(model, self.user_owner_property)

    # Return the class statically initialized with given input arguments
    return RESTHandlerClass
class RESTHandler(NamePrefixRoute): # We inherit from NamePrefixRoute so the same router can actually return several routes simultaneously (used for BlobKeyProperty)
    """Returns our RequestHandler with the appropriate permissions and model. Should be used as part of the WSGIApplication routing:
            app = webapp2.WSGIApplication([('/mymodel', RESTHandler(
                MyModel,
                permissions={
                    'GET': PERMISSION_ANYONE,
                    'POST': PERMISSION_LOGGED_IN_USER,
                    'PUT': PERMISSION_OWNER_USER,
                    'DELETE': PERMISSION_ADMIN
                }
            )])
    """

    def __init__(self, url, model, **kwd):
        """Builds the main model route plus one extra route per included BlobKeyProperty.

        `url` must start with '/'; trailing spaces and '/' are stripped.
        Remaining keyword arguments are forwarded to get_rest_class.
        """
        url = url.rstrip(' /')
        model = import_class(model)

        if not url.startswith('/'):
            # Fixed typo in the error message (was 'RESHandler')
            raise ValueError('RESTHandler url should start with "/": %s' % url)

        routes = [
            # Make sure we catch both URLs: to '/mymodel' and to '/mymodel/123'
            webapp2.Route(url + '<model_id:(/.+)?|/>', get_rest_class(model, url, **kwd), 'main')
        ]

        included_properties = get_included_properties(model, 'input')
        translation_table = get_translation_table(model, 'input')

        # Build extra routes for each BlobKeyProperty
        # (.items() instead of the Python-2-only .iteritems() - identical behavior here)
        for (name, prop) in model._properties.items():
            if isinstance(prop, ndb.BlobKeyProperty) and name in included_properties:
                # Register a route for the current BlobKeyProperty (use the public/translated name)
                property_name = translation_table.get(name, name)
                blob_property_url = '%s/<model_id:.+?>/<property_name:%s>' % (url, property_name) # e.g. /api/my_model/<SOME_KEY>/blob_prop

                # Upload/Download blob route and handler - inserted first so it wins over the 'main' route
                routes.insert(0, webapp2.Route(blob_property_url, get_rest_class(model, url, **kwd), 'upload-download-blob'))

        super(RESTHandler, self).__init__('rest-handler-', routes)
| 41.375 | 189 | 0.618034 | 4,850 | 40,713 | 5.035052 | 0.124124 | 0.016216 | 0.01638 | 0.00905 | 0.247461 | 0.165192 | 0.121622 | 0.086446 | 0.062695 | 0.056552 | 0 | 0.003303 | 0.300911 | 40,713 | 983 | 190 | 41.417091 | 0.854683 | 0.168054 | 0 | 0.237548 | 0 | 0.001916 | 0.080766 | 0.015075 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.005747 | 0.045977 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
035c251c4c248ccb50b1534a85f228e49bcae5e4 | 4,431 | py | Python | gallery/mode/photo_manager.py | bob-chen/gallery | 623c522bee7b6a469c3ef89396cabd1ac25dec46 | [
"MIT"
] | null | null | null | gallery/mode/photo_manager.py | bob-chen/gallery | 623c522bee7b6a469c3ef89396cabd1ac25dec46 | [
"MIT"
] | null | null | null | gallery/mode/photo_manager.py | bob-chen/gallery | 623c522bee7b6a469c3ef89396cabd1ac25dec46 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Created on Mar 19, 2015
@author: Bob.Chen
'''
import sys
import urllib2
import time
from PIL import Image
from constant import PHOTO_DATA_ROOT
from common import Logger, RandomID, getPhotoUrl, getPhotoPath
from error_api import Err
from gallery.models import Photo
from StringIO import StringIO
class PhotoManager(object):
    """Static helpers for storing photos on disk and serving (optionally cropped) photo data.

    All methods return Err.genOK(...) / Err.genErr(...) result wrappers instead of raising.
    """

    @staticmethod
    def addPhoto(content, title="", comment="", isUrl=False):
        """Stores a photo and records it in the Photo model.

        `content` is an uploaded file-like object, or a URL string when `isUrl` is True.
        Returns Err.genOK(photo.id) on success, Err.genErr(ERR_ADD_PHOTO_FAIL) on failure.
        """
        Logger.LogParameters(funcname=sys._getframe().f_code.co_name, func_vars=vars(), module="PhotoManager")
        try:
            imageName = RandomID.gen()
            if not isUrl:
                fileObj = StringIO(content.read())
            else:
                # `content` is a URL - download the image data
                fileObj = StringIO(urllib2.urlopen(content).read())

            img = Image.open(fileObj)
            width, height = img.size
            fileName = "%s%s%s" % (imageName, ".", img.format)
            filePath = (PHOTO_DATA_ROOT + "%s") % fileName
            img.save(filePath)

            photo = Photo(title=title, comment=comment, datetime=time.time(),
                          imageName=fileName, width=width, height=height)
            photo.save()
            return Err.genOK(photo.id)
        except Exception as ex:  # 'except ... as' works on Python 2.6+ and 3 (old comma form is 2-only)
            # Removed the debug 'print ex' - the logger call already records the failure
            Logger.SaveLogDebug(ex, level=Logger.LEVEL_ERROR, module="PhotoManager")
            return Err.genErr(Err.ERR_ADD_PHOTO_FAIL)

    @staticmethod
    def getPhotos(page=1, prePage=5):
        """Returns one page of photos (`prePage` items per 1-based `page`) as a list of dicts,
        each carrying the model attributes plus an 'imageUrl' entry.
        Returns Err.genOK(list) or Err.genErr(ERR_GET_PHOTO_LIST_FAIL)."""
        Logger.LogParameters(funcname=sys._getframe().f_code.co_name, func_vars=vars(), module="PhotoManager")
        try:
            photos = []
            intPage = int(page)
            intPrePage = int(prePage)
            photoItems = Photo.objects.all()[(intPage - 1) * intPrePage:intPage * intPrePage]
            attrList = ["id", "title", "comment", "datetime", "width", "height", "imageName"]
            for item in photoItems:
                photo = {key: getattr(item, key) for key in attrList}
                photo["imageUrl"] = getPhotoUrl(item.imageName)
                photos.append(photo)
            return Err.genOK(photos)
        except Exception as ex:
            Logger.SaveLogDebug(ex, level=Logger.LEVEL_ERROR, module="PhotoManager")
            return Err.genErr(Err.ERR_GET_PHOTO_LIST_FAIL)

    @staticmethod
    def getPhoto(imageName, pointX=0, pointY=0, needWidth=0):
        """Returns photo data, optionally cropped.

        When `needWidth` > 0, a crop starting at (pointX, pointY) is taken, clamped to the
        image bounds while preserving the image's aspect ratio.
        Returns Err.genOK([StringIO, format]) - use StringIO.getvalue() for the bytes -
        or Err.genErr(ERR_GET_PHOTO_DATA_FAIL) on failure.
        """
        Logger.LogParameters(funcname=sys._getframe().f_code.co_name, func_vars=vars(), module="PhotoManager")
        try:
            imagePath = getPhotoPath(imageName)
            needWidth, pointX, pointY = int(needWidth), int(pointX), int(pointY)

            im = Image.open(imagePath)
            width, height = im.size
            imgFormat = im.format
            scale = float(height) / width  # aspect ratio (height per unit of width)

            box = (0, 0, width, height)  # default: the whole image
            if needWidth > 0 and pointX < width and pointY < height:
                # Clamp the requested width to the image, derive the height from the
                # aspect ratio; if the height overflows, clamp it and re-derive the width.
                if needWidth + pointX > width:
                    needWidth = width - pointX
                needHeight = needWidth * scale
                if needHeight + pointY > height:
                    needHeight = height - pointY
                    needWidth = needHeight / scale
                # Guard against degenerate (sub-pixel) crop sizes
                if needHeight < 1:
                    needHeight = 1
                if needWidth < 1:
                    needWidth = 1
                # (Removed leftover debug prints that wrote to stdout on every request)
                box = (pointX, pointY, int(pointX + needWidth), int(pointY + needHeight))

            region = im.crop(box)
            container = StringIO()
            region.save(container, imgFormat)
            return Err.genOK([container, imgFormat])
        except Exception as ex:
            Logger.SaveLogDebug(ex, level=Logger.LEVEL_ERROR, module="PhotoManager")
            return Err.genErr(Err.ERR_GET_PHOTO_DATA_FAIL)
| 35.166667 | 116 | 0.536448 | 425 | 4,431 | 5.52 | 0.331765 | 0.046036 | 0.034527 | 0.038363 | 0.244672 | 0.244672 | 0.244672 | 0.244672 | 0.244672 | 0.244672 | 0 | 0.01216 | 0.368991 | 4,431 | 126 | 117 | 35.166667 | 0.826896 | 0.032047 | 0 | 0.219512 | 0 | 0 | 0.031589 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.109756 | null | null | 0.04878 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
035eef11fbf5d60293a71487c390191332e516a1 | 1,140 | py | Python | hddcoin/hodl/cli/cmd_profits.py | u4ma-hdd/hddcoin-blockchain | 4199d1f1d87e129ae9c08bf50dd48ec3b2c08727 | [
"Apache-2.0"
] | 37 | 2021-07-08T23:42:01.000Z | 2022-03-26T21:30:10.000Z | hddcoin/hodl/cli/cmd_profits.py | u4ma-hdd/hddcoin-blockchain | 4199d1f1d87e129ae9c08bf50dd48ec3b2c08727 | [
"Apache-2.0"
] | 13 | 2021-07-11T15:12:01.000Z | 2022-03-15T08:36:18.000Z | hddcoin/hodl/cli/cmd_profits.py | u4ma-hdd/hddcoin-blockchain | 4199d1f1d87e129ae9c08bf50dd48ec3b2c08727 | [
"Apache-2.0"
] | 19 | 2021-07-10T14:09:07.000Z | 2022-03-14T11:17:05.000Z | # -*- coding: utf-8 -*-
from __future__ import annotations
import decimal
import json
import blspy #type:ignore
from hddcoin.hodl.hodlrpc import HodlRpcClient
from hddcoin.hodl.util import vlog
from .colours import *
from .colours import _
async def cmd_profits(hodlRpcClient: HodlRpcClient,
                      *,
                      dumpJson: bool,
                      ) -> None:
    """Fetch and display profit information for the HODL program.

    Args:
        hodlRpcClient: connected HODL RPC client used to perform the query.
        dumpJson: if True, print the raw API response as indented JSON
            instead of the coloured human-readable summary.
    """
    vlog(1, "Fetching profit information")
    # Plain string literal: the original used an f-string with nothing to
    # interpolate.
    apiDict = await hodlRpcClient.get("getProfits")
    if dumpJson:
        print(json.dumps(apiDict, indent=4))
    else:
        # The API reports byte-denominated amounts; 1 HDD == 10**12 bytes.
        pastProfits_bytes = apiDict["profits_past"]
        futureProfits_bytes = apiDict["profits_future"]
        pastProfits_hdd = decimal.Decimal(pastProfits_bytes) / int(1e12)
        futureProfits_hdd = decimal.Decimal(futureProfits_bytes) / int(1e12)
        print(f"{C}Your profits from the HODL program are as follows:{_} ")
        print(f"  {W}HODL rewards paid out in full to date: {G}{pastProfits_hdd} {Y}HDD{_}")
        print(f"  {W}HODL rewards pending with current contracts: {G}{futureProfits_hdd} {Y}HDD{_}")
        print(f"{G}KEEP ON HODL'ING!!!{_}")
| 35.625 | 100 | 0.652632 | 137 | 1,140 | 5.284672 | 0.525547 | 0.033149 | 0.041436 | 0.030387 | 0.077348 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010369 | 0.238596 | 1,140 | 31 | 101 | 36.774194 | 0.823733 | 0.02807 | 0 | 0 | 0 | 0.08 | 0.277828 | 0.01991 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.32 | 0 | 0.32 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
036da201f76a97f3bd6dd5bdbe4284b68396ee50 | 463 | py | Python | 19. Backtracking/subsets of an array.py | Ujjawalgupta42/Hacktoberfest2021-DSA | eccd9352055085973e3d6a1feb10dd193905584b | [
"MIT"
] | 225 | 2021-10-01T03:09:01.000Z | 2022-03-11T11:32:49.000Z | 19. Backtracking/subsets of an array.py | Ujjawalgupta42/Hacktoberfest2021-DSA | eccd9352055085973e3d6a1feb10dd193905584b | [
"MIT"
] | 252 | 2021-10-01T03:45:20.000Z | 2021-12-07T18:32:46.000Z | 19. Backtracking/subsets of an array.py | Ujjawalgupta42/Hacktoberfest2021-DSA | eccd9352055085973e3d6a1feb10dd193905584b | [
"MIT"
] | 911 | 2021-10-01T02:55:19.000Z | 2022-02-06T09:08:37.000Z | #Input: nums = [1,2,3]
#Output: [[],[1],[2],[1,2],[3],[1,3],[2,3],[1,2,3]]
def subsets(self, nums: list) -> list:
    """Return the power set (all subsets) of ``nums``.

    The enumeration itself is done by ``self.helper``, a depth-first
    backtracking walk that appends every visited subset to ``self.result``.

    Fix: the original annotations referenced ``List`` from ``typing``
    without importing it, which raises ``NameError`` when the annotations
    are evaluated; the builtin ``list`` needs no import.
    """
    self.result = []
    self.helper(nums, 0, [])
    return self.result
def helper(self, nums, start, subset):
    """Backtracking step: record ``subset``, then extend it with each
    element of ``nums[start:]`` in turn and recurse."""
    # Snapshot the current subset (a copy, since `subset` is mutated below).
    self.result.append(list(subset))
    for idx in range(start, len(nums)):
        subset.append(nums[idx])
        self.helper(nums, idx + 1, subset)
        # Undo the choice before trying the next element.
        subset.pop()
| 28.9375 | 56 | 0.492441 | 65 | 463 | 3.507692 | 0.369231 | 0.035088 | 0.039474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.052147 | 0.295896 | 463 | 15 | 57 | 30.866667 | 0.647239 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
03731b307ec6331bb69d499df497733f21d49ef0 | 305 | py | Python | tests/test_version.py | marten-cz/citools | 452251b376c48bc8a9e1a8666d892a8e5a309108 | [
"MIT"
] | null | null | null | tests/test_version.py | marten-cz/citools | 452251b376c48bc8a9e1a8666d892a8e5a309108 | [
"MIT"
] | null | null | null | tests/test_version.py | marten-cz/citools | 452251b376c48bc8a9e1a8666d892a8e5a309108 | [
"MIT"
] | null | null | null | import click
from click.testing import CliRunner
from cctools.commands.version.commands import cli
def test_version():
    """CLI smoke test: `raw --show` reports the version stored in the stub file."""
    runner = CliRunner()
    args = ['raw', '--show', '--file', './tests/stubs/version.txt']
    outcome = runner.invoke(cli, args)
    assert outcome.exit_code == 0
    assert '2.5.3' in outcome.output
| 27.727273 | 89 | 0.695082 | 42 | 305 | 5 | 0.690476 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015564 | 0.157377 | 305 | 10 | 90 | 30.5 | 0.801556 | 0 | 0 | 0 | 0 | 0 | 0.147541 | 0.081967 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.125 | false | 0 | 0.375 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
cee35a0c038d5a35a076a859296711fc3b4238b6 | 3,932 | py | Python | rtgraph/ui/mainWindow_ui.py | spewil/RTGraph | 0e3f4502988a36866ac2aaa27e232ade2128d7ea | [
"MIT"
] | null | null | null | rtgraph/ui/mainWindow_ui.py | spewil/RTGraph | 0e3f4502988a36866ac2aaa27e232ade2128d7ea | [
"MIT"
] | null | null | null | rtgraph/ui/mainWindow_ui.py | spewil/RTGraph | 0e3f4502988a36866ac2aaa27e232ade2128d7ea | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainWindow.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """Auto-generated (pyuic5) UI builder for the RTGraph main window.

    Do not edit by hand: regenerate from ``mainWindow.ui`` instead (see the
    file header warning).
    """

    def setupUi(self, MainWindow):
        """Build the widget tree and layouts on ``MainWindow``."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(704, 558)
        MainWindow.setMinimumSize(QtCore.QSize(0, 0))
        MainWindow.setStyleSheet("")
        MainWindow.setTabShape(QtWidgets.QTabWidget.Rounded)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName("gridLayout")
        # Bottom control strip: source/port/speed selectors plus start/stop.
        self.Layout_controls = QtWidgets.QGridLayout()
        self.Layout_controls.setObjectName("Layout_controls")
        self.cBox_Speed = QtWidgets.QComboBox(self.centralwidget)
        self.cBox_Speed.setEditable(True)
        self.cBox_Speed.setObjectName("cBox_Speed")
        self.Layout_controls.addWidget(self.cBox_Speed, 1, 1, 1, 1)
        self.pButton_Stop = QtWidgets.QPushButton(self.centralwidget)
        self.pButton_Stop.setObjectName("pButton_Stop")
        self.Layout_controls.addWidget(self.pButton_Stop, 1, 3, 1, 1)
        self.cBox_Port = QtWidgets.QComboBox(self.centralwidget)
        self.cBox_Port.setEditable(True)
        self.cBox_Port.setObjectName("cBox_Port")
        self.Layout_controls.addWidget(self.cBox_Port, 0, 1, 1, 1)
        self.cBox_Source = QtWidgets.QComboBox(self.centralwidget)
        self.cBox_Source.setObjectName("cBox_Source")
        self.Layout_controls.addWidget(self.cBox_Source, 0, 0, 1, 1)
        self.pButton_Start = QtWidgets.QPushButton(self.centralwidget)
        self.pButton_Start.setMinimumSize(QtCore.QSize(0, 0))
        self.pButton_Start.setObjectName("pButton_Start")
        self.Layout_controls.addWidget(self.pButton_Start, 0, 3, 1, 1)
        self.sBox_Samples = QtWidgets.QSpinBox(self.centralwidget)
        self.sBox_Samples.setMinimum(1)
        self.sBox_Samples.setMaximum(100000)
        self.sBox_Samples.setProperty("value", 500)
        self.sBox_Samples.setObjectName("sBox_Samples")
        self.Layout_controls.addWidget(self.sBox_Samples, 0, 2, 1, 1)
        self.chBox_export = QtWidgets.QCheckBox(self.centralwidget)
        self.chBox_export.setEnabled(True)
        self.chBox_export.setObjectName("chBox_export")
        self.Layout_controls.addWidget(self.chBox_export, 1, 2, 1, 1)
        self.gridLayout.addLayout(self.Layout_controls, 7, 0, 1, 2)
        # Plot area: a borderless pyqtgraph GraphicsLayoutWidget.
        self.Layout_graphs = QtWidgets.QGridLayout()
        self.Layout_graphs.setObjectName("Layout_graphs")
        self.plt = GraphicsLayoutWidget(self.centralwidget)
        self.plt.setAutoFillBackground(False)
        self.plt.setStyleSheet("border: 0px;")
        self.plt.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.plt.setFrameShadow(QtWidgets.QFrame.Plain)
        self.plt.setLineWidth(0)
        self.plt.setObjectName("plt")
        self.Layout_graphs.addWidget(self.plt, 0, 0, 1, 1)
        self.gridLayout.addLayout(self.Layout_graphs, 2, 1, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Apply (translatable) display strings to the widgets."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "RTGraph"))
        self.pButton_Stop.setText(_translate("MainWindow", "Stop"))
        self.pButton_Start.setText(_translate("MainWindow", "Start"))
        self.sBox_Samples.setSuffix(_translate("MainWindow", " samples"))
        self.sBox_Samples.setPrefix(_translate("MainWindow", "Show "))
        self.chBox_export.setText(_translate("MainWindow", "Export to CSV"))
from pyqtgraph import GraphicsLayoutWidget
| 48.54321 | 76 | 0.71236 | 441 | 3,932 | 6.206349 | 0.251701 | 0.051151 | 0.076726 | 0.069054 | 0.219949 | 0.173913 | 0.025575 | 0 | 0 | 0 | 0 | 0.020724 | 0.177772 | 3,932 | 80 | 77 | 49.15 | 0.825858 | 0.046033 | 0 | 0 | 1 | 0 | 0.069979 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030303 | false | 0 | 0.030303 | 0 | 0.075758 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ceee14d65f071777843767860f0b7d4cc2b7e26c | 4,105 | py | Python | generator_app/utils.py | badf00d21/JSD2021 | 0af83e671bdc2570b617ed29b395db4193dd7daf | [
"MIT"
] | null | null | null | generator_app/utils.py | badf00d21/JSD2021 | 0af83e671bdc2570b617ed29b395db4193dd7daf | [
"MIT"
] | null | null | null | generator_app/utils.py | badf00d21/JSD2021 | 0af83e671bdc2570b617ed29b395db4193dd7daf | [
"MIT"
] | null | null | null | # author: badf00d21
import os
from os.path import dirname, join
from textx import metamodel_from_file
from textx.export import metamodel_export, model_export
from datetime import datetime
from distutils.dir_util import copy_tree
CURRENT_DIR = dirname(__file__)
PROJECT_DIRECTORY_TREE = {}
PROJECT_GENERAL_INFO = {}
def init_general_info(projectModel):
    """Populate the module-level PROJECT_GENERAL_INFO dict from the parsed model."""
    global PROJECT_GENERAL_INFO
    build = projectModel.gradleBuildModel
    groupId = build.groupId
    artifact = build.artifactId
    name = build.projectName
    # Default version unless the model supplies a non-empty one.
    version = build.appVersion if build.appVersion != '' else '1.0.0'
    PROJECT_GENERAL_INFO = {
        'author': 'JSD SpringBoot generator by Petar Makevic',
        'date': datetime.now().strftime('%d.%m.%y'),
        'packageRoot': groupId + '.' + name.lower(),
        'groupId': groupId,
        'artifactId': artifact,
        'name': name,
        'version': version
    }
def init_project_directory_tree(output_path):
    """Fill PROJECT_DIRECTORY_TREE with every directory the generator emits."""
    global PROJECT_DIRECTORY_TREE
    tree = PROJECT_DIRECTORY_TREE
    # Package path mirrors the Java package root, e.g. com.acme.app -> com/acme/app
    package_path = PROJECT_GENERAL_INFO['packageRoot'].replace('.', '/')
    tree['root'] = join(output_path, PROJECT_GENERAL_INFO['name'])
    tree['main'] = join(tree['root'], 'src/main/java/' + package_path)
    tree['resources'] = join(tree['root'], 'src/main/resources/')
    tree['test'] = join(tree['root'], 'src/test/java/' + package_path)
    tree['generated'] = join(tree['main'], 'generated')
    tree['model'] = join(tree['generated'], 'model')
    tree['service_gen'] = join(tree['generated'], 'service')
    tree['service'] = join(tree['main'], 'service')
    tree['config'] = join(tree['main'], 'config')
    tree['repository'] = join(tree['main'], 'repository')
    tree['controller_gen'] = join(tree['generated'], 'controller')
    tree['controller'] = join(tree['main'], 'controller')
def copy_static_files():
    """Copy the bundled gradle wrapper files into the generated project root."""
    source_dir = join(CURRENT_DIR, './static_files/gradle_wrapper')
    target_dir = PROJECT_DIRECTORY_TREE['root']
    copy_tree(source_dir, target_dir)
def prepare_env(projectModel, output_path):
    """Create the full output directory layout for project generation.

    Returns the populated PROJECT_GENERAL_INFO dict.
    """
    if not os.path.exists(output_path):
        os.makedirs(output_path)
        print('Created directories on path: ' + output_path + ' for generating project & dotexport.')
    init_general_info(projectModel)
    init_project_directory_tree(output_path)
    copy_static_files()
    # Materialise every directory recorded in the tree that is still missing.
    for key, directory in PROJECT_DIRECTORY_TREE.items():
        if not os.path.exists(directory):
            os.makedirs(directory)
            print('Generated project directory on path: ', directory)
    return PROJECT_GENERAL_INFO
class BaseType(object):
    """textX custom class representing a primitive type.

    Instances are registered as textX builtins so grammar references to
    'int', 'String', 'Long' and 'boolean' resolve to these objects.
    """

    def __init__(self, parent, name):
        # `parent` follows the textX custom-class convention; None for builtins.
        self.parent = parent
        self.name = name

    def __str__(self):
        return self.name
def get_metamodel(path_to_grammar):
    """Load the textX metamodel, registering Java primitives as builtins."""
    simple_types = {
        type_name: BaseType(None, type_name)
        for type_name in ('int', 'String', 'Long', 'boolean')
    }
    print('Loading metamodel_from_file: ' + path_to_grammar)
    return metamodel_from_file(path_to_grammar,
                               classes=[BaseType],
                               builtins=simple_types)
def export_to_dot(mm, mff, path):
    """Export the metamodel and model as .dot files under <path>/dotexport."""
    dot_folder = join(path, 'dotexport')
    if not os.path.exists(dot_folder):
        os.mkdir(dot_folder)
    meta_target = join(dot_folder, 'meta-model.dot')
    model_target = join(dot_folder, 'model.dot')
    metamodel_export(mm, meta_target)
    model_export(mff, model_target)
    print('.dot files generated in:' + dot_folder)
| 40.245098 | 147 | 0.707917 | 477 | 4,105 | 5.773585 | 0.234801 | 0.191721 | 0.232389 | 0.095861 | 0.234205 | 0.152869 | 0.068991 | 0.043573 | 0.043573 | 0 | 0 | 0.00206 | 0.172229 | 4,105 | 101 | 148 | 40.643564 | 0.808417 | 0.004141 | 0 | 0 | 0 | 0 | 0.160832 | 0.007099 | 0 | 0 | 0 | 0 | 0 | 1 | 0.097561 | false | 0 | 0.073171 | 0.012195 | 0.219512 | 0.04878 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ceef19c41376f45364d4e99a91be8b81f66a7009 | 1,286 | py | Python | maskrcnn_benchmark/layers/_utils.py | cxq1/paddle_VinVL | f9136871c43b033cd209ddc7579fa986208e37db | [
"MIT"
] | null | null | null | maskrcnn_benchmark/layers/_utils.py | cxq1/paddle_VinVL | f9136871c43b033cd209ddc7579fa986208e37db | [
"MIT"
] | null | null | null | maskrcnn_benchmark/layers/_utils.py | cxq1/paddle_VinVL | f9136871c43b033cd209ddc7579fa986208e37db | [
"MIT"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import glob
import os.path
import os
import paddle
try:
    from paddle.utils.cpp_extension import load as load_ext
    # from torch.utils.cpp_extension import CUDA_HOME#todo
    # CUDA_HOME is resolved from the environment (either CUDA_HOME or
    # CUDA_PATH) instead of the torch-only cpp_extension helper above.
    cuda_home = os.environ.get('CUDA_HOME') or os.environ.get('CUDA_PATH')
    CUDA_HOME = cuda_home
except ImportError:
    # NOTE(review): the message still mentions PyTorch although the import
    # above is PaddlePaddle — apparently a leftover from the torch original.
    raise ImportError("The cpp layer extensions requires PyTorch 0.4 or higher")
def _load_C_extensions():
    """JIT-compile and load the C++/CUDA extension sources under ../csrc.

    CUDA sources are added only when Paddle was built with CUDA support and
    a CUDA toolkit location (CUDA_HOME / CUDA_PATH) was found.
    """
    this_dir = os.path.dirname(os.path.abspath(__file__))
    this_dir = os.path.dirname(this_dir)
    this_dir = os.path.join(this_dir, "csrc")

    main_file = glob.glob(os.path.join(this_dir, "*.cpp"))
    source_cpu = glob.glob(os.path.join(this_dir, "cpu", "*.cpp"))
    source_cuda = glob.glob(os.path.join(this_dir, "cuda", "*.cu"))

    source = main_file + source_cpu
    extra_cflags = []
    # BUG FIX: paddle has no `paddle.cuda` module (that is the torch API);
    # paddle.is_compiled_with_cuda() is the Paddle way to detect CUDA support.
    if paddle.is_compiled_with_cuda() and CUDA_HOME is not None:
        source.extend(source_cuda)
        extra_cflags = ["-DWITH_CUDA"]
    source = [os.path.join(this_dir, s) for s in source]

    extra_include_paths = [this_dir]
    return load_ext(
        "torchvision",
        source,
        extra_cflags=extra_cflags,
        extra_include_paths=extra_include_paths,
    )
# Build/load the native extensions once at import time.
_C = _load_C_extensions()
| 29.906977 | 80 | 0.693624 | 191 | 1,286 | 4.408377 | 0.361257 | 0.083135 | 0.059382 | 0.083135 | 0.17696 | 0.089074 | 0.089074 | 0 | 0 | 0 | 0 | 0.001923 | 0.191291 | 1,286 | 42 | 81 | 30.619048 | 0.807692 | 0.09409 | 0 | 0 | 0 | 0 | 0.103359 | 0 | 0 | 0 | 0 | 0.02381 | 0 | 1 | 0.032258 | false | 0 | 0.225806 | 0 | 0.290323 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cefdf491da32c65dfd43ba1c5f43f8392677e962 | 1,354 | py | Python | python/app/plugins/port/Mysql/Mysql_Weakpwd.py | taomujian/linbing | fe772a58f41e3b046b51a866bdb7e4655abaf51a | [
"MIT"
] | 351 | 2020-02-26T05:23:26.000Z | 2022-03-26T12:39:19.000Z | python/app/plugins/port/Mysql/Mysql_Weakpwd.py | taomujian/linbing | fe772a58f41e3b046b51a866bdb7e4655abaf51a | [
"MIT"
] | 15 | 2020-03-26T07:31:49.000Z | 2022-03-09T02:12:17.000Z | python/app/plugins/port/Mysql/Mysql_Weakpwd.py | taomujian/linbing | fe772a58f41e3b046b51a866bdb7e4655abaf51a | [
"MIT"
] | 99 | 2020-02-28T07:30:46.000Z | 2022-03-16T16:41:09.000Z | #!/usr/bin/env python3
import pymysql
from urllib.parse import urlparse
class Mysql_Weakpwd_BaseVerify:
    """Checks a MySQL service for a weak root password via a dictionary attack."""

    def __init__(self, url):
        # Plugin metadata consumed by the scanner framework (values kept as-is).
        self.info = {
            'name': 'Mysql 弱口令漏洞',
            'description': 'Mysql 弱口令漏洞',
            'date': '',
            'exptype': 'check',
            'type': 'Weakpwd'
        }
        self.url = url
        url_parse = urlparse(self.url)
        self.host = url_parse.hostname
        self.port = url_parse.port
        if not self.port:
            self.port = '3306'  # default MySQL port when the URL omits one

    def check(self):
        """Try each password from app/password.txt as the MySQL `root` user.

        Returns:
            bool: True when a working (weak) password is found, else False.
        """
        # FIX: use a context manager so the wordlist file is always closed
        # (the original left the file handle open for the process lifetime).
        with open('app/password.txt', 'r', encoding='utf-8') as wordlist:
            passwords = [line.strip() for line in wordlist if line != '']
        for pwd in passwords:
            try:
                conn = pymysql.connect(host=self.host, port=int(self.port),
                                       user='root', password=pwd,
                                       database='mysql')
                print('存在Mysql弱口令,弱口令为:', pwd)
                conn.close()
                return True
            except Exception as e:
                # Connection refused / auth failure: report and try the next one.
                print(e)
        print('不存在Mysql弱口令')
        return False
if __name__ == "__main__":
Mysql_Weakpwd = Mysql_Weakpwd_BaseVerify('http://10.4.33.38:3306')
Mysql_Weakpwd.check() | 27.08 | 130 | 0.495569 | 140 | 1,354 | 4.642857 | 0.55 | 0.073846 | 0.067692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020408 | 0.384786 | 1,354 | 50 | 131 | 27.08 | 0.759904 | 0.055391 | 0 | 0.055556 | 0 | 0 | 0.125806 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0.111111 | 0.055556 | 0 | 0.194444 | 0.083333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
30153dd44ea6e8ce8941eb233f0c9ad38a2fdb9b | 510 | py | Python | src/apps/blog/forms.py | snicoper/snicoper.com | 22c17b5ead6096227a3415770c0cbd2923f2f14a | [
"MIT"
] | 2 | 2017-04-22T11:35:21.000Z | 2017-09-01T19:49:59.000Z | src/apps/blog/forms.py | snicoper/snicoper.com | 22c17b5ead6096227a3415770c0cbd2923f2f14a | [
"MIT"
] | null | null | null | src/apps/blog/forms.py | snicoper/snicoper.com | 22c17b5ead6096227a3415770c0cbd2923f2f14a | [
"MIT"
] | null | null | null | from django import forms
class ArticleRecommendForm(forms.Form):
    """Form for recommending an article to someone by email."""
    # Sender's display name.
    name = forms.CharField(
        label='Nombre'
    )
    # Sender's email address.
    from_email = forms.EmailField(
        label='Tu email',
        widget=forms.EmailInput()
    )
    # Recipient's email address.
    to_email = forms.EmailField(
        label='Email destinatario',
        widget=forms.EmailInput()
    )
    # Optional message to include with the recommendation.
    message = forms.CharField(
        label='Mensaje (Opcional)',
        required=False,
        widget=forms.Textarea()
    )
| 23.181818 | 46 | 0.613725 | 48 | 510 | 6.479167 | 0.583333 | 0.106109 | 0.122187 | 0.160772 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.272549 | 510 | 21 | 47 | 24.285714 | 0.838275 | 0.070588 | 0 | 0.111111 | 0 | 0 | 0.106838 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.055556 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3015bb26c571eef72566233c384c496194c64fab | 1,020 | py | Python | thermister_table.py | thesteg/thermister_table | 08d0f08009fc8db20224b21971f0396762c816f4 | [
"MIT"
] | null | null | null | thermister_table.py | thesteg/thermister_table | 08d0f08009fc8db20224b21971f0396762c816f4 | [
"MIT"
] | null | null | null | thermister_table.py | thesteg/thermister_table | 08d0f08009fc8db20224b21971f0396762c816f4 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import math
import sys
#bValue = float(sys.argv[1])
#nomOhm = float(sys.argv[2])
#nomTemp = float(sys.argv[3])
#seriesR = float(sys.argv[4])
#adcRes = int(sys.argv[5])
# Hard-coded thermistor / divider parameters (the commented-out argv parsing
# above was presumably the intended CLI — TODO confirm before re-enabling).
bValue = 3750  # thermistor beta (B) coefficient
nomOhm = 10000  # thermistor nominal resistance R0
nomTemp = 250  # nominal temperature; appears to be tenths of a degree C
               # (the 2731.5 offset below is 10 * 273.15 K) — TODO confirm
seriesR = 4700  # fixed series resistor of the voltage divider
adcRes = 10  # ADC resolution in bits
adcMax = 2**adcRes  # number of distinct ADC codes
adcVal = 0
vals = [0] * adcMax  # one table entry per ADC code
# Beta-equation lookup table: invert the divider ratio to recover the
# thermistor resistance, then 1/T = 1/T0 + ln(R/R0)/B.
while adcVal < adcMax:
    try:
        vals[adcVal] = 10 / ((math.log((seriesR / (((adcMax - 1) / adcVal) -1)) / nomOhm) / bValue) + (10 / (nomTemp + 2731.5))) -2731
        adcVal += 1
    except:
        # adcVal == 0 and adcVal == adcMax - 1 divide by zero (or hit the log
        # domain edge); leave those entries at 0 and keep going.
        adcVal += 1
# Emit the table as a C array initializer, eight entries per row.
print("int16_t tVals[{0}] = {{".format(adcMax))
adcVal = 0
while adcVal < adcMax:
    print(" {0},{1},{2},{3},{4},{5},{6},{7},"
        .format(
            round(vals[adcVal]),
            round(vals[adcVal + 1]),
            round(vals[adcVal + 2]),
            round(vals[adcVal + 3]),
            round(vals[adcVal + 4]),
            round(vals[adcVal + 5]),
            round(vals[adcVal + 6]),
            round(vals[adcVal + 7]),
        ))
    adcVal += 8
print("};\n") | 20.816327 | 134 | 0.519608 | 133 | 1,020 | 3.977444 | 0.323308 | 0.170132 | 0.226843 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08642 | 0.285294 | 1,020 | 49 | 135 | 20.816327 | 0.639232 | 0.148039 | 0 | 0.1875 | 0 | 0 | 0.072917 | 0.037037 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.0625 | 0 | 0.0625 | 0.09375 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3019bbbf445a8edf5dc9ccd702eacd957bed02fc | 1,636 | py | Python | hivs_administrative/migrations/0006_add area type model.py | tehamalab/hivs | db7dfa7f89174be07d42bd469fd23c8553c0eff2 | [
"MIT"
] | null | null | null | hivs_administrative/migrations/0006_add area type model.py | tehamalab/hivs | db7dfa7f89174be07d42bd469fd23c8553c0eff2 | [
"MIT"
] | null | null | null | hivs_administrative/migrations/0006_add area type model.py | tehamalab/hivs | db7dfa7f89174be07d42bd469fd23c8553c0eff2 | [
"MIT"
] | null | null | null | # Generated by Django 2.0.7 on 2018-09-19 18:38
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the AbstractAreaType/AreaType models and link Area.area_type to AreaType."""

    dependencies = [
        ('hivs_administrative', '0005_set_extras_default_value_to_callable'),
    ]

    operations = [
        # Base table holding the shared area-type fields.
        migrations.CreateModel(
            name='AbstractAreaType',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True, verbose_name='name')),
                ('timestamp', models.DateTimeField(auto_now_add=True, verbose_name='created')),
                ('last_modified', models.DateTimeField(auto_now=True, null=True, verbose_name='last modified')),
            ],
            options={
                'verbose_name': 'Area type',
                'verbose_name_plural': 'Area types',
            },
        ),
        # Concrete child of AbstractAreaType (multi-table inheritance via
        # the parent-link OneToOneField).
        migrations.CreateModel(
            name='AreaType',
            fields=[
                ('abstractareatype_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='hivs_administrative.AbstractAreaType')),
            ],
            bases=('hivs_administrative.abstractareatype',),
        ),
        # Nullable FK from Area to its type; SET_NULL keeps areas when a
        # type is deleted.
        migrations.AddField(
            model_name='area',
            name='area_type',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='areas', to='hivs_administrative.AreaType', verbose_name='area type'),
        ),
    ]
| 40.9 | 223 | 0.622249 | 168 | 1,636 | 5.85119 | 0.452381 | 0.078332 | 0.042726 | 0.067141 | 0.134283 | 0.134283 | 0.069176 | 0 | 0 | 0 | 0 | 0.017989 | 0.252445 | 1,636 | 39 | 224 | 41.948718 | 0.785773 | 0.027506 | 0 | 0.272727 | 1 | 0 | 0.210824 | 0.088735 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.060606 | 0 | 0.151515 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3019d0d0d66845d80647867288d3a7213f851ae7 | 1,642 | py | Python | mlxtend/mlxtend/evaluate/__init__.py | WhiteWolf21/fp-growth | 01e1d853b09f244f14e66d7d0c87f139a0f67c81 | [
"MIT"
] | null | null | null | mlxtend/mlxtend/evaluate/__init__.py | WhiteWolf21/fp-growth | 01e1d853b09f244f14e66d7d0c87f139a0f67c81 | [
"MIT"
] | null | null | null | mlxtend/mlxtend/evaluate/__init__.py | WhiteWolf21/fp-growth | 01e1d853b09f244f14e66d7d0c87f139a0f67c81 | [
"MIT"
] | null | null | null | # Sebastian Raschka 2014-2020
# mlxtend Machine Learning Library Extensions
# Author: Sebastian Raschka <sebastianraschka.com>
#
# License: BSD 3 clause
from .bootstrap import bootstrap
from .bootstrap_outofbag import BootstrapOutOfBag
from .bootstrap_point632 import bootstrap_point632_score
from .cochrans_q import cochrans_q
from .confusion_matrix import confusion_matrix
from .feature_importance import feature_importance_permutation
from .lift_score import lift_score
from .mcnemar import mcnemar_table
from .mcnemar import mcnemar_tables
from .mcnemar import mcnemar
from .permutation import permutation_test
from .scoring import scoring
from .ttest import paired_ttest_resampled
from .ttest import paired_ttest_kfold_cv
from .ttest import paired_ttest_5x2cv
from .holdout import RandomHoldoutSplit
from .holdout import PredefinedHoldoutSplit
from .f_test import ftest
from .f_test import combined_ftest_5x2cv
from .proportion_difference import proportion_difference
from .bias_variance_decomp import bias_variance_decomp
from .accuracy import accuracy_score
__all__ = ["scoring", "confusion_matrix",
"mcnemar_table", "mcnemar_tables",
"mcnemar", "lift_score",
"bootstrap", "permutation_test",
"BootstrapOutOfBag", "bootstrap_point632_score",
"cochrans_q", "paired_ttest_resampled",
"paired_ttest_kfold_cv", "paired_ttest_5x2cv",
"feature_importance_permutation",
"RandomHoldoutSplit", "PredefinedHoldoutSplit",
"ftest", "combined_ftest_5x2cv",
"proportion_difference", "bias_variance_decomp",
"accuracy_score"]
| 38.186047 | 62 | 0.781973 | 186 | 1,642 | 6.575269 | 0.295699 | 0.053966 | 0.041701 | 0.058872 | 0.063778 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018773 | 0.156516 | 1,642 | 42 | 63 | 39.095238 | 0.86426 | 0.08648 | 0 | 0 | 0 | 0 | 0.236948 | 0.093708 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.676471 | 0 | 0.676471 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
3019d8b3a9a8b78def5cd463705dfbff73a5928e | 1,222 | py | Python | pygears/lib/rounding.py | bogdanvuk/pygears | a0b21d445e1d5c89ad66751447b8253536b835ee | [
"MIT"
] | 120 | 2018-04-23T08:29:04.000Z | 2022-03-30T14:41:52.000Z | pygears/lib/rounding.py | FZP1607152286/pygears | a0b21d445e1d5c89ad66751447b8253536b835ee | [
"MIT"
] | 12 | 2019-07-09T17:12:58.000Z | 2022-03-18T09:05:10.000Z | pygears/lib/rounding.py | FZP1607152286/pygears | a0b21d445e1d5c89ad66751447b8253536b835ee | [
"MIT"
] | 12 | 2019-05-10T19:42:08.000Z | 2022-03-28T18:26:44.000Z | from pygears import gear, datagear, alternative, module
from pygears.typing.qround import get_out_type, get_cut_bits
from pygears.typing import Uint, code, Bool, Int, Fixp, Ufixp
# The b'...' defaults/annotations are pygears compile-time expressions
# evaluated against the input type `din`.
@datagear
def qround(din,
           *,
           fract=0,
           cut_bits=b'get_cut_bits(din, fract)',
           signed=b'din.signed') -> b'get_out_type(din, fract)':
    # Round half up: add half the weight of the dropped LSBs, then shift the
    # `cut_bits` low bits away; the result is re-coded to the output type.
    res = code(din, Int if signed else Uint) + (Bool(1) << (cut_bits - 1))
    return code(res >> cut_bits, module().tout)
# @datagear
# def qround_even(din,
# *,
# fract=0,
# cut_bits=b'get_cut_bits(din, fract)',
# signed=b'din.signed') -> b'get_out_type(din, fract)':
# val_coded = code(din, Int if signed else Uint)
# round_bit = val_coded[cut_bits]
# res = val_coded + Uint([round_bit] + [~round_bit] * (cut_bits - 1))
# return code(res[cut_bits:])
@gear
def truncate(din, *, nbits=2) -> b'din':
    # NOTE(review): empty Python body — the gear appears to be implemented
    # externally (e.g. in HDL); confirm before relying on it in simulation.
    pass
@gear
def round_half_up(din, *, nbits=2) -> b'din':
    # NOTE(review): stub body — implementation presumably supplied outside
    # Python (HDL); verify before simulating.
    pass
@gear
def round_to_zero(din, *, nbits=2) -> b'din':
    # NOTE(review): stub body — implementation presumably supplied outside
    # Python (HDL); verify before simulating.
    pass
@gear
async def round_to_even(din, *, nbits=2) -> b'din':
    # Python's built-in round() rounds ties to the nearest even value, so
    # this implements round-half-to-even on the low `nbits` bits.
    async with din as d:
        return round(float(d) / (2**nbits)) * (2**nbits)
| 23.056604 | 74 | 0.590835 | 183 | 1,222 | 3.770492 | 0.273224 | 0.101449 | 0.052174 | 0.057971 | 0.492754 | 0.473913 | 0.473913 | 0.368116 | 0.286957 | 0.202899 | 0 | 0.012101 | 0.256137 | 1,222 | 52 | 75 | 23.5 | 0.746975 | 0.318331 | 0 | 0.291667 | 0 | 0 | 0.085158 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0.125 | 0.125 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
302008241a81889fef104c6dbb37a96238533df2 | 4,674 | py | Python | sdk/python/pulumi_aws_native/glue/get_trigger.py | pulumi/pulumi-aws-native | 1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3 | [
"Apache-2.0"
] | 29 | 2021-09-30T19:32:07.000Z | 2022-03-22T21:06:08.000Z | sdk/python/pulumi_aws_native/glue/get_trigger.py | pulumi/pulumi-aws-native | 1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3 | [
"Apache-2.0"
] | 232 | 2021-09-30T19:26:26.000Z | 2022-03-31T23:22:06.000Z | sdk/python/pulumi_aws_native/glue/get_trigger.py | pulumi/pulumi-aws-native | 1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3 | [
"Apache-2.0"
] | 4 | 2021-11-10T19:42:01.000Z | 2022-02-05T10:15:49.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetTriggerResult',
'AwaitableGetTriggerResult',
'get_trigger',
'get_trigger_output',
]
@pulumi.output_type
class GetTriggerResult:
    """Result of the `getTrigger` invoke (generated by the Pulumi SDK
    Generator — do not edit by hand).

    Field values are stored via pulumi.set and exposed through the
    read-only properties below.
    """
    def __init__(__self__, actions=None, description=None, id=None, predicate=None, schedule=None, start_on_creation=None, tags=None, type=None):
        # Each field is type-checked only when a truthy value is supplied.
        if actions and not isinstance(actions, list):
            raise TypeError("Expected argument 'actions' to be a list")
        pulumi.set(__self__, "actions", actions)
        if description and not isinstance(description, str):
            raise TypeError("Expected argument 'description' to be a str")
        pulumi.set(__self__, "description", description)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if predicate and not isinstance(predicate, dict):
            raise TypeError("Expected argument 'predicate' to be a dict")
        pulumi.set(__self__, "predicate", predicate)
        if schedule and not isinstance(schedule, str):
            raise TypeError("Expected argument 'schedule' to be a str")
        pulumi.set(__self__, "schedule", schedule)
        if start_on_creation and not isinstance(start_on_creation, bool):
            raise TypeError("Expected argument 'start_on_creation' to be a bool")
        pulumi.set(__self__, "start_on_creation", start_on_creation)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def actions(self) -> Optional[Sequence['outputs.TriggerAction']]:
        return pulumi.get(self, "actions")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def predicate(self) -> Optional['outputs.TriggerPredicate']:
        return pulumi.get(self, "predicate")

    @property
    @pulumi.getter
    def schedule(self) -> Optional[str]:
        return pulumi.get(self, "schedule")

    @property
    @pulumi.getter(name="startOnCreation")
    def start_on_creation(self) -> Optional[bool]:
        return pulumi.get(self, "start_on_creation")

    @property
    @pulumi.getter
    def tags(self) -> Optional[Any]:
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        return pulumi.get(self, "type")
class AwaitableGetTriggerResult(GetTriggerResult):
    """Awaitable wrapper so the result can be used with `await` (generated code)."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` turns __await__ into a generator, as the
        # awaitable protocol requires; awaiting returns a plain result copy.
        if False:
            yield self
        return GetTriggerResult(
            actions=self.actions,
            description=self.description,
            id=self.id,
            predicate=self.predicate,
            schedule=self.schedule,
            start_on_creation=self.start_on_creation,
            tags=self.tags,
            type=self.type)
def get_trigger(id: Optional[str] = None,
                opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetTriggerResult:
    """
    Resource Type definition for AWS::Glue::Trigger

    Looks up an existing Glue trigger by its `id` via the
    `aws-native:glue:getTrigger` invoke.
    """
    __args__ = dict()
    __args__['id'] = id
    # Default the invoke options and pin the provider version when unset.
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('aws-native:glue:getTrigger', __args__, opts=opts, typ=GetTriggerResult).value

    return AwaitableGetTriggerResult(
        actions=__ret__.actions,
        description=__ret__.description,
        id=__ret__.id,
        predicate=__ret__.predicate,
        schedule=__ret__.schedule,
        start_on_creation=__ret__.start_on_creation,
        tags=__ret__.tags,
        type=__ret__.type)
# Output-typed variant: lift_output_func wraps get_trigger so Input values
# can be passed; the `...` body is intentional (the decorator supplies it).
@_utilities.lift_output_func(get_trigger)
def get_trigger_output(id: Optional[pulumi.Input[str]] = None,
                       opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetTriggerResult]:
    """
    Resource Type definition for AWS::Glue::Trigger
    """
    ...
| 34.622222 | 145 | 0.655755 | 537 | 4,674 | 5.458101 | 0.201117 | 0.028659 | 0.061412 | 0.081883 | 0.213579 | 0.145343 | 0.145343 | 0 | 0 | 0 | 0 | 0.00028 | 0.2362 | 4,674 | 134 | 146 | 34.880597 | 0.820728 | 0.062901 | 0 | 0.141509 | 1 | 0 | 0.138978 | 0.022089 | 0 | 0 | 0 | 0 | 0 | 1 | 0.113208 | false | 0 | 0.056604 | 0.075472 | 0.283019 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3020ebc2057f5b57aef317088f78b4f30a1889dd | 1,273 | py | Python | main/migrations/0001_initial.py | Mohsen7640/PicoSchool | 69d8658111b27e843928f2458f9a7108ab8bd4e0 | [
"Apache-2.0"
] | 48 | 2022-02-05T10:07:33.000Z | 2022-03-30T18:14:45.000Z | main/migrations/0001_initial.py | Mohsen7640/PicoSchool | 69d8658111b27e843928f2458f9a7108ab8bd4e0 | [
"Apache-2.0"
] | 2 | 2022-02-10T11:56:42.000Z | 2022-02-16T13:27:26.000Z | main/migrations/0001_initial.py | Mohsen7640/PicoSchool | 69d8658111b27e843928f2458f9a7108ab8bd4e0 | [
"Apache-2.0"
] | 14 | 2022-02-05T13:46:17.000Z | 2022-02-23T16:23:12.000Z | # Generated by Django 3.2 on 2022-02-04 11:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the SiteSetting model holding
    # site-wide presentation settings (name, logo, colors, menu theme).
    # Avoid hand-editing operations once this migration has been applied.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='SiteSetting',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('school_name', models.CharField(max_length=150, verbose_name='نام مدرسه')),
                ('school_logo', models.ImageField(upload_to='main/school_logo/', verbose_name='لوگو مدرسه')),
                ('site_color', models.CharField(choices=[('green', 'سبز'), ('orange', 'نارنجی'), ('yellow', 'زرد')], default='green', max_length=10, verbose_name='رنگ سایت')),
                ('site_menu_theme', models.CharField(choices=[('waterfall', 'منو آبشاری'), ('four_rooms', 'منو چارخونه ای')], default='waterfall', max_length=30, verbose_name='تم منو سایت')),
                ('favicon', models.ImageField(blank=True, null=True, upload_to='main/school_logo/', verbose_name='فاآیکون')),
            ],
            options={
                'verbose_name': 'تنظیمات سایت',
                'verbose_name_plural': '01. تنظیمات سایت',
            },
        ),
    ]
| 42.433333 | 191 | 0.597015 | 138 | 1,273 | 5.333333 | 0.57971 | 0.119565 | 0.032609 | 0.048913 | 0.089674 | 0.089674 | 0.089674 | 0 | 0 | 0 | 0 | 0.023933 | 0.24509 | 1,273 | 29 | 192 | 43.896552 | 0.741935 | 0.033778 | 0 | 0 | 1 | 0 | 0.238599 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.045455 | 0 | 0.227273 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
30210087e33980eb24daf4d679f8868eeef662a7 | 294 | py | Python | 0000 hihoOnce/175 Robots Crossing River/main.py | SLAPaper/hihoCoder | 3f64d678c5dd46db36345736eb56880fb2d2c5fe | [
"MIT"
] | null | null | null | 0000 hihoOnce/175 Robots Crossing River/main.py | SLAPaper/hihoCoder | 3f64d678c5dd46db36345736eb56880fb2d2c5fe | [
"MIT"
] | null | null | null | 0000 hihoOnce/175 Robots Crossing River/main.py | SLAPaper/hihoCoder | 3f64d678c5dd46db36345736eb56880fb2d2c5fe | [
"MIT"
] | null | null | null | from math import ceil
# Python 2 solution script; reads three integers and prints a single result.
z, y, x = sorted(int(x) for x in raw_input().split())  # after sorting, x is the largest value
if x <= y + z:
    # NOTE(review): when the largest group does not dominate the other two,
    # the answer appears to be ceil(total / 20) * 6 -- TODO confirm against
    # the original problem statement.
    print int(ceil((x + y + z) / 20.0)) * 6
else:
    run = (y + z) / 10          # Python 2 integer division: full batches of 10
    xr = x - 10 * run           # remainder of the largest group after those runs
    nxr = (y + z) % 10          # leftover of the two smaller groups
    xr -= 15 - nxr if nxr < 8 else nxr
    print int(run + 1 + ceil(xr / 15.0)) * 6
| 24.5 | 53 | 0.486395 | 59 | 294 | 2.40678 | 0.457627 | 0.056338 | 0.042254 | 0.084507 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.091371 | 0.329932 | 294 | 11 | 54 | 26.727273 | 0.629442 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.1 | null | null | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
30222ed6a4345098e7f0e289697b8a0a1570b94f | 3,176 | py | Python | script/Isin google Nordnet/isin-nordnet.py | pettersoderlund/fondout | 99b14eaa8c6eb56fd862ab9bdf6acc8d537d4a31 | [
"BSD-3-Clause"
] | null | null | null | script/Isin google Nordnet/isin-nordnet.py | pettersoderlund/fondout | 99b14eaa8c6eb56fd862ab9bdf6acc8d537d4a31 | [
"BSD-3-Clause"
] | 4 | 2016-10-18T18:30:08.000Z | 2016-11-05T09:22:29.000Z | script/Isin google Nordnet/isin-nordnet.py | pettersoderlund/fondout | 99b14eaa8c6eb56fd862ab9bdf6acc8d537d4a31 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: UTF-8 -*-
import unicodedata
import openpyxl.reader.excel
import time
import datetime
import codecs
import mechanize
import cookielib
import re
import sys
from random import randint
def getNordnetIsin(br, nordnetUrl):
    """Fetch a Nordnet instrument page and return the first ISIN found.

    Opens nordnetUrl with the mechanize browser *br*, extracts the embedded
    iframe URL, loads that URL and scans its body line by line for an ISIN
    pattern (2 letters, 1 digit, 8 alphanumerics, 1 digit).  Returns the first
    match; returns None implicitly when no line matches.
    """
    r = br.open(nordnetUrl)
    result = re.search( r'<iframe src="?\'?([^"\'"]*)', r.read(), flags=0)
    if result:
        # Drop the leading '<iframe src="' prefix to keep only the URL text.
        msseUrl = result.group()[13:]
        print msseUrl
    else:
        print "no go\n"
    # NOTE(review): if the iframe regex did not match, msseUrl is unbound here
    # and the next line raises NameError -- TODO confirm/handle.
    r = br.open(msseUrl)
    print
    for line in r.read().split('\n'):
        isin = None
        isinResult = re.search( r'[A-Z]{2}[0-9]{1}[0-9A-Z]{8}[0-9]{1}', line, flags=0)
        if isinResult:
            print isinResult.group()
            isin = isinResult.group()
            return isin
        else:
            #print "No match!!", company
            pass
def writetocsvfile(file, isin, name, delimiter):
    """Write one '<isin><delimiter><name>' line to the already-open file.

    NOTE(review): the first print concatenates *before* the None guards
    below, so a None isin or name raises TypeError there -- TODO confirm.
    NOTE(review): the caller later in this script invokes this function as
    (f, name, isin, delimiter), i.e. with isin and name swapped relative to
    this signature -- verify intended column order.
    """
    print isin + delimiter + name +"\n"
    if isin is None:
        isin = ""
    if name is None:
        name = ""
    try:
        file.write(isin + delimiter + name.decode('utf8', 'ignore') +"\n")
    except IOError as detail:
        print "Error, problems writing to file: ", detail
    except UnicodeEncodeError as detail:
        print detail
#Parse arguments: first CLI argument is the Excel workbook to read.
filename = str(sys.argv[1])
sheetName = 'Sheet1'
# Output file name embeds a timestamp so repeated runs never collide.
timenow = datetime.datetime.now()
# Write mode creates a new file or overwrites the existing content of the file.
# Write mode will _always_ destroy the existing contents of a file.
try:
    # This will create a new file or **overwrite an existing file**.
    f = codecs.open(filename + "_isincodes_" + timenow.isoformat() + ".csv", 'wb', "cp1252")
except IOError as detail:
    print "Error, problems writing to file: ", detail
# Browser
br = mechanize.Browser()
# Cookie Jar
cj = cookielib.LWPCookieJar()
br.set_cookiejar(cj)
# Browser options
br.set_handle_equiv(True)
br.set_handle_gzip(True)
br.set_handle_redirect(True)
br.set_handle_referer(True)
br.set_handle_robots(False)
# Follows refresh 0 but not hangs on refresh > 0
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
# User-Agent (this is cheating, ok?)
br.addheaders = [('User-agent', 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1')]
# Open import file: each row holds an instrument name and its Nordnet URL.
workbook = openpyxl.load_workbook(filename = filename, use_iterators = True)
worksheet = workbook.get_sheet_by_name(sheetName)
data = []
i = 0
for row in worksheet.iter_rows():
    try:
        data = {
            'name': row[0].internal_value,
            'url': row[1].internal_value
        }
    except AttributeError as detail:
        print "AttributeError", detail
    except TypeError:
        print "TypeError"
    if(data):
        # Strip characters that would break the CSV output.
        name = data['name'].encode("utf-8").translate(None, '!@#$€£&;,')
        if (data['url']):
            url = 'http://www.nordnet.se' + data['url'].encode("utf-8")
            isin = getNordnetIsin(br, url)
        else:
            isin = 'Not found'
        # NOTE(review): writetocsvfile's signature is (file, isin, name,
        # delimiter); name and isin appear swapped here -- TODO confirm.
        writetocsvfile(f, name, isin, "\t")
        if (i > 9999): # How many rows to handle?
            break;
        i=i+1
        #time.sleep(randint(0,15))
f.close()
| 25.206349 | 137 | 0.62563 | 431 | 3,176 | 4.552204 | 0.433875 | 0.017839 | 0.033639 | 0.030581 | 0.059123 | 0.059123 | 0.059123 | 0.059123 | 0.059123 | 0.059123 | 0 | 0.027686 | 0.238035 | 3,176 | 125 | 138 | 25.408 | 0.782231 | 0.146725 | 0 | 0.119048 | 0 | 0 | 0.138085 | 0.012992 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.011905 | 0.119048 | null | null | 0.119048 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3022766db89202607c22a14256bc547026ec8d98 | 582 | py | Python | src/examples/g_lists_and_tuples/main.py | acc-cosc-1336-spring-2022/acc-cosc-1336-spring-2022-rObErT-a93 | dfcbe42ce835b65db15928f89a37f32b599495ba | [
"MIT"
] | null | null | null | src/examples/g_lists_and_tuples/main.py | acc-cosc-1336-spring-2022/acc-cosc-1336-spring-2022-rObErT-a93 | dfcbe42ce835b65db15928f89a37f32b599495ba | [
"MIT"
] | null | null | null | src/examples/g_lists_and_tuples/main.py | acc-cosc-1336-spring-2022/acc-cosc-1336-spring-2022-rObErT-a93 | dfcbe42ce835b65db15928f89a37f32b599495ba | [
"MIT"
] | null | null | null | import lists
# Demo driver for the course's ``lists`` module: exercises the loop helpers,
# the home-values collector, and the list-search function.
nums = [99,100,101,102]
lists.loop_list_w_for(nums)
print('----------')
for n in nums:
    print(n)
print('*************')
num = [99,100,101,102]
lists.loop_list_w_while(num)
print('----------')
for n in num:
    print(n)
print('**************')
lists.collect_home_values()
print('-----------------3/24 Lecture------------------------')
print()
# Interactive search: report whether the entered item is in list1.
list1 = ["C++", "C#", "Python", "Java"]
item1 = input('Enter item to find: ')
result = lists.find_items_in_lists(item1, list1)
if(result):
    print(item1, 'in the list')
else:
    print(item1, 'not in the list')
| 16.628571 | 62 | 0.54811 | 82 | 582 | 3.756098 | 0.463415 | 0.032468 | 0.051948 | 0.071429 | 0.162338 | 0.162338 | 0.162338 | 0.162338 | 0 | 0 | 0 | 0.062124 | 0.142612 | 582 | 34 | 63 | 17.117647 | 0.55511 | 0 | 0 | 0.173913 | 0 | 0 | 0.277108 | 0.089501 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.043478 | 0 | 0.043478 | 0.434783 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
3028563b4b9152c8a112821120d579b425227167 | 1,611 | py | Python | aionasa/exoplanet/_tests.py | nwunderly/aio-nasa | 9a083746eee6fe92e5663fdd2a8ebe38b192ac87 | [
"MIT"
] | 2 | 2020-07-09T00:20:37.000Z | 2020-07-09T00:25:06.000Z | aionasa/exoplanet/_tests.py | nwunderly/aio-nasa | 9a083746eee6fe92e5663fdd2a8ebe38b192ac87 | [
"MIT"
] | null | null | null | aionasa/exoplanet/_tests.py | nwunderly/aio-nasa | 9a083746eee6fe92e5663fdd2a8ebe38b192ac87 | [
"MIT"
] | 1 | 2020-07-14T23:17:06.000Z | 2020-07-14T23:17:06.000Z | from .api import Exoplanet
async def _test_method(ref, name, *args, **kwargs):
try:
result = await ref(*args, **kwargs)
try:
iter(result)
iterable = True
except TypeError:
iterable = False
print(
f"exoplanet.{name} success\n\t",
type(result),
len(result) if iterable else None,
)
# print(result)
except Exception as e:
print(f"exoplanet.{name} failed\n\t", e.__class__.__name__, e)
async def _run_tests():
    """Smoke-test every public query helper of the Exoplanet client.

    Runs each query variant (raw, json, dataframe, aliastable) through
    ``_test_method`` so failures are printed rather than raised.
    """
    async with Exoplanet() as exoplanet:
        # Shared query parameters for the 'exoplanets' table tests.
        select = "pl_hostname,ra,dec"
        where = "ra>45"
        await _test_method(
            exoplanet.query,
            "query",
            "exoplanets",
            select=select,
            where=where,
            order="dec",
            format="ascii",
        )
        await _test_method(
            exoplanet.query_json,
            "query_json",
            "exoplanets",
            select=select,
            where=where,
            order="dec",
        )
        await _test_method(
            exoplanet.query_df,
            "query_df",
            "exoplanets",
            select=select,
            where=where,
            order="dec",
        )
        # Alias-table lookups use a known star name ("bet Pic").
        await _test_method(exoplanet.query_aliastable, "query_aliastable", "bet Pic")
        await _test_method(
            exoplanet.query_aliastable_json, "query_aliastable_json", "bet Pic"
        )
        await _test_method(
            exoplanet.query_aliastable_df, "query_aliastable_df", "bet Pic"
        )
    print("Done.")
| 25.571429 | 85 | 0.514587 | 157 | 1,611 | 5.031847 | 0.363057 | 0.088608 | 0.113924 | 0.182278 | 0.425316 | 0.351899 | 0.339241 | 0.288608 | 0.174684 | 0.174684 | 0 | 0.002018 | 0.384854 | 1,611 | 62 | 86 | 25.983871 | 0.795156 | 0.00807 | 0 | 0.358491 | 0 | 0 | 0.142231 | 0.013158 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.018868 | 0 | 0.018868 | 0.056604 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
302e2929f58992709dad8ed5380ff8b83767af39 | 2,357 | py | Python | tortoise/tests/test_init.py | EtzelWu/tortoise-orm | 6a79c87169c10ff25b0d84bca4db24f0c0737432 | [
"Apache-2.0"
] | null | null | null | tortoise/tests/test_init.py | EtzelWu/tortoise-orm | 6a79c87169c10ff25b0d84bca4db24f0c0737432 | [
"Apache-2.0"
] | null | null | null | tortoise/tests/test_init.py | EtzelWu/tortoise-orm | 6a79c87169c10ff25b0d84bca4db24f0c0737432 | [
"Apache-2.0"
] | null | null | null | from tortoise import Tortoise
from tortoise.contrib import test
from tortoise.exceptions import ConfigurationError
from tortoise.tests.testmodels import Tournament
class TestInitErrors(test.SimpleTestCase):
    # Exercises the error paths of Tortoise initialisation and DB routing.
    # setUp/tearDown save and restore the global Tortoise state, since these
    # tests deliberately reset it.

    async def setUp(self):
        # Stash the current global state so tearDown can restore it.
        self.apps = Tortoise.apps
        self.inited = Tortoise._inited
        Tortoise.apps = {}
        Tortoise._inited = False
        Tortoise._db_routing = None
        Tortoise._global_connection = None
        self.db = await self.getDB()

    async def tearDown(self):
        await self.db.close()
        await self.db.db_delete()
        # Restore the global state captured in setUp.
        Tortoise.apps = self.apps
        Tortoise._inited = self.inited

    def test_dup_model(self):
        # Registering the same model twice in one app must be rejected.
        with self.assertRaisesRegex(ConfigurationError, 'duplicates in'):
            Tortoise.register_model('models', 'Tournament', Tournament)
            Tortoise.register_model('models', 'Tournament', Tournament)

    def test_missing_app_route(self):
        # Routing must cover every registered app.
        Tortoise.apps = self.apps
        with self.assertRaisesRegex(ConfigurationError, 'No db instanced for apps'):
            Tortoise._client_routing(db_routing={
                'models': self.db,
            })

    def test_exclusive_route_param(self):
        # db_routing and global_client are mutually exclusive.
        with self.assertRaisesRegex(ConfigurationError, 'You must pass either'):
            Tortoise._client_routing(db_routing={
                'models': self.db,
            }, global_client=self.db)

    def test_not_db(self):
        # global_client must be a DB client instance, not an arbitrary value.
        with self.assertRaisesRegex(ConfigurationError,
                                    'global_client must inherit from BaseDBAsyncClient'):
            Tortoise._client_routing(global_client='moo')

    def test_missing_param(self):
        # At least one of the two routing parameters is required.
        with self.assertRaisesRegex(ConfigurationError,
                                    'You must pass either global_client or db_routing'):
            Tortoise._client_routing()

    def test_missing_app_route2(self):
        # Every db_routing value must be a DB client instance.
        Tortoise.apps = self.apps
        with self.assertRaisesRegex(ConfigurationError,
                                    'All app values must inherit from BaseDBAsyncClient'):
            Tortoise._client_routing(db_routing={
                'models': 'moo',
            })

    def test_dup_init(self):
        # Calling init twice must fail on the second call.
        with self.assertRaisesRegex(ConfigurationError, 'Already initialised'):
            Tortoise.init(self.db)
            Tortoise.init(self.db)
| 37.412698 | 90 | 0.640221 | 240 | 2,357 | 6.1 | 0.258333 | 0.032787 | 0.119536 | 0.205601 | 0.486339 | 0.390027 | 0.315574 | 0.243169 | 0.185792 | 0.094262 | 0 | 0.000586 | 0.276199 | 2,357 | 62 | 91 | 38.016129 | 0.857562 | 0 | 0 | 0.326923 | 0 | 0 | 0.118371 | 0 | 0 | 0 | 0 | 0 | 0.134615 | 1 | 0.134615 | false | 0.038462 | 0.076923 | 0 | 0.230769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
303212f2de3163447a24ff07aa13ecbe61acaf1d | 1,137 | py | Python | rurusetto/wiki/migrations/0008_auto_20210802_2042.py | siddhantdixit/rurusetto | ed3aad56d20fbdc15e9ab7d2b77335de65009b7f | [
"MIT"
] | 19 | 2021-05-09T12:05:40.000Z | 2022-03-02T19:26:36.000Z | rurusetto/wiki/migrations/0008_auto_20210802_2042.py | siddhantdixit/rurusetto | ed3aad56d20fbdc15e9ab7d2b77335de65009b7f | [
"MIT"
] | 121 | 2021-05-04T19:18:13.000Z | 2022-03-21T22:11:25.000Z | rurusetto/wiki/migrations/0008_auto_20210802_2042.py | siddhantdixit/rurusetto | ed3aad56d20fbdc15e9ab7d2b77335de65009b7f | [
"MIT"
] | 12 | 2021-05-03T04:44:02.000Z | 2021-10-19T05:40:08.000Z | # Generated by Django 3.2.5 on 2021-08-02 20:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: changes the on_delete behaviour of the three
    # Ruleset user foreign keys (creator/last_edited_by/owner) to PROTECT.
    # Avoid hand-editing operations once this migration has been applied.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('wiki', '0007_alter_ruleset_last_edited_at'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ruleset',
            name='creator',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='ruleset_page_creator', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='ruleset',
            name='last_edited_by',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='ruleset_last_edited_by', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='ruleset',
            name='owner',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='ruleset_ruleset_owner', to=settings.AUTH_USER_MODEL),
        ),
    ]
| 35.53125 | 149 | 0.676341 | 132 | 1,137 | 5.583333 | 0.356061 | 0.089552 | 0.075984 | 0.119403 | 0.565807 | 0.5346 | 0.480326 | 0.480326 | 0.480326 | 0.480326 | 0 | 0.021324 | 0.216359 | 1,137 | 31 | 150 | 36.677419 | 0.805836 | 0.039578 | 0 | 0.36 | 1 | 0 | 0.134862 | 0.069725 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.12 | 0 | 0.24 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3034af2279daaa6bfb2fc3c5e9c4af5f13761aa2 | 1,200 | py | Python | scripts/motion_controller.py | SaeedAlRahma/baxter-object-manipulation | 16ebc8872993d4f0a296d7e15bd400b8592bc180 | [
"BSD-2-Clause"
] | null | null | null | scripts/motion_controller.py | SaeedAlRahma/baxter-object-manipulation | 16ebc8872993d4f0a296d7e15bd400b8592bc180 | [
"BSD-2-Clause"
] | null | null | null | scripts/motion_controller.py | SaeedAlRahma/baxter-object-manipulation | 16ebc8872993d4f0a296d7e15bd400b8592bc180 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/python
# Motion Controller
# Python 2 script: connects to a Baxter robot through the Ebolabot Motion
# layer and verifies the connection succeeds.
""" IMPORT MODULES """
import sys, struct, time, json
# Make the internal Ebolabot packages importable before importing Motion.
sys.path.insert(0, "/home/saeed/Klampt/iml-internal/Ebolabot")
#-----------------------------------------------------------
#Imports require internal folders
from Motion import motion
from Motion import config
#================================================================
# End of Imports
#============================================================
# configuration variables (hard-coded absolute paths for this machine)
MODEL_DIR = "/home/saeed/Klampt/data/robots/"
LIBMOTION_DIR = "/home/saeed/catkin_ws/src/klampt/misc/"
KLAMPT_MODEL = "baxter_col.rob"
#============================================================
# paths/directories variables
if __name__ == "__main__":
    """The main loop that loads the planning / simulation models and
    starts the OpenGL visualizer."""
    print 'motion_controller runs!'
    robot_model = MODEL_DIR+KLAMPT_MODEL
    print robot_model
    # Connect to Baxter
    robot = motion.setup(
        mode="physical", \
        libpath=LIBMOTION_DIR, \
        klampt_model= MODEL_DIR+KLAMPT_MODEL)
    res = robot.startup()
    if not res:
        print "Error connecting to robot"
        exit()
| 25.531915 | 68 | 0.553333 | 122 | 1,200 | 5.262295 | 0.590164 | 0.068536 | 0.065421 | 0.05919 | 0.074766 | 0 | 0 | 0 | 0 | 0 | 0 | 0.000991 | 0.159167 | 1,200 | 46 | 69 | 26.086957 | 0.635282 | 0.328333 | 0 | 0 | 0 | 0 | 0.278274 | 0.162202 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.157895 | null | null | 0.157895 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
30389a732bffc046d9a331b9ce96d0127f8d599d | 1,903 | py | Python | operations/pg/sprites_light.py | Sam-prog-sudo/MacGyver | 4048044bc18a06d2d37f0c0901f3b530c48cec02 | [
"MIT"
] | null | null | null | operations/pg/sprites_light.py | Sam-prog-sudo/MacGyver | 4048044bc18a06d2d37f0c0901f3b530c48cec02 | [
"MIT"
] | 4 | 2020-10-22T17:10:16.000Z | 2020-12-27T13:42:49.000Z | operations/pg/sprites_light.py | Sam-prog-sudo/MacGyver | 4048044bc18a06d2d37f0c0901f3b530c48cec02 | [
"MIT"
] | null | null | null | # encoding: utf-8
import pygame
from assets import constants as C
class Decor(pygame.sprite.Sprite):
    """Static maze tile sprite: an image placed at a (row, col) grid cell."""

    def __init__(self, name, pos_tuple):
        """Load the image registered under *name* and place it on the grid.

        pos_tuple is (row, col): the row drives rect.y, the column rect.x,
        each scaled by the C.A_MOVE cell step.
        """
        pygame.sprite.Sprite.__init__(self)
        self.name = name
        self.image = pygame.image.load(
            self.full_path(C.IMAGES[name])
        ).convert()
        self.rect = self.image.get_rect()
        self.rect.x = pos_tuple[1] * C.A_MOVE
        self.rect.y = pos_tuple[0] * C.A_MOVE

    @staticmethod
    def full_path(image: str):
        """Return *image* prefixed with the configured image folder."""
        return ''.join([C.IMAGE_FOLDER, image])
class Elements(Decor):
    """Sprite for a maze element (item/character) centred inside its cell."""

    def __init__(self, elt):
        super().__init__(elt.name, (elt.position_Y, elt.position_X))
        # Centre the rect in the cell; the +20 offset is presumably half a
        # cell (i.e. C.A_MOVE == 40) -- TODO confirm against constants.
        self.rect.centerx = elt.position_X * C.A_MOVE + 20
        self.rect.centery = elt.position_Y * C.A_MOVE + 20
class Player(Elements):
    """MacGyver sprite: moves one cell per arrow-key press."""

    def __init__(self, maze, macgyver):
        super().__init__(macgyver)
        self.maze = maze
        self.macgyver = macgyver

    def right(self):
        self.rect.x += C.A_MOVE

    def left(self):
        self.rect.x -= C.A_MOVE

    def up(self):
        self.rect.y -= C.A_MOVE

    def down(self):
        self.rect.y += C.A_MOVE

    def update_pos(self, event, inter):
        """
        Manage character movements in lab.

        Args:
            event : event in pygame.event.get()
            inter : Instance of class Interaction
        """
        mov_keys = (pygame.K_LEFT, pygame.K_RIGHT, pygame.K_UP, pygame.K_DOWN)
        # Bug fix: the previous check ``any(mov_keys) in keys`` only tested
        # whether *any* key was currently held, so every KEYDOWN event was
        # processed.  React only when the pressed key itself is an arrow key.
        if event.type == pygame.KEYDOWN and event.key in mov_keys:
            # pygame names the arrow keys 'left'/'right'/'up'/'down', which
            # matches the movement method names dispatched via getattr below.
            key = pygame.key.name(event.key)
            if self.macgyver.move_char(key, self.maze.list_paths):
                if inter.check_chars_pos():
                    self.maze.chars_meet_up()
                elif self.maze.list_items:
                    inter.item_picking_process()
                getattr(self, key)()
| 29.276923 | 78 | 0.586442 | 259 | 1,903 | 4.07722 | 0.332046 | 0.068182 | 0.045455 | 0.034091 | 0.083333 | 0.083333 | 0.083333 | 0.083333 | 0 | 0 | 0 | 0.005212 | 0.294272 | 1,903 | 64 | 79 | 29.734375 | 0.781087 | 0.073568 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.204545 | false | 0 | 0.045455 | 0.022727 | 0.340909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3038e041781965ef085fd084ac47bd2ee1d01ca1 | 6,596 | py | Python | pyopt/packing/rectangular/drawer.py | stonelake/pyoptimization | 6e2a4acbaa6eac9716e9ef9e46ca23986dbb6c71 | [
"Apache-2.0"
] | null | null | null | pyopt/packing/rectangular/drawer.py | stonelake/pyoptimization | 6e2a4acbaa6eac9716e9ef9e46ca23986dbb6c71 | [
"Apache-2.0"
] | null | null | null | pyopt/packing/rectangular/drawer.py | stonelake/pyoptimization | 6e2a4acbaa6eac9716e9ef9e46ca23986dbb6c71 | [
"Apache-2.0"
] | null | null | null | __author__ = "Alex Baranov"
from random import randrange
from visual import *
from reports import ReportsBuilder
class BoxDrawer(object):
    """
    Draws the boxes

    Python 2 / VPython visualiser for rectangular packing results.  Keyboard
    controls in display(): 'l' toggles labels, 'n' replays the next
    pack/unpack action, 'r' shows the dynamic report.
    """
    def __init__(self, packing_params=None, display_labels=True, **kwargs):
        """
        Start the box drawing.
        """
        # NOTE(review): ``randrange=scene.autoscale`` passes an unusual
        # keyword to display(); possibly meant ``autoscale=`` -- TODO confirm.
        self.win = display(title='Packing results', background=(0.1, 0.1, 0.1), randrange=scene.autoscale)
        self.win.select()
        self.display_labels = display_labels
        # create frames (one per visual layer)
        self.arrows_frame = frame()
        self.containers_frame = frame()
        self.boxes_frame = frame()
        self.labels_frame = frame()
        # draw arrows
        self.__draw_arrows()
        # assign default variables
        self.container_color = kwargs.get("container_color", color.green)
        # packing params
        self.pack_params = packing_params
        self.actions = []
        self.action_index = 0
        if packing_params:
            self.actions = packing_params.get("actions", [])

    @classmethod
    def show_packing_results(cls, result, params, containers):
        """
        Displays the packing results.
        """
        bd = BoxDrawer(packing_params=params)
        bd.add_containers(containers)
        bd.add_boxes(result)
        bd.display()

    def __get_random_color(self):
        """
        Generates the random color.
        """
        # Three RGB components in [0, 1).
        return [randrange(0, 255) / 255. for _ in range(3)]

    def add_boxes(self, boxes, change_action_pointer=True):
        """
        Draws all the boxes that should or were packed.
        """
        for pbox in boxes:
            bcolor = self.__get_random_color()
            box(frame=self.boxes_frame, pos=pbox.center, size=pbox.size, color=bcolor)
            label(frame=self.labels_frame, pos=pbox.center, box=0,
                  text='name={}\npolus={}\nsize={}'.format(pbox.name, pbox.polus, pbox.size))
        # if some boxes were added set the actions index to max
        if boxes and change_action_pointer:
            self.action_index = len(self.actions)

    def remove_box(self, box):
        """
        Removes bo from the display.
        """
        # Hide (not destroy) every drawn box matching position and size.
        for element in filter(lambda x: x.pos == box.center and x.size == box.size, self.boxes_frame.objects):
            element.visible = False
            # remove also label
            for label in filter(lambda x: x.pos == box.center, self.labels_frame.objects):
                label.visible = False

    def add_containers(self, containers, random_color=False, opacity=None, centered_labels=False):
        """
        Add container to the screen.
        """
        op = opacity or 0.1
        for index, container in enumerate(containers):
            if random_color:
                c = self.__get_random_color()
            else:
                c = self.container_color
            box(frame=self.containers_frame, pos=container.center, size=container.size, opacity=op, color=c)
            if centered_labels:
                pos = container.polus
            else:
                pos = container.diagonal_polus
            label(frame=self.labels_frame, pos=pos, box=0,
                  text='Container #{}\npolus={}\nsize={}'.format(index, container.polus, container.size))

    def __draw_arrows(self):
        """
        Draws the x,y,z arrows.
        """
        #x
        arrow(frame=self.arrows_frame, pos=(0, 0, 0), axis=(10, 0, 0), shaftwidth=0.01)
        label(frame=self.arrows_frame, pos=(10, 0, 0), box=0, text='X')
        #y
        arrow(frame=self.arrows_frame, pos=(0, 0, 0), axis=(0, 10, 0), shaftwidth=0.01)
        label(frame=self.arrows_frame, pos=(0, 10, 0), box=0, text='Y')
        #z
        arrow(frame=self.arrows_frame, pos=(0, 0, 0), axis=(0, 0, 10), shaftwidth=0.01)
        label(frame=self.arrows_frame, pos=(0, 0, 10), box=0, text='Z')
        for obj in self.arrows_frame.objects:
            obj.color = color.orange

    def __draw_action(self, action_pair):
        # action_pair is ("pack"|"unpack", box).
        name = action_pair[0]
        b = action_pair[1]
        if name == "pack":
            self.add_boxes((b, ), change_action_pointer=False)
        elif name == "unpack":
            self.remove_box(b)

    def __remove_all_boxes(self):
        # Hide every drawn box and its matching label.
        for element in self.boxes_frame.objects:
            element.visible = False
            # remove labels
            for label in filter(lambda x: x.pos == element.pos,
                                self.labels_frame.objects):
                label.visible = False

    def display(self):
        # Blocking event loop: poll the keyboard at 100 Hz.
        print "-------------------------------------------------------"
        while 1:
            rate(100)
            if self.win.kb.keys:
                s = self.win.kb.getkey()
                if len(s) == 1:
                    if s == 'l' or s == 'L':
                        # Toggle label visibility.
                        if self.display_labels:
                            self.labels_frame.visible = False
                            self.display_labels = False
                        else:
                            self.labels_frame.visible = True
                            self.display_labels = True
                    # display actions
                    if s == 'n' or s == 'N':
                        if not self.actions:
                            continue
                        else:
                            if len(self.actions) == self.action_index:
                                # End of replay: reset and clear the scene.
                                self.action_index = 0
                                if self.boxes_frame.objects:
                                    # remove non-unpacked boxes
                                    self.__remove_all_boxes()
                                print "Packing completed \n\n"
                            else:
                                if self.actions[self.action_index][0] == "pack":
                                    frmt = "Packing"
                                else:
                                    frmt = "Unpacking"
                                print "{} box: '{}'".format(frmt, self.actions[self.action_index][1])
                                self.__draw_action(self.actions[self.action_index])
                                self.action_index += 1
                    # display reports
                    if s == 'r' or s == 'R':
                        if self.pack_params:
                            ReportsBuilder.show_dynamic_report(self.pack_params)
| 36.043716 | 111 | 0.498181 | 710 | 6,596 | 4.469014 | 0.209859 | 0.036874 | 0.037819 | 0.037819 | 0.22849 | 0.21872 | 0.184053 | 0.183738 | 0.073747 | 0.073747 | 0 | 0.018717 | 0.392511 | 6,596 | 182 | 112 | 36.241758 | 0.773147 | 0.032141 | 0 | 0.108108 | 0 | 0 | 0.040629 | 0.017808 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.027027 | null | null | 0.027027 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
303d526540636dddcd088aaf9e7d83eb0eef0af8 | 1,744 | py | Python | moire/nn/functions/indexing.py | speedcell4/moire | d5acaee4ee0690627f1382debdd6a6f11cec77c7 | [
"MIT"
] | 2 | 2018-03-12T19:53:35.000Z | 2018-04-12T08:36:22.000Z | moire/nn/functions/indexing.py | speedcell4/moire | d5acaee4ee0690627f1382debdd6a6f11cec77c7 | [
"MIT"
] | 4 | 2018-03-11T06:01:04.000Z | 2020-01-15T16:27:35.000Z | moire/nn/functions/indexing.py | speedcell4/moire | d5acaee4ee0690627f1382debdd6a6f11cec77c7 | [
"MIT"
] | null | null | null | import dynet as dy
import numpy as np
import moire
from moire import Expression
# Public API: plain, epsilon-greedy, and Gumbel-noise index selectors.
__all__ = [
    'argmax', 'argmin',
    'epsilon_argmax', 'epsilon_argmin',
    'gumbel_argmax', 'gumbel_argmin',
]
def argmax(x: Expression, axis: int = None) -> int:
    """Index of the largest entry of x's numpy value (over *axis* if given)."""
    return int(x.npvalue().argmax(axis=axis))
def argmin(x: Expression, axis: int = None) -> int:
    """Index of the smallest entry of x's numpy value (over *axis* if given)."""
    return int(x.npvalue().argmin(axis=axis))
def epsilon_argmax(x: Expression, epsilon: float, axis: int = None) -> int:
    """Epsilon-greedy argmax: with probability *epsilon* return a uniformly
    random index along x's first dimension, otherwise the true argmax."""
    if np.random.uniform(low=0.0, high=1.0, size=()) < epsilon:
        dim, batch_size = x.dim()
        return int(np.random.uniform(low=0, high=dim[0], size=()))
    return argmax(x, axis)
def epsilon_argmin(x: Expression, epsilon: float, axis: int = None) -> int:
    """Epsilon-greedy argmin: with probability *epsilon* return a uniformly
    random index along x's first dimension, otherwise the true argmin."""
    if np.random.uniform(low=0.0, high=1.0, size=()) < epsilon:
        dim, batch_size = x.dim()
        return int(np.random.uniform(low=0, high=dim[0], size=()))
    return argmin(x, axis)
def gumbel_argmax(prob: Expression, loc: float = 0.0, scale: float = 1.0, axis: int = None) -> int:
    """Sample an index via the Gumbel-max trick: argmax(log(prob) + Gumbel).

    With loc=0, scale=1 this draws an index with probability proportional
    to *prob* (standard Gumbel-max sampling).
    """
    shape, batch_size = prob.dim()
    a = dy.inputVector(np.random.gumbel(loc=loc, scale=scale, size=shape))
    return int(np.argmax((dy.log(prob) + a).value(), axis=axis).astype(np.int32, copy=False))
def gumbel_argmin(prob: Expression, loc: float = 0.0, scale: float = 1.0, axis: int = None) -> int:
    """Argmin counterpart of gumbel_argmax: argmin(log(prob) + Gumbel).

    NOTE(review): unlike the Gumbel-max trick, taking the argmin of
    log(prob) + Gumbel noise is not a standard sampling scheme; confirm the
    intended semantics before relying on its distribution.
    """
    shape, batch_size = prob.dim()
    a = dy.inputVector(np.random.gumbel(loc=loc, scale=scale, size=shape))
    return int(np.argmin((dy.log(prob) + a).value(), axis=axis).astype(np.int32, copy=False))
if __name__ == '__main__':
    # Ad-hoc sanity check: draw 1000 samples from softmax([1,2,3,4]) via the
    # Gumbel-max trick and inspect the empirical histogram.
    x = dy.inputVector([1, 2, 3, 4])
    prob = dy.softmax(x)
    a = np.array([gumbel_argmax(prob) for _ in range(1000)])
    moire.debug(np.histogram(a, bins=4))
| 32.296296 | 99 | 0.645642 | 272 | 1,744 | 4.044118 | 0.213235 | 0.038182 | 0.06 | 0.076364 | 0.654545 | 0.654545 | 0.654545 | 0.654545 | 0.654545 | 0.654545 | 0 | 0.023061 | 0.179472 | 1,744 | 53 | 100 | 32.90566 | 0.745632 | 0 | 0 | 0.277778 | 0 | 0 | 0.042431 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.111111 | 0.055556 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
3043218abb082abd2bfe18a0babcb350c48b33b1 | 983 | py | Python | Project/Unit_Tests/test_Window.py | gmgoodale/Team19-Zoltar-Stock-Trader | c5157a6e40fd74461c414527070b818bb5fba9d2 | [
"MIT"
] | null | null | null | Project/Unit_Tests/test_Window.py | gmgoodale/Team19-Zoltar-Stock-Trader | c5157a6e40fd74461c414527070b818bb5fba9d2 | [
"MIT"
] | null | null | null | Project/Unit_Tests/test_Window.py | gmgoodale/Team19-Zoltar-Stock-Trader | c5157a6e40fd74461c414527070b818bb5fba9d2 | [
"MIT"
] | null | null | null | # Class to test the GrapherWindow Class
class GrapherTester:
    """Ad-hoc tests for the Grapher window class.

    Bug fixes relative to the original: ``subject`` was a local variable (so
    the test methods raised NameError on ``subject``), the test method and
    ``createTestData`` were called without ``self``, and the file/stock names
    were never stored on the instance.
    """

    def __init__(self):
        self.subject = Grapher()
        self.testFileName = 'TestData.csv'
        self.testStockName = 'Test Stock'
        self.testGenerateGraphWithAllPositiveNumbers(self.testFileName,
                                                     self.testStockName)

    @staticmethod
    def createTestData(xAxis, yAxis):
        # Generates a graph with random Data: builds a two-column DataFrame
        # from the given axis values.
        plotData = {'X-Axis': xAxis, 'Y-Axis': yAxis}
        dataFrame = pandas.DataFrame(plotData)
        return dataFrame

    def testGenerateGraphWithAllPositiveNumbers(self, testFileName, stockName):
        """Graph of all-positive data must produce the expected PNG file."""
        dataFrame = self.createTestData([0, 1, 2, 3, 4], [3, 5, 1, 2, 6])
        self.subject.generateGraph(predictionFileName="TestData.csv")
        assert os.path.exists(self.testStockName + " Graph.png")

    def testGenerateGraphWithSomeBadNumbers(self, testFileName, stockName):
        """Graph generation must report failure when data contains negatives."""
        dataFrame = self.createTestData([0, 1, 2, 3, 4], [3, 5, -1, 2, 6])
        assert self.subject.generateGraph(testFileName, dataFrame) == False
| 42.73913 | 79 | 0.678535 | 100 | 983 | 6.63 | 0.5 | 0.012066 | 0.075415 | 0.102564 | 0.187029 | 0.187029 | 0.187029 | 0.187029 | 0.187029 | 0.187029 | 0 | 0.025873 | 0.213632 | 983 | 22 | 80 | 44.681818 | 0.831824 | 0.073245 | 0 | 0 | 0 | 0 | 0.061674 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 1 | 0.235294 | false | 0 | 0 | 0 | 0.352941 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
30454978649b2dfb583e18b9df9d4e7ed82702f9 | 1,562 | py | Python | kfac/layers/__init__.py | saeedsoori/kfac_pytorch | a49d6f30daf6d1e989df9b13e1ca42f1d0742a44 | [
"MIT"
] | 35 | 2020-05-15T16:51:23.000Z | 2022-02-23T20:23:44.000Z | kfac/layers/__init__.py | MLHPC/Distributed_KFAC_Pytorch | cfd6d54012cf23d832818ce302d2d2d049da4936 | [
"MIT"
] | 18 | 2020-04-27T19:36:05.000Z | 2022-03-25T22:14:16.000Z | kfac/layers/__init__.py | MLHPC/Distributed_KFAC_Pytorch | cfd6d54012cf23d832818ce302d2d2d049da4936 | [
"MIT"
] | 14 | 2020-05-24T17:22:14.000Z | 2022-03-17T19:31:07.000Z | import torch.nn as nn
import kfac.modules as km
from kfac.layers.conv import Conv2dLayer
from kfac.layers.embedding import EmbeddingLayer
from kfac.layers.linear import LinearLayer
from kfac.layers.linear import LinearMultiLayer
__all__ = ['KNOWN_MODULES', 'get_kfac_layers', 'module_requires_grad']
KNOWN_MODULES = {'linear', 'conv2d', 'embedding', 'lstmcell'}
def get_kfac_layers(module, **kwargs):
    """Instantiates KFACLayer(s) for module

    Args:
      module: module to register
      **kwargs: parameters to pass to KFACLayer

    Returns:
      list of tuples where each tuple is (module, KFACLayer)

    Raises:
      TypeError: for torch.nn RNN/LSTM cells (unsupported; use kfac.modules)
      NotImplementedError: for any other unrecognized module type
    """
    # Straightforward one-module-one-layer cases, checked in order.
    direct_layer_types = (
        (nn.Linear, LinearLayer),
        (nn.Conv2d, Conv2dLayer),
        (nn.Embedding, EmbeddingLayer),
    )
    for module_cls, layer_cls in direct_layer_types:
        if isinstance(module, module_cls):
            return [(module, layer_cls(module, **kwargs))]

    if isinstance(module, km.LSTMCellBase):
        # One LinearMultiLayer per child submodule of the KFAC-aware cell.
        return [(child, LinearMultiLayer(child, **kwargs))
                for child in module.children()]

    if isinstance(module, nn.RNNCellBase):
        raise TypeError('KFAC does not support torch.nn.{RNN,LSTM}Cell. Use '
                        'kfac.modules.{RNN,LSTM}Cell instead for KFAC support.')

    raise NotImplementedError('KFAC does not support layer {}'.format(
        module.__class__.__name__))
def module_requires_grad(module):
    """Returns False if any module param has .requires_grad=False"""
    for parameter in module.parameters():
        if not parameter.requires_grad:
            return False
    return True
| 38.097561 | 80 | 0.685659 | 189 | 1,562 | 5.539683 | 0.386243 | 0.057307 | 0.053486 | 0.074499 | 0.145177 | 0.064947 | 0 | 0 | 0 | 0 | 0 | 0.00321 | 0.202305 | 1,562 | 40 | 81 | 39.05 | 0.837079 | 0.15493 | 0 | 0 | 0 | 0 | 0.16433 | 0.03972 | 0 | 0 | 0 | 0 | 0 | 1 | 0.08 | false | 0 | 0.24 | 0 | 0.52 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
30455c24d40d043c2b2e52a29833a6122193cce5 | 3,062 | py | Python | src/pycomposite/composite_decorator.py | BstLabs/py-composite | d839303de51000c9afd1568f63ba02e7bdebbc8c | [
"MIT"
] | 4 | 2022-03-10T12:43:53.000Z | 2022-03-11T21:22:16.000Z | src/pycomposite/composite_decorator.py | BstLabs/py-composite | d839303de51000c9afd1568f63ba02e7bdebbc8c | [
"MIT"
] | null | null | null | src/pycomposite/composite_decorator.py | BstLabs/py-composite | d839303de51000c9afd1568f63ba02e7bdebbc8c | [
"MIT"
] | null | null | null | from collections import deque
from functools import reduce
from inspect import getmembers, isfunction, signature
from typing import Any, Iterable, List
from deepmerge import always_merger
def _constructor(self, *parts: List[Iterable[Any]]) -> None:
    """Replacement ``__init__`` installed on composite classes.

    Stores the given parts as-is (no copy is made, so generators passed
    here are consumed by the first traversal).
    """
    self._parts = parts
def _make_iterator(cls):
def _iterator(self):
# Simple depth-first composite Iterator
# Recursive version did not work for some mysterious reason
# This one proved to be more reliable
# Credit: https://stackoverflow.com/questions/26145678/implementing-a-depth-first-tree-iterator-in-python
stack = deque(self._parts)
while stack:
# Pop out the first element in the stack
part = stack.popleft()
if cls == type(part): # The same composite exactly
stack.extendleft(reversed(part._parts))
elif isinstance(part, cls) or not isinstance(part, Iterable):
yield part # derived classes presumably have overloads
else: # Iterable
stack.extendleft(reversed(part))
return _iterator
def _make_initializer(rt: type) -> Any:
return getattr(rt, "__origin__", rt)()
def _make_method(name: str, func: callable) -> callable:
def _make_reduce(m: str, rt: type) -> callable:
def _reduce_parts(self, *args, **kwargs) -> Any:
# self is iterable, results come out flattened
return reduce(
lambda acc, obj: always_merger.merge(
acc, getattr(obj, m)(*args, **kwargs)
)
if rt is dict
else acc + getattr(obj, m)(*args, **kwargs),
self,
_make_initializer(rt),
)
return _reduce_parts
def _make_foreach(m) -> callable:
def _foreach_parts(self, *args, **kwargs) -> callable:
# self is iterable, concrete functions invoked depth first
for obj in self:
getattr(obj, m)(*args, **kwargs)
return _foreach_parts
rt: type = signature(func).return_annotation
return _make_foreach(name) if rt is None else _make_reduce(name, rt)
# TODO: type annotation for parts (have to be descendants from the original class)
def composite(cls: type) -> type:
    """
    Generic class decorator to create a Composite from original class.

    Notes:
    1. the constructor does not make copy, so do not pass generators,
    if you plan to invoke more than one operation.
    2. it will return always flattened results of any operation.

    :param cls: original class
    :return: Composite version of original class
    """
    # The original class also receives the parts-storing constructor.
    setattr(cls, "__init__", _constructor)

    # Wrap every public method so it aggregates/fans out over the parts.
    composite_attrs = {
        method_name: _make_method(method_name, method)
        for method_name, method in getmembers(cls, predicate=isfunction)
        if not method_name.startswith("_")
    }
    composite_attrs["__init__"] = _constructor

    # Derive the composite from the original's base so isinstance checks
    # against the shared base still hold.
    parent = cls.__bases__[0]
    composite_cls = type(cls.__name__, (parent,), composite_attrs)
    composite_cls.__iter__ = _make_iterator(composite_cls)
    return composite_cls
| 34.022222 | 113 | 0.635206 | 372 | 3,062 | 5.045699 | 0.384409 | 0.018647 | 0.017581 | 0.023974 | 0.036761 | 0.025573 | 0 | 0 | 0 | 0 | 0 | 0.004977 | 0.27825 | 3,062 | 89 | 114 | 34.404494 | 0.844344 | 0.281515 | 0 | 0 | 0 | 0 | 0.012558 | 0 | 0 | 0 | 0 | 0.011236 | 0 | 1 | 0.188679 | false | 0 | 0.09434 | 0.037736 | 0.415094 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
305ad04b531275575806e4dde57d0281d296f3a1 | 6,267 | py | Python | file_sync_tool/__main__.py | jackd248/file-sync-tool | 68fbca562f232c2bc064f546d9eade20a2ae456f | [
"MIT"
] | null | null | null | file_sync_tool/__main__.py | jackd248/file-sync-tool | 68fbca562f232c2bc064f546d9eade20a2ae456f | [
"MIT"
] | null | null | null | file_sync_tool/__main__.py | jackd248/file-sync-tool | 68fbca562f232c2bc064f546d9eade20a2ae456f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: future_fstrings -*-
import argparse, sys, os
from collections import defaultdict
from db_sync_tool.utility import helper
# Workaround for ModuleNotFoundError
sys.path.append(os.getcwd())
from file_sync_tool import sync
def main(args={}):
    """
    Main entry point for the command line. Parse the arguments and call to the main process.
    :param args:
    :return:
    """
    parsed = get_arguments(args)
    # Kick off the synchronization with the parsed CLI options and the
    # optional config assembled from them.
    sync.Sync(
        config_file=parsed.config_file,
        verbose=parsed.verbose,
        mute=parsed.mute,
        host_file=parsed.host_file,
        config=build_config(parsed)
    )
def get_arguments(args):
    """
    Parses and returns script arguments
    :param args:
    :return:
    """
    parser = argparse.ArgumentParser(
        prog='file_sync_tool',
        description='A tool for automatic file synchronization from and to host systems.')
    # (short flag, long flag, help text, extra add_argument keyword args);
    # every option is optional.
    options = [
        ('-f', '--config-file', 'Path to configuration file', {'type': str}),
        ('-v', '--verbose', 'Enable extended console output', {'action': 'store_true'}),
        ('-m', '--mute', 'Mute console output', {'action': 'store_true'}),
        ('-o', '--host-file', 'Using an additional hosts file for merging hosts information with the configuration file', {'type': str}),
        ('-th', '--target-host', 'SSH host to target system', {'type': str}),
        ('-tu', '--target-user', 'SSH user for target system', {'type': str}),
        ('-tpw', '--target-password', 'SSH password for target system', {'type': str}),
        ('-tk', '--target-key', 'File path to SSH key for target system', {'type': str}),
        ('-tpo', '--target-port', 'SSH port for target system', {'type': int}),
        ('-oh', '--origin-host', 'SSH host to origin system', {'type': str}),
        ('-ou', '--origin-user', 'SSH user for origin system', {'type': str}),
        ('-opw', '--origin-password', 'SSH password for origin system', {'type': str}),
        ('-ok', '--origin-key', 'File path to SSH key for origin system', {'type': str}),
        ('-opo', '--origin-port', 'SSH port for origin system', {'type': int}),
        ('-fo', '--files-origin', 'File path for origin source of file sync', {'type': str}),
        ('-ft', '--files-target', 'File path for target destination of file sync', {'type': str}),
        ('-fe', '--files-exclude', 'Excludes for file sync', {'type': str}),
        ('-fop', '--files-option', 'Additional rsync options', {'type': str}),
    ]
    for short_flag, long_flag, help_text, extra in options:
        parser.add_argument(short_flag, long_flag,
                            help=help_text,
                            required=False,
                            **extra)
    # The incoming dict (if any) is converted to an argv-style list first.
    return parser.parse_args(helper.dict_to_args(args))
def build_config(args):
    """
    Building an optional config from parsed command line arguments.

    Only explicitly supplied options are copied over, so a partial command
    line yields a partial config.

    :param args: argparse.Namespace with the parsed options
    :return: defaultdict with 'target', 'origin' and optional 'files' sections
    """
    config = defaultdict(dict)
    config['target'] = defaultdict(dict)
    config['origin'] = defaultdict(dict)

    # Per-system SSH options: (config key, parsed value).
    system_options = {
        'target': [
            ('host', args.target_host),
            ('user', args.target_user),
            ('password', args.target_password),
            ('ssh_key', args.target_key),
            ('port', args.target_port),
        ],
        'origin': [
            ('host', args.origin_host),
            ('user', args.origin_user),
            ('password', args.origin_password),
            ('ssh_key', args.origin_key),
            ('port', args.origin_port),
        ],
    }
    for system, options in system_options.items():
        for key, value in options:
            if value is not None:
                config[system][key] = value

    def _files_entry():
        # Ensure config['files']['config'][0] exists before assigning into
        # it. (Previously, passing --files-exclude without --files-origin
        # or --files-target raised a KeyError because the 'config' list
        # was never created.)
        if 'config' not in config['files']:
            config['files']['config'] = [{}]
        return config['files']['config'][0]

    if args.files_origin is not None:
        _files_entry()['origin'] = args.files_origin
    if args.files_target is not None:
        _files_entry()['target'] = args.files_target
    if args.files_exclude is not None:
        _files_entry()['exclude'] = args.files_exclude.split(',')
    if args.files_option is not None:
        config['files']['option'] = args.files_option.split(',')
    return config
if __name__ == "__main__":
main()
| 35.811429 | 142 | 0.529759 | 673 | 6,267 | 4.820208 | 0.190193 | 0.049938 | 0.094328 | 0.086313 | 0.381936 | 0.338779 | 0.333231 | 0.333231 | 0.29254 | 0.215166 | 0 | 0.000982 | 0.349928 | 6,267 | 174 | 143 | 36.017241 | 0.795287 | 0.048987 | 0 | 0.320611 | 0 | 0 | 0.210088 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022901 | false | 0.061069 | 0.030534 | 0 | 0.068702 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
305fbb0c2a18407f12a620fb455d66ee0b1e4534 | 265 | py | Python | convokit/tests/run_all_tests.py | sophieball/Cornell-Conversational-Analysis-Toolkit | da65358baffc34a518114be2d94f1748f8e01240 | [
"MIT"
] | null | null | null | convokit/tests/run_all_tests.py | sophieball/Cornell-Conversational-Analysis-Toolkit | da65358baffc34a518114be2d94f1748f8e01240 | [
"MIT"
] | null | null | null | convokit/tests/run_all_tests.py | sophieball/Cornell-Conversational-Analysis-Toolkit | da65358baffc34a518114be2d94f1748f8e01240 | [
"MIT"
] | null | null | null | from unittest import TestLoader, TextTestRunner
if __name__ == "__main__":
loader = TestLoader()
tests = loader.discover('.')
testRunner = TextTestRunner()
test_results = testRunner.run(tests)
if len(test_results.errors) > 0:
exit(1)
| 22.083333 | 47 | 0.671698 | 28 | 265 | 6 | 0.714286 | 0.130952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009615 | 0.215094 | 265 | 11 | 48 | 24.090909 | 0.798077 | 0 | 0 | 0 | 0 | 0 | 0.034091 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
306394c81660e163df8c60a47a7468258af7a301 | 6,907 | py | Python | mykonos/locator/locator_element.py | monicadanesa/mykonos | 06370a4349ecaba3c2e05561da622b7174435495 | [
"MIT"
] | 17 | 2019-06-10T07:52:17.000Z | 2019-07-20T15:57:44.000Z | mykonos/locator/locator_element.py | monicadanesa/robotframework-mykonos | 06370a4349ecaba3c2e05561da622b7174435495 | [
"MIT"
] | 11 | 2019-07-23T10:05:46.000Z | 2020-09-13T17:48:26.000Z | mykonos/locator/locator_element.py | monicadanesa/robotframework-mykonos | 06370a4349ecaba3c2e05561da622b7174435495 | [
"MIT"
] | 4 | 2019-11-14T00:09:29.000Z | 2021-05-18T10:37:21.000Z | import traceback
from alog import debug, info, error
from mykonos.core.core import Core
from mykonos.keywords.management_device import ManagementDevice
from mykonos.keywords.decorators import Parallel
class LocatorElement(Core):
    """Robot Framework keywords for building uiautomator selectors/locators."""

    def __init__(self):
        # Handle to the active uiautomator device, plus the helper used to
        # resolve a specific device when running against several in parallel.
        self.device_mobile = self.device()
        self.management_device = ManagementDevice()

    @Parallel.device_check
    def get_locator(self, device=None, *argument, **settings):
        """Access locator from device.

        **selector support:**

        * text, textContains, textMatches, textStartsWith
        * className, classNameMatches
        * description, descriptionContains, descriptionMatches, descriptionStartsWith
        * checkable, checked, clickable, longClickable
        * scrollable, enabled, focusable, focused, selected
        * packageName, packageNameMatches
        * resourceId, resourceIdMatches
        * index, instance

        **Example:**

        || ${locator}=  Get Locator | text=sample text

        With Device:

        || @{emulator} =    | 192.168.1.1 | 192.168.1.2

        || ${locator}=  Get Locator | text=sample text | devices_parallel=@{emulator}
        """
        if device is not None:
            # Resolve the selector against the explicitly named device.
            get_device = self.management_device.scan_current_device(device)
            return get_device(*argument, **settings)
        else:
            return self.device_mobile(*argument, **settings)

    def get_child(self, parent, *argument, **settings):
        """Access child locator of *parent* from device.

        **Example:**

        || ${locator}= Get Locator | text=sample text

        || ${child}= Get Child | parent=${locator}  text=sample text
        """
        return parent.child(*argument, **settings)

    def get_sibling(self, parent, *argument, **settings):
        """Access sibling locator of *parent* from device.

        **Example:**

        || ${locator}= Get Locator | text=sample text

        || ${sibling}= Get Sibling | parent=${locator}  text=sample text
        """
        return parent.sibling(*argument, **settings)

    def left_position(self, parent, *argument, **settings):
        """Access the locator positioned left of *parent* on the device.

        **Example:**

        || ${locator}= Get Locator | text=sample text

        || ${left}= Left Position | parent=${locator}  text=sample text
        """
        return parent.left(*argument, **settings)

    def right_position(self, parent, *argument, **settings):
        """Access the locator positioned right of *parent* on the device.

        **Example:**

        || ${locator}= Get Locator | text=sample text

        || ${right}= Right Position | parent=${locator}  text=sample text
        """
        return parent.right(*argument, **settings)

    def up_position(self, parent, *argument, **settings):
        """Access the locator positioned above *parent* on the device.

        **Example:**

        || ${locator}= Get Locator | text=sample text

        || ${up}= Up Position | parent=${locator}  text=sample text
        """
        return parent.up(*argument, **settings)

    def down_position(self, parent, *argument, **settings):
        """Access the locator positioned below *parent* on the device.

        **Example:**

        || ${locator}= Get Locator | text=sample text

        || ${down}= Down Position | parent=${locator}  text=sample text
        """
        return parent.down(*argument, **settings)

    @Parallel.device_check
    def get_locator_by_index(self, device=None, *argument, **settings):
        """Get Element locator by index on device.

        **Example:**

        || Get Locator By Index | text=sample_text | index=1

        || ${locator}= Get Locator  | text=sample text

        || Get Locator By Index | locator=${locator} | index=1
        """
        # 'index' is consumed here so it is not forwarded as a selector.
        index = int(settings['index'])
        del settings['index']

        if 'locator' in settings:
            # An already-resolved locator was passed in: index into it.
            locator = settings['locator']
            del settings['locator']
            return locator[index]
        else:
            if device is not None:
                get_device = self.management_device.scan_current_device(device)
                return get_device(*argument, **settings)[index]
            else:
                return self.device_mobile(*argument, **settings)[index]

    def handlers(self, action, function):
        """Register ('on') or unregister ('off') a custom handler function
        on the device.

        **Example:**

        || Handlers  | action=on | function=sample_function
        """
        # NOTE(review): substring match, so any action containing 'on'
        # (including 'off'... no -- 'off' lacks 'on'; but e.g. 'none')
        # registers the handler; an exact comparison may be intended.
        if 'on' in action:
            return self.device_mobile.handlers.on(function)
        elif 'off' in action:
            return self.device_mobile.handlers.off(function)
class WatcherElement(Core):
    """Class is used to perform some actions when selector cannot be found."""

    def __init__(self):
        # Handle to the active uiautomator device.
        self.device_mobile = self.device()

    def __watcher_register(self, **settings):
        # Register a watcher by name on the device; 'name' is consumed
        # here, the remaining settings stay with the caller.
        name = settings['name']
        del settings['name']
        return self.device_mobile.watcher(name)

    def watcher(self, **settings):
        """Register a watcher that fires when a selector cannot be found.

        name=name of watcher

        WHEN, className=sample_class

        WHEN, packageName=sample_package

        **Example:**

        || ${sample_watcher}=name=sample_watcher | className=sample_class

        || Click Element | watcher=${sample_watcher} | text=sample_text
        """
        name_watcher = settings['name']
        del settings['name']
        # Remaining settings become the WHEN-condition of the watcher.
        self.__watcher = self.__watcher_register(name=name_watcher)
        return self.__watcher.when(**settings)

    def watcher_action(self, action, **settings):
        """Watcher Action is used to run the given action on the watchers.

        run=Force to run all watchers

        remove=Remove watchers

        reset=Reset all triggered watchers

        list=List all watchers

        triggered=Check if there is any watcher triggered

        **Example:**

        || Watcher Action | action=run

        || Watcher Action | action=remove

        || Watcher Action | action=remove | name=sample_watcher

        || Watcher Action | action=reset

        || Watcher Action | action=list

        || Watcher Action | action=triggered
        """
        # NOTE(review): these are substring checks ('run' in action), so an
        # action string containing several keywords matches the first hit.
        if 'run' in action:
            return self.device_mobile.watchers.run()
        elif 'remove' in action:
            if 'name' in settings:
                # Remove one watcher by name.
                name = settings['name']
                del settings['name']
                return self.device_mobile.watchers.remove(name)
            else:
                # Remove all watchers.
                return self.device_mobile.watchers.remove()
        elif 'list' in action:
            return self.device_mobile.watchers
        elif 'reset' in action:
            return self.device_mobile.watchers.reset()
        elif 'triggered' in action:
            return self.device_mobile.watchers.triggered
| 30.697778 | 98 | 0.588968 | 701 | 6,907 | 5.704708 | 0.178317 | 0.072018 | 0.059515 | 0.07877 | 0.463366 | 0.424606 | 0.380595 | 0.273568 | 0.208052 | 0.208052 | 0 | 0.003729 | 0.301144 | 6,907 | 224 | 99 | 30.834821 | 0.824736 | 0.417403 | 0 | 0.253333 | 0 | 0 | 0.026647 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.186667 | false | 0 | 0.066667 | 0 | 0.56 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
3066a68f25a0b8f12738cd73460642da98c0609a | 2,832 | py | Python | validproxy-master/proxyon.py | Zusyaku/Termux-And-Lali-Linux-V2 | b1a1b0841d22d4bf2cc7932b72716d55f070871e | [
"Apache-2.0"
] | 2 | 2021-11-17T03:35:03.000Z | 2021-12-08T06:00:31.000Z | validproxy-master/proxyon.py | Zusyaku/Termux-And-Lali-Linux-V2 | b1a1b0841d22d4bf2cc7932b72716d55f070871e | [
"Apache-2.0"
] | null | null | null | validproxy-master/proxyon.py | Zusyaku/Termux-And-Lali-Linux-V2 | b1a1b0841d22d4bf2cc7932b72716d55f070871e | [
"Apache-2.0"
] | 2 | 2021-11-05T18:07:48.000Z | 2022-02-24T21:25:07.000Z | import requests
import argparse
import sys
import os
import urllib3
import random
def user_agent():
    """Return one browser User-Agent string chosen at random, so repeated
    requests do not all present the same client fingerprint."""
    browser_agents = (
        "Mozilla/5.0 CK={ } (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
        "Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:24.0) Gecko/20100101 Firefox/24.0",
        "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9a1) Gecko/20070308 Minefield/3.0a1",
        "Mozilla/5.0 (Linux; U; Android 2.2) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
        "Mozilla/5.0 (Linux; Android 4.2.1; en-us; Nexus 5 Build/JOP40D) AppleWebKit/535.19 (KHTML, like Gecko; googleweblight) Chrome/38.0.1025.166 Mobile Safari/535.19",
        "Mozilla/5.0 (Linux; Android 6.0.1; RedMi Note 5 Build/RB3N5C; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3440.91 Mobile Safari/537.36",
    )
    return random.choice(browser_agents)
def console_clear():
    """Clear the terminal screen.

    Returns 0 on success, 1 if the clear command could not be issued.
    """
    command = "cls" if sys.platform == "win32" else "clear"
    try:
        os.system(command)
    except Exception:
        print("could not clear console")
        return 1
    return 0
console_clear()
def file_writer(filename, data):
    """Append *data* to *filename*, creating the file if it does not exist.

    Uses a context manager so the handle is closed even when write()
    raises (the previous version leaked the handle on error).
    """
    with open(filename, "a+") as handle:
        handle.write(data)
# --- Script body (note: no `if __name__ == "__main__"` guard, so this
# runs on import as well; requires the third-party `requests` package
# and network access).
parse = argparse.ArgumentParser(description="Send request to a website using proxies to check if it's online How to: python proxyon.py proxies.txt https://www.google.com/")
parse.add_argument("proxy", type=str, help="Enter the list of proxies Example: proxies.txt")
parse.add_argument("web", type=str, help="Enter a website to send http requests using proxies.txt Example: https://www.google.com/")
args = parse.parse_args()

with open(args.proxy, "r") as opnr:
    for x in opnr:
        # Only lines whose first character is a digit look like
        # host:port proxy entries; skip blanks/comments.
        # NOTE(review): x[0].strip() is a single char -- strip() is a no-op
        # here; an empty line still has '\n' at x[0], which is not a digit.
        if x[0].strip().isdigit():
            try:
                # Route the request through the candidate proxy for both
                # schemes; a 10 s timeout keeps dead proxies from hanging.
                req = requests.get(args.web, headers={"User-Agent": user_agent()}, timeout=10 ,proxies={"https": x.strip(), "http": x.strip()})
                if req.status_code == 200:
                    # Working proxy: record it and report in green.
                    file_writer("onlineProxies.txt", x+"\n")
                    print("\033[1;32;40m[+]Status code: {0} OK Proxy: {1}\033[1;0m".format(req.status_code, x))
                else:
                    print("\033[1;36;40m[-]Status code: {0} Proxy: {1}\033[1;0m".format(req.status_code, x))
            except Exception:
                # Connection/timeout/proxy errors: report and continue.
                print("\033[1;36m[-]Error: {0}\033[1;0m".format(x))
                pass
| 44.952381 | 173 | 0.610876 | 428 | 2,832 | 4.011682 | 0.397196 | 0.041934 | 0.047175 | 0.048923 | 0.218404 | 0.193943 | 0.151427 | 0.037274 | 0.037274 | 0.037274 | 0 | 0.121114 | 0.239054 | 2,832 | 62 | 174 | 45.677419 | 0.675638 | 0 | 0 | 0.117647 | 0 | 0.254902 | 0.52852 | 0.015884 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0.019608 | 0.117647 | 0 | 0.235294 | 0.078431 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
30685e05831381d8f7de6843e88f41f59b9c2942 | 1,794 | py | Python | 2017.d/1-echiquier-grain-de-ble.d/src/grain-de-ble.py | homeostasie/petits-pedestres | bf20d94a5f2b12d2bb860ebb06a6b18641271020 | [
"MIT"
] | 1 | 2018-12-29T12:46:53.000Z | 2018-12-29T12:46:53.000Z | 2017.d/1-echiquier-grain-de-ble.d/src/grain-de-ble.py | homeostasie/petits-pedestres | bf20d94a5f2b12d2bb860ebb06a6b18641271020 | [
"MIT"
] | null | null | null | 2017.d/1-echiquier-grain-de-ble.d/src/grain-de-ble.py | homeostasie/petits-pedestres | bf20d94a5f2b12d2bb860ebb06a6b18641271020 | [
"MIT"
] | null | null | null | # On est sur la première case.
# On a un grain de blé sur la première case.
# On a un grain de blé sur l'échéquier.
# Nombre de grain de blé par case.
# case est un nombre entier.
case = 1
# Nombre de grain blé au total sur l'échiquier.
# blé est un nombre entier.
ble = 1
# Coefficient d'augmentation du nombre de grain de blé par case.
# coef est un nombre entier
coef = 2
# Il y a 8*8=64 cases sur un échiquier.
# Il nous reste 63 cases à remplir de grain de blé.
# Si on veut connaitre pour toutes les cases.
print("Sur la première case, il y a : ", case, "grains de blé, pour un total de : ", ble , "grains sur l'échiquier.")
# On parcours les 63 cases restantes
for i in range(2,65):
# Les cases suivantes contienent deux fois plus de grain de blé.
case = case * coef
# À chaque fois on rajoute les grains de la case dans le sac de blé.
ble = ble + case
# Si on veut connaitre pour toutes les cases.
print("Sur la case : ", i , " , il y a : ", case, "grains de blé, pour un total de : ", ble , "grains sur l'échiquier.")
print("Nombre de grain de blé sur l'échéquier", ble)
print("Écriture scientifique : ", ble*1.)
# Poids en g. On multiplie le nombre de grain par 0.05g
poids = ble * 0.05
# Poids en tonnes. On divise par 10^6 ou on multiplie par 10^-6
poids = ble * 0.05 * 10**(-6)
print("Poids du blé total en tonnes : ", poids, " tonnes")
# Le nombre d'année nécéssaire.
# Par an, la production mondiale de blé est : 600 millions.
prod_an = 600 * 10 **6
print("La production annuelle de blé est de ", prod_an, "tonnes de blé")
# Le nombre d'année nécéssaire =
# notre poids de blé sur l'échiquier diviser par la production annuelle.
nombre_an = poids / prod_an
print("Il faudra : ", nombre_an, "années à l'empereur afin de combler ça promesse.")
| 34.5 | 124 | 0.687848 | 325 | 1,794 | 3.781538 | 0.289231 | 0.056957 | 0.056957 | 0.04882 | 0.328723 | 0.283157 | 0.256306 | 0.215622 | 0.215622 | 0.215622 | 0 | 0.029957 | 0.218506 | 1,794 | 51 | 125 | 35.176471 | 0.846648 | 0.544593 | 0 | 0 | 0 | 0 | 0.480454 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.411765 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
3072b72e99113472148324ff54733901999cc011 | 3,080 | py | Python | SIFTS/SIFTS2Table.py | ajing/SIFTS.py | e6bfef6cd047fe90fba4cfa77b09c405ea3caa4b | [
"MIT"
] | null | null | null | SIFTS/SIFTS2Table.py | ajing/SIFTS.py | e6bfef6cd047fe90fba4cfa77b09c405ea3caa4b | [
"MIT"
] | 1 | 2015-01-16T03:45:07.000Z | 2015-01-16T03:47:36.000Z | SIFTS/SIFTS2Table.py | ajing/SIFTS.py | e6bfef6cd047fe90fba4cfa77b09c405ea3caa4b | [
"MIT"
] | null | null | null | '''
SIFTS data to a table, so I can load to MySQL database.
'''
from SIFTSXMLMapControl import processOneXML
from SIFTSXMLMapModel import XMLDIR
import os
## parameters
OUTFILE = "./SIFTS/sift_sql_table_test.txt"
#OUTDIR = "./SIFTS/sift_sql_test/"
OUTDIR = "./SIFTS/sift_sql/"
MOADPDB = "./Data/MOADPDB.txt"
MULTIPRO= True
from multiprocessing import Pool
import threading
# thread safe for writing file
mutex_writefile = threading.Lock()
def oneXML2Table(filename):
    """Convert one SIFTS XML file into a tab-separated mapping file.

    Each output line maps a PDB residue (chain, number, name) to its
    UniProt residue (accession, name, number). The file is written to
    OUTDIR named after the PDB id; nothing is written if no residue maps.
    """
    content = []
    protein = processOneXML(filename)
    pdbid = protein.pdbid
    for eachres in protein.getResidues():
        uniprot = eachres.uniprot
        if uniprot is None:
            continue
        fields = [pdbid, eachres.resChain, eachres.resNum, eachres.resName,
                  uniprot.accid, uniprot.resname, uniprot.resnum]
        # Skip rows with missing data. (The previous check,
        # `all(map(str, fields))`, was a no-op: str(None) is the
        # non-empty string 'None', so it never filtered anything.)
        if not all(field is not None and str(field) != '' for field in fields):
            continue
        content.append("\t".join(map(str, fields)))
    if content:
        # 'with' guarantees the handle is closed even if write() fails.
        with open(OUTDIR + pdbid, "w") as fileobj:
            fileobj.write("\n".join(content) + "\n")
def FileFilter(inputlist):
    """Drop input XML paths whose PDB id already has an output in OUTDIR.

    NOTE: Python 2 syntax (print statements) -- this module does not run
    under Python 3 as written.
    """
    # Collect PDB ids that were already processed (one output file per id).
    pdblist = []
    #for line in open(OUTFILE):
    #    content = line.strip().split()
    for line in os.listdir(OUTDIR):
        content = line.strip()
        pdb = content
        # Output files are named by 4-character PDB id; flag anything else.
        if len(pdb) != 4:
            print "something wrong with: " + pdb
            continue
        pdblist.append(pdb)
    #print "PDB before set unique: ", len(pdblist)
    pdblist = list(set(pdblist))
    #print "PDB in outdir:", len(pdblist)
    # Binding MOAD ids, lower-cased. Currently only consumed by the
    # commented-out BindingMOAD-specific filtering below.
    moadlist= []
    for line in open(MOADPDB):
        content = line.strip()
        moadlist.append(content.lower())
    newlist = []
    # Dead copy kept for the BindingMOAD variant below.
    moadcopy= list(moadlist)
    #print "moadlist len,",len(moadlist)
    #print "inputlist len,", len(inputlist)
    for eachfile in inputlist:
        # PDB id = file basename without its extension.
        pdb = eachfile.split('/')[-1].split('.')[0]
        if not pdb in pdblist:
            newlist.append(eachfile)
        #print "pdb:", pdb
        ## the following code is for BindingMOAD only
        #if pdb in pdblist and pdb in moadlist:
        #    moadcopy.remove(pdb)
        #if not pdb in pdblist and pdb in moadlist:
        #    moadcopy.remove(pdb)
        #    newlist.append(eachfile)
    #print "final list:", len(newlist)
    return newlist
def XML2Table(filedir):
    """Convert every SIFTS XML file under *filedir* into per-PDB tables.

    Already-processed files are skipped via FileFilter; the conversion is
    fanned out over a process pool when MULTIPRO is set.
    """
    allcontent = []  # NOTE(review): never used -- candidate for removal
    inputlist = []
    #try:
    #    os.remove(OUTFILE)
    #except:
    #    pass
    # Recursively gather every file below filedir.
    for root, dirs, files in os.walk(filedir):
        for afile in files:
            afile = os.path.join(root, afile)
            inputlist.append(afile)
    #print "original:", len(inputlist)
    inputlist = FileFilter(inputlist)
    #print len(inputlist)
    if MULTIPRO:
        pool = Pool(processes = 5)
        result = pool.map_async(oneXML2Table, inputlist)
        # wait() blocks until all workers finish; it returns None, so
        # resulttxt carries no results (use result.get() for those).
        resulttxt = result.wait()
    else:
        # for single process
        for filename in inputlist:
            oneXML2Table(filename)
if __name__ == "__main__":
XML2Table(XMLDIR)
| 30.49505 | 145 | 0.619805 | 355 | 3,080 | 5.332394 | 0.360563 | 0.015848 | 0.019017 | 0.019017 | 0.156366 | 0.144744 | 0.144744 | 0.144744 | 0.144744 | 0.144744 | 0 | 0.00397 | 0.263961 | 3,080 | 100 | 146 | 30.8 | 0.831054 | 0.219156 | 0 | 0.078125 | 0 | 0 | 0.045455 | 0.01342 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.078125 | null | null | 0.015625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
307bbc5138ee2f7503540b7554891898a4c7e155 | 435 | py | Python | members/crm/migrations/0017_profile_qb_realm_id.py | ocwc/ocwc-members | 3ede8e0ff830e2aaff4ae09f9aaefd3eaa83146b | [
"MIT"
] | null | null | null | members/crm/migrations/0017_profile_qb_realm_id.py | ocwc/ocwc-members | 3ede8e0ff830e2aaff4ae09f9aaefd3eaa83146b | [
"MIT"
] | 7 | 2015-11-27T15:59:52.000Z | 2022-01-13T00:38:38.000Z | members/crm/migrations/0017_profile_qb_realm_id.py | ocwc/ocwc-members | 3ede8e0ff830e2aaff4ae09f9aaefd3eaa83146b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2019-01-20 12:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the qb_realm_id field
    # (QuickBooks realm identifier) to the Profile model.

    dependencies = [("crm", "0016_profile")]

    operations = [
        migrations.AddField(
            model_name="profile",
            name="qb_realm_id",
            # NOTE(review): default=b"" is a *bytes* default on a
            # TextField -- looks unintended (probably meant "").
            # Confirm before changing: altering an applied historical
            # migration requires care.
            field=models.TextField(blank=True, default=b""),
        )
    ]
| 22.894737 | 60 | 0.627586 | 51 | 435 | 5.176471 | 0.823529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.067073 | 0.245977 | 435 | 18 | 61 | 24.166667 | 0.737805 | 0.158621 | 0 | 0 | 1 | 0 | 0.090909 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.181818 | 0 | 0.454545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
061ce201158d27d7729ca028543c403f4ac3915a | 3,083 | py | Python | osdria/views/draftbar_element_view.py | soberleitner/osdriaApp | 8b4638e6edc2d265afd09098cfc4a8db34403cce | [
"MIT"
] | null | null | null | osdria/views/draftbar_element_view.py | soberleitner/osdriaApp | 8b4638e6edc2d265afd09098cfc4a8db34403cce | [
"MIT"
] | 20 | 2019-01-17T10:31:10.000Z | 2019-03-15T07:12:11.000Z | osdria/views/draftbar_element_view.py | soberleitner/osdriaApp | 8b4638e6edc2d265afd09098cfc4a8db34403cce | [
"MIT"
] | null | null | null | from math import ceil
from PySide2.QtCore import QRect, QSize, Qt, QAbstractTableModel, QMimeData, QByteArray
from PySide2.QtGui import QPainter, QStandardItemModel, QStandardItem, QPen
from PySide2.QtWidgets import *
from models.constants import PropType, MimeType
from views.draftbar_element_view_ui import Ui_DraftElement
LIST_HEIGHT = 100
ELEMENT_COLUMN_NUMBER = 3
ELEMENT_SIZE = 30
class DraftbarElementView(QWidget):
    """view of draft element category in sidebar"""

    def __init__(self, parent, category_name, element_model, element_controller):
        # category_name: label shown above the element list;
        # element_model: collection of draft elements to display;
        # element_controller: kept for future wiring (currently unused
        # beyond storage -- see the commented-out signal connects below).
        super(DraftbarElementView, self).__init__(parent)
        self._category = category_name
        self._model = element_model
        self._ctrl = element_controller
        self._ui = Ui_DraftElement()
        self._ui.setupUi(self)

        """connect widgets to controller"""
        self._ui.dropdown_button.hide()
        #self._ui.element_label.clicked.connect(self.toggle_list)
        #self._ui.dropdown_button.clicked.connect(self.toggle_list)

        """listen for model event signals"""

        """initialize view"""
        self._ui.element_label.setText(str(self._category))
        # Wrap the element collection in a table model for the list view.
        list_model = ElementListModel(self)
        list_model.set_model(self._model)
        self._ui.element_list.setModel(list_model)
        self.toggle_list()

    def toggle_list(self):
        """show or hide element list"""
        # todo toggle list
class ElementListModel(QAbstractTableModel):
    """Table model laying draft elements out as a fixed-width grid of icon
    thumbnails, with drag support carrying the process-core name."""

    def __init__(self, parent=None):
        super(ElementListModel, self).__init__(parent)
        self._thumbnail_size = QSize(ELEMENT_SIZE, ELEMENT_SIZE)
        self._model = None

    def set_model(self, model):
        """Attach the element collection backing this table."""
        self._model = model

    def _linear_index(self, index):
        """Map a (row, column) model index to a flat element position."""
        return index.row() * ELEMENT_COLUMN_NUMBER + index.column()

    def rowCount(self, parent=None, *args, **kwargs):
        """Grid rows needed to hold every element (ceiling division)."""
        return ceil(len(self._model) / ELEMENT_COLUMN_NUMBER)

    def columnCount(self, parent=None, *args, **kwargs):
        """Fixed number of grid columns."""
        return ELEMENT_COLUMN_NUMBER

    def flags(self, index):
        """Disable the trailing cells of the last row that hold no element."""
        if self._linear_index(index) < len(self._model):
            return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsDragEnabled
        return Qt.NoItemFlags

    def data(self, index, role=None):
        """Return the element's scaled icon for the decoration role."""
        position = self._linear_index(index)
        # The last row may contain empty cells past the final element.
        if position >= len(self._model):
            return None
        pix_map = self._model[position].icon.pixmap(self._thumbnail_size)
        pix_map.scaled(self._thumbnail_size, Qt.KeepAspectRatio)
        return pix_map if role == Qt.DecorationRole else None

    def mimeData(self, indexes):
        """Encode the dragged element's process-core name for the scene."""
        mime_data = QMimeData()
        position = self._linear_index(indexes[0])
        # Send the name of the process core to the QGraphicsScene.
        payload = QByteArray(bytes(self._model[position].name, 'UTF-8'))
        mime_data.setData(MimeType.PROCESS_CORE.value, payload)
        return mime_data
| 35.034091 | 87 | 0.687642 | 368 | 3,083 | 5.494565 | 0.342391 | 0.04451 | 0.05638 | 0.032641 | 0.159248 | 0.110781 | 0.081108 | 0.049456 | 0.049456 | 0 | 0 | 0.004975 | 0.217645 | 3,083 | 87 | 88 | 35.436782 | 0.833333 | 0.105417 | 0 | 0.072727 | 0 | 0 | 0.00189 | 0 | 0 | 0 | 0 | 0.011494 | 0 | 1 | 0.163636 | false | 0 | 0.109091 | 0.036364 | 0.436364 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0620e768a7fbf1dee0b88c0596c61846cf5102f5 | 1,333 | py | Python | neo4s/bin/neo4j.py | omerl13/neo4s | 5471818838099310a153d2c0844cc3c0f1943b79 | [
"Apache-2.0"
] | 2 | 2019-03-12T06:33:14.000Z | 2020-02-07T05:57:26.000Z | neo4s/bin/neo4j.py | omerl13/neo4s | 5471818838099310a153d2c0844cc3c0f1943b79 | [
"Apache-2.0"
] | 1 | 2019-02-22T20:49:11.000Z | 2020-06-21T08:13:29.000Z | neo4s/bin/neo4j.py | omerl13/neo4s | 5471818838099310a153d2c0844cc3c0f1943b79 | [
"Apache-2.0"
] | 4 | 2018-02-16T22:11:29.000Z | 2020-10-01T10:26:16.000Z | import json
import sys
import time
from splunklib.searchcommands import dispatch, GeneratingCommand, Configuration, Option, validators
from neo4j import GraphDatabase, basic_auth
from neo4j.graph import Node, Relationship
from fields_extractor import FieldsExtractor
@Configuration()
class Neo4jCommand(GeneratingCommand):
    """Splunk generating command that streams results of a Cypher query.

    Usage::

        | neo4j host=<host> query=<cypher>
                [username=<user> password=<pass> scheme=bolt]
    """
    query = Option(require=True)
    host = Option(require=True)
    username = Option(require=False, default="")
    password = Option(require=False, default="")
    scheme = Option(require=False, default="bolt")
    def __get_data(self, query, host, username, password, scheme):
        """Yield neo4j records for *query*, releasing the driver when done."""
        url = scheme + "://" + host
        # Only authenticate when both credentials were supplied.
        auth = None
        if username != "" and password != "":
            auth = basic_auth(username, password)
        driver = GraphDatabase.driver(
            url,
            auth=auth)
        # BUG FIX: the original never closed the session or driver,
        # leaking the connection pool on every search invocation.
        try:
            with driver.session() as session:
                results = session.run(query, parameters={})
                for record in results:
                    yield record
        finally:
            driver.close()
    def generate(self):
        """Entry point called by splunklib; returns extracted result rows."""
        results = self.__get_data(self.query, self.host,
                                  self.username, self.password, self.scheme)
        fields_extractor = FieldsExtractor()
        return fields_extractor.extract(results)
# Register the command with Splunk's search pipeline when run as a script.
dispatch(Neo4jCommand, module_name=__name__)
| 33.325 | 99 | 0.660915 | 137 | 1,333 | 6.313869 | 0.430657 | 0.075145 | 0.062428 | 0.086705 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00398 | 0.246062 | 1,333 | 39 | 100 | 34.179487 | 0.856716 | 0.024006 | 0 | 0 | 0 | 0 | 0.005389 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0.15625 | 0.21875 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0622be91e71ec680b6eb184671caee406ba87623 | 489 | py | Python | data_loader.py | imdeepmind/GenerateFace | a88268597a42cea87bd8179b3151a222261e64b8 | [
"MIT"
] | null | null | null | data_loader.py | imdeepmind/GenerateFace | a88268597a42cea87bd8179b3151a222261e64b8 | [
"MIT"
] | null | null | null | data_loader.py | imdeepmind/GenerateFace | a88268597a42cea87bd8179b3151a222261e64b8 | [
"MIT"
] | null | null | null | from torch.utils.data import Dataset
from PIL import Image
import torch
import os
class FaceDataset(Dataset):
    """Torch dataset yielding (optionally transformed) images from a folder."""

    def __init__(self, path, transform=None):
        """Index every file found directly under *path*."""
        self.__path = path
        self.__images = os.listdir(path)
        self.__transform = transform

    def __len__(self):
        """Number of image files found in the folder."""
        return len(self.__images)

    def __getitem__(self, index):
        """Open the index-th image, applying the transform when set."""
        file_path = os.path.join(self.__path, self.__images[index])
        image = Image.open(file_path)
        return self.__transform(image) if self.__transform else image
06299431284ea5f52f9c6bead3c645272c604cd5 | 14,196 | py | Python | src/zope/dublincore/interfaces.py | zopefoundation/zope.dublincore | c6e3478bf14e29070e5b859216a0c215b874f893 | [
"ZPL-2.1"
] | 2 | 2015-02-03T21:22:01.000Z | 2021-02-01T06:13:46.000Z | src/zope/dublincore/interfaces.py | zopefoundation/zope.dublincore | c6e3478bf14e29070e5b859216a0c215b874f893 | [
"ZPL-2.1"
] | 8 | 2015-02-03T14:16:22.000Z | 2020-11-23T07:43:59.000Z | src/zope/dublincore/interfaces.py | zopefoundation/zope.dublincore | c6e3478bf14e29070e5b859216a0c215b874f893 | [
"ZPL-2.1"
] | 1 | 2015-04-03T08:41:24.000Z | 2015-04-03T08:41:24.000Z | ##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Dublin Core interfaces
"""
from zope.interface import Interface
from zope.schema import Text, TextLine, Datetime, Tuple
class IDublinCoreElementItem(Interface):
    """A single qualified Dublin Core element: qualification plus value."""
    # Qualifier refining the element's meaning, e.g. "creation" on a Date.
    qualification = TextLine(
        title=u"Qualification",
        description=u"The element qualification"
        )
    # The element's textual value.
    value = Text(
        title=u"Value",
        description=u"The element value",
        )
class IGeneralDublinCore(Interface):
    """Dublin-core data access interface

    The Dublin Core, http://dublincore.org/, is a meta data standard
    that specifies a set of standard data elements. It provides
    flexibility of interpretation of these elements by providing for
    element qualifiers that specialize the meaning of specific
    elements. For example, a date element might have a qualifier, like
    "creation" to indicate that the date is a creation date. In
    addition, any element may be repeated. For some elements, like
    subject, and contributor, this is obviously necessary, but for
    other elements, like title and description, allowing repetitions
    is not very useful and adds complexity.

    This interface provides methods for retrieving data in full
    generality, to be compliant with the Dublin Core standard.
    Other interfaces will provide more convenient access methods
    tailored to specific element usage patterns.
    """
    # Each accessor below returns a sequence of IDublinCoreElementItem,
    # one item per (possibly qualified) occurrence of the element.
    def getQualifiedTitles():
        """Return a sequence of Title IDublinCoreElementItem."""
    def getQualifiedCreators():
        """Return a sequence of Creator IDublinCoreElementItem."""
    def getQualifiedSubjects():
        """Return a sequence of Subject IDublinCoreElementItem."""
    def getQualifiedDescriptions():
        """Return a sequence of Description IDublinCoreElementItem."""
    def getQualifiedPublishers():
        """Return a sequence of Publisher IDublinCoreElementItem."""
    def getQualifiedContributors():
        """Return a sequence of Contributor IDublinCoreElementItem."""
    def getQualifiedDates():
        """Return a sequence of Date IDublinCoreElementItem."""
    def getQualifiedTypes():
        """Return a sequence of Type IDublinCoreElementItem."""
    def getQualifiedFormats():
        """Return a sequence of Format IDublinCoreElementItem."""
    def getQualifiedIdentifiers():
        """Return a sequence of Identifier IDublinCoreElementItem."""
    def getQualifiedSources():
        """Return a sequence of Source IDublinCoreElementItem."""
    def getQualifiedLanguages():
        """Return a sequence of Language IDublinCoreElementItem."""
    def getQualifiedRelations():
        """Return a sequence of Relation IDublinCoreElementItem."""
    def getQualifiedCoverages():
        """Return a sequence of Coverage IDublinCoreElementItem."""
    def getQualifiedRights():
        """Return a sequence of Rights IDublinCoreElementItem."""
class IWritableGeneralDublinCore(Interface):
    """Provide write access to dublin core data

    This interface augments `IGeneralDublinCore` with methods for
    writing elements.
    """
    def setQualifiedTitles(qualified_titles):
        """Set the qualified Title elements.

        The argument must be a sequence of Title `IDublinCoreElementItem`.
        """
    def setQualifiedCreators(qualified_creators):
        """Set the qualified Creator elements.

        The argument must be a sequence of Creator `IDublinCoreElementItem`.
        """
    def setQualifiedSubjects(qualified_subjects):
        """Set the qualified Subjects elements.

        The argument must be a sequence of Subject `IDublinCoreElementItem`.
        """
    def setQualifiedDescriptions(qualified_descriptions):
        """Set the qualified Descriptions elements.

        The argument must be a sequence of Description
        `IDublinCoreElementItem`.
        """
    def setQualifiedPublishers(qualified_publishers):
        """Set the qualified Publishers elements.

        The argument must be a sequence of Publisher `IDublinCoreElementItem`.
        """
    def setQualifiedContributors(qualified_contributors):
        """Set the qualified Contributors elements.

        The argument must be a sequence of Contributor
        `IDublinCoreElementItem`.
        """
    def setQualifiedDates(qualified_dates):
        """Set the qualified Dates elements.

        The argument must be a sequence of Date `IDublinCoreElementItem`.
        """
    def setQualifiedTypes(qualified_types):
        """Set the qualified Types elements.

        The argument must be a sequence of Type `IDublinCoreElementItem`.
        """
    def setQualifiedFormats(qualified_formats):
        """Set the qualified Formats elements.

        The argument must be a sequence of Format `IDublinCoreElementItem`.
        """
    def setQualifiedIdentifiers(qualified_identifiers):
        """Set the qualified Identifiers elements.

        The argument must be a sequence of Identifier `IDublinCoreElementItem`.
        """
    def setQualifiedSources(qualified_sources):
        """Set the qualified Sources elements.

        The argument must be a sequence of Source `IDublinCoreElementItem`.
        """
    def setQualifiedLanguages(qualified_languages):
        """Set the qualified Languages elements.

        The argument must be a sequence of Language `IDublinCoreElementItem`.
        """
    def setQualifiedRelations(qualified_relations):
        """Set the qualified Relations elements.

        The argument must be a sequence of Relation `IDublinCoreElementItem`.
        """
    def setQualifiedCoverages(qualified_coverages):
        """Set the qualified Coverages elements.

        The argument must be a sequence of Coverage `IDublinCoreElementItem`.
        """
    def setQualifiedRights(qualified_rights):
        """Set the qualified Rights elements.

        The argument must be a sequence of Rights `IDublinCoreElementItem`.
        """
class IDCDescriptiveProperties(Interface):
    """Basic descriptive meta-data properties"""
    # Attribute-style access to the first unqualified Title/Description
    # elements (compare the sequence-valued IGeneralDublinCore accessors).
    title = TextLine(
        title=u'Title',
        description=u"The first unqualified Dublin Core 'Title' element value."
        )
    description = Text(
        title=u'Description',
        description=(
            u"The first unqualified Dublin Core 'Description' element value.")
        )
class IDCTimes(Interface):
    """Time properties"""
    # Creation/modification timestamps, exposed as datetime attributes.
    created = Datetime(
        title=u'Creation Date',
        description=u"The date and time that an object is created."
        u"\nThis is normally set automatically."
        )
    modified = Datetime(
        title=u'Modification Date',
        description=u"The date and time that the object was last modified in a"
        u" meaningful way."
        )
class IDCPublishing(Interface):
    """Publishing properties"""
    # Publication window: the object should be visible between
    # `effective` and `expires`.
    effective = Datetime(
        title=u'Effective Date',
        description=u"The date and time that an object should be published."
        )
    expires = Datetime(
        title=u'Expiration Date',
        description=u"The date and time that the object should become"
        u" unpublished."
        )
class IDCExtended(Interface):
    """Extended properties

    This is a mixed bag of properties we want but that we probably haven't
    quite figured out yet.
    """
    # Sequence-valued elements are tuples of text lines; `publisher`
    # is the single first unqualified value.
    creators = Tuple(
        title=u'Creators',
        description=u"The unqualified Dublin Core 'Creator' element values",
        value_type=TextLine(),
        )
    subjects = Tuple(
        title=u'Subjects',
        description=u"The unqualified Dublin Core 'Subject' element values",
        value_type=TextLine(),
        )
    publisher = Text(
        title=u'Publisher',
        description=u"The first unqualified Dublin Core 'Publisher' element"
        u" value.",
        )
    contributors = Tuple(
        title=u'Contributors',
        description=u"The unqualified Dublin Core 'Contributor' element"
        u" values",
        value_type=TextLine(),
        )
class ICMFDublinCore(Interface):
    """This interface duplicates the CMF dublin core interface.

    Single-valued accessors return a unicode string (empty when the
    element is not defined); multi-valued accessors return sequences
    of unicode strings.
    """
    def Title():
        """Return the resource title.

        The first unqualified Dublin Core `Title` element value is
        returned as a unicode string if an unqualified element is
        defined, otherwise, an empty unicode string is returned.
        """
    def Creator():
        """Return the resource creators.

        Return the full name(s) of the author(s) of the content
        object.

        The unqualified Dublin Core `Creator` element values are
        returned as a sequence of unicode strings.
        """
    def Subject():
        """Return the resource subjects.

        The unqualified Dublin Core `Subject` element values are
        returned as a sequence of unicode strings.
        """
    def Description():
        """Return the resource description

        Return a natural language description of this object.

        The first unqualified Dublin Core `Description` element value is
        returned as a unicode string if an unqualified element is
        defined, otherwise, an empty unicode string is returned.
        """
    def Publisher():
        """Dublin Core element - resource publisher

        Return full formal name of the entity or person responsible
        for publishing the resource.

        The first unqualified Dublin Core `Publisher` element value is
        returned as a unicode string if an unqualified element is
        defined, otherwise, an empty unicode string is returned.
        """
    def Contributors():
        """Return the resource contributors

        Return any additional collaborators.

        The unqualified Dublin Core `Contributor` element values are
        returned as a sequence of unicode strings.
        """
    def Date():
        """Return the default date

        The first unqualified Dublin Core `Date` element value is
        returned as a unicode string if an unqualified element is
        defined, otherwise, an empty unicode string is returned. The
        string is formatted 'YYYY-MM-DD H24:MN:SS TZ'.
        """
    def CreationDate():
        """Return the creation date.

        The value of the first Dublin Core `Date` element qualified by
        'creation' is returned as a unicode string if a qualified
        element is defined, otherwise, an empty unicode string is
        returned. The string is formatted 'YYYY-MM-DD H24:MN:SS TZ'.
        """
    def EffectiveDate():
        """Return the effective date

        The value of the first Dublin Core `Date` element qualified by
        'effective' is returned as a unicode string if a qualified
        element is defined, otherwise, an empty unicode string is
        returned. The string is formatted 'YYYY-MM-DD H24:MN:SS TZ'.
        """
    def ExpirationDate():
        """Date resource expires.

        The value of the first Dublin Core `Date` element qualified by
        'expiration' is returned as a unicode string if a qualified
        element is defined, otherwise, an empty unicode string is
        returned. The string is formatted 'YYYY-MM-DD H24:MN:SS TZ'.
        """
    def ModificationDate():
        """Date resource last modified.

        The value of the first Dublin Core `Date` element qualified by
        'modification' is returned as a unicode string if a qualified
        element is defined, otherwise, an empty unicode string is
        returned. The string is formatted 'YYYY-MM-DD H24:MN:SS TZ'.
        """
    def Type():
        """Return the resource type

        Return a human-readable type name for the resource.

        The first unqualified Dublin Core `Type` element value is
        returned as a unicode string if an unqualified element is
        defined, otherwise, an empty unicode string is returned.
        """
    def Format():
        """Return the resource format.

        Return the resource's MIME type (e.g., 'text/html',
        'image/png', etc.).

        The first unqualified Dublin Core `Format` element value is
        returned as a unicode string if an unqualified element is
        defined, otherwise, an empty unicode string is returned.
        """
    def Identifier():
        """Return the URL of the resource.

        This value is computed. It is included in the output of
        getQualifiedIdentifiers with the qualification 'url'.
        """
    def Language():
        """Return the resource language.

        Return the RFC language code (e.g., 'en-US', 'pt-BR')
        for the resource.

        The first unqualified Dublin Core `Language` element value is
        returned as a unicode string if an unqualified element is
        defined, otherwise, an empty unicode string is returned.
        """
    def Rights():
        """Return the resource rights.

        Return a string describing the intellectual property status,
        if any, of the resource.

        The first unqualified Dublin Core `Rights` element value is
        returned as a unicode string if an unqualified element is
        defined, otherwise, an empty unicode string is returned.
        """
class IZopeDublinCore(
    IGeneralDublinCore,
    ICMFDublinCore,
    IDCDescriptiveProperties,
    IDCTimes,
    IDCPublishing,
    IDCExtended,
    ):
    """Zope Dublin Core properties

    Read-only aggregate of the generic, CMF-style and attribute-style
    Dublin Core access interfaces.
    """
class IWriteZopeDublinCore(
    IZopeDublinCore,
    IWritableGeneralDublinCore,
    ):
    """Zope Dublin Core properties with generic update support

    Extends `IZopeDublinCore` with the qualified-element setters from
    `IWritableGeneralDublinCore`.
    """
| 31.616927 | 79 | 0.661736 | 1,559 | 14,196 | 6.014112 | 0.191148 | 0.031677 | 0.038716 | 0.027197 | 0.468003 | 0.458191 | 0.332231 | 0.302368 | 0.212671 | 0.212671 | 0 | 0.001516 | 0.256692 | 14,196 | 448 | 80 | 31.6875 | 0.887036 | 0.571569 | 0 | 0.04 | 0 | 0 | 0.167566 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.368 | false | 0 | 0.016 | 0 | 0.56 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0631e112bec29f16c5eb17d9c69bab9e634fcc33 | 1,203 | py | Python | backend/django/campsignup/backend/migrations/0018_auto_20200812_1752.py | Ring-deutscher-PfadfinderInnenverbande/campsignup | 1e182322d31dc41083fa06138d709eddc4f448be | [
"MIT"
] | null | null | null | backend/django/campsignup/backend/migrations/0018_auto_20200812_1752.py | Ring-deutscher-PfadfinderInnenverbande/campsignup | 1e182322d31dc41083fa06138d709eddc4f448be | [
"MIT"
] | null | null | null | backend/django/campsignup/backend/migrations/0018_auto_20200812_1752.py | Ring-deutscher-PfadfinderInnenverbande/campsignup | 1e182322d31dc41083fa06138d709eddc4f448be | [
"MIT"
] | null | null | null | # Generated by Django 3.0.8 on 2020-08-12 17:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django; edit with care. Adds a many-to-many
    # "more_owners" field on Group and a Group foreign key on Participant.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('backend', '0017_participant_familymember'),
    ]
    operations = [
        migrations.AddField(
            model_name='group',
            name='more_owners',
            # NOTE(review): null=True has no effect on ManyToManyField in
            # Django -- confirm before cleaning up in a later migration.
            field=models.ManyToManyField(default=None, help_text="Ein weiteres Array Feld, in dem das FrontEnd E-Mails ablegen kann, falls für die jeweilige Mail ein Benutzer gefunden wird, wird er hier verlinkt und erhält dann automatisch 'Owner' Rechte auf die Gruppe", null=True, related_name='more_grps', to=settings.AUTH_USER_MODEL, verbose_name='Weitere Besitzer'),
        ),
        migrations.AddField(
            model_name='participant',
            name='group',
            # default=1 backfills existing Participant rows with group pk 1;
            # preserve_default=False drops the default from the schema.
            field=models.ForeignKey(default=1, help_text='Die Gruppierung des Teilnehmers', on_delete=django.db.models.deletion.PROTECT, related_name='participants', to='backend.Group', verbose_name='Gruppierung'),
            preserve_default=False,
        ),
    ]
| 42.964286 | 371 | 0.699917 | 146 | 1,203 | 5.636986 | 0.636986 | 0.029162 | 0.034022 | 0.053463 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020812 | 0.201164 | 1,203 | 27 | 372 | 44.555556 | 0.835588 | 0.037406 | 0 | 0.190476 | 1 | 0.047619 | 0.314014 | 0.025087 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
06379da7ba190aa5c83bba0f35c8dcaf9f310e0c | 2,835 | py | Python | awacs/crowd.py | MicrohexHQ/awacs | 91800082135167d1e2e24a7f14b5e5800ef17a83 | [
"BSD-2-Clause"
] | null | null | null | awacs/crowd.py | MicrohexHQ/awacs | 91800082135167d1e2e24a7f14b5e5800ef17a83 | [
"BSD-2-Clause"
] | null | null | null | awacs/crowd.py | MicrohexHQ/awacs | 91800082135167d1e2e24a7f14b5e5800ef17a83 | [
"BSD-2-Clause"
] | null | null | null | # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from aws import Action as BaseAction
from aws import BaseARN
# Service metadata used to build "crowd:<ActionName>" IAM policy actions.
service_name = 'Amazon Mechanical Turk Crowd'
prefix = 'crowd'
class Action(BaseAction):
    """An IAM policy action in the 'crowd' namespace."""
    def __init__(self, action=None):
        # Every crowd action shares the module-level service prefix.
        super(Action, self).__init__(prefix, action)
class ARN(BaseARN):
    """An ARN for resources belonging to the 'crowd' service."""
    def __init__(self, resource='', region='', account=''):
        super(ARN, self).__init__(service=prefix, resource=resource,
                                  region=region, account=account)
# One module-level Action constant per Mechanical Turk crowd API call,
# for use when composing IAM policy statements.
ApproveAssignment = Action('ApproveAssignment')
ApproveRejectedAssignment = Action('ApproveRejectedAssignment')
AssignQualification = Action('AssignQualification')
BlockWorker = Action('BlockWorker')
ChangeHITTypeOfHIT = Action('ChangeHITTypeOfHIT')
CreateHIT = Action('CreateHIT')
CreateQualificationType = Action('CreateQualificationType')
DisableHIT = Action('DisableHIT')
DisposeHIT = Action('DisposeHIT')
DisposeQualificationType = Action('DisposeQualificationType')
ExtendHIT = Action('ExtendHIT')
ForceExpireHIT = Action('ForceExpireHIT')
GetAccountBalance = Action('GetAccountBalance')
GetAssignment = Action('GetAssignment')
GetAssignmentsForHIT = Action('GetAssignmentsForHIT')
GetBlockedWorkers = Action('GetBlockedWorkers')
GetBonusPayments = Action('GetBonusPayments')
GetFileUploadURL = Action('GetFileUploadURL')
GetHIT = Action('GetHIT')
GetHITsForQualificationType = Action('GetHITsForQualificationType')
GetQualificationRequests = Action('GetQualificationRequests')
GetQualificationScore = Action('GetQualificationScore')
GetQualificationType = Action('GetQualificationType')
GetQualificationsForQualificationType = \
    Action('GetQualificationsForQualificationType')
GetRequesterStatistic = Action('GetRequesterStatistic')
GetRequesterWorkerStatistic = Action('GetRequesterWorkerStatistic')
GetReviewResultsForHIT = Action('GetReviewResultsForHIT')
GetReviewableHITs = Action('GetReviewableHITs')
GetTask = Action('GetTask')
GrantBonus = Action('GrantBonus')
GrantQualification = Action('GrantQualification')
NotifyWorkers = Action('NotifyWorkers')
PutTask = Action('PutTask')
RegisterHITType = Action('RegisterHITType')
RejectAssignment = Action('RejectAssignment')
RejectQualificationRequest = Action('RejectQualificationRequest')
RevokeQualification = Action('RevokeQualification')
SearchHITs = Action('SearchHITs')
SearchQualificationTypes = Action('SearchQualificationTypes')
SendTestEventNotification = Action('SendTestEventNotification')
SetHITAsReviewing = Action('SetHITAsReviewing')
SetHITTypeNotification = Action('SetHITTypeNotification')
UnblockWorker = Action('UnblockWorker')
UpdateQualificationScore = Action('UpdateQualificationScore')
UpdateQualificationType = Action('UpdateQualificationType')
| 39.375 | 70 | 0.8 | 210 | 2,835 | 10.719048 | 0.404762 | 0.007108 | 0.01155 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003123 | 0.096296 | 2,835 | 71 | 71 | 39.929577 | 0.875488 | 0.03739 | 0 | 0 | 0 | 0 | 0.305433 | 0.145007 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033898 | false | 0 | 0.033898 | 0 | 0.101695 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
064201ab68fe8026b9f8054b2d6766bf8202e591 | 9,582 | py | Python | data_api/api.py | xpenalosa/Degree-Final-Project | 5d6c1a6aa034c41bf88ae0c8b16d7c0ddb0c3eec | [
"Unlicense"
] | null | null | null | data_api/api.py | xpenalosa/Degree-Final-Project | 5d6c1a6aa034c41bf88ae0c8b16d7c0ddb0c3eec | [
"Unlicense"
] | null | null | null | data_api/api.py | xpenalosa/Degree-Final-Project | 5d6c1a6aa034c41bf88ae0c8b16d7c0ddb0c3eec | [
"Unlicense"
] | null | null | null | #!/usr/bin/python3
import logging
from datetime import datetime, timedelta
import json
import re
from kazoo.client import KazooClient
from kazoo.recipe.lock import Lock, ReadLock
from kazoo.exceptions import LockTimeout, NoNodeError
from data_api.errors import DataApiErrors, DataApiException
class DataAPI():
"""Data API for the tournament managing website.
"""
MATCH_STATUS = {
"UNPLAYED" : "U",
"P1_WON" : "1",
"P2_WON" : "2" }
def __init__(self, client):
self.client = client
self.path = "/tornejos"
self.client.ensure_path(self.path)
def __get_tournament_path(self, tournament_id):
"""Get the path to the specified tournament's zNode."""
t_id = '0'*(10-len(tournament_id)) + tournament_id
tournament_path = self.path + "/t" + t_id
return tournament_path
def __create_tournament(self,
name, modality, password, player_amount):
"""Create a new tournament zNode.
Creates a new node in the zookeeper database that corresponds
to a tournament. The information stored per zNode is a string
containing a json structure with the following format:
Tournament
{
'name' : <string[32]>,
'modality' : <integer>,
'password' : <string[16]>,
'classification' : <string[128]>,
'deletion_date' : <string[10]>
(dd/mm/yyyy)
}
On success returns the created tournament path.
"""
# Calculate deletion date and format to dd/mm/yyyy
deletion_date = datetime.now() + timedelta(days=30)
formatted_deletion_date = deletion_date.strftime("%d/%m/%Y")
# Calculate amount of matches
matches = player_amount - 1
# Initialize the classification to Unplayed for every match
classification = self.MATCH_STATUS["UNPLAYED"] * matches
# Concatenate the data into a dictionary
tournament_dict = {
'name' : name,
'modality' : modality,
'password' : password,
'classification': classification,
'deletion_date' : formatted_deletion_date}
# Create a json dump with the object
tournament_data = str.encode(json.dumps(tournament_dict))
# Store the data in the transaction object
tournament_path = self.client.create(
self.path + "/t", value=tournament_data, sequence=True)
return tournament_path
def __create_player(self, transaction, tournament_path, player):
"""Create a new player zNode
Creates a new node in the zookeeper database that corresponds
to a player. The information stored per zNode is a string
containing a json structure with the following format:
Player
{
'name' : <string[16]>,
'points' : <integer>,
'disqualified' : <integer>,
'wins' : <integer>,
'losses' : <integer>
}
"""
player_dict = {
'name' : player,
'points' : 0,
'disqualified' : 0,
'wins' : 0,
'losses' : 0}
player_data = str.encode(json.dumps(player_dict))
transaction.create(tournament_path + "/p",
value=player_data, sequence=True)
	def __delete_tournament(self, tournament_path):
		"""Recursively delete a tournament zNode and all its children.

		Deletion is idempotent: a missing node is silently ignored.
		Always returns 0.
		"""
		try:
			self.client.delete(tournament_path, recursive=True)
		except NoNodeError:
			# Node already gone -- nothing to do.
			pass
		return 0
def set_data_path(self, path):
"""Set the path used to create the nodes."""
self.path = path
self.client.ensure_path(self.path)
def create_tournament(self, name=None, modality=0, password=None,
players=None):
"""Create a new tournament with the players.
The zNode will have multiple children, each corresponding to a
player participating in the tournament.
On success returns the created tournament path.
"""
# Add a tournament node
tournament_path = self.__create_tournament(
name, modality, password, len(players))
# Spawn a transaction to add all the players
transaction = self.client.transaction()
# Add all the players to transaction
for player in players:
self.__create_player(transaction, tournament_path,
player)
# Commit all the changes to ZooKeeper
transaction_results = transaction.commit()
# Verify all players were created correctly
for result in transaction_results:
if type(result) not in [str,bytes]:
# Error, delete tournament node
self.client.delete(tournament_path)
# Return error
raise DataApiException(
DataApiErrors.ZOOKEEPER_ERROR)
return tournament_path
	def delete_tournament(self, tournament_id, password):
		"""Delete an existing tournament after validating its password.

		:param tournament_id: numeric id string of the tournament node.
		:param password: admin password stored in the tournament node.
		:returns: 0 on success.
		:raises DataApiException: PASSWORD_MISMATCH on a wrong password,
			LOCK_TIMEOUT when the node lock is not acquired in 0.5s.
		"""
		tournament_path = self.__get_tournament_path(tournament_id)
		# Lock the node to ensure nobody is modifying it
		lock = self.client.Lock(tournament_path, self.client.client_id)
		try:
			lock.acquire(timeout=0.5)
			# Get data from the node
			data, stats = self.client.get(tournament_path)
			# Decode it
			data_dict = json.loads(data.decode())
			# Verify password matches
			if data_dict['password'] != password:
				# Raise error
				raise DataApiException(
					DataApiErrors.PASSWORD_MISMATCH)
			else:
				# Delete node (the recursive delete also removes the
				# lock's child node under the tournament).
				self.__delete_tournament(tournament_path)
		except LockTimeout as e:
			raise DataApiException(DataApiErrors.LOCK_TIMEOUT)
		finally:
			# Release lock regardless of outcome.
			# NOTE(review): on LockTimeout this releases an unheld
			# lock -- kazoo appears to tolerate that; confirm.
			lock.release()
		return 0
	def get_tournament(self, tournament_id):
		"""Return the tournament's current state under a read lock.

		:param tournament_id: numeric id string of the tournament node.
		:returns: a dictionary::

			{
			  'name'           : <str>,
			  'classification' : <str>,
			  'modality'       : <int>,
			  'version'        : <int>,   # zNode version for CAS updates
			  'players'        : [ {'name': <str>, 'points': <int>,
			                        'disqualified': <int>, 'wins': <int>,
			                        'losses': <int>}, ... ]
			}

		:raises DataApiException: EMPTY_NODE when the node holds no
			data, LOCK_TIMEOUT when the read lock is not acquired in
			0.5s.
		"""
		tournament_path = self.__get_tournament_path(tournament_id)
		lock = self.client.ReadLock(tournament_path,
			self.client.client_id)
		data = dict()
		try:
			lock.acquire(timeout=0.5)
			# Get tournament data
			t_data, t_stats = self.client.get(tournament_path)
			if not t_data:
				raise DataApiException(DataApiErrors.EMPTY_NODE)
			# Unpack json
			t_json = json.loads(t_data.decode())
			# Get select fields
			data['name'] = t_json['name']
			data['classification'] = t_json['classification']
			data['modality'] = t_json['modality']
			# The node version lets callers do optimistic updates.
			data['version'] = t_stats.version
			# Get info about children
			player_ids = self.client.get_children(
				tournament_path)
			players_data = []
			# NOTE(review): non-raw string; \d works but should be
			# r"^p\d{10}$" to avoid invalid-escape warnings.
			player_id_regex = re.compile("^p\d{10}$")
			# Iterate over children
			for p_id in player_ids:
				# Skip read/write lock nodes, which also appear
				# as children of the tournament.
				if not player_id_regex.match(p_id):
					continue
				# Get data
				p_data, _ = self.client.get(tournament_path
					+ "/" + p_id)
				# Unpack json
				p_json = json.loads(p_data.decode())
				# Append to data dictionary
				players_data.append(p_json)
			data['players'] = players_data
		except LockTimeout as e:
			# Could not acquire lock
			raise DataApiException(DataApiErrors.LOCK_TIMEOUT)
		finally:
			# Release lock regardless of outcome
			lock.release()
		return data
def update_tournament(self, tournament_id, version, classification,
                      password):
    """Update the classification of a tournament.

    Validates every classification value against MATCH_STATUS, then,
    under an exclusive lock, checks the znode version (optimistic
    concurrency), the classification length and the password before
    writing the updated classification back to the tournament znode.

    Raises DataApiException with CLASSIFICATION_VALUE, EMPTY_NODE,
    VERSION_MISMATCH, CLASSIFICATION_LENGTH, PASSWORD_MISMATCH or
    LOCK_TIMEOUT on the corresponding failure; returns True on success.
    """
    # Verify classification values
    for value in classification:
        if value not in self.MATCH_STATUS.values():
            raise DataApiException(
                DataApiErrors.CLASSIFICATION_VALUE)
    # Get tournament path
    tournament_path = self.__get_tournament_path(tournament_id)
    # Lock zNode (exclusive write lock)
    lock = self.client.Lock(tournament_path, self.client.client_id)
    try:
        lock.acquire(timeout=0.5)
        # Get tournament data
        t_data, t_stats = self.client.get(tournament_path)
        if not t_data:
            raise DataApiException(DataApiErrors.EMPTY_NODE)
        # Verify node version (optimistic-concurrency check against
        # the caller-supplied version)
        if t_stats.version != version:
            raise DataApiException(
                DataApiErrors.VERSION_MISMATCH)
        # Subtract lock children from count
        # NOTE(review): assumes exactly one lock znode is present among
        # the children -- confirm this holds for the client in use.
        t_child_nolock = t_stats.children_count - 1
        # Verify classification string length (Players - 1)
        if len(classification) != t_child_nolock - 1:
            raise DataApiException(
                DataApiErrors.CLASSIFICATION_LENGTH)
        # Verify password
        t_json = json.loads(t_data.decode())
        if t_json['password'] != password:
            raise DataApiException(
                DataApiErrors.PASSWORD_MISMATCH)
        # Update classification in dict
        t_json['classification'] = classification
        # Dump to json format
        t_modified_data = str.encode(json.dumps(t_json))
        # Update zNode
        self.client.set(tournament_path, value=t_modified_data)
        # TODO: Update players
    except LockTimeout as e:
        raise DataApiException(DataApiErrors.LOCK_TIMEOUT)
    finally:
        # Release lock regardless of outcome
        lock.release()
    return True
def get_tournament_list(self):
    """Get a list of the available tournaments.

    Returns a list of dictionaries, one per tournament znode that
    carries data:

        data = [{
            'name' : <string>,
            'id' : <string>,
            'players' : <integer>,
        }]
    """
    tournaments = []
    for node_name in self.client.get_children(self.path):
        node_data, node_stats = self.client.get(
            "/".join([self.path, node_name]))
        # Skip tournaments whose znode has no payload.
        if not node_data:
            continue
        payload = json.loads(node_data.decode())
        tournaments.append({
            'name': payload['name'],
            # Node names look like "t0000000001": strip the prefix and
            # leading zeros to recover the numeric id (kept as a string).
            'id': str(int(node_name.lstrip('/').lstrip('t'))),
            # NOTE(review): children_count may also include lock znodes,
            # so this can overstate the player count -- confirm.
            'players': node_stats.children_count,
        })
    return tournaments
| 28.099707 | 68 | 0.704863 | 1,236 | 9,582 | 5.309871 | 0.188511 | 0.068261 | 0.056986 | 0.014627 | 0.305501 | 0.22505 | 0.206918 | 0.170197 | 0.170197 | 0.146884 | 0 | 0.005325 | 0.196514 | 9,582 | 340 | 69 | 28.182353 | 0.847123 | 0.418389 | 0 | 0.317365 | 0 | 0 | 0.043478 | 0 | 0 | 0 | 0 | 0.002941 | 0 | 1 | 0.065868 | false | 0.065868 | 0.047904 | 0 | 0.173653 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0649416e59c002f159dae001d775831978782720 | 3,935 | py | Python | final_16042020/final_alessandro-reggiani.py | allereggiani22/programming_alessandro_reggiani | 4b8f9ee1805a29cc6cd3a8b8d93bda5a7efb97ea | [
"MIT"
] | 1 | 2020-01-13T13:46:48.000Z | 2020-01-13T13:46:48.000Z | final_16042020/final_alessandro-reggiani.py | allereggiani22/programming_alessandro_reggiani | 4b8f9ee1805a29cc6cd3a8b8d93bda5a7efb97ea | [
"MIT"
] | null | null | null | final_16042020/final_alessandro-reggiani.py | allereggiani22/programming_alessandro_reggiani | 4b8f9ee1805a29cc6cd3a8b8d93bda5a7efb97ea | [
"MIT"
] | null | null | null | """
Pseudocode:
-import s1, s2, BLOSUM62 from input_data.py
-def function that takes as input s1, s2, BLOSUM and gap value to return F (SW) and P matrices
-defining F and P and filling with 0s, no need to initialise first row/column
-iterate SW algorithm
-return F,P
-def function that takes F, P, s1 and s2 and returns the best local alignment(s) and corresponding score(s)
-assign variables to store best score and relative indexes
-initialise empty strings to fill during the traceback
-move backwards from the best score to the first 0 met, using P matrix to insert the correct residue/gap
-return the two aligned sequences and the score
-def printing function to print results in a nice way
Invoke the 3 functions
"""
#This program is thought for a Python 2.7 interpreter
from input_data import *
def score_traceback(s1, s2, matrix, gap):
    """Build the Smith-Waterman score and traceback matrices.

    Arguments:
        s1, s2 -- the two sequences (s1 runs along the top of the
                  matrix, s2 down the left side)
        matrix -- substitution scores keyed by concatenated residue
                  pairs, e.g. matrix["AG"]
        gap    -- linear gap penalty (subtracted for up/left moves)

    Returns a tuple (F, P): F holds the local-alignment scores (clamped
    at zero), P holds the move that produced each score ("D" diagonal,
    "U" up, "L" left; 0 where no positive path exists).
    """
    cols = len(s1) + 1
    rows = len(s2) + 1
    scores = [[0] * cols for _ in range(rows)]
    moves = [[0] * cols for _ in range(rows)]
    # First row/column stay 0: local alignments may start anywhere.
    for j in range(1, rows):
        for i in range(1, cols):
            up = scores[j - 1][i] - gap
            left = scores[j][i - 1] - gap
            diag = scores[j - 1][i - 1] + matrix[s1[i - 1] + s2[j - 1]]
            best = max(up, left, diag, 0)  # clamp negatives to 0
            scores[j][i] = best
            # Record the direction; ties are broken D, then U, then L.
            if best == diag:
                moves[j][i] = "D"
            elif best == up:
                moves[j][i] = "U"
            elif best == left:
                moves[j][i] = "L"
    return scores, moves
def local_align(F, P, s1, s2):
    """Recover the best local alignment from the SW matrices.

    Scans F for the highest score (the first occurrence in row-major
    order wins), then follows P backwards until a zero score is
    reached, emitting gaps for "U"/"L" moves and residue pairs for
    "D" moves.

    Returns a tuple (aligned_s1, aligned_s2, best_score).
    """
    cols = len(s1) + 1
    rows = len(s2) + 1
    # Locate the cell holding the best score; strict '>' keeps the
    # first maximum found.
    best = 0
    col = 0
    row = 0
    for j in range(1, rows):
        for i in range(1, cols):
            if F[j][i] > best:
                best = F[j][i]
                col = i
                row = j
    # Walk back until the first zero, collecting residues in reverse.
    rev1 = []
    rev2 = []
    while F[row][col] > 0:
        move = P[row][col]
        if move == "L":
            rev1.append(s1[col - 1])
            rev2.append("-")  # gap in s2: moved along s1 only
            col -= 1
        elif move == "U":
            rev2.append(s2[row - 1])
            rev1.append("-")  # gap in s1: moved along s2 only
            row -= 1
        elif move == "D":
            rev1.append(s1[col - 1])
            rev2.append(s2[row - 1])  # match/mismatch, no gap
            col -= 1
            row -= 1
    return ("".join(reversed(rev1)), "".join(reversed(rev2)), best)
def print_results(tuple):  # this is useful to present results in an organized way
    "This function prints line by line the elements in a tuple with length 3, in which the last is the score"
    # NOTE(review): the parameter name shadows the builtin `tuple`;
    # renaming it (e.g. to `result`) would be cleaner, left as-is here.
    # Python 2 print statements -- this module targets Python 2.7.
    for i in range(2):
        print tuple[i]
    print "Score: "+str(tuple[2])
# Run the pipeline: build the matrices, trace the alignment, print it.
# NOTE(review): the module docstring says BLOSUM62 is imported from
# input_data, but BLOSUM52 is used here -- confirm which name
# input_data actually defines.
matrices=score_traceback(seq1,seq2,BLOSUM52,2)
aln=local_align(matrices[0],matrices[1],seq1,seq2)
print_results(aln)
| 43.241758 | 117 | 0.613723 | 679 | 3,935 | 3.530191 | 0.265096 | 0.009178 | 0.008761 | 0.018356 | 0.177722 | 0.147685 | 0.125156 | 0.095953 | 0.050063 | 0.021694 | 0 | 0.032235 | 0.29047 | 3,935 | 90 | 118 | 43.722222 | 0.826289 | 0.242948 | 0 | 0.285714 | 0 | 0.017857 | 0.066856 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.017857 | null | null | 0.089286 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
064b361f15c8216c7e88077e123c596203e3f817 | 555 | py | Python | bookbar/auth_app/urls.py | trenev/bookbar | bccfdf52293f7cf105d6768bb2a1a643c9a58bb7 | [
"MIT"
] | null | null | null | bookbar/auth_app/urls.py | trenev/bookbar | bccfdf52293f7cf105d6768bb2a1a643c9a58bb7 | [
"MIT"
] | null | null | null | bookbar/auth_app/urls.py | trenev/bookbar | bccfdf52293f7cf105d6768bb2a1a643c9a58bb7 | [
"MIT"
] | null | null | null | from django.urls import path
from bookbar.auth_app.views import UserRegistrationView, UserLoginView, logout_user, ChangeUserPasswordView, \
ChangeUserEmailView
# URL routes for the auth app.
# NOTE(review): the route names contain spaces (e.g. 'register user');
# this is valid for reverse() lookups but unconventional -- most
# projects use dashes or underscores.
urlpatterns = (
    path('register/', UserRegistrationView.as_view(), name='register user'),
    path('login/', UserLoginView.as_view(), name='login user'),
    path('logout/', logout_user, name='logout user'),
    # Email change is bound to a specific user primary key.
    path('change-email/<int:pk>/', ChangeUserEmailView.as_view(), name='change email'),
    path('change-password/', ChangeUserPasswordView.as_view(), name='change password'),
)
| 42.692308 | 110 | 0.735135 | 61 | 555 | 6.57377 | 0.42623 | 0.05985 | 0.099751 | 0.079801 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.113514 | 555 | 12 | 111 | 46.25 | 0.815041 | 0 | 0 | 0 | 0 | 0 | 0.218018 | 0.03964 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.2 | 0.2 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
064b5656f8557e5883dd8696145c8db2cd075ae8 | 4,604 | py | Python | Data Visualization/School/code4.py | ALDOR99/Python | a76f37bb3e573cd3fdcfc19f4f73494cafa9140e | [
"MIT"
] | 2 | 2021-05-27T19:13:02.000Z | 2021-06-02T13:26:35.000Z | Data Visualization/School/code4.py | ALDOR99/Python | a76f37bb3e573cd3fdcfc19f4f73494cafa9140e | [
"MIT"
] | null | null | null | Data Visualization/School/code4.py | ALDOR99/Python | a76f37bb3e573cd3fdcfc19f4f73494cafa9140e | [
"MIT"
] | 1 | 2021-06-07T18:17:35.000Z | 2021-06-07T18:17:35.000Z | # -*- coding: utf-8 -*-
"""
Created on Tue Jun 15 13:36:53 2021
@author: ali_d
"""
#code3
import numpy as np
import pandas as pd
# plotly
from plotly.offline import init_notebook_mode, iplot, plot
import plotly as py
init_notebook_mode(connected=True)
import plotly.graph_objs as go
from wordcloud import WordCloud
# matplotlib
import matplotlib.pyplot as plt
from plotly import tools
# Load the university-ranking datasets (CSV files are expected in the
# current working directory).
data = pd.read_csv("cwurData.csv")
#data1 = pd.read_csv("education_expenditure_supplementary_data.csv")
data2 = pd.read_csv("educational_attainment_supplementary_data.csv")
data3 = pd.read_csv("school_and_country_table.csv")
data4 = pd.read_csv("shanghaiData.csv")
data5 = pd.read_csv("timesData.csv")
#%% Box Plots
# data preparation: keep only the 2015 rows of the Times dataset
x2015 = data5[data5.year == 2015]
trace0 = go.Box(
    y=x2015.total_score,
    name = 'total score of universities in 2015',
    marker = dict(
        color = 'rgb(12, 12, 140)',
    )
)
trace1 = go.Box(
    y=x2015.research,
    name = 'research of universities in 2015',
    marker = dict(
        color = 'rgb(12, 128, 128)',
    )
)
# NOTE(review): this rebinding overwrites the cwurData DataFrame loaded
# above; rename one of the two if that DataFrame is needed later.
data = [trace0, trace1]
plot(data)
#%% Scatter Matrix Plots
import plotly.figure_factory as ff
dataframe = data5[data5.year == 2015]
datta2015 = dataframe.loc[:,["research","international", "total_score"]]
datta2015["index"] = np.arange(1,len(datta2015)+1)
fig = ff.create_scatterplotmatrix(datta2015, diag='box', index='index',colormap='Portland',
                                  colormap_type='cat',
                                  height=700, width=700)
plot(fig)
#%%
# Same scatter matrix as above, but with the default colormap
# (the explicit colormap argument is commented out).
import plotly.figure_factory as ff
dataframe = data5[data5.year == 2015]
datta2015 = dataframe.loc[:,["research","international", "total_score"]]
datta2015["index"] = np.arange(1,len(datta2015)+1)
fig = ff.create_scatterplotmatrix(datta2015, diag='box', index='index',#colormap='Portland',
                                  colormap_type='cat',
                                  height=700, width=700)
plot(fig)
#%% Inset Plots
# first line plot: teaching vs world rank on the main axes
trace1 = go.Scatter(
    x=dataframe.world_rank,
    y=dataframe.teaching,
    name = "teaching",
    marker = dict(color = 'rgba(16, 112, 2, 0.8)'),
)
# second line plot: income vs world rank, drawn on inset axes x2/y2
trace2 = go.Scatter(
    x=dataframe.world_rank,
    y=dataframe.income,
    xaxis='x2',
    yaxis='y2',
    name = "income",
    marker = dict(color = 'rgba(160, 112, 20, 0.8)'),
)
data = [trace1, trace2]
layout = go.Layout(
    # Inset occupies the top-right corner of the figure.
    xaxis2=dict(
        domain=[0.6, 0.95],
        anchor='y2',
    ),
    yaxis2=dict(
        domain=[0.6, 0.95],
        anchor='x2',
    ),
    title = 'Income and Teaching vs World Rank of Universities'
)
fig = go.Figure(data=data, layout=layout)
plot(fig)
#%%
# Teaching vs world rank on the main axes, citations in an inset.
trace1 = go.Scatter(
    x=dataframe.world_rank,
    y=dataframe.teaching,
    name = "teaching",
    marker = dict(color = 'rgba(199, 142, 134, 0.8)'),
)
# second line plot: citations vs world rank, on inset axes x2/y2
trace2 = go.Scatter(
    x=dataframe.world_rank,
    y=dataframe.citations,
    xaxis='x2',
    yaxis='y2',
    # Fix copy-paste label: this trace plots citations, not income.
    name = "citations",
    marker = dict(color = 'rgba(1, 122, 112, 0.8)'),
)
data = [trace1, trace2]
layout = go.Layout(
    # Inset occupies the top-right corner of the figure.
    xaxis2=dict(
        domain=[0.6, 0.95],
        anchor='y2',
    ),
    yaxis2=dict(
        domain=[0.6, 0.95],
        anchor='x2',
    ),
    # Title updated to match the plotted series.
    title = 'Citations and Teaching vs World Rank of Universities'
)
fig = go.Figure(data=data, layout=layout)
plot(fig)
#%% 3D Scatter Plot with Colorscaling
# create trace 1 that is 3d scatter: world rank vs research vs citations
trace1 = go.Scatter3d(
    x=dataframe.world_rank,
    y=dataframe.research,
    z=dataframe.citations,
    mode='markers',
    marker=dict(
        size=10,
        color='rgb(255,0,0)', # set color to an array/list of desired values
    )
)
data = [trace1]
layout = go.Layout(
    # Zero margins so the 3d scene fills the whole figure.
    margin=dict(
        l=0,
        r=0,
        b=0,
        t=0
    )
)
fig = go.Figure(data=data, layout=layout)
plot(fig)
#%%
# Same 3d scatter, now plotting world rank vs total score vs research.
trace1 = go.Scatter3d(
    x=dataframe.world_rank,
    y=dataframe.total_score,
    z=dataframe.research,
    mode='markers',
    marker=dict(
        size=10,
        color='rgb(255,0,0)', # set color to an array/list of desired values
    )
)
data = [trace1]
layout = go.Layout(
    margin=dict(
        l=0,
        r=0,
        b=0,
        t=0
    )
)
fig = go.Figure(data=data, layout=layout)
plot(fig)
| 14.75641 | 92 | 0.572763 | 574 | 4,604 | 4.529617 | 0.290941 | 0.030769 | 0.020769 | 0.043846 | 0.668462 | 0.668462 | 0.668462 | 0.668462 | 0.668462 | 0.602308 | 0 | 0.07156 | 0.292789 | 4,604 | 311 | 93 | 14.803859 | 0.726966 | 0.105995 | 0 | 0.62069 | 0 | 0 | 0.145473 | 0.01806 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.068966 | 0 | 0.068966 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
064c2f71cbe050b6b37e718ae911c458233562a6 | 479 | py | Python | tests/settings.py | Francislley/CrudDajndo2017.2 | 355a442670d28cd91526fd0127b7a01ce733cf2b | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | tests/settings.py | Francislley/CrudDajndo2017.2 | 355a442670d28cd91526fd0127b7a01ce733cf2b | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | tests/settings.py | Francislley/CrudDajndo2017.2 | 355a442670d28cd91526fd0127b7a01ce733cf2b | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | SECRET_KEY = 'testing'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.staticfiles',
'django.contrib.sessions',
'cruditor',
]
# We really don't rely on the urlconf but we need to set a path anyway.
ROOT_URLCONF = 'django.contrib.staticfiles.urls'
STATIC_URL = '/static/'
| 20.826087 | 71 | 0.643006 | 55 | 479 | 5.527273 | 0.727273 | 0.256579 | 0.157895 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002625 | 0.204593 | 479 | 22 | 72 | 21.772727 | 0.795276 | 0.14405 | 0 | 0 | 0 | 0 | 0.539216 | 0.32598 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0657e352b713da508a397e3b0b9f84963e91c583 | 301 | py | Python | Old/src/com/up/oop3.py | exchris/Pythonlearn | 174f38a86cf1c85d6fc099005aab3568e7549cd0 | [
"MIT"
] | null | null | null | Old/src/com/up/oop3.py | exchris/Pythonlearn | 174f38a86cf1c85d6fc099005aab3568e7549cd0 | [
"MIT"
] | 1 | 2018-11-27T09:58:54.000Z | 2018-11-27T09:58:54.000Z | Old/src/com/up/oop3.py | exchris/pythonlearn | 174f38a86cf1c85d6fc099005aab3568e7549cd0 | [
"MIT"
] | null | null | null | # -*- coding:UTF-8 -*-
class Person(object):
    """Simple value object holding a person's name, gender, birth and job."""

    def __init__(self, name, gender, birth, job):
        # Store every constructor argument as a plain attribute.
        self.name, self.gender, self.birth, self.job = name, gender, birth, job
# Demo: build a Person and show two of its attributes.
# Python 2 print statements -- this script targets a Python 2 interpreter.
xiaoming = Person('Xiao Ming', 'Male', '1990-1-1', job='Student')
print xiaoming.name
print xiaoming.job
065a36ba274adc42303ce240127779695fadeb05 | 899 | py | Python | web/mainApp/migrations/0001_initial.py | njw1204/golf-online-judge | adb22653c457e97d5a239aa562725144235a2ab8 | [
"MIT"
] | 3 | 2019-07-22T20:00:07.000Z | 2021-12-17T17:54:09.000Z | web/mainApp/migrations/0001_initial.py | njw1204/golf-online-judge | adb22653c457e97d5a239aa562725144235a2ab8 | [
"MIT"
] | null | null | null | web/mainApp/migrations/0001_initial.py | njw1204/golf-online-judge | adb22653c457e97d5a239aa562725144235a2ab8 | [
"MIT"
] | 2 | 2019-06-18T18:41:10.000Z | 2021-04-15T09:57:31.000Z | # Generated by Django 2.1.8 on 2019-04-28 10:47
from django.db import migrations, models
import mainApp.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='ProblemPost',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('body', models.TextField(max_length=100000)),
('example_in', models.TextField(max_length=1000)),
('example_out', models.TextField(max_length=1000)),
('time_limit', models.PositiveSmallIntegerField(validators=[mainApp.models.isValidTimeLimit])),
('created_date', models.DateTimeField(auto_now_add=True)),
],
),
]
| 32.107143 | 114 | 0.606229 | 90 | 899 | 5.911111 | 0.644444 | 0.067669 | 0.101504 | 0.135338 | 0.105263 | 0 | 0 | 0 | 0 | 0 | 0 | 0.048558 | 0.266963 | 899 | 27 | 115 | 33.296296 | 0.758725 | 0.050056 | 0 | 0 | 1 | 0 | 0.078639 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
066a6ecaf81a95bd428e102c57d2580b02a5ba55 | 8,113 | py | Python | src/ostorlab/agent/message/proto/v3/asset/ip/v4/geolocation/geolocation_pb2.py | bbhunter/ostorlab | 968fe4e5b927c0cd159594c13b73f95b71150154 | [
"Apache-2.0"
] | 113 | 2022-02-21T09:30:14.000Z | 2022-03-31T21:54:26.000Z | src/ostorlab/agent/message/proto/v3/asset/ip/v4/geolocation/geolocation_pb2.py | bbhunter/ostorlab | 968fe4e5b927c0cd159594c13b73f95b71150154 | [
"Apache-2.0"
] | 2 | 2022-02-25T10:56:55.000Z | 2022-03-24T13:08:06.000Z | src/ostorlab/agent/message/proto/v3/asset/ip/v4/geolocation/geolocation_pb2.py | bbhunter/ostorlab | 968fe4e5b927c0cd159594c13b73f95b71150154 | [
"Apache-2.0"
] | 20 | 2022-02-28T14:25:04.000Z | 2022-03-30T23:01:11.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: v3/asset/ip/v4/geolocation/geolocation.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='v3/asset/ip/v4/geolocation/geolocation.proto',
package='v3.asset.ip.v4.geolocation',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n,v3/asset/ip/v4/geolocation/geolocation.proto\x12\x1av3.asset.ip.v4.geolocation\"\x94\x02\n\x07Message\x12\x0c\n\x04host\x18\x01 \x02(\t\x12\x0c\n\x04mask\x18\x02 \x01(\t\x12\x12\n\x07version\x18\x03 \x02(\x05:\x01\x34\x12\x11\n\tcontinent\x18\x05 \x01(\t\x12\x16\n\x0e\x63ontinent_code\x18\x06 \x01(\t\x12\x0f\n\x07\x63ountry\x18\x07 \x01(\t\x12\x14\n\x0c\x63ountry_code\x18\x08 \x01(\t\x12\x0e\n\x06region\x18\t \x01(\t\x12\x13\n\x0bregion_name\x18\n \x01(\t\x12\x0c\n\x04\x63ity\x18\x0b \x01(\t\x12\x0b\n\x03zip\x18\x0c \x01(\t\x12\x10\n\x08latitude\x18\r \x01(\x02\x12\x11\n\tlongitude\x18\x0e \x01(\x02\x12\x10\n\x08timezone\x18\x0f \x01(\t\x12\x10\n\x08\x64istrict\x18\x10 \x01(\t')
)
_MESSAGE = _descriptor.Descriptor(
name='Message',
full_name='v3.asset.ip.v4.geolocation.Message',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='host', full_name='v3.asset.ip.v4.geolocation.Message.host', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mask', full_name='v3.asset.ip.v4.geolocation.Message.mask', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='v3.asset.ip.v4.geolocation.Message.version', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=True, default_value=4,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='continent', full_name='v3.asset.ip.v4.geolocation.Message.continent', index=3,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='continent_code', full_name='v3.asset.ip.v4.geolocation.Message.continent_code', index=4,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='country', full_name='v3.asset.ip.v4.geolocation.Message.country', index=5,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='country_code', full_name='v3.asset.ip.v4.geolocation.Message.country_code', index=6,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='region', full_name='v3.asset.ip.v4.geolocation.Message.region', index=7,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='region_name', full_name='v3.asset.ip.v4.geolocation.Message.region_name', index=8,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='city', full_name='v3.asset.ip.v4.geolocation.Message.city', index=9,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='zip', full_name='v3.asset.ip.v4.geolocation.Message.zip', index=10,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='latitude', full_name='v3.asset.ip.v4.geolocation.Message.latitude', index=11,
number=13, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='longitude', full_name='v3.asset.ip.v4.geolocation.Message.longitude', index=12,
number=14, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timezone', full_name='v3.asset.ip.v4.geolocation.Message.timezone', index=13,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='district', full_name='v3.asset.ip.v4.geolocation.Message.district', index=14,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=77,
serialized_end=353,
)
DESCRIPTOR.message_types_by_name['Message'] = _MESSAGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Message = _reflection.GeneratedProtocolMessageType('Message', (_message.Message,), dict(
DESCRIPTOR = _MESSAGE,
__module__ = 'v3.asset.ip.v4.geolocation.geolocation_pb2'
# @@protoc_insertion_point(class_scope:v3.asset.ip.v4.geolocation.Message)
))
_sym_db.RegisterMessage(Message)
# @@protoc_insertion_point(module_scope)
| 48.291667 | 710 | 0.72957 | 1,147 | 8,113 | 4.940715 | 0.139494 | 0.064937 | 0.036527 | 0.081172 | 0.68943 | 0.677784 | 0.662961 | 0.642845 | 0.577554 | 0.529204 | 0 | 0.048832 | 0.129175 | 8,113 | 167 | 711 | 48.580838 | 0.753291 | 0.031431 | 0 | 0.554054 | 1 | 0.006757 | 0.214722 | 0.184794 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.033784 | 0 | 0.033784 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
06700b590dcc47592c65834acabe47782bd4ec63 | 2,383 | py | Python | world/dominion/migrations/0031_auto_20180804_2115.py | stesla/arxcode | a0ebf7c4d310de8c1980a8ba2a48948a68bb5a0a | [
"MIT"
] | 5 | 2019-03-16T08:26:53.000Z | 2019-11-27T15:42:16.000Z | world/dominion/migrations/0031_auto_20180804_2115.py | stesla/arxcode | a0ebf7c4d310de8c1980a8ba2a48948a68bb5a0a | [
"MIT"
] | 7 | 2018-09-29T05:08:15.000Z | 2021-06-10T21:35:32.000Z | world/dominion/migrations/0031_auto_20180804_2115.py | stesla/arxcode | a0ebf7c4d310de8c1980a8ba2a48948a68bb5a0a | [
"MIT"
] | 7 | 2018-09-19T21:11:29.000Z | 2019-11-19T12:46:14.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-08-04 21:15
from __future__ import unicode_literals
from django.db import migrations, models
def convert_modifiers_to_influence(apps, schema_editor):
from django.db.models import F
Organization = apps.get_model('dominion', 'Organization')
Organization.objects.update(economic_influence=3000 * F('economic_modifier') * F('economic_modifier'),
social_influence=3000 * F('social_modifier') * F('social_modifier'),
military_influence=3000 * F('military_modifier') * F('military_modifier'))
class Migration(migrations.Migration):
dependencies = [
('dominion', '0030_auto_20180804_1530'),
]
operations = [
migrations.AddField(
model_name='member',
name='investment_this_week',
field=models.SmallIntegerField(default=0),
),
migrations.AddField(
model_name='member',
name='investment_total',
field=models.SmallIntegerField(default=0),
),
migrations.AddField(
model_name='organization',
name='economic_influence',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='organization',
name='military_influence',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='organization',
name='social_influence',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='organization',
name='base_support_value',
field=models.SmallIntegerField(default=5),
),
migrations.AlterField(
model_name='organization',
name='member_support_multiplier',
field=models.SmallIntegerField(default=5),
),
migrations.RunPython(convert_modifiers_to_influence),
migrations.RemoveField(
model_name='organization',
name='economic_modifier',
),
migrations.RemoveField(
model_name='organization',
name='military_modifier',
),
migrations.RemoveField(
model_name='organization',
name='social_modifier',
),
]
| 33.097222 | 106 | 0.596307 | 211 | 2,383 | 6.507109 | 0.336493 | 0.06555 | 0.12236 | 0.145666 | 0.571741 | 0.539694 | 0.388201 | 0.224326 | 0.212673 | 0.120903 | 0 | 0.031026 | 0.296685 | 2,383 | 71 | 107 | 33.56338 | 0.788186 | 0.028535 | 0 | 0.596774 | 1 | 0 | 0.189014 | 0.020761 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016129 | false | 0 | 0.048387 | 0 | 0.112903 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0670c5a5fe9ae16928e3705c60cd1a58c8a4c92a | 1,490 | py | Python | board/migrations/0001_initial.py | ev350/webchess | 50a23a47f03de4cab33767ac8cc23f743d354fa5 | [
"MIT"
] | null | null | null | board/migrations/0001_initial.py | ev350/webchess | 50a23a47f03de4cab33767ac8cc23f743d354fa5 | [
"MIT"
] | 5 | 2018-05-12T17:29:11.000Z | 2018-06-23T14:22:24.000Z | board/migrations/0001_initial.py | ev350/webchess | 50a23a47f03de4cab33767ac8cc23f743d354fa5 | [
"MIT"
] | null | null | null | # Generated by Django 2.0.5 on 2018-05-12 23:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Board',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('fen', models.CharField(default='rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1', max_length=256)),
('date_started', models.DateTimeField(auto_now_add=True)),
('date_ended', models.DateTimeField(null=True)),
('is_won', models.NullBooleanField()),
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Move',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('to_position', models.CharField(max_length=5)),
('created', models.DateTimeField(auto_now_add=True)),
('board', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='moves', to='board.Board')),
],
),
]
| 39.210526 | 130 | 0.614094 | 164 | 1,490 | 5.426829 | 0.45122 | 0.035955 | 0.047191 | 0.074157 | 0.370787 | 0.370787 | 0.296629 | 0.296629 | 0.296629 | 0.177528 | 0 | 0.022442 | 0.252349 | 1,490 | 37 | 131 | 40.27027 | 0.776481 | 0.030201 | 0 | 0.333333 | 1 | 0 | 0.106029 | 0.029799 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1 | 0 | 0.233333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0671e85cf1d1be82c5d6d702e01485edb93f61d2 | 1,987 | py | Python | openstack_exporter/BaseCollector.py | sapcc/openstack-exporter | d9472fcf5790bd02664bf57a890ca4d3eab73960 | [
"Apache-2.0"
] | null | null | null | openstack_exporter/BaseCollector.py | sapcc/openstack-exporter | d9472fcf5790bd02664bf57a890ca4d3eab73960 | [
"Apache-2.0"
] | 3 | 2020-10-22T14:49:22.000Z | 2022-03-23T13:53:51.000Z | openstack_exporter/BaseCollector.py | sapcc/openstack-exporter | d9472fcf5790bd02664bf57a890ca4d3eab73960 | [
"Apache-2.0"
] | null | null | null | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from abc import ABC, abstractmethod
import logging
import sys
import openstack
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
LOG = logging.getLogger('openstack_exporter.exporter')
openstack.enable_logging(debug=False, http_debug=False, stream=sys.stdout)
class BaseCollector(ABC):
    """Abstract base for OpenStack metric collectors.

    Stores the exporter configuration and opens an authenticated
    OpenStack connection; concrete collectors implement
    ``describe``/``collect`` for Prometheus.
    """

    def __init__(self, openstack_config):
        # Keep the full config; the region is read often enough to cache.
        self.config = openstack_config
        self.region = self.config['region']
        self.client = self._connect()

    def _connect(self):
        """Connect to the OpenStack Service."""
        auth_url = self.config['auth_url']
        LOG.debug("Connecting to Openstack API {}".format(auth_url))
        connect_kwargs = {
            'auth_url': auth_url,
            'username': self.config['username'],
            'password': self.config['password'],
            'user_domain_name': self.config['user_domain_name'],
            'project_domain_name': self.config['project_domain_name'],
            'project_name': self.config['project_name'],
            'region_name': self.region,
            'app_name': 'Openstack prometheus exporter',
            'app_version': '1.0',
        }
        conn = openstack.connect(**connect_kwargs)
        LOG.debug("Connected to OpenStack {}".format(auth_url))
        return conn

    @abstractmethod
    def describe(self):
        """Yield metric descriptions; must be implemented by subclasses."""

    @abstractmethod
    def collect(self):
        """Yield collected metrics; must be implemented by subclasses."""
| 31.539683 | 78 | 0.667338 | 239 | 1,987 | 5.426778 | 0.468619 | 0.077101 | 0.032382 | 0.039322 | 0.035466 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005952 | 0.239054 | 1,987 | 62 | 79 | 32.048387 | 0.851852 | 0.304479 | 0 | 0.210526 | 0 | 0 | 0.15176 | 0.019795 | 0 | 0 | 0 | 0 | 0 | 1 | 0.105263 | false | 0.078947 | 0.131579 | 0 | 0.289474 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0679e338380b9ea02aa378e254fa20ca3cb8a03c | 224 | py | Python | Pacote download/ex012.py | SheilaLCC/Python-no-celular-android | e49b509da37714f0d05c153e947af468cdbef9fe | [
"MIT"
] | 2 | 2022-01-12T15:20:30.000Z | 2022-03-13T23:07:58.000Z | Pacote download/ex012.py | SheilaLCC/Python-no-celular-android | e49b509da37714f0d05c153e947af468cdbef9fe | [
"MIT"
] | null | null | null | Pacote download/ex012.py | SheilaLCC/Python-no-celular-android | e49b509da37714f0d05c153e947af468cdbef9fe | [
"MIT"
] | null | null | null | #crie um programa que leia o preço de um
#produto e mostre seu novo preço com 5%
#de desconto
p=float(input('Digite o preço do produto:R$ '))
pn=p-(p*5/100)
print ('O novo preço com desconto de 5% é:R$ {:.2f}'.format (pn))
| 32 | 65 | 0.683036 | 46 | 224 | 3.326087 | 0.608696 | 0.078431 | 0.156863 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.037634 | 0.169643 | 224 | 6 | 66 | 37.333333 | 0.784946 | 0.392857 | 0 | 0 | 0 | 0 | 0.545455 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
067a5e96c9e5811e431a9520c09c70d269fa0a4d | 339 | py | Python | developmentHub/contacts/admin.py | MariiaBel/developmentHub | 74474e5543a37350049dea8a304d9ced13d3660d | [
"BSD-3-Clause"
] | null | null | null | developmentHub/contacts/admin.py | MariiaBel/developmentHub | 74474e5543a37350049dea8a304d9ced13d3660d | [
"BSD-3-Clause"
] | null | null | null | developmentHub/contacts/admin.py | MariiaBel/developmentHub | 74474e5543a37350049dea8a304d9ced13d3660d | [
"BSD-3-Clause"
] | null | null | null | from django.contrib import admin
from .models import Contact
class ContactAdmin(admin.ModelAdmin):
    """Admin change-list configuration for Contact messages."""
    # Columns shown in the change-list view.
    list_display = ("name", "email", "subject", "body", "is_answered")
    # Enable the admin search box (original comment, in Russian: "add an
    # interface for searching post text").
    # NOTE(review): only "email" is searchable, not the message body —
    # confirm whether "body" should be included.
    search_fields = ("email",)
    # Placeholder for empty values; "-пусто-" is Russian for "-empty-".
    empty_value_display = "-пусто-"
admin.site.register(Contact, ContactAdmin)
067b57b5fcb9714446093a5cc9f8697160d3918e | 2,806 | py | Python | gitlab/gitlab_mgr/gitlabmgr.py | charlessoft/docker_script | 5c9af6fe7c87cf7dc272d3ce05b96e996fd5fc87 | [
"Apache-2.0"
] | null | null | null | gitlab/gitlab_mgr/gitlabmgr.py | charlessoft/docker_script | 5c9af6fe7c87cf7dc272d3ce05b96e996fd5fc87 | [
"Apache-2.0"
] | null | null | null | gitlab/gitlab_mgr/gitlabmgr.py | charlessoft/docker_script | 5c9af6fe7c87cf7dc272d3ce05b96e996fd5fc87 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import click
import gitlab
from colored import fg, bg, attr
from core import gitlabwrapper
from config import Config
from configparser import ConfigParser
from core.utils import read_config, print_green, print_red, create_gl
g_namespace_lst = {}
from prettytable import PrettyTable
# def read_config(ctx, param, value):
# cfg = ConfigParser()
# cfg.read('gitlab.cfg')
# dict = {}
# for item in cfg.sections():
# dict.setdefault(item, {})
# for key, value in cfg[item].items():
# # print(key,value)
# dict[item].setdefault(key, value)
# return dict
@click.group(invoke_without_command=False)
@click.option('-c', '--config', callback=read_config, type=click.File('r'),
              help='default gitlab.cfg')
@click.option('-s', '--section', type=str, default="global", help='use default global section')
@click.pass_context
def cli(ctx, **kwargs):
    """Root command group: parse the config file and stash the parsed
    options on the click context for the subcommands."""
    ctx.obj = {}
    ctx.obj.update(kwargs)
    # NOTE(review): with invoke_without_command=False this branch should be
    # unreachable, and `all` here resolves to the Python builtin rather than
    # any subcommand — confirm the intended fallback behavior.
    if ctx.invoked_subcommand is None and not ctx.obj.get('testing_mode'):
        ctx.invoke(all)
    return ctx
@cli.command()
@click.pass_context
def namespace_list(ctx, **kwargs):
    """Print all GitLab namespaces as an id/name table (green on success)."""
    gl = create_gl(ctx, **kwargs)
    if not gl:
        print_red("not subtaks")
        return
    table = PrettyTable(['id', 'namespace'])
    table.align["namespace"] = "l"  # left-align the namespace column
    for namespace in gl.get_namespace_list():
        table.add_row([namespace.id, namespace.name])
    print_green(table)
@cli.command()
@click.pass_context
def project_list(ctx, **kwargs):
    """Print every visible GitLab project as an id/name table."""
    gl = create_gl(ctx, **kwargs)
    table = PrettyTable(['id', 'name'])
    table.align["name"] = "l"  # left-align the name column
    for project in gl.get_project_list():
        table.add_row([project.id, project.name])
    print(table)
@cli.command()
@click.option('--namespace', type=str)
@click.option('--name', type=str)
@click.pass_context
def project_create(ctx, namespace, name, **kwargs):
    """Create a project named *name* under *namespace* and print its URLs."""
    try:
        gl = create_gl(ctx, **kwargs)
        print('创建工程')  # "creating project"
        table = PrettyTable(
            ['id', 'name_with_namespace', 'ssh_url_to_repo', 'http_url_to_repo'])
        project = gl.create_project(namespace, name)
        table.align['name_with_namespace'] = 'l'
        table.add_row([
            project.id,
            project.name_with_namespace,
            project.ssh_url_to_repo,
            project.http_url_to_repo,
        ])
        print(table)
    except Exception as e:
        print("创建失败, %s" % (e))  # "creation failed"
@cli.command()
@click.option('--id', type=str)
@click.pass_context
def project_remove(ctx, id, **kwargs):
    """Delete the project identified by --id.

    Prints a success or failure message; errors are reported, not raised.
    """
    # The parameter must stay named ``id`` (click derives it from the
    # --id option name), even though it shadows the builtin.
    try:
        print('删除工程')  # "deleting project"
        gl = create_gl(ctx, **kwargs)
        # Fix: the return value was bound to an unused local `res`.
        gl.del_project_by_id(id)
        print("删除成功")  # "deleted successfully"
    except Exception as e:
        print("删除失败, %s" % (e))  # "deletion failed"
# mytestaaa11112
# Script entry point: dispatch to the click command group.
if __name__ == '__main__':
    cli()
# 创建命名空间
# 获取命名空间
# 创建工程
# 删除工程
# 创建用户
| 25.743119 | 95 | 0.624733 | 383 | 2,806 | 4.407311 | 0.29765 | 0.037322 | 0.047393 | 0.05628 | 0.264218 | 0.171209 | 0.136848 | 0.074645 | 0.035545 | 0 | 0 | 0.002742 | 0.220242 | 2,806 | 108 | 96 | 25.981481 | 0.768739 | 0.163222 | 0 | 0.304348 | 0 | 0 | 0.11006 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.072464 | false | 0.072464 | 0.115942 | 0 | 0.202899 | 0.144928 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
068385efca2856ded22bcb8cce1f8b92076783ce | 6,879 | py | Python | app/main/forms.py | zhuojianyun/ubuntukgt | d04d253b485c03e21fb5626205f292df112dccbb | [
"MIT"
] | null | null | null | app/main/forms.py | zhuojianyun/ubuntukgt | d04d253b485c03e21fb5626205f292df112dccbb | [
"MIT"
] | null | null | null | app/main/forms.py | zhuojianyun/ubuntukgt | d04d253b485c03e21fb5626205f292df112dccbb | [
"MIT"
] | null | null | null | from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, BooleanField, SelectField,\
SubmitField,IntegerField,DateField
from wtforms.validators import DataRequired, Length, Email, Regexp
from wtforms import ValidationError
from flask_pagedown.fields import PageDownField
from ..models import Role, User
class NameForm(FlaskForm):
    """Minimal demo form asking for the user's name."""
    name = StringField('What is your name?', validators=[DataRequired()])
    submit = SubmitField('Submit')
class EditProfileForm(FlaskForm):
    """Profile fields a regular user may edit about themselves."""
    name = StringField('Real name', validators=[Length(0, 64)])
    location = StringField('Location', validators=[Length(0, 64)])
    about_me = TextAreaField('About me')
    submit = SubmitField('Submit')
class EditProfileAdminForm(FlaskForm):
    """Administrator profile editor.

    Unlike EditProfileForm, an admin may also change email, username,
    confirmation status and role.
    """
    email = StringField('Email', validators=[DataRequired(), Length(1, 64),
                                             Email()])
    username = StringField('Username', validators=[
        DataRequired(), Length(1, 64),
        Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
               'Usernames must have only letters, numbers, dots or '
               'underscores')])
    confirmed = BooleanField('Confirmed')
    role = SelectField('Role', coerce=int)  # choices populated in __init__
    name = StringField('Real name', validators=[Length(0, 64)])
    location = StringField('Location', validators=[Length(0, 64)])
    about_me = TextAreaField('About me')
    submit = SubmitField('Submit')

    def __init__(self, user, *args, **kwargs):
        """Populate the role choices and remember the user being edited."""
        super(EditProfileAdminForm, self).__init__(*args, **kwargs)
        self.role.choices = [(role.id, role.name)
                             for role in Role.query.order_by(Role.name).all()]
        self.user = user

    def validate_email(self, field):
        """Reject an email already registered to a different account."""
        if field.data != self.user.email and \
                User.query.filter_by(email=field.data).first():
            raise ValidationError('Email already registered.')

    def validate_username(self, field):
        """Reject a username already taken by a different account."""
        if field.data != self.user.username and \
                User.query.filter_by(username=field.data).first():
            raise ValidationError('Username already in use.')
class PostForm(FlaskForm):
    """Basic customer intake form.

    Field labels are in Chinese; English glosses are given per field.
    """
    #body = PageDownField("What's on your mind?", validators=[DataRequired()])
    name = StringField('客户姓名', validators=[Length(0, 64)])       # customer name
    phnumber = StringField('手机号码', validators=[Length(0, 64)])   # mobile number
    # NOTE(review): the label means "ID-card number" but the attribute is
    # called homeaddress — confirm which is intended.
    homeaddress = StringField('身份证号码', validators=[Length(0, 64)])
    #career = StringField('职业', validators=[Length(0, 64)])
    #company = StringField('公司名称', validators=[Length(0, 64)])
    jobaddress = StringField('常住地址', validators=[Length(0, 64)])  # residential address
    old = IntegerField('出生年月')                                    # birth year/month
    #families = IntegerField('家庭成员')
    #insurance = IntegerField('保单数')
    source = StringField('客户来源', validators=[Length(0, 64)])      # customer source
    #married = BooleanField('是否已婚')
    #bobies = BooleanField('是否有小孩')
    #liking = StringField('兴趣爱好', validators=[Length(0, 64)])
    connects = StringField('跟进建议')                                # follow-up advice
    #income = IntegerField('年收入')
    submit = SubmitField('Submit')
class PostForm1(FlaskForm):
    """Extended customer intake form with occupation and financial fields.

    Field labels are in Chinese; English glosses are given per field.
    """
    #body = PageDownField("What's on your mind?", validators=[DataRequired()])
    name = StringField('客户姓名', validators=[Length(0, 64)])       # customer name
    phnumber = StringField('手机号码', validators=[Length(0, 64)])   # mobile number
    # NOTE(review): label means "ID-card number" but the attribute is
    # homeaddress — confirm which is intended.
    homeaddress = StringField('身份证号码', validators=[Length(0, 64)])
    career = StringField('职业', validators=[Length(0, 64)])         # occupation
    company = StringField('公司名称', validators=[Length(0, 64)])    # company name
    #insurancese = IntegerField('保单数')
    jobaddress = StringField('常住地址', validators=[Length(0, 64)]) # residential address
    grade = StringField('客户等级', validators=[Length(0, 64)])      # customer tier
    gradeintro = StringField('客户经济状况描述', validators=[Length(0, 64)])  # financial summary
    old = IntegerField('出生年月')                                   # birth year/month
    source = StringField('客户来源', validators=[Length(0, 64)])     # customer source
    married = BooleanField('是否已婚')                               # married?
    bobies = BooleanField('是否有小孩')                              # has children?
    liking = StringField('兴趣爱好', validators=[Length(0, 64)])     # hobbies
    connects = StringField('跟进建议')                               # follow-up advice
    income = IntegerField('年收入')                                  # annual income
    submit = SubmitField('Submit')
class CommentForm(FlaskForm):
    """Full contact-log form recording one customer visit/conversation.

    Field labels are in Chinese; English glosses are given per field.
    """
    #body = PageDownField("What's on your mind?", validators=[DataRequired()])
    meetway = StringField('沟通方式', validators=[DataRequired()])   # contact method
    meetcase = StringField('沟通借口')                               # pretext for contact
    meetdate = DateField('拜访日期')                                 # visit date
    meetadress = StringField('拜访地址')                             # visit address
    #meettimese = IntegerField('拜访次数')
    beetway = StringField('沟通情况', validators=[DataRequired()])   # conversation summary
    newsabout = PageDownField("客户最新情况")                        # customer's latest situation
    thisthink = StringField('沟通保险观念')                          # insurance attitude discussed
    fation = StringField('客户反馈')                                 # customer feedback
    planbook = BooleanField('是否做计划书')                          # proposal prepared?
    badthing = PageDownField("拜访不足总结")                         # visit shortcomings
    donething = PageDownField("做了哪些准备")                        # preparation done
    nexttime = IntegerField('几天后再联系')                          # days until next contact
    todo = PageDownField("这个客户以后怎么跟")                       # follow-up plan
    submit = SubmitField('Submit')
class CommentForm1(FlaskForm):
    """Trimmed contact-log form (short variant of CommentForm).

    Field labels are in Chinese; English glosses are given per field.
    """
    #body = PageDownField("What's on your mind?", validators=[DataRequired()])
    meetway = StringField('沟通方式', validators=[DataRequired()])   # contact method
    meetcase = StringField('沟通借口', validators=[DataRequired()])  # pretext for contact
    meetdate = DateField('拜访日期')                                 # visit date
    #meetadress = StringField('拜访地址', validators=[DataRequired()])
    #meettimese = IntegerField('拜访次数')
    # NOTE(review): this rebinds `meetway` (already defined above), so the
    # first definition is lost at class creation. CommentForm names this
    # field `beetway` — likely a typo, but renaming changes the form's
    # field set, so it is only flagged here.
    meetway = StringField('沟通情况', validators=[DataRequired()])   # conversation summary
    #newsabout = PageDownField("客户最新情况", validators=[DataRequired()])
    #thisthink = StringField('沟通保险观念', validators=[DataRequired()])
    #fation = StringField('客户反馈', validators=[DataRequired()])
    #planbook = BooleanField('是否做计划书')
    #badthing = PageDownField("拜访不足总结", validators=[DataRequired()])
    #donething = PageDownField("做了哪些准备", validators=[DataRequired()])
    #nexttime = IntegerField('几天后再联系')
    #todo = PageDownField("这个客户以后怎么跟", validators=[DataRequired()])
    submit = SubmitField('Submit')
class InsuranceForm1(FlaskForm):
    """Minimal insurance-policy form: type, date, premium, coverage."""
    insurname = StringField('险种', validators=[DataRequired()])  # policy type
    toubaoriqi = IntegerField('投保日期')                          # application date
    baofei = IntegerField('保费')                                 # premium
    baoer = IntegerField('保额')                                  # coverage amount
    submit = SubmitField('Submit')
class InsuranceForm(FlaskForm):
    """Full insurance-policy form (InsuranceForm1 plus policy details).

    Field labels are in Chinese; English glosses are given per field.
    """
    insurname = StringField('险种', validators=[DataRequired()])    # policy type
    toubaoriqi = IntegerField('投保日期')                            # application date
    baofei = IntegerField('保费',validators=[DataRequired()])        # premium
    baoer = IntegerField('保额',validators=[DataRequired()])         # coverage amount
    baodanhao = StringField('投保单号')                              # application/policy number
    shengxiaoriqi = IntegerField('生效日期')                         # effective date
    baodanzhuangtai = StringField('保单状态')                        # policy status
    jiaofeifangshi = StringField('缴费方式')                         # payment method
    jiaofeiqi = IntegerField('缴费期')                               # payment term
    baoxianqijian = IntegerField('保险期间')                         # coverage period
    shixiaoriqi = IntegerField('失效日期')                           # lapse date
    banknumber = StringField('银行账户')                             # bank account
    bankname = StringField('缴费银行')                               # paying bank
    bxzeren = TextAreaField('保险责任')                              # policy liabilities
    tbname = StringField('投保人')                                   # policyholder
    bbname = StringField('被保人')                                   # insured person
    syname = StringField('收益人')                                   # beneficiary
    jjname = StringField('紧急联系人')                               # emergency contact
    submit = SubmitField('Submit')
| 42.99375 | 79 | 0.653002 | 649 | 6,879 | 6.893683 | 0.29584 | 0.113098 | 0.083594 | 0.093429 | 0.574207 | 0.536209 | 0.446357 | 0.372374 | 0.372374 | 0.372374 | 0 | 0.014151 | 0.198721 | 6,879 | 159 | 80 | 43.264151 | 0.797533 | 0.175462 | 0 | 0.321739 | 0 | 0 | 0.097103 | 0.004372 | 0 | 0 | 0 | 0.006289 | 0 | 1 | 0.026087 | false | 0 | 0.052174 | 0 | 0.86087 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0688abce3e934249ce6c97bba6efb253d1740e95 | 2,010 | py | Python | misc/main.py | vishalbelsare/interpolation.py | 116d144700a1c78b5ad86eee097d064610be8325 | [
"BSD-2-Clause"
] | 110 | 2015-03-16T05:40:06.000Z | 2022-03-17T22:32:51.000Z | misc/main.py | vishalbelsare/interpolation.py | 116d144700a1c78b5ad86eee097d064610be8325 | [
"BSD-2-Clause"
] | 70 | 2016-01-18T11:51:30.000Z | 2021-09-27T13:21:41.000Z | misc/main.py | vishalbelsare/interpolation.py | 116d144700a1c78b5ad86eee097d064610be8325 | [
"BSD-2-Clause"
] | 32 | 2016-06-15T16:27:21.000Z | 2022-03-24T16:33:16.000Z | if True:
import numpy as np
d = 3
K = 50
N = 10 ** 6
a = np.zeros(3)
b = np.ones(3)
orders = np.array([K for i in range(d)])
coeffs = np.random.random([k + 2 for k in orders])
points = np.random.random((N, d)) # each line is a vector
points_c = points.T.copy() # each column is a vector
vals = np.zeros(N)
print(points.max().max())
print(points.min().min())
import time
from alternative_implementations import *
from eval_cubic_splines_cython import vec_eval_cubic_spline_3 as rr
vec_eval_cubic_spline_3(a, b, orders, coeffs, points, vals) # warmup
vec_eval_cubic_spline_3_inlined(a, b, orders, coeffs, points, vals) # warmup
vec_eval_cubic_spline_3_inlined_columns(
a, b, orders, coeffs, points_c, vals
) # warmup
vec_eval_cubic_spline_3_kernel(a, b, orders, coeffs, points, vals) # warmup
vec_eval_cubic_spline_3_inlined_lesswork(orders, coeffs, points, vals, Ad, dAd)
# rr(a,b,orders,coeffs,points,vals,Ad,dAd)
rr(a, b, orders, coeffs, points, vals)
t1 = time.time()
vec_eval_cubic_spline_3(a, b, orders, coeffs, points, vals)
t2 = time.time()
vec_eval_cubic_spline_3_inlined(a, b, orders, coeffs, points, vals)
t3 = time.time()
vec_eval_cubic_spline_3_inlined_columns(a, b, orders, coeffs, points_c, vals)
t4 = time.time()
vec_eval_cubic_spline_3_kernel(a, b, orders, coeffs, points, vals)
t5 = time.time()
vec_eval_cubic_spline_3_inlined_lesswork(orders, coeffs, points, vals, Ad, dAd)
t6 = time.time()
# rr(a,b,orders,coeffs,points,vals,Ad,dAd)
rr(a, b, orders, coeffs, points, vals)
t7 = time.time()
print("one function call per point: {}".format(t2 - t1))
print("inlined (points in rows): {}".format(t3 - t2))
print("inlined (points in columns): {}".format(t4 - t3))
print("kernel: {}".format(t5 - t4))
print("less work: {}".format(t6 - t5))
print("cython: {}".format(t7 - t6))
print(vals[:10, 0])
| 36.545455 | 83 | 0.651244 | 318 | 2,010 | 3.918239 | 0.232704 | 0.134831 | 0.202247 | 0.134831 | 0.577047 | 0.561798 | 0.561798 | 0.544141 | 0.52488 | 0.52488 | 0 | 0.026465 | 0.210448 | 2,010 | 54 | 84 | 37.222222 | 0.758664 | 0.078607 | 0 | 0.222222 | 0 | 0 | 0.066739 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.088889 | 0 | 0.088889 | 0.2 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
068db5f85677758fda292c1c8ed782424fbdc715 | 494 | py | Python | ews/migrations/0008_auto_20200731_0820.py | mrustl/plattform | 7c9fce2a697b7c9d3de0bd08382571ed89469281 | [
"MIT"
] | null | null | null | ews/migrations/0008_auto_20200731_0820.py | mrustl/plattform | 7c9fce2a697b7c9d3de0bd08382571ed89469281 | [
"MIT"
] | 3 | 2021-06-07T10:30:55.000Z | 2021-06-07T14:00:32.000Z | ews/migrations/0008_auto_20200731_0820.py | mrustl/plattform | 7c9fce2a697b7c9d3de0bd08382571ed89469281 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.8 on 2020-07-31 06:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: re-point FeatureData.station at
    ews.Station with a 'station' related_name; rows cascade-delete
    with their station."""

    dependencies = [
        ('ews', '0007_auto_20200729_1039'),
    ]

    operations = [
        migrations.AlterField(
            model_name='featuredata',
            name='station',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='station', to='ews.Station'),
        ),
    ]
| 24.7 | 123 | 0.645749 | 57 | 494 | 5.491228 | 0.666667 | 0.076677 | 0.089457 | 0.140575 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.081794 | 0.232794 | 494 | 19 | 124 | 26 | 0.744063 | 0.091093 | 0 | 0 | 1 | 0 | 0.138702 | 0.051454 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.153846 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0697e6d51a78ae7b1c4868e96ff0b634b1314e46 | 3,759 | py | Python | touchdown/tests/test_aws_elb_load_balancer.py | yaybu/touchdown | 70ecda5191ce2d095bc074dcb23bfa1584464814 | [
"Apache-2.0"
] | 14 | 2015-01-05T18:18:04.000Z | 2022-02-07T19:35:12.000Z | touchdown/tests/test_aws_elb_load_balancer.py | yaybu/touchdown | 70ecda5191ce2d095bc074dcb23bfa1584464814 | [
"Apache-2.0"
] | 106 | 2015-01-06T00:17:13.000Z | 2019-09-07T00:35:32.000Z | touchdown/tests/test_aws_elb_load_balancer.py | yaybu/touchdown | 70ecda5191ce2d095bc074dcb23bfa1584464814 | [
"Apache-2.0"
] | 5 | 2015-01-30T10:18:24.000Z | 2022-02-07T19:35:13.000Z | # Copyright 2015 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from touchdown.tests.aws import StubberTestCase
from touchdown.tests.stubs.aws import LoadBalancerStubber, Stubber
class TestCreateLoadBalancer(StubberTestCase):
    """Apply-goal behaviour of the ELB load balancer resource."""

    def test_create_load_balancer(self):
        """Creating a missing ELB issues CreateLoadBalancer then re-describes.

        Stub expectations are consumed strictly in order, so the sequence
        below mirrors the exact AWS API calls touchdown is expected to make.
        """
        goal = self.create_goal("apply")
        load_balancer = self.fixtures.enter_context(
            LoadBalancerStubber(
                goal.get_service(
                    self.aws.add_load_balancer(name="test-load_balancer", listeners=[]),
                    "apply",
                )
            )
        )
        # Not found yet -> gets created.
        load_balancer.add_describe_load_balancers_empty()
        load_balancer.add_create_load_balancer()
        # After creation the state is refreshed (describe + attributes)
        # three times.
        load_balancer.add_describe_load_balancers_one()
        load_balancer.add_describe_load_balancer_attributes()
        load_balancer.add_describe_load_balancers_one()
        load_balancer.add_describe_load_balancer_attributes()
        load_balancer.add_describe_load_balancers_one()
        load_balancer.add_describe_load_balancer_attributes()
        goal.execute()

    def test_create_load_balancer_idempotent(self):
        """An ELB that already exists yields no plan steps and no changes."""
        goal = self.create_goal("apply")
        load_balancer = self.fixtures.enter_context(
            LoadBalancerStubber(
                goal.get_service(
                    self.aws.add_load_balancer(name="test-load_balancer", listeners=[]),
                    "apply",
                )
            )
        )
        # Already present: only a describe + attributes round-trip happens.
        load_balancer.add_describe_load_balancers_one()
        load_balancer.add_describe_load_balancer_attributes()
        self.assertEqual(len(list(goal.plan())), 0)
        self.assertEqual(len(goal.get_changes(load_balancer.resource)), 0)
class TestDestroyLoadBalancer(StubberTestCase):
    """Destroy-goal behaviour of the ELB load balancer resource."""

    def test_destroy_load_balancer(self):
        """Destroying an existing ELB deletes it and waits for its ENIs.

        Stub expectations are consumed strictly in order; the EC2 stub
        verifies the wait for the balancer's network interfaces (matched
        by their "ELB <name>" description) to disappear.
        """
        goal = self.create_goal("destroy")
        load_balancer = self.fixtures.enter_context(
            LoadBalancerStubber(
                goal.get_service(
                    self.aws.add_load_balancer(name="test-load_balancer"), "destroy"
                )
            )
        )
        # The ELB exists -> it gets deleted.
        load_balancer.add_describe_load_balancers_one()
        load_balancer.add_describe_load_balancer_attributes()
        load_balancer.add_delete_load_balancer()

        # An empty describe_network_interfaces response ends the
        # post-delete wait immediately.
        network_interface_waiter = self.fixtures.enter_context(
            Stubber(load_balancer.service.ec2_client)
        )
        network_interface_waiter.add_response(
            "describe_network_interfaces",
            service_response={},
            expected_params={
                "Filters": [
                    {"Name": "description", "Values": ["ELB test-load_balancer"]}
                ]
            },
        )
        goal.execute()

    def test_destroy_load_balancer_idempotent(self):
        """Destroying an ELB that does not exist plans no changes."""
        goal = self.create_goal("destroy")
        load_balancer = self.fixtures.enter_context(
            LoadBalancerStubber(
                goal.get_service(
                    self.aws.add_load_balancer(name="test-load_balancer"), "destroy"
                )
            )
        )
        load_balancer.add_describe_load_balancers_empty()
        self.assertEqual(len(list(goal.plan())), 0)
        self.assertEqual(len(goal.get_changes(load_balancer.resource)), 0)
| 33.864865 | 88 | 0.651237 | 405 | 3,759 | 5.730864 | 0.293827 | 0.211978 | 0.090478 | 0.118914 | 0.605343 | 0.582077 | 0.582077 | 0.567428 | 0.54847 | 0.54847 | 0 | 0.004708 | 0.265496 | 3,759 | 110 | 89 | 34.172727 | 0.835929 | 0.146316 | 0 | 0.540541 | 0 | 0 | 0.061659 | 0.008451 | 0 | 0 | 0 | 0 | 0.054054 | 1 | 0.054054 | false | 0 | 0.027027 | 0 | 0.108108 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
06a87a14f775f772ea8a415d8274858d0fe71c88 | 982 | py | Python | intro.py | tydyus/HardZone_enjam2017_exterieur | 6966389bbfd2176a3466c0e7191d88e87e3457af | [
"MIT"
] | null | null | null | intro.py | tydyus/HardZone_enjam2017_exterieur | 6966389bbfd2176a3466c0e7191d88e87e3457af | [
"MIT"
] | null | null | null | intro.py | tydyus/HardZone_enjam2017_exterieur | 6966389bbfd2176a3466c0e7191d88e87e3457af | [
"MIT"
] | null | null | null | import pygame
from pygame.locals import *
import time
from assets import *
from terrain import *
from player import *
from pnj import *
from carte import *
class intro():
    """Plays the six-scene intro cutscene, one frame per tick.

    Call ``intro.intro()`` once per frame; it advances the counter and
    renders whichever scene the counter currently falls in.
    """
    # Frames elapsed so far; advanced by each intro.intro() call.
    time = 0
    # NOTE(review): the scene durations below sum to 190, not 670 —
    # confirm which value external code uses as "intro finished".
    end = 670

    # Per-scene durations in frames, in playback order:
    # 1 arrival in the world, 2 view of the HL, 3 HL gives the potion,
    # 4 drinking the potion, 5 warning, 6 hard zone.
    SCENE_DURATIONS = (35, 30, 30, 25, 40, 30)

    def intro():
        """Advance the cutscene one frame and render the current scene.

        Fixes the original's dead local ``end`` (which shadowed the class
        attribute with a different value) and collapses the six duplicated
        cumulative-threshold branches into one loop.
        """
        intro.time += 1
        threshold = 0
        for index, duration in enumerate(intro.SCENE_DURATIONS, start=1):
            threshold += duration
            if intro.time < threshold:
                assets.img["scene{}".format(index)].render()
                break
        # Past the last threshold nothing is rendered (same as before).
| 23.380952 | 44 | 0.510183 | 131 | 982 | 3.824427 | 0.381679 | 0.143713 | 0.131737 | 0.189621 | 0.293413 | 0.293413 | 0.219561 | 0.169661 | 0.115768 | 0 | 0 | 0.090469 | 0.369654 | 982 | 41 | 45 | 23.95122 | 0.718901 | 0.068228 | 0 | 0 | 0 | 0 | 0.039691 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03125 | false | 0 | 0.25 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
06acef4dad7a22308dcda78076df6778baa86028 | 387 | py | Python | app1/migrations/0004_auto_20200131_1129.py | awzdevelopers/FlightStrips_django | f7d89be6804c583bc9d2a86822b3538d8a340e92 | [
"MIT"
] | null | null | null | app1/migrations/0004_auto_20200131_1129.py | awzdevelopers/FlightStrips_django | f7d89be6804c583bc9d2a86822b3538d8a340e92 | [
"MIT"
] | null | null | null | app1/migrations/0004_auto_20200131_1129.py | awzdevelopers/FlightStrips_django | f7d89be6804c583bc9d2a86822b3538d8a340e92 | [
"MIT"
] | 1 | 2019-07-17T11:53:37.000Z | 2019-07-17T11:53:37.000Z | # Generated by Django 2.0.3 on 2020-01-31 07:59
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make Flight.stripImage an ImageField
    with no upload subdirectory (files land directly in MEDIA_ROOT)."""

    dependencies = [
        ('app1', '0003_auto_20200129_0337'),
    ]

    operations = [
        migrations.AlterField(
            model_name='flight',
            name='stripImage',
            field=models.ImageField(upload_to=''),
        ),
    ]
| 20.368421 | 50 | 0.594315 | 41 | 387 | 5.487805 | 0.853659 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115942 | 0.286822 | 387 | 18 | 51 | 21.5 | 0.699275 | 0.116279 | 0 | 0 | 1 | 0 | 0.126471 | 0.067647 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
06ad8916ff463408fd38fdd3b0644bb6c948b9d9 | 1,008 | py | Python | _includes/code/shopping/cart.py | rajat19/grokking-system-design | 4c5834ff9f3f7a9586bd732ab6fe0bc879243b3b | [
"MIT"
] | 4 | 2020-12-11T17:28:45.000Z | 2021-12-23T12:01:19.000Z | _includes/code/shopping/cart.py | rajat19/grokking-system-design | 4c5834ff9f3f7a9586bd732ab6fe0bc879243b3b | [
"MIT"
] | 1 | 2020-11-27T11:30:42.000Z | 2020-11-27T11:30:42.000Z | _includes/code/shopping/cart.py | rajat19/system-design | 4c5834ff9f3f7a9586bd732ab6fe0bc879243b3b | [
"MIT"
] | null | null | null | class Item:
def __init__(self, id, quantity, price):
self.__product_id = id
self.__quantity = quantity
self.__price = price
def update_quantity(self, quantity):
None
class ShoppingCart:
    """Holds the items a customer intends to buy."""

    def __init__(self):
        """Start with an empty item list."""
        self.__items = []

    def add_item(self, item):
        """Add *item* to the cart (not implemented in this sketch)."""
        pass

    def remove_item(self, item):
        """Remove *item* from the cart (not implemented in this sketch)."""
        pass

    def update_item_quantity(self, item, quantity):
        """Set *item*'s quantity (not implemented in this sketch)."""
        pass

    def get_items(self):
        """Return the live list of items currently in the cart."""
        return self.__items

    def checkout(self):
        """Turn the cart into an order (not implemented in this sketch)."""
        pass
class OrderLog:
    """One status entry in an order's history."""

    def __init__(self, order_number, status=OrderStatus.PENDING):
        """Record the order number, today's date, and the given status."""
        # NOTE(review): OrderStatus and datetime are not imported in this
        # module as shown — confirm they are provided elsewhere.
        self.__status = status
        self.__creation_date = datetime.date.today()
        self.__order_number = order_number
class Order:
    """A placed order: number, status, order date, and a log history."""

    def __init__(self, order_number, status=OrderStatus.PENDING):
        """Create an order in *status*, dated today.

        BUG FIX: the order number was hard-coded to 0, silently ignoring
        the *order_number* argument (OrderLog stores it correctly).
        """
        self.__order_number = order_number
        self.__status = status
        self.__order_date = datetime.date.today()
        self.__order_log = []

    def send_for_shipment(self):
        """Hand the order to shipping (not implemented in this sketch)."""
        pass

    def make_payment(self, payment):
        """Pay for the order (not implemented in this sketch)."""
        pass

    def add_order_log(self, order_log):
        """Append an OrderLog entry to the history (not implemented)."""
        pass
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.