| content | fixed_cases |
|---|---|
COMMON = 'http://www.webex.com/schemas/2002/06/common'
SERVICE = 'http://www.webex.com/schemas/2002/06/service'
EP = '%s/ep' % SERVICE
EVENT = '%s/event' % SERVICE
ATTENDEE = '%s/attendee' % SERVICE
HISTORY = '%s/history' % SERVICE
SITE = '%s/site' % SERVICE
PREFIXES = {
'com': COMMON,
'serv': SERVICE,
'ep': EP,
'event': EVENT,
'att': ATTENDEE,
'history': HISTORY,
'site': SITE
}
| common = 'http://www.webex.com/schemas/2002/06/common'
service = 'http://www.webex.com/schemas/2002/06/service'
ep = '%s/ep' % service
event = '%s/event' % service
attendee = '%s/attendee' % service
history = '%s/history' % service
site = '%s/site' % service
prefixes = {'com': common, 'serv': service, 'ep': ep, 'event': event, 'att': attendee, 'history': history, 'site': site} |
blosum = {
"SW": -3,
"GG": 6,
"EM": -2,
"AN": -2,
"AY": -2,
"WQ": -2,
"VN": -3,
"FK": -3,
"GE": -2,
"ED": 2,
"WP": -4,
"IT": -1,
"FD": -3,
"KV": -2,
"CY": -2,
"GD": -1,
"TN": 0,
"WW": 11,
"SS": 4,
"KC": -3,
"EF": -3,
"NL": -3,
"AK": -1,
"QP": -1,
"FG": -3,
"DS": 0,
"CV": -1,
"VT": 0,
"HP": -2,
"PV": -2,
"IQ": -3,
"FV": -1,
"WT": -2,
"HF": -1,
"PD": -1,
"QR": 1,
"DQ": 0,
"KQ": 1,
"DF": -3,
"VW": -3,
"TC": -1,
"AF": -2,
"TH": -2,
"AQ": -1,
"QT": -1,
"VF": -1,
"FC": -2,
"CR": -3,
"VP": -2,
"HT": -2,
"EL": -3,
"FR": -3,
"IG": -4,
"CQ": -3,
"YV": -1,
"TA": 0,
"TV": 0,
"QV": -2,
"SK": 0,
"KK": 5,
"EN": 0,
"NT": 0,
"AH": -2,
"AC": 0,
"VS": -2,
"QH": 0,
"HS": -1,
"QY": -1,
"PN": -2,
"IY": -1,
"PG": -2,
"FN": -3,
"HN": 1,
"KH": -1,
"NW": -4,
"SY": -2,
"WN": -4,
"DY": -3,
"EQ": 2,
"KY": -2,
"SG": 0,
"YS": -2,
"GR": -2,
"AL": -1,
"AG": 0,
"TK": -1,
"TP": -1,
"MV": 1,
"QL": -2,
"ES": 0,
"HW": -2,
"ID": -3,
"KF": -3,
"NA": -2,
"TI": -1,
"QN": 0,
"KW": -3,
"SC": -1,
"YY": 7,
"GV": -3,
"LV": 1,
"AR": -1,
"MR": -1,
"YL": -1,
"DC": -3,
"PP": 7,
"DH": -1,
"QQ": 5,
"IV": 3,
"PF": -4,
"IA": -1,
"FF": 6,
"KT": -1,
"LT": -1,
"SQ": 0,
"WF": 1,
"DA": -2,
"EY": -2,
"KA": -1,
"QS": 0,
"AD": -2,
"LR": -2,
"TS": 1,
"AV": 0,
"MN": -2,
"QD": 0,
"EP": -1,
"VV": 4,
"DN": 1,
"IS": -2,
"PM": -2,
"HD": -1,
"IL": 2,
"KN": 0,
"LP": -3,
"YI": -1,
"NI": -3,
"TQ": -1,
"QF": -3,
"SM": -1,
"ER": 0,
"QW": -2,
"GN": 0,
"LY": -1,
"LN": -3,
"AS": 1,
"DT": -1,
"ST": 1,
"PS": -1,
"VR": -3,
"DK": -1,
"PH": -2,
"HC": -3,
"QI": -3,
"HH": 8,
"II": 4,
"LW": -2,
"LL": 4,
"DR": -2,
"SI": -2,
"DI": -3,
"EA": -1,
"KI": -3,
"QK": 1,
"TD": -1,
"AW": -3,
"YR": -2,
"MF": 0,
"SP": -1,
"HQ": 0,
"YN": -2,
"IP": -3,
"EC": -4,
"HG": -2,
"PE": -1,
"QM": 0,
"HL": -3,
"LS": -2,
"LH": -3,
"NQ": 0,
"TY": -2,
"KG": -2,
"SE": 0,
"YE": -2,
"WR": -3,
"VM": 1,
"NR": 0,
"GF": -3,
"FY": 3,
"LQ": -2,
"MY": -1,
"AP": -1,
"SN": 1,
"CL": -1,
"LF": 0,
"DW": -4,
"SL": -2,
"PR": -2,
"PK": -1,
"YG": -3,
"CK": -3,
"HK": -1,
"QA": -1,
"IF": 0,
"KD": -1,
"NC": -3,
"LD": -4,
"YK": -2,
"SA": 1,
"WV": -3,
"EI": -3,
"VI": 3,
"QC": -3,
"TG": -2,
"TL": -1,
"LM": 2,
"AT": 0,
"CH": -3,
"PY": -3,
"SH": -1,
"HY": 2,
"EK": 1,
"CG": -3,
"IC": -1,
"QE": 2,
"KR": 2,
"TE": -1,
"LK": -2,
"MW": -1,
"NY": -2,
"NH": 1,
"VE": -2,
"QG": -2,
"YD": -3,
"FQ": -3,
"GY": -3,
"LI": 2,
"MQ": 0,
"RA": -1,
"CD": -3,
"SV": -2,
"DD": 6,
"SD": 0,
"PC": -3,
"CC": 9,
"WK": -3,
"IN": -3,
"KL": -2,
"NK": 0,
"LG": -4,
"MS": -1,
"RC": -3,
"RD": -2,
"VA": 0,
"WI": -3,
"TT": 5,
"FM": 0,
"LE": -3,
"MM": 5,
"RE": 0,
"WH": -2,
"SR": -1,
"EW": -3,
"PQ": -1,
"HA": -2,
"YA": -2,
"EH": 0,
"RF": -3,
"IK": -3,
"NE": 0,
"TM": -1,
"TR": -1,
"MT": -1,
"GS": 0,
"LC": -1,
"RG": -2,
"YM": -1,
"NF": -3,
"YQ": -1,
"NP": -2,
"RH": 0,
"WM": -1,
"CN": -3,
"VL": 1,
"FI": 0,
"GQ": -2,
"LA": -1,
"MI": 1,
"RI": -3,
"WL": -2,
"DG": -1,
"DL": -4,
"IR": -3,
"CM": -1,
"HE": 0,
"YW": 2,
"GP": -2,
"WC": -2,
"MP": -2,
"NS": 1,
"GW": -2,
"MK": -1,
"RK": 2,
"DE": 2,
"KE": 1,
"RL": -2,
"AI": -1,
"VY": -1,
"WA": -3,
"YF": 3,
"TW": -2,
"VH": -3,
"FE": -3,
"ME": -2,
"RM": -1,
"ET": -1,
"HR": 0,
"PI": -3,
"FT": -2,
"CI": -1,
"HI": -3,
"GT": -2,
"IH": -3,
"RN": 0,
"CW": -2,
"WG": -2,
"NM": -2,
"ML": 2,
"GK": -2,
"MG": -3,
"KS": 0,
"EV": -2,
"NN": 6,
"VK": -2,
"RP": -2,
"AM": -1,
"WE": -3,
"FW": 1,
"CF": -2,
"VD": -3,
"FA": -2,
"GI": -4,
"MA": -1,
"RQ": 1,
"CT": -1,
"WD": -4,
"HV": -3,
"SF": -2,
"PT": -1,
"FP": -4,
"CE": -4,
"HM": -2,
"IE": -3,
"GH": -2,
"RR": 5,
"KP": -1,
"CS": -1,
"DV": -3,
"MH": -2,
"MC": -1,
"RS": -1,
"DM": -3,
"EE": 5,
"KM": -1,
"VG": -3,
"RT": -1,
"AA": 4,
"VQ": -2,
"WY": 2,
"FS": -2,
"GM": -3,
"CP": -3,
"EG": -2,
"IW": -3,
"PA": -1,
"FL": 0,
"CA": 0,
"GL": -4,
"RV": -3,
"TF": -2,
"YP": -3,
"MD": -3,
"GC": -3,
"RW": -3,
"ND": 1,
"NV": -3,
"VC": -1,
"AE": -1,
"YH": 2,
"DP": -1,
"GA": 0,
"RY": -2,
"PW": -4,
"YC": -2,
"PL": -3,
"FH": -1,
"IM": 1,
"YT": -2,
"NG": 0,
"WS": -3
}
adj_blosum = {
b"AA": 0,
b"AC": 4,
b"AD": 4,
b"AE": 4,
b"AF": 4,
b"AG": 4,
b"AH": 4,
b"AI": 4,
b"AK": 4,
b"AL": 4,
b"AM": 4,
b"AN": 4,
b"AP": 4,
b"AQ": 4,
b"AR": 4,
b"AS": 3,
b"AT": 4,
b"AV": 4,
b"AW": 4,
b"AY": 4,
b"CA": 4,
b"CC": 0,
b"CD": 4,
b"CE": 4,
b"CF": 4,
b"CG": 4,
b"CH": 4,
b"CI": 4,
b"CK": 4,
b"CL": 4,
b"CM": 4,
b"CN": 4,
b"CP": 4,
b"CQ": 4,
b"CR": 4,
b"CS": 4,
b"CT": 4,
b"CV": 4,
b"CW": 4,
b"CY": 4,
b"DA": 4,
b"DC": 4,
b"DD": 0,
b"DE": 2,
b"DF": 4,
b"DG": 4,
b"DH": 4,
b"DI": 4,
b"DK": 4,
b"DL": 4,
b"DM": 4,
b"DN": 3,
b"DP": 4,
b"DQ": 4,
b"DR": 4,
b"DS": 4,
b"DT": 4,
b"DV": 4,
b"DW": 4,
b"DY": 4,
b"EA": 4,
b"EC": 4,
b"ED": 2,
b"EE": 0,
b"EF": 4,
b"EG": 4,
b"EH": 4,
b"EI": 4,
b"EK": 3,
b"EL": 4,
b"EM": 4,
b"EN": 4,
b"EP": 4,
b"EQ": 2,
b"ER": 4,
b"ES": 4,
b"ET": 4,
b"EV": 4,
b"EW": 4,
b"EY": 4,
b"FA": 4,
b"FC": 4,
b"FD": 4,
b"FE": 4,
b"FF": 0,
b"FG": 4,
b"FH": 4,
b"FI": 4,
b"FK": 4,
b"FL": 4,
b"FM": 4,
b"FN": 4,
b"FP": 4,
b"FQ": 4,
b"FR": 4,
b"FS": 4,
b"FT": 4,
b"FV": 4,
b"FW": 3,
b"FY": 1,
b"GA": 4,
b"GC": 4,
b"GD": 4,
b"GE": 4,
b"GF": 4,
b"GG": 0,
b"GH": 4,
b"GI": 4,
b"GK": 4,
b"GL": 4,
b"GM": 4,
b"GN": 4,
b"GP": 4,
b"GQ": 4,
b"GR": 4,
b"GS": 4,
b"GT": 4,
b"GV": 4,
b"GW": 4,
b"GY": 4,
b"HA": 4,
b"HC": 4,
b"HD": 4,
b"HE": 4,
b"HF": 4,
b"HG": 4,
b"HH": 0,
b"HI": 4,
b"HK": 4,
b"HL": 4,
b"HM": 4,
b"HN": 3,
b"HP": 4,
b"HQ": 4,
b"HR": 4,
b"HS": 4,
b"HT": 4,
b"HV": 4,
b"HW": 4,
b"HY": 2,
b"IA": 4,
b"IC": 4,
b"ID": 4,
b"IE": 4,
b"IF": 4,
b"IG": 4,
b"IH": 4,
b"II": 0,
b"IK": 4,
b"IL": 2,
b"IM": 3,
b"IN": 4,
b"IP": 4,
b"IQ": 4,
b"IR": 4,
b"IS": 4,
b"IT": 4,
b"IV": 1,
b"IW": 4,
b"IY": 4,
b"KA": 4,
b"KC": 4,
b"KD": 4,
b"KE": 3,
b"KF": 4,
b"KG": 4,
b"KH": 4,
b"KI": 4,
b"KK": 0,
b"KL": 4,
b"KM": 4,
b"KN": 4,
b"KP": 4,
b"KQ": 3,
b"KR": 2,
b"KS": 4,
b"KT": 4,
b"KV": 4,
b"KW": 4,
b"KY": 4,
b"LA": 4,
b"LC": 4,
b"LD": 4,
b"LE": 4,
b"LF": 4,
b"LG": 4,
b"LH": 4,
b"LI": 2,
b"LK": 4,
b"LL": 0,
b"LM": 2,
b"LN": 4,
b"LP": 4,
b"LQ": 4,
b"LR": 4,
b"LS": 4,
b"LT": 4,
b"LV": 3,
b"LW": 4,
b"LY": 4,
b"MA": 4,
b"MC": 4,
b"MD": 4,
b"ME": 4,
b"MF": 4,
b"MG": 4,
b"MH": 4,
b"MI": 3,
b"MK": 4,
b"ML": 2,
b"MM": 0,
b"MN": 4,
b"MP": 4,
b"MQ": 4,
b"MR": 4,
b"MS": 4,
b"MT": 4,
b"MV": 3,
b"MW": 4,
b"MY": 4,
b"NA": 4,
b"NC": 4,
b"ND": 3,
b"NE": 4,
b"NF": 4,
b"NG": 4,
b"NH": 3,
b"NI": 4,
b"NK": 4,
b"NL": 4,
b"NM": 4,
b"NN": 0,
b"NP": 4,
b"NQ": 4,
b"NR": 4,
b"NS": 3,
b"NT": 4,
b"NV": 4,
b"NW": 4,
b"NY": 4,
b"PA": 4,
b"PC": 4,
b"PD": 4,
b"PE": 4,
b"PF": 4,
b"PG": 4,
b"PH": 4,
b"PI": 4,
b"PK": 4,
b"PL": 4,
b"PM": 4,
b"PN": 4,
b"PP": 0,
b"PQ": 4,
b"PR": 4,
b"PS": 4,
b"PT": 4,
b"PV": 4,
b"PW": 4,
b"PY": 4,
b"QA": 4,
b"QC": 4,
b"QD": 4,
b"QE": 2,
b"QF": 4,
b"QG": 4,
b"QH": 4,
b"QI": 4,
b"QK": 3,
b"QL": 4,
b"QM": 4,
b"QN": 4,
b"QP": 4,
b"QQ": 0,
b"QR": 3,
b"QS": 4,
b"QT": 4,
b"QV": 4,
b"QW": 4,
b"QY": 4,
b"RA": 4,
b"RC": 4,
b"RD": 4,
b"RE": 4,
b"RF": 4,
b"RG": 4,
b"RH": 4,
b"RI": 4,
b"RK": 2,
b"RL": 4,
b"RM": 4,
b"RN": 4,
b"RP": 4,
b"RQ": 3,
b"RR": 0,
b"RS": 4,
b"RT": 4,
b"RV": 4,
b"RW": 4,
b"RY": 4,
b"SA": 3,
b"SC": 4,
b"SD": 4,
b"SE": 4,
b"SF": 4,
b"SG": 4,
b"SH": 4,
b"SI": 4,
b"SK": 4,
b"SL": 4,
b"SM": 4,
b"SN": 3,
b"SP": 4,
b"SQ": 4,
b"SR": 4,
b"SS": 0,
b"ST": 3,
b"SV": 4,
b"SW": 4,
b"SY": 4,
b"TA": 4,
b"TC": 4,
b"TD": 4,
b"TE": 4,
b"TF": 4,
b"TG": 4,
b"TH": 4,
b"TI": 4,
b"TK": 4,
b"TL": 4,
b"TM": 4,
b"TN": 4,
b"TP": 4,
b"TQ": 4,
b"TR": 4,
b"TS": 3,
b"TT": 0,
b"TV": 4,
b"TW": 4,
b"TY": 4,
b"VA": 4,
b"VC": 4,
b"VD": 4,
b"VE": 4,
b"VF": 4,
b"VG": 4,
b"VH": 4,
b"VI": 1,
b"VK": 4,
b"VL": 3,
b"VM": 3,
b"VN": 4,
b"VP": 4,
b"VQ": 4,
b"VR": 4,
b"VS": 4,
b"VT": 4,
b"VV": 0,
b"VW": 4,
b"VY": 4,
b"WA": 4,
b"WC": 4,
b"WD": 4,
b"WE": 4,
b"WF": 3,
b"WG": 4,
b"WH": 4,
b"WI": 4,
b"WK": 4,
b"WL": 4,
b"WM": 4,
b"WN": 4,
b"WP": 4,
b"WQ": 4,
b"WR": 4,
b"WS": 4,
b"WT": 4,
b"WV": 4,
b"WW": 0,
b"WY": 2,
b"YA": 4,
b"YC": 4,
b"YD": 4,
b"YE": 4,
b"YF": 1,
b"YG": 4,
b"YH": 2,
b"YI": 4,
b"YK": 4,
b"YL": 4,
b"YM": 4,
b"YN": 4,
b"YP": 4,
b"YQ": 4,
b"YR": 4,
b"YS": 4,
b"YT": 4,
b"YV": 4,
b"YW": 2,
b"YY": 0
} | blosum = {'SW': -3, 'GG': 6, 'EM': -2, 'AN': -2, 'AY': -2, 'WQ': -2, 'VN': -3, 'FK': -3, 'GE': -2, 'ED': 2, 'WP': -4, 'IT': -1, 'FD': -3, 'KV': -2, 'CY': -2, 'GD': -1, 'TN': 0, 'WW': 11, 'SS': 4, 'KC': -3, 'EF': -3, 'NL': -3, 'AK': -1, 'QP': -1, 'FG': -3, 'DS': 0, 'CV': -1, 'VT': 0, 'HP': -2, 'PV': -2, 'IQ': -3, 'FV': -1, 'WT': -2, 'HF': -1, 'PD': -1, 'QR': 1, 'DQ': 0, 'KQ': 1, 'DF': -3, 'VW': -3, 'TC': -1, 'AF': -2, 'TH': -2, 'AQ': -1, 'QT': -1, 'VF': -1, 'FC': -2, 'CR': -3, 'VP': -2, 'HT': -2, 'EL': -3, 'FR': -3, 'IG': -4, 'CQ': -3, 'YV': -1, 'TA': 0, 'TV': 0, 'QV': -2, 'SK': 0, 'KK': 5, 'EN': 0, 'NT': 0, 'AH': -2, 'AC': 0, 'VS': -2, 'QH': 0, 'HS': -1, 'QY': -1, 'PN': -2, 'IY': -1, 'PG': -2, 'FN': -3, 'HN': 1, 'KH': -1, 'NW': -4, 'SY': -2, 'WN': -4, 'DY': -3, 'EQ': 2, 'KY': -2, 'SG': 0, 'YS': -2, 'GR': -2, 'AL': -1, 'AG': 0, 'TK': -1, 'TP': -1, 'MV': 1, 'QL': -2, 'ES': 0, 'HW': -2, 'ID': -3, 'KF': -3, 'NA': -2, 'TI': -1, 'QN': 0, 'KW': -3, 'SC': -1, 'YY': 7, 'GV': -3, 'LV': 1, 'AR': -1, 'MR': -1, 'YL': -1, 'DC': -3, 'PP': 7, 'DH': -1, 'QQ': 5, 'IV': 3, 'PF': -4, 'IA': -1, 'FF': 6, 'KT': -1, 'LT': -1, 'SQ': 0, 'WF': 1, 'DA': -2, 'EY': -2, 'KA': -1, 'QS': 0, 'AD': -2, 'LR': -2, 'TS': 1, 'AV': 0, 'MN': -2, 'QD': 0, 'EP': -1, 'VV': 4, 'DN': 1, 'IS': -2, 'PM': -2, 'HD': -1, 'IL': 2, 'KN': 0, 'LP': -3, 'YI': -1, 'NI': -3, 'TQ': -1, 'QF': -3, 'SM': -1, 'ER': 0, 'QW': -2, 'GN': 0, 'LY': -1, 'LN': -3, 'AS': 1, 'DT': -1, 'ST': 1, 'PS': -1, 'VR': -3, 'DK': -1, 'PH': -2, 'HC': -3, 'QI': -3, 'HH': 8, 'II': 4, 'LW': -2, 'LL': 4, 'DR': -2, 'SI': -2, 'DI': -3, 'EA': -1, 'KI': -3, 'QK': 1, 'TD': -1, 'AW': -3, 'YR': -2, 'MF': 0, 'SP': -1, 'HQ': 0, 'YN': -2, 'IP': -3, 'EC': -4, 'HG': -2, 'PE': -1, 'QM': 0, 'HL': -3, 'LS': -2, 'LH': -3, 'NQ': 0, 'TY': -2, 'KG': -2, 'SE': 0, 'YE': -2, 'WR': -3, 'VM': 1, 'NR': 0, 'GF': -3, 'FY': 3, 'LQ': -2, 'MY': -1, 'AP': -1, 'SN': 1, 'CL': -1, 'LF': 0, 'DW': -4, 'SL': -2, 'PR': -2, 'PK': -1, 'YG': -3, 'CK': -3, 'HK': -1, 'QA': -1, 'IF': 0, 'KD': -1, 'NC': -3, 'LD': -4, 'YK': -2, 'SA': 1, 'WV': -3, 'EI': -3, 'VI': 3, 'QC': -3, 'TG': -2, 'TL': -1, 'LM': 2, 'AT': 0, 'CH': -3, 'PY': -3, 'SH': -1, 'HY': 2, 'EK': 1, 'CG': -3, 'IC': -1, 'QE': 2, 'KR': 2, 'TE': -1, 'LK': -2, 'MW': -1, 'NY': -2, 'NH': 1, 'VE': -2, 'QG': -2, 'YD': -3, 'FQ': -3, 'GY': -3, 'LI': 2, 'MQ': 0, 'RA': -1, 'CD': -3, 'SV': -2, 'DD': 6, 'SD': 0, 'PC': -3, 'CC': 9, 'WK': -3, 'IN': -3, 'KL': -2, 'NK': 0, 'LG': -4, 'MS': -1, 'RC': -3, 'RD': -2, 'VA': 0, 'WI': -3, 'TT': 5, 'FM': 0, 'LE': -3, 'MM': 5, 'RE': 0, 'WH': -2, 'SR': -1, 'EW': -3, 'PQ': -1, 'HA': -2, 'YA': -2, 'EH': 0, 'RF': -3, 'IK': -3, 'NE': 0, 'TM': -1, 'TR': -1, 'MT': -1, 'GS': 0, 'LC': -1, 'RG': -2, 'YM': -1, 'NF': -3, 'YQ': -1, 'NP': -2, 'RH': 0, 'WM': -1, 'CN': -3, 'VL': 1, 'FI': 0, 'GQ': -2, 'LA': -1, 'MI': 1, 'RI': -3, 'WL': -2, 'DG': -1, 'DL': -4, 'IR': -3, 'CM': -1, 'HE': 0, 'YW': 2, 'GP': -2, 'WC': -2, 'MP': -2, 'NS': 1, 'GW': -2, 'MK': -1, 'RK': 2, 'DE': 2, 'KE': 1, 'RL': -2, 'AI': -1, 'VY': -1, 'WA': -3, 'YF': 3, 'TW': -2, 'VH': -3, 'FE': -3, 'ME': -2, 'RM': -1, 'ET': -1, 'HR': 0, 'PI': -3, 'FT': -2, 'CI': -1, 'HI': -3, 'GT': -2, 'IH': -3, 'RN': 0, 'CW': -2, 'WG': -2, 'NM': -2, 'ML': 2, 'GK': -2, 'MG': -3, 'KS': 0, 'EV': -2, 'NN': 6, 'VK': -2, 'RP': -2, 'AM': -1, 'WE': -3, 'FW': 1, 'CF': -2, 'VD': -3, 'FA': -2, 'GI': -4, 'MA': -1, 'RQ': 1, 'CT': -1, 'WD': -4, 'HV': -3, 'SF': -2, 'PT': -1, 'FP': -4, 'CE': -4, 'HM': -2, 'IE': -3, 'GH': -2, 'RR': 5, 'KP': -1, 'CS': -1, 'DV': -3, 'MH': -2, 'MC': -1, 'RS': -1, 'DM': -3, 'EE': 5, 'KM': -1, 
'VG': -3, 'RT': -1, 'AA': 4, 'VQ': -2, 'WY': 2, 'FS': -2, 'GM': -3, 'CP': -3, 'EG': -2, 'IW': -3, 'PA': -1, 'FL': 0, 'CA': 0, 'GL': -4, 'RV': -3, 'TF': -2, 'YP': -3, 'MD': -3, 'GC': -3, 'RW': -3, 'ND': 1, 'NV': -3, 'VC': -1, 'AE': -1, 'YH': 2, 'DP': -1, 'GA': 0, 'RY': -2, 'PW': -4, 'YC': -2, 'PL': -3, 'FH': -1, 'IM': 1, 'YT': -2, 'NG': 0, 'WS': -3}
adj_blosum = {b'AA': 0, b'AC': 4, b'AD': 4, b'AE': 4, b'AF': 4, b'AG': 4, b'AH': 4, b'AI': 4, b'AK': 4, b'AL': 4, b'AM': 4, b'AN': 4, b'AP': 4, b'AQ': 4, b'AR': 4, b'AS': 3, b'AT': 4, b'AV': 4, b'AW': 4, b'AY': 4, b'CA': 4, b'CC': 0, b'CD': 4, b'CE': 4, b'CF': 4, b'CG': 4, b'CH': 4, b'CI': 4, b'CK': 4, b'CL': 4, b'CM': 4, b'CN': 4, b'CP': 4, b'CQ': 4, b'CR': 4, b'CS': 4, b'CT': 4, b'CV': 4, b'CW': 4, b'CY': 4, b'DA': 4, b'DC': 4, b'DD': 0, b'DE': 2, b'DF': 4, b'DG': 4, b'DH': 4, b'DI': 4, b'DK': 4, b'DL': 4, b'DM': 4, b'DN': 3, b'DP': 4, b'DQ': 4, b'DR': 4, b'DS': 4, b'DT': 4, b'DV': 4, b'DW': 4, b'DY': 4, b'EA': 4, b'EC': 4, b'ED': 2, b'EE': 0, b'EF': 4, b'EG': 4, b'EH': 4, b'EI': 4, b'EK': 3, b'EL': 4, b'EM': 4, b'EN': 4, b'EP': 4, b'EQ': 2, b'ER': 4, b'ES': 4, b'ET': 4, b'EV': 4, b'EW': 4, b'EY': 4, b'FA': 4, b'FC': 4, b'FD': 4, b'FE': 4, b'FF': 0, b'FG': 4, b'FH': 4, b'FI': 4, b'FK': 4, b'FL': 4, b'FM': 4, b'FN': 4, b'FP': 4, b'FQ': 4, b'FR': 4, b'FS': 4, b'FT': 4, b'FV': 4, b'FW': 3, b'FY': 1, b'GA': 4, b'GC': 4, b'GD': 4, b'GE': 4, b'GF': 4, b'GG': 0, b'GH': 4, b'GI': 4, b'GK': 4, b'GL': 4, b'GM': 4, b'GN': 4, b'GP': 4, b'GQ': 4, b'GR': 4, b'GS': 4, b'GT': 4, b'GV': 4, b'GW': 4, b'GY': 4, b'HA': 4, b'HC': 4, b'HD': 4, b'HE': 4, b'HF': 4, b'HG': 4, b'HH': 0, b'HI': 4, b'HK': 4, b'HL': 4, b'HM': 4, b'HN': 3, b'HP': 4, b'HQ': 4, b'HR': 4, b'HS': 4, b'HT': 4, b'HV': 4, b'HW': 4, b'HY': 2, b'IA': 4, b'IC': 4, b'ID': 4, b'IE': 4, b'IF': 4, b'IG': 4, b'IH': 4, b'II': 0, b'IK': 4, b'IL': 2, b'IM': 3, b'IN': 4, b'IP': 4, b'IQ': 4, b'IR': 4, b'IS': 4, b'IT': 4, b'IV': 1, b'IW': 4, b'IY': 4, b'KA': 4, b'KC': 4, b'KD': 4, b'KE': 3, b'KF': 4, b'KG': 4, b'KH': 4, b'KI': 4, b'KK': 0, b'KL': 4, b'KM': 4, b'KN': 4, b'KP': 4, b'KQ': 3, b'KR': 2, b'KS': 4, b'KT': 4, b'KV': 4, b'KW': 4, b'KY': 4, b'LA': 4, b'LC': 4, b'LD': 4, b'LE': 4, b'LF': 4, b'LG': 4, b'LH': 4, b'LI': 2, b'LK': 4, b'LL': 0, b'LM': 2, b'LN': 4, b'LP': 4, b'LQ': 4, b'LR': 4, b'LS': 4, b'LT': 4, b'LV': 3, b'LW': 4, b'LY': 4, b'MA': 4, b'MC': 4, b'MD': 4, b'ME': 4, b'MF': 4, b'MG': 4, b'MH': 4, b'MI': 3, b'MK': 4, b'ML': 2, b'MM': 0, b'MN': 4, b'MP': 4, b'MQ': 4, b'MR': 4, b'MS': 4, b'MT': 4, b'MV': 3, b'MW': 4, b'MY': 4, b'NA': 4, b'NC': 4, b'ND': 3, b'NE': 4, b'NF': 4, b'NG': 4, b'NH': 3, b'NI': 4, b'NK': 4, b'NL': 4, b'NM': 4, b'NN': 0, b'NP': 4, b'NQ': 4, b'NR': 4, b'NS': 3, b'NT': 4, b'NV': 4, b'NW': 4, b'NY': 4, b'PA': 4, b'PC': 4, b'PD': 4, b'PE': 4, b'PF': 4, b'PG': 4, b'PH': 4, b'PI': 4, b'PK': 4, b'PL': 4, b'PM': 4, b'PN': 4, b'PP': 0, b'PQ': 4, b'PR': 4, b'PS': 4, b'PT': 4, b'PV': 4, b'PW': 4, b'PY': 4, b'QA': 4, b'QC': 4, b'QD': 4, b'QE': 2, b'QF': 4, b'QG': 4, b'QH': 4, b'QI': 4, b'QK': 3, b'QL': 4, b'QM': 4, b'QN': 4, b'QP': 4, b'QQ': 0, b'QR': 3, b'QS': 4, b'QT': 4, b'QV': 4, b'QW': 4, b'QY': 4, b'RA': 4, b'RC': 4, b'RD': 4, b'RE': 4, b'RF': 4, b'RG': 4, b'RH': 4, b'RI': 4, b'RK': 2, b'RL': 4, b'RM': 4, b'RN': 4, b'RP': 4, b'RQ': 3, b'RR': 0, b'RS': 4, b'RT': 4, b'RV': 4, b'RW': 4, b'RY': 4, b'SA': 3, b'SC': 4, b'SD': 4, b'SE': 4, b'SF': 4, b'SG': 4, b'SH': 4, b'SI': 4, b'SK': 4, b'SL': 4, b'SM': 4, b'SN': 3, b'SP': 4, b'SQ': 4, b'SR': 4, b'SS': 0, b'ST': 3, b'SV': 4, b'SW': 4, b'SY': 4, b'TA': 4, b'TC': 4, b'TD': 4, b'TE': 4, b'TF': 4, b'TG': 4, b'TH': 4, b'TI': 4, b'TK': 4, b'TL': 4, b'TM': 4, b'TN': 4, b'TP': 4, b'TQ': 4, b'TR': 4, b'TS': 3, b'TT': 0, b'TV': 4, b'TW': 4, b'TY': 4, b'VA': 4, b'VC': 4, b'VD': 4, b'VE': 4, b'VF': 4, b'VG': 4, b'VH': 4, b'VI': 1, b'VK': 4, b'VL': 3, b'VM': 3, b'VN': 4, b'VP': 4, b'VQ': 4, 
b'VR': 4, b'VS': 4, b'VT': 4, b'VV': 0, b'VW': 4, b'VY': 4, b'WA': 4, b'WC': 4, b'WD': 4, b'WE': 4, b'WF': 3, b'WG': 4, b'WH': 4, b'WI': 4, b'WK': 4, b'WL': 4, b'WM': 4, b'WN': 4, b'WP': 4, b'WQ': 4, b'WR': 4, b'WS': 4, b'WT': 4, b'WV': 4, b'WW': 0, b'WY': 2, b'YA': 4, b'YC': 4, b'YD': 4, b'YE': 4, b'YF': 1, b'YG': 4, b'YH': 2, b'YI': 4, b'YK': 4, b'YL': 4, b'YM': 4, b'YN': 4, b'YP': 4, b'YQ': 4, b'YR': 4, b'YS': 4, b'YT': 4, b'YV': 4, b'YW': 2, b'YY': 0} |
"""
Constants file
"""
ACCESS_TOKEN_KEY = 'access_token'
API_ID = 'API_ID'
APP_JSON_KEY = 'application/json'
AUTH0_AUDIENCE = 'AUTH0_AUDIENCE'
AUTH0_AUDIENCE_MNGNMT_API = 'AUTH0_AUDIENCE_MNGNMT_API'
AUTH0_CALLBACK_URL = 'AUTH0_CALLBACK_URL'
AUTH0_CLIENT_ID = 'AUTH0_CLIENT_ID'
AUTH0_CLIENT_SECRET = 'AUTH0_CLIENT_SECRET'
AUTH0_CLIENT_ID_MNGNMT_API = 'AUTH0_CLIENT_ID_MNGNMT_API'
AUTH0_CLIENT_SECRET_MNGNMT_API = 'AUTH0_CLIENT_SECRET_MNGNMT_API'
AUTH0_DOMAIN = 'AUTH0_DOMAIN'
AUTHORIZATION_CODE_KEY = 'authorization_code'
CLIENT_ID_KEY = 'client_id'
CLIENT_SECRET_KEY = 'client_secret'
CLIENTS_PAYLOAD = 'clients'
CODE_KEY = 'code'
CONTENT_TYPE_KEY = 'content-type'
GRANT_TYPE = 'GRANT_TYPE'
GRANT_TYPE_KEY = 'grant_type'
JWT_PAYLOAD = 'jwt_payload'
PROFILE_KEY = 'profile'
REDIRECT_URI_KEY = 'redirect_uri'
RULES_PAYLOAD = 'rules'
SECRET_KEY = 'ThisIsTheSecretKey'
| """
Constants file
"""
access_token_key = 'access_token'
api_id = 'API_ID'
app_json_key = 'application/json'
auth0_audience = 'AUTH0_AUDIENCE'
auth0_audience_mngnmt_api = 'AUTH0_AUDIENCE_MNGNMT_API'
auth0_callback_url = 'AUTH0_CALLBACK_URL'
auth0_client_id = 'AUTH0_CLIENT_ID'
auth0_client_secret = 'AUTH0_CLIENT_SECRET'
auth0_client_id_mngnmt_api = 'AUTH0_CLIENT_ID_MNGNMT_API'
auth0_client_secret_mngnmt_api = 'AUTH0_CLIENT_SECRET_MNGNMT_API'
auth0_domain = 'AUTH0_DOMAIN'
authorization_code_key = 'authorization_code'
client_id_key = 'client_id'
client_secret_key = 'client_secret'
clients_payload = 'clients'
code_key = 'code'
content_type_key = 'content-type'
grant_type = 'GRANT_TYPE'
grant_type_key = 'grant_type'
jwt_payload = 'jwt_payload'
profile_key = 'profile'
redirect_uri_key = 'redirect_uri'
rules_payload = 'rules'
secret_key = 'ThisIsTheSecretKey' |
ALL = [
"add_logentry",
"change_logentry",
"delete_logentry",
"view_logentry",
"can_export_data",
"can_import_historical",
"can_import_third_party",
"can_import_website",
"add_donation",
"change_donation",
"delete_donation",
"destroy_donation",
"generate_tax_receipt",
"view_donation",
"add_donor",
"change_donor",
"delete_donor",
"destroy_donor",
"view_donor",
"add_item",
"change_item",
"delete_item",
"destroy_item",
"update_status_item",
"update_value_item",
"view_item",
"add_itemdevice",
"change_itemdevice",
"delete_itemdevice",
"view_itemdevice",
"add_itemdevicetype",
"change_itemdevicetype",
"delete_itemdevicetype",
"view_itemdevicetype",
"add_group",
"change_group",
"delete_group",
"view_group",
"add_permission",
"change_permission",
"delete_permission",
"view_permission",
"add_user",
"change_user",
"delete_user",
"view_user",
"add_contenttype",
"change_contenttype",
"delete_contenttype",
"view_contenttype",
"add_session",
"change_session",
"delete_session",
"view_session",
]
FRONTLINE = [
'add_donation',
'change_donation',
'delete_donation',
'view_donation',
'add_donor',
'change_donor',
'delete_donor',
'view_donor',
'add_item',
'change_item',
'delete_item',
'view_item',
'add_itemdevice',
'change_itemdevice',
'delete_itemdevice',
'add_itemdevicetype',
'change_itemdevicetype',
'delete_itemdevicetype',
]
MANAGEMENT = FRONTLINE + [
'can_import_historical',
'can_import_third_party',
'can_import_website',
'can_export_data',
'generate_tax_receipt',
'update_status_item',
'update_value_item',
'generate_tax_receipt',
]
| all = ['add_logentry', 'change_logentry', 'delete_logentry', 'view_logentry', 'can_export_data', 'can_import_historical', 'can_import_third_party', 'can_import_website', 'add_donation', 'change_donation', 'delete_donation', 'destroy_donation', 'generate_tax_receipt', 'view_donation', 'add_donor', 'change_donor', 'delete_donor', 'destroy_donor', 'view_donor', 'add_item', 'change_item', 'delete_item', 'destroy_item', 'update_status_item', 'update_value_item', 'view_item', 'add_itemdevice', 'change_itemdevice', 'delete_itemdevice', 'view_itemdevice', 'add_itemdevicetype', 'change_itemdevicetype', 'delete_itemdevicetype', 'view_itemdevicetype', 'add_group', 'change_group', 'delete_group', 'view_group', 'add_permission', 'change_permission', 'delete_permission', 'view_permission', 'add_user', 'change_user', 'delete_user', 'view_user', 'add_contenttype', 'change_contenttype', 'delete_contenttype', 'view_contenttype', 'add_session', 'change_session', 'delete_session', 'view_session']
frontline = ['add_donation', 'change_donation', 'delete_donation', 'view_donation', 'add_donor', 'change_donor', 'delete_donor', 'view_donor', 'add_item', 'change_item', 'delete_item', 'view_item', 'add_itemdevice', 'change_itemdevice', 'delete_itemdevice', 'add_itemdevicetype', 'change_itemdevicetype', 'delete_itemdevicetype']
management = frontline + ['can_import_historical', 'can_import_third_party', 'can_import_website', 'can_export_data', 'generate_tax_receipt', 'update_status_item', 'update_value_item', 'generate_tax_receipt'] |
a= True
b = False
c= a and b
print("jika A={} and B={} = {}". format(a,b,c))
c= a or b
print("jika A={} or B={} = {}". format(a,b,c))
c= not a
print("jika A={} maka not A = {}". format) | a = True
b = False
c = a and b
print('jika A={} and B={} = {}'.format(a, b, c))
c = a or b
print('jika A={} or B={} = {}'.format(a, b, c))
c = not a
print('jika A={} maka not A = {}'.format(a, c)) |
class GameConstants:
# the maximum number of rounds, until the winner is decided by a coinflip
MAX_ROUNDS = 500
# the board size
BOARD_SIZE = 16
# the default seed
DEFAULT_SEED = 1337
| class Gameconstants:
max_rounds = 500
board_size = 16
default_seed = 1337 |
"""
DAY 24 : Convert to Roman No.
https://www.geeksforgeeks.org/converting-decimal-number-lying-between-1-to-3999-to-roman-numerals/
QUESTION : Given an integer n, your task is to complete the function convertToRoman which prints the
corresponding roman number of n. Various symbols and their values are given below.
I 1
V 5
X 10
L 50
C 100
D 500
M 1000
Expected Time Complexity: O(log10N)
Expected Auxiliary Space: O(log10N * 10)
Constraints:
1<=n<=3999
"""
def convertRoman(number):
roman_value = ""
while number:
if number >= 1000:
roman_value += 'M'
number = number-1000
elif number >= 900:
roman_value += 'CM'
number = number-900
elif number >= 500:
roman_value += 'D'
number = number-500
elif number >= 400:
roman_value += 'CD'
number = number-400
elif number >= 100:
roman_value += 'C'
number = number-100
elif number >= 90:
roman_value += 'XC'
number = number-90
elif number >= 50:
roman_value += 'L'
number = number-50
elif number >= 40:
roman_value += 'XL'
number = number-40
elif number >= 10:
roman_value += 'X'
number = number-10
elif number >= 9:
roman_value += 'IX'
number = number-9
elif number >= 5:
roman_value += 'V'
number = number-5
elif number >= 4:
roman_value += 'IV'
number = number-4
elif number >= 1:
roman_value += 'I'
number = number-1
return roman_value
| """
DAY 24 : Convert to Roman No.
https://www.geeksforgeeks.org/converting-decimal-number-lying-between-1-to-3999-to-roman-numerals/
QUESTION : Given an integer n, your task is to complete the function convertToRoman which prints the
corresponding roman number of n. Various symbols and their values are given below.
I 1
V 5
X 10
L 50
C 100
D 500
M 1000
Expected Time Complexity: O(log10N)
Expected Auxiliary Space: O(log10N * 10)
Constraints:
1<=n<=3999
"""
def convert_roman(number):
roman_value = ''
while number:
if number >= 1000:
roman_value += 'M'
number = number - 1000
elif number >= 900:
roman_value += 'CM'
number = number - 900
elif number >= 500:
roman_value += 'D'
number = number - 500
elif number >= 400:
roman_value += 'CD'
number = number - 400
elif number >= 100:
roman_value += 'C'
number = number - 100
elif number >= 90:
roman_value += 'XC'
number = number - 90
elif number >= 50:
roman_value += 'L'
number = number - 50
elif number >= 40:
roman_value += 'XL'
number = number - 40
elif number >= 10:
roman_value += 'X'
number = number - 10
elif number >= 9:
roman_value += 'IX'
number = number - 9
elif number >= 5:
roman_value += 'V'
number = number - 5
elif number >= 4:
roman_value += 'IV'
number = number - 4
elif number >= 1:
roman_value += 'I'
number = number - 1
return roman_value |
# read file function
def read_file():
file_data = open('./python_examples/file_io/input_file.txt')
for single_line in file_data:
print(single_line, end='')
if __name__ == "__main__":
read_file()
| def read_file():
file_data = open('./python_examples/file_io/input_file.txt')
for single_line in file_data:
print(single_line, end='')
if __name__ == '__main__':
read_file() |
{
"variables": {
# Be sure to create OPENNI2 and NITE2 system vars
"OPENNI2%": "$(OPENNI2)",
"NITE2%": "$(NITE2)"
},
"targets": [
{
"target_name":"copy-files",
"conditions": [
[ "OS=='win'", {
"copies": [
{ "files": [ "<(OPENNI2)/Redist/OpenNI2/Drivers/Kinect.dll",
"<(OPENNI2)/Redist/OpenNI2/Drivers/OniFile.dll",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.dll",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.ini",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.dll",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.ini"],
"destination": "<(module_root_dir)/build/Release/OpenNI2/Drivers/"
},
# If NITE folder is not placed at root of project, it cannot be accessed
# go up through node_modules to project root and drop in NiTE2 folder
{ "files": [ "<(NITE2)/Redist/NiTE2/Data/lbsdata.idx",
"<(NITE2)/Redist/NiTE2/Data/lbsdata.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam1.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam2.lbd"],
"destination": "<(module_root_dir)/../../NiTE2/Data/"
},
{ "files": [ "<(NITE2)/Redist/NiTE2/FeatureExtraction.ini",
"<(NITE2)/Redist/NiTE2/h.dat",
"<(NITE2)/Redist/NiTE2/HandAlgorithms.ini",
"<(NITE2)/Redist/NiTE2/s.dat"],
"destination": "<(module_root_dir)/../../NiTE2/"
},
{ "files": [ "<(OPENNI2)/Redist/OpenNI2.dll",
"<(OPENNI2)/Redist/OpenNI.ini",
"<(NITE2)/Redist/NiTE2.dll",
"<(NITE2)/Redist/NiTE.ini" ],
"destination": "<(module_root_dir)/build/Release/"
}
],
"libraries": ["-l<(OPENNI2)/Lib/OpenNI2", "-l<(NITE2)/Lib/NiTE2"]
}],
["OS=='mac'", {
"copies": [
{ "files": [ "<(OPENNI2)/Redist/OpenNI2/Drivers/libOniFile.dylib",
"<(OPENNI2)/Redist/OpenNI2/Drivers/libPS1080.dylib",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.ini",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.ini"],
"destination": "<(module_root_dir)/build/Release/OpenNI2/Drivers/"
},
# If NITE folder is not placed at root of project, it cannot be accessed
# go up through node_modules to project root and drop in NiTE2 folder
{ "files": [ "<(NITE2)/Redist/NiTE2/Data/lbsdata.idx",
"<(NITE2)/Redist/NiTE2/Data/lbsdata.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam1.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam2.lbd"],
"destination": "<(module_root_dir)/../../NiTE2/Data/"
},
{ "files": [ "<(NITE2)/Redist/NiTE2/FeatureExtraction.ini",
"<(NITE2)/Redist/NiTE2/h.dat",
"<(NITE2)/Redist/NiTE2/HandAlgorithms.ini",
"<(NITE2)/Redist/NiTE2/s.dat"],
"destination": "<(module_root_dir)/../../NiTE2/"
},
{ "files": [ "<(OPENNI2)/Redist/libOpenNI2.dylib",
"<(OPENNI2)/Redist/OpenNI.ini",
"<(NITE2)/Redist/libNiTE2.dylib",
"<(NITE2)/Redist/NiTE.ini" ],
"destination": "<(module_root_dir)/build/Release/"
}
]
}],
["OS=='linux'", {
"copies": [
{ "files": [ "<(OPENNI2)/Redist/OpenNI2/Drivers/libOniFile.so",
"<(OPENNI2)/Redist/OpenNI2/Drivers/libPS1080.so",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.ini",
"<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.ini"],
"destination": "<(module_root_dir)/build/Release/OpenNI2/Drivers/"
},
# If NITE folder is not placed at root of project, it cannot be accessed
# go up through node_modules to project root and drop in NiTE2 folder
{ "files": [ "<(NITE2)/Redist/NiTE2/Data/lbsdata.idx",
"<(NITE2)/Redist/NiTE2/Data/lbsdata.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam1.lbd",
"<(NITE2)/Redist/NiTE2/Data/lbsparam2.lbd"],
"destination": "<(module_root_dir)/../../NiTE2/Data/"
},
{ "files": [ "<(NITE2)/Redist/NiTE2/FeatureExtraction.ini",
"<(NITE2)/Redist/NiTE2/h.dat",
"<(NITE2)/Redist/NiTE2/HandAlgorithms.ini",
"<(NITE2)/Redist/NiTE2/s.dat"],
"destination": "<(module_root_dir)/../../NiTE2/"
},
{ "files": [ "<(OPENNI2)/Redist/libOpenNI2.so",
"<(OPENNI2)/Redist/OpenNI.ini",
"<(NITE2)/Redist/libNiTE2.so",
"<(NITE2)/Redist/NiTE.ini" ],
"destination": "<(module_root_dir)/build/Release/"
}
]
}]
]
},
{
"target_name": "nuimotion",
"sources": [
"src/Main.cpp",
"src/enums/EnumMapping.cpp",
"src/gestures/GestureRecognizer.cpp",
"src/gestures/Swipe.cpp",
"src/gestures/Wave.cpp" ],
"conditions": [
[ "OS=='win'", {
"libraries": ["-l<(OPENNI2)/Lib/OpenNI2", "-l<(NITE2)/Lib/NiTE2"]
}],
["OS=='mac'", {
"libraries": ["<(OPENNI2)/Tools/libOpenNI2.dylib", "<(NITE2)/Redist/libNiTE2.dylib"]
}],
["OS=='linux'", {
"libraries": ["<(OPENNI2)/Tools/libOpenNI2.so", "<(NITE2)/Redist/libNiTE2.so"]
}],
],
"include_dirs": [ "./src/enums", "./build/Release", "<(OPENNI2)/Include/", "<(NITE2)/Include/" ],
},
{
"target_name": "nuimotion-depth",
"sources": [
"src/Depth.cpp",
"src/enums/EnumMapping.cpp",
"src/gestures/GestureRecognizer.cpp",
"src/gestures/Swipe.cpp",
"src/gestures/Wave.cpp" ],
"conditions": [
[ "OS=='win'", {
"libraries": ["-l<(OPENNI2)/Lib/OpenNI2"]
}],
["OS=='mac'", {
"libraries": ["<(OPENNI2)/Tools/libOpenNI2.dylib"]
}],
["OS=='linux'", {
"libraries": ["<(OPENNI2)/Tools/libOpenNI2.so"]
}],
],
"include_dirs": [ "<(OPENNI2)/Include/"]
}
]
}
| {'variables': {'OPENNI2%': '$(OPENNI2)', 'NITE2%': '$(NITE2)'}, 'targets': [{'target_name': 'copy-files', 'conditions': [["OS=='win'", {'copies': [{'files': ['<(OPENNI2)/Redist/OpenNI2/Drivers/Kinect.dll', '<(OPENNI2)/Redist/OpenNI2/Drivers/OniFile.dll', '<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.dll', '<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.ini', '<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.dll', '<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.ini'], 'destination': '<(module_root_dir)/build/Release/OpenNI2/Drivers/'}, {'files': ['<(NITE2)/Redist/NiTE2/Data/lbsdata.idx', '<(NITE2)/Redist/NiTE2/Data/lbsdata.lbd', '<(NITE2)/Redist/NiTE2/Data/lbsparam1.lbd', '<(NITE2)/Redist/NiTE2/Data/lbsparam2.lbd'], 'destination': '<(module_root_dir)/../../NiTE2/Data/'}, {'files': ['<(NITE2)/Redist/NiTE2/FeatureExtraction.ini', '<(NITE2)/Redist/NiTE2/h.dat', '<(NITE2)/Redist/NiTE2/HandAlgorithms.ini', '<(NITE2)/Redist/NiTE2/s.dat'], 'destination': '<(module_root_dir)/../../NiTE2/'}, {'files': ['<(OPENNI2)/Redist/OpenNI2.dll', '<(OPENNI2)/Redist/OpenNI.ini', '<(NITE2)/Redist/NiTE2.dll', '<(NITE2)/Redist/NiTE.ini'], 'destination': '<(module_root_dir)/build/Release/'}], 'libraries': ['-l<(OPENNI2)/Lib/OpenNI2', '-l<(NITE2)/Lib/NiTE2']}], ["OS=='mac'", {'copies': [{'files': ['<(OPENNI2)/Redist/OpenNI2/Drivers/libOniFile.dylib', '<(OPENNI2)/Redist/OpenNI2/Drivers/libPS1080.dylib', '<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.ini', '<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.ini'], 'destination': '<(module_root_dir)/build/Release/OpenNI2/Drivers/'}, {'files': ['<(NITE2)/Redist/NiTE2/Data/lbsdata.idx', '<(NITE2)/Redist/NiTE2/Data/lbsdata.lbd', '<(NITE2)/Redist/NiTE2/Data/lbsparam1.lbd', '<(NITE2)/Redist/NiTE2/Data/lbsparam2.lbd'], 'destination': '<(module_root_dir)/../../NiTE2/Data/'}, {'files': ['<(NITE2)/Redist/NiTE2/FeatureExtraction.ini', '<(NITE2)/Redist/NiTE2/h.dat', '<(NITE2)/Redist/NiTE2/HandAlgorithms.ini', '<(NITE2)/Redist/NiTE2/s.dat'], 'destination': '<(module_root_dir)/../../NiTE2/'}, {'files': ['<(OPENNI2)/Redist/libOpenNI2.dylib', '<(OPENNI2)/Redist/OpenNI.ini', '<(NITE2)/Redist/libNiTE2.dylib', '<(NITE2)/Redist/NiTE.ini'], 'destination': '<(module_root_dir)/build/Release/'}]}], ["OS=='linux'", {'copies': [{'files': ['<(OPENNI2)/Redist/OpenNI2/Drivers/libOniFile.so', '<(OPENNI2)/Redist/OpenNI2/Drivers/libPS1080.so', '<(OPENNI2)/Redist/OpenNI2/Drivers/PS1080.ini', '<(OPENNI2)/Redist/OpenNI2/Drivers/PSLink.ini'], 'destination': '<(module_root_dir)/build/Release/OpenNI2/Drivers/'}, {'files': ['<(NITE2)/Redist/NiTE2/Data/lbsdata.idx', '<(NITE2)/Redist/NiTE2/Data/lbsdata.lbd', '<(NITE2)/Redist/NiTE2/Data/lbsparam1.lbd', '<(NITE2)/Redist/NiTE2/Data/lbsparam2.lbd'], 'destination': '<(module_root_dir)/../../NiTE2/Data/'}, {'files': ['<(NITE2)/Redist/NiTE2/FeatureExtraction.ini', '<(NITE2)/Redist/NiTE2/h.dat', '<(NITE2)/Redist/NiTE2/HandAlgorithms.ini', '<(NITE2)/Redist/NiTE2/s.dat'], 'destination': '<(module_root_dir)/../../NiTE2/'}, {'files': ['<(OPENNI2)/Redist/libOpenNI2.so', '<(OPENNI2)/Redist/OpenNI.ini', '<(NITE2)/Redist/libNiTE2.so', '<(NITE2)/Redist/NiTE.ini'], 'destination': '<(module_root_dir)/build/Release/'}]}]]}, {'target_name': 'nuimotion', 'sources': ['src/Main.cpp', 'src/enums/EnumMapping.cpp', 'src/gestures/GestureRecognizer.cpp', 'src/gestures/Swipe.cpp', 'src/gestures/Wave.cpp'], 'conditions': [["OS=='win'", {'libraries': ['-l<(OPENNI2)/Lib/OpenNI2', '-l<(NITE2)/Lib/NiTE2']}], ["OS=='mac'", {'libraries': ['<(OPENNI2)/Tools/libOpenNI2.dylib', '<(NITE2)/Redist/libNiTE2.dylib']}], ["OS=='linux'", 
{'libraries': ['<(OPENNI2)/Tools/libOpenNI2.so', '<(NITE2)/Redist/libNiTE2.so']}]], 'include_dirs': ['./src/enums', './build/Release', '<(OPENNI2)/Include/', '<(NITE2)/Include/']}, {'target_name': 'nuimotion-depth', 'sources': ['src/Depth.cpp', 'src/enums/EnumMapping.cpp', 'src/gestures/GestureRecognizer.cpp', 'src/gestures/Swipe.cpp', 'src/gestures/Wave.cpp'], 'conditions': [["OS=='win'", {'libraries': ['-l<(OPENNI2)/Lib/OpenNI2']}], ["OS=='mac'", {'libraries': ['<(OPENNI2)/Tools/libOpenNI2.dylib']}], ["OS=='linux'", {'libraries': ['<(OPENNI2)/Tools/libOpenNI2.so']}]], 'include_dirs': ['<(OPENNI2)/Include/']}]} |
def is_palindrome(number):
if int(number)%15 == 0:
rev = number[::-1]
return True if rev == number else False
else: return False
num = input("Enter a number to check palindrome divisible by 3 and 5: ")
if is_palindrome(number=num):
print(num, "is a Palindrome divisible by 3 and 5")
else:
print(num, "is not Palindrome divisible by 3 and 5") | def is_palindrome(number):
if int(number) % 15 == 0:
rev = number[::-1]
return True if rev == number else False
else:
return False
num = input('Enter a number to check palindrome divisible by 3 and 5: ')
if is_palindrome(number=num):
print(num, 'is a Palindrome divisible by 3 and 5')
else:
print(num, 'is not Palindrome divisible by 3 and 5') |
# Container With Most Water
# https://www.interviewbit.com/problems/container-with-most-water/
#
# Given n non-negative integers a1, a2, ..., an,
# where each represents a point at coordinate (i, ai).
# 'n' vertical lines are drawn such that the two endpoints of line i is at (i, ai) and (i, 0).
#
# Find two lines, which together with x-axis forms a container, such that the container contains the most water.
#
# Your program should return an integer which corresponds to the maximum area of water that can be
# contained ( Yes, we know maximum area instead of maximum volume sounds weird. But this is 2D plane
# we are working with for simplicity ).
#
# Note: You may not slant the container.
#
# Example :
#
# Input : [1, 5, 4, 3]
# Output : 6
#
# Explanation : 5 and 3 are distance 2 apart. So size of the base = 2. Height of container = min(5, 3) = 3.
# So total area = 3 * 2 = 6
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
class Solution:
# @param A : list of integers
# @return an integer
def maxArea(self, A):
i, j = 0, len(A) - 1
area = 0
while i < j:
area = max(area, (j - i) * min(A[i], A[j]))
if A[i] < A[j]:
i += 1
else:
j -= 1
return area
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
if __name__ == "__main__":
s = Solution()
print(s.maxArea([1, 5, 4, 3])) | class Solution:
def max_area(self, A):
(i, j) = (0, len(A) - 1)
area = 0
while i < j:
area = max(area, (j - i) * min(A[i], A[j]))
if A[i] < A[j]:
i += 1
else:
j -= 1
return area
if __name__ == '__main__':
s = Solution()
print(s.max_area([1, 5, 4, 3])) |
class Mazmorra():
def __init__(self):
self.__salas = []
@property
def salas(self):
return self.__salas
def addSala(self, sala):
self.__salas.append(sala)
| class Mazmorra:
def __init__(self):
self.__salas = []
@property
def salas(self):
return self.__salas
def add_sala(self, sala):
self.__salas.append(sala) |
a = 33
b = 200
if b > a:
pass | a = 33
b = 200
if b > a:
pass |
#
# PySNMP MIB module CISCO-WRED-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-WRED-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:21:31 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
Counter32, Integer32, IpAddress, ModuleIdentity, Gauge32, NotificationType, Counter64, Bits, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, TimeTicks, Unsigned32, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Integer32", "IpAddress", "ModuleIdentity", "Gauge32", "NotificationType", "Counter64", "Bits", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "TimeTicks", "Unsigned32", "MibIdentifier")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
ciscoWredMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 83))
ciscoWredMIB.setRevisions(('1997-07-18 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ciscoWredMIB.setRevisionsDescriptions(('Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoWredMIB.setLastUpdated('9707180000Z')
if mibBuilder.loadTexts: ciscoWredMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoWredMIB.setContactInfo(' Cisco Systems Customer Service Postal: 170 W. Tasman Drive San Jose, CA 95134-1706 USA Tel: +1 800 553-NETS E-mail: tgrennan-group@cisco.com')
if mibBuilder.loadTexts: ciscoWredMIB.setDescription('Cisco WRED MIB - Overview Cisco Weighted Random Early Detection/Drop is a method which avoids traffic congestion on an output interface. Congestion is detected by computing the average output queue size against preset thresholds. WRED support are on the IP fast switching and IP flow switching only. It does not apply to IP process switching. This MIB incorporates objects from the Cisco WRED line interfaces. Its purpose is to provide Weighted Random Early Detection/Drop packet configuration and packet filtering information. WRED are configured/enabled through the CLI command. Defaults configuration values are assigned and values can be modified through additional CLI commands. ')
ciscoWredMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 1))
cwredConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1))
cwredStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2))
cwredConfigGlobTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 1), )
if mibBuilder.loadTexts: cwredConfigGlobTable.setStatus('current')
if mibBuilder.loadTexts: cwredConfigGlobTable.setDescription('A table of WRED global configuration variables.')
cwredConfigGlobEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: cwredConfigGlobEntry.setStatus('current')
if mibBuilder.loadTexts: cwredConfigGlobEntry.setDescription('A collection of configuration entries on this interface. Entries are created and deleted via red command line interface.')
cwredConfigGlobQueueWeight = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwredConfigGlobQueueWeight.setStatus('current')
if mibBuilder.loadTexts: cwredConfigGlobQueueWeight.setDescription("The decay factor for the queue average calculation. Numbers are 2's exponent up to 16. The smaller the number, the faster it decays.")
cwredConfigPrecedTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2), )
if mibBuilder.loadTexts: cwredConfigPrecedTable.setStatus('current')
if mibBuilder.loadTexts: cwredConfigPrecedTable.setDescription('A table of WRED configuration values with respect to the IP precedence of packets.')
cwredConfigPrecedEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WRED-MIB", "cwredConfigPrecedPrecedence"))
if mibBuilder.loadTexts: cwredConfigPrecedEntry.setStatus('current')
if mibBuilder.loadTexts: cwredConfigPrecedEntry.setDescription('WRED IP precedence configuration table entry. Entries are created and deleted via red command interface.')
cwredConfigPrecedPrecedence = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)))
if mibBuilder.loadTexts: cwredConfigPrecedPrecedence.setStatus('current')
if mibBuilder.loadTexts: cwredConfigPrecedPrecedence.setDescription('The IP precedence of this entry.')
cwredConfigPrecedMinDepthThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2, 1, 2), Integer32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cwredConfigPrecedMinDepthThreshold.setStatus('current')
if mibBuilder.loadTexts: cwredConfigPrecedMinDepthThreshold.setDescription('The average queue depth at which WRED begins to drop packets.')
cwredConfigPrecedMaxDepthThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2, 1, 3), Integer32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cwredConfigPrecedMaxDepthThreshold.setStatus('current')
if mibBuilder.loadTexts: cwredConfigPrecedMaxDepthThreshold.setDescription('The average queue depth at which WRED may begin to drop all packets.')
cwredConfigPrecedPktsDropFraction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwredConfigPrecedPktsDropFraction.setStatus('current')
if mibBuilder.loadTexts: cwredConfigPrecedPktsDropFraction.setDescription('The fraction of packets to be dropped when the average queue depth is above cwredConfigPrecedMinDepthThreshold but below cwredConfigPrecedMaxDepthThreshold.')
cwredQueueTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 1), )
if mibBuilder.loadTexts: cwredQueueTable.setStatus('current')
if mibBuilder.loadTexts: cwredQueueTable.setDescription('A table of WRED queue status variable.')
cwredQueueEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 1, 1), )
cwredConfigGlobEntry.registerAugmentions(("CISCO-WRED-MIB", "cwredQueueEntry"))
cwredQueueEntry.setIndexNames(*cwredConfigGlobEntry.getIndexNames())
if mibBuilder.loadTexts: cwredQueueEntry.setStatus('current')
if mibBuilder.loadTexts: cwredQueueEntry.setDescription('A table of WRED queue status variable entry. Entries are created and deleted via the red command line interface.')
cwredQueueAverage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 1, 1, 1), Gauge32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cwredQueueAverage.setStatus('current')
if mibBuilder.loadTexts: cwredQueueAverage.setDescription('The computed queue average length.')
cwredQueueDepth = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 1, 1, 2), Gauge32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cwredQueueDepth.setStatus('current')
if mibBuilder.loadTexts: cwredQueueDepth.setDescription('The number of buffers/particles currently withheld in queue.')
cwredStatTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 2), )
if mibBuilder.loadTexts: cwredStatTable.setStatus('current')
if mibBuilder.loadTexts: cwredStatTable.setDescription('A table of WRED status information with respect to the IP precedence of packets.')
cwredStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 2, 1), )
cwredConfigPrecedEntry.registerAugmentions(("CISCO-WRED-MIB", "cwredStatEntry"))
cwredStatEntry.setIndexNames(*cwredConfigPrecedEntry.getIndexNames())
if mibBuilder.loadTexts: cwredStatEntry.setStatus('current')
if mibBuilder.loadTexts: cwredStatEntry.setDescription('The WRED interface status information entry. Entries are created and deleted via the red red command line interface.')
cwredStatSwitchedPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 2, 1, 1), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cwredStatSwitchedPkts.setStatus('current')
if mibBuilder.loadTexts: cwredStatSwitchedPkts.setDescription('The number of packets output by WRED.')
cwredStatRandomFilteredPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 2, 1, 2), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cwredStatRandomFilteredPkts.setStatus('current')
if mibBuilder.loadTexts: cwredStatRandomFilteredPkts.setDescription('The number of packets filtered/dropped due to average queue length exceeds cwredConfigMinDepthThreshold and meet a defined random drop policy.')
cwredStatMaxFilteredPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 2, 1, 3), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cwredStatMaxFilteredPkts.setStatus('current')
if mibBuilder.loadTexts: cwredStatMaxFilteredPkts.setDescription('The number of packets filtered/dropped due to average queue length exceeds cwredConfigMaxDepthThreshold.')
ciscoWredMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 3))
ciscoWredMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 3, 1))
ciscoWredMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 3, 2))
ciscoWredMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 83, 3, 1, 1)).setObjects(("CISCO-WRED-MIB", "ciscoWredMIBGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoWredMIBCompliance = ciscoWredMIBCompliance.setStatus('current')
if mibBuilder.loadTexts: ciscoWredMIBCompliance.setDescription('The compliance statement for entities which implement the WRED on a Cisco RSP platform.')
ciscoWredMIBGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 83, 3, 2, 1)).setObjects(("CISCO-WRED-MIB", "cwredConfigGlobQueueWeight"), ("CISCO-WRED-MIB", "cwredConfigPrecedMinDepthThreshold"), ("CISCO-WRED-MIB", "cwredConfigPrecedMaxDepthThreshold"), ("CISCO-WRED-MIB", "cwredConfigPrecedPktsDropFraction"), ("CISCO-WRED-MIB", "cwredQueueAverage"), ("CISCO-WRED-MIB", "cwredQueueDepth"), ("CISCO-WRED-MIB", "cwredStatSwitchedPkts"), ("CISCO-WRED-MIB", "cwredStatRandomFilteredPkts"), ("CISCO-WRED-MIB", "cwredStatMaxFilteredPkts"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoWredMIBGroup = ciscoWredMIBGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoWredMIBGroup.setDescription('A collection of objects providing WRED monitoring.')
mibBuilder.exportSymbols("CISCO-WRED-MIB", cwredConfigPrecedEntry=cwredConfigPrecedEntry, ciscoWredMIBCompliance=ciscoWredMIBCompliance, cwredConfigPrecedTable=cwredConfigPrecedTable, ciscoWredMIBGroups=ciscoWredMIBGroups, cwredStatSwitchedPkts=cwredStatSwitchedPkts, cwredQueueEntry=cwredQueueEntry, cwredStatMaxFilteredPkts=cwredStatMaxFilteredPkts, cwredConfigPrecedMinDepthThreshold=cwredConfigPrecedMinDepthThreshold, cwredStatEntry=cwredStatEntry, cwredQueueTable=cwredQueueTable, ciscoWredMIBGroup=ciscoWredMIBGroup, cwredConfigGlobEntry=cwredConfigGlobEntry, cwredConfig=cwredConfig, cwredConfigGlobTable=cwredConfigGlobTable, ciscoWredMIB=ciscoWredMIB, cwredQueueDepth=cwredQueueDepth, ciscoWredMIBConformance=ciscoWredMIBConformance, cwredQueueAverage=cwredQueueAverage, cwredStatTable=cwredStatTable, PYSNMP_MODULE_ID=ciscoWredMIB, ciscoWredMIBObjects=ciscoWredMIBObjects, cwredConfigPrecedPrecedence=cwredConfigPrecedPrecedence, cwredConfigPrecedMaxDepthThreshold=cwredConfigPrecedMaxDepthThreshold, cwredStats=cwredStats, cwredStatRandomFilteredPkts=cwredStatRandomFilteredPkts, ciscoWredMIBCompliances=ciscoWredMIBCompliances, cwredConfigPrecedPktsDropFraction=cwredConfigPrecedPktsDropFraction, cwredConfigGlobQueueWeight=cwredConfigGlobQueueWeight)
| (octet_string, object_identifier, integer) = mibBuilder.importSymbols('ASN1', 'OctetString', 'ObjectIdentifier', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(constraints_union, constraints_intersection, single_value_constraint, value_range_constraint, value_size_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ConstraintsUnion', 'ConstraintsIntersection', 'SingleValueConstraint', 'ValueRangeConstraint', 'ValueSizeConstraint')
(cisco_mgmt,) = mibBuilder.importSymbols('CISCO-SMI', 'ciscoMgmt')
(if_index,) = mibBuilder.importSymbols('IF-MIB', 'ifIndex')
(module_compliance, notification_group, object_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup', 'ObjectGroup')
(counter32, integer32, ip_address, module_identity, gauge32, notification_type, counter64, bits, object_identity, mib_scalar, mib_table, mib_table_row, mib_table_column, iso, time_ticks, unsigned32, mib_identifier) = mibBuilder.importSymbols('SNMPv2-SMI', 'Counter32', 'Integer32', 'IpAddress', 'ModuleIdentity', 'Gauge32', 'NotificationType', 'Counter64', 'Bits', 'ObjectIdentity', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'iso', 'TimeTicks', 'Unsigned32', 'MibIdentifier')
(display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention')
cisco_wred_mib = module_identity((1, 3, 6, 1, 4, 1, 9, 9, 83))
ciscoWredMIB.setRevisions(('1997-07-18 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts:
ciscoWredMIB.setRevisionsDescriptions(('Initial version of this MIB module.',))
if mibBuilder.loadTexts:
ciscoWredMIB.setLastUpdated('9707180000Z')
if mibBuilder.loadTexts:
ciscoWredMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts:
ciscoWredMIB.setContactInfo(' Cisco Systems Customer Service Postal: 170 W. Tasman Drive San Jose, CA 95134-1706 USA Tel: +1 800 553-NETS E-mail: tgrennan-group@cisco.com')
if mibBuilder.loadTexts:
ciscoWredMIB.setDescription('Cisco WRED MIB - Overview Cisco Weighted Random Early Detection/Drop is a method which avoids traffic congestion on an output interface. Congestion is detected by computing the average output queue size against preset thresholds. WRED support are on the IP fast switching and IP flow switching only. It does not apply to IP process switching. This MIB incorporates objects from the Cisco WRED line interfaces. Its purpose is to provide Weighted Random Early Detection/Drop packet configuration and packet filtering information. WRED are configured/enabled through the CLI command. Defaults configuration values are assigned and values can be modified through additional CLI commands. ')
cisco_wred_mib_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 1))
cwred_config = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1))
cwred_stats = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2))
cwred_config_glob_table = mib_table((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 1))
if mibBuilder.loadTexts:
cwredConfigGlobTable.setStatus('current')
if mibBuilder.loadTexts:
cwredConfigGlobTable.setDescription('A table of WRED global configuration variables.')
cwred_config_glob_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 1, 1)).setIndexNames((0, 'IF-MIB', 'ifIndex'))
if mibBuilder.loadTexts:
cwredConfigGlobEntry.setStatus('current')
if mibBuilder.loadTexts:
cwredConfigGlobEntry.setDescription('A collection of configuration entries on this interface. Entries are created and deleted via red command line interface.')
cwred_config_glob_queue_weight = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 1, 1, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
cwredConfigGlobQueueWeight.setStatus('current')
if mibBuilder.loadTexts:
cwredConfigGlobQueueWeight.setDescription("The decay factor for the queue average calculation. Numbers are 2's exponent up to 16. The smaller the number, the faster it decays.")
cwred_config_preced_table = mib_table((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2))
if mibBuilder.loadTexts:
cwredConfigPrecedTable.setStatus('current')
if mibBuilder.loadTexts:
cwredConfigPrecedTable.setDescription('A table of WRED configuration values with respect to the IP precedence of packets.')
cwred_config_preced_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2, 1)).setIndexNames((0, 'IF-MIB', 'ifIndex'), (0, 'CISCO-WRED-MIB', 'cwredConfigPrecedPrecedence'))
if mibBuilder.loadTexts:
cwredConfigPrecedEntry.setStatus('current')
if mibBuilder.loadTexts:
cwredConfigPrecedEntry.setDescription('WRED IP precedence configuration table entry. Entries are created and deleted via red command interface.')
cwred_config_preced_precedence = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(0, 7)))
if mibBuilder.loadTexts:
cwredConfigPrecedPrecedence.setStatus('current')
if mibBuilder.loadTexts:
cwredConfigPrecedPrecedence.setDescription('The IP precedence of this entry.')
cwred_config_preced_min_depth_threshold = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2, 1, 2), integer32()).setUnits('packets').setMaxAccess('readonly')
if mibBuilder.loadTexts:
cwredConfigPrecedMinDepthThreshold.setStatus('current')
if mibBuilder.loadTexts:
cwredConfigPrecedMinDepthThreshold.setDescription('The average queue depth at which WRED begins to drop packets.')
cwred_config_preced_max_depth_threshold = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2, 1, 3), integer32()).setUnits('packets').setMaxAccess('readonly')
if mibBuilder.loadTexts:
cwredConfigPrecedMaxDepthThreshold.setStatus('current')
if mibBuilder.loadTexts:
cwredConfigPrecedMaxDepthThreshold.setDescription('The average queue depth at which WRED may begin to drop all packets.')
cwred_config_preced_pkts_drop_fraction = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 1, 2, 1, 4), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
cwredConfigPrecedPktsDropFraction.setStatus('current')
if mibBuilder.loadTexts:
cwredConfigPrecedPktsDropFraction.setDescription('The fraction of packets to be dropped when the average queue depth is above cwredConfigPrecedMinDepthThreshold but below cwredConfigPrecedMaxDepthThreshold.')
cwred_queue_table = mib_table((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 1))
if mibBuilder.loadTexts:
cwredQueueTable.setStatus('current')
if mibBuilder.loadTexts:
cwredQueueTable.setDescription('A table of WRED queue status variable.')
cwred_queue_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 1, 1))
cwredConfigGlobEntry.registerAugmentions(('CISCO-WRED-MIB', 'cwredQueueEntry'))
cwredQueueEntry.setIndexNames(*cwredConfigGlobEntry.getIndexNames())
if mibBuilder.loadTexts:
cwredQueueEntry.setStatus('current')
if mibBuilder.loadTexts:
cwredQueueEntry.setDescription('A table of WRED queue status variable entry. Entries are created and deleted via the red command line interface.')
cwred_queue_average = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 1, 1, 1), gauge32()).setUnits('packets').setMaxAccess('readonly')
if mibBuilder.loadTexts:
cwredQueueAverage.setStatus('current')
if mibBuilder.loadTexts:
cwredQueueAverage.setDescription('The computed queue average length.')
cwred_queue_depth = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 1, 1, 2), gauge32()).setUnits('packets').setMaxAccess('readonly')
if mibBuilder.loadTexts:
cwredQueueDepth.setStatus('current')
if mibBuilder.loadTexts:
cwredQueueDepth.setDescription('The number of buffers/particles currently withheld in queue.')
cwred_stat_table = mib_table((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 2))
if mibBuilder.loadTexts:
cwredStatTable.setStatus('current')
if mibBuilder.loadTexts:
cwredStatTable.setDescription('A table of WRED status information with respect to the IP precedence of packets.')
cwred_stat_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 2, 1))
cwredConfigPrecedEntry.registerAugmentions(('CISCO-WRED-MIB', 'cwredStatEntry'))
cwredStatEntry.setIndexNames(*cwredConfigPrecedEntry.getIndexNames())
if mibBuilder.loadTexts:
cwredStatEntry.setStatus('current')
if mibBuilder.loadTexts:
cwredStatEntry.setDescription('The WRED interface status information entry. Entries are created and deleted via the red command line interface.')
cwred_stat_switched_pkts = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 2, 1, 1), counter32()).setUnits('packets').setMaxAccess('readonly')
if mibBuilder.loadTexts:
cwredStatSwitchedPkts.setStatus('current')
if mibBuilder.loadTexts:
cwredStatSwitchedPkts.setDescription('The number of packets output by WRED.')
cwred_stat_random_filtered_pkts = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 2, 1, 2), counter32()).setUnits('packets').setMaxAccess('readonly')
if mibBuilder.loadTexts:
cwredStatRandomFilteredPkts.setStatus('current')
if mibBuilder.loadTexts:
cwredStatRandomFilteredPkts.setDescription('The number of packets filtered/dropped because the average queue length exceeds cwredConfigMinDepthThreshold and the packets meet a defined random drop policy.')
cwred_stat_max_filtered_pkts = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 83, 1, 2, 2, 1, 3), counter32()).setUnits('packets').setMaxAccess('readonly')
if mibBuilder.loadTexts:
cwredStatMaxFilteredPkts.setStatus('current')
if mibBuilder.loadTexts:
cwredStatMaxFilteredPkts.setDescription('The number of packets filtered/dropped because the average queue length exceeds cwredConfigMaxDepthThreshold.')
cisco_wred_mib_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 3))
cisco_wred_mib_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 3, 1))
cisco_wred_mib_groups = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 83, 3, 2))
cisco_wred_mib_compliance = module_compliance((1, 3, 6, 1, 4, 1, 9, 9, 83, 3, 1, 1)).setObjects(('CISCO-WRED-MIB', 'ciscoWredMIBGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cisco_wred_mib_compliance = ciscoWredMIBCompliance.setStatus('current')
if mibBuilder.loadTexts:
ciscoWredMIBCompliance.setDescription('The compliance statement for entities which implement the WRED on a Cisco RSP platform.')
cisco_wred_mib_group = object_group((1, 3, 6, 1, 4, 1, 9, 9, 83, 3, 2, 1)).setObjects(('CISCO-WRED-MIB', 'cwredConfigGlobQueueWeight'), ('CISCO-WRED-MIB', 'cwredConfigPrecedMinDepthThreshold'), ('CISCO-WRED-MIB', 'cwredConfigPrecedMaxDepthThreshold'), ('CISCO-WRED-MIB', 'cwredConfigPrecedPktsDropFraction'), ('CISCO-WRED-MIB', 'cwredQueueAverage'), ('CISCO-WRED-MIB', 'cwredQueueDepth'), ('CISCO-WRED-MIB', 'cwredStatSwitchedPkts'), ('CISCO-WRED-MIB', 'cwredStatRandomFilteredPkts'), ('CISCO-WRED-MIB', 'cwredStatMaxFilteredPkts'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cisco_wred_mib_group = ciscoWredMIBGroup.setStatus('current')
if mibBuilder.loadTexts:
ciscoWredMIBGroup.setDescription('A collection of objects providing WRED monitoring.')
mibBuilder.exportSymbols('CISCO-WRED-MIB', cwredConfigPrecedEntry=cwredConfigPrecedEntry, ciscoWredMIBCompliance=ciscoWredMIBCompliance, cwredConfigPrecedTable=cwredConfigPrecedTable, ciscoWredMIBGroups=ciscoWredMIBGroups, cwredStatSwitchedPkts=cwredStatSwitchedPkts, cwredQueueEntry=cwredQueueEntry, cwredStatMaxFilteredPkts=cwredStatMaxFilteredPkts, cwredConfigPrecedMinDepthThreshold=cwredConfigPrecedMinDepthThreshold, cwredStatEntry=cwredStatEntry, cwredQueueTable=cwredQueueTable, ciscoWredMIBGroup=ciscoWredMIBGroup, cwredConfigGlobEntry=cwredConfigGlobEntry, cwredConfig=cwredConfig, cwredConfigGlobTable=cwredConfigGlobTable, ciscoWredMIB=ciscoWredMIB, cwredQueueDepth=cwredQueueDepth, ciscoWredMIBConformance=ciscoWredMIBConformance, cwredQueueAverage=cwredQueueAverage, cwredStatTable=cwredStatTable, PYSNMP_MODULE_ID=ciscoWredMIB, ciscoWredMIBObjects=ciscoWredMIBObjects, cwredConfigPrecedPrecedence=cwredConfigPrecedPrecedence, cwredConfigPrecedMaxDepthThreshold=cwredConfigPrecedMaxDepthThreshold, cwredStats=cwredStats, cwredStatRandomFilteredPkts=cwredStatRandomFilteredPkts, ciscoWredMIBCompliances=ciscoWredMIBCompliances, cwredConfigPrecedPktsDropFraction=cwredConfigPrecedPktsDropFraction, cwredConfigGlobQueueWeight=cwredConfigGlobQueueWeight) |
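# A minimal usage sketch (not part of the generated module above): once a
# pysnmp-compiled module such as this CISCO-WRED-MIB rendering is placed on the
# MibBuilder search path, it can be loaded and its symbols resolved. The
# directory path below is purely illustrative.
from pysnmp.smi import builder

mib_builder = builder.MibBuilder()
mib_builder.addMibSources(builder.DirMibSource('/tmp/compiled-mibs'))  # illustrative path
mib_builder.loadModules('CISCO-WRED-MIB')
cwredQueueDepth, = mib_builder.importSymbols('CISCO-WRED-MIB', 'cwredQueueDepth')
print(cwredQueueDepth.getName())  # the OID tuple exported above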
class SqlQueries:
test_result_table_insert = ("""
SELECT
test_id,
vehicle_id,
test_date,
test_class_id,
test_type,
test_result,
test_mileage,
postcode_area
FROM staging_results
WHERE test_mileage IS NOT NULL
""")
test_item_table_insert = ("""
SELECT *
FROM staging_items
""")
vehicle_table_insert = ("""
SELECT
vehicle_id,
make,
model,
colour,
fuel_type,
cylinder_capacity,
first_use_date
FROM staging_results
WHERE cylinder_capacity IS NOT NULL
AND first_use_date IS NOT NULL
""")
test_item_table_update = ("""
UPDATE test_item_table
SET dangerous_mark = 'N'
WHERE dangerous_mark IS Null
""")
| class Sqlqueries:
test_result_table_insert = '\n SELECT\n test_id,\n vehicle_id,\n test_date,\n test_class_id,\n test_type,\n test_result,\n test_mileage,\n postcode_area\n FROM staging_results\n WHERE test_mileage IS NOT NULL\n '
test_item_table_insert = '\n SELECT *\n FROM staging_items\n '
vehicle_table_insert = '\n SELECT\n vehicle_id,\n make,\n model,\n colour,\n fuel_type,\n cylinder_capacity,\n first_use_date\n FROM staging_results\n WHERE cylinder_capacity IS NOT NULL\n AND first_use_date IS NOT NULL\n '
test_item_table_update = "\n UPDATE test_item_table\n SET dangerous_mark = 'N'\n WHERE dangerous_mark IS Null\n " |
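# A small sketch (not from the original) of how the *_table_insert SELECTs above
# are typically consumed: a loader prepends an INSERT INTO for the target table.
# The cursor object and the target table name passed in are hypothetical.
def load_table(cursor, target_table, select_sql):
    cursor.execute("INSERT INTO {} {}".format(target_table, select_sql))

# e.g. load_table(cur, 'test_result_table', SqlQueries.test_result_table_insert)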
"""
Lec 7 while loop
"""
i = 5
while i >= 0:
try:
print(1/(i-3))
except:
pass
i = i -1
# if i ==3:
# pass
# print(i)
# try:
# print(1/0)
# except ZeroDivisionError:
# print('Zero Division Error')
# except:
# print('Other Errors') | """
Lec 7 while loop
"""
i = 5
while i >= 0:
try:
print(1 / (i - 3))
except:
pass
i = i - 1 |
with open('fun_file.txt') as close_this_file:
setup = close_this_file.readline()
punchline = close_this_file.readline()
print(setup)
| with open('fun_file.txt') as close_this_file:
setup = close_this_file.readline()
punchline = close_this_file.readline()
print(setup) |
#!/usr/bin/env python3
BOLD = "\033[1m"
DIM = "\033[2m"
END = "\033[0m"
TOWERS = {
"": """
_
| |
| |
| |
| |
_____| |_____
""",
"123": """
_
| |
_|_|_
|_____|
|_______|
_|_________|_
""",
"23": """
_
| |
| |
__|_|__
|_______|
_|_________|_
""",
"12": """
_
| |
| |
_|_|_
|_____|
__|_______|__
""",
"13": """
_
| |
| |
_|_|_
_|_____|_
_|_________|_
""",
"1": """
_
| |
| |
| |
_|_|_
___|_____|___
""",
"2": """
_
| |
| |
| |
__|_|__
__|_______|__
""",
"3": """
_
| |
| |
| |
___|_|___
_|_________|_
""",
}
class HanoiTowers:
def __init__(self):
self.left = [1, 2, 3]
self.middle = []
self.right = []
self.moves = 0
def show_towers(self):
left_tower = TOWERS.get("".join([str(i) for i in self.left]))
middle_tower = TOWERS.get("".join([str(i) for i in self.middle]))
right_tower = TOWERS.get("".join([str(i) for i in self.right]))
output = zip(*[i.split("\n") for i in [left_tower, middle_tower, right_tower]])
for i in output:
print("".join(i))
def get_tower(self, msg):
full_msg = f"{msg} [L]eft, [M]iddle or [R]ight: "
while True:
tower = input(full_msg).lower()
if tower.startswith("l"):
return self.left
elif tower.startswith("m"):
return self.middle
elif tower.startswith("r"):
return self.right
else:
print(f"'{tower}' is not a valid choice")
def make_move(self):
while True:
src = self.get_tower("Which tower should we move from")
dest = self.get_tower("Which tower should we move to")
if src == dest:
print("Can't move from and to the same tower")
elif len(src) == 0:
print("No disks to move off that tower")
elif len(dest) and src[0] > dest[0]:
print(f"Cannot move {src[0]} on top of {dest[0]}")
else:
disk = src.pop(0)
dest.insert(0, disk)
return
def play(self):
print(f"{BOLD}Move all the disks to the Right Tower!{END}")
while True:
self.show_towers()
if self.right == [1, 2, 3]:
print(f"{BOLD}You have won, it took you {self.moves} move(s)!${END}")
break
self.make_move()
self.moves += 1
if __name__ == "__main__":
again = True
while again:
game = HanoiTowers()
game.play()
again = not input("Play again? [Yn] ").lower().startswith("n")
| bold = '\x1b[1m'
dim = '\x1b[2m'
end = '\x1b[0m'
towers = {'': '\n _ \n | | \n | | \n | | \n | | \n_____| |_____\n', '123': '\n _ \n | | \n _|_|_ \n |_____| \n |_______| \n_|_________|_\n', '23': '\n _ \n | | \n | | \n __|_|__ \n |_______| \n_|_________|_\n', '12': '\n _ \n | | \n | | \n _|_|_ \n |_____| \n__|_______|__\n', '13': '\n _ \n | | \n | | \n _|_|_ \n _|_____|_ \n_|_________|_\n', '1': '\n _ \n | | \n | | \n | | \n _|_|_ \n___|_____|___\n', '2': '\n _ \n | | \n | | \n | | \n __|_|__ \n__|_______|__\n', '3': '\n _ \n | | \n | | \n | | \n ___|_|___ \n_|_________|_\n'}
class Hanoitowers:
def __init__(self):
self.left = [1, 2, 3]
self.middle = []
self.right = []
self.moves = 0
def show_towers(self):
left_tower = TOWERS.get(''.join([str(i) for i in self.left]))
middle_tower = TOWERS.get(''.join([str(i) for i in self.middle]))
right_tower = TOWERS.get(''.join([str(i) for i in self.right]))
output = zip(*[i.split('\n') for i in [left_tower, middle_tower, right_tower]])
for i in output:
print(''.join(i))
def get_tower(self, msg):
full_msg = f'{msg} [L]eft, [M]iddle or [R]ight: '
while True:
tower = input(full_msg).lower()
if tower.startswith('l'):
return self.left
elif tower.startswith('m'):
return self.middle
elif tower.startswith('r'):
return self.right
else:
print(f"'{tower}' is not a valid choice")
def make_move(self):
while True:
src = self.get_tower('Which tower should we move from')
dest = self.get_tower('Which tower should we move to')
if src == dest:
print("Can't move from and to the same tower")
elif len(src) == 0:
print('No disks to move off that tower')
elif len(dest) and src[0] > dest[0]:
print(f'Cannot move {src[0]} on top of {dest[0]}')
else:
disk = src.pop(0)
dest.insert(0, disk)
return
def play(self):
print(f'{BOLD}Move all the disks to the Right Tower!{END}')
while True:
self.show_towers()
if self.right == [1, 2, 3]:
print(f'{BOLD}You have won, it took you {self.moves} move(s)!${END}')
break
self.make_move()
self.moves += 1
if __name__ == '__main__':
again = True
while again:
game = hanoi_towers()
game.play()
again = not input('Play again? [Yn] ').lower().startswith('n') |
def teleport(a,b,x,y):
d1 = abs(a-b)
d2 = abs(a-x)+abs(b-y)
d3 = abs(a-y)+abs(b-x)
print(d1,d2,d3)
if d1 <= d2 and d1 <= d3:
return d1
elif d2 <= d1 and d2 <= d3:
return d2
else:
return d3
print(teleport(3,10,8,2))
print(teleport(86,84,15,78))
print(teleport(35,94,92,87)) | def teleport(a, b, x, y):
d1 = abs(a - b)
d2 = abs(a - x) + abs(b - y)
d3 = abs(a - y) + abs(b - x)
print(d1, d2, d3)
if d1 <= d2 and d1 <= d3:
return d1
elif d2 <= d1 and d2 <= d3:
return d2
else:
return d3
print(teleport(3, 10, 8, 2))
print(teleport(86, 84, 15, 78))
print(teleport(35, 94, 92, 87)) |
# Write a program that reads an integer n. Then, for all numbers in the range [1, n], prints the number and if it is special or not (True / False).
# A number is special when the sum of its digits is 5, 7, or 11.
# Examples
# Input Output
# 15 1 -> False
# 2 -> False
# 3 -> False
# 4 -> False
# 5 -> True
# 6 -> False
# 7 -> True
# 8 -> False
# 9 -> False
# 10 -> False
# 11 -> False
# 12 -> False
# 13 -> False
# 14 -> True
# 15 -> False
# 6 1 -> False
# 2 -> False
# 3 -> False
# 4 -> False
# 5 -> True
# 6 -> False
special_range = (5,7,11)
num = int(input())
for i in range(1, num + 1):
str_i = str(i)
char_sum = 0
for j in range(len(str_i)):
char_sum += int(str_i[j])
is_special = True if char_sum in special_range else False
print(f"{i} -> {is_special}")
| special_range = (5, 7, 11)
num = int(input())
for i in range(1, num + 1):
str_i = str(i)
char_sum = 0
for j in range(len(str_i)):
char_sum += int(str_i[j])
is_special = True if char_sum in special_range else False
print(f'{i} -> {is_special}') |
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
def mergeTwoLists(self, l1: Optional[ListNode], l2: Optional[ListNode]) -> Optional[ListNode]:
if l1 is None and l2 is None:
return None
if l1 is None:
return l2
if l2 is None:
return l1
# same as
#if None in [l1, l2]:
# return l1 or l2
n1 = l1
n2 = l2
if n1.val <= n2.val:
result = n1
n1 = n1.next
else:
result = n2
n2 = n2.next
n3 = result
while n1 is not None and n2 is not None:
if n1.val <= n2.val:
n3.next = n1
n1 = n1.next
else:
n3.next = n2
n2 = n2.next
n3 = n3.next
if n1 is not None:
n3.next = n1
if n2 is not None:
n3.next = n2
return result
| class Solution:
def merge_two_lists(self, l1: Optional[ListNode], l2: Optional[ListNode]) -> Optional[ListNode]:
if l1 is None and l2 is None:
return None
if l1 is None:
return l2
if l2 is None:
return l1
n1 = l1
n2 = l2
if n1.val <= n2.val:
result = n1
n1 = n1.next
else:
result = n2
n2 = n2.next
n3 = result
while n1 is not None and n2 is not None:
if n1.val <= n2.val:
n3.next = n1
n1 = n1.next
else:
n3.next = n2
n2 = n2.next
n3 = n3.next
if n1 is not None:
n3.next = n1
if n2 is not None:
n3.next = n2
return result |
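# A minimal usage sketch (not from the original). It assumes ListNode and
# Optional are defined before the Solution class, as on the original platform;
# the list values below are illustrative.
from typing import Optional

class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next

def build(values):
    head = None
    for v in reversed(values):
        head = ListNode(v, head)
    return head

merged = Solution().mergeTwoLists(build([1, 2, 4]), build([1, 3, 4]))
out = []
while merged:
    out.append(merged.val)
    merged = merged.next
print(out)  # expected [1, 1, 2, 3, 4, 4]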
class Configuration(object):
"""Configuration describes how a benchmark should be run.
It selects the implementation (for example, different implementations) and
implementation-specific options (for example, whether replay compilation
should be used).
"""
def __init__(self, name):
self.name = name
self.args = {}
def update_args(self, args):
self.args.update(args)
return self
def set_description(self, descr):
self.descr = descr
return self | class Configuration(object):
"""Configuration describes how a benchmark should be run.
It selects the implementation (for example, different implementations) and
implementation-specific options (for example, whether replay compilation
should be used).
"""
def __init__(self, name):
self.name = name
self.args = {}
def update_args(self, args):
self.args.update(args)
return self
def set_description(self, descr):
self.descr = descr
return self |
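# A small usage sketch (not part of the original) showing the chained-setter
# style the class supports; the benchmark name and arguments are illustrative.
cfg = (Configuration('interpreter-default')
       .update_args({'iterations': 10, 'replay_compilation': False})
       .set_description('Default interpreter, no replay compilation'))
print(cfg.name, cfg.args, cfg.descr)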
f = open("nine.txt", "r")
lines = [x.strip() for x in f.readlines()]
height = len(lines)
width = len(lines[0])
map = []
for line in lines:
cur = [int(ch) for ch in list(line)]
map.append(cur)
risk = 0
for y in range(height):
for x in range(width):
val = map[y][x]
if y !=0 and map[y-1][x] <= val:
continue
if y != height - 1 and map[y+1][x] <= val:
continue
if x != 0 and map[y][x-1] <= val:
continue
if x != width - 1 and map[y][x+1] <= val:
continue
risk += map[y][x] + 1
print(risk)
height = len(lines)
width = len(lines[0])
map = []
for line in lines:
cur = [int(ch) for ch in list(line)]
map.append(cur)
basins = []
low_points = []
for y in range(height):
for x in range(width):
val = map[y][x]
if y !=0 and map[y-1][x] <= val:
continue
if y != height - 1 and map[y+1][x] <= val:
continue
if x != 0 and map[y][x-1] <= val:
continue
if x != width - 1 and map[y][x+1] <= val:
continue
low_points.append((x,y))
for (x,y) in low_points:
size = 0
# from the low point, grow outwards
to_check = [(x,y)]
while len(to_check) != 0:
pos = to_check.pop(0)
x,y = pos
val = map[y][x]
if val == 9:
continue
size += 1
if y !=0 and map[y-1][x] > val and map[y-1][x] != 9:
to_check.append((x,y-1))
if y != height - 1 and map[y+1][x] > val and map[y+1][x] != 9:
to_check.append((x, y + 1))
if x != 0 and map[y][x-1] > val and map[y][x-1] != 9:
to_check.append((x-1, y))
if x != width - 1 and map[y][x+1] > val and map[y][x+1] != 9:
to_check.append((x+1,y))
map[y][x] = 9
basins.append(size)
basins = sorted(basins, reverse=True)
print(basins[0] * basins[1] * basins[2])
| f = open('nine.txt', 'r')
lines = [x.strip() for x in f.readlines()]
height = len(lines)
width = len(lines[0])
map = []
for line in lines:
cur = [int(ch) for ch in list(line)]
map.append(cur)
risk = 0
for y in range(height):
for x in range(width):
val = map[y][x]
if y != 0 and map[y - 1][x] <= val:
continue
if y != height - 1 and map[y + 1][x] <= val:
continue
if x != 0 and map[y][x - 1] <= val:
continue
if x != width - 1 and map[y][x + 1] <= val:
continue
risk += map[y][x] + 1
print(risk)
height = len(lines)
width = len(lines[0])
map = []
for line in lines:
cur = [int(ch) for ch in list(line)]
map.append(cur)
basins = []
low_points = []
for y in range(height):
for x in range(width):
val = map[y][x]
if y != 0 and map[y - 1][x] <= val:
continue
if y != height - 1 and map[y + 1][x] <= val:
continue
if x != 0 and map[y][x - 1] <= val:
continue
if x != width - 1 and map[y][x + 1] <= val:
continue
low_points.append((x, y))
for (x, y) in low_points:
size = 0
to_check = [(x, y)]
while len(to_check) != 0:
pos = to_check.pop(0)
(x, y) = pos
val = map[y][x]
if val == 9:
continue
size += 1
if y != 0 and map[y - 1][x] > val and (map[y - 1][x] != 9):
to_check.append((x, y - 1))
if y != height - 1 and map[y + 1][x] > val and (map[y + 1][x] != 9):
to_check.append((x, y + 1))
if x != 0 and map[y][x - 1] > val and (map[y][x - 1] != 9):
to_check.append((x - 1, y))
if x != width - 1 and map[y][x + 1] > val and (map[y][x + 1] != 9):
to_check.append((x + 1, y))
map[y][x] = 9
basins.append(size)
basins = sorted(basins, reverse=True)
print(basins[0] * basins[1] * basins[2]) |
adult = int(input())
crian = int(input())
preco = float(input())
preco_final = (crian * (preco/2)) + (adult * preco)
print('Total: R$ {:.2f}'.format(preco_final))
| adult = int(input())
crian = int(input())
preco = float(input())
preco_final = crian * (preco / 2) + adult * preco
print('Total: R$ {:.2f}'.format(preco_final)) |
# ranges sets
#range1
ghmin1, ghmax1, gsdmin1, gsdmax1= 0.35, 0.4, 0.197, 0.207 #range chaos 1 gh/gsd T36
ghmin2, ghmax2, gsdmin2, gsdmax2= 0.1, 0.15, 0.197, 0.207 #range nonchaos 1 gh/gsd T36
#range2
ghmin1b, ghmax1b, gsdmin1b, gsdmax1b= 0.35, 0.40, 0.275, 0.285 #range chaos 4 gh/gsd T36
ghmin2b, ghmax2b, gsdmin2b, gsdmax2b= 0.18, 0.23, 0.275,0.285 #range nonchaos 4 gh/gsd T36
# parameters for the network
Nnode = 50 # the number of neurons
lyap_min=0 #minimum lyapunov value
lyap_max=1.6 #maximum lyapunov value
dt = 0.05 # the time step of simulation
Xdata=0
Ydata=4
Ydata2=3
tEnd=50000
nsim=50
tbin=40
rseed0=[0,20,25,33,40] # the seed for simulation
ranges0=[2,2,2,2,2] # the index of ranges2
Nindex=1 # The number of example ranges for fixed size
| (ghmin1, ghmax1, gsdmin1, gsdmax1) = (0.35, 0.4, 0.197, 0.207)
(ghmin2, ghmax2, gsdmin2, gsdmax2) = (0.1, 0.15, 0.197, 0.207)
(ghmin1b, ghmax1b, gsdmin1b, gsdmax1b) = (0.35, 0.4, 0.275, 0.285)
(ghmin2b, ghmax2b, gsdmin2b, gsdmax2b) = (0.18, 0.23, 0.275, 0.285)
nnode = 50
lyap_min = 0
lyap_max = 1.6
dt = 0.05
xdata = 0
ydata = 4
ydata2 = 3
t_end = 50000
nsim = 50
tbin = 40
rseed0 = [0, 20, 25, 33, 40]
ranges0 = [2, 2, 2, 2, 2]
nindex = 1 |
'''
Given an array of integers, sort the array into a wave like array and return it,
In other words, arrange the elements into a sequence such that a1 >= a2 <= a3 >= a4 <= a5.....
Example
Given [1, 2, 3, 4]
One possible answer : [2, 1, 4, 3]
Another possible answer : [4, 1, 3, 2]
'''
def wave_list(A: list) -> list:
A.sort()
for i in range(0, len(A)-1, 2):
A[i], A[i+1] = A[i+1], A[i]
return A
if __name__ == "__main__":
A = [1, 2, 3, 4]
print(wave_list(A))
| """
Given an array of integers, sort the array into a wave like array and return it,
In other words, arrange the elements into a sequence such that a1 >= a2 <= a3 >= a4 <= a5.....
Example
Given [1, 2, 3, 4]
One possible answer : [2, 1, 4, 3]
Another possible answer : [4, 1, 3, 2]
"""
def wave_list(A: list) -> list:
A.sort()
for i in range(0, len(A) - 1, 2):
(A[i], A[i + 1]) = (A[i + 1], A[i])
return A
if __name__ == '__main__':
a = [1, 2, 3, 4]
print(wave_list(A)) |
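# A quick property check (a sketch, not part of the original): after wave_list,
# every even index should be >= its right neighbour and every odd index <= it.
def is_wave(arr):
    return all(arr[i] >= arr[i + 1] if i % 2 == 0 else arr[i] <= arr[i + 1]
               for i in range(len(arr) - 1))

print(is_wave(wave_list([10, 5, 6, 3, 2, 20, 100, 80])))  # expected True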
KEYS = {
"SPOTIFY_CLIENT_ID": "PLACEHOLDER_CLIENT_ID", # Create an app from [here](https://developer.spotify.com/dashboard/applications)
"SPOTIFY_CLIENT_SECRET": "PLACEHOLDER_CLIENT_SECRET", # Create an app from [here](https://developer.spotify.com/dashboard/applications)
"SPOTIFY_REDIRECT_URI": "http://localhost:5000/callback/spotify", # You have to register this call back in your Application's dashboard https://developer.spotify.com/dashboard/applications
}
| keys = {'SPOTIFY_CLIENT_ID': 'PLACEHOLDER_CLIENT_ID', 'SPOTIFY_CLIENT_SECRET': 'PLACEHOLDER_CLIENT_SECRET', 'SPOTIFY_REDIRECT_URI': 'http://localhost:5000/callback/spotify'} |
# https://www.codewars.com/kata/52c31f8e6605bcc646000082
def two_sum(numbers, target):
for i, n1 in enumerate(numbers):
for j, n2 in enumerate(numbers[i+1:]):
if n1+n2 == target: return [i, i+j+1]
| def two_sum(numbers, target):
for (i, n1) in enumerate(numbers):
for (j, n2) in enumerate(numbers[i + 1:]):
if n1 + n2 == target:
return [i, i + j + 1] |
"""
Custom exceptions raised by this local library
"""
class NoApisDefined(Exception):
"""
Raised when there are no APIs defined in the template
"""
pass
class OverridesNotWellDefinedError(Exception):
"""
Raised when the overrides file is invalid
"""
pass
| """
Custom exceptions raised by this local library
"""
class Noapisdefined(Exception):
"""
Raised when there are no APIs defined in the template
"""
pass
class Overridesnotwelldefinederror(Exception):
"""
Raised when the overrides file is invalid
"""
pass |
class Constants:
NUM_ARMS = "num_arms"
NUM_LEGS = "num_legs"
PERSON = "person"
ANIMAL_TYPE = "animal_type"
CAT = "cat"
DOG = "dog"
ANIMAL = "animal"
NAME = "name"
SURNAME = "surname"
WHISKERS = "whiskers"
TYPE = "type" | class Constants:
num_arms = 'num_arms'
num_legs = 'num_legs'
person = 'person'
animal_type = 'animal_type'
cat = 'cat'
dog = 'dog'
animal = 'animal'
name = 'name'
surname = 'surname'
whiskers = 'whiskers'
type = 'type' |
lista = [('Comestibles', 'Loby Bar', 1, 305.2),
('Comestibles', 'Loby Bar', 5, 87.23),
('Comestibles', 'Piano Bar', 2, 236.9),
('Comestibles', 'Piano Bar', 8, 412.69),
('Bebidas', 'Loby Bar', 3, 145.37),
('Bebidas', 'Loby Bar', 5, 640.81),
('Bebidas', 'Piano Bar', 12, 94.51),
('Tabacos', 'Cafeteria', 4, 498.12),
('Tabacos', 'Cafeteria', 6, 651.3),
('Tabacos', 'Piano Bar', 8, 813.5),
('Tabacos', 'Piano Bar', 11, 843.25),
('Otros', 'Loby Bar', 6, 140.24),
('Otros', 'Piano Bar', 9, 267.06),
('Otros', 'Cafeteria', 12, 695.12)]
if len(lista) > 0:
data = []
cur_fami = lista[0][0]
cur_pvta = lista[0][1]
pvtas = []
meses = ['', '', '', '', '', '', '', '', '', '', '', '', 0]
tot_fami = [0] * 13
tot = [0] * 13
for pvfa in lista:
if pvfa[1] != cur_pvta:
pvtas.append((cur_pvta, meses[:]))
cur_pvta = pvfa[1]
meses = ['', '', '', '', '', '', '', '', '', '', '', '', 0]
if pvfa[0] != cur_fami:
data.append((cur_fami, pvtas[:], tot_fami[:]))
cur_fami = pvfa[0]
pvtas = []
tot_fami = [0] * 13
meses[pvfa[2] - 1] = pvfa[3]
meses[12] += pvfa[3]
tot_fami[pvfa[2] - 1] += pvfa[3]
tot_fami[12] += pvfa[3]
tot[pvfa[2] - 1] += pvfa[3]
tot[12] += pvfa[3]
pvtas.append((cur_pvta, meses[:]))
data.append((cur_fami, pvtas[:], tot_fami[:]))
data.append(tot)
print(data)
x = [('Comestibles', [('Loby Bar', [305.2, '', '', '', 87.23, '', '', '', '', '', '', '', 392.43]),
('Piano Bar', ['', 236.9, '', '', '', '', '', 412.69, '', '', '', '', 649.59])],
[305.2, 236.9, 0, 0, 87.23, 0, 0, 412.69, 0, 0, 0, 0, 1042.02]),
('Bebidas', [('Loby Bar', ['', '', 145.37, '', 640.81, '', '', '', '', '', '', '', 786.18]),
('Piano Bar', ['', '', '', '', '', '', '', '', '', '', '', 94.51, 94.51])],
[0, 0, 145.37, 0, 640.81, 0, 0, 0, 0, 0, 0, 94.51, 880.6899999999999]),
('Tabacos', [('Cafeteria', ['', '', '', 498.12, '', 651.3, '', '', '', '', '', '', 1149.42]),
('Piano Bar', ['', '', '', '', '', '', '', 813.5, '', '', 843.25, '', 1656.75])],
[0, 0, 0, 498.12, 0, 651.3, 0, 813.5, 0, 0, 843.25, 0, 2806.17]),
('Otros', [('Loby Bar', ['', '', '', '', '', 140.24, '', '', '', '', '', '', 140.24]),
('Piano Bar', ['', '', '', '', '', '', '', '', 267.06, '', '', '', 267.06]),
('Cafeteria', ['', '', '', '', '', '', '', '', '', '', '', 695.12, 695.12])],
[0, 0, 0, 0, 0, 140.24, 0, 0, 267.06, 0, 0, 695.12, 1102.42]),
[305.2, 236.9, 145.37, 498.12, 728.04, 791.54, 0, 1226.19, 267.06, 0, 843.25, 789.63, 5831.3]]
| lista = [('Comestibles', 'Loby Bar', 1, 305.2), ('Comestibles', 'Loby Bar', 5, 87.23), ('Comestibles', 'Piano Bar', 2, 236.9), ('Comestibles', 'Piano Bar', 8, 412.69), ('Bebidas', 'Loby Bar', 3, 145.37), ('Bebidas', 'Loby Bar', 5, 640.81), ('Bebidas', 'Piano Bar', 12, 94.51), ('Tabacos', 'Cafeteria', 4, 498.12), ('Tabacos', 'Cafeteria', 6, 651.3), ('Tabacos', 'Piano Bar', 8, 813.5), ('Tabacos', 'Piano Bar', 11, 843.25), ('Otros', 'Loby Bar', 6, 140.24), ('Otros', 'Piano Bar', 9, 267.06), ('Otros', 'Cafeteria', 12, 695.12)]
if len(lista) > 0:
data = []
cur_fami = lista[0][0]
cur_pvta = lista[0][1]
pvtas = []
meses = ['', '', '', '', '', '', '', '', '', '', '', '', 0]
tot_fami = [0] * 13
tot = [0] * 13
for pvfa in lista:
if pvfa[1] != cur_pvta:
pvtas.append((cur_pvta, meses[:]))
cur_pvta = pvfa[1]
meses = ['', '', '', '', '', '', '', '', '', '', '', '', 0]
if pvfa[0] != cur_fami:
data.append((cur_fami, pvtas[:], tot_fami[:]))
cur_fami = pvfa[0]
pvtas = []
tot_fami = [0] * 13
meses[pvfa[2] - 1] = pvfa[3]
meses[12] += pvfa[3]
tot_fami[pvfa[2] - 1] += pvfa[3]
tot_fami[12] += pvfa[3]
tot[pvfa[2] - 1] += pvfa[3]
tot[12] += pvfa[3]
pvtas.append((cur_pvta, meses[:]))
data.append((cur_fami, pvtas[:], tot_fami[:]))
data.append(tot)
print(data)
x = [('Comestibles', [('Loby Bar', [305.2, '', '', '', 87.23, '', '', '', '', '', '', '', 392.43]), ('Piano Bar', ['', 236.9, '', '', '', '', '', 412.69, '', '', '', '', 649.59])], [305.2, 236.9, 0, 0, 87.23, 0, 0, 412.69, 0, 0, 0, 0, 1042.02]), ('Bebidas', [('Loby Bar', ['', '', 145.37, '', 640.81, '', '', '', '', '', '', '', 786.18]), ('Piano Bar', ['', '', '', '', '', '', '', '', '', '', '', 94.51, 94.51])], [0, 0, 145.37, 0, 640.81, 0, 0, 0, 0, 0, 0, 94.51, 880.6899999999999]), ('Tabacos', [('Cafeteria', ['', '', '', 498.12, '', 651.3, '', '', '', '', '', '', 1149.42]), ('Piano Bar', ['', '', '', '', '', '', '', 813.5, '', '', 843.25, '', 1656.75])], [0, 0, 0, 498.12, 0, 651.3, 0, 813.5, 0, 0, 843.25, 0, 2806.17]), ('Otros', [('Loby Bar', ['', '', '', '', '', 140.24, '', '', '', '', '', '', 140.24]), ('Piano Bar', ['', '', '', '', '', '', '', '', 267.06, '', '', '', 267.06]), ('Cafeteria', ['', '', '', '', '', '', '', '', '', '', '', 695.12, 695.12])], [0, 0, 0, 0, 0, 140.24, 0, 0, 267.06, 0, 0, 695.12, 1102.42]), [305.2, 236.9, 145.37, 498.12, 728.04, 791.54, 0, 1226.19, 267.06, 0, 843.25, 789.63, 5831.3]] |
for _ in range(int(input())):
t = 24*60
h, m = map(int, input().split())
print(t-(h*60)-m)
| for _ in range(int(input())):
t = 24 * 60
(h, m) = map(int, input().split())
print(t - h * 60 - m) |
'''
My functions that I created to support me during my lessons.
'''
def title(msg):
#This function prints a title framed by two lines, one above and one below the msg
print('-'*30)
print(msg)
print('-'*30)
| """
My functions that I created to support me during my lessons.
"""
def title(msg):
print('-' * 30)
print(msg)
print('-' * 30) |
N=int(input())
for i in range(1,10):
m=N/i
if m.is_integer() and 1<=m<=9:
print("Yes")
break
else:
print("No") | n = int(input())
for i in range(1, 10):
m = N / i
if m.is_integer() and 1 <= m <= 9:
print('Yes')
break
else:
print('No') |
"""
# Mobius Software LTD
# Copyright 2015-2018, Mobius Software LTD
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this software; if not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA, or see the FSF site: http://www.fsf.org.
"""
class MQConnect(object):
def __init__(self,username,password,clientID,cleanSession,keepAlive,will):
self.username = username
self.password = password
self.clientID = clientID
self.cleanSession = cleanSession
self.keepAlive = keepAlive
self.will = will
self.protocolLevel = 4
def getLength(self):
length = 10
length = length + len(self.clientID) + 2
if self.will is not None:
length = length + self.will.getLength()
if self.username is not None:
length = length + len(self.username) + 2
if self.password is not None:
length = length + len(self.password) + 2
return int(length)
def getType(self):
return 1
def getProtocol(self):
return 1
def setProtocolLevel(self, level):
self.protocolLevel = level
def willValid(self):
if self.will.getLength()==0:
return False
return True
def getLengthWill(self):
return self.will.getLength
def getKeepAlive(self):
return self.keepAlive | """
# Mobius Software LTD
# Copyright 2015-2018, Mobius Software LTD
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this software; if not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA, or see the FSF site: http://www.fsf.org.
"""
class Mqconnect(object):
def __init__(self, username, password, clientID, cleanSession, keepAlive, will):
self.username = username
self.password = password
self.clientID = clientID
self.cleanSession = cleanSession
self.keepAlive = keepAlive
self.will = will
self.protocolLevel = 4
def get_length(self):
length = 10
length = length + len(self.clientID) + 2
if self.will is not None:
length = length + self.will.getLength()
if self.username is not None:
length = length + len(self.username) + 2
if self.password is not None:
length = length + len(self.password) + 2
return int(length)
def get_type(self):
return 1
def get_protocol(self):
return 1
def set_protocol_level(self, level):
self.protocolLevel = level
def will_valid(self):
if self.will.getLength() == 0:
return False
return True
def get_length_will(self):
return self.will.getLength
def get_keep_alive(self):
return self.keepAlive |
# Pancake Sorting
'''
Given an array of integers A, We need to sort the array performing a series of pancake flips.
In one pancake flip we do the following steps:
Choose an integer k where 0 <= k < A.length.
Reverse the sub-array A[0...k].
For example, if A = [3,2,1,4] and we performed a pancake flip choosing k = 2, we reverse the sub-array [3,2,1], so A = [1,2,3,4] after the pancake flip at k = 2.
Return an array of the k-values of the pancake flips that should be performed in order to sort A. Any valid answer that sorts the array within 10 * A.length flips will be judged as correct.
Example 1:
Input: A = [3,2,4,1]
Output: [4,2,4,3]
Explanation:
We perform 4 pancake flips, with k values 4, 2, 4, and 3.
Starting state: A = [3, 2, 4, 1]
After 1st flip (k = 4): A = [1, 4, 2, 3]
After 2nd flip (k = 2): A = [4, 1, 2, 3]
After 3rd flip (k = 4): A = [3, 2, 1, 4]
After 4th flip (k = 3): A = [1, 2, 3, 4], which is sorted.
Notice that we return an array of the chosen k values of the pancake flips.
Example 2:
Input: A = [1,2,3]
Output: []
Explanation: The input is already sorted, so there is no need to flip anything.
Note that other answers, such as [3, 3], would also be accepted.
Constraints:
1 <= A.length <= 100
1 <= A[i] <= A.length
All integers in A are unique (i.e. A is a permutation of the integers from 1 to A.length).
'''
class Solution:
def pancakeSort(self, arr: List[int]) -> List[int]:
n = end = len(arr)
ans = []
def find(num):
for i in range(n):
if arr[i]==num:
return i+1
def flip(i,j):
while i<j:
arr[i], arr[j] = arr[j], arr[i]
i+=1
j-=1
while end>1:
ind = find(end)
flip(0, ind-1)
flip(0, end-1)
ans.append(ind)
ans.append(end)
end-=1
return ans
| """
Given an array of integers A, We need to sort the array performing a series of pancake flips.
In one pancake flip we do the following steps:
Choose an integer k where 0 <= k < A.length.
Reverse the sub-array A[0...k].
For example, if A = [3,2,1,4] and we performed a pancake flip choosing k = 2, we reverse the sub-array [3,2,1], so A = [1,2,3,4] after the pancake flip at k = 2.
Return an array of the k-values of the pancake flips that should be performed in order to sort A. Any valid answer that sorts the array within 10 * A.length flips will be judged as correct.
Example 1:
Input: A = [3,2,4,1]
Output: [4,2,4,3]
Explanation:
We perform 4 pancake flips, with k values 4, 2, 4, and 3.
Starting state: A = [3, 2, 4, 1]
After 1st flip (k = 4): A = [1, 4, 2, 3]
After 2nd flip (k = 2): A = [4, 1, 2, 3]
After 3rd flip (k = 4): A = [3, 2, 1, 4]
After 4th flip (k = 3): A = [1, 2, 3, 4], which is sorted.
Notice that we return an array of the chosen k values of the pancake flips.
Example 2:
Input: A = [1,2,3]
Output: []
Explanation: The input is already sorted, so there is no need to flip anything.
Note that other answers, such as [3, 3], would also be accepted.
Constraints:
1 <= A.length <= 100
1 <= A[i] <= A.length
All integers in A are unique (i.e. A is a permutation of the integers from 1 to A.length).
"""
class Solution:
def pancake_sort(self, arr: List[int]) -> List[int]:
n = end = len(arr)
ans = []
def find(num):
for i in range(n):
if arr[i] == num:
return i + 1
def flip(i, j):
while i < j:
(arr[i], arr[j]) = (arr[j], arr[i])
i += 1
j -= 1
while end > 1:
ind = find(end)
flip(0, ind - 1)
flip(0, end - 1)
ans.append(ind)
ans.append(end)
end -= 1
return ans |
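# A minimal verification sketch (not from the original): apply the returned
# k-values to a copy of the input and confirm they sort it. It assumes
# `from typing import List` so the Solution class above can be defined.
def apply_flips(arr, flips):
    arr = arr[:]
    for k in flips:
        arr[:k] = arr[:k][::-1]
    return arr

flips = Solution().pancakeSort([3, 2, 4, 1])
print(apply_flips([3, 2, 4, 1], flips))  # expected [1, 2, 3, 4]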
# The following code implements the knapsack problem with bottom-up dynamic programming approach.
def read_file(name):
"""Given the path/nname of a file ,return the Values list, Weights list,
capacity and number of jobs.
"""
file = open(name, 'r')
data = file.readlines()
capacity = int(data[0].split()[0])
n =int(data[0].split()[1])
# Create two lists to store values and sizes
V = [0]*(n+1)
W = [0]*(n+1)
for index, line in enumerate(data[1:]):
V[index+1]=int(line.split()[0])
W[index+1]=int(line.split()[1])
return V, W, capacity, n
V,W, capacity, n = read_file('knapsack1.txt')
def knapsack_dynamic(V, W, capacity, numbers):
"""Return the matrix of maximum value
"""
# initialize the 2-d array
A = [[0]*(capacity+1) for x in range(numbers+1)]
for i in range(1,numbers+1):
for j in range(capacity+1):
# make sure the size of current is not larger than the current capacity.
if W[i]>j:
A[i][j] = A[i-1][j]
else:
A[i][j] = max(A[i-1][j],A[i-1][j-W[i]]+V[i])
return A
def main():
V,W, capacity, n = read_file('knapsack1.txt')
A = knapsack_dynamic(V,W, capacity, n)
# return the largest value of the matrix.
print(A[-1][-1])
if __name__ == '__main__':
main()
### Test case:
values = [0,3,2,4,4]
sizes = [0,4,3,2,3]
capacity = 6
numbers = 4
B = knapsack_dynamic(values, sizes, capacity, numbers)
B[-1][-1] # The answer should be 8.
| def read_file(name):
"""Given the path/nname of a file ,return the Values list, Weights list,
capacity and number of jobs.
"""
file = open(name, 'r')
data = file.readlines()
capacity = int(data[0].split()[0])
n = int(data[0].split()[1])
v = [0] * (n + 1)
w = [0] * (n + 1)
for (index, line) in enumerate(data[1:]):
V[index + 1] = int(line.split()[0])
W[index + 1] = int(line.split()[1])
return (V, W, capacity, n)
(v, w, capacity, n) = read_file('knapsack1.txt')
def knapsack_dynamic(V, W, capacity, numbers):
"""Return the matrix of maximum value
"""
a = [[0] * (capacity + 1) for x in range(numbers + 1)]
for i in range(1, numbers + 1):
for j in range(capacity + 1):
if W[i] > j:
A[i][j] = A[i - 1][j]
else:
A[i][j] = max(A[i - 1][j], A[i - 1][j - W[i]] + V[i])
return A
def main():
(v, w, capacity, n) = read_file('knapsack1.txt')
a = knapsack_dynamic(V, W, capacity, n)
print(A[-1][-1])
if __name__ == '__main__':
main()
values = [0, 3, 2, 4, 4]
sizes = [0, 4, 3, 2, 3]
capacity = 6
numbers = 4
b = knapsack_dynamic(values, sizes, capacity, numbers)
B[-1][-1] |
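# A short extension sketch (not in the original): once the DP table A is built,
# the chosen items can be recovered by walking backwards through it. W is the
# same weights list used to build the table.
def knapsack_items(A, W, capacity, numbers):
    chosen, j = [], capacity
    for i in range(numbers, 0, -1):
        if A[i][j] != A[i - 1][j]:  # item i contributed to the optimum
            chosen.append(i)
            j -= W[i]
    return chosen[::-1]

# e.g. knapsack_items(B, sizes, capacity, numbers) for the test case above -> [3, 4]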
# postgres credentials
DB_NAME = "DB_NAME"
DB_ADDRESS = "HOST:PORT"
USER_NAME = "USER_NAME"
DB_PASSWORD = "PASSWORD"
SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://{}:{}@{}/{}".format(
USER_NAME, DB_PASSWORD, DB_ADDRESS, DB_NAME
)
# twitter credentials
# https://apps.twitter.com
CONSUMER_KEY = "YOUR_CONSUMER_KEY"
CONSUMER_SECRET = "YOUR_CONSUMER_SECRET"
ACCESS_TOKEN = "YOUR_ACCESS_TOKEN"
ACCESS_TOKEN_SECRET = "YOUR_ACCESS_TOKEN_SECRET"
| db_name = 'DB_NAME'
db_address = 'HOST:PORT'
user_name = 'USER_NAME'
db_password = 'PASSWORD'
sqlalchemy_database_uri = 'postgresql+psycopg2://{}:{}@{}/{}'.format(USER_NAME, DB_PASSWORD, DB_ADDRESS, DB_NAME)
consumer_key = 'YOUR_CONSUMER_KEY'
consumer_secret = 'YOUR_CONSUMER_SECRET'
access_token = 'YOUR_ACCESS_TOKEN'
access_token_secret = 'YOUR_ACCESS_TOKEN_SECRET' |
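# A minimal usage sketch (not part of the original): a URI in this format is
# typically handed straight to SQLAlchemy; the placeholder credentials above
# would of course need real values first.
from sqlalchemy import create_engine

engine = create_engine(SQLALCHEMY_DATABASE_URI)
print(engine.url)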
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Silvio Peroni <essepuntato@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any purpose
# with or without fee is hereby granted, provided that the above copyright notice
# and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
# OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
# DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
# SOFTWARE.
# Test case for the function
def test_caesar_cypher(msg, left_shift, shift_quantity, expected):
result = caesar_cypher(msg, left_shift, shift_quantity)
if expected == result:
return True
else:
return False
# Code of the function
def caesar_cypher(msg, left_shift, shift_quantity):
result = list()
alphabet = "abcdefghijklmnopqrstuvwxyz"
if left_shift:
shift_quantity = -shift_quantity
cypher = alphabet[shift_quantity:] + alphabet[:shift_quantity]
for c in msg.lower():
if c in alphabet:
result.append(cypher[alphabet.index(c)])
else:
result.append(c)
return "".join(result)
# Tests
print(test_caesar_cypher("message to encrypt", True, 3, "jbppxdb ql bkzovmq"))
print(test_caesar_cypher("message to encrypt", False, 5, "rjxxflj yt jshwduy"))
| def test_caesar_cypher(msg, left_shift, shift_quantity, expected):
result = caesar_cypher(msg, left_shift, shift_quantity)
if expected == result:
return True
else:
return False
def caesar_cypher(msg, left_shift, shift_quantity):
result = list()
alphabet = 'abcdefghijklmnopqrstuvwxyz'
if left_shift:
shift_quantity = -shift_quantity
cypher = alphabet[shift_quantity:] + alphabet[:shift_quantity]
for c in msg.lower():
if c in alphabet:
result.append(cypher[alphabet.index(c)])
else:
result.append(c)
return ''.join(result)
print(test_caesar_cypher('message to encrypt', True, 3, 'jbppxdb ql bkzovmq'))
print(test_caesar_cypher('message to encrypt', False, 5, 'rjxxflj yt jshwduy')) |
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
#------*-python-*-------------------------------------------------------------
# Config file for the GFE (Graphical Forecast Editor).
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------- -------- --------- --------------------------------------------
# Nov 20, 2013 2488 randerso Changed to use DejaVu fonts
# May 28, 2014 2841 randerso Added separate configurable limits for text
# formatter and product script tasks
# Feb 04, 2015 17039 ryu Removed HighlightFramingCodes setting.
# Feb 09, 2016 5283 nabowle Remove NGM support.
# Jun 23, 2017 6138 dgilling Remove obsolete winter weather phensigs.
# Jan 23, 2018 7153 randerso Cleaned up spelling errors in comments
# Dec 06, 2017 DCS20267 psantos Add NWPS rip current guidance
##
##
# This is a base file that is not intended to be overridden.
#
# This file can be imported to override configuration settings. Please see the
# Configuration Guides->GFE Configuration section of the GFE Online Help for
# guidance on creating a new configuration file.
##
GFESUITE_HOME = "/awips2/GFESuite"
GFESUITE_PRDDIR = "/tmp/products"
yes = True
no = False
#------------------------------------------------------------------------
# Hiding the configuration file
#------------------------------------------------------------------------
# The gfe configuration file can be hidden in the Start Up Dialog by
# using the HideConfigFile keyword and setting it to 1, i.e. by uncommenting
# the following line.
# HideConfigFile = 1
#------------------------------------------------------------------------
# Mutable Parameter and Viewable Database Configurations
#------------------------------------------------------------------------
# mutableModel indicates the one database that can be modified. Format
# is "type_model_time". If time isn't important (for a singleton db),
# then the format is "type_model". If there isn't a type, then the
# format is "_model".
mutableModel = "_Fcst"
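# For illustration only (hypothetical database names, not part of the baseline):
#   mutableModel = "D2D_GFS_20180101_0000"   # type_model_time
#   mutableModel = "D2D_Topo"                # type_model (singleton database)
#   mutableModel = "_Fcst"                   # no type (the default used above)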
# dbTypes is a list of database types which the gfe should "see".
dbTypes = ['', 'D2D', 'V']
# The GFE supports filtering of the displayed data by site ID.
# If a config entry of the form SITEID_mask is set (to a named edit area),
# then the gfe will use the area as a mask in displaying data in the
# spatial editor. The user also can set masks for individual weather
# elements by adding config entries of the form SITEID_PARMNAME_mask.
# Simplified formats also available are PARMNAME_mask and just mask.
# The software first looks for a specific site/parmName entry, then
# for the site entry, then parmName, then just mask. If you want all of
# the weather elements clipped except one, then specify an empty name ("")
# of the edit area associated with that weather element.
#BOU_Wind_mask = "BOU"
#BOU_mask = "CWA"
#Wind_mask = "CWA"
#mask = "CWA"
#------------------------------------------------------------------------
# Initial GFE Startup Weather Elements, Samples, and Edit Action List
# Configurations
#------------------------------------------------------------------------
#------------------------------------------------------------------------
# Ordering the Weather Element Groups
# Defines the initial set of parameters to be loaded when starting
# the GFE. The name of the Group is specified.
# This is also the name of the default group of Edit Actions.
DefaultGroup = "Public"
# To provide an order for the weather elements, list an order
# preference in the list variable 'WEList'. Any elements not listed in
# 'WEList', will be listed at the bottom of the weather element group menu
# in alphabetical order.
# WEList = ["FireWx","Gweight","Public","Temps"]
# Defines the initial set of sample sets that are displayed when
# starting the GFE.
#DefaultSamples = ['DefaultSamples']
# Defines the Smart Tools to be displayed on the Spatial Editor button-3 pop up menu.
# All smart tools (screened by active element) will appear if
# AllEditActionsOnPopUp = yes
# Alternatively, you can set AllEditActionsOnPopUp = no and specify a list of smart tools (screened by active element) to appear.
AllEditActionsOnPopUp = yes
PopUpEditActions = ["Assign_Value","AdjustValue_Down","AdjustValue_Up","Smooth"]
# Defines the Smart Tools to be displayed on the Grid Manager button-3 pop up menu.
#GridManagerEditActions = ['nameOfTool1', 'nameOfTool2', 'nameOfTool3']
# Define keyboard shortcuts.
# You are allowed up to 200 shortcuts.
# IMPORTANT: You should test your shortcuts on your system as many
# keys are already bound by the system. For example, F10 is bound by some Tk
# widgets to bring up menus.
# Each shortcut is defined by a list with entries:
# Shortcut key
# State of ShortCut key
# None
# Ctrl (control key)
# Alt (alt key)
# Shift (shift key)
# key states can be combined (e.g. Ctrl+Alt)
# Action type:
# EditTool
# SmartTool
# Procedure
# Toggle
# Name of the action.
#
# The possible EditTool actions are:
# Sample
# Pencil
# Contour
# MoveCopy
# DrawEditArea
#
# The possible Toggle actions are:
# ISC
# TEGM (Temporal Editor/Grid Manager)
# HorizVert (Horizontal/Vertical Display)
#
# Examples:
#
#ShortCut1 = ["F1", "None", "SmartTool","Assign_Value"] # F1
#ShortCut2 = ["NUMPAD_SUBTRACT", "None", "SmartTool","AdjustValue_Down"] # Keypad -
#ShortCut3 = ["NUMPAD_ADD", "None", "SmartTool","AdjustValue_Up"] # Keypad +
#ShortCut4 = ["F2", "None", "SmartTool","Smooth"]
#ShortCut5 = ["F3", "None", "Procedure","ISC_Discrepancies"]
#ShortCut6 = ["F5", "None", "EditTool", "Sample"]
#ShortCut7 = ["F6", "None", "EditTool", "Contour"]
#ShortCut8 = ["F7", "None", "EditTool", "Pencil"]
#ShortCut9 = ["F8", "None", "EditTool", "MoveCopy"]
#ShortCut10 = ["F9", "None", "EditTool", "DrawEditArea"]
#
#ShortCut11 = ["F5", "Alt", "EditTool", "Sample"]
#ShortCut12 = ["F6", "Ctrl", "EditTool", "Contour"]
#ShortCut13 = ["F7", "Shift", "EditTool", "Pencil"]
# Defines the initial set of edit area groups to appear in the edit
# area and query dialog. If not specified, the default is Misc.
EditAreaGroups = ['Misc']
#------------------------------------------------------------------------
# Misc. Configuration
#------------------------------------------------------------------------
# This list of Weather element names will be used to sort the GridManager.
# Elements in this list will occur first. All others will be sorted
# by name.
GridManagerSortOrder = ['T', 'Td', 'RH', 'MaxT', 'MinT', 'MaxRH', 'MinRH',
'WindChill', 'HeatIndex', 'Wind', 'WindGust', 'FreeWind',
'TransWind', 'Sky', 'Wx', 'LAL', 'PoP', 'CWR', 'QPF', 'SnowAmt',
'StormTotalSnow', 'SnowLevel', 'MaxTAloft', 'WetBulb', 'Hazards',
'FzLevel', 'Haines', 'MixHgt']
# This algorithm determines the sorting order of weather elements in the
# Grid Manager, Samples, and Spatial Editor Legends. It contains of up to
# 5 characters in the order of sort importance. The characters are:
# 'm' for mutable, 'N' for parm name, 'M' for model name, 't' for model time,
# and 'o' for model optional type. For example, "mNMto" will result in
# the mutables first, then parm name, then model name, then model time, then
# optional type. This means that all of the weather elements with the same
# name from different models will be grouped together (except for the mutable).
#GridManagerSortAlgorithm = "mNMto"
# Auto Save Interval
# The Auto Save Interval entry defines the interval in minutes that is
# used to automatically save modified weather elements.
AutoSaveInterval = 0
# This is the list of entries that appear on the Publish Dialog. The
# entries are the names of the user-defined selection time ranges. The
# order of entries on the dialog will match this list.
PublishTimes = ['Today', 'Tonight', 'Tomorrow', 'Tomorrow Night', 'Day 3',
'Day 4', 'Day 5', 'Day 6', 'Day 7', 'Hour 0-240']
#Preselect a weather group to be loaded in the Publish Dialog
#PublishDialogInitialWEGroup = "Public"
# Interpolation Dialog defaults. By default, the dialog is shown
# with a minimum interval and duration. This can be changed. If the
# duration is specified, then the interval must also be specified.
# The units are hours and must range between 1 and 24.
#InterpolateDefaultInterval = 1
#InterpolateDefaultDuration = 1
# Create from Scratch Dialog defaults. By default, the dialog is shown
# with a minimum interval and duration. This can be changed. If the
# duration is specified, then the interval must also be specified.
# The units are hours and must range between 1 and 24.
#CreateScratchDefaultInterval = 1
#CreateScratchDefaultDuration = 1
# Defines the product file purge in hours
#ProductPurgeHours = 6
#------------------------------------------------------------------------
# Map Background Configuration
#------------------------------------------------------------------------
# Defines the initial loaded set of map backgrounds. The name of each
# background should match the name (without ".xml") of a map file in the
# CAVE/Bundles/maps directory under the Localization perspective.
MapBackgrounds_default = ['States','CWA']
# Specific Colors for a map background
# The user may specify a specific color to be used for a map background,
# rather than getting a random color assigned.
# Format is mapName_graphicColor = color.
#States_graphicColor = 'green'
# Specific Graphic Line Widths for a map
# Default line widths can be set for each map background based on
# map name. Zero is the default value, which represents thin lines.
# The larger the number, the wider the line. The format is mapName_lineWidth.
# Do not include a decimal point after these entries.
#States_lineWidth = 1
# Specific Line Pattern definitions for a map
# Default line patterns can be set up for each map background. The
# possible strings are "SOLID", "DASHED", "DOTTED", "DASHED_DOTTED". The
# values must be enclosed within quotes. The format is mapName_linePattern.
#States_linePattern = "SOLID"
# Specific Font Offsets for a map background.
# The font offset (called magnification on the GFE menus) allows the
# default font size to be increased or decreased on a per map basis.
# Numbers can range from -2 through +2. Format is mapName_fontOffset.
# Do not include a decimal point after these entries.
#States_fontOffset = 0
#------------------------------------------------------------------------
# Graphics Hardware Configurations
#------------------------------------------------------------------------
#
# general default X resources can be set here.
#
# Fonts. These are the various fonts that the GFE uses. They can be
# changed to increase/decrease the size of the text on the GFE. The
# fonts are in ascending sizes. A better way to override the fonts
# is to use the config items under UI Configuration.
# A valid font data representation is a string of the form fontname-style-height
# where fontname is the name of a font,
# style is a font style (one of "regular", "bold", "italic", or "bold italic")
# height is an integer representing the font height.
# Example: Times New Roman-bold-36.
TextFont0 = "DejaVu Sans Mono-regular-9"
TextFont1 = "DejaVu Sans Mono-regular-9"
TextFont2 = "DejaVu Sans Mono-bold-12"
TextFont3 = "DejaVu Sans Mono-bold-14"
TextFont4 = "DejaVu Sans Mono-bold-20"
# The color which will be used as the background for all of the display
# panes.
bgColor = "black"
#------------------------------------------------------------------------
# System Time Range Configuration
#------------------------------------------------------------------------
# These parameters indicate the span of the Grid Manager and Temporal
# Editor in relation to the current time. Units are in hours. If grids
# are present, the displayable time range may be expanded by the software.
SystemTimeRange_beforeCurrentTime = 48
SystemTimeRange_afterCurrentTime = 168
#------------------------------------------------------------------------
# UI Configuration
#------------------------------------------------------------------------
# Defines the color and pattern used in the Grid Manager to indicate
# a time selection.
Selected_color = 'LightSkyBlue'
Selected_fillPattern = 'TRANS_25PC_45DEG'
# Defines the color and pattern of the time scale lines in the Grid
# Manager and Temporal Editor
TimeScaleLines_color = 'Blue'
TimeScaleLines_pattern = 'DOTTED'
# Defines the color, width, and pattern used for the editor time line
# that runs through the Grid Manager and Temporal Editor
EditorTimeLine_color = 'Yellow'
EditorTimeLine_width = 2
EditorTimeLine_pattern = 'DOTTED'
# Defines the color used by the Grid Manager to indicate the
#current system time
CurrentSystemTime_color = 'Green'
# Defines the colors used in the Grid Manager to indicate that a
# time period is locked by you, or by another person.
LockedByMe_color = 'forestgreen'
LockedByMe_pattern = 'WHOLE'
LockedByOther_color = 'tomato2'
LockedByOther_pattern = 'WHOLE'
# Defines the visible, invisible, and active colors used in the Grid
# Manager to indicate when a grid block is either visible, invisible,
# and/or active. Defines the color used to indicate which grids
# may be modified during an edit action.(Preview_color)
TimeBlockVisible_color = 'White'
TimeBlockActive_color = 'Yellow'
TimeBlockInvisible_color = 'Gray50'
TimeBlockPreview_color = 'Cyan'
# Defines the color used to indicate the Edit Area on the spatial editor.
ReferenceSet_color = 'Gray80'
# Defines the border width used to indicate the Edit Area on the spatial editor
ReferenceSet_width = 0
# Defines the initial horizontal size of the grid manager when first
# starting the GFE in pixels. Do not place a decimal point after the number.
TimeScale_horizSize = 350
# Initial Legend Mode. Can be GRIDS for all weather elements (default),
# MUTABLE for just the Fcst weather elements,
# ACTIVE for just the active weather element, MAPS for just the maps,
# or SETIME for just the spatial editor time.
LegendMode = 'GRIDS'
# Initial Grid Manager Mode. Can be "Normal", "History", "Saved",
# "Modified", "Published", or "Sent". Default is "Normal".
InitialGMDisplayMode = 'Normal'
# Defines the number of Edit Area Quick Set Buttons. Do not place a
# decimal point after the number.
#QuickSetButtons = 4
# Sets the maximum number of menu items before the menu will cascade
# with a 'More >'. Do not place a decimal point after the number.
MaxMenuItemsBeforeCascade = 30
# Defines the percent that the office domain will be expanded for the
# spatial editor full-screen view. The user can specify the expansion
# for each of the four directions. If not specified, the default is 10%.
OfficeDomain_expandLeft = 10
OfficeDomain_expandRight = 10
OfficeDomain_expandTop = 10
OfficeDomain_expandBottom = 10
# Initial location of Edit Action Dialog
# These are absolute screen coordinates (not relative to GFE window)
# To put Edit Action Dialog in lower left corner, set Ycoord to 600
#EditActionDialog_Xcoord = 99
#EditActionDialog_Ycoord = 74
# Initial layout up of Grid Manager/Temporal Editor:
# Values: "OnTop" (default)
# "OnLeft"
#GM_TE_Layout = "OnTop"
# Default setting for temporal editor weather elements. Choices are
# ALL for all weather elements are displayed in the temporal
# editor, ALL_NOISC is for all weather elements except ISC (intersite coord)
# elements, MUTABLE for just the mutable weather elements (e.g., Fcst)
# displayed in the temporal editor, VISIBLE (default) for all visible
# elements in the grid manager and ACTIVE for just the single
# active weather element to be displayed in the temporal editor.
TemporalEditorWEMode = "VISIBLE"
# Extra categories for the formatter launcher.
# Products beginning with this name will get their own
# cascade.
#FormatterLauncherDialog_Categories = []
# Default setting for the Wx/Discrete Show Description option. Setting it
# to True will enable the descriptions, setting it to False will disable the
# descriptions.
#WxDiscrete_Description = True
# Default setting for the font and colors for the Product Output Dialog.
#ProductOutputDialog_font = TextFont2
#ProductOutputDialog_fgColor = "#000000"
#ProductOutputDialog_bgColor = "#d0d0d0"
#ProductOutputDialog_wrapMode = 1 #default, if not listed in wrapPils, nonWrap
ProductOutputDialog_wrapPils = []
ProductOutputDialog_nonWrapPils = ['AFM','PFM','FWF','SFT','WCN','FWS','TCV','HLS']
#ProductOutputDialog_wrapSize = 66
#ProductOutputDialog_lockColor = "blue"
#ProductOutputDialog_frameColor = "red"
# The initial size of the Call to action dialog (in pixels)
#ProductOutputDialog_CTAWidth = 575
#ProductOutputDialog_CTAHeight = 300
# Default directory to use for the ProductOutputDialog editor when
# {prddir} is not set in the product definition.
#ProductEditorDirectory = '/tmp'
#------------------------------------------------------------------------
# Process Monitor Options
#------------------------------------------------------------------------
#
# The maximum number of pending product scripts to queue.
#ProcessMonitorMaxPendingScripts = 10
# The maximum number of finished product scripts to keep around (so you can
# see their output).
#ProcessMonitorMaxOldScripts = 5
# The maximum number of product scripts to run at one time (user can still
# start more via the ProcessMonitorDialog).
#ProcessMonitorMaxScripts = 1
# The maximum number of pending text formatters to queue.
#ProcessMonitorMaxPendingFormatters = 10
# The maximum number of finished text formatters to keep around (so you can
# see their output).
#ProcessMonitorMaxOldFormatters = 5
# The maximum number of text formatters to run at one time (user can still
# start more via the ProcessMonitorDialog).
#ProcessMonitorMaxFormatters = 1
#------------------------------------------------------------------------
# Sample and Legend Colors, Sample Shadows
#------------------------------------------------------------------------
# This section provides some control over the sample colors and
# the image legend color. Normally these values are set to "white",
# but might need to be changed if the background color of the drawing
# panes is changed. The sample shadow may also be turned on
# or off.
# Alternative sample color. This is used primarily for ifpIMAGE when
# you want a specific color for the sample, rather than the default which
# is the graphic color. Format is parmname_Sample_color = "color".
# Note that this applies only if the data is displayed as a graphic.
# T_Sample_color = "#ff0672"
# Alternative legend color. This is used primarily for ifpIMAGE when
# you want a specific color for the legend, rather than the default which
# is the graphic color. Format is parmname_Legend_color = "color".
# Note that this applies only if the data is displayed as a graphic.
# T_Legend_color = "#ff0672"
# Sample LatLon and + Color. This affects the color of the '+' drawing,
# plus the color of the latitude/longitude samples on the spatial editor.
# SampleLLPlus_color = "white"
# Image Legend color. This affects the color of the legend when a weather
# element is displayed as an image. This also affects the sample color
# for a weather element displayed as an image.
#ImageLegend_color = "white"
# Sample Shadows. The samples can have a shadow character written in
# black offset from the sample text. This improves contrast when the
# main sample color is light and the background color (e.g., image) is
# also fairly light. Acceptable entries are yes and no.
#ShowSampleShadows = yes
# Sample Shadow Color. The color of the shadows defaults to black.
# You can override this with any valid color.
#SampleShadow_color = "#000000"
# SampleLabelXOffset and SampleLabelYOffset are the number of pixels you
# wish to move sample labels relative to their "normal" position.
#SampleLabelXOffset = 0
#SampleLabelYOffset = 0
# Limiting Samples to Specific Weather Elements
# Controls the weather elements that will be displayed as samples.
# This feature is normally only used in conjunction with the creation
# of PNG imagery. If not specified, then all visible weather elements
# will have a sample value.
#SampleParms = ['T', 'Wind']
# Using descriptive names instead of the pretty Wx strings for samples.
# This set of parallel lists translates a pretty Wx string into a
# more descriptive name for the sample labels.
#AltWxSampleLabels_prettyWx = ['Sct RW-', 'Sct SW-']
#AltWxSampleLabels_label = ['Rain Showers', 'Snow Showers']
# ISC Update Time. The samples can show the ISC Update Time if in ISC mode.
# Acceptable entries are yes and no.
ShowISCUpdateTime = yes
# ISC Site Id. The samples can show the ISC Site Id if in ISC mode.
# Acceptable entries are yes and no.
ShowISCSiteID = yes
# Enable ISC Markers. ISC Markers are only shown
# if ISC mode or an ISC grid is displayed. Acceptable entries are yes and no.
ShowISCMarkers = yes
# ISC Update Time for Marker. The sample markers can show the ISC
# Update Time if in ISC mode.
# Acceptable entries are yes and no.
ShowISCUpdateTimeMarker = yes
# ISC Site Id Marker. The sample markers can show the ISC Site Id
# if in ISC mode. Acceptable entries are yes and no.
ShowISCSiteIDMarker = yes
# ISC Official Symbol Marker. The sample markers can show the "P" symbol
# for the official database data or not. Acceptable entries are yes and no.
ShowISCOfficialSymbolMarker = yes
# ISC Official Symbol. The samples can show the "P" symbol for the
# official database data or not. Acceptable entries are yes and no.
ShowISCOfficialSymbol = yes
# Spatial Editor Color Bar Label/Tick Colors
# Controls the tick color, the foreground text color for the labels,
# and the foreground/background text colors for the pickup value. There
# is a special set of colors for the Wx/Discrete (WEATHER/DISCRETE) values.
#SEColorBar_tickColor = "white"
#SEColorBar_fgTextColor = "white"
#SEColorBar_fgPickUpColor = "white"
#SEColorBar_bgPickUpColor = "black"
#SEColorBar_fgWxPickUpColor = "white"
#SEColorBar_bgWxPickUpColor = "purple"
# Configure additional labels on the SE Color Bar for WEATHER.
# The format is an array of strings which represent the ugly weather
# string.
#Wx_AdditionalColorBarLabels = [ \
# "<NoCov>:<NoWx>:<NoInten>:<NoVis>:<NoAttr>" ]
#------------------------------------------------------------------------
# GFE Font Sizes
#------------------------------------------------------------------------
# This section provides the user the capability of changing the font
# sizes in various components of the GFE. The font numbers can range
# from 0 through 4 with 0 being the smallest.
# These font entries define the fonts used by various components of
# the GFE.
#ColorBarScale_font = 1
#ColorBarWxLabel_font = 2
#ColorBarPickUp_font = 3
#SESample_font = 2
#SEMarker_font = 3
#SELegend_font = 3
#TEDataSelector_font = 1
#TESample_font = 1
#TimeBlockLabel_font = 3
#TimeBlockSource_font = 1
#TimeScale_font = 2
#SetValueContLabel_font = 2
#SetValuePickUp_font = 3
# Defines the default labeling size on the Bounded Area display for
# weather, the contour tool depiction font, the map background font,
# and the contour labeling font. These fonts can also be modified on
# a per-parm basis using the fontOffset capability defined in the
# parameter configuration.
#BoundedArea_font = 2
#Cline_font = 2
#Contour_font = 2
#------------------------------------------------------------------------
# Grid Manager Saved, Published, Sent configurations
#------------------------------------------------------------------------
# Defines the colors and times used to color the Grid Manager when
# in the last saved, last modified, last published, or last sent display mode.
# These are parallel lists of minutes and colors. If the last saved, modified,
# published, or sendISC time is less than the given time (in minutes),
# then that color is used to display the box. If the last such time is
# greater than the final "minutes" entry in the list, the default Gray75 is used.
Modified_minutes = [60, 180, 360, 720, 1440, 2880 ]
Modified_colors = ["#0bc71e", "#60c7b8", "#417fc7", "#e17c10",
"#ebdf00", "#e11a00"]
Saved_minutes = [60, 180, 360, 720, 1440, 2880 ]
Saved_colors = ["#0bc71e", "#60c7b8", "#417fc7", "#e17c10",
"#ebdf00", "#e11a00"]
Published_minutes = [60, 180, 360, 720, 1440, 2880 ]
Published_colors = ["#0bc71e", "#60c7b8", "#417fc7", "#e17c10",
"#ebdf00", "#e11a00"]
Sent_minutes = [60, 120, 180, 240, 300, 360]
Sent_colors = ["#0bc71e", "#60c7b8", "#417fc7", "#e17c10",
"#ebdf00", "#e11a00"]
#------------------------------------------------------------------------
# Grid Data History configuration
#------------------------------------------------------------------------
# Defines the characters, colors, and patterns that will appear
# in the Grid Manager grid blocks
# to indicate the source, origin, and modification states.
#
# If the grid has been modified by me or someone else, the text in the
# grid block and the grid pattern are changed to those specified below:
HistoryUserModText_Me = "m" #Text for modified by me
HistoryUserModText_Other = "o" #Text for modified by other
HistoryUserModPattern_Me = "TRANS_25PC_45DEG" #Pattern for mod by me
HistoryUserModPattern_Other = "TRANS_25PC_135DEG" #Pattern for mod by other
# The text in the grid block and the grid color will represent the origin:
# Note that the user can also override the Populated text and color on a
# per-model basis in the next section.
HistoryOriginText_Populated = "P"
HistoryOriginText_Calculated = "C"
HistoryOriginText_Scratch = "S"
HistoryOriginText_Interpolated = "I"
HistoryOriginText_Other = "?"
HistoryOriginColor_Populated = "wheat"
HistoryOriginColor_Calculated = "red"
HistoryOriginColor_Scratch = "magenta"
HistoryOriginColor_Interpolated = "blue"
HistoryOriginColor_Other = "gray75"
# This next section applies to the text and the color of the grid blocks
# that have an origin of Populated. The model determines the text and color.
# The format of the color entry is HistoryModelColor_modelname. The format
# of the text entry is: HistoryModelText_modelname. If a model is not
# listed here, then the HistoryOriginText_Populated and
# HistoryOriginColor_Populated is used.
HistoryModelColor_gfsLR = '#30df10'
HistoryModelColor_RAP40 = '#00ffff'
HistoryModelColor_MAVMOS = '#e6c8a1'
HistoryModelColor_GFSMOS = '#e6d8a1'
HistoryModelColor_METMOS = '#e6b8a1'
HistoryModelColor_MEXMOS = '#e6a8a1'
HistoryModelColor_NAM80 = '#ffff52'
HistoryModelColor_NAM95 = '#ffff52'
HistoryModelColor_NAM40 = '#ff99ff'
HistoryModelColor_NAM12 = '#ffcaa0'
HistoryModelColor_GFS80 = 'pink'
HistoryModelColor_GFS40 = 'pink'
HistoryModelColor_GFS190 = 'pink'
HistoryModelColor_GWW = '#a0a0ff'
HistoryModelColor_HPCStn = '#d0d0a0'
HistoryModelColor_HPCGrid = '#d0d0b0'
HistoryModelColor_ISC = '#b43aee'
HistoryModelColor_LAPS = '#b06b72'
HistoryModelColor_HPCQPF = '#3dc9ff'
HistoryModelColor_HPCGuide = '#3dc9ff'
HistoryModelColor_RFCQPF = '#3bffb7'
HistoryModelColor_Restore = '#e0a0ff'
HistoryModelColor_DGEX = 'orange'
HistoryModelColor_MOSGuide = '#e608ff'
HistoryModelColor_OPCTAFBE = '#a0a0cc'
HistoryModelColor_OPCTAFBSW = '#a0a0cc'
HistoryModelColor_OPCTAFBNW = '#a0a0cc'
HistoryModelColor_RTMA = '#a0522d'
HistoryModelColor_NamDNG5 = '#808000'
HistoryModelText_GFS80 = 'GFS'
HistoryModelText_GFS40 = 'GFS'
HistoryModelText_GFS190 = 'GFS'
HistoryModelText_RAP40 = 'RUC'
HistoryModelText_GFSMOS = 'GFSMOS'
HistoryModelText_MEXMOS = 'MEXMOS'
HistoryModelText_MAVMOS = 'MAVMOS'
HistoryModelText_METMOS = 'METMOS'
HistoryModelText_NAM80 = 'N80'
HistoryModelText_NAM95 = 'N95'
HistoryModelText_NAM40 = 'N40'
HistoryModelText_NAM20 = 'N20'
HistoryModelText_NAM12 = 'N12'
HistoryModelText_gfsLR = 'gfsLR'
HistoryModelText_HPCStn = 'HPCs'
HistoryModelText_HPCGrid = 'HPCg'
HistoryModelText_GWW = 'GWW'
HistoryModelText_ISC = 'ISC'
HistoryModelText_LAPS = 'LAPS'
HistoryModelText_HPCQPF = 'HPCQPF'
HistoryModelText_HPCGuide = 'HPCGuide'
HistoryModelText_RFCQPF = 'RFCQPF'
HistoryModelText_Restore = 'Restore'
HistoryModelText_DGEX = 'DGEX'
HistoryModelText_MOSGuide = 'GMOS'
HistoryModelText_OPCTAFBE = 'OPC'
HistoryModelText_OPCTAFBSW = 'OPC'
HistoryModelText_OPCTAFBNW = 'OPC'
HistoryModelText_RTMA = 'RTMA'
HistoryModelText_NamDNG5 = 'Nd5'
#------------------------------------------------------------------------
# Algorithm Configuration
#------------------------------------------------------------------------
# Smart tools can access time-weighted averages of multiple grids. Since
# weather is discrete, the time-weighted average for weather is based on
# all weather values at that grid point, as long as they occupy at least
# the given percentage of all grids. Do not place a decimal point after
# the number.
SignificantWeatherTimeWeightAverage_percent = 40
# The default width of the pencil tool can be specified in grid cells
# on a per weather element basis. The format is parmName_pencilWidth.
# If not specified, the value defaults to 4.
#T_pencilWidth = 4
# Pencil Tool influence sizes are specified here
PencilToolInfluence_list = [1, 2, 4, 8, 12, 16]
# Smooth algorithm default value
SmoothSize = 3
# Smooth Size Choices
SmoothSizeList = [3, 5, 7, 9]
# User can control the interpolation algorithm for each weather element.
# The format of the string is parmName_interpolateAlgorithm. The available
# options, which must be quoted, are "CUBIC_ADVECT", "LINEAR_ADVECT",
# "CUBIC_NOADVECT", and "LINEAR_NOADVECT". By default, most elements use
# CUBIC_NOADVECT, except for Wx, PoP, Sky, and QPF which use CUBIC_ADVECT.
# Wind and Wx cannot be changed.
# T_interpolateAlgorithm = "CUBIC_NOADVECT"
#------------------------------------------------------------------------
# Menu and Dialog Configuration
#------------------------------------------------------------------------
# Entries allow the specification of the zoom factor (click 1) over the
# Pickup Value Dialog. There is only one zoom step. If not specified,
# the default is set to a zoom factor of 4. You can also specify specific
# zoom factors based on the parameter name.
# SetValue_zoom is the generic zoom value. parmName_SetValue_zoom is
# the parameter-specific zoom value. Do not place a decimal point
# after the numbers.
SetValue_zoom = 4
QPF_SetValue_zoom = 10
# The maximum value on the Set Delta Dialog may be set on a
# per weather element basis. Format is weName_MaxDeltaDialogValue.
# The floating-point entry requires a decimal point in the value.
# The default is 20% of the weather element data range.
#Sky_MaxDeltaDialogValue = 30.0
# The default value of the Interpolate Dialog mode may be set to
# either "Gaps" or "Edited", which refer to "By Gaps" and "Based on
# Edited Data" on the dialog. The default if not specified is "By Gaps".
#InterpolateDialogMode = "Gaps"
#------------------------------------------------------------------------
# Weather Element Configuration
#------------------------------------------------------------------------
# generic colors for graphics -----------------------------------
# These colors will be the colors assigned to the graphics, unless
# specific colors are assigned to each parameter.
Generic_colors = ['#00ff00', '#ff8e59', '#00ffff', '#e6c8a1',
'#ffff52', '#ff99ff', '#aeb370', '#ff4000',
'#e6c8a1']
# Specific Graphic Colors for a parameter
# The user may specify a specific color to be used for a parameter, rather
# than getting a random color assigned. This color will be assigned, if
# available. Format is parmName_graphicColor = color. The color does
# not need to be in the Generic_colors list.
#T_graphicColor = 'green'
Wx_graphicColor = '#ffffff'
# Specific Graphic Line Widths for a parameter
# Default line widths can be set for each weather element, which will
# be used to draw their graphics on the spatial editor. 0 is the default
# value, which represents thin lines. The larger the number, the wider
# the line. The format is parmName_lineWidth. Do not include a decimal
# point after these entries.
#T_lineWidth = 1
# Specific Line Pattern definitions for a parameter.
# Default line patterns can be set up for each weather element. The
# possible strings are "SOLID", "DASHED", "DOTTED", "DASH_DOTTED". The
# values must be enclosed within quotes. The format is parmName_linePattern.
#T_linePattern = "SOLID"
# Specific Font Offsets for a parameter.
# The font offset (called magnification on the GFE menus) allows the
# default font size to be increased or decreased on a per-parameter
# basis. Note that for wind arrows/barbs, the fontOffset controls the
# size of the wind arrows/barbs. Numbers can range from -2 through +2.
# Format is parmName_fontOffset. Do not include a decimal point
# after these entries.
#T_fontOffset = 0
# Specific Density definitions for a parameter.
# The density controls the packing of wind barbs and arrows for the vector
# spatial editor displays, and the packing of contour intervals for the
# scalar spatial editor displays. The default is zero. Densities and
# contour values are related to each other. Typical values can range
# from -2 through +2. You can use values outside of this range if
# desired. Format is parmName_density. Do not include a
# decimal point after these entries.
#T_density = 0
# temporal editor sizes -----------------------------------------
# the initial size of temporal editor panes may be defined on a
# per parameter basis. If not specified, the default is 150 pixels.
# Format is: parmName_temporalDataPaneSize = size
# Do not place a decimal point after the numbers.
# Wx_temporalDataPaneSize = 200
# contour values -----------------------------------------------
# contour values may be defined on a per-parameter basis. If not
# defined, then contour values are automatically computed.
# Format is wxElementName_contourValues = [c1, c2, c3, c4, ... ]
# Be sure to include decimal points in each entry.
# This overrides any entries that may exist in contour interval.
QPF_contourValues = [0.01, 0.05, 0.10, 0.15, 0.20, 0.25, 0.30, 0.40, 0.50,
0.60, 0.7, 0.8, 0.9, 1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4, 2.6, 2.8,
3.0, 3.2, 3.4, 3.6, 3.8, 4.0, 4.2, 4.4, 4.6, 4.8, 5.0]
Topography_contourValues = [5.0, 10.0, 20.0, 30.0, 40.0, 50.0,
60.0, 70.0, 80.0, 90.0, 100.0, 125.0, 150.0, 175.0, 200.0, 250.0,
300.0, 350.0, 400.0, 450.0, 500.0, 600.0, 700.0, 800.0, 900.0,
1000.0, 1250.0, 1500.0, 1750.0, 2000.0, 2500.0, 3000.0, 3500.0,
4000.0, 4500.0, 5000.0, 5500.0, 6000.0, 6500.0, 7000.0, 7500.0,
8000.0, 8500.0, 9000.0, 9500.0, 10000.0, 11000.0, 12000.0, 13000.0,
14000.0, 15000.0, 16000.0, 17000.0, 18000.0, 19000.0, 20000.0]
# contour intervals -----------------------------------------------
# contour intervals may be defined on a per-parameter basis. If not
# defined, then contour values are automatically computed.
# Format is wxElementName_contourInterval = value.
# Be sure to include decimal points in the entry.
# Note, you can also specify wxElementName_contourValues, which
# will override the entry for contour interval.
Sky_contourInterval = 10.0
PoP_contourInterval = 10.0
MinT_contourInterval = 5.0
MaxT_contourInterval = 5.0
T_contourInterval = 5.0
Td_contourInterval = 5.0
# delta values
# Delta values define the default delta (adjust up, adjust down) value
# for the adjust operations. The user can redefine this at any time
# through the GUI. If not specified, the delta value defaults to
# the precision value. For example, a precision of 0 indicates a delta of 1.0,
# and a precision of 1 indicates a delta of 0.1.
# Format is parmName_deltaValue = value.
# Be sure to include a decimal point.
#parmName_deltaValue = 10.0
FzLevel_deltaValue = 100.0
SnowLevel_deltaValue = 100.0
# fuzz values
# fuzz values define the value considered to be the same during a
# homogeneous area select using the GridPoint Tool. For example, if the
# fuzz is 2.0 degrees for Temperature and you click on 40 degrees, then
# all points between 38 and 42 will be selected as long as they are
# contiguous to the click point. If not specified, the fuzz is set
# to 1/100 of the parm range. Format is parmName_fuzzValue = value.
# Be sure to include a decimal point.
#parmName_fuzzValue = 10.0
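# For instance, a hypothetical entry matching the Temperature example in the
# description above (commented out so it has no effect):
#T_fuzzValue = 2.0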
# visual types
# This section defines the spatial and temporal editor visualization
# types for the scalar, vector, and weather parameters. There are two
# modes, graphic and image. For example, the weather parameter may be
# viewed as a bounded area and an image. Available types:
# Spatial Editor Options:
# Scalar: Image, Contour
# Vector: Image, WindBarb, WindArrow
# Weather: Image, BoundedArea
# Discrete: Image, BoundedArea
# Temporal Editor Options:
# Scalar: TEColorBar, TEColorRangeBar, TimeBar, RangeBar
# Vector: TEColorBar, TEColorRangeBar, TimeBar, RangeBar
# Weather: TEColorBar, TEColorRangeBar
# Discrete: TEColorBar, TEColorRangeBar
#
# format is: parmName_editorImageType = [ types ] or
# parmName_editorGraphicType = [ types ] where 'editor' is replaced with
# spatial or temporal.
# For example, to make wind appear as wind arrows on the spatial editor
# in graphic mode: Wind_spatialGraphicType = ["WindArrow"].
Wx_spatialImageType = [ "Image", "BoundedArea" ]
Headlines_spatialImageType = [ "Image", "BoundedArea" ]
Swell_spatialImageType = [ "Image", "WindArrow" ]
Swell2_spatialImageType = [ "Image", "WindArrow" ]
Swell_spatialGraphicType = [ "WindArrow" ]
Swell2_spatialGraphicType = [ "WindArrow" ]
# Bounded Area Visual attributes
# The user may turn on/off the boundary, and the text labels, for
# the bounded area visual. Allowable values are yes and no (or True and False).
# By default, they are both enabled.
#BoundedArea_Labels = yes
#BoundedArea_Boundary = yes
# Wind Barb and Arrow Default Sizes.
# The user may specify the default wind barb and arrow sizes for the GFE
# as a whole, or per weather element. The default size is 60
# pixels. The entry format for a particular weather element definition
# of arrow or barb size is parmName_windArrowDefaultSize and
# parmName_windBarbDefaultSize.
WindArrowDefaultSize = 60
WindBarbDefaultSize = 60
#Wind_windArrowDefaultSize = 60
#Wind_windBarbDefaultSize = 60
# Wind Arrow Scaling
# The user may specify the default scaling for the wind arrow. If not
# specified, then the wind arrows will grow linearly with an increase
# in magnitude. To emphasize the lower ranges, the user may set the
# wind arrow logarithmic scaling. The lower the number,
# the steeper the log curve will appear. Refer to on-line documentation
# for example values. Include decimal points with the numbers.
# Note that the factor needs to be greater than 0. The format of the
# entry is parmName_arrowScaling.
Wind_arrowScaling = 0.03
Swell_arrowScaling = 0.001
Swell2_arrowScaling = 0.001
# Wind Sample Format
# The user may specify the default sample format for vector weather elements.
# If not specified, then the format is "ddff". The user may specify a format
# for all vector elements, or can specify the format for a particular weather
# element. The four possible formats are "ddff", "8pt", "16pt", and "d/f".
# The configuration entry for the default sample format for all vector
# elements is WindFormat = "type". The entry format to define the format for
# a specific entry is parmName_windFormat = "type".
WindFormat = "ddff"
#Swell_windFormat = "8pt"
# Default Values (for create from scratch)
# The default values for SCALAR, VECTOR, WEATHER, and DISCRETE may be
# specified on a per-weather element basis. By default, SCALAR has the
# weather element's minimum value, VECTOR has a magnitude and direction of 0,
# WEATHER has <NoWx>, and DISCRETE has the first defined discrete key
# (always <None> for Hazards grids, user-defined DISCRETE grids may vary).
# Format of the entry is parmName_defaultValue, or parmName_level_defaultValue
# for non-surface based SCALAR, WEATHER, or DISCRETE elements. For VECTOR,
# the format is slightly different: parmName_magDefaultValue has the
# magnitude, and parmName_dirDefaultValue has the direction in degrees.
# A decimal point is required for SCALAR and VECTOR; strings are required for
# WEATHER and DISCRETE.
#
#T_defaultValue = 32.0
#Wx_defaultValue = "<NoCov>:<NoWx>:<NoInten>:<NoVis>:"
#Wind_dirDefaultValue = 90.0
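# A hypothetical magnitude default for a vector element, using the
# parmName_magDefaultValue form described above (commented out):
#Wind_magDefaultValue = 5.0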
#------------------------------------------------------------------------
# Weather/Discrete Common Value Definitions
#------------------------------------------------------------------------
# the following describes common types that appear on the temporal
# editor popup menu and the spatial editor color bar popup menu.
# For WEATHER, the format is the "ugly" string of the Weather Key. For
# DISCRETE, the format is the key string of the Discrete Key.
# Prefixing a string with other strings that end with a vertical
# bar (|) will place these strings in a cascade,
# such as "Winter|Wide:S:--:<NoVis>:<NoAttr>",
# which will put the widespread snow under a Winter cascade. The format
# of this entry is parmName_commonValues, and applies to Weather and
# Discrete only.
Wx_commonValues = [ \
"<NoCov>:<NoWx>:<NoInten>:<NoVis>:<NoAttr>",
"Wide:R:-:<NoVis>:<NoAttr>",
"Wide:S:--:<NoVis>:<NoAttr>",
"Wide:R:-:<NoVis>:<NoAttr>^Wide:S:-:<NoVis>:<NoAttr>",
"Sct:RW:-:<NoVis>:<NoAttr>",
"Sct:SW:-:<NoVis>:<NoAttr>",
"Sct:T:<NoInten>:<NoVis>:<NoAttr>^Sct:RW:-:<NoVis>:<NoAttr>",
"Patchy:F:<NoInten>:<NoVis>:<NoAttr>"]
Hazards_commonValues = [ \
"Watches|Fire Weather|FW.A",
"Watches|Hydrology|FF.A",
"Watches|Hydrology|FA.A",
"Watches|Coastal Flooding|CF.A",
"Watches|Coastal Flooding|LS.A",
"Watches|Marine|GL.A",
"Watches|Marine|HF.A",
"Watches|Marine|SE.A",
"Watches|Marine|SR.A",
"Watches|Marine|UP.A",
"Watches|Non-Precipitation|EH.A",
"Watches|Non-Precipitation|FZ.A",
"Watches|Non-Precipitation|HW.A",
"Watches|Non-Precipitation|HZ.A",
"Watches|Non-Precipitation|EC.A",
"Watches|Winter Storm|WC.A",
"Watches|Winter Storm|WS.A",
"Warnings|Fire Weather|FW.W",
"Warnings|Coastal Flooding|CF.W",
"Warnings|Coastal Flooding|LS.W",
"Warnings|Coastal Flooding|SU.W",
"Warnings|Marine|MH.W",
"Warnings|Marine|HF.W",
"Warnings|Marine|GL.W",
"Warnings|Marine|UP.W",
"Warnings|Marine|SR.W",
"Warnings|Marine|SE.W",
"Warnings|Non-Precipitation|AF.W",
"Warnings|Non-Precipitation|DU.W",
"Warnings|Non-Precipitation|EH.W",
"Warnings|Non-Precipitation|FZ.W",
"Warnings|Non-Precipitation|HW.W",
"Warnings|Non-Precipitation|HZ.W",
"Warnings|Non-Precipitation|EC.W",
"Warnings|Winter Storm|BZ.W",
"Warnings|Winter Storm|IS.W",
"Warnings|Winter Storm|LE.W",
"Warnings|Winter Storm|WC.W",
"Warnings|Winter Storm|WS.W",
"Advisories|Marine|UP.Y",
"Advisories|Marine|LO.Y",
"Advisories|Marine|SC.Y",
"Advisories|Marine|SW.Y",
"Advisories|Marine|BW.Y",
"Advisories|Marine|RB.Y",
"Advisories|Marine|SI.Y",
"Advisories|Marine|MF.Y",
"Advisories|Marine|MS.Y",
"Advisories|Marine|MH.Y",
"Advisories|Coastal Flooding|CF.Y",
"Advisories|Coastal Flooding|LS.Y",
"Advisories|Coastal Flooding|SU.Y",
"Advisories|Non-Precipitation|AS.O",
"Advisories|Non-Precipitation|AS.Y",
"Advisories|Non-Precipitation|AQ.Y",
"Advisories|Non-Precipitation|DU.Y",
"Advisories|Non-Precipitation|FG.Y",
"Advisories|Non-Precipitation|SM.Y",
"Advisories|Non-Precipitation|ZF.Y",
"Advisories|Non-Precipitation|FR.Y",
"Advisories|Non-Precipitation|HT.Y",
"Advisories|Non-Precipitation|LW.Y",
"Advisories|Non-Precipitation|AF.Y",
"Advisories|Non-Precipitation|WI.Y",
"Advisories|Winter Weather|WC.Y",
"Advisories|Winter Weather|WW.Y",
"Statements|Coastal Flooding|CF.S",
"Statements|Coastal Flooding|LS.S",
"Statements|Coastal Flooding|RP.S",
"Statements|Marine|MA.S",
]
#------------------------------------------------------------------------
# Weather Dialog Default Values
#------------------------------------------------------------------------
# the following describes the intensity and coverage/probability defaults
# that appear in the Set Value dialog for Weather data. The format is
# the weather type (e.g., RW), followed by the keyword. The actual value
# is a string surrounded in quotes.
# Define the weather dialog default coverage/probabilities
R_defaultCoverage = "Wide"
RW_defaultCoverage = "Sct"
S_defaultCoverage = "Wide"
SW_defaultCoverage = "Sct"
T_defaultCoverage = "Sct"
# Define the weather dialog default intensities
R_defaultIntensity = "-"
RW_defaultIntensity = "-"
S_defaultIntensity = "-"
SW_defaultIntensity = "-"
L_defaultIntensity = "-"
ZR_defaultIntensity = "-"
ZL_defaultIntensity = "-"
IP_defaultIntensity = "-"
#------------------------------------------------------------------------
# Default Discrete Color Table Algorithm Configuration
#------------------------------------------------------------------------
# DiscreteOverlapPatterns are used for overlapping (non-exclusive)
# Discrete weather elements when two or more values are overlapping.
# Each entry denotes the fill pattern to use when it is overlapping
# another pattern. The available types are: WHOLE, WIDE, SCATTERED,
# WIDE_SCATTERED, ISOLATED, TRANS_25PC_45DEG, SELECTED_AREA, OCNL,
# LKLY, TRANS_25PC_135DEG, DUALCURVE, CURVE, VERTICAL, CROSS, HORIZONTAL,
# BIGCROSS. DiscreteOverlapPatterns are used for all discrete weather
# elements, unless a parmName_level_DiscreteOverlapPatterns is found.
#------------------------------------------------------------------------
DiscreteOverlapPatterns = ['TRANS_25PC_45DEG', 'TRANS_25PC_135DEG', 'CROSS']
#pName_level_DiscreteOverlapPatterns = ['pat1', 'pat2', 'pat3']
# DiscreteComplexColor is used when there aren't enough fill patterns
# defined for overlap. This color is used when a very complex overlapping
# situation occurs. DiscreteComplexColor applies to all discrete
# weather elements, unless a parmName_level_DiscreteComplexColor
# value is found. Default is "White".
#DiscreteComplexColor = 'White'
#pName_level_DiscreteComplexColor = 'color'
# DiscreteComplexPattern is used when there aren't enough fill patterns
# defined for overlap. This pattern is used when a very complex overlapping
# situation occurs. DiscreteComplexPattern applies to all discrete
# weather elements, unless a parmName_level_DiscreteComplexPattern
# value is found. Default is "SCATTERED".
#DiscreteComplexPattern = 'SCATTERED'
#pName_level_DiscreteComplexPattern = 'pattern'
#------------------------------------------------------------------------
# Default (non-weather) Color Table Algorithm Configuration
#------------------------------------------------------------------------
# The default color table is used for all parameters unless overridden in
# this configuration file. The left wavelength defines the left side
# value for the color in nanometers. 380 is roughly purple. The right
# wavelength defines the right side value for the color in nanometers.
# 650 is red. The number of colors indicates the number of color bins
# that will be used when the default color table is displayed.
# Use decimal points after the wavelengths, but not the numColors.
DefaultColorTable_leftWavelength = 380.0
DefaultColorTable_rightWavelength = 650.0
DefaultColorTable_numColors = 150
# color table default entries -----------------------------
# Entries can be made to define a default color table for a particular
# parameter. If a default color table is not defined for a parameter, then
# the spectrum defined in DefaultColorTable* will be used for the parameter.
# Entries are of the form parmName_defaultColorTable="colortablename".
# For example, if you want MaxT to always have a "Low-Enhanced" color table,
# then the entry would be as shown below.
# MaxT_defaultColorTable = "Low-Enhanced"
# You can determine the possible color tables that are on the system by
# displaying any scalar image and selecting "Change Color Table To".
RipProb_defaultColorTable="GFE/RipProb"
ErosionProb_defaultColorTable="GFE/RunupProbs"
OverwashProb_defaultColorTable="GFE/RunupProbs"
T_defaultColorTable="GFE/Mid Range Enhanced"
Td_defaultColorTable="GFE/Mid Range Enhanced"
MaxT_defaultColorTable="GFE/Mid Range Enhanced"
MinT_defaultColorTable="GFE/Mid Range Enhanced"
Sky_defaultColorTable="GFE/Cloud"
Wind_defaultColorTable="GFE/Low Range Enhanced"
Wind20ft_defaultColorTable="GFE/Low Range Enhanced"
PoP_defaultColorTable="GFE/ndfdPoP12"
QPF_defaultColorTable="GFE/Gridded Data"
Ttrend_defaultColorTable = "GFE/Discrepancy"
RHtrend_defaultColorTable = "GFE/Discrepancy"
Wetflag_defaultColorTable = "GFE/YesNo"
DeltaMinT_defaultColorTable = "GFE/Discrepancy"
DeltaMaxT_defaultColorTable = "GFE/Discrepancy"
DeltaWind_defaultColorTable = "GFE/Discrepancy"
DeltaSky_defaultColorTable = "GFE/Discrepancy"
DeltaPoP_defaultColorTable = "GFE/Discrepancy"
# Default Satellite weather element color tables
visibleEast_defaultColorTable = "Sat/VIS/ZA (Vis Default)"
ir11East_defaultColorTable = "Sat/IR/CIRA (IR Default)"
ir13East_defaultColorTable = "Sat/IR/CIRA (IR Default)"
ir39East_defaultColorTable = "Sat/IR/CIRA (IR Default)"
waterVaporEast_defaultColorTable = "Sat/WV/Gray Scale Water Vapor"
visibleCentral_defaultColorTable = "Sat/VIS/ZA (Vis Default)"
ir11Central_defaultColorTable = "Sat/IR/CIRA (IR Default)"
ir13Central_defaultColorTable = "Sat/IR/CIRA (IR Default)"
ir39Central_defaultColorTable = "Sat/IR/CIRA (IR Default)"
waterVaporCentral_defaultColorTable = "Sat/WV/Gray Scale Water Vapor"
visibleWest_defaultColorTable = "Sat/VIS/ZA (Vis Default)"
ir11West_defaultColorTable = "Sat/IR/CIRA (IR Default)"
ir13West_defaultColorTable = "Sat/IR/CIRA (IR Default)"
ir39West_defaultColorTable = "Sat/IR/CIRA (IR Default)"
waterVaporWest_defaultColorTable = "Sat/WV/Gray Scale Water Vapor"
VisibleE_defaultColorTable = "Sat/VIS/ZA (Vis Default)"
IR11E_defaultColorTable = "Sat/IR/CIRA (IR Default)"
IR13E_defaultColorTable = "Sat/IR/CIRA (IR Default)"
IR39E_defaultColorTable = "Sat/IR/CIRA (IR Default)"
WaterVaporE_defaultColorTable = "Sat/WV/Gray Scale Water Vapor"
FogE_defaultColorTable = "Sat/WV/Gray Scale Water Vapor"
VisibleC_defaultColorTable = "Sat/VIS/ZA (Vis Default)"
IR11C_defaultColorTable = "Sat/IR/CIRA (IR Default)"
IR13C_defaultColorTable = "Sat/IR/CIRA (IR Default)"
IR39C_defaultColorTable = "Sat/IR/CIRA (IR Default)"
WaterVaporC_defaultColorTable = "Sat/WV/Gray Scale Water Vapor"
FogC_defaultColorTable = "Sat/WV/Gray Scale Water Vapor"
VisibleW_defaultColorTable = "Sat/VIS/ZA (Vis Default)"
IR11W_defaultColorTable = "Sat/IR/CIRA (IR Default)"
IR13W_defaultColorTable = "Sat/IR/CIRA (IR Default)"
IR39W_defaultColorTable = "Sat/IR/CIRA (IR Default)"
WaterVaporW_defaultColorTable = "Sat/WV/Gray Scale Water Vapor"
FogW_defaultColorTable = "Sat/WV/Gray Scale Water Vapor"
Hazards_defaultColorTable = "GFE/Hazards"
# Start HTI entries
ProposedSS_defaultColorTable="GFE/w"
ProposedSSnc_defaultColorTable="GFE/w"
CollabDiffSS_defaultColorTable="GFE/diffSS"
InundationMax_defaultColorTable="GFE/Inundation"
InundationMax_maxColorTableValue = 30.0
InundationMax_minColorTableValue = 0.0
InundationMaxnc_defaultColorTable="GFE/Inundation"
InundationMaxnc_maxColorTableValue = 30.0
InundationMaxnc_minColorTableValue = 0.0
InundationTiming_defaultColorTable="GFE/Inundation"
InundationTiming_maxColorTableValue = 30.0
InundationTiming_minColorTableValue = 0.0
InundationTimingnc_defaultColorTable="GFE/Inundation"
InundationTimingnc_maxColorTableValue = 30.0
InundationTimingnc_minColorTableValue = 0.0
SurgeHtPlusTideMLLW_defaultColorTable="GFE/Inundation"
SurgeHtPlusTideMLLW_maxColorTableValue = 30.0
SurgeHtPlusTideMLLW_minColorTableValue = 0.0
SurgeHtPlusTideMLLWnc_defaultColorTable="GFE/Inundation"
SurgeHtPlusTideMLLWnc_maxColorTableValue = 30.0
SurgeHtPlusTideMLLWnc_minColorTableValue = 0.0
SurgeHtPlusTideMHHW_defaultColorTable="GFE/Inundation"
SurgeHtPlusTideMHHW_maxColorTableValue = 30.0
SurgeHtPlusTideMHHW_minColorTableValue = 0.0
SurgeHtPlusTideMHHWnc_defaultColorTable="GFE/Inundation"
SurgeHtPlusTideMHHWnc_maxColorTableValue = 30.0
SurgeHtPlusTideMHHWnc_minColorTableValue = 0.0
SurgeHtPlusTideNAVD_defaultColorTable="GFE/Inundation"
SurgeHtPlusTideNAVD_maxColorTableValue = 30.0
SurgeHtPlusTideNAVD_minColorTableValue = 0.0
SurgeHtPlusTideNAVDnc_defaultColorTable="GFE/Inundation"
SurgeHtPlusTideNAVDnc_maxColorTableValue = 30.0
SurgeHtPlusTideNAVDnc_minColorTableValue = 0.0
SurgeHtPlusTideMSL_defaultColorTable="GFE/Inundation"
SurgeHtPlusTideMSL_maxColorTableValue = 30.0
SurgeHtPlusTideMSL_minColorTableValue = 0.0
SurgeHtPlusTideMSLnc_defaultColorTable="GFE/Inundation"
SurgeHtPlusTideMSLnc_maxColorTableValue = 30.0
SurgeHtPlusTideMSLnc_minColorTableValue = 0.0
prob34_defaultColorTable="GFE/TPCprob"
prob64_defaultColorTable="GFE/TPCprob"
pwsD34_defaultColorTable="GFE/TPCprob"
pwsD64_defaultColorTable="GFE/TPCprob"
pwsN34_defaultColorTable="GFE/TPCprob"
pwsN64_defaultColorTable="GFE/TPCprob"
pws34int_defaultColorTable="GFE/TPCprob"
pws64int_defaultColorTable="GFE/TPCprob"
FloodingRainThreat_defaultColorTable = "GFE/gHLS_new"
StormSurgeThreat_defaultColorTable = "GFE/gHLS_new"
TornadoThreat_defaultColorTable = "GFE/gHLS_new"
WindThreat_defaultColorTable = "GFE/gHLS_new"
# End HTI entries
# TopDownWx
MaxTAloft_defaultColorTable="WarmNoseTemp"
WetBulb_defaultColorTable="WetBulbTemp"
# Logarithmic Color Table Assignments
# By default, all color tables are linear. Certain parameters may lend
# themselves to a logarithmic color table. To enable a logarithmic
# color table for a parameter, an entry in the form of
# parmName_LogFactor=factor is required. The closer the value is to zero,
# the steeper the log curve will appear. Refer to on-line documentation
# for example values. Include decimal points with the numbers.
# Note that the factor needs to be greater than 0
QPF_LogFactor = 0.03
SnowAmt_LogFactor = 0.6
# Default Max/Min Ranges for Color Tables
# By default, all colors tables (except for WEATHER) are spread out over
# the range of the minimum to maximum weather element possible value, as
# defined by serverConfig.py. The initial range of the color table can
# be specified through these entries. The form of the two entries are:
# parmName_maxColorTableValue and parmName_minColorTableValue. These
# values are floats and MUST have a decimal point in them.
#T_maxColorTableValue = 120.0
#T_minColorTableValue = -30.0
WetBulb_maxColorTableValue = 50.0
WetBulb_minColorTableValue = 20.0
# Fit To Data Color Tables
# Automatic Fit To Data color tables can be set up for the initial set
# of data in a weather element. The form of the entry is:
# parmName_fitToDataColorTable. The fit to data overrides any
# specified max/min color table values. There are several algorithms
# available: "None", "All Grids", "Single Grid", "All Grids over Area",
# and "Single Grid over Area". The Single Grid options are not
# available for the GFE and only apply to the ifpIMAGE program.
# Note that the ifpIMAGE program can specify an edit area to use for
# the "All Grids over Area" and "Single Grid over Area" algorithms.
# See Png_fitToDataArea. For the GFE, the active edit area is used in
# the fit to data scheme.
#T_fitToDataColorTable = "None"
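# A hypothetical fit-to-data entry using one of the algorithms listed above
# ("QPF" is just an illustrative element name; commented out):
#QPF_fitToDataColorTable = "All Grids"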
# Configure the desired labels on the SE Color Bar on a per-parameter basis.
# The format is parmName + "_ColorBarLabels". For example, the color bar
# would be labeled at 10, 20, 40 & 60 for temperature with the following
# entry. Note that the values need to be entered as floats for all parameters.
# This is only used for SCALAR or VECTOR parameters.
# For WEATHER or DISCRETE parameters, use parmName_additionalColorBarLabels.
#T_ColorBarLabels = [10.00, 20.00, 40.00, 60.00]
#------------------------------------------------------------------------
# Weather Color Algorithm Configuration
#------------------------------------------------------------------------
# Color Tables for Weather are handled differently than scalar and
# vector data. Coverages are denoted by fill patterns. Composite
# types by colors. Complex weather of more than two coverages will
# result in a solid fill pattern and can't be configured.
# The WeatherCoverage_names and WeatherCoverage_fillPatterns indicate
# the fill pattern used for a particular weather coverage or probability.
# These are parallel lists. For example, if "Iso" coverage is in the 1st
# entry of the list and ISOLATED appears in the first entry of the
# fill patterns, then for Iso coverage, the fill pattern ISOLATED will
# be used.
WeatherCoverage_names = ["Iso", "Sct", "Num", "Wide", "Ocnl", "SChc",
"Chc", "Lkly", "Def", "Patchy", "<NoCov>", "Areas",
"Frq", "Brf", "Pds", "Inter"]
WeatherCoverage_fillPatterns = ["WIDE_SCATTERED", "SCATTERED", "LKLY", "WIDE",
"OCNL", "WIDE_SCATTERED", "SCATTERED", "LKLY",
"WIDE", "CURVE", "WHOLE", "DUALCURVE",
"OCNL", "OCNL", "OCNL", "OCNL"]
# The weather type entries are generic entries without intensities.
# Combinations are permitted. The WeatherType_names and WeatherType_colors
# are parallel lists of names and colors. The default weather color table
# algorithm looks at the weather type or combination of types, as listed
# in the _names, and matches the list with the specified color. For
# example, if T appears in the names as the first entry and brown2 appears
# in the colors for the first entry, then for weather type T, the color
# shown will be brown2.
WeatherType_names = ["<NoWx>", "T", "R", "RW", "L", "ZR", "ZL",
"S", "SW", "IP", "F", "H", "BS", "K", "BD",
"SA", "LC", "FR", "AT", "TRW"]
WeatherType_colors = ["Gray40", "red3", "ForestGreen",
"ForestGreen", "CadetBlue1", "darkorange1",
"goldenrod1", "Grey65", "Grey65", "plum1",
"khaki4", "Gray75", "snow", "grey30", "Brown",
"blue1", "coral1", "pale turquoise", "DeepPink",
"red3"]
# The weather type entries are specific entries that contain intensities.
# Combinations are permitted. The WeatherTypeInten_names and
# WeatherTypeInten_colors are parallel lists of names and colors. The
# algorithm looks first at this list to find a specific type/intensity
# match. If not found, then the algorithm looks in the WeatherType_names
# and WeatherType_colors list for a match. If not found, then a generic
# color is assigned.
# The weather type with intensity entries are specific entries
WeatherTypeInten_names = ["T+", "Rm", "R+", "RWm", "RW+"]
WeatherTypeInten_colors = ["red1", "green", "green", "green", "green"]
# Colors to use for weather which was not defined using any of the methods
# found above. The colors in this list will be used before a "random" color
# is chosen.
WeatherGeneric_colors = ["Coral", "CadetBlue2", "Aquamarine", "DarkKhaki",
"DodgerBlue", "IndianRed1", "PaleGreen", "MistyRose",
"chartreuse3", "PapayaWhip"]
#------------------------------------------------------------------------
# Preference Defaults
#------------------------------------------------------------------------
# Default setting for changing the active grid to an image-type display.
# This occurs when selecting "Edit Grid" from the Grid Manager or when
# setting a parameter active from the legend.
ImageOnActiveSE = yes
# Default visibility setting for showing the time scale lines in the
# Grid Manager and Temporal Editor
TimeScaleLines = yes
# Default visibility setting for showing the editor time lines in the
# Grid Manager and Temporal Editor. The editor time line is always on
# for the Time Scale.
EditorTimeLines = yes
# Default visibility setting for showing the split boundary or time
# constraints in the Grid Manager and Temporal Editor for mutable parameters.
SplitBoundaryDisplay = yes
# Default setting for combining like parameters (same units) in the
# temporal editor when loading parameters.
TemporalEditorOverlay = yes
# Default setting for temporal editor edits. Choices are absolute mode
# or relative mode which is defined by "yes" or "no".
TemporalEditorAbsoluteEditMode = no
# Initial statistics mode for temporal editor range statistics dialog.
# Choices are "ABSOLUTE", "MODERATED", or "STANDARD_DEVIATION".
TemporalEditorStatisticsMode = "ABSOLUTE"
# Initial minimum and maximum values for scales on temporal editor range
# statistics dialog in moderated and standard deviation operation modes
# (dialog is not shown in absolute mode). Do NOT include a decimal point
# for moderated mode values; you MUST include a decimal point for standard
# deviation values.
TemporalEditorStatisticsModeModeratedMin = 15
TemporalEditorStatisticsModeModeratedMax = 15
TemporalEditorStatisticsModeStandardDeviationMin = 1.0
TemporalEditorStatisticsModeStandardDeviationMax = 1.0
# Default setting for editing components of vector parameters. Choices
# are MAG, DIR, or BOTH.
WindEditMode = "BOTH"
# Default setting for automatic combining of existing weather/discrete and new
# weather/discrete when editing. For example, if the setting is yes
# and existing weather is Rain, then setting the value to Snow will result in
# a Rain/Snow mix.
WeatherDiscreteCombineMode = no
# Default setting for Missing Data Mode. Possible values are:
# Stop: Stop execution of a smart tool if there is missing data.
# Skip: Skip grids for which there is missing data.
# A User Alert message will report which grids were skipped.
# Create: Create grids to supply the missing data.
# A User Alert message will report which grids were created.
MissingDataMode = "Stop"
# Default setting for showing the dialog box when the user attempts to
# edit grids when a selection time range is active. Editing grids when
# a selection time range is active may cause multiple grids to be
# edited.
ShowTimeRangeWarning = yes
# Default setting for showing the dialog box when the user attempts to
# edit grids without an edit area being set. The behavior is to edit
# the entire domain.
ShowEmptyEditAreaWarning = yes
# Specifies the default contour to grid algorithm. Can be set to
# "Contour Analyzer", "Internal SIRS Server"
ContourServer = "Contour Analyzer"
# The Contour Analyzer algorithm can run over a subsampled grid
# to improve performance. This is usually ok since the contour tool
# is mostly used where there is not much detail due to topography.
# The value of ContourSubSample is used to divide the x and y dimensions
# of the original grid to get the dimensions of the subsampled grid.
# So, setting ContourSubSample to 4 would cause the Contour Analyzer to
# reduce a 400x400 grid to a 100x100 grid for contouring purposes.
# This can greatly speed up the algorithm. Setting ContourSubSample to
# 1 will cause no reduction.
# The default value is 4. If ContourSubSample is set to a value less than
# or equal to 0 then it will go back to 4. If it is set to a value large
# enough to make the subsampled grid have an x or y dimension less than 5
# then it will be reduced so that the minimum dimension for x or y will be
# 5.
ContourSubSample = 4
# Specifies whether the selection time range will track the spatial
# editor time when time stepping using the toolbar buttons or keyboard.
SelectGridsWhenStepping = no
# Default Time Scale Periods that are shown on the time scale. These
# are names of the selection time ranges (SELECTTR).
TimeScalePeriods = ['Today', 'Tonight', 'Tomorrow', 'Tomorrow Night',
'Day 3', 'Day 4', 'Day 5', 'Day 6', 'Day 7']
# Contour Tool drawing color. Defaults to "White"
#ContourToolDrawing_color = "White"
# Move/Copy, Pencil, SelectPoints drawing color. Defaults to "White"
#Drawing_color = "White"
#------------------------------------------------------------------------
# PNG Graphic Product Generation (ifpIMAGE program)
#------------------------------------------------------------------------
# Defines what kind of files ifpIMAGE will produce. The default is
# png. But you may also choose from the following list. Note that
# these are case sensitive and only png, svg, and gif have been really
# tested. [ 'png', 'pnm', 'gif', 'svg', 'ai', 'ps',
# 'cgm', 'fig', 'pcl', 'hpgl', 'regis',
# 'tek', 'meta' ]
#
#Png_fileType = 'ps'
# Legends display mode - 0 for UTC, 1 for local time
# Do not include a decimal point after the number.
#Png_localTime = 1 # legend displays time in local or UTC (defaults to UTC)
# You can set the height and width (in pixels) for the Png images.
# It is only necessary to set one of these, as the other will
# be calculated using the aspect ratio of your office domain.
# Do not include decimal points after the numbers.
# Both default to 400
#Png_height = 400
#Png_width = 400
# Name of the weather element which will be displayed as
# an image in the png. If nothing is specified here, then all weather
# elements will be displayed as a graphic. Topo may also be added
# using the string "Topo"
#Png_image = 'T'
# Indicates that a snapshot time should be displayed instead of the valid time
# of the grid.
Png_snapshotTime = 0 # ifpIMAGE only
# Default format of the snapshot time if the Png_snapshotTime = 1
#Png_legendFormat_Zulu_snapshot = "%b%d%H%MZ"
# Default format of the snapshot time if Png_snapshotTime = 1 and
# Png_localTime = 1
#Png_legendFormat_LT_snapshot = "%d %b %I:%M %p %Z"
# Indicate if the Png image displayed should be smoothed (1 = smoothing
# enabled, 0 = smoothing disabled). Note that smoothing will only apply
# to scalar and vector images.
Png_smoothImage = 0 # ifpIMAGE only
# Alternate way of specifying the weather elements to be displayed.
# If this entry is specified, then the DefaultGroup is ignored (for
# ifpIMAGE). Format is a list of weather elements in a pseudo weather
# element bundle format, which consists
# of "parmName_level:optType_modelName seq", where the
# seq is normally -1 for singleton databases, 0 for model databases for
# the most recent version, 1 for the prev. version of a model database.
# If you wish, you may add Topo to this list. For it just use
# the string "Topo" (none of the other nonsense is needed).
#Png_parms = ['FzLevel_SFC:_Fcst -1', 'Sky_SFC:_Fcst -1', 'QPF_SFC:_Fcst -1']
# Ability to turn on/off legends for the graphic generation. Applies
# only to graphic product generation and not GFE. Defaults to on
# if not specified. Do not include a decimal point after the number.
#Png_legend = 1 #1 for visible, 0 for invisible
# Legend weather element name mode - SHORT for weather element name,
# LONG for weather element descriptive name, ALT for alternate,
# OFF for no name
#Png_descriptiveWeName = 'SHORT'
# Alternate weather element name. Png_descriptiveWeName must be set to ALT.
# These entries define the weather element name to be displayed based
# on the weather element name (e.g., T). The string
# format is Png_wxelem_AltName. For example, Png_MaxT_AltName = "Highs" will
# display "Highs" for the wx element name rather than MaxT or
# Maximum Temperature. If not defined and ALT is set, then the weather
# element name will be the 'SHORT' name.
#Png_MaxT_AltName = "Highs"
# Legend format for Pngs. See strftime(3) for time string formats
# or ifpIMAGE documentation. If the duration, start time, or ending
# time is not desired, then the entry should be set to "". There are
# separate entries for Zulu and LocalTime. The duration formats
# can use the %H (hours) %M (minutes) formats.
Png_legendFormat_Zulu_dur = "" # ifpIMAGE only
Png_legendFormat_Zulu_start = "%b %d %H%MZ to " # ifpIMAGE only
Png_legendFormat_Zulu_end = "%b %d %H%MZ" # ifpIMAGE only
Png_legendFormat_LT_dur = "" # ifpIMAGE only
Png_legendFormat_LT_start = "%b %d %I:%M %p %Z to " # ifpIMAGE only
Png_legendFormat_LT_end = "%b %d %I:%M %p %Z" # ifpIMAGE only
# Png filename prefix
# Specifies the prefix to be applied to all generated png imagery
#Png_filenamePrefix = 'desiredPrefix'
# Png filename format
# Specifies the format to be used for the date/time string in the
# generated png imagery. See strftime(3) for time string formats
# or the ifpIMAGE documentation. Default is yyyymmdd_hhmm
#Png_baseTimeFormat = '%Y%m%d_%H%M'
#By default, png images are generated for each and every possible change
#in the generated grids. For example, if you are generating a grid for T
#and WaveHeight, and the T is a one hour grid and the WaveHeight a 6 hour
#grid, that starts at the same time (e.g., 12z), two different images will
#be generated. The first will have T and WaveHeight together and will be
#time stamped to 12z; the second will just have WaveHeight and will be time
#stamped to 13z. This is identical behavior to running the GFE with
#multiple visible weather elements.
#You can override this behavior for the creation of the Png imagery by
#specifying an interval for which to generate imagery. The interval is
#specified in hours. Setting the value to 6 will generate grids at 00z,
#06z,12z and 18z, assuming there is data available to generate the imagery.
#The configuration line to set the generation to every 6 hours is:
#Png_interval = 6
#Png imagery intervals can be offset by the amount set in the
#Png_intervalOffset option. If Png_intervalOffset is 1 and Png_interval = 6
#(both specified in hours), grids will be generated at 01z, 07z, 13z, etc.,
#assuming there is data available to generate the imagery. The default
#Png_intervalOffset is 0.
#Png_intervalOffset = 0
# If using fit to data for ifpIMAGE, and the option "All Grids over Area",
# or "Single Grid over Area" is enabled, then the ifpIMAGE program needs to
# know the name of the edit area.
#Png_fitToDataArea = "BOU"
# Add a "logo bar" to the bottom of each image. If this flag is set to 1,
# then a bar containing the NOAA and NWS logos will be inserted at the bottom
# of the image.
#Png_logo = 0
# If Png_logo is enabled, then this can be set to a string you would
# like to have in the "logo bar". The string will be centered in the bar.
#Png_logoString = ""
# If an alternate legend language is desired, then enter that here.
# Acceptable values are those defined by the locale command (part of Unix).
# Checked values are "spanish" and "french".
#Png_legendLanguage = "spanish"
# If set to 1, then the colorbar will not be rendered for images.
#Png_omitColorBar = 0
# Disables Automatic Zooming feature when ifpIMAGE clipping is enabled.
# Default is that ifpIMAGE will automatically zoom. Set to yes or 1 to
# disable automatic zooming.
#Png_wholeDomain = 0
# Enables the creation of the PNG *.info files. Set to yes or 1 to enable
# the creation. Set to no or 0 to disable the creation.
#Png_infoFiles = 1
# Enables the special masking for ifpIMAGE to use the ISC grid data history
# information. This is used when creating imagery with ISC data. Areas
# not containing current ISC data will be blanked out. 0 for off, 1 for on.
# This entry overrides the other masking.
#Png_historyMask = 0
#------------------------------------------------------------------------
# INTERSITE COORDINATION
#------------------------------------------------------------------------
# Moved to serverConfig/localConfig for OB8.3
#------------------------------------------------------------------------
# ZONE COMBINER CONFIGURATION
#------------------------------------------------------------------------
# Specifies the height and width of the zone combiner. It can be resized
# larger, but not smaller in the GFE. Defaults are 400 pixels
#ZoneCombiner_height = 400
#ZoneCombiner_width = 400
# Specifies the zone combiner colors for the background,
# and the no zone color, which is used when a zone is not included
# in any combination.
#ZoneCombiner_backgroundColor = 'gray40'
#ZoneCombiner_noZoneColor = 'black'
# If set true, then these options will be set when the zone combiner
# starts for each product.
#ZoneCombiner_LabelZones = False
#ZoneCombiner_LabelGroups = True
#------------------------------------------------------------------------
# PRODUCT GENERATION SCRIPTS
#------------------------------------------------------------------------
# Product Generation Scripts appear under the product generation menu
# on the GFE.
Scripts = [
"Ascii Grids...: " +
"ifpAG -h {host} -r {port} -o {prddir}/AG/{ztime}.ag " +\
"-d {productDB} ",
"Make and Send HTI:" +
"xterm -e ssh px2f /awips2/GFESuite/hti/bin/make_hti.sh {site}",
"Official Grids to LDAD: " +
"ifpAG -h {host} -r {port} -o - -d {productDB} | gzip -9 > " +
" /data/fxa/LDAD/ifp/Official/.incoming; " +
"mv /data/fxa/LDAD/ifp/Official/.incoming /data/fxa/LDAD/ifp/Official/{ztime} &",
"Png Images...:" +
"ifpIMAGE " +\
"-h {host} -c {entry:ConfigFile:imageTest1} -o {prddir}/IMAGE",
"Send Grids to NDFD..:" +
"sendGridsToNDFD.sh {site} &",
"Send Point and Click Grids to Consolidated Web Farm..:" +
"/awips2/GFESuite/bin/rsyncGridsToCWF_client.sh {site} &",
]
## Note: Please define TextProducts through
## the DefineTextProducts dialog (Product Generation Menu)
## within the GFE.
# Ordering Product Generation
# NOTE: 'ProductList' is not supported in AWIPS 2.
# Products will be listed in the order they appear in the list of Scripts above.
#------------------------------------------------------------------------
# Product Generation Script Notes
#
# Each script entry is a text string of the form:
# "<Entry Name>: " +
# "<command line script> "
#
# where:
# <Entry Name> will appear in the Product Generation menu
# <command line script> is the command line that will be submitted when
# the script is chosen.
#
# The following variables can be used in scripts and the GFE will fill
# in the appropriate information before executing the script:
#
# {host} -- server hostname
# {port} -- server port
# {site} -- site identifier
# {productDB} -- product database -- this is the
# Official Database if it exists.
# Otherwise, it's the Mutable (Fcst) database.
# {SEstart} -- Start of Spatial Editor time:
# format of all times: YYYYMMDD_HHMM
# {SEend} -- Spatial Editor time plus one second
# {SelectedStart} -- Start of Selected Time range
# {SelectedEnd} -- End of Selected Time range
# {time} -- Current local time in format: YYYYMMDD_HHMM
# {ztime} -- Current Zulu time in format: YYYYMMDD_HHMM
# {module:<module name>} -- The correct path of the module will
# be substituted in the command line.
# The module must have a .py extension.
# {home} -- Substitutes the home GFESuite directory
# at runtime (may differ from local to server)
# {prddir} -- Substitutes the product directory
# at runtime (may differ from local to server)
#
# Note that the directory {} values should be used, rather than hard-coding
# them, if you want to be able to run a process locally as well as remotely.
#
# If the following variables are used in a script,
# a dialog will appear for the user to make selections from a simple GUI
# before the script is executed:
# {parmsMutable} (Those listed in Forecast database)
# {refsets}
# {maps}
# {databases}
# {output file}
# {output directory}
# {startTime}
# {endTime}
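#
# For example (illustration only), an entry that uses {output file} first
# pops up a dialog asking the user for a destination, then runs ifpAG with
# the chosen value substituted in:
#   "Ascii Grids to Chosen File...: " +
#   "ifpAG -h {host} -r {port} -d {productDB} -o {output file} ",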
##
# Named Variable
# To have the user prompted for a named variable, use the following
# in your script:
# {entry: <name of variable>: <default value>}
# For example, to have the user prompted for "width" use:
# {entry: width: 350}
# in your script.
#
# Radio Button list of values
# To have the user prompted for a list of radiobutton variables, use
# {entryButtons: <name of variable>: <list of values separated by commas>}
# E.g.
# {entryButtons: ReportType: GeneralImages,CustomizedImages}
#
# Check Button list of values
# To have the user prompted for a list of check button variables, use
# {entryChecks: <name of variable>: <list of values separated by commas>}
# E.g.
# {entryChecks: EditAreas: Area1,Area2,Area3}
# Edit Areas and Groups
# If the name of the entryButtons or entryChecks is "EditAreas",
# the system will accept edit area OR edit area group names.
# The system will check for groups and will automatically expand
# them to the appropriate areas
# {entryChecks: EditAreas: Group1,Group2,Area3,Area4}
# {entryButtons: EditAreas: Group1,Group2}
# Scripts with Multiple Command Lines
# To string multiple command lines together, use the following format for
# your command line script:
# "csh -c (<command line 1>; <command line 2>; <command line 3>)"
#------------------------------------------------------------------------
| gfesuite_home = '/awips2/GFESuite'
gfesuite_prddir = '/tmp/products'
yes = True
no = False
mutable_model = '_Fcst'
db_types = ['', 'D2D', 'V']
default_group = 'Public'
all_edit_actions_on_pop_up = yes
pop_up_edit_actions = ['Assign_Value', 'AdjustValue_Down', 'AdjustValue_Up', 'Smooth']
edit_area_groups = ['Misc']
grid_manager_sort_order = ['T', 'Td', 'RH', 'MaxT', 'MinT', 'MaxRH', 'MinRH', 'WindChill', 'HeatIndex', 'Wind', 'WindGust', 'FreeWind', 'TransWind', 'Sky', 'Wx', 'LAL', 'PoP', 'CWR', 'QPF', 'SnowAmt', 'StormTotalSnow', 'SnowLevel', 'MaxTAloft', 'WetBulb', 'Hazards', 'FzLevel', 'Haines', 'MixHgt']
auto_save_interval = 0
publish_times = ['Today', 'Tonight', 'Tomorrow', 'Tomorrow Night', 'Day 3', 'Day 4', 'Day 5', 'Day 6', 'Day 7', 'Hour 0-240']
map_backgrounds_default = ['States', 'CWA']
text_font0 = 'DejaVu Sans Mono-regular-9'
text_font1 = 'DejaVu Sans Mono-regular-9'
text_font2 = 'DejaVu Sans Mono-bold-12'
text_font3 = 'DejaVu Sans Mono-bold-14'
text_font4 = 'DejaVu Sans Mono-bold-20'
bg_color = 'black'
system_time_range_before_current_time = 48
system_time_range_after_current_time = 168
selected_color = 'LightSkyBlue'
selected_fill_pattern = 'TRANS_25PC_45DEG'
time_scale_lines_color = 'Blue'
time_scale_lines_pattern = 'DOTTED'
editor_time_line_color = 'Yellow'
editor_time_line_width = 2
editor_time_line_pattern = 'DOTTED'
current_system_time_color = 'Green'
locked_by_me_color = 'forestgreen'
locked_by_me_pattern = 'WHOLE'
locked_by_other_color = 'tomato2'
locked_by_other_pattern = 'WHOLE'
time_block_visible_color = 'White'
time_block_active_color = 'Yellow'
time_block_invisible_color = 'Gray50'
time_block_preview_color = 'Cyan'
reference_set_color = 'Gray80'
reference_set_width = 0
time_scale_horiz_size = 350
legend_mode = 'GRIDS'
initial_gm_display_mode = 'Normal'
max_menu_items_before_cascade = 30
office_domain_expand_left = 10
office_domain_expand_right = 10
office_domain_expand_top = 10
office_domain_expand_bottom = 10
temporal_editor_we_mode = 'VISIBLE'
product_output_dialog_wrap_pils = []
product_output_dialog_non_wrap_pils = ['AFM', 'PFM', 'FWF', 'SFT', 'WCN', 'FWS', 'TCV', 'HLS']
show_isc_update_time = yes
show_isc_site_id = yes
show_isc_markers = yes
show_isc_update_time_marker = yes
show_isc_site_id_marker = yes
show_isc_official_symbol_marker = yes
show_isc_official_symbol = yes
modified_minutes = [60, 180, 360, 720, 1440, 2880]
modified_colors = ['#0bc71e', '#60c7b8', '#417fc7', '#e17c10', '#ebdf00', '#e11a00']
saved_minutes = [60, 180, 360, 720, 1440, 2880]
saved_colors = ['#0bc71e', '#60c7b8', '#417fc7', '#e17c10', '#ebdf00', '#e11a00']
published_minutes = [60, 180, 360, 720, 1440, 2880]
published_colors = ['#0bc71e', '#60c7b8', '#417fc7', '#e17c10', '#ebdf00', '#e11a00']
sent_minutes = [60, 120, 180, 240, 300, 360]
sent_colors = ['#0bc71e', '#60c7b8', '#417fc7', '#e17c10', '#ebdf00', '#e11a00']
history_user_mod_text__me = 'm'
history_user_mod_text__other = 'o'
history_user_mod_pattern__me = 'TRANS_25PC_45DEG'
history_user_mod_pattern__other = 'TRANS_25PC_135DEG'
history_origin_text__populated = 'P'
history_origin_text__calculated = 'C'
history_origin_text__scratch = 'S'
history_origin_text__interpolated = 'I'
history_origin_text__other = '?'
history_origin_color__populated = 'wheat'
history_origin_color__calculated = 'red'
history_origin_color__scratch = 'magenta'
history_origin_color__interpolated = 'blue'
history_origin_color__other = 'gray75'
history_model_color_gfs_lr = '#30df10'
history_model_color_rap40 = '#00ffff'
history_model_color_mavmos = '#e6c8a1'
history_model_color_gfsmos = '#e6d8a1'
history_model_color_metmos = '#e6b8a1'
history_model_color_mexmos = '#e6a8a1'
history_model_color_nam80 = '#ffff52'
history_model_color_nam95 = '#ffff52'
history_model_color_nam40 = '#ff99ff'
history_model_color_nam12 = '#ffcaa0'
history_model_color_gfs80 = 'pink'
history_model_color_gfs40 = 'pink'
history_model_color_gfs190 = 'pink'
history_model_color_gww = '#a0a0ff'
history_model_color_hpc_stn = '#d0d0a0'
history_model_color_hpc_grid = '#d0d0b0'
history_model_color_isc = '#b43aee'
history_model_color_laps = '#b06b72'
history_model_color_hpcqpf = '#3dc9ff'
history_model_color_hpc_guide = '#3dc9ff'
history_model_color_rfcqpf = '#3bffb7'
history_model_color__restore = '#e0a0ff'
history_model_color_dgex = 'orange'
history_model_color_mos_guide = '#e608ff'
history_model_color_opctafbe = '#a0a0cc'
history_model_color_opctafbsw = '#a0a0cc'
history_model_color_opctafbnw = '#a0a0cc'
history_model_color_rtma = '#a0522d'
history_model_color__nam_dng5 = '#808000'
history_model_text_gfs80 = 'GFS'
history_model_text_gfs40 = 'GFS'
history_model_text_gfs190 = 'GFS'
history_model_text_rap40 = 'RUC'
history_model_text_gfsmos = 'GFSMOS'
history_model_text_mexmos = 'MEXMOS'
history_model_text_mavmos = 'MAVMOS'
history_model_text_metmos = 'METMOS'
history_model_text_nam80 = 'N80'
history_model_text_nam95 = 'N95'
history_model_text_nam40 = 'N40'
history_model_text_nam20 = 'N20'
history_model_text_nam12 = 'N12'
history_model_text_gfs_lr = 'gfsLR'
history_model_text_hpc_stn = 'HPCs'
history_model_text_hpc_grid = 'HPCg'
history_model_text_gww = 'GWW'
history_model_text_isc = 'ISC'
history_model_text_laps = 'LAPS'
history_model_text_hpcqpf = 'HPCQPF'
history_model_text_hpc_guide = 'HPCGuide'
history_model_text_rfcqpf = 'RFCQPF'
history_model_text__restore = 'Restore'
history_model_text_dgex = 'DGEX'
history_model_text_mos_guide = 'GMOS'
history_model_text_opctafbe = 'OPC'
history_model_text_opctafbsw = 'OPC'
history_model_text_opctafbnw = 'OPC'
history_model_text_rtma = 'RTMA'
history_model_text__nam_dng5 = 'Nd5'
significant_weather_time_weight_average_percent = 40
pencil_tool_influence_list = [1, 2, 4, 8, 12, 16]
smooth_size = 3
smooth_size_list = [3, 5, 7, 9]
set_value_zoom = 4
qpf__set_value_zoom = 10
generic_colors = ['#00ff00', '#ff8e59', '#00ffff', '#e6c8a1', '#ffff52', '#ff99ff', '#aeb370', '#ff4000', '#e6c8a1']
wx_graphic_color = '#ffffff'
qpf_contour_values = [0.01, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4, 2.6, 2.8, 3.0, 3.2, 3.4, 3.6, 3.8, 4.0, 4.2, 4.4, 4.6, 4.8, 5.0]
topography_contour_values = [5.0, 10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0, 125.0, 150.0, 175.0, 200.0, 250.0, 300.0, 350.0, 400.0, 450.0, 500.0, 600.0, 700.0, 800.0, 900.0, 1000.0, 1250.0, 1500.0, 1750.0, 2000.0, 2500.0, 3000.0, 3500.0, 4000.0, 4500.0, 5000.0, 5500.0, 6000.0, 6500.0, 7000.0, 7500.0, 8000.0, 8500.0, 9000.0, 9500.0, 10000.0, 11000.0, 12000.0, 13000.0, 14000.0, 15000.0, 16000.0, 17000.0, 18000.0, 19000.0, 20000.0]
sky_contour_interval = 10.0
po_p_contour_interval = 10.0
min_t_contour_interval = 5.0
max_t_contour_interval = 5.0
t_contour_interval = 5.0
td_contour_interval = 5.0
fz_level_delta_value = 100.0
snow_level_delta_value = 100.0
wx_spatial_image_type = ['Image', 'BoundedArea']
headlines_spatial_image_type = ['Image', 'BoundedArea']
swell_spatial_image_type = ['Image', 'WindArrow']
swell2_spatial_image_type = ['Image', 'WindArrow']
swell_spatial_graphic_type = ['WindArrow']
swell2_spatial_graphic_type = ['WindArrow']
wind_arrow_default_size = 60
wind_barb_default_size = 60
wind_arrow_scaling = 0.03
swell_arrow_scaling = 0.001
swell2_arrow_scaling = 0.001
wind_format = 'ddff'
wx_common_values = ['<NoCov>:<NoWx>:<NoInten>:<NoVis>:<NoAttr>', 'Wide:R:-:<NoVis>:<NoAttr>', 'Wide:S:--:<NoVis>:<NoAttr>', 'Wide:R:-:<NoVis>:<NoAttr>^Wide:S:-:<NoVis>:<NoAttr>', 'Sct:RW:-:<NoVis>:<NoAttr>', 'Sct:SW:-:<NoVis>:<NoAttr>', 'Sct:T:<NoInten>:<NoVis>:<NoAttr>^Sct:RW:-:<NoVis>:<NoAttr>', 'Patchy:F:<NoInten>:<NoVis>:<NoAttr>']
hazards_common_values = ['Watches|Fire Weather|FW.A', 'Watches|Hydrology|FF.A', 'Watches|Hydrology|FA.A', 'Watches|Coastal Flooding|CF.A', 'Watches|Coastal Flooding|LS.A', 'Watches|Marine|GL.A', 'Watches|Marine|HF.A', 'Watches|Marine|SE.A', 'Watches|Marine|SR.A', 'Watches|Marine|UP.A', 'Watches|Non-Precipitation|EH.A', 'Watches|Non-Precipitation|FZ.A', 'Watches|Non-Precipitation|HW.A', 'Watches|Non-Precipitation|HZ.A', 'Watches|Non-Precipitation|EC.A', 'Watches|Winter Storm|WC.A', 'Watches|Winter Storm|WS.A', 'Warnings|Fire Weather|FW.W', 'Warnings|Coastal Flooding|CF.W', 'Warnings|Coastal Flooding|LS.W', 'Warnings|Coastal Flooding|SU.W', 'Warnings|Marine|MH.W', 'Warnings|Marine|HF.W', 'Warnings|Marine|GL.W', 'Warnings|Marine|UP.W', 'Warnings|Marine|SR.W', 'Warnings|Marine|SE.W', 'Warnings|Non-Precipitation|AF.W', 'Warnings|Non-Precipitation|DU.W', 'Warnings|Non-Precipitation|EH.W', 'Warnings|Non-Precipitation|FZ.W', 'Warnings|Non-Precipitation|HW.W', 'Warnings|Non-Precipitation|HZ.W', 'Warnings|Non-Precipitation|EC.W', 'Warnings|Winter Storm|BZ.W', 'Warnings|Winter Storm|IS.W', 'Warnings|Winter Storm|LE.W', 'Warnings|Winter Storm|WC.W', 'Warnings|Winter Storm|WS.W', 'Advisories|Marine|UP.Y', 'Advisories|Marine|LO.Y', 'Advisories|Marine|SC.Y', 'Advisories|Marine|SW.Y', 'Advisories|Marine|BW.Y', 'Advisories|Marine|RB.Y', 'Advisories|Marine|SI.Y', 'Advisories|Marine|MF.Y', 'Advisories|Marine|MS.Y', 'Advisories|Marine|MH.Y', 'Advisories|Coastal Flooding|CF.Y', 'Advisories|Coastal Flooding|LS.Y', 'Advisories|Coastal Flooding|SU.Y', 'Advisories|Non-Precipitation|AS.O', 'Advisories|Non-Precipitation|AS.Y', 'Advisories|Non-Precipitation|AQ.Y', 'Advisories|Non-Precipitation|DU.Y', 'Advisories|Non-Precipitation|FG.Y', 'Advisories|Non-Precipitation|SM.Y', 'Advisories|Non-Precipitation|ZF.Y', 'Advisories|Non-Precipitation|FR.Y', 'Advisories|Non-Precipitation|HT.Y', 'Advisories|Non-Precipitation|LW.Y', 'Advisories|Non-Precipitation|AF.Y', 'Advisories|Non-Precipitation|WI.Y', 'Advisories|Winter Weather|WC.Y', 'Advisories|Winter Weather|WW.Y', 'Statements|Coastal Flooding|CF.S', 'Statements|Coastal Flooding|LS.S', 'Statements|Coastal Flooding|RP.S', 'Statements|Marine|MA.S']
r_default_coverage = 'Wide'
rw_default_coverage = 'Sct'
s_default_coverage = 'Wide'
sw_default_coverage = 'Sct'
t_default_coverage = 'Sct'
r_default_intensity = '-'
rw_default_intensity = '-'
s_default_intensity = '-'
sw_default_intensity = '-'
l_default_intensity = '-'
zr_default_intensity = '-'
zl_default_intensity = '-'
ip_default_intensity = '-'
discrete_overlap_patterns = ['TRANS_25PC_45DEG', 'TRANS_25PC_135DEG', 'CROSS']
default_color_table_left_wavelength = 380.0
default_color_table_right_wavelength = 650.0
default_color_table_num_colors = 150
rip_prob_default_color_table = 'GFE/RipProb'
erosion_prob_default_color_table = 'GFE/RunupProbs'
overwash_prob_default_color_table = 'GFE/RunupProbs'
t_default_color_table = 'GFE/Mid Range Enhanced'
td_default_color_table = 'GFE/Mid Range Enhanced'
max_t_default_color_table = 'GFE/Mid Range Enhanced'
min_t_default_color_table = 'GFE/Mid Range Enhanced'
sky_default_color_table = 'GFE/Cloud'
wind_default_color_table = 'GFE/Low Range Enhanced'
wind20ft_default_color_table = 'GFE/Low Range Enhanced'
po_p_default_color_table = 'GFE/ndfdPoP12'
qpf_default_color_table = 'GFE/Gridded Data'
ttrend_default_color_table = 'GFE/Discrepancy'
r_htrend_default_color_table = 'GFE/Discrepancy'
wetflag_default_color_table = 'GFE/YesNo'
delta_min_t_default_color_table = 'GFE/Discrepancy'
delta_max_t_default_color_table = 'GFE/Discrepancy'
delta_wind_default_color_table = 'GFE/Discrepancy'
delta_sky_default_color_table = 'GFE/Discrepancy'
delta_po_p_default_color_table = 'GFE/Discrepancy'
visible_east_default_color_table = 'Sat/VIS/ZA (Vis Default)'
ir11_east_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir13_east_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir39_east_default_color_table = 'Sat/IR/CIRA (IR Default)'
water_vapor_east_default_color_table = 'Sat/WV/Gray Scale Water Vapor'
visible_central_default_color_table = 'Sat/VIS/ZA (Vis Default)'
ir11_central_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir13_central_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir39_central_default_color_table = 'Sat/IR/CIRA (IR Default)'
water_vapor_central_default_color_table = 'Sat/WV/Gray Scale Water Vapor'
visible_west_default_color_table = 'Sat/VIS/ZA (Vis Default)'
ir11_west_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir13_west_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir39_west_default_color_table = 'Sat/IR/CIRA (IR Default)'
water_vapor_west_default_color_table = 'Sat/WV/Gray Scale Water Vapor'
visible_e_default_color_table = 'Sat/VIS/ZA (Vis Default)'
ir11_e_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir13_e_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir39_e_default_color_table = 'Sat/IR/CIRA (IR Default)'
water_vapor_e_default_color_table = 'Sat/WV/Gray Scale Water Vapor'
fog_e_default_color_table = 'Sat/WV/Gray Scale Water Vapor'
visible_c_default_color_table = 'Sat/VIS/ZA (Vis Default)'
ir11_c_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir13_c_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir39_c_default_color_table = 'Sat/IR/CIRA (IR Default)'
water_vapor_c_default_color_table = 'Sat/WV/Gray Scale Water Vapor'
fog_c_default_color_table = 'Sat/WV/Gray Scale Water Vapor'
visible_w_default_color_table = 'Sat/VIS/ZA (Vis Default)'
ir11_w_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir13_w_default_color_table = 'Sat/IR/CIRA (IR Default)'
ir39_w_default_color_table = 'Sat/IR/CIRA (IR Default)'
water_vapor_w_default_color_table = 'Sat/WV/Gray Scale Water Vapor'
fog_w_default_color_table = 'Sat/WV/Gray Scale Water Vapor'
hazards_default_color_table = 'GFE/Hazards'
proposed_ss_default_color_table = 'GFE/w'
proposed_s_snc_default_color_table = 'GFE/w'
collab_diff_ss_default_color_table = 'GFE/diffSS'
inundation_max_default_color_table = 'GFE/Inundation'
inundation_max_max_color_table_value = 30.0
inundation_max_min_color_table_value = 0.0
inundation_maxnc_default_color_table = 'GFE/Inundation'
inundation_maxnc_max_color_table_value = 30.0
inundation_maxnc_min_color_table_value = 0.0
inundation_timing_default_color_table = 'GFE/Inundation'
inundation_timing_max_color_table_value = 30.0
inundation_timing_min_color_table_value = 0.0
inundation_timingnc_default_color_table = 'GFE/Inundation'
inundation_timingnc_max_color_table_value = 30.0
inundation_timingnc_min_color_table_value = 0.0
surge_ht_plus_tide_mllw_default_color_table = 'GFE/Inundation'
surge_ht_plus_tide_mllw_max_color_table_value = 30.0
surge_ht_plus_tide_mllw_min_color_table_value = 0.0
surge_ht_plus_tide_mll_wnc_default_color_table = 'GFE/Inundation'
surge_ht_plus_tide_mll_wnc_max_color_table_value = 30.0
surge_ht_plus_tide_mll_wnc_min_color_table_value = 0.0
surge_ht_plus_tide_mhhw_default_color_table = 'GFE/Inundation'
surge_ht_plus_tide_mhhw_max_color_table_value = 30.0
surge_ht_plus_tide_mhhw_min_color_table_value = 0.0
surge_ht_plus_tide_mhh_wnc_default_color_table = 'GFE/Inundation'
surge_ht_plus_tide_mhh_wnc_max_color_table_value = 30.0
surge_ht_plus_tide_mhh_wnc_min_color_table_value = 0.0
surge_ht_plus_tide_navd_default_color_table = 'GFE/Inundation'
surge_ht_plus_tide_navd_max_color_table_value = 30.0
surge_ht_plus_tide_navd_min_color_table_value = 0.0
surge_ht_plus_tide_nav_dnc_default_color_table = 'GFE/Inundation'
surge_ht_plus_tide_nav_dnc_max_color_table_value = 30.0
surge_ht_plus_tide_nav_dnc_min_color_table_value = 0.0
surge_ht_plus_tide_msl_default_color_table = 'GFE/Inundation'
surge_ht_plus_tide_msl_max_color_table_value = 30.0
surge_ht_plus_tide_msl_min_color_table_value = 0.0
surge_ht_plus_tide_ms_lnc_default_color_table = 'GFE/Inundation'
surge_ht_plus_tide_ms_lnc_max_color_table_value = 30.0
surge_ht_plus_tide_ms_lnc_min_color_table_value = 0.0
prob34_default_color_table = 'GFE/TPCprob'
prob64_default_color_table = 'GFE/TPCprob'
pws_d34_default_color_table = 'GFE/TPCprob'
pws_d64_default_color_table = 'GFE/TPCprob'
pws_n34_default_color_table = 'GFE/TPCprob'
pws_n64_default_color_table = 'GFE/TPCprob'
pws34int_default_color_table = 'GFE/TPCprob'
pws64int_default_color_table = 'GFE/TPCprob'
flooding_rain_threat_default_color_table = 'GFE/gHLS_new'
storm_surge_threat_default_color_table = 'GFE/gHLS_new'
tornado_threat_default_color_table = 'GFE/gHLS_new'
wind_threat_default_color_table = 'GFE/gHLS_new'
max_t_aloft_default_color_table = 'WarmNoseTemp'
wet_bulb_default_color_table = 'WetBulbTemp'
qpf__log_factor = 0.03
snow_amt__log_factor = 0.6
wet_bulb_max_color_table_value = 50.0
wet_bulb_min_color_table_value = 20.0
weather_coverage_names = ['Iso', 'Sct', 'Num', 'Wide', 'Ocnl', 'SChc', 'Chc', 'Lkly', 'Def', 'Patchy', '<NoCov>', 'Areas', 'Frq', 'Brf', 'Pds', 'Inter']
weather_coverage_fill_patterns = ['WIDE_SCATTERED', 'SCATTERED', 'LKLY', 'WIDE', 'OCNL', 'WIDE_SCATTERED', 'SCATTERED', 'LKLY', 'WIDE', 'CURVE', 'WHOLE', 'DUALCURVE', 'OCNL', 'OCNL', 'OCNL', 'OCNL']
weather_type_names = ['<NoWx>', 'T', 'R', 'RW', 'L', 'ZR', 'ZL', 'S', 'SW', 'IP', 'F', 'H', 'BS', 'K', 'BD', 'SA', 'LC', 'FR', 'AT', 'TRW']
weather_type_colors = ['Gray40', 'red3', 'ForestGreen', 'ForestGreen', 'CadetBlue1', 'darkorange1', 'goldenrod1', 'Grey65', 'Grey65', 'plum1', 'khaki4', 'Gray75', 'snow', 'grey30', 'Brown', 'blue1', 'coral1', 'pale turquoise', 'DeepPink', 'red3']
weather_type_inten_names = ['T+', 'Rm', 'R+', 'RWm', 'RW+']
weather_type_inten_colors = ['red1', 'green', 'green', 'green', 'green']
weather_generic_colors = ['Coral', 'CadetBlue2', 'Aquamarine', 'DarkKhaki', 'DodgerBlue', 'IndianRed1', 'PaleGreen', 'MistyRose', 'chartreuse3', 'PapayaWhip']
image_on_active_se = yes
time_scale_lines = yes
editor_time_lines = yes
split_boundary_display = yes
temporal_editor_overlay = yes
temporal_editor_absolute_edit_mode = no
temporal_editor_statistics_mode = 'ABSOLUTE'
temporal_editor_statistics_mode_moderated_min = 15
temporal_editor_statistics_mode_moderated_max = 15
temporal_editor_statistics_mode_standard_deviation_min = 1.0
temporal_editor_statistics_mode_standard_deviation_max = 1.0
wind_edit_mode = 'BOTH'
weather_discrete_combine_mode = no
missing_data_mode = 'Stop'
show_time_range_warning = yes
show_empty_edit_area_warning = yes
contour_server = 'Contour Analyzer'
contour_sub_sample = 4
select_grids_when_stepping = no
time_scale_periods = ['Today', 'Tonight', 'Tomorrow', 'Tomorrow Night', 'Day 3', 'Day 4', 'Day 5', 'Day 6', 'Day 7']
png_snapshot_time = 0
png_smooth_image = 0
png_legend_format__zulu_dur = ''
png_legend_format__zulu_start = '%b %d %H%MZ to '
png_legend_format__zulu_end = '%b %d %H%MZ'
png_legend_format_lt_dur = ''
png_legend_format_lt_start = '%b %d %I:%M %p %Z to '
png_legend_format_lt_end = '%b %d %I:%M %p %Z'
scripts = ['Ascii Grids...: ' + 'ifpAG -h {host} -r {port} -o {prddir}/AG/{ztime}.ag ' + '-d {productDB} ', 'Make and Send HTI:' + 'xterm -e ssh px2f /awips2/GFESuite/hti/bin/make_hti.sh {site}', 'Official Grids to LDAD: ' + 'ifpAG -h {host} -r {port} -o - -d {productDB} | gzip -9 > ' + ' /data/fxa/LDAD/ifp/Official/.incoming; ' + 'mv /data/fxa/LDAD/ifp/Official/.incoming /data/fxa/LDAD/ifp/Official/{ztime} &', 'Png Images...:' + 'ifpIMAGE ' + '-h {host} -c {entry:ConfigFile:imageTest1} -o {prddir}/IMAGE', 'Send Grids to NDFD..:' + 'sendGridsToNDFD.sh {site} &', 'Send Point and Click Grids to Consolidated Web Farm..:' + '/awips2/GFESuite/bin/rsyncGridsToCWF_client.sh {site} &'] |
class EvenIterator(object):
def __init__(self,collection):
self.iter = iter(collection[::2])
def __iter__(self):
return self
def __next__(self):
return next(self.iter)
if __name__=="__main__":
for i in EvenIterator([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]):
print(i)
| class Eveniterator(object):
def __init__(self, collection):
self.iter = iter(collection[::2])
def __iter__(self):
return self
def __next__(self):
return next(self.iter)
if __name__ == '__main__':
for i in Eveniterator([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]):
print(i) |
#
# PySNMP MIB module CISCO-UNIFIED-COMPUTING-FSM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-UNIFIED-COMPUTING-FSM-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:59:37 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
TimeIntervalSec, CiscoNetworkAddress, CiscoInetAddressMask, Unsigned64, CiscoAlarmSeverity = mibBuilder.importSymbols("CISCO-TC", "TimeIntervalSec", "CiscoNetworkAddress", "CiscoInetAddressMask", "Unsigned64", "CiscoAlarmSeverity")
CucsManagedObjectDn, ciscoUnifiedComputingMIBObjects, CucsManagedObjectId = mibBuilder.importSymbols("CISCO-UNIFIED-COMPUTING-MIB", "CucsManagedObjectDn", "ciscoUnifiedComputingMIBObjects", "CucsManagedObjectId")
CucsFsmFsmStageStatus, = mibBuilder.importSymbols("CISCO-UNIFIED-COMPUTING-TC-MIB", "CucsFsmFsmStageStatus")
InetAddressIPv4, InetAddressIPv6 = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressIPv4", "InetAddressIPv6")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ModuleIdentity, IpAddress, Bits, Counter64, Unsigned32, Gauge32, iso, MibIdentifier, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Integer32, ObjectIdentity, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "IpAddress", "Bits", "Counter64", "Unsigned32", "Gauge32", "iso", "MibIdentifier", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Integer32", "ObjectIdentity", "TimeTicks")
MacAddress, RowPointer, TruthValue, TimeInterval, DisplayString, TimeStamp, TextualConvention, DateAndTime = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "RowPointer", "TruthValue", "TimeInterval", "DisplayString", "TimeStamp", "TextualConvention", "DateAndTime")
cucsFsmObjects = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63))
if mibBuilder.loadTexts: cucsFsmObjects.setLastUpdated('201601180000Z')
if mibBuilder.loadTexts: cucsFsmObjects.setOrganization('Cisco Systems Inc.')
cucsFsmStatusTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1), )
if mibBuilder.loadTexts: cucsFsmStatusTable.setStatus('current')
cucsFsmStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-FSM-MIB", "cucsFsmStatusInstanceId"))
if mibBuilder.loadTexts: cucsFsmStatusEntry.setStatus('current')
cucsFsmStatusInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsFsmStatusInstanceId.setStatus('current')
cucsFsmStatusDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsFsmStatusDn.setStatus('current')
cucsFsmStatusRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsFsmStatusRn.setStatus('current')
cucsFsmStatusConvertedEpRef = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsFsmStatusConvertedEpRef.setStatus('current')
cucsFsmStatusDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsFsmStatusDescr.setStatus('current')
cucsFsmStatusName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsFsmStatusName.setStatus('current')
cucsFsmStatusObjectClassName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsFsmStatusObjectClassName.setStatus('current')
cucsFsmStatusRemoteEpRef = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsFsmStatusRemoteEpRef.setStatus('current')
cucsFsmStatusState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 9), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsFsmStatusState.setStatus('current')
mibBuilder.exportSymbols("CISCO-UNIFIED-COMPUTING-FSM-MIB", cucsFsmStatusState=cucsFsmStatusState, cucsFsmStatusObjectClassName=cucsFsmStatusObjectClassName, cucsFsmObjects=cucsFsmObjects, cucsFsmStatusInstanceId=cucsFsmStatusInstanceId, cucsFsmStatusName=cucsFsmStatusName, cucsFsmStatusRn=cucsFsmStatusRn, cucsFsmStatusTable=cucsFsmStatusTable, cucsFsmStatusRemoteEpRef=cucsFsmStatusRemoteEpRef, cucsFsmStatusDn=cucsFsmStatusDn, cucsFsmStatusConvertedEpRef=cucsFsmStatusConvertedEpRef, cucsFsmStatusDescr=cucsFsmStatusDescr, PYSNMP_MODULE_ID=cucsFsmObjects, cucsFsmStatusEntry=cucsFsmStatusEntry)
| (octet_string, object_identifier, integer) = mibBuilder.importSymbols('ASN1', 'OctetString', 'ObjectIdentifier', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(single_value_constraint, value_size_constraint, value_range_constraint, constraints_intersection, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ValueSizeConstraint', 'ValueRangeConstraint', 'ConstraintsIntersection', 'ConstraintsUnion')
(cisco_mgmt,) = mibBuilder.importSymbols('CISCO-SMI', 'ciscoMgmt')
(time_interval_sec, cisco_network_address, cisco_inet_address_mask, unsigned64, cisco_alarm_severity) = mibBuilder.importSymbols('CISCO-TC', 'TimeIntervalSec', 'CiscoNetworkAddress', 'CiscoInetAddressMask', 'Unsigned64', 'CiscoAlarmSeverity')
(cucs_managed_object_dn, cisco_unified_computing_mib_objects, cucs_managed_object_id) = mibBuilder.importSymbols('CISCO-UNIFIED-COMPUTING-MIB', 'CucsManagedObjectDn', 'ciscoUnifiedComputingMIBObjects', 'CucsManagedObjectId')
(cucs_fsm_fsm_stage_status,) = mibBuilder.importSymbols('CISCO-UNIFIED-COMPUTING-TC-MIB', 'CucsFsmFsmStageStatus')
(inet_address_i_pv4, inet_address_i_pv6) = mibBuilder.importSymbols('INET-ADDRESS-MIB', 'InetAddressIPv4', 'InetAddressIPv6')
(snmp_admin_string,) = mibBuilder.importSymbols('SNMP-FRAMEWORK-MIB', 'SnmpAdminString')
(module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup')
(module_identity, ip_address, bits, counter64, unsigned32, gauge32, iso, mib_identifier, notification_type, mib_scalar, mib_table, mib_table_row, mib_table_column, counter32, integer32, object_identity, time_ticks) = mibBuilder.importSymbols('SNMPv2-SMI', 'ModuleIdentity', 'IpAddress', 'Bits', 'Counter64', 'Unsigned32', 'Gauge32', 'iso', 'MibIdentifier', 'NotificationType', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Counter32', 'Integer32', 'ObjectIdentity', 'TimeTicks')
(mac_address, row_pointer, truth_value, time_interval, display_string, time_stamp, textual_convention, date_and_time) = mibBuilder.importSymbols('SNMPv2-TC', 'MacAddress', 'RowPointer', 'TruthValue', 'TimeInterval', 'DisplayString', 'TimeStamp', 'TextualConvention', 'DateAndTime')
cucs_fsm_objects = module_identity((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63))
if mibBuilder.loadTexts:
cucsFsmObjects.setLastUpdated('201601180000Z')
if mibBuilder.loadTexts:
cucsFsmObjects.setOrganization('Cisco Systems Inc.')
cucs_fsm_status_table = mib_table((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1))
if mibBuilder.loadTexts:
cucsFsmStatusTable.setStatus('current')
cucs_fsm_status_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1)).setIndexNames((0, 'CISCO-UNIFIED-COMPUTING-FSM-MIB', 'cucsFsmStatusInstanceId'))
if mibBuilder.loadTexts:
cucsFsmStatusEntry.setStatus('current')
cucs_fsm_status_instance_id = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 1), cucs_managed_object_id())
if mibBuilder.loadTexts:
cucsFsmStatusInstanceId.setStatus('current')
cucs_fsm_status_dn = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 2), cucs_managed_object_dn()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
cucsFsmStatusDn.setStatus('current')
cucs_fsm_status_rn = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 3), snmp_admin_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
cucsFsmStatusRn.setStatus('current')
cucs_fsm_status_converted_ep_ref = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 4), snmp_admin_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
cucsFsmStatusConvertedEpRef.setStatus('current')
cucs_fsm_status_descr = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 5), snmp_admin_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
cucsFsmStatusDescr.setStatus('current')
cucs_fsm_status_name = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 6), snmp_admin_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
cucsFsmStatusName.setStatus('current')
cucs_fsm_status_object_class_name = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 7), snmp_admin_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
cucsFsmStatusObjectClassName.setStatus('current')
cucs_fsm_status_remote_ep_ref = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 8), snmp_admin_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
cucsFsmStatusRemoteEpRef.setStatus('current')
cucs_fsm_status_state = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 63, 1, 1, 9), cucs_fsm_fsm_stage_status()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
cucsFsmStatusState.setStatus('current')
mibBuilder.exportSymbols('CISCO-UNIFIED-COMPUTING-FSM-MIB', cucsFsmStatusState=cucsFsmStatusState, cucsFsmStatusObjectClassName=cucsFsmStatusObjectClassName, cucsFsmObjects=cucsFsmObjects, cucsFsmStatusInstanceId=cucsFsmStatusInstanceId, cucsFsmStatusName=cucsFsmStatusName, cucsFsmStatusRn=cucsFsmStatusRn, cucsFsmStatusTable=cucsFsmStatusTable, cucsFsmStatusRemoteEpRef=cucsFsmStatusRemoteEpRef, cucsFsmStatusDn=cucsFsmStatusDn, cucsFsmStatusConvertedEpRef=cucsFsmStatusConvertedEpRef, cucsFsmStatusDescr=cucsFsmStatusDescr, PYSNMP_MODULE_ID=cucsFsmObjects, cucsFsmStatusEntry=cucsFsmStatusEntry) |
numero = int(input("Digite o valor de n:"))
x = 1
i = 1
while x <= numero:
print(i)
i = i+2;
x=x+1
| numero = int(input('Digite o valor de n:'))
x = 1
i = 1
while x <= numero:
print(i)
i = i + 2
x = x + 1 |
data = [
{'text':'oh hi duuuude how r uy??check this 1xbet'},
{'text':'Dear Harry Potter, i am Frodo Baggins i represent 1xbet company.Best bet service'},
{'text':'wooooh yoow harry look at my jackpot 100000000$ at 1xbet service'},
{'text':'Harry , today i saw the man who looks like Hawkeye from Avengers on 100% and he dont use 1xbet service'},
]
final_mail = 'Hello Harry, my name is Maksim, Im still waiting for the letter from Hogwarts'
spam_word = ''
q_spam = 0
database = []
for mail in data:
str = mail['text'].lower().split()
database.extend(str)
print(database)
for word in database:
quantity = database.count(word)
if quantity > q_spam:
q_spam = quantity
spam_word = word
if spam_word in final_mail.lower():
print('mail is not ok')
else:
print('mail is ok')
| data = [{'text': 'oh hi duuuude how r uy??check this 1xbet'}, {'text': 'Dear Harry Potter, i am Frodo Baggins i represent 1xbet company.Best bet service'}, {'text': 'wooooh yoow harry look at my jackpot 100000000$ at 1xbet service'}, {'text': 'Harry , today i saw the man who looks like Hawkeye from Avengers on 100% and he dont use 1xbet service'}]
final_mail = 'Hello Harry, my name is Maksim, Im still waiting for the letter from Hogwarts'
spam_word = ''
q_spam = 0
database = []
for mail in data:
str = mail['text'].lower().split()
database.extend(str)
print(database)
for word in database:
quantity = database.count(word)
if quantity > q_spam:
q_spam = quantity
spam_word = word
if spam_word in final_mail.lower():
print('mail is not ok')
else:
print('mail is ok') |
"""dots: dotfiles made easy
"""
__version__ = "0.0.1a0"
| """dots: dotfiles made easy
"""
__version__ = '0.0.1a0' |
#
# PySNMP MIB module BAY-STACK-LLDP-EXT-DOT3-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BAY-STACK-LLDP-EXT-DOT3-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:35:40 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection")
lldpXdot3LocPowerEntry, lldpXdot3RemPowerEntry = mibBuilder.importSymbols("LLDP-EXT-DOT3-MIB", "lldpXdot3LocPowerEntry", "lldpXdot3RemPowerEntry")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter64, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Gauge32, Counter32, IpAddress, MibIdentifier, iso, NotificationType, Integer32, Bits, ModuleIdentity, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Gauge32", "Counter32", "IpAddress", "MibIdentifier", "iso", "NotificationType", "Integer32", "Bits", "ModuleIdentity", "Unsigned32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
bayStackMibs, = mibBuilder.importSymbols("SYNOPTICS-ROOT-MIB", "bayStackMibs")
bayStackLldpXDot3Mib = ModuleIdentity((1, 3, 6, 1, 4, 1, 45, 5, 47))
bayStackLldpXDot3Mib.setRevisions(('2014-10-22 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: bayStackLldpXDot3Mib.setRevisionsDescriptions(('Ver 1: Initial version.',))
if mibBuilder.loadTexts: bayStackLldpXDot3Mib.setLastUpdated('201410220000Z')
if mibBuilder.loadTexts: bayStackLldpXDot3Mib.setOrganization('Avaya Inc.')
if mibBuilder.loadTexts: bayStackLldpXDot3Mib.setContactInfo('avaya.com')
if mibBuilder.loadTexts: bayStackLldpXDot3Mib.setDescription('This MIB module is an extension to the standard LLDP-EXT-DOT3 MIB.')
bsLldpXDot3Notifications = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 47, 0))
bsLldpXDot3Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 47, 1))
bsLldpXdot3Config = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 1))
bsLldpXdot3LocalData = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2))
bsLldpXdot3RemoteData = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3))
bsLldpXdot3LocPowerTable = MibTable((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1), )
if mibBuilder.loadTexts: bsLldpXdot3LocPowerTable.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3LocPowerTable.setDescription('This table contains one row per port of PSE PoE information on the local system known to this agent.')
bsLldpXdot3LocPowerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1), )
lldpXdot3LocPowerEntry.registerAugmentions(("BAY-STACK-LLDP-EXT-DOT3-MIB", "bsLldpXdot3LocPowerEntry"))
bsLldpXdot3LocPowerEntry.setIndexNames(*lldpXdot3LocPowerEntry.getIndexNames())
if mibBuilder.loadTexts: bsLldpXdot3LocPowerEntry.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3LocPowerEntry.setDescription('Information about a particular port PoE information.')
bsLldpXdot3LocPowerType = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("type2pse", 1), ("type2pd", 2), ("type1pse", 3), ("type1pd", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsLldpXdot3LocPowerType.setReference('802.3at, Section 30.12.2')
if mibBuilder.loadTexts: bsLldpXdot3LocPowerType.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3LocPowerType.setDescription('A GET attribute that returns whether the local system is a PSE or a PD and whether it is Type 1 or Type 2.')
bsLldpXdot3LocPowerSource = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("primaryPs", 2), ("backupPs", 3), ("reserved", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsLldpXdot3LocPowerSource.setReference('802.3at, Section 30.12.2')
if mibBuilder.loadTexts: bsLldpXdot3LocPowerSource.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3LocPowerSource.setDescription('A GET attribute indicating the PSE Power Sources of the local system. A PSE indicates whether it is being powered by a primary power source; a backup power source; or unknown. A value primaryPs(2) indicates that the device advertises its power source as primary. A value backupPs(3) indicates that the device advertises its power source as backup.')
bsLldpXdot3LocPowerPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("critical", 2), ("high", 3), ("low", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsLldpXdot3LocPowerPriority.setReference('802.3at, Section 30.12.2')
if mibBuilder.loadTexts: bsLldpXdot3LocPowerPriority.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3LocPowerPriority.setDescription('Reflects the PD power priority that is being advertised on this PSE port. If both locally configure priority and ldpXMedRemXPoEPDPowerPriority are available on this port, it is a matter of local policy which one takes precedence. This object reflects the active value on this port. If the priority is not configured or known by the PD, the value unknown(1) will be returned. A value critical(2) indicates that the device advertises its power Priority as critical, as per RFC 3621. A value high(3) indicates that the device advertises its power Priority as high, as per RFC 3621. A value low(4) indicates that the device advertises its power Priority as low, as per RFC 3621.')
bsLldpXdot3LocPDRequestedPowerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setUnits('tenth of watt').setMaxAccess("readonly")
if mibBuilder.loadTexts: bsLldpXdot3LocPDRequestedPowerValue.setReference('802.3at, Section 30.12.2')
if mibBuilder.loadTexts: bsLldpXdot3LocPDRequestedPowerValue.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3LocPDRequestedPowerValue.setDescription('A GET attribute that returns the PD requested power value. For a PSE, it is the power value that the PSE mirrors back to the remote system. This is the PD requested power value that was used by the PSE to compute the power it has currently allocated to the remote system. It is expressed in units of 0.1 watts.')
bsLldpXdot3LocPSEAllocatedPowerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setUnits('tenth of watt').setMaxAccess("readonly")
if mibBuilder.loadTexts: bsLldpXdot3LocPSEAllocatedPowerValue.setReference('802.3at, Section 30.12.2')
if mibBuilder.loadTexts: bsLldpXdot3LocPSEAllocatedPowerValue.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3LocPSEAllocatedPowerValue.setDescription('A GET attribute that returns the PSE allocated power value. For a PSE, it is the power value that the PSE has currently allocated to the remote system. The PSE allocated power value is the maximum input average power that the PSE wants the PD to ever draw under this allocation if it is accepted. It is expressed in units of 0.1 watts.')
bsLldpXdot3RemPowerTable = MibTable((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1), )
if mibBuilder.loadTexts: bsLldpXdot3RemPowerTable.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3RemPowerTable.setDescription('This table contains information about the PoE device type as advertised by the remote system.')
bsLldpXdot3RemPowerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1), )
lldpXdot3RemPowerEntry.registerAugmentions(("BAY-STACK-LLDP-EXT-DOT3-MIB", "bsLldpXdot3RemPowerEntry"))
bsLldpXdot3RemPowerEntry.setIndexNames(*lldpXdot3RemPowerEntry.getIndexNames())
if mibBuilder.loadTexts: bsLldpXdot3RemPowerEntry.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3RemPowerEntry.setDescription('Information about a particular port component.')
bsLldpXdot3RemPowerType = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("type2pse", 1), ("type2pd", 2), ("type1pse", 3), ("type1pd", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsLldpXdot3RemPowerType.setReference('802.3at, Section 30.12.3')
if mibBuilder.loadTexts: bsLldpXdot3RemPowerType.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3RemPowerType.setDescription('A GET attribute that returns whether the remote system is a PSE or a PD and whether it is Type 1 or Type 2.')
bsLldpXdot3RemPowerSource = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("pse", 2), ("reserved", 3), ("pseAndLocal", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsLldpXdot3RemPowerSource.setReference('802.3at, Section 30.12.3')
if mibBuilder.loadTexts: bsLldpXdot3RemPowerSource.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3RemPowerSource.setDescription('A GET attribute that returns the power sources of the remote system. When the remote system is a PD, it indicates whether it is being powered by: a PSE and locally; locally only; by a PSE only; or unknown.')
bsLldpXdot3RemPowerPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("critical", 2), ("high", 3), ("low", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsLldpXdot3RemPowerPriority.setReference('802.3at, Section 30.12.3')
if mibBuilder.loadTexts: bsLldpXdot3RemPowerPriority.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3RemPowerPriority.setDescription('A GET operation returns the priority of the PD system received from the remote system. For a PD, this is the priority that the remote system has assigned to the PD.')
bsLldpXdot3RemPDRequestedPowerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setUnits('tenth of watt').setMaxAccess("readonly")
if mibBuilder.loadTexts: bsLldpXdot3RemPDRequestedPowerValue.setReference('802.3at, Section 30.12.3')
if mibBuilder.loadTexts: bsLldpXdot3RemPDRequestedPowerValue.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3RemPDRequestedPowerValue.setDescription('A GET attribute that for a PSE returns the PD requested power value received from the remote system. It is expressed in units of 0.1 watts.')
bsLldpXdot3RemPSEAllocatedPowerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setUnits('tenth of watt').setMaxAccess("readonly")
if mibBuilder.loadTexts: bsLldpXdot3RemPSEAllocatedPowerValue.setReference('802.3at, Section 30.12.3')
if mibBuilder.loadTexts: bsLldpXdot3RemPSEAllocatedPowerValue.setStatus('current')
if mibBuilder.loadTexts: bsLldpXdot3RemPSEAllocatedPowerValue.setDescription('A GET attribute that for a PSE returns the PSE allocated power value that was used by the remote system to compute the power value that it has currently requested from the PSE. It is expressed in units of 0.1 watts.')
mibBuilder.exportSymbols("BAY-STACK-LLDP-EXT-DOT3-MIB", bsLldpXdot3RemoteData=bsLldpXdot3RemoteData, bsLldpXdot3LocPowerSource=bsLldpXdot3LocPowerSource, bsLldpXdot3RemPowerSource=bsLldpXdot3RemPowerSource, bayStackLldpXDot3Mib=bayStackLldpXDot3Mib, bsLldpXDot3Objects=bsLldpXDot3Objects, bsLldpXdot3RemPowerType=bsLldpXdot3RemPowerType, bsLldpXdot3RemPowerPriority=bsLldpXdot3RemPowerPriority, bsLldpXdot3RemPSEAllocatedPowerValue=bsLldpXdot3RemPSEAllocatedPowerValue, PYSNMP_MODULE_ID=bayStackLldpXDot3Mib, bsLldpXdot3LocPowerEntry=bsLldpXdot3LocPowerEntry, bsLldpXdot3LocPDRequestedPowerValue=bsLldpXdot3LocPDRequestedPowerValue, bsLldpXdot3LocalData=bsLldpXdot3LocalData, bsLldpXdot3LocPowerType=bsLldpXdot3LocPowerType, bsLldpXdot3Config=bsLldpXdot3Config, bsLldpXdot3RemPowerEntry=bsLldpXdot3RemPowerEntry, bsLldpXDot3Notifications=bsLldpXDot3Notifications, bsLldpXdot3RemPowerTable=bsLldpXdot3RemPowerTable, bsLldpXdot3LocPowerPriority=bsLldpXdot3LocPowerPriority, bsLldpXdot3LocPowerTable=bsLldpXdot3LocPowerTable, bsLldpXdot3RemPDRequestedPowerValue=bsLldpXdot3RemPDRequestedPowerValue, bsLldpXdot3LocPSEAllocatedPowerValue=bsLldpXdot3LocPSEAllocatedPowerValue)
| (integer, object_identifier, octet_string) = mibBuilder.importSymbols('ASN1', 'Integer', 'ObjectIdentifier', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(single_value_constraint, value_range_constraint, constraints_union, value_size_constraint, constraints_intersection) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ValueRangeConstraint', 'ConstraintsUnion', 'ValueSizeConstraint', 'ConstraintsIntersection')
(lldp_xdot3_loc_power_entry, lldp_xdot3_rem_power_entry) = mibBuilder.importSymbols('LLDP-EXT-DOT3-MIB', 'lldpXdot3LocPowerEntry', 'lldpXdot3RemPowerEntry')
(module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup')
(counter64, object_identity, mib_scalar, mib_table, mib_table_row, mib_table_column, time_ticks, gauge32, counter32, ip_address, mib_identifier, iso, notification_type, integer32, bits, module_identity, unsigned32) = mibBuilder.importSymbols('SNMPv2-SMI', 'Counter64', 'ObjectIdentity', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'TimeTicks', 'Gauge32', 'Counter32', 'IpAddress', 'MibIdentifier', 'iso', 'NotificationType', 'Integer32', 'Bits', 'ModuleIdentity', 'Unsigned32')
(display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention')
(bay_stack_mibs,) = mibBuilder.importSymbols('SYNOPTICS-ROOT-MIB', 'bayStackMibs')
bay_stack_lldp_x_dot3_mib = module_identity((1, 3, 6, 1, 4, 1, 45, 5, 47))
bayStackLldpXDot3Mib.setRevisions(('2014-10-22 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts:
bayStackLldpXDot3Mib.setRevisionsDescriptions(('Ver 1: Initial version.',))
if mibBuilder.loadTexts:
bayStackLldpXDot3Mib.setLastUpdated('201410220000Z')
if mibBuilder.loadTexts:
bayStackLldpXDot3Mib.setOrganization('Avaya Inc.')
if mibBuilder.loadTexts:
bayStackLldpXDot3Mib.setContactInfo('avaya.com')
if mibBuilder.loadTexts:
bayStackLldpXDot3Mib.setDescription('This MIB module is an extension to the standard LLDP-EXT-DOT3 MIB.')
bs_lldp_x_dot3_notifications = mib_identifier((1, 3, 6, 1, 4, 1, 45, 5, 47, 0))
bs_lldp_x_dot3_objects = mib_identifier((1, 3, 6, 1, 4, 1, 45, 5, 47, 1))
bs_lldp_xdot3_config = mib_identifier((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 1))
bs_lldp_xdot3_local_data = mib_identifier((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2))
bs_lldp_xdot3_remote_data = mib_identifier((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3))
bs_lldp_xdot3_loc_power_table = mib_table((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1))
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerTable.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerTable.setDescription('This table contains one row per port of PSE PoE information on the local system known to this agent.')
bs_lldp_xdot3_loc_power_entry = mib_table_row((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1))
lldpXdot3LocPowerEntry.registerAugmentions(('BAY-STACK-LLDP-EXT-DOT3-MIB', 'bsLldpXdot3LocPowerEntry'))
bsLldpXdot3LocPowerEntry.setIndexNames(*lldpXdot3LocPowerEntry.getIndexNames())
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerEntry.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerEntry.setDescription('Information about a particular port PoE information.')
bs_lldp_xdot3_loc_power_type = mib_table_column((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('type2pse', 1), ('type2pd', 2), ('type1pse', 3), ('type1pd', 4)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerType.setReference('802.3at, Section 30.12.2')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerType.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerType.setDescription('A GET attribute that returns whether the local system is a PSE or a PD and whether it is Type 1 or Type 2.')
bs_lldp_xdot3_loc_power_source = mib_table_column((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('unknown', 1), ('primaryPs', 2), ('backupPs', 3), ('reserved', 4)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerSource.setReference('802.3at, Section 30.12.2')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerSource.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerSource.setDescription('A GET attribute indicating the PSE Power Sources of the local system. A PSE indicates whether it is being powered by a primary power source; a backup power source; or unknown. A value primaryPs(2) indicates that the device advertises its power source as primary. A value backupPs(3) indicates that the device advertises its power source as backup.')
bs_lldp_xdot3_loc_power_priority = mib_table_column((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('unknown', 1), ('critical', 2), ('high', 3), ('low', 4)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerPriority.setReference('802.3at, Section 30.12.2')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerPriority.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3LocPowerPriority.setDescription('Reflects the PD power priority that is being advertised on this PSE port. If both locally configure priority and ldpXMedRemXPoEPDPowerPriority are available on this port, it is a matter of local policy which one takes precedence. This object reflects the active value on this port. If the priority is not configured or known by the PD, the value unknown(1) will be returned. A value critical(2) indicates that the device advertises its power Priority as critical, as per RFC 3621. A value high(3) indicates that the device advertises its power Priority as high, as per RFC 3621. A value low(4) indicates that the device advertises its power Priority as low, as per RFC 3621.')
bs_lldp_xdot3_loc_pd_requested_power_value = mib_table_column((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1, 4), integer32().subtype(subtypeSpec=value_range_constraint(1, 255))).setUnits('tenth of watt').setMaxAccess('readonly')
if mibBuilder.loadTexts:
bsLldpXdot3LocPDRequestedPowerValue.setReference('802.3at, Section 30.12.2')
if mibBuilder.loadTexts:
bsLldpXdot3LocPDRequestedPowerValue.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3LocPDRequestedPowerValue.setDescription('A GET attribute that returns the PD requested power value. For a PSE, it is the power value that the PSE mirrors back to the remote system. This is the PD requested power value that was used by the PSE to compute the power it has currently allocated to the remote system. It is expressed in units of 0.1 watts.')
bs_lldp_xdot3_loc_pse_allocated_power_value = mib_table_column((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 2, 1, 1, 5), integer32().subtype(subtypeSpec=value_range_constraint(1, 255))).setUnits('tenth of watt').setMaxAccess('readonly')
if mibBuilder.loadTexts:
bsLldpXdot3LocPSEAllocatedPowerValue.setReference('802.3at, Section 30.12.2')
if mibBuilder.loadTexts:
bsLldpXdot3LocPSEAllocatedPowerValue.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3LocPSEAllocatedPowerValue.setDescription('A GET attribute that returns the PSE allocated power value. For a PSE, it is the power value that the PSE has currently allocated to the remote system. The PSE allocated power value is the maximum input average power that the PSE wants the PD to ever draw under this allocation if it is accepted. It is expressed in units of 0.1 watts.')
bs_lldp_xdot3_rem_power_table = mib_table((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1))
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerTable.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerTable.setDescription('This table contains information about the PoE device type as advertised by the remote system.')
bs_lldp_xdot3_rem_power_entry = mib_table_row((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1))
lldpXdot3RemPowerEntry.registerAugmentions(('BAY-STACK-LLDP-EXT-DOT3-MIB', 'bsLldpXdot3RemPowerEntry'))
bsLldpXdot3RemPowerEntry.setIndexNames(*lldpXdot3RemPowerEntry.getIndexNames())
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerEntry.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerEntry.setDescription('Information about a particular port component.')
bs_lldp_xdot3_rem_power_type = mib_table_column((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('type2pse', 1), ('type2pd', 2), ('type1pse', 3), ('type1pd', 4)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerType.setReference('802.3at, Section 30.12.3')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerType.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerType.setDescription('A GET attribute that returns whether the remote system is a PSE or a PD and whether it is Type 1 or Type 2.')
bs_lldp_xdot3_rem_power_source = mib_table_column((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('unknown', 1), ('pse', 2), ('reserved', 3), ('pseAndLocal', 4)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerSource.setReference('802.3at, Section 30.12.3')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerSource.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerSource.setDescription('A GET attribute that returns the power sources of the remote system. When the remote system is a PD, it indicates whether it is being powered by: a PSE and locally; locally only; by a PSE only; or unknown.')
bs_lldp_xdot3_rem_power_priority = mib_table_column((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('unknown', 1), ('critical', 2), ('high', 3), ('low', 4)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerPriority.setReference('802.3at, Section 30.12.3')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerPriority.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3RemPowerPriority.setDescription('A GET attribute that returns the priority of the PD, as received from the remote system. For a PD, this is the priority that the remote system has assigned to the PD.')
bs_lldp_xdot3_rem_pd_requested_power_value = mib_table_column((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1, 4), integer32().subtype(subtypeSpec=value_range_constraint(1, 255))).setUnits('tenth of watt').setMaxAccess('readonly')
if mibBuilder.loadTexts:
bsLldpXdot3RemPDRequestedPowerValue.setReference('802.3at, Section 30.12.3')
if mibBuilder.loadTexts:
bsLldpXdot3RemPDRequestedPowerValue.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3RemPDRequestedPowerValue.setDescription('A GET attribute that, for a PSE, returns the PD requested power value received from the remote system. It is expressed in units of 0.1 watts.')
bs_lldp_xdot3_rem_pse_allocated_power_value = mib_table_column((1, 3, 6, 1, 4, 1, 45, 5, 47, 1, 3, 1, 1, 5), integer32().subtype(subtypeSpec=value_range_constraint(1, 255))).setUnits('tenth of watt').setMaxAccess('readonly')
if mibBuilder.loadTexts:
bsLldpXdot3RemPSEAllocatedPowerValue.setReference('802.3at, Section 30.12.3')
if mibBuilder.loadTexts:
bsLldpXdot3RemPSEAllocatedPowerValue.setStatus('current')
if mibBuilder.loadTexts:
bsLldpXdot3RemPSEAllocatedPowerValue.setDescription('A GET attribute that for a PSE returns the PSE allocated power value that was used by the remote system to compute the power value that it has currently requested from the PSE. It is expressed in units of 0.1 watts.')
mibBuilder.exportSymbols('BAY-STACK-LLDP-EXT-DOT3-MIB', bsLldpXdot3RemoteData=bsLldpXdot3RemoteData, bsLldpXdot3LocPowerSource=bsLldpXdot3LocPowerSource, bsLldpXdot3RemPowerSource=bsLldpXdot3RemPowerSource, bayStackLldpXDot3Mib=bayStackLldpXDot3Mib, bsLldpXDot3Objects=bsLldpXDot3Objects, bsLldpXdot3RemPowerType=bsLldpXdot3RemPowerType, bsLldpXdot3RemPowerPriority=bsLldpXdot3RemPowerPriority, bsLldpXdot3RemPSEAllocatedPowerValue=bsLldpXdot3RemPSEAllocatedPowerValue, PYSNMP_MODULE_ID=bayStackLldpXDot3Mib, bsLldpXdot3LocPowerEntry=bsLldpXdot3LocPowerEntry, bsLldpXdot3LocPDRequestedPowerValue=bsLldpXdot3LocPDRequestedPowerValue, bsLldpXdot3LocalData=bsLldpXdot3LocalData, bsLldpXdot3LocPowerType=bsLldpXdot3LocPowerType, bsLldpXdot3Config=bsLldpXdot3Config, bsLldpXdot3RemPowerEntry=bsLldpXdot3RemPowerEntry, bsLldpXDot3Notifications=bsLldpXDot3Notifications, bsLldpXdot3RemPowerTable=bsLldpXdot3RemPowerTable, bsLldpXdot3LocPowerPriority=bsLldpXdot3LocPowerPriority, bsLldpXdot3LocPowerTable=bsLldpXdot3LocPowerTable, bsLldpXdot3RemPDRequestedPowerValue=bsLldpXdot3RemPDRequestedPowerValue, bsLldpXdot3LocPSEAllocatedPowerValue=bsLldpXdot3LocPSEAllocatedPowerValue) |
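The power-value objects above are reported as integers in tenths of a watt over a 1..255 range. As a minimal sketch of the conversion described in the DESCRIPTION clauses (the helper is hypothetical and not part of the generated MIB module):

def tenths_to_watts(raw):
    # 'raw' is the integer an agent reports, e.g. for bsLldpXdot3LocPDRequestedPowerValue
    if not 1 <= raw <= 255:
        raise ValueError('power value outside the 1..255 range defined by the MIB')
    return raw / 10.0

# e.g. a reported value of 130 corresponds to 13.0 W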
class proxy(ref):
def __call__(self, *args, **kwargs):
func = ref.__call__(self)
if func is None:
raise weakref.ReferenceError('referent object is dead')
else:
return func(*args, **kwargs)
def __eq__(self, other):
if type(other) != type(self):
return False
return ref.__call__(self) == ref.__call__(other)
| class Proxy(ref):
def __call__(self, *args, **kwargs):
func = ref.__call__(self)
if func is None:
raise weakref.ReferenceError('referent object is dead')
else:
return func(*args, **kwargs)
def __eq__(self, other):
if type(other) != type(self):
return False
return ref.__call__(self) == ref.__call__(other) |
# Generated file, do not modify by hand
"""Definitions to be used in rbe_repo attr of an rbe_autoconf rule """
_TOOLCHAIN_CONFIG_SPECS = []
_BAZEL_TO_CONFIG_SPEC_NAMES = {}
LATEST = ""
CONTAINER_TO_CONFIG_SPEC_NAMES = {}
_DEFAULT_TOOLCHAIN_CONFIG_SPEC = ""
TOOLCHAIN_CONFIG_AUTOGEN_SPEC = struct(
bazel_to_config_spec_names_map = _BAZEL_TO_CONFIG_SPEC_NAMES,
container_to_config_spec_names_map = CONTAINER_TO_CONFIG_SPEC_NAMES,
default_toolchain_config_spec = _DEFAULT_TOOLCHAIN_CONFIG_SPEC,
latest_container = LATEST,
toolchain_config_specs = _TOOLCHAIN_CONFIG_SPECS,
)
| """Definitions to be used in rbe_repo attr of an rbe_autoconf rule """
_toolchain_config_specs = []
_bazel_to_config_spec_names = {}
latest = ''
container_to_config_spec_names = {}
_default_toolchain_config_spec = ''
toolchain_config_autogen_spec = struct(bazel_to_config_spec_names_map=_bazel_to_config_spec_names, container_to_config_spec_names_map=container_to_config_spec_names, default_toolchain_config_spec=_default_toolchain_config_spec, latest_container=latest, toolchain_config_specs=_toolchain_config_specs)
"""
None
"""
class Solution:
def lengthOfLongestSubstringKDistinct(self, s: str, k: int) -> int:
counter = {}
start = 0
m_len = 0
for i, c in enumerate(s):
if c not in counter:
counter[c] = 1
else:
counter[c] += 1
while len(counter.keys()) > k:
counter[s[start]] -= 1
if counter[s[start]] == 0:
del counter[s[start]]
start += 1
m_len = max(m_len, i - start + 1)
return m_len | """
None
"""
class Solution:
def length_of_longest_substring_k_distinct(self, s: str, k: int) -> int:
counter = {}
start = 0
m_len = 0
for (i, c) in enumerate(s):
if c not in counter:
counter[c] = 1
else:
counter[c] += 1
while len(counter.keys()) > k:
counter[s[start]] -= 1
if counter[s[start]] == 0:
del counter[s[start]]
start += 1
m_len = max(m_len, i - start + 1)
return m_len |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
@Desc : This module defines all codes, constants maintained globally \
and used across marvin and its test features.The main purpose \
is to maintain readability, maintain one common place for \
all codes used or reused across test features. It enhances \
maintainability and readability. Users just import statement \
to receive all the codes mentioned here. EX: Here, we define \
a code viz., ENABLED with value "Enabled",then using \
this code in a sample feature say test_a.py as below. \
from codes import *
if obj.getvalue() == ENABLED
@DateAdded: 20th October 2013
"""
RUNNING = "Running"
RECURRING = "RECURRING"
ENABLED = "Enabled"
NETWORK_OFFERING = "network_offering"
ROOT = "ROOT"
INVALID_INPUT = "INVALID INPUT"
EMPTY_LIST = "EMPTY_LIST"
FAIL = 0
PASS = 1
MATCH_NOT_FOUND = "ELEMENT NOT FOUND IN THE INPUT"
SUCCESS = "SUCCESS"
EXCEPTION_OCCURRED = "Exception Occurred"
NO = "no"
YES = "yes"
FAILED = "FAILED"
UNKNOWN_ERROR = "Unknown Error"
EXCEPTION = "EXCEPTION"
BASIC_ZONE = "basic"
ISOLATED_NETWORK = "ISOLATED"
SHARED_NETWORK = "SHARED"
VPC_NETWORK = "VPC"
| """
@Desc : This module defines all codes, constants maintained globally and used across marvin and its test features.The main purpose is to maintain readability, maintain one common place for all codes used or reused across test features. It enhances maintainability and readability. Users just import statement to receive all the codes mentioned here. EX: Here, we define a code viz., ENABLED with value "Enabled",then using this code in a sample feature say test_a.py as below.
from codes import *
if obj.getvalue() == ENABLED
@DateAdded: 20th October 2013
"""
running = 'Running'
recurring = 'RECURRING'
enabled = 'Enabled'
network_offering = 'network_offering'
root = 'ROOT'
invalid_input = 'INVALID INPUT'
empty_list = 'EMPTY_LIST'
fail = 0
pass_ = 1
match_not_found = 'ELEMENT NOT FOUND IN THE INPUT'
success = 'SUCCESS'
exception_occurred = 'Exception Occurred'
no = 'no'
yes = 'yes'
failed = 'FAILED'
unknown_error = 'Unknown Error'
exception = 'EXCEPTION'
basic_zone = 'basic'
isolated_network = 'ISOLATED'
shared_network = 'SHARED'
vpc_network = 'VPC' |
loop_flag = False
def consult_check(consult, doctor, upper, lower):
doc_test = doctor in {'Dr A Wettstein', 'Dr S Ghaly',
'Dr S Vivekanandarajah'}
path = upper in {'pb', 'pp'} or lower in {'cb', 'cp', 'sb', 'sp'}
cv_test = (doctor == 'Dr C Vickers') and path
return doc_test or cv_test
def get_consult(doctor, upper, lower, loop_flag):
while True:
consult = input('Consult: ')
if consult == '0':
consult = 'none'
if consult == 'q':
loop_flag = True
break
if consult in {'110', '116', 'none'}:
break
print('TRY AGAIN!')
if consult_check(consult, doctor, upper, lower) and loop_flag is False:
print('Confirm with {} that he/she'
' does not want a consult'.format(doctor))
while True:
consult = input('Consult either 0,110,116: ')
if consult == '0':
consult = 'none'
if consult in {'110', '116', 'none'}:
break
return consult, loop_flag
if __name__ == '__main__':
print(get_consult('Dr A Wettstein', '0', 'co', loop_flag))
| loop_flag = False
def consult_check(consult, doctor, upper, lower):
doc_test = doctor in {'Dr A Wettstein', 'Dr S Ghaly', 'Dr S Vivekanandarajah'}
path = upper in {'pb', 'pp'} or lower in {'cb', 'cp', 'sb', 'sp'}
cv_test = doctor == 'Dr C Vickers' and path
return doc_test or cv_test
def get_consult(doctor, upper, lower, loop_flag):
while True:
consult = input('Consult: ')
if consult == '0':
consult = 'none'
if consult == 'q':
loop_flag = True
break
if consult in {'110', '116', 'none'}:
break
print('TRY AGAIN!')
if consult_check(consult, doctor, upper, lower) and loop_flag is False:
print('Confirm with {} that he/she does not want a consult'.format(doctor))
while True:
consult = input('Consult either 0,110,116: ')
if consult == '0':
consult = 'none'
if consult in {'110', '116', 'none'}:
break
return (consult, loop_flag)
if __name__ == '__main__':
print(get_consult('Dr A Wettstein', '0', 'co', loop_flag)) |
def insertion_sort(array):
for index in range(1,len(array)):
value = array[index]
i = index - 1
while i >= 0:
if array[i] > array[i+1]:
array[i+1] = array[i]
array[i] = value
i = i - 1
else:
break
return array
if __name__ == '__main__':
array = [0,2,1,3,6,4,5,7,9,8]
insertion_sort(array)
| def insertion_sort(array):
for index in range(1, len(array)):
value = array[index]
i = index - 1
while i >= 0:
if array[i] > array[i + 1]:
array[i + 1] = array[i]
array[i] = value
i = i - 1
else:
break
return array
if __name__ == '__main__':
array = [0, 2, 1, 3, 6, 4, 5, 7, 9, 8]
insertion_sort(array) |
#!/usr/bin/env python3
RIGHT = 'R'
LEFT = 'L'
FORWARD = 'F'
NORTH = 'N'
EAST = 'E'
SOUTH = 'S'
WEST = 'W'
DIRECTIONS = {
'N': (0, 1),
'E': (1, 0),
'S': (0, -1),
'W': (-1, 0)
}
COMPASS = {
0: 'N',
90: 'E',
180: 'S',
270: 'W'
}
def parse(step):
return (step[0], int(step[1:]))
def load(file):
with open(file) as f:
step = [parse(line.strip()) for line in f.readlines()]
return step
def rotate(rotation, instruction, value):
if instruction != LEFT and instruction != RIGHT:
raise Exception(f'Unknown rotation instruction: {instruction}')
if value % 90 != 0:
raise Exception(f'Invalid rotation ({value}). Rotations must be increments of 90.')
direction = 0
if instruction == LEFT:
direction = -1
else:
direction = 1
rotation += direction * value
rotation %= 360
return rotation
def rotate_wp(x, y, instruction, value):
if instruction != LEFT and instruction != RIGHT:
raise Exception(f'Unknown rotation instruction: {instruction}')
if instruction == LEFT:
instruction = RIGHT
value = -value
value %= 360
if value == 0:
return x, y
elif value == 90:
return y, -x
elif value == 180:
return -x, -y
elif value == 270:
return -y, x
raise Exception(f'Invalid rotation ({value}). Rotations must be increments of 90.')
def part1(route):
'''
>>> part1(load('test1.txt'))
25
'''
x = 0
y = 0
ship_rotation = 90
for step in route:
instruction = step[0]
value = step[1]
if instruction == RIGHT or instruction == LEFT:
ship_rotation = rotate(ship_rotation, instruction, value)
else:
direction = DIRECTIONS[COMPASS[ship_rotation]]
if instruction != FORWARD:
direction = DIRECTIONS[instruction]
x += direction[0] * value
y += direction[1] * value
return abs(x) + abs(y)
def part2(route):
'''
>>> part2(load('test1.txt'))
286
'''
x = 0
y = 0
wp_x = 10
wp_y = 1
for step in route:
instruction = step[0]
value = step[1]
if instruction == FORWARD:
x += wp_x * value
y += wp_y * value
elif instruction == RIGHT or instruction == LEFT:
(wp_x, wp_y) = rotate_wp(wp_x, wp_y, instruction, value)
else:
direction = DIRECTIONS[instruction]
wp_x += direction[0] * value
wp_y += direction[1] * value
return abs(x) + abs(y)
def main():
route = load('input.txt')
value = part1(route)
print(f'Part 1: {value}')
assert value == 2057
value = part2(route)
print(f'Part 2: {value}')
assert value == 71504
if __name__ == '__main__':
main() | right = 'R'
left = 'L'
forward = 'F'
north = 'N'
east = 'E'
south = 'S'
west = 'W'
directions = {'N': (0, 1), 'E': (1, 0), 'S': (0, -1), 'W': (-1, 0)}
compass = {0: 'N', 90: 'E', 180: 'S', 270: 'W'}
def parse(step):
return (step[0], int(step[1:]))
def load(file):
with open(file) as f:
step = [parse(line.strip()) for line in f.readlines()]
return step
def rotate(rotation, instruction, value):
if instruction != left and instruction != right:
raise Exception(f'Unknown rotation instruction: {instruction}')
if value % 90 != 0:
raise Exception(f'Invalid rotation ({value}). Rotations must be increments of 90.')
direction = 0
if instruction == left:
direction = -1
else:
direction = 1
rotation += direction * value
rotation %= 360
return rotation
def rotate_wp(x, y, instruction, value):
if instruction != left and instruction != right:
raise Exception(f'Unknown rotation instruction: {instruction}')
if instruction == left:
instruction = right
value = -value
value %= 360
if value == 0:
return (x, y)
elif value == 90:
return (y, -x)
elif value == 180:
return (-x, -y)
elif value == 270:
return (-y, x)
raise Exception(f'Invalid rotation ({value}). Rotations must be increments of 90.')
def part1(route):
"""
>>> part1(load('test1.txt'))
25
"""
x = 0
y = 0
ship_rotation = 90
for step in route:
instruction = step[0]
value = step[1]
if instruction == right or instruction == left:
ship_rotation = rotate(ship_rotation, instruction, value)
else:
direction = directions[compass[ship_rotation]]
if instruction != forward:
direction = directions[instruction]
x += direction[0] * value
y += direction[1] * value
return abs(x) + abs(y)
def part2(route):
"""
>>> part2(load('test1.txt'))
286
"""
x = 0
y = 0
wp_x = 10
wp_y = 1
for step in route:
instruction = step[0]
value = step[1]
if instruction == forward:
x += wp_x * value
y += wp_y * value
elif instruction == right or instruction == left:
(wp_x, wp_y) = rotate_wp(wp_x, wp_y, instruction, value)
else:
direction = directions[instruction]
wp_x += direction[0] * value
wp_y += direction[1] * value
return abs(x) + abs(y)
def main():
route = load('input.txt')
value = part1(route)
print(f'Part 1: {value}')
assert value == 2057
value = part2(route)
print(f'Part 2: {value}')
assert value == 71504
if __name__ == '__main__':
main() |
#!/usr/bin/python
#coding=utf-8
class RequestTmBase:
def addRecode(self, ssp, url, tmSpan, state, concurrency,countPer10s,size):
raise NotImplementedError
def startRecode(self):
raise NotImplementedError
def startServer(self):
raise NotImplementedError
def endRecode(self):
raise NotImplementedError | class Requesttmbase:
def add_recode(self, ssp, url, tmSpan, state, concurrency, countPer10s, size):
raise NotImplementedError
def start_recode(self):
raise NotImplementedError
def start_server(self):
raise NotImplementedError
def end_recode(self):
raise NotImplementedError |
"""
Exceptions for the pystrike module.
"""
class ConnectionException(Exception):
"""
Raised when the client is unable to communicate with the indicated
host.
This exception could indicate that the client is unable to contact
the indicated host, or it could indicate that the client was unable
to send an HTTP request to the indicated host, or that the client
was unable to receive an HTTP response from the indicated host.
"""
pass
class ClientRequestException(Exception):
"""
Raised when the server returns a 4xx response.
The library code shall include the content of the error message
from Strike, if available.
"""
pass
class ServerErrorException(Exception):
"""
Raised when the server returns a 5xx response.
The library code shall include the content of the error message
from Strike, if available.
"""
pass
class UnexpectedResponseException(Exception):
"""
Raised when the server returns a response that the library does not
understand.
"""
pass
class ChargeNotFoundException(ClientRequestException):
"""
Raised when the server returns a 404 response.
"""
pass
| """
Exceptions for the pystrike module.
"""
class Connectionexception(Exception):
"""
Raised when the client is unable to communicate with the indicated
host.
This exception could indicate that the client is unable to contact
the indicated host, or it could indicate that the client was unable
to send an HTTP request to the indicated host, or that the client
was unable to receive an HTTP response from the indicated host.
"""
pass
class Clientrequestexception(Exception):
"""
Raised when the server returns a 4xx response.
The library code shall include the content of the error message
from Strike, if available.
"""
pass
class Servererrorexception(Exception):
"""
Raised when the server returns a 5xx response.
The library code shall include the content of the error message
from Strike, if available.
"""
pass
class Unexpectedresponseexception(Exception):
"""
Raised when the server returns a response that the library does not
understand.
"""
pass
class Chargenotfoundexception(Clientrequestexception):
"""
Raised when the server returns a 404 response.
"""
pass |
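The docstrings above define a small hierarchy keyed to HTTP status classes (4xx, 5xx, and the 404 special case). A minimal sketch of how a caller might pick the matching class, using the original class names from the left-hand version; the dispatch helper itself is hypothetical and not part of pystrike:

def exception_for_status(status_code):
    # Illustrative mapping from an HTTP status code to the documented exception classes.
    if status_code == 404:
        return ChargeNotFoundException
    if 400 <= status_code < 500:
        return ClientRequestException
    if 500 <= status_code < 600:
        return ServerErrorException
    return UnexpectedResponseException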
class Solution:
def sumEvenAfterQueries(self, A, queries):
# keep around total sum, we update (if we find evens) on
# each iteration.
evenSum = sum(i for i in A if i & 1 == 0)
for idx, (value, index) in enumerate(queries):
old_value = A[index]
new_value = value + old_value
# add new value if it is even.
if not new_value & 1:
evenSum = evenSum + new_value
# remove old value if it was even.
if not old_value & 1:
evenSum = evenSum - old_value
A[index] = new_value
# reuse it
queries[idx] = evenSum
return queries
| class Solution:
def sum_even_after_queries(self, A, queries):
even_sum = sum((i for i in A if i & 1 == 0))
for (idx, (value, index)) in enumerate(queries):
old_value = A[index]
new_value = value + old_value
if not new_value & 1:
even_sum = even_sum + new_value
if not old_value & 1:
even_sum = even_sum - old_value
A[index] = new_value
queries[idx] = even_sum
return queries |
"""
Objective
In this challenge, we're going to use loops to help us do some simple math. Check out the Tutorial tab to learn more.
Task
Given an integer, N, print its first 10 multiples.
Each multiple N * i (where 1 <= i <= 10) should be printed on a new line in the form:
N x i = result.
"""
N = int(input().strip())
for i in range(1, 11):
print('{0} x {1} = {2}'.format(N, i, N * i))
| """
Objective
In this challenge, we're going to use loops to help us do some simple math. Check out the Tutorial tab to learn more.
Task
Given an integer, N, print its first 10 multiples.
Each multiple N * i (where 1 <= i <= 10) should be printed on a new line in the form:
N x i = result.
"""
n = int(input().strip())
for i in range(1, 11):
print('{0} x {1} = {2}'.format(n, i, n * i))
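As a quick illustration of the required format, an input of 3 would produce:

3 x 1 = 3
3 x 2 = 6
...
3 x 10 = 30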
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2022/2/21
class OperationLog:
"""
append only write operation log
"""
pass
| class Operationlog:
"""
append only write operation log
"""
pass |
"""
Module: 'ucryptolib' on LEGO EV3 v1.0.0
"""
# MCU: sysname=ev3, nodename=ev3, release=('v1.0.0',), version=('0.0.0',), machine=ev3
# Stubber: 1.3.2
class aes:
''
def decrypt():
pass
def encrypt():
pass
| """
Module: 'ucryptolib' on LEGO EV3 v1.0.0
"""
class Aes:
""""""
def decrypt():
pass
def encrypt():
pass |
# -*- coding: utf-8 -*-
"""ciscosparkapi exception classes."""
__author__ = "Chris Lunsford"
__author_email__ = "chrlunsf@cisco.com"
__copyright__ = "Copyright (c) 2016 Cisco Systems, Inc."
__license__ = "MIT"
SPARK_RESPONSE_CODES = {
200: "OK",
204: "Member deleted.",
400: "The request was invalid or cannot be otherwise served. An "
"accompanying error message will explain further.",
401: "Authentication credentials were missing or incorrect.",
403: "The request is understood, but it has been refused or access is not "
"allowed.",
404: "The URI requested is invalid or the resource requested, such as a "
"user, does not exist. Also returned when the requested format is "
"not supported by the requested method.",
409: "The request could not be processed because it conflicts with some "
"established rule of the system. For example, a person may not be "
"added to a room more than once.",
500: "Something went wrong on the server.",
503: "Server is overloaded with requests. Try again later."
}
class ciscosparkapiException(Exception):
"""Base class for all ciscosparkapi package exceptions."""
def __init__(self, *args, **kwargs):
super(ciscosparkapiException, self).__init__(*args, **kwargs)
class SparkApiError(ciscosparkapiException):
"""Errors returned by requests to the Cisco Spark cloud APIs."""
def __init__(self, response_code, request=None, response=None):
assert isinstance(response_code, int)
self.response_code = response_code
self.request = request
self.response = response
response_text = SPARK_RESPONSE_CODES.get(response_code)
if response_text:
self.response_text = response_text
error_message = "Response Code [{!s}] - {}".format(response_code,
response_text)
else:
error_message = "Response Code [{!s}] - " \
"Unknown Response Code".format(response_code)
super(SparkApiError, self).__init__(error_message)
| """ciscosparkapi exception classes."""
__author__ = 'Chris Lunsford'
__author_email__ = 'chrlunsf@cisco.com'
__copyright__ = 'Copyright (c) 2016 Cisco Systems, Inc.'
__license__ = 'MIT'
spark_response_codes = {200: 'OK', 204: 'Member deleted.', 400: 'The request was invalid or cannot be otherwise served. An accompanying error message will explain further.', 401: 'Authentication credentials were missing or incorrect.', 403: 'The request is understood, but it has been refused or access is not allowed.', 404: 'The URI requested is invalid or the resource requested, such as a user, does not exist. Also returned when the requested format is not supported by the requested method.', 409: 'The request could not be processed because it conflicts with some established rule of the system. For example, a person may not be added to a room more than once.', 500: 'Something went wrong on the server.', 503: 'Server is overloaded with requests. Try again later.'}
class Ciscosparkapiexception(Exception):
"""Base class for all ciscosparkapi package exceptions."""
def __init__(self, *args, **kwargs):
super(Ciscosparkapiexception, self).__init__(*args, **kwargs)
class Sparkapierror(Ciscosparkapiexception):
"""Errors returned by requests to the Cisco Spark cloud APIs."""
def __init__(self, response_code, request=None, response=None):
assert isinstance(response_code, int)
self.response_code = response_code
self.request = request
self.response = response
response_text = spark_response_codes.get(response_code)
if response_text:
self.response_text = response_text
error_message = 'Response Code [{!s}] - {}'.format(response_code, response_text)
else:
error_message = 'Response Code [{!s}] - Unknown Response Code'.format(response_code)
super(Sparkapierror, self).__init__(error_message)
def get_layers(width, height, data):
layers = []
layer_area = width*height
data = [pixel for pixel in str(data)]
data.remove('\n')
while len(data) > 0:
layers.append(data[:layer_area])
data = data[layer_area:]
return layers
WIDTH = 25
HEIGHT = 6
layers = get_layers(WIDTH, HEIGHT, open('input').read())
fewest_zero_layer = min(layers, key=lambda layer: layer.count('0'))
print('Part 1 solution: %i' % (fewest_zero_layer.count('1') * fewest_zero_layer.count('2')))
BLACK = '0'
WHITE = '1'
TRANS = '2'
COLORS = {
WHITE: u'\u2588',
BLACK: ' ',
TRANS: None,
}
imagedata = []
for index, pixel in enumerate(layers[0]):
depth = 0
while pixel == TRANS:
depth += 1
pixel = layers[depth][index]
imagedata.append(pixel)
def get_image(width, height, imagedata):
output = ''
for i, pixel in enumerate(imagedata):
if i > 0 and i % width == 0:
output += '\n'
output += COLORS[pixel]
return output
print(get_image(WIDTH, HEIGHT, imagedata))
| def get_layers(width, height, data):
layers = []
layer_area = width * height
data = [pixel for pixel in str(data)]
data.remove('\n')
while len(data) > 0:
layers.append(data[:layer_area])
data = data[layer_area:]
return layers
width = 25
height = 6
layers = get_layers(width, height, open('input').read())
fewest_zero_layer = min(layers, key=lambda layer: layer.count('0'))
print('Part 1 solution: %i' % (fewest_zero_layer.count('1') * fewest_zero_layer.count('2')))
black = '0'
white = '1'
trans = '2'
colors = {white: u'█', black: ' ', trans: None}
imagedata = []
for (index, pixel) in enumerate(layers[0]):
depth = 0
while pixel == trans:
depth += 1
pixel = layers[depth][index]
imagedata.append(pixel)
def get_image(width, height, imagedata):
output = ''
for (i, pixel) in enumerate(imagedata):
if i > 0 and i % width == 0:
output += '\n'
output += colors[pixel]
return output
print(get_image(width, height, imagedata))
# =============================================================================
# Author: Teerapat Jenrungrot - https://github.com/mjenrungrot/
# FileName: 10494.py
# Description: UVa Online Judge - 10494
# =============================================================================
while True:
try:
line = input()
except EOFError:
break
if "/" in line:
a, b = list(map(int, line.split("/")))
print(a // b)
else:
a, b = list(map(int, line.split("%")))
print(a % b)
| while True:
try:
line = input()
except EOFError:
break
if '/' in line:
(a, b) = list(map(int, line.split('/')))
print(a // b)
else:
(a, b) = list(map(int, line.split('%')))
print(a % b) |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def __init__(self):
self.tree_node = []
self.res = 0
def pathSum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: int
"""
self.traversal(root)
for i in self.tree_node:
self.res += self._pathSum(i, sum)
return self.res
def traversal(self, root):
if root:
self.traversal(root.left)
self.tree_node.append(root)
self.traversal(root.right)
def _pathSum(self, root, sum):
if not root:
return 0
if root.val == sum:
return 1+self._pathSum(root.left, 0)+self._pathSum(root.right, 0)
return self._pathSum(root.left, sum-root.val)+self._pathSum(root.right, sum-root.val)
| class Solution:
def __init__(self):
self.tree_node = []
self.res = 0
def path_sum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: int
"""
self.traversal(root)
for i in self.tree_node:
self.res += self._path_sum(i, sum)
return self.res
def traversal(self, root):
if root:
self.traversal(root.left)
self.tree_node.append(root)
self.traversal(root.right)
def _path_sum(self, root, sum):
if not root:
return 0
if root.val == sum:
return 1 + self._path_sum(root.left, 0) + self._path_sum(root.right, 0)
return self._path_sum(root.left, sum - root.val) + self._path_sum(root.right, sum - root.val)
MAX_VAL = 2**31+1
def main():
n_ints = int(input())
ints = [int(x) for x in input().split()]
# Find all potential pivots by iterating from the left side.
highest = ints[0]
potential_pivots = set()
for i in ints:
if i >= highest:
potential_pivots.add(i)
highest = i
# Confirm pivots by iterating from right side.
pivot_count = 0
lowest = MAX_VAL
for i in ints[::-1]:
if i < lowest:
if i in potential_pivots:
pivot_count += 1
lowest = i
print(pivot_count)
if __name__ == '__main__':
main()
| max_val = 2 ** 31 + 1
def main():
n_ints = int(input())
ints = [int(x) for x in input().split()]
highest = ints[0]
potential_pivots = set()
for i in ints:
if i >= highest:
potential_pivots.add(i)
highest = i
pivot_count = 0
lowest = max_val
for i in ints[::-1]:
if i < lowest:
if i in potential_pivots:
pivot_count += 1
lowest = i
print(pivot_count)
if __name__ == '__main__':
main() |
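A worked example of the two-pass idea described in the comments (input chosen for illustration): for the array [2, 1, 3, 5, 4, 6], the left-to-right pass collects {2, 3, 5, 6} as potential pivots, and the right-to-left pass confirms only 3 and 6, since each of those is smaller than every value to its right (vacuously so for the last element), so the program prints 2.

# stdin:
# 6
# 2 1 3 5 4 6
# stdout:
# 2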
""" Data structures """
class SymmetryData:
def __init__(self):
self.number_of_species = -1
self.structure = None
self.space_group = -1
self.point_group = -1
| """ Data structures """
class Symmetrydata:
def __init__(self):
self.number_of_species = -1
self.structure = None
self.space_group = -1
self.point_group = -1 |
one = {
'r': {
'__type__': 'tf.truncated_normal',
'__pre__': [],
'dtype': 'tf.float32',
'shape': [2, 2],
'name': '\'r\''
},
'w': {
'__type__': 'tf.Variable',
'__pre__': ['r'],
'dtype': 'tf.float32',
'initial_value': 'r',
'name': '\'w\''
},
'x': {
'__type__': 'tf.placeholder',
'__pre__': [],
'dtype': 'tf.float32',
'shape': [1, 2],
'name': '\'x\''
},
'dot': {
'__type__': 'tf.matmul',
'__pre__': ['w', 'x'],
'a': 'x',
'b': 'w',
'name': '\'dot\''
},
'y': {
'__type__': 'tf.placeholder',
'__pre__': [],
'dtype': 'tf.float32',
'shape': [1, 2],
'name': '\'y\''
},
'add': {
'__type__': 'tf.add',
'__pre__': ['dot', 'y'],
'x': 'dot',
'y': 'y',
'name': '\'add\''
}
}
| one = {'r': {'__type__': 'tf.truncated_normal', '__pre__': [], 'dtype': 'tf.float32', 'shape': [2, 2], 'name': "'r'"}, 'w': {'__type__': 'tf.Variable', '__pre__': ['r'], 'dtype': 'tf.float32', 'initial_value': 'r', 'name': "'w'"}, 'x': {'__type__': 'tf.placeholder', '__pre__': [], 'dtype': 'tf.float32', 'shape': [1, 2], 'name': "'x'"}, 'dot': {'__type__': 'tf.matmul', '__pre__': ['w', 'x'], 'a': 'x', 'b': 'w', 'name': "'dot'"}, 'y': {'__type__': 'tf.placeholder', '__pre__': [], 'dtype': 'tf.float32', 'shape': [1, 2], 'name': "'y'"}, 'add': {'__type__': 'tf.add', '__pre__': ['dot', 'y'], 'x': 'dot', 'y': 'y', 'name': "'add'"}} |
"""Constants for pypresseportal."""
MEDIA_TYPES = ("image", "document", "audio", "video")
PUBLIC_SERVICE_MEDIA_TYPES = ("image", "document")
RESSORTS = ("wirtschaft", "politik", "sport", "kultur", "vermischtes", "finanzen")
SECTORS = (
"arbeit",
"auto",
"banken",
"bauwesen",
"bildung",
"celebrities",
"chemie",
"computer",
"energie",
"fernsehen",
"fussball",
"gesundheit",
"handel",
"immobilien",
"kinder",
"lebensmittel",
"lifestyle",
"logistik",
"maschinenbau",
"medien",
"motorsport",
"presseschau",
"ratgeber",
"recht",
"soziales",
"telekommunikation",
"touristik",
"umwelt",
"unterhaltung",
"versicherungen",
"wissenschaft",
)
INVESTOR_RELATIONS_NEWS_TYPES = (
"all",
"adhoc",
"vote",
"nvr",
"dd",
"news",
"tip",
"report",
"wpueg",
"info",
"ers",
)
PUBLIC_SERVICE_REGIONS = (
"hh",
"sh",
"he",
"sl",
"bw",
"ni",
"bb",
"nrw",
"st",
"by",
"sn",
"rp",
"hb",
"mv",
"th",
)
TOPICS = (
"auto-verkehr",
"bau-immobilien",
"fashion-beauty",
"finanzen",
"gesundheit-medizin",
"handel",
"medien-kultur",
"netzwelt",
"panorama",
"people",
"politik",
"presseschau",
"soziales",
"sport",
"tourismus-urlaub",
"umwelt",
"wirtschaft",
"wissen-bildung",
)
KEYWORDS = (
"agrar",
"alternativeenergie",
"arbeit",
"armut",
"arzneimittel",
"atomenergie",
"aussenpolitik",
"auto",
"bahn",
"banken",
"bau",
"behinderte",
"bekleidung",
"bildung",
"boerse",
"buecher",
"bundesliga",
"bundesregierung",
"bundeswehr",
"chemie",
"computer",
"ecommerce",
"energie",
"erdbeben",
"familie",
"fernsehen",
"film",
"finanzen",
"fluechtlinge",
"formel1",
"freizeit",
"fussball",
"gas",
"gesellschaft",
"gesundheit",
"getraenke",
"gewerkschaften",
"globalisierung",
"golf",
"handball",
"handel",
"historisches",
"hunger",
"immobilien",
"industrie",
"innenpolitik",
"internet",
"jugendkriminalitaet",
"jugendlicher",
"justiz",
"katastrophe",
"kinder",
"kleidung",
"klimaveraenderung",
"konflikte",
"konjunktur",
"konsumgueter",
"kosmetik",
"krankenhaus",
"krankenversicherung",
"krieg",
"kriminalitaet",
"kultur",
"leichtathletik",
"celebrities",
"lifestyle",
"luftverkehr",
"luxusgueter",
"maschinenbau",
"medien",
"medizin",
"menschenrechte",
"mode",
"motorsport",
"musik",
"nahrungsmittel",
"naturschutz",
"oel",
"olympia",
"papier",
"partei",
"personalien",
"pharmaindustrie",
"politik",
"presseschau",
"radsport",
"ratgeber",
"religion",
"rente",
"schiffbau",
"schifffahrt",
"schule",
"senior",
"soziales",
"sport",
"steuern",
"strom",
"tabak",
"telekommunikation",
"tennis",
"textil",
"tier",
"tourismus",
"transport",
"umwelt",
"unterhaltung",
"verbraucher",
"verkehr",
"verlag",
"vermischtes",
"verpackung",
"versandhandel",
"versicherung",
"wahlen",
"weltmeisterschaft",
"werbung",
"wirtschaft",
"wissenschaft",
)
| """Constants for pypresseportal."""
media_types = ('image', 'document', 'audio', 'video')
public_service_media_types = ('image', 'document')
ressorts = ('wirtschaft', 'politik', 'sport', 'kultur', 'vermischtes', 'finanzen')
sectors = ('arbeit', 'auto', 'banken', 'bauwesen', 'bildung', 'celebrities', 'chemie', 'computer', 'energie', 'fernsehen', 'fussball', 'gesundheit', 'handel', 'immobilien', 'kinder', 'lebensmittel', 'lifestyle', 'logistik', 'maschinenbau', 'medien', 'motorsport', 'presseschau', 'ratgeber', 'recht', 'soziales', 'telekommunikation', 'touristik', 'umwelt', 'unterhaltung', 'versicherungen', 'wissenschaft')
investor_relations_news_types = ('all', 'adhoc', 'vote', 'nvr', 'dd', 'news', 'tip', 'report', 'wpueg', 'info', 'ers')
public_service_regions = ('hh', 'sh', 'he', 'sl', 'bw', 'ni', 'bb', 'nrw', 'st', 'by', 'sn', 'rp', 'hb', 'mv', 'th')
topics = ('auto-verkehr', 'bau-immobilien', 'fashion-beauty', 'finanzen', 'gesundheit-medizin', 'handel', 'medien-kultur', 'netzwelt', 'panorama', 'people', 'politik', 'presseschau', 'soziales', 'sport', 'tourismus-urlaub', 'umwelt', 'wirtschaft', 'wissen-bildung')
keywords = ('agrar', 'alternativeenergie', 'arbeit', 'armut', 'arzneimittel', 'atomenergie', 'aussenpolitik', 'auto', 'bahn', 'banken', 'bau', 'behinderte', 'bekleidung', 'bildung', 'boerse', 'buecher', 'bundesliga', 'bundesregierung', 'bundeswehr', 'chemie', 'computer', 'ecommerce', 'energie', 'erdbeben', 'familie', 'fernsehen', 'film', 'finanzen', 'fluechtlinge', 'formel1', 'freizeit', 'fussball', 'gas', 'gesellschaft', 'gesundheit', 'getraenke', 'gewerkschaften', 'globalisierung', 'golf', 'handball', 'handel', 'historisches', 'hunger', 'immobilien', 'industrie', 'innenpolitik', 'internet', 'jugendkriminalitaet', 'jugendlicher', 'justiz', 'katastrophe', 'kinder', 'kleidung', 'klimaveraenderung', 'konflikte', 'konjunktur', 'konsumgueter', 'kosmetik', 'krankenhaus', 'krankenversicherung', 'krieg', 'kriminalitaet', 'kultur', 'leichtathletik', 'celebrities', 'lifestyle', 'luftverkehr', 'luxusgueter', 'maschinenbau', 'medien', 'medizin', 'menschenrechte', 'mode', 'motorsport', 'musik', 'nahrungsmittel', 'naturschutz', 'oel', 'olympia', 'papier', 'partei', 'personalien', 'pharmaindustrie', 'politik', 'presseschau', 'radsport', 'ratgeber', 'religion', 'rente', 'schiffbau', 'schifffahrt', 'schule', 'senior', 'soziales', 'sport', 'steuern', 'strom', 'tabak', 'telekommunikation', 'tennis', 'textil', 'tier', 'tourismus', 'transport', 'umwelt', 'unterhaltung', 'verbraucher', 'verkehr', 'verlag', 'vermischtes', 'verpackung', 'versandhandel', 'versicherung', 'wahlen', 'weltmeisterschaft', 'werbung', 'wirtschaft', 'wissenschaft') |
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: zkinterface
class Message(object):
NONE = 0
Circuit = 1
R1CSConstraints = 2
Witness = 3
| class Message(object):
none = 0
circuit = 1
r1_cs_constraints = 2
witness = 3 |
"""
A set of PyQt distutils extensions for build qt ui files in a pythonic way:
- build_ui: build qt ui/qrc files
"""
__version__ = '0.7.3'
| """
A set of PyQt distutils extensions for build qt ui files in a pythonic way:
- build_ui: build qt ui/qrc files
"""
__version__ = '0.7.3' |
class Dawg (object):
def __init__(self, digraphs=[]):
self.root = self.index = 0
self.digraphs = digraphs
self.graph = {self.root: []}
self.accepts = {}
def tokenize(self, word):
return self._rtokenize(word, [])
def insert(self, word):
self._rinsert(self.root, self.tokenize(word), word)
def words(self):
return self.accepts.values()
def node(self, word):
return self._rnode(self.root, self.tokenize(word))
def pivot_search(self, substring):
results = []
tokens = self.tokenize(substring)
# Check if this node is the start of the substring.
pivot = tokens.index('.')
matches = self._rmatch_string(self.root, self.tokenize(substring), [])
for m in matches:
results += [self.tokenize(m)[pivot]]
return results
def _rtokenize(self, word, tokens):
if len(word) == 0:
return tokens
digraph = [dg for dg in self.digraphs if word.startswith(dg)]
if len(digraph) > 0:
return self._rtokenize(word[2:], tokens + [digraph[0]])
else:
return self._rtokenize(word[1:], tokens + [word[0]])
def _rinsert(self, node, word, orig):
try:
# Unzip the edge values (letters) and the edge indices (targets).
if len(self.graph[node]) > 0:
letters, targets = map(lambda x: list(x), zip(*self.graph[node]))
else:
letters = targets = []
if word[0] in letters:
# If this edge already exists in the graph, recurse to it's target
self._rinsert(targets[letters.index(word[0])], word[1:], orig)
else:
# If the edge doesn't already exist in the graph, create the edge
self.index += 1
self.graph[node].append((word[0],self.index))
self.graph[self.index] = []
# Move to the next letter
self._rinsert(self.index, word[1:], orig)
except IndexError:
# Set this node to an accepting node once the whole word is inserted.
if node not in self.accepts:
self.accepts[node] = orig
def _rmatch_string(self, node, tokens, results):
if len(tokens) == 0:
if node in self.accepts:
results += [self.accepts[node]]
return results
letter = tokens[0]
for e in self.graph[node]:
if letter == e[0] or '.' == letter:
results = self._rmatch_string(e[1], tokens[1:], results)
return results
def _rnode(self, node, word):
if len(word) == 0:
return node
for n in self.graph[node]:
if n[0] == word[0]:
return self._rnode(n[1], word[1:])
return None
| class Dawg(object):
def __init__(self, digraphs=[]):
self.root = self.index = 0
self.digraphs = digraphs
self.graph = {self.root: []}
self.accepts = {}
def tokenize(self, word):
return self._rtokenize(word, [])
def insert(self, word):
self._rinsert(self.root, self.tokenize(word), word)
def words(self):
return self.accepts.values()
def node(self, word):
return self._rnode(self.root, self.tokenize(word))
def pivot_search(self, substring):
results = []
tokens = self.tokenize(substring)
pivot = tokens.index('.')
matches = self._rmatch_string(self.root, self.tokenize(substring), [])
for m in matches:
results += [self.tokenize(m)[pivot]]
return results
def _rtokenize(self, word, tokens):
if len(word) == 0:
return tokens
digraph = [dg for dg in self.digraphs if word.startswith(dg)]
if len(digraph) > 0:
return self._rtokenize(word[2:], tokens + [digraph[0]])
else:
return self._rtokenize(word[1:], tokens + [word[0]])
def _rinsert(self, node, word, orig):
try:
if len(self.graph[node]) > 0:
(letters, targets) = map(lambda x: list(x), zip(*self.graph[node]))
else:
letters = targets = []
if word[0] in letters:
self._rinsert(targets[letters.index(word[0])], word[1:], orig)
else:
self.index += 1
self.graph[node].append((word[0], self.index))
self.graph[self.index] = []
self._rinsert(self.index, word[1:], orig)
except IndexError:
if node not in self.accepts:
self.accepts[node] = orig
def _rmatch_string(self, node, tokens, results):
if len(tokens) == 0:
if node in self.accepts:
results += [self.accepts[node]]
return results
letter = tokens[0]
for e in self.graph[node]:
if letter == e[0] or '.' == letter:
results = self._rmatch_string(e[1], tokens[1:], results)
return results
def _rnode(self, node, word):
if len(word) == 0:
return node
for n in self.graph[node]:
if n[0] == word[0]:
return self._rnode(n[1], word[1:])
return None |
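A short usage sketch of the class above (the example words are chosen purely for illustration):

dawg = Dawg()
for word in ('cat', 'cot', 'cut'):
    dawg.insert(word)
# '.' marks the pivot position; pivot_search returns the tokens that can fill it
print(dawg.pivot_search('c.t'))  # ['a', 'o', 'u']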
#!/usr/bin/env python3
#
#Copyright 2022 Kurt R. Brorsen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def hf_energy(eri_ee_full, eri_ep_full, mo_fock_1e, mo_fock_1p, e_nocc, p_nocc):
e_hf = 0.0
for i in range(e_nocc):
e_hf += 2*mo_fock_1e[i,i]
for j in range(e_nocc):
e_hf += (2*eri_ee_full[i,i,j,j] - eri_ee_full[i,j,i,j])
# need to subtract this due to double counting by using ee and pp fock matrices
for j in range(p_nocc):
e_hf -= 2*eri_ep_full[i,i,j,j]
for i in range(p_nocc):
e_hf += mo_fock_1p[i,i]
return e_hf
| def hf_energy(eri_ee_full, eri_ep_full, mo_fock_1e, mo_fock_1p, e_nocc, p_nocc):
e_hf = 0.0
for i in range(e_nocc):
e_hf += 2 * mo_fock_1e[i, i]
for j in range(e_nocc):
e_hf += 2 * eri_ee_full[i, i, j, j] - eri_ee_full[i, j, i, j]
for j in range(p_nocc):
e_hf -= 2 * eri_ep_full[i, i, j, j]
for i in range(p_nocc):
e_hf += mo_fock_1p[i, i]
return e_hf |
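Written out, the loops above evaluate (with i, j running over occupied electronic orbitals, I, J over occupied protonic orbitals, and chemists' notation assumed for the two-particle integrals):

E_{HF} = \sum_i 2 f^{e}_{ii} + \sum_{i,j} \left[ 2(ii|jj) - (ij|ij) \right] - 2 \sum_{i}\sum_{J} (ii|JJ)_{ep} + \sum_I f^{p}_{II}

where the mixed electron-proton term is subtracted once because, as the comment notes, it would otherwise be counted in both one-particle Fock matrices.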
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def batch_get_named_query(NamedQueryIds=None):
"""
Returns the details of a single named query or a list of up to 50 queries, which you provide as an array of query ID strings. Requires you to have access to the workgroup in which the queries were saved. Use ListNamedQueriesInput to get the list of named query IDs in the specified workgroup. If information could not be retrieved for a submitted query ID, information about the query ID submitted is listed under UnprocessedNamedQueryId . Named queries differ from executed queries. Use BatchGetQueryExecutionInput to get details about each unique query execution, and ListQueryExecutionsInput to get a list of query execution IDs.
See also: AWS API Documentation
Exceptions
:example: response = client.batch_get_named_query(
NamedQueryIds=[
'string',
]
)
:type NamedQueryIds: list
:param NamedQueryIds: [REQUIRED]\nAn array of query IDs.\n\n(string) --\n\n
:rtype: dict
ReturnsResponse Syntax{
'NamedQueries': [
{
'Name': 'string',
'Description': 'string',
'Database': 'string',
'QueryString': 'string',
'NamedQueryId': 'string',
'WorkGroup': 'string'
},
],
'UnprocessedNamedQueryIds': [
{
'NamedQueryId': 'string',
'ErrorCode': 'string',
'ErrorMessage': 'string'
},
]
}
Response Structure
(dict) --
NamedQueries (list) --Information about the named query IDs submitted.
(dict) --A query, where QueryString is the list of SQL query statements that comprise the query.
Name (string) --The query name.
Description (string) --The query description.
Database (string) --The database to which the query belongs.
QueryString (string) --The SQL query statements that comprise the query.
NamedQueryId (string) --The unique identifier of the query.
WorkGroup (string) --The name of the workgroup that contains the named query.
UnprocessedNamedQueryIds (list) --Information about provided query IDs.
(dict) --Information about a named query ID that could not be processed.
NamedQueryId (string) --The unique identifier of the named query.
ErrorCode (string) --The error code returned when the processing request for the named query failed, if applicable.
ErrorMessage (string) --The error message returned when the processing request for the named query failed, if applicable.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'NamedQueries': [
{
'Name': 'string',
'Description': 'string',
'Database': 'string',
'QueryString': 'string',
'NamedQueryId': 'string',
'WorkGroup': 'string'
},
],
'UnprocessedNamedQueryIds': [
{
'NamedQueryId': 'string',
'ErrorCode': 'string',
'ErrorMessage': 'string'
},
]
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
def batch_get_query_execution(QueryExecutionIds=None):
"""
Returns the details of a single query execution or a list of up to 50 query executions, which you provide as an array of query execution ID strings. Requires you to have access to the workgroup in which the queries ran. To get a list of query execution IDs, use ListQueryExecutionsInput$WorkGroup . Query executions differ from named (saved) queries. Use BatchGetNamedQueryInput to get details about named queries.
See also: AWS API Documentation
Exceptions
:example: response = client.batch_get_query_execution(
QueryExecutionIds=[
'string',
]
)
:type QueryExecutionIds: list
:param QueryExecutionIds: [REQUIRED]\nAn array of query execution IDs.\n\n(string) --\n\n
:rtype: dict
ReturnsResponse Syntax{
'QueryExecutions': [
{
'QueryExecutionId': 'string',
'Query': 'string',
'StatementType': 'DDL'|'DML'|'UTILITY',
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'QueryExecutionContext': {
'Database': 'string'
},
'Status': {
'State': 'QUEUED'|'RUNNING'|'SUCCEEDED'|'FAILED'|'CANCELLED',
'StateChangeReason': 'string',
'SubmissionDateTime': datetime(2015, 1, 1),
'CompletionDateTime': datetime(2015, 1, 1)
},
'Statistics': {
'EngineExecutionTimeInMillis': 123,
'DataScannedInBytes': 123,
'DataManifestLocation': 'string',
'TotalExecutionTimeInMillis': 123,
'QueryQueueTimeInMillis': 123,
'QueryPlanningTimeInMillis': 123,
'ServiceProcessingTimeInMillis': 123
},
'WorkGroup': 'string'
},
],
'UnprocessedQueryExecutionIds': [
{
'QueryExecutionId': 'string',
'ErrorCode': 'string',
'ErrorMessage': 'string'
},
]
}
Response Structure
(dict) --
QueryExecutions (list) --Information about a query execution.
(dict) --Information about a single instance of a query execution.
QueryExecutionId (string) --The unique identifier for each query execution.
Query (string) --The SQL query statements which the query execution ran.
StatementType (string) --The type of query statement that was run. DDL indicates DDL query statements. DML indicates DML (Data Manipulation Language) query statements, such as CREATE TABLE AS SELECT . UTILITY indicates query statements other than DDL and DML, such as SHOW CREATE TABLE , or DESCRIBE <table> .
ResultConfiguration (dict) --The location in Amazon S3 where query results were stored and the encryption option, if any, used for query results. These are known as "client-side settings". If workgroup settings override client-side settings, then the query uses the location for the query results and the encryption configuration that are specified for the workgroup.
OutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . To run the query, you must specify the query results location using one of the ways: either for individual queries using either this setting (client-side), or in the workgroup, using WorkGroupConfiguration . If none of them is set, Athena issues an error that no output location is provided. For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
EncryptionConfiguration (dict) --If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS ) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings .
EncryptionOption (string) --Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.
If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup\'s setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
KmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.
QueryExecutionContext (dict) --The database in which the query execution occurred.
Database (string) --The name of the database.
Status (dict) --The completion date, current state, submission time, and state change reason (if applicable) for the query execution.
State (string) --The state of query execution. QUEUED indicates that the query has been submitted to the service, and Athena will execute the query as soon as resources are available. RUNNING indicates that the query is in execution phase. SUCCEEDED indicates that the query completed without errors. FAILED indicates that the query experienced an error and did not complete processing. CANCELLED indicates that a user input interrupted query execution.
StateChangeReason (string) --Further detail about the status of the query.
SubmissionDateTime (datetime) --The date and time that the query was submitted.
CompletionDateTime (datetime) --The date and time that the query completed.
Statistics (dict) --Query execution statistics, such as the amount of data scanned, the amount of time that the query took to process, and the type of statement that was run.
EngineExecutionTimeInMillis (integer) --The number of milliseconds that the query took to execute.
DataScannedInBytes (integer) --The number of bytes in the data that was queried.
DataManifestLocation (string) --The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. The manifest file tracks files that the query wrote to Amazon S3. If the query fails, the manifest file also tracks files that the query intended to write. The manifest is useful for identifying orphaned files resulting from a failed query. For more information, see Working with Query Results, Output Files, and Query History in the Amazon Athena User Guide .
TotalExecutionTimeInMillis (integer) --The number of milliseconds that Athena took to run the query.
QueryQueueTimeInMillis (integer) --The number of milliseconds that the query was in your query queue waiting for resources. Note that if transient errors occur, Athena might automatically add the query back to the queue.
QueryPlanningTimeInMillis (integer) --The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. Note that because the query engine performs the query planning, query planning time is a subset of engine processing time.
ServiceProcessingTimeInMillis (integer) --The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query.
WorkGroup (string) --The name of the workgroup in which the query ran.
UnprocessedQueryExecutionIds (list) --Information about the query executions that failed to run.
(dict) --Describes a query execution that failed to process.
QueryExecutionId (string) --The unique identifier of the query execution.
ErrorCode (string) --The error code returned when the query execution failed to process, if applicable.
ErrorMessage (string) --The error message returned when the query execution failed to process, if applicable.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'QueryExecutions': [
{
'QueryExecutionId': 'string',
'Query': 'string',
'StatementType': 'DDL'|'DML'|'UTILITY',
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'QueryExecutionContext': {
'Database': 'string'
},
'Status': {
'State': 'QUEUED'|'RUNNING'|'SUCCEEDED'|'FAILED'|'CANCELLED',
'StateChangeReason': 'string',
'SubmissionDateTime': datetime(2015, 1, 1),
'CompletionDateTime': datetime(2015, 1, 1)
},
'Statistics': {
'EngineExecutionTimeInMillis': 123,
'DataScannedInBytes': 123,
'DataManifestLocation': 'string',
'TotalExecutionTimeInMillis': 123,
'QueryQueueTimeInMillis': 123,
'QueryPlanningTimeInMillis': 123,
'ServiceProcessingTimeInMillis': 123
},
'WorkGroup': 'string'
},
],
'UnprocessedQueryExecutionIds': [
{
'QueryExecutionId': 'string',
'ErrorCode': 'string',
'ErrorMessage': 'string'
},
]
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
def can_paginate(operation_name=None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
"""
pass
def create_named_query(Name=None, Description=None, Database=None, QueryString=None, ClientRequestToken=None, WorkGroup=None):
"""
Creates a named query in the specified workgroup. Requires that you have access to the workgroup.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.create_named_query(
Name='string',
Description='string',
Database='string',
QueryString='string',
ClientRequestToken='string',
WorkGroup='string'
)
:type Name: string
:param Name: [REQUIRED]\nThe query name.\n
:type Description: string
:param Description: The query description.
:type Database: string
:param Database: [REQUIRED]\nThe database to which the query belongs.\n
:type QueryString: string
:param QueryString: [REQUIRED]\nThe contents of the query with all query statements.\n
:type ClientRequestToken: string
:param ClientRequestToken: A unique case-sensitive string used to ensure the request to create the query is idempotent (executes only once). If another CreateNamedQuery request is received, the same response is returned and another query is not created. If a parameter has changed, for example, the QueryString , an error is returned.\n\nWarning\nThis token is listed as not required because AWS SDKs (for example the AWS SDK for Java) auto-generate the token for users. If you are not using the AWS SDK or the AWS CLI, you must provide this token or the action will fail.\n\nThis field is autopopulated if not provided.\n
:type WorkGroup: string
:param WorkGroup: The name of the workgroup in which the named query is being created.
:rtype: dict
ReturnsResponse Syntax
{
'NamedQueryId': 'string'
}
Response Structure
(dict) --
NamedQueryId (string) --
The unique ID of the query.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'NamedQueryId': 'string'
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
def create_work_group(Name=None, Configuration=None, Description=None, Tags=None):
"""
Creates a workgroup with the specified name.
See also: AWS API Documentation
Exceptions
:example: response = client.create_work_group(
Name='string',
Configuration={
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'EnforceWorkGroupConfiguration': True|False,
'PublishCloudWatchMetricsEnabled': True|False,
'BytesScannedCutoffPerQuery': 123,
'RequesterPaysEnabled': True|False
},
Description='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
]
)
:type Name: string
:param Name: [REQUIRED]\nThe workgroup name.\n
:type Configuration: dict
:param Configuration: The configuration for the workgroup, which includes the location in Amazon S3 where query results are stored, the encryption configuration, if any, used for encrypting query results, whether the Amazon CloudWatch Metrics are enabled for the workgroup, the limit for the amount of bytes scanned (cutoff) per query, if it is specified, and whether workgroup\'s settings (specified with EnforceWorkGroupConfiguration) in the WorkGroupConfiguration override client-side settings. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .\n\nResultConfiguration (dict) --The configuration for the workgroup, which includes the location in Amazon S3 where query results are stored and the encryption option, if any, used for query results. To run the query, you must specify the query results location using one of the ways: either in the workgroup using this setting, or for individual queries (client-side), using ResultConfiguration$OutputLocation . If none of them is set, Athena issues an error that no output location is provided. For more information, see Query Results .\n\nOutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . To run the query, you must specify the query results location using one of the ways: either for individual queries using either this setting (client-side), or in the workgroup, using WorkGroupConfiguration . If none of them is set, Athena issues an error that no output location is provided. For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .\n\nEncryptionConfiguration (dict) --If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS ) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings .\n\nEncryptionOption (string) -- [REQUIRED]Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.\nIf a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup\'s setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.\n\nKmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.\n\n\n\n\n\nEnforceWorkGroupConfiguration (boolean) --If set to 'true', the settings for the workgroup override client-side settings. If set to 'false', client-side settings are used. For more information, see Workgroup Settings Override Client-Side Settings .\n\nPublishCloudWatchMetricsEnabled (boolean) --Indicates that the Amazon CloudWatch metrics are enabled for the workgroup.\n\nBytesScannedCutoffPerQuery (integer) --The upper data usage limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan.\n\nRequesterPaysEnabled (boolean) --If set to true , allows members assigned to a workgroup to reference Amazon S3 Requester Pays buckets in queries. 
If set to false , workgroup members cannot query data from Requester Pays buckets, and queries that retrieve data from Requester Pays buckets cause an error. The default is false . For more information about Requester Pays buckets, see Requester Pays Buckets in the Amazon Simple Storage Service Developer Guide .\n\n\n
:type Description: string
:param Description: The workgroup description.
:type Tags: list
:param Tags: One or more tags, separated by commas, that you want to attach to the workgroup as you create it.\n\n(dict) --A tag that you can add to a resource. A tag is a label that you assign to an AWS Athena resource (a workgroup). Each tag consists of a key and an optional value, both of which you define. Tags enable you to categorize workgroups in Athena, for example, by purpose, owner, or environment. Use a consistent set of tag keys to make it easier to search and filter workgroups in your account. The maximum tag key length is 128 Unicode characters in UTF-8. The maximum tag value length is 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys and values are case-sensitive. Tag keys must be unique per resource.\n\nKey (string) --A tag key. The tag key length is from 1 to 128 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys are case-sensitive and must be unique per resource.\n\nValue (string) --A tag value. The tag value length is from 0 to 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag values are case-sensitive.\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {}
:returns:
(dict) --
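A minimal usage sketch, assuming a boto3 Athena client and an S3 bucket you own for query results (the workgroup and bucket names are placeholders):
    import boto3
    client = boto3.client('athena')
    client.create_work_group(
        Name='analytics',
        Configuration={
            'ResultConfiguration': {'OutputLocation': 's3://my-athena-results-bucket/'},
            'EnforceWorkGroupConfiguration': True,
            'PublishCloudWatchMetricsEnabled': True,
            'BytesScannedCutoffPerQuery': 10 * 1024 * 1024 * 1024  # cap each query at 10 GB scanned
        },
        Description='Workgroup for ad hoc analytics queries',
        Tags=[{'Key': 'team', 'Value': 'data'}]
    )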
"""
pass
def delete_named_query(NamedQueryId=None):
"""
Deletes the named query if you have access to the workgroup in which the query was saved.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.delete_named_query(
NamedQueryId='string'
)
:type NamedQueryId: string
:param NamedQueryId: [REQUIRED]\nThe unique ID of the query to delete.\nThis field is autopopulated if not provided.\n
:rtype: dict
ReturnsResponse Syntax{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
def delete_work_group(WorkGroup=None, RecursiveDeleteOption=None):
"""
Deletes the workgroup with the specified name. The primary workgroup cannot be deleted.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_work_group(
WorkGroup='string',
RecursiveDeleteOption=True|False
)
:type WorkGroup: string
:param WorkGroup: [REQUIRED]\nThe unique name of the workgroup to delete.\n
:type RecursiveDeleteOption: boolean
:param RecursiveDeleteOption: The option to delete the workgroup and its contents even if the workgroup contains any named queries.
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {}
:returns:
(dict) --
"""
pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to\nClientMethod.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid\nfor. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By\ndefault, the http method is whatever is used in the method\'s model.
"""
pass
def get_named_query(NamedQueryId=None):
"""
Returns information about a single query. Requires that you have access to the workgroup in which the query was saved.
See also: AWS API Documentation
Exceptions
:example: response = client.get_named_query(
NamedQueryId='string'
)
:type NamedQueryId: string
:param NamedQueryId: [REQUIRED]\nThe unique ID of the query. Use ListNamedQueries to get query IDs.\n
:rtype: dict
ReturnsResponse Syntax{
'NamedQuery': {
'Name': 'string',
'Description': 'string',
'Database': 'string',
'QueryString': 'string',
'NamedQueryId': 'string',
'WorkGroup': 'string'
}
}
Response Structure
(dict) --
NamedQuery (dict) --Information about the query.
Name (string) --The query name.
Description (string) --The query description.
Database (string) --The database to which the query belongs.
QueryString (string) --The SQL query statements that comprise the query.
NamedQueryId (string) --The unique identifier of the query.
WorkGroup (string) --The name of the workgroup that contains the named query.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'NamedQuery': {
'Name': 'string',
'Description': 'string',
'Database': 'string',
'QueryString': 'string',
'NamedQueryId': 'string',
'WorkGroup': 'string'
}
}
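A minimal usage sketch, assuming a boto3 Athena client and at least one saved named query that you can access:
    import boto3
    client = boto3.client('athena')
    query_id = client.list_named_queries(MaxResults=1)['NamedQueryIds'][0]
    named = client.get_named_query(NamedQueryId=query_id)['NamedQuery']
    print(named['Name'], named['Database'], named['QueryString'])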
"""
pass
def get_paginator(operation_name=None):
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
:rtype: L{botocore.paginate.Paginator}
ReturnsA paginator object.
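A minimal usage sketch, assuming a boto3 Athena client; the paginator follows NextToken across pages for you:
    import boto3
    client = boto3.client('athena')
    paginator = client.get_paginator('list_named_queries')
    for page in paginator.paginate(WorkGroup='primary'):
        for named_query_id in page['NamedQueryIds']:
            print(named_query_id)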
"""
pass
def get_query_execution(QueryExecutionId=None):
"""
Returns information about a single execution of a query if you have access to the workgroup in which the query ran. Each time a query executes, information about the query execution is saved with a unique ID.
See also: AWS API Documentation
Exceptions
:example: response = client.get_query_execution(
QueryExecutionId='string'
)
:type QueryExecutionId: string
:param QueryExecutionId: [REQUIRED]\nThe unique ID of the query execution.\n
:rtype: dict
ReturnsResponse Syntax{
'QueryExecution': {
'QueryExecutionId': 'string',
'Query': 'string',
'StatementType': 'DDL'|'DML'|'UTILITY',
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'QueryExecutionContext': {
'Database': 'string'
},
'Status': {
'State': 'QUEUED'|'RUNNING'|'SUCCEEDED'|'FAILED'|'CANCELLED',
'StateChangeReason': 'string',
'SubmissionDateTime': datetime(2015, 1, 1),
'CompletionDateTime': datetime(2015, 1, 1)
},
'Statistics': {
'EngineExecutionTimeInMillis': 123,
'DataScannedInBytes': 123,
'DataManifestLocation': 'string',
'TotalExecutionTimeInMillis': 123,
'QueryQueueTimeInMillis': 123,
'QueryPlanningTimeInMillis': 123,
'ServiceProcessingTimeInMillis': 123
},
'WorkGroup': 'string'
}
}
Response Structure
(dict) --
QueryExecution (dict) --Information about the query execution.
QueryExecutionId (string) --The unique identifier for each query execution.
Query (string) --The SQL query statements which the query execution ran.
StatementType (string) --The type of query statement that was run. DDL indicates DDL query statements. DML indicates DML (Data Manipulation Language) query statements, such as CREATE TABLE AS SELECT . UTILITY indicates query statements other than DDL and DML, such as SHOW CREATE TABLE , or DESCRIBE <table> .
ResultConfiguration (dict) --The location in Amazon S3 where query results were stored and the encryption option, if any, used for query results. These are known as "client-side settings". If workgroup settings override client-side settings, then the query uses the location for the query results and the encryption configuration that are specified for the workgroup.
OutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . To run the query, you must specify the query results location using one of the ways: either for individual queries using either this setting (client-side), or in the workgroup, using WorkGroupConfiguration . If none of them is set, Athena issues an error that no output location is provided. For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
EncryptionConfiguration (dict) --If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS ) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings .
EncryptionOption (string) --Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.
If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup\'s setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
KmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.
QueryExecutionContext (dict) --The database in which the query execution occurred.
Database (string) --The name of the database.
Status (dict) --The completion date, current state, submission time, and state change reason (if applicable) for the query execution.
State (string) --The state of query execution. QUEUED indicates that the query has been submitted to the service, and Athena will execute the query as soon as resources are available. RUNNING indicates that the query is in execution phase. SUCCEEDED indicates that the query completed without errors. FAILED indicates that the query experienced an error and did not complete processing. CANCELLED indicates that a user input interrupted query execution.
StateChangeReason (string) --Further detail about the status of the query.
SubmissionDateTime (datetime) --The date and time that the query was submitted.
CompletionDateTime (datetime) --The date and time that the query completed.
Statistics (dict) --Query execution statistics, such as the amount of data scanned, the amount of time that the query took to process, and the type of statement that was run.
EngineExecutionTimeInMillis (integer) --The number of milliseconds that the query took to execute.
DataScannedInBytes (integer) --The number of bytes in the data that was queried.
DataManifestLocation (string) --The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. The manifest file tracks files that the query wrote to Amazon S3. If the query fails, the manifest file also tracks files that the query intended to write. The manifest is useful for identifying orphaned files resulting from a failed query. For more information, see Working with Query Results, Output Files, and Query History in the Amazon Athena User Guide .
TotalExecutionTimeInMillis (integer) --The number of milliseconds that Athena took to run the query.
QueryQueueTimeInMillis (integer) --The number of milliseconds that the query was in your query queue waiting for resources. Note that if transient errors occur, Athena might automatically add the query back to the queue.
QueryPlanningTimeInMillis (integer) --The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. Note that because the query engine performs the query planning, query planning time is a subset of engine processing time.
ServiceProcessingTimeInMillis (integer) --The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query.
WorkGroup (string) --The name of the workgroup in which the query ran.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'QueryExecution': {
'QueryExecutionId': 'string',
'Query': 'string',
'StatementType': 'DDL'|'DML'|'UTILITY',
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'QueryExecutionContext': {
'Database': 'string'
},
'Status': {
'State': 'QUEUED'|'RUNNING'|'SUCCEEDED'|'FAILED'|'CANCELLED',
'StateChangeReason': 'string',
'SubmissionDateTime': datetime(2015, 1, 1),
'CompletionDateTime': datetime(2015, 1, 1)
},
'Statistics': {
'EngineExecutionTimeInMillis': 123,
'DataScannedInBytes': 123,
'DataManifestLocation': 'string',
'TotalExecutionTimeInMillis': 123,
'QueryQueueTimeInMillis': 123,
'QueryPlanningTimeInMillis': 123,
'ServiceProcessingTimeInMillis': 123
},
'WorkGroup': 'string'
}
}
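A minimal polling sketch, assuming a boto3 Athena client and a QueryExecutionId returned by start_query_execution; the Athena client has no built-in waiter, so the terminal state is checked in a loop:
    import time
    import boto3
    client = boto3.client('athena')
    def wait_for_query(query_execution_id, poll_seconds=2):
        # Poll until the execution reaches a terminal state.
        while True:
            status = client.get_query_execution(
                QueryExecutionId=query_execution_id
            )['QueryExecution']['Status']
            if status['State'] in ('SUCCEEDED', 'FAILED', 'CANCELLED'):
                return status
            time.sleep(poll_seconds)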
"""
pass
def get_query_results(QueryExecutionId=None, NextToken=None, MaxResults=None):
"""
Streams the results of a single query execution specified by QueryExecutionId from the Athena query results location in Amazon S3. For more information, see Query Results in the Amazon Athena User Guide . This request does not execute the query but returns results. Use StartQueryExecution to run a query.
To stream query results successfully, the IAM principal with permission to call GetQueryResults also must have permissions to the Amazon S3 GetObject action for the Athena query results location.
See also: AWS API Documentation
Exceptions
:example: response = client.get_query_results(
QueryExecutionId='string',
NextToken='string',
MaxResults=123
)
:type QueryExecutionId: string
:param QueryExecutionId: [REQUIRED]\nThe unique ID of the query execution.\n
:type NextToken: string
:param NextToken: The token that specifies where to start pagination if a previous request was truncated.
:type MaxResults: integer
:param MaxResults: The maximum number of results (rows) to return in this request.
:rtype: dict
ReturnsResponse Syntax
{
'UpdateCount': 123,
'ResultSet': {
'Rows': [
{
'Data': [
{
'VarCharValue': 'string'
},
]
},
],
'ResultSetMetadata': {
'ColumnInfo': [
{
'CatalogName': 'string',
'SchemaName': 'string',
'TableName': 'string',
'Name': 'string',
'Label': 'string',
'Type': 'string',
'Precision': 123,
'Scale': 123,
'Nullable': 'NOT_NULL'|'NULLABLE'|'UNKNOWN',
'CaseSensitive': True|False
},
]
}
},
'NextToken': 'string'
}
Response Structure
(dict) --
UpdateCount (integer) --
The number of rows inserted with a CREATE TABLE AS SELECT statement.
ResultSet (dict) --
The results of the query execution.
Rows (list) --
The rows in the table.
(dict) --
The rows that comprise a query result table.
Data (list) --
The data that populates a row in a query result table.
(dict) --
A piece of data (a field in the table).
VarCharValue (string) --
The value of the datum.
ResultSetMetadata (dict) --
The metadata that describes the column structure and data types of a table of query results.
ColumnInfo (list) --
Information about the columns returned in a query result metadata.
(dict) --
Information about the columns in a query execution result.
CatalogName (string) --
The catalog to which the query results belong.
SchemaName (string) --
The schema name (database name) to which the query results belong.
TableName (string) --
The table name for the query results.
Name (string) --
The name of the column.
Label (string) --
A column label.
Type (string) --
The data type of the column.
Precision (integer) --
For DECIMAL data types, specifies the total number of digits, up to 38. For performance reasons, we recommend up to 18 digits.
Scale (integer) --
For DECIMAL data types, specifies the total number of digits in the fractional part of the value. Defaults to 0.
Nullable (string) --
Indicates the column\'s nullable status.
CaseSensitive (boolean) --
Indicates whether values in the column are case-sensitive.
NextToken (string) --
A token to be used by the next request if this request is truncated.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'UpdateCount': 123,
'ResultSet': {
'Rows': [
{
'Data': [
{
'VarCharValue': 'string'
},
]
},
],
'ResultSetMetadata': {
'ColumnInfo': [
{
'CatalogName': 'string',
'SchemaName': 'string',
'TableName': 'string',
'Name': 'string',
'Label': 'string',
'Type': 'string',
'Precision': 123,
'Scale': 123,
'Nullable': 'NOT_NULL'|'NULLABLE'|'UNKNOWN',
'CaseSensitive': True|False
},
]
}
},
'NextToken': 'string'
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
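A minimal usage sketch, assuming a boto3 Athena client and the ID of a SELECT query that has already reached the SUCCEEDED state (for SELECT statements the first returned row holds the column headers):
    import boto3
    client = boto3.client('athena')
    query_execution_id = 'replace-with-a-succeeded-query-execution-id'  # placeholder
    paginator = client.get_paginator('get_query_results')
    rows = []
    for page in paginator.paginate(QueryExecutionId=query_execution_id):
        for row in page['ResultSet']['Rows']:
            rows.append([datum.get('VarCharValue') for datum in row['Data']])
    header, data = rows[0], rows[1:]  # split off the header row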
"""
pass
def get_waiter(waiter_name=None):
"""
Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters\nsection of the service docs for a list of available waiters.
:rtype: botocore.waiter.Waiter
"""
pass
def get_work_group(WorkGroup=None):
"""
Returns information about the workgroup with the specified name.
See also: AWS API Documentation
Exceptions
:example: response = client.get_work_group(
WorkGroup='string'
)
:type WorkGroup: string
:param WorkGroup: [REQUIRED]\nThe name of the workgroup.\n
:rtype: dict
ReturnsResponse Syntax{
'WorkGroup': {
'Name': 'string',
'State': 'ENABLED'|'DISABLED',
'Configuration': {
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'EnforceWorkGroupConfiguration': True|False,
'PublishCloudWatchMetricsEnabled': True|False,
'BytesScannedCutoffPerQuery': 123,
'RequesterPaysEnabled': True|False
},
'Description': 'string',
'CreationTime': datetime(2015, 1, 1)
}
}
Response Structure
(dict) --
WorkGroup (dict) --Information about the workgroup.
Name (string) --The workgroup name.
State (string) --The state of the workgroup: ENABLED or DISABLED.
Configuration (dict) --The configuration of the workgroup, which includes the location in Amazon S3 where query results are stored, the encryption configuration, if any, used for query results; whether the Amazon CloudWatch Metrics are enabled for the workgroup; whether workgroup settings override client-side settings; and the data usage limits for the amount of data scanned per query or per workgroup. The workgroup settings override is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
ResultConfiguration (dict) --The configuration for the workgroup, which includes the location in Amazon S3 where query results are stored and the encryption option, if any, used for query results. To run the query, you must specify the query results location using one of the ways: either in the workgroup using this setting, or for individual queries (client-side), using ResultConfiguration$OutputLocation . If none of them is set, Athena issues an error that no output location is provided. For more information, see Query Results .
OutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . To run the query, you must specify the query results location using one of the ways: either for individual queries using either this setting (client-side), or in the workgroup, using WorkGroupConfiguration . If none of them is set, Athena issues an error that no output location is provided. For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
EncryptionConfiguration (dict) --If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS ) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings .
EncryptionOption (string) --Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.
If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup\'s setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
KmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.
EnforceWorkGroupConfiguration (boolean) --If set to "true", the settings for the workgroup override client-side settings. If set to "false", client-side settings are used. For more information, see Workgroup Settings Override Client-Side Settings .
PublishCloudWatchMetricsEnabled (boolean) --Indicates that the Amazon CloudWatch metrics are enabled for the workgroup.
BytesScannedCutoffPerQuery (integer) --The upper data usage limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan.
RequesterPaysEnabled (boolean) --If set to true , allows members assigned to a workgroup to reference Amazon S3 Requester Pays buckets in queries. If set to false , workgroup members cannot query data from Requester Pays buckets, and queries that retrieve data from Requester Pays buckets cause an error. The default is false . For more information about Requester Pays buckets, see Requester Pays Buckets in the Amazon Simple Storage Service Developer Guide .
Description (string) --The workgroup description.
CreationTime (datetime) --The date and time the workgroup was created.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'WorkGroup': {
'Name': 'string',
'State': 'ENABLED'|'DISABLED',
'Configuration': {
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'EnforceWorkGroupConfiguration': True|False,
'PublishCloudWatchMetricsEnabled': True|False,
'BytesScannedCutoffPerQuery': 123,
'RequesterPaysEnabled': True|False
},
'Description': 'string',
'CreationTime': datetime(2015, 1, 1)
}
}
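A minimal usage sketch, assuming a boto3 Athena client; 'primary' is the default workgroup that Athena creates in each account:
    import boto3
    client = boto3.client('athena')
    workgroup = client.get_work_group(WorkGroup='primary')['WorkGroup']
    result_config = workgroup['Configuration'].get('ResultConfiguration', {})
    print(workgroup['State'], result_config.get('OutputLocation'))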
"""
pass
def list_named_queries(NextToken=None, MaxResults=None, WorkGroup=None):
"""
Provides a list of available query IDs only for queries saved in the specified workgroup. Requires that you have access to the workgroup. If a workgroup is not specified, lists the saved queries for the primary workgroup.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.list_named_queries(
NextToken='string',
MaxResults=123,
WorkGroup='string'
)
:type NextToken: string
:param NextToken: The token that specifies where to start pagination if a previous request was truncated.
:type MaxResults: integer
:param MaxResults: The maximum number of queries to return in this request.
:type WorkGroup: string
:param WorkGroup: The name of the workgroup from which the named queries are returned. If a workgroup is not specified, the saved queries for the primary workgroup are returned.
:rtype: dict
ReturnsResponse Syntax
{
'NamedQueryIds': [
'string',
],
'NextToken': 'string'
}
Response Structure
(dict) --
NamedQueryIds (list) --
The list of unique query IDs.
(string) --
NextToken (string) --
A token to be used by the next request if this request is truncated.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'NamedQueryIds': [
'string',
],
'NextToken': 'string'
}
:returns:
(string) --
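A minimal manual-pagination sketch, assuming a boto3 Athena client (get_paginator('list_named_queries') performs this NextToken loop automatically):
    import boto3
    client = boto3.client('athena')
    kwargs = {'WorkGroup': 'primary', 'MaxResults': 50}
    while True:
        page = client.list_named_queries(**kwargs)
        for named_query_id in page['NamedQueryIds']:
            print(named_query_id)
        if 'NextToken' not in page:
            break
        kwargs['NextToken'] = page['NextToken']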
"""
pass
def list_query_executions(NextToken=None, MaxResults=None, WorkGroup=None):
"""
Provides a list of available query execution IDs for the queries in the specified workgroup. If a workgroup is not specified, returns a list of query execution IDs for the primary workgroup. Requires you to have access to the workgroup in which the queries ran.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.list_query_executions(
NextToken='string',
MaxResults=123,
WorkGroup='string'
)
:type NextToken: string
:param NextToken: The token that specifies where to start pagination if a previous request was truncated.
:type MaxResults: integer
:param MaxResults: The maximum number of query executions to return in this request.
:type WorkGroup: string
:param WorkGroup: The name of the workgroup from which queries are returned. If a workgroup is not specified, a list of available query execution IDs for the queries in the primary workgroup is returned.
:rtype: dict
ReturnsResponse Syntax
{
'QueryExecutionIds': [
'string',
],
'NextToken': 'string'
}
Response Structure
(dict) --
QueryExecutionIds (list) --
The unique IDs of each query execution as an array of strings.
(string) --
NextToken (string) --
A token to be used by the next request if this request is truncated.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'QueryExecutionIds': [
'string',
],
'NextToken': 'string'
}
:returns:
(string) --
"""
pass
def list_tags_for_resource(ResourceARN=None, NextToken=None, MaxResults=None):
"""
Lists the tags associated with this workgroup.
See also: AWS API Documentation
Exceptions
:example: response = client.list_tags_for_resource(
ResourceARN='string',
NextToken='string',
MaxResults=123
)
:type ResourceARN: string
:param ResourceARN: [REQUIRED]\nLists the tags for the workgroup resource with the specified ARN.\n
:type NextToken: string
:param NextToken: The token for the next set of results, or null if there are no additional results for this request, where the request lists the tags for the workgroup resource with the specified ARN.
:type MaxResults: integer
:param MaxResults: The maximum number of results to be returned per request that lists the tags for the workgroup resource.
:rtype: dict
ReturnsResponse Syntax
{
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
Tags (list) --
The list of tags associated with this workgroup.
(dict) --
A tag that you can add to a resource. A tag is a label that you assign to an AWS Athena resource (a workgroup). Each tag consists of a key and an optional value, both of which you define. Tags enable you to categorize workgroups in Athena, for example, by purpose, owner, or environment. Use a consistent set of tag keys to make it easier to search and filter workgroups in your account. The maximum tag key length is 128 Unicode characters in UTF-8. The maximum tag value length is 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys and values are case-sensitive. Tag keys must be unique per resource.
Key (string) --
A tag key. The tag key length is from 1 to 128 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys are case-sensitive and must be unique per resource.
Value (string) --
A tag value. The tag value length is from 0 to 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag values are case-sensitive.
NextToken (string) --
A token to be used by the next request if this request is truncated.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.ResourceNotFoundException
:return: {
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
],
'NextToken': 'string'
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.ResourceNotFoundException
"""
pass
def list_work_groups(NextToken=None, MaxResults=None):
"""
Lists available workgroups for the account.
See also: AWS API Documentation
Exceptions
:example: response = client.list_work_groups(
NextToken='string',
MaxResults=123
)
:type NextToken: string
:param NextToken: A token to be used by the next request if this request is truncated.
:type MaxResults: integer
:param MaxResults: The maximum number of workgroups to return in this request.
:rtype: dict
ReturnsResponse Syntax
{
'WorkGroups': [
{
'Name': 'string',
'State': 'ENABLED'|'DISABLED',
'Description': 'string',
'CreationTime': datetime(2015, 1, 1)
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
WorkGroups (list) --
The list of workgroups, including their names, descriptions, creation times, and states.
(dict) --
The summary information for the workgroup, which includes its name, state, description, and the date and time it was created.
Name (string) --
The name of the workgroup.
State (string) --
The state of the workgroup.
Description (string) --
The workgroup description.
CreationTime (datetime) --
The workgroup creation date and time.
NextToken (string) --
A token to be used by the next request if this request is truncated.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'WorkGroups': [
{
'Name': 'string',
'State': 'ENABLED'|'DISABLED',
'Description': 'string',
'CreationTime': datetime(2015, 1, 1)
},
],
'NextToken': 'string'
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
def start_query_execution(QueryString=None, ClientRequestToken=None, QueryExecutionContext=None, ResultConfiguration=None, WorkGroup=None):
"""
Runs the SQL query statements contained in the Query . Requires you to have access to the workgroup in which the query ran.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.start_query_execution(
QueryString='string',
ClientRequestToken='string',
QueryExecutionContext={
'Database': 'string'
},
ResultConfiguration={
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
WorkGroup='string'
)
:type QueryString: string
:param QueryString: [REQUIRED]\nThe SQL query statements to be executed.\n
:type ClientRequestToken: string
:param ClientRequestToken: A unique case-sensitive string used to ensure the request to create the query is idempotent (executes only once). If another StartQueryExecution request is received, the same response is returned and another query is not created. If a parameter has changed, for example, the QueryString , an error is returned.\n\nWarning\nThis token is listed as not required because AWS SDKs (for example the AWS SDK for Java) auto-generate the token for users. If you are not using the AWS SDK or the AWS CLI, you must provide this token or the action will fail.\n\nThis field is autopopulated if not provided.\n
:type QueryExecutionContext: dict
:param QueryExecutionContext: The database within which the query executes.\n\nDatabase (string) --The name of the database.\n\n\n
:type ResultConfiguration: dict
:param ResultConfiguration: Specifies information about where and how to save the results of the query execution. If the query runs in a workgroup, then workgroup\'s settings may override query settings. This affects the query results location. The workgroup settings override is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .\n\nOutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . To run the query, you must specify the query results location using one of the ways: either for individual queries using either this setting (client-side), or in the workgroup, using WorkGroupConfiguration . If none of them is set, Athena issues an error that no output location is provided. For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .\n\nEncryptionConfiguration (dict) --If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS ) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings .\n\nEncryptionOption (string) -- [REQUIRED]Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.\nIf a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup\'s setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.\n\nKmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.\n\n\n\n\n
:type WorkGroup: string
:param WorkGroup: The name of the workgroup in which the query is being started.
:rtype: dict
ReturnsResponse Syntax
{
'QueryExecutionId': 'string'
}
Response Structure
(dict) --
QueryExecutionId (string) --
The unique ID of the query that ran as a result of this request.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.TooManyRequestsException
:return: {
'QueryExecutionId': 'string'
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.TooManyRequestsException
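A minimal usage sketch, assuming a boto3 Athena client, an existing 'default' database, and an S3 bucket you own for results (the table and bucket names are placeholders):
    import boto3
    client = boto3.client('athena')
    response = client.start_query_execution(
        QueryString='SELECT * FROM my_table LIMIT 10',
        QueryExecutionContext={'Database': 'default'},
        ResultConfiguration={'OutputLocation': 's3://my-athena-results-bucket/'},
        WorkGroup='primary'
    )
    query_execution_id = response['QueryExecutionId']  # poll with get_query_execution until it finishes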
"""
pass
def stop_query_execution(QueryExecutionId=None):
"""
Stops a query execution. Requires you to have access to the workgroup in which the query ran.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.stop_query_execution(
QueryExecutionId='string'
)
:type QueryExecutionId: string
:param QueryExecutionId: [REQUIRED]\nThe unique ID of the query execution to stop.\nThis field is autopopulated if not provided.\n
:rtype: dict
ReturnsResponse Syntax{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
def tag_resource(ResourceARN=None, Tags=None):
"""
Adds one or more tags to the resource, such as a workgroup. A tag is a label that you assign to an AWS Athena resource (a workgroup). Each tag consists of a key and an optional value, both of which you define. Tags enable you to categorize resources (workgroups) in Athena, for example, by purpose, owner, or environment. Use a consistent set of tag keys to make it easier to search and filter workgroups in your account. For best practices, see AWS Tagging Strategies . The key length is from 1 (minimum) to 128 (maximum) Unicode characters in UTF-8. The tag value length is from 0 (minimum) to 256 (maximum) Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys and values are case-sensitive. Tag keys must be unique per resource. If you specify more than one, separate them by commas.
See also: AWS API Documentation
Exceptions
:example: response = client.tag_resource(
ResourceARN='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
]
)
:type ResourceARN: string
:param ResourceARN: [REQUIRED]\nRequests that one or more tags are added to the resource (such as a workgroup) for the specified ARN.\n
:type Tags: list
:param Tags: [REQUIRED]\nOne or more tags, separated by commas, to be added to the resource, such as a workgroup.\n\n(dict) --A tag that you can add to a resource. A tag is a label that you assign to an AWS Athena resource (a workgroup). Each tag consists of a key and an optional value, both of which you define. Tags enable you to categorize workgroups in Athena, for example, by purpose, owner, or environment. Use a consistent set of tag keys to make it easier to search and filter workgroups in your account. The maximum tag key length is 128 Unicode characters in UTF-8. The maximum tag value length is 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys and values are case-sensitive. Tag keys must be unique per resource.\n\nKey (string) --A tag key. The tag key length is from 1 to 128 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys are case-sensitive and must be unique per resource.\n\nValue (string) --A tag value. The tag value length is from 0 to 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag values are case-sensitive.\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
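A minimal usage sketch, assuming a boto3 Athena client and the ARN of an existing workgroup (the Region, account ID, and workgroup name in the ARN are placeholders):
    import boto3
    client = boto3.client('athena')
    client.tag_resource(
        ResourceARN='arn:aws:athena:us-east-1:123456789012:workgroup/analytics',
        Tags=[{'Key': 'owner', 'Value': 'data-team'}]
    )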
"""
pass
def untag_resource(ResourceARN=None, TagKeys=None):
"""
Removes one or more tags from the workgroup resource. Takes as an input a list of TagKey Strings separated by commas, and removes their tags at the same time.
See also: AWS API Documentation
Exceptions
:example: response = client.untag_resource(
ResourceARN='string',
TagKeys=[
'string',
]
)
:type ResourceARN: string
:param ResourceARN: [REQUIRED]\nRemoves one or more tags from the workgroup resource for the specified ARN.\n
:type TagKeys: list
:param TagKeys: [REQUIRED]\nRemoves the tags associated with one or more tag keys from the workgroup resource.\n\n(string) --\n\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
"""
pass
def update_work_group(WorkGroup=None, Description=None, ConfigurationUpdates=None, State=None):
"""
Updates the workgroup with the specified name. The workgroup\'s name cannot be changed.
See also: AWS API Documentation
Exceptions
:example: response = client.update_work_group(
WorkGroup='string',
Description='string',
ConfigurationUpdates={
'EnforceWorkGroupConfiguration': True|False,
'ResultConfigurationUpdates': {
'OutputLocation': 'string',
'RemoveOutputLocation': True|False,
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
},
'RemoveEncryptionConfiguration': True|False
},
'PublishCloudWatchMetricsEnabled': True|False,
'BytesScannedCutoffPerQuery': 123,
'RemoveBytesScannedCutoffPerQuery': True|False,
'RequesterPaysEnabled': True|False
},
State='ENABLED'|'DISABLED'
)
:type WorkGroup: string
:param WorkGroup: [REQUIRED]\nThe specified workgroup that will be updated.\n
:type Description: string
:param Description: The workgroup description.
:type ConfigurationUpdates: dict
:param ConfigurationUpdates: The workgroup configuration that will be updated for the given workgroup.\n\nEnforceWorkGroupConfiguration (boolean) --If set to 'true', the settings for the workgroup override client-side settings. If set to 'false' client-side settings are used. For more information, see Workgroup Settings Override Client-Side Settings .\n\nResultConfigurationUpdates (dict) --The result configuration information about the queries in this workgroup that will be updated. Includes the updated results location and an updated option for encrypting query results.\n\nOutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . For more information, see Query Results If workgroup settings override client-side settings, then the query uses the location for the query results and the encryption configuration that are specified for the workgroup. The 'workgroup settings override' is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .\n\nRemoveOutputLocation (boolean) --If set to 'true', indicates that the previously-specified query results location (also known as a client-side setting) for queries in this workgroup should be ignored and set to null. If set to 'false' or not set, and a value is present in the OutputLocation in ResultConfigurationUpdates (the client-side setting), the OutputLocation in the workgroup\'s ResultConfiguration will be updated with the new value. For more information, see Workgroup Settings Override Client-Side Settings .\n\nEncryptionConfiguration (dict) --The encryption configuration for the query results.\n\nEncryptionOption (string) -- [REQUIRED]Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.\nIf a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup\'s setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.\n\nKmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.\n\n\n\nRemoveEncryptionConfiguration (boolean) --If set to 'true', indicates that the previously-specified encryption configuration (also known as the client-side setting) for queries in this workgroup should be ignored and set to null. If set to 'false' or not set, and a value is present in the EncryptionConfiguration in ResultConfigurationUpdates (the client-side setting), the EncryptionConfiguration in the workgroup\'s ResultConfiguration will be updated with the new value. For more information, see Workgroup Settings Override Client-Side Settings .\n\n\n\nPublishCloudWatchMetricsEnabled (boolean) --Indicates whether this workgroup enables publishing metrics to Amazon CloudWatch.\n\nBytesScannedCutoffPerQuery (integer) --The upper limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan.\n\nRemoveBytesScannedCutoffPerQuery (boolean) --Indicates that the data usage control limit per query is removed. WorkGroupConfiguration$BytesScannedCutoffPerQuery\n\nRequesterPaysEnabled (boolean) --If set to true , allows members assigned to a workgroup to specify Amazon S3 Requester Pays buckets in queries. 
If set to false , workgroup members cannot query data from Requester Pays buckets, and queries that retrieve data from Requester Pays buckets cause an error. The default is false . For more information about Requester Pays buckets, see Requester Pays Buckets in the Amazon Simple Storage Service Developer Guide .\n\n\n
:type State: string
:param State: The workgroup state that will be updated for the given workgroup.
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {}
:returns:
(dict) --
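A minimal usage sketch, assuming a boto3 Athena client and an existing workgroup named 'analytics' (illustrative):
    import boto3
    client = boto3.client('athena')
    client.update_work_group(
        WorkGroup='analytics',
        Description='Analytics queries with a tighter scan limit',
        ConfigurationUpdates={
            'EnforceWorkGroupConfiguration': True,
            'BytesScannedCutoffPerQuery': 1 * 1024 * 1024 * 1024  # lower the per-query cutoff to 1 GB
        },
        State='ENABLED'
    )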
"""
pass
| """
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
def batch_get_named_query(NamedQueryIds=None):
"""
Returns the details of a single named query or a list of up to 50 queries, which you provide as an array of query ID strings. Requires you to have access to the workgroup in which the queries were saved. Use ListNamedQueriesInput to get the list of named query IDs in the specified workgroup. If information could not be retrieved for a submitted query ID, information about the query ID submitted is listed under UnprocessedNamedQueryId . Named queries differ from executed queries. Use BatchGetQueryExecutionInput to get details about each unique query execution, and ListQueryExecutionsInput to get a list of query execution IDs.
See also: AWS API Documentation
Exceptions
:example: response = client.batch_get_named_query(
NamedQueryIds=[
'string',
]
)
:type NamedQueryIds: list
:param NamedQueryIds: [REQUIRED]\nAn array of query IDs.\n\n(string) --\n\n
:rtype: dict
ReturnsResponse Syntax{
'NamedQueries': [
{
'Name': 'string',
'Description': 'string',
'Database': 'string',
'QueryString': 'string',
'NamedQueryId': 'string',
'WorkGroup': 'string'
},
],
'UnprocessedNamedQueryIds': [
{
'NamedQueryId': 'string',
'ErrorCode': 'string',
'ErrorMessage': 'string'
},
]
}
Response Structure
(dict) --
NamedQueries (list) --Information about the named query IDs submitted.
(dict) --A query, where QueryString is the list of SQL query statements that comprise the query.
Name (string) --The query name.
Description (string) --The query description.
Database (string) --The database to which the query belongs.
QueryString (string) --The SQL query statements that comprise the query.
NamedQueryId (string) --The unique identifier of the query.
WorkGroup (string) --The name of the workgroup that contains the named query.
UnprocessedNamedQueryIds (list) --Information about provided query IDs.
(dict) --Information about a named query ID that could not be processed.
NamedQueryId (string) --The unique identifier of the named query.
ErrorCode (string) --The error code returned when the processing request for the named query failed, if applicable.
ErrorMessage (string) --The error message returned when the processing request for the named query failed, if applicable.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'NamedQueries': [
{
'Name': 'string',
'Description': 'string',
'Database': 'string',
'QueryString': 'string',
'NamedQueryId': 'string',
'WorkGroup': 'string'
},
],
'UnprocessedNamedQueryIds': [
{
'NamedQueryId': 'string',
'ErrorCode': 'string',
'ErrorMessage': 'string'
},
]
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
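A minimal usage sketch, assuming a boto3 Athena client and at least one saved named query in a workgroup you can access (the IDs are fetched with list_named_queries rather than hard-coded):
    import boto3
    client = boto3.client('athena')
    ids = client.list_named_queries(MaxResults=5)['NamedQueryIds']
    batch = client.batch_get_named_query(NamedQueryIds=ids)
    for named_query in batch['NamedQueries']:
        print(named_query['Name'], '->', named_query['QueryString'])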
"""
pass
def batch_get_query_execution(QueryExecutionIds=None):
"""
Returns the details of a single query execution or a list of up to 50 query executions, which you provide as an array of query execution ID strings. Requires you to have access to the workgroup in which the queries ran. To get a list of query execution IDs, use ListQueryExecutionsInput$WorkGroup . Query executions differ from named (saved) queries. Use BatchGetNamedQueryInput to get details about named queries.
See also: AWS API Documentation
Exceptions
:example: response = client.batch_get_query_execution(
QueryExecutionIds=[
'string',
]
)
:type QueryExecutionIds: list
:param QueryExecutionIds: [REQUIRED]\nAn array of query execution IDs.\n\n(string) --\n\n
:rtype: dict
ReturnsResponse Syntax{
'QueryExecutions': [
{
'QueryExecutionId': 'string',
'Query': 'string',
'StatementType': 'DDL'|'DML'|'UTILITY',
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'QueryExecutionContext': {
'Database': 'string'
},
'Status': {
'State': 'QUEUED'|'RUNNING'|'SUCCEEDED'|'FAILED'|'CANCELLED',
'StateChangeReason': 'string',
'SubmissionDateTime': datetime(2015, 1, 1),
'CompletionDateTime': datetime(2015, 1, 1)
},
'Statistics': {
'EngineExecutionTimeInMillis': 123,
'DataScannedInBytes': 123,
'DataManifestLocation': 'string',
'TotalExecutionTimeInMillis': 123,
'QueryQueueTimeInMillis': 123,
'QueryPlanningTimeInMillis': 123,
'ServiceProcessingTimeInMillis': 123
},
'WorkGroup': 'string'
},
],
'UnprocessedQueryExecutionIds': [
{
'QueryExecutionId': 'string',
'ErrorCode': 'string',
'ErrorMessage': 'string'
},
]
}
Response Structure
(dict) --
QueryExecutions (list) --Information about a query execution.
(dict) --Information about a single instance of a query execution.
QueryExecutionId (string) --The unique identifier for each query execution.
Query (string) --The SQL query statements which the query execution ran.
StatementType (string) --The type of query statement that was run. DDL indicates DDL query statements. DML indicates DML (Data Manipulation Language) query statements, such as CREATE TABLE AS SELECT . UTILITY indicates query statements other than DDL and DML, such as SHOW CREATE TABLE , or DESCRIBE <table> .
ResultConfiguration (dict) --The location in Amazon S3 where query results were stored and the encryption option, if any, used for query results. These are known as "client-side settings". If workgroup settings override client-side settings, then the query uses the location for the query results and the encryption configuration that are specified for the workgroup.
OutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . To run the query, you must specify the query results location using one of the ways: either for individual queries using either this setting (client-side), or in the workgroup, using WorkGroupConfiguration . If none of them is set, Athena issues an error that no output location is provided. For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
EncryptionConfiguration (dict) --If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS ) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings .
EncryptionOption (string) --Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.
If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
KmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.
QueryExecutionContext (dict) --The database in which the query execution occurred.
Database (string) --The name of the database.
Status (dict) --The completion date, current state, submission time, and state change reason (if applicable) for the query execution.
State (string) --The state of query execution. QUEUED indicates that the query has been submitted to the service, and Athena will execute the query as soon as resources are available. RUNNING indicates that the query is in execution phase. SUCCEEDED indicates that the query completed without errors. FAILED indicates that the query experienced an error and did not complete processing. CANCELLED indicates that a user input interrupted query execution.
StateChangeReason (string) --Further detail about the status of the query.
SubmissionDateTime (datetime) --The date and time that the query was submitted.
CompletionDateTime (datetime) --The date and time that the query completed.
Statistics (dict) --Query execution statistics, such as the amount of data scanned, the amount of time that the query took to process, and the type of statement that was run.
EngineExecutionTimeInMillis (integer) --The number of milliseconds that the query took to execute.
DataScannedInBytes (integer) --The number of bytes in the data that was queried.
DataManifestLocation (string) --The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. The manifest file tracks files that the query wrote to Amazon S3. If the query fails, the manifest file also tracks files that the query intended to write. The manifest is useful for identifying orphaned files resulting from a failed query. For more information, see Working with Query Results, Output Files, and Query History in the Amazon Athena User Guide .
TotalExecutionTimeInMillis (integer) --The number of milliseconds that Athena took to run the query.
QueryQueueTimeInMillis (integer) --The number of milliseconds that the query was in your query queue waiting for resources. Note that if transient errors occur, Athena might automatically add the query back to the queue.
QueryPlanningTimeInMillis (integer) --The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. Note that because the query engine performs the query planning, query planning time is a subset of engine processing time.
ServiceProcessingTimeInMillis (integer) --The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query.
WorkGroup (string) --The name of the workgroup in which the query ran.
UnprocessedQueryExecutionIds (list) --Information about the query executions that failed to run.
(dict) --Describes a query execution that failed to process.
QueryExecutionId (string) --The unique identifier of the query execution.
ErrorCode (string) --The error code returned when the query execution failed to process, if applicable.
ErrorMessage (string) --The error message returned when the query execution failed to process, if applicable.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'QueryExecutions': [
{
'QueryExecutionId': 'string',
'Query': 'string',
'StatementType': 'DDL'|'DML'|'UTILITY',
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'QueryExecutionContext': {
'Database': 'string'
},
'Status': {
'State': 'QUEUED'|'RUNNING'|'SUCCEEDED'|'FAILED'|'CANCELLED',
'StateChangeReason': 'string',
'SubmissionDateTime': datetime(2015, 1, 1),
'CompletionDateTime': datetime(2015, 1, 1)
},
'Statistics': {
'EngineExecutionTimeInMillis': 123,
'DataScannedInBytes': 123,
'DataManifestLocation': 'string',
'TotalExecutionTimeInMillis': 123,
'QueryQueueTimeInMillis': 123,
'QueryPlanningTimeInMillis': 123,
'ServiceProcessingTimeInMillis': 123
},
'WorkGroup': 'string'
},
],
'UnprocessedQueryExecutionIds': [
{
'QueryExecutionId': 'string',
'ErrorCode': 'string',
'ErrorMessage': 'string'
},
]
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
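# Minimal usage sketch for the BatchGetQueryExecution response structure documented
# above: fetch details for a batch of query execution IDs and separate processed
# executions from unprocessed IDs. Client creation via boto3 and the example IDs
# are assumptions for illustration; only batch_get_query_execution and its
# documented response fields are taken from the documentation above.
def _example_batch_get_query_execution(query_execution_ids):
    import boto3
    athena = boto3.client('athena')
    response = athena.batch_get_query_execution(QueryExecutionIds=query_execution_ids)
    # Each entry in QueryExecutions carries the Status and Statistics blocks shown above.
    for execution in response['QueryExecutions']:
        print(execution['QueryExecutionId'], execution['Status']['State'])
    # IDs that could not be processed come back with an error code and message.
    for unprocessed in response.get('UnprocessedQueryExecutionIds', []):
        print('unprocessed:', unprocessed['QueryExecutionId'], unprocessed.get('ErrorCode'))
    return response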
def can_paginate(operation_name=None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is create_foo, and you'd normally invoke the
operation as client.create_foo(**kwargs), if the
create_foo operation can be paginated, you can use the
call client.get_paginator('create_foo').
"""
pass
def create_named_query(Name=None, Description=None, Database=None, QueryString=None, ClientRequestToken=None, WorkGroup=None):
"""
Creates a named query in the specified workgroup. Requires that you have access to the workgroup.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.create_named_query(
Name='string',
Description='string',
Database='string',
QueryString='string',
ClientRequestToken='string',
WorkGroup='string'
)
:type Name: string
:param Name: [REQUIRED]
The query name.
:type Description: string
:param Description: The query description.
:type Database: string
:param Database: [REQUIRED]
The database to which the query belongs.
:type QueryString: string
:param QueryString: [REQUIRED]
The contents of the query with all query statements.
:type ClientRequestToken: string
:param ClientRequestToken: A unique case-sensitive string used to ensure the request to create the query is idempotent (executes only once). If another CreateNamedQuery request is received, the same response is returned and another query is not created. If a parameter has changed, for example, the QueryString , an error is returned.
Warning
This token is listed as not required because AWS SDKs (for example the AWS SDK for Java) auto-generate the token for users. If you are not using the AWS SDK or the AWS CLI, you must provide this token or the action will fail.
This field is autopopulated if not provided.
:type WorkGroup: string
:param WorkGroup: The name of the workgroup in which the named query is being created.
:rtype: dict
ReturnsResponse Syntax
{
'NamedQueryId': 'string'
}
Response Structure
(dict) --
NamedQueryId (string) --
The unique ID of the query.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'NamedQueryId': 'string'
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
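# Hedged sketch of create_named_query based on the parameter list above. The
# database, query text, and workgroup names are hypothetical placeholders; the
# ClientRequestToken is omitted because, as noted above, the SDK auto-generates it.
def _example_create_named_query():
    import boto3
    athena = boto3.client('athena')
    response = athena.create_named_query(
        Name='daily_event_count',                      # hypothetical query name
        Description='Counts events per day',           # optional description
        Database='analytics_db',                       # hypothetical database
        QueryString='SELECT event_date, count(*) FROM events GROUP BY event_date',
        WorkGroup='primary',                           # default workgroup
    )
    # The response contains only the new query's ID.
    return response['NamedQueryId']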
def create_work_group(Name=None, Configuration=None, Description=None, Tags=None):
"""
Creates a workgroup with the specified name.
See also: AWS API Documentation
Exceptions
:example: response = client.create_work_group(
Name='string',
Configuration={
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'EnforceWorkGroupConfiguration': True|False,
'PublishCloudWatchMetricsEnabled': True|False,
'BytesScannedCutoffPerQuery': 123,
'RequesterPaysEnabled': True|False
},
Description='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
]
)
:type Name: string
:param Name: [REQUIRED]
The workgroup name.
:type Configuration: dict
:param Configuration: The configuration for the workgroup, which includes the location in Amazon S3 where query results are stored, the encryption configuration, if any, used for encrypting query results, whether the Amazon CloudWatch Metrics are enabled for the workgroup, the limit for the amount of bytes scanned (cutoff) per query, if it is specified, and whether workgroup's settings (specified with EnforceWorkGroupConfiguration) in the WorkGroupConfiguration override client-side settings. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
ResultConfiguration (dict) --The configuration for the workgroup, which includes the location in Amazon S3 where query results are stored and the encryption option, if any, used for query results. To run the query, you must specify the query results location in one of two ways: in the workgroup, using this setting, or for individual queries (client-side), using ResultConfiguration$OutputLocation . If neither is set, Athena issues an error that no output location is provided. For more information, see Query Results .
OutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . To run the query, you must specify the query results location in one of two ways: for individual queries, using this setting (client-side), or in the workgroup, using WorkGroupConfiguration . If neither is set, Athena issues an error that no output location is provided. For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
EncryptionConfiguration (dict) --If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS ) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings .
EncryptionOption (string) -- [REQUIRED]Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.
If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
KmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.
EnforceWorkGroupConfiguration (boolean) --If set to 'true', the settings for the workgroup override client-side settings. If set to 'false', client-side settings are used. For more information, see Workgroup Settings Override Client-Side Settings .
PublishCloudWatchMetricsEnabled (boolean) --Indicates that the Amazon CloudWatch metrics are enabled for the workgroup.
BytesScannedCutoffPerQuery (integer) --The upper data usage limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan.
RequesterPaysEnabled (boolean) --If set to true , allows members assigned to a workgroup to reference Amazon S3 Requester Pays buckets in queries. If set to false , workgroup members cannot query data from Requester Pays buckets, and queries that retrieve data from Requester Pays buckets cause an error. The default is false . For more information about Requester Pays buckets, see Requester Pays Buckets in the Amazon Simple Storage Service Developer Guide .
:type Description: string
:param Description: The workgroup description.
:type Tags: list
:param Tags: One or more tags, separated by commas, that you want to attach to the workgroup as you create it.
(dict) --A tag that you can add to a resource. A tag is a label that you assign to an AWS Athena resource (a workgroup). Each tag consists of a key and an optional value, both of which you define. Tags enable you to categorize workgroups in Athena, for example, by purpose, owner, or environment. Use a consistent set of tag keys to make it easier to search and filter workgroups in your account. The maximum tag key length is 128 Unicode characters in UTF-8. The maximum tag value length is 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys and values are case-sensitive. Tag keys must be unique per resource.
Key (string) --A tag key. The tag key length is from 1 to 128 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys are case-sensitive and must be unique per resource.
Value (string) --A tag value. The tag value length is from 0 to 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag values are case-sensitive.
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {}
:returns:
(dict) --
"""
pass
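# Sketch of create_work_group using the Configuration fields documented above.
# The workgroup name, S3 output location, and tag values are assumptions; the
# shape of the Configuration dict follows the request syntax shown above.
def _example_create_work_group():
    import boto3
    athena = boto3.client('athena')
    athena.create_work_group(
        Name='reporting',                                                     # hypothetical name
        Configuration={
            'ResultConfiguration': {
                'OutputLocation': 's3://example-athena-results/reporting/',   # placeholder bucket
                'EncryptionConfiguration': {'EncryptionOption': 'SSE_S3'},
            },
            'EnforceWorkGroupConfiguration': True,   # workgroup settings override client-side settings
            'PublishCloudWatchMetricsEnabled': True,
            'BytesScannedCutoffPerQuery': 10 * 1024 * 1024 * 1024,            # 10 GB per-query scan limit
        },
        Description='Workgroup for scheduled reporting queries',
        Tags=[{'Key': 'team', 'Value': 'analytics'}],
    )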
def delete_named_query(NamedQueryId=None):
"""
Deletes the named query if you have access to the workgroup in which the query was saved.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.delete_named_query(
NamedQueryId='string'
)
:type NamedQueryId: string
:param NamedQueryId: [REQUIRED]
The unique ID of the query to delete.
This field is autopopulated if not provided.
:rtype: dict
ReturnsResponse Syntax{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
def delete_work_group(WorkGroup=None, RecursiveDeleteOption=None):
"""
Deletes the workgroup with the specified name. The primary workgroup cannot be deleted.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_work_group(
WorkGroup='string',
RecursiveDeleteOption=True|False
)
:type WorkGroup: string
:param WorkGroup: [REQUIRED]
The unique name of the workgroup to delete.
:type RecursiveDeleteOption: boolean
:param RecursiveDeleteOption: The option to delete the workgroup and its contents even if the workgroup contains any named queries.
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {}
:returns:
(dict) --
"""
pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to
ClientMethod.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid
for. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By
default, the http method is whatever is used in the method's model.
"""
pass
def get_named_query(NamedQueryId=None):
"""
Returns information about a single query. Requires that you have access to the workgroup in which the query was saved.
See also: AWS API Documentation
Exceptions
:example: response = client.get_named_query(
NamedQueryId='string'
)
:type NamedQueryId: string
:param NamedQueryId: [REQUIRED]
The unique ID of the query. Use ListNamedQueries to get query IDs.
:rtype: dict
ReturnsResponse Syntax{
'NamedQuery': {
'Name': 'string',
'Description': 'string',
'Database': 'string',
'QueryString': 'string',
'NamedQueryId': 'string',
'WorkGroup': 'string'
}
}
Response Structure
(dict) --
NamedQuery (dict) --Information about the query.
Name (string) --The query name.
Description (string) --The query description.
Database (string) --The database to which the query belongs.
QueryString (string) --The SQL query statements that comprise the query.
NamedQueryId (string) --The unique identifier of the query.
WorkGroup (string) --The name of the workgroup that contains the named query.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'NamedQuery': {
'Name': 'string',
'Description': 'string',
'Database': 'string',
'QueryString': 'string',
'NamedQueryId': 'string',
'WorkGroup': 'string'
}
}
"""
pass
def get_paginator(operation_name=None):
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is create_foo, and you'd normally invoke the
operation as client.create_foo(**kwargs), if the
create_foo operation can be paginated, you can use the
call client.get_paginator('create_foo').
:rtype: L{botocore.paginate.Paginator}
ReturnsA paginator object.
"""
pass
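# Illustrative pagination sketch: the docstrings in this module note that truncated
# list responses return a NextToken, and get_paginator handles that loop for you.
# The workgroup name is a placeholder; 'list_query_executions' is one of the
# paginatable operations on this client.
def _example_paginate_query_executions(work_group='primary'):
    import boto3
    athena = boto3.client('athena')
    paginator = athena.get_paginator('list_query_executions')
    execution_ids = []
    for page in paginator.paginate(WorkGroup=work_group):
        # Each page mirrors the ListQueryExecutions response syntax documented below.
        execution_ids.extend(page['QueryExecutionIds'])
    return execution_ids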
def get_query_execution(QueryExecutionId=None):
"""
Returns information about a single execution of a query if you have access to the workgroup in which the query ran. Each time a query executes, information about the query execution is saved with a unique ID.
See also: AWS API Documentation
Exceptions
:example: response = client.get_query_execution(
QueryExecutionId='string'
)
:type QueryExecutionId: string
:param QueryExecutionId: [REQUIRED]
The unique ID of the query execution.
:rtype: dict
ReturnsResponse Syntax{
'QueryExecution': {
'QueryExecutionId': 'string',
'Query': 'string',
'StatementType': 'DDL'|'DML'|'UTILITY',
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'QueryExecutionContext': {
'Database': 'string'
},
'Status': {
'State': 'QUEUED'|'RUNNING'|'SUCCEEDED'|'FAILED'|'CANCELLED',
'StateChangeReason': 'string',
'SubmissionDateTime': datetime(2015, 1, 1),
'CompletionDateTime': datetime(2015, 1, 1)
},
'Statistics': {
'EngineExecutionTimeInMillis': 123,
'DataScannedInBytes': 123,
'DataManifestLocation': 'string',
'TotalExecutionTimeInMillis': 123,
'QueryQueueTimeInMillis': 123,
'QueryPlanningTimeInMillis': 123,
'ServiceProcessingTimeInMillis': 123
},
'WorkGroup': 'string'
}
}
Response Structure
(dict) --
QueryExecution (dict) --Information about the query execution.
QueryExecutionId (string) --The unique identifier for each query execution.
Query (string) --The SQL query statements which the query execution ran.
StatementType (string) --The type of query statement that was run. DDL indicates DDL query statements. DML indicates DML (Data Manipulation Language) query statements, such as CREATE TABLE AS SELECT . UTILITY indicates query statements other than DDL and DML, such as SHOW CREATE TABLE , or DESCRIBE <table> .
ResultConfiguration (dict) --The location in Amazon S3 where query results were stored and the encryption option, if any, used for query results. These are known as "client-side settings". If workgroup settings override client-side settings, then the query uses the location for the query results and the encryption configuration that are specified for the workgroup.
OutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . To run the query, you must specify the query results location in one of two ways: for individual queries, using this setting (client-side), or in the workgroup, using WorkGroupConfiguration . If neither is set, Athena issues an error that no output location is provided. For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
EncryptionConfiguration (dict) --If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS ) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings .
EncryptionOption (string) --Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.
If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
KmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.
QueryExecutionContext (dict) --The database in which the query execution occurred.
Database (string) --The name of the database.
Status (dict) --The completion date, current state, submission time, and state change reason (if applicable) for the query execution.
State (string) --The state of query execution. QUEUED indicates that the query has been submitted to the service, and Athena will execute the query as soon as resources are available. RUNNING indicates that the query is in execution phase. SUCCEEDED indicates that the query completed without errors. FAILED indicates that the query experienced an error and did not complete processing. CANCELLED indicates that a user input interrupted query execution.
StateChangeReason (string) --Further detail about the status of the query.
SubmissionDateTime (datetime) --The date and time that the query was submitted.
CompletionDateTime (datetime) --The date and time that the query completed.
Statistics (dict) --Query execution statistics, such as the amount of data scanned, the amount of time that the query took to process, and the type of statement that was run.
EngineExecutionTimeInMillis (integer) --The number of milliseconds that the query took to execute.
DataScannedInBytes (integer) --The number of bytes in the data that was queried.
DataManifestLocation (string) --The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. The manifest file tracks files that the query wrote to Amazon S3. If the query fails, the manifest file also tracks files that the query intended to write. The manifest is useful for identifying orphaned files resulting from a failed query. For more information, see Working with Query Results, Output Files, and Query History in the Amazon Athena User Guide .
TotalExecutionTimeInMillis (integer) --The number of milliseconds that Athena took to run the query.
QueryQueueTimeInMillis (integer) --The number of milliseconds that the query was in your query queue waiting for resources. Note that if transient errors occur, Athena might automatically add the query back to the queue.
QueryPlanningTimeInMillis (integer) --The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. Note that because the query engine performs the query planning, query planning time is a subset of engine processing time.
ServiceProcessingTimeInMillis (integer) --The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query.
WorkGroup (string) --The name of the workgroup in which the query ran.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'QueryExecution': {
'QueryExecutionId': 'string',
'Query': 'string',
'StatementType': 'DDL'|'DML'|'UTILITY',
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'QueryExecutionContext': {
'Database': 'string'
},
'Status': {
'State': 'QUEUED'|'RUNNING'|'SUCCEEDED'|'FAILED'|'CANCELLED',
'StateChangeReason': 'string',
'SubmissionDateTime': datetime(2015, 1, 1),
'CompletionDateTime': datetime(2015, 1, 1)
},
'Statistics': {
'EngineExecutionTimeInMillis': 123,
'DataScannedInBytes': 123,
'DataManifestLocation': 'string',
'TotalExecutionTimeInMillis': 123,
'QueryQueueTimeInMillis': 123,
'QueryPlanningTimeInMillis': 123,
'ServiceProcessingTimeInMillis': 123
},
'WorkGroup': 'string'
}
}
"""
pass
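# Simple polling sketch built on the State values documented above (QUEUED,
# RUNNING, SUCCEEDED, FAILED, CANCELLED). The fixed sleep interval is an arbitrary
# choice for illustration; production code may prefer exponential backoff.
def _example_wait_for_query(query_execution_id, poll_seconds=2):
    import time
    import boto3
    athena = boto3.client('athena')
    while True:
        status = athena.get_query_execution(QueryExecutionId=query_execution_id)
        state = status['QueryExecution']['Status']['State']
        if state in ('SUCCEEDED', 'FAILED', 'CANCELLED'):
            # StateChangeReason gives further detail for failed or cancelled queries.
            return state, status['QueryExecution']['Status'].get('StateChangeReason')
        time.sleep(poll_seconds)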
def get_query_results(QueryExecutionId=None, NextToken=None, MaxResults=None):
"""
Streams the results of a single query execution specified by QueryExecutionId from the Athena query results location in Amazon S3. For more information, see Query Results in the Amazon Athena User Guide . This request does not execute the query but returns results. Use StartQueryExecution to run a query.
To stream query results successfully, the IAM principal with permission to call GetQueryResults also must have permissions to the Amazon S3 GetObject action for the Athena query results location.
See also: AWS API Documentation
Exceptions
:example: response = client.get_query_results(
QueryExecutionId='string',
NextToken='string',
MaxResults=123
)
:type QueryExecutionId: string
:param QueryExecutionId: [REQUIRED]
The unique ID of the query execution.
:type NextToken: string
:param NextToken: The token that specifies where to start pagination if a previous request was truncated.
:type MaxResults: integer
:param MaxResults: The maximum number of results (rows) to return in this request.
:rtype: dict
ReturnsResponse Syntax
{
'UpdateCount': 123,
'ResultSet': {
'Rows': [
{
'Data': [
{
'VarCharValue': 'string'
},
]
},
],
'ResultSetMetadata': {
'ColumnInfo': [
{
'CatalogName': 'string',
'SchemaName': 'string',
'TableName': 'string',
'Name': 'string',
'Label': 'string',
'Type': 'string',
'Precision': 123,
'Scale': 123,
'Nullable': 'NOT_NULL'|'NULLABLE'|'UNKNOWN',
'CaseSensitive': True|False
},
]
}
},
'NextToken': 'string'
}
Response Structure
(dict) --
UpdateCount (integer) --
The number of rows inserted with a CREATE TABLE AS SELECT statement.
ResultSet (dict) --
The results of the query execution.
Rows (list) --
The rows in the table.
(dict) --
The rows that comprise a query result table.
Data (list) --
The data that populates a row in a query result table.
(dict) --
A piece of data (a field in the table).
VarCharValue (string) --
The value of the datum.
ResultSetMetadata (dict) --
The metadata that describes the column structure and data types of a table of query results.
ColumnInfo (list) --
Information about the columns returned in a query result metadata.
(dict) --
Information about the columns in a query execution result.
CatalogName (string) --
The catalog to which the query results belong.
SchemaName (string) --
The schema name (database name) to which the query results belong.
TableName (string) --
The table name for the query results.
Name (string) --
The name of the column.
Label (string) --
A column label.
Type (string) --
The data type of the column.
Precision (integer) --
For DECIMAL data types, specifies the total number of digits, up to 38. For performance reasons, we recommend up to 18 digits.
Scale (integer) --
For DECIMAL data types, specifies the total number of digits in the fractional part of the value. Defaults to 0.
Nullable (string) --
Indicates the column's nullable status.
CaseSensitive (boolean) --
Indicates whether values in the column are case-sensitive.
NextToken (string) --
A token to be used by the next request if this request is truncated.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'UpdateCount': 123,
'ResultSet': {
'Rows': [
{
'Data': [
{
'VarCharValue': 'string'
},
]
},
],
'ResultSetMetadata': {
'ColumnInfo': [
{
'CatalogName': 'string',
'SchemaName': 'string',
'TableName': 'string',
'Name': 'string',
'Label': 'string',
'Type': 'string',
'Precision': 123,
'Scale': 123,
'Nullable': 'NOT_NULL'|'NULLABLE'|'UNKNOWN',
'CaseSensitive': True|False
},
]
}
},
'NextToken': 'string'
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
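# Sketch of reading result rows via the paginated GetQueryResults call described
# above. For SELECT queries Athena returns the column headers as the first row of
# the first page, so this helper skips it; that behaviour is an assumption of the
# example, not part of the response syntax above.
def _example_fetch_rows(query_execution_id):
    import boto3
    athena = boto3.client('athena')
    paginator = athena.get_paginator('get_query_results')
    rows = []
    for page_index, page in enumerate(paginator.paginate(QueryExecutionId=query_execution_id)):
        data_rows = page['ResultSet']['Rows']
        if page_index == 0:
            data_rows = data_rows[1:]  # drop the header row
        for row in data_rows:
            # A datum may omit VarCharValue for NULLs, hence .get().
            rows.append([datum.get('VarCharValue') for datum in row['Data']])
    return rows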
def get_waiter(waiter_name=None):
"""
Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters
section of the service docs for a list of available waiters.
:rtype: botocore.waiter.Waiter
"""
pass
def get_work_group(WorkGroup=None):
"""
Returns information about the workgroup with the specified name.
See also: AWS API Documentation
Exceptions
:example: response = client.get_work_group(
WorkGroup='string'
)
:type WorkGroup: string
:param WorkGroup: [REQUIRED]
The name of the workgroup.
:rtype: dict
ReturnsResponse Syntax{
'WorkGroup': {
'Name': 'string',
'State': 'ENABLED'|'DISABLED',
'Configuration': {
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'EnforceWorkGroupConfiguration': True|False,
'PublishCloudWatchMetricsEnabled': True|False,
'BytesScannedCutoffPerQuery': 123,
'RequesterPaysEnabled': True|False
},
'Description': 'string',
'CreationTime': datetime(2015, 1, 1)
}
}
Response Structure
(dict) --
WorkGroup (dict) --Information about the workgroup.
Name (string) --The workgroup name.
State (string) --The state of the workgroup: ENABLED or DISABLED.
Configuration (dict) --The configuration of the workgroup, which includes the location in Amazon S3 where query results are stored, the encryption configuration, if any, used for query results; whether the Amazon CloudWatch Metrics are enabled for the workgroup; whether workgroup settings override client-side settings; and the data usage limits for the amount of data scanned per query or per workgroup. The workgroup settings override is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
ResultConfiguration (dict) --The configuration for the workgroup, which includes the location in Amazon S3 where query results are stored and the encryption option, if any, used for query results. To run the query, you must specify the query results location in one of two ways: in the workgroup, using this setting, or for individual queries (client-side), using ResultConfiguration$OutputLocation . If neither is set, Athena issues an error that no output location is provided. For more information, see Query Results .
OutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . To run the query, you must specify the query results location in one of two ways: for individual queries, using this setting (client-side), or in the workgroup, using WorkGroupConfiguration . If neither is set, Athena issues an error that no output location is provided. For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
EncryptionConfiguration (dict) --If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS ) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings .
EncryptionOption (string) --Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.
If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
KmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.
EnforceWorkGroupConfiguration (boolean) --If set to "true", the settings for the workgroup override client-side settings. If set to "false", client-side settings are used. For more information, see Workgroup Settings Override Client-Side Settings .
PublishCloudWatchMetricsEnabled (boolean) --Indicates that the Amazon CloudWatch metrics are enabled for the workgroup.
BytesScannedCutoffPerQuery (integer) --The upper data usage limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan.
RequesterPaysEnabled (boolean) --If set to true , allows members assigned to a workgroup to reference Amazon S3 Requester Pays buckets in queries. If set to false , workgroup members cannot query data from Requester Pays buckets, and queries that retrieve data from Requester Pays buckets cause an error. The default is false . For more information about Requester Pays buckets, see Requester Pays Buckets in the Amazon Simple Storage Service Developer Guide .
Description (string) --The workgroup description.
CreationTime (datetime) --The date and time the workgroup was created.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'WorkGroup': {
'Name': 'string',
'State': 'ENABLED'|'DISABLED',
'Configuration': {
'ResultConfiguration': {
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
'EnforceWorkGroupConfiguration': True|False,
'PublishCloudWatchMetricsEnabled': True|False,
'BytesScannedCutoffPerQuery': 123,
'RequesterPaysEnabled': True|False
},
'Description': 'string',
'CreationTime': datetime(2015, 1, 1)
}
}
"""
pass
def list_named_queries(NextToken=None, MaxResults=None, WorkGroup=None):
"""
Provides a list of available query IDs only for queries saved in the specified workgroup. Requires that you have access to the workgroup. If a workgroup is not specified, lists the saved queries for the primary workgroup.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.list_named_queries(
NextToken='string',
MaxResults=123,
WorkGroup='string'
)
:type NextToken: string
:param NextToken: The token that specifies where to start pagination if a previous request was truncated.
:type MaxResults: integer
:param MaxResults: The maximum number of queries to return in this request.
:type WorkGroup: string
:param WorkGroup: The name of the workgroup from which the named queries are returned. If a workgroup is not specified, the saved queries for the primary workgroup are returned.
:rtype: dict
ReturnsResponse Syntax
{
'NamedQueryIds': [
'string',
],
'NextToken': 'string'
}
Response Structure
(dict) --
NamedQueryIds (list) --
The list of unique query IDs.
(string) --
NextToken (string) --
A token to be used by the next request if this request is truncated.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'NamedQueryIds': [
'string',
],
'NextToken': 'string'
}
:returns:
(string) --
"""
pass
def list_query_executions(NextToken=None, MaxResults=None, WorkGroup=None):
"""
Provides a list of available query execution IDs for the queries in the specified workgroup. If a workgroup is not specified, returns a list of query execution IDs for the primary workgroup. Requires you to have access to the workgroup in which the queries ran.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.list_query_executions(
NextToken='string',
MaxResults=123,
WorkGroup='string'
)
:type NextToken: string
:param NextToken: The token that specifies where to start pagination if a previous request was truncated.
:type MaxResults: integer
:param MaxResults: The maximum number of query executions to return in this request.
:type WorkGroup: string
:param WorkGroup: The name of the workgroup from which queries are returned. If a workgroup is not specified, a list of available query execution IDs for the queries in the primary workgroup is returned.
:rtype: dict
ReturnsResponse Syntax
{
'QueryExecutionIds': [
'string',
],
'NextToken': 'string'
}
Response Structure
(dict) --
QueryExecutionIds (list) --
The unique IDs of each query execution as an array of strings.
(string) --
NextToken (string) --
A token to be used by the next request if this request is truncated.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'QueryExecutionIds': [
'string',
],
'NextToken': 'string'
}
:returns:
(string) --
"""
pass
def list_tags_for_resource(ResourceARN=None, NextToken=None, MaxResults=None):
"""
Lists the tags associated with this workgroup.
See also: AWS API Documentation
Exceptions
:example: response = client.list_tags_for_resource(
ResourceARN='string',
NextToken='string',
MaxResults=123
)
:type ResourceARN: string
:param ResourceARN: [REQUIRED]
Lists the tags for the workgroup resource with the specified ARN.
:type NextToken: string
:param NextToken: The token for the next set of results, or null if there are no additional results for this request, where the request lists the tags for the workgroup resource with the specified ARN.
:type MaxResults: integer
:param MaxResults: The maximum number of results to be returned per request that lists the tags for the workgroup resource.
:rtype: dict
ReturnsResponse Syntax
{
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
Tags (list) --
The list of tags associated with this workgroup.
(dict) --
A tag that you can add to a resource. A tag is a label that you assign to an AWS Athena resource (a workgroup). Each tag consists of a key and an optional value, both of which you define. Tags enable you to categorize workgroups in Athena, for example, by purpose, owner, or environment. Use a consistent set of tag keys to make it easier to search and filter workgroups in your account. The maximum tag key length is 128 Unicode characters in UTF-8. The maximum tag value length is 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys and values are case-sensitive. Tag keys must be unique per resource.
Key (string) --
A tag key. The tag key length is from 1 to 128 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys are case-sensitive and must be unique per resource.
Value (string) --
A tag value. The tag value length is from 0 to 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag values are case-sensitive.
NextToken (string) --
A token to be used by the next request if this request is truncated.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.ResourceNotFoundException
:return: {
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
],
'NextToken': 'string'
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.ResourceNotFoundException
"""
pass
def list_work_groups(NextToken=None, MaxResults=None):
"""
Lists available workgroups for the account.
See also: AWS API Documentation
Exceptions
:example: response = client.list_work_groups(
NextToken='string',
MaxResults=123
)
:type NextToken: string
:param NextToken: A token to be used by the next request if this request is truncated.
:type MaxResults: integer
:param MaxResults: The maximum number of workgroups to return in this request.
:rtype: dict
ReturnsResponse Syntax
{
'WorkGroups': [
{
'Name': 'string',
'State': 'ENABLED'|'DISABLED',
'Description': 'string',
'CreationTime': datetime(2015, 1, 1)
},
],
'NextToken': 'string'
}
Response Structure
(dict) --
WorkGroups (list) --
The list of workgroups, including their names, descriptions, creation times, and states.
(dict) --
The summary information for the workgroup, which includes its name, state, description, and the date and time it was created.
Name (string) --
The name of the workgroup.
State (string) --
The state of the workgroup.
Description (string) --
The workgroup description.
CreationTime (datetime) --
The workgroup creation date and time.
NextToken (string) --
A token to be used by the next request if this request is truncated.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {
'WorkGroups': [
{
'Name': 'string',
'State': 'ENABLED'|'DISABLED',
'Description': 'string',
'CreationTime': datetime(2015, 1, 1)
},
],
'NextToken': 'string'
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
def start_query_execution(QueryString=None, ClientRequestToken=None, QueryExecutionContext=None, ResultConfiguration=None, WorkGroup=None):
"""
Runs the SQL query statements contained in the Query . Requires you to have access to the workgroup in which the query ran.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.start_query_execution(
QueryString='string',
ClientRequestToken='string',
QueryExecutionContext={
'Database': 'string'
},
ResultConfiguration={
'OutputLocation': 'string',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
}
},
WorkGroup='string'
)
:type QueryString: string
:param QueryString: [REQUIRED]
The SQL query statements to be executed.
:type ClientRequestToken: string
:param ClientRequestToken: A unique case-sensitive string used to ensure the request to create the query is idempotent (executes only once). If another StartQueryExecution request is received, the same response is returned and another query is not created. If a parameter has changed, for example, the QueryString , an error is returned.
Warning
This token is listed as not required because AWS SDKs (for example the AWS SDK for Java) auto-generate the token for users. If you are not using the AWS SDK or the AWS CLI, you must provide this token or the action will fail.
This field is autopopulated if not provided.
:type QueryExecutionContext: dict
:param QueryExecutionContext: The database within which the query executes.
Database (string) --The name of the database.
:type ResultConfiguration: dict
:param ResultConfiguration: Specifies information about where and how to save the results of the query execution. If the query runs in a workgroup, then workgroup's settings may override query settings. This affects the query results location. The workgroup settings override is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
OutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . To run the query, you must specify the query results location in one of two ways: for individual queries, using this setting (client-side), or in the workgroup, using WorkGroupConfiguration . If neither is set, Athena issues an error that no output location is provided. For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
EncryptionConfiguration (dict) --If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS ) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings .
EncryptionOption (string) -- [REQUIRED]Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.
If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
KmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.
:type WorkGroup: string
:param WorkGroup: The name of the workgroup in which the query is being started.
:rtype: dict
ReturnsResponse Syntax
{
'QueryExecutionId': 'string'
}
Response Structure
(dict) --
QueryExecutionId (string) --
The unique ID of the query that ran as a result of this request.
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.TooManyRequestsException
:return: {
'QueryExecutionId': 'string'
}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.TooManyRequestsException
"""
pass
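# End-to-end sketch of StartQueryExecution using the request syntax above. The
# database name and S3 output location are placeholders; if the workgroup enforces
# its own configuration, the client-side ResultConfiguration here is overridden,
# as the documentation above explains.
def _example_start_query(sql, database='analytics_db'):
    import boto3
    athena = boto3.client('athena')
    response = athena.start_query_execution(
        QueryString=sql,
        QueryExecutionContext={'Database': database},
        ResultConfiguration={
            'OutputLocation': 's3://example-athena-results/adhoc/',  # placeholder bucket
        },
        WorkGroup='primary',
    )
    # The returned ID is what GetQueryExecution, GetQueryResults, and
    # StopQueryExecution expect as QueryExecutionId.
    return response['QueryExecutionId']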
def stop_query_execution(QueryExecutionId=None):
"""
Stops a query execution. Requires you to have access to the workgroup in which the query ran.
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.stop_query_execution(
QueryExecutionId='string'
)
:type QueryExecutionId: string
:param QueryExecutionId: [REQUIRED]
The unique ID of the query execution to stop.
This field is autopopulated if not provided.
:rtype: dict
ReturnsResponse Syntax{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {}
:returns:
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
"""
pass
def tag_resource(ResourceARN=None, Tags=None):
"""
Adds one or more tags to the resource, such as a workgroup. A tag is a label that you assign to an AWS Athena resource (a workgroup). Each tag consists of a key and an optional value, both of which you define. Tags enable you to categorize resources (workgroups) in Athena, for example, by purpose, owner, or environment. Use a consistent set of tag keys to make it easier to search and filter workgroups in your account. For best practices, see AWS Tagging Strategies . The key length is from 1 (minimum) to 128 (maximum) Unicode characters in UTF-8. The tag value length is from 0 (minimum) to 256 (maximum) Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys and values are case-sensitive. Tag keys must be unique per resource. If you specify more than one, separate them by commas.
See also: AWS API Documentation
Exceptions
:example: response = client.tag_resource(
ResourceARN='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
]
)
:type ResourceARN: string
:param ResourceARN: [REQUIRED]
Requests that one or more tags are added to the resource (such as a workgroup) for the specified ARN.
:type Tags: list
:param Tags: [REQUIRED]
One or more tags, separated by commas, to be added to the resource, such as a workgroup.
(dict) --A tag that you can add to a resource. A tag is a label that you assign to an AWS Athena resource (a workgroup). Each tag consists of a key and an optional value, both of which you define. Tags enable you to categorize workgroups in Athena, for example, by purpose, owner, or environment. Use a consistent set of tag keys to make it easier to search and filter workgroups in your account. The maximum tag key length is 128 Unicode characters in UTF-8. The maximum tag value length is 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys and values are case-sensitive. Tag keys must be unique per resource.
Key (string) --A tag key. The tag key length is from 1 to 128 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys are case-sensitive and must be unique per resource.
Value (string) --A tag value. The tag value length is from 0 to 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag values are case-sensitive.
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
"""
pass
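# Tagging sketch for a workgroup ARN, mirroring the Tags structure documented
# above. The account ID, region, and workgroup name in the ARN are placeholders.
def _example_tag_work_group():
    import boto3
    athena = boto3.client('athena')
    athena.tag_resource(
        ResourceARN='arn:aws:athena:us-east-1:123456789012:workgroup/reporting',  # placeholder ARN
        Tags=[
            {'Key': 'owner', 'Value': 'data-platform'},
            {'Key': 'environment', 'Value': 'production'},
        ],
    )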
def untag_resource(ResourceARN=None, TagKeys=None):
"""
Removes one or more tags from the workgroup resource. Takes as an input a list of TagKey Strings separated by commas, and removes their tags at the same time.
See also: AWS API Documentation
Exceptions
:example: response = client.untag_resource(
ResourceARN='string',
TagKeys=[
'string',
]
)
:type ResourceARN: string
:param ResourceARN: [REQUIRED]
Removes one or more tags from the workgroup resource for the specified ARN.
:type TagKeys: list
:param TagKeys: [REQUIRED]
Removes the tags associated with one or more tag keys from the workgroup resource.
(string) --
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
Athena.Client.exceptions.ResourceNotFoundException
:return: {}
:returns:
(dict) --
"""
pass
def update_work_group(WorkGroup=None, Description=None, ConfigurationUpdates=None, State=None):
"""
Updates the workgroup with the specified name. The workgroup's name cannot be changed.
See also: AWS API Documentation
Exceptions
:example: response = client.update_work_group(
WorkGroup='string',
Description='string',
ConfigurationUpdates={
'EnforceWorkGroupConfiguration': True|False,
'ResultConfigurationUpdates': {
'OutputLocation': 'string',
'RemoveOutputLocation': True|False,
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'|'SSE_KMS'|'CSE_KMS',
'KmsKey': 'string'
},
'RemoveEncryptionConfiguration': True|False
},
'PublishCloudWatchMetricsEnabled': True|False,
'BytesScannedCutoffPerQuery': 123,
'RemoveBytesScannedCutoffPerQuery': True|False,
'RequesterPaysEnabled': True|False
},
State='ENABLED'|'DISABLED'
)
:type WorkGroup: string
:param WorkGroup: [REQUIRED]
The specified workgroup that will be updated.
:type Description: string
:param Description: The workgroup description.
:type ConfigurationUpdates: dict
:param ConfigurationUpdates: The workgroup configuration that will be updated for the given workgroup.
EnforceWorkGroupConfiguration (boolean) --If set to 'true', the settings for the workgroup override client-side settings. If set to 'false' client-side settings are used. For more information, see Workgroup Settings Override Client-Side Settings .
ResultConfigurationUpdates (dict) --The result configuration information about the queries in this workgroup that will be updated. Includes the updated results location and an updated option for encrypting query results.
OutputLocation (string) --The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/ . For more information, see Query Results . If workgroup settings override client-side settings, then the query uses the location for the query results and the encryption configuration that are specified for the workgroup. The 'workgroup settings override' is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See WorkGroupConfiguration$EnforceWorkGroupConfiguration .
RemoveOutputLocation (boolean) --If set to 'true', indicates that the previously-specified query results location (also known as a client-side setting) for queries in this workgroup should be ignored and set to null. If set to 'false' or not set, and a value is present in the OutputLocation in ResultConfigurationUpdates (the client-side setting), the OutputLocation in the workgroup's ResultConfiguration will be updated with the new value. For more information, see Workgroup Settings Override Client-Side Settings .
EncryptionConfiguration (dict) --The encryption configuration for the query results.
EncryptionOption (string) -- [REQUIRED]Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used.
If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup.
KmsKey (string) --For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID.
RemoveEncryptionConfiguration (boolean) --If set to 'true', indicates that the previously-specified encryption configuration (also known as the client-side setting) for queries in this workgroup should be ignored and set to null. If set to 'false' or not set, and a value is present in the EncryptionConfiguration in ResultConfigurationUpdates (the client-side setting), the EncryptionConfiguration in the workgroup's ResultConfiguration will be updated with the new value. For more information, see Workgroup Settings Override Client-Side Settings .
PublishCloudWatchMetricsEnabled (boolean) --Indicates whether this workgroup enables publishing metrics to Amazon CloudWatch.
BytesScannedCutoffPerQuery (integer) --The upper limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan.
RemoveBytesScannedCutoffPerQuery (boolean) --Indicates that the data usage control limit per query is removed. WorkGroupConfiguration$BytesScannedCutoffPerQuery
RequesterPaysEnabled (boolean) --If set to true , allows members assigned to a workgroup to specify Amazon S3 Requester Pays buckets in queries. If set to false , workgroup members cannot query data from Requester Pays buckets, and queries that retrieve data from Requester Pays buckets cause an error. The default is false . For more information about Requester Pays buckets, see Requester Pays Buckets in the Amazon Simple Storage Service Developer Guide .
:type State: string
:param State: The workgroup state that will be updated for the given workgroup.
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Athena.Client.exceptions.InternalServerException
Athena.Client.exceptions.InvalidRequestException
:return: {}
:returns:
(dict) --
"""
pass |
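# Sketch of an in-place workgroup update using ConfigurationUpdates as documented
# above: move the results location and start enforcing workgroup settings. The
# workgroup name and bucket are placeholders.
def _example_update_work_group():
    import boto3
    athena = boto3.client('athena')
    athena.update_work_group(
        WorkGroup='reporting',
        ConfigurationUpdates={
            'EnforceWorkGroupConfiguration': True,
            'ResultConfigurationUpdates': {
                'OutputLocation': 's3://example-athena-results/reporting-v2/',  # placeholder bucket
            },
        },
        State='ENABLED',
    )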
# -*- coding: utf-8 -*-
class Node:
def __init__(self, val, isLeaf, topLeft, topRight, bottomLeft, bottomRight):
self.val = val
self.isLeaf = isLeaf
self.topLeft = topLeft
self.topRight = topRight
self.bottomLeft = bottomLeft
self.bottomRight = bottomRight
def __eq__(self, other):
return (
other is not None and
self.val == other.val and
self.isLeaf == other.isLeaf and
self.topLeft == other.topLeft and
self.topRight == other.topRight and
self.bottomLeft == other.bottomLeft and
self.bottomRight == other.bottomRight
)
class Solution:
def intersect(self, quadTree1, quadTree2):
if quadTree1.isLeaf:
return quadTree1 if quadTree1.val else quadTree2
elif quadTree2.isLeaf:
return quadTree2 if quadTree2.val else quadTree1
topLeft = self.intersect(quadTree1.topLeft, quadTree2.topLeft)
topRight = self.intersect(quadTree1.topRight, quadTree2.topRight)
bottomLeft = self.intersect(quadTree1.bottomLeft, quadTree2.bottomLeft)
bottomRight = self.intersect(quadTree1.bottomRight, quadTree2.bottomRight)
if topLeft.isLeaf and topRight.isLeaf and bottomLeft.isLeaf and bottomRight.isLeaf:
if topLeft.val == topRight.val == bottomLeft.val == bottomRight.val:
return Node(topLeft.val, True, None, None, None, None)
return Node(None, False, topLeft, topRight, bottomLeft, bottomRight)
if __name__ == '__main__':
solution = Solution()
t0_4 = Node(False, True, None, None, None, None)
t0_3 = Node(False, True, None, None, None, None)
t0_2 = Node(True, True, None, None, None, None)
t0_1 = Node(True, True, None, None, None, None)
t0_0 = Node(None, False, t0_1, t0_2, t0_3, t0_4)
t1_8 = Node(True, True, None, None, None, None)
t1_7 = Node(True, True, None, None, None, None)
t1_6 = Node(False, True, None, None, None, None)
t1_5 = Node(False, True, None, None, None, None)
t1_4 = Node(False, True, None, None, None, None)
t1_3 = Node(True, True, None, None, None, None)
t1_2 = Node(None, False, t1_5, t1_6, t1_7, t1_8)
t1_1 = Node(True, True, None, None, None, None)
t1_0 = Node(None, False, t1_1, t1_2, t1_3, t1_4)
t2_4 = Node(False, True, None, None, None, None)
t2_3 = Node(True, True, None, None, None, None)
t2_2 = Node(True, True, None, None, None, None)
t2_1 = Node(True, True, None, None, None, None)
t2_0 = Node(None, False, t2_1, t2_2, t2_3, t2_4)
assert t2_0 == solution.intersect(t0_0, t1_0)
| class Node:
def __init__(self, val, isLeaf, topLeft, topRight, bottomLeft, bottomRight):
self.val = val
self.isLeaf = isLeaf
self.topLeft = topLeft
self.topRight = topRight
self.bottomLeft = bottomLeft
self.bottomRight = bottomRight
def __eq__(self, other):
return other is not None and self.val == other.val and (self.isLeaf == other.isLeaf) and (self.topLeft == other.topLeft) and (self.topRight == other.topRight) and (self.bottomLeft == other.bottomLeft) and (self.bottomRight == other.bottomRight)
class Solution:
def intersect(self, quadTree1, quadTree2):
if quadTree1.isLeaf:
return quadTree1 if quadTree1.val else quadTree2
elif quadTree2.isLeaf:
return quadTree2 if quadTree2.val else quadTree1
top_left = self.intersect(quadTree1.topLeft, quadTree2.topLeft)
top_right = self.intersect(quadTree1.topRight, quadTree2.topRight)
bottom_left = self.intersect(quadTree1.bottomLeft, quadTree2.bottomLeft)
bottom_right = self.intersect(quadTree1.bottomRight, quadTree2.bottomRight)
if topLeft.isLeaf and topRight.isLeaf and bottomLeft.isLeaf and bottomRight.isLeaf:
if topLeft.val == topRight.val == bottomLeft.val == bottomRight.val:
return node(topLeft.val, True, None, None, None, None)
return node(None, False, topLeft, topRight, bottomLeft, bottomRight)
if __name__ == '__main__':
solution = solution()
t0_4 = node(False, True, None, None, None, None)
t0_3 = node(False, True, None, None, None, None)
t0_2 = node(True, True, None, None, None, None)
t0_1 = node(True, True, None, None, None, None)
t0_0 = node(None, False, t0_1, t0_2, t0_3, t0_4)
t1_8 = node(True, True, None, None, None, None)
t1_7 = node(True, True, None, None, None, None)
t1_6 = node(False, True, None, None, None, None)
t1_5 = node(False, True, None, None, None, None)
t1_4 = node(False, True, None, None, None, None)
t1_3 = node(True, True, None, None, None, None)
t1_2 = node(None, False, t1_5, t1_6, t1_7, t1_8)
t1_1 = node(True, True, None, None, None, None)
t1_0 = node(None, False, t1_1, t1_2, t1_3, t1_4)
t2_4 = node(False, True, None, None, None, None)
t2_3 = node(True, True, None, None, None, None)
t2_2 = node(True, True, None, None, None, None)
t2_1 = node(True, True, None, None, None, None)
t2_0 = node(None, False, t2_1, t2_2, t2_3, t2_4)
assert t2_0 == solution.intersect(t0_0, t1_0) |
'''
@Author: Hata
@Date: 2020-07-30 09:27:42
@LastEditors: Hata
@LastEditTime: 2020-07-30 09:29:33
@FilePath: \LeetCode\M02-04.py
@Description: https://leetcode-cn.com/problems/partition-list-lcci/
'''
# Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def partition(self, head: ListNode, x: int) -> ListNode:
p, q = head, head
while q:
if q.val < x:
q.val, p.val = p.val, q.val
p = p.next
q = q.next
return head | """
@Author: Hata
@Date: 2020-07-30 09:27:42
@LastEditors: Hata
@LastEditTime: 2020-07-30 09:29:33
@FilePath: \\LeetCode\\M02-04.py
@Description: https://leetcode-cn.com/problems/partition-list-lcci/
"""
class Listnode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def partition(self, head: ListNode, x: int) -> ListNode:
(p, q) = (head, head)
while q:
if q.val < x:
(q.val, p.val) = (p.val, q.val)
p = p.next
q = q.next
return head |
print("{:_^20}".format("School average"))
FirstGrade = float(input("First Grade: "))
SecondGrade = float(input("Second Grade: "))
Media = (FirstGrade + SecondGrade) / 2
print("Media: {:.1f}".format(Media))
print("{:_^20}".format("School average 2"))
FirstGrade = float(input("First Grade: "))
SecondGrade = float(input("Second Grade: "))
print("Media: {:.1f}".format((FirstGrade + SecondGrade) / 2))
if(Media >= 6.0): {
print("Student approved")
}
else: {
print("Student failed")
} | print('{:_^20}'.format('School average'))
first_grade = float(input('First Grade: '))
second_grade = float(input('Second Grade: '))
media = (FirstGrade + SecondGrade) / 2
print('Media: {:.1f}'.format(Media))
print('{:_^20}'.format('School average 2'))
first_grade = float(input('First Grade: '))
second_grade = float(input('Second Grade: '))
print('Media: {:.1f}'.format((FirstGrade + SecondGrade) / 2))
if Media >= 6.0:
{print('Student approved')}
else:
{print('Student failed')} |
"""
A system log management tool with time-series causal analysis
"""
| """
A system log management tool with time-series causal analysis
""" |
class BaseException(Exception):
code = 0
message = ''
def __init__(self, code, message):
self.code = code
self.message = message
class ServerException(BaseException):
def __init__(self, message):
super().__init__(500, message)
class ClientException(BaseException):
def __init__(self, message):
super().__init__(400, message)
| class Baseexception(Exception):
code = 0
message = ''
def __init__(self, code, message):
self.code = code
self.message = message
class Serverexception(BaseException):
def __init__(self, message):
super().__init__(500, message)
class Clientexception(BaseException):
def __init__(self, message):
super().__init__(400, message) |
# Copyright (c) 2020 jya
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Color classes."""
# color mode
MODE_GRAYSCALE = 'grayscale'
MODE_RGB = 'rgb'
# preset grayscale colors
BLACK = 0x00
DARK_GRAY = 0x9d
GRAY = 0xc9
WHITE = 0xfe
TRANSPARENT = 0xff
# preset RGB colors
RGB_BLACK = 0x000000
RGB_DARK_GRAY = 0x9d9d9d
RGB_GRAY = 0xc9c9c9
RGB_WHITE = 0xfefefe
RGB_TRANSPARENT = 0xffffff
def get_rgb(value):
r = (value & 0xff0000) >> 16
g = (value & 0x00ff00) >> 8
b = value & 0x0000ff
return (r, g, b)
class ColorPalette:
def __init__(self, mode=MODE_GRAYSCALE, colors=(BLACK, DARK_GRAY, GRAY, WHITE)):
if mode not in [MODE_GRAYSCALE, MODE_RGB]:
raise ValueError('mode must be MODE_GRAYSCALE or MODE_RGB')
if len(colors) != 4:
raise ValueError('colors must have 4 color values (black, darkgray, gray, white)')
self.mode = mode
self.black = colors[0]
self.darkgray = colors[1]
self.gray = colors[2]
self.white = colors[3]
if mode == MODE_GRAYSCALE:
self.transparent = TRANSPARENT
else:
self.transparent = RGB_TRANSPARENT
DEFAULT_COLORPALETTE = ColorPalette(MODE_GRAYSCALE, (BLACK, DARK_GRAY, GRAY, WHITE))
DEFAULT_RGB_COLORPALETTE = ColorPalette(MODE_RGB, (RGB_BLACK, RGB_DARK_GRAY, RGB_GRAY, RGB_WHITE))
| """Color classes."""
mode_grayscale = 'grayscale'
mode_rgb = 'rgb'
black = 0
dark_gray = 157
gray = 201
white = 254
transparent = 255
rgb_black = 0
rgb_dark_gray = 10329501
rgb_gray = 13224393
rgb_white = 16711422
rgb_transparent = 16777215
def get_rgb(value):
r = (value & 16711680) >> 16
g = (value & 65280) >> 8
b = value & 255
return (r, g, b)
class Colorpalette:
def __init__(self, mode=MODE_GRAYSCALE, colors=(BLACK, DARK_GRAY, GRAY, WHITE)):
if mode not in [MODE_GRAYSCALE, MODE_RGB]:
raise value_error('mode must be MODE_GRAYSCALE or MODE_RGB')
if len(colors) != 4:
raise value_error('colors must have 4 color values (black, darkgray, gray, white)')
self.mode = mode
self.black = colors[0]
self.darkgray = colors[1]
self.gray = colors[2]
self.white = colors[3]
if mode == MODE_GRAYSCALE:
self.transparent = TRANSPARENT
else:
self.transparent = RGB_TRANSPARENT
default_colorpalette = color_palette(MODE_GRAYSCALE, (BLACK, DARK_GRAY, GRAY, WHITE))
default_rgb_colorpalette = color_palette(MODE_RGB, (RGB_BLACK, RGB_DARK_GRAY, RGB_GRAY, RGB_WHITE)) |
UNTRACKED_PATH = "repositorios"
COMPILER = "pdflatex"
SAE_COUNTER_GITHUB = "https://github.com/comissao-aerodesign/PyAeroCounter.git"
SAE_COUNTER_PATH = "PyAeroCounter"
PROJECTS_OVERLEAF = [
{
'name': "<Nome do projeto>",
'path': "<Pasta_do_projeto>",
'main': "<Arquivo tex>",
'url': "https://git.overleaf.com/<SUA_URL_OVERLEAF_GIT>"
},
]
OVERLEAF_USER = "<SEU_USUARIO>"
OVERLEAF_PASSWORD = "<SUA_SENHA>" | untracked_path = 'repositorios'
compiler = 'pdflatex'
sae_counter_github = 'https://github.com/comissao-aerodesign/PyAeroCounter.git'
sae_counter_path = 'PyAeroCounter'
projects_overleaf = [{'name': '<Nome do projeto>', 'path': '<Pasta_do_projeto>', 'main': '<Arquivo tex>', 'url': 'https://git.overleaf.com/<SUA_URL_OVERLEAF_GIT>'}]
overleaf_user = '<SEU_USUARIO>'
overleaf_password = '<SUA_SENHA>' |
def test(function, arguments_result_list, one_argument_function=False):
"""
testing function
for every (arguments,expected_result) pair in arguments_result_list
checks if function(arguments) == result
"""
for args, expected_result in arguments_result_list:
if one_argument_function:
args = [args]
call_result = function(*args)
if call_result != expected_result:
exception_message = f"function {function.__name__} returned {call_result} instaed of {expected_result} for aguments:{args}"
raise AssertionError(exception_message)
| def test(function, arguments_result_list, one_argument_function=False):
"""
testing function
for every (arguments,expected_result) pair in arguments_result_list
checks if function(arguments) == result
"""
for (args, expected_result) in arguments_result_list:
if one_argument_function:
args = [args]
call_result = function(*args)
if call_result != expected_result:
            exception_message = f'function {function.__name__} returned {call_result} instead of {expected_result} for arguments: {args}'
raise assertion_error(exception_message) |
def get_entity_bios(seq,id2label):
"""Gets entities from sequence.
note: BIOS
Args:
seq (list): sequence of labels.
Returns:
list: list of (chunk_type, chunk_start, chunk_end).
Example:
# >>> seq = ['B-PER', 'I-PER', 'O', 'S-LOC']
# >>> get_entity_bios(seq)
[['PER', 0,1], ['LOC', 3, 3]]
"""
chunks = []
chunk = [-1, -1, -1]
for indx, tag in enumerate(seq):
if not isinstance(tag, str):
tag = id2label[tag]
if tag.startswith("S-"):
if chunk[2] != -1:
chunks.append(chunk)
chunk = [-1, -1, -1]
chunk[1] = indx
chunk[2] = indx
chunk[0] = tag.split('-')[1]
chunks.append(chunk)
chunk = (-1, -1, -1)
if tag.startswith("B-"):
if chunk[2] != -1:
chunks.append(chunk)
chunk = [-1, -1, -1]
chunk[1] = indx
chunk[0] = tag.split('-')[1]
elif tag.startswith('I-') and chunk[1] != -1:
_type = tag.split('-')[1]
if _type == chunk[0]:
chunk[2] = indx
if indx == len(seq) - 1:
chunks.append(chunk)
else:
if chunk[2] != -1:
chunks.append(chunk)
chunk = [-1, -1, -1]
return chunks
def get_entity_bio(seq,id2label):
"""Gets entities from sequence.
note: BIO
Args:
seq (list): sequence of labels.
Returns:
list: list of (chunk_type, chunk_start, chunk_end).
Example:
seq = ['B-PER', 'I-PER', 'O', 'B-LOC']
get_entity_bio(seq)
#output
[['PER', 0,1], ['LOC', 3, 3]]
"""
chunks = []
chunk = [-1, -1, -1]
for indx, tag in enumerate(seq):
if not isinstance(tag, str):
tag = id2label[tag]
if tag.startswith("B-"):
if chunk[2] != -1:
chunks.append(chunk)
chunk = [-1, -1, -1]
chunk[1] = indx
chunk[0] = tag.split('-')[1]
chunk[2] = indx
if indx == len(seq) - 1:
chunks.append(chunk)
elif tag.startswith('I-') and chunk[1] != -1:
_type = tag.split('-')[1]
if _type == chunk[0]:
chunk[2] = indx
if indx == len(seq) - 1:
chunks.append(chunk)
else:
if chunk[2] != -1:
chunks.append(chunk)
chunk = [-1, -1, -1]
return chunks
def get_entities(seq,id2label,markup='bios'):
'''
:param seq:
:param id2label:
:param markup:
:return:
'''
assert markup in ['bio','bios']
if markup =='bio':
return get_entity_bio(seq,id2label)
else:
return get_entity_bios(seq,id2label)
| def get_entity_bios(seq, id2label):
"""Gets entities from sequence.
note: BIOS
Args:
seq (list): sequence of labels.
Returns:
list: list of (chunk_type, chunk_start, chunk_end).
Example:
# >>> seq = ['B-PER', 'I-PER', 'O', 'S-LOC']
# >>> get_entity_bios(seq)
[['PER', 0,1], ['LOC', 3, 3]]
"""
chunks = []
chunk = [-1, -1, -1]
for (indx, tag) in enumerate(seq):
if not isinstance(tag, str):
tag = id2label[tag]
if tag.startswith('S-'):
if chunk[2] != -1:
chunks.append(chunk)
chunk = [-1, -1, -1]
chunk[1] = indx
chunk[2] = indx
chunk[0] = tag.split('-')[1]
chunks.append(chunk)
chunk = (-1, -1, -1)
if tag.startswith('B-'):
if chunk[2] != -1:
chunks.append(chunk)
chunk = [-1, -1, -1]
chunk[1] = indx
chunk[0] = tag.split('-')[1]
elif tag.startswith('I-') and chunk[1] != -1:
_type = tag.split('-')[1]
if _type == chunk[0]:
chunk[2] = indx
if indx == len(seq) - 1:
chunks.append(chunk)
else:
if chunk[2] != -1:
chunks.append(chunk)
chunk = [-1, -1, -1]
return chunks
def get_entity_bio(seq, id2label):
"""Gets entities from sequence.
note: BIO
Args:
seq (list): sequence of labels.
Returns:
list: list of (chunk_type, chunk_start, chunk_end).
Example:
seq = ['B-PER', 'I-PER', 'O', 'B-LOC']
get_entity_bio(seq)
#output
[['PER', 0,1], ['LOC', 3, 3]]
"""
chunks = []
chunk = [-1, -1, -1]
for (indx, tag) in enumerate(seq):
if not isinstance(tag, str):
tag = id2label[tag]
if tag.startswith('B-'):
if chunk[2] != -1:
chunks.append(chunk)
chunk = [-1, -1, -1]
chunk[1] = indx
chunk[0] = tag.split('-')[1]
chunk[2] = indx
if indx == len(seq) - 1:
chunks.append(chunk)
elif tag.startswith('I-') and chunk[1] != -1:
_type = tag.split('-')[1]
if _type == chunk[0]:
chunk[2] = indx
if indx == len(seq) - 1:
chunks.append(chunk)
else:
if chunk[2] != -1:
chunks.append(chunk)
chunk = [-1, -1, -1]
return chunks
def get_entities(seq, id2label, markup='bios'):
"""
:param seq:
:param id2label:
:param markup:
:return:
"""
assert markup in ['bio', 'bios']
if markup == 'bio':
return get_entity_bio(seq, id2label)
else:
return get_entity_bios(seq, id2label) |
class AppValidationError(Exception):
def __init__(self, msg, response=None):
super(AppValidationError, self).__init__(msg)
self.response = response
| class Appvalidationerror(Exception):
def __init__(self, msg, response=None):
super(AppValidationError, self).__init__(msg)
self.response = response |
# ----------------------------------------------------------------------------
# Copyright (c) 2017-, labman development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
class LabmanError(Exception):
"""Base class for all labman exceptions"""
pass
class LabmanUnknownIdError(LabmanError):
"""Exception for error when an object doesn't exist in the DB
Parameters
----------
obj_name : str
The name of the object
obj_id : str
The unknown id
"""
def __init__(self, obj_name, obj_id):
super(LabmanUnknownIdError, self).__init__()
self.args = ("%s with ID '%s' does not exist" % (obj_name, obj_id), )
class LabmanDuplicateError(LabmanError):
"""Exception for error when duplicates occur
Parameters
----------
obj_name : str
The name of the object
attributes : list of (str, str)
The duplicated attributes
"""
def __init__(self, obj_name, attributes):
super(LabmanDuplicateError, self).__init__()
attr = ', '.join(["%s = %s" % (key, val) for key, val in attributes])
self.args = ("%s with %s already exists" % (obj_name, attr), )
class LabmanLoginError(LabmanError):
"""Exception for error when login in"""
def __init__(self):
super(LabmanLoginError, self).__init__()
self.args = ("Incorrect user id or password", )
class LabmanLoginDisabledError(LabmanError):
"""Exception for error when user is not allowed"""
def __init__(self):
super(LabmanLoginDisabledError, self).__init__()
self.args = ("Login credentials disabled for this portal", )
| class Labmanerror(Exception):
"""Base class for all labman exceptions"""
pass
class Labmanunknowniderror(LabmanError):
"""Exception for error when an object doesn't exist in the DB
Parameters
----------
obj_name : str
The name of the object
obj_id : str
The unknown id
"""
def __init__(self, obj_name, obj_id):
super(LabmanUnknownIdError, self).__init__()
self.args = ("%s with ID '%s' does not exist" % (obj_name, obj_id),)
class Labmanduplicateerror(LabmanError):
"""Exception for error when duplicates occur
Parameters
----------
obj_name : str
The name of the object
attributes : list of (str, str)
The duplicated attributes
"""
def __init__(self, obj_name, attributes):
super(LabmanDuplicateError, self).__init__()
attr = ', '.join(['%s = %s' % (key, val) for (key, val) in attributes])
self.args = ('%s with %s already exists' % (obj_name, attr),)
class Labmanloginerror(LabmanError):
"""Exception for error when login in"""
def __init__(self):
super(LabmanLoginError, self).__init__()
self.args = ('Incorrect user id or password',)
class Labmanlogindisablederror(LabmanError):
"""Exception for error when user is not allowed"""
def __init__(self):
super(LabmanLoginDisabledError, self).__init__()
self.args = ('Login credentials disabled for this portal',) |
# import random
# class InvalidGreetingException(Exception):
# """
# Raised when a string does not imply greeting
# """
# pass
# def main():
# """
# Doc string.Girish
# """
# greeting_messages = ["Hello", "Welcome", "Hey", "Hi"]
# choices = greeting_messages + ["Bye", "Thanks", "GoodBye"]
# greeting_choice = random.choice(choices)
# print("Selected choice: {0!s}".format(greeting_choice))
# if (greeting_choice not in greeting_messages):
# raise InvalidGreetingException(
# "Not a valid word for greeting", greeting_choice)
# if __name__ == "__main__":
# try:
# main()
# except InvalidGreetingException as ex:
# for arg in ex.args:
# print(arg)
# class Phone:
# def __init__(self, name, color):
# self._name = name
# self._color = color
# # name not changeable once set
# @property
# def name(self):
# return self._name
# @property
# def color(self):
# """
# Color of the phone
# """
# return self._color
# # property returns an object
# # which is the same as getter method
# #
# @color.setter
# def color(self, color):
# self._color = color
# phone = Phone("Motorola onepower", "black")
# print("Name: {0!s}, Color: {1!s}".format(phone.name, phone.color))
# phone.color = "red"
# print("Name: {0!s}, Color: {1!s}".format(phone.name, phone.color))
# class Sample:
# def print_message(self, message):
# print("Inside: print_message, message:{}".format(message))
# def fake_print_message(message):
# print("Inside: fake_print_message, message:{}".format(message))
# sample = Sample()
# sample.print_message("Hello")
# print(sample.__dict__)
# # think of this like replacing the __dict__ of the object with key "print_message", updating its value to point to a different function object.
# sample.print_message = fake_print_message
# sample.print_message("Hello")
# print(sample.__dict__)
class CallableObject:
def __call__(self, message):
print("message: {}".format(message))
obj = CallableObject()
obj("This is a callable object behaving like a function")
CallableObject()("Hello")
| class Callableobject:
def __call__(self, message):
print('message: {}'.format(message))
obj = callable_object()
obj('This is a callable object behaving like a function')
callable_object()('Hello') |
#program to sort a list of elements using Comb sort.
def comb_sort(nums):
shrink_fact = 1.3
gaps = len(nums)
swapped = True
i = 0
while gaps > 1 or swapped:
gaps = int(float(gaps) / shrink_fact)
swapped = False
i = 0
while gaps + i < len(nums):
if nums[i] > nums[i+gaps]:
nums[i], nums[i+gaps] = nums[i+gaps], nums[i]
swapped = True
i += 1
return nums
num1 = input('Input comma separated numbers:\n').strip()
nums = [int(item) for item in num1.split(',')]
print(comb_sort(nums))
| def comb_sort(nums):
shrink_fact = 1.3
gaps = len(nums)
swapped = True
i = 0
while gaps > 1 or swapped:
gaps = int(float(gaps) / shrink_fact)
swapped = False
i = 0
while gaps + i < len(nums):
if nums[i] > nums[i + gaps]:
(nums[i], nums[i + gaps]) = (nums[i + gaps], nums[i])
swapped = True
i += 1
return nums
num1 = input('Input comma separated numbers:\n').strip()
nums = [int(item) for item in num1.split(',')]
print(comb_sort(nums)) |
#!/usr/bin/env python
# I need to figure out how I want to deal with these classes
class FacebookEndpoint:
pass | class Facebookendpoint:
pass |
def is_phone_number(text):
"""
This function was made to recognize a Brazilian phone number
:param text: number
:return:
"""
# normally, brazilians use the format +00 (00) 00000-0000 the brazilian code is +55, in the beginning
disallowed_characters = '+() -'
for character in disallowed_characters:
text = text.replace(character, '')
if text[0] == '5' and text[1] == '5':
text = text[2:]
if len(text) != 11:
return False
return True
print(is_phone_number('2191111-1111'))
print(is_phone_number('248888-9999'))
| def is_phone_number(text):
"""
This function was made to recognize a Brazilian phone number
:param text: number
:return:
"""
disallowed_characters = '+() -'
for character in disallowed_characters:
text = text.replace(character, '')
if text[0] == '5' and text[1] == '5':
text = text[2:]
if len(text) != 11:
return False
return True
print(is_phone_number('2191111-1111'))
print(is_phone_number('248888-9999')) |
sns.relplot(
data=monthly_victim_counts,
kind="line",
palette="colorblind",
height=3, aspect=4,
) | sns.relplot(data=monthly_victim_counts, kind='line', palette='colorblind', height=3, aspect=4) |
"""
Dictionary:
1. Normal variable holds 1 value; dictionary holds collection of key-value pairs; all keys must be distinct but values may be repeated
2. {} - curly bracket
3. Unordered
4. Mutable
5. uses Hashing internally
6. Functions:
1. dict[] : returns value at specified index
2. len() : returns length of dictionary
min() : returns min value in dictionary
max() : returns max value in dictionary
sum() : returns sum of values in dictionary
3. dict.reverse() : 'dict' object has no attribute 'reverse'
4. dict.sort() : 'dict' object has no attribute 'sort'
5. in : operator returns bool stating if specified value present in dictionary or not
6. dict[key] = value : add value with specified key
7. dict[key] : get value from dict with specified key
        dict.get(key) returns None if key doesn't exist
11. dict.pop(key) : dict.pop()
dict.popitem() pop() will remove last value
12. del dict[key] : delete
"""
dict = {10:"abc", 20:"xyz", 30:"pqr"}
print(dict)
print(type(dict))
print(dict[10])
print(dict, len(dict), min(dict), max(dict), sum(dict))
dict[40] = "def"
print(dict)
print(dict[30], dict.get(30))
print(dict.get(50), dict.get(60, "Not Available"))
#dict.reverse()
#dict.sort()
print(20 in dict, 80 in dict)
dict.popitem()
print(dict)
dict.pop(10)
print(dict)
del dict[30]
print(dict) | """
Dictionary:
1. Normal variable holds 1 value; dictionary holds collection of key-value pairs; all keys must be distinct but values may be repeated
2. {} - curly bracket
3. Unordered
4. Mutable
5. uses Hashing internally
6. Functions:
1. dict[] : returns value at specified index
2. len() : returns length of dictionary
min() : returns min value in dictionary
max() : returns max value in dictionary
sum() : returns sum of values in dictionary
3. dict.reverse() : 'dict' object has no attribute 'reverse'
4. dict.sort() : 'dict' object has no attribute 'sort'
5. in : operator returns bool stating if specified value present in dictionary or not
6. dict[key] = value : add value with specified key
7. dict[key] : get value from dict with specified key
        dict.get(key) returns None if key doesn't exist
11. dict.pop(key) : dict.pop()
dict.popitem() pop() will remove last value
12. del dict[key] : delete
"""
dict = {10: 'abc', 20: 'xyz', 30: 'pqr'}
print(dict)
print(type(dict))
print(dict[10])
print(dict, len(dict), min(dict), max(dict), sum(dict))
dict[40] = 'def'
print(dict)
print(dict[30], dict.get(30))
print(dict.get(50), dict.get(60, 'Not Available'))
print(20 in dict, 80 in dict)
dict.popitem()
print(dict)
dict.pop(10)
print(dict)
del dict[30]
print(dict) |
# https://www.codechef.com/problems/FLOW015
for T in range(int(input())):
n,days,c=int(input()),['sunday','monday','tuesday','wednesday','thursday','friday','saturday'],1
if(n>2001):
for z in range(2002,n+1):
if((z-1)%4==0 and ((z-1)%400==0 or (z-1)%100!=0)): c+=2
else: c+=1
else:
for z in range(2000,n-1,-1):
if(z%4==0 and (z%400==0 or z%100!=0)): c-=2
else: c-=1
print(days[c%7]) | for t in range(int(input())):
(n, days, c) = (int(input()), ['sunday', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday'], 1)
if n > 2001:
for z in range(2002, n + 1):
if (z - 1) % 4 == 0 and ((z - 1) % 400 == 0 or (z - 1) % 100 != 0):
c += 2
else:
c += 1
else:
for z in range(2000, n - 1, -1):
if z % 4 == 0 and (z % 400 == 0 or z % 100 != 0):
c -= 2
else:
c -= 1
print(days[c % 7]) |
#In this file I am going to model a Spotify playlist
playlist ={
'name':'hip-hop',
'author':'John',
'songs' : [
{
'title':'Walk it Talk it',
'artist': 'Migos',
},
{
'title':'New Freezer',
'artist' : 'Rich Kid',
},
{
'title':'New Freezer',
'artist':'Chris Brown',
}
],
}
for song in playlist['songs']:
print(song['title']) | playlist = {'name': 'hip-hop', 'author': 'John', 'songs': [{'title': 'Walk it Talk it', 'artist': 'Migos'}, {'title': 'New Freezer', 'artist': 'Rich Kid'}, {'title': 'New Freezer', 'artist': 'Chris Brown'}]}
for song in playlist['songs']:
print(song['title']) |
load("@bazel_gazelle//:deps.bzl", _go_repository = "go_repository")
load("@io_bazel_rules_go//go:def.bzl", _go_binary = "go_binary", _go_library = "go_library", _go_test = "go_test")
def go_repository(name, **kwargs):
"""Macro wrapping the Gazelle go_repository rule.
This conditionally defines the repository if it hasn't already been.
"""
if name in native.existing_rules():
return
_go_repository(name = name, **kwargs)
# Go importpath prefix shared by all Kythe libraries
go_prefix = "kythe.io/"
def _infer_importpath(name):
basename = native.package_name().split("/")[-1]
importpath = go_prefix + native.package_name()
if basename == name:
return importpath
return importpath + "/" + name
def go_binary(name, importpath = None, **kwargs):
"""This macro wraps the go_binary rule provided by the Bazel Go rules to
automatically infer the binary's importpath. It is otherwise equivalent in
function to a go_binary.
"""
if importpath == None:
importpath = _infer_importpath(name)
_go_binary(
name = name,
importpath = importpath,
out = name,
**kwargs
)
def go_library(name, importpath = None, **kwargs):
"""This macro wraps the go_library rule provided by the Bazel Go rules to
automatically infer the library's importpath. It is otherwise equivalent in
function to a go_library.
"""
if importpath == None:
importpath = _infer_importpath(name)
_go_library(
name = name,
importpath = importpath,
**kwargs
)
def go_test(name, library = None, **kwargs):
"""This macro wraps the go_test rule provided by the Bazel Go rules
to silence a deprecation warning for use of the "library" attribute.
It is otherwise equivalent in function to a go_test.
"""
# For internal tests (defined in the same package), we need to embed
# the library under test, but this is not needed for external tests.
embed = [library] if library else []
_go_test(
name = name,
embed = embed,
**kwargs
)
| load('@bazel_gazelle//:deps.bzl', _go_repository='go_repository')
load('@io_bazel_rules_go//go:def.bzl', _go_binary='go_binary', _go_library='go_library', _go_test='go_test')
def go_repository(name, **kwargs):
"""Macro wrapping the Gazelle go_repository rule.
This conditionally defines the repository if it hasn't already been.
"""
if name in native.existing_rules():
return
_go_repository(name=name, **kwargs)
go_prefix = 'kythe.io/'
def _infer_importpath(name):
basename = native.package_name().split('/')[-1]
importpath = go_prefix + native.package_name()
if basename == name:
return importpath
return importpath + '/' + name
def go_binary(name, importpath=None, **kwargs):
"""This macro wraps the go_binary rule provided by the Bazel Go rules to
automatically infer the binary's importpath. It is otherwise equivalent in
function to a go_binary.
"""
if importpath == None:
importpath = _infer_importpath(name)
_go_binary(name=name, importpath=importpath, out=name, **kwargs)
def go_library(name, importpath=None, **kwargs):
"""This macro wraps the go_library rule provided by the Bazel Go rules to
automatically infer the library's importpath. It is otherwise equivalent in
function to a go_library.
"""
if importpath == None:
importpath = _infer_importpath(name)
_go_library(name=name, importpath=importpath, **kwargs)
def go_test(name, library=None, **kwargs):
"""This macro wraps the go_test rule provided by the Bazel Go rules
to silence a deprecation warning for use of the "library" attribute.
It is otherwise equivalent in function to a go_test.
"""
embed = [library] if library else []
_go_test(name=name, embed=embed, **kwargs) |
# Numbers can be combined using mathematical operators
x = 1 + 1
y = 2 * 3
# Variables holding numbers can be used any way numbers can be used
z = y / x
# We can prove that these computations worked out the same
# using comparison operators, specifically == to test for equality:
print('===comparing===')
print('2 == x', 2 == x)
print('6 == y', 6 == y)
print('3 == z', 3 == z)
# Note that z is a float. Division in Python 3+ always returns a float.
# We can coerce the result to an int using the "integer division" operator
# which always rounds down:
int_div = y // x
# We can also use the "modulo" operator to compute the remainder:
remainder = y % x
print() # Just for a blank line in the output
# Two values can only be equal if they have the same type
print("1 == '1'", 1 == '1')
# Other common comparisons include <, <=, >, >=
print('1 < 2', 1 < 2) # True
print('10 >= 10', 10 >= 10) # True
print('10 > 10', 10 > 10) # False
print() # For a blank line in the output
# Strings are compared pseudo-alphabetically for greater than / less than
print('"albert" < "bill"', "albert" < "bill") # True
# HOWEVER, in python ALL capital letters come before ANY lowercase letters
print('"B" < "a"', "B" < "a") # True
# FYI: There are additional rules for other characters like $, %, ., and so on
# that we're ignoring for now.
# Strings can also be combined with math operators, but they mean different
# things when operating on strings
x = "hello " + "world." # Concatenation, x is "hello world."
y = "a" * 4 # Duplication, y = "aaaa"
print()
print(x)
print(y)
# Finally, we can combine the assignment operator and these math operations
# using the following shorthands:
x = 4
x += 3 # x = x + 3
x -= 1 # x = x - 1
x *= 2 # x = x * 2
x /= 4 # x = x / 4
# Micro-Exercise: predict the value of x. Then write a comparison statement
# involving x that evaluates to False. Print the result of that comparison.
| x = 1 + 1
y = 2 * 3
z = y / x
print('===comparing===')
print('2 == x', 2 == x)
print('6 == y', 6 == y)
print('3 == z', 3 == z)
int_div = y // x
remainder = y % x
print()
print("1 == '1'", 1 == '1')
print('1 < 2', 1 < 2)
print('10 >= 10', 10 >= 10)
print('10 > 10', 10 > 10)
print()
print('"albert" < "bill"', 'albert' < 'bill')
print('"B" < "a"', 'B' < 'a')
x = 'hello ' + 'world.'
y = 'a' * 4
print()
print(x)
print(y)
x = 4
x += 3
x -= 1
x *= 2
x /= 4 |
class Solution:
def kthFactor(self, n: int, k: int) -> int:
count = 0
for i in range(1,n+1):
if n%i==0:
count+=1
if count==k:
return i
return -1
| class Solution:
def kth_factor(self, n: int, k: int) -> int:
count = 0
for i in range(1, n + 1):
if n % i == 0:
count += 1
if count == k:
return i
return -1 |
{
'name': 'Junari Odoo Website Utils',
'version': '1.0',
'summary': 'Re-usable widgets for odoo website modules',
'author': 'Junari Ltd',
'category': 'CRM',
'website': 'https://www.junari.com',
'images': [],
'depends': [
'website'
],
'data': [
'views/assets.xml'
],
'js': [],
'qweb': [],
'css': [],
'demo': [],
'test': [],
'application': False,
'installable': True,
'auto_install': False,
}
| {'name': 'Junari Odoo Website Utils', 'version': '1.0', 'summary': 'Re-usable widgets for odoo website modules', 'author': 'Junari Ltd', 'category': 'CRM', 'website': 'https://www.junari.com', 'images': [], 'depends': ['website'], 'data': ['views/assets.xml'], 'js': [], 'qweb': [], 'css': [], 'demo': [], 'test': [], 'application': False, 'installable': True, 'auto_install': False} |
while True:
num = int(input('Quer ver a tabuada de qual valor? '))
if num < 0:
break
print('-' * 30)
for mult in range(1, 11, 1):
print(f'{num} x {mult} = {num * mult}')
print('-' * 30)
print('PROGRAMA TABUADA ENCERRADO. Volte sempre!')
| while True:
num = int(input('Quer ver a tabuada de qual valor? '))
if num < 0:
break
print('-' * 30)
for mult in range(1, 11, 1):
print(f'{num} x {mult} = {num * mult}')
print('-' * 30)
print('PROGRAMA TABUADA ENCERRADO. Volte sempre!') |
class Humidifier:
"""Class that represents a humidifier object in the Venta API."""
def __init__(self, request):
"""Initialize a humidifier object."""
self.state = {}
self.request = request
self.update()
# Note: each property name maps the name in the returned data
@property
def mac(self) -> int:
"""Return the Mac of the humidifier."""
return self.state["Header"]["MacAdress"]
@property
def temperature(self) -> int:
"""Return current temperature."""
return self.state["Measure"]["Temperature"]
@property
def humidity(self) -> int:
"""Return current humidity."""
return self.state["Measure"]["Humidity"]
@property
def dust(self) -> int:
"""Return current dust."""
return self.state["Measure"]["Dust"]
@property
def target_humidity(self) -> int:
"""Return target_humidity."""
return self.state["Action"]["TargetHum"]
@property
def fan_speed(self) -> int:
"""Return the fan speed."""
return self.state["Action"]["FanSpeed"]
@property
def is_on(self) -> bool:
"""Return if the humidifier is running."""
return self.state["Action"]["Power"]
@property
def is_sleep_mode(self) -> bool:
"""Return if the humidifier is in Sleep mode."""
return self.state["Action"]["SleepMode"]
@property
def is_auto_mode(self) -> bool:
"""Return if the humidifier is in Auto mode."""
return self.state["Action"]["Automatic"]
def set_humidity(self, humidity: int):
res = self.request(json={"Action": {"TargetHum": humidity}})
self.state = res.json()
def change_mode(self, mode: str, speed: int = 0):
turn_off = {"Action": {"Power": False}}
turn_on = {"Action": {"Power": True}}
sleep_mode = {"Action": {"Power": True, "SleepMode": True, "Automatic": False}}
automatic_mode = {
"Action": {"Power": True, "SleepMode": False, "Automatic": True}
}
def fan_speed_mode(speed):
return {
"Action": {
"Power": True,
"SleepMode": False,
"Automatic": False,
"FanSpeed": speed,
}
}
if mode == "off":
action = turn_off
elif mode == "on":
action = turn_on
elif mode == "sleep":
action = sleep_mode
elif mode == "automatic":
action = automatic_mode
elif mode == "manual":
print("speed", speed)
action = fan_speed_mode(speed)
res = self.request(json=action)
self.state = res.json()
def update(self):
"""Update the humidifier data."""
res = self.request()
self.state = res.json()
| class Humidifier:
"""Class that represents a humidifier object in the Venta API."""
def __init__(self, request):
"""Initialize a humidifier object."""
self.state = {}
self.request = request
self.update()
@property
def mac(self) -> int:
"""Return the Mac of the humidifier."""
return self.state['Header']['MacAdress']
@property
def temperature(self) -> int:
"""Return current temperature."""
return self.state['Measure']['Temperature']
@property
def humidity(self) -> int:
"""Return current humidity."""
return self.state['Measure']['Humidity']
@property
def dust(self) -> int:
"""Return current dust."""
return self.state['Measure']['Dust']
@property
def target_humidity(self) -> int:
"""Return target_humidity."""
return self.state['Action']['TargetHum']
@property
def fan_speed(self) -> int:
"""Return the fan speed."""
return self.state['Action']['FanSpeed']
@property
def is_on(self) -> bool:
"""Return if the humidifier is running."""
return self.state['Action']['Power']
@property
def is_sleep_mode(self) -> bool:
"""Return if the humidifier is in Sleep mode."""
return self.state['Action']['SleepMode']
@property
def is_auto_mode(self) -> bool:
"""Return if the humidifier is in Auto mode."""
return self.state['Action']['Automatic']
def set_humidity(self, humidity: int):
res = self.request(json={'Action': {'TargetHum': humidity}})
self.state = res.json()
def change_mode(self, mode: str, speed: int=0):
turn_off = {'Action': {'Power': False}}
turn_on = {'Action': {'Power': True}}
sleep_mode = {'Action': {'Power': True, 'SleepMode': True, 'Automatic': False}}
automatic_mode = {'Action': {'Power': True, 'SleepMode': False, 'Automatic': True}}
def fan_speed_mode(speed):
return {'Action': {'Power': True, 'SleepMode': False, 'Automatic': False, 'FanSpeed': speed}}
if mode == 'off':
action = turn_off
elif mode == 'on':
action = turn_on
elif mode == 'sleep':
action = sleep_mode
elif mode == 'automatic':
action = automatic_mode
elif mode == 'manual':
print('speed', speed)
action = fan_speed_mode(speed)
res = self.request(json=action)
self.state = res.json()
def update(self):
"""Update the humidifier data."""
res = self.request()
self.state = res.json() |
# -*- coding: utf-8 -*-
class Duck(object):
def quark(self):
print('Quaaaaaark!')
class Person(object):
def quark(self):
print('Hello!')
def quarking(duck):
try:
duck.quark()
except AttributeError:
pass
if __name__ == '__main__':
duck = Duck()
person = Person()
quarking(duck)
quarking(person) | class Duck(object):
def quark(self):
print('Quaaaaaark!')
class Person(object):
def quark(self):
print('Hello!')
def quarking(duck):
try:
duck.quark()
except AttributeError:
pass
if __name__ == '__main__':
duck = duck()
person = person()
quarking(duck)
quarking(person) |
# -*- coding: utf-8 -*-
"""
smallparts.constants - common constants
"""
#
# Single character constants
#
AMPERSAND = '&'
ASTERISK = '*'
AT = '@'
BLANK = SPACE = SP = ' '
BRACE_OPEN = '{'
BRACE_CLOSE = '}'
COLON = ':'
COMMA = ','
CARRIAGE_RETURN = CR = '\r'
DASH = '-'
DOT = '.'
DOUBLE_QUOTE = '"'
EMPTY = ''
EQUALS = '='
HASH = POUND = '#'
LINEFEED = LF = NEWLINE = NL = '\n'
PERCENT = '%'
PIPE = '|'
PLUS_SIGN = '+'
QUESTION_MARK = '?'
SEMICOLON = ';'
SINGLE_QUOTE = "'"
SLASH = '/'
TILDE = '~'
UNDERSCORE = '_'
#
# Compound constants
#
COLON_BLANK = COLON + BLANK
COMMA_BLANK = COMMA + BLANK
CRLF = CR + LF
#
# Numeric constants
#
ZERO = 0
ONE = 1
FIRST_INDEX = ZERO
SECOND_INDEX = ONE
LAST_INDEX = -1
#
# Encodings
#
ASCII = 'ascii'
CP1252 = 'cp1252'
UTF_8 = 'utf-8'
#
# Functional constants
#
MODE_APPEND = 'a+'
MODE_APPEND_BINARY = 'a+b'
MODE_READ = 'r'
MODE_READ_BINARY = 'rb'
MODE_WRITE = 'w'
MODE_WRITE_BINARY = 'wb'
YES = 'yes'
NO = 'no'
XML_1_0 = '1.0'
XML_1_1 = '1.1'
#
# Return codes
#
RC_ERROR = 1
RC_OK = 0
# vim: fileencoding=utf-8 ts=4 sts=4 sw=4 autoindent expandtab syntax=python:
| """
smallparts.constants - common constants
"""
ampersand = '&'
asterisk = '*'
at = '@'
blank = space = sp = ' '
brace_open = '{'
brace_close = '}'
colon = ':'
comma = ','
carriage_return = cr = '\r'
dash = '-'
dot = '.'
double_quote = '"'
empty = ''
equals = '='
hash = pound = '#'
linefeed = lf = newline = nl = '\n'
percent = '%'
pipe = '|'
plus_sign = '+'
question_mark = '?'
semicolon = ';'
single_quote = "'"
slash = '/'
tilde = '~'
underscore = '_'
colon_blank = COLON + BLANK
comma_blank = COMMA + BLANK
crlf = CR + LF
zero = 0
one = 1
first_index = ZERO
second_index = ONE
last_index = -1
ascii = 'ascii'
cp1252 = 'cp1252'
utf_8 = 'utf-8'
mode_append = 'a+'
mode_append_binary = 'a+b'
mode_read = 'r'
mode_read_binary = 'rb'
mode_write = 'w'
mode_write_binary = 'wb'
yes = 'yes'
no = 'no'
xml_1_0 = '1.0'
xml_1_1 = '1.1'
rc_error = 1
rc_ok = 0 |
DATA = [
("Load JS/WebAssembly", 2, 2, 2),
("Load /tmp/lines.txt", 225, 222, 218),
("From JS new Fzf() until ready to ....",
7825, 8548, 1579),
("Calling fzf-lib's fzf.New()", 1255, 3121, 963),
("return from fzfNew() function", 358, 7, 0),
("search() until library has result", 4235, 1394, 12132),
("Returning search result to JS callback", 1908, 1378, 416),
]
def create_plot(ax):
labels = ["Go", "TinyGo", "GopherJS"]
bottoms = [0, 0, 0]
for row in DATA:
ax.bar(labels, row[1:], label=row[0], bottom=bottoms)
bottoms = [bottoms[i] + row[1:][i] for i in range(len(bottoms))]
ax.set_ylabel("Time (ms)")
ax.set_ylim([0, 20000])
ax.legend(ncol=2)
| data = [('Load JS/WebAssembly', 2, 2, 2), ('Load /tmp/lines.txt', 225, 222, 218), ('From JS new Fzf() until ready to ....', 7825, 8548, 1579), ("Calling fzf-lib's fzf.New()", 1255, 3121, 963), ('return from fzfNew() function', 358, 7, 0), ('search() until library has result', 4235, 1394, 12132), ('Returning search result to JS callback', 1908, 1378, 416)]
def create_plot(ax):
labels = ['Go', 'TinyGo', 'GopherJS']
bottoms = [0, 0, 0]
for row in DATA:
ax.bar(labels, row[1:], label=row[0], bottom=bottoms)
bottoms = [bottoms[i] + row[1:][i] for i in range(len(bottoms))]
ax.set_ylabel('Time (ms)')
ax.set_ylim([0, 20000])
ax.legend(ncol=2) |