content stringlengths 7 1.05M | fixed_cases stringlengths 1 1.28M |
|---|---|
IN = 'ga.txt'
OUT = 'ga.dat'
with open(IN, 'r') as f, open(OUT, 'w') as g:
f.readline()
for line in f.readlines():
sol = int(line.split(',')[1])
if sol >= 13:
g.write(line.split(',')[2])
else:
print(line) | in = 'ga.txt'
out = 'ga.dat'
with open(IN, 'r') as f, open(OUT, 'w') as g:
f.readline()
for line in f.readlines():
sol = int(line.split(',')[1])
if sol >= 13:
g.write(line.split(',')[2])
else:
print(line) |
# Printing out in python
print("Hello World!")
# Command Terminal
# python --version
# Commenting and Docstring
""" DocString for Python """
# Find out the type
x = 1
y = 2.8
z = 1j
a = 12E4
b = -87.7e100
print(type(x)) # Int
print(type(y)) # Float
print(type(z)) # Complex
print(type(a)) # Float
print(type(b)) # Float
# Casting into Into Integer or String
casting_into_numbers = int("3")
casting_into_string = str(3)
print(type(casting_into_numbers))
print(type(casting_into_string))
| print('Hello World!')
' DocString for Python '
x = 1
y = 2.8
z = 1j
a = 120000.0
b = -8.77e+101
print(type(x))
print(type(y))
print(type(z))
print(type(a))
print(type(b))
casting_into_numbers = int('3')
casting_into_string = str(3)
print(type(casting_into_numbers))
print(type(casting_into_string)) |
class Skills_Placement(object):
def __init__(self, skills_placement):
self.skills_placement = skills_placement | class Skills_Placement(object):
def __init__(self, skills_placement):
self.skills_placement = skills_placement |
L = -np.ones(T) * (min_error_controlgain-0.3)
s_closed_loop, a_closed_loop = lds.dynamics_closedloop(D, B, L)
with plt.xkcd():
fig = plt.figure(figsize=(8, 6))
plot_vs_time(s_closed_loop,'Closed Loop','b',goal)
plt.title('Closed Loop State Evolution with Under-Ambitious Control Gain')
plt.show()
| l = -np.ones(T) * (min_error_controlgain - 0.3)
(s_closed_loop, a_closed_loop) = lds.dynamics_closedloop(D, B, L)
with plt.xkcd():
fig = plt.figure(figsize=(8, 6))
plot_vs_time(s_closed_loop, 'Closed Loop', 'b', goal)
plt.title('Closed Loop State Evolution with Under-Ambitious Control Gain')
plt.show() |
class BaseEventRule(object):
rules = dict()
functions = []
name = ''
@property
def expression(self):
raise NotImplementedError
class RateEventRule(BaseEventRule):
UNIT_MINIUTES = 'minutes'
UNIT_HOURS = 'hours'
UNIT_DAYS = 'days'
def __init__(self, name, value, unit='minutes'):
if not isinstance(value, int):
raise TypeError('Parameter "value" must be type of "int", not "%s"' % str(type(value)))
units = [getattr(self, key) for key in dir(self) if key.startswith('UNIT_')]
if unit not in units:
raise ValueError('Parameter "unit" must be one of %s' % ','.join(units))
self.name = name
self.value = value
self.unit = unit
@property
def expression(self):
return 'rate(%d %s)' % (self.value, self.unit)
class TimeEventRule(BaseEventRule):
def __init__(self, name, pattern):
if not isinstance(pattern, basestring):
raise TypeError('Parameter "expression" must be type of "string", not "%s"' % str(type(pattern)))
self.name = name
self.pattern = pattern
@property
def expression(self):
return 'cron(%s)' % self.pattern
| class Baseeventrule(object):
rules = dict()
functions = []
name = ''
@property
def expression(self):
raise NotImplementedError
class Rateeventrule(BaseEventRule):
unit_miniutes = 'minutes'
unit_hours = 'hours'
unit_days = 'days'
def __init__(self, name, value, unit='minutes'):
if not isinstance(value, int):
raise type_error('Parameter "value" must be type of "int", not "%s"' % str(type(value)))
units = [getattr(self, key) for key in dir(self) if key.startswith('UNIT_')]
if unit not in units:
raise value_error('Parameter "unit" must be one of %s' % ','.join(units))
self.name = name
self.value = value
self.unit = unit
@property
def expression(self):
return 'rate(%d %s)' % (self.value, self.unit)
class Timeeventrule(BaseEventRule):
def __init__(self, name, pattern):
if not isinstance(pattern, basestring):
raise type_error('Parameter "expression" must be type of "string", not "%s"' % str(type(pattern)))
self.name = name
self.pattern = pattern
@property
def expression(self):
return 'cron(%s)' % self.pattern |
# Adding two number provided by user input
num1 = input("First Number: ")
num2 = input("\nSecond Number: ")
#adding two number
sum = float(num1) + float(num2)
#display the sum
print("The sum of {0} and {1} is {2}".format(num1, num2, sum)) | num1 = input('First Number: ')
num2 = input('\nSecond Number: ')
sum = float(num1) + float(num2)
print('The sum of {0} and {1} is {2}'.format(num1, num2, sum)) |
meatPrice = 4.00
meatTax = 0.03 * meatPrice
milkPrice = 2.00
milkTax = 0.03 * milkPrice
print(meatTax + meatPrice + milkTax + milkPrice)
| meat_price = 4.0
meat_tax = 0.03 * meatPrice
milk_price = 2.0
milk_tax = 0.03 * milkPrice
print(meatTax + meatPrice + milkTax + milkPrice) |
'''
Arrival of the General
'''
n = int(input())
soldiers = list(map(int, input().split(' ')))
maxs = max(soldiers)
mins = min(soldiers)
posi_max = soldiers.index(maxs)
soldiers.reverse()
posi_min = n-1 - soldiers.index(mins)
if posi_max > posi_min:
swap = (posi_max - 0) + (n - 1 - (posi_min + 1))
else:
swap = (posi_max - 0) + (n - 1 - posi_min)
print(swap) | """
Arrival of the General
"""
n = int(input())
soldiers = list(map(int, input().split(' ')))
maxs = max(soldiers)
mins = min(soldiers)
posi_max = soldiers.index(maxs)
soldiers.reverse()
posi_min = n - 1 - soldiers.index(mins)
if posi_max > posi_min:
swap = posi_max - 0 + (n - 1 - (posi_min + 1))
else:
swap = posi_max - 0 + (n - 1 - posi_min)
print(swap) |
class lazy_property():
"""Defines a property whose value will be computed only once and as needed.
This can only be used on instance methods.
"""
def __init__(self, func):
self._func = func
def __get__(self, obj_self, cls):
value = self._func(obj_self)
setattr(obj_self, self._func.__name__, value)
return value
| class Lazy_Property:
"""Defines a property whose value will be computed only once and as needed.
This can only be used on instance methods.
"""
def __init__(self, func):
self._func = func
def __get__(self, obj_self, cls):
value = self._func(obj_self)
setattr(obj_self, self._func.__name__, value)
return value |
languages = ['en', 'de', 'fr']
vocab_dirs = ['data/Multi30K_DE/', 'data/Multi30K_DE/', 'data/Multi30K_FR/']
for language, vocab_dir in zip(languages, vocab_dirs):
with open('data/AmbiguousCOCO/test_2017_mscoco.lc.norm.tok.'+language, 'r') as f:
coco = [line.strip() for line in f.readlines()]
with open(vocab_dir + 'vocab.'+language, 'r') as f:
en_vocab = [(line.strip(), len(line)) for i, line in enumerate(f.readlines())]
unk = '[unk]'
en_vocab += [(unk, -1)]
en_vocab = dict(en_vocab)
def get_bpe_segment(token):
for l in range(len(token)-2 if token.endswith('@@') else len(token)):
word2id = en_vocab.get(token[l:], -1)
if word2id != -1:
bpe = []
bpe.append(token[l:])
if l != 0:
bpe.extend(get_bpe_segment(token[:l]+'@@'))
return bpe
return [token]
with open('data/AmbiguousCOCO/test.norm.tok.lc.10000bpe.'+language, 'w') as f:
for line in coco:
tokens = line.split()
bpe_tokens = []
for token in tokens:
if token in en_vocab.keys():
bpe = token
else:
bpe = get_bpe_segment(token)
bpe.reverse()
bpe = ' '.join(bpe)
bpe_tokens.append(bpe)
f.write(' '.join(bpe_tokens)+'\n')
| languages = ['en', 'de', 'fr']
vocab_dirs = ['data/Multi30K_DE/', 'data/Multi30K_DE/', 'data/Multi30K_FR/']
for (language, vocab_dir) in zip(languages, vocab_dirs):
with open('data/AmbiguousCOCO/test_2017_mscoco.lc.norm.tok.' + language, 'r') as f:
coco = [line.strip() for line in f.readlines()]
with open(vocab_dir + 'vocab.' + language, 'r') as f:
en_vocab = [(line.strip(), len(line)) for (i, line) in enumerate(f.readlines())]
unk = '[unk]'
en_vocab += [(unk, -1)]
en_vocab = dict(en_vocab)
def get_bpe_segment(token):
for l in range(len(token) - 2 if token.endswith('@@') else len(token)):
word2id = en_vocab.get(token[l:], -1)
if word2id != -1:
bpe = []
bpe.append(token[l:])
if l != 0:
bpe.extend(get_bpe_segment(token[:l] + '@@'))
return bpe
return [token]
with open('data/AmbiguousCOCO/test.norm.tok.lc.10000bpe.' + language, 'w') as f:
for line in coco:
tokens = line.split()
bpe_tokens = []
for token in tokens:
if token in en_vocab.keys():
bpe = token
else:
bpe = get_bpe_segment(token)
bpe.reverse()
bpe = ' '.join(bpe)
bpe_tokens.append(bpe)
f.write(' '.join(bpe_tokens) + '\n') |
expected_output = {
"tag": {
"1": {
"level": {
1: {
"hosts": {
"R1-asr1k-43": {
"metric": 33554428,
"interface": {
"Gi0/1/4": {
"next_hop": "R3-asr1k-53",
"snpa": "c014.fe84.b306"
}
}
},
"R3-asr1k-53": {
"metric": 16777214,
"interface": {
"Gi0/1/4": {
"next_hop": "R3-asr1k-53",
"snpa": "c014.fe84.b306"
}
}
},
"R5-asr1k-11": {},
"R6-asr1k-20": {
"metric": 16777214,
"interface": {
"Gi0/0/2": {
"next_hop": "R6-asr1k-20",
"snpa": "3c57.31c1.fb32"
},
"Gi0/0/3": {
"next_hop": "R6-asr1k-20",
"snpa": "3c57.31c1.fb33"
}
}
}
},
"flex_algo": 129
},
2: {
"hosts": {
"R1-asr1k-43": {
"metric": 33554428,
"interface": {
"Gi0/1/4": {
"next_hop": "R3-asr1k-53",
"snpa": "c014.fe84.b306"
}
}
},
"R3-asr1k-53": {
"metric": 16777214,
"interface": {
"Gi0/1/4": {
"next_hop": "R3-asr1k-53",
"snpa": "c014.fe84.b306"
}
}
},
"R5-asr1k-11": {},
"R6-asr1k-20": {
"metric": 16777214,
"interface": {
"Gi0/0/2": {
"next_hop": "R6-asr1k-20",
"snpa": "3c57.31c1.fb32"
},
"Gi0/0/3": {
"next_hop": "R6-asr1k-20",
"snpa": "3c57.31c1.fb33"
}
}
}
},
"flex_algo": 129
}
}
}
}
} | expected_output = {'tag': {'1': {'level': {1: {'hosts': {'R1-asr1k-43': {'metric': 33554428, 'interface': {'Gi0/1/4': {'next_hop': 'R3-asr1k-53', 'snpa': 'c014.fe84.b306'}}}, 'R3-asr1k-53': {'metric': 16777214, 'interface': {'Gi0/1/4': {'next_hop': 'R3-asr1k-53', 'snpa': 'c014.fe84.b306'}}}, 'R5-asr1k-11': {}, 'R6-asr1k-20': {'metric': 16777214, 'interface': {'Gi0/0/2': {'next_hop': 'R6-asr1k-20', 'snpa': '3c57.31c1.fb32'}, 'Gi0/0/3': {'next_hop': 'R6-asr1k-20', 'snpa': '3c57.31c1.fb33'}}}}, 'flex_algo': 129}, 2: {'hosts': {'R1-asr1k-43': {'metric': 33554428, 'interface': {'Gi0/1/4': {'next_hop': 'R3-asr1k-53', 'snpa': 'c014.fe84.b306'}}}, 'R3-asr1k-53': {'metric': 16777214, 'interface': {'Gi0/1/4': {'next_hop': 'R3-asr1k-53', 'snpa': 'c014.fe84.b306'}}}, 'R5-asr1k-11': {}, 'R6-asr1k-20': {'metric': 16777214, 'interface': {'Gi0/0/2': {'next_hop': 'R6-asr1k-20', 'snpa': '3c57.31c1.fb32'}, 'Gi0/0/3': {'next_hop': 'R6-asr1k-20', 'snpa': '3c57.31c1.fb33'}}}}, 'flex_algo': 129}}}}} |
type_of_fire = input().split("#")
water_amount = int(input())
effort = 0
total_fire = 0
print(f"Cells:")
for i in type_of_fire:
cell = i.split(" ")
if water_amount <= 0:
break
if 0 < int(cell[2]) <= 50 and cell[0] == "Low":
if int(cell[2]) > water_amount:
continue
else:
water_amount -= int(cell[2])
effort += (int(cell[2]) / 4)
total_fire += int(cell[2])
print(f" - {cell[2]}")
elif 50 < int(cell[2]) <= 80 and cell[0] == "Medium":
if int(cell[2]) > water_amount:
continue
else:
water_amount -= int(cell[2])
effort += (int(cell[2]) / 4)
total_fire += int(cell[2])
print(f" - {cell[2]}")
elif 80 < int(cell[2]) <= 125 and cell[0] == "High":
if int(cell[2]) > water_amount:
continue
else:
water_amount -= int(cell[2])
effort += (int(cell[2]) / 4)
total_fire += int(cell[2])
print(f" - {cell[2]}")
print(f"Effort: {effort:.2f}\nTotal Fire: {total_fire}") | type_of_fire = input().split('#')
water_amount = int(input())
effort = 0
total_fire = 0
print(f'Cells:')
for i in type_of_fire:
cell = i.split(' ')
if water_amount <= 0:
break
if 0 < int(cell[2]) <= 50 and cell[0] == 'Low':
if int(cell[2]) > water_amount:
continue
else:
water_amount -= int(cell[2])
effort += int(cell[2]) / 4
total_fire += int(cell[2])
print(f' - {cell[2]}')
elif 50 < int(cell[2]) <= 80 and cell[0] == 'Medium':
if int(cell[2]) > water_amount:
continue
else:
water_amount -= int(cell[2])
effort += int(cell[2]) / 4
total_fire += int(cell[2])
print(f' - {cell[2]}')
elif 80 < int(cell[2]) <= 125 and cell[0] == 'High':
if int(cell[2]) > water_amount:
continue
else:
water_amount -= int(cell[2])
effort += int(cell[2]) / 4
total_fire += int(cell[2])
print(f' - {cell[2]}')
print(f'Effort: {effort:.2f}\nTotal Fire: {total_fire}') |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def person(name, age, **kwargs):
print(name, age, kwargs)
def product(*args):
r = 1
for v in args:
r *= v
return r
if __name__ == '__main__':
extra = {'city': 'Beijing', 'job': 'Engineer'}
person('Jack', 24, city=extra['city'], job=extra['job'])
print('product(1, 2, 3):', product(1, 2, 3))
| def person(name, age, **kwargs):
print(name, age, kwargs)
def product(*args):
r = 1
for v in args:
r *= v
return r
if __name__ == '__main__':
extra = {'city': 'Beijing', 'job': 'Engineer'}
person('Jack', 24, city=extra['city'], job=extra['job'])
print('product(1, 2, 3):', product(1, 2, 3)) |
#!/usr/bin/python
# Filename: ex_celsisu_to_fahrenheit_v1.py
celsius = 20
fahrenheit = (celsius * 9 / 5) + 32
print(fahrenheit)
| celsius = 20
fahrenheit = celsius * 9 / 5 + 32
print(fahrenheit) |
TRANSACTION_LEARN = 'learn'
TRANSACTION_PREDICT = 'predict'
TRANSACTION_NORMAL_SELECT = 'normal_select'
TRANSACTION_NORMAL_MODIFY = 'normal_modify'
TRANSACTION_BAD_QUERY = 'bad_query'
TRANSACTION_DROP_MODEL ='drop_model'
STOP_TRAINING = 'stop_training'
KILL_TRAINING = 'kill_training'
KEY_NO_GROUP_BY = 'ALL_ROWS_NO_GROUP_BY'
class DATA_SUBTYPES:
# Numeric
INT = 'Int'
FLOAT = 'Float'
BINARY = 'Binary' # Should we have this ?
# DATETIME
DATE = 'Date' # YYYY-MM-DD
TIMESTAMP = 'Timestamp' # YYYY-MM-DD hh:mm:ss or 1852362464
# CATEGORICAL
SINGLE = 'Binary Category'
MULTIPLE = 'Category'
# FILE_PATH
IMAGE = 'Image'
VIDEO = 'Video'
AUDIO = 'Audio'
# URL
# How do we detect the tpye here... maybe setup async download for random sample an stats ?
# SEQUENTIAL
TEXT = 'Text'
ARRAY = 'Array' # Do we even want to support arrays / structs / nested ... etc ?
class DATA_TYPES:
NUMERIC = 'Numeric'
DATE = 'Date'
CATEGORICAL = 'Categorical'
FILE_PATH = 'File Path'
URL = 'Url'
SEQUENTIAL = 'Sequential'
class DATA_TYPES_SUBTYPES:
subtypes = {
DATA_TYPES.NUMERIC: (DATA_SUBTYPES.INT, DATA_SUBTYPES.FLOAT, DATA_SUBTYPES.BINARY)
,DATA_TYPES.DATE:(DATA_SUBTYPES.DATE, DATA_SUBTYPES.TIMESTAMP)
,DATA_TYPES.CATEGORICAL:(DATA_SUBTYPES.SINGLE, DATA_SUBTYPES.MULTIPLE)
,DATA_TYPES.FILE_PATH:(DATA_SUBTYPES.IMAGE, DATA_SUBTYPES.VIDEO, DATA_SUBTYPES.AUDIO)
,DATA_TYPES.URL:()
,DATA_TYPES.SEQUENTIAL:(DATA_SUBTYPES.TEXT, DATA_SUBTYPES.ARRAY)
}
class ORDER_BY_KEYS:
COLUMN = 0
ASCENDING_VALUE = 1
PHASE_DATA_EXTRACTOR = 1
PHASE_STATS_GENERATOR = 2
PHASE_MODEL_INTERFACE = 3
PHASE_MODEL_ANALYZER = 4
MODEL_STATUS_TRAINED = "Trained"
MODEL_STATUS_PREPARING = "Preparing"
MODEL_STATUS_DATA_ANALYSIS = "Data Analysis"
MODEL_STATUS_TRAINING= "Training"
MODEL_STATUS_ANALYZING = "Analyzing"
MODEL_STATUS_ERROR = "Error"
WORD_SEPARATORS = [',', "\t", ' ']
DEBUG_LOG_LEVEL = 10
INFO_LOG_LEVEL = 20
WARNING_LOG_LEVEL = 30
ERROR_LOG_LEVEL = 40
NO_LOGS_LOG_LEVEL = 50
| transaction_learn = 'learn'
transaction_predict = 'predict'
transaction_normal_select = 'normal_select'
transaction_normal_modify = 'normal_modify'
transaction_bad_query = 'bad_query'
transaction_drop_model = 'drop_model'
stop_training = 'stop_training'
kill_training = 'kill_training'
key_no_group_by = 'ALL_ROWS_NO_GROUP_BY'
class Data_Subtypes:
int = 'Int'
float = 'Float'
binary = 'Binary'
date = 'Date'
timestamp = 'Timestamp'
single = 'Binary Category'
multiple = 'Category'
image = 'Image'
video = 'Video'
audio = 'Audio'
text = 'Text'
array = 'Array'
class Data_Types:
numeric = 'Numeric'
date = 'Date'
categorical = 'Categorical'
file_path = 'File Path'
url = 'Url'
sequential = 'Sequential'
class Data_Types_Subtypes:
subtypes = {DATA_TYPES.NUMERIC: (DATA_SUBTYPES.INT, DATA_SUBTYPES.FLOAT, DATA_SUBTYPES.BINARY), DATA_TYPES.DATE: (DATA_SUBTYPES.DATE, DATA_SUBTYPES.TIMESTAMP), DATA_TYPES.CATEGORICAL: (DATA_SUBTYPES.SINGLE, DATA_SUBTYPES.MULTIPLE), DATA_TYPES.FILE_PATH: (DATA_SUBTYPES.IMAGE, DATA_SUBTYPES.VIDEO, DATA_SUBTYPES.AUDIO), DATA_TYPES.URL: (), DATA_TYPES.SEQUENTIAL: (DATA_SUBTYPES.TEXT, DATA_SUBTYPES.ARRAY)}
class Order_By_Keys:
column = 0
ascending_value = 1
phase_data_extractor = 1
phase_stats_generator = 2
phase_model_interface = 3
phase_model_analyzer = 4
model_status_trained = 'Trained'
model_status_preparing = 'Preparing'
model_status_data_analysis = 'Data Analysis'
model_status_training = 'Training'
model_status_analyzing = 'Analyzing'
model_status_error = 'Error'
word_separators = [',', '\t', ' ']
debug_log_level = 10
info_log_level = 20
warning_log_level = 30
error_log_level = 40
no_logs_log_level = 50 |
choices = ["rock", "paper", "scissors"]
player_lives = 3
computer_lives = 3
total_lives = 3
player = False; | choices = ['rock', 'paper', 'scissors']
player_lives = 3
computer_lives = 3
total_lives = 3
player = False |
path = os.getcwd()
csv_files = []
for i in csv_folders:
csv_files.append(glob.glob(os.path.join(f'{i}', "*.csv")))
#speed (max, min, avg, median), distance travelled (max, min, avg, median), time travelled per trip (max, min, avg, median)
def data(file):
for i in range(len(file)):
d = [dist(i)[0] for i in drive(file[i])]
sp = [dist(i)[1] for i in drive(file[i])]
t = [dist(i)[2] for i in drive(file[i])]
details = [str(i), max(sp), min(sp), mean(sp), median(sp), max(d), min(d), mean(d), median(d), sum(d), max(t), min(t), mean(t), median(t), sum(t)]
df.loc[len(df)] = details
return df
def data_per_driver(file):
distance = []
for i in range(1, len(file)):
distance.append(((file.x[i]-file.x[i-1])**2 + (file.y[i]-file.y[i-1])**2)**0.5)
return [sum(distance)/1000, sum(distance)*18/(len(file)*5), len(file)/3600]
#df = data(csv_files)
#new_csv = csv_files
portion1 = new_csv[0:50]
new_csv = new_csv[50:]
print(len(new_csv), len(csv_files))
res = pd.DataFrame(columns = ['turno', 'tripDuration', 'tripDistance', 'activeDriving', 'stopDuration', 'stopNo', 'speed', 'jerkCount', 'maxAngVel', 'maxAcc',
'maxJerk', 'medianSpeed', 'medianAcc', 'avgAngleChange', 'meanSpeed', 'meanAcc', 'urban', 'highway', 'speedingUrban',
'speedingHighway'])
res = pd.read_csv('final.csv')
del res['Unnamed: 0']
p1 = join(portion4, 50)
p2 = join(portion5, 36)
#p3 = join(portion3, 50)
res = pd.concat([res, p1, p2], axis=0)
| path = os.getcwd()
csv_files = []
for i in csv_folders:
csv_files.append(glob.glob(os.path.join(f'{i}', '*.csv')))
def data(file):
for i in range(len(file)):
d = [dist(i)[0] for i in drive(file[i])]
sp = [dist(i)[1] for i in drive(file[i])]
t = [dist(i)[2] for i in drive(file[i])]
details = [str(i), max(sp), min(sp), mean(sp), median(sp), max(d), min(d), mean(d), median(d), sum(d), max(t), min(t), mean(t), median(t), sum(t)]
df.loc[len(df)] = details
return df
def data_per_driver(file):
distance = []
for i in range(1, len(file)):
distance.append(((file.x[i] - file.x[i - 1]) ** 2 + (file.y[i] - file.y[i - 1]) ** 2) ** 0.5)
return [sum(distance) / 1000, sum(distance) * 18 / (len(file) * 5), len(file) / 3600]
portion1 = new_csv[0:50]
new_csv = new_csv[50:]
print(len(new_csv), len(csv_files))
res = pd.DataFrame(columns=['turno', 'tripDuration', 'tripDistance', 'activeDriving', 'stopDuration', 'stopNo', 'speed', 'jerkCount', 'maxAngVel', 'maxAcc', 'maxJerk', 'medianSpeed', 'medianAcc', 'avgAngleChange', 'meanSpeed', 'meanAcc', 'urban', 'highway', 'speedingUrban', 'speedingHighway'])
res = pd.read_csv('final.csv')
del res['Unnamed: 0']
p1 = join(portion4, 50)
p2 = join(portion5, 36)
res = pd.concat([res, p1, p2], axis=0) |
__name__ = "__main__"
def bar():
print("bar")
print("before __name__ guard")
if __name__ == "__main__":
bar()
print("after __name__ guard") | __name__ = '__main__'
def bar():
print('bar')
print('before __name__ guard')
if __name__ == '__main__':
bar()
print('after __name__ guard') |
__all__ = ["user"]
for _import in __all__:
__import__(__package__ + "." + _import)
| __all__ = ['user']
for _import in __all__:
__import__(__package__ + '.' + _import) |
class Solution:
def removeDuplicates(self, nums: List[int]) -> int:
count = 1
while count < len(nums):
if nums[count] == nums[count - 1]:
nums.remove(nums[count])
else:
count += 1
return len(nums)
| class Solution:
def remove_duplicates(self, nums: List[int]) -> int:
count = 1
while count < len(nums):
if nums[count] == nums[count - 1]:
nums.remove(nums[count])
else:
count += 1
return len(nums) |
def hello(name='World'):
if not isinstance(name, str):
raise Exception("ValueError: name should be of type str")
return 'Hello, {}!'.format(name) | def hello(name='World'):
if not isinstance(name, str):
raise exception('ValueError: name should be of type str')
return 'Hello, {}!'.format(name) |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014, Mingze
# Author: Mingze (mxu@microstrategy.com)
class cmd:
ATT_ErrorRsp = 'ATT_ErrorRsp',
ATT_ExchangeMTUReq = 'ATT_ExchangeMTUReq',
ATT_ExchangeMTURsp = 'ATT_ExchangeMTURsp',
ATT_FindInfoReq = 'ATT_FindInfoReq',
ATT_FindInfoRsp = 'ATT_FindInfoRsp',
ATT_FindByTypeValueReq = 'ATT_FindByTypeValueReq',
ATT_FindByTypeValueRsp = 'ATT_FindByTypeValueRsp',
ATT_FindByTypeReq = 'ATT_FindByTypeReq',
ATT_FindByTypeRsp = 'ATT_FindByTypeRsp',
ATT_ReadReq = 'ATT_ReadReq',
ATT_ReadRsp = 'ATT_ReadRsp',
ATT_ReadBlobReq = 'ATT_ReadBlobReq',
ATT_ReadBlobRsp = 'ATT_ReadBlobRsp',
ATT_ReadByGrpTypeReq = 'ATT_ReadByGrpTypeReq',
ATT_ReadByGrpTypeRsp = 'ATT_ReadByGrpTypeRsp',
ATT_WriteReq = 'ATT_WriteReq',
ATT_WriteRsp = 'ATT_WriteRsp',
ATT_PrepareWriteReq = 'ATT_PrepareWriteReq',
ATT_PrepareWriteRsp = 'ATT_PrepareWriteRsp',
ATT_ExecuteWriteReq = 'ATT_ExecuteWriteReq',
ATT_ExecuteWriteRsp = 'ATT_ExecuteWriteRsp',
ATT_HandleValueNotification = 'ATT_HandleValueNotification',
ATT_HandleValueIndication = 'ATT_HandleValueIndication',
ATT_HandleValueConfirmation = 'ATT_HandleValueConfirmation',
GATT_DiscPrimaryServiceByUUID = 'GATT_DiscPrimaryServiceByUUID',
GATT_DiscCharsByUUID = 'GATT_DiscCharsByUUID',
GATT_ReadCharValue = 'GATT_ReadCharValue',
GATT_WriteCharValue = 'GATT_WriteCharValue',
GATT_ReadMultipleCharValues = 'GATT_ReadMultipleCharValues',
GATT_ReadMultipleCharValues = 'GATT_WriteCharValue',
GATT_WriteLongCharValue = 'GATT_WriteLongCharValue',
GATT_DiscAllChars = 'GATT_DiscAllChars',
GATT_ReadUsingCharUUID = 'GATT_ReadUsingCharUUID',
GATT_AddService = 'GATT_AddService',
GATT_DelService = 'GATT_DelService',
GATT_AddAttribute = 'GATT_AddAttribute',
GAP_DeviceInit = 'GAP_DeviceInit',
GAP_ConfigureDeviceAddr = 'GAP_ConfigureDeviceAddr',
GATT_DeviceDiscoveryRequest = 'GATT_DeviceDiscoveryRequest',
GATT_DeviceDiscoveryCancel = 'GATT_DeviceDiscoveryCancel',
GAP_MakeDiscoverable = 'GAP_MakeDiscoverable',
GAP_UpdateAdvertisingData = 'GAP_UpdateAdvertisingData',
GAP_EndDiscoverable = 'GAP_EndDiscoverable',
GAP_EstablishLinkRequest = 'GAP_EstablishLinkRequest',
GAP_TerminateLinkRequest = 'GAP_TerminateLinkRequest',
GAP_UpdateLinkParamReq = 'GAP_UpdateLinkParamReq',
GAP_SetParam = 'GAP_SetParam',
GAP_GetParam = 'GAP_GetParam',
HTIL_Reset = 'HTIL_Reset'
class event:
HCI_LE_ExtEvent = 'HCI_LE_ExtEvent'
ATT_ErrorRsp = 'ATT_ErrorRsp',
ATT_ExchangeMTUReq = 'ATT_ExchangeMTUReq',
ATT_ExchangeMTURsp = 'ATT_ExchangeMTURsp',
ATT_FindInfoReq = 'ATT_FindInfoReq',
ATT_FindInfoRsp = 'ATT_FindInfoRsp',
ATT_FindByTypeValueReq = 'ATT_FindByTypeValueReq',
ATT_FindByTypeValueRsp = 'ATT_FindByTypeValueRsp',
ATT_ReadByTypeReq = 'ATT_ReadByTypeReq',
ATT_ReadByTypeRsp = 'ATT_ReadByTypeRsp',
ATT_ReadReq = 'ATT_ReadReq',
ATT_ReadRsp = 'ATT_ReadRsp',
ATT_ReadBlobReq = 'ATT_ReadBlobReq',
ATT_ReadBlobRsp = 'ATT_ReadBlobRsp',
ATT_ReadMultiReq = 'ATT_ReadMultiReq',
ATT_ReadMultiRsp = 'ATT_ReadMultiRsp',
ATT_ReadByGrpTypeReq = 'ATT_ReadByGrpTypeReq',
ATT_ReadByGrpTypeRsp = 'ATT_ReadByGrpTypeRsp',
ATT_WriteReq = 'ATT_WriteReq',
ATT_WriteRsp = 'ATT_WriteRsp',
ATT_PrepareWriteReq = 'ATT_PrepareWriteReq',
ATT_PrepareWriteRsp = 'ATT_PrepareWriteRsp',
ATT_ExecuteWriteReq = 'ATT_ExecuteWriteReq',
ATT_ExecuteWriteRsp = 'ATT_ExecuteWriteRsp',
ATT_HandleValueNotification = 'ATT_HandleValueNotification',
ATT_HandleValueIndication = 'ATT_HandleValueIndication',
ATT_HandleValueConfirmation = 'ATT_HandleValueConfirmation',
GATT_ClientCharCfgUpdated = 'GATT_ClientCharCfgUpdated',
GATT_DiscCharsByUUID = 'GATT_DiscCharsByUUID',
GAP_DeviceInitDone = 'GAP_DeviceInitDone',
GAP_DeviceDiscoveryDone = 'GAP_DeviceDiscoveryDone',
GAP_AdvertDataUpdateDone = 'GAP_AdvertDataUpdateDone',
GAP_MakeDiscoverableDone = 'GAP_MakeDiscoverableDone',
GAP_EndDiscoverableDone = 'GAP_EndDiscoverableDone',
GAP_LinkEstablished = 'GAP_LinkEstablished',
GAP_LinkTerminated = 'GAP_LinkTerminated',
GAP_LinkParamUpdate = 'GAP_LinkParamUpdate',
GAP_DeviceInformation = 'GAP_DeviceInformation',
GAP_HCI_ExtensionCommandStatus = 'GAP_HCI_ExtensionCommandStatus',
opcodes = {
"fd01":cmd.ATT_ErrorRsp,
"fd02":cmd.ATT_ExchangeMTUReq,
"fd03":cmd.ATT_ExchangeMTURsp,
"fd04":cmd.ATT_FindInfoReq,
"fd05":cmd.ATT_FindInfoRsp,
"fd06":cmd.ATT_FindByTypeValueReq,
"fd07":cmd.ATT_FindByTypeValueRsp,
"fd08":cmd.ATT_FindByTypeReq,
"fd09":cmd.ATT_FindByTypeRsp,
"fd0a":cmd.ATT_ReadReq,
"fd0b":cmd.ATT_ReadRsp,
"fd0c":cmd.ATT_ReadBlobReq,
"fd0d":cmd.ATT_ReadBlobRsp,
"fd10":cmd.ATT_ReadByGrpTypeReq,
"fd11":cmd.ATT_ReadByGrpTypeRsp,
"fd12":cmd.ATT_WriteReq,
"fd13":cmd.ATT_WriteRsp,
"fd16":cmd.ATT_PrepareWriteReq,
"fd17":cmd.ATT_PrepareWriteRsp,
"fd18":cmd.ATT_ExecuteWriteReq,
"fd19":cmd.ATT_ExecuteWriteRsp,
"fd1b":cmd.ATT_HandleValueNotification,
"fd1d":cmd.ATT_HandleValueIndication,
"fd1e":cmd.ATT_HandleValueConfirmation,
"fd86":cmd.GATT_DiscPrimaryServiceByUUID,
"fd88":cmd.GATT_DiscCharsByUUID,
"fd8a":cmd.GATT_ReadCharValue,
"fd8e":cmd.GATT_ReadMultipleCharValues,
"fd92":cmd.GATT_WriteCharValue,
"fd96":cmd.GATT_WriteLongCharValue,
"fdb2":cmd.GATT_DiscAllChars,
"fdb4":cmd.GATT_ReadUsingCharUUID,
"fdfc":cmd.GATT_AddService,
"fdfd":cmd.GATT_DelService,
"fdfe":cmd.GATT_AddAttribute,
"fe00":cmd.GAP_DeviceInit,
"fe03":cmd.GAP_ConfigureDeviceAddr,
"fe04":cmd.GATT_DeviceDiscoveryRequest,
"fe05":cmd.GATT_DeviceDiscoveryCancel,
"fe06":cmd.GAP_MakeDiscoverable,
"fe07":cmd.GAP_UpdateAdvertisingData,
"fe08":cmd.GAP_EndDiscoverable,
"fe09":cmd.GAP_EstablishLinkRequest,
"fe0a":cmd.GAP_TerminateLinkRequest,
"fe11":cmd.GAP_UpdateLinkParamReq,
"fe30":cmd.GAP_SetParam,
"fe31":cmd.GAP_GetParam,
"fe80":cmd.HTIL_Reset,
}
hci_cmds = {
"fd01":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'req_opcode', 'len':1, 'default':'\x00'},
{'name':'handle', 'len':2, 'default':'\x00\x00'},
{'name':'error_code', 'len':1, 'default':'\x00'}],
"fd02":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'client_rx_mtu','len':2, 'default':'\x00\x87'}],
"fd03":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'server_rx_mtu','len':2, 'default':'\x00\x87'}],
"fd04":
[{'name':'conn_handle', 'len':2, 'default':'\xff\xfe'},
{'name':'start_handle','len':2, 'default':'\x00\x01'},
{'name':'end_handle', 'len':2, 'default':'\xff\xff'}],
"fd09":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'data_length', 'len':1, 'default':None},
{'name':'value', 'len':None, 'default':None}],
"fd0c":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'handle', 'len':2, 'default':'\x00\x00'},
{'name':'offset', 'len':2, 'default':'\x00\x00'}],
"fd0d":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'value', 'len':None, 'default':None}],
"fd10":
[{'name':'conn_handle', 'len':2, 'default':'\xff\xfe'},
{'name':'start_handle','len':2, 'default':'\x00\x01'},
{'name':'end_handle', 'len':2, 'default':'\xff\xff'},
{'name':'group_type', 'len':None, 'default':'\x00\x28'}], #by default it's service
"fd11":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'data_length', 'len':1, 'default':None},
{'name':'value', 'len':None, 'default':'\x00\x00'}],
"fd13":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'}],
"fd1b":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'authenticated', 'len':1, 'default':'\x00'},
{'name':'handle', 'len':2, 'default':'\xfe\xff'},
{'name':'value', 'len':None, 'default':None}],
"fd1d":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'authenticated', 'len':1, 'default':'\x00'},
{'name':'handle', 'len':2, 'default':'\xfe\xff'},
{'name':'value', 'len':None, 'default':None}],
"fd1e":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'}],
"fd86":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'value', 'len':None, 'default':None}],
"fd88":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'start_handle','len':2, 'default':'\x01\x00'},
{'name':'end_handle', 'len':2, 'default':'\xfe\xff'},
{'name':'type', 'len':None, 'default':None}],
"fd8a":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'handle', 'len':2, 'default':None}],
"fd8c":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'handle', 'len':2, 'default':'\x00\x00'},
{'name':'offset', 'len':2, 'default':'\x00\x00'},
{'name':'type', 'len':1, 'default':'\x00'}],
"fd8e":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'handles', 'len':None, 'default':None}],
"fd92":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'handle', 'len':2, 'default':None},
{'name':'value', 'len':None, 'default':None}],
"fd96":
[{'name':'handle', 'len':2, 'default':'\x00\x00'},
{'name':'offset', 'len':1, 'default':None},
{'name':'value', 'len':None, 'default':None}],
"fdb2":
[{'name':'start_handle','len':2, 'default':'\x00\x00'},
{'name':'end_handle', 'len':2, 'default':'\xff\xff'}],
"fdb4":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'start_handle','len':2, 'default':'\x01\x00'},
{'name':'end_handle', 'len':2, 'default':'\xff\xff'},
{'name':'read_type', 'len':2, 'default':None}],
"fdfc":
[{'name':'uuid', 'len':2, 'default':'\x28\x00'},
{'name':'numAttrs', 'len':2, 'default':'\x00\x01'}],
"fdfd":
[{'name':'handle', 'len':2, 'default':'\x00\x01'}],
"fdfe":
[{'name':'uuid', 'len':None, 'default':'\x00\0x00'},
{'name':'permissions', 'len':1, 'default':'\x03'}],
"fe00":
[{'name':'profile_role','len':1, 'default':'\x08'},
{'name':'max_scan_rsps','len':1, 'default':'\xa0'},
{'name':'irk', 'len':16, 'default':'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'},
{'name':'csrk', 'len':16, 'default':'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'},
{'name':'sign_counter','len':4, 'default':'\x01\x00\x00\x00'}],
"fe03":
[{'name':'addr_type', 'len':1, 'default':None},
{'name':'addr', 'len':6, 'default':None}],
"fe04":
[{'name':'mode', 'len':1, 'default':None},
{'name':'active_scan', 'len':1, 'default':'\x01'},
{'name':'white_list', 'len':1, 'default':'\x00'}],
"fe05":
[],
"fe06":
[{'name':'event_type', 'len':1, 'default':'\x00'},
{'name':'init_addr_type', 'len':1, 'default':'\x00'},
{'name':'init_addrs', 'len':6, 'default':'\x00\x00\x00\x00\x00\x00'},
{'name':'channel_map', 'len':1, 'default':'\x07'},
{'name':'filter_policy','len':1, 'default':'\x00'}],
"fe07":
[{'name':'ad_type', 'len':1, 'default':'\x01'},
{'name':'data_length', 'len':1, 'default':None},
{'name':'advert_data', 'len':None, 'default':'\x02\x01\x07'}],
"fe08":
[],
"fe09":
[{'name':'high_duty_cycle','len':1, 'default':'\x00'},
{'name':'white_list', 'len':1, 'default':'\x00'},
{'name':'addr_type_peer','len':1, 'default':'\x00'},
{'name':'peer_addr', 'len':6, 'default':None}],
"fe0a":
[{'name':'conn_handle', 'len':2, 'default':'\x00\x00'},
{'name':'disconnect_reason', 'len':1, 'default':'\x13'}],
"fe30":
[{'name':'param_id', 'len':1, 'default':None},
{'name':'param_value', 'len':2, 'default':None}],
"fe31":
[{'name':'param_id', 'len':1, 'default':None}],
"fe80":
[{'name':'reset_type', 'len':1, 'default':'\x01'}],
"0c03":
[],
}
hci_events = {"ff":
{'name':event.HCI_LE_ExtEvent,
'structure':
[{'name':'ext_event', 'len':None}]},
}
ext_events= {"0501":
{'name':event.ATT_ErrorRsp,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'req_op_code', 'len':1},
{'name':'handle', 'len':2},
{'name':'error_code', 'len':1}]},
"0502":
{'name':event.ATT_ExchangeMTUReq,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'client_rx_mtu','len':2}]},
"0503":
{'name':event.ATT_ExchangeMTURsp,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'server_rx_mtu','len':2}]},
"0504":
{'name':event.ATT_FindInfoReq,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'start_handle','len':2},
{'name':'end_handle', 'len':2}]},
"0505":
{'name':event.ATT_FindInfoRsp,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'format', 'len':1},
{'name':'results', 'len':None}],
'parsing':
[('results', lambda ble, original:
ble._parse_find_info_results(original['results'], original['format']))]},
"0506":
{'name':event.ATT_FindByTypeValueReq,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'start_handle','len':2},
{'name':'end_handle', 'len':2}]},
"0507":
{'name':event.ATT_FindByTypeValueRsp,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'start_handle','len':2},
{'name':'end_handle', 'len':2},
{'name':'value', 'len':None}]},
"0508":
{'name':event.ATT_ReadByTypeReq,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'start_handle','len':2},
{'name':'end_handle', 'len':2},
{'name':'type', 'len':None}]},
"0509":
{'name':event.ATT_ReadByTypeRsp,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'length', 'len':1},
{'name':'handle', 'len':2},
{'name':'value', 'len':None}]},
"050b":
{'name':event.ATT_ReadRsp,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'value', 'len':None}]},
"050c":
{'name':event.ATT_ReadBlobReq,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'handle', 'len':2},
{'name':'offset', 'len':2}]},
"050d":
{'name':event.ATT_ReadBlobRsp,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'value', 'len':None}]},
"050f":
{'name':event.ATT_ReadMultiRsp,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'results', 'len':None}]},
"0510":
{'name':event.ATT_ReadByGrpTypeReq,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'start_handle','len':2},
{'name':'end_handle', 'len':2},
{'name':'group_type', 'len':2}]},
"0511":
{'name':event.ATT_ReadByGrpTypeRsp,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1}]},
"0512":
{'name':event.ATT_WriteReq,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'signature', 'len':1},
{'name':'command', 'len':1},
{'name':'handle', 'len':2},
{'name':'value', 'len':None}]},
"0513":
{'name':event.ATT_WriteRsp,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1}]},
"051b":
{'name':event.ATT_HandleValueNotification,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'handle', 'len':2},
{'name':'values', 'len':None}]},
"051d":
{'name':event.ATT_HandleValueIndication,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'handle', 'len':2},
{'name':'values', 'len':None}]},
"051e":
{'name':event.ATT_HandleValueConfirmation,
'structure':
[{'name':'conn_handle', 'len':2}]},
"0580":
{'name':event.GATT_ClientCharCfgUpdated,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'pdu_len', 'len':1},
{'name':'attr_handle', 'len':2},
{'name':'value', 'len':1}]},
"0600":
{'name':event.GAP_DeviceInitDone,
'structure':
[{'name':'dev_addr', 'len':6},
{'name':'data_pkt_len','len':2},
{'name':'num_data_pkts','len':1},
{'name':'irk', 'len':16},
{'name':'csrk', 'len':16}]},
"0601":
{'name':event.GAP_DeviceDiscoveryDone,
'structure':
[{'name':'num_devs', 'len':1},
{'name':'devices', 'len':None}],
'parsing':
[('devices', lambda ble, original:
ble._parse_devices(original['devices']))]},
"0602":
{'name':event.GAP_AdvertDataUpdateDone,
'structure':
[{'name':'ad_type', 'len':1}]},
"0603":
{'name':event.GAP_MakeDiscoverableDone,
'structure':
[]},
"0604":
{'name':event.GAP_EndDiscoverableDone,
'structure':
[]},
"0605":
{'name':event.GAP_LinkEstablished,
'structure':
[{'name':'dev_addr_type','len':1},
{'name':'dev_addr', 'len':6},
{'name':'conn_handle', 'len':2},
{'name':'conn_interval','len':2},
{'name':'conn_latency','len':2},
{'name':'conn_timeout','len':2},
{'name':'clock_accuracy','len':1}]},
"0606":
{'name':event.GAP_LinkTerminated,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'value', 'len':None}]},
"0607":
{'name':event.GAP_LinkParamUpdate,
'structure':
[{'name':'conn_handle', 'len':2},
{'name':'conn_interval', 'len':2},
{'name':'conn_latency', 'len':2},
{'name':'conn_timeout', 'len':2}]},
"060d":
{'name':event.GAP_DeviceInformation,
'structure':
[{'name':'event_type', 'len':1},
{'name':'addr_type', 'len':1},
{'name':'addr', 'len':6},
{'name':'rssi', 'len':1},
{'name':'data_len', 'len':1},
{'name':'data_field', 'len':None}]},
"067f":
{'name':event.GAP_HCI_ExtensionCommandStatus,
'structure':
[{'name':'op_code', 'len':2},
{'name':'data_len', 'len':1},
{'name':'param_value', 'len':None}],
'parsing':
[('op_code', lambda ble, original:
ble._parse_opcodes(original['op_code']))]},
} | class Cmd:
att__error_rsp = ('ATT_ErrorRsp',)
att__exchange_mtu_req = ('ATT_ExchangeMTUReq',)
att__exchange_mtu_rsp = ('ATT_ExchangeMTURsp',)
att__find_info_req = ('ATT_FindInfoReq',)
att__find_info_rsp = ('ATT_FindInfoRsp',)
att__find_by_type_value_req = ('ATT_FindByTypeValueReq',)
att__find_by_type_value_rsp = ('ATT_FindByTypeValueRsp',)
att__find_by_type_req = ('ATT_FindByTypeReq',)
att__find_by_type_rsp = ('ATT_FindByTypeRsp',)
att__read_req = ('ATT_ReadReq',)
att__read_rsp = ('ATT_ReadRsp',)
att__read_blob_req = ('ATT_ReadBlobReq',)
att__read_blob_rsp = ('ATT_ReadBlobRsp',)
att__read_by_grp_type_req = ('ATT_ReadByGrpTypeReq',)
att__read_by_grp_type_rsp = ('ATT_ReadByGrpTypeRsp',)
att__write_req = ('ATT_WriteReq',)
att__write_rsp = ('ATT_WriteRsp',)
att__prepare_write_req = ('ATT_PrepareWriteReq',)
att__prepare_write_rsp = ('ATT_PrepareWriteRsp',)
att__execute_write_req = ('ATT_ExecuteWriteReq',)
att__execute_write_rsp = ('ATT_ExecuteWriteRsp',)
att__handle_value_notification = ('ATT_HandleValueNotification',)
att__handle_value_indication = ('ATT_HandleValueIndication',)
att__handle_value_confirmation = ('ATT_HandleValueConfirmation',)
gatt__disc_primary_service_by_uuid = ('GATT_DiscPrimaryServiceByUUID',)
gatt__disc_chars_by_uuid = ('GATT_DiscCharsByUUID',)
gatt__read_char_value = ('GATT_ReadCharValue',)
gatt__write_char_value = ('GATT_WriteCharValue',)
gatt__read_multiple_char_values = ('GATT_ReadMultipleCharValues',)
gatt__read_multiple_char_values = ('GATT_WriteCharValue',)
gatt__write_long_char_value = ('GATT_WriteLongCharValue',)
gatt__disc_all_chars = ('GATT_DiscAllChars',)
gatt__read_using_char_uuid = ('GATT_ReadUsingCharUUID',)
gatt__add_service = ('GATT_AddService',)
gatt__del_service = ('GATT_DelService',)
gatt__add_attribute = ('GATT_AddAttribute',)
gap__device_init = ('GAP_DeviceInit',)
gap__configure_device_addr = ('GAP_ConfigureDeviceAddr',)
gatt__device_discovery_request = ('GATT_DeviceDiscoveryRequest',)
gatt__device_discovery_cancel = ('GATT_DeviceDiscoveryCancel',)
gap__make_discoverable = ('GAP_MakeDiscoverable',)
gap__update_advertising_data = ('GAP_UpdateAdvertisingData',)
gap__end_discoverable = ('GAP_EndDiscoverable',)
gap__establish_link_request = ('GAP_EstablishLinkRequest',)
gap__terminate_link_request = ('GAP_TerminateLinkRequest',)
gap__update_link_param_req = ('GAP_UpdateLinkParamReq',)
gap__set_param = ('GAP_SetParam',)
gap__get_param = ('GAP_GetParam',)
htil__reset = 'HTIL_Reset'
class Event:
hci_le__ext_event = 'HCI_LE_ExtEvent'
att__error_rsp = ('ATT_ErrorRsp',)
att__exchange_mtu_req = ('ATT_ExchangeMTUReq',)
att__exchange_mtu_rsp = ('ATT_ExchangeMTURsp',)
att__find_info_req = ('ATT_FindInfoReq',)
att__find_info_rsp = ('ATT_FindInfoRsp',)
att__find_by_type_value_req = ('ATT_FindByTypeValueReq',)
att__find_by_type_value_rsp = ('ATT_FindByTypeValueRsp',)
att__read_by_type_req = ('ATT_ReadByTypeReq',)
att__read_by_type_rsp = ('ATT_ReadByTypeRsp',)
att__read_req = ('ATT_ReadReq',)
att__read_rsp = ('ATT_ReadRsp',)
att__read_blob_req = ('ATT_ReadBlobReq',)
att__read_blob_rsp = ('ATT_ReadBlobRsp',)
att__read_multi_req = ('ATT_ReadMultiReq',)
att__read_multi_rsp = ('ATT_ReadMultiRsp',)
att__read_by_grp_type_req = ('ATT_ReadByGrpTypeReq',)
att__read_by_grp_type_rsp = ('ATT_ReadByGrpTypeRsp',)
att__write_req = ('ATT_WriteReq',)
att__write_rsp = ('ATT_WriteRsp',)
att__prepare_write_req = ('ATT_PrepareWriteReq',)
att__prepare_write_rsp = ('ATT_PrepareWriteRsp',)
att__execute_write_req = ('ATT_ExecuteWriteReq',)
att__execute_write_rsp = ('ATT_ExecuteWriteRsp',)
att__handle_value_notification = ('ATT_HandleValueNotification',)
att__handle_value_indication = ('ATT_HandleValueIndication',)
att__handle_value_confirmation = ('ATT_HandleValueConfirmation',)
gatt__client_char_cfg_updated = ('GATT_ClientCharCfgUpdated',)
gatt__disc_chars_by_uuid = ('GATT_DiscCharsByUUID',)
gap__device_init_done = ('GAP_DeviceInitDone',)
gap__device_discovery_done = ('GAP_DeviceDiscoveryDone',)
gap__advert_data_update_done = ('GAP_AdvertDataUpdateDone',)
gap__make_discoverable_done = ('GAP_MakeDiscoverableDone',)
gap__end_discoverable_done = ('GAP_EndDiscoverableDone',)
gap__link_established = ('GAP_LinkEstablished',)
gap__link_terminated = ('GAP_LinkTerminated',)
gap__link_param_update = ('GAP_LinkParamUpdate',)
gap__device_information = ('GAP_DeviceInformation',)
gap_hci__extension_command_status = ('GAP_HCI_ExtensionCommandStatus',)
opcodes = {'fd01': cmd.ATT_ErrorRsp, 'fd02': cmd.ATT_ExchangeMTUReq, 'fd03': cmd.ATT_ExchangeMTURsp, 'fd04': cmd.ATT_FindInfoReq, 'fd05': cmd.ATT_FindInfoRsp, 'fd06': cmd.ATT_FindByTypeValueReq, 'fd07': cmd.ATT_FindByTypeValueRsp, 'fd08': cmd.ATT_FindByTypeReq, 'fd09': cmd.ATT_FindByTypeRsp, 'fd0a': cmd.ATT_ReadReq, 'fd0b': cmd.ATT_ReadRsp, 'fd0c': cmd.ATT_ReadBlobReq, 'fd0d': cmd.ATT_ReadBlobRsp, 'fd10': cmd.ATT_ReadByGrpTypeReq, 'fd11': cmd.ATT_ReadByGrpTypeRsp, 'fd12': cmd.ATT_WriteReq, 'fd13': cmd.ATT_WriteRsp, 'fd16': cmd.ATT_PrepareWriteReq, 'fd17': cmd.ATT_PrepareWriteRsp, 'fd18': cmd.ATT_ExecuteWriteReq, 'fd19': cmd.ATT_ExecuteWriteRsp, 'fd1b': cmd.ATT_HandleValueNotification, 'fd1d': cmd.ATT_HandleValueIndication, 'fd1e': cmd.ATT_HandleValueConfirmation, 'fd86': cmd.GATT_DiscPrimaryServiceByUUID, 'fd88': cmd.GATT_DiscCharsByUUID, 'fd8a': cmd.GATT_ReadCharValue, 'fd8e': cmd.GATT_ReadMultipleCharValues, 'fd92': cmd.GATT_WriteCharValue, 'fd96': cmd.GATT_WriteLongCharValue, 'fdb2': cmd.GATT_DiscAllChars, 'fdb4': cmd.GATT_ReadUsingCharUUID, 'fdfc': cmd.GATT_AddService, 'fdfd': cmd.GATT_DelService, 'fdfe': cmd.GATT_AddAttribute, 'fe00': cmd.GAP_DeviceInit, 'fe03': cmd.GAP_ConfigureDeviceAddr, 'fe04': cmd.GATT_DeviceDiscoveryRequest, 'fe05': cmd.GATT_DeviceDiscoveryCancel, 'fe06': cmd.GAP_MakeDiscoverable, 'fe07': cmd.GAP_UpdateAdvertisingData, 'fe08': cmd.GAP_EndDiscoverable, 'fe09': cmd.GAP_EstablishLinkRequest, 'fe0a': cmd.GAP_TerminateLinkRequest, 'fe11': cmd.GAP_UpdateLinkParamReq, 'fe30': cmd.GAP_SetParam, 'fe31': cmd.GAP_GetParam, 'fe80': cmd.HTIL_Reset}
hci_cmds = {'fd01': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'req_opcode', 'len': 1, 'default': '\x00'}, {'name': 'handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'error_code', 'len': 1, 'default': '\x00'}], 'fd02': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'client_rx_mtu', 'len': 2, 'default': '\x00\x87'}], 'fd03': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'server_rx_mtu', 'len': 2, 'default': '\x00\x87'}], 'fd04': [{'name': 'conn_handle', 'len': 2, 'default': 'ÿþ'}, {'name': 'start_handle', 'len': 2, 'default': '\x00\x01'}, {'name': 'end_handle', 'len': 2, 'default': 'ÿÿ'}], 'fd09': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'data_length', 'len': 1, 'default': None}, {'name': 'value', 'len': None, 'default': None}], 'fd0c': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'offset', 'len': 2, 'default': '\x00\x00'}], 'fd0d': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'value', 'len': None, 'default': None}], 'fd10': [{'name': 'conn_handle', 'len': 2, 'default': 'ÿþ'}, {'name': 'start_handle', 'len': 2, 'default': '\x00\x01'}, {'name': 'end_handle', 'len': 2, 'default': 'ÿÿ'}, {'name': 'group_type', 'len': None, 'default': '\x00('}], 'fd11': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'data_length', 'len': 1, 'default': None}, {'name': 'value', 'len': None, 'default': '\x00\x00'}], 'fd13': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}], 'fd1b': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'authenticated', 'len': 1, 'default': '\x00'}, {'name': 'handle', 'len': 2, 'default': 'þÿ'}, {'name': 'value', 'len': None, 'default': None}], 'fd1d': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'authenticated', 'len': 1, 'default': '\x00'}, {'name': 'handle', 'len': 2, 'default': 'þÿ'}, {'name': 
'value', 'len': None, 'default': None}], 'fd1e': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}], 'fd86': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'value', 'len': None, 'default': None}], 'fd88': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'start_handle', 'len': 2, 'default': '\x01\x00'}, {'name': 'end_handle', 'len': 2, 'default': 'þÿ'}, {'name': 'type', 'len': None, 'default': None}], 'fd8a': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'handle', 'len': 2, 'default': None}], 'fd8c': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'offset', 'len': 2, 'default': '\x00\x00'}, {'name': 'type', 'len': 1, 'default': '\x00'}], 'fd8e': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'handles', 'len': None, 'default': None}], 'fd92': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'handle', 'len': 2, 'default': None}, {'name': 'value', 'len': None, 'default': None}], 'fd96': [{'name': 'handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'offset', 'len': 1, 'default': None}, {'name': 'value', 'len': None, 'default': None}], 'fdb2': [{'name': 'start_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'end_handle', 'len': 2, 'default': 'ÿÿ'}], 'fdb4': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'start_handle', 'len': 2, 'default': '\x01\x00'}, {'name': 'end_handle', 'len': 2, 'default': 'ÿÿ'}, {'name': 'read_type', 'len': 2, 'default': None}], 'fdfc': [{'name': 'uuid', 'len': 2, 'default': '(\x00'}, {'name': 'numAttrs', 'len': 2, 'default': '\x00\x01'}], 'fdfd': [{'name': 'handle', 'len': 2, 'default': '\x00\x01'}], 'fdfe': [{'name': 'uuid', 'len': None, 'default': '\x00\x00x00'}, {'name': 'permissions', 'len': 1, 'default': '\x03'}], 'fe00': [{'name': 'profile_role', 'len': 1, 'default': '\x08'}, {'name': 'max_scan_rsps', 'len': 1, 'default': '\xa0'}, 
{'name': 'irk', 'len': 16, 'default': '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'}, {'name': 'csrk', 'len': 16, 'default': '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'}, {'name': 'sign_counter', 'len': 4, 'default': '\x01\x00\x00\x00'}], 'fe03': [{'name': 'addr_type', 'len': 1, 'default': None}, {'name': 'addr', 'len': 6, 'default': None}], 'fe04': [{'name': 'mode', 'len': 1, 'default': None}, {'name': 'active_scan', 'len': 1, 'default': '\x01'}, {'name': 'white_list', 'len': 1, 'default': '\x00'}], 'fe05': [], 'fe06': [{'name': 'event_type', 'len': 1, 'default': '\x00'}, {'name': 'init_addr_type', 'len': 1, 'default': '\x00'}, {'name': 'init_addrs', 'len': 6, 'default': '\x00\x00\x00\x00\x00\x00'}, {'name': 'channel_map', 'len': 1, 'default': '\x07'}, {'name': 'filter_policy', 'len': 1, 'default': '\x00'}], 'fe07': [{'name': 'ad_type', 'len': 1, 'default': '\x01'}, {'name': 'data_length', 'len': 1, 'default': None}, {'name': 'advert_data', 'len': None, 'default': '\x02\x01\x07'}], 'fe08': [], 'fe09': [{'name': 'high_duty_cycle', 'len': 1, 'default': '\x00'}, {'name': 'white_list', 'len': 1, 'default': '\x00'}, {'name': 'addr_type_peer', 'len': 1, 'default': '\x00'}, {'name': 'peer_addr', 'len': 6, 'default': None}], 'fe0a': [{'name': 'conn_handle', 'len': 2, 'default': '\x00\x00'}, {'name': 'disconnect_reason', 'len': 1, 'default': '\x13'}], 'fe30': [{'name': 'param_id', 'len': 1, 'default': None}, {'name': 'param_value', 'len': 2, 'default': None}], 'fe31': [{'name': 'param_id', 'len': 1, 'default': None}], 'fe80': [{'name': 'reset_type', 'len': 1, 'default': '\x01'}], '0c03': []}
hci_events = {'ff': {'name': event.HCI_LE_ExtEvent, 'structure': [{'name': 'ext_event', 'len': None}]}}
ext_events = {'0501': {'name': event.ATT_ErrorRsp, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'req_op_code', 'len': 1}, {'name': 'handle', 'len': 2}, {'name': 'error_code', 'len': 1}]}, '0502': {'name': event.ATT_ExchangeMTUReq, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'client_rx_mtu', 'len': 2}]}, '0503': {'name': event.ATT_ExchangeMTURsp, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'server_rx_mtu', 'len': 2}]}, '0504': {'name': event.ATT_FindInfoReq, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'start_handle', 'len': 2}, {'name': 'end_handle', 'len': 2}]}, '0505': {'name': event.ATT_FindInfoRsp, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'format', 'len': 1}, {'name': 'results', 'len': None}], 'parsing': [('results', lambda ble, original: ble._parse_find_info_results(original['results'], original['format']))]}, '0506': {'name': event.ATT_FindByTypeValueReq, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'start_handle', 'len': 2}, {'name': 'end_handle', 'len': 2}]}, '0507': {'name': event.ATT_FindByTypeValueRsp, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'start_handle', 'len': 2}, {'name': 'end_handle', 'len': 2}, {'name': 'value', 'len': None}]}, '0508': {'name': event.ATT_ReadByTypeReq, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'start_handle', 'len': 2}, {'name': 'end_handle', 'len': 2}, {'name': 'type', 'len': None}]}, '0509': {'name': event.ATT_ReadByTypeRsp, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'length', 'len': 1}, {'name': 'handle', 'len': 2}, {'name': 'value', 'len': None}]}, '050b': {'name': event.ATT_ReadRsp, 'structure': [{'name': 'conn_handle', 
'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'value', 'len': None}]}, '050c': {'name': event.ATT_ReadBlobReq, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'handle', 'len': 2}, {'name': 'offset', 'len': 2}]}, '050d': {'name': event.ATT_ReadBlobRsp, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'value', 'len': None}]}, '050f': {'name': event.ATT_ReadMultiRsp, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'results', 'len': None}]}, '0510': {'name': event.ATT_ReadByGrpTypeReq, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'start_handle', 'len': 2}, {'name': 'end_handle', 'len': 2}, {'name': 'group_type', 'len': 2}]}, '0511': {'name': event.ATT_ReadByGrpTypeRsp, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}]}, '0512': {'name': event.ATT_WriteReq, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'signature', 'len': 1}, {'name': 'command', 'len': 1}, {'name': 'handle', 'len': 2}, {'name': 'value', 'len': None}]}, '0513': {'name': event.ATT_WriteRsp, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}]}, '051b': {'name': event.ATT_HandleValueNotification, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'handle', 'len': 2}, {'name': 'values', 'len': None}]}, '051d': {'name': event.ATT_HandleValueIndication, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'handle', 'len': 2}, {'name': 'values', 'len': None}]}, '051e': {'name': event.ATT_HandleValueConfirmation, 'structure': [{'name': 'conn_handle', 'len': 2}]}, '0580': {'name': event.GATT_ClientCharCfgUpdated, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'pdu_len', 'len': 1}, {'name': 'attr_handle', 'len': 2}, {'name': 'value', 'len': 1}]}, '0600': {'name': 
event.GAP_DeviceInitDone, 'structure': [{'name': 'dev_addr', 'len': 6}, {'name': 'data_pkt_len', 'len': 2}, {'name': 'num_data_pkts', 'len': 1}, {'name': 'irk', 'len': 16}, {'name': 'csrk', 'len': 16}]}, '0601': {'name': event.GAP_DeviceDiscoveryDone, 'structure': [{'name': 'num_devs', 'len': 1}, {'name': 'devices', 'len': None}], 'parsing': [('devices', lambda ble, original: ble._parse_devices(original['devices']))]}, '0602': {'name': event.GAP_AdvertDataUpdateDone, 'structure': [{'name': 'ad_type', 'len': 1}]}, '0603': {'name': event.GAP_MakeDiscoverableDone, 'structure': []}, '0604': {'name': event.GAP_EndDiscoverableDone, 'structure': []}, '0605': {'name': event.GAP_LinkEstablished, 'structure': [{'name': 'dev_addr_type', 'len': 1}, {'name': 'dev_addr', 'len': 6}, {'name': 'conn_handle', 'len': 2}, {'name': 'conn_interval', 'len': 2}, {'name': 'conn_latency', 'len': 2}, {'name': 'conn_timeout', 'len': 2}, {'name': 'clock_accuracy', 'len': 1}]}, '0606': {'name': event.GAP_LinkTerminated, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'value', 'len': None}]}, '0607': {'name': event.GAP_LinkParamUpdate, 'structure': [{'name': 'conn_handle', 'len': 2}, {'name': 'conn_interval', 'len': 2}, {'name': 'conn_latency', 'len': 2}, {'name': 'conn_timeout', 'len': 2}]}, '060d': {'name': event.GAP_DeviceInformation, 'structure': [{'name': 'event_type', 'len': 1}, {'name': 'addr_type', 'len': 1}, {'name': 'addr', 'len': 6}, {'name': 'rssi', 'len': 1}, {'name': 'data_len', 'len': 1}, {'name': 'data_field', 'len': None}]}, '067f': {'name': event.GAP_HCI_ExtensionCommandStatus, 'structure': [{'name': 'op_code', 'len': 2}, {'name': 'data_len', 'len': 1}, {'name': 'param_value', 'len': None}], 'parsing': [('op_code', lambda ble, original: ble._parse_opcodes(original['op_code']))]}} |
# Time - O (N) | Space O(N)
def tournamentWinner(competitions, results):
points = {}
for comp in range(len(competitions)):
if results[comp] == 1:
if competitions[comp][0] not in points:
points[competitions[comp][0]] = 3
else:
points[competitions[comp][0]] = points[competitions[comp][0]] + 3
else:
if competitions[comp][1] not in points:
points[competitions[comp][1]] = 3
else:
points[competitions[comp][1]] = points[competitions[comp][1]] + 3
winner = ""
maxPoints = -1
for key in points:
if points[key] > maxPoints:
winner = key
maxPoints = points[key]
return winner | def tournament_winner(competitions, results):
points = {}
for comp in range(len(competitions)):
if results[comp] == 1:
if competitions[comp][0] not in points:
points[competitions[comp][0]] = 3
else:
points[competitions[comp][0]] = points[competitions[comp][0]] + 3
elif competitions[comp][1] not in points:
points[competitions[comp][1]] = 3
else:
points[competitions[comp][1]] = points[competitions[comp][1]] + 3
winner = ''
max_points = -1
for key in points:
if points[key] > maxPoints:
winner = key
max_points = points[key]
return winner |
# Game Properties
WIDTH = 600
HEIGHT = 600
FPS = 30
# Game Colours
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
RED = (255, 0, 0)
YELLOW = (255, 255, 0) | width = 600
height = 600
fps = 30
white = (255, 255, 255)
black = (0, 0, 0)
red = (255, 0, 0)
yellow = (255, 255, 0) |
class MailException(Exception):
pass
| class Mailexception(Exception):
pass |
# def alias(alias_var):
#
# @property
# def func(cls):
# return getattr(cls, alias_var)
#
# return func
# we get bonus 1 for free!
# def alias(alias_var, write=False):
#
# def getter(cls):
# return getattr(cls, alias_var)
#
# def setter(cls, value):
# if write is True:
# setattr(cls, alias_var, value)
# else:
# raise AttributeError("can't set attribute")
#
# return property(getter, setter)
class Alias:
def __init__(self, alias_var, write=False):
self.alias_var, self.write = alias_var, write
def __get__(self, instance, owner):
if instance is None:
return getattr(owner, self.alias_var)
else:
return getattr(instance, self.alias_var)
def __set__(self, instance, value):
if self.write is True:
setattr(instance, self.alias_var, value)
else:
raise AttributeError("can't set attribute")
alias = Alias
| class Alias:
def __init__(self, alias_var, write=False):
(self.alias_var, self.write) = (alias_var, write)
def __get__(self, instance, owner):
if instance is None:
return getattr(owner, self.alias_var)
else:
return getattr(instance, self.alias_var)
def __set__(self, instance, value):
if self.write is True:
setattr(instance, self.alias_var, value)
else:
raise attribute_error("can't set attribute")
alias = Alias |
products = [
{
'_id': "1",
'name': "Airpods Wireless Bluetooth Headphones",
'image': "/images/airpods.jpg",
'description':
"Bluetooth technology lets you connect it with compatible devices wirelessly High-quality AAC audio offers immersive listening experience Built-in microphone allows you to take calls while working",
'brand': "Apple",
'category': "Electronics",
'price': 89.99,
'countInStock': 10,
'rating': 4.5,
'numReviews': 12,
},
{
'_id': "2",
'name': "iPhone 11 Pro 256GB Memory",
'image': "/images/phone.jpg",
'description':
"Introducing the iPhone 11 Pro. A transformative triple-camera system that adds tons of capability without complexity. An unprecedented leap in battery life",
'brand': "Apple",
'category': "Electronics",
'price': 599.99,
'countInStock': 7,
'rating': 4.0,
'numReviews': 8,
},
{
'_id': "3",
'name': "Cannon EOS 80D DSLR Camera",
'image': "/images/camera.jpg",
'description':
"Characterized by versatile imaging specs, the Canon EOS 80D further clarifies itself using a pair of robust focusing systems and an intuitive design",
'brand': "Cannon",
'category': "Electronics",
'price': 929.99,
'countInStock': 5,
'rating': 3,
'numReviews': 12,
},
{
'_id': "4",
'name': "Sony Playstation 4 Pro White Version",
'image': "/images/playstation.jpg",
'description':
"The ultimate home entertainment center starts with PlayStation. Whether you are into gaming, HD movies, television, music",
'brand': "Sony",
'category': "Electronics",
'price': 399.99,
'countInStock': 11,
'rating': 5,
'numReviews': 12,
},
{
'_id': "5",
'name': "Logitech G-Series Gaming Mouse",
'image': "/images/mouse.jpg",
'description':
"Get a better handle on your games with this Logitech LIGHTSYNC gaming mouse. The six programmable buttons allow customization for a smooth playing experience",
'brand': "Logitech",
'category': "Electronics",
'price': 49.99,
'countInStock': 7,
'rating': 3.5,
'numReviews': 10,
},
{
'_id': "6",
'name': "Amazon Echo Dot 3rd Generation",
'image': "/images/alexa.jpg",
'description':
"Meet Echo Dot - Our most popular smart speaker with a fabric design. It is our most compact smart speaker that fits perfectly into small space",
'brand': "Amazon",
'category': "Electronics",
'price': 29.99,
'countInStock': 0,
'rating': 4,
'numReviews': 12,
},
] | products = [{'_id': '1', 'name': 'Airpods Wireless Bluetooth Headphones', 'image': '/images/airpods.jpg', 'description': 'Bluetooth technology lets you connect it with compatible devices wirelessly High-quality AAC audio offers immersive listening experience Built-in microphone allows you to take calls while working', 'brand': 'Apple', 'category': 'Electronics', 'price': 89.99, 'countInStock': 10, 'rating': 4.5, 'numReviews': 12}, {'_id': '2', 'name': 'iPhone 11 Pro 256GB Memory', 'image': '/images/phone.jpg', 'description': 'Introducing the iPhone 11 Pro. A transformative triple-camera system that adds tons of capability without complexity. An unprecedented leap in battery life', 'brand': 'Apple', 'category': 'Electronics', 'price': 599.99, 'countInStock': 7, 'rating': 4.0, 'numReviews': 8}, {'_id': '3', 'name': 'Cannon EOS 80D DSLR Camera', 'image': '/images/camera.jpg', 'description': 'Characterized by versatile imaging specs, the Canon EOS 80D further clarifies itself using a pair of robust focusing systems and an intuitive design', 'brand': 'Cannon', 'category': 'Electronics', 'price': 929.99, 'countInStock': 5, 'rating': 3, 'numReviews': 12}, {'_id': '4', 'name': 'Sony Playstation 4 Pro White Version', 'image': '/images/playstation.jpg', 'description': 'The ultimate home entertainment center starts with PlayStation. Whether you are into gaming, HD movies, television, music', 'brand': 'Sony', 'category': 'Electronics', 'price': 399.99, 'countInStock': 11, 'rating': 5, 'numReviews': 12}, {'_id': '5', 'name': 'Logitech G-Series Gaming Mouse', 'image': '/images/mouse.jpg', 'description': 'Get a better handle on your games with this Logitech LIGHTSYNC gaming mouse. 
The six programmable buttons allow customization for a smooth playing experience', 'brand': 'Logitech', 'category': 'Electronics', 'price': 49.99, 'countInStock': 7, 'rating': 3.5, 'numReviews': 10}, {'_id': '6', 'name': 'Amazon Echo Dot 3rd Generation', 'image': '/images/alexa.jpg', 'description': 'Meet Echo Dot - Our most popular smart speaker with a fabric design. It is our most compact smart speaker that fits perfectly into small space', 'brand': 'Amazon', 'category': 'Electronics', 'price': 29.99, 'countInStock': 0, 'rating': 4, 'numReviews': 12}] |
"""
Defines the Singleton metaclass available through :class:`objecttools.Singleton`
"""
__all__ = ('Singleton',)
class Singleton(type):
    """A metaclass for defining singletons.

    The first instantiation of a class using this metaclass creates the one
    instance, caches it on the class as ``__self__``, and replaces the
    class's ``__new__`` so every later call returns that cached instance.
    """
    def __new__(mcs, name, bases, dict):
        """
        Create a new :class:`Singleton` instance
        :param name: Name of the new class
        :type name: str
        :param bases: Base classes of the new class
        :type bases: Tuple[type, ...]
        :param dict: Attributes of the new class
        :type dict: Dict[Str, Any]
        :return: A new class of type Singleton
        :rtype: Singleton
        """
        return super(Singleton, mcs).__new__(mcs, name, bases, dict)

    def __init__(cls, name, bases, dict):
        """
        Instantiate a :class:`Singleton` class
        :param name: Name of the new class
        :type name: str
        :param bases: Base classes of the new class
        :type bases: Tuple[type, ...]
        :param dict: Attributes of the new class
        :type dict: Dict[Str, Any]
        :return: None
        :rtype: NoneType
        """
        super(Singleton, cls).__init__(name, bases, dict)
        # Capture the class's original __new__/__init__ and the class itself
        # in closure variables, then install a replacement __new__.
        old_new = cls.__new__
        __init__ = cls.__init__
        this_cls = cls

        def __new__(cls=None):
            # First call: build the single instance the normal way...
            self = old_new(this_cls)
            __init__(self)
            this_cls.__self__ = self  # cache the instance on the class

            # ...then swap in a second __new__ that just returns the cache,
            # so construction logic never runs again.
            def __new__(cls=None):
                return self
            this_cls.__new__ = staticmethod(__new__)
            return self
        cls.__new__ = staticmethod(__new__)

    @classmethod
    def create(mcs, name, dict=None, object_name=None):
        """
        Create a new :class:`Singleton` class
        :param name: Name of the new class (Used in its __repr__ if no object_name)
        :type name: str
        :param dict: Optional dictionary of the classes' attributes
        :type dict: Optional[Dict[str, Any]]
        :param object_name: Name of an instance of the singleton. Used in __repr__.
        :type object_name: Optional[str]
        :return: A new Singleton instance
        :rtype: Singleton
        """
        if dict is None:
            dict = {}
        # Instances repr as "Name()" unless an explicit object_name is given.
        _repr = name + '()' if object_name is None else object_name

        def __repr__(self=None):
            return _repr
        dict.setdefault('__repr__', __repr__)
        return mcs(name, (object,), dict)

    @classmethod
    def as_decorator(mcs, cls):
        """
        Use :class:`Singleton` as a decorator for Python 2/3 compatibility::
            @Singleton.as_decorator
            class SingletonType(object):
                def __repr__(self):
                    return 'singleton'
            singleton = SingletonType()
        :param cls: Class to become a singleton
        :type cls: type
        :return: The new singleton
        :rtype: Singleton
        """
        # Rebuild the decorated class under this metaclass.
        return mcs(cls.__name__, cls.__bases__, cls.__dict__.copy())

    def __repr__(cls):
        # The class itself reprs as its bare name.
        return cls.__name__
| """
Defines the Singleton metaclass available through :class:`objecttools.Singleton`
"""
__all__ = ('Singleton',)
class Singleton(type):
"""A metaclass for defining singletons"""
def __new__(mcs, name, bases, dict):
"""
Create a new :class:`Singleton` instance
:param name: Name of the new class
:type name: str
:param bases: Base classes of the new class
:type bases: Tuple[type, ...]
:param dict: Attributes of the new class
:type dict: Dict[Str, Any]
:return: A new class of type Singleton
:rtype: Singleton
"""
return super(Singleton, mcs).__new__(mcs, name, bases, dict)
def __init__(cls, name, bases, dict):
"""
Instantiate a :class:`Singleton` class
:param name: Name of the new class
:type name: str
:param bases: Base classes of the new class
:type bases: Tuple[type, ...]
:param dict: Attributes of the new class
:type dict: Dict[Str, Any]
:return: None
:rtype: NoneType
"""
super(Singleton, cls).__init__(name, bases, dict)
old_new = cls.__new__
__init__ = cls.__init__
this_cls = cls
def __new__(cls=None):
self = old_new(this_cls)
__init__(self)
this_cls.__self__ = self
def __new__(cls=None):
return self
this_cls.__new__ = staticmethod(__new__)
return self
cls.__new__ = staticmethod(__new__)
@classmethod
def create(mcs, name, dict=None, object_name=None):
"""
Create a new :class:`Singleton` class
:param name: Name of the new class (Used in its __repr__ if no object_name)
:type name: str
:param dict: Optional dictionary of the classes' attributes
:type dict: Optional[Dict[str, Any]]
:param object_name: Name of an instance of the singleton. Used in __repr__.
:type object_name: Optional[str]
:return: A new Singleton instance
:rtype: Singleton
"""
if dict is None:
dict = {}
_repr = name + '()' if object_name is None else object_name
def __repr__(self=None):
return _repr
dict.setdefault('__repr__', __repr__)
return mcs(name, (object,), dict)
@classmethod
def as_decorator(mcs, cls):
"""
Use :class:`Singleton` as a decorator for Python 2/3 compatibility::
@Singleton.as_decorator
class SingletonType(object):
def __repr__(self):
return 'singleton'
singleton = SingletonType()
:param cls: Class to become a singleton
:type cls: type
:return: The new singleton
:rtype: Singleton
"""
return mcs(cls.__name__, cls.__bases__, cls.__dict__.copy())
def __repr__(cls):
return cls.__name__ |
class Diff:
    """
    A set of Models, with an operation associated, which can be propagated
    from parent to child node.

    This structure is necessary because of
    the access permissions/kv/kvcomp
    propagation from parent to child nodes i.e. if some access/kv/kvcomp(es)
    is (are) applied to a node - it will affect (update, insert, delete)
    all its descendents.
    """

    # Operation codes describing how the contained instances are applied.
    DELETE = -1
    UPDATE = 0
    ADD = 1  # accesses in the list will be added
    REPLACE = 2

    def __init__(self, operation, instances_set=None):
        """
        :param operation: one of DELETE / UPDATE / ADD / REPLACE
        :param instances_set: optional iterable of instances. A set is kept
            by reference (same aliasing as before); any other iterable is
            copied into a new set. ``None`` replaces the previous mutable
            default argument (``[]``).
        """
        self._op = operation
        if not instances_set:
            # Covers None as well as any empty iterable (old `len(...) == 0`).
            self._set = set()
        elif isinstance(instances_set, set):
            self._set = instances_set
        else:
            # Previously a non-set iterable was stored as-is, which broke
            # later calls to .add()/.pop(); normalise into a real set.
            self._set = set(instances_set)

    @property
    def operation(self):
        """The operation code this diff carries."""
        return self._op

    def add(self, instance):
        """Add an instance to the diff (duplicates are ignored)."""
        self._set.add(instance)

    def __len__(self):
        return len(self._set)

    def __iter__(self):
        return iter(self._set)

    def first(self):
        """Return an arbitrary contained instance, or None when empty."""
        if len(self) > 0:
            return next(iter(self._set))
        return None

    def pop(self):
        """Remove and return an arbitrary instance (KeyError when empty)."""
        return self._set.pop()

    def is_update(self):
        return self.operation == self.UPDATE

    def is_add(self):
        return self.operation == self.ADD

    def is_delete(self):
        return self.operation == self.DELETE

    def is_replace(self):
        return self.operation == self.REPLACE

    def __str__(self):
        op_name = {
            self.DELETE: "delete",
            self.UPDATE: "update",
            self.ADD: "add",
            self.REPLACE: "replace"
        }
        inst_list = [
            str(inst) for inst in self._set
        ]
        op = op_name[self._op]
        return f"Diff({op}, {inst_list})"

    def __repr__(self):
        return self.__str__()
| class Diff:
"""
A list of Models which can be propagated from parent to child node
an operation associated.
This structure is necessary because of
the access permissions/kv/kvcomp
propagation from parent to child nodes i.e. if some access/kv/kvcomp(es)
is (are) applied to a node - it will affect (update, insert, delete)
all its descendents.
"""
delete = -1
update = 0
add = 1
replace = 2
def __init__(self, operation, instances_set=[]):
self._op = operation
if len(instances_set) == 0:
self._set = set()
else:
self._set = instances_set
@property
def operation(self):
return self._op
def add(self, instance):
self._set.add(instance)
def __len__(self):
return len(self._set)
def __iter__(self):
return iter(self._set)
def first(self):
if len(self) > 0:
return list(self._set)[0]
return None
def pop(self):
return self._set.pop()
def is_update(self):
return self.operation == self.UPDATE
def is_add(self):
return self.operation == self.ADD
def is_delete(self):
return self.operation == self.DELETE
def is_replace(self):
return self.operation == self.REPLACE
def __str__(self):
op_name = {self.DELETE: 'delete', self.UPDATE: 'update', self.ADD: 'add', self.REPLACE: 'replace'}
inst_list = [str(inst) for inst in self._set]
op = op_name[self._op]
return f'Diff({op}, {inst_list})'
def __repr__(self):
return self.__str__() |
"""455. Assign Cookies"""
class Solution(object):
    """Greedy solution to LeetCode 455 (Assign Cookies)."""

    def findContentChildren(self, g, s):
        """
        :type g: List[int]  # greed factors of the children
        :type s: List[int]  # sizes of the available cookies
        :rtype: int  # how many children can be made content
        """
        # Sort both lists in place (same observable side effect as before),
        # then hand the smallest sufficient cookie to the least greedy child.
        g.sort()
        s.sort()
        satisfied = 0
        for size in s:
            if satisfied == len(g):
                break  # every child already has a cookie
            if size >= g[satisfied]:
                satisfied += 1
        return satisfied
| """455. Assign Cookies"""
class Solution(object):
def find_content_children(self, g, s):
"""
:type g: List[int]
:type s: List[int]
:rtype: int
"""
g.sort()
s.sort()
cookie = 0
child = 0
while cookie <= len(s) - 1 and child <= len(g) - 1:
if s[cookie] >= g[child]:
child += 1
cookie += 1
return child |
# Training config: StyleGAN3 generator on FFHQ 256x256 (unconditional GAN).
_base_ = [
    '../_base_/models/stylegan/stylegan3_base.py',
    '../_base_/datasets/unconditional_imgs_flip_lanczos_resize_256x256.py',
    '../_base_/default_runtime.py'
]
# Generator synthesis-network settings merged into the base model config.
synthesis_cfg = {
    'type': 'SynthesisNetwork',
    'channel_base': 16384,
    'channel_max': 512,
    'magnitude_ema_beta': 0.999
}
r1_gamma = 2.  # set by user
d_reg_interval = 16  # lazy regularisation: R1 penalty applied every N iters
model = dict(
    type='StaticUnconditionalGAN',
    generator=dict(out_size=256, img_channels=3, synthesis_cfg=synthesis_cfg),
    discriminator=dict(in_size=256, channel_multiplier=1),
    gan_loss=dict(type='GANLoss', gan_type='wgan-logistic-ns'),
    # R1 weight is rescaled to compensate for the lazy-regularisation interval.
    disc_auxiliary_loss=dict(loss_weight=r1_gamma / 2.0 * d_reg_interval))
imgs_root = 'data/ffhq/images'
data = dict(
    samples_per_gpu=4,
    train=dict(dataset=dict(imgs_root=imgs_root)),
    val=dict(imgs_root=imgs_root))
ema_half_life = 10.  # G_smoothing_kimg
custom_hooks = [
    # Periodically dump generator samples for visual inspection.
    dict(
        type='VisualizeUnconditionalSamples',
        output_dir='training_samples',
        interval=5000),
    # Maintain an exponential-moving-average copy of the generator weights.
    dict(
        type='ExponentialMovingAverageHook',
        module_keys=('generator_ema', ),
        interp_mode='lerp',
        interval=1,
        start_iter=0,
        momentum_policy='rampup',
        momentum_cfg=dict(
            ema_kimg=10, ema_rampup=0.05, batch_size=32, eps=1e-8),
        priority='VERY_HIGH')
]
# Pre-computed inception statistics for FID on FFHQ (lanczos-resized 256px).
inception_pkl = 'work_dirs/inception_pkl/ffhq-lanczos-256x256.pkl'
metrics = dict(
    fid50k=dict(
        type='FID',
        num_images=50000,
        inception_pkl=inception_pkl,
        inception_args=dict(type='StyleGAN'),
        bgr2rgb=True))
inception_path = None
# Evaluate FID every 10k iterations on samples from the EMA generator.
evaluation = dict(
    type='GenerativeEvalHook',
    interval=10000,
    metrics=dict(
        type='FID',
        num_images=50000,
        inception_pkl=inception_pkl,
        inception_args=dict(type='StyleGAN', inception_path=inception_path),
        bgr2rgb=True),
    sample_kwargs=dict(sample_model='ema'))
checkpoint_config = dict(interval=10000, by_epoch=False, max_keep_ckpts=30)
lr_config = None  # learning-rate schedule comes from the base config
total_iters = 800002
| _base_ = ['../_base_/models/stylegan/stylegan3_base.py', '../_base_/datasets/unconditional_imgs_flip_lanczos_resize_256x256.py', '../_base_/default_runtime.py']
synthesis_cfg = {'type': 'SynthesisNetwork', 'channel_base': 16384, 'channel_max': 512, 'magnitude_ema_beta': 0.999}
r1_gamma = 2.0
d_reg_interval = 16
model = dict(type='StaticUnconditionalGAN', generator=dict(out_size=256, img_channels=3, synthesis_cfg=synthesis_cfg), discriminator=dict(in_size=256, channel_multiplier=1), gan_loss=dict(type='GANLoss', gan_type='wgan-logistic-ns'), disc_auxiliary_loss=dict(loss_weight=r1_gamma / 2.0 * d_reg_interval))
imgs_root = 'data/ffhq/images'
data = dict(samples_per_gpu=4, train=dict(dataset=dict(imgs_root=imgs_root)), val=dict(imgs_root=imgs_root))
ema_half_life = 10.0
custom_hooks = [dict(type='VisualizeUnconditionalSamples', output_dir='training_samples', interval=5000), dict(type='ExponentialMovingAverageHook', module_keys=('generator_ema',), interp_mode='lerp', interval=1, start_iter=0, momentum_policy='rampup', momentum_cfg=dict(ema_kimg=10, ema_rampup=0.05, batch_size=32, eps=1e-08), priority='VERY_HIGH')]
inception_pkl = 'work_dirs/inception_pkl/ffhq-lanczos-256x256.pkl'
metrics = dict(fid50k=dict(type='FID', num_images=50000, inception_pkl=inception_pkl, inception_args=dict(type='StyleGAN'), bgr2rgb=True))
inception_path = None
evaluation = dict(type='GenerativeEvalHook', interval=10000, metrics=dict(type='FID', num_images=50000, inception_pkl=inception_pkl, inception_args=dict(type='StyleGAN', inception_path=inception_path), bgr2rgb=True), sample_kwargs=dict(sample_model='ema'))
checkpoint_config = dict(interval=10000, by_epoch=False, max_keep_ckpts=30)
lr_config = None
total_iters = 800002 |
# 5x5 example grid from the AoC 2021 day 11 description, used to
# illustrate a single flash cascade.
test_data = """11111
19991
19191
19991
11111"""
# 10x10 sample grid from the puzzle description (documented answers:
# 204 flashes after 10 steps, 1656 after 100, first sync flash at step 195).
sample_data = """5483143223
2745854711
5264556173
6141336146
6357385478
4167524645
2176841721
6882881134
4846848554
5283751526"""


class bcolors:
    # ANSI escape sequences used to highlight octopuses that flashed.
    WARNING = '\033[93m'  # bright yellow
    ENDC = '\033[0m'      # reset attributes
# Converts the multi-line string into a 2D array
def listify_input(data):
    """Parse a grid of digits (one row per line) into a list of int lists."""
    return [[int(ch) for ch in row] for row in data.splitlines()]
# Print out the array with some style
def print_octopi(octopi, flashed=None):
    """Pretty-print the grid, one digit per cell.

    :param octopi: 2D list of energy levels
    :param flashed: optional parallel 2D list of booleans; cells marked True
        are printed wrapped in ``bcolors.WARNING`` so flashes stand out
    """
    # Fix: removed the dead local `line = ''` the old version never used.
    for y in range(len(octopi)):
        for x in range(len(octopi[y])):
            if flashed is not None and flashed[y][x]:
                print(f'{bcolors.WARNING}{octopi[y][x]}{bcolors.ENDC}', end='')
            else:
                print(f'{octopi[y][x]}', end='')
        print()
    print()
# This handles what constitutes as a step for the puzzle
# and it returns the array, the flashes array, and the number
# of flashes that occured in the step
def do_step(octopi):
    """Advance the grid one step; return (grid, flashed mask, flash count)."""
    # Phase 1: every octopus gains one energy, and a parallel boolean mask
    # is built to record which octopuses flash during this step.
    flashed = []
    num_flashes = 0
    for y in range(len(octopi)):
        mask_row = []
        for x in range(len(octopi[y])):
            mask_row.append(False)
            octopi[y][x] += 1
        flashed.append(mask_row)
    # Phase 2: sweep the grid repeatedly. Any octopus above 9 that has not
    # yet flashed does so now: it resets to 0, is marked in the mask, and
    # feeds one energy to each neighbour (diagonals included) that has not
    # itself flashed — so flashed cells stay at 0. An octopus flashes at
    # most once per step; stop when a sweep produces no new flashes.
    while True:
        new_flashes = 0
        for y in range(len(octopi)):
            for x in range(len(octopi[y])):
                if octopi[y][x] > 9 and not flashed[y][x]:
                    octopi[y][x] = 0
                    flashed[y][x] = True
                    new_flashes += 1
                    for ny in (y - 1, y, y + 1):
                        for nx in (x - 1, x, x + 1):
                            if 0 <= ny < len(octopi) and 0 <= nx < len(octopi[y]) and not flashed[ny][nx]:
                                octopi[ny][nx] += 1
        if new_flashes == 0:
            break
        num_flashes += new_flashes
    return octopi, flashed, num_flashes
def partOne(data, num_steps):
    """Run ``num_steps`` steps and return the total number of flashes."""
    octopi = listify_input(data)
    total_flashes = 0
    for step_no in range(1, num_steps + 1):
        print(f'Step {step_no}:')
        octopi, flashed, step_flashes = do_step(octopi)
        print_octopi(octopi, flashed)
        total_flashes += step_flashes
    return total_flashes
# Find the first step where all the octopi flash at once
def partTwo(data):
    """Return the first step on which every octopus flashes simultaneously."""
    octopi = listify_input(data)
    # A synchronized step flashes exactly one cell per grid position.
    all_flashed = len(octopi) * len(octopi[0])
    step = 0
    while True:
        step += 1
        octopi, _flashed, num_flashes = do_step(octopi)
        if num_flashes == all_flashed:
            return step
# Load the real puzzle input (a grid of digits) as a single string.
values = ''
with open ('day-11.txt', 'r') as f:
    values = f.read()
# Sanity checks against the worked examples from the puzzle text,
# then the two real answers.
print('Problem 1 Test: {}'.format(partOne(test_data, 2)))
print('Problem 1 Sample Pt 1 Value (Target 204): {}'.format(partOne(sample_data, 10)))
print('Problem 1 Sample Pt 2 Value (Target 1656): {}'.format(partOne(sample_data, 100)))
print('Problem 1 Solution: {}'.format(partOne(values, 100)))
print('Problem 2 Sample Value (Target 195): {}'.format(partTwo(sample_data)))
print('Problem 2 Solution: {}'.format(partTwo(values)))
| test_data = '11111\n19991\n19191\n19991\n11111'
sample_data = '5483143223\n2745854711\n5264556173\n6141336146\n6357385478\n4167524645\n2176841721\n6882881134\n4846848554\n5283751526'
class Bcolors:
warning = '\x1b[93m'
endc = '\x1b[0m'
def listify_input(data):
ret = []
for line in data.splitlines():
row = []
for char in list(line):
row.append(int(char))
ret.append(row)
return ret
def print_octopi(octopi, flashed=None):
for y in range(len(octopi)):
line = ''
for x in range(len(octopi[y])):
if flashed is not None and flashed[y][x]:
print(f'{bcolors.WARNING}{octopi[y][x]}{bcolors.ENDC}', end='')
else:
print(f'{octopi[y][x]}', end='')
print()
print()
def do_step(octopi):
flashed = []
num_flashes = 0
for y in range(len(octopi)):
flashed_row = []
for x in range(len(octopi[y])):
flashed_row.append(False)
octopi[y][x] += 1
flashed.append(flashed_row)
while True:
new_flashes = 0
for y in range(len(octopi)):
for x in range(len(octopi[y])):
if octopi[y][x] > 9 and (not flashed[y][x]):
octopi[y][x] = 0
flashed[y][x] = True
new_flashes += 1
for flashy in [y - 1, y, y + 1]:
for flashx in [x - 1, x, x + 1]:
if flashy >= 0 and flashy < len(octopi) and (flashx >= 0) and (flashx < len(octopi[y])) and (not flashed[flashy][flashx]):
octopi[flashy][flashx] += 1
if new_flashes == 0:
break
else:
num_flashes += new_flashes
return (octopi, flashed, num_flashes)
def part_one(data, num_steps):
octopi = listify_input(data)
total_flashes = 0
for step in range(num_steps):
print(f'Step {step + 1}:')
(octopi, flashed, num_flashes) = do_step(octopi)
print_octopi(octopi, flashed)
total_flashes += num_flashes
return total_flashes
def part_two(data):
octopi = listify_input(data)
target_num_flashes = len(octopi) * len(octopi[0])
step = 0
while True:
step += 1
(octopi, flashed, num_flashes) = do_step(octopi)
if num_flashes == target_num_flashes:
return step
values = ''
with open('day-11.txt', 'r') as f:
values = f.read()
print('Problem 1 Test: {}'.format(part_one(test_data, 2)))
print('Problem 1 Sample Pt 1 Value (Target 204): {}'.format(part_one(sample_data, 10)))
print('Problem 1 Sample Pt 2 Value (Target 1656): {}'.format(part_one(sample_data, 100)))
print('Problem 1 Solution: {}'.format(part_one(values, 100)))
print('Problem 2 Sample Value (Target 195): {}'.format(part_two(sample_data)))
print('Problem 2 Solution: {}'.format(part_two(values))) |
# Definition for a binary tree node.
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        # Node payload plus optional child links (None marks a missing child).
        self.val = val
        self.left = left
        self.right = right
class Solution:
    def sumEvenGrandparent(self, root: 'TreeNode') -> int:
        """Return the sum of values of nodes whose grandparent value is even
        (LeetCode 1315).

        Fix: the previous version injected ``has_even_parent`` /
        ``has_even_grand_parent`` attributes into the caller's tree nodes,
        mutating the input and leaving stale flags behind. This version
        carries the parent/grandparent parity down the recursion instead,
        so the tree is left untouched.
        """
        return self._walk(root, False, False)

    def _walk(self, node, parent_even, grandparent_even):
        # Recursive helper: sum this subtree given the parity of the
        # node's parent and grandparent values.
        if node is None:
            return 0
        total = node.val if grandparent_even else 0
        node_even = node.val % 2 == 0
        # For the children, this node becomes the parent and the current
        # parent becomes the grandparent.
        total += self._walk(node.left, node_even, parent_even)
        total += self._walk(node.right, node_even, parent_even)
        return total
| class Treenode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
def sum_even_grandparent(self, root: TreeNode) -> int:
if not root:
return 0
if root.val % 2 == 0:
if root.left is not None:
root.left.has_even_parent = True
if root.right is not None:
root.right.has_even_parent = True
if hasattr(root, 'has_even_parent'):
if root.left is not None:
root.left.has_even_grand_parent = True
if root.right is not None:
root.right.has_even_grand_parent = True
value = 0
if hasattr(root, 'has_even_grand_parent'):
value = root.val
return value + self.sumEvenGrandparent(root.left) + self.sumEvenGrandparent(root.right) |
#
# Constants, shared by all scenes
#
# Scene keys (any unique values):
SCENE_A = 'scene A'
SCENE_B = 'scene B'
SCENE_C = 'scene C'
# Message ID's (any unique values):
SEND_MESSAGE = 'send message'
GET_DATA = 'get data'
# Greyscale palette as (R, G, B) tuples, light to dark steps of 50.
WHITE = (255, 255, 255)
GRAYA = (50, 50, 50)
GRAYB = (100, 100, 100)
GRAYC = (150, 150, 150)
| scene_a = 'scene A'
scene_b = 'scene B'
scene_c = 'scene C'
send_message = 'send message'
get_data = 'get data'
white = (255, 255, 255)
graya = (50, 50, 50)
grayb = (100, 100, 100)
grayc = (150, 150, 150) |
# Copyright 2014 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Common constants that can be used all over the manilaclient."""
# These are used for providing desired sorting params with list requests
SORT_DIR_VALUES = ('asc', 'desc')
SHARE_SORT_KEY_VALUES = (
'id', 'status', 'size', 'host', 'share_proto',
'export_location', 'availability_zone',
'user_id', 'project_id',
'created_at', 'updated_at',
'display_name', 'name',
'share_type_id', 'share_type',
'share_network_id', 'share_network',
'snapshot_id', 'snapshot',
)
SNAPSHOT_SORT_KEY_VALUES = (
'id',
'status',
'size',
'share_id',
'user_id',
'project_id',
'progress',
'name',
'display_name',
)
CONSISTENCY_GROUP_SORT_KEY_VALUES = (
'id',
'name',
'status',
'host',
'user_id',
'project_id',
'created_at',
'source_cgsnapshot_id',
)
CG_SNAPSHOT_SORT_KEY_VALUES = (
'id',
'name',
'status',
'host',
'user_id',
'project_id',
'created_at',
)
CG_SNAPSHOT_MEMBERS_SORT_KEY_VALUES = (
'id',
'name',
'created_at',
'size',
'share_protocol',
'project_id',
'share_type_id',
'cgsnapshot_id',
)
EXPERIMENTAL_HTTP_HEADER = 'X-OpenStack-Manila-API-Experimental'
V1_SERVICE_TYPE = 'share'
V2_SERVICE_TYPE = 'sharev2'
SERVICE_TYPES = {'1': V1_SERVICE_TYPE, '2': V2_SERVICE_TYPE}
| """Common constants that can be used all over the manilaclient."""
sort_dir_values = ('asc', 'desc')
share_sort_key_values = ('id', 'status', 'size', 'host', 'share_proto', 'export_location', 'availability_zone', 'user_id', 'project_id', 'created_at', 'updated_at', 'display_name', 'name', 'share_type_id', 'share_type', 'share_network_id', 'share_network', 'snapshot_id', 'snapshot')
snapshot_sort_key_values = ('id', 'status', 'size', 'share_id', 'user_id', 'project_id', 'progress', 'name', 'display_name')
consistency_group_sort_key_values = ('id', 'name', 'status', 'host', 'user_id', 'project_id', 'created_at', 'source_cgsnapshot_id')
cg_snapshot_sort_key_values = ('id', 'name', 'status', 'host', 'user_id', 'project_id', 'created_at')
cg_snapshot_members_sort_key_values = ('id', 'name', 'created_at', 'size', 'share_protocol', 'project_id', 'share_type_id', 'cgsnapshot_id')
experimental_http_header = 'X-OpenStack-Manila-API-Experimental'
v1_service_type = 'share'
v2_service_type = 'sharev2'
service_types = {'1': V1_SERVICE_TYPE, '2': V2_SERVICE_TYPE} |
# Time: O(n * 2^n)
# Space: O(2^n)
class Solution(object):
    # @return a string
    def countAndSay(self, n):
        """Return the n-th term (1-indexed) of the count-and-say sequence.

        Fix: the original used the Python-2-only ``xrange``; this file
        otherwise targets Python 3, so that was a NameError waiting to
        happen.
        """
        seq = "1"
        for _ in range(n - 1):
            seq = self.getNext(seq)
        return seq

    def getNext(self, seq):
        """Run-length-encode ``seq`` into the next count-and-say term."""
        i = 0
        pieces = []  # collect fragments and join once (avoids O(len^2) +=)
        while i < len(seq):
            cnt = 1
            # Count the run of identical digits starting at i.
            while i < len(seq) - 1 and seq[i] == seq[i + 1]:
                cnt += 1
                i += 1
            pieces.append(str(cnt) + seq[i])
            i += 1
        return "".join(pieces)
| class Solution(object):
def count_and_say(self, n):
seq = '1'
for i in xrange(n - 1):
seq = self.getNext(seq)
return seq
def get_next(self, seq):
(i, next_seq) = (0, '')
while i < len(seq):
cnt = 1
while i < len(seq) - 1 and seq[i] == seq[i + 1]:
cnt += 1
i += 1
next_seq += str(cnt) + seq[i]
i += 1
return next_seq |
"""
kb_observer.py
Copyright 2015 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
class KBObserver(object):
    """
    When you want to listen to KB changes the best way is to create a KBObserver
    instance and call kb.add_observer(kb_observer). Then, the KB will call the
    methods in this instance to notify you about the changes.
    This is a base implementation that you should extend in order to provide
    real features. For now we just define the methods with a no-op
    implementation.
    Note that the methods in this class are named just like the ones in
    KnowledgeBase which trigger the calls.
    """
    def append(self, location_a, location_b, value, ignore_type=False):
        """No-op hook; called when ``value`` is appended to the KB at
        (location_a, location_b)."""
        pass

    def add_url(self, url):
        """No-op hook; called when ``url`` is added to the KB."""
        pass

    def update(self, old_info, new_info):
        """No-op hook; called when ``old_info`` is replaced by ``new_info``."""
        pass
| """
kb_observer.py
Copyright 2015 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
class Kbobserver(object):
"""
When you want to listen to KB changes the best way is to create a KBObserver
instance and call kb.add_observer(kb_observer). Then, the KB will call the
methods in this instance to notify you about the changes.
This is a base implementation that you should extend in order to provide
real features. For now we just define the methods with a no-op
implementation.
Note that the methods in this class are named just like the ones in
KnowledgeBase which trigger the calls.
"""
def append(self, location_a, location_b, value, ignore_type=False):
pass
def add_url(self, url):
pass
def update(self, old_info, new_info):
pass |
def checkWalletState(cli, totalIds, isAbbr, isCrypto):
    """Assert invariants about the CLI's active wallet.

    :param cli: CLI under test (its ``_activeWallet`` is inspected, if any)
    :param totalIds: expected number of identifiers in the wallet
    :param isAbbr: expect the default DID's verkey to be abbreviated ("~...")
    :param isCrypto: expect the default DID to be a full cryptographic verkey
        (DID equals the verkey itself)
    """
    if cli._activeWallet:
        assert len(cli._activeWallet.idsToSigners) == totalIds
        if totalIds > 0:
            # Signer bound to the wallet's current default DID.
            activeSigner = cli._activeWallet.idsToSigners[
                cli._activeWallet.defaultId]
            if isAbbr:
                # Abbreviated verkeys carry a leading "~" and differ from the DID.
                assert activeSigner.verkey.startswith("~"), \
                    "verkey {} doesn't look like abbreviated verkey".\
                    format(activeSigner.verkey)
                assert cli._activeWallet.defaultId != activeSigner.verkey, \
                    "new DID should not be equal to abbreviated verkey"
            if isCrypto:
                # Full cryptographic verkeys have no "~" and equal the DID.
                assert not activeSigner.verkey.startswith("~"), \
                    "verkey {} doesn't look like cryptographic verkey". \
                    format(activeSigner.verkey)
                assert cli._activeWallet.defaultId == activeSigner.verkey, \
                    "new DID should be equal to verkey"
def getTotalIds(cli):
    """Number of identifiers in the CLI's active wallet, or 0 if none."""
    wallet = cli._activeWallet
    return len(wallet.idsToSigners) if wallet else 0
def testNewIdWithIncorrectSeed(be, do, aliceCLI):
    """Seeds must be 32 chars, or 64 hex chars; anything else is rejected."""
    totalIds = getTotalIds(aliceCLI)
    be(aliceCLI)
    # Too short: neither 32 nor 64 characters — wallet must be unchanged.
    do("new DID with seed aaaaaaaaaaa",
       expect=["Seed needs to be 32 or 64 characters (if hex) long"])
    checkWalletState(aliceCLI, totalIds=totalIds, isAbbr=False, isCrypto=False)
    # Exactly 64 characters but not valid hex — still rejected.
    do("new DID with seed "
       "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy",
       expect=["Seed needs to be 32 or 64 characters (if hex) long"])
    checkWalletState(aliceCLI, totalIds=totalIds, isAbbr=False, isCrypto=False)
    # 64 valid hex characters — accepted, wallet gains one DID.
    do("new DID with seed "
       "2af3d062450c942be50ee766ce2571a6c75c0aca0de322293e7e9f116959c9c3",
       expect=["Current DID set to"])
    checkWalletState(aliceCLI, totalIds=totalIds + 1, isAbbr=False,
                     isCrypto=False)
def testNewIdIsNotInvalidCommand(be, do, aliceCLI):
    """`new DID` must be recognised by the CLI parser."""
    totalIds = getTotalIds(aliceCLI)
    be(aliceCLI)
    do("new DID", not_expect=["Invalid command"])
    # The command also takes effect: one more identifier in the wallet.
    checkWalletState(aliceCLI, totalIds=totalIds + 1, isAbbr=False,
                     isCrypto=False)
def testNewId(be, do, aliceCLI):
    """Plain `new DID` (no seed) creates and activates a new identifier."""
    totalIds = getTotalIds(aliceCLI)
    be(aliceCLI)
    do("new DID", expect=["Current DID set to"])
    checkWalletState(aliceCLI, totalIds=totalIds + 1, isAbbr=False,
                     isCrypto=False)
| def check_wallet_state(cli, totalIds, isAbbr, isCrypto):
if cli._activeWallet:
assert len(cli._activeWallet.idsToSigners) == totalIds
if totalIds > 0:
active_signer = cli._activeWallet.idsToSigners[cli._activeWallet.defaultId]
if isAbbr:
assert activeSigner.verkey.startswith('~'), "verkey {} doesn't look like abbreviated verkey".format(activeSigner.verkey)
assert cli._activeWallet.defaultId != activeSigner.verkey, 'new DID should not be equal to abbreviated verkey'
if isCrypto:
assert not activeSigner.verkey.startswith('~'), "verkey {} doesn't look like cryptographic verkey".format(activeSigner.verkey)
assert cli._activeWallet.defaultId == activeSigner.verkey, 'new DID should be equal to verkey'
def get_total_ids(cli):
if cli._activeWallet:
return len(cli._activeWallet.idsToSigners)
else:
return 0
def test_new_id_with_incorrect_seed(be, do, aliceCLI):
total_ids = get_total_ids(aliceCLI)
be(aliceCLI)
do('new DID with seed aaaaaaaaaaa', expect=['Seed needs to be 32 or 64 characters (if hex) long'])
check_wallet_state(aliceCLI, totalIds=totalIds, isAbbr=False, isCrypto=False)
do('new DID with seed xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy', expect=['Seed needs to be 32 or 64 characters (if hex) long'])
check_wallet_state(aliceCLI, totalIds=totalIds, isAbbr=False, isCrypto=False)
do('new DID with seed 2af3d062450c942be50ee766ce2571a6c75c0aca0de322293e7e9f116959c9c3', expect=['Current DID set to'])
check_wallet_state(aliceCLI, totalIds=totalIds + 1, isAbbr=False, isCrypto=False)
def test_new_id_is_not_invalid_command(be, do, aliceCLI):
total_ids = get_total_ids(aliceCLI)
be(aliceCLI)
do('new DID', not_expect=['Invalid command'])
check_wallet_state(aliceCLI, totalIds=totalIds + 1, isAbbr=False, isCrypto=False)
def test_new_id(be, do, aliceCLI):
total_ids = get_total_ids(aliceCLI)
be(aliceCLI)
do('new DID', expect=['Current DID set to'])
check_wallet_state(aliceCLI, totalIds=totalIds + 1, isAbbr=False, isCrypto=False) |
__copyright__ = 'Copyright (C) 2019, Nokia'


class InteractiveSessionError(Exception):
    """Base class for exceptions raised by :class:`.InteractiveSession`
    and by :class:`.InteractiveSession` shells inherited from
    :class:`.shells.shell.Shell`.

    Catch this type to handle any session-level failure from this package.
    """
| __copyright__ = 'Copyright (C) 2019, Nokia'
class Interactivesessionerror(Exception):
"""Base class for exceptions raised by :class:`.InteractiveSession`
and by :class:`.InteractiveSession` shells inherited from
:class:`.shells.shell.Shell`.
""" |
# Golden parsed output for an IS-IS adjacency "detail" show command,
# used as the expected value in a parser unit test.
# Structure: instance -> level -> interface -> neighbor system-id
#            -> adjacency attributes.
expected_output = {
    "instance": {
        "0": {
            "level": {
                "L2": {
                    "interfaces": {
                        "To-GENIE01R07-LAG-7": {
                            "system_id": {
                                "0691.58ff.79a2": {
                                    "hold_time": 22,
                                    "hostname": "GENIE01R07",
                                    "ipv4_adj_sid": "Label 524213",
                                    "ipv4_neighbor": "10.11.97.22",
                                    "ipv6_neighbor": "::",
                                    "l_circ_typ": "L2",
                                    "last_restart_at": "Never",
                                    "max_hold": 30,
                                    "mt_enabled": "No",
                                    "nbr_sys_typ": "L2",
                                    "number_of_restarts": 0,
                                    "priority": 0,
                                    "restart_support": "Disabled",
                                    "restart_supressed": "Disabled",
                                    "restart_status": "Not currently being helped",
                                    "snpa": "00:23:3e:ff:a6:27",
                                    "state": "Up",
                                    "topology": "Unicast",
                                    "up_time": "58d 03:24:48",
                                }
                            }
                        },
                        "To-GENIE04XR1-LAG-4": {
                            "system_id": {
                                "0670.70ff.b258": {
                                    "hold_time": 23,
                                    "hostname": "GENIE04XR1",
                                    "ipv4_adj_sid": "Label 524127",
                                    "ipv4_neighbor": "10.11.79.245",
                                    "ipv6_neighbor": "::",
                                    "l_circ_typ": "L2",
                                    "last_restart_at": "Never",
                                    "max_hold": 30,
                                    "mt_enabled": "No",
                                    "nbr_sys_typ": "L2",
                                    "number_of_restarts": 0,
                                    "priority": 0,
                                    "restart_support": "Disabled",
                                    "restart_supressed": "Disabled",
                                    "restart_status": "Not currently being helped",
                                    "snpa": "84:26:2b:ff:e9:9e",
                                    "state": "Up",
                                    "topology": "Unicast",
                                    "up_time": "36d 23:21:57",
                                }
                            }
                        },
                        "To-GENIE03R07-LAG-9": {
                            "system_id": {
                                "0691.58ff.79aa": {
                                    "hold_time": 22,
                                    "hostname": "GENIE03R07",
                                    "ipv4_adj_sid": "Label 524214",
                                    "ipv4_neighbor": "10.11.79.242",
                                    "ipv6_neighbor": "::",
                                    "l_circ_typ": "L2",
                                    "last_restart_at": "Never",
                                    "max_hold": 30,
                                    "mt_enabled": "No",
                                    "nbr_sys_typ": "L2",
                                    "number_of_restarts": 0,
                                    "priority": 0,
                                    "restart_support": "Disabled",
                                    "restart_supressed": "Disabled",
                                    "restart_status": "Not currently being helped",
                                    "snpa": "00:23:3e:ff:bc:27",
                                    "state": "Up",
                                    "topology": "Unicast",
                                    "up_time": "58d 03:24:48",
                                }
                            }
                        },
                    }
                }
            }
        },
        # Second IS-IS instance: a single L2 adjacency.
        "1": {
            "level": {
                "L2": {
                    "interfaces": {
                        "To-GENIE01R07-LAG-7": {
                            "system_id": {
                                "0691.58ff.79a2": {
                                    "hold_time": 22,
                                    "hostname": "GENIE01R07",
                                    "ipv4_adj_sid": "Label 524213",
                                    "ipv4_neighbor": "10.11.97.22",
                                    "ipv6_neighbor": "::",
                                    "l_circ_typ": "L2",
                                    "last_restart_at": "Never",
                                    "max_hold": 30,
                                    "mt_enabled": "No",
                                    "nbr_sys_typ": "L2",
                                    "number_of_restarts": 0,
                                    "priority": 0,
                                    "restart_support": "Disabled",
                                    "restart_supressed": "Disabled",
                                    "restart_status": "Not currently being helped",
                                    "snpa": "00:23:3e:ff:a6:27",
                                    "state": "Up",
                                    "topology": "Unicast",
                                    "up_time": "58d 03:24:48",
                                }
                            }
                        }
                    }
                }
            }
        },
    }
}
| expected_output = {'instance': {'0': {'level': {'L2': {'interfaces': {'To-GENIE01R07-LAG-7': {'system_id': {'0691.58ff.79a2': {'hold_time': 22, 'hostname': 'GENIE01R07', 'ipv4_adj_sid': 'Label 524213', 'ipv4_neighbor': '10.11.97.22', 'ipv6_neighbor': '::', 'l_circ_typ': 'L2', 'last_restart_at': 'Never', 'max_hold': 30, 'mt_enabled': 'No', 'nbr_sys_typ': 'L2', 'number_of_restarts': 0, 'priority': 0, 'restart_support': 'Disabled', 'restart_supressed': 'Disabled', 'restart_status': 'Not currently being helped', 'snpa': '00:23:3e:ff:a6:27', 'state': 'Up', 'topology': 'Unicast', 'up_time': '58d 03:24:48'}}}, 'To-GENIE04XR1-LAG-4': {'system_id': {'0670.70ff.b258': {'hold_time': 23, 'hostname': 'GENIE04XR1', 'ipv4_adj_sid': 'Label 524127', 'ipv4_neighbor': '10.11.79.245', 'ipv6_neighbor': '::', 'l_circ_typ': 'L2', 'last_restart_at': 'Never', 'max_hold': 30, 'mt_enabled': 'No', 'nbr_sys_typ': 'L2', 'number_of_restarts': 0, 'priority': 0, 'restart_support': 'Disabled', 'restart_supressed': 'Disabled', 'restart_status': 'Not currently being helped', 'snpa': '84:26:2b:ff:e9:9e', 'state': 'Up', 'topology': 'Unicast', 'up_time': '36d 23:21:57'}}}, 'To-GENIE03R07-LAG-9': {'system_id': {'0691.58ff.79aa': {'hold_time': 22, 'hostname': 'GENIE03R07', 'ipv4_adj_sid': 'Label 524214', 'ipv4_neighbor': '10.11.79.242', 'ipv6_neighbor': '::', 'l_circ_typ': 'L2', 'last_restart_at': 'Never', 'max_hold': 30, 'mt_enabled': 'No', 'nbr_sys_typ': 'L2', 'number_of_restarts': 0, 'priority': 0, 'restart_support': 'Disabled', 'restart_supressed': 'Disabled', 'restart_status': 'Not currently being helped', 'snpa': '00:23:3e:ff:bc:27', 'state': 'Up', 'topology': 'Unicast', 'up_time': '58d 03:24:48'}}}}}}}, '1': {'level': {'L2': {'interfaces': {'To-GENIE01R07-LAG-7': {'system_id': {'0691.58ff.79a2': {'hold_time': 22, 'hostname': 'GENIE01R07', 'ipv4_adj_sid': 'Label 524213', 'ipv4_neighbor': '10.11.97.22', 'ipv6_neighbor': '::', 'l_circ_typ': 'L2', 'last_restart_at': 'Never', 'max_hold': 30, 
'mt_enabled': 'No', 'nbr_sys_typ': 'L2', 'number_of_restarts': 0, 'priority': 0, 'restart_support': 'Disabled', 'restart_supressed': 'Disabled', 'restart_status': 'Not currently being helped', 'snpa': '00:23:3e:ff:a6:27', 'state': 'Up', 'topology': 'Unicast', 'up_time': '58d 03:24:48'}}}}}}}}} |
def dummy(environ, start_response):
    """Minimal WSGI application that answers every request with b'dummy'.

    PEP 3333 requires the application's response iterable to yield
    *bytestrings*; the previous version yielded a one-element list
    ([b'dummy']), which a compliant server must reject.
    """
    start_response('200 OK', [])
    yield b'dummy'
| def dummy(environ, start_response):
start_response('200 OK', [])
yield [b'dummy'] |
# Program to convert a temperature from Celsius to Fahrenheit.
x = float(input("Enter the temperature in Celsius: "))
# Standard conversion: F = C * 9/5 + 32
f = (x * 9 / 5) + 32
print(f"Temperature in Fahrenheit: {f}")
| x = float(input('Enter the temperature in celcius: '))
f = x * 9 / 5 + 32
print(f'Temperaturn in farenheit: {f}') |
# Regex -> placeholder substitutions; presumably applied to captured command
# output so that volatile timestamps and durations compare stably across
# test runs -- TODO confirm against the test_case harness.
map_out = {
    r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{1,6}(-|\+)\d{2}' : 'YYYY-MM-DD HH:MM:SS.FFFFFF-TZ'
    , r'\d{2}:\d{2}:\d{2}.\d{1,6}' : 'HH:MM:SS.FFFFFF'
    , r'\d{4}-\d{2}-\d{2}' : 'YYYY-MM-DD'}
test_cases = [
test_case(
cmd=('yb_chunk_dml_by_integer_yyyymmdd.py @{argsdir}/yb_chunk_dml_by_integer_yyyymmdd__args1 '
'--execute_chunk_dml')
, exit_code=0
, stdout="""-- Running DML chunking.
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Starting YYYYMMDD Integer Date Chunking, first calculating date group counts
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Build Chunk DMLs
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 1, Rows: 166582, Range 20200101 <= col19 < 20200111
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 2, Rows: 100018, Range 20200111 <= col19 < 20200902
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 3, Rows: 101800, Range 20200902 <= col19 < 20210426
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 4, Rows: 100376, Range 20210426 <= col19 < 20211215
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 5, Rows: 100212, Range 20211215 <= col19 < 20220727
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 6, Rows: 100988, Range 20220727 <= col19 < 20230415
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 7, Rows: 102860, Range 20230415 <= col19 < 20240222
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 8, Rows: 100266, Range 20240222 <= col19 < 20250401
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 9, Rows: 100036, Range 20250401 <= col19 < 20320311
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 10, Rows: 26862, Range 20320311 <= col19 < 20420307
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 11, Rows: 0, col19 IS NULL
--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Completed YYYYMMDD Integer Date Chunked DML
--Total Rows : 1000000
--IS NULL Rows : 0
--Running total check: PASSED
--Duration : HH:MM:SS.FFFFFF
--Overhead duration : HH:MM:SS.FFFFFF
--Total Chunks : 11
--Min chunk size : 100000
--Largest chunk size : 166582
--Average chunk size : 90909
-- Completed DML chunking."""
, stderr=''
, map_out=map_out)
, test_case(
cmd=('yb_chunk_dml_by_integer_yyyymmdd.py @{argsdir}/yb_chunk_dml_by_integer_yyyymmdd__args1 '
'--print_chunk_dml --null_chunk_off --verbose_chunk_off')
, exit_code=0
, stdout="""-- Running DML chunking.
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 1, size: 166582) >>>*/ 20200101 <= col19 AND col19 < 20200111 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 2, size: 100018) >>>*/ 20200111 <= col19 AND col19 < 20200902 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 3, size: 101800) >>>*/ 20200902 <= col19 AND col19 < 20210426 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 4, size: 100376) >>>*/ 20210426 <= col19 AND col19 < 20211215 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 5, size: 100212) >>>*/ 20211215 <= col19 AND col19 < 20220727 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 6, size: 100988) >>>*/ 20220727 <= col19 AND col19 < 20230415 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 7, size: 102860) >>>*/ 20230415 <= col19 AND col19 < 20240222 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 8, size: 100266) >>>*/ 20240222 <= col19 AND col19 < 20250401 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 9, size: 100036) >>>*/ 20250401 <= col19 AND col19 < 20320311 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 10, size: 26862) >>>*/ 20320311 <= col19 AND col19 < 20420307 /*<<< chunk_clause */;
-- Completed DML chunking."""
, stderr='')
, test_case(
cmd=('yb_chunk_dml_by_integer_yyyymmdd.py @{argsdir}/yb_chunk_dml_by_integer_yyyymmdd__args1 '
'--print_chunk_dml')
, exit_code=0
, stdout="""-- Running DML chunking.
--2020-08-22 23:04:57.77992-06: Starting YYYYMMDD Integer Date Chunking, first calculating date group counts
--2020-08-22 23:04:58.202254-06: Build Chunk DMLs
--2020-08-22 23:04:58.202609-06: Chunk: 1, Rows: 166582, Range 20200101 <= col19 < 20200111
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 1, size: 166582) >>>*/ 20200101 <= col19 AND col19 < 20200111 /*<<< chunk_clause */;
--2020-08-22 23:04:58.203502-06: Chunk: 2, Rows: 100018, Range 20200111 <= col19 < 20200902
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 2, size: 100018) >>>*/ 20200111 <= col19 AND col19 < 20200902 /*<<< chunk_clause */;
--2020-08-22 23:04:58.203782-06: Chunk: 3, Rows: 101800, Range 20200902 <= col19 < 20210426
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 3, size: 101800) >>>*/ 20200902 <= col19 AND col19 < 20210426 /*<<< chunk_clause */;
--2020-08-22 23:04:58.204023-06: Chunk: 4, Rows: 100376, Range 20210426 <= col19 < 20211215
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 4, size: 100376) >>>*/ 20210426 <= col19 AND col19 < 20211215 /*<<< chunk_clause */;
--2020-08-22 23:04:58.204269-06: Chunk: 5, Rows: 100212, Range 20211215 <= col19 < 20220727
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 5, size: 100212) >>>*/ 20211215 <= col19 AND col19 < 20220727 /*<<< chunk_clause */;
--2020-08-22 23:04:58.204521-06: Chunk: 6, Rows: 100988, Range 20220727 <= col19 < 20230415
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 6, size: 100988) >>>*/ 20220727 <= col19 AND col19 < 20230415 /*<<< chunk_clause */;
--2020-08-22 23:04:58.204862-06: Chunk: 7, Rows: 102860, Range 20230415 <= col19 < 20240222
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 7, size: 102860) >>>*/ 20230415 <= col19 AND col19 < 20240222 /*<<< chunk_clause */;
--2020-08-22 23:04:58.205211-06: Chunk: 8, Rows: 100266, Range 20240222 <= col19 < 20250401
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 8, size: 100266) >>>*/ 20240222 <= col19 AND col19 < 20250401 /*<<< chunk_clause */;
--2020-08-22 23:04:58.207026-06: Chunk: 9, Rows: 100036, Range 20250401 <= col19 < 20320311
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 9, size: 100036) >>>*/ 20250401 <= col19 AND col19 < 20320311 /*<<< chunk_clause */;
--2020-08-22 23:04:58.207984-06: Chunk: 10, Rows: 26862, Range 20320311 <= col19 < 20420307
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 10, size: 26862) >>>*/ 20320311 <= col19 AND col19 < 20420307 /*<<< chunk_clause */;
--2020-08-22 23:04:58.208485-06: Chunk: 11, Rows: 0, col19 IS NULL
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE col19 IS NULL;
--2020-08-22 23:04:58.208789-06: Completed YYYYMMDD Integer Date Chunked DML
--Total Rows : 1000000
--IS NULL Rows : 0
--Running total check: PASSED
--Duration : 00:00:00.430099
--Overhead duration : 00:00:00.430176
--Total Chunks : 11
--Min chunk size : 100000
--Largest chunk size : 166582
--Average chunk size : 90909
-- Completed DML chunking."""
, stderr=''
, map_out=map_out)
] | map_out = {'\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}.\\d{1,6}(-|\\+)\\d{2}': 'YYYY-MM-DD HH:MM:SS.FFFFFF-TZ', '\\d{2}:\\d{2}:\\d{2}.\\d{1,6}': 'HH:MM:SS.FFFFFF', '\\d{4}-\\d{2}-\\d{2}': 'YYYY-MM-DD'}
test_cases = [test_case(cmd='yb_chunk_dml_by_integer_yyyymmdd.py @{argsdir}/yb_chunk_dml_by_integer_yyyymmdd__args1 --execute_chunk_dml', exit_code=0, stdout='-- Running DML chunking.\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Starting YYYYMMDD Integer Date Chunking, first calculating date group counts\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Build Chunk DMLs\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 1, Rows: 166582, Range 20200101 <= col19 < 20200111\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 2, Rows: 100018, Range 20200111 <= col19 < 20200902\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 3, Rows: 101800, Range 20200902 <= col19 < 20210426\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 4, Rows: 100376, Range 20210426 <= col19 < 20211215\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 5, Rows: 100212, Range 20211215 <= col19 < 20220727\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 6, Rows: 100988, Range 20220727 <= col19 < 20230415\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 7, Rows: 102860, Range 20230415 <= col19 < 20240222\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 8, Rows: 100266, Range 20240222 <= col19 < 20250401\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 9, Rows: 100036, Range 20250401 <= col19 < 20320311\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 10, Rows: 26862, Range 20320311 <= col19 < 20420307\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Chunk: 11, Rows: 0, col19 IS NULL\n--YYYY-MM-DD HH:MM:SS.FFFFFF-TZ: Completed YYYYMMDD Integer Date Chunked DML\n--Total Rows : 1000000\n--IS NULL Rows : 0\n--Running total check: PASSED\n--Duration : HH:MM:SS.FFFFFF\n--Overhead duration : HH:MM:SS.FFFFFF\n--Total Chunks : 11\n--Min chunk size : 100000\n--Largest chunk size : 166582\n--Average chunk size : 90909\n-- Completed DML chunking.', stderr='', map_out=map_out), test_case(cmd='yb_chunk_dml_by_integer_yyyymmdd.py @{argsdir}/yb_chunk_dml_by_integer_yyyymmdd__args1 --print_chunk_dml --null_chunk_off --verbose_chunk_off', exit_code=0, stdout='-- Running DML chunking.\nINSERT INTO new_chunked_table SELECT * FROM 
{db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 1, size: 166582) >>>*/ 20200101 <= col19 AND col19 < 20200111 /*<<< chunk_clause */;\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 2, size: 100018) >>>*/ 20200111 <= col19 AND col19 < 20200902 /*<<< chunk_clause */;\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 3, size: 101800) >>>*/ 20200902 <= col19 AND col19 < 20210426 /*<<< chunk_clause */;\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 4, size: 100376) >>>*/ 20210426 <= col19 AND col19 < 20211215 /*<<< chunk_clause */;\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 5, size: 100212) >>>*/ 20211215 <= col19 AND col19 < 20220727 /*<<< chunk_clause */;\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 6, size: 100988) >>>*/ 20220727 <= col19 AND col19 < 20230415 /*<<< chunk_clause */;\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 7, size: 102860) >>>*/ 20230415 <= col19 AND col19 < 20240222 /*<<< chunk_clause */;\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 8, size: 100266) >>>*/ 20240222 <= col19 AND col19 < 20250401 /*<<< chunk_clause */;\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 9, size: 100036) >>>*/ 20250401 <= col19 AND col19 < 20320311 /*<<< chunk_clause */;\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 10, size: 26862) >>>*/ 20320311 <= col19 AND col19 < 20420307 /*<<< chunk_clause */;\n-- Completed DML chunking.', stderr=''), test_case(cmd='yb_chunk_dml_by_integer_yyyymmdd.py @{argsdir}/yb_chunk_dml_by_integer_yyyymmdd__args1 --print_chunk_dml', exit_code=0, stdout='-- Running DML chunking.\n--2020-08-22 
23:04:57.77992-06: Starting YYYYMMDD Integer Date Chunking, first calculating date group counts\n--2020-08-22 23:04:58.202254-06: Build Chunk DMLs\n--2020-08-22 23:04:58.202609-06: Chunk: 1, Rows: 166582, Range 20200101 <= col19 < 20200111\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 1, size: 166582) >>>*/ 20200101 <= col19 AND col19 < 20200111 /*<<< chunk_clause */;\n--2020-08-22 23:04:58.203502-06: Chunk: 2, Rows: 100018, Range 20200111 <= col19 < 20200902\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 2, size: 100018) >>>*/ 20200111 <= col19 AND col19 < 20200902 /*<<< chunk_clause */;\n--2020-08-22 23:04:58.203782-06: Chunk: 3, Rows: 101800, Range 20200902 <= col19 < 20210426\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 3, size: 101800) >>>*/ 20200902 <= col19 AND col19 < 20210426 /*<<< chunk_clause */;\n--2020-08-22 23:04:58.204023-06: Chunk: 4, Rows: 100376, Range 20210426 <= col19 < 20211215\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 4, size: 100376) >>>*/ 20210426 <= col19 AND col19 < 20211215 /*<<< chunk_clause */;\n--2020-08-22 23:04:58.204269-06: Chunk: 5, Rows: 100212, Range 20211215 <= col19 < 20220727\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 5, size: 100212) >>>*/ 20211215 <= col19 AND col19 < 20220727 /*<<< chunk_clause */;\n--2020-08-22 23:04:58.204521-06: Chunk: 6, Rows: 100988, Range 20220727 <= col19 < 20230415\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 6, size: 100988) >>>*/ 20220727 <= col19 AND col19 < 20230415 /*<<< chunk_clause */;\n--2020-08-22 23:04:58.204862-06: Chunk: 7, Rows: 102860, Range 20230415 <= col19 < 20240222\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 7, size: 
102860) >>>*/ 20230415 <= col19 AND col19 < 20240222 /*<<< chunk_clause */;\n--2020-08-22 23:04:58.205211-06: Chunk: 8, Rows: 100266, Range 20240222 <= col19 < 20250401\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 8, size: 100266) >>>*/ 20240222 <= col19 AND col19 < 20250401 /*<<< chunk_clause */;\n--2020-08-22 23:04:58.207026-06: Chunk: 9, Rows: 100036, Range 20250401 <= col19 < 20320311\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 9, size: 100036) >>>*/ 20250401 <= col19 AND col19 < 20320311 /*<<< chunk_clause */;\n--2020-08-22 23:04:58.207984-06: Chunk: 10, Rows: 26862, Range 20320311 <= col19 < 20420307\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 10, size: 26862) >>>*/ 20320311 <= col19 AND col19 < 20420307 /*<<< chunk_clause */;\n--2020-08-22 23:04:58.208485-06: Chunk: 11, Rows: 0, col19 IS NULL\nINSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE col19 IS NULL;\n--2020-08-22 23:04:58.208789-06: Completed YYYYMMDD Integer Date Chunked DML\n--Total Rows : 1000000\n--IS NULL Rows : 0\n--Running total check: PASSED\n--Duration : 00:00:00.430099\n--Overhead duration : 00:00:00.430176\n--Total Chunks : 11\n--Min chunk size : 100000\n--Largest chunk size : 166582\n--Average chunk size : 90909\n-- Completed DML chunking.', stderr='', map_out=map_out)] |
# Please see __init__.py in this folder for documentation
# Maps stain name -> 3-element RGB vector; 'null' is the zero vector,
# presumably used as an empty/unused stain channel for color
# deconvolution -- confirm against __init__.py.
stain_color_map = {
    'hematoxylin': [0.65, 0.70, 0.29],
    'eosin': [0.07, 0.99, 0.11],
    'dab': [0.27, 0.57, 0.78],
    'null': [0.0, 0.0, 0.0]
}
| stain_color_map = {'hematoxylin': [0.65, 0.7, 0.29], 'eosin': [0.07, 0.99, 0.11], 'dab': [0.27, 0.57, 0.78], 'null': [0.0, 0.0, 0.0]} |
class Zipper:
    """A focus on one subtree of a binary tree stored as nested dicts.

    Each node is a dict with 'value', 'left' and 'right' keys.  The zipper
    holds the focused node plus the chain of ancestor nodes, so it can walk
    back up and recover the whole tree.
    """

    @staticmethod
    def from_tree(tree):
        # Shallow-copy the root so the zipper owns its own top-level dict.
        return Zipper(dict(tree), [])

    def __init__(self, tree, ancestors):
        self.tree = tree
        self.ancestors = ancestors

    def value(self):
        return self.tree['value']

    def set_value(self, value):
        self.tree['value'] = value
        return self

    def _descend(self, side):
        # Shared helper for left()/right(): focus a child, or None if absent.
        child = self.tree[side]
        return None if child is None else Zipper(child, self.ancestors + [self.tree])

    def left(self):
        return self._descend('left')

    def set_left(self, tree):
        self.tree['left'] = tree
        return self

    def right(self):
        return self._descend('right')

    def set_right(self, tree):
        self.tree['right'] = tree
        return self

    def up(self):
        return Zipper(self.ancestors[-1], self.ancestors[:-1])

    def to_tree(self):
        # The oldest ancestor is the root; with no ancestors we *are* the root.
        return self.ancestors[0] if any(self.ancestors) else self.tree
| class Zipper:
@staticmethod
def from_tree(tree):
return zipper(dict(tree), [])
def __init__(self, tree, ancestors):
self.tree = tree
self.ancestors = ancestors
def value(self):
return self.tree['value']
def set_value(self, value):
self.tree['value'] = value
return self
def left(self):
if self.tree['left'] is None:
return None
return zipper(self.tree['left'], self.ancestors + [self.tree])
def set_left(self, tree):
self.tree['left'] = tree
return self
def right(self):
if self.tree['right'] is None:
return None
return zipper(self.tree['right'], self.ancestors + [self.tree])
def set_right(self, tree):
self.tree['right'] = tree
return self
def up(self):
return zipper(self.ancestors[-1], self.ancestors[:-1])
def to_tree(self):
if any(self.ancestors):
return self.ancestors[0]
return self.tree |
def randomData(val):
    """Return the elements of *val* at the fixed sample positions.

    Positions 1, 4, 6, 10 and 13 are selected (positions beyond the end of
    the sequence are silently skipped), preserving original order.

    Keyword arguments:
    val -- any sequence (list, tuple, string, ...)

    Returns a list of the selected elements.
    """
    # Use a set for O(1) membership, and avoid the old local name `random`,
    # which shadowed the stdlib module name.
    sample_positions = {1, 4, 6, 10, 13}
    return [item for index, item in enumerate(val) if index in sample_positions]
| def random_data(val):
data = []
random = [1, 4, 6, 10, 13]
for i in range(len(val)):
if i in random:
data.append(val[i])
return data |
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 Doug Hellmann. All rights reserved.
#
"""Telling doctest to ignore extra whitespace in test data.
"""
#end_pymotw_header
def my_function(a, b):
    """Returns a * b.
    >>> my_function(['A', 'B'], 3) #doctest: +NORMALIZE_WHITESPACE
    ['A', 'B',
    'A', 'B',
    'A', 'B',]
    This does not match because of the extra space after the [ in
    the list.
    >>> my_function(['A', 'B'], 2) #doctest: +NORMALIZE_WHITESPACE
    [ 'A', 'B',
    'A', 'B', ]
    """
    # The docstring above is the NORMALIZE_WHITESPACE demonstration itself,
    # so its exact text (including the mismatching first example) must not
    # be reflowed.  Sequence * int repeats the sequence, which is what the
    # examples rely on.
    return a * b
| """Telling doctest to ignore extra whitespace in test data.
"""
def my_function(a, b):
"""Returns a * b.
>>> my_function(['A', 'B'], 3) #doctest: +NORMALIZE_WHITESPACE
['A', 'B',
'A', 'B',
'A', 'B',]
This does not match because of the extra space after the [ in
the list.
>>> my_function(['A', 'B'], 2) #doctest: +NORMALIZE_WHITESPACE
[ 'A', 'B',
'A', 'B', ]
"""
return a * b |
"""Represents a contract.
This merges the Contract and ContractDetails classes from the Java API into a
single class.
"""
class Contract:
    """Represents a single contract.
    Attributes not specified in the constructor:
    local_symbol -- local exchange symbol
    primary_exch -- listing exchange
    min_tick -- minimum price tick
    long_name -- long name of the contract
    industry -- industry classification
    category -- industry category
    subcategory -- industry subcategory
    ...
    """
    def __init__(self, sec_type='', symbol='', currency='', exchange=''):
        """Initialize a new instance of a Contract.
        Keyword arguments:
        sec_type -- security type ('STK', 'CASH', 'OPT', etc.)
        symbol -- symbol of the underlying asset
        currency -- currency
        exchange -- order destination ('SMART', 'IDEALPRO', etc.)
        """
        # Passed parameters (see the constructor docstring above).
        self.sec_type = sec_type
        self.symbol = symbol
        self.currency = currency
        self.exchange = exchange
        # Basic contract identification (derivative fields such as expiry,
        # strike and right stay empty/zero for non-derivative contracts).
        self.local_symbol = ''
        self.con_id = 0
        self.expiry = ''
        self.strike = 0
        self.right = ''
        self.multiplier = ''
        self.primary_exch = ''
        self.include_expired = False
        self.sec_id_type = ''
        self.sec_id = ''
        # Combos -- description and leg list of a combination contract.
        self.combo_legs_descrip = ''
        self.combo_legs = []
        # Delta neutral -- underlying component; presumably populated only
        # for delta-neutral combo orders (TODO confirm against the API).
        self.under_comp = None
        self.under_type = None
        #
        # Contract details (fields that the Java API keeps in the separate
        # ContractDetails class -- see the module docstring).
        #
        self.market_name = ''
        self.trading_class = ''
        self.min_tick = 0
        self.price_magnifier = ''
        self.order_types = ''
        self.valid_exchanges = ''
        self.under_con_id = 0
        self.long_name = ''
        self.contract_month = ''
        self.industry = ''
        self.category = ''
        self.subcategory = ''
        self.time_zone_id = ''
        self.trading_hours = ''
        self.liquid_hours = ''
        # Bond values -- only meaningful for fixed-income contracts.
        self.cusip = ''
        self.ratings = ''
        self.desc_append = ''
        self.bond_type = ''
        self.coupon_type = ''
        self.callable = False
        self.putable = False
        self.coupon = 0
        self.convertible = False
        self.maturity = ''
        self.issue_date = ''
        self.next_option_date = ''
        self.next_option_type = ''
        self.next_option_partial = False
        self.notes = ''
    def __lt__(self, other):
        """Return True if this object is strictly less than the specified
        object; False, otherwise.
        Keyword arguments:
        other -- Contract to compare to this Contract
        """
        # Ordering is by local exchange symbol only; no other field matters.
        return self.local_symbol < other.local_symbol
| """Represents a contract.
This merges the Contract and ContractDetails classes from the Java API into a
single class.
"""
class Contract:
"""Represents a single contract.
Attributes not specified in the constructor:
local_symbol -- local exchange symbol
primary_exch -- listing exchange
min_tick -- minimum price tick
long_name -- long name of the contract
industry -- industry classification
category -- industry category
subcategory -- industry subcategory
...
"""
def __init__(self, sec_type='', symbol='', currency='', exchange=''):
"""Initialize a new instance of a Contract.
Keyword arguments:
sec_type -- security type ('STK', 'CASH', 'OPT', etc.)
symbol -- symbol of the underlying asset
currency -- currency
exchange -- order destination ('SMART', 'IDEALPRO', etc.)
"""
self.sec_type = sec_type
self.symbol = symbol
self.currency = currency
self.exchange = exchange
self.local_symbol = ''
self.con_id = 0
self.expiry = ''
self.strike = 0
self.right = ''
self.multiplier = ''
self.primary_exch = ''
self.include_expired = False
self.sec_id_type = ''
self.sec_id = ''
self.combo_legs_descrip = ''
self.combo_legs = []
self.under_comp = None
self.under_type = None
self.market_name = ''
self.trading_class = ''
self.min_tick = 0
self.price_magnifier = ''
self.order_types = ''
self.valid_exchanges = ''
self.under_con_id = 0
self.long_name = ''
self.contract_month = ''
self.industry = ''
self.category = ''
self.subcategory = ''
self.time_zone_id = ''
self.trading_hours = ''
self.liquid_hours = ''
self.cusip = ''
self.ratings = ''
self.desc_append = ''
self.bond_type = ''
self.coupon_type = ''
self.callable = False
self.putable = False
self.coupon = 0
self.convertible = False
self.maturity = ''
self.issue_date = ''
self.next_option_date = ''
self.next_option_type = ''
self.next_option_partial = False
self.notes = ''
def __lt__(self, other):
"""Return True if this object is strictly less than the specified
object; False, otherwise.
Keyword arguments:
other -- Contract to compare to this Contract
"""
return self.local_symbol < other.local_symbol |
class Solution:
    def findDuplicate(self, nums):
        """Return the first value that appears a second time while scanning nums.

        The previous version tallied counts in a list sized len(nums), which
        raises IndexError whenever a value >= len(nums) occurs.  A set of
        seen values gives the same answer for any hashable elements in O(n)
        time.

        Returns None when nums contains no duplicate.
        """
        seen = set()
        for value in nums:
            if value in seen:
                return value
            seen.add(value)
        return None
b = Solution()
print(b.findDuplicate([1,3,4,2,2])) | class Solution:
def find_duplicate(self, nums):
n = len(nums)
temp = [0] * n
for i in nums:
temp[i] += 1
if temp[i] > 1:
return i
b = solution()
print(b.findDuplicate([1, 3, 4, 2, 2])) |
"""This module provides mechanisms to use signal handlers in Python.
Functions:
alarm() -- cause SIGALRM after a specified time [Unix only]
setitimer() -- cause a signal (described below) after a specified
float time and the timer may restart then [Unix only]
getitimer() -- get current value of timer [Unix only]
signal() -- set the action for a given signal
getsignal() -- get the signal action for a given signal
pause() -- wait until a signal arrives [Unix only]
default_int_handler() -- default SIGINT handler
signal constants:
SIG_DFL -- used to refer to the system default handler
SIG_IGN -- used to ignore the signal
NSIG -- number of defined signals
SIGINT, SIGTERM, etc. -- signal numbers
itimer constants:
ITIMER_REAL -- decrements in real time, and delivers SIGALRM upon
expiration
ITIMER_VIRTUAL -- decrements only when the process is executing,
and delivers SIGVTALRM upon expiration
ITIMER_PROF -- decrements both when the process is executing and
when the system is executing on behalf of the process.
Coupled with ITIMER_VIRTUAL, this timer is usually
used to profile the time spent by the application
in user and kernel space. SIGPROF is delivered upon
expiration.
*** IMPORTANT NOTICE ***
A signal handler function is called with two arguments:
the first is the signal number, the second is the interrupted stack frame."""
# Console control events -- presumably the Windows-only values deliverable
# via GenerateConsoleCtrlEvent (the SIGBREAK constant below also suggests a
# Windows stub); confirm against the platform's signal module.
CTRL_BREAK_EVENT=1
CTRL_C_EVENT=0
# Number of defined signals (highest signal number + 1), per the module
# docstring above.
NSIG=23
# Signal numbers exposed by this build.
SIGABRT=22
SIGBREAK=21
SIGFPE=8
SIGILL=4
SIGINT=2
SIGSEGV=11
SIGTERM=15
# Special handler values: system default handler and ignore (see docstring).
SIG_DFL=0
SIG_IGN=1
def signal(signalnum, handler) :
    # Stub signature only: this file appears to be an IDE/autocompletion
    # stub; the real implementation lives in the interpreter.  See the
    # stdlib `signal.signal` documentation for the actual semantics.
    pass
| """This module provides mechanisms to use signal handlers in Python.
Functions:
alarm() -- cause SIGALRM after a specified time [Unix only]
setitimer() -- cause a signal (described below) after a specified
float time and the timer may restart then [Unix only]
getitimer() -- get current value of timer [Unix only]
signal() -- set the action for a given signal
getsignal() -- get the signal action for a given signal
pause() -- wait until a signal arrives [Unix only]
default_int_handler() -- default SIGINT handler
signal constants:
SIG_DFL -- used to refer to the system default handler
SIG_IGN -- used to ignore the signal
NSIG -- number of defined signals
SIGINT, SIGTERM, etc. -- signal numbers
itimer constants:
ITIMER_REAL -- decrements in real time, and delivers SIGALRM upon
expiration
ITIMER_VIRTUAL -- decrements only when the process is executing,
and delivers SIGVTALRM upon expiration
ITIMER_PROF -- decrements both when the process is executing and
when the system is executing on behalf of the process.
Coupled with ITIMER_VIRTUAL, this timer is usually
used to profile the time spent by the application
in user and kernel space. SIGPROF is delivered upon
expiration.
*** IMPORTANT NOTICE ***
A signal handler function is called with two arguments:
the first is the signal number, the second is the interrupted stack frame."""
ctrl_break_event = 1
ctrl_c_event = 0
nsig = 23
sigabrt = 22
sigbreak = 21
sigfpe = 8
sigill = 4
sigint = 2
sigsegv = 11
sigterm = 15
sig_dfl = 0
sig_ign = 1
def signal(signalnum, handler):
pass |
"""
Find all possible combinations of k numbers that add up to a number n, given that only numbers from 1 to 9 can be used and each combination should be a unique set of numbers.
Note:
All numbers will be positive integers.
The solution set must not contain duplicate combinations.
Example 1:
Input: k = 3, n = 7
Output: [[1,2,4]]
Example 2:
Input: k = 3, n = 9
Output: [[1,2,6], [1,3,5], [2,3,4]]
"""
class Solution(object):
    def combinationSum3(self, k, n):
        """
        :type k: int
        :type n: int
        :rtype: List[List[int]]
        """
        """
        Method: DFS
        @params: nums, target, index, arr_current_res
        """
        # arr_current is a pair [used_indices, chosen_values]; res collects
        # the chosen-value lists of every combination that sums to target.
        def dfs(nums, target, index, arr_current, res):
            if target < 0:
                return None
            elif target == 0 and len(arr_current[1]) == k:
                res.append(arr_current[1])
                return None
            else:
                # The recursion restarts at i (not i + 1); the used-index
                # check above prevents reuse, so indices still only grow.
                for i in range(index, len(nums)):
                    if i not in arr_current[0]:
                        arr_current_update = [arr_current[0] + [i], arr_current[1] + [nums[i]]]
                        dfs(nums, target - nums[i], i, arr_current_update, res)
        res = []
        target = n
        # Candidates are the digits 1..9, per the problem statement above.
        candidates = range(1, 10)
        dfs(candidates, target, 0, [[], []], res)
        # NOTE(review): combinations are built with strictly increasing
        # indices, so this sorted()/membership dedup pass looks redundant
        # (and is O(m^2)) -- confirm before removing.
        res_res = []
        for ele in res:
            if sorted(ele) not in res_res:
                res_res.append(sorted(ele))
return res_res | """
Find all possible combinations of k numbers that add up to a number n, given that only numbers from 1 to 9 can be used and each combination should be a unique set of numbers.
Note:
All numbers will be positive integers.
The solution set must not contain duplicate combinations.
Example 1:
Input: k = 3, n = 7
Output: [[1,2,4]]
Example 2:
Input: k = 3, n = 9
Output: [[1,2,6], [1,3,5], [2,3,4]]
"""
class Solution(object):
def combination_sum3(self, k, n):
"""
:type k: int
:type n: int
:rtype: List[List[int]]
"""
'\n Method: DFS\n\n @params: nums, target, index, arr_current_res\n\n '
def dfs(nums, target, index, arr_current, res):
if target < 0:
return None
elif target == 0 and len(arr_current[1]) == k:
res.append(arr_current[1])
return None
else:
for i in range(index, len(nums)):
if i not in arr_current[0]:
arr_current_update = [arr_current[0] + [i], arr_current[1] + [nums[i]]]
dfs(nums, target - nums[i], i, arr_current_update, res)
res = []
target = n
candidates = range(1, 10)
dfs(candidates, target, 0, [[], []], res)
res_res = []
for ele in res:
if sorted(ele) not in res_res:
res_res.append(sorted(ele))
return res_res |
"""
Fenwick tree.
You are given an array of length 24, where each element represents the number of new subscribers during the corresponding hour.
Implement a data structure that efficiently supports the following:
update (hour, value): increment the element at index hour by value.
query(start, end): retrieves the number of subscribers that have signed up between start and end (inclusive).
You can assume that all values get cleared at the end of the day, and that you will not be asked for start and end values
that wrap around midnight.
"""
class Subscribers1():
    """
    Naive subscriber counter: update is O(1), query is O(window length).
    """
    def __init__(self, nums):
        # hour -> running count of subscribers for that hour
        self.counter = dict(enumerate(nums))
    def update(self, hour, value):
        # Increment (not replace) the tally for the given hour.
        self.counter[hour] = self.counter[hour] + value
    def query(self, start, end):
        # Sum the counts over the inclusive [start, end] window.
        total = 0
        for hour in range(start, end + 1):
            total += self.counter[hour]
        return total
class BIT:
    """Fenwick (binary indexed) tree over a 1-based index space."""
    def __init__(self, nums):
        # Slot 0 is a sentinel so the lowest-set-bit walk works on
        # 1-based indices.
        self.tree = [0] * (len(nums) + 1)
        for position, value in enumerate(nums, start=1):
            self.update(position, value)
    def update(self, index, value):
        """Add *value* to the element at 1-based *index*."""
        size = len(self.tree)
        while index < size:
            self.tree[index] += value
            index += index & (-index)
    def query(self, index):
        """Return the prefix sum of elements 1..*index* (inclusive)."""
        total = 0
        while index > 0:
            total += self.tree[index]
            index &= index - 1  # drop the lowest set bit
        return total
class Subscribers2:
    """
    O(log n) solution
    """
    def __init__(self, nums):
        # Fenwick tree for prefix sums, plus a plain copy of the current
        # per-hour values so update() can compute deltas.
        self.bit = BIT(nums)
        self.nums = nums
    def update(self, hour, value):
        # NOTE(review): despite the module docstring saying "increment",
        # this *replaces* the hour's value (the BIT receives the delta).
        # NOTE(review): query() shifts indices by +1 for the 1-based BIT,
        # but this call passes `hour` unshifted -- confirm whether it
        # should be self.bit.update(hour + 1, ...).
        self.bit.update(hour, value - self.nums[hour])
        self.nums[hour] = value
    def query(self, start, end):
        # Shift start and end indices forward as our array is 1-based.
return self.bit.query(end + 1) - self.bit.query(start) | """
Fenwick tree.
You are given an array of length 24, where each element represents the number of new subscribers during the corresponding hour.
Implement a data structure that efficiently supports the following:
update (hour, value): increment the element at index hour by value.
query(start, end) retrieve the numbner of subscribers that have signed up between start and end (inclusive).
You can assume that all values get cleared at the end of the day, and that you will not be asked for start and end values
that wrap around midnight.
"""
class Subscribers1:
"""
naive O(N) solution.
"""
def __init__(self, nums):
self.counter = {index: value for (index, value) in enumerate(nums)}
def update(self, hour, value):
self.counter[hour] += value
def query(self, start, end):
values = [self.counter[index] for index in range(start, end + 1)]
return sum(values)
class Bit:
def __init__(self, nums):
self.tree = [0 for _ in range(len(nums) + 1)]
for (i, num) in enumerate(nums):
self.update(i + 1, num)
def update(self, index, value):
while index < len(self.tree):
self.tree[index] += value
index += index & -index
def query(self, index):
total = 0
while index > 0:
total += self.tree[index]
index -= index & -index
return total
class Subscribers2:
"""
O(log n) solution
"""
def __init__(self, nums):
self.bit = bit(nums)
self.nums = nums
def update(self, hour, value):
self.bit.update(hour, value - self.nums[hour])
self.nums[hour] = value
def query(self, start, end):
return self.bit.query(end + 1) - self.bit.query(start) |
# Auto-generated .NET API stub (presumably for the Revit API, given the
# Enum/IComparable/IFormattable/IConvertible bases): bodies are empty and
# exist only for IDE completion -- do not edit by hand.
class ConnectorDomainType(Enum,IComparable,IFormattable,IConvertible):
    """
    Enumeration of connector domain types
    enum ConnectorDomainType,values: CableTrayConduit (4),Electrical (2),Hvac (1),Piping (3),StructuralAnalytical (5),Undefined (0)
    """
    def __eq__(self,*args):
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self,*args):
        """ __format__(formattable: IFormattable,format: str) -> str """
        pass
    def __ge__(self,*args):
        pass
    def __gt__(self,*args):
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self,*args):
        pass
    def __lt__(self,*args):
        pass
    def __ne__(self,*args):
        pass
    def __reduce_ex__(self,*args):
        pass
    def __str__(self,*args):
        pass
    # Enum members: the stub assigns None placeholders; the real numeric
    # values are listed in the class docstring above.
    CableTrayConduit=None
    Electrical=None
    Hvac=None
    Piping=None
    StructuralAnalytical=None
    Undefined=None
    value__=None
| class Connectordomaintype(Enum, IComparable, IFormattable, IConvertible):
"""
Enumeration of connector domain types
enum ConnectorDomainType,values: CableTrayConduit (4),Electrical (2),Hvac (1),Piping (3),StructuralAnalytical (5),Undefined (0)
"""
def __eq__(self, *args):
""" x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
pass
def __format__(self, *args):
""" __format__(formattable: IFormattable,format: str) -> str """
pass
def __ge__(self, *args):
pass
def __gt__(self, *args):
pass
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __le__(self, *args):
pass
def __lt__(self, *args):
pass
def __ne__(self, *args):
pass
def __reduce_ex__(self, *args):
pass
def __str__(self, *args):
pass
cable_tray_conduit = None
electrical = None
hvac = None
piping = None
structural_analytical = None
undefined = None
value__ = None |
n,l,k=map(int,input().split())
oo=[]
for i in range(n):
x,y=map(int,input().split())
t=0
if l>=x: t=100
if l>=y: t+=40
if t>0: oo.append(t)
oo.sort(reverse=True)
print(sum(oo[:k])) | (n, l, k) = map(int, input().split())
oo = []
for i in range(n):
(x, y) = map(int, input().split())
t = 0
if l >= x:
t = 100
if l >= y:
t += 40
if t > 0:
oo.append(t)
oo.sort(reverse=True)
print(sum(oo[:k])) |
# Copyright (C) 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base model for keystone internal services
Unless marked otherwise, all fields are strings.
"""
class Model(dict):
    """Base model class.

    Stores resource attributes as dict items; identity is the 'id' item.
    Subclasses declare `required_keys` and `optional_keys` tuples.
    """
    def __hash__(self):
        # Models hash by their 'id' item, so two models with the same id
        # collide in sets/dicts even if other attributes differ.
        return self['id'].__hash__()
    @property
    def known_keys(cls):
        # NOTE(review): despite the `cls` name this is an ordinary instance
        # property (the first argument is bound to the instance); it works
        # because the tuples are class attributes on the subclass.
        return cls.required_keys + cls.optional_keys
class Token(Model):
"""Token object.
Required keys:
id
expires (datetime)
Optional keys:
user
project
metadata
"""
required_keys = ('id', 'expires')
optional_keys = ('extra',)
class Service(Model):
"""Service object.
Required keys:
id
type
name
Optional keys:
"""
required_keys = ('id', 'type', 'name')
optional_keys = tuple()
class Endpoint(Model):
"""Endpoint object
Required keys:
id
region
service_id
Optional keys:
internalurl
publicurl
adminurl
"""
required_keys = ('id', 'region', 'service_id')
optional_keys = ('internalurl', 'publicurl', 'adminurl')
class User(Model):
"""User object.
Required keys:
id
name
domain_id
Optional keys:
password
description
email
enabled (bool, default True)
"""
required_keys = ('id', 'name', 'domain_id')
optional_keys = ('password', 'description', 'email', 'enabled')
class Group(Model):
"""Group object.
Required keys:
id
name
domain_id
Optional keys:
description
"""
required_keys = ('id', 'name', 'domain_id')
optional_keys = ('description',)
class Project(Model):
"""Project object.
Required keys:
id
name
domain_id
Optional Keys:
description
enabled (bool, default True)
"""
required_keys = ('id', 'name', 'domain_id')
optional_keys = ('description', 'enabled')
class Role(Model):
    """Role object.

    Required keys:
        id
        name
        domain_id

    Optional keys:
        description
        extra
    """
    required_keys = ('id', 'name', 'domain_id')
    optional_keys = ('description', 'extra')
class Domain(Model):
"""Domain object.
Required keys:
id
name
Optional keys:
description
enabled (bool, default True)
"""
required_keys = ('id', 'name')
optional_keys = ('description', 'enabled')
| """Base model for keystone internal services
Unless marked otherwise, all fields are strings.
"""
class Model(dict):
"""Base model class."""
def __hash__(self):
return self['id'].__hash__()
@property
def known_keys(cls):
return cls.required_keys + cls.optional_keys
class Token(Model):
"""Token object.
Required keys:
id
expires (datetime)
Optional keys:
user
project
metadata
"""
required_keys = ('id', 'expires')
optional_keys = ('extra',)
class Service(Model):
"""Service object.
Required keys:
id
type
name
Optional keys:
"""
required_keys = ('id', 'type', 'name')
optional_keys = tuple()
class Endpoint(Model):
"""Endpoint object
Required keys:
id
region
service_id
Optional keys:
internalurl
publicurl
adminurl
"""
required_keys = ('id', 'region', 'service_id')
optional_keys = ('internalurl', 'publicurl', 'adminurl')
class User(Model):
"""User object.
Required keys:
id
name
domain_id
Optional keys:
password
description
email
enabled (bool, default True)
"""
required_keys = ('id', 'name', 'domain_id')
optional_keys = ('password', 'description', 'email', 'enabled')
class Group(Model):
"""Group object.
Required keys:
id
name
domain_id
Optional keys:
description
"""
required_keys = ('id', 'name', 'domain_id')
optional_keys = ('description',)
class Project(Model):
"""Project object.
Required keys:
id
name
domain_id
Optional Keys:
description
enabled (bool, default True)
"""
required_keys = ('id', 'name', 'domain_id')
optional_keys = ('description', 'enabled')
class Role(Model):
"""Role object.
Required keys:
id
name
"""
required_keys = ('id', 'name', 'domain_id')
optional_keys = ('description', 'extra')
class Domain(Model):
"""Domain object.
Required keys:
id
name
Optional keys:
description
enabled (bool, default True)
"""
required_keys = ('id', 'name')
optional_keys = ('description', 'enabled') |
# encoding: utf-8
"""
@version: v1.0
@author: Richard
@license: Apache Licence
@contact: billions.richard@qq.com
@site:
@software: PyCharm
@time: 2019/10/4 17:07
"""
class Solution:
    def rob(self, nums: list) -> int:
        """House Robber: maximum sum over elements of `nums` with no two adjacent.

        Uses a rolling window over dp[i] = nums[i] + max(dp[i-2], dp[i-3]),
        so only the last three dp values are kept.
        """
        if not nums:
            return 0
        if len(nums) <= 2:
            return max(nums)
        two_back, one_back, current = nums[0], nums[1], nums[0] + nums[2]
        for value in nums[3:]:
            two_back, one_back, current = (
                one_back,
                current,
                max(one_back, two_back) + value,
            )
        return max(current, one_back)
| """
@version: v1.0
@author: Richard
@license: Apache Licence
@contact: billions.richard@qq.com
@site:
@software: PyCharm
@time: 2019/10/4 17:07
"""
class Solution:
def rob(self, nums: list) -> int:
if not nums:
return 0
num_len = len(nums)
if num_len <= 2:
return max(nums)
dp = [nums[0], nums[1], nums[0] + nums[2]]
for i in range(3, num_len):
dp_i = max(dp[i - 2], dp[i - 3]) + nums[i]
dp.append(dp_i)
return max(dp[-1], dp[-2]) |
def findClosestValueInBst(tree, target):
    """Return the value stored in the BST that is nearest to `target`.

    Walks down from the root, remembering the best candidate seen so far
    and descending toward `target`; stops early on an exact match.
    """
    node = tree
    best = tree.value
    while node is not None:
        if abs(node.value - target) < abs(best - target):
            best = node.value
        if node.value == target:
            break  # exact hit; nothing can be closer
        node = node.left if target < node.value else node.right
    return best


# This is the class of the input tree. Do not edit.
class BST:
    """Plain binary-search-tree node: a value plus optional children."""

    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None
| def find_closest_value_in_bst(tree, target):
current_node = tree
closest = tree.value
while currentNode is not None:
if abs(target - currentNode.value) < abs(target - closest):
closest = currentNode.value
if target < currentNode.value:
current_node = currentNode.left
elif target > currentNode.value:
current_node = currentNode.right
else:
break
return closest
class Bst:
    """Minimal binary-search-tree node: a value and two optional children."""

    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None
class CircularBufferAdaptor:
    """Read-only circular view over a list: indices wrap modulo the length."""

    def __init__(self, l: list):
        self._buffer = l
        self._buffer_len = len(l)

    def __getitem__(self, index: int):
        return self._buffer[self._get_index(index)]

    def _get_index(self, index: int):
        # Wrap any integer index (including negatives) into range.
        return index % self._buffer_len

    def from_offset(self, offset: int):
        """Yield one full revolution of the buffer starting at `offset`."""
        for step in range(self._buffer_len):
            yield self[offset + step]
| class Circularbufferadaptor:
def __init__(self, l: list):
self._buffer = l
self._buffer_len = len(self._buffer)
def __getitem__(self, index: int):
return self._buffer[self._get_index(index)]
def _get_index(self, index: int):
return index % self._buffer_len
def from_offset(self, offset: int):
for i in range(self._buffer_len):
yield self[i + offset] |
# MMDetection config: RetinaNet R50-FPN on the ICDAR2021 detection dataset,
# 2x training schedule, composed from the _base_ fragments below.
_base_ = [
    '../_base_/models/retinanet_r50_fpn_icdar2021.py',
    '../_base_/datasets/icdar2021_detection.py',
    '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py'
]
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
# data = dict(
#     samples_per_gpu=1,
#     workers_per_gpu=2)
| _base_ = ['../_base_/models/retinanet_r50_fpn_icdar2021.py', '../_base_/datasets/icdar2021_detection.py', '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py']
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) |
# Tag bytes of the Erlang external term format (version byte 131); each
# comment sketches the wire layout following the tag.
FORMAT_VERSION = 131
NEW_FLOAT_EXT = 70  # [Float64:IEEE float]
BIT_BINARY_EXT = 77  # [UInt32:Len, UInt8:Bits, Len:Data]
SMALL_INTEGER_EXT = 97  # [UInt8:Int]
INTEGER_EXT = 98  # [Int32:Int]
FLOAT_EXT = 99  # [31:Float String] Float in string format (formatted "%.20e", sscanf "%lf"). Superseded by NEW_FLOAT_EXT
ATOM_EXT = 100  # 100 [UInt16:Len, Len:AtomName] max Len is 255
REFERENCE_EXT = 101  # 101 [atom:Node, UInt32:ID, UInt8:Creation]
PORT_EXT = 102  # [atom:Node, UInt32:ID, UInt8:Creation]
PID_EXT = 103  # [atom:Node, UInt32:ID, UInt32:Serial, UInt8:Creation]
SMALL_TUPLE_EXT = 104  # [UInt8:Arity, N:Elements]
LARGE_TUPLE_EXT = 105  # [UInt32:Arity, N:Elements]
NIL_EXT = 106  # empty list
STRING_EXT = 107  # [UInt32:Len, Len:Characters]
LIST_EXT = 108  # [UInt32:Len, Elements, Tail]
BINARY_EXT = 109  # [UInt32:Len, Len:Data]
SMALL_BIG_EXT = 110  # [UInt8:n, UInt8:Sign, n:nums]
LARGE_BIG_EXT = 111  # [UInt32:n, UInt8:Sign, n:nums]
NEW_FUN_EXT = 112  # [UInt32:Size, UInt8:Arity, 16*Uint6-MD5:Uniq, UInt32:Index, UInt32:NumFree, atom:Module, int:OldIndex, int:OldUniq, pid:Pid, NumFree*ext:FreeVars]
EXPORT_EXT = 113  # [atom:Module, atom:Function, smallint:Arity]
NEW_REFERENCE_EXT = 114  # [UInt16:Len, atom:Node, UInt8:Creation, Len*UInt32:ID]
SMALL_ATOM_EXT = 115  # [UInt8:Len, Len:AtomName]
FUN_EXT = 117  # [UInt4:NumFree, pid:Pid, atom:Module, int:Index, int:Uniq, NumFree*ext:FreeVars]
COMPRESSED = 80  # [UInt4:UncompressedSize, N:ZlibCompressedData]
MAP_EXT = 116  # [Uint64:Arity,N:K-V]
ATOM_UTF8_EXT = 118  # [UInt16:Len, Len:UTF-8 AtomName] -- per the ETF spec; confirm against erl_ext_dist
SMALL_ATOM_UTF8_EXT = 119  # [UInt8:Len, Len:UTF-8 AtomName] -- per the ETF spec; confirm against erl_ext_dist
| format_version = 131
new_float_ext = 70
bit_binary_ext = 77
small_integer_ext = 97
integer_ext = 98
float_ext = 99
atom_ext = 100
reference_ext = 101
port_ext = 102
pid_ext = 103
small_tuple_ext = 104
large_tuple_ext = 105
nil_ext = 106
string_ext = 107
list_ext = 108
binary_ext = 109
small_big_ext = 110
large_big_ext = 111
new_fun_ext = 112
export_ext = 113
new_reference_ext = 114
small_atom_ext = 115
fun_ext = 117
compressed = 80
map_ext = 116
atom_utf8_ext = 118
small_atom_utf8_ext = 119 |
# SPDX-License-Identifier: GPL-2.0
"""
Ask new choice values when they become visible.
If new choice values are added with new dependency, and they become
visible during user configuration, oldconfig should recognize them
as (NEW), and ask the user for choice.
Related Linux commit: 5d09598d488f081e3be23f885ed65cbbe2d073b5
"""
def test(conf):
    """Run `oldconfig` answering 'y' to the new prompts; expect a zero exit
    status and the transcript recorded in 'expected_stdout'."""
    assert conf.oldconfig('config', 'y') == 0
    assert conf.stdout_contains('expected_stdout')
| """
Ask new choice values when they become visible.
If new choice values are added with new dependency, and they become
visible during user configuration, oldconfig should recognize them
as (NEW), and ask the user for choice.
Related Linux commit: 5d09598d488f081e3be23f885ed65cbbe2d073b5
"""
def test(conf):
assert conf.oldconfig('config', 'y') == 0
assert conf.stdout_contains('expected_stdout') |
# Skeleton for an empty datastore: three "tables", each with a 'meta' record
# whose 'last_id' presumably tracks the most recently allocated id (stored as
# a string) -- confirm against the code that consumes this structure.
DEFAULT_FORMAT = {
    'users': {
        'meta': {
            'last_id': '0'
        }
    },
    'rooms': {
        'meta': {
            'last_id': '0'
        }
    },
    'messages': {
        'meta': {
            'last_id': '0'
        }
    }
}
| default_format = {'users': {'meta': {'last_id': '0'}}, 'rooms': {'meta': {'last_id': '0'}}, 'messages': {'meta': {'last_id': '0'}}} |
# Flask settings for local development only.
DEBUG = True
SERVER_NAME = 'localhost:8000'
SECRET_KEY = 'insecurekeyfordev'  # NOTE(review): dev placeholder; must be overridden outside development
# SQLAlchemy.
db_uri = 'postgresql://flaskwallet:walletpassword@postgres:5432/flaskwallet'
SQLALCHEMY_DATABASE_URI = db_uri
SQLALCHEMY_TRACK_MODIFICATIONS = False  # avoid the event-system overhead
| debug = True
server_name = 'localhost:8000'
secret_key = 'insecurekeyfordev'
db_uri = 'postgresql://flaskwallet:walletpassword@postgres:5432/flaskwallet'
sqlalchemy_database_uri = db_uri
sqlalchemy_track_modifications = False |
# Module-level mutable state shared by the client; these are startup defaults,
# presumably populated/refreshed at runtime elsewhere -- confirm in callers.
protocol_version = 1
last_refresh_time = 0  # presumably a timestamp of the last refresh; confirm units
cert = None
data_path = None
config = None
proxy = None
session = None
socks5_server = None
last_api_error = ""
quota_list = {}
quota = 0
server_host = ""
server_port = 0
balance = 0
| protocol_version = 1
last_refresh_time = 0
cert = None
data_path = None
config = None
proxy = None
session = None
socks5_server = None
last_api_error = ''
quota_list = {}
quota = 0
server_host = ''
server_port = 0
balance = 0 |
"""
# Sample code to perform I/O:
name = input() # Reading input from STDIN
print('Hi, %s.' % name) # Writing output to STDOUT
# Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail
"""
# Write your code here
s = input().strip()
temp = s
if len(set(s)) > 1:
while s == temp[::-1]:
temp = temp[:-1]
print(len(temp))
else:
print(0)
| """
# Sample code to perform I/O:
name = input() # Reading input from STDIN
print('Hi, %s.' % name) # Writing output to STDOUT
# Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail
"""
s = input().strip()
temp = s
if len(set(s)) > 1:
while s == temp[::-1]:
temp = temp[:-1]
print(len(temp))
else:
print(0) |
def help_menu():
    """Print the speedfetch usage/argument help text to stdout."""
    help_text = """speedfetch -- a program for fetching internet speeds and other miscellaneous information about your network

Usage:

speedfetch (no args required but are optional)

ARGS:
--hide-country/-hc: Hides the country when passed
--hide-isp/-hisp: Hides the ISP when passed
--version/-v: Shows the current version of the program
--help/-h: Shows this menu (for help)"""
    print(help_text)
def version_menu():
    """Print the speedfetch version banner and project links to stdout."""
    text = """
[Version of speedfetch: 1.0]

https://github.com/polisflatt/speedfetch
polisflatt@gmail.com"""
    print(text)
def bps_to_mbps(bitspersecond):
    """Convert a bits-per-second rate into megabits per second."""
    return bitspersecond / 1_000_000
| def help_menu():
help_text = 'speedfetch -- a program for fetching internet speeds and other miscellaneous information about your network\n\nUsage:\n\nspeedfetch (no args required but are optional)\n\nARGS:\n--hide-country/-hc: Hides the country when passed\n--hide-isp/-hisp: Hides the ISP when passed\n--version/-v: Shows the current version of the program\n--help/-h: Shows this menu (for help)'
print(help_text)
def version_menu():
text = '\n[Version of speedfetch: 1.0]\n\nhttps://github.com/polisflatt/speedfetch\npolisflatt@gmail.com'
print(text)
def bps_to_mbps(bitspersecond):
return bitspersecond / 1000000 |
def numberOfPaths(arr, row, col, cost):
    """Count paths from (row, col) to (0, 0), moving only up or left, whose
    visited cell values (both endpoints included) sum to exactly `cost`.

    Same recurrence as the original naive recursion, but memoized on
    (row, col, remaining) so each subproblem is computed once -- the plain
    recursion is exponential in row + col.
    """
    memo = {}

    def count(r, c, remaining):
        if remaining < 0:
            return 0  # overshot the budget; no path can recover
        if r == 0 and c == 0:
            # Base cell: the path is valid iff its value uses the budget exactly.
            return 1 if arr[0][0] == remaining else 0
        key = (r, c, remaining)
        if key not in memo:
            spent = remaining - arr[r][c]
            if r == 0:
                memo[key] = count(0, c - 1, spent)
            elif c == 0:
                memo[key] = count(r - 1, 0, spent)
            else:
                memo[key] = count(r - 1, c, spent) + count(r, c - 1, spent)
        return memo[key]

    return count(row, col, cost)
| def number_of_paths(arr, row, col, cost):
if cost < 0:
return 0
elif row == 0 and col == 0:
if arr[row][col] - cost == 0:
return 1
else:
return 0
elif row == 0:
return number_of_paths(arr, 0, col - 1, cost - arr[row][col])
elif col == 0:
return number_of_paths(arr, row - 1, 0, cost - arr[row][col])
else:
op1 = number_of_paths(arr, row - 1, col, cost - arr[row][col])
op2 = number_of_paths(arr, row, col - 1, cost - arr[row][col])
return op1 + op2 |
# _*_coding:utf-8_*_
class Solution:
    def StrToInt(self, s):
        """Convert a decimal string to an int without using int().

        Accepts an optional single leading '+' or '-'; returns False for
        None, the empty string, a lone sign, or any non-digit elsewhere.

        Bug fix: the original accepted sign characters at ANY position and
        any number of them (e.g. '12+34' parsed as 1234 and '+-12' as -12);
        signs are now legal only as the first character.
        """
        if not s:
            return False  # covers both None and ''
        sign = 1
        digits = s
        if s[0] in '+-':
            if s[0] == '-':
                sign = -1
            digits = s[1:]
            if not digits:
                return False  # a lone sign is not a number
        result = 0
        for ch in digits:
            if not '0' <= ch <= '9':
                return False  # non-digit (including misplaced signs)
            result = result * 10 + (ord(ch) - ord('0'))
        return sign * result
# Ad-hoc smoke checks (output is printed for eyeballing, not asserted).
s = Solution()
print(s.StrToInt('1234'))
print(s.StrToInt('+1234'))
print(s.StrToInt('-1234'))
print(s.StrToInt('a1234'))
print(s.StrToInt('1234b'))
print(s.StrToInt('abcdb')) | class Solution:
def str_to_int(self, s):
if None is s or len(s) <= 0:
return False
min_value = ord('0')
max_value = ord('9')
plus = ord('+')
sub = ord('-')
index = 1
result = 0
sign = 1
for item in s[::-1]:
temp = ord(item)
if min_value <= temp <= max_value:
result += index * (temp - min_value)
index *= 10
elif plus == temp:
pass
elif sub == temp:
sign = -1
else:
return False
return result * sign
# Ad-hoc smoke checks (output is printed for eyeballing, not asserted).
# Bug fixes: `solution` was an undefined name (the class is `Solution`),
# and this class's method is `str_to_int`, not `StrToInt` (AttributeError).
s = Solution()
print(s.str_to_int('1234'))
print(s.str_to_int('+1234'))
print(s.str_to_int('-1234'))
print(s.str_to_int('a1234'))
print(s.str_to_int('1234b'))
print(s.str_to_int('abcdb'))
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
_DEFAULT_GERRIT_HOST = 'chromium-review.googlesource.com'


def IsCodeReviewGerrit(review_server_host, code_review_settings=None):
    """Return True iff `review_server_host` is one of the configured Gerrit hosts.

    Falls back to the default Chromium Gerrit host when the settings dict is
    missing or has no 'gerrit_hosts' entry.
    """
    gerrit_hosts = (code_review_settings or {}).get(
        'gerrit_hosts', [_DEFAULT_GERRIT_HOST])
    return review_server_host in gerrit_hosts
| _default_gerrit_host = 'chromium-review.googlesource.com'
def is_code_review_gerrit(review_server_host, code_review_settings=None):
settings = code_review_settings or {}
return review_server_host in settings.get('gerrit_hosts', [_DEFAULT_GERRIT_HOST]) |
# CVDF mirror of http://yann.lecun.com/exdb/mnist/
SOURCE_URL = 'http://yann.lecun.com/exdb/mnist/'
# SOURCE_URL = 'https://storage.googleapis.com/cvdf-datasets/mnist/'
WORK_DIRECTORY = 'data'  # local cache for the downloaded archives
IMAGE_SIZE = 28  # MNIST images are 28x28 pixels
NUM_CHANNELS = 1  # grayscale
PIXEL_DEPTH = 255  # maximum uint8 pixel value
NUM_LABELS = 10  # digits 0-9
VALIDATION_SIZE = 5000  # Size of the validation set.
SEED = 66478  # Set to None for random seed.
BATCH_SIZE = 64
NUM_EPOCHS = 10
EVAL_BATCH_SIZE = 64
EVAL_FREQUENCY = 100  # Number of steps between evaluations.
| source_url = 'http://yann.lecun.com/exdb/mnist/'
work_directory = 'data'
image_size = 28
num_channels = 1
pixel_depth = 255
num_labels = 10
validation_size = 5000
seed = 66478
batch_size = 64
num_epochs = 10
eval_batch_size = 64
eval_frequency = 100 |
class AudioPlayer:
    """Thin wrapper around a GStreamer `playbin` element.

    The GStreamer module is dependency-injected through `create`, so this
    class never imports GStreamer directly (eases testing with fakes).
    """
    def __init__(self, player, player_state):
        self._player = player              # the playbin element built in create()
        self._player_state = player_state  # the injected module's State namespace
    @staticmethod
    def create(g_streamer):
        """Initialize GStreamer and build an AudioPlayer around a playbin element."""
        g_streamer.init(None)
        player = g_streamer.ElementFactory.make("playbin", "player")
        return AudioPlayer(player, g_streamer.State)
    @property
    def is_playing(self):
        # Identity comparison: assumes the state values are singletons
        # (true for GObject enum members) -- TODO confirm.
        current_state = self._player.current_state
        return current_state is self._player_state.PLAYING
    def set_uri(self, uri):
        """Point the player at a media URI."""
        self._player.set_property('uri', uri)
    def play(self):
        self._player.set_state(self._player_state.PLAYING)
    def stop(self):
        # NOTE(review): NULL presumably tears the pipeline down rather than
        # pausing it -- confirm that is the intended stop semantics.
        self._player.set_state(self._player_state.NULL)
| class Audioplayer:
def __init__(self, player, player_state):
self._player = player
self._player_state = player_state
@staticmethod
def create(g_streamer):
g_streamer.init(None)
player = g_streamer.ElementFactory.make('playbin', 'player')
return audio_player(player, g_streamer.State)
@property
def is_playing(self):
current_state = self._player.current_state
return current_state is self._player_state.PLAYING
def set_uri(self, uri):
self._player.set_property('uri', uri)
def play(self):
self._player.set_state(self._player_state.PLAYING)
def stop(self):
self._player.set_state(self._player_state.NULL) |
'''
import datetime
Plugins can import anything they want of python for their use.
from base_plugin import *
This will get you all of the "standard" framework stuff you need. Comes with
Plugin (the class you inherit from to build plugins for)
as well as all the utilities and filters in PluginUtils.py.
NOTE: you also get a bunch of other convenience functions, like write_to_file,
put_object_to_file, etc. Look around in PluginUtils.py, basically, and at the
other example plugins for how to use these.
LastSeenPlugin.py is basically an example of how you can do almost everything
just as raw python.
LogPlugin.py is an example of how to use the API for almost everything.
You can use as much or as little as you like.
class TestPlugin(Plugin): NOTE: All class plugins must inherit from Plugin.
def initialize(self):
Define the initialize method as so if needed to perform
stateful initialization of the plugin. This happens before it is ever run.
print "Initializing test plugin."
self.add_trigger(on_message)
The add_trigger function is the first utility of note. provided with
a filter, (which can be defined custom if needed, see PluginUtils.py)
it ensures that your plugin will run whenever that type of message occurs.
You can apply as many filters as you'd like. Any one succeeding allows the message through.
NOTE: you can use the filters as functions anywhere else in your plugin as well.
self.add_command("!test", self.test)
The add_command utility is another builtin which allows you to make "commands",
functions triggered by a command string sent via private message to the bot,
which directly call a specified function. whitespace seperated items following
the initial command are interpreted as arguments to the function.
def run(self, message):
The primary method of the plugin, called whenever the plugin triggers as defined
by the filters added in initialization. The message argument is the message
which triggered this run of the plugin. See below for the commonly used parts of the message.
print "Running test plugin on: "
print message Some common fields:
print "to:", message.To -to: the target of the message (you, the bot.)
print "room:", message.Room -room: the group chat room, if this message is a group chat message.
print "from:", message.From -from: the entity sending the message. This has useful subcomponents.
print "type:", message.Type -type: the type of the message. (chat, groupchat, available, unavailable, etc.)
print "body:", message.Body -body: the body of the message.
print "________________________"
def test(self, message, *args):
An example of a command-triggered function. The Message argument is the message which
triggered the command. For demonstration, this function also can take a variable number
of additional args which it will print to the command line, before in any case replying
"Success." to the sender.
for arg in args: if the *args stuff is confusing you, look up python varargs;
print "got:", arg it's just for show of the private message tokens to function argument translation here.
self.send_message(message.From, "Success")
This is how you reply to a message. Pretty straightforward.
@Command
def AnotherTest(message):
Some functionality can also be defined outside of classes, for small standalones.
This would be called via '!AnotherTest'
send_message(message.From, "Echo")
NOTE:
To see further examples, look at the other files in the ./plugins directory. To see full functionality, help(BasePlugin) and help(PluginUtils) may be of some assistance.
'''
| """
import datetime
Plugins can import anything they want of python for their use.
from base_plugin import *
This will get you all of the "standard" framework stuff you need. Comes with
Plugin (the class you inherit from to build plugins for)
as well as all the utilities and filters in PluginUtils.py.
NOTE: you also get a bunch of other convenience functions, like write_to_file,
put_object_to_file, etc. Look around in PluginUtils.py, basically, and at the
other example plugins for how to use these.
LastSeenPlugin.py is basically an example of how you can do almost everything
just as raw python.
LogPlugin.py is an example of how to use the API for almost everything.
You can use as much or as little as you like.
class TestPlugin(Plugin): NOTE: All class plugins must inherit from Plugin.
def initialize(self):
Define the initialize method as so if needed to perform
stateful initialization of the plugin. This happens before it is ever run.
print "Initializing test plugin."
self.add_trigger(on_message)
The add_trigger function is the first utility of note. provided with
a filter, (which can be defined custom if needed, see PluginUtils.py)
it ensures that your plugin will run whenever that type of message occurs.
You can apply as many filters as you'd like. Any one succeeding allows the message through.
NOTE: you can use the filters as functions anywhere else in your plugin as well.
self.add_command("!test", self.test)
The add_command utility is another builtin which allows you to make "commands",
functions triggered by a command string sent via private message to the bot,
which directly call a specified function. whitespace seperated items following
the initial command are interpreted as arguments to the function.
def run(self, message):
The primary method of the plugin, called whenever the plugin triggers as defined
by the filters added in initialization. The message argument is the message
which triggered this run of the plugin. See below for the commonly used parts of the message.
print "Running test plugin on: "
print message Some common fields:
print "to:", message.To -to: the target of the message (you, the bot.)
print "room:", message.Room -room: the group chat room, if this message is a group chat message.
print "from:", message.From -from: the entity sending the message. This has useful subcomponents.
print "type:", message.Type -type: the type of the message. (chat, groupchat, available, unavailable, etc.)
print "body:", message.Body -body: the body of the message.
print "________________________"
def test(self, message, *args):
An example of a command-triggered function. The Message argument is the message which
triggered the command. For demonstration, this function also can take a variable number
of additional args which it will print to the command line, before in any case replying
"Success." to the sender.
for arg in args: if the *args stuff is confusing you, look up python varargs;
print "got:", arg it's just for show of the private message tokens to function argument translation here.
self.send_message(message.From, "Success")
This is how you reply to a message. Pretty straightforward.
@Command
def AnotherTest(message):
Some functionality can also be defined outside of classes, for small standalones.
This would be called via '!AnotherTest'
send_message(message.From, "Echo")
NOTE:
To see further examples, look at the other files in the ./plugins directory. To see full functionality, help(BasePlugin) and help(PluginUtils) may be of some assistance.
""" |
VERSION = (0, 5, 0)
__version__ = '.'.join(map(str, VERSION))
ALL = (None,) # Sentinel value for all HTTP methods.
UNSAFE = ['DELETE', 'PATCH', 'POST', 'PUT']
| version = (0, 5, 0)
__version__ = '.'.join(map(str, VERSION))
all = (None,)
unsafe = ['DELETE', 'PATCH', 'POST', 'PUT'] |
#
# PySNMP MIB module CISCO-WAN-BBIF-ATM-CONN-STAT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-WAN-BBIF-ATM-CONN-STAT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:20:12 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Pull SMI base types and macros out of already-loaded MIB modules.
# mibBuilder.importSymbols() is pysnmp's runtime equivalent of the ASN.1
# IMPORTS clause; the single-name imports use a 1-tuple unpack ("name, =").
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
bbChanCntGrp, = mibBuilder.importSymbols("BASIS-MIB", "bbChanCntGrp")
ciscoWan, = mibBuilder.importSymbols("CISCOWAN-SMI", "ciscoWan")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
Integer32, ModuleIdentity, Gauge32, TimeTicks, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, Counter32, IpAddress, iso, Counter64, ObjectIdentity, MibIdentifier, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "ModuleIdentity", "Gauge32", "TimeTicks", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "Counter32", "IpAddress", "iso", "Counter64", "ObjectIdentity", "MibIdentifier", "Bits")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# Module identity for CISCO-WAN-BBIF-ATM-CONN-STAT-MIB,
# OID 1.3.6.1.4.1.351.150.36 (under the ciscoWan enterprise arc).
ciscoWanBbifAtmConnStatMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 351, 150, 36))
ciscoWanBbifAtmConnStatMIB.setRevisions(('2002-10-18 00:00',))
# setRevisionsDescriptions() only exists on newer pysnmp builders, hence the
# version gate; the loadTexts flag skips descriptive text unless requested.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: ciscoWanBbifAtmConnStatMIB.setRevisionsDescriptions(('Initial version of the MIB. The content of this MIB was originally available in CISCO-WAN-AXIPOP-MIB defined using SMIv1. The applicable objects from CISCO-WAN-AXIPOP-MIB are defined using SMIv2 in this MIB. Also the descriptions of some of the objects have been modified.',))
if mibBuilder.loadTexts: ciscoWanBbifAtmConnStatMIB.setLastUpdated('200210180000Z')
if mibBuilder.loadTexts: ciscoWanBbifAtmConnStatMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoWanBbifAtmConnStatMIB.setContactInfo(' Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: cs-wanatm@cisco.com')
if mibBuilder.loadTexts: ciscoWanBbifAtmConnStatMIB.setDescription('This MIB module contains ATM Connection related real time statistical counter objects. The ATM connections are applicable to PXM1 service module. in MGX82xx products. Logical Channel refers to ATM Connection(or connection end point). Terminologies used: broadband interface - Logical port on PXM1. SCR - Sustained Cell Rate BT - Burst Tolerance PCR - Peak Cell Rate CDVT - Cell Delay Transfer Variance Tolerance GCRA - The Generic Cell Rate Algorithm(GCRA) is used to define comformance with repect to the traffic contract. For each cell arrival, the GCRA determines whether the cell conforms to the traffic contract of the connection. GCRA1 - GCRA Bucket 1. For CBR and UBR connections, only GCRA-1 is needed to check for PCR, CDVT Conformance. GCRA2 - GCRA Bucket 2. For VBR and ABR Connections, GCRA-1 is needed to check for PCR, CDVT conformance and GCRA-2 for SCR, BT Conformance. EFCI - Explicit Forward Congestion Indication. QE - Queue Engine(QE). This ASIC Provides the traffic management functions related to VC queues, QoS queues and interface queues. This management function is performed for both directions of traffic(ingress, egress). ingress - ingress traffic is defined as data flowing toward the switch fabric. Ingress data can come from either the Service Modules through the backplane or the PXM1 Uplink back card. egress - Egress traffic is defined as data flowing away from the switch fabric.')
# --- bbChanCntGrpTable: per-connection statistics table ----------------------
# Note these objects live under 1.3.6.1.4.1.351.110... (the BASIS-MIB subtree),
# not under this module's own 1.3.6.1.4.1.351.150.36 arc — consistent with the
# module description saying they were carried over from CISCO-WAN-AXIPOP-MIB.
bbChanCntGrpTable = MibTable((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1), )
if mibBuilder.loadTexts: bbChanCntGrpTable.setStatus('current')
if mibBuilder.loadTexts: bbChanCntGrpTable.setDescription('This is the broadband channel statistics table.')
# Conceptual row: one entry per ATM connection, indexed by bbChanCntNum.
bbChanCntGrpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1), ).setIndexNames((0, "CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanCntNum"))
if mibBuilder.loadTexts: bbChanCntGrpEntry.setStatus('current')
if mibBuilder.loadTexts: bbChanCntGrpEntry.setDescription('An entry for statistics on logical channel. Each entry contains statistics for an ATM connection on broadband interface.')
# Index column: logical channel number, value-range constrained to 16..4111.
bbChanCntNum = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 4111))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanCntNum.setReference('bbChanCnfNum is defined in CISCO-WAN-BBIF-ATM-CONN-MIB.')
if mibBuilder.loadTexts: bbChanCntNum.setStatus('current')
if mibBuilder.loadTexts: bbChanCntNum.setDescription("This object identifies the logical channel number. The value for this object must be same as the value of 'bbChanCnfNum' from bbChanCnfGrpTable.")
# Read-only Counter32 columns: receive / policing / discard / transmit stats.
bbChanRcvClp0Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanRcvClp0Cells.setStatus('current')
if mibBuilder.loadTexts: bbChanRcvClp0Cells.setDescription('CLP_0 Cells Received from broadband interface.')
bbChanRcvClp1Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanRcvClp1Cells.setStatus('current')
if mibBuilder.loadTexts: bbChanRcvClp1Cells.setDescription('CLP_1 Cells Received from broadband interface.')
bbChanNonConformCellsAtGcra1Policer = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanNonConformCellsAtGcra1Policer.setStatus('current')
if mibBuilder.loadTexts: bbChanNonConformCellsAtGcra1Policer.setDescription('This object provides a count of the nonconforming cells that are received at the first policing point(GCRA1 policer).')
bbChanNonConformCellsAtGcra2Policer = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanNonConformCellsAtGcra2Policer.setStatus('current')
if mibBuilder.loadTexts: bbChanNonConformCellsAtGcra2Policer.setDescription('This object provides a count of the nonconforming cells that are received at the second policing point(GCRA2 policer).')
bbChanRcvEOFCells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanRcvEOFCells.setStatus('current')
if mibBuilder.loadTexts: bbChanRcvEOFCells.setDescription('This object provides count of End Of Frame(EOF) cells that are received.')
bbChanDscdClp0Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanDscdClp0Cells.setStatus('current')
if mibBuilder.loadTexts: bbChanDscdClp0Cells.setDescription('This object provides a count of the CLP equal to 0 that are received and then discarded.')
bbChanDscdClp1Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanDscdClp1Cells.setStatus('current')
if mibBuilder.loadTexts: bbChanDscdClp1Cells.setDescription('This object provides a count of the CLP equal to 1 that are received and then discarded.')
bbChanRcvCellsSent = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanRcvCellsSent.setStatus('current')
if mibBuilder.loadTexts: bbChanRcvCellsSent.setDescription('Number of cells received from broadband interface and sent out of the Queue Engine(QE device).')
bbChanXmtClp0Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanXmtClp0Cells.setStatus('current')
if mibBuilder.loadTexts: bbChanXmtClp0Cells.setDescription('This object provides count of Number of cells with EFCI=0 that are transmitted.')
bbChanXmtClp1Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanXmtClp1Cells.setStatus('current')
if mibBuilder.loadTexts: bbChanXmtClp1Cells.setDescription('This object provides count of Number of cells with EFCI set that are transmitted.')
bbChanDscdClpZeroCellsToPort = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanDscdClpZeroCellsToPort.setStatus('current')
if mibBuilder.loadTexts: bbChanDscdClpZeroCellsToPort.setDescription('Number of CLP = 0 cells discarded before transmitted to broadband interface.')
bbChanDscdClpOneCellsToPort = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bbChanDscdClpOneCellsToPort.setStatus('current')
if mibBuilder.loadTexts: bbChanDscdClpOneCellsToPort.setDescription('Number of CLP = 1 cells discarded before transmitted to broadband interface.')
# Read-write "button" column: per its description, writing 2 (resetCounters)
# clears all counters; 1 (noAction) is a no-op.
bbChanCntClrButton = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("noAction", 1), ("resetCounters", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bbChanCntClrButton.setStatus('current')
if mibBuilder.loadTexts: bbChanCntClrButton.setDescription('Writing a value of 2 resets all the counters.')
# --- Conformance section (under this module's own arc ...351.150.36.2) -------
cwbAtmConnStatMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2))
cwbAtmConnStatMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 1))
cwbAtmConnStatMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 2))
cwbAtmConnStatCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 2, 1)).setObjects(("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "cwbAtmConnStatsGroup"))
# setStatus() returns the object on newer builders only, hence the gate.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cwbAtmConnStatCompliance = cwbAtmConnStatCompliance.setStatus('current')
if mibBuilder.loadTexts: cwbAtmConnStatCompliance.setDescription('The compliance statement for broadband interface Connection Statstics MIB.')
# Object group tying every column of bbChanCntGrpTable into the compliance.
cwbAtmConnStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 1, 1)).setObjects(("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanCntNum"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanRcvClp0Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanRcvClp1Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanNonConformCellsAtGcra1Policer"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanNonConformCellsAtGcra2Policer"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanRcvEOFCells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanDscdClp0Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanDscdClp1Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanRcvCellsSent"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanXmtClp0Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanXmtClp1Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanDscdClpZeroCellsToPort"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanDscdClpOneCellsToPort"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanCntClrButton"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cwbAtmConnStatsGroup = cwbAtmConnStatsGroup.setStatus('current')
if mibBuilder.loadTexts: cwbAtmConnStatsGroup.setDescription('The objects related to broadband interface connection statistics.')
# Publish every defined symbol so other MIB modules can importSymbols() them;
# PYSNMP_MODULE_ID marks the module-identity object for the loader.
mibBuilder.exportSymbols("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", bbChanRcvCellsSent=bbChanRcvCellsSent, cwbAtmConnStatMIBCompliances=cwbAtmConnStatMIBCompliances, bbChanCntGrpEntry=bbChanCntGrpEntry, bbChanDscdClp1Cells=bbChanDscdClp1Cells, bbChanDscdClpOneCellsToPort=bbChanDscdClpOneCellsToPort, bbChanRcvEOFCells=bbChanRcvEOFCells, bbChanRcvClp1Cells=bbChanRcvClp1Cells, bbChanDscdClp0Cells=bbChanDscdClp0Cells, bbChanCntGrpTable=bbChanCntGrpTable, bbChanNonConformCellsAtGcra2Policer=bbChanNonConformCellsAtGcra2Policer, cwbAtmConnStatsGroup=cwbAtmConnStatsGroup, bbChanXmtClp1Cells=bbChanXmtClp1Cells, bbChanNonConformCellsAtGcra1Policer=bbChanNonConformCellsAtGcra1Policer, bbChanCntClrButton=bbChanCntClrButton, cwbAtmConnStatMIBGroups=cwbAtmConnStatMIBGroups, PYSNMP_MODULE_ID=ciscoWanBbifAtmConnStatMIB, cwbAtmConnStatMIBConformance=cwbAtmConnStatMIBConformance, bbChanXmtClp0Cells=bbChanXmtClp0Cells, ciscoWanBbifAtmConnStatMIB=ciscoWanBbifAtmConnStatMIB, bbChanRcvClp0Cells=bbChanRcvClp0Cells, cwbAtmConnStatCompliance=cwbAtmConnStatCompliance, bbChanDscdClpZeroCellsToPort=bbChanDscdClpZeroCellsToPort, bbChanCntNum=bbChanCntNum)
| (object_identifier, integer, octet_string) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'Integer', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_size_constraint, constraints_union, single_value_constraint, constraints_intersection, value_range_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueSizeConstraint', 'ConstraintsUnion', 'SingleValueConstraint', 'ConstraintsIntersection', 'ValueRangeConstraint')
(bb_chan_cnt_grp,) = mibBuilder.importSymbols('BASIS-MIB', 'bbChanCntGrp')
(cisco_wan,) = mibBuilder.importSymbols('CISCOWAN-SMI', 'ciscoWan')
(object_group, notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'ObjectGroup', 'NotificationGroup', 'ModuleCompliance')
(integer32, module_identity, gauge32, time_ticks, notification_type, mib_scalar, mib_table, mib_table_row, mib_table_column, unsigned32, counter32, ip_address, iso, counter64, object_identity, mib_identifier, bits) = mibBuilder.importSymbols('SNMPv2-SMI', 'Integer32', 'ModuleIdentity', 'Gauge32', 'TimeTicks', 'NotificationType', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Unsigned32', 'Counter32', 'IpAddress', 'iso', 'Counter64', 'ObjectIdentity', 'MibIdentifier', 'Bits')
(display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention')
cisco_wan_bbif_atm_conn_stat_mib = module_identity((1, 3, 6, 1, 4, 1, 351, 150, 36))
ciscoWanBbifAtmConnStatMIB.setRevisions(('2002-10-18 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts:
ciscoWanBbifAtmConnStatMIB.setRevisionsDescriptions(('Initial version of the MIB. The content of this MIB was originally available in CISCO-WAN-AXIPOP-MIB defined using SMIv1. The applicable objects from CISCO-WAN-AXIPOP-MIB are defined using SMIv2 in this MIB. Also the descriptions of some of the objects have been modified.',))
if mibBuilder.loadTexts:
ciscoWanBbifAtmConnStatMIB.setLastUpdated('200210180000Z')
if mibBuilder.loadTexts:
ciscoWanBbifAtmConnStatMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts:
ciscoWanBbifAtmConnStatMIB.setContactInfo(' Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: cs-wanatm@cisco.com')
if mibBuilder.loadTexts:
ciscoWanBbifAtmConnStatMIB.setDescription('This MIB module contains ATM Connection related real time statistical counter objects. The ATM connections are applicable to PXM1 service module. in MGX82xx products. Logical Channel refers to ATM Connection(or connection end point). Terminologies used: broadband interface - Logical port on PXM1. SCR - Sustained Cell Rate BT - Burst Tolerance PCR - Peak Cell Rate CDVT - Cell Delay Transfer Variance Tolerance GCRA - The Generic Cell Rate Algorithm(GCRA) is used to define comformance with repect to the traffic contract. For each cell arrival, the GCRA determines whether the cell conforms to the traffic contract of the connection. GCRA1 - GCRA Bucket 1. For CBR and UBR connections, only GCRA-1 is needed to check for PCR, CDVT Conformance. GCRA2 - GCRA Bucket 2. For VBR and ABR Connections, GCRA-1 is needed to check for PCR, CDVT conformance and GCRA-2 for SCR, BT Conformance. EFCI - Explicit Forward Congestion Indication. QE - Queue Engine(QE). This ASIC Provides the traffic management functions related to VC queues, QoS queues and interface queues. This management function is performed for both directions of traffic(ingress, egress). ingress - ingress traffic is defined as data flowing toward the switch fabric. Ingress data can come from either the Service Modules through the backplane or the PXM1 Uplink back card. egress - Egress traffic is defined as data flowing away from the switch fabric.')
bb_chan_cnt_grp_table = mib_table((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1))
if mibBuilder.loadTexts:
bbChanCntGrpTable.setStatus('current')
if mibBuilder.loadTexts:
bbChanCntGrpTable.setDescription('This is the broadband channel statistics table.')
bb_chan_cnt_grp_entry = mib_table_row((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1)).setIndexNames((0, 'CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanCntNum'))
if mibBuilder.loadTexts:
bbChanCntGrpEntry.setStatus('current')
if mibBuilder.loadTexts:
bbChanCntGrpEntry.setDescription('An entry for statistics on logical channel. Each entry contains statistics for an ATM connection on broadband interface.')
bb_chan_cnt_num = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(16, 4111))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanCntNum.setReference('bbChanCnfNum is defined in CISCO-WAN-BBIF-ATM-CONN-MIB.')
if mibBuilder.loadTexts:
bbChanCntNum.setStatus('current')
if mibBuilder.loadTexts:
bbChanCntNum.setDescription("This object identifies the logical channel number. The value for this object must be same as the value of 'bbChanCnfNum' from bbChanCnfGrpTable.")
bb_chan_rcv_clp0_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanRcvClp0Cells.setStatus('current')
if mibBuilder.loadTexts:
bbChanRcvClp0Cells.setDescription('CLP_0 Cells Received from broadband interface.')
bb_chan_rcv_clp1_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanRcvClp1Cells.setStatus('current')
if mibBuilder.loadTexts:
bbChanRcvClp1Cells.setDescription('CLP_1 Cells Received from broadband interface.')
bb_chan_non_conform_cells_at_gcra1_policer = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanNonConformCellsAtGcra1Policer.setStatus('current')
if mibBuilder.loadTexts:
bbChanNonConformCellsAtGcra1Policer.setDescription('This object provides a count of the nonconforming cells that are received at the first policing point(GCRA1 policer).')
bb_chan_non_conform_cells_at_gcra2_policer = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 5), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanNonConformCellsAtGcra2Policer.setStatus('current')
if mibBuilder.loadTexts:
bbChanNonConformCellsAtGcra2Policer.setDescription('This object provides a count of the nonconforming cells that are received at the second policing point(GCRA2 policer).')
bb_chan_rcv_eof_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanRcvEOFCells.setStatus('current')
if mibBuilder.loadTexts:
bbChanRcvEOFCells.setDescription('This object provides count of End Of Frame(EOF) cells that are received.')
bb_chan_dscd_clp0_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 7), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanDscdClp0Cells.setStatus('current')
if mibBuilder.loadTexts:
bbChanDscdClp0Cells.setDescription('This object provides a count of the CLP equal to 0 that are received and then discarded.')
bb_chan_dscd_clp1_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 8), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanDscdClp1Cells.setStatus('current')
if mibBuilder.loadTexts:
bbChanDscdClp1Cells.setDescription('This object provides a count of the CLP equal to 1 that are received and then discarded.')
bb_chan_rcv_cells_sent = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 9), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanRcvCellsSent.setStatus('current')
if mibBuilder.loadTexts:
bbChanRcvCellsSent.setDescription('Number of cells received from broadband interface and sent out of the Queue Engine(QE device).')
bb_chan_xmt_clp0_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 10), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanXmtClp0Cells.setStatus('current')
if mibBuilder.loadTexts:
bbChanXmtClp0Cells.setDescription('This object provides count of Number of cells with EFCI=0 that are transmitted.')
bb_chan_xmt_clp1_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 11), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanXmtClp1Cells.setStatus('current')
if mibBuilder.loadTexts:
bbChanXmtClp1Cells.setDescription('This object provides count of Number of cells with EFCI set that are transmitted.')
bb_chan_dscd_clp_zero_cells_to_port = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 12), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanDscdClpZeroCellsToPort.setStatus('current')
if mibBuilder.loadTexts:
bbChanDscdClpZeroCellsToPort.setDescription('Number of CLP = 0 cells discarded before transmitted to broadband interface.')
bb_chan_dscd_clp_one_cells_to_port = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 13), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bbChanDscdClpOneCellsToPort.setStatus('current')
if mibBuilder.loadTexts:
bbChanDscdClpOneCellsToPort.setDescription('Number of CLP = 1 cells discarded before transmitted to broadband interface.')
bb_chan_cnt_clr_button = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 14), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('noAction', 1), ('resetCounters', 2)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
bbChanCntClrButton.setStatus('current')
if mibBuilder.loadTexts:
bbChanCntClrButton.setDescription('Writing a value of 2 resets all the counters.')
cwb_atm_conn_stat_mib_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2))
cwb_atm_conn_stat_mib_groups = mib_identifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 1))
cwb_atm_conn_stat_mib_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 2))
cwb_atm_conn_stat_compliance = module_compliance((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 2, 1)).setObjects(('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'cwbAtmConnStatsGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cwb_atm_conn_stat_compliance = cwbAtmConnStatCompliance.setStatus('current')
if mibBuilder.loadTexts:
cwbAtmConnStatCompliance.setDescription('The compliance statement for broadband interface Connection Statstics MIB.')
cwb_atm_conn_stats_group = object_group((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 1, 1)).setObjects(('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanCntNum'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanRcvClp0Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanRcvClp1Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanNonConformCellsAtGcra1Policer'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanNonConformCellsAtGcra2Policer'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanRcvEOFCells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanDscdClp0Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanDscdClp1Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanRcvCellsSent'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanXmtClp0Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanXmtClp1Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanDscdClpZeroCellsToPort'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanDscdClpOneCellsToPort'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanCntClrButton'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cwb_atm_conn_stats_group = cwbAtmConnStatsGroup.setStatus('current')
if mibBuilder.loadTexts:
cwbAtmConnStatsGroup.setDescription('The objects related to broadband interface connection statistics.')
mibBuilder.exportSymbols('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', bbChanRcvCellsSent=bbChanRcvCellsSent, cwbAtmConnStatMIBCompliances=cwbAtmConnStatMIBCompliances, bbChanCntGrpEntry=bbChanCntGrpEntry, bbChanDscdClp1Cells=bbChanDscdClp1Cells, bbChanDscdClpOneCellsToPort=bbChanDscdClpOneCellsToPort, bbChanRcvEOFCells=bbChanRcvEOFCells, bbChanRcvClp1Cells=bbChanRcvClp1Cells, bbChanDscdClp0Cells=bbChanDscdClp0Cells, bbChanCntGrpTable=bbChanCntGrpTable, bbChanNonConformCellsAtGcra2Policer=bbChanNonConformCellsAtGcra2Policer, cwbAtmConnStatsGroup=cwbAtmConnStatsGroup, bbChanXmtClp1Cells=bbChanXmtClp1Cells, bbChanNonConformCellsAtGcra1Policer=bbChanNonConformCellsAtGcra1Policer, bbChanCntClrButton=bbChanCntClrButton, cwbAtmConnStatMIBGroups=cwbAtmConnStatMIBGroups, PYSNMP_MODULE_ID=ciscoWanBbifAtmConnStatMIB, cwbAtmConnStatMIBConformance=cwbAtmConnStatMIBConformance, bbChanXmtClp0Cells=bbChanXmtClp0Cells, ciscoWanBbifAtmConnStatMIB=ciscoWanBbifAtmConnStatMIB, bbChanRcvClp0Cells=bbChanRcvClp0Cells, cwbAtmConnStatCompliance=cwbAtmConnStatCompliance, bbChanDscdClpZeroCellsToPort=bbChanDscdClpZeroCellsToPort, bbChanCntNum=bbChanCntNum) |
"""
Copyright (c) 2019 Fraunhofer Institute for Manufacturing Engineering and Automation (IPA)
Authors: Daniel Stock, Matthias Stoehr
Licensed under the Apache License, Version 2.0
See the file "LICENSE" for the full license governing this code.
"""
__version__ = "1.0.8"
# from .ComplexDataFormat import ComplexDataFormat
# from .DataFormat import DataFormat
# from .DataType import convertDataType, getDataType, DataType
# from .Event import Event
# from .Function import Function
# from .MsbClient import MsbClient
| """
Copyright (c) 2019 Fraunhofer Institute for Manufacturing Engineering and Automation (IPA)
Authors: Daniel Stock, Matthias Stoehr
Licensed under the Apache License, Version 2.0
See the file "LICENSE" for the full license governing this code.
"""
__version__ = '1.0.8' |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class PyTensorboard(Package):
    """TensorBoard is a suite of web applications for
    inspecting and understanding your TensorFlow runs and
    graphs."""
    homepage = "https://github.com/tensorflow/tensorboard"
    url = "https://github.com/tensorflow/tensorboard/archive/2.2.0.tar.gz"
    maintainers = ['aweits']
    # Known-good releases with their tarball checksums.
    version('2.3.0', sha256='947a58702c2841eb4559637dbf8639633f79de9a0f422be9737f3563a1725440')
    version('2.2.0', sha256='d0dfbf0e4b3b5ebbc3fafa6d281d4b9aa5478eac6bac3330652ab6674278ab77')
    # Build and runtime dependencies; several are pinned per TensorBoard release
    # (grpcio, tensorflow-estimator) via the when= clauses.
    depends_on('python@2.7:2.8,3.2:', type=('build', 'run'))
    depends_on('bazel@0.26.1:', type='build')
    depends_on('py-setuptools@41.0.0:', type=('build', 'run'))
    depends_on('py-absl-py@0.4:', type=('build', 'run'))
    depends_on('py-markdown@2.6.8:', type=('build', 'run'))
    depends_on('py-requests@2.21.0:2.999', type=('build', 'run'))
    depends_on('py-futures@3.1.1:', type=('build', 'run'), when='^python@:2')
    depends_on('py-grpcio@1.24.3:', type=('build', 'run'), when='@2.3.0')
    depends_on('py-grpcio@1.23.3:', type=('build', 'run'), when='@2.2.0')
    depends_on('py-google-auth@1.6.3:1.99.99', type=('build', 'run'))
    depends_on('py-numpy@1.12.0:', type=('build', 'run'))
    depends_on('py-protobuf@3.6.0:', type=('build', 'run'))
    depends_on('py-six@1.10.0:', type=('build', 'run'))
    depends_on('py-werkzeug@0.11.15:', type=('build', 'run'))
    depends_on('py-wheel', type='build')
    depends_on('py-wheel@0.26:', type='build', when='@0.6: ^python@3:')
    depends_on('py-google-auth-oauthlib@0.4.1:0.4.999', type=('build', 'run'))
    depends_on('py-tensorboard-plugin-wit@1.6.0:', type=('build', 'run'), when='@2.2.0:')
    depends_on('py-tensorflow-estimator@2.2.0', type='run', when='@2.2.0')
    depends_on('py-tensorflow-estimator@2.3.0', type='run', when='@2.3.0')
    extends('python')
    patch('tboard_shellenv.patch')
    phases = ['configure', 'build', 'install']
    def patch(self):
        # Append extra bazel settings to .bazelrc, anchored on the existing
        # angular_ivy_enabled line; exposes Spack's PYTHONPATH to bazel actions.
        # NOTE(review): this method shares the name of the patch() directive
        # above — presumably Spack applies the patch file first, then calls
        # this method; confirm for the Spack version in use.
        filter_file('build --define=angular_ivy_enabled=True',
                    'build --define=angular_ivy_enabled=True\n'
                    'build --distinct_host_configuration=false\n'
                    'build --action_env=PYTHONPATH="{0}"\n'.format(
                        env['PYTHONPATH']),
                    '.bazelrc')
    def setup_build_environment(self, env):
        # Bazel writes its output tree under TEST_TMPDIR.
        # NOTE(review): '/tmp/spack/tb' is a fixed shared path — could collide
        # across concurrent/multi-user builds; confirm this is acceptable.
        tmp_path = '/tmp/spack/tb'
        mkdirp(tmp_path)
        env.set('TEST_TMPDIR', tmp_path)
    def configure(self, spec, prefix):
        # Rewrite build_pip_package.sh so it stages into our spack-build dir
        # and strips the pip/virtualenv bootstrap and cleanup trap, leaving
        # only the wheel-building steps driven by Spack's python.
        builddir = join_path(self.stage.source_path, 'spack-build')
        mkdirp(builddir)
        filter_file(r'workdir=.*',
                    'workdir="{0}"'.format(builddir),
                    'tensorboard/pip_package/build_pip_package.sh')
        filter_file(r'pip install .*',
                    '',
                    'tensorboard/pip_package/build_pip_package.sh')
        filter_file(r'command \-v .*',
                    '',
                    'tensorboard/pip_package/build_pip_package.sh')
        filter_file(r'virtualenv .*',
                    '',
                    'tensorboard/pip_package/build_pip_package.sh')
        filter_file('trap cleanup EXIT',
                    '',
                    'tensorboard/pip_package/build_pip_package.sh')
        filter_file('unset PYTHON_HOME',
                    'export PYTHONPATH="{0}"'.format(env['PYTHONPATH']),
                    'tensorboard/pip_package/build_pip_package.sh')
        filter_file('python setup.py',
                    '{0} setup.py'.format(spec['python'].command.path),
                    'tensorboard/pip_package/build_pip_package.sh')
    def build(self, spec, prefix):
        # Run bazel with user/system rc files disabled so only the patched
        # .bazelrc applies; output root goes under TEST_TMPDIR.
        tmp_path = env['TEST_TMPDIR']
        bazel('--nohome_rc',
              '--nosystem_rc',
              '--output_user_root=' + tmp_path,
              'build',
              # watch https://github.com/bazelbuild/bazel/issues/7254
              '--define=EXECUTOR=remote',
              '--verbose_failures',
              '--spawn_strategy=local',
              '--subcommands=pretty_print',
              '//tensorboard/pip_package')
    def install(self, spec, prefix):
        # Install the staged package from spack-build into the Spack prefix.
        with working_dir('spack-build'):
            setup_py('install', '--prefix={0}'.format(prefix),
                     '--single-version-externally-managed', '--root=/')
| class Pytensorboard(Package):
"""TensorBoard is a suite of web applications for
inspecting and understanding your TensorFlow runs and
graphs."""
homepage = 'https://github.com/tensorflow/tensorboard'
url = 'https://github.com/tensorflow/tensorboard/archive/2.2.0.tar.gz'
maintainers = ['aweits']
version('2.3.0', sha256='947a58702c2841eb4559637dbf8639633f79de9a0f422be9737f3563a1725440')
version('2.2.0', sha256='d0dfbf0e4b3b5ebbc3fafa6d281d4b9aa5478eac6bac3330652ab6674278ab77')
depends_on('python@2.7:2.8,3.2:', type=('build', 'run'))
depends_on('bazel@0.26.1:', type='build')
depends_on('py-setuptools@41.0.0:', type=('build', 'run'))
depends_on('py-absl-py@0.4:', type=('build', 'run'))
depends_on('py-markdown@2.6.8:', type=('build', 'run'))
depends_on('py-requests@2.21.0:2.999', type=('build', 'run'))
depends_on('py-futures@3.1.1:', type=('build', 'run'), when='^python@:2')
depends_on('py-grpcio@1.24.3:', type=('build', 'run'), when='@2.3.0')
depends_on('py-grpcio@1.23.3:', type=('build', 'run'), when='@2.2.0')
depends_on('py-google-auth@1.6.3:1.99.99', type=('build', 'run'))
depends_on('py-numpy@1.12.0:', type=('build', 'run'))
depends_on('py-protobuf@3.6.0:', type=('build', 'run'))
depends_on('py-six@1.10.0:', type=('build', 'run'))
depends_on('py-werkzeug@0.11.15:', type=('build', 'run'))
depends_on('py-wheel', type='build')
depends_on('py-wheel@0.26:', type='build', when='@0.6: ^python@3:')
depends_on('py-google-auth-oauthlib@0.4.1:0.4.999', type=('build', 'run'))
depends_on('py-tensorboard-plugin-wit@1.6.0:', type=('build', 'run'), when='@2.2.0:')
depends_on('py-tensorflow-estimator@2.2.0', type='run', when='@2.2.0')
depends_on('py-tensorflow-estimator@2.3.0', type='run', when='@2.3.0')
extends('python')
patch('tboard_shellenv.patch')
phases = ['configure', 'build', 'install']
def patch(self):
filter_file('build --define=angular_ivy_enabled=True', 'build --define=angular_ivy_enabled=True\nbuild --distinct_host_configuration=false\nbuild --action_env=PYTHONPATH="{0}"\n'.format(env['PYTHONPATH']), '.bazelrc')
def setup_build_environment(self, env):
tmp_path = '/tmp/spack/tb'
mkdirp(tmp_path)
env.set('TEST_TMPDIR', tmp_path)
def configure(self, spec, prefix):
builddir = join_path(self.stage.source_path, 'spack-build')
mkdirp(builddir)
filter_file('workdir=.*', 'workdir="{0}"'.format(builddir), 'tensorboard/pip_package/build_pip_package.sh')
filter_file('pip install .*', '', 'tensorboard/pip_package/build_pip_package.sh')
filter_file('command \\-v .*', '', 'tensorboard/pip_package/build_pip_package.sh')
filter_file('virtualenv .*', '', 'tensorboard/pip_package/build_pip_package.sh')
filter_file('trap cleanup EXIT', '', 'tensorboard/pip_package/build_pip_package.sh')
filter_file('unset PYTHON_HOME', 'export PYTHONPATH="{0}"'.format(env['PYTHONPATH']), 'tensorboard/pip_package/build_pip_package.sh')
filter_file('python setup.py', '{0} setup.py'.format(spec['python'].command.path), 'tensorboard/pip_package/build_pip_package.sh')
def build(self, spec, prefix):
tmp_path = env['TEST_TMPDIR']
bazel('--nohome_rc', '--nosystem_rc', '--output_user_root=' + tmp_path, 'build', '--define=EXECUTOR=remote', '--verbose_failures', '--spawn_strategy=local', '--subcommands=pretty_print', '//tensorboard/pip_package')
def install(self, spec, prefix):
with working_dir('spack-build'):
setup_py('install', '--prefix={0}'.format(prefix), '--single-version-externally-managed', '--root=/') |
# -*- encoding: utf-8 -*-
{
'name': 'Ticket Viewer',
'version': '1.0',
'author': 'Damien Bouvy',
'website': 'https://www.damienbouvy.be',
'summary': 'Demo a WebApp to view tickets online',
'depends': ['web', 'base_setup', 'bus'],
'description': """
Ticket Viewer Demo
==================
View & submit support tickets online.
Odoo Experience 2017 demo of the Odoo Javascript Framework.
""",
"data": [
"views/ticket_views.xml",
"views/ticket_templates.xml",
"data/ir.model.access.csv",
"data/ticket_security.xml",
],
"demo": [
"demo/ticket_demo.xml",
],
'installable': True,
'application': True,
'license': 'LGPL-3',
}
| {'name': 'Ticket Viewer', 'version': '1.0', 'author': 'Damien Bouvy', 'website': 'https://www.damienbouvy.be', 'summary': 'Demo a WebApp to view tickets online', 'depends': ['web', 'base_setup', 'bus'], 'description': '\nTicket Viewer Demo\n==================\nView & submit support tickets online.\nOdoo Experience 2017 demo of the Odoo Javascript Framework.\n', 'data': ['views/ticket_views.xml', 'views/ticket_templates.xml', 'data/ir.model.access.csv', 'data/ticket_security.xml'], 'demo': ['demo/ticket_demo.xml'], 'installable': True, 'application': True, 'license': 'LGPL-3'} |
# Calculates the amount of each nucleotide on a .fasta sequence
sequence = open('/mnt/Data/Documents/sequence.fasta','r').read().replace('\n', '')
nucleotides = {
'A': 0,
'T': 0,
'C': 0,
'G': 0
}
i=0
while i < len(sequence):
if sequence[i] not in nucleotides:
print('Wrong character found: '+sequence[i])
else:
nucleotides[sequence[i]]+=1
i+=1
for nucleotide, amount in nucleotides.items():
print("The nucleotide "+nucleotide+" occurs "+str(amount)+" times.") | sequence = open('/mnt/Data/Documents/sequence.fasta', 'r').read().replace('\n', '')
nucleotides = {'A': 0, 'T': 0, 'C': 0, 'G': 0}
i = 0
while i < len(sequence):
if sequence[i] not in nucleotides:
print('Wrong character found: ' + sequence[i])
else:
nucleotides[sequence[i]] += 1
i += 1
for (nucleotide, amount) in nucleotides.items():
print('The nucleotide ' + nucleotide + ' occurs ' + str(amount) + ' times.') |
"""
Datos de entrada
dinero-->d-->int
porcentaje ganancia-->p-->int
Datos de salida
total-->t-->int
"""
#Entradas
d=int(input("Ingrese el valor: "))
#Caja negra
p=d*0.02
t=p+d
#Salidas
print(f"Su saldo pasado el mes es: {int(t)}")
| """
Datos de entrada
dinero-->d-->int
porcentaje ganancia-->p-->int
Datos de salida
total-->t-->int
"""
d = int(input('Ingrese el valor: '))
p = d * 0.02
t = p + d
print(f'Su saldo pasado el mes es: {int(t)}') |
class Ability:
def __init__(self, resource_cost, cast_time=0, name=""):
self.name = name
self.cost = resource_cost
self.cast_time = cast_time
class HealingAbility(Ability):
def __init__(self, healing_applied, mana_cost, **kwargs):
super(HealingAbility, self).__init__(mana_cost, **kwargs)
self.healing_applied = healing_applied
@property
def healing(self):
return self.healing_applied
class RestoringAbility(Ability):
def __init__(self, mana_restored, mana_cost, **kwargs):
super(RestoringAbility, self).__init__(mana_cost, **kwargs)
self.mana_restored = mana_restored
@property
def restored(self):
return self.mana_restored
class DamagingAbility(Ability):
def __init__(self, damage_applied, mana_cost, **kwargs):
super(DamagingAbility, self).__init__(mana_cost, **kwargs)
self.damage_applied = damage_applied
@property
def damage(self):
return self.damage_applied | class Ability:
def __init__(self, resource_cost, cast_time=0, name=''):
self.name = name
self.cost = resource_cost
self.cast_time = cast_time
class Healingability(Ability):
def __init__(self, healing_applied, mana_cost, **kwargs):
super(HealingAbility, self).__init__(mana_cost, **kwargs)
self.healing_applied = healing_applied
@property
def healing(self):
return self.healing_applied
class Restoringability(Ability):
def __init__(self, mana_restored, mana_cost, **kwargs):
super(RestoringAbility, self).__init__(mana_cost, **kwargs)
self.mana_restored = mana_restored
@property
def restored(self):
return self.mana_restored
class Damagingability(Ability):
def __init__(self, damage_applied, mana_cost, **kwargs):
super(DamagingAbility, self).__init__(mana_cost, **kwargs)
self.damage_applied = damage_applied
@property
def damage(self):
return self.damage_applied |
class Logger:
"""Logger class, better console output. Do not use as instance!"""
colorcodes = { #ansi color codes
"black" : "\u001b[1m\u001b[30m",
"red" : "\u001b[1m\u001b[31m",
"green" : "\u001b[1m\u001b[32m",
"yellow" : "\u001b[1m\u001b[33m",
"blue" : "\u001b[1m\u001b[34m",
"magenta" : "\u001b[1m\u001b[35m",
"cyan" : "\u001b[1m\u001b[36m",
"white" : "\u001b[1m\u001b[37m"
}
@staticmethod
def log(*args, color="white", sep="", end="\n"):
"""
log any number of objects to console
*args: object(s) to log
color= color of text, defaults to "white"
sep= string to insert between objects, defaults to ""
end= string to insert at end, defaults to "\\n"
"""
print(Logger.colorcodes[color], end="")
for i in range(0, len(args)):
if i == len(args) - 1:
print(args[i], end=end)
else:
print(args[i], end=sep)
print(Logger.colorcodes["white"], end="")
@staticmethod
def objPrint(obj, compact=False):
"""
prints snippet containing instance data for an object
set compact=True for no newlines
"""
print(Logger.colorcodes["magenta"], end="")
if compact:
print(obj.__class__.__name__," 0x",id(obj)," {",sep="",end="")
index = 0 #use this to tell when we are on the last key so we don"t put that last comma
for key in obj.__dict__.keys():
if key != "__dict__":
if index == len(obj.__dict__.keys()) - 1: #-1 to not count the __dict__ key thing
print("\"",key,"\":\"",obj.__dict__[key],"\"}\n",sep="")
else:
print("\"",key,"\":\"",obj.__dict__[key],"\",",sep="",end="")
index += 1
else:
print(obj.__class__.__name__," 0x",id(obj),"\n{",sep="")
index = 0 #use this to tell when we are on the last key so we don"t put that last comma
for key in obj.__dict__.keys():
if key != "__dict__":
if index == len(obj.__dict__.keys()) - 1: #-1 to not count the __dict__ key thing
print("\"",key,"\":\"",obj.__dict__[key],"\"\n}\n",sep="")
else:
print("\"",key,"\":\"",obj.__dict__[key],"\",",sep="")
index += 1
print(Logger.colorcodes["white"], end="")
| class Logger:
"""Logger class, better console output. Do not use as instance!"""
colorcodes = {'black': '\x1b[1m\x1b[30m', 'red': '\x1b[1m\x1b[31m', 'green': '\x1b[1m\x1b[32m', 'yellow': '\x1b[1m\x1b[33m', 'blue': '\x1b[1m\x1b[34m', 'magenta': '\x1b[1m\x1b[35m', 'cyan': '\x1b[1m\x1b[36m', 'white': '\x1b[1m\x1b[37m'}
@staticmethod
def log(*args, color='white', sep='', end='\n'):
"""
log any number of objects to console
*args: object(s) to log
color= color of text, defaults to "white"
sep= string to insert between objects, defaults to ""
end= string to insert at end, defaults to "\\n"
"""
print(Logger.colorcodes[color], end='')
for i in range(0, len(args)):
if i == len(args) - 1:
print(args[i], end=end)
else:
print(args[i], end=sep)
print(Logger.colorcodes['white'], end='')
@staticmethod
def obj_print(obj, compact=False):
"""
prints snippet containing instance data for an object
set compact=True for no newlines
"""
print(Logger.colorcodes['magenta'], end='')
if compact:
print(obj.__class__.__name__, ' 0x', id(obj), ' {', sep='', end='')
index = 0
for key in obj.__dict__.keys():
if key != '__dict__':
if index == len(obj.__dict__.keys()) - 1:
print('"', key, '":"', obj.__dict__[key], '"}\n', sep='')
else:
print('"', key, '":"', obj.__dict__[key], '",', sep='', end='')
index += 1
else:
print(obj.__class__.__name__, ' 0x', id(obj), '\n{', sep='')
index = 0
for key in obj.__dict__.keys():
if key != '__dict__':
if index == len(obj.__dict__.keys()) - 1:
print('"', key, '":"', obj.__dict__[key], '"\n}\n', sep='')
else:
print('"', key, '":"', obj.__dict__[key], '",', sep='')
index += 1
print(Logger.colorcodes['white'], end='') |
VERSION_MAJOR = 1
VERSION_MINOR = 0
VERSION_PATCH = 0
VERSION_EXTRA = 'a0'
__version__ = "{}.{}.{}".format(VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
__loose_version__ = "{}.{}".format(VERSION_MAJOR, VERSION_MINOR)
if VERSION_EXTRA:
__version__ = "{}-{}".format(__version__, VERSION_EXTRA)
__loose_version__ = "{}-{}".format(__loose_version__, VERSION_EXTRA)
__version_info__ = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH, float('inf'))
else:
__version_info__ = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
__all__ = ['{}'.format(__version__), '{}'.format(__version_info__), '{}'.format(__loose_version__)]
| version_major = 1
version_minor = 0
version_patch = 0
version_extra = 'a0'
__version__ = '{}.{}.{}'.format(VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
__loose_version__ = '{}.{}'.format(VERSION_MAJOR, VERSION_MINOR)
if VERSION_EXTRA:
__version__ = '{}-{}'.format(__version__, VERSION_EXTRA)
__loose_version__ = '{}-{}'.format(__loose_version__, VERSION_EXTRA)
__version_info__ = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH, float('inf'))
else:
__version_info__ = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
__all__ = ['{}'.format(__version__), '{}'.format(__version_info__), '{}'.format(__loose_version__)] |
class ToArmCoord:
"""
Convert from world coordinate (x, y, z)
to arm coordinate (x, -z, y)
"""
@staticmethod
def convert(worldCoord):
"""
arg:
worldCoord: [x, y, z]
An array of 3 containing the 3 world coordinate.
"""
return [worldCoord[0], -worldCoord[2], worldCoord[1]]
class Func:
@staticmethod
def getValue(positionSensorList):
psValue = []
for i in positionSensorList:
psValue.append(i.getValue())
return psValue
@staticmethod
def get_All_motors(robot):
"""
Get 7 motors from the robot model
"""
motorList = []
for i in range(7):
motorName = 'motor' + str(i + 1)
motor = robot.getDevice(motorName) # Get the motor handle #positionSensor1
motor.setPosition(float('inf')) # Set starting position
motor.setVelocity(0.0) # Zero out starting velocity
motorList.append(motor)
return motorList
@staticmethod
def get_All_positionSensors(robot, timestep):
"""
Get 7 position sensors from the robot model
"""
positionSensorList = []
for i in range(7):
positionSensorName = 'positionSensor' + str(i+1)
positionSensor = robot.getDevice(positionSensorName)
positionSensor.enable(timestep)
positionSensorList.append(positionSensor)
return positionSensorList
| class Toarmcoord:
"""
Convert from world coordinate (x, y, z)
to arm coordinate (x, -z, y)
"""
@staticmethod
def convert(worldCoord):
"""
arg:
worldCoord: [x, y, z]
An array of 3 containing the 3 world coordinate.
"""
return [worldCoord[0], -worldCoord[2], worldCoord[1]]
class Func:
@staticmethod
def get_value(positionSensorList):
ps_value = []
for i in positionSensorList:
psValue.append(i.getValue())
return psValue
@staticmethod
def get__all_motors(robot):
"""
Get 7 motors from the robot model
"""
motor_list = []
for i in range(7):
motor_name = 'motor' + str(i + 1)
motor = robot.getDevice(motorName)
motor.setPosition(float('inf'))
motor.setVelocity(0.0)
motorList.append(motor)
return motorList
@staticmethod
def get__all_position_sensors(robot, timestep):
"""
Get 7 position sensors from the robot model
"""
position_sensor_list = []
for i in range(7):
position_sensor_name = 'positionSensor' + str(i + 1)
position_sensor = robot.getDevice(positionSensorName)
positionSensor.enable(timestep)
positionSensorList.append(positionSensor)
return positionSensorList |
i = 0
o = 10
while i < 9:
print(f"Contando!\n"
f"I: {i}\n"
f"O: {o}")
i+= 1
o -= 1
print("Terminada a contagem") | i = 0
o = 10
while i < 9:
print(f'Contando!\nI: {i}\nO: {o}')
i += 1
o -= 1
print('Terminada a contagem') |
def human_to_mb(s):
"""Translates human-readable strings like '10G' to numeric
megabytes"""
md = dict(M=1, G=1024, T=1024 * 1024, P=1024 * 1024 * 1024)
return _human_to(s, md)
def human_to_gb(s):
md = dict(M=1 / 1024, G=1, T=1024, P=1024 * 1024)
return _human_to(s, md)
def _human_to(s, md):
if len(s) == 0:
raise Exception("unexpected empty string")
suffix = s[-1].upper()
if suffix.isalpha():
return int(float(s[:-1]) * md[suffix])
else:
return int(s)
def mb_to_human(num):
"""Translates float number of bytes into human readable strings."""
suffixes = ['M', 'G', 'T', 'P']
return _to_human(num, suffixes)
def gb_to_human(num):
"""Translates float number of gigabytes into human readable strings."""
suffixes = ['G', 'T', 'P']
return _to_human(num, suffixes)
def _to_human(num, suffixes):
if num == 0:
return '0B'
i = 0
while num >= 1024 and i < len(suffixes) - 1:
num /= 1024
i += 1
return "{:d}{}".format(num, suffixes[i])
| def human_to_mb(s):
"""Translates human-readable strings like '10G' to numeric
megabytes"""
md = dict(M=1, G=1024, T=1024 * 1024, P=1024 * 1024 * 1024)
return _human_to(s, md)
def human_to_gb(s):
md = dict(M=1 / 1024, G=1, T=1024, P=1024 * 1024)
return _human_to(s, md)
def _human_to(s, md):
if len(s) == 0:
raise exception('unexpected empty string')
suffix = s[-1].upper()
if suffix.isalpha():
return int(float(s[:-1]) * md[suffix])
else:
return int(s)
def mb_to_human(num):
"""Translates float number of bytes into human readable strings."""
suffixes = ['M', 'G', 'T', 'P']
return _to_human(num, suffixes)
def gb_to_human(num):
"""Translates float number of gigabytes into human readable strings."""
suffixes = ['G', 'T', 'P']
return _to_human(num, suffixes)
def _to_human(num, suffixes):
if num == 0:
return '0B'
i = 0
while num >= 1024 and i < len(suffixes) - 1:
num /= 1024
i += 1
return '{:d}{}'.format(num, suffixes[i]) |
class GeoError(Exception):
pass
class NotFound(Exception):
pass
class LoginError(Exception):
pass
class MSLClientError(Exception):
pass
class DecryptionError(Exception):
pass
class InvalidProfile(Exception):
pass
class Denied(Exception):
pass
class NetflixStatusError(Exception):
pass | class Geoerror(Exception):
pass
class Notfound(Exception):
pass
class Loginerror(Exception):
pass
class Mslclienterror(Exception):
pass
class Decryptionerror(Exception):
pass
class Invalidprofile(Exception):
pass
class Denied(Exception):
pass
class Netflixstatuserror(Exception):
pass |
x, y = 0, 0
s = 100
# cyan
cmykFill(1, 0, 0, 0)
rect(x, y, s, s)
x += s
# magenta
cmykFill(0, 1, 0, 0)
rect(x, y, s, s)
x += s
# yellow
cmykFill(0, 0, 1, 0)
rect(x, y, s, s)
x += s
# black
cmykFill(0, 0, 0, 1)
rect(x, y, s, s)
| (x, y) = (0, 0)
s = 100
cmyk_fill(1, 0, 0, 0)
rect(x, y, s, s)
x += s
cmyk_fill(0, 1, 0, 0)
rect(x, y, s, s)
x += s
cmyk_fill(0, 0, 1, 0)
rect(x, y, s, s)
x += s
cmyk_fill(0, 0, 0, 1)
rect(x, y, s, s) |
# Config for Notebook Server
# base URL of server
BASE_URL = 'http://ipython.local-server.org'
# Input for Notebook server password and port setup
NB_SERVER_SETTINGS_FILE = '/home/faruq/projects/supervised-ipython-nbserver/shared_config_files/all_nbserver_config_201309231124.csv'
# Location of student list CSV file
NB_SERVER_ACCESS_CONFIG_PATH = 'nbserver_access_config_files'
| base_url = 'http://ipython.local-server.org'
nb_server_settings_file = '/home/faruq/projects/supervised-ipython-nbserver/shared_config_files/all_nbserver_config_201309231124.csv'
nb_server_access_config_path = 'nbserver_access_config_files' |
#
# PySNMP MIB module CISCO-MODULE-VIRTUALIZATION-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-MODULE-VIRTUALIZATION-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:51:03 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint")
CiscoResourceClass, = mibBuilder.importSymbols("CISCO-L4L7MODULE-RESOURCE-LIMIT-MIB", "CiscoResourceClass")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
CiscoURLString, = mibBuilder.importSymbols("CISCO-TC", "CiscoURLString")
entPhysicalIndex, = mibBuilder.importSymbols("ENTITY-MIB", "entPhysicalIndex")
IANAifType, = mibBuilder.importSymbols("IANAifType-MIB", "IANAifType")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
Bits, Gauge32, IpAddress, iso, ObjectIdentity, TimeTicks, MibIdentifier, Counter32, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, Counter64, Integer32, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Gauge32", "IpAddress", "iso", "ObjectIdentity", "TimeTicks", "MibIdentifier", "Counter32", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "Counter64", "Integer32", "ModuleIdentity")
StorageType, TruthValue, TextualConvention, DisplayString, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "StorageType", "TruthValue", "TextualConvention", "DisplayString", "RowStatus")
ciscoModuleVirtualizationMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 472))
ciscoModuleVirtualizationMIB.setRevisions(('2006-05-29 00:00', '2005-12-12 00:00',))
if mibBuilder.loadTexts: ciscoModuleVirtualizationMIB.setLastUpdated('200605290000Z')
if mibBuilder.loadTexts: ciscoModuleVirtualizationMIB.setOrganization('Cisco Systems, Inc.')
cmVirtualizationNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 0))
cmVirtualizationMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 1))
cmVirtualizationMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 2))
cmVirtualContext = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1))
cmVirtualContextNotifControl = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 2))
cmVirtualContextNotifObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 3))
cmVirtualContextTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1), )
if mibBuilder.loadTexts: cmVirtualContextTable.setStatus('current')
cmVirtualContextEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1), ).setIndexNames((0, "ENTITY-MIB", "entPhysicalIndex"), (0, "CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextName"))
if mibBuilder.loadTexts: cmVirtualContextEntry.setStatus('current')
cmVirtContextName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 1), SnmpAdminString())
if mibBuilder.loadTexts: cmVirtContextName.setStatus('current')
cmVirtContextDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cmVirtContextDescr.setStatus('current')
cmVirtContextURL = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 3), CiscoURLString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cmVirtContextURL.setStatus('current')
cmVirtContextResourceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 4), CiscoResourceClass()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cmVirtContextResourceClass.setStatus('current')
cmVirtContextStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cmVirtContextStorageType.setStatus('current')
cmVirtContextRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cmVirtContextRowStatus.setStatus('current')
cmVirtContextIfMapTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2), )
if mibBuilder.loadTexts: cmVirtContextIfMapTable.setStatus('current')
cmVirtContextIfMapEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1), ).setIndexNames((0, "CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextName"), (0, "CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextIfMapType"), (0, "CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextIfMapIdLow"))
if mibBuilder.loadTexts: cmVirtContextIfMapEntry.setStatus('current')
cmVirtContextIfMapType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1, 1), IANAifType())
if mibBuilder.loadTexts: cmVirtContextIfMapType.setStatus('current')
cmVirtContextIfMapIdLow = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1, 2), Unsigned32())
if mibBuilder.loadTexts: cmVirtContextIfMapIdLow.setStatus('current')
cmVirtContextIfMapIdHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1, 3), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cmVirtContextIfMapIdHigh.setStatus('current')
cmVirtContextIfMapStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1, 4), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cmVirtContextIfMapStorageType.setStatus('current')
cmVirtContextIfMapRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cmVirtContextIfMapRowStatus.setStatus('current')
cmVirtContextNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 2, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmVirtContextNotifEnable.setStatus('current')
cmNotifVirtContextName = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 3, 1), SnmpAdminString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: cmNotifVirtContextName.setStatus('current')
cmVirtContextAdded = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 472, 0, 1)).setObjects(("CISCO-MODULE-VIRTUALIZATION-MIB", "cmNotifVirtContextName"))
if mibBuilder.loadTexts: cmVirtContextAdded.setStatus('current')
cmVirtContextRemoved = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 472, 0, 2)).setObjects(("CISCO-MODULE-VIRTUALIZATION-MIB", "cmNotifVirtContextName"))
if mibBuilder.loadTexts: cmVirtContextRemoved.setStatus('current')
cmVirtualizationCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 1))
cmVirtualizationGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2))
cmVirtualizationCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 1, 1)).setObjects(("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextconfigGroup"), ("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextIfMapGroup"), ("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextNotifControlGroup"), ("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextNotificationGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cmVirtualizationCompliance = cmVirtualizationCompliance.setStatus('current')
cmVirtContextconfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2, 1)).setObjects(("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextDescr"), ("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextURL"), ("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextResourceClass"), ("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextStorageType"), ("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cmVirtContextconfigGroup = cmVirtContextconfigGroup.setStatus('current')
cmVirtContextIfMapGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2, 2)).setObjects(("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextIfMapIdHigh"), ("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextIfMapStorageType"), ("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextIfMapRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cmVirtContextIfMapGroup = cmVirtContextIfMapGroup.setStatus('current')
cmVirtContextNotifControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2, 3)).setObjects(("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextNotifEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cmVirtContextNotifControlGroup = cmVirtContextNotifControlGroup.setStatus('current')
cmVirtContextNotifObjectsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2, 4)).setObjects(("CISCO-MODULE-VIRTUALIZATION-MIB", "cmNotifVirtContextName"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cmVirtContextNotifObjectsGroup = cmVirtContextNotifObjectsGroup.setStatus('current')
cmVirtContextNotificationGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2, 5)).setObjects(("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextAdded"), ("CISCO-MODULE-VIRTUALIZATION-MIB", "cmVirtContextRemoved"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cmVirtContextNotificationGroup = cmVirtContextNotificationGroup.setStatus('current')
mibBuilder.exportSymbols("CISCO-MODULE-VIRTUALIZATION-MIB", cmVirtualizationMIBConformance=cmVirtualizationMIBConformance, cmVirtContextIfMapIdLow=cmVirtContextIfMapIdLow, cmVirtContextNotifEnable=cmVirtContextNotifEnable, cmVirtContextRowStatus=cmVirtContextRowStatus, cmVirtualContextNotifObjects=cmVirtualContextNotifObjects, cmVirtContextconfigGroup=cmVirtContextconfigGroup, cmVirtContextDescr=cmVirtContextDescr, cmVirtContextResourceClass=cmVirtContextResourceClass, cmVirtContextAdded=cmVirtContextAdded, cmVirtualizationCompliance=cmVirtualizationCompliance, cmNotifVirtContextName=cmNotifVirtContextName, cmVirtContextIfMapTable=cmVirtContextIfMapTable, ciscoModuleVirtualizationMIB=ciscoModuleVirtualizationMIB, cmVirtualizationMIBObjects=cmVirtualizationMIBObjects, cmVirtualizationNotifs=cmVirtualizationNotifs, cmVirtContextIfMapEntry=cmVirtContextIfMapEntry, cmVirtContextName=cmVirtContextName, PYSNMP_MODULE_ID=ciscoModuleVirtualizationMIB, cmVirtContextURL=cmVirtContextURL, cmVirtualContext=cmVirtualContext, cmVirtContextIfMapIdHigh=cmVirtContextIfMapIdHigh, cmVirtualizationGroups=cmVirtualizationGroups, cmVirtContextNotifObjectsGroup=cmVirtContextNotifObjectsGroup, cmVirtualContextEntry=cmVirtualContextEntry, cmVirtualContextTable=cmVirtualContextTable, cmVirtContextNotifControlGroup=cmVirtContextNotifControlGroup, cmVirtContextRemoved=cmVirtContextRemoved, cmVirtContextIfMapGroup=cmVirtContextIfMapGroup, cmVirtualContextNotifControl=cmVirtualContextNotifControl, cmVirtContextIfMapStorageType=cmVirtContextIfMapStorageType, cmVirtContextIfMapType=cmVirtContextIfMapType, cmVirtContextIfMapRowStatus=cmVirtContextIfMapRowStatus, cmVirtContextStorageType=cmVirtContextStorageType, cmVirtualizationCompliances=cmVirtualizationCompliances, cmVirtContextNotificationGroup=cmVirtContextNotificationGroup)
| (object_identifier, integer, octet_string) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'Integer', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_size_constraint, single_value_constraint, constraints_union, constraints_intersection, value_range_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueSizeConstraint', 'SingleValueConstraint', 'ConstraintsUnion', 'ConstraintsIntersection', 'ValueRangeConstraint')
(cisco_resource_class,) = mibBuilder.importSymbols('CISCO-L4L7MODULE-RESOURCE-LIMIT-MIB', 'CiscoResourceClass')
(cisco_mgmt,) = mibBuilder.importSymbols('CISCO-SMI', 'ciscoMgmt')
(cisco_url_string,) = mibBuilder.importSymbols('CISCO-TC', 'CiscoURLString')
(ent_physical_index,) = mibBuilder.importSymbols('ENTITY-MIB', 'entPhysicalIndex')
(ian_aif_type,) = mibBuilder.importSymbols('IANAifType-MIB', 'IANAifType')
(snmp_admin_string,) = mibBuilder.importSymbols('SNMP-FRAMEWORK-MIB', 'SnmpAdminString')
(module_compliance, object_group, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'ObjectGroup', 'NotificationGroup')
(bits, gauge32, ip_address, iso, object_identity, time_ticks, mib_identifier, counter32, unsigned32, mib_scalar, mib_table, mib_table_row, mib_table_column, notification_type, counter64, integer32, module_identity) = mibBuilder.importSymbols('SNMPv2-SMI', 'Bits', 'Gauge32', 'IpAddress', 'iso', 'ObjectIdentity', 'TimeTicks', 'MibIdentifier', 'Counter32', 'Unsigned32', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'NotificationType', 'Counter64', 'Integer32', 'ModuleIdentity')
(storage_type, truth_value, textual_convention, display_string, row_status) = mibBuilder.importSymbols('SNMPv2-TC', 'StorageType', 'TruthValue', 'TextualConvention', 'DisplayString', 'RowStatus')
cisco_module_virtualization_mib = module_identity((1, 3, 6, 1, 4, 1, 9, 9, 472))
ciscoModuleVirtualizationMIB.setRevisions(('2006-05-29 00:00', '2005-12-12 00:00'))
if mibBuilder.loadTexts:
ciscoModuleVirtualizationMIB.setLastUpdated('200605290000Z')
if mibBuilder.loadTexts:
ciscoModuleVirtualizationMIB.setOrganization('Cisco Systems, Inc.')
cm_virtualization_notifs = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 0))
cm_virtualization_mib_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 1))
cm_virtualization_mib_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 2))
cm_virtual_context = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1))
cm_virtual_context_notif_control = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 2))
cm_virtual_context_notif_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 3))
cm_virtual_context_table = mib_table((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1))
if mibBuilder.loadTexts:
cmVirtualContextTable.setStatus('current')
cm_virtual_context_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1)).setIndexNames((0, 'ENTITY-MIB', 'entPhysicalIndex'), (0, 'CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextName'))
if mibBuilder.loadTexts:
cmVirtualContextEntry.setStatus('current')
cm_virt_context_name = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 1), snmp_admin_string())
if mibBuilder.loadTexts:
cmVirtContextName.setStatus('current')
cm_virt_context_descr = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 2), snmp_admin_string()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
cmVirtContextDescr.setStatus('current')
cm_virt_context_url = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 3), cisco_url_string()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
cmVirtContextURL.setStatus('current')
cm_virt_context_resource_class = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 4), cisco_resource_class()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
cmVirtContextResourceClass.setStatus('current')
cm_virt_context_storage_type = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 5), storage_type().clone('nonVolatile')).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
cmVirtContextStorageType.setStatus('current')
cm_virt_context_row_status = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 1, 1, 6), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
cmVirtContextRowStatus.setStatus('current')
cm_virt_context_if_map_table = mib_table((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2))
if mibBuilder.loadTexts:
cmVirtContextIfMapTable.setStatus('current')
cm_virt_context_if_map_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1)).setIndexNames((0, 'CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextName'), (0, 'CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextIfMapType'), (0, 'CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextIfMapIdLow'))
if mibBuilder.loadTexts:
cmVirtContextIfMapEntry.setStatus('current')
cm_virt_context_if_map_type = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1, 1), ian_aif_type())
if mibBuilder.loadTexts:
cmVirtContextIfMapType.setStatus('current')
cm_virt_context_if_map_id_low = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1, 2), unsigned32())
if mibBuilder.loadTexts:
cmVirtContextIfMapIdLow.setStatus('current')
cm_virt_context_if_map_id_high = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1, 3), unsigned32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
cmVirtContextIfMapIdHigh.setStatus('current')
cm_virt_context_if_map_storage_type = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1, 4), storage_type().clone('nonVolatile')).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
cmVirtContextIfMapStorageType.setStatus('current')
cm_virt_context_if_map_row_status = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 1, 2, 1, 5), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
cmVirtContextIfMapRowStatus.setStatus('current')
cm_virt_context_notif_enable = mib_scalar((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 2, 1), truth_value()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
cmVirtContextNotifEnable.setStatus('current')
cm_notif_virt_context_name = mib_scalar((1, 3, 6, 1, 4, 1, 9, 9, 472, 1, 3, 1), snmp_admin_string()).setMaxAccess('accessiblefornotify')
if mibBuilder.loadTexts:
cmNotifVirtContextName.setStatus('current')
cm_virt_context_added = notification_type((1, 3, 6, 1, 4, 1, 9, 9, 472, 0, 1)).setObjects(('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmNotifVirtContextName'))
if mibBuilder.loadTexts:
cmVirtContextAdded.setStatus('current')
cm_virt_context_removed = notification_type((1, 3, 6, 1, 4, 1, 9, 9, 472, 0, 2)).setObjects(('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmNotifVirtContextName'))
if mibBuilder.loadTexts:
cmVirtContextRemoved.setStatus('current')
cm_virtualization_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 1))
cm_virtualization_groups = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2))
cm_virtualization_compliance = module_compliance((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 1, 1)).setObjects(('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextconfigGroup'), ('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextIfMapGroup'), ('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextNotifControlGroup'), ('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextNotificationGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cm_virtualization_compliance = cmVirtualizationCompliance.setStatus('current')
cm_virt_contextconfig_group = object_group((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2, 1)).setObjects(('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextDescr'), ('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextURL'), ('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextResourceClass'), ('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextStorageType'), ('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextRowStatus'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cm_virt_contextconfig_group = cmVirtContextconfigGroup.setStatus('current')
cm_virt_context_if_map_group = object_group((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2, 2)).setObjects(('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextIfMapIdHigh'), ('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextIfMapStorageType'), ('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextIfMapRowStatus'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cm_virt_context_if_map_group = cmVirtContextIfMapGroup.setStatus('current')
cm_virt_context_notif_control_group = object_group((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2, 3)).setObjects(('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextNotifEnable'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cm_virt_context_notif_control_group = cmVirtContextNotifControlGroup.setStatus('current')
cm_virt_context_notif_objects_group = object_group((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2, 4)).setObjects(('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmNotifVirtContextName'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cm_virt_context_notif_objects_group = cmVirtContextNotifObjectsGroup.setStatus('current')
cm_virt_context_notification_group = notification_group((1, 3, 6, 1, 4, 1, 9, 9, 472, 2, 2, 5)).setObjects(('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextAdded'), ('CISCO-MODULE-VIRTUALIZATION-MIB', 'cmVirtContextRemoved'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cm_virt_context_notification_group = cmVirtContextNotificationGroup.setStatus('current')
mibBuilder.exportSymbols('CISCO-MODULE-VIRTUALIZATION-MIB', cmVirtualizationMIBConformance=cmVirtualizationMIBConformance, cmVirtContextIfMapIdLow=cmVirtContextIfMapIdLow, cmVirtContextNotifEnable=cmVirtContextNotifEnable, cmVirtContextRowStatus=cmVirtContextRowStatus, cmVirtualContextNotifObjects=cmVirtualContextNotifObjects, cmVirtContextconfigGroup=cmVirtContextconfigGroup, cmVirtContextDescr=cmVirtContextDescr, cmVirtContextResourceClass=cmVirtContextResourceClass, cmVirtContextAdded=cmVirtContextAdded, cmVirtualizationCompliance=cmVirtualizationCompliance, cmNotifVirtContextName=cmNotifVirtContextName, cmVirtContextIfMapTable=cmVirtContextIfMapTable, ciscoModuleVirtualizationMIB=ciscoModuleVirtualizationMIB, cmVirtualizationMIBObjects=cmVirtualizationMIBObjects, cmVirtualizationNotifs=cmVirtualizationNotifs, cmVirtContextIfMapEntry=cmVirtContextIfMapEntry, cmVirtContextName=cmVirtContextName, PYSNMP_MODULE_ID=ciscoModuleVirtualizationMIB, cmVirtContextURL=cmVirtContextURL, cmVirtualContext=cmVirtualContext, cmVirtContextIfMapIdHigh=cmVirtContextIfMapIdHigh, cmVirtualizationGroups=cmVirtualizationGroups, cmVirtContextNotifObjectsGroup=cmVirtContextNotifObjectsGroup, cmVirtualContextEntry=cmVirtualContextEntry, cmVirtualContextTable=cmVirtualContextTable, cmVirtContextNotifControlGroup=cmVirtContextNotifControlGroup, cmVirtContextRemoved=cmVirtContextRemoved, cmVirtContextIfMapGroup=cmVirtContextIfMapGroup, cmVirtualContextNotifControl=cmVirtualContextNotifControl, cmVirtContextIfMapStorageType=cmVirtContextIfMapStorageType, cmVirtContextIfMapType=cmVirtContextIfMapType, cmVirtContextIfMapRowStatus=cmVirtContextIfMapRowStatus, cmVirtContextStorageType=cmVirtContextStorageType, cmVirtualizationCompliances=cmVirtualizationCompliances, cmVirtContextNotificationGroup=cmVirtContextNotificationGroup) |
arr = [0, 0, 0]
arr = sum(arr)
rem = arr % 2
if rem == 0:
print("even")
else:
print("odd") | arr = [0, 0, 0]
arr = sum(arr)
rem = arr % 2
if rem == 0:
print('even')
else:
print('odd') |
class Solution:
def removeDuplicates(self, nums: List[int]) -> int:
idx = 0
for i, num in enumerate(nums):
if i == 0 or nums[i - 1] != num:
nums[idx] = num
idx += 1
return idx
| class Solution:
def remove_duplicates(self, nums: List[int]) -> int:
idx = 0
for (i, num) in enumerate(nums):
if i == 0 or nums[i - 1] != num:
nums[idx] = num
idx += 1
return idx |
def find_instr(func, key):
"""
Print instructions if contain key word.
Parameters
----------
func : Numba function
Numba function for find instructions
key : str
Key instruction word
Returns
-------
Nothing
"""
cnt = 0
for txt in func.inspect_asm(func.signatures[0]).split('\n'):
if key in txt:
cnt += 1
print(txt)
if cnt == 0:
print('Cant found instructions:', key)
| def find_instr(func, key):
"""
Print instructions if contain key word.
Parameters
----------
func : Numba function
Numba function for find instructions
key : str
Key instruction word
Returns
-------
Nothing
"""
cnt = 0
for txt in func.inspect_asm(func.signatures[0]).split('\n'):
if key in txt:
cnt += 1
print(txt)
if cnt == 0:
print('Cant found instructions:', key) |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def deepestLeavesSum(self, root: TreeNode) -> int:
frontier = [root]
ans = 0
while frontier:
next_level = []
ans = 0
for node in frontier:
ans += node.val
if node.left is not None:
next_level.append(node.left)
if node.right is not None:
next_level.append(node.right)
frontier = next_level
return ans
| class Solution:
def deepest_leaves_sum(self, root: TreeNode) -> int:
frontier = [root]
ans = 0
while frontier:
next_level = []
ans = 0
for node in frontier:
ans += node.val
if node.left is not None:
next_level.append(node.left)
if node.right is not None:
next_level.append(node.right)
frontier = next_level
return ans |
# The words parameter is a list of strings.
def build_dictionary(words):
# The frequencies dictionary will be built with your code below.
# Each key is a word string and the corresponding value is an integer
# indicating that word's frequency.
frequencies = {}
for word in words:
if word in frequencies:
frequencies[word] += 1
else:
frequencies[word] = 1
return frequencies
# The following code asks for input, splits the input into a word list,
# calls build_dictionary(), and displays the contents sorted by key.
if __name__ == '__main__':
words = input('please paste in your input').split()
your_dictionary = build_dictionary(words)
sorted_keys = sorted(your_dictionary.keys())
for key in sorted_keys:
print(key + ': ' + str(your_dictionary[key]))
| def build_dictionary(words):
frequencies = {}
for word in words:
if word in frequencies:
frequencies[word] += 1
else:
frequencies[word] = 1
return frequencies
if __name__ == '__main__':
words = input('please paste in your input').split()
your_dictionary = build_dictionary(words)
sorted_keys = sorted(your_dictionary.keys())
for key in sorted_keys:
print(key + ': ' + str(your_dictionary[key])) |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# qooxdoo - the new era of web development
#
# http://qooxdoo.org
#
# Copyright:
# 2006-2010 1&1 Internet AG, Germany, http://www.1und1.de
#
# License:
# MIT: https://opensource.org/licenses/MIT
# See the LICENSE file in the project's top-level directory for details.
#
# Authors:
# * Thomas Herchenroeder (thron7)
#
################################################################################
##
# Context - provide global symbols as context data, like console, cache, ...
##
console = None
config = None
jobconf = None
generator_opts = []
| console = None
config = None
jobconf = None
generator_opts = [] |
# coding: utf-8
# Copyright (c) Henniggroup.
# Distributed under the terms of the MIT License.
"""
This package contains modules to parse the outputs from VASP calculations,
apply the Freysoldt-Neugebauer charge correction scheme,
and evaluate defect formation energies.
"""
| """
This package contains modules to parse the outputs from VASP calculations,
apply the Freysoldt-Neugebauer charge correction scheme,
and evaluate defect formation energies.
""" |
a = 5 # int INTeger
b = 3.14 # float numbers
sentence = 'I\'m from Poland' #string - sequence of characters
isBig = False #bool BOOLEAN
sizeOfSection = 4 #Every word you start inside variable name should be capitalized
| a = 5
b = 3.14
sentence = "I'm from Poland"
is_big = False
size_of_section = 4 |
class Credentials:
def __init__(self, username:str, password:str):
self.username = username
self.password = password | class Credentials:
def __init__(self, username: str, password: str):
self.username = username
self.password = password |
def get_order_by(query_dict, order_by_param, secondary=None):
'''
``query_dict`` is either the get or post data
``order_by_param`` is the variable name with which to sort on
``default`` which column to order on in a default case
'''
order_by = query_dict.get(order_by_param)
if order_by and not order_by.lstrip('-') == secondary:
order_by = [order_by, secondary]
return order_by
| def get_order_by(query_dict, order_by_param, secondary=None):
"""
``query_dict`` is either the get or post data
``order_by_param`` is the variable name with which to sort on
``default`` which column to order on in a default case
"""
order_by = query_dict.get(order_by_param)
if order_by and (not order_by.lstrip('-') == secondary):
order_by = [order_by, secondary]
return order_by |
SERVER_NAME = "IBM"
mongo_url = "mongodb://xxxxx"
eegdb_name = "eegdb_test"
data_folder = "/path/to/data/"
output_folder = "/path/to/output/" | server_name = 'IBM'
mongo_url = 'mongodb://xxxxx'
eegdb_name = 'eegdb_test'
data_folder = '/path/to/data/'
output_folder = '/path/to/output/' |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.