content
stringlengths
7
1.05M
fixed_cases
stringlengths
1
1.28M
'''Basic object to store the agents and auxiliary content in the agent system graph. The object should be considered to be replaced with namedtuple at some point, once the default field has matured ''' class Node(object): '''Basic object to store agent and auxiliary content in the agent system. Parameters ---------- name : str Name of node agent_content : Agent An Agent object aux_content : optional Auxiliary content, such as an immediate environment, to the Agent of the Node other_attributes : dict, optional Dictionary of additional attributes assigned to the Node. These can be part of operations on the graph during a simulation or they can be part of graph sampling, for example. Each key is the name of the attribute, the value is the value of the attribute. ''' def __str__(self): return 'Node(name:%s)' %(self.name) def __contains__(self, item): if self.agent_content is None: return False else: return item == self.agent_content.agent_id_system def __init__(self, name, agent_content, aux_content=None, other_attributes={}): self.name = name self.agent_content = agent_content self.aux_content = aux_content for key, item in other_attributes: setattr(self, key, item) def node_maker(agents, envs=None, node_names=None, node_attributes=None): '''Convenience function to place a collection of agents and environments in nodes Parameters ---------- TBD Returns ------- TBD ''' n_nodes = len(agents) if not envs is None: if len(envs) != n_nodes: raise ValueError('Environment container not of same size as agent container') envs_iter = envs else: envs_iter = [None] * n_nodes if not node_names is None: if len(node_names) != n_nodes: raise ValueError('Node names container no of same size as agent container') node_names_iter = node_names else: node_names_iter = ['ID {}'.format(k) for k in range(n_nodes)] if not node_attributes is None: if len(node_attributes) != n_nodes: raise ValueError('Node attributes container not of same size as agent container') node_attributes_iter = node_attributes else: node_attributes_iter = [{}] * n_nodes ret = [] for agent, env, name, attributes in zip(agents, envs_iter, node_names_iter, node_attributes_iter): ret.append(Node(name, agent, env, attributes)) return ret
"""Basic object to store the agents and auxiliary content in the agent system graph. The object should be considered to be replaced with namedtuple at some point, once the default field has matured """ class Node(object): """Basic object to store agent and auxiliary content in the agent system. Parameters ---------- name : str Name of node agent_content : Agent An Agent object aux_content : optional Auxiliary content, such as an immediate environment, to the Agent of the Node other_attributes : dict, optional Dictionary of additional attributes assigned to the Node. These can be part of operations on the graph during a simulation or they can be part of graph sampling, for example. Each key is the name of the attribute, the value is the value of the attribute. """ def __str__(self): return 'Node(name:%s)' % self.name def __contains__(self, item): if self.agent_content is None: return False else: return item == self.agent_content.agent_id_system def __init__(self, name, agent_content, aux_content=None, other_attributes={}): self.name = name self.agent_content = agent_content self.aux_content = aux_content for (key, item) in other_attributes: setattr(self, key, item) def node_maker(agents, envs=None, node_names=None, node_attributes=None): """Convenience function to place a collection of agents and environments in nodes Parameters ---------- TBD Returns ------- TBD """ n_nodes = len(agents) if not envs is None: if len(envs) != n_nodes: raise value_error('Environment container not of same size as agent container') envs_iter = envs else: envs_iter = [None] * n_nodes if not node_names is None: if len(node_names) != n_nodes: raise value_error('Node names container no of same size as agent container') node_names_iter = node_names else: node_names_iter = ['ID {}'.format(k) for k in range(n_nodes)] if not node_attributes is None: if len(node_attributes) != n_nodes: raise value_error('Node attributes container not of same size as agent container') node_attributes_iter = node_attributes else: node_attributes_iter = [{}] * n_nodes ret = [] for (agent, env, name, attributes) in zip(agents, envs_iter, node_names_iter, node_attributes_iter): ret.append(node(name, agent, env, attributes)) return ret
masuk=int(input("Masukkan Jam Masuk = ")) keluar=int(input("Masukkan Jam Keluar =")) lama=keluar-masuk payment=12000 print("Lama Mengajar = ", lama, "jam") if lama <=1: satu_jam_pertama=payment print("Biaya Mengajar= Rp", satu_jam_pertama) elif lama <10: biaya_selanjutnya = (lama+1)*3000+payment print("Biaya Mengajar = Rp", biaya_selanjutnya) elif lama >= 10: print("Biaya Mengajar = Rp", 1000000) else: print("nul")
masuk = int(input('Masukkan Jam Masuk = ')) keluar = int(input('Masukkan Jam Keluar =')) lama = keluar - masuk payment = 12000 print('Lama Mengajar = ', lama, 'jam') if lama <= 1: satu_jam_pertama = payment print('Biaya Mengajar= Rp', satu_jam_pertama) elif lama < 10: biaya_selanjutnya = (lama + 1) * 3000 + payment print('Biaya Mengajar = Rp', biaya_selanjutnya) elif lama >= 10: print('Biaya Mengajar = Rp', 1000000) else: print('nul')
# dataset settings dataset_type = 'PhoneDataset' data_root = '/home/ubuntu/tienpv/datasets/PhoneDatasets/COCO2017/' ann_files = '/home/ubuntu/tienpv/datasets/PhoneDatasets/COCO2017/annotations/instances_train2017_cell_phone_format_widerface.txt' val_data_root = '/home/ubuntu/tienpv/datasets/PhoneDatasets/COCO2017/' val_ann_files = '/home/ubuntu/tienpv/datasets/PhoneDatasets/COCO2017/annotations/instances_val2017_cell_phone_format_widerface.txt' img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) train_pipeline = [ dict(type='LoadImageFromFile', to_float32=True), dict(type='LoadAnnotations', with_bbox=True), dict( type='PhotoMetricDistortion', brightness_delta=32, contrast_range=(0.5, 1.5), saturation_range=(0.5, 1.5), hue_delta=18), dict( type='Expand', mean=img_norm_cfg['mean'], to_rgb=img_norm_cfg['to_rgb'], ratio_range=(1, 4)), dict( type='MinIoURandomCrop', min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), min_crop_size=0.3), dict(type='Resize', img_scale=(320, 320), keep_ratio=False), dict(type='Normalize', **img_norm_cfg), dict(type='RandomFlip', flip_ratio=0.5), dict(type='DefaultFormatBundle'), dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), ] gray_train_pipeline = [ dict(type='LoadImageFromFile', to_float32=True, color_type='grayscale'), dict(type='Stack'), dict(type='LoadAnnotations', with_bbox=True), dict( type='PhotoMetricDistortion', brightness_delta=32, contrast_range=(0.5, 1.5), saturation_range=(0.5, 1.5), hue_delta=18), dict( type='Expand', mean=img_norm_cfg['mean'], to_rgb=img_norm_cfg['to_rgb'], ratio_range=(1, 4)), dict( type='MinIoURandomCrop', min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), min_crop_size=0.3), dict(type='Resize', img_scale=(320, 320), keep_ratio=False), dict(type='Normalize', **img_norm_cfg), dict(type='RandomFlip', flip_ratio=0.5), dict(type='DefaultFormatBundle'), dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), ] test_pipeline = [ dict(type='LoadImageFromFile'), dict( type='MultiScaleFlipAug', img_scale=(320, 320), flip=False, transforms=[ dict(type='Resize', keep_ratio=False), dict(type='Normalize', **img_norm_cfg), dict(type='ImageToTensor', keys=['img']), dict(type='Collect', keys=['img']), ]) ] # rgb_dataset_train = dict( # type='RepeatDataset', # times=2, # dataset=dict( # type=dataset_type, # ann_file=ann_files, # img_prefix=data_root, # pipeline=train_pipeline # ) # ) # gray_dataset_train = dict( # type='RepeatDataset', # times=2, # dataset=dict( # type=dataset_type, # ann_file=ann_files, # img_prefix=data_root, # pipeline=gray_train_pipeline # ) # ) data = dict( samples_per_gpu=60, workers_per_gpu=4, # train=[rgb_dataset_train, gray_dataset_train], train=dict( type='RepeatDataset', times=2, dataset=dict( type=dataset_type, ann_file=ann_files, img_prefix=data_root, pipeline=train_pipeline ) ), val=dict( type=dataset_type, ann_file=val_ann_files, img_prefix=val_data_root, pipeline=test_pipeline), test=dict( type=dataset_type, ann_file=val_ann_files, img_prefix=val_data_root, pipeline=test_pipeline))
dataset_type = 'PhoneDataset' data_root = '/home/ubuntu/tienpv/datasets/PhoneDatasets/COCO2017/' ann_files = '/home/ubuntu/tienpv/datasets/PhoneDatasets/COCO2017/annotations/instances_train2017_cell_phone_format_widerface.txt' val_data_root = '/home/ubuntu/tienpv/datasets/PhoneDatasets/COCO2017/' val_ann_files = '/home/ubuntu/tienpv/datasets/PhoneDatasets/COCO2017/annotations/instances_val2017_cell_phone_format_widerface.txt' img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) train_pipeline = [dict(type='LoadImageFromFile', to_float32=True), dict(type='LoadAnnotations', with_bbox=True), dict(type='PhotoMetricDistortion', brightness_delta=32, contrast_range=(0.5, 1.5), saturation_range=(0.5, 1.5), hue_delta=18), dict(type='Expand', mean=img_norm_cfg['mean'], to_rgb=img_norm_cfg['to_rgb'], ratio_range=(1, 4)), dict(type='MinIoURandomCrop', min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), min_crop_size=0.3), dict(type='Resize', img_scale=(320, 320), keep_ratio=False), dict(type='Normalize', **img_norm_cfg), dict(type='RandomFlip', flip_ratio=0.5), dict(type='DefaultFormatBundle'), dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])] gray_train_pipeline = [dict(type='LoadImageFromFile', to_float32=True, color_type='grayscale'), dict(type='Stack'), dict(type='LoadAnnotations', with_bbox=True), dict(type='PhotoMetricDistortion', brightness_delta=32, contrast_range=(0.5, 1.5), saturation_range=(0.5, 1.5), hue_delta=18), dict(type='Expand', mean=img_norm_cfg['mean'], to_rgb=img_norm_cfg['to_rgb'], ratio_range=(1, 4)), dict(type='MinIoURandomCrop', min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), min_crop_size=0.3), dict(type='Resize', img_scale=(320, 320), keep_ratio=False), dict(type='Normalize', **img_norm_cfg), dict(type='RandomFlip', flip_ratio=0.5), dict(type='DefaultFormatBundle'), dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])] test_pipeline = [dict(type='LoadImageFromFile'), dict(type='MultiScaleFlipAug', img_scale=(320, 320), flip=False, transforms=[dict(type='Resize', keep_ratio=False), dict(type='Normalize', **img_norm_cfg), dict(type='ImageToTensor', keys=['img']), dict(type='Collect', keys=['img'])])] data = dict(samples_per_gpu=60, workers_per_gpu=4, train=dict(type='RepeatDataset', times=2, dataset=dict(type=dataset_type, ann_file=ann_files, img_prefix=data_root, pipeline=train_pipeline)), val=dict(type=dataset_type, ann_file=val_ann_files, img_prefix=val_data_root, pipeline=test_pipeline), test=dict(type=dataset_type, ann_file=val_ann_files, img_prefix=val_data_root, pipeline=test_pipeline))
def longestPeak(array): max_size = 0 i = 1 while i < len(array) - 1: peak = array[i - 1] < array[i] > array[i + 1] if not peak: i += 1 continue left = i - 1 right = i + 1 while left >= 0 and array[left] < array[left + 1]: left -= 1 while right < len(array) and array[right] < array[right - 1]: right += 1 max_size = max(max_size, right - left - 1) i = right return max_size
def longest_peak(array): max_size = 0 i = 1 while i < len(array) - 1: peak = array[i - 1] < array[i] > array[i + 1] if not peak: i += 1 continue left = i - 1 right = i + 1 while left >= 0 and array[left] < array[left + 1]: left -= 1 while right < len(array) and array[right] < array[right - 1]: right += 1 max_size = max(max_size, right - left - 1) i = right return max_size
class Agent: """An abstract class defining the interface for a Reversi agent.""" def __init__(self, reversi, color): raise NotImplementedError def get_action(self, game_state, legal_moves=None): raise NotImplementedError def observe_win(self, state, winner): raise NotImplementedError def reset(self): raise NotImplementedError
class Agent: """An abstract class defining the interface for a Reversi agent.""" def __init__(self, reversi, color): raise NotImplementedError def get_action(self, game_state, legal_moves=None): raise NotImplementedError def observe_win(self, state, winner): raise NotImplementedError def reset(self): raise NotImplementedError
# ======================== # Information # ======================== # Direct Link: https://www.hackerrank.com/challenges/s10-standard-deviation # Difficulty: Easy # Max Score: 30 # Language: Python # ======================== # Solution # ======================== N = int(input()) X = list(map(int, input().strip().split(' '))) MEAN = sum(X)/N sum = 0 for i in range(N): sum += ((X[i]-MEAN)**2)/N print(round(sum**0.5, 1))
n = int(input()) x = list(map(int, input().strip().split(' '))) mean = sum(X) / N sum = 0 for i in range(N): sum += (X[i] - MEAN) ** 2 / N print(round(sum ** 0.5, 1))
class Entity(object): def __init__(self, name, represented_class_name=None, parent_entity=None, is_abstract=False, attributes=None, relationships=None): self.name = name self.represented_class_name = represented_class_name or name self.parent_entity = parent_entity self.is_abstract = is_abstract self.attributes = attributes or [] self.relationships = relationships or [] def __str__(self): return self.name def __repr__(self): return '<Entity {}>'.format(self.name) def __eq__(self, other): return isinstance(other, Entity) and \ other.name == self.name and \ other.represented_class_name == self.represented_class_name and \ other.parent_entity == self.parent_entity and \ other.is_abstract == self.is_abstract and \ other.attributes == self.attributes and \ other.relationships == self.relationships @property def super_class_name(self): if self.parent_entity: return self.parent_entity.represented_class_name return 'NSManagedObject' @property def to_many_relationships(self): return [relationship for relationship in self.relationships if relationship.is_to_many] @property def to_one_relationships(self): return [relationship for relationship in self.relationships if relationship.is_to_one]
class Entity(object): def __init__(self, name, represented_class_name=None, parent_entity=None, is_abstract=False, attributes=None, relationships=None): self.name = name self.represented_class_name = represented_class_name or name self.parent_entity = parent_entity self.is_abstract = is_abstract self.attributes = attributes or [] self.relationships = relationships or [] def __str__(self): return self.name def __repr__(self): return '<Entity {}>'.format(self.name) def __eq__(self, other): return isinstance(other, Entity) and other.name == self.name and (other.represented_class_name == self.represented_class_name) and (other.parent_entity == self.parent_entity) and (other.is_abstract == self.is_abstract) and (other.attributes == self.attributes) and (other.relationships == self.relationships) @property def super_class_name(self): if self.parent_entity: return self.parent_entity.represented_class_name return 'NSManagedObject' @property def to_many_relationships(self): return [relationship for relationship in self.relationships if relationship.is_to_many] @property def to_one_relationships(self): return [relationship for relationship in self.relationships if relationship.is_to_one]
class Solution: def answer(self, current, end, scalar): if current == end: return scalar self.visited.add(current) if current in self.graph: for i in self.graph[current]: if i[0] not in self.visited: a = self.answer(i[0], end, scalar*i[1]) if a != -1: return a return -1 def calcEquation(self, equations: List[List[str]], values: List[float], queries: List[List[str]]) -> List[float]: self.graph, self.visited = {}, set() for i in range(len(equations)): if equations[i][0] not in self.graph: self.graph[equations[i][0]] = [] if equations[i][1] not in self.graph: self.graph[equations[i][1]] = [] self.graph[equations[i][0]].append((equations[i][1], 1/values[i])) self.graph[equations[i][1]].append((equations[i][0], values[i])) v = [] for i in queries: self.visited = set() if i[0] not in self.graph or i[1] not in self.graph: v.append(-1) continue v.append(1/self.answer(i[0], i[1], 1) if i[0] != i[1] else 1) return v
class Solution: def answer(self, current, end, scalar): if current == end: return scalar self.visited.add(current) if current in self.graph: for i in self.graph[current]: if i[0] not in self.visited: a = self.answer(i[0], end, scalar * i[1]) if a != -1: return a return -1 def calc_equation(self, equations: List[List[str]], values: List[float], queries: List[List[str]]) -> List[float]: (self.graph, self.visited) = ({}, set()) for i in range(len(equations)): if equations[i][0] not in self.graph: self.graph[equations[i][0]] = [] if equations[i][1] not in self.graph: self.graph[equations[i][1]] = [] self.graph[equations[i][0]].append((equations[i][1], 1 / values[i])) self.graph[equations[i][1]].append((equations[i][0], values[i])) v = [] for i in queries: self.visited = set() if i[0] not in self.graph or i[1] not in self.graph: v.append(-1) continue v.append(1 / self.answer(i[0], i[1], 1) if i[0] != i[1] else 1) return v
# -*- coding: utf-8 -*- """Test strategy with hashing mutiple shift invariant aligned patches See: https://stackoverflow.com/a/20316789/51627 """ def main(): pass if __name__ == "__main__": main()
"""Test strategy with hashing mutiple shift invariant aligned patches See: https://stackoverflow.com/a/20316789/51627 """ def main(): pass if __name__ == '__main__': main()
def isIsosceles(x, y, z): if x <= 0 or y <=0 or z <=0: return False if x == y: return True if y == z: return True if x == z: return True else: return False print(isIsosceles(-2, -2, 3)) print(isIsosceles(2, 3, 2)) def isIsosceles(x, y, z): if x <= 0 or y <=0 or z <=0: return False elif x == y or y == z or x == z: return True else: return False print(isIsosceles(-2, -2, 3)) print(isIsosceles(2, 3, 2))
def is_isosceles(x, y, z): if x <= 0 or y <= 0 or z <= 0: return False if x == y: return True if y == z: return True if x == z: return True else: return False print(is_isosceles(-2, -2, 3)) print(is_isosceles(2, 3, 2)) def is_isosceles(x, y, z): if x <= 0 or y <= 0 or z <= 0: return False elif x == y or y == z or x == z: return True else: return False print(is_isosceles(-2, -2, 3)) print(is_isosceles(2, 3, 2))
# -*- coding: utf-8 -*- __author__ = 'lycheng' __email__ = "lycheng997@gmail.com" class Solution(object): def wordPattern(self, pattern, str): """ :type pattern: str :type str: str :rtype: bool """ words = str.split(" ") if len(pattern) != len(words): return False word_map = {} pattern_map = {} for idx, word in enumerate(words): p = pattern[idx] if p not in pattern_map and word not in word_map: pattern_map[p] = word word_map[word] = p continue if pattern_map.get(p) != word or word_map.get(word) != p: return False return True
__author__ = 'lycheng' __email__ = 'lycheng997@gmail.com' class Solution(object): def word_pattern(self, pattern, str): """ :type pattern: str :type str: str :rtype: bool """ words = str.split(' ') if len(pattern) != len(words): return False word_map = {} pattern_map = {} for (idx, word) in enumerate(words): p = pattern[idx] if p not in pattern_map and word not in word_map: pattern_map[p] = word word_map[word] = p continue if pattern_map.get(p) != word or word_map.get(word) != p: return False return True
# Copyright 2017 The Chromium OS Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. inline = """ <map> <name>adc_mux</name> <doc>valid mux values for DUT's two banks of INA219 off PCA9540 ADCs</doc> <params clobber_ok="" none="0" bank0="4" bank1="5"></params> </map> <control> <name>adc_mux</name> <doc>4 to 1 mux to steer remote i2c i2c_mux:rem to two sets of 16 INA219 ADCs. Note they are only on leg0 and leg1</doc> <params clobber_ok="" interface="2" drv="pca9546" child="0x70" map="adc_mux"></params> </control> """ inas = [ ('ina219', 0x40, 'ppvar_bat', 3.8, 0.005, 'loc', True), ('ina219', 0x41, 'ppvar_bigcpu', 1.0, 0.01, 'loc', True), ('ina219', 0x42, 'ppvar_litcpu', 1.0, 0.01, 'loc', True), ('ina219', 0x43, 'ppvar_gpu', 1.0, 0.01, 'loc', True), ('ina219', 0x44, 'pp900_s0', 0.9, 0.01, 'loc', True), ('ina219', 0x45, 'pp1250_s3', 1.25, 0.01, 'loc', True), ('ina219', 0x46, 'pp1800', 1.8, 0.01, 'loc', True), ('ina219', 0x47, 'pp1800_ec', 1.8, 0.1, 'loc', True), ('ina219', 0x48, 'pp1800_s3', 1.8, 0.01, 'loc', True), ('ina219', 0x49, 'pp1800_lpddr', 1.8, 0.01, 'loc', True), ('ina219', 0x4A, 'pp1800_s0', 1.8, 0.01, 'loc', True), ('ina219', 0x4B, 'pp1800_pcie', 1.8, 0.01, 'loc', True), ('ina219', 0x4C, 'pp1800_mipi', 1.8, 0.01, 'loc', True), ('ina219', 0x4D, 'pp3300', 3.3, 0.01, 'loc', True), ('ina219', 0x4E, 'pp3300_s3', 3.3, 0.01, 'loc', True), ('ina219', 0x4F, 'pp3300_s0', 3.3, 0.01, 'loc', True), ]
inline = '\n <map>\n <name>adc_mux</name>\n <doc>valid mux values for DUT\'s two banks of INA219 off PCA9540\n ADCs</doc>\n <params clobber_ok="" none="0" bank0="4" bank1="5"></params>\n </map>\n <control>\n <name>adc_mux</name>\n <doc>4 to 1 mux to steer remote i2c i2c_mux:rem to two sets of\n 16 INA219 ADCs. Note they are only on leg0 and leg1</doc>\n <params clobber_ok="" interface="2" drv="pca9546" child="0x70"\n map="adc_mux"></params>\n </control>\n' inas = [('ina219', 64, 'ppvar_bat', 3.8, 0.005, 'loc', True), ('ina219', 65, 'ppvar_bigcpu', 1.0, 0.01, 'loc', True), ('ina219', 66, 'ppvar_litcpu', 1.0, 0.01, 'loc', True), ('ina219', 67, 'ppvar_gpu', 1.0, 0.01, 'loc', True), ('ina219', 68, 'pp900_s0', 0.9, 0.01, 'loc', True), ('ina219', 69, 'pp1250_s3', 1.25, 0.01, 'loc', True), ('ina219', 70, 'pp1800', 1.8, 0.01, 'loc', True), ('ina219', 71, 'pp1800_ec', 1.8, 0.1, 'loc', True), ('ina219', 72, 'pp1800_s3', 1.8, 0.01, 'loc', True), ('ina219', 73, 'pp1800_lpddr', 1.8, 0.01, 'loc', True), ('ina219', 74, 'pp1800_s0', 1.8, 0.01, 'loc', True), ('ina219', 75, 'pp1800_pcie', 1.8, 0.01, 'loc', True), ('ina219', 76, 'pp1800_mipi', 1.8, 0.01, 'loc', True), ('ina219', 77, 'pp3300', 3.3, 0.01, 'loc', True), ('ina219', 78, 'pp3300_s3', 3.3, 0.01, 'loc', True), ('ina219', 79, 'pp3300_s0', 3.3, 0.01, 'loc', True)]
class CmdResponse: __status: bool __type: str __data: dict __content: str def __init__(self, status: bool, contentType: str): self.__status = status self.__type = contentType self.__data = {'status': status} self.__content = None def setData(self, data: object): self.__data['data'] = data def setContent(self, content: str): self.__content = content def getContent(self) -> str: return self.__content def getData(self) -> dict: return self.__data def getContentType(self) -> str: return self.__type def getStatus(self) -> bool: return self.__status
class Cmdresponse: __status: bool __type: str __data: dict __content: str def __init__(self, status: bool, contentType: str): self.__status = status self.__type = contentType self.__data = {'status': status} self.__content = None def set_data(self, data: object): self.__data['data'] = data def set_content(self, content: str): self.__content = content def get_content(self) -> str: return self.__content def get_data(self) -> dict: return self.__data def get_content_type(self) -> str: return self.__type def get_status(self) -> bool: return self.__status
with open("pytest_results.xml", "w") as f: f.write("<?xml version='1.0' encoding='utf-8'?>") f.write("<test>") f.write("<!-- No tests executed -->") f.write("</test>")
with open('pytest_results.xml', 'w') as f: f.write("<?xml version='1.0' encoding='utf-8'?>") f.write('<test>') f.write('<!-- No tests executed -->') f.write('</test>')
def exec(path: str, data: bytes) -> None: fs = open(path, 'wb') fs.write(data) fs.close()
def exec(path: str, data: bytes) -> None: fs = open(path, 'wb') fs.write(data) fs.close()
class TernarySearchTrie: """Implements https://en.wikipedia.org/wiki/Ternary_search_tree""" def __init__(self): self.root = None def get(self, s: str) -> bool: """Return True if string s is in trie, else False""" return self._get(s, 0, self.root) def put(self, s: str, label): """Upsert string s into trie""" self.root = self._put(s, 0, label, self.root) def delete(self, s: str): """Delete string s from trie""" self.root = self._delete(s, 0, self.root) def _get(self, s: str, i: int, node): """Recursively traverse trie to find string s""" c = s[i] if node is None: print(f'String {s} is not in trie') return False if c < node.c: return self._get(s, i, node.left) elif c > node.c: return self._get(s, i, node.right) elif i < len(s) - 1: return self._get(s, i + 1, node.down) else: if node.label: print(f'String {s} is in trie with label {node.label}') return True print(f'String {s} is not in trie') return False def _put(self, s: str, i: int, label, node): """Recursively upsert string s with label into trie""" c = s[i] if node is None: node = _TernarySearchTrieNode(c) if c < node.c: node.left = self._put(s, i, label, node.left) elif c > node.c: node.right = self._put(s, i, label, node.right) elif i < len(s) - 1: node.down = self._put(s, i + 1, label, node.down) else: node.label = label return node def _delete(self, s: str, i: int, node): """Recursively delete string s from trie, including cleaning up trie""" c = s[i] if node is None: print(f'String {s} is not in trie') return None if c < node.c: node.left = self._delete(s, i, node.left) elif c > node.c: node.right = self._delete(s, i, node.right) elif i < len(s) - 1: node.down = self._delete(s, i + 1, node.down) else: node.label = None return (None if not node.left and not node.down and not node.right else node) class _TernarySearchTrieNode: """Implements a TST node, storing a char, a label and three pointers""" def __init__(self, c: str, label=None): self.c = c self.label = label self.left = self.down = self.right = None if __name__ == '__main__': T = TernarySearchTrie() print(T) TEST_STRINGS = ['appleE', "donkey'][]", 'donner', 'garfield123', 'garfunkel'] for i, s in enumerate(TEST_STRINGS): T.put(s, i + 1) for s in TEST_STRINGS: assert T.get(s) is True T.delete('garfield123') assert T.get('garfield123') is False assert T.get('garfunkel') is True assert T.get('a') is False print(T)
class Ternarysearchtrie: """Implements https://en.wikipedia.org/wiki/Ternary_search_tree""" def __init__(self): self.root = None def get(self, s: str) -> bool: """Return True if string s is in trie, else False""" return self._get(s, 0, self.root) def put(self, s: str, label): """Upsert string s into trie""" self.root = self._put(s, 0, label, self.root) def delete(self, s: str): """Delete string s from trie""" self.root = self._delete(s, 0, self.root) def _get(self, s: str, i: int, node): """Recursively traverse trie to find string s""" c = s[i] if node is None: print(f'String {s} is not in trie') return False if c < node.c: return self._get(s, i, node.left) elif c > node.c: return self._get(s, i, node.right) elif i < len(s) - 1: return self._get(s, i + 1, node.down) else: if node.label: print(f'String {s} is in trie with label {node.label}') return True print(f'String {s} is not in trie') return False def _put(self, s: str, i: int, label, node): """Recursively upsert string s with label into trie""" c = s[i] if node is None: node = __ternary_search_trie_node(c) if c < node.c: node.left = self._put(s, i, label, node.left) elif c > node.c: node.right = self._put(s, i, label, node.right) elif i < len(s) - 1: node.down = self._put(s, i + 1, label, node.down) else: node.label = label return node def _delete(self, s: str, i: int, node): """Recursively delete string s from trie, including cleaning up trie""" c = s[i] if node is None: print(f'String {s} is not in trie') return None if c < node.c: node.left = self._delete(s, i, node.left) elif c > node.c: node.right = self._delete(s, i, node.right) elif i < len(s) - 1: node.down = self._delete(s, i + 1, node.down) else: node.label = None return None if not node.left and (not node.down) and (not node.right) else node class _Ternarysearchtrienode: """Implements a TST node, storing a char, a label and three pointers""" def __init__(self, c: str, label=None): self.c = c self.label = label self.left = self.down = self.right = None if __name__ == '__main__': t = ternary_search_trie() print(T) test_strings = ['appleE', "donkey'][]", 'donner', 'garfield123', 'garfunkel'] for (i, s) in enumerate(TEST_STRINGS): T.put(s, i + 1) for s in TEST_STRINGS: assert T.get(s) is True T.delete('garfield123') assert T.get('garfield123') is False assert T.get('garfunkel') is True assert T.get('a') is False print(T)
# model batch = 1 in_chans = 1 out_chans = 1 in_rows = 4 in_cols = 4 out_rows = 8 out_cols = 8 ker_rows = 3 ker_cols = 3 stride = 2 # pad is 0 (left: 0 right: 1 top: 0 bottom: 1) input_table = [x for x in range(batch * in_rows * in_cols * in_chans)] kernel_table = [x for x in range(out_chans * ker_rows * ker_cols * in_chans)] out_table = [0 for x in range(batch * out_rows * out_cols * out_chans)] for i in range(batch): for j in range(in_rows): for k in range(in_cols): for l in range(in_chans): out_row_origin = j * stride out_col_origin = k * stride input_value = input_table[((i * in_rows + j) * in_cols + k) * in_chans + l] for m in range(ker_rows): for n in range(ker_cols): for o in range(out_chans): out_row = out_row_origin + m out_col = out_col_origin + n if (out_row < out_rows) and (out_col < out_cols) and (out_row >= 0) and (out_col >= 0): kernel_value = kernel_table[((o * ker_rows + m) * ker_cols + n) * in_chans + l] out_table[((i * out_rows + out_row) * out_cols + out_col) * out_chans + o] += (input_value * kernel_value) model = Model() i0 = Input("op_shape", "TENSOR_INT32", "{4}") weights = Parameter("ker", "TENSOR_FLOAT32", "{1, 3, 3, 1}", kernel_table) i1 = Input("in", "TENSOR_FLOAT32", "{1, 4, 4, 1}" ) pad = Int32Scalar("pad_same", 1) s_x = Int32Scalar("stride_x", 2) s_y = Int32Scalar("stride_y", 2) i2 = Output("op", "TENSOR_FLOAT32", "{1, 8, 8, 1}") model = model.Operation("TRANSPOSE_CONV_EX", i0, weights, i1, pad, s_x, s_y).To(i2) # Example 1. Input in operand 0, input0 = {i0: # output shape [1, 8, 8, 1], i1: # input 0 input_table} output0 = {i2: # output 0 out_table} # Instantiate an example Example((input0, output0))
batch = 1 in_chans = 1 out_chans = 1 in_rows = 4 in_cols = 4 out_rows = 8 out_cols = 8 ker_rows = 3 ker_cols = 3 stride = 2 input_table = [x for x in range(batch * in_rows * in_cols * in_chans)] kernel_table = [x for x in range(out_chans * ker_rows * ker_cols * in_chans)] out_table = [0 for x in range(batch * out_rows * out_cols * out_chans)] for i in range(batch): for j in range(in_rows): for k in range(in_cols): for l in range(in_chans): out_row_origin = j * stride out_col_origin = k * stride input_value = input_table[((i * in_rows + j) * in_cols + k) * in_chans + l] for m in range(ker_rows): for n in range(ker_cols): for o in range(out_chans): out_row = out_row_origin + m out_col = out_col_origin + n if out_row < out_rows and out_col < out_cols and (out_row >= 0) and (out_col >= 0): kernel_value = kernel_table[((o * ker_rows + m) * ker_cols + n) * in_chans + l] out_table[((i * out_rows + out_row) * out_cols + out_col) * out_chans + o] += input_value * kernel_value model = model() i0 = input('op_shape', 'TENSOR_INT32', '{4}') weights = parameter('ker', 'TENSOR_FLOAT32', '{1, 3, 3, 1}', kernel_table) i1 = input('in', 'TENSOR_FLOAT32', '{1, 4, 4, 1}') pad = int32_scalar('pad_same', 1) s_x = int32_scalar('stride_x', 2) s_y = int32_scalar('stride_y', 2) i2 = output('op', 'TENSOR_FLOAT32', '{1, 8, 8, 1}') model = model.Operation('TRANSPOSE_CONV_EX', i0, weights, i1, pad, s_x, s_y).To(i2) input0 = {i0: [1, 8, 8, 1], i1: input_table} output0 = {i2: out_table} example((input0, output0))
def main(): # input css = [[*map(int, input().split())] for _ in range(3)] # compute for i in range(3): if css[i-1][i-1]+css[i][i] != css[i-1][i]+css[i][i-1]: print('No') exit() # output print('Yes') if __name__ == '__main__': main()
def main(): css = [[*map(int, input().split())] for _ in range(3)] for i in range(3): if css[i - 1][i - 1] + css[i][i] != css[i - 1][i] + css[i][i - 1]: print('No') exit() print('Yes') if __name__ == '__main__': main()
''' This is a math Module Do Some thing ''' def add(a=0, b=0): return a + b; def minus(a=0, b=0): return a - b; def multy(a=1, b=1): return a * b;
""" This is a math Module Do Some thing """ def add(a=0, b=0): return a + b def minus(a=0, b=0): return a - b def multy(a=1, b=1): return a * b
# Time: O(n) # Space: O(1) class Solution(object): def maxDepthAfterSplit(self, seq): """ :type seq: str :rtype: List[int] """ return [(i & 1) ^ (seq[i] == '(') for i, c in enumerate(seq)] # Time: O(n) # Space: O(1) class Solution2(object): def maxDepthAfterSplit(self, seq): """ :type seq: str :rtype: List[int] """ A, B = 0, 0 result = [0]*len(seq) for i, c in enumerate(seq): point = 1 if c == '(' else -1 if (point == 1 and A <= B) or \ (point == -1 and A >= B): A += point else: B += point result[i] = 1 return result
class Solution(object): def max_depth_after_split(self, seq): """ :type seq: str :rtype: List[int] """ return [i & 1 ^ (seq[i] == '(') for (i, c) in enumerate(seq)] class Solution2(object): def max_depth_after_split(self, seq): """ :type seq: str :rtype: List[int] """ (a, b) = (0, 0) result = [0] * len(seq) for (i, c) in enumerate(seq): point = 1 if c == '(' else -1 if point == 1 and A <= B or (point == -1 and A >= B): a += point else: b += point result[i] = 1 return result
class MyClass: data = 3 a = MyClass() b = MyClass() a.data = 5 print(a.data) print(b.data)
class Myclass: data = 3 a = my_class() b = my_class() a.data = 5 print(a.data) print(b.data)
class Solution: def findLHS(self, nums) -> int: nums.sort() pre_num, pre_length = -1, 0 cur_num, cur_length = -1, 0 i = 0 max_length = 0 while i < len(nums): if nums[i] == cur_num: cur_length += 1 else: if cur_num == pre_num + 1: max_length = max(max_length, cur_length + pre_length) pre_num = cur_num pre_length = cur_length cur_num = nums[i] cur_length = 1 i += 1 if cur_num == pre_num + 1: max_length = max(max_length, cur_length + pre_length) return max_length slu = Solution() print(slu.findLHS([1, 1, 1, 1, 2]))
class Solution: def find_lhs(self, nums) -> int: nums.sort() (pre_num, pre_length) = (-1, 0) (cur_num, cur_length) = (-1, 0) i = 0 max_length = 0 while i < len(nums): if nums[i] == cur_num: cur_length += 1 else: if cur_num == pre_num + 1: max_length = max(max_length, cur_length + pre_length) pre_num = cur_num pre_length = cur_length cur_num = nums[i] cur_length = 1 i += 1 if cur_num == pre_num + 1: max_length = max(max_length, cur_length + pre_length) return max_length slu = solution() print(slu.findLHS([1, 1, 1, 1, 2]))
def validate_count(d): print(len([0 for e in d if((c:=e[2].count(e[1]))>e[0][0])and(c<e[0][1])])) def validate_position(d): print(len([0 for e in d if(e[2][e[0][0]-1]==e[1])^(e[2][e[0][1]-1]==e[1])])) if __name__ == "__main__": with open('2020/input/day02.txt') as f: database = [[[*map(int, (e := entry.split(' '))[0].split('-'))], e[1][0], e[2].replace('\n', '')] for entry in f.readlines()] validate_count(database) # 410 validate_position(database) # 694
def validate_count(d): print(len([0 for e in d if (c := e[2].count(e[1])) > e[0][0] and c < e[0][1]])) def validate_position(d): print(len([0 for e in d if (e[2][e[0][0] - 1] == e[1]) ^ (e[2][e[0][1] - 1] == e[1])])) if __name__ == '__main__': with open('2020/input/day02.txt') as f: database = [[[*map(int, (e := entry.split(' '))[0].split('-'))], e[1][0], e[2].replace('\n', '')] for entry in f.readlines()] validate_count(database) validate_position(database)
# Copyright 2017 Brocade Communications Systems, Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may also obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. class pyfos_type(): type_na = 0 type_int = 1 type_wwn = 2 type_str = 3 type_bool = 4 type_ip_addr = 5 type_ipv6_addr = 6 type_zoning_name = 7 type_domain_port = 8 def __init__(self, pyfos_type): self.pyfos_type = pyfos_type def get_type(self): return self.pyfos_type def vaildate_set(self, value): return True def __validate_peek_help(self, cur_type, value): if value is None: return True, None elif cur_type == pyfos_type.type_int: cur_value = int(value) if isinstance(cur_value, int): return True, cur_value elif cur_type == pyfos_type.type_wwn: cur_value = str(value) if isinstance(cur_value, str): return True, cur_value elif cur_type == pyfos_type.type_wwn: cur_value = str(value) if isinstance(cur_value, str): return True, cur_value elif cur_type == pyfos_type.type_str: cur_value = str(value) if isinstance(cur_value, str): return True, cur_value elif cur_type == pyfos_type.type_bool: cur_value = bool(value) if isinstance(cur_value, bool): return True, cur_value elif cur_type == pyfos_type.type_ip_addr: cur_value = str(value) if isinstance(cur_value, str): return True, cur_value elif cur_type == pyfos_type.type_zoning_name: cur_value = str(value) if isinstance(cur_value, str): return True, cur_value elif cur_type == pyfos_type.type_domain_port: cur_value = str(value) if isinstance(cur_value, str): return True, cur_value if cur_type == pyfos_type.type_na: return True, value else: return False, None def validate_peek(self, value): if isinstance(value, list): # if the list is empty, just return if not list: return True, value # otherwise, walk through element # and see if they are of the type # expected ret_list = [] for cur_value in value: correct_type, cast_value = self.__validate_peek_help( self.pyfos_type, cur_value) if correct_type is True: ret_list.append(cast_value) else: print("invalid type", value, cur_value, self.pyfos_type) return True, ret_list else: return self.__validate_peek_help(self.pyfos_type, value)
class Pyfos_Type: type_na = 0 type_int = 1 type_wwn = 2 type_str = 3 type_bool = 4 type_ip_addr = 5 type_ipv6_addr = 6 type_zoning_name = 7 type_domain_port = 8 def __init__(self, pyfos_type): self.pyfos_type = pyfos_type def get_type(self): return self.pyfos_type def vaildate_set(self, value): return True def __validate_peek_help(self, cur_type, value): if value is None: return (True, None) elif cur_type == pyfos_type.type_int: cur_value = int(value) if isinstance(cur_value, int): return (True, cur_value) elif cur_type == pyfos_type.type_wwn: cur_value = str(value) if isinstance(cur_value, str): return (True, cur_value) elif cur_type == pyfos_type.type_wwn: cur_value = str(value) if isinstance(cur_value, str): return (True, cur_value) elif cur_type == pyfos_type.type_str: cur_value = str(value) if isinstance(cur_value, str): return (True, cur_value) elif cur_type == pyfos_type.type_bool: cur_value = bool(value) if isinstance(cur_value, bool): return (True, cur_value) elif cur_type == pyfos_type.type_ip_addr: cur_value = str(value) if isinstance(cur_value, str): return (True, cur_value) elif cur_type == pyfos_type.type_zoning_name: cur_value = str(value) if isinstance(cur_value, str): return (True, cur_value) elif cur_type == pyfos_type.type_domain_port: cur_value = str(value) if isinstance(cur_value, str): return (True, cur_value) if cur_type == pyfos_type.type_na: return (True, value) else: return (False, None) def validate_peek(self, value): if isinstance(value, list): if not list: return (True, value) ret_list = [] for cur_value in value: (correct_type, cast_value) = self.__validate_peek_help(self.pyfos_type, cur_value) if correct_type is True: ret_list.append(cast_value) else: print('invalid type', value, cur_value, self.pyfos_type) return (True, ret_list) else: return self.__validate_peek_help(self.pyfos_type, value)
{ "includes": [ "../common.gypi" ], "targets": [ { "configurations": { "Release": { "defines": [ "NDEBUG" ] } }, "include_dirs": [ "apr-iconv/include" ], "sources": [ "dependencies/apr-iconv/lib/iconv.c", "dependencies/apr-iconv/lib/iconv_ces.c", "dependencies/apr-iconv/lib/iconv_ces_euc.c", "dependencies/apr-iconv/lib/iconv_ces_iso2022.c", "dependencies/apr-iconv/lib/iconv_int.c", "dependencies/apr-iconv/lib/iconv_module.c", "dependencies/apr-iconv/lib/iconv_uc.c" ], "target_name": "apr-iconv", } ] }
{'includes': ['../common.gypi'], 'targets': [{'configurations': {'Release': {'defines': ['NDEBUG']}}, 'include_dirs': ['apr-iconv/include'], 'sources': ['dependencies/apr-iconv/lib/iconv.c', 'dependencies/apr-iconv/lib/iconv_ces.c', 'dependencies/apr-iconv/lib/iconv_ces_euc.c', 'dependencies/apr-iconv/lib/iconv_ces_iso2022.c', 'dependencies/apr-iconv/lib/iconv_int.c', 'dependencies/apr-iconv/lib/iconv_module.c', 'dependencies/apr-iconv/lib/iconv_uc.c'], 'target_name': 'apr-iconv'}]}
def flatten_forest(forest): flat_forest = [] for row in forest: flat_forest += row return flat_forest def deflatten_forest(forest_1d, rows): cols = len(forest_1d) // rows forest_2d = [] for i in range(cols): forest_slice = forest_1d[i*cols: (i+1)*cols] forest_2d.append(forest_slice) return forest_2d
def flatten_forest(forest): flat_forest = [] for row in forest: flat_forest += row return flat_forest def deflatten_forest(forest_1d, rows): cols = len(forest_1d) // rows forest_2d = [] for i in range(cols): forest_slice = forest_1d[i * cols:(i + 1) * cols] forest_2d.append(forest_slice) return forest_2d
#!/usr/bin/python # -*- encoding: utf-8; py-indent-offset: 4 -*- # +------------------------------------------------------------------+ # | ____ _ _ __ __ _ __ | # | / ___| |__ ___ ___| | __ | \/ | |/ / | # | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / | # | | |___| | | | __/ (__| < | | | | . \ | # | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ | # | | # | Copyright Mathias Kettner 2014 mk@mathias-kettner.de | # +------------------------------------------------------------------+ # # This file is part of Check_MK. # The official homepage is at http://mathias-kettner.de/check_mk. # # check_mk is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation in version 2. check_mk is distributed # in the hope that it will be useful, but WITHOUT ANY WARRANTY; with- # out even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. See the GNU General Public License for more de- # tails. You should have received a copy of the GNU General Public # License along with GNU Make; see the file COPYING. If not, write # to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, # Boston, MA 02110-1301 USA. # Temporary variable which stores settings during the backup process backup_perfdata_enabled = True def performancedata_restore(pre_restore = True): global backup_perfdata_enabled site = config.default_site() html.live.set_only_sites([site]) if pre_restore: data = html.live.query("GET status\nColumns: process_performance_data") if data: backup_perfdata_enabled = data[0][0] == 1 else: backup_perfdata_enabled = None # Core is offline # Return if perfdata is not activated - nothing to do.. if not backup_perfdata_enabled: # False or None return [] command = pre_restore and "DISABLE_PERFORMANCE_DATA" or "ENABLE_PERFORMANCE_DATA" html.live.command("[%d] %s" % (int(time.time()), command), site) html.live.set_only_sites() return [] if not defaults.omd_root: backup_domains.update( { "noomd-config": { "group" : _("Configuration"), "title" : _("WATO Configuration"), "prefix" : defaults.default_config_dir, "paths" : [ ("dir", "conf.d/wato"), ("dir", "multisite.d/wato"), ("file", "multisite.d/sites.mk") ], "default" : True, }, "noomd-personalsettings": { "title" : _("Personal User Settings and Custom Views"), "prefix" : defaults.var_dir, "paths" : [ ("dir", "web") ], "default" : True }, "noomd-authorization": { "group" : _("Configuration"), "title" : _("Local Authentication Data"), "prefix" : os.path.dirname(defaults.htpasswd_file), "paths" : [ ("file", "htpasswd"), ("file", "auth.secret"), ("file", "auth.serials") ], "cleanup" : False, "default" : True }}) else: backup_domains.update({ "check_mk": { "group" : _("Configuration"), "title" : _("Hosts, Services, Groups, Timeperiods, Business Intelligence and Monitoring Configuration"), "prefix" : defaults.default_config_dir, "paths" : [ ("file", "liveproxyd.mk"), ("file", "main.mk"), ("file", "final.mk"), ("file", "local.mk"), ("file", "mkeventd.mk"), ("dir", "conf.d"), ("dir", "multisite.d"), ("dir", "mkeventd.d"), ("dir", "mknotifyd.d"), ], "default" : True, }, "authorization": { # This domain is obsolete # It no longer shows up in the backup screen "deprecated" : True, "group" : _("Configuration"), "title" : _("Local Authentication Data"), "prefix" : os.path.dirname(defaults.htpasswd_file), "paths" : [ ("file", "htpasswd"), ("file", "auth.secret"), ("file", "auth.serials") ], "cleanup" : False, "default" : True, }, "authorization_v1": { "group" : _("Configuration"), "title" : _("Local Authentication Data"), "prefix" : defaults.omd_root, "paths" : [ ("file", "etc/htpasswd"), ("file", "etc/auth.secret"), ("file", "etc/auth.serials"), ("file", "var/check_mk/web/*/serial.mk") ], "cleanup" : False, "default" : True }, "personalsettings": { "title" : _("Personal User Settings and Custom Views"), "prefix" : defaults.var_dir, "paths" : [ ("dir", "web") ], "exclude" : [ "*/serial.mk" ], "cleanup" : False, }, "autochecks": { "group" : _("Configuration"), "title" : _("Automatically Detected Services"), "prefix" : defaults.autochecksdir, "paths" : [ ("dir", "") ], }, "snmpwalks": { "title" : _("Stored SNMP Walks"), "prefix" : defaults.snmpwalks_dir, "paths" : [ ("dir", "") ], }, "logwatch": { "group" : _("Historic Data"), "title" : _("Logwatch Data"), "prefix" : defaults.var_dir, "paths" : [ ("dir", "logwatch"), ], }, "mkeventstatus": { "group" : _("Configuration"), "title" : _("Event Console Configuration"), "prefix" : defaults.omd_root, "paths" : [ ("dir", "etc/check_mk/mkeventd.d"), ], "default" : True }, "mkeventhistory": { "group" : _("Historic Data"), "title" : _("Event Console Archive and Current State"), "prefix" : defaults.omd_root, "paths" : [ ("dir", "var/mkeventd/history"), ("file", "var/mkeventd/status"), ("file", "var/mkeventd/messages"), ("dir", "var/mkeventd/messages-history"), ], }, "corehistory": { "group" : _("Historic Data"), "title" : _("Monitoring History"), "prefix" : defaults.omd_root, "paths" : [ ("dir", "var/nagios/archive"), ("file", "var/nagios/nagios.log"), ("dir", "var/icinga/archive"), ("file", "var/icinga/icinga.log"), ("dir", "var/check_mk/core/archive"), ("file", "var/check_mk/core/history"), ], }, "performancedata": { "group" : _("Historic Data"), "title" : _("Performance Data"), "prefix" : defaults.omd_root, "paths" : [ ("dir", "var/pnp4nagios/perfdata"), ("dir", "var/rrdcached"), ("dir", "var/check_mk/rrd"), ], "pre_restore" : lambda: performancedata_restore(pre_restore = True), "post_restore" : lambda: performancedata_restore(pre_restore = False), "checksum" : False, }, "applicationlogs": { "group" : _("Historic Data"), "title" : _("Application Logs"), "prefix" : defaults.omd_root, "paths" : [ ("dir", "var/log"), ("file", "var/nagios/livestatus.log"), ("dir", "var/pnp4nagios/log"), ], "checksum" : False, }, "snmpmibs": { "group" : _("Configuration"), "title" : _("SNMP MIBs"), "prefix" : defaults.omd_root, "paths" : [ ("dir", "local/share/check_mk/mibs"), ], }, "extensions" : { "title" : _("Extensions in <tt>~/local/</tt> and MKPs"), "prefix" : defaults.omd_root, "paths" : [ ("dir", "var/check_mk/packages" ), ("dir", "local" ), ], "default" : True, }, "dokuwiki": { "title" : _("Doku Wiki Pages and Settings"), "prefix" : defaults.omd_root, "paths" : [ ("dir", "var/dokuwiki"), ], }, "nagvis": { "title" : _("NagVis Maps, Configurations and User Files"), "prefix" : defaults.omd_root, "exclude" : [ "etc/nagvis/apache.conf", "etc/nagvis/conf.d/authorisation.ini.php", "etc/nagvis/conf.d/omd.ini.php", "etc/nagvis/conf.d/cookie_auth.ini.php", "etc/nagvis/conf.d/urls.ini.php" ], "paths" : [ ("dir", "local/share/nagvis"), ("dir", "etc/nagvis"), ("dir", "var/nagvis"), ], }, })
backup_perfdata_enabled = True def performancedata_restore(pre_restore=True): global backup_perfdata_enabled site = config.default_site() html.live.set_only_sites([site]) if pre_restore: data = html.live.query('GET status\nColumns: process_performance_data') if data: backup_perfdata_enabled = data[0][0] == 1 else: backup_perfdata_enabled = None if not backup_perfdata_enabled: return [] command = pre_restore and 'DISABLE_PERFORMANCE_DATA' or 'ENABLE_PERFORMANCE_DATA' html.live.command('[%d] %s' % (int(time.time()), command), site) html.live.set_only_sites() return [] if not defaults.omd_root: backup_domains.update({'noomd-config': {'group': _('Configuration'), 'title': _('WATO Configuration'), 'prefix': defaults.default_config_dir, 'paths': [('dir', 'conf.d/wato'), ('dir', 'multisite.d/wato'), ('file', 'multisite.d/sites.mk')], 'default': True}, 'noomd-personalsettings': {'title': _('Personal User Settings and Custom Views'), 'prefix': defaults.var_dir, 'paths': [('dir', 'web')], 'default': True}, 'noomd-authorization': {'group': _('Configuration'), 'title': _('Local Authentication Data'), 'prefix': os.path.dirname(defaults.htpasswd_file), 'paths': [('file', 'htpasswd'), ('file', 'auth.secret'), ('file', 'auth.serials')], 'cleanup': False, 'default': True}}) else: backup_domains.update({'check_mk': {'group': _('Configuration'), 'title': _('Hosts, Services, Groups, Timeperiods, Business Intelligence and Monitoring Configuration'), 'prefix': defaults.default_config_dir, 'paths': [('file', 'liveproxyd.mk'), ('file', 'main.mk'), ('file', 'final.mk'), ('file', 'local.mk'), ('file', 'mkeventd.mk'), ('dir', 'conf.d'), ('dir', 'multisite.d'), ('dir', 'mkeventd.d'), ('dir', 'mknotifyd.d')], 'default': True}, 'authorization': {'deprecated': True, 'group': _('Configuration'), 'title': _('Local Authentication Data'), 'prefix': os.path.dirname(defaults.htpasswd_file), 'paths': [('file', 'htpasswd'), ('file', 'auth.secret'), ('file', 'auth.serials')], 'cleanup': False, 'default': True}, 'authorization_v1': {'group': _('Configuration'), 'title': _('Local Authentication Data'), 'prefix': defaults.omd_root, 'paths': [('file', 'etc/htpasswd'), ('file', 'etc/auth.secret'), ('file', 'etc/auth.serials'), ('file', 'var/check_mk/web/*/serial.mk')], 'cleanup': False, 'default': True}, 'personalsettings': {'title': _('Personal User Settings and Custom Views'), 'prefix': defaults.var_dir, 'paths': [('dir', 'web')], 'exclude': ['*/serial.mk'], 'cleanup': False}, 'autochecks': {'group': _('Configuration'), 'title': _('Automatically Detected Services'), 'prefix': defaults.autochecksdir, 'paths': [('dir', '')]}, 'snmpwalks': {'title': _('Stored SNMP Walks'), 'prefix': defaults.snmpwalks_dir, 'paths': [('dir', '')]}, 'logwatch': {'group': _('Historic Data'), 'title': _('Logwatch Data'), 'prefix': defaults.var_dir, 'paths': [('dir', 'logwatch')]}, 'mkeventstatus': {'group': _('Configuration'), 'title': _('Event Console Configuration'), 'prefix': defaults.omd_root, 'paths': [('dir', 'etc/check_mk/mkeventd.d')], 'default': True}, 'mkeventhistory': {'group': _('Historic Data'), 'title': _('Event Console Archive and Current State'), 'prefix': defaults.omd_root, 'paths': [('dir', 'var/mkeventd/history'), ('file', 'var/mkeventd/status'), ('file', 'var/mkeventd/messages'), ('dir', 'var/mkeventd/messages-history')]}, 'corehistory': {'group': _('Historic Data'), 'title': _('Monitoring History'), 'prefix': defaults.omd_root, 'paths': [('dir', 'var/nagios/archive'), ('file', 'var/nagios/nagios.log'), ('dir', 'var/icinga/archive'), ('file', 'var/icinga/icinga.log'), ('dir', 'var/check_mk/core/archive'), ('file', 'var/check_mk/core/history')]}, 'performancedata': {'group': _('Historic Data'), 'title': _('Performance Data'), 'prefix': defaults.omd_root, 'paths': [('dir', 'var/pnp4nagios/perfdata'), ('dir', 'var/rrdcached'), ('dir', 'var/check_mk/rrd')], 'pre_restore': lambda : performancedata_restore(pre_restore=True), 'post_restore': lambda : performancedata_restore(pre_restore=False), 'checksum': False}, 'applicationlogs': {'group': _('Historic Data'), 'title': _('Application Logs'), 'prefix': defaults.omd_root, 'paths': [('dir', 'var/log'), ('file', 'var/nagios/livestatus.log'), ('dir', 'var/pnp4nagios/log')], 'checksum': False}, 'snmpmibs': {'group': _('Configuration'), 'title': _('SNMP MIBs'), 'prefix': defaults.omd_root, 'paths': [('dir', 'local/share/check_mk/mibs')]}, 'extensions': {'title': _('Extensions in <tt>~/local/</tt> and MKPs'), 'prefix': defaults.omd_root, 'paths': [('dir', 'var/check_mk/packages'), ('dir', 'local')], 'default': True}, 'dokuwiki': {'title': _('Doku Wiki Pages and Settings'), 'prefix': defaults.omd_root, 'paths': [('dir', 'var/dokuwiki')]}, 'nagvis': {'title': _('NagVis Maps, Configurations and User Files'), 'prefix': defaults.omd_root, 'exclude': ['etc/nagvis/apache.conf', 'etc/nagvis/conf.d/authorisation.ini.php', 'etc/nagvis/conf.d/omd.ini.php', 'etc/nagvis/conf.d/cookie_auth.ini.php', 'etc/nagvis/conf.d/urls.ini.php'], 'paths': [('dir', 'local/share/nagvis'), ('dir', 'etc/nagvis'), ('dir', 'var/nagvis')]}})
def create_mine_field(n, m, mines): mine_field = [ [0 for _ in range(m) ] for _ in range(n) ] for mine in mines: x, y = mine mine_field[x-1][y-1] = '*' return mine_field def neighbours(i, j, m): nearest = [m[x][y] for x in [i-1, i, i+1] for y in [j-1, j, j+1] if x in range(0, len(m)) and y in range(0, len(m[x])) and (x, y) != (i, j)] nearest_count = nearest.count('*') return nearest_count def check_field(mine_field, n, m): for x in range(n): for y in range(m): if mine_field[x][y] == '*': continue else: mine_field[x][y] = neighbours(i=x, j=y, m=mine_field) with open('input.txt') as file: lines = file.readlines() n, m, k = list(map(int, lines[0].split())) mines = [] for line in lines[1::]: mines.append(list(map(int, line.split()))) mine_field = create_mine_field(n, m, mines) check_field(mine_field, n, m) with open('output.txt', 'w') as file: rows = [] for row in mine_field: line = f"{' '.join([str(item) for item in row])}\n" rows.append(line) file.writelines(rows)
def create_mine_field(n, m, mines): mine_field = [[0 for _ in range(m)] for _ in range(n)] for mine in mines: (x, y) = mine mine_field[x - 1][y - 1] = '*' return mine_field def neighbours(i, j, m): nearest = [m[x][y] for x in [i - 1, i, i + 1] for y in [j - 1, j, j + 1] if x in range(0, len(m)) and y in range(0, len(m[x])) and ((x, y) != (i, j))] nearest_count = nearest.count('*') return nearest_count def check_field(mine_field, n, m): for x in range(n): for y in range(m): if mine_field[x][y] == '*': continue else: mine_field[x][y] = neighbours(i=x, j=y, m=mine_field) with open('input.txt') as file: lines = file.readlines() (n, m, k) = list(map(int, lines[0].split())) mines = [] for line in lines[1:]: mines.append(list(map(int, line.split()))) mine_field = create_mine_field(n, m, mines) check_field(mine_field, n, m) with open('output.txt', 'w') as file: rows = [] for row in mine_field: line = f"{' '.join([str(item) for item in row])}\n" rows.append(line) file.writelines(rows)
line = '-'*39 blank = '|' + ' '*37 + '|' print(line) print(blank) print(blank) print(blank) print(blank) print(blank) print(line)
line = '-' * 39 blank = '|' + ' ' * 37 + '|' print(line) print(blank) print(blank) print(blank) print(blank) print(blank) print(line)
"""Iteration utilities""" class Batch: """Yields batches (groups) from an iterable Modified from: http://codereview.stackexchange.com/questions/118883/split-up-an-iterable-into-batches Args: iterable (iterable) any iterable limit (int) How many items to include per group """ def __init__(self, iterable, limit=None): self.iterator = iter(iterable) self.limit = limit try: self.current = next(self.iterator) except StopIteration: self.on_going = False else: self.on_going = True def group(self): """Yield a group from the iterable""" yield self.current # start enumerate at 1 because we already yielded the last saved item for num, item in enumerate(self.iterator, 1): self.current = item if num == self.limit: break yield item else: self.on_going = False def __iter__(self): """Implementation of __iter__ to allow a standard interface: for group in Batch(iterable, 10): do_stuff(group) """ while self.on_going: yield self.group()
"""Iteration utilities""" class Batch: """Yields batches (groups) from an iterable Modified from: http://codereview.stackexchange.com/questions/118883/split-up-an-iterable-into-batches Args: iterable (iterable) any iterable limit (int) How many items to include per group """ def __init__(self, iterable, limit=None): self.iterator = iter(iterable) self.limit = limit try: self.current = next(self.iterator) except StopIteration: self.on_going = False else: self.on_going = True def group(self): """Yield a group from the iterable""" yield self.current for (num, item) in enumerate(self.iterator, 1): self.current = item if num == self.limit: break yield item else: self.on_going = False def __iter__(self): """Implementation of __iter__ to allow a standard interface: for group in Batch(iterable, 10): do_stuff(group) """ while self.on_going: yield self.group()
""" Created on February 17 2021 @author: Andreas Spanopoulos Contains custom Exceptions classes that can be used for debugging purposes. """ class GameIsNotOverError(Exception): """ Custom exception raised when the outcome of a game that has not yet finished is queried """ def __init__(self, *args): """ constructor """ self.fen = args[0] if args else None def __str__(self): """ print when raised outside try block """ message = 'The game has not reached a terminal state.' if self.fen: message += f' The current FEN position is: {self.fen}' return message class InvalidConfigurationError(Exception): """ Custom Error raised when a configuration file is invalid """ def __init__(self, **kwargs): """ constructor """ super(InvalidConfigurationError, self).__init__() self.message = '' + kwargs.get('msg', '') def __str__(self): """ return string representation of the error """ return self.message class InvalidArchitectureError(Exception): """ Custom Error raised when the architecture of a Network is invalid """ def __init__(self, **kwargs): """ constructor """ super(InvalidArchitectureError, self).__init__() self.message = '' + kwargs.get('msg', '') def __str__(self): """ return string representation of the error """ return self.message
""" Created on February 17 2021 @author: Andreas Spanopoulos Contains custom Exceptions classes that can be used for debugging purposes. """ class Gameisnotovererror(Exception): """ Custom exception raised when the outcome of a game that has not yet finished is queried """ def __init__(self, *args): """ constructor """ self.fen = args[0] if args else None def __str__(self): """ print when raised outside try block """ message = 'The game has not reached a terminal state.' if self.fen: message += f' The current FEN position is: {self.fen}' return message class Invalidconfigurationerror(Exception): """ Custom Error raised when a configuration file is invalid """ def __init__(self, **kwargs): """ constructor """ super(InvalidConfigurationError, self).__init__() self.message = '' + kwargs.get('msg', '') def __str__(self): """ return string representation of the error """ return self.message class Invalidarchitectureerror(Exception): """ Custom Error raised when the architecture of a Network is invalid """ def __init__(self, **kwargs): """ constructor """ super(InvalidArchitectureError, self).__init__() self.message = '' + kwargs.get('msg', '') def __str__(self): """ return string representation of the error """ return self.message
class Solution: def findMedianSortedArrays(self, nums1: List[int], nums2: List[int]) -> float: if len(a:=nums1) > len(b:=nums2): a, b = b, a n = len(a) m = len(b) median, i, j = 0, 0, 0 min_index = 0 max_index = n while (min_index <= max_index) : i = int((min_index + max_index) / 2) j = int(((n + m + 1) / 2) - i) if (i < n and j > 0 and b[j - 1] > a[i]) : min_index = i + 1 elif (i > 0 and j < m and b[j] < a[i - 1]) : max_index = i - 1 else : if (i == 0) : median = b[j - 1] elif (j == 0) : median = a[i - 1] else : median = maximum(a[i - 1], b[j - 1]) break if ((n + m) % 2 == 1) : return median if (i == n) : return ((median + b[j]) / 2.0) if (j == m) : return ((median + a[i]) / 2.0) return ((median + minimum(a[i], b[j])) / 2.0) def maximum(a, b) : return a if a > b else b def minimum(a, b) : return a if a < b else b
class Solution: def find_median_sorted_arrays(self, nums1: List[int], nums2: List[int]) -> float: if len((a := nums1)) > len((b := nums2)): (a, b) = (b, a) n = len(a) m = len(b) (median, i, j) = (0, 0, 0) min_index = 0 max_index = n while min_index <= max_index: i = int((min_index + max_index) / 2) j = int((n + m + 1) / 2 - i) if i < n and j > 0 and (b[j - 1] > a[i]): min_index = i + 1 elif i > 0 and j < m and (b[j] < a[i - 1]): max_index = i - 1 else: if i == 0: median = b[j - 1] elif j == 0: median = a[i - 1] else: median = maximum(a[i - 1], b[j - 1]) break if (n + m) % 2 == 1: return median if i == n: return (median + b[j]) / 2.0 if j == m: return (median + a[i]) / 2.0 return (median + minimum(a[i], b[j])) / 2.0 def maximum(a, b): return a if a > b else b def minimum(a, b): return a if a < b else b
''' Python function to check whether a number is divisible by another number. Accept two integers values form the user. ''' def multiple(m, n): return True if m % n == 0 else False print(multiple(20, 5)) print(multiple(7, 2))
""" Python function to check whether a number is divisible by another number. Accept two integers values form the user. """ def multiple(m, n): return True if m % n == 0 else False print(multiple(20, 5)) print(multiple(7, 2))
# # PySNMP MIB module MISSION-CRITICAL-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MISSION-CRITICAL-MIB # Produced by pysmi-0.3.4 at Wed May 1 14:12:55 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") NotificationType, TimeTicks, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, MibIdentifier, Bits, NotificationType, enterprises, Gauge32, Counter32, Unsigned32, IpAddress, Integer32, ModuleIdentity, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "TimeTicks", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "MibIdentifier", "Bits", "NotificationType", "enterprises", "Gauge32", "Counter32", "Unsigned32", "IpAddress", "Integer32", "ModuleIdentity", "ObjectIdentity") TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString") missionCritical = MibIdentifier((1, 3, 6, 1, 4, 1, 2349)) mcsCompanyInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 2349, 1)) mcsSoftware = MibIdentifier((1, 3, 6, 1, 4, 1, 2349, 2)) eemProductInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 2349, 2, 1)) omProductInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 2349, 2, 2)) ownershipDetails = MibScalar((1, 3, 6, 1, 4, 1, 2349, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: ownershipDetails.setStatus('mandatory') if mibBuilder.loadTexts: ownershipDetails.setDescription('Details of the company providing this MIB') contactDetails = MibScalar((1, 3, 6, 1, 4, 1, 2349, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: contactDetails.setStatus('mandatory') if mibBuilder.loadTexts: contactDetails.setDescription('Contact responsible for maintaining this MIB') eemService = MibIdentifier((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1)) version = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readonly") if mibBuilder.loadTexts: version.setStatus('mandatory') if mibBuilder.loadTexts: version.setDescription('The version of the EEM Agent running') primaryServer = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readonly") if mibBuilder.loadTexts: primaryServer.setStatus('mandatory') if mibBuilder.loadTexts: primaryServer.setDescription('The Primary Server for this EEM Agent') serviceState = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: serviceState.setStatus('mandatory') if mibBuilder.loadTexts: serviceState.setDescription('State of the service. Running is 1, stopped is 2') serviceUpTime = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 4), TimeTicks()).setMaxAccess("readonly") if mibBuilder.loadTexts: serviceUpTime.setStatus('mandatory') if mibBuilder.loadTexts: serviceUpTime.setDescription('No. of milliseconds since the service was started') redTrapCount = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 5), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: redTrapCount.setStatus('deprecated') if mibBuilder.loadTexts: redTrapCount.setDescription('The number of red alert traps sent since the service was started') orangeTrapCount = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 6), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: orangeTrapCount.setStatus('deprecated') if mibBuilder.loadTexts: orangeTrapCount.setDescription('The number of orange alert traps sent since the service was started') amberTrapCount = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 7), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: amberTrapCount.setStatus('deprecated') if mibBuilder.loadTexts: amberTrapCount.setDescription('The number of yellow alert traps sent since the service was started') blueTrapCount = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 8), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: blueTrapCount.setStatus('deprecated') if mibBuilder.loadTexts: blueTrapCount.setDescription('The number of blue alert traps sent since the service was started') greenTrapCount = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 9), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: greenTrapCount.setStatus('deprecated') if mibBuilder.loadTexts: greenTrapCount.setDescription('The number of Green Alert Traps since the service was started') eemLastTrap = MibIdentifier((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2)) trapTime = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 1), TimeTicks()).setMaxAccess("readonly") if mibBuilder.loadTexts: trapTime.setStatus('deprecated') if mibBuilder.loadTexts: trapTime.setDescription('Time of the last trap sent') alertLevel = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("red", 1), ("orange", 2), ("yellow", 3), ("blue", 4), ("green", 5)))).setMaxAccess("readonly") if mibBuilder.loadTexts: alertLevel.setStatus('mandatory') if mibBuilder.loadTexts: alertLevel.setDescription('Alert level of the last trap sent. red=1, orange=2, yellow=3, blue=4, green=5') logType = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 99))).clone(namedValues=NamedValues(("ntevent", 1), ("application", 2), ("snmp", 3), ("wbem", 4), ("activemonitoring", 5), ("performancemonitoring", 6), ("timedevent", 7), ("eem", 99)))).setMaxAccess("readonly") if mibBuilder.loadTexts: logType.setStatus('mandatory') if mibBuilder.loadTexts: logType.setDescription('Log type generating the last trap sent. system=1,application=2,security=3 (fill in others here) EEM=99') server = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: server.setStatus('mandatory') if mibBuilder.loadTexts: server.setDescription('Server generating the last trap sent') source = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: source.setStatus('mandatory') if mibBuilder.loadTexts: source.setDescription('Source generating the last trap sent') user = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: user.setStatus('mandatory') if mibBuilder.loadTexts: user.setDescription('User generating the last trap sent') eventID = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 7), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: eventID.setStatus('mandatory') if mibBuilder.loadTexts: eventID.setDescription('Event ID of the last trap sent') description = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 1024))).setMaxAccess("readonly") if mibBuilder.loadTexts: description.setStatus('mandatory') if mibBuilder.loadTexts: description.setDescription('Text description of the last trap sent') genericTrapNumber = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 9), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: genericTrapNumber.setStatus('mandatory') if mibBuilder.loadTexts: genericTrapNumber.setDescription('The generic trap number of the last trap sent') specificTrapNumber = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 10), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: specificTrapNumber.setStatus('mandatory') if mibBuilder.loadTexts: specificTrapNumber.setDescription('The user specific trap number of the last trap sent') serviceGoingDown = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0,2)) if mibBuilder.loadTexts: serviceGoingDown.setDescription('The SeNTry EEM Sender service is stopping.') serviceComingUp = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0,3)) if mibBuilder.loadTexts: serviceComingUp.setDescription('The SeNTry EEM Sender service is starting.') gathererServiceGoingDown = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0,4)) if mibBuilder.loadTexts: gathererServiceGoingDown.setDescription('The SeNTry EEM Gatherer service is stopping.') gathererServiceComingUp = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0,5)) if mibBuilder.loadTexts: gathererServiceComingUp.setDescription('The SeNTry EEM Gatherer service is starting.') eemRedAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0,100)).setObjects(("MISSION-CRITICAL-MIB", "alertLevel"), ("MISSION-CRITICAL-MIB", "logType"), ("MISSION-CRITICAL-MIB", "server"), ("MISSION-CRITICAL-MIB", "source"), ("MISSION-CRITICAL-MIB", "user"), ("MISSION-CRITICAL-MIB", "eventID"), ("MISSION-CRITICAL-MIB", "description")) if mibBuilder.loadTexts: eemRedAlert.setDescription('A SeNTry EEM red alert has been generated.') eemOrangeAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0,200)).setObjects(("MISSION-CRITICAL-MIB", "alertLevel"), ("MISSION-CRITICAL-MIB", "logType"), ("MISSION-CRITICAL-MIB", "server"), ("MISSION-CRITICAL-MIB", "source"), ("MISSION-CRITICAL-MIB", "user"), ("MISSION-CRITICAL-MIB", "eventID"), ("MISSION-CRITICAL-MIB", "description")) if mibBuilder.loadTexts: eemOrangeAlert.setDescription('A SeNTry EEM orange alert has been generated.') eemYellowAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0,300)).setObjects(("MISSION-CRITICAL-MIB", "alertLevel"), ("MISSION-CRITICAL-MIB", "logType"), ("MISSION-CRITICAL-MIB", "server"), ("MISSION-CRITICAL-MIB", "source"), ("MISSION-CRITICAL-MIB", "user"), ("MISSION-CRITICAL-MIB", "eventID"), ("MISSION-CRITICAL-MIB", "description")) if mibBuilder.loadTexts: eemYellowAlert.setDescription('A SeNTry EEM yellow alert has been generated.') eemBlueAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0,400)).setObjects(("MISSION-CRITICAL-MIB", "alertLevel"), ("MISSION-CRITICAL-MIB", "logType"), ("MISSION-CRITICAL-MIB", "server"), ("MISSION-CRITICAL-MIB", "source"), ("MISSION-CRITICAL-MIB", "user"), ("MISSION-CRITICAL-MIB", "eventID"), ("MISSION-CRITICAL-MIB", "description")) if mibBuilder.loadTexts: eemBlueAlert.setDescription('A SeNTry EEM blue alert has been generated.') eemGreenAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0,500)).setObjects(("MISSION-CRITICAL-MIB", "alertLevel"), ("MISSION-CRITICAL-MIB", "logType"), ("MISSION-CRITICAL-MIB", "server"), ("MISSION-CRITICAL-MIB", "source"), ("MISSION-CRITICAL-MIB", "user"), ("MISSION-CRITICAL-MIB", "eventID"), ("MISSION-CRITICAL-MIB", "description")) if mibBuilder.loadTexts: eemGreenAlert.setDescription('A SeNTry EEM green alert has been generated.') omService = MibIdentifier((1, 3, 6, 1, 4, 1, 2349, 2, 2, 1)) omLastTrap = MibIdentifier((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2)) omTrapTime = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 1), TimeTicks()).setMaxAccess("readonly") if mibBuilder.loadTexts: omTrapTime.setStatus('deprecated') if mibBuilder.loadTexts: omTrapTime.setDescription('Time of the last trap sent.') omAlertLevel = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 2), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: omAlertLevel.setStatus('mandatory') if mibBuilder.loadTexts: omAlertLevel.setDescription('Alert level of the last trap sent.') omAlertLevelName = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: omAlertLevelName.setStatus('mandatory') if mibBuilder.loadTexts: omAlertLevelName.setDescription('A textual description of the alert level for the last trap sent.') omServer = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: omServer.setStatus('mandatory') if mibBuilder.loadTexts: omServer.setDescription('Server generating the last trap sent.') omSource = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: omSource.setStatus('mandatory') if mibBuilder.loadTexts: omSource.setDescription('Source generating the last trap sent.') omOwner = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: omOwner.setStatus('mandatory') if mibBuilder.loadTexts: omOwner.setDescription('User generating the last trap sent.') omDescription = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1024))).setMaxAccess("readonly") if mibBuilder.loadTexts: omDescription.setStatus('mandatory') if mibBuilder.loadTexts: omDescription.setDescription('Text description of the last trap sent.') omCustomField1 = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1024))).setMaxAccess("readonly") if mibBuilder.loadTexts: omCustomField1.setStatus('mandatory') if mibBuilder.loadTexts: omCustomField1.setDescription('Custom Field 1 defined by user') omCustomField2 = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1024))).setMaxAccess("readonly") if mibBuilder.loadTexts: omCustomField2.setStatus('mandatory') if mibBuilder.loadTexts: omCustomField2.setDescription('Custom Field 2 defined by user') omCustomField3 = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1024))).setMaxAccess("readonly") if mibBuilder.loadTexts: omCustomField3.setStatus('mandatory') if mibBuilder.loadTexts: omCustomField3.setDescription('Custom Field 3 defined by user') omCustomField4 = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1024))).setMaxAccess("readonly") if mibBuilder.loadTexts: omCustomField4.setStatus('mandatory') if mibBuilder.loadTexts: omCustomField4.setDescription('Custom Field 4 defined by user') omCustomField5 = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 12), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1024))).setMaxAccess("readonly") if mibBuilder.loadTexts: omCustomField5.setStatus('mandatory') if mibBuilder.loadTexts: omCustomField5.setDescription('Custom Field 5 defined by user') omAlertURL = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 13), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 2048))).setMaxAccess("readonly") if mibBuilder.loadTexts: omAlertURL.setStatus('mandatory') if mibBuilder.loadTexts: omAlertURL.setDescription('URL used to view alert details') omGenericTrapNumber = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 14), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: omGenericTrapNumber.setStatus('mandatory') if mibBuilder.loadTexts: omGenericTrapNumber.setDescription('The generic trap number of the last trap sent.') omSpecificTrapNumber = MibScalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 15), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: omSpecificTrapNumber.setStatus('mandatory') if mibBuilder.loadTexts: omSpecificTrapNumber.setDescription('The user specific trap number of the last trap sent') omBlueAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0,10)).setObjects(("MISSION-CRITICAL-MIB", "omAlertLevel"), ("MISSION-CRITICAL-MIB", "omAlertLevelName"), ("MISSION-CRITICAL-MIB", "omServer"), ("MISSION-CRITICAL-MIB", "omSource"), ("MISSION-CRITICAL-MIB", "omOwner"), ("MISSION-CRITICAL-MIB", "omDescription"), ("MISSION-CRITICAL-MIB", "omCustomField1"), ("MISSION-CRITICAL-MIB", "omCustomField2"), ("MISSION-CRITICAL-MIB", "omCustomField3"), ("MISSION-CRITICAL-MIB", "omCustomField4"), ("MISSION-CRITICAL-MIB", "omCustomField5"), ("MISSION-CRITICAL-MIB", "omAlertURL")) if mibBuilder.loadTexts: omBlueAlert.setDescription('A OnePoint Operations Manager Blue Alert has been generated.') omGreenAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0,20)).setObjects(("MISSION-CRITICAL-MIB", "omAlertLevel"), ("MISSION-CRITICAL-MIB", "omAlertLevelName"), ("MISSION-CRITICAL-MIB", "omServer"), ("MISSION-CRITICAL-MIB", "omSource"), ("MISSION-CRITICAL-MIB", "omOwner"), ("MISSION-CRITICAL-MIB", "omDescription"), ("MISSION-CRITICAL-MIB", "omCustomField1"), ("MISSION-CRITICAL-MIB", "omCustomField2"), ("MISSION-CRITICAL-MIB", "omCustomField3"), ("MISSION-CRITICAL-MIB", "omCustomField4"), ("MISSION-CRITICAL-MIB", "omCustomField5"), ("MISSION-CRITICAL-MIB", "omAlertURL")) if mibBuilder.loadTexts: omGreenAlert.setDescription('A OnePoint Operations Manager Green Alert has been generated.') omYellowAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0,30)).setObjects(("MISSION-CRITICAL-MIB", "omAlertLevel"), ("MISSION-CRITICAL-MIB", "omAlertLevelName"), ("MISSION-CRITICAL-MIB", "omServer"), ("MISSION-CRITICAL-MIB", "omSource"), ("MISSION-CRITICAL-MIB", "omOwner"), ("MISSION-CRITICAL-MIB", "omDescription"), ("MISSION-CRITICAL-MIB", "omCustomField1"), ("MISSION-CRITICAL-MIB", "omCustomField2"), ("MISSION-CRITICAL-MIB", "omCustomField3"), ("MISSION-CRITICAL-MIB", "omCustomField4"), ("MISSION-CRITICAL-MIB", "omCustomField5"), ("MISSION-CRITICAL-MIB", "omAlertURL")) if mibBuilder.loadTexts: omYellowAlert.setDescription('A OnePoint Operations Manager Yellow Alert has been generated.') omOrangeAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0,40)).setObjects(("MISSION-CRITICAL-MIB", "omAlertLevel"), ("MISSION-CRITICAL-MIB", "omAlertLevelName"), ("MISSION-CRITICAL-MIB", "omServer"), ("MISSION-CRITICAL-MIB", "omSource"), ("MISSION-CRITICAL-MIB", "omOwner"), ("MISSION-CRITICAL-MIB", "omDescription"), ("MISSION-CRITICAL-MIB", "omCustomField1"), ("MISSION-CRITICAL-MIB", "omCustomField2"), ("MISSION-CRITICAL-MIB", "omCustomField3"), ("MISSION-CRITICAL-MIB", "omCustomField4"), ("MISSION-CRITICAL-MIB", "omCustomField5"), ("MISSION-CRITICAL-MIB", "omAlertURL")) if mibBuilder.loadTexts: omOrangeAlert.setDescription('A OnePoint Operations Manager Orange Alert has been generated.') omRedCriticalErrorAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0,50)).setObjects(("MISSION-CRITICAL-MIB", "omAlertLevel"), ("MISSION-CRITICAL-MIB", "omAlertLevelName"), ("MISSION-CRITICAL-MIB", "omServer"), ("MISSION-CRITICAL-MIB", "omSource"), ("MISSION-CRITICAL-MIB", "omOwner"), ("MISSION-CRITICAL-MIB", "omDescription"), ("MISSION-CRITICAL-MIB", "omCustomField1"), ("MISSION-CRITICAL-MIB", "omCustomField2"), ("MISSION-CRITICAL-MIB", "omCustomField3"), ("MISSION-CRITICAL-MIB", "omCustomField4"), ("MISSION-CRITICAL-MIB", "omCustomField5"), ("MISSION-CRITICAL-MIB", "omAlertURL")) if mibBuilder.loadTexts: omRedCriticalErrorAlert.setDescription('A OnePoint Operations Manager Critical Error Alert has been generated.') omRedSecurityBreachAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0,60)).setObjects(("MISSION-CRITICAL-MIB", "omAlertLevel"), ("MISSION-CRITICAL-MIB", "omAlertLevelName"), ("MISSION-CRITICAL-MIB", "omServer"), ("MISSION-CRITICAL-MIB", "omSource"), ("MISSION-CRITICAL-MIB", "omOwner"), ("MISSION-CRITICAL-MIB", "omDescription"), ("MISSION-CRITICAL-MIB", "omCustomField1"), ("MISSION-CRITICAL-MIB", "omCustomField2"), ("MISSION-CRITICAL-MIB", "omCustomField3"), ("MISSION-CRITICAL-MIB", "omCustomField4"), ("MISSION-CRITICAL-MIB", "omCustomField5"), ("MISSION-CRITICAL-MIB", "omAlertURL")) if mibBuilder.loadTexts: omRedSecurityBreachAlert.setDescription('A OnePoint Operations Manager Security Breach Alert has been generated.') omRedServiceUnavailableAlert = NotificationType((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0,70)).setObjects(("MISSION-CRITICAL-MIB", "omAlertLevel"), ("MISSION-CRITICAL-MIB", "omAlertLevelName"), ("MISSION-CRITICAL-MIB", "omServer"), ("MISSION-CRITICAL-MIB", "omSource"), ("MISSION-CRITICAL-MIB", "omOwner"), ("MISSION-CRITICAL-MIB", "omDescription"), ("MISSION-CRITICAL-MIB", "omCustomField1"), ("MISSION-CRITICAL-MIB", "omCustomField2"), ("MISSION-CRITICAL-MIB", "omCustomField3"), ("MISSION-CRITICAL-MIB", "omCustomField4"), ("MISSION-CRITICAL-MIB", "omCustomField5"), ("MISSION-CRITICAL-MIB", "omAlertURL")) if mibBuilder.loadTexts: omRedServiceUnavailableAlert.setDescription('A OnePoint Operations Manager Service Unavailable Alert has been generated.') mibBuilder.exportSymbols("MISSION-CRITICAL-MIB", serviceUpTime=serviceUpTime, omYellowAlert=omYellowAlert, redTrapCount=redTrapCount, eemOrangeAlert=eemOrangeAlert, mcsCompanyInfo=mcsCompanyInfo, omCustomField4=omCustomField4, gathererServiceComingUp=gathererServiceComingUp, serviceState=serviceState, omCustomField2=omCustomField2, omDescription=omDescription, missionCritical=missionCritical, omService=omService, eventID=eventID, omAlertLevelName=omAlertLevelName, serviceGoingDown=serviceGoingDown, omProductInfo=omProductInfo, trapTime=trapTime, eemService=eemService, eemYellowAlert=eemYellowAlert, omRedCriticalErrorAlert=omRedCriticalErrorAlert, omRedSecurityBreachAlert=omRedSecurityBreachAlert, blueTrapCount=blueTrapCount, greenTrapCount=greenTrapCount, omServer=omServer, mcsSoftware=mcsSoftware, serviceComingUp=serviceComingUp, omCustomField1=omCustomField1, omGreenAlert=omGreenAlert, eemLastTrap=eemLastTrap, omCustomField5=omCustomField5, omAlertURL=omAlertURL, omOrangeAlert=omOrangeAlert, omTrapTime=omTrapTime, logType=logType, amberTrapCount=amberTrapCount, user=user, specificTrapNumber=specificTrapNumber, source=source, omBlueAlert=omBlueAlert, ownershipDetails=ownershipDetails, eemRedAlert=eemRedAlert, omSpecificTrapNumber=omSpecificTrapNumber, omOwner=omOwner, gathererServiceGoingDown=gathererServiceGoingDown, orangeTrapCount=orangeTrapCount, server=server, omLastTrap=omLastTrap, omAlertLevel=omAlertLevel, omCustomField3=omCustomField3, omGenericTrapNumber=omGenericTrapNumber, description=description, genericTrapNumber=genericTrapNumber, eemGreenAlert=eemGreenAlert, primaryServer=primaryServer, alertLevel=alertLevel, version=version, omSource=omSource, eemProductInfo=eemProductInfo, eemBlueAlert=eemBlueAlert, contactDetails=contactDetails, omRedServiceUnavailableAlert=omRedServiceUnavailableAlert)
(integer, octet_string, object_identifier) = mibBuilder.importSymbols('ASN1', 'Integer', 'OctetString', 'ObjectIdentifier') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (single_value_constraint, constraints_union, value_size_constraint, constraints_intersection, value_range_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ConstraintsUnion', 'ValueSizeConstraint', 'ConstraintsIntersection', 'ValueRangeConstraint') (notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance') (notification_type, time_ticks, iso, mib_scalar, mib_table, mib_table_row, mib_table_column, counter64, mib_identifier, bits, notification_type, enterprises, gauge32, counter32, unsigned32, ip_address, integer32, module_identity, object_identity) = mibBuilder.importSymbols('SNMPv2-SMI', 'NotificationType', 'TimeTicks', 'iso', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Counter64', 'MibIdentifier', 'Bits', 'NotificationType', 'enterprises', 'Gauge32', 'Counter32', 'Unsigned32', 'IpAddress', 'Integer32', 'ModuleIdentity', 'ObjectIdentity') (textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString') mission_critical = mib_identifier((1, 3, 6, 1, 4, 1, 2349)) mcs_company_info = mib_identifier((1, 3, 6, 1, 4, 1, 2349, 1)) mcs_software = mib_identifier((1, 3, 6, 1, 4, 1, 2349, 2)) eem_product_info = mib_identifier((1, 3, 6, 1, 4, 1, 2349, 2, 1)) om_product_info = mib_identifier((1, 3, 6, 1, 4, 1, 2349, 2, 2)) ownership_details = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 1, 1), display_string().subtype(subtypeSpec=value_size_constraint(1, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: ownershipDetails.setStatus('mandatory') if mibBuilder.loadTexts: ownershipDetails.setDescription('Details of the company providing this MIB') contact_details = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 1, 2), display_string().subtype(subtypeSpec=value_size_constraint(1, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: contactDetails.setStatus('mandatory') if mibBuilder.loadTexts: contactDetails.setDescription('Contact responsible for maintaining this MIB') eem_service = mib_identifier((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1)) version = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 1), display_string().subtype(subtypeSpec=value_size_constraint(1, 16))).setMaxAccess('readonly') if mibBuilder.loadTexts: version.setStatus('mandatory') if mibBuilder.loadTexts: version.setDescription('The version of the EEM Agent running') primary_server = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 2), display_string().subtype(subtypeSpec=value_size_constraint(1, 16))).setMaxAccess('readonly') if mibBuilder.loadTexts: primaryServer.setStatus('mandatory') if mibBuilder.loadTexts: primaryServer.setDescription('The Primary Server for this EEM Agent') service_state = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('up', 1), ('down', 2)))).setMaxAccess('readonly') if mibBuilder.loadTexts: serviceState.setStatus('mandatory') if mibBuilder.loadTexts: serviceState.setDescription('State of the service. Running is 1, stopped is 2') service_up_time = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 4), time_ticks()).setMaxAccess('readonly') if mibBuilder.loadTexts: serviceUpTime.setStatus('mandatory') if mibBuilder.loadTexts: serviceUpTime.setDescription('No. of milliseconds since the service was started') red_trap_count = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 5), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: redTrapCount.setStatus('deprecated') if mibBuilder.loadTexts: redTrapCount.setDescription('The number of red alert traps sent since the service was started') orange_trap_count = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 6), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: orangeTrapCount.setStatus('deprecated') if mibBuilder.loadTexts: orangeTrapCount.setDescription('The number of orange alert traps sent since the service was started') amber_trap_count = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 7), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: amberTrapCount.setStatus('deprecated') if mibBuilder.loadTexts: amberTrapCount.setDescription('The number of yellow alert traps sent since the service was started') blue_trap_count = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 8), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: blueTrapCount.setStatus('deprecated') if mibBuilder.loadTexts: blueTrapCount.setDescription('The number of blue alert traps sent since the service was started') green_trap_count = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 1, 9), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: greenTrapCount.setStatus('deprecated') if mibBuilder.loadTexts: greenTrapCount.setDescription('The number of Green Alert Traps since the service was started') eem_last_trap = mib_identifier((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2)) trap_time = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 1), time_ticks()).setMaxAccess('readonly') if mibBuilder.loadTexts: trapTime.setStatus('deprecated') if mibBuilder.loadTexts: trapTime.setDescription('Time of the last trap sent') alert_level = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5))).clone(namedValues=named_values(('red', 1), ('orange', 2), ('yellow', 3), ('blue', 4), ('green', 5)))).setMaxAccess('readonly') if mibBuilder.loadTexts: alertLevel.setStatus('mandatory') if mibBuilder.loadTexts: alertLevel.setDescription('Alert level of the last trap sent. red=1, orange=2, yellow=3, blue=4, green=5') log_type = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7, 99))).clone(namedValues=named_values(('ntevent', 1), ('application', 2), ('snmp', 3), ('wbem', 4), ('activemonitoring', 5), ('performancemonitoring', 6), ('timedevent', 7), ('eem', 99)))).setMaxAccess('readonly') if mibBuilder.loadTexts: logType.setStatus('mandatory') if mibBuilder.loadTexts: logType.setDescription('Log type generating the last trap sent. system=1,application=2,security=3 (fill in others here) EEM=99') server = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 4), display_string().subtype(subtypeSpec=value_size_constraint(1, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: server.setStatus('mandatory') if mibBuilder.loadTexts: server.setDescription('Server generating the last trap sent') source = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 5), display_string().subtype(subtypeSpec=value_size_constraint(1, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: source.setStatus('mandatory') if mibBuilder.loadTexts: source.setDescription('Source generating the last trap sent') user = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 6), display_string().subtype(subtypeSpec=value_size_constraint(1, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: user.setStatus('mandatory') if mibBuilder.loadTexts: user.setDescription('User generating the last trap sent') event_id = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 7), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: eventID.setStatus('mandatory') if mibBuilder.loadTexts: eventID.setDescription('Event ID of the last trap sent') description = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 8), display_string().subtype(subtypeSpec=value_size_constraint(1, 1024))).setMaxAccess('readonly') if mibBuilder.loadTexts: description.setStatus('mandatory') if mibBuilder.loadTexts: description.setDescription('Text description of the last trap sent') generic_trap_number = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 9), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: genericTrapNumber.setStatus('mandatory') if mibBuilder.loadTexts: genericTrapNumber.setDescription('The generic trap number of the last trap sent') specific_trap_number = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 1, 2, 10), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: specificTrapNumber.setStatus('mandatory') if mibBuilder.loadTexts: specificTrapNumber.setDescription('The user specific trap number of the last trap sent') service_going_down = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0, 2)) if mibBuilder.loadTexts: serviceGoingDown.setDescription('The SeNTry EEM Sender service is stopping.') service_coming_up = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0, 3)) if mibBuilder.loadTexts: serviceComingUp.setDescription('The SeNTry EEM Sender service is starting.') gatherer_service_going_down = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0, 4)) if mibBuilder.loadTexts: gathererServiceGoingDown.setDescription('The SeNTry EEM Gatherer service is stopping.') gatherer_service_coming_up = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0, 5)) if mibBuilder.loadTexts: gathererServiceComingUp.setDescription('The SeNTry EEM Gatherer service is starting.') eem_red_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0, 100)).setObjects(('MISSION-CRITICAL-MIB', 'alertLevel'), ('MISSION-CRITICAL-MIB', 'logType'), ('MISSION-CRITICAL-MIB', 'server'), ('MISSION-CRITICAL-MIB', 'source'), ('MISSION-CRITICAL-MIB', 'user'), ('MISSION-CRITICAL-MIB', 'eventID'), ('MISSION-CRITICAL-MIB', 'description')) if mibBuilder.loadTexts: eemRedAlert.setDescription('A SeNTry EEM red alert has been generated.') eem_orange_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0, 200)).setObjects(('MISSION-CRITICAL-MIB', 'alertLevel'), ('MISSION-CRITICAL-MIB', 'logType'), ('MISSION-CRITICAL-MIB', 'server'), ('MISSION-CRITICAL-MIB', 'source'), ('MISSION-CRITICAL-MIB', 'user'), ('MISSION-CRITICAL-MIB', 'eventID'), ('MISSION-CRITICAL-MIB', 'description')) if mibBuilder.loadTexts: eemOrangeAlert.setDescription('A SeNTry EEM orange alert has been generated.') eem_yellow_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0, 300)).setObjects(('MISSION-CRITICAL-MIB', 'alertLevel'), ('MISSION-CRITICAL-MIB', 'logType'), ('MISSION-CRITICAL-MIB', 'server'), ('MISSION-CRITICAL-MIB', 'source'), ('MISSION-CRITICAL-MIB', 'user'), ('MISSION-CRITICAL-MIB', 'eventID'), ('MISSION-CRITICAL-MIB', 'description')) if mibBuilder.loadTexts: eemYellowAlert.setDescription('A SeNTry EEM yellow alert has been generated.') eem_blue_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0, 400)).setObjects(('MISSION-CRITICAL-MIB', 'alertLevel'), ('MISSION-CRITICAL-MIB', 'logType'), ('MISSION-CRITICAL-MIB', 'server'), ('MISSION-CRITICAL-MIB', 'source'), ('MISSION-CRITICAL-MIB', 'user'), ('MISSION-CRITICAL-MIB', 'eventID'), ('MISSION-CRITICAL-MIB', 'description')) if mibBuilder.loadTexts: eemBlueAlert.setDescription('A SeNTry EEM blue alert has been generated.') eem_green_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 1) + (0, 500)).setObjects(('MISSION-CRITICAL-MIB', 'alertLevel'), ('MISSION-CRITICAL-MIB', 'logType'), ('MISSION-CRITICAL-MIB', 'server'), ('MISSION-CRITICAL-MIB', 'source'), ('MISSION-CRITICAL-MIB', 'user'), ('MISSION-CRITICAL-MIB', 'eventID'), ('MISSION-CRITICAL-MIB', 'description')) if mibBuilder.loadTexts: eemGreenAlert.setDescription('A SeNTry EEM green alert has been generated.') om_service = mib_identifier((1, 3, 6, 1, 4, 1, 2349, 2, 2, 1)) om_last_trap = mib_identifier((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2)) om_trap_time = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 1), time_ticks()).setMaxAccess('readonly') if mibBuilder.loadTexts: omTrapTime.setStatus('deprecated') if mibBuilder.loadTexts: omTrapTime.setDescription('Time of the last trap sent.') om_alert_level = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 2), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: omAlertLevel.setStatus('mandatory') if mibBuilder.loadTexts: omAlertLevel.setDescription('Alert level of the last trap sent.') om_alert_level_name = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 3), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: omAlertLevelName.setStatus('mandatory') if mibBuilder.loadTexts: omAlertLevelName.setDescription('A textual description of the alert level for the last trap sent.') om_server = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 4), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: omServer.setStatus('mandatory') if mibBuilder.loadTexts: omServer.setDescription('Server generating the last trap sent.') om_source = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 5), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: omSource.setStatus('mandatory') if mibBuilder.loadTexts: omSource.setDescription('Source generating the last trap sent.') om_owner = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 6), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: omOwner.setStatus('mandatory') if mibBuilder.loadTexts: omOwner.setDescription('User generating the last trap sent.') om_description = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 7), display_string().subtype(subtypeSpec=value_size_constraint(0, 1024))).setMaxAccess('readonly') if mibBuilder.loadTexts: omDescription.setStatus('mandatory') if mibBuilder.loadTexts: omDescription.setDescription('Text description of the last trap sent.') om_custom_field1 = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 8), display_string().subtype(subtypeSpec=value_size_constraint(0, 1024))).setMaxAccess('readonly') if mibBuilder.loadTexts: omCustomField1.setStatus('mandatory') if mibBuilder.loadTexts: omCustomField1.setDescription('Custom Field 1 defined by user') om_custom_field2 = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 9), display_string().subtype(subtypeSpec=value_size_constraint(0, 1024))).setMaxAccess('readonly') if mibBuilder.loadTexts: omCustomField2.setStatus('mandatory') if mibBuilder.loadTexts: omCustomField2.setDescription('Custom Field 2 defined by user') om_custom_field3 = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 10), display_string().subtype(subtypeSpec=value_size_constraint(0, 1024))).setMaxAccess('readonly') if mibBuilder.loadTexts: omCustomField3.setStatus('mandatory') if mibBuilder.loadTexts: omCustomField3.setDescription('Custom Field 3 defined by user') om_custom_field4 = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 11), display_string().subtype(subtypeSpec=value_size_constraint(0, 1024))).setMaxAccess('readonly') if mibBuilder.loadTexts: omCustomField4.setStatus('mandatory') if mibBuilder.loadTexts: omCustomField4.setDescription('Custom Field 4 defined by user') om_custom_field5 = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 12), display_string().subtype(subtypeSpec=value_size_constraint(0, 1024))).setMaxAccess('readonly') if mibBuilder.loadTexts: omCustomField5.setStatus('mandatory') if mibBuilder.loadTexts: omCustomField5.setDescription('Custom Field 5 defined by user') om_alert_url = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 13), display_string().subtype(subtypeSpec=value_size_constraint(0, 2048))).setMaxAccess('readonly') if mibBuilder.loadTexts: omAlertURL.setStatus('mandatory') if mibBuilder.loadTexts: omAlertURL.setDescription('URL used to view alert details') om_generic_trap_number = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 14), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: omGenericTrapNumber.setStatus('mandatory') if mibBuilder.loadTexts: omGenericTrapNumber.setDescription('The generic trap number of the last trap sent.') om_specific_trap_number = mib_scalar((1, 3, 6, 1, 4, 1, 2349, 2, 2, 2, 15), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: omSpecificTrapNumber.setStatus('mandatory') if mibBuilder.loadTexts: omSpecificTrapNumber.setDescription('The user specific trap number of the last trap sent') om_blue_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0, 10)).setObjects(('MISSION-CRITICAL-MIB', 'omAlertLevel'), ('MISSION-CRITICAL-MIB', 'omAlertLevelName'), ('MISSION-CRITICAL-MIB', 'omServer'), ('MISSION-CRITICAL-MIB', 'omSource'), ('MISSION-CRITICAL-MIB', 'omOwner'), ('MISSION-CRITICAL-MIB', 'omDescription'), ('MISSION-CRITICAL-MIB', 'omCustomField1'), ('MISSION-CRITICAL-MIB', 'omCustomField2'), ('MISSION-CRITICAL-MIB', 'omCustomField3'), ('MISSION-CRITICAL-MIB', 'omCustomField4'), ('MISSION-CRITICAL-MIB', 'omCustomField5'), ('MISSION-CRITICAL-MIB', 'omAlertURL')) if mibBuilder.loadTexts: omBlueAlert.setDescription('A OnePoint Operations Manager Blue Alert has been generated.') om_green_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0, 20)).setObjects(('MISSION-CRITICAL-MIB', 'omAlertLevel'), ('MISSION-CRITICAL-MIB', 'omAlertLevelName'), ('MISSION-CRITICAL-MIB', 'omServer'), ('MISSION-CRITICAL-MIB', 'omSource'), ('MISSION-CRITICAL-MIB', 'omOwner'), ('MISSION-CRITICAL-MIB', 'omDescription'), ('MISSION-CRITICAL-MIB', 'omCustomField1'), ('MISSION-CRITICAL-MIB', 'omCustomField2'), ('MISSION-CRITICAL-MIB', 'omCustomField3'), ('MISSION-CRITICAL-MIB', 'omCustomField4'), ('MISSION-CRITICAL-MIB', 'omCustomField5'), ('MISSION-CRITICAL-MIB', 'omAlertURL')) if mibBuilder.loadTexts: omGreenAlert.setDescription('A OnePoint Operations Manager Green Alert has been generated.') om_yellow_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0, 30)).setObjects(('MISSION-CRITICAL-MIB', 'omAlertLevel'), ('MISSION-CRITICAL-MIB', 'omAlertLevelName'), ('MISSION-CRITICAL-MIB', 'omServer'), ('MISSION-CRITICAL-MIB', 'omSource'), ('MISSION-CRITICAL-MIB', 'omOwner'), ('MISSION-CRITICAL-MIB', 'omDescription'), ('MISSION-CRITICAL-MIB', 'omCustomField1'), ('MISSION-CRITICAL-MIB', 'omCustomField2'), ('MISSION-CRITICAL-MIB', 'omCustomField3'), ('MISSION-CRITICAL-MIB', 'omCustomField4'), ('MISSION-CRITICAL-MIB', 'omCustomField5'), ('MISSION-CRITICAL-MIB', 'omAlertURL')) if mibBuilder.loadTexts: omYellowAlert.setDescription('A OnePoint Operations Manager Yellow Alert has been generated.') om_orange_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0, 40)).setObjects(('MISSION-CRITICAL-MIB', 'omAlertLevel'), ('MISSION-CRITICAL-MIB', 'omAlertLevelName'), ('MISSION-CRITICAL-MIB', 'omServer'), ('MISSION-CRITICAL-MIB', 'omSource'), ('MISSION-CRITICAL-MIB', 'omOwner'), ('MISSION-CRITICAL-MIB', 'omDescription'), ('MISSION-CRITICAL-MIB', 'omCustomField1'), ('MISSION-CRITICAL-MIB', 'omCustomField2'), ('MISSION-CRITICAL-MIB', 'omCustomField3'), ('MISSION-CRITICAL-MIB', 'omCustomField4'), ('MISSION-CRITICAL-MIB', 'omCustomField5'), ('MISSION-CRITICAL-MIB', 'omAlertURL')) if mibBuilder.loadTexts: omOrangeAlert.setDescription('A OnePoint Operations Manager Orange Alert has been generated.') om_red_critical_error_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0, 50)).setObjects(('MISSION-CRITICAL-MIB', 'omAlertLevel'), ('MISSION-CRITICAL-MIB', 'omAlertLevelName'), ('MISSION-CRITICAL-MIB', 'omServer'), ('MISSION-CRITICAL-MIB', 'omSource'), ('MISSION-CRITICAL-MIB', 'omOwner'), ('MISSION-CRITICAL-MIB', 'omDescription'), ('MISSION-CRITICAL-MIB', 'omCustomField1'), ('MISSION-CRITICAL-MIB', 'omCustomField2'), ('MISSION-CRITICAL-MIB', 'omCustomField3'), ('MISSION-CRITICAL-MIB', 'omCustomField4'), ('MISSION-CRITICAL-MIB', 'omCustomField5'), ('MISSION-CRITICAL-MIB', 'omAlertURL')) if mibBuilder.loadTexts: omRedCriticalErrorAlert.setDescription('A OnePoint Operations Manager Critical Error Alert has been generated.') om_red_security_breach_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0, 60)).setObjects(('MISSION-CRITICAL-MIB', 'omAlertLevel'), ('MISSION-CRITICAL-MIB', 'omAlertLevelName'), ('MISSION-CRITICAL-MIB', 'omServer'), ('MISSION-CRITICAL-MIB', 'omSource'), ('MISSION-CRITICAL-MIB', 'omOwner'), ('MISSION-CRITICAL-MIB', 'omDescription'), ('MISSION-CRITICAL-MIB', 'omCustomField1'), ('MISSION-CRITICAL-MIB', 'omCustomField2'), ('MISSION-CRITICAL-MIB', 'omCustomField3'), ('MISSION-CRITICAL-MIB', 'omCustomField4'), ('MISSION-CRITICAL-MIB', 'omCustomField5'), ('MISSION-CRITICAL-MIB', 'omAlertURL')) if mibBuilder.loadTexts: omRedSecurityBreachAlert.setDescription('A OnePoint Operations Manager Security Breach Alert has been generated.') om_red_service_unavailable_alert = notification_type((1, 3, 6, 1, 4, 1, 2349, 2, 2) + (0, 70)).setObjects(('MISSION-CRITICAL-MIB', 'omAlertLevel'), ('MISSION-CRITICAL-MIB', 'omAlertLevelName'), ('MISSION-CRITICAL-MIB', 'omServer'), ('MISSION-CRITICAL-MIB', 'omSource'), ('MISSION-CRITICAL-MIB', 'omOwner'), ('MISSION-CRITICAL-MIB', 'omDescription'), ('MISSION-CRITICAL-MIB', 'omCustomField1'), ('MISSION-CRITICAL-MIB', 'omCustomField2'), ('MISSION-CRITICAL-MIB', 'omCustomField3'), ('MISSION-CRITICAL-MIB', 'omCustomField4'), ('MISSION-CRITICAL-MIB', 'omCustomField5'), ('MISSION-CRITICAL-MIB', 'omAlertURL')) if mibBuilder.loadTexts: omRedServiceUnavailableAlert.setDescription('A OnePoint Operations Manager Service Unavailable Alert has been generated.') mibBuilder.exportSymbols('MISSION-CRITICAL-MIB', serviceUpTime=serviceUpTime, omYellowAlert=omYellowAlert, redTrapCount=redTrapCount, eemOrangeAlert=eemOrangeAlert, mcsCompanyInfo=mcsCompanyInfo, omCustomField4=omCustomField4, gathererServiceComingUp=gathererServiceComingUp, serviceState=serviceState, omCustomField2=omCustomField2, omDescription=omDescription, missionCritical=missionCritical, omService=omService, eventID=eventID, omAlertLevelName=omAlertLevelName, serviceGoingDown=serviceGoingDown, omProductInfo=omProductInfo, trapTime=trapTime, eemService=eemService, eemYellowAlert=eemYellowAlert, omRedCriticalErrorAlert=omRedCriticalErrorAlert, omRedSecurityBreachAlert=omRedSecurityBreachAlert, blueTrapCount=blueTrapCount, greenTrapCount=greenTrapCount, omServer=omServer, mcsSoftware=mcsSoftware, serviceComingUp=serviceComingUp, omCustomField1=omCustomField1, omGreenAlert=omGreenAlert, eemLastTrap=eemLastTrap, omCustomField5=omCustomField5, omAlertURL=omAlertURL, omOrangeAlert=omOrangeAlert, omTrapTime=omTrapTime, logType=logType, amberTrapCount=amberTrapCount, user=user, specificTrapNumber=specificTrapNumber, source=source, omBlueAlert=omBlueAlert, ownershipDetails=ownershipDetails, eemRedAlert=eemRedAlert, omSpecificTrapNumber=omSpecificTrapNumber, omOwner=omOwner, gathererServiceGoingDown=gathererServiceGoingDown, orangeTrapCount=orangeTrapCount, server=server, omLastTrap=omLastTrap, omAlertLevel=omAlertLevel, omCustomField3=omCustomField3, omGenericTrapNumber=omGenericTrapNumber, description=description, genericTrapNumber=genericTrapNumber, eemGreenAlert=eemGreenAlert, primaryServer=primaryServer, alertLevel=alertLevel, version=version, omSource=omSource, eemProductInfo=eemProductInfo, eemBlueAlert=eemBlueAlert, contactDetails=contactDetails, omRedServiceUnavailableAlert=omRedServiceUnavailableAlert)
def assert_has_size(output_bytes, value, delta=0): """Asserts the specified output has a size of the specified value""" output_size = len(output_bytes) assert abs(output_size - int(value)) <= int(delta), "Expected file size was %s, actual file size was %s (difference of %s accepted)" % (value, output_size, delta)
def assert_has_size(output_bytes, value, delta=0): """Asserts the specified output has a size of the specified value""" output_size = len(output_bytes) assert abs(output_size - int(value)) <= int(delta), 'Expected file size was %s, actual file size was %s (difference of %s accepted)' % (value, output_size, delta)
num = 111 num = 222 num = 333333 num = 333 num4 = 44444
num = 111 num = 222 num = 333333 num = 333 num4 = 44444
__author__ = "hoongeun" __version__ = "0.0.1" __copyright__ = "Copyright (c) hoongeun" __license__ = "Beer ware"
__author__ = 'hoongeun' __version__ = '0.0.1' __copyright__ = 'Copyright (c) hoongeun' __license__ = 'Beer ware'
# Function arguments ... # # Class instances can be pass as arguments to functions class Point: """ a 2D point """ p = Point() p.x = 1 p.y = 2 def print_point(point): print('(%s, %s)' % (point.x, point.y)) print_point(p) # (1, 2)
class Point: """ a 2D point """ p = point() p.x = 1 p.y = 2 def print_point(point): print('(%s, %s)' % (point.x, point.y)) print_point(p)
def test1(): inp="0 2 7 0" inp="4 10 4 1 8 4 9 14 5 1 14 15 0 15 3 5" nums = list(map(lambda x: int(x), inp.split())) hist = [ nums ] step = 1 current = nums[:] while True: #print('step', step, 'current', current) #search max m = max(current) #max index idx = current.index(m) current[idx] = 0 idx += 1 while m > 0: idx = 0 if idx >= len(current) else idx current[idx] += 1 m -= 1 idx += 1 if current in hist: print(step, hist.index(current), step - hist.index(current)) break step += 1 hist.append(current[:]) #print(hist[0])
def test1(): inp = '0 2 7 0' inp = '4 10 4 1 8 4 9 14 5 1 14 15 0 15 3 5' nums = list(map(lambda x: int(x), inp.split())) hist = [nums] step = 1 current = nums[:] while True: m = max(current) idx = current.index(m) current[idx] = 0 idx += 1 while m > 0: idx = 0 if idx >= len(current) else idx current[idx] += 1 m -= 1 idx += 1 if current in hist: print(step, hist.index(current), step - hist.index(current)) break step += 1 hist.append(current[:])
#!/usr/bin/env python # # @Author: Dalmasso Giovanni <gioda> # @Date: 09-Feb-2018 # @Email: giovanni.dalmasso@embl.es # @Project: python_utils # @Filename: colors.py # @Last modified by: gioda # @Last modified time: 15-Mar-2018 # @License: MIT """Collection of basic colors as RGB for plotting (inspired by "Tableau" www.tableau.com).""" def colBase10(): """List of 10 basic colors as RGB.""" col = [(31, 119, 180), (255, 127, 14), (44, 160, 44), (214, 39, 40), (148, 103, 189), (140, 86, 75), (227, 119, 194), (127, 127, 127), (188, 189, 34), (23, 190, 207)] # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts. for j in range(len(col)): r, g, b = col[j] col[j] = (r / 255., g / 255., b / 255.) col = col * 100000 return col def colLight(): """List of 10 light colors as RGB.""" col = [(174, 199, 232), (255, 187, 120), (152, 223, 138), (255, 152, 150), (197, 176, 213), (196, 156, 148), (247, 182, 210), (199, 199, 199), (219, 219, 141), (158, 218, 229)] # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts. for j in range(len(col)): r, g, b = col[j] col[j] = (r / 255., g / 255., b / 255.) col = col * 100000 return col def colMedium(): """List of 10 medium colors as RGB.""" col = [(114, 158, 206), (255, 158, 74), (103, 191, 92), (237, 102, 93), (173, 139, 201), (168, 120, 110), (237, 151, 202), (162, 162, 162), (205, 204, 93), (109, 204, 218)] # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts. for j in range(len(col)): r, g, b = col[j] col[j] = (r / 255., g / 255., b / 255.) col = col * 100000 return col def colCblind(): """List of 10 color blind colors as RGB.""" col = [(0, 107, 164), (255, 128, 14), (171, 171, 171), (89, 89, 89), (95, 158, 209), (200, 82, 0), (137, 137, 137), (162, 200, 236), (255, 188, 121), (207, 207, 207)] # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts. for j in range(len(col)): r, g, b = col[j] col[j] = (r / 255., g / 255., b / 255.) col = col * 100000 return col def colGrey(): """List of 5 grey colors as RGB.""" col = [(207, 207, 207), (165, 172, 175), (143, 135, 130), (96, 99, 106), (65, 68, 81)] # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts. for j in range(len(col)): r, g, b = col[j] col[j] = (r / 255., g / 255., b / 255.) col = col * 100000 return col def colTrafficLigth(): """List of 9 traffic-light colors as RGB.""" col = [(255, 193, 86), (219, 161, 58), (216, 37, 38), (177, 3, 24), (48, 147, 67), (255, 221, 113), (242, 108, 100), (159, 205, 153), (105, 183, 100)] # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts. for j in range(len(col)): r, g, b = col[j] col[j] = (r / 255., g / 255., b / 255.) col = col * 100000 return col def colPurpleGrey(): """List of 6 purple-grey colors as RGB.""" col = [(220, 95, 189), (208, 152, 238), (153, 86, 136), (148, 145, 123), (123, 102, 210), (215, 213, 197)] # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts. for j in range(len(col)): r, g, b = col[j] col[j] = (r / 255., g / 255., b / 255.) col = col * 100000 return col def colBase20(): """List of 20 basic colors as RGB.""" col = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120), (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150), (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148), (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199), (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)] # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts. for j in range(len(col)): r, g, b = col[j] col[j] = (r / 255., g / 255., b / 255.) col = col * 100000 return col
"""Collection of basic colors as RGB for plotting (inspired by "Tableau" www.tableau.com).""" def col_base10(): """List of 10 basic colors as RGB.""" col = [(31, 119, 180), (255, 127, 14), (44, 160, 44), (214, 39, 40), (148, 103, 189), (140, 86, 75), (227, 119, 194), (127, 127, 127), (188, 189, 34), (23, 190, 207)] for j in range(len(col)): (r, g, b) = col[j] col[j] = (r / 255.0, g / 255.0, b / 255.0) col = col * 100000 return col def col_light(): """List of 10 light colors as RGB.""" col = [(174, 199, 232), (255, 187, 120), (152, 223, 138), (255, 152, 150), (197, 176, 213), (196, 156, 148), (247, 182, 210), (199, 199, 199), (219, 219, 141), (158, 218, 229)] for j in range(len(col)): (r, g, b) = col[j] col[j] = (r / 255.0, g / 255.0, b / 255.0) col = col * 100000 return col def col_medium(): """List of 10 medium colors as RGB.""" col = [(114, 158, 206), (255, 158, 74), (103, 191, 92), (237, 102, 93), (173, 139, 201), (168, 120, 110), (237, 151, 202), (162, 162, 162), (205, 204, 93), (109, 204, 218)] for j in range(len(col)): (r, g, b) = col[j] col[j] = (r / 255.0, g / 255.0, b / 255.0) col = col * 100000 return col def col_cblind(): """List of 10 color blind colors as RGB.""" col = [(0, 107, 164), (255, 128, 14), (171, 171, 171), (89, 89, 89), (95, 158, 209), (200, 82, 0), (137, 137, 137), (162, 200, 236), (255, 188, 121), (207, 207, 207)] for j in range(len(col)): (r, g, b) = col[j] col[j] = (r / 255.0, g / 255.0, b / 255.0) col = col * 100000 return col def col_grey(): """List of 5 grey colors as RGB.""" col = [(207, 207, 207), (165, 172, 175), (143, 135, 130), (96, 99, 106), (65, 68, 81)] for j in range(len(col)): (r, g, b) = col[j] col[j] = (r / 255.0, g / 255.0, b / 255.0) col = col * 100000 return col def col_traffic_ligth(): """List of 9 traffic-light colors as RGB.""" col = [(255, 193, 86), (219, 161, 58), (216, 37, 38), (177, 3, 24), (48, 147, 67), (255, 221, 113), (242, 108, 100), (159, 205, 153), (105, 183, 100)] for j in range(len(col)): (r, g, b) = col[j] col[j] = (r / 255.0, g / 255.0, b / 255.0) col = col * 100000 return col def col_purple_grey(): """List of 6 purple-grey colors as RGB.""" col = [(220, 95, 189), (208, 152, 238), (153, 86, 136), (148, 145, 123), (123, 102, 210), (215, 213, 197)] for j in range(len(col)): (r, g, b) = col[j] col[j] = (r / 255.0, g / 255.0, b / 255.0) col = col * 100000 return col def col_base20(): """List of 20 basic colors as RGB.""" col = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120), (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150), (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148), (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199), (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)] for j in range(len(col)): (r, g, b) = col[j] col[j] = (r / 255.0, g / 255.0, b / 255.0) col = col * 100000 return col
# test floor-division and modulo operators @micropython.viper def div(x:int, y:int) -> int: return x // y @micropython.viper def mod(x:int, y:int) -> int: return x % y def dm(x, y): print(div(x, y), mod(x, y)) for x in (-6, 6): for y in range(-7, 8): if y == 0: continue dm(x, y)
@micropython.viper def div(x: int, y: int) -> int: return x // y @micropython.viper def mod(x: int, y: int) -> int: return x % y def dm(x, y): print(div(x, y), mod(x, y)) for x in (-6, 6): for y in range(-7, 8): if y == 0: continue dm(x, y)
# -*- coding: utf-8 -*- def func(precess_data, x): precess_data = list(range(0, 100, 3)) low = 0 high = 34 guess = int((low + high) / 2) while precess_data[guess] != x: if precess_data[guess] < x: low = guess elif precess_data[guess] > x: high = guess else: break guess = (low + high) // 2 return guess print(func(list(range(0, 100, 3)), 99))
def func(precess_data, x): precess_data = list(range(0, 100, 3)) low = 0 high = 34 guess = int((low + high) / 2) while precess_data[guess] != x: if precess_data[guess] < x: low = guess elif precess_data[guess] > x: high = guess else: break guess = (low + high) // 2 return guess print(func(list(range(0, 100, 3)), 99))
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ We have two functions in this class. one to collect data from the winner and store in a text file, and one to print this data on a scoreboard. """ class Highscore(): """Highscore class.""" def show_score_board(self, filename): """Read textfile, format the data to display the scores.""" with open(filename, "r") as file: print("{:*^50}".format(" HIGHSCORE TABLE ")) data = file.readlines() all_scores = [] for line in data: name, total, streak = line.split(";") score = (name, int(total), streak.rstrip()) all_scores.append(score) all_scores.sort(key=lambda y: y[1], reverse=True) position = 1 print(" Name: Total Score: Longest Streak:") for score in all_scores: print(f"{position:>2}: {score[0]:15}" + f"{score[1]:<15} {score[2]}") position = position + 1 def collect_score(self, name, score, longeststreak, filename): """Write score from winning player to textfile.""" with open(filename, "a") as file: file.write(name + ";" + str(score) + ";" + str(longeststreak) + "\n")
""" We have two functions in this class. one to collect data from the winner and store in a text file, and one to print this data on a scoreboard. """ class Highscore: """Highscore class.""" def show_score_board(self, filename): """Read textfile, format the data to display the scores.""" with open(filename, 'r') as file: print('{:*^50}'.format(' HIGHSCORE TABLE ')) data = file.readlines() all_scores = [] for line in data: (name, total, streak) = line.split(';') score = (name, int(total), streak.rstrip()) all_scores.append(score) all_scores.sort(key=lambda y: y[1], reverse=True) position = 1 print(' Name: Total Score: Longest Streak:') for score in all_scores: print(f'{position:>2}: {score[0]:15}' + f'{score[1]:<15} {score[2]}') position = position + 1 def collect_score(self, name, score, longeststreak, filename): """Write score from winning player to textfile.""" with open(filename, 'a') as file: file.write(name + ';' + str(score) + ';' + str(longeststreak) + '\n')
''' Created on 08.06.2014 @author: ionitadaniel19 ''' map_selenium_objects={ "SUSER":"name=login", "SPWD":"name=password", "SREMEMBER":"id=remember_me", "SSUBMIT":"name=commit", "SKEYWORD":"id=q1c", "SSHOWANSER":"name=showanswer", "SANSWER":"css=#answer > p" }
""" Created on 08.06.2014 @author: ionitadaniel19 """ map_selenium_objects = {'SUSER': 'name=login', 'SPWD': 'name=password', 'SREMEMBER': 'id=remember_me', 'SSUBMIT': 'name=commit', 'SKEYWORD': 'id=q1c', 'SSHOWANSER': 'name=showanswer', 'SANSWER': 'css=#answer > p'}
linesize = int(input()) table = [[0 for x in range(4)] for y in range(linesize)] queue = [] for i in range(linesize): entry = input().split(' ') # print(entry, 'pushed') country = (int(entry[1]),int(entry[2]),int(entry[3]),str(entry[0])) queue.append(country) out = sorted(queue, key = lambda x: x[3]) out = sorted(out, key = lambda x: (x[0], x[1], x[2]), reverse=True) for elemt in out: print("{0} {1} {2} {3}".format(elemt[3],elemt[0],elemt[1],elemt[2]))
linesize = int(input()) table = [[0 for x in range(4)] for y in range(linesize)] queue = [] for i in range(linesize): entry = input().split(' ') country = (int(entry[1]), int(entry[2]), int(entry[3]), str(entry[0])) queue.append(country) out = sorted(queue, key=lambda x: x[3]) out = sorted(out, key=lambda x: (x[0], x[1], x[2]), reverse=True) for elemt in out: print('{0} {1} {2} {3}'.format(elemt[3], elemt[0], elemt[1], elemt[2]))
name = input('Enter your Name: ') sen = "Hello "+ name +" ,How r u today??" print(sen) para = ''' hey , this is a multiline comment.Lets see how it works.''' print(para)
name = input('Enter your Name: ') sen = 'Hello ' + name + ' ,How r u today??' print(sen) para = ' hey , this is a\n multiline comment.Lets see how\n it works.' print(para)
x, y = map(float, input().split()) exp = 0.0001 count = 1 while y - x > exp: x += x * 0.7 count += 1 print(count)
(x, y) = map(float, input().split()) exp = 0.0001 count = 1 while y - x > exp: x += x * 0.7 count += 1 print(count)
#!/usr/bin/python class helloworld: def __init__(self): print("Hello World!") helloworld()
class Helloworld: def __init__(self): print('Hello World!') helloworld()
def init(): return { "ingest": { "outputKafkaTopic": "telemetry.ingest", "inputPrefix": "ingest", "dependentSinkSources": [ { "type": "azure", "prefix": "raw" }, { "type": "azure", "prefix": "unique" }, { "type": "azure", "prefix": "channel" }, { "type": "azure", "prefix": "telemetry-denormalized/raw" }, { "type": "druid", "prefix": "telemetry-events" }, { "type": "druid", "prefix": "telemetry-log-events" }, { "type": "druid", "prefix": "telemetry-error-events" }, { "type": "druid", "prefix": "telemetry-feedback-events" } ] }, "raw": { "outputKafkaTopic": "telemetry.raw", "inputPrefix": "raw", "dependentSinkSources": [ { "type": "azure", "prefix": "unique" }, { "type": "azure", "prefix": "channel" }, { "type": "azure", "prefix": "telemetry-denormalized/raw" }, { "type": "druid", "prefix": "telemetry-events" }, { "type": "druid", "prefix": "telemetry-log-events" }, { "type": "druid", "prefix": "telemetry-error-events" }, { "type": "druid", "prefix": "telemetry-feedback-events" } ] }, "unique": { "outputKafkaTopic": "telemetry.unique", "inputPrefix": "unique", "dependentSinkSources": [ { "type": "azure", "prefix": "channel" }, { "type": "azure", "prefix": "telemetry-denormalized/raw" }, { "type": "druid", "prefix": "telemetry-events" }, { "type": "druid", "prefix": "telemetry-log-events" }, { "type": "druid", "prefix": "telemetry-error-events" }, { "type": "druid", "prefix": "telemetry-feedback-events" } ] }, "telemetry-denorm": { "outputKafkaTopic": "telemetry.denorm", "inputPrefix": "telemetry-denormalized/raw", "dependentSinkSources": [ { "type": "druid", "prefix": "telemetry-events" }, { "type": "druid", "prefix": "telemetry-feedback-events" } ] }, "summary-denorm": { "outputKafkaTopic": "telemetry.denorm", "inputPrefix": "telemetry-denormalized/summary", "dependentSinkSources": [ { "type": "druid", "prefix": "summary-events" } ] }, "failed": { "outputKafkaTopic": "telemetry.raw", "inputPrefix": "failed", "dependentSinkSources": [ ], "filters": [ { "key": "flags", "operator": "Is Null", "value": "" } ] }, "batch-failed": { "outputKafkaTopic": "telemetry.ingest", "inputPrefix": "extractor-failed", "dependentSinkSources": [ ], "filters": [ { "key": "flags", "operator": "Is Null", "value": "" } ] }, "wfs": { "outputKafkaTopic": "telemetry.derived", "inputPrefix": "derived/wfs", "dependentSinkSources": [ { "type": "azure", "prefix": "channel" }, { "type": "azure", "prefix": "telemetry-denormalized/summary" }, { "type": "druid", "prefix": "summary-events" } ] } }
def init(): return {'ingest': {'outputKafkaTopic': 'telemetry.ingest', 'inputPrefix': 'ingest', 'dependentSinkSources': [{'type': 'azure', 'prefix': 'raw'}, {'type': 'azure', 'prefix': 'unique'}, {'type': 'azure', 'prefix': 'channel'}, {'type': 'azure', 'prefix': 'telemetry-denormalized/raw'}, {'type': 'druid', 'prefix': 'telemetry-events'}, {'type': 'druid', 'prefix': 'telemetry-log-events'}, {'type': 'druid', 'prefix': 'telemetry-error-events'}, {'type': 'druid', 'prefix': 'telemetry-feedback-events'}]}, 'raw': {'outputKafkaTopic': 'telemetry.raw', 'inputPrefix': 'raw', 'dependentSinkSources': [{'type': 'azure', 'prefix': 'unique'}, {'type': 'azure', 'prefix': 'channel'}, {'type': 'azure', 'prefix': 'telemetry-denormalized/raw'}, {'type': 'druid', 'prefix': 'telemetry-events'}, {'type': 'druid', 'prefix': 'telemetry-log-events'}, {'type': 'druid', 'prefix': 'telemetry-error-events'}, {'type': 'druid', 'prefix': 'telemetry-feedback-events'}]}, 'unique': {'outputKafkaTopic': 'telemetry.unique', 'inputPrefix': 'unique', 'dependentSinkSources': [{'type': 'azure', 'prefix': 'channel'}, {'type': 'azure', 'prefix': 'telemetry-denormalized/raw'}, {'type': 'druid', 'prefix': 'telemetry-events'}, {'type': 'druid', 'prefix': 'telemetry-log-events'}, {'type': 'druid', 'prefix': 'telemetry-error-events'}, {'type': 'druid', 'prefix': 'telemetry-feedback-events'}]}, 'telemetry-denorm': {'outputKafkaTopic': 'telemetry.denorm', 'inputPrefix': 'telemetry-denormalized/raw', 'dependentSinkSources': [{'type': 'druid', 'prefix': 'telemetry-events'}, {'type': 'druid', 'prefix': 'telemetry-feedback-events'}]}, 'summary-denorm': {'outputKafkaTopic': 'telemetry.denorm', 'inputPrefix': 'telemetry-denormalized/summary', 'dependentSinkSources': [{'type': 'druid', 'prefix': 'summary-events'}]}, 'failed': {'outputKafkaTopic': 'telemetry.raw', 'inputPrefix': 'failed', 'dependentSinkSources': [], 'filters': [{'key': 'flags', 'operator': 'Is Null', 'value': ''}]}, 'batch-failed': {'outputKafkaTopic': 'telemetry.ingest', 'inputPrefix': 'extractor-failed', 'dependentSinkSources': [], 'filters': [{'key': 'flags', 'operator': 'Is Null', 'value': ''}]}, 'wfs': {'outputKafkaTopic': 'telemetry.derived', 'inputPrefix': 'derived/wfs', 'dependentSinkSources': [{'type': 'azure', 'prefix': 'channel'}, {'type': 'azure', 'prefix': 'telemetry-denormalized/summary'}, {'type': 'druid', 'prefix': 'summary-events'}]}}
#!/usr/bin/python # -*- coding: utf-8 -*- RECOVER_ITEM = [ ("n 't ", "n't ") ] def recover_quotewords(text): for before, after in RECOVER_ITEM: text = text.replace(before, after) return text
recover_item = [("n 't ", "n't ")] def recover_quotewords(text): for (before, after) in RECOVER_ITEM: text = text.replace(before, after) return text
names = [ 'Christal', 'Ray', 'Ron' ] print(names)
names = ['Christal', 'Ray', 'Ron'] print(names)
def solution(numBottles,numExchange): finalsum = numBottles emptyBottles = numBottles numBottles = 0 while (emptyBottles >= numExchange): numBottles = emptyBottles // numExchange emptyBottles -= emptyBottles // numExchange * numExchange finalsum += numBottles emptyBottles += numBottles print (finalsum) numBottles = int(input("numBottles = ")) numExchange = int(input("numExchange = ")) solution(numBottles,numExchange)
def solution(numBottles, numExchange): finalsum = numBottles empty_bottles = numBottles num_bottles = 0 while emptyBottles >= numExchange: num_bottles = emptyBottles // numExchange empty_bottles -= emptyBottles // numExchange * numExchange finalsum += numBottles empty_bottles += numBottles print(finalsum) num_bottles = int(input('numBottles = ')) num_exchange = int(input('numExchange = ')) solution(numBottles, numExchange)
def undistort_image(image, objectpoints, imagepoints): # Get image size img_size = (image.shape[1], image.shape[0]) # Calibrate camera based on objectpoints, imagepoints, and image size ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objectpoints, imagepoints, img_size, None, None) # Call cv2.undistort dst = cv2.undistort(image, mtx, dist, None, mtx) return dst def get_shresholded_img(image,grad_thresh,s_thresh): gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) #process the x direction gradient sobelx = cv2.Sobel(gray, cv2.CV_64F, 1, 0) # Take the derivative in x abs_sobelx = np.absolute(sobelx) # Absolute x derivative to accentuate lines away from horizontal scaled_sobel = np.uint8(255*abs_sobelx/np.max(abs_sobelx)) sxbinary = np.zeros_like(scaled_sobel) sxbinary[(scaled_sobel >= grad_thresh[0]) & (scaled_sobel <= grad_thresh[1])] = 1 #process the HIS s channel hls = cv2.cvtColor(img, cv2.COLOR_RGB2HLS) s_channel = hls[:,:,2] s_binary = np.zeros_like(s_channel) s_binary[(s_channel >= s_thresh[0]) & (s_channel <= s_thresh[1])] = 1 # color_binary = np.dstack(( np.zeros_like(sxbinary), sxbinary, s_binary)) * 255 # one can show it out to see the colored binary # Combine the two binary thresholds combined_binary = np.zeros_like(sxbinary) combined_binary[(s_binary == 1) | (sxbinary == 1)] = 1 return combined_binary def warp_image_to_birdseye_view(image,corners): img_size=(image.shape[1], image.shape[0]) #choose an offset to determine the distination for birdseye view area offset = 150 src = np.float32( [corners[0], corners[1], corners[2], corners[3]]) #decide a place to place the birdviewed image, get these points by testing an image dst = np.float32([ [offset, 0], [offset, img_size[1]], [img_size[0] - offset, img_size[1]], [img_size[0] - offset,0]]) # Get perspective transform perspectiveTransform = cv2.getPerspectiveTransform(src, dst) # Warp perspective warped = cv2.warpPerspective(image, perspectiveTransform, img_size, flags=cv2.INTER_LINEAR) # Get the destination perspective transform Minv = cv2.getPerspectiveTransform(dst, src) return warped, Minv def find_lane_lines(warped_binary_image, testing=False): if testing == True: # Create an output image to draw on and visualize the result output_image = np.dstack((warped_binary_image, warped_binary_image, warped_binary_image))*255 # Create histogram to find the lanes by identifying the peaks in the histogram histogram = np.sum(warped_binary_image[int(warped_binary_image.shape[0]/2):,:], axis=0) # Find the peak of the left and right halves of the histogram midpoint = np.int(histogram.shape[0]/2) left_x_base = np.argmax(histogram[:midpoint]) right_x_base = np.argmax(histogram[midpoint:]) + midpoint # Choose the number of sliding windows number_of_windows = 9 # Set height of windows window_height = np.int(warped_binary_image.shape[0]/number_of_windows) # Identify the x and y positions of all nonzero pixels in the image nonzero_pixels = warped_binary_image.nonzero() nonzero_y_pixels = np.array(nonzero_pixels[0]) nonzero_x_pixels = np.array(nonzero_pixels[1]) # Current positions to be updated for each window left_x_current = left_x_base right_x_current = right_x_base # Set the width of the windows +/- margin margin = 100 # Set minimum number of pixels found to recenter window minpix = 50 # Create empty lists to receive left and right lane pixel indices left_lane_inds = [] right_lane_inds = [] # Step through the windows one by one for window in range(number_of_windows): # Identify window boundaries in x and y (and right and left) win_y_low = warped_binary_image.shape[0] - (window+1)*window_height win_y_high = warped_binary_image.shape[0] - window*window_height win_x_left_low = left_x_current - margin win_x_left_high = left_x_current + margin win_x_right_low = right_x_current - margin win_x_right_high = right_x_current + margin if testing == True: # Draw the windows on the visualization image cv2.rectangle(output_image, (win_x_left_low,win_y_low), (win_x_left_high,win_y_high), (0,255,0), 2) cv2.rectangle(output_image, (win_x_right_low,win_y_low), (win_x_right_high,win_y_high), (0,255,0), 2) # Identify the nonzero pixels in x and y within the window left_inds = ((nonzero_y_pixels >= win_y_low) & (nonzero_y_pixels < win_y_high) & (nonzero_x_pixels >= win_x_left_low) & (nonzero_x_pixels < win_x_left_high)).nonzero()[0] right_inds = ((nonzero_y_pixels >= win_y_low) & (nonzero_y_pixels < win_y_high) & (nonzero_x_pixels >= win_x_right_low) & (nonzero_x_pixels < win_x_right_high)).nonzero()[0] # Append these indices to the lists left_lane_inds.append(left_inds) right_lane_inds.append(right_inds) # If you found > minpix pixels, recenter next window on their mean position if len(left_inds) > minpix: left_x_current = np.int(np.mean(nonzero_x_pixels[left_inds])) if len(right_inds) > minpix: right_x_current = np.int(np.mean(nonzero_x_pixels[right_inds])) # Concatenate the arrays of indices left_lane_inds = np.concatenate(left_lane_inds) right_lane_inds = np.concatenate(right_lane_inds) # Extract left and right line pixel positions left_x = nonzero_x_pixels[left_lane_inds] left_y = nonzero_y_pixels[left_lane_inds] right_x = nonzero_x_pixels[right_lane_inds] right_y = nonzero_y_pixels[right_lane_inds] # Fit a second order polynomial to each left_fit = np.polyfit(left_y, left_x, 2) right_fit = np.polyfit(right_y, right_x, 2) # Generate x and y values for plotting plot_y = np.linspace(0, warped_binary_image.shape[0]-1, warped_binary_image.shape[0] ) left_fit_x = left_fit[0]*plot_y**2 + left_fit[1]*plot_y + left_fit[2] right_fit_x = right_fit[0]*plot_y**2 + right_fit[1]*plot_y + right_fit[2] # Get binary warped image size image_size = warped_binary_image.shape # Get max of plot_y y_eval = np.max(plot_y) # Define conversions in x and y from pixels space to meters y_m_per_pix = 30/720 x_m_per_pix = 3.7/700 # Fit new polynomials to x,y in world space left_fit_cr = np.polyfit(left_y*y_m_per_pix, left_x*x_m_per_pix, 2) right_fit_cr = np.polyfit(right_y*y_m_per_pix, right_x*x_m_per_pix, 2) # Calculate radius of curve left_curve = ((1+(2*left_fit_cr[0]*y_eval*y_m_per_pix+left_fit_cr[1])**2)**1.5)/np.absolute(2*left_fit_cr[0]) right_curve = ((1+(2*right_fit_cr[0]*y_eval*y_m_per_pix+right_fit_cr[1])**2)**1.5)/np.absolute(2*right_fit_cr[0]) # Calculate lane deviation from center of lane scene_height = image_size[0] * y_m_per_pix scene_width = image_size[1] * x_m_per_pix # Calculate the intercept points at the bottom of our image left_intercept = left_fit_cr[0] * scene_height ** 2 + left_fit_cr[1] * scene_height + left_fit_cr[2] right_intercept = right_fit_cr[0] * scene_height ** 2 + right_fit_cr[1] * scene_height + right_fit_cr[2] center = (left_intercept + right_intercept) / 2.0 # Use intercept points to calculate the lane deviation of the vehicle lane_deviation = (center - scene_width / 2.0) if testing == True: output_image[nonzero_y_pixels[left_lane_inds], nonzero_x_pixels[left_lane_inds]] = [255, 0, 0] output_image[nonzero_y_pixels[right_lane_inds], nonzero_x_pixels[right_lane_inds]] = [0, 0, 255] return left_fit_x, right_fit_x, plot_y, left_fit, right_fit, left_curve, right_curve, lane_deviation, output_image else: return left_fit_x, right_fit_x, plot_y, left_curve, right_curve, lane_deviation def draw_lane_lines(warped_binary_image, undistorted_image, Minv): # Create a blank image to draw the lines on warp_zero = np.zeros_like(warped_binary_image).astype(np.uint8) color_warp = np.dstack((warp_zero, warp_zero, warp_zero)) left_fit_x, right_fit_x, ploty, left_radius, right_radius, lane_deviation=find_lane_lines(warped_binary_image) # Recast the x and y points into usable format for cv2.fillPoly() pts_left = np.array([np.transpose(np.vstack([left_fit_x, ploty]))]) pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fit_x, ploty])))]) pts = np.hstack((pts_left, pts_right)) # Draw the lane onto the warped blank image with green color cv2.fillPoly(color_warp, np.int_([pts]), (0, 255, 0)) # Warp the blank back to original image space using inverse perspective matrix (Minv) unwarp = cv2.warpPerspective(color_warp, Minv, (undistorted_image.shape[1], undistorted_image.shape[0])) # Combine the result with the original image result = cv2.addWeighted(undistorted_image, 1, unwarp, 0.3, 0) # Write text on image curvature_text = "Curvature: Left = " + str(np.round(left_radius, 2)) + ", Right = " + str(np.round(right_radius, 2)) font = cv2.FONT_HERSHEY_TRIPLEX cv2.putText(result, curvature_text, (30, 60), font, 1, (0,255,0), 2) deviation_text = "Lane deviation from center = {:.2f} m".format(lane_deviation) font = cv2.FONT_HERSHEY_TRIPLEX cv2.putText(result, deviation_text, (30, 90), font, 1, (0,255,0), 2) return result #the pipeline function def process_image(image): undistorted = undistort_image(image, objpoints, imgpoints) combined_binary = get_shresholded_img(undistorted,grad_thresh,s_thresh) binary_warped, Minv = warp_image_to_birdseye_view(combined_binary,corners) lane_lines_img = draw_lane_lines(binary_warped, undistorted, Minv) return lane_lines_img
def undistort_image(image, objectpoints, imagepoints): img_size = (image.shape[1], image.shape[0]) (ret, mtx, dist, rvecs, tvecs) = cv2.calibrateCamera(objectpoints, imagepoints, img_size, None, None) dst = cv2.undistort(image, mtx, dist, None, mtx) return dst def get_shresholded_img(image, grad_thresh, s_thresh): gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) sobelx = cv2.Sobel(gray, cv2.CV_64F, 1, 0) abs_sobelx = np.absolute(sobelx) scaled_sobel = np.uint8(255 * abs_sobelx / np.max(abs_sobelx)) sxbinary = np.zeros_like(scaled_sobel) sxbinary[(scaled_sobel >= grad_thresh[0]) & (scaled_sobel <= grad_thresh[1])] = 1 hls = cv2.cvtColor(img, cv2.COLOR_RGB2HLS) s_channel = hls[:, :, 2] s_binary = np.zeros_like(s_channel) s_binary[(s_channel >= s_thresh[0]) & (s_channel <= s_thresh[1])] = 1 combined_binary = np.zeros_like(sxbinary) combined_binary[(s_binary == 1) | (sxbinary == 1)] = 1 return combined_binary def warp_image_to_birdseye_view(image, corners): img_size = (image.shape[1], image.shape[0]) offset = 150 src = np.float32([corners[0], corners[1], corners[2], corners[3]]) dst = np.float32([[offset, 0], [offset, img_size[1]], [img_size[0] - offset, img_size[1]], [img_size[0] - offset, 0]]) perspective_transform = cv2.getPerspectiveTransform(src, dst) warped = cv2.warpPerspective(image, perspectiveTransform, img_size, flags=cv2.INTER_LINEAR) minv = cv2.getPerspectiveTransform(dst, src) return (warped, Minv) def find_lane_lines(warped_binary_image, testing=False): if testing == True: output_image = np.dstack((warped_binary_image, warped_binary_image, warped_binary_image)) * 255 histogram = np.sum(warped_binary_image[int(warped_binary_image.shape[0] / 2):, :], axis=0) midpoint = np.int(histogram.shape[0] / 2) left_x_base = np.argmax(histogram[:midpoint]) right_x_base = np.argmax(histogram[midpoint:]) + midpoint number_of_windows = 9 window_height = np.int(warped_binary_image.shape[0] / number_of_windows) nonzero_pixels = warped_binary_image.nonzero() nonzero_y_pixels = np.array(nonzero_pixels[0]) nonzero_x_pixels = np.array(nonzero_pixels[1]) left_x_current = left_x_base right_x_current = right_x_base margin = 100 minpix = 50 left_lane_inds = [] right_lane_inds = [] for window in range(number_of_windows): win_y_low = warped_binary_image.shape[0] - (window + 1) * window_height win_y_high = warped_binary_image.shape[0] - window * window_height win_x_left_low = left_x_current - margin win_x_left_high = left_x_current + margin win_x_right_low = right_x_current - margin win_x_right_high = right_x_current + margin if testing == True: cv2.rectangle(output_image, (win_x_left_low, win_y_low), (win_x_left_high, win_y_high), (0, 255, 0), 2) cv2.rectangle(output_image, (win_x_right_low, win_y_low), (win_x_right_high, win_y_high), (0, 255, 0), 2) left_inds = ((nonzero_y_pixels >= win_y_low) & (nonzero_y_pixels < win_y_high) & (nonzero_x_pixels >= win_x_left_low) & (nonzero_x_pixels < win_x_left_high)).nonzero()[0] right_inds = ((nonzero_y_pixels >= win_y_low) & (nonzero_y_pixels < win_y_high) & (nonzero_x_pixels >= win_x_right_low) & (nonzero_x_pixels < win_x_right_high)).nonzero()[0] left_lane_inds.append(left_inds) right_lane_inds.append(right_inds) if len(left_inds) > minpix: left_x_current = np.int(np.mean(nonzero_x_pixels[left_inds])) if len(right_inds) > minpix: right_x_current = np.int(np.mean(nonzero_x_pixels[right_inds])) left_lane_inds = np.concatenate(left_lane_inds) right_lane_inds = np.concatenate(right_lane_inds) left_x = nonzero_x_pixels[left_lane_inds] left_y = nonzero_y_pixels[left_lane_inds] right_x = nonzero_x_pixels[right_lane_inds] right_y = nonzero_y_pixels[right_lane_inds] left_fit = np.polyfit(left_y, left_x, 2) right_fit = np.polyfit(right_y, right_x, 2) plot_y = np.linspace(0, warped_binary_image.shape[0] - 1, warped_binary_image.shape[0]) left_fit_x = left_fit[0] * plot_y ** 2 + left_fit[1] * plot_y + left_fit[2] right_fit_x = right_fit[0] * plot_y ** 2 + right_fit[1] * plot_y + right_fit[2] image_size = warped_binary_image.shape y_eval = np.max(plot_y) y_m_per_pix = 30 / 720 x_m_per_pix = 3.7 / 700 left_fit_cr = np.polyfit(left_y * y_m_per_pix, left_x * x_m_per_pix, 2) right_fit_cr = np.polyfit(right_y * y_m_per_pix, right_x * x_m_per_pix, 2) left_curve = (1 + (2 * left_fit_cr[0] * y_eval * y_m_per_pix + left_fit_cr[1]) ** 2) ** 1.5 / np.absolute(2 * left_fit_cr[0]) right_curve = (1 + (2 * right_fit_cr[0] * y_eval * y_m_per_pix + right_fit_cr[1]) ** 2) ** 1.5 / np.absolute(2 * right_fit_cr[0]) scene_height = image_size[0] * y_m_per_pix scene_width = image_size[1] * x_m_per_pix left_intercept = left_fit_cr[0] * scene_height ** 2 + left_fit_cr[1] * scene_height + left_fit_cr[2] right_intercept = right_fit_cr[0] * scene_height ** 2 + right_fit_cr[1] * scene_height + right_fit_cr[2] center = (left_intercept + right_intercept) / 2.0 lane_deviation = center - scene_width / 2.0 if testing == True: output_image[nonzero_y_pixels[left_lane_inds], nonzero_x_pixels[left_lane_inds]] = [255, 0, 0] output_image[nonzero_y_pixels[right_lane_inds], nonzero_x_pixels[right_lane_inds]] = [0, 0, 255] return (left_fit_x, right_fit_x, plot_y, left_fit, right_fit, left_curve, right_curve, lane_deviation, output_image) else: return (left_fit_x, right_fit_x, plot_y, left_curve, right_curve, lane_deviation) def draw_lane_lines(warped_binary_image, undistorted_image, Minv): warp_zero = np.zeros_like(warped_binary_image).astype(np.uint8) color_warp = np.dstack((warp_zero, warp_zero, warp_zero)) (left_fit_x, right_fit_x, ploty, left_radius, right_radius, lane_deviation) = find_lane_lines(warped_binary_image) pts_left = np.array([np.transpose(np.vstack([left_fit_x, ploty]))]) pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fit_x, ploty])))]) pts = np.hstack((pts_left, pts_right)) cv2.fillPoly(color_warp, np.int_([pts]), (0, 255, 0)) unwarp = cv2.warpPerspective(color_warp, Minv, (undistorted_image.shape[1], undistorted_image.shape[0])) result = cv2.addWeighted(undistorted_image, 1, unwarp, 0.3, 0) curvature_text = 'Curvature: Left = ' + str(np.round(left_radius, 2)) + ', Right = ' + str(np.round(right_radius, 2)) font = cv2.FONT_HERSHEY_TRIPLEX cv2.putText(result, curvature_text, (30, 60), font, 1, (0, 255, 0), 2) deviation_text = 'Lane deviation from center = {:.2f} m'.format(lane_deviation) font = cv2.FONT_HERSHEY_TRIPLEX cv2.putText(result, deviation_text, (30, 90), font, 1, (0, 255, 0), 2) return result def process_image(image): undistorted = undistort_image(image, objpoints, imgpoints) combined_binary = get_shresholded_img(undistorted, grad_thresh, s_thresh) (binary_warped, minv) = warp_image_to_birdseye_view(combined_binary, corners) lane_lines_img = draw_lane_lines(binary_warped, undistorted, Minv) return lane_lines_img
""" The :mod:`fatf.utils.data` module holds data tools and data sets. """ # Author: Kacper Sokol <k.sokol@bristol.ac.uk> # License: new BSD
""" The :mod:`fatf.utils.data` module holds data tools and data sets. """
largest=None smallest=None while True: number=input("Enter a number:") if number == "done": break try: number=int(number) if largest == None: largest = number elif largest < number: largest = number if smallest==None: smallest=number elif smallest>number: smallest=number except ValueError: print("Invalid input") print ("Maximum is", largest) print ("Minimum is", smallest)
largest = None smallest = None while True: number = input('Enter a number:') if number == 'done': break try: number = int(number) if largest == None: largest = number elif largest < number: largest = number if smallest == None: smallest = number elif smallest > number: smallest = number except ValueError: print('Invalid input') print('Maximum is', largest) print('Minimum is', smallest)
class UsdValue(float): def __init__(self, v) -> None: super().__init__() class UsdPrice(float): def __init__(self, v) -> None: super().__init__()
class Usdvalue(float): def __init__(self, v) -> None: super().__init__() class Usdprice(float): def __init__(self, v) -> None: super().__init__()
def filter(fname,data): list=[] for i in range(len(data)): f=fname(data[i]) if f==True: list.append(data[i]) return list def map(fname,newdata): list=[] for i in range(len(newdata)): f=fname(newdata[i]) list.append(f) return list def reduce(fname,incrementdata): list=[] for i in range(len(incrementdata)): if (len(incrementdata))>=2: f=fname(incrementdata[0],incrementdata[1]) del incrementdata[0] del incrementdata[0] incrementdata.append(f) return incrementdata[0]
def filter(fname, data): list = [] for i in range(len(data)): f = fname(data[i]) if f == True: list.append(data[i]) return list def map(fname, newdata): list = [] for i in range(len(newdata)): f = fname(newdata[i]) list.append(f) return list def reduce(fname, incrementdata): list = [] for i in range(len(incrementdata)): if len(incrementdata) >= 2: f = fname(incrementdata[0], incrementdata[1]) del incrementdata[0] del incrementdata[0] incrementdata.append(f) return incrementdata[0]
# numbers = [str(x) for x in range(32)] letters = [chr(x) for x in range(97, 123)] crate = ''' sandbox crate map {boot: @init} /*initialize utility vars and register vars*/ service init { writer = 0 alpha = 0 beta = 0 status = 0''' for letter in letters: crate += '\n ' + letter + ' = 0' crate += ''' } /*map operator service to exec jump table*/ map { copy: @copy add: @add sub: @sub not: @not or: @or and: @and eq: @eq ne: @ne gt: @gt lt: @lt gte: @gte lte: @lte unary: @status_alpha } service copy { @status_zero alpha = beta @writer} service add { @status_zero alpha = alpha + beta @writer} service sub { @status_zero alpha = alpha - beta @writer} service not { @status_zero alpha = !beta @writer} service or { @status_zero alpha = alpha | beta @writer} service and { @status_zero alpha = alpha & beta @writer} service eq { @status_zero if (alpha == beta) {[true]} else {[false]}} service ne { @status_zero if (alpha != beta) {[true]} else {[false]}} service gt { @status_zero if (alpha > beta) {[true]} else {[false]}} service lt { @status_zero if (alpha < beta) {[true]} else {[false]}} service gte { @status_zero if (alpha >= beta) {[true]} else {[false]}} service lte { @status_zero if (alpha <= beta) {[true]} else {[false]}} service status_zero { status = 0 } service status_alpha { status = 1 } service status_beta { status = 2 } service writer { jump (writer) {''' for letter in letters: crate += '{ ' + letter + ' = alpha } ' crate += '''} } map {jump: @jump} service jump { jump (z) {''' for number in numbers: crate += '{ [ jump' + number + '] } ' crate += '''} } map {printme : @printme} service printme { [''' for number in numbers: crate += '''alias jump''' + number + ''' echo ''' + number + ''';''' crate += '''jump] }''' for letter in letters: crate += ''' map {''' + letter + ' : @' + letter + '''} service ''' + letter + ''' { jump (status) { { alpha = ''' + letter + ''' @status_alpha} { beta = ''' + letter + ''' @status_beta} { writer = ''' + str(ord(letter) - 97) + ''' } } }''' for number in numbers: crate += ''' map {delete''' + number + ' : @delete' + number + '''} service delete''' + number + ''' { jump (status) { { alpha = ''' + number + ''' @status_alpha} { beta = ''' + number + ''' @status_beta} { } } }''' print(crate)
numbers = [str(x) for x in range(32)] letters = [chr(x) for x in range(97, 123)] crate = '\nsandbox crate\n\nmap {boot: @init}\n/*initialize utility vars and register vars*/\nservice init {\n writer = 0\n alpha = 0\n beta = 0\n status = 0' for letter in letters: crate += '\n ' + letter + ' = 0' crate += '\n}\n\n/*map operator service to exec jump table*/\n\nmap {\n copy: @copy\n add: @add\n sub: @sub\n not: @not\n or: @or\n and: @and\n eq: @eq\n ne: @ne\n gt: @gt\n lt: @lt\n gte: @gte\n lte: @lte\n unary: @status_alpha\n}\n\nservice copy { @status_zero alpha = beta @writer}\n\nservice add { @status_zero alpha = alpha + beta @writer}\n\nservice sub { @status_zero alpha = alpha - beta @writer}\n\nservice not { @status_zero alpha = !beta @writer}\n\nservice or { @status_zero alpha = alpha | beta @writer}\n\nservice and { @status_zero alpha = alpha & beta @writer}\n\nservice eq { @status_zero if (alpha == beta) {[true]} else {[false]}}\n\nservice ne { @status_zero if (alpha != beta) {[true]} else {[false]}}\n\nservice gt { @status_zero if (alpha > beta) {[true]} else {[false]}}\n\nservice lt { @status_zero if (alpha < beta) {[true]} else {[false]}}\n\nservice gte { @status_zero if (alpha >= beta) {[true]} else {[false]}}\n\nservice lte { @status_zero if (alpha <= beta) {[true]} else {[false]}}\n\nservice status_zero {\n status = 0\n}\n\nservice status_alpha {\n status = 1\n}\n\nservice status_beta {\n status = 2\n}\n\nservice writer {\n jump (writer) {' for letter in letters: crate += '{ ' + letter + ' = alpha } ' crate += '}\n}\n\nmap {jump: @jump}\nservice jump {\n jump (z) {' for number in numbers: crate += '{ [ jump' + number + '] } ' crate += '}\n}\n\n\nmap {printme : @printme}\nservice printme { [' for number in numbers: crate += 'alias jump' + number + ' echo ' + number + ';' crate += 'jump]\n}' for letter in letters: crate += '\nmap {' + letter + ' : @' + letter + '}\nservice ' + letter + ' { jump (status) {\n { alpha = ' + letter + ' @status_alpha}\n { beta = ' + letter + ' @status_beta}\n { writer = ' + str(ord(letter) - 97) + ' }\n } \n}' for number in numbers: crate += '\nmap {delete' + number + ' : @delete' + number + '}\nservice delete' + number + ' { jump (status) {\n { alpha = ' + number + ' @status_alpha}\n { beta = ' + number + ' @status_beta}\n { }\n }\n}' print(crate)
# coding: utf-8 # http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser DEFAULT_PARSER = 'lxml' ALLOWED_CONTENT_TYPES = [ 'text/html', 'image/', ] FINDER_PIPELINE = ( 'haul.finders.pipeline.html.img_src_finder', 'haul.finders.pipeline.html.a_href_finder', 'haul.finders.pipeline.css.background_image_finder', ) EXTENDER_PIPELINE = ( 'haul.extenders.pipeline.google.blogspot_s1600_extender', 'haul.extenders.pipeline.google.ggpht_s1600_extender', 'haul.extenders.pipeline.google.googleusercontent_s1600_extender', 'haul.extenders.pipeline.pinterest.original_image_extender', 'haul.extenders.pipeline.wordpress.original_image_extender', 'haul.extenders.pipeline.tumblr.media_1280_extender', 'haul.extenders.pipeline.tumblr.avatar_128_extender', ) SHOULD_JOIN_URL = True
default_parser = 'lxml' allowed_content_types = ['text/html', 'image/'] finder_pipeline = ('haul.finders.pipeline.html.img_src_finder', 'haul.finders.pipeline.html.a_href_finder', 'haul.finders.pipeline.css.background_image_finder') extender_pipeline = ('haul.extenders.pipeline.google.blogspot_s1600_extender', 'haul.extenders.pipeline.google.ggpht_s1600_extender', 'haul.extenders.pipeline.google.googleusercontent_s1600_extender', 'haul.extenders.pipeline.pinterest.original_image_extender', 'haul.extenders.pipeline.wordpress.original_image_extender', 'haul.extenders.pipeline.tumblr.media_1280_extender', 'haul.extenders.pipeline.tumblr.avatar_128_extender') should_join_url = True
# pythran export _brief_loop(float64[:,:], uint8[:,:], # intp[:,:], int[:,:], int[:,:]) def _brief_loop(image, descriptors, keypoints, pos0, pos1): for k in range(len(keypoints)): kr, kc = keypoints[k] for p in range(len(pos0)): pr0, pc0 = pos0[p] pr1, pc1 = pos1[p] descriptors[k, p] = (image[kr + pr0, kc + pc0] < image[kr + pr1, kc + pc1])
def _brief_loop(image, descriptors, keypoints, pos0, pos1): for k in range(len(keypoints)): (kr, kc) = keypoints[k] for p in range(len(pos0)): (pr0, pc0) = pos0[p] (pr1, pc1) = pos1[p] descriptors[k, p] = image[kr + pr0, kc + pc0] < image[kr + pr1, kc + pc1]
factors = { 1:{ 1:"I",5:"I",9:"I",13:"I",17:"I",21:"I",25:"I",29:"I",33:"I",37:"I",41:"I",45:"I",49:"I",53:"I",57:"I" , 2:"S", 6:"S", 10:"S", 14:"S", 18:"S", 22:"S", 26:"S",30:"S" ,34:"S",38:"S",42:"S",46:"S",50:"S",54:"S",58:"S" , 3:"T", 7:"T" , 11:"T", 15:"T", 19:"T",23:"T" ,27:"T", 31:"T" ,35:"T" ,39:"T",43:"T",47:"T",51:"T" ,55:"T",59:"T" , 4:"P", 8:"P", 12:"P", 16:"P", 20:"P", 24:"P", 28:"P", 32:"P", 36:"P", 40:"P", 44:"P", 48:"P", 52:"P", 56:"P", 60:"P" } , 2 :{ 1:"E",5:"E",9:"E",13:"E",17:"E",21:"E",25:"E",29:"E",33:"E",37:"E",41:"E",45:"E",49:"E",53:"E",57:"E" , 2:"N", 6:"N", 10:"N", 14:"N", 18:"N", 22:"N", 26:"N",30:"N" ,34:"N",38:"N",42:"N",46:"N",50:"N",54:"N",58:"N" , 3:"F", 7:"F" , 11:"F", 15:"F", 19:"F",23:"F" ,27:"F", 31:"F" ,35:"F" ,39:"F",43:"F",47:"F",51:"F" ,55:"F",59:"F" , 4:"J", 8:"J", 12:"J", 16:"J", 20:"J", 24:"J", 28:"J", 32:"J", 36:"J", 40:"J", 44:"J", 48:"J", 52:"J", 56:"J", 60:"J" } } factors_names = ('E', 'I', 'S', 'N', 'F', 'T', 'P', 'J', 'report') factors_group = (('E', 'I'), ('S', 'N'), ('F', 'T'), ('P', 'J'))
factors = {1: {1: 'I', 5: 'I', 9: 'I', 13: 'I', 17: 'I', 21: 'I', 25: 'I', 29: 'I', 33: 'I', 37: 'I', 41: 'I', 45: 'I', 49: 'I', 53: 'I', 57: 'I', 2: 'S', 6: 'S', 10: 'S', 14: 'S', 18: 'S', 22: 'S', 26: 'S', 30: 'S', 34: 'S', 38: 'S', 42: 'S', 46: 'S', 50: 'S', 54: 'S', 58: 'S', 3: 'T', 7: 'T', 11: 'T', 15: 'T', 19: 'T', 23: 'T', 27: 'T', 31: 'T', 35: 'T', 39: 'T', 43: 'T', 47: 'T', 51: 'T', 55: 'T', 59: 'T', 4: 'P', 8: 'P', 12: 'P', 16: 'P', 20: 'P', 24: 'P', 28: 'P', 32: 'P', 36: 'P', 40: 'P', 44: 'P', 48: 'P', 52: 'P', 56: 'P', 60: 'P'}, 2: {1: 'E', 5: 'E', 9: 'E', 13: 'E', 17: 'E', 21: 'E', 25: 'E', 29: 'E', 33: 'E', 37: 'E', 41: 'E', 45: 'E', 49: 'E', 53: 'E', 57: 'E', 2: 'N', 6: 'N', 10: 'N', 14: 'N', 18: 'N', 22: 'N', 26: 'N', 30: 'N', 34: 'N', 38: 'N', 42: 'N', 46: 'N', 50: 'N', 54: 'N', 58: 'N', 3: 'F', 7: 'F', 11: 'F', 15: 'F', 19: 'F', 23: 'F', 27: 'F', 31: 'F', 35: 'F', 39: 'F', 43: 'F', 47: 'F', 51: 'F', 55: 'F', 59: 'F', 4: 'J', 8: 'J', 12: 'J', 16: 'J', 20: 'J', 24: 'J', 28: 'J', 32: 'J', 36: 'J', 40: 'J', 44: 'J', 48: 'J', 52: 'J', 56: 'J', 60: 'J'}} factors_names = ('E', 'I', 'S', 'N', 'F', 'T', 'P', 'J', 'report') factors_group = (('E', 'I'), ('S', 'N'), ('F', 'T'), ('P', 'J'))
wkidInfo = { '4326':{'type':'gcs', 'path':'World/WGS 1984.prj'}, '102100':{'type':'pcs', 'path':r'World/WGS 1984 Web Mercator (auxiliary sphere).prj'}, '3857' : {'type':'pcs', 'path':r'World/WGS 1984 Web Mercator (auxiliary sphere).prj'} }
wkid_info = {'4326': {'type': 'gcs', 'path': 'World/WGS 1984.prj'}, '102100': {'type': 'pcs', 'path': 'World/WGS 1984 Web Mercator (auxiliary sphere).prj'}, '3857': {'type': 'pcs', 'path': 'World/WGS 1984 Web Mercator (auxiliary sphere).prj'}}
#import ctypes #import GdaImport #import matplotlib.pyplot as plt # getting example # gjden def GDA_MAIN(gda_obj): per='the apk permission:\n' # per+=gda_obj.GetAppString() # per+=gda_obj.GetCert() # per+=gda_obj.GetUrlString() # per+=gda_obj.GetPermission() gda_obj.log(per) tofile = open('out.txt','w') tofile.write(per) tofile.close() return 0
def gda_main(gda_obj): per = 'the apk permission:\n' per += gda_obj.GetPermission() gda_obj.log(per) tofile = open('out.txt', 'w') tofile.write(per) tofile.close() return 0
# Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. { 'targets': [ { 'target_name': 'control_bar', 'dependencies': [ '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior', 'profile_browser_proxy', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'create_profile', 'dependencies': [ '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:web_ui_listener_behavior', 'profile_browser_proxy', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'error_dialog', 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'import_supervised_user', 'dependencies': [ '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior', 'profile_browser_proxy', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'profile_browser_proxy', 'dependencies': [ '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:assert', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'supervised_user_create_confirm', 'dependencies': [ '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util', 'profile_browser_proxy', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'supervised_user_learn_more', 'dependencies': [ 'profile_browser_proxy', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'user_manager_pages', 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'user_manager_tutorial', 'dependencies': [ '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, ], }
{'targets': [{'target_name': 'control_bar', 'dependencies': ['<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior', 'profile_browser_proxy'], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi']}, {'target_name': 'create_profile', 'dependencies': ['<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:web_ui_listener_behavior', 'profile_browser_proxy'], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi']}, {'target_name': 'error_dialog', 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi']}, {'target_name': 'import_supervised_user', 'dependencies': ['<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior', 'profile_browser_proxy'], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi']}, {'target_name': 'profile_browser_proxy', 'dependencies': ['<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:assert', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr'], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi']}, {'target_name': 'supervised_user_create_confirm', 'dependencies': ['<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util', 'profile_browser_proxy'], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi']}, {'target_name': 'supervised_user_learn_more', 'dependencies': ['profile_browser_proxy'], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi']}, {'target_name': 'user_manager_pages', 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi']}, {'target_name': 'user_manager_tutorial', 'dependencies': ['<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior', '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util'], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi']}]}
# Part 1 of the Python Review lab. def hello_world(): print("hello world") pass def greet_by_name(name): print("please enter your name") name = input print pass def encode(x): pass def decode(coded_message): pass
def hello_world(): print('hello world') pass def greet_by_name(name): print('please enter your name') name = input print pass def encode(x): pass def decode(coded_message): pass
#!/usr/bin/env python # -*- coding: utf-8 -*- # vim: ai ts=4 sts=4 et sw=4 """ django-fhir FILE: __init__.py Created: 1/6/16 5:07 PM """ __author__ = 'Mark Scrimshire:@ekivemark' # Hello World is here to test the loading of the module from fhir.settings # from .settings import * #from fhir_io_hapi.views.get import hello_world #from fhir_io_hapi.views.delete import delete #from fhir_io_hapi.views.get import (read, vread, history) #from fhir_io_hapi.views.search import find # Used to load post_save signal for write to backend fhir server default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
""" django-fhir FILE: __init__.py Created: 1/6/16 5:07 PM """ __author__ = 'Mark Scrimshire:@ekivemark' default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
pizzas = ["triple carne", "extra queso", "suprema"] friend_pizzas = ["triple carne", "extra queso", "suprema"] pizzas.append("baggel") friend_pizzas.append("hawaiana") print("Mis pizzas favoritas son:") for i in range(0,len(pizzas)): print(pizzas[i]) print() print("Las pizzas favoritas de mi amigo son:") for i in range(0,len(friend_pizzas)): print(friend_pizzas[i])
pizzas = ['triple carne', 'extra queso', 'suprema'] friend_pizzas = ['triple carne', 'extra queso', 'suprema'] pizzas.append('baggel') friend_pizzas.append('hawaiana') print('Mis pizzas favoritas son:') for i in range(0, len(pizzas)): print(pizzas[i]) print() print('Las pizzas favoritas de mi amigo son:') for i in range(0, len(friend_pizzas)): print(friend_pizzas[i])
# -*- coding: utf-8 -*- GITHUB_STRING = 'https://github.com/earaujoassis/watchman/archive/v{0}.zip' NAME = "agents" VERSION = "0.2.4"
github_string = 'https://github.com/earaujoassis/watchman/archive/v{0}.zip' name = 'agents' version = '0.2.4'
def first(arr, low , high): if high >= low: mid = low + (high - low)//2 if (mid ==0 or arr[mid-1] == 0) and arr[mid] == 1: return mid elif arr[mid] == 0: return first(arr, mid+1, high) else: return first(arr, low, mid-1) return -1 def row_with_max_ones(mat): r = len(mat) c = len(mat[0]) max_row_index = 0 max_ = -1 for i in range(r): index = first(mat[i], 0, c-1) if index != -1 and c - index > max_: max_ = c - index max_row_index = i return max_row_index
def first(arr, low, high): if high >= low: mid = low + (high - low) // 2 if (mid == 0 or arr[mid - 1] == 0) and arr[mid] == 1: return mid elif arr[mid] == 0: return first(arr, mid + 1, high) else: return first(arr, low, mid - 1) return -1 def row_with_max_ones(mat): r = len(mat) c = len(mat[0]) max_row_index = 0 max_ = -1 for i in range(r): index = first(mat[i], 0, c - 1) if index != -1 and c - index > max_: max_ = c - index max_row_index = i return max_row_index
class Solution: def plusOne(self, digits): """ :type digits: List[int] :rtype: List[int] """ if not digits: return [1] carry = (digits[-1] + 1) // 10 digits[-1] = (digits[-1] + 1) % 10 for i in reversed(range(len(digits) - 1)): number = digits[i] digits[i] = (number + carry) % 10 carry = (number + carry) // 10 if carry > 0: return [carry] + digits else: return digits print(Solution().plusOne([]))
class Solution: def plus_one(self, digits): """ :type digits: List[int] :rtype: List[int] """ if not digits: return [1] carry = (digits[-1] + 1) // 10 digits[-1] = (digits[-1] + 1) % 10 for i in reversed(range(len(digits) - 1)): number = digits[i] digits[i] = (number + carry) % 10 carry = (number + carry) // 10 if carry > 0: return [carry] + digits else: return digits print(solution().plusOne([]))
def infer_mask_from_batch_data(batch_data): """ Create binary mask for all non-empty timesteps :param batch_data: BatchSize x SequenceLen x Features :return: BatchSize x SequenceLen """ return batch_data.abs().sum(-1) > 0 def infer_lengths_from_mask(mask): """ Get array of lengths from binary mask :param mask: BatchSize x SequenceLen :return: BatchSize """ return mask.long().sum(1)
def infer_mask_from_batch_data(batch_data): """ Create binary mask for all non-empty timesteps :param batch_data: BatchSize x SequenceLen x Features :return: BatchSize x SequenceLen """ return batch_data.abs().sum(-1) > 0 def infer_lengths_from_mask(mask): """ Get array of lengths from binary mask :param mask: BatchSize x SequenceLen :return: BatchSize """ return mask.long().sum(1)
def get_path_components(path): path = path.strip("/").split("/") path = [c for c in path if c] normalized = [] for comp in path: if comp == ".": continue elif comp == "..": if normalized: normalized.pop() else: raise ValueError("URL tried to traverse above root") else: normalized.append(comp) return normalized
def get_path_components(path): path = path.strip('/').split('/') path = [c for c in path if c] normalized = [] for comp in path: if comp == '.': continue elif comp == '..': if normalized: normalized.pop() else: raise value_error('URL tried to traverse above root') else: normalized.append(comp) return normalized
#Belajar String Method #https://docs.python.org/3/library/stdtypes.html#string-methods nama = "muhammad aris septanugroho" print(nama) print(nama.upper()) #Huruf besar semua print(nama.capitalize()) #Huruf besar kata pertama print(nama.title()) #Huruf besar tiap kata print(nama.split(" ")) #Memisah data menjadi list dengan ketentuan "spasi"
nama = 'muhammad aris septanugroho' print(nama) print(nama.upper()) print(nama.capitalize()) print(nama.title()) print(nama.split(' '))
# Code adapted from Corey Shafer """Note: generators are more performat because they don't hold all the values at the same time! Way better in memory, altho execution will be a bit slower""" def square_numbers(nums): for i in nums: # yield makes this a generator # Returns one result at a time yield(i * i) my_nums = square_numbers([1, 2, 3, 4, 5]) # Alternative list comprehension my_nums = [x*x for x in [1, 2, 3, 4, 5]] # and my_nums = (x*x for x in [1, 2, 3, 4, 5]) with circular brackets we are using a generator for num in my_nums: print(next(my_nums))
"""Note: generators are more performat because they don't hold all the values at the same time! Way better in memory, altho execution will be a bit slower""" def square_numbers(nums): for i in nums: yield (i * i) my_nums = square_numbers([1, 2, 3, 4, 5]) for num in my_nums: print(next(my_nums))
class Solution: # def maxProduct(self, nums): # """ # :type nums: List[int] # :rtype: int # """ # for i in range(1, len(nums)): # nums[i] = max(nums[i], nums[i] * nums[i - 1]) # return max(nums) def maxProduct3(self, nums): if not nums: return 0 # locMin = nums[0] # locMax = nums[0] locMinPrev = nums[0] locMaxPrev = nums[0] gloMax = nums[0] for i in range(1, len(nums)): locMin = min(locMinPrev * nums[i], locMaxPrev * nums[i], nums[i]) locMax = max(locMaxPrev * nums[i], locMinPrev * nums[i], nums[i]) locMinPrev = locMin locMaxPrev = locMax gloMax = max(locMax, gloMax) return gloMax def maxProduct2(self, nums): ans = nums[0] # locMin, locMax stores the max/min product of subarray that ends with the current number locMin = locMax = ans for i in range(1, len(nums)): # multiplying with a negative number makes a negative number positive, a positive number negative if nums[i] < 0: locMax, locMin = locMin, locMax locMin = min(nums[i], locMin * nums[i]) locMax = max(nums[i], locMax * nums[i]) ans = max(ans, locMax) return ans def maxProduct(self, nums): ans = nums[0] # locMin, locMax stores the max/min product of subarray that ends with the current number locMin = locMax = ans for i in range(1, len(nums)): candidates = (nums[i], locMax * nums[i], locMin * nums[i]) locMin = min(candidates) locMax = max(candidates) # warning: cannot do the following # locMin is updated before calculating locMax # locMin = min(nums[i], locMax * nums[i], locMin * nums[i]) # locMax = max(nums[i], locMax * nums[i], locMin * nums[i]) ans = max(ans, locMax) return ans solver = Solution() ans = solver.maxProduct3([-4,-3,-2]) print(ans)
class Solution: def max_product3(self, nums): if not nums: return 0 loc_min_prev = nums[0] loc_max_prev = nums[0] glo_max = nums[0] for i in range(1, len(nums)): loc_min = min(locMinPrev * nums[i], locMaxPrev * nums[i], nums[i]) loc_max = max(locMaxPrev * nums[i], locMinPrev * nums[i], nums[i]) loc_min_prev = locMin loc_max_prev = locMax glo_max = max(locMax, gloMax) return gloMax def max_product2(self, nums): ans = nums[0] loc_min = loc_max = ans for i in range(1, len(nums)): if nums[i] < 0: (loc_max, loc_min) = (locMin, locMax) loc_min = min(nums[i], locMin * nums[i]) loc_max = max(nums[i], locMax * nums[i]) ans = max(ans, locMax) return ans def max_product(self, nums): ans = nums[0] loc_min = loc_max = ans for i in range(1, len(nums)): candidates = (nums[i], locMax * nums[i], locMin * nums[i]) loc_min = min(candidates) loc_max = max(candidates) ans = max(ans, locMax) return ans solver = solution() ans = solver.maxProduct3([-4, -3, -2]) print(ans)
def product_left_recursive(alist, result=None): if alist == []: return result g = result[-1] * alist[0] result.append(g) return product_left_recursive(alist[1:], result) def product_left(alist): new_list = [1] for index in range(1, len(alist)): value = new_list[-1] * alist[index-1] new_list.append(value) return new_list def product_right(alist): new_list = [1] for index in range(len(alist)-2, -1, -1): value = new_list[-1] * alist[index-1] new_list.append(value) return new_list def product_of_array_of_array(alist): left_list = product_left(alist) right_list = product_right(alist) new_list = [] for index, item in enumerate(alist): value = left_list[index] * right_list[index] new_list.append(value) return new_list def product_recursive(alist): if alist == []: return 1 return alist[0] * product_recursive(alist[1:]) def paa(alist): new_list = [] for index, item in enumerate(alist): current_list = alist[:index] + alist[index+1:] value = product_recursive(current_list) new_list.append(value) return new_list alist = [1, 2, 3, 4, 5, 6] rlist = alist[::-1] print(alist) print(product_left(alist)) print(product_left_recursive(alist[1:], [1])) print(product_left_recursive(rlist[1:], [1])) print(product_right(alist)) #print(product_right_recursive(alist, [1])) #print(product_of_array_of_array(alist)) #print(paa(alist))
def product_left_recursive(alist, result=None): if alist == []: return result g = result[-1] * alist[0] result.append(g) return product_left_recursive(alist[1:], result) def product_left(alist): new_list = [1] for index in range(1, len(alist)): value = new_list[-1] * alist[index - 1] new_list.append(value) return new_list def product_right(alist): new_list = [1] for index in range(len(alist) - 2, -1, -1): value = new_list[-1] * alist[index - 1] new_list.append(value) return new_list def product_of_array_of_array(alist): left_list = product_left(alist) right_list = product_right(alist) new_list = [] for (index, item) in enumerate(alist): value = left_list[index] * right_list[index] new_list.append(value) return new_list def product_recursive(alist): if alist == []: return 1 return alist[0] * product_recursive(alist[1:]) def paa(alist): new_list = [] for (index, item) in enumerate(alist): current_list = alist[:index] + alist[index + 1:] value = product_recursive(current_list) new_list.append(value) return new_list alist = [1, 2, 3, 4, 5, 6] rlist = alist[::-1] print(alist) print(product_left(alist)) print(product_left_recursive(alist[1:], [1])) print(product_left_recursive(rlist[1:], [1])) print(product_right(alist))
# coding: utf-8 # pragma: no cover class Transformator: """Rule to transform values""" def __init__(self, *args, **kwargs): pass class TransformatorList(list): """Wrapper for all registered Transformators""" def __init__(self, settings, *args, **kwargs): super(TransformatorList, self).__init__(*args, **kwargs) self.settings = settings def register(self, *args): self.extend(args)
class Transformator: """Rule to transform values""" def __init__(self, *args, **kwargs): pass class Transformatorlist(list): """Wrapper for all registered Transformators""" def __init__(self, settings, *args, **kwargs): super(TransformatorList, self).__init__(*args, **kwargs) self.settings = settings def register(self, *args): self.extend(args)
fileName = ["nohup_2", "nohup_1", "nohup_4", "nohup"] Fo = open("new nohup", "w") for fil in fileName: lineNum = 0 with open(fil) as F: for line in F: if lineNum % 10 == 0: Fo.write(",\t".join(line.split())) Fo.write("\n") lineNum += 1 Fo.write("e\n")
file_name = ['nohup_2', 'nohup_1', 'nohup_4', 'nohup'] fo = open('new nohup', 'w') for fil in fileName: line_num = 0 with open(fil) as f: for line in F: if lineNum % 10 == 0: Fo.write(',\t'.join(line.split())) Fo.write('\n') line_num += 1 Fo.write('e\n')
# Python - 3.6.0 test.assert_equals(last([1, 2, 3, 4, 5]), 5) test.assert_equals(last('abcde'), 'e') test.assert_equals(last(1, 'b', 3, 'd', 5), 5)
test.assert_equals(last([1, 2, 3, 4, 5]), 5) test.assert_equals(last('abcde'), 'e') test.assert_equals(last(1, 'b', 3, 'd', 5), 5)
class Student: def __init__(self,m1,m2): self.m1 = m1 self.m2 = m2 def sum(self, a = None, b = None, c = None): addition = 0 if a!=None and b!=None and c!=None: addition = a + b + c elif a!=None and b!= None: addition = a + b else: addition = a return addition s1 = Student(10,20) print(s1.sum(2,4))
class Student: def __init__(self, m1, m2): self.m1 = m1 self.m2 = m2 def sum(self, a=None, b=None, c=None): addition = 0 if a != None and b != None and (c != None): addition = a + b + c elif a != None and b != None: addition = a + b else: addition = a return addition s1 = student(10, 20) print(s1.sum(2, 4))
a = int(input("Enter number of elements in set A ")) A = set(map(int,input("# Spaced Separated list of elements of A ").split())) # Spaced Separated list of elements of A n = int(input("Number of sets ")) # Number of sets for i in range(n): p = input("Enter the operation and number of elements in set"+i).split() s2 = set(map(int,input("Enter space separated list of elements for operation #"+p[1]+" ").split())) if p[0] == "intersection_update": A.intersection_update(s2) elif p[0]=="update": A.update(s2) elif p[0]=="symmetric_difference_update": A.symmetric_difference_update(s2) elif p[0]=="difference_update": A.difference_update(s2) print(sum(A))
a = int(input('Enter number of elements in set A ')) a = set(map(int, input('# Spaced Separated list of elements of A ').split())) n = int(input('Number of sets ')) for i in range(n): p = input('Enter the operation and number of elements in set' + i).split() s2 = set(map(int, input('Enter space separated list of elements for operation #' + p[1] + ' ').split())) if p[0] == 'intersection_update': A.intersection_update(s2) elif p[0] == 'update': A.update(s2) elif p[0] == 'symmetric_difference_update': A.symmetric_difference_update(s2) elif p[0] == 'difference_update': A.difference_update(s2) print(sum(A))
class Solution: def sqrt(self, x): low = 0 high = 65536 best = 0 while high > low: mid = (high + low) / 2 sqr = mid ** 2 if sqr > x: high = mid elif sqr == x: return mid else: best = mid low = mid + 1 return best
class Solution: def sqrt(self, x): low = 0 high = 65536 best = 0 while high > low: mid = (high + low) / 2 sqr = mid ** 2 if sqr > x: high = mid elif sqr == x: return mid else: best = mid low = mid + 1 return best
def palindrome(word, ind): if word == word[::-1]: return f"{word} is a palindrome" if word[ind] != word[len(word) - 1 - ind]: return f"{word} is not a palindrome" return palindrome(word, ind + 1) print(palindrome("abcba", 0)) print(palindrome("peter", 0))
def palindrome(word, ind): if word == word[::-1]: return f'{word} is a palindrome' if word[ind] != word[len(word) - 1 - ind]: return f'{word} is not a palindrome' return palindrome(word, ind + 1) print(palindrome('abcba', 0)) print(palindrome('peter', 0))
#NETWORK LOCALHOST = "127.0.0.1" PI_ADDRESS = "192.168.0.1" PORT = 5000 #STATE MOVEMENT_MARGIN = 2 KICK_TIMEOUT = 1 LAST_POSITION = -1 PLAYER_LENGTH = 2 NOISE_THRESHOLD = 3 MIN_VELOCITY_THRESHOLD = 300 OPEN_PREP_RANGE = -30 BLOCK_PREP_RANGE = 100 OPEN_KICK_RANGE = -20 BLOCK_KICK_RANGE = 60 KICK_ANGLE = 55 PREP_ANGLE = -30 BLOCK_ANGLE = 0 OPEN_ANGLE = -90 SPEED_THRESHOLD = 3000 MIN_PLAYER_OFFSET = 40 MAX_PLAYER_OFFSET = 640 IDLE_RANGE = 600 RECOVERY_LINEAR = 80 RECOVERY_ANGLE = -57 #PHYSICAL DIMENSIONS GOAL_ROD = {"maxActuation":228, "playerSpacing":182, "rodX":1125, "numPlayers":3} TWO_ROD = {"maxActuation":356, "playerSpacing":237, "rodX":975, "numPlayers":2} FIVE_ROD = {"maxActuation":115, "playerSpacing":120, "rodX":675, "numPlayers":5} THREE_ROD = {"maxActuation":181, "playerSpacing":207, "rodX":375, "numPlayers":3} TABLE = {"robot_goalX":1200, "robot_goalY":350, "player_goalX":0, "player_goalY":350, "goalWidth":200, "width":685, "length":1200}
localhost = '127.0.0.1' pi_address = '192.168.0.1' port = 5000 movement_margin = 2 kick_timeout = 1 last_position = -1 player_length = 2 noise_threshold = 3 min_velocity_threshold = 300 open_prep_range = -30 block_prep_range = 100 open_kick_range = -20 block_kick_range = 60 kick_angle = 55 prep_angle = -30 block_angle = 0 open_angle = -90 speed_threshold = 3000 min_player_offset = 40 max_player_offset = 640 idle_range = 600 recovery_linear = 80 recovery_angle = -57 goal_rod = {'maxActuation': 228, 'playerSpacing': 182, 'rodX': 1125, 'numPlayers': 3} two_rod = {'maxActuation': 356, 'playerSpacing': 237, 'rodX': 975, 'numPlayers': 2} five_rod = {'maxActuation': 115, 'playerSpacing': 120, 'rodX': 675, 'numPlayers': 5} three_rod = {'maxActuation': 181, 'playerSpacing': 207, 'rodX': 375, 'numPlayers': 3} table = {'robot_goalX': 1200, 'robot_goalY': 350, 'player_goalX': 0, 'player_goalY': 350, 'goalWidth': 200, 'width': 685, 'length': 1200}
#! /usr/bin/env python3.6 #a = 'str' a = '32' print(f'float(a) = {float(a)}') print(f'int(a) = {int(a)}') if(isinstance(a, str)): print("Yes, it is string.") else: print("No, it is not string.")
a = '32' print(f'float(a) = {float(a)}') print(f'int(a) = {int(a)}') if isinstance(a, str): print('Yes, it is string.') else: print('No, it is not string.')
class TreeNode: def __init__(self, val): self.left = None self.right = None self.val = val def is_valid_BST(node, min, max): if node == None: return True if (min is not None and node.val <= min) or (max is not None and max <= node.val): return False return is_valid_BST(node.left, min, node.val) and is_valid_BST(node.right, node.val, max)
class Treenode: def __init__(self, val): self.left = None self.right = None self.val = val def is_valid_bst(node, min, max): if node == None: return True if min is not None and node.val <= min or (max is not None and max <= node.val): return False return is_valid_bst(node.left, min, node.val) and is_valid_bst(node.right, node.val, max)
"""Heisenbridge An alternative to https://github.com/matrix-org/matrix-appservice-irc/issues """
"""Heisenbridge An alternative to https://github.com/matrix-org/matrix-appservice-irc/issues """
class lagrange(object): def __init__(self, eval_x = 0): self._eval_x = eval_x self._extrapolations = [] def add_point(self, x, y): new_extraps = [(y, x)] for past_extrap, x_old in self._extrapolations: new_val = ((self._eval_x - x) * past_extrap \ + (x_old - self._eval_x) * new_extraps[-1][0])\ / (x_old - x) new_extraps.append((new_val, x_old)) self._extrapolations = new_extraps return self.estimate @property def estimate(self): return self._extrapolations[-1][0] if __name__ == "__main__": interpolator = lagrange(eval_x = 0) print(interpolator.add_point(1,2)) print(interpolator.add_point(0.5,3)) print(interpolator.add_point(0.25,3.75)) print(interpolator.add_point(0.125,4.25)) print(interpolator.add_point(0.0625,4.5))
class Lagrange(object): def __init__(self, eval_x=0): self._eval_x = eval_x self._extrapolations = [] def add_point(self, x, y): new_extraps = [(y, x)] for (past_extrap, x_old) in self._extrapolations: new_val = ((self._eval_x - x) * past_extrap + (x_old - self._eval_x) * new_extraps[-1][0]) / (x_old - x) new_extraps.append((new_val, x_old)) self._extrapolations = new_extraps return self.estimate @property def estimate(self): return self._extrapolations[-1][0] if __name__ == '__main__': interpolator = lagrange(eval_x=0) print(interpolator.add_point(1, 2)) print(interpolator.add_point(0.5, 3)) print(interpolator.add_point(0.25, 3.75)) print(interpolator.add_point(0.125, 4.25)) print(interpolator.add_point(0.0625, 4.5))
# -*- coding: utf-8 -*- __version__ = '1.0.0' default_app_config = 'webmap.apps.WebmapConfig'
__version__ = '1.0.0' default_app_config = 'webmap.apps.WebmapConfig'
#Get a string which is n (non-negative integer) copies of a given string # #function to display the string def dispfunc(iteration): output=str("") for i in range(iteration): output=output+entry print(output) # entry=str(input("\nenter a string : ")) displaynumber=int(input("how many times must it be displayed? : ")) dispfunc(displaynumber) #experimental feedback=str(input("\nwould you try it for the stringlength? : ")) if feedback == "yes" or "Yes" or "YES" or "yeah": dispfunc(len(entry)) #program ends here
def dispfunc(iteration): output = str('') for i in range(iteration): output = output + entry print(output) entry = str(input('\nenter a string : ')) displaynumber = int(input('how many times must it be displayed? : ')) dispfunc(displaynumber) feedback = str(input('\nwould you try it for the stringlength? : ')) if feedback == 'yes' or 'Yes' or 'YES' or 'yeah': dispfunc(len(entry))
spaces = int(input()) steps =0 while(spaces > 0): if(spaces >= 5): spaces -= 5 steps += 1 elif(spaces >= 4): spaces -= 4 steps += 1 elif(spaces >= 3): spaces -= 3 steps += 1 elif(spaces >= 2): spaces -= 2 steps += 1 elif(spaces >= 1): spaces -= 1 steps += 1 print(str(steps))
spaces = int(input()) steps = 0 while spaces > 0: if spaces >= 5: spaces -= 5 steps += 1 elif spaces >= 4: spaces -= 4 steps += 1 elif spaces >= 3: spaces -= 3 steps += 1 elif spaces >= 2: spaces -= 2 steps += 1 elif spaces >= 1: spaces -= 1 steps += 1 print(str(steps))
# Straightforward implementation of the Singleton Pattern class Logger(object): _instance = None def __new__(cls): if cls._instance is None: print('Creating the object') cls._instance = super(Logger, cls).__new__(cls) # Put any initialization here. return cls._instance log1 = Logger() print(log1) log2 = Logger() print(log2) print('Are they the same object?', log1 is log2)
class Logger(object): _instance = None def __new__(cls): if cls._instance is None: print('Creating the object') cls._instance = super(Logger, cls).__new__(cls) return cls._instance log1 = logger() print(log1) log2 = logger() print(log2) print('Are they the same object?', log1 is log2)
load("@rules_pkg//:providers.bzl", "PackageFilesInfo", "PackageSymlinkInfo", "PackageFilegroupInfo") def _runfile_path(ctx, file, runfiles_dir): path = file.short_path if path.startswith(".."): return path.replace("..", runfiles_dir) if not file.owner.workspace_name: return "/".join([runfiles_dir, ctx.workspace_name, path]) return path def _runfiles_impl(ctx): default = ctx.attr.binary[DefaultInfo] executable = default.files_to_run.executable manifest = default.files_to_run.runfiles_manifest runfiles_dir = manifest.short_path.replace(manifest.basename, "")[:-1] files = depset(transitive = [default.files, default.default_runfiles.files]) fileMap = { executable.short_path: executable } for file in files.to_list(): fileMap[_runfile_path(ctx, file, runfiles_dir)] = file files = depset([executable], transitive = [files]) symlinks = [] for symlink in default.data_runfiles.root_symlinks.to_list(): info = PackageSymlinkInfo( source = "/%s" % _runfile_path(ctx, symlink.target_file, runfiles_dir), destination = "/%s" % "/".join([runfiles_dir, symlink.path]), attributes = { "mode": "0777" } ) symlinks.append([info, ctx.label]) return [ PackageFilegroupInfo( pkg_dirs = [], pkg_files = [ [PackageFilesInfo( dest_src_map = fileMap, attributes = {}, ), ctx.label] ], pkg_symlinks = symlinks, ), DefaultInfo(files = files), ] expand_runfiles = rule( implementation = _runfiles_impl, attrs = { "binary": attr.label() } )
load('@rules_pkg//:providers.bzl', 'PackageFilesInfo', 'PackageSymlinkInfo', 'PackageFilegroupInfo') def _runfile_path(ctx, file, runfiles_dir): path = file.short_path if path.startswith('..'): return path.replace('..', runfiles_dir) if not file.owner.workspace_name: return '/'.join([runfiles_dir, ctx.workspace_name, path]) return path def _runfiles_impl(ctx): default = ctx.attr.binary[DefaultInfo] executable = default.files_to_run.executable manifest = default.files_to_run.runfiles_manifest runfiles_dir = manifest.short_path.replace(manifest.basename, '')[:-1] files = depset(transitive=[default.files, default.default_runfiles.files]) file_map = {executable.short_path: executable} for file in files.to_list(): fileMap[_runfile_path(ctx, file, runfiles_dir)] = file files = depset([executable], transitive=[files]) symlinks = [] for symlink in default.data_runfiles.root_symlinks.to_list(): info = package_symlink_info(source='/%s' % _runfile_path(ctx, symlink.target_file, runfiles_dir), destination='/%s' % '/'.join([runfiles_dir, symlink.path]), attributes={'mode': '0777'}) symlinks.append([info, ctx.label]) return [package_filegroup_info(pkg_dirs=[], pkg_files=[[package_files_info(dest_src_map=fileMap, attributes={}), ctx.label]], pkg_symlinks=symlinks), default_info(files=files)] expand_runfiles = rule(implementation=_runfiles_impl, attrs={'binary': attr.label()})
# You can also nest for loops with # while loops. Check it out! for i in range(4): print("For loop: " + str(i)) x = i while x >= 0: print(" While loop: " + str(x)) x = x - 1
for i in range(4): print('For loop: ' + str(i)) x = i while x >= 0: print(' While loop: ' + str(x)) x = x - 1
##list of integers student_score= [99, 88, 60] ##printing out that list print(student_score) ##printing all the integers in a range print(list(range(1,10))) ##printing out all the integers in a range skipping one every time print(list(range(1,10,2))) ## manipulating a string and printting all the modifications x = "hello" y = x.upper() z = x.title() print(x, y, z)
student_score = [99, 88, 60] print(student_score) print(list(range(1, 10))) print(list(range(1, 10, 2))) x = 'hello' y = x.upper() z = x.title() print(x, y, z)
def harmonic(a, b): return (2*a*b)/(a + b); a, b = map(int, input().split()) print(harmonic(a, b))
def harmonic(a, b): return 2 * a * b / (a + b) (a, b) = map(int, input().split()) print(harmonic(a, b))
#!/usr/bin/env python # -*- coding: utf-8 -*- # Created by PyCharm # @author : mystic # @date : 2017/11/11 21:01 """ Override Configuration """ configs = { 'db': { 'host': '127.0.0.1' } }
""" Override Configuration """ configs = {'db': {'host': '127.0.0.1'}}
# from recipes.decor.tests import test_cases as tcx # pylint: disable-all def test_expose_decor(): @expose.show def foo(a, b=1, *args, c=2, **kws): pass foo(88, 12, 11, c=4, y=1) def test_expose_decor(): @expose.args def foo(a, b=1, *args, c=2, **kws): pass foo(88, 12, 11, c=4, y=1) # # print(i) # # print(sig) # # print(ba) # ba.apply_defaults() # # print(ba) # print(f'{ba!s}'.replace('<BoundArguments ', fun.__qualname__).rstrip('>')) # # print('*'*88) # from IPython import embed # embed(header="Embedded interpreter at 'test_expose.py':32")
def test_expose_decor(): @expose.show def foo(a, b=1, *args, c=2, **kws): pass foo(88, 12, 11, c=4, y=1) def test_expose_decor(): @expose.args def foo(a, b=1, *args, c=2, **kws): pass foo(88, 12, 11, c=4, y=1)
def fill_bin_num(dataframe, feature, bin_feature, bin_size, stat_measure, min_bin=None, max_bin=None, default_val='No'): if min_bin is None: min_bin = dataframe[bin_feature].min() if max_bin is None: max_bin = dataframe[bin_feature].max() new_dataframe = dataframe.copy() df_meancat = pd.DataFrame(columns=['interval', 'stat_measure']) for num_bin, subset in dataframe.groupby(pd.cut(dataframe[bin_feature], np.arange(min_bin, max_bin+bin_size, bin_size), include_lowest=True)): if stat_measure is 'mean': row = [num_bin, subset[feature].mean()] elif stat_measure is 'mode': mode_ar = subset[feature].mode().values if len(mode_ar) > 0: row = [num_bin, mode_ar[0]] else: row = [num_bin, default_val] else: raise Exception('Unknown statistical measure: ' + stat_measure) df_meancat.loc[len(df_meancat)] = row for index, row_df in dataframe[dataframe[feature].isna()].iterrows(): for _, row_meancat in df_meancat.iterrows(): if row_df[bin_feature] in row_meancat['interval']: new_dataframe.at[index, feature] = row_meancat['stat_measure'] return new_dataframe def make_dummy_cols(dataframe, column, prefix, drop_dummy): dummy = pd.get_dummies(dataframe[column], prefix=prefix) dummy = dummy.drop(columns=prefix+'_'+drop_dummy) dataframe = pd.concat([dataframe, dummy], axis=1) dataframe = dataframe.drop(columns=column) return dataframe def cleaning(dataframe_raw): dataframe = dataframe_raw.copy() dataframe = dataframe.set_index('ID') dataframe.loc[(dataframe['Age']<=13) & (dataframe['Education'].isna()), 'Education'] = 'Lower School/Kindergarten' dataframe.loc[(dataframe['Age']==14) & (dataframe['Education'].isna()), 'Education'] = '8th Grade' dataframe.loc[(dataframe['Age']<=17) & (dataframe['Education'].isna()), 'Education'] = '9 - 11th Grade' dataframe.loc[(dataframe['Age']<=21) & (dataframe['Education'].isna()), 'Education'] = 'High School' dataframe['Education'] = dataframe['Education'].fillna('Some College') dataframe.loc[(dataframe['Age']<=20) & (dataframe['MaritalStatus'].isna()), 'MaritalStatus'] = 'NeverMarried' dataframe.at[dataframe['MaritalStatus'].isna(), 'MaritalStatus'] = fill_bin_num(dataframe, 'MaritalStatus', 'Age', 5, 'mode',20) dataframe = dataframe.drop(columns=['HHIncome']) dataframe.loc[dataframe['HHIncomeMid'].isna(), 'HHIncomeMid'] = dataframe['HHIncomeMid'].mean() dataframe.loc[dataframe['Poverty'].isna(), 'Poverty'] = dataframe['Poverty'].mean() dataframe.loc[dataframe['HomeRooms'].isna(), 'HomeRooms'] = dataframe['HomeRooms'].mean() dataframe.loc[dataframe['HomeOwn'].isna(), 'HomeOwn'] = dataframe['HomeOwn'].mode().values[0] dataframe.loc[(dataframe['Work'].isna()) & (dataframe['Education'].isna()) & (dataframe['Age']<=20), 'Work'] = 'NotWorking' dataframe.loc[dataframe['Work'].isna(), 'Work'] = dataframe['Work'].mode().values[0] dataframe = fill_bin_num(dataframe, 'Weight', 'Age', 2, 'mean') dataframe = dataframe.drop(columns=['HeadCirc']) for index, row in dataframe.iterrows(): if np.isnan(row['Height']) and not np.isnan(row['Length']): dataframe.at[index, 'Height'] = row['Length'] dataframe = fill_bin_num(dataframe, 'Height', 'Age', 2, 'mean') dataframe = dataframe.drop(columns=['Length']) for index, row in dataframe[dataframe['BMI'].isna()].iterrows(): dataframe.at[index, 'BMI'] = row['Weight'] / ((row['Height']/100)**2) dataframe = dataframe.drop(columns='BMICatUnder20yrs') dataframe = dataframe.drop(columns='BMI_WHO') dataframe = fill_bin_num(dataframe, 'Pulse', 'Age', 10, 'mean') dataframe.loc[(dataframe['Age']<10) & (dataframe['BPSysAve'].isna()), 'BPSysAve'] = 105 dataframe = fill_bin_num(dataframe, 'BPSysAve', 'Age', 5, 'mean', 10) dataframe.loc[(dataframe['Age']<10) & (dataframe['BPDiaAve'].isna()), 'BPDiaAve'] = 60 dataframe = fill_bin_num(dataframe, 'BPDiaAve', 'Age', 5, 'mean', 10) dataframe = dataframe.drop(columns='BPSys1') dataframe = dataframe.drop(columns='BPDia1') dataframe = dataframe.drop(columns='BPSys2') dataframe = dataframe.drop(columns='BPDia2') dataframe = dataframe.drop(columns='BPSys3') dataframe = dataframe.drop(columns='BPDia3') dataframe = dataframe.drop(columns=['Testosterone']) dataframe.loc[(dataframe['Age']<10) & (dataframe['DirectChol'].isna()), 'DirectChol'] = 0 dataframe = fill_bin_num(dataframe, 'DirectChol', 'Age', 5, 'mean', 10) dataframe.loc[(dataframe['Age']<10) & (dataframe['TotChol'].isna()), 'TotChol'] = 0 dataframe = fill_bin_num(dataframe, 'TotChol', 'Age', 5, 'mean', 10) dataframe = dataframe.drop(columns=['UrineVol1']) dataframe = dataframe.drop(columns=['UrineFlow1']) dataframe = dataframe.drop(columns=['UrineVol2']) dataframe = dataframe.drop(columns=['UrineFlow2']) dataframe['Diabetes'] = dataframe['Diabetes'].fillna('No') dataframe['DiabetesAge'] = dataframe['DiabetesAge'].fillna(0) dataframe.loc[(dataframe['Age']<=12) & (dataframe['HealthGen'].isna()), 'HealthGen'] = 'Good' dataframe = fill_bin_num(dataframe, 'HealthGen', 'Age', 5, 'mode', 10) dataframe.loc[(dataframe['Age']<=12) & (dataframe['DaysMentHlthBad'].isna()), 'DaysMentHlthBad'] = 0 dataframe = fill_bin_num(dataframe, 'DaysMentHlthBad', 'Age', 5, 'mean', 10) dataframe.loc[(dataframe['Age']<=15) & (dataframe['LittleInterest'].isna()), 'LittleInterest'] = 'None' dataframe = fill_bin_num(dataframe, 'LittleInterest', 'Age', 5, 'mode', 15) dataframe.loc[(dataframe['Age']<=12) & (dataframe['DaysMentHlthBad'].isna()), 'DaysMentHlthBad'] = 0 dataframe = fill_bin_num(dataframe, 'DaysMentHlthBad', 'Age', 5, 'mean', 10) for index, row in dataframe.iterrows(): if np.isnan(row['nBabies']) and not np.isnan(row['nPregnancies']): dataframe.at[index, 'nBabies'] = row['nPregnancies'] dataframe['nBabies'] = dataframe['nBabies'].fillna(0) dataframe['nPregnancies'] = dataframe['nPregnancies'].fillna(0) dataframe['Age1stBaby'] = dataframe['Age1stBaby'].fillna(0) dataframe.loc[(dataframe['Age']==0) & (dataframe['SleepHrsNight'].isna()), 'SleepHrsNight'] = 14 dataframe.loc[(dataframe['Age']<=2) & (dataframe['SleepHrsNight'].isna()), 'SleepHrsNight'] = 12 dataframe.loc[(dataframe['Age']<=5) & (dataframe['SleepHrsNight'].isna()), 'SleepHrsNight'] = 10 dataframe.loc[(dataframe['Age']<=10) & (dataframe['SleepHrsNight'].isna()), 'SleepHrsNight'] = 9 dataframe.loc[(dataframe['Age']<=15) & (dataframe['SleepHrsNight'].isna()), 'SleepHrsNight'] = 8 dataframe['SleepHrsNight'] = dataframe['SleepHrsNight'].fillna(dataframe_raw['SleepHrsNight'].mean()) dataframe['SleepTrouble'] = dataframe['SleepTrouble'].fillna('No') dataframe.loc[(dataframe['Age']<=4) & (dataframe['PhysActive'].isna()), 'PhysActive'] = 'No' dataframe = fill_bin_num(dataframe, 'PhysActive', 'Age', 2, 'mode', 16) dataframe['PhysActive'] = dataframe['PhysActive'].fillna('Yes') # Big assumption here. All kids between 4 and 16 are physically active dataframe = dataframe.drop(columns=['PhysActiveDays']) dataframe = dataframe.drop(columns=['TVHrsDay']) dataframe = dataframe.drop(columns=['TVHrsDayChild']) dataframe = dataframe.drop(columns=['CompHrsDay']) dataframe = dataframe.drop(columns=['CompHrsDayChild']) dataframe.loc[(dataframe['Age']<18) & (dataframe['Alcohol12PlusYr'].isna()), 'Alcohol12PlusYr'] = 'No' dataframe = fill_bin_num(dataframe, 'Alcohol12PlusYr', 'Age', 5, 'mode', 18) dataframe.loc[(dataframe['Age']<18) & (dataframe['AlcoholDay'].isna()), 'AlcoholDay'] = 0 dataframe = fill_bin_num(dataframe, 'AlcoholDay', 'Age', 5, 'mean', 18) dataframe.loc[(dataframe['Age']<18) & (dataframe['AlcoholYear'].isna()), 'AlcoholYear'] = 0 dataframe = fill_bin_num(dataframe, 'AlcoholYear', 'Age', 5, 'mean', 18) dataframe.loc[(dataframe['Age']<20) & (dataframe['SmokeNow'].isna()), 'SmokeNow'] = 'No' dataframe = fill_bin_num(dataframe, 'SmokeNow', 'Age', 5, 'mode', 20) dataframe['Smoke100'] = dataframe['Smoke100'].fillna('No') dataframe['Smoke100n'] = dataframe['Smoke100n'].fillna('No') dataframe.loc[(dataframe['SmokeNow']=='No') & (dataframe['SmokeAge'].isna()), 'SmokeAge'] = 0 dataframe = fill_bin_num(dataframe, 'SmokeAge', 'Age', 5, 'mean', 20) dataframe.loc[(dataframe['Age']<18) & (dataframe['Marijuana'].isna()), 'Marijuana'] = 'No' dataframe.loc[(dataframe['Marijuana'].isna()) & (dataframe['SmokeNow']=='No'), 'Marijuana'] = 'No' dataframe = fill_bin_num(dataframe, 'Marijuana', 'Age', 5, 'mode', 20) dataframe.loc[(dataframe['Marijuana']=='No') & (dataframe['AgeFirstMarij'].isna()), 'AgeFirstMarij'] = 0 dataframe = fill_bin_num(dataframe, 'AgeFirstMarij', 'Age', 5, 'mean', 20) dataframe.loc[(dataframe['Marijuana']=='No') & (dataframe['RegularMarij'].isna()), 'RegularMarij'] = 'No' dataframe = fill_bin_num(dataframe, 'RegularMarij', 'Age', 5, 'mode', 20) dataframe.loc[(dataframe['RegularMarij']=='No') & (dataframe['AgeRegMarij'].isna()), 'AgeRegMarij'] = 0 dataframe = fill_bin_num(dataframe, 'AgeRegMarij', 'Age', 5, 'mean', 20) dataframe.loc[(dataframe['Age']<18) & (dataframe['HardDrugs'].isna()), 'HardDrugs'] = 'No' dataframe = fill_bin_num(dataframe, 'HardDrugs', 'Age', 5, 'mode', 18) mode_sex_age = dataframe['SexAge'].mode()[0] dataframe.loc[(dataframe['Age']<=mode_sex_age) & (dataframe['SexEver'].isna()), 'SexEver'] = 'No' dataframe['SexEver'] = dataframe['SexEver'].fillna('Yes') dataframe.loc[(dataframe['SexEver']=='No') & (dataframe['SexAge'].isna()), 'SexAge'] = 0 dataframe.loc[(dataframe['SexAge'].isna() & (dataframe['Age']<mode_sex_age)), 'SexAge'] = dataframe.loc[(dataframe['SexAge'].isna() & (dataframe['Age']<mode_sex_age)), 'Age'] dataframe['SexAge'] = dataframe['SexAge'].fillna(mode_sex_age) dataframe.loc[(dataframe['SexEver']=='No') & (dataframe['SexNumPartnLife'].isna()), 'SexNumPartnLife'] = 0 dataframe = fill_bin_num(dataframe, 'SexNumPartnLife', 'Age', 5, 'mean') dataframe['SexNumPartnLife'] = dataframe_raw.loc[(dataframe_raw['Age'] >= 60) & (dataframe_raw['Age'] <= 70), 'SexNumPartnLife'].mode()[0] # Missing values for the elderly. Assumed that lifetime sex partners do not increase after 60. dataframe.loc[(dataframe['SexEver']=='No') & (dataframe['SexNumPartYear'].isna()), 'SexNumPartYear'] = 0 dataframe = fill_bin_num(dataframe, 'SexNumPartYear', 'Age', 10, 'mean') dataframe['SexNumPartYear'] = dataframe['SexNumPartYear'].fillna(0) dataframe = dataframe.drop(columns=['SameSex']) dataframe = dataframe.drop(columns=['SexOrientation']) dataframe['PregnantNow'] = dataframe['PregnantNow'].fillna('No') # Making dummy variables dataframe['male'] = 1*(dataframe['Gender'] == 'male') dataframe = dataframe.drop(columns=['Gender']) dataframe['white'] = np.where(dataframe['Race1'] == 'white',1,0) dataframe = dataframe.drop(columns=['Race1']) dataframe = make_dummy_cols(dataframe, 'Education', 'education', '8th Grade') dataframe = make_dummy_cols(dataframe, 'MaritalStatus', 'maritalstatus', 'Separated') dataframe = make_dummy_cols(dataframe, 'HomeOwn', 'homeown', 'Other') dataframe = make_dummy_cols(dataframe, 'Work', 'work', 'Looking') dataframe['Diabetes'] = np.where(dataframe['Diabetes'] == 'Yes',1,0) dataframe = make_dummy_cols(dataframe, 'HealthGen', 'healthgen', 'Poor') dataframe = make_dummy_cols(dataframe, 'LittleInterest', 'littleinterest', 'None') dataframe = make_dummy_cols(dataframe, 'Depressed', 'depressed', 'None') dataframe['SleepTrouble'] = np.where(dataframe['SleepTrouble'] == 'Yes',1,0) dataframe['PhysActive'] = np.where(dataframe['PhysActive'] == 'Yes',1,0) dataframe['Alcohol12PlusYr'] = np.where(dataframe['Alcohol12PlusYr'] == 'Yes',1,0) dataframe['SmokeNow'] = np.where(dataframe['SmokeNow'] == 'Yes',1,0) dataframe['Smoke100'] = np.where(dataframe['Smoke100'] == 'Yes',1,0) dataframe['Smoke100n'] = np.where(dataframe['Smoke100n'] == 'Yes',1,0) dataframe['Marijuana'] = np.where(dataframe['Marijuana'] == 'Yes',1,0) dataframe['RegularMarij'] = np.where(dataframe['RegularMarij'] == 'Yes',1,0) dataframe['HardDrugs'] = np.where(dataframe['HardDrugs'] == 'Yes',1,0) dataframe['SexEver'] = np.where(dataframe['SexEver'] == 'Yes',1,0) dataframe['PregnantNow'] = np.where(dataframe['PregnantNow'] == 'Yes',1,0) return dataframe
def fill_bin_num(dataframe, feature, bin_feature, bin_size, stat_measure, min_bin=None, max_bin=None, default_val='No'): if min_bin is None: min_bin = dataframe[bin_feature].min() if max_bin is None: max_bin = dataframe[bin_feature].max() new_dataframe = dataframe.copy() df_meancat = pd.DataFrame(columns=['interval', 'stat_measure']) for (num_bin, subset) in dataframe.groupby(pd.cut(dataframe[bin_feature], np.arange(min_bin, max_bin + bin_size, bin_size), include_lowest=True)): if stat_measure is 'mean': row = [num_bin, subset[feature].mean()] elif stat_measure is 'mode': mode_ar = subset[feature].mode().values if len(mode_ar) > 0: row = [num_bin, mode_ar[0]] else: row = [num_bin, default_val] else: raise exception('Unknown statistical measure: ' + stat_measure) df_meancat.loc[len(df_meancat)] = row for (index, row_df) in dataframe[dataframe[feature].isna()].iterrows(): for (_, row_meancat) in df_meancat.iterrows(): if row_df[bin_feature] in row_meancat['interval']: new_dataframe.at[index, feature] = row_meancat['stat_measure'] return new_dataframe def make_dummy_cols(dataframe, column, prefix, drop_dummy): dummy = pd.get_dummies(dataframe[column], prefix=prefix) dummy = dummy.drop(columns=prefix + '_' + drop_dummy) dataframe = pd.concat([dataframe, dummy], axis=1) dataframe = dataframe.drop(columns=column) return dataframe def cleaning(dataframe_raw): dataframe = dataframe_raw.copy() dataframe = dataframe.set_index('ID') dataframe.loc[(dataframe['Age'] <= 13) & dataframe['Education'].isna(), 'Education'] = 'Lower School/Kindergarten' dataframe.loc[(dataframe['Age'] == 14) & dataframe['Education'].isna(), 'Education'] = '8th Grade' dataframe.loc[(dataframe['Age'] <= 17) & dataframe['Education'].isna(), 'Education'] = '9 - 11th Grade' dataframe.loc[(dataframe['Age'] <= 21) & dataframe['Education'].isna(), 'Education'] = 'High School' dataframe['Education'] = dataframe['Education'].fillna('Some College') dataframe.loc[(dataframe['Age'] <= 20) & dataframe['MaritalStatus'].isna(), 'MaritalStatus'] = 'NeverMarried' dataframe.at[dataframe['MaritalStatus'].isna(), 'MaritalStatus'] = fill_bin_num(dataframe, 'MaritalStatus', 'Age', 5, 'mode', 20) dataframe = dataframe.drop(columns=['HHIncome']) dataframe.loc[dataframe['HHIncomeMid'].isna(), 'HHIncomeMid'] = dataframe['HHIncomeMid'].mean() dataframe.loc[dataframe['Poverty'].isna(), 'Poverty'] = dataframe['Poverty'].mean() dataframe.loc[dataframe['HomeRooms'].isna(), 'HomeRooms'] = dataframe['HomeRooms'].mean() dataframe.loc[dataframe['HomeOwn'].isna(), 'HomeOwn'] = dataframe['HomeOwn'].mode().values[0] dataframe.loc[dataframe['Work'].isna() & dataframe['Education'].isna() & (dataframe['Age'] <= 20), 'Work'] = 'NotWorking' dataframe.loc[dataframe['Work'].isna(), 'Work'] = dataframe['Work'].mode().values[0] dataframe = fill_bin_num(dataframe, 'Weight', 'Age', 2, 'mean') dataframe = dataframe.drop(columns=['HeadCirc']) for (index, row) in dataframe.iterrows(): if np.isnan(row['Height']) and (not np.isnan(row['Length'])): dataframe.at[index, 'Height'] = row['Length'] dataframe = fill_bin_num(dataframe, 'Height', 'Age', 2, 'mean') dataframe = dataframe.drop(columns=['Length']) for (index, row) in dataframe[dataframe['BMI'].isna()].iterrows(): dataframe.at[index, 'BMI'] = row['Weight'] / (row['Height'] / 100) ** 2 dataframe = dataframe.drop(columns='BMICatUnder20yrs') dataframe = dataframe.drop(columns='BMI_WHO') dataframe = fill_bin_num(dataframe, 'Pulse', 'Age', 10, 'mean') dataframe.loc[(dataframe['Age'] < 10) & dataframe['BPSysAve'].isna(), 'BPSysAve'] = 105 dataframe = fill_bin_num(dataframe, 'BPSysAve', 'Age', 5, 'mean', 10) dataframe.loc[(dataframe['Age'] < 10) & dataframe['BPDiaAve'].isna(), 'BPDiaAve'] = 60 dataframe = fill_bin_num(dataframe, 'BPDiaAve', 'Age', 5, 'mean', 10) dataframe = dataframe.drop(columns='BPSys1') dataframe = dataframe.drop(columns='BPDia1') dataframe = dataframe.drop(columns='BPSys2') dataframe = dataframe.drop(columns='BPDia2') dataframe = dataframe.drop(columns='BPSys3') dataframe = dataframe.drop(columns='BPDia3') dataframe = dataframe.drop(columns=['Testosterone']) dataframe.loc[(dataframe['Age'] < 10) & dataframe['DirectChol'].isna(), 'DirectChol'] = 0 dataframe = fill_bin_num(dataframe, 'DirectChol', 'Age', 5, 'mean', 10) dataframe.loc[(dataframe['Age'] < 10) & dataframe['TotChol'].isna(), 'TotChol'] = 0 dataframe = fill_bin_num(dataframe, 'TotChol', 'Age', 5, 'mean', 10) dataframe = dataframe.drop(columns=['UrineVol1']) dataframe = dataframe.drop(columns=['UrineFlow1']) dataframe = dataframe.drop(columns=['UrineVol2']) dataframe = dataframe.drop(columns=['UrineFlow2']) dataframe['Diabetes'] = dataframe['Diabetes'].fillna('No') dataframe['DiabetesAge'] = dataframe['DiabetesAge'].fillna(0) dataframe.loc[(dataframe['Age'] <= 12) & dataframe['HealthGen'].isna(), 'HealthGen'] = 'Good' dataframe = fill_bin_num(dataframe, 'HealthGen', 'Age', 5, 'mode', 10) dataframe.loc[(dataframe['Age'] <= 12) & dataframe['DaysMentHlthBad'].isna(), 'DaysMentHlthBad'] = 0 dataframe = fill_bin_num(dataframe, 'DaysMentHlthBad', 'Age', 5, 'mean', 10) dataframe.loc[(dataframe['Age'] <= 15) & dataframe['LittleInterest'].isna(), 'LittleInterest'] = 'None' dataframe = fill_bin_num(dataframe, 'LittleInterest', 'Age', 5, 'mode', 15) dataframe.loc[(dataframe['Age'] <= 12) & dataframe['DaysMentHlthBad'].isna(), 'DaysMentHlthBad'] = 0 dataframe = fill_bin_num(dataframe, 'DaysMentHlthBad', 'Age', 5, 'mean', 10) for (index, row) in dataframe.iterrows(): if np.isnan(row['nBabies']) and (not np.isnan(row['nPregnancies'])): dataframe.at[index, 'nBabies'] = row['nPregnancies'] dataframe['nBabies'] = dataframe['nBabies'].fillna(0) dataframe['nPregnancies'] = dataframe['nPregnancies'].fillna(0) dataframe['Age1stBaby'] = dataframe['Age1stBaby'].fillna(0) dataframe.loc[(dataframe['Age'] == 0) & dataframe['SleepHrsNight'].isna(), 'SleepHrsNight'] = 14 dataframe.loc[(dataframe['Age'] <= 2) & dataframe['SleepHrsNight'].isna(), 'SleepHrsNight'] = 12 dataframe.loc[(dataframe['Age'] <= 5) & dataframe['SleepHrsNight'].isna(), 'SleepHrsNight'] = 10 dataframe.loc[(dataframe['Age'] <= 10) & dataframe['SleepHrsNight'].isna(), 'SleepHrsNight'] = 9 dataframe.loc[(dataframe['Age'] <= 15) & dataframe['SleepHrsNight'].isna(), 'SleepHrsNight'] = 8 dataframe['SleepHrsNight'] = dataframe['SleepHrsNight'].fillna(dataframe_raw['SleepHrsNight'].mean()) dataframe['SleepTrouble'] = dataframe['SleepTrouble'].fillna('No') dataframe.loc[(dataframe['Age'] <= 4) & dataframe['PhysActive'].isna(), 'PhysActive'] = 'No' dataframe = fill_bin_num(dataframe, 'PhysActive', 'Age', 2, 'mode', 16) dataframe['PhysActive'] = dataframe['PhysActive'].fillna('Yes') dataframe = dataframe.drop(columns=['PhysActiveDays']) dataframe = dataframe.drop(columns=['TVHrsDay']) dataframe = dataframe.drop(columns=['TVHrsDayChild']) dataframe = dataframe.drop(columns=['CompHrsDay']) dataframe = dataframe.drop(columns=['CompHrsDayChild']) dataframe.loc[(dataframe['Age'] < 18) & dataframe['Alcohol12PlusYr'].isna(), 'Alcohol12PlusYr'] = 'No' dataframe = fill_bin_num(dataframe, 'Alcohol12PlusYr', 'Age', 5, 'mode', 18) dataframe.loc[(dataframe['Age'] < 18) & dataframe['AlcoholDay'].isna(), 'AlcoholDay'] = 0 dataframe = fill_bin_num(dataframe, 'AlcoholDay', 'Age', 5, 'mean', 18) dataframe.loc[(dataframe['Age'] < 18) & dataframe['AlcoholYear'].isna(), 'AlcoholYear'] = 0 dataframe = fill_bin_num(dataframe, 'AlcoholYear', 'Age', 5, 'mean', 18) dataframe.loc[(dataframe['Age'] < 20) & dataframe['SmokeNow'].isna(), 'SmokeNow'] = 'No' dataframe = fill_bin_num(dataframe, 'SmokeNow', 'Age', 5, 'mode', 20) dataframe['Smoke100'] = dataframe['Smoke100'].fillna('No') dataframe['Smoke100n'] = dataframe['Smoke100n'].fillna('No') dataframe.loc[(dataframe['SmokeNow'] == 'No') & dataframe['SmokeAge'].isna(), 'SmokeAge'] = 0 dataframe = fill_bin_num(dataframe, 'SmokeAge', 'Age', 5, 'mean', 20) dataframe.loc[(dataframe['Age'] < 18) & dataframe['Marijuana'].isna(), 'Marijuana'] = 'No' dataframe.loc[dataframe['Marijuana'].isna() & (dataframe['SmokeNow'] == 'No'), 'Marijuana'] = 'No' dataframe = fill_bin_num(dataframe, 'Marijuana', 'Age', 5, 'mode', 20) dataframe.loc[(dataframe['Marijuana'] == 'No') & dataframe['AgeFirstMarij'].isna(), 'AgeFirstMarij'] = 0 dataframe = fill_bin_num(dataframe, 'AgeFirstMarij', 'Age', 5, 'mean', 20) dataframe.loc[(dataframe['Marijuana'] == 'No') & dataframe['RegularMarij'].isna(), 'RegularMarij'] = 'No' dataframe = fill_bin_num(dataframe, 'RegularMarij', 'Age', 5, 'mode', 20) dataframe.loc[(dataframe['RegularMarij'] == 'No') & dataframe['AgeRegMarij'].isna(), 'AgeRegMarij'] = 0 dataframe = fill_bin_num(dataframe, 'AgeRegMarij', 'Age', 5, 'mean', 20) dataframe.loc[(dataframe['Age'] < 18) & dataframe['HardDrugs'].isna(), 'HardDrugs'] = 'No' dataframe = fill_bin_num(dataframe, 'HardDrugs', 'Age', 5, 'mode', 18) mode_sex_age = dataframe['SexAge'].mode()[0] dataframe.loc[(dataframe['Age'] <= mode_sex_age) & dataframe['SexEver'].isna(), 'SexEver'] = 'No' dataframe['SexEver'] = dataframe['SexEver'].fillna('Yes') dataframe.loc[(dataframe['SexEver'] == 'No') & dataframe['SexAge'].isna(), 'SexAge'] = 0 dataframe.loc[dataframe['SexAge'].isna() & (dataframe['Age'] < mode_sex_age), 'SexAge'] = dataframe.loc[dataframe['SexAge'].isna() & (dataframe['Age'] < mode_sex_age), 'Age'] dataframe['SexAge'] = dataframe['SexAge'].fillna(mode_sex_age) dataframe.loc[(dataframe['SexEver'] == 'No') & dataframe['SexNumPartnLife'].isna(), 'SexNumPartnLife'] = 0 dataframe = fill_bin_num(dataframe, 'SexNumPartnLife', 'Age', 5, 'mean') dataframe['SexNumPartnLife'] = dataframe_raw.loc[(dataframe_raw['Age'] >= 60) & (dataframe_raw['Age'] <= 70), 'SexNumPartnLife'].mode()[0] dataframe.loc[(dataframe['SexEver'] == 'No') & dataframe['SexNumPartYear'].isna(), 'SexNumPartYear'] = 0 dataframe = fill_bin_num(dataframe, 'SexNumPartYear', 'Age', 10, 'mean') dataframe['SexNumPartYear'] = dataframe['SexNumPartYear'].fillna(0) dataframe = dataframe.drop(columns=['SameSex']) dataframe = dataframe.drop(columns=['SexOrientation']) dataframe['PregnantNow'] = dataframe['PregnantNow'].fillna('No') dataframe['male'] = 1 * (dataframe['Gender'] == 'male') dataframe = dataframe.drop(columns=['Gender']) dataframe['white'] = np.where(dataframe['Race1'] == 'white', 1, 0) dataframe = dataframe.drop(columns=['Race1']) dataframe = make_dummy_cols(dataframe, 'Education', 'education', '8th Grade') dataframe = make_dummy_cols(dataframe, 'MaritalStatus', 'maritalstatus', 'Separated') dataframe = make_dummy_cols(dataframe, 'HomeOwn', 'homeown', 'Other') dataframe = make_dummy_cols(dataframe, 'Work', 'work', 'Looking') dataframe['Diabetes'] = np.where(dataframe['Diabetes'] == 'Yes', 1, 0) dataframe = make_dummy_cols(dataframe, 'HealthGen', 'healthgen', 'Poor') dataframe = make_dummy_cols(dataframe, 'LittleInterest', 'littleinterest', 'None') dataframe = make_dummy_cols(dataframe, 'Depressed', 'depressed', 'None') dataframe['SleepTrouble'] = np.where(dataframe['SleepTrouble'] == 'Yes', 1, 0) dataframe['PhysActive'] = np.where(dataframe['PhysActive'] == 'Yes', 1, 0) dataframe['Alcohol12PlusYr'] = np.where(dataframe['Alcohol12PlusYr'] == 'Yes', 1, 0) dataframe['SmokeNow'] = np.where(dataframe['SmokeNow'] == 'Yes', 1, 0) dataframe['Smoke100'] = np.where(dataframe['Smoke100'] == 'Yes', 1, 0) dataframe['Smoke100n'] = np.where(dataframe['Smoke100n'] == 'Yes', 1, 0) dataframe['Marijuana'] = np.where(dataframe['Marijuana'] == 'Yes', 1, 0) dataframe['RegularMarij'] = np.where(dataframe['RegularMarij'] == 'Yes', 1, 0) dataframe['HardDrugs'] = np.where(dataframe['HardDrugs'] == 'Yes', 1, 0) dataframe['SexEver'] = np.where(dataframe['SexEver'] == 'Yes', 1, 0) dataframe['PregnantNow'] = np.where(dataframe['PregnantNow'] == 'Yes', 1, 0) return dataframe
def main(): num = int(input("introduce un numero:")) for x in range (1,num): print(x, end=",") else: print(num, end="")
def main(): num = int(input('introduce un numero:')) for x in range(1, num): print(x, end=',') else: print(num, end='')