content
stringlengths
7
1.05M
fixed_cases
stringlengths
1
1.28M
# @desc Triangle practice
# @desc by Bernie '21


def pythagSolve(side_A, side_B):
    """Return the hypotenuse of a right triangle with legs side_A and side_B.

    Uses the Pythagorean theorem: c = sqrt(a**2 + b**2).
    """
    side_A = side_A ** 2
    side_B = side_B ** 2
    side_C = (side_A + side_B) ** 0.5
    return side_C


def main():
    """Print hypotenuses for a few well-known Pythagorean triples."""
    print(pythagSolve(8, 6))
    print(pythagSolve(3, 4))
    print(pythagSolve(4, 3))
    print(pythagSolve(5, 12))


if __name__ == '__main__':
    main()
def pythag_solve(side_A, side_B):
    """Return the hypotenuse of a right triangle with legs side_A and side_B.

    BUG FIX: the original summed the *un-squared* legs (side_A + side_B)
    and returned the undefined name ``side_C`` (NameError). Correct
    Pythagorean form restored: c = sqrt(a**2 + b**2).
    """
    side_a = side_A ** 2
    side_b = side_B ** 2
    side_c = (side_a + side_b) ** 0.5
    return side_c


def main():
    """Print hypotenuses for a few well-known Pythagorean triples."""
    print(pythag_solve(8, 6))
    print(pythag_solve(3, 4))
    print(pythag_solve(4, 3))
    print(pythag_solve(5, 12))


if __name__ == '__main__':
    main()
# Needed to allow import # # Copyright (C) 2006 British Broadcasting Corporation and Kamaelia Contributors(1) # All Rights Reserved. # # You may only modify and redistribute this under the terms of any of the # following licenses(2): Mozilla Public License, V1.1, GNU General # Public License, V2.0, GNU Lesser General Public License, V2.1 # # (1) Kamaelia Contributors are listed in the AUTHORS file and at # http://kamaelia.sourceforge.net/AUTHORS - please extend this file, # not this notice. # (2) Reproduced in the COPYING file, and at: # http://kamaelia.sourceforge.net/COPYING # Under section 3.5 of the MPL, we are using this text since we deem the MPL # notice inappropriate for this file. As per MPL/GPL/LGPL removal of this # notice is prohibited. # # Please contact us via: kamaelia-list-owner@lists.sourceforge.net # to discuss alternative licensing. # ------------------------------------------------------------------------- """ ============================================================= Components for parsing PSI data in DVB MPEG Transport Streams ============================================================= DVB MPEG Transport Streams carry, on certain PIDs, tables of data. Some tables contain data explaining the structure of services (channels) being carried, and what PIDs to find their component audio and video streams being carried in. Others carry ancilliary data such as electronic programme guide information and events, or time and date information or the frequencies on which other multiplexes can be found. Tables are delivered in 'sections'. The parsing process is basically: * Use appropriate Kamaelia.Device.DVB component(s) to receive and demultiplex and appropriate PID containing table(s) from a broadcast multiplex (transport stream) * Use Kamaelia.Device.DVB.Parse.ReassemblePSITables to extract the table sections from a stream of TS packets * Feed these raw sections to an appropriate table parsing component to parse the table. 
These components typically convert the table from its raw binary form to python dictionary based data structures containing the same information, but parsed into a more convenient form. For a detailed explanation of the purposes and details of tables, see: - ISO/IEC 13818-1 (aka "MPEG: Systems") "GENERIC CODING OF MOVING PICTURES AND ASSOCIATED AUDIO: SYSTEMS" ISO / Motion Picture Experts Group - ETSI EN 300 468 "Digital Video Broadcasting (DVB); Specification for Service Information (SI) in DVB systems" ETSI / EBU (DVB group) """ # RELEASE: MH, MPS
""" ============================================================= Components for parsing PSI data in DVB MPEG Transport Streams ============================================================= DVB MPEG Transport Streams carry, on certain PIDs, tables of data. Some tables contain data explaining the structure of services (channels) being carried, and what PIDs to find their component audio and video streams being carried in. Others carry ancilliary data such as electronic programme guide information and events, or time and date information or the frequencies on which other multiplexes can be found. Tables are delivered in 'sections'. The parsing process is basically: * Use appropriate Kamaelia.Device.DVB component(s) to receive and demultiplex and appropriate PID containing table(s) from a broadcast multiplex (transport stream) * Use Kamaelia.Device.DVB.Parse.ReassemblePSITables to extract the table sections from a stream of TS packets * Feed these raw sections to an appropriate table parsing component to parse the table. These components typically convert the table from its raw binary form to python dictionary based data structures containing the same information, but parsed into a more convenient form. For a detailed explanation of the purposes and details of tables, see: - ISO/IEC 13818-1 (aka "MPEG: Systems") "GENERIC CODING OF MOVING PICTURES AND ASSOCIATED AUDIO: SYSTEMS" ISO / Motion Picture Experts Grou7p - ETSI EN 300 468 "Digital Video Broadcasting (DVB); Specification for Service Information (SI) in DVB systems" ETSI / EBU (DVB group) """
# Read two values, apply fixed weights (3.5 and 7.5), and print their
# weighted average.  11.0 is the sum of the two weights.
r = float(input()) * 3.5
pi = float(input()) * 7.5
a = (pi + r) / 11.0
print("MEDIA = {0:.5f}".format(a))
# Read two values, apply fixed weights (3.5 and 7.5), and print their
# weighted average.  11.0 is the sum of the two weights.
r = float(input()) * 3.5
pi = float(input()) * 7.5
a = (pi + r) / 11.0
print('MEDIA = {0:.5f}'.format(a))
class QueryDictMixin(object):
    """
    Simple query based dictionary mixin.

    Extends standard Python dictionaries so nested dictionaries can be
    queried with a single slash-separated path.  Useful when the structure
    is not known beforehand and path and data arrive from an external
    source.
    """

    path_separator = "/"

    def _get(self, data_dictionary, split_path, default):
        """Recursively follow split_path through data_dictionary.

        Args:
            data_dictionary (dict): Current dictionary being inspected.
            split_path (list): Remaining path segments; consumed in place.
            default: Value returned when the path cannot be resolved.

        Returns:
            The value at the end of the path, or ``default``.
        """
        key = split_path.pop(0)
        # Base case: last path segment -- plain dict lookup with default.
        if len(split_path) == 0:
            return dict.get(data_dictionary, key, default)
        # Missing intermediate key: path cannot be resolved.
        if key not in data_dictionary.keys():
            return default
        new_target = data_dictionary[key]
        # A non-dict value while path segments remain means the path goes
        # deeper than the data does.
        if not isinstance(new_target, dict):
            return default
        return self._get(new_target, split_path, default)

    def get(self, key, default=None):
        """
        Replacement for dict.get that accepts a slash-separated key path.

        ``get('first/second')`` returns 2 from::

            {"first": {"second": 2}, "third": {}}

        Args:
            key (str): Single key or '/'-separated path of keys.
            default: Value returned when the path cannot be resolved.

        Returns:
            The resolved value, or ``default``.
        """
        return self._get(self, key.split(self.path_separator), default)


# Concrete dict type with the query behaviour mixed in.
QueryDict = type('QueryDict', (QueryDictMixin, dict), {})
class Querydictmixin(object):
    """
    Simple query based dictionary mixin.

    Extends standard Python dictionaries so nested dictionaries can be
    queried with a single slash-separated path.
    """

    path_separator = '/'

    def _get(self, data_dictionary, split_path, default):
        """Recursively follow split_path through data_dictionary.

        Args:
            data_dictionary (dict): Current dictionary being inspected.
            split_path (list): Remaining path segments; consumed in place.
            default: Value returned when the path cannot be resolved.

        Returns:
            The value at the end of the path, or ``default``.
        """
        key = split_path.pop(0)
        # Base case: last segment -- plain dict lookup with default.
        if len(split_path) == 0:
            return dict.get(data_dictionary, key, default)
        if key not in data_dictionary.keys():
            return default
        new_target = data_dictionary[key]
        # Remaining path but non-dict value: cannot descend further.
        if not isinstance(new_target, dict):
            return default
        return self._get(new_target, split_path, default)

    def get(self, key, default=None):
        """
        Replacement for dict.get that accepts a slash-separated key path.

        ``get('first/second')`` returns 2 from::

            {"first": {"second": 2}, "third": {}}

        Args:
            key (str): Single key or '/'-separated path of keys.
            default: Value returned when the path cannot be resolved.

        Returns:
            The resolved value, or ``default``.
        """
        return self._get(self, key.split(self.path_separator), default)


# BUG FIX: the original referenced the undefined name ``QueryDictMixin``
# (the class above is named ``Querydictmixin``), raising NameError at import.
query_dict = type('QueryDict', (Querydictmixin, dict), {})
# Controller actions for hunt management.
# NOTE(review): `request` is not defined here -- presumably a web2py-style
# framework global injected at runtime; confirm against the framework.


def view():
    """Render the default view with no extra context."""
    return dict()


def list():  # intentionally shadows builtins.list: framework action names map to URLs
    """Pass the requested hunt id through to the view."""
    return dict(hunt_id=request.vars.hunt_id)


def approve_request():
    """Approves a request from another user."""
    hunt_id = request.vars.hunt_id
    user = request.vars.user
    # Implicitly returns None when either parameter is missing.
    if hunt_id and user:
        return dict(hunt_id=hunt_id, user=user)


def hunts_clients():
    """Show the clients of a hunt, when a hunt id was supplied."""
    hunt_id = request.vars.hunt_id
    if hunt_id:
        return dict(hunt_id=hunt_id)


def describe_client():
    """Describe one client within a hunt; requires both ids."""
    client_id = request.vars.client_id
    hunt_id = request.vars.hunt_id
    if client_id and hunt_id:
        return dict(client_id=client_id, hunt_id=hunt_id)
# Controller actions for hunt management ("fixed" dataset column).
# NOTE(review): `request` is a runtime framework global (web2py-style);
# it is not defined in this file -- confirm against the framework.


def view():
    """Render the default view with no extra context."""
    return dict()


def list():  # action name mandated by the URL router; shadows builtins.list
    """Pass the requested hunt id through to the view."""
    return dict(hunt_id=request.vars.hunt_id)


def approve_request():
    """Approves a request from another user."""
    hunt_id = request.vars.hunt_id
    user = request.vars.user
    if hunt_id and user:
        return dict(hunt_id=hunt_id, user=user)
    # Falls through to an implicit None when parameters are missing.


def hunts_clients():
    """Show the clients of a hunt, when a hunt id was supplied."""
    hunt_id = request.vars.hunt_id
    if hunt_id:
        return dict(hunt_id=hunt_id)


def describe_client():
    """Describe one client within a hunt; requires both ids."""
    client_id = request.vars.client_id
    hunt_id = request.vars.hunt_id
    if client_id and hunt_id:
        return dict(client_id=client_id, hunt_id=hunt_id)
# coding: utf-8
"""
Classification of Sugar exit codes.

These are intended to augment universal exit codes (found in Python's
`os` module with the `EX_` prefix or in `sysexits.h`).
"""

# The os.EX_* exit codes are Unix only, so in the interest of cross-platform
# compatibility they are defined explicitly here.
#
# These constants are documented here:
# https://docs.python.org/2/library/os.html#os.EX_OK
EX_OK = 0           # successful termination
EX_GENERIC = 1      # generic error
EX_USAGE = 64       # command line usage error
EX_NOUSER = 67      # addressee unknown
EX_UNAVAILABLE = 69 # service unavailable
EX_SOFTWARE = 70    # internal software error
EX_CANTCREAT = 73   # can't create (user) output file
EX_TEMPFAIL = 75    # temp failure; user is invited to retry
EX_NOPERM = 77      # permission denied
""" Classification of Sugar exit codes. These are intended to augment universal exit codes (found in Python's `os` module with the `EX_` prefix or in `sysexits.h`). """ ex_ok = 0 ex_generic = 1 ex_usage = 64 ex_nouser = 67 ex_unavailable = 69 ex_software = 70 ex_cantcreat = 73 ex_tempfail = 75 ex_noperm = 77
# Copyright (c) Vera Galstyan Jan 25,2018
# Thank participants who answered the language poll; nudge the rest.

people = ['jen', 'vera', 'phil', 'ofa', 'sarah']
languages = {
    'jen': 'c',
    'sarah': 'python',
    'edward': 'ruby',
    'phil': 'python',
}

for name in people:
    if name in languages.keys():
        # NOTE: "particiaption" typo preserved -- it is runtime output.
        print(name + " , thank you for your particiaption")
    else:
        print(name + " , you should take a poll")
# Thank participants who answered the language poll; nudge the rest.
people = ['jen', 'vera', 'phil', 'ofa', 'sarah']
languages = {'jen': 'c', 'sarah': 'python', 'edward': 'ruby', 'phil': 'python'}

for name in people:
    if name in languages.keys():
        # NOTE: "particiaption" typo preserved -- it is runtime output.
        print(name + ' , thank you for your particiaption')
    else:
        print(name + ' , you should take a poll')
opt_use_idf = "use_idf" opt_idf_boosting_threshold = "idf_boosting_threshold" opt_intensify_factor_m = "intensify_factor_m" opt_intensify_factor_p = "intensify_factor_p" opt_ceiling = "ceiling" opt_multiprocessing = "multiprocessing"
# String keys used to look up configuration options.
opt_use_idf = 'use_idf'
opt_idf_boosting_threshold = 'idf_boosting_threshold'
opt_intensify_factor_m = 'intensify_factor_m'
opt_intensify_factor_p = 'intensify_factor_p'
opt_ceiling = 'ceiling'
opt_multiprocessing = 'multiprocessing'
# AUTOGENERATED BY NBDEV! DO NOT EDIT!

__all__ = ["index", "modules", "custom_doc_links", "git_url"]

# Maps each exported symbol to the notebook that defines it.
index = {"product_moment_corr": "encoding.ipynb",
         "get_model_plus_scores": "encoding.ipynb",
         "BlockMultiOutput": "encoding.ipynb",
         "preprocess_bold_fmri": "preprocessing.ipynb",
         "get_remove_idx": "preprocessing.ipynb",
         "make_lagged_stimulus": "preprocessing.ipynb",
         "generate_lagged_stimulus": "preprocessing.ipynb",
         "make_X_Y": "preprocessing.ipynb",
         "create_stim_filename_from_args": "process_bids.ipynb",
         "create_output_filename_from_args": "process_bids.ipynb",
         "create_metadata_filename_from_args": "process_bids.ipynb",
         "create_bold_glob_from_args": "process_bids.ipynb",
         "run": "process_bids.ipynb",
         "get_func_bold_directory": "process_bids.ipynb",
         "process_bids_subject": "process_bids.ipynb",
         "run_model_for_subject": "process_bids.ipynb"}

modules = ["encoding.py", "preprocessing.py", "process_bids.py"]

doc_url = "https://mjboos.github.io/voxelwiseencoding/"

git_url = "https://github.com/mjboos/voxelwiseencoding/tree/master/"


def custom_doc_links(name):
    """No custom doc links for this project (nbdev hook)."""
    return None
__all__ = ['index', 'modules', 'custom_doc_links', 'git_url']

# Maps each exported symbol to the notebook that defines it.
index = {'product_moment_corr': 'encoding.ipynb',
         'get_model_plus_scores': 'encoding.ipynb',
         'BlockMultiOutput': 'encoding.ipynb',
         'preprocess_bold_fmri': 'preprocessing.ipynb',
         'get_remove_idx': 'preprocessing.ipynb',
         'make_lagged_stimulus': 'preprocessing.ipynb',
         'generate_lagged_stimulus': 'preprocessing.ipynb',
         'make_X_Y': 'preprocessing.ipynb',
         'create_stim_filename_from_args': 'process_bids.ipynb',
         'create_output_filename_from_args': 'process_bids.ipynb',
         'create_metadata_filename_from_args': 'process_bids.ipynb',
         'create_bold_glob_from_args': 'process_bids.ipynb',
         'run': 'process_bids.ipynb',
         'get_func_bold_directory': 'process_bids.ipynb',
         'process_bids_subject': 'process_bids.ipynb',
         'run_model_for_subject': 'process_bids.ipynb'}

modules = ['encoding.py', 'preprocessing.py', 'process_bids.py']

doc_url = 'https://mjboos.github.io/voxelwiseencoding/'

git_url = 'https://github.com/mjboos/voxelwiseencoding/tree/master/'


def custom_doc_links(name):
    """No custom doc links for this project (nbdev hook)."""
    return None
class Cella:
    """One colored grid cell parsed from a raw record.

    The record is a mapping with keys 'userX', 'userY' and 'userVal',
    where 'userVal' is the ASCII code of a color letter
    (R=red, B=blue, G=green, space=white).
    """

    def __init__(self, init_str):
        # ASCII code of the user-supplied letter -> color name.
        cols = {82: "red", 66: "blue", 71: "green", 32: "white"}
        self.x = init_str['userX']
        self.y = init_str['userY']
        self.color = cols[init_str['userVal']]

    def __repr__(self) -> str:
        """Tab-separated human-readable form."""
        str_ = f"x: {self.x}\ty: {self.y}\tcolor: {self.color}"
        return str_

    def to_csv(self) -> str:
        """Return one newline-terminated CSV row: x,y,color."""
        str_ = f"{self.x},{self.y},{self.color}\n"
        return str_
class Cella:
    """One colored grid cell parsed from a raw record.

    The record is a mapping with keys 'userX', 'userY' and 'userVal',
    where 'userVal' is the ASCII code of a color letter
    (R=red, B=blue, G=green, space=white).
    """

    def __init__(self, init_str):
        # ASCII code of the user-supplied letter -> color name.
        cols = {82: 'red', 66: 'blue', 71: 'green', 32: 'white'}
        self.x = init_str['userX']
        self.y = init_str['userY']
        self.color = cols[init_str['userVal']]

    def __repr__(self) -> str:
        """Tab-separated human-readable form."""
        return f'x: {self.x}\ty: {self.y}\tcolor: {self.color}'

    def to_csv(self) -> str:
        """Return one newline-terminated CSV row: x,y,color."""
        return f'{self.x},{self.y},{self.color}\n'
def reduce_namedtuples_for_httpRequest(tpdict):
    """Return a copy of namedtuple *tpdict* with list fields collapsed.

    Every field whose value is a list is replaced by that list's last
    element (via ``list.pop()`` -- note this MUTATES the original list).
    Non-list fields are passed through unchanged.

    Args:
        tpdict: A namedtuple instance.

    Returns:
        A new namedtuple of the same type with collapsed field values.
    """
    kvfinal = {}
    for key in tpdict._fields:
        _val = getattr(tpdict, key)
        if isinstance(_val, list):
            # pop() takes the last element and shrinks the source list.
            kvfinal.update({key: _val.pop()})
        else:
            kvfinal.update({key: _val})
    return tpdict._replace(**kvfinal)
def reduce_namedtuples_for_http_request(tpdict):
    """Return a copy of namedtuple *tpdict* with list fields collapsed.

    Every field whose value is a list is replaced by that list's last
    element (via ``list.pop()`` -- note this MUTATES the original list).
    Non-list fields are passed through unchanged.

    Args:
        tpdict: A namedtuple instance.

    Returns:
        A new namedtuple of the same type with collapsed field values.
    """
    kvfinal = {}
    for key in tpdict._fields:
        _val = getattr(tpdict, key)
        if isinstance(_val, list):
            # pop() takes the last element and shrinks the source list.
            kvfinal.update({key: _val.pop()})
        else:
            kvfinal.update({key: _val})
    return tpdict._replace(**kvfinal)
# Superset configuration overrides.
ROW_LIMIT = 5000                 # max rows returned per query
SUPERSET_WEBSERVER_PORT = 8088   # HTTP port for the web server
SUPERSET_WEBSERVER_TIMEOUT = 60  # request timeout, seconds
SECRET_KEY = "XPivhoGODD"        # NOTE(review): hard-coded secret; rotate/externalize for production
SQLALCHEMY_DATABASE_URI = "sqlite:////superset/superset.db"  # metadata database
WTF_CSRF_ENABLED = True          # enable CSRF protection on forms
# Superset-style configuration values ("fixed" dataset column).
# NOTE(review): lowercase names kept as the module's public interface,
# although config frameworks typically expect UPPER_SNAKE_CASE.
row_limit = 5000                 # max rows returned per query
superset_webserver_port = 8088   # HTTP port for the web server
superset_webserver_timeout = 60  # request timeout, seconds
secret_key = 'XPivhoGODD'        # NOTE(review): hard-coded secret; externalize for production
sqlalchemy_database_uri = 'sqlite:////superset/superset.db'  # metadata database
wtf_csrf_enabled = True          # enable CSRF protection on forms
def memorize(func):
    """Wrap one-argument *func* with a result cache.

    NOTE: recursive calls inside *func* go to the bare function, not the
    wrapper, so only top-level calls are cached.
    """
    memo = {}

    def helper(x):
        if x not in memo:
            memo[x] = func(x)
        return memo[x]

    return helper


def tripnach(n):
    """n-th term of the tribonacci-like sequence t(1)=t(2)=0, t(3)=1."""
    if n == 1:
        return 0
    elif n == 2:
        return 0
    elif n == 3:
        return 1
    else:
        return tripnach(n - 1) + tripnach(n - 2) + tripnach(n - 3)


def main():
    """Read n from stdin and print the n-th term."""
    n = int(input())
    func = memorize(tripnach)
    print(func(n))


if __name__ == '__main__':
    main()
def memorize(func):
    """Wrap one-argument *func* with a result cache.

    NOTE: recursive calls inside *func* bypass the cache; only top-level
    calls are memoized.
    """
    memo = {}

    def helper(x):
        if x not in memo:
            memo[x] = func(x)
        return memo[x]

    return helper


def tripnach(n):
    """n-th term of the tribonacci-like sequence t(1)=t(2)=0, t(3)=1."""
    if n == 1:
        return 0
    elif n == 2:
        return 0
    elif n == 3:
        return 1
    else:
        return tripnach(n - 1) + tripnach(n - 2) + tripnach(n - 3)


def main():
    """Read n from stdin and print the n-th term."""
    n = int(input())
    func = memorize(tripnach)
    print(func(n))


if __name__ == '__main__':
    main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Time-stamp: "2017-08-03 15:29:09 jlenain"

"""
Extras functions for FLaapLUC

@author Jean-Philippe Lenain <mailto:jlenain@in2p3.fr>
"""


def met2mjd(met):
    """
    Converts Mission Elapsed Time (MET, in seconds) in Modified Julian Day.

    Cf. http://fermi.gsfc.nasa.gov/ssc/data/analysis/documentation/Cicerone/Cicerone_Data/Time_in_ScienceTools.html
    to see how the time is handled in the Fermi Science Tools.

    Input: time in MET (s)
    Output: time in MJD (fraction of a day)
    """
    MJDREFI = 51910.0
    MJDREFF = 7.428703703703703e-4
    return MJDREFI + MJDREFF + met / 24. / 60. / 60.


def mjd2met(mjd):
    """
    Converts Modified Julian Day in Mission Elapsed Time (MET, in seconds).

    Inverse of met2mjd; same MJD reference epoch.

    Input: time in MJD (fraction of a day)
    Output: time in MET (s)
    """
    MJDREFI = 51910.0
    MJDREFF = 7.428703703703703e-4
    return 24. * 60. * 60 * (mjd - MJDREFI - MJDREFF)


def unixtime2mjd(unixtime):
    """
    Converts a UNIX time stamp in Modified Julian Day

    Input: time in UNIX seconds
    Output: time in MJD (fraction of a day)
    """
    # unixtime gives seconds passed since "The Epoch": 1.1.1970 00:00
    # MJD at that time was 40587.0
    return 40587.0 + unixtime / (24. * 60. * 60.)


def jd2gd(x):
    """
    Compute gregorian date out of julian date.

    input: julian date x (float)
    return value: string 'YYYY-MM-DD hh:mm:ss.s'

    Based on/copied from jd2gd.py by Enno Middelberg
    (http://www.atnf.csiro.au/people/Enno.Middelberg/python/jd2gd.py);
    algorithm from Jean Meeus, "Astronomical Formulae for Calculators"
    (description at http://mathforum.org/library/drmath/view/51907.html).
    """
    jd = float(x) + 0.5
    Z = int(jd)
    F = jd - Z
    alpha = int((Z - 1867216.25) / 36524.25)
    A = Z + 1 + alpha - int(alpha / 4)
    B = A + 1524
    C = int((B - 122.1) / 365.25)
    D = int(365.25 * C)
    E = int((B - D) / 30.6001)
    dd = B - D - int(30.6001 * E) + F
    if E < 13.5:
        mm = E - 1
    if E > 13.5:
        mm = E - 13
    if mm > 2.5:
        yyyy = C - 4716
    if mm < 2.5:
        yyyy = C - 4715
    # NOTE: the original also built month-length tables (daylist/daylist2)
    # with an inverted leap-year test, but never used them; removed here.
    h = int((dd - int(dd)) * 24)
    minute = int((((dd - int(dd)) * 24) - h) * 60)
    sec = 86400 * (dd - int(dd)) - h * 3600 - minute * 60
    return "%04d-%02d-%02d %02d:%02d:%04.1f" % (yyyy, mm, dd, h, minute, sec)


def mjd2gd(time):
    """
    Converts Modified Julian Day in Gregorian Date.

    Under the hood, it calls jd2gd().
    """
    return jd2gd(time + 2400000.5)
""" Extras functions for FLaapLUC @author Jean-Philippe Lenain <mailto:jlenain@in2p3.fr> """ def met2mjd(met): """ Converts Mission Elapsed Time (MET, in seconds) in Modified Julian Day. Cf. http://fermi.gsfc.nasa.gov/ssc/data/analysis/documentation/Cicerone/Cicerone_Data/Time_in_ScienceTools.html to see how the time is handled in the Fermi Science Tools. Input: time in MET (s) Output: time in MJD (fraction of a day) """ mjdrefi = 51910.0 mjdreff = 0.0007428703703703703 return MJDREFI + MJDREFF + met / 24.0 / 60.0 / 60.0 def mjd2met(mjd): """ Converts Modified Julian Day in Mission Elapsed Time (MET, in seconds). Cf. http://fermi.gsfc.nasa.gov/ssc/data/analysis/documentation/Cicerone/Cicerone_Data/Time_in_ScienceTools.html to see how the time is handled in the Fermi Science Tools. Input: time in MJD (fraction of a day) Output: time in MET (s) """ mjdrefi = 51910.0 mjdreff = 0.0007428703703703703 return 24.0 * 60.0 * 60 * (mjd - MJDREFI - MJDREFF) def unixtime2mjd(unixtime): """ Converts a UNIX time stamp in Modified Julian Day Input: time in UNIX seconds Output: time in MJD (fraction of a day) """ result = 40587.0 + unixtime / (24.0 * 60.0 * 60.0) return result def jd2gd(x): """ Compute gregorian date out of julian date input: julian date x (float) return value: string of gregorian date based on/copied from script jd2dg.py from Enno Middelberg http://www.atnf.csiro.au/people/Enno.Middelberg/python/jd2gd.py task to convert a list of julian dates to gregorian dates description at http://mathforum.org/library/drmath/view/51907.html Original algorithm in Jean Meeus, "Astronomical Formulae for Calculators" """ jd = float(x) jd = jd + 0.5 z = int(jd) f = jd - Z alpha = int((Z - 1867216.25) / 36524.25) a = Z + 1 + alpha - int(alpha / 4) b = A + 1524 c = int((B - 122.1) / 365.25) d = int(365.25 * C) e = int((B - D) / 30.6001) dd = B - D - int(30.6001 * E) + F if E < 13.5: mm = E - 1 if E > 13.5: mm = E - 13 if mm > 2.5: yyyy = C - 4716 if mm < 2.5: yyyy = C - 4715 daylist 
= [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] daylist2 = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] h = int((dd - int(dd)) * 24) min = int(((dd - int(dd)) * 24 - h) * 60) sec = 86400 * (dd - int(dd)) - h * 3600 - min * 60 if yyyy % 4 != 0: days = daylist2 elif yyyy % 400 == 0: days = daylist2 elif yyyy % 100 == 0: days = daylist else: days = daylist2 string = '%04d-%02d-%02d %02d:%02d:%04.1f' % (yyyy, mm, dd, h, min, sec) return string def mjd2gd(time): """ Converts Modified Julian Day in Gregorian Date. Under the hood, it calls jd2gd(). """ return jd2gd(time + 2400000.5)
Import("env") optimze_flags = [s for s in env.GetProjectOption("system_flags", "").splitlines() if s] linker_flags = [] common_flags = [ "-Wdouble-promotion", "-fsingle-precision-constant", "-fno-exceptions", "-fno-strict-aliasing", "-fstack-usage", "-fno-stack-protector", "-fomit-frame-pointer", "-fno-unwind-tables", "-fno-asynchronous-unwind-tables", "-fno-math-errno", "-fmerge-all-constants" ] if env.GetBuildType() == "release": common_flags.append("-s") common_flags.append("-O3") else: common_flags.append("-O1") env.Append( BUILD_FLAGS=["-std=gnu11"], BUILD_UNFLAGS=["-Og", "-Os"], ASFLAGS=optimze_flags + common_flags, CCFLAGS=linker_flags + optimze_flags + common_flags, LINKFLAGS=linker_flags + optimze_flags + common_flags )
# PlatformIO/SCons extra script: assemble compiler, assembler and linker
# flag sets and apply them to the construction environment.
# BUG FIX: the original called `import('env')` -- `import` is a Python
# keyword, so that line is a SyntaxError.  The SCons global is `Import`.
Import('env')

# Optional per-project flags taken from the "system_flags" project option.
optimze_flags = [s for s in env.GetProjectOption('system_flags', '').splitlines() if s]
linker_flags = []
common_flags = [
    '-Wdouble-promotion',
    '-fsingle-precision-constant',
    '-fno-exceptions',
    '-fno-strict-aliasing',
    '-fstack-usage',
    '-fno-stack-protector',
    '-fomit-frame-pointer',
    '-fno-unwind-tables',
    '-fno-asynchronous-unwind-tables',
    '-fno-math-errno',
    '-fmerge-all-constants',
]

# Release builds: strip symbols and optimize hard; otherwise light -O1.
if env.GetBuildType() == 'release':
    common_flags.append('-s')
    common_flags.append('-O3')
else:
    common_flags.append('-O1')

env.Append(
    BUILD_FLAGS=['-std=gnu11'],
    BUILD_UNFLAGS=['-Og', '-Os'],
    ASFLAGS=optimze_flags + common_flags,
    CCFLAGS=linker_flags + optimze_flags + common_flags,
    LINKFLAGS=linker_flags + optimze_flags + common_flags,
)
#! /usr/bin/env python3
"""
This file contains variable and parameter definitions for the task-tool
"""

# Dict with field names with their sql data type, python data type,
# display label and whether they are automatically generated
task_fields = {}
task_fields['name'] = {
    'sql_type': 'TEXT PRIMARY KEY',
    'p_type': str,
    'label': 'Name of the task',
    'auto': False
}
task_fields['creation'] = {
    'sql_type': 'INTEGER',
    'p_type': int,
    'label': 'Task creation time',
    'auto': True
}
task_fields['start_time'] = {
    'sql_type': 'INTEGER',
    'p_type': int,
    'label': 'Time the task started',
    'auto': True
}
task_fields['finish_time'] = {
    'sql_type': 'INTEGER',
    'p_type': int,
    'label': 'Time the task was completed',
    'auto': True
}
task_fields['priority'] = {
    'sql_type': 'INTEGER',
    'p_type': int,
    'label': 'Task priority from 0 (min) to 10 (max)',
    'auto': False
}
task_fields['difficulty'] = {
    'sql_type': 'INTEGER',
    'p_type': int,
    'label': 'Task difficulty from 0 (min) to 10 (max)',
    'auto': False
}
task_fields['run_time'] = {
    'sql_type': 'INTEGER',
    'p_type': int,
    'label': 'Total time the task has been running',
    'auto': True
}
task_fields['active_periods'] = {
    'sql_type': 'TEXT',
    'p_type': str,
    'label': 'Time periods the task has been running',
    'auto': True
}
task_fields['status'] = {
    'sql_type': 'INTEGER',
    'p_type': int,
    'label': 'Current status of the task',
    'auto': True
}
task_fields['tags'] = {
    'sql_type': 'TEXT',
    'p_type': str,
    'label': 'List of classification tags',
    'auto': False
}
task_fields['description'] = {
    'sql_type': 'TEXT',
    'p_type': str,
    'label': 'Detailed description of the task',
    'auto': False
}

# Map status numbers to actual words (and a Bootstrap color class).
status = {}
status[0] = {'label': 'new', 'bs_color': 'primary'}
status[1] = {'label': 'running', 'bs_color': 'success'}
status[2] = {'label': 'paused', 'bs_color': 'muted'}
status[3] = {'label': 'waiting', 'bs_color': 'warning'}
status[4] = {'label': 'completed', 'bs_color': 'info'}
status[5] = {'label': 'unknown', 'bs_color': 'dark'}
""" This file contains variable and parameter definitions for the task-tool """ task_fields = {} task_fields['name'] = {'sql_type': 'TEXT PRIMARY KEY', 'p_type': str, 'label': 'Name of the task', 'auto': False} task_fields['creation'] = {'sql_type': 'INTEGER', 'p_type': int, 'label': 'Task creation time', 'auto': True} task_fields['start_time'] = {'sql_type': 'INTEGER', 'p_type': int, 'label': 'Time the task started', 'auto': True} task_fields['finish_time'] = {'sql_type': 'INTEGER', 'p_type': int, 'label': 'Time the task was completed', 'auto': True} task_fields['priority'] = {'sql_type': 'INTEGER', 'p_type': int, 'label': 'Task priority from 0 (min) to 10 (max)', 'auto': False} task_fields['difficulty'] = {'sql_type': 'INTEGER', 'p_type': int, 'label': 'Task difficulty from 0 (min) to 10 (max)', 'auto': False} task_fields['run_time'] = {'sql_type': 'INTEGER', 'p_type': int, 'label': 'Total time the task has been running', 'auto': True} task_fields['active_periods'] = {'sql_type': 'TEXT', 'p_type': str, 'label': 'Time periods the task has been running', 'auto': True} task_fields['status'] = {'sql_type': 'INTEGER', 'p_type': int, 'label': 'Current status of the task', 'auto': True} task_fields['tags'] = {'sql_type': 'TEXT', 'p_type': str, 'label': 'List of classification tags', 'auto': False} task_fields['description'] = {'sql_type': 'TEXT', 'p_type': str, 'label': 'Detailed description of the task', 'auto': False} status = {} status[0] = {'label': 'new', 'bs_color': 'primary'} status[1] = {'label': 'running', 'bs_color': 'success'} status[2] = {'label': 'paused', 'bs_color': 'muted'} status[3] = {'label': 'waiting', 'bs_color': 'warning'} status[4] = {'label': 'completed', 'bs_color': 'info'} status[5] = {'label': 'unknown', 'bs_color': 'dark'}
# https://open.kattis.com/problems/pet
# Read five lines of space-separated scores; print the 1-based index of
# the entry with the highest total, followed by that total.
largest = 0
index = 1
for i in range(5):
    s = sum(map(int, input().split()))
    # Strict '>' keeps the earliest entry on ties.
    if s > largest:
        largest = s
        index = i + 1
print('%s %s' % (index, largest))
# Read five lines of space-separated scores; print the 1-based index of
# the entry with the highest total, followed by that total.
largest = 0
index = 1
for i in range(5):
    s = sum(map(int, input().split()))
    # Strict '>' keeps the earliest entry on ties.
    if s > largest:
        largest = s
        index = i + 1
print('%s %s' % (index, largest))
# # PySNMP MIB module CTRON-SFPS-PKTMGR-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CTRON-SFPS-PKTMGR-MIB # Produced by pysmi-0.3.4 at Mon Apr 29 18:15:27 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion") sfpsSwitchSfpsPacket, sfpsCSPPacket, sfpsPktDispatchStats = mibBuilder.importSymbols("CTRON-SFPS-INCLUDE-MIB", "sfpsSwitchSfpsPacket", "sfpsCSPPacket", "sfpsPktDispatchStats") ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup") Counter32, NotificationType, IpAddress, Gauge32, ModuleIdentity, ObjectIdentity, Integer32, TimeTicks, iso, Unsigned32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "NotificationType", "IpAddress", "Gauge32", "ModuleIdentity", "ObjectIdentity", "Integer32", "TimeTicks", "iso", "Unsigned32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "Bits") TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString") class SfpsSwitchInstance(Integer32): pass class HexInteger(Integer32): pass sfpsPacketMgrTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1), ) if mibBuilder.loadTexts: sfpsPacketMgrTable.setStatus('mandatory') sfpsPacketMgrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 
1), ).setIndexNames((0, "CTRON-SFPS-PKTMGR-MIB", "sfpsPacketMgrSwitchID"), (0, "CTRON-SFPS-PKTMGR-MIB", "sfpsPacketMgrPacketType")) if mibBuilder.loadTexts: sfpsPacketMgrEntry.setStatus('mandatory') sfpsPacketMgrSwitchID = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 1), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketMgrSwitchID.setStatus('mandatory') sfpsPacketMgrPacketType = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 2), HexInteger()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketMgrPacketType.setStatus('mandatory') sfpsPacketMgrTotalPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 3), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketMgrTotalPackets.setStatus('mandatory') sfpsPacketMgrPktsUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketMgrPktsUsed.setStatus('mandatory') sfpsPacketMgrPktsAvailable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 5), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketMgrPktsAvailable.setStatus('mandatory') sfpsPacketMgrPktsInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 6), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketMgrPktsInUse.setStatus('mandatory') sfpsPacketMgrNotFound = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 8), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketMgrNotFound.setStatus('mandatory') sfpsPacketMgrTooLarge = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 9), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketMgrTooLarge.setStatus('mandatory') sfpsPacketMgrToCreate = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 10), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: 
sfpsPacketMgrToCreate.setStatus('mandatory') sfpsPacketMgrReInit = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("other", 1), ("reinit", 2)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: sfpsPacketMgrReInit.setStatus('mandatory') sfpsPacketListTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2), ) if mibBuilder.loadTexts: sfpsPacketListTable.setStatus('mandatory') sfpsPacketListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1), ).setIndexNames((0, "CTRON-SFPS-PKTMGR-MIB", "sfpsPacketListPacketType"), (0, "CTRON-SFPS-PKTMGR-MIB", "sfpsPacketListSize")) if mibBuilder.loadTexts: sfpsPacketListEntry.setStatus('mandatory') sfpsPacketListPacketType = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 1), HexInteger()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketListPacketType.setStatus('mandatory') sfpsPacketListSize = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 2), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketListSize.setStatus('mandatory') sfpsPacketListTotalPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 3), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketListTotalPackets.setStatus('mandatory') sfpsPacketListPktsUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketListPktsUsed.setStatus('mandatory') sfpsPacketListPktsLeft = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 5), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketListPktsLeft.setStatus('mandatory') sfpsPacketListPktsInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 6), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketListPktsInUse.setStatus('mandatory') 
sfpsPacketListLowWater = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 7), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketListLowWater.setStatus('mandatory') sfpsPacketListNotFound = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 8), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketListNotFound.setStatus('mandatory') sfpsPacketListStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("enabled", 2), ("disabled", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketListStatus.setStatus('mandatory') sfpsPacketSizeTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3), ) if mibBuilder.loadTexts: sfpsPacketSizeTable.setStatus('mandatory') sfpsPacketSizeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3, 1), ).setIndexNames((0, "CTRON-SFPS-PKTMGR-MIB", "sfpsPacketSizeSwitchInstance"), (0, "CTRON-SFPS-PKTMGR-MIB", "sfpsPacketSizeSize")) if mibBuilder.loadTexts: sfpsPacketSizeEntry.setStatus('mandatory') sfpsPacketSizeSwitchInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3, 1, 1), SfpsSwitchInstance()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketSizeSwitchInstance.setStatus('mandatory') sfpsPacketSizeSize = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3, 1, 2), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketSizeSize.setStatus('mandatory') sfpsPacketSizePktsUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3, 1, 3), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketSizePktsUsed.setStatus('mandatory') sfpsPacketSizeNotFound = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketSizeNotFound.setStatus('mandatory') sfpsPacketQTable 
= MibTable((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4), ) if mibBuilder.loadTexts: sfpsPacketQTable.setStatus('mandatory') sfpsPacketQEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4, 1), ).setIndexNames((0, "CTRON-SFPS-PKTMGR-MIB", "sfpsPacketQPriorityQ")) if mibBuilder.loadTexts: sfpsPacketQEntry.setStatus('mandatory') sfpsPacketQPriorityQ = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("low", 1), ("medium", 2), ("high", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketQPriorityQ.setStatus('mandatory') sfpsPacketQTotalPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4, 1, 2), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketQTotalPackets.setStatus('mandatory') sfpsPacketQCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4, 1, 3), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketQCurrent.setStatus('mandatory') sfpsPacketQHighWater = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsPacketQHighWater.setStatus('mandatory') sfpsCSPPacketStatsPacketsSentBad = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 10, 1), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsCSPPacketStatsPacketsSentBad.setStatus('mandatory') sfpsCSPPacketStatsPacketsSentGood = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 10, 2), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsCSPPacketStatsPacketsSentGood.setStatus('mandatory') sfpsCSPPacketStatsPacketsReceivedBad = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 10, 3), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsCSPPacketStatsPacketsReceivedBad.setStatus('mandatory') sfpsCSPPacketStatsPacketsReceivedGood = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 10, 4), 
Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: sfpsCSPPacketStatsPacketsReceivedGood.setStatus('mandatory') sfpsPktDispatchStatsVerb = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("other", 1), ("resetAllStats", 2)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: sfpsPktDispatchStatsVerb.setStatus('mandatory') numHPMInvalidFrameTypeDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 2), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numHPMInvalidFrameTypeDrops.setStatus('mandatory') numHPMFilterMgtPortDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 3), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numHPMFilterMgtPortDrops.setStatus('mandatory') numHPMPhysToLogPortDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numHPMPhysToLogPortDrops.setStatus('mandatory') numHPMNullSFPSPktDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 5), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numHPMNullSFPSPktDrops.setStatus('mandatory') numHPM81fdThrottleDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 6), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numHPM81fdThrottleDrops.setStatus('mandatory') numHPM81ffThrottleDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 7), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numHPM81ffThrottleDrops.setStatus('mandatory') numHPMPhysStandbyMaskDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 8), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numHPMPhysStandbyMaskDrops.setStatus('mandatory') numBSInvSrcPortDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 9), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: 
numBSInvSrcPortDrops.setStatus('mandatory') numBSSourceBlockDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 10), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSSourceBlockDrops.setStatus('mandatory') numBSViolationDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 11), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSViolationDrops.setStatus('mandatory') numBSUnknownPortDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 12), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSUnknownPortDrops.setStatus('mandatory') numBSStandbyPortDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 13), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSStandbyPortDrops.setStatus('mandatory') numBSFabricNghbrPortDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 14), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSFabricNghbrPortDrops.setStatus('mandatory') numBSGoingToAccessPortDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 15), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSGoingToAccessPortDrops.setStatus('mandatory') numBSInvPortTypeDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 16), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSInvPortTypeDrops.setStatus('mandatory') numBSNullCallDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 17), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSNullCallDrops.setStatus('mandatory') numBSNullBottomCPDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 18), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSNullBottomCPDrops.setStatus('mandatory') numBSInvCSPTypeDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 19), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSInvCSPTypeDrops.setStatus('mandatory') numBSNonHello81fdDrops = 
MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 20), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSNonHello81fdDrops.setStatus('mandatory') numBSCSPCtrlDisableDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 21), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSCSPCtrlDisableDrops.setStatus('mandatory') numBSCSPCtrlIndexDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 22), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBSCSPCtrlIndexDrops.setStatus('mandatory') numBCPNullCallDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 23), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBCPNullCallDrops.setStatus('mandatory') numBCPCPFaultedDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 24), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBCPCPFaultedDrops.setStatus('mandatory') numBCPGleanFailDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 25), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBCPGleanFailDrops.setStatus('mandatory') numBCPCPHaltedDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 26), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBCPCPHaltedDrops.setStatus('mandatory') numBCPSwitchedBCADrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 27), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBCPSwitchedBCADrops.setStatus('mandatory') numBCPCallNotAcceptedDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 28), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numBCPCallNotAcceptedDrops.setStatus('mandatory') numHPM81fdNullPktDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 29), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numHPM81fdNullPktDrops.setStatus('mandatory') numHPM81fdHelloNullPktDrops = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 30), 
Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: numHPM81fdHelloNullPktDrops.setStatus('mandatory') mibBuilder.exportSymbols("CTRON-SFPS-PKTMGR-MIB", numBSInvCSPTypeDrops=numBSInvCSPTypeDrops, numBCPGleanFailDrops=numBCPGleanFailDrops, sfpsPacketListPktsLeft=sfpsPacketListPktsLeft, numBCPCPHaltedDrops=numBCPCPHaltedDrops, sfpsPacketListNotFound=sfpsPacketListNotFound, sfpsPacketSizeSize=sfpsPacketSizeSize, numBCPCPFaultedDrops=numBCPCPFaultedDrops, numBSNullBottomCPDrops=numBSNullBottomCPDrops, sfpsPacketMgrPacketType=sfpsPacketMgrPacketType, sfpsPacketListTotalPackets=sfpsPacketListTotalPackets, sfpsCSPPacketStatsPacketsSentGood=sfpsCSPPacketStatsPacketsSentGood, sfpsPacketMgrSwitchID=sfpsPacketMgrSwitchID, numBCPCallNotAcceptedDrops=numBCPCallNotAcceptedDrops, sfpsPacketQPriorityQ=sfpsPacketQPriorityQ, sfpsPacketMgrPktsAvailable=sfpsPacketMgrPktsAvailable, numHPMInvalidFrameTypeDrops=numHPMInvalidFrameTypeDrops, sfpsPacketSizeSwitchInstance=sfpsPacketSizeSwitchInstance, numBSFabricNghbrPortDrops=numBSFabricNghbrPortDrops, sfpsPacketSizeTable=sfpsPacketSizeTable, numHPM81fdHelloNullPktDrops=numHPM81fdHelloNullPktDrops, sfpsPacketListSize=sfpsPacketListSize, numBSGoingToAccessPortDrops=numBSGoingToAccessPortDrops, numHPMFilterMgtPortDrops=numHPMFilterMgtPortDrops, sfpsPacketMgrEntry=sfpsPacketMgrEntry, sfpsCSPPacketStatsPacketsSentBad=sfpsCSPPacketStatsPacketsSentBad, numBSCSPCtrlDisableDrops=numBSCSPCtrlDisableDrops, sfpsPacketMgrTooLarge=sfpsPacketMgrTooLarge, numHPM81fdThrottleDrops=numHPM81fdThrottleDrops, numBSNullCallDrops=numBSNullCallDrops, numBSInvSrcPortDrops=numBSInvSrcPortDrops, sfpsPacketMgrToCreate=sfpsPacketMgrToCreate, SfpsSwitchInstance=SfpsSwitchInstance, sfpsPacketListLowWater=sfpsPacketListLowWater, numBSViolationDrops=numBSViolationDrops, sfpsPacketMgrPktsUsed=sfpsPacketMgrPktsUsed, numHPM81ffThrottleDrops=numHPM81ffThrottleDrops, numBSUnknownPortDrops=numBSUnknownPortDrops, sfpsPacketQHighWater=sfpsPacketQHighWater, 
sfpsCSPPacketStatsPacketsReceivedGood=sfpsCSPPacketStatsPacketsReceivedGood, sfpsPacketMgrTable=sfpsPacketMgrTable, sfpsPacketListEntry=sfpsPacketListEntry, sfpsPacketQTable=sfpsPacketQTable, sfpsPacketQTotalPackets=sfpsPacketQTotalPackets, sfpsPktDispatchStatsVerb=sfpsPktDispatchStatsVerb, numHPMPhysToLogPortDrops=numHPMPhysToLogPortDrops, numBSSourceBlockDrops=numBSSourceBlockDrops, sfpsPacketSizePktsUsed=sfpsPacketSizePktsUsed, numBSCSPCtrlIndexDrops=numBSCSPCtrlIndexDrops, sfpsPacketListTable=sfpsPacketListTable, sfpsPacketQCurrent=sfpsPacketQCurrent, numHPMPhysStandbyMaskDrops=numHPMPhysStandbyMaskDrops, numBSInvPortTypeDrops=numBSInvPortTypeDrops, numBSNonHello81fdDrops=numBSNonHello81fdDrops, numBCPSwitchedBCADrops=numBCPSwitchedBCADrops, sfpsPacketMgrPktsInUse=sfpsPacketMgrPktsInUse, sfpsPacketListPacketType=sfpsPacketListPacketType, sfpsCSPPacketStatsPacketsReceivedBad=sfpsCSPPacketStatsPacketsReceivedBad, numBSStandbyPortDrops=numBSStandbyPortDrops, sfpsPacketMgrTotalPackets=sfpsPacketMgrTotalPackets, sfpsPacketListStatus=sfpsPacketListStatus, sfpsPacketMgrReInit=sfpsPacketMgrReInit, sfpsPacketListPktsInUse=sfpsPacketListPktsInUse, sfpsPacketQEntry=sfpsPacketQEntry, HexInteger=HexInteger, sfpsPacketSizeNotFound=sfpsPacketSizeNotFound, numHPMNullSFPSPktDrops=numHPMNullSFPSPktDrops, numBCPNullCallDrops=numBCPNullCallDrops, sfpsPacketMgrNotFound=sfpsPacketMgrNotFound, numHPM81fdNullPktDrops=numHPM81fdNullPktDrops, sfpsPacketListPktsUsed=sfpsPacketListPktsUsed, sfpsPacketSizeEntry=sfpsPacketSizeEntry)
(object_identifier, octet_string, integer) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'OctetString', 'Integer') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (constraints_intersection, single_value_constraint, value_range_constraint, value_size_constraint, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ConstraintsIntersection', 'SingleValueConstraint', 'ValueRangeConstraint', 'ValueSizeConstraint', 'ConstraintsUnion') (sfps_switch_sfps_packet, sfps_csp_packet, sfps_pkt_dispatch_stats) = mibBuilder.importSymbols('CTRON-SFPS-INCLUDE-MIB', 'sfpsSwitchSfpsPacket', 'sfpsCSPPacket', 'sfpsPktDispatchStats') (module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup') (counter32, notification_type, ip_address, gauge32, module_identity, object_identity, integer32, time_ticks, iso, unsigned32, mib_identifier, mib_scalar, mib_table, mib_table_row, mib_table_column, counter64, bits) = mibBuilder.importSymbols('SNMPv2-SMI', 'Counter32', 'NotificationType', 'IpAddress', 'Gauge32', 'ModuleIdentity', 'ObjectIdentity', 'Integer32', 'TimeTicks', 'iso', 'Unsigned32', 'MibIdentifier', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Counter64', 'Bits') (textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString') class Sfpsswitchinstance(Integer32): pass class Hexinteger(Integer32): pass sfps_packet_mgr_table = mib_table((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1)) if mibBuilder.loadTexts: sfpsPacketMgrTable.setStatus('mandatory') sfps_packet_mgr_entry = mib_table_row((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1)).setIndexNames((0, 'CTRON-SFPS-PKTMGR-MIB', 'sfpsPacketMgrSwitchID'), (0, 'CTRON-SFPS-PKTMGR-MIB', 'sfpsPacketMgrPacketType')) if mibBuilder.loadTexts: sfpsPacketMgrEntry.setStatus('mandatory') sfps_packet_mgr_switch_id = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 1), 
integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketMgrSwitchID.setStatus('mandatory') sfps_packet_mgr_packet_type = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 2), hex_integer()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketMgrPacketType.setStatus('mandatory') sfps_packet_mgr_total_packets = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 3), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketMgrTotalPackets.setStatus('mandatory') sfps_packet_mgr_pkts_used = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 4), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketMgrPktsUsed.setStatus('mandatory') sfps_packet_mgr_pkts_available = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 5), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketMgrPktsAvailable.setStatus('mandatory') sfps_packet_mgr_pkts_in_use = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 6), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketMgrPktsInUse.setStatus('mandatory') sfps_packet_mgr_not_found = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 8), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketMgrNotFound.setStatus('mandatory') sfps_packet_mgr_too_large = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 9), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketMgrTooLarge.setStatus('mandatory') sfps_packet_mgr_to_create = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 10), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsPacketMgrToCreate.setStatus('mandatory') sfps_packet_mgr_re_init = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 1, 1, 11), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('other', 1), 
('reinit', 2)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsPacketMgrReInit.setStatus('mandatory') sfps_packet_list_table = mib_table((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2)) if mibBuilder.loadTexts: sfpsPacketListTable.setStatus('mandatory') sfps_packet_list_entry = mib_table_row((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1)).setIndexNames((0, 'CTRON-SFPS-PKTMGR-MIB', 'sfpsPacketListPacketType'), (0, 'CTRON-SFPS-PKTMGR-MIB', 'sfpsPacketListSize')) if mibBuilder.loadTexts: sfpsPacketListEntry.setStatus('mandatory') sfps_packet_list_packet_type = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 1), hex_integer()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketListPacketType.setStatus('mandatory') sfps_packet_list_size = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 2), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketListSize.setStatus('mandatory') sfps_packet_list_total_packets = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 3), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketListTotalPackets.setStatus('mandatory') sfps_packet_list_pkts_used = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 4), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketListPktsUsed.setStatus('mandatory') sfps_packet_list_pkts_left = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 5), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketListPktsLeft.setStatus('mandatory') sfps_packet_list_pkts_in_use = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 6), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketListPktsInUse.setStatus('mandatory') sfps_packet_list_low_water = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 7), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketListLowWater.setStatus('mandatory') 
sfps_packet_list_not_found = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 8), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketListNotFound.setStatus('mandatory') sfps_packet_list_status = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 2, 1, 9), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('other', 1), ('enabled', 2), ('disabled', 3)))).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketListStatus.setStatus('mandatory') sfps_packet_size_table = mib_table((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3)) if mibBuilder.loadTexts: sfpsPacketSizeTable.setStatus('mandatory') sfps_packet_size_entry = mib_table_row((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3, 1)).setIndexNames((0, 'CTRON-SFPS-PKTMGR-MIB', 'sfpsPacketSizeSwitchInstance'), (0, 'CTRON-SFPS-PKTMGR-MIB', 'sfpsPacketSizeSize')) if mibBuilder.loadTexts: sfpsPacketSizeEntry.setStatus('mandatory') sfps_packet_size_switch_instance = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3, 1, 1), sfps_switch_instance()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketSizeSwitchInstance.setStatus('mandatory') sfps_packet_size_size = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3, 1, 2), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketSizeSize.setStatus('mandatory') sfps_packet_size_pkts_used = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3, 1, 3), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketSizePktsUsed.setStatus('mandatory') sfps_packet_size_not_found = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 3, 1, 4), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketSizeNotFound.setStatus('mandatory') sfps_packet_q_table = mib_table((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4)) if mibBuilder.loadTexts: sfpsPacketQTable.setStatus('mandatory') sfps_packet_q_entry = 
mib_table_row((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4, 1)).setIndexNames((0, 'CTRON-SFPS-PKTMGR-MIB', 'sfpsPacketQPriorityQ')) if mibBuilder.loadTexts: sfpsPacketQEntry.setStatus('mandatory') sfps_packet_q_priority_q = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('low', 1), ('medium', 2), ('high', 3)))).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketQPriorityQ.setStatus('mandatory') sfps_packet_q_total_packets = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4, 1, 2), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketQTotalPackets.setStatus('mandatory') sfps_packet_q_current = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4, 1, 3), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketQCurrent.setStatus('mandatory') sfps_packet_q_high_water = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 4, 1, 4), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsPacketQHighWater.setStatus('mandatory') sfps_csp_packet_stats_packets_sent_bad = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 10, 1), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsCSPPacketStatsPacketsSentBad.setStatus('mandatory') sfps_csp_packet_stats_packets_sent_good = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 10, 2), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsCSPPacketStatsPacketsSentGood.setStatus('mandatory') sfps_csp_packet_stats_packets_received_bad = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 10, 3), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsCSPPacketStatsPacketsReceivedBad.setStatus('mandatory') sfps_csp_packet_stats_packets_received_good = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 10, 4), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: 
sfpsCSPPacketStatsPacketsReceivedGood.setStatus('mandatory') sfps_pkt_dispatch_stats_verb = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('other', 1), ('resetAllStats', 2)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsPktDispatchStatsVerb.setStatus('mandatory') num_hpm_invalid_frame_type_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 2), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numHPMInvalidFrameTypeDrops.setStatus('mandatory') num_hpm_filter_mgt_port_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 3), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numHPMFilterMgtPortDrops.setStatus('mandatory') num_hpm_phys_to_log_port_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 4), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numHPMPhysToLogPortDrops.setStatus('mandatory') num_hpm_null_sfps_pkt_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 5), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numHPMNullSFPSPktDrops.setStatus('mandatory') num_hpm81fd_throttle_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 6), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numHPM81fdThrottleDrops.setStatus('mandatory') num_hpm81ff_throttle_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 7), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numHPM81ffThrottleDrops.setStatus('mandatory') num_hpm_phys_standby_mask_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 8), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numHPMPhysStandbyMaskDrops.setStatus('mandatory') num_bs_inv_src_port_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 9), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: 
numBSInvSrcPortDrops.setStatus('mandatory') num_bs_source_block_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 10), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSSourceBlockDrops.setStatus('mandatory') num_bs_violation_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 11), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSViolationDrops.setStatus('mandatory') num_bs_unknown_port_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 12), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSUnknownPortDrops.setStatus('mandatory') num_bs_standby_port_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 13), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSStandbyPortDrops.setStatus('mandatory') num_bs_fabric_nghbr_port_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 14), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSFabricNghbrPortDrops.setStatus('mandatory') num_bs_going_to_access_port_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 15), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSGoingToAccessPortDrops.setStatus('mandatory') num_bs_inv_port_type_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 16), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSInvPortTypeDrops.setStatus('mandatory') num_bs_null_call_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 17), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSNullCallDrops.setStatus('mandatory') num_bs_null_bottom_cp_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 18), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSNullBottomCPDrops.setStatus('mandatory') num_bs_inv_csp_type_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 19), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: 
numBSInvCSPTypeDrops.setStatus('mandatory') num_bs_non_hello81fd_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 20), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSNonHello81fdDrops.setStatus('mandatory') num_bscsp_ctrl_disable_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 21), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSCSPCtrlDisableDrops.setStatus('mandatory') num_bscsp_ctrl_index_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 22), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBSCSPCtrlIndexDrops.setStatus('mandatory') num_bcp_null_call_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 23), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBCPNullCallDrops.setStatus('mandatory') num_bcpcp_faulted_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 24), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBCPCPFaultedDrops.setStatus('mandatory') num_bcp_glean_fail_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 25), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBCPGleanFailDrops.setStatus('mandatory') num_bcpcp_halted_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 26), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBCPCPHaltedDrops.setStatus('mandatory') num_bcp_switched_bca_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 27), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBCPSwitchedBCADrops.setStatus('mandatory') num_bcp_call_not_accepted_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 28), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numBCPCallNotAcceptedDrops.setStatus('mandatory') num_hpm81fd_null_pkt_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 29), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: 
numHPM81fdNullPktDrops.setStatus('mandatory') num_hpm81fd_hello_null_pkt_drops = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 7, 5, 30), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: numHPM81fdHelloNullPktDrops.setStatus('mandatory') mibBuilder.exportSymbols('CTRON-SFPS-PKTMGR-MIB', numBSInvCSPTypeDrops=numBSInvCSPTypeDrops, numBCPGleanFailDrops=numBCPGleanFailDrops, sfpsPacketListPktsLeft=sfpsPacketListPktsLeft, numBCPCPHaltedDrops=numBCPCPHaltedDrops, sfpsPacketListNotFound=sfpsPacketListNotFound, sfpsPacketSizeSize=sfpsPacketSizeSize, numBCPCPFaultedDrops=numBCPCPFaultedDrops, numBSNullBottomCPDrops=numBSNullBottomCPDrops, sfpsPacketMgrPacketType=sfpsPacketMgrPacketType, sfpsPacketListTotalPackets=sfpsPacketListTotalPackets, sfpsCSPPacketStatsPacketsSentGood=sfpsCSPPacketStatsPacketsSentGood, sfpsPacketMgrSwitchID=sfpsPacketMgrSwitchID, numBCPCallNotAcceptedDrops=numBCPCallNotAcceptedDrops, sfpsPacketQPriorityQ=sfpsPacketQPriorityQ, sfpsPacketMgrPktsAvailable=sfpsPacketMgrPktsAvailable, numHPMInvalidFrameTypeDrops=numHPMInvalidFrameTypeDrops, sfpsPacketSizeSwitchInstance=sfpsPacketSizeSwitchInstance, numBSFabricNghbrPortDrops=numBSFabricNghbrPortDrops, sfpsPacketSizeTable=sfpsPacketSizeTable, numHPM81fdHelloNullPktDrops=numHPM81fdHelloNullPktDrops, sfpsPacketListSize=sfpsPacketListSize, numBSGoingToAccessPortDrops=numBSGoingToAccessPortDrops, numHPMFilterMgtPortDrops=numHPMFilterMgtPortDrops, sfpsPacketMgrEntry=sfpsPacketMgrEntry, sfpsCSPPacketStatsPacketsSentBad=sfpsCSPPacketStatsPacketsSentBad, numBSCSPCtrlDisableDrops=numBSCSPCtrlDisableDrops, sfpsPacketMgrTooLarge=sfpsPacketMgrTooLarge, numHPM81fdThrottleDrops=numHPM81fdThrottleDrops, numBSNullCallDrops=numBSNullCallDrops, numBSInvSrcPortDrops=numBSInvSrcPortDrops, sfpsPacketMgrToCreate=sfpsPacketMgrToCreate, SfpsSwitchInstance=SfpsSwitchInstance, sfpsPacketListLowWater=sfpsPacketListLowWater, numBSViolationDrops=numBSViolationDrops, sfpsPacketMgrPktsUsed=sfpsPacketMgrPktsUsed, 
numHPM81ffThrottleDrops=numHPM81ffThrottleDrops, numBSUnknownPortDrops=numBSUnknownPortDrops, sfpsPacketQHighWater=sfpsPacketQHighWater, sfpsCSPPacketStatsPacketsReceivedGood=sfpsCSPPacketStatsPacketsReceivedGood, sfpsPacketMgrTable=sfpsPacketMgrTable, sfpsPacketListEntry=sfpsPacketListEntry, sfpsPacketQTable=sfpsPacketQTable, sfpsPacketQTotalPackets=sfpsPacketQTotalPackets, sfpsPktDispatchStatsVerb=sfpsPktDispatchStatsVerb, numHPMPhysToLogPortDrops=numHPMPhysToLogPortDrops, numBSSourceBlockDrops=numBSSourceBlockDrops, sfpsPacketSizePktsUsed=sfpsPacketSizePktsUsed, numBSCSPCtrlIndexDrops=numBSCSPCtrlIndexDrops, sfpsPacketListTable=sfpsPacketListTable, sfpsPacketQCurrent=sfpsPacketQCurrent, numHPMPhysStandbyMaskDrops=numHPMPhysStandbyMaskDrops, numBSInvPortTypeDrops=numBSInvPortTypeDrops, numBSNonHello81fdDrops=numBSNonHello81fdDrops, numBCPSwitchedBCADrops=numBCPSwitchedBCADrops, sfpsPacketMgrPktsInUse=sfpsPacketMgrPktsInUse, sfpsPacketListPacketType=sfpsPacketListPacketType, sfpsCSPPacketStatsPacketsReceivedBad=sfpsCSPPacketStatsPacketsReceivedBad, numBSStandbyPortDrops=numBSStandbyPortDrops, sfpsPacketMgrTotalPackets=sfpsPacketMgrTotalPackets, sfpsPacketListStatus=sfpsPacketListStatus, sfpsPacketMgrReInit=sfpsPacketMgrReInit, sfpsPacketListPktsInUse=sfpsPacketListPktsInUse, sfpsPacketQEntry=sfpsPacketQEntry, HexInteger=HexInteger, sfpsPacketSizeNotFound=sfpsPacketSizeNotFound, numHPMNullSFPSPktDrops=numHPMNullSFPSPktDrops, numBCPNullCallDrops=numBCPNullCallDrops, sfpsPacketMgrNotFound=sfpsPacketMgrNotFound, numHPM81fdNullPktDrops=numHPM81fdNullPktDrops, sfpsPacketListPktsUsed=sfpsPacketListPktsUsed, sfpsPacketSizeEntry=sfpsPacketSizeEntry)
# _____ _____ _____ _______ _____ # /\ \ /\ \ /\ \ /::\ \ /\ \ # /::\ \ /::\____\ /::\ \ /::::\ \ /::\ \ # /::::\ \ /:::/ / \:::\ \ /::::::\ \ \:::\ \ # /::::::\ \ /:::/ / \:::\ \ /::::::::\ \ \:::\ \ # /:::/\:::\ \ /:::/ / \:::\ \ /:::/~~\:::\ \ \:::\ \ # /:::/ \:::\ \ /:::/ / \:::\ \ /:::/ \:::\ \ \:::\ \ # /:::/ \:::\ \ /:::/ / /::::\ \ /:::/ / \:::\ \ \:::\ \ # /:::/ / \:::\ \ /:::/ / ____ /::::::\ \ /:::/____/ \:::\____\ \:::\ \ # /:::/ / \:::\ \ /:::/ / /\ \ /:::/\:::\ \ |:::| | |:::| | \:::\ \ # /:::/____/ \:::\____\/:::/____/ /::\ \/:::/ \:::\____\|:::|____| |:::|____|_______________\:::\____\ # \:::\ \ \::/ /\:::\ \ \:::\ /:::/ \::/ / \:::\ _\___/:::/ / \::::::::::::::::::/ / # \:::\ \ \/____/ \:::\ \ \:::\/:::/ / \/____/ \:::\ |::| /:::/ / \::::::::::::::::/____/ # \:::\ \ \:::\ \ \::::::/ / \:::\|::|/:::/ / \:::\~~~~\~~~~~~ # \:::\ \ \:::\ \ \::::/____/ \::::::::::/ / \:::\ \ # \:::\ \ \:::\ \ \:::\ \ \::::::::/ / \:::\ \ # \:::\ \ \:::\ \ \:::\ \ \::::::/ / \:::\ \ # \:::\ \ \:::\ \ \:::\ \ \::::/____/ \:::\ \ # \:::\____\ \:::\____\ \:::\____\ |::| | \:::\____\ # \::/ / \::/ / \::/ / |::|____| \::/ / # \/____/ \/____/ \/____/ ~~ \/____/ # # ~ VERSION = (0,1,2) __version__ = '.'.join(map(str, VERSION))
version = (0, 1, 2) __version__ = '.'.join(map(str, VERSION))
class DbObject(object):
    """Generic row object: exposes every keyword argument as an attribute."""

    def __init__(self, **args):
        # Fix: dict.iteritems() is Python 2 only and raises AttributeError
        # on Python 3; items() behaves identically here.
        for (column, value) in args.items():
            setattr(self, column, value)
class Dbobject(object):
    """Generic row object: exposes every keyword argument as an attribute."""

    def __init__(self, **args):
        # Fix: dict.iteritems() is Python 2 only and raises AttributeError
        # on Python 3; items() behaves identically here.
        for (column, value) in args.items():
            setattr(self, column, value)
# -*- coding: utf-8 -*-
"""
@author: Anil Sen & Beyza Arslan
"""

# Typeface and title size applied to every generated figure.
font = "Roboto"
title_font_size = 12

# Colour cycle used for the bubble traces.
color_discrete_sequence_bubble = ["blue", "red", "green", "magenta", "goldenrod"]

# Set to True to auto-open the HTML graphics after they are written.
auto_open = False
""" @author: Anil Sen & Beyza Arslan """ font = 'Roboto' title_font_size = 12 color_discrete_sequence_bubble = ['blue', 'red', 'green', 'magenta', 'goldenrod'] auto_open = False
# Demonstrates list slicing, indexing and in-place item assignment.
list1 = [1, 2, 4, 5, 6]
print(list1[0:2])
print(list1[4])
list1[1] = 9  # replace the second element
print(list1)

# Updating a heterogeneous list in place.
names = ['Kiran', 'Ravi', 1996, 2001]
print(names)
print("Value at index 2", names[2])
names[2] = 2013
print("New Value at index 2 now", names[2])
print(names)

# Various slices: prefixes, suffixes and negative indices.
print(names[:2])
print(names[1:])
print(names[-3:-1])
print(names[:-1])
# Demonstrates list slicing, indexing and in-place item assignment.
list1 = [1, 2, 4, 5, 6]
print(list1[0:2])
print(list1[4])
list1[1] = 9  # replace the second element
print(list1)

# Updating a heterogeneous list in place.
names = ['Kiran', 'Ravi', 1996, 2001]
print(names)
print('Value at index 2', names[2])
names[2] = 2013
print('New Value at index 2 now', names[2])
print(names)

# Various slices: prefixes, suffixes and negative indices.
print(names[:2])
print(names[1:])
print(names[-3:-1])
print(names[:-1])
# Security salt -- !!change this!! in config.py before deploying.
config = {
    'secret': 'secret-sauce',
}
# Security salt -- change this value in config.py before deploying.
config = {
    'secret': 'secret-sauce',
}
""" the problem will be to find the longest substring in S that's in A. then replace with B idea1: check if A exist in S by iterate through all chars in s for all char in key in each key tatal number of keys = k total number of characters in s = n average length of keys = key_length # of occurances of keys in s: occurance sort(a) by reverse length: O(klogk) O(n) * O(k) * O(key_length) - O(n) each char in s - O(k) # of keys - O(key_length) #average key_length per key once it is found, it takes O(target key_length) to search through char by char then it takes O(n) to assemble modified s overall O(klogk) + O(n * k * key_length) + O(occurance) * O(n) """ class Solution: """ @param a: The A array @param b: The B array @param s: The S string @return: The answer """ def stringReplace(self, a, b, s): # Write your code here replacements = {a[i]:b[i] for i in range(len(a))} a.sort(key = lambda x: -len(x)) i = 0 while i < len(s): for key in a: if s[i:i + len(key)] == key: s = s[:i] + replacements[key] + s[i + len(key):] i += len(key) - 1 break i += 1 return s s = Solution() A = ["cd","dab","zbc"] B = ["cc","aaa","ddc"] S = "cdab" print(s.stringReplace(A, B, S))
""" the problem will be to find the longest substring in S that's in A. then replace with B idea1: check if A exist in S by iterate through all chars in s for all char in key in each key tatal number of keys = k total number of characters in s = n average length of keys = key_length # of occurances of keys in s: occurance sort(a) by reverse length: O(klogk) O(n) * O(k) * O(key_length) - O(n) each char in s - O(k) # of keys - O(key_length) #average key_length per key once it is found, it takes O(target key_length) to search through char by char then it takes O(n) to assemble modified s overall O(klogk) + O(n * k * key_length) + O(occurance) * O(n) """ class Solution: """ @param a: The A array @param b: The B array @param s: The S string @return: The answer """ def string_replace(self, a, b, s): replacements = {a[i]: b[i] for i in range(len(a))} a.sort(key=lambda x: -len(x)) i = 0 while i < len(s): for key in a: if s[i:i + len(key)] == key: s = s[:i] + replacements[key] + s[i + len(key):] i += len(key) - 1 break i += 1 return s s = solution() a = ['cd', 'dab', 'zbc'] b = ['cc', 'aaa', 'ddc'] s = 'cdab' print(s.stringReplace(A, B, S))
class Solution:
    def findDisappearedNumbers(self, nums: List[int]) -> List[int]:
        """Return all values in 1..len(nums) that never appear in nums.

        Presence is recorded in-place by negating the entry at index
        value-1, so only O(1) extra space is used.
        """
        for value in nums:
            slot = abs(value) - 1
            nums[slot] = -abs(nums[slot])
        return [pos + 1 for pos, entry in enumerate(nums) if entry > 0]
class Solution:
    def find_disappeared_numbers(self, nums: List[int]) -> List[int]:
        """Return all values in 1..len(nums) that never appear in nums.

        Presence is recorded in-place by negating the entry at index
        value-1, so only O(1) extra space is used.
        """
        for value in nums:
            slot = abs(value) - 1
            nums[slot] = -abs(nums[slot])
        return [pos + 1 for pos, entry in enumerate(nums) if entry > 0]
'''
This directory contains libraries and code for reading file formats using
vendorized dependencies, not necessarily for MS vendor binary files.

These vendorized libraries may still require other platform-dependent
resources such as installed DLLs or 3rd-party programs.

Each reader has had its author's license prepended.
'''
""" This directory contains libraries and code for reading file formats using vendorized dependencies, not necessarily for MS vendor binary files. These vendorized libraries may still require other platform-dependent resources such as installed DLLs or 3rd party programs. Each reader has had it's author's license prepended. """
class MyService:
    """Toy service that guards a greeting behind an SSO token check."""

    def __init__(self, sso_registry):
        self.sso_registry = sso_registry

    def handle_request_correctly(self, request, token):
        # Validates the caller-supplied token against the registry.
        if self.sso_registry.is_valid(token):
            return "hello world"
        return "please enter your login details"

    def handle_request_wrong_token(self, request, token):
        # Deliberately (mis)validates None instead of the supplied token.
        if self.sso_registry.is_valid(None):
            return "hello world"
        return "please enter your login details"

    def handle_request_no_call_to_is_valid(self, request, token):
        # Skips the registry entirely and trusts token truthiness.
        return "hello world" if token else "please enter your login details"

    # Public entry point aliases the correct implementation.
    handle_request = handle_request_correctly
class Myservice:
    """Toy service that guards a greeting behind an SSO token check."""

    def __init__(self, sso_registry):
        self.sso_registry = sso_registry

    def handle_request_correctly(self, request, token):
        # Validates the caller-supplied token against the registry.
        if self.sso_registry.is_valid(token):
            return 'hello world'
        return 'please enter your login details'

    def handle_request_wrong_token(self, request, token):
        # Deliberately (mis)validates None instead of the supplied token.
        if self.sso_registry.is_valid(None):
            return 'hello world'
        return 'please enter your login details'

    def handle_request_no_call_to_is_valid(self, request, token):
        # Skips the registry entirely and trusts token truthiness.
        return 'hello world' if token else 'please enter your login details'

    # Public entry point aliases the correct implementation.
    handle_request = handle_request_correctly
class Contract(object):
    """Wraps a provider object and tracks an optional contract number."""

    def __init__(self, provider):
        self._provider = provider
        self._number = None

    @property
    def provider(self):
        # Delegates to the wrapped provider's calculation on every access.
        return self._provider.calculate()

    @property
    def number(self):
        # Contract number; None until assigned.
        return self._number

    @number.setter
    def number(self, value):
        self._number = value
class Contract(object):
    """Wraps a provider object and tracks an optional contract number."""

    def __init__(self, provider):
        self._provider = provider
        self._number = None

    @property
    def provider(self):
        # Delegates to the wrapped provider's calculation on every access.
        return self._provider.calculate()

    @property
    def number(self):
        # Contract number; None until assigned.
        return self._number

    @number.setter
    def number(self, value):
        self._number = value
class OpenTrackingSubstitutionTag(object):
    """The open tracking substitution tag of a SubscriptionTracking object."""

    def __init__(self, open_tracking_substitution_tag=None):
        """Create an OpenTrackingSubstitutionTag.

        :param open_tracking_substitution_tag: substitution tag to insert
            in the email body; it will be replaced by the open tracking
            pixel.
        """
        self._open_tracking_substitution_tag = None
        if open_tracking_substitution_tag is not None:
            self.open_tracking_substitution_tag = open_tracking_substitution_tag

    @property
    def open_tracking_substitution_tag(self):
        """Substitution tag replaced by the open tracking pixel.

        :rtype: string
        """
        return self._open_tracking_substitution_tag

    @open_tracking_substitution_tag.setter
    def open_tracking_substitution_tag(self, value):
        """Set the substitution tag replaced by the open tracking pixel.

        :type value: string
        """
        self._open_tracking_substitution_tag = value

    def get(self):
        """Return a JSON-ready representation of this tag.

        :rtype: string
        """
        return self.open_tracking_substitution_tag
class Opentrackingsubstitutiontag(object):
    """The open tracking substitution tag of a SubscriptionTracking object."""

    def __init__(self, open_tracking_substitution_tag=None):
        """Create an OpenTrackingSubstitutionTag.

        :param open_tracking_substitution_tag: substitution tag to insert
            in the email body; it will be replaced by the open tracking
            pixel.
        """
        self._open_tracking_substitution_tag = None
        if open_tracking_substitution_tag is not None:
            self.open_tracking_substitution_tag = open_tracking_substitution_tag

    @property
    def open_tracking_substitution_tag(self):
        """Substitution tag replaced by the open tracking pixel.

        :rtype: string
        """
        return self._open_tracking_substitution_tag

    @open_tracking_substitution_tag.setter
    def open_tracking_substitution_tag(self, value):
        """Set the substitution tag replaced by the open tracking pixel.

        :type value: string
        """
        self._open_tracking_substitution_tag = value

    def get(self):
        """Return a JSON-ready representation of this tag.

        :rtype: string
        """
        return self.open_tracking_substitution_tag
# For each of T test cases, read a word and print the product of
# per-letter scores: letters in `three` score 3, all others score 2.
three = ['A', 'E', 'I', 'O', 'S', 'a', 'e', 'i', 'o', 's']
for _ in range(int(input())):
    score = 1
    for letter in input():
        score *= 3 if letter in three else 2
    print(score)
# For each of T test cases, read a word and print the product of
# per-letter scores: letters in `three` score 3, all others score 2.
three = ['A', 'E', 'I', 'O', 'S', 'a', 'e', 'i', 'o', 's']
for _ in range(int(input())):
    score = 1
    for letter in input():
        score *= 3 if letter in three else 2
    print(score)
def get_lines_from_file(path):
    """Return the file's contents as a list of lines, newlines stripped."""
    with open(path, 'r') as fh:
        return fh.read().splitlines()


def write_lines_to_file(path, lines):
    """Write each string in `lines` to <path>, joined by newlines.

    :param path: str: The filepath to write to
    :param lines: list: The list of strings to write
    """
    with open(path, 'w') as fh:
        fh.write('\n'.join(lines))
def get_lines_from_file(path):
    """Return the file's contents as a list of lines, newlines stripped."""
    with open(path, 'r') as fh:
        return fh.read().splitlines()

def write_lines_to_file(path, lines):
    """
    Takes a list of strings and writes them to <path>, joined by
    newline characters

    :param path: str: The filepath to write to
    :param lines: list: The list of strings to write
    """
    with open(path, 'w') as fh:
        fh.write('\n'.join(lines))
# -*- coding: utf-8 -*-

def hello():
    """Print a greeting and report success."""
    print("Hello World")
    return True


def add_two_things(a, b):
    """Return the sum of the two arguments."""
    return a + b
def hello():
    """Print a greeting and report success."""
    print('Hello World')
    return True


def add_two_things(a, b):
    """Return the sum of the two arguments."""
    return a + b
# NOTE: this O(n^2)-with-linear-dedup approach exceeds LeetCode's time
# limit on large inputs; kept as-is apart from removing dead,
# commented-out code and documenting the flow.  Date: 2018-08-13.
class Solution(object):
    def threeSum(self, nums):
        """Return all unique triples from nums summing to zero.

        :type nums: List[int]
        :rtype: List[List[int]]
        """
        if len(nums) < 3:
            return []
        res = []
        res_final = []
        nums = sorted(nums)
        for i in range(len(nums)):
            # Remove nums[i], then search the rest for pairs summing
            # to -nums[i].
            nums_clean = nums[:i] + nums[i + 1:]
            res += self.twoSum(nums_clean, -nums[i])
        # Deduplicate triples by comparing their sorted forms.
        for j in res:
            j = sorted(j)
            if j not in res_final:
                res_final.append(j)
        return res_final

    def twoSum(self, nums, target):
        """Two-pointer scan of sorted nums for pairs summing to target.

        Returns full triples [-target, x, y] so the caller can collect
        them directly.
        """
        left = 0
        right = len(nums) - 1
        res_middle = []
        while left < right:
            if nums[left] + nums[right] == target:
                res_middle.append([-target, nums[left], nums[right]])
                left += 1
                right -= 1
            if nums[left] + nums[right] < target:
                left += 1
            if nums[left] + nums[right] > target:
                right -= 1
        return res_middle
class Solution(object):

    def three_sum(self, nums):
        """Return all unique triples from nums summing to zero.

        :type nums: List[int]
        :rtype: List[List[int]]
        """
        if len(nums) < 3:
            return []
        res = []
        res_final = []
        nums = sorted(nums)
        for i in range(len(nums)):
            # Remove nums[i], then search the rest for pairs summing
            # to -nums[i].
            nums_clean = nums[:i] + nums[i + 1:]
            # Fix: this called the old camelCase name self.twoSum, which
            # no longer exists after the rename to two_sum and raised
            # AttributeError at runtime.
            res += self.two_sum(nums_clean, -nums[i])
        # Deduplicate triples by comparing their sorted forms.
        for j in res:
            j = sorted(j)
            if j not in res_final:
                res_final.append(j)
        return res_final

    def two_sum(self, nums, target):
        """Two-pointer scan of sorted nums for pairs summing to target."""
        left = 0
        right = len(nums) - 1
        res_middle = []
        while left < right:
            if nums[left] + nums[right] == target:
                res_middle.append([-target, nums[left], nums[right]])
                left += 1
                right -= 1
            if nums[left] + nums[right] < target:
                left += 1
            if nums[left] + nums[right] > target:
                right -= 1
        return res_middle
class Occurrence(object):
    """
    An Occurrence is an incarnation of a recurring event for a given date.
    """
    # NOTE(review): __unicode__ and __cmp__ (via the cmp() builtin) are
    # Python 2 protocols; on Python 3 they are never invoked and cmp()
    # does not exist -- confirm the target interpreter before porting.
    def __init__(self,event,start,end):
        self.event = event
        self.start = start
        self.end = end

    def __unicode__(self):
        # Human-readable "<start> to <end>" rendering.
        return "%s to %s" %(self.start, self.end)

    def __cmp__(self, other):
        # Order by start time, breaking ties on end time.
        rank = cmp(self.start, other.start)
        if rank == 0:
            return cmp(self.end, other.end)
        return rank
class Occurrence(object):
    """
    An Occurrence is an incarnation of a recurring event for a given date.
    """
    # NOTE(review): __unicode__ and __cmp__ (via the cmp() builtin) are
    # Python 2 protocols; on Python 3 they are never invoked and cmp()
    # does not exist -- confirm the target interpreter before porting.

    def __init__(self, event, start, end):
        self.event = event
        self.start = start
        self.end = end

    def __unicode__(self):
        # Human-readable "<start> to <end>" rendering.
        return '%s to %s' % (self.start, self.end)

    def __cmp__(self, other):
        # Order by start time, breaking ties on end time.
        rank = cmp(self.start, other.start)
        if rank == 0:
            return cmp(self.end, other.end)
        return rank
def partition(arr, start, end):
    """Lomuto partition around arr[end]; returns the pivot's final index."""
    pivot = arr[end]
    boundary = start
    for idx in range(start, end):
        if arr[idx] < pivot:
            arr[idx], arr[boundary] = arr[boundary], arr[idx]
            boundary += 1
    arr[end], arr[boundary] = arr[boundary], arr[end]
    return boundary


def find_kth_min(k, arr):
    """Quickselect the k-th smallest element (1-based); -1 on bad k."""
    if k < 1 or k > len(arr):
        print('Invalid Input!')
        return -1
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        pos = partition(arr, lo, hi)
        if pos == k - 1:
            return arr[pos]
        if pos < k - 1:
            lo = pos + 1
        else:
            hi = pos - 1


def find_kth_max(k, arr):
    """Quickselect the k-th largest element (1-based); -1 on bad k."""
    if k < 1 or k > len(arr):
        print('Invalid Input!')
        return -1
    target = len(arr) - k
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        pos = partition(arr, lo, hi)
        if pos == target:
            return arr[pos]
        if pos < target:
            lo = pos + 1
        else:
            hi = pos - 1


if __name__ == '__main__':
    arr = [10, 2, 7, 4, 9, 6, 3, 8, 1, 5]
    k = 9
    print("K-th MIN : ", find_kth_min(k, arr))
    print("K-th MAX : ", find_kth_max(k, arr))
def partition(arr, start, end):
    """Lomuto partition around arr[end]; returns the pivot's final index."""
    pivot = arr[end]
    boundary = start
    for idx in range(start, end):
        if arr[idx] < pivot:
            arr[idx], arr[boundary] = arr[boundary], arr[idx]
            boundary += 1
    arr[end], arr[boundary] = arr[boundary], arr[end]
    return boundary


def find_kth_min(k, arr):
    """Quickselect the k-th smallest element (1-based); -1 on bad k."""
    if k < 1 or k > len(arr):
        print('Invalid Input!')
        return -1
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        pos = partition(arr, lo, hi)
        if pos == k - 1:
            return arr[pos]
        if pos < k - 1:
            lo = pos + 1
        else:
            hi = pos - 1


def find_kth_max(k, arr):
    """Quickselect the k-th largest element (1-based); -1 on bad k."""
    if k < 1 or k > len(arr):
        print('Invalid Input!')
        return -1
    target = len(arr) - k
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        pos = partition(arr, lo, hi)
        if pos == target:
            return arr[pos]
        if pos < target:
            lo = pos + 1
        else:
            hi = pos - 1


if __name__ == '__main__':
    arr = [10, 2, 7, 4, 9, 6, 3, 8, 1, 5]
    k = 9
    print('K-th MIN : ', find_kth_min(k, arr))
    print('K-th MAX : ', find_kth_max(k, arr))
# -*- coding: utf-8 -*

def format_table(table, format):
    """Serialize a keyphrases table {keyphrase: {text: score}}.

    Fix: both branches previously forwarded the undefined global name
    `keyphrases_table` instead of the `table` parameter (NameError).
    """
    if format == "xml":
        return table2xml(table)
    elif format == "csv":
        return table2csv(table)
    else:
        raise Exception("Unknown table format: '%s'. "
                        "Please use one of: 'xml', 'csv'." % format)


def table2xml(keyphrases_table):
    """Render the table as nested <table>/<keyphrase>/<text> elements."""
    res = "<table>\n"
    for keyphrase in sorted(keyphrases_table.keys()):
        res += '    <keyphrase value="%s">\n' % keyphrase
        for text in sorted(keyphrases_table[keyphrase].keys()):
            res += '        <text name="%s">' % text
            res += '%.3f' % keyphrases_table[keyphrase][text]
            res += '</text>\n'
        res += '    </keyphrase>\n'
    res += "</table>\n"
    return res


def table2csv(keyphrases_table):
    """Render the table as CSV: one heading row, then one row per text."""
    def quote(s):
        return '"' + s.replace('"', "'") + '"'

    keyphrases = sorted(keyphrases_table.keys())
    texts = sorted(keyphrases_table[keyphrases[0]].keys())
    res = "," + ",".join(map(quote, keyphrases)) + "\n"  # Heading
    for text in texts:
        scores = map(lambda score: u"%.3f" % score,
                     [keyphrases_table[keyphrase][text]
                      for keyphrase in keyphrases])
        res += (quote(text) + "," + ",".join(scores) + "\n")
    return res


def format_graph(graph, format):
    """Serialize a graph to 'gml' or 'edges'."""
    if format == "gml":
        return graph2gml(graph)
    elif format == "edges":
        return graph2edges(graph)
    else:
        raise Exception("Unknown graph format: '%s'. "
                        "Please use one of: 'gml', 'edges'." % format)


def graph2edges(graph):
    """Render 'source -> target, target' adjacency lines.

    TODO(mikhaildubov): Exception on the US constitution example!
    """
    res = ""
    node_edges = {}
    for edge in graph["edges"]:
        source_label = graph["nodes"][edge["source"]]["label"]
        target_label = graph["nodes"][edge["target"]]["label"]
        if source_label not in node_edges:
            node_edges[source_label] = []
        node_edges[source_label].append(target_label)
    for node in node_edges:
        res += "%s -> %s\n" % (node, ", ".join(node_edges[node]))
    return res


def graph2gml(graph):
    """Render the graph in GML, including confidence/threshold attributes."""
    res = "graph\n[\n"
    res += "  directed 1\n"
    res += "  referral_confidence %.2f\n" % graph["referral_confidence"]
    res += "  relevance_threshold %.2f\n" % graph["relevance_threshold"]
    res += "  support_threshold %i\n" % graph["support_threshold"]
    for node in graph["nodes"]:
        res += ('  node\n  [\n    id %i\n    label "%s"\n  ]\n'
                % (node["id"], node["label"]))
    for edge in graph["edges"]:
        res += ('  edge\n  [\n    source %i\n    target %i\n'
                '    confidence %.2f\n  ]\n'
                % (edge["source"], edge["target"], edge["confidence"]))
    res += "]\n"
    return res
def format_table(table, format):
    """Serialize a keyphrases table {keyphrase: {text: score}}.

    Fixes: both branches previously forwarded the undefined global name
    `keyphrases_table` instead of the `table` parameter, and errors were
    raised via the undefined lowercase name `exception`.
    """
    if format == 'xml':
        return table2xml(table)
    elif format == 'csv':
        return table2csv(table)
    else:
        raise Exception("Unknown table format: '%s'. "
                        "Please use one of: 'xml', 'csv'." % format)


def table2xml(keyphrases_table):
    """Render the table as nested <table>/<keyphrase>/<text> elements."""
    res = '<table>\n'
    for keyphrase in sorted(keyphrases_table.keys()):
        res += '    <keyphrase value="%s">\n' % keyphrase
        for text in sorted(keyphrases_table[keyphrase].keys()):
            res += '        <text name="%s">' % text
            res += '%.3f' % keyphrases_table[keyphrase][text]
            res += '</text>\n'
        res += '    </keyphrase>\n'
    res += '</table>\n'
    return res


def table2csv(keyphrases_table):
    """Render the table as CSV: one heading row, then one row per text."""
    def quote(s):
        return '"' + s.replace('"', "'") + '"'

    keyphrases = sorted(keyphrases_table.keys())
    texts = sorted(keyphrases_table[keyphrases[0]].keys())
    res = ',' + ','.join(map(quote, keyphrases)) + '\n'  # heading row
    for text in texts:
        scores = map(lambda score: u'%.3f' % score,
                     [keyphrases_table[keyphrase][text]
                      for keyphrase in keyphrases])
        res += quote(text) + ',' + ','.join(scores) + '\n'
    return res


def format_graph(graph, format):
    """Serialize a graph to 'gml' or 'edges'."""
    if format == 'gml':
        return graph2gml(graph)
    elif format == 'edges':
        return graph2edges(graph)
    else:
        # Fix: `exception` (lowercase) is undefined; raise Exception.
        raise Exception("Unknown graph format: '%s'. "
                        "Please use one of: 'gml', 'edges'." % format)


def graph2edges(graph):
    """Render 'source -> target, target' adjacency lines."""
    res = ''
    node_edges = {}
    for edge in graph['edges']:
        source_label = graph['nodes'][edge['source']]['label']
        target_label = graph['nodes'][edge['target']]['label']
        if source_label not in node_edges:
            node_edges[source_label] = []
        node_edges[source_label].append(target_label)
    for node in node_edges:
        res += '%s -> %s\n' % (node, ', '.join(node_edges[node]))
    return res


def graph2gml(graph):
    """Render the graph in GML, including confidence/threshold attributes."""
    res = 'graph\n[\n'
    res += '  directed 1\n'
    res += '  referral_confidence %.2f\n' % graph['referral_confidence']
    res += '  relevance_threshold %.2f\n' % graph['relevance_threshold']
    res += '  support_threshold %i\n' % graph['support_threshold']
    for node in graph['nodes']:
        res += ('  node\n  [\n    id %i\n    label "%s"\n  ]\n'
                % (node['id'], node['label']))
    for edge in graph['edges']:
        res += ('  edge\n  [\n    source %i\n    target %i\n'
                '    confidence %.2f\n  ]\n'
                % (edge['source'], edge['target'], edge['confidence']))
    res += ']\n'
    return res
#!/usr/bin/env python

# Find the pair of entries summing to 2020 and print their product.
numbers = [1721, 979, 366, 299, 675, 1456]

# Collect every ordered pair that sums to 2020.
pairs = list()
for first in numbers:
    for second in numbers:
        if first + second == 2020:
            pairs.append((first, second))

# Drop mirrored duplicates, keeping the first orientation encountered.
for pair in pairs:
    first, second = pair
    if (second, first) in pairs:
        pairs.remove((second, first))

# Print the answer[s]
for pair in pairs:
    print(pair[0] * pair[1])
# Find the pair of entries summing to 2020 and print their product.
numbers = [1721, 979, 366, 299, 675, 1456]

# Collect every ordered pair that sums to 2020.
pairs = list()
for first in numbers:
    for second in numbers:
        if first + second == 2020:
            pairs.append((first, second))

# Drop mirrored duplicates, keeping the first orientation encountered.
for pair in pairs:
    first, second = pair
    if (second, first) in pairs:
        pairs.remove((second, first))

# Print the answer[s].
for pair in pairs:
    print(pair[0] * pair[1])
""" 1357. Path Sum II https://www.lintcode.com/problem/path-sum-ii/description?_from=ladder&&fromId=131 """ """ Definition of TreeNode: class TreeNode: def __init__(self, val): self.val = val self.left, self.right = None, None """ class Solution: """ @param root: a binary tree @param sum: the sum @return: the scheme """ def pathSum(self, root, sum): # Write your code here. result = [] if not root: return [] self.dfs(root, 0, [], sum, result) return result def dfs(self, root, curr, path, target, result): if root is None: return if curr + root.val > target: return if curr + root.val == target: result.append([x.val for x in path] + [root.val]) return path.append(root) self.dfs(root.left, curr + root.val, path, target, result) self.dfs(root.right, curr + root.val, path, target, result) path.pop()
""" 1357. Path Sum II https://www.lintcode.com/problem/path-sum-ii/description?_from=ladder&&fromId=131 """ '\nDefinition of TreeNode:\nclass TreeNode:\n def __init__(self, val):\n self.val = val\n self.left, self.right = None, None\n' class Solution: """ @param root: a binary tree @param sum: the sum @return: the scheme """ def path_sum(self, root, sum): result = [] if not root: return [] self.dfs(root, 0, [], sum, result) return result def dfs(self, root, curr, path, target, result): if root is None: return if curr + root.val > target: return if curr + root.val == target: result.append([x.val for x in path] + [root.val]) return path.append(root) self.dfs(root.left, curr + root.val, path, target, result) self.dfs(root.right, curr + root.val, path, target, result) path.pop()
class CollectionClass:
    """Cursor-based collection wrapper.

    NOTE(review): the instance attributes assigned in __init__ shadow the
    `items`/`params` methods below, so those accessor methods are
    unreachable on instances (obj.items is the list, not the method).
    """

    def __init__(self, items, params):
        self.items = items
        self.params = params
        self.cursor = 0

    def items(self):
        # Shadowed by the instance attribute of the same name (see class note).
        return self.items

    def params(self):
        # Shadowed by the instance attribute of the same name (see class note).
        return self.params

    def delete_all(self):
        # Pops and deletes every item, leaving the collection empty.
        self.cursor = 0
        while len(self.items) > 0:
            item = self.items.pop()
            item.delete()
        return True

    def first(self):
        # Reset the cursor and return the first item, or None when empty.
        self.cursor = 0
        if len(self.items) > 0:
            return self.items[0]
        else:
            return None

    # TODO: Add tests
    def iter(self):
        # Advance the cursor and return the next item, or None at the end.
        if self.cursor + 1 < len(self.items):
            self.cursor += 1
            return self.items[self.cursor]
        else:
            return None
class Collectionclass:
    """Cursor-based collection wrapper.

    NOTE(review): the instance attributes assigned in __init__ shadow the
    `items`/`params` methods below, so those accessor methods are
    unreachable on instances (obj.items is the list, not the method).
    """

    def __init__(self, items, params):
        self.items = items
        self.params = params
        self.cursor = 0

    def items(self):
        # Shadowed by the instance attribute of the same name (see class note).
        return self.items

    def params(self):
        # Shadowed by the instance attribute of the same name (see class note).
        return self.params

    def delete_all(self):
        # Pops and deletes every item, leaving the collection empty.
        self.cursor = 0
        while len(self.items) > 0:
            item = self.items.pop()
            item.delete()
        return True

    def first(self):
        # Reset the cursor and return the first item, or None when empty.
        self.cursor = 0
        if len(self.items) > 0:
            return self.items[0]
        else:
            return None

    def iter(self):
        # Advance the cursor and return the next item, or None at the end.
        if self.cursor + 1 < len(self.items):
            self.cursor += 1
            return self.items[self.cursor]
        else:
            return None
# execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_30.py') # execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_100.py') #execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_200.py') #execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_300.py') #execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_400.py') #execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_500.py') #execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_600.py') #execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_800.py') #execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_1000.py') execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_2000.py') execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_3000.py') execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_4000.py') execfile(r'C:\qtlab-aalto\scripts\Qubit\TimeDomain\swap_batch\0dbm_swap\swap_with_high_pulse_5000.py')
# Batch driver: executes the swap-with-high-pulse measurement scripts in
# sequence.  NOTE(review): execfile() is Python 2 only; on Python 3
# replace with exec(open(path).read()).
execfile('C:\\qtlab-aalto\\scripts\\Qubit\\TimeDomain\\swap_batch\\0dbm_swap\\swap_with_high_pulse_2000.py')
execfile('C:\\qtlab-aalto\\scripts\\Qubit\\TimeDomain\\swap_batch\\0dbm_swap\\swap_with_high_pulse_3000.py')
execfile('C:\\qtlab-aalto\\scripts\\Qubit\\TimeDomain\\swap_batch\\0dbm_swap\\swap_with_high_pulse_4000.py')
execfile('C:\\qtlab-aalto\\scripts\\Qubit\\TimeDomain\\swap_batch\\0dbm_swap\\swap_with_high_pulse_5000.py')
def test():
    """Micro-benchmark: dead-store reads of ten float locals in a loop.

    Each outer iteration performs 100 local reads (ten rounds of the ten
    registers), matching the original hand-unrolled body.
    """
    r0, r1, r2, r3, r4 = 123.0, 123.1, 123.2, 123.3, 123.4
    r5, r6, r7, r8, r9 = 123.5, 123.6, 123.7, 123.8, 123.9
    i = 0
    while i < 1e7:
        for _ in range(10):
            t = r0
            t = r1
            t = r2
            t = r3
            t = r4
            t = r5
            t = r6
            t = r7
            t = r8
            t = r9
        i += 1


test()
def test():
    """Micro-benchmark: dead-store reads of ten float locals in a loop.

    Each outer iteration performs 100 local reads (ten rounds of the ten
    registers), matching the original hand-unrolled body.
    """
    r0, r1, r2, r3, r4 = 123.0, 123.1, 123.2, 123.3, 123.4
    r5, r6, r7, r8, r9 = 123.5, 123.6, 123.7, 123.8, 123.9
    i = 0
    while i < 10000000.0:
        for _ in range(10):
            t = r0
            t = r1
            t = r2
            t = r3
            t = r4
            t = r5
            t = r6
            t = r7
            t = r8
            t = r9
        i += 1


test()
def permutation(array, start=0):
    """Print every permutation of `array` using in-place swap DFS."""
    if start == len(array):
        print(array)
        return
    for swap_at in range(start, len(array)):
        array[start], array[swap_at] = array[swap_at], array[start]
        permutation(array, start + 1)
        # Undo the swap so the caller's prefix is restored.
        array[start], array[swap_at] = array[swap_at], array[start]


if __name__ == "__main__":
    permutation(['d', 'a', 'n'])
def permutation(array, start=0):
    """Print every permutation of `array` using in-place swap DFS."""
    if start == len(array):
        print(array)
        return
    for swap_at in range(start, len(array)):
        array[start], array[swap_at] = array[swap_at], array[start]
        permutation(array, start + 1)
        # Undo the swap so the caller's prefix is restored.
        array[start], array[swap_at] = array[swap_at], array[start]


if __name__ == '__main__':
    permutation(['d', 'a', 'n'])
def end_zeros(num: int) -> int:
    """Count trailing zero digits of num's decimal representation.

    Walks the digits from the right, stopping at the first non-zero.
    Note that end_zeros(0) == 1, since "0" is itself a zero digit.
    """
    count = 0
    for digit in reversed(str(num)):
        if digit != "0":
            break
        count += 1
    return count


if __name__ == '__main__':
    print("Example:")
    print(end_zeros(0))

# These "asserts" are used for self-checking and not for an auto-testing
assert end_zeros(0) == 1
assert end_zeros(1) == 0
assert end_zeros(10) == 1
assert end_zeros(101) == 0
assert end_zeros(245) == 0
assert end_zeros(100100) == 2
print("Coding complete? Click 'Check' to earn cool rewards!")
def end_zeros(num: int) -> int:
    """Count trailing zero digits of num's decimal representation.

    Walks the digits from the right, stopping at the first non-zero.
    Note that end_zeros(0) == 1, since '0' is itself a zero digit.
    """
    count = 0
    for digit in reversed(str(num)):
        if digit != '0':
            break
        count += 1
    return count


if __name__ == '__main__':
    print('Example:')
    print(end_zeros(0))

assert end_zeros(0) == 1
assert end_zeros(1) == 0
assert end_zeros(10) == 1
assert end_zeros(101) == 0
assert end_zeros(245) == 0
assert end_zeros(100100) == 2
print("Coding complete? Click 'Check' to earn cool rewards!")
""" Implementation of a labeled property graph. Note that classes are fine to be keys in a dictionary, so the graph itself is going to be a dictionary with Node class objects as keys. """ class Node: """Node object that will have a relationship to other nodes. Methods incorporate CRUD principles. """ def __init__(self): """Initialized nodes contain properties and methods to view them.""" self.properties = {} def add_property(self, property_, value): """Method to add a property to a node.""" if property_ in self.properties: raise KeyError("Property already exists, use change_property()" "to alter property value") self.properties[property_] = value def change_property(self, property_, value): """Method to alter a value on a property.""" if property_ not in self.properties: raise KeyError("Property does not exist, use add_property()" "to add a property") self.properties[property_] = value def remove_property(self, property_): """Method to remove a property from a node.""" if property_ not in self.properties: raise KeyError("Node does not contain that property") del self.properties[property_] class Relationship: """Relationship object that will be able to have properties as well.""" def __init__(self): """Initialize relationships as to contain properites like nodes.""" self.properties = {} class LabeledPropertyGraph: """Define a labeled property graph as dictionary composition.""" def __init__(self): """Initialize the graph as a dictionary.""" self._graph = {} def nodes(self): """Return a list of nodes in the graph.""" return list(self._graph.keys()) def relationships(self): """Return list of relationships."""
""" Implementation of a labeled property graph. Note that classes are fine to be keys in a dictionary, so the graph itself is going to be a dictionary with Node class objects as keys. """ class Node: """Node object that will have a relationship to other nodes. Methods incorporate CRUD principles. """ def __init__(self): """Initialized nodes contain properties and methods to view them.""" self.properties = {} def add_property(self, property_, value): """Method to add a property to a node.""" if property_ in self.properties: raise key_error('Property already exists, use change_property()to alter property value') self.properties[property_] = value def change_property(self, property_, value): """Method to alter a value on a property.""" if property_ not in self.properties: raise key_error('Property does not exist, use add_property()to add a property') self.properties[property_] = value def remove_property(self, property_): """Method to remove a property from a node.""" if property_ not in self.properties: raise key_error('Node does not contain that property') del self.properties[property_] class Relationship: """Relationship object that will be able to have properties as well.""" def __init__(self): """Initialize relationships as to contain properites like nodes.""" self.properties = {} class Labeledpropertygraph: """Define a labeled property graph as dictionary composition.""" def __init__(self): """Initialize the graph as a dictionary.""" self._graph = {} def nodes(self): """Return a list of nodes in the graph.""" return list(self._graph.keys()) def relationships(self): """Return list of relationships."""
#Getting Input strInput = input('Enter a string: ') #Check if string is empty if strInput == "": print('The string is empty.') else: #String length function print(len(strInput))
str_input = input('Enter a string: ') if strInput == '': print('The string is empty.') else: print(len(strInput))
# # @lc app=leetcode id=172 lang=python3 # # [172] Factorial Trailing Zeroes # # @lc code=start class Solution: def trailingZeroes(self, n: int) -> int: res = 0 while n > 0: n //= 5 res += n return res # @lc code=end # Accepted # 502/502 cases passed(32 ms) # Your runtime beats 88.85 % of python3 submissions # Your memory usage beats 100 % of python3 submissions(12.6 MB)
class Solution: def trailing_zeroes(self, n: int) -> int: res = 0 while n > 0: n //= 5 res += n return res
income = float(input()) middle_grade = float(input()) minimal_payment = float(input()) scholarship = 0 if income < minimal_payment and middle_grade > 4.50: scholarship = minimal_payment * 0.65 elif middle_grade <= 5.50: scholarship = middle_grade * 25
income = float(input()) middle_grade = float(input()) minimal_payment = float(input()) scholarship = 0 if income < minimal_payment and middle_grade > 4.5: scholarship = minimal_payment * 0.65 elif middle_grade <= 5.5: scholarship = middle_grade * 25
def estimate_probability(word, previous_n_gram, n_gram_counts, n_plus1_gram_counts, vocabulary_size, k=1.0): denominator = n_gram_counts.get(previous_n_gram, 0) denominator += k * vocabulary_size numerator = n_plus1_gram_counts.get(previous_n_gram + ' ' + word, 0) numerator += k probability = numerator / denominator return probability
def estimate_probability(word, previous_n_gram, n_gram_counts, n_plus1_gram_counts, vocabulary_size, k=1.0): denominator = n_gram_counts.get(previous_n_gram, 0) denominator += k * vocabulary_size numerator = n_plus1_gram_counts.get(previous_n_gram + ' ' + word, 0) numerator += k probability = numerator / denominator return probability
""" Workflow Archiver Error """ class WorkflowArchiverError(Exception): """ Error for Workflow Archiver module """ def __init__(self, message): super().__init__(message)
""" Workflow Archiver Error """ class Workflowarchivererror(Exception): """ Error for Workflow Archiver module """ def __init__(self, message): super().__init__(message)
"""File output for field(s) value on a grid. """ class XDMFWriter(object): def __init__(self, h5filename, dimension, resolution, origin, space_step, dataset_names, ite, time): """ Parameters ---------- h5filename : nom du fichier h5 contenant les donnees dimension du domaine resolution de la grille origin : coordonnees de l'origine du domain space_step : pas d'espace dataset_names : liste des datasets qu'on souhaite ecrire dans le xdmf ite : numero de l'iteration courante time : temps """ self.xmffilename = h5filename.split('.')[0] + '.xmf' res = list(resolution) f = open(self.xmffilename, 'w') f.write("<?xml version=\"1.0\" ?>\n") f.write("<!DOCTYPE Xdmf SYSTEM \"Xdmf.dtd\">\n") f.write("<Xdmf Version=\"2.0\">\n") f.write(" <Domain>\n") f.write(" <Grid Name=\"CellTime\" GridType=\"Collection\" ") f.write("CollectionType=\"Temporal\">\n") f.write(self._write_grid_attributes(dimension, res, origin, space_step, dataset_names, ite, time, h5filename)) f.write(" </Grid>\n") f.write(" </Domain>\n") f.write("</Xdmf>\n") print('Ecriture du fichier ' + self.xmffilename) f.close() def _write_grid_attributes(self, dimension, resolution, origin, space_step, dataset_names, ite, time, filename): """ Write XDMF header into a file Returns: -------- string the xml-like header. """ assert isinstance(resolution, list) assert isinstance(origin, list) assert isinstance(space_step, list) # The header (xml-like), saved in a string. 
xml_grid = "" if dimension == 2: topo_type = "2DCORECTMesh" geo_type = "ORIGIN_DXDY" elif dimension == 3: topo_type = "3DCORECTMesh" geo_type = "ORIGIN_DXDYDZ" xml_grid += " <Grid Name=\"Iteration {0:03d}\"".format(ite) xml_grid += " GridType=\"Uniform\">\n" xml_grid += " <Time Value=\"{0}\" />\n".format(time) xml_grid += " <Topology TopologyType=\"" + str(topo_type) + "\"" xml_grid += " NumberOfElements=\"" resolution.reverse() origin.reverse() xml_grid += XDMFWriter._list_format(resolution) + " \"/>\n" xml_grid += " <Geometry GeometryType=\"" + geo_type + "\">\n" xml_grid += " <DataItem Dimensions=\"" + str(dimension) + " \"" xml_grid += " NumberType=\"Float\" Precision=\"8\" Format=\"XML\">\n" xml_grid += " " + XDMFWriter._list_format(origin) + "\n" xml_grid += " </DataItem>\n" xml_grid += " <DataItem Dimensions=\"" + str(dimension) + " \"" xml_grid += " NumberType=\"Float\" Precision=\"8\" Format=\"XML\">\n" step = space_step step.reverse() xml_grid += " " + XDMFWriter._list_format(step) + "\n" xml_grid += " </DataItem>\n" xml_grid += " </Geometry>\n" # Append dataset parameters for name in dataset_names: xml_grid += " <Attribute Name=\"" xml_grid += name + "\"" xml_grid += " AttributeType=\"Scalar\" Center=\"Node\">\n" xml_grid += " <DataItem Dimensions=\"" xml_grid += XDMFWriter._list_format(resolution) + " \"" xml_grid += " NumberType=\"Float\" Precision=\"8\" Format=\"HDF\"" xml_grid += " Compression=\"Raw\">\n" # xml_grid += " " + filename.split('/')[-1] xml_grid += ":/" + name xml_grid += "\n </DataItem>\n" xml_grid += " </Attribute>\n" xml_grid += " </Grid>\n" return xml_grid @staticmethod def _list_format(l): """Format a list to the xml output. Removes the '[]()' and replace ',' with ' ' in default str. Parameters ---------- l : list to format """ buff = str(l).replace(',', ' ').replace('[', '') return buff.replace(']', '').replace('(', '').replace(')', '')
"""File output for field(s) value on a grid. """ class Xdmfwriter(object): def __init__(self, h5filename, dimension, resolution, origin, space_step, dataset_names, ite, time): """ Parameters ---------- h5filename : nom du fichier h5 contenant les donnees dimension du domaine resolution de la grille origin : coordonnees de l'origine du domain space_step : pas d'espace dataset_names : liste des datasets qu'on souhaite ecrire dans le xdmf ite : numero de l'iteration courante time : temps """ self.xmffilename = h5filename.split('.')[0] + '.xmf' res = list(resolution) f = open(self.xmffilename, 'w') f.write('<?xml version="1.0" ?>\n') f.write('<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd">\n') f.write('<Xdmf Version="2.0">\n') f.write(' <Domain>\n') f.write(' <Grid Name="CellTime" GridType="Collection" ') f.write('CollectionType="Temporal">\n') f.write(self._write_grid_attributes(dimension, res, origin, space_step, dataset_names, ite, time, h5filename)) f.write(' </Grid>\n') f.write(' </Domain>\n') f.write('</Xdmf>\n') print('Ecriture du fichier ' + self.xmffilename) f.close() def _write_grid_attributes(self, dimension, resolution, origin, space_step, dataset_names, ite, time, filename): """ Write XDMF header into a file Returns: -------- string the xml-like header. 
""" assert isinstance(resolution, list) assert isinstance(origin, list) assert isinstance(space_step, list) xml_grid = '' if dimension == 2: topo_type = '2DCORECTMesh' geo_type = 'ORIGIN_DXDY' elif dimension == 3: topo_type = '3DCORECTMesh' geo_type = 'ORIGIN_DXDYDZ' xml_grid += ' <Grid Name="Iteration {0:03d}"'.format(ite) xml_grid += ' GridType="Uniform">\n' xml_grid += ' <Time Value="{0}" />\n'.format(time) xml_grid += ' <Topology TopologyType="' + str(topo_type) + '"' xml_grid += ' NumberOfElements="' resolution.reverse() origin.reverse() xml_grid += XDMFWriter._list_format(resolution) + ' "/>\n' xml_grid += ' <Geometry GeometryType="' + geo_type + '">\n' xml_grid += ' <DataItem Dimensions="' + str(dimension) + ' "' xml_grid += ' NumberType="Float" Precision="8" Format="XML">\n' xml_grid += ' ' + XDMFWriter._list_format(origin) + '\n' xml_grid += ' </DataItem>\n' xml_grid += ' <DataItem Dimensions="' + str(dimension) + ' "' xml_grid += ' NumberType="Float" Precision="8" Format="XML">\n' step = space_step step.reverse() xml_grid += ' ' + XDMFWriter._list_format(step) + '\n' xml_grid += ' </DataItem>\n' xml_grid += ' </Geometry>\n' for name in dataset_names: xml_grid += ' <Attribute Name="' xml_grid += name + '"' xml_grid += ' AttributeType="Scalar" Center="Node">\n' xml_grid += ' <DataItem Dimensions="' xml_grid += XDMFWriter._list_format(resolution) + ' "' xml_grid += ' NumberType="Float" Precision="8" Format="HDF"' xml_grid += ' Compression="Raw">\n' xml_grid += ' ' + filename.split('/')[-1] xml_grid += ':/' + name xml_grid += '\n </DataItem>\n' xml_grid += ' </Attribute>\n' xml_grid += ' </Grid>\n' return xml_grid @staticmethod def _list_format(l): """Format a list to the xml output. Removes the '[]()' and replace ',' with ' ' in default str. Parameters ---------- l : list to format """ buff = str(l).replace(',', ' ').replace('[', '') return buff.replace(']', '').replace('(', '').replace(')', '')
# Hayley # Check if a number is prime p = 13 * 17 m = 2 while m < p: if p % m == 0: print(m , "divides", p, "and therefore", p, "is not prime.") m = m + 1
p = 13 * 17 m = 2 while m < p: if p % m == 0: print(m, 'divides', p, 'and therefore', p, 'is not prime.') m = m + 1
class QuickFindUF: id = [] def __init__(self, N): self.id = [] for i in range(N): self.id[i] = i def connected(self, p, q): return self.id[p] == self.id[q] def union(self, p, q): pid = self.id[p] qid = self.id[q] for i in range(len(self.id)): if id[i] == pid: id[i] = qid
class Quickfinduf: id = [] def __init__(self, N): self.id = [] for i in range(N): self.id[i] = i def connected(self, p, q): return self.id[p] == self.id[q] def union(self, p, q): pid = self.id[p] qid = self.id[q] for i in range(len(self.id)): if id[i] == pid: id[i] = qid
#!/usr/bin/python3 class Solution: def isPalindrome(self, x: int) -> bool: if x < 0: return False elif x < 10: return True reversed, k = 0, x while k != 0: reversed = reversed * 10 + (k % 10) k //= 10 return x == reversed
class Solution: def is_palindrome(self, x: int) -> bool: if x < 0: return False elif x < 10: return True (reversed, k) = (0, x) while k != 0: reversed = reversed * 10 + k % 10 k //= 10 return x == reversed
class Host: """A single host in the network. Note this class is mainly used to store initial scenario data for a host. The HostVector class is used to store and track the current state of a host (for efficiency and ease of use reasons). """ def __init__(self, address, os, services, processes, firewall, value=0.0, discovery_value=0.0, compromised=False, reachable=False, discovered=False, access=0): """ Arguments --------- address : (int, int) address of host as (subnet, id) os : dict A os_name: bool dictionary indicating which OS the host is runinng services : dict a (service_name, bool) dictionary indicating which services are present/absent processes : dict a (process_name, bool) dictionary indicating which processes are running on host or not firewall : dict a (addr, denied services) dictionary defining which services are blocked from other hosts in the network. If other host not in firewall assumes all services allowed value : float, optional value of the host (default=0.0) discovery_value : float, optional the reward gained for discovering the host (default=0.0) compromised : bool, optional whether host has been compromised or not (default=False) reachable : bool, optional whether host is reachable by attacker or not (default=False) discovered : bool, optional whether host has been reachable discovered by attacker or not (default=False) access : int, optional access level of attacker on host (default=0) """ self.address = address self.os = os self.services = services self.processes = processes self.firewall = firewall self.value = value self.discovery_value = discovery_value self.compromised = compromised self.reachable = reachable self.discovered = discovered self.access = access def is_running_service(self, service): return self.services[service] def is_running_os(self, os): return self.os[os] def is_running_process(self, process): return self.processes[process] def traffic_permitted(self, addr, service): return service not in self.firewall.get(addr, []) def 
__str__(self): output = ["Host: {"] output.append(f"\taddress: {self.address}") output.append(f"\tcompromised: {self.compromised}") output.append(f"\treachable: {self.reachable}") output.append(f"\tvalue: {self.value}") output.append(f"\taccess: {self.access}") output.append("\tOS: {") for os_name, val in self.os.items(): output.append(f"\t\t{os_name}: {val}") output.append("\t}") output.append("\tservices: {") for name, val in self.services.items(): output.append(f"\t\t{name}: {val}") output.append("\t}") output.append("\tprocesses: {") for name, val in self.processes.items(): output.append(f"\t\t{name}: {val}") output.append("\t}") output.append("\tfirewall: {") for addr, val in self.firewall.items(): output.append(f"\t\t{addr}: {val}") output.append("\t}") return "\n".join(output) def __repr__(self): return f"Host: {self.address}"
class Host: """A single host in the network. Note this class is mainly used to store initial scenario data for a host. The HostVector class is used to store and track the current state of a host (for efficiency and ease of use reasons). """ def __init__(self, address, os, services, processes, firewall, value=0.0, discovery_value=0.0, compromised=False, reachable=False, discovered=False, access=0): """ Arguments --------- address : (int, int) address of host as (subnet, id) os : dict A os_name: bool dictionary indicating which OS the host is runinng services : dict a (service_name, bool) dictionary indicating which services are present/absent processes : dict a (process_name, bool) dictionary indicating which processes are running on host or not firewall : dict a (addr, denied services) dictionary defining which services are blocked from other hosts in the network. If other host not in firewall assumes all services allowed value : float, optional value of the host (default=0.0) discovery_value : float, optional the reward gained for discovering the host (default=0.0) compromised : bool, optional whether host has been compromised or not (default=False) reachable : bool, optional whether host is reachable by attacker or not (default=False) discovered : bool, optional whether host has been reachable discovered by attacker or not (default=False) access : int, optional access level of attacker on host (default=0) """ self.address = address self.os = os self.services = services self.processes = processes self.firewall = firewall self.value = value self.discovery_value = discovery_value self.compromised = compromised self.reachable = reachable self.discovered = discovered self.access = access def is_running_service(self, service): return self.services[service] def is_running_os(self, os): return self.os[os] def is_running_process(self, process): return self.processes[process] def traffic_permitted(self, addr, service): return service not in self.firewall.get(addr, []) def 
__str__(self): output = ['Host: {'] output.append(f'\taddress: {self.address}') output.append(f'\tcompromised: {self.compromised}') output.append(f'\treachable: {self.reachable}') output.append(f'\tvalue: {self.value}') output.append(f'\taccess: {self.access}') output.append('\tOS: {') for (os_name, val) in self.os.items(): output.append(f'\t\t{os_name}: {val}') output.append('\t}') output.append('\tservices: {') for (name, val) in self.services.items(): output.append(f'\t\t{name}: {val}') output.append('\t}') output.append('\tprocesses: {') for (name, val) in self.processes.items(): output.append(f'\t\t{name}: {val}') output.append('\t}') output.append('\tfirewall: {') for (addr, val) in self.firewall.items(): output.append(f'\t\t{addr}: {val}') output.append('\t}') return '\n'.join(output) def __repr__(self): return f'Host: {self.address}'
max=0 while True: a=int(input("masukan bilangan : ")) if max < a : max = a if a==0: break print("bilangan terbesar adalah = ",max)
max = 0 while True: a = int(input('masukan bilangan : ')) if max < a: max = a if a == 0: break print('bilangan terbesar adalah = ', max)
# Author: Gaurav Pande # find the paths in a bt which adds up to the target. # link: https://leetcode.com/problems/path-sum-iii/description/ class Solution(object): def helper(self, root, target, so_far, cache): if root: complement = so_far + root.val - target if complement in cache: self.result += cache[complement] cache.setdefault(so_far+root.val, 0) cache[so_far+root.val] += 1 self.helper(root.left, target, so_far+root.val, cache) self.helper(root.right, target, so_far+root.val, cache) cache[so_far+root.val] -= 1 return def pathSum(self, root, sum): """ :type root: TreeNode :type sum: int :rtype: int """ self.result = 0 self.helper(root, sum, 0, {0:1}) return self.result # Definition for a binary tree node. # class TreeNode(object): # def __init__(self, val=0, left=None, right=None): # self.val = val # self.left = left # self.right = right class Solution(object): def pathSum(self, root, sum): """ :type root: TreeNode :type sum: int :rtype: List[List[int]] """ res = [] self.pathSumHelper(root,sum, res, []) return res def pathSumHelper(self, root, target, res, temp): if not root: return if not root.left and not root.right and target-root.val == 0: # print(temp) temp.append(root.val) res.append(temp[:]) else: self.pathSumHelper(root.left, target-root.val, res, temp+[root.val]) self.pathSumHelper(root.right, target-root.val, res, temp + [root.val])
class Solution(object): def helper(self, root, target, so_far, cache): if root: complement = so_far + root.val - target if complement in cache: self.result += cache[complement] cache.setdefault(so_far + root.val, 0) cache[so_far + root.val] += 1 self.helper(root.left, target, so_far + root.val, cache) self.helper(root.right, target, so_far + root.val, cache) cache[so_far + root.val] -= 1 return def path_sum(self, root, sum): """ :type root: TreeNode :type sum: int :rtype: int """ self.result = 0 self.helper(root, sum, 0, {0: 1}) return self.result class Solution(object): def path_sum(self, root, sum): """ :type root: TreeNode :type sum: int :rtype: List[List[int]] """ res = [] self.pathSumHelper(root, sum, res, []) return res def path_sum_helper(self, root, target, res, temp): if not root: return if not root.left and (not root.right) and (target - root.val == 0): temp.append(root.val) res.append(temp[:]) else: self.pathSumHelper(root.left, target - root.val, res, temp + [root.val]) self.pathSumHelper(root.right, target - root.val, res, temp + [root.val])
class Person: "This is the Base Class" def get_name(self, name): "This is the Person Class Function" self.name= name def get_details(self): return self.name class Student(Person): def fill_details(self, name, branch, year): Person.get_name(self, name) self.branch = branch self.year = year def get_details(self): print("Name:", Person.get_details(self)) # print("Name:", self.name) print("Branch:", self.branch) print("Year:", self.year) p1=Person() s1=Student() p1.get_name("ABC") s1.fill_details("XYZ", "CSE", 2020) print(p1.get_details()) s1.get_details() # Class built-in Attributes print(Student.__bases__) print(Student.__dict__) print(s1.__dict__) print(Student.__name__) print(s1.__module__) print(Person.__doc__) print(Person.get_name.__doc__) print(p1.get_name.__doc__)
class Person: """This is the Base Class""" def get_name(self, name): """This is the Person Class Function""" self.name = name def get_details(self): return self.name class Student(Person): def fill_details(self, name, branch, year): Person.get_name(self, name) self.branch = branch self.year = year def get_details(self): print('Name:', Person.get_details(self)) print('Branch:', self.branch) print('Year:', self.year) p1 = person() s1 = student() p1.get_name('ABC') s1.fill_details('XYZ', 'CSE', 2020) print(p1.get_details()) s1.get_details() print(Student.__bases__) print(Student.__dict__) print(s1.__dict__) print(Student.__name__) print(s1.__module__) print(Person.__doc__) print(Person.get_name.__doc__) print(p1.get_name.__doc__)
n, k=map(int,input().split()) e=list(map(int,input().split())) p=v=0 a='' for c, i in enumerate(e[1:]): if i > e[c]: if a == 'd':v+=1 a='s' elif i < e[c]: if a=='s': p+=1 a='d' print('beautiful') if k==p==v+1 else print('ugly')
(n, k) = map(int, input().split()) e = list(map(int, input().split())) p = v = 0 a = '' for (c, i) in enumerate(e[1:]): if i > e[c]: if a == 'd': v += 1 a = 's' elif i < e[c]: if a == 's': p += 1 a = 'd' print('beautiful') if k == p == v + 1 else print('ugly')
""" 56. Merge Intervals Given a collection of intervals, merge all overlapping intervals. Example 1: Input: [[1,3],[2,6],[8,10],[15,18]] Output: [[1,6],[8,10],[15,18]] Explanation: Since intervals [1,3] and [2,6] overlaps, merge them into [1,6]. Example 2: Input: [[1,4],[4,5]] Output: [[1,5]] Explanation: Intervals [1,4] and [4,5] are considered overlapping. NOTE: input types have been changed on April 15, 2019. Please reset to default code definition to get new method signature. """ # Runtime: 92 ms, faster than 79.23% of Python3 online submissions for Merge Intervals. # Memory Usage: 14.6 MB, less than 6.52% of Python3 online submissions for Merge Intervals. class Solution: def merge(self, intervals: List[List[int]]) -> List[List[int]]: if len(intervals) == 0: return [] intervals = sorted(intervals, key=lambda x:x[0]) left, right = 0, 1 res = [intervals[0]] while right < len(intervals): left_node = res[-1] right_node = intervals[right] if left_node[1] >= right_node[0]: # can be merge res[-1][1] = max(left_node[1], right_node[1]) right += 1 else: res.append(right_node) right += 1 return res
""" 56. Merge Intervals Given a collection of intervals, merge all overlapping intervals. Example 1: Input: [[1,3],[2,6],[8,10],[15,18]] Output: [[1,6],[8,10],[15,18]] Explanation: Since intervals [1,3] and [2,6] overlaps, merge them into [1,6]. Example 2: Input: [[1,4],[4,5]] Output: [[1,5]] Explanation: Intervals [1,4] and [4,5] are considered overlapping. NOTE: input types have been changed on April 15, 2019. Please reset to default code definition to get new method signature. """ class Solution: def merge(self, intervals: List[List[int]]) -> List[List[int]]: if len(intervals) == 0: return [] intervals = sorted(intervals, key=lambda x: x[0]) (left, right) = (0, 1) res = [intervals[0]] while right < len(intervals): left_node = res[-1] right_node = intervals[right] if left_node[1] >= right_node[0]: res[-1][1] = max(left_node[1], right_node[1]) right += 1 else: res.append(right_node) right += 1 return res
# Databricks notebook source # MAGIC %md-sandbox # MAGIC # MAGIC <div style="text-align: center; line-height: 0; padding-top: 9px;"> # MAGIC <img src="https://databricks.com/wp-content/uploads/2018/03/db-academy-rgb-1200px.png" alt="Databricks Learning" style="width: 600px"> # MAGIC </div> # COMMAND ---------- # MAGIC %md # MAGIC # Lab: Basic SQL # COMMAND ---------- # MAGIC %run ./Includes/Classroom-Setup # COMMAND ---------- step = DA.publisher.add_step(False, instructions=""" <h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" /> Lesson Objective</h2> <div class="instructions-div"> <p>At the end of this lesson, you will be able to:</p> <ul> <li>Write basic SQL queries to subset tables using Databricks SQL</li> <li>Join multiple tables together to create a new table</li> <li>Aggregate data columns using SQL functions to answer defined business questions</li> </ul></div> """, statements=None) step.render(DA.username) step.execute(DA.username) # COMMAND ---------- step = DA.publisher.add_validation(instructions=""" <h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" /> Retrieve Data</h2> <div class="instructions-div"> <p>The statement we are using in this part of the lab implements <span class="monofont">SELECT</span>, <span class="monofont">SELECT ... AS</span>, <span class="monofont">GROUP BY</span>, and <span class="monofont">ORDER BY</span>. 
Note that <span class="monofont">FROM</span>, <span class="monofont">GROUP BY</span>, and <span class="monofont">ORDER BY</span> need to occur in a specific order, or an error will be thrown.</p> <p>Complete the following:</p> <ol> <li>Make the required changes to the query below</li> <li>Run the query in Databricks SQL</li> <li>Check your work by entering your answer to the question</li> <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li> </ol> <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>; SELECT loyalty_segment, count(loyalty_segment) AS Count <span style="color:red;">FILL_IN</span> customers <span style="color:red;">FILL_IN</span> BY loyalty_segment <span style="color:red;">FILL_IN</span> BY loyalty_segment;</pre></p> </div> """, statements="""SELECT loyalty_segment, count(loyalty_segment) AS Count FROM customers GROUP BY loyalty_segment ORDER BY loyalty_segment;""", label="""How many customers are in loyalty_segment 0? """, expected="11097", length=10) step.render(DA.username) step.execute(DA.username) # COMMAND ---------- step = DA.publisher.add_validation(instructions=""" <h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" /> Use Column Expressions</h2> <div class="instructions-div"> <p>The <span class="monofont">customers</span> table contains the column <span class="monofont">customer_name</span>, which has the same problem as the <span class="monofont">city</span> column had in the last lesson. The customer names are all in lower-case. 
Run a <span class="monofont">SELECT</span> query, using the <span class="monofont">initcap()</span> function to examine the results of using this function on the <span class="monofont">customer_name</span>.</p> <p>Complete the following:</p> <ol> <li>Make the required changes to the query below</li> <li>Run the query in Databricks SQL</li> <li>Check your work by entering your answer to the question</li> <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li> </ol> <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>; SELECT <span style="color:red;">FILL_IN</span>(<span style="color:red;">FILL_IN</span>) AS Customer_Name FROM customers ORDER BY customer_name DESC;</pre></p> </div> """, statements="""SELECT initcap(customer_name) AS Customer_Name FROM customers ORDER BY customer_name DESC;""", label="""What is the last name of the last customer (alphabetically ) in the table? Ensure answer is in lower case """, expected="zyskowski", length=10) step.render(DA.username) step.execute(DA.username) # COMMAND ---------- step = DA.publisher.add_validation(instructions=""" <h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" /> Update Data</h2> <div class="instructions-div"> <p>Let's go ahead and implement the changes we examined in the last section. 
Use an <span class="monofont">UPDATE</span> statement to change the data in the <span class="monofont">customers</span> table.</p> <p>Complete the following:</p> <ol> <li>Make the required changes to the query below</li> <li>Run the query in Databricks SQL</li> <li>Check your work by entering your answer to the question</li> <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li> </ol> <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>; <span style="color:red;">FILL_IN</span> customers <span style="color:red;">FILL_IN</span> customer_name = initcap(customer_name); SELECT * FROM customers;</pre></p> </div> """, statements=["UPDATE customers SET customer_name = initcap(customer_name);", "SELECT * FROM customers;"], label="""In which city is Bittner Engineering, Inc. located? """, expected="randolph", length=10) step.render(DA.username) step.execute(DA.username) # COMMAND ---------- step = DA.publisher.add_validation(instructions=""" <h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" /> Insert Data</h2> <div class="instructions-div"> <p>We can implement a fifth loyalty segment in our <span class="monofont">loyalty_segments</span> table. 
We will use a <span class="monofont">unit_threshold</span> of 130 units.</p> <p>Complete the following:</p> <ol> <li>Make the required changes to the query below</li> <li>Run the query in Databricks SQL</li> <li>Check your work by entering your answer to the question</li> <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li> </ol> <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>; <span style="color:red;">FILL_IN</span> INTO loyalty_segments (loyalty_segment_id, loyalty_segment_description, unit_threshold, valid_from, valid_to) <span style="color:red;">FILL_IN</span> (5, 'level_5', 130, current_date(), Null); SELECT * FROM loyalty_segments;</pre></p> </div> """, statements=["""INSERT INTO loyalty_segments (loyalty_segment_id, loyalty_segment_description, unit_threshold, valid_from, valid_to) VALUES (5, 'level_5', 130, current_date(), Null);""", "SELECT * FROM loyalty_segments;"], label="""How many rows are in the table now? 
""", expected="6", length=10) step.render(DA.username) step.execute(DA.username) # COMMAND ---------- step = DA.publisher.add_validation(instructions=""" <h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" /> CREATE OR REPLACE TABLE AS statement</h2> <div class="instructions-div"> <p>In this part of the lab, we are going to create a view that contains only those customers in California (CA) who have a <span class="monofont">loyalty_segment</span> of 3.</p> <p>Complete the following:</p> <ol> <li>Make the required changes to the query below</li> <li>Run the query in Databricks SQL</li> <li>Check your work by entering your answer to the question</li> <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li> </ol> <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>; CREATE OR REPLACE <span style="color:red;">FILL_IN</span> high_value_CA_customers AS SELECT * FROM customers WHERE state = '<span style="color:red;">FILL_IN</span>' AND loyalty_segment = <span style="color:red;">FILL_IN</span>; SELECT * FROM high_value_CA_customers;</pre></p> </div> """, statements=["""CREATE OR REPLACE TABLE high_value_CA_customers AS SELECT * FROM customers WHERE state = 'CA' AND loyalty_segment = 3;""", "SELECT * FROM high_value_CA_customers;"], label="""How many rows are in the view? 
""", expected="949", length=10) step.render(DA.username) step.execute(DA.username) # COMMAND ---------- step = DA.publisher.add_validation(instructions=""" <h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" /> Joins</h2> <div class="instructions-div"> <p>Let's use a <span class="monofont">INNER JOIN</span>, <span class="monofont">GROUP BY</span>, <span class="monofont">ORDER BY</span>, and a function to calculate the total dollar amount of sales to various states in the sales table. Note that <span class="monofont">INNER JOIN</span> is the default join type, so we can just type <span class="monofont">JOIN</span>.</p> <p>Complete the following:</p> <ol> <li>Make the required changes to the query below</li> <li>Run the query in Databricks SQL</li> <li>Check your work by entering your answer to the question</li> <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li> </ol> <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>; <span style="color:red;">FILL_IN</span> customers.state, sum(total_price) AS Total FROM customers <span style="color:red;">FILL_IN</span> sales <span style="color:red;">FILL_IN</span> customers.customer_id = sales.customer_id GROUP BY customers.state ORDER BY Total DESC;</pre></p> </div> """, statements=["""CREATE OR REPLACE TABLE high_value_CA_customers AS SELECT * FROM customers WHERE state = 'CA' AND loyalty_segment = 3;""", "SELECT * FROM high_value_CA_customers;"], label="""Which state has the highest sales? 
""", expected="or", length=10) step.render(DA.username) step.execute(DA.username) # COMMAND ---------- step = DA.publisher.add_validation(instructions=""" <h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" /> Use Aggregations</h2> <div class="instructions-div"> <p>We have already used a few aggregate functions in the lab. Let's finish by running a simple aggregation to find the best price our suppliers have been able to provide on a specific piece of vintage electronics, a 5-disk CD changer. The best price means the lowest price, and we can use the <span class="monofont">min()</span> function to make this calculation.</p> <p>Complete the following:</p> <ol> <li>Make the required changes to the query below</li> <li>Run the query in Databricks SQL</li> <li>Check your work by entering your answer to the question</li> <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li> </ol> <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>; SELECT <span style="color:red;">FILL_IN</span>(total_price) FROM sales;</pre></p> </div> """, statements=["""SELECT min(total_price) FROM sales;"""], label="""What is the lowest price? (numbers only) """, expected="11", length=10) step.render(DA.username) step.execute(DA.username) # COMMAND ---------- DA.validate_datasets() html = DA.publisher.publish() displayHTML(html) # COMMAND ---------- # MAGIC %md-sandbox # MAGIC &copy; 2022 Databricks, Inc. All rights reserved.<br/> # MAGIC Apache, Apache Spark, Spark and the Spark logo are trademarks of the <a href="https://www.apache.org/">Apache Software Foundation</a>.<br/> # MAGIC <br/> # MAGIC <a href="https://databricks.com/privacy-policy">Privacy Policy</a> | <a href="https://databricks.com/terms-of-use">Terms of Use</a> | <a href="https://help.databricks.com/">Support</a>
step = DA.publisher.add_step(False, instructions=' \n\n <h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" />\n Lesson Objective</h2>\n <div class="instructions-div">\n <p>At the end of this lesson, you will be able to:</p>\n <ul>\n <li>Write basic SQL queries to subset tables using Databricks SQL</li>\n <li>Join multiple tables together to create a new table</li>\n <li>Aggregate data columns using SQL functions to answer defined business questions</li>\n </ul></div>\n \n ', statements=None) step.render(DA.username) step.execute(DA.username) step = DA.publisher.add_validation(instructions='\n\n<h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" />\nRetrieve Data</h2>\n <div class="instructions-div">\n <p>The statement we are using in this part of the lab implements <span class="monofont">SELECT</span>, <span class="monofont">SELECT ... AS</span>, <span class="monofont">GROUP BY</span>, and <span class="monofont">ORDER BY</span>. 
Note that <span class="monofont">FROM</span>, <span class="monofont">GROUP BY</span>, and <span class="monofont">ORDER BY</span> need to occur in a specific order, or an error will be thrown.</p>\n <p>Complete the following:</p>\n <ol>\n <li>Make the required changes to the query below</li>\n <li>Run the query in Databricks SQL</li>\n <li>Check your work by entering your answer to the question</li>\n <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li>\n </ol>\n <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>;\nSELECT loyalty_segment, count(loyalty_segment) AS Count \n <span style="color:red;">FILL_IN</span> customers \n <span style="color:red;">FILL_IN</span> BY loyalty_segment \n <span style="color:red;">FILL_IN</span> BY loyalty_segment;</pre></p>\n </div>\n\n\n', statements='SELECT loyalty_segment, count(loyalty_segment) AS Count \n FROM customers \n GROUP BY loyalty_segment \n ORDER BY loyalty_segment;', label='How many customers are in loyalty_segment 0? ', expected='11097', length=10) step.render(DA.username) step.execute(DA.username) step = DA.publisher.add_validation(instructions='\n\n<h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" />\nUse Column Expressions</h2>\n <div class="instructions-div">\n <p>The <span class="monofont">customers</span> table contains the column <span class="monofont">customer_name</span>, which has the same problem as the <span class="monofont">city</span> column had in the last lesson. The customer names are all in lower-case. 
Run a <span class="monofont">SELECT</span> query, using the <span class="monofont">initcap()</span> function to examine the results of using this function on the <span class="monofont">customer_name</span>.</p>\n <p>Complete the following:</p>\n <ol>\n <li>Make the required changes to the query below</li>\n <li>Run the query in Databricks SQL</li>\n <li>Check your work by entering your answer to the question</li>\n <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li>\n </ol>\n <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>;\nSELECT <span style="color:red;">FILL_IN</span>(<span style="color:red;">FILL_IN</span>) AS Customer_Name \n FROM customers\n ORDER BY customer_name DESC;</pre></p>\n </div>\n\n\n', statements='SELECT initcap(customer_name) AS Customer_Name \n FROM customers\n ORDER BY customer_name DESC;', label='What is the last name of the last customer (alphabetically ) in the table? Ensure answer is in lower case ', expected='zyskowski', length=10) step.render(DA.username) step.execute(DA.username) step = DA.publisher.add_validation(instructions='\n\n<h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" />\nUpdate Data</h2>\n <div class="instructions-div">\n <p>Let\'s go ahead and implement the changes we examined in the last section. 
Use an <span class="monofont">UPDATE</span> statement to change the data in the <span class="monofont">customers</span> table.</p>\n <p>Complete the following:</p>\n <ol>\n <li>Make the required changes to the query below</li>\n <li>Run the query in Databricks SQL</li>\n <li>Check your work by entering your answer to the question</li>\n <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li>\n </ol>\n <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>;\n<span style="color:red;">FILL_IN</span> customers <span style="color:red;">FILL_IN</span> customer_name = initcap(customer_name);\nSELECT * FROM customers;</pre></p>\n </div>\n\n\n', statements=['UPDATE customers SET customer_name = initcap(customer_name);', 'SELECT * FROM customers;'], label='In which city is Bittner Engineering, Inc. located? ', expected='randolph', length=10) step.render(DA.username) step.execute(DA.username) step = DA.publisher.add_validation(instructions='\n\n<h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" />\nInsert Data</h2>\n <div class="instructions-div">\n <p>We can implement a fifth loyalty segment in our <span class="monofont">loyalty_segments</span> table. 
We will use a <span class="monofont">unit_threshold</span> of 130 units.</p>\n <p>Complete the following:</p>\n <ol>\n <li>Make the required changes to the query below</li>\n <li>Run the query in Databricks SQL</li>\n <li>Check your work by entering your answer to the question</li>\n <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li>\n </ol>\n <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>;\n<span style="color:red;">FILL_IN</span> INTO loyalty_segments\n (loyalty_segment_id, loyalty_segment_description, unit_threshold, valid_from, valid_to)\n <span style="color:red;">FILL_IN</span>\n (5, \'level_5\', 130, current_date(), Null);\nSELECT * FROM loyalty_segments;</pre></p>\n </div>\n\n\n', statements=["INSERT INTO loyalty_segments\n (loyalty_segment_id, loyalty_segment_description, unit_threshold, valid_from, valid_to)\n VALUES\n (5, 'level_5', 130, current_date(), Null);", 'SELECT * FROM loyalty_segments;'], label='How many rows are in the table now? 
', expected='6', length=10) step.render(DA.username) step.execute(DA.username) step = DA.publisher.add_validation(instructions='\n\n<h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" />\nCREATE OR REPLACE TABLE AS statement</h2>\n <div class="instructions-div">\n <p>In this part of the lab, we are going to create a view that contains only those customers in California (CA) who have a <span class="monofont">loyalty_segment</span> of 3.</p>\n <p>Complete the following:</p>\n <ol>\n <li>Make the required changes to the query below</li>\n <li>Run the query in Databricks SQL</li>\n <li>Check your work by entering your answer to the question</li>\n <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li>\n </ol>\n <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>;\nCREATE OR REPLACE <span style="color:red;">FILL_IN</span> high_value_CA_customers AS\n SELECT * \n FROM customers \n WHERE state = \'<span style="color:red;">FILL_IN</span>\'\n AND loyalty_segment = <span style="color:red;">FILL_IN</span>;\nSELECT * FROM high_value_CA_customers;</pre></p>\n </div>\n\n\n', statements=["CREATE OR REPLACE TABLE high_value_CA_customers AS\n SELECT * \n FROM customers \n WHERE state = 'CA'\n AND loyalty_segment = 3;", 'SELECT * FROM high_value_CA_customers;'], label='How many rows are in the view? 
', expected='949', length=10) step.render(DA.username) step.execute(DA.username) step = DA.publisher.add_validation(instructions='\n\n<h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" />\nJoins</h2>\n <div class="instructions-div">\n <p>Let\'s use a <span class="monofont">INNER JOIN</span>, <span class="monofont">GROUP BY</span>, <span class="monofont">ORDER BY</span>, and a function to calculate the total dollar amount of sales to various states in the sales table. Note that <span class="monofont">INNER JOIN</span> is the default join type, so we can just type <span class="monofont">JOIN</span>.</p>\n <p>Complete the following:</p>\n <ol>\n <li>Make the required changes to the query below</li>\n <li>Run the query in Databricks SQL</li>\n <li>Check your work by entering your answer to the question</li>\n <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li>\n </ol>\n <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>;\n<span style="color:red;">FILL_IN</span> customers.state, sum(total_price) AS Total FROM customers\n <span style="color:red;">FILL_IN</span> sales\n <span style="color:red;">FILL_IN</span> customers.customer_id = sales.customer_id\n GROUP BY customers.state\n ORDER BY Total DESC;</pre></p>\n </div>\n\n\n', statements=["CREATE OR REPLACE TABLE high_value_CA_customers AS\n SELECT * \n FROM customers \n WHERE state = 'CA'\n AND loyalty_segment = 3;", 'SELECT * FROM high_value_CA_customers;'], label='Which state has the highest sales? 
', expected='or', length=10) step.render(DA.username) step.execute(DA.username) step = DA.publisher.add_validation(instructions='\n\n<h2><img class="image-icon-inline" src="https://s3.us-west-2.amazonaws.com/files.training.databricks.com/images/step-icon_small.png" alt="heading-icon" />\nUse Aggregations</h2>\n <div class="instructions-div">\n <p>We have already used a few aggregate functions in the lab. Let\'s finish by running a simple aggregation to find the best price our suppliers have been able to provide on a specific piece of vintage electronics, a 5-disk CD changer. The best price means the lowest price, and we can use the <span class="monofont">min()</span> function to make this calculation.</p>\n <p>Complete the following:</p>\n <ol>\n <li>Make the required changes to the query below</li>\n <li>Run the query in Databricks SQL</li>\n <li>Check your work by entering your answer to the question</li>\n <li>After pressing <span class="monofont">ENTER/RETURN</span>, green indicates a correct answer, and red indicates incorrect</li>\n </ol>\n <p><pre><span class="monofont">USE <span style="color:red;">FILL_IN</span>;\nSELECT <span style="color:red;">FILL_IN</span>(total_price)\n FROM sales;</pre></p>\n </div>\n\n\n', statements=['SELECT min(total_price) \n FROM sales;'], label='What is the lowest price? (numbers only) ', expected='11', length=10) step.render(DA.username) step.execute(DA.username) DA.validate_datasets() html = DA.publisher.publish() display_html(html)
########## Imports and other setup ############ ########## Preparing data ############## target = open('../data/example').read().split(': ')[1].split(', ') xTarget = target[0]; yTarget = target[1] xTarget = xTarget.split('=')[1].split('..'); yTarget = yTarget.split('=')[1].split('..') xTarget = [int(x) for x in xTarget]; yTarget = [int(y) for y in yTarget] position = {'x':0, 'y':0} velocity = {'x':0, 'y':0} ########### Functions that update the positions and check if a certain position matches target area############ def applyVelocity(): position['x'] += velocity['x']; position['y'] += velocity['y'] def applyDrag(): if velocity['x'] > 0: velocity['x'] -=1 def applyGravity(): velocity['y'] -= 1 def matchesTargetArea(): if xTarget[0] <= position['x'] <= xTarget[1] and yTarget[0] <= position['y'] <= yTarget[1]: return True return False def neverGonnaMatchArea(): if velocity['x'] == xTarget[0] and position['y'] < yTarget[0] and velocity['y'] < 0: return True return False ############ Actual solution ############### ## We are going to try a lot of different velocities but let's pick our values carefully ## Like we can't select an x value for velocity that is higher than the x value of target area ## We can say this because the x seeks 0, y on the other hand will just continue to get more negative ## Thus we know the initial y value can not be more negative than the target area. So some limit on y ## Also the problem is to find the highest y value. We also know that y/x is the ## So we have at least some initial thoughts on what values we should try. Let's try them. 
def doesItHitTheTarget(vel):
    # Simulate a launch with initial velocity `vel`; returns (vel, hit?) and
    # resets the shared position/velocity state before returning.
    velocity['x'] = vel[0]; velocity['y'] = vel[1]
    while True:
        if matchesTargetArea():
            position['x'] = 0; position['y'] = 0; velocity['x'] = 0; velocity['y'] = 0;
            return vel, True
        elif neverGonnaMatchArea():
            position['x'] = 0; position['y'] = 0; velocity['x'] = 0; velocity['y'] = 0;
            return vel, False
        applyVelocity(); applyDrag(); applyGravity()

def doesRangeHitTarget(x_max = 10, y_min = -10, y_max = 10):
    # Try every launch velocity with 1 <= x < x_max and y_min <= y < y_max
    # (upper bounds exclusive, per range()); return the velocities that hit.
    velsThatHitTarget = []
    for y in range(y_min, y_max):
        for x in range(1,x_max):
            vel, b = doesItHitTheTarget((x,y))
            if b:
                velsThatHitTarget.append(vel)
    return velsThatHitTarget

def getHighestYValue(vel):
    # Return (vel, apex position): the probe's position when it stops rising
    # (vertical velocity hits zero).  Resets shared state before returning.
    velocity['x'] = vel[0]; velocity['y'] = vel[1]
    while True:
        if velocity['y'] <= 0:
            x,y = position['x'], position['y']
            position['x'] = 0; position['y'] = 0; velocity['x'] = 0; velocity['y'] = 0
            return vel, (x,y)
        applyVelocity(); applyDrag(); applyGravity()

def getAllHighestValues(hits):
    # Map each hitting velocity to the apex point of its trajectory.
    topVelOPoint = {}
    for hit in hits:
        vel, pos = getHighestYValue(hit)
        topVelOPoint[vel] = pos
    return topVelOPoint

# NOTE(review): y_min is set from yTarget[1] (the target's *upper* y edge);
# the reasoning comments above suggest the lower edge yTarget[0] was intended
# as the bound -- confirm which edge the search should start from.
pointsThatHitTarget = doesRangeHitTarget(x_max=xTarget[1], y_min=yTarget[1])
allHighestValues = getAllHighestValues(pointsThatHitTarget)
# Trajectory puzzle (snake_case port): find launch velocities whose probe hits
# a rectangular target area, and the apex of each hitting trajectory.

# --- Prepare data: parse "target area: x=A..B, y=C..D" from the input file ---
target = open('../data/example').read().split(': ')[1].split(', ')
x_target = target[0]
y_target = target[1]
x_target = x_target.split('=')[1].split('..')
y_target = y_target.split('=')[1].split('..')
x_target = [int(x) for x in x_target]
y_target = [int(y) for y in y_target]

# Shared simulation state, mutated in place by the step helpers below.
position = {'x': 0, 'y': 0}
velocity = {'x': 0, 'y': 0}


def apply_velocity():
    """Advance the probe one step along its current velocity."""
    position['x'] += velocity['x']
    position['y'] += velocity['y']


def apply_drag():
    """Drag pulls a positive horizontal velocity towards zero."""
    if velocity['x'] > 0:
        velocity['x'] -= 1


def apply_gravity():
    """Gravity lowers the vertical velocity by one each step."""
    velocity['y'] -= 1


def matches_target_area():
    """Return True when the current position lies inside the target box."""
    if x_target[0] <= position['x'] <= x_target[1] and y_target[0] <= position['y'] <= y_target[1]:
        return True
    return False


def never_gonna_match_area():
    """Return True once the probe is judged unable to reach the target."""
    if velocity['x'] == x_target[0] and position['y'] < y_target[0] and velocity['y'] < 0:
        return True
    return False


def does_it_hit_the_target(vel):
    """Simulate a launch with velocity ``vel``.

    Returns ``(vel, hit?)`` and resets the shared state before returning.
    """
    velocity['x'] = vel[0]
    velocity['y'] = vel[1]
    while True:
        if matches_target_area():
            position['x'] = 0
            position['y'] = 0
            velocity['x'] = 0
            velocity['y'] = 0
            return vel, True
        elif never_gonna_match_area():
            position['x'] = 0
            position['y'] = 0
            velocity['x'] = 0
            velocity['y'] = 0
            return vel, False
        apply_velocity()
        apply_drag()
        apply_gravity()


def does_range_hit_target(x_max=10, y_min=-10, y_max=10):
    """Try every launch velocity in the given range; return those that hit."""
    vels_that_hit_target = []
    for y in range(y_min, y_max):
        for x in range(1, x_max):
            vel, hit = does_it_hit_the_target((x, y))
            if hit:
                vels_that_hit_target.append(vel)
    return vels_that_hit_target


def get_highest_y_value(vel):
    """Return ``(vel, apex position)`` -- where the probe stops rising."""
    velocity['x'] = vel[0]
    velocity['y'] = vel[1]
    while True:
        if velocity['y'] <= 0:
            x, y = position['x'], position['y']
            position['x'] = 0
            position['y'] = 0
            velocity['x'] = 0
            velocity['y'] = 0
            return vel, (x, y)
        apply_velocity()
        apply_drag()
        apply_gravity()


def get_all_highest_values(hits):
    """Map each hitting velocity to the apex point of its trajectory."""
    top_vel_o_point = {}
    for hit in hits:
        vel, pos = get_highest_y_value(hit)
        top_vel_o_point[vel] = pos
    return top_vel_o_point


# Fixed: the code below previously referenced the pre-rename camelCase names
# (xTarget, yTarget, velsThatHitTarget, topVelOPoint, pointsThatHitTarget),
# which no longer exist and raised NameError; the final assignment was also
# split across rows without a continuation.
points_that_hit_target = does_range_hit_target(x_max=x_target[1], y_min=y_target[1])
all_highest_values = get_all_highest_values(points_that_hit_target)
#!/usr/bin/python3
# -*-coding:utf-8-*-

__author__ = "Bannings"


class Solution:
    def toGoatLatin(self, S: str) -> str:
        """Translate *S* into Goat Latin.

        Words starting with a vowel keep their letters; otherwise the first
        letter moves to the end.  Every word then gets "ma" plus one 'a' per
        1-based word index.  Runs of whitespace collapse to single spaces,
        matching the original character-by-character parser.
        """
        vowels = frozenset("aeiouAEIOU")
        pieces = []
        for idx, word in enumerate(S.split(), start=1):
            if word[0] in vowels:
                body = word
            else:
                body = word[1:] + word[0]
            pieces.append(body + "ma" + "a" * idx)
        return " ".join(pieces)


if __name__ == '__main__':
    solver = Solution()
    assert solver.toGoatLatin("I speak Goat Latin") == "Imaa peaksmaaa oatGmaaaa atinLmaaaaa"
    assert solver.toGoatLatin(" I speak Goat Latin") == "Imaa peaksmaaa oatGmaaaa atinLmaaaaa"
    assert solver.toGoatLatin(" I speak Goat Latin ") == "Imaa peaksmaaa oatGmaaaa atinLmaaaaa"
    assert solver.toGoatLatin("The quick brown fox jumped over the lazy dog") == "heTmaa uickqmaaa rownbmaaaa oxfmaaaaa umpedjmaaaaaa overmaaaaaaa hetmaaaaaaaa azylmaaaaaaaaa ogdmaaaaaaaaaa"
__author__ = 'Bannings'


class Solution:
    def to_goat_latin(self, S: str) -> str:
        """Translate *S* into Goat Latin.

        Vowel-initial words keep their order; consonant-initial words rotate
        their first letter to the end.  Every word gets "ma" plus one 'a' per
        1-based word index; runs of whitespace collapse to single spaces.
        """
        vowel = set(['a', 'e', 'i', 'o', 'u', 'A', 'E', 'I', 'O', 'U'])
        (ans, words_count, first_char, is_begin) = ([], 1, '', True)
        for char in S:
            if char == ' ':
                if is_begin:
                    continue
                # Finalize the current word: rotated consonant + "ma" + 'a's.
                tail = 'a' * words_count
                ans.append(f'{first_char}ma{tail}')
                (is_begin, first_char, words_count) = (True, '', words_count + 1)
            elif is_begin:
                if words_count > 1:
                    ans.append(' ')
                if char in vowel:
                    ans.append(char)
                else:
                    first_char = char
                is_begin = False
            else:
                ans.append(char)
        # Flush the last word when the string does not end with a space.
        if not is_begin:
            tail = 'a' * words_count
            ans.append(f'{first_char}ma{tail}')
        return ''.join(ans)


if __name__ == '__main__':
    # Fixed: the checks previously called the undefined lowercase ``solution()``
    # and the pre-rename method name ``toGoatLatin``, raising NameError /
    # AttributeError.
    solver = Solution()
    assert solver.to_goat_latin('I speak Goat Latin') == 'Imaa peaksmaaa oatGmaaaa atinLmaaaaa'
    assert solver.to_goat_latin(' I speak Goat Latin') == 'Imaa peaksmaaa oatGmaaaa atinLmaaaaa'
    assert solver.to_goat_latin(' I speak Goat Latin ') == 'Imaa peaksmaaa oatGmaaaa atinLmaaaaa'
    assert solver.to_goat_latin('The quick brown fox jumped over the lazy dog') == 'heTmaa uickqmaaa rownbmaaaa oxfmaaaaa umpedjmaaaaaa overmaaaaaaa hetmaaaaaaaa azylmaaaaaaaaa ogdmaaaaaaaaaa'
""" checks section sizes on disk and in virtual meory to indicate a packer """ def run(peobject): alerts = [] found = [] # loop through each section d = peobject.dict() if 'SECTIONS' in d: for s in d['SECTIONS']: # check for a raw size of 0 on disk but a non-zero size in virtual memory if (s['SizeOfRawData'] == 0) and (s['VirtualSize'] > 0): found.append('Section {0} has a disk size of {1} bytes but a virtual size of {2} bytes'.format(s['Name'], s['SizeOfRawData'], s['VirtualSize'])) # this rule generates only one alert if found: alerts.append({ 'title': 'Possible Malware Packer Used', 'description': 'This indicates a packer may be using the following sections to hold unpacked code at runtime.', 'data': found, 'code': '', }) return alerts
""" checks section sizes on disk and in virtual meory to indicate a packer """ def run(peobject): alerts = [] found = [] d = peobject.dict() if 'SECTIONS' in d: for s in d['SECTIONS']: if s['SizeOfRawData'] == 0 and s['VirtualSize'] > 0: found.append('Section {0} has a disk size of {1} bytes but a virtual size of {2} bytes'.format(s['Name'], s['SizeOfRawData'], s['VirtualSize'])) if found: alerts.append({'title': 'Possible Malware Packer Used', 'description': 'This indicates a packer may be using the following sections to hold unpacked code at runtime.', 'data': found, 'code': ''}) return alerts
class Node:
    """Plain binary-tree node."""

    def __init__(self, data):
        self.data = data
        self.left = None
        self.right = None


def breadthfirst(root):
    """Level-order traversal; returns the visited data, or None for an empty tree."""
    if root is None:
        return
    visited = [root.data]
    frontier = [root]
    while frontier:
        current = frontier.pop(0)
        for child in (current.left, current.right):
            if child:
                frontier.append(child)
                visited.append(child.data)
    return visited


def count_file(root, root1):
    """True when both trees contain the same number of "file" nodes."""
    return breadthfirst(root).count("file") == breadthfirst(root1).count("file")


def _build_sample_tree():
    """Construct the demo tree used by the script entry point."""
    root = Node(2)
    root.left = Node(7)
    root.right = Node(5)
    root.left.left = Node(2)
    root.left.right = Node(6)
    root.right.right = Node("file")
    root.left.right.left = Node("file")
    root.left.right.right = Node("file")
    root.right.right.left = Node(4)
    return root


if __name__ == "__main__":
    # Both demo trees are identical, so their "file" counts match.
    print(count_file(_build_sample_tree(), _build_sample_tree()))
class Node:
    """Plain binary-tree node."""

    def __init__(self, data):
        self.left = None
        self.right = None
        self.data = data


def breadthfirst(root):
    """Level-order traversal; returns the visited data, or None for an empty tree."""
    if root is None:
        return
    my_list = []
    result = []
    result.append(root.data)
    my_list.append(root)
    while len(my_list) > 0:
        node = my_list.pop(0)
        if node.left:
            my_list.append(node.left)
            result.append(node.left.data)
        if node.right:
            my_list.append(node.right)
            result.append(node.right.data)
    return result


def count_file(root, root1):
    """True when both trees contain the same number of "file" nodes."""
    result = breadthfirst(root)
    final_result = result.count('file')
    result1 = breadthfirst(root1)
    final_result1 = result1.count('file')
    if final_result == final_result1:
        return True
    else:
        return False


if __name__ == '__main__':
    # Fixed: the demo previously instantiated the class through the undefined
    # lowercase name ``node(...)``, raising NameError at runtime.
    root = Node(2)
    root.left = Node(7)
    root.right = Node(5)
    root.left.left = Node(2)
    root.left.right = Node(6)
    root.right.right = Node('file')
    root.left.right.left = Node('file')
    root.left.right.right = Node('file')
    root.right.right.left = Node(4)
    root1 = Node(2)
    root1.left = Node(7)
    root1.right = Node(5)
    root1.left.left = Node(2)
    root1.left.right = Node(6)
    root1.right.right = Node('file')
    root1.left.right.left = Node('file')
    root1.left.right.right = Node('file')
    root1.right.right.left = Node(4)
    print(count_file(root, root1))
# DrawBot sketch: draw the same image twice, blended where they overlap.
# size/scale/blendMode/image are provided by the DrawBot runtime environment.
size(256, 256)
scale(0.5, 0.5)
p = '../images/drawbot.png'
# "multiply" darkens pixels where later draws overlap earlier ones.
blendMode("multiply")
image(p, (0, 0))
image(p, (100, 100))
# DrawBot sketch: draw the same image twice with multiply blending.
# size/scale/blendMode/image are provided by the DrawBot runtime environment.
size(256, 256)
scale(0.5, 0.5)
p = '../images/drawbot.png'
# Fixed: DrawBot's API spells this ``blendMode``; the renamed ``blend_mode``
# does not exist in the DrawBot namespace and raised NameError.
blendMode('multiply')
image(p, (0, 0))
image(p, (100, 100))
# DB connection PORT = 8000 db_name="routing" db_user="routing" db_pass="KadufZyn8Dr" # E-mail email_login="login" email_password="password" email_adress="login@email.com"
# Port the service listens on.
port = 8000
# Database connection settings.
db_name = 'routing'
db_user = 'routing'
# NOTE(review): credential hard-coded in source control -- rotate it and load
# from an environment variable or secrets store instead.
db_pass = 'KadufZyn8Dr'
# E-mail account used for outgoing mail (placeholder values).
email_login = 'login'
email_password = 'password'
# NOTE(review): "adress" is a typo, but renaming would break importers -- confirm before fixing.
email_adress = 'login@email.com'
class TestDataError(Exception):
    """Base class for every error raised by the test-data layer."""
    pass


class MissingElementAmountValue(TestDataError):
    """Raised when an element-amount value is missing."""
    pass


class FactoryStartedAlready(TestDataError):
    """Raised when a factory is started more than once."""
    pass


class NoSuchDatatype(TestDataError):
    """Raised for a request naming an unknown datatype."""
    pass


class InvalidFieldType(TestDataError):
    """Raised when a field has an invalid type."""
    pass


class MissingRequiredFields(TestDataError):
    """Raised when required fields are absent."""
    pass


class UnmetDependentFields(TestDataError):
    """Raised when dependent fields are not satisfied."""
    pass


class NoFactoriesProvided(TestDataError):
    """Raised when no factories were provided."""
    pass


class InvalidTotalPrecentage(TestDataError):
    """Raised when the total percentage is invalid.

    NOTE(review): the class name misspells "Percentage"; renaming would break
    callers -- confirm before fixing.
    """
    pass
# Fixed: a mechanical rename mangled the CapWords class names (Testdataerror,
# Missingelementamountvalue, ...) while the base-class references still used
# the original names, so this module raised NameError at import time.  The
# original PEP 8 CapWords names are restored consistently.

class TestDataError(Exception):
    """Base class for every error raised by the test-data layer."""
    pass


class MissingElementAmountValue(TestDataError):
    """Raised when an element-amount value is missing."""
    pass


class FactoryStartedAlready(TestDataError):
    """Raised when a factory is started more than once."""
    pass


class NoSuchDatatype(TestDataError):
    """Raised for a request naming an unknown datatype."""
    pass


class InvalidFieldType(TestDataError):
    """Raised when a field has an invalid type."""
    pass


class MissingRequiredFields(TestDataError):
    """Raised when required fields are absent."""
    pass


class UnmetDependentFields(TestDataError):
    """Raised when dependent fields are not satisfied."""
    pass


class NoFactoriesProvided(TestDataError):
    """Raised when no factories were provided."""
    pass


class InvalidTotalPrecentage(TestDataError):
    """Raised when the total percentage is invalid."""
    pass
class NotFoundError(Exception):
    """Raised when the requested repository cannot be found."""


class ApiRateLimitError(Exception):
    """Raised when the API rate limit has been exceeded."""


class BadCredentialsError(Exception):
    """Raised when bad credentials were provided for the API."""
# Fixed: a mechanical rename lowercased the CapWords exception names
# (Notfounderror, Apiratelimiterror, Badcredentialserror), violating PEP 8's
# exception-naming convention and breaking any caller that catches the
# original names.  The CapWords names are restored.

class NotFoundError(Exception):
    """Raised when the requested repository cannot be found."""
    pass


class ApiRateLimitError(Exception):
    """Raised when the API rate limit has been exceeded."""
    pass


class BadCredentialsError(Exception):
    """Raised when bad credentials were provided for the API."""
    pass
""" You are given an unsorted array with both positive and negative elements. The positive numbers need not be in range from 1 to n !! You have to find the smallest positive number missing from the array in O(n) time using constant extra space. Examples Input: {2, 3, 7, 6, 8, -1, -10, 15} Output: 1 Input: { 2, 3, -7, 6, 8, 1, -10, 15 } Output: 4 Input: {1, 1, 0, -1, -2} Output: 2 SOLUTION: Exactly same as the solution to `find_duplicates.py`. Basically, we mark the occurrence of a number 'elem' by making arr[elem] as -ve. Then we traverse the array; the first index i such that arr[i] is +ve is the answer. Note that before doing this we'll need to bring all the non-positive elements to the left side to segregate them since they dont matter. """ def segregate_non_positive(arr): # Stores the index of the slot where the next non-positive number will be swapped to. # This is also the index of the first positive number. # All the numbers arr[i] where i < next_slot, will be None. next_slot = 0 for i in range(len(arr)): if arr[i] <= 0: arr[i], arr[next_slot] = arr[next_slot], arr[i] arr[next_slot] = None next_slot += 1 return next_slot def get_smallest_missing(arr): idx = segregate_non_positive(arr) for i in range(idx, len(arr)): slot = abs(arr[i]) - 1 if slot >= len(arr): continue if arr[slot] is None: arr[slot] = 0 elif arr[slot] > 0: arr[slot] = -arr[slot] for i in range(len(arr)): if arr[i] is None or arr[i] > 0: return i + 1 return len(arr) + 1 def main(): arr = [-1, 0, 5, 1, -2, -4, 6] arr = [-1] ans = get_smallest_missing(arr) print(ans) print(arr) main()
""" You are given an unsorted array with both positive and negative elements. The positive numbers need not be in range from 1 to n !! You have to find the smallest positive number missing from the array in O(n) time using constant extra space. Examples Input: {2, 3, 7, 6, 8, -1, -10, 15} Output: 1 Input: { 2, 3, -7, 6, 8, 1, -10, 15 } Output: 4 Input: {1, 1, 0, -1, -2} Output: 2 SOLUTION: Exactly same as the solution to `find_duplicates.py`. Basically, we mark the occurrence of a number 'elem' by making arr[elem] as -ve. Then we traverse the array; the first index i such that arr[i] is +ve is the answer. Note that before doing this we'll need to bring all the non-positive elements to the left side to segregate them since they dont matter. """ def segregate_non_positive(arr): next_slot = 0 for i in range(len(arr)): if arr[i] <= 0: (arr[i], arr[next_slot]) = (arr[next_slot], arr[i]) arr[next_slot] = None next_slot += 1 return next_slot def get_smallest_missing(arr): idx = segregate_non_positive(arr) for i in range(idx, len(arr)): slot = abs(arr[i]) - 1 if slot >= len(arr): continue if arr[slot] is None: arr[slot] = 0 elif arr[slot] > 0: arr[slot] = -arr[slot] for i in range(len(arr)): if arr[i] is None or arr[i] > 0: return i + 1 return len(arr) + 1 def main(): arr = [-1, 0, 5, 1, -2, -4, 6] arr = [-1] ans = get_smallest_missing(arr) print(ans) print(arr) main()
class MigrationScope:
    """Context manager that installs an engine as the process-wide current
    engine for the duration of a ``with`` block; scopes nest via ``old``."""

    # Class-level slot holding the currently active engine (None outside any scope).
    _engine = None

    @classmethod
    def engine(cls):
        """Return the active engine; asserts that some scope is active.

        NOTE(review): on instances this classmethod is shadowed by the
        ``self.engine`` attribute set in ``__init__`` -- call it on the class
        (``MigrationScope.engine()``).  Confirm the shadowing is intended.
        """
        assert cls._engine is not None
        return cls._engine

    def __init__(self, engine):
        # ``old`` remembers the previously active engine so scopes can nest.
        self.old = None
        self.engine = engine

    def __enter__(self):
        self.old = MigrationScope._engine
        MigrationScope._engine = self.engine

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore whatever engine was active before this scope was entered.
        MigrationScope._engine = self.old
class MigrationScope:
    """Context manager that installs an engine as the process-wide current
    engine for the duration of a ``with`` block; scopes nest via ``old``.

    Fixed: the class had been renamed ``Migrationscope`` while its methods
    still referenced ``MigrationScope``, so entering/exiting a scope raised
    NameError.  The original CapWords name is restored consistently.
    """

    _engine = None  # currently active engine, or None outside any scope

    @classmethod
    def engine(cls):
        """Return the active engine; asserts that some scope is active."""
        assert cls._engine is not None
        return cls._engine

    def __init__(self, engine):
        # ``old`` remembers the previously active engine so scopes can nest.
        self.old = None
        self.engine = engine

    def __enter__(self):
        self.old = MigrationScope._engine
        MigrationScope._engine = self.engine

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore whatever engine was active before this scope was entered.
        MigrationScope._engine = self.old
# Pangram check: read the (unused) string length, then report whether the
# string contains 26 distinct characters after lower-casing.
n = int(input())
distinct = set(input().lower())
print("YES" if len(distinct) == 26 else "NO")
# Length of the string (read from stdin but not used below).
n = int(input())
# Lower-case so upper/lower variants count as the same character.
s = input().lower()
# 26 distinct characters after folding -> treat as a pangram.
if len(set(s)) == 26:
    print('YES')
else:
    print('NO')
# username ,password => database # 'Bla Bla' , '123456' a, b, c, d = 5, 5, 10, 4 password = '1234' username = 'Bla Bla' result = (a == b) # true result = (a == c) # false result = ('blbl'== username) result = ('Bla Bla'== username) result = (a != b) result = (a != c) result = (a > c) result = (a < c) result = (a >= b) result = (c <= b) result = (True == 1) result = (False == 0) result = False+ True+40 print(result)
# Demo of comparison operators.  Each assignment overwrites ``result``, so
# only the final arithmetic expression is printed.
(a, b, c, d) = (5, 5, 10, 4)
password = '1234'
username = 'Bla Bla'
result = a == b  # True: 5 == 5
result = a == c  # False: 5 == 10
result = 'blbl' == username     # False: strings differ
result = 'Bla Bla' == username  # True: exact match
result = a != b  # False
result = a != c  # True
result = a > c   # False
result = a < c   # True
result = a >= b  # True: 5 >= 5
result = c <= b  # False: 10 <= 5
result = True == 1   # True: bool is a subtype of int
result = False == 0  # True
# Booleans act as 0/1 in arithmetic: 0 + 1 + 40 == 41.
result = False + True + 40
print(result)
def csReverseIntegerBits(n):
    """Reverse the binary digits of non-negative integer ``n``.

    e.g. 417 (0b110100001) -> 267 (0b100001011); 267 -> 417; 0 -> 0.
    """
    return int(''.join(reversed(bin(n)[2:])), 2)


def csBinaryToASCII(binary):
    """Decode an ASCII-encoded binary string (eight bits per character).

    Returns '' for the empty string.
    """
    if binary == "":
        return ""
    number = int(binary, 2)
    return number.to_bytes((number.bit_length() + 7) // 8, 'big').decode()


def csRaindrops(number):
    """Build the "raindrop sounds" string for ``number``.

    Appends "Pling" if 3 is a factor, "Plang" if 5 is, "Plong" if 7 is; if
    none of 3, 5, 7 divide it, returns the digits of the number itself.

    Fixed: the original if/elif chain returned after the *first* matching
    factor, so the combined results its own spec comments promised
    ("PlingPlangPlong" for 105, "PlingPlang" for 15, ...) were unreachable.
    The sounds are now accumulated across all three factor checks.
    """
    sounds = ""
    if number % 3 == 0:
        sounds += "Pling"
    if number % 5 == 0:
        sounds += "Plang"
    if number % 7 == 0:
        sounds += "Plong"
    return sounds if sounds else str(number)
def cs_reverse_integer_bits(n):
    """Reverse the binary digits of the non-negative integer n.

    cs_reverse_integer_bits(417) -> 267 (110100001 reversed is 100001011).
    """
    return int(''.join(reversed(bin(n)[2:])), 2)


def cs_binary_to_ascii(binary):
    """Decode an ASCII-encoded binary string, 8 bits per character."""
    if binary == '':
        return ''
    number = int(binary, 2)
    return number.to_bytes((number.bit_length() + 7) // 8, 'big').decode()


def cs_raindrops(number):
    """Return the "raindrop sounds" for number: "Pling" when divisible by 3,
    "Plang" by 5, "Plong" by 7, concatenated; otherwise the digits.

    Fix: the original elif chain returned on the first matching factor,
    so the combined branches (e.g. 'PlingPlang' for 15) were unreachable.
    """
    sounds = ''
    if number % 3 == 0:
        sounds += 'Pling'
    if number % 5 == 0:
        sounds += 'Plang'
    if number % 7 == 0:
        sounds += 'Plong'
    return sounds if sounds else str(number)
class GutenbergBook:
    """Metadata for one Project Gutenberg book (books data source).

    Convenience accessors over author, title, language, genre, subject,
    size statistics, URL, and download count.

    Fix: __init__ stored the death year as ``self.author_death`` while the
    ``death`` property reads ``self._author_death``, so accessing
    ``book.death`` raised AttributeError; the attribute is now
    ``_author_death`` like its siblings.

    @author Matthew Mcquaigue, Kalpathi Subramanian
    @date 2/1/17, 12/29/20
    """

    def __init__(self, author_name: str = "", author_birth: int = 0,
                 author_death: int = 0, title: str = "", lang: str = "",
                 genre: str = "", subject: str = "", num_chars: int = 0,
                 num_words: int = 0, num_sentences: int = 0,
                 num_difficult_words: int = 0, url: str = "",
                 downloads: int = 0):
        """Store all metadata fields; see the class docstring for meanings."""
        self._author_name = author_name
        self._author_birth = author_birth
        self._author_death = author_death  # fix: was self.author_death
        self._title = title
        self._lang = lang
        self._genre = genre
        self._subject = subject
        self._num_chars = num_chars
        self._num_words = num_words
        self._num_sentences = num_sentences
        self._num_difficult_words = num_difficult_words
        self._url = url
        self._downloads = downloads

    @property
    def name(self):
        """Author's name."""
        return self._author_name

    @name.setter
    def name(self, n):
        self._author_name = n

    @property
    def birth(self):
        """Author's birth date."""
        return self._author_birth

    @birth.setter
    def birth(self, b):
        self._author_birth = b

    @property
    def death(self):
        """Author's date of death."""
        return self._author_death

    @death.setter
    def death(self, d):
        self._author_death = d

    @property
    def title(self):
        """Book title."""
        return self._title

    @title.setter
    def title(self, t):
        self._title = t

    @property
    def lang(self):
        """Book language."""
        return self._lang

    @lang.setter
    def lang(self, l):
        self._lang = l

    @property
    def genre(self):
        """Book genres."""
        return self._genre

    @genre.setter
    def genre(self, g):
        self._genre = g

    @property
    def subject(self):
        """Book subject."""
        return self._subject

    @subject.setter
    def subject(self, s):
        self._subject = s

    @property
    def url(self):
        """Book URL."""
        return self._url

    @url.setter
    def url(self, u):
        self._url = u

    @property
    def num_chars(self):
        """Number of characters in the book."""
        return self._num_chars

    @num_chars.setter
    def num_chars(self, n):
        self._num_chars = n

    @property
    def num_words(self):
        """Number of words in the book."""
        return self._num_words

    @num_words.setter
    def num_words(self, w):
        self._num_words = w

    @property
    def num_sentences(self):
        """Number of sentences in the book."""
        return self._num_sentences

    @num_sentences.setter
    def num_sentences(self, n):
        self._num_sentences = n

    @property
    def num_difficult_words(self):
        """Number of difficult words in the book."""
        return self._num_difficult_words

    @num_difficult_words.setter
    def num_difficult_words(self, w):
        self._num_difficult_words = w

    @property
    def downloads(self):
        """Number of downloads of the book."""
        return self._downloads

    @downloads.setter
    def downloads(self, d):
        self._downloads = d
class Gutenbergbook:
    """Metadata for one Project Gutenberg book (books data source).

    Fixes: __init__ stored the death year as ``self.author_death`` while
    the ``death`` property reads ``self._author_death`` (AttributeError on
    access); a stray dead string literal between the ``title`` setter and
    the ``lang`` property has been removed.

    @author Matthew Mcquaigue, Kalpathi Subramanian
    @date 2/1/17, 12/29/20
    """

    def __init__(self, author_name: str = '', author_birth: int = 0,
                 author_death: int = 0, title: str = '', lang: str = '',
                 genre: str = '', subject: str = '', num_chars: int = 0,
                 num_words: int = 0, num_sentences: int = 0,
                 num_difficult_words: int = 0, url: str = '',
                 downloads: int = 0):
        """Store all metadata fields; see the class docstring."""
        self._author_name = author_name
        self._author_birth = author_birth
        self._author_death = author_death  # fix: was self.author_death
        self._title = title
        self._lang = lang
        self._genre = genre
        self._subject = subject
        self._num_chars = num_chars
        self._num_words = num_words
        self._num_sentences = num_sentences
        self._num_difficult_words = num_difficult_words
        self._url = url
        self._downloads = downloads

    @property
    def name(self):
        """Author's name."""
        return self._author_name

    @name.setter
    def name(self, n):
        self._author_name = n

    @property
    def birth(self):
        """Author's birth date."""
        return self._author_birth

    @birth.setter
    def birth(self, b):
        self._author_birth = b

    @property
    def death(self):
        """Author's date of death."""
        return self._author_death

    @death.setter
    def death(self, d):
        self._author_death = d

    @property
    def title(self):
        """Book title."""
        return self._title

    @title.setter
    def title(self, t):
        self._title = t

    @property
    def lang(self):
        """Book language."""
        return self._lang

    @lang.setter
    def lang(self, l):
        self._lang = l

    @property
    def genre(self):
        """Book genres."""
        return self._genre

    @genre.setter
    def genre(self, g):
        self._genre = g

    @property
    def subject(self):
        """Book subject."""
        return self._subject

    @subject.setter
    def subject(self, s):
        self._subject = s

    @property
    def url(self):
        """Book URL."""
        return self._url

    @url.setter
    def url(self, u):
        self._url = u

    @property
    def num_chars(self):
        """Number of characters in the book."""
        return self._num_chars

    @num_chars.setter
    def num_chars(self, n):
        self._num_chars = n

    @property
    def num_words(self):
        """Number of words in the book."""
        return self._num_words

    @num_words.setter
    def num_words(self, w):
        self._num_words = w

    @property
    def num_sentences(self):
        """Number of sentences in the book."""
        return self._num_sentences

    @num_sentences.setter
    def num_sentences(self, n):
        self._num_sentences = n

    @property
    def num_difficult_words(self):
        """Number of difficult words in the book."""
        return self._num_difficult_words

    @num_difficult_words.setter
    def num_difficult_words(self, w):
        self._num_difficult_words = w

    @property
    def downloads(self):
        """Number of downloads of the book."""
        return self._downloads

    @downloads.setter
    def downloads(self, d):
        self._downloads = d
# %% [1290. Convert Binary Number in a Linked List to Integer](https://leetcode.com/problems/convert-binary-number-in-a-linked-list-to-integer/)
class Solution:
    def getDecimalValue(self, head: ListNode) -> int:
        """Interpret the list's 0/1 node values (MSB first) as an integer."""
        bits = "".join(map(str, to_iter(head)))
        return int(bits, 2)


def to_iter(ln, isval=True):
    """Walk a linked list, yielding node values (default) or the nodes."""
    node = ln
    while node:
        yield node.val if isval else node
        node = node.next
class Solution:
    def get_decimal_value(self, head: ListNode) -> int:
        """Fold the list's 0/1 node values (most significant first) into an int."""
        digits = ''.join(str(bit) for bit in to_iter(head))
        return int(digits, 2)


def to_iter(ln, isval=True):
    """Yield each node's value (or the node itself when isval is False)."""
    while ln:
        yield ln.val if isval else ln
        ln = ln.next
class Solution:
    """
    @param a: the list of salary
    @param target: the target of the sum
    @return: the cap it should be
    """

    def getCap(self, a, target):
        """Binary-search the smallest cap whose capped sum reaches target."""
        lo, hi = min(a), target // len(a) + 1
        while lo + 1 < hi:
            mid = (lo + hi) // 2
            if self.sumByCap(a, mid) < target:
                lo = mid
            else:
                hi = mid
        return lo if self.sumByCap(a, lo) >= target else hi

    def sumByCap(self, a, cap):
        """Total salary once every value below cap is raised to cap."""
        return sum(cap if num < cap else num for num in a)


# NOTE(review): this second class shadows the first `Solution` above —
# after import only this definition is visible.
class Solution:
    """
    @param a: the list of salary
    @param target: the target of the sum
    @return: the cap it should be
    """

    def getCap(self, a, target):
        """Greedy variant: sweep salaries from the largest down."""
        a.sort()
        summ = 0
        for i in range(len(a) - 1, -1, -1):
            ex = (target - summ) // (i + 1)
            print(summ, ex, a[i])
            if ex >= a[i]:
                remainder = (target - summ) % (i + 1)
                return ex + 1 if remainder else ex
            summ += a[i]
        return -1
class Solution:
    """
    @param a: the list of salary
    @param target: the target of the sum
    @return: the cap it should be
    """

    def get_cap(self, a, target):
        """Binary-search the smallest cap whose capped sum reaches target.

        Fix: the helper was renamed ``sum_by_cap`` but this method still
        called ``self.sumByCap``, raising AttributeError at runtime.
        """
        start = min(a)
        end = target // len(a) + 1
        while start + 1 < end:
            mid = start + (end - start) // 2
            if self.sum_by_cap(a, mid) < target:
                start = mid
            else:
                end = mid
        if self.sum_by_cap(a, start) >= target:
            return start
        return end

    def sum_by_cap(self, a, cap):
        """Total salary once every value below cap is raised to cap."""
        res = 0
        for num in a:
            res += cap if num < cap else num
        return res


# NOTE(review): this second class shadows the first `Solution` above —
# after import only this definition is visible.
class Solution:
    """
    @param a: the list of salary
    @param target: the target of the sum
    @return: the cap it should be
    """

    def get_cap(self, a, target):
        """Greedy variant: sweep salaries from the largest down."""
        a.sort()
        summ = 0
        for i in range(len(a) - 1, -1, -1):
            ex = (target - summ) // (i + 1)
            print(summ, ex, a[i])
            if ex >= a[i]:
                return ex + 1 if (target - summ) % (i + 1) else ex
            summ += a[i]
        return -1
#!/usr/bin/env python3
# https://cses.fi/problemset/task/1083
# Missing Number: given n and n-1 distinct values from 1..n, print the
# value that is absent (sum of 1..n minus the sum of the given values).
n = int(input())
expected_total = n * (n + 1) // 2
print(expected_total - sum(map(int, input().split())))
# Missing number: total of 1..n minus the sum of the n-1 given values.
n = int(input())
given = map(int, input().split())
print(n * (n + 1) // 2 - sum(given))
# Clear the current selection as a single undoable step.
# NOTE(review): `ix` is presumably the Clarisse scripting module — confirm.
ix.enable_command_history()
# Deselect everything in the application's current selection.
ix.application.get_selection().deselect_all()
ix.disable_command_history()
# Clear the current selection while command history is enabled, so the
# action is recorded/undoable; history is switched back off afterwards.
# NOTE(review): `ix` is presumably the Clarisse scripting module — confirm.
ix.enable_command_history()
ix.application.get_selection().deselect_all()
ix.disable_command_history()
class UnsupportedLanguageError(AttributeError):
    """Signals that a caller supplied a language that is not supported."""


class InvalidGeolocationError(ValueError):
    """Signals that a caller supplied an invalid latitude or longitude."""
class Unsupportedlanguageerror(AttributeError):
    """Raised for a language that this code does not support."""


class Invalidgeolocationerror(ValueError):
    """Raised for a latitude or longitude that is out of range."""
# Definition for singly-linked list.
# class ListNode:
#     def __init__(self, val=0, next=None):
#         self.val = val
#         self.next = next
class Solution:
    def reverseKGroup(self, head: ListNode, k: int) -> ListNode:
        """Reverse the list in consecutive groups of k nodes.

        A trailing group shorter than k is left in its original order.
        """
        sentinel = ListNode(-1)
        sentinel.next = head
        group_prev = sentinel
        node = head
        while node:
            # Advance to the k-th node of the current group; if the list
            # runs out first, the remainder stays untouched.
            group_tail = group_prev
            for _ in range(k):
                group_tail = group_tail.next
                if not group_tail:
                    return sentinel.next
            new_head, new_tail = self.reverse(node, group_tail)
            group_prev.next = new_head
            group_prev = new_tail
            node = new_tail.next
        return sentinel.next

    def reverse(self, head, tail):
        """Reverse the sublist head..tail inclusive.

        Returns (new head, new tail); the new tail's next already points
        at the node that followed tail.
        """
        prev = tail.next
        curr = head
        while prev != tail:
            nxt = curr.next
            curr.next = prev
            prev = curr
            curr = nxt
        return prev, head
class Solution:
    def reverse_k_group(self, head: ListNode, k: int) -> ListNode:
        """Reverse the list k nodes at a time; a final short group is kept.

        Fix: the original instantiated ``list_node(-1)``, an undefined
        name — the class is ``ListNode``.
        """
        dummy = ListNode(-1)
        dummy.next = head
        pre = dummy
        while head:
            # Find the tail of the next k-group; bail out if it is short.
            tail = pre
            for _ in range(k):
                tail = tail.next
                if not tail:
                    return dummy.next
            newhead, newtail = self.reverse(head, tail)
            pre.next = newhead
            pre = newtail
            head = newtail.next
        return dummy.next

    def reverse(self, head, tail):
        """Reverse nodes head..tail inclusive -> (new head, new tail)."""
        prev = tail.next
        curr = head
        while prev != tail:
            tmp = curr.next
            curr.next = prev
            prev = curr
            curr = tmp
        return prev, head
# File: Grid.py
# Description: Assignment 11 | Greatest Path Sum in a Grid
# Student Name: Matthew Maxwell
# Student UT EID: mrm5632
# Course Name: CS 313E
# Unique Number: 50205
# Date Created: 10-07-2019
# Date Last Modified: 10-13-2019


def count_paths(n, row, col):
    """Recursively count the monotone (right/down) paths from (row, col)
    to the bottom-right corner of an n x n grid."""
    if row == n - 1 or col == n - 1:
        return 1
    return count_paths(n, row + 1, col) + count_paths(n, row, col + 1)


def path_sum(grid, n, row, col):
    """Recursively compute the greatest right/down path sum from (row, col)."""
    if row == n - 1 and col == n - 1:
        return grid[row][col]
    if row == n - 1:
        return grid[row][col] + path_sum(grid, n, row, col + 1)
    if col == n - 1:
        return grid[row][col] + path_sum(grid, n, row + 1, col)
    return grid[row][col] + max(path_sum(grid, n, row + 1, col),
                                path_sum(grid, n, row, col + 1))


def path_sum_dp(grid, path):
    """Dynamic-programming greatest path sum; appends the winning path's
    values to ``path`` and returns the sum.

    Fix: the original reconstructed the path FORWARD by comparing
    dp[i][j+1] against dp[i+1][j], but those prefix values need not lie
    on a path through the current cell (and ties always stepped down), so
    the reported path could disagree with the returned sum. Backtracking
    from (n, n) along the larger predecessor is always consistent.
    """
    n = len(grid)
    dp = [[0] * (n + 1) for _ in range(n + 1)]
    for i in range(1, n + 1):
        for j in range(1, n + 1):
            dp[i][j] = grid[i - 1][j - 1] + max(dp[i][j - 1], dp[i - 1][j])
    # Backtrack from the bottom-right corner toward (1, 1).
    i = j = n
    reversed_path = []
    while i > 1 or j > 1:
        reversed_path.append(grid[i - 1][j - 1])
        if i == 1:
            j -= 1
        elif j == 1:
            i -= 1
        elif dp[i - 1][j] >= dp[i][j - 1]:
            i -= 1
        else:
            j -= 1
    reversed_path.append(grid[0][0])
    path.extend(reversed(reversed_path))
    return dp[n][n]


def main():
    """Read ./grid.txt (dimension, then the grid rows) and print the path
    count, the greatest path sum, and the path itself."""
    in_file = open("./grid.txt", "r")
    dim = int(in_file.readline().strip())
    grid = []
    for _ in range(dim):
        grid.append([int(tok) for tok in in_file.readline().split()])
    in_file.close()

    num_paths = count_paths(dim, 0, 0)
    print('Number of paths in a grid of dimension', dim, 'is', num_paths)
    print()

    path = []
    max_path_sum = path_sum(grid, dim, 0, 0)
    max_path_sum = path_sum_dp(grid, path)
    print('Greatest path sum is', max_path_sum, "\n")
    print(f"Actual path is {path}")


main()
def count_paths(n, row, col):
    """Count the right/down paths from (row, col) to the corner of an
    n x n grid, by recursion."""
    if row == n - 1 or col == n - 1:
        return 1
    return count_paths(n, row + 1, col) + count_paths(n, row, col + 1)


def path_sum(grid, n, row, col):
    """Greatest right/down path sum from (row, col), brute-force recursion."""
    if row == n - 1 and col == n - 1:
        return grid[row][col]
    if row == n - 1:
        return grid[row][col] + path_sum(grid, n, row, col + 1)
    if col == n - 1:
        return grid[row][col] + path_sum(grid, n, row + 1, col)
    return grid[row][col] + max(path_sum(grid, n, row + 1, col),
                                path_sum(grid, n, row, col + 1))


def path_sum_dp(grid, path):
    """DP greatest path sum; appends the winning path's values to ``path``.

    Fix: the forward greedy reconstruction compared dp[i][j+1] with
    dp[i+1][j], which are prefix maxima that need not extend the current
    cell's path (and ties always stepped down), so the reported path
    could be inconsistent with the returned sum; we now backtrack from
    (n, n) along the larger predecessor.
    """
    n = len(grid)
    dp = [[0] * (n + 1) for _ in range(n + 1)]
    for i in range(1, n + 1):
        for j in range(1, n + 1):
            dp[i][j] = grid[i - 1][j - 1] + max(dp[i][j - 1], dp[i - 1][j])
    i = j = n
    backwards = []
    while i > 1 or j > 1:
        backwards.append(grid[i - 1][j - 1])
        if i == 1:
            j -= 1
        elif j == 1:
            i -= 1
        elif dp[i - 1][j] >= dp[i][j - 1]:
            i -= 1
        else:
            j -= 1
    backwards.append(grid[0][0])
    path.extend(reversed(backwards))
    return dp[n][n]


def main():
    """Read ./grid.txt and report path count, greatest sum, and the path."""
    in_file = open('./grid.txt', 'r')
    dim = int(in_file.readline().strip())
    grid = []
    for _ in range(dim):
        grid.append([int(tok) for tok in in_file.readline().split()])
    in_file.close()
    num_paths = count_paths(dim, 0, 0)
    print('Number of paths in a grid of dimension', dim, 'is', num_paths)
    print()
    path = []
    max_path_sum = path_sum(grid, dim, 0, 0)
    max_path_sum = path_sum_dp(grid, path)
    print('Greatest path sum is', max_path_sum, '\n')
    print(f'Actual path is {path}')


main()
# Pipeline configuration constants.

# data
TRAINING_DATA_FILE = "raw.csv"
PIPELINE_NAME = 'model'
TARGET = 'RUL'

# input variables
FEATURES = ['engine_id', 'time_cycle', 'op_set_1', 'op_set_2', 'op_set_3',
            'sensor_1', 'sensor_2', 'sensor_3', 'sensor_4', 'sensor_5',
            'sensor_6', 'sensor_7', 'sensor_8', 'sensor_9', 'sensor_10',
            'sensor_11', 'sensor_12', 'sensor_13', 'sensor_14', 'sensor_15',
            'sensor_16', 'sensor_17', 'sensor_18', 'sensor_19', 'sensor_20',
            'sensor_21']

# must be dropped afterwards
labels = ['engine_id', 'time_cycle', 'max_cycle']  # id/label cols
cst_features = ['op_set_3', 'sensor_18', 'sensor_19']  # constant features
quasi_cst_features = ['sensor_1', 'sensor_5', 'sensor_6', 'sensor_10',
                      'sensor_16']  # quasi-constant features
high_corr_to_drop = ['sensor_9', 'sensor_14']  # fan speed & corrected fan speed
discreet_features = ['op_set_2', 'sensor_17']

# Fix: the original appended ``high_corr_to_drop['sensor_14']``, which
# indexes a list with a string and raises TypeError at import time;
# the whole list is meant to be concatenated.
DROP_FEATURES = labels + cst_features + quasi_cst_features + high_corr_to_drop

TEMPORAL_VARS = 'YearRemodAdd'

# variables to log transform
NUMERICALS_LOG_VARS = ['LotFrontage', '1stFlrSF', 'GrLivArea']
# Pipeline configuration constants (snake_case variant).
training_data_file = 'raw.csv'
pipeline_name = 'model'
target = 'RUL'

# input variables
features = ['engine_id', 'time_cycle', 'op_set_1', 'op_set_2', 'op_set_3',
            'sensor_1', 'sensor_2', 'sensor_3', 'sensor_4', 'sensor_5',
            'sensor_6', 'sensor_7', 'sensor_8', 'sensor_9', 'sensor_10',
            'sensor_11', 'sensor_12', 'sensor_13', 'sensor_14', 'sensor_15',
            'sensor_16', 'sensor_17', 'sensor_18', 'sensor_19', 'sensor_20',
            'sensor_21']

labels = ['engine_id', 'time_cycle', 'max_cycle']  # id/label columns
cst_features = ['op_set_3', 'sensor_18', 'sensor_19']  # constant features
quasi_cst_features = ['sensor_1', 'sensor_5', 'sensor_6', 'sensor_10',
                      'sensor_16']  # quasi-constant features
high_corr_to_drop = ['sensor_9', 'sensor_14']  # highly correlated pair
discreet_features = ['op_set_2', 'sensor_17']

# Fix: ``high_corr_to_drop['sensor_14']`` indexed a list with a string
# (TypeError at import); the list itself must be concatenated.
drop_features = labels + cst_features + quasi_cst_features + high_corr_to_drop

temporal_vars = 'YearRemodAdd'
numericals_log_vars = ['LotFrontage', '1stFlrSF', 'GrLivArea']
# -*- coding: utf-8 -*-
#
# panel_label.py
#
# Copyright 2017 Sebastian Spreizer
# The MIT License


def panel_label(ax, label, x=-0.4, y=1.0):
    """Place a bold panel label at axes-fraction coordinates (x, y)."""
    style = dict(transform=ax.transAxes, fontsize=10,
                 fontweight='bold', va='bottom', ha='left')
    ax.text(x, y, label, **style)
def panel_label(ax, label, x=-0.4, y=1.0):
    """Draw *label* in bold at (x, y), expressed in axes coordinates."""
    ax.text(
        x,
        y,
        label,
        transform=ax.transAxes,
        fontsize=10,
        fontweight='bold',
        va='bottom',
        ha='left',
    )
def get_zt_price(price):
    """Limit-up price: +10 %, rounded half-up to two decimals.

    The +0.50001 fudge compensates for binary floating point error
    (e.g. 0.5 can be stored as 0.4999999999998).
    """
    cents = price * 1.1 * 100 + 0.50001
    return int(cents) / 100.0


def get_dt_price(price):
    """Limit-down price: -10 %, rounded half-up to two decimals."""
    cents = price * 0.9 * 100 + 0.50001
    return int(cents) / 100.0
def get_zt_price(price):
    """Return the price after a +10% move, rounded half-up to 2 decimals."""
    # +0.50001 guards against binary float error when truncating.
    return int(price * 1.1 * 100 + 0.50001) / 100.0


def get_dt_price(price):
    """Return the price after a -10% move, rounded half-up to 2 decimals."""
    return int(price * 0.9 * 100 + 0.50001) / 100.0
# generated from genmsg/cmake/pkg-genmsg.context.in messages_str = "/home/rudy-001/Relocalisation/src/select_pcd/msg/updated_coord.msg" services_str = "" pkg_name = "select_pcd" dependencies_str = "sensor_msgs;std_msgs" langs = "gencpp;geneus;genlisp;gennodejs;genpy" dep_include_paths_str = "select_pcd;/home/rudy-001/Relocalisation/src/select_pcd/msg;sensor_msgs;/opt/ros/melodic/share/sensor_msgs/cmake/../msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg;geometry_msgs;/opt/ros/melodic/share/geometry_msgs/cmake/../msg" PYTHON_EXECUTABLE = "/usr/bin/python2" package_has_static_sources = '' == 'TRUE' genmsg_check_deps_script = "/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
# Message-generation context (snake_case copy); values mirror the
# generated genmsg/CMake context above as semicolon-separated lists.
messages_str = '/home/rudy-001/Relocalisation/src/select_pcd/msg/updated_coord.msg'
services_str = ''
pkg_name = 'select_pcd'
dependencies_str = 'sensor_msgs;std_msgs'
langs = 'gencpp;geneus;genlisp;gennodejs;genpy'
dep_include_paths_str = 'select_pcd;/home/rudy-001/Relocalisation/src/select_pcd/msg;sensor_msgs;/opt/ros/melodic/share/sensor_msgs/cmake/../msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg;geometry_msgs;/opt/ros/melodic/share/geometry_msgs/cmake/../msg'
python_executable = '/usr/bin/python2'
# Evaluates to False unless the template substituted the literal 'TRUE'.
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = '/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py'
# coding: utf-8


class RpiCarDExp(Exception):
    """Project exception carrying a numeric return code and a message."""

    def __init__(self, retcode, retmsg):
        """Store the return code and message for later retrieval."""
        super().__init__()
        self._retcode = retcode
        self._retmsg = retmsg

    def retcode(self):
        """Return the stored return code."""
        return self._retcode

    def retmsg(self):
        """Return the stored message."""
        return self._retmsg

    def __str__(self):
        return self._retmsg
class Rpicardexp(Exception):
    """Exception type pairing a return code with a human-readable message."""

    def __init__(self, retcode, retmsg):
        """Keep *retcode* and *retmsg* on the instance."""
        Exception.__init__(self)
        self._retcode, self._retmsg = retcode, retmsg

    def retcode(self):
        """The stored return code."""
        return self._retcode

    def retmsg(self):
        """The stored message."""
        return self._retmsg

    def __str__(self):
        # String form is just the message.
        return self._retmsg
# compare three values, return true only if 2 or more values are equal
# (bool True has integer value 1, False has 0, so booleans coerce too)
def compare(a=None, b=None, c=None):
    """Return True when at least two of a, b, c are equal after int coercion.

    Non-coercible inputs are reported and yield False.
    """
    value = False
    try:
        a = int(a)
        b = int(b)
        c = int(c)
        print("Valid values.")
        value = a == b or a == c or b == c
    except ValueError:
        print("ValueError: Only, numbers are allowed.")
    except TypeError:
        print("TypeError: Please enter only numbers.")
    except Exception:
        print(Exception)
        print("Exception, Only numbers are allowed!")
    finally:
        print("Values ", a, b, c)
    return value


# all integers
print("Result: ", compare(1, 2, 3))
print("\n")
# numbers in str
print("Result: ", compare("2", "3", "4"))
print("\n")
# alphabets
print("Result: ", compare("a", "b", "c"))
print("\n")
# one boolean
print("Result: ", compare(True))
print("\n")
# three boolean values
print("Result: ", compare(True, True, False))
print("\n")
# integer with list
print("Result: ", compare(1, 2, [True]))
print("\n")
# str with boolean
print("Result: ", compare("2", "2", True))
print("\n")
# equal numbers in str
print("Result: ", compare("2", "2", "2"))
print("\n")
# empty str
print("Result: ", compare("-1", ""))
print("\n")
# three empty str
print("Result: ", compare("", "", ""))
print("\n")
# floats
print("Result: ", compare("1.1", "1", "1.1"))
"""
EZEBUIRO
UCHECHUKWU
VINCENT
"""
def compare(a=None, b=None, c=None):
    """True when at least two of the three int-coerced values coincide."""
    value = False
    try:
        a = int(a)
        b = int(b)
        c = int(c)
        print('Valid values.')
        # Fewer than three distinct values means some pair is equal.
        value = len({a, b, c}) < 3
    except ValueError:
        print('ValueError: Only, numbers are allowed.')
    except TypeError:
        print('TypeError: Please enter only numbers.')
    except Exception:
        print(Exception)
        print('Exception, Only numbers are allowed!')
    finally:
        print('Values ', a, b, c)
    return value


print('Result: ', compare(1, 2, 3))
print('\n')
print('Result: ', compare('2', '3', '4'))
print('\n')
print('Result: ', compare('a', 'b', 'c'))
print('\n')
print('Result: ', compare(True))
print('\n')
print('Result: ', compare(True, True, False))
print('\n')
print('Result: ', compare(1, 2, [True]))
print('\n')
print('Result: ', compare('2', '2', True))
print('\n')
print('Result: ', compare('2', '2', '2'))
print('\n')
print('Result: ', compare('-1', ''))
print('\n')
print('Result: ', compare('', '', ''))
print('\n')
print('Result: ', compare('1.1', '1', '1.1'))
'\nEZEBUIRO\nUCHECHUKWU\nVINCENT\n'
def run():
    """Echo each character of the user's text, stopping at the first 'o'
    (which is not printed)."""
    text = input('Write a text: ')
    for char in text:
        if char == 'o':
            break  # stop at the first 'o'
        print(char)


if __name__ == '__main__':
    run()
def run():
    """Print characters of the input text one per line until an 'o' appears."""
    text = input('Write a text: ')
    for letter in text:
        if letter == 'o':
            break
        print(letter)


if __name__ == '__main__':
    run()
# Read test cases from stdin until a 0 count; print each case number
# followed by its n lines sorted lexicographically.
_readline = __import__('sys').stdin.readline

out = []
case_no = 1
while True:
    n = int(_readline())
    if n == 0:
        break
    out.append(str(case_no))
    case_no += 1
    names = sorted(_readline().strip() for _ in range(n))
    out.extend(names)
print('\n'.join(out))
# Reads blocks of lines from stdin (count, then that many strings) until a
# zero count; emits the running case number followed by the sorted strings.
input = __import__('sys').stdin.readline

out_lines = []
case_number = 1
while True:
    n = int(input())
    if n == 0:
        break
    out_lines.append(str(case_number))
    case_number += 1
    entries = [input().strip() for _ in range(n)]
    entries.sort()
    out_lines.extend(entries)
print('\n'.join(out_lines))
"""Depth-first traversing in a graph. Call a function _f on every vertex accessible for vertex _v, in depth-first prefix order Source: programming-idioms.org """ # Implementation author: bukzor # Created on 2018-04-08T19:17:47.214988Z # Last modified on 2018-04-08T19:18:25.491984Z # Version 2 # It's best to not recurse in Python when the structure size is unknown, since we have a fixed, small stack size. def depth_first(start, f): seen = set() stack = [start] while stack: vertex = stack.pop() # paroxython: -member_call_method:pop +member_call_method:list:pop f(vertex) seen.add(vertex) stack.extend(v for v in vertex.adjacent if v not in seen)
"""Depth-first traversing in a graph. Call a function _f on every vertex accessible for vertex _v, in depth-first prefix order Source: programming-idioms.org """ def depth_first(start, f): seen = set() stack = [start] while stack: vertex = stack.pop() f(vertex) seen.add(vertex) stack.extend((v for v in vertex.adjacent if v not in seen))
""" Contains the custom exceptions used by the restclients. """ class PhoneNumberRequired(Exception): """Exception for missing phone number.""" pass class InvalidPhoneNumber(Exception): """Exception for invalid phone numbers.""" pass class InvalidNetID(Exception): """Exception for invalid netid.""" pass class InvalidRegID(Exception): """Exception for invalid regid.""" pass class InvalidEmployeeID(Exception): """Exception for invalid employee id.""" pass class InvalidUUID(Exception): """Exception for invalid UUID.""" pass class InvalidSectionID(Exception): """Exception for invalid section id.""" pass class InvalidSectionURL(Exception): """Exception for invalid section url.""" pass class InvalidGroupID(Exception): """Exception for invalid group id.""" pass class InvalidIdCardPhotoSize(Exception): """Exception for invalid photo size.""" pass class InvalidEndpointProtocol(Exception): """Exception for invalid endpoint protocol.""" pass class InvalidCanvasIndependentStudyCourse(Exception): """Exception for invalid Canvas course.""" pass class InvalidCanvasSection(Exception): """Exception for invalid Canvas section.""" pass class InvalidGradebookID: """Exception for invalid gradebook id.""" pass class InvalidIRWSName(Exception): """Exception for invalid IRWS name.""" pass class DataFailureException(Exception): """ This exception means there was an error fetching content in one of the rest clients. You can get the url that failed with .url, the status of the error with .status, and any message with .msg """ def __init__(self, url, status, msg): self.url = url self.status = status self.msg = msg def __str__(self): return ("Error fetching %s. Status code: %s. Message: %s." % (self.url, self.status, self.msg))
""" Contains the custom exceptions used by the restclients. """ class Phonenumberrequired(Exception): """Exception for missing phone number.""" pass class Invalidphonenumber(Exception): """Exception for invalid phone numbers.""" pass class Invalidnetid(Exception): """Exception for invalid netid.""" pass class Invalidregid(Exception): """Exception for invalid regid.""" pass class Invalidemployeeid(Exception): """Exception for invalid employee id.""" pass class Invaliduuid(Exception): """Exception for invalid UUID.""" pass class Invalidsectionid(Exception): """Exception for invalid section id.""" pass class Invalidsectionurl(Exception): """Exception for invalid section url.""" pass class Invalidgroupid(Exception): """Exception for invalid group id.""" pass class Invalididcardphotosize(Exception): """Exception for invalid photo size.""" pass class Invalidendpointprotocol(Exception): """Exception for invalid endpoint protocol.""" pass class Invalidcanvasindependentstudycourse(Exception): """Exception for invalid Canvas course.""" pass class Invalidcanvassection(Exception): """Exception for invalid Canvas section.""" pass class Invalidgradebookid: """Exception for invalid gradebook id.""" pass class Invalidirwsname(Exception): """Exception for invalid IRWS name.""" pass class Datafailureexception(Exception): """ This exception means there was an error fetching content in one of the rest clients. You can get the url that failed with .url, the status of the error with .status, and any message with .msg """ def __init__(self, url, status, msg): self.url = url self.status = status self.msg = msg def __str__(self): return 'Error fetching %s. Status code: %s. Message: %s.' % (self.url, self.status, self.msg)
class Matematica(object):
    """Simple arithmetic helpers, used as ``Matematica.somar(1, 2)``.

    Fix: the methods took no ``self`` and are invoked directly on the
    class; they are now declared ``@staticmethod`` so instance calls
    (``Matematica().somar(1, 2)``) also work — backward compatible with
    the documented class-level usage.
    """

    @staticmethod
    def somar(valor1, valor2):
        """Add two values.

        Parameters:
            valor1 (Number): first addend
            valor2 (Number): second addend

        Example: Matematica.somar(1, 2)
        """
        return valor1 + valor2

    @staticmethod
    def subtrair(valor1, valor2):
        """Subtract valor2 from valor1.

        Parameters:
            valor1 (Number): minuend
            valor2 (Number): subtrahend

        Example: Matematica.subtrair(1, 2)
        """
        return valor1 - valor2
class Matematica(object):
    """Arithmetic utility class invoked as ``Matematica.somar(a, b)``.

    Fix: methods lacked ``self`` and broke when called on an instance;
    marked ``@staticmethod`` so both class- and instance-level calls work.
    (Docstrings translated from Portuguese.)
    """

    @staticmethod
    def somar(valor1, valor2):
        """Return the sum of the two values.

        Parameters:
            valor1 (Number): first addend
            valor2 (Number): second addend
        """
        return valor1 + valor2

    @staticmethod
    def subtrair(valor1, valor2):
        """Return valor1 minus valor2.

        Parameters:
            valor1 (Number): minuend
            valor2 (Number): subtrahend
        """
        return valor1 - valor2
class Solution:
    def minimumEffortPath(self, heights: List[List[int]]) -> int:
        """Return the minimum possible "effort" of a path from the top-left
        to the bottom-right cell, where a path's effort is the maximum
        absolute height difference between consecutive cells.

        Dijkstra's algorithm on the grid, relaxing on the max edge weight
        seen so far instead of a sum of weights.
        """
        rows, cols = len(heights), len(heights[0])
        # best[r][c] = smallest effort known to reach (r, c).
        best = [[math.inf] * cols for _ in range(rows)]
        best[0][0] = 0
        heap = [(0, 0, 0)]  # (effort, row, col)
        while heap:
            effort, row, col = heapq.heappop(heap)
            if (row, col) == (rows - 1, cols - 1):
                # First time the target is popped, its effort is optimal.
                return effort
            for nr, nc in ((row, col + 1), (row, col - 1),
                           (row + 1, col), (row - 1, col)):
                if not (0 <= nr < rows and 0 <= nc < cols):
                    continue
                candidate = max(effort, abs(heights[nr][nc] - heights[row][col]))
                if candidate < best[nr][nc]:
                    best[nr][nc] = candidate
                    heapq.heappush(heap, (candidate, nr, nc))
        return -1
class Solution:
    def minimum_effort_path(self, heights: List[List[int]]) -> int:
        """Return the minimum "effort" (max absolute height difference along
        the path) needed to travel from the top-left to the bottom-right cell.

        Implemented as Dijkstra's algorithm with the path cost defined as the
        maximum edge weight instead of the sum.

        Bug fix: the original assigned ``(h, w)`` but then referenced the
        undefined names ``H`` and ``W``, raising NameError on every call.
        """
        h, w = len(heights), len(heights[0])
        # costs[r][c] = smallest effort known so far to reach (r, c).
        costs = [[math.inf] * w for _ in range(h)]
        costs[0][0] = 0
        pq = [(0, 0, 0)]  # (effort, row, col)
        while pq:
            cost, r, c = heapq.heappop(pq)
            if r == h - 1 and c == w - 1:
                # Dijkstra invariant: first pop of the target is optimal.
                return cost
            for dr, dc in ((0, 1), (0, -1), (1, 0), (-1, 0)):
                newr, newc = r + dr, c + dc
                if 0 <= newr < h and 0 <= newc < w:
                    new_cost = max(cost, abs(heights[newr][newc] - heights[r][c]))
                    if new_cost < costs[newr][newc]:
                        costs[newr][newc] = new_cost
                        heapq.heappush(pq, (new_cost, newr, newc))
        return -1
# Description: Label the main chain atoms with the following: resn,resi,atom name. # Source: placeHolder """ cmd.do('label name n+c+o+ca,"%s%s%s" % (resn,resi,name);') """ cmd.do('label name n+c+o+ca,"%s%s%s" % (resn,resi,name);')
""" cmd.do('label name n+c+o+ca,"%s%s%s" % (resn,resi,name);') """ cmd.do('label name n+c+o+ca,"%s%s%s" % (resn,resi,name);')
## Find Maximum and Minimum Values of a List
## 8 kyu
## https://www.codewars.com//kata/577a98a6ae28071780000989


def minimum(arr):
    """Return the smallest element of *arr*."""
    smallest = min(arr)
    return smallest


def maximum(arr):
    """Return the largest element of *arr*."""
    largest = max(arr)
    return largest
def minimum(arr):
    """Return the smallest value in the list."""
    return sorted(arr)[0]


def maximum(arr):
    """Return the largest value in the list."""
    return sorted(arr)[-1]
# Find this puzzle at:
# https://adventofcode.com/2020/day/2

# Each input line looks like "1-3 a: abcde": a count range, a letter, and
# the password to validate.  Count how many passwords satisfy their policy.
with open('input.txt', 'r') as file:
    puzzle_input = file.read().splitlines()

valid = 0
for entry in puzzle_input:
    policy, letter, password = entry.split()
    low, high = policy.split('-')
    # The letter token includes a trailing ':', so only letter[0] matters.
    occurrences = password.count(letter[0])
    if int(low) <= occurrences <= int(high):
        valid += 1

print(valid)
# Advent of Code 2020, day 2: count passwords whose letter occurrence count
# falls within the "low-high" range given by each line's policy.
with open('input.txt', 'r') as file:
    puzzle_input = file.read().splitlines()


def _is_valid(entry):
    # Entry format: "<low>-<high> <letter>: <password>"; the letter token
    # carries a trailing ':' so only its first character is compared.
    limits, letter, password = entry.split()
    lo, hi = limits.split('-')
    return int(lo) <= password.count(letter[0]) <= int(hi)


valid = sum(1 for line in puzzle_input if _is_valid(line))
print(valid)
# Diameter of Binary Tree: https://leetcode.com/problems/diameter-of-binary-tree/
# The diameter is the number of edges on the longest path between any two
# nodes; the path does not have to pass through the root.


class TreeNode:
    """Plain binary-tree node: a value plus optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


class Solution:
    def diameterOfBinaryTree(self, root: TreeNode) -> int:
        """Return the length (edge count) of the longest path in the tree.

        Post-order DFS: each call reports its subtree depth, while the best
        "through" path (left depth + right depth) is folded into self.result.
        """
        self.result = 0

        def depth(node):
            # Depth in edges of the subtree rooted at `node`; updates the
            # running best diameter as a side effect.
            if node is None:
                return 0
            left_depth = depth(node.left)
            right_depth = depth(node.right)
            if left_depth + right_depth > self.result:
                self.result = left_depth + right_depth
            return max(left_depth, right_depth) + 1

        depth(root)
        return self.result
class Treenode:
    """Binary-tree node: a value plus optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


class Solution:
    def diameter_of_binary_tree(self, root: Treenode) -> int:
        """Return the length (edge count) of the longest path between any
        two nodes of the tree rooted at *root*.

        Bug fix: the annotation referenced ``TreeNode``, but the node class
        here is named ``Treenode`` — annotations are evaluated when the
        method is defined, so the original raised NameError at import time.
        """
        self.result = 0

        def dfs(node):
            # Post-order depth (in edges); folds the best left+right path
            # through each node into self.result as a side effect.
            if node is None:
                return 0
            left = dfs(node.left)
            right = dfs(node.right)
            self.result = max(self.result, left + right)
            return 1 + max(left, right)

        dfs(root)
        return self.result
load("//3rdparty:dependencies.bzl", "maven_dependencies") load("//3rdparty:load.bzl", "declare_maven") def a_app(): print ("Loading [a.app]") maven_dependencies(declare_maven)
# Bazel/Starlark macro file: registers the generated Maven dependency list
# for the [a.app] dependency group.
load('//3rdparty:dependencies.bzl', 'maven_dependencies')
load('//3rdparty:load.bzl', 'declare_maven')

def a_app():
    # Log which group is loading, then declare each Maven artifact by
    # passing the repository-declaring callback to the generated list.
    print('Loading [a.app]')
    maven_dependencies(declare_maven)
''' Mcast Genie Ops Object Outputs for IOSXR. ''' class McastOutput(object): ShowVrfAllDetail = { "default": { "description": "not set", "vrf_mode": "regular", "address_family": { "ipv6 unicast": { "route_target": { "400:1": { "rt_type": "import", "route_target": "400:1"}, "300:1": { "rt_type": "import", "route_target": "300:1"}, "200:1": { "rt_type": "both", "route_target": "200:1"}, "200:2": { "rt_type": "import", "route_target": "200:2"}}}, "ipv4 unicast": { "route_target": { "400:1": { "rt_type": "import", "route_target": "400:1"}, "300:1": { "rt_type": "import", "route_target": "300:1"}, "200:1": { "rt_type": "both", "route_target": "200:1"}, "200:2": { "rt_type": "import", "route_target": "200:2"}}}}, "route_distinguisher": "200:1", "interfaces": ["GigabitEthernet0/0/0/1"]}, "VRF1": { "description": "not set", "vrf_mode": "regular", "address_family": { "ipv6 unicast": { "route_target": { "400:1": { "rt_type": "import", "route_target": "400:1"}, "300:1": { "rt_type": "import", "route_target": "300:1"}, "200:1": { "rt_type": "both", "route_target": "200:1"}, "200:2": { "rt_type": "import", "route_target": "200:2"}}}, "ipv4 unicast": { "route_target": { "400:1": { "rt_type": "import", "route_target": "400:1"}, "300:1": { "rt_type": "import", "route_target": "300:1"}, "200:1": { "rt_type": "both", "route_target": "200:1"}, "200:2": { "rt_type": "import", "route_target": "200:2"}}}}, "route_distinguisher": "200:1", "interfaces": ["GigabitEthernet0/0/0/1"]}} ############################################ # INFO - VRF: default ############################################ PimVrfDefaultIpv4Mstatic = '''\ RP/0/0/CPU0:R2# show pim vrf default ipv4 mstatic Mon May 29 14:37:05.732 UTC IP Multicast Static Routes Information * 10.10.10.10/32 via GigabitEthernet0/0/0/0 with nexthop 192.168.1.0 and distance 10 * 10.10.10.11/32 via GigabitEthernet0/0/0/1 with nexthop 192.168.1.1 and distance 11 * 10.10.10.12/32 via GigabitEthernet0/0/0/2 with nexthop 192.168.1.2 and distance 12 
* 10.10.10.13/32 via GigabitEthernet0/0/0/3 with nexthop 192.168.1.3 and distance 13 * 10.10.10.14/32 via GigabitEthernet0/0/0/4 with nexthop 192.168.1.4 and distance 14 * 10.10.10.15/32 via GigabitEthernet0/0/0/5 with nexthop 192.168.1.5 and distance 15 * 10.10.10.16/32 via GigabitEthernet0/0/0/6 with nexthop 192.168.1.6 and distance 16 * 10.10.10.17/32 via GigabitEthernet0/0/0/7 with nexthop 192.168.1.7 and distance 17 ''' PimVrfDefaultIpv6Mstatic = '''\ RP/0/0/CPU0:R2# show pim vrf default ipv6 mstatic Mon May 29 14:37:26.421 UTC IP Multicast Static Routes Information * 2001:10:10::10/128 via GigabitEthernet0/0/0/0 with nexthop 2001:11:11::10 and distance 10 * 2001:10:10::11/128 via GigabitEthernet0/0/0/1 with nexthop 2001:11:11::11 and distance 11 * 2001:10:10::12/128 via GigabitEthernet0/0/0/2 with nexthop 2001:11:11::12 and distance 12 * 2001:10:10::13/128 via GigabitEthernet0/0/0/3 with nexthop 2001:11:11::13 and distance 13 * 2001:10:10::14/128 via GigabitEthernet0/0/0/4 with nexthop 2001:11:11::14 and distance 14 * 2001:10:10::15/128 via GigabitEthernet0/0/0/5 with nexthop 2001:11:11::15 and distance 15 ''' PimVrfDefaultIpv4InterfaceDetail = '''\ RP/0/0/CPU0:R2#show pim vrf default ipv4 interface detail Mon May 29 14:41:28.444 UTC PIM interfaces in VRF default IP PIM Multicast Interface State Flag: B - Bidir enabled, NB - Bidir disabled P - PIM Proxy enabled, NP - PIM Proxy disabled V - Virtual Interface BFD State - State/Interval/Multiplier Interface PIM Nbr Hello DR Count Intvl Prior Loopback0 on 1 30 1 Primary Address : 2.2.2.2 Flags : B P V BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:15 Neighbor Filter : - GigabitEthernet0/0/0/0 on 1 30 1 Primary Address : 10.2.3.2 Flags : B P BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:01 Neighbor Filter : - GigabitEthernet0/0/0/1 on 2 30 1 Primary Address : 10.1.2.2 Flags : NB P BFD : Off/150 ms/3 
DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:07 Neighbor Filter : - ''' PimVrfDefaultIpv6InterfaceDetail = '''\ RP/0/0/CPU0:R2#show pim vrf default ipv6 interface detail Mon May 29 14:41:52.972 UTC PIM interfaces in VRF default IP PIM Multicast Interface State Flag: B - Bidir enabled, NB - Bidir disabled P - PIM Proxy enabled, NP - PIM Proxy disabled A - PIM Assert batching capable, NA - PIM Assert batching incapable V - Virtual Interface Interface PIM Nbr Hello DR Count Intvl Prior Loopback0 on 1 30 1 Primary Address : fe80::85c6:bdff:fe62:61e Address : 2001:db8:2:2::2 Flags : B P NA V BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:19 Neighbor Filter : - GigabitEthernet0/0/0/0 on 1 30 1 Primary Address : fe80::5054:ff:fee4:f669 Address : 2001:db8:2:3::2 Flags : B P NA BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:22 Neighbor Filter : - GigabitEthernet0/0/0/1 on 1 30 1 Primary Address : fe80::5054:ff:feac:64b3 Address : 2001:db8:1:2::2 Flags : B P NA BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:02 Neighbor Filter : - ''' PimVrfDefaultIpv4RpfSummary = '''\ RP/0/0/CPU0:R2#show pim vrf default ipv4 rpf summary Mon May 29 14:42:47.569 UTC ISIS Mcast Topology Not configured MoFRR Flow-based Not configured MoFRR RIB Not configured RUMP MuRIB Not enabled PIM RPFs registered with Unicast RIB table Default RPF Table: IPv4-Unicast-default RIB Convergence Timeout Value: 00:30:00 RIB Convergence Time Left: 00:00:00 Multipath RPF Selection is Enabled Table: IPv4-Unicast-default PIM RPF Registrations = 1 RIB Table converged ''' PimVrfDefaultIpv6RpfSummary = '''\ RP/0/0/CPU0:R2#show pim vrf default ipv6 rpf summary Mon May 29 14:42:53.538 UTC ISIS Mcast Topology Not configured MoFRR Flow-based Not configured MoFRR RIB Not configured RUMP MuRIB Not enabled PIM RPFs 
registered with Unicast RIB table Default RPF Table: IPv6-Unicast-default RIB Convergence Timeout Value: 00:30:00 RIB Convergence Time Left: 00:00:00 Multipath RPF Selection is Enabled Table: IPv6-Unicast-default PIM RPF Registrations = 0 RIB Table converged ''' ############################################ # INFO - VRF: VRF1 ############################################ PimVrfVRF1Ipv4Mstatic = '''\ RP/0/0/CPU0:R2# show pim vrf VRF1 ipv4 mstatic Mon May 29 14:37:05.732 UTC IP Multicast Static Routes Information * 20.10.10.10/32 via GigabitEthernet1/0/0/0 with nexthop 192.168.1.0 and distance 10 * 20.10.10.11/32 via GigabitEthernet1/0/0/1 with nexthop 192.168.1.1 and distance 11 * 20.10.10.12/32 via GigabitEthernet1/0/0/2 with nexthop 192.168.1.2 and distance 12 * 20.10.10.13/32 via GigabitEthernet1/0/0/3 with nexthop 192.168.1.3 and distance 13 * 20.10.10.14/32 via GigabitEthernet1/0/0/4 with nexthop 192.168.1.4 and distance 14 * 20.10.10.15/32 via GigabitEthernet1/0/0/5 with nexthop 192.168.1.5 and distance 15 * 20.10.10.16/32 via GigabitEthernet1/0/0/6 with nexthop 192.168.1.6 and distance 16 * 20.10.10.17/32 via GigabitEthernet1/0/0/7 with nexthop 192.168.1.7 and distance 17 ''' PimVrfVRF1Ipv6Mstatic = '''\ RP/0/0/CPU0:R2# show pim vrf VRF1 ipv6 mstatic Mon May 29 14:37:26.421 UTC IP Multicast Static Routes Information * 3001:10:10::10/128 via GigabitEthernet1/0/0/0 with nexthop 2001:11:11::10 and distance 10 * 3001:10:10::11/128 via GigabitEthernet1/0/0/1 with nexthop 2001:11:11::11 and distance 11 * 3001:10:10::12/128 via GigabitEthernet1/0/0/2 with nexthop 2001:11:11::12 and distance 12 * 3001:10:10::13/128 via GigabitEthernet1/0/0/3 with nexthop 2001:11:11::13 and distance 13 * 3001:10:10::14/128 via GigabitEthernet1/0/0/4 with nexthop 2001:11:11::14 and distance 14 * 3001:10:10::15/128 via GigabitEthernet1/0/0/5 with nexthop 2001:11:11::15 and distance 15 ''' PimVrfVRF1Ipv4InterfaceDetail = '''\ RP/0/0/CPU0:R2#show pim vrf VRF1 ipv4 interface detail Mon May 
29 14:41:28.444 UTC PIM interfaces in VRF VRF1 IP PIM Multicast Interface State Flag: B - Bidir enabled, NB - Bidir disabled P - PIM Proxy enabled, NP - PIM Proxy disabled V - Virtual Interface BFD State - State/Interval/Multiplier Interface PIM Nbr Hello DR Count Intvl Prior Loopback0 on 1 30 1 Primary Address : 2.2.2.2 Flags : B P V BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:15 Neighbor Filter : - GigabitEthernet0/0/0/0 on 1 30 1 Primary Address : 10.2.3.2 Flags : B P BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:01 Neighbor Filter : - GigabitEthernet0/0/0/1 on 2 30 1 Primary Address : 10.1.2.2 Flags : NB P BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:07 Neighbor Filter : - ''' PimVrfVRF1Ipv6InterfaceDetail = '''\ RP/0/0/CPU0:R2#show pim vrf VRF1 ipv6 interface detail Mon May 29 14:41:52.972 UTC PIM interfaces in VRF VRF1 IP PIM Multicast Interface State Flag: B - Bidir enabled, NB - Bidir disabled P - PIM Proxy enabled, NP - PIM Proxy disabled A - PIM Assert batching capable, NA - PIM Assert batching incapable V - Virtual Interface Interface PIM Nbr Hello DR Count Intvl Prior Loopback0 on 1 30 1 Primary Address : fe80::85c6:bdff:fe62:61e Address : 2001:db8:2:2::2 Flags : B P NA V BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:19 Neighbor Filter : - GigabitEthernet0/0/0/0 on 1 30 1 Primary Address : fe80::5054:ff:fee4:f669 Address : 2001:db8:2:3::2 Flags : B P NA BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:22 Neighbor Filter : - GigabitEthernet0/0/0/1 on 1 30 1 Primary Address : fe80::5054:ff:feac:64b3 Address : 2001:db8:1:2::2 Flags : B P NA BFD : Off/150 ms/3 DR : this system Propagation delay : 500 Override Interval : 2500 Hello Timer : 00:00:02 Neighbor Filter : - 
''' PimVrfVRF1Ipv4RpfSummary = '''\ RP/0/0/CPU0:R2#show pim VRF1 default ipv4 rpf summary Mon May 29 14:42:47.569 UTC ISIS Mcast Topology Not configured MoFRR Flow-based Not configured MoFRR RIB Not configured RUMP MuRIB Not enabled PIM RPFs registered with Unicast RIB table Default RPF Table: IPv4-Unicast-default RIB Convergence Timeout Value: 00:30:00 RIB Convergence Time Left: 00:00:00 Multipath RPF Selection is Enabled Table: IPv4-Unicast-default PIM RPF Registrations = 1 RIB Table converged ''' PimVrfVRF1Ipv6RpfSummary = '''\ RP/0/0/CPU0:R2#show pim vrf VRF1 ipv6 rpf summary Mon May 29 14:42:53.538 UTC ISIS Mcast Topology Not configured MoFRR Flow-based Not configured MoFRR RIB Not configured RUMP MuRIB Not enabled PIM RPFs registered with Unicast RIB table Default RPF Table: IPv6-Unicast-default RIB Convergence Timeout Value: 00:30:00 RIB Convergence Time Left: 00:00:00 Multipath RPF Selection is Enabled Table: IPv6-Unicast-default PIM RPF Registrations = 0 RIB Table converged ''' ############################################ # TABLE - VRF: default ############################################ MribVrfDefaultIpv4Route = '''\ RP/0/1/CPU0:rtr1#show mrib vrf default ipv4 route Mon Nov 2 15:26:01.015 PST IP Multicast Routing Information Base Entry flags: L - Domain-Local Source, E - External Source to the Domain, C - Directly-Connected Check, S - Signal, IA - Inherit Accept, IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID, MD - MDT Decap, MT - MDT Threshold Crossed, MH - MDT interface handle CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN Interface flags: F - Forward, A - Accept, IC - Internal Copy, NS - Negate Signal, DP - Don't Preserve, SP - Signal Present, II - Internal Interest, ID - Internal Disinterest, LI - Local Interest, LD - Local Disinterest, DI - Decapsulation Interface EI - Encapsulation Interface, MI - MDT Interface, 
LVIF - MPLS Encap, EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed, MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface IRMI - IR MDT Interface (*,224.0.0.0/4) RPF nbr: 0.0.0.0 Flags: C RPF P Up: 00:00:58 (*,224.0.0.0/24) Flags: D P Up: 00:00:58 (*,224.0.1.39) Flags: S P Up: 00:00:58 (*,227.1.1.1) RPF nbr: 0.0.0.0 Flags: C RPF MD MH CD MVPN TID: 0xe000001f MVPN Remote TID: 0x0 MVPN Payload: IPv4 MDT IFH: 0x803380 Up: 00:00:54 Outgoing Interface List Loopback0 Flags: F NS, Up: 00:00:54 (192.168.0.12,227.1.1.1) RPF nbr: 192.168.0.12 Flags: RPF ME MH MVPN TID: 0xe000001f MVPN Remote TID: 0x0 MVPN Payload: IPv4 MDT IFH: 0x803380 Up: 00:00:54 Incoming Interface List Loopback0 Flags: F NS, Up: 00:00:58 Outgoing Interface List Loopback0 Flags: F A, Up: 00:00:54 (*,232.0.0.0/8) Flags: D P Up: 00:00:58 (*,236.5.5.5) RPF nbr: 0.0.0.0 Flags: C RPF MD MH CD MVPN TID: 0xe0000018 MVPN Remote TID: 0xe0800018 MVPN Payload: IPv4 IPv6 MDT IFH: 0x803480 Up: 00:00:54 Outgoing Interface List Loopback0 Flags: F NS, Up: 00:00:54 (192.168.0.12,236.5.5.5) RPF nbr: 192.168.0.12 Flags: RPF ME MH MVPN TID: 0xe0000018 MVPN Remote TID: 0xe0800018 MVPN Payload: IPv4 IPv6 MDT IFH: 0x803480 Up: 00:00:54 Incoming Interface List Loopback0 Flags: F A, Up: 00:00:54 Outgoing Interface List Loopback0 Flags: F A, Up: 00:00:54 (192.168.0.22,236.5.5.5) RPF nbr: 11.0.1.22 Flags: C RPF MD MH CD MVPN TID: 0xe0000018 MVPN Remote TID: 0xe0800018 MVPN Payload: IPv4 IPv6 MDT IFH: 0x803480 Up: 00:00:13 Outgoing Interface List Loopback0 Flags: F NS, Up: 00:00:13 GigabitEthernet0/1/0/1 Flags: NS, Up: 00:00:01 ''' MribVrfDefaultIpv6Route = '''\ RP/0/1/CPU0:rtr1#show mrib vrf default ipv6 route Mon Nov 2 15:26:01.015 PST IP Multicast Routing Information Base Entry flags: L - Domain-Local Source, E - External Source to the Domain, C - Directly-Connected Check, S - Signal, IA - Inherit Accept, IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID, MD - MDT Decap, MT - MDT 
Threshold Crossed, MH - MDT interface handle CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN Interface flags: F - Forward, A - Accept, IC - Internal Copy, NS - Negate Signal, DP - Don't Preserve, SP - Signal Present, II - Internal Interest, ID - Internal Disinterest, LI - Local Interest, LD - Local Disinterest, DI - Decapsulation Interface EI - Encapsulation Interface, MI - MDT Interface, LVIF - MPLS Encap, EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed, MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface IRMI - IR MDT Interface (*,ff00::/8) RPF nbr: 150::150:150:150:150 Flags: L C RPF P Up: 00:04:45 Outgoing Interface List Decaps6tunnel0 Flags: NS DI, Up: 00:04:40 (*,ff00::/15) Flags: D P Up: 00:04:45 (*,ff02::/16) Flags: D P Up: 00:04:45 (*,ff10::/15) Flags: D P Up: 00:04:45 (*,ff12::/16) Flags: D P Up: 00:04:45 (1::1:1:1:2,ff15::1:1) RPF nbr: 1::1:1:1:2 Flags: L RPF MT MT Slot: 0/2/CPU0 Up: 00:02:53 Incoming Interface List GigabitEthernet150/0/0/6 Flags: A, Up: 00:02:53 Outgoing Interface List mdtvpn1 Flags: F NS MI MT MA, Up: 00:02:53 (4::4:4:4:5,ff15::2:1) RPF nbr: ::ffff:200.200.200.200 Flags: L RPF Up: 00:03:59 Incoming Interface List mdtvpn1 Flags: A MI, Up: 00:03:35 Outgoing Interface List GigabitEthernet150/0/0/6 Flags: F NS, Up: 00:03:59 (*,ff20::/15) Flags: D P Up: 00:04:45 (*,ff22::/16) Flags: D P Up: 00:04:45 ''' ############################################ # TABLE - VRF: VRF1 ############################################ MribVrfVRF1Ipv4Route = '''\ RP/0/1/CPU0:rtr1#show mrib vrf VRF1 ipv4 route Mon Nov 2 15:26:01.015 PST IP Multicast Routing Information Base Entry flags: L - Domain-Local Source, E - External Source to the Domain, C - Directly-Connected Check, S - Signal, IA - Inherit Accept, IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID, MD - MDT Decap, MT - MDT Threshold 
Crossed, MH - MDT interface handle CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN Interface flags: F - Forward, A - Accept, IC - Internal Copy, NS - Negate Signal, DP - Don't Preserve, SP - Signal Present, II - Internal Interest, ID - Internal Disinterest, LI - Local Interest, LD - Local Disinterest, DI - Decapsulation Interface EI - Encapsulation Interface, MI - MDT Interface, LVIF - MPLS Encap, EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed, MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface IRMI - IR MDT Interface (*,234.0.0.0/4) RPF nbr: 0.0.0.1 Flags: MD RPF P Up: 00:01:28 (*,124.0.0.0/32) Flags: P D Up: 00:01:38 (*,124.0.1.40) Flags: S P Up: 00:00:46 (172.150.0.15,217.1.1.1) RPF nbr: 192.168.0.12 Flags: RPF ME MH MVPN TID: 0xe000001f MVPN Remote TID: 0x0 MVPN Payload: IPv4 MDT IFH: 0x803380 Up: 00:00:54 Incoming Interface List GigabitEthernet0/0/0/1 Flags: F NS, Up: 00:01:38 Outgoing Interface List GigabitEthernet0/0/0/2 Flags: F A, Up: 00:01:24 ''' MribVrfVRF1Ipv6Route = '''\ RP/0/1/CPU0:rtr1#show mrib vrf VRF1 ipv6 route Mon Nov 2 15:26:01.015 PST IP Multicast Routing Information Base Entry flags: L - Domain-Local Source, E - External Source to the Domain, C - Directly-Connected Check, S - Signal, IA - Inherit Accept, IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID, MD - MDT Decap, MT - MDT Threshold Crossed, MH - MDT interface handle CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN Interface flags: F - Forward, A - Accept, IC - Internal Copy, NS - Negate Signal, DP - Don't Preserve, SP - Signal Present, II - Internal Interest, ID - Internal Disinterest, LI - Local Interest, LD - Local Disinterest, DI - Decapsulation Interface EI - Encapsulation Interface, MI 
- MDT Interface, LVIF - MPLS Encap, EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed, MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface IRMI - IR MDT Interface (*,ff70::/12) RPF nbr: :: Flags: C RPF P Up: 00:04:45 (*,ff70::/15) Flags: D P Up: 00:04:45 (*,ff72::/16) Flags: D P Up: 00:04:45 (*,ff80::/15) Flags: D P Up: 00:04:45 (*,ff82::/16) Flags: D P Up: 00:04:45 (*,ff90::/15) Flags: D P Up: 00:04:45 ''' McastInfo = { 'vrf': {'VRF1': {'address_family': {'ipv4': {'enable': True, 'mroute': {'20.10.10.10/32': {'path': {'192.168.1.0 GigabitEthernet1/0/0/0 10': {'admin_distance': 10, 'interface_name': 'GigabitEthernet1/0/0/0', 'neighbor_address': '192.168.1.0'}}}, '20.10.10.11/32': {'path': {'192.168.1.1 GigabitEthernet1/0/0/1 11': {'admin_distance': 11, 'interface_name': 'GigabitEthernet1/0/0/1', 'neighbor_address': '192.168.1.1'}}}, '20.10.10.12/32': {'path': {'192.168.1.2 GigabitEthernet1/0/0/2 12': {'admin_distance': 12, 'interface_name': 'GigabitEthernet1/0/0/2', 'neighbor_address': '192.168.1.2'}}}, '20.10.10.13/32': {'path': {'192.168.1.3 GigabitEthernet1/0/0/3 13': {'admin_distance': 13, 'interface_name': 'GigabitEthernet1/0/0/3', 'neighbor_address': '192.168.1.3'}}}, '20.10.10.14/32': {'path': {'192.168.1.4 GigabitEthernet1/0/0/4 14': {'admin_distance': 14, 'interface_name': 'GigabitEthernet1/0/0/4', 'neighbor_address': '192.168.1.4'}}}, '20.10.10.15/32': {'path': {'192.168.1.5 GigabitEthernet1/0/0/5 15': {'admin_distance': 15, 'interface_name': 'GigabitEthernet1/0/0/5', 'neighbor_address': '192.168.1.5'}}}, '20.10.10.16/32': {'path': {'192.168.1.6 GigabitEthernet1/0/0/6 16': {'admin_distance': 16, 'interface_name': 'GigabitEthernet1/0/0/6', 'neighbor_address': '192.168.1.6'}}}, '20.10.10.17/32': {'path': {'192.168.1.7 GigabitEthernet1/0/0/7 17': {'admin_distance': 17, 'interface_name': 'GigabitEthernet1/0/0/7', 'neighbor_address': '192.168.1.7'}}}}, 'multipath': True}, 'ipv6': {'enable': True, 'mroute': 
{'3001:10:10::10/128': {'path': {'2001:11:11::10 GigabitEthernet1/0/0/0 10': {'admin_distance': 10, 'interface_name': 'GigabitEthernet1/0/0/0', 'neighbor_address': '2001:11:11::10'}}}, '3001:10:10::11/128': {'path': {'2001:11:11::11 GigabitEthernet1/0/0/1 11': {'admin_distance': 11, 'interface_name': 'GigabitEthernet1/0/0/1', 'neighbor_address': '2001:11:11::11'}}}, '3001:10:10::12/128': {'path': {'2001:11:11::12 GigabitEthernet1/0/0/2 12': {'admin_distance': 12, 'interface_name': 'GigabitEthernet1/0/0/2', 'neighbor_address': '2001:11:11::12'}}}, '3001:10:10::13/128': {'path': {'2001:11:11::13 GigabitEthernet1/0/0/3 13': {'admin_distance': 13, 'interface_name': 'GigabitEthernet1/0/0/3', 'neighbor_address': '2001:11:11::13'}}}, '3001:10:10::14/128': {'path': {'2001:11:11::14 GigabitEthernet1/0/0/4 14': {'admin_distance': 14, 'interface_name': 'GigabitEthernet1/0/0/4', 'neighbor_address': '2001:11:11::14'}}}, '3001:10:10::15/128': {'path': {'2001:11:11::15 GigabitEthernet1/0/0/5 15': {'admin_distance': 15, 'interface_name': 'GigabitEthernet1/0/0/5', 'neighbor_address': '2001:11:11::15'}}}}, 'multipath': True}}}, 'default': {'address_family': {'ipv4': {'enable': True, 'mroute': {'10.10.10.10/32': {'path': {'192.168.1.0 GigabitEthernet0/0/0/0 10': {'admin_distance': 10, 'interface_name': 'GigabitEthernet0/0/0/0', 'neighbor_address': '192.168.1.0'}}}, '10.10.10.11/32': {'path': {'192.168.1.1 GigabitEthernet0/0/0/1 11': {'admin_distance': 11, 'interface_name': 'GigabitEthernet0/0/0/1', 'neighbor_address': '192.168.1.1'}}}, '10.10.10.12/32': {'path': {'192.168.1.2 GigabitEthernet0/0/0/2 12': {'admin_distance': 12, 'interface_name': 'GigabitEthernet0/0/0/2', 'neighbor_address': '192.168.1.2'}}}, '10.10.10.13/32': {'path': {'192.168.1.3 GigabitEthernet0/0/0/3 13': {'admin_distance': 13, 'interface_name': 'GigabitEthernet0/0/0/3', 'neighbor_address': '192.168.1.3'}}}, '10.10.10.14/32': {'path': {'192.168.1.4 GigabitEthernet0/0/0/4 14': {'admin_distance': 14, 
'interface_name': 'GigabitEthernet0/0/0/4', 'neighbor_address': '192.168.1.4'}}}, '10.10.10.15/32': {'path': {'192.168.1.5 GigabitEthernet0/0/0/5 15': {'admin_distance': 15, 'interface_name': 'GigabitEthernet0/0/0/5', 'neighbor_address': '192.168.1.5'}}}, '10.10.10.16/32': {'path': {'192.168.1.6 GigabitEthernet0/0/0/6 16': {'admin_distance': 16, 'interface_name': 'GigabitEthernet0/0/0/6', 'neighbor_address': '192.168.1.6'}}}, '10.10.10.17/32': {'path': {'192.168.1.7 GigabitEthernet0/0/0/7 17': {'admin_distance': 17, 'interface_name': 'GigabitEthernet0/0/0/7', 'neighbor_address': '192.168.1.7'}}}}, 'multipath': True}, 'ipv6': {'enable': True, 'mroute': {'2001:10:10::10/128': {'path': {'2001:11:11::10 GigabitEthernet0/0/0/0 10': {'admin_distance': 10, 'interface_name': 'GigabitEthernet0/0/0/0', 'neighbor_address': '2001:11:11::10'}}}, '2001:10:10::11/128': {'path': {'2001:11:11::11 GigabitEthernet0/0/0/1 11': {'admin_distance': 11, 'interface_name': 'GigabitEthernet0/0/0/1', 'neighbor_address': '2001:11:11::11'}}}, '2001:10:10::12/128': {'path': {'2001:11:11::12 GigabitEthernet0/0/0/2 12': {'admin_distance': 12, 'interface_name': 'GigabitEthernet0/0/0/2', 'neighbor_address': '2001:11:11::12'}}}, '2001:10:10::13/128': {'path': {'2001:11:11::13 GigabitEthernet0/0/0/3 13': {'admin_distance': 13, 'interface_name': 'GigabitEthernet0/0/0/3', 'neighbor_address': '2001:11:11::13'}}}, '2001:10:10::14/128': {'path': {'2001:11:11::14 GigabitEthernet0/0/0/4 14': {'admin_distance': 14, 'interface_name': 'GigabitEthernet0/0/0/4', 'neighbor_address': '2001:11:11::14'}}}, '2001:10:10::15/128': {'path': {'2001:11:11::15 GigabitEthernet0/0/0/5 15': {'admin_distance': 15, 'interface_name': 'GigabitEthernet0/0/0/5', 'neighbor_address': '2001:11:11::15'}}}}, 'multipath': True}}}}} McastTable = { 'vrf': {'VRF1': {'address_family': {'ipv4': {'multicast_group': {'124.0.0.0/32': {'source_address': {'*': {'flags': 'P D', 'uptime': '00:01:38'}}}, '124.0.1.40': {'source_address': {'*': 
{'flags': 'S P', 'uptime': '00:00:46'}}}, '217.1.1.1': {'source_address': {'172.150.0.15': {'flags': 'RPF ME MH', 'incoming_interface_list': {'GigabitEthernet0/0/0/1': {'rpf_nbr': '192.168.0.12'}}, 'outgoing_interface_list': {'GigabitEthernet0/0/0/2': {'flags': 'F A', 'uptime': '00:01:24'}}, 'uptime': '00:00:54'}}}, '234.0.0.0/4': {'source_address': {'*': {'flags': 'MD RPF P', 'uptime': '00:01:28'}}}}}, 'ipv6': {'multicast_group': {'ff70::/12': {'source_address': {'*': {'flags': 'C RPF P', 'uptime': '00:04:45'}}}, 'ff70::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff72::/16': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff80::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff82::/16': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff90::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}}}}}, 'default': {'address_family': {'ipv4': {'multicast_group': {'224.0.0.0/24': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:00:58'}}}, '224.0.0.0/4': {'source_address': {'*': {'flags': 'C RPF P', 'uptime': '00:00:58'}}}, '224.0.1.39': {'source_address': {'*': {'flags': 'S P', 'uptime': '00:00:58'}}}, '227.1.1.1': {'source_address': {'*': {'flags': 'C RPF MD MH CD', 'outgoing_interface_list': {'Loopback0': {'flags': 'F NS', 'uptime': '00:00:54'}}, 'uptime': '00:00:54'}, '192.168.0.12': {'flags': 'RPF ME MH', 'incoming_interface_list': {'Loopback0': {'rpf_nbr': '192.168.0.12'}}, 'outgoing_interface_list': {'Loopback0': {'flags': 'F A', 'uptime': '00:00:54'}}, 'uptime': '00:00:54'}}}, '232.0.0.0/8': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:00:58'}}}, '236.5.5.5': {'source_address': {'*': {'flags': 'C RPF MD MH CD', 'outgoing_interface_list': {'Loopback0': {'flags': 'F NS', 'uptime': '00:00:54'}}, 'uptime': '00:00:54'}, '192.168.0.12': {'flags': 'RPF ME MH', 'incoming_interface_list': {'Loopback0': {'rpf_nbr': '192.168.0.12'}}, 
'outgoing_interface_list': {'Loopback0': {'flags': 'F A', 'uptime': '00:00:54'}}, 'uptime': '00:00:54'}, '192.168.0.22': {'flags': 'C RPF MD MH CD', 'outgoing_interface_list': {'GigabitEthernet0/1/0/1': {'flags': 'NS', 'uptime': '00:00:01'}, 'Loopback0': {'flags': 'F NS', 'uptime': '00:00:13'}}, 'uptime': '00:00:13'}}}}}, 'ipv6': {'multicast_group': {'ff00::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff00::/8': {'source_address': {'*': {'flags': 'L C RPF P', 'outgoing_interface_list': {'Decaps6tunnel0': {'flags': 'NS DI', 'uptime': '00:04:40'}}, 'uptime': '00:04:45'}}}, 'ff02::/16': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff10::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff12::/16': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff15::1:1': {'source_address': {'1::1:1:1:2': {'flags': 'L RPF MT', 'incoming_interface_list': {'GigabitEthernet150/0/0/6': {'rpf_nbr': '1::1:1:1:2'}}, 'outgoing_interface_list': {'mdtvpn1': {'flags': 'F NS MI MT MA', 'uptime': '00:02:53'}}, 'uptime': '00:02:53'}}}, 'ff15::2:1': {'source_address': {'4::4:4:4:5': {'flags': 'L RPF', 'incoming_interface_list': {'mdtvpn1': {'rpf_nbr': '::ffff:200.200.200.200'}}, 'outgoing_interface_list': {'GigabitEthernet150/0/0/6': {'flags': 'F NS', 'uptime': '00:03:59'}}, 'uptime': '00:03:59'}}}, 'ff20::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff22::/16': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}}}}}}}
""" Mcast Genie Ops Object Outputs for IOSXR. """ class Mcastoutput(object): show_vrf_all_detail = {'default': {'description': 'not set', 'vrf_mode': 'regular', 'address_family': {'ipv6 unicast': {'route_target': {'400:1': {'rt_type': 'import', 'route_target': '400:1'}, '300:1': {'rt_type': 'import', 'route_target': '300:1'}, '200:1': {'rt_type': 'both', 'route_target': '200:1'}, '200:2': {'rt_type': 'import', 'route_target': '200:2'}}}, 'ipv4 unicast': {'route_target': {'400:1': {'rt_type': 'import', 'route_target': '400:1'}, '300:1': {'rt_type': 'import', 'route_target': '300:1'}, '200:1': {'rt_type': 'both', 'route_target': '200:1'}, '200:2': {'rt_type': 'import', 'route_target': '200:2'}}}}, 'route_distinguisher': '200:1', 'interfaces': ['GigabitEthernet0/0/0/1']}, 'VRF1': {'description': 'not set', 'vrf_mode': 'regular', 'address_family': {'ipv6 unicast': {'route_target': {'400:1': {'rt_type': 'import', 'route_target': '400:1'}, '300:1': {'rt_type': 'import', 'route_target': '300:1'}, '200:1': {'rt_type': 'both', 'route_target': '200:1'}, '200:2': {'rt_type': 'import', 'route_target': '200:2'}}}, 'ipv4 unicast': {'route_target': {'400:1': {'rt_type': 'import', 'route_target': '400:1'}, '300:1': {'rt_type': 'import', 'route_target': '300:1'}, '200:1': {'rt_type': 'both', 'route_target': '200:1'}, '200:2': {'rt_type': 'import', 'route_target': '200:2'}}}}, 'route_distinguisher': '200:1', 'interfaces': ['GigabitEthernet0/0/0/1']}} pim_vrf_default_ipv4_mstatic = ' RP/0/0/CPU0:R2# show pim vrf default ipv4 mstatic\n Mon May 29 14:37:05.732 UTC\n IP Multicast Static Routes Information\n\n * 10.10.10.10/32 via GigabitEthernet0/0/0/0 with nexthop 192.168.1.0 and distance 10\n * 10.10.10.11/32 via GigabitEthernet0/0/0/1 with nexthop 192.168.1.1 and distance 11\n * 10.10.10.12/32 via GigabitEthernet0/0/0/2 with nexthop 192.168.1.2 and distance 12\n * 10.10.10.13/32 via GigabitEthernet0/0/0/3 with nexthop 192.168.1.3 and distance 13\n * 10.10.10.14/32 via 
GigabitEthernet0/0/0/4 with nexthop 192.168.1.4 and distance 14\n * 10.10.10.15/32 via GigabitEthernet0/0/0/5 with nexthop 192.168.1.5 and distance 15\n * 10.10.10.16/32 via GigabitEthernet0/0/0/6 with nexthop 192.168.1.6 and distance 16\n * 10.10.10.17/32 via GigabitEthernet0/0/0/7 with nexthop 192.168.1.7 and distance 17\n ' pim_vrf_default_ipv6_mstatic = ' RP/0/0/CPU0:R2# show pim vrf default ipv6 mstatic\n Mon May 29 14:37:26.421 UTC\n IP Multicast Static Routes Information\n\n * 2001:10:10::10/128 via GigabitEthernet0/0/0/0 with nexthop 2001:11:11::10 and distance 10 \n * 2001:10:10::11/128 via GigabitEthernet0/0/0/1 with nexthop 2001:11:11::11 and distance 11 \n * 2001:10:10::12/128 via GigabitEthernet0/0/0/2 with nexthop 2001:11:11::12 and distance 12\n * 2001:10:10::13/128 via GigabitEthernet0/0/0/3 with nexthop 2001:11:11::13 and distance 13\n * 2001:10:10::14/128 via GigabitEthernet0/0/0/4 with nexthop 2001:11:11::14 and distance 14\n * 2001:10:10::15/128 via GigabitEthernet0/0/0/5 with nexthop 2001:11:11::15 and distance 15\n ' pim_vrf_default_ipv4_interface_detail = ' RP/0/0/CPU0:R2#show pim vrf default ipv4 interface detail\n Mon May 29 14:41:28.444 UTC\n\n PIM interfaces in VRF default\n IP PIM Multicast Interface State\n Flag: B - Bidir enabled, NB - Bidir disabled\n P - PIM Proxy enabled, NP - PIM Proxy disabled\n V - Virtual Interface\n BFD State - State/Interval/Multiplier\n\n Interface PIM Nbr Hello DR\n Count Intvl Prior\n\n Loopback0 on 1 30 1 \n Primary Address : 2.2.2.2\n Flags : B P V\n BFD : Off/150 ms/3\n DR : this system\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:15\n Neighbor Filter : -\n\n GigabitEthernet0/0/0/0 on 1 30 1 \n Primary Address : 10.2.3.2\n Flags : B P \n BFD : Off/150 ms/3\n DR : this system\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:01\n Neighbor Filter : -\n\n GigabitEthernet0/0/0/1 on 2 30 1 \n Primary Address : 10.1.2.2\n Flags : NB P \n BFD : Off/150 
ms/3\n DR : this system\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:07\n Neighbor Filter : -\n ' pim_vrf_default_ipv6_interface_detail = ' RP/0/0/CPU0:R2#show pim vrf default ipv6 interface detail\n Mon May 29 14:41:52.972 UTC\n\n PIM interfaces in VRF default\n IP PIM Multicast Interface State\n Flag: B - Bidir enabled, NB - Bidir disabled\n P - PIM Proxy enabled, NP - PIM Proxy disabled\n A - PIM Assert batching capable, NA - PIM Assert batching incapable\n V - Virtual Interface\n\n Interface PIM Nbr Hello DR\n Count Intvl Prior\n\n Loopback0 on 1 30 1 \n Primary Address : fe80::85c6:bdff:fe62:61e\n Address : 2001:db8:2:2::2\n Flags : B P NA V\n BFD : Off/150 ms/3\n DR : this system\n\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:19\n Neighbor Filter : -\n\n GigabitEthernet0/0/0/0 on 1 30 1 \n Primary Address : fe80::5054:ff:fee4:f669\n Address : 2001:db8:2:3::2\n Flags : B P NA \n BFD : Off/150 ms/3\n DR : this system\n\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:22\n Neighbor Filter : -\n\n GigabitEthernet0/0/0/1 on 1 30 1 \n Primary Address : fe80::5054:ff:feac:64b3\n Address : 2001:db8:1:2::2\n Flags : B P NA \n BFD : Off/150 ms/3\n DR : this system\n\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:02\n Neighbor Filter : -\n ' pim_vrf_default_ipv4_rpf_summary = ' RP/0/0/CPU0:R2#show pim vrf default ipv4 rpf summary \n Mon May 29 14:42:47.569 UTC\n ISIS Mcast Topology Not configured\n MoFRR Flow-based Not configured\n MoFRR RIB Not configured\n RUMP MuRIB Not enabled\n\n PIM RPFs registered with Unicast RIB table\n\n Default RPF Table: IPv4-Unicast-default\n RIB Convergence Timeout Value: 00:30:00\n RIB Convergence Time Left: 00:00:00\n Multipath RPF Selection is Enabled\n\n Table: IPv4-Unicast-default\n PIM RPF Registrations = 1\n RIB Table converged\n ' pim_vrf_default_ipv6_rpf_summary = ' RP/0/0/CPU0:R2#show pim vrf default ipv6 rpf summary 
\n Mon May 29 14:42:53.538 UTC\n ISIS Mcast Topology Not configured\n MoFRR Flow-based Not configured\n MoFRR RIB Not configured\n RUMP MuRIB Not enabled\n\n PIM RPFs registered with Unicast RIB table\n\n Default RPF Table: IPv6-Unicast-default\n RIB Convergence Timeout Value: 00:30:00\n RIB Convergence Time Left: 00:00:00\n Multipath RPF Selection is Enabled\n\n Table: IPv6-Unicast-default\n PIM RPF Registrations = 0\n RIB Table converged\n ' pim_vrf_vrf1_ipv4_mstatic = ' RP/0/0/CPU0:R2# show pim vrf VRF1 ipv4 mstatic\n Mon May 29 14:37:05.732 UTC\n IP Multicast Static Routes Information\n\n * 20.10.10.10/32 via GigabitEthernet1/0/0/0 with nexthop 192.168.1.0 and distance 10\n * 20.10.10.11/32 via GigabitEthernet1/0/0/1 with nexthop 192.168.1.1 and distance 11\n * 20.10.10.12/32 via GigabitEthernet1/0/0/2 with nexthop 192.168.1.2 and distance 12\n * 20.10.10.13/32 via GigabitEthernet1/0/0/3 with nexthop 192.168.1.3 and distance 13\n * 20.10.10.14/32 via GigabitEthernet1/0/0/4 with nexthop 192.168.1.4 and distance 14\n * 20.10.10.15/32 via GigabitEthernet1/0/0/5 with nexthop 192.168.1.5 and distance 15\n * 20.10.10.16/32 via GigabitEthernet1/0/0/6 with nexthop 192.168.1.6 and distance 16\n * 20.10.10.17/32 via GigabitEthernet1/0/0/7 with nexthop 192.168.1.7 and distance 17\n ' pim_vrf_vrf1_ipv6_mstatic = ' RP/0/0/CPU0:R2# show pim vrf VRF1 ipv6 mstatic\n Mon May 29 14:37:26.421 UTC\n IP Multicast Static Routes Information\n\n * 3001:10:10::10/128 via GigabitEthernet1/0/0/0 with nexthop 2001:11:11::10 and distance 10 \n * 3001:10:10::11/128 via GigabitEthernet1/0/0/1 with nexthop 2001:11:11::11 and distance 11 \n * 3001:10:10::12/128 via GigabitEthernet1/0/0/2 with nexthop 2001:11:11::12 and distance 12\n * 3001:10:10::13/128 via GigabitEthernet1/0/0/3 with nexthop 2001:11:11::13 and distance 13\n * 3001:10:10::14/128 via GigabitEthernet1/0/0/4 with nexthop 2001:11:11::14 and distance 14\n * 3001:10:10::15/128 via GigabitEthernet1/0/0/5 with nexthop 2001:11:11::15 
and distance 15\n ' pim_vrf_vrf1_ipv4_interface_detail = ' RP/0/0/CPU0:R2#show pim vrf VRF1 ipv4 interface detail\n Mon May 29 14:41:28.444 UTC\n\n PIM interfaces in VRF VRF1\n IP PIM Multicast Interface State\n Flag: B - Bidir enabled, NB - Bidir disabled\n P - PIM Proxy enabled, NP - PIM Proxy disabled\n V - Virtual Interface\n BFD State - State/Interval/Multiplier\n\n Interface PIM Nbr Hello DR\n Count Intvl Prior\n\n Loopback0 on 1 30 1 \n Primary Address : 2.2.2.2\n Flags : B P V\n BFD : Off/150 ms/3\n DR : this system\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:15\n Neighbor Filter : -\n\n GigabitEthernet0/0/0/0 on 1 30 1 \n Primary Address : 10.2.3.2\n Flags : B P \n BFD : Off/150 ms/3\n DR : this system\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:01\n Neighbor Filter : -\n\n GigabitEthernet0/0/0/1 on 2 30 1 \n Primary Address : 10.1.2.2\n Flags : NB P \n BFD : Off/150 ms/3\n DR : this system\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:07\n Neighbor Filter : -\n ' pim_vrf_vrf1_ipv6_interface_detail = ' RP/0/0/CPU0:R2#show pim vrf VRF1 ipv6 interface detail\n Mon May 29 14:41:52.972 UTC\n\n PIM interfaces in VRF VRF1\n IP PIM Multicast Interface State\n Flag: B - Bidir enabled, NB - Bidir disabled\n P - PIM Proxy enabled, NP - PIM Proxy disabled\n A - PIM Assert batching capable, NA - PIM Assert batching incapable\n V - Virtual Interface\n\n Interface PIM Nbr Hello DR\n Count Intvl Prior\n\n Loopback0 on 1 30 1 \n Primary Address : fe80::85c6:bdff:fe62:61e\n Address : 2001:db8:2:2::2\n Flags : B P NA V\n BFD : Off/150 ms/3\n DR : this system\n\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:19\n Neighbor Filter : -\n\n GigabitEthernet0/0/0/0 on 1 30 1 \n Primary Address : fe80::5054:ff:fee4:f669\n Address : 2001:db8:2:3::2\n Flags : B P NA \n BFD : Off/150 ms/3\n DR : this system\n\n Propagation delay : 500\n Override Interval : 2500\n Hello 
Timer : 00:00:22\n Neighbor Filter : -\n\n GigabitEthernet0/0/0/1 on 1 30 1 \n Primary Address : fe80::5054:ff:feac:64b3\n Address : 2001:db8:1:2::2\n Flags : B P NA \n BFD : Off/150 ms/3\n DR : this system\n\n Propagation delay : 500\n Override Interval : 2500\n Hello Timer : 00:00:02\n Neighbor Filter : -\n ' pim_vrf_vrf1_ipv4_rpf_summary = ' RP/0/0/CPU0:R2#show pim VRF1 default ipv4 rpf summary \n Mon May 29 14:42:47.569 UTC\n ISIS Mcast Topology Not configured\n MoFRR Flow-based Not configured\n MoFRR RIB Not configured\n RUMP MuRIB Not enabled\n\n PIM RPFs registered with Unicast RIB table\n\n Default RPF Table: IPv4-Unicast-default\n RIB Convergence Timeout Value: 00:30:00\n RIB Convergence Time Left: 00:00:00\n Multipath RPF Selection is Enabled\n\n Table: IPv4-Unicast-default\n PIM RPF Registrations = 1\n RIB Table converged\n ' pim_vrf_vrf1_ipv6_rpf_summary = ' RP/0/0/CPU0:R2#show pim vrf VRF1 ipv6 rpf summary \n Mon May 29 14:42:53.538 UTC\n ISIS Mcast Topology Not configured\n MoFRR Flow-based Not configured\n MoFRR RIB Not configured\n RUMP MuRIB Not enabled\n\n PIM RPFs registered with Unicast RIB table\n\n Default RPF Table: IPv6-Unicast-default\n RIB Convergence Timeout Value: 00:30:00\n RIB Convergence Time Left: 00:00:00\n Multipath RPF Selection is Enabled\n\n Table: IPv6-Unicast-default\n PIM RPF Registrations = 0\n RIB Table converged\n ' mrib_vrf_default_ipv4_route = " RP/0/1/CPU0:rtr1#show mrib vrf default ipv4 route\n Mon Nov 2 15:26:01.015 PST\n\n IP Multicast Routing Information Base\n Entry flags: L - Domain-Local Source, E - External Source to the Domain,\n C - Directly-Connected Check, S - Signal, IA - Inherit Accept,\n IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID,\n MD - MDT Decap, MT - MDT Threshold Crossed, MH - MDT interface handle\n CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet\n MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary\n MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN\n Interface 
flags: F - Forward, A - Accept, IC - Internal Copy,\n NS - Negate Signal, DP - Don't Preserve, SP - Signal Present,\n II - Internal Interest, ID - Internal Disinterest, LI - Local Interest,\n LD - Local Disinterest, DI - Decapsulation Interface\n EI - Encapsulation Interface, MI - MDT Interface, LVIF - MPLS Encap,\n EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed,\n MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface\n IRMI - IR MDT Interface\n\n (*,224.0.0.0/4) RPF nbr: 0.0.0.0 Flags: C RPF P\n Up: 00:00:58\n\n (*,224.0.0.0/24) Flags: D P\n Up: 00:00:58\n\n (*,224.0.1.39) Flags: S P\n Up: 00:00:58\n\n (*,227.1.1.1) RPF nbr: 0.0.0.0 Flags: C RPF MD MH CD\n MVPN TID: 0xe000001f\n MVPN Remote TID: 0x0\n MVPN Payload: IPv4\n MDT IFH: 0x803380\n Up: 00:00:54\n Outgoing Interface List\n Loopback0 Flags: F NS, Up: 00:00:54\n\n (192.168.0.12,227.1.1.1) RPF nbr: 192.168.0.12 Flags: RPF ME MH\n MVPN TID: 0xe000001f\n MVPN Remote TID: 0x0\n MVPN Payload: IPv4\n MDT IFH: 0x803380\n Up: 00:00:54\n Incoming Interface List\n Loopback0 Flags: F NS, Up: 00:00:58\n Outgoing Interface List\n Loopback0 Flags: F A, Up: 00:00:54\n\n (*,232.0.0.0/8) Flags: D P\n Up: 00:00:58\n\n (*,236.5.5.5) RPF nbr: 0.0.0.0 Flags: C RPF MD MH CD\n MVPN TID: 0xe0000018\n MVPN Remote TID: 0xe0800018\n MVPN Payload: IPv4 IPv6\n MDT IFH: 0x803480\n Up: 00:00:54\n Outgoing Interface List\n Loopback0 Flags: F NS, Up: 00:00:54\n\n (192.168.0.12,236.5.5.5) RPF nbr: 192.168.0.12 Flags: RPF ME MH\n MVPN TID: 0xe0000018\n MVPN Remote TID: 0xe0800018\n MVPN Payload: IPv4 IPv6\n MDT IFH: 0x803480\n Up: 00:00:54\n Incoming Interface List\n Loopback0 Flags: F A, Up: 00:00:54\n Outgoing Interface List\n Loopback0 Flags: F A, Up: 00:00:54\n\n (192.168.0.22,236.5.5.5) RPF nbr: 11.0.1.22 Flags: C RPF MD MH CD\n MVPN TID: 0xe0000018\n MVPN Remote TID: 0xe0800018\n MVPN Payload: IPv4 IPv6\n MDT IFH: 0x803480\n Up: 00:00:13\n Outgoing Interface List\n Loopback0 Flags: F NS, Up: 
00:00:13\n GigabitEthernet0/1/0/1 Flags: NS, Up: 00:00:01\n " mrib_vrf_default_ipv6_route = " RP/0/1/CPU0:rtr1#show mrib vrf default ipv6 route \n Mon Nov 2 15:26:01.015 PST\n\n IP Multicast Routing Information Base\n Entry flags: L - Domain-Local Source, E - External Source to the Domain,\n C - Directly-Connected Check, S - Signal, IA - Inherit Accept,\n IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID,\n MD - MDT Decap, MT - MDT Threshold Crossed, MH - MDT interface handle\n CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet\n MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary\n MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN\n Interface flags: F - Forward, A - Accept, IC - Internal Copy,\n NS - Negate Signal, DP - Don't Preserve, SP - Signal Present,\n II - Internal Interest, ID - Internal Disinterest, LI - Local Interest,\n LD - Local Disinterest, DI - Decapsulation Interface\n EI - Encapsulation Interface, MI - MDT Interface, LVIF - MPLS Encap,\n EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed,\n MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface\n IRMI - IR MDT Interface\n\n (*,ff00::/8)\n RPF nbr: 150::150:150:150:150 Flags: L C RPF P\n Up: 00:04:45\n Outgoing Interface List\n Decaps6tunnel0 Flags: NS DI, Up: 00:04:40\n\n (*,ff00::/15)\n Flags: D P\n Up: 00:04:45\n\n (*,ff02::/16)\n Flags: D P\n Up: 00:04:45\n\n (*,ff10::/15)\n Flags: D P\n Up: 00:04:45\n\n (*,ff12::/16)\n Flags: D P\n Up: 00:04:45\n\n (1::1:1:1:2,ff15::1:1)\n RPF nbr: 1::1:1:1:2 Flags: L RPF MT\n MT Slot: 0/2/CPU0\n Up: 00:02:53\n Incoming Interface List\n GigabitEthernet150/0/0/6 Flags: A, Up: 00:02:53\n Outgoing Interface List\n mdtvpn1 Flags: F NS MI MT MA, Up: 00:02:53\n\n (4::4:4:4:5,ff15::2:1)\n RPF nbr: ::ffff:200.200.200.200 Flags: L RPF\n Up: 00:03:59\n Incoming Interface List\n mdtvpn1 Flags: A MI, Up: 00:03:35\n Outgoing Interface List\n GigabitEthernet150/0/0/6 Flags: F NS, Up: 00:03:59\n\n 
(*,ff20::/15)\n Flags: D P\n Up: 00:04:45\n\n (*,ff22::/16)\n Flags: D P\n Up: 00:04:45\n " mrib_vrf_vrf1_ipv4_route = " RP/0/1/CPU0:rtr1#show mrib vrf VRF1 ipv4 route\n Mon Nov 2 15:26:01.015 PST\n\n IP Multicast Routing Information Base\n Entry flags: L - Domain-Local Source, E - External Source to the Domain,\n C - Directly-Connected Check, S - Signal, IA - Inherit Accept,\n IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID,\n MD - MDT Decap, MT - MDT Threshold Crossed, MH - MDT interface handle\n CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet\n MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary\n MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN\n Interface flags: F - Forward, A - Accept, IC - Internal Copy,\n NS - Negate Signal, DP - Don't Preserve, SP - Signal Present,\n II - Internal Interest, ID - Internal Disinterest, LI - Local Interest,\n LD - Local Disinterest, DI - Decapsulation Interface\n EI - Encapsulation Interface, MI - MDT Interface, LVIF - MPLS Encap,\n EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed,\n MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface\n IRMI - IR MDT Interface\n\n (*,234.0.0.0/4) RPF nbr: 0.0.0.1 Flags: MD RPF P\n Up: 00:01:28\n\n (*,124.0.0.0/32) Flags: P D\n Up: 00:01:38\n\n (*,124.0.1.40) Flags: S P\n Up: 00:00:46\n\n (172.150.0.15,217.1.1.1) RPF nbr: 192.168.0.12 Flags: RPF ME MH\n MVPN TID: 0xe000001f\n MVPN Remote TID: 0x0\n MVPN Payload: IPv4\n MDT IFH: 0x803380\n Up: 00:00:54\n Incoming Interface List\n GigabitEthernet0/0/0/1 Flags: F NS, Up: 00:01:38\n Outgoing Interface List\n GigabitEthernet0/0/0/2 Flags: F A, Up: 00:01:24\n " mrib_vrf_vrf1_ipv6_route = " RP/0/1/CPU0:rtr1#show mrib vrf VRF1 ipv6 route \n Mon Nov 2 15:26:01.015 PST\n\n IP Multicast Routing Information Base\n Entry flags: L - Domain-Local Source, E - External Source to the Domain,\n C - Directly-Connected Check, S - Signal, IA - Inherit Accept,\n IF - Inherit From, D - Drop, 
ME - MDT Encap, EID - Encap ID,\n MD - MDT Decap, MT - MDT Threshold Crossed, MH - MDT interface handle\n CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet\n MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary\n MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN\n Interface flags: F - Forward, A - Accept, IC - Internal Copy,\n NS - Negate Signal, DP - Don't Preserve, SP - Signal Present,\n II - Internal Interest, ID - Internal Disinterest, LI - Local Interest,\n LD - Local Disinterest, DI - Decapsulation Interface\n EI - Encapsulation Interface, MI - MDT Interface, LVIF - MPLS Encap,\n EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed,\n MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface\n IRMI - IR MDT Interface\n\n (*,ff70::/12)\n RPF nbr: :: Flags: C RPF P\n Up: 00:04:45\n\n (*,ff70::/15)\n Flags: D P\n Up: 00:04:45\n\n (*,ff72::/16)\n Flags: D P\n Up: 00:04:45\n\n (*,ff80::/15)\n Flags: D P\n Up: 00:04:45\n\n (*,ff82::/16)\n Flags: D P\n Up: 00:04:45\n\n (*,ff90::/15)\n Flags: D P\n Up: 00:04:45\n " mcast_info = {'vrf': {'VRF1': {'address_family': {'ipv4': {'enable': True, 'mroute': {'20.10.10.10/32': {'path': {'192.168.1.0 GigabitEthernet1/0/0/0 10': {'admin_distance': 10, 'interface_name': 'GigabitEthernet1/0/0/0', 'neighbor_address': '192.168.1.0'}}}, '20.10.10.11/32': {'path': {'192.168.1.1 GigabitEthernet1/0/0/1 11': {'admin_distance': 11, 'interface_name': 'GigabitEthernet1/0/0/1', 'neighbor_address': '192.168.1.1'}}}, '20.10.10.12/32': {'path': {'192.168.1.2 GigabitEthernet1/0/0/2 12': {'admin_distance': 12, 'interface_name': 'GigabitEthernet1/0/0/2', 'neighbor_address': '192.168.1.2'}}}, '20.10.10.13/32': {'path': {'192.168.1.3 GigabitEthernet1/0/0/3 13': {'admin_distance': 13, 'interface_name': 'GigabitEthernet1/0/0/3', 'neighbor_address': '192.168.1.3'}}}, '20.10.10.14/32': {'path': {'192.168.1.4 GigabitEthernet1/0/0/4 14': {'admin_distance': 14, 'interface_name': 'GigabitEthernet1/0/0/4', 
'neighbor_address': '192.168.1.4'}}}, '20.10.10.15/32': {'path': {'192.168.1.5 GigabitEthernet1/0/0/5 15': {'admin_distance': 15, 'interface_name': 'GigabitEthernet1/0/0/5', 'neighbor_address': '192.168.1.5'}}}, '20.10.10.16/32': {'path': {'192.168.1.6 GigabitEthernet1/0/0/6 16': {'admin_distance': 16, 'interface_name': 'GigabitEthernet1/0/0/6', 'neighbor_address': '192.168.1.6'}}}, '20.10.10.17/32': {'path': {'192.168.1.7 GigabitEthernet1/0/0/7 17': {'admin_distance': 17, 'interface_name': 'GigabitEthernet1/0/0/7', 'neighbor_address': '192.168.1.7'}}}}, 'multipath': True}, 'ipv6': {'enable': True, 'mroute': {'3001:10:10::10/128': {'path': {'2001:11:11::10 GigabitEthernet1/0/0/0 10': {'admin_distance': 10, 'interface_name': 'GigabitEthernet1/0/0/0', 'neighbor_address': '2001:11:11::10'}}}, '3001:10:10::11/128': {'path': {'2001:11:11::11 GigabitEthernet1/0/0/1 11': {'admin_distance': 11, 'interface_name': 'GigabitEthernet1/0/0/1', 'neighbor_address': '2001:11:11::11'}}}, '3001:10:10::12/128': {'path': {'2001:11:11::12 GigabitEthernet1/0/0/2 12': {'admin_distance': 12, 'interface_name': 'GigabitEthernet1/0/0/2', 'neighbor_address': '2001:11:11::12'}}}, '3001:10:10::13/128': {'path': {'2001:11:11::13 GigabitEthernet1/0/0/3 13': {'admin_distance': 13, 'interface_name': 'GigabitEthernet1/0/0/3', 'neighbor_address': '2001:11:11::13'}}}, '3001:10:10::14/128': {'path': {'2001:11:11::14 GigabitEthernet1/0/0/4 14': {'admin_distance': 14, 'interface_name': 'GigabitEthernet1/0/0/4', 'neighbor_address': '2001:11:11::14'}}}, '3001:10:10::15/128': {'path': {'2001:11:11::15 GigabitEthernet1/0/0/5 15': {'admin_distance': 15, 'interface_name': 'GigabitEthernet1/0/0/5', 'neighbor_address': '2001:11:11::15'}}}}, 'multipath': True}}}, 'default': {'address_family': {'ipv4': {'enable': True, 'mroute': {'10.10.10.10/32': {'path': {'192.168.1.0 GigabitEthernet0/0/0/0 10': {'admin_distance': 10, 'interface_name': 'GigabitEthernet0/0/0/0', 'neighbor_address': '192.168.1.0'}}}, 
'10.10.10.11/32': {'path': {'192.168.1.1 GigabitEthernet0/0/0/1 11': {'admin_distance': 11, 'interface_name': 'GigabitEthernet0/0/0/1', 'neighbor_address': '192.168.1.1'}}}, '10.10.10.12/32': {'path': {'192.168.1.2 GigabitEthernet0/0/0/2 12': {'admin_distance': 12, 'interface_name': 'GigabitEthernet0/0/0/2', 'neighbor_address': '192.168.1.2'}}}, '10.10.10.13/32': {'path': {'192.168.1.3 GigabitEthernet0/0/0/3 13': {'admin_distance': 13, 'interface_name': 'GigabitEthernet0/0/0/3', 'neighbor_address': '192.168.1.3'}}}, '10.10.10.14/32': {'path': {'192.168.1.4 GigabitEthernet0/0/0/4 14': {'admin_distance': 14, 'interface_name': 'GigabitEthernet0/0/0/4', 'neighbor_address': '192.168.1.4'}}}, '10.10.10.15/32': {'path': {'192.168.1.5 GigabitEthernet0/0/0/5 15': {'admin_distance': 15, 'interface_name': 'GigabitEthernet0/0/0/5', 'neighbor_address': '192.168.1.5'}}}, '10.10.10.16/32': {'path': {'192.168.1.6 GigabitEthernet0/0/0/6 16': {'admin_distance': 16, 'interface_name': 'GigabitEthernet0/0/0/6', 'neighbor_address': '192.168.1.6'}}}, '10.10.10.17/32': {'path': {'192.168.1.7 GigabitEthernet0/0/0/7 17': {'admin_distance': 17, 'interface_name': 'GigabitEthernet0/0/0/7', 'neighbor_address': '192.168.1.7'}}}}, 'multipath': True}, 'ipv6': {'enable': True, 'mroute': {'2001:10:10::10/128': {'path': {'2001:11:11::10 GigabitEthernet0/0/0/0 10': {'admin_distance': 10, 'interface_name': 'GigabitEthernet0/0/0/0', 'neighbor_address': '2001:11:11::10'}}}, '2001:10:10::11/128': {'path': {'2001:11:11::11 GigabitEthernet0/0/0/1 11': {'admin_distance': 11, 'interface_name': 'GigabitEthernet0/0/0/1', 'neighbor_address': '2001:11:11::11'}}}, '2001:10:10::12/128': {'path': {'2001:11:11::12 GigabitEthernet0/0/0/2 12': {'admin_distance': 12, 'interface_name': 'GigabitEthernet0/0/0/2', 'neighbor_address': '2001:11:11::12'}}}, '2001:10:10::13/128': {'path': {'2001:11:11::13 GigabitEthernet0/0/0/3 13': {'admin_distance': 13, 'interface_name': 'GigabitEthernet0/0/0/3', 'neighbor_address': 
'2001:11:11::13'}}}, '2001:10:10::14/128': {'path': {'2001:11:11::14 GigabitEthernet0/0/0/4 14': {'admin_distance': 14, 'interface_name': 'GigabitEthernet0/0/0/4', 'neighbor_address': '2001:11:11::14'}}}, '2001:10:10::15/128': {'path': {'2001:11:11::15 GigabitEthernet0/0/0/5 15': {'admin_distance': 15, 'interface_name': 'GigabitEthernet0/0/0/5', 'neighbor_address': '2001:11:11::15'}}}}, 'multipath': True}}}}} mcast_table = {'vrf': {'VRF1': {'address_family': {'ipv4': {'multicast_group': {'124.0.0.0/32': {'source_address': {'*': {'flags': 'P D', 'uptime': '00:01:38'}}}, '124.0.1.40': {'source_address': {'*': {'flags': 'S P', 'uptime': '00:00:46'}}}, '217.1.1.1': {'source_address': {'172.150.0.15': {'flags': 'RPF ME MH', 'incoming_interface_list': {'GigabitEthernet0/0/0/1': {'rpf_nbr': '192.168.0.12'}}, 'outgoing_interface_list': {'GigabitEthernet0/0/0/2': {'flags': 'F A', 'uptime': '00:01:24'}}, 'uptime': '00:00:54'}}}, '234.0.0.0/4': {'source_address': {'*': {'flags': 'MD RPF P', 'uptime': '00:01:28'}}}}}, 'ipv6': {'multicast_group': {'ff70::/12': {'source_address': {'*': {'flags': 'C RPF P', 'uptime': '00:04:45'}}}, 'ff70::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff72::/16': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff80::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff82::/16': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff90::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}}}}}, 'default': {'address_family': {'ipv4': {'multicast_group': {'224.0.0.0/24': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:00:58'}}}, '224.0.0.0/4': {'source_address': {'*': {'flags': 'C RPF P', 'uptime': '00:00:58'}}}, '224.0.1.39': {'source_address': {'*': {'flags': 'S P', 'uptime': '00:00:58'}}}, '227.1.1.1': {'source_address': {'*': {'flags': 'C RPF MD MH CD', 'outgoing_interface_list': {'Loopback0': {'flags': 'F NS', 'uptime': '00:00:54'}}, 
'uptime': '00:00:54'}, '192.168.0.12': {'flags': 'RPF ME MH', 'incoming_interface_list': {'Loopback0': {'rpf_nbr': '192.168.0.12'}}, 'outgoing_interface_list': {'Loopback0': {'flags': 'F A', 'uptime': '00:00:54'}}, 'uptime': '00:00:54'}}}, '232.0.0.0/8': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:00:58'}}}, '236.5.5.5': {'source_address': {'*': {'flags': 'C RPF MD MH CD', 'outgoing_interface_list': {'Loopback0': {'flags': 'F NS', 'uptime': '00:00:54'}}, 'uptime': '00:00:54'}, '192.168.0.12': {'flags': 'RPF ME MH', 'incoming_interface_list': {'Loopback0': {'rpf_nbr': '192.168.0.12'}}, 'outgoing_interface_list': {'Loopback0': {'flags': 'F A', 'uptime': '00:00:54'}}, 'uptime': '00:00:54'}, '192.168.0.22': {'flags': 'C RPF MD MH CD', 'outgoing_interface_list': {'GigabitEthernet0/1/0/1': {'flags': 'NS', 'uptime': '00:00:01'}, 'Loopback0': {'flags': 'F NS', 'uptime': '00:00:13'}}, 'uptime': '00:00:13'}}}}}, 'ipv6': {'multicast_group': {'ff00::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff00::/8': {'source_address': {'*': {'flags': 'L C RPF P', 'outgoing_interface_list': {'Decaps6tunnel0': {'flags': 'NS DI', 'uptime': '00:04:40'}}, 'uptime': '00:04:45'}}}, 'ff02::/16': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff10::/15': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff12::/16': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff15::1:1': {'source_address': {'1::1:1:1:2': {'flags': 'L RPF MT', 'incoming_interface_list': {'GigabitEthernet150/0/0/6': {'rpf_nbr': '1::1:1:1:2'}}, 'outgoing_interface_list': {'mdtvpn1': {'flags': 'F NS MI MT MA', 'uptime': '00:02:53'}}, 'uptime': '00:02:53'}}}, 'ff15::2:1': {'source_address': {'4::4:4:4:5': {'flags': 'L RPF', 'incoming_interface_list': {'mdtvpn1': {'rpf_nbr': '::ffff:200.200.200.200'}}, 'outgoing_interface_list': {'GigabitEthernet150/0/0/6': {'flags': 'F NS', 'uptime': '00:03:59'}}, 'uptime': '00:03:59'}}}, 'ff20::/15': 
{'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}, 'ff22::/16': {'source_address': {'*': {'flags': 'D P', 'uptime': '00:04:45'}}}}}}}}}
# NOTE(review): this definition is immediately shadowed by an identical,
# reformatted copy of `bad_apples` defined right after it -- only the later
# definition is live at runtime.  One of the two copies should be removed.
def bad_apples(apples):
    """Discard rotten apples (value 0) and re-pair the leftover good ones.

    `apples` is a list of two-item pairs.  Pairs that are entirely rotten
    ([0, 0]) are dropped; pairs with exactly one rotten apple donate their
    good apple to a pool that later refills those half-rotten pairs, two
    spares at a time.  Returns only the fully good pairs.
    """
    res=[]      # surviving pairs (copied, so the input list is not mutated)
    remain=[]   # good apples pulled out of half-rotten pairs
    indexes=[]  # positions in `res` of the half-rotten pairs
    index=0     # next slot in `res` (fully rotten pairs take no slot)
    for i in apples:
        if i[0]==0 and i[1]==0:
            continue  # both rotten: drop the pair entirely
        elif i[0]==0 or i[1]==0:
            # conditional expression used only for its side effect:
            # stash whichever half of the pair is the good apple
            remain.append(i[0]) if i[1]==0 else remain.append(i[1])
            indexes.append(index)
        index+=1
        res.append([i[0], i[1]])
    # Refill half-rotten pairs, consuming spares two at a time; an odd
    # leftover spare is silently dropped by the floor division.
    for i in range(len(remain)//2):
        if res[indexes[i*2]][0]==0:
            # avoid filling the slot with a value equal to the pair's own
            # remaining apple, then swap so the kept apple comes first
            res[indexes[i*2]][0]=remain[i*2] if remain[i*2]!=res[indexes[i*2]][1] else remain[i*2+1]
            res[indexes[i*2]][0], res[indexes[i*2]][1]=res[indexes[i*2]][1], res[indexes[i*2]][0]
        elif res[indexes[i*2]][1]==0:
            res[indexes[i*2]][1]=remain[i*2] if remain[i*2]!=res[indexes[i*2]][0] else remain[i*2+1]
    return [i for i in res if (i[0]!=0 and i[1]!=0)]
def bad_apples(apples):
    """Discard rotten apples (value 0) and re-pair the leftover good ones.

    `apples` is a list of two-item pairs.  Fully rotten pairs ([0, 0]) are
    dropped outright.  Pairs with exactly one rotten apple donate their good
    apple to a spare pool; those half-rotten pairs are then refilled from the
    pool, two spares at a time (an odd leftover spare is discarded).  Only
    fully good pairs are returned; the input list is never mutated.
    """
    kept = []        # surviving pairs, stored as fresh two-element lists
    spares = []      # good apples harvested from half-rotten pairs
    half_slots = []  # index into `kept` for each half-rotten pair
    for pair in apples:
        first, second = pair[0], pair[1]
        if first == 0 and second == 0:
            # both rotten -- skip entirely, this pair occupies no slot
            continue
        if first == 0 or second == 0:
            # remember the good half and where this pair will live
            spares.append(first if second == 0 else second)
            half_slots.append(len(kept))
        kept.append([first, second])
    # Walk the spare pool in steps of two; range(0, len-1, 2) visits the
    # same indices as the original's range(len // 2) scaled by 2.
    for k in range(0, len(spares) - 1, 2):
        slot = kept[half_slots[k]]
        lead, backup = spares[k], spares[k + 1]
        if slot[0] == 0:
            # never pair an apple with a value equal to its own partner,
            # then swap so the kept apple stays in the leading position
            slot[0] = lead if lead != slot[1] else backup
            slot[0], slot[1] = slot[1], slot[0]
        elif slot[1] == 0:
            slot[1] = lead if lead != slot[0] else backup
    return [pair for pair in kept if pair[0] != 0 and pair[1] != 0]