content
stringlengths
7
1.05M
fixed_cases
stringlengths
1
1.28M
# -*- encoding: utf-8 -*- # Copyright (c) 2020 Modist Team <admin@modist.io> # ISC License <https://choosealicense.com/licenses/isc> """Contains packaging information for the module.""" __name__ = "modist-client" __repo__ = "https://github.com/modist-io/modist-client" __version__ = "0.0.1" __description__ = "The local client for managing Modist mods" __author__ = "Modist Team" __contact__ = "admin@modist.io" __license__ = "ISC License"
"""Contains packaging information for the module.""" __name__ = 'modist-client' __repo__ = 'https://github.com/modist-io/modist-client' __version__ = '0.0.1' __description__ = 'The local client for managing Modist mods' __author__ = 'Modist Team' __contact__ = 'admin@modist.io' __license__ = 'ISC License'
def is_equivalent(str_a, str_b):
    """Recursively decide whether two sequences are scramble-equivalent.

    Even-length inputs are equivalent when their halves match either in
    order or crosswise; odd-length inputs must be exactly equal.
    """
    if len(str_a) % 2 != 0 or len(str_b) % 2 != 0:
        return str_a == str_b
    if str_a == str_b:
        return True
    mid_a = len(str_a) // 2
    mid_b = len(str_b) // 2
    a_left, a_right = str_a[:mid_a], str_a[mid_a:]
    b_left, b_right = str_b[:mid_b], str_b[mid_b:]
    if is_equivalent(a_left, b_left) and is_equivalent(a_right, b_right):
        return True
    return is_equivalent(a_left, b_right) and is_equivalent(a_right, b_left)


str_a = list(input())
str_b = list(input())
print("YES" if is_equivalent(str_a, str_b) else "NO")
def is_equivalent(str_a, str_b):
    """Scramble-equivalence check: halves may match straight or swapped.

    Odd-length sequences are only equivalent when identical.
    """
    if len(str_a) % 2 != 0 or len(str_b) % 2 != 0:
        return str_a == str_b
    if str_a == str_b:
        return True
    half_a = len(str_a) // 2
    half_b = len(str_b) // 2
    first_a, second_a = str_a[:half_a], str_a[half_a:]
    first_b, second_b = str_b[:half_b], str_b[half_b:]
    straight = is_equivalent(first_a, first_b) and is_equivalent(second_a, second_b)
    if straight:
        return True
    return is_equivalent(first_a, second_b) and is_equivalent(second_a, first_b)


str_a = list(input())
str_b = list(input())
print('YES' if is_equivalent(str_a, str_b) else 'NO')
def test_nogarbage_fixture(testdir):
    """Exercise the ``nogarbage`` fixture end to end via pytester.

    Tests that leave cyclic garbage (or call gc.collect themselves) must
    error; ordinary passes and failures are reported normally.
    """
    testdir.makepyfile("""
        def test_fail(nogarbage):
            assert False

        def test_pass(nogarbage):
            pass

        def test_except(nogarbage):
            try:
                assert False
            except AssertionError:
                pass

        def test_circular(nogarbage):
            l1 = []
            l2 = [l1]
            l1.append(l2)

        def test_collect(nogarbage):
            import gc
            gc.collect()
    """)
    result = testdir.runpytest('-v')
    expected = [
        '*::test_fail FAIL*',
        '*::test_pass PASS*',
        '*::test_except PASS*',
        '*::test_circular ERROR*',
        '*::test_collect ERROR*',
    ]
    result.stdout.fnmatch_lines(expected)
    assert result.ret != 0
def test_nogarbage_fixture(testdir):
    """End-to-end check of the ``nogarbage`` fixture using pytester."""
    testdir.makepyfile('\n def test_fail(nogarbage):\n assert False\n\n def test_pass(nogarbage):\n pass\n\n def test_except(nogarbage):\n try:\n assert False\n except AssertionError:\n pass\n\n def test_circular(nogarbage):\n l1 = []\n l2 = [l1]\n l1.append(l2)\n\n def test_collect(nogarbage):\n import gc\n gc.collect()\n ')
    result = testdir.runpytest('-v')
    expected_lines = [
        '*::test_fail FAIL*',
        '*::test_pass PASS*',
        '*::test_except PASS*',
        '*::test_circular ERROR*',
        '*::test_collect ERROR*',
    ]
    result.stdout.fnmatch_lines(expected_lines)
    assert result.ret != 0
def main():
    """Project Euler #1: print the sum of all multiples of 3 or 5 below 1000."""
    multiples = (value for value in range(1, 1000)
                 if value % 3 == 0 or value % 5 == 0)
    print(sum(multiples))


if __name__ == '__main__':
    main()
def main():
    """Print the sum of every multiple of 3 or 5 below 1000 (Project Euler #1)."""
    running_total = 0
    for value in range(1, 1000):
        if value % 3 == 0 or value % 5 == 0:
            running_total += value
    print(running_total)


if __name__ == '__main__':
    main()
# 177 # 10 # print(divmod(177, 10)) user = int(input()) user2 = int(input()) a = divmod(user, user2) print(a[0]) print(a[1]) # for i in a: # print(''.join(a[i])) print(divmod(user,user2)) # print(divmod(user))
# Read a dividend and a divisor, then print quotient, remainder, and the pair.
user = int(input())
user2 = int(input())
# divmod returns (quotient, remainder).
a = divmod(user, user2)
print(a[0])
print(a[1])
print(divmod(user, user2))
"""Exceptions raised by the qtstyles package."""


class QtStylesError(Exception):
    """Base-class for all exceptions raised by this module."""


class SheetPathTypeError(QtStylesError):
    """Raised when the style sheet path is not a string."""

    def __init__(self):
        message = "The style sheet path must be a string."
        super(SheetPathTypeError, self).__init__(message)


class SheetPathValueError(QtStylesError):
    """Raised when the style sheet path does not end in '.qss'."""

    def __init__(self):
        message = "The style sheet path must end in '.qss'."
        super(SheetPathValueError, self).__init__(message)


class SheetPathFileDoesntExist(QtStylesError):
    """Raised when the style sheet path points to a missing file."""

    def __init__(self):
        message = "No file exists at the path specified."
        super(SheetPathFileDoesntExist, self).__init__(message)


class StyleDoesntExistError(QtStylesError):
    """Raised when the requested style is unknown."""

    def __init__(self):
        message = "The requested style does not exist."
        super(StyleDoesntExistError, self).__init__(message)
""" Here we'll define exceptions to raise in the qtstyles package """ class Qtstyleserror(Exception): """ Base-class for all exceptions raised by this module. """ class Sheetpathtypeerror(QtStylesError): """ The style sheet path must be a string. """ def __init__(self): super(SheetPathTypeError, self).__init__('The style sheet path must be a string.') class Sheetpathvalueerror(QtStylesError): """ The style sheet path end in '.qss'. """ def __init__(self): super(SheetPathValueError, self).__init__("The style sheet path must end in '.qss'.") class Sheetpathfiledoesntexist(QtStylesError): """ The style sheet path must point to a file that exists. """ def __init__(self): super(SheetPathFileDoesntExist, self).__init__('No file exists at the path specified.') class Styledoesntexisterror(QtStylesError): """ The provided style sheet path must exist. """ def __init__(self): super(StyleDoesntExistError, self).__init__('The requested style does not exist.')
{ "targets": [{ "target_name": "node_hge", "sources": [ "src/entry.cpp" ], "include_dirs": [ "src/hge181/include", "<!(node -e \"require('nan')\")" ], "libraries": [ "../src/hge181/lib/vc/hge.lib", "../src/hge181/lib/vc/hgehelp.lib" ], "libraries!": [ "libc.lib" ], "defines": [ "WIN32_LEAN_AND_MEAN" ], "VCLinkerTool": { "IgnoreSpecificDefaultLibraries": [ "libc.lib" ] }, "copies": [{ "destination": "<(module_root_dir)/build/Release/", "files": [ "<(module_root_dir)/src/hge181/hge.dll", "<(module_root_dir)/src/hge181/bass.dll" ] }] }] }
{'targets': [{'target_name': 'node_hge', 'sources': ['src/entry.cpp'], 'include_dirs': ['src/hge181/include', '<!(node -e "require(\'nan\')")'], 'libraries': ['../src/hge181/lib/vc/hge.lib', '../src/hge181/lib/vc/hgehelp.lib'], 'libraries!': ['libc.lib'], 'defines': ['WIN32_LEAN_AND_MEAN'], 'VCLinkerTool': {'IgnoreSpecificDefaultLibraries': ['libc.lib']}, 'copies': [{'destination': '<(module_root_dir)/build/Release/', 'files': ['<(module_root_dir)/src/hge181/hge.dll', '<(module_root_dir)/src/hge181/bass.dll']}]}]}
# Characters that may be replaced/stripped from question text.
question_replaceable_special_characters = {',', "'", '"', ';', '?', ':', '-', '(', ')', '[', ']', '{', '}'}
# Marker characters handled separately from the replaceable set.
special_characters = ['*', '$']
# Empty here — presumably populated elsewhere at runtime (TODO confirm).
punctuations = set()
# Location of pickled question segments.
pickled_questions_dir = "bin/data/questions"
pickle_files_extension = ".pickle"
# Number of questions serialized per pickle segment.
questions_per_segment = 100
# Maximum length of values shown in debug output.
debug_print_len = 25
# Characters that may be replaced/stripped from question text.
question_replaceable_special_characters = {',', "'", '"', ';', '?', ':', '-', '(', ')', '[', ']', '{', '}'}
# Marker characters handled separately from the replaceable set.
special_characters = ['*', '$']
# Empty here — presumably populated elsewhere at runtime (TODO confirm).
punctuations = set()
# Location of pickled question segments.
pickled_questions_dir = 'bin/data/questions'
pickle_files_extension = '.pickle'
# Number of questions serialized per pickle segment.
questions_per_segment = 100
# Maximum length of values shown in debug output.
debug_print_len = 25
"""This problem was asked by Google. Implement a key value store, where keys and values are integers, with the following methods: update(key, vl): updates the value at key to val, or sets it if doesn't exist get(key): returns the value with key, or None if no such value exists max_key(val): returns the largest key with value val, or None if no key with that value exists For example, if we ran the following calls: kv.update(1, 1) kv.update(2, 1) And then called kv.max_key(1), it should return 2, since it's the largest key with value 1. """
"""This problem was asked by Google. Implement a key value store, where keys and values are integers, with the following methods: update(key, vl): updates the value at key to val, or sets it if doesn't exist get(key): returns the value with key, or None if no such value exists max_key(val): returns the largest key with value val, or None if no key with that value exists For example, if we ran the following calls: kv.update(1, 1) kv.update(2, 1) And then called kv.max_key(1), it should return 2, since it's the largest key with value 1. """
class LoopiaError(Exception):
    """Base error for Loopia API failures; doubles as a code registry."""

    _exceptions = {}
    code = None
    message = None

    def __init__(self, response=None):
        super(LoopiaError, self).__init__(self.message)
        self.response = response

    @classmethod
    def register(cls, exception):
        """Class decorator: register ``exception`` under its ``code``."""
        if exception.code in cls._exceptions:
            raise ValueError("'{}' already exists".format(exception.code))
        cls._exceptions[exception.code] = exception
        return exception

    @classmethod
    def from_code(cls, code, response=None):
        """Instantiate the class registered for ``code`` (UnknownError otherwise)."""
        key = code if code in cls._exceptions else None
        return cls._exceptions[key](response)


@LoopiaError.register
class UnknownError(LoopiaError):
    code = None
    message = "Unknown error"


@LoopiaError.register
class AuthError(LoopiaError):
    code = "AUTH_ERROR"
    message = u"Wrong username or password"


@LoopiaError.register
class DomainOccupiedError(LoopiaError):
    code = "DOMAIN_OCCUPIED"
    message = u"Domain is not available for registration"


@LoopiaError.register
class RateLimitedError(LoopiaError):
    code = "RATE_LIMITED"
    message = u"Maximum number of requests over time reached"


@LoopiaError.register
class BadIndataError(LoopiaError):
    code = "BAD_INDATA"
    message = u"Invalid parameters"


@LoopiaError.register
class InsufficientFundsError(LoopiaError):
    code = "INSUFFICIENT_FUNDS"
    message = u"Not enough funds to complete the task"
class LoopiaError(Exception):
    """Base error for the Loopia API; maps error codes to exception types.

    Fixed: the class had been renamed ``Loopiaerror`` while its own
    ``super()`` call and every ``@LoopiaError.register`` decorator still
    used ``LoopiaError`` (NameError), and ``raise value_error(...)`` used
    an undefined name instead of the builtin ``ValueError``.
    """

    _exceptions = {}
    code = None
    message = None

    def __init__(self, response=None):
        super(LoopiaError, self).__init__(self.message)
        self.response = response

    @classmethod
    def register(cls, exception):
        """Class decorator: register ``exception`` under its ``code``."""
        if exception.code in cls._exceptions:
            raise ValueError("'{}' already exists".format(exception.code))
        cls._exceptions[exception.code] = exception
        return exception

    @classmethod
    def from_code(cls, code, response=None):
        """Return an instance for ``code``; unknown codes map to UnknownError."""
        if code not in cls._exceptions:
            code = None
        return cls._exceptions[code](response)


@LoopiaError.register
class UnknownError(LoopiaError):
    code = None
    message = 'Unknown error'


@LoopiaError.register
class AuthError(LoopiaError):
    code = 'AUTH_ERROR'
    message = u'Wrong username or password'


@LoopiaError.register
class DomainOccupiedError(LoopiaError):
    code = 'DOMAIN_OCCUPIED'
    message = u'Domain is not available for registration'


@LoopiaError.register
class RateLimitedError(LoopiaError):
    code = 'RATE_LIMITED'
    message = u'Maximum number of requests over time reached'


@LoopiaError.register
class BadIndataError(LoopiaError):
    code = 'BAD_INDATA'
    message = u'Invalid parameters'


@LoopiaError.register
class InsufficientFundsError(LoopiaError):
    code = 'INSUFFICIENT_FUNDS'
    message = u'Not enough funds to complete the task'
def paperwork(n, m):
    """Return n * m pages to copy, or 0 when either count is negative."""
    return 0 if n < 0 or m < 0 else n * m


print(paperwork(5, 0))
def paperwork(n, m):
    """Return n * m pages to copy; negative counts yield 0."""
    if n < 0:
        return 0
    if m < 0:
        return 0
    return n * m


print(paperwork(5, 0))
def clocks(x, y, a, b, x2, y2):
    """Apply the elapsed time from (x, y) to (a, b) onto a second clock
    reading (x2, y2) and print the result, wrapping at 24 h / 60 min."""
    hours = a - x
    minutes = b - y
    if minutes < 0:
        minutes += 60
        hours -= 1
    if hours < 0:
        hours += 24
    out_h = x2 + hours
    out_m = y2 + minutes
    if out_m >= 60:
        out_m -= 60
        out_h += 1
    if out_h >= 24:
        out_h -= 24
    print(out_h, out_m)


clocks(int(input()), int(input()), int(input()), int(input()), int(input()), int(input()))
def clocks(x, y, a, b, x2, y2):
    """Carry the (x, y) -> (a, b) elapsed interval onto the clock reading
    (x2, y2); prints hours and minutes wrapped modulo 24 and 60."""
    delta_h = a - x
    delta_m = b - y
    if delta_m < 0:
        delta_m = 60 + delta_m
        delta_h = delta_h - 1
    if delta_h < 0:
        delta_h = 24 + delta_h
    final_h = x2 + delta_h
    final_m = y2 + delta_m
    if final_m >= 60:
        final_m = final_m - 60
        final_h = final_h + 1
    if final_h >= 24:
        final_h = final_h - 24
    print(final_h, final_m)


clocks(int(input()), int(input()), int(input()), int(input()), int(input()), int(input()))
#!/usr/bin/env python3
#
# Copyright (c) 2015, Roberto Riggio
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#    * Redistributions of source code must retain the above copyright
#      notice, this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above copyright
#      notice, this list of conditions and the following disclaimer in the
#      documentation and/or other materials provided with the distribution.
#    * Neither the name of the CREATE-NET nor the
#      names of its contributors may be used to endorse or promote products
#      derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY CREATE-NET ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL CREATE-NET BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Image Class."""


class Image(object):
    """Image object representing a VNF template.

    Attributes:
        nb_ports: Number of ports (Integer)
        vnf: The virtual network function as a click script (str)
        handlers: the list of handlers supported by the vnf
        state_handlers: the list of state handlers supported by the vnf
    """

    def __init__(self, nb_ports, vnf, state_handlers, handlers):
        self.nb_ports = nb_ports
        self.vnf = vnf
        self.handlers = {}
        self.state_handlers = []
        self.add_handlers(handlers)
        self.add_state_handlers(state_handlers)

    def add_handlers(self, handlers):
        """Add vnf-specific handlers; each entry must be a [name, value] pair."""
        for entry in handlers:
            if not isinstance(entry, list):
                raise ValueError("list expected")
            if len(entry) != 2:
                raise ValueError("list of length 2 expected")
            name, value = entry
            self.handlers[name] = value

    def add_state_handlers(self, state_handlers):
        """Add state handlers; each must name a registered handler."""
        for name in state_handlers:
            if name not in self.handlers:
                raise KeyError("state handler %s not found" % name)
            self.state_handlers.append(name)

    def to_dict(self):
        """Return a JSON-serializable dictionary representing the Poll"""
        return {'nb_ports': self.nb_ports,
                'vnf': self.vnf,
                'state_handlers': self.state_handlers,
                'handlers': [(k, v) for k, v in self.handlers.items()]}
"""Image Class.""" class Image(object): """Image object representing a VNF template. Attributes: nb_ports: Number of ports (Integer) vnf: The virtual network function as a click script (str) handlers: the list of handlers supported by the vnf state_handlers: the list of state handlers supported by the vnf """ def __init__(self, nb_ports, vnf, state_handlers, handlers): self.nb_ports = nb_ports self.vnf = vnf self.handlers = {} self.state_handlers = [] self.add_handlers(handlers) self.add_state_handlers(state_handlers) def add_handlers(self, handlers): """add vnf-specifc handlers.""" for handler in handlers: if not isinstance(handler, list): raise value_error('list expected') if len(handler) != 2: raise value_error('list of length 2 expected') self.handlers[handler[0]] = handler[1] def add_state_handlers(self, state_handlers): """Add state handlers.""" for state_handler in state_handlers: if state_handler not in self.handlers: raise key_error('state handler %s not found' % state_handler) self.state_handlers.append(state_handler) def to_dict(self): """ Return a JSON-serializable dictionary representing the Poll """ return {'nb_ports': self.nb_ports, 'vnf': self.vnf, 'state_handlers': self.state_handlers, 'handlers': [(k, v) for (k, v) in self.handlers.items()]}
#_*_ coding: utf-8 -*- { 'name': "Project Management", 'summary': """ It allows to manage the projects to be carried out in a company and jobs.""", 'description': """ This module allows you to manage the projects of a company from departments, employees and projects. """, 'author': "Carlos Morales Aguilera", 'website': "http://www.example.com", 'category': 'Personal project', 'version':'0.1', 'application': True, 'depends': ['base'], 'data': [ 'views/project.xml', 'views/project_ext.xml', 'views/department.xml', 'views/employee.xml', ], 'installable': True, 'auto_install': True, }
{'name': 'Project Management', 'summary': '\n\t\tIt allows to manage the projects to be carried out in a company and jobs.', 'description': '\n\t\tThis module allows you to manage the projects of a company from departments, employees and projects.\n\t', 'author': 'Carlos Morales Aguilera', 'website': 'http://www.example.com', 'category': 'Personal project', 'version': '0.1', 'application': True, 'depends': ['base'], 'data': ['views/project.xml', 'views/project_ext.xml', 'views/department.xml', 'views/employee.xml'], 'installable': True, 'auto_install': True}
n = 6
a, b = 0, 0
arr = [1, 2, 4, 4, 5, 6]
for i in range(int(n - 1)):
    # a counts positions that are local minima, b local maxima
    # (index -1 wraps to the last element).
    if arr[i - 1] >= arr[i] <= arr[i + 1]:
        a += 1
    if arr[i - 1] <= arr[i] >= arr[i + 1]:
        b += 1
print(b if a > b else a)


def howMany(sentence):
    """Count the valid words in *sentence*.

    A word (space-separated run) is valid when every character is a
    letter, a hyphen, or one of ',.?!' and it contains at least one
    letter/hyphen.
    """
    total = 0
    pos = 0
    size = len(sentence)
    while pos < size:
        letters = punct = seen = 0
        while pos < size and sentence[pos] != ' ':
            ch = sentence[pos]
            if ('a' <= ch <= 'z') or ('A' <= ch <= 'Z') or ch == '-':
                letters += 1
            elif ch in (',', '.', '?', '!'):
                punct += 1
            seen += 1
            pos += 1
        if letters > 0 and letters + punct == seen:
            total += 1
        while pos < size and sentence[pos] == ' ':
            pos += 1
    return total
n = 6
a, b = 0, 0
arr = [1, 2, 4, 4, 5, 6]
for idx in range(int(n - 1)):
    # a: local minima, b: local maxima (index -1 wraps to the end).
    if arr[idx - 1] >= arr[idx] <= arr[idx + 1]:
        a += 1
    if arr[idx - 1] <= arr[idx] >= arr[idx + 1]:
        b += 1
print(b if a > b else a)


def how_many(sentence):
    """Count space-separated words made only of letters, hyphens and ',.?!'
    that contain at least one letter or hyphen."""
    count = 0
    i = 0
    length = len(sentence)
    while i < length:
        alpha = other = width = 0
        while i < length and sentence[i] != ' ':
            symbol = sentence[i]
            if ('a' <= symbol <= 'z') or ('A' <= symbol <= 'Z') or symbol == '-':
                alpha += 1
            elif symbol in (',', '.', '?', '!'):
                other += 1
            width += 1
            i += 1
        if alpha > 0 and alpha + other == width:
            count += 1
        while i < length and sentence[i] == ' ':
            i += 1
    return count
def currency(x, pos):
    """The two args are the value and tick position"""
    if x < 1e6:
        return '${:1.0f}K'.format(x * 1e-3)
    return '${:1.1f}M'.format(x * 1e-6)
def currency(x, pos):
    """The two args are the value and tick position"""
    in_millions = x * 1e-06
    in_thousands = x * 0.001
    formatted = '${:1.1f}M'.format(in_millions) if x >= 1000000.0 else '${:1.0f}K'.format(in_thousands)
    return formatted
# Parameters for compute_reference.py # mpmath maximum precision when computing hypergeometric function values. MAXPREC = 100000 # Range of a and b. PTS should be an odd number, since # a = 0 and b = 0 are included in addition to positive and negative values. UPPER = 2.3 PTS = 401 # Range of the logarithm of z values. LOWER_Z = -2 UPPER_Z = 3 PTS_Z = 31
# mpmath precision ceiling for hypergeometric evaluations
# (appears to mirror the compute_reference parameters — TODO confirm).
maxprec = 100000
# Sampling range and point count for a and b.
upper = 2.3
pts = 401
# Sampling of log(z).
lower_z = -2
upper_z = 3
pts_z = 31
''' 09 - Dictionary of lists Some more data just came in! This time, you'll use the dictionary of lists method, parsing the data column by column. |date | small_sold | large_sold | |-------------+---------------+------------| |"2019-11-17" | 10859987 | 7674135 | |"2019-12-01" | 9291631 | 6238096 | Instructions: - Create a dictionary of lists with the new data called avocados_dict. - Convert the dictionary to a DataFrame called avocados_2019. - Print your new DataFrame. ''' # Create a dictionary of lists with new data avocados_dict = { "date": ["2019-11-17", "2019-12-01"], "small_sold": [10859987, 9291631], "large_sold": [7674135, 6238096] } # Convert dictionary into DataFrame avocados_2019 = pd.DataFrame(avocados_dict) # Print the new DataFrame print(avocados_2019)
""" 09 - Dictionary of lists Some more data just came in! This time, you'll use the dictionary of lists method, parsing the data column by column. |date | small_sold | large_sold | |-------------+---------------+------------| |"2019-11-17" | 10859987 | 7674135 | |"2019-12-01" | 9291631 | 6238096 | Instructions: - Create a dictionary of lists with the new data called avocados_dict. - Convert the dictionary to a DataFrame called avocados_2019. - Print your new DataFrame. """ avocados_dict = {'date': ['2019-11-17', '2019-12-01'], 'small_sold': [10859987, 9291631], 'large_sold': [7674135, 6238096]} avocados_2019 = pd.DataFrame(avocados_dict) print(avocados_2019)
# -*- coding: utf-8 -*-
"""rackio/exception.py

This module defines all exceptions handle by Rackio.
"""


class RackioError(Exception):
    """Base class for other exceptions"""


class InvalidTagNameError(RackioError):
    """Raised when an invalid tag name is defined"""


class TagNotFoundError(RackioError):
    """Raised when a Tag Name was not found in repository"""


class InvalidTagTypeError(RackioError):
    """Raised when a Tag is assigned a different type object"""


class WorkerError(RackioError):
    """Raised when an error occurs in a Continous Worker"""
"""rackio/exception.py This module defines all exceptions handle by Rackio. """ class Rackioerror(Exception): """Base class for other exceptions""" pass class Invalidtagnameerror(RackioError): """Raised when an invalid tag name is defined""" pass class Tagnotfounderror(RackioError): """Raised when a Tag Name was not found in repository""" pass class Invalidtagtypeerror(RackioError): """Raised when a Tag is assigned a different type object""" pass class Workererror(RackioError): """Raised when an error occurs in a Continous Worker""" pass
# Read the friend count and their chosen numbers; count the dice values
# 1..5 whose total avoids a remainder of 1 modulo (n + 1).
n = int(input())
friends = input().split()
# ``total`` instead of ``sum`` so the builtin is not shadowed (original
# rebinding of ``sum`` hid the builtin for the rest of the module).
total = 0
for value in friends:
    total += int(value)
ways = 0
for roll in range(1, 6):
    if (total + roll) % (n + 1) != 1:
        ways += 1
print(ways)
# Count dice rolls 1..5 whose total avoids remainder 1 modulo (n + 1).
n = int(input())
friends = input().split()
# Renamed from ``sum`` to avoid shadowing the builtin.
total = sum(int(value) for value in friends)
ways = 0
for roll in range(1, 6):
    if (total + roll) % (n + 1) != 1:
        ways += 1
print(ways)
# Rotate Array
class Solution:
    def rotate(self, nums, k):
        """
        Do not return anything, modify nums in-place instead.
        """
        size = len(nums)
        k %= size
        if not k:
            return
        # Save the tail that wraps to the front, shift the rest right by k,
        # then restore the saved elements at the head.
        tail = nums[size - k:]
        for idx in range(size - 1, k - 1, -1):
            nums[idx] = nums[idx - k]
        for idx, value in enumerate(tail):
            nums[idx] = value


if __name__ == "__main__":
    sol = Solution()
    nums = [1, 2, 3, 4, 5, 6, 7]
    k = 3
    nums = [-1, -100, 3, 99]
    k = 2
    nums = [1, 2]
    k = 2
    sol.rotate(nums, k)
    print(nums)
class Solution:

    def rotate(self, nums, k):
        """
        Do not return anything, modify nums in-place instead.
        """
        length = len(nums)
        k = k % length
        if k == 0:
            return
        # Shift elements right by k, then restore the wrapped-around tail.
        front = nums[length - k:]
        index = length - k - 1
        while index >= 0:
            nums[index + k] = nums[index]
            index -= 1
        for index, val in enumerate(front):
            nums[index] = val


if __name__ == '__main__':
    # Fixed: ``sol = solution()`` was a NameError — the class is ``Solution``.
    sol = Solution()
    nums = [1, 2, 3, 4, 5, 6, 7]
    k = 3
    nums = [-1, -100, 3, 99]
    k = 2
    nums = [1, 2]
    k = 2
    sol.rotate(nums, k)
    print(nums)
""" Main TACA module """ __version__ = '0.9.3'
""" Main TACA module """ __version__ = '0.9.3'
# Malay part-of-speech word lists; collected into ``tatabahasa_dict`` below.
tanya_list = ['kenapa', 'bila', 'siapa', 'mengapa', 'apa', 'bagaimana', 'berapa', 'mana']
perintah_list = ['jangan', 'sila', 'tolong', 'harap', 'usah', 'jemput', 'minta']
pangkal_list = ['maka', 'alkisah', 'arakian', 'syahdah', 'adapun', 'bermula', 'kalakian']
bantu_list = ['akan', 'telah', 'boleh', 'mesti', 'belum', 'sudah', 'dapat', 'masih', 'harus', 'hendak']
penguat_list = ['paling', 'agak', 'sungguh', 'amat', 'terlalu', 'nian', 'benar', 'paling']
penegas_list = ['jua', 'juga', 'sahaja', 'hanya', 'memang', 'lagi', 'pun']
nafi_list = ['bukan', 'tidak', 'tak', 'tiada', 'tidaklah', 'tidakkah']
pemeri_list = ['ialah', 'adalah']
sendi_list = ['akan', 'kepada', 'terhadap', 'bagi', 'untuk', 'dari', 'daripada', 'di', 'dengan', 'hingga', 'sampai', 'ke', 'kepada', 'oleh', 'pada', 'sejak', 'seperti', 'umpama', 'bak', 'tentang', 'laksanabagai', 'semenjak', 'dalam', 'antara']
pembenar_list = ['ya', 'benar', 'betul']
nombor_list = ['satu', 'dua', 'tiga', 'empat', 'lima', 'enam', 'tujuh', 'lapan', 'sembilan', 'kosong']
suku_bilangan_list = ['per', 'suku', 'setengah', 'separuh', 'tiga suku']
pisahan_list = ['setiap', 'tiap']
keterangan_list = ['begitu', 'begini', 'demikian', 'perlahan', 'cepat', 'lena', 'akan', 'sedang', 'belum', 'telah', 'sekarang', 'sebentar', 'semalam', 'mungkin', 'agak', 'barangkali', 'pasti', 'tentu', 'sudah', 'selalu', 'kadang', 'acapkali', 'sesekali', 'yang']
arah_list = ['atas', 'bawah', 'tepi', 'antara', 'hadapan', 'utara', 'sisi', 'luar']
hubung_list = ['agar', 'apabila', 'atau', 'bahawa', 'dan', 'hingga', 'jika', 'jikalau', 'kecuali', 'kerana', 'lalu', 'manakala', 'sambil', 'serta', 'semenjak', 'sementara', 'sungguhpun', 'supaya', 'walaupun', 'tetapi', 'berkenan', 'berkenaan']
gantinama_list = ['aku', 'saya', 'hamba', 'patik', 'beta', 'kami', 'kita', 'anda', 'awak', 'engkau', 'tuanku', 'kalian', 'kamu', 'baginda', 'beliau', 'mereka', 'ini', 'itu', 'sini', 'situ', 'sana', 'kini', 'dia']
# pos permulaan[:-4] — prefix affixes used by the stemmer.
permulaan = ['bel', 'be', 'se', 'ter', 'men', 'memper', 'di', 'pe', 'me', 'ke', 'ber', 'pen', 'per']
# pos hujung [:1] — suffix affixes used by the stemmer.
hujung = ['kan', 'kah', 'lah', 'tah', 'nya', 'an', 'wan', 'wati', 'ita']
alphabet = 'qwertyuiopasdfghjklzxcvbnm'
# POS-tag code -> word list lookup.
tatabahasa_dict = {'KT': tanya_list, 'KP': perintah_list, 'KPA': pangkal_list, 'KB': bantu_list, 'KPENGUAT': penguat_list, 'KPENEGAS': penegas_list, 'NAFI': nafi_list, 'KPEMERI': pemeri_list, 'KS': sendi_list, 'KPEMBENAR': pembenar_list, 'NO': nombor_list, 'SUKU': suku_bilangan_list, 'PISAHAN': pisahan_list, 'KETERANGAN': keterangan_list, 'ARAH': arah_list, 'KH': hubung_list, 'GN': gantinama_list}
# Malay part-of-speech word lists, keyed by POS code in ``tatabahasa_dict``.
tanya_list = ['kenapa', 'bila', 'siapa', 'mengapa', 'apa', 'bagaimana', 'berapa', 'mana']
perintah_list = ['jangan', 'sila', 'tolong', 'harap', 'usah', 'jemput', 'minta']
pangkal_list = ['maka', 'alkisah', 'arakian', 'syahdah', 'adapun', 'bermula', 'kalakian']
bantu_list = ['akan', 'telah', 'boleh', 'mesti', 'belum', 'sudah', 'dapat', 'masih', 'harus', 'hendak']
penguat_list = ['paling', 'agak', 'sungguh', 'amat', 'terlalu', 'nian', 'benar', 'paling']
penegas_list = ['jua', 'juga', 'sahaja', 'hanya', 'memang', 'lagi', 'pun']
nafi_list = ['bukan', 'tidak', 'tak', 'tiada', 'tidaklah', 'tidakkah']
pemeri_list = ['ialah', 'adalah']
sendi_list = ['akan', 'kepada', 'terhadap', 'bagi', 'untuk', 'dari', 'daripada', 'di', 'dengan', 'hingga', 'sampai', 'ke', 'kepada', 'oleh', 'pada', 'sejak', 'seperti', 'umpama', 'bak', 'tentang', 'laksanabagai', 'semenjak', 'dalam', 'antara']
pembenar_list = ['ya', 'benar', 'betul']
nombor_list = ['satu', 'dua', 'tiga', 'empat', 'lima', 'enam', 'tujuh', 'lapan', 'sembilan', 'kosong']
suku_bilangan_list = ['per', 'suku', 'setengah', 'separuh', 'tiga suku']
pisahan_list = ['setiap', 'tiap']
keterangan_list = ['begitu', 'begini', 'demikian', 'perlahan', 'cepat', 'lena', 'akan', 'sedang', 'belum', 'telah', 'sekarang', 'sebentar', 'semalam', 'mungkin', 'agak', 'barangkali', 'pasti', 'tentu', 'sudah', 'selalu', 'kadang', 'acapkali', 'sesekali', 'yang']
arah_list = ['atas', 'bawah', 'tepi', 'antara', 'hadapan', 'utara', 'sisi', 'luar']
hubung_list = ['agar', 'apabila', 'atau', 'bahawa', 'dan', 'hingga', 'jika', 'jikalau', 'kecuali', 'kerana', 'lalu', 'manakala', 'sambil', 'serta', 'semenjak', 'sementara', 'sungguhpun', 'supaya', 'walaupun', 'tetapi', 'berkenan', 'berkenaan']
gantinama_list = ['aku', 'saya', 'hamba', 'patik', 'beta', 'kami', 'kita', 'anda', 'awak', 'engkau', 'tuanku', 'kalian', 'kamu', 'baginda', 'beliau', 'mereka', 'ini', 'itu', 'sini', 'situ', 'sana', 'kini', 'dia']
# Prefix (permulaan) and suffix (hujung) affixes used by the stemmer.
permulaan = ['bel', 'be', 'se', 'ter', 'men', 'memper', 'di', 'pe', 'me', 'ke', 'ber', 'pen', 'per']
hujung = ['kan', 'kah', 'lah', 'tah', 'nya', 'an', 'wan', 'wati', 'ita']
alphabet = 'qwertyuiopasdfghjklzxcvbnm'
# POS-tag code -> word list lookup.
tatabahasa_dict = {'KT': tanya_list, 'KP': perintah_list, 'KPA': pangkal_list, 'KB': bantu_list, 'KPENGUAT': penguat_list, 'KPENEGAS': penegas_list, 'NAFI': nafi_list, 'KPEMERI': pemeri_list, 'KS': sendi_list, 'KPEMBENAR': pembenar_list, 'NO': nombor_list, 'SUKU': suku_bilangan_list, 'PISAHAN': pisahan_list, 'KETERANGAN': keterangan_list, 'ARAH': arah_list, 'KH': hubung_list, 'GN': gantinama_list}
# International morse code (sample)
Morse = {
    # Letters
    "a": ".-", "b": "-...", "c": "-.-.", "d": "-..", "e": ".", "f": "..-.",
    "g": "--.", "h": "....", "i": "..", "j": ".---", "k": "-.-", "l": ".-..",
    "m": "--", "n": "-.", "o": "---", "p": ".--.", "q": "--.-", "r": ".-.",
    "s": "...", "t": "-", "u": "..-", "v": "...-", "w": ".--", "x": "-..-",
    "y": "-.--", "z": "--..",
    # Numbers
    "0": "-----", "1": ".----", "2": "..---", "3": "...--", "4": "....-",
    "5": ".....", "6": "-....", "7": "--...", "8": "---..", "9": "----.",
    # Punctuation
    "&": ".-...", "'": ".----.", "@": ".--.-.", ")": "-.--.-", "(": "-.--.",
    ":": "---...", ",": "--..--", "=": "-...-", "!": "-.-.--", ".": ".-.-.-",
    "-": "-....-", "+": ".-.-.", '"': ".-..-.", "?": "..--..", "/": "-..-.",
}


def translate_text(text):
    """
    Translates text to morse code

    Accepts:
        text(str): String to translate

    Returns:
        str: A translated string of morse code
    """
    if text == "":
        return "Please provide a valid text"
    morse_code = ""
    for word in text.split(" "):
        codes = []
        for char in word:
            key = char.lower()
            if key in Morse:
                codes.append(Morse[key])
        morse_code += " ".join(codes)
        morse_code += " "
    return morse_code.rstrip()


def translate_morse(morse_code):
    """
    Translates morse code to english.

    Accepts:
        morse (str): A string of morse code to translate

    Returns:
        str: A translated string of text
    """
    if morse_code == "":
        return "Please provide a valid morse code."
    text = ""
    for morse_word in morse_code.split(" "):
        for char in morse_word.split(" "):
            for letter, code in Morse.items():
                if char == code:
                    text += letter
        text += " "
    return text.rstrip()


if __name__ == "__main__":
    text = "This string has been translated to morse code."
    # Translate text to morse code
    morse = translate_text(text)
    # Translate morse code to text
    translated_text = translate_morse(morse)
    print(morse)
    print(translated_text)
# Morse code lookup table (letters, digits, punctuation).
morse = {'a': '.-', 'b': '-...', 'c': '-.-.', 'd': '-..', 'e': '.', 'f': '..-.', 'g': '--.', 'h': '....', 'i': '..', 'j': '.---', 'k': '-.-', 'l': '.-..', 'm': '--', 'n': '-.', 'o': '---', 'p': '.--.', 'q': '--.-', 'r': '.-.', 's': '...', 't': '-', 'u': '..-', 'v': '...-', 'w': '.--', 'x': '-..-', 'y': '-.--', 'z': '--..', '0': '-----', '1': '.----', '2': '..---', '3': '...--', '4': '....-', '5': '.....', '6': '-....', '7': '--...', '8': '---..', '9': '----.', '&': '.-...', "'": '.----.', '@': '.--.-.', ')': '-.--.-', '(': '-.--.', ':': '---...', ',': '--..--', '=': '-...-', '!': '-.-.--', '.': '.-.-.-', '-': '-....-', '+': '.-.-.', '"': '.-..-.', '?': '..--..', '/': '-..-.'}


def translate_text(text):
    """
    Translates text to morse code

    Accepts:
        text(str): String to translate

    Returns:
        str: A translated string of morse code
    """
    # Fixed: the lookup table is named ``morse`` but this function (and
    # translate_morse) referenced the undefined name ``Morse`` — NameError.
    if text == '':
        return 'Please provide a valid text'
    morse_code = ''
    words = text.split(' ')
    for word in words:
        w = []
        for char in word:
            if char.lower() in morse:
                w.append(morse[char.lower()])
        morse_code += ' '.join(w)
        morse_code += ' '
    return morse_code.rstrip()


def translate_morse(morse_code):
    """
    Translates morse code to english.

    Accepts:
        morse (str): A string of morse code to translate

    Returns:
        str: A translated string of text
    """
    if morse_code == '':
        return 'Please provide a valid morse code.'
    text = ''
    words = morse_code.split(' ')
    for morse_word in words:
        chars = morse_word.split(' ')
        for char in chars:
            for k, v in morse.items():
                if char == v:
                    text += k
        text += ' '
    return text.rstrip()


if __name__ == '__main__':
    text = 'This string has been translated to morse code.'
    # Fixed: the result was previously bound to ``morse``, clobbering the
    # lookup table before translate_morse ran.
    encoded = translate_text(text)
    translated_text = translate_morse(encoded)
    print(encoded)
    print(translated_text)
# Sum the integers 1..N, excluding multiples of 3 or 5.
N = int(input())
result = 0
for i in range(1, N + 1):
    # Skip multiples of 3 or 5.
    if i % 3 == 0 or i % 5 == 0:
        continue
    result += i
print(result)
# Sum the integers 1..n, excluding multiples of 3 or 5.
# Fixed: the input was bound to ``n`` but the loop referenced the
# undefined name ``N`` — NameError at runtime.
n = int(input())
result = 0
for i in range(1, n + 1):
    if i % 3 == 0 or i % 5 == 0:
        continue
    result += i
print(result)
def findDecision(obj): #obj[0]: Coupon, obj[1]: Education # {"feature": "Education", "instances": 127, "metric_value": 0.987, "depth": 1} if obj[1]<=2: # {"feature": "Coupon", "instances": 91, "metric_value": 0.9355, "depth": 2} if obj[0]>1: return 'True' elif obj[0]<=1: return 'True' else: return 'True' elif obj[1]>2: # {"feature": "Coupon", "instances": 36, "metric_value": 0.9436, "depth": 2} if obj[0]<=3: return 'False' elif obj[0]>3: return 'False' else: return 'False' else: return 'False'
def find_decision(obj): if obj[1] <= 2: if obj[0] > 1: return 'True' elif obj[0] <= 1: return 'True' else: return 'True' elif obj[1] > 2: if obj[0] <= 3: return 'False' elif obj[0] > 3: return 'False' else: return 'False' else: return 'False'
class Solution: def minimumTotal(self, triangle: List[List[int]]) -> int: n = len(triangle) if n == 1: return min(triangle[0]) row_curr = triangle[n-1] for row in range(n-2, -1, -1): row_up = triangle[row] for ind in range(len(row_up)): row_up[ind] = min(row_curr[ind] + triangle[row][ind], row_curr[ind+1] + triangle[row][ind]) row_curr = row_up return row_up[0]
class Solution: def minimum_total(self, triangle: List[List[int]]) -> int: n = len(triangle) if n == 1: return min(triangle[0]) row_curr = triangle[n - 1] for row in range(n - 2, -1, -1): row_up = triangle[row] for ind in range(len(row_up)): row_up[ind] = min(row_curr[ind] + triangle[row][ind], row_curr[ind + 1] + triangle[row][ind]) row_curr = row_up return row_up[0]
extensions = dict( required_params=['training_frame', 'x'], validate_required_params="", set_required_params=""" parms$training_frame <- training_frame if(!missing(x)){ parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore if(!missing(fold_column)){ parms$ignored_columns <- setdiff(parms$ignored_columns, fold_column) } } """, set_params=""" # Check if user_points is an acceptable set of user-specified starting points if( is.data.frame(user_points) || is.matrix(user_points) || is.list(user_points) || is.H2OFrame(user_points) ) { if ( length(init) > 1 || init == 'User') { parms[["init"]] <- "User" } else { warning(paste0("Parameter init must equal 'User' when user_points is set. Ignoring init = '", init, "'. Setting init = 'User'.")) } parms[["init"]] <- "User" # Convert user-specified starting points to H2OFrame if( is.data.frame(user_points) || is.matrix(user_points) || is.list(user_points) ) { if( !is.data.frame(user_points) && !is.matrix(user_points) ) user_points <- t(as.data.frame(user_points)) user_points <- as.h2o(user_points) } parms[["user_points"]] <- user_points # Set k if( !(missing(k)) && k!=as.integer(nrow(user_points)) ) { warning("Parameter k is not equal to the number of user-specified starting points. Ignoring k. Using specified starting points.") } parms[["k"]] <- as.numeric(nrow(user_points)) } else if ( is.character(init) ) { # Furthest, Random, PlusPlus{ parms[["user_points"]] <- NULL } else{ stop ("argument init must be set to Furthest, Random, PlusPlus, or a valid set of user-defined starting points.") } """, ) doc = dict( preamble=""" Performs k-means clustering on an H2O dataset """, params=dict( x="""A vector containing the \code{character} names of the predictors in the model.""" ), returns=""" an object of class \linkS4class{H2OClusteringModel}. 
""", seealso=""" \code{\link{h2o.cluster_sizes}}, \code{\link{h2o.totss}}, \code{\link{h2o.num_iterations}}, \code{\link{h2o.betweenss}}, \code{\link{h2o.tot_withinss}}, \code{\link{h2o.withinss}}, \code{\link{h2o.centersSTD}}, \code{\link{h2o.centers}} """, examples=""" library(h2o) h2o.init() prostate_path <- system.file("extdata", "prostate.csv", package = "h2o") prostate <- h2o.uploadFile(path = prostate_path) h2o.kmeans(training_frame = prostate, k = 10, x = c("AGE", "RACE", "VOL", "GLEASON")) """ )
extensions = dict(required_params=['training_frame', 'x'], validate_required_params='', set_required_params='\nparms$training_frame <- training_frame\nif(!missing(x)){\n parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore\n if(!missing(fold_column)){\n parms$ignored_columns <- setdiff(parms$ignored_columns, fold_column)\n }\n}\n', set_params='\n# Check if user_points is an acceptable set of user-specified starting points\nif( is.data.frame(user_points) || is.matrix(user_points) || is.list(user_points) || is.H2OFrame(user_points) ) {\n if ( length(init) > 1 || init == \'User\') {\n parms[["init"]] <- "User"\n } else {\n warning(paste0("Parameter init must equal \'User\' when user_points is set. Ignoring init = \'", init, "\'. Setting init = \'User\'."))\n }\n parms[["init"]] <- "User"\n \n # Convert user-specified starting points to H2OFrame\n if( is.data.frame(user_points) || is.matrix(user_points) || is.list(user_points) ) {\n if( !is.data.frame(user_points) && !is.matrix(user_points) ) user_points <- t(as.data.frame(user_points))\n user_points <- as.h2o(user_points)\n }\n parms[["user_points"]] <- user_points\n \n # Set k\n if( !(missing(k)) && k!=as.integer(nrow(user_points)) ) {\n warning("Parameter k is not equal to the number of user-specified starting points. Ignoring k. 
Using specified starting points.")\n }\n parms[["k"]] <- as.numeric(nrow(user_points))\n} else if ( is.character(init) ) { # Furthest, Random, PlusPlus{\n parms[["user_points"]] <- NULL\n} else{\n stop ("argument init must be set to Furthest, Random, PlusPlus, or a valid set of user-defined starting points.")\n}\n') doc = dict(preamble='\nPerforms k-means clustering on an H2O dataset\n', params=dict(x='A vector containing the \\code{character} names of the predictors in the model.'), returns='\nan object of class \\linkS4class{H2OClusteringModel}.\n', seealso='\n\\code{\\link{h2o.cluster_sizes}}, \\code{\\link{h2o.totss}}, \\code{\\link{h2o.num_iterations}}, \\code{\\link{h2o.betweenss}}, \\code{\\link{h2o.tot_withinss}}, \\code{\\link{h2o.withinss}}, \\code{\\link{h2o.centersSTD}}, \\code{\\link{h2o.centers}}\n', examples='\nlibrary(h2o)\nh2o.init()\nprostate_path <- system.file("extdata", "prostate.csv", package = "h2o")\nprostate <- h2o.uploadFile(path = prostate_path)\nh2o.kmeans(training_frame = prostate, k = 10, x = c("AGE", "RACE", "VOL", "GLEASON"))\n')
#Accessing specific elemnts from a dictionary Breakfast={ "Name":"dosa", "cost": 45, "Proteins": 4, "Fat":2 } #Finding the cost of Breakfast p=Breakfast.get("cost") print(p)
breakfast = {'Name': 'dosa', 'cost': 45, 'Proteins': 4, 'Fat': 2} p = Breakfast.get('cost') print(p)
NPKT = 100000 # def getselfaddr(): # return socket.getaddrinfo(None, PORT, socket.AF_INET6, socket.SOCK_DGRAM,socket.IPPROTO_IP)[0]
npkt = 100000
""" Configuration of flask application. Everything that could be different between running on your development platform or on ix.cs.uoregon.edu (or on a different deployment target) shoudl be here. """ DEBUG = True # Cookie key was obtained by: # import uuid # str(uuid.uuid4()) # We do it just once so that multiple processes # will share the same key. COOKIE_KEY = '48436e9a-ca70-451d-8e28-010c7787de40' PORT=5000 DICT="data/dict.txt"
""" Configuration of flask application. Everything that could be different between running on your development platform or on ix.cs.uoregon.edu (or on a different deployment target) shoudl be here. """ debug = True cookie_key = '48436e9a-ca70-451d-8e28-010c7787de40' port = 5000 dict = 'data/dict.txt'
deliver_states = { 'DEFAULT': ['1', 'PENDING_ORDERS', 'ACCEPT_PENDING_JOLLOF', 'ACCEPT_PENDING_DELICACY', 'TO_PICKUP', 'PICKED_UP_JOLLOF', 'PICKED_UP_DELICACY', 'TO_DROPOFF', 'DROPPED_OFF_JOLLOF', 'DROPPED_OFF_DELICACY'], 'CANCELLED': ['DEFAULT'], 'FLASH_LOCATION': ['FLASH_LOCATION', 'CANCELLED'], 'REQUEST_PHONE': ['REQUEST_PHONE', 'CANCENCELLED'], 'PENDING_ORDERS': ['PENDING_ORDERS', 'ACCEPT_PENDING_JOLLOF', 'ACCEPT_PENDING_DELICACY', 'CANCELLED'], 'ACCEPT_PENDING_JOLLOF': ['ACCEPT_PENDING_JOLLOF', 'CANCELLED'], 'ACCEPT_PENDING_DELICACY': ['ACCEPT_PENDING_DELICACY', 'CANCELLED'], 'TO_PICKUP': ['TO_PICKUP', 'PICKED_UP_JOLLOF', 'PICKED_UP_DELICACY', 'CANCELLED'], 'PICKED_UP_JOLLOF': ['PICKED_UP_JOLLOF', 'CANCELLED'], 'PICKED_UP_DELICACY': ['PICKED_UP_DELICACY', 'CANCELLED'], 'TO_DROPOFF': ['TO_DROPOFF', 'DROPPED_OFF_JOLLOF', 'DROPPED_OFF_DELICACY', 'CANCELLED'], 'DROPPED_OFF_JOLLOF': ['DROPPED_OFF_JOLLOF', 'CANCELLED'], 'DROPPED_OFF_DELICACY': ['DROPPED_OFF_DELICACY', 'CANCELLED'], } def is_deliver_next_state(old_state, new_state): ''' Returns boolean if new_state is a next state for old_state ''' try: state = deliver_states[old_state] if new_state in state: return True else: return False except KeyError: return False
deliver_states = {'DEFAULT': ['1', 'PENDING_ORDERS', 'ACCEPT_PENDING_JOLLOF', 'ACCEPT_PENDING_DELICACY', 'TO_PICKUP', 'PICKED_UP_JOLLOF', 'PICKED_UP_DELICACY', 'TO_DROPOFF', 'DROPPED_OFF_JOLLOF', 'DROPPED_OFF_DELICACY'], 'CANCELLED': ['DEFAULT'], 'FLASH_LOCATION': ['FLASH_LOCATION', 'CANCELLED'], 'REQUEST_PHONE': ['REQUEST_PHONE', 'CANCENCELLED'], 'PENDING_ORDERS': ['PENDING_ORDERS', 'ACCEPT_PENDING_JOLLOF', 'ACCEPT_PENDING_DELICACY', 'CANCELLED'], 'ACCEPT_PENDING_JOLLOF': ['ACCEPT_PENDING_JOLLOF', 'CANCELLED'], 'ACCEPT_PENDING_DELICACY': ['ACCEPT_PENDING_DELICACY', 'CANCELLED'], 'TO_PICKUP': ['TO_PICKUP', 'PICKED_UP_JOLLOF', 'PICKED_UP_DELICACY', 'CANCELLED'], 'PICKED_UP_JOLLOF': ['PICKED_UP_JOLLOF', 'CANCELLED'], 'PICKED_UP_DELICACY': ['PICKED_UP_DELICACY', 'CANCELLED'], 'TO_DROPOFF': ['TO_DROPOFF', 'DROPPED_OFF_JOLLOF', 'DROPPED_OFF_DELICACY', 'CANCELLED'], 'DROPPED_OFF_JOLLOF': ['DROPPED_OFF_JOLLOF', 'CANCELLED'], 'DROPPED_OFF_DELICACY': ['DROPPED_OFF_DELICACY', 'CANCELLED']} def is_deliver_next_state(old_state, new_state): """ Returns boolean if new_state is a next state for old_state """ try: state = deliver_states[old_state] if new_state in state: return True else: return False except KeyError: return False
class Synonym: def __init__(self, taxon_id, name_id, id='', name_phrase='', according_to_id='', status='synonym', reference_id='', page_reference_id='', link='', remarks='', needs_review=''): self.id = id self.taxon_id = taxon_id self.name_id = name_id self.name_phrase = name_phrase self.according_to_id = according_to_id self.status = status self.reference_id = reference_id self.page_reference_id = page_reference_id self.link = link self.remarks = remarks self.needs_review = needs_review def __str__(self): return str(self.id) + '\t' + \ str(self.taxon_id) + '\t' + \ str(self.name_id) + '\t' + \ self.name_phrase + '\t' + \ str(self.according_to_id) + '\t' + \ self.status + '\t' + \ str(self.reference_id) + '\t' + \ str(self.page_reference_id) + '\t' + \ self.link + '\t' + \ self.remarks + '\n' # def __repr__(self): # return { # 'id': self.id, # 'taxon_id': self.taxon_id, # 'name_id': self.name_id, # 'name_phrase': self.name_phrase, # 'according_to_id': self.according_to_id, # 'status': self.status, # 'reference_id': self.reference_id, # 'page_reference_id': self.page_reference_id, # 'link': self.link, # 'remarks': self.remarks # } class Synonyms: def __init__(self, output_tsv): self.synonyms = [] self.output_tsv = output_tsv def append(self, synonym): if isinstance(synonym, Synonym): self.synonyms.append(synonym) else: print('Error: synonym must be Synonym type') def write_output(self, output_tsv=''): if output_tsv == '' and self.output_tsv != '': output_tsv = self.output_tsv file = open(output_tsv, 'w') if len(self.synonyms) > 0: header = '\t'.join(self.synonyms[0].__dict__.keys()) + '\n' file.write(header) for synonym in self.synonyms: row = '\t'.join(str(v) for v in synonym.__dict__.values()) + '\n' file.write(row) file.close()
class Synonym: def __init__(self, taxon_id, name_id, id='', name_phrase='', according_to_id='', status='synonym', reference_id='', page_reference_id='', link='', remarks='', needs_review=''): self.id = id self.taxon_id = taxon_id self.name_id = name_id self.name_phrase = name_phrase self.according_to_id = according_to_id self.status = status self.reference_id = reference_id self.page_reference_id = page_reference_id self.link = link self.remarks = remarks self.needs_review = needs_review def __str__(self): return str(self.id) + '\t' + str(self.taxon_id) + '\t' + str(self.name_id) + '\t' + self.name_phrase + '\t' + str(self.according_to_id) + '\t' + self.status + '\t' + str(self.reference_id) + '\t' + str(self.page_reference_id) + '\t' + self.link + '\t' + self.remarks + '\n' class Synonyms: def __init__(self, output_tsv): self.synonyms = [] self.output_tsv = output_tsv def append(self, synonym): if isinstance(synonym, Synonym): self.synonyms.append(synonym) else: print('Error: synonym must be Synonym type') def write_output(self, output_tsv=''): if output_tsv == '' and self.output_tsv != '': output_tsv = self.output_tsv file = open(output_tsv, 'w') if len(self.synonyms) > 0: header = '\t'.join(self.synonyms[0].__dict__.keys()) + '\n' file.write(header) for synonym in self.synonyms: row = '\t'.join((str(v) for v in synonym.__dict__.values())) + '\n' file.write(row) file.close()
# ---------------------------------------------------------------------- # CISCO-VPDN-MGMT-MIB # Compiled MIB # Do not modify this file directly # Run ./noc mib make-cmib instead # ---------------------------------------------------------------------- # Copyright (C) 2007-2020 The NOC Project # See LICENSE for details # ---------------------------------------------------------------------- # MIB Name NAME = "CISCO-VPDN-MGMT-MIB" # Metadata LAST_UPDATED = "2009-06-16" COMPILED = "2020-01-19" # MIB Data: name -> oid MIB = { "CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIB": "1.3.6.1.4.1.9.10.24", "CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBNotifs": "1.3.6.1.4.1.9.10.24.0", "CISCO-VPDN-MGMT-MIB::cvpdnNotifSessionID": "1.3.6.1.4.1.9.10.24.0.1", "CISCO-VPDN-MGMT-MIB::cvpdnNotifSessionEvent": "1.3.6.1.4.1.9.10.24.0.2", "CISCO-VPDN-MGMT-MIB::cvpdnNotifSession": "1.3.6.1.4.1.9.10.24.0.3", "CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBObjects": "1.3.6.1.4.1.9.10.24.1", "CISCO-VPDN-MGMT-MIB::cvpdnSystemInfo": "1.3.6.1.4.1.9.10.24.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelTotal": "1.3.6.1.4.1.9.10.24.1.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnSessionTotal": "1.3.6.1.4.1.9.10.24.1.1.2", "CISCO-VPDN-MGMT-MIB::cvpdnDeniedUsersTotal": "1.3.6.1.4.1.9.10.24.1.1.3", "CISCO-VPDN-MGMT-MIB::cvpdnSystemTable": "1.3.6.1.4.1.9.10.24.1.1.4", "CISCO-VPDN-MGMT-MIB::cvpdnSystemEntry": "1.3.6.1.4.1.9.10.24.1.1.4.1", "CISCO-VPDN-MGMT-MIB::cvpdnSystemTunnelType": "1.3.6.1.4.1.9.10.24.1.1.4.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnSystemTunnelTotal": "1.3.6.1.4.1.9.10.24.1.1.4.1.2", "CISCO-VPDN-MGMT-MIB::cvpdnSystemSessionTotal": "1.3.6.1.4.1.9.10.24.1.1.4.1.3", "CISCO-VPDN-MGMT-MIB::cvpdnSystemDeniedUsersTotal": "1.3.6.1.4.1.9.10.24.1.1.4.1.4", "CISCO-VPDN-MGMT-MIB::cvpdnSystemInitialConnReq": "1.3.6.1.4.1.9.10.24.1.1.4.1.5", "CISCO-VPDN-MGMT-MIB::cvpdnSystemSuccessConnReq": "1.3.6.1.4.1.9.10.24.1.1.4.1.6", "CISCO-VPDN-MGMT-MIB::cvpdnSystemFailedConnReq": "1.3.6.1.4.1.9.10.24.1.1.4.1.7", 
"CISCO-VPDN-MGMT-MIB::cvpdnSystemNotifSessionEnabled": "1.3.6.1.4.1.9.10.24.1.1.5", "CISCO-VPDN-MGMT-MIB::cvpdnSystemClearSessions": "1.3.6.1.4.1.9.10.24.1.1.6", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelInfo": "1.3.6.1.4.1.9.10.24.1.2", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelTable": "1.3.6.1.4.1.9.10.24.1.2.1", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelEntry": "1.3.6.1.4.1.9.10.24.1.2.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelTunnelId": "1.3.6.1.4.1.9.10.24.1.2.1.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteTunnelId": "1.3.6.1.4.1.9.10.24.1.2.1.1.2", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalName": "1.3.6.1.4.1.9.10.24.1.2.1.1.3", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteName": "1.3.6.1.4.1.9.10.24.1.2.1.1.4", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteEndpointName": "1.3.6.1.4.1.9.10.24.1.2.1.1.5", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalInitConnection": "1.3.6.1.4.1.9.10.24.1.2.1.1.6", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelOrigCause": "1.3.6.1.4.1.9.10.24.1.2.1.1.7", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelState": "1.3.6.1.4.1.9.10.24.1.2.1.1.8", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelActiveSessions": "1.3.6.1.4.1.9.10.24.1.2.1.1.9", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelDeniedUsers": "1.3.6.1.4.1.9.10.24.1.2.1.1.10", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSoftshut": "1.3.6.1.4.1.9.10.24.1.2.1.1.12", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelNetworkServiceType": "1.3.6.1.4.1.9.10.24.1.2.1.1.13", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalIpAddress": "1.3.6.1.4.1.9.10.24.1.2.1.1.14", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSourceIpAddress": "1.3.6.1.4.1.9.10.24.1.2.1.1.15", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteIpAddress": "1.3.6.1.4.1.9.10.24.1.2.1.1.16", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrTable": "1.3.6.1.4.1.9.10.24.1.2.2", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrEntry": "1.3.6.1.4.1.9.10.24.1.2.2.1", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrTunnelId": "1.3.6.1.4.1.9.10.24.1.2.2.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteTunnelId": "1.3.6.1.4.1.9.10.24.1.2.2.1.2", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalName": "1.3.6.1.4.1.9.10.24.1.2.2.1.3", 
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteName": "1.3.6.1.4.1.9.10.24.1.2.2.1.4", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteEndpointName": "1.3.6.1.4.1.9.10.24.1.2.2.1.5", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInitConnection": "1.3.6.1.4.1.9.10.24.1.2.2.1.6", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrOrigCause": "1.3.6.1.4.1.9.10.24.1.2.2.1.7", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrState": "1.3.6.1.4.1.9.10.24.1.2.2.1.8", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrActiveSessions": "1.3.6.1.4.1.9.10.24.1.2.2.1.9", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrDeniedUsers": "1.3.6.1.4.1.9.10.24.1.2.2.1.10", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSoftshut": "1.3.6.1.4.1.9.10.24.1.2.2.1.11", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrNetworkServiceType": "1.3.6.1.4.1.9.10.24.1.2.2.1.12", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalIpAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.13", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceIpAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.14", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteIpAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.15", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInetAddressType": "1.3.6.1.4.1.9.10.24.1.2.2.1.16", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInetAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.17", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceInetAddressType": "1.3.6.1.4.1.9.10.24.1.2.2.1.18", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceInetAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.19", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteInetAddressType": "1.3.6.1.4.1.9.10.24.1.2.2.1.20", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteInetAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.21", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionInfo": "1.3.6.1.4.1.9.10.24.1.3", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionTable": "1.3.6.1.4.1.9.10.24.1.3.1", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionEntry": "1.3.6.1.4.1.9.10.24.1.3.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionId": "1.3.6.1.4.1.9.10.24.1.3.1.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionUserName": "1.3.6.1.4.1.9.10.24.1.3.1.1.2", 
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionState": "1.3.6.1.4.1.9.10.24.1.3.1.1.3", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionCallDuration": "1.3.6.1.4.1.9.10.24.1.3.1.1.4", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionPacketsOut": "1.3.6.1.4.1.9.10.24.1.3.1.1.5", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionBytesOut": "1.3.6.1.4.1.9.10.24.1.3.1.1.6", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionPacketsIn": "1.3.6.1.4.1.9.10.24.1.3.1.1.7", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionBytesIn": "1.3.6.1.4.1.9.10.24.1.3.1.1.8", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDeviceType": "1.3.6.1.4.1.9.10.24.1.3.1.1.9", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDeviceCallerId": "1.3.6.1.4.1.9.10.24.1.3.1.1.10", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDevicePhyId": "1.3.6.1.4.1.9.10.24.1.3.1.1.11", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionMultilink": "1.3.6.1.4.1.9.10.24.1.3.1.1.12", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemSlotIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.13", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemPortIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.14", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1SlotIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.15", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1PortIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.16", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1ChannelIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.17", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemCallStartTime": "1.3.6.1.4.1.9.10.24.1.3.1.1.18", "CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemCallStartIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.19", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrTable": "1.3.6.1.4.1.9.10.24.1.3.2", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrEntry": "1.3.6.1.4.1.9.10.24.1.3.2.1", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrSessionId": "1.3.6.1.4.1.9.10.24.1.3.2.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrUserName": "1.3.6.1.4.1.9.10.24.1.3.2.1.2", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrState": "1.3.6.1.4.1.9.10.24.1.3.2.1.3", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrCallDuration": "1.3.6.1.4.1.9.10.24.1.3.2.1.4", 
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrPacketsOut": "1.3.6.1.4.1.9.10.24.1.3.2.1.5", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrBytesOut": "1.3.6.1.4.1.9.10.24.1.3.2.1.6", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrPacketsIn": "1.3.6.1.4.1.9.10.24.1.3.2.1.7", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrBytesIn": "1.3.6.1.4.1.9.10.24.1.3.2.1.8", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDeviceType": "1.3.6.1.4.1.9.10.24.1.3.2.1.9", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDeviceCallerId": "1.3.6.1.4.1.9.10.24.1.3.2.1.10", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDevicePhyId": "1.3.6.1.4.1.9.10.24.1.3.2.1.11", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilink": "1.3.6.1.4.1.9.10.24.1.3.2.1.12", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemSlotIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.13", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemPortIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.14", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1SlotIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.15", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1PortIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.16", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1ChannelIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.17", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemCallStartTime": "1.3.6.1.4.1.9.10.24.1.3.2.1.18", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemCallStartIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.19", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrVirtualCircuitID": "1.3.6.1.4.1.9.10.24.1.3.2.1.20", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrSentPktsDropped": "1.3.6.1.4.1.9.10.24.1.3.2.1.21", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrRecvPktsDropped": "1.3.6.1.4.1.9.10.24.1.3.2.1.22", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilinkBundle": "1.3.6.1.4.1.9.10.24.1.3.2.1.23", "CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilinkIfIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.24", "CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfo": "1.3.6.1.4.1.9.10.24.1.4", "CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfoTable": "1.3.6.1.4.1.9.10.24.1.4.1", "CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfoEntry": "1.3.6.1.4.1.9.10.24.1.4.1.1", 
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistUname": "1.3.6.1.4.1.9.10.24.1.4.1.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistTunnelId": "1.3.6.1.4.1.9.10.24.1.4.1.1.2", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistUserId": "1.3.6.1.4.1.9.10.24.1.4.1.1.3", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistLocalInitConn": "1.3.6.1.4.1.9.10.24.1.4.1.1.4", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistLocalName": "1.3.6.1.4.1.9.10.24.1.4.1.1.5", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistRemoteName": "1.3.6.1.4.1.9.10.24.1.4.1.1.6", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceIp": "1.3.6.1.4.1.9.10.24.1.4.1.1.7", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestIp": "1.3.6.1.4.1.9.10.24.1.4.1.1.8", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistCount": "1.3.6.1.4.1.9.10.24.1.4.1.1.9", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailTime": "1.3.6.1.4.1.9.10.24.1.4.1.1.10", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailType": "1.3.6.1.4.1.9.10.24.1.4.1.1.11", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailReason": "1.3.6.1.4.1.9.10.24.1.4.1.1.12", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceInetType": "1.3.6.1.4.1.9.10.24.1.4.1.1.13", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceInetAddr": "1.3.6.1.4.1.9.10.24.1.4.1.1.14", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestInetType": "1.3.6.1.4.1.9.10.24.1.4.1.1.15", "CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestInetAddr": "1.3.6.1.4.1.9.10.24.1.4.1.1.16", "CISCO-VPDN-MGMT-MIB::cvpdnTemplateInfo": "1.3.6.1.4.1.9.10.24.1.5", "CISCO-VPDN-MGMT-MIB::cvpdnTemplateTable": "1.3.6.1.4.1.9.10.24.1.5.1", "CISCO-VPDN-MGMT-MIB::cvpdnTemplateEntry": "1.3.6.1.4.1.9.10.24.1.5.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnTemplateName": "1.3.6.1.4.1.9.10.24.1.5.1.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnTemplateActiveSessions": "1.3.6.1.4.1.9.10.24.1.5.1.1.2", "CISCO-VPDN-MGMT-MIB::cvpdnMultilinkInfo": "1.3.6.1.4.1.9.10.24.1.6", "CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithOneLink": "1.3.6.1.4.1.9.10.24.1.6.1", "CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithTwoLinks": 
"1.3.6.1.4.1.9.10.24.1.6.2", "CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithMoreThanTwoLinks": "1.3.6.1.4.1.9.10.24.1.6.3", "CISCO-VPDN-MGMT-MIB::cvpdnBundleTable": "1.3.6.1.4.1.9.10.24.1.6.4", "CISCO-VPDN-MGMT-MIB::cvpdnBundleEntry": "1.3.6.1.4.1.9.10.24.1.6.4.1", "CISCO-VPDN-MGMT-MIB::cvpdnBundleName": "1.3.6.1.4.1.9.10.24.1.6.4.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnBundleLinkCount": "1.3.6.1.4.1.9.10.24.1.6.4.1.2", "CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpointType": "1.3.6.1.4.1.9.10.24.1.6.4.1.3", "CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpoint": "1.3.6.1.4.1.9.10.24.1.6.4.1.4", "CISCO-VPDN-MGMT-MIB::cvpdnBundlePeerIpAddrType": "1.3.6.1.4.1.9.10.24.1.6.4.1.5", "CISCO-VPDN-MGMT-MIB::cvpdnBundlePeerIpAddr": "1.3.6.1.4.1.9.10.24.1.6.4.1.6", "CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpointClass": "1.3.6.1.4.1.9.10.24.1.6.4.1.7", "CISCO-VPDN-MGMT-MIB::cvpdnBundleLastChanged": "1.3.6.1.4.1.9.10.24.1.6.5", "CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTable": "1.3.6.1.4.1.9.10.24.1.6.6", "CISCO-VPDN-MGMT-MIB::cvpdnBundleChildEntry": "1.3.6.1.4.1.9.10.24.1.6.6.1", "CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTunnelType": "1.3.6.1.4.1.9.10.24.1.6.6.1.1", "CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTunnelId": "1.3.6.1.4.1.9.10.24.1.6.6.1.2", "CISCO-VPDN-MGMT-MIB::cvpdnBundleChildSessionId": "1.3.6.1.4.1.9.10.24.1.6.6.1.3", "CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBConformance": "1.3.6.1.4.1.9.10.24.3", "CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBCompliances": "1.3.6.1.4.1.9.10.24.3.1", "CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBGroups": "1.3.6.1.4.1.9.10.24.3.2", } DISPLAY_HINTS = {}
name = 'CISCO-VPDN-MGMT-MIB' last_updated = '2009-06-16' compiled = '2020-01-19' mib = {'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIB': '1.3.6.1.4.1.9.10.24', 'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBNotifs': '1.3.6.1.4.1.9.10.24.0', 'CISCO-VPDN-MGMT-MIB::cvpdnNotifSessionID': '1.3.6.1.4.1.9.10.24.0.1', 'CISCO-VPDN-MGMT-MIB::cvpdnNotifSessionEvent': '1.3.6.1.4.1.9.10.24.0.2', 'CISCO-VPDN-MGMT-MIB::cvpdnNotifSession': '1.3.6.1.4.1.9.10.24.0.3', 'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBObjects': '1.3.6.1.4.1.9.10.24.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemInfo': '1.3.6.1.4.1.9.10.24.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelTotal': '1.3.6.1.4.1.9.10.24.1.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionTotal': '1.3.6.1.4.1.9.10.24.1.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnDeniedUsersTotal': '1.3.6.1.4.1.9.10.24.1.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemTable': '1.3.6.1.4.1.9.10.24.1.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemEntry': '1.3.6.1.4.1.9.10.24.1.1.4.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemTunnelType': '1.3.6.1.4.1.9.10.24.1.1.4.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemTunnelTotal': '1.3.6.1.4.1.9.10.24.1.1.4.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemSessionTotal': '1.3.6.1.4.1.9.10.24.1.1.4.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemDeniedUsersTotal': '1.3.6.1.4.1.9.10.24.1.1.4.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemInitialConnReq': '1.3.6.1.4.1.9.10.24.1.1.4.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemSuccessConnReq': '1.3.6.1.4.1.9.10.24.1.1.4.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemFailedConnReq': '1.3.6.1.4.1.9.10.24.1.1.4.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemNotifSessionEnabled': '1.3.6.1.4.1.9.10.24.1.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemClearSessions': '1.3.6.1.4.1.9.10.24.1.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelInfo': '1.3.6.1.4.1.9.10.24.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelTable': '1.3.6.1.4.1.9.10.24.1.2.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelEntry': '1.3.6.1.4.1.9.10.24.1.2.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelTunnelId': '1.3.6.1.4.1.9.10.24.1.2.1.1.1', 
'CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteTunnelId': '1.3.6.1.4.1.9.10.24.1.2.1.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalName': '1.3.6.1.4.1.9.10.24.1.2.1.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteName': '1.3.6.1.4.1.9.10.24.1.2.1.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteEndpointName': '1.3.6.1.4.1.9.10.24.1.2.1.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalInitConnection': '1.3.6.1.4.1.9.10.24.1.2.1.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelOrigCause': '1.3.6.1.4.1.9.10.24.1.2.1.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelState': '1.3.6.1.4.1.9.10.24.1.2.1.1.8', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelActiveSessions': '1.3.6.1.4.1.9.10.24.1.2.1.1.9', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelDeniedUsers': '1.3.6.1.4.1.9.10.24.1.2.1.1.10', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSoftshut': '1.3.6.1.4.1.9.10.24.1.2.1.1.12', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelNetworkServiceType': '1.3.6.1.4.1.9.10.24.1.2.1.1.13', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalIpAddress': '1.3.6.1.4.1.9.10.24.1.2.1.1.14', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSourceIpAddress': '1.3.6.1.4.1.9.10.24.1.2.1.1.15', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteIpAddress': '1.3.6.1.4.1.9.10.24.1.2.1.1.16', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrTable': '1.3.6.1.4.1.9.10.24.1.2.2', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrEntry': '1.3.6.1.4.1.9.10.24.1.2.2.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrTunnelId': '1.3.6.1.4.1.9.10.24.1.2.2.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteTunnelId': '1.3.6.1.4.1.9.10.24.1.2.2.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalName': '1.3.6.1.4.1.9.10.24.1.2.2.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteName': '1.3.6.1.4.1.9.10.24.1.2.2.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteEndpointName': '1.3.6.1.4.1.9.10.24.1.2.2.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInitConnection': '1.3.6.1.4.1.9.10.24.1.2.2.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrOrigCause': '1.3.6.1.4.1.9.10.24.1.2.2.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrState': '1.3.6.1.4.1.9.10.24.1.2.2.1.8', 
'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrActiveSessions': '1.3.6.1.4.1.9.10.24.1.2.2.1.9', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrDeniedUsers': '1.3.6.1.4.1.9.10.24.1.2.2.1.10', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSoftshut': '1.3.6.1.4.1.9.10.24.1.2.2.1.11', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrNetworkServiceType': '1.3.6.1.4.1.9.10.24.1.2.2.1.12', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalIpAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.13', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceIpAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.14', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteIpAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.15', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInetAddressType': '1.3.6.1.4.1.9.10.24.1.2.2.1.16', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInetAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.17', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceInetAddressType': '1.3.6.1.4.1.9.10.24.1.2.2.1.18', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceInetAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.19', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteInetAddressType': '1.3.6.1.4.1.9.10.24.1.2.2.1.20', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteInetAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.21', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionInfo': '1.3.6.1.4.1.9.10.24.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionTable': '1.3.6.1.4.1.9.10.24.1.3.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionEntry': '1.3.6.1.4.1.9.10.24.1.3.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionId': '1.3.6.1.4.1.9.10.24.1.3.1.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionUserName': '1.3.6.1.4.1.9.10.24.1.3.1.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionState': '1.3.6.1.4.1.9.10.24.1.3.1.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionCallDuration': '1.3.6.1.4.1.9.10.24.1.3.1.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionPacketsOut': '1.3.6.1.4.1.9.10.24.1.3.1.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionBytesOut': '1.3.6.1.4.1.9.10.24.1.3.1.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionPacketsIn': '1.3.6.1.4.1.9.10.24.1.3.1.1.7', 
'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionBytesIn': '1.3.6.1.4.1.9.10.24.1.3.1.1.8', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDeviceType': '1.3.6.1.4.1.9.10.24.1.3.1.1.9', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDeviceCallerId': '1.3.6.1.4.1.9.10.24.1.3.1.1.10', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDevicePhyId': '1.3.6.1.4.1.9.10.24.1.3.1.1.11', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionMultilink': '1.3.6.1.4.1.9.10.24.1.3.1.1.12', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemSlotIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.13', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemPortIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.14', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1SlotIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.15', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1PortIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.16', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1ChannelIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.17', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemCallStartTime': '1.3.6.1.4.1.9.10.24.1.3.1.1.18', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemCallStartIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.19', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrTable': '1.3.6.1.4.1.9.10.24.1.3.2', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrEntry': '1.3.6.1.4.1.9.10.24.1.3.2.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrSessionId': '1.3.6.1.4.1.9.10.24.1.3.2.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrUserName': '1.3.6.1.4.1.9.10.24.1.3.2.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrState': '1.3.6.1.4.1.9.10.24.1.3.2.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrCallDuration': '1.3.6.1.4.1.9.10.24.1.3.2.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrPacketsOut': '1.3.6.1.4.1.9.10.24.1.3.2.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrBytesOut': '1.3.6.1.4.1.9.10.24.1.3.2.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrPacketsIn': '1.3.6.1.4.1.9.10.24.1.3.2.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrBytesIn': '1.3.6.1.4.1.9.10.24.1.3.2.1.8', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDeviceType': '1.3.6.1.4.1.9.10.24.1.3.2.1.9', 
'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDeviceCallerId': '1.3.6.1.4.1.9.10.24.1.3.2.1.10', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDevicePhyId': '1.3.6.1.4.1.9.10.24.1.3.2.1.11', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilink': '1.3.6.1.4.1.9.10.24.1.3.2.1.12', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemSlotIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.13', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemPortIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.14', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1SlotIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.15', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1PortIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.16', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1ChannelIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.17', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemCallStartTime': '1.3.6.1.4.1.9.10.24.1.3.2.1.18', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemCallStartIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.19', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrVirtualCircuitID': '1.3.6.1.4.1.9.10.24.1.3.2.1.20', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrSentPktsDropped': '1.3.6.1.4.1.9.10.24.1.3.2.1.21', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrRecvPktsDropped': '1.3.6.1.4.1.9.10.24.1.3.2.1.22', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilinkBundle': '1.3.6.1.4.1.9.10.24.1.3.2.1.23', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilinkIfIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.24', 'CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfo': '1.3.6.1.4.1.9.10.24.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfoTable': '1.3.6.1.4.1.9.10.24.1.4.1', 'CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfoEntry': '1.3.6.1.4.1.9.10.24.1.4.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistUname': '1.3.6.1.4.1.9.10.24.1.4.1.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistTunnelId': '1.3.6.1.4.1.9.10.24.1.4.1.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistUserId': '1.3.6.1.4.1.9.10.24.1.4.1.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistLocalInitConn': '1.3.6.1.4.1.9.10.24.1.4.1.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistLocalName': 
'1.3.6.1.4.1.9.10.24.1.4.1.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistRemoteName': '1.3.6.1.4.1.9.10.24.1.4.1.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceIp': '1.3.6.1.4.1.9.10.24.1.4.1.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestIp': '1.3.6.1.4.1.9.10.24.1.4.1.1.8', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistCount': '1.3.6.1.4.1.9.10.24.1.4.1.1.9', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailTime': '1.3.6.1.4.1.9.10.24.1.4.1.1.10', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailType': '1.3.6.1.4.1.9.10.24.1.4.1.1.11', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailReason': '1.3.6.1.4.1.9.10.24.1.4.1.1.12', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceInetType': '1.3.6.1.4.1.9.10.24.1.4.1.1.13', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceInetAddr': '1.3.6.1.4.1.9.10.24.1.4.1.1.14', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestInetType': '1.3.6.1.4.1.9.10.24.1.4.1.1.15', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestInetAddr': '1.3.6.1.4.1.9.10.24.1.4.1.1.16', 'CISCO-VPDN-MGMT-MIB::cvpdnTemplateInfo': '1.3.6.1.4.1.9.10.24.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnTemplateTable': '1.3.6.1.4.1.9.10.24.1.5.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTemplateEntry': '1.3.6.1.4.1.9.10.24.1.5.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTemplateName': '1.3.6.1.4.1.9.10.24.1.5.1.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTemplateActiveSessions': '1.3.6.1.4.1.9.10.24.1.5.1.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnMultilinkInfo': '1.3.6.1.4.1.9.10.24.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithOneLink': '1.3.6.1.4.1.9.10.24.1.6.1', 'CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithTwoLinks': '1.3.6.1.4.1.9.10.24.1.6.2', 'CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithMoreThanTwoLinks': '1.3.6.1.4.1.9.10.24.1.6.3', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleTable': '1.3.6.1.4.1.9.10.24.1.6.4', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleEntry': '1.3.6.1.4.1.9.10.24.1.6.4.1', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleName': '1.3.6.1.4.1.9.10.24.1.6.4.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleLinkCount': '1.3.6.1.4.1.9.10.24.1.6.4.1.2', 
'CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpointType': '1.3.6.1.4.1.9.10.24.1.6.4.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpoint': '1.3.6.1.4.1.9.10.24.1.6.4.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnBundlePeerIpAddrType': '1.3.6.1.4.1.9.10.24.1.6.4.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnBundlePeerIpAddr': '1.3.6.1.4.1.9.10.24.1.6.4.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpointClass': '1.3.6.1.4.1.9.10.24.1.6.4.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleLastChanged': '1.3.6.1.4.1.9.10.24.1.6.5', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTable': '1.3.6.1.4.1.9.10.24.1.6.6', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleChildEntry': '1.3.6.1.4.1.9.10.24.1.6.6.1', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTunnelType': '1.3.6.1.4.1.9.10.24.1.6.6.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTunnelId': '1.3.6.1.4.1.9.10.24.1.6.6.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleChildSessionId': '1.3.6.1.4.1.9.10.24.1.6.6.1.3', 'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBConformance': '1.3.6.1.4.1.9.10.24.3', 'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBCompliances': '1.3.6.1.4.1.9.10.24.3.1', 'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBGroups': '1.3.6.1.4.1.9.10.24.3.2'} display_hints = {}
# Script: reads N points (position, type-flag) from stdin and prints a count.
# Domain is not stated in this chunk -- it looks like a coverage/radius puzzle
# where flag 1 and flag 0 mark two kinds of points; confirm against the
# original problem statement.
N = int(input())
positions = []
for x in range(N):
    input_line = input().split()
    positions.append([int(input_line[0]), int(input_line[1])])
# Sort by position so adjacency in the list means adjacency on the line.
sorted_positions = sorted(positions)
minRadius = 1000000  # sentinel larger than any expected gap
for x in range(len(sorted_positions)-1):
    # Track the smallest gap between a flag-0 point and a neighbouring
    # flag-1 point on either side.
    # NOTE(review): when x == 0, the x-1 index wraps to the LAST element via
    # Python negative indexing, so the first point is compared against the
    # last -- this looks unintentional; confirm before relying on it.
    if sorted_positions[x][1] == 0 and sorted_positions[x-1][1] == 1:
        distance = abs(sorted_positions[x-1][0] - sorted_positions[x][0])
        if distance < minRadius:
            minRadius = distance
    if sorted_positions[x][1] == 0 and sorted_positions[x+1][1] == 1:
        distance = abs(sorted_positions[x][0] - sorted_positions[x+1][0])
        if distance < minRadius:
            minRadius = distance
minRadius -= 1  # shrink so flag-0 points stay strictly outside the radius
visited = [False] * len(sorted_positions)  # NOTE(review): never used below
counter = 0
previous = 0
# Count flag-1 points that start a new "group": farther than minRadius from
# the previous point (of either flag) in sorted order.
for x in range(0, len(sorted_positions)):
    if x == 0:
        if sorted_positions[x][1] == 1:
            counter += 1
    elif sorted_positions[x][1] == 1:
        if abs(sorted_positions[x][0] - previous) <= minRadius:
            pass
        else:
            counter += 1
    elif sorted_positions[x][1] == 0:
        pass
    previous = sorted_positions[x][0]
print(counter)
# Script: reads n points (position, type-flag) from stdin, derives a maximum
# radius from the smallest gap between a flag-0 point and an adjacent flag-1
# point, then counts groups of flag-1 points separated by more than that
# radius.
#
# Fixes: the original mixed two naming schemes (`n` assigned but `N` read,
# `min_radius` assigned but `minRadius` read), which raised NameError on the
# first loop; the unused `visited` list is removed.
n = int(input())
positions = []
for _ in range(n):
    fields = input().split()
    positions.append([int(fields[0]), int(fields[1])])
# Sort by position so adjacency in the list means adjacency on the line.
sorted_positions = sorted(positions)

min_radius = 1000000  # sentinel larger than any expected gap
for i in range(len(sorted_positions) - 1):
    # NOTE(review): at i == 0 the i-1 index wraps to the last element
    # (Python negative indexing); kept from the original -- confirm against
    # the problem statement.
    if sorted_positions[i][1] == 0 and sorted_positions[i - 1][1] == 1:
        distance = abs(sorted_positions[i - 1][0] - sorted_positions[i][0])
        if distance < min_radius:
            min_radius = distance
    if sorted_positions[i][1] == 0 and sorted_positions[i + 1][1] == 1:
        distance = abs(sorted_positions[i][0] - sorted_positions[i + 1][0])
        if distance < min_radius:
            min_radius = distance
min_radius -= 1  # flag-0 points must stay strictly outside the radius

# Count flag-1 points that start a new group: farther than min_radius from
# the previous point (of either flag) in sorted order.
counter = 0
previous = 0
for i in range(len(sorted_positions)):
    if i == 0:
        if sorted_positions[i][1] == 1:
            counter += 1
    elif sorted_positions[i][1] == 1:
        if abs(sorted_positions[i][0] - previous) > min_radius:
            counter += 1
    previous = sorted_positions[i][0]
print(counter)
# A minimal "calculator" module: four binary arithmetic operations.


def add(x, y):
    """Return the sum of *x* and *y* (int or float)."""
    return x + y


def subtract(x, y):
    """Return *x* minus *y* (int or float)."""
    return x - y


def multiply(x, y):
    """Return the product of *x* and *y* (int or float)."""
    return x * y


def divide(x, y):
    """Return *x* divided by *y* (true division; raises ZeroDivisionError
    when *y* is zero)."""
    return x / y
def add(x, y):
    """Add two numbers.

    Parameters
    ----------
    x : int/float
        First addend.
    y : int/float
        Second addend.

    Returns
    -------
    int/float
        The sum ``x + y``.
    """
    result = x + y
    return result


def subtract(x, y):
    """Subtract *y* from *x*.

    Parameters
    ----------
    x : int/float
        Minuend.
    y : int/float
        Subtrahend.

    Returns
    -------
    int/float
        The difference ``x - y``.
    """
    result = x - y
    return result


def multiply(x, y):
    """Multiply two numbers.

    Parameters
    ----------
    x : int/float
        First factor.
    y : int/float
        Second factor.

    Returns
    -------
    int/float
        The product ``x * y``.
    """
    result = x * y
    return result


def divide(x, y):
    """Divide *x* by *y* using true division.

    Parameters
    ----------
    x : int/float
        Dividend.
    y : int/float
        Divisor (ZeroDivisionError when zero).

    Returns
    -------
    int/float
        The quotient ``x / y``.
    """
    result = x / y
    return result
"""Modules handling environment data. For example: types for transitions/trajectories; methods to compute rollouts; buffers to store transitions; helpers for these modules. """
"""Modules handling environment data. For example: types for transitions/trajectories; methods to compute rollouts; buffers to store transitions; helpers for these modules. """
# --- Payables sheet layout ---------------------------------------------------
PAYABLE_FIRST_ROW = 20        # first data row of the payables sheet
PAYABLE_FIRST_COL = 2
PAYABLE_LAST_COL = 25
PAYABLE_SORT_BY = 3           # column index used when sorting payable rows
PAYABLE_PAYPAL_ID_COL = 18    # column holding the PayPal transaction id

# Ordered field names of one payable row.
PAYABLE_FIELDS = [
    'timestamp', 'requester', 'department', 'item', 'detail',
    'event_date', 'payment_type', 'use_of_funds', 'notes', 'type',
    'name', 'paypal', 'address', 'amount', 'driving_reimbursement',
]

# Payable fields excluded from processing (consumers are defined elsewhere).
PAYABLE_IGNORE_FIELDS = ('detail', 'notes')

# --- Receivables sheet layout ------------------------------------------------
RECEIVABLE_FIRST_ROW = 11
RECEIVABLE_FIRST_COL = 2
RECEIVABLE_LAST_COL = 25
RECEIVABLE_SORT_BY = 4        # column index used when sorting receivable rows

# Ordered field names of one receivable row.
RECEIVABLE_FIELDS = [
    'year', 'committed_date', 'timestamp', 'support_type',
    'organization_type', 'budget_line_item', 'payee_name', 'payee_email',
    'amount_requested', 'amount_committed', 'amount_gross', 'amount_net',
    'transaction_id',
]

# --- PayPal transaction records ----------------------------------------------
TRANSACTION_FIELDS = [
    'status', 'type', 'timezone', 'timestamp', 'id', 'name',
    'email', 'amount', 'fee_amount', 'net_amount', 'currency',
]

# Mapping from PayPal NVP response keys to the field names above.
TRANSACTION_RESPONSE_KEYS = {
    'L_STATUS': 'status',
    'L_TYPE': 'type',
    'L_TIMEZONE': 'timezone',
    'L_TIMESTAMP': 'timestamp',
    'L_TRANSACTIONID': 'id',
    'L_NAME': 'name',
    'L_EMAIL': 'email',
    'L_AMT': 'amount',
    'L_FEEAMT': 'fee_amount',
    'L_NETAMT': 'net_amount',
    'L_CURRENCYCODE': 'currency',
}

# (payable_field, transaction_field) pairs that must agree for a match.
PAYABLE_TRANSACTION_MATCHES = (
    ('paypal', 'email'),
    ('amount', 'amount'),
)
# NOTE(review): these module-level constants use lowercase names; PEP 8 would
# prefer UPPER_SNAKE_CASE, but the names are this module's public interface
# and are kept unchanged.

# --- Payables sheet layout ---------------------------------------------------
payable_first_row = 20        # first data row of the payables sheet
payable_first_col = 2
payable_last_col = 25
payable_sort_by = 3           # column index used when sorting payable rows
payable_paypal_id_col = 18    # column holding the PayPal transaction id

# Ordered field names of one payable row.
payable_fields = [
    'timestamp', 'requester', 'department', 'item', 'detail',
    'event_date', 'payment_type', 'use_of_funds', 'notes', 'type',
    'name', 'paypal', 'address', 'amount', 'driving_reimbursement',
]

# Payable fields excluded from processing (consumers are defined elsewhere).
payable_ignore_fields = ('detail', 'notes')

# --- Receivables sheet layout ------------------------------------------------
receivable_first_row = 11
receivable_first_col = 2
receivable_last_col = 25
receivable_sort_by = 4        # column index used when sorting receivable rows

# Ordered field names of one receivable row.
receivable_fields = [
    'year', 'committed_date', 'timestamp', 'support_type',
    'organization_type', 'budget_line_item', 'payee_name', 'payee_email',
    'amount_requested', 'amount_committed', 'amount_gross', 'amount_net',
    'transaction_id',
]

# --- PayPal transaction records ----------------------------------------------
transaction_fields = [
    'status', 'type', 'timezone', 'timestamp', 'id', 'name',
    'email', 'amount', 'fee_amount', 'net_amount', 'currency',
]

# Mapping from PayPal NVP response keys to the field names above.
transaction_response_keys = {
    'L_STATUS': 'status',
    'L_TYPE': 'type',
    'L_TIMEZONE': 'timezone',
    'L_TIMESTAMP': 'timestamp',
    'L_TRANSACTIONID': 'id',
    'L_NAME': 'name',
    'L_EMAIL': 'email',
    'L_AMT': 'amount',
    'L_FEEAMT': 'fee_amount',
    'L_NETAMT': 'net_amount',
    'L_CURRENCYCODE': 'currency',
}

# (payable_field, transaction_field) pairs that must agree for a match.
payable_transaction_matches = (
    ('paypal', 'email'),
    ('amount', 'amount'),
)
# -*- coding: utf-8 -*- """ Paraxial optical calculations """
""" Paraxial optical calculations """
# Lexer token tables (identifiers are in Portuguese, matching the project).

# Arithmetic, relational, logical and assignment operators.
operadores = ('+', '-', '*', '/', '%', '=', '>', '<', '>=', '<=', '!', '!=', '==', '&', '|', '++', '--', '+=', '-=', '/=', '*=')
# Comment markers: line comment, block-comment open, block-comment close.
comentario = '//'
comentario_inicio = '/*'
comentario_fim = '*/'
# String and character literal delimiters (double quote / single quote).
aspas = '"'
aspasSimples = "'"
# Punctuation tokens; also includes the comment and quote markers above.
delimitadores = (';', '{', '}', '(', ')', '[', ']', comentario, comentario_inicio, comentario_fim, aspas, aspasSimples, ",")
# Reserved keywords of the language being tokenised.
palavras_reservadas = ('int', 'float', 'string', 'boolean', 'char', 'void', 'double', 'public', 'private', 'igor', 'vasco', 'return', 'if', 'else', 'for', 'while', 'break', 'continue', 'funcao', 'hame', 'true', 'false', 'switch', 'case', 'default', 'print')
# Lexer token tables (identifiers are in Portuguese, matching the project).
#
# Fix: `delimitadores` referenced `aspasSimples`, but the variable was renamed
# to `aspas_simples` above -- importing this module raised NameError. The
# reference now uses the renamed variable.

# Arithmetic, relational, logical and assignment operators.
operadores = ('+', '-', '*', '/', '%', '=', '>', '<', '>=', '<=', '!', '!=', '==', '&', '|', '++', '--', '+=', '-=', '/=', '*=')
# Comment markers: line comment, block-comment open, block-comment close.
comentario = '//'
comentario_inicio = '/*'
comentario_fim = '*/'
# String and character literal delimiters (double quote / single quote).
aspas = '"'
aspas_simples = "'"
# Punctuation tokens; also includes the comment and quote markers above.
delimitadores = (';', '{', '}', '(', ')', '[', ']', comentario, comentario_inicio, comentario_fim, aspas, aspas_simples, ',')
# Reserved keywords of the language being tokenised.
palavras_reservadas = ('int', 'float', 'string', 'boolean', 'char', 'void', 'double', 'public', 'private', 'igor', 'vasco', 'return', 'if', 'else', 'for', 'while', 'break', 'continue', 'funcao', 'hame', 'true', 'false', 'switch', 'case', 'default', 'print')
class TestLinkController(object):
    """HTTP-level tests for the shortlink creation endpoint."""

    def test_create_shortlink_with_correct_request_body(self, client):
        """A well-formed request body yields a 200 success."""
        payload = {"provider": "tinyurl", "url": "http://example.com"}
        response = client.post("/api/shortlinks", json=payload)
        assert response.status_code == 200

    def test_create_shortlink_without_providing_access_token(self, client):
        """The bitly provider without an access token yields a 403 error."""
        payload = {"provider": "bitly", "url": "http://example.com"}
        response = client.post("/api/shortlinks", json=payload)
        assert response.status_code == 403

    def test_create_shortlink_endpoint_is_wrong(self, client):
        """A misspelled endpoint path yields a 404 error."""
        payload = {"provider": "tinyurl", "url": "http://example.com"}
        response = client.post("/api/shortlink", json=payload)
        assert response.status_code == 404

    def test_create_shortlink_method_is_not_allowed(self, client):
        """GET on the collection endpoint yields a 405 error."""
        payload = {"provider": "tinyurl", "url": "http://example.com"}
        response = client.get("/api/shortlinks", json=payload)
        assert response.status_code == 405

    def test_create_shortlink_provider_is_not_in_valid_format(self, client):
        """A non-string provider yields a 400 validation error."""
        payload = {"provider": 1, "url": "http://example.com"}
        response = client.post("/api/shortlinks", json=payload)
        assert response.status_code == 400
class TestLinkController(object):
    """HTTP-level tests for the shortlink creation endpoint.

    Fix: renamed from ``Testlinkcontroller`` to CapWords per PEP 8; pytest
    collects any class whose name starts with ``Test``, so collection is
    unaffected.
    """

    def test_create_shortlink_with_correct_request_body(self, client):
        """create_shortlink() with a correct request body should respond
        with a success 200.
        """
        form = {'provider': 'tinyurl', 'url': 'http://example.com'}
        response = client.post('/api/shortlinks', json=form)
        assert response.status_code == 200

    def test_create_shortlink_without_providing_access_token(self, client):
        """create_shortlink() if access_token is not provided to bitly,
        should respond with an error 403.
        """
        form = {'provider': 'bitly', 'url': 'http://example.com'}
        response = client.post('/api/shortlinks', json=form)
        assert response.status_code == 403

    def test_create_shortlink_endpoint_is_wrong(self, client):
        """create_shortlink() if endpoint is wrong, should respond with an
        error 404.
        """
        form = {'provider': 'tinyurl', 'url': 'http://example.com'}
        response = client.post('/api/shortlink', json=form)
        assert response.status_code == 404

    def test_create_shortlink_method_is_not_allowed(self, client):
        """create_shortlink() if method is not allowed, should respond with
        an error 405.
        """
        form = {'provider': 'tinyurl', 'url': 'http://example.com'}
        response = client.get('/api/shortlinks', json=form)
        assert response.status_code == 405

    def test_create_shortlink_provider_is_not_in_valid_format(self, client):
        """create_shortlink() if provider is not in valid format, should
        respond with a validation error 400.
        """
        form = {'provider': 1, 'url': 'http://example.com'}
        response = client.post('/api/shortlinks', json=form)
        assert response.status_code == 400
# NOTE(review): class names keep their original lowercase spelling -- they are
# this module's public interface (PEP 8 would use CapWords).


class pycacheNotFoundError(Exception):
    """Raised with a caller-supplied message; the message is also kept on
    the instance as ``msg``."""

    def __init__(self, msg):
        # Store the message, then hand it to Exception so str()/args work.
        self.msg = msg
        super().__init__(msg)


class installModulesFailedError(Exception):
    """Raised when module installation fails; carries a fixed message."""

    def __init__(self):
        self.msg = "The modules could not be installed! Some error occurred!"
        super().__init__(self.msg)
# NOTE(review): class names keep their original capitalisation -- they are
# this module's public interface (PEP 8 CapWords would be PycacheNotFoundError
# / InstallModulesFailedError).


class Pycachenotfounderror(Exception):
    """Raised with a caller-supplied message; the message is also kept on
    the instance as ``msg``."""

    def __init__(self, msg):
        # Store the message, then hand it to Exception so str()/args work.
        self.msg = msg
        super().__init__(msg)


class Installmodulesfailederror(Exception):
    """Raised when module installation fails; carries a fixed message."""

    def __init__(self):
        self.msg = 'The modules could not be installed! Some error occurred!'
        super().__init__(self.msg)
# Auto-generated pytest file class TestInit: def test___init__(self): fail() class TestEnter: def test___enter__(self): fail() class TestExit: def test___exit__(self): fail() class TestGetSearchResultCount: def test_get_search_result_count(self): fail() class TestGetSearchResultLinks: def test_get_search_result_links(self): fail() class TestGetSpecialPageLinks: def test_get_special_page_links(self): fail() class TestOpenIdLink: def test_open_id_link(self): fail() class TestExtractSearchResultCount: def test_extract_search_result_count(self): fail() class TestExtractSearchResultLinks: def test_extract_search_result_links(self): fail() class TestExtractSpecialPageLinks: def test_extract_special_page_links(self): fail() class TestTransformLinksToNoRedirectLinks: def test_transform_links_to_no_redirect_links(self): fail() class TestIsSpecialPageIdExists: def test_is_special_page_id_exists(self): fail() class TestLog: def test_log(self): fail() class TestClose: def test_close(self): fail()
# Auto-generated pytest stubs, one placeholder class per method under test;
# every body is unimplemented.
# NOTE(review): `fail` is not defined or imported in this chunk -- presumably
# pytest's `fail` helper or a stub defined elsewhere in the file; confirm
# before these tests are executed. Class names also use non-standard
# capitalisation (e.g. Testinit vs TestInit) -- kept as-is, since pytest only
# requires the Test prefix.


class Testinit:
    def test___init__(self):
        fail()


class Testenter:
    def test___enter__(self):
        fail()


class Testexit:
    def test___exit__(self):
        fail()


class Testgetsearchresultcount:
    def test_get_search_result_count(self):
        fail()


class Testgetsearchresultlinks:
    def test_get_search_result_links(self):
        fail()


class Testgetspecialpagelinks:
    def test_get_special_page_links(self):
        fail()


class Testopenidlink:
    def test_open_id_link(self):
        fail()


class Testextractsearchresultcount:
    def test_extract_search_result_count(self):
        fail()


class Testextractsearchresultlinks:
    def test_extract_search_result_links(self):
        fail()


class Testextractspecialpagelinks:
    def test_extract_special_page_links(self):
        fail()


class Testtransformlinkstonoredirectlinks:
    def test_transform_links_to_no_redirect_links(self):
        fail()


class Testisspecialpageidexists:
    def test_is_special_page_id_exists(self):
        fail()


class Testlog:
    def test_log(self):
        fail()


class Testclose:
    def test_close(self):
        fail()
"""
flask_konch
~~~~~~~~~~~

An improved shell command for the Flask CLI.
"""

__version__ = "2.0.0"

# Only the extension name is part of the public star-import API.
__all__ = ["EXTENSION_NAME"]

# Key under which the extension registers itself with a Flask application.
EXTENSION_NAME = "flask-konch"
"""
flask_konch
~~~~~~~~~~~

An improved shell command for the Flask CLI.
"""

__version__ = '2.0.0'

# Only the extension name is part of the public star-import API.
__all__ = ['EXTENSION_NAME']

# Key under which the extension registers itself with a Flask application.
# Fix: __all__ exported 'EXTENSION_NAME' but only lowercase `extension_name`
# existed, so `from flask_konch import *` raised AttributeError.
EXTENSION_NAME = 'flask-konch'

# Backward-compatible alias for callers that used the lowercase name.
extension_name = EXTENSION_NAME
# Binary (fast) exponentiation, e.g. 11 = 2**0 + 2**1 + 2**3, so
# a**11 = a**(2**0) * a**(2**1) * a**(2**3).
# Reference: https://baike.baidu.com/item/%E5%BF%AB%E9%80%9F%E5%B9%82
class Solution:
    def myPow(self, x: float, n: int) -> float:
        """Return x**n using O(log |n|) multiplications.

        A negative exponent inverts the base first; then the exponent is
        consumed bit by bit, squaring the running base each step.
        """
        exponent = n
        if exponent < 0:
            # x**(-n) == (1/x)**n
            x = 1 / x
            exponent = -exponent
        result = 1
        base = x
        while exponent > 0:
            if exponent & 1:
                # This bit of the exponent contributes the current base power.
                result = result * base
            base = base * base
            exponent >>= 1
        return result
class Solution:
    """Binary (fast) exponentiation: x**n in O(log |n|) multiplications."""

    def my_pow(self, x: float, n: int) -> float:
        """Return x raised to the integer power n.

        Fix: the original mixed `n` and an undefined `N` (`n = n`,
        `if N < 0`, `while N > 0`), raising NameError on every call; a single
        working-copy variable is used throughout now.
        """
        exponent = n  # work on a copy so the parameter stays untouched
        if exponent < 0:
            # x**(-n) == (1/x)**n
            x = 1 / x
            exponent = -exponent
        ans = 1
        current_product = x
        while exponent > 0:
            if exponent % 2 == 1:
                # This bit of the exponent contributes the current power.
                ans = ans * current_product
            current_product = current_product * current_product
            exponent //= 2
        return ans
# time complexity: O(n^2)
# space complexity: O(1)
def bubble_sort(arr):
    """Sort `arr` in place in ascending order using bubble sort.

    Fixes: the original pointer-juggling version raised IndexError on a
    single-element list (it compared arr[0] with arr[1]), and its local
    `next` shadowed the builtin. Rewritten as the idiomatic nested-loop
    form with an early exit when a pass makes no swap.

    Parameters
    ----------
    arr : list
        Mutable sequence of mutually comparable items; modified in place.

    Returns
    -------
    None
    """
    # After each outer pass the largest remaining element has bubbled to the
    # end, so the comparison range shrinks by one each time.
    for end in range(len(arr) - 1, 0, -1):
        swapped = False
        for i in range(end):
            if arr[i] > arr[i + 1]:
                arr[i], arr[i + 1] = arr[i + 1], arr[i]
                swapped = True
        if not swapped:
            # No swap in a full pass: the list is already sorted.
            break


if __name__ == '__main__':
    arr = [2, 3, 5, 6, 1]
    bubble_sort(arr)
    print(arr)
def bubble_sort(arr):
    """Sort `arr` in place in ascending order (bubble sort, O(n^2) time,
    O(1) extra space).

    Fixes: the original raised IndexError for a single-element list (with
    len(arr) == 1 it compared arr[0] against arr[1]), and its local `next`
    shadowed the builtin. Rewritten with standard index loops and an early
    exit once a full pass performs no swap.
    """
    n = len(arr)
    # Each pass bubbles the largest remaining element to position `limit`.
    for limit in range(n - 1, 0, -1):
        dirty = False
        for j in range(limit):
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                dirty = True
        if not dirty:
            # Already sorted -- no need for further passes.
            break


if __name__ == '__main__':
    arr = [2, 3, 5, 6, 1]
    bubble_sort(arr)
    print(arr)
class Solution:
    def isIsomorphic(self, s: str, t: str) -> bool:
        """Return True when *s* and *t* are isomorphic.

        The strings are isomorphic exactly when the character pairing
        between them is a bijection: the number of distinct (s_char, t_char)
        pairs must equal the number of distinct characters in s AND in t.
        (zip truncates to the shorter string, matching the original
        implementation's behaviour for unequal lengths.)
        """
        distinct_pairs = set(zip(s, t))
        return len(distinct_pairs) == len(set(s)) and len(distinct_pairs) == len(set(t))
class Solution:
    def is_isomorphic(self, s: str, t: str) -> bool:
        """Return True iff a one-to-one character mapping turns ``s`` into ``t``.

        Counting trick: the mapping induced by the positionwise pairs
        (s[i], t[i]) is bijective precisely when the count of distinct pairs
        matches the count of distinct characters in each string on its own.
        """
        pair_kinds = len({pair for pair in zip(s, t)})
        return pair_kinds == len({ch for ch in s}) == len({ch for ch in t})
#
# PySNMP MIB module APDNSALG-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/APDNSALG-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:23:12 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE(review): machine-generated pysmi output -- do not hand-edit; regenerate
# from the ASN.1 source instead.  It registers Acme Packet's DNS-ALG MIB
# (rooted at OID 1.3.6.1.4.1.9148.3.14): two tables (per-server status,
# per-client-realm statistics), one notification-only scalar, six traps and
# three conformance groups.  ``mibBuilder`` is supplied by the pysnmp MIB
# loader that executes this file.

# --- Symbols imported from prerequisite MIB modules -------------------------
acmepacketMgmt, = mibBuilder.importSymbols("ACMEPACKET-SMI", "acmepacketMgmt")
ApTransportType, ApHardwareModuleFamily, ApRedundancyState = mibBuilder.importSymbols("ACMEPACKET-TC", "ApTransportType", "ApHardwareModuleFamily", "ApRedundancyState")
SysMgmtPercentage, = mibBuilder.importSymbols("APSYSMGMT-MIB", "SysMgmtPercentage")
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion")
ifIndex, InterfaceIndexOrZero, InterfaceIndex = mibBuilder.importSymbols("IF-MIB", "ifIndex", "InterfaceIndexOrZero", "InterfaceIndex")
InetZoneIndex, InetAddressPrefixLength, InetVersion, InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetZoneIndex", "InetAddressPrefixLength", "InetVersion", "InetAddressType", "InetAddress")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, IpAddress, Bits, Counter64, Integer32, Counter32, Unsigned32, Gauge32, TimeTicks, NotificationType, ObjectIdentity, ModuleIdentity, iso = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "IpAddress", "Bits", "Counter64", "Integer32", "Counter32", "Unsigned32", "Gauge32", "TimeTicks", "NotificationType", "ObjectIdentity", "ModuleIdentity", "iso")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")

# --- Module identity --------------------------------------------------------
apDNSALGModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 9148, 3, 14))
if mibBuilder.loadTexts: apDNSALGModule.setLastUpdated('201106080000Z')
if mibBuilder.loadTexts: apDNSALGModule.setOrganization('Acme Packet, Inc')
if mibBuilder.loadTexts: apDNSALGModule.setContactInfo(' Customer Service Postal: Acme Packet, Inc 100 Crosby Drive Bedford, MA 01730 US Tel: 1-781-328-4400 E-mail: support@acmepacket.com')
if mibBuilder.loadTexts: apDNSALGModule.setDescription('The Dns Alg MIB for Acme Packet.')

# --- OID layout under 1.3.6.1.4.1.9148.3.14 ---------------------------------
apDNSALGMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1))
apDNSALGMIBGeneralObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 1))
apDNSALGMIBTabularObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2))
apDNSALGNotificationObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2))
apDNSALGNotifObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 1))
apDNSALGNotifPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2))
apDNSALGNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0))
apDNSALGConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3))
apDNSALGObjectGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1))
apDNSALGNotificationGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 2))

# --- apDNSALGServerStatusTable: reachability status per configured server ---
apDNSALGServerStatusTable = MibTable((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1), )
if mibBuilder.loadTexts: apDNSALGServerStatusTable.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerStatusTable.setDescription('A read-only table to hold the status of configured DNSALG servers, indexed by the name of the Dns alg config name, server realm and server IP.')
apDNSALGServerStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1), ).setIndexNames((0, "APDNSALG-MIB", "apDNSALGConfigIndex"), (0, "APDNSALG-MIB", "apDNSALGServerIndex"), (0, "APDNSALG-MIB", "apDNSALGServerIpAddress"))
if mibBuilder.loadTexts: apDNSALGServerStatusEntry.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerStatusEntry.setDescription('An entry designed to hold the status of a single DNSALG server')
apDNSALGConfigIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: apDNSALGConfigIndex.setStatus('current')
if mibBuilder.loadTexts: apDNSALGConfigIndex.setDescription('An integer for the sole purpose of indexing the DNS-ALG configuration.Only one DNS-ALG configuration is allowed per a realm.')
apDNSALGServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: apDNSALGServerIndex.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerIndex.setDescription('An integer for the sole purpose of indexing the Dns Server Attributes in a DNS-ALG config. Each DNS-ALG config can have multiple Dns Server Attributes.')
# NOTE(review): column sub-identifier 3 is absent in the generated source.
apDNSALGConfigName = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDNSALGConfigName.setStatus('current')
if mibBuilder.loadTexts: apDNSALGConfigName.setDescription('The name of the dns-alg-config element that contains this DNS-ALG server.')
apDNSALGServerRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDNSALGServerRealm.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerRealm.setDescription('The name of the server realm element that contains this DNSALG server.')
apDNSALGDomainSuffix = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDNSALGDomainSuffix.setStatus('current')
if mibBuilder.loadTexts: apDNSALGDomainSuffix.setDescription('The name of the domain suffix element that contains this DNSALG server.')
apDNSALGServerIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 7), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDNSALGServerIpAddress.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerIpAddress.setDescription('The IP address of this DNSALG server.')
apDNSALGServerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("inservice", 0), ("lowerpriority", 1), ("oosunreachable", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDNSALGServerStatus.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerStatus.setDescription('The status of this DNSALG server.')

# --- apDNSALGStatsTable: per-client-realm DNS-ALG statistics ----------------
apDNSALGStatsTable = MibTable((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2), )
if mibBuilder.loadTexts: apDNSALGStatsTable.setStatus('current')
if mibBuilder.loadTexts: apDNSALGStatsTable.setDescription('per DNS-ALG config(i.e.client realm)stats.')
apDnsALGStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1), ).setIndexNames((0, "APDNSALG-MIB", "apDnsAlgClientRealmIndex"))
if mibBuilder.loadTexts: apDnsALGStatsEntry.setStatus('current')
if mibBuilder.loadTexts: apDnsALGStatsEntry.setDescription('A table entry designed to hold DNS-ALG stats data')
apDnsAlgClientRealmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: apDnsAlgClientRealmIndex.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgClientRealmIndex.setDescription('An integer for the sole purpose of indexing the DNS-ALG configuration.Only one DNS-ALG configuration is allowed per a realm.')
apDnsAlgClientRealmName = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgClientRealmName.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgClientRealmName.setDescription('DNS-ALG Config realm name')
# Gauge32 columns are recent-period values; Counter32 columns are lifetime totals.
apDnsAlgCurrentQueries = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentQueries.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentQueries.setDescription('Number of queries sent in recent period received on DNS-ALG config realm.')
apDnsAlgTotalQueries = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalQueries.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalQueries.setDescription('Total number of queries sent in life time received on DNS-ALG config realm.')
apDnsAlgCurrentSucess = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentSucess.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentSucess.setDescription('Number of success responses in recent period received on DNS-ALG config realm.')
apDnsAlgTotalSucess = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalSucess.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalSucess.setDescription('Total number of success responses in life time received on DNS-ALG config realm.')
apDnsAlgCurrentNotFound = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentNotFound.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentNotFound.setDescription('Number of not-found responses in recent period received on DNS-ALG config realm.')
apDnsAlgTotalNotFound = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalNotFound.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalNotFound.setDescription('Total number of not-found responses in life time received on DNS-ALG config realm.')
apDnsAlgCurrentTimeOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentTimeOut.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentTimeOut.setDescription('Number of time out responses in recent period received on DNS-ALG config realm.')
apDnsAlgTotalTimeOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalTimeOut.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalTimeOut.setDescription('Total number of time out responses in life time received on DNS-ALG config realm')
apDnsAlgCurrentBadStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentBadStatus.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentBadStatus.setDescription('Number of bad status responses in recent period received on DNS-ALG config realm.')
apDnsAlgTotalBadStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalBadStatus.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalBadStatus.setDescription('Total number of bad status responses in life time received on DNS-ALG config realm.')
apDnsAlgCurrentOtherFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentOtherFailures.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentOtherFailures.setDescription('Number of other failure responses in recent period received on DNS-ALG config realm.')
apDnsAlgTotalOtherFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalOtherFailures.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalOtherFailures.setDescription('Total number of other failure responses in life time received on DNS-ALG config realm.')
apDnsAlgAvgLatency = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 15), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgAvgLatency.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgAvgLatency.setDescription('Average observed one-way signalling latency during the period in milliseconds')
apDnsAlgMaxLatency = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 16), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgMaxLatency.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgMaxLatency.setDescription('Maximum observed one-way signalling latency during the period in milliseconds')
apDnsAlgMaxBurstRate = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 17), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgMaxBurstRate.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgMaxBurstRate.setDescription('Maximum burst rate of traffic measured during the period (combined inbound and outbound)')

# --- Scalar referenced only from notifications ------------------------------
apDNSALGConstraintsStatus = MibScalar((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("inservice", 0), ("constraintsExceeded", 1)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: apDNSALGConstraintsStatus.setStatus('current')
if mibBuilder.loadTexts: apDNSALGConstraintsStatus.setDescription('The status of this DNS-ALG config realm for constraints.')

# --- Notifications (traps), raise/clear pairs -------------------------------
apDnsAlgStatusChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 1)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGServerStatus"))
if mibBuilder.loadTexts: apDnsAlgStatusChangeTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgStatusChangeTrap.setDescription(' The trap will be generated if the reachability status of an DNS-ALG server changes from In-Service to either Timed out or Out of Service.')
apDnsAlgStatusChangeClearTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 2)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGServerStatus"))
if mibBuilder.loadTexts: apDnsAlgStatusChangeClearTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgStatusChangeClearTrap.setDescription(' The trap will be generated if the reachability status of an DNS-ALG server changes from either Timed out or Out of Service to In-Service')
apDnsAlgConstraintStateChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 3)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus"))
if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeTrap.setDescription(" The trap will be generated if an DNS-ALG config's constriants state changed from inservice to constraintsExceeded.")
apDnsAlgConstraintStateChangeClearTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 4)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus"))
if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeClearTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeClearTrap.setDescription(" The trap will be generated if an DNS-ALG config's constriants state changed from constraintsExceeded to inservice.")
apDnsAlgSvrConstraintStateChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 5)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus"))
if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeTrap.setDescription(' The trap will be generated if an Dns Server(i.e.IP-Address) constriants state changed from inservice to constraintsExceeded.')
apDnsAlgSvrConstraintStateChangeClearTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 6)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus"))
if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeClearTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeClearTrap.setDescription(' The trap will be generated if an Dns Server(i.e.IP-Address) constriants state changed from constraintsExceeded to inservice.')

# --- Conformance groups (setStatus gated on pysnmp >= 4.4) ------------------
apDnsAlgServerStatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1, 1)).setObjects(("APDNSALG-MIB", "apDNSALGConfigIndex"), ("APDNSALG-MIB", "apDNSALGServerIndex"), ("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGDomainSuffix"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGServerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): apDnsAlgServerStatusGroup = apDnsAlgServerStatusGroup.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgServerStatusGroup.setDescription('A collection of statistics for DNS-ALG server status.')
apDnsAlgStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1, 2)).setObjects(("APDNSALG-MIB", "apDnsAlgClientRealmIndex"), ("APDNSALG-MIB", "apDnsAlgClientRealmName"), ("APDNSALG-MIB", "apDnsAlgCurrentQueries"), ("APDNSALG-MIB", "apDnsAlgTotalQueries"), ("APDNSALG-MIB", "apDnsAlgCurrentSucess"), ("APDNSALG-MIB", "apDnsAlgTotalSucess"), ("APDNSALG-MIB", "apDnsAlgCurrentNotFound"), ("APDNSALG-MIB", "apDnsAlgTotalNotFound"), ("APDNSALG-MIB", "apDnsAlgCurrentTimeOut"), ("APDNSALG-MIB", "apDnsAlgTotalTimeOut"), ("APDNSALG-MIB", "apDnsAlgCurrentBadStatus"), ("APDNSALG-MIB", "apDnsAlgTotalBadStatus"), ("APDNSALG-MIB", "apDnsAlgCurrentOtherFailures"), ("APDNSALG-MIB", "apDnsAlgTotalOtherFailures"), ("APDNSALG-MIB", "apDnsAlgAvgLatency"), ("APDNSALG-MIB", "apDnsAlgMaxLatency"), ("APDNSALG-MIB", "apDnsAlgMaxBurstRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): apDnsAlgStatsGroup = apDnsAlgStatsGroup.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgStatsGroup.setDescription('Report the stats of configured DNSALG config objects.')
apDNSALGNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 2, 1)).setObjects(("APDNSALG-MIB", "apDnsAlgStatusChangeTrap"), ("APDNSALG-MIB", "apDnsAlgStatusChangeClearTrap"), ("APDNSALG-MIB", "apDnsAlgConstraintStateChangeTrap"), ("APDNSALG-MIB", "apDnsAlgConstraintStateChangeClearTrap"), ("APDNSALG-MIB", "apDnsAlgSvrConstraintStateChangeTrap"), ("APDNSALG-MIB", "apDnsAlgSvrConstraintStateChangeClearTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): apDNSALGNotificationsGroup = apDNSALGNotificationsGroup.setStatus('current')
if mibBuilder.loadTexts: apDNSALGNotificationsGroup.setDescription('A collection of mib objects accessible only to traps.')

# --- Export all defined symbols back to the MIB builder ---------------------
mibBuilder.exportSymbols("APDNSALG-MIB", apDnsAlgTotalNotFound=apDnsAlgTotalNotFound, apDnsAlgConstraintStateChangeClearTrap=apDnsAlgConstraintStateChangeClearTrap, apDnsAlgStatusChangeTrap=apDnsAlgStatusChangeTrap, apDnsAlgTotalTimeOut=apDnsAlgTotalTimeOut, apDnsAlgStatsGroup=apDnsAlgStatsGroup, apDnsALGStatsEntry=apDnsALGStatsEntry, apDNSALGMIBGeneralObjects=apDNSALGMIBGeneralObjects, apDnsAlgTotalSucess=apDnsAlgTotalSucess, apDNSALGServerStatusEntry=apDNSALGServerStatusEntry, apDNSALGNotificationsGroup=apDNSALGNotificationsGroup, apDNSALGConstraintsStatus=apDNSALGConstraintsStatus, apDnsAlgConstraintStateChangeTrap=apDnsAlgConstraintStateChangeTrap, apDNSALGServerRealm=apDNSALGServerRealm, apDnsAlgTotalBadStatus=apDnsAlgTotalBadStatus, apDNSALGObjectGroups=apDNSALGObjectGroups, apDNSALGConfigName=apDNSALGConfigName, apDnsAlgMaxLatency=apDnsAlgMaxLatency, PYSNMP_MODULE_ID=apDNSALGModule, apDNSALGMIBTabularObjects=apDNSALGMIBTabularObjects, apDnsAlgTotalOtherFailures=apDnsAlgTotalOtherFailures, apDNSALGConfigIndex=apDNSALGConfigIndex, apDnsAlgStatusChangeClearTrap=apDnsAlgStatusChangeClearTrap, apDNSALGStatsTable=apDNSALGStatsTable, apDnsAlgClientRealmIndex=apDnsAlgClientRealmIndex, apDnsAlgSvrConstraintStateChangeTrap=apDnsAlgSvrConstraintStateChangeTrap, apDNSALGNotifications=apDNSALGNotifications, apDNSALGConformance=apDNSALGConformance, apDnsAlgCurrentNotFound=apDnsAlgCurrentNotFound, apDNSALGNotifPrefix=apDNSALGNotifPrefix, apDnsAlgMaxBurstRate=apDnsAlgMaxBurstRate, apDNSALGMIBObjects=apDNSALGMIBObjects, apDnsAlgAvgLatency=apDnsAlgAvgLatency, apDnsAlgServerStatusGroup=apDnsAlgServerStatusGroup, apDNSALGNotificationObjects=apDNSALGNotificationObjects, apDNSALGNotificationGroups=apDNSALGNotificationGroups, apDnsAlgCurrentOtherFailures=apDnsAlgCurrentOtherFailures, apDnsAlgClientRealmName=apDnsAlgClientRealmName, apDNSALGNotifObjects=apDNSALGNotifObjects, apDNSALGServerStatus=apDNSALGServerStatus, apDnsAlgCurrentSucess=apDnsAlgCurrentSucess, apDNSALGServerStatusTable=apDNSALGServerStatusTable, apDnsAlgSvrConstraintStateChangeClearTrap=apDnsAlgSvrConstraintStateChangeClearTrap, apDnsAlgCurrentQueries=apDnsAlgCurrentQueries, apDnsAlgCurrentBadStatus=apDnsAlgCurrentBadStatus, apDnsAlgCurrentTimeOut=apDnsAlgCurrentTimeOut, apDNSALGServerIpAddress=apDNSALGServerIpAddress, apDNSALGModule=apDNSALGModule, apDNSALGDomainSuffix=apDNSALGDomainSuffix, apDnsAlgTotalQueries=apDnsAlgTotalQueries, apDNSALGServerIndex=apDNSALGServerIndex)
(acmepacket_mgmt,) = mibBuilder.importSymbols('ACMEPACKET-SMI', 'acmepacketMgmt') (ap_transport_type, ap_hardware_module_family, ap_redundancy_state) = mibBuilder.importSymbols('ACMEPACKET-TC', 'ApTransportType', 'ApHardwareModuleFamily', 'ApRedundancyState') (sys_mgmt_percentage,) = mibBuilder.importSymbols('APSYSMGMT-MIB', 'SysMgmtPercentage') (object_identifier, integer, octet_string) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'Integer', 'OctetString') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (constraints_intersection, single_value_constraint, value_size_constraint, value_range_constraint, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ConstraintsIntersection', 'SingleValueConstraint', 'ValueSizeConstraint', 'ValueRangeConstraint', 'ConstraintsUnion') (if_index, interface_index_or_zero, interface_index) = mibBuilder.importSymbols('IF-MIB', 'ifIndex', 'InterfaceIndexOrZero', 'InterfaceIndex') (inet_zone_index, inet_address_prefix_length, inet_version, inet_address_type, inet_address) = mibBuilder.importSymbols('INET-ADDRESS-MIB', 'InetZoneIndex', 'InetAddressPrefixLength', 'InetVersion', 'InetAddressType', 'InetAddress') (module_compliance, notification_group, object_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup', 'ObjectGroup') (mib_scalar, mib_table, mib_table_row, mib_table_column, mib_identifier, ip_address, bits, counter64, integer32, counter32, unsigned32, gauge32, time_ticks, notification_type, object_identity, module_identity, iso) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'MibIdentifier', 'IpAddress', 'Bits', 'Counter64', 'Integer32', 'Counter32', 'Unsigned32', 'Gauge32', 'TimeTicks', 'NotificationType', 'ObjectIdentity', 'ModuleIdentity', 'iso') (textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString') ap_dnsalg_module = 
module_identity((1, 3, 6, 1, 4, 1, 9148, 3, 14)) if mibBuilder.loadTexts: apDNSALGModule.setLastUpdated('201106080000Z') if mibBuilder.loadTexts: apDNSALGModule.setOrganization('Acme Packet, Inc') if mibBuilder.loadTexts: apDNSALGModule.setContactInfo(' Customer Service Postal: Acme Packet, Inc 100 Crosby Drive Bedford, MA 01730 US Tel: 1-781-328-4400 E-mail: support@acmepacket.com') if mibBuilder.loadTexts: apDNSALGModule.setDescription('The Dns Alg MIB for Acme Packet.') ap_dnsalgmib_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1)) ap_dnsalgmib_general_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 1)) ap_dnsalgmib_tabular_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2)) ap_dnsalg_notification_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2)) ap_dnsalg_notif_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 1)) ap_dnsalg_notif_prefix = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2)) ap_dnsalg_notifications = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0)) ap_dnsalg_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3)) ap_dnsalg_object_groups = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1)) ap_dnsalg_notification_groups = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 2)) ap_dnsalg_server_status_table = mib_table((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1)) if mibBuilder.loadTexts: apDNSALGServerStatusTable.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerStatusTable.setDescription('A read-only table to hold the status of configured DNSALG servers, indexed by the name of the Dns alg config name, server realm and server IP.') ap_dnsalg_server_status_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1)).setIndexNames((0, 'APDNSALG-MIB', 'apDNSALGConfigIndex'), (0, 'APDNSALG-MIB', 'apDNSALGServerIndex'), (0, 'APDNSALG-MIB', 'apDNSALGServerIpAddress')) if mibBuilder.loadTexts: apDNSALGServerStatusEntry.setStatus('current') if mibBuilder.loadTexts: 
apDNSALGServerStatusEntry.setDescription('An entry designed to hold the status of a single DNSALG server') ap_dnsalg_config_index = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 2147483647))).setMaxAccess('accessiblefornotify') if mibBuilder.loadTexts: apDNSALGConfigIndex.setStatus('current') if mibBuilder.loadTexts: apDNSALGConfigIndex.setDescription('An integer for the sole purpose of indexing the DNS-ALG configuration.Only one DNS-ALG configuration is allowed per a realm.') ap_dnsalg_server_index = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 2), integer32().subtype(subtypeSpec=value_range_constraint(1, 2147483647))).setMaxAccess('accessiblefornotify') if mibBuilder.loadTexts: apDNSALGServerIndex.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerIndex.setDescription('An integer for the sole purpose of indexing the Dns Server Attributes in a DNS-ALG config. Each DNS-ALG config can have multiple Dns Server Attributes.') ap_dnsalg_config_name = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 4), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: apDNSALGConfigName.setStatus('current') if mibBuilder.loadTexts: apDNSALGConfigName.setDescription('The name of the dns-alg-config element that contains this DNS-ALG server.') ap_dnsalg_server_realm = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 5), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: apDNSALGServerRealm.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerRealm.setDescription('The name of the server realm element that contains this DNSALG server.') ap_dnsalg_domain_suffix = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 6), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly') if 
mibBuilder.loadTexts: apDNSALGDomainSuffix.setStatus('current') if mibBuilder.loadTexts: apDNSALGDomainSuffix.setDescription('The name of the domain suffix element that contains this DNSALG server.') ap_dnsalg_server_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 7), ip_address()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDNSALGServerIpAddress.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerIpAddress.setDescription('The IP address of this DNSALG server.') ap_dnsalg_server_status = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 8), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2))).clone(namedValues=named_values(('inservice', 0), ('lowerpriority', 1), ('oosunreachable', 2)))).setMaxAccess('readonly') if mibBuilder.loadTexts: apDNSALGServerStatus.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerStatus.setDescription('The status of this DNSALG server.') ap_dnsalg_stats_table = mib_table((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2)) if mibBuilder.loadTexts: apDNSALGStatsTable.setStatus('current') if mibBuilder.loadTexts: apDNSALGStatsTable.setDescription('per DNS-ALG config(i.e.client realm)stats.') ap_dns_alg_stats_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1)).setIndexNames((0, 'APDNSALG-MIB', 'apDnsAlgClientRealmIndex')) if mibBuilder.loadTexts: apDnsALGStatsEntry.setStatus('current') if mibBuilder.loadTexts: apDnsALGStatsEntry.setDescription('A table entry designed to hold DNS-ALG stats data') ap_dns_alg_client_realm_index = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 2147483647))).setMaxAccess('accessiblefornotify') if mibBuilder.loadTexts: apDnsAlgClientRealmIndex.setStatus('current') if mibBuilder.loadTexts: apDnsAlgClientRealmIndex.setDescription('An integer for the sole purpose of indexing the DNS-ALG configuration.Only one DNS-ALG configuration is allowed per 
a realm.') ap_dns_alg_client_realm_name = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 2), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgClientRealmName.setStatus('current') if mibBuilder.loadTexts: apDnsAlgClientRealmName.setDescription('DNS-ALG Config realm name') ap_dns_alg_current_queries = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 3), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgCurrentQueries.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentQueries.setDescription('Number of queries sent in recent period received on DNS-ALG config realm.') ap_dns_alg_total_queries = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 4), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgTotalQueries.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalQueries.setDescription('Total number of queries sent in life time received on DNS-ALG config realm.') ap_dns_alg_current_sucess = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 5), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgCurrentSucess.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentSucess.setDescription('Number of success responses in recent period received on DNS-ALG config realm.') ap_dns_alg_total_sucess = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 6), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgTotalSucess.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalSucess.setDescription('Total number of success responses in life time received on DNS-ALG config realm.') ap_dns_alg_current_not_found = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 7), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgCurrentNotFound.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentNotFound.setDescription('Number of 
not-found responses in recent period received on DNS-ALG config realm.') ap_dns_alg_total_not_found = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 8), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgTotalNotFound.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalNotFound.setDescription('Total number of not-found responses in life time received on DNS-ALG config realm.') ap_dns_alg_current_time_out = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 9), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgCurrentTimeOut.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentTimeOut.setDescription('Number of time out responses in recent period received on DNS-ALG config realm.') ap_dns_alg_total_time_out = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 10), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgTotalTimeOut.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalTimeOut.setDescription('Total number of time out responses in life time received on DNS-ALG config realm') ap_dns_alg_current_bad_status = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 11), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgCurrentBadStatus.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentBadStatus.setDescription('Number of bad status responses in recent period received on DNS-ALG config realm.') ap_dns_alg_total_bad_status = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 12), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgTotalBadStatus.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalBadStatus.setDescription('Total number of bad status responses in life time received on DNS-ALG config realm.') ap_dns_alg_current_other_failures = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 13), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: 
apDnsAlgCurrentOtherFailures.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentOtherFailures.setDescription('Number of other failure responses in recent period received on DNS-ALG config realm.') ap_dns_alg_total_other_failures = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 14), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgTotalOtherFailures.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalOtherFailures.setDescription('Total number of other failure responses in life time received on DNS-ALG config realm.') ap_dns_alg_avg_latency = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 15), unsigned32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgAvgLatency.setStatus('current') if mibBuilder.loadTexts: apDnsAlgAvgLatency.setDescription('Average observed one-way signalling latency during the period in milliseconds') ap_dns_alg_max_latency = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 16), unsigned32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgMaxLatency.setStatus('current') if mibBuilder.loadTexts: apDnsAlgMaxLatency.setDescription('Maximum observed one-way signalling latency during the period in milliseconds') ap_dns_alg_max_burst_rate = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 17), unsigned32()).setMaxAccess('readonly') if mibBuilder.loadTexts: apDnsAlgMaxBurstRate.setStatus('current') if mibBuilder.loadTexts: apDnsAlgMaxBurstRate.setDescription('Maximum burst rate of traffic measured during the period (combined inbound and outbound)') ap_dnsalg_constraints_status = mib_scalar((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('inservice', 0), ('constraintsExceeded', 1)))).setMaxAccess('accessiblefornotify') if mibBuilder.loadTexts: apDNSALGConstraintsStatus.setStatus('current') if mibBuilder.loadTexts: 
apDNSALGConstraintsStatus.setDescription('The status of this DNS-ALG config realm for constraints.') ap_dns_alg_status_change_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 1)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGServerRealm'), ('APDNSALG-MIB', 'apDNSALGServerIpAddress'), ('APDNSALG-MIB', 'apDNSALGServerStatus')) if mibBuilder.loadTexts: apDnsAlgStatusChangeTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgStatusChangeTrap.setDescription(' The trap will be generated if the reachability status of an DNS-ALG server changes from In-Service to either Timed out or Out of Service.') ap_dns_alg_status_change_clear_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 2)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGServerRealm'), ('APDNSALG-MIB', 'apDNSALGServerIpAddress'), ('APDNSALG-MIB', 'apDNSALGServerStatus')) if mibBuilder.loadTexts: apDnsAlgStatusChangeClearTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgStatusChangeClearTrap.setDescription(' The trap will be generated if the reachability status of an DNS-ALG server changes from either Timed out or Out of Service to In-Service') ap_dns_alg_constraint_state_change_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 3)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGConstraintsStatus')) if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeTrap.setDescription(" The trap will be generated if an DNS-ALG config's constriants state changed from inservice to constraintsExceeded.") ap_dns_alg_constraint_state_change_clear_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 4)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGConstraintsStatus')) if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeClearTrap.setStatus('current') if 
mibBuilder.loadTexts: apDnsAlgConstraintStateChangeClearTrap.setDescription(" The trap will be generated if an DNS-ALG config's constriants state changed from constraintsExceeded to inservice.") ap_dns_alg_svr_constraint_state_change_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 5)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGServerRealm'), ('APDNSALG-MIB', 'apDNSALGServerIpAddress'), ('APDNSALG-MIB', 'apDNSALGConstraintsStatus')) if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeTrap.setDescription(' The trap will be generated if an Dns Server(i.e.IP-Address) constriants state changed from inservice to constraintsExceeded.') ap_dns_alg_svr_constraint_state_change_clear_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 6)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGServerRealm'), ('APDNSALG-MIB', 'apDNSALGServerIpAddress'), ('APDNSALG-MIB', 'apDNSALGConstraintsStatus')) if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeClearTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeClearTrap.setDescription(' The trap will be generated if an Dns Server(i.e.IP-Address) constriants state changed from constraintsExceeded to inservice.') ap_dns_alg_server_status_group = object_group((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1, 1)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigIndex'), ('APDNSALG-MIB', 'apDNSALGServerIndex'), ('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGServerRealm'), ('APDNSALG-MIB', 'apDNSALGDomainSuffix'), ('APDNSALG-MIB', 'apDNSALGServerIpAddress'), ('APDNSALG-MIB', 'apDNSALGServerStatus')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ap_dns_alg_server_status_group = apDnsAlgServerStatusGroup.setStatus('current') if mibBuilder.loadTexts: apDnsAlgServerStatusGroup.setDescription('A collection of statistics 
for DNS-ALG server status.') ap_dns_alg_stats_group = object_group((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1, 2)).setObjects(('APDNSALG-MIB', 'apDnsAlgClientRealmIndex'), ('APDNSALG-MIB', 'apDnsAlgClientRealmName'), ('APDNSALG-MIB', 'apDnsAlgCurrentQueries'), ('APDNSALG-MIB', 'apDnsAlgTotalQueries'), ('APDNSALG-MIB', 'apDnsAlgCurrentSucess'), ('APDNSALG-MIB', 'apDnsAlgTotalSucess'), ('APDNSALG-MIB', 'apDnsAlgCurrentNotFound'), ('APDNSALG-MIB', 'apDnsAlgTotalNotFound'), ('APDNSALG-MIB', 'apDnsAlgCurrentTimeOut'), ('APDNSALG-MIB', 'apDnsAlgTotalTimeOut'), ('APDNSALG-MIB', 'apDnsAlgCurrentBadStatus'), ('APDNSALG-MIB', 'apDnsAlgTotalBadStatus'), ('APDNSALG-MIB', 'apDnsAlgCurrentOtherFailures'), ('APDNSALG-MIB', 'apDnsAlgTotalOtherFailures'), ('APDNSALG-MIB', 'apDnsAlgAvgLatency'), ('APDNSALG-MIB', 'apDnsAlgMaxLatency'), ('APDNSALG-MIB', 'apDnsAlgMaxBurstRate')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ap_dns_alg_stats_group = apDnsAlgStatsGroup.setStatus('current') if mibBuilder.loadTexts: apDnsAlgStatsGroup.setDescription('Report the stats of configured DNSALG config objects.') ap_dnsalg_notifications_group = notification_group((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 2, 1)).setObjects(('APDNSALG-MIB', 'apDnsAlgStatusChangeTrap'), ('APDNSALG-MIB', 'apDnsAlgStatusChangeClearTrap'), ('APDNSALG-MIB', 'apDnsAlgConstraintStateChangeTrap'), ('APDNSALG-MIB', 'apDnsAlgConstraintStateChangeClearTrap'), ('APDNSALG-MIB', 'apDnsAlgSvrConstraintStateChangeTrap'), ('APDNSALG-MIB', 'apDnsAlgSvrConstraintStateChangeClearTrap')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ap_dnsalg_notifications_group = apDNSALGNotificationsGroup.setStatus('current') if mibBuilder.loadTexts: apDNSALGNotificationsGroup.setDescription('A collection of mib objects accessible only to traps.') mibBuilder.exportSymbols('APDNSALG-MIB', apDnsAlgTotalNotFound=apDnsAlgTotalNotFound, apDnsAlgConstraintStateChangeClearTrap=apDnsAlgConstraintStateChangeClearTrap, 
apDnsAlgStatusChangeTrap=apDnsAlgStatusChangeTrap, apDnsAlgTotalTimeOut=apDnsAlgTotalTimeOut, apDnsAlgStatsGroup=apDnsAlgStatsGroup, apDnsALGStatsEntry=apDnsALGStatsEntry, apDNSALGMIBGeneralObjects=apDNSALGMIBGeneralObjects, apDnsAlgTotalSucess=apDnsAlgTotalSucess, apDNSALGServerStatusEntry=apDNSALGServerStatusEntry, apDNSALGNotificationsGroup=apDNSALGNotificationsGroup, apDNSALGConstraintsStatus=apDNSALGConstraintsStatus, apDnsAlgConstraintStateChangeTrap=apDnsAlgConstraintStateChangeTrap, apDNSALGServerRealm=apDNSALGServerRealm, apDnsAlgTotalBadStatus=apDnsAlgTotalBadStatus, apDNSALGObjectGroups=apDNSALGObjectGroups, apDNSALGConfigName=apDNSALGConfigName, apDnsAlgMaxLatency=apDnsAlgMaxLatency, PYSNMP_MODULE_ID=apDNSALGModule, apDNSALGMIBTabularObjects=apDNSALGMIBTabularObjects, apDnsAlgTotalOtherFailures=apDnsAlgTotalOtherFailures, apDNSALGConfigIndex=apDNSALGConfigIndex, apDnsAlgStatusChangeClearTrap=apDnsAlgStatusChangeClearTrap, apDNSALGStatsTable=apDNSALGStatsTable, apDnsAlgClientRealmIndex=apDnsAlgClientRealmIndex, apDnsAlgSvrConstraintStateChangeTrap=apDnsAlgSvrConstraintStateChangeTrap, apDNSALGNotifications=apDNSALGNotifications, apDNSALGConformance=apDNSALGConformance, apDnsAlgCurrentNotFound=apDnsAlgCurrentNotFound, apDNSALGNotifPrefix=apDNSALGNotifPrefix, apDnsAlgMaxBurstRate=apDnsAlgMaxBurstRate, apDNSALGMIBObjects=apDNSALGMIBObjects, apDnsAlgAvgLatency=apDnsAlgAvgLatency, apDnsAlgServerStatusGroup=apDnsAlgServerStatusGroup, apDNSALGNotificationObjects=apDNSALGNotificationObjects, apDNSALGNotificationGroups=apDNSALGNotificationGroups, apDnsAlgCurrentOtherFailures=apDnsAlgCurrentOtherFailures, apDnsAlgClientRealmName=apDnsAlgClientRealmName, apDNSALGNotifObjects=apDNSALGNotifObjects, apDNSALGServerStatus=apDNSALGServerStatus, apDnsAlgCurrentSucess=apDnsAlgCurrentSucess, apDNSALGServerStatusTable=apDNSALGServerStatusTable, apDnsAlgSvrConstraintStateChangeClearTrap=apDnsAlgSvrConstraintStateChangeClearTrap, apDnsAlgCurrentQueries=apDnsAlgCurrentQueries, 
apDnsAlgCurrentBadStatus=apDnsAlgCurrentBadStatus, apDnsAlgCurrentTimeOut=apDnsAlgCurrentTimeOut, apDNSALGServerIpAddress=apDNSALGServerIpAddress, apDNSALGModule=apDNSALGModule, apDNSALGDomainSuffix=apDNSALGDomainSuffix, apDnsAlgTotalQueries=apDnsAlgTotalQueries, apDNSALGServerIndex=apDNSALGServerIndex)
'''
/******************************************************************
 *
 * Copyright 2018 Samsung Electronics All Rights Reserved.
 *
 *
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************/
'''


class TestRunOption:
    """Per-test-case run bookkeeping: counts outcomes and decides when to stop rerunning."""

    # Rerun policy shared by every instance.
    max_total_count = 3
    min_pass_count = 1
    max_timeout_count = 2

    # Identifiers for the two pass-criteria sources.
    XML_PASS_CRITERIA = 'xml'
    LOG_PASS_CRITERIA = 'log'

    def __init__(self, binary_name, suite_name, tc_name, package_name):
        self.binary_name = binary_name
        self.suite_name = suite_name
        self.tc_name = tc_name
        self.package_name = package_name
        # Outcome counters, all starting from zero.
        self.total_count = 0
        self.pass_count = 0
        self.fail_count = 0
        self.timeout_count = 0

    def increase_total_count(self):
        self.total_count += 1

    def increase_pass_count(self):
        self.pass_count += 1

    def increase_fail_count(self):
        self.fail_count += 1

    def increase_timeout_count(self):
        self.timeout_count += 1

    def is_execution_complete(self):
        """Return True once any stop condition is met (enough passes, too many timeouts, or run budget spent)."""
        passed_enough = self.pass_count >= TestRunOption.min_pass_count
        too_many_timeouts = self.timeout_count >= TestRunOption.max_timeout_count
        budget_spent = self.total_count >= TestRunOption.max_total_count
        if passed_enough or too_many_timeouts or budget_spent:
            return True
        return False
"""
/******************************************************************
 *
 * Copyright 2018 Samsung Electronics All Rights Reserved.
 *
 *
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************/
"""


class Testrunoption:
    """Per-test-case run bookkeeping: counts outcomes and decides when to stop rerunning."""

    # Rerun policy shared by every instance.
    max_total_count = 3
    min_pass_count = 1
    max_timeout_count = 2

    # Identifiers for the two pass-criteria sources.
    xml_pass_criteria = 'xml'
    log_pass_criteria = 'log'

    def __init__(self, binary_name, suite_name, tc_name, package_name):
        self.binary_name = binary_name
        self.suite_name = suite_name
        self.tc_name = tc_name
        self.package_name = package_name
        # Outcome counters, all starting from zero.
        self.total_count = 0
        self.pass_count = 0
        self.fail_count = 0
        self.timeout_count = 0

    def increase_total_count(self):
        self.total_count += 1

    def increase_pass_count(self):
        self.pass_count += 1

    def increase_fail_count(self):
        self.fail_count += 1

    def increase_timeout_count(self):
        self.timeout_count += 1

    def is_execution_complete(self):
        """Return True once any stop condition is met.

        Bug fix: the thresholds were referenced through the undefined name
        ``TestRunOption`` (this class is named ``Testrunoption``), which made
        every call raise NameError. They now resolve through this class.
        """
        if (self.pass_count >= Testrunoption.min_pass_count
                or self.timeout_count >= Testrunoption.max_timeout_count
                or self.total_count >= Testrunoption.max_total_count):
            return True
        return False
hpp = 'AL-Import' # Specify the name of the hpp to print the graph graph_title='EM- Total Impact of the energy maximization scenario on '+ hpp df_em2 = df_em1.groupby(['scenario'])['value'].sum().round(2).reset_index() fig5c = px.bar(df_em2, x='scenario', y='value', text= 'value', color='scenario',barmode='group', labels={"value": "GWh", "tech":"HPP"}, title=graph_title, category_orders={"scenario": ["Reference", "Energy Max"]}, facet_col_spacing=0.05, facet_row_spacing=0.05) #fig.for_each_annotation(lambda a: a.update(text=a.text.split("=")[-1])) fig5c.update_traces(texttemplate='%{text:.5s}', textposition='outside') #to format the text on each bar #fig.update_layout(uniformtext_minsize=7, uniformtext_mode='hide') #to format the text on each bar #fig.update_yaxes(range=[0, 2300]) #setting the y-axis scale to ensure enough space for the text on each bar #fig.update_xaxes(showline=True, linewidth=2, linecolor='black', mirror=True) #drawing the border on x-axis #fig.update_yaxes(showline=True, linewidth=2, linecolor='black', mirror=True) #drawing the border on y-axis #You can change the image extension to *.png if you want or keep it as pdf (for high resolution) #output_folder = os.path.join('Results_graphics') #os.makedirs(output_folder, exist_ok = True) #pio.write_image(fig, 'Results_graphics/{}.pdf'.format(graph_title)) #fig.show()
# Plot the total impact of the energy-maximization scenario for one plant.
# NOTE(review): assumes `df_em1` is a long-format DataFrame with 'scenario'
# and 'value' columns and `px` is plotly.express, both defined earlier in
# the notebook/script -- confirm.
hpp = 'AL-Import'  # name of the hpp shown in the graph title
graph_title = 'EM- Total Impact of the energy maximization scenario on ' + hpp
# One total (GWh, rounded to 2 d.p.) per scenario.
df_em2 = df_em1.groupby(['scenario'])['value'].sum().round(2).reset_index()
fig5c = px.bar(df_em2, x='scenario', y='value', text='value', color='scenario', barmode='group', labels={'value': 'GWh', 'tech': 'HPP'}, title=graph_title, category_orders={'scenario': ['Reference', 'Energy Max']}, facet_col_spacing=0.05, facet_row_spacing=0.05)
# Abbreviate the value printed on each bar and place it above the bar.
fig5c.update_traces(texttemplate='%{text:.5s}', textposition='outside')
class MyModule:
    """Container for this script's single operation."""

    def my_function():
        """Placeholder operation; intentionally does nothing."""
        return None


def main():
    """The main entrypoint for this script

    Used in the setup.py file
    """
    return MyModule.my_function()


if __name__ == '__main__':
    main()
class Mymodule:
    """Container for this script's single operation."""

    def my_function():
        """Placeholder operation; intentionally does nothing."""
        pass


def main():
    """The main entrypoint for this script

    Used in the setup.py file
    """
    # Bug fix: previously called the undefined name ``MyModule`` (the class
    # is named ``Mymodule``), so running main() raised NameError.
    Mymodule.my_function()


if __name__ == '__main__':
    main()
class MessageTypeNotSupported(Exception):
    """Raised when a message of an unsupported type is encountered."""


class MessageDoesNotExist(Exception):
    """Raised when a requested message cannot be found."""
class Messagetypenotsupported(Exception):
    """Raised when a message of an unsupported type is encountered."""


class Messagedoesnotexist(Exception):
    """Raised when a requested message cannot be found."""
# https://www.codechef.com/problems/RAINBOWA for T in range(int(input())): n,l=int(input()),list(map(int,input().split())) print("no") if(set(l)!=set(list(range(1,8))) or l[0]!=1 or l[-1]!=1 or l!=l[::-1]) else print("yes")
# CodeChef RAINBOWA: for each test case, print "yes" iff the array contains
# exactly the values {1..7}, starts and ends with 1, and is a palindrome.
# NOTE(review): these conditions look necessary but may not be sufficient
# (they do not enforce the contiguous 1..7..1 climb); verify against the
# judge's accepted test data.
for t in range(int(input())):
    (n, l) = (int(input()), list(map(int, input().split())))
    print('no') if set(l) != set(list(range(1, 8))) or l[0] != 1 or l[-1] != 1 or (l != l[::-1]) else print('yes')
# -*- encoding:utf-8 -*- __version__ = (1, 2, 11) __version_str__ = ".".join(map(str, __version__)) __version_core__ = (3, 0, 4)
# Package version as a tuple plus the dotted-string form derived from it.
__version__ = (1, 2, 11)
__version_str__ = '.'.join(map(str, __version__))
# NOTE(review): meaning of __version_core__ inferred from its name (version
# of an underlying "core" component) -- confirm with the package docs.
__version_core__ = (3, 0, 4)
def to_camel_case(s):
    # Convert dash/underscore-delimited text to camelCase.
    # Works over the reversed string so each character can look at the
    # character that *precedes* it in the original: if that predecessor is a
    # separator the character is uppercased, separators themselves are
    # dropped, and everything else is kept. The first character of s is kept
    # verbatim and the joined tail is reversed back at the end.
    # NOTE(review): with consecutive separators ("a__b") the inner separator
    # survives as '_' -- presumably inputs have single separators; confirm.
    return ('' if not s else s[0] + ''.join(c.upper() if s[::-1][i + 1] in '-_' else '' if c in '-_' else c for i, c in enumerate(s[::-1][:-1]))[::-1])
def to_camel_case(s):
    # Convert dash/underscore-delimited text to camelCase.
    # Works over the reversed string so each character can look at the
    # character that *precedes* it in the original: if that predecessor is a
    # separator the character is uppercased, separators themselves are
    # dropped, and everything else is kept. The first character of s is kept
    # verbatim and the joined tail is reversed back at the end.
    # NOTE(review): with consecutive separators ("a__b") the inner separator
    # survives as '_' -- presumably inputs have single separators; confirm.
    return '' if not s else s[0] + ''.join((c.upper() if s[::-1][i + 1] in '-_' else '' if c in '-_' else c for (i, c) in enumerate(s[::-1][:-1])))[::-1]
# Tuple of the positive digits 1 through 9, printed for inspection.
angka_positif = (1, 2, 3, 4, 5, 6, 7, 8, 9)
print(angka_positif)
# Tuple of the positive digits 1 through 9, printed for inspection.
angka_positif = tuple(range(1, 10))
print(angka_positif)
# --- Annotation-project constants ---------------------------------------
GOLD = ["7374", "7857", "7990", "8065", "8250"]  # gold-standard document ids
ANNOTATORS = ["01", "02", "03", "04", "05", "06"]  # annotator ids
# Column header for the per-document assignment table.
DOC_HEADER = ["order", "doc_id", "assigned", "nr_sens_calculated", "nr_sens", "annotator_1", "annotator_2", "assigned_2"]
CYCLE_FILE = "../input/batch_cycles.csv"
CYCLE_COL = "cycle"
ASSIGNMENT_TXT = "assignment.txt"
ASSIGNMENT_XLSX = "assignment.xlsx"
ASSIGNMENT_FILE_HEADER = ["doc_id"]
ASSIGNMENT_DF_HEADER_BASE = ["annotator", "assigned_sentences"]
ASSIGNMENT_ADDITIONAL_HEADER = ["docs_in_batch", "sentences_in_batch", "sum_sentences"]
ANNOTATOR_DOWNLOAD_FOLDER = "download"
ANNOTATOR_UPLOAD_FOLDER = "upload"
PHASE_STR = "phase"
# Annotation attributes excluded from processing (German labels).
ATTRIBUTES_TO_IGNORE = {
    "AusnahmePruefungErforderlich",
    "WeitereBestimmungPruefungErforderlich",
    "ZuVorherigemSatzGehoerig",
    "Segmentierungsfehler",
    "NoAttribute",
    "N/A",
    "StrittigeBedeutung",
}

############
# Labels review
############
class LabelReviewExcelConstants:
    # Layout of the label-review workbook (1-based rows/columns).
    MAIN_SHEET_NAME = "Review"
    ATTRIBUTE_NAMED_RANGE = "Attribute"
    ATTRIBUTE_REVIEW_NAMED_RANGE = "Attribute_Review"
    SENTENCE_REVIEW_NAMED_RANGE = "Sentence_Review"
    ERROR_LABEL = "Error"
    FIRST_DATA_ROW = 2
    SEN_ID_COL = 1
    SEN_REVIEW_COL = 2
    SEN_TEXT_COL = 3
    # Attribute blocks start at ATTRIBUTE_OFFSET and repeat every
    # ATTRIBUTE_STEP columns -- inferred from the naming, confirm.
    ATTRIBUTE_OFFSET = 4
    ATTRIBUTE_STEP = 5
    CATEGORY_OFFSET = 0
    LABEL_OFFSET = 1
    COUNT_OFFSET = 2
    ANNOTATORS_OFFSET = 3
    ATTRIBUTE_REVIEW_OFFSET = 4
    ANNOTATOR_SEPARATOR = "\n"

############
# Full xlsx
############
class FullAnnotationExcelConstants:
    # Layout of the full annotation export workbook.
    MAIN_SHEET_NAME = "Data"
    ATTRIBUTE_NAMED_RANGE = "Attribute"
    TYPE_NAMED_RANGE = "Type"
    MODALITY_NAMED_RANGE = "Modality"
    FIRST_DATA_ROW = 2
    SEN_ID_COL = 1
    SEN_TEXT_COL = 2
    MODALITY_COL = 3
    ATTRIBUTE_OFFSET = 4
    ATTRIBUTE_STEP = 4
    CATEGORY_OFFSET = 0
    LABEL_OFFSET = 1
    VALUE_OFFSET = 2
    TYPE_OFFSET = 3
    LAST_COLUMN = "BO1"

############
# Full review
############
class FullReviewExcelConstants:
    # Layout of the full-review workbook (two annotators plus reviewer
    # columns per field -- inferred from the *_ANN_1/_ANN_2/_ANN_REV names).
    MAIN_SHEET_NAME = "Data"
    ATTRIBUTE_NAMED_RANGE = "Attribute"
    TYPE_NAMED_RANGE = "Type"
    MODALITY_NAMED_RANGE = "Modality"
    SENTENCE_REVIEW_NAMED_RANGE = "Sentence_Review"
    ERROR_LABEL = "Error"
    FIRST_DATA_ROW = 2
    SEN_ID_COL = 1
    SEN_REVIEW_COL = 2
    SEN_TEXT_COL = 3
    MODALITY_ANN_1_COL = 4
    MODALITY_ANN_2_COL = 5
    MODALITY_ANN_REV_COL = 6
    ATTRIBUTE_OFFSET = 7
    ATTRIBUTE_STEP = 6
    CATEGORY_OFFSET = 0
    LABEL_OFFSET = 1
    VALUE_OFFSET = 2
    TYPE_ANN_1_OFFSET = 3
    TYPE_ANN_2_OFFSET = 4
    TYPE_ANN_REV_OFFSET = 5
    LAST_COLUMN = "CX1"
# --- Annotation-project constants (lowercase variant) --------------------
# NOTE(review): module-level constants are conventionally UPPER_SNAKE_CASE
# (PEP 8); names kept as-is to preserve the existing interface.
gold = ['7374', '7857', '7990', '8065', '8250']  # gold-standard document ids
annotators = ['01', '02', '03', '04', '05', '06']  # annotator ids
# Column header for the per-document assignment table.
doc_header = ['order', 'doc_id', 'assigned', 'nr_sens_calculated', 'nr_sens', 'annotator_1', 'annotator_2', 'assigned_2']
cycle_file = '../input/batch_cycles.csv'
cycle_col = 'cycle'
assignment_txt = 'assignment.txt'
assignment_xlsx = 'assignment.xlsx'
assignment_file_header = ['doc_id']
assignment_df_header_base = ['annotator', 'assigned_sentences']
assignment_additional_header = ['docs_in_batch', 'sentences_in_batch', 'sum_sentences']
annotator_download_folder = 'download'
annotator_upload_folder = 'upload'
phase_str = 'phase'
# Annotation attributes excluded from processing (German labels).
attributes_to_ignore = {'AusnahmePruefungErforderlich', 'WeitereBestimmungPruefungErforderlich', 'ZuVorherigemSatzGehoerig', 'Segmentierungsfehler', 'NoAttribute', 'N/A', 'StrittigeBedeutung'}

# Layout of the label-review workbook (1-based rows/columns).
class Labelreviewexcelconstants:
    main_sheet_name = 'Review'
    attribute_named_range = 'Attribute'
    attribute_review_named_range = 'Attribute_Review'
    sentence_review_named_range = 'Sentence_Review'
    error_label = 'Error'
    first_data_row = 2
    sen_id_col = 1
    sen_review_col = 2
    sen_text_col = 3
    # Attribute blocks start at attribute_offset and repeat every
    # attribute_step columns -- inferred from the naming, confirm.
    attribute_offset = 4
    attribute_step = 5
    category_offset = 0
    label_offset = 1
    count_offset = 2
    annotators_offset = 3
    attribute_review_offset = 4
    annotator_separator = '\n'

# Layout of the full annotation export workbook.
class Fullannotationexcelconstants:
    main_sheet_name = 'Data'
    attribute_named_range = 'Attribute'
    type_named_range = 'Type'
    modality_named_range = 'Modality'
    first_data_row = 2
    sen_id_col = 1
    sen_text_col = 2
    modality_col = 3
    attribute_offset = 4
    attribute_step = 4
    category_offset = 0
    label_offset = 1
    value_offset = 2
    type_offset = 3
    last_column = 'BO1'

# Layout of the full-review workbook (two annotators plus reviewer columns
# per field -- inferred from the *_ann_1/_ann_2/_ann_rev names).
class Fullreviewexcelconstants:
    main_sheet_name = 'Data'
    attribute_named_range = 'Attribute'
    type_named_range = 'Type'
    modality_named_range = 'Modality'
    sentence_review_named_range = 'Sentence_Review'
    error_label = 'Error'
    first_data_row = 2
    sen_id_col = 1
    sen_review_col = 2
    sen_text_col = 3
    modality_ann_1_col = 4
    modality_ann_2_col = 5
    modality_ann_rev_col = 6
    attribute_offset = 7
    attribute_step = 6
    category_offset = 0
    label_offset = 1
    value_offset = 2
    type_ann_1_offset = 3
    type_ann_2_offset = 4
    type_ann_rev_offset = 5
    last_column = 'CX1'
"""
Item 29: Avoid Repeated Work in Comprehensions by Using Assignment Expressions
"""

stock = {
    'nails': 125,
    'screws': 35,
    'wingnuts': 8,
    'washers': 24,
}

order = ['screws', 'wingnuts', 'clips']


def get_batches(count, size):
    # Number of full batches of `size` items that `count` can fill.
    return count // size


# Baseline: plain loop that calls get_batches once per item.
result = {}
for name in order:
    count = stock.get(name, 0)
    batches = get_batches(count, 8)
    if batches:
        result[name] = batches
print(f'result: {result}')

# Use a dictionary comprehension to shorten this code (but note the
# duplicated get_batches call in value and condition).
found = {name: get_batches(stock.get(name, 0), 8)
         for name in order
         if get_batches(stock.get(name, 0), 8)}
print(f'found: {found}')

# To avoid the repeated call we can use the walrus operator.
# The assignment is made in the condition because it is evaluated first;
# assigning in the value expression would cause a NameError.
found_better = {name: batches for name in order
                if (batches := get_batches(stock.get(name, 0), 8))}
# NOTE(review): this prints `found`, not `found_better` -- the two dicts are
# equal here, but the variable looks like a typo; confirm intent.
print(f'found_better: {found}')

# One other advantage of comprehensions is avoiding loop-variable leakage --
# but an assignment expression inside one DOES leak, as shown here.
half = [(last := count // 2) for count in stock.values()]
print(f'Last item of {half} is {last}')

# A plain for-loop variable leaks too.
for count in stock.values():
    pass
print(f'Last item of {list(stock.values())} is {count}')

# A regular comprehension loop variable does NOT leak.
half = [count_comp // 2 for count_comp in stock.values()]
print(f'half = {half}')
try:
    count_comp
except NameError:
    print('Oops! name \'count_comp\' is not defined')

# An assignment expression also works with generator expressions.
found = ((name, batches) for name in order
         if (batches := get_batches(stock.get(name, 0), 8)))
print(f'next(found): {next(found)}')
print(f'next(found): {next(found)}')
"""
Item 29: Avoid Repeated Work in Comprehensions by Using Assignment Expressions
"""

stock = {'nails': 125, 'screws': 35, 'wingnuts': 8, 'washers': 24}

order = ['screws', 'wingnuts', 'clips']


def get_batches(count, size):
    # Number of full batches of `size` items that `count` can fill.
    return count // size


# Baseline: plain loop that calls get_batches once per item.
result = {}
for name in order:
    count = stock.get(name, 0)
    batches = get_batches(count, 8)
    if batches:
        result[name] = batches
print(f'result: {result}')

# Dictionary comprehension version (duplicated get_batches call).
found = {name: get_batches(stock.get(name, 0), 8) for name in order if get_batches(stock.get(name, 0), 8)}
print(f'found: {found}')

# Walrus operator avoids the repeated call; assignment goes in the
# condition because it is evaluated before the value expression.
found_better = {name: batches for name in order if (batches := get_batches(stock.get(name, 0), 8))}
# NOTE(review): this prints `found`, not `found_better` -- the two dicts are
# equal here, but the variable looks like a typo; confirm intent.
print(f'found_better: {found}')

# An assignment expression inside a comprehension leaks into this scope.
half = [(last := (count // 2)) for count in stock.values()]
print(f'Last item of {half} is {last}')

# A plain for-loop variable leaks too.
for count in stock.values():
    pass
print(f'Last item of {list(stock.values())} is {count}')

# A regular comprehension loop variable does NOT leak.
half = [count_comp // 2 for count_comp in stock.values()]
print(f'half = {half}')
try:
    count_comp
except NameError:
    print("Oops! name 'count_comp' is not defined")

# An assignment expression also works with generator expressions.
found = ((name, batches) for name in order if (batches := get_batches(stock.get(name, 0), 8)))
print(f'next(found): {next(found)}')
print(f'next(found): {next(found)}')
""" >>> 'dir/bar.py:2' """
""" >>> 'dir/bar.py:2' """
class IntegerField:
    """Field whose string representation is the literal ``integer``."""

    def __str__(self):
        return 'integer'
class Integerfield:
    """Field whose string representation is the literal ``integer``."""

    def __str__(self):
        return 'integer'
class AdministrativeDivision:
    """Base class carrying the hierarchy level of a division."""

    def __init__(self, level):
        self.level = level


class Province(AdministrativeDivision):
    """First-level division."""

    type = 'Province'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 1

    def __str__(self):
        return '{} {}'.format(self.name, self.type)


class Regency(AdministrativeDivision):
    """Second-level division (rural)."""

    type = 'Regency'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 2

    def __str__(self):
        return '{} {}'.format(self.name, self.type)


class City(AdministrativeDivision):
    """Second-level division (urban)."""

    type = 'City'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 2

    def __str__(self):
        return '{} {}'.format(self.name, self.type)


class District(AdministrativeDivision):
    """Third-level division."""

    type = 'District'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 3

    def __str__(self):
        return '{} {}'.format(self.name, self.type)
class Administrativedivision:
    """Base class carrying the hierarchy level of a division."""

    def __init__(self, level):
        self.level = level


# Bug fix: the subclasses below inherited from the undefined name
# ``AdministrativeDivision`` (the base is ``Administrativedivision``),
# which raised NameError at import time. They now use the defined base.
class Province(Administrativedivision):
    """First-level division."""

    type = 'Province'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 1

    def __str__(self):
        return f'{self.name} {self.type}'


class Regency(Administrativedivision):
    """Second-level division (rural)."""

    type = 'Regency'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 2

    def __str__(self):
        return f'{self.name} {self.type}'


class City(Administrativedivision):
    """Second-level division (urban)."""

    type = 'City'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 2

    def __str__(self):
        return f'{self.name} {self.type}'


class District(Administrativedivision):
    """Third-level division."""

    type = 'District'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 3

    def __str__(self):
        return f'{self.name} {self.type}'
# Binary Tree implemented using a Python list, heap-style 1-based layout:
# the children of the node at index i live at 2*i and 2*i + 1; slot 0 is
# intentionally unused.
class BinaryTree:
    def __init__(self, size) -> None:
        self.cl = size * [None]      # backing array; slot 0 never used
        self.lastUsedIndex = 0       # index of the last occupied slot
        self.maxSize = size          # capacity of the backing array

    def insertNode(self, value):
        """Place value in the next free level-order slot."""
        if self.lastUsedIndex + 1 == self.maxSize:
            return "BT is full"
        self.cl[self.lastUsedIndex + 1] = value
        self.lastUsedIndex += 1
        return "value successfully inserted"

    def searchNode(self, value):
        """Linear scan of the backing list."""
        if value in self.cl:
            return "Success"
        return "Failed"

    def preOrderTraversal(self, index=1):
        """Print node, then left subtree, then right subtree."""
        if index > self.lastUsedIndex:
            return
        print(self.cl[index])
        self.preOrderTraversal(index * 2)
        self.preOrderTraversal(index * 2 + 1)

    def inOrderTraversal(self, index=1):
        """Print left subtree, then node, then right subtree."""
        if index > self.lastUsedIndex:
            return
        self.inOrderTraversal(index * 2)
        print(self.cl[index])
        self.inOrderTraversal(index * 2 + 1)

    def postOrderTraversal(self, index=1):
        """Print left subtree, then right subtree, then node."""
        if index > self.lastUsedIndex:
            return
        self.postOrderTraversal(index * 2)
        self.postOrderTraversal(index * 2 + 1)
        print(self.cl[index])

    def levelOrderTraversal(self, index=1):
        """Print nodes level by level (the list's natural order)."""
        for i in range(index, self.lastUsedIndex + 1):
            print(self.cl[i])

    def deleteNode(self, value):
        """Replace the first node holding value with the deepest node.

        Bug fix: previously fell off the end and returned None when the
        value was absent; now reports "Node not found" explicitly.
        """
        if self.lastUsedIndex == 0:
            return "List is empty"
        for i in range(1, self.lastUsedIndex + 1):
            if self.cl[i] == value:
                self.cl[i] = self.cl[self.lastUsedIndex]
                self.cl[self.lastUsedIndex] = None
                self.lastUsedIndex -= 1
                return "Node successfully deleted"
        return "Node not found"

    def deleteBT(self):
        """Drop the backing list, releasing all nodes."""
        self.cl = None
        return "BT deleted successfully"


bt = BinaryTree(8)
bt.insertNode("drinks")
bt.insertNode("hot")
bt.insertNode("cold")
bt.insertNode("tea")
bt.insertNode("coffee")
print(bt.searchNode('hot'))
print(bt.deleteNode('tea'))
bt.levelOrderTraversal()
class Binarytree: def __init__(self, size) -> None: self.cl = size * [None] self.lastUsedIndex = 0 self.maxSize = size def insert_node(self, value): if self.lastUsedIndex + 1 == self.maxSize: return 'BT is full' self.cl[self.lastUsedIndex + 1] = value self.lastUsedIndex += 1 return 'value successfully inserted' def search_node(self, value): if value in self.cl: return 'Success' return 'Failed' def pre_order_traversal(self, index=1): if index > self.lastUsedIndex: return print(self.cl[index]) self.preOrderTraversal(index * 2) self.preOrderTraversal(index * 2 + 1) def in_order_traversal(self, index=1): if index > self.lastUsedIndex: return self.inOrderTraversal(index * 2) print(self.cl[index]) self.inOrderTraversal(index * 2 + 1) def post_order_traversal(self, index=1): if index > self.lastUsedIndex: return self.postOrderTraversal(index * 2) self.postOrderTraversal(index * 2 + 1) print(self.cl[index]) def level_order_traversal(self, index=1): for i in range(index, self.lastUsedIndex + 1): print(self.cl[i]) def delete_node(self, value): if self.lastUsedIndex == 0: return 'List is empty' for i in range(1, self.lastUsedIndex + 1): if self.cl[i] == value: self.cl[i] = self.cl[self.lastUsedIndex] self.cl[self.lastUsedIndex] = None self.lastUsedIndex -= 1 return 'Node successfully deleted' def delete_bt(self): self.cl = None return 'BT deleted successfully' bt = binary_tree(8) bt.insertNode('drinks') bt.insertNode('hot') bt.insertNode('cold') bt.insertNode('tea') bt.insertNode('coffee') print(bt.searchNode('hot')) print(bt.deleteNode('tea')) bt.levelOrderTraversal()
# Read integers from stdin while they stay in the range 1..5, count how many
# of them equal 5, and print that count once an out-of-range value ends the
# loop. (The terminating value itself is not counted.)
num1 = int(input())
count1 = 0
while 1 <= num1 <= 5:
    if num1 == 5:
        count1 += 1
    num1 = int(input())
print(count1)
# Read integers from stdin while they stay in the range 1..5, count how many
# of them equal 5, and print that count once an out-of-range value ends the
# loop. (The terminating value itself is not counted.)
num1 = int(input())
count1 = 0
while 1 <= num1 <= 5:
    if num1 == 5:
        count1 += 1
    num1 = int(input())
print(count1)
class Label(object):
    """Type-keyed label: two labels compare equal iff they are instances of
    the exact same class; hashing follows the class name."""

    def __eq__(self, other):
        assert isinstance(other, Label)
        return type(self) == type(other)

    def __ne__(self, other):
        assert isinstance(other, Label)
        return not (type(self) == type(other))

    def __hash__(self):
        return hash(self.to_class_str())

    def to_class_str(self):
        """Name of the concrete class, used as the hash key."""
        return type(self).__name__


class NoLabel(Label):
    """Label subtype representing the absence of a label."""
class Label(object): def __eq__(self, other): assert isinstance(other, Label) return type(self) == type(other) def __ne__(self, other): assert isinstance(other, Label) return type(self) != type(other) def __hash__(self): return hash(self.to_class_str()) def to_class_str(self): return self.__class__.__name__ class Nolabel(Label): pass
#Function to insert a string in the middle of a string def string_in(): string=str(input("Enter a string :")) mid=len(string)//2 word=str(input("Enter a word to insert in middle :")) new_string=string[:mid]+word+string[mid:] print(new_string) string_in()
def string_in(): string = str(input('Enter a string :')) mid = len(string) // 2 word = str(input('Enter a word to insert in middle :')) new_string = string[:mid] + word + string[mid:] print(new_string) string_in()
# # This file contains "references" to unreferenced code that should be kept and not considered dead code # not_used_but_whitelisted
not_used_but_whitelisted
""" Contains exception classes. """ class KRDictException(Exception): """ Contains information about an API error. This exception is only thrown if the argument passed to the ``raise_api_errors`` parameter is True. - ``message``: The error message associated with the error. - ``error_code``: The error code returned by the API. - ``request_params``: A dict containing the transformed parameters that were sent to the API. """ def __init__(self, message, error_code, params): super().__init__(message) self.message = message self.error_code = error_code self.request_params = params def __reduce__(self): return (KRDictException, (self.message, self.error_code, self.request_params))
""" Contains exception classes. """ class Krdictexception(Exception): """ Contains information about an API error. This exception is only thrown if the argument passed to the ``raise_api_errors`` parameter is True. - ``message``: The error message associated with the error. - ``error_code``: The error code returned by the API. - ``request_params``: A dict containing the transformed parameters that were sent to the API. """ def __init__(self, message, error_code, params): super().__init__(message) self.message = message self.error_code = error_code self.request_params = params def __reduce__(self): return (KRDictException, (self.message, self.error_code, self.request_params))
# Copyright 2018 TNG Technology Consulting GmbH, Unterfoehring, Germany # Licensed under the Apache License, Version 2.0 - see LICENSE.md in project root directory # TODO IT-1: give this function some great functionality def great_function(): pass # TODO: give this function some greater functionality def greater_function(): pass
def great_function(): pass def greater_function(): pass
class Stats:# pragma: no cover """Abstract class defining the basis of all Stats """ def get_keys(self): """Return the keys of the Stats Returns ------- keys : tuple of strings Key for the Stats """ return () def get_manager(self): """Return the StatsManager required Returns ------- stats_manager : StatsManager StatsManager for the Stats """ pass def get_game_fields_required(self): """Return the required fields at game level Returns ------- game_fields_required : list of strings List of fields """ return [] def get_participant_fields_required(self): """Return the required fields at participant level Returns ------- participant_fields_required : list of strings List of fields """ return [] def get_stats_fields_required(self): """Return the required fields at stats level Returns ------- stats_fields_required : list of strings List of fields """ return [] def get_id_fields_required(self): """Return the required fields at ID level Returns ------- id_fields_required : list of strings List of fields """ return [] def get_stats(self, df): """Return the computed stats Parameters ---------- df : Pandas DataFrame DataFrame containing all fields required to compute the stats Returns ------- stats : Pandas Series Value oif the computed stats grouped by the key """ pass class ChampionStats(Stats):# pragma: no cover """Abstract class defining a Stats for Champions """ pass class ChampionBanStats(Stats):# pragma: no cover """Abstract class defining a Stats for Champions bans """ def get_stats(self, dfs): """Return the computed stats Parameters ---------- dfs : tuple of Pandas DataFrames (df, df_bans) df :Pandas DataFrame DataFrame containing all fields required to compute the stats df_bans: Pandas DataFrame DataFrame containing bans information to compute the stats Returns ------- stats : Pandas Series Value oif the computed stats grouped by the key """ pass class ItemStats(Stats):# pragma: no cover """Abstract class defining a Stats for Items """ pass class PlayerStats(Stats):# 
pragma: no cover """Abstract class defining a Stats for Players """ pass class SpecialStats(Stats):# pragma: no cover """Abstract class defining a Stats that will handle itself the game data """ def push_game(self, game): pass def get_stats(self): pass def set_rank_manager(self, rank_manager): """Set the rank manager for when needed Parameters ---------- rank_manager : RankManager Object containing players rank """ self._rank_manager = rank_manager class DerivedStats(Stats):# pragma: no cover """Abstract class defining a Stats that is derived from another and must be computed afterward """ order = 0 def get_stats(self, df, stats): pass def get_stats_required(self): return []
class Stats: """Abstract class defining the basis of all Stats """ def get_keys(self): """Return the keys of the Stats Returns ------- keys : tuple of strings Key for the Stats """ return () def get_manager(self): """Return the StatsManager required Returns ------- stats_manager : StatsManager StatsManager for the Stats """ pass def get_game_fields_required(self): """Return the required fields at game level Returns ------- game_fields_required : list of strings List of fields """ return [] def get_participant_fields_required(self): """Return the required fields at participant level Returns ------- participant_fields_required : list of strings List of fields """ return [] def get_stats_fields_required(self): """Return the required fields at stats level Returns ------- stats_fields_required : list of strings List of fields """ return [] def get_id_fields_required(self): """Return the required fields at ID level Returns ------- id_fields_required : list of strings List of fields """ return [] def get_stats(self, df): """Return the computed stats Parameters ---------- df : Pandas DataFrame DataFrame containing all fields required to compute the stats Returns ------- stats : Pandas Series Value oif the computed stats grouped by the key """ pass class Championstats(Stats): """Abstract class defining a Stats for Champions """ pass class Championbanstats(Stats): """Abstract class defining a Stats for Champions bans """ def get_stats(self, dfs): """Return the computed stats Parameters ---------- dfs : tuple of Pandas DataFrames (df, df_bans) df :Pandas DataFrame DataFrame containing all fields required to compute the stats df_bans: Pandas DataFrame DataFrame containing bans information to compute the stats Returns ------- stats : Pandas Series Value oif the computed stats grouped by the key """ pass class Itemstats(Stats): """Abstract class defining a Stats for Items """ pass class Playerstats(Stats): """Abstract class defining a Stats for Players """ pass class 
Specialstats(Stats): """Abstract class defining a Stats that will handle itself the game data """ def push_game(self, game): pass def get_stats(self): pass def set_rank_manager(self, rank_manager): """Set the rank manager for when needed Parameters ---------- rank_manager : RankManager Object containing players rank """ self._rank_manager = rank_manager class Derivedstats(Stats): """Abstract class defining a Stats that is derived from another and must be computed afterward """ order = 0 def get_stats(self, df, stats): pass def get_stats_required(self): return []
n = 0 for i in range(999, 100, -1): for j in range(i, 100, -1): x = i * j if x > n: s = str(i * j) if s == s[::-1]: n = i * j print(n)
n = 0 for i in range(999, 100, -1): for j in range(i, 100, -1): x = i * j if x > n: s = str(i * j) if s == s[::-1]: n = i * j print(n)
class DictSerializable: @classmethod def from_dict(cls, data: dict) -> 'DictSerializable': return cls(**data) def to_dict(self) -> dict: return vars(self)
class Dictserializable: @classmethod def from_dict(cls, data: dict) -> 'DictSerializable': return cls(**data) def to_dict(self) -> dict: return vars(self)
known = {} def ack(m, n): if m == 0: return n + 1 if m > 0 and n == 0: return ack(m-1, 1) if m > 0 and n > 0: if (m,n) in known: print('Cache hit') return known[(m, n)] else: known[(m, n)] = ack(m - 1, ack(m , n - 1)) return known[(m, n)] else: return None print ('ack(3, 4) =', ack(3, 4)) print ('ack(3, 5) =', ack(3, 5)) print ('ack(3, 6) =', ack(3, 6)) print ('ack(3, 7) =', ack(3, 7))
known = {} def ack(m, n): if m == 0: return n + 1 if m > 0 and n == 0: return ack(m - 1, 1) if m > 0 and n > 0: if (m, n) in known: print('Cache hit') return known[m, n] else: known[m, n] = ack(m - 1, ack(m, n - 1)) return known[m, n] else: return None print('ack(3, 4) =', ack(3, 4)) print('ack(3, 5) =', ack(3, 5)) print('ack(3, 6) =', ack(3, 6)) print('ack(3, 7) =', ack(3, 7))
# # @lc app=leetcode id=450 lang=python3 # # [450] Delete Node in a BST # # @lc code=start # Definition for a binary tree node. # class TreeNode: # def __init__(self, val=0, left=None, right=None): # self.val = val # self.left = left # self.right = right class Solution: def deleteNode(self, root: TreeNode, key: int) -> TreeNode: if not root: return None if root.val == key: if not root.right: left = root.left return left right = root.right while root.left: right = root.left root.val, right.val = right.val, root.values() root.left = self.deleteNode(root.left, key) root.right = self.deleteNode(root.right, key) return root # @lc code=end
class Solution: def delete_node(self, root: TreeNode, key: int) -> TreeNode: if not root: return None if root.val == key: if not root.right: left = root.left return left right = root.right while root.left: right = root.left (root.val, right.val) = (right.val, root.values()) root.left = self.deleteNode(root.left, key) root.right = self.deleteNode(root.right, key) return root
__title__ = 'pairing-functions' __description__ = 'A collection of pairing functions' __url__ = 'https://github.com/ConvertGroupLabs/pairing-functions' __version__ = '0.2.1' __author__ = 'Convert Group Labs' __author_email__ = 'tools@convertgroup.com' __license__ = 'MIT License' __copyright__ = 'Copyright 2020 Convert Group'
__title__ = 'pairing-functions' __description__ = 'A collection of pairing functions' __url__ = 'https://github.com/ConvertGroupLabs/pairing-functions' __version__ = '0.2.1' __author__ = 'Convert Group Labs' __author_email__ = 'tools@convertgroup.com' __license__ = 'MIT License' __copyright__ = 'Copyright 2020 Convert Group'
def deleteMid(head): # check if the list contains 1 or more nodes if head is None or head.next is None: return None #assign pointers to their respective positions prev, i, j = None, head, head while j and j.next: j = j.next.next;# j pointer moves 2 nodes ahead # update prev pointer , prev holds previous value of i pointer prev = i; # i pointer moves 1 node ahead i = i.next; # since i pointer was moving at half speed of j pointer , it points at # mid node when j pointer reaches the end prev.next = i.next; # bypassing mid node return head; #Driver's code class Node: def __init__(self,data): self.data = data self.next = None class Llist: def __init__(self): self.head = None def insert(self,data,link): node = Node (data) if not self.head: self.head = node return node link.next = node return node def printList(head): while head: print(head.data, end=" ") head = head.next print() if __name__ == "__main__": t = int (input()) for x in range(t): n = int(input()) arr1 = [int(y) for y in input().split()] L1 = Llist() link = None for nodeData in arr1: link = L1.insert (nodeData, link) res = deleteMid(l1.head) printList(res)
def delete_mid(head): if head is None or head.next is None: return None (prev, i, j) = (None, head, head) while j and j.next: j = j.next.next prev = i i = i.next prev.next = i.next return head class Node: def __init__(self, data): self.data = data self.next = None class Llist: def __init__(self): self.head = None def insert(self, data, link): node = node(data) if not self.head: self.head = node return node link.next = node return node def print_list(head): while head: print(head.data, end=' ') head = head.next print() if __name__ == '__main__': t = int(input()) for x in range(t): n = int(input()) arr1 = [int(y) for y in input().split()] l1 = llist() link = None for node_data in arr1: link = L1.insert(nodeData, link) res = delete_mid(l1.head) print_list(res)
"""Contains ascii-art project related logos.""" # http://patorjk.com/software/taag/#p=display&f=Varsity&t=PnP PNP = r""" _______ _______ |_ __ \ |_ __ \ | |__) |_ .--. | |__) | | ___/[ `.-. | | ___/ _| |_ | | | | _| | |_____| [___||__]|_____| """
"""Contains ascii-art project related logos.""" pnp = ' _______ _______\n|_ __ \\ |_ __ \\\n | |__) |_ .--. | |__) |\n | ___/[ `.-. | | ___/\n _| |_ | | | | _| |\n|_____| [___||__]|_____|\n'
N = int(input()) a = N % 1000 if a == 0: print(0) else: print(1000 - a)
n = int(input()) a = N % 1000 if a == 0: print(0) else: print(1000 - a)
# -*- coding: utf-8 -*- f = open("dico.txt", "r") contrasenia = "hola" contador = 0 linea = f.readline() while linea: contador += 1 if linea.strip() == contrasenia.strip(): print('Contrasenia encontrada: ' + linea) print('en ' + str(contador) + ' intentos') break linea = f.readline() f.close()
f = open('dico.txt', 'r') contrasenia = 'hola' contador = 0 linea = f.readline() while linea: contador += 1 if linea.strip() == contrasenia.strip(): print('Contrasenia encontrada: ' + linea) print('en ' + str(contador) + ' intentos') break linea = f.readline() f.close()
first = "Murat" last = "Aksoy" name = f"Welcome to pyhton '{last}', {first}" print(name)
first = 'Murat' last = 'Aksoy' name = f"Welcome to pyhton '{last}', {first}" print(name)
class Solution: def frequencySort(self, s): """ :type s: str :rtype: str """ dic = {} for item in s: if item in dic: dic[item] += 1 else: dic[item] = 1 ans = [0 for x in range(len(dic))] i = 0 for item in dic: # print(item, dic[item]) ans[i] = (item, dic[item]) i += 1 ans.sort(key= lambda item: item[1], reverse=True) end = "" for item in ans: end += item[0] * item[1] return end
class Solution: def frequency_sort(self, s): """ :type s: str :rtype: str """ dic = {} for item in s: if item in dic: dic[item] += 1 else: dic[item] = 1 ans = [0 for x in range(len(dic))] i = 0 for item in dic: ans[i] = (item, dic[item]) i += 1 ans.sort(key=lambda item: item[1], reverse=True) end = '' for item in ans: end += item[0] * item[1] return end
AUTHOR="Zawadi Done" DESCRIPTION="This module wil install/update MassDNS" INSTALL_TYPE="GIT" REPOSITORY_LOCATION="https://github.com/blechschmidt/massdns" INSTALL_LOCATION="massdns" DEBIAN="" AFTER_COMMANDS="cd {INSTALL_LOCATION},make,cp bin/massdns /usr/local/bin/" LAUNCHER="massdns"
author = 'Zawadi Done' description = 'This module wil install/update MassDNS' install_type = 'GIT' repository_location = 'https://github.com/blechschmidt/massdns' install_location = 'massdns' debian = '' after_commands = 'cd {INSTALL_LOCATION},make,cp bin/massdns /usr/local/bin/' launcher = 'massdns'
# Definition for singly-linked list. # class ListNode(object): # def __init__(self, val=0, next=None): # self.val = val # self.next = next class Solution(object): # # Iterative (accepted), Time: O(m + n), Space: O(1) # def insert(self, pos_node, node): # node.next = pos_node.next # pos_node.next = node # def mergeTwoLists(self, l1, l2): # """ # :type l1: ListNode # :type l2: ListNode # :rtype: ListNode # """ # # Handle Base Case # if l1 is None: # return l2 # elif l2 is None: # return l1 # root = l1 if l1.val < l2.val else l2 # other = l2 if root is l1 else l1 # cur = root # while cur.next: # if other and other.val < cur.next.val: # self.insert(cur, ListNode(other.val, other.next)) # other = other.next # cur = cur.next # while other: # self.insert(cur, ListNode(other.val, other.next)) # cur = cur.next # other = other.next # return root # # Recursive (Top Voted), Time: O(m + n), Space: O(m + n) # def mergeTwoLists(self, l1, l2): # if not l1 or not l2: # return l1 or l2 # if l1.val < l2.val: # l1.next = self.mergeTwoLists(l1.next, l2) # return l1 # else: # l2.next = self.mergeTwoLists(l1, l2.next) # return l2 # Iterative (Top Voted), Time: O(m + n), Space: O(1) def mergeTwoLists(self, l1, l2): dummy = cur = ListNode(0) while l1 and l2: if l1.val < l2.val: cur.next = l1 l1 = l1.next else: cur.next = l2 l2 = l2.next cur = cur.next cur.next = l1 or l2 return dummy.next
class Solution(object): def merge_two_lists(self, l1, l2): dummy = cur = list_node(0) while l1 and l2: if l1.val < l2.val: cur.next = l1 l1 = l1.next else: cur.next = l2 l2 = l2.next cur = cur.next cur.next = l1 or l2 return dummy.next
''' This module declares constants needed for this solution. This is to remove magic numbers ''' CRATER_CHANGE_WHEN_SUNNY = 0.9 CRATER_CHANGE_WHEN_RAINY = 1.2 CRATER_CHANGE_WHEN_WINDY = 0.0 ORBIT1_ORBIT_DISTANCE = 18 ORBIT1_CRATERS_COUNT = 20 ORBIT2_ORBIT_DISTANCE = 20 ORBIT2_CRATERS_COUNT = 10
""" This module declares constants needed for this solution. This is to remove magic numbers """ crater_change_when_sunny = 0.9 crater_change_when_rainy = 1.2 crater_change_when_windy = 0.0 orbit1_orbit_distance = 18 orbit1_craters_count = 20 orbit2_orbit_distance = 20 orbit2_craters_count = 10
class Solution: def singleNumber(self, nums: List[int]) -> int: ret = 0 for n in nums: ret ^= n return ret
class Solution: def single_number(self, nums: List[int]) -> int: ret = 0 for n in nums: ret ^= n return ret
#! /usr/bin/env python3 """Sort a list and store previous indices of values""" # enumerate is a great but little-known tool for writing nice code l = [4, 2, 3, 5, 1] print("original list: ", l) values, indices = zip(*sorted((a, b) for (b, a) in enumerate(l))) # now values contains the sorted list and indices contains # the indices of the corresponding value in the original list print("sorted list: ", values) print("original indices: ", indices) # note that this returns tuples, but if necessary they can # be converted to lists using list()
"""Sort a list and store previous indices of values""" l = [4, 2, 3, 5, 1] print('original list: ', l) (values, indices) = zip(*sorted(((a, b) for (b, a) in enumerate(l)))) print('sorted list: ', values) print('original indices: ', indices)
#!/usr/bin/env python3 # https://www.urionlinejudge.com.br/judge/en/problems/view/1020 def decompose(total, value): decomposed = total // value return total - decomposed * value, decomposed def main(): DAYS = int(input()) DAYS, YEARS = decompose(DAYS, 365) DAYS, MONTHS = decompose(DAYS, 30) print(YEARS, 'ano(s)') print(MONTHS, 'mes(es)') print(DAYS, 'dia(s)') # Start the execution if it's the main script if __name__ == "__main__": main()
def decompose(total, value): decomposed = total // value return (total - decomposed * value, decomposed) def main(): days = int(input()) (days, years) = decompose(DAYS, 365) (days, months) = decompose(DAYS, 30) print(YEARS, 'ano(s)') print(MONTHS, 'mes(es)') print(DAYS, 'dia(s)') if __name__ == '__main__': main()
''' Creating a very basic module in Python ''' languages = {'Basic', 'QBasic', 'Cobol', 'Pascal', 'Assembly', 'C/C++', 'Java', 'Python', 'Ruby'} values = 10, 50, 60, 11, 98, 75, 65, 32 def add(*args: float) -> float: sum = 0.0 for value in args: sum += value return sum def multiply(*args: float) -> float: prod = 1.0 for value in args: prod *= value return prod def _prime(number: int) -> bool: if number <= 1: return False elif number == 2: return True elif number % 2 == 0: return False else: for n in range(3, int(number ** 0.5), 2): if number % n == 0: return False else: return True
""" Creating a very basic module in Python """ languages = {'Basic', 'QBasic', 'Cobol', 'Pascal', 'Assembly', 'C/C++', 'Java', 'Python', 'Ruby'} values = (10, 50, 60, 11, 98, 75, 65, 32) def add(*args: float) -> float: sum = 0.0 for value in args: sum += value return sum def multiply(*args: float) -> float: prod = 1.0 for value in args: prod *= value return prod def _prime(number: int) -> bool: if number <= 1: return False elif number == 2: return True elif number % 2 == 0: return False else: for n in range(3, int(number ** 0.5), 2): if number % n == 0: return False else: return True
__author__ = 'roland' class HandlerResponse(object): def __init__(self, content_processed, outside_html_action=None, tester_error_description=None, cookie_jar=None, urllib_request=None, urllib_response=None): """ :param content_processed: bool set to True if a scripted ContentHandler matches and processes a page; If False then the next ContentHandler must take over :param cookie_jar: A CookieJar instance :param urllib_response: A urllib.response.addinfourl instance :param outside_html_action: Value from outside_html_actions or None :param tester_error_description: optional text if outside_html_action is not None :param response: A semi parsed response, might be a dictionary """ self.content_processed = content_processed self.outside_html_action = outside_html_action self.cookie_jar = cookie_jar self.urllib_request = urllib_request self.urllib_response = urllib_response self.tester_error_description = tester_error_description class ContentHandler(object): """ Process the HTML contents of a response from the test target. This can either be a scripted approach, or invoke a browser. """ def __init__(self): pass def handle_response(self, http_response, auto_close_urls, conv=None, verify_ssl=True, cookie_jar=None, outside_html_actions=None): """ :param http_response: The HTTP response to handle :param auto_close_urls: A list of URLs that if encountered should lead to an immediate break in processing, like a form action. Other URLs in the page will load local resources such as css and js without returning control. :param conv: A aatest.Conversation instance :param verify_ssl: (True/False) whether the ssl certificates must be verified. Default is True :param cookie_jar: A http.cookiejar.CookieJar instance :param outside_html_actions: a dict describing buttons for the widget outside the html-area, to be used if the test must be aborted :return: A aatest.contenthandler.HandlerResponse instance """ raise NotImplemented()
__author__ = 'roland' class Handlerresponse(object): def __init__(self, content_processed, outside_html_action=None, tester_error_description=None, cookie_jar=None, urllib_request=None, urllib_response=None): """ :param content_processed: bool set to True if a scripted ContentHandler matches and processes a page; If False then the next ContentHandler must take over :param cookie_jar: A CookieJar instance :param urllib_response: A urllib.response.addinfourl instance :param outside_html_action: Value from outside_html_actions or None :param tester_error_description: optional text if outside_html_action is not None :param response: A semi parsed response, might be a dictionary """ self.content_processed = content_processed self.outside_html_action = outside_html_action self.cookie_jar = cookie_jar self.urllib_request = urllib_request self.urllib_response = urllib_response self.tester_error_description = tester_error_description class Contenthandler(object): """ Process the HTML contents of a response from the test target. This can either be a scripted approach, or invoke a browser. """ def __init__(self): pass def handle_response(self, http_response, auto_close_urls, conv=None, verify_ssl=True, cookie_jar=None, outside_html_actions=None): """ :param http_response: The HTTP response to handle :param auto_close_urls: A list of URLs that if encountered should lead to an immediate break in processing, like a form action. Other URLs in the page will load local resources such as css and js without returning control. :param conv: A aatest.Conversation instance :param verify_ssl: (True/False) whether the ssl certificates must be verified. Default is True :param cookie_jar: A http.cookiejar.CookieJar instance :param outside_html_actions: a dict describing buttons for the widget outside the html-area, to be used if the test must be aborted :return: A aatest.contenthandler.HandlerResponse instance """ raise not_implemented()
#Config, Reference, and configure provided in globals cards = Config( hd_audio=Config( match=dict(), name='Auto-%(id)s-%(label)s', restart=-1, input=dict( label="input", subdevice='0', channels=2, buffer_size=512, buffer_count=4, sample_rate=48000, quality=4 ), output=dict( label="output", subdevice='0', channels=2, buffer_size=512, buffer_count=4, sample_rate=48000, quality=4 ) ) )
cards = config(hd_audio=config(match=dict(), name='Auto-%(id)s-%(label)s', restart=-1, input=dict(label='input', subdevice='0', channels=2, buffer_size=512, buffer_count=4, sample_rate=48000, quality=4), output=dict(label='output', subdevice='0', channels=2, buffer_size=512, buffer_count=4, sample_rate=48000, quality=4)))
#!/usr/bin/env python3 # Copyright 2018, Rackspace US, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. TRIPLEO_MAPPING_GROUP = { # Mandatory group mappings 'hosts': ['undercloud', 'overcloud', 'Undercloud', 'Overcloud'], 'all': ['hosts'], # Infrastructure group mappings 'shared-infra_hosts': ['Controller', 'controller'], 'rabbitmq_all': ['Controller', 'controller'], 'memcached_all': ['Controller', 'controller'], 'galera_all': ['Controller', 'controller'], 'galera': ['Controller', 'controller'], 'rsyslog_all': ['Controller', 'controller'], 'utility_all': ['undercloud', 'Undercloud'], 'localhost': ['undercloud', 'Undercloud'], # OpenStack group mappings # Keystone 'keystone_all': ['Controller', 'controller'], # Nova 'nova_all': [ 'nova_placement', 'nova_conductor', 'nova_metadata', 'nova_consoleauth', 'nova_api', 'nova_migration_target', 'nova_compute', 'nova_scheduler', 'nova_libvirt', 'nova_vnc_proxy' ], 'nova_api_metadata': ['nova_metadata'], 'nova_api_os_compute': ['nova_api'], 'nova_compute': ['Compute'], 'nova_console': ['nova_consoleauth'], # Neutron 'neutron_all': [ 'neutron_metadata', 'neutron_dhcp', 'neutron_plugin_ml2', 'neutron_ovs_agent', 'neutron_api', 'neutron_l3' ], 'neutron_server': ['neutron_api'], 'neutron_dhcp_agent': ['neutron_dhcp'], 'neutron_l3_agent': ['neutron_l3'], 'neutron_linuxbridge_agent': ['neutron_ovs_agent'], 'neutron_openvswitch_agent': ['neutron_ovs_agent'], 'neutron_metadata_agent': ['neutron_metadata'], # Glance 'glance_all': ['glance_api', 
'glance_registry_disabled'], # Heat 'heat_all': [ 'heat_api', 'heat_api_cloudwatch_disabled', 'heat_engine', 'heat_api_cfn' ], # Cinder 'cinder_all': ['cinder_api', 'cinder_volume', 'cinder_scheduler'], # Horizon 'horizon_all': ['horizon'], # Designate 'designate_all': [ 'designate_worker', 'designate_api', 'designate_producer', 'designate_mdns', 'designate_central', ], # Ceph 'ceph_all': ['ceph_osd', 'ceph_mon', 'ceph_rgw'], 'mons': ['ceph_mon'], 'osds': ['ceph_osd'], 'rgws': ['ceph_rgw'], # Swift - skip swift_proxy because it already exists in tripleO 'swift_all': ['swift_proxy', 'swift_storage'], 'swift_hosts': ['swift_storage'], 'swift_acc': ['swift_storage'], 'swift_cont': ['swift_storage'], 'swift_obj': ['swift_storage'], # Octavia 'octavia_all': [ 'octavia_api', 'octavia_health_manager', 'octavia_housekeeping', 'octavia_worker' ] # NOTE(npawelek): Designate is not GA in OSP13 # Designate # 'designate_all': ['designate_all'], # NOTE(npawelek): Ironic mappings are not confirmed yet. We're not # currently deploying ironic to customers due to RFEs around multi # tenancy. When this functionality is needed, we'll need to define # all the groupings properly. # # Ironic # 'ironic_all': ['ironic_api', 'ironic_compute', 'ironic_conductor'], # 'ironic_api': ['ironic_api'], # 'ironic_conductor': ['ironic_conductor'], # 'ironic_compute': ['ironic_compute'], }
tripleo_mapping_group = {'hosts': ['undercloud', 'overcloud', 'Undercloud', 'Overcloud'], 'all': ['hosts'], 'shared-infra_hosts': ['Controller', 'controller'], 'rabbitmq_all': ['Controller', 'controller'], 'memcached_all': ['Controller', 'controller'], 'galera_all': ['Controller', 'controller'], 'galera': ['Controller', 'controller'], 'rsyslog_all': ['Controller', 'controller'], 'utility_all': ['undercloud', 'Undercloud'], 'localhost': ['undercloud', 'Undercloud'], 'keystone_all': ['Controller', 'controller'], 'nova_all': ['nova_placement', 'nova_conductor', 'nova_metadata', 'nova_consoleauth', 'nova_api', 'nova_migration_target', 'nova_compute', 'nova_scheduler', 'nova_libvirt', 'nova_vnc_proxy'], 'nova_api_metadata': ['nova_metadata'], 'nova_api_os_compute': ['nova_api'], 'nova_compute': ['Compute'], 'nova_console': ['nova_consoleauth'], 'neutron_all': ['neutron_metadata', 'neutron_dhcp', 'neutron_plugin_ml2', 'neutron_ovs_agent', 'neutron_api', 'neutron_l3'], 'neutron_server': ['neutron_api'], 'neutron_dhcp_agent': ['neutron_dhcp'], 'neutron_l3_agent': ['neutron_l3'], 'neutron_linuxbridge_agent': ['neutron_ovs_agent'], 'neutron_openvswitch_agent': ['neutron_ovs_agent'], 'neutron_metadata_agent': ['neutron_metadata'], 'glance_all': ['glance_api', 'glance_registry_disabled'], 'heat_all': ['heat_api', 'heat_api_cloudwatch_disabled', 'heat_engine', 'heat_api_cfn'], 'cinder_all': ['cinder_api', 'cinder_volume', 'cinder_scheduler'], 'horizon_all': ['horizon'], 'designate_all': ['designate_worker', 'designate_api', 'designate_producer', 'designate_mdns', 'designate_central'], 'ceph_all': ['ceph_osd', 'ceph_mon', 'ceph_rgw'], 'mons': ['ceph_mon'], 'osds': ['ceph_osd'], 'rgws': ['ceph_rgw'], 'swift_all': ['swift_proxy', 'swift_storage'], 'swift_hosts': ['swift_storage'], 'swift_acc': ['swift_storage'], 'swift_cont': ['swift_storage'], 'swift_obj': ['swift_storage'], 'octavia_all': ['octavia_api', 'octavia_health_manager', 'octavia_housekeeping', 'octavia_worker']}
def make_bio_dict(tags, start_idx=0): d = dict() i = start_idx for tag in tags: for pre_tag in ['B-', 'I-']: d[pre_tag + tag] = i i += 1 d['O'] = i return d
def make_bio_dict(tags, start_idx=0): d = dict() i = start_idx for tag in tags: for pre_tag in ['B-', 'I-']: d[pre_tag + tag] = i i += 1 d['O'] = i return d
# -*- coding: utf-8 -*- __author__ = "Sergey Aganezov" __email__ = "aganezov(at)cs.jhu.edu" __status__ = "production" version = "1.10" __all__ = ["grimm", "breakpoint_graph", "graphviz", "utils", "edge", "genome", "kbreak", "multicolor", "tree", "vertices", "utils", "distances"]
__author__ = 'Sergey Aganezov' __email__ = 'aganezov(at)cs.jhu.edu' __status__ = 'production' version = '1.10' __all__ = ['grimm', 'breakpoint_graph', 'graphviz', 'utils', 'edge', 'genome', 'kbreak', 'multicolor', 'tree', 'vertices', 'utils', 'distances']
size(200, 200) stroke(0) strokeWidth(10) fill(1, 0.3, 0) polygon((40, 40), (40, 160)) polygon((60, 40), (60, 160), (130, 160)) polygon((100, 40), (160, 160), (160, 40), close=False)
size(200, 200) stroke(0) stroke_width(10) fill(1, 0.3, 0) polygon((40, 40), (40, 160)) polygon((60, 40), (60, 160), (130, 160)) polygon((100, 40), (160, 160), (160, 40), close=False)
# imdb sortBy functions def sortMoviesBy(movies_names_wl, args): """ This module is used to sortMovies by the dict(arg.sortBy) :param list movies_names_wl: a list of movie_names_with_links movie : [Rank, Link, Title, Year, Rating, Number of Ratings, Runtime, Director] Rank : int Link : str Title : str Year : int NoR : int Runtime : str Director : str :param Namespace args: [ top, csv, sortBy, setup, console_print] top : int csv : bool sortBy : string setup : bool console_print : bool """ try: movies_names_wl = movies_names_wl[:args.top] except: print('**Error** : cannot slice top size') keydictionary = {'Rank': 0, 'Title': 2, 'Year': 3, 'Rating': 4, 'NoR': 5, 'Runtime': 6, 'Director': 7} try: movies_names_wl.sort( key=lambda movies_names_wl: movies_names_wl[keydictionary[args.sortBy]]) except: if(args.sortBy != None): print('**Error** : cannot sortBy **') return movies_names_wl
def sort_movies_by(movies_names_wl, args):
    """Trim the movie list to ``args.top`` entries and sort by ``args.sortBy``.

    :param list movies_names_wl: a list of movie rows:
        [Rank, Link, Title, Year, Rating, Number of Ratings, Runtime, Director]
    :param Namespace args: namespace with at least ``top`` (int) and
        ``sortBy`` (str or None) attributes.
    :return list: the trimmed list, sorted in place when a valid key was given.
    """
    try:
        movies_names_wl = movies_names_wl[:args.top]
    # was a bare ``except:``; narrowed so KeyboardInterrupt etc. still propagate
    except (TypeError, AttributeError):
        print('**Error** : cannot slice top size')
    # Column index of each sortable field within a movie row.
    keydictionary = {'Rank': 0, 'Title': 2, 'Year': 3, 'Rating': 4,
                     'NoR': 5, 'Runtime': 6, 'Director': 7}
    try:
        movies_names_wl.sort(key=lambda movie: movie[keydictionary[args.sortBy]])
    except (KeyError, IndexError, TypeError, AttributeError):
        # Silent when no sort key was requested; ``is not None`` replaces ``!= None``.
        if args.sortBy is not None:
            print('**Error** : cannot sortBy **')
    return movies_names_wl
# flake8: noqa _base_ = [ './coco.py' ] data = dict( samples_per_gpu=2, workers_per_gpu=2, train=dict(classes=('person',)), val=dict(classes=('person',)), test=dict(classes=('person',)) )
_base_ = ['./coco.py'] data = dict(samples_per_gpu=2, workers_per_gpu=2, train=dict(classes=('person',)), val=dict(classes=('person',)), test=dict(classes=('person',)))
########################################################
# Copyright (c) 2015-2017 by European Commission.      #
# All Rights Reserved.                                 #
########################################################

# Pull in the shared KPI helpers (getScopes, getDemandDict, ...) — presumably
# injected by the host simulation platform; TODO confirm how extends() resolves.
extends("BaseKPI.py")

"""
Expected Unserved Demand (%)
-----------------------------

Indexed by
    * scope
    * delivery point
    * energy
    * test case

The Expected Unserved Demand is a metric used to measure security of supply. This is the amount of electricity, gas or reserve demand that is expected not to be met by the production means during the year.
It is calculated as the Loss Of Load volumes (LOL) expressed relatively to the corresponding annual demand volumes, in percentage. It can be calculated for each energy independently:

.. math::

    \\small EENS_{dp, energy} = \\frac {LOL_{dp, energy}}{demand_{dp, energy}} (\\%)

See the 'Loss of load' KPI for further documentation about the loss of load.

"""

def computeIndicator(context, indexFilter, paramsIndicator, kpiDict):
    # Fill kpiDict with the expected unserved demand, in percent, for each
    # (scope, delivery point, energy, test case) index that has both a
    # loss-of-load entry and a non-zero demand.
    # NOTE(review): timeStepDuration is computed but never used below.
    timeStepDuration = getTimeStepDurationInHours(context)
    # Apply the user's index filter on each of the four KPI dimensions.
    selectedScopes = indexFilter.filterIndexList(0, getScopes())
    selectedDeliveryPoints = indexFilter.filterIndexList(1, getDeliveryPoints(context))
    selectedEnergies = indexFilter.filterIndexList(2, getEnergies(context, includedEnergies = PRODUCED_ENERGIES))
    selectedTestCases = indexFilter.filterIndexList(3, context.getResultsIndexSet())
    # Demand assets vs. loss-of-energy assets, grouped per scope.
    demandAssetsByScope = getAssetsByScope(context, selectedScopes, includeFinancialAssets=True, includedTechnologies = DEMAND_TYPES)
    lossOfLoadAssetsByScope = getAssetsByScope(context, selectedScopes, includeFinancialAssets=True, includedTechnologies = LOSS_OF_ENERGY_TYPES)
    # Aggregated time series per index (aggregation=True — presumably sums
    # over the assets within a scope; confirm against BaseKPI helpers).
    demandDict = getDemandDict(context, selectedScopes, selectedTestCases, selectedEnergies, selectedDeliveryPoints, demandAssetsByScope, aggregation = True)
    lossOfLoadDict = getProductionDict(context, selectedScopes, selectedTestCases, selectedEnergies, selectedDeliveryPoints, lossOfLoadAssetsByScope, aggregation = True)
    for index in lossOfLoadDict:
        if index in demandDict:
            demand = demandDict[index].getSumValue()
            # Guard against division by zero for indices with no demand volume.
            if demand != 0:
                # EENS(%) = 100 * LOL / demand (see module docstring).
                kpiDict[index] = 100 * lossOfLoadDict[index].getSumValue() / demand
    return kpiDict

def get_indexing(context) :
    # Declare the four KPI dimensions, in the same order computeIndicator
    # filters them: scope, delivery point, energy, test case.
    baseIndexList = [getScopesIndexing(), getDeliveryPointsIndexing(context), getEnergiesIndexing(context, includedEnergies = PRODUCED_ENERGIES), getTestCasesIndexing(context)]
    return baseIndexList

# KPI registration metadata read by the host platform.
IndicatorLabel = "Expected Unserved Demand"
IndicatorUnit = "%"
IndicatorDeltaUnit = "%"
IndicatorDescription = "Expected energy not served as a percentage of demand volume attached to a delivery point"
IndicatorParameters = []
IndicatorIcon = ""
IndicatorCategory = "Results>Loss of load"
IndicatorTags = "Power System, Gas System, Power Markets"
# Pull in the shared KPI helpers — presumably injected by the host
# simulation platform; TODO confirm how extends() resolves.
extends('BaseKPI.py')

"""
Expected Unserved Demand (%)
-----------------------------

Indexed by scope, delivery point, energy and test case.

The Expected Unserved Demand measures security of supply: the Loss Of Load
volumes (LOL) expressed relative to the corresponding annual demand volumes,
in percent: EENS = 100 * LOL / demand.
"""

def compute_indicator(context, indexFilter, paramsIndicator, kpiDict):
    # Fill kpiDict with the expected unserved demand (%) per
    # (scope, delivery point, energy, test case) index.
    # BUG FIX: locals were assigned snake_case names but referenced with their
    # old camelCase names (selectedScopes, demandDict, ...), raising NameError.
    # All references are now consistent with the assignments.
    # NOTE(review): time_step_duration is computed but never used below.
    time_step_duration = get_time_step_duration_in_hours(context)
    # Apply the user's index filter on each of the four KPI dimensions.
    selected_scopes = indexFilter.filterIndexList(0, get_scopes())
    selected_delivery_points = indexFilter.filterIndexList(1, get_delivery_points(context))
    selected_energies = indexFilter.filterIndexList(2, get_energies(context, includedEnergies=PRODUCED_ENERGIES))
    selected_test_cases = indexFilter.filterIndexList(3, context.getResultsIndexSet())
    # Demand assets vs. loss-of-energy assets, grouped per scope.
    demand_assets_by_scope = get_assets_by_scope(context, selected_scopes, includeFinancialAssets=True, includedTechnologies=DEMAND_TYPES)
    loss_of_load_assets_by_scope = get_assets_by_scope(context, selected_scopes, includeFinancialAssets=True, includedTechnologies=LOSS_OF_ENERGY_TYPES)
    # Aggregated time series per index.
    demand_dict = get_demand_dict(context, selected_scopes, selected_test_cases, selected_energies, selected_delivery_points, demand_assets_by_scope, aggregation=True)
    loss_of_load_dict = get_production_dict(context, selected_scopes, selected_test_cases, selected_energies, selected_delivery_points, loss_of_load_assets_by_scope, aggregation=True)
    for index in loss_of_load_dict:
        if index in demand_dict:
            demand = demand_dict[index].getSumValue()
            # Guard against division by zero for indices with no demand volume.
            if demand != 0:
                kpiDict[index] = 100 * loss_of_load_dict[index].getSumValue() / demand
    return kpiDict

def get_indexing(context):
    # Declare the four KPI dimensions in the order compute_indicator filters
    # them: scope, delivery point, energy, test case.
    # BUG FIX: the function previously returned the undefined name
    # ``baseIndexList`` instead of the assigned ``base_index_list``.
    base_index_list = [get_scopes_indexing(), get_delivery_points_indexing(context), get_energies_indexing(context, includedEnergies=PRODUCED_ENERGIES), get_test_cases_indexing(context)]
    return base_index_list

# KPI registration metadata read by the host platform.
indicator_label = 'Expected Unserved Demand'
indicator_unit = '%'
indicator_delta_unit = '%'
indicator_description = 'Expected energy not served as a percentage of demand volume attached to a delivery point'
indicator_parameters = []
indicator_icon = ''
indicator_category = 'Results>Loss of load'
indicator_tags = 'Power System, Gas System, Power Markets'
# Read a baseline x and n reported values; the pool starts at x and grows by
# the shortfall (x - value) of every report.
x = int(input())
n = int(input())
shortfalls = (x - int(input()) for _ in range(n))
print(x + sum(shortfalls))
# Read a baseline x and n reported values. The pool x + sum(x - r_i) is
# computed in closed form as x * (n + 1) - sum(r_i).
x = int(input())
n = int(input())
reports = [int(input()) for _ in range(n)]
print(x * (n + 1) - sum(reports))
def reject_outliers(data, m=2.):
    """Return a boolean mask keeping points within ``m`` median absolute
    deviations of the median of ``data``.

    BUG FIX: when the median deviation was zero the original set ``s`` to the
    scalar ``0.`` and returned the scalar bool ``0. < m`` instead of a mask,
    which breaks boolean indexing; a full-length all-True mask is returned
    instead (no outlier can be detected when all deviations are zero).
    """
    d = np.abs(data - np.median(data))
    mdev = np.median(d)
    if not mdev:
        return np.ones(len(data), dtype=bool)
    return d / mdev < m


def mean_dup(x_):
    """Collapse duplicate measurements in a Series to a single value.

    If all values are identical, return that value; otherwise drop outliers
    and average the values within +/-2.5% of the outlier-free mean.
    """
    # (removed a pointless ``global reject_outliers`` — the function is only read)
    if 1 == len(np.unique(x_.values)):
        return x_.values[0]
    x = x_.values[reject_outliers(x_.values.copy())]
    x_mean = x.mean()
    mask = (x_mean * 0.975 <= x) & (x <= x_mean * 1.025)
    return x[mask].mean()


def remove_duplicate(df):
    """Remove duplicates in a dataframe and drop samples whose composition is
    not ~100%.

    The last column is the property; all other columns are composition
    features. Duplicate feature rows are merged with :func:`mean_dup`.
    """
    features = df.columns.values.tolist()
    features.remove(df.columns[-1])
    property_name = df.columns[-1]
    # Keep only rows whose feature composition sums to ~100% (99-101 tolerance).
    df = df[df[features].sum(axis=1).between(99, 101)]
    df = df.groupby(features, as_index=False).agg(mean_dup)
    df = df.dropna()
    # Discard non-positive property values.
    df = df.loc[df[property_name] > 0]
    return df
def reject_outliers(data, m=2.0):
    """Return a boolean mask keeping points within ``m`` median absolute
    deviations of the median of ``data``.

    BUG FIX: when the median deviation was zero the original set ``s`` to the
    scalar ``0.0`` and returned the scalar bool ``0.0 < m`` instead of a mask,
    which breaks boolean indexing; a full-length all-True mask is returned
    instead (no outlier can be detected when all deviations are zero).
    """
    d = np.abs(data - np.median(data))
    mdev = np.median(d)
    if not mdev:
        return np.ones(len(data), dtype=bool)
    return d / mdev < m


def mean_dup(x_):
    """Collapse duplicate measurements in a Series to a single value.

    If all values are identical, return that value; otherwise drop outliers
    and average the values within +/-2.5% of the outlier-free mean.
    """
    # (removed a pointless ``global reject_outliers`` — the function is only read)
    if 1 == len(np.unique(x_.values)):
        return x_.values[0]
    x = x_.values[reject_outliers(x_.values.copy())]
    x_mean = x.mean()
    mask = (x_mean * 0.975 <= x) & (x <= x_mean * 1.025)
    return x[mask].mean()


def remove_duplicate(df):
    """
    Removes duplicates in dataframe and element samples whose composition is not 100%
    input format -> df = dataframe
    """
    features = df.columns.values.tolist()
    features.remove(df.columns[-1])
    property_name = df.columns[-1]
    # Keep only rows whose feature composition sums to ~100% (99-101 tolerance).
    df = df[df[features].sum(axis=1).between(99, 101)]
    df = df.groupby(features, as_index=False).agg(mean_dup)
    df = df.dropna()
    # Discard non-positive property values.
    df = df.loc[df[property_name] > 0]
    return df
class MyList:
    """A singly linked list supporting append, extend, len(), indexing
    (including negative indices), and iteration."""

    class _Node:
        """One link in the chain: a value plus a pointer to the next node."""
        __slots__ = ('value', 'next')

        def __init__(self, value, next=None):
            self.value = value
            self.next = next

    class _NodeIterator:
        """Forward iterator walking the chain and yielding node values."""

        def __init__(self, first):
            self._cursor = first

        def __iter__(self):
            return self

        def __next__(self):
            node = self._cursor
            if node is None:
                raise StopIteration
            self._cursor = node.next
            return node.value

    def __init__(self, iterable=None):
        self._head = None
        self._tail = None
        self._length = 0
        if iterable is not None:
            self.extend(iterable)

    def append(self, value):
        """Add a value at the tail in O(1)."""
        node = MyList._Node(value)
        if self._head is None:
            self._head = node
        else:
            self._tail.next = node
        self._tail = node
        self._length += 1

    def extend(self, iterable):
        """Append every item of ``iterable`` in order."""
        for item in iterable:
            self.append(item)

    def __len__(self):
        return self._length

    def __getitem__(self, index):
        """O(n) positional access; negative indices count from the end."""
        pos = index + self._length if index < 0 else index
        if not 0 <= pos < self._length:
            raise IndexError('list index out of range')
        node = self._head
        for _ in range(pos):
            node = node.next
        return node.value

    def __iter__(self):
        return MyList._NodeIterator(self._head)


values = MyList([4, 2, 1, 99, 9])
print(values)
for el in values:
    print(el, end=' ')
class Mylist:
    """A singly linked list supporting append, extend, len(), indexing
    (including negative indices), and iteration.

    BUG FIX: the body referenced the undefined names ``MyList._Node``,
    ``MyList._NodeIterator`` and ``index_error`` (the class is ``Mylist`` and
    its inner iterator is ``_Nodeiterator``; the exception is ``IndexError``);
    the demo below also called the undefined ``my_list``. All references are
    now consistent with the declared names.
    """

    class _Node:
        # One link in the chain: a value plus a pointer to the next node.
        __slots__ = ('value', 'next')

        def __init__(self, value, next=None):
            self.value = value
            self.next = next

    class _Nodeiterator:
        # Forward iterator walking the chain and yielding node values.

        def __init__(self, first):
            self._next_node = first

        def __iter__(self):
            return self

        def __next__(self):
            if self._next_node is None:
                raise StopIteration
            value = self._next_node.value
            self._next_node = self._next_node.next
            return value

    def __init__(self, iterable=None):
        self._head = None
        self._tail = None
        self._length = 0
        if iterable is not None:
            self.extend(iterable)

    def append(self, value):
        """Add a value at the tail in O(1)."""
        node = Mylist._Node(value)
        if len(self) == 0:
            self._head = self._tail = node
        else:
            self._tail.next = node
            self._tail = node
        self._length += 1

    def __len__(self):
        return self._length

    def extend(self, iterable):
        """Append every item of ``iterable`` in order."""
        for value in iterable:
            self.append(value)

    def __getitem__(self, index):
        """O(n) positional access; negative indices count from the end."""
        if index < 0:
            index += len(self)
        if not 0 <= index < len(self):
            raise IndexError('list index out of range')
        node = self._head
        for _ in range(index):
            node = node.next
        return node.value

    def __iter__(self):
        return Mylist._Nodeiterator(self._head)


values = Mylist([4, 2, 1, 99, 9])
print(values)
for el in values:
    print(el, end=' ')
from collections import Counter

# Kept for backward compatibility; the functions below no longer depend on it.
number = 10
array = '64630 11735 14216 99233 14470 4978 73429 38120 51135 67060'
array = list(map(int, array.split()))


def find_mean(a):
    """Arithmetic mean of ``a`` rounded to one decimal.

    Generalized: uses ``len(a)`` instead of the hard-coded global ``number``,
    so it works for any non-empty list (identical result for length-10 input).
    """
    return round(sum(a) / len(a), 1)


def find_median(a):
    """Median of ``a`` (average of the middle pair for even lengths,
    rounded to one decimal). Generalized from the hard-coded length of 10."""
    a = sorted(a)
    mid = len(a) // 2
    if len(a) % 2 == 0:
        return round((a[mid - 1] + a[mid]) / 2, 1)
    return a[mid]


def find_mode(a):
    """Smallest value among the most frequent ones.

    Matches the original (counts built from ``sorted(a)`` and stably sorted by
    count), which resolves ties to the smallest value.
    """
    counts = Counter(a)
    best = max(counts.values())
    return min(v for v, c in counts.items() if c == best)


print(find_mean(array))
print(find_median(array))
print(find_mode(array))
from collections import Counter

# Kept for backward compatibility; the functions below no longer depend on it.
number = 10
array = '64630 11735 14216 99233 14470 4978 73429 38120 51135 67060'
array = list(map(int, array.split()))


def find_mean(a):
    """Arithmetic mean of ``a`` rounded to one decimal.

    Generalized: uses ``len(a)`` instead of the hard-coded global ``number``,
    so it works for any non-empty list (identical result for length-10 input).
    """
    return round(sum(a) / len(a), 1)


def find_median(a):
    """Median of ``a`` (average of the middle pair for even lengths,
    rounded to one decimal). Generalized from the hard-coded length of 10."""
    a = sorted(a)
    mid = len(a) // 2
    if len(a) % 2 == 0:
        return round((a[mid - 1] + a[mid]) / 2, 1)
    return a[mid]


def find_mode(a):
    """Smallest value among the most frequent ones.

    Matches the original (counts built from ``sorted(a)`` and stably sorted by
    count), which resolves ties to the smallest value.
    """
    counts = Counter(a)
    best = max(counts.values())
    return min(v for v, c in counts.items() if c == best)


print(find_mean(array))
print(find_median(array))
print(find_mode(array))