content
stringlengths
7
1.05M
fixed_cases
stringlengths
1
1.28M
#!/bin/python3
# https://www.hackerrank.com/challenges/py-check-subset/problem
# Author : Sagar Malik (sagarmalik@gmail.com)

# For each test case, print True when set A is contained in set B.
cases = int(input())
for _ in range(cases):
    _ = int(input())                 # declared size of A (unused; split() determines it)
    set_a = set(input().split())
    _ = int(input())                 # declared size of B (unused)
    set_b = set(input().split())
    # A is a subset of B exactly when A - B is empty.
    print(set_a.issubset(set_b))
def _read_set():
    """Consume the (unused) size line, then return the element line as a set."""
    _ = input()
    return set(input().split())


# For each of the t test cases, report whether A is a subset of B.
t = int(input())
for _ in range(t):
    a = _read_set()
    b = _read_set()
    print(not (a - b))
class Pipelines(object):
    """Thin wrapper around the ``pipelines`` endpoints of the REST client."""

    def __init__(self, client):
        self._client = client

    def get_pipeline(self, pipeline_id, **kwargs):
        """Fetch a single pipeline by id."""
        endpoint = 'pipelines/{}'.format(pipeline_id)
        return self._client._get(self._client.BASE_URL + endpoint, **kwargs)

    def get_all_pipelines(self, **kwargs):
        """Fetch every pipeline."""
        return self._client._get(self._client.BASE_URL + 'pipelines', **kwargs)

    def get_pipeline_deals(self, pipeline_id, **kwargs):
        """Fetch the deals attached to one pipeline."""
        endpoint = 'pipelines/{}/deals'.format(pipeline_id)
        return self._client._get(self._client.BASE_URL + endpoint, **kwargs)
class Pipelines(object):
    """Accessor for the ``pipelines`` resource of the API client."""

    def __init__(self, client):
        self._client = client

    def _url(self, path):
        # Every request is issued against the client's configured base URL.
        return self._client.BASE_URL + path

    def get_pipeline(self, pipeline_id, **kwargs):
        """Return one pipeline by id."""
        return self._client._get(self._url('pipelines/{}'.format(pipeline_id)), **kwargs)

    def get_all_pipelines(self, **kwargs):
        """Return all pipelines."""
        return self._client._get(self._url('pipelines'), **kwargs)

    def get_pipeline_deals(self, pipeline_id, **kwargs):
        """Return the deals belonging to one pipeline."""
        return self._client._get(self._url('pipelines/{}/deals'.format(pipeline_id)), **kwargs)
def grow_plants(db, messenger, object):
    """Advance a plant's growth by its growth rate; emits no messages."""
    entity, rate = object['entity'], object['growth_rate']
    db.increment_property_of_component('plant', entity, 'growth', rate)
    return []


def ripen_fruit(db, messenger, object):
    """Advance a plant's fruit growth by its fruit growth rate; emits no messages."""
    entity, rate = object['entity'], object['fruit_growth_rate']
    db.increment_property_of_component('plant', entity, 'fruit_growth', rate)
    return []
def grow_plants(db, messenger, object):
    """Tick handler: bump the 'growth' property of the entity's plant component."""
    db.increment_property_of_component(
        'plant', object['entity'], 'growth', object['growth_rate'])
    # Handlers return a list of outgoing messages; these produce none.
    return []


def ripen_fruit(db, messenger, object):
    """Tick handler: bump the 'fruit_growth' property of the entity's plant component."""
    db.increment_property_of_component(
        'plant', object['entity'], 'fruit_growth', object['fruit_growth_rate'])
    return []
# NOTE(review): the source was whitespace-mangled, so it is ambiguous whether
# print(c) sat inside the ``if``; this reconstruction keeps both prints under
# it (each even number prefixed with a dot) — TODO confirm intended nesting.
for c in range(1, 50):
    if c % 2 == 0:
        print('.', end='')
        print(c, end=' ')
# NOTE(review): nesting of the second print is ambiguous in the mangled
# source; both prints are kept inside the even-number branch — TODO confirm.
for c in range(1, 50):
    if not c % 2:
        print('.', end='')
        print(c, end=' ')
__author__ = 'khomitsevich'

# Number of values expected in the ``args`` list handed to Metrics.
ATTRIBUTES_COUNT: int = 14


class Metrics:
    """ Metrics data class.

    Converts one row of metric strings (decimal comma or decimal point)
    into float attributes, after validating the argument list.
    """

    # TODO: Initialization process should be more clearer, better to pass dict with keys as class parameter titles
    def __init__(self, filepath: str, filename: str, args: list):
        """Validate *args* and store each value as a float attribute.

        :param filepath: source file path, used only in error messages
        :param filename: stored as ``self.name``
        :param args: exactly ATTRIBUTES_COUNT numeric strings
        :raises ValueError: on wrong argument count or a ``None`` argument
        """
        self.__argument_count_validation(filepath, filename, args)
        self.__non_empty_arguments_validation(filepath, filename, args)
        self.name = filename
        # Values may use a decimal comma; normalise it before float().
        self.step = float(args[0].replace(',', '.'))
        self.angel = float(args[1].replace(',', '.'))
        self.last_dots_count = float(args[2].replace(',', '.'))
        self.in_to_out_coef = float(args[3].replace(',', '.'))
        self.out_to_in_coef = float(args[4].replace(',', '.'))
        self.dice = float(args[5].replace(',', '.'))
        self.spec = float(args[6].replace(',', '.'))
        self.sens = float(args[7].replace(',', '.'))
        self.accu = float(args[8].replace(',', '.'))
        self.time_of_work_with_init = float(args[9].replace(',', '.'))
        self.time_of_work_wihout_init = float(args[10].replace(',', '.'))
        self.otsu_treshold = float(args[11].replace(',', '.'))
        self.average_in = float(args[12].replace(',', '.'))
        self.average_out = float(args[13].replace(',', '.'))

    def __repr__(self):
        return """Metrics: ('name': {self.name}, 'step': {self.step}, 'angel': {self.angel},
 'last_dots_count': {self.last_dots_count}, 'in_to_out_coef': {self.in_to_out_coef},
 'out_to_in_coef': {self.out_to_in_coef}, 'dice': {self.dice}, 'spec': {self.spec}, 'sens': {self.sens},
 'accu': {self.accu}, 'time_of_work_with_init': {self.time_of_work_with_init},
 'time_of_work_wihout_init': {self.time_of_work_wihout_init}, 'otsu_treshold': {self.otsu_treshold},
 'average_in': {self.average_in}, 'average_out': {self.average_out})
""".format(self=self)

    def __str__(self):
        # Bug fix: previously returned the bound method object (``self.__repr__``)
        # instead of calling it, so str(metrics) raised TypeError.
        return self.__repr__()

    def __argument_count_validation(self, filepath: str, filename: str, args: list):
        """Raise ValueError unless exactly ATTRIBUTES_COUNT args were given."""
        if len(args) != ATTRIBUTES_COUNT:
            error_message = f"WARNING: At instance '(unknown)' on path '{filepath}' has been passed list with '{len(args)}' arguments, should be {ATTRIBUTES_COUNT}."
            print(error_message)
            raise ValueError(error_message)

    def __non_empty_arguments_validation(self, filepath: str, filename: str, args: list):
        """Raise ValueError when any argument is None."""
        for arg in args:
            if arg is None:
                error_message = f"WARNING: Found an empty argument at file: '(unknown)' on path '{filepath}'."
                print(error_message)
                raise ValueError(error_message)
__author__ = 'khomitsevich'

# Number of values expected in the ``args`` list handed to Metrics.
attributes_count: int = 14


class Metrics:
    """ Metrics data class.

    Converts one row of metric strings (decimal comma or decimal point)
    into float attributes, after validating the argument list.
    """

    def __init__(self, filepath: str, filename: str, args: list):
        """Validate *args* and store each value as a float attribute.

        :param filepath: source file path, used only in error messages
        :param filename: stored as ``self.name``
        :param args: exactly ``attributes_count`` numeric strings
        :raises ValueError: on wrong argument count or a ``None`` argument
        """
        self.__argument_count_validation(filepath, filename, args)
        self.__non_empty_arguments_validation(filepath, filename, args)
        self.name = filename
        # Values may use a decimal comma; normalise it before float().
        self.step = float(args[0].replace(',', '.'))
        self.angel = float(args[1].replace(',', '.'))
        self.last_dots_count = float(args[2].replace(',', '.'))
        self.in_to_out_coef = float(args[3].replace(',', '.'))
        self.out_to_in_coef = float(args[4].replace(',', '.'))
        self.dice = float(args[5].replace(',', '.'))
        self.spec = float(args[6].replace(',', '.'))
        self.sens = float(args[7].replace(',', '.'))
        self.accu = float(args[8].replace(',', '.'))
        self.time_of_work_with_init = float(args[9].replace(',', '.'))
        self.time_of_work_wihout_init = float(args[10].replace(',', '.'))
        self.otsu_treshold = float(args[11].replace(',', '.'))
        self.average_in = float(args[12].replace(',', '.'))
        self.average_out = float(args[13].replace(',', '.'))

    def __repr__(self):
        return "Metrics: ('name': {self.name}, 'step': {self.step}, 'angel': {self.angel}, \n 'last_dots_count': {self.last_dots_count}, 'in_to_out_coef': {self.in_to_out_coef}, \n 'out_to_in_coef': {self.out_to_in_coef}, 'dice': {self.dice}, 'spec': {self.spec}, 'sens': {self.sens}, \n 'accu': {self.accu}, 'time_of_work_with_init': {self.time_of_work_with_init}, \n 'time_of_work_wihout_init': {self.time_of_work_wihout_init}, 'otsu_treshold': {self.otsu_treshold},\n 'average_in': {self.average_in}, 'average_out': {self.average_out})\n ".format(self=self)

    def __str__(self):
        # Bug fix: previously returned the bound method object, not its result.
        return self.__repr__()

    def __argument_count_validation(self, filepath: str, filename: str, args: list):
        """Raise ValueError unless exactly ``attributes_count`` args were given."""
        # Bug fix: referenced undefined ``ATTRIBUTES_COUNT`` (the constant was
        # renamed ``attributes_count``) and raised undefined ``value_error``.
        if len(args) != attributes_count:
            error_message = f"WARNING: At instance '(unknown)' on path '{filepath}' has been passed list with '{len(args)}' arguments, should be {attributes_count}."
            print(error_message)
            raise ValueError(error_message)

    def __non_empty_arguments_validation(self, filepath: str, filename: str, args: list):
        """Raise ValueError when any argument is None."""
        for arg in args:
            if arg is None:
                error_message = f"WARNING: Found an empty argument at file: '(unknown)' on path '{filepath}'."
                print(error_message)
                raise ValueError(error_message)
# Read the final sale price and the P&P (postage & packing) cost.
# Planned but not yet implemented:
#   total  = sale price + P&P
#   fees   = total * 0.128 + 0.3      # 12.8% + 30p
#   profit = total - fees
#   ...then output total, fees, profit, a description of the formula, and a
#   note that fees are charged on P&P as well as on the sale price.
a = input("Input Final Sale Price")
b = input("Input P&P Costs")
# Collect the two raw inputs: the final sale price and the P&P cost.
a = input('Input Final Sale Price')
b = input('Input P&P Costs')
class ValidationError(Exception):
    """ Base class """
    pass


class AppStoreValidationError(ValidationError):
    """Validation failure carrying a human-readable message."""

    message = None  # set per instance in __init__

    def __init__(self, message: str):
        self.message = message
        super().__init__(message)

    def __str__(self) -> str:
        return self.message
class Validationerror(Exception):
    """ Base class """
    pass


class Appstorevalidationerror(Validationerror):
    # Bug fix: previously inherited from the undefined name ``ValidationError``
    # (the base class above had been renamed ``Validationerror``).
    """Validation failure carrying a human-readable message."""

    message = None  # set per instance in __init__

    def __init__(self, message: str):
        self.message = message
        super().__init__(message)

    def __str__(self) -> str:
        return self.message
class Solution:

    def isPalindrome(self, x: int) -> bool:
        """Return True when the decimal digits of x read the same reversed.

        Negatives, and non-zero multiples of ten, can never be palindromes,
        so they are rejected up front; otherwise only the lower half of the
        digits is reversed and compared against the remaining upper half.
        """
        if x < 0:
            return False
        if x % 10 == 0 and x != 0:
            return False
        half = 0
        while x > half:
            half = half * 10 + x % 10
            x //= 10
        # Even digit count: halves match exactly; odd: drop the middle digit.
        return x == half or x == half // 10
class Solution:

    def is_palindrome(self, x: int) -> bool:
        """True iff x's decimal representation is a palindrome."""
        # Negatives, and non-zero numbers ending in 0, cannot match reversed.
        if x < 0 or (x != 0 and x % 10 == 0):
            return False
        reversed_half = 0
        while x > reversed_half:
            x, digit = divmod(x, 10)
            reversed_half = reversed_half * 10 + digit
        return x == reversed_half or x == reversed_half // 10
# House financing check: the monthly instalment may not exceed 30% of salary.
valorc = float(input('Qual o valor da Casa? R$ '))
salario = float(input('Qual o valor do salario? R$'))
anos = int(input('Em quantos anos deseja pagar? '))

prest = valorc / (anos * 12)          # monthly instalment over the whole term
limite = salario * 0.3                # 30% of the salary
print('Fincanciamento Negado' if prest > limite else 'Financiamento Autorizado')
valorc = float(input('Qual o valor da Casa? R$ '))
salario = float(input('Qual o valor do salario? R$'))
anos = int(input('Em quantos anos deseja pagar? '))
# Monthly instalment; approve only if it stays within 30% of the salary.
prest = valorc / (anos * 12)
if prest > salario * 0.3:
    print('Fincanciamento Negado')
else:
    print('Financiamento Autorizado')
''' Unit tests module for PaPaS module ''' __all__ = []
""" Unit tests module for PaPaS module """ __all__ = []
# Code Jam "Bathroom Stalls": instead of simulating every person, track free
# run lengths together with how many runs of that length exist.
N = int(input())
for case in range(N):
    n, k = map(int, input().split())
    runs = {n: 1}                     # free-run length -> number of such runs
    while k > 0:
        length, count = max(runs.items())
        if k > count:
            # All runs of the current largest length are consumed first.
            k -= count
            del runs[length]
            left, right = (length - 1) // 2, length // 2
            runs[left] = runs.get(left, 0) + count
            runs[right] = runs.get(right, 0) + count
        else:
            print('Case #{}: {} {}'.format(case + 1, length // 2, (length - 1) // 2))
            break
# Code Jam "Bathroom Stalls".
# Bug fix: the loop bound referenced the undefined name ``N`` — the test-case
# count had been read into lowercase ``n`` and was then clobbered per case.
cases = int(input())
for i in range(cases):
    (n, k) = map(int, input().split())
    ranges = {n: 1}                   # free-run length -> number of such runs
    while k > 0:
        (max_range, count_range) = max(ranges.items())
        if k > count_range:
            k -= count_range
            del ranges[max_range]
            (range_1, range_2) = ((max_range - 1) // 2, max_range // 2)
            ranges[range_1] = ranges.get(range_1, 0) + count_range
            ranges[range_2] = ranges.get(range_2, 0) + count_range
        else:
            print('Case #{}: {} {}'.format(i + 1, max_range // 2, (max_range - 1) // 2))
            break
""" While thinking abou this problem, many might come up with a DP algorithm. But this problem is much easier than DP problem. First, scan the input string, and store the maximum occurance index of every leter. Then, scan the input string again, considering the maximum occurance of each letter.While scanning, if you encounter a letter whose maxium occurance index is larger than current, update maximum occurance. If the scanning index equals the maximum occurance index, then we get a new break. """ class Solution: def partitionLabels(self, S): """ :type S: str :rtype: List[int] """ # S = "ababcbacadefegdehijhklij" ret = [] map = {} # last = {c: i for i, c in enumerate(S)} for i, s in enumerate(S): map[s] = i partition = prev = 0 for i, s in enumerate(S): partition = max(partition, map[s]) if i == partition: ret.append(partition + 1 - prev) prev = partition + 1 return ret S = "eaaaabaaec" s = Solution() print(s.partitionLabels(S))
""" While thinking abou this problem, many might come up with a DP algorithm. But this problem is much easier than DP problem. First, scan the input string, and store the maximum occurance index of every leter. Then, scan the input string again, considering the maximum occurance of each letter.While scanning, if you encounter a letter whose maxium occurance index is larger than current, update maximum occurance. If the scanning index equals the maximum occurance index, then we get a new break. """ class Solution: def partition_labels(self, S): """ :type S: str :rtype: List[int] """ ret = [] map = {} for (i, s) in enumerate(S): map[s] = i partition = prev = 0 for (i, s) in enumerate(S): partition = max(partition, map[s]) if i == partition: ret.append(partition + 1 - prev) prev = partition + 1 return ret s = 'eaaaabaaec' s = solution() print(s.partitionLabels(S))
# Map of the first letters of the alphabet to their 1-based positions.
alpha_num_dict = {
    'a': 1,
    'b': 2,
    'c': 3,
}
# Letters 'a'..'c' mapped to their 1-based alphabet positions.
alpha_num_dict = dict(a=1, b=2, c=3)
# Creating an empty Tuple
# Bug fix: ``Tuple1 = (Hello)`` referenced the undefined name ``Hello`` and,
# being plain parentheses, would not have been a tuple anyway; the comment
# says an empty tuple was intended.
Tuple1 = ()
print("Initial empty Tuple: ")
print(Tuple1)

A = (1, 2, 3, 4)
B = ('a', 'b', 'c')
C = (5, 6, 7, 8)  # second tuple

print(A, 'length= ', len(A))
print(B, 'length= ', len(B))
print(A < C)       # lexicographic comparison
print(A + C)       # concatenation
print(max(A))
print(min(B))

tuple('hey')       # expression statements: results are discarded
'good' * 3
# Bug fixes: ``Hello`` was an undefined name (an empty tuple was intended per
# the original comment), ``print(Tuple1)`` used the old capitalised name, and
# the prints referenced ``A``/``B``/``C`` although the tuples had been renamed
# to lowercase ``a``/``b``/``c``.
tuple1 = ()
print('Initial empty Tuple: ')
print(tuple1)
a = (1, 2, 3, 4)
b = ('a', 'b', 'c')
c = (5, 6, 7, 8)
print(a, 'length= ', len(a))
print(b, 'length= ', len(b))
print(a < c)
print(a + c)
print(max(a))
print(min(b))
tuple('hey')   # expression statements: results are discarded
'good' * 3
# NOTE(review): pysmi machine-generated MIB module (RFC 1567 DSA-MIB),
# reformatted one statement per line from a whitespace-mangled dump.
# ``mibBuilder`` is injected by the pysnmp MIB loader when this module is
# executed — it is deliberately not defined in this file.  Whitespace inside
# description strings at the dump's wrap points is approximate — TODO confirm
# against the original pysmi output.
#
# PySNMP MIB module DSA-MIB (http://pysnmp.sf.net)
# ASN.1 source http://mibs.snmplabs.com:80/asn1/DSA-MIB
# Produced by pysmi-0.0.7 at Sun Feb 14 00:11:07 2016
# On host bldfarm platform Linux version 4.1.13-100.fc21.x86_64 by user goose
# Using Python version 3.5.0 (default, Jan 5 2016, 17:11:52)
#
( Integer, ObjectIdentifier, OctetString, ) = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
( SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
( DistinguishedName, applIndex, ) = mibBuilder.importSymbols("NETWORK-SERVICES-MIB", "DistinguishedName", "applIndex")
( NotificationGroup, ModuleCompliance, ObjectGroup, ) = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
( MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, Gauge32, iso, NotificationType, Bits, Counter32, mib_2, ModuleIdentity, Integer32, ObjectIdentity, IpAddress, TimeTicks, MibIdentifier, Counter64, ) = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "Gauge32", "iso", "NotificationType", "Bits", "Counter32", "mib-2", "ModuleIdentity", "Integer32", "ObjectIdentity", "IpAddress", "TimeTicks", "MibIdentifier", "Counter64")
( DisplayString, TimeStamp, TextualConvention, ) = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TimeStamp", "TextualConvention")

# Module identity and its metadata (only attached when text loading is on).
dsaMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 29))
if mibBuilder.loadTexts: dsaMIB.setLastUpdated('9311250000Z')
if mibBuilder.loadTexts: dsaMIB.setOrganization('IETF Mail and Directory Management Working\n Group')
if mibBuilder.loadTexts: dsaMIB.setContactInfo(' Glenn Mansfield\n\n Postal: AIC Systems Laboratory\n 6-6-3, Minami Yoshinari\n Aoba-ku, Sendai, 989-32\n JP\n\n Tel: +81 22 279 3310\n Fax: +81 22 279 3640\n E-Mail: glenn@aic.co.jp')
if mibBuilder.loadTexts: dsaMIB.setDescription(' The MIB module for monitoring Directory System Agents.')

# dsaOpsTable (29.1): per-application operation counters, indexed by applIndex.
dsaOpsTable = MibTable((1, 3, 6, 1, 2, 1, 29, 1), )
if mibBuilder.loadTexts: dsaOpsTable.setDescription(' The table holding information related to the\n DSA operations.')
dsaOpsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 29, 1, 1), ).setIndexNames((0, "NETWORK-SERVICES-MIB", "applIndex"))
if mibBuilder.loadTexts: dsaOpsEntry.setDescription(' Entry containing operations related statistics\n for a DSA.')
dsaAnonymousBinds = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaAnonymousBinds.setDescription(' Number of anonymous binds to this DSA from DUAs\n since application start.')
dsaUnauthBinds = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaUnauthBinds.setDescription(' Number of un-authenticated binds to this\n DSA since application start.')
dsaSimpleAuthBinds = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaSimpleAuthBinds.setDescription(' Number of binds to this DSA that were authenticated\n using simple authentication procedures since\n application start.')
dsaStrongAuthBinds = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaStrongAuthBinds.setDescription(' Number of binds to this DSA that were authenticated\n using the strong authentication procedures since\n application start. This includes the binds that were\n authenticated using external authentication procedures.')
dsaBindSecurityErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaBindSecurityErrors.setDescription(' Number of bind operations that have been rejected\n by this DSA due to inappropriateAuthentication or\n invalidCredentials.')
dsaInOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaInOps.setDescription(' Number of operations forwarded to this DSA\n from DUAs or other DSAs since application\n start up.')
dsaReadOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaReadOps.setDescription(' Number of read operations serviced by\n this DSA since application startup.')
dsaCompareOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaCompareOps.setDescription(' Number of compare operations serviced by\n this DSA since application startup.')
dsaAddEntryOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaAddEntryOps.setDescription(' Number of addEntry operations serviced by\n this DSA since application startup.')
dsaRemoveEntryOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaRemoveEntryOps.setDescription(' Number of removeEntry operations serviced by\n this DSA since application startup.')
dsaModifyEntryOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaModifyEntryOps.setDescription(' Number of modifyEntry operations serviced by\n this DSA since application startup.')
dsaModifyRDNOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaModifyRDNOps.setDescription(' Number of modifyRDN operations serviced by\n this DSA since application startup.')
dsaListOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaListOps.setDescription(' Number of list operations serviced by\n this DSA since application startup.')
dsaSearchOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaSearchOps.setDescription(' Number of search operations- baseObjectSearches,\n oneLevelSearches and subTreeSearches, serviced\n by this DSA since application startup.')
dsaOneLevelSearchOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaOneLevelSearchOps.setDescription(' Number of oneLevelSearch operations serviced\n by this DSA since application startup.')
dsaWholeTreeSearchOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaWholeTreeSearchOps.setDescription(' Number of wholeTreeSearch operations serviced\n by this DSA since application startup.')
dsaReferrals = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaReferrals.setDescription(' Number of referrals returned by this DSA in response\n to requests for operations since application startup.')
dsaChainings = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaChainings.setDescription(' Number of operations forwarded by this DSA\n to other DSAs since application startup.')
dsaSecurityErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaSecurityErrors.setDescription(' Number of operations forwarded to this DSA\n which did not meet the security requirements. ')
dsaErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaErrors.setDescription(' Number of operations that could not be serviced\n due to errors other than security errors, and\n referrals.\n A partially serviced operation will not be counted\n as an error.\n The errors include NameErrors, UpdateErrors, Attribute\n errors and ServiceErrors.')

# dsaEntriesTable (29.2): entry statistics and cache performance.
dsaEntriesTable = MibTable((1, 3, 6, 1, 2, 1, 29, 2), )
if mibBuilder.loadTexts: dsaEntriesTable.setDescription(' The table holding information related to the\n\n entry statistics and cache performance of the DSAs.')
dsaEntriesEntry = MibTableRow((1, 3, 6, 1, 2, 1, 29, 2, 1), ).setIndexNames((0, "NETWORK-SERVICES-MIB", "applIndex"))
if mibBuilder.loadTexts: dsaEntriesEntry.setDescription(' Entry containing statistics pertaining to entries\n held by a DSA.')
dsaMasterEntries = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 2, 1, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaMasterEntries.setDescription(' Number of entries mastered in the DSA.')
dsaCopyEntries = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 2, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaCopyEntries.setDescription(' Number of entries for which systematic (slave)\n copies are maintained in the DSA.')
dsaCacheEntries = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 2, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaCacheEntries.setDescription(' Number of entries cached (non-systematic copies) in\n the DSA. This will include the entries that are\n cached partially. The negative cache is not counted.')
dsaCacheHits = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaCacheHits.setDescription(' Number of operations that were serviced from\n the locally held cache since application\n startup.')
dsaSlaveHits = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaSlaveHits.setDescription(' Number of operations that were serviced from\n the locally held object replications [ shadow\n entries] since application startup.')

# dsaIntTable (29.3): interaction history with peer DSAs.
dsaIntTable = MibTable((1, 3, 6, 1, 2, 1, 29, 3), )
if mibBuilder.loadTexts: dsaIntTable.setDescription(' Each row of this table contains some details\n related to the history of the interaction\n of the monitored DSAs with their respective\n peer DSAs.')
dsaIntEntry = MibTableRow((1, 3, 6, 1, 2, 1, 29, 3, 1), ).setIndexNames((0, "NETWORK-SERVICES-MIB", "applIndex"), (0, "DSA-MIB", "dsaIntIndex"))
if mibBuilder.loadTexts: dsaIntEntry.setDescription(' Entry containing interaction details of a DSA\n with a peer DSA.')
dsaIntIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1,2147483647)))
if mibBuilder.loadTexts: dsaIntIndex.setDescription(' Together with applIndex it forms the unique key to\n identify the conceptual row which contains useful info\n on the (attempted) interaction between the DSA (referred\n to by applIndex) and a peer DSA.')
dsaName = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 2), DistinguishedName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaName.setDescription(' Distinguished Name of the peer DSA to which this\n entry pertains.')
dsaTimeOfCreation = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 3), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaTimeOfCreation.setDescription(' The value of sysUpTime when this row was created.\n If the entry was created before the network management\n subsystem was initialized, this object will contain\n a value of zero.')
dsaTimeOfLastAttempt = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 4), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaTimeOfLastAttempt.setDescription(' The value of sysUpTime when the last attempt was made\n to contact this DSA. If the last attempt was made before\n the network management subsystem was initialized, this\n object will contain a value of zero.')
dsaTimeOfLastSuccess = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaTimeOfLastSuccess.setDescription(' The value of sysUpTime when the last attempt made to\n contact this DSA was successful. If there have\n been no successful attempts this entry will have a value\n of zero. If the last successful attempt was made before\n the network management subsystem was initialized, this\n object will contain a value of zero.')
dsaFailuresSinceLastSuccess = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaFailuresSinceLastSuccess.setDescription(' The number of failures since the last time an\n attempt to contact this DSA was successful. If\n there has been no successful attempts, this counter\n will contain the number of failures since this entry\n was created.')
dsaFailures = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaFailures.setDescription(' Cumulative failures since the creation of\n this entry.')
dsaSuccesses = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaSuccesses.setDescription(' Cumulative successes since the creation of\n this entry.')

# Conformance information (29.4): groups and compliance statements.
dsaConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 29, 4))
dsaGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 29, 4, 1))
dsaCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 29, 4, 2))
dsaOpsCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 1)).setObjects(*(("DSA-MIB", "dsaOpsGroup"),))
if mibBuilder.loadTexts: dsaOpsCompliance.setDescription('The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring\n DSA operations.')
dsaEntryCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 2)).setObjects(*(("DSA-MIB", "dsaOpsGroup"), ("DSA-MIB", "dsaEntryGroup"),))
if mibBuilder.loadTexts: dsaEntryCompliance.setDescription('The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring\n DSA operations, entry statistics and cache\n performance.')
dsaIntCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 3)).setObjects(*(("DSA-MIB", "dsaOpsGroup"), ("DSA-MIB", "dsaIntGroup"),))
if mibBuilder.loadTexts: dsaIntCompliance.setDescription(' The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring DSA\n operations and the interaction of the DSA with\n peer DSAs.')
dsaOpsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 29, 4, 1, 1)).setObjects(*(("DSA-MIB", "dsaAnonymousBinds"), ("DSA-MIB", "dsaUnauthBinds"), ("DSA-MIB", "dsaSimpleAuthBinds"), ("DSA-MIB", "dsaStrongAuthBinds"), ("DSA-MIB", "dsaBindSecurityErrors"), ("DSA-MIB", "dsaInOps"), ("DSA-MIB", "dsaReadOps"), ("DSA-MIB", "dsaCompareOps"), ("DSA-MIB", "dsaAddEntryOps"), ("DSA-MIB", "dsaRemoveEntryOps"), ("DSA-MIB", "dsaModifyEntryOps"), ("DSA-MIB", "dsaModifyRDNOps"), ("DSA-MIB", "dsaListOps"), ("DSA-MIB", "dsaSearchOps"), ("DSA-MIB", "dsaOneLevelSearchOps"), ("DSA-MIB", "dsaWholeTreeSearchOps"), ("DSA-MIB", "dsaReferrals"), ("DSA-MIB", "dsaChainings"), ("DSA-MIB", "dsaSecurityErrors"), ("DSA-MIB", "dsaErrors"),))
if mibBuilder.loadTexts: dsaOpsGroup.setDescription(' A collection of objects for monitoring the DSA\n operations.')
dsaEntryGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 29, 4, 1, 2)).setObjects(*(("DSA-MIB", "dsaMasterEntries"), ("DSA-MIB", "dsaCopyEntries"), ("DSA-MIB", "dsaCacheEntries"), ("DSA-MIB", "dsaCacheHits"), ("DSA-MIB", "dsaSlaveHits"),))
if mibBuilder.loadTexts: dsaEntryGroup.setDescription(' A collection of objects for monitoring the DSA\n entry statistics and cache performance.')
dsaIntGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 29, 4, 1, 3)).setObjects(*(("DSA-MIB", "dsaName"), ("DSA-MIB", "dsaTimeOfCreation"), ("DSA-MIB", "dsaTimeOfLastAttempt"), ("DSA-MIB", "dsaTimeOfLastSuccess"), ("DSA-MIB", "dsaFailuresSinceLastSuccess"), ("DSA-MIB", "dsaFailures"), ("DSA-MIB", "dsaSuccesses"),))
if mibBuilder.loadTexts: dsaIntGroup.setDescription(" A collection of objects for monitoring the DSA's\n interaction with peer DSAs.")
mibBuilder.exportSymbols("DSA-MIB", dsaErrors=dsaErrors, dsaOpsGroup=dsaOpsGroup, dsaTimeOfLastSuccess=dsaTimeOfLastSuccess, dsaGroups=dsaGroups, dsaWholeTreeSearchOps=dsaWholeTreeSearchOps, dsaConformance=dsaConformance, dsaOneLevelSearchOps=dsaOneLevelSearchOps, dsaBindSecurityErrors=dsaBindSecurityErrors, dsaOpsEntry=dsaOpsEntry, dsaSuccesses=dsaSuccesses, dsaOpsCompliance=dsaOpsCompliance, dsaSearchOps=dsaSearchOps, dsaMasterEntries=dsaMasterEntries, dsaTimeOfLastAttempt=dsaTimeOfLastAttempt, dsaUnauthBinds=dsaUnauthBinds, dsaEntryCompliance=dsaEntryCompliance, dsaFailuresSinceLastSuccess=dsaFailuresSinceLastSuccess, dsaMIB=dsaMIB, dsaSecurityErrors=dsaSecurityErrors, dsaModifyEntryOps=dsaModifyEntryOps, dsaIntCompliance=dsaIntCompliance, dsaName=dsaName, dsaOpsTable=dsaOpsTable, dsaIntIndex=dsaIntIndex, dsaTimeOfCreation=dsaTimeOfCreation, dsaChainings=dsaChainings, dsaInOps=dsaInOps, dsaCacheEntries=dsaCacheEntries, dsaEntryGroup=dsaEntryGroup, dsaEntriesEntry=dsaEntriesEntry, dsaStrongAuthBinds=dsaStrongAuthBinds, dsaIntEntry=dsaIntEntry, dsaSimpleAuthBinds=dsaSimpleAuthBinds, dsaReadOps=dsaReadOps, dsaRemoveEntryOps=dsaRemoveEntryOps, dsaModifyRDNOps=dsaModifyRDNOps, dsaFailures=dsaFailures, dsaListOps=dsaListOps, dsaCacheHits=dsaCacheHits, dsaIntTable=dsaIntTable, dsaEntriesTable=dsaEntriesTable, PYSNMP_MODULE_ID=dsaMIB, dsaCompliances=dsaCompliances, dsaCompareOps=dsaCompareOps, dsaCopyEntries=dsaCopyEntries, dsaSlaveHits=dsaSlaveHits, dsaAnonymousBinds=dsaAnonymousBinds, dsaIntGroup=dsaIntGroup, dsaReferrals=dsaReferrals, dsaAddEntryOps=dsaAddEntryOps)
# DSA-MIB (RFC 1567): pysnmp-generated MIB module for monitoring X.500
# Directory System Agents.
# Fix: the generated assignments used snake_case names (dsa_mib,
# dsa_ops_table, ...) while every later reference -- the
# `if mibBuilder.loadTexts:` guards and the exportSymbols() keyword values --
# still used the original camelCase identifiers, raising NameError at import
# time.  The assignments are restored to the camelCase names the rest of the
# module actually uses; the lowercase helper names bound by importSymbols()
# below are consistent and unchanged.
(integer, object_identifier, octet_string) = mibBuilder.importSymbols('ASN1', 'Integer', 'ObjectIdentifier', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(single_value_constraint, value_size_constraint, value_range_constraint, constraints_intersection, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ValueSizeConstraint', 'ValueRangeConstraint', 'ConstraintsIntersection', 'ConstraintsUnion')
(distinguished_name, appl_index) = mibBuilder.importSymbols('NETWORK-SERVICES-MIB', 'DistinguishedName', 'applIndex')
(notification_group, module_compliance, object_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance', 'ObjectGroup')
(mib_scalar, mib_table, mib_table_row, mib_table_column, unsigned32, gauge32, iso, notification_type, bits, counter32, mib_2, module_identity, integer32, object_identity, ip_address, time_ticks, mib_identifier, counter64) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Unsigned32', 'Gauge32', 'iso', 'NotificationType', 'Bits', 'Counter32', 'mib-2', 'ModuleIdentity', 'Integer32', 'ObjectIdentity', 'IpAddress', 'TimeTicks', 'MibIdentifier', 'Counter64')
(display_string, time_stamp, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TimeStamp', 'TextualConvention')

# Module identity.
dsaMIB = module_identity((1, 3, 6, 1, 2, 1, 29))
if mibBuilder.loadTexts: dsaMIB.setLastUpdated('9311250000Z')
if mibBuilder.loadTexts: dsaMIB.setOrganization('IETF Mail and Directory Management Working\n Group')
if mibBuilder.loadTexts: dsaMIB.setContactInfo(' Glenn Mansfield\n\n Postal: AIC Systems Laboratory\n 6-6-3, Minami Yoshinari\n Aoba-ku, Sendai, 989-32\n JP\n\n Tel: +81 22 279 3310\n Fax: +81 22 279 3640\n E-Mail: glenn@aic.co.jp')
if mibBuilder.loadTexts: dsaMIB.setDescription(' The MIB module for monitoring Directory System Agents.')

# dsaOpsTable: per-application operation statistics, indexed by applIndex.
dsaOpsTable = mib_table((1, 3, 6, 1, 2, 1, 29, 1))
if mibBuilder.loadTexts: dsaOpsTable.setDescription(' The table holding information related to the\n DSA operations.')
dsaOpsEntry = mib_table_row((1, 3, 6, 1, 2, 1, 29, 1, 1)).setIndexNames((0, 'NETWORK-SERVICES-MIB', 'applIndex'))
if mibBuilder.loadTexts: dsaOpsEntry.setDescription(' Entry containing operations related statistics\n for a DSA.')
dsaAnonymousBinds = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaAnonymousBinds.setDescription(' Number of anonymous binds to this DSA from DUAs\n since application start.')
dsaUnauthBinds = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaUnauthBinds.setDescription(' Number of un-authenticated binds to this\n DSA since application start.')
dsaSimpleAuthBinds = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaSimpleAuthBinds.setDescription(' Number of binds to this DSA that were authenticated\n using simple authentication procedures since\n application start.')
dsaStrongAuthBinds = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaStrongAuthBinds.setDescription(' Number of binds to this DSA that were authenticated\n using the strong authentication procedures since\n application start. This includes the binds that were\n authenticated using external authentication procedures.')
dsaBindSecurityErrors = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 5), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaBindSecurityErrors.setDescription(' Number of bind operations that have been rejected\n by this DSA due to inappropriateAuthentication or\n invalidCredentials.')
dsaInOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaInOps.setDescription(' Number of operations forwarded to this DSA\n from DUAs or other DSAs since application\n start up.')
dsaReadOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 7), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaReadOps.setDescription(' Number of read operations serviced by\n this DSA since application startup.')
dsaCompareOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 8), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaCompareOps.setDescription(' Number of compare operations serviced by\n this DSA since application startup.')
dsaAddEntryOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 9), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaAddEntryOps.setDescription(' Number of addEntry operations serviced by\n this DSA since application startup.')
dsaRemoveEntryOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 10), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaRemoveEntryOps.setDescription(' Number of removeEntry operations serviced by\n this DSA since application startup.')
dsaModifyEntryOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 11), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaModifyEntryOps.setDescription(' Number of modifyEntry operations serviced by\n this DSA since application startup.')
dsaModifyRDNOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 12), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaModifyRDNOps.setDescription(' Number of modifyRDN operations serviced by\n this DSA since application startup.')
dsaListOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 13), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaListOps.setDescription(' Number of list operations serviced by\n this DSA since application startup.')
dsaSearchOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 14), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaSearchOps.setDescription(' Number of search operations- baseObjectSearches,\n oneLevelSearches and subTreeSearches, serviced\n by this DSA since application startup.')
dsaOneLevelSearchOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 15), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaOneLevelSearchOps.setDescription(' Number of oneLevelSearch operations serviced\n by this DSA since application startup.')
dsaWholeTreeSearchOps = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 16), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaWholeTreeSearchOps.setDescription(' Number of wholeTreeSearch operations serviced\n by this DSA since application startup.')
dsaReferrals = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 17), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaReferrals.setDescription(' Number of referrals returned by this DSA in response\n to requests for operations since application startup.')
dsaChainings = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 18), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaChainings.setDescription(' Number of operations forwarded by this DSA\n to other DSAs since application startup.')
dsaSecurityErrors = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 19), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaSecurityErrors.setDescription(' Number of operations forwarded to this DSA\n which did not meet the security requirements. ')
dsaErrors = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 20), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaErrors.setDescription(' Number of operations that could not be serviced\n due to errors other than security errors, and\n referrals.\n A partially serviced operation will not be counted\n as an error.\n The errors include NameErrors, UpdateErrors, Attribute\n errors and ServiceErrors.')

# dsaEntriesTable: entry statistics and cache performance.
dsaEntriesTable = mib_table((1, 3, 6, 1, 2, 1, 29, 2))
if mibBuilder.loadTexts: dsaEntriesTable.setDescription(' The table holding information related to the\n\n entry statistics and cache performance of the DSAs.')
dsaEntriesEntry = mib_table_row((1, 3, 6, 1, 2, 1, 29, 2, 1)).setIndexNames((0, 'NETWORK-SERVICES-MIB', 'applIndex'))
if mibBuilder.loadTexts: dsaEntriesEntry.setDescription(' Entry containing statistics pertaining to entries\n held by a DSA.')
dsaMasterEntries = mib_table_column((1, 3, 6, 1, 2, 1, 29, 2, 1, 1), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaMasterEntries.setDescription(' Number of entries mastered in the DSA.')
dsaCopyEntries = mib_table_column((1, 3, 6, 1, 2, 1, 29, 2, 1, 2), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaCopyEntries.setDescription(' Number of entries for which systematic (slave)\n copies are maintained in the DSA.')
dsaCacheEntries = mib_table_column((1, 3, 6, 1, 2, 1, 29, 2, 1, 3), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaCacheEntries.setDescription(' Number of entries cached (non-systematic copies) in\n the DSA. This will include the entries that are\n cached partially. The negative cache is not counted.')
dsaCacheHits = mib_table_column((1, 3, 6, 1, 2, 1, 29, 2, 1, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaCacheHits.setDescription(' Number of operations that were serviced from\n the locally held cache since application\n startup.')
dsaSlaveHits = mib_table_column((1, 3, 6, 1, 2, 1, 29, 2, 1, 5), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaSlaveHits.setDescription(' Number of operations that were serviced from\n the locally held object replications [ shadow\n entries] since application startup.')

# dsaIntTable: history of interactions with peer DSAs.
dsaIntTable = mib_table((1, 3, 6, 1, 2, 1, 29, 3))
if mibBuilder.loadTexts: dsaIntTable.setDescription(' Each row of this table contains some details\n related to the history of the interaction\n of the monitored DSAs with their respective\n peer DSAs.')
dsaIntEntry = mib_table_row((1, 3, 6, 1, 2, 1, 29, 3, 1)).setIndexNames((0, 'NETWORK-SERVICES-MIB', 'applIndex'), (0, 'DSA-MIB', 'dsaIntIndex'))
if mibBuilder.loadTexts: dsaIntEntry.setDescription(' Entry containing interaction details of a DSA\n with a peer DSA.')
dsaIntIndex = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 2147483647)))
if mibBuilder.loadTexts: dsaIntIndex.setDescription(' Together with applIndex it forms the unique key to\n identify the conceptual row which contains useful info\n on the (attempted) interaction between the DSA (referred\n to by applIndex) and a peer DSA.')
dsaName = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 2), distinguished_name()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaName.setDescription(' Distinguished Name of the peer DSA to which this\n entry pertains.')
dsaTimeOfCreation = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 3), time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaTimeOfCreation.setDescription(' The value of sysUpTime when this row was created.\n If the entry was created before the network management\n subsystem was initialized, this object will contain\n a value of zero.')
dsaTimeOfLastAttempt = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 4), time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaTimeOfLastAttempt.setDescription(' The value of sysUpTime when the last attempt was made\n to contact this DSA. If the last attempt was made before\n the network management subsystem was initialized, this\n object will contain a value of zero.')
dsaTimeOfLastSuccess = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 5), time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaTimeOfLastSuccess.setDescription(' The value of sysUpTime when the last attempt made to\n contact this DSA was successful. If there have\n been no successful attempts this entry will have a value\n of zero. If the last successful attempt was made before\n the network management subsystem was initialized, this\n object will contain a value of zero.')
dsaFailuresSinceLastSuccess = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaFailuresSinceLastSuccess.setDescription(' The number of failures since the last time an\n attempt to contact this DSA was successful. If\n there has been no successful attempts, this counter\n will contain the number of failures since this entry\n was created.')
dsaFailures = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 7), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaFailures.setDescription(' Cumulative failures since the creation of\n this entry.')
dsaSuccesses = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 8), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: dsaSuccesses.setDescription(' Cumulative successes since the creation of\n this entry.')

# Conformance information.
dsaConformance = mib_identifier((1, 3, 6, 1, 2, 1, 29, 4))
dsaGroups = mib_identifier((1, 3, 6, 1, 2, 1, 29, 4, 1))
dsaCompliances = mib_identifier((1, 3, 6, 1, 2, 1, 29, 4, 2))
dsaOpsCompliance = module_compliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 1)).setObjects(*(('DSA-MIB', 'dsaOpsGroup'),))
if mibBuilder.loadTexts: dsaOpsCompliance.setDescription('The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring\n DSA operations.')
dsaEntryCompliance = module_compliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 2)).setObjects(*(('DSA-MIB', 'dsaOpsGroup'), ('DSA-MIB', 'dsaEntryGroup')))
if mibBuilder.loadTexts: dsaEntryCompliance.setDescription('The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring\n DSA operations, entry statistics and cache\n performance.')
dsaIntCompliance = module_compliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 3)).setObjects(*(('DSA-MIB', 'dsaOpsGroup'), ('DSA-MIB', 'dsaIntGroup')))
if mibBuilder.loadTexts: dsaIntCompliance.setDescription(' The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring DSA\n operations and the interaction of the DSA with\n peer DSAs.')
dsaOpsGroup = object_group((1, 3, 6, 1, 2, 1, 29, 4, 1, 1)).setObjects(*(('DSA-MIB', 'dsaAnonymousBinds'), ('DSA-MIB', 'dsaUnauthBinds'), ('DSA-MIB', 'dsaSimpleAuthBinds'), ('DSA-MIB', 'dsaStrongAuthBinds'), ('DSA-MIB', 'dsaBindSecurityErrors'), ('DSA-MIB', 'dsaInOps'), ('DSA-MIB', 'dsaReadOps'), ('DSA-MIB', 'dsaCompareOps'), ('DSA-MIB', 'dsaAddEntryOps'), ('DSA-MIB', 'dsaRemoveEntryOps'), ('DSA-MIB', 'dsaModifyEntryOps'), ('DSA-MIB', 'dsaModifyRDNOps'), ('DSA-MIB', 'dsaListOps'), ('DSA-MIB', 'dsaSearchOps'), ('DSA-MIB', 'dsaOneLevelSearchOps'), ('DSA-MIB', 'dsaWholeTreeSearchOps'), ('DSA-MIB', 'dsaReferrals'), ('DSA-MIB', 'dsaChainings'), ('DSA-MIB', 'dsaSecurityErrors'), ('DSA-MIB', 'dsaErrors')))
if mibBuilder.loadTexts: dsaOpsGroup.setDescription(' A collection of objects for monitoring the DSA\n operations.')
dsaEntryGroup = object_group((1, 3, 6, 1, 2, 1, 29, 4, 1, 2)).setObjects(*(('DSA-MIB', 'dsaMasterEntries'), ('DSA-MIB', 'dsaCopyEntries'), ('DSA-MIB', 'dsaCacheEntries'), ('DSA-MIB', 'dsaCacheHits'), ('DSA-MIB', 'dsaSlaveHits')))
if mibBuilder.loadTexts: dsaEntryGroup.setDescription(' A collection of objects for monitoring the DSA\n entry statistics and cache performance.')
dsaIntGroup = object_group((1, 3, 6, 1, 2, 1, 29, 4, 1, 3)).setObjects(*(('DSA-MIB', 'dsaName'), ('DSA-MIB', 'dsaTimeOfCreation'), ('DSA-MIB', 'dsaTimeOfLastAttempt'), ('DSA-MIB', 'dsaTimeOfLastSuccess'), ('DSA-MIB', 'dsaFailuresSinceLastSuccess'), ('DSA-MIB', 'dsaFailures'), ('DSA-MIB', 'dsaSuccesses')))
if mibBuilder.loadTexts: dsaIntGroup.setDescription(" A collection of objects for monitoring the DSA's\n interaction with peer DSAs.")

mibBuilder.exportSymbols('DSA-MIB', dsaErrors=dsaErrors, dsaOpsGroup=dsaOpsGroup, dsaTimeOfLastSuccess=dsaTimeOfLastSuccess, dsaGroups=dsaGroups, dsaWholeTreeSearchOps=dsaWholeTreeSearchOps, dsaConformance=dsaConformance, dsaOneLevelSearchOps=dsaOneLevelSearchOps, dsaBindSecurityErrors=dsaBindSecurityErrors, dsaOpsEntry=dsaOpsEntry, dsaSuccesses=dsaSuccesses, dsaOpsCompliance=dsaOpsCompliance, dsaSearchOps=dsaSearchOps, dsaMasterEntries=dsaMasterEntries, dsaTimeOfLastAttempt=dsaTimeOfLastAttempt, dsaUnauthBinds=dsaUnauthBinds, dsaEntryCompliance=dsaEntryCompliance, dsaFailuresSinceLastSuccess=dsaFailuresSinceLastSuccess, dsaMIB=dsaMIB, dsaSecurityErrors=dsaSecurityErrors, dsaModifyEntryOps=dsaModifyEntryOps, dsaIntCompliance=dsaIntCompliance, dsaName=dsaName, dsaOpsTable=dsaOpsTable, dsaIntIndex=dsaIntIndex, dsaTimeOfCreation=dsaTimeOfCreation, dsaChainings=dsaChainings, dsaInOps=dsaInOps, dsaCacheEntries=dsaCacheEntries, dsaEntryGroup=dsaEntryGroup, dsaEntriesEntry=dsaEntriesEntry, dsaStrongAuthBinds=dsaStrongAuthBinds, dsaIntEntry=dsaIntEntry, dsaSimpleAuthBinds=dsaSimpleAuthBinds, dsaReadOps=dsaReadOps, dsaRemoveEntryOps=dsaRemoveEntryOps, dsaModifyRDNOps=dsaModifyRDNOps, dsaFailures=dsaFailures, dsaListOps=dsaListOps, dsaCacheHits=dsaCacheHits, dsaIntTable=dsaIntTable, dsaEntriesTable=dsaEntriesTable, PYSNMP_MODULE_ID=dsaMIB, dsaCompliances=dsaCompliances, dsaCompareOps=dsaCompareOps, dsaCopyEntries=dsaCopyEntries, dsaSlaveHits=dsaSlaveHits, dsaAnonymousBinds=dsaAnonymousBinds, dsaIntGroup=dsaIntGroup, dsaReferrals=dsaReferrals, dsaAddEntryOps=dsaAddEntryOps)
# Basic data/training configuration; model-specific configuration is
# declared in the notebook.
args = dict(
    batch_size=128,
    lr=1e-3,
    epochs=10,
)
# Training hyper-parameters shared across runs.
args = {
    'batch_size': 128,
    'lr': 1e-3,
    'epochs': 10,
}
# Build the transpose of a 3x3 matrix and print both.
# NOTE(review): despite the name, `rotate` holds the transpose
# (rotate[c][r] = marks[r][c]), not a 90-degree rotation.
marks = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
rotate = [list(column) for column in zip(*marks)]
for row in marks:
    print(row)
for row in rotate:
    print(row)
# Transpose `marks` by index arithmetic (names kept for compatibility).
# NOTE(review): `rotate` is the transpose, not a rotation.
marks = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
rotate = [[marks[r][c] for r in range(len(marks))] for c in range(len(marks[0]))]
for row in marks:
    print(row)
for row in rotate:
    print(row)
# do a bunch of ternary operations on an NA object x = 1 / 0 assert type(x) is NA assert type(pow(x, 2)) is NA assert type(pow(2, x)) is NA assert type(x ** 2) is NA assert type(2 ** x) is NA
# NA propagation checks.
# NOTE(review): assumes an environment where 1 / 0 produces an `NA`
# sentinel (no ZeroDivisionError) and `NA` is defined externally -- confirm.
x = 1 / 0
assert type(x) is NA
# pow() keeps NA regardless of which operand is NA
assert type(pow(x, 2)) is NA
assert type(pow(2, x)) is NA
# ** behaves the same as pow()
assert type(x ** 2) is NA
assert type(2 ** x) is NA
@memory.cache
def read_wav():
    """Locate and parse the demo 'smashing baby' WAV file.

    The result is memoised by the surrounding `memory` cache object.
    Returns whatever `wavfile.read` yields -- presumably the
    (sample_rate, data) pair of scipy.io.wavfile.read; confirm in the
    importing module.
    """
    return wavfile.read(dl.data.get_smashing_baby())
@memory.cache
def read_wav():
    """Fetch the demo 'smashing baby' WAV file and parse it.

    The `memory` decorator caches the parsed result (presumably a
    joblib.Memory instance -- confirm); `wavfile.read` appears to be
    scipy.io.wavfile.read, returning a (sample_rate, data) tuple --
    confirm against the importing module.
    """
    # resolve the path to the bundled sample file, then parse it
    wav = dl.data.get_smashing_baby()
    return wavfile.read(wav)
#!/usr/bin/python3
# 3-print_reversed_list_integer.py
def print_reversed_list_integer(my_list=[]):
    """Print all integers of a list in reverse order, one per line.

    Non-list arguments are silently ignored.  Fix: iterate with
    reversed() instead of my_list.reverse(), which mutated the caller's
    list (and the shared mutable default argument) as a hidden side
    effect.
    """
    if isinstance(my_list, list):
        for i in reversed(my_list):
            print("{:d}".format(i))
def print_reversed_list_integer(my_list=[]):
    """Print all integers of a list in reverse order, one per line.

    Non-list arguments are silently ignored.  Fix: the previous in-place
    my_list.reverse() mutated the caller's list and the shared mutable
    default argument; reversed() iterates without side effects.
    """
    if isinstance(my_list, list):
        for i in reversed(my_list):
            print('{:d}'.format(i))
class Config:
    """Static settings for scanning the Facebook Business SDK adobjects
    package and loading the results into Neo4j."""

    BASE_DIR = "/usr/local/lib/python3.9/site-packages"
    FACEBOOK_PACKAGE = "facebook_business"
    ADOBJECT_DIR = "adobjects"
    # https://github.com/facebook/facebook-python-business-sdk/tree/master/facebook_business/adobjects
    FULL_PATH = BASE_DIR + "/" + FACEBOOK_PACKAGE + "/" + ADOBJECT_DIR
    NEO4J_HOST = "bolt://service-neo4j:7687"
    # module files that do not describe concrete ad objects
    EXCLUSION_LIST = ["__init__.py", "abstractobject.py", "abstractcrudobject.py"]
class Config:
    """Static settings for scanning the Facebook Business SDK adobjects
    package and loading the results into Neo4j.

    Fix: `full_path` referenced the old upper-case names (BASE_DIR,
    FACEBOOK_PACKAGE, ADOBJECT_DIR) that no longer exist after the
    snake_case rename, raising NameError while the class body executed;
    it now uses the attributes actually defined above.
    """

    base_dir = '/usr/local/lib/python3.9/site-packages'
    facebook_package = 'facebook_business'
    adobject_dir = 'adobjects'
    # https://github.com/facebook/facebook-python-business-sdk/tree/master/facebook_business/adobjects
    full_path = f'{base_dir}/{facebook_package}/{adobject_dir}'
    neo4_j_host = 'bolt://service-neo4j:7687'
    # module files that do not describe concrete ad objects
    exclusion_list = ['__init__.py', 'abstractobject.py', 'abstractcrudobject.py']
__version__ = "2.0.1" __version_info__ = tuple(int(num) for num in __version__.split(".")) default_app_config = "dbfiles.apps.DBFilesConfig"
# Version string, its numeric tuple form, and the Django app-config path.
__version__ = '2.0.1'
__version_info__ = tuple([int(num) for num in __version__.split('.')])
default_app_config = 'dbfiles.apps.DBFilesConfig'
fo = open("list.txt", "r") lines = fo.readlines() outf = open("out.txt", "w") for line in lines: l = line.replace("\n","") ls = l.split(",") pl = "first: " + ls[0] + " second: " + ls[1] + " third: " + ls[2] outf.write(pl) outf.close()
# Reformat comma-separated triples from list.txt into labelled records in
# out.txt.  Fixes: file handles are closed via `with` (the input handle
# leaked before), and each record gets a trailing newline so the output
# is one record per line instead of one run-together line.
with open('list.txt', 'r') as fo:
    lines = fo.readlines()
with open('out.txt', 'w') as outf:
    for line in lines:
        l = line.replace('\n', '')
        ls = l.split(',')
        pl = 'first: ' + ls[0] + ' second: ' + ls[1] + ' third: ' + ls[2] + '\n'
        outf.write(pl)
# coding: utf-8
# created by Martin Haese, Tel FRM 10763
# last modified 01.02.2018
# to call it:
#   ssh -X refsans@refsansctrl01 or 02
#   cd /refsanscontrol/src/nicos-core
#   INSTRUMENT=nicos_mlz.refsans bin/nicos-monitor -S monitor_scatgeo
# NICOS monitor setup: layout columns for the REFSANS scattering geometry.

description = 'REFSANS scattering geometry monitor'
group = 'special'

# Legend for _componentpositioncol
# dev='...' only illustrates the possible values here; these are not devices
_componentpositioncol = Column(
    # Block(' x: component position in beam direction (b3 = 0.0mm) ', [
    Block(' x: component position in beam direction ', [
        BlockRow(
            Field(name='goniometer', dev='goniometer_x', width=14, unit='mm'),
            Field(name='sample center', dev='sample_x', width=14, unit='mm'),
            Field(name='monitor pos', dev='monitor_pos', width=14, unit='mm'),
            Field(name='backguard pos', dev='backguard_pos', width=14, unit='mm'),
            Field(name='tube pivot', dev='tube_pivot', width=10),
            Field(name='table origin', dev='table_zero', width=14, unit='mm'),
            Field(name='table', dev='table_pos', width=14, unit='mm'),
            Field(name='dist b3 gonio', dev='b3_gonio', width=14, unit='mm'),
            Field(name='dist b3 sample', dev='b3_sample', width=14, unit='mm'),
            Field(name='dist b3 monitor', dev='b3_monitor', width=14, unit='mm'),
            # Field(name='dist b3 backguard', dev='b3_backguard', width=14, unit='mm'),
            Field(name='det_pivot', dev='det_pivot', width=10),
            Field(name='det_table', dev='det_table', width=14, unit='mm'),
            Field(name='dist sample det', dev='sample_det', width=14, unit='mm'),
            Field(name='flight path', dev='flight_path', width=14, unit='mm'),
        ),
    ],
    ),
)

# Legend for _componentlateralcol
# dev='...' only illustrates the possible values here; these are not devices
_componentlateralcol = Column(
    Block(' y: lateral component position and angle (b3 = 0.0mm) ', [
        # Block(' y: lateral component position ', [
        BlockRow(
            Field(name='sample y', dev='probenwechsler', width=14, unit='mm'),
            Field(name='sample phi', dev='sample_phi', width=14, unit='deg'),
            Field(name='tube y', dev='tube_y', width=14, unit='mm'),
            Field(name='tube angle', dev='tube_lateral_angle', width=14, unit='deg'),
            Field(name='probenwechsler', dev='probenwechsler', width=14, unit='mm'),
            Field(name='gonio_y', dev='gonio_y', width=14, unit='mm'),
            Field(name='det y', dev='det_y', width=14, unit='mm'),
            Field(name='beamstop y', dev='beamstop_y', width=14, unit='mm'),
        ),
    ],
    ),
)

# Legend for _componentverticalcol
# dev='...' only illustrates the possible values here; these are not devices
_componentverticalcol = Column(
    Block(' z: vertical component position and angle (b3 = 0.0mm) ', [
        # Block(' z: vertical component position ', [
        BlockRow(
            Field(name='sample z', dev='sample_z', width=14, unit='mm'),
            Field(name='sample theta', dev='sample_theta', width=14, unit='deg'),
            Field(name='backguard', dev='backguard_z', width=14, unit='mm'),
            Field(name='tube', dev='tube_z', width=14, unit='mm'),
            Field(name='tube angle', dev='tube_vertical_angle', width=14, unit='deg'),
            Field(name='beamstop', dev='beamstop_z', width=14, unit='mm'),
            Field(name='gonio_z', dev='gonio_z', width=14, unit='mm'),
            Field(name='backguard', dev='backguard', width=14, unit='mm'),
            Field(name='det_yoke', dev='det_yoke', width=14, unit='mm'),
            Field(name='beamstop z', dev='beamstop_z', width=14, unit='mm'),
        ),
    ],
    ),
)

# Legend for _componentanglescol
# dev='...' only illustrates the possible values here; these are not devices
_componentanglescol = Column(
    Block(' component angles ', [
        BlockRow(
            Field(name='gonio_theta', dev='gonio_theta', width=14, unit='deg'),
            Field(name='gonio_phi', dev='gonio_phi', width=14, unit='deg'),
            Field(name='gonio_omega', dev='gonio_omega', width=14, unit='deg'),
            Field(name='det hor angle', dev='det_hor_angle', width=14, unit='deg'),
            Field(name='det vert angle', dev='det_vert_angle', width=14, unit='deg'),
            Field(name='beam_tilt', dev='beam_tilt', width=14, unit='mrad'),
        ),
    ],
    ),
)

# Legend for _topgoniometercol
# dev='...' only illustrates the possible values here; these are not devices
_topgoniometercol = Column(
    Block(' top goniometer ', [
        BlockRow(
            Field(name='top_theta', dev='top_theta', width=14, unit='deg'),
            Field(name='top_phi', dev='top_phi', width=14, unit='deg'),
            Field(name='top_omega', dev='top_omega', width=14, unit='deg'),
            Field(name='top_x', dev='top_x', width=14, unit='mm'),
            Field(name='top_y', dev='top_y', width=14, unit='mm'),
            Field(name='top_z', dev='top_z', width=14, unit='mm'),
        ),
    ],
    ),
)

# Legend for _autocollimatorcol
# dev='...' only illustrates the possible values here; these are not devices
_autocollimatorcol = Column(
    Block(' autocollimator ', [
        BlockRow(
            Field(name='ac_theta', dev='ac_theta', width=14, unit='deg'),
            Field(name='ac_phi', dev='ac_phi', width=14, unit='deg'),
            Field(name='ac_error', dev='ac_error', width=14, unit='deg'),
        ),
    ],
    ),
)

# Legend for _altimetercol
# dev='...' only illustrates the possible values here; these are not devices
_altimetercol = Column(
    Block(' altimeter ', [
        BlockRow(
            Field(name='height', dev='altimeter', width=14, unit='mm'),
        ),
    ],
    ),
)

# Legend for _samplesizecol
# dev='...' only illustrates the possible values here; these are not devices
_samplesizecol = Column(
    Block(' sample size ', [
        BlockRow(
            Field(name='length', key='sample/length', width=14, unit='mm'),
            Field(name='width', key='sample/width', width=14, unit='mm'),
            Field(name='footprint', dev='footprint', width=14, unit='mm'),
        ),
    ],
    ),
)

# Legend for _sampletempcol
# dev='...' only illustrates the possible values here; these are not devices
_sampletempcol = Column(
    Block(' sample temperature ', [
        BlockRow(
            Field(name='julabo', dev='temp_julabo', width=14, unit='deg C'),
            Field(name='cryo', dev='temp_cryo', width=14, unit='K'),
        ),
    ],
    ),
)

# Legend for _samplethumicol
# dev='...' only illustrates the possible values here; these are not devices
_samplethumicol = Column(
    Block(' sample ', [
        BlockRow(
            Field(name='humidity', dev='humidity', width=14, unit='%'),
        ),
    ],
    ),
)

# The monitor device itself: arranges the columns above into screen rows.
devices = dict(
    Monitor = device('nicos.services.monitor.qt.Monitor',
        title = description,
        loglevel = 'info',
        cache = 'localhost',
        valuefont = 'Consolas',
        padding = 5,
        layout = [
            Row(_componentpositioncol),
            Row(_componentlateralcol, _sampletempcol),
            Row(_componentverticalcol, _altimetercol, _samplethumicol),
            Row(_autocollimatorcol, _samplesizecol),
            Row(_componentanglescol),
            Row(_topgoniometercol),
        ],
    ),
)
# NICOS monitor setup: layout columns for the REFSANS scattering geometry.
# Fix: the layout factories were referenced in snake_case (column, block,
# block_row, field, row), but NICOS injects them into setup files as
# Column, Block, BlockRow, Field and Row -- the lower-case names do not
# exist, so the setup failed to load.  The NICOS spellings are restored
# (`device` is genuinely lower-case in NICOS and is unchanged).
description = 'REFSANS scattering geometry monitor'
group = 'special'

# x: component positions along the beam direction
_componentpositioncol = Column(
    Block(' x: component position in beam direction ', [
        BlockRow(
            Field(name='goniometer', dev='goniometer_x', width=14, unit='mm'),
            Field(name='sample center', dev='sample_x', width=14, unit='mm'),
            Field(name='monitor pos', dev='monitor_pos', width=14, unit='mm'),
            Field(name='backguard pos', dev='backguard_pos', width=14, unit='mm'),
            Field(name='tube pivot', dev='tube_pivot', width=10),
            Field(name='table origin', dev='table_zero', width=14, unit='mm'),
            Field(name='table', dev='table_pos', width=14, unit='mm'),
            Field(name='dist b3 gonio', dev='b3_gonio', width=14, unit='mm'),
            Field(name='dist b3 sample', dev='b3_sample', width=14, unit='mm'),
            Field(name='dist b3 monitor', dev='b3_monitor', width=14, unit='mm'),
            Field(name='det_pivot', dev='det_pivot', width=10),
            Field(name='det_table', dev='det_table', width=14, unit='mm'),
            Field(name='dist sample det', dev='sample_det', width=14, unit='mm'),
            Field(name='flight path', dev='flight_path', width=14, unit='mm'),
        ),
    ]),
)

# y: lateral positions and angles
_componentlateralcol = Column(
    Block(' y: lateral component position and angle (b3 = 0.0mm) ', [
        BlockRow(
            Field(name='sample y', dev='probenwechsler', width=14, unit='mm'),
            Field(name='sample phi', dev='sample_phi', width=14, unit='deg'),
            Field(name='tube y', dev='tube_y', width=14, unit='mm'),
            Field(name='tube angle', dev='tube_lateral_angle', width=14, unit='deg'),
            Field(name='probenwechsler', dev='probenwechsler', width=14, unit='mm'),
            Field(name='gonio_y', dev='gonio_y', width=14, unit='mm'),
            Field(name='det y', dev='det_y', width=14, unit='mm'),
            Field(name='beamstop y', dev='beamstop_y', width=14, unit='mm'),
        ),
    ]),
)

# z: vertical positions and angles
_componentverticalcol = Column(
    Block(' z: vertical component position and angle (b3 = 0.0mm) ', [
        BlockRow(
            Field(name='sample z', dev='sample_z', width=14, unit='mm'),
            Field(name='sample theta', dev='sample_theta', width=14, unit='deg'),
            Field(name='backguard', dev='backguard_z', width=14, unit='mm'),
            Field(name='tube', dev='tube_z', width=14, unit='mm'),
            Field(name='tube angle', dev='tube_vertical_angle', width=14, unit='deg'),
            Field(name='beamstop', dev='beamstop_z', width=14, unit='mm'),
            Field(name='gonio_z', dev='gonio_z', width=14, unit='mm'),
            Field(name='backguard', dev='backguard', width=14, unit='mm'),
            Field(name='det_yoke', dev='det_yoke', width=14, unit='mm'),
            Field(name='beamstop z', dev='beamstop_z', width=14, unit='mm'),
        ),
    ]),
)

_componentanglescol = Column(
    Block(' component angles ', [
        BlockRow(
            Field(name='gonio_theta', dev='gonio_theta', width=14, unit='deg'),
            Field(name='gonio_phi', dev='gonio_phi', width=14, unit='deg'),
            Field(name='gonio_omega', dev='gonio_omega', width=14, unit='deg'),
            Field(name='det hor angle', dev='det_hor_angle', width=14, unit='deg'),
            Field(name='det vert angle', dev='det_vert_angle', width=14, unit='deg'),
            Field(name='beam_tilt', dev='beam_tilt', width=14, unit='mrad'),
        ),
    ]),
)

_topgoniometercol = Column(
    Block(' top goniometer ', [
        BlockRow(
            Field(name='top_theta', dev='top_theta', width=14, unit='deg'),
            Field(name='top_phi', dev='top_phi', width=14, unit='deg'),
            Field(name='top_omega', dev='top_omega', width=14, unit='deg'),
            Field(name='top_x', dev='top_x', width=14, unit='mm'),
            Field(name='top_y', dev='top_y', width=14, unit='mm'),
            Field(name='top_z', dev='top_z', width=14, unit='mm'),
        ),
    ]),
)

_autocollimatorcol = Column(
    Block(' autocollimator ', [
        BlockRow(
            Field(name='ac_theta', dev='ac_theta', width=14, unit='deg'),
            Field(name='ac_phi', dev='ac_phi', width=14, unit='deg'),
            Field(name='ac_error', dev='ac_error', width=14, unit='deg'),
        ),
    ]),
)

_altimetercol = Column(
    Block(' altimeter ', [
        BlockRow(
            Field(name='height', dev='altimeter', width=14, unit='mm'),
        ),
    ]),
)

_samplesizecol = Column(
    Block(' sample size ', [
        BlockRow(
            Field(name='length', key='sample/length', width=14, unit='mm'),
            Field(name='width', key='sample/width', width=14, unit='mm'),
            Field(name='footprint', dev='footprint', width=14, unit='mm'),
        ),
    ]),
)

_sampletempcol = Column(
    Block(' sample temperature ', [
        BlockRow(
            Field(name='julabo', dev='temp_julabo', width=14, unit='deg C'),
            Field(name='cryo', dev='temp_cryo', width=14, unit='K'),
        ),
    ]),
)

_samplethumicol = Column(
    Block(' sample ', [
        BlockRow(
            Field(name='humidity', dev='humidity', width=14, unit='%'),
        ),
    ]),
)

# The monitor device itself: arranges the columns above into screen rows.
devices = dict(
    Monitor=device('nicos.services.monitor.qt.Monitor',
        title=description,
        loglevel='info',
        cache='localhost',
        valuefont='Consolas',
        padding=5,
        layout=[
            Row(_componentpositioncol),
            Row(_componentlateralcol, _sampletempcol),
            Row(_componentverticalcol, _altimetercol, _samplethumicol),
            Row(_autocollimatorcol, _samplesizecol),
            Row(_componentanglescol),
            Row(_topgoniometercol),
        ],
    ),
)
#!/usr/bin/env python
"""Advent of Code 2021, day 12: Passage Pathing.

Count distinct paths from 'start' to 'end' through a bidirectional cave
graph read from input_data.txt (one 'a-b' edge per line).  Big caves
(uppercase names) may be revisited freely; small caves (lowercase) at most
once in part 1, while part 2 allows a single small cave to be visited
twice.  'start' and 'end' are always visited exactly once.
Puzzle answers: part 1 = 3485, part 2 = 85062.

Fix vs. previous revision: the input file handle is now closed via a
`with` block (it was previously left open), and the giant pasted problem
statement is condensed into this summary.
"""

graph_map = {}


class PassageMap:
    """Bidirectional cave graph with a DFS path counter."""

    def __init__(self, part_1):
        self.graph = {}            # cave name -> list of neighbouring caves
        self.small_caves = {}      # lowercase cave -> visit count on current path
        self.paths_to_end = 0
        # True while the single part-2 double-visit budget is in use
        self.small_cave_travel_twice = False
        self.part_1 = part_1

    def add_node(self, start, end):
        """Register the undirected edge start-end."""
        self.graph.setdefault(start, [])
        self.graph.setdefault(end, [])
        for cave in (start, end):
            if cave.islower():
                self.small_caves.setdefault(cave, 0)
        self.graph[start].append(end)
        self.graph[end].append(start)

    def depth_first_search_to_end(self, current_cave='start'):
        """DFS from current_cave, counting every distinct route to 'end'."""
        for cave in self.graph[current_cave]:
            if cave == 'start':
                continue                    # never re-enter the start cave
            if cave == 'end':
                self.paths_to_end += 1      # complete path found
                continue
            if cave in self.small_caves:
                if self.small_caves[cave] >= 1:
                    # already visited: allowed once more only in part 2, and
                    # only if no small cave was doubled yet on this path
                    if self.part_1 or self.small_cave_travel_twice:
                        continue
                    self.small_cave_travel_twice = True
                self.small_caves[cave] += 1
            self.depth_first_search_to_end(cave)
            if cave in self.small_caves:
                # backtrack; dropping from 2 to 1 frees the double-visit budget
                self.small_caves[cave] -= 1
                if self.small_caves[cave] == 1:
                    self.small_cave_travel_twice = False

    def number_paths_to_end(self):
        """Return the number of complete paths counted so far."""
        return self.paths_to_end

    def reset(self, part_1):
        """Clear the path counter and switch part-1/part-2 rules."""
        self.paths_to_end = 0
        self.part_1 = part_1


passage_map = PassageMap(part_1=True)
with open('input_data.txt', 'r') as f:
    line = f.readline()
    while line:
        start, end = line.strip().split('-')
        passage_map.add_node(start, end)
        line = f.readline()
passage_map.depth_first_search_to_end()
print(f"number of paths to end for part-1: {passage_map.number_paths_to_end()}")
passage_map.reset(part_1=False)
passage_map.depth_first_search_to_end()
print(f"number of paths to end for part-2: {passage_map.number_paths_to_end()}")
""" --- Day 12: Passage Pathing --- With your submarine's subterranean subsystems subsisting suboptimally, the only way you're getting out of this cave anytime soon is by finding a path yourself. Not just a path - the only way to know if you've found the best path is to find all of them. Fortunately, the sensors are still mostly working, and so you build a rough map of the remaining caves (your puzzle input). For example: start-A start-b A-c A-b b-d A-end b-end This is a list of how all of the caves are connected. You start in the cave named start, and your destination is the cave named end. An entry like b-d means that cave b is connected to cave d - that is, you can move between them. So, the above cave system looks roughly like this: start / c--A-----b--d \\ / end Your goal is to find the number of distinct paths that start at start, end at end, and don't visit small caves more than once. There are two types of caves: big caves (written in uppercase, like A) and small caves (written in lowercase, like b). It would be a waste of time to visit any small cave more than once, but big caves are large enough that it might be worth visiting them multiple times. So, all paths you find should visit small caves at most once, and can visit big caves any number of times. Given these rules, there are 10 paths through this example cave system: start,A,b,A,c,A,end start,A,b,A,end start,A,b,end start,A,c,A,b,A,end start,A,c,A,b,end start,A,c,A,end start,A,end start,b,A,c,A,end start,b,A,end start,b,end (Each line in the above list corresponds to a single path; the caves visited by that path are listed in the order they are visited and separated by commas.) Note that in this cave system, cave d is never visited by any path: to do so, cave b would need to be visited twice (once on the way to cave d and a second time when returning from cave d), and since cave b is small, this is not allowed. 
Here is a slightly larger example: dc-end HN-start start-kj dc-start dc-HN LN-dc HN-end kj-sa kj-HN kj-dc The 19 paths through it are as follows: start,HN,dc,HN,end start,HN,dc,HN,kj,HN,end start,HN,dc,end start,HN,dc,kj,HN,end start,HN,end start,HN,kj,HN,dc,HN,end start,HN,kj,HN,dc,end start,HN,kj,HN,end start,HN,kj,dc,HN,end start,HN,kj,dc,end start,dc,HN,end start,dc,HN,kj,HN,end start,dc,end start,dc,kj,HN,end start,kj,HN,dc,HN,end start,kj,HN,dc,end start,kj,HN,end start,kj,dc,HN,end start,kj,dc,end Finally, this even larger example has 226 paths through it: fs-end he-DX fs-he start-DX pj-DX end-zg zg-sl zg-pj pj-he RW-he fs-DX pj-RW zg-RW start-pj he-WI zg-he pj-fs start-RW How many paths through this cave system are there that visit small caves at most once? Your puzzle answer was 3485. --- Part Two --- After reviewing the available paths, you realize you might have time to visit a single small cave twice. Specifically, big caves can be visited any number of times, a single small cave can be visited at most twice, and the remaining small caves can be visited at most once. However, the caves named start and end can only be visited exactly once each: once you leave the start cave, you may not return to it, and once you reach the end cave, the path must end immediately. 
Now, the 36 possible paths through the first example above are: start,A,b,A,b,A,c,A,end start,A,b,A,b,A,end start,A,b,A,b,end start,A,b,A,c,A,b,A,end start,A,b,A,c,A,b,end start,A,b,A,c,A,c,A,end start,A,b,A,c,A,end start,A,b,A,end start,A,b,d,b,A,c,A,end start,A,b,d,b,A,end start,A,b,d,b,end start,A,b,end start,A,c,A,b,A,b,A,end start,A,c,A,b,A,b,end start,A,c,A,b,A,c,A,end start,A,c,A,b,A,end start,A,c,A,b,d,b,A,end start,A,c,A,b,d,b,end start,A,c,A,b,end start,A,c,A,c,A,b,A,end start,A,c,A,c,A,b,end start,A,c,A,c,A,end start,A,c,A,end start,A,end start,b,A,b,A,c,A,end start,b,A,b,A,end start,b,A,b,end start,b,A,c,A,b,A,end start,b,A,c,A,b,end start,b,A,c,A,c,A,end start,b,A,c,A,end start,b,A,end start,b,d,b,A,c,A,end start,b,d,b,A,end start,b,d,b,end start,b,end The slightly larger example above now has 103 paths through it, and the even larger example now has 3509 paths through it. Given these new rules, how many paths through this cave system are there? Your puzzle answer was 85062. 
""" graph_map = {} class Passagemap: def __init__(self, part_1): self.graph = {} self.small_caves = {} self.paths_to_end = 0 self.small_cave_travel_twice = False self.part_1 = part_1 def add_node(self, start, end): if start not in self.graph: self.graph[start] = [] if end not in self.graph: self.graph[end] = [] if start.islower(): if start not in self.small_caves: self.small_caves[start] = 0 if end.islower(): if end not in self.small_caves: self.small_caves[end] = 0 self.graph[start].append(end) self.graph[end].append(start) def depth_first_search_to_end(self, current_cave='start'): for cave in self.graph[current_cave]: if cave == 'start': continue if cave == 'end': self.paths_to_end += 1 continue if cave in self.small_caves: if self.small_caves[cave] >= 1: if self.part_1 or self.small_cave_travel_twice: continue self.small_cave_travel_twice = True self.small_caves[cave] += 1 self.depth_first_search_to_end(cave) if cave in self.small_caves: self.small_caves[cave] -= 1 if self.small_caves[cave] == 1: self.small_cave_travel_twice = False def number_paths_to_end(self): return self.paths_to_end def reset(self, part_1): self.paths_to_end = 0 self.part_1 = part_1 passage_map = passage_map(part_1=True) with open('input_data.txt', 'r') as f: line = f.readline() while line: (start, end) = line.strip().split('-') passage_map.add_node(start, end) line = f.readline() passage_map.depth_first_search_to_end() print(f'number of paths to end for part-1: {passage_map.number_paths_to_end()}') passage_map.reset(part_1=False) passage_map.depth_first_search_to_end() print(f'number of paths to end for part-2: {passage_map.number_paths_to_end()}')
intervals = [(2, 15), (36, 45), (9, 29), (16, 23), (4, 9)] def room_num(intervals): intervals_sorted = sorted(intervals, key=lambda x: x[0]) rooms = 1 room_open_time = [intervals_sorted[0][1]] for interval in intervals_sorted[1:]: if interval[0] < min(room_open_time): rooms += 1 room_open_time.append(interval[1]) else: room_open_time[room_open_time.index(min(room_open_time))] = interval[1] return rooms print(room_num(intervals))
intervals = [(2, 15), (36, 45), (9, 29), (16, 23), (4, 9)] def room_num(intervals): intervals_sorted = sorted(intervals, key=lambda x: x[0]) rooms = 1 room_open_time = [intervals_sorted[0][1]] for interval in intervals_sorted[1:]: if interval[0] < min(room_open_time): rooms += 1 room_open_time.append(interval[1]) else: room_open_time[room_open_time.index(min(room_open_time))] = interval[1] return rooms print(room_num(intervals))
""" Given an array of integers and an integer k, find out whether there are two distinct indices i and j in the array such that nums[i] = nums[j] and the absolute difference between i and j is at most k. """ class Solution(): def contains_dups(self, nums, k): nhash = {} for i in range(len(nums)): if nhash.get(nums[i]): j = max(nhash[nums[i]]) if i - j <= k: return True nhash[nums[i]].append(i) else: nhash[nums[i]] = [i] return False nums = [1, 2, 3, 1] k = 3 # nums = [1, 2, 3, 1, 2, 3] # k = 2 s = Solution() print(s.contains_dups(nums, k))
""" Given an array of integers and an integer k, find out whether there are two distinct indices i and j in the array such that nums[i] = nums[j] and the absolute difference between i and j is at most k. """ class Solution: def contains_dups(self, nums, k): nhash = {} for i in range(len(nums)): if nhash.get(nums[i]): j = max(nhash[nums[i]]) if i - j <= k: return True nhash[nums[i]].append(i) else: nhash[nums[i]] = [i] return False nums = [1, 2, 3, 1] k = 3 s = solution() print(s.contains_dups(nums, k))
def web_page_wifi(): html = """<!DOCTYPE html><html lang="de"> <head> <meta http-equiv="content-type" content="text/html; charset=UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> </head> <body> <form action="/get"> <h1>Wifi configuration</h1> <br> <a href= "mailto:support@mcmoe.de">contact support</a> <br> <br> <label for="ssid">SSID:</label> <input type="text" id="ssid" name="ssid"><br><br> <label for="password">Password:</label> <input type="password" id="password" name="password"><br><br> <input type="hidden" id="helpvarcryptoticker" name="helpvarcryptoticker"><br> <input type="submit" value="Submit"> </form></body>""" return html
def web_page_wifi(): html = '<!DOCTYPE html><html lang="de">\n <head>\n <meta http-equiv="content-type" content="text/html; charset=UTF-8">\n <meta name="viewport" content="width=device-width, initial-scale=1">\n </head>\n <body>\n <form action="/get">\n <h1>Wifi configuration</h1>\n <br>\n <a href= "mailto:support@mcmoe.de">contact support</a>\n <br>\n <br>\n <label for="ssid">SSID:</label>\n <input type="text" id="ssid" name="ssid"><br><br>\n <label for="password">Password:</label>\n <input type="password" id="password" name="password"><br><br>\n <input type="hidden" id="helpvarcryptoticker" name="helpvarcryptoticker"><br>\n <input type="submit" value="Submit">\n </form></body>' return html
#!/usr/bin/env python # -*- coding: utf-8 -*- """ ----------------------------------------- @Author: isky @Email: 19110240019@fudan.edu.cn @Created: 2019/11/19 ------------------------------------------ @Modify: 2019/11/19 ------------------------------------------ @Description: """
""" ----------------------------------------- @Author: isky @Email: 19110240019@fudan.edu.cn @Created: 2019/11/19 ------------------------------------------ @Modify: 2019/11/19 ------------------------------------------ @Description: """
def merge(pic_1, pic_2): return { 'id': pic_1['id'] + pic_2['id'], 'tags': pic_1['tags'] | pic_2['tags'], } def get_score_table(pics): score_table = {} for i in range(len(pics)): for j in range(i + 1, len(pics)): r = len(pics[i]['tags'] & pics[j]['tags']) p = len(pics[i]['tags']) - r q = len(pics[j]['tags']) - r score = min([r, p, q]) if i not in score_table: score_table[i] = [] if j not in score_table: score_table[j] = [] score_table[i].append((score, j)) score_table[j].append((score, i)) for idx in score_table: score_table[idx].sort(key=lambda x: -x[0]) return score_table def get_slides(h_pics, v_pics): get_num_tags = lambda pic: len(pic['tags']) sorted_v_pics = sorted(v_pics, key=get_num_tags) v_slides = [merge(sorted_v_pics[2 * i], sorted_v_pics[2 * i + 1]) for i in range(len(sorted_v_pics) // 2)] slides = sorted(h_pics + v_slides, key=get_num_tags) partition = 10 top_tier = slides[-len(slides) // partition:] score_table = get_score_table(top_tier) seen_table = {top_tier_idx: 0 for top_tier_idx in score_table} selected_id = set([0]) sorted_top_tier = [top_tier[0]] last_idx = 0 for i in range(len(top_tier) - 1): for j in range(seen_table[last_idx], len(top_tier) - 1): next_idx = score_table[last_idx][j][1] if next_idx not in selected_id: sorted_top_tier.append(top_tier[next_idx]) seen_table[last_idx] = j selected_id.add(next_idx) last_idx = next_idx break return slides[:-len(slides) // partition] + sorted_top_tier def get_score(slides): score = 0 for i in range(len(slides) - 1): r = len(slides[i]['tags'] & slides[i + 1]['tags']) p = len(slides[i]['tags']) - r q = len(slides[i + 1]['tags']) - r score += min([p, q, r]) return score sol_folder = 'top_tier' datasets = ['a_example', 'b_lovely_landscapes', 'c_memorable_moments', 'd_pet_pictures', 'e_shiny_selfies'] # datasets = ['d_pet_pictures'] max_score = {dataset: 0 for dataset in datasets} ref_ans = {dataset: [] for dataset in datasets} random_round = 1 for dataset in datasets: f = 
open('QualificationRound/{}.txt'.format(dataset)) n = int(f.readline()) h_pics, v_pics = [], [] for i in range(n): tags = f.readline().strip().split() pic_type, num_tags, tags = tags[0], int(tags[1]), set(tags[2:]) if pic_type == 'H': h_pics.append({ 'id': [str(i)], 'tags': tags, }) elif pic_type == 'V': v_pics.append({ 'id': [str(i)], 'tags': tags }) for a in range(random_round): slides = get_slides(h_pics, v_pics) score = get_score(slides) if score > max_score[dataset]: max_score[dataset] = score ref_ans[dataset] = slides for dataset in datasets: wf = open('QualificationRound/{}/{}.txt'.format(sol_folder, dataset), 'w') wf.write('{}\n'.format(len(ref_ans[dataset]))) for slide in ref_ans[dataset]: wf.write('{}\n'.format(' '.join(slide['id']))) wf.close() # for i in range(len(slides)): # print(slides[i]['tags']) # if(i!=len(slides)-1): # middle = len(slides[i]['tags'].intersection(slides[i+1]['tags'])) # left = len(slides[i]['tags']) - middle # right = len(slides[i+1]['tags']) - middle # print(left, middle, right, min(left,middle,right))
def merge(pic_1, pic_2): return {'id': pic_1['id'] + pic_2['id'], 'tags': pic_1['tags'] | pic_2['tags']} def get_score_table(pics): score_table = {} for i in range(len(pics)): for j in range(i + 1, len(pics)): r = len(pics[i]['tags'] & pics[j]['tags']) p = len(pics[i]['tags']) - r q = len(pics[j]['tags']) - r score = min([r, p, q]) if i not in score_table: score_table[i] = [] if j not in score_table: score_table[j] = [] score_table[i].append((score, j)) score_table[j].append((score, i)) for idx in score_table: score_table[idx].sort(key=lambda x: -x[0]) return score_table def get_slides(h_pics, v_pics): get_num_tags = lambda pic: len(pic['tags']) sorted_v_pics = sorted(v_pics, key=get_num_tags) v_slides = [merge(sorted_v_pics[2 * i], sorted_v_pics[2 * i + 1]) for i in range(len(sorted_v_pics) // 2)] slides = sorted(h_pics + v_slides, key=get_num_tags) partition = 10 top_tier = slides[-len(slides) // partition:] score_table = get_score_table(top_tier) seen_table = {top_tier_idx: 0 for top_tier_idx in score_table} selected_id = set([0]) sorted_top_tier = [top_tier[0]] last_idx = 0 for i in range(len(top_tier) - 1): for j in range(seen_table[last_idx], len(top_tier) - 1): next_idx = score_table[last_idx][j][1] if next_idx not in selected_id: sorted_top_tier.append(top_tier[next_idx]) seen_table[last_idx] = j selected_id.add(next_idx) last_idx = next_idx break return slides[:-len(slides) // partition] + sorted_top_tier def get_score(slides): score = 0 for i in range(len(slides) - 1): r = len(slides[i]['tags'] & slides[i + 1]['tags']) p = len(slides[i]['tags']) - r q = len(slides[i + 1]['tags']) - r score += min([p, q, r]) return score sol_folder = 'top_tier' datasets = ['a_example', 'b_lovely_landscapes', 'c_memorable_moments', 'd_pet_pictures', 'e_shiny_selfies'] max_score = {dataset: 0 for dataset in datasets} ref_ans = {dataset: [] for dataset in datasets} random_round = 1 for dataset in datasets: f = open('QualificationRound/{}.txt'.format(dataset)) n = 
int(f.readline()) (h_pics, v_pics) = ([], []) for i in range(n): tags = f.readline().strip().split() (pic_type, num_tags, tags) = (tags[0], int(tags[1]), set(tags[2:])) if pic_type == 'H': h_pics.append({'id': [str(i)], 'tags': tags}) elif pic_type == 'V': v_pics.append({'id': [str(i)], 'tags': tags}) for a in range(random_round): slides = get_slides(h_pics, v_pics) score = get_score(slides) if score > max_score[dataset]: max_score[dataset] = score ref_ans[dataset] = slides for dataset in datasets: wf = open('QualificationRound/{}/{}.txt'.format(sol_folder, dataset), 'w') wf.write('{}\n'.format(len(ref_ans[dataset]))) for slide in ref_ans[dataset]: wf.write('{}\n'.format(' '.join(slide['id']))) wf.close()
def for_E(): """We are creating user defined function for alphabetical pattern of capital E with "*" symbol""" row=7 col=5 for i in range(row): for j in range(col): if i==0 or i==3 or i==6 or j==0: print("*",end=" ") else: print(" ",end=" ") print() def while_E(): i=0 while i<7: j=0 while j<5: if i==0 or i==3 or i==6 or j==0 : print("*",end=" ") else: print(" ",end=" ") j+=1 i+=1 print()
def for_e(): """We are creating user defined function for alphabetical pattern of capital E with "*" symbol""" row = 7 col = 5 for i in range(row): for j in range(col): if i == 0 or i == 3 or i == 6 or (j == 0): print('*', end=' ') else: print(' ', end=' ') print() def while_e(): i = 0 while i < 7: j = 0 while j < 5: if i == 0 or i == 3 or i == 6 or (j == 0): print('*', end=' ') else: print(' ', end=' ') j += 1 i += 1 print()
def part1(inp): drawn_numbers = [int(x) for x in inp.pop(0).split(",")] boards = [] for i in range(len(inp) // 6): inp.pop(0) board = [[int(x) for x in inp.pop(0).split()] for j in range(5)] boards.append(board) for num in drawn_numbers: for board in boards: mark_board(board, num) if check_for_win(board): return num * sum( [sum(x for x in board[i][:] if x != -1) for i in range(len(board))] ) def part2(inp): drawn_numbers = [int(x) for x in inp.pop(0).split(",")] boards = [] for i in range(len(inp) // 6): inp.pop(0) board = [[int(x) for x in inp.pop(0).split()] for j in range(5)] boards.append(board) boards_won = set() for num in drawn_numbers: for i, board in enumerate(boards): if i in boards_won: continue mark_board(board, num) if check_for_win(board): boards_won.add(i) if len(boards_won) == len(boards): return num * sum( [sum(x for x in board[i][:] if x != -1) for i in range(len(board))] ) def mark_board(board, num): for i in range(len(board)): for j in range(len(board[0])): if board[i][j] == num: board[i][j] = -1 def check_for_win(board): for row in board: if all(x == -1 for x in row): return True for col in range(len(board[0])): if all(board[i][col] == -1 for i in range(len(board))): return True return False # def part2(inp): def test_check_for_win(): board = [ [1, 2, 3, 4], [1, 2, 3, 4], [-1, -1, -1, -1], [1, 2, 3, 4], ] assert check_for_win(board) is True board[2] = [1, 1, 1, 1] assert check_for_win(board) is False board[0][1] = -1 board[1][1] = -1 board[2][1] = -1 board[3][1] = -1 assert check_for_win(board) is True def test_day4_sample(): with open("/Users/sep/CLionProjects/aoc-2021/src/test_files/day4_sample.txt") as f: inp = [s.rstrip("\n").lstrip() for s in f.readlines()] assert part1(inp) == 4512 def test_day4_submission(): with open( "/Users/sep/CLionProjects/aoc-2021/src/test_files/day4_submission.txt" ) as f: inp = [s.rstrip("\n") for s in f.readlines()] assert part1(inp) == 16674 def test_day4_part2_sample(): with 
open("/Users/sep/CLionProjects/aoc-2021/src/test_files/day4_sample.txt") as f: inp = [s.rstrip("\n") for s in f.readlines()] assert part2(inp) == 1924 def test_day4_part2_submission(): with open( "/Users/sep/CLionProjects/aoc-2021/src/test_files/day4_submission.txt" ) as f: inp = [s.rstrip("\n") for s in f.readlines()] assert part2(inp) == 7075
def part1(inp): drawn_numbers = [int(x) for x in inp.pop(0).split(',')] boards = [] for i in range(len(inp) // 6): inp.pop(0) board = [[int(x) for x in inp.pop(0).split()] for j in range(5)] boards.append(board) for num in drawn_numbers: for board in boards: mark_board(board, num) if check_for_win(board): return num * sum([sum((x for x in board[i][:] if x != -1)) for i in range(len(board))]) def part2(inp): drawn_numbers = [int(x) for x in inp.pop(0).split(',')] boards = [] for i in range(len(inp) // 6): inp.pop(0) board = [[int(x) for x in inp.pop(0).split()] for j in range(5)] boards.append(board) boards_won = set() for num in drawn_numbers: for (i, board) in enumerate(boards): if i in boards_won: continue mark_board(board, num) if check_for_win(board): boards_won.add(i) if len(boards_won) == len(boards): return num * sum([sum((x for x in board[i][:] if x != -1)) for i in range(len(board))]) def mark_board(board, num): for i in range(len(board)): for j in range(len(board[0])): if board[i][j] == num: board[i][j] = -1 def check_for_win(board): for row in board: if all((x == -1 for x in row)): return True for col in range(len(board[0])): if all((board[i][col] == -1 for i in range(len(board)))): return True return False def test_check_for_win(): board = [[1, 2, 3, 4], [1, 2, 3, 4], [-1, -1, -1, -1], [1, 2, 3, 4]] assert check_for_win(board) is True board[2] = [1, 1, 1, 1] assert check_for_win(board) is False board[0][1] = -1 board[1][1] = -1 board[2][1] = -1 board[3][1] = -1 assert check_for_win(board) is True def test_day4_sample(): with open('/Users/sep/CLionProjects/aoc-2021/src/test_files/day4_sample.txt') as f: inp = [s.rstrip('\n').lstrip() for s in f.readlines()] assert part1(inp) == 4512 def test_day4_submission(): with open('/Users/sep/CLionProjects/aoc-2021/src/test_files/day4_submission.txt') as f: inp = [s.rstrip('\n') for s in f.readlines()] assert part1(inp) == 16674 def test_day4_part2_sample(): with 
open('/Users/sep/CLionProjects/aoc-2021/src/test_files/day4_sample.txt') as f: inp = [s.rstrip('\n') for s in f.readlines()] assert part2(inp) == 1924 def test_day4_part2_submission(): with open('/Users/sep/CLionProjects/aoc-2021/src/test_files/day4_submission.txt') as f: inp = [s.rstrip('\n') for s in f.readlines()] assert part2(inp) == 7075
""" Purpose: stackoverflow solution. Date created: 2021-03-04 Title: How to generate a lists of lists or nested from user input while outputting amount of times a word is stated? URL: https://stackoverflow.com/questions/66483811/how-to-generate-a-lists-of-lists-or-nested-from-user-input-while-outputting-amou/66484266#66484266 Contributor(s): Mark M. """ sample = "Nice day #running #10k #power #running" # Output: [running,10k, power, running] and [[running,2],[10k,1],[power,1]] def incr(el, ddict): if not el in ddict: ddict[el] = 1 else: ddict[el] += 1 def process_input(string, key_char = "#"): # Creating a clean list of keywords by a given target character. base_list = [i.replace(f"{key_char}", "") for i in string.split(" ") if i[0] == f"{key_char}"] # We can use the list.count() function on a set of keywords # to return the instance of each word within a given string. word_count_list = [[w, base_list.count(w)] for w in set(base_list)] return base_list, word_count_list process_input(sample)
""" Purpose: stackoverflow solution. Date created: 2021-03-04 Title: How to generate a lists of lists or nested from user input while outputting amount of times a word is stated? URL: https://stackoverflow.com/questions/66483811/how-to-generate-a-lists-of-lists-or-nested-from-user-input-while-outputting-amou/66484266#66484266 Contributor(s): Mark M. """ sample = 'Nice day #running #10k #power #running' def incr(el, ddict): if not el in ddict: ddict[el] = 1 else: ddict[el] += 1 def process_input(string, key_char='#'): base_list = [i.replace(f'{key_char}', '') for i in string.split(' ') if i[0] == f'{key_char}'] word_count_list = [[w, base_list.count(w)] for w in set(base_list)] return (base_list, word_count_list) process_input(sample)
class Version(str): SEPARATOR = '.' def __new__(cls, version=""): obj = str.__new__(cls, version) obj._list = None return obj @property def list(self): if self._list is None: self._list = [] for item in self.split(Version.SEPARATOR): self._list.append(int(item) if item.isdigit() else item) return self._list def serialize(self): return str(self) @staticmethod def deserialize(data): if not data: return None return Version(data) def __cmp__(self, other): if other is None: return cmp(self.list, None) if isinstance(other, basestring): other = Version(other) return cmp(self.list, other.list) def __gt__(self, other): return cmp(self, other) == 1 def __lt__(self, other): return cmp(self, other) == -1 def __le__(self, other): c = cmp(self, other) return c in [0, -1] def __ge__(self, other): c = cmp(self, other) return c in [0, 1]
class Version(str): separator = '.' def __new__(cls, version=''): obj = str.__new__(cls, version) obj._list = None return obj @property def list(self): if self._list is None: self._list = [] for item in self.split(Version.SEPARATOR): self._list.append(int(item) if item.isdigit() else item) return self._list def serialize(self): return str(self) @staticmethod def deserialize(data): if not data: return None return version(data) def __cmp__(self, other): if other is None: return cmp(self.list, None) if isinstance(other, basestring): other = version(other) return cmp(self.list, other.list) def __gt__(self, other): return cmp(self, other) == 1 def __lt__(self, other): return cmp(self, other) == -1 def __le__(self, other): c = cmp(self, other) return c in [0, -1] def __ge__(self, other): c = cmp(self, other) return c in [0, 1]
class Solution: """ @param nums: an array of integers @param k: an integer @return: the number of unique k-diff pairs """ def findPairs(self, nums, k): ans = 0 num_set = set(nums) if k == 0: num_dict = dict([(num, 0) for num in num_set]) for num in nums: num_dict[num] += 1 for num, times in num_dict.items(): if times > 1: ans += 1 else: possible_nums = set() for num in num_set: possible_nums.add(num + k) possible_nums.add(num - k) if num in possible_nums: ans += 1 return ans
class Solution: """ @param nums: an array of integers @param k: an integer @return: the number of unique k-diff pairs """ def find_pairs(self, nums, k): ans = 0 num_set = set(nums) if k == 0: num_dict = dict([(num, 0) for num in num_set]) for num in nums: num_dict[num] += 1 for (num, times) in num_dict.items(): if times > 1: ans += 1 else: possible_nums = set() for num in num_set: possible_nums.add(num + k) possible_nums.add(num - k) if num in possible_nums: ans += 1 return ans
# MEDIUM # this is like Word Break => DFS + Memoization # define a lambda x,y: x {+,-,*} y # scan the string s: # break at operators "+-*" => left = s[:operator] right = s[operator+1:] # recurse each left, right: # try every possible operations of left and right # Time O(N!) Space O(N) class Solution: def diffWaysToCompute(self, input: str) -> List[int]: self.memo = {} self.ops = { "+": lambda x,y: x+y, "-": lambda x,y: x-y, "*": lambda x,y: x*y, } return self.dfs(input) def dfs(self,s): result = [] if s in self.memo: return self.memo[s] for i in range(len(s)): if s[i] in {"+","-","*"}: left = s[:i] right = s[i+1:] l = self.dfs(left) r = self.dfs(right) for a in l: for b in r: result.append(self.ops[s[i]](a,b)) if not result: result.append(int(s)) self.memo[s] = result return result
class Solution: def diff_ways_to_compute(self, input: str) -> List[int]: self.memo = {} self.ops = {'+': lambda x, y: x + y, '-': lambda x, y: x - y, '*': lambda x, y: x * y} return self.dfs(input) def dfs(self, s): result = [] if s in self.memo: return self.memo[s] for i in range(len(s)): if s[i] in {'+', '-', '*'}: left = s[:i] right = s[i + 1:] l = self.dfs(left) r = self.dfs(right) for a in l: for b in r: result.append(self.ops[s[i]](a, b)) if not result: result.append(int(s)) self.memo[s] = result return result
#!/usr/bin/env python3 # day007.py # By Sebastian Raaphorst, 2019. # We memoize the auxiliary internal function in calculate_decodings, because the recursion explodes into # already-solved sub-problems. # For the last test, without memoization, it takes class Memoize: def __init__(self, f): self.f = f self.memo = {} def __call__(self, *args): if not args in self.memo: self.memo[args] = self.f(*args) return self.memo[args] def calculate_decodings(enc: str) -> int: """ Given an encoded string consisting of numbers, using the decoding: a -> 1, b -> 2, ..., z -> 26, calculate the number of ways in which the string can be decoded. :param enc: the string to decode :return: the number of possible decodings >>> calculate_decodings('111') 3 >>> calculate_decodings('121') 3 >>> calculate_decodings('131') 2 >>> calculate_decodings('1234') 3 >>> calculate_decodings('2563') 2 >>> calculate_decodings('4123') 3 >>> calculate_decodings('1101') 1 >>> calculate_decodings('11101') 2 >>> calculate_decodings('1001') 0 # 17 is 1/7 or 17 2 choices # 224 is 2/2/4 or 22/4 or 2/24 3 choices # 3 is 3 1 choice # 15 is 1/5 or 15 2 choices # 9 is 9 1 choice # 20 is 20 1 choice # 22 is 2/2 or 22 2 choices # Total = 2^3 * 3 = 24 choices >>> calculate_decodings('1722431592022') 24 # On my MacBook pro: # cProfile.run("calculate_decodings('111111111111111111111111111111111111')") # * without memoization, this takes 76.355 seconds. # * with memoization, this takes less than 0.01 seconds. # Profiled using: # import cProfile # cProfile.run(...) # Note that using a string entirely of 1s and / or 2s leads to the Fibonacci numbers, as we must break # the string into bits of lengths 1 and 2, which is equivalent to the coding of a 1xn board with # squares and dominoes. >>> calculate_decodings('111111111111111111111111111111111111') 24157817 """ # General strategy: we break this down into a case-by-case basis and recursively ascertain the number of possible # decoding. 
To begin with, the numbers: # (A) 3 - 6 are the same and will be treated as such; and # (B) 7 - 9 are the same. # This, we begin by converting these characters to tokens A and B respectively to simplify the processing # and to increase the chances at memoization. enc = enc.translate(enc.maketrans({'3': 'A', '4': 'A', '5': 'A', '6': 'A', '7': 'B', '8': 'B', '9': 'B'})) # Now we use an auxiliary method to consider all possible cases. # There could be some optimization here: slicing + comparing is slightly faster than startswith, and # >>> timeit.timeit('"1234".startswith("12")', number=10000000) # 1.5383420000434853 # >>> timeit.timeit('"1234"[:2] == "12"', number=10000000) # 1.2081479519838467 # and furthermore, the long if statement might be faster if using sets, e.g. # if len(2) > 1 and ((s[0] == '1' and s[1] in S1) or (s[0] == '2' and s[2] in S2) # but I think it is clearer what is happening with the list of startswith. @Memoize def aux(s: str) -> int: # Base case 1: if s is empty, there is a unique decoding. if len(s) <= 1: return 1 # Base case 2: if the first character of s is 0, we have made a mistake. # This could occur in, e.g., 110 when we consider 110 to be 11/0 instead of 1/10. if s[0] == '0': return 0 # Base case 2: if s contains a single character, we only have one decoding. if len(s) == 1: assert s[0] != '0', f"{s} contains 0 in illegal position" return 1 # If s starts with either A or B, then we can only decode to the symbol that was changed to A or B # so this part of the encoding is fixed. Advance and recurse. if s.startswith('A') or s.startswith('B'): return aux(s[1:]) # If s starts with either 10 or 20, then we can only decode to j or t. Advance and recurse. if s.startswith('10') or s.startswith('20'): return aux(s[2:]) # If s starts with 2B, then we can only decode to b for 2 and the symbol represented by B. # Advance and recurse. 
if s.startswith('2B'): return aux(s[2:]) # Now the mixed case: # If s starts with 11, 12, 1A, or 1B, then it can be decoded into two possibilities. # If s starts with 21, 22, or 2A, then it too can be decoded into two possibilities. if s.startswith('11') or s.startswith('12') or s.startswith('1A') or s.startswith('1B') or \ s.startswith('21') or s.startswith('22') or s.startswith('2A'): return aux(s[1:]) + aux(s[2:]) # If we reached this point, something went wrong: an illegal character? assert False, f"{s} is badly formed" return aux(enc)
class Memoize:
    """Memoization decorator: caches a function's results keyed on its
    positional arguments (all arguments must be hashable)."""

    def __init__(self, f):
        self.f = f
        self.memo = {}

    def __call__(self, *args):
        if args not in self.memo:
            self.memo[args] = self.f(*args)
        return self.memo[args]


def calculate_decodings(enc: str) -> int:
    """
    Given an encoded string consisting of digits, using the code
    a -> 1, b -> 2, ..., z -> 26, count the number of ways in which
    the string can be decoded.

    :param enc: the encoded digit string
    :return: the number of possible decodings (0 if the string cannot
        be decoded at all, e.g. '0' or '1001')

    >>> calculate_decodings('111')
    3
    >>> calculate_decodings('121')
    3
    >>> calculate_decodings('131')
    2
    >>> calculate_decodings('1234')
    3
    >>> calculate_decodings('2563')
    2
    >>> calculate_decodings('4123')
    3
    >>> calculate_decodings('1101')
    1
    >>> calculate_decodings('11101')
    2
    >>> calculate_decodings('1001')
    0
    >>> calculate_decodings('0')
    0
    >>> calculate_decodings('1722431592022')
    24

    A string of all 1s/2s yields Fibonacci numbers (tilings of a 1xn board
    with squares and dominoes); memoization makes this fast:

    >>> calculate_decodings('111111111111111111111111111111111111')
    24157817
    """
    # Digits 3-6 (token A) and 7-9 (token B) each behave uniformly:
    # an A-digit can pair with a preceding 1 or 2 (13..16 and 23..26 are
    # letters), while a B-digit can pair with a preceding 1 only
    # (17..19 are letters, 27..29 exceed 26).  Collapsing them to two
    # tokens shrinks the set of distinct subproblems, improving
    # memoization hit rates.
    enc = enc.translate(enc.maketrans({'3': 'A', '4': 'A', '5': 'A', '6': 'A',
                                       '7': 'B', '8': 'B', '9': 'B'}))

    @Memoize
    def aux(s: str) -> int:
        # Empty suffix: exactly one (empty) decoding.
        if not s:
            return 1
        # A leading '0' cannot start any symbol: this branch is dead.
        # BUG FIX: this check now precedes the single-character base case,
        # so aux('0') correctly yields 0 (the old order returned 1 for
        # calculate_decodings('0')).
        if s[0] == '0':
            return 0
        # A single non-zero character decodes exactly one way.
        if len(s) == 1:
            return 1
        # A/B tokens decode to exactly one symbol: consume one character.
        if s[0] in ('A', 'B'):
            return aux(s[1:])
        # '10'/'20' must pair (a lone 0 is illegal).  '2B' (27-29) exceeds
        # 26, so the '2' stands alone and the B-digit then also stands
        # alone -- consuming both characters at once counts the same.
        if s[:2] in ('10', '20', '2B'):
            return aux(s[2:])
        # Genuinely ambiguous prefixes: the first digit may stand alone or
        # pair with the second; sum both counts.
        if s[:2] in ('11', '12', '1A', '1B', '21', '22', '2A'):
            return aux(s[1:]) + aux(s[2:])
        # Reaching here means the input contained a non-digit character.
        raise ValueError(f"{s} is badly formed")

    return aux(enc)
""" -The zip functions takes some iterators and zips Them on Tuples. - Used to parallel iterations - Retun a zip object which is an iterators of zip. the zip function takes the len of the shortest zip ands used it as main path to zip to the other zip. """ countries = "Ecuador" capitals = "Quito" countries_capitals = zip(countries, capitals) print(list(countries_capitals))
""" -The zip functions takes some iterators and zips Them on Tuples. - Used to parallel iterations - Retun a zip object which is an iterators of zip. the zip function takes the len of the shortest zip ands used it as main path to zip to the other zip. """ countries = 'Ecuador' capitals = 'Quito' countries_capitals = zip(countries, capitals) print(list(countries_capitals))
# Fixture: sample GitHub REST API `GET /users/{username}` response.
# NOTE(review): several fields hold the *strings* "false"/"null" rather than
# real booleans/None ("site_admin", "company", "hireable", "bio",
# "two_factor_authentication") -- confirm consumers expect string values.
git_response = {
    "login": "dimddev",
    "id": 57534,
    "node_id": "MdQ6VXnlc4U3NTM0NDA=",
    "avatar_url": "https://avatars1.githubusercontent.com/u/5753440?v=4",
    "gravatar_id": "",
    "url": "https://api.github.com/users/dimddev",
    "html_url": "https://github.com/dimddev",
    "followers_url": "https://api.github.com/users/dimddev/followers",
    "following_url": "https://api.github.com/users/dimddev/following{/other_user}",
    "gists_url": "https://api.github.com/users/dimddev/gists{/gist_id}",
    "starred_url": "https://api.github.com/users/dimddev/starred{/owner}{/repo}",
    "subscriptions_url": "https://api.github.com/users/dimddev/subscriptions",
    "organizations_url": "https://api.github.com/users/dimddev/orgs",
    "repos_url": "https://api.github.com/users/dimddev/repos",
    "events_url": "https://api.github.com/users/dimddev/events{/privacy}",
    "received_events_url": "https://api.github.com/users/dimddev/received_events",
    "type": "User",
    "site_admin": "false",
    "name": "Di Mita Hakini",
    "company": "null",
    "blog": "http://nasa.gov",
    "location": "Sofia",
    "email": "targolini@gmail.com",
    "hireable": "null",
    "bio": "null",
    "public_repos": 34,
    "public_gists": 2,
    "followers": 21,
    "following": 12,
    "created_at": "2013-10-23T06:07:30Z",
    "updated_at": "2019-08-24T09:06:41Z",
    "private_gists": 1,
    "total_private_repos": 0,
    "owned_private_repos": 0,
    "disk_usage": 5819,
    "collaborators": 0,
    "two_factor_authentication": "false",
    "plan": {
        "name": "free",
        "space": 976562499,
        "collaborators": 0,
        "private_repos": 10000
    }
}

# Fixture: sample contact response for the same user -- presumably from the
# Freshdesk API (judging by the field set); confirm against the client code.
# Same stringly-typed 'false'/'null' caveat as git_response.
fresh_response = {
    "active": 'false',
    "address": "Sofia",
    "company_id": 'null',
    "view_all_tickets": 'null',
    "deleted": 'false',
    "description": 'null',
    "email": "dmhakini1012@gmail.com",
    "id": 47003672837,
    "job_title": 'null',
    "language": "en",
    "mobile": 'null',
    "name": "Di Mita Hakini",
    "phone": 'null',
    "time_zone": "Baghdad",
    "twitter_id": 'null',
    "custom_fields": {},
    "tags": [],
    "other_emails": [],
    "facebook_id": 'null',
    "created_at": "2019-09-30T14:05:52Z",
    "updated_at": "2019-09-30T14:05:52Z",
    "other_companies": [],
    "unique_external_id": 'null',
    "avatar": 'null'
}
# Fixture: sample GitHub REST API `GET /users/{username}` response.
# NOTE(review): several fields hold the *strings* 'false'/'null' rather than
# real booleans/None ('site_admin', 'company', 'hireable', 'bio',
# 'two_factor_authentication') -- confirm consumers expect string values.
git_response = {
    'login': 'dimddev',
    'id': 57534,
    'node_id': 'MdQ6VXnlc4U3NTM0NDA=',
    'avatar_url': 'https://avatars1.githubusercontent.com/u/5753440?v=4',
    'gravatar_id': '',
    'url': 'https://api.github.com/users/dimddev',
    'html_url': 'https://github.com/dimddev',
    'followers_url': 'https://api.github.com/users/dimddev/followers',
    'following_url': 'https://api.github.com/users/dimddev/following{/other_user}',
    'gists_url': 'https://api.github.com/users/dimddev/gists{/gist_id}',
    'starred_url': 'https://api.github.com/users/dimddev/starred{/owner}{/repo}',
    'subscriptions_url': 'https://api.github.com/users/dimddev/subscriptions',
    'organizations_url': 'https://api.github.com/users/dimddev/orgs',
    'repos_url': 'https://api.github.com/users/dimddev/repos',
    'events_url': 'https://api.github.com/users/dimddev/events{/privacy}',
    'received_events_url': 'https://api.github.com/users/dimddev/received_events',
    'type': 'User',
    'site_admin': 'false',
    'name': 'Di Mita Hakini',
    'company': 'null',
    'blog': 'http://nasa.gov',
    'location': 'Sofia',
    'email': 'targolini@gmail.com',
    'hireable': 'null',
    'bio': 'null',
    'public_repos': 34,
    'public_gists': 2,
    'followers': 21,
    'following': 12,
    'created_at': '2013-10-23T06:07:30Z',
    'updated_at': '2019-08-24T09:06:41Z',
    'private_gists': 1,
    'total_private_repos': 0,
    'owned_private_repos': 0,
    'disk_usage': 5819,
    'collaborators': 0,
    'two_factor_authentication': 'false',
    'plan': {
        'name': 'free',
        'space': 976562499,
        'collaborators': 0,
        'private_repos': 10000
    }
}

# Fixture: sample contact response for the same user -- presumably from the
# Freshdesk API (judging by the field set); confirm against the client code.
# Same stringly-typed 'false'/'null' caveat as git_response.
fresh_response = {
    'active': 'false',
    'address': 'Sofia',
    'company_id': 'null',
    'view_all_tickets': 'null',
    'deleted': 'false',
    'description': 'null',
    'email': 'dmhakini1012@gmail.com',
    'id': 47003672837,
    'job_title': 'null',
    'language': 'en',
    'mobile': 'null',
    'name': 'Di Mita Hakini',
    'phone': 'null',
    'time_zone': 'Baghdad',
    'twitter_id': 'null',
    'custom_fields': {},
    'tags': [],
    'other_emails': [],
    'facebook_id': 'null',
    'created_at': '2019-09-30T14:05:52Z',
    'updated_at': '2019-09-30T14:05:52Z',
    'other_companies': [],
    'unique_external_id': 'null',
    'avatar': 'null'
}
# # user-statistician: Github action for generating a user stats card # # Copyright (c) 2021 Vincent A Cicirello # https://www.cicirello.org/ # # MIT License # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # # Notes on the included themes: # # The light, dark, and dark-dimmed themes are based on # GitHub's themes, and color-palette (see # https://primer.style/css/support/color-system # and https://primer.style/primitives/). # # Specifically, from the link above we use: # * background color (bg): bg.canvasInset # * border color: box.blueBorder # * icons: icon.info # * text: text.primary # * title: bg.canvasInverse # # Notes to Potential Contributors: # # (1) For those who want to contribute a theme, # please check the combination of your background # color with text color, and background with title # color for accessibility at this site, # https://colorable.jxnblk.com/, and make sure the # combination has a rating of at least AA. 
# You can also simply run the test cases, which will automatically
# verify that the text color and the background color have
# a contrast ratio of at least 4.5:1, which is AA.
# The contrast ratio between the background and title
# colors should also be at least 4.5:1 (also enforced by test cases).
#
# (2) Before contributing a new color theme, ask yourself
#     whether it will likely have broad appeal or a narrow
#     audience. For example, if it is just the color palette
#     of your personal website or blog, then a theme may not
#     be necessary. You can simply use the colors input for
#     your usage.
#
# (3) Is it similar to one of the existing themes? Or does it
#     provide users with something truly new to choose from?
#
# (4) Please add the new theme alphabetized by theme name.
#
# (5) Include a comment with your GitHub userid indicating you
#     are the contributor of the theme (see the existing themes).
#
# (6) You can use either 3-digit hex, 6-digit hex, or named colors.
#
# (7) The existing test cases will automatically test that your
#     colors are valid hex, or valid named colors.
#     See https://developer.mozilla.org/en-US/docs/Web/CSS/color_value
#     for list of named colors.

# Theme name -> color roles used when rendering the stats card:
# background ("bg"), border, icon fill, body text, and title text.
colorMapping = {
    # Contributor: cicirello (part of initial theme set)
    "dark": {
        "bg": "#090c10",
        "border": "#0d419d",
        "icons": "#79c0ff",
        "text": "#c9d1d9",
        "title": "#f0f6fc"
    },
    # Contributor: cicirello (part of initial theme set)
    "dark-dimmed": {
        "bg": "#1e2228",
        "border": "#1b4b91",
        "icons": "#6cb6ff",
        "text": "#adbac7",
        "title": "#cdd9e5"
    },
    # Contributor: cicirello (part of initial theme set)
    "light": {
        "bg": "#f6f8fa",
        "border": "#c8e1ff",
        "icons": "#0366d6",
        "text": "#24292e",
        "title": "#24292e"
    }
}
# Theme name -> color roles for the stats card: background ('bg'), border,
# icon fill ('icons'), body text ('text'), and title text ('title').
color_mapping = {
    'dark': {
        'bg': '#090c10',
        'border': '#0d419d',
        'icons': '#79c0ff',
        'text': '#c9d1d9',
        'title': '#f0f6fc',
    },
    'dark-dimmed': {
        'bg': '#1e2228',
        'border': '#1b4b91',
        'icons': '#6cb6ff',
        'text': '#adbac7',
        'title': '#cdd9e5',
    },
    'light': {
        'bg': '#f6f8fa',
        'border': '#c8e1ff',
        'icons': '#0366d6',
        'text': '#24292e',
        'title': '#24292e',
    },
}
class Node:
    """A binary-tree node holding an element plus parent/left/right links.

    ``element`` is expected to expose ``toString()`` and ``toKeyString()``
    (see toString/toKeyString below); any of the links may be None.
    """

    def __init__(self, element, parent, left_child, right_child):
        self.element = element
        self.parent = parent
        self.left_child = left_child
        self.right_child = right_child

    def getElement(self):
        return self.element

    def getParent(self):
        return self.parent

    def getLeftChild(self):
        return self.left_child

    def getRightChild(self):
        return self.right_child

    def setElement(self, element):
        self.element = element

    def setParent(self, parent):
        self.parent = parent

    def setLeftChild(self, node):
        self.left_child = node

    def setRightChild(self, node):
        self.right_child = node

    def getSibling(self):
        # The other child of this node's parent; assumes a parent exists.
        parent = self.getParent()
        if parent.getLeftChild() == self:
            return parent.getRightChild()
        return parent.getLeftChild()

    def hasLeftChild(self):
        return self.getLeftChild() is not None

    def hasRightChild(self):
        return self.getRightChild() is not None

    def hasParent(self):
        return self.getParent() is not None

    def isLeftChild(self):
        # A root (no parent) is neither a left nor a right child.
        if not self.hasParent():
            return False
        return self.getParent().getLeftChild() == self

    def isRightChild(self):
        if not self.hasParent():
            return False
        return self.getParent().getRightChild() == self

    def isInternal(self):
        # Internal = has at least one child.
        return self.hasLeftChild() or self.hasRightChild()

    def isExternal(self):
        return not self.isInternal()

    def hasElement(self):
        return self.getElement() is not None

    def toString(self):
        """Return the element's string form, or None for an empty node."""
        # BUG FIX: this read `returnNone` (a bare name -> NameError at
        # runtime) instead of `return None`.
        if not self.hasElement():
            return None
        return self.getElement().toString()

    def toKeyString(self):
        """Return only the element's key string, or None for an empty node."""
        if not self.hasElement():
            return None
        return self.getElement().toKeyString()
class Node:
    """A binary-tree node (snake_case API) with parent/left/right links.

    ``element`` is expected to expose ``toString()`` and ``toKeyString()``;
    any of the links may be None.

    BUG FIX: the method bodies previously still called the old camelCase
    names (self.getParent(), self.hasElement(), ...), which no longer exist
    on this class, so nearly every method raised AttributeError.  They now
    call the snake_case methods defined here.  The `returnNone` typo in
    to_string (a NameError) is also fixed, and the dead commented-out
    hasNonExternalNodeChild string literal has been removed.
    """

    def __init__(self, element, parent, left_child, right_child):
        self.element = element
        self.parent = parent
        self.left_child = left_child
        self.right_child = right_child

    def get_element(self):
        return self.element

    def get_parent(self):
        return self.parent

    def get_left_child(self):
        return self.left_child

    def get_right_child(self):
        return self.right_child

    def set_element(self, element):
        self.element = element

    def set_parent(self, parent):
        self.parent = parent

    def set_left_child(self, node):
        self.left_child = node

    def set_right_child(self, node):
        self.right_child = node

    def get_sibling(self):
        # The other child of this node's parent; assumes a parent exists.
        parent = self.get_parent()
        if parent.get_left_child() == self:
            return parent.get_right_child()
        return parent.get_left_child()

    def has_left_child(self):
        return self.get_left_child() is not None

    def has_right_child(self):
        return self.get_right_child() is not None

    def has_parent(self):
        return self.get_parent() is not None

    def is_left_child(self):
        # A root (no parent) is neither a left nor a right child.
        if not self.has_parent():
            return False
        return self.get_parent().get_left_child() == self

    def is_right_child(self):
        if not self.has_parent():
            return False
        return self.get_parent().get_right_child() == self

    def is_internal(self):
        # Internal = has at least one child.
        return self.has_left_child() or self.has_right_child()

    def is_external(self):
        return not self.is_internal()

    def has_element(self):
        return self.get_element() is not None

    def to_string(self):
        """Return the element's string form, or None for an empty node."""
        if not self.has_element():
            return None
        return self.get_element().toString()

    def to_key_string(self):
        """Return only the element's key string, or None for an empty node."""
        if not self.has_element():
            return None
        return self.get_element().toKeyString()
# These default settings initally apply to all installations of the config_app. # # This file _is_ and should remain under version control. # DEBUG = False SQLALCHEMY_ECHO = False SECRET_KEY = b'default_SECRET_KEY' # # IMPORTANT: Do _not_ edit this file. # Instead, over-ride with settings in the instance/config.py file. #
# Default application settings.
# NOTE(review): these names are lowercase; Flask-style config loaders
# (from_object / from_pyfile) only pick up UPPERCASE attributes --
# confirm lowercase naming is intended for whatever consumes this module.
debug = False
sqlalchemy_echo = False
# Placeholder secret; must be overridden for any real deployment.
secret_key = b'default_SECRET_KEY'
class Solution:
    """LeetCode 299 'Bulls and Cows'."""

    def getHint(self, secret: str, guess: str) -> str:
        """Return the hint string 'xAyB' for a Bulls-and-Cows round.

        Bulls (A) are digits that match in value and position; cows (B)
        are digits present in both strings but in the wrong position.

        :param secret: the secret digit string
        :param guess: the guessed digit string (same length as secret)
        :return: the hint formatted as '{bulls}A{cows}B'

        This replaces the original O(n^2) approach (repeated list
        slicing inside a while loop) with a single O(n) pass.
        """
        # Bulls: positions where the two strings agree.
        bulls = sum(s == g for s, g in zip(secret, guess))
        # Cows: the multiset overlap of digits, minus the bulls already
        # counted (a matched position contributes to both tallies).
        secret_counts = collections.Counter(secret)
        guess_counts = collections.Counter(guess)
        overlap = sum(min(count, guess_counts[digit])
                      for digit, count in secret_counts.items())
        return '{}A{}B'.format(bulls, overlap - bulls)
class Solution:
    """LeetCode 299 'Bulls and Cows' (snake_case variant)."""

    def get_hint(self, secret: str, guess: str) -> str:
        """Return the hint string 'xAyB' for a Bulls-and-Cows round.

        Bulls (the 'A' count) are digits matching in value and position;
        cows (the 'B' count) match in value only.

        BUG FIX: the final format() previously referenced the undefined
        names A and B (left over from a rename), raising NameError on
        every call; it now uses the local counters a and b.
        """
        index = 0
        secret = list(secret)
        guess = list(guess)
        a = 0
        # Count bulls, deleting each matched position from both lists so
        # it cannot also be counted as a cow below.
        while index < len(secret):
            if secret[index] == guess[index]:
                secret = secret[:index] + secret[index + 1:]
                guess = guess[:index] + guess[index + 1:]
                a += 1
            else:
                index += 1
        # Count cows: remaining guess digits that still have an unmatched
        # counterpart somewhere in the remaining secret digits.
        b = 0
        di = collections.defaultdict(int)
        for s in secret:
            di[s] += 1
        for g in guess:
            if di[g] > 0:
                di[g] -= 1
                b += 1
        return '{}A{}B'.format(a, b)
# Meetup place filters: each entry is a {'country', 'location'} query.
COLOMBIA_PLACES_FILTERS = [
    # Colombia
    {'country': 'CO', 'location': 'Bogota'},
    {'country': 'CO', 'location': 'Medellin'},
    # TODO: solve issue, meetup is not returning results for Cali even though a Django group actually exists
    {'country': 'CO', 'location': 'Cali'},
    {'country': 'CO', 'location': 'Barranquilla'},
    {'country': 'CO', 'location': 'Santa marta'},
    {'country': 'CO', 'location': 'Pasto'},
]

LATAM_PLACES_FILTERS = [
    # Argentina
    {'country': 'AR', 'location': 'Buenos Aires'},
    {'country': 'AR', 'location': 'Cordoba'},
    # TODO: solve issue, when this filter is enabled, US groups are being returned. Only AR groups should be returned
    #{'country': 'AR', 'location': 'La Plata'},
    # Bolivia
    {'country': 'BO', 'location': 'Santa Cruz de la Sierra'},
    {'country': 'BO', 'location': 'El Alto'},
    {'country': 'BO', 'location': 'La Paz'},
    {'country': 'BO', 'location': 'Cochabamba'},
    # Brazil
    {'country': 'BR', 'location': 'sao paulo'},
    {'country': 'BR', 'location': 'brasilia'},
    # Chile
    {'country': 'CL', 'location': 'Santiago'},
    # Costa Rica
    # TODO: solve issue, when this filter is enabled, US groups are being returned. Only CR groups should be returned
    # {'country': 'CR', 'location': 'San Jose'},
    # Cuba
    # TODO: solve issue, when this filter is enabled, US groups are being returned. Only CU groups should be returned
    # {'country': 'CU', 'location': 'Habana'},
    # Ecuador
    {'country': 'EC', 'location': 'quito'},
    # Guatemala
    {'country': 'GT', 'location': 'Guatemala'},
    # Honduras
    {'country': 'HN', 'location': 'Tegucigalpa'},
    # Mexico
    {'country': 'MX', 'location': 'ciudad de mexico'},
    {'country': 'MX', 'location': 'Ecatepec'},
    {'country': 'MX', 'location': 'Guadalajara'},
    {'country': 'MX', 'location': 'Puebla'},
    {'country': 'MX', 'location': 'Monterrey'},
    # Paraguay
    {'country': 'PY', 'location': 'asuncion'},
    # Peru
    {'country': 'PE', 'location': 'Lima'},
    # Uruguay
    {'country': 'UY', 'location': 'Montevideo'},
    # El Salvador
    {'country': 'SV', 'location': 'San Salvador'},
]

# Merge the Colombia filters into the LATAM set.
LATAM_PLACES_FILTERS.extend(COLOMBIA_PLACES_FILTERS)

# Keyword filters that contain the criteria a group must match to be
# considered in the statistics.
KEYWORD_FILTERS = [
    # general keywords
    "Django",
    "Python",
    "PyData",
    "PyLadies",
    "Py",
    # special group names
    "Grupy-SP",
]

# Display names for the supported location codes.
LOCATION_CODES = {
    "CO": "Colombia",
    "LATAM": "LATAM"
}
# Meetup place filters: each entry is a {'country', 'location'} query.
colombia_places_filters = [
    {'country': 'CO', 'location': 'Bogota'},
    {'country': 'CO', 'location': 'Medellin'},
    {'country': 'CO', 'location': 'Cali'},
    {'country': 'CO', 'location': 'Barranquilla'},
    {'country': 'CO', 'location': 'Santa marta'},
    {'country': 'CO', 'location': 'Pasto'},
]

# Latin America filters (the Colombia entries are appended below).
latam_places_filters = [
    {'country': 'AR', 'location': 'Buenos Aires'},
    {'country': 'AR', 'location': 'Cordoba'},
    {'country': 'BO', 'location': 'Santa Cruz de la Sierra'},
    {'country': 'BO', 'location': 'El Alto'},
    {'country': 'BO', 'location': 'La Paz'},
    {'country': 'BO', 'location': 'Cochabamba'},
    {'country': 'BR', 'location': 'sao paulo'},
    {'country': 'BR', 'location': 'brasilia'},
    {'country': 'CL', 'location': 'Santiago'},
    {'country': 'EC', 'location': 'quito'},
    {'country': 'GT', 'location': 'Guatemala'},
    {'country': 'HN', 'location': 'Tegucigalpa'},
    {'country': 'MX', 'location': 'ciudad de mexico'},
    {'country': 'MX', 'location': 'Ecatepec'},
    {'country': 'MX', 'location': 'Guadalajara'},
    {'country': 'MX', 'location': 'Puebla'},
    {'country': 'MX', 'location': 'Monterrey'},
    {'country': 'PY', 'location': 'asuncion'},
    {'country': 'PE', 'location': 'Lima'},
    {'country': 'UY', 'location': 'Montevideo'},
    {'country': 'SV', 'location': 'San Salvador'},
]

# BUG FIX: this previously called
# LATAM_PLACES_FILTERS.extend(COLOMBIA_PLACES_FILTERS) using the old
# uppercase names, which no longer exist after the rename, so importing
# this module raised NameError.
latam_places_filters.extend(colombia_places_filters)

# Keyword filters that contain the criteria a group must match to be
# considered in the statistics.
keyword_filters = [
    # general keywords
    'Django',
    'Python',
    'PyData',
    'PyLadies',
    'Py',
    # special group names
    'Grupy-SP',
]

# Display names for the supported location codes.
location_codes = {'CO': 'Colombia', 'LATAM': 'LATAM'}
# -*- coding: utf-8 -*- n = int(input()) t = [] for _ in range(n): t.append(int(input())) for i in range(n): if i > 0 and i < n - 1: print(sum(t[i - 1:i + 2])) elif i == 0: print(sum(t[:2])) else: print(sum(t[i - 1:]))
# Read n integers from stdin, then for each index print the sum of the
# value and its immediate neighbours (the window is clipped at both ends).
n = int(input())
t = []
for _ in range(n):
    t.append(int(input()))
for i in range(n):
    if i > 0 and i < n - 1:
        # Interior element: previous + current + next.
        print(sum(t[i - 1:i + 2]))
    elif i == 0:
        # Left edge: only the first two values exist in the window.
        print(sum(t[:2]))
    else:
        # Right edge: only the last two values exist in the window.
        print(sum(t[i - 1:]))
# Read N points from stdin; print "Yes" if any three of them are
# collinear, otherwise "No".
N = int(input())
lst = [list(map(int, input().split())) for i in range(N)]
# Try every unordered triple of points.
for i in range(N - 2):
    for j in range(i + 1, N - 1):
        for k in range(j + 1, N):
            x0, y0 = lst[i]
            x1, y1 = lst[j]
            x2, y2 = lst[k]
            # Translate so point k is at the origin; the triple is
            # collinear exactly when the cross product of the two
            # remaining vectors is zero.
            x0 -= x2
            x1 -= x2
            y0 -= y2
            y1 -= y2
            if x0 * y1 == x1 * y0:
                print("Yes")
                exit()
print("No")
# Read n points from stdin; print 'Yes' if any three of them are
# collinear, otherwise 'No'.
# BUG FIX: the input count was bound to `n` but every later use said `N`
# (left over from a rename), raising NameError; the name is now used
# consistently.
n = int(input())
lst = [list(map(int, input().split())) for i in range(n)]
# Try every unordered triple of points.
for i in range(n - 2):
    for j in range(i + 1, n - 1):
        for k in range(j + 1, n):
            (x0, y0) = lst[i]
            (x1, y1) = lst[j]
            (x2, y2) = lst[k]
            # Translate so the third point is at the origin; the triple is
            # collinear exactly when the cross product of the two remaining
            # vectors is zero.
            x0 -= x2
            x1 -= x2
            y0 -= y2
            y1 -= y2
            if x0 * y1 == x1 * y0:
                print('Yes')
                exit()
print('No')
class ConsoleLine:
    """Holds the state of a single console line (all fields start unset)."""

    # Text content of the line; None until populated elsewhere.
    body = None
    # Presumably the character/cursor position within the line --
    # TODO confirm with the code that reads this attribute.
    current_character = None
    # Line category tag.  NOTE: intentionally shadows the builtin `type`
    # as an attribute name (harmless at class scope).
    type = None
class Consoleline:
    """Holds the state of a single console line (all fields start unset)."""

    # Text content of the line; None until populated elsewhere.
    body = None
    # Presumably the character/cursor position within the line --
    # TODO confirm with the code that reads this attribute.
    current_character = None
    # Line category tag.  NOTE: intentionally shadows the builtin `type`
    # as an attribute name (harmless at class scope).
    type = None
class KeyExReturn:
    """Base type for key-exchange API results: a message plus a status code.

    Calling an instance yields the ``(message, status_code)`` pair.
    """

    def __init__(self):
        self._status_code = None
        self._msg = None

    def __call__(self):
        return self._msg, self._status_code

    def status_code(self):
        """The HTTP status code of this result."""
        return self._status_code

    def message(self):
        """The message/payload of this result."""
        return self._msg


class OK(KeyExReturn):
    """200: success; wraps the response data."""

    def __init__(self, data):
        super().__init__()
        self._status_code = 200
        self._msg = data


class Success(KeyExReturn):
    """201: resource created; wraps the response data."""

    def __init__(self, data):
        super().__init__()
        self._status_code = 201
        self._msg = data


class NonUniqueKey(KeyExReturn):
    """400: a device with the given ID is already registered."""

    def __init__(self, dev_id):
        super().__init__()
        self._status_code = 400
        self._msg = f"Device ID {dev_id} already exists; and could not be added."


class Forbidden(KeyExReturn):
    """403: the server is not accepting registrations."""

    def __init__(self):
        super().__init__()
        self._status_code = 403
        self._msg = "Server is inactive and not accepting new registration requests."


class MissingJSON(KeyExReturn):
    """400: required values absent from the received JSON."""

    def __init__(self, data):
        super().__init__()
        self._status_code = 400
        self._msg = f"Missing or invalid values in JSON data received:\n{data}"


class BadJSON(KeyExReturn):
    """400: the received JSON could not be parsed."""

    def __init__(self, data):
        super().__init__()
        self._status_code = 400
        self._msg = f"Malformed JSON data received:\n{data}"


class DatabaseError(KeyExReturn):
    """500: a database error, with detail."""

    def __init__(self, data):
        super().__init__()
        self._status_code = 500
        self._msg = f"A database error occurred - {data}"


class DatabaseConnectionError(KeyExReturn):
    """500: a database error without detail exposed to the caller."""

    def __init__(self):
        super().__init__()
        self._status_code = 500
        self._msg = "An internal database error has occurred..."


class SignatureVerificationError(KeyExReturn):
    """400: signature check failed."""

    def __init__(self):
        super().__init__()
        self._status_code = 400
        self._msg = "The signature did not verify"


class CSRVerificationError(KeyExReturn):
    """400: CSR data check failed."""

    def __init__(self):
        super().__init__()
        self._status_code = 400
        self._msg = "The CSR data did not verify"


class IDNotFound(KeyExReturn):
    """404: unknown device identity."""

    def __init__(self):
        super().__init__()
        self._status_code = 404
        self._msg = "The specified device identity was not found"
class Keyexreturn:
    """Base type for key-exchange API results: a message plus a status code.

    Calling an instance yields the ``(message, status_code)`` pair.

    BUG FIX: every subclass below previously inherited from ``KeyExReturn``,
    a name that no longer exists after the base class was renamed, so
    importing this module raised NameError.  They now inherit from
    ``Keyexreturn``.
    """

    def __init__(self):
        self._status_code = None
        self._msg = None

    def __call__(self):
        return (self._msg, self._status_code)

    def status_code(self):
        """The HTTP status code of this result."""
        return self._status_code

    def message(self):
        """The message/payload of this result."""
        return self._msg


class Ok(Keyexreturn):
    """200: success; wraps the response data."""

    def __init__(self, data):
        super().__init__()
        self._status_code = 200
        self._msg = data


class Success(Keyexreturn):
    """201: resource created; wraps the response data."""

    def __init__(self, data):
        super().__init__()
        self._status_code = 201
        self._msg = data


class Nonuniquekey(Keyexreturn):
    """400: a device with the given ID is already registered."""

    def __init__(self, dev_id):
        super().__init__()
        self._status_code = 400
        self._msg = 'Device ID {} already exists; and could not be added.'.format(dev_id)


class Forbidden(Keyexreturn):
    """403: the server is not accepting registrations."""

    def __init__(self):
        super().__init__()
        self._status_code = 403
        self._msg = 'Server is inactive and not accepting new registration requests.'


class Missingjson(Keyexreturn):
    """400: required values absent from the received JSON."""

    def __init__(self, data):
        super().__init__()
        self._status_code = 400
        self._msg = 'Missing or invalid values in JSON data received:\n{}'.format(data)


class Badjson(Keyexreturn):
    """400: the received JSON could not be parsed."""

    def __init__(self, data):
        super().__init__()
        self._status_code = 400
        self._msg = 'Malformed JSON data received:\n{}'.format(data)


class Databaseerror(Keyexreturn):
    """500: a database error, with detail."""

    def __init__(self, data):
        super().__init__()
        self._status_code = 500
        self._msg = 'A database error occurred - {}'.format(data)


class Databaseconnectionerror(Keyexreturn):
    """500: a database error without detail exposed to the caller."""

    def __init__(self):
        super().__init__()
        self._status_code = 500
        self._msg = 'An internal database error has occurred...'


class Signatureverificationerror(Keyexreturn):
    """400: signature check failed."""

    def __init__(self):
        super().__init__()
        self._status_code = 400
        self._msg = 'The signature did not verify'


class Csrverificationerror(Keyexreturn):
    """400: CSR data check failed."""

    def __init__(self):
        super().__init__()
        self._status_code = 400
        self._msg = 'The CSR data did not verify'


class Idnotfound(Keyexreturn):
    """404: unknown device identity."""

    def __init__(self):
        super().__init__()
        self._status_code = 404
        self._msg = 'The specified device identity was not found'
# Radix sort in Python (LSD, base 10, non-negative integers).

def counting_sort(array, place):
    """Stable in-place counting sort of `array` by the digit selected by
    `place` (1 = units, 10 = tens, ...)."""
    size = len(array)
    output = [0] * size
    count = [0] * 10

    # Histogram of the digit at `place`.
    for value in array:
        count[(value // place) % 10] += 1

    # Prefix sums: count[d] becomes the end of bucket d in the output.
    for d in range(1, 10):
        count[d] += count[d - 1]

    # Walk backwards so equal digits keep their relative order (the
    # stability that radix sort depends on).
    for i in range(size - 1, -1, -1):
        digit = (array[i] // place) % 10
        count[digit] -= 1
        output[count[digit]] = array[i]

    array[:] = output


def radix_sort(array):
    """Sort a list of non-negative integers in place, least significant
    digit first."""
    # Guard the empty list: max() below would raise ValueError.
    if not array:
        return
    max_element = max(array)

    place = 1
    while max_element // place > 0:
        counting_sort(array, place)
        place *= 10


data = [121, 432, 564, 23, 1, 45, 788]
radix_sort(data)
print(data)
def counting_sort(array, place):
    """Stably reorder `array` in place by the digit selected by `place`
    (1 = units, 10 = tens, ...)."""
    size = len(array)
    output = [0] * size
    count = [0] * 10

    # Tally each digit, then turn the tallies into bucket end positions.
    for value in array:
        count[value // place % 10] += 1
    for d in range(1, 10):
        count[d] += count[d - 1]

    # Fill output back-to-front so equal digits keep their order.
    for i in reversed(range(size)):
        digit = array[i] // place % 10
        output[count[digit] - 1] = array[i]
        count[digit] -= 1

    for i, value in enumerate(output):
        array[i] = value


def radix_sort(array):
    """LSD radix sort (base 10) of non-negative integers, in place."""
    largest = max(array)
    place = 1
    while largest // place > 0:
        counting_sort(array, place)
        place *= 10


data = [121, 432, 564, 23, 1, 45, 788]
radix_sort(data)
print(data)
class Fields:
    """Ordered CSV column names for the transaction export."""

    # Rearrange the field order at will.
    # If renaming or adding additional fields, modifying target.csv is required.
    values = ['Timestamp', 'Transaction Id', 'Payment ID', 'Note',
              'Receive/Send Address', 'Debit', 'Credit', 'Network Fee',
              'Balance', 'Currency']
class Fields: values = ['Timestamp', 'Transaction Id', 'Payment ID', 'Note', 'Receive/Send Address', 'Debit', 'Credit', 'Network Fee', 'Balance', 'Currency']
TABLES = ["departments", "dept_manager", "dept_emp", "titles", "salaries"] QUERYLIST_CREATE_STRUCTURE = [ "DROP TABLE IF EXISTS dept_emp, dept_manager, titles, salaries, employees, departments;", """CREATE TABLE employees ( emp_no INT NOT NULL, birth_date DATE NOT NULL, first_name VARCHAR(14) NOT NULL, last_name VARCHAR(16) NOT NULL, gender ENUM ('M','F') NOT NULL, hire_date DATE NOT NULL, PRIMARY KEY (emp_no) );""", """CREATE TABLE departments ( dept_no CHAR(4) NOT NULL, dept_name VARCHAR(40) NOT NULL, PRIMARY KEY (dept_no), UNIQUE KEY (dept_name) );""", """CREATE TABLE dept_manager ( emp_no INT NOT NULL, dept_no CHAR(4) NOT NULL, from_date DATE NOT NULL, to_date DATE NOT NULL, FOREIGN KEY (emp_no) REFERENCES employees (emp_no) ON DELETE CASCADE, FOREIGN KEY (dept_no) REFERENCES departments (dept_no) ON DELETE CASCADE, PRIMARY KEY (emp_no,dept_no) );""", """CREATE TABLE dept_emp ( emp_no INT NOT NULL, dept_no CHAR(4) NOT NULL, from_date DATE NOT NULL, to_date DATE NOT NULL, FOREIGN KEY (emp_no) REFERENCES employees (emp_no) ON DELETE CASCADE, FOREIGN KEY (dept_no) REFERENCES departments (dept_no) ON DELETE CASCADE, PRIMARY KEY (emp_no,dept_no) );""", """CREATE TABLE titles ( emp_no INT NOT NULL, title VARCHAR(50) NOT NULL, from_date DATE NOT NULL, to_date DATE, FOREIGN KEY (emp_no) REFERENCES employees (emp_no) ON DELETE CASCADE, PRIMARY KEY (emp_no,title, from_date) );""", """CREATE TABLE salaries ( emp_no INT NOT NULL, salary INT NOT NULL, from_date DATE NOT NULL, to_date DATE NOT NULL, FOREIGN KEY (emp_no) REFERENCES employees (emp_no) ON DELETE CASCADE, PRIMARY KEY (emp_no, from_date) );""" ] QUERY_DROP_STRUCTURE = "DROP TABLE IF EXISTS dept_emp, dept_manager, titles, salaries, departments;" QUERYLIST_CREATE_DUMMY_STRUCTURE = [ "CREATE TABLE dummy_departments SELECT * FROM departments;", "CREATE TABLE dummy_dept_manager SELECT * FROM dept_manager;", "CREATE TABLE dummy_dept_emp SELECT * FROM dept_emp;", "CREATE TABLE dummy_titles SELECT * FROM titles;", 
"CREATE TABLE dummy_salaries SELECT * FROM salaries;" ] QUERY_SELECT_SIMPLE = """SELECT * FROM `employees` JOIN `titles` ON `titles`.`emp_no` = `employees`.`emp_no` WHERE `gender`='F' AND `title`='Engineer'; """ QUERY_SELECT_SORT = "SELECT * FROM employees ORDER BY `last_name`;" QUERY_SELECT_JOIN = """SELECT `first_name`, `last_name`, `title` FROM `employees` JOIN `titles` ON `employees`.`emp_no` = `titles`.`emp_no` WHERE `from_date` < '2000-01-01' AND `to_date` > '2000-01-01'; """ QUERY_SELECT_GROUP = """SELECT `departments`.`dept_name`, COUNT(1) FROM `dept_emp` JOIN `departments` ON `departments`.`dept_no` = `dept_emp`.`dept_no` GROUP BY `departments`.`dept_no`; """ QUERY_SELECT_AGGREGATE = """SELECT `employees`.`first_name`, `employees`.`last_name`, SUM(`salaries`.`salary`) AS SumSalary, AVG(`salaries`.`salary`) AS AvgSalary, MAX(`salaries`.`salary`) AS MaxSalary, MIN(`salaries`.`salary`) AS MinSalary FROM `salaries` JOIN `employees` ON `employees`.`emp_no` = `salaries`.`emp_no` GROUP BY `employees`.`emp_no` ORDER BY `AvgSalary` DESC; """ QUERY_DIS_FULL_GROUP_BY = "SET SESSION sql_mode = TRIM(BOTH ',' FROM REPLACE(@@SESSION.sql_mode, 'ONLY_FULL_GROUP_BY', ''));" QUERY_CHANGE_ENGINE_MYISAM = "SET default_storage_engine='MyISAM';" QUERY_CHANGE_ENGINE_INNODB = "SET default_storage_engine='InnoDB';" QUERY_MULTIPLY_EMPLOYEES = "INSERT INTO `employees` SELECT * FROM `employees`;" def query_select_string(args): return f"""SELECT `first_name`, `last_name` FROM `employees` WHERE `first_name` LIKE '{args[0]}%' AND `last_name` LIKE '{args[1]}%'; """ def make_query_on_dummy(query: str): for table in TABLES: query = query.replace(table, f"dummy_{table}") return query
tables = ['departments', 'dept_manager', 'dept_emp', 'titles', 'salaries'] querylist_create_structure = ['DROP TABLE IF EXISTS dept_emp, dept_manager, titles, salaries, employees, departments;', "CREATE TABLE employees (\n emp_no INT NOT NULL,\n birth_date DATE NOT NULL,\n first_name VARCHAR(14) NOT NULL,\n last_name VARCHAR(16) NOT NULL,\n gender ENUM ('M','F') NOT NULL, \n hire_date DATE NOT NULL,\n PRIMARY KEY (emp_no)\n );", 'CREATE TABLE departments (\n dept_no CHAR(4) NOT NULL,\n dept_name VARCHAR(40) NOT NULL,\n PRIMARY KEY (dept_no),\n UNIQUE KEY (dept_name)\n );', 'CREATE TABLE dept_manager (\n emp_no INT NOT NULL,\n dept_no CHAR(4) NOT NULL,\n from_date DATE NOT NULL,\n to_date DATE NOT NULL,\n FOREIGN KEY (emp_no) REFERENCES employees (emp_no) ON DELETE CASCADE,\n FOREIGN KEY (dept_no) REFERENCES departments (dept_no) ON DELETE CASCADE,\n PRIMARY KEY (emp_no,dept_no)\n );', 'CREATE TABLE dept_emp (\n emp_no INT NOT NULL,\n dept_no CHAR(4) NOT NULL,\n from_date DATE NOT NULL,\n to_date DATE NOT NULL,\n FOREIGN KEY (emp_no) REFERENCES employees (emp_no) ON DELETE CASCADE,\n FOREIGN KEY (dept_no) REFERENCES departments (dept_no) ON DELETE CASCADE,\n PRIMARY KEY (emp_no,dept_no)\n );', 'CREATE TABLE titles (\n emp_no INT NOT NULL,\n title VARCHAR(50) NOT NULL,\n from_date DATE NOT NULL,\n to_date DATE,\n FOREIGN KEY (emp_no) REFERENCES employees (emp_no) ON DELETE CASCADE,\n PRIMARY KEY (emp_no,title, from_date)\n );', 'CREATE TABLE salaries (\n emp_no INT NOT NULL,\n salary INT NOT NULL,\n from_date DATE NOT NULL,\n to_date DATE NOT NULL,\n FOREIGN KEY (emp_no) REFERENCES employees (emp_no) ON DELETE CASCADE,\n PRIMARY KEY (emp_no, from_date)\n );'] query_drop_structure = 'DROP TABLE IF EXISTS dept_emp, dept_manager, titles, salaries, departments;' querylist_create_dummy_structure = ['CREATE TABLE dummy_departments SELECT * FROM departments;', 'CREATE TABLE dummy_dept_manager SELECT * FROM dept_manager;', 'CREATE TABLE dummy_dept_emp SELECT * FROM 
dept_emp;', 'CREATE TABLE dummy_titles SELECT * FROM titles;', 'CREATE TABLE dummy_salaries SELECT * FROM salaries;'] query_select_simple = "SELECT * FROM `employees` \nJOIN `titles` ON `titles`.`emp_no` = `employees`.`emp_no`\nWHERE `gender`='F' AND `title`='Engineer';\n" query_select_sort = 'SELECT * FROM employees ORDER BY `last_name`;' query_select_join = "SELECT `first_name`, `last_name`, `title` FROM `employees`\nJOIN `titles` ON `employees`.`emp_no` = `titles`.`emp_no`\nWHERE `from_date` < '2000-01-01' AND `to_date` > '2000-01-01';\n" query_select_group = 'SELECT `departments`.`dept_name`, COUNT(1) FROM `dept_emp`\nJOIN `departments` ON `departments`.`dept_no` = `dept_emp`.`dept_no`\nGROUP BY `departments`.`dept_no`;\n' query_select_aggregate = 'SELECT `employees`.`first_name`, `employees`.`last_name`, \nSUM(`salaries`.`salary`) AS SumSalary,\nAVG(`salaries`.`salary`) AS AvgSalary, \nMAX(`salaries`.`salary`) AS MaxSalary, \nMIN(`salaries`.`salary`) AS MinSalary\nFROM `salaries`\nJOIN `employees` ON `employees`.`emp_no` = `salaries`.`emp_no`\nGROUP BY `employees`.`emp_no`\nORDER BY `AvgSalary` DESC; \n' query_dis_full_group_by = "SET SESSION sql_mode = TRIM(BOTH ',' FROM REPLACE(@@SESSION.sql_mode, 'ONLY_FULL_GROUP_BY', ''));" query_change_engine_myisam = "SET default_storage_engine='MyISAM';" query_change_engine_innodb = "SET default_storage_engine='InnoDB';" query_multiply_employees = 'INSERT INTO `employees` SELECT * FROM `employees`;' def query_select_string(args): return f"SELECT `first_name`, `last_name` FROM `employees` \n WHERE `first_name` LIKE '{args[0]}%' AND `last_name` LIKE '{args[1]}%';\n " def make_query_on_dummy(query: str): for table in TABLES: query = query.replace(table, f'dummy_{table}') return query
""" Follow up for "Unique Paths": Now consider if some obstacles are added to the grids. How many unique paths would there be? An obstacle and empty space is marked as 1 and 0 respectively in the grid. For example, There is one obstacle in the middle of a 3x3 grid as illustrated below. [ [0,0,0], [0,1,0], [0,0,0] ] The total number of unique paths is 2. Note: m and n will be at most 100. """ class Solution: # @param obstacleGrid, a list of lists of integers # @return an integer def uniquePathsWithObstacles(self, obstacleGrid): n = len(obstacleGrid) m = len(obstacleGrid[0]) t = [[-1 for i in range(m)] for j in range(n)] return self.unique_paths(obstacleGrid, m - 1, n - 1, t) def unique_paths(self, grid, x, y, t): if x == 0 and y == 0: t[y][x] = 1 if grid[y][x] == 0 else 0 return t[y][x] elif grid[y][x] == 1: t[y][x] = 0 return t[y][x] elif t[y][x] != -1: return t[y][x] elif x > 0 and y == 0: t[y][x] = self.unique_paths(grid, x - 1, y, t) return t[y][x] elif y > 0 and x == 0: t[y][x] = self.unique_paths(grid, x, y - 1, t) return t[y][x] else: a = self.unique_paths(grid, x - 1, y, t) b = self.unique_paths(grid, x, y - 1, t) t[y][x] = a + b return t[y][x]
""" Follow up for "Unique Paths": Now consider if some obstacles are added to the grids. How many unique paths would there be? An obstacle and empty space is marked as 1 and 0 respectively in the grid. For example, There is one obstacle in the middle of a 3x3 grid as illustrated below. [ [0,0,0], [0,1,0], [0,0,0] ] The total number of unique paths is 2. Note: m and n will be at most 100. """ class Solution: def unique_paths_with_obstacles(self, obstacleGrid): n = len(obstacleGrid) m = len(obstacleGrid[0]) t = [[-1 for i in range(m)] for j in range(n)] return self.unique_paths(obstacleGrid, m - 1, n - 1, t) def unique_paths(self, grid, x, y, t): if x == 0 and y == 0: t[y][x] = 1 if grid[y][x] == 0 else 0 return t[y][x] elif grid[y][x] == 1: t[y][x] = 0 return t[y][x] elif t[y][x] != -1: return t[y][x] elif x > 0 and y == 0: t[y][x] = self.unique_paths(grid, x - 1, y, t) return t[y][x] elif y > 0 and x == 0: t[y][x] = self.unique_paths(grid, x, y - 1, t) return t[y][x] else: a = self.unique_paths(grid, x - 1, y, t) b = self.unique_paths(grid, x, y - 1, t) t[y][x] = a + b return t[y][x]
''' URL: https://leetcode.com/problems/basic-calculator/ Time complexity: O(n) Space complexity: O(n) ''' class Solution: def _get_next_num(self, i, s): curr_num = "" while i < len(s) and s[i].isdigit(): curr_num += s[i] i += 1 return i-1, int(curr_num) def calculate(self, s): """ :type s: str :rtype: int """ stack = [(0, 1)] prev_char = None i = 0 while i < len(s): char = s[i] if char == ' ': i += 1 continue if char.isdigit(): i, num = self._get_next_num(i, s) if prev_char == '-': stack[-1] = (stack[-1][0]-num, stack[-1][1]) else: stack[-1] = (stack[-1][0]+num, stack[-1][1]) elif char == ')': val, sign = stack.pop() if len(stack) > 0: stack[-1] = (stack[-1][0] + val * sign, stack[-1][1]) else: return val * sign elif char == '(': if prev_char == '-': sign = -1 else: sign = 1 stack.append((0, sign)) prev_char = char i += 1 if len(stack) > 0: return (stack[-1][0] * stack[-1][1]) return -1
""" URL: https://leetcode.com/problems/basic-calculator/ Time complexity: O(n) Space complexity: O(n) """ class Solution: def _get_next_num(self, i, s): curr_num = '' while i < len(s) and s[i].isdigit(): curr_num += s[i] i += 1 return (i - 1, int(curr_num)) def calculate(self, s): """ :type s: str :rtype: int """ stack = [(0, 1)] prev_char = None i = 0 while i < len(s): char = s[i] if char == ' ': i += 1 continue if char.isdigit(): (i, num) = self._get_next_num(i, s) if prev_char == '-': stack[-1] = (stack[-1][0] - num, stack[-1][1]) else: stack[-1] = (stack[-1][0] + num, stack[-1][1]) elif char == ')': (val, sign) = stack.pop() if len(stack) > 0: stack[-1] = (stack[-1][0] + val * sign, stack[-1][1]) else: return val * sign elif char == '(': if prev_char == '-': sign = -1 else: sign = 1 stack.append((0, sign)) prev_char = char i += 1 if len(stack) > 0: return stack[-1][0] * stack[-1][1] return -1
FILENAME = "input.txt" class Expression: def __init__(self, expression_str: str): expression_list = expression_str.split() if len(expression_list) == 1: self.parse_expression(None, None, expression_list[0]) elif len(expression_list) == 2: self.parse_expression(None, *expression_list) else: self.parse_expression(*expression_list) def parse_expression( self, left_literal: str | None, operator: str | None, right_literal: str ): try: self._left_literal = int(left_literal) except: self._left_literal = left_literal try: self._right_literal = int(right_literal) except: self._right_literal = right_literal self._operator = operator def eval(self, table) -> int: if self._left_literal is not None: if isinstance(self._left_literal, int): left = self._left_literal else: left = table.get_value(self._left_literal) if isinstance(self._right_literal, int): right = self._right_literal else: right = table.get_value(self._right_literal) if self._operator == "AND": return left & right elif self._operator == "OR": return left | right elif self._operator == "NOT": return 2**16 - right - 1 elif self._operator == "LSHIFT": return left << right elif self._operator == "RSHIFT": return left >> right elif self._operator is None: return right else: raise ValueError(f"Unknown operator: {self._operator}") class Table: def __init__(self): self._variable_dict = dict() def process_expression(self, variable: str, expression: Expression): self._variable_dict[variable] = expression def get_value(self, variable: str) -> int: result = self._variable_dict[variable].eval(self) self._variable_dict[variable] = Expression(str(result)) return result def get_input() -> Table: table = Table() with open(FILENAME) as file: for line in file.readlines(): lhs, rhs = line.strip().split(" -> ") table.process_expression(rhs, Expression(lhs)) return table def part_1(): table = get_input() WIRE_VAR = "a" result = table.get_value(WIRE_VAR) print(f"Signal {result} is ultimately provided to wire {WIRE_VAR}") def part_2(): 
table1 = get_input() table2 = get_input() WIRE_VAR = "a" a_signal = table1.get_value(WIRE_VAR) table2.process_expression("b", Expression(str(a_signal))) result = table2.get_value(WIRE_VAR) print(f"Signal {result} is ultimately provided to wire {WIRE_VAR}") if __name__ == "__main__": # part_1() # part_2() pass
filename = 'input.txt' class Expression: def __init__(self, expression_str: str): expression_list = expression_str.split() if len(expression_list) == 1: self.parse_expression(None, None, expression_list[0]) elif len(expression_list) == 2: self.parse_expression(None, *expression_list) else: self.parse_expression(*expression_list) def parse_expression(self, left_literal: str | None, operator: str | None, right_literal: str): try: self._left_literal = int(left_literal) except: self._left_literal = left_literal try: self._right_literal = int(right_literal) except: self._right_literal = right_literal self._operator = operator def eval(self, table) -> int: if self._left_literal is not None: if isinstance(self._left_literal, int): left = self._left_literal else: left = table.get_value(self._left_literal) if isinstance(self._right_literal, int): right = self._right_literal else: right = table.get_value(self._right_literal) if self._operator == 'AND': return left & right elif self._operator == 'OR': return left | right elif self._operator == 'NOT': return 2 ** 16 - right - 1 elif self._operator == 'LSHIFT': return left << right elif self._operator == 'RSHIFT': return left >> right elif self._operator is None: return right else: raise value_error(f'Unknown operator: {self._operator}') class Table: def __init__(self): self._variable_dict = dict() def process_expression(self, variable: str, expression: Expression): self._variable_dict[variable] = expression def get_value(self, variable: str) -> int: result = self._variable_dict[variable].eval(self) self._variable_dict[variable] = expression(str(result)) return result def get_input() -> Table: table = table() with open(FILENAME) as file: for line in file.readlines(): (lhs, rhs) = line.strip().split(' -> ') table.process_expression(rhs, expression(lhs)) return table def part_1(): table = get_input() wire_var = 'a' result = table.get_value(WIRE_VAR) print(f'Signal {result} is ultimately provided to wire {WIRE_VAR}') def part_2(): 
table1 = get_input() table2 = get_input() wire_var = 'a' a_signal = table1.get_value(WIRE_VAR) table2.process_expression('b', expression(str(a_signal))) result = table2.get_value(WIRE_VAR) print(f'Signal {result} is ultimately provided to wire {WIRE_VAR}') if __name__ == '__main__': pass
# generated from genmsg/cmake/pkg-genmsg.context.in messages_str = "/workspace/src/ros_control/controller_manager_msgs/msg/ControllerState.msg;/workspace/src/ros_control/controller_manager_msgs/msg/ControllerStatistics.msg;/workspace/src/ros_control/controller_manager_msgs/msg/ControllersStatistics.msg;/workspace/src/ros_control/controller_manager_msgs/msg/HardwareInterfaceResources.msg" services_str = "/workspace/src/ros_control/controller_manager_msgs/srv/ListControllerTypes.srv;/workspace/src/ros_control/controller_manager_msgs/srv/ListControllers.srv;/workspace/src/ros_control/controller_manager_msgs/srv/LoadController.srv;/workspace/src/ros_control/controller_manager_msgs/srv/ReloadControllerLibraries.srv;/workspace/src/ros_control/controller_manager_msgs/srv/SwitchController.srv;/workspace/src/ros_control/controller_manager_msgs/srv/UnloadController.srv" pkg_name = "controller_manager_msgs" dependencies_str = "std_msgs" langs = "gencpp;geneus;genlisp;gennodejs;genpy" dep_include_paths_str = "controller_manager_msgs;/workspace/src/ros_control/controller_manager_msgs/msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg" PYTHON_EXECUTABLE = "/usr/bin/python2" package_has_static_sources = 'TRUE' == 'TRUE' genmsg_check_deps_script = "/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
messages_str = '/workspace/src/ros_control/controller_manager_msgs/msg/ControllerState.msg;/workspace/src/ros_control/controller_manager_msgs/msg/ControllerStatistics.msg;/workspace/src/ros_control/controller_manager_msgs/msg/ControllersStatistics.msg;/workspace/src/ros_control/controller_manager_msgs/msg/HardwareInterfaceResources.msg' services_str = '/workspace/src/ros_control/controller_manager_msgs/srv/ListControllerTypes.srv;/workspace/src/ros_control/controller_manager_msgs/srv/ListControllers.srv;/workspace/src/ros_control/controller_manager_msgs/srv/LoadController.srv;/workspace/src/ros_control/controller_manager_msgs/srv/ReloadControllerLibraries.srv;/workspace/src/ros_control/controller_manager_msgs/srv/SwitchController.srv;/workspace/src/ros_control/controller_manager_msgs/srv/UnloadController.srv' pkg_name = 'controller_manager_msgs' dependencies_str = 'std_msgs' langs = 'gencpp;geneus;genlisp;gennodejs;genpy' dep_include_paths_str = 'controller_manager_msgs;/workspace/src/ros_control/controller_manager_msgs/msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg' python_executable = '/usr/bin/python2' package_has_static_sources = 'TRUE' == 'TRUE' genmsg_check_deps_script = '/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py'
a=[] while True: b=input("Enter Number(Break Using String):") if b.isalpha(): break else: a.append(int(b)) continue c=a[0] for x in a: if c>x: c=x else: continue d=0 if c in a: d=a.index(c) print ("Index:",d) print ("Number:",c)
a = [] while True: b = input('Enter Number(Break Using String):') if b.isalpha(): break else: a.append(int(b)) continue c = a[0] for x in a: if c > x: c = x else: continue d = 0 if c in a: d = a.index(c) print('Index:', d) print('Number:', c)
''' PROBLEM: Length of Last Word Given a string s consists of upper/lower-case alphabets and empty space characters ' ', return the length of last word (last word means the last appearing word if we loop from left to right) in the string. If the last word does not exist, return 0. Note: A word is defined as a maximal substring consisting of non-space characters only. Example: Input: "Hello World" Output: 5 Problem link : https://leetcode.com/problems/length-of-last-word/ ''' ''' APPROACH - We can convert string into list of words and can calculate length using reverse indexing ''' class Solution: def lengthOfLastWord(self, s: str) -> int: a = s.split() if (len(a)>=1): return len(a[-1]) else: return 0
""" PROBLEM: Length of Last Word Given a string s consists of upper/lower-case alphabets and empty space characters ' ', return the length of last word (last word means the last appearing word if we loop from left to right) in the string. If the last word does not exist, return 0. Note: A word is defined as a maximal substring consisting of non-space characters only. Example: Input: "Hello World" Output: 5 Problem link : https://leetcode.com/problems/length-of-last-word/ """ '\nAPPROACH -\nWe can convert string into list of words and can calculate length using reverse indexing\n' class Solution: def length_of_last_word(self, s: str) -> int: a = s.split() if len(a) >= 1: return len(a[-1]) else: return 0
# # PySNMP MIB module RBN-TC (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RBN-TC # Produced by pysmi-0.3.4 at Wed May 1 14:52:26 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint") rbnModules, = mibBuilder.importSymbols("RBN-SMI", "rbnModules") ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup") MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, ObjectIdentity, Counter32, MibIdentifier, TimeTicks, Gauge32, ModuleIdentity, Unsigned32, iso, IpAddress, Bits, Integer32, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "ObjectIdentity", "Counter32", "MibIdentifier", "TimeTicks", "Gauge32", "ModuleIdentity", "Unsigned32", "iso", "IpAddress", "Bits", "Integer32", "NotificationType") DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention") rbnTC = ModuleIdentity((1, 3, 6, 1, 4, 1, 2352, 5, 2)) rbnTC.setRevisions(('2009-10-20 17:00', '2004-06-19 17:00', '2003-03-17 17:00', '2002-11-11 00:00', '2002-06-26 00:00', '2000-07-14 00:00',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: rbnTC.setRevisionsDescriptions(('Added new textual convention: RbnUnsigned64 for read-write capable 64 bit integer value.', 'Added new textual convention: 
RbnPortMediumType. Correct warnings given by smilint.', 'Added new textual convention: RbnVidOrUntagged.', 'Moved definitions of RbnSlot and RbnPort from RBN-PVC-MIB. Updated range on RbnSlot and RbnPort.', 'Updated CONTACT-INFO. Added new textual conventions: RbnKBytes and RbnPercentage.', 'Initial version.',)) if mibBuilder.loadTexts: rbnTC.setLastUpdated('200910201700Z') if mibBuilder.loadTexts: rbnTC.setOrganization('Redback Networks, Inc.') if mibBuilder.loadTexts: rbnTC.setContactInfo(' RedBack Networks, Inc. Postal: 300 Holger Way San Jose, CA 95134-1362 USA Phone: +1 408 750 5000 Fax: +1 408 750 5599 E-mail: mib-info@redback.com ') if mibBuilder.loadTexts: rbnTC.setDescription('Defines common textual conventions used in Redback mib modules.') class RbnCircuitHandle(TextualConvention, OctetString): description = 'A unique identifier for individual circuits. The string is composed of the following: Octet 1 slot 2 port 3-8 circuit identifier slots/ports are numbered 0..n. The SMS CLI also numbers slots/ports 0..n but SE CLI numbers slots/ports 1..n. For example: When the SE CLI refers to slot/port 1/2, this maps to to the RbnCircuitHandle slot/port 0/1 ' status = 'current' displayHint = '1d:1d:2x-2x-2x' subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(8, 8) fixedLength = 8 class RbnKBytes(TextualConvention, Integer32): description = 'Storage size, expressed in units of 1024 bytes.' status = 'current' displayHint = 'd' subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 2147483647) class RbnPercentage(TextualConvention, Integer32): description = 'This Textual Convention describes an object that stores a whole integer percentage value.' status = 'current' displayHint = 'd%' subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 100) class RbnSlot(TextualConvention, Unsigned32): description = "The chassis slot number. 
This is the physical slot number as reported in the CLI command 'show hardware' on SMS and the CLI command 'show port' on SE." status = 'current' displayHint = 'd' subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(0, 255) class RbnPort(TextualConvention, Unsigned32): description = "The chassis port number. This is the physical port number as reported in the CLI command 'show hardware' on SMS and the CLI command 'show port' on SE." status = 'current' displayHint = 'd' subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(0, 255) class RbnVidOrUntagged(TextualConvention, Integer32): description = 'The twelve-bit VLAN Identifer (VID) used to uniquely identify the VLAN to which the frame belongs. The VID is encoded as an unsigned binary number. An untagged frame does not carry any identification of the VLAN to which it belongs and is designated with a value of 4096.' status = 'current' displayHint = 'd' subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 4096) class RbnPortMediumType(TextualConvention, Integer32): description = 'Medium type of NAS port.' status = 'current' subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 11, 12, 13, 14)) namedValues = NamedValues(("unknown", 0), ("dsl", 11), ("cable", 12), ("wireless", 13), ("satellite", 14)) class RbnUnsigned64(TextualConvention, OctetString): description = 'Unsigned 64 bit integer value is represented as an OCTET STRING. This allows an unsigned integer value in the range 0..18446744073709551615. The octets are ordered with the first octet containing the highest ordered bits of the integer and the 8th octet containing the lowest ordered bits, corresponding to network byte order.' 
status = 'current' displayHint = '8d' subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(8, 8) fixedLength = 8 mibBuilder.exportSymbols("RBN-TC", RbnSlot=RbnSlot, RbnVidOrUntagged=RbnVidOrUntagged, RbnPercentage=RbnPercentage, RbnCircuitHandle=RbnCircuitHandle, PYSNMP_MODULE_ID=rbnTC, rbnTC=rbnTC, RbnPortMediumType=RbnPortMediumType, RbnKBytes=RbnKBytes, RbnUnsigned64=RbnUnsigned64, RbnPort=RbnPort)
(integer, object_identifier, octet_string) = mibBuilder.importSymbols('ASN1', 'Integer', 'ObjectIdentifier', 'OctetString') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (single_value_constraint, value_range_constraint, constraints_union, constraints_intersection, value_size_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ValueRangeConstraint', 'ConstraintsUnion', 'ConstraintsIntersection', 'ValueSizeConstraint') (rbn_modules,) = mibBuilder.importSymbols('RBN-SMI', 'rbnModules') (module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup') (mib_scalar, mib_table, mib_table_row, mib_table_column, counter64, object_identity, counter32, mib_identifier, time_ticks, gauge32, module_identity, unsigned32, iso, ip_address, bits, integer32, notification_type) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Counter64', 'ObjectIdentity', 'Counter32', 'MibIdentifier', 'TimeTicks', 'Gauge32', 'ModuleIdentity', 'Unsigned32', 'iso', 'IpAddress', 'Bits', 'Integer32', 'NotificationType') (display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention') rbn_tc = module_identity((1, 3, 6, 1, 4, 1, 2352, 5, 2)) rbnTC.setRevisions(('2009-10-20 17:00', '2004-06-19 17:00', '2003-03-17 17:00', '2002-11-11 00:00', '2002-06-26 00:00', '2000-07-14 00:00')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: rbnTC.setRevisionsDescriptions(('Added new textual convention: RbnUnsigned64 for read-write capable 64 bit integer value.', 'Added new textual convention: RbnPortMediumType. Correct warnings given by smilint.', 'Added new textual convention: RbnVidOrUntagged.', 'Moved definitions of RbnSlot and RbnPort from RBN-PVC-MIB. Updated range on RbnSlot and RbnPort.', 'Updated CONTACT-INFO. 
Added new textual conventions: RbnKBytes and RbnPercentage.', 'Initial version.')) if mibBuilder.loadTexts: rbnTC.setLastUpdated('200910201700Z') if mibBuilder.loadTexts: rbnTC.setOrganization('Redback Networks, Inc.') if mibBuilder.loadTexts: rbnTC.setContactInfo(' RedBack Networks, Inc. Postal: 300 Holger Way San Jose, CA 95134-1362 USA Phone: +1 408 750 5000 Fax: +1 408 750 5599 E-mail: mib-info@redback.com ') if mibBuilder.loadTexts: rbnTC.setDescription('Defines common textual conventions used in Redback mib modules.') class Rbncircuithandle(TextualConvention, OctetString): description = 'A unique identifier for individual circuits. The string is composed of the following: Octet 1 slot 2 port 3-8 circuit identifier slots/ports are numbered 0..n. The SMS CLI also numbers slots/ports 0..n but SE CLI numbers slots/ports 1..n. For example: When the SE CLI refers to slot/port 1/2, this maps to to the RbnCircuitHandle slot/port 0/1 ' status = 'current' display_hint = '1d:1d:2x-2x-2x' subtype_spec = OctetString.subtypeSpec + value_size_constraint(8, 8) fixed_length = 8 class Rbnkbytes(TextualConvention, Integer32): description = 'Storage size, expressed in units of 1024 bytes.' status = 'current' display_hint = 'd' subtype_spec = Integer32.subtypeSpec + value_range_constraint(0, 2147483647) class Rbnpercentage(TextualConvention, Integer32): description = 'This Textual Convention describes an object that stores a whole integer percentage value.' status = 'current' display_hint = 'd%' subtype_spec = Integer32.subtypeSpec + value_range_constraint(0, 100) class Rbnslot(TextualConvention, Unsigned32): description = "The chassis slot number. This is the physical slot number as reported in the CLI command 'show hardware' on SMS and the CLI command 'show port' on SE." status = 'current' display_hint = 'd' subtype_spec = Unsigned32.subtypeSpec + value_range_constraint(0, 255) class Rbnport(TextualConvention, Unsigned32): description = "The chassis port number. 
This is the physical port number as reported in the CLI command 'show hardware' on SMS and the CLI command 'show port' on SE." status = 'current' display_hint = 'd' subtype_spec = Unsigned32.subtypeSpec + value_range_constraint(0, 255) class Rbnvidoruntagged(TextualConvention, Integer32): description = 'The twelve-bit VLAN Identifer (VID) used to uniquely identify the VLAN to which the frame belongs. The VID is encoded as an unsigned binary number. An untagged frame does not carry any identification of the VLAN to which it belongs and is designated with a value of 4096.' status = 'current' display_hint = 'd' subtype_spec = Integer32.subtypeSpec + value_range_constraint(1, 4096) class Rbnportmediumtype(TextualConvention, Integer32): description = 'Medium type of NAS port.' status = 'current' subtype_spec = Integer32.subtypeSpec + constraints_union(single_value_constraint(0, 11, 12, 13, 14)) named_values = named_values(('unknown', 0), ('dsl', 11), ('cable', 12), ('wireless', 13), ('satellite', 14)) class Rbnunsigned64(TextualConvention, OctetString): description = 'Unsigned 64 bit integer value is represented as an OCTET STRING. This allows an unsigned integer value in the range 0..18446744073709551615. The octets are ordered with the first octet containing the highest ordered bits of the integer and the 8th octet containing the lowest ordered bits, corresponding to network byte order.' status = 'current' display_hint = '8d' subtype_spec = OctetString.subtypeSpec + value_size_constraint(8, 8) fixed_length = 8 mibBuilder.exportSymbols('RBN-TC', RbnSlot=RbnSlot, RbnVidOrUntagged=RbnVidOrUntagged, RbnPercentage=RbnPercentage, RbnCircuitHandle=RbnCircuitHandle, PYSNMP_MODULE_ID=rbnTC, rbnTC=rbnTC, RbnPortMediumType=RbnPortMediumType, RbnKBytes=RbnKBytes, RbnUnsigned64=RbnUnsigned64, RbnPort=RbnPort)
def quicksort(ar:list): """ Sort a list with quicksort algorithm. The quicksort algorithm splits a list into two parts and recursively sorts those parts by making swaps based on the elements value in relation to the pivot value. It is an O(n log(n)) sort. Args: ar: list to sort. Returns: The input list sorted. """ def sort(ar, lo, hi): # if the two ends haven't swapped if lo < hi: # find the partition partition_index = partition(ar, lo, hi) # sort both sides sort(ar, lo, partition_index - 1) sort(ar, partition_index + 1, hi) def partition(ar, lo, hi): # basic partition by using the high value as a pivot pivot = ar[hi] # the value being examined i = lo for j in range(lo, hi): # if ar[j] <= pivot: ar[i], ar[j] = ar[j], ar[i] i += 1 # swap the pivot into place ar[i], ar[hi] = ar[hi], ar[i] return i sort(ar, 0, len(ar)-1) return ar
def quicksort(ar: list): """ Sort a list with quicksort algorithm. The quicksort algorithm splits a list into two parts and recursively sorts those parts by making swaps based on the elements value in relation to the pivot value. It is an O(n log(n)) sort. Args: ar: list to sort. Returns: The input list sorted. """ def sort(ar, lo, hi): if lo < hi: partition_index = partition(ar, lo, hi) sort(ar, lo, partition_index - 1) sort(ar, partition_index + 1, hi) def partition(ar, lo, hi): pivot = ar[hi] i = lo for j in range(lo, hi): if ar[j] <= pivot: (ar[i], ar[j]) = (ar[j], ar[i]) i += 1 (ar[i], ar[hi]) = (ar[hi], ar[i]) return i sort(ar, 0, len(ar) - 1) return ar
imports="" loader=""" //handle := C.MemoryLoadLibrary(unsafe.Pointer(&full_payload[0]),(C.size_t)(len(full_payload))) handle := C.MemoryLoadLibraryEx(unsafe.Pointer(&full_payload[0]), (C.size_t)(len(full_payload)), (*[0]byte)(C.MemoryDefaultLoadLibrary), // loadLibrary func ptr (*[0]byte)(C.MemoryDefaultGetProcAddress), // getProcAddress func ptr (*[0]byte)(C.MemoryDefaultFreeLibrary), // freeLibrary func ptr unsafe.Pointer(nil), // void *userdata (we're not passing any data to the dll or exe) ) if handle == nil { fmt.Println("MemoryLoadLibrary failed") os.Exit(1) } //output := C.MemoryCallEntryPoint(handle) _ = C.MemoryCallEntryPoint(handle) //fmt.Println(output) C.MemoryFreeLibrary(handle) """
imports = '' loader = '\n\t//handle := C.MemoryLoadLibrary(unsafe.Pointer(&full_payload[0]),(C.size_t)(len(full_payload)))\n\thandle := C.MemoryLoadLibraryEx(unsafe.Pointer(&full_payload[0]),\n (C.size_t)(len(full_payload)),\n (*[0]byte)(C.MemoryDefaultLoadLibrary), // loadLibrary func ptr\n (*[0]byte)(C.MemoryDefaultGetProcAddress), // getProcAddress func ptr\n (*[0]byte)(C.MemoryDefaultFreeLibrary), // freeLibrary func ptr\n unsafe.Pointer(nil), // void *userdata (we\'re not passing any data to the dll or exe)\n )\n if handle == nil {\n fmt.Println("MemoryLoadLibrary failed")\n os.Exit(1)\n }\n\n //output := C.MemoryCallEntryPoint(handle)\n _ = C.MemoryCallEntryPoint(handle)\n //fmt.Println(output)\n C.MemoryFreeLibrary(handle)\n'
# Copyright (C) 2019 SignalFx, Inc. All rights reserved. name = 'signalfx_serverless_gcf' version = '0.0.1' user_agent = 'signalfx_serverless/' + version packages = ['signalfx_gcf', 'signalfx_gcf.serverless']
name = 'signalfx_serverless_gcf' version = '0.0.1' user_agent = 'signalfx_serverless/' + version packages = ['signalfx_gcf', 'signalfx_gcf.serverless']
print("Hello! I am a script in python"); def Hi(firstName, lastName): print("Hello " + firstName + " " + lastName)
print('Hello! I am a script in python') def hi(firstName, lastName): print('Hello ' + firstName + ' ' + lastName)
class GN3: def __init__(self): self.name = 'GN3' def __str__(self): return self.name
class Gn3: def __init__(self): self.name = 'GN3' def __str__(self): return self.name
# Specialization: Google IT Automation with Python # Course 01: Crash Course with Python # Week 2 Module Part 1 Exercise 02 # Student: Shawn Solomon # Learning Platform: Coursera.org # Practice writing some expressions and conversions yourself. # In this scenario, we have a directory with 5 files in it. Each file has a different size: 2048, 4357, 97658, 125, and 8. # Fill in the blanks to calculate the average file size by having Python add all the values for you, and then set the files variable to the number of files. # Finally, output a message saying "The average size is: " followed by the resulting number. Remember to use the str() function to convert the number into a string. # total = 2048 + ___ + ___ + ___ + ___ # files = ___ # average = total / files # print("___" + str(___)) total = 2048 + 4357 + 97658 + 125 + 8 files = 5 average = total / files print("The average size is: " + str(average))
total = 2048 + 4357 + 97658 + 125 + 8 files = 5 average = total / files print('The average size is: ' + str(average))
#!/usr/bin/env python # -*- coding: utf-8 -*- # This file is part of xy-cli. # https://github.com/exiahuang/xy-cli # Licensed under the Apache License 2.0: # http://www.opensource.org/licenses/Apache-2.0 # Copyright (c) 2020, exiahuang <exia.huang@outlook.com> __version__ = '0.8' # NOQA __desc__ = 'xy command tools'
__version__ = '0.8' __desc__ = 'xy command tools'
class Stack: def __init__(self) -> None: self.elements = [] def push(self, element): self.elements.append(element) def size(self): return len(self.elements) def pop(self): result = self.elements[self.size()-1] self.elements = self.elements[:self.size()-1] return result def peek(self): result = self.elements[self.size()-1] return result
class Stack: def __init__(self) -> None: self.elements = [] def push(self, element): self.elements.append(element) def size(self): return len(self.elements) def pop(self): result = self.elements[self.size() - 1] self.elements = self.elements[:self.size() - 1] return result def peek(self): result = self.elements[self.size() - 1] return result
# Lets attempt to draw two points and have them move also pos_1 = 0 velo_1 = 1 pos_2 = 9 velo_2 = -1 line = 10*[' '] # The code is beginning to be clustered and hard to read for i in range(10): line[pos_1] = '*' line[pos_2] = '*' print("".join(line)) line[pos_1] = ' ' line[pos_2] = ' ' pos_1 += velo_1 pos_2 += velo_2 # Note that if we used the pos now to index the line, we would get errors or wrong behavior print(pos_1, pos_2) print("".join(line)) # This is going to become extremely problematic if we try to go beyond this # If we wanted to add more points or dimensions, the code will quickly degrade`
pos_1 = 0 velo_1 = 1 pos_2 = 9 velo_2 = -1 line = 10 * [' '] for i in range(10): line[pos_1] = '*' line[pos_2] = '*' print(''.join(line)) line[pos_1] = ' ' line[pos_2] = ' ' pos_1 += velo_1 pos_2 += velo_2 print(pos_1, pos_2) print(''.join(line))
a = int(input("Enter a -: ")) b = int(input("Enter b -: ")) print("A, B se bada ya barabar h bhai") if a >= b else print( "B, A se bada h bhai")
a = int(input('Enter a -: ')) b = int(input('Enter b -: ')) print('A, B se bada ya barabar h bhai') if a >= b else print('B, A se bada h bhai')
# A postgres database url # postgresql://[user[:password]@][netloc][:port][/dbname] DATABASE_URL="" # Your discord bot token TOKEN=""
database_url = '' token = ''
class Server(object): def __init__(self, crt_name, deploy_full_chain=False, **kwargs): r"""Default server implementation describing interface of a server This is an abstract class, so each specialized method must be overridden in parent class. :param crt_name: name of certificate on server (without any extension like .crt) :type crt_name: str :param deploy_full_chain: if True, deploy server certificate with full chain of trust in crt file else, simply deploy server certificate in crt file :type deploy_full_chain: bool """ self.crt_name = crt_name self.deploy_full_chain = deploy_full_chain def get_description(self): """Get description of this server :return: server description :rtype: str """ raise NotImplementedError("Must be overridden in parent class") def deploy_cert(self, key, cert, **kwargs): r"""Deploy input certificate on server :param key: path to local private key :type key: pathlib.Path :param cert: path to local public certificate :type cert: pathlib.Path :raise exception.DeployCertificateError: if unexpected error occurred during deployment on server """ raise NotImplementedError("Must be overridden in parent class") def create_acme_challenge(self, token, key_authorization): """Create token on server with specified value :param token: challenge key :param key_authorization: challenge value """ raise NotImplementedError("Must be overridden in parent class") def delete_acme_challenge(self, token): """Delete challenge created on server :param token: challenge key to delete from server :type token: str """ raise NotImplementedError("Must be overridden in parent class") def get_certificate_information(self): """Retrieve certificate information from server. Must be implemented for each type of server. 
:return: SSL certificate information :rtype: autossl.ssl.SslCertificate :raise autossl.exception.CertificateNotFound: if certificate does not exist yet on server """ raise NotImplementedError("Must be overridden in parent class") def is_same(self, common_name=None, sans=None, exact_match=False): """Check if current certificate deployed on server is covering all specified domains :param common_name: Common name :type common_name: str :param sans: list of Subject Alternate Names :type sans: list :param exact_match: if True, certificate must exactly match input domains if False, input domain will also match wilcard certificate and additional domains in certificate will be ignored :type exact_match: bool :return: True is certificate is already covering all domains """ return self.get_certificate_information().is_same(common_name, sans, exact_match) def is_expired(self, expiration_delay=0): """Check for expiration of specified certificate :param expiration_delay: Number of days before real expiration we consider a renewal needed :type expiration_delay: int :return: True is certificate is going to expire in less than expiration_delay days :rtype: bool """ return self.get_certificate_information().is_expired(expiration_delay)
class Server(object): def __init__(self, crt_name, deploy_full_chain=False, **kwargs): """Default server implementation describing interface of a server This is an abstract class, so each specialized method must be overridden in parent class. :param crt_name: name of certificate on server (without any extension like .crt) :type crt_name: str :param deploy_full_chain: if True, deploy server certificate with full chain of trust in crt file else, simply deploy server certificate in crt file :type deploy_full_chain: bool """ self.crt_name = crt_name self.deploy_full_chain = deploy_full_chain def get_description(self): """Get description of this server :return: server description :rtype: str """ raise not_implemented_error('Must be overridden in parent class') def deploy_cert(self, key, cert, **kwargs): """Deploy input certificate on server :param key: path to local private key :type key: pathlib.Path :param cert: path to local public certificate :type cert: pathlib.Path :raise exception.DeployCertificateError: if unexpected error occurred during deployment on server """ raise not_implemented_error('Must be overridden in parent class') def create_acme_challenge(self, token, key_authorization): """Create token on server with specified value :param token: challenge key :param key_authorization: challenge value """ raise not_implemented_error('Must be overridden in parent class') def delete_acme_challenge(self, token): """Delete challenge created on server :param token: challenge key to delete from server :type token: str """ raise not_implemented_error('Must be overridden in parent class') def get_certificate_information(self): """Retrieve certificate information from server. Must be implemented for each type of server. 
:return: SSL certificate information :rtype: autossl.ssl.SslCertificate :raise autossl.exception.CertificateNotFound: if certificate does not exist yet on server """ raise not_implemented_error('Must be overridden in parent class') def is_same(self, common_name=None, sans=None, exact_match=False): """Check if current certificate deployed on server is covering all specified domains :param common_name: Common name :type common_name: str :param sans: list of Subject Alternate Names :type sans: list :param exact_match: if True, certificate must exactly match input domains if False, input domain will also match wilcard certificate and additional domains in certificate will be ignored :type exact_match: bool :return: True is certificate is already covering all domains """ return self.get_certificate_information().is_same(common_name, sans, exact_match) def is_expired(self, expiration_delay=0): """Check for expiration of specified certificate :param expiration_delay: Number of days before real expiration we consider a renewal needed :type expiration_delay: int :return: True is certificate is going to expire in less than expiration_delay days :rtype: bool """ return self.get_certificate_information().is_expired(expiration_delay)
def gcd(a,b): return gcd(b,a%b) if b>0 else a a,b,c=map(int,input().split()) if a*b//gcd(a,b) <=c: print("yes") else: print("no")
def gcd(a, b): return gcd(b, a % b) if b > 0 else a (a, b, c) = map(int, input().split()) if a * b // gcd(a, b) <= c: print('yes') else: print('no')
# Definition for a binary tree node. # class TreeNode(object): # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution(object): def search_paths(self, root, sum, path, paths): if not(root): return sum -= root.val if sum < 0: return path.append(root.val) if sum == 0: paths.append(path[:]) self.search_paths(root.left, sum, path, paths) self.search_paths(root.right, sum, path, paths) path.pop() def pathSum(self, root, sum): """ :type root: TreeNode :type sum: int :rtype: List[List[int]] Assumptions: root to leaf paths (root has to be included) return list of all the possible paths. Path is a list of integers that sum up to sum. Approach #1: DFS search_paths(root, sum, path, paths) if root == null, return sum -= root.val if sum < 0, return path.append(root.val) if sum == 0, paths.append(path) path.pop() return search_paths(root.left, sum, path, paths) search_paths(root.right, sum, path, paths) path.pop() """ path = [] paths = [] self.search_paths(root, sum, path, paths) return paths # Definition for a binary tree node. # class TreeNode(object): # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution(object): def search_paths(self, root, sum, path, paths): if not(root): return path.append(root.val) if not(root.left) and not(root.right) and sum == root.val: paths.append(path[:]) if root.left: self.search_paths(root.left, sum-root.val, path, paths) if root.right: self.search_paths(root.right, sum-root.val, path, paths) path.pop() def pathSum(self, root, sum): """ :type root: TreeNode :type sum: int :rtype: List[List[int]] Assumptions: root to leaf paths (root has to be included) return list of all the possible paths. Path is a list of integers that sum up to sum. 
Approach #1: DFS search_paths(root, sum, path, paths) if root == null, return sum -= root.val if sum < 0, return path.append(root.val) if sum == 0, paths.append(path) path.pop() return search_paths(root.left, sum, path, paths) search_paths(root.right, sum, path, paths) path.pop() """ path = [] paths = [] self.search_paths(root, sum, path, paths) return paths
class Solution(object): def search_paths(self, root, sum, path, paths): if not root: return sum -= root.val if sum < 0: return path.append(root.val) if sum == 0: paths.append(path[:]) self.search_paths(root.left, sum, path, paths) self.search_paths(root.right, sum, path, paths) path.pop() def path_sum(self, root, sum): """ :type root: TreeNode :type sum: int :rtype: List[List[int]] Assumptions: root to leaf paths (root has to be included) return list of all the possible paths. Path is a list of integers that sum up to sum. Approach #1: DFS search_paths(root, sum, path, paths) if root == null, return sum -= root.val if sum < 0, return path.append(root.val) if sum == 0, paths.append(path) path.pop() return search_paths(root.left, sum, path, paths) search_paths(root.right, sum, path, paths) path.pop() """ path = [] paths = [] self.search_paths(root, sum, path, paths) return paths class Solution(object): def search_paths(self, root, sum, path, paths): if not root: return path.append(root.val) if not root.left and (not root.right) and (sum == root.val): paths.append(path[:]) if root.left: self.search_paths(root.left, sum - root.val, path, paths) if root.right: self.search_paths(root.right, sum - root.val, path, paths) path.pop() def path_sum(self, root, sum): """ :type root: TreeNode :type sum: int :rtype: List[List[int]] Assumptions: root to leaf paths (root has to be included) return list of all the possible paths. Path is a list of integers that sum up to sum. Approach #1: DFS search_paths(root, sum, path, paths) if root == null, return sum -= root.val if sum < 0, return path.append(root.val) if sum == 0, paths.append(path) path.pop() return search_paths(root.left, sum, path, paths) search_paths(root.right, sum, path, paths) path.pop() """ path = [] paths = [] self.search_paths(root, sum, path, paths) return paths
load("@io_bazel_rules_go//go:def.bzl", "go_context", "go_rule") load("@bazel_skylib//lib:shell.bzl", "shell") def _go_vendor(ctx): go = go_context(ctx) out = ctx.actions.declare_file(ctx.label.name + ".sh") substitutions = { "@@GO@@": shell.quote(go.go.path), "@@GAZELLE@@": shell.quote(ctx.executable._gazelle.short_path), } ctx.actions.expand_template( template = ctx.file._template, output = out, substitutions = substitutions, is_executable = True, ) runfiles = ctx.runfiles(files = [go.go, ctx.executable._gazelle]) return [ DefaultInfo( runfiles = runfiles, executable = out, ), ] go_vendor = go_rule( implementation = _go_vendor, executable = True, attrs = { "_template": attr.label( default = "//build/rules/go:vendor.bash", allow_single_file = True, ), "_gazelle": attr.label( default = "@bazel_gazelle//cmd/gazelle", executable = True, cfg = "host", ), }, )
load('@io_bazel_rules_go//go:def.bzl', 'go_context', 'go_rule') load('@bazel_skylib//lib:shell.bzl', 'shell') def _go_vendor(ctx): go = go_context(ctx) out = ctx.actions.declare_file(ctx.label.name + '.sh') substitutions = {'@@GO@@': shell.quote(go.go.path), '@@GAZELLE@@': shell.quote(ctx.executable._gazelle.short_path)} ctx.actions.expand_template(template=ctx.file._template, output=out, substitutions=substitutions, is_executable=True) runfiles = ctx.runfiles(files=[go.go, ctx.executable._gazelle]) return [default_info(runfiles=runfiles, executable=out)] go_vendor = go_rule(implementation=_go_vendor, executable=True, attrs={'_template': attr.label(default='//build/rules/go:vendor.bash', allow_single_file=True), '_gazelle': attr.label(default='@bazel_gazelle//cmd/gazelle', executable=True, cfg='host')})
# ------------------------------ # 137. Single Number II # # Description: # Given a non-empty array of integers, every element appears three times except for one, which appears exactly once. Find that single one. # Note: # Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory? # Example 1: # Input: [2,2,3,2] # Output: 3 # # Example 2: # Input: [0,1,0,1,0,1,99] # Output: 99 # # Version: 1.0 # 08/20/18 by Jianfa # ------------------------------ class Solution(object): def singleNumber(self, nums): """ :type nums: List[int] :rtype: int """ while nums: n = nums.pop(0) if n in nums: nums.remove(n) nums.remove(n) else: return n # Used for testing if __name__ == "__main__": test = Solution() # ------------------------------ # Summary: # Top voted solution: # public int singleNumber(int[] A) { # int ones = 0, twos = 0; # for(int i = 0; i < A.length; i++){ # ones = (ones ^ A[i]) & ~twos; # twos = (twos ^ A[i]) & ~ones; # } # return ones; # }
class Solution(object): def single_number(self, nums): """ :type nums: List[int] :rtype: int """ while nums: n = nums.pop(0) if n in nums: nums.remove(n) nums.remove(n) else: return n if __name__ == '__main__': test = solution()
class InputPin: """ The Rosetta graph input pin """ # TODO: Should be able to find connected output pin - so traversal up the graph is possible. def __init__(self, pin_name, mime_type_map, filter): """ c'tor :param pin_name: The name of the pin for diagnostics, etc. :param mime_type_map: Maps a mime type to a handler. * maps everything. :param filter: A reference to the filter to which this pin belongs. """ self._pin_name = pin_name self._mime_type_map = mime_type_map self._filter = filter @property def pin_name(self): return self._pin_name def recv(self, mime_type, payload, metadata_dict): """ Receive a payload. Payload must be in either str or a binary sequence convertible to bytes format. :param mime_type: The mime_type of the payload being received :param payload: The payload to be received :param metadata_dict: A dictionary of metadata values to be passed down the filter chain :return: None """ disp_fun = self._mime_type_map.get(mime_type) if disp_fun is None: disp_fun = self._mime_type_map.get('*') if disp_fun is None: raise ValueError("Pin {0} could not find a dispatcher for {1}".format(self._pin_name, mime_type)) disp_fun(mime_type, payload, metadata_dict)
class Inputpin:
    """ The Rosetta graph input pin """

    def __init__(self, pin_name, mime_type_map, filter):
        """
        c'tor
        :param pin_name: The name of the pin for diagnostics, etc.
        :param mime_type_map: Maps a mime type to a handler.  * maps everything.
        :param filter: A reference to the filter to which this pin belongs.
        """
        self._pin_name = pin_name
        self._mime_type_map = mime_type_map
        self._filter = filter

    @property
    def pin_name(self):
        # Diagnostic name assigned at construction time.
        return self._pin_name

    def recv(self, mime_type, payload, metadata_dict):
        """
        Receive a payload.  Payload must be in either str or a binary
        sequence convertible to bytes format.

        :param mime_type: The mime_type of the payload being received
        :param payload: The payload to be received
        :param metadata_dict: A dictionary of metadata values to be passed down the filter chain
        :return: None
        :raises ValueError: if no handler matches mime_type and no '*'
            catch-all is registered.
        """
        disp_fun = self._mime_type_map.get(mime_type)
        if disp_fun is None:
            # Fall back to the '*' catch-all handler, if registered.
            disp_fun = self._mime_type_map.get('*')
        if disp_fun is None:
            # Bug fix: the previous revision raised the undefined name
            # `value_error`, which surfaced as NameError instead of the
            # intended ValueError.
            raise ValueError('Pin {0} could not find a dispatcher for {1}'.format(self._pin_name, mime_type))
        disp_fun(mime_type, payload, metadata_dict)
#
# PySNMP MIB module CTRON-SFPS-COMMON-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CTRON-SFPS-COMMON-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:30:50 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE(review): auto-generated pysmi output — left byte-identical; only
# comments were added.  `mibBuilder` is supplied by the pysnmp MIB loader
# that executes this module; it is not defined in this file.
# Pull in the base ASN.1 / SMI / TC symbols this MIB builds on.
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection")
sfpsSystemGenerics, sfpsDiagEventLog, sfpsAOPropertiesAPI, sfpsAOProperties = mibBuilder.importSymbols("CTRON-SFPS-INCLUDE-MIB", "sfpsSystemGenerics", "sfpsDiagEventLog", "sfpsAOPropertiesAPI", "sfpsAOProperties")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, TimeTicks, Counter32, Counter64, Gauge32, IpAddress, ObjectIdentity, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Unsigned32, iso, NotificationType, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "TimeTicks", "Counter32", "Counter64", "Gauge32", "IpAddress", "ObjectIdentity", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Unsigned32", "iso", "NotificationType", "Bits")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Integer32 subtype; the distinct name signals values conventionally
# displayed/entered in hexadecimal (e.g. the port bit mask below).
class HexInteger(Integer32): pass

# ---- sfpsGenericVersionTable: per-generic component version info ----
sfpsGenericVersionTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1), )
if mibBuilder.loadTexts: sfpsGenericVersionTable.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsGenericVersionTable.setDescription('Table describing the SFS generics and their versions that are contained within this image.')
sfpsGenericVersionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1, 1), ).setIndexNames((0, "CTRON-SFPS-COMMON-MIB", "sfpsGenericVersionHash"))
if mibBuilder.loadTexts: sfpsGenericVersionEntry.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsGenericVersionEntry.setDescription('An entry in the table instanced by the Generic name.')
sfpsGenericVersionHash = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsGenericVersionHash.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsGenericVersionHash.setDescription('Hash of the name to make it a unique entry.')
sfpsGenericVersionName = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsGenericVersionName.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsGenericVersionName.setDescription('Name of the Generic.')
sfpsGenericVersionVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsGenericVersionVersion.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsGenericVersionVersion.setDescription('Version stamp of the generic component.')
sfpsGenericVersionMIBRev = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsGenericVersionMIBRev.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsGenericVersionMIBRev.setDescription('MIB version of the generic component.')

# ---- sfpsDiagLogConfigTable: event-logger configuration + debug knobs ----
# NOTE(review): several setDescription strings below contain literal line
# breaks, exactly as emitted in the scraped source; preserved byte-for-byte.
sfpsDiagLogConfigTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1), )
if mibBuilder.loadTexts: sfpsDiagLogConfigTable.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigTable.setDescription('This table contains the informtion to configure 
an Event Logger object.')
sfpsDiagLogConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1), ).setIndexNames((0, "CTRON-SFPS-COMMON-MIB", "sfpsDiagLogConfigInstance"))
if mibBuilder.loadTexts: sfpsDiagLogConfigEntry.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigEntry.setDescription('Each entry contains configuration data.')
sfpsDiagLogConfigInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsDiagLogConfigInstance.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigInstance.setDescription('The instance of this Event Logger.')
sfpsDiagLogConfigStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigStatus.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigStatus.setDescription('The status of this Event Logger object.')
sfpsDiagLogConfigIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsDiagLogConfigIndex.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigIndex.setDescription('The current index in the circular buffer where events are being logged.')
sfpsDiagLogConfigStart = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigStart.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigStart.setDescription('The starting index in the circular buffer to display.')
sfpsDiagLogConfigStop = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigStop.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigStop.setDescription('The last index in the circular buffer to display.')
sfpsDiagLogConfigLogIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigLogIndex.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigLogIndex.setDescription('The current index in the log buffer where we are looking.')
sfpsDiagLogConfigFilterMatch = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigFilterMatch.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigFilterMatch.setDescription('The specified Event Id to log. A value of 0 will cause every event to be logged.')
sfpsDiagLogConfigFilterStart = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigFilterStart.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigFilterStart.setDescription('The beginning range of Event Ids to log.')
sfpsDiagLogConfigFilterStop = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigFilterStop.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigFilterStop.setDescription('The ending range of Event Ids to log.')
# HexInteger: the bit mask is conventionally shown in hex.
sfpsDiagLogAccessPortControl = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 10), HexInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogAccessPortControl.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogAccessPortControl.setDescription('Leaf to be used to set a port bit mask. This bit mask represents which ports shall be statically set to be an ACCESS port. 
Bit 0 corresponds to Port 1.')
sfpsDiagLogCallIdleTime = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 11), Integer32().clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogCallIdleTime.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogCallIdleTime.setDescription('Leaf to be used to set a debug variable. (in seconds)')
sfpsDiagLogFilterAddTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 12), Integer32().clone(900)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogFilterAddTimer.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogFilterAddTimer.setDescription('Leaf to be used to set how long a filter connection should be (Default : 900 seconds)')
sfpsDiagLogRedirectorWakeup = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 13), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogRedirectorWakeup.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogRedirectorWakeup.setDescription('Leaf to be used to set how often the Redirector wakes up to service the queue (in seconds)')
sfpsDiagLogRedirectorNumPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 14), Integer32().clone(64)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogRedirectorNumPackets.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogRedirectorNumPackets.setDescription('Leaf to be used to set how many packets the Redirector reads off at a time.')
sfpsDiagLogEndSystemTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 15), Integer32().clone(600)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogEndSystemTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogEndSystemTimeout.setDescription('Leaf to be used to set a debug variable. 
(Default : 10 minutes) (in seconds)')
sfpsDiagLogSwitchIdleInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 16), Integer32().clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogSwitchIdleInterval.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogSwitchIdleInterval.setDescription('Leaf to be used to set a debug variable. (in seconds)')
sfpsDiagLogInlnFltrAgeTime = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogInlnFltrAgeTime.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogInlnFltrAgeTime.setDescription('Leaf to be used to set the BAF age time (seconds).')
sfpsDiagLogConfigDebug9 = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigDebug9.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigDebug9.setDescription('Leaf to be used to set a debug variable.')
sfpsDiagLogSignalThrottle = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogSignalThrottle.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogSignalThrottle.setDescription('Leaf to be used to set the Signal Thottle knob. Throttling defaults to off. 
By setting it to 1,2,5, or 10 one can dictate the rate (signal/second) at which SFPS sends signals to the ACMS Signal Stack..')
sfpsDiagLogConfigOther = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("string", 1), ("integer", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigOther.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigOther.setDescription('Dictates the format of the Event to be displayed (either Text or Integer values).')
sfpsDiagLogConfigSoftReset = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 21), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigSoftReset.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigSoftReset.setDescription('')
sfpsDiagLogConfigSFPSVlan = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 22), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsDiagLogConfigSFPSVlan.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsDiagLogConfigSFPSVlan.setDescription('')

# ---- sfpsAOPropertiesTable: Active Object property descriptors (read-only) ----
sfpsAOPropertiesTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1), )
if mibBuilder.loadTexts: sfpsAOPropertiesTable.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesTable.setDescription('')
sfpsAOPropertiesEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1), ).setIndexNames((0, "CTRON-SFPS-COMMON-MIB", "sfpsAOPropertiesTag"))
if mibBuilder.loadTexts: sfpsAOPropertiesEntry.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesEntry.setDescription('An entry in the table instanced by the tag.')
sfpsAOPropertiesTag = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesTag.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesTag.setDescription('')
sfpsAOPropertiesTagDescriptor = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesTagDescriptor.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesTagDescriptor.setDescription('')
sfpsAOPropertiesPrettyType = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesPrettyType.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesPrettyType.setDescription('')
sfpsAOPropertiesNumBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesNumBytes.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesNumBytes.setDescription('')
sfpsAOPropertiesIsLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("false", 1), ("true", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesIsLimit.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesIsLimit.setDescription('')
sfpsAOPropertiesIsMobile = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("false", 1), ("true", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesIsMobile.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesIsMobile.setDescription('')
sfpsAOPropertiesIsSingle = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("false", 1), ("true", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesIsSingle.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesIsSingle.setDescription('')
sfpsAOPropertiesNoBlock = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("false", 1), ("true", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesNoBlock.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesNoBlock.setDescription('')
sfpsAOPropertiesNoDelta = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("false", 1), ("true", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesNoDelta.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesNoDelta.setDescription('')

# ---- sfpsAOPropertiesAPI scalars: read/modify a single property record;
# the tri-state enums add a notSet(1) value on top of false/true ----
sfpsAOPropertiesAPITag = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsAOPropertiesAPITag.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesAPITag.setDescription('')
sfpsAOPropertiesAPITagString = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesAPITagString.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesAPITagString.setDescription('')
sfpsAOPropertiesAPIPrettyType = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesAPIPrettyType.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesAPIPrettyType.setDescription('')
sfpsAOPropertiesAPINumBytes = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpsAOPropertiesAPINumBytes.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesAPINumBytes.setDescription('')
sfpsAOPropertiesAPIIsLimit = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notSet", 1), ("false", 2), ("true", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsLimit.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsLimit.setDescription('')
sfpsAOPropertiesAPIIsMobile = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notSet", 1), ("false", 2), ("true", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsMobile.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsMobile.setDescription('')
sfpsAOPropertiesAPIIsSingle = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notSet", 1), ("false", 2), ("true", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsSingle.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsSingle.setDescription('')
sfpsAOPropertiesAPINoBlock = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notSet", 1), ("false", 2), ("true", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsAOPropertiesAPINoBlock.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesAPINoBlock.setDescription('')
sfpsAOPropertiesAPINoDelta = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notSet", 1), ("false", 2), ("true", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsAOPropertiesAPINoDelta.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesAPINoDelta.setDescription('')
sfpsAOPropertiesAPIAction = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("readProperties", 1), ("setProperties", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sfpsAOPropertiesAPIAction.setStatus('mandatory')
if mibBuilder.loadTexts: sfpsAOPropertiesAPIAction.setDescription('')

# Publish every object defined above (plus HexInteger) to the MIB builder.
mibBuilder.exportSymbols("CTRON-SFPS-COMMON-MIB", sfpsDiagLogConfigDebug9=sfpsDiagLogConfigDebug9, sfpsDiagLogConfigOther=sfpsDiagLogConfigOther, sfpsGenericVersionVersion=sfpsGenericVersionVersion, sfpsDiagLogConfigStatus=sfpsDiagLogConfigStatus, sfpsDiagLogEndSystemTimeout=sfpsDiagLogEndSystemTimeout, sfpsAOPropertiesTable=sfpsAOPropertiesTable, sfpsDiagLogConfigIndex=sfpsDiagLogConfigIndex, sfpsAOPropertiesNoBlock=sfpsAOPropertiesNoBlock, sfpsAOPropertiesAPIAction=sfpsAOPropertiesAPIAction, sfpsDiagLogConfigSFPSVlan=sfpsDiagLogConfigSFPSVlan, sfpsDiagLogSwitchIdleInterval=sfpsDiagLogSwitchIdleInterval, sfpsAOPropertiesAPIPrettyType=sfpsAOPropertiesAPIPrettyType, sfpsDiagLogConfigFilterMatch=sfpsDiagLogConfigFilterMatch, sfpsGenericVersionEntry=sfpsGenericVersionEntry, sfpsAOPropertiesTag=sfpsAOPropertiesTag, HexInteger=HexInteger, sfpsDiagLogRedirectorNumPackets=sfpsDiagLogRedirectorNumPackets, sfpsDiagLogConfigFilterStop=sfpsDiagLogConfigFilterStop, sfpsAOPropertiesPrettyType=sfpsAOPropertiesPrettyType, sfpsDiagLogConfigLogIndex=sfpsDiagLogConfigLogIndex, sfpsAOPropertiesAPIIsLimit=sfpsAOPropertiesAPIIsLimit, sfpsAOPropertiesAPINumBytes=sfpsAOPropertiesAPINumBytes, sfpsDiagLogConfigInstance=sfpsDiagLogConfigInstance, sfpsAOPropertiesAPINoBlock=sfpsAOPropertiesAPINoBlock, sfpsAOPropertiesAPINoDelta=sfpsAOPropertiesAPINoDelta, sfpsAOPropertiesIsMobile=sfpsAOPropertiesIsMobile, sfpsDiagLogRedirectorWakeup=sfpsDiagLogRedirectorWakeup, sfpsAOPropertiesIsLimit=sfpsAOPropertiesIsLimit, sfpsDiagLogAccessPortControl=sfpsDiagLogAccessPortControl, sfpsAOPropertiesNoDelta=sfpsAOPropertiesNoDelta, sfpsAOPropertiesAPIIsSingle=sfpsAOPropertiesAPIIsSingle, sfpsDiagLogSignalThrottle=sfpsDiagLogSignalThrottle, sfpsAOPropertiesAPIIsMobile=sfpsAOPropertiesAPIIsMobile, sfpsGenericVersionHash=sfpsGenericVersionHash, sfpsDiagLogConfigTable=sfpsDiagLogConfigTable, sfpsAOPropertiesEntry=sfpsAOPropertiesEntry, sfpsDiagLogConfigEntry=sfpsDiagLogConfigEntry, sfpsGenericVersionName=sfpsGenericVersionName, sfpsDiagLogCallIdleTime=sfpsDiagLogCallIdleTime, sfpsGenericVersionTable=sfpsGenericVersionTable, sfpsDiagLogFilterAddTimer=sfpsDiagLogFilterAddTimer, sfpsDiagLogConfigStop=sfpsDiagLogConfigStop, sfpsAOPropertiesAPITagString=sfpsAOPropertiesAPITagString, sfpsAOPropertiesTagDescriptor=sfpsAOPropertiesTagDescriptor, sfpsDiagLogConfigFilterStart=sfpsDiagLogConfigFilterStart, sfpsDiagLogConfigStart=sfpsDiagLogConfigStart, sfpsGenericVersionMIBRev=sfpsGenericVersionMIBRev, sfpsAOPropertiesAPITag=sfpsAOPropertiesAPITag, sfpsDiagLogConfigSoftReset=sfpsDiagLogConfigSoftReset, sfpsAOPropertiesNumBytes=sfpsAOPropertiesNumBytes, sfpsDiagLogInlnFltrAgeTime=sfpsDiagLogInlnFltrAgeTime, sfpsAOPropertiesIsSingle=sfpsAOPropertiesIsSingle)
(integer, octet_string, object_identifier) = mibBuilder.importSymbols('ASN1', 'Integer', 'OctetString', 'ObjectIdentifier') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (value_size_constraint, value_range_constraint, constraints_union, single_value_constraint, constraints_intersection) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueSizeConstraint', 'ValueRangeConstraint', 'ConstraintsUnion', 'SingleValueConstraint', 'ConstraintsIntersection') (sfps_system_generics, sfps_diag_event_log, sfps_ao_properties_api, sfps_ao_properties) = mibBuilder.importSymbols('CTRON-SFPS-INCLUDE-MIB', 'sfpsSystemGenerics', 'sfpsDiagEventLog', 'sfpsAOPropertiesAPI', 'sfpsAOProperties') (notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance') (integer32, time_ticks, counter32, counter64, gauge32, ip_address, object_identity, module_identity, mib_scalar, mib_table, mib_table_row, mib_table_column, mib_identifier, unsigned32, iso, notification_type, bits) = mibBuilder.importSymbols('SNMPv2-SMI', 'Integer32', 'TimeTicks', 'Counter32', 'Counter64', 'Gauge32', 'IpAddress', 'ObjectIdentity', 'ModuleIdentity', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'MibIdentifier', 'Unsigned32', 'iso', 'NotificationType', 'Bits') (textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString') class Hexinteger(Integer32): pass sfps_generic_version_table = mib_table((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1)) if mibBuilder.loadTexts: sfpsGenericVersionTable.setStatus('mandatory') if mibBuilder.loadTexts: sfpsGenericVersionTable.setDescription('Table describing the SFS generics and their versions that are contained within this image.') sfps_generic_version_entry = mib_table_row((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1, 1)).setIndexNames((0, 'CTRON-SFPS-COMMON-MIB', 'sfpsGenericVersionHash')) if mibBuilder.loadTexts: 
sfpsGenericVersionEntry.setStatus('mandatory') if mibBuilder.loadTexts: sfpsGenericVersionEntry.setDescription('An entry in the table instanced by the Generic name.') sfps_generic_version_hash = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1, 1, 1), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsGenericVersionHash.setStatus('mandatory') if mibBuilder.loadTexts: sfpsGenericVersionHash.setDescription('Hash of the name to make it a unique entry.') sfps_generic_version_name = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1, 1, 2), display_string()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsGenericVersionName.setStatus('mandatory') if mibBuilder.loadTexts: sfpsGenericVersionName.setDescription('Name of the Generic.') sfps_generic_version_version = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1, 1, 3), display_string()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsGenericVersionVersion.setStatus('mandatory') if mibBuilder.loadTexts: sfpsGenericVersionVersion.setDescription('Version stamp of the generic component.') sfps_generic_version_mib_rev = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 3, 1, 1, 4), display_string()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsGenericVersionMIBRev.setStatus('mandatory') if mibBuilder.loadTexts: sfpsGenericVersionMIBRev.setDescription('MIB version of the generic component.') sfps_diag_log_config_table = mib_table((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1)) if mibBuilder.loadTexts: sfpsDiagLogConfigTable.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigTable.setDescription('This table contains the informtion to configure an Event Logger object.') sfps_diag_log_config_entry = mib_table_row((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1)).setIndexNames((0, 'CTRON-SFPS-COMMON-MIB', 'sfpsDiagLogConfigInstance')) if mibBuilder.loadTexts: sfpsDiagLogConfigEntry.setStatus('mandatory') if mibBuilder.loadTexts: 
sfpsDiagLogConfigEntry.setDescription('Each entry contains configuration data.') sfps_diag_log_config_instance = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 1), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsDiagLogConfigInstance.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigInstance.setDescription('The instance of this Event Logger.') sfps_diag_log_config_status = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('other', 1), ('disabled', 2), ('enabled', 3)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigStatus.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigStatus.setDescription('The status of this Event Logger object.') sfps_diag_log_config_index = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 3), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsDiagLogConfigIndex.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigIndex.setDescription('The current index in the circular buffer where events are being logged.') sfps_diag_log_config_start = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 4), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigStart.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigStart.setDescription('The starting index in the circular buffer to display.') sfps_diag_log_config_stop = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 5), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigStop.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigStop.setDescription('The last index in the circular buffer to display.') sfps_diag_log_config_log_index = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 6), 
integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigLogIndex.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigLogIndex.setDescription('The current index in the log buffer where we are looking.') sfps_diag_log_config_filter_match = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 7), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigFilterMatch.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigFilterMatch.setDescription('The specified Event Id to log. A value of 0 will cause every event to be logged.') sfps_diag_log_config_filter_start = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 8), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigFilterStart.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigFilterStart.setDescription('The beginning range of Event Ids to log.') sfps_diag_log_config_filter_stop = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 9), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigFilterStop.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigFilterStop.setDescription('The ending range of Event Ids to log.') sfps_diag_log_access_port_control = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 10), hex_integer()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogAccessPortControl.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogAccessPortControl.setDescription('Leaf to be used to set a port bit mask. This bit mask represents which ports shall be statically set to be an ACCESS port. 
Bit 0 corresponds to Port 1.') sfps_diag_log_call_idle_time = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 11), integer32().clone(60)).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogCallIdleTime.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogCallIdleTime.setDescription('Leaf to be used to set a debug variable. (in seconds)') sfps_diag_log_filter_add_timer = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 12), integer32().clone(900)).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogFilterAddTimer.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogFilterAddTimer.setDescription('Leaf to be used to set how long a filter connection should be (Default : 900 seconds)') sfps_diag_log_redirector_wakeup = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 13), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogRedirectorWakeup.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogRedirectorWakeup.setDescription('Leaf to be used to set how often the Redirector wakes up to service the queue (in seconds)') sfps_diag_log_redirector_num_packets = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 14), integer32().clone(64)).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogRedirectorNumPackets.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogRedirectorNumPackets.setDescription('Leaf to be used to set how many packets the Redirector reads off at a time.') sfps_diag_log_end_system_timeout = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 15), integer32().clone(600)).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogEndSystemTimeout.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogEndSystemTimeout.setDescription('Leaf to be used to set a debug variable. 
(Default : 10 minutes) (in seconds)') sfps_diag_log_switch_idle_interval = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 16), integer32().clone(30)).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogSwitchIdleInterval.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogSwitchIdleInterval.setDescription('Leaf to be used to set a debug variable. (in seconds)') sfps_diag_log_inln_fltr_age_time = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 17), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogInlnFltrAgeTime.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogInlnFltrAgeTime.setDescription('Leaf to be used to set the BAF age time (seconds).') sfps_diag_log_config_debug9 = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 18), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigDebug9.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigDebug9.setDescription('Leaf to be used to set a debug variable.') sfps_diag_log_signal_throttle = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 19), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogSignalThrottle.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogSignalThrottle.setDescription('Leaf to be used to set the Signal Thottle knob. Throttling defaults to off. 
By setting it to 1,2,5, or 10 one can dictate the rate (signal/second) at which SFPS sends signals to the ACMS Signal Stack..') sfps_diag_log_config_other = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 20), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('string', 1), ('integer', 2)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigOther.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigOther.setDescription('Dictates the format of the Event to be displayed (either Text or Integer values).') sfps_diag_log_config_soft_reset = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 21), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigSoftReset.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigSoftReset.setDescription('') sfps_diag_log_config_sfps_vlan = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 2, 6, 1, 1, 1, 22), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsDiagLogConfigSFPSVlan.setStatus('mandatory') if mibBuilder.loadTexts: sfpsDiagLogConfigSFPSVlan.setDescription('') sfps_ao_properties_table = mib_table((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1)) if mibBuilder.loadTexts: sfpsAOPropertiesTable.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesTable.setDescription('') sfps_ao_properties_entry = mib_table_row((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1)).setIndexNames((0, 'CTRON-SFPS-COMMON-MIB', 'sfpsAOPropertiesTag')) if mibBuilder.loadTexts: sfpsAOPropertiesEntry.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesEntry.setDescription('An entry in the table instanced by the tag.') sfps_ao_properties_tag = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 1), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsAOPropertiesTag.setStatus('mandatory') if mibBuilder.loadTexts: 
sfpsAOPropertiesTag.setDescription('') sfps_ao_properties_tag_descriptor = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 2), octet_string()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsAOPropertiesTagDescriptor.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesTagDescriptor.setDescription('') sfps_ao_properties_pretty_type = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 3), octet_string()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsAOPropertiesPrettyType.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesPrettyType.setDescription('') sfps_ao_properties_num_bytes = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 4), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsAOPropertiesNumBytes.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesNumBytes.setDescription('') sfps_ao_properties_is_limit = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('false', 1), ('true', 2)))).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsAOPropertiesIsLimit.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesIsLimit.setDescription('') sfps_ao_properties_is_mobile = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('false', 1), ('true', 2)))).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsAOPropertiesIsMobile.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesIsMobile.setDescription('') sfps_ao_properties_is_single = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 7), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('false', 1), ('true', 2)))).setMaxAccess('readonly') 
if mibBuilder.loadTexts: sfpsAOPropertiesIsSingle.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesIsSingle.setDescription('') sfps_ao_properties_no_block = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 8), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('false', 1), ('true', 2)))).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsAOPropertiesNoBlock.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesNoBlock.setDescription('') sfps_ao_properties_no_delta = mib_table_column((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 1, 1, 9), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('false', 1), ('true', 2)))).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsAOPropertiesNoDelta.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesNoDelta.setDescription('') sfps_ao_properties_api_tag = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 1), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsAOPropertiesAPITag.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesAPITag.setDescription('') sfps_ao_properties_api_tag_string = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 2), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsAOPropertiesAPITagString.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesAPITagString.setDescription('') sfps_ao_properties_api_pretty_type = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 3), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: sfpsAOPropertiesAPIPrettyType.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesAPIPrettyType.setDescription('') sfps_ao_properties_api_num_bytes = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 4), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: 
sfpsAOPropertiesAPINumBytes.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesAPINumBytes.setDescription('') sfps_ao_properties_api_is_limit = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('notSet', 1), ('false', 2), ('true', 3)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsLimit.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsLimit.setDescription('') sfps_ao_properties_api_is_mobile = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('notSet', 1), ('false', 2), ('true', 3)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsMobile.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsMobile.setDescription('') sfps_ao_properties_api_is_single = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 7), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('notSet', 1), ('false', 2), ('true', 3)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsSingle.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesAPIIsSingle.setDescription('') sfps_ao_properties_api_no_block = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 8), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('notSet', 1), ('false', 2), ('true', 3)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsAOPropertiesAPINoBlock.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesAPINoBlock.setDescription('') sfps_ao_properties_api_no_delta = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 9), 
integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('notSet', 1), ('false', 2), ('true', 3)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsAOPropertiesAPINoDelta.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesAPINoDelta.setDescription('') sfps_ao_properties_api_action = mib_scalar((1, 3, 6, 1, 4, 1, 52, 4, 2, 4, 2, 1, 1, 5, 2, 10), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('readProperties', 1), ('setProperties', 2)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: sfpsAOPropertiesAPIAction.setStatus('mandatory') if mibBuilder.loadTexts: sfpsAOPropertiesAPIAction.setDescription('') mibBuilder.exportSymbols('CTRON-SFPS-COMMON-MIB', sfpsDiagLogConfigDebug9=sfpsDiagLogConfigDebug9, sfpsDiagLogConfigOther=sfpsDiagLogConfigOther, sfpsGenericVersionVersion=sfpsGenericVersionVersion, sfpsDiagLogConfigStatus=sfpsDiagLogConfigStatus, sfpsDiagLogEndSystemTimeout=sfpsDiagLogEndSystemTimeout, sfpsAOPropertiesTable=sfpsAOPropertiesTable, sfpsDiagLogConfigIndex=sfpsDiagLogConfigIndex, sfpsAOPropertiesNoBlock=sfpsAOPropertiesNoBlock, sfpsAOPropertiesAPIAction=sfpsAOPropertiesAPIAction, sfpsDiagLogConfigSFPSVlan=sfpsDiagLogConfigSFPSVlan, sfpsDiagLogSwitchIdleInterval=sfpsDiagLogSwitchIdleInterval, sfpsAOPropertiesAPIPrettyType=sfpsAOPropertiesAPIPrettyType, sfpsDiagLogConfigFilterMatch=sfpsDiagLogConfigFilterMatch, sfpsGenericVersionEntry=sfpsGenericVersionEntry, sfpsAOPropertiesTag=sfpsAOPropertiesTag, HexInteger=HexInteger, sfpsDiagLogRedirectorNumPackets=sfpsDiagLogRedirectorNumPackets, sfpsDiagLogConfigFilterStop=sfpsDiagLogConfigFilterStop, sfpsAOPropertiesPrettyType=sfpsAOPropertiesPrettyType, sfpsDiagLogConfigLogIndex=sfpsDiagLogConfigLogIndex, sfpsAOPropertiesAPIIsLimit=sfpsAOPropertiesAPIIsLimit, sfpsAOPropertiesAPINumBytes=sfpsAOPropertiesAPINumBytes, sfpsDiagLogConfigInstance=sfpsDiagLogConfigInstance, 
sfpsAOPropertiesAPINoBlock=sfpsAOPropertiesAPINoBlock, sfpsAOPropertiesAPINoDelta=sfpsAOPropertiesAPINoDelta, sfpsAOPropertiesIsMobile=sfpsAOPropertiesIsMobile, sfpsDiagLogRedirectorWakeup=sfpsDiagLogRedirectorWakeup, sfpsAOPropertiesIsLimit=sfpsAOPropertiesIsLimit, sfpsDiagLogAccessPortControl=sfpsDiagLogAccessPortControl, sfpsAOPropertiesNoDelta=sfpsAOPropertiesNoDelta, sfpsAOPropertiesAPIIsSingle=sfpsAOPropertiesAPIIsSingle, sfpsDiagLogSignalThrottle=sfpsDiagLogSignalThrottle, sfpsAOPropertiesAPIIsMobile=sfpsAOPropertiesAPIIsMobile, sfpsGenericVersionHash=sfpsGenericVersionHash, sfpsDiagLogConfigTable=sfpsDiagLogConfigTable, sfpsAOPropertiesEntry=sfpsAOPropertiesEntry, sfpsDiagLogConfigEntry=sfpsDiagLogConfigEntry, sfpsGenericVersionName=sfpsGenericVersionName, sfpsDiagLogCallIdleTime=sfpsDiagLogCallIdleTime, sfpsGenericVersionTable=sfpsGenericVersionTable, sfpsDiagLogFilterAddTimer=sfpsDiagLogFilterAddTimer, sfpsDiagLogConfigStop=sfpsDiagLogConfigStop, sfpsAOPropertiesAPITagString=sfpsAOPropertiesAPITagString, sfpsAOPropertiesTagDescriptor=sfpsAOPropertiesTagDescriptor, sfpsDiagLogConfigFilterStart=sfpsDiagLogConfigFilterStart, sfpsDiagLogConfigStart=sfpsDiagLogConfigStart, sfpsGenericVersionMIBRev=sfpsGenericVersionMIBRev, sfpsAOPropertiesAPITag=sfpsAOPropertiesAPITag, sfpsDiagLogConfigSoftReset=sfpsDiagLogConfigSoftReset, sfpsAOPropertiesNumBytes=sfpsAOPropertiesNumBytes, sfpsDiagLogInlnFltrAgeTime=sfpsDiagLogInlnFltrAgeTime, sfpsAOPropertiesIsSingle=sfpsAOPropertiesIsSingle)
# Emit the two fixed status lines.
for message in ("git UNT ", "git lunes"):
    print(message)
# Emit the two fixed status lines.
print('\n'.join(['git UNT ', 'git lunes']))
def metaLine2metaDict(metaLine):
    """Split a GFF attribute string ('key=value;key=value;...') into a dict.

    Each ';'-separated field is split on '='; the text between the first and
    second '=' becomes the value (matching ``split('=')[1]`` semantics).
    """
    return {field.split('=')[0]: field.split('=')[1]
            for field in metaLine.split(';')}


def getGeneInformationFromGFFline(line, field):
    """Return attribute ``field`` from a GFF 'gene' feature line.

    Returns False for comment lines and for any feature that is not 'gene'.
    Raises KeyError if the gene line lacks the requested attribute.
    """
    if line.startswith('#'):
        return False
    columns = line.strip().split('\t')
    if len(columns) > 2 and columns[2] == 'gene':
        # Column 9 (index 8) carries the ';'-separated attributes.
        return metaLine2metaDict(columns[8])[field]
    return False
def meta_line2meta_dict(metaLine):
    """Split a GFF attribute string ('key=value;key=value;...') into a dict.

    BUG FIX: the previous body mixed naming schemes (``metaDict`` vs
    ``meta_dict``, plus undefined ``subfieldName``/``fieldInfo``), so every
    call raised NameError. Locals are now named consistently.
    """
    meta_dict = {}
    for field in metaLine.split(';'):
        fl = field.split('=')
        meta_dict[fl[0]] = fl[1]
    return meta_dict


def get_gene_information_from_gf_fline(line, field):
    """Return attribute ``field`` from a GFF 'gene' feature line.

    Returns False for comment lines and non-'gene' features.
    Raises KeyError if the gene line lacks the requested attribute.
    """
    result = False
    if not line.startswith('#'):
        ll = line.strip().split('\t')
        if len(ll) > 2 and ll[2] == 'gene':
            # Column 9 (index 8) carries the ';'-separated attributes.
            # BUG FIX: referenced undefined names metaLine/metaDict before.
            meta_dict = meta_line2meta_dict(ll[8])
            result = meta_dict[field]
    return result
# Ask for a main string and a substring, then report membership.
s = input('Enter Main String:')
subs = input('Enter Substring to search:')
verdict = 'Is found in Main String' if subs in s else 'is not found in Main String'
print(subs, verdict)
# Ask for a main string and a substring, then report membership.
s = input('Enter Main String:')
subs = input('Enter Substring to search:')
if subs not in s:
    print(subs, 'is not found in Main String')
else:
    print(subs, 'Is found in Main String')
class Solution(object):
    def maxProduct(self, nums):
        """Return the maximum product over all contiguous subarrays of nums.

        Tracks both the smallest and largest product ending at each index,
        since multiplying by a negative value can turn the running minimum
        into the new maximum.

        :type nums: List[int]
        :rtype: int
        """
        best = smallest = largest = nums[0]
        for value in nums[1:]:
            candidates = (value, value * smallest, value * largest)
            smallest = min(candidates)
            largest = max(candidates)
            best = max(best, largest)
        return best
class Solution(object):

    def max_product(self, nums):
        """Return the maximum product of any contiguous subarray.

        Keeps running minimum/maximum products ending at the current
        element; a negative element swaps their roles.

        :type nums: List[int]
        :rtype: int
        """
        running_min = running_max = answer = nums[0]
        for current in nums[1:]:
            if current < 0:
                running_min, running_max = running_max, running_min
            running_max = max(current, running_max * current)
            running_min = min(current, running_min * current)
            answer = max(answer, running_max)
        return answer
# automatically generated by the FlatBuffers compiler, do not modify # namespace: FBS class FByteDataType(object): UINT8 = 0 FLOAT16 = 1 FLOAT32 = 2 PNG = 3 JPEG = 4 Other = 5
class Fbytedatatype(object):
    """Integer tags identifying the encoding of a byte-data payload.

    NOTE(review): these mirror the FlatBuffers-generated FByteDataType
    constants with lower-cased names — confirm callers use these names.
    """
    uint8 = 0
    float16 = 1
    float32 = 2
    png = 3
    jpeg = 4
    other = 5
FILE_PATH = './Day4/input.txt'

# Fields every passport must carry ('cid' is optional).
KNOWN_FIELDS = ['byr', 'iyr', 'eyr', 'hgt', 'hcl', 'ecl', 'pid', 'cid']


def parseData(recordData):
    """Build one passport dict from a list of 'field:value ...' lines."""
    record = {}
    for data in recordData:
        for fieldPair in data.split(' '):
            field, value = fieldPair.split(':')
            record[field] = value
            if field not in KNOWN_FIELDS:
                print('What no!!! {}'.format(field))
    return record


def readFile(path):
    """Parse the batch file into passport dicts (records split on blank lines)."""
    records = []
    recordData = []
    # BUG FIX: the handle was opened with open() and never closed;
    # a context manager guarantees it is released.
    with open(path, 'r') as f:
        for l in f:
            l = l.strip()
            if l == '':
                records.append(parseData(recordData))
                recordData = []
            else:
                recordData.append(l)
    # The final record has no trailing blank line.
    records.append(parseData(recordData))
    return records


def isPassportValid(record):
    """True when all seven required fields are present (cid optional)."""
    requiredFields = ['byr', 'iyr', 'eyr', 'hgt', 'hcl', 'ecl', 'pid']
    for field in requiredFields:
        if field not in record:
            print('missingField: {}. {}'.format(field, record))
            return False
    return True


def checkPassports():
    """Part 1: count passports with all required fields present."""
    records = readFile(FILE_PATH)
    print('{} passports'.format(len(records)))
    return sum(1 for rec in records if isPassportValid(rec))


def _validateYear(value, low, high):
    """True if value is a 4-digit number within [low, high] inclusive."""
    if not value.isnumeric() or len(value) != 4:
        return False
    return low <= int(value) <= high


def validateBYR(byr):
    """Birth year: four digits, 1920-2002."""
    return _validateYear(byr, 1920, 2002)


def validateIYR(iyr):
    """Issue year: four digits, 2010-2020."""
    return _validateYear(iyr, 2010, 2020)


def validateEYR(eyr):
    """Expiration year: four digits, 2020-2030."""
    return _validateYear(eyr, 2020, 2030)


def validateHeight(height):
    """Height: 150-193 followed by 'cm', or 59-76 followed by 'in'."""
    if len(height) < 4:
        return False
    value, unit = height[:-2], height[-2:]
    if not value.isnumeric():
        return False
    if unit == 'cm':
        return 150 <= int(value) <= 193
    if unit == 'in':
        return 59 <= int(value) <= 76
    return False


def validateHCL(hcl):
    """Hair color: '#' followed by exactly six lowercase hex digits."""
    if len(hcl) != 7 or hcl[0] != '#':
        return False
    return all(c in '1234567890abcdef' for c in hcl[1:])


def validateECL(ecl):
    """Eye color: one of the seven allowed codes."""
    return ecl in {'amb', 'blu', 'brn', 'gry', 'grn', 'hzl', 'oth'}


def validatePID(pid):
    """Passport id: exactly nine digits (leading zeros allowed)."""
    return len(pid) == 9 and pid.isnumeric()


def validatePassport(passport):
    """True when all required fields are present and each passes its validator."""
    if not isPassportValid(passport):
        return False
    checks = (
        (validateBYR, 'byr'),
        (validateIYR, 'iyr'),
        (validateEYR, 'eyr'),
        (validateHeight, 'hgt'),
        (validateHCL, 'hcl'),
        (validateECL, 'ecl'),
        (validatePID, 'pid'),
    )
    return all(check(passport[field]) for check, field in checks)


def validatePassports():
    """Part 2: count passports whose field values all validate."""
    records = readFile(FILE_PATH)
    return sum(1 for rec in records if validatePassport(rec))


if __name__ == "__main__":
    print(validatePassports())
# BUG FIX: the constant was defined as lowercase ``file_path`` while the
# functions below referenced undefined ``FILE_PATH`` — a NameError at runtime.
FILE_PATH = './Day4/input.txt'


def parse_data(recordData):
    """Build one passport dict from a list of 'field:value ...' lines.

    BUG FIX: the body referenced undefined camelCase names
    (fieldPairs/fieldPair); locals are now consistently snake_case.
    """
    record = {}
    for data in recordData:
        field_pairs = data.split(' ')
        for field_pair in field_pairs:
            field, value = field_pair.split(':')
            record[field] = value
            if field not in ['byr', 'iyr', 'eyr', 'hgt', 'hcl', 'ecl', 'pid', 'cid']:
                print('What no!!! {}'.format(field))
    return record


def read_file(path):
    """Parse the batch file into passport dicts (records split on blank lines).

    BUG FIX: referenced undefined ``recordData``; also the file handle was
    never closed — now managed with a context manager.
    """
    records = []
    record_data = []
    with open(path, 'r') as f:
        for l in f:
            l = l.strip()
            if l == '':
                records.append(parse_data(record_data))
                record_data = []
            else:
                record_data.append(l)
    # The final record has no trailing blank line.
    records.append(parse_data(record_data))
    return records


def is_passport_valid(record):
    """True when all seven required fields are present (cid optional)."""
    required_fields = ['byr', 'iyr', 'eyr', 'hgt', 'hcl', 'ecl', 'pid']
    for field in required_fields:
        if field not in record:
            print('missingField: {}. {}'.format(field, record))
            return False
    return True


def check_passports():
    """Part 1: count passports with all required fields present."""
    records = read_file(FILE_PATH)
    print('{} passports'.format(len(records)))
    count = 0
    for rec in records:
        if is_passport_valid(rec):
            count += 1
    return count


def validate_byr(byr):
    """Birth year: four digits, 1920-2002."""
    if not byr.isnumeric() or len(byr) != 4:
        return False
    byr_num = int(byr)
    return 1920 <= byr_num <= 2002


def validate_iyr(iyr):
    """Issue year: four digits, 2010-2020."""
    if not iyr.isnumeric() or len(iyr) != 4:
        return False
    iyr_num = int(iyr)
    return 2010 <= iyr_num <= 2020


def validate_eyr(eyr):
    """Expiration year: four digits, 2020-2030."""
    if not eyr.isnumeric() or len(eyr) != 4:
        return False
    eyr_num = int(eyr)
    return 2020 <= eyr_num <= 2030


def validate_height(height):
    """Height: 150-193 followed by 'cm', or 59-76 followed by 'in'."""
    if len(height) < 4:
        return False
    h_val, unit = height[:-2], height[-2:]
    if not h_val.isnumeric():
        return False
    h_num = int(h_val)
    if unit == 'cm':
        return 150 <= h_num <= 193
    if unit == 'in':
        return 59 <= h_num <= 76
    return False


def validate_hcl(hcl):
    """Hair color: '#' followed by exactly six lowercase hex digits."""
    if len(hcl) != 7 or hcl[0] != '#':
        return False
    valid_chars = '1234567890abcdef'
    return all(c in valid_chars for c in hcl[1:])


def validate_ecl(ecl):
    """Eye color: one of the seven allowed codes."""
    valid_clr = {'amb', 'blu', 'brn', 'gry', 'grn', 'hzl', 'oth'}
    return ecl in valid_clr


def validate_pid(pid):
    """Passport id: exactly nine digits (leading zeros allowed)."""
    return len(pid) == 9 and pid.isnumeric()


def validate_passport(passport):
    """True when all required fields are present and each passes its validator."""
    if not is_passport_valid(passport):
        return False
    checks = (
        (validate_byr, 'byr'),
        (validate_iyr, 'iyr'),
        (validate_eyr, 'eyr'),
        (validate_height, 'hgt'),
        (validate_hcl, 'hcl'),
        (validate_ecl, 'ecl'),
        (validate_pid, 'pid'),
    )
    return all(check(passport[field]) for check, field in checks)


def validate_passports():
    """Part 2: count passports whose field values all validate."""
    records = read_file(FILE_PATH)
    count = 0
    for rec in records:
        if validate_passport(rec):
            count += 1
    return count


if __name__ == '__main__':
    print(validate_passports())
""" TASK1: Finding the Square Root of an Integer Find the square root of the integer without using any Python library. You have to find the floor value of the square root. For example if the given number is 16, then the answer would be 4. If the given number is 27, the answer would be 5 because sqrt(5) = 5.196 whose floor value is 5. The expected time complexity is O(log(n)) """ def sqrt_slower(number: int) -> int: """ Calculate the floored square root of a number Args: number(int): Number to find the floored squared root Returns: int: Floored Square Root """ print("->sqrt for number " + str(number)) if number == 0: return 0 if number is None or number < 0: return "Error" if number == 1: return 1 counter = 2 while True: square = counter * counter print("counter= " + str(counter) + ", square=" + str(square)) if square == number: return counter elif square < number: counter += 1 else: return counter - 1 # Newton's method def sqrt(number: int) -> int: if number == 0: return 0 if number is None or number < 0: return "Error" x = number y = (x + 1) // 2 while y < x: x = y y = (x + number // x) // 2 print("x=" + str(x) + ", y=" + str(y)) return x def test_sqrt(): print("case1----------------------------------------------------------------------------------") # case1 expected_result = 3 actual_result = sqrt(9) print("result= " + str(actual_result)) assert (expected_result == actual_result), "{}expected is {}, actual is {}".format("case1: ", expected_result, actual_result) print("\ncase2----------------------------------------------------------------------------------") # case2 expected_result = 0 actual_result = sqrt(0) print("result= " + str(actual_result)) assert (expected_result == actual_result), "{}expected is {}, actual is {}".format("case2: ", expected_result, actual_result) print("\ncase3----------------------------------------------------------------------------------") # case3 expected_result = 4 actual_result = sqrt(16) print("result= " + str(actual_result)) 
assert (expected_result == actual_result), "{}expected is {}, actual is {}".format("case3: ", expected_result, actual_result) print("\ncase4----------------------------------------------------------------------------------") # case4 expected_result = 1 actual_result = sqrt(1) print("result= " + str(actual_result)) assert (expected_result == actual_result), "{}expected is {}, actual is {}".format("case4: ", expected_result, actual_result) print("\ncase5----------------------------------------------------------------------------------") # case5 expected_result = 5 actual_result = sqrt(27) print("result= " + str(actual_result)) assert (expected_result == actual_result), "{}expected is {}, actual is {}".format("case5: ", expected_result, actual_result) print("\ncase6----------------------------------------------------------------------------------") # case6 expected_result = 5 actual_result = sqrt(34) print("result= " + str(actual_result)) assert (expected_result == actual_result), "{}expected is {}, actual is {}".format("case6: ", expected_result, actual_result) print("\ncase7----------------------------------------------------------------------------------") # case6 expected_result = 32 actual_result = sqrt(1024) print("result= " + str(actual_result)) assert (expected_result == actual_result), "{}expected is {}, actual is {}".format("case7: ", expected_result, actual_result) print("\ncase8----------------------------------------------------------------------------------") # case7 expected_result = "Error" actual_result = sqrt(None) print("result= " + str(actual_result)) assert (expected_result == actual_result), "{}expected is {}, actual is {}".format("case8: ", expected_result, actual_result) print("\ncase9----------------------------------------------------------------------------------") # case8 expected_result = "Error" actual_result = sqrt(-16) print("result= " + str(actual_result)) assert (expected_result == actual_result), "{}expected is {}, actual is 
{}".format("case9: ", expected_result, actual_result) test_sqrt()
""" TASK1: Finding the Square Root of an Integer Find the square root of the integer without using any Python library. You have to find the floor value of the square root. For example if the given number is 16, then the answer would be 4. If the given number is 27, the answer would be 5 because sqrt(5) = 5.196 whose floor value is 5. The expected time complexity is O(log(n)) """ def sqrt_slower(number: int) -> int: """ Calculate the floored square root of a number Args: number(int): Number to find the floored squared root Returns: int: Floored Square Root """ print('->sqrt for number ' + str(number)) if number == 0: return 0 if number is None or number < 0: return 'Error' if number == 1: return 1 counter = 2 while True: square = counter * counter print('counter= ' + str(counter) + ', square=' + str(square)) if square == number: return counter elif square < number: counter += 1 else: return counter - 1 def sqrt(number: int) -> int: if number == 0: return 0 if number is None or number < 0: return 'Error' x = number y = (x + 1) // 2 while y < x: x = y y = (x + number // x) // 2 print('x=' + str(x) + ', y=' + str(y)) return x def test_sqrt(): print('case1----------------------------------------------------------------------------------') expected_result = 3 actual_result = sqrt(9) print('result= ' + str(actual_result)) assert expected_result == actual_result, '{}expected is {}, actual is {}'.format('case1: ', expected_result, actual_result) print('\ncase2----------------------------------------------------------------------------------') expected_result = 0 actual_result = sqrt(0) print('result= ' + str(actual_result)) assert expected_result == actual_result, '{}expected is {}, actual is {}'.format('case2: ', expected_result, actual_result) print('\ncase3----------------------------------------------------------------------------------') expected_result = 4 actual_result = sqrt(16) print('result= ' + str(actual_result)) assert expected_result == actual_result, 
'{}expected is {}, actual is {}'.format('case3: ', expected_result, actual_result) print('\ncase4----------------------------------------------------------------------------------') expected_result = 1 actual_result = sqrt(1) print('result= ' + str(actual_result)) assert expected_result == actual_result, '{}expected is {}, actual is {}'.format('case4: ', expected_result, actual_result) print('\ncase5----------------------------------------------------------------------------------') expected_result = 5 actual_result = sqrt(27) print('result= ' + str(actual_result)) assert expected_result == actual_result, '{}expected is {}, actual is {}'.format('case5: ', expected_result, actual_result) print('\ncase6----------------------------------------------------------------------------------') expected_result = 5 actual_result = sqrt(34) print('result= ' + str(actual_result)) assert expected_result == actual_result, '{}expected is {}, actual is {}'.format('case6: ', expected_result, actual_result) print('\ncase7----------------------------------------------------------------------------------') expected_result = 32 actual_result = sqrt(1024) print('result= ' + str(actual_result)) assert expected_result == actual_result, '{}expected is {}, actual is {}'.format('case7: ', expected_result, actual_result) print('\ncase8----------------------------------------------------------------------------------') expected_result = 'Error' actual_result = sqrt(None) print('result= ' + str(actual_result)) assert expected_result == actual_result, '{}expected is {}, actual is {}'.format('case8: ', expected_result, actual_result) print('\ncase9----------------------------------------------------------------------------------') expected_result = 'Error' actual_result = sqrt(-16) print('result= ' + str(actual_result)) assert expected_result == actual_result, '{}expected is {}, actual is {}'.format('case9: ', expected_result, actual_result) test_sqrt()
# Simple ATM: dispense the requested amount using 50/20/10/1 notes.
print('=' * 60)
print('                    CAIXA ELETRONICO                    ')
print('=' * 60)
total = int(input('Que valor deseja sacar ? R$ '))
for cedula in (50, 20, 10, 1):
    totalced = 0
    while total >= cedula:
        total -= cedula
        totalced += 1
        print(total)
    if totalced > 0:
        print(f'Total de {totalced} cedulas de R$ {cedula}')
    if total == 0:
        break
print('=' * 60)
print('                      VOLTE SEMPRE                      ')
# Simple ATM simulation: dispense the requested amount using 50/20/10/1 notes.
print(60 * '=')
print('                    CAIXA ELETRONICO                    ')
print(60 * '=')
# Amount the user wants to withdraw.
total = int(input('Que valor deseja sacar ? R$ '))
cedula = 50  # current note denomination
totalced = 0  # notes dispensed at the current denomination
# NOTE(review): a negative amount never reaches 0, so this loop would not
# terminate — input should be validated upstream; confirm intended behavior.
while True:
    if total >= cedula:
        total -= cedula
        totalced += 1
        print(total)
    else:
        # Report how many notes of this denomination were used, then move
        # down to the next smaller denomination.
        if totalced > 0:
            print(f'Total de {totalced} cedulas de R$ {cedula}')
        if cedula == 50:
            cedula = 20
        elif cedula == 20:
            cedula = 10
        elif cedula == 10:
            cedula = 1
        totalced = 0
        if total == 0:
            break
print(60 * '=')
print('                      VOLTE SEMPRE                      ')
class BaseRandomizer():
    """Base class holding the RNG seed and the project's text input path."""

    def __init__(self, projectName=None, seed=None, programMode=True) -> None:
        self.seed = seed
        # Program mode reads from the project's tmp staging area.
        subdir = 'tmp/text' if programMode else 'text'
        self.inputPath = f'projects/{projectName}/{subdir}/'
class Baserandomizer:
    """Base randomizer holding a seed and the project's text input path."""

    def __init__(self, projectName=None, seed=None, programMode=True) -> None:
        self.seed = seed
        # Pick the path template first, then fill in the project name.
        template = 'projects/{}/tmp/text/' if programMode else 'projects/{}/text/'
        self.inputPath = template.format(projectName)
""" This constant file was automatically generated by a quick script I wrote for the enum part and h2py for the constant part. It aims to help out remembering the enum values and constants, but probably as bugs... please refer to the actual documentation for the correct values (if something is not working) and feel free to correct the values and submit a pull request. """ class EWfmItem: WfmItem_MARKER1 = 1 WfmItem_MARKER2 = 2 WfmItem_DIGITAL0 = 3 WfmItem_DIGITAL1 = 4 WfmItem_DIGITAL2 = 5 WfmItem_DIGITAL3 = 6 WfmItem_DIGITAL4 = 7 WfmItem_DIGITAL5 = 8 WfmItem_DIGITAL6 = 9 WfmItem_DIGITAL7 = 10 WfmItem_DIGITAL8 = 11 WfmItem_DIGITAL9 = 12 WfmItem_DIGITAL10 = 13 WfmItem_DIGITAL11 = 14 WfmItem_DIGITAL12 = 15 WfmItem_DIGITAL13 = 16 class ELogicSignature: LogicSignature_0 = 1 LogicSignature_1 = 2 LogicSignature_MIX = 3 class EStdWfmType: StdWfmType_SQUARE = 1 StdWfmType_TRIANGLE = 2 StdWfmType_RAMP = 3 StdWfmType_GAUSSIAN_NOISE = 4 class EPointDrawInterpolationType: PointDrawInterpolationType_SPLINE = 1 class EDualWfmMathType: MATH_SUB = 1 class EMarkDensity: MARK_DENSITY_1_4 = 1 MARK_DENSITY_1_2 = 2 MARK_DENSITY_3_4 = 3 class ESeqItem: ITEM_WFM_CH2 = 1 ITEM_WFM_CH3 = 2 ITEM_WFM_CH4 = 3 ITEM_WAIT = 4 ITEM_LOOP = 5 ITEM_JUMP_TO = 6 ITEM_GO_TO = 7 class ECalDiagType: CAL_DIAG_TYPE_YIG = 1 CAL_DIAG_TYPE_CLOCKPOWER = 2 CAL_DIAG_TYPE_IPC = 3 CAL_DIAG_TYPE_DTIMING = 4 CAL_DIAG_TYPE_ANALOG = 5 CAL_DIAG_TYPE_MARKER = 6 CAL_DIAG_TYPE_MDELAY = 7 CAL_DIAG_TYPE_MCPOINT = 8 CAL_DIAG_TYPE_SKEW = 9 class EUserCalDiagType: USER_CALDIAG_TYPE_D_AWG_REGISTER = 1 USER_CALDIAG_TYPE_D_AWG_WFM_MEM_CH1 = 2 USER_CALDIAG_TYPE_D_AWG_WFM_MEM_CH2 = 3 USER_CALDIAG_TYPE_D_AWG_WFM_MEM_CH3 = 4 USER_CALDIAG_TYPE_D_AWG_WFM_MEM_CH4 = 5 USER_CALDIAG_TYPE_D_CLOCK_REGISTER = 6 USER_CALDIAG_TYPE_D_CLOCK_PLL = 7 USER_CALDIAG_TYPE_D_OUTPUT1_REGISTER = 8 USER_CALDIAG_TYPE_D_OUTPUT1_ANALOG = 9 USER_CALDIAG_TYPE_D_OUTPUT1_MARKER = 10 USER_CALDIAG_TYPE_D_OUTPUT2_REGISTER = 11 USER_CALDIAG_TYPE_D_OUTPUT2_ANALOG = 12 
USER_CALDIAG_TYPE_D_OUTPUT2_MARKER = 13 USER_CALDIAG_TYPE_C_LEVEL_ANALOG_CH1 = 14 USER_CALDIAG_TYPE_C_LEVEL_MARKER_CH1 = 15 USER_CALDIAG_TYPE_C_LEVEL_ANALOG_CH2 = 16 USER_CALDIAG_TYPE_C_LEVEL_MARKER_CH2 = 17 USER_CALDIAG_TYPE_C_LEVEL_ANALOG_CH3 = 18 USER_CALDIAG_TYPE_C_LEVEL_MARKER_CH3 = 19 USER_CALDIAG_TYPE_C_LEVEL_ANALOG_CH4 = 20 USER_CALDIAG_TYPE_C_LEVEL_MARKER_CH4 = 21 USER_CALDIAG_TYPE_D_AWG_DTIMING = 22 USER_CALDIAG_TYPE_D_OUTPUT1_ANALOG_2 = 23 class EClockSource: ClockSource_INTERNAL = 1 ClockSource_EXTERNAL = 2 class EReferenceSource: ReferenceSource_INTERNAL = 1 ReferenceSource_EXTERNAL = 2 class EReferenceType: ReferenceType_Fixed = 1 ReferenceType_Variable = 2 class EReferenceClockFrequency: ReferenceClockFrequency_10MHZ = 1 ReferenceClockFrequency_20MHZ = 2 ReferenceClockFrequency_100MHZ = 3 class EDividerRate: DividerRate_DIV_1 = 1 DividerRate_DIV_2 = 2 DividerRate_DIV_4 = 3 DividerRate_DIV_8 = 4 DividerRate_DIV_16 = 5 DividerRate_DIV_32 = 6 DividerRate_DIV_64 = 7 DividerRate_DIV_128 = 8 DividerRate_DIV_256 = 9 class EExternalReferenceStatus: ExternalReferenceStatus_INPUT_OK = 1 ExternalReferenceStatus_INPUT_MISSING = 2 ExternalReferenceStatus_INPUT_OUT_OF_RANGE = 3 ExternalReferenceStatus_INPUT_UNSTABLE = 4 class EExternalOscillatorStatus: ExternalOscillatorStatus_INPUT_OK = 1 ExternalOscillatorStatus_INPUT_MISSING = 2 ExternalOscillatorStatus_INPUT_OUT_OF_RANGE = 3 ExternalOscillatorStatus_INPUT_UNSTABLE = 4 class ETriggerSource: TriggerSource_EXTERNAL = 1 TriggerSource_INTERNAL = 2 class ETriggerInputImpedance: TriggerInputImpedance_50OHM = 1 TriggerInputImpedance_1KOHM = 2 class ETriggerInputSlope: TriggerInputSlope_POL_POSITIVE = 1 TriggerInputSlope_POL_NEGATIVE = 2 class ETriggerInputPolarity: TriggerInputPolarity_POSITIVE = 1 TriggerInputPolarity_NEGATIVE = 2 class EEventInputImpedance: EventInputImpedance_50OHM = 1 EventInputImpedance_1KOHM = 2 class EEventInputPolarity: EventInputPolarity_POSITIVE = 1 EventInputPolarity_NEGATIVE = 2 class 
EJumpTiming: JumpTiming_SYNC = 1 JumpTiming_ASYNC = 2 class EAnalogInputMethod: AnalogInputMethod_IMAmplitudeOffset = 1 AnalogInputMethod_IMHighLow = 2 class EDacResolution: DacResolution_8 = 1 DacResolution_10 = 2 DacResolution_14 = 3 class EFilter: Filter_20MHZ = 1 Filter_50MHZ = 2 Filter_100MHZ = 3 Filter_200MHZ = 4 Filter_THROUGH = 5 class EMarkerInputMethod: MarkerInputMethod_IMAmplitudeOffset = 1 MarkerInputMethod_IMHighLow = 2 class EChannelCoupling: ChannelCoupling_Off = 1 ChannelCoupling_Pair = 2 ChannelCoupling_All = 3 class ERunMode: RunMode_CONTINUOUS = 1 RunMode_TRIGGERED = 2 RunMode_GATED = 3 RunMode_SEQUENCE = 4 class ESequencerState: SequencerState_STOPPED = 1 SequencerState_RUNNING = 2 SequencerState_WAIT_FOR_TRIGGER = 3 class ESequencerType: SequencerType_HW = 1 SequencerType_SW = 2 class EWaitValue: WaitValue_FIRST = 1 WaitValue_LAST = 2 class EJumpTargetType: JumpTargetType_OFF = 1 JumpTargetType_INDEX = 2 JumpTargetType_NEXT = 3 class EWaveformType: WaveformType_INTEGER = 1 WaveformType_REAL = 2 class EWfmDacResolution: WfmDacResolution_8 = 1 WfmDacResolution_10 = 2 WfmDacResolution_14 = 3 class EOverHeat: OverHeat_None = 1 OverHeat_Message = 2 OverHeat_Shutdown = 3 class EDigitalInputMethod: DigitalInputMethod_IMAmplitudeOffset = 1 DigitalInputMethod_IMHighLow = 2 class ENormalizationType: NormalizationType_NONE = 1 NormalizationType_FULL_SCALE = 2 NormalizationType_ZERO_REFERENCE = 3 class EPhaseDelayInputMethod: PhaseDelayInputMethod_PHASE = 1 PhaseDelayInputMethod_DELAY_IN_TIME = 2 class ETeklinkConnectionType: TeklinkConnectionType_NONE = 1 TeklinkConnectionType_HUB = 2 TeklinkConnectionType_INSTRUMENT = 3 class ETeklinkCpState: TeklinkCpState_NOTRUNNING = 1 TeklinkCpState_RUNNING = 2 TeklinkCpState_PENDING = 3 class EJumpMode: JumpMode_EVENT = 1 JumpMode_TABLE = 2 class Commands: CS_MAGIC = "MAGIC" CS_VERSION = "VERSION" CS_SAMPLING_RATE = "SAMPLING_RATE" CS_REPETITION_RATE = "REPETITION_RATE" CS_HOLD_REPETITION_RATE = 
"HOLD_REPETITION_RATE" CS_CLOCK_SOURCE = "CLOCK_SOURCE" CS_REFERENCE_SOURCE = "REFERENCE_SOURCE" CS_EXTERNAL_REFERENCE_TYPE = "EXTERNAL_REFERENCE_TYPE" CS_REFERENCE_CLOCK_FREQUENCY_SELECTION = "REFERENCE_CLOCK_FREQUENCY_SELECTION" CS_REFERENCE_MULTIPLIER_RATE = "REFERENCE_MULTIPLIER_RATE" CS_DIVIDER_RATE = "DIVIDER_RATE" CS_TRIGGER_SOURCE = "TRIGGER_SOURCE" CS_INTERNAL_TRIGGER_RATE = "INTERNAL_TRIGGER_RATE" CS_TRIGGER_INPUT_IMPEDANCE = "TRIGGER_INPUT_IMPEDANCE" CS_TRIGGER_INPUT_SLOPE = "TRIGGER_INPUT_SLOPE" CS_TRIGGER_INPUT_POLARITY = "TRIGGER_INPUT_POLARITY" CS_TRIGGER_INPUT_THRESHOLD = "TRIGGER_INPUT_THRESHOLD" CS_EVENT_INPUT_IMPEDANCE = "EVENT_INPUT_IMPEDANCE" CS_EVENT_INPUT_POLARITY = "EVENT_INPUT_POLARITY" CS_EVENT_INPUT_THRESHOLD = "EVENT_INPUT_THRESHOLD" CS_JUMP_TIMING = "JUMP_TIMING" CS_INTERLEAVE = "INTERLEAVE" CS_ZEROING = "ZEROING" CS_INTERLEAVE_ADJ_PHASE = "INTERLEAVE_ADJ_PHASE" CS_INTERLEAVE_ADJ_AMPLITUDE = "INTERLEAVE_ADJ_AMPLITUDE" CS_COUPLING = "COUPLING" CS_RUN_MODE = "RUN_MODE" CS_WAIT_VALUE = "WAIT_VALUE" CS_RUN_STATE = "RUN_STATE" CS_EVENT_JUMP_MODE = "EVENT_JUMP_MODE" CS_TABLE_JUMP_STROBE = "TABLE_JUMP_STROBE" CS_TABLE_JUMP_DEFINITION = "TABLE_JUMP_DEFINITION" CS_DAC_RESOLUTION_1 = "DAC_RESOLUTION_1" CS_OUTPUT_WAVEFORM_NAME_1 = "OUTPUT_WAVEFORM_NAME_1" CS_CHANNEL_STATE_1 = "CHANNEL_STATE_1" CS_ANALOG_DIRECT_OUTPUT_1 = "ANALOG_DIRECT_OUTPUT_1" CS_ANALOG_FILTER_1 = "ANALOG_FILTER_1" CS_ANALOG_METHOD_1 = "ANALOG_METHOD_1" CS_ANALOG_AMPLITUDE_1 = "ANALOG_AMPLITUDE_1" CS_ANALOG_OFFSET_1 = "ANALOG_OFFSET_1" CS_ANALOG_HIGH_1 = "ANALOG_HIGH_1" CS_ANALOG_LOW_1 = "ANALOG_LOW_1" CS_MARKER1_SKEW_1 = "MARKER1_SKEW_1" CS_MARKER1_METHOD_1 = "MARKER1_METHOD_1" CS_MARKER1_AMPLITUDE_1 = "MARKER1_AMPLITUDE_1" CS_MARKER1_OFFSET_1 = "MARKER1_OFFSET_1" CS_MARKER1_HIGH_1 = "MARKER1_HIGH_1" CS_MARKER1_LOW_1 = "MARKER1_LOW_1" CS_MARKER2_SKEW_1 = "MARKER2_SKEW_1" CS_MARKER2_METHOD_1 = "MARKER2_METHOD_1" CS_MARKER2_AMPLITUDE_1 = "MARKER2_AMPLITUDE_1" CS_MARKER2_OFFSET_1 
= "MARKER2_OFFSET_1" CS_MARKER2_HIGH_1 = "MARKER2_HIGH_1" CS_MARKER2_LOW_1 = "MARKER2_LOW_1" CS_DIGITAL_METHOD_1 = "DIGITAL_METHOD_1" CS_DIGITAL_AMPLITUDE_1 = "DIGITAL_AMPLITUDE_1" CS_DIGITAL_OFFSET_1 = "DIGITAL_OFFSET_1" CS_DIGITAL_HIGH_1 = "DIGITAL_HIGH_1" CS_DIGITAL_LOW_1 = "DIGITAL_LOW_1" CS_EXTERNAL_ADD_1 = "EXTERNAL_ADD_1" CS_PHASE_DELAY_INPUT_METHOD_1 = "PHASE_DELAY_INPUT_METHOD_1" CS_PHASE_1 = "PHASE_1" CS_DELAY_IN_TIME_1 = "DELAY_IN_TIME_1" CS_DELAY_IN_POINTS_1 = "DELAY_IN_POINTS_1" CS_CHANNEL_SKEW_1 = "CHANNEL_SKEW_1" CS_DAC_RESOLUTION_2 = "DAC_RESOLUTION_2" CS_OUTPUT_WAVEFORM_NAME_2 = "OUTPUT_WAVEFORM_NAME_2" CS_CHANNEL_STATE_2 = "CHANNEL_STATE_2" CS_ANALOG_DIRECT_OUTPUT_2 = "ANALOG_DIRECT_OUTPUT_2" CS_ANALOG_FILTER_2 = "ANALOG_FILTER_2" CS_ANALOG_METHOD_2 = "ANALOG_METHOD_2" CS_ANALOG_AMPLITUDE_2 = "ANALOG_AMPLITUDE_2" CS_ANALOG_OFFSET_2 = "ANALOG_OFFSET_2" CS_ANALOG_HIGH_2 = "ANALOG_HIGH_2" CS_ANALOG_LOW_2 = "ANALOG_LOW_2" CS_MARKER1_SKEW_2 = "MARKER1_SKEW_2" CS_MARKER1_METHOD_2 = "MARKER1_METHOD_2" CS_MARKER1_AMPLITUDE_2 = "MARKER1_AMPLITUDE_2" CS_MARKER1_OFFSET_2 = "MARKER1_OFFSET_2" CS_MARKER1_HIGH_2 = "MARKER1_HIGH_2" CS_MARKER1_LOW_2 = "MARKER1_LOW_2" CS_MARKER2_SKEW_2 = "MARKER2_SKEW_2" CS_MARKER2_METHOD_2 = "MARKER2_METHOD_2" CS_MARKER2_AMPLITUDE_2 = "MARKER2_AMPLITUDE_2" CS_MARKER2_OFFSET_2 = "MARKER2_OFFSET_2" CS_MARKER2_HIGH_2 = "MARKER2_HIGH_2" CS_MARKER2_LOW_2 = "MARKER2_LOW_2" CS_DIGITAL_METHOD_2 = "DIGITAL_METHOD_2" CS_DIGITAL_AMPLITUDE_2 = "DIGITAL_AMPLITUDE_2" CS_DIGITAL_OFFSET_2 = "DIGITAL_OFFSET_2" CS_DIGITAL_HIGH_2 = "DIGITAL_HIGH_2" CS_DIGITAL_LOW_2 = "DIGITAL_LOW_2" CS_EXTERNAL_ADD_2 = "EXTERNAL_ADD_2" CS_PHASE_DELAY_INPUT_METHOD_2 = "PHASE_DELAY_INPUT_METHOD_2" CS_PHASE_2 = "PHASE_2" CS_DELAY_IN_TIME_2 = "DELAY_IN_TIME_2" CS_DELAY_IN_POINTS_2 = "DELAY_IN_POINTS_2" CS_CHANNEL_SKEW_2 = "CHANNEL_SKEW_2" CS_DAC_RESOLUTION_3 = "DAC_RESOLUTION_3" CS_OUTPUT_WAVEFORM_NAME_3 = "OUTPUT_WAVEFORM_NAME_3" CS_CHANNEL_STATE_3 = "CHANNEL_STATE_3" 
CS_ANALOG_DIRECT_OUTPUT_3 = "ANALOG_DIRECT_OUTPUT_3" CS_ANALOG_FILTER_3 = "ANALOG_FILTER_3" CS_ANALOG_METHOD_3 = "ANALOG_METHOD_3" CS_ANALOG_AMPLITUDE_3 = "ANALOG_AMPLITUDE_3" CS_ANALOG_OFFSET_3 = "ANALOG_OFFSET_3" CS_ANALOG_HIGH_3 = "ANALOG_HIGH_3" CS_ANALOG_LOW_3 = "ANALOG_LOW_3" CS_MARKER1_SKEW_3 = "MARKER1_SKEW_3" CS_MARKER1_METHOD_3 = "MARKER1_METHOD_3" CS_MARKER1_AMPLITUDE_3 = "MARKER1_AMPLITUDE_3" CS_MARKER1_OFFSET_3 = "MARKER1_OFFSET_3" CS_MARKER1_HIGH_3 = "MARKER1_HIGH_3" CS_MARKER1_LOW_3 = "MARKER1_LOW_3" CS_MARKER2_SKEW_3 = "MARKER2_SKEW_3" CS_MARKER2_METHOD_3 = "MARKER2_METHOD_3" CS_MARKER2_AMPLITUDE_3 = "MARKER2_AMPLITUDE_3" CS_MARKER2_OFFSET_3 = "MARKER2_OFFSET_3" CS_MARKER2_HIGH_3 = "MARKER2_HIGH_3" CS_MARKER2_LOW_3 = "MARKER2_LOW_3" CS_DIGITAL_METHOD_3 = "DIGITAL_METHOD_3" CS_DIGITAL_AMPLITUDE_3 = "DIGITAL_AMPLITUDE_3" CS_DIGITAL_OFFSET_3 = "DIGITAL_OFFSET_3" CS_DIGITAL_HIGH_3 = "DIGITAL_HIGH_3" CS_DIGITAL_LOW_3 = "DIGITAL_LOW_3" CS_EXTERNAL_ADD_3 = "EXTERNAL_ADD_3" CS_PHASE_DELAY_INPUT_METHOD_3 = "PHASE_DELAY_INPUT_METHOD_3" CS_PHASE_3 = "PHASE_3" CS_DELAY_IN_TIME_3 = "DELAY_IN_TIME_3" CS_DELAY_IN_POINTS_3 = "DELAY_IN_POINTS_3" CS_CHANNEL_SKEW_3 = "CHANNEL_SKEW_3" CS_DAC_RESOLUTION_4 = "DAC_RESOLUTION_4" CS_OUTPUT_WAVEFORM_NAME_4 = "OUTPUT_WAVEFORM_NAME_4" CS_CHANNEL_STATE_4 = "CHANNEL_STATE_4" CS_ANALOG_DIRECT_OUTPUT_4 = "ANALOG_DIRECT_OUTPUT_4" CS_ANALOG_FILTER_4 = "ANALOG_FILTER_4" CS_ANALOG_METHOD_4 = "ANALOG_METHOD_4" CS_ANALOG_AMPLITUDE_4 = "ANALOG_AMPLITUDE_4" CS_ANALOG_OFFSET_4 = "ANALOG_OFFSET_4" CS_ANALOG_HIGH_4 = "ANALOG_HIGH_4" CS_ANALOG_LOW_4 = "ANALOG_LOW_4" CS_MARKER1_SKEW_4 = "MARKER1_SKEW_4" CS_MARKER1_METHOD_4 = "MARKER1_METHOD_4" CS_MARKER1_AMPLITUDE_4 = "MARKER1_AMPLITUDE_4" CS_MARKER1_OFFSET_4 = "MARKER1_OFFSET_4" CS_MARKER1_HIGH_4 = "MARKER1_HIGH_4" CS_MARKER1_LOW_4 = "MARKER1_LOW_4" CS_MARKER2_SKEW_4 = "MARKER2_SKEW_4" CS_MARKER2_METHOD_4 = "MARKER2_METHOD_4" CS_MARKER2_AMPLITUDE_4 = "MARKER2_AMPLITUDE_4" CS_MARKER2_OFFSET_4 
= "MARKER2_OFFSET_4" CS_MARKER2_HIGH_4 = "MARKER2_HIGH_4" CS_MARKER2_LOW_4 = "MARKER2_LOW_4" CS_DIGITAL_METHOD_4 = "DIGITAL_METHOD_4" CS_DIGITAL_AMPLITUDE_4 = "DIGITAL_AMPLITUDE_4" CS_DIGITAL_OFFSET_4 = "DIGITAL_OFFSET_4" CS_DIGITAL_HIGH_4 = "DIGITAL_HIGH_4" CS_DIGITAL_LOW_4 = "DIGITAL_LOW_4" CS_EXTERNAL_ADD_4 = "EXTERNAL_ADD_4" CS_PHASE_DELAY_INPUT_METHOD_4 = "PHASE_DELAY_INPUT_METHOD_4" CS_PHASE_4 = "PHASE_4" CS_DELAY_IN_TIME_4 = "DELAY_IN_TIME_4" CS_DELAY_IN_POINTS_4 = "DELAY_IN_POINTS_4" CS_CHANNEL_SKEW_4 = "CHANNEL_SKEW_4" CS_DAC_RESOLUTION_N = "DAC_RESOLUTION_" CS_OUTPUT_WAVEFORM_NAME_N = "OUTPUT_WAVEFORM_NAME_" CS_CHANNEL_STATE_N = "CHANNEL_STATE_" CS_ANALOG_DIRECT_OUTPUT_N = "ANALOG_DIRECT_OUTPUT_" CS_ANALOG_FILTER_N = "ANALOG_FILTER_" CS_ANALOG_METHOD_N = "ANALOG_METHOD_" CS_ANALOG_AMPLITUDE_N = "ANALOG_AMPLITUDE_" CS_ANALOG_OFFSET_N = "ANALOG_OFFSET_" CS_ANALOG_HIGH_N = "ANALOG_HIGH_" CS_ANALOG_LOW_N = "ANALOG_LOW_" CS_MARKER1_SKEW_N = "MARKER1_SKEW_" CS_MARKER1_METHOD_N = "MARKER1_METHOD_" CS_MARKER1_AMPLITUDE_N = "MARKER1_AMPLITUDE_" CS_MARKER1_OFFSET_N = "MARKER1_OFFSET_" CS_MARKER1_HIGH_N = "MARKER1_HIGH_" CS_MARKER1_LOW_N = "MARKER1_LOW_" CS_MARKER2_SKEW_N = "MARKER2_SKEW_" CS_MARKER2_METHOD_N = "MARKER2_METHOD_" CS_MARKER2_AMPLITUDE_N = "MARKER2_AMPLITUDE_" CS_MARKER2_OFFSET_N = "MARKER2_OFFSET_" CS_MARKER2_HIGH_N = "MARKER2_HIGH_" CS_MARKER2_LOW_N = "MARKER2_LOW_" CS_DIGITAL_METHOD_N = "DIGITAL_METHOD_" CS_DIGITAL_AMPLITUDE_N = "DIGITAL_AMPLITUDE_" CS_DIGITAL_OFFSET_N = "DIGITAL_OFFSET_" CS_DIGITAL_HIGH_N = "DIGITAL_HIGH_" CS_DIGITAL_LOW_N = "DIGITAL_LOW_" CS_EXTERNAL_ADD_N = "EXTERNAL_ADD_" CS_PHASE_DELAY_INPUT_METHOD_N = "PHASE_DELAY_INPUT_METHOD_" CS_PHASE_N = "PHASE_" CS_DELAY_IN_TIME_N = "DELAY_IN_TIME_" CS_DELAY_IN_POINTS_N = "DELAY_IN_POINTS_" CS_CHANNEL_SKEW_N = "CHANNEL_SKEW_" CS_DC_OUTPUT_LEVEL_1 = "DC_OUTPUT_LEVEL_1" CS_DC_OUTPUT_LEVEL_2 = "DC_OUTPUT_LEVEL_2" CS_DC_OUTPUT_LEVEL_3 = "DC_OUTPUT_LEVEL_3" CS_DC_OUTPUT_LEVEL_4 = 
"DC_OUTPUT_LEVEL_4" CS_DC_OUTPUT_LEVEL_N = "DC_OUTPUT_LEVEL_" CS_WAVEFORM_NAME_N = "WAVEFORM_NAME_" CS_WAVEFORM_TYPE_N = "WAVEFORM_TYPE_" CS_WAVEFORM_LENGTH_N = "WAVEFORM_LENGTH_" CS_WAVEFORM_TIMESTAMP_N = "WAVEFORM_TIMESTAMP_" CS_WAVEFORM_DATA_N = "WAVEFORM_DATA_" CS_SEQUENCE_WAIT_N = "SEQUENCE_WAIT_" CS_SEQUENCE_LOOP_N = "SEQUENCE_LOOP_" CS_SEQUENCE_JUMP_N = "SEQUENCE_JUMP_" CS_SEQUENCE_GOTO_N = "SEQUENCE_GOTO_" CS_SEQUENCE_WAVEFORM_NAME_CH_N_N = "SEQUENCE_WAVEFORM_NAME_CH_" CS_SEQUENCE_IS_SUBSEQUENCE_N = "SEQUENCE_IS_SUBSEQ_" CS_SEQUENCE_SUBSEQUENCE_NAME_N = "SEQUENCE_SUBSEQ_NAME_" CS_SUBSEQUENCE_NAME_O = "SUBSEQ_NAME_" CS_SUBSEQUENCE_TIMESTAMP_O = "SUBSEQ_TIMESTAMP_" CS_SUBSEQUENCE_LENGTH_O = "SUBSEQ_LENGTH_" CS_SUBSEQUENCE_LOOP_E_O_U = "SUBSEQ_LOOP_" CS_SUBSEQUENCE_WAVEFORM_NAME_CH_X_E_O_U = "SUBSEQ_WAVEFORM_NAME_CH_" SEQUENCE_JUMP_TARGET_TYPE_OFF = 0 SEQUENCE_JUMP_TARGET_TYPE_NEXT = -1
""" This constant file was automatically generated by a quick script I wrote for the enum part and h2py for the constant part. It aims to help out remembering the enum values and constants, but probably as bugs... please refer to the actual documentation for the correct values (if something is not working) and feel free to correct the values and submit a pull request. """ class Ewfmitem: wfm_item_marker1 = 1 wfm_item_marker2 = 2 wfm_item_digital0 = 3 wfm_item_digital1 = 4 wfm_item_digital2 = 5 wfm_item_digital3 = 6 wfm_item_digital4 = 7 wfm_item_digital5 = 8 wfm_item_digital6 = 9 wfm_item_digital7 = 10 wfm_item_digital8 = 11 wfm_item_digital9 = 12 wfm_item_digital10 = 13 wfm_item_digital11 = 14 wfm_item_digital12 = 15 wfm_item_digital13 = 16 class Elogicsignature: logic_signature_0 = 1 logic_signature_1 = 2 logic_signature_mix = 3 class Estdwfmtype: std_wfm_type_square = 1 std_wfm_type_triangle = 2 std_wfm_type_ramp = 3 std_wfm_type_gaussian_noise = 4 class Epointdrawinterpolationtype: point_draw_interpolation_type_spline = 1 class Edualwfmmathtype: math_sub = 1 class Emarkdensity: mark_density_1_4 = 1 mark_density_1_2 = 2 mark_density_3_4 = 3 class Eseqitem: item_wfm_ch2 = 1 item_wfm_ch3 = 2 item_wfm_ch4 = 3 item_wait = 4 item_loop = 5 item_jump_to = 6 item_go_to = 7 class Ecaldiagtype: cal_diag_type_yig = 1 cal_diag_type_clockpower = 2 cal_diag_type_ipc = 3 cal_diag_type_dtiming = 4 cal_diag_type_analog = 5 cal_diag_type_marker = 6 cal_diag_type_mdelay = 7 cal_diag_type_mcpoint = 8 cal_diag_type_skew = 9 class Eusercaldiagtype: user_caldiag_type_d_awg_register = 1 user_caldiag_type_d_awg_wfm_mem_ch1 = 2 user_caldiag_type_d_awg_wfm_mem_ch2 = 3 user_caldiag_type_d_awg_wfm_mem_ch3 = 4 user_caldiag_type_d_awg_wfm_mem_ch4 = 5 user_caldiag_type_d_clock_register = 6 user_caldiag_type_d_clock_pll = 7 user_caldiag_type_d_output1_register = 8 user_caldiag_type_d_output1_analog = 9 user_caldiag_type_d_output1_marker = 10 user_caldiag_type_d_output2_register = 11 
user_caldiag_type_d_output2_analog = 12 user_caldiag_type_d_output2_marker = 13 user_caldiag_type_c_level_analog_ch1 = 14 user_caldiag_type_c_level_marker_ch1 = 15 user_caldiag_type_c_level_analog_ch2 = 16 user_caldiag_type_c_level_marker_ch2 = 17 user_caldiag_type_c_level_analog_ch3 = 18 user_caldiag_type_c_level_marker_ch3 = 19 user_caldiag_type_c_level_analog_ch4 = 20 user_caldiag_type_c_level_marker_ch4 = 21 user_caldiag_type_d_awg_dtiming = 22 user_caldiag_type_d_output1_analog_2 = 23 class Eclocksource: clock_source_internal = 1 clock_source_external = 2 class Ereferencesource: reference_source_internal = 1 reference_source_external = 2 class Ereferencetype: reference_type__fixed = 1 reference_type__variable = 2 class Ereferenceclockfrequency: reference_clock_frequency_10_mhz = 1 reference_clock_frequency_20_mhz = 2 reference_clock_frequency_100_mhz = 3 class Edividerrate: divider_rate_div_1 = 1 divider_rate_div_2 = 2 divider_rate_div_4 = 3 divider_rate_div_8 = 4 divider_rate_div_16 = 5 divider_rate_div_32 = 6 divider_rate_div_64 = 7 divider_rate_div_128 = 8 divider_rate_div_256 = 9 class Eexternalreferencestatus: external_reference_status_input_ok = 1 external_reference_status_input_missing = 2 external_reference_status_input_out_of_range = 3 external_reference_status_input_unstable = 4 class Eexternaloscillatorstatus: external_oscillator_status_input_ok = 1 external_oscillator_status_input_missing = 2 external_oscillator_status_input_out_of_range = 3 external_oscillator_status_input_unstable = 4 class Etriggersource: trigger_source_external = 1 trigger_source_internal = 2 class Etriggerinputimpedance: trigger_input_impedance_50_ohm = 1 trigger_input_impedance_1_kohm = 2 class Etriggerinputslope: trigger_input_slope_pol_positive = 1 trigger_input_slope_pol_negative = 2 class Etriggerinputpolarity: trigger_input_polarity_positive = 1 trigger_input_polarity_negative = 2 class Eeventinputimpedance: event_input_impedance_50_ohm = 1 event_input_impedance_1_kohm = 
2 class Eeventinputpolarity: event_input_polarity_positive = 1 event_input_polarity_negative = 2 class Ejumptiming: jump_timing_sync = 1 jump_timing_async = 2 class Eanaloginputmethod: analog_input_method_im_amplitude_offset = 1 analog_input_method_im_high_low = 2 class Edacresolution: dac_resolution_8 = 1 dac_resolution_10 = 2 dac_resolution_14 = 3 class Efilter: filter_20_mhz = 1 filter_50_mhz = 2 filter_100_mhz = 3 filter_200_mhz = 4 filter_through = 5 class Emarkerinputmethod: marker_input_method_im_amplitude_offset = 1 marker_input_method_im_high_low = 2 class Echannelcoupling: channel_coupling__off = 1 channel_coupling__pair = 2 channel_coupling__all = 3 class Erunmode: run_mode_continuous = 1 run_mode_triggered = 2 run_mode_gated = 3 run_mode_sequence = 4 class Esequencerstate: sequencer_state_stopped = 1 sequencer_state_running = 2 sequencer_state_wait_for_trigger = 3 class Esequencertype: sequencer_type_hw = 1 sequencer_type_sw = 2 class Ewaitvalue: wait_value_first = 1 wait_value_last = 2 class Ejumptargettype: jump_target_type_off = 1 jump_target_type_index = 2 jump_target_type_next = 3 class Ewaveformtype: waveform_type_integer = 1 waveform_type_real = 2 class Ewfmdacresolution: wfm_dac_resolution_8 = 1 wfm_dac_resolution_10 = 2 wfm_dac_resolution_14 = 3 class Eoverheat: over_heat__none = 1 over_heat__message = 2 over_heat__shutdown = 3 class Edigitalinputmethod: digital_input_method_im_amplitude_offset = 1 digital_input_method_im_high_low = 2 class Enormalizationtype: normalization_type_none = 1 normalization_type_full_scale = 2 normalization_type_zero_reference = 3 class Ephasedelayinputmethod: phase_delay_input_method_phase = 1 phase_delay_input_method_delay_in_time = 2 class Eteklinkconnectiontype: teklink_connection_type_none = 1 teklink_connection_type_hub = 2 teklink_connection_type_instrument = 3 class Eteklinkcpstate: teklink_cp_state_notrunning = 1 teklink_cp_state_running = 2 teklink_cp_state_pending = 3 class Ejumpmode: jump_mode_event = 1 
jump_mode_table = 2 class Commands: cs_magic = 'MAGIC' cs_version = 'VERSION' cs_sampling_rate = 'SAMPLING_RATE' cs_repetition_rate = 'REPETITION_RATE' cs_hold_repetition_rate = 'HOLD_REPETITION_RATE' cs_clock_source = 'CLOCK_SOURCE' cs_reference_source = 'REFERENCE_SOURCE' cs_external_reference_type = 'EXTERNAL_REFERENCE_TYPE' cs_reference_clock_frequency_selection = 'REFERENCE_CLOCK_FREQUENCY_SELECTION' cs_reference_multiplier_rate = 'REFERENCE_MULTIPLIER_RATE' cs_divider_rate = 'DIVIDER_RATE' cs_trigger_source = 'TRIGGER_SOURCE' cs_internal_trigger_rate = 'INTERNAL_TRIGGER_RATE' cs_trigger_input_impedance = 'TRIGGER_INPUT_IMPEDANCE' cs_trigger_input_slope = 'TRIGGER_INPUT_SLOPE' cs_trigger_input_polarity = 'TRIGGER_INPUT_POLARITY' cs_trigger_input_threshold = 'TRIGGER_INPUT_THRESHOLD' cs_event_input_impedance = 'EVENT_INPUT_IMPEDANCE' cs_event_input_polarity = 'EVENT_INPUT_POLARITY' cs_event_input_threshold = 'EVENT_INPUT_THRESHOLD' cs_jump_timing = 'JUMP_TIMING' cs_interleave = 'INTERLEAVE' cs_zeroing = 'ZEROING' cs_interleave_adj_phase = 'INTERLEAVE_ADJ_PHASE' cs_interleave_adj_amplitude = 'INTERLEAVE_ADJ_AMPLITUDE' cs_coupling = 'COUPLING' cs_run_mode = 'RUN_MODE' cs_wait_value = 'WAIT_VALUE' cs_run_state = 'RUN_STATE' cs_event_jump_mode = 'EVENT_JUMP_MODE' cs_table_jump_strobe = 'TABLE_JUMP_STROBE' cs_table_jump_definition = 'TABLE_JUMP_DEFINITION' cs_dac_resolution_1 = 'DAC_RESOLUTION_1' cs_output_waveform_name_1 = 'OUTPUT_WAVEFORM_NAME_1' cs_channel_state_1 = 'CHANNEL_STATE_1' cs_analog_direct_output_1 = 'ANALOG_DIRECT_OUTPUT_1' cs_analog_filter_1 = 'ANALOG_FILTER_1' cs_analog_method_1 = 'ANALOG_METHOD_1' cs_analog_amplitude_1 = 'ANALOG_AMPLITUDE_1' cs_analog_offset_1 = 'ANALOG_OFFSET_1' cs_analog_high_1 = 'ANALOG_HIGH_1' cs_analog_low_1 = 'ANALOG_LOW_1' cs_marker1_skew_1 = 'MARKER1_SKEW_1' cs_marker1_method_1 = 'MARKER1_METHOD_1' cs_marker1_amplitude_1 = 'MARKER1_AMPLITUDE_1' cs_marker1_offset_1 = 'MARKER1_OFFSET_1' cs_marker1_high_1 = 'MARKER1_HIGH_1' 
cs_marker1_low_1 = 'MARKER1_LOW_1' cs_marker2_skew_1 = 'MARKER2_SKEW_1' cs_marker2_method_1 = 'MARKER2_METHOD_1' cs_marker2_amplitude_1 = 'MARKER2_AMPLITUDE_1' cs_marker2_offset_1 = 'MARKER2_OFFSET_1' cs_marker2_high_1 = 'MARKER2_HIGH_1' cs_marker2_low_1 = 'MARKER2_LOW_1' cs_digital_method_1 = 'DIGITAL_METHOD_1' cs_digital_amplitude_1 = 'DIGITAL_AMPLITUDE_1' cs_digital_offset_1 = 'DIGITAL_OFFSET_1' cs_digital_high_1 = 'DIGITAL_HIGH_1' cs_digital_low_1 = 'DIGITAL_LOW_1' cs_external_add_1 = 'EXTERNAL_ADD_1' cs_phase_delay_input_method_1 = 'PHASE_DELAY_INPUT_METHOD_1' cs_phase_1 = 'PHASE_1' cs_delay_in_time_1 = 'DELAY_IN_TIME_1' cs_delay_in_points_1 = 'DELAY_IN_POINTS_1' cs_channel_skew_1 = 'CHANNEL_SKEW_1' cs_dac_resolution_2 = 'DAC_RESOLUTION_2' cs_output_waveform_name_2 = 'OUTPUT_WAVEFORM_NAME_2' cs_channel_state_2 = 'CHANNEL_STATE_2' cs_analog_direct_output_2 = 'ANALOG_DIRECT_OUTPUT_2' cs_analog_filter_2 = 'ANALOG_FILTER_2' cs_analog_method_2 = 'ANALOG_METHOD_2' cs_analog_amplitude_2 = 'ANALOG_AMPLITUDE_2' cs_analog_offset_2 = 'ANALOG_OFFSET_2' cs_analog_high_2 = 'ANALOG_HIGH_2' cs_analog_low_2 = 'ANALOG_LOW_2' cs_marker1_skew_2 = 'MARKER1_SKEW_2' cs_marker1_method_2 = 'MARKER1_METHOD_2' cs_marker1_amplitude_2 = 'MARKER1_AMPLITUDE_2' cs_marker1_offset_2 = 'MARKER1_OFFSET_2' cs_marker1_high_2 = 'MARKER1_HIGH_2' cs_marker1_low_2 = 'MARKER1_LOW_2' cs_marker2_skew_2 = 'MARKER2_SKEW_2' cs_marker2_method_2 = 'MARKER2_METHOD_2' cs_marker2_amplitude_2 = 'MARKER2_AMPLITUDE_2' cs_marker2_offset_2 = 'MARKER2_OFFSET_2' cs_marker2_high_2 = 'MARKER2_HIGH_2' cs_marker2_low_2 = 'MARKER2_LOW_2' cs_digital_method_2 = 'DIGITAL_METHOD_2' cs_digital_amplitude_2 = 'DIGITAL_AMPLITUDE_2' cs_digital_offset_2 = 'DIGITAL_OFFSET_2' cs_digital_high_2 = 'DIGITAL_HIGH_2' cs_digital_low_2 = 'DIGITAL_LOW_2' cs_external_add_2 = 'EXTERNAL_ADD_2' cs_phase_delay_input_method_2 = 'PHASE_DELAY_INPUT_METHOD_2' cs_phase_2 = 'PHASE_2' cs_delay_in_time_2 = 'DELAY_IN_TIME_2' cs_delay_in_points_2 = 
'DELAY_IN_POINTS_2' cs_channel_skew_2 = 'CHANNEL_SKEW_2' cs_dac_resolution_3 = 'DAC_RESOLUTION_3' cs_output_waveform_name_3 = 'OUTPUT_WAVEFORM_NAME_3' cs_channel_state_3 = 'CHANNEL_STATE_3' cs_analog_direct_output_3 = 'ANALOG_DIRECT_OUTPUT_3' cs_analog_filter_3 = 'ANALOG_FILTER_3' cs_analog_method_3 = 'ANALOG_METHOD_3' cs_analog_amplitude_3 = 'ANALOG_AMPLITUDE_3' cs_analog_offset_3 = 'ANALOG_OFFSET_3' cs_analog_high_3 = 'ANALOG_HIGH_3' cs_analog_low_3 = 'ANALOG_LOW_3' cs_marker1_skew_3 = 'MARKER1_SKEW_3' cs_marker1_method_3 = 'MARKER1_METHOD_3' cs_marker1_amplitude_3 = 'MARKER1_AMPLITUDE_3' cs_marker1_offset_3 = 'MARKER1_OFFSET_3' cs_marker1_high_3 = 'MARKER1_HIGH_3' cs_marker1_low_3 = 'MARKER1_LOW_3' cs_marker2_skew_3 = 'MARKER2_SKEW_3' cs_marker2_method_3 = 'MARKER2_METHOD_3' cs_marker2_amplitude_3 = 'MARKER2_AMPLITUDE_3' cs_marker2_offset_3 = 'MARKER2_OFFSET_3' cs_marker2_high_3 = 'MARKER2_HIGH_3' cs_marker2_low_3 = 'MARKER2_LOW_3' cs_digital_method_3 = 'DIGITAL_METHOD_3' cs_digital_amplitude_3 = 'DIGITAL_AMPLITUDE_3' cs_digital_offset_3 = 'DIGITAL_OFFSET_3' cs_digital_high_3 = 'DIGITAL_HIGH_3' cs_digital_low_3 = 'DIGITAL_LOW_3' cs_external_add_3 = 'EXTERNAL_ADD_3' cs_phase_delay_input_method_3 = 'PHASE_DELAY_INPUT_METHOD_3' cs_phase_3 = 'PHASE_3' cs_delay_in_time_3 = 'DELAY_IN_TIME_3' cs_delay_in_points_3 = 'DELAY_IN_POINTS_3' cs_channel_skew_3 = 'CHANNEL_SKEW_3' cs_dac_resolution_4 = 'DAC_RESOLUTION_4' cs_output_waveform_name_4 = 'OUTPUT_WAVEFORM_NAME_4' cs_channel_state_4 = 'CHANNEL_STATE_4' cs_analog_direct_output_4 = 'ANALOG_DIRECT_OUTPUT_4' cs_analog_filter_4 = 'ANALOG_FILTER_4' cs_analog_method_4 = 'ANALOG_METHOD_4' cs_analog_amplitude_4 = 'ANALOG_AMPLITUDE_4' cs_analog_offset_4 = 'ANALOG_OFFSET_4' cs_analog_high_4 = 'ANALOG_HIGH_4' cs_analog_low_4 = 'ANALOG_LOW_4' cs_marker1_skew_4 = 'MARKER1_SKEW_4' cs_marker1_method_4 = 'MARKER1_METHOD_4' cs_marker1_amplitude_4 = 'MARKER1_AMPLITUDE_4' cs_marker1_offset_4 = 'MARKER1_OFFSET_4' cs_marker1_high_4 = 
'MARKER1_HIGH_4' cs_marker1_low_4 = 'MARKER1_LOW_4' cs_marker2_skew_4 = 'MARKER2_SKEW_4' cs_marker2_method_4 = 'MARKER2_METHOD_4' cs_marker2_amplitude_4 = 'MARKER2_AMPLITUDE_4' cs_marker2_offset_4 = 'MARKER2_OFFSET_4' cs_marker2_high_4 = 'MARKER2_HIGH_4' cs_marker2_low_4 = 'MARKER2_LOW_4' cs_digital_method_4 = 'DIGITAL_METHOD_4' cs_digital_amplitude_4 = 'DIGITAL_AMPLITUDE_4' cs_digital_offset_4 = 'DIGITAL_OFFSET_4' cs_digital_high_4 = 'DIGITAL_HIGH_4' cs_digital_low_4 = 'DIGITAL_LOW_4' cs_external_add_4 = 'EXTERNAL_ADD_4' cs_phase_delay_input_method_4 = 'PHASE_DELAY_INPUT_METHOD_4' cs_phase_4 = 'PHASE_4' cs_delay_in_time_4 = 'DELAY_IN_TIME_4' cs_delay_in_points_4 = 'DELAY_IN_POINTS_4' cs_channel_skew_4 = 'CHANNEL_SKEW_4' cs_dac_resolution_n = 'DAC_RESOLUTION_' cs_output_waveform_name_n = 'OUTPUT_WAVEFORM_NAME_' cs_channel_state_n = 'CHANNEL_STATE_' cs_analog_direct_output_n = 'ANALOG_DIRECT_OUTPUT_' cs_analog_filter_n = 'ANALOG_FILTER_' cs_analog_method_n = 'ANALOG_METHOD_' cs_analog_amplitude_n = 'ANALOG_AMPLITUDE_' cs_analog_offset_n = 'ANALOG_OFFSET_' cs_analog_high_n = 'ANALOG_HIGH_' cs_analog_low_n = 'ANALOG_LOW_' cs_marker1_skew_n = 'MARKER1_SKEW_' cs_marker1_method_n = 'MARKER1_METHOD_' cs_marker1_amplitude_n = 'MARKER1_AMPLITUDE_' cs_marker1_offset_n = 'MARKER1_OFFSET_' cs_marker1_high_n = 'MARKER1_HIGH_' cs_marker1_low_n = 'MARKER1_LOW_' cs_marker2_skew_n = 'MARKER2_SKEW_' cs_marker2_method_n = 'MARKER2_METHOD_' cs_marker2_amplitude_n = 'MARKER2_AMPLITUDE_' cs_marker2_offset_n = 'MARKER2_OFFSET_' cs_marker2_high_n = 'MARKER2_HIGH_' cs_marker2_low_n = 'MARKER2_LOW_' cs_digital_method_n = 'DIGITAL_METHOD_' cs_digital_amplitude_n = 'DIGITAL_AMPLITUDE_' cs_digital_offset_n = 'DIGITAL_OFFSET_' cs_digital_high_n = 'DIGITAL_HIGH_' cs_digital_low_n = 'DIGITAL_LOW_' cs_external_add_n = 'EXTERNAL_ADD_' cs_phase_delay_input_method_n = 'PHASE_DELAY_INPUT_METHOD_' cs_phase_n = 'PHASE_' cs_delay_in_time_n = 'DELAY_IN_TIME_' cs_delay_in_points_n = 'DELAY_IN_POINTS_' 
cs_channel_skew_n = 'CHANNEL_SKEW_' cs_dc_output_level_1 = 'DC_OUTPUT_LEVEL_1' cs_dc_output_level_2 = 'DC_OUTPUT_LEVEL_2' cs_dc_output_level_3 = 'DC_OUTPUT_LEVEL_3' cs_dc_output_level_4 = 'DC_OUTPUT_LEVEL_4' cs_dc_output_level_n = 'DC_OUTPUT_LEVEL_' cs_waveform_name_n = 'WAVEFORM_NAME_' cs_waveform_type_n = 'WAVEFORM_TYPE_' cs_waveform_length_n = 'WAVEFORM_LENGTH_' cs_waveform_timestamp_n = 'WAVEFORM_TIMESTAMP_' cs_waveform_data_n = 'WAVEFORM_DATA_' cs_sequence_wait_n = 'SEQUENCE_WAIT_' cs_sequence_loop_n = 'SEQUENCE_LOOP_' cs_sequence_jump_n = 'SEQUENCE_JUMP_' cs_sequence_goto_n = 'SEQUENCE_GOTO_' cs_sequence_waveform_name_ch_n_n = 'SEQUENCE_WAVEFORM_NAME_CH_' cs_sequence_is_subsequence_n = 'SEQUENCE_IS_SUBSEQ_' cs_sequence_subsequence_name_n = 'SEQUENCE_SUBSEQ_NAME_' cs_subsequence_name_o = 'SUBSEQ_NAME_' cs_subsequence_timestamp_o = 'SUBSEQ_TIMESTAMP_' cs_subsequence_length_o = 'SUBSEQ_LENGTH_' cs_subsequence_loop_e_o_u = 'SUBSEQ_LOOP_' cs_subsequence_waveform_name_ch_x_e_o_u = 'SUBSEQ_WAVEFORM_NAME_CH_' sequence_jump_target_type_off = 0 sequence_jump_target_type_next = -1
def padlindromic_date1(date1):
    """Day/year reversal rule for a 'dd/mm/yyyy' string.

    NOTE(review): both comparisons match a 6-character reversed string
    against the 4-character year, so this is False for every dd/mm/yyyy
    input — confirm the intended rule.
    """
    day, month, year = date1.split('/')
    reversed_day_year = (day + year)[::-1]
    reversed_month_day = (month + day)[::-1]
    return reversed_day_year == year and reversed_month_day == year


def padlindromic_date2(date2):
    """True when both the ddmmyyyy and mmddyyyy spellings are palindromes."""
    day, month, year = date2.split('/')
    for spelling in (day + month + year, month + day + year):
        if spelling != spelling[::-1]:
            return False
    return True


def padlindromic_date3(date3):
    """True when day equals month and day+month mirrors the year."""
    day, month, year = date3.split('/')
    if day != month:
        return False
    return day + month == year[::-1]


pd1 = padlindromic_date1("02/02/2020"), padlindromic_date1("11/12/2019"), padlindromic_date1("11/12/2011")
print(pd1)
pd2 = padlindromic_date2("02/02/2020"), padlindromic_date2("11/12/2019"), padlindromic_date2("11/12/2011")
print(pd2)
pd3 = padlindromic_date3("02/02/2020"), padlindromic_date3("11/12/2019"), padlindromic_date3("11/12/2011")
print(pd3)
def padlindromic_date1(date1):
    """Day/year reversal rule.

    NOTE(review): (d + y) and (m + d) are 6 chars while y is 4, so the
    equality can never hold for a dd/mm/yyyy string — confirm intent.
    """
    d, m, y = date1.split('/')
    return all(part[::-1] == y for part in (d + y, m + d))


def padlindromic_date2(date2):
    """Palindrome check on both the ddmmyyyy and mmddyyyy spellings."""
    d, m, y = date2.split('/')
    ddmmyyyy = d + m + y
    mmddyyyy = m + d + y
    return ddmmyyyy == ddmmyyyy[::-1] and mmddyyyy == mmddyyyy[::-1]


def padlindromic_date3(date3):
    """Day must equal month and day+month must mirror the year."""
    d, m, y = date3.split('/')
    return d == m and (d + m) == y[::-1]


pd1 = (padlindromic_date1('02/02/2020'), padlindromic_date1('11/12/2019'), padlindromic_date1('11/12/2011'))
print(pd1)
pd2 = (padlindromic_date2('02/02/2020'), padlindromic_date2('11/12/2019'), padlindromic_date2('11/12/2011'))
print(pd2)
pd3 = (padlindromic_date3('02/02/2020'), padlindromic_date3('11/12/2019'), padlindromic_date3('11/12/2011'))
print(pd3)
# Project Euler problem 1: sum of all natural numbers below 1000 that are
# multiples of 3 or 5.
# Fix: the original accumulated into a variable named `sum`, shadowing the
# builtin; use the builtin `sum` over a generator expression instead.
total = sum(i for i in range(1000) if i % 3 == 0 or i % 5 == 0)
print(total)
# Add every number below 1000 that is divisible by 3 or by 5
# (Project Euler problem 1).
# NOTE(review): the accumulator name shadows the builtin `sum`; kept
# unchanged to preserve the module's public attribute.
sum = 0
n = 0
while n < 1000:
    if n % 3 == 0 or n % 5 == 0:
        sum += n
    n += 1
print(sum)
class ORMBaseException(Exception):
    """Base class for ORM errors; `message` holds the human-readable text."""

    def __init__(self, message: str = "") -> None:
        # Forward the message to Exception so that e.args / repr(e)
        # carry it as well (the original left args empty).
        self.message = message
        super().__init__(message)

    def __str__(self) -> str:
        return self.message


class FieldDoesNotExist(ORMBaseException):
    """Raised when the requested field does not exist.

    Fixes vs. the original:
    * typo in the user-facing message ("avaible" -> "available");
    * the message is now routed through the base __init__ (the original
      skipped super().__init__() entirely).
    """

    def __init__(self, field: str) -> None:
        super().__init__(f"This field '{field}' is not available")
class Ormbaseexception(Exception):
    """Root exception for ORM errors; str() yields the `message` attribute."""

    def __init__(self):
        self.message = ''
        super().__init__()

    def __str__(self):
        return self.message


class Fielddoesnotexist(Ormbaseexception):
    """Raised when a field name is not defined on the model.

    Fixed: the base class was written as `ORMBaseException`, a name that is
    not defined in this snippet, so importing the module raised NameError.
    It now inherits from the `Ormbaseexception` class defined above.
    """

    def __init__(self, field: str):
        # "avaible" typo preserved: it is a runtime message callers may match.
        self.message = f"This field '{field}' is not avaible"
r""" Global variables to the migration simulations and plot analysis. """ END_TIME = 13.2 # total simulation time in Gyr # Width of each annulus in kpc # This needs modified *only* if running the plotting scripts. ZONE_WIDTH = 0.1 MAX_SF_RADIUS = 15.5 # Radius in kpc beyond which the SFR = 0 # Stellar mass of Milky Way (Licquia & Newman 2015, ApJ, 806, 96) M_STAR_MW = 5.17e10 COLORMAP = "winter"
""" Global variables to the migration simulations and plot analysis. """ end_time = 13.2 zone_width = 0.1 max_sf_radius = 15.5 m_star_mw = 51700000000.0 colormap = 'winter'
# Translation table for atomic numbers to element names and vice versa.
# Note that the NIST database provides data up to atomic number 92 (= Uranium);
# heavier elements carry a density of 0.  Each entry is
# (atomic number, element name, symbol, material density).
# Fixed: entry 111 was "(111, "Roentgenium" "Rg", 0)" — the missing comma let
# Python fuse the two literals into one string "RoentgeniumRg", producing the
# only 3-tuple in the table and breaking any (Z, name, symbol, density) unpack.
ElementaryData = [
    (0, "Void", "X", 0), (1, "Hydrogen", "H", 8.375E-05), (2, "Helium", "He", 1.663E-04),
    (3, "Lithium", "Li", 5.340E-01), (4, "Beryllium", "Be", 1.848E+00), (5, "Boron", "B", 2.370E+00),
    (6, "Carbon", "C", 1.700E+00), (7, "Nitrogen", "N", 1.165E-03), (8, "Oxygen", "O", 1.332E-03),
    (9, "Fluorine", "F", 1.580E-03), (10, "Neon", "Ne", 8.385E-04), (11, "Sodium", "Na", 9.710E-01),
    (12, "Magnesium", "Mg", 1.740E+00), (13, "Aluminium", "Al", 2.699E+00), (14, "Silicon", "Si", 2.330E+00),
    (15, "Phosphorus", "P", 2.200E+00), (16, "Sulfur", "S", 2.000E+00), (17, "Chlorine", "Cl", 2.995E-03),
    (18, "Argon", "Ar", 1.662E-03), (19, "Potassium", "K", 8.620E-01), (20, "Calcium", "Ca", 1.550E+00),
    (21, "Scandium", "Sc", 2.989E+00), (22, "Titanium", "Ti", 4.540E+00), (23, "Vanadium", "V", 6.110E+00),
    (24, "Chromium", "Cr", 7.180E+00), (25, "Manganese", "Mn", 7.440E+00), (26, "Iron", "Fe", 7.874E+00),
    (27, "Cobalt", "Co", 8.900E+00), (28, "Nickel", "Ni", 8.902E+00), (29, "Copper", "Cu", 8.960E+00),
    (30, "Zinc", "Zn", 7.133E+00), (31, "Gallium", "Ga", 5.904E+00), (32, "Germanium", "Ge", 5.323E+00),
    (33, "Arsenic", "As", 5.730E+00), (34, "Selenium", "Se", 4.500E+00), (35, "Bromine", "Br", 7.072E-03),
    (36, "Krypton", "Kr", 3.478E-03), (37, "Rubidium", "Rb", 1.532E+00), (38, "Strontium", "Sr", 2.540E+00),
    (39, "Yttrium", "Y", 4.469E+00), (40, "Zirconium", "Zr", 6.506E+00), (41, "Niobium", "Nb", 8.570E+00),
    (42, "Molybdenum", "Mo", 1.022E+01), (43, "Technetium", "Tc", 1.150E+01), (44, "Ruthenium", "Ru", 1.241E+01),
    (45, "Rhodium", "Rh", 1.241E+01), (46, "Palladium", "Pd", 1.202E+01), (47, "Silver", "Ag", 1.050E+01),
    (48, "Cadmium", "Cd", 8.650E+00), (49, "Indium", "In", 7.310E+00), (50, "Tin", "Sn", 7.310E+00),
    (51, "Antimony", "Sb", 6.691E+00), (52, "Tellurium", "Te", 6.240E+00), (53, "Iodine", "I", 4.930E+00),
    (54, "Xenon", "Xe", 5.485E-03), (55, "Cesium", "Cs", 1.873E+00), (56, "Barium", "Ba", 3.500E+00),
    (57, "Lanthanum", "La", 6.154E+00), (58, "Cerium", "Ce", 6.657E+00), (59, "Praseodymium", "Pr", 6.710E+00),
    (60, "Neodymium", "Nd", 6.900E+00), (61, "Promethium", "Pm", 7.220E+00), (62, "Samarium", "Sm", 7.460E+00),
    (63, "Europium", "Eu", 5.243E+00), (64, "Gadolinium", "Gd", 7.900E+00), (65, "Terbium", "Tb", 8.229E+00),
    (66, "Dysprosium", "Dy", 8.550E+00), (67, "Holmium", "Ho", 8.795E+00), (68, "Erbium", "Er", 9.066E+00),
    (69, "Thulium", "Tm", 9.321E+00), (70, "Ytterbium", "Yb", 6.730E+00), (71, "Lutetium", "Lu", 9.840E+00),
    (72, "Hafnium", "Hf", 1.331E+01), (73, "Tantalum", "Ta", 1.665E+01), (74, "Tungsten", "W", 1.930E+01),
    (75, "Rhenium", "Re", 2.102E+01), (76, "Osmium", "Os", 2.257E+01), (77, "Iridium", "Ir", 2.242E+01),
    (78, "Platinum", "Pt", 2.145E+01), (79, "Gold", "Au", 1.932E+01), (80, "Mercury", "Hg", 1.355E+01),
    (81, "Thallium", "Tl", 1.172E+01), (82, "Lead", "Pb", 1.135E+01), (83, "Bismuth", "Bi", 9.747E+00),
    (84, "Polonium", "Po", 9.320E+00), (85, "Astatine", "At", 1.000E+01), (86, "Radon", "Rn", 9.066E-03),
    (87, "Francium", "Fr", 1.000E+01), (88, "Radium", "Ra", 5.000E+00), (89, "Actinium", "Ac", 1.007E+01),
    (90, "Thorium", "Th", 1.172E+01), (91, "Protactinium", "Pa", 1.537E+01), (92, "Uranium", "U", 1.895E+01),
    (93, "Neptunium", "Np", 0), (94, "Plutonium", "Pu", 0), (95, "Americium", "Am", 0),
    (96, "Curium", "Cm", 0), (97, "Berkelium", "Bk", 0), (98, "Californium", "Cf", 0),
    (99, "Einsteinium", "Es", 0), (100, "Fermium", "Fm", 0), (101, "Mendelevium", "Md", 0),
    (102, "Nobelium", "No", 0), (103, "Lawrencium", "Lr", 0), (104, "Rutherfordium", "Rf", 0),
    (105, "Dubnium", "Db", 0), (106, "Seaborgium", "Sg", 0), (107, "Bohrium", "Bh", 0),
    (108, "Hassium", "Hs", 0), (109, "Meitnerium", "Mt", 0), (110, "Darmstadtium", "Ds", 0),
    (111, "Roentgenium", "Rg", 0),  # fixed: comma restored between name and symbol
    (112, "Ununbium", "Uub", 0), (113, "Ununtrium", "Uut", 0), (114, "Ununquadium", "Uuq", 0),
    (115, "Ununpentium", "Uup", 0), (116, "Ununhexium", "Uuh", 0), (117, "Ununseptium", "Uus", 0),
    (118, "Ununoctium", "Uuo", 0),
]
# Translation table for atomic numbers to element names and vice versa.
# Densities beyond atomic number 92 (Uranium) are unavailable and set to 0.
# Each entry is (atomic number, element name, symbol, material density).
# Fixed: entry 111 was the 3-tuple (111, 'RoentgeniumRg', 0) — the element
# name and symbol had been fused into a single string, breaking any
# (z, name, symbol, density) unpacking over this table.
elementary_data = [
    (0, 'Void', 'X', 0), (1, 'Hydrogen', 'H', 8.375e-05), (2, 'Helium', 'He', 0.0001663),
    (3, 'Lithium', 'Li', 0.534), (4, 'Beryllium', 'Be', 1.848), (5, 'Boron', 'B', 2.37),
    (6, 'Carbon', 'C', 1.7), (7, 'Nitrogen', 'N', 0.001165), (8, 'Oxygen', 'O', 0.001332),
    (9, 'Fluorine', 'F', 0.00158), (10, 'Neon', 'Ne', 0.0008385), (11, 'Sodium', 'Na', 0.971),
    (12, 'Magnesium', 'Mg', 1.74), (13, 'Aluminium', 'Al', 2.699), (14, 'Silicon', 'Si', 2.33),
    (15, 'Phosphorus', 'P', 2.2), (16, 'Sulfur', 'S', 2.0), (17, 'Chlorine', 'Cl', 0.002995),
    (18, 'Argon', 'Ar', 0.001662), (19, 'Potassium', 'K', 0.862), (20, 'Calcium', 'Ca', 1.55),
    (21, 'Scandium', 'Sc', 2.989), (22, 'Titanium', 'Ti', 4.54), (23, 'Vanadium', 'V', 6.11),
    (24, 'Chromium', 'Cr', 7.18), (25, 'Manganese', 'Mn', 7.44), (26, 'Iron', 'Fe', 7.874),
    (27, 'Cobalt', 'Co', 8.9), (28, 'Nickel', 'Ni', 8.902), (29, 'Copper', 'Cu', 8.96),
    (30, 'Zinc', 'Zn', 7.133), (31, 'Gallium', 'Ga', 5.904), (32, 'Germanium', 'Ge', 5.323),
    (33, 'Arsenic', 'As', 5.73), (34, 'Selenium', 'Se', 4.5), (35, 'Bromine', 'Br', 0.007072),
    (36, 'Krypton', 'Kr', 0.003478), (37, 'Rubidium', 'Rb', 1.532), (38, 'Strontium', 'Sr', 2.54),
    (39, 'Yttrium', 'Y', 4.469), (40, 'Zirconium', 'Zr', 6.506), (41, 'Niobium', 'Nb', 8.57),
    (42, 'Molybdenum', 'Mo', 10.22), (43, 'Technetium', 'Tc', 11.5), (44, 'Ruthenium', 'Ru', 12.41),
    (45, 'Rhodium', 'Rh', 12.41), (46, 'Palladium', 'Pd', 12.02), (47, 'Silver', 'Ag', 10.5),
    (48, 'Cadmium', 'Cd', 8.65), (49, 'Indium', 'In', 7.31), (50, 'Tin', 'Sn', 7.31),
    (51, 'Antimony', 'Sb', 6.691), (52, 'Tellurium', 'Te', 6.24), (53, 'Iodine', 'I', 4.93),
    (54, 'Xenon', 'Xe', 0.005485), (55, 'Cesium', 'Cs', 1.873), (56, 'Barium', 'Ba', 3.5),
    (57, 'Lanthanum', 'La', 6.154), (58, 'Cerium', 'Ce', 6.657), (59, 'Praseodymium', 'Pr', 6.71),
    (60, 'Neodymium', 'Nd', 6.9), (61, 'Promethium', 'Pm', 7.22), (62, 'Samarium', 'Sm', 7.46),
    (63, 'Europium', 'Eu', 5.243), (64, 'Gadolinium', 'Gd', 7.9), (65, 'Terbium', 'Tb', 8.229),
    (66, 'Dysprosium', 'Dy', 8.55), (67, 'Holmium', 'Ho', 8.795), (68, 'Erbium', 'Er', 9.066),
    (69, 'Thulium', 'Tm', 9.321), (70, 'Ytterbium', 'Yb', 6.73), (71, 'Lutetium', 'Lu', 9.84),
    (72, 'Hafnium', 'Hf', 13.31), (73, 'Tantalum', 'Ta', 16.65), (74, 'Tungsten', 'W', 19.3),
    (75, 'Rhenium', 'Re', 21.02), (76, 'Osmium', 'Os', 22.57), (77, 'Iridium', 'Ir', 22.42),
    (78, 'Platinum', 'Pt', 21.45), (79, 'Gold', 'Au', 19.32), (80, 'Mercury', 'Hg', 13.55),
    (81, 'Thallium', 'Tl', 11.72), (82, 'Lead', 'Pb', 11.35), (83, 'Bismuth', 'Bi', 9.747),
    (84, 'Polonium', 'Po', 9.32), (85, 'Astatine', 'At', 10.0), (86, 'Radon', 'Rn', 0.009066),
    (87, 'Francium', 'Fr', 10.0), (88, 'Radium', 'Ra', 5.0), (89, 'Actinium', 'Ac', 10.07),
    (90, 'Thorium', 'Th', 11.72), (91, 'Protactinium', 'Pa', 15.37), (92, 'Uranium', 'U', 18.95),
    (93, 'Neptunium', 'Np', 0), (94, 'Plutonium', 'Pu', 0), (95, 'Americium', 'Am', 0),
    (96, 'Curium', 'Cm', 0), (97, 'Berkelium', 'Bk', 0), (98, 'Californium', 'Cf', 0),
    (99, 'Einsteinium', 'Es', 0), (100, 'Fermium', 'Fm', 0), (101, 'Mendelevium', 'Md', 0),
    (102, 'Nobelium', 'No', 0), (103, 'Lawrencium', 'Lr', 0), (104, 'Rutherfordium', 'Rf', 0),
    (105, 'Dubnium', 'Db', 0), (106, 'Seaborgium', 'Sg', 0), (107, 'Bohrium', 'Bh', 0),
    (108, 'Hassium', 'Hs', 0), (109, 'Meitnerium', 'Mt', 0), (110, 'Darmstadtium', 'Ds', 0),
    (111, 'Roentgenium', 'Rg', 0),  # fixed: name and symbol separated again
    (112, 'Ununbium', 'Uub', 0), (113, 'Ununtrium', 'Uut', 0), (114, 'Ununquadium', 'Uuq', 0),
    (115, 'Ununpentium', 'Uup', 0), (116, 'Ununhexium', 'Uuh', 0), (117, 'Ununseptium', 'Uus', 0),
    (118, 'Ununoctium', 'Uuo', 0),
]
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.


class Parameter(object):
    """This bean is used for building config entries."""

    def __init__(self, config):
        # Mandatory fields: missing keys raise KeyError, mirroring the
        # required shape of a config-entry dict.
        self.name = config['name']
        self.param_type = config['config_type']
        # `is_optional` is inverted into a `required` flag.
        self.required = not config['is_optional']
        # Optional fields with defaults.
        self.description = config.get('description', "No description")
        self.default_value = config.get('default_value', None)
        # The initial value starts out equal to the default.
        self.initial_value = self.default_value
        self.priority = int(config.get('priority', 2))
class Parameter(object):
    """This bean is used for building config entries."""

    def __init__(self, config):
        get = config.get
        self.name = config['name']
        self.description = get('description', 'No description')
        # A parameter is required exactly when it is not optional.
        self.required = not config['is_optional']
        self.default_value = get('default_value', None)
        # Initial value mirrors the default until explicitly changed.
        self.initial_value = self.default_value
        self.param_type = config['config_type']
        # Priority defaults to 2 and is coerced to int (configs may carry strings).
        self.priority = int(get('priority', 2))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

__author__ = 'Phoenix1327'

# Rebinding demo: `b` keeps referring to the original 'ABC' object even
# after `a` is rebound to 'XYZ'.
a = 'ABC'
b = a
a = 'XYZ'
print(b)

# Literal exercise: numbers plus strings with quotes, escapes, and a newline.
n = 123
f = 456.789
s1 = 'Hello, world'
s2 = "Hello, 'Adam'"
s3 = 'Hello, "Bart"'
s4 = 'Hello,\nLisa!'

for value in (n, f, s1, s2, s3, s4):
    print(value)
__author__ = 'Phoenix1327'

# `b` is bound before `a` is rebound, so it still prints 'ABC'.
a = 'ABC'
b = a
a = 'XYZ'
print(b)

# Assorted literals: int, float, and strings with embedded quotes/newline.
n = 123
f = 456.789
s1 = 'Hello, world'
s2 = 'Hello, \'Adam\''
s3 = 'Hello, "Bart"'
s4 = 'Hello,\nLisa!'
print(n)
print(f)
print(s1)
print(s2)
print(s3)
print(s4)
class URL(object):
    """Endpoint paths, query-parameter names, and auth-header constants for
    the Nimbbl UAT API.

    Fixed: every attribute carried a trailing C/Java-style semicolon, which
    is legal but non-idiomatic Python; all names and values are unchanged.
    """

    # API root.
    BASE_URL = 'https://uatapi.nimbbl.tech/api'
    ORDER_URL = "/orders"

    # Auth and order endpoints.
    AUTHURL = "v2/generate-token"
    ORDER_CREATE = "v2/create-order"
    ORDER_GET = "v2/get-order"
    ORDER_LIST = "orders/many?f=&pt=yes"

    # List-endpoint query parameters.
    LIST_QUERYPARAM1 = "f"
    LIST_QUERYPARAM2 = "pt"
    NO = "no"
    Empty = ""

    # User endpoints.
    USER_CREATE = "users/create"
    USER_GET = "users/one"
    USER_LIST = "users/many?f=&pt=yes"

    # Transaction endpoints (Transaction_LIST is a %-format template).
    Transaction_CREATE = "transactions/create"
    Transaction_GET = "transactions/one"
    Transaction_LIST = "transactions/many?f=%sandpt=no"

    # Credential / header keys.
    ACCESS_KEY = "access_key"
    SECRET_KEY = "access_secret"
    TOKEN = "token"
    Bearer = "Bearer "
class Url(object):
    """Nimbbl UAT API endpoint paths, query-parameter names, and auth keys."""

    # API root and legacy order path.
    base_url = 'https://uatapi.nimbbl.tech/api'
    order_url = '/orders'

    # Auth + order endpoints.
    authurl = 'v2/generate-token'
    order_create = 'v2/create-order'
    order_get = 'v2/get-order'
    order_list = 'orders/many?f=&pt=yes'

    # Query-parameter names and common values.
    list_queryparam1 = 'f'
    list_queryparam2 = 'pt'
    no = 'no'
    empty = ''

    # User endpoints.
    user_create = 'users/create'
    user_get = 'users/one'
    user_list = 'users/many?f=&pt=yes'

    # Transaction endpoints (transaction_list is a %-format template).
    transaction_create = 'transactions/create'
    transaction_get = 'transactions/one'
    transaction_list = 'transactions/many?f=%sandpt=no'

    # Credential / header keys.
    access_key = 'access_key'
    secret_key = 'access_secret'
    token = 'token'
    bearer = 'Bearer '
#!/usr/bin/python
# encoding: utf-8


def search(key, *args, **kwargs):
    """Placeholder search entry point (not yet implemented).

    Fixed: the signature was `(key, *args, kwargs)`, which makes `kwargs` a
    *required* keyword-only parameter rather than the conventional keyword
    catch-all — every plain call would raise TypeError.  `**kwargs` is
    backward compatible: a caller that passed `kwargs=...` explicitly now
    simply sees it inside the dict.
    """
    pass


if __name__ == "__main__":
    pass
def search(key, *args, **kwargs):
    """Placeholder search entry point (not yet implemented).

    Fixed: `kwargs` after `*args` without `**` was a required keyword-only
    argument, so any call omitting `kwargs=` raised TypeError; `**kwargs`
    restores the intended variadic-keyword signature and still accepts an
    explicit `kwargs=` keyword.
    """
    pass


if __name__ == '__main__':
    pass
"""A utility module for ASP (Active Server Pages on MS Internet Info Server. Contains: iif -- A utility function to avoid using "if" statements in ASP <% tags """ def iif(cond, t, f): if cond: return t else: return f
"""A utility module for ASP (Active Server Pages on MS Internet Info Server. Contains: iif -- A utility function to avoid using "if" statements in ASP <% tags """ def iif(cond, t, f): if cond: return t else: return f
# -*- coding: utf-8 -*-
# @Author: rish
# @Date: 2020-08-02 23:03:40


def info():
    """Print a one-line description of the er_extractor module.

    Fixed: the message used backslash line-continuations *inside* the string
    literal, which embedded the source indentation into the printed text
    ("...exchange        rates...").  The message is now built with implicit
    string concatenation so each wrapped line joins with a single space.
    """
    print(
        'er_extractor module - functionality for data collection of exchange '
        'rates based on provided arguments and persistence of data collected '
        'into the database.'
    )
def info():
    """Print a one-line description of the er_extractor module.

    Fixed: the message contained literal tab escapes ("exchange\\t\\trates"),
    residue of backslash line-continuations inside the original string
    literal; the wrapped pieces are now joined with single spaces via
    implicit string concatenation.
    """
    print(
        'er_extractor module - functionality for data collection of exchange '
        'rates based on provided arguments and persistence of data collected '
        'into the database.'
    )
class Node(object):
    """A binary-tree node: a payload plus optional left/right child links."""

    def __init__(self, value=None, leftChild=None, rightChild=None):
        self.value = value            # payload stored at this node
        self.leftChild = leftChild    # left subtree root (or None)
        self.rightChild = rightChild  # right subtree root (or None)
class Node(object):
    """Binary-tree node holding a value and two optional child references."""

    def __init__(self, value=None, leftChild=None, rightChild=None):
        # All three attributes default to None so a bare Node() is a leaf
        # placeholder that can be filled in later.
        self.value = value
        self.leftChild = leftChild
        self.rightChild = rightChild
class TreeNode:
    """Plain binary-tree node with a value and two child links."""

    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Solution:
    def lowestCommonAncestor(self, root, p, q):
        """Return the lowest common ancestor of nodes p and q under root.

        Classic recursive LCA: a subtree that contains neither target
        returns None; a subtree rooted at a target returns that target;
        a node whose two subtrees each yield a result is the answer.
        """
        # Base cases: fell off the tree, or found one of the targets.
        if root is None or root == p or root == q:
            return root
        left = self.lowestCommonAncestor(root.left, p, q)
        right = self.lowestCommonAncestor(root.right, p, q)
        # Targets split across both subtrees -> this node is the LCA.
        if left is not None and right is not None:
            return root
        # Otherwise propagate whichever side found something (or None).
        return left if left is not None else right


if __name__ == "__main__":
    solver = Solution()
    # Build the sample tree:        3
    #                             /   \
    #                            5     1
    #                           / \   / \
    #                          6   2 0   8
    #                             / \
    #                            7   4
    root = TreeNode(3)
    root.left = TreeNode(5)
    root.right = TreeNode(1)
    root.left.left = TreeNode(6)
    root.left.right = TreeNode(2)
    root.left.right.left = TreeNode(7)
    root.left.right.right = TreeNode(4)
    root.right.left = TreeNode(0)
    root.right.right = TreeNode(8)
    print(solver.lowestCommonAncestor(root, root.left, root.right))
class Treenode:
    """Plain binary-tree node with a value and two child links."""

    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Solution:
    def lowest_common_ancestor(self, root, p, q):
        """Return the lowest common ancestor of nodes p and q under root.

        Fixed: the recursive calls targeted `self.lowestCommonAncestor`,
        a method that does not exist on this class, so any non-trivial
        input raised AttributeError.
        """
        if root is None or root == p or root == q:
            return root
        l = self.lowest_common_ancestor(root.left, p, q)
        r = self.lowest_common_ancestor(root.right, p, q)
        if l is None:
            return r
        else:
            if r is None:
                return l
            # p and q sit in different subtrees: root is the LCA.
            return root


if __name__ == '__main__':
    # Fixed: the driver called undefined names `solution()` / `tree_node()`
    # and the nonexistent `lowestCommonAncestor` method.
    solution = Solution()
    root = Treenode(3)
    root.left = Treenode(5)
    root.right = Treenode(1)
    root.left.left = Treenode(6)
    root.left.right = Treenode(2)
    root.left.right.left = Treenode(7)
    root.left.right.right = Treenode(4)
    root.right.left = Treenode(0)
    root.right.right = Treenode(8)
    print(solution.lowest_common_ancestor(root, root.left, root.right))
#!/usr/bin/env python3 """ A crude solver for [Move Here Move There] (https://www.newgrounds.com/portal/view/718498). """ board = { (0, 0): "X", (3, 0): "X", (4, 0): [(-3, 3)], (6, 0): "X", (0, 1): "X", (4, 2): "X", (0, 4): [(4, 0)], (3, 4): [(3, -3)], (4, 4): [(1, 0), (-1, 1)], (1, 5): "X", (3, 5): [(-1, -1)], (5, 5): "X", } pieces = [ [(-1, -1), (0, 4)], [(2, -2), (-1, -1)], [(4, 0)], [(0, -5)], [(2, 2)], [(0, 3)], [(-2, -2)], [(-5, 0)], ] start = (3, 4) maxes = (6, 5) def move(pos, piece): """ Return new position after moving by one piece. :param pos: A 2-tuple of `int`s, describing the starting position. :param piece: A list of 2-tuples of `int`s, describing one piece's moves. :raise ValueError: Raised if the move is impossible. :return: A 2-tuple of `int`s describing the new position. """ for jmp in piece: pos = (pos[0] + jmp[0], pos[1] + jmp[1]) if not (0 <= pos[0] <= maxes[0] and 0 <= pos[1] <= maxes[1]): raise ValueError() return pos def step(pos, pieces, history): """ Execute one step. :param pos: A 2-tuple of `int`s, describing the starting position. :param pieces: A list of lists of 2-tuples of `int`s, describing still unused pieces. :param history: A list of lists of 2-tuples of `int`s, describing already used pieces. """ if not pieces and all(v == "X" for v in board.values()): print(f"{pos}: {history}") return try: nxt = board[pos] except KeyError: for pidx, piece in enumerate(pieces): try: pos2 = move(pos, piece) except ValueError: continue else: board[pos] = "X" step(pos2, pieces[:pidx] + pieces[pidx + 1:], history + [piece]) del board[pos] else: if nxt == "X": return try: pos2 = move(pos, nxt) except ValueError: return else: board[pos] = "X" step(pos2, pieces, history) board[pos] = nxt step(start, pieces, list())
""" A crude solver for [Move Here Move There] (https://www.newgrounds.com/portal/view/718498). """ board = {(0, 0): 'X', (3, 0): 'X', (4, 0): [(-3, 3)], (6, 0): 'X', (0, 1): 'X', (4, 2): 'X', (0, 4): [(4, 0)], (3, 4): [(3, -3)], (4, 4): [(1, 0), (-1, 1)], (1, 5): 'X', (3, 5): [(-1, -1)], (5, 5): 'X'} pieces = [[(-1, -1), (0, 4)], [(2, -2), (-1, -1)], [(4, 0)], [(0, -5)], [(2, 2)], [(0, 3)], [(-2, -2)], [(-5, 0)]] start = (3, 4) maxes = (6, 5) def move(pos, piece): """ Return new position after moving by one piece. :param pos: A 2-tuple of `int`s, describing the starting position. :param piece: A list of 2-tuples of `int`s, describing one piece's moves. :raise ValueError: Raised if the move is impossible. :return: A 2-tuple of `int`s describing the new position. """ for jmp in piece: pos = (pos[0] + jmp[0], pos[1] + jmp[1]) if not (0 <= pos[0] <= maxes[0] and 0 <= pos[1] <= maxes[1]): raise value_error() return pos def step(pos, pieces, history): """ Execute one step. :param pos: A 2-tuple of `int`s, describing the starting position. :param pieces: A list of lists of 2-tuples of `int`s, describing still unused pieces. :param history: A list of lists of 2-tuples of `int`s, describing already used pieces. """ if not pieces and all((v == 'X' for v in board.values())): print(f'{pos}: {history}') return try: nxt = board[pos] except KeyError: for (pidx, piece) in enumerate(pieces): try: pos2 = move(pos, piece) except ValueError: continue else: board[pos] = 'X' step(pos2, pieces[:pidx] + pieces[pidx + 1:], history + [piece]) del board[pos] else: if nxt == 'X': return try: pos2 = move(pos, nxt) except ValueError: return else: board[pos] = 'X' step(pos2, pieces, history) board[pos] = nxt step(start, pieces, list())