content
stringlengths
7
1.05M
fixed_cases
stringlengths
1
1.28M
# TODO: *3 @task def setupNetwork(ifaces): interfaces = '''auto lo iface lo inet loopback ''' for iface, config in ifaces.items(): interfaces += ''' auto %s iface %s inet static address %s netmask %s ''' % (iface, iface, config[0], config[1]) if iface == 'eth1': interfaces += ' gateway %s\n' % config[2] sudo('echo "%s" > /etc/network/interfaces' % interfaces) sudo("ifdown -a; ifup -a", timeout=1)
@task def setup_network(ifaces): interfaces = 'auto lo\niface lo inet loopback\n' for (iface, config) in ifaces.items(): interfaces += '\nauto %s\niface %s inet static\n address %s\n netmask %s\n' % (iface, iface, config[0], config[1]) if iface == 'eth1': interfaces += ' gateway %s\n' % config[2] sudo('echo "%s" > /etc/network/interfaces' % interfaces) sudo('ifdown -a; ifup -a', timeout=1)
OCTICON_SHARE = """ <svg class="octicon octicon-share" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16"><path fill-rule="evenodd" d="M7.823.177L4.927 3.073a.25.25 0 00.177.427H7.25v5.75a.75.75 0 001.5 0V3.5h2.146a.25.25 0 00.177-.427L8.177.177a.25.25 0 00-.354 0zM3.75 6.5a.25.25 0 00-.25.25v6.5c0 .138.112.25.25.25h8.5a.25.25 0 00.25-.25v-6.5a.25.25 0 00-.25-.25h-1a.75.75 0 010-1.5h1c.966 0 1.75.784 1.75 1.75v6.5A1.75 1.75 0 0112.25 15h-8.5A1.75 1.75 0 012 13.25v-6.5C2 5.784 2.784 5 3.75 5h1a.75.75 0 110 1.5h-1z"></path></svg> """
octicon_share = '\n<svg class="octicon octicon-share" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16"><path fill-rule="evenodd" d="M7.823.177L4.927 3.073a.25.25 0 00.177.427H7.25v5.75a.75.75 0 001.5 0V3.5h2.146a.25.25 0 00.177-.427L8.177.177a.25.25 0 00-.354 0zM3.75 6.5a.25.25 0 00-.25.25v6.5c0 .138.112.25.25.25h8.5a.25.25 0 00.25-.25v-6.5a.25.25 0 00-.25-.25h-1a.75.75 0 010-1.5h1c.966 0 1.75.784 1.75 1.75v6.5A1.75 1.75 0 0112.25 15h-8.5A1.75 1.75 0 012 13.25v-6.5C2 5.784 2.784 5 3.75 5h1a.75.75 0 110 1.5h-1z"></path></svg>\n'
#pylint: disable=invalid-name,missing-docstring # Basic test with a list TEST_LIST1 = ['a' 'b'] # [implicit-str-concat] # Testing with unicode strings in a tuple, with a comma AFTER concatenation TEST_LIST2 = (u"a" u"b", u"c") # [implicit-str-concat] # Testing with raw strings in a set, with a comma BEFORE concatenation TEST_LIST3 = {r'''a''', r'''b''' r'''c'''} # [implicit-str-concat] # Testing that only ONE warning is generated when string concatenation happens # in the middle of a list TEST_LIST4 = ["""a""", """b""" """c""", """d"""] # [implicit-str-concat] # The following shouldn't raise a warning because it is a function call print('a', 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb' 'ccc') # The following shouldn't raise a warning because string literals are # on different lines TEST_LIST5 = ('a', 'b' 'c') # The following shouldn't raise a warning because of the escaped newline TEST_LIST6 = ('bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb \ ccc') # But we should emit when there is an actual juxtaposition # +1: [implicit-str-concat] TEST_LIST7 = ('a' 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb \ ccc') # No warning for bytes TEST_LIST8 = [b'A' b'B']
test_list1 = ['ab'] test_list2 = (u'ab', u'c') test_list3 = {'a', 'bc'} test_list4 = ['a', 'bc', 'd'] print('a', 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbccc') test_list5 = ('a', 'bc') test_list6 = 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb ccc' test_list7 = 'abbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb ccc' test_list8 = [b'AB']
class LexpyError(Exception): pass class InvalidWildCardExpressionError(LexpyError): def __init__(self, expr, message): self.expr = expr self.message = message def __str__(self): return repr(': '.join([self.message, self.expr]))
class Lexpyerror(Exception): pass class Invalidwildcardexpressionerror(LexpyError): def __init__(self, expr, message): self.expr = expr self.message = message def __str__(self): return repr(': '.join([self.message, self.expr]))
# Python program to create a bytearray from a list nums = [10, 20, 56, 35, 17, 99] values = bytearray(nums) for x in values: print(x)
nums = [10, 20, 56, 35, 17, 99] values = bytearray(nums) for x in values: print(x)
#!/usr/bin/env python3 x = 0 y = (-1/4)*(x-1)+3 print(y)
x = 0 y = -1 / 4 * (x - 1) + 3 print(y)
"""Constants for Camera component.""" DOMAIN = "camera" DATA_CAMERA_PREFS = "camera_prefs" PREF_PRELOAD_STREAM = "preload_stream"
"""Constants for Camera component.""" domain = 'camera' data_camera_prefs = 'camera_prefs' pref_preload_stream = 'preload_stream'
#!/usr/bin/env python """ Problem 38 daily-coding-problem.com """ def n_queens(n, board=[]): ''' see https://www.dailycodingproblem.com/blog/an-introduction-to-backtracking/''' if n == len(board): return 1 count = 0 for col in range(n): board.append(col) if is_valid(board): count += n_queens(n, board) board.pop() return count def is_valid(board): current_queen_row, current_queen_col = len(board) - 1, board[-1] # Check if any queens can attack the last queen. for row, col in enumerate(board[:-1]): diff = abs(current_queen_col - col) if diff == 0 or diff == current_queen_row - row: return False return True if __name__ == "__main__": assert n_queens(1) == 1 assert n_queens(4) == 2 assert n_queens(7) == 40 assert n_queens(10) == 724
""" Problem 38 daily-coding-problem.com """ def n_queens(n, board=[]): """ see https://www.dailycodingproblem.com/blog/an-introduction-to-backtracking/""" if n == len(board): return 1 count = 0 for col in range(n): board.append(col) if is_valid(board): count += n_queens(n, board) board.pop() return count def is_valid(board): (current_queen_row, current_queen_col) = (len(board) - 1, board[-1]) for (row, col) in enumerate(board[:-1]): diff = abs(current_queen_col - col) if diff == 0 or diff == current_queen_row - row: return False return True if __name__ == '__main__': assert n_queens(1) == 1 assert n_queens(4) == 2 assert n_queens(7) == 40 assert n_queens(10) == 724
class American(object): pass class NewYorker(American): pass anAmerican = American() aNewYorker = NewYorker()
class American(object): pass class Newyorker(American): pass an_american = american() a_new_yorker = new_yorker()
# [7 kyu] Descending Order # # Author: Hsins # Date: 2019/12/31 def descending_order(num): return int("".join(sorted(str(num), reverse=True)))
def descending_order(num): return int(''.join(sorted(str(num), reverse=True)))
def minimum_bracket_reversals(input_string): if len(input_string) % 2 == 1: return -1 stack = Stack() count = 0 for bracket in input_string: if stack.is_empty(): stack.push(bracket) else: top = stack.top() if top != bracket: if top == '{': stack.pop() continue stack.push(bracket) ls = list() while not stack.is_empty(): first = stack.pop() second = stack.pop() ls.append(first) ls.append(second) if first == '}' and second == '}': count += 1 elif first == '{' and second == '}': count += 2 elif first == '{' and second == '{': count += 1 return count
def minimum_bracket_reversals(input_string): if len(input_string) % 2 == 1: return -1 stack = stack() count = 0 for bracket in input_string: if stack.is_empty(): stack.push(bracket) else: top = stack.top() if top != bracket: if top == '{': stack.pop() continue stack.push(bracket) ls = list() while not stack.is_empty(): first = stack.pop() second = stack.pop() ls.append(first) ls.append(second) if first == '}' and second == '}': count += 1 elif first == '{' and second == '}': count += 2 elif first == '{' and second == '{': count += 1 return count
# -*- coding: utf-8 -*- """ 460. LFU Cache Design and implement a data structure for Least Frequently Used (LFU) cache. It should support the following operations: get and set. """ class LFUCache(object): def __init__(self, capacity): """ :type capacity: int """ self.capacity = capacity def get(self, key): """ :type key: int :rtype: int """ def set(self, key, value): """ :type key: int :type value: int :rtype: void """ # Your LFUCache object will be instantiated and called as such: # obj = LFUCache(capacity) # param_1 = obj.get(key) # obj.set(key,value) def main(): pass if __name__ == "__main__": main()
""" 460. LFU Cache Design and implement a data structure for Least Frequently Used (LFU) cache. It should support the following operations: get and set. """ class Lfucache(object): def __init__(self, capacity): """ :type capacity: int """ self.capacity = capacity def get(self, key): """ :type key: int :rtype: int """ def set(self, key, value): """ :type key: int :type value: int :rtype: void """ def main(): pass if __name__ == '__main__': main()
def intercalaEmOrdem(lista1, lista2): intercalada = [] lista1.sort() lista2.sort() while len(lista1) > 0 and len(lista2) > 0: if lista1[0] < lista2[0]: intercalada.append(lista1.pop(0)) else: intercalada.append(lista2.pop(0)) if len(lista1) > 0: intercalada += lista1 if len(lista2) > 0: intercalada += lista2 return intercalada lista1 = [2, 4, 6, 8, 10] lista2 = [1, 3, 5, 7, 9] print(intercalaEmOrdem(lista1, lista2))
def intercala_em_ordem(lista1, lista2): intercalada = [] lista1.sort() lista2.sort() while len(lista1) > 0 and len(lista2) > 0: if lista1[0] < lista2[0]: intercalada.append(lista1.pop(0)) else: intercalada.append(lista2.pop(0)) if len(lista1) > 0: intercalada += lista1 if len(lista2) > 0: intercalada += lista2 return intercalada lista1 = [2, 4, 6, 8, 10] lista2 = [1, 3, 5, 7, 9] print(intercala_em_ordem(lista1, lista2))
def f2(a): global b print(a) print(b) b = 9 print(b) b = 5 f2(3)
def f2(a): global b print(a) print(b) b = 9 print(b) b = 5 f2(3)
# # PySNMP MIB module CPQCLUSTER-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CPQCLUSTER-MIB # Produced by pysmi-0.3.4 at Wed May 1 12:27:15 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint") compaq, cpqHoTrapFlags = mibBuilder.importSymbols("CPQHOST-MIB", "compaq", "cpqHoTrapFlags") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") sysName, = mibBuilder.importSymbols("SNMPv2-MIB", "sysName") Gauge32, Counter64, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, Unsigned32, Integer32, TimeTicks, iso, Counter32, MibIdentifier, Bits, ObjectIdentity, NotificationType, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "Counter64", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "Unsigned32", "Integer32", "TimeTicks", "iso", "Counter32", "MibIdentifier", "Bits", "ObjectIdentity", "NotificationType", "IpAddress") DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention") cpqCluster = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15)) cpqClusterMibRev = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15, 1)) cpqClusterComponent = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15, 2)) cpqClusterTrap = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15, 3)) cpqClusterInterface = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 1)) cpqClusterInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 2)) cpqClusterNode = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 3)) cpqClusterResource = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 4)) cpqClusterInterconnect = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 5)) cpqClusterNetwork = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 6)) cpqClusterOsCommon = MibIdentifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4)) cpqClusterMibRevMajor = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterMibRevMajor.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterMibRevMajor.setDescription('The Major Revision level of the MIB. A change in the major revision level represents a major change in the architecture of the MIB. A change in the major revision level may indicate a significant change in the information supported and/or the meaning of the supported information. Correct interpretation of data may require a MIB document with the same major revision level.') cpqClusterMibRevMinor = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterMibRevMinor.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterMibRevMinor.setDescription('The Minor Revision level of the MIB. A change in the minor revision level may represent some minor additional support, no changes to any pre-existing information has occurred.') cpqClusterMibCondition = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("ok", 2), ("degraded", 3), ("failed", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterMibCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterMibCondition.setDescription('The overall condition of the cluster represented by this MIB. This variable is the same as cpqClusterCondition in the Cluster Info Group. It is a combination of the Cluster node conditions, the resource conditions, and the network conditions as defined later in the Cluster Node group the Cluster Resource group, and the Cluster Network group. other(1) The cluster condition can not be determined. Every node condition, resource condition, and network condition is undetermined. ok(2) The cluster condition is functioning normally. Every node condition, resource condition, and network condition is ok. degraded(3) The cluster condition is degraded if at least one node condition is failed or degraded or at least one resource condition, or one network condition is degraded. failed(4) The cluster condition is failed if every node condition is failed, or at least one resource condition is failed, or at least one network condition is failed.') cpqClusterOsCommonPollFreq = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite") if mibBuilder.loadTexts: cpqClusterOsCommonPollFreq.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterOsCommonPollFreq.setDescription("The Insight Agent's polling frequency. The frequency, in seconds, at which the Insight Agent requests information from the device driver. A frequency of zero (0) indicates that the Insight Agent retrieves the information upon request of a management station, it does not poll the device driver at a specific interval. If the poll frequency is zero (0) all attempts to write to this object will fail. If the poll frequency is non-zero, setting this value will change the polling frequency of the Insight Agent. Setting the poll frequency to zero (0) will always fail, an agent may also choose to fail any request to change the poll frequency to a value that would severely impact system performance.") cpqClusterOsCommonModuleTable = MibTable((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2), ) if mibBuilder.loadTexts: cpqClusterOsCommonModuleTable.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleTable.setDescription('A table of software modules that provide an interface to the device this MIB describes.') cpqClusterOsCommonModuleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1), ).setIndexNames((0, "CPQCLUSTER-MIB", "cpqClusterOsCommonModuleIndex")) if mibBuilder.loadTexts: cpqClusterOsCommonModuleEntry.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleEntry.setDescription('A description of a software module that provides an interface to the device this MIB describes.') cpqClusterOsCommonModuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterOsCommonModuleIndex.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleIndex.setDescription('A unique index for this module description.') cpqClusterOsCommonModuleName = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterOsCommonModuleName.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleName.setDescription('The module name.') cpqClusterOsCommonModuleVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 5))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterOsCommonModuleVersion.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleVersion.setDescription('The module version in XX.YY format. Where XX is the major version number and YY is the minor version number. This field will be null (size 0) string if the agent cannot provide the module version.') cpqClusterOsCommonModuleDate = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(7, 7)).setFixedLength(7)).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterOsCommonModuleDate.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleDate.setDescription('The module date. field octets contents range ===== ====== ======= ===== 1 1-2 year 0..65536 2 3 month 1..12 3 4 day 1..31 4 5 hour 0..23 5 6 minute 0..59 6 7 second 0..60 (use 60 for leap-second) This field will be set to year = 0 if the agent cannot provide the module date. The hour, minute, and second field will be set to zero (0) if they are not relevant. The year field is set with the most significant octet first.') cpqClusterOsCommonModulePurpose = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterOsCommonModulePurpose.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModulePurpose.setDescription('The purpose of the module described in this entry.') cpqClusterName = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite") if mibBuilder.loadTexts: cpqClusterName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterName.setDescription('The name of the cluster.') cpqClusterCondition = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("ok", 2), ("degraded", 3), ("failed", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterCondition.setDescription('The cluster condition as reported by this node. It is a combination of the Cluster node conditions, resource conditions, and network conditions as defined later in the Cluster Node group, Cluster Resource group, and Cluster Network group. other(1) The cluster condition can not be determined. Every node condition, resource condition, and network condition is undetermined. ok(2) The cluster condition is functioning normally. Every node condition, resource condition, and network condition is ok. degraded(3) The cluster condition is degraded if at least one node condition is failed or degraded or at least one resource condition, or one network condition is degraded. failed(4) The cluster condition is failed if every node condition is failed, or at least one resource condition is failed, or at least one network condition is failed.') cpqClusterIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterIpAddress.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterIpAddress.setDescription("The first cluster static IP address enumerated. This cluster IP address and any other cluster IP address are in the Cluster Resource Group with the resource type 'IP Address'.") cpqClusterQuorumResource = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterQuorumResource.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterQuorumResource.setDescription('The Quorum resource name for the cluster. This number is the index into the resource table which contains the Quorum resource. -1 No Quorum resource available. 0..64 Index into the resource table.') cpqClusterMajorVersion = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 5), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterMajorVersion.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterMajorVersion.setDescription('Identifies the major version number of the cluster software.') cpqClusterMinorVersion = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 6), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterMinorVersion.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterMinorVersion.setDescription('Identifies the minor version number of the cluster software.') cpqClusterCSDVersion = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterCSDVersion.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterCSDVersion.setDescription('The latest Service Pack installed on the system. If no Service Pack has been installed, the string is empty.') cpqClusterVendorId = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterVendorId.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterVendorId.setDescription('The cluster software vendor identifier information.') cpqClusterResourceAggregateCondition = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("ok", 2), ("degraded", 3), ("failed", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourceAggregateCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceAggregateCondition.setDescription('The cluster resource aggregate condition as reported by this node. This condition is derived directly from each and every Cluster resource condition as defined later in the Cluster Resource group. other(1) The condition can not be determined, which equates to each and every resource condition as undetermined. ok(2) The condition is functioning normally, which equates to each and every resource condition as ok. degraded(3) The condition is degraded if at least one resource condition is degraded. failed(4) The condition is failed if at least one resource condition is failed.') cpqClusterNetworkAggregateCondition = MibScalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("ok", 2), ("degraded", 3), ("failed", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNetworkAggregateCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkAggregateCondition.setDescription('The cluster network aggregate condition as reported by this node. This condition is derived directly from the condition of each Cluster network with a role of internal, or clientAndInternal or client as defined later in the Cluster Network group. Networks with a role of none are not considered in overall condition. other(1) The condition can not be determined, all network conditions are undetermined. ok(2) The condition is functioning normally, which equates to each and every network condition as ok. degraded(3) The condition is degraded if at least one network condition is degraded. failed(4) The condition is failed if at least one network condition is failed.') cpqClusterNodeTable = MibTable((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1), ) if mibBuilder.loadTexts: cpqClusterNodeTable.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeTable.setDescription('A table of cluster node entries.') cpqClusterNodeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1, 1), ).setIndexNames((0, "CPQCLUSTER-MIB", "cpqClusterNodeIndex")) if mibBuilder.loadTexts: cpqClusterNodeEntry.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeEntry.setDescription('A description of a cluster node') cpqClusterNodeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNodeIndex.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeIndex.setDescription('A unique index for this node entry.') cpqClusterNodeName = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNodeName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeName.setDescription('The name of the node.') cpqClusterNodeStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("other", 1), ("nodeUp", 2), ("nodeDown", 3), ("nodePaused", 4), ("nodeJoining", 5)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNodeStatus.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeStatus.setDescription('The current status of the node. The following values are defined: other(1) - Indicates that an error has occurred and the exact state of the node could not be determined, or the node status is unavailable. nodeUp(2) - The node is operating as an active member of a cluster. A node that is up responds to updates to the cluster database, can host and manage groups, and can maintain communication with other nodes in the cluster. nodeDown(3) - The node is trying to form or rejoin a cluster or is down. A node that is down is not an active cluster member and it may or may not be running. The Cluster Service may have started and then failed, or may have failed to start completely. nodePaused(4) - The node is operating as an active member of a cluster but cannot host any resources or resource groups,is up but cluster activity is paused. Nodes that are undergoing maintenance are typically placed in this state. nodeJoining(5) - The node is in the process of joining a cluster. This is a short lived state.') cpqClusterNodeCondition = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("ok", 2), ("degraded", 3), ("failed", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNodeCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeCondition.setDescription('The current condition of the node. The following values are defined: other(1) - The node status is unavailable, or could not be determined. ok(2) - The node status is nodeUp. degraded(3) - The node status is nodeUnavailable or nodePaused or nodeJoining. failed(4) - The node status is nodeDown.') cpqClusterResourceTable = MibTable((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1), ) if mibBuilder.loadTexts: cpqClusterResourceTable.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceTable.setDescription('A table of resources managed by the cluster reported by this MIB.') cpqClusterResourceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1), ).setIndexNames((0, "CPQCLUSTER-MIB", "cpqClusterResourceIndex")) if mibBuilder.loadTexts: cpqClusterResourceEntry.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceEntry.setDescription('The properties describing a resource managed by the cluster.') cpqClusterResourceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourceIndex.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceIndex.setDescription('A unique index for this resource entry.') cpqClusterResourceName = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourceName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceName.setDescription('The name of the resource. It must be unique within the cluster.') cpqClusterResourceType = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourceType.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceType.setDescription("The resource type, such as 'Physical Disk', 'Generic Application', 'IP Address', 'File Share', 'Network Name', etc..") cpqClusterResourceState = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("other", 1), ("online", 2), ("offline", 3), ("failed", 4), ("onlinePending", 5), ("offlinePending", 6)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourceState.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceState.setDescription("The resource's current state. The following values are defined: other(1) - Indicates that an error has occurred and the exact state of the resource could not be determined or the resource state is unavailable. online(2) - The resource is online and functioning normally. offline(3) - The resource is offline. failed(4) - The resource has failed. onlinePending(5) - The resource is in the process of coming online. offlinePending(6)- The resource is in the process of going offline.") cpqClusterResourceOwnerNode = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourceOwnerNode.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceOwnerNode.setDescription('The node in the cluster where the group of the resource is currently online.') cpqClusterResourcePhysId = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourcePhysId.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourcePhysId.setDescription("The physical identification for resource type 'Physical Disk'. It contains the following components: storage box name, logical drive NN. where NN is a number from 0..n. It is blank for all other resource types.") cpqClusterResourceCondition = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("ok", 2), ("degraded", 3), ("failed", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourceCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceCondition.setDescription('The resource condition. The following values are defined: other(1) - Unable to determine the resource condition. ok(2) - The resource status is online. degraded(3) - The resource status is unavailable or offline or online pending or offline pending. failed(4) - The resource status is failed.') cpqClusterResourceDriveLetter = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourceDriveLetter.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceDriveLetter.setDescription("The drive letter with semi-colon of a physical disk such as x:. Blank if the resource type is not 'Physical Disk'.") cpqClusterResourceIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourceIpAddress.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceIpAddress.setDescription("A cluster IP address expressed as xxx.xxx.xxx.xxx where xxx is a decimal number between 0 and 255. Blank if the resource type is not 'IP Address'.") cpqClusterResourceGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterResourceGroupName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceGroupName.setDescription('The name of the cluster group that the resource belongs to.') cpqClusterInterconnectTable = MibTable((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1), ) if mibBuilder.loadTexts: cpqClusterInterconnectTable.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectTable.setDescription('A table of network interfaces used by the node for communication.') cpqClusterInterconnectEntry = MibTableRow((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1), ).setIndexNames((0, "CPQCLUSTER-MIB", "cpqClusterInterconnectIndex")) if mibBuilder.loadTexts: cpqClusterInterconnectEntry.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectEntry.setDescription('The properties describing the interconnect.') cpqClusterInterconnectIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterInterconnectIndex.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectIndex.setDescription('Uniquely identifies the interconnect entry.') cpqClusterInterconnectPhysId = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterInterconnectPhysId.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectPhysId.setDescription('The physical identification of the device. For an embedded NIC the value format is as followed: 1) for embedded NIC, Embedded NIC, Base I/O Addr: <base addr> 2) Known slot number, Slot: <slot number>, Base I/O Addr: <base addr> 3) Unknown slot number, Slot: unknown, Base I/O Addr: <base addr>') cpqClusterInterconnectTransport = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterInterconnectTransport.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectTransport.setDescription('The network transport used by the interconnect. For example, Tcpip.') cpqClusterInterconnectAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterInterconnectAddress.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectAddress.setDescription('The address used by the interconnect expressed in the format specified by the transport type.') cpqClusterInterconnectNetworkName = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterInterconnectNetworkName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectNetworkName.setDescription('This interconnect is a part of this network. The network name is used to correlate information in the network table.') cpqClusterInterconnectNodeName = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterInterconnectNodeName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectNodeName.setDescription('The name of the node in which the network interface is installed.') cpqClusterInterconnectRole = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("client", 2), ("internal", 3), ("clientAndInternal", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterInterconnectRole.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectRole.setDescription('The communications role of the interconnect in the cluster. The following values are defined: none(1) - The interconnect is not used by the cluster. client(2) - The interconnect is used to connect client systems to the cluster. internal(3) - The interconnect is used to carry internal cluster communication. clientAndInternal(4) - The interconnect is used to connect client systems and for internal cluster communication.') cpqClusterNetworkTable = MibTable((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1), ) if mibBuilder.loadTexts: cpqClusterNetworkTable.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkTable.setDescription('A table of networks available for communication with other nodes or clients.') cpqClusterNetworkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1), ).setIndexNames((0, "CPQCLUSTER-MIB", "cpqClusterNetworkIndex")) if mibBuilder.loadTexts: cpqClusterNetworkEntry.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkEntry.setDescription('The properties describing the network.') cpqClusterNetworkIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNetworkIndex.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkIndex.setDescription('Uniquely identifies the network entry.') cpqClusterNetworkName = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNetworkName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkName.setDescription('The text name of the network.') cpqClusterNetworkAddressMask = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNetworkAddressMask.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkAddressMask.setDescription('The network IP address mask expressed as xxx.xxx.xxx.xxx where xxx is a decimal number between 0 and 255.') cpqClusterNetworkDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNetworkDescription.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkDescription.setDescription('The text description of the network.') cpqClusterNetworkRole = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("client", 2), ("internal", 3), ("clientAndInternal", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNetworkRole.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkRole.setDescription('The communications role of the network in the cluster. The following values are defined: none(1) - The network is not used by the cluster. client(2) - The network is used to connect client systems to the cluster. internal(3) - The network is used to carry internal cluster communication. clientAndInternal(4) - The network is used to connect client systems and for internal cluster communication.') cpqClusterNetworkState = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("other", 1), ("online", 2), ("offline", 3), ("partitioned", 4), ("unavailable", 5)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNetworkState.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkState.setDescription("The network's current state. The following values are defined: other(1) - Indicates that an error has occurred and the exact state of the network could not be determined. online(2) - The network is operational; all of the nodes in the cluster can communicate. offline(3) - The network is not operational; none of the nodes on the network can communicate. partitioned(4) - The network is operational, but two or more nodes on the network cannot communicate. Typically a path-specific problem has occurred. unavailable(5) - The network is unavailable to the cluster because the network's role is 'none'.") cpqClusterNetworkCondition = MibTableColumn((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("ok", 2), ("degraded", 3), ("failed", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cpqClusterNetworkCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkCondition.setDescription('The network condition uses cpqClusterNetworkState to determine the network condition. The following values are defined: other(1) - The network state indicates that an error has occurred and the exact state of the network could not be determined or the network state is unavailable. ok(2) - The network state is online or unavailable. degraded(3) - The network state is partitioned. failed(4) - The network state is offline.') cpqClusterDegraded = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,15001)).setObjects(("SNMPv2-MIB", "sysName"), ("CPQHOST-MIB", "cpqHoTrapFlags"), ("CPQCLUSTER-MIB", "cpqClusterName")) if mibBuilder.loadTexts: cpqClusterDegraded.setDescription('This trap will be sent any time the condition of the cluster becomes degraded.') cpqClusterFailed = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,15002)).setObjects(("SNMPv2-MIB", "sysName"), ("CPQHOST-MIB", "cpqHoTrapFlags"), ("CPQCLUSTER-MIB", "cpqClusterName")) if mibBuilder.loadTexts: cpqClusterFailed.setDescription('This trap will be sent any time the condition of the cluster becomes failed.') cpqClusterNodeDegraded = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,15003)).setObjects(("SNMPv2-MIB", "sysName"), ("CPQHOST-MIB", "cpqHoTrapFlags"), ("CPQCLUSTER-MIB", "cpqClusterNodeName")) if mibBuilder.loadTexts: cpqClusterNodeDegraded.setDescription('This trap will be sent any time the condition of a node in the cluster becomes degraded. User Action: Make a note of the cluster node name then check the node for the cause of the degraded condition.') cpqClusterNodeFailed = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,15004)).setObjects(("SNMPv2-MIB", "sysName"), ("CPQHOST-MIB", "cpqHoTrapFlags"), ("CPQCLUSTER-MIB", "cpqClusterNodeName")) if mibBuilder.loadTexts: cpqClusterNodeFailed.setDescription('This trap will be sent any time the condition of a node in the cluster becomes failed. User Action: Make a note of the cluster node name then check the node for the cause of the failure.') cpqClusterResourceDegraded = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,15005)).setObjects(("SNMPv2-MIB", "sysName"), ("CPQHOST-MIB", "cpqHoTrapFlags"), ("CPQCLUSTER-MIB", "cpqClusterResourceName")) if mibBuilder.loadTexts: cpqClusterResourceDegraded.setDescription('This trap will be sent any time the condition of a cluster resource becomes degraded. User Action: Make a note of the cluster resource name then check the resource for the cause of the degraded condition.') cpqClusterResourceFailed = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,15006)).setObjects(("SNMPv2-MIB", "sysName"), ("CPQHOST-MIB", "cpqHoTrapFlags"), ("CPQCLUSTER-MIB", "cpqClusterResourceName")) if mibBuilder.loadTexts: cpqClusterResourceFailed.setDescription('This trap will be sent any time the condition of a cluster resource becomes failed. User Action: Make a note of the cluster resource name then check the resource for the cause of the failure.') cpqClusterNetworkDegraded = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,15007)).setObjects(("SNMPv2-MIB", "sysName"), ("CPQHOST-MIB", "cpqHoTrapFlags"), ("CPQCLUSTER-MIB", "cpqClusterNetworkName")) if mibBuilder.loadTexts: cpqClusterNetworkDegraded.setDescription('This trap will be sent any time the condition of a cluster network becomes degraded. User Action: Make a note of the cluster network name then check the network for the cause of the degraded condition.') cpqClusterNetworkFailed = NotificationType((1, 3, 6, 1, 4, 1, 232) + (0,15008)).setObjects(("SNMPv2-MIB", "sysName"), ("CPQHOST-MIB", "cpqHoTrapFlags"), ("CPQCLUSTER-MIB", "cpqClusterNetworkName")) if mibBuilder.loadTexts: cpqClusterNetworkFailed.setDescription('This trap will be sent any time the condition of a cluster network becomes failed. User Action: Make a note of the cluster network name then check the network for the cause of the failure.') mibBuilder.exportSymbols("CPQCLUSTER-MIB", cpqClusterOsCommonModuleVersion=cpqClusterOsCommonModuleVersion, cpqClusterOsCommon=cpqClusterOsCommon, cpqClusterCondition=cpqClusterCondition, cpqClusterMinorVersion=cpqClusterMinorVersion, cpqClusterMajorVersion=cpqClusterMajorVersion, cpqClusterMibRevMinor=cpqClusterMibRevMinor, cpqClusterResourceOwnerNode=cpqClusterResourceOwnerNode, cpqClusterMibRev=cpqClusterMibRev, cpqClusterOsCommonModuleTable=cpqClusterOsCommonModuleTable, cpqClusterOsCommonModuleDate=cpqClusterOsCommonModuleDate, cpqClusterInterconnectEntry=cpqClusterInterconnectEntry, cpqClusterNodeEntry=cpqClusterNodeEntry, cpqClusterNetworkIndex=cpqClusterNetworkIndex, cpqClusterIpAddress=cpqClusterIpAddress, cpqClusterInterconnectNetworkName=cpqClusterInterconnectNetworkName, cpqClusterNodeName=cpqClusterNodeName, cpqClusterResourcePhysId=cpqClusterResourcePhysId, cpqClusterDegraded=cpqClusterDegraded, cpqClusterTrap=cpqClusterTrap, cpqClusterNetworkDegraded=cpqClusterNetworkDegraded, cpqClusterMibRevMajor=cpqClusterMibRevMajor, cpqClusterNetwork=cpqClusterNetwork, cpqClusterResourceGroupName=cpqClusterResourceGroupName, cpqClusterFailed=cpqClusterFailed, cpqClusterNetworkEntry=cpqClusterNetworkEntry, cpqClusterInterface=cpqClusterInterface, cpqClusterNetworkFailed=cpqClusterNetworkFailed, cpqClusterResourceDegraded=cpqClusterResourceDegraded, cpqClusterNodeDegraded=cpqClusterNodeDegraded, cpqClusterNetworkTable=cpqClusterNetworkTable, cpqClusterInterconnectPhysId=cpqClusterInterconnectPhysId, cpqClusterResourceName=cpqClusterResourceName, cpqClusterNodeCondition=cpqClusterNodeCondition, cpqClusterInterconnectTransport=cpqClusterInterconnectTransport, cpqClusterNetworkName=cpqClusterNetworkName, cpqClusterOsCommonModulePurpose=cpqClusterOsCommonModulePurpose, cpqClusterNode=cpqClusterNode, cpqClusterName=cpqClusterName, cpqClusterNodeStatus=cpqClusterNodeStatus, cpqClusterOsCommonPollFreq=cpqClusterOsCommonPollFreq, cpqClusterResourceIpAddress=cpqClusterResourceIpAddress, cpqClusterNetworkAddressMask=cpqClusterNetworkAddressMask, cpqClusterResourceAggregateCondition=cpqClusterResourceAggregateCondition, cpqClusterOsCommonModuleIndex=cpqClusterOsCommonModuleIndex, cpqClusterResourceDriveLetter=cpqClusterResourceDriveLetter, cpqCluster=cpqCluster, cpqClusterNodeIndex=cpqClusterNodeIndex, cpqClusterInterconnect=cpqClusterInterconnect, cpqClusterResourceIndex=cpqClusterResourceIndex, cpqClusterResourceType=cpqClusterResourceType, cpqClusterNetworkState=cpqClusterNetworkState, cpqClusterNodeFailed=cpqClusterNodeFailed, cpqClusterInterconnectNodeName=cpqClusterInterconnectNodeName, cpqClusterInterconnectAddress=cpqClusterInterconnectAddress, cpqClusterResourceCondition=cpqClusterResourceCondition, cpqClusterInterconnectRole=cpqClusterInterconnectRole, cpqClusterQuorumResource=cpqClusterQuorumResource, cpqClusterResourceState=cpqClusterResourceState, cpqClusterInfo=cpqClusterInfo, cpqClusterNetworkCondition=cpqClusterNetworkCondition, cpqClusterResourceFailed=cpqClusterResourceFailed, cpqClusterCSDVersion=cpqClusterCSDVersion, cpqClusterNetworkDescription=cpqClusterNetworkDescription, cpqClusterMibCondition=cpqClusterMibCondition, cpqClusterOsCommonModuleEntry=cpqClusterOsCommonModuleEntry, cpqClusterNetworkAggregateCondition=cpqClusterNetworkAggregateCondition, cpqClusterOsCommonModuleName=cpqClusterOsCommonModuleName, cpqClusterInterconnectIndex=cpqClusterInterconnectIndex, cpqClusterVendorId=cpqClusterVendorId, cpqClusterResource=cpqClusterResource, cpqClusterInterconnectTable=cpqClusterInterconnectTable, cpqClusterComponent=cpqClusterComponent, cpqClusterNetworkRole=cpqClusterNetworkRole, cpqClusterNodeTable=cpqClusterNodeTable, cpqClusterResourceEntry=cpqClusterResourceEntry, cpqClusterResourceTable=cpqClusterResourceTable)
(integer, octet_string, object_identifier) = mibBuilder.importSymbols('ASN1', 'Integer', 'OctetString', 'ObjectIdentifier') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (value_size_constraint, constraints_union, constraints_intersection, single_value_constraint, value_range_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueSizeConstraint', 'ConstraintsUnion', 'ConstraintsIntersection', 'SingleValueConstraint', 'ValueRangeConstraint') (compaq, cpq_ho_trap_flags) = mibBuilder.importSymbols('CPQHOST-MIB', 'compaq', 'cpqHoTrapFlags') (notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance') (sys_name,) = mibBuilder.importSymbols('SNMPv2-MIB', 'sysName') (gauge32, counter64, module_identity, mib_scalar, mib_table, mib_table_row, mib_table_column, notification_type, unsigned32, integer32, time_ticks, iso, counter32, mib_identifier, bits, object_identity, notification_type, ip_address) = mibBuilder.importSymbols('SNMPv2-SMI', 'Gauge32', 'Counter64', 'ModuleIdentity', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'NotificationType', 'Unsigned32', 'Integer32', 'TimeTicks', 'iso', 'Counter32', 'MibIdentifier', 'Bits', 'ObjectIdentity', 'NotificationType', 'IpAddress') (display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention') cpq_cluster = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15)) cpq_cluster_mib_rev = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15, 1)) cpq_cluster_component = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15, 2)) cpq_cluster_trap = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15, 3)) cpq_cluster_interface = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 1)) cpq_cluster_info = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 2)) cpq_cluster_node = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 3)) cpq_cluster_resource = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 4)) cpq_cluster_interconnect = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 5)) cpq_cluster_network = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 6)) cpq_cluster_os_common = mib_identifier((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4)) cpq_cluster_mib_rev_major = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 65535))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterMibRevMajor.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterMibRevMajor.setDescription('The Major Revision level of the MIB. A change in the major revision level represents a major change in the architecture of the MIB. A change in the major revision level may indicate a significant change in the information supported and/or the meaning of the supported information. Correct interpretation of data may require a MIB document with the same major revision level.') cpq_cluster_mib_rev_minor = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 1, 2), integer32().subtype(subtypeSpec=value_range_constraint(0, 65535))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterMibRevMinor.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterMibRevMinor.setDescription('The Minor Revision level of the MIB. A change in the minor revision level may represent some minor additional support, no changes to any pre-existing information has occurred.') cpq_cluster_mib_condition = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('other', 1), ('ok', 2), ('degraded', 3), ('failed', 4)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterMibCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterMibCondition.setDescription('The overall condition of the cluster represented by this MIB. This variable is the same as cpqClusterCondition in the Cluster Info Group. It is a combination of the Cluster node conditions, the resource conditions, and the network conditions as defined later in the Cluster Node group the Cluster Resource group, and the Cluster Network group. other(1) The cluster condition can not be determined. Every node condition, resource condition, and network condition is undetermined. ok(2) The cluster condition is functioning normally. Every node condition, resource condition, and network condition is ok. degraded(3) The cluster condition is degraded if at least one node condition is failed or degraded or at least one resource condition, or one network condition is degraded. failed(4) The cluster condition is failed if every node condition is failed, or at least one resource condition is failed, or at least one network condition is failed.') cpq_cluster_os_common_poll_freq = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 1), integer32().subtype(subtypeSpec=value_range_constraint(0, 65535))).setMaxAccess('readwrite') if mibBuilder.loadTexts: cpqClusterOsCommonPollFreq.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterOsCommonPollFreq.setDescription("The Insight Agent's polling frequency. The frequency, in seconds, at which the Insight Agent requests information from the device driver. A frequency of zero (0) indicates that the Insight Agent retrieves the information upon request of a management station, it does not poll the device driver at a specific interval. If the poll frequency is zero (0) all attempts to write to this object will fail. If the poll frequency is non-zero, setting this value will change the polling frequency of the Insight Agent. Setting the poll frequency to zero (0) will always fail, an agent may also choose to fail any request to change the poll frequency to a value that would severely impact system performance.") cpq_cluster_os_common_module_table = mib_table((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2)) if mibBuilder.loadTexts: cpqClusterOsCommonModuleTable.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleTable.setDescription('A table of software modules that provide an interface to the device this MIB describes.') cpq_cluster_os_common_module_entry = mib_table_row((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1)).setIndexNames((0, 'CPQCLUSTER-MIB', 'cpqClusterOsCommonModuleIndex')) if mibBuilder.loadTexts: cpqClusterOsCommonModuleEntry.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleEntry.setDescription('A description of a software module that provides an interface to the device this MIB describes.') cpq_cluster_os_common_module_index = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(0, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterOsCommonModuleIndex.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleIndex.setDescription('A unique index for this module description.') cpq_cluster_os_common_module_name = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1, 2), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterOsCommonModuleName.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleName.setDescription('The module name.') cpq_cluster_os_common_module_version = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1, 3), display_string().subtype(subtypeSpec=value_size_constraint(0, 5))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterOsCommonModuleVersion.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleVersion.setDescription('The module version in XX.YY format. Where XX is the major version number and YY is the minor version number. This field will be null (size 0) string if the agent cannot provide the module version.') cpq_cluster_os_common_module_date = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1, 4), octet_string().subtype(subtypeSpec=value_size_constraint(7, 7)).setFixedLength(7)).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterOsCommonModuleDate.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModuleDate.setDescription('The module date. field octets contents range ===== ====== ======= ===== 1 1-2 year 0..65536 2 3 month 1..12 3 4 day 1..31 4 5 hour 0..23 5 6 minute 0..59 6 7 second 0..60 (use 60 for leap-second) This field will be set to year = 0 if the agent cannot provide the module date. The hour, minute, and second field will be set to zero (0) if they are not relevant. The year field is set with the most significant octet first.') cpq_cluster_os_common_module_purpose = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 1, 4, 2, 1, 5), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterOsCommonModulePurpose.setStatus('deprecated') if mibBuilder.loadTexts: cpqClusterOsCommonModulePurpose.setDescription('The purpose of the module described in this entry.') cpq_cluster_name = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 1), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readwrite') if mibBuilder.loadTexts: cpqClusterName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterName.setDescription('The name of the cluster.') cpq_cluster_condition = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('other', 1), ('ok', 2), ('degraded', 3), ('failed', 4)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterCondition.setDescription('The cluster condition as reported by this node. It is a combination of the Cluster node conditions, resource conditions, and network conditions as defined later in the Cluster Node group, Cluster Resource group, and Cluster Network group. other(1) The cluster condition can not be determined. Every node condition, resource condition, and network condition is undetermined. ok(2) The cluster condition is functioning normally. Every node condition, resource condition, and network condition is ok. degraded(3) The cluster condition is degraded if at least one node condition is failed or degraded or at least one resource condition, or one network condition is degraded. failed(4) The cluster condition is failed if every node condition is failed, or at least one resource condition is failed, or at least one network condition is failed.') cpq_cluster_ip_address = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 3), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterIpAddress.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterIpAddress.setDescription("The first cluster static IP address enumerated. This cluster IP address and any other cluster IP address are in the Cluster Resource Group with the resource type 'IP Address'.") cpq_cluster_quorum_resource = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 4), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterQuorumResource.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterQuorumResource.setDescription('The Quorum resource name for the cluster. This number is the index into the resource table which contains the Quorum resource. -1 No Quorum resource available. 0..64 Index into the resource table.') cpq_cluster_major_version = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 5), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterMajorVersion.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterMajorVersion.setDescription('Identifies the major version number of the cluster software.') cpq_cluster_minor_version = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 6), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterMinorVersion.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterMinorVersion.setDescription('Identifies the minor version number of the cluster software.') cpq_cluster_csd_version = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 7), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterCSDVersion.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterCSDVersion.setDescription('The latest Service Pack installed on the system. If no Service Pack has been installed, the string is empty.') cpq_cluster_vendor_id = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 8), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterVendorId.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterVendorId.setDescription('The cluster software vendor identifier information.') cpq_cluster_resource_aggregate_condition = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 9), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('other', 1), ('ok', 2), ('degraded', 3), ('failed', 4)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourceAggregateCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceAggregateCondition.setDescription('The cluster resource aggregate condition as reported by this node. This condition is derived directly from each and every Cluster resource condition as defined later in the Cluster Resource group. other(1) The condition can not be determined, which equates to each and every resource condition as undetermined. ok(2) The condition is functioning normally, which equates to each and every resource condition as ok. degraded(3) The condition is degraded if at least one resource condition is degraded. failed(4) The condition is failed if at least one resource condition is failed.') cpq_cluster_network_aggregate_condition = mib_scalar((1, 3, 6, 1, 4, 1, 232, 15, 2, 2, 10), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('other', 1), ('ok', 2), ('degraded', 3), ('failed', 4)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNetworkAggregateCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkAggregateCondition.setDescription('The cluster network aggregate condition as reported by this node. This condition is derived directly from the condition of each Cluster network with a role of internal, or clientAndInternal or client as defined later in the Cluster Network group. Networks with a role of none are not considered in overall condition. other(1) The condition can not be determined, all network conditions are undetermined. ok(2) The condition is functioning normally, which equates to each and every network condition as ok. degraded(3) The condition is degraded if at least one network condition is degraded. failed(4) The condition is failed if at least one network condition is failed.') cpq_cluster_node_table = mib_table((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1)) if mibBuilder.loadTexts: cpqClusterNodeTable.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeTable.setDescription('A table of cluster node entries.') cpq_cluster_node_entry = mib_table_row((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1, 1)).setIndexNames((0, 'CPQCLUSTER-MIB', 'cpqClusterNodeIndex')) if mibBuilder.loadTexts: cpqClusterNodeEntry.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeEntry.setDescription('A description of a cluster node') cpq_cluster_node_index = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(0, 65535))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNodeIndex.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeIndex.setDescription('A unique index for this node entry.') cpq_cluster_node_name = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1, 1, 2), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNodeName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeName.setDescription('The name of the node.') cpq_cluster_node_status = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5))).clone(namedValues=named_values(('other', 1), ('nodeUp', 2), ('nodeDown', 3), ('nodePaused', 4), ('nodeJoining', 5)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNodeStatus.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeStatus.setDescription('The current status of the node. The following values are defined: other(1) - Indicates that an error has occurred and the exact state of the node could not be determined, or the node status is unavailable. nodeUp(2) - The node is operating as an active member of a cluster. A node that is up responds to updates to the cluster database, can host and manage groups, and can maintain communication with other nodes in the cluster. nodeDown(3) - The node is trying to form or rejoin a cluster or is down. A node that is down is not an active cluster member and it may or may not be running. The Cluster Service may have started and then failed, or may have failed to start completely. nodePaused(4) - The node is operating as an active member of a cluster but cannot host any resources or resource groups,is up but cluster activity is paused. Nodes that are undergoing maintenance are typically placed in this state. nodeJoining(5) - The node is in the process of joining a cluster. This is a short lived state.') cpq_cluster_node_condition = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 3, 1, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('other', 1), ('ok', 2), ('degraded', 3), ('failed', 4)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNodeCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNodeCondition.setDescription('The current condition of the node. The following values are defined: other(1) - The node status is unavailable, or could not be determined. ok(2) - The node status is nodeUp. degraded(3) - The node status is nodeUnavailable or nodePaused or nodeJoining. failed(4) - The node status is nodeDown.') cpq_cluster_resource_table = mib_table((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1)) if mibBuilder.loadTexts: cpqClusterResourceTable.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceTable.setDescription('A table of resources managed by the cluster reported by this MIB.') cpq_cluster_resource_entry = mib_table_row((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1)).setIndexNames((0, 'CPQCLUSTER-MIB', 'cpqClusterResourceIndex')) if mibBuilder.loadTexts: cpqClusterResourceEntry.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceEntry.setDescription('The properties describing a resource managed by the cluster.') cpq_cluster_resource_index = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(0, 65535))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourceIndex.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceIndex.setDescription('A unique index for this resource entry.') cpq_cluster_resource_name = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 2), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourceName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceName.setDescription('The name of the resource. It must be unique within the cluster.') cpq_cluster_resource_type = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 3), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourceType.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceType.setDescription("The resource type, such as 'Physical Disk', 'Generic Application', 'IP Address', 'File Share', 'Network Name', etc..") cpq_cluster_resource_state = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6))).clone(namedValues=named_values(('other', 1), ('online', 2), ('offline', 3), ('failed', 4), ('onlinePending', 5), ('offlinePending', 6)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourceState.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceState.setDescription("The resource's current state. The following values are defined: other(1) - Indicates that an error has occurred and the exact state of the resource could not be determined or the resource state is unavailable. online(2) - The resource is online and functioning normally. offline(3) - The resource is offline. failed(4) - The resource has failed. onlinePending(5) - The resource is in the process of coming online. offlinePending(6)- The resource is in the process of going offline.") cpq_cluster_resource_owner_node = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 5), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourceOwnerNode.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceOwnerNode.setDescription('The node in the cluster where the group of the resource is currently online.') cpq_cluster_resource_phys_id = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 6), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourcePhysId.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourcePhysId.setDescription("The physical identification for resource type 'Physical Disk'. It contains the following components: storage box name, logical drive NN. where NN is a number from 0..n. It is blank for all other resource types.") cpq_cluster_resource_condition = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 7), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('other', 1), ('ok', 2), ('degraded', 3), ('failed', 4)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourceCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceCondition.setDescription('The resource condition. The following values are defined: other(1) - Unable to determine the resource condition. ok(2) - The resource status is online. degraded(3) - The resource status is unavailable or offline or online pending or offline pending. failed(4) - The resource status is failed.') cpq_cluster_resource_drive_letter = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 8), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourceDriveLetter.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceDriveLetter.setDescription("The drive letter with semi-colon of a physical disk such as x:. Blank if the resource type is not 'Physical Disk'.") cpq_cluster_resource_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 9), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourceIpAddress.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceIpAddress.setDescription("A cluster IP address expressed as xxx.xxx.xxx.xxx where xxx is a decimal number between 0 and 255. Blank if the resource type is not 'IP Address'.") cpq_cluster_resource_group_name = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 4, 1, 1, 10), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterResourceGroupName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterResourceGroupName.setDescription('The name of the cluster group that the resource belongs to.') cpq_cluster_interconnect_table = mib_table((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1)) if mibBuilder.loadTexts: cpqClusterInterconnectTable.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectTable.setDescription('A table of network interfaces used by the node for communication.') cpq_cluster_interconnect_entry = mib_table_row((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1)).setIndexNames((0, 'CPQCLUSTER-MIB', 'cpqClusterInterconnectIndex')) if mibBuilder.loadTexts: cpqClusterInterconnectEntry.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectEntry.setDescription('The properties describing the interconnect.') cpq_cluster_interconnect_index = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 65535))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterInterconnectIndex.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectIndex.setDescription('Uniquely identifies the interconnect entry.') cpq_cluster_interconnect_phys_id = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 2), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterInterconnectPhysId.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectPhysId.setDescription('The physical identification of the device. For an embedded NIC the value format is as followed: 1) for embedded NIC, Embedded NIC, Base I/O Addr: <base addr> 2) Known slot number, Slot: <slot number>, Base I/O Addr: <base addr> 3) Unknown slot number, Slot: unknown, Base I/O Addr: <base addr>') cpq_cluster_interconnect_transport = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 3), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterInterconnectTransport.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectTransport.setDescription('The network transport used by the interconnect. For example, Tcpip.') cpq_cluster_interconnect_address = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 4), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterInterconnectAddress.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectAddress.setDescription('The address used by the interconnect expressed in the format specified by the transport type.') cpq_cluster_interconnect_network_name = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 5), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterInterconnectNetworkName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectNetworkName.setDescription('This interconnect is a part of this network. The network name is used to correlate information in the network table.') cpq_cluster_interconnect_node_name = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 6), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterInterconnectNodeName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectNodeName.setDescription('The name of the node in which the network interface is installed.') cpq_cluster_interconnect_role = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 5, 1, 1, 7), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('none', 1), ('client', 2), ('internal', 3), ('clientAndInternal', 4)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterInterconnectRole.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterInterconnectRole.setDescription('The communications role of the interconnect in the cluster. The following values are defined: none(1) - The interconnect is not used by the cluster. client(2) - The interconnect is used to connect client systems to the cluster. internal(3) - The interconnect is used to carry internal cluster communication. clientAndInternal(4) - The interconnect is used to connect client systems and for internal cluster communication.') cpq_cluster_network_table = mib_table((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1)) if mibBuilder.loadTexts: cpqClusterNetworkTable.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkTable.setDescription('A table of networks available for communication with other nodes or clients.') cpq_cluster_network_entry = mib_table_row((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1)).setIndexNames((0, 'CPQCLUSTER-MIB', 'cpqClusterNetworkIndex')) if mibBuilder.loadTexts: cpqClusterNetworkEntry.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkEntry.setDescription('The properties describing the network.') cpq_cluster_network_index = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 65535))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNetworkIndex.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkIndex.setDescription('Uniquely identifies the network entry.') cpq_cluster_network_name = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 2), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNetworkName.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkName.setDescription('The text name of the network.') cpq_cluster_network_address_mask = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 3), display_string().subtype(subtypeSpec=value_size_constraint(0, 64))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNetworkAddressMask.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkAddressMask.setDescription('The network IP address mask expressed as xxx.xxx.xxx.xxx where xxx is a decimal number between 0 and 255.') cpq_cluster_network_description = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 4), display_string().subtype(subtypeSpec=value_size_constraint(0, 128))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNetworkDescription.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkDescription.setDescription('The text description of the network.') cpq_cluster_network_role = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('none', 1), ('client', 2), ('internal', 3), ('clientAndInternal', 4)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNetworkRole.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkRole.setDescription('The communications role of the network in the cluster. The following values are defined: none(1) - The network is not used by the cluster. client(2) - The network is used to connect client systems to the cluster. internal(3) - The network is used to carry internal cluster communication. clientAndInternal(4) - The network is used to connect client systems and for internal cluster communication.') cpq_cluster_network_state = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5))).clone(namedValues=named_values(('other', 1), ('online', 2), ('offline', 3), ('partitioned', 4), ('unavailable', 5)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNetworkState.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkState.setDescription("The network's current state. The following values are defined: other(1) - Indicates that an error has occurred and the exact state of the network could not be determined. online(2) - The network is operational; all of the nodes in the cluster can communicate. offline(3) - The network is not operational; none of the nodes on the network can communicate. partitioned(4) - The network is operational, but two or more nodes on the network cannot communicate. Typically a path-specific problem has occurred. unavailable(5) - The network is unavailable to the cluster because the network's role is 'none'.") cpq_cluster_network_condition = mib_table_column((1, 3, 6, 1, 4, 1, 232, 15, 2, 6, 1, 1, 7), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('other', 1), ('ok', 2), ('degraded', 3), ('failed', 4)))).setMaxAccess('readonly') if mibBuilder.loadTexts: cpqClusterNetworkCondition.setStatus('mandatory') if mibBuilder.loadTexts: cpqClusterNetworkCondition.setDescription('The network condition uses cpqClusterNetworkState to determine the network condition. The following values are defined: other(1) - The network state indicates that an error has occurred and the exact state of the network could not be determined or the network state is unavailable. ok(2) - The network state is online or unavailable. degraded(3) - The network state is partitioned. failed(4) - The network state is offline.') cpq_cluster_degraded = notification_type((1, 3, 6, 1, 4, 1, 232) + (0, 15001)).setObjects(('SNMPv2-MIB', 'sysName'), ('CPQHOST-MIB', 'cpqHoTrapFlags'), ('CPQCLUSTER-MIB', 'cpqClusterName')) if mibBuilder.loadTexts: cpqClusterDegraded.setDescription('This trap will be sent any time the condition of the cluster becomes degraded.') cpq_cluster_failed = notification_type((1, 3, 6, 1, 4, 1, 232) + (0, 15002)).setObjects(('SNMPv2-MIB', 'sysName'), ('CPQHOST-MIB', 'cpqHoTrapFlags'), ('CPQCLUSTER-MIB', 'cpqClusterName')) if mibBuilder.loadTexts: cpqClusterFailed.setDescription('This trap will be sent any time the condition of the cluster becomes failed.') cpq_cluster_node_degraded = notification_type((1, 3, 6, 1, 4, 1, 232) + (0, 15003)).setObjects(('SNMPv2-MIB', 'sysName'), ('CPQHOST-MIB', 'cpqHoTrapFlags'), ('CPQCLUSTER-MIB', 'cpqClusterNodeName')) if mibBuilder.loadTexts: cpqClusterNodeDegraded.setDescription('This trap will be sent any time the condition of a node in the cluster becomes degraded. User Action: Make a note of the cluster node name then check the node for the cause of the degraded condition.') cpq_cluster_node_failed = notification_type((1, 3, 6, 1, 4, 1, 232) + (0, 15004)).setObjects(('SNMPv2-MIB', 'sysName'), ('CPQHOST-MIB', 'cpqHoTrapFlags'), ('CPQCLUSTER-MIB', 'cpqClusterNodeName')) if mibBuilder.loadTexts: cpqClusterNodeFailed.setDescription('This trap will be sent any time the condition of a node in the cluster becomes failed. User Action: Make a note of the cluster node name then check the node for the cause of the failure.') cpq_cluster_resource_degraded = notification_type((1, 3, 6, 1, 4, 1, 232) + (0, 15005)).setObjects(('SNMPv2-MIB', 'sysName'), ('CPQHOST-MIB', 'cpqHoTrapFlags'), ('CPQCLUSTER-MIB', 'cpqClusterResourceName')) if mibBuilder.loadTexts: cpqClusterResourceDegraded.setDescription('This trap will be sent any time the condition of a cluster resource becomes degraded. User Action: Make a note of the cluster resource name then check the resource for the cause of the degraded condition.') cpq_cluster_resource_failed = notification_type((1, 3, 6, 1, 4, 1, 232) + (0, 15006)).setObjects(('SNMPv2-MIB', 'sysName'), ('CPQHOST-MIB', 'cpqHoTrapFlags'), ('CPQCLUSTER-MIB', 'cpqClusterResourceName')) if mibBuilder.loadTexts: cpqClusterResourceFailed.setDescription('This trap will be sent any time the condition of a cluster resource becomes failed. User Action: Make a note of the cluster resource name then check the resource for the cause of the failure.') cpq_cluster_network_degraded = notification_type((1, 3, 6, 1, 4, 1, 232) + (0, 15007)).setObjects(('SNMPv2-MIB', 'sysName'), ('CPQHOST-MIB', 'cpqHoTrapFlags'), ('CPQCLUSTER-MIB', 'cpqClusterNetworkName')) if mibBuilder.loadTexts: cpqClusterNetworkDegraded.setDescription('This trap will be sent any time the condition of a cluster network becomes degraded. User Action: Make a note of the cluster network name then check the network for the cause of the degraded condition.') cpq_cluster_network_failed = notification_type((1, 3, 6, 1, 4, 1, 232) + (0, 15008)).setObjects(('SNMPv2-MIB', 'sysName'), ('CPQHOST-MIB', 'cpqHoTrapFlags'), ('CPQCLUSTER-MIB', 'cpqClusterNetworkName')) if mibBuilder.loadTexts: cpqClusterNetworkFailed.setDescription('This trap will be sent any time the condition of a cluster network becomes failed. User Action: Make a note of the cluster network name then check the network for the cause of the failure.') mibBuilder.exportSymbols('CPQCLUSTER-MIB', cpqClusterOsCommonModuleVersion=cpqClusterOsCommonModuleVersion, cpqClusterOsCommon=cpqClusterOsCommon, cpqClusterCondition=cpqClusterCondition, cpqClusterMinorVersion=cpqClusterMinorVersion, cpqClusterMajorVersion=cpqClusterMajorVersion, cpqClusterMibRevMinor=cpqClusterMibRevMinor, cpqClusterResourceOwnerNode=cpqClusterResourceOwnerNode, cpqClusterMibRev=cpqClusterMibRev, cpqClusterOsCommonModuleTable=cpqClusterOsCommonModuleTable, cpqClusterOsCommonModuleDate=cpqClusterOsCommonModuleDate, cpqClusterInterconnectEntry=cpqClusterInterconnectEntry, cpqClusterNodeEntry=cpqClusterNodeEntry, cpqClusterNetworkIndex=cpqClusterNetworkIndex, cpqClusterIpAddress=cpqClusterIpAddress, cpqClusterInterconnectNetworkName=cpqClusterInterconnectNetworkName, cpqClusterNodeName=cpqClusterNodeName, cpqClusterResourcePhysId=cpqClusterResourcePhysId, cpqClusterDegraded=cpqClusterDegraded, cpqClusterTrap=cpqClusterTrap, cpqClusterNetworkDegraded=cpqClusterNetworkDegraded, cpqClusterMibRevMajor=cpqClusterMibRevMajor, cpqClusterNetwork=cpqClusterNetwork, cpqClusterResourceGroupName=cpqClusterResourceGroupName, cpqClusterFailed=cpqClusterFailed, cpqClusterNetworkEntry=cpqClusterNetworkEntry, cpqClusterInterface=cpqClusterInterface, cpqClusterNetworkFailed=cpqClusterNetworkFailed, cpqClusterResourceDegraded=cpqClusterResourceDegraded, cpqClusterNodeDegraded=cpqClusterNodeDegraded, cpqClusterNetworkTable=cpqClusterNetworkTable, cpqClusterInterconnectPhysId=cpqClusterInterconnectPhysId, cpqClusterResourceName=cpqClusterResourceName, cpqClusterNodeCondition=cpqClusterNodeCondition, cpqClusterInterconnectTransport=cpqClusterInterconnectTransport, cpqClusterNetworkName=cpqClusterNetworkName, cpqClusterOsCommonModulePurpose=cpqClusterOsCommonModulePurpose, cpqClusterNode=cpqClusterNode, cpqClusterName=cpqClusterName, cpqClusterNodeStatus=cpqClusterNodeStatus, cpqClusterOsCommonPollFreq=cpqClusterOsCommonPollFreq, cpqClusterResourceIpAddress=cpqClusterResourceIpAddress, cpqClusterNetworkAddressMask=cpqClusterNetworkAddressMask, cpqClusterResourceAggregateCondition=cpqClusterResourceAggregateCondition, cpqClusterOsCommonModuleIndex=cpqClusterOsCommonModuleIndex, cpqClusterResourceDriveLetter=cpqClusterResourceDriveLetter, cpqCluster=cpqCluster, cpqClusterNodeIndex=cpqClusterNodeIndex, cpqClusterInterconnect=cpqClusterInterconnect, cpqClusterResourceIndex=cpqClusterResourceIndex, cpqClusterResourceType=cpqClusterResourceType, cpqClusterNetworkState=cpqClusterNetworkState, cpqClusterNodeFailed=cpqClusterNodeFailed, cpqClusterInterconnectNodeName=cpqClusterInterconnectNodeName, cpqClusterInterconnectAddress=cpqClusterInterconnectAddress, cpqClusterResourceCondition=cpqClusterResourceCondition, cpqClusterInterconnectRole=cpqClusterInterconnectRole, cpqClusterQuorumResource=cpqClusterQuorumResource, cpqClusterResourceState=cpqClusterResourceState, cpqClusterInfo=cpqClusterInfo, cpqClusterNetworkCondition=cpqClusterNetworkCondition, cpqClusterResourceFailed=cpqClusterResourceFailed, cpqClusterCSDVersion=cpqClusterCSDVersion, cpqClusterNetworkDescription=cpqClusterNetworkDescription, cpqClusterMibCondition=cpqClusterMibCondition, cpqClusterOsCommonModuleEntry=cpqClusterOsCommonModuleEntry, cpqClusterNetworkAggregateCondition=cpqClusterNetworkAggregateCondition, cpqClusterOsCommonModuleName=cpqClusterOsCommonModuleName, cpqClusterInterconnectIndex=cpqClusterInterconnectIndex, cpqClusterVendorId=cpqClusterVendorId, cpqClusterResource=cpqClusterResource, cpqClusterInterconnectTable=cpqClusterInterconnectTable, cpqClusterComponent=cpqClusterComponent, cpqClusterNetworkRole=cpqClusterNetworkRole, cpqClusterNodeTable=cpqClusterNodeTable, cpqClusterResourceEntry=cpqClusterResourceEntry, cpqClusterResourceTable=cpqClusterResourceTable)
#!/usr/bin/env python f = open('/Users/kosta/dev/advent-of-code-17/day10/input.txt') lengths = list(map(int, f.readline().split(','))) hash_list = list(range(256)) skip = 0 cur_index = 0 for length in lengths: if cur_index + length > len(hash_list): sub_list = hash_list[cur_index:] sub_list.extend(hash_list[:cur_index + length - len(hash_list)]) sub_list.reverse() hash_list[cur_index:] = sub_list[:len(hash_list) - cur_index] hash_list[:cur_index + length - len(hash_list)] = sub_list[(len(hash_list) - cur_index):] cur_index = (cur_index + length + skip) % len(hash_list) else: sub_list = hash_list[cur_index:cur_index+length] sub_list.reverse() hash_list[cur_index:cur_index+length] = sub_list cur_index += length + skip skip+=1 result = hash_list[0] * hash_list[1] print(result)
f = open('/Users/kosta/dev/advent-of-code-17/day10/input.txt') lengths = list(map(int, f.readline().split(','))) hash_list = list(range(256)) skip = 0 cur_index = 0 for length in lengths: if cur_index + length > len(hash_list): sub_list = hash_list[cur_index:] sub_list.extend(hash_list[:cur_index + length - len(hash_list)]) sub_list.reverse() hash_list[cur_index:] = sub_list[:len(hash_list) - cur_index] hash_list[:cur_index + length - len(hash_list)] = sub_list[len(hash_list) - cur_index:] cur_index = (cur_index + length + skip) % len(hash_list) else: sub_list = hash_list[cur_index:cur_index + length] sub_list.reverse() hash_list[cur_index:cur_index + length] = sub_list cur_index += length + skip skip += 1 result = hash_list[0] * hash_list[1] print(result)
def softmax(x): t = np.exp(x) return t/t.sum() q_relu = softmax(C @ relu(A @ x + b) + d) q_sigmoid = softmax(C @ sigmoid(A @ x + b) + d)
def softmax(x): t = np.exp(x) return t / t.sum() q_relu = softmax(C @ relu(A @ x + b) + d) q_sigmoid = softmax(C @ sigmoid(A @ x + b) + d)
"""Image-to-text implementation based on http://arxiv.org/abs/1411.4555. "Show and Tell: A Neural Image Caption Generator" Oriol Vinyals, Alexander Toshev, Samy Bengio, Dumitru Erhan """ class ShowAndTellModel(object): """Image-to-text implementation based on http://arxiv.org/abs/1411.4555. "Show and Tell: A Neural Image Caption Generator" Oriol Vinyals, Alexander Toshev, Samy Bengio, Dumitru Erhan """ def __init__(self, config, mode, train_inception=False): """Basic setup. Args: config: Object containing configuration parameters. mode: "train", "eval" or "inference". train_inception: Whether the inception submodel variables are trainable. """ assert mode in ["train", "eval", "inference"] self.config = config self.mode = mode self.train_inception = train_inception self.reader = tf.TFRecordReader() self.initializer = tf.random_uniform_initializer(minval=-self.config.initializer_scale, maxval=self.config.initializer_scale) self.images = None self.input_seqs = None self.target_seqs = None self.input_mask = None self.image_embeddings = None self.seq_embeddings = None self.total_loss = None self.target_cross_entropy_losses = None self.target_cross_entropy_loss_weights = None self.inception_variables = [] self.init_fn = None self.global_step = None def is_training(self): """Returns true if the model is built for training mode.""" return self.mode == "train" def process_image(self, encoded_image, thread_id=0): """Decodes and processes an image string. Args: encoded_image: A scalar string Tensor; the encoded image. thread_id: Preprocessing thread id used to select the ordering of color distortions. Returns: A float32 Tensor of shape [height, width, 3]; the processed image. """ return image_processing.process_image(encoded_image, is_training=self.is_training(), height=self.config.image_height, width=self.config.image_width, thread_id=thread_id, image_format=self.config.image_format) def build_inputs(self): """Input prefetching, preprocessing and batching. Outputs: self.images self.input_seqs self.target_seqs (training and eval only) self.input_mask (training and eval only) """ if self.mode == "inference": image_feed = tf.placeholder(dtype=tf.string, shape=[], name="image_feed") input_feed = tf.placeholder(dtype=tf.int64, shape=[None], name="input_feed") images = tf.expand_dims(self.process_image(image_feed), 0) input_seqs = tf.expand_dims(input_feed, 1) target_seqs = None input_mask = None else: input_queue = input_ops.prefetch_input_data(self.reader, self.config.input_file_pattern, is_training=self.is_training(), batch_size=self.config.batch_size, values_per_shard=self.config.values_per_input_shard, input_queue_capacity_factor=self.config.input_queue_capacity_factor, num_reader_threads=self.config.num_input_reader_threads) assert self.config.num_preprocess_threads % 2 == 0 images_and_captions = [] for thread_id in range(self.config.num_preprocess_threads): serialized_sequence_example = input_queue.dequeue() encoded_image, caption = input_ops.parse_sequence_example(serialized_sequence_example, image_feature=self.config.image_feature_name, caption_feature=self.config.caption_feature_name) image = self.process_image(encoded_image, thread_id=thread_id) images_and_captions.append([image, caption]) queue_capacity = 2 * self.config.num_preprocess_threads * self.config.batch_size images, input_seqs, target_seqs, input_mask = input_ops.batch_with_dynamic_pad(images_and_captions, batch_size=self.config.batch_size, queue_capacity=queue_capacity) self.images = images self.input_seqs = input_seqs self.target_seqs = target_seqs self.input_mask = input_mask def build_image_embeddings(self): """Builds the image model subgraph and generates image embeddings. Inputs: self.images Outputs: self.image_embeddings """ inception_output = image_embedding.inception_v3(self.images, trainable=self.train_inception, is_training=self.is_training()) self.inception_variables = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope="InceptionV3") with tf.variable_scope("image_embedding") as scope: image_embeddings = tf.contrib.layers.fully_connected(inputs=inception_output, num_outputs=self.config.embedding_size, activation_fn=None, weights_initializer=self.initializer, biases_initializer=None, scope=scope) tf.constant(self.config.embedding_size, name="embedding_size") self.image_embeddings = image_embeddings def build_seq_embeddings(self): """Builds the input sequence embeddings. Inputs: self.input_seqs Outputs: self.seq_embeddings """ with tf.variable_scope("seq_embedding"), tf.device("/cpu:0"): embedding_map = tf.get_variable(name="map", shape=[self.config.vocab_size, self.config.embedding_size], initializer=self.initializer) seq_embeddings = tf.nn.embedding_lookup(embedding_map, self.input_seqs) self.seq_embeddings = seq_embeddings def build_model(self): """Builds the model. Inputs: self.image_embeddings self.seq_embeddings self.target_seqs (training and eval only) self.input_mask (training and eval only) Outputs: self.total_loss (training and eval only) self.target_cross_entropy_losses (training and eval only) self.target_cross_entropy_loss_weights (training and eval only) """ lstm_cell = tf.contrib.rnn.BasicLSTMCell(num_units=self.config.num_lstm_units, state_is_tuple=True) if self.mode == "train": lstm_cell = tf.contrib.rnn.DropoutWrapper(lstm_cell, input_keep_prob=self.config.lstm_dropout_keep_prob, output_keep_prob=self.config.lstm_dropout_keep_prob) with tf.variable_scope("lstm", initializer=self.initializer) as lstm_scope: zero_state = lstm_cell.zero_state(batch_size=self.image_embeddings.get_shape()[0], dtype=tf.float32) _, initial_state = lstm_cell(self.image_embeddings, zero_state) lstm_scope.reuse_variables() if self.mode == "inference": tf.concat(axis=1, values=initial_state, name="initial_state") state_feed = tf.placeholder(dtype=tf.float32, shape=[None, sum(lstm_cell.state_size)], name="state_feed") state_tuple = tf.split(value=state_feed, num_or_size_splits=2, axis=1) lstm_outputs, state_tuple = lstm_cell(inputs=tf.squeeze(self.seq_embeddings, axis=[1]), state=state_tuple) tf.concat(axis=1, values=state_tuple, name="state") else: sequence_length = tf.reduce_sum(self.input_mask, 1) lstm_outputs, _ = tf.nn.dynamic_rnn(cell=lstm_cell, inputs=self.seq_embeddings, sequence_length=sequence_length, initial_state=initial_state, dtype=tf.float32, scope=lstm_scope) lstm_outputs = tf.reshape(lstm_outputs, [-1, lstm_cell.output_size]) with tf.variable_scope("logits") as logits_scope: logits = tf.contrib.layers.fully_connected(inputs=lstm_outputs, num_outputs=self.config.vocab_size, activation_fn=None, weights_initializer=self.initializer, scope=logits_scope) if self.mode == "inference": tf.nn.softmax(logits, name="softmax") else: targets = tf.reshape(self.target_seqs, [-1]) weights = tf.to_float(tf.reshape(self.input_mask, [-1])) losses = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=targets, logits=logits) batch_loss = tf.div(tf.reduce_sum(tf.multiply(losses, weights)), tf.reduce_sum(weights), name="batch_loss") tf.losses.add_loss(batch_loss) total_loss = tf.losses.get_total_loss() tf.summary.scalar("losses/batch_loss", batch_loss) tf.summary.scalar("losses/total_loss", total_loss) for var in tf.trainable_variables(): tf.summary.histogram("parameters/" + var.op.name, var) self.total_loss = total_loss self.target_cross_entropy_losses = losses self.target_cross_entropy_loss_weights = weights def setup_inception_initializer(self): """Sets up the function to restore inception variables from checkpoint.""" if self.mode != "inference": saver = tf.train.Saver(self.inception_variables) def restore_fn(sess): tf.logging.info("Restoring Inception variables from checkpoint file %s", self.config.inception_checkpoint_file) saver.restore(sess, self.config.inception_checkpoint_file) self.init_fn = restore_fn def setup_global_step(self): """Sets up the global step Tensor.""" global_step = tf.Variable(initial_value=0, name="global_step", trainable=False, collections=[tf.GraphKeys.GLOBAL_STEP, tf.GraphKeys.GLOBAL_VARIABLES]) self.global_step = global_step def build(self): """Creates all ops for training and evaluation.""" self.build_inputs() self.build_image_embeddings() self.build_seq_embeddings() self.build_model() self.setup_inception_initializer() self.setup_global_step()
"""Image-to-text implementation based on http://arxiv.org/abs/1411.4555. "Show and Tell: A Neural Image Caption Generator" Oriol Vinyals, Alexander Toshev, Samy Bengio, Dumitru Erhan """ class Showandtellmodel(object): """Image-to-text implementation based on http://arxiv.org/abs/1411.4555. "Show and Tell: A Neural Image Caption Generator" Oriol Vinyals, Alexander Toshev, Samy Bengio, Dumitru Erhan """ def __init__(self, config, mode, train_inception=False): """Basic setup. Args: config: Object containing configuration parameters. mode: "train", "eval" or "inference". train_inception: Whether the inception submodel variables are trainable. """ assert mode in ['train', 'eval', 'inference'] self.config = config self.mode = mode self.train_inception = train_inception self.reader = tf.TFRecordReader() self.initializer = tf.random_uniform_initializer(minval=-self.config.initializer_scale, maxval=self.config.initializer_scale) self.images = None self.input_seqs = None self.target_seqs = None self.input_mask = None self.image_embeddings = None self.seq_embeddings = None self.total_loss = None self.target_cross_entropy_losses = None self.target_cross_entropy_loss_weights = None self.inception_variables = [] self.init_fn = None self.global_step = None def is_training(self): """Returns true if the model is built for training mode.""" return self.mode == 'train' def process_image(self, encoded_image, thread_id=0): """Decodes and processes an image string. Args: encoded_image: A scalar string Tensor; the encoded image. thread_id: Preprocessing thread id used to select the ordering of color distortions. Returns: A float32 Tensor of shape [height, width, 3]; the processed image. """ return image_processing.process_image(encoded_image, is_training=self.is_training(), height=self.config.image_height, width=self.config.image_width, thread_id=thread_id, image_format=self.config.image_format) def build_inputs(self): """Input prefetching, preprocessing and batching. Outputs: self.images self.input_seqs self.target_seqs (training and eval only) self.input_mask (training and eval only) """ if self.mode == 'inference': image_feed = tf.placeholder(dtype=tf.string, shape=[], name='image_feed') input_feed = tf.placeholder(dtype=tf.int64, shape=[None], name='input_feed') images = tf.expand_dims(self.process_image(image_feed), 0) input_seqs = tf.expand_dims(input_feed, 1) target_seqs = None input_mask = None else: input_queue = input_ops.prefetch_input_data(self.reader, self.config.input_file_pattern, is_training=self.is_training(), batch_size=self.config.batch_size, values_per_shard=self.config.values_per_input_shard, input_queue_capacity_factor=self.config.input_queue_capacity_factor, num_reader_threads=self.config.num_input_reader_threads) assert self.config.num_preprocess_threads % 2 == 0 images_and_captions = [] for thread_id in range(self.config.num_preprocess_threads): serialized_sequence_example = input_queue.dequeue() (encoded_image, caption) = input_ops.parse_sequence_example(serialized_sequence_example, image_feature=self.config.image_feature_name, caption_feature=self.config.caption_feature_name) image = self.process_image(encoded_image, thread_id=thread_id) images_and_captions.append([image, caption]) queue_capacity = 2 * self.config.num_preprocess_threads * self.config.batch_size (images, input_seqs, target_seqs, input_mask) = input_ops.batch_with_dynamic_pad(images_and_captions, batch_size=self.config.batch_size, queue_capacity=queue_capacity) self.images = images self.input_seqs = input_seqs self.target_seqs = target_seqs self.input_mask = input_mask def build_image_embeddings(self): """Builds the image model subgraph and generates image embeddings. Inputs: self.images Outputs: self.image_embeddings """ inception_output = image_embedding.inception_v3(self.images, trainable=self.train_inception, is_training=self.is_training()) self.inception_variables = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='InceptionV3') with tf.variable_scope('image_embedding') as scope: image_embeddings = tf.contrib.layers.fully_connected(inputs=inception_output, num_outputs=self.config.embedding_size, activation_fn=None, weights_initializer=self.initializer, biases_initializer=None, scope=scope) tf.constant(self.config.embedding_size, name='embedding_size') self.image_embeddings = image_embeddings def build_seq_embeddings(self): """Builds the input sequence embeddings. Inputs: self.input_seqs Outputs: self.seq_embeddings """ with tf.variable_scope('seq_embedding'), tf.device('/cpu:0'): embedding_map = tf.get_variable(name='map', shape=[self.config.vocab_size, self.config.embedding_size], initializer=self.initializer) seq_embeddings = tf.nn.embedding_lookup(embedding_map, self.input_seqs) self.seq_embeddings = seq_embeddings def build_model(self): """Builds the model. Inputs: self.image_embeddings self.seq_embeddings self.target_seqs (training and eval only) self.input_mask (training and eval only) Outputs: self.total_loss (training and eval only) self.target_cross_entropy_losses (training and eval only) self.target_cross_entropy_loss_weights (training and eval only) """ lstm_cell = tf.contrib.rnn.BasicLSTMCell(num_units=self.config.num_lstm_units, state_is_tuple=True) if self.mode == 'train': lstm_cell = tf.contrib.rnn.DropoutWrapper(lstm_cell, input_keep_prob=self.config.lstm_dropout_keep_prob, output_keep_prob=self.config.lstm_dropout_keep_prob) with tf.variable_scope('lstm', initializer=self.initializer) as lstm_scope: zero_state = lstm_cell.zero_state(batch_size=self.image_embeddings.get_shape()[0], dtype=tf.float32) (_, initial_state) = lstm_cell(self.image_embeddings, zero_state) lstm_scope.reuse_variables() if self.mode == 'inference': tf.concat(axis=1, values=initial_state, name='initial_state') state_feed = tf.placeholder(dtype=tf.float32, shape=[None, sum(lstm_cell.state_size)], name='state_feed') state_tuple = tf.split(value=state_feed, num_or_size_splits=2, axis=1) (lstm_outputs, state_tuple) = lstm_cell(inputs=tf.squeeze(self.seq_embeddings, axis=[1]), state=state_tuple) tf.concat(axis=1, values=state_tuple, name='state') else: sequence_length = tf.reduce_sum(self.input_mask, 1) (lstm_outputs, _) = tf.nn.dynamic_rnn(cell=lstm_cell, inputs=self.seq_embeddings, sequence_length=sequence_length, initial_state=initial_state, dtype=tf.float32, scope=lstm_scope) lstm_outputs = tf.reshape(lstm_outputs, [-1, lstm_cell.output_size]) with tf.variable_scope('logits') as logits_scope: logits = tf.contrib.layers.fully_connected(inputs=lstm_outputs, num_outputs=self.config.vocab_size, activation_fn=None, weights_initializer=self.initializer, scope=logits_scope) if self.mode == 'inference': tf.nn.softmax(logits, name='softmax') else: targets = tf.reshape(self.target_seqs, [-1]) weights = tf.to_float(tf.reshape(self.input_mask, [-1])) losses = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=targets, logits=logits) batch_loss = tf.div(tf.reduce_sum(tf.multiply(losses, weights)), tf.reduce_sum(weights), name='batch_loss') tf.losses.add_loss(batch_loss) total_loss = tf.losses.get_total_loss() tf.summary.scalar('losses/batch_loss', batch_loss) tf.summary.scalar('losses/total_loss', total_loss) for var in tf.trainable_variables(): tf.summary.histogram('parameters/' + var.op.name, var) self.total_loss = total_loss self.target_cross_entropy_losses = losses self.target_cross_entropy_loss_weights = weights def setup_inception_initializer(self): """Sets up the function to restore inception variables from checkpoint.""" if self.mode != 'inference': saver = tf.train.Saver(self.inception_variables) def restore_fn(sess): tf.logging.info('Restoring Inception variables from checkpoint file %s', self.config.inception_checkpoint_file) saver.restore(sess, self.config.inception_checkpoint_file) self.init_fn = restore_fn def setup_global_step(self): """Sets up the global step Tensor.""" global_step = tf.Variable(initial_value=0, name='global_step', trainable=False, collections=[tf.GraphKeys.GLOBAL_STEP, tf.GraphKeys.GLOBAL_VARIABLES]) self.global_step = global_step def build(self): """Creates all ops for training and evaluation.""" self.build_inputs() self.build_image_embeddings() self.build_seq_embeddings() self.build_model() self.setup_inception_initializer() self.setup_global_step()
# Filters and the expression register are two really cool ways of calling arbitrary code from vim. # Filters are just anything that takes your text via stdin and gives you something via stdout # Filters can be invoked via `!{motion}` or `!` while selecting, and can call arbitrary scripts # Examples with either `!}` or `v}!`: # - !grep cat # - !sed s/dog/cat/ # - !sed s/cat/dog/ # - !echo foo # # This works with arbitrary scripts (do_stuff_to_my_code.sh, print_funny_comment.sh, etc) as well! "The quick brown fox jumps over the lazy dog" "The quick brown fox jumps over the lazy dog" "The quick brown fox jumps over the lazy cat" "The quick brown fox jumps over the lazy dog" # Expression registers are similar but not quite--They are for evaluating arbitrary vimscript expressions. This MAY # include a system() that shells out to something external, but probably not the greatest idea. Like other registers, # you can access the expression register via <CTRL-r>--In this case, `<CTRL-r>=`. # Ex: `<CTRL-r>=1+1<CR>` from insert mode will print '2'
"""The quick brown fox jumps over the lazy dog""" 'The quick brown fox jumps over the lazy dog' 'The quick brown fox jumps over the lazy cat' 'The quick brown fox jumps over the lazy dog'
#encoding:utf-8 subreddit = 'india' t_channel = '@r_indiaa' def send_post(submission, r2t): return r2t.send_simple(submission)
subreddit = 'india' t_channel = '@r_indiaa' def send_post(submission, r2t): return r2t.send_simple(submission)
""" Problem: There are three types of edits that can be performed on strings: insert a character, remove a character, or replace a character. Given two strings, write a function to check if they are one edit (or zero edits) away. Implementation: An initial solution might check for all three cases separately. In my solution, I decided to go with using the ASCII values of the letters to determine whether they were within the edit distance. This worked very well for the insertion and deletion cases, but failed for replacement. For replacement, we have to add the letters of each string into sets. We then take the set difference. If the set difference has one or no characters remaining, then we know we're within one replacement. Otherwise, we would have to replace more than one character, and we can return false. Efficiency: Time: O(A + B) Space: O(A + B) """ def ord_sum(string: str) -> int: """ Sum the ASCII values of the characters of a passed string. Args: string (str): The string whose ASCII values we are summing. Returns: int: The sum of each letter's ASCII value. """ return sum([ord(c) for c in string]) def one_away(str1: str, str2: str) -> bool: """ Check if the first string is at most one 'edit' away from the second. An 'edit' is defined as a character insertion, deletion, or replacement. Args: str1 (str): The first string we are checking. str2 (str): The second string we are checking. Returns: bool: True if string one is at most one 'edit' away from the other. """ if abs(len(str1) - len(str2)) > 1: return False alphabet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' ascii_values = {ord(c) for c in alphabet} str1_value = ord_sum(str1) str2_value = ord_sum(str2) difference = abs(str1_value - str2_value) # The strings are the same. if difference == 0: return True # The strings are one insertion or deletion apart. if difference in ascii_values: return True # The strings are one replacement apart. s1_chars = {c for c in str1} s2_chars = {c for c in str2} return len(s1_chars - s2_chars) <= 1 # Zero edits assert one_away('pale', 'pale') # Removal assert one_away('pale', 'ple') # Insertion assert one_away('pales', 'pale') # Replacement assert one_away('pale', 'bale') # Two replacements apart assert not one_away('pale', 'bake') # One string is two characters longer assert not one_away('palest', 'bake')
""" Problem: There are three types of edits that can be performed on strings: insert a character, remove a character, or replace a character. Given two strings, write a function to check if they are one edit (or zero edits) away. Implementation: An initial solution might check for all three cases separately. In my solution, I decided to go with using the ASCII values of the letters to determine whether they were within the edit distance. This worked very well for the insertion and deletion cases, but failed for replacement. For replacement, we have to add the letters of each string into sets. We then take the set difference. If the set difference has one or no characters remaining, then we know we're within one replacement. Otherwise, we would have to replace more than one character, and we can return false. Efficiency: Time: O(A + B) Space: O(A + B) """ def ord_sum(string: str) -> int: """ Sum the ASCII values of the characters of a passed string. Args: string (str): The string whose ASCII values we are summing. Returns: int: The sum of each letter's ASCII value. """ return sum([ord(c) for c in string]) def one_away(str1: str, str2: str) -> bool: """ Check if the first string is at most one 'edit' away from the second. An 'edit' is defined as a character insertion, deletion, or replacement. Args: str1 (str): The first string we are checking. str2 (str): The second string we are checking. Returns: bool: True if string one is at most one 'edit' away from the other. """ if abs(len(str1) - len(str2)) > 1: return False alphabet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' ascii_values = {ord(c) for c in alphabet} str1_value = ord_sum(str1) str2_value = ord_sum(str2) difference = abs(str1_value - str2_value) if difference == 0: return True if difference in ascii_values: return True s1_chars = {c for c in str1} s2_chars = {c for c in str2} return len(s1_chars - s2_chars) <= 1 assert one_away('pale', 'pale') assert one_away('pale', 'ple') assert one_away('pales', 'pale') assert one_away('pale', 'bale') assert not one_away('pale', 'bake') assert not one_away('palest', 'bake')
microcode = ''' def macroop VCVTSD2SS_XMM_XMM { cvtf2f xmm0, xmm0m, destSize=4, srcSize=8, ext=Scalar movfph2h xmm0, xmm0v, dataSize=4 movfp xmm1, xmm1v, dataSize=8 vclear dest=xmm2, destVL=16 }; def macroop VCVTSD2SS_XMM_M { ldfp ufp1, seg, sib, disp, dataSize=8 cvtf2f xmm0, ufp1, destSize=4, srcSize=8, ext=Scalar movfph2h xmm0, xmm0v, dataSize=4 movfp xmm1, xmm1v, dataSize=8 vclear dest=xmm2, destVL=16 }; def macroop VCVTSD2SS_XMM_P { rdip t7 ldfp ufp1, seg, riprel, disp, dataSize=8 cvtf2f xmm0, ufp1, destSize=4, srcSize=8, ext=Scalar movfph2h xmm0, xmm0v, dataSize=4 movfp xmm1, xmm1v, dataSize=8 vclear dest=xmm2, destVL=16 }; '''
microcode = '\n\ndef macroop VCVTSD2SS_XMM_XMM {\n cvtf2f xmm0, xmm0m, destSize=4, srcSize=8, ext=Scalar\n movfph2h xmm0, xmm0v, dataSize=4\n movfp xmm1, xmm1v, dataSize=8\n vclear dest=xmm2, destVL=16\n};\n\ndef macroop VCVTSD2SS_XMM_M {\n ldfp ufp1, seg, sib, disp, dataSize=8\n cvtf2f xmm0, ufp1, destSize=4, srcSize=8, ext=Scalar\n movfph2h xmm0, xmm0v, dataSize=4\n movfp xmm1, xmm1v, dataSize=8\n vclear dest=xmm2, destVL=16\n};\n\ndef macroop VCVTSD2SS_XMM_P {\n rdip t7\n ldfp ufp1, seg, riprel, disp, dataSize=8\n cvtf2f xmm0, ufp1, destSize=4, srcSize=8, ext=Scalar\n movfph2h xmm0, xmm0v, dataSize=4\n movfp xmm1, xmm1v, dataSize=8\n vclear dest=xmm2, destVL=16\n};\n\n'
# --- Day 2: I Was Told There Would Be No Math --- def createLineList(input): lineList = [line.rstrip('\r\n') for line in open(input)] return lineList def createDimList(lines): dimList = [] for L in lines: dim = L.split("x") for i in range(0, len(dim)): dim[i] = int(dim[i]) dimList.append(dim) return dimList def createAreaList(dimensions): areaList = [] for d in dimensions: lw = d[0] * d[1] wh = d[1] * d[2] hl = d[2] * d[0] slack = min(lw, wh, hl) area = 2 * lw + 2 * wh + 2 * hl + slack areaList.append(area) return areaList def createRibbonList(dimensions): lenList = [] for d in dimensions: l = d[0] w = d[1] h = d[2] ribbonLen = l * 2 + w * 2 + h * 2 - max(l, w, h) * 2 bowLen = l * w * h totalLen = ribbonLen + bowLen lenList.append(totalLen) return lenList line_list = createLineList("input.txt") dim_list = createDimList(line_list) area_list = createAreaList(dim_list) ribbon_list = createRibbonList(dim_list) print("The total amount of wrapping paper required is " + str(sum(area_list)) + " square feet.") print("The total amount of ribbons required is " + str(sum(ribbon_list)) + " feet.")
def create_line_list(input): line_list = [line.rstrip('\r\n') for line in open(input)] return lineList def create_dim_list(lines): dim_list = [] for l in lines: dim = L.split('x') for i in range(0, len(dim)): dim[i] = int(dim[i]) dimList.append(dim) return dimList def create_area_list(dimensions): area_list = [] for d in dimensions: lw = d[0] * d[1] wh = d[1] * d[2] hl = d[2] * d[0] slack = min(lw, wh, hl) area = 2 * lw + 2 * wh + 2 * hl + slack areaList.append(area) return areaList def create_ribbon_list(dimensions): len_list = [] for d in dimensions: l = d[0] w = d[1] h = d[2] ribbon_len = l * 2 + w * 2 + h * 2 - max(l, w, h) * 2 bow_len = l * w * h total_len = ribbonLen + bowLen lenList.append(totalLen) return lenList line_list = create_line_list('input.txt') dim_list = create_dim_list(line_list) area_list = create_area_list(dim_list) ribbon_list = create_ribbon_list(dim_list) print('The total amount of wrapping paper required is ' + str(sum(area_list)) + ' square feet.') print('The total amount of ribbons required is ' + str(sum(ribbon_list)) + ' feet.')
""" Problem: Write a function that returns the bitwise AND of all integers between M and N, inclusive. """ def bitwise_and_on_range(start: int, end: int) -> int: # using naive approach result = start for num in range(start + 1, end + 1): result = result & num return result if __name__ == "__main__": print(bitwise_and_on_range(3, 4)) print(bitwise_and_on_range(5, 6)) print(bitwise_and_on_range(126, 127)) print(bitwise_and_on_range(127, 215)) print(bitwise_and_on_range(129, 215)) """ SPECS: TIME COMPLEXITY: O(n) SPACE COMPLEXITY: O(1) """
""" Problem: Write a function that returns the bitwise AND of all integers between M and N, inclusive. """ def bitwise_and_on_range(start: int, end: int) -> int: result = start for num in range(start + 1, end + 1): result = result & num return result if __name__ == '__main__': print(bitwise_and_on_range(3, 4)) print(bitwise_and_on_range(5, 6)) print(bitwise_and_on_range(126, 127)) print(bitwise_and_on_range(127, 215)) print(bitwise_and_on_range(129, 215)) '\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n'
__author__ = 'burakks41' line1 = input().split() line2 = input().split() flowers = int(line1[0]) persons = int(line1[1]) person = [0] * persons flowers_cost = sorted([int(n) for n in line2],reverse=True) sum = 0 for i in range(flowers): x, person[i % persons] = person[i % persons], person[i % persons] + 1 sum += flowers_cost[i] * (x+1) print(sum)
__author__ = 'burakks41' line1 = input().split() line2 = input().split() flowers = int(line1[0]) persons = int(line1[1]) person = [0] * persons flowers_cost = sorted([int(n) for n in line2], reverse=True) sum = 0 for i in range(flowers): (x, person[i % persons]) = (person[i % persons], person[i % persons] + 1) sum += flowers_cost[i] * (x + 1) print(sum)
# You are given an array of non-negative integers numbers. You are allowed to choose any number from this array and swap any two digits in it. If after the swap operation the number contains leading zeros, they can be omitted and not considered (eg: 010 will be considered just 10). # Your task is to check whether it is possible to apply the swap operation at most once, so that the elements of the resulting array are strictly increasing. # Example # For numbers = [1, 5, 10, 20], the output should be makeIncreasing(numbers) = true. # The initial array is already strictly increasing, so no actions are required. # For numbers = [1, 3, 900, 10], the output should be makeIncreasing(numbers) = true. # By choosing numbers[2] = 900 and swapping its first and third digits, the resulting number 009 is considered to be just 9. So the updated array will look like [1, 3, 9, 10], which is strictly increasing. # For numbers = [13, 31, 30], the output should be makeIncreasing(numbers) = false. # The initial array elements are not increasing. # By swapping the digits of numbers[0] = 13, the array becomes [31, 31, 30] which is not strictly increasing; # By swapping the digits of numbers[1] = 31, the array becomes [13, 13, 30] which is not strictly increasing; # By swapping the digits of numbers[2] = 30, the array becomes [13, 31, 3] which is not strictly increasing; # So, it's not possible to obtain a strictly increasing array, and the answer is false. def compare(arr1, arr2): store = [] for i in range(len(arr1)): if arr1[i] != arr2[i]: store.append(i) return store def getOptions(str): store = [] for i in range(len(str) - 1): store.append(int(str[i + 1:] + str[:i + 1])) return store def check(i, arr): curr = str(arr[i]) options = getOptions(curr) if i == 0: for v in options: if v < arr[i + 1]: return True elif i == len(arr) - 1: for v in options: if v > arr[i - 1]: return True else: for v in options: if v > arr[i - 1] and v < arr[i + 1]: return True return False def makeIncreasing(numbers): b = sorted(numbers) store = compare(numbers, b) print(store) if len(store) > 2 and store[-1] - store[0] != len(store) - 1: return False if len(store) == 0: return True if check(store[0], numbers) or check(store[1], numbers): return True else: return False
def compare(arr1, arr2): store = [] for i in range(len(arr1)): if arr1[i] != arr2[i]: store.append(i) return store def get_options(str): store = [] for i in range(len(str) - 1): store.append(int(str[i + 1:] + str[:i + 1])) return store def check(i, arr): curr = str(arr[i]) options = get_options(curr) if i == 0: for v in options: if v < arr[i + 1]: return True elif i == len(arr) - 1: for v in options: if v > arr[i - 1]: return True else: for v in options: if v > arr[i - 1] and v < arr[i + 1]: return True return False def make_increasing(numbers): b = sorted(numbers) store = compare(numbers, b) print(store) if len(store) > 2 and store[-1] - store[0] != len(store) - 1: return False if len(store) == 0: return True if check(store[0], numbers) or check(store[1], numbers): return True else: return False
def balanced_split_exists(arr): if len(arr) < 2 or sum(arr) % 2 != 0: return False arr.sort() # n log n l, r = 0, len(arr) - 1 left_sum, right_sum = arr[l], arr[r] while l <= r: if arr[l] >= arr[r]: return False if left_sum < right_sum: l += 1 left_sum += arr[l] elif right_sum < left_sum: r -= 1 right_sum += arr[r] else: l += 1 r -= 1 left_sum += arr[l] right_sum += arr[r] return True # Test cases: print(balanced_split_exists([5, 5]) == False) print(balanced_split_exists([1, 5, 7, 1]) == True) print(balanced_split_exists([12, 7, 6, 7, 6]) == False) print(balanced_split_exists([2, 1, 2, 5]) == True) print(balanced_split_exists([3, 6, 3, 4, 4]) == False) print(balanced_split_exists([]) == False) print(balanced_split_exists([0]) == False) print(balanced_split_exists([1, 5, 4]) == True) print(balanced_split_exists([5, 1, 6, 5, 5]) == False) print(balanced_split_exists([0, 1, 0, 2, 3, 4]) == False) print(balanced_split_exists([0, 1, 4, 4, 2, 3]) == False) print(balanced_split_exists([1, 1, 1, 1, 1, 2, 3]) == True) print(balanced_split_exists([1, 1, 1, 1, 1, 2, 3, 4]) == True) print(balanced_split_exists([1, 1, 1, 1, 1, 2, 2, 2, 3, 4, 4]) == True) print(balanced_split_exists([4, 4, 4, 4, 4, 4, 8, 8, 8]) == True) print(balanced_split_exists([5, 7, 20, 12, 5, 7, 6, 14, 5, 5, 6]) == True) print(balanced_split_exists([5, 7, 20, 12, 5, 7, 6, 7, 14, 5, 5, 6]) == False) print(balanced_split_exists([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]) == False)
def balanced_split_exists(arr): if len(arr) < 2 or sum(arr) % 2 != 0: return False arr.sort() (l, r) = (0, len(arr) - 1) (left_sum, right_sum) = (arr[l], arr[r]) while l <= r: if arr[l] >= arr[r]: return False if left_sum < right_sum: l += 1 left_sum += arr[l] elif right_sum < left_sum: r -= 1 right_sum += arr[r] else: l += 1 r -= 1 left_sum += arr[l] right_sum += arr[r] return True print(balanced_split_exists([5, 5]) == False) print(balanced_split_exists([1, 5, 7, 1]) == True) print(balanced_split_exists([12, 7, 6, 7, 6]) == False) print(balanced_split_exists([2, 1, 2, 5]) == True) print(balanced_split_exists([3, 6, 3, 4, 4]) == False) print(balanced_split_exists([]) == False) print(balanced_split_exists([0]) == False) print(balanced_split_exists([1, 5, 4]) == True) print(balanced_split_exists([5, 1, 6, 5, 5]) == False) print(balanced_split_exists([0, 1, 0, 2, 3, 4]) == False) print(balanced_split_exists([0, 1, 4, 4, 2, 3]) == False) print(balanced_split_exists([1, 1, 1, 1, 1, 2, 3]) == True) print(balanced_split_exists([1, 1, 1, 1, 1, 2, 3, 4]) == True) print(balanced_split_exists([1, 1, 1, 1, 1, 2, 2, 2, 3, 4, 4]) == True) print(balanced_split_exists([4, 4, 4, 4, 4, 4, 8, 8, 8]) == True) print(balanced_split_exists([5, 7, 20, 12, 5, 7, 6, 14, 5, 5, 6]) == True) print(balanced_split_exists([5, 7, 20, 12, 5, 7, 6, 7, 14, 5, 5, 6]) == False) print(balanced_split_exists([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]) == False)
def get_hours_since_midnight(total_seconds): hours = total_seconds // 3600 return hours total_seconds = int(input('Enter a number of seconds: ')) hours = get_hours_since_midnight(total_seconds) format(hours,'02d') def get_minutes(total_seconds): minutes = ( total_seconds % 3600) minutes //= 60 return minutes minutes = get_minutes(total_seconds) format(minutes, '02d') def get_seconds(total_seconds, minutes): seconds = (total_seconds % 3600) % 60 return seconds seconds = get_seconds(total_seconds, minutes) print('The time since midnight is %02d:%02d:%0d' %(hours,minutes,seconds))
def get_hours_since_midnight(total_seconds): hours = total_seconds // 3600 return hours total_seconds = int(input('Enter a number of seconds: ')) hours = get_hours_since_midnight(total_seconds) format(hours, '02d') def get_minutes(total_seconds): minutes = total_seconds % 3600 minutes //= 60 return minutes minutes = get_minutes(total_seconds) format(minutes, '02d') def get_seconds(total_seconds, minutes): seconds = total_seconds % 3600 % 60 return seconds seconds = get_seconds(total_seconds, minutes) print('The time since midnight is %02d:%02d:%0d' % (hours, minutes, seconds))
# Copyright (C) 2018-present ichenq@outlook.com. All rights reserved. # Distributed under the terms and conditions of the Apache License. # See accompanying files LICENSE. CSHARP_MANAGER_TEMPLATE = """ public class %s { public const char TABUGEN_CSV_SEP = '%s'; // CSV field delimiter public const char TABUGEN_CSV_QUOTE = '"'; // CSV field quote public const char TABUGEN_ARRAY_DELIM = '%s'; // array item delimiter public const char TABUGEN_MAP_DELIM1 = '%s'; // map item delimiter public const char TABUGEN_MAP_DELIM2 = '%s'; // map key-value delimiter // self-defined boolean value parse public static bool ParseBool(string text) { if (text == null || text.Length == 0) { return false; } return string.Equals(text, "1") || string.Equals(text, "Y") || string.Equals(text, "ON"); } """
csharp_manager_template = '\npublic class %s \n{ \n public const char TABUGEN_CSV_SEP = \'%s\'; // CSV field delimiter\n public const char TABUGEN_CSV_QUOTE = \'"\'; // CSV field quote\n public const char TABUGEN_ARRAY_DELIM = \'%s\'; // array item delimiter\n public const char TABUGEN_MAP_DELIM1 = \'%s\'; // map item delimiter\n public const char TABUGEN_MAP_DELIM2 = \'%s\'; // map key-value delimiter\n \n // self-defined boolean value parse\n public static bool ParseBool(string text)\n {\n if (text == null || text.Length == 0) {\n return false;\n }\n return string.Equals(text, "1") ||\n string.Equals(text, "Y") || \n string.Equals(text, "ON");\n }\n'
brd = { 'name': ('Raspberry Pi B+/2'), 'port': { 'rpigpio': { 'gpio' : { # BCM/Functional RPi pin names. 'bcm2_sda' : '3', 'bcm3_scl' : '5', 'bcm4_gpclk0': '7', 'bcm17' : '11', 'bcm27_pcm_d': '13', 'bcm22' : '15', 'bcm10_mosi' : '19', 'bcm9_miso' : '21', 'bcm11_sclk' : '23', 'bcm5' : '29', 'bcm6' : '31', 'bcm13' : '33', 'bcm19_miso' : '35', 'bcm26' : '37', 'bcm21_sclk' : '40', 'bcm20_mosi' : '38', 'bcm16' : '36', 'bcm12' : '32', 'bcm7_ce1' : '26', 'bcm8_ce0' : '24', 'bcm25' : '22', 'bcm24' : '18', 'bcm23' : '16', 'bcm18_pcm_c': '12', 'bcm15_rxd' : '10', 'bcm14_txd' : '8', # Functional RPi pin names. # 'bcm2' : '3', # 'bcm3' : '5', # 'bcm4' : '7', # 'bcm5' : '29', # 'bcm6' : '31', # 'bcm7' : '26', # 'bcm8' : '24', # 'bcm9' : '21', # 'bcm10': '19', # 'bcm11': '23', # 'bcm12': '32', # 'bcm13': '33', # 'bcm14': '8', # 'bcm15': '10', # 'bcm16': '36', # 'bcm17': '11', # 'bcm18': '12', # 'bcm19': '35', # 'bcm20': '38', # 'bcm21': '40', # 'bcm22': '15', # 'bcm23': '16', # 'bcm24': '18', # 'bcm25': '22', # 'bcm26': '37', # 'bcm27': '13', # Positional RPi pin names. # 'gpio03': '3', # 'gpio05': '5', # 'gpio07': '7', # 'gpio29': '29', # 'gpio31': '31', # 'gpio26': '26', # 'gpio24': '24', # 'gpio21': '21', # 'gpio19': '19', # 'gpio23': '23', # 'gpio32': '32', # 'gpio33': '33', # 'gpio08': '8', # 'gpio10': '10', # 'gpio36': '36', # 'gpio11': '11', # 'gpio12': '12', # 'gpio35': '35', # 'gpio38': '38', # 'gpio40': '40', # 'gpio15': '15', # 'gpio16': '16', # 'gpio18': '18', # 'gpio22': '22', # 'gpio37': '37', # 'gpio13': '13', } } } }
brd = {'name': 'Raspberry Pi B+/2', 'port': {'rpigpio': {'gpio': {'bcm2_sda': '3', 'bcm3_scl': '5', 'bcm4_gpclk0': '7', 'bcm17': '11', 'bcm27_pcm_d': '13', 'bcm22': '15', 'bcm10_mosi': '19', 'bcm9_miso': '21', 'bcm11_sclk': '23', 'bcm5': '29', 'bcm6': '31', 'bcm13': '33', 'bcm19_miso': '35', 'bcm26': '37', 'bcm21_sclk': '40', 'bcm20_mosi': '38', 'bcm16': '36', 'bcm12': '32', 'bcm7_ce1': '26', 'bcm8_ce0': '24', 'bcm25': '22', 'bcm24': '18', 'bcm23': '16', 'bcm18_pcm_c': '12', 'bcm15_rxd': '10', 'bcm14_txd': '8'}}}}
n = int(input()) X = list(map(int, input().split())) X.sort() def median(n, X): if n % 2 == 0: numerator = X[int(n / 2)] + X[int(n / 2 - 1)] median_value = numerator / 2 else: median_value = X[int(n / 2)] return int(median_value) print(median(int(n / 2), X[: int(n / 2)])) print(median(n, X)) if n % 2 == 0: print(median(int(n / 2), X[int(n / 2) :])) else: print(median(int(n / 2), X[int(n / 2) + 1 :]))
n = int(input()) x = list(map(int, input().split())) X.sort() def median(n, X): if n % 2 == 0: numerator = X[int(n / 2)] + X[int(n / 2 - 1)] median_value = numerator / 2 else: median_value = X[int(n / 2)] return int(median_value) print(median(int(n / 2), X[:int(n / 2)])) print(median(n, X)) if n % 2 == 0: print(median(int(n / 2), X[int(n / 2):])) else: print(median(int(n / 2), X[int(n / 2) + 1:]))
{ 'includes': [ '../common.gyp' ], 'targets': [ { 'target_name': 'libzxing', 'type': 'static_library', 'include_dirs': [ 'core/src', ], 'sources': [ 'core/src/bigint/BigInteger.cc', 'core/src/bigint/BigIntegerAlgorithms.cc', 'core/src/bigint/BigIntegerUtils.cc', 'core/src/bigint/BigUnsigned.cc', 'core/src/bigint/BigUnsignedInABase.cc', 'core/src/zxing/BarcodeFormat.cpp', 'core/src/zxing/Binarizer.cpp', 'core/src/zxing/BinaryBitmap.cpp', 'core/src/zxing/ChecksumException.cpp', 'core/src/zxing/DecodeHints.cpp', 'core/src/zxing/Exception.cpp', 'core/src/zxing/FormatException.cpp', 'core/src/zxing/InvertedLuminanceSource.cpp', 'core/src/zxing/LuminanceSource.cpp', 'core/src/zxing/MultiFormatReader.cpp', 'core/src/zxing/Reader.cpp', 'core/src/zxing/Result.cpp', 'core/src/zxing/ResultIO.cpp', 'core/src/zxing/ResultPoint.cpp', 'core/src/zxing/ResultPointCallback.cpp', 'core/src/zxing/aztec/AztecDetectorResult.cpp', 'core/src/zxing/aztec/AztecReader.cpp', 'core/src/zxing/aztec/decoder/1Decoder.cpp', 'core/src/zxing/aztec/detector/1Detector.cpp', 'core/src/zxing/common/BitArray.cpp', 'core/src/zxing/common/BitArrayIO.cpp', 'core/src/zxing/common/BitMatrix.cpp', 'core/src/zxing/common/BitSource.cpp', 'core/src/zxing/common/CharacterSetECI.cpp', 'core/src/zxing/common/DecoderResult.cpp', 'core/src/zxing/common/DetectorResult.cpp', 'core/src/zxing/common/GlobalHistogramBinarizer.cpp', 'core/src/zxing/common/GreyscaleLuminanceSource.cpp', 'core/src/zxing/common/GreyscaleRotatedLuminanceSource.cpp', 'core/src/zxing/common/GridSampler.cpp', 'core/src/zxing/common/HybridBinarizer.cpp', 'core/src/zxing/common/IllegalArgumentException.cpp', 'core/src/zxing/common/PerspectiveTransform.cpp', 'core/src/zxing/common/Str.cpp', 'core/src/zxing/common/StringUtils.cpp', 'core/src/zxing/common/detector/MonochromeRectangleDetector.cpp', 'core/src/zxing/common/detector/WhiteRectangleDetector.cpp', 'core/src/zxing/common/reedsolomon/GenericGF.cpp', 'core/src/zxing/common/reedsolomon/GenericGFPoly.cpp', 'core/src/zxing/common/reedsolomon/ReedSolomonDecoder.cpp', 'core/src/zxing/common/reedsolomon/ReedSolomonException.cpp', 'core/src/zxing/datamatrix/1Version.cpp', 'core/src/zxing/datamatrix/DataMatrixReader.cpp', 'core/src/zxing/datamatrix/decoder/1BitMatrixParser.cpp', 'core/src/zxing/datamatrix/decoder/1DataBlock.cpp', 'core/src/zxing/datamatrix/decoder/1DecodedBitStreamParser.cpp', 'core/src/zxing/datamatrix/decoder/2Decoder.cpp', 'core/src/zxing/datamatrix/detector/2Detector.cpp', 'core/src/zxing/datamatrix/detector/CornerPoint.cpp', 'core/src/zxing/datamatrix/detector/DetectorException.cpp', 'core/src/zxing/multi/ByQuadrantReader.cpp', 'core/src/zxing/multi/GenericMultipleBarcodeReader.cpp', 'core/src/zxing/multi/MultipleBarcodeReader.cpp', 'core/src/zxing/multi/qrcode/QRCodeMultiReader.cpp', 'core/src/zxing/multi/qrcode/detector/MultiDetector.cpp', 'core/src/zxing/multi/qrcode/detector/MultiFinderPatternFinder.cpp', 'core/src/zxing/oned/CodaBarReader.cpp', 'core/src/zxing/oned/Code128Reader.cpp', 'core/src/zxing/oned/Code39Reader.cpp', 'core/src/zxing/oned/Code93Reader.cpp', 'core/src/zxing/oned/EAN13Reader.cpp', 'core/src/zxing/oned/EAN8Reader.cpp', 'core/src/zxing/oned/ITFReader.cpp', 'core/src/zxing/oned/MultiFormatOneDReader.cpp', 'core/src/zxing/oned/MultiFormatUPCEANReader.cpp', 'core/src/zxing/oned/OneDReader.cpp', 'core/src/zxing/oned/OneDResultPoint.cpp', 'core/src/zxing/oned/UPCAReader.cpp', 'core/src/zxing/oned/UPCEANReader.cpp', 'core/src/zxing/oned/UPCEReader.cpp', 'core/src/zxing/pdf417/PDF417Reader.cpp', 'core/src/zxing/pdf417/decoder/2BitMatrixParser.cpp', 'core/src/zxing/pdf417/decoder/2DecodedBitStreamParser.cpp', 'core/src/zxing/pdf417/decoder/3Decoder.cpp', 'core/src/zxing/pdf417/decoder/ec/ErrorCorrection.cpp', 'core/src/zxing/pdf417/decoder/ec/ModulusGF.cpp', 'core/src/zxing/pdf417/decoder/ec/ModulusPoly.cpp', 'core/src/zxing/pdf417/detector/3Detector.cpp', 'core/src/zxing/pdf417/detector/LinesSampler.cpp', 'core/src/zxing/qrcode/2Version.cpp', 'core/src/zxing/qrcode/ErrorCorrectionLevel.cpp', 'core/src/zxing/qrcode/FormatInformation.cpp', 'core/src/zxing/qrcode/QRCodeReader.cpp', 'core/src/zxing/qrcode/decoder/2DataBlock.cpp', 'core/src/zxing/qrcode/decoder/3BitMatrixParser.cpp', 'core/src/zxing/qrcode/decoder/3DecodedBitStreamParser.cpp', 'core/src/zxing/qrcode/decoder/4Decoder.cpp', 'core/src/zxing/qrcode/decoder/DataMask.cpp', 'core/src/zxing/qrcode/decoder/Mode.cpp', 'core/src/zxing/qrcode/detector/4Detector.cpp', 'core/src/zxing/qrcode/detector/AlignmentPattern.cpp', 'core/src/zxing/qrcode/detector/AlignmentPatternFinder.cpp', 'core/src/zxing/qrcode/detector/FinderPattern.cpp', 'core/src/zxing/qrcode/detector/FinderPatternFinder.cpp', 'core/src/zxing/qrcode/detector/FinderPatternInfo.cpp', ], 'conditions': [ ['OS=="win"', { 'include_dirs': [ 'core/src/win32/zxing/', ], 'sources': [ 'core/src/win32/zxing/win_iconv.c', ], } ], ], }, ] }
{'includes': ['../common.gyp'], 'targets': [{'target_name': 'libzxing', 'type': 'static_library', 'include_dirs': ['core/src'], 'sources': ['core/src/bigint/BigInteger.cc', 'core/src/bigint/BigIntegerAlgorithms.cc', 'core/src/bigint/BigIntegerUtils.cc', 'core/src/bigint/BigUnsigned.cc', 'core/src/bigint/BigUnsignedInABase.cc', 'core/src/zxing/BarcodeFormat.cpp', 'core/src/zxing/Binarizer.cpp', 'core/src/zxing/BinaryBitmap.cpp', 'core/src/zxing/ChecksumException.cpp', 'core/src/zxing/DecodeHints.cpp', 'core/src/zxing/Exception.cpp', 'core/src/zxing/FormatException.cpp', 'core/src/zxing/InvertedLuminanceSource.cpp', 'core/src/zxing/LuminanceSource.cpp', 'core/src/zxing/MultiFormatReader.cpp', 'core/src/zxing/Reader.cpp', 'core/src/zxing/Result.cpp', 'core/src/zxing/ResultIO.cpp', 'core/src/zxing/ResultPoint.cpp', 'core/src/zxing/ResultPointCallback.cpp', 'core/src/zxing/aztec/AztecDetectorResult.cpp', 'core/src/zxing/aztec/AztecReader.cpp', 'core/src/zxing/aztec/decoder/1Decoder.cpp', 'core/src/zxing/aztec/detector/1Detector.cpp', 'core/src/zxing/common/BitArray.cpp', 'core/src/zxing/common/BitArrayIO.cpp', 'core/src/zxing/common/BitMatrix.cpp', 'core/src/zxing/common/BitSource.cpp', 'core/src/zxing/common/CharacterSetECI.cpp', 'core/src/zxing/common/DecoderResult.cpp', 'core/src/zxing/common/DetectorResult.cpp', 'core/src/zxing/common/GlobalHistogramBinarizer.cpp', 'core/src/zxing/common/GreyscaleLuminanceSource.cpp', 'core/src/zxing/common/GreyscaleRotatedLuminanceSource.cpp', 'core/src/zxing/common/GridSampler.cpp', 'core/src/zxing/common/HybridBinarizer.cpp', 'core/src/zxing/common/IllegalArgumentException.cpp', 'core/src/zxing/common/PerspectiveTransform.cpp', 'core/src/zxing/common/Str.cpp', 'core/src/zxing/common/StringUtils.cpp', 'core/src/zxing/common/detector/MonochromeRectangleDetector.cpp', 'core/src/zxing/common/detector/WhiteRectangleDetector.cpp', 'core/src/zxing/common/reedsolomon/GenericGF.cpp', 'core/src/zxing/common/reedsolomon/GenericGFPoly.cpp', 'core/src/zxing/common/reedsolomon/ReedSolomonDecoder.cpp', 'core/src/zxing/common/reedsolomon/ReedSolomonException.cpp', 'core/src/zxing/datamatrix/1Version.cpp', 'core/src/zxing/datamatrix/DataMatrixReader.cpp', 'core/src/zxing/datamatrix/decoder/1BitMatrixParser.cpp', 'core/src/zxing/datamatrix/decoder/1DataBlock.cpp', 'core/src/zxing/datamatrix/decoder/1DecodedBitStreamParser.cpp', 'core/src/zxing/datamatrix/decoder/2Decoder.cpp', 'core/src/zxing/datamatrix/detector/2Detector.cpp', 'core/src/zxing/datamatrix/detector/CornerPoint.cpp', 'core/src/zxing/datamatrix/detector/DetectorException.cpp', 'core/src/zxing/multi/ByQuadrantReader.cpp', 'core/src/zxing/multi/GenericMultipleBarcodeReader.cpp', 'core/src/zxing/multi/MultipleBarcodeReader.cpp', 'core/src/zxing/multi/qrcode/QRCodeMultiReader.cpp', 'core/src/zxing/multi/qrcode/detector/MultiDetector.cpp', 'core/src/zxing/multi/qrcode/detector/MultiFinderPatternFinder.cpp', 'core/src/zxing/oned/CodaBarReader.cpp', 'core/src/zxing/oned/Code128Reader.cpp', 'core/src/zxing/oned/Code39Reader.cpp', 'core/src/zxing/oned/Code93Reader.cpp', 'core/src/zxing/oned/EAN13Reader.cpp', 'core/src/zxing/oned/EAN8Reader.cpp', 'core/src/zxing/oned/ITFReader.cpp', 'core/src/zxing/oned/MultiFormatOneDReader.cpp', 'core/src/zxing/oned/MultiFormatUPCEANReader.cpp', 'core/src/zxing/oned/OneDReader.cpp', 'core/src/zxing/oned/OneDResultPoint.cpp', 'core/src/zxing/oned/UPCAReader.cpp', 'core/src/zxing/oned/UPCEANReader.cpp', 'core/src/zxing/oned/UPCEReader.cpp', 'core/src/zxing/pdf417/PDF417Reader.cpp', 'core/src/zxing/pdf417/decoder/2BitMatrixParser.cpp', 'core/src/zxing/pdf417/decoder/2DecodedBitStreamParser.cpp', 'core/src/zxing/pdf417/decoder/3Decoder.cpp', 'core/src/zxing/pdf417/decoder/ec/ErrorCorrection.cpp', 'core/src/zxing/pdf417/decoder/ec/ModulusGF.cpp', 'core/src/zxing/pdf417/decoder/ec/ModulusPoly.cpp', 'core/src/zxing/pdf417/detector/3Detector.cpp', 'core/src/zxing/pdf417/detector/LinesSampler.cpp', 'core/src/zxing/qrcode/2Version.cpp', 'core/src/zxing/qrcode/ErrorCorrectionLevel.cpp', 'core/src/zxing/qrcode/FormatInformation.cpp', 'core/src/zxing/qrcode/QRCodeReader.cpp', 'core/src/zxing/qrcode/decoder/2DataBlock.cpp', 'core/src/zxing/qrcode/decoder/3BitMatrixParser.cpp', 'core/src/zxing/qrcode/decoder/3DecodedBitStreamParser.cpp', 'core/src/zxing/qrcode/decoder/4Decoder.cpp', 'core/src/zxing/qrcode/decoder/DataMask.cpp', 'core/src/zxing/qrcode/decoder/Mode.cpp', 'core/src/zxing/qrcode/detector/4Detector.cpp', 'core/src/zxing/qrcode/detector/AlignmentPattern.cpp', 'core/src/zxing/qrcode/detector/AlignmentPatternFinder.cpp', 'core/src/zxing/qrcode/detector/FinderPattern.cpp', 'core/src/zxing/qrcode/detector/FinderPatternFinder.cpp', 'core/src/zxing/qrcode/detector/FinderPatternInfo.cpp'], 'conditions': [['OS=="win"', {'include_dirs': ['core/src/win32/zxing/'], 'sources': ['core/src/win32/zxing/win_iconv.c']}]]}]}
# -*- coding: utf-8 -*- """ Created on Tue Feb 1 10:11:25 2022 @author: User """ # contadore de 1 en 1 cont = 0 cont +=1 #--> cont= cont + 1 # 1.- contar numero del 1 al 10 y mostrar en la pantalla while cont < 10: cont = cont + 1 print(cont) # 2.- Sumar los numeros del 1 al 10 # list = {1,2,3,.....10} # range (1,n) --> [1,2,3,....,(n-1)] sum=0 for num in range(1,11): # [1,2,3....] sum = sum + num print (f'La suma total del 1 al 10 es: {sum}') # 3.- Multiplicador mult = 1 for num in range(1,11): # [1,2,3...10] mult=mult*num print (f'la multiplicacion es: {mult}') # 4.- mostrar los pares del 1 al 10 for num in range (1,11): if num % 2 == 0: print (" numero pares:", num) # 5.- mostrar los numeros impares print() for num in range (1,11): if num % 2 == 1: print (" los numeros impares son:", num) ## Ejercicio de clase 01 print() print("Ejercicio de clase # 1") print() print("Contar la cantitatd de elementos de:[1,3,4,66,55,4]") list = [1,3,4,66,55,4] cont = 0 for num in list: cont = cont + 1 print(f'- La cantidad de digitos es: {cont}') print() print("lista de los numeros impares y pares es:[1,3,4,66,55,4]") list = [1,3,4,66,55,4] for num in list: if num % 2 == 1: print ("- Los numeros impares son:", num) print() for num in list: if num % 2 == 0: print ("- Los numeros pares son:", num) # Ejercicio de clase 02 print() print("Ejercicio de clase # 2") print() print("Mostrar la suma y la multiplicacion de los numeros mostrados es:[1,3,4,66,55,4]") list = [1,3,4,66,55,4] sum = 0 for num in list: sum = sum + num print ("- La suma es =", sum) mult = 1 for num in list: mult = mult*num print("- La multiplicacion es =", mult)
""" Created on Tue Feb 1 10:11:25 2022 @author: User """ cont = 0 cont += 1 while cont < 10: cont = cont + 1 print(cont) sum = 0 for num in range(1, 11): sum = sum + num print(f'La suma total del 1 al 10 es: {sum}') mult = 1 for num in range(1, 11): mult = mult * num print(f'la multiplicacion es: {mult}') for num in range(1, 11): if num % 2 == 0: print(' numero pares:', num) print() for num in range(1, 11): if num % 2 == 1: print(' los numeros impares son:', num) print() print('Ejercicio de clase # 1') print() print('Contar la cantitatd de elementos de:[1,3,4,66,55,4]') list = [1, 3, 4, 66, 55, 4] cont = 0 for num in list: cont = cont + 1 print(f'- La cantidad de digitos es: {cont}') print() print('lista de los numeros impares y pares es:[1,3,4,66,55,4]') list = [1, 3, 4, 66, 55, 4] for num in list: if num % 2 == 1: print('- Los numeros impares son:', num) print() for num in list: if num % 2 == 0: print('- Los numeros pares son:', num) print() print('Ejercicio de clase # 2') print() print('Mostrar la suma y la multiplicacion de los numeros mostrados es:[1,3,4,66,55,4]') list = [1, 3, 4, 66, 55, 4] sum = 0 for num in list: sum = sum + num print('- La suma es =', sum) mult = 1 for num in list: mult = mult * num print('- La multiplicacion es =', mult)
# -------------- # Code starts here class_1 = ['Geoffrey Hinton', 'Andrew Ng', 'Sebastian Raschka', 'Yoshua Bengio'] class_2 = ['Hilary Mason','Carla Gentry','Corinna Cortes'] new_class = (class_1 + class_2) print (new_class) new_class.append('Peter Warden') print(new_class) new_class.remove('Carla Gentry') print(new_class) # Code ends here # -------------- # Code starts here courses = {'Math': 65, 'English': 70, 'History': 80,'French': 70, 'Science': 60 } print(courses.values()) total = (sum(courses.values())) print(total) percentage = ((total/500) * 100) print(percentage) # Code ends here # -------------- # Code starts here mathematics = {'Geoffrey Hinton': 78, 'Andrew Ng': 95,'Sebastian Raschka': 65,'Yoshua Benjio': 50,'Hilary Mason': 70,'Corinna Cortes': 66, 'Peter Warden': 75} topper = max(mathematics,key = mathematics.get) print (topper) # Code ends here # -------------- # Given string topper = 'andrew ng' # Code starts here first_name = (topper.split()[0]) last_name = (topper.split()[1]) full_name = last_name + " " + first_name certificate_name = (full_name.upper()) print (certificate_name) # Code ends here
class_1 = ['Geoffrey Hinton', 'Andrew Ng', 'Sebastian Raschka', 'Yoshua Bengio'] class_2 = ['Hilary Mason', 'Carla Gentry', 'Corinna Cortes'] new_class = class_1 + class_2 print(new_class) new_class.append('Peter Warden') print(new_class) new_class.remove('Carla Gentry') print(new_class) courses = {'Math': 65, 'English': 70, 'History': 80, 'French': 70, 'Science': 60} print(courses.values()) total = sum(courses.values()) print(total) percentage = total / 500 * 100 print(percentage) mathematics = {'Geoffrey Hinton': 78, 'Andrew Ng': 95, 'Sebastian Raschka': 65, 'Yoshua Benjio': 50, 'Hilary Mason': 70, 'Corinna Cortes': 66, 'Peter Warden': 75} topper = max(mathematics, key=mathematics.get) print(topper) topper = 'andrew ng' first_name = topper.split()[0] last_name = topper.split()[1] full_name = last_name + ' ' + first_name certificate_name = full_name.upper() print(certificate_name)
def increment(x, by=1): return x + by def tokenize(sentence): return pos_tag(word_tokenize(sentence)) def synsets(sentence): tokens = tokenize(sentence) return remove_none(tagged_to_synset(*t) for t in tokens) def remove_none(xs): return [x for x in xs if x is not None] def score(synset, synsets): return max(synset.path_similarity(s) for s in synsets) def sentence_similarity(sentence1, sentence2): synsets1 = synsets(sentence1) synsets2 = synsets(sentence2) scores = remove_none(score(s, synsets2) for s in synsets1) return float(sum(scores)) / len(scores)
def increment(x, by=1): return x + by def tokenize(sentence): return pos_tag(word_tokenize(sentence)) def synsets(sentence): tokens = tokenize(sentence) return remove_none((tagged_to_synset(*t) for t in tokens)) def remove_none(xs): return [x for x in xs if x is not None] def score(synset, synsets): return max((synset.path_similarity(s) for s in synsets)) def sentence_similarity(sentence1, sentence2): synsets1 = synsets(sentence1) synsets2 = synsets(sentence2) scores = remove_none((score(s, synsets2) for s in synsets1)) return float(sum(scores)) / len(scores)
class Solution(object): def bruteforce(self, strs): """ :type strs: List[str] :rtype: str """ result = '' if len(strs) == 0: return '' i = 0 d = {i: len(v) for i,v in enumerate(strs)} count = min(d.values()) for i in range(1, count+1): prefix = strs[0][:i] for s in strs: if s[:i] != prefix: return result result = prefix return result def optimized(self, strs): result = "" for n in zip(*strs): if (len(set(n))) == 1: result = result + n[0] else: return result return result s = Solution() input_1 = ["flower","flow","flight"] input_2 = ["dog","racecar","car"] print(f'Input 1: {input_1}') print(f'Bruteforce Solution: \n{s.bruteforce(input_1)}') print(f'Optimized Solution: \n{s.bruteforce(input_1)}') print(f'\nInput 2: {input_2}') print(f'Bruteforce Solution: \n{s.bruteforce(input_2)}') print(f'Optimized Solution: \n{s.bruteforce(input_2)}')
class Solution(object): def bruteforce(self, strs): """ :type strs: List[str] :rtype: str """ result = '' if len(strs) == 0: return '' i = 0 d = {i: len(v) for (i, v) in enumerate(strs)} count = min(d.values()) for i in range(1, count + 1): prefix = strs[0][:i] for s in strs: if s[:i] != prefix: return result result = prefix return result def optimized(self, strs): result = '' for n in zip(*strs): if len(set(n)) == 1: result = result + n[0] else: return result return result s = solution() input_1 = ['flower', 'flow', 'flight'] input_2 = ['dog', 'racecar', 'car'] print(f'Input 1: {input_1}') print(f'Bruteforce Solution: \n{s.bruteforce(input_1)}') print(f'Optimized Solution: \n{s.bruteforce(input_1)}') print(f'\nInput 2: {input_2}') print(f'Bruteforce Solution: \n{s.bruteforce(input_2)}') print(f'Optimized Solution: \n{s.bruteforce(input_2)}')
s1 = input() s2 = input() pairs = set() for i in range(len(s2) - 1): pairs.add(s2[i:i + 2]) ans = 0 for i in range(len(s1) - 1): if s1[i: i + 2] in pairs: ans += 1 print(ans)
s1 = input() s2 = input() pairs = set() for i in range(len(s2) - 1): pairs.add(s2[i:i + 2]) ans = 0 for i in range(len(s1) - 1): if s1[i:i + 2] in pairs: ans += 1 print(ans)
# makes a BST from an array class Node: def __init__(self, value, left, right): self.value = value self.left = left self.right = right def bst_from_array(array, start, end): if start >= end: return None mid = (start + end) // 2 value = array[mid] left = bst_from_array(array, start, mid) right = bst_from_array(array, mid + 1, end) return Node(value, left, right) def print_bst(start): queue = [(start, 0)] print('PRINTING') while queue: node, depth = queue.pop() print(' ' * depth + str(node.value)) if left := node.left: queue.append((left, depth + 1)) if right := node.right: queue.append((right, depth + 1)) def make_bst_and_print(num: int): bst = bst_from_array(list(range(num)), 0, num) print_bst(bst)
class Node: def __init__(self, value, left, right): self.value = value self.left = left self.right = right def bst_from_array(array, start, end): if start >= end: return None mid = (start + end) // 2 value = array[mid] left = bst_from_array(array, start, mid) right = bst_from_array(array, mid + 1, end) return node(value, left, right) def print_bst(start): queue = [(start, 0)] print('PRINTING') while queue: (node, depth) = queue.pop() print(' ' * depth + str(node.value)) if (left := node.left): queue.append((left, depth + 1)) if (right := node.right): queue.append((right, depth + 1)) def make_bst_and_print(num: int): bst = bst_from_array(list(range(num)), 0, num) print_bst(bst)
# Author: Tianyao (Till) Chen # Email: tillchen417@gmail.com # File: settings.py class Settings: """The class to store all the settings for Alien Invasion.""" def __init__(self): """Initialize the static settings.""" # Screen settings. self.screen_width = 1200 self.screen_height = 800 self.bg_color = (230, 230, 230) # Ship settings self.ship_limit = 3 # Bullet settings. self.bullet_width = 3 self.bullet_height = 15 self.bullet_color = (60, 60, 60) self.bullets_allowed = 3 # Alien settings. self.fleet_drop_speed = 15 # Speedup scale self.speedup_scale = 1.5 self.init_dynamic_settings() def init_dynamic_settings(self): """Initialize the settings that might change during the game.""" self.ship_speed = 6 self.bullet_speed = 10.0 self.alien_speed = 3.0 self.fleet_direction = 1 # 1 means right, -1 means left # Scoring self.alien_points = 50 def speed_up(self): """Speed up the game.""" self.ship_speed *= self.speedup_scale self.alien_speed *= self.speedup_scale self.bullet_speed *= self.speedup_scale self.alien_points *= self.speedup_scale
class Settings: """The class to store all the settings for Alien Invasion.""" def __init__(self): """Initialize the static settings.""" self.screen_width = 1200 self.screen_height = 800 self.bg_color = (230, 230, 230) self.ship_limit = 3 self.bullet_width = 3 self.bullet_height = 15 self.bullet_color = (60, 60, 60) self.bullets_allowed = 3 self.fleet_drop_speed = 15 self.speedup_scale = 1.5 self.init_dynamic_settings() def init_dynamic_settings(self): """Initialize the settings that might change during the game.""" self.ship_speed = 6 self.bullet_speed = 10.0 self.alien_speed = 3.0 self.fleet_direction = 1 self.alien_points = 50 def speed_up(self): """Speed up the game.""" self.ship_speed *= self.speedup_scale self.alien_speed *= self.speedup_scale self.bullet_speed *= self.speedup_scale self.alien_points *= self.speedup_scale
''' Created on 24.05.2011 @author: Sergey Khayrulin ''' class Position(object): ''' Definition for position of point ''' def __init__(self,proximal_point=None,distal_point=None): ''' Constructor ''' self.distal_point = distal_point self.proximal_point = proximal_point #if null than = distal point of parent
""" Created on 24.05.2011 @author: Sergey Khayrulin """ class Position(object): """ Definition for position of point """ def __init__(self, proximal_point=None, distal_point=None): """ Constructor """ self.distal_point = distal_point self.proximal_point = proximal_point
sum_counter = 0 # 600851475143 number = 600851475143 for i in range(2, 10000): for j in range(1, i + 1): if i % j == 0: sum_counter += 1 if sum_counter == 2: # print(i, end= " ") if number % i == 0: print() print(f"prime factors for {number} : {i}", end=" ") print() sum_counter = 0 print()
sum_counter = 0 number = 600851475143 for i in range(2, 10000): for j in range(1, i + 1): if i % j == 0: sum_counter += 1 if sum_counter == 2: if number % i == 0: print() print(f'prime factors for {number} : {i}', end=' ') print() sum_counter = 0 print()
class MemoryItem: def __init__(self, observation, action, next_observation, reward, done, info): self.observation = observation self.action = action self.next_observation = next_observation self.reward = reward self.done = done self.info = info
class Memoryitem: def __init__(self, observation, action, next_observation, reward, done, info): self.observation = observation self.action = action self.next_observation = next_observation self.reward = reward self.done = done self.info = info
class Queen: def __init__(self, pos, visited): self.pos = pos self.visited = visited
class Queen: def __init__(self, pos, visited): self.pos = pos self.visited = visited
VALID_USER = { "username": "Test", "email": "test@email.com", "password": "Test@123", "password2": "Test@123", }
valid_user = {'username': 'Test', 'email': 'test@email.com', 'password': 'Test@123', 'password2': 'Test@123'}
class Solution: def minJumps(self, arr, n): jumps = [-1] * n jumps[0] = 0 front = 0 rear = 0 while rear < n : remaining_jumps = arr[rear] - (front - rear) while remaining_jumps > 0 and front < n-1: front += 1 j = jumps[rear] jumps[front] = j + 1 remaining_jumps -= 1 rear += 1 if rear > front : break return jumps[n-1] if __name__ == '__main__': T=int(input()) for i in range(T): n = int(input()) Arr = [int(x) for x in input().split()] ob = Solution() ans = ob.minJumps(Arr,n) print(ans)
class Solution: def min_jumps(self, arr, n): jumps = [-1] * n jumps[0] = 0 front = 0 rear = 0 while rear < n: remaining_jumps = arr[rear] - (front - rear) while remaining_jumps > 0 and front < n - 1: front += 1 j = jumps[rear] jumps[front] = j + 1 remaining_jumps -= 1 rear += 1 if rear > front: break return jumps[n - 1] if __name__ == '__main__': t = int(input()) for i in range(T): n = int(input()) arr = [int(x) for x in input().split()] ob = solution() ans = ob.minJumps(Arr, n) print(ans)
# # Copyright (c) 2013-2014, PagerDuty, Inc. <info@pagerduty.com> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # class MockQueue: def __init__( self, event=None, status=None, detailed_snapshot=None, cleanup_age_secs=None ): self.event = event self.status = status self.expected_detailed_snapshot = detailed_snapshot self.expected_cleanup_age = cleanup_age_secs self.consume_code = None self.cleaned_up = False def get_stats(self, detailed_snapshot=False): if detailed_snapshot == self.expected_detailed_snapshot: return self.status raise Exception( "Received detailed_snapshot=%s; expected detailed_snapshot=%s" % (detailed_snapshot, self.expected_detailed_snapshot) ) def flush(self, consume_func, stop_check_func): self.consume_code = consume_func(self.event, self.event) def cleanup(self, before): if before == self.expected_cleanup_age: self.cleaned_up = True else: raise Exception( "Received cleanup_before=%s, expected=%s" % (before, self.expected_cleanup_age) )
class Mockqueue: def __init__(self, event=None, status=None, detailed_snapshot=None, cleanup_age_secs=None): self.event = event self.status = status self.expected_detailed_snapshot = detailed_snapshot self.expected_cleanup_age = cleanup_age_secs self.consume_code = None self.cleaned_up = False def get_stats(self, detailed_snapshot=False): if detailed_snapshot == self.expected_detailed_snapshot: return self.status raise exception('Received detailed_snapshot=%s; expected detailed_snapshot=%s' % (detailed_snapshot, self.expected_detailed_snapshot)) def flush(self, consume_func, stop_check_func): self.consume_code = consume_func(self.event, self.event) def cleanup(self, before): if before == self.expected_cleanup_age: self.cleaned_up = True else: raise exception('Received cleanup_before=%s, expected=%s' % (before, self.expected_cleanup_age))
# 3. File Writer # Create a program that creates a file called my_first_file.txt. # In that file write a single line with the content: 'I just created my first file!' # file = open("my_first_file.txt", "w") # # file.write('I just created my first file!') # # file.close() # With manager is better than just open as it automatically closes file with open("my_first_file.txt", "w") as file: file.write('I just created my first file!')
with open('my_first_file.txt', 'w') as file: file.write('I just created my first file!')
# 4-5. Summing a Million: Make a list of the numbers from one to one million, and then use min() and max() # to make sure your list actually starts at one and ends at one million. # Also, use the sum() function to see how quickly Python can add a million numbers. numbers = list(range(1, 1000001)) print(min(numbers)) print(max(numbers)) print(sum(numbers))
numbers = list(range(1, 1000001)) print(min(numbers)) print(max(numbers)) print(sum(numbers))
def print_formatted(number): # your code goes here if number in range (1,100): l = len(bin(number)[2:]) for i in range (1,n+1): decimal = str(i).rjust(l) octal = str(oct(i)[2:]).rjust(l) hexadecimal = hex(i)[2:].rjust(l) binary = bin(i)[2:].rjust(l) print (decimal + ' ' + octal + ' ' + hexadecimal.upper() + ' ' + binary) else: print ("Input not valid. Retry") if __name__ == '__main__': n = int(input()) print_formatted(n)
def print_formatted(number): if number in range(1, 100): l = len(bin(number)[2:]) for i in range(1, n + 1): decimal = str(i).rjust(l) octal = str(oct(i)[2:]).rjust(l) hexadecimal = hex(i)[2:].rjust(l) binary = bin(i)[2:].rjust(l) print(decimal + ' ' + octal + ' ' + hexadecimal.upper() + ' ' + binary) else: print('Input not valid. Retry') if __name__ == '__main__': n = int(input()) print_formatted(n)
def get_city_year(p0, perc, delta, p): years = 1 curr = p0 + p0 * (0.01 * perc) + delta if curr <= p0: # remember == would imply stagnation return -1 else: while curr < p: curr = curr + curr * (0.01 * perc) + delta years += 1 return years print(get_city_year(1000, 2, -50, 5000)) print(get_city_year(1500, 5, 100, 5000)) print(get_city_year(1500000, 2.5, 10000, 2000000)) print(get_city_year(1000, 2, -20, 2000))
def get_city_year(p0, perc, delta, p): years = 1 curr = p0 + p0 * (0.01 * perc) + delta if curr <= p0: return -1 else: while curr < p: curr = curr + curr * (0.01 * perc) + delta years += 1 return years print(get_city_year(1000, 2, -50, 5000)) print(get_city_year(1500, 5, 100, 5000)) print(get_city_year(1500000, 2.5, 10000, 2000000)) print(get_city_year(1000, 2, -20, 2000))
#First Example - uncomment lines or change values to test the code phone_balance = 7.62 bank_balance = 104.39 #phone_balance = 12.34 #bank_balance = 25 if phone_balance < 10: phone_balance += 10 bank_balance -= 10 print(phone_balance) print(bank_balance) #Second Example #change the number to experiment! number = 145346334 #number = 5 #3 sir if number % 2 == 0: print("The number " + str(number) + " is even.") else: print("The number " + str(number) + " is odd.") #Third Example #change the age to experiment with the pricing age = 35 #set the age limits for bus fares free_up_to_age = 4 child_up_to_age = 18 senior_from_age = 65 #set bus fares concession_ticket = 1.25 adult_ticket = 2.50 #ticket price logic if age <= free_up_to_age: ticket_price = 0 elif age <= child_up_to_age: ticket_price = concession_ticket elif age >= senior_from_age: ticket_price = concession_ticket else: ticket_price = adult_ticket message = "Somebody who is {} years old will pay ${} to ride the bus.".format(age,ticket_price) print(message)
phone_balance = 7.62 bank_balance = 104.39 if phone_balance < 10: phone_balance += 10 bank_balance -= 10 print(phone_balance) print(bank_balance) number = 145346334 if number % 2 == 0: print('The number ' + str(number) + ' is even.') else: print('The number ' + str(number) + ' is odd.') age = 35 free_up_to_age = 4 child_up_to_age = 18 senior_from_age = 65 concession_ticket = 1.25 adult_ticket = 2.5 if age <= free_up_to_age: ticket_price = 0 elif age <= child_up_to_age: ticket_price = concession_ticket elif age >= senior_from_age: ticket_price = concession_ticket else: ticket_price = adult_ticket message = 'Somebody who is {} years old will pay ${} to ride the bus.'.format(age, ticket_price) print(message)
print('pypypy') print('pypypy111') print('pycharm1111') print('pycharm222') print('pypypy222') print('pycharm3333') print('pypypy333') hheheheh # zuishuaidezishi ssshhhhhhh
print('pypypy') print('pypypy111') print('pycharm1111') print('pycharm222') print('pypypy222') print('pycharm3333') print('pypypy333') hheheheh ssshhhhhhh
# Design # a # stack # that # supports # push, pop, top, and retrieving # the # minimum # element in constant # time. # # Implement # the # MinStack # # # class: # # # MinStack() # initializes # the # stack # object. # void # push(int # val) pushes # the # element # val # onto # the # stack. # void # pop() # removes # the # element # on # the # top # of # the # stack. # int # top() # gets # the # top # element # of # the # stack. # int # getMin() # retrieves # the # minimum # element in the # stack. # # Example # 1: # # Input # ["MinStack", "push", "push", "push", "getMin", "pop", "top", "getMin"] # [[], [-2], [0], [-3], [], [], [], []] # # Output # [null, null, null, null, -3, null, 0, -2] # # Explanation # MinStack # minStack = new # MinStack(); # minStack.push(-2); # minStack.push(0); # minStack.push(-3); # minStack.getMin(); // return -3 # minStack.pop(); # minStack.top(); // return 0 # minStack.getMin(); // return -2 # # Constraints: # # -231 <= val <= 231 - 1 # Methods # pop, top and getMin # operations # will # always # be # called # on # non - empty # stacks. # At # most # 3 * 104 # calls # will # be # made # to # push, pop, top, and getMin. class MinStack: def __init__(self): self.stack = [] def push(self, val: int) -> None: self.stack.append(val) def pop(self) -> None: if (len(self.stack) > 0): self.stack.pop() def top(self) -> int: if (len(self.stack) > 0): return self.stack[-1] def getMin(self) -> int: if (len(self.stack) > 0): return min(self.stack) # Your MinStack object will be instantiated and called as such: # obj = MinStack() # obj.push(val) # obj.pop() # param_3 = obj.top() # param_4 = obj.getMin()
class Minstack: def __init__(self): self.stack = [] def push(self, val: int) -> None: self.stack.append(val) def pop(self) -> None: if len(self.stack) > 0: self.stack.pop() def top(self) -> int: if len(self.stack) > 0: return self.stack[-1] def get_min(self) -> int: if len(self.stack) > 0: return min(self.stack)
def baseline_opt_default_args(prob_type): defaults = {} defaults['simpleVar'] = 100 defaults['simpleIneq'] = 50 defaults['simpleEq'] = 50 defaults['simpleEx'] = 10000 defaults['nonconvexVar'] = 100 defaults['nonconvexIneq'] = 50 defaults['nonconvexEq'] = 50 defaults['nonconvexEx'] = 10000 if prob_type == 'simple': defaults['corrEps'] = 1e-4 elif prob_type == 'nonconvex': defaults['corrEps'] = 1e-4 elif 'acopf' in prob_type: defaults['corrEps'] = 1e-4 return defaults def baseline_nn_default_args(prob_type): defaults = {} defaults['simpleVar'] = 100 defaults['simpleIneq'] = 50 defaults['simpleEq'] = 50 defaults['simpleEx'] = 10000 defaults['nonconvexVar'] = 100 defaults['nonconvexIneq'] = 50 defaults['nonconvexEq'] = 50 defaults['nonconvexEx'] = 10000 defaults['saveAllStats'] = True defaults['resultsSaveFreq'] = 50 if prob_type == 'simple': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 1e-4 defaults['hiddenSize'] = 200 defaults['softWeight'] = 100 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 1e-4 defaults['corrLr'] = 1e-7 defaults['corrMomentum'] = 0.5 elif prob_type == 'nonconvex': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 1e-4 defaults['hiddenSize'] = 200 defaults['softWeight'] = 100 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 1e-4 defaults['corrLr'] = 1e-7 defaults['corrMomentum'] = 0.5 elif 'acopf' in prob_type: defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 1e-3 defaults['hiddenSize'] = 200 defaults['softWeight'] = 100 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrTestMaxSteps'] = 5 defaults['corrEps'] = 1e-4 defaults['corrLr'] = 1e-5 defaults['corrMomentum'] = 0.5 else: raise NotImplementedError return defaults def baseline_eq_nn_default_args(prob_type): defaults = {} defaults['simpleVar'] = 100 defaults['simpleIneq'] = 50 defaults['simpleEq'] = 50 defaults['simpleEx'] = 10000 defaults['nonconvexVar'] = 100 defaults['nonconvexIneq'] = 50 defaults['nonconvexEq'] = 50 defaults['nonconvexEx'] = 10000 defaults['saveAllStats'] = True defaults['resultsSaveFreq'] = 50 if prob_type == 'simple': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 1e-4 defaults['hiddenSize'] = 200 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrMode'] = 'partial' defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 1e-4 defaults['corrLr'] = 1e-7 defaults['corrMomentum'] = 0.5 elif prob_type == 'nonconvex': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 1e-4 defaults['hiddenSize'] = 200 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrMode'] = 'partial' defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 1e-4 defaults['corrLr'] = 1e-7 defaults['corrMomentum'] = 0.5 elif 'acopf' in prob_type: defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 1e-3 defaults['hiddenSize'] = 200 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrMode'] = 'full' defaults['corrTestMaxSteps'] = 5 defaults['corrEps'] = 1e-4 defaults['corrLr'] = 1e-5 defaults['corrMomentum'] = 0.5 else: raise NotImplementedError return defaults def method_default_args(prob_type): defaults = {} defaults['simpleVar'] = 100 defaults['simpleIneq'] = 50 defaults['simpleEq'] = 50 defaults['simpleEx'] = 10000 defaults['nonconvexVar'] = 100 defaults['nonconvexIneq'] = 50 defaults['nonconvexEq'] = 50 defaults['nonconvexEx'] = 10000 defaults['saveAllStats'] = True defaults['resultsSaveFreq'] = 50 if prob_type == 'simple': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 1e-4 defaults['hiddenSize'] = 200 defaults['softWeight'] = 10 # use 100 if useCompl=False defaults['softWeightEqFrac'] = 0.5 defaults['useCompl'] = True defaults['useTrainCorr'] = True defaults['useTestCorr'] = True defaults['corrMode'] = 'partial' # use 'full' if useCompl=False defaults['corrTrainSteps'] = 10 defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 1e-4 defaults['corrLr'] = 1e-7 defaults['corrMomentum'] = 0.5 elif prob_type == 'nonconvex': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 1e-4 defaults['hiddenSize'] = 200 defaults['softWeight'] = 10 # use 100 if useCompl=False defaults['softWeightEqFrac'] = 0.5 defaults['useCompl'] = True defaults['useTrainCorr'] = True defaults['useTestCorr'] = True defaults['corrMode'] = 'partial' # use 'full' if useCompl=False defaults['corrTrainSteps'] = 10 defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 1e-4 defaults['corrLr'] = 1e-7 defaults['corrMomentum'] = 0.5 elif 'acopf' in prob_type: defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 1e-3 defaults['hiddenSize'] = 200 defaults['softWeight'] = 10 # use 100 if useCompl=False defaults['softWeightEqFrac'] = 0.5 defaults['useCompl'] = True defaults['useTrainCorr'] = True defaults['useTestCorr'] = True defaults['corrMode'] = 'partial' # use 'full' if useCompl=False defaults['corrTrainSteps'] = 5 defaults['corrTestMaxSteps'] = 5 defaults['corrEps'] = 1e-4 defaults['corrLr'] = 1e-4 # use 1e-5 if useCompl=False defaults['corrMomentum'] = 0.5 else: raise NotImplementedError return defaults
def baseline_opt_default_args(prob_type): defaults = {} defaults['simpleVar'] = 100 defaults['simpleIneq'] = 50 defaults['simpleEq'] = 50 defaults['simpleEx'] = 10000 defaults['nonconvexVar'] = 100 defaults['nonconvexIneq'] = 50 defaults['nonconvexEq'] = 50 defaults['nonconvexEx'] = 10000 if prob_type == 'simple': defaults['corrEps'] = 0.0001 elif prob_type == 'nonconvex': defaults['corrEps'] = 0.0001 elif 'acopf' in prob_type: defaults['corrEps'] = 0.0001 return defaults def baseline_nn_default_args(prob_type): defaults = {} defaults['simpleVar'] = 100 defaults['simpleIneq'] = 50 defaults['simpleEq'] = 50 defaults['simpleEx'] = 10000 defaults['nonconvexVar'] = 100 defaults['nonconvexIneq'] = 50 defaults['nonconvexEq'] = 50 defaults['nonconvexEx'] = 10000 defaults['saveAllStats'] = True defaults['resultsSaveFreq'] = 50 if prob_type == 'simple': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 0.0001 defaults['hiddenSize'] = 200 defaults['softWeight'] = 100 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 0.0001 defaults['corrLr'] = 1e-07 defaults['corrMomentum'] = 0.5 elif prob_type == 'nonconvex': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 0.0001 defaults['hiddenSize'] = 200 defaults['softWeight'] = 100 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 0.0001 defaults['corrLr'] = 1e-07 defaults['corrMomentum'] = 0.5 elif 'acopf' in prob_type: defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 0.001 defaults['hiddenSize'] = 200 defaults['softWeight'] = 100 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrTestMaxSteps'] = 5 defaults['corrEps'] = 0.0001 defaults['corrLr'] = 1e-05 defaults['corrMomentum'] = 0.5 else: raise NotImplementedError return defaults def baseline_eq_nn_default_args(prob_type): defaults = {} defaults['simpleVar'] = 100 defaults['simpleIneq'] = 50 defaults['simpleEq'] = 50 defaults['simpleEx'] = 10000 defaults['nonconvexVar'] = 100 defaults['nonconvexIneq'] = 50 defaults['nonconvexEq'] = 50 defaults['nonconvexEx'] = 10000 defaults['saveAllStats'] = True defaults['resultsSaveFreq'] = 50 if prob_type == 'simple': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 0.0001 defaults['hiddenSize'] = 200 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrMode'] = 'partial' defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 0.0001 defaults['corrLr'] = 1e-07 defaults['corrMomentum'] = 0.5 elif prob_type == 'nonconvex': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 0.0001 defaults['hiddenSize'] = 200 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrMode'] = 'partial' defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 0.0001 defaults['corrLr'] = 1e-07 defaults['corrMomentum'] = 0.5 elif 'acopf' in prob_type: defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 0.001 defaults['hiddenSize'] = 200 defaults['softWeightEqFrac'] = 0.5 defaults['useTestCorr'] = True defaults['corrMode'] = 'full' defaults['corrTestMaxSteps'] = 5 defaults['corrEps'] = 0.0001 defaults['corrLr'] = 1e-05 defaults['corrMomentum'] = 0.5 else: raise NotImplementedError return defaults def method_default_args(prob_type): defaults = {} defaults['simpleVar'] = 100 defaults['simpleIneq'] = 50 defaults['simpleEq'] = 50 defaults['simpleEx'] = 10000 defaults['nonconvexVar'] = 100 defaults['nonconvexIneq'] = 50 defaults['nonconvexEq'] = 50 defaults['nonconvexEx'] = 10000 defaults['saveAllStats'] = True defaults['resultsSaveFreq'] = 50 if prob_type == 'simple': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 0.0001 defaults['hiddenSize'] = 200 defaults['softWeight'] = 10 defaults['softWeightEqFrac'] = 0.5 defaults['useCompl'] = True defaults['useTrainCorr'] = True defaults['useTestCorr'] = True defaults['corrMode'] = 'partial' defaults['corrTrainSteps'] = 10 defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 0.0001 defaults['corrLr'] = 1e-07 defaults['corrMomentum'] = 0.5 elif prob_type == 'nonconvex': defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 0.0001 defaults['hiddenSize'] = 200 defaults['softWeight'] = 10 defaults['softWeightEqFrac'] = 0.5 defaults['useCompl'] = True defaults['useTrainCorr'] = True defaults['useTestCorr'] = True defaults['corrMode'] = 'partial' defaults['corrTrainSteps'] = 10 defaults['corrTestMaxSteps'] = 10 defaults['corrEps'] = 0.0001 defaults['corrLr'] = 1e-07 defaults['corrMomentum'] = 0.5 elif 'acopf' in prob_type: defaults['epochs'] = 1000 defaults['batchSize'] = 200 defaults['lr'] = 0.001 defaults['hiddenSize'] = 200 defaults['softWeight'] = 10 defaults['softWeightEqFrac'] = 0.5 defaults['useCompl'] = True defaults['useTrainCorr'] = True defaults['useTestCorr'] = True defaults['corrMode'] = 'partial' defaults['corrTrainSteps'] = 5 defaults['corrTestMaxSteps'] = 5 defaults['corrEps'] = 0.0001 defaults['corrLr'] = 0.0001 defaults['corrMomentum'] = 0.5 else: raise NotImplementedError return defaults
class S1C1(): @staticmethod def ret_true(): return True @staticmethod def hex_to_base64(hx): return "x"
class S1C1: @staticmethod def ret_true(): return True @staticmethod def hex_to_base64(hx): return 'x'
def get_client_ip(request): x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR') if x_forwarded_for: ip = x_forwarded_for.split(',')[0] else: ip = request.META.get('REMOTE_ADDR') return ip def calc_hba1c(value): """ Calculate the HbA1c from the given average blood glucose value. This formula is the same one used by Accu-Chek: https://www.accu-chek.com/us/glucose-monitoring/a1c-calculator.html# """ if value: return ((46.7 + value) / 28.7) else: return 0 def round_value(value): """ Round the given value to 1 decimal place. If the value is 0 or None, then simply return 0. """ if value: return round(float(value), 1) else: return 0 def percent(part, whole): """ Get the percentage of the given values. If the the total/whole is 0 or none, then simply return 0. """ if whole: return round_value(100 * float(part)/float(whole)) else: return 0 def to_mmol(value): """ Convert a given value in mg/dL to mmol/L rounded to 1 decimal place. """ return round((float(value) / 18.018), 1) def to_mg(value): """ Convert a given value in mmol/L to mg/dL rounded to nearest integer. """ try: return int(round((float(value) * 18.018), 0)) except ValueError: # We're catching ValueError here as some browsers like Firefox won't # validate the input for us. We're returning the value entered as this # will be passed in to the Django validator which will return the # validation error message. return value def glucose_by_unit_setting(user, value): """ Return the glucose value based on the unit setting. Glucose values are stored in mg/dL in the database. If a user's setting is set to mmol/L, convert the value. """ if user.settings.glucose_unit.name == 'mmol/L': return to_mmol(value) else: return value
def get_client_ip(request): x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR') if x_forwarded_for: ip = x_forwarded_for.split(',')[0] else: ip = request.META.get('REMOTE_ADDR') return ip def calc_hba1c(value): """ Calculate the HbA1c from the given average blood glucose value. This formula is the same one used by Accu-Chek: https://www.accu-chek.com/us/glucose-monitoring/a1c-calculator.html# """ if value: return (46.7 + value) / 28.7 else: return 0 def round_value(value): """ Round the given value to 1 decimal place. If the value is 0 or None, then simply return 0. """ if value: return round(float(value), 1) else: return 0 def percent(part, whole): """ Get the percentage of the given values. If the the total/whole is 0 or none, then simply return 0. """ if whole: return round_value(100 * float(part) / float(whole)) else: return 0 def to_mmol(value): """ Convert a given value in mg/dL to mmol/L rounded to 1 decimal place. """ return round(float(value) / 18.018, 1) def to_mg(value): """ Convert a given value in mmol/L to mg/dL rounded to nearest integer. """ try: return int(round(float(value) * 18.018, 0)) except ValueError: return value def glucose_by_unit_setting(user, value): """ Return the glucose value based on the unit setting. Glucose values are stored in mg/dL in the database. If a user's setting is set to mmol/L, convert the value. """ if user.settings.glucose_unit.name == 'mmol/L': return to_mmol(value) else: return value
s = input() answer = len(s) k = s.count('a') for i in range(len(s)): cnt = 0 for j in range(k): if s[(i+j)%len(s)] == 'b': cnt+=1 if cnt < answer: answer = cnt print(answer)
s = input() answer = len(s) k = s.count('a') for i in range(len(s)): cnt = 0 for j in range(k): if s[(i + j) % len(s)] == 'b': cnt += 1 if cnt < answer: answer = cnt print(answer)
def math(): i_put = input() if len(i_put) <= 140: print('TWEET') else: print('MUTE') if __name__ == '__main__': math()
def math(): i_put = input() if len(i_put) <= 140: print('TWEET') else: print('MUTE') if __name__ == '__main__': math()
''' A better way to implement the fibonacci series T(n) = 2n + 2 ''' def fibonacci(n): # Taking 1st two fibonacci nubers as 0 and 1 FibArray = [0, 1] while len(FibArray) < n + 1: FibArray.append(0) if n <= 1: return n else: if FibArray[n - 1] == 0: FibArray[n - 1] = fibonacci(n - 1) if FibArray[n - 2] == 0: FibArray[n - 2] = fibonacci(n - 2) FibArray[n] = FibArray[n - 2] + FibArray[n - 1] return FibArray[n] print(fibonacci(9))
""" A better way to implement the fibonacci series T(n) = 2n + 2 """ def fibonacci(n): fib_array = [0, 1] while len(FibArray) < n + 1: FibArray.append(0) if n <= 1: return n else: if FibArray[n - 1] == 0: FibArray[n - 1] = fibonacci(n - 1) if FibArray[n - 2] == 0: FibArray[n - 2] = fibonacci(n - 2) FibArray[n] = FibArray[n - 2] + FibArray[n - 1] return FibArray[n] print(fibonacci(9))
eval_cfgs = [ dict( metrics=dict(type='OPE'), dataset=dict(type='OTB100'), hypers=dict( epoch=list(range(31, 51, 2)), window=dict( weight=[0.200, 0.300, 0.400] ) ) ), ]
eval_cfgs = [dict(metrics=dict(type='OPE'), dataset=dict(type='OTB100'), hypers=dict(epoch=list(range(31, 51, 2)), window=dict(weight=[0.2, 0.3, 0.4])))]
class Solution: # Enumerate shortest length (Accepted), O(n*l) time, O(l) space (n = len(strs), l = len(shortest str)) def longestCommonPrefix(self, strs: List[str]) -> str: min_len = min(len(s) for s in strs) res = "" for i in range(min_len): c = strs[0][i] for s in strs: if s[i] != c: return res res += c return res # Enumerate shortest (Top Voted), O(n*l) time, O(l) space def longestCommonPrefix(self, strs: List[str]) -> str: if not strs: return "" shortest = min(strs, key=len) for i, ch in enumerate(shortest): for other in strs: if other[i] != ch: return shortest[:i] return shortest
class Solution: def longest_common_prefix(self, strs: List[str]) -> str: min_len = min((len(s) for s in strs)) res = '' for i in range(min_len): c = strs[0][i] for s in strs: if s[i] != c: return res res += c return res def longest_common_prefix(self, strs: List[str]) -> str: if not strs: return '' shortest = min(strs, key=len) for (i, ch) in enumerate(shortest): for other in strs: if other[i] != ch: return shortest[:i] return shortest
n = int(input("Please enter the size of the table: ")) # print the first row (header) print(" ", end = "") for i in range(1, n + 1): print(" ", i, end = "") print() # new line # print the actual table for row in range(1, n + 1): # print row number at the beginning of each row print(" ", row, end = "") # print Xs if column number divides row number for col in range(1, n + 1): if col % row == 0: print(" X", end = "") else: print(" ", end = "") print() # new line at the end of each line
n = int(input('Please enter the size of the table: ')) print(' ', end='') for i in range(1, n + 1): print(' ', i, end='') print() for row in range(1, n + 1): print(' ', row, end='') for col in range(1, n + 1): if col % row == 0: print(' X', end='') else: print(' ', end='') print()
# Copyright 2018 The Fuchsia Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # DO NOT MANUALLY EDIT! # Generated by //scripts/sdk/bazel/generate.py. load("@io_bazel_rules_dart//dart/build_rules/internal:pub.bzl", "pub_repository") def setup_dart(): pub_repository( name = "vendor_meta", output = ".", package = "meta", version = "1.1.6", pub_deps = [], ) pub_repository( name = "vendor_logging", output = ".", package = "logging", version = "0.11.3+2", pub_deps = [], ) pub_repository( name = "vendor_uuid", output = ".", package = "uuid", version = "1.0.3", pub_deps = [], )
load('@io_bazel_rules_dart//dart/build_rules/internal:pub.bzl', 'pub_repository') def setup_dart(): pub_repository(name='vendor_meta', output='.', package='meta', version='1.1.6', pub_deps=[]) pub_repository(name='vendor_logging', output='.', package='logging', version='0.11.3+2', pub_deps=[]) pub_repository(name='vendor_uuid', output='.', package='uuid', version='1.0.3', pub_deps=[])
def split(list): n = len(list) if n == 1: return list, [] middle = int(n/2) return list[0:middle], list[middle:n] def merge_sort(list): if len(list) == 0: return list a, b = split(list) if a == list: return a else: new_a = merge_sort(a) new_b = merge_sort(b) a_len = len(new_a) b_len = len(new_b) a_iter = 0 b_iter = 0 result = [] while a_iter < a_len and b_iter < b_len: if new_a[a_iter] < new_b[b_iter]: result.append(new_a[a_iter]) a_iter += 1 else: result.append(new_b[b_iter]) b_iter += 1 while a_iter < a_len: result.append(new_a[a_iter]) a_iter += 1 while b_iter < b_len: result.append(new_b[b_iter]) b_iter += 1 return result
def split(list): n = len(list) if n == 1: return (list, []) middle = int(n / 2) return (list[0:middle], list[middle:n]) def merge_sort(list): if len(list) == 0: return list (a, b) = split(list) if a == list: return a else: new_a = merge_sort(a) new_b = merge_sort(b) a_len = len(new_a) b_len = len(new_b) a_iter = 0 b_iter = 0 result = [] while a_iter < a_len and b_iter < b_len: if new_a[a_iter] < new_b[b_iter]: result.append(new_a[a_iter]) a_iter += 1 else: result.append(new_b[b_iter]) b_iter += 1 while a_iter < a_len: result.append(new_a[a_iter]) a_iter += 1 while b_iter < b_len: result.append(new_b[b_iter]) b_iter += 1 return result
# Definition for a binary tree node. # class TreeNode: # def __init__(self, val=0, left=None, right=None): # self.val = val # self.left = left # self.right = right # We would need to create an map out of in-order array to access index easily class Solution: # Faster solution def buildTree(self, preorder: List[int], inorder: List[int]) -> TreeNode: def helper(in_left=0, in_right=len(inorder)): # Using index in preorder list. nonlocal pre_idx # base cases if in_right <= in_left: return None # Preorder element left set as the curr node. node_val = preorder[pre_idx] node = TreeNode(node_val) # We find where this element in in_idx = inorderMap[node_val] # Finally increase to the next index in preorder pre_idx += 1 # left node.left = helper(in_left, in_idx) # right node.right = helper(in_idx + 1, in_right) # Finally return the node as root for that level return node # Index element to set to zeroth element of preorder. pre_idx = 0 # Hashmap of value to index of inOrder array. inorderMap = {v:k for k,v in enumerate(inorder)} return helper() # def buildTree(self, preorder, inorder): # if inorder: # ind = inorder.index(preorder.pop(0)) # root = TreeNode(inorder[ind]) # root.left = self.buildTree(preorder, inorder[0:ind]) # root.right = self.buildTree(preorder, inorder[ind+1:]) # return root
class Solution: def build_tree(self, preorder: List[int], inorder: List[int]) -> TreeNode: def helper(in_left=0, in_right=len(inorder)): nonlocal pre_idx if in_right <= in_left: return None node_val = preorder[pre_idx] node = tree_node(node_val) in_idx = inorderMap[node_val] pre_idx += 1 node.left = helper(in_left, in_idx) node.right = helper(in_idx + 1, in_right) return node pre_idx = 0 inorder_map = {v: k for (k, v) in enumerate(inorder)} return helper()
""" Buffer the StreetTrees feature class by 20 meters """ # Import modules # Set variables # Execute operation
""" Buffer the StreetTrees feature class by 20 meters """
class Solution: def partitionLabels(self, S): """ :type S: str :rtype: List[int] """ positions = dict() for i, c in enumerate(S): if c not in positions: positions[c] = [] positions[c].append(i) result = [] end = -1 while True: st = end + 1 if st >= len(S): break end = positions[S[st]][-1] while True: extend = max(positions[c][-1] for c in S[st:end + 1]) if extend > end: end = extend else: break result.append(end - st + 1) return result sol = Solution().partitionLabels print(sol("ababcbacadefegdehijhklij"))
class Solution: def partition_labels(self, S): """ :type S: str :rtype: List[int] """ positions = dict() for (i, c) in enumerate(S): if c not in positions: positions[c] = [] positions[c].append(i) result = [] end = -1 while True: st = end + 1 if st >= len(S): break end = positions[S[st]][-1] while True: extend = max((positions[c][-1] for c in S[st:end + 1])) if extend > end: end = extend else: break result.append(end - st + 1) return result sol = solution().partitionLabels print(sol('ababcbacadefegdehijhklij'))
a = 3 b = 4 z = a + b print(z)
a = 3 b = 4 z = a + b print(z)
expected = 'Jana III Sobieskiego' a = ' Jana III Sobieskiego ' b = 'ul Jana III SobIESkiego' c = '\tul. Jana trzeciego Sobieskiego' d = 'ulicaJana III Sobieskiego' e = 'UL. JA\tNA 3 SOBIES\tKIEGO' f = 'UL. jana III SOBiesKIEGO' g = 'ULICA JANA III SOBIESKIEGO ' h = 'ULICA. JANA III SOBIeskieGO' i = ' Jana 3 Sobieskiego ' j = 'Jana III\tSobieskiego ' k = 'ul.Jana III Sob\n\nieskiego\n' a = a.strip() b = b.upper().replace('UL', '').strip().title().replace('Iii', 'III') c = c.upper().replace('UL.', '').strip().title().replace('Trzeciego', 'III') d = d.upper().replace('ULICA', '').strip().title().replace('Iii', 'III') e = e.upper().replace('UL.', '').strip().replace('\t', '').title().replace('3', 'III') f = f.upper().replace('UL.', '').strip().title().replace('Iii', 'III') g = g.upper().replace('ULICA', '').strip().title().replace('Iii', 'III') h = h.upper().replace('ULICA.', '').strip().title().replace('Iii', 'III') i = i.strip().replace('3', 'III') j = j.strip().replace('\t', ' ') k = k.upper().replace('UL.', '').replace('\n', '').title().replace('Iii', 'III') expected = 'Jana III Sobieskiego' print(f'{a == expected}\t a: "{a}"') print(f'{b == expected}\t b: "{b}"') print(f'{c == expected}\t c: "{c}"') print(f'{d == expected}\t d: "{d}"') print(f'{e == expected}\t e: "{e}"') print(f'{f == expected}\t f: "{f}"') print(f'{g == expected}\t g: "{g}"') print(f'{h == expected}\t h: "{h}"') print(f'{i == expected}\t i: "{i}"') print(f'{j == expected}\t j: "{j}"') print(f'{k == expected}\t k: "{k}"')
expected = 'Jana III Sobieskiego' a = ' Jana III Sobieskiego ' b = 'ul Jana III SobIESkiego' c = '\tul. Jana trzeciego Sobieskiego' d = 'ulicaJana III Sobieskiego' e = 'UL. JA\tNA 3 SOBIES\tKIEGO' f = 'UL. jana III SOBiesKIEGO' g = 'ULICA JANA III SOBIESKIEGO ' h = 'ULICA. JANA III SOBIeskieGO' i = ' Jana 3 Sobieskiego ' j = 'Jana III\tSobieskiego ' k = 'ul.Jana III Sob\n\nieskiego\n' a = a.strip() b = b.upper().replace('UL', '').strip().title().replace('Iii', 'III') c = c.upper().replace('UL.', '').strip().title().replace('Trzeciego', 'III') d = d.upper().replace('ULICA', '').strip().title().replace('Iii', 'III') e = e.upper().replace('UL.', '').strip().replace('\t', '').title().replace('3', 'III') f = f.upper().replace('UL.', '').strip().title().replace('Iii', 'III') g = g.upper().replace('ULICA', '').strip().title().replace('Iii', 'III') h = h.upper().replace('ULICA.', '').strip().title().replace('Iii', 'III') i = i.strip().replace('3', 'III') j = j.strip().replace('\t', ' ') k = k.upper().replace('UL.', '').replace('\n', '').title().replace('Iii', 'III') expected = 'Jana III Sobieskiego' print(f'{a == expected}\t a: "{a}"') print(f'{b == expected}\t b: "{b}"') print(f'{c == expected}\t c: "{c}"') print(f'{d == expected}\t d: "{d}"') print(f'{e == expected}\t e: "{e}"') print(f'{f == expected}\t f: "{f}"') print(f'{g == expected}\t g: "{g}"') print(f'{h == expected}\t h: "{h}"') print(f'{i == expected}\t i: "{i}"') print(f'{j == expected}\t j: "{j}"') print(f'{k == expected}\t k: "{k}"')
# # PySNMP MIB module CISCO-CALL-TRACKER-TCP-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-CALL-TRACKER-TCP-MIB # Produced by pysmi-0.3.4 at Mon Apr 29 17:34:55 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint") cctActiveCallId, cctHistoryIndex = mibBuilder.importSymbols("CISCO-CALL-TRACKER-MIB", "cctActiveCallId", "cctHistoryIndex") ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt") CiscoPort, = mibBuilder.importSymbols("CISCO-TC", "CiscoPort") NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance") iso, NotificationType, Counter32, TimeTicks, MibIdentifier, Counter64, Unsigned32, Bits, Gauge32, ModuleIdentity, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "NotificationType", "Counter32", "TimeTicks", "MibIdentifier", "Counter64", "Unsigned32", "Bits", "Gauge32", "ModuleIdentity", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "ObjectIdentity") TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString") ciscoCallTrackerTCPMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 164)) ciscoCallTrackerTCPMIB.setRevisions(('2005-12-06 00:00', '2000-06-07 00:00',)) if mibBuilder.loadTexts: ciscoCallTrackerTCPMIB.setLastUpdated('200512060000Z') if mibBuilder.loadTexts: ciscoCallTrackerTCPMIB.setOrganization('Cisco Systems, Inc.') ccttMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 1)) ccttActive = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1)) ccttHistory = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2)) ccttActiveTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1), ) if mibBuilder.loadTexts: ccttActiveTable.setStatus('current') ccttActiveEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1), ).setIndexNames((0, "CISCO-CALL-TRACKER-MIB", "cctActiveCallId")) if mibBuilder.loadTexts: ccttActiveEntry.setStatus('current') ccttActiveLocalIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1, 1), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ccttActiveLocalIpAddress.setStatus('current') ccttActiveLocalTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1, 2), CiscoPort()).setMaxAccess("readonly") if mibBuilder.loadTexts: ccttActiveLocalTcpPort.setStatus('current') ccttActiveRemoteIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1, 3), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ccttActiveRemoteIpAddress.setStatus('current') ccttActiveRemoteTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1, 4), CiscoPort()).setMaxAccess("readonly") if mibBuilder.loadTexts: ccttActiveRemoteTcpPort.setStatus('current') ccttActiveDestinationFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1, 5), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ccttActiveDestinationFailures.setStatus('current') ccttHistoryTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1), ) if mibBuilder.loadTexts: ccttHistoryTable.setStatus('current') ccttHistoryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1), ).setIndexNames((0, "CISCO-CALL-TRACKER-MIB", "cctHistoryIndex")) if mibBuilder.loadTexts: ccttHistoryEntry.setStatus('current') ccttHistoryLocalIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1, 1), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ccttHistoryLocalIpAddress.setStatus('current') ccttHistoryLocalTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1, 2), CiscoPort()).setMaxAccess("readonly") if mibBuilder.loadTexts: ccttHistoryLocalTcpPort.setStatus('current') ccttHistoryRemoteIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1, 3), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ccttHistoryRemoteIpAddress.setStatus('current') ccttHistoryRemoteTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1, 4), CiscoPort()).setMaxAccess("readonly") if mibBuilder.loadTexts: ccttHistoryRemoteTcpPort.setStatus('current') ccttHistoryDestinationFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1, 5), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ccttHistoryDestinationFailures.setStatus('current') ccttMIBNotificationPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 2)) ccttMIBNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 2, 0)) ccttMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 3)) ccttMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 3, 1)) ccttMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 3, 2)) ccttMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 164, 3, 1, 1)).setObjects(("CISCO-CALL-TRACKER-TCP-MIB", "ccttActiveGroup"), ("CISCO-CALL-TRACKER-TCP-MIB", "ccttHistoryGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ccttMIBCompliance = ccttMIBCompliance.setStatus('current') ccttActiveGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 164, 3, 2, 2)).setObjects(("CISCO-CALL-TRACKER-TCP-MIB", "ccttActiveLocalIpAddress"), ("CISCO-CALL-TRACKER-TCP-MIB", "ccttActiveLocalTcpPort"), ("CISCO-CALL-TRACKER-TCP-MIB", "ccttActiveRemoteIpAddress"), ("CISCO-CALL-TRACKER-TCP-MIB", "ccttActiveRemoteTcpPort"), ("CISCO-CALL-TRACKER-TCP-MIB", "ccttActiveDestinationFailures")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ccttActiveGroup = ccttActiveGroup.setStatus('current') ccttHistoryGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 164, 3, 2, 3)).setObjects(("CISCO-CALL-TRACKER-TCP-MIB", "ccttHistoryLocalIpAddress"), ("CISCO-CALL-TRACKER-TCP-MIB", "ccttHistoryLocalTcpPort"), ("CISCO-CALL-TRACKER-TCP-MIB", "ccttHistoryRemoteIpAddress"), ("CISCO-CALL-TRACKER-TCP-MIB", "ccttHistoryRemoteTcpPort"), ("CISCO-CALL-TRACKER-TCP-MIB", "ccttHistoryDestinationFailures")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ccttHistoryGroup = ccttHistoryGroup.setStatus('current') mibBuilder.exportSymbols("CISCO-CALL-TRACKER-TCP-MIB", PYSNMP_MODULE_ID=ciscoCallTrackerTCPMIB, ccttActiveDestinationFailures=ccttActiveDestinationFailures, ccttMIBObjects=ccttMIBObjects, ciscoCallTrackerTCPMIB=ciscoCallTrackerTCPMIB, ccttActiveLocalIpAddress=ccttActiveLocalIpAddress, ccttMIBCompliance=ccttMIBCompliance, ccttActive=ccttActive, ccttActiveRemoteTcpPort=ccttActiveRemoteTcpPort, ccttHistoryTable=ccttHistoryTable, ccttHistory=ccttHistory, ccttMIBCompliances=ccttMIBCompliances, ccttActiveEntry=ccttActiveEntry, ccttMIBNotificationPrefix=ccttMIBNotificationPrefix, ccttHistoryDestinationFailures=ccttHistoryDestinationFailures, ccttHistoryRemoteIpAddress=ccttHistoryRemoteIpAddress, ccttActiveLocalTcpPort=ccttActiveLocalTcpPort, ccttActiveGroup=ccttActiveGroup, ccttActiveTable=ccttActiveTable, ccttHistoryEntry=ccttHistoryEntry, ccttMIBConformance=ccttMIBConformance, ccttHistoryRemoteTcpPort=ccttHistoryRemoteTcpPort, ccttHistoryLocalTcpPort=ccttHistoryLocalTcpPort, ccttHistoryGroup=ccttHistoryGroup, ccttMIBNotifications=ccttMIBNotifications, ccttMIBGroups=ccttMIBGroups, ccttHistoryLocalIpAddress=ccttHistoryLocalIpAddress, ccttActiveRemoteIpAddress=ccttActiveRemoteIpAddress)
(octet_string, object_identifier, integer) = mibBuilder.importSymbols('ASN1', 'OctetString', 'ObjectIdentifier', 'Integer') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (value_range_constraint, constraints_union, value_size_constraint, constraints_intersection, single_value_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueRangeConstraint', 'ConstraintsUnion', 'ValueSizeConstraint', 'ConstraintsIntersection', 'SingleValueConstraint') (cct_active_call_id, cct_history_index) = mibBuilder.importSymbols('CISCO-CALL-TRACKER-MIB', 'cctActiveCallId', 'cctHistoryIndex') (cisco_mgmt,) = mibBuilder.importSymbols('CISCO-SMI', 'ciscoMgmt') (cisco_port,) = mibBuilder.importSymbols('CISCO-TC', 'CiscoPort') (notification_group, object_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ObjectGroup', 'ModuleCompliance') (iso, notification_type, counter32, time_ticks, mib_identifier, counter64, unsigned32, bits, gauge32, module_identity, integer32, mib_scalar, mib_table, mib_table_row, mib_table_column, ip_address, object_identity) = mibBuilder.importSymbols('SNMPv2-SMI', 'iso', 'NotificationType', 'Counter32', 'TimeTicks', 'MibIdentifier', 'Counter64', 'Unsigned32', 'Bits', 'Gauge32', 'ModuleIdentity', 'Integer32', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'IpAddress', 'ObjectIdentity') (textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString') cisco_call_tracker_tcpmib = module_identity((1, 3, 6, 1, 4, 1, 9, 9, 164)) ciscoCallTrackerTCPMIB.setRevisions(('2005-12-06 00:00', '2000-06-07 00:00')) if mibBuilder.loadTexts: ciscoCallTrackerTCPMIB.setLastUpdated('200512060000Z') if mibBuilder.loadTexts: ciscoCallTrackerTCPMIB.setOrganization('Cisco Systems, Inc.') cctt_mib_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 1)) cctt_active = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1)) cctt_history = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2)) cctt_active_table = mib_table((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1)) if mibBuilder.loadTexts: ccttActiveTable.setStatus('current') cctt_active_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1)).setIndexNames((0, 'CISCO-CALL-TRACKER-MIB', 'cctActiveCallId')) if mibBuilder.loadTexts: ccttActiveEntry.setStatus('current') cctt_active_local_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1, 1), ip_address()).setMaxAccess('readonly') if mibBuilder.loadTexts: ccttActiveLocalIpAddress.setStatus('current') cctt_active_local_tcp_port = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1, 2), cisco_port()).setMaxAccess('readonly') if mibBuilder.loadTexts: ccttActiveLocalTcpPort.setStatus('current') cctt_active_remote_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1, 3), ip_address()).setMaxAccess('readonly') if mibBuilder.loadTexts: ccttActiveRemoteIpAddress.setStatus('current') cctt_active_remote_tcp_port = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1, 4), cisco_port()).setMaxAccess('readonly') if mibBuilder.loadTexts: ccttActiveRemoteTcpPort.setStatus('current') cctt_active_destination_failures = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 1, 1, 1, 5), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: ccttActiveDestinationFailures.setStatus('current') cctt_history_table = mib_table((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1)) if mibBuilder.loadTexts: ccttHistoryTable.setStatus('current') cctt_history_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1)).setIndexNames((0, 'CISCO-CALL-TRACKER-MIB', 'cctHistoryIndex')) if mibBuilder.loadTexts: ccttHistoryEntry.setStatus('current') cctt_history_local_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1, 1), ip_address()).setMaxAccess('readonly') if mibBuilder.loadTexts: ccttHistoryLocalIpAddress.setStatus('current') cctt_history_local_tcp_port = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1, 2), cisco_port()).setMaxAccess('readonly') if mibBuilder.loadTexts: ccttHistoryLocalTcpPort.setStatus('current') cctt_history_remote_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1, 3), ip_address()).setMaxAccess('readonly') if mibBuilder.loadTexts: ccttHistoryRemoteIpAddress.setStatus('current') cctt_history_remote_tcp_port = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1, 4), cisco_port()).setMaxAccess('readonly') if mibBuilder.loadTexts: ccttHistoryRemoteTcpPort.setStatus('current') cctt_history_destination_failures = mib_table_column((1, 3, 6, 1, 4, 1, 9, 9, 164, 1, 2, 1, 1, 5), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: ccttHistoryDestinationFailures.setStatus('current') cctt_mib_notification_prefix = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 2)) cctt_mib_notifications = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 2, 0)) cctt_mib_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 3)) cctt_mib_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 3, 1)) cctt_mib_groups = mib_identifier((1, 3, 6, 1, 4, 1, 9, 9, 164, 3, 2)) cctt_mib_compliance = module_compliance((1, 3, 6, 1, 4, 1, 9, 9, 164, 3, 1, 1)).setObjects(('CISCO-CALL-TRACKER-TCP-MIB', 'ccttActiveGroup'), ('CISCO-CALL-TRACKER-TCP-MIB', 'ccttHistoryGroup')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cctt_mib_compliance = ccttMIBCompliance.setStatus('current') cctt_active_group = object_group((1, 3, 6, 1, 4, 1, 9, 9, 164, 3, 2, 2)).setObjects(('CISCO-CALL-TRACKER-TCP-MIB', 'ccttActiveLocalIpAddress'), ('CISCO-CALL-TRACKER-TCP-MIB', 'ccttActiveLocalTcpPort'), ('CISCO-CALL-TRACKER-TCP-MIB', 'ccttActiveRemoteIpAddress'), ('CISCO-CALL-TRACKER-TCP-MIB', 'ccttActiveRemoteTcpPort'), ('CISCO-CALL-TRACKER-TCP-MIB', 'ccttActiveDestinationFailures')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cctt_active_group = ccttActiveGroup.setStatus('current') cctt_history_group = object_group((1, 3, 6, 1, 4, 1, 9, 9, 164, 3, 2, 3)).setObjects(('CISCO-CALL-TRACKER-TCP-MIB', 'ccttHistoryLocalIpAddress'), ('CISCO-CALL-TRACKER-TCP-MIB', 'ccttHistoryLocalTcpPort'), ('CISCO-CALL-TRACKER-TCP-MIB', 'ccttHistoryRemoteIpAddress'), ('CISCO-CALL-TRACKER-TCP-MIB', 'ccttHistoryRemoteTcpPort'), ('CISCO-CALL-TRACKER-TCP-MIB', 'ccttHistoryDestinationFailures')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cctt_history_group = ccttHistoryGroup.setStatus('current') mibBuilder.exportSymbols('CISCO-CALL-TRACKER-TCP-MIB', PYSNMP_MODULE_ID=ciscoCallTrackerTCPMIB, ccttActiveDestinationFailures=ccttActiveDestinationFailures, ccttMIBObjects=ccttMIBObjects, ciscoCallTrackerTCPMIB=ciscoCallTrackerTCPMIB, ccttActiveLocalIpAddress=ccttActiveLocalIpAddress, ccttMIBCompliance=ccttMIBCompliance, ccttActive=ccttActive, ccttActiveRemoteTcpPort=ccttActiveRemoteTcpPort, ccttHistoryTable=ccttHistoryTable, ccttHistory=ccttHistory, ccttMIBCompliances=ccttMIBCompliances, ccttActiveEntry=ccttActiveEntry, ccttMIBNotificationPrefix=ccttMIBNotificationPrefix, ccttHistoryDestinationFailures=ccttHistoryDestinationFailures, ccttHistoryRemoteIpAddress=ccttHistoryRemoteIpAddress, ccttActiveLocalTcpPort=ccttActiveLocalTcpPort, ccttActiveGroup=ccttActiveGroup, ccttActiveTable=ccttActiveTable, ccttHistoryEntry=ccttHistoryEntry, ccttMIBConformance=ccttMIBConformance, ccttHistoryRemoteTcpPort=ccttHistoryRemoteTcpPort, ccttHistoryLocalTcpPort=ccttHistoryLocalTcpPort, ccttHistoryGroup=ccttHistoryGroup, ccttMIBNotifications=ccttMIBNotifications, ccttMIBGroups=ccttMIBGroups, ccttHistoryLocalIpAddress=ccttHistoryLocalIpAddress, ccttActiveRemoteIpAddress=ccttActiveRemoteIpAddress)
def test_index(client): res = client.get("/") json_data = res.get_json() assert json_data["msg"] == "Don't panic" assert res.status_code == 200
def test_index(client): res = client.get('/') json_data = res.get_json() assert json_data['msg'] == "Don't panic" assert res.status_code == 200
#!/usr/bin/env python numbers = [1,2,3,4,5,8,3,2,3,1,1,1] def bubblesort(numbers): # Keep a boolean to determine if we switched switched = True # One round of sorting while switched: switched = False indexA = 0 indexB = 1 while indexB < len(numbers): numberA = numbers[indexA] numberB = numbers[indexB] # If the two numbers are our of order, change them if numberA > numberB: holder = numbers[indexA] numbers[indexA] = numbers[indexB] numbers[indexB] = holder switched = True indexA+=1 indexB+=1 print('Sorted numbers are %s' % numbers) return numbers bubblesort(numbers)
numbers = [1, 2, 3, 4, 5, 8, 3, 2, 3, 1, 1, 1] def bubblesort(numbers): switched = True while switched: switched = False index_a = 0 index_b = 1 while indexB < len(numbers): number_a = numbers[indexA] number_b = numbers[indexB] if numberA > numberB: holder = numbers[indexA] numbers[indexA] = numbers[indexB] numbers[indexB] = holder switched = True index_a += 1 index_b += 1 print('Sorted numbers are %s' % numbers) return numbers bubblesort(numbers)
class Solution(object): def majorityElement(self, nums): """ :type nums: List[int] :rtype: int """ res = None count = 0 for n in nums: if res is None: res = n count = 1 elif res == n: count += 1 elif res !=n: count -= 1 if count == 0: res = None return res
class Solution(object): def majority_element(self, nums): """ :type nums: List[int] :rtype: int """ res = None count = 0 for n in nums: if res is None: res = n count = 1 elif res == n: count += 1 elif res != n: count -= 1 if count == 0: res = None return res
class Solution: def transpose(self, A: List[List[int]]) -> List[List[int]]: output=[] rows=len(A) cols=len(A[0]) output=[[0]*rows for i in range(cols)] for i in range(0,len(A)): for j in range(0,len(A[0])): output[j][i]=A[i][j] return output
class Solution: def transpose(self, A: List[List[int]]) -> List[List[int]]: output = [] rows = len(A) cols = len(A[0]) output = [[0] * rows for i in range(cols)] for i in range(0, len(A)): for j in range(0, len(A[0])): output[j][i] = A[i][j] return output
""" spin multiplicities """ def spin(mult): """ number of unpaired electrons """ return mult - 1
""" spin multiplicities """ def spin(mult): """ number of unpaired electrons """ return mult - 1
def missing_values1 (data): """ Deals in place with missing values in specified columns: 'fields_of_study', 'title', 'abstract', 'authors', 'venue', 'references', 'topics' """ data.loc[data['fields_of_study'].isnull(), 'fields_of_study'] = "" data.loc[data['title'].isnull(), 'title'] = "" data.loc[data['abstract'].isnull(), 'abstract'] = "" data.loc[data['authors'].isnull(), 'authors'] = "" data.loc[data['venue'].isnull(), 'venue'] = "" data.loc[data['references'].isnull(), 'references'] = data['references'].mean() data.loc[data['topics'].isnull(), 'topics'] = "" def missing_values2 (data): """ Deals in place with missing values in specified columns: 'open_access' and 'year' """ # Open_access, thanks to jreback (27th of July 2016) https://github.com/pandas-dev/pandas/issues/13809 OpAc_by_venue = data.groupby('venue').open_access.apply(lambda x: x.mode()) # Take mode for each venue OpAc_by_venue = OpAc_by_venue.to_dict() missing_OpAc = data.loc[data['open_access'].isnull(),] for i, i_paper in missing_OpAc.iterrows(): venue = i_paper['venue'] doi = i_paper['doi'] index = data[data['doi'] == doi].index[0] if venue in OpAc_by_venue.keys(): # If a known venue, append the most frequent value for that venue data.loc[index,'open_access'] = OpAc_by_venue[venue] # Set most frequent occurrence else: # Else take most occurring value in entire dataset data.loc[index,'open_access'] = data.open_access.mode()[0] # Thanks to BENY (2nd of February, 2018) https://stackoverflow.com/questions/48590268/pandas-get-the-most-frequent-values-of-a-column # Year year_by_venue = data.groupby('venue').year.apply(lambda x: x.mean()) # Take mean for each venue year_by_venue = year_by_venue.to_dict() missing_year = data.loc[data['year'].isnull(),] for i, i_paper in missing_year.iterrows(): venue = i_paper['venue'] doi = i_paper['doi'] index = data[data['doi'] == doi].index[0] if venue in year_by_venue.keys(): # If a known venue, append the mean value for that venue data.loc[index, 'year'] = year_by_venue[venue] # Set mean publication year else: # Else take mean value of entire dataset data.loc[index,'year'] = data.year.mean()
def missing_values1(data): """ Deals in place with missing values in specified columns: 'fields_of_study', 'title', 'abstract', 'authors', 'venue', 'references', 'topics' """ data.loc[data['fields_of_study'].isnull(), 'fields_of_study'] = '' data.loc[data['title'].isnull(), 'title'] = '' data.loc[data['abstract'].isnull(), 'abstract'] = '' data.loc[data['authors'].isnull(), 'authors'] = '' data.loc[data['venue'].isnull(), 'venue'] = '' data.loc[data['references'].isnull(), 'references'] = data['references'].mean() data.loc[data['topics'].isnull(), 'topics'] = '' def missing_values2(data): """ Deals in place with missing values in specified columns: 'open_access' and 'year' """ op_ac_by_venue = data.groupby('venue').open_access.apply(lambda x: x.mode()) op_ac_by_venue = OpAc_by_venue.to_dict() missing__op_ac = data.loc[data['open_access'].isnull(),] for (i, i_paper) in missing_OpAc.iterrows(): venue = i_paper['venue'] doi = i_paper['doi'] index = data[data['doi'] == doi].index[0] if venue in OpAc_by_venue.keys(): data.loc[index, 'open_access'] = OpAc_by_venue[venue] else: data.loc[index, 'open_access'] = data.open_access.mode()[0] year_by_venue = data.groupby('venue').year.apply(lambda x: x.mean()) year_by_venue = year_by_venue.to_dict() missing_year = data.loc[data['year'].isnull(),] for (i, i_paper) in missing_year.iterrows(): venue = i_paper['venue'] doi = i_paper['doi'] index = data[data['doi'] == doi].index[0] if venue in year_by_venue.keys(): data.loc[index, 'year'] = year_by_venue[venue] else: data.loc[index, 'year'] = data.year.mean()
def Settings( **kwargs ): return { 'flags': [ '-O3', '-std=gnu11', '-fms-extensions', '-Wno-microsoft-anon-tag', '-Iinclude/', '-Ilib/', '-pthread', '-Wall', '-Werror', '-pedantic'] }
def settings(**kwargs): return {'flags': ['-O3', '-std=gnu11', '-fms-extensions', '-Wno-microsoft-anon-tag', '-Iinclude/', '-Ilib/', '-pthread', '-Wall', '-Werror', '-pedantic']}
schema = { # Schema definition, of the CLARA items. 'main_scale': { 'type': 'string', 'minlength': 1, 'maxlength': 20, 'required': True }, 'itembank_id': { 'type': 'string', 'minlength': 1, 'maxlength': 4, 'required': True }, 'presenting_order': { 'type': 'integer', 'required': True }, 'clara_item': { 'type': 'string', 'minlength': 1, 'maxlength': 255, 'required': True }, 'language': { 'type': 'string', 'minlength': 2, 'maxlength': 2, 'required': True }, } clara_items = { # 'title' tag used in item links. Defaults to the resource title minus # the final, plural 's' (works fine in most cases but not for 'people') # 'item_title': 'person', # by default the standard item entry point is defined as # '/people/<ObjectId>'. We leave it untouched, and we also enable an # additional read-only entry point. This way consumers can also perform # GET requests at '/people/<lastname>'. # 'additional_lookup': { # 'url': 'regex("[\w]+")', # 'field': 'language' # }, # We choose to override global cache-control directives for this resource. 'cache_control': 'max-age=10,must-revalidate', 'cache_expires': 10, # most global settings can be overridden at resource level 'resource_methods': ['GET'], 'schema': schema }
schema = {'main_scale': {'type': 'string', 'minlength': 1, 'maxlength': 20, 'required': True}, 'itembank_id': {'type': 'string', 'minlength': 1, 'maxlength': 4, 'required': True}, 'presenting_order': {'type': 'integer', 'required': True}, 'clara_item': {'type': 'string', 'minlength': 1, 'maxlength': 255, 'required': True}, 'language': {'type': 'string', 'minlength': 2, 'maxlength': 2, 'required': True}} clara_items = {'cache_control': 'max-age=10,must-revalidate', 'cache_expires': 10, 'resource_methods': ['GET'], 'schema': schema}
n,d = map(int,raw_input().split()) c = map(int,raw_input().split(' ')) gc = 0 for i in range(len(c)): if c[i]+d in c and c[i]+2*d in c: gc+=1 print (gc)
(n, d) = map(int, raw_input().split()) c = map(int, raw_input().split(' ')) gc = 0 for i in range(len(c)): if c[i] + d in c and c[i] + 2 * d in c: gc += 1 print(gc)
{ "targets": [ { "target_name": "glace", "sources": [ "target/glace.cc", ], "include_dirs": [ "target/deps/include", ], "libraries": [], "library_dirs": [ "/usr/local/lib" ] } ] }
{'targets': [{'target_name': 'glace', 'sources': ['target/glace.cc'], 'include_dirs': ['target/deps/include'], 'libraries': [], 'library_dirs': ['/usr/local/lib']}]}
OUT_FORMAT_CONVERSION = { "t": "", "b": "b", "u": "bu", }
out_format_conversion = {'t': '', 'b': 'b', 'u': 'bu'}
Scale.default = Scale.egyptian Root.default = 0 Clock.bpm = 110 ~p1 >> play('m', dur=PDur(3,16), sample=[0,2,3]) ~p2 >> play('V', dur=1, pan=[-1,1], sample=var([0,2],[24,8])) ~p3 >> play('n', dur=var([.5,.25,.125,2/3],[24,4,2,2])) p_all.lpf = 0 p_all.rate = .5 ~s1 >> space(P[0,3,5,1]+(0,3), dur=4, chop=6, slide=0, echo=.5, room=.5, mix=.5) ~s2 >> glass(P[0,3,5,var([1,8,7],[16,8,8])], oct=4, amp=2)
Scale.default = Scale.egyptian Root.default = 0 Clock.bpm = 110 ~p1 >> play('m', dur=p_dur(3, 16), sample=[0, 2, 3]) ~p2 >> play('V', dur=1, pan=[-1, 1], sample=var([0, 2], [24, 8])) ~p3 >> play('n', dur=var([0.5, 0.25, 0.125, 2 / 3], [24, 4, 2, 2])) p_all.lpf = 0 p_all.rate = 0.5 ~s1 >> space(P[0, 3, 5, 1] + (0, 3), dur=4, chop=6, slide=0, echo=0.5, room=0.5, mix=0.5) ~s2 >> glass(P[0, 3, 5, var([1, 8, 7], [16, 8, 8])], oct=4, amp=2)
"""" Copyright 2019 by J. Christopher Wagner (jwag). All rights reserved. :license: MIT, see LICENSE for more details. This packages contains OPTIONAL models for various ORMs/databases that can be used to quickly get the required DB models setup. These models have the fields for ALL features. This makes it easy for applications to add features w/o a DB migration (and modern DBs are pretty efficient at storing empty values!). """
"""" Copyright 2019 by J. Christopher Wagner (jwag). All rights reserved. :license: MIT, see LICENSE for more details. This packages contains OPTIONAL models for various ORMs/databases that can be used to quickly get the required DB models setup. These models have the fields for ALL features. This makes it easy for applications to add features w/o a DB migration (and modern DBs are pretty efficient at storing empty values!). """
params = { # file to load wav from 'file': 'C:/Users/qweis/Documents/GitHub/unknown-pleasures/wavs/famous.wav', # output file for images 'save_loc': 'covers/frame.png', # save location for video 'vid_save': 'time_test.mp4', # number of spacers between each data point 'spacers': 5, # number of pixels added as offset per wave 'offset': 10, # how many amp data points per line 'data_line': 40, # number of lines 'n_lines': 80, # percentage of actual data that is added as noise at end 'noise_frac': 1, # min ticks to reach mean when drawing connecting line 'min_ticks': 5, # how much range does the random noise have 'random_noise_range': 1, # noise range for connecting line 'connecting_line_range': 6, # frames per second 'fps': 30, # thickness of waveform lines 'line_width': 3, # value to scale amplitudes to 'scale_val': 125, }
params = {'file': 'C:/Users/qweis/Documents/GitHub/unknown-pleasures/wavs/famous.wav', 'save_loc': 'covers/frame.png', 'vid_save': 'time_test.mp4', 'spacers': 5, 'offset': 10, 'data_line': 40, 'n_lines': 80, 'noise_frac': 1, 'min_ticks': 5, 'random_noise_range': 1, 'connecting_line_range': 6, 'fps': 30, 'line_width': 3, 'scale_val': 125}
input = """-1,-1,-4,-1 0,1,8,5 -8,-5,2,-6 -1,3,-2,-3 -1,1,6,1 4,-8,0,-5 8,-6,-5,6 -6,2,-8,1 -5,6,-2,-5 3,-8,0,0 -3,-7,-5,-6 -1,-4,-7,-5 -7,3,1,-6 -5,4,-4,0 1,-7,0,-6 4,-1,1,2 6,1,6,-2 2,1,-8,-6 -7,-7,3,2 -8,-5,8,-5 -8,2,-1,4 8,6,7,1 6,-4,-1,-7 8,-2,2,4 0,8,8,3 6,-7,-6,8 -8,-2,8,6 1,-8,-6,-8 -6,-1,5,-6 5,2,7,-3 5,7,0,-3 0,2,-8,7 1,8,2,8 5,-5,3,-3 7,-5,1,-8 -3,1,-3,1 0,-5,5,5 1,-5,2,-4 7,-6,1,0 8,-3,-3,-4 4,0,2,-6 6,7,-6,-5 4,8,0,-5 -3,-1,-1,0 4,-4,4,-4 -3,4,-3,-4 0,-8,-6,6 -4,3,-4,0 -1,3,2,0 1,6,5,-3 -8,-8,3,8 0,-1,5,-1 -6,0,-5,2 4,-4,0,-2 4,0,-8,-3 -6,-6,-2,-3 3,-5,0,-3 3,-3,6,1 -1,0,1,6 -7,-1,-2,-1 -1,4,-7,0 -1,7,8,-8 -1,-1,3,0 -8,8,-3,2 -8,5,-1,8 0,-1,0,2 -1,7,5,6 -4,6,-6,2 -3,-5,3,-4 8,-7,-4,6 1,-3,0,5 -1,-6,-5,-8 1,8,-2,-7 -8,7,-1,6 3,2,0,6 -5,-8,7,-5 5,-6,-6,-5 -7,-7,2,-8 -2,-1,2,-1 6,-5,-6,-4 7,7,-1,-5 8,5,3,4 -5,-5,-5,-3 0,6,-4,3 -7,5,4,-4 6,-8,-1,-8 8,3,-8,1 3,-3,0,-2 8,3,4,8 0,-6,-1,-4 5,-2,6,-2 1,-2,0,-3 -3,-7,-3,8 -1,-6,0,0 6,-1,-3,4 2,2,-5,0 3,8,2,-7 -6,-3,-4,6 -2,-4,-5,-3 -6,-7,-2,7 4,7,8,1 -4,5,-6,5 0,6,1,1 -1,3,8,-6 -2,5,-4,-3 -5,0,8,7 -1,7,-5,6 7,1,-4,4 -6,-3,-7,8 3,-8,1,8 -6,-3,6,0 -7,0,-8,3 1,1,1,-6 -8,5,3,6 7,-2,2,-1 3,-8,-1,-4 0,1,2,-5 -4,2,0,3 -2,-5,6,-8 -1,-6,1,6 4,7,1,-2 5,-5,7,-4 4,4,7,-2 -8,2,2,7 -2,7,4,-2 6,-8,7,-7 -3,2,5,1 -6,2,5,-2 -3,6,8,-1 -5,2,4,7 -3,5,5,-6 0,8,5,7 -5,-5,0,-1 8,6,3,-8 -6,-2,7,2 -8,8,-2,-3 1,4,4,3 -3,3,-3,-1 -6,-4,8,-3 -3,1,-6,0 3,-6,-7,-6 -7,0,-7,7 -7,-2,1,1 -8,7,-4,5 -5,0,2,0 -6,-5,8,0 5,1,2,6 -4,-2,-7,-6 2,5,-2,-6 -3,-3,-8,8 8,-1,0,-1 8,-3,-6,0 -7,0,4,-7 1,6,-6,6 6,6,0,-7 6,8,-5,-5 7,-3,-2,-6 5,-2,-8,2 -1,4,-5,-8 3,6,-2,0 -2,-5,0,-8 -1,0,2,4 0,7,8,0 2,-5,-4,7 2,5,1,3 0,4,0,1 -5,-2,2,-5 2,-7,-6,-5 -1,-7,8,-5 2,-3,-1,-5 5,-2,-8,-4 -3,-1,2,4 -2,-5,6,-5 0,2,-6,6 2,1,-4,1 -3,1,-3,8 -4,-8,7,6 6,-8,2,6 -4,-4,-6,-2 4,-8,-8,-2 -8,-7,-4,0 -5,2,-5,-6 3,-7,-7,6 -8,-3,7,6 2,5,-1,8 8,5,-4,0 -7,0,-3,-7 -1,3,-7,-7 -2,0,7,6 2,-1,-2,-7 1,0,0,-6 0,-1,-1,-7 1,-1,2,-4 8,-7,-5,-8 -3,2,-8,2 -5,5,3,-8 -4,2,-8,-4 -8,7,2,1 8,-4,-2,0 0,-8,1,-8 -3,1,4,-8 -8,-7,-1,0 8,7,-4,7 -1,5,6,-4 4,2,-3,8 3,0,-5,-4 -3,5,-3,-5 8,4,0,-1 -1,-5,7,7 2,5,-6,-2 0,-6,-7,-5 0,0,4,1 0,-2,6,-2 -5,-1,2,6 -5,1,-8,-6 1,-1,-1,5 -7,6,7,-1 -8,6,-7,0 -5,-5,-2,-7 -5,-2,5,8 1,1,6,4 -1,2,6,8 2,-7,-2,0 3,0,1,2 -6,5,-1,-5 7,-5,-8,-5 -2,-1,3,4 -6,-7,-5,8 -3,0,-5,5 6,-7,4,-5 3,0,8,-5 4,-4,-4,0 6,-6,3,-1 -6,-7,7,-7 -1,-6,-6,-7 7,0,2,4 -8,-2,4,-8 8,5,8,0 -2,4,5,-4 5,0,0,2 0,6,7,3 1,7,8,2 -6,5,5,3 7,-7,7,-6 6,3,0,0 5,8,-7,-2 0,6,-3,8 -6,-4,3,7 0,2,6,6 2,-1,8,-3 2,0,4,7 8,-2,0,-7 6,-1,-2,6 -7,-1,-4,0 0,-8,1,4 -6,-7,8,0 8,-2,-2,-8 -4,-5,-4,-3 -7,2,6,-8 -5,8,-4,-8 8,4,6,5 4,8,-7,-2 -2,7,6,0 4,-3,-5,8 2,-3,-5,3 3,-5,0,-7 6,4,-7,3 -6,7,2,-3 2,-4,2,-6 7,-4,4,-2 2,-7,8,-4 5,-1,-3,6 -5,8,-1,-4 -5,0,7,-2 -6,0,5,0 6,7,7,5 -1,1,4,-8 -6,-6,4,-7 5,3,-8,0 -2,-5,1,-8 -2,-8,-5,-4 8,-4,5,7 0,-7,-6,-2 -1,3,1,-8 3,2,0,3 4,4,5,0 8,0,0,0 -4,0,2,5 7,-3,2,1 -3,-6,-1,6 2,-5,3,0 -6,-8,-4,-7 7,-5,-1,3 -3,-6,-6,3 4,0,2,7 0,0,4,6 8,8,0,5 6,-4,3,4 -5,8,-3,-7 1,8,-3,0 8,7,6,1 -4,2,8,-2 -6,4,3,2 -5,0,4,-3 6,1,7,0 -3,-8,8,-5 0,0,-8,-6 6,-3,-1,7 -6,-7,-6,1 -3,5,7,3 2,4,-8,7 6,0,0,-2 -7,-3,-4,-3 5,-5,-6,-6 3,3,-5,-5 -8,0,2,5 0,-4,-5,0 6,4,2,-8 2,-1,6,-3 -1,8,-3,2 -2,-6,8,-1 -7,-8,3,6 7,4,-5,-2 -3,-6,8,-7 -7,-1,-3,3 8,8,0,-1 4,2,5,-8 0,-4,4,7 1,-5,4,2 3,-8,3,-2 1,0,-5,0 -1,0,0,2 -2,-5,8,7 -5,8,-5,-4 7,-1,-5,4 -2,-8,-7,1 1,-8,-1,3 -8,-8,0,-4 -5,0,6,-7 -2,-8,-1,-4 0,0,-1,8 -8,-8,-2,-1 6,-2,0,-3 -1,-8,1,7 2,1,-4,-1 3,1,-5,5 0,2,-1,7 -1,-5,-5,2 4,0,-1,3 2,2,6,3 -2,2,5,4 8,4,2,3 0,8,0,3 1,1,7,3 -4,0,-6,-3 0,6,-8,2 8,3,-2,-5 -1,4,-5,7 -1,2,3,0 0,1,6,0 3,-7,6,-4 -6,-8,-6,8 -4,-6,1,-3 7,-7,-4,7 0,-2,-7,0 7,0,-7,3 6,-1,-2,4 3,-4,4,-7 3,0,6,-6 -5,6,8,-7 1,-2,6,-6 -1,-8,-1,-8 -3,-6,-5,5 3,2,3,7 -4,-3,2,-1 -7,-5,-3,5 -4,-4,-8,2 -1,2,0,0 -6,5,2,8 -6,-7,-6,-6 4,-1,4,-3 -2,-7,-5,5 0,6,1,-3 0,-1,2,-7 0,-5,-3,7 1,0,-2,5 0,-7,1,3 2,-1,-8,7 0,1,6,3 -2,-8,4,4 3,-6,-2,-5 6,-5,2,5 -5,0,-3,-5 0,-4,-3,-7 5,2,5,1 5,0,-7,6 -7,-1,1,7 -6,4,1,7 -7,6,-7,-2 6,2,5,1 2,7,-4,3 5,8,-4,7 -6,7,-6,-6 -2,3,-6,-1 -6,1,-8,8 8,-8,2,-4 -8,0,6,1 1,6,-7,-5 0,-3,-3,-8 -6,1,1,1 2,8,2,7 -1,-3,5,4 -5,6,-5,2 7,7,-6,-5 4,-2,3,2 5,0,3,-4 -6,-8,5,4 -5,-2,-5,-2 6,-7,7,-4 7,8,7,8 6,-6,7,8 0,-5,-8,-3 3,7,-8,0 4,-1,-4,4 3,-5,6,5 -6,7,-7,3 2,-2,0,-6 -8,-6,7,8 -7,6,1,-1 -7,8,-1,-7 3,0,0,8 -7,1,6,7 7,-8,4,0 2,-4,0,-5 -7,8,6,1 -2,0,3,8 0,-8,3,2 -4,4,5,-4 3,3,-5,-1 0,4,5,4 7,8,2,6 2,0,0,2 8,2,-3,-2 8,2,5,-7 0,1,0,0 6,6,7,0 -7,0,-5,5 0,7,-5,-1 -3,-1,-3,-2 -1,5,0,-7 2,-2,-1,8 4,4,0,-5 2,0,7,7 7,6,8,2 0,1,5,7 5,3,-8,-5 1,8,-1,4 2,0,7,-3 7,2,-2,7 5,0,0,3 -3,-7,-2,2 -4,0,0,4 1,-7,-7,8 0,0,0,7 0,-3,-3,-1 -8,7,8,-1 4,-8,-8,7 2,2,3,-4 -7,7,-2,3 8,0,4,4 -5,-7,5,-2 1,0,-1,-5 6,-7,1,2 -4,0,-4,7 -7,5,3,2 -1,0,3,-3 -8,-2,4,-1 -7,-8,-2,7 -4,6,-3,-6 7,0,7,-4 2,-8,0,3 5,0,4,3 6,-5,-8,2 8,0,-6,2 0,6,3,-3 6,-8,0,6 -5,7,4,-1 5,6,3,-1 -3,-7,0,0 5,-5,0,4 7,2,-4,-2 -7,-5,-7,3 -3,2,6,0 8,-4,6,0 -4,2,-1,-1 -2,8,-5,-3 -5,-6,-4,-2 3,2,2,3 6,0,-2,0 -4,8,7,4 -2,6,-5,-3 6,4,0,5 5,0,1,3 4,3,6,1 -2,-4,-3,0 -5,1,0,7 0,-6,-1,-6 7,6,-1,-4 -4,6,4,2 7,1,4,-8 -1,2,8,-1 1,7,4,-4 5,3,-1,-8 -8,-2,-3,5 -1,-7,-8,5 -5,-8,-3,4 1,0,3,-8 -8,6,-5,0 -7,-6,5,5 3,2,5,5 0,8,4,5 0,5,0,-3 -1,1,-8,-6 2,0,-4,6 -2,-5,4,8 6,6,-5,2 0,6,-1,6 8,-8,2,-8 -3,4,-1,5 -2,-6,7,4 4,5,8,-1 -3,8,7,-7 5,4,4,6 3,-2,-1,-6 -7,8,-7,-2 0,-1,-8,-7 -3,-3,5,6 3,4,7,-8 -5,1,-6,5 -6,7,1,2 3,6,-2,5 3,0,4,3 2,-3,7,-2 4,-8,-7,7 -3,8,3,5 -7,-5,1,3 -5,-5,0,-6 -8,-6,-7,8 4,-1,-6,-4 5,3,2,-1 -4,1,6,5 -8,4,3,-1 8,7,5,-5 8,4,-4,7 1,-6,-1,3 8,4,-7,-2 -2,2,-4,6 -3,-2,3,1 -3,3,-3,3 -8,1,5,-8 -2,4,-5,-6 7,1,0,-6 1,-7,-4,-8 8,0,1,-3 -8,5,-2,4 6,7,-1,8 -5,4,-1,0 -8,-7,1,4 8,4,-3,-4 -7,-3,8,-3 -2,-2,2,1 -1,7,-7,4 4,8,5,4 -7,8,3,5 5,7,6,5 -1,1,-2,-5 -1,-6,0,-2 -2,8,4,-1 0,3,-2,7 -1,-2,1,2 3,-5,6,-8 4,-3,4,-6 0,-4,-8,-1 -5,-2,-8,0 -5,-5,-3,-4 2,2,3,-6 0,-5,0,2 -3,-8,0,0 3,2,-1,8 0,8,1,2 7,3,6,5 -1,0,0,3 -8,6,1,-3 -3,-3,-6,3 -7,-2,7,1 8,-6,3,4 5,1,7,7 -2,2,3,-8 -7,-8,-2,2 2,8,-4,-1 7,3,-7,-7 -4,3,7,-3 -4,-4,4,7 7,-1,-1,2 3,8,-3,-2 -5,-7,-2,-4 -1,-6,-5,-5 4,8,0,6 5,0,1,-7 -6,6,-2,0 -1,5,0,0 4,-2,0,7 -2,2,-1,1 2,-6,4,2 -2,5,-1,-2 -8,8,0,7 0,-1,-7,-6 3,-6,1,-6 -1,-3,-5,-3 -7,8,-5,0 2,1,6,-7 6,2,4,-7 -1,-6,-7,4 -5,4,6,-3 5,1,-6,-7 7,1,-4,-6 -4,-8,-5,2 1,-1,-3,-6 2,-5,-5,-5 -6,4,-6,-1 -7,1,-4,-4 8,6,0,5 4,7,0,8 8,-8,-4,-6 2,3,0,-7 -2,4,0,2 -3,-7,2,0 8,0,-2,1 -3,-1,-2,8 -3,5,3,8 0,-5,-1,6 5,1,2,7 6,4,1,3 0,4,6,4 -6,0,-4,3 -8,-1,5,-7 -1,4,-2,0 1,-8,-3,-1 0,2,-6,5 -7,-2,-6,-2 8,6,8,6 -8,-4,0,5 1,3,-6,-4 -4,3,-5,7 7,-5,0,0 -8,4,2,-8 5,7,-5,0 7,-7,-5,-4 -5,0,1,-3 -4,6,1,-3 4,-3,3,4 0,0,8,0 5,-6,-6,-6 -3,7,7,-2 -7,-3,8,-4 6,8,-7,-4 -6,6,-4,0 4,0,8,3 2,8,0,-6 0,-6,6,-3 0,-6,3,-8 0,0,0,-7 -1,7,-7,3 -5,-4,-1,-7 -8,-3,-1,8 6,5,-7,3 -4,4,0,-4 -3,-8,5,7 0,1,0,-4 7,-3,0,-5 -2,-4,0,2 -5,4,2,3 -3,-8,-7,-5 4,-5,3,0 1,-4,1,2 -6,2,3,8 2,-6,-7,-2 0,-6,-4,8 1,5,0,-6 1,3,-7,-2 6,3,-5,3 -1,8,6,-4 3,3,3,0 -2,0,-3,4 2,0,7,4 6,8,-8,7 8,6,-1,-6 -7,-5,5,-2 0,8,-2,-3 -7,2,2,-8 8,5,-4,4 -5,7,5,-1 7,6,0,8 1,-7,3,-6 2,7,-6,0 3,3,0,-4 5,7,-8,5 4,-8,8,0 -7,-2,2,5 -1,7,1,3 -8,-8,2,-6 6,0,2,2 1,2,4,-4 7,-8,-2,5 1,3,5,0 -6,4,-5,1 -4,0,-5,1 4,-3,-4,8 -6,-1,5,8 7,-8,7,8 -3,-7,-6,8 -8,-4,0,0 -3,5,2,7 -3,7,-3,0 -3,0,0,5 8,3,8,4 -6,0,0,7 6,5,2,-2 -3,-8,4,0 4,-2,0,2 -1,0,0,7 -4,4,-4,6 -5,-6,2,1 -7,0,-7,3 -7,3,4,-7 -7,-8,2,2 1,-4,5,-7 -2,5,3,-8 5,1,-7,2 -2,-3,3,1 -1,0,5,-3 -2,5,1,0 -3,-6,0,-1 5,0,7,-7 8,8,-1,-2 -7,-3,2,1 -2,2,3,0 -8,-4,-8,-6 0,8,-3,0 4,0,6,-5 -6,2,7,1 2,3,1,-6 -5,-1,-4,1 4,-2,2,-6 5,4,5,-6 -3,5,1,-6 -4,-5,4,-2 1,-5,4,-3 8,-5,6,-4 1,4,5,-1 -5,-4,-2,3 0,3,3,-6 -5,-6,0,-8 -3,5,3,-1 0,0,7,6 -4,-6,5,5 -4,-1,-2,-6 -7,-6,-4,-8 2,-4,0,3 4,-2,0,5 -3,1,-7,-7 -7,0,-4,4 -6,-1,7,-8 -5,2,7,8 3,-7,-2,5 2,-3,-3,2 2,3,7,-4 -7,4,-2,8 -3,1,-3,3 -6,-8,7,-2 -5,0,-8,-4 5,8,0,4 -5,-8,-4,5 -3,-7,5,6 -5,7,0,7 -2,-4,-6,-3 -8,-8,-2,2 -6,-1,-1,8 -3,0,4,5 8,0,-8,7 4,8,0,-6 0,3,-8,7 0,-8,-5,-4 0,0,6,6 -5,-7,0,-7 -3,-1,3,4 3,3,-7,6 7,6,7,0 -1,-3,-4,-8 6,8,-1,-6 3,-8,7,1 -8,-5,1,-3 -8,-5,-3,-8 -6,-5,-3,-1 5,0,-5,0 -5,0,0,7 8,-8,6,8 4,6,4,-3 2,2,6,1 0,0,-4,3 -6,-4,2,6 7,0,-6,-3 8,3,-2,2 -7,1,5,-3 7,7,-1,0 -1,3,5,0 -4,-5,-3,-1 -1,-4,6,4 5,0,3,6 -1,2,-6,1 8,-3,4,-2 4,0,-2,7 0,7,0,-3 0,8,2,4 0,-7,-7,8 -2,-8,7,3 -1,7,-8,8 7,4,-1,5 -5,1,-8,-2 -6,6,-5,-4 -8,3,2,-3 -4,-3,0,3 -4,-4,-1,0 3,7,1,2 -5,-6,-1,-4 5,7,8,8 2,-2,-8,-6 0,7,-6,1 5,-8,-3,-2 -3,7,-8,-4 -8,0,3,0 0,-2,7,0 5,7,4,7 8,2,-5,-5 -1,-4,-3,6 0,1,6,7 -8,0,7,0 2,-7,-6,-1 -1,0,-8,1 -8,6,-3,6 7,7,-6,8 1,-3,1,-5 -4,3,0,1 3,7,-7,-8 0,-7,4,3 -5,-2,4,5 0,-5,3,0 -5,4,0,0 2,-5,-6,2 -1,-2,0,4 0,5,4,0 -5,-1,4,8 7,0,0,2 0,2,-6,0 1,-3,7,0 -6,0,-8,8 -6,-5,8,7 -1,-3,1,8 1,0,-2,-6 6,4,6,-3 -1,-5,-5,5 -4,7,3,-5 0,-4,3,6 -6,3,3,8 5,1,3,8 -8,-2,6,-3 -2,-7,7,-7 7,-4,-3,5 7,-1,4,0 -8,-3,1,-4 -1,0,-1,1 -4,-6,-2,2 0,-6,-3,-6 0,-5,1,8 4,8,-5,-3 -2,-4,6,1 -5,0,4,-4 6,1,-8,7 8,-4,0,-1 -2,-5,-7,-7 -3,3,-7,-7 8,-2,-6,5 -3,0,-5,-3 -8,5,0,0 -4,0,4,0 -7,2,8,1 0,-4,6,-1 -8,-3,6,-8 5,0,4,-3 -4,-6,0,0 6,2,0,0 8,2,2,7 8,-1,0,-2 6,8,0,0 -3,2,2,-5 0,-6,-1,5 0,-5,7,0 0,1,8,8 -1,2,5,4 -2,-1,5,1 -4,-6,6,5 -8,-8,8,2 0,-6,-1,6 -5,0,7,-4 7,0,-3,-8 -2,-5,1,2 0,-2,7,-2 -2,-8,-3,-3 -7,3,4,6 7,2,-8,2 4,6,-5,-3 -1,-1,-7,0 5,-8,-7,6 4,5,-3,0 0,-6,1,-4 -1,2,4,-3 -2,5,-7,0 -5,4,0,4 3,-1,-7,3 4,5,5,7 2,2,-8,7 5,7,-3,1 -6,5,-5,-3 7,-2,-2,-3 2,-4,0,-4 -3,0,-3,-8 1,-4,4,0 -2,0,3,-8 -2,3,-5,-8 -5,7,-6,-6 -4,0,5,-3 4,-4,-2,-2 -8,-8,8,-4 5,-2,0,1 8,1,-7,0 -7,-4,-4,-2 1,5,0,-4 3,4,7,-4 8,5,-7,0 8,-8,7,-6 5,6,8,8 -7,1,-4,-1 -8,2,3,-4 4,7,-3,-2 2,-8,-4,-2 -5,4,1,4 0,6,-4,-1 -7,-1,0,1 5,-4,-7,-3 -2,0,2,3 1,0,-2,-7 0,-7,2,-1 -5,3,-4,1 -1,-8,1,-4 -2,-8,-6,-5 7,-6,2,-3 -7,4,6,-6 5,7,6,-3 8,0,1,-7 4,-2,-7,-3 8,-5,-6,3 -1,7,-1,-2 -8,4,5,3 1,-3,4,-8 -2,-5,-5,-8 2,1,3,8 4,7,7,0 1,2,0,0 -5,2,-1,7 3,8,-6,3 0,6,2,3 7,4,-8,0 -1,-5,0,3 -1,0,0,0 -2,1,-2,5 1,2,4,-3 2,4,0,-4 6,0,7,-2 4,-2,-2,-7 3,-2,-2,3 8,-5,-8,-7 4,-2,0,-7 6,3,-7,0 -7,-8,-4,0 6,0,4,-3 4,-4,0,4 1,0,-1,-1 -4,3,-2,2 -8,-8,2,2 -3,-3,-5,-2 -1,0,5,1 -2,8,5,-8 6,-8,5,6 -2,-6,-1,-3 5,2,-5,-6 -6,-3,-2,-1 -2,3,3,4 2,-3,-7,4 2,-1,-7,0 -1,-2,-4,-8 -3,-7,7,3 -4,7,8,7 -8,1,-1,0 -6,8,2,-6 5,-3,7,-8 -7,6,6,-3 0,1,2,-7 3,0,-7,-2 2,-7,1,6 5,-3,0,5 6,-2,-1,-4 4,-3,-5,0 -8,1,-2,-2 0,5,0,2 -7,6,2,8 4,1,5,7 -2,0,6,-3 7,-2,5,0 -3,0,-4,4 -7,5,0,0 -4,2,0,-1 3,-8,1,-5 -2,-4,0,5 -3,6,-6,-6 3,1,-1,-7 -4,0,1,-6 -7,0,-4,-8 -7,-4,8,-6 -3,-4,-8,5 2,-7,-1,-5 1,8,0,0 7,2,6,-4 0,6,-4,-2 0,-1,7,4 3,6,0,8 3,7,-1,-1 -2,-1,-2,-2 -4,5,7,4 4,-2,-6,-4 -6,3,-5,4 -8,-7,-4,7 2,6,3,-5 -4,-8,4,8 -3,5,1,0 1,1,5,-2 7,5,1,-7 1,0,-8,-4 0,-2,-1,3 0,0,3,-7 7,2,1,0 1,-2,8,-6 -3,0,0,-8 -7,0,-2,0 1,1,-2,7 8,4,6,4 -7,7,5,5 0,-1,-7,-3 -3,0,-3,-2 -8,-7,6,-3 4,-2,4,1 -2,-8,5,-4 -8,-3,7,2 0,7,2,-1 8,7,-1,-5 1,0,-3,-4 0,3,0,-4 0,-2,0,5 -2,5,-2,-1 -8,-8,4,8 -8,5,4,-8 0,2,-1,2 -4,0,8,7 0,-4,1,-2 3,1,-1,1 2,-6,7,3 5,-4,1,-2 -5,-8,-2,-6 2,5,3,4 3,0,8,3 8,6,-5,-4 -7,8,6,2 -5,-8,8,1 6,1,1,-8 0,-1,2,8 -5,0,-7,-6 2,-4,8,6 -6,0,1,0 3,6,-8,-3 -1,5,5,1 -3,0,7,8 -4,0,-8,0 5,-3,8,-6 -8,-6,0,-3 -6,7,3,2 3,-8,-1,-1 4,0,-6,-3 2,-3,8,1 -5,3,0,2 -2,7,-3,-1 0,-5,-1,3 0,2,2,8 7,-6,-6,-7 -5,-5,-4,7 5,2,-4,-6 0,6,-6,-4 -1,7,-2,0 -6,-7,-1,-3 -1,1,-3,1 2,-4,-1,0 6,2,3,-2 -7,-5,-2,-7 -4,-1,-5,4 0,4,5,6 -7,-4,1,-7 0,-1,-8,8 7,-1,1,-7 -1,1,0,0 -4,-3,2,-8 -8,-8,-5,6 4,3,-4,6 -2,8,7,-5 2,0,7,-6 2,-1,-7,-8 8,4,-5,-6 -6,1,-5,-3 -7,1,-2,0 -7,-6,0,-7 1,0,-8,1 0,-2,6,0 0,1,5,0 8,-5,-1,5 0,-2,-7,2 0,-5,4,-3 -6,7,3,3 3,-7,-6,0 -4,3,-5,0 7,-5,4,1 8,3,0,7 -4,4,3,6 8,1,-6,-3 3,-3,-6,-4 -5,0,-6,-6 2,5,-8,3 -6,6,-6,-1 6,-2,0,-6 4,7,-3,-8 2,6,-6,0 -8,0,6,0 -5,-5,0,-3 -3,4,1,-8 -5,-1,4,-4 0,0,-5,-7 -6,1,-2,0 5,-3,-1,-4 -7,-8,5,-1 -8,-5,6,1 6,5,-1,-5 8,-5,-6,6 -2,-1,-4,7 3,-4,4,1 8,0,6,-6 -7,7,-4,-5 -5,5,-6,3 -8,1,7,-3 -1,-4,0,6 3,3,2,3 7,4,-7,-8 -8,8,6,-3 -1,-8,0,8 -8,7,-4,-6 0,5,7,0 -6,-6,3,1 -8,-3,6,7 -2,-2,-2,-7 -4,6,5,-2 2,-2,7,8 -7,8,0,2 2,2,-6,5 6,-3,-3,-8 -2,-3,-8,7 -6,8,-5,-1 4,-2,-3,-3 -8,-4,-6,4 -8,4,-2,1 -4,-1,-2,7 5,6,-2,-5 0,2,6,-2 0,0,3,0 4,8,-3,-1 4,7,0,-1 -6,3,7,8 -4,2,-2,-2 5,8,7,5 6,8,1,2 0,2,0,-4 5,4,-6,-5 6,-8,4,1 0,-7,-3,-5 1,3,-3,6 5,8,2,-4 -4,0,5,8 2,0,2,3 8,-2,-5,3 -8,-5,2,8 -2,0,-7,-6 -4,-8,8,2 6,-8,5,-2 -4,6,0,8 -3,-8,4,6 1,7,3,7 6,-7,-5,3 -8,0,0,0 6,-1,-2,-1 -2,7,4,6 -7,-5,2,-8 1,-6,-5,-1 -5,-3,0,-8 8,-7,8,-6 0,5,-7,0 7,3,5,4 4,-2,2,1 -5,-6,2,-8 -8,4,-4,-3 0,2,6,2 -4,-8,0,5 -2,-4,-5,-8 -4,2,0,-8 2,-7,-4,6 -8,-8,-7,4 7,0,2,7 -1,-2,1,-5 -7,-5,1,-7 4,-4,-6,6 5,-3,6,-1 2,2,-1,-4 8,4,8,2 3,8,0,-4 -1,-7,0,6 -1,7,6,-7 8,5,0,2 -6,0,-3,4 -6,-3,3,0 -7,-1,-6,-7 -1,4,-3,1 -2,-7,2,8 -8,-3,8,0 -4,-7,-6,0 -7,-1,-6,2 -3,-1,4,6 0,-1,7,-4 -8,-8,-8,1 4,-5,4,1 -5,-7,-3,2 -1,-3,4,-1 -4,-1,-3,-3 0,-3,8,5 3,0,-8,-1 8,-8,1,-7 -2,7,-3,7 -1,0,-5,4 -3,-2,4,-3 -7,-8,6,-5 2,5,3,5 3,-7,8,-3 -5,4,2,-6 1,-4,0,-2 0,-8,-5,6 8,-8,-6,-2 -7,-6,7,2 -5,-3,-1,-8 8,5,-5,-8 3,6,8,4 0,1,4,7 -1,7,4,-7 -6,-1,-6,6 2,3,0,-5 -4,0,-5,0 3,-7,-2,-3 8,0,-4,-6 4,-2,-4,7 -7,-6,4,-8 -4,-3,2,-6 0,-7,-3,3 -7,6,-4,-5 0,4,1,-6 -3,1,0,5 -7,-8,-1,0 4,5,8,3 4,6,7,-1 5,-5,-1,0 4,-4,2,-2 0,8,-8,-1 2,-7,-7,7 5,0,-6,0 -4,4,5,8 -4,-5,5,6 8,-5,-5,0 5,2,-3,-2 -5,-2,6,6 6,3,-7,-4 1,-1,-1,-5 -5,-2,-7,1 -5,3,1,-6 3,6,6,3 -2,-4,5,-5 4,-6,7,5 6,8,8,8 2,-2,4,1 -3,2,-7,7 8,-1,-4,-5 6,2,2,7 0,-8,-5,-7 -8,5,6,2 5,5,6,8 -3,-4,5,-4 1,-1,8,4 6,-3,-1,-2 -1,4,6,-7 2,4,0,6 -7,1,-6,6 -7,-8,8,-8 0,3,-2,-8 1,-5,-1,5 -5,-3,-7,5 5,-4,-4,7 -4,0,0,-4 -7,-5,-7,8 -7,2,-1,-4 4,-3,-3,0 3,-2,-6,-1 8,-1,-1,-2 -5,-5,5,-7 -7,0,-3,1 -2,-7,-1,-6 8,-4,-6,-7 5,-3,6,5 7,-1,3,-3 -7,0,7,-1 -6,-8,-5,6 -8,0,-3,4 4,-2,3,-5 3,6,7,3 0,-3,-7,2 4,-6,-7,2 1,-7,0,0 8,0,-7,-4 6,-1,8,4""" def distance(a, b): return abs(a[0] - b[0]) + abs(a[1] - b[1]) + abs(a[2] - b[2]) + abs(a[3] - b[3]) constellations = [] for line in input.splitlines(): x, y, z, t = (int(a) for a in line.split(",")) s = (x, y, z, t) matching_constellations = [] for c in constellations: for star in c: if distance(star, s) <= 3: matching_constellations.append(c) break resulting_constellation = {s} for c in matching_constellations: constellations.remove(c) resulting_constellation |= c constellations.append(frozenset(resulting_constellation)) print(len(constellations))
input = '-1,-1,-4,-1\n0,1,8,5\n-8,-5,2,-6\n-1,3,-2,-3\n-1,1,6,1\n4,-8,0,-5\n8,-6,-5,6\n-6,2,-8,1\n-5,6,-2,-5\n3,-8,0,0\n-3,-7,-5,-6\n-1,-4,-7,-5\n-7,3,1,-6\n-5,4,-4,0\n1,-7,0,-6\n4,-1,1,2\n6,1,6,-2\n2,1,-8,-6\n-7,-7,3,2\n-8,-5,8,-5\n-8,2,-1,4\n8,6,7,1\n6,-4,-1,-7\n8,-2,2,4\n0,8,8,3\n6,-7,-6,8\n-8,-2,8,6\n1,-8,-6,-8\n-6,-1,5,-6\n5,2,7,-3\n5,7,0,-3\n0,2,-8,7\n1,8,2,8\n5,-5,3,-3\n7,-5,1,-8\n-3,1,-3,1\n0,-5,5,5\n1,-5,2,-4\n7,-6,1,0\n8,-3,-3,-4\n4,0,2,-6\n6,7,-6,-5\n4,8,0,-5\n-3,-1,-1,0\n4,-4,4,-4\n-3,4,-3,-4\n0,-8,-6,6\n-4,3,-4,0\n-1,3,2,0\n1,6,5,-3\n-8,-8,3,8\n0,-1,5,-1\n-6,0,-5,2\n4,-4,0,-2\n4,0,-8,-3\n-6,-6,-2,-3\n3,-5,0,-3\n3,-3,6,1\n-1,0,1,6\n-7,-1,-2,-1\n-1,4,-7,0\n-1,7,8,-8\n-1,-1,3,0\n-8,8,-3,2\n-8,5,-1,8\n0,-1,0,2\n-1,7,5,6\n-4,6,-6,2\n-3,-5,3,-4\n8,-7,-4,6\n1,-3,0,5\n-1,-6,-5,-8\n1,8,-2,-7\n-8,7,-1,6\n3,2,0,6\n-5,-8,7,-5\n5,-6,-6,-5\n-7,-7,2,-8\n-2,-1,2,-1\n6,-5,-6,-4\n7,7,-1,-5\n8,5,3,4\n-5,-5,-5,-3\n0,6,-4,3\n-7,5,4,-4\n6,-8,-1,-8\n8,3,-8,1\n3,-3,0,-2\n8,3,4,8\n0,-6,-1,-4\n5,-2,6,-2\n1,-2,0,-3\n-3,-7,-3,8\n-1,-6,0,0\n6,-1,-3,4\n2,2,-5,0\n3,8,2,-7\n-6,-3,-4,6\n-2,-4,-5,-3\n-6,-7,-2,7\n4,7,8,1\n-4,5,-6,5\n0,6,1,1\n-1,3,8,-6\n-2,5,-4,-3\n-5,0,8,7\n-1,7,-5,6\n7,1,-4,4\n-6,-3,-7,8\n3,-8,1,8\n-6,-3,6,0\n-7,0,-8,3\n1,1,1,-6\n-8,5,3,6\n7,-2,2,-1\n3,-8,-1,-4\n0,1,2,-5\n-4,2,0,3\n-2,-5,6,-8\n-1,-6,1,6\n4,7,1,-2\n5,-5,7,-4\n4,4,7,-2\n-8,2,2,7\n-2,7,4,-2\n6,-8,7,-7\n-3,2,5,1\n-6,2,5,-2\n-3,6,8,-1\n-5,2,4,7\n-3,5,5,-6\n0,8,5,7\n-5,-5,0,-1\n8,6,3,-8\n-6,-2,7,2\n-8,8,-2,-3\n1,4,4,3\n-3,3,-3,-1\n-6,-4,8,-3\n-3,1,-6,0\n3,-6,-7,-6\n-7,0,-7,7\n-7,-2,1,1\n-8,7,-4,5\n-5,0,2,0\n-6,-5,8,0\n5,1,2,6\n-4,-2,-7,-6\n2,5,-2,-6\n-3,-3,-8,8\n8,-1,0,-1\n8,-3,-6,0\n-7,0,4,-7\n1,6,-6,6\n6,6,0,-7\n6,8,-5,-5\n7,-3,-2,-6\n5,-2,-8,2\n-1,4,-5,-8\n3,6,-2,0\n-2,-5,0,-8\n-1,0,2,4\n0,7,8,0\n2,-5,-4,7\n2,5,1,3\n0,4,0,1\n-5,-2,2,-5\n2,-7,-6,-5\n-1,-7,8,-5\n2,-3,-1,-5\n5,-2,-8,-4\n-3,-1,2,4\n-2,-5,6,-5\n0,2,-6,6\n2,1,-4,1\n-3,1,-3,8\n-4,-8,7,6\n6,-8,2,6\n-4,-4,-6,-2\n4,-8,-8,-2\n-8,-7,-4,0\n-5,2,-5,-6\n3,-7,-7,6\n-8,-3,7,6\n2,5,-1,8\n8,5,-4,0\n-7,0,-3,-7\n-1,3,-7,-7\n-2,0,7,6\n2,-1,-2,-7\n1,0,0,-6\n0,-1,-1,-7\n1,-1,2,-4\n8,-7,-5,-8\n-3,2,-8,2\n-5,5,3,-8\n-4,2,-8,-4\n-8,7,2,1\n8,-4,-2,0\n0,-8,1,-8\n-3,1,4,-8\n-8,-7,-1,0\n8,7,-4,7\n-1,5,6,-4\n4,2,-3,8\n3,0,-5,-4\n-3,5,-3,-5\n8,4,0,-1\n-1,-5,7,7\n2,5,-6,-2\n0,-6,-7,-5\n0,0,4,1\n0,-2,6,-2\n-5,-1,2,6\n-5,1,-8,-6\n1,-1,-1,5\n-7,6,7,-1\n-8,6,-7,0\n-5,-5,-2,-7\n-5,-2,5,8\n1,1,6,4\n-1,2,6,8\n2,-7,-2,0\n3,0,1,2\n-6,5,-1,-5\n7,-5,-8,-5\n-2,-1,3,4\n-6,-7,-5,8\n-3,0,-5,5\n6,-7,4,-5\n3,0,8,-5\n4,-4,-4,0\n6,-6,3,-1\n-6,-7,7,-7\n-1,-6,-6,-7\n7,0,2,4\n-8,-2,4,-8\n8,5,8,0\n-2,4,5,-4\n5,0,0,2\n0,6,7,3\n1,7,8,2\n-6,5,5,3\n7,-7,7,-6\n6,3,0,0\n5,8,-7,-2\n0,6,-3,8\n-6,-4,3,7\n0,2,6,6\n2,-1,8,-3\n2,0,4,7\n8,-2,0,-7\n6,-1,-2,6\n-7,-1,-4,0\n0,-8,1,4\n-6,-7,8,0\n8,-2,-2,-8\n-4,-5,-4,-3\n-7,2,6,-8\n-5,8,-4,-8\n8,4,6,5\n4,8,-7,-2\n-2,7,6,0\n4,-3,-5,8\n2,-3,-5,3\n3,-5,0,-7\n6,4,-7,3\n-6,7,2,-3\n2,-4,2,-6\n7,-4,4,-2\n2,-7,8,-4\n5,-1,-3,6\n-5,8,-1,-4\n-5,0,7,-2\n-6,0,5,0\n6,7,7,5\n-1,1,4,-8\n-6,-6,4,-7\n5,3,-8,0\n-2,-5,1,-8\n-2,-8,-5,-4\n8,-4,5,7\n0,-7,-6,-2\n-1,3,1,-8\n3,2,0,3\n4,4,5,0\n8,0,0,0\n-4,0,2,5\n7,-3,2,1\n-3,-6,-1,6\n2,-5,3,0\n-6,-8,-4,-7\n7,-5,-1,3\n-3,-6,-6,3\n4,0,2,7\n0,0,4,6\n8,8,0,5\n6,-4,3,4\n-5,8,-3,-7\n1,8,-3,0\n8,7,6,1\n-4,2,8,-2\n-6,4,3,2\n-5,0,4,-3\n6,1,7,0\n-3,-8,8,-5\n0,0,-8,-6\n6,-3,-1,7\n-6,-7,-6,1\n-3,5,7,3\n2,4,-8,7\n6,0,0,-2\n-7,-3,-4,-3\n5,-5,-6,-6\n3,3,-5,-5\n-8,0,2,5\n0,-4,-5,0\n6,4,2,-8\n2,-1,6,-3\n-1,8,-3,2\n-2,-6,8,-1\n-7,-8,3,6\n7,4,-5,-2\n-3,-6,8,-7\n-7,-1,-3,3\n8,8,0,-1\n4,2,5,-8\n0,-4,4,7\n1,-5,4,2\n3,-8,3,-2\n1,0,-5,0\n-1,0,0,2\n-2,-5,8,7\n-5,8,-5,-4\n7,-1,-5,4\n-2,-8,-7,1\n1,-8,-1,3\n-8,-8,0,-4\n-5,0,6,-7\n-2,-8,-1,-4\n0,0,-1,8\n-8,-8,-2,-1\n6,-2,0,-3\n-1,-8,1,7\n2,1,-4,-1\n3,1,-5,5\n0,2,-1,7\n-1,-5,-5,2\n4,0,-1,3\n2,2,6,3\n-2,2,5,4\n8,4,2,3\n0,8,0,3\n1,1,7,3\n-4,0,-6,-3\n0,6,-8,2\n8,3,-2,-5\n-1,4,-5,7\n-1,2,3,0\n0,1,6,0\n3,-7,6,-4\n-6,-8,-6,8\n-4,-6,1,-3\n7,-7,-4,7\n0,-2,-7,0\n7,0,-7,3\n6,-1,-2,4\n3,-4,4,-7\n3,0,6,-6\n-5,6,8,-7\n1,-2,6,-6\n-1,-8,-1,-8\n-3,-6,-5,5\n3,2,3,7\n-4,-3,2,-1\n-7,-5,-3,5\n-4,-4,-8,2\n-1,2,0,0\n-6,5,2,8\n-6,-7,-6,-6\n4,-1,4,-3\n-2,-7,-5,5\n0,6,1,-3\n0,-1,2,-7\n0,-5,-3,7\n1,0,-2,5\n0,-7,1,3\n2,-1,-8,7\n0,1,6,3\n-2,-8,4,4\n3,-6,-2,-5\n6,-5,2,5\n-5,0,-3,-5\n0,-4,-3,-7\n5,2,5,1\n5,0,-7,6\n-7,-1,1,7\n-6,4,1,7\n-7,6,-7,-2\n6,2,5,1\n2,7,-4,3\n5,8,-4,7\n-6,7,-6,-6\n-2,3,-6,-1\n-6,1,-8,8\n8,-8,2,-4\n-8,0,6,1\n1,6,-7,-5\n0,-3,-3,-8\n-6,1,1,1\n2,8,2,7\n-1,-3,5,4\n-5,6,-5,2\n7,7,-6,-5\n4,-2,3,2\n5,0,3,-4\n-6,-8,5,4\n-5,-2,-5,-2\n6,-7,7,-4\n7,8,7,8\n6,-6,7,8\n0,-5,-8,-3\n3,7,-8,0\n4,-1,-4,4\n3,-5,6,5\n-6,7,-7,3\n2,-2,0,-6\n-8,-6,7,8\n-7,6,1,-1\n-7,8,-1,-7\n3,0,0,8\n-7,1,6,7\n7,-8,4,0\n2,-4,0,-5\n-7,8,6,1\n-2,0,3,8\n0,-8,3,2\n-4,4,5,-4\n3,3,-5,-1\n0,4,5,4\n7,8,2,6\n2,0,0,2\n8,2,-3,-2\n8,2,5,-7\n0,1,0,0\n6,6,7,0\n-7,0,-5,5\n0,7,-5,-1\n-3,-1,-3,-2\n-1,5,0,-7\n2,-2,-1,8\n4,4,0,-5\n2,0,7,7\n7,6,8,2\n0,1,5,7\n5,3,-8,-5\n1,8,-1,4\n2,0,7,-3\n7,2,-2,7\n5,0,0,3\n-3,-7,-2,2\n-4,0,0,4\n1,-7,-7,8\n0,0,0,7\n0,-3,-3,-1\n-8,7,8,-1\n4,-8,-8,7\n2,2,3,-4\n-7,7,-2,3\n8,0,4,4\n-5,-7,5,-2\n1,0,-1,-5\n6,-7,1,2\n-4,0,-4,7\n-7,5,3,2\n-1,0,3,-3\n-8,-2,4,-1\n-7,-8,-2,7\n-4,6,-3,-6\n7,0,7,-4\n2,-8,0,3\n5,0,4,3\n6,-5,-8,2\n8,0,-6,2\n0,6,3,-3\n6,-8,0,6\n-5,7,4,-1\n5,6,3,-1\n-3,-7,0,0\n5,-5,0,4\n7,2,-4,-2\n-7,-5,-7,3\n-3,2,6,0\n8,-4,6,0\n-4,2,-1,-1\n-2,8,-5,-3\n-5,-6,-4,-2\n3,2,2,3\n6,0,-2,0\n-4,8,7,4\n-2,6,-5,-3\n6,4,0,5\n5,0,1,3\n4,3,6,1\n-2,-4,-3,0\n-5,1,0,7\n0,-6,-1,-6\n7,6,-1,-4\n-4,6,4,2\n7,1,4,-8\n-1,2,8,-1\n1,7,4,-4\n5,3,-1,-8\n-8,-2,-3,5\n-1,-7,-8,5\n-5,-8,-3,4\n1,0,3,-8\n-8,6,-5,0\n-7,-6,5,5\n3,2,5,5\n0,8,4,5\n0,5,0,-3\n-1,1,-8,-6\n2,0,-4,6\n-2,-5,4,8\n6,6,-5,2\n0,6,-1,6\n8,-8,2,-8\n-3,4,-1,5\n-2,-6,7,4\n4,5,8,-1\n-3,8,7,-7\n5,4,4,6\n3,-2,-1,-6\n-7,8,-7,-2\n0,-1,-8,-7\n-3,-3,5,6\n3,4,7,-8\n-5,1,-6,5\n-6,7,1,2\n3,6,-2,5\n3,0,4,3\n2,-3,7,-2\n4,-8,-7,7\n-3,8,3,5\n-7,-5,1,3\n-5,-5,0,-6\n-8,-6,-7,8\n4,-1,-6,-4\n5,3,2,-1\n-4,1,6,5\n-8,4,3,-1\n8,7,5,-5\n8,4,-4,7\n1,-6,-1,3\n8,4,-7,-2\n-2,2,-4,6\n-3,-2,3,1\n-3,3,-3,3\n-8,1,5,-8\n-2,4,-5,-6\n7,1,0,-6\n1,-7,-4,-8\n8,0,1,-3\n-8,5,-2,4\n6,7,-1,8\n-5,4,-1,0\n-8,-7,1,4\n8,4,-3,-4\n-7,-3,8,-3\n-2,-2,2,1\n-1,7,-7,4\n4,8,5,4\n-7,8,3,5\n5,7,6,5\n-1,1,-2,-5\n-1,-6,0,-2\n-2,8,4,-1\n0,3,-2,7\n-1,-2,1,2\n3,-5,6,-8\n4,-3,4,-6\n0,-4,-8,-1\n-5,-2,-8,0\n-5,-5,-3,-4\n2,2,3,-6\n0,-5,0,2\n-3,-8,0,0\n3,2,-1,8\n0,8,1,2\n7,3,6,5\n-1,0,0,3\n-8,6,1,-3\n-3,-3,-6,3\n-7,-2,7,1\n8,-6,3,4\n5,1,7,7\n-2,2,3,-8\n-7,-8,-2,2\n2,8,-4,-1\n7,3,-7,-7\n-4,3,7,-3\n-4,-4,4,7\n7,-1,-1,2\n3,8,-3,-2\n-5,-7,-2,-4\n-1,-6,-5,-5\n4,8,0,6\n5,0,1,-7\n-6,6,-2,0\n-1,5,0,0\n4,-2,0,7\n-2,2,-1,1\n2,-6,4,2\n-2,5,-1,-2\n-8,8,0,7\n0,-1,-7,-6\n3,-6,1,-6\n-1,-3,-5,-3\n-7,8,-5,0\n2,1,6,-7\n6,2,4,-7\n-1,-6,-7,4\n-5,4,6,-3\n5,1,-6,-7\n7,1,-4,-6\n-4,-8,-5,2\n1,-1,-3,-6\n2,-5,-5,-5\n-6,4,-6,-1\n-7,1,-4,-4\n8,6,0,5\n4,7,0,8\n8,-8,-4,-6\n2,3,0,-7\n-2,4,0,2\n-3,-7,2,0\n8,0,-2,1\n-3,-1,-2,8\n-3,5,3,8\n0,-5,-1,6\n5,1,2,7\n6,4,1,3\n0,4,6,4\n-6,0,-4,3\n-8,-1,5,-7\n-1,4,-2,0\n1,-8,-3,-1\n0,2,-6,5\n-7,-2,-6,-2\n8,6,8,6\n-8,-4,0,5\n1,3,-6,-4\n-4,3,-5,7\n7,-5,0,0\n-8,4,2,-8\n5,7,-5,0\n7,-7,-5,-4\n-5,0,1,-3\n-4,6,1,-3\n4,-3,3,4\n0,0,8,0\n5,-6,-6,-6\n-3,7,7,-2\n-7,-3,8,-4\n6,8,-7,-4\n-6,6,-4,0\n4,0,8,3\n2,8,0,-6\n0,-6,6,-3\n0,-6,3,-8\n0,0,0,-7\n-1,7,-7,3\n-5,-4,-1,-7\n-8,-3,-1,8\n6,5,-7,3\n-4,4,0,-4\n-3,-8,5,7\n0,1,0,-4\n7,-3,0,-5\n-2,-4,0,2\n-5,4,2,3\n-3,-8,-7,-5\n4,-5,3,0\n1,-4,1,2\n-6,2,3,8\n2,-6,-7,-2\n0,-6,-4,8\n1,5,0,-6\n1,3,-7,-2\n6,3,-5,3\n-1,8,6,-4\n3,3,3,0\n-2,0,-3,4\n2,0,7,4\n6,8,-8,7\n8,6,-1,-6\n-7,-5,5,-2\n0,8,-2,-3\n-7,2,2,-8\n8,5,-4,4\n-5,7,5,-1\n7,6,0,8\n1,-7,3,-6\n2,7,-6,0\n3,3,0,-4\n5,7,-8,5\n4,-8,8,0\n-7,-2,2,5\n-1,7,1,3\n-8,-8,2,-6\n6,0,2,2\n1,2,4,-4\n7,-8,-2,5\n1,3,5,0\n-6,4,-5,1\n-4,0,-5,1\n4,-3,-4,8\n-6,-1,5,8\n7,-8,7,8\n-3,-7,-6,8\n-8,-4,0,0\n-3,5,2,7\n-3,7,-3,0\n-3,0,0,5\n8,3,8,4\n-6,0,0,7\n6,5,2,-2\n-3,-8,4,0\n4,-2,0,2\n-1,0,0,7\n-4,4,-4,6\n-5,-6,2,1\n-7,0,-7,3\n-7,3,4,-7\n-7,-8,2,2\n1,-4,5,-7\n-2,5,3,-8\n5,1,-7,2\n-2,-3,3,1\n-1,0,5,-3\n-2,5,1,0\n-3,-6,0,-1\n5,0,7,-7\n8,8,-1,-2\n-7,-3,2,1\n-2,2,3,0\n-8,-4,-8,-6\n0,8,-3,0\n4,0,6,-5\n-6,2,7,1\n2,3,1,-6\n-5,-1,-4,1\n4,-2,2,-6\n5,4,5,-6\n-3,5,1,-6\n-4,-5,4,-2\n1,-5,4,-3\n8,-5,6,-4\n1,4,5,-1\n-5,-4,-2,3\n0,3,3,-6\n-5,-6,0,-8\n-3,5,3,-1\n0,0,7,6\n-4,-6,5,5\n-4,-1,-2,-6\n-7,-6,-4,-8\n2,-4,0,3\n4,-2,0,5\n-3,1,-7,-7\n-7,0,-4,4\n-6,-1,7,-8\n-5,2,7,8\n3,-7,-2,5\n2,-3,-3,2\n2,3,7,-4\n-7,4,-2,8\n-3,1,-3,3\n-6,-8,7,-2\n-5,0,-8,-4\n5,8,0,4\n-5,-8,-4,5\n-3,-7,5,6\n-5,7,0,7\n-2,-4,-6,-3\n-8,-8,-2,2\n-6,-1,-1,8\n-3,0,4,5\n8,0,-8,7\n4,8,0,-6\n0,3,-8,7\n0,-8,-5,-4\n0,0,6,6\n-5,-7,0,-7\n-3,-1,3,4\n3,3,-7,6\n7,6,7,0\n-1,-3,-4,-8\n6,8,-1,-6\n3,-8,7,1\n-8,-5,1,-3\n-8,-5,-3,-8\n-6,-5,-3,-1\n5,0,-5,0\n-5,0,0,7\n8,-8,6,8\n4,6,4,-3\n2,2,6,1\n0,0,-4,3\n-6,-4,2,6\n7,0,-6,-3\n8,3,-2,2\n-7,1,5,-3\n7,7,-1,0\n-1,3,5,0\n-4,-5,-3,-1\n-1,-4,6,4\n5,0,3,6\n-1,2,-6,1\n8,-3,4,-2\n4,0,-2,7\n0,7,0,-3\n0,8,2,4\n0,-7,-7,8\n-2,-8,7,3\n-1,7,-8,8\n7,4,-1,5\n-5,1,-8,-2\n-6,6,-5,-4\n-8,3,2,-3\n-4,-3,0,3\n-4,-4,-1,0\n3,7,1,2\n-5,-6,-1,-4\n5,7,8,8\n2,-2,-8,-6\n0,7,-6,1\n5,-8,-3,-2\n-3,7,-8,-4\n-8,0,3,0\n0,-2,7,0\n5,7,4,7\n8,2,-5,-5\n-1,-4,-3,6\n0,1,6,7\n-8,0,7,0\n2,-7,-6,-1\n-1,0,-8,1\n-8,6,-3,6\n7,7,-6,8\n1,-3,1,-5\n-4,3,0,1\n3,7,-7,-8\n0,-7,4,3\n-5,-2,4,5\n0,-5,3,0\n-5,4,0,0\n2,-5,-6,2\n-1,-2,0,4\n0,5,4,0\n-5,-1,4,8\n7,0,0,2\n0,2,-6,0\n1,-3,7,0\n-6,0,-8,8\n-6,-5,8,7\n-1,-3,1,8\n1,0,-2,-6\n6,4,6,-3\n-1,-5,-5,5\n-4,7,3,-5\n0,-4,3,6\n-6,3,3,8\n5,1,3,8\n-8,-2,6,-3\n-2,-7,7,-7\n7,-4,-3,5\n7,-1,4,0\n-8,-3,1,-4\n-1,0,-1,1\n-4,-6,-2,2\n0,-6,-3,-6\n0,-5,1,8\n4,8,-5,-3\n-2,-4,6,1\n-5,0,4,-4\n6,1,-8,7\n8,-4,0,-1\n-2,-5,-7,-7\n-3,3,-7,-7\n8,-2,-6,5\n-3,0,-5,-3\n-8,5,0,0\n-4,0,4,0\n-7,2,8,1\n0,-4,6,-1\n-8,-3,6,-8\n5,0,4,-3\n-4,-6,0,0\n6,2,0,0\n8,2,2,7\n8,-1,0,-2\n6,8,0,0\n-3,2,2,-5\n0,-6,-1,5\n0,-5,7,0\n0,1,8,8\n-1,2,5,4\n-2,-1,5,1\n-4,-6,6,5\n-8,-8,8,2\n0,-6,-1,6\n-5,0,7,-4\n7,0,-3,-8\n-2,-5,1,2\n0,-2,7,-2\n-2,-8,-3,-3\n-7,3,4,6\n7,2,-8,2\n4,6,-5,-3\n-1,-1,-7,0\n5,-8,-7,6\n4,5,-3,0\n0,-6,1,-4\n-1,2,4,-3\n-2,5,-7,0\n-5,4,0,4\n3,-1,-7,3\n4,5,5,7\n2,2,-8,7\n5,7,-3,1\n-6,5,-5,-3\n7,-2,-2,-3\n2,-4,0,-4\n-3,0,-3,-8\n1,-4,4,0\n-2,0,3,-8\n-2,3,-5,-8\n-5,7,-6,-6\n-4,0,5,-3\n4,-4,-2,-2\n-8,-8,8,-4\n5,-2,0,1\n8,1,-7,0\n-7,-4,-4,-2\n1,5,0,-4\n3,4,7,-4\n8,5,-7,0\n8,-8,7,-6\n5,6,8,8\n-7,1,-4,-1\n-8,2,3,-4\n4,7,-3,-2\n2,-8,-4,-2\n-5,4,1,4\n0,6,-4,-1\n-7,-1,0,1\n5,-4,-7,-3\n-2,0,2,3\n1,0,-2,-7\n0,-7,2,-1\n-5,3,-4,1\n-1,-8,1,-4\n-2,-8,-6,-5\n7,-6,2,-3\n-7,4,6,-6\n5,7,6,-3\n8,0,1,-7\n4,-2,-7,-3\n8,-5,-6,3\n-1,7,-1,-2\n-8,4,5,3\n1,-3,4,-8\n-2,-5,-5,-8\n2,1,3,8\n4,7,7,0\n1,2,0,0\n-5,2,-1,7\n3,8,-6,3\n0,6,2,3\n7,4,-8,0\n-1,-5,0,3\n-1,0,0,0\n-2,1,-2,5\n1,2,4,-3\n2,4,0,-4\n6,0,7,-2\n4,-2,-2,-7\n3,-2,-2,3\n8,-5,-8,-7\n4,-2,0,-7\n6,3,-7,0\n-7,-8,-4,0\n6,0,4,-3\n4,-4,0,4\n1,0,-1,-1\n-4,3,-2,2\n-8,-8,2,2\n-3,-3,-5,-2\n-1,0,5,1\n-2,8,5,-8\n6,-8,5,6\n-2,-6,-1,-3\n5,2,-5,-6\n-6,-3,-2,-1\n-2,3,3,4\n2,-3,-7,4\n2,-1,-7,0\n-1,-2,-4,-8\n-3,-7,7,3\n-4,7,8,7\n-8,1,-1,0\n-6,8,2,-6\n5,-3,7,-8\n-7,6,6,-3\n0,1,2,-7\n3,0,-7,-2\n2,-7,1,6\n5,-3,0,5\n6,-2,-1,-4\n4,-3,-5,0\n-8,1,-2,-2\n0,5,0,2\n-7,6,2,8\n4,1,5,7\n-2,0,6,-3\n7,-2,5,0\n-3,0,-4,4\n-7,5,0,0\n-4,2,0,-1\n3,-8,1,-5\n-2,-4,0,5\n-3,6,-6,-6\n3,1,-1,-7\n-4,0,1,-6\n-7,0,-4,-8\n-7,-4,8,-6\n-3,-4,-8,5\n2,-7,-1,-5\n1,8,0,0\n7,2,6,-4\n0,6,-4,-2\n0,-1,7,4\n3,6,0,8\n3,7,-1,-1\n-2,-1,-2,-2\n-4,5,7,4\n4,-2,-6,-4\n-6,3,-5,4\n-8,-7,-4,7\n2,6,3,-5\n-4,-8,4,8\n-3,5,1,0\n1,1,5,-2\n7,5,1,-7\n1,0,-8,-4\n0,-2,-1,3\n0,0,3,-7\n7,2,1,0\n1,-2,8,-6\n-3,0,0,-8\n-7,0,-2,0\n1,1,-2,7\n8,4,6,4\n-7,7,5,5\n0,-1,-7,-3\n-3,0,-3,-2\n-8,-7,6,-3\n4,-2,4,1\n-2,-8,5,-4\n-8,-3,7,2\n0,7,2,-1\n8,7,-1,-5\n1,0,-3,-4\n0,3,0,-4\n0,-2,0,5\n-2,5,-2,-1\n-8,-8,4,8\n-8,5,4,-8\n0,2,-1,2\n-4,0,8,7\n0,-4,1,-2\n3,1,-1,1\n2,-6,7,3\n5,-4,1,-2\n-5,-8,-2,-6\n2,5,3,4\n3,0,8,3\n8,6,-5,-4\n-7,8,6,2\n-5,-8,8,1\n6,1,1,-8\n0,-1,2,8\n-5,0,-7,-6\n2,-4,8,6\n-6,0,1,0\n3,6,-8,-3\n-1,5,5,1\n-3,0,7,8\n-4,0,-8,0\n5,-3,8,-6\n-8,-6,0,-3\n-6,7,3,2\n3,-8,-1,-1\n4,0,-6,-3\n2,-3,8,1\n-5,3,0,2\n-2,7,-3,-1\n0,-5,-1,3\n0,2,2,8\n7,-6,-6,-7\n-5,-5,-4,7\n5,2,-4,-6\n0,6,-6,-4\n-1,7,-2,0\n-6,-7,-1,-3\n-1,1,-3,1\n2,-4,-1,0\n6,2,3,-2\n-7,-5,-2,-7\n-4,-1,-5,4\n0,4,5,6\n-7,-4,1,-7\n0,-1,-8,8\n7,-1,1,-7\n-1,1,0,0\n-4,-3,2,-8\n-8,-8,-5,6\n4,3,-4,6\n-2,8,7,-5\n2,0,7,-6\n2,-1,-7,-8\n8,4,-5,-6\n-6,1,-5,-3\n-7,1,-2,0\n-7,-6,0,-7\n1,0,-8,1\n0,-2,6,0\n0,1,5,0\n8,-5,-1,5\n0,-2,-7,2\n0,-5,4,-3\n-6,7,3,3\n3,-7,-6,0\n-4,3,-5,0\n7,-5,4,1\n8,3,0,7\n-4,4,3,6\n8,1,-6,-3\n3,-3,-6,-4\n-5,0,-6,-6\n2,5,-8,3\n-6,6,-6,-1\n6,-2,0,-6\n4,7,-3,-8\n2,6,-6,0\n-8,0,6,0\n-5,-5,0,-3\n-3,4,1,-8\n-5,-1,4,-4\n0,0,-5,-7\n-6,1,-2,0\n5,-3,-1,-4\n-7,-8,5,-1\n-8,-5,6,1\n6,5,-1,-5\n8,-5,-6,6\n-2,-1,-4,7\n3,-4,4,1\n8,0,6,-6\n-7,7,-4,-5\n-5,5,-6,3\n-8,1,7,-3\n-1,-4,0,6\n3,3,2,3\n7,4,-7,-8\n-8,8,6,-3\n-1,-8,0,8\n-8,7,-4,-6\n0,5,7,0\n-6,-6,3,1\n-8,-3,6,7\n-2,-2,-2,-7\n-4,6,5,-2\n2,-2,7,8\n-7,8,0,2\n2,2,-6,5\n6,-3,-3,-8\n-2,-3,-8,7\n-6,8,-5,-1\n4,-2,-3,-3\n-8,-4,-6,4\n-8,4,-2,1\n-4,-1,-2,7\n5,6,-2,-5\n0,2,6,-2\n0,0,3,0\n4,8,-3,-1\n4,7,0,-1\n-6,3,7,8\n-4,2,-2,-2\n5,8,7,5\n6,8,1,2\n0,2,0,-4\n5,4,-6,-5\n6,-8,4,1\n0,-7,-3,-5\n1,3,-3,6\n5,8,2,-4\n-4,0,5,8\n2,0,2,3\n8,-2,-5,3\n-8,-5,2,8\n-2,0,-7,-6\n-4,-8,8,2\n6,-8,5,-2\n-4,6,0,8\n-3,-8,4,6\n1,7,3,7\n6,-7,-5,3\n-8,0,0,0\n6,-1,-2,-1\n-2,7,4,6\n-7,-5,2,-8\n1,-6,-5,-1\n-5,-3,0,-8\n8,-7,8,-6\n0,5,-7,0\n7,3,5,4\n4,-2,2,1\n-5,-6,2,-8\n-8,4,-4,-3\n0,2,6,2\n-4,-8,0,5\n-2,-4,-5,-8\n-4,2,0,-8\n2,-7,-4,6\n-8,-8,-7,4\n7,0,2,7\n-1,-2,1,-5\n-7,-5,1,-7\n4,-4,-6,6\n5,-3,6,-1\n2,2,-1,-4\n8,4,8,2\n3,8,0,-4\n-1,-7,0,6\n-1,7,6,-7\n8,5,0,2\n-6,0,-3,4\n-6,-3,3,0\n-7,-1,-6,-7\n-1,4,-3,1\n-2,-7,2,8\n-8,-3,8,0\n-4,-7,-6,0\n-7,-1,-6,2\n-3,-1,4,6\n0,-1,7,-4\n-8,-8,-8,1\n4,-5,4,1\n-5,-7,-3,2\n-1,-3,4,-1\n-4,-1,-3,-3\n0,-3,8,5\n3,0,-8,-1\n8,-8,1,-7\n-2,7,-3,7\n-1,0,-5,4\n-3,-2,4,-3\n-7,-8,6,-5\n2,5,3,5\n3,-7,8,-3\n-5,4,2,-6\n1,-4,0,-2\n0,-8,-5,6\n8,-8,-6,-2\n-7,-6,7,2\n-5,-3,-1,-8\n8,5,-5,-8\n3,6,8,4\n0,1,4,7\n-1,7,4,-7\n-6,-1,-6,6\n2,3,0,-5\n-4,0,-5,0\n3,-7,-2,-3\n8,0,-4,-6\n4,-2,-4,7\n-7,-6,4,-8\n-4,-3,2,-6\n0,-7,-3,3\n-7,6,-4,-5\n0,4,1,-6\n-3,1,0,5\n-7,-8,-1,0\n4,5,8,3\n4,6,7,-1\n5,-5,-1,0\n4,-4,2,-2\n0,8,-8,-1\n2,-7,-7,7\n5,0,-6,0\n-4,4,5,8\n-4,-5,5,6\n8,-5,-5,0\n5,2,-3,-2\n-5,-2,6,6\n6,3,-7,-4\n1,-1,-1,-5\n-5,-2,-7,1\n-5,3,1,-6\n3,6,6,3\n-2,-4,5,-5\n4,-6,7,5\n6,8,8,8\n2,-2,4,1\n-3,2,-7,7\n8,-1,-4,-5\n6,2,2,7\n0,-8,-5,-7\n-8,5,6,2\n5,5,6,8\n-3,-4,5,-4\n1,-1,8,4\n6,-3,-1,-2\n-1,4,6,-7\n2,4,0,6\n-7,1,-6,6\n-7,-8,8,-8\n0,3,-2,-8\n1,-5,-1,5\n-5,-3,-7,5\n5,-4,-4,7\n-4,0,0,-4\n-7,-5,-7,8\n-7,2,-1,-4\n4,-3,-3,0\n3,-2,-6,-1\n8,-1,-1,-2\n-5,-5,5,-7\n-7,0,-3,1\n-2,-7,-1,-6\n8,-4,-6,-7\n5,-3,6,5\n7,-1,3,-3\n-7,0,7,-1\n-6,-8,-5,6\n-8,0,-3,4\n4,-2,3,-5\n3,6,7,3\n0,-3,-7,2\n4,-6,-7,2\n1,-7,0,0\n8,0,-7,-4\n6,-1,8,4' def distance(a, b): return abs(a[0] - b[0]) + abs(a[1] - b[1]) + abs(a[2] - b[2]) + abs(a[3] - b[3]) constellations = [] for line in input.splitlines(): (x, y, z, t) = (int(a) for a in line.split(',')) s = (x, y, z, t) matching_constellations = [] for c in constellations: for star in c: if distance(star, s) <= 3: matching_constellations.append(c) break resulting_constellation = {s} for c in matching_constellations: constellations.remove(c) resulting_constellation |= c constellations.append(frozenset(resulting_constellation)) print(len(constellations))
# The Twitter API keys needed to send tweets # Two applications for the same account are needed, # since two streamers can't be run on the same account # main account. needs all privileges CONSUMER_KEY = "enter your consumer key here" CONSUMER_SECRET = "enter your secret consumer key here" ACCESS_TOKEN = "enter your access token here" ACCESS_TOKEN_SECRET = "enter your secret access token here" # account for mining users. needs all privileges MINE_CONSUMER_KEY = "enter your consumer key here" MINE_CONSUMER_SECRET = "enter your secret consumer key here" MINE_ACCESS_TOKEN = "enter your access token here" MINE_ACCESS_TOKEN_SECRET = "enter your secret access token here"
consumer_key = 'enter your consumer key here' consumer_secret = 'enter your secret consumer key here' access_token = 'enter your access token here' access_token_secret = 'enter your secret access token here' mine_consumer_key = 'enter your consumer key here' mine_consumer_secret = 'enter your secret consumer key here' mine_access_token = 'enter your access token here' mine_access_token_secret = 'enter your secret access token here'
CATEGORIES = { "all", "arts", "automotive", "baby", "beauty", "books", "boys", "computers", "electronics", "girls", "health", "kitchen", "industrial", "mens", "pets", "sports", "games", "travel", "womens", } CATEGORIES_CHOICES = [ ("all", "All"), ("arts", "Arts"), ("automotive", "Automotive"), ("baby", "Baby"), ("beauty", "Beauty"), ("books", "Books"), ("boys", "Boy's"), ("computers", "Computers"), ("electronics", "Electronics"), ("girls", "Girl's"), ("health", "Health"), ("kitchen", "Kitchen"), ("industrial", "Industrial"), ("mens", "Men's"), ("pets", "Pets"), ("sports", "Sports"), ("games", "Games"), ("travel", "Travel"), ("womens", "Women's"), ] METRIC_PERIODS = [ ("daily", "Daily"), ("weekly", "Weekly"), ] METRIC_TYPES = [ ("visits", "Visits"), ("search", "Search"), ("clicks", "Clicks"), ]
categories = {'all', 'arts', 'automotive', 'baby', 'beauty', 'books', 'boys', 'computers', 'electronics', 'girls', 'health', 'kitchen', 'industrial', 'mens', 'pets', 'sports', 'games', 'travel', 'womens'} categories_choices = [('all', 'All'), ('arts', 'Arts'), ('automotive', 'Automotive'), ('baby', 'Baby'), ('beauty', 'Beauty'), ('books', 'Books'), ('boys', "Boy's"), ('computers', 'Computers'), ('electronics', 'Electronics'), ('girls', "Girl's"), ('health', 'Health'), ('kitchen', 'Kitchen'), ('industrial', 'Industrial'), ('mens', "Men's"), ('pets', 'Pets'), ('sports', 'Sports'), ('games', 'Games'), ('travel', 'Travel'), ('womens', "Women's")] metric_periods = [('daily', 'Daily'), ('weekly', 'Weekly')] metric_types = [('visits', 'Visits'), ('search', 'Search'), ('clicks', 'Clicks')]
"""#ip 2 addi 2 16 2 seti 1 2 4 seti 1 8 1 mulr 4 1 5 eqrr 5 3 5 addr 5 2 2 addi 2 1 2 addr 4 0 0 addi 1 1 1 gtrr 1 3 5 addr 2 5 2 seti 2 6 2 addi 4 1 4 gtrr 4 3 5 addr 5 2 2 seti 1 2 2 mulr 2 2 2 addi 3 2 3 mulr 3 3 3 mulr 2 3 3 muli 3 11 3 addi 5 2 5 mulr 5 2 5 addi 5 8 5 addr 3 5 3 addr 2 0 2 seti 0 4 2 setr 2 5 5 mulr 5 2 5 addr 2 5 5 mulr 2 5 5 muli 5 14 5 mulr 5 2 5 addr 3 5 3 seti 0 8 0 seti 0 5 2 """ ip_reg = 2 reg = [1, 0, 0, 0, 0, 0] # addi 2 16 2 | ip=0 reg[2] += 17 # seti 1 2 4 reg[4] = 1 reg[2] += 1 # seti 1 8 1 reg[1] = 1 reg[2] += 1 # mulr 4 1 5 reg[5] = reg[4] * reg[1] reg[2] += 1 # eqrr 5 3 5 reg[5] = int(reg[5] == reg[3]) reg[2] += 1 # addr 5 2 2 reg[2] += reg[5] + 1 # addi 2 1 2 reg[2] += 2 # addr 4 0 0 reg[0] += reg[4] # addi 1 1 1 reg[1] += 1 reg[2] += 1 # gtrr 1 3 5 reg[5] = int(reg[1] > reg[2]) reg[2] += 1 # addr 2 5 2 reg[2] += reg[5] + 1 # seti 2 6 2 reg[2] = 3 # addi 4 1 4 reg[4] += 1 reg[2] += 1 # gtrr 4 3 5 reg[5] = int(reg[4] > reg[3]) reg[2] += 1 # addr 5 2 2 reg[2] += reg[5] + 1 # seti 1 2 2 reg[2] = 2 # mulr 2 2 2 reg[2] = reg[2] * reg[2] + 1 # addi 3 2 3 reg[3] += 2 reg[2] += 1 # mulr 3 3 3 reg[3] = reg[3] * reg[3] reg[2] += 1 # mulr 2 3 3 reg[3] = reg[2] * reg[3] reg[2] += 1 # muli 3 11 3 reg[3] *= 11 reg[2] += 1 # addi 5 2 5 reg[5] += 2 reg[2] += 1 # mulr 5 2 5 reg[5] *= 2 reg[2] += 1 # addi 5 8 5 reg[5] += 8 reg[2] += 1 # addr 3 5 3 reg[3] += reg[5] reg[2] += 1 # addr 2 0 2 reg[2] += reg[0] + 1 # seti 0 4 2 reg[2] = 1 # setr 2 5 5 reg[5] = reg[2] reg[2] += 1 # mulr 5 2 5 reg[5] *= reg[2] reg[2] += 1 # addr 2 5 5 reg[5] += reg[2] reg[2] += 1 # mulr 2 5 5 reg[5] *= reg[2] reg[2] += 1 # muli 5 14 5 reg[5] *= 14 reg[2] += 1 # mulr 5 2 5 reg[5] *= reg[2] reg[2] += 1 # addr 3 5 3 reg[3] += reg[5] reg[2] += 1 # seti 0 8 0 reg[0] = 0 reg[2] += 1 # seti 0 5 2 reg[2] = 1
"""#ip 2 addi 2 16 2 seti 1 2 4 seti 1 8 1 mulr 4 1 5 eqrr 5 3 5 addr 5 2 2 addi 2 1 2 addr 4 0 0 addi 1 1 1 gtrr 1 3 5 addr 2 5 2 seti 2 6 2 addi 4 1 4 gtrr 4 3 5 addr 5 2 2 seti 1 2 2 mulr 2 2 2 addi 3 2 3 mulr 3 3 3 mulr 2 3 3 muli 3 11 3 addi 5 2 5 mulr 5 2 5 addi 5 8 5 addr 3 5 3 addr 2 0 2 seti 0 4 2 setr 2 5 5 mulr 5 2 5 addr 2 5 5 mulr 2 5 5 muli 5 14 5 mulr 5 2 5 addr 3 5 3 seti 0 8 0 seti 0 5 2 """ ip_reg = 2 reg = [1, 0, 0, 0, 0, 0] reg[2] += 17 reg[4] = 1 reg[2] += 1 reg[1] = 1 reg[2] += 1 reg[5] = reg[4] * reg[1] reg[2] += 1 reg[5] = int(reg[5] == reg[3]) reg[2] += 1 reg[2] += reg[5] + 1 reg[2] += 2 reg[0] += reg[4] reg[1] += 1 reg[2] += 1 reg[5] = int(reg[1] > reg[2]) reg[2] += 1 reg[2] += reg[5] + 1 reg[2] = 3 reg[4] += 1 reg[2] += 1 reg[5] = int(reg[4] > reg[3]) reg[2] += 1 reg[2] += reg[5] + 1 reg[2] = 2 reg[2] = reg[2] * reg[2] + 1 reg[3] += 2 reg[2] += 1 reg[3] = reg[3] * reg[3] reg[2] += 1 reg[3] = reg[2] * reg[3] reg[2] += 1 reg[3] *= 11 reg[2] += 1 reg[5] += 2 reg[2] += 1 reg[5] *= 2 reg[2] += 1 reg[5] += 8 reg[2] += 1 reg[3] += reg[5] reg[2] += 1 reg[2] += reg[0] + 1 reg[2] = 1 reg[5] = reg[2] reg[2] += 1 reg[5] *= reg[2] reg[2] += 1 reg[5] += reg[2] reg[2] += 1 reg[5] *= reg[2] reg[2] += 1 reg[5] *= 14 reg[2] += 1 reg[5] *= reg[2] reg[2] += 1 reg[3] += reg[5] reg[2] += 1 reg[0] = 0 reg[2] += 1 reg[2] = 1
mensaje = "Registre el nombre del estudiante " estudiantes = [] estudiante = input(mensaje) estudiantes.append(estudiante) estudiante = input(mensaje) estudiantes.append(estudiante) estudiante = input(mensaje) estudiantes.append(estudiante) estudiante = input(mensaje) estudiantes.append(estudiante) estudiante = input(mensaje) estudiantes.append(estudiante) print(estudiantes)
mensaje = 'Registre el nombre del estudiante ' estudiantes = [] estudiante = input(mensaje) estudiantes.append(estudiante) estudiante = input(mensaje) estudiantes.append(estudiante) estudiante = input(mensaje) estudiantes.append(estudiante) estudiante = input(mensaje) estudiantes.append(estudiante) estudiante = input(mensaje) estudiantes.append(estudiante) print(estudiantes)
# from https://www.ngdc.noaa.gov/geomag/WMM/data/WMM2020/WMM2020_Report.pdf EQUATOR_RADIUS = 6378137 FLATTENING = 1 / 298.257223563 EE2 = FLATTENING * (2 - FLATTENING) # eecentricity squared N_MAX = 12 # degree of expansion
equator_radius = 6378137 flattening = 1 / 298.257223563 ee2 = FLATTENING * (2 - FLATTENING) n_max = 12
# -*- coding: utf-8 -*- def suma(num1, num2): sm = num1 + num2 return sm print(suma(2,3))
def suma(num1, num2): sm = num1 + num2 return sm print(suma(2, 3))
myVariable = 0 def when_started1(): global myVariable fork_motor_group.spin_to_position(1100, DEGREES, wait=False) drivetrain.turn_for(RIGHT, 22, DEGREES, wait=True) # Get goalpost with 2 rings into our zone (22 points) drivetrain.drive_for(FORWARD, 1200, MM, wait=True) drivetrain.set_drive_velocity(100, PERCENT) drivetrain.set_turn_velocity(100, PERCENT) drivetrain.drive_for(REVERSE, 100, MM, wait=True) drivetrain.turn_for(LEFT, 20, DEGREES, wait=True) fork_motor_group.spin_to_position(1800, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 200, MM, wait=True) fork_motor_group.spin_to_position(1300, DEGREES, wait=True) drivetrain.drive_for(REVERSE, 800, MM, wait=True) drivetrain.turn_to_heading(90, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 800, MM, wait=True) fork_motor_group.spin_to_position(1800, DEGREES, wait=True) drivetrain.drive_for(REVERSE, 1200, MM, wait=True) # Remove blue goalpost from lever drivetrain.turn_to_heading(130, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 600, MM, wait=True) fork_motor_group.spin_to_position(1300, DEGREES, wait=True) drivetrain.drive_for(REVERSE, 500, MM, wait=True) drivetrain.turn_to_heading(90, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 750, MM, wait=True) fork_motor_group.spin_to_position(1800, DEGREES, wait=True) drivetrain.drive_for(REVERSE, 580, MM, wait=True) # Get red goalpost drivetrain.turn_to_heading(0, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 1600, MM, wait=True) fork_motor_group.spin_to_position(1100, DEGREES, wait=True) drivetrain.turn_to_heading(320, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 300, MM, wait=True) drivetrain.drive_for(REVERSE, 300, MM, wait=True) fork_motor_group.spin_to_position(1800, DEGREES, wait=True) drivetrain.turn_to_heading(300, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 400, MM, wait=True) fork_motor_group.spin_to_position(0, DEGREES, wait=False) drivetrain.drive_for(REVERSE, 300, MM, wait=True) drivetrain.turn_to_heading(0, DEGREES, wait=True) drivetrain.drive_for(REVERSE, 1900, MM, wait=True) # Get on the balance drivetrain.set_drive_velocity(50, PERCENT) drivetrain.set_turn_velocity(50, PERCENT) drivetrain.turn_to_heading(125, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 400, MM, wait=True) drivetrain.turn_to_heading(90, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 200, MM, wait=True) fork_motor_group.spin_to_position(1700, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 750, MM, wait=True) drivetrain.stop() wait(1, SECONDS) stop_project() vr_thread(when_started1)
my_variable = 0 def when_started1(): global myVariable fork_motor_group.spin_to_position(1100, DEGREES, wait=False) drivetrain.turn_for(RIGHT, 22, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 1200, MM, wait=True) drivetrain.set_drive_velocity(100, PERCENT) drivetrain.set_turn_velocity(100, PERCENT) drivetrain.drive_for(REVERSE, 100, MM, wait=True) drivetrain.turn_for(LEFT, 20, DEGREES, wait=True) fork_motor_group.spin_to_position(1800, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 200, MM, wait=True) fork_motor_group.spin_to_position(1300, DEGREES, wait=True) drivetrain.drive_for(REVERSE, 800, MM, wait=True) drivetrain.turn_to_heading(90, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 800, MM, wait=True) fork_motor_group.spin_to_position(1800, DEGREES, wait=True) drivetrain.drive_for(REVERSE, 1200, MM, wait=True) drivetrain.turn_to_heading(130, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 600, MM, wait=True) fork_motor_group.spin_to_position(1300, DEGREES, wait=True) drivetrain.drive_for(REVERSE, 500, MM, wait=True) drivetrain.turn_to_heading(90, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 750, MM, wait=True) fork_motor_group.spin_to_position(1800, DEGREES, wait=True) drivetrain.drive_for(REVERSE, 580, MM, wait=True) drivetrain.turn_to_heading(0, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 1600, MM, wait=True) fork_motor_group.spin_to_position(1100, DEGREES, wait=True) drivetrain.turn_to_heading(320, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 300, MM, wait=True) drivetrain.drive_for(REVERSE, 300, MM, wait=True) fork_motor_group.spin_to_position(1800, DEGREES, wait=True) drivetrain.turn_to_heading(300, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 400, MM, wait=True) fork_motor_group.spin_to_position(0, DEGREES, wait=False) drivetrain.drive_for(REVERSE, 300, MM, wait=True) drivetrain.turn_to_heading(0, DEGREES, wait=True) drivetrain.drive_for(REVERSE, 1900, MM, wait=True) drivetrain.set_drive_velocity(50, PERCENT) drivetrain.set_turn_velocity(50, PERCENT) drivetrain.turn_to_heading(125, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 400, MM, wait=True) drivetrain.turn_to_heading(90, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 200, MM, wait=True) fork_motor_group.spin_to_position(1700, DEGREES, wait=True) drivetrain.drive_for(FORWARD, 750, MM, wait=True) drivetrain.stop() wait(1, SECONDS) stop_project() vr_thread(when_started1)
def search(f, k, ma): la = 0 r = ma while r - la > 1: d = (la + r) // 2 t = f(d) if t >= k: r = d else: la = d return r a, b, c, x, k = map(int, input().split()) def f(t): if t > b: rp = t elif a <= t and t * (1 + c / 100) > b + 1: rp = b + 1 elif a <= t: rp = t * (1 + c / 100) else: rp = t return 0 if rp * k <= x else 1 print(search(f, 1, 10 ** 12) - 1)
def search(f, k, ma): la = 0 r = ma while r - la > 1: d = (la + r) // 2 t = f(d) if t >= k: r = d else: la = d return r (a, b, c, x, k) = map(int, input().split()) def f(t): if t > b: rp = t elif a <= t and t * (1 + c / 100) > b + 1: rp = b + 1 elif a <= t: rp = t * (1 + c / 100) else: rp = t return 0 if rp * k <= x else 1 print(search(f, 1, 10 ** 12) - 1)
def startup(addPluginHook, addHook, world) : addPluginHook(world, "list", main, 1, ["self", "info", "args", "world"]) def main(self, info, args, world) : """list The list command lists all plugins""" pluginlist = world.plugins.keys() pluginlist.sort() self.msg(info["channel"], "I will send you the list of plugins in a notice because there are too many to list here", reply=True) self.msg(info["sender"], " ".join(pluginlist))
def startup(addPluginHook, addHook, world): add_plugin_hook(world, 'list', main, 1, ['self', 'info', 'args', 'world']) def main(self, info, args, world): """list The list command lists all plugins""" pluginlist = world.plugins.keys() pluginlist.sort() self.msg(info['channel'], 'I will send you the list of plugins in a notice because there are too many to list here', reply=True) self.msg(info['sender'], ' '.join(pluginlist))
#!/usr/bin/env python class H2O: def __init__(self): pass def hydrogen(self, releaseHydrogen: 'Callable[[], None]') -> None: # releaseHydrogen() outputs "H". Do not change or remove this line. releaseHydrogen() def oxygen(self, releaseOxygen: 'Callable[[], None]') -> None: # releaseOxygen() outputs "O". Do not change or remove this line. releaseOxygen()
class H2O: def __init__(self): pass def hydrogen(self, releaseHydrogen: 'Callable[[], None]') -> None: release_hydrogen() def oxygen(self, releaseOxygen: 'Callable[[], None]') -> None: release_oxygen()
def recursive_update(default, custom): """A recursive version of Python dict#update""" if not isinstance(default, dict) or not isinstance(custom, dict): raise TypeError('Params of recursive_update() must be a dictionnaries.') for key in custom: if isinstance(custom[key], dict) and isinstance( default.get(key), dict): default[key] = recursive_update(default[key], custom[key]) else: default[key] = custom[key] return default def key_from_configdict(d): """Return key (the most inner first item) of a config dictionnary""" if not isinstance(d, dict): raise TypeError('Params of key_from_configdict() must be a dictionnary.') try: item = [k for k, v in d.items()][0] except IndexError: item = '' return item
def recursive_update(default, custom): """A recursive version of Python dict#update""" if not isinstance(default, dict) or not isinstance(custom, dict): raise type_error('Params of recursive_update() must be a dictionnaries.') for key in custom: if isinstance(custom[key], dict) and isinstance(default.get(key), dict): default[key] = recursive_update(default[key], custom[key]) else: default[key] = custom[key] return default def key_from_configdict(d): """Return key (the most inner first item) of a config dictionnary""" if not isinstance(d, dict): raise type_error('Params of key_from_configdict() must be a dictionnary.') try: item = [k for (k, v) in d.items()][0] except IndexError: item = '' return item
def longestValidParentheses(s: str) -> int: result = 0 if not s: return result n = len(s) stack = [-1] for i in range(n): if s[i] == "(": stack.append(i) else: stack.pop() if not stack: stack.append(i) else: result = max(result, i - stack[len(stack) - 1]) return result if __name__ == "__main__" : s = "(())()" result = longestValidParentheses(s) print(result)
def longest_valid_parentheses(s: str) -> int: result = 0 if not s: return result n = len(s) stack = [-1] for i in range(n): if s[i] == '(': stack.append(i) else: stack.pop() if not stack: stack.append(i) else: result = max(result, i - stack[len(stack) - 1]) return result if __name__ == '__main__': s = '(())()' result = longest_valid_parentheses(s) print(result)
""" Selection Sort https://en.wikipedia.org/wiki/Selection_sort Worst-case performance: O(N^2) If you call selection_sort(arr,True), you can see the process of the sort Default is simulation = False """ def selection_sort(arr, simulation=False): iteration = 0 if simulation: print("iteration",iteration,":",*arr) for i in range(len(arr)): minimum = i for j in range(i + 1, len(arr)): # "Select" the correct value if arr[j] < arr[minimum]: minimum = j arr[minimum], arr[i] = arr[i], arr[minimum] if simulation: iteration = iteration + 1 print("iteration",iteration,":",*arr) return arr def main(): array = [1,5,8,5,150,44,4,3,6] #static inputs result = selection_sort(array) print(result) if __name__=="__main__": main()
""" Selection Sort https://en.wikipedia.org/wiki/Selection_sort Worst-case performance: O(N^2) If you call selection_sort(arr,True), you can see the process of the sort Default is simulation = False """ def selection_sort(arr, simulation=False): iteration = 0 if simulation: print('iteration', iteration, ':', *arr) for i in range(len(arr)): minimum = i for j in range(i + 1, len(arr)): if arr[j] < arr[minimum]: minimum = j (arr[minimum], arr[i]) = (arr[i], arr[minimum]) if simulation: iteration = iteration + 1 print('iteration', iteration, ':', *arr) return arr def main(): array = [1, 5, 8, 5, 150, 44, 4, 3, 6] result = selection_sort(array) print(result) if __name__ == '__main__': main()
# Definition for a binary tree node. # class TreeNode(object): # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution(object): def binaryTreePaths(self, root): """ :type root: TreeNode :rtype: List[str] """ if root == None: return [] if root.left == None and root.right == None: return [str(root.val)] ret = [] if root.left != None: left = self.binaryTreePaths(root.left) left = map(lambda x: str(root.val) + '->' + x, left) ret += left if root.right != None: right = self.binaryTreePaths(root.right) right = map(lambda x: str(root.val) + '->' + x, right) ret += right return ret
class Solution(object): def binary_tree_paths(self, root): """ :type root: TreeNode :rtype: List[str] """ if root == None: return [] if root.left == None and root.right == None: return [str(root.val)] ret = [] if root.left != None: left = self.binaryTreePaths(root.left) left = map(lambda x: str(root.val) + '->' + x, left) ret += left if root.right != None: right = self.binaryTreePaths(root.right) right = map(lambda x: str(root.val) + '->' + x, right) ret += right return ret
# -*- coding: utf-8 -*- # Visigoth: A lightweight Python3 library for rendering data visualizations in SVG # Copyright (C) 2020-2021 Visigoth Developers # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software # and associated documentation files (the "Software"), to deal in the Software without # restriction, including without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all copies or # substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING # BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. class Multithing(object): """ Base class for an object (point,line,polygon) Arguments: id (str): an ID associated with the polygon category (str): a category associated with the polygon label (str): a label associated with these polygons tooltip (str): a tooltip associated with these polygons properties (dict): metadata for the polygons fill (str): a fill hue to use stroke (stroke): the stroke hue to use stroke_width (float): the width of the stroke """ def __init__(self,id,category,label,tooltip,properties,fill,stroke,stroke_width): self.id = id self.category = category self.label = label self.tooltip = tooltip self.properties = properties self.fill = fill self.stroke = stroke self.stroke_width = stroke_width def getCoordinates(self): return self.coordinates def getId(self): return self.id def getCategory(self): return self.category def getLabel(self): return self.label def getStrokeWidth(self): return self.stroke_width def getProperties(self): return self.properties
class Multithing(object): """ Base class for an object (point,line,polygon) Arguments: id (str): an ID associated with the polygon category (str): a category associated with the polygon label (str): a label associated with these polygons tooltip (str): a tooltip associated with these polygons properties (dict): metadata for the polygons fill (str): a fill hue to use stroke (stroke): the stroke hue to use stroke_width (float): the width of the stroke """ def __init__(self, id, category, label, tooltip, properties, fill, stroke, stroke_width): self.id = id self.category = category self.label = label self.tooltip = tooltip self.properties = properties self.fill = fill self.stroke = stroke self.stroke_width = stroke_width def get_coordinates(self): return self.coordinates def get_id(self): return self.id def get_category(self): return self.category def get_label(self): return self.label def get_stroke_width(self): return self.stroke_width def get_properties(self): return self.properties