content stringlengths 7 1.05M | fixed_cases stringlengths 1 1.28M |
|---|---|
def fibonacci(n):
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
| def fibonacci(n):
if n == 1 or n == 2:
return 1
return fibonacci(n - 1) + fibonacci(n - 2) |
class Event():
def __init__(self, id, dateTime, userId):
self.id = id
self.dateTime = dateTime
self.userId = userId
def getDateTime(self):
return self.dateTime
def getUserId(self):
return self.userId
class Scheduler():
def __init__(self):
self.calendar = {}
self.userIdIndex = {}
self.dateIndex = {}
self.eventId = 0
self.dateTimeFormat = '%Y-%m-%d %H:%M'
def getDateTimeFormat(self):
return self.dateTimeFormat
def getEvents(self, userId):
"""
Get events of a specific user
:param userId: ID of user
"""
events = []
if userId in self.userIdIndex.keys():
eventIds = self.userIdIndex[userId]
for eventId in eventIds:
event = self.calendar[eventId]
responseEvent = {}
responseEvent['date_time'] = event.getDateTime()
responseEvent['user_id'] = event.getUserId()
events.append(responseEvent)
# Sort
events = sorted(events, key=lambda event: event['date_time'].timestamp())
return events
def addEvent(self, dateTime, userId):
"""
Add a single event at the date-time for the user with userId
:param dateTime: date-time
:param userId: ID of user
"""
# Check for other events on same day
dateFormat = self.getDateTimeFormat().split()[0]
date = dateTime.strftime(dateFormat)
if userId in self.dateIndex.keys() and date in self.dateIndex[userId].keys():
return {
"success": False,
"error": "Only one event allowed on the same day.",
}
self.eventId += 1
# Create new event
event = Event(self.eventId, dateTime, userId)
self.calendar[self.eventId] = event
# Update user Id index
if userId not in self.userIdIndex.keys():
self.userIdIndex[userId] = []
self.userIdIndex[userId].append(self.eventId)
# Update date index
if userId not in self.dateIndex.keys():
self.dateIndex[userId] = {}
self.dateIndex[userId][date] = self.eventId
return {
"success": True,
} | class Event:
def __init__(self, id, dateTime, userId):
self.id = id
self.dateTime = dateTime
self.userId = userId
def get_date_time(self):
return self.dateTime
def get_user_id(self):
return self.userId
class Scheduler:
def __init__(self):
self.calendar = {}
self.userIdIndex = {}
self.dateIndex = {}
self.eventId = 0
self.dateTimeFormat = '%Y-%m-%d %H:%M'
def get_date_time_format(self):
return self.dateTimeFormat
def get_events(self, userId):
"""
Get events of a specific user
:param userId: ID of user
"""
events = []
if userId in self.userIdIndex.keys():
event_ids = self.userIdIndex[userId]
for event_id in eventIds:
event = self.calendar[eventId]
response_event = {}
responseEvent['date_time'] = event.getDateTime()
responseEvent['user_id'] = event.getUserId()
events.append(responseEvent)
events = sorted(events, key=lambda event: event['date_time'].timestamp())
return events
def add_event(self, dateTime, userId):
"""
Add a single event at the date-time for the user with userId
:param dateTime: date-time
:param userId: ID of user
"""
date_format = self.getDateTimeFormat().split()[0]
date = dateTime.strftime(dateFormat)
if userId in self.dateIndex.keys() and date in self.dateIndex[userId].keys():
return {'success': False, 'error': 'Only one event allowed on the same day.'}
self.eventId += 1
event = event(self.eventId, dateTime, userId)
self.calendar[self.eventId] = event
if userId not in self.userIdIndex.keys():
self.userIdIndex[userId] = []
self.userIdIndex[userId].append(self.eventId)
if userId not in self.dateIndex.keys():
self.dateIndex[userId] = {}
self.dateIndex[userId][date] = self.eventId
return {'success': True} |
# -*- coding: utf-8 -*-
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def mergeTwoLists(self, l1, l2):
"""
:type l1: ListNode
:type l2: ListNode
:rtype: ListNode
"""
if not l1:
return l2
if not l2:
return l1
if l1.val < l2.val:
l1.next = self.mergeTwoLists(l1.next, l2)
return l1
else:
l2.next = self.mergeTwoLists(l1, l2.next)
return l2
| class Solution(object):
def merge_two_lists(self, l1, l2):
"""
:type l1: ListNode
:type l2: ListNode
:rtype: ListNode
"""
if not l1:
return l2
if not l2:
return l1
if l1.val < l2.val:
l1.next = self.mergeTwoLists(l1.next, l2)
return l1
else:
l2.next = self.mergeTwoLists(l1, l2.next)
return l2 |
class Solution:
# @return an integer
def maxArea(self, height):
n = len(height)
i = 0
j = n - 1
max_area = 0
while i < j:
max_area = max(max_area, (j - i) * min(height[i], height[j]))
if height[i] <= height[j]:
i += 1
else:
j -= 1
return max_area
| class Solution:
def max_area(self, height):
n = len(height)
i = 0
j = n - 1
max_area = 0
while i < j:
max_area = max(max_area, (j - i) * min(height[i], height[j]))
if height[i] <= height[j]:
i += 1
else:
j -= 1
return max_area |
#!/usr/bin/env python
"""Solution for problem C to Codejam 2016, Round 1B of Martin Thoma."""
def get_dicts(topics):
a_words = {}
b_words = {}
for a, b in topics:
if a in a_words:
a_words[a] += 1
else:
a_words[a] = 1
if b in b_words:
b_words[b] += 1
else:
b_words[b] = 1
return a_words, b_words
def solve(topics):
"""Solve."""
a_words, b_words = get_dicts(topics)
candidates = []
original = []
duplicates = []
for a, b in topics:
# print(a, b)
# print(a_words[a], b_words[b])
if not (a_words[a] == 1 or b_words[b] == 1):
candidates.append((a, b))
else:
original.append((a, b))
a_words_org, b_words_org = get_dicts(original)
while len(candidates) > 0:
l_candidates = []
for a, b in candidates:
if a_words_org[a] >= 1 and b_words_org[b] >= 1:
duplicates.append((a, b))
else:
l_candidates.append((a, b))
candidates = l_candidates[:]
# print(candidates)
return len(candidates)
if __name__ == "__main__":
testcases = input()
for caseNr in xrange(1, testcases+1):
n = input()
topics = []
for topic in xrange(1, n+1):
a, b = raw_input().split(" ")
topics.append((a, b))
print("Case #%i: %s" % (caseNr, solve(topics)))
| """Solution for problem C to Codejam 2016, Round 1B of Martin Thoma."""
def get_dicts(topics):
a_words = {}
b_words = {}
for (a, b) in topics:
if a in a_words:
a_words[a] += 1
else:
a_words[a] = 1
if b in b_words:
b_words[b] += 1
else:
b_words[b] = 1
return (a_words, b_words)
def solve(topics):
"""Solve."""
(a_words, b_words) = get_dicts(topics)
candidates = []
original = []
duplicates = []
for (a, b) in topics:
if not (a_words[a] == 1 or b_words[b] == 1):
candidates.append((a, b))
else:
original.append((a, b))
(a_words_org, b_words_org) = get_dicts(original)
while len(candidates) > 0:
l_candidates = []
for (a, b) in candidates:
if a_words_org[a] >= 1 and b_words_org[b] >= 1:
duplicates.append((a, b))
else:
l_candidates.append((a, b))
candidates = l_candidates[:]
return len(candidates)
if __name__ == '__main__':
testcases = input()
for case_nr in xrange(1, testcases + 1):
n = input()
topics = []
for topic in xrange(1, n + 1):
(a, b) = raw_input().split(' ')
topics.append((a, b))
print('Case #%i: %s' % (caseNr, solve(topics))) |
class InfoUnavailableError(ValueError):
"""CloudVolume was unable to access this layer's info file."""
pass
class ScaleUnavailableError(IndexError):
"""The info file is not configured to support this scale / mip level."""
pass
class AlignmentError(ValueError):
"""Signals that an operation requiring chunk alignment was not aligned."""
pass
class EmptyVolumeException(Exception):
"""Raised upon finding a missing chunk."""
pass
class EmptyRequestException(ValueError):
"""
Requesting uploading or downloading
a bounding box of less than one cubic voxel
is impossible.
"""
pass
class DecodingError(Exception):
"""Generic decoding error. Applies to content aware and unaware codecs."""
pass
class EncodingError(Exception):
"""Generic decoding error. Applies to content aware and unaware codecs."""
pass
class OutOfBoundsError(ValueError):
"""
Raised upon trying to obtain or assign to a bbox of a volume outside
of the volume's bounds
"""
# Inheritance below done for backwards compatibility reasons.
class DecompressionError(DecodingError):
"""
Decompression failed. This exception is used for codecs
that are naieve to data contents like gzip, lzma, etc. as opposed
to codecs that are aware of array shape like fpzip or compressed_segmentation.
"""
pass
class CompressionError(EncodingError):
"""
Compression failed. This exception is used for codecs
that are naieve to data contents like gzip, lzma, etc. as opposed
to codecs that are aware of array shape like fpzip or compressed_segmentation.
"""
pass
class SkeletonUnassignedEdgeError(Exception):
"""This skeleton has an edge to a vertex that doesn't exist."""
pass
class SkeletonDecodeError(Exception):
"""Unable to decode a binary skeleton into a Python object."""
pass
class SkeletonEncodeError(Exception):
"""Unable to encode a PrecomputedSkeleton into a binary object."""
pass
class UnsupportedProtocolError(ValueError):
"""Unknown protocol extension."""
pass | class Infounavailableerror(ValueError):
"""CloudVolume was unable to access this layer's info file."""
pass
class Scaleunavailableerror(IndexError):
"""The info file is not configured to support this scale / mip level."""
pass
class Alignmenterror(ValueError):
"""Signals that an operation requiring chunk alignment was not aligned."""
pass
class Emptyvolumeexception(Exception):
"""Raised upon finding a missing chunk."""
pass
class Emptyrequestexception(ValueError):
"""
Requesting uploading or downloading
a bounding box of less than one cubic voxel
is impossible.
"""
pass
class Decodingerror(Exception):
"""Generic decoding error. Applies to content aware and unaware codecs."""
pass
class Encodingerror(Exception):
"""Generic decoding error. Applies to content aware and unaware codecs."""
pass
class Outofboundserror(ValueError):
"""
Raised upon trying to obtain or assign to a bbox of a volume outside
of the volume's bounds
"""
class Decompressionerror(DecodingError):
"""
Decompression failed. This exception is used for codecs
that are naieve to data contents like gzip, lzma, etc. as opposed
to codecs that are aware of array shape like fpzip or compressed_segmentation.
"""
pass
class Compressionerror(EncodingError):
"""
Compression failed. This exception is used for codecs
that are naieve to data contents like gzip, lzma, etc. as opposed
to codecs that are aware of array shape like fpzip or compressed_segmentation.
"""
pass
class Skeletonunassignededgeerror(Exception):
"""This skeleton has an edge to a vertex that doesn't exist."""
pass
class Skeletondecodeerror(Exception):
"""Unable to decode a binary skeleton into a Python object."""
pass
class Skeletonencodeerror(Exception):
"""Unable to encode a PrecomputedSkeleton into a binary object."""
pass
class Unsupportedprotocolerror(ValueError):
"""Unknown protocol extension."""
pass |
def isnotebook():
try:
shell = get_ipython().__class__.__name__
if shell == 'ZMQInteractiveShell':
return True # Jupyter notebook or qtconsole
elif shell == 'TerminalInteractiveShell':
return False # Terminal running IPython
else:
return False
except NameError:
return False
| def isnotebook():
try:
shell = get_ipython().__class__.__name__
if shell == 'ZMQInteractiveShell':
return True
elif shell == 'TerminalInteractiveShell':
return False
else:
return False
except NameError:
return False |
l = [*map(int, input().split())]
l.sort()
if l[0]+l[3] == l[1]+l[2] or l[3] == l[0]+l[1]+l[2]:
print("YES")
else:
print("NO")
| l = [*map(int, input().split())]
l.sort()
if l[0] + l[3] == l[1] + l[2] or l[3] == l[0] + l[1] + l[2]:
print('YES')
else:
print('NO') |
def is_positive(num):
if int(num) > 0:
return True
else:
return False
def is_negative(num):
if int(num) < 0:
return True
else:
return False
def is_zero(num):
if int(num) == 0:
return True
else:
return False
def is_odd(num):
if int(num) <= 0:
return False
if int(num) % 2 != 0:
return True
else:
return False
def is_even(num):
if int(num) <= 0:
return False
if int(num) % 2 == 0:
return True
else:
return False
def is_prime(num):
if int(num) == 0 or int(num) == 1:
return False
result = 0
total = 0
while True:
if total == int(num):
break
total += 1
if int(num) % total == 0:
result += 1
if result <= 2:
return True
else:
return False
| def is_positive(num):
if int(num) > 0:
return True
else:
return False
def is_negative(num):
if int(num) < 0:
return True
else:
return False
def is_zero(num):
if int(num) == 0:
return True
else:
return False
def is_odd(num):
if int(num) <= 0:
return False
if int(num) % 2 != 0:
return True
else:
return False
def is_even(num):
if int(num) <= 0:
return False
if int(num) % 2 == 0:
return True
else:
return False
def is_prime(num):
if int(num) == 0 or int(num) == 1:
return False
result = 0
total = 0
while True:
if total == int(num):
break
total += 1
if int(num) % total == 0:
result += 1
if result <= 2:
return True
else:
return False |
def factorial(curr):
g = 1
for i in range(1, curr + 1):
g *= i
return g
def non_recurrsion():
num = 1
try:
row = int(input("Enter number of rows:"))
except:
print("Invalid input. Please enter an integer")
exit(1)
for i in range(1, row + 1):
for j in range(0, i):
print(factorial(num), end=" ")
num += 1
print("")
# Recursive statement included
def factorial_recursion(numb):
if numb == 1:
return 1
else:
return numb * factorial_recursion(numb - 1)
def recursion():
num = 1
try:
row = int(input("Enter number of rows:"))
except:
print("Invalid input. Please enter an integer")
exit(1)
for i in range(1, row + 1):
for j in range(0, i):
print(factorial_recursion(num), end=" ")
num += 1
print("")
non_recurrsion()
# recursion()
| def factorial(curr):
g = 1
for i in range(1, curr + 1):
g *= i
return g
def non_recurrsion():
num = 1
try:
row = int(input('Enter number of rows:'))
except:
print('Invalid input. Please enter an integer')
exit(1)
for i in range(1, row + 1):
for j in range(0, i):
print(factorial(num), end=' ')
num += 1
print('')
def factorial_recursion(numb):
if numb == 1:
return 1
else:
return numb * factorial_recursion(numb - 1)
def recursion():
num = 1
try:
row = int(input('Enter number of rows:'))
except:
print('Invalid input. Please enter an integer')
exit(1)
for i in range(1, row + 1):
for j in range(0, i):
print(factorial_recursion(num), end=' ')
num += 1
print('')
non_recurrsion() |
# -*- coding: utf-8 -*-
"""Container for all required classes."""
# __init__.py
#
# Created by Thomas Nelson <tn90ca@gmail.com>
# Created..........................2015-01-25
# Modified.........................2015-01-25
#
# This module was developed for use in the Bugs project.
#
# Copyright (C) 2015 Thomas Nelson
__all__ = ["dna", "brain", "bug", "food", "world"] | """Container for all required classes."""
__all__ = ['dna', 'brain', 'bug', 'food', 'world'] |
def can_build(env, platform):
return platform == "windows" # For now, GGPO isn't available on linux or mac
def configure(env):
env.Append(CPPPATH=["#modules/godotggpo/sdk/include/"])
if env["platform"] == "windows":
if env["CC"] == "cl":
env.Append(LINKFLAGS=["GGPO.lib"])
env.Append(LIBPATH=["#modules/godotggpo/sdk/bin"])
else:
env.Append(LIBS=["ggpo"])
env.Append(LIBPATH=["#modules/godotggpo/sdk/bin"])
def get_doc_classes():
return [
"GGPO",
]
def get_doc_path():
return "doc_class" | def can_build(env, platform):
return platform == 'windows'
def configure(env):
env.Append(CPPPATH=['#modules/godotggpo/sdk/include/'])
if env['platform'] == 'windows':
if env['CC'] == 'cl':
env.Append(LINKFLAGS=['GGPO.lib'])
env.Append(LIBPATH=['#modules/godotggpo/sdk/bin'])
else:
env.Append(LIBS=['ggpo'])
env.Append(LIBPATH=['#modules/godotggpo/sdk/bin'])
def get_doc_classes():
return ['GGPO']
def get_doc_path():
return 'doc_class' |
print("find greatest common divisor:")
def gcd(m, n):
cf = []
for i in range (1,min(m,n)+1):
if (m%i) == 0 and (n%i) == 0 :
cf.append(i)
print(cf)
print(cf[-1])
gcd(int(input()), int(input()))
| print('find greatest common divisor:')
def gcd(m, n):
cf = []
for i in range(1, min(m, n) + 1):
if m % i == 0 and n % i == 0:
cf.append(i)
print(cf)
print(cf[-1])
gcd(int(input()), int(input())) |
def object_function_apply_by_key(object_to_apply, key_to_find, function_to_apply):
if object_to_apply:
if isinstance(object_to_apply, list) and len(object_to_apply) > 0:
for item in object_to_apply:
object_function_apply_by_key(item, key_to_find, function_to_apply)
elif isinstance(object_to_apply, dict):
for k in object_to_apply:
if isinstance(object_to_apply[k], list):
for item in object_to_apply[k]:
object_function_apply_by_key(item, key_to_find, function_to_apply)
elif isinstance(object_to_apply[k], dict):
object_function_apply_by_key(object_to_apply[k], key_to_find, function_to_apply)
if k == key_to_find:
function_to_apply(object_to_apply, k)
| def object_function_apply_by_key(object_to_apply, key_to_find, function_to_apply):
if object_to_apply:
if isinstance(object_to_apply, list) and len(object_to_apply) > 0:
for item in object_to_apply:
object_function_apply_by_key(item, key_to_find, function_to_apply)
elif isinstance(object_to_apply, dict):
for k in object_to_apply:
if isinstance(object_to_apply[k], list):
for item in object_to_apply[k]:
object_function_apply_by_key(item, key_to_find, function_to_apply)
elif isinstance(object_to_apply[k], dict):
object_function_apply_by_key(object_to_apply[k], key_to_find, function_to_apply)
if k == key_to_find:
function_to_apply(object_to_apply, k) |
# encoding: utf-8
"""
@version: v1.0
@author: Richard
@license: Apache Licence
@contact: billions.richard@qq.com
@site:
@software: PyCharm
@time: 2019/11/10 10:50
"""
class TreeNode(object):
def __init__(self, val,
left: 'TreeNode ' = None,
right: 'TreeNode ' = None):
self.val = val
self.left = left
self.right = right
| """
@version: v1.0
@author: Richard
@license: Apache Licence
@contact: billions.richard@qq.com
@site:
@software: PyCharm
@time: 2019/11/10 10:50
"""
class Treenode(object):
def __init__(self, val, left: 'TreeNode '=None, right: 'TreeNode '=None):
self.val = val
self.left = left
self.right = right |
r=""
for _ in range(int(input())):
x=int(input())
if abs(x)%2==0:
r+=str(x)+" is even\n"
else:
r+=str(x)+" is odd\n"
print(r,end="")
| r = ''
for _ in range(int(input())):
x = int(input())
if abs(x) % 2 == 0:
r += str(x) + ' is even\n'
else:
r += str(x) + ' is odd\n'
print(r, end='') |
def slider_event_cb(slider, event):
if event == lv.EVENT.VALUE_CHANGED:
slider_label.set_text("%u" % slider.get_value())
# Create a slider in the center of the display
slider = lv.slider(lv.scr_act())
slider.set_width(200)
slider.align(None, lv.ALIGN.CENTER, 0, 0)
slider.set_event_cb(slider_event_cb)
slider.set_range(0, 100)
# Create a label below the slider
slider_label = lv.label(lv.scr_act())
slider_label.set_text("0")
slider_label.set_auto_realign(True)
slider_label.align(slider, lv.ALIGN.OUT_BOTTOM_MID, 0, 10)
# Create an informative label
info = lv.label(lv.scr_act())
info.set_text("""Welcome to the slider+label demo!
Move the slider and see that the label
updates to match it.""")
info.align(None, lv.ALIGN.IN_TOP_LEFT, 10, 10)
| def slider_event_cb(slider, event):
if event == lv.EVENT.VALUE_CHANGED:
slider_label.set_text('%u' % slider.get_value())
slider = lv.slider(lv.scr_act())
slider.set_width(200)
slider.align(None, lv.ALIGN.CENTER, 0, 0)
slider.set_event_cb(slider_event_cb)
slider.set_range(0, 100)
slider_label = lv.label(lv.scr_act())
slider_label.set_text('0')
slider_label.set_auto_realign(True)
slider_label.align(slider, lv.ALIGN.OUT_BOTTOM_MID, 0, 10)
info = lv.label(lv.scr_act())
info.set_text('Welcome to the slider+label demo!\nMove the slider and see that the label\nupdates to match it.')
info.align(None, lv.ALIGN.IN_TOP_LEFT, 10, 10) |
# Dictionaries
# Giving a key value and calling
my_stuff = {'key1': "123", "key2": "Value of key2"}
print(my_stuff['key1'])
print(my_stuff['key2'])
# Something nexted
my_stuff2 = {'key1': "123", "key2": "Value of key2", 'key3': {'key4': [1, 3, 2]}}
print(my_stuff2['key3'])
print(my_stuff2['key3']['key4']) # This will print the nexted dictionary in the mystuff_2
# Printing entire Dictionaries
print(my_stuff, '= Dictionary 1')
print(my_stuff2, '= Dictionary 2') # Note in the output is not same in the dictionary as they are different from lists
# SMALL EXERCISE
my_stuff3 = {'key1': "123", "key2": "Value of key2", 'key3': {'key4': [1, 3, 2, 'grab me']}}
print(my_stuff3['key3']['key4'][3]) # This is just an example for something complicated
print(my_stuff3['key3']['key4'][3].upper()) # Same result but in upper case
print(my_stuff3['key3']['key4'][3].capitalize()) # Same result but in capitalize form
# Redefining a the value
food = {'lunch': 'pizza', 'breakfast': 'eggs'} # Main dictionary for down CODE
food['lunch'] = 'burger'
print(food['lunch']) # re assaigned value for 'lunch'
print(food) # Value is changed here
food['dinner'] = 'Pasta'
print(food) # New value added here
| my_stuff = {'key1': '123', 'key2': 'Value of key2'}
print(my_stuff['key1'])
print(my_stuff['key2'])
my_stuff2 = {'key1': '123', 'key2': 'Value of key2', 'key3': {'key4': [1, 3, 2]}}
print(my_stuff2['key3'])
print(my_stuff2['key3']['key4'])
print(my_stuff, '= Dictionary 1')
print(my_stuff2, '= Dictionary 2')
my_stuff3 = {'key1': '123', 'key2': 'Value of key2', 'key3': {'key4': [1, 3, 2, 'grab me']}}
print(my_stuff3['key3']['key4'][3])
print(my_stuff3['key3']['key4'][3].upper())
print(my_stuff3['key3']['key4'][3].capitalize())
food = {'lunch': 'pizza', 'breakfast': 'eggs'}
food['lunch'] = 'burger'
print(food['lunch'])
print(food)
food['dinner'] = 'Pasta'
print(food) |
def method1(str1, str2, m, n):
# If first string is empty, the only option is to
# insert all characters of second string into first
if m == 0:
return n
# If second string is empty, the only option is to
# remove all characters of first string
if n == 0:
return m
# If last characters of two strings are same, nothing
# much to do. Ignore last characters and get count for
# remaining strings.
if str1[m - 1] == str2[n - 1]:
return method1(str1, str2, m - 1, n - 1)
# If last characters are not same, consider all three
# operations on last character of first string, recursively
# compute minimum cost for all three operations and take
# minimum of three values.
return 1 + min(
method1(str1, str2, m, n - 1), # Insert
method1(str1, str2, m - 1, n), # Remove
method1(str1, str2, m - 1, n - 1), # Replace
)
if __name__ == "__main__":
"""
from timeit import timeit
# Driver code
str1 = "sunday"
str2 = "saturday"
print(
timeit(lambda: method1(str1, str2, len(str1), len(str2)), number=10000)
) # 0.2074630530041759
""" | def method1(str1, str2, m, n):
if m == 0:
return n
if n == 0:
return m
if str1[m - 1] == str2[n - 1]:
return method1(str1, str2, m - 1, n - 1)
return 1 + min(method1(str1, str2, m, n - 1), method1(str1, str2, m - 1, n), method1(str1, str2, m - 1, n - 1))
if __name__ == '__main__':
'\n from timeit import timeit\n\n # Driver code\n str1 = "sunday"\n str2 = "saturday"\n print(\n timeit(lambda: method1(str1, str2, len(str1), len(str2)), number=10000)\n ) # 0.2074630530041759\n ' |
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
i0 = Input("i0", ("TENSOR_FLOAT32", [2, 2])) # input 0
o1 = Output("o1", ("TENSOR_FLOAT32", [1, 2])) # output for model1
o2 = Output("o2", ("TENSOR_FLOAT32", [2, 1])) # output for model2
o3 = Output("o3", ("TENSOR_FLOAT32", [1])) # output for model3
model1 = Model().Operation("MEAN", i0, [0], 1).To(o1) # along axis 0, keep_dim=True
model2 = Model().Operation("MEAN", i0, [1], 1).To(o2) # along axis 1, keep_dim=True
model3 = Model().Operation("MEAN", i0, [0, 1], 0).To(o3) # along both axis, keep_dim=False
inputs1 = {i0: [1, 2, 3, 4]}
outputs11 = {o1: [4, 6]}
outputs12 = {o2: [3, 7]}
outputs13 = {o3: [10]}
inputs2 = {i0: [-1, -2, -3, -4]}
outputs21 = {o1: [-4, -6]}
outputs22 = {o2: [-3, -7]}
outputs23 = {o3: [-10]}
Example((inputs1, outputs11), model=model1)
Example((inputs1, outputs12), model=model2)
Example((inputs1, outputs13), model=model3)
Example((inputs2, outputs21), model=model1)
Example((inputs2, outputs22), model=model2)
Example((inputs2, outputs23), model=model3)
| i0 = input('i0', ('TENSOR_FLOAT32', [2, 2]))
o1 = output('o1', ('TENSOR_FLOAT32', [1, 2]))
o2 = output('o2', ('TENSOR_FLOAT32', [2, 1]))
o3 = output('o3', ('TENSOR_FLOAT32', [1]))
model1 = model().Operation('MEAN', i0, [0], 1).To(o1)
model2 = model().Operation('MEAN', i0, [1], 1).To(o2)
model3 = model().Operation('MEAN', i0, [0, 1], 0).To(o3)
inputs1 = {i0: [1, 2, 3, 4]}
outputs11 = {o1: [4, 6]}
outputs12 = {o2: [3, 7]}
outputs13 = {o3: [10]}
inputs2 = {i0: [-1, -2, -3, -4]}
outputs21 = {o1: [-4, -6]}
outputs22 = {o2: [-3, -7]}
outputs23 = {o3: [-10]}
example((inputs1, outputs11), model=model1)
example((inputs1, outputs12), model=model2)
example((inputs1, outputs13), model=model3)
example((inputs2, outputs21), model=model1)
example((inputs2, outputs22), model=model2)
example((inputs2, outputs23), model=model3) |
class PushCrewBaseException(BaseException):
"""
Generic exception
"""
def __init__(self, *args, **kwargs):
BaseException.__init__(self, *args, **kwargs)
| class Pushcrewbaseexception(BaseException):
"""
Generic exception
"""
def __init__(self, *args, **kwargs):
BaseException.__init__(self, *args, **kwargs) |
# auth.signature add default fields like the created on/ create by/ modified by/ modified on
db.define_table('blog_post',
Field('title', requires=IS_NOT_EMPTY()),
Field('body', 'text', requires=IS_NOT_EMPTY()),
Field('photo', 'upload'),
auth.signature)
db.define_table('blog_comment',
Field('blog_post', 'reference blog_post'),
Field('comments', 'text', requires=IS_NOT_EMPTY()),
auth.signature
)
db.blog_post.title.requires = IS_NOT_IN_DB(db, db.blog_post.title)
db.define_table('test',
Field('first_name', requires = IS_NOT_EMPTY()),
Field('last_name', requires = IS_NOT_EMPTY()),
Field('email', requires= IS_EMAIL()),
Field('email_validate',requires = IS_EQUAL_TO(request.vars.email)))
db.define_table("entries", Field("entry", "text"))
| db.define_table('blog_post', field('title', requires=is_not_empty()), field('body', 'text', requires=is_not_empty()), field('photo', 'upload'), auth.signature)
db.define_table('blog_comment', field('blog_post', 'reference blog_post'), field('comments', 'text', requires=is_not_empty()), auth.signature)
db.blog_post.title.requires = is_not_in_db(db, db.blog_post.title)
db.define_table('test', field('first_name', requires=is_not_empty()), field('last_name', requires=is_not_empty()), field('email', requires=is_email()), field('email_validate', requires=is_equal_to(request.vars.email)))
db.define_table('entries', field('entry', 'text')) |
"""
--- Day 2: Dive! ---
https://adventofcode.com/2021/day/2
summary: process directions to calculate depth and horizontal position
Part 1 - 2322630
Part 2 - 2105273490
"""
def load_data():
#datafile = 'input-day2-example'
datafile = 'input-day2'
data = []
with open(datafile, 'r') as input:
for line in input:
line_list = line.split()
line_list[1] = int(line_list[1])
data.append(line_list)
return data
def part1(commands):
"""
forward X increases the horizontal position by X units.
down X increases the depth by X units.
up X decreases the depth by X units.
Calculate the horizontal position and depth you would have after following the planned course.
What do you get if you multiply your final horizontal position by your final depth
"""
x = 0
y = 0
for command in commands:
dir = command[0]
num = command[1]
if dir == "forward":
x += num
elif dir == "up":
y -= num
else:
y += num
return x * y
def part2(commands):
"""
down X increases your aim by X units.
up X decreases your aim by X units.
forward X does two things:
It increases your horizontal position by X units.
It increases your depth by your aim multiplied by X.
Calculate the horizontal position and depth you would have after following the planned course.
What do you get if you multiply your final horizontal position by your final depth
"""
x = 0
depth = 0
aim = 0
for command in commands:
dir = command[0]
num = command[1]
if dir == "forward":
x += num
depth += aim * num
elif dir == "up":
aim -= num
else:
aim += num
return x * depth
if __name__ == '__main__':
data = load_data()
print(f"{data}\n")
results1 = part1(data)
print(f"Part 1 - {results1}")
results2 = part2(data)
print(f"Part 2 - {results2}\n")
| """
--- Day 2: Dive! ---
https://adventofcode.com/2021/day/2
summary: process directions to calculate depth and horizontal position
Part 1 - 2322630
Part 2 - 2105273490
"""
def load_data():
datafile = 'input-day2'
data = []
with open(datafile, 'r') as input:
for line in input:
line_list = line.split()
line_list[1] = int(line_list[1])
data.append(line_list)
return data
def part1(commands):
"""
forward X increases the horizontal position by X units.
down X increases the depth by X units.
up X decreases the depth by X units.
Calculate the horizontal position and depth you would have after following the planned course.
What do you get if you multiply your final horizontal position by your final depth
"""
x = 0
y = 0
for command in commands:
dir = command[0]
num = command[1]
if dir == 'forward':
x += num
elif dir == 'up':
y -= num
else:
y += num
return x * y
def part2(commands):
"""
down X increases your aim by X units.
up X decreases your aim by X units.
forward X does two things:
It increases your horizontal position by X units.
It increases your depth by your aim multiplied by X.
Calculate the horizontal position and depth you would have after following the planned course.
What do you get if you multiply your final horizontal position by your final depth
"""
x = 0
depth = 0
aim = 0
for command in commands:
dir = command[0]
num = command[1]
if dir == 'forward':
x += num
depth += aim * num
elif dir == 'up':
aim -= num
else:
aim += num
return x * depth
if __name__ == '__main__':
data = load_data()
print(f'{data}\n')
results1 = part1(data)
print(f'Part 1 - {results1}')
results2 = part2(data)
print(f'Part 2 - {results2}\n') |
"""
multithreading support. See: https://docs.micropython.org/en/v1.17/library/_thread.html
|see_cpython_module| :mod:`python:_thread` https://docs.python.org/3/library/_thread.html .
This module implements multithreading support.
This module is highly experimental and its API is not yet fully settled
and not yet described in this documentation.
"""
# source version: v1_17
# origin module:: micropython/docs/library/_thread.rst
| """
multithreading support. See: https://docs.micropython.org/en/v1.17/library/_thread.html
|see_cpython_module| :mod:`python:_thread` https://docs.python.org/3/library/_thread.html .
This module implements multithreading support.
This module is highly experimental and its API is not yet fully settled
and not yet described in this documentation.
""" |
# ------------------------------------------------------------------
# Copyright (c) 2020 PyInstaller Development Team.
#
# This file is distributed under the terms of the GNU General Public
# License (version 2.0 or later).
#
# The full license is available in LICENSE.GPL.txt, distributed with
# this software.
#
# SPDX-License-Identifier: GPL-2.0-or-later
# ------------------------------------------------------------------
# Tested on Windows 10 1809 64bit with scikit-learn 0.22.1 and Python 3.7
hiddenimports = ['sklearn.neighbors.typedefs',
'sklearn.utils._cython_blas',
'sklearn.neighbors.quad_tree',
'sklearn.tree._utils']
| hiddenimports = ['sklearn.neighbors.typedefs', 'sklearn.utils._cython_blas', 'sklearn.neighbors.quad_tree', 'sklearn.tree._utils'] |
# Build template for libxshmfence — looks like a cports-style (Chimera Linux)
# package recipe: module-level variables define metadata, functions hook into
# the build driver.
pkgname = "libxshmfence"
pkgver = "1.3"
pkgrel = 0
build_style = "gnu_configure"
# Per the flag name, the shared-memory fence files are placed under /dev/shm.
configure_args = ["--with-shared-memory-dir=/dev/shm"]
hostmakedepends = ["pkgconf"]
makedepends = ["xorgproto"]
pkgdesc = "X SyncFence synchronization primitive"
maintainer = "q66 <q66@chimera-linux.org>"
license = "MIT"
url = "https://xorg.freedesktop.org"
source = f"$(XORG_SITE)/lib/libxshmfence-{pkgver}.tar.bz2"
sha256 = "b884300d26a14961a076fbebc762a39831cb75f92bed5ccf9836345b459220c7"
def post_install(self):
    # MIT-licensed: the license text must be shipped explicitly.
    self.install_license("COPYING")
@subpackage("libxshmfence-devel")
def _devel(self):
    # Standard -devel split (headers, pkg-config files).
    return self.default_devel()
| pkgname = 'libxshmfence'
pkgver = '1.3'
pkgrel = 0
build_style = 'gnu_configure'
configure_args = ['--with-shared-memory-dir=/dev/shm']
hostmakedepends = ['pkgconf']
makedepends = ['xorgproto']
pkgdesc = 'X SyncFence synchronization primitive'
maintainer = 'q66 <q66@chimera-linux.org>'
license = 'MIT'
url = 'https://xorg.freedesktop.org'
source = f'$(XORG_SITE)/lib/libxshmfence-{pkgver}.tar.bz2'
sha256 = 'b884300d26a14961a076fbebc762a39831cb75f92bed5ccf9836345b459220c7'
def post_install(self):
self.install_license('COPYING')
@subpackage('libxshmfence-devel')
def _devel(self):
return self.default_devel() |
# Ask the user for a file name and find the most frequent word in that file.
nombre_archivo = input("Ingrese el nombre del archivo que contiene las palabras: ")
# Use a context manager so the file handle is always closed, even on a read
# error — the original opened the file and never closed it.
with open(nombre_archivo, "r") as archivo:
    texto = archivo.read()
palabras = texto.split()
# Map each word to its number of occurrences.
ocurrencias = {}
for palabra in palabras:
    ocurrencias[palabra] = ocurrencias.get(palabra, 0) + 1
# Track the (word, count) pair with the highest count; on a tie, the first
# word encountered in the dict wins (same as the original strict-less-than).
maxpar = None, 0
for palabra, cantidad in ocurrencias.items():
    if maxpar[1] < cantidad:
        maxpar = palabra, cantidad
print("La palabra con mayor cantidad de repeticion es: ",maxpar[0],"repetida",maxpar[1],"veces") | nombre_archivo = input('Ingrese el nombre del archivo que contiene las palabras: ')
archivo = open(nombre_archivo, 'r')
texto = archivo.read()
palabras = texto.split()
ocurrencias = {}
for palabra in palabras:
if ocurrencias.get(palabra):
ocurrencias[palabra] += 1
else:
ocurrencias[palabra] = 1
maxpar = (None, 0)
for (palabra, cantidad) in ocurrencias.items():
if maxpar[1] < cantidad:
maxpar = (palabra, cantidad)
print('La palabra con mayor cantidad de repeticion es: ', maxpar[0], 'repetida', maxpar[1], 'veces') |
#!/usr/bin/python
# ==============================================================================
# Author: Tao Li (taoli@ucsd.edu)
# Date: May 1, 2015
# Question: 009-Palindrome-Number
# Link: https://leetcode.com/problems/palindrome-number/
# ==============================================================================
# Determine whether an integer is a palindrome. Do this without extra space.
#
# Some hints:
# Could negative integers be palindromes? (ie, -1)
#
# If you are thinking of converting the integer to string, note the restriction of using extra space.
#
# You could also try reversing an integer. However, if you have solved the problem "Reverse Integer", you know that the reversed integer might overflow. How would you handle such case?
#
# There is a more generic way of solving this problem.
# ==============================================================================
class Solution:
# @param {integer} x
# @return {boolean}
def isPalindrome(self, x):
if x < 0:
return False
tmp_str = str(x)
for i in range(0, len(tmp_str)/2):
if tmp_str[i] != tmp_str[-(i+1)]:
return False
return True | class Solution:
def is_palindrome(self, x):
if x < 0:
return False
tmp_str = str(x)
for i in range(0, len(tmp_str) / 2):
if tmp_str[i] != tmp_str[-(i + 1)]:
return False
return True |
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
def trellis_deps():
    """Declares all third-party external repositories this project builds against.

    Every repository is wrapped in `maybe` so a root workspace that already
    defines a repo of the same name takes precedence, and each archive is
    pinned by sha256 for reproducible fetches.
    """
    maybe(
        http_archive,
        name = "ecal",
        build_file = Label("//third_party:ecal.BUILD"),
        sha256 = "1d83d3accfb4a936ffd343524e4a626f0265e600226d6e997b3dbbd7f62eaac6",
        strip_prefix = "ecal-5.10.0",
        urls = [
            "https://github.com/continental/ecal/archive/refs/tags/v5.10.0.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "fmt",
        build_file = Label("//third_party:fmt.BUILD"),
        sha256 = "36016a75dd6e0a9c1c7df5edb98c93a3e77dabcf122de364116efb9f23c6954a",
        strip_prefix = "fmt-8.0.0",
        urls = [
            "https://github.com/fmtlib/fmt/releases/download/8.0.0/fmt-8.0.0.zip",
        ],
    )
    # yaml-cpp ships its own BUILD file, so no build_file override is needed.
    maybe(
        http_archive,
        name = "yaml-cpp",
        sha256 = "43e6a9fcb146ad871515f0d0873947e5d497a1c9c60c58cb102a97b47208b7c3",
        strip_prefix = "yaml-cpp-yaml-cpp-0.7.0",
        urls = [
            "https://github.com/jbeder/yaml-cpp/archive/refs/tags/yaml-cpp-0.7.0.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "cxxopts",
        build_file = Label("//third_party:cxxopts.BUILD"),
        sha256 = "984aa3c8917d649b14d7f6277104ce38dd142ce378a9198ec926f03302399681",
        strip_prefix = "cxxopts-2.2.1",
        urls = [
            "https://github.com/jarro2783/cxxopts/archive/refs/tags/v2.2.1.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "tclap",
        build_file = Label("//third_party:tclap.BUILD"),
        sha256 = "7363f8f571e6e733b269c4b4e9c18f392d3cd7240d39a379d95de5a4c4bdc47f",
        strip_prefix = "tclap-1.2.4",
        urls = [
            "https://github.com/xguerin/tclap/archive/refs/tags/v1.2.4.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "asio",
        build_file = Label("//third_party:asio.BUILD"),
        sha256 = "5ee191aee825dfb1325cbacf643d599b186de057c88464ea98f1bae5ba4ff47a",
        strip_prefix = "asio-asio-1-19-2",
        urls = [
            "https://github.com/chriskohlhoff/asio/archive/refs/tags/asio-1-19-2.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "gtest",
        sha256 = "9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb",
        strip_prefix = "googletest-release-1.10.0",
        urls = [
            "https://github.com/google/googletest/archive/release-1.10.0.tar.gz",  # Oct 3, 2019
        ],
    )
    maybe(
        http_archive,
        name = "simpleini",
        build_file = Label("//third_party:simpleini.BUILD"),
        sha256 = "14e5bc1cb318ed374d45d6faf48da0b79db7e069c12ec6e090523b8652ef47c7",
        strip_prefix = "simpleini-4.17",
        urls = [
            "https://github.com/brofield/simpleini/archive/refs/tags/4.17.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "com_google_protobuf",
        sha256 = "cf63d46ef743f4c30b0e36a562caf83cabed3f10e6ca49eb476913c4655394d5",
        strip_prefix = "protobuf-436bd7880e458532901c58f4d9d1ea23fa7edd52",
        urls = [
            "https://storage.googleapis.com/grpc-bazel-mirror/github.com/google/protobuf/archive/436bd7880e458532901c58f4d9d1ea23fa7edd52.tar.gz",
            "https://github.com/google/protobuf/archive/436bd7880e458532901c58f4d9d1ea23fa7edd52.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "hdf5",
        build_file = Label("//third_party:hdf5.BUILD"),
        sha256 = "7a1a0a54371275ce2dfc5cd093775bb025c365846512961e7e5ceaecb437ef15",
        strip_prefix = "hdf5-1.10.7",
        urls = [
            "https://hdf-wordpress-1.s3.amazonaws.com/wp-content/uploads/manual/HDF5/HDF5_1_10_7/src/hdf5-1.10.7.tar.gz",  # Oct 16, 2020
        ],
    )
    maybe(
        http_archive,
        name = "termcolor",
        build_file = Label("//third_party:termcolor.BUILD"),
        sha256 = "4a73a77053822ca1ed6d4a2af416d31028ec992fb0ffa794af95bd6216bb6a20",
        strip_prefix = "termcolor-2.0.0",
        urls = [
            "https://github.com/ikalnytskyi/termcolor/archive/refs/tags/v2.0.0.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "spdlog",
        build_file = Label("//third_party:spdlog.BUILD"),
        sha256 = "6fff9215f5cb81760be4cc16d033526d1080427d236e86d70bb02994f85e3d38",
        strip_prefix = "spdlog-1.9.2",
        urls = [
            "https://github.com/gabime/spdlog/archive/refs/tags/v1.9.2.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "json",
        build_file = Label("//third_party:json.BUILD"),
        sha256 = "61e605be15e88deeac4582aaf01c09d616f8302edde7adcaba9261ddc3b4ceca",
        strip_prefix = "single_include/nlohmann",
        urls = [
            "https://github.com/nlohmann/json/releases/download/v3.10.2/include.zip",
        ],
    )
    # New eCAL dependency as of v5.10.0
    maybe(
        http_archive,
        name = "tcp_pubsub",
        build_file = Label("//third_party:tcp_pubsub.BUILD"),
        sha256 = "c335faf859219070ef67d62821a5f9ac5a308152bf7a1293a0d0cc74fefb9b58",
        strip_prefix = "tcp_pubsub-1.0.0/tcp_pubsub",
        urls = [
            "https://github.com/continental/tcp_pubsub/archive/refs/tags/v1.0.0.tar.gz",
        ],
    )
    # Submodule of tcp_pubsub
    maybe(
        http_archive,
        name = "recycle",
        build_file = Label("//third_party:recycle.BUILD"),
        sha256 = "d1cf8a5256110c068f366b0e4e16ad39427b9def13876670aad9f167afd7aaee",
        strip_prefix = "recycle-c5425709b2273ef6371647247d1a1d86aa75c2e6",
        urls = [
            "https://github.com/steinwurf/recycle/archive/c5425709b2273ef6371647247d1a1d86aa75c2e6.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "rules_pkg",
        sha256 = "62eeb544ff1ef41d786e329e1536c1d541bb9bcad27ae984d57f18f314018e66",
        urls = [
            "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.6.0/rules_pkg-0.6.0.tar.gz",
            "https://github.com/bazelbuild/rules_pkg/releases/download/0.6.0/rules_pkg-0.6.0.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "variadic_table",
        build_file = Label("//third_party:variadic_table.BUILD"),
        sha256 = "6799c0ee507fb3c739bde936630fc826f3c13abeb7b3245ebf997a6446fd0cb3",
        strip_prefix = "variadic_table-82fcf65c00c70afca95f71c0c77fba1982a20a86",
        urls = [
            "https://github.com/friedmud/variadic_table/archive/82fcf65c00c70afca95f71c0c77fba1982a20a86.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "eigen",
        build_file = Label("//third_party:eigen.BUILD"),
        sha256 = "8586084f71f9bde545ee7fa6d00288b264a2b7ac3607b974e54d13e7162c1c72",
        strip_prefix = "eigen-3.4.0",
        urls = [
            "https://github.com/agtonomy/eigen/archive/refs/tags/3.4.0.tar.gz",
        ],
    )
| load('@bazel_tools//tools/build_defs/repo:http.bzl', 'http_archive')
load('@bazel_tools//tools/build_defs/repo:utils.bzl', 'maybe')
def trellis_deps():
maybe(http_archive, name='ecal', build_file=label('//third_party:ecal.BUILD'), sha256='1d83d3accfb4a936ffd343524e4a626f0265e600226d6e997b3dbbd7f62eaac6', strip_prefix='ecal-5.10.0', urls=['https://github.com/continental/ecal/archive/refs/tags/v5.10.0.tar.gz'])
maybe(http_archive, name='fmt', build_file=label('//third_party:fmt.BUILD'), sha256='36016a75dd6e0a9c1c7df5edb98c93a3e77dabcf122de364116efb9f23c6954a', strip_prefix='fmt-8.0.0', urls=['https://github.com/fmtlib/fmt/releases/download/8.0.0/fmt-8.0.0.zip'])
maybe(http_archive, name='yaml-cpp', sha256='43e6a9fcb146ad871515f0d0873947e5d497a1c9c60c58cb102a97b47208b7c3', strip_prefix='yaml-cpp-yaml-cpp-0.7.0', urls=['https://github.com/jbeder/yaml-cpp/archive/refs/tags/yaml-cpp-0.7.0.tar.gz'])
maybe(http_archive, name='cxxopts', build_file=label('//third_party:cxxopts.BUILD'), sha256='984aa3c8917d649b14d7f6277104ce38dd142ce378a9198ec926f03302399681', strip_prefix='cxxopts-2.2.1', urls=['https://github.com/jarro2783/cxxopts/archive/refs/tags/v2.2.1.tar.gz'])
maybe(http_archive, name='tclap', build_file=label('//third_party:tclap.BUILD'), sha256='7363f8f571e6e733b269c4b4e9c18f392d3cd7240d39a379d95de5a4c4bdc47f', strip_prefix='tclap-1.2.4', urls=['https://github.com/xguerin/tclap/archive/refs/tags/v1.2.4.tar.gz'])
maybe(http_archive, name='asio', build_file=label('//third_party:asio.BUILD'), sha256='5ee191aee825dfb1325cbacf643d599b186de057c88464ea98f1bae5ba4ff47a', strip_prefix='asio-asio-1-19-2', urls=['https://github.com/chriskohlhoff/asio/archive/refs/tags/asio-1-19-2.tar.gz'])
maybe(http_archive, name='gtest', sha256='9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb', strip_prefix='googletest-release-1.10.0', urls=['https://github.com/google/googletest/archive/release-1.10.0.tar.gz'])
maybe(http_archive, name='simpleini', build_file=label('//third_party:simpleini.BUILD'), sha256='14e5bc1cb318ed374d45d6faf48da0b79db7e069c12ec6e090523b8652ef47c7', strip_prefix='simpleini-4.17', urls=['https://github.com/brofield/simpleini/archive/refs/tags/4.17.tar.gz'])
maybe(http_archive, name='com_google_protobuf', sha256='cf63d46ef743f4c30b0e36a562caf83cabed3f10e6ca49eb476913c4655394d5', strip_prefix='protobuf-436bd7880e458532901c58f4d9d1ea23fa7edd52', urls=['https://storage.googleapis.com/grpc-bazel-mirror/github.com/google/protobuf/archive/436bd7880e458532901c58f4d9d1ea23fa7edd52.tar.gz', 'https://github.com/google/protobuf/archive/436bd7880e458532901c58f4d9d1ea23fa7edd52.tar.gz'])
maybe(http_archive, name='hdf5', build_file=label('//third_party:hdf5.BUILD'), sha256='7a1a0a54371275ce2dfc5cd093775bb025c365846512961e7e5ceaecb437ef15', strip_prefix='hdf5-1.10.7', urls=['https://hdf-wordpress-1.s3.amazonaws.com/wp-content/uploads/manual/HDF5/HDF5_1_10_7/src/hdf5-1.10.7.tar.gz'])
maybe(http_archive, name='termcolor', build_file=label('//third_party:termcolor.BUILD'), sha256='4a73a77053822ca1ed6d4a2af416d31028ec992fb0ffa794af95bd6216bb6a20', strip_prefix='termcolor-2.0.0', urls=['https://github.com/ikalnytskyi/termcolor/archive/refs/tags/v2.0.0.tar.gz'])
maybe(http_archive, name='spdlog', build_file=label('//third_party:spdlog.BUILD'), sha256='6fff9215f5cb81760be4cc16d033526d1080427d236e86d70bb02994f85e3d38', strip_prefix='spdlog-1.9.2', urls=['https://github.com/gabime/spdlog/archive/refs/tags/v1.9.2.tar.gz'])
maybe(http_archive, name='json', build_file=label('//third_party:json.BUILD'), sha256='61e605be15e88deeac4582aaf01c09d616f8302edde7adcaba9261ddc3b4ceca', strip_prefix='single_include/nlohmann', urls=['https://github.com/nlohmann/json/releases/download/v3.10.2/include.zip'])
maybe(http_archive, name='tcp_pubsub', build_file=label('//third_party:tcp_pubsub.BUILD'), sha256='c335faf859219070ef67d62821a5f9ac5a308152bf7a1293a0d0cc74fefb9b58', strip_prefix='tcp_pubsub-1.0.0/tcp_pubsub', urls=['https://github.com/continental/tcp_pubsub/archive/refs/tags/v1.0.0.tar.gz'])
maybe(http_archive, name='recycle', build_file=label('//third_party:recycle.BUILD'), sha256='d1cf8a5256110c068f366b0e4e16ad39427b9def13876670aad9f167afd7aaee', strip_prefix='recycle-c5425709b2273ef6371647247d1a1d86aa75c2e6', urls=['https://github.com/steinwurf/recycle/archive/c5425709b2273ef6371647247d1a1d86aa75c2e6.tar.gz'])
maybe(http_archive, name='rules_pkg', sha256='62eeb544ff1ef41d786e329e1536c1d541bb9bcad27ae984d57f18f314018e66', urls=['https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.6.0/rules_pkg-0.6.0.tar.gz', 'https://github.com/bazelbuild/rules_pkg/releases/download/0.6.0/rules_pkg-0.6.0.tar.gz'])
maybe(http_archive, name='variadic_table', build_file=label('//third_party:variadic_table.BUILD'), sha256='6799c0ee507fb3c739bde936630fc826f3c13abeb7b3245ebf997a6446fd0cb3', strip_prefix='variadic_table-82fcf65c00c70afca95f71c0c77fba1982a20a86', urls=['https://github.com/friedmud/variadic_table/archive/82fcf65c00c70afca95f71c0c77fba1982a20a86.tar.gz'])
maybe(http_archive, name='eigen', build_file=label('//third_party:eigen.BUILD'), sha256='8586084f71f9bde545ee7fa6d00288b264a2b7ac3607b974e54d13e7162c1c72', strip_prefix='eigen-3.4.0', urls=['https://github.com/agtonomy/eigen/archive/refs/tags/3.4.0.tar.gz']) |
# Measurement-unit string constants, grouped by physical quantity.
# The ``#:`` comment form is the Sphinx-autodoc style for documenting
# module-level constants.
# VOLTAGE and CURRENT
#: Unit for Voltage
UNIT_VOLT = 'V'
#: Unit for Voltage*10^-3
UNIT_MILLI_VOLT = 'mV'
#: Unit for Current
UNIT_AMPERE = 'A'
#: Unit for Current*10^-3
UNIT_MILLI_AMPERE = 'mA'
#: Unit for Current*10^-6
UNIT_MICRO_AMPERE = 'uA'
# FREQUENCY
#: Unit for Frequencies
UNIT_HERTZ = 'Hz'
#: Unit for Frequencies*10^3
UNIT_KILO_HERTZ = 'kHz'
# POWER
#: Unit for reactive power
UNIT_VOLT_AMPERE = 'VA'
#: Unit for power*10^-3
UNIT_MILLI_WATT = 'mW'
#: Unit for power
UNIT_WATT = 'W'
#: Unit for power*10^3
UNIT_KILO_WATT = 'kW'
# ENERGY
#: Unit for energy*10^3
UNIT_KILO_WATT_HOURS = 'kWh'
# TIME
#: Unit for seconds
UNIT_SECONDS = 's'
#: Unit for milliseconds
UNIT_MILLI_SECONDS = 'ms'
#: Unit for microseconds
UNIT_MICRO_SECONDS = 'us'
#: Unit for minutes
UNIT_MINUTES = 'min'
#: Unit for hours
UNIT_HOURS = 'h'
#: Unit for days
UNIT_DAYS = 'd'
| unit_volt = 'V'
unit_milli_volt = 'mV'
unit_ampere = 'A'
unit_milli_ampere = 'mA'
unit_micro_ampere = 'uA'
unit_hertz = 'Hz'
unit_kilo_hertz = 'kHz'
unit_volt_ampere = 'VA'
unit_milli_watt = 'mW'
unit_watt = 'W'
unit_kilo_watt = 'kW'
unit_kilo_watt_hours = 'kWh'
unit_seconds = 's'
unit_milli_seconds = 'ms'
unit_micro_seconds = 'us'
unit_minutes = 'min'
unit_hours = 'h'
unit_days = 'd' |
# Mini "zombie tag" game: five named players; the user reports who was found,
# and that player is turned into a zombie.
players = ['Nicola', 'Penny', 'Dom', 'Nathan', 'Josie']
print(f"Friends: {players[0]}, {players[1]}, {players[2]}, {players[3]}, {players[4]}")
find = input("Who did you find? ")
if find in players:
    print(f"{find} has turned into a zombie!")
    # Replace the found player in place so the list keeps five entries and
    # the positional prints below stay valid.
    players[players.index(find)] = "Zombie"
    print(f"Remaining players: {players[0]}, {players[1]}, {players[2]}, {players[3]}, {players[4]}")
else:
    # Name not recognised: nothing changes.
    print("Everyone is still in the game!")
print(f"Remaining players: {players[0]}, {players[1]}, {players[2]}, {players[3]}, {players[4]}") | players = ['Nicola', 'Penny', 'Dom', 'Nathan', 'Josie']
print(f'Friends: {players[0]}, {players[1]}, {players[2]}, {players[3]}, {players[4]}')
find = input('Who did you find? ')
if find in players:
print(f'{find} has turned into a zombie!')
players[players.index(find)] = 'Zombie'
print(f'Remaining players: {players[0]}, {players[1]}, {players[2]}, {players[3]}, {players[4]}')
else:
print('Everyone is still in the game!')
print(f'Remaining players: {players[0]}, {players[1]}, {players[2]}, {players[3]}, {players[4]}') |
"""0MQ Constant names"""
# Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Python module level constants
#-----------------------------------------------------------------------------
# dictionaries of constants new or removed in particular versions
new_in = {
(2,2,0) : [
'RCVTIMEO',
'SNDTIMEO',
],
(3,2,2) : [
# errnos
'EMSGSIZE',
'EAFNOSUPPORT',
'ENETUNREACH',
'ECONNABORTED',
'ECONNRESET',
'ENOTCONN',
'ETIMEDOUT',
'EHOSTUNREACH',
'ENETRESET',
'IO_THREADS',
'MAX_SOCKETS',
'IO_THREADS_DFLT',
'MAX_SOCKETS_DFLT',
'ROUTER_BEHAVIOR',
'ROUTER_MANDATORY',
'FAIL_UNROUTABLE',
'TCP_KEEPALIVE',
'TCP_KEEPALIVE_CNT',
'TCP_KEEPALIVE_IDLE',
'TCP_KEEPALIVE_INTVL',
'DELAY_ATTACH_ON_CONNECT',
'XPUB_VERBOSE',
'EVENT_CONNECTED',
'EVENT_CONNECT_DELAYED',
'EVENT_CONNECT_RETRIED',
'EVENT_LISTENING',
'EVENT_BIND_FAILED',
'EVENT_ACCEPTED',
'EVENT_ACCEPT_FAILED',
'EVENT_CLOSED',
'EVENT_CLOSE_FAILED',
'EVENT_DISCONNECTED',
'EVENT_ALL',
],
(4,0,0) : [
# socket types
'STREAM',
# socket opts
'IMMEDIATE',
'ROUTER_RAW',
'IPV6',
'MECHANISM',
'PLAIN_SERVER',
'PLAIN_USERNAME',
'PLAIN_PASSWORD',
'CURVE_SERVER',
'CURVE_PUBLICKEY',
'CURVE_SECRETKEY',
'CURVE_SERVERKEY',
'PROBE_ROUTER',
'REQ_RELAXED',
'REQ_CORRELATE',
'CONFLATE',
'ZAP_DOMAIN',
# security
'NULL',
'PLAIN',
'CURVE',
# events
'EVENT_MONITOR_STOPPED',
],
(4,1,0) : [
# socket opts
'ROUTER_HANDOVER',
'TOS',
'IPC_FILTER_PID',
'IPC_FILTER_UID',
'IPC_FILTER_GID',
'CONNECT_RID',
],
}
removed_in = {
(3,2,2) : [
'UPSTREAM',
'DOWNSTREAM',
'HWM',
'SWAP',
'MCAST_LOOP',
'RECOVERY_IVL_MSEC',
]
}
# collections of zmq constant names based on their role
# base names have no specific use
# opt names are validated in get/set methods of various objects
base_names = [
# base
'VERSION',
'VERSION_MAJOR',
'VERSION_MINOR',
'VERSION_PATCH',
'NOBLOCK',
'DONTWAIT',
'POLLIN',
'POLLOUT',
'POLLERR',
'SNDMORE',
'STREAMER',
'FORWARDER',
'QUEUE',
'IO_THREADS_DFLT',
'MAX_SOCKETS_DFLT',
# socktypes
'PAIR',
'PUB',
'SUB',
'REQ',
'REP',
'DEALER',
'ROUTER',
'PULL',
'PUSH',
'XPUB',
'XSUB',
'UPSTREAM',
'DOWNSTREAM',
'STREAM',
# events
'EVENT_CONNECTED',
'EVENT_CONNECT_DELAYED',
'EVENT_CONNECT_RETRIED',
'EVENT_LISTENING',
'EVENT_BIND_FAILED',
'EVENT_ACCEPTED',
'EVENT_ACCEPT_FAILED',
'EVENT_CLOSED',
'EVENT_CLOSE_FAILED',
'EVENT_DISCONNECTED',
'EVENT_ALL',
'EVENT_MONITOR_STOPPED',
# security
'NULL',
'PLAIN',
'CURVE',
## ERRNO
# Often used (these are alse in errno.)
'EAGAIN',
'EINVAL',
'EFAULT',
'ENOMEM',
'ENODEV',
'EMSGSIZE',
'EAFNOSUPPORT',
'ENETUNREACH',
'ECONNABORTED',
'ECONNRESET',
'ENOTCONN',
'ETIMEDOUT',
'EHOSTUNREACH',
'ENETRESET',
# For Windows compatability
'HAUSNUMERO',
'ENOTSUP',
'EPROTONOSUPPORT',
'ENOBUFS',
'ENETDOWN',
'EADDRINUSE',
'EADDRNOTAVAIL',
'ECONNREFUSED',
'EINPROGRESS',
'ENOTSOCK',
# 0MQ Native
'EFSM',
'ENOCOMPATPROTO',
'ETERM',
'EMTHREAD',
]
int64_sockopt_names = [
'AFFINITY',
'MAXMSGSIZE',
# sockopts removed in 3.0.0
'HWM',
'SWAP',
'MCAST_LOOP',
'RECOVERY_IVL_MSEC',
]
bytes_sockopt_names = [
'IDENTITY',
'SUBSCRIBE',
'UNSUBSCRIBE',
'LAST_ENDPOINT',
'TCP_ACCEPT_FILTER',
'PLAIN_USERNAME',
'PLAIN_PASSWORD',
'CURVE_PUBLICKEY',
'CURVE_SECRETKEY',
'CURVE_SERVERKEY',
'ZAP_DOMAIN',
'CONNECT_RID',
]
int_sockopt_names = [
# sockopts
'RECONNECT_IVL_MAX',
# sockopts new in 2.2.0
'SNDTIMEO',
'RCVTIMEO',
# new in 3.x
'SNDHWM',
'RCVHWM',
'MULTICAST_HOPS',
'IPV4ONLY',
'ROUTER_BEHAVIOR',
'TCP_KEEPALIVE',
'TCP_KEEPALIVE_CNT',
'TCP_KEEPALIVE_IDLE',
'TCP_KEEPALIVE_INTVL',
'DELAY_ATTACH_ON_CONNECT',
'XPUB_VERBOSE',
'FD',
'EVENTS',
'TYPE',
'LINGER',
'RECONNECT_IVL',
'BACKLOG',
'ROUTER_MANDATORY',
'FAIL_UNROUTABLE',
'ROUTER_RAW',
'IMMEDIATE',
'IPV6',
'MECHANISM',
'PLAIN_SERVER',
'CURVE_SERVER',
'PROBE_ROUTER',
'REQ_RELAXED',
'REQ_CORRELATE',
'CONFLATE',
'ROUTER_HANDOVER',
'TOS',
'IPC_FILTER_PID',
'IPC_FILTER_UID',
'IPC_FILTER_GID',
]
switched_sockopt_names = [
'RATE',
'RECOVERY_IVL',
'SNDBUF',
'RCVBUF',
'RCVMORE',
]
ctx_opt_names = [
'IO_THREADS',
'MAX_SOCKETS',
]
msg_opt_names = [
'MORE',
]
all_names = (
base_names + ctx_opt_names + msg_opt_names +
bytes_sockopt_names + int_sockopt_names + int64_sockopt_names + switched_sockopt_names
)
def no_prefix(name):
    """Return True if the constant is exposed without the ZMQ_ prefix.

    Errno-style names (``E*``) carry no prefix, with the exception of the
    ``EVENT_*`` constants, which do.
    """
    if not name.startswith('E'):
        return False
    return not name.startswith('EVENT')
| """0MQ Constant names"""
new_in = {(2, 2, 0): ['RCVTIMEO', 'SNDTIMEO'], (3, 2, 2): ['EMSGSIZE', 'EAFNOSUPPORT', 'ENETUNREACH', 'ECONNABORTED', 'ECONNRESET', 'ENOTCONN', 'ETIMEDOUT', 'EHOSTUNREACH', 'ENETRESET', 'IO_THREADS', 'MAX_SOCKETS', 'IO_THREADS_DFLT', 'MAX_SOCKETS_DFLT', 'ROUTER_BEHAVIOR', 'ROUTER_MANDATORY', 'FAIL_UNROUTABLE', 'TCP_KEEPALIVE', 'TCP_KEEPALIVE_CNT', 'TCP_KEEPALIVE_IDLE', 'TCP_KEEPALIVE_INTVL', 'DELAY_ATTACH_ON_CONNECT', 'XPUB_VERBOSE', 'EVENT_CONNECTED', 'EVENT_CONNECT_DELAYED', 'EVENT_CONNECT_RETRIED', 'EVENT_LISTENING', 'EVENT_BIND_FAILED', 'EVENT_ACCEPTED', 'EVENT_ACCEPT_FAILED', 'EVENT_CLOSED', 'EVENT_CLOSE_FAILED', 'EVENT_DISCONNECTED', 'EVENT_ALL'], (4, 0, 0): ['STREAM', 'IMMEDIATE', 'ROUTER_RAW', 'IPV6', 'MECHANISM', 'PLAIN_SERVER', 'PLAIN_USERNAME', 'PLAIN_PASSWORD', 'CURVE_SERVER', 'CURVE_PUBLICKEY', 'CURVE_SECRETKEY', 'CURVE_SERVERKEY', 'PROBE_ROUTER', 'REQ_RELAXED', 'REQ_CORRELATE', 'CONFLATE', 'ZAP_DOMAIN', 'NULL', 'PLAIN', 'CURVE', 'EVENT_MONITOR_STOPPED'], (4, 1, 0): ['ROUTER_HANDOVER', 'TOS', 'IPC_FILTER_PID', 'IPC_FILTER_UID', 'IPC_FILTER_GID', 'CONNECT_RID']}
removed_in = {(3, 2, 2): ['UPSTREAM', 'DOWNSTREAM', 'HWM', 'SWAP', 'MCAST_LOOP', 'RECOVERY_IVL_MSEC']}
base_names = ['VERSION', 'VERSION_MAJOR', 'VERSION_MINOR', 'VERSION_PATCH', 'NOBLOCK', 'DONTWAIT', 'POLLIN', 'POLLOUT', 'POLLERR', 'SNDMORE', 'STREAMER', 'FORWARDER', 'QUEUE', 'IO_THREADS_DFLT', 'MAX_SOCKETS_DFLT', 'PAIR', 'PUB', 'SUB', 'REQ', 'REP', 'DEALER', 'ROUTER', 'PULL', 'PUSH', 'XPUB', 'XSUB', 'UPSTREAM', 'DOWNSTREAM', 'STREAM', 'EVENT_CONNECTED', 'EVENT_CONNECT_DELAYED', 'EVENT_CONNECT_RETRIED', 'EVENT_LISTENING', 'EVENT_BIND_FAILED', 'EVENT_ACCEPTED', 'EVENT_ACCEPT_FAILED', 'EVENT_CLOSED', 'EVENT_CLOSE_FAILED', 'EVENT_DISCONNECTED', 'EVENT_ALL', 'EVENT_MONITOR_STOPPED', 'NULL', 'PLAIN', 'CURVE', 'EAGAIN', 'EINVAL', 'EFAULT', 'ENOMEM', 'ENODEV', 'EMSGSIZE', 'EAFNOSUPPORT', 'ENETUNREACH', 'ECONNABORTED', 'ECONNRESET', 'ENOTCONN', 'ETIMEDOUT', 'EHOSTUNREACH', 'ENETRESET', 'HAUSNUMERO', 'ENOTSUP', 'EPROTONOSUPPORT', 'ENOBUFS', 'ENETDOWN', 'EADDRINUSE', 'EADDRNOTAVAIL', 'ECONNREFUSED', 'EINPROGRESS', 'ENOTSOCK', 'EFSM', 'ENOCOMPATPROTO', 'ETERM', 'EMTHREAD']
int64_sockopt_names = ['AFFINITY', 'MAXMSGSIZE', 'HWM', 'SWAP', 'MCAST_LOOP', 'RECOVERY_IVL_MSEC']
bytes_sockopt_names = ['IDENTITY', 'SUBSCRIBE', 'UNSUBSCRIBE', 'LAST_ENDPOINT', 'TCP_ACCEPT_FILTER', 'PLAIN_USERNAME', 'PLAIN_PASSWORD', 'CURVE_PUBLICKEY', 'CURVE_SECRETKEY', 'CURVE_SERVERKEY', 'ZAP_DOMAIN', 'CONNECT_RID']
int_sockopt_names = ['RECONNECT_IVL_MAX', 'SNDTIMEO', 'RCVTIMEO', 'SNDHWM', 'RCVHWM', 'MULTICAST_HOPS', 'IPV4ONLY', 'ROUTER_BEHAVIOR', 'TCP_KEEPALIVE', 'TCP_KEEPALIVE_CNT', 'TCP_KEEPALIVE_IDLE', 'TCP_KEEPALIVE_INTVL', 'DELAY_ATTACH_ON_CONNECT', 'XPUB_VERBOSE', 'FD', 'EVENTS', 'TYPE', 'LINGER', 'RECONNECT_IVL', 'BACKLOG', 'ROUTER_MANDATORY', 'FAIL_UNROUTABLE', 'ROUTER_RAW', 'IMMEDIATE', 'IPV6', 'MECHANISM', 'PLAIN_SERVER', 'CURVE_SERVER', 'PROBE_ROUTER', 'REQ_RELAXED', 'REQ_CORRELATE', 'CONFLATE', 'ROUTER_HANDOVER', 'TOS', 'IPC_FILTER_PID', 'IPC_FILTER_UID', 'IPC_FILTER_GID']
switched_sockopt_names = ['RATE', 'RECOVERY_IVL', 'SNDBUF', 'RCVBUF', 'RCVMORE']
ctx_opt_names = ['IO_THREADS', 'MAX_SOCKETS']
msg_opt_names = ['MORE']
all_names = base_names + ctx_opt_names + msg_opt_names + bytes_sockopt_names + int_sockopt_names + int64_sockopt_names + switched_sockopt_names
def no_prefix(name):
"""does the given constant have a ZMQ_ prefix?"""
return name.startswith('E') and (not name.startswith('EVENT')) |
class Solution:
    def isAnagram(self, s, t):
        """Return True when *t* is an anagram of *s*.

        :type s: str
        :type t: str
        :rtype: bool
        """
        # Two strings are anagrams exactly when their sorted character
        # sequences coincide.
        canonical_s = sorted(s)
        canonical_t = sorted(t)
        return canonical_s == canonical_t
| class Solution:
def is_anagram(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
return sorted(s) == sorted(t) |
# Fixed window geometry, in pixels.
WINDOW_WIDTH = 560
# Heights of the stacked UI sections.
MODE_SELECTOR_HEIGHT = 50
CONTROLS_FRAME_HEIGHT = 80
KEYBOARD_HEIGHT = 160
SCORE_DISPLAY_HEIGHT = 110
# The window is exactly tall enough to hold all sections stacked.
WINDOW_HEIGHT = (
    MODE_SELECTOR_HEIGHT
    + CONTROLS_FRAME_HEIGHT
    + KEYBOARD_HEIGHT
    + SCORE_DISPLAY_HEIGHT
)
# Practice modes offered by the mode selector.
CHOICES = ['Scales', 'Chords', 'Chord Progressions']
| window_width = 560
mode_selector_height = 50
controls_frame_height = 80
keyboard_height = 160
score_display_height = 110
window_height = KEYBOARD_HEIGHT + CONTROLS_FRAME_HEIGHT + MODE_SELECTOR_HEIGHT + SCORE_DISPLAY_HEIGHT
choices = ['Scales', 'Chords', 'Chord Progressions'] |
# -*- coding: utf-8 -*-
"""
Created on Sun May 5 14:55:31 2019
@author: asus
"""
"""
House hunting: count how many months of saving (with monthly compounded
investment returns) are needed to cover the down payment on a dream house.
"""
annual_salary = float(input("Enter your annual salary: "))
portion_saved = float(input("Enter the percent of your salary to save, as a demical: "))
total_cost = float(input("Enter the cost of your dream house: "))
# The down payment is a fixed 25% of the house price.
portion_down_payment = total_cost * 0.25
r = 0.04  # annual rate of return earned on invested savings
monthly_salary = annual_salary / 12
current_savings = 0
months = 0
# Each month the existing savings earn r/12 interest, and a slice of one
# paycheck is added on top.
while current_savings < portion_down_payment:
    current_savings += current_savings*(r/12) + monthly_salary*portion_saved
    months += 1
print("Number of months: ", months)
| """
Created on Sun May 5 14:55:31 2019
@author: asus
"""
'\nHouse hunting\ncaculate the months to save enough money to make the down payment of your dream house\n'
annual_salary = float(input('Enter your annual salary: '))
portion_saved = float(input('Enter the percent of your salary to save, as a demical: '))
total_cost = float(input('Enter the cost of your dream house: '))
portion_down_payment = total_cost * 0.25
r = 0.04
monthly_salary = annual_salary / 12
current_savings = 0
months = 0
while current_savings < portion_down_payment:
current_savings += current_savings * (r / 12) + monthly_salary * portion_saved
months += 1
print('Number of months: ', months) |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `ansible_task_worker` package."""
| """Tests for `ansible_task_worker` package.""" |
"""Demonstrate mode ``w+``: write two lines, rewind, and read them back."""
with open("foo.txt", "w+") as f:
    f.writelines(["Hello\n", "This is a new line"])
    # Push buffered data to disk, then rewind to the start before reading.
    f.flush()
    f.seek(0)
    print(f.read())
| """
Writing and reading files using w+
"""
f = open('foo.txt', 'w+')
f.writelines(['Hello\n', 'This is a new line'])
f.flush()
f.seek(0)
print(f.read())
f.close() |
class Solution:
    def buildTree(self, inorder: List[int], postorder: List[int]) -> TreeNode:
        """Rebuild a binary tree from its inorder and postorder traversals.

        Assumes node values are unique — ``inorder.index`` would otherwise
        locate the wrong split point (standard for this problem, but worth
        confirming with callers).
        """
        # _build reconstructs the subtree whose inorder slice is
        # inorder[l1..r1] and whose postorder slice is postorder[l2..r2]
        # (both ranges inclusive; the two slices have equal length).
        def _build(l1, r1, l2, r2):
            if l1 > r1:
                # Empty range: no subtree here.
                return None
            if l1 == r1 or l2 == r2:
                # Single-element range: a leaf.
                return TreeNode(postorder[l2])
            # The last element of the postorder slice is this subtree's root.
            root = TreeNode(postorder[r2])
            # Split the inorder slice around the root value. Note: this scans
            # the whole list each call, so the rebuild is O(n^2) worst case.
            idx = inorder.index(root.val)
            # Left subtree has idx - l1 nodes, occupying the front of the
            # postorder slice.
            root.left = _build(l1, idx - 1, l2, l2 + idx - 1 - l1)
            # Right subtree takes the rest of the postorder slice, minus the
            # root element at r2.
            root.right = _build(idx + 1, r1, l2 + idx - l1, r2 - 1)
            return root
        n = len(inorder)
        return _build(0, n - 1, 0, n - 1)
| class Solution:
def build_tree(self, inorder: List[int], postorder: List[int]) -> TreeNode:
def _build(l1, r1, l2, r2):
if l1 > r1:
return None
if l1 == r1 or l2 == r2:
return tree_node(postorder[l2])
root = tree_node(postorder[r2])
idx = inorder.index(root.val)
root.left = _build(l1, idx - 1, l2, l2 + idx - 1 - l1)
root.right = _build(idx + 1, r1, l2 + idx - l1, r2 - 1)
return root
n = len(inorder)
return _build(0, n - 1, 0, n - 1) |
"""
This module contains utilities to work with code generated by prost-build.
"""
load("@rules_rust//rust:defs.bzl", "rust_binary", "rust_test")
def generated_files_check(name, srcs, deps, data, manifest_dir):
rust_test(
name = name,
srcs = srcs,
data = data + [
"@rules_rust//rust/toolchain:current_exec_rustfmt_files",
"@com_google_protobuf//:protoc",
"@com_google_protobuf//:well_known_protos",
],
edition = "2018",
env = {
"PROTOC": "$(rootpath @com_google_protobuf//:protoc)",
"PROTOC_INCLUDE": "external/com_github_protocolbuffers_protobuf/src",
"CARGO_MANIFEST_DIR": manifest_dir,
},
deps = deps,
)
def protobuf_generator(name, srcs, deps = [], data = []):
rust_binary(
name = name,
srcs = srcs,
data = data + [
"@com_google_protobuf//:protoc",
"@com_google_protobuf//:well_known_protos",
"@rules_rust//rust/toolchain:current_exec_rustfmt_files",
],
edition = "2018",
rustc_env = {
"PROTOC": "$(rootpath @com_google_protobuf//:protoc)",
"PROTOC_INCLUDE": "external/com_github_protocolbuffers_protobuf/src",
},
deps = deps,
)
| """
This module contains utilities to work with code generated by prost-build.
"""
load('@rules_rust//rust:defs.bzl', 'rust_binary', 'rust_test')
def generated_files_check(name, srcs, deps, data, manifest_dir):
rust_test(name=name, srcs=srcs, data=data + ['@rules_rust//rust/toolchain:current_exec_rustfmt_files', '@com_google_protobuf//:protoc', '@com_google_protobuf//:well_known_protos'], edition='2018', env={'PROTOC': '$(rootpath @com_google_protobuf//:protoc)', 'PROTOC_INCLUDE': 'external/com_github_protocolbuffers_protobuf/src', 'CARGO_MANIFEST_DIR': manifest_dir}, deps=deps)
def protobuf_generator(name, srcs, deps=[], data=[]):
rust_binary(name=name, srcs=srcs, data=data + ['@com_google_protobuf//:protoc', '@com_google_protobuf//:well_known_protos', '@rules_rust//rust/toolchain:current_exec_rustfmt_files'], edition='2018', rustc_env={'PROTOC': '$(rootpath @com_google_protobuf//:protoc)', 'PROTOC_INCLUDE': 'external/com_github_protocolbuffers_protobuf/src'}, deps=deps) |
# 364 - Nested List Weight Sum II (Medium)
# https://leetcode.com/problems/nested-list-weight-sum-ii/
# """
# This is the interface that allows for creating nested lists.
# You should not implement it, or speculate about its implementation
# """
#class NestedInteger(object):
#
# def isInteger(self):
# """
# @return True if this NestedInteger holds a single integer, rather than a nested list.
# :rtype bool
# """
#
# def getInteger(self):
# """
# @return the single integer that this NestedInteger holds, if it holds a single integer
# Return None if this NestedInteger holds a nested list
# :rtype int
# """
#
# def getList(self):
# """
# @return the nested list that this NestedInteger holds, if it holds a nested list
# Return None if this NestedInteger holds a single integer
# :rtype List[NestedInteger]
# """
class Solution(object):
    # Find the max depth, so that when the usual DFS is done, the inverse
    # depth is calculated.
    def findMaxDepth(self, nestedList, depth):
        """Return the maximum nesting depth reachable from nestedList.

        ``depth`` is the depth of the current level; the top-level caller
        passes 1.
        """
        if len(nestedList) == 0:
            # NOTE(review): an empty list returns 0 regardless of `depth`;
            # harmless here since the caller takes max() with its running
            # depth, which is always >= 1.
            return 0
        nextDepth = depth + 1
        for NI in nestedList:
            if NI.isInteger():
                # Integers do not add nesting levels.
                continue
            else:
                depth = max(depth, self.findMaxDepth(NI.getList(), nextDepth))
        return depth
    # The usual DFS but instead of multiplying by real depth, do it with the
    # inverse which needs to have beforehand the max depth.
    def dfs(self, nestedList, depth, maxDepth):
        """Sum integers weighted by inverse depth, i.e. maxDepth - depth + 1."""
        if len(nestedList) == 0:
            return 0
        acum = 0
        for NI in nestedList:
            if NI.isInteger():
                # Leaves at depth 1 get the largest weight (maxDepth).
                acum += NI.getInteger() * (maxDepth - depth + 1)
            else:
                acum += self.dfs(NI.getList(), depth+1, maxDepth)
        return acum
    def depthSumInverse(self, nestedList):
        """
        :type nestedList: List[NestedInteger]
        :rtype: int
        """
        # Two passes: first find the max depth, then weight by its inverse.
        maxDepth = self.findMaxDepth(nestedList, 1)
        return self.dfs(nestedList, 1, maxDepth)
| class Solution(object):
def find_max_depth(self, nestedList, depth):
if len(nestedList) == 0:
return 0
next_depth = depth + 1
for ni in nestedList:
if NI.isInteger():
continue
else:
depth = max(depth, self.findMaxDepth(NI.getList(), nextDepth))
return depth
def dfs(self, nestedList, depth, maxDepth):
if len(nestedList) == 0:
return 0
acum = 0
for ni in nestedList:
if NI.isInteger():
acum += NI.getInteger() * (maxDepth - depth + 1)
else:
acum += self.dfs(NI.getList(), depth + 1, maxDepth)
return acum
def depth_sum_inverse(self, nestedList):
"""
:type nestedList: List[NestedInteger]
:rtype: int
"""
        max_depth = self.find_max_depth(nestedList, 1)
        return self.dfs(nestedList, 1, max_depth)
class ProductFileMetadata(object):
    """Value object describing a single product output file.

    NOTE(review): field semantics inferred from names only -- data_start/
    data_end look like a time range and geojson like a spatial footprint;
    confirm against callers.
    """
    def __init__(self, output_name, local_path, media_type=None, remote_path=None, data_start=None, data_end=None,
                 geojson=None):
        # All arguments are stored verbatim; no validation is performed here.
        self.data_start = data_start
        self.data_end = data_end
        self.geojson = geojson
        self.local_path = local_path
        self.media_type = media_type
        self.output_name = output_name
self.remote_path = remote_path | class Productfilemetadata(object):
def __init__(self, output_name, local_path, media_type=None, remote_path=None, data_start=None, data_end=None, geojson=None):
self.data_start = data_start
self.data_end = data_end
self.geojson = geojson
self.local_path = local_path
self.media_type = media_type
self.output_name = output_name
self.remote_path = remote_path |
{
"name": "PersianTweets",
"version": "2020",
"task": "Corpus",
"splits": [],
"description": "LSCP: Enhanced Large Scale Colloquial Persian Language Understanding <br>\nLearn more about this study at https://iasbs.ac.ir/~ansari/lscp/",
"size": 20665964,
"filenames": ["lscp-0.5-fa-normalized.txt"]
}
| {'name': 'PersianTweets', 'version': '2020', 'task': 'Corpus', 'splits': [], 'description': 'LSCP: Enhanced Large Scale Colloquial Persian Language Understanding <br>\nLearn more about this study at https://iasbs.ac.ir/~ansari/lscp/', 'size': 20665964, 'filenames': ['lscp-0.5-fa-normalized.txt']} |
class LoginLimiter(object):
    """Rate limiter: at most 10 accepted requests per rolling 60-second window."""

    def __init__(self):
        # Timestamps of the most recent (up to 10) accepted requests, oldest first.
        self.rctCalls = []

    def isAllowed(self, ts):
        """Record an attempt at timestamp `ts`; return True if it may proceed."""
        if len(self.rctCalls) >= 10 and ts - self.rctCalls[0] < 60:
            # Ten accepted calls already fall within 60s of `ts` -> reject.
            print("Exceeding call limit at timestamp %s. Call denied" % ts)
            return False
        if len(self.rctCalls) >= 10:
            # The oldest accepted call has aged out of the window; drop it
            # so the history never grows beyond 10 entries.
            self.rctCalls.pop(0)
        self.rctCalls.append(ts)
        print("Call API at timestamp %s" % ts)
        return True
# Demo script: exercise the limiter directly, then with per-client instances.
# test limiter functionality
print("\nTest basic limiter functionality:")
solver = LoginLimiter()
# 100 attempts spaced 5 seconds apart; after the first 10 the window logic kicks in.
timestamps = [i * 5 for i in range(100)]
for ts in timestamps:
    solver.isAllowed(ts)
print("\nTest multiple clients:")
# apply limiter to multiple client IDs
clientID = ["A", "B"]
# Each client gets an independent limiter instance (independent windows).
clients = {ID: LoginLimiter() for ID in clientID}
# let multiple clients call API simultaneously
timestamps = [i for i in range(15)]
for ts in timestamps:
    for ID in clients:
        limiter = clients[ID]
        print("client ID: %s" % ID, end=' ')
        limiter.isAllowed(ts)
limiter.isAllowed(ts) | class Loginlimiter(object):
def __init__(self):
self.rctCalls = []
def is_allowed(self, ts):
if len(self.rctCalls) < 10:
self.rctCalls.append(ts)
print('Call API at timestamp %s' % ts)
return True
elif ts - self.rctCalls[0] < 60:
print('Exceeding call limit at timestamp %s. Call denied' % ts)
return False
else:
self.rctCalls.pop(0)
self.rctCalls.append(ts)
print('Call API at timestamp %s' % ts)
return True
print('\nTest basic limiter functionality:')
solver = Loginlimiter()
timestamps = [i * 5 for i in range(100)]
for ts in timestamps:
    solver.is_allowed(ts)
print('\nTest multiple clients:')
client_id = ['A', 'B']
clients = {client_name: Loginlimiter() for client_name in client_id}
timestamps = [i for i in range(15)]
for ts in timestamps:
    for client_name in clients:
        limiter = clients[client_name]
        print('client ID: %s' % client_name, end=' ')
        limiter.is_allowed(ts)
class Unserializable(Exception):
    """The item is not serializable by the save system."""


class DeserializationError(Exception):
    """Error deserializing a value during game load."""


class VerbDefinitionError(Exception):
    """A verb is defined in an incorrect or inconsistent way."""


class ParserError(Exception):
    """Error parsing the player command."""


class VerbMatchError(ParserError):
    """No matching verb could be identified from the player input."""


class ObjectMatchError(ParserError):
    """No matching IFPObject could be found for either the direct or the
    indirect object in the player command."""


class OutOfRange(ParserError):
    """The specified object is out of range for the current verb."""


class AbortTurn(Exception):
    """Abort the current turn. Error message will not be printed."""
class NoMatchingSuggestion(Exception):
    """Raised when a query cannot be narrowed to exactly one suggestion.

    Stores the query, the candidate options, and the candidates that were
    not excluded, so callers can inspect the ambiguity.
    """

    def __init__(self, query, options, matches):
        self.query = query
        self.options = options
        self.matches = matches
        super().__init__(
            f"Unable to unambiguaously match a suggestion from options {options} "
            f"with query `{query}`. Not excluded: {matches}."
        )
class IFPError(Exception):
    """Generic error raised by the IFP framework."""
| class Unserializable(Exception):
"""
The item is not serializable by the save system.
"""
pass
class Deserializationerror(Exception):
"""
Error deserializing a value during game load
"""
pass
class Verbdefinitionerror(Exception):
"""
A verb is defined in an incorrect or inconsistent way
"""
pass
class Parsererror(Exception):
"""
Error parsing the player command
"""
pass
class Verbmatcherror(Parsererror):
"""
No matching verb could be identified from the player input
"""
pass
class Objectmatcherror(Parsererror):
"""
No matching IFPObject could be found for either the direct or indirect object
in the player command
"""
pass
class Outofrange(Parsererror):
"""
The specified object is out of range for the current verb
"""
pass
class Abortturn(Exception):
"""
Abort the current turn. Error message will not be printed.
"""
pass
class Nomatchingsuggestion(Exception):
def __init__(self, query, options, matches):
self.query = query
self.options = options
self.matches = matches
msg = f'Unable to unambiguaously match a suggestion from options {options} with query `{query}`. Not excluded: {matches}.'
super().__init__(msg)
class Ifperror(Exception):
pass |
# Copyright 2019 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code for interacting with git binary to get the file tree checked out at the specified revision.
"""
# Internal provider bundling everything the git helper functions below need;
# it is precomputed once in git_repo() so the per-command helpers stay simple.
_GitRepoInfo = provider(
    doc = "Provider to organize precomputed arguments for calling git.",
    fields = {
        "directory": "Working directory path",
        "shallow": "Defines the depth of a fetch. Either empty, --depth=1, or --shallow-since=<>",
        "reset_ref": """Reference to use for resetting the git repository.
Either commit hash, tag or branch.""",
        "fetch_ref": """Reference for fetching. Can be empty (HEAD), tag or branch.
Can not be a commit hash, since typically it is forbidden by git servers.""",
        "remote": "URL of the git repository to fetch from.",
        "init_submodules": """If True, submodules update command will be called after fetching
and resetting to the specified reference.""",
    },
)
def git_repo(ctx, directory):
    """ Fetches data from git repository and checks out file tree.
    Called by git_repository or new_git_repository rules.
    Args:
        ctx: Context of the calling rules, for reading the attributes.
        Please refer to the git_repository and new_git_repository rules for the description.
        directory: Directory where to check out the file tree.
    Returns:
        The struct with the following fields:
        commit: Actual HEAD commit of the checked out data.
        shallow_since: Actual date and time of the HEAD commit of the checked out data.
    """
    # shallow_since only makes sense for commits; tags and branches always
    # use --depth=1, so reject the contradictory combinations early.
    if ctx.attr.shallow_since:
        if ctx.attr.tag:
            fail("shallow_since not allowed if a tag is specified; --depth=1 will be used for tags")
        if ctx.attr.branch:
            fail("shallow_since not allowed if a branch is specified; --depth=1 will be used for branches")
    # Default to the cheapest possible fetch: only the most recent commit.
    shallow = "--depth=1"
    if ctx.attr.commit:
        # We can not use the commit value in --shallow-since;
        # And since we are fetching HEAD in this case, we can not use --depth=1
        shallow = ""
    # Use shallow-since if given
    if ctx.attr.shallow_since:
        shallow = "--shallow-since=%s" % ctx.attr.shallow_since
    # Translate commit/tag/branch into the refs used for `git fetch` and
    # `git reset`. A raw commit can only be reset to (servers typically
    # refuse fetching an arbitrary sha), so fetch_ref stays empty then.
    reset_ref = ""
    fetch_ref = ""
    if ctx.attr.commit:
        reset_ref = ctx.attr.commit
    elif ctx.attr.tag:
        reset_ref = "tags/" + ctx.attr.tag
        fetch_ref = "tags/" + ctx.attr.tag + ":tags/" + ctx.attr.tag
    elif ctx.attr.branch:
        reset_ref = "origin/" + ctx.attr.branch
        fetch_ref = ctx.attr.branch + ":origin/" + ctx.attr.branch
    git_repo = _GitRepoInfo(
        directory = ctx.path(directory),
        shallow = shallow,
        reset_ref = reset_ref,
        fetch_ref = fetch_ref,
        remote = str(ctx.attr.remote),
        init_submodules = ctx.attr.init_submodules,
    )
    ctx.report_progress("Cloning %s of %s" % (reset_ref, ctx.attr.remote))
    if (ctx.attr.verbose):
        print("git.bzl: Cloning or updating %s repository %s using strip_prefix of [%s]" %
            (
                " (%s)" % shallow if shallow else "",
                ctx.name,
                ctx.attr.strip_prefix if ctx.attr.strip_prefix else "None",
            ))
    _update(ctx, git_repo)
    # Record what actually got checked out so the caller can pin it.
    ctx.report_progress("Recording actual commit")
    actual_commit = _get_head_commit(ctx, git_repo)
    shallow_date = _get_head_date(ctx, git_repo)
    return struct(commit = actual_commit, shallow_since = shallow_date)
def _update(ctx, git_repo):
    # Recreate the checkout from scratch: delete any previous tree, then
    # init / add-origin / fetch / reset / clean in sequence.
    ctx.delete(git_repo.directory)
    init(ctx, git_repo)
    add_origin(ctx, git_repo, ctx.attr.remote)
    fetch(ctx, git_repo)
    reset(ctx, git_repo)
    clean(ctx, git_repo)
    if git_repo.init_submodules:
        ctx.report_progress("Updating submodules")
        update_submodules(ctx, git_repo)
def init(ctx, git_repo):
    # `git init` targets the directory by path (it does not exist as a repo
    # yet), so it uses ctx.execute directly rather than the _git() helper,
    # which sets working_directory to the checkout.
    cl = ["git", "init", str(git_repo.directory)]
    st = ctx.execute(cl, environment = ctx.os.environ)
    if st.return_code != 0:
        _error(ctx.name, cl, st.stderr)
def add_origin(ctx, git_repo, remote):
    # Register the remote URL the subsequent fetch will pull from.
    _git(ctx, git_repo, "remote", "add", "origin", remote)
def fetch(ctx, git_repo):
    # Fetch either the one configured ref or, when fetching by commit
    # (fetch_ref empty), every branch and tag so the commit is reachable.
    if not git_repo.fetch_ref:
        # We need to explicitly specify to fetch all branches and tags, otherwise only
        # HEAD-reachable is fetched.
        # The semantics of --tags flag of git-fetch have changed in Git 1.9, from 1.9 it means
        # "everything that is already specified and all tags"; before 1.9, it used to mean
        # "ignore what is specified and fetch all tags".
        # The arguments below work correctly for both before 1.9 and after 1.9,
        # as we directly specify the list of references to fetch.
        _git_maybe_shallow(
            ctx,
            git_repo,
            "fetch",
            "origin",
            "refs/heads/*:refs/remotes/origin/*",
            "refs/tags/*:refs/tags/*",
        )
    else:
        _git_maybe_shallow(ctx, git_repo, "fetch", "origin", git_repo.fetch_ref)
def reset(ctx, git_repo):
    # Hard-reset the work tree to the resolved commit/tag/branch ref.
    _git(ctx, git_repo, "reset", "--hard", git_repo.reset_ref)
def clean(ctx, git_repo):
    # Remove untracked files and directories, including ignored ones (-x).
    _git(ctx, git_repo, "clean", "-xdf")
def update_submodules(ctx, git_repo):
    # Initialize and force-checkout the repository's submodules.
    _git(ctx, git_repo, "submodule", "update", "--init", "--checkout", "--force")
def _get_head_commit(ctx, git_repo):
    # Full sha of HEAD after the reset; returned to the caller for pinning.
    return _git(ctx, git_repo, "log", "-n", "1", "--pretty=format:%H")
def _get_head_date(ctx, git_repo):
    # Committer date of HEAD in raw (epoch + offset) format.
    return _git(ctx, git_repo, "log", "-n", "1", "--pretty=format:%cd", "--date=raw")
def _git(ctx, git_repo, command, *args):
    # Run one git subcommand inside the checkout; abort the repository rule
    # on any non-zero exit. Returns the captured stdout.
    start = ["git", command]
    st = _execute(ctx, git_repo, start + list(args))
    if st.return_code != 0:
        _error(ctx.name, start + list(args), st.stderr)
    return st.stdout
def _git_maybe_shallow(ctx, git_repo, command, *args):
    # Try the command with the shallow flag first; if that fails for any
    # reason, retry once without the flag before reporting an error.
    start = ["git", command]
    args_list = list(args)
    if git_repo.shallow:
        st = _execute(ctx, git_repo, start + [git_repo.shallow] + args_list)
        if st.return_code == 0:
            return
    st = _execute(ctx, git_repo, start + args_list)
    if st.return_code != 0:
        _error(ctx.name, start + args_list, st.stderr)
def _execute(ctx, git_repo, args):
    # Run a command inside the checkout directory with the host environment.
    return ctx.execute(
        args,
        environment = ctx.os.environ,
        working_directory = str(git_repo.directory),
    )
def _error(name, command, stderr):
    # Render the failing command line and abort repository rule evaluation.
    command_text = " ".join([str(item).strip() for item in command])
    fail("error running '%s' while working with @%s:\n%s" % (command_text, name, stderr))
| """Code for interacting with git binary to get the file tree checked out at the specified revision.
"""
__git_repo_info = provider(doc='Provider to organize precomputed arguments for calling git.', fields={'directory': 'Working directory path', 'shallow': 'Defines the depth of a fetch. Either empty, --depth=1, or --shallow-since=<>', 'reset_ref': 'Reference to use for resetting the git repository.\nEither commit hash, tag or branch.', 'fetch_ref': 'Reference for fetching. Can be empty (HEAD), tag or branch.\nCan not be a commit hash, since typically it is forbidden by git servers.', 'remote': 'URL of the git repository to fetch from.', 'init_submodules': 'If True, submodules update command will be called after fetching\nand resetting to the specified reference.'})
def git_repo(ctx, directory):
""" Fetches data from git repository and checks out file tree.
Called by git_repository or new_git_repository rules.
Args:
ctx: Context of the calling rules, for reading the attributes.
Please refer to the git_repository and new_git_repository rules for the description.
directory: Directory where to check out the file tree.
Returns:
The struct with the following fields:
commit: Actual HEAD commit of the checked out data.
shallow_since: Actual date and time of the HEAD commit of the checked out data.
"""
if ctx.attr.shallow_since:
if ctx.attr.tag:
fail('shallow_since not allowed if a tag is specified; --depth=1 will be used for tags')
if ctx.attr.branch:
fail('shallow_since not allowed if a branch is specified; --depth=1 will be used for branches')
shallow = '--depth=1'
if ctx.attr.commit:
shallow = ''
if ctx.attr.shallow_since:
shallow = '--shallow-since=%s' % ctx.attr.shallow_since
reset_ref = ''
fetch_ref = ''
if ctx.attr.commit:
reset_ref = ctx.attr.commit
elif ctx.attr.tag:
reset_ref = 'tags/' + ctx.attr.tag
fetch_ref = 'tags/' + ctx.attr.tag + ':tags/' + ctx.attr.tag
elif ctx.attr.branch:
reset_ref = 'origin/' + ctx.attr.branch
fetch_ref = ctx.attr.branch + ':origin/' + ctx.attr.branch
git_repo = __git_repo_info(directory=ctx.path(directory), shallow=shallow, reset_ref=reset_ref, fetch_ref=fetch_ref, remote=str(ctx.attr.remote), init_submodules=ctx.attr.init_submodules)
ctx.report_progress('Cloning %s of %s' % (reset_ref, ctx.attr.remote))
if ctx.attr.verbose:
print('git.bzl: Cloning or updating %s repository %s using strip_prefix of [%s]' % (' (%s)' % shallow if shallow else '', ctx.name, ctx.attr.strip_prefix if ctx.attr.strip_prefix else 'None'))
_update(ctx, git_repo)
ctx.report_progress('Recording actual commit')
actual_commit = _get_head_commit(ctx, git_repo)
shallow_date = _get_head_date(ctx, git_repo)
return struct(commit=actual_commit, shallow_since=shallow_date)
def _update(ctx, git_repo):
ctx.delete(git_repo.directory)
init(ctx, git_repo)
add_origin(ctx, git_repo, ctx.attr.remote)
fetch(ctx, git_repo)
reset(ctx, git_repo)
clean(ctx, git_repo)
if git_repo.init_submodules:
ctx.report_progress('Updating submodules')
update_submodules(ctx, git_repo)
def init(ctx, git_repo):
cl = ['git', 'init', str(git_repo.directory)]
st = ctx.execute(cl, environment=ctx.os.environ)
if st.return_code != 0:
_error(ctx.name, cl, st.stderr)
def add_origin(ctx, git_repo, remote):
_git(ctx, git_repo, 'remote', 'add', 'origin', remote)
def fetch(ctx, git_repo):
if not git_repo.fetch_ref:
_git_maybe_shallow(ctx, git_repo, 'fetch', 'origin', 'refs/heads/*:refs/remotes/origin/*', 'refs/tags/*:refs/tags/*')
else:
_git_maybe_shallow(ctx, git_repo, 'fetch', 'origin', git_repo.fetch_ref)
def reset(ctx, git_repo):
_git(ctx, git_repo, 'reset', '--hard', git_repo.reset_ref)
def clean(ctx, git_repo):
_git(ctx, git_repo, 'clean', '-xdf')
def update_submodules(ctx, git_repo):
_git(ctx, git_repo, 'submodule', 'update', '--init', '--checkout', '--force')
def _get_head_commit(ctx, git_repo):
return _git(ctx, git_repo, 'log', '-n', '1', '--pretty=format:%H')
def _get_head_date(ctx, git_repo):
return _git(ctx, git_repo, 'log', '-n', '1', '--pretty=format:%cd', '--date=raw')
def _git(ctx, git_repo, command, *args):
start = ['git', command]
st = _execute(ctx, git_repo, start + list(args))
if st.return_code != 0:
_error(ctx.name, start + list(args), st.stderr)
return st.stdout
def _git_maybe_shallow(ctx, git_repo, command, *args):
start = ['git', command]
args_list = list(args)
if git_repo.shallow:
st = _execute(ctx, git_repo, start + [git_repo.shallow] + args_list)
if st.return_code == 0:
return
st = _execute(ctx, git_repo, start + args_list)
if st.return_code != 0:
_error(ctx.name, start + args_list, st.stderr)
def _execute(ctx, git_repo, args):
return ctx.execute(args, environment=ctx.os.environ, working_directory=str(git_repo.directory))
def _error(name, command, stderr):
command_text = ' '.join([str(item).strip() for item in command])
fail("error running '%s' while working with @%s:\n%s" % (command_text, name, stderr)) |
class Node_Types:
    """Node ``type`` identifier strings (Blender-style naming)."""
    image_texture = "TEX_IMAGE"
    pbr_node = "BSDF_PRINCIPLED"
    mapping = "MAPPING"
    normal_map = "NORMAL_MAP"
    bump_map = "BUMP"
    material_output = "OUTPUT_MATERIAL"


class Shader_Node_Types:
    """``bl_idname`` strings used when creating new nodes."""
    emission = "ShaderNodeEmission"
    image_texture = "ShaderNodeTexImage"
    mapping = "ShaderNodeMapping"
    normal = "ShaderNodeNormalMap"
    ao = "ShaderNodeAmbientOcclusion"
    uv = "ShaderNodeUVMap"
    comp_image_node = "CompositorNodeImage"
    mix = "ShaderNodeMixRGB"


class Bake_Passes:
    """Bake pass names grouped per bake type."""
    pbr = ["EMISSION"]
    lightmap = ["NOISY", "NRM", "COLOR"]
    ao = ["AO", "COLOR"]


class Material_Suffix:
    """Suffix appended to material names for each bake type."""
    bake_type_mat_suffix = {"pbr": "_Bake", "ao": "_AO", "lightmap": "_AO"}
| class Node_Types:
image_texture = 'TEX_IMAGE'
pbr_node = 'BSDF_PRINCIPLED'
mapping = 'MAPPING'
normal_map = 'NORMAL_MAP'
bump_map = 'BUMP'
material_output = 'OUTPUT_MATERIAL'
class Shader_Node_Types:
emission = 'ShaderNodeEmission'
image_texture = 'ShaderNodeTexImage'
mapping = 'ShaderNodeMapping'
normal = 'ShaderNodeNormalMap'
ao = 'ShaderNodeAmbientOcclusion'
uv = 'ShaderNodeUVMap'
comp_image_node = 'CompositorNodeImage'
mix = 'ShaderNodeMixRGB'
class Bake_Passes:
pbr = ['EMISSION']
lightmap = ['NOISY', 'NRM', 'COLOR']
ao = ['AO', 'COLOR']
class Material_Suffix:
bake_type_mat_suffix = {'pbr': '_Bake', 'ao': '_AO', 'lightmap': '_AO'} |
# https://www.codewars.com/kata/52b757663a95b11b3d00062d/
'''
Instructions :
Write a function toWeirdCase (weirdcase in Ruby) that accepts a string, and returns the same string with all even indexed characters in each word upper cased, and all odd indexed characters in each word lower cased. The indexing just explained is zero based, so the zero-ith index is even, therefore that character should be upper cased.
The passed in string will only consist of alphabetical characters and spaces(' '). Spaces will only be present if there are multiple words. Words will be separated by a single space(' ').
Examples:
to_weird_case('String'); # => returns 'StRiNg'
to_weird_case('Weird string case') # => returns 'WeIrD StRiNg CaSe'
'''
def to_weird_case(string):
    """Upper-case even-indexed and lower-case odd-indexed characters,
    restarting the index at zero for each space-separated word."""
    pieces = []
    pos = 0
    for ch in string:
        if ch == ' ':
            # Spaces are copied through unchanged and reset the word index.
            pos = 0
            pieces.append(ch)
            continue
        pieces.append(ch.upper() if pos % 2 == 0 else ch.lower())
        pos += 1
    return ''.join(pieces)
| """
Instructions :
Write a function toWeirdCase (weirdcase in Ruby) that accepts a string, and returns the same string with all even indexed characters in each word upper cased, and all odd indexed characters in each word lower cased. The indexing just explained is zero based, so the zero-ith index is even, therefore that character should be upper cased.
The passed in string will only consist of alphabetical characters and spaces(' '). Spaces will only be present if there are multiple words. Words will be separated by a single space(' ').
Examples:
to_weird_case('String'); # => returns 'StRiNg'
to_weird_case('Weird string case') # => returns 'WeIrD StRiNg CaSe'
"""
def to_weird_case(string):
index = 0
new_string = ''
for word in string:
if word == ' ':
index = -1
new_string += word
elif index % 2 == 0:
new_string += word.upper()
else:
new_string += word.lower()
index += 1
return new_string |
"""
https://edabit.com/challenge/ogjDWJAT2kTXEzkD5
https://www.programiz.com/python-programming/args-and-kwargs#:~:text=Python%20has%20*args%20which%20allow,to%20pass%20variable%20length%20arguments.
"""
| """
https://edabit.com/challenge/ogjDWJAT2kTXEzkD5
https://www.programiz.com/python-programming/args-and-kwargs#:~:text=Python%20has%20*args%20which%20allow,to%20pass%20variable%20length%20arguments.
""" |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [
{
'target_name': 'device_hid',
'type': 'static_library',
'include_dirs': [
'../..',
],
'dependencies': [
'../../components/components.gyp:device_event_log_component',
'../../net/net.gyp:net',
'../core/core.gyp:device_core',
],
'sources': [
'hid_collection_info.cc',
'hid_collection_info.h',
'hid_connection.cc',
'hid_connection.h',
'hid_connection_linux.cc',
'hid_connection_linux.h',
'hid_connection_mac.cc',
'hid_connection_mac.h',
'hid_connection_win.cc',
'hid_connection_win.h',
'hid_device_filter.cc',
'hid_device_filter.h',
'hid_device_info.cc',
'hid_device_info.h',
'hid_device_info_linux.cc',
'hid_device_info_linux.h',
'hid_report_descriptor.cc',
'hid_report_descriptor.h',
'hid_report_descriptor_item.cc',
'hid_report_descriptor_item.h',
'hid_service.cc',
'hid_service.h',
'hid_service_mac.cc',
'hid_service_mac.h',
'hid_service_win.cc',
'hid_service_win.h',
'hid_usage_and_page.cc',
'hid_usage_and_page.h',
],
'conditions': [
['OS=="linux" and use_udev==1', {
'dependencies': [
'../udev_linux/udev.gyp:udev_linux',
],
'sources': [
'fake_input_service_linux.cc',
'fake_input_service_linux.h',
'hid_service_linux.cc',
'hid_service_linux.h',
'input_service_linux.cc',
'input_service_linux.h',
],
}],
['OS=="win"', {
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
'hid.lib',
'setupapi.lib',
],
},
},
},
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
'hid.lib',
'setupapi.lib',
],
},
},
}],
],
},
{
'target_name': 'device_hid_mocks',
'type': 'static_library',
'include_dirs': [
'../..',
],
'dependencies': [
'../../testing/gmock.gyp:gmock',
'device_hid',
],
'sources': [
'mock_hid_service.cc',
'mock_hid_service.h',
],
},
],
}
| {'variables': {'chromium_code': 1}, 'targets': [{'target_name': 'device_hid', 'type': 'static_library', 'include_dirs': ['../..'], 'dependencies': ['../../components/components.gyp:device_event_log_component', '../../net/net.gyp:net', '../core/core.gyp:device_core'], 'sources': ['hid_collection_info.cc', 'hid_collection_info.h', 'hid_connection.cc', 'hid_connection.h', 'hid_connection_linux.cc', 'hid_connection_linux.h', 'hid_connection_mac.cc', 'hid_connection_mac.h', 'hid_connection_win.cc', 'hid_connection_win.h', 'hid_device_filter.cc', 'hid_device_filter.h', 'hid_device_info.cc', 'hid_device_info.h', 'hid_device_info_linux.cc', 'hid_device_info_linux.h', 'hid_report_descriptor.cc', 'hid_report_descriptor.h', 'hid_report_descriptor_item.cc', 'hid_report_descriptor_item.h', 'hid_service.cc', 'hid_service.h', 'hid_service_mac.cc', 'hid_service_mac.h', 'hid_service_win.cc', 'hid_service_win.h', 'hid_usage_and_page.cc', 'hid_usage_and_page.h'], 'conditions': [['OS=="linux" and use_udev==1', {'dependencies': ['../udev_linux/udev.gyp:udev_linux'], 'sources': ['fake_input_service_linux.cc', 'fake_input_service_linux.h', 'hid_service_linux.cc', 'hid_service_linux.h', 'input_service_linux.cc', 'input_service_linux.h']}], ['OS=="win"', {'all_dependent_settings': {'msvs_settings': {'VCLinkerTool': {'AdditionalDependencies': ['hid.lib', 'setupapi.lib']}}}, 'msvs_settings': {'VCLinkerTool': {'AdditionalDependencies': ['hid.lib', 'setupapi.lib']}}}]]}, {'target_name': 'device_hid_mocks', 'type': 'static_library', 'include_dirs': ['../..'], 'dependencies': ['../../testing/gmock.gyp:gmock', 'device_hid'], 'sources': ['mock_hid_service.cc', 'mock_hid_service.h']}]} |
'''
Abstract base class for audio speech and sound command processing. Provides
methods shared among all platform implementations.
Copyright (c) 2008 Carolina Computer Assistive Technology
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
class ChannelBase(object):
    """Base class for a speech/sound output channel.

    Commands arrive as dictionaries via pushRequest(). Immediate actions
    (stop, set-now, reset-now, deferred-result) are handled at once; all
    other commands are queued and drained FIFO by _processQueue(), which
    stalls whenever the head command is waiting on a deferred result that
    has not arrived yet.

    NOTE(review): say(), play(), setProperty(), getConfig() and reset()
    are referenced here but not defined -- presumably supplied by the
    platform subclasses mentioned in the module docstring; confirm.
    """

    def __init__(self, ch_id):
        # unique id for this channel
        self.id = ch_id
        # observer for channel callbacks
        self.observer = None
        # FIFO of pending command dictionaries
        self.queue = []
        # deferred results received so far, keyed by request id
        self.deferreds = {}
        # request id of the deferred currently stalling the queue (if any)
        self.stalled_id = None
        # busy flag; used instead of tts and sound busy methods which are
        # not documented as to when they are set and reset
        self.busy = False
        # client-assigned name pairing an utterance/sound with callback data
        self.name = None

    def _processQueue(self):
        """Drain queued commands until empty, busy, or stalled on a deferred."""
        while not self.busy and self.queue:
            # Peek at the head command to see whether it is deferred.
            head = self.queue[0]
            reqid = head.get('deferred')
            if reqid is not None:
                result = self.deferreds.get(reqid)
                if result is None:
                    # Result not in yet: remember what we are waiting on
                    # and stall the queue for now.
                    self.stalled_id = reqid
                    return
                # Replay the stored result as if it were the original command.
                result['action'] = head['action']
                del self.deferreds[reqid]
                head = result
            self._handleCommand(head)
            # Only now remove the handled command from the queue.
            self.queue.pop(0)

    def _handleCommand(self, cmd):
        """Dispatch one queued command to the subclass implementation."""
        action = cmd.get('action')
        if action == 'say':
            self.say(cmd)
        elif action == 'play':
            self.play(cmd)
        elif action == 'set-queued':
            self.setProperty(cmd)
        elif action == 'get-config':
            self.getConfig(cmd)
        elif action == 'reset-queued':
            self.reset()

    def setObserver(self, ob):
        """Register the observer that receives channel callbacks."""
        self.observer = ob

    def pushRequest(self, cmd):
        """Accept a command: immediate actions run now, the rest is queued."""
        action = cmd.get('action')
        if action == 'stop':
            self.stop()
        elif action == 'set-now':
            self.setProperty(cmd)
        elif action == 'reset-now':
            self.reset()
        elif action == 'deferred-result':
            self.deferred(cmd)
        else:
            # Queue the command; slight waste of time if we immediately
            # pull it back out again, but it keeps the flow uniform.
            self.queue.append(cmd)
        # The queue is pumped after every request, immediate or queued.
        self._processQueue()

    def deferred(self, cmd):
        """Store an incoming deferred result; resume the queue if it was blocking."""
        try:
            reqid = cmd['deferred']
        except KeyError:
            return
        self.deferreds[reqid] = cmd
        # If this is the result that stalled the pipe, pump the queue again.
        if reqid == self.stalled_id:
            self._processQueue()

    def stop(self):
        """Abort all pending output and clear every piece of transient state."""
        self.queue = []
        self.busy = False
        self.name = None
        self.stalled_id = None
        self.deferreds = {}

    def shutdown(self):
        """Drop the observer reference so no further callbacks are delivered."""
        self.observer = None
| """
Abstract base class for audio speech and sound command processing. Provides
methods shared among all platform implementations.
Copyright (c) 2008 Carolina Computer Assistive Technology
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
class Channelbase(object):
def __init__(self, ch_id):
self.id = ch_id
self.observer = None
self.queue = []
self.deferreds = {}
self.stalled_id = None
self.busy = False
self.name = None
def _process_queue(self):
while not self.busy and len(self.queue):
cmd = self.queue[0]
reqid = cmd.get('deferred')
if reqid is not None:
result = self.deferreds.get(reqid)
if result is None:
self.stalled_id = reqid
return
else:
result['action'] = cmd['action']
del self.deferreds[reqid]
cmd = result
            self._handle_command(cmd)
cmd = self.queue.pop(0)
def _handle_command(self, cmd):
action = cmd.get('action')
if action == 'say':
self.say(cmd)
elif action == 'play':
self.play(cmd)
elif action == 'set-queued':
self.setProperty(cmd)
elif action == 'get-config':
self.getConfig(cmd)
elif action == 'reset-queued':
self.reset()
def set_observer(self, ob):
self.observer = ob
def push_request(self, cmd):
action = cmd.get('action')
if action == 'stop':
self.stop()
elif action == 'set-now':
self.setProperty(cmd)
elif action == 'reset-now':
self.reset()
elif action == 'deferred-result':
self.deferred(cmd)
else:
self.queue.append(cmd)
        self._process_queue()
def deferred(self, cmd):
try:
reqid = cmd['deferred']
except KeyError:
return
self.deferreds[reqid] = cmd
if reqid == self.stalled_id:
            self._process_queue()
def stop(self):
self.queue = []
self.busy = False
self.name = None
self.stalled_id = None
self.deferreds = {}
def shutdown(self):
self.observer = None |
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 4 13:07:33 2017
@author: James Jiang
"""
| """
Created on Mon Dec 4 13:07:33 2017
@author: James Jiang
"""
with open('Data.txt') as f:
all_lines = []
for line in f:
line = line.split()
all_lines.append(line)
total = 0
for i in range(len(all_lines)):
counter = 0
for j in range(len(all_lines[i])):
for k in range(len(all_lines[i])):
digits_j = [a for a in all_lines[i][j]]
digits_k = [a for a in all_lines[i][k]]
digits_j.sort()
digits_k.sort()
if digits_j != digits_k:
counter += 1
if counter == len(all_lines[i]) ** 2 - len(all_lines[i]):
total += 1
print(total) |
text = input().split(" ")
even_words = [i for i in text if len(i) % 2 == 0]
for word in even_words:
print(word) | text = input().split(' ')
even_words = [i for i in text if len(i) % 2 == 0]
for word in even_words:
print(word) |
# Definition for singly-linked list.
class ListNode:
    # val: node payload; next: following node or None. The parameter name
    # `next` shadows the builtin to match the LeetCode-provided signature.
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next
class Solution:
    """Insertion sort over a singly linked list (LeetCode 147 style)."""

    def insertionSortList(self, head: ListNode) -> ListNode:
        """Return the list starting at `head` sorted ascending by `val`."""
        if head is None or head.next is None:
            return head
        sorted_head = None
        node = head
        while node is not None:
            nxt = node.next
            node.next = None  # detach before inserting into the sorted list
            sorted_head = self.insertSort(sorted_head, node)
            node = nxt
        return sorted_head

    def insertSort(self, root, node):
        """Insert detached `node` into sorted list `root`; return the new head."""
        if root is None or node.val < root.val:
            node.next = root
            return node
        probe = root
        while probe.next is not None and probe.next.val < node.val:
            probe = probe.next
        node.next = probe.next
        probe.next = node
        return root
| class Listnode:
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class Solution:
    """Insertion sort over a singly linked list (snake_case API)."""

    def insertion_sort_list(self, head: "ListNode") -> "ListNode":
        """Return the head of *head*'s list sorted ascending by ``val``."""
        if head is None or head.next is None:
            return head
        sorted_head = None
        cur = head
        while cur is not None:
            nxt = cur.next
            cur.next = None  # detach before inserting
            # BUG FIX: the original called self.insertSort, which does not
            # exist on this class (its helper is named insert_sort) and
            # would raise AttributeError at runtime.
            sorted_head = self.insert_sort(sorted_head, cur)
            cur = nxt
        return sorted_head

    def insert_sort(self, root, node):
        """Insert *node* into the sorted list *root*; return the new head."""
        if root is None or node.val < root.val:
            node.next = root
            return node
        cur = root
        while cur.next is not None and cur.next.val < node.val:
            cur = cur.next
        node.next = cur.next
        cur.next = node
        return root
#
# PySNMP MIB module APCUPS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/APCUPS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:07:20 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
enterprises, Unsigned32, TimeTicks, NotificationType, IpAddress, Counter32, Counter64, Gauge32, iso, ModuleIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, ObjectIdentity, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "enterprises", "Unsigned32", "TimeTicks", "NotificationType", "IpAddress", "Counter32", "Counter64", "Gauge32", "iso", "ModuleIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "ObjectIdentity", "Integer32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
class DmiCounter(Counter32):
    """DMI counter attribute type; maps directly onto SNMP Counter32."""
    pass
class DmiGauge(Gauge32):
    """DMI gauge attribute type; maps directly onto SNMP Gauge32."""
    pass
class DmiInteger(Integer32):
    """DMI integer attribute type; maps directly onto SNMP Integer32."""
    pass
class DmiDisplaystring(DisplayString):
    """DMI display-string attribute type; maps onto SNMP DisplayString."""
    pass
class DmiDateX(OctetString):
    """DMI date attribute: an OCTET STRING constrained to exactly 28 octets."""
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(28, 28)
    fixedLength = 28
class DmiComponentIndex(Integer32):
    """Index of a DMI component; used as the row index of the MIB tables."""
    pass
# OID registration hierarchy for the APC PowerChute DMI agent:
# apc(318) -> products(1) -> software(2) -> powerChuteDMIAgent(2) -> dmtfGroups(1).
apc = MibIdentifier((1, 3, 6, 1, 4, 1, 318))
products = MibIdentifier((1, 3, 6, 1, 4, 1, 318, 1))
software = MibIdentifier((1, 3, 6, 1, 4, 1, 318, 1, 2))
powerChuteDMIAgent = MibIdentifier((1, 3, 6, 1, 4, 1, 318, 1, 2, 2))
dmtfGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1))
tComponentid = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1), )
if mibBuilder.loadTexts: tComponentid.setStatus('mandatory')
eComponentid = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eComponentid.setStatus('mandatory')
a1Manufacturer = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 1), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Manufacturer.setStatus('mandatory')
a1Product = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Product.setStatus('mandatory')
a1Version = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Version.setStatus('mandatory')
a1SerialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1SerialNumber.setStatus('mandatory')
a1Installation = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 5), DmiDateX()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Installation.setStatus('mandatory')
a1Verify = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vAnErrorOccuredCheckStatusCode", 0), ("vThisComponentDoesNotExist", 1), ("vVerificationIsNotSupported", 2), ("vReserved", 3), ("vThisComponentExistsButTheFunctionalityI", 4), ("vThisComponentExistsButTheFunctionality1", 5), ("vThisComponentExistsAndIsNotFunctioningC", 6), ("vThisComponentExistsAndIsFunctioningCorr", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Verify.setStatus('mandatory')
tUpsBattery = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2), )
if mibBuilder.loadTexts: tUpsBattery.setStatus('mandatory')
eUpsBattery = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eUpsBattery.setStatus('mandatory')
a2BatteryStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vUnknown", 1), ("vBatteryNormal", 2), ("vBatteryLow", 3), ("vBatteryDepleted", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2BatteryStatus.setStatus('mandatory')
a2SecondsOnBattery = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 2), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2SecondsOnBattery.setStatus('mandatory')
a2EstimatedMinutesRemaining = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2EstimatedMinutesRemaining.setStatus('mandatory')
a2EstimatedChargeRemaining = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 4), DmiGauge()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2EstimatedChargeRemaining.setStatus('mandatory')
a2BatteryVoltage = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 5), DmiGauge()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2BatteryVoltage.setStatus('mandatory')
a2BatteryCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 6), DmiGauge()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2BatteryCurrent.setStatus('mandatory')
a2TemperatureProbeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2TemperatureProbeIndex.setStatus('mandatory')
a2FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2FruGroupIndex.setStatus('mandatory')
a2OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2OperationalGroupIndex.setStatus('mandatory')
tTemperatureProbe = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3), )
if mibBuilder.loadTexts: tTemperatureProbe.setStatus('mandatory')
eTemperatureProbe = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eTemperatureProbe.setStatus('mandatory')
a3TemperatureProbeTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeTableIndex.setStatus('mandatory')
a3TemperatureProbeLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vProcessor", 3), ("vDisk", 4), ("vPeripheralBay", 5), ("vSmbMaster", 6), ("vMotherboard", 7), ("vMemoryModule", 8), ("vProcessorModule", 9), ("vPowerUnit", 10), ("vAdd-inCard", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeLocation.setStatus('mandatory')
a3TemperatureProbeDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeDescription.setStatus('mandatory')
a3TemperatureStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vNon-critical", 4), ("vCritical", 5), ("vNon-recoverable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureStatus.setStatus('mandatory')
a3TemperatureProbeTemperatureReading = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeTemperatureReading.setStatus('mandatory')
a3MonitoredTemperatureNominalReading = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3MonitoredTemperatureNominalReading.setStatus('mandatory')
a3MonitoredTemperatureNormalMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3MonitoredTemperatureNormalMaximum.setStatus('mandatory')
a3MonitoredTemperatureNormalMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3MonitoredTemperatureNormalMinimum.setStatus('mandatory')
a3TemperatureProbeMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeMaximum.setStatus('mandatory')
a3TemperatureProbeMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeMinimum.setStatus('mandatory')
a3TemperatureReadingLowerThreshold_Non_c = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 11), DmiInteger()).setLabel("a3TemperatureReadingLowerThreshold-Non-c").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingLowerThreshold_Non_c.setStatus('mandatory')
a3TemperatureReadingUpperThreshold_Non_c = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 12), DmiInteger()).setLabel("a3TemperatureReadingUpperThreshold-Non-c").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingUpperThreshold_Non_c.setStatus('mandatory')
a3TemperatureReadingLowerThreshold_Criti = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 13), DmiInteger()).setLabel("a3TemperatureReadingLowerThreshold-Criti").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingLowerThreshold_Criti.setStatus('mandatory')
a3TemperatureReadingUpperThreshold_Criti = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 14), DmiInteger()).setLabel("a3TemperatureReadingUpperThreshold-Criti").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingUpperThreshold_Criti.setStatus('mandatory')
a3TemperatureReadingLowerThreshold_Non_r = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 15), DmiInteger()).setLabel("a3TemperatureReadingLowerThreshold-Non-r").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingLowerThreshold_Non_r.setStatus('mandatory')
a3TemperatureReadingUpperThreshold_Non_r = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 16), DmiInteger()).setLabel("a3TemperatureReadingUpperThreshold-Non-r").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingUpperThreshold_Non_r.setStatus('mandatory')
a3TemperatureProbeResolution = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 17), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureProbeResolution.setStatus('mandatory')
a3TemperatureProbeTolerance = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 18), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureProbeTolerance.setStatus('mandatory')
a3TemperatureProbeAccuracy = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 19), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureProbeAccuracy.setStatus('mandatory')
a3FruGroupIndex = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 20), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3FruGroupIndex.setStatus('mandatory')
a3OperationalGroupIndex = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 21), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3OperationalGroupIndex.setStatus('mandatory')
tOperationalStateTable = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4), )
if mibBuilder.loadTexts: tOperationalStateTable.setStatus('mandatory')
eOperationalStateTable = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"), (0, "APCUPS-MIB", "a4OperationalStateInstanceIndex"))
if mibBuilder.loadTexts: eOperationalStateTable.setStatus('mandatory')
a4OperationalStateInstanceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4OperationalStateInstanceIndex.setStatus('mandatory')
a4DeviceGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4DeviceGroupIndex.setStatus('mandatory')
a4OperationalStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vEnabled", 3), ("vDisabled", 4), ("vNotApplicable", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4OperationalStatus.setStatus('mandatory')
a4UsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vIdle", 3), ("vActive", 4), ("vBusy", 5), ("vNotApplicable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4UsageState.setStatus('mandatory')
a4AvailabilityStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vRunning", 3), ("vWarning", 4), ("vInTest", 5), ("vNotApplicable", 6), ("vPowerOff", 7), ("vOffLine", 8), ("vOffDuty", 9), ("vDegraded", 10), ("vNotInstalled", 11), ("vInstallError", 12), ("vPowerSave", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4AvailabilityStatus.setStatus('mandatory')
a4AdministrativeState = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vLocked", 3), ("vUnlocked", 4), ("vNotApplicable", 5), ("vShuttingDown", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4AdministrativeState.setStatus('mandatory')
a4FatalErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 7), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4FatalErrorCount.setStatus('mandatory')
a4MajorErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 8), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4MajorErrorCount.setStatus('mandatory')
a4WarningErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 9), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4WarningErrorCount.setStatus('mandatory')
a4CurrentErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vNon-critical", 4), ("vCritical", 5), ("vNon-recoverable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4CurrentErrorStatus.setStatus('mandatory')
tDiagnostics = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5), )
if mibBuilder.loadTexts: tDiagnostics.setStatus('mandatory')
eDiagnostics = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"), (0, "APCUPS-MIB", "a5DiagnosticFunctionTableIndex"))
if mibBuilder.loadTexts: eDiagnostics.setStatus('mandatory')
a5DiagnosticFunctionTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5DiagnosticFunctionTableIndex.setStatus('mandatory')
a5DiagnosticFunctionName = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5DiagnosticFunctionName.setStatus('mandatory')
a5DiagnosticFunctionDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5DiagnosticFunctionDescription.setStatus('mandatory')
a5ExclusiveAccessRequired = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5ExclusiveAccessRequired.setStatus('mandatory')
a5PrerequisiteConditions = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNoPrerequisites", 3), ("vWrapPlugInstalled", 4), ("vNoMediaInstalled", 5), ("vScratchMediaInstalled", 6), ("vTestMediaInstalled", 7), ("vSystemReferenceDisketteInstalled", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5PrerequisiteConditions.setStatus('mandatory')
a5PrerequisiteDiagnosticFunction = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5PrerequisiteDiagnosticFunction.setStatus('mandatory')
tDiagnosticRequestGroup = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6), )
if mibBuilder.loadTexts: tDiagnosticRequestGroup.setStatus('mandatory')
eDiagnosticRequestGroup = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eDiagnosticRequestGroup.setStatus('mandatory')
a6DiagnosticFunctionReserveKey = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1, 1), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a6DiagnosticFunctionReserveKey.setStatus('mandatory')
a6DiagnosticFunctionRequest = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1, 2), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a6DiagnosticFunctionRequest.setStatus('mandatory')
a6DiagnosticFunctionResult = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6DiagnosticFunctionResult.setStatus('mandatory')
tDiagnosticResults = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7), )
if mibBuilder.loadTexts: tDiagnosticResults.setStatus('mandatory')
eDiagnosticResults = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"), (0, "APCUPS-MIB", "a7DiagnosticFunctionId"), (0, "APCUPS-MIB", "a7DiagnosticFunctionResult"))
if mibBuilder.loadTexts: eDiagnosticResults.setStatus('mandatory')
a7DiagnosticFunctionId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7DiagnosticFunctionId.setStatus('mandatory')
a7DiagnosticFunctionResult = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7DiagnosticFunctionResult.setStatus('mandatory')
a7DiagnosticFunctionResultDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7DiagnosticFunctionResultDescription.setStatus('mandatory')
a7FaultIsolatedToThisComponent = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7FaultIsolatedToThisComponent.setStatus('mandatory')
tErrorControlGroup = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8), )
if mibBuilder.loadTexts: tErrorControlGroup.setStatus('mandatory')
eErrorControlGroup = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eErrorControlGroup.setStatus('mandatory')
a8Selfid = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8Selfid.setStatus('mandatory')
a8NumberOfFatalErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 2), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8NumberOfFatalErrors.setStatus('mandatory')
a8NumberOfMajorErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 3), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8NumberOfMajorErrors.setStatus('mandatory')
a8NumberOfWarnings = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 4), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8NumberOfWarnings.setStatus('mandatory')
a8ErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("vOk", 0), ("vWarning", 1), ("vMajor", 2), ("vFatal", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8ErrorStatus.setStatus('mandatory')
a8ErrorStatusType = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("vPost", 0), ("vRuntime", 1), ("vDiagnosticTest", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8ErrorStatusType.setStatus('mandatory')
a8AlarmGeneration = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOff", 0), ("vOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a8AlarmGeneration.setStatus('mandatory')
tMiftomib = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99), )
if mibBuilder.loadTexts: tMiftomib.setStatus('mandatory')
eMiftomib = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eMiftomib.setStatus('mandatory')
a99MibName = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1, 1), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a99MibName.setStatus('mandatory')
a99MibOid = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a99MibOid.setStatus('mandatory')
a99DisableTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1, 3), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a99DisableTrap.setStatus('mandatory')
tTrapGroup = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999), )
if mibBuilder.loadTexts: tTrapGroup.setStatus('mandatory')
eTrapGroup = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eTrapGroup.setStatus('mandatory')
a9999ErrorTime = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorTime.setStatus('mandatory')
a9999ErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorStatus.setStatus('mandatory')
a9999ErrorGroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorGroupId.setStatus('mandatory')
a9999ErrorInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorInstanceId.setStatus('mandatory')
a9999ComponentId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ComponentId.setStatus('mandatory')
a9999GroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999GroupId.setStatus('mandatory')
a9999InstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999InstanceId.setStatus('mandatory')
a9999VendorCode1 = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999VendorCode1.setStatus('mandatory')
a9999VendorCode2 = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999VendorCode2.setStatus('mandatory')
a9999VendorText = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 10), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999VendorText.setStatus('mandatory')
a9999ParentGroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ParentGroupId.setStatus('mandatory')
a9999ParentInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 12), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ParentInstanceId.setStatus('mandatory')
pwrchuteEventError = NotificationType((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1) + (0,1)).setObjects(("APCUPS-MIB", "a9999ErrorTime"), ("APCUPS-MIB", "a9999ErrorStatus"), ("APCUPS-MIB", "a9999ErrorGroupId"), ("APCUPS-MIB", "a9999ErrorInstanceId"), ("APCUPS-MIB", "a9999ComponentId"), ("APCUPS-MIB", "a9999GroupId"), ("APCUPS-MIB", "a9999InstanceId"), ("APCUPS-MIB", "a9999VendorCode1"), ("APCUPS-MIB", "a9999VendorCode2"), ("APCUPS-MIB", "a9999VendorText"), ("APCUPS-MIB", "a9999ParentGroupId"), ("APCUPS-MIB", "a9999ParentInstanceId"))
mibBuilder.exportSymbols("APCUPS-MIB", a9999GroupId=a9999GroupId, a4AvailabilityStatus=a4AvailabilityStatus, dmtfGroups=dmtfGroups, a8ErrorStatusType=a8ErrorStatusType, a2EstimatedMinutesRemaining=a2EstimatedMinutesRemaining, a9999ErrorInstanceId=a9999ErrorInstanceId, a6DiagnosticFunctionReserveKey=a6DiagnosticFunctionReserveKey, a1Installation=a1Installation, a3TemperatureProbeTemperatureReading=a3TemperatureProbeTemperatureReading, a3TemperatureReadingUpperThreshold_Criti=a3TemperatureReadingUpperThreshold_Criti, a3TemperatureProbeMaximum=a3TemperatureProbeMaximum, a9999ErrorStatus=a9999ErrorStatus, tDiagnosticResults=tDiagnosticResults, a9999ErrorTime=a9999ErrorTime, a5DiagnosticFunctionDescription=a5DiagnosticFunctionDescription, a5PrerequisiteConditions=a5PrerequisiteConditions, DmiCounter=DmiCounter, a3MonitoredTemperatureNormalMinimum=a3MonitoredTemperatureNormalMinimum, tOperationalStateTable=tOperationalStateTable, a9999ParentInstanceId=a9999ParentInstanceId, a8ErrorStatus=a8ErrorStatus, powerChuteDMIAgent=powerChuteDMIAgent, a2FruGroupIndex=a2FruGroupIndex, tUpsBattery=tUpsBattery, a5ExclusiveAccessRequired=a5ExclusiveAccessRequired, a5DiagnosticFunctionTableIndex=a5DiagnosticFunctionTableIndex, a99MibOid=a99MibOid, a6DiagnosticFunctionResult=a6DiagnosticFunctionResult, a3FruGroupIndex=a3FruGroupIndex, tDiagnostics=tDiagnostics, a9999InstanceId=a9999InstanceId, a4OperationalStateInstanceIndex=a4OperationalStateInstanceIndex, eErrorControlGroup=eErrorControlGroup, a6DiagnosticFunctionRequest=a6DiagnosticFunctionRequest, a9999ParentGroupId=a9999ParentGroupId, a2OperationalGroupIndex=a2OperationalGroupIndex, DmiDisplaystring=DmiDisplaystring, tComponentid=tComponentid, a4OperationalStatus=a4OperationalStatus, a3TemperatureReadingUpperThreshold_Non_c=a3TemperatureReadingUpperThreshold_Non_c, a3TemperatureProbeLocation=a3TemperatureProbeLocation, a3TemperatureProbeTolerance=a3TemperatureProbeTolerance, a4MajorErrorCount=a4MajorErrorCount, 
a3TemperatureProbeAccuracy=a3TemperatureProbeAccuracy, a1Version=a1Version, a3TemperatureProbeDescription=a3TemperatureProbeDescription, a4DeviceGroupIndex=a4DeviceGroupIndex, a9999VendorCode2=a9999VendorCode2, a3MonitoredTemperatureNormalMaximum=a3MonitoredTemperatureNormalMaximum, eOperationalStateTable=eOperationalStateTable, a3TemperatureReadingLowerThreshold_Non_c=a3TemperatureReadingLowerThreshold_Non_c, eDiagnostics=eDiagnostics, a99DisableTrap=a99DisableTrap, a2EstimatedChargeRemaining=a2EstimatedChargeRemaining, eTrapGroup=eTrapGroup, DmiInteger=DmiInteger, eTemperatureProbe=eTemperatureProbe, a3TemperatureProbeResolution=a3TemperatureProbeResolution, a2BatteryCurrent=a2BatteryCurrent, a1Manufacturer=a1Manufacturer, a3OperationalGroupIndex=a3OperationalGroupIndex, a1Verify=a1Verify, a9999ComponentId=a9999ComponentId, a8NumberOfFatalErrors=a8NumberOfFatalErrors, apc=apc, a3TemperatureProbeMinimum=a3TemperatureProbeMinimum, a3TemperatureStatus=a3TemperatureStatus, a7DiagnosticFunctionResult=a7DiagnosticFunctionResult, a1Product=a1Product, tTemperatureProbe=tTemperatureProbe, a7DiagnosticFunctionId=a7DiagnosticFunctionId, eMiftomib=eMiftomib, a4WarningErrorCount=a4WarningErrorCount, a8NumberOfMajorErrors=a8NumberOfMajorErrors, a3TemperatureReadingLowerThreshold_Criti=a3TemperatureReadingLowerThreshold_Criti, a2BatteryVoltage=a2BatteryVoltage, a9999ErrorGroupId=a9999ErrorGroupId, a5PrerequisiteDiagnosticFunction=a5PrerequisiteDiagnosticFunction, pwrchuteEventError=pwrchuteEventError, DmiComponentIndex=DmiComponentIndex, eDiagnosticResults=eDiagnosticResults, a9999VendorCode1=a9999VendorCode1, DmiGauge=DmiGauge, eDiagnosticRequestGroup=eDiagnosticRequestGroup, a8Selfid=a8Selfid, eComponentid=eComponentid, a4FatalErrorCount=a4FatalErrorCount, a8NumberOfWarnings=a8NumberOfWarnings, a3TemperatureProbeTableIndex=a3TemperatureProbeTableIndex, a2TemperatureProbeIndex=a2TemperatureProbeIndex, products=products, tErrorControlGroup=tErrorControlGroup, 
a4CurrentErrorStatus=a4CurrentErrorStatus, a4UsageState=a4UsageState, a2SecondsOnBattery=a2SecondsOnBattery, a3TemperatureReadingLowerThreshold_Non_r=a3TemperatureReadingLowerThreshold_Non_r, software=software, tMiftomib=tMiftomib, a3MonitoredTemperatureNominalReading=a3MonitoredTemperatureNominalReading, tTrapGroup=tTrapGroup, a2BatteryStatus=a2BatteryStatus, DmiDateX=DmiDateX, a99MibName=a99MibName, a1SerialNumber=a1SerialNumber, eUpsBattery=eUpsBattery, a8AlarmGeneration=a8AlarmGeneration, a4AdministrativeState=a4AdministrativeState, tDiagnosticRequestGroup=tDiagnosticRequestGroup, a3TemperatureReadingUpperThreshold_Non_r=a3TemperatureReadingUpperThreshold_Non_r, a7FaultIsolatedToThisComponent=a7FaultIsolatedToThisComponent, a7DiagnosticFunctionResultDescription=a7DiagnosticFunctionResultDescription, a5DiagnosticFunctionName=a5DiagnosticFunctionName, a9999VendorText=a9999VendorText)
| (object_identifier, octet_string, integer) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'OctetString', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(constraints_union, single_value_constraint, value_range_constraint, value_size_constraint, constraints_intersection) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ConstraintsUnion', 'SingleValueConstraint', 'ValueRangeConstraint', 'ValueSizeConstraint', 'ConstraintsIntersection')
(notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance')
(enterprises, unsigned32, time_ticks, notification_type, ip_address, counter32, counter64, gauge32, iso, module_identity, mib_identifier, mib_scalar, mib_table, mib_table_row, mib_table_column, bits, object_identity, integer32) = mibBuilder.importSymbols('SNMPv2-SMI', 'enterprises', 'Unsigned32', 'TimeTicks', 'NotificationType', 'IpAddress', 'Counter32', 'Counter64', 'Gauge32', 'iso', 'ModuleIdentity', 'MibIdentifier', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Bits', 'ObjectIdentity', 'Integer32')
(textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString')
class Dmicounter(Counter32):
pass
class Dmigauge(Gauge32):
pass
class Dmiinteger(Integer32):
pass
class Dmidisplaystring(DisplayString):
pass
class Dmidatex(OctetString):
subtype_spec = OctetString.subtypeSpec + value_size_constraint(28, 28)
fixed_length = 28
class Dmicomponentindex(Integer32):
pass
apc = mib_identifier((1, 3, 6, 1, 4, 1, 318))
products = mib_identifier((1, 3, 6, 1, 4, 1, 318, 1))
software = mib_identifier((1, 3, 6, 1, 4, 1, 318, 1, 2))
power_chute_dmi_agent = mib_identifier((1, 3, 6, 1, 4, 1, 318, 1, 2, 2))
dmtf_groups = mib_identifier((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1))
t_componentid = mib_table((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1))
if mibBuilder.loadTexts:
tComponentid.setStatus('mandatory')
e_componentid = mib_table_row((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1)).setIndexNames((0, 'APCUPS-MIB', 'DmiComponentIndex'))
if mibBuilder.loadTexts:
eComponentid.setStatus('mandatory')
a1_manufacturer = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 1), dmi_displaystring()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a1Manufacturer.setStatus('mandatory')
a1_product = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 2), dmi_displaystring()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a1Product.setStatus('mandatory')
a1_version = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 3), dmi_displaystring()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a1Version.setStatus('mandatory')
a1_serial_number = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 4), dmi_displaystring()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a1SerialNumber.setStatus('mandatory')
a1_installation = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 5), dmi_date_x()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a1Installation.setStatus('mandatory')
a1_verify = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=named_values(('vAnErrorOccuredCheckStatusCode', 0), ('vThisComponentDoesNotExist', 1), ('vVerificationIsNotSupported', 2), ('vReserved', 3), ('vThisComponentExistsButTheFunctionalityI', 4), ('vThisComponentExistsButTheFunctionality1', 5), ('vThisComponentExistsAndIsNotFunctioningC', 6), ('vThisComponentExistsAndIsFunctioningCorr', 7)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a1Verify.setStatus('mandatory')
t_ups_battery = mib_table((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2))
if mibBuilder.loadTexts:
tUpsBattery.setStatus('mandatory')
e_ups_battery = mib_table_row((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1)).setIndexNames((0, 'APCUPS-MIB', 'DmiComponentIndex'))
if mibBuilder.loadTexts:
eUpsBattery.setStatus('mandatory')
a2_battery_status = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('vUnknown', 1), ('vBatteryNormal', 2), ('vBatteryLow', 3), ('vBatteryDepleted', 4)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a2BatteryStatus.setStatus('mandatory')
a2_seconds_on_battery = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 2), dmi_counter()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a2SecondsOnBattery.setStatus('mandatory')
a2_estimated_minutes_remaining = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 3), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a2EstimatedMinutesRemaining.setStatus('mandatory')
a2_estimated_charge_remaining = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 4), dmi_gauge()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a2EstimatedChargeRemaining.setStatus('mandatory')
a2_battery_voltage = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 5), dmi_gauge()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a2BatteryVoltage.setStatus('mandatory')
a2_battery_current = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 6), dmi_gauge()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a2BatteryCurrent.setStatus('mandatory')
a2_temperature_probe_index = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 7), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a2TemperatureProbeIndex.setStatus('mandatory')
a2_fru_group_index = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 8), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a2FruGroupIndex.setStatus('mandatory')
a2_operational_group_index = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 9), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a2OperationalGroupIndex.setStatus('mandatory')
t_temperature_probe = mib_table((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3))
if mibBuilder.loadTexts:
tTemperatureProbe.setStatus('mandatory')
e_temperature_probe = mib_table_row((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1)).setIndexNames((0, 'APCUPS-MIB', 'DmiComponentIndex'))
if mibBuilder.loadTexts:
eTemperatureProbe.setStatus('mandatory')
a3_temperature_probe_table_index = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 1), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3TemperatureProbeTableIndex.setStatus('mandatory')
a3_temperature_probe_location = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=named_values(('vOther', 1), ('vUnknown', 2), ('vProcessor', 3), ('vDisk', 4), ('vPeripheralBay', 5), ('vSmbMaster', 6), ('vMotherboard', 7), ('vMemoryModule', 8), ('vProcessorModule', 9), ('vPowerUnit', 10), ('vAdd-inCard', 11)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3TemperatureProbeLocation.setStatus('mandatory')
a3_temperature_probe_description = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 3), dmi_displaystring()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3TemperatureProbeDescription.setStatus('mandatory')
a3_temperature_status = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6))).clone(namedValues=named_values(('vOther', 1), ('vUnknown', 2), ('vOk', 3), ('vNon-critical', 4), ('vCritical', 5), ('vNon-recoverable', 6)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3TemperatureStatus.setStatus('mandatory')
a3_temperature_probe_temperature_reading = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 5), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3TemperatureProbeTemperatureReading.setStatus('mandatory')
a3_monitored_temperature_nominal_reading = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 6), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3MonitoredTemperatureNominalReading.setStatus('mandatory')
a3_monitored_temperature_normal_maximum = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 7), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3MonitoredTemperatureNormalMaximum.setStatus('mandatory')
a3_monitored_temperature_normal_minimum = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 8), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3MonitoredTemperatureNormalMinimum.setStatus('mandatory')
a3_temperature_probe_maximum = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 9), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3TemperatureProbeMaximum.setStatus('mandatory')
a3_temperature_probe_minimum = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 10), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3TemperatureProbeMinimum.setStatus('mandatory')
a3_temperature_reading_lower_threshold__non_c = mib_scalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 11), dmi_integer()).setLabel('a3TemperatureReadingLowerThreshold-Non-c').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a3TemperatureReadingLowerThreshold_Non_c.setStatus('mandatory')
a3_temperature_reading_upper_threshold__non_c = mib_scalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 12), dmi_integer()).setLabel('a3TemperatureReadingUpperThreshold-Non-c').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a3TemperatureReadingUpperThreshold_Non_c.setStatus('mandatory')
a3_temperature_reading_lower_threshold__criti = mib_scalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 13), dmi_integer()).setLabel('a3TemperatureReadingLowerThreshold-Criti').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a3TemperatureReadingLowerThreshold_Criti.setStatus('mandatory')
a3_temperature_reading_upper_threshold__criti = mib_scalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 14), dmi_integer()).setLabel('a3TemperatureReadingUpperThreshold-Criti').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a3TemperatureReadingUpperThreshold_Criti.setStatus('mandatory')
a3_temperature_reading_lower_threshold__non_r = mib_scalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 15), dmi_integer()).setLabel('a3TemperatureReadingLowerThreshold-Non-r').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a3TemperatureReadingLowerThreshold_Non_r.setStatus('mandatory')
a3_temperature_reading_upper_threshold__non_r = mib_scalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 16), dmi_integer()).setLabel('a3TemperatureReadingUpperThreshold-Non-r').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a3TemperatureReadingUpperThreshold_Non_r.setStatus('mandatory')
a3_temperature_probe_resolution = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 17), dmi_integer()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a3TemperatureProbeResolution.setStatus('mandatory')
a3_temperature_probe_tolerance = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 18), dmi_integer()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a3TemperatureProbeTolerance.setStatus('mandatory')
a3_temperature_probe_accuracy = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 19), dmi_integer()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a3TemperatureProbeAccuracy.setStatus('mandatory')
a3_fru_group_index = mib_scalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 20), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3FruGroupIndex.setStatus('mandatory')
a3_operational_group_index = mib_scalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 21), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a3OperationalGroupIndex.setStatus('mandatory')
t_operational_state_table = mib_table((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4))
if mibBuilder.loadTexts:
tOperationalStateTable.setStatus('mandatory')
e_operational_state_table = mib_table_row((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1)).setIndexNames((0, 'APCUPS-MIB', 'DmiComponentIndex'), (0, 'APCUPS-MIB', 'a4OperationalStateInstanceIndex'))
if mibBuilder.loadTexts:
eOperationalStateTable.setStatus('mandatory')
a4_operational_state_instance_index = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 1), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a4OperationalStateInstanceIndex.setStatus('mandatory')
a4_device_group_index = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 2), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a4DeviceGroupIndex.setStatus('mandatory')
a4_operational_status = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5))).clone(namedValues=named_values(('vOther', 1), ('vUnknown', 2), ('vEnabled', 3), ('vDisabled', 4), ('vNotApplicable', 5)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a4OperationalStatus.setStatus('mandatory')
a4_usage_state = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6))).clone(namedValues=named_values(('vOther', 1), ('vUnknown', 2), ('vIdle', 3), ('vActive', 4), ('vBusy', 5), ('vNotApplicable', 6)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a4UsageState.setStatus('mandatory')
a4_availability_status = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=named_values(('vOther', 1), ('vUnknown', 2), ('vRunning', 3), ('vWarning', 4), ('vInTest', 5), ('vNotApplicable', 6), ('vPowerOff', 7), ('vOffLine', 8), ('vOffDuty', 9), ('vDegraded', 10), ('vNotInstalled', 11), ('vInstallError', 12), ('vPowerSave', 13)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a4AvailabilityStatus.setStatus('mandatory')
a4_administrative_state = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6))).clone(namedValues=named_values(('vOther', 1), ('vUnknown', 2), ('vLocked', 3), ('vUnlocked', 4), ('vNotApplicable', 5), ('vShuttingDown', 6)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a4AdministrativeState.setStatus('mandatory')
a4_fatal_error_count = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 7), dmi_counter()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a4FatalErrorCount.setStatus('mandatory')
a4_major_error_count = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 8), dmi_counter()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a4MajorErrorCount.setStatus('mandatory')
a4_warning_error_count = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 9), dmi_counter()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a4WarningErrorCount.setStatus('mandatory')
a4_current_error_status = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 10), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6))).clone(namedValues=named_values(('vOther', 1), ('vUnknown', 2), ('vOk', 3), ('vNon-critical', 4), ('vCritical', 5), ('vNon-recoverable', 6)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a4CurrentErrorStatus.setStatus('mandatory')
t_diagnostics = mib_table((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5))
if mibBuilder.loadTexts:
tDiagnostics.setStatus('mandatory')
e_diagnostics = mib_table_row((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1)).setIndexNames((0, 'APCUPS-MIB', 'DmiComponentIndex'), (0, 'APCUPS-MIB', 'a5DiagnosticFunctionTableIndex'))
if mibBuilder.loadTexts:
eDiagnostics.setStatus('mandatory')
a5_diagnostic_function_table_index = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 1), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a5DiagnosticFunctionTableIndex.setStatus('mandatory')
a5_diagnostic_function_name = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 2), dmi_displaystring()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a5DiagnosticFunctionName.setStatus('mandatory')
a5_diagnostic_function_description = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 3), dmi_displaystring()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a5DiagnosticFunctionDescription.setStatus('mandatory')
a5_exclusive_access_required = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('vFalse', 0), ('vTrue', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a5ExclusiveAccessRequired.setStatus('mandatory')
a5_prerequisite_conditions = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=named_values(('vOther', 1), ('vUnknown', 2), ('vNoPrerequisites', 3), ('vWrapPlugInstalled', 4), ('vNoMediaInstalled', 5), ('vScratchMediaInstalled', 6), ('vTestMediaInstalled', 7), ('vSystemReferenceDisketteInstalled', 8)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a5PrerequisiteConditions.setStatus('mandatory')
a5_prerequisite_diagnostic_function = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 6), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a5PrerequisiteDiagnosticFunction.setStatus('mandatory')
t_diagnostic_request_group = mib_table((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6))
if mibBuilder.loadTexts:
tDiagnosticRequestGroup.setStatus('mandatory')
e_diagnostic_request_group = mib_table_row((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1)).setIndexNames((0, 'APCUPS-MIB', 'DmiComponentIndex'))
if mibBuilder.loadTexts:
eDiagnosticRequestGroup.setStatus('mandatory')
a6_diagnostic_function_reserve_key = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1, 1), dmi_integer()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a6DiagnosticFunctionReserveKey.setStatus('mandatory')
a6_diagnostic_function_request = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1, 2), dmi_integer()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a6DiagnosticFunctionRequest.setStatus('mandatory')
a6_diagnostic_function_result = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1, 3), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a6DiagnosticFunctionResult.setStatus('mandatory')
t_diagnostic_results = mib_table((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7))
if mibBuilder.loadTexts:
tDiagnosticResults.setStatus('mandatory')
e_diagnostic_results = mib_table_row((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1)).setIndexNames((0, 'APCUPS-MIB', 'DmiComponentIndex'), (0, 'APCUPS-MIB', 'a7DiagnosticFunctionId'), (0, 'APCUPS-MIB', 'a7DiagnosticFunctionResult'))
if mibBuilder.loadTexts:
eDiagnosticResults.setStatus('mandatory')
a7_diagnostic_function_id = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 1), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a7DiagnosticFunctionId.setStatus('mandatory')
a7_diagnostic_function_result = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 2), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a7DiagnosticFunctionResult.setStatus('mandatory')
a7_diagnostic_function_result_description = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 3), dmi_displaystring()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a7DiagnosticFunctionResultDescription.setStatus('mandatory')
a7_fault_isolated_to_this_component = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('vFalse', 0), ('vTrue', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a7FaultIsolatedToThisComponent.setStatus('mandatory')
t_error_control_group = mib_table((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8))
if mibBuilder.loadTexts:
tErrorControlGroup.setStatus('mandatory')
e_error_control_group = mib_table_row((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1)).setIndexNames((0, 'APCUPS-MIB', 'DmiComponentIndex'))
if mibBuilder.loadTexts:
eErrorControlGroup.setStatus('mandatory')
a8_selfid = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 1), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a8Selfid.setStatus('mandatory')
a8_number_of_fatal_errors = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 2), dmi_counter()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a8NumberOfFatalErrors.setStatus('mandatory')
a8_number_of_major_errors = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 3), dmi_counter()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a8NumberOfMajorErrors.setStatus('mandatory')
a8_number_of_warnings = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 4), dmi_counter()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a8NumberOfWarnings.setStatus('mandatory')
a8_error_status = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3))).clone(namedValues=named_values(('vOk', 0), ('vWarning', 1), ('vMajor', 2), ('vFatal', 3)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a8ErrorStatus.setStatus('mandatory')
a8_error_status_type = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2))).clone(namedValues=named_values(('vPost', 0), ('vRuntime', 1), ('vDiagnosticTest', 2)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a8ErrorStatusType.setStatus('mandatory')
a8_alarm_generation = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 7), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('vOff', 0), ('vOn', 1)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a8AlarmGeneration.setStatus('mandatory')
t_miftomib = mib_table((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99))
if mibBuilder.loadTexts:
tMiftomib.setStatus('mandatory')
e_miftomib = mib_table_row((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1)).setIndexNames((0, 'APCUPS-MIB', 'DmiComponentIndex'))
if mibBuilder.loadTexts:
eMiftomib.setStatus('mandatory')
a99_mib_name = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1, 1), dmi_displaystring()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a99MibName.setStatus('mandatory')
a99_mib_oid = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1, 2), dmi_displaystring()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a99MibOid.setStatus('mandatory')
a99_disable_trap = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1, 3), dmi_integer()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
a99DisableTrap.setStatus('mandatory')
t_trap_group = mib_table((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999))
if mibBuilder.loadTexts:
tTrapGroup.setStatus('mandatory')
e_trap_group = mib_table_row((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1)).setIndexNames((0, 'APCUPS-MIB', 'DmiComponentIndex'))
if mibBuilder.loadTexts:
eTrapGroup.setStatus('mandatory')
a9999_error_time = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 1), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999ErrorTime.setStatus('mandatory')
a9999_error_status = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 2), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999ErrorStatus.setStatus('mandatory')
a9999_error_group_id = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 3), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999ErrorGroupId.setStatus('mandatory')
a9999_error_instance_id = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 4), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999ErrorInstanceId.setStatus('mandatory')
a9999_component_id = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 5), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999ComponentId.setStatus('mandatory')
a9999_group_id = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 6), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999GroupId.setStatus('mandatory')
a9999_instance_id = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 7), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999InstanceId.setStatus('mandatory')
a9999_vendor_code1 = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 8), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999VendorCode1.setStatus('mandatory')
a9999_vendor_code2 = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 9), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999VendorCode2.setStatus('mandatory')
a9999_vendor_text = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 10), octet_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999VendorText.setStatus('mandatory')
a9999_parent_group_id = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 11), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999ParentGroupId.setStatus('mandatory')
a9999_parent_instance_id = mib_table_column((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 12), dmi_integer()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
a9999ParentInstanceId.setStatus('mandatory')
pwrchute_event_error = notification_type((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1) + (0, 1)).setObjects(('APCUPS-MIB', 'a9999ErrorTime'), ('APCUPS-MIB', 'a9999ErrorStatus'), ('APCUPS-MIB', 'a9999ErrorGroupId'), ('APCUPS-MIB', 'a9999ErrorInstanceId'), ('APCUPS-MIB', 'a9999ComponentId'), ('APCUPS-MIB', 'a9999GroupId'), ('APCUPS-MIB', 'a9999InstanceId'), ('APCUPS-MIB', 'a9999VendorCode1'), ('APCUPS-MIB', 'a9999VendorCode2'), ('APCUPS-MIB', 'a9999VendorText'), ('APCUPS-MIB', 'a9999ParentGroupId'), ('APCUPS-MIB', 'a9999ParentInstanceId'))
mibBuilder.exportSymbols('APCUPS-MIB', a9999GroupId=a9999GroupId, a4AvailabilityStatus=a4AvailabilityStatus, dmtfGroups=dmtfGroups, a8ErrorStatusType=a8ErrorStatusType, a2EstimatedMinutesRemaining=a2EstimatedMinutesRemaining, a9999ErrorInstanceId=a9999ErrorInstanceId, a6DiagnosticFunctionReserveKey=a6DiagnosticFunctionReserveKey, a1Installation=a1Installation, a3TemperatureProbeTemperatureReading=a3TemperatureProbeTemperatureReading, a3TemperatureReadingUpperThreshold_Criti=a3TemperatureReadingUpperThreshold_Criti, a3TemperatureProbeMaximum=a3TemperatureProbeMaximum, a9999ErrorStatus=a9999ErrorStatus, tDiagnosticResults=tDiagnosticResults, a9999ErrorTime=a9999ErrorTime, a5DiagnosticFunctionDescription=a5DiagnosticFunctionDescription, a5PrerequisiteConditions=a5PrerequisiteConditions, DmiCounter=DmiCounter, a3MonitoredTemperatureNormalMinimum=a3MonitoredTemperatureNormalMinimum, tOperationalStateTable=tOperationalStateTable, a9999ParentInstanceId=a9999ParentInstanceId, a8ErrorStatus=a8ErrorStatus, powerChuteDMIAgent=powerChuteDMIAgent, a2FruGroupIndex=a2FruGroupIndex, tUpsBattery=tUpsBattery, a5ExclusiveAccessRequired=a5ExclusiveAccessRequired, a5DiagnosticFunctionTableIndex=a5DiagnosticFunctionTableIndex, a99MibOid=a99MibOid, a6DiagnosticFunctionResult=a6DiagnosticFunctionResult, a3FruGroupIndex=a3FruGroupIndex, tDiagnostics=tDiagnostics, a9999InstanceId=a9999InstanceId, a4OperationalStateInstanceIndex=a4OperationalStateInstanceIndex, eErrorControlGroup=eErrorControlGroup, a6DiagnosticFunctionRequest=a6DiagnosticFunctionRequest, a9999ParentGroupId=a9999ParentGroupId, a2OperationalGroupIndex=a2OperationalGroupIndex, DmiDisplaystring=DmiDisplaystring, tComponentid=tComponentid, a4OperationalStatus=a4OperationalStatus, a3TemperatureReadingUpperThreshold_Non_c=a3TemperatureReadingUpperThreshold_Non_c, a3TemperatureProbeLocation=a3TemperatureProbeLocation, a3TemperatureProbeTolerance=a3TemperatureProbeTolerance, a4MajorErrorCount=a4MajorErrorCount, 
a3TemperatureProbeAccuracy=a3TemperatureProbeAccuracy, a1Version=a1Version, a3TemperatureProbeDescription=a3TemperatureProbeDescription, a4DeviceGroupIndex=a4DeviceGroupIndex, a9999VendorCode2=a9999VendorCode2, a3MonitoredTemperatureNormalMaximum=a3MonitoredTemperatureNormalMaximum, eOperationalStateTable=eOperationalStateTable, a3TemperatureReadingLowerThreshold_Non_c=a3TemperatureReadingLowerThreshold_Non_c, eDiagnostics=eDiagnostics, a99DisableTrap=a99DisableTrap, a2EstimatedChargeRemaining=a2EstimatedChargeRemaining, eTrapGroup=eTrapGroup, DmiInteger=DmiInteger, eTemperatureProbe=eTemperatureProbe, a3TemperatureProbeResolution=a3TemperatureProbeResolution, a2BatteryCurrent=a2BatteryCurrent, a1Manufacturer=a1Manufacturer, a3OperationalGroupIndex=a3OperationalGroupIndex, a1Verify=a1Verify, a9999ComponentId=a9999ComponentId, a8NumberOfFatalErrors=a8NumberOfFatalErrors, apc=apc, a3TemperatureProbeMinimum=a3TemperatureProbeMinimum, a3TemperatureStatus=a3TemperatureStatus, a7DiagnosticFunctionResult=a7DiagnosticFunctionResult, a1Product=a1Product, tTemperatureProbe=tTemperatureProbe, a7DiagnosticFunctionId=a7DiagnosticFunctionId, eMiftomib=eMiftomib, a4WarningErrorCount=a4WarningErrorCount, a8NumberOfMajorErrors=a8NumberOfMajorErrors, a3TemperatureReadingLowerThreshold_Criti=a3TemperatureReadingLowerThreshold_Criti, a2BatteryVoltage=a2BatteryVoltage, a9999ErrorGroupId=a9999ErrorGroupId, a5PrerequisiteDiagnosticFunction=a5PrerequisiteDiagnosticFunction, pwrchuteEventError=pwrchuteEventError, DmiComponentIndex=DmiComponentIndex, eDiagnosticResults=eDiagnosticResults, a9999VendorCode1=a9999VendorCode1, DmiGauge=DmiGauge, eDiagnosticRequestGroup=eDiagnosticRequestGroup, a8Selfid=a8Selfid, eComponentid=eComponentid, a4FatalErrorCount=a4FatalErrorCount, a8NumberOfWarnings=a8NumberOfWarnings, a3TemperatureProbeTableIndex=a3TemperatureProbeTableIndex, a2TemperatureProbeIndex=a2TemperatureProbeIndex, products=products, tErrorControlGroup=tErrorControlGroup, 
a4CurrentErrorStatus=a4CurrentErrorStatus, a4UsageState=a4UsageState, a2SecondsOnBattery=a2SecondsOnBattery, a3TemperatureReadingLowerThreshold_Non_r=a3TemperatureReadingLowerThreshold_Non_r, software=software, tMiftomib=tMiftomib, a3MonitoredTemperatureNominalReading=a3MonitoredTemperatureNominalReading, tTrapGroup=tTrapGroup, a2BatteryStatus=a2BatteryStatus, DmiDateX=DmiDateX, a99MibName=a99MibName, a1SerialNumber=a1SerialNumber, eUpsBattery=eUpsBattery, a8AlarmGeneration=a8AlarmGeneration, a4AdministrativeState=a4AdministrativeState, tDiagnosticRequestGroup=tDiagnosticRequestGroup, a3TemperatureReadingUpperThreshold_Non_r=a3TemperatureReadingUpperThreshold_Non_r, a7FaultIsolatedToThisComponent=a7FaultIsolatedToThisComponent, a7DiagnosticFunctionResultDescription=a7DiagnosticFunctionResultDescription, a5DiagnosticFunctionName=a5DiagnosticFunctionName, a9999VendorText=a9999VendorText) |
class frodo:
def __init__(self,x):
self.x = x
def __less__(self,other):
if self.x < other.x:
return False
else:
return True
a = frodo(10)
b = frodo(50)
print(a<b) | class Frodo:
def __init__(self, x):
self.x = x
def __less__(self, other):
if self.x < other.x:
return False
else:
return True
a = frodo(10)
b = frodo(50)
print(a < b) |
print("%(lang)s is fun!" % {"lang":"test"})
#Output
"""
Python is fun!
"""
| print('%(lang)s is fun!' % {'lang': 'test'})
'\nPython is fun!\n' |
# forcing a build
def recurring_fibonacci_number(number: int) -> int:
"""
Calculates the fibonacci number needed.
:param number: (int) the Fibonacci number to be calculated
:return: (Optional[int]) the calculated fibonacci number
"""
if number < 0:
raise ValueError("Fibonacci has to be equal or above zero")
elif number <= 1:
return number
else:
return recurring_fibonacci_number(number - 1) + \
recurring_fibonacci_number(number - 2)
| def recurring_fibonacci_number(number: int) -> int:
"""
Calculates the fibonacci number needed.
:param number: (int) the Fibonacci number to be calculated
:return: (Optional[int]) the calculated fibonacci number
"""
if number < 0:
raise value_error('Fibonacci has to be equal or above zero')
elif number <= 1:
return number
else:
return recurring_fibonacci_number(number - 1) + recurring_fibonacci_number(number - 2) |
better_eyesight = False
gold_mult = 1
legday_mult = 1
lifesteal_mult = 0
max_health_mult = 1
acid_blood_mult = 0
bleeding = 0
soul_collector = False
soul_eater = False
soul_blast = False
damage_mult = 1
knockback_mult = 1
resistance_mult = 1
enemy_health_mult = 1
def reset_multipliers():
global better_eyesight, gold_mult, legday_mult, lifesteal_mult, max_health_mult, acid_blood_mult, bleeding, soul_collector, soul_eater, soul_blast, damage_mult, knockback_mult, resistance_mult
better_eyesight = False
gold_mult = 1
legday_mult = 1
lifesteal_mult = 0
max_health_mult = 1
acid_blood_mult = 0
bleeding = 0
soul_collector = False
soul_eater = False
soul_blast = False
damage_mult = 1
knockback_mult = 1
resistance_mult = 1
coins = 0
| better_eyesight = False
gold_mult = 1
legday_mult = 1
lifesteal_mult = 0
max_health_mult = 1
acid_blood_mult = 0
bleeding = 0
soul_collector = False
soul_eater = False
soul_blast = False
damage_mult = 1
knockback_mult = 1
resistance_mult = 1
enemy_health_mult = 1
def reset_multipliers():
global better_eyesight, gold_mult, legday_mult, lifesteal_mult, max_health_mult, acid_blood_mult, bleeding, soul_collector, soul_eater, soul_blast, damage_mult, knockback_mult, resistance_mult
better_eyesight = False
gold_mult = 1
legday_mult = 1
lifesteal_mult = 0
max_health_mult = 1
acid_blood_mult = 0
bleeding = 0
soul_collector = False
soul_eater = False
soul_blast = False
damage_mult = 1
knockback_mult = 1
resistance_mult = 1
coins = 0 |
countdown_3_grid = [{(11, 16): 2, (4, 18): 2, (7, 16): 2, (11, 14): 2, (9, 18): 2, (7, 15): 2, (5, 18): 2, (10, 18): 2, (4, 13): 2, (11, 18): 2, (11, 13): 2, (7, 14): 2, (6, 18): 2, (4, 14): 2, (7, 18): 2, (4, 16): 2, (11, 17): 2, (20, 2): 3, (4, 15): 2, (4, 17): 2, (8, 18): 2, (7, 17): 2, (11, 15): 2}, (20, 2)]
countdown_2_grid = [{(11, 16): 2, (5, 13): 2, (7, 16): 2, (11, 14): 2, (6, 17): 2, (9, 15): 2, (5, 12): 2, (5, 18): 2, (10, 12): 2, (4, 13): 2, (11, 18): 2, (8, 16): 2, (8, 15): 2, (11, 13): 2, (6, 18): 2, (9, 14): 2, (5, 17): 2, (10, 13): 2, (4, 14): 2, (4, 16): 2, (9, 16): 2, (8, 17): 2, (11, 12): 2, (9, 13): 2, (11, 17): 2, (20, 2): 3, (4, 15): 2, (4, 17): 2, (10, 14): 2, (7, 17): 2, (11, 15): 2}, (20, 2)]
countdown_1_grid = [{(11, 16): 2, (5, 14): 2, (11, 17): 2, (7, 15): 2, (9, 15): 2, (20, 2): 3, (4, 15): 2, (10, 15): 2, (5, 15): 2, (11, 14): 2, (11, 15): 2, (6, 14): 2, (6, 15): 2, (8, 15): 2, (11, 13): 2}, (20, 2)]
init_grid = [{(14, 17): 2, (12, 1): 2, (16, 9): 2, (6, 28): 2, (18, 4): 2, (7, 25): 2, (13, 17): 2, (12, 31): 2, (4, 2): 2, (3, 7): 2, (6, 7): 2, (4, 19): 2, (6, 10): 2, (7, 19): 2, (14, 1): 2, (18, 9): 2, (15, 4): 2, (12, 28): 2, (1, 1): 2, (15, 18): 2, (14, 14): 2, (14, 24): 2, (4, 16): 2, (15, 29): 2, (12, 11): 2, (17, 13): 2, (7, 22): 2, (15, 1): 2, (12, 22): 2, (17, 18): 2, (4, 15): 2, (3, 1): 2, (16, 18): 2, (4, 26): 2, (6, 13): 2, (4, 21): 2, (15, 16): 2, (12, 8): 2, (1, 21): 2, (7, 21): 2, (12, 27): 2, (1, 26): 2, (17, 29): 2, (2, 1): 2, (1, 15): 2, (5, 1): 2, (15, 22): 2, (12, 2): 2, (7, 15): 2, (1, 16): 2, (7, 1): 2, (16, 29): 2, (17, 24): 2, (13, 29): 2, (18, 24): 2, (4, 1): 2, (4, 28): 2, (14, 21): 2, (16, 4): 2, (12, 7): 2, (1, 7): 2, (16, 23): 2, (1, 19): 2, (12, 29): 2, (18, 21): 2, (2, 7): 2, (16, 13): 2, (6, 1): 2, (5, 7): 2, (12, 4): 2, (15, 9): 2, (13, 9): 2, (12, 23): 2, (15, 21): 2, (1, 3): 2, (4, 8): 2, (5, 13): 2, (4, 27): 2, (15, 13): 2, (4, 22): 2, (14, 9): 2, (12, 9): 2, (1, 20): 2, (7, 20): 2, (2, 19): 2, (1, 25): 2, (3, 10): 2, (1, 14): 2, (4, 13): 2, (2, 13): 2, (12, 3): 2, (7, 14): 2, (1, 28): 2, (18, 2): 2, (7, 27): 2, (7, 28): 2, (1, 9): 2, (1, 22): 2, (14, 29): 2, (18, 1): 2, (7, 26): 2, (7, 13): 2, (17, 21): 2, (12, 30): 2, (2, 25): 2, (18, 18): 2, (7, 16): 2, (3, 4): 2, (2, 4): 2, (5, 9): 2, (4, 7): 2, (3, 25): 2, (7, 7): 2, (15, 3): 2, (16, 21): 2, (12, 16): 2, (13, 21): 2, (1, 2): 2, (4, 9): 2, (18, 29): 2, (7, 10): 2, (6, 22): 2, (5, 22): 2, (12, 10): 2, (15, 15): 2, (12, 21): 2, (18, 13): 2, (13, 24): 2, (3, 13): 2, (1, 13): 2, (4, 14): 2, (2, 10): 2, (4, 25): 2, (3, 19): 2, (5, 28): 2, (4, 20): 2, (17, 4): 2, (15, 23): 2, (13, 1): 2, (12, 15): 2, (18, 3): 2, (17, 9): 2, (15, 2): 2, (13, 14): 2, (1, 27): 2, (1, 8): 2, (4, 3): 2}]
| countdown_3_grid = [{(11, 16): 2, (4, 18): 2, (7, 16): 2, (11, 14): 2, (9, 18): 2, (7, 15): 2, (5, 18): 2, (10, 18): 2, (4, 13): 2, (11, 18): 2, (11, 13): 2, (7, 14): 2, (6, 18): 2, (4, 14): 2, (7, 18): 2, (4, 16): 2, (11, 17): 2, (20, 2): 3, (4, 15): 2, (4, 17): 2, (8, 18): 2, (7, 17): 2, (11, 15): 2}, (20, 2)]
countdown_2_grid = [{(11, 16): 2, (5, 13): 2, (7, 16): 2, (11, 14): 2, (6, 17): 2, (9, 15): 2, (5, 12): 2, (5, 18): 2, (10, 12): 2, (4, 13): 2, (11, 18): 2, (8, 16): 2, (8, 15): 2, (11, 13): 2, (6, 18): 2, (9, 14): 2, (5, 17): 2, (10, 13): 2, (4, 14): 2, (4, 16): 2, (9, 16): 2, (8, 17): 2, (11, 12): 2, (9, 13): 2, (11, 17): 2, (20, 2): 3, (4, 15): 2, (4, 17): 2, (10, 14): 2, (7, 17): 2, (11, 15): 2}, (20, 2)]
countdown_1_grid = [{(11, 16): 2, (5, 14): 2, (11, 17): 2, (7, 15): 2, (9, 15): 2, (20, 2): 3, (4, 15): 2, (10, 15): 2, (5, 15): 2, (11, 14): 2, (11, 15): 2, (6, 14): 2, (6, 15): 2, (8, 15): 2, (11, 13): 2}, (20, 2)]
init_grid = [{(14, 17): 2, (12, 1): 2, (16, 9): 2, (6, 28): 2, (18, 4): 2, (7, 25): 2, (13, 17): 2, (12, 31): 2, (4, 2): 2, (3, 7): 2, (6, 7): 2, (4, 19): 2, (6, 10): 2, (7, 19): 2, (14, 1): 2, (18, 9): 2, (15, 4): 2, (12, 28): 2, (1, 1): 2, (15, 18): 2, (14, 14): 2, (14, 24): 2, (4, 16): 2, (15, 29): 2, (12, 11): 2, (17, 13): 2, (7, 22): 2, (15, 1): 2, (12, 22): 2, (17, 18): 2, (4, 15): 2, (3, 1): 2, (16, 18): 2, (4, 26): 2, (6, 13): 2, (4, 21): 2, (15, 16): 2, (12, 8): 2, (1, 21): 2, (7, 21): 2, (12, 27): 2, (1, 26): 2, (17, 29): 2, (2, 1): 2, (1, 15): 2, (5, 1): 2, (15, 22): 2, (12, 2): 2, (7, 15): 2, (1, 16): 2, (7, 1): 2, (16, 29): 2, (17, 24): 2, (13, 29): 2, (18, 24): 2, (4, 1): 2, (4, 28): 2, (14, 21): 2, (16, 4): 2, (12, 7): 2, (1, 7): 2, (16, 23): 2, (1, 19): 2, (12, 29): 2, (18, 21): 2, (2, 7): 2, (16, 13): 2, (6, 1): 2, (5, 7): 2, (12, 4): 2, (15, 9): 2, (13, 9): 2, (12, 23): 2, (15, 21): 2, (1, 3): 2, (4, 8): 2, (5, 13): 2, (4, 27): 2, (15, 13): 2, (4, 22): 2, (14, 9): 2, (12, 9): 2, (1, 20): 2, (7, 20): 2, (2, 19): 2, (1, 25): 2, (3, 10): 2, (1, 14): 2, (4, 13): 2, (2, 13): 2, (12, 3): 2, (7, 14): 2, (1, 28): 2, (18, 2): 2, (7, 27): 2, (7, 28): 2, (1, 9): 2, (1, 22): 2, (14, 29): 2, (18, 1): 2, (7, 26): 2, (7, 13): 2, (17, 21): 2, (12, 30): 2, (2, 25): 2, (18, 18): 2, (7, 16): 2, (3, 4): 2, (2, 4): 2, (5, 9): 2, (4, 7): 2, (3, 25): 2, (7, 7): 2, (15, 3): 2, (16, 21): 2, (12, 16): 2, (13, 21): 2, (1, 2): 2, (4, 9): 2, (18, 29): 2, (7, 10): 2, (6, 22): 2, (5, 22): 2, (12, 10): 2, (15, 15): 2, (12, 21): 2, (18, 13): 2, (13, 24): 2, (3, 13): 2, (1, 13): 2, (4, 14): 2, (2, 10): 2, (4, 25): 2, (3, 19): 2, (5, 28): 2, (4, 20): 2, (17, 4): 2, (15, 23): 2, (13, 1): 2, (12, 15): 2, (18, 3): 2, (17, 9): 2, (15, 2): 2, (13, 14): 2, (1, 27): 2, (1, 8): 2, (4, 3): 2}] |
description = 'Kompass standard instrument'
group = 'basic'
includes = ['mono', 'guidefocus', 'selector', 'astrium', 'sample',
'reactor',
#'detector',
]
| description = 'Kompass standard instrument'
group = 'basic'
includes = ['mono', 'guidefocus', 'selector', 'astrium', 'sample', 'reactor'] |
class GraphQLEnabledModel:
    """Marker base class.

    Models that subclass it are picked up and registered dynamically as
    GraphQL object types.
    """

    pass
class GraphQLField:
    """Metadata about a model field to be registered on a GraphQL object type.

    :param name: Name of the field (stripped; must be a non-empty string).
    :param resolve_func: Optional callable used to resolve data for this field.
    :type resolve_func: callable
    :param graphql_type: Optional Graphene type used by the field.
    :raises TypeError: if *name* is not a string.
    :raises ValueError: if *name* is empty after stripping.
    """

    def __init__(self, name, resolve_func=None, graphql_type=None):
        if not isinstance(name, str):
            raise TypeError('Name has to be a string')
        cleaned = name.strip()
        if not cleaned:
            raise ValueError('Field name cannot be empty')
        self._name = cleaned
        self._resolve_func = resolve_func
        self._graphql_type = graphql_type

    @property
    def name(self):
        """Stripped field name."""
        return self._name

    @property
    def resolve_func(self):
        """Custom resolver, or None."""
        return self._resolve_func

    @property
    def graphql_type(self):
        """Graphene type, or None."""
        return self._graphql_type
| class Graphqlenabledmodel:
"""
Subclass used by all the models that are dynamically registered
as a GraphQL object type.
"""
pass
class Graphqlfield:
"""
Specify metadata about a model field that is to be registered at
a GraphQL object type.
:param name: Name of the field.
:param resolve_func: A custom resolve function that will be used to
resolve data for this field.
:type resolve_func: callable
:param graphql_type: Graphene type that will be used by that field.
"""
def __init__(self, name, resolve_func=None, graphql_type=None):
if not isinstance(name, str):
raise type_error('Name has to be a string')
name = name.strip()
if not name:
raise value_error('Field name cannot be empty')
self.__name = name
self.__resolve_func = resolve_func
self.__graphql_type = graphql_type
@property
def name(self):
return self.__name
@property
def resolve_func(self):
return self.__resolve_func
@property
def graphql_type(self):
return self.__graphql_type |
"""Websauna Depot models.
Place your SQLAlchemy models in this file.
"""
| """Websauna Depot models.
Place your SQLAlchemy models in this file.
""" |
# Use this to take notes on the Edpuzzle video. Try each example rather than just watching it - you will get much more out of it!
# Most things are commented out because they can't all coexist without a syntax error
user = {"name": "Kasey", "age": 15, "courses": ["History, CompSci"]}
for key, value in user.items():
print(key, value)
#print(user.items())
#print(user.values())
#print(user.keys())
#print(len(user))
#age=user.pop("age")
# del user['age']
'''user.update({"name": "Bob", "age": 25, "phone": "888-8888"})
user['phone'] = '888-8888'
user['name']='Bob'''''
#print(user.get("age",'not found'))
#print(user)
#print(age) | user = {'name': 'Kasey', 'age': 15, 'courses': ['History, CompSci']}
for (key, value) in user.items():
print(key, value)
'user.update({"name": "Bob", "age": 25, "phone": "888-8888"})\nuser[\'phone\'] = \'888-8888\'\nuser[\'name\']=\'Bob' |
class Solution:
    def createTargetArray(self, nums: List[int], index: List[int]) -> List[int]:
        """Build the target array by inserting nums[i] at position index[i],
        in order (LeetCode 1389)."""
        target = []
        for position, value in zip(index, nums):
            # list.insert is equivalent to the original slice-splice:
            # it inserts before `position`, appending when position >= len.
            target.insert(position, value)
        return target
| class Solution:
def create_target_array(self, nums: List[int], index: List[int]) -> List[int]:
target = []
for (i, num) in enumerate(nums):
idx = index[i]
target = target[:idx] + [num] + target[idx:]
return target |
class MySQLClimateQuery:
    """Raw MySQL DDL statements for the climate-related tables."""

    @staticmethod
    def drop_sport_climates():
        """DROP statement for the sport<->climate join table."""
        return 'DROP TABLE IF EXISTS sport_climates'

    @staticmethod
    def create_sport_climates():
        """CREATE statement for the sport<->climate join table."""
        return ('CREATE TABLE sport_climates (sport_id int NOT NULL,'
                'climate_name varchar(50) NOT NULL,'
                'PRIMARY KEY (sport_id, climate_name),'
                'FOREIGN KEY (sport_id) REFERENCES sports(id) ON DELETE CASCADE,'
                'FOREIGN KEY (climate_name) REFERENCES climates(name) ON DELETE CASCADE);')

    @staticmethod
    def drop_practice_center_climates():
        """DROP statement for the practice-center<->climate join table."""
        return 'DROP TABLE IF EXISTS practice_center_climates'

    @staticmethod
    def create_practice_center_climates():
        """CREATE statement for the practice-center<->climate join table."""
        return ('CREATE TABLE practice_center_climates (practice_center_id int NOT NULL,'
                'climate_name varchar(50) NOT NULL,'
                'PRIMARY KEY (practice_center_id, climate_name),'
                'FOREIGN KEY (practice_center_id) REFERENCES practice_centers(id) '
                'ON DELETE CASCADE,'
                'FOREIGN KEY (climate_name) REFERENCES climates(name) ON DELETE CASCADE);')

    @staticmethod
    def drop_climates():
        """DROP statement for the climates table."""
        return 'DROP TABLE IF EXISTS climates'

    @staticmethod
    def create_climates():
        """CREATE statement for the climates table."""
        return 'CREATE TABLE climates (name varchar(50) NOT NULL PRIMARY KEY);'
| class Mysqlclimatequery:
@staticmethod
def drop_sport_climates():
return 'DROP TABLE IF EXISTS sport_climates'
@staticmethod
def create_sport_climates():
return 'CREATE TABLE sport_climates (sport_id int NOT NULL,climate_name varchar(50) NOT NULL,PRIMARY KEY (sport_id, climate_name),FOREIGN KEY (sport_id) REFERENCES sports(id) ON DELETE CASCADE,FOREIGN KEY (climate_name) REFERENCES climates(name) ON DELETE CASCADE);'
@staticmethod
def drop_practice_center_climates():
return 'DROP TABLE IF EXISTS practice_center_climates'
@staticmethod
def create_practice_center_climates():
return 'CREATE TABLE practice_center_climates (practice_center_id int NOT NULL,climate_name varchar(50) NOT NULL,PRIMARY KEY (practice_center_id, climate_name),FOREIGN KEY (practice_center_id) REFERENCES practice_centers(id) ON DELETE CASCADE,FOREIGN KEY (climate_name) REFERENCES climates(name) ON DELETE CASCADE);'
@staticmethod
def drop_climates():
return 'DROP TABLE IF EXISTS climates'
@staticmethod
def create_climates():
return 'CREATE TABLE climates (name varchar(50) NOT NULL PRIMARY KEY);' |
#!/usr/bin/env python3
# TODO nedd develop logic for handling strucutre with figure element
class FilterModule(object):
    """Ansible filter plugin exposing a single ``json_select`` filter.

    NOTE(review): indentation was reconstructed from a flattened dump —
    confirm nesting (especially the ``del`` in json_select) against the
    original file.
    """

    def filters(self):
        # Ansible plugin entry point: maps filter name -> implementation.
        return {
            'json_select': self.json_select
        }

    def jmagik(self, jbody, jpth, jfil):
        # Resolve the sub-structure of ``jbody`` addressed by ``jpth``, then
        # merge the dict ``jfil`` into every element of it.
        # ``jpth`` may be: "" (use jbody itself), an int index, or an
        # iterable of keys/indices to walk down.
        if jpth != "" and type(jpth) is not int:
            jvar=jbody
            for i in jpth:
                jvar=jvar[i]
        elif type(jpth) is int:
            jvar=jbody[jpth]
        else:
            jvar=jbody
        if type(jvar) is not list: # wrap a single dict so the merge loop below always sees [{...}, ...]
            jvar = [jvar]
        for nm in range(len(jvar)): # merge jfil into each element
            # NOTE(review): this inner loop calls update() once per key of
            # jvar[nm]; a single update() per element would give the same
            # result — presumably redundant, confirm before simplifying.
            for i in list((jvar[nm])): # snapshot keys so update() can't change iteration
                jvar[nm].update(jfil)
        return jvar

    def json_select(self, jbody, jpth, jfil):
        # Apply jmagik at the location addressed by jpth and splice the
        # result back into jbody.
        if(jpth != "" and type(jpth) is not int ):
            # NOTE(review): ``type(jbody is list)`` evaluates
            # ``type(<bool>)`` which is always truthy, so the else branch
            # below is unreachable. Suspected typo for
            # ``type(jbody) is list`` — confirm intent before fixing.
            if type(jbody is list):
                jbody = self.jmagik(jbody, jpth, jfil)
            else:
                jbody[str(jpth)] = self.jmagik(jbody, jpth, jfil)
                del jbody[str(jpth)]
        elif(type(jpth) is int):
            jbody[jpth] = self.jmagik(jbody, jpth, jfil)
        else:
            jbody = self.jmagik(jbody, jpth, jfil)
        return jbody
| class Filtermodule(object):
def filters(self):
return {'json_select': self.json_select}
def jmagik(self, jbody, jpth, jfil):
if jpth != '' and type(jpth) is not int:
jvar = jbody
for i in jpth:
jvar = jvar[i]
elif type(jpth) is int:
jvar = jbody[jpth]
else:
jvar = jbody
if type(jvar) is not list:
jvar = [jvar]
for nm in range(len(jvar)):
for i in list(jvar[nm]):
jvar[nm].update(jfil)
return jvar
def json_select(self, jbody, jpth, jfil):
if jpth != '' and type(jpth) is not int:
if type(jbody is list):
jbody = self.jmagik(jbody, jpth, jfil)
else:
jbody[str(jpth)] = self.jmagik(jbody, jpth, jfil)
del jbody[str(jpth)]
elif type(jpth) is int:
jbody[jpth] = self.jmagik(jbody, jpth, jfil)
else:
jbody = self.jmagik(jbody, jpth, jfil)
return jbody |
# Module-level registry of named commands.
__pycmd_map = {}


def register_pycmd(name, pycmd):
    """Store *pycmd* in the registry under *name*."""
    __pycmd_map[name] = pycmd


def get_pycmd(name):
    """Resolve a command: a registered name, a callable passed through,
    or None when neither applies."""
    if isinstance(name, str) and name in __pycmd_map:
        return __pycmd_map[name]
    if callable(name):
        return name
    return None
class PyCmdOption(object):
    """Bundle of the global/local namespaces a command executes with."""

    def __init__(self, globals, locals):
        self._globals = globals
        self._locals = locals

    def globals(self):
        """Return the globals mapping supplied at construction."""
        return self._globals

    def locals(self):
        """Return the locals mapping supplied at construction."""
        return self._locals
class PyCmd(object):
    """Callable wrapper pairing a command body with a name and optional
    input/output type metadata."""

    def __init__(self, body, name, inType=None, outType=None):
        self._body = body
        self._name = name
        self._in_type = inType
        self._out_type = outType

    def __call__(self, *args, **kwds):
        # Delegate straight to the wrapped callable.
        return self._body(*args, **kwds)

    def name(self):
        """Command name."""
        return self._name

    def inType(self):
        """Input type metadata (or None)."""
        return self._in_type

    def outType(self):
        """Output type metadata (or None)."""
        return self._out_type
def pycmd(*args, **kwds):
    """Decorator that wraps a function in a PyCmd and registers it.

    Supports both bare usage (``@pycmd`` / ``pycmd(func)``) and
    parameterised usage (``@pycmd(name=..., inType=..., outType=...)``).

    :raises Exception: when called with neither a callable nor options.
    """
    if args:
        # Bare usage: the single positional argument is the callable.
        assert len(args) == 1
        assert not kwds
        assert callable(args[0])
        cmd = args[0]
        if not isinstance(cmd, PyCmd):
            # Bug fix: ``func_name`` is Python 2 only; Python 3 exposes the
            # function name as ``__name__``.
            cmd = PyCmd(cmd, name=cmd.__name__)
        register_pycmd(cmd.name(), cmd)
        return cmd
    if kwds:
        # Parameterised usage: return a decorator closing over the options.
        def register(func):
            if 'name' not in kwds:
                kwds['name'] = func.__name__
            cmd = PyCmd(func, **kwds)
            register_pycmd(cmd.name(), cmd)
            # Bug fix: the original returned None here, so the decorated
            # name was clobbered; return the wrapper like the bare path does.
            return cmd
        return register
    raise Exception('Wrong params')
class IOType(object):
    """Enumeration of command I/O channel kinds."""

    Python = 1  # in-process Python objects
    File = 2    # file-based I/O
    No = 3      # no I/O
| __pycmd_map = {}
def register_pycmd(name, pycmd):
__pycmd_map[name] = pycmd
def get_pycmd(name):
if isinstance(name, str) and name in __pycmd_map:
return __pycmd_map[name]
elif callable(name):
return name
else:
return None
class Pycmdoption(object):
def __init__(self, globals, locals):
self.__globals = globals
self.__locals = locals
def globals(self):
return self.__globals
def locals(self):
return self.__locals
class Pycmd(object):
def __init__(self, body, name, inType=None, outType=None):
self.__body = body
self.__name = name
self.__inType = inType
self.__outType = outType
def __call__(self, *args, **kwds):
return self.__body(*args, **kwds)
def name(self):
return self.__name
def in_type(self):
return self.__inType
def out_type(self):
return self.__outType
def pycmd(*args, **kwds):
if args:
assert len(args) == 1
assert not kwds
assert callable(args[0])
cmd = args[0]
if not isinstance(cmd, PyCmd):
cmd = py_cmd(cmd, name=cmd.func_name)
register_pycmd(cmd.name(), cmd)
return cmd
if kwds:
assert not args
def register(func):
if 'name' not in kwds:
kwds['name'] = func.func_name
cmd = py_cmd(func, **kwds)
register_pycmd(cmd.name(), cmd)
return register
else:
raise exception('Wrong params')
class Iotype(object):
python = 1
file = 2
no = 3 |
"""
Given a set, remove all the even numbers from
it, and for each even number removed, add
"Removed [insert the even number you removed]".
Example: {1,54, 2, 5} becomes {"Removed 54", 1,
5, "Removed 2"}. It is possible to solve this
problem using either discard or remove.
"""
def odd_set_day(given_set):
    """Remove every even number from *given_set* in place, adding a
    "Removed <n>" marker string for each value taken out."""
    # Collect first so the set is not mutated while being iterated.
    evens = [value for value in given_set if value % 2 == 0]
    for value in evens:
        given_set.discard(value)
        given_set.add('Removed ' + str(value))


given_set = {1, 2, 4, 5}
odd_set_day(given_set)
print(given_set)
| """
Given a set, remove all the even numbers from
it, and for each even number removed, add
"Removed [insert the even number you removed]".
Example: {1,54, 2, 5} becomes {"Removed 54", 1,
5, "Removed 2"}. It is possible to solve this
problem using either discard or remove.
"""
def odd_set_day(given_set):
add_remove = []
for elem in given_set:
if elem % 2 == 0:
add_remove.append(elem)
for remove in add_remove:
given_set.remove(remove)
given_set.add('Removed ' + str(remove))
given_set = {1, 2, 4, 5}
odd_set_day(given_set)
print(given_set) |
def format_words(words):
if not words:
return ""
while "" in words:
words.remove("")
if not words:
return ""
elif len(words)==1:
return words[0]
return ", ".join(words[:-1])+" and "+words[-1] | def format_words(words):
if not words:
return ''
while '' in words:
words.remove('')
if not words:
return ''
elif len(words) == 1:
return words[0]
return ', '.join(words[:-1]) + ' and ' + words[-1] |
class Solution:
    def removeDuplicates(self, nums):
        """Remove consecutive duplicates from the sorted list *nums* in
        place and return the new length (LeetCode 26).

        Rewritten from the original pop-in-loop version — each
        ``list.pop(i)`` shifts the whole tail, making it O(n^2) — to a
        single O(n) pass. The end state of *nums* and the return value
        are unchanged.
        """
        deduped = []
        for value in nums:
            # Keep a value only when it differs from the last kept one,
            # exactly mirroring the original lastNum comparison.
            if not deduped or deduped[-1] != value:
                deduped.append(value)
        nums[:] = deduped  # slice-assign so callers holding the list see it
        return len(nums)


if __name__ == "__main__":
    test = Solution()
    nums = [0, 0, 1, 1, 1, 2, 2, 3, 3, 4]
    ans = test.removeDuplicates(nums)
    print(nums)
| class Solution:
def remove_duplicates(self, nums):
i = 0
while i < len(nums):
if i == 0:
last_num = nums[i]
elif lastNum != nums[i]:
last_num = nums[i]
else:
nums.pop(i)
continue
i = i + 1
return len(nums)
if __name__ == '__main__':
test = solution()
nums = [0, 0, 1, 1, 1, 2, 2, 3, 3, 4]
ans = test.removeDuplicates(nums)
print(nums) |
data = (
'You ', # 0x00
'Yang ', # 0x01
'Lu ', # 0x02
'Si ', # 0x03
'Jie ', # 0x04
'Ying ', # 0x05
'Du ', # 0x06
'Wang ', # 0x07
'Hui ', # 0x08
'Xie ', # 0x09
'Pan ', # 0x0a
'Shen ', # 0x0b
'Biao ', # 0x0c
'Chan ', # 0x0d
'Mo ', # 0x0e
'Liu ', # 0x0f
'Jian ', # 0x10
'Pu ', # 0x11
'Se ', # 0x12
'Cheng ', # 0x13
'Gu ', # 0x14
'Bin ', # 0x15
'Huo ', # 0x16
'Xian ', # 0x17
'Lu ', # 0x18
'Qin ', # 0x19
'Han ', # 0x1a
'Ying ', # 0x1b
'Yong ', # 0x1c
'Li ', # 0x1d
'Jing ', # 0x1e
'Xiao ', # 0x1f
'Ying ', # 0x20
'Sui ', # 0x21
'Wei ', # 0x22
'Xie ', # 0x23
'Huai ', # 0x24
'Hao ', # 0x25
'Zhu ', # 0x26
'Long ', # 0x27
'Lai ', # 0x28
'Dui ', # 0x29
'Fan ', # 0x2a
'Hu ', # 0x2b
'Lai ', # 0x2c
None, # 0x2d
None, # 0x2e
'Ying ', # 0x2f
'Mi ', # 0x30
'Ji ', # 0x31
'Lian ', # 0x32
'Jian ', # 0x33
'Ying ', # 0x34
'Fen ', # 0x35
'Lin ', # 0x36
'Yi ', # 0x37
'Jian ', # 0x38
'Yue ', # 0x39
'Chan ', # 0x3a
'Dai ', # 0x3b
'Rang ', # 0x3c
'Jian ', # 0x3d
'Lan ', # 0x3e
'Fan ', # 0x3f
'Shuang ', # 0x40
'Yuan ', # 0x41
'Zhuo ', # 0x42
'Feng ', # 0x43
'She ', # 0x44
'Lei ', # 0x45
'Lan ', # 0x46
'Cong ', # 0x47
'Qu ', # 0x48
'Yong ', # 0x49
'Qian ', # 0x4a
'Fa ', # 0x4b
'Guan ', # 0x4c
'Que ', # 0x4d
'Yan ', # 0x4e
'Hao ', # 0x4f
'Hyeng ', # 0x50
'Sa ', # 0x51
'Zan ', # 0x52
'Luan ', # 0x53
'Yan ', # 0x54
'Li ', # 0x55
'Mi ', # 0x56
'Shan ', # 0x57
'Tan ', # 0x58
'Dang ', # 0x59
'Jiao ', # 0x5a
'Chan ', # 0x5b
None, # 0x5c
'Hao ', # 0x5d
'Ba ', # 0x5e
'Zhu ', # 0x5f
'Lan ', # 0x60
'Lan ', # 0x61
'Nang ', # 0x62
'Wan ', # 0x63
'Luan ', # 0x64
'Xun ', # 0x65
'Xian ', # 0x66
'Yan ', # 0x67
'Gan ', # 0x68
'Yan ', # 0x69
'Yu ', # 0x6a
'Huo ', # 0x6b
'Si ', # 0x6c
'Mie ', # 0x6d
'Guang ', # 0x6e
'Deng ', # 0x6f
'Hui ', # 0x70
'Xiao ', # 0x71
'Xiao ', # 0x72
'Hu ', # 0x73
'Hong ', # 0x74
'Ling ', # 0x75
'Zao ', # 0x76
'Zhuan ', # 0x77
'Jiu ', # 0x78
'Zha ', # 0x79
'Xie ', # 0x7a
'Chi ', # 0x7b
'Zhuo ', # 0x7c
'Zai ', # 0x7d
'Zai ', # 0x7e
'Can ', # 0x7f
'Yang ', # 0x80
'Qi ', # 0x81
'Zhong ', # 0x82
'Fen ', # 0x83
'Niu ', # 0x84
'Jiong ', # 0x85
'Wen ', # 0x86
'Po ', # 0x87
'Yi ', # 0x88
'Lu ', # 0x89
'Chui ', # 0x8a
'Pi ', # 0x8b
'Kai ', # 0x8c
'Pan ', # 0x8d
'Yan ', # 0x8e
'Kai ', # 0x8f
'Pang ', # 0x90
'Mu ', # 0x91
'Chao ', # 0x92
'Liao ', # 0x93
'Gui ', # 0x94
'Kang ', # 0x95
'Tun ', # 0x96
'Guang ', # 0x97
'Xin ', # 0x98
'Zhi ', # 0x99
'Guang ', # 0x9a
'Guang ', # 0x9b
'Wei ', # 0x9c
'Qiang ', # 0x9d
None, # 0x9e
'Da ', # 0x9f
'Xia ', # 0xa0
'Zheng ', # 0xa1
'Zhu ', # 0xa2
'Ke ', # 0xa3
'Zhao ', # 0xa4
'Fu ', # 0xa5
'Ba ', # 0xa6
'Duo ', # 0xa7
'Duo ', # 0xa8
'Ling ', # 0xa9
'Zhuo ', # 0xaa
'Xuan ', # 0xab
'Ju ', # 0xac
'Tan ', # 0xad
'Pao ', # 0xae
'Jiong ', # 0xaf
'Pao ', # 0xb0
'Tai ', # 0xb1
'Tai ', # 0xb2
'Bing ', # 0xb3
'Yang ', # 0xb4
'Tong ', # 0xb5
'Han ', # 0xb6
'Zhu ', # 0xb7
'Zha ', # 0xb8
'Dian ', # 0xb9
'Wei ', # 0xba
'Shi ', # 0xbb
'Lian ', # 0xbc
'Chi ', # 0xbd
'Huang ', # 0xbe
None, # 0xbf
'Hu ', # 0xc0
'Shuo ', # 0xc1
'Lan ', # 0xc2
'Jing ', # 0xc3
'Jiao ', # 0xc4
'Xu ', # 0xc5
'Xing ', # 0xc6
'Quan ', # 0xc7
'Lie ', # 0xc8
'Huan ', # 0xc9
'Yang ', # 0xca
'Xiao ', # 0xcb
'Xiu ', # 0xcc
'Xian ', # 0xcd
'Yin ', # 0xce
'Wu ', # 0xcf
'Zhou ', # 0xd0
'Yao ', # 0xd1
'Shi ', # 0xd2
'Wei ', # 0xd3
'Tong ', # 0xd4
'Xue ', # 0xd5
'Zai ', # 0xd6
'Kai ', # 0xd7
'Hong ', # 0xd8
'Luo ', # 0xd9
'Xia ', # 0xda
'Zhu ', # 0xdb
'Xuan ', # 0xdc
'Zheng ', # 0xdd
'Po ', # 0xde
'Yan ', # 0xdf
'Hui ', # 0xe0
'Guang ', # 0xe1
'Zhe ', # 0xe2
'Hui ', # 0xe3
'Kao ', # 0xe4
None, # 0xe5
'Fan ', # 0xe6
'Shao ', # 0xe7
'Ye ', # 0xe8
'Hui ', # 0xe9
None, # 0xea
'Tang ', # 0xeb
'Jin ', # 0xec
'Re ', # 0xed
None, # 0xee
'Xi ', # 0xef
'Fu ', # 0xf0
'Jiong ', # 0xf1
'Che ', # 0xf2
'Pu ', # 0xf3
'Jing ', # 0xf4
'Zhuo ', # 0xf5
'Ting ', # 0xf6
'Wan ', # 0xf7
'Hai ', # 0xf8
'Peng ', # 0xf9
'Lang ', # 0xfa
'Shan ', # 0xfb
'Hu ', # 0xfc
'Feng ', # 0xfd
'Chi ', # 0xfe
'Rong ', # 0xff
)
| data = ('You ', 'Yang ', 'Lu ', 'Si ', 'Jie ', 'Ying ', 'Du ', 'Wang ', 'Hui ', 'Xie ', 'Pan ', 'Shen ', 'Biao ', 'Chan ', 'Mo ', 'Liu ', 'Jian ', 'Pu ', 'Se ', 'Cheng ', 'Gu ', 'Bin ', 'Huo ', 'Xian ', 'Lu ', 'Qin ', 'Han ', 'Ying ', 'Yong ', 'Li ', 'Jing ', 'Xiao ', 'Ying ', 'Sui ', 'Wei ', 'Xie ', 'Huai ', 'Hao ', 'Zhu ', 'Long ', 'Lai ', 'Dui ', 'Fan ', 'Hu ', 'Lai ', None, None, 'Ying ', 'Mi ', 'Ji ', 'Lian ', 'Jian ', 'Ying ', 'Fen ', 'Lin ', 'Yi ', 'Jian ', 'Yue ', 'Chan ', 'Dai ', 'Rang ', 'Jian ', 'Lan ', 'Fan ', 'Shuang ', 'Yuan ', 'Zhuo ', 'Feng ', 'She ', 'Lei ', 'Lan ', 'Cong ', 'Qu ', 'Yong ', 'Qian ', 'Fa ', 'Guan ', 'Que ', 'Yan ', 'Hao ', 'Hyeng ', 'Sa ', 'Zan ', 'Luan ', 'Yan ', 'Li ', 'Mi ', 'Shan ', 'Tan ', 'Dang ', 'Jiao ', 'Chan ', None, 'Hao ', 'Ba ', 'Zhu ', 'Lan ', 'Lan ', 'Nang ', 'Wan ', 'Luan ', 'Xun ', 'Xian ', 'Yan ', 'Gan ', 'Yan ', 'Yu ', 'Huo ', 'Si ', 'Mie ', 'Guang ', 'Deng ', 'Hui ', 'Xiao ', 'Xiao ', 'Hu ', 'Hong ', 'Ling ', 'Zao ', 'Zhuan ', 'Jiu ', 'Zha ', 'Xie ', 'Chi ', 'Zhuo ', 'Zai ', 'Zai ', 'Can ', 'Yang ', 'Qi ', 'Zhong ', 'Fen ', 'Niu ', 'Jiong ', 'Wen ', 'Po ', 'Yi ', 'Lu ', 'Chui ', 'Pi ', 'Kai ', 'Pan ', 'Yan ', 'Kai ', 'Pang ', 'Mu ', 'Chao ', 'Liao ', 'Gui ', 'Kang ', 'Tun ', 'Guang ', 'Xin ', 'Zhi ', 'Guang ', 'Guang ', 'Wei ', 'Qiang ', None, 'Da ', 'Xia ', 'Zheng ', 'Zhu ', 'Ke ', 'Zhao ', 'Fu ', 'Ba ', 'Duo ', 'Duo ', 'Ling ', 'Zhuo ', 'Xuan ', 'Ju ', 'Tan ', 'Pao ', 'Jiong ', 'Pao ', 'Tai ', 'Tai ', 'Bing ', 'Yang ', 'Tong ', 'Han ', 'Zhu ', 'Zha ', 'Dian ', 'Wei ', 'Shi ', 'Lian ', 'Chi ', 'Huang ', None, 'Hu ', 'Shuo ', 'Lan ', 'Jing ', 'Jiao ', 'Xu ', 'Xing ', 'Quan ', 'Lie ', 'Huan ', 'Yang ', 'Xiao ', 'Xiu ', 'Xian ', 'Yin ', 'Wu ', 'Zhou ', 'Yao ', 'Shi ', 'Wei ', 'Tong ', 'Xue ', 'Zai ', 'Kai ', 'Hong ', 'Luo ', 'Xia ', 'Zhu ', 'Xuan ', 'Zheng ', 'Po ', 'Yan ', 'Hui ', 'Guang ', 'Zhe ', 'Hui ', 'Kao ', None, 'Fan ', 'Shao ', 'Ye ', 'Hui ', None, 'Tang ', 'Jin ', 'Re ', None, 'Xi ', 'Fu ', 'Jiong ', 
'Che ', 'Pu ', 'Jing ', 'Zhuo ', 'Ting ', 'Wan ', 'Hai ', 'Peng ', 'Lang ', 'Shan ', 'Hu ', 'Feng ', 'Chi ', 'Rong ') |
"""
Given a binary tree, return the zigzag level order traversal
of its nodes' values.
(ie, from left to right, then right to left
for the next level and alternate between).
For example:
Given binary tree [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
return its zigzag level order traversal as:
[
[3],
[20,9],
[15,7]
]
"""
def zigzag_level(root):
    """Return the zigzag level-order traversal of a binary tree's values:
    left-to-right on the first level, then alternating direction."""
    if not root:
        return []
    result = []
    frontier = [root]
    left_to_right = True
    while frontier:
        values = [node.val for node in frontier]
        result.append(values if left_to_right else values[::-1])
        next_frontier = []
        for node in frontier:
            for child in (node.left, node.right):
                if child:
                    next_frontier.append(child)
        frontier = next_frontier
        left_to_right = not left_to_right
    return result
| """
Given a binary tree, return the zigzag level order traversal
of its nodes' values.
(ie, from left to right, then right to left
for the next level and alternate between).
For example:
Given binary tree [3,9,20,null,null,15,7],
3
/ 9 20
/ 15 7
return its zigzag level order traversal as:
[
[3],
[20,9],
[15,7]
]
"""
def zigzag_level(root):
res = []
if not root:
return res
level = [root]
flag = 1
while level:
current = []
new_level = []
for node in level:
current.append(node.val)
if node.left:
new_level.append(node.left)
if node.right:
new_level.append(node.right)
level = new_level
res.append(current[::flag])
flag *= -1
return res |
# -*- coding: utf-8 -*-
def main():
n = int(input())
mod = 10 ** 9 + 7
ans = 0
for i in range(n):
ans += ((i + 1) ** 10 - i ** 10) * (n // (i + 1)) ** 10
ans %= mod
print(ans)
if __name__ == '__main__':
main()
| def main():
n = int(input())
mod = 10 ** 9 + 7
ans = 0
for i in range(n):
ans += ((i + 1) ** 10 - i ** 10) * (n // (i + 1)) ** 10
ans %= mod
print(ans)
if __name__ == '__main__':
main() |
load("//tools/bzl:maven_jar.bzl", "maven_jar")
GUAVA_VERSION = "30.1-jre"
GUAVA_BIN_SHA1 = "00d0c3ce2311c9e36e73228da25a6e99b2ab826f"
GUAVA_DOC_URL = "https://google.github.io/guava/releases/" + GUAVA_VERSION + "/api/docs/"
TESTCONTAINERS_VERSION = "1.15.3"
def declare_nongoogle_deps():
"""loads dependencies that are not used at Google.
Changes to versions are exempt from library compliance review. New
dependencies must pass through library compliance review. This is
enforced by //lib:nongoogle_test.
"""
maven_jar(
name = "j2objc",
artifact = "com.google.j2objc:j2objc-annotations:1.1",
sha1 = "ed28ded51a8b1c6b112568def5f4b455e6809019",
)
# Transitive dependency of commons-compress
maven_jar(
name = "tukaani-xz",
artifact = "org.tukaani:xz:1.8",
sha1 = "c4f7d054303948eb6a4066194253886c8af07128",
)
maven_jar(
name = "dropwizard-core",
artifact = "io.dropwizard.metrics:metrics-core:4.1.12.1",
sha1 = "cb2f351bf4463751201f43bb99865235d5ba07ca",
)
SSHD_VERS = "2.6.0"
maven_jar(
name = "sshd-osgi",
artifact = "org.apache.sshd:sshd-osgi:" + SSHD_VERS,
sha1 = "40e365bb799e1bff3d31dc858b1e59a93c123f29",
)
maven_jar(
name = "sshd-sftp",
artifact = "org.apache.sshd:sshd-sftp:" + SSHD_VERS,
sha1 = "6eddfe8fdf59a3d9a49151e4177f8c1bebeb30c9",
)
maven_jar(
name = "eddsa",
artifact = "net.i2p.crypto:eddsa:0.3.0",
sha1 = "1901c8d4d8bffb7d79027686cfb91e704217c3e1",
)
maven_jar(
name = "mina-core",
artifact = "org.apache.mina:mina-core:2.0.21",
sha1 = "e1a317689ecd438f54e863747e832f741ef8e092",
)
maven_jar(
name = "sshd-mina",
artifact = "org.apache.sshd:sshd-mina:" + SSHD_VERS,
sha1 = "d22138ba75dee95e2123f0e53a9c514b2a766da9",
)
# elasticsearch-rest-client explicitly depends on this version
maven_jar(
name = "httpasyncclient",
artifact = "org.apache.httpcomponents:httpasyncclient:4.1.4",
sha1 = "f3a3240681faae3fa46b573a4c7e50cec9db0d86",
)
# elasticsearch-rest-client explicitly depends on this version
maven_jar(
name = "httpcore-nio",
artifact = "org.apache.httpcomponents:httpcore-nio:4.4.12",
sha1 = "84cd29eca842f31db02987cfedea245af020198b",
)
maven_jar(
name = "openid-consumer",
artifact = "org.openid4java:openid4java:1.0.0",
sha1 = "541091bb49f2c0d583544c5bb1e6df7612d31e3e",
)
maven_jar(
name = "nekohtml",
artifact = "net.sourceforge.nekohtml:nekohtml:1.9.10",
sha1 = "14052461031a7054aa094f5573792feb6686d3de",
)
maven_jar(
name = "xerces",
artifact = "xerces:xercesImpl:2.8.1",
attach_source = False,
sha1 = "25101e37ec0c907db6f0612cbf106ee519c1aef1",
)
maven_jar(
name = "jruby",
artifact = "org.jruby:jruby-complete:9.1.17.0",
sha1 = "76716d529710fc03d1d429b43e3cedd4419f78d4",
)
maven_jar(
name = "jackson-core",
artifact = "com.fasterxml.jackson.core:jackson-core:2.12.0",
sha1 = "afe52c6947d9939170da7989612cef544115511a",
)
maven_jar(
name = "commons-io",
artifact = "commons-io:commons-io:2.4",
sha1 = "b1b6ea3b7e4aa4f492509a4952029cd8e48019ad",
)
# Google internal dependencies: these are developed at Google, so there is
# no concern about version skew.
FLOGGER_VERS = "0.5.1"
maven_jar(
name = "flogger",
artifact = "com.google.flogger:flogger:" + FLOGGER_VERS,
sha1 = "71d1e2cef9cc604800825583df56b8ef5c053f14",
)
maven_jar(
name = "flogger-log4j-backend",
artifact = "com.google.flogger:flogger-log4j-backend:" + FLOGGER_VERS,
sha1 = "5e2794b75c88223f263f1c1a9d7ea51e2dc45732",
)
maven_jar(
name = "flogger-system-backend",
artifact = "com.google.flogger:flogger-system-backend:" + FLOGGER_VERS,
sha1 = "b66d3bedb14da604828a8693bb24fd78e36b0e9e",
)
maven_jar(
name = "guava",
artifact = "com.google.guava:guava:" + GUAVA_VERSION,
sha1 = GUAVA_BIN_SHA1,
)
GUICE_VERS = "5.0.1"
maven_jar(
name = "guice-library",
artifact = "com.google.inject:guice:" + GUICE_VERS,
sha1 = "0dae7556b441cada2b4f0a2314eb68e1ff423429",
)
maven_jar(
name = "guice-assistedinject",
artifact = "com.google.inject.extensions:guice-assistedinject:" + GUICE_VERS,
sha1 = "62e02f2aceb7d90ba354584dacc018c1e94ff01c",
)
maven_jar(
name = "guice-servlet",
artifact = "com.google.inject.extensions:guice-servlet:" + GUICE_VERS,
sha1 = "f527009d51f172a2e6937bfb55fcb827e2e2386b",
)
# Keep this version of Soy synchronized with the version used in Gitiles.
maven_jar(
name = "soy",
artifact = "com.google.template:soy:2021-02-01",
sha1 = "8e833744832ba88059205a1e30e0898f925d8cb5",
)
# Test-only dependencies below.
maven_jar(
name = "cglib-3_2",
artifact = "cglib:cglib-nodep:3.2.6",
sha1 = "92bf48723d277d6efd1150b2f7e9e1e92cb56caf",
)
maven_jar(
name = "objenesis",
artifact = "org.objenesis:objenesis:1.3",
sha1 = "dc13ae4faca6df981fc7aeb5a522d9db446d5d50",
)
DOCKER_JAVA_VERS = "3.2.8"
maven_jar(
name = "docker-java-api",
artifact = "com.github.docker-java:docker-java-api:" + DOCKER_JAVA_VERS,
sha1 = "4ac22a72d546a9f3523cd4b5fabffa77c4a6ec7c",
)
maven_jar(
name = "docker-java-transport",
artifact = "com.github.docker-java:docker-java-transport:" + DOCKER_JAVA_VERS,
sha1 = "c3b5598c67d0a5e2e780bf48f520da26b9915eab",
)
# https://github.com/docker-java/docker-java/blob/3.2.8/pom.xml#L61
# <=> DOCKER_JAVA_VERS
maven_jar(
name = "jackson-annotations",
artifact = "com.fasterxml.jackson.core:jackson-annotations:2.10.3",
sha1 = "0f63b3b1da563767d04d2e4d3fc1ae0cdeffebe7",
)
maven_jar(
name = "testcontainers",
artifact = "org.testcontainers:testcontainers:" + TESTCONTAINERS_VERSION,
sha1 = "95c6cfde71c2209f0c29cb14e432471e0b111880",
)
maven_jar(
name = "duct-tape",
artifact = "org.rnorth.duct-tape:duct-tape:1.0.8",
sha1 = "92edc22a9ab2f3e17c9bf700aaee377d50e8b530",
)
maven_jar(
name = "visible-assertions",
artifact = "org.rnorth.visible-assertions:visible-assertions:2.1.2",
sha1 = "20d31a578030ec8e941888537267d3123c2ad1c1",
)
maven_jar(
name = "jna",
artifact = "net.java.dev.jna:jna:5.5.0",
sha1 = "0e0845217c4907822403912ad6828d8e0b256208",
)
maven_jar(
name = "jimfs",
artifact = "com.google.jimfs:jimfs:1.2",
sha1 = "48462eb319817c90c27d377341684b6b81372e08",
)
TRUTH_VERS = "1.1"
maven_jar(
name = "truth",
artifact = "com.google.truth:truth:" + TRUTH_VERS,
sha1 = "6a096a16646559c24397b03f797d0c9d75ee8720",
)
maven_jar(
name = "truth-java8-extension",
artifact = "com.google.truth.extensions:truth-java8-extension:" + TRUTH_VERS,
sha1 = "258db6eb8df61832c5c059ed2bc2e1c88683e92f",
)
maven_jar(
name = "truth-liteproto-extension",
artifact = "com.google.truth.extensions:truth-liteproto-extension:" + TRUTH_VERS,
sha1 = "bf65afa13aa03330e739bcaa5d795fe0f10fbf20",
)
maven_jar(
name = "truth-proto-extension",
artifact = "com.google.truth.extensions:truth-proto-extension:" + TRUTH_VERS,
sha1 = "64cba89cf87c1d84cb8c81d06f0b9c482f10b4dc",
)
| load('//tools/bzl:maven_jar.bzl', 'maven_jar')
guava_version = '30.1-jre'
guava_bin_sha1 = '00d0c3ce2311c9e36e73228da25a6e99b2ab826f'
guava_doc_url = 'https://google.github.io/guava/releases/' + GUAVA_VERSION + '/api/docs/'
testcontainers_version = '1.15.3'
def declare_nongoogle_deps():
"""loads dependencies that are not used at Google.
Changes to versions are exempt from library compliance review. New
dependencies must pass through library compliance review. This is
enforced by //lib:nongoogle_test.
"""
maven_jar(name='j2objc', artifact='com.google.j2objc:j2objc-annotations:1.1', sha1='ed28ded51a8b1c6b112568def5f4b455e6809019')
maven_jar(name='tukaani-xz', artifact='org.tukaani:xz:1.8', sha1='c4f7d054303948eb6a4066194253886c8af07128')
maven_jar(name='dropwizard-core', artifact='io.dropwizard.metrics:metrics-core:4.1.12.1', sha1='cb2f351bf4463751201f43bb99865235d5ba07ca')
sshd_vers = '2.6.0'
maven_jar(name='sshd-osgi', artifact='org.apache.sshd:sshd-osgi:' + SSHD_VERS, sha1='40e365bb799e1bff3d31dc858b1e59a93c123f29')
maven_jar(name='sshd-sftp', artifact='org.apache.sshd:sshd-sftp:' + SSHD_VERS, sha1='6eddfe8fdf59a3d9a49151e4177f8c1bebeb30c9')
maven_jar(name='eddsa', artifact='net.i2p.crypto:eddsa:0.3.0', sha1='1901c8d4d8bffb7d79027686cfb91e704217c3e1')
maven_jar(name='mina-core', artifact='org.apache.mina:mina-core:2.0.21', sha1='e1a317689ecd438f54e863747e832f741ef8e092')
maven_jar(name='sshd-mina', artifact='org.apache.sshd:sshd-mina:' + SSHD_VERS, sha1='d22138ba75dee95e2123f0e53a9c514b2a766da9')
maven_jar(name='httpasyncclient', artifact='org.apache.httpcomponents:httpasyncclient:4.1.4', sha1='f3a3240681faae3fa46b573a4c7e50cec9db0d86')
maven_jar(name='httpcore-nio', artifact='org.apache.httpcomponents:httpcore-nio:4.4.12', sha1='84cd29eca842f31db02987cfedea245af020198b')
maven_jar(name='openid-consumer', artifact='org.openid4java:openid4java:1.0.0', sha1='541091bb49f2c0d583544c5bb1e6df7612d31e3e')
maven_jar(name='nekohtml', artifact='net.sourceforge.nekohtml:nekohtml:1.9.10', sha1='14052461031a7054aa094f5573792feb6686d3de')
maven_jar(name='xerces', artifact='xerces:xercesImpl:2.8.1', attach_source=False, sha1='25101e37ec0c907db6f0612cbf106ee519c1aef1')
maven_jar(name='jruby', artifact='org.jruby:jruby-complete:9.1.17.0', sha1='76716d529710fc03d1d429b43e3cedd4419f78d4')
maven_jar(name='jackson-core', artifact='com.fasterxml.jackson.core:jackson-core:2.12.0', sha1='afe52c6947d9939170da7989612cef544115511a')
maven_jar(name='commons-io', artifact='commons-io:commons-io:2.4', sha1='b1b6ea3b7e4aa4f492509a4952029cd8e48019ad')
flogger_vers = '0.5.1'
maven_jar(name='flogger', artifact='com.google.flogger:flogger:' + FLOGGER_VERS, sha1='71d1e2cef9cc604800825583df56b8ef5c053f14')
maven_jar(name='flogger-log4j-backend', artifact='com.google.flogger:flogger-log4j-backend:' + FLOGGER_VERS, sha1='5e2794b75c88223f263f1c1a9d7ea51e2dc45732')
maven_jar(name='flogger-system-backend', artifact='com.google.flogger:flogger-system-backend:' + FLOGGER_VERS, sha1='b66d3bedb14da604828a8693bb24fd78e36b0e9e')
maven_jar(name='guava', artifact='com.google.guava:guava:' + GUAVA_VERSION, sha1=GUAVA_BIN_SHA1)
guice_vers = '5.0.1'
maven_jar(name='guice-library', artifact='com.google.inject:guice:' + GUICE_VERS, sha1='0dae7556b441cada2b4f0a2314eb68e1ff423429')
maven_jar(name='guice-assistedinject', artifact='com.google.inject.extensions:guice-assistedinject:' + GUICE_VERS, sha1='62e02f2aceb7d90ba354584dacc018c1e94ff01c')
maven_jar(name='guice-servlet', artifact='com.google.inject.extensions:guice-servlet:' + GUICE_VERS, sha1='f527009d51f172a2e6937bfb55fcb827e2e2386b')
maven_jar(name='soy', artifact='com.google.template:soy:2021-02-01', sha1='8e833744832ba88059205a1e30e0898f925d8cb5')
maven_jar(name='cglib-3_2', artifact='cglib:cglib-nodep:3.2.6', sha1='92bf48723d277d6efd1150b2f7e9e1e92cb56caf')
maven_jar(name='objenesis', artifact='org.objenesis:objenesis:1.3', sha1='dc13ae4faca6df981fc7aeb5a522d9db446d5d50')
docker_java_vers = '3.2.8'
maven_jar(name='docker-java-api', artifact='com.github.docker-java:docker-java-api:' + DOCKER_JAVA_VERS, sha1='4ac22a72d546a9f3523cd4b5fabffa77c4a6ec7c')
maven_jar(name='docker-java-transport', artifact='com.github.docker-java:docker-java-transport:' + DOCKER_JAVA_VERS, sha1='c3b5598c67d0a5e2e780bf48f520da26b9915eab')
maven_jar(name='jackson-annotations', artifact='com.fasterxml.jackson.core:jackson-annotations:2.10.3', sha1='0f63b3b1da563767d04d2e4d3fc1ae0cdeffebe7')
maven_jar(name='testcontainers', artifact='org.testcontainers:testcontainers:' + TESTCONTAINERS_VERSION, sha1='95c6cfde71c2209f0c29cb14e432471e0b111880')
maven_jar(name='duct-tape', artifact='org.rnorth.duct-tape:duct-tape:1.0.8', sha1='92edc22a9ab2f3e17c9bf700aaee377d50e8b530')
maven_jar(name='visible-assertions', artifact='org.rnorth.visible-assertions:visible-assertions:2.1.2', sha1='20d31a578030ec8e941888537267d3123c2ad1c1')
maven_jar(name='jna', artifact='net.java.dev.jna:jna:5.5.0', sha1='0e0845217c4907822403912ad6828d8e0b256208')
maven_jar(name='jimfs', artifact='com.google.jimfs:jimfs:1.2', sha1='48462eb319817c90c27d377341684b6b81372e08')
truth_vers = '1.1'
maven_jar(name='truth', artifact='com.google.truth:truth:' + TRUTH_VERS, sha1='6a096a16646559c24397b03f797d0c9d75ee8720')
maven_jar(name='truth-java8-extension', artifact='com.google.truth.extensions:truth-java8-extension:' + TRUTH_VERS, sha1='258db6eb8df61832c5c059ed2bc2e1c88683e92f')
maven_jar(name='truth-liteproto-extension', artifact='com.google.truth.extensions:truth-liteproto-extension:' + TRUTH_VERS, sha1='bf65afa13aa03330e739bcaa5d795fe0f10fbf20')
maven_jar(name='truth-proto-extension', artifact='com.google.truth.extensions:truth-proto-extension:' + TRUTH_VERS, sha1='64cba89cf87c1d84cb8c81d06f0b9c482f10b4dc') |
# leetcode 625. Minimum Factorization
# Given a positive integer a, find the smallest positive integer b whose multiplication of each digit equals to a.
# If there is no answer or the answer is not fit in 32-bit signed integer, then return 0.
# Example 1
# Input:
# 48
# Output:
# 68
# Example 2
# Input:
# 15
# Output:
# 35
# V1
# idea :
# please notice the "each digit" term in the problem description
class Solution(object):
def smallestFactorization(self, a):
if a ==1:
return 1
for i in range(2,10):
if a%i == 0:
q = int(a/i)
ans = min((a*10+i), (i*10)+a)
if ans <= 2**31: # 2**31 or 0x7FFFFFFF : 32-bit signed integer
return ans
return 0
return 0
# V2
# http://bookshadow.com/weblog/2017/06/18/leetcode-minimum-factorization/
# https://blog.csdn.net/feifeiiong/article/details/73556747
class Solution(object):
def smallestFactorization(self, a):
"""
:type a: int
:rtype: int
"""
if a == 1: return 1
cnt = [0] * 10
for x in range(9, 1, -1):
while a % x == 0:
cnt[x] += 1
a /= x
if a > 1: return 0
ans = int(''.join(str(n) * cnt[n] for n in range(2, 10)))
return ans <= 0x7FFFFFFF and ans or 0
# V3
# Time: O(loga)
# Space: O(1)
class Solution(object):
def smallestFactorization(self, a):
"""
:type a: int
:rtype: int
"""
if a < 2:
return a
result, mul = 0, 1
for i in reversed(range(2, 10)):
while a % i == 0:
a /= i
result = mul*i + result
mul *= 10
return result if a == 1 and result < 2**31 else 0
| class Solution(object):
def smallest_factorization(self, a):
if a == 1:
return 1
for i in range(2, 10):
if a % i == 0:
q = int(a / i)
ans = min(a * 10 + i, i * 10 + a)
if ans <= 2 ** 31:
return ans
return 0
return 0
class Solution(object):
def smallest_factorization(self, a):
"""
:type a: int
:rtype: int
"""
if a == 1:
return 1
cnt = [0] * 10
for x in range(9, 1, -1):
while a % x == 0:
cnt[x] += 1
a /= x
if a > 1:
return 0
ans = int(''.join((str(n) * cnt[n] for n in range(2, 10))))
return ans <= 2147483647 and ans or 0
class Solution(object):
def smallest_factorization(self, a):
"""
:type a: int
:rtype: int
"""
if a < 2:
return a
(result, mul) = (0, 1)
for i in reversed(range(2, 10)):
while a % i == 0:
a /= i
result = mul * i + result
mul *= 10
return result if a == 1 and result < 2 ** 31 else 0 |
'''
Last digit of number's factorial
Status: Accepted
'''
###############################################################################
def main():
"""Read input and print output"""
nonzero = {}
nonzero[1] = 1
nonzero[2] = 2
nonzero[3] = 6
nonzero[4] = 4
for _ in range(int(input())):
i = int(input())
if i in nonzero:
print(nonzero[i])
else:
print('0')
###############################################################################
if __name__ == '__main__':
main()
| """
Last digit of number's factorial
Status: Accepted
"""
def main():
"""Read input and print output"""
nonzero = {}
nonzero[1] = 1
nonzero[2] = 2
nonzero[3] = 6
nonzero[4] = 4
for _ in range(int(input())):
i = int(input())
if i in nonzero:
print(nonzero[i])
else:
print('0')
if __name__ == '__main__':
main() |
def find_metathesis_pair(filename):
"""Takes a word list as text file, returns a list of word pairs that can
be created by swapping one pair of letters"""
res = []
t = find_anagrams(filename)
for i in t:
possibles = i
for x in range(len(possibles)):
for y in range(len(possibles)):
if is_swappable(possibles[x], possibles[y]):
res.append((possibles[x], possibles[y]))
return res
def is_swappable(word1, word2):
"""Input: 2 strings of equal length
Returns True if Word2 can be formed by swapping 2 letters in Word1, else False"""
differing_letter_positions = []
index = 0
for i in word1:
if word1[index] != word2[index]:
differing_letter_positions.append(index)
index += 1
if len(differing_letter_positions) != 2:
return False
word2 = list(word2)
index1 = differing_letter_positions[0]
index2 = differing_letter_positions[1]
word2[index1], word2[index2] = word2[index2], word2[index1]
if list(word1) == (word2):
return True
return False
def make_anagram_dict(filename):
"""Takes a text file containing one word per line.
Returns a dictionary:
Key is an alphabetised duple of letters in each word,
Value is a list of all words that can be formed by those letters"""
result = {}
fin = open(filename)
for line in fin:
word = line.strip().lower()
letters_in_word = tuple(sorted(word))
if letters_in_word not in result:
result[letters_in_word] = [word]
else:
result[letters_in_word].append(word)
return result
def find_anagrams(filename):
"""Takes a text file word list, returns an alphabetised list of lists of
anagrams found in the word list"""
result = []
t = make_anagram_dict(filename)
for i in t:
anagrams = []
for word in t[i]:
anagrams.append(word)
if sorted(anagrams) not in anagrams:
result.append(sorted(anagrams))
list_of_anagrams = []
for i in range (len(result)):
if len(result[i]) > 1:
list_of_anagrams.append(result[i])
return list_of_anagrams
def sort_anagram_list(filename):
"""Takes a list of lists of anagrams
Returns a list of duples - number of letters(sorted high to low), list of anagrams
for those letters"""
t = find_anagrams('words.txt')
res = []
lengths = []
for i in t:
lengths.append((len(i), i))
for i in sorted(lengths, reverse = True):
res.append(i)
return res
t = find_metathesis_pair('words.txt')
for i in t:
print(i)
| def find_metathesis_pair(filename):
"""Takes a word list as text file, returns a list of word pairs that can
be created by swapping one pair of letters"""
res = []
t = find_anagrams(filename)
for i in t:
possibles = i
for x in range(len(possibles)):
for y in range(len(possibles)):
if is_swappable(possibles[x], possibles[y]):
res.append((possibles[x], possibles[y]))
return res
def is_swappable(word1, word2):
"""Input: 2 strings of equal length
Returns True if Word2 can be formed by swapping 2 letters in Word1, else False"""
differing_letter_positions = []
index = 0
for i in word1:
if word1[index] != word2[index]:
differing_letter_positions.append(index)
index += 1
if len(differing_letter_positions) != 2:
return False
word2 = list(word2)
index1 = differing_letter_positions[0]
index2 = differing_letter_positions[1]
(word2[index1], word2[index2]) = (word2[index2], word2[index1])
if list(word1) == word2:
return True
return False
def make_anagram_dict(filename):
"""Takes a text file containing one word per line.
Returns a dictionary:
Key is an alphabetised duple of letters in each word,
Value is a list of all words that can be formed by those letters"""
result = {}
fin = open(filename)
for line in fin:
word = line.strip().lower()
letters_in_word = tuple(sorted(word))
if letters_in_word not in result:
result[letters_in_word] = [word]
else:
result[letters_in_word].append(word)
return result
def find_anagrams(filename):
"""Takes a text file word list, returns an alphabetised list of lists of
anagrams found in the word list"""
result = []
t = make_anagram_dict(filename)
for i in t:
anagrams = []
for word in t[i]:
anagrams.append(word)
if sorted(anagrams) not in anagrams:
result.append(sorted(anagrams))
list_of_anagrams = []
for i in range(len(result)):
if len(result[i]) > 1:
list_of_anagrams.append(result[i])
return list_of_anagrams
def sort_anagram_list(filename):
"""Takes a list of lists of anagrams
Returns a list of duples - number of letters(sorted high to low), list of anagrams
for those letters"""
t = find_anagrams('words.txt')
res = []
lengths = []
for i in t:
lengths.append((len(i), i))
for i in sorted(lengths, reverse=True):
res.append(i)
return res
t = find_metathesis_pair('words.txt')
for i in t:
print(i) |
Import("env")
print("Extra Script (Pre): common_pre.py")
# Get build flags values from env
def get_build_flag_value(flag_name):
build_flags = env.ParseFlags(env['BUILD_FLAGS'])
flags_with_value_list = [build_flag for build_flag in build_flags.get('CPPDEFINES') if type(build_flag) == list]
defines = {k: v for (k, v) in flags_with_value_list}
return defines.get(flag_name)
# Current details
print("Device Code: %s" % get_build_flag_value("DEVICE_CODE"))
print("Build tag: %s" % get_build_flag_value("BUILD_TAG"))
# Change build file name
new_name = "%s-Pv%s" % (get_build_flag_value("DEVICE_CODE"), get_build_flag_value("BUILD_TAG"))
print("Changing build file name to: %s" % new_name)
env.Replace(PROGNAME=new_name)
| import('env')
print('Extra Script (Pre): common_pre.py')
def get_build_flag_value(flag_name):
build_flags = env.ParseFlags(env['BUILD_FLAGS'])
flags_with_value_list = [build_flag for build_flag in build_flags.get('CPPDEFINES') if type(build_flag) == list]
defines = {k: v for (k, v) in flags_with_value_list}
return defines.get(flag_name)
print('Device Code: %s' % get_build_flag_value('DEVICE_CODE'))
print('Build tag: %s' % get_build_flag_value('BUILD_TAG'))
new_name = '%s-Pv%s' % (get_build_flag_value('DEVICE_CODE'), get_build_flag_value('BUILD_TAG'))
print('Changing build file name to: %s' % new_name)
env.Replace(PROGNAME=new_name) |
KONSTANT = "KONSTANT"
def funktion(value):
print(value)
class Klass:
def method(self):
funktion(KONSTANT)
Klass().method()
| konstant = 'KONSTANT'
def funktion(value):
print(value)
class Klass:
def method(self):
funktion(KONSTANT)
klass().method() |
# -*- coding: utf-8 -*-
"""
square
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
class HttpResponse(object):
"""Information about an HTTP Response including its status code, returned
headers, and raw body
Attributes:
status_code (int): The status code response from the server that
corresponds to this response.
reason_phrase (string): The reason phrase returned by the server.
headers (dict): A dictionary of headers (key : value) that were
returned with the response
text (string): The Raw body of the HTTP Response as a string
request (HttpRequest): The request that resulted in this response.
"""
def __init__(self,
status_code,
reason_phrase,
headers,
text,
request):
"""Constructor for the HttpResponse class
Args:
status_code (int): The response status code.
reason_phrase (string): The response reason phrase.
headers (dict): The response headers.
text (string): The raw body from the server.
request (HttpRequest): The request that resulted in this response.
"""
self.status_code = status_code
self.reason_phrase = reason_phrase
self.headers = headers
self.text = text
self.request = request
| """
square
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
class Httpresponse(object):
"""Information about an HTTP Response including its status code, returned
headers, and raw body
Attributes:
status_code (int): The status code response from the server that
corresponds to this response.
reason_phrase (string): The reason phrase returned by the server.
headers (dict): A dictionary of headers (key : value) that were
returned with the response
text (string): The Raw body of the HTTP Response as a string
request (HttpRequest): The request that resulted in this response.
"""
def __init__(self, status_code, reason_phrase, headers, text, request):
"""Constructor for the HttpResponse class
Args:
status_code (int): The response status code.
reason_phrase (string): The response reason phrase.
headers (dict): The response headers.
text (string): The raw body from the server.
request (HttpRequest): The request that resulted in this response.
"""
self.status_code = status_code
self.reason_phrase = reason_phrase
self.headers = headers
self.text = text
self.request = request |
conf = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'isAccessLog': {
'()': 'utils.CustomLogFilter.AccessLogFilter'
},
'isHuntLog': {
'()': 'utils.CustomLogFilter.HuntLogFilter'
},
'isHuntResultLog': {
'()': 'utils.CustomLogFilter.HuntResultLogFilter'
},
},
# 'loggers': {
# 'elasticsearch': {
# 'level': 'INFO',
# 'handlers': [
# 'consoleHandler',
# 'logFileHandler',
# ],
# "propagate": "no",
# }
# },
'root': {
'level': 'DEBUG',
'handlers': [
'consoleHandler',
'logFileHandler',
'AccessLogFileHandler',
# 'AccessLogSysLogHandler',
'HuntLogFileHandler',
'HuntResultLogFileHandler',
]
},
'handlers': {
'consoleHandler': {
'class': 'logging.StreamHandler',
'level': 'INFO',
'formatter': 'consoleFormatter',
'stream': 'ext://sys.stdout'
},
'logFileHandler': {
'class': 'logging.handlers.TimedRotatingFileHandler',
'level': 'DEBUG',
'formatter': 'logFileFormatter',
'filename': './log/wowhoneypot.log',
'when': 'MIDNIGHT',
'backupCount': 10,
'encoding': 'utf-8'
},
'AccessLogFileHandler': {
'class': 'logging.handlers.TimedRotatingFileHandler',
'level': 'INFO',
'formatter': 'AccessLogFileFormatter',
'filename': './log/access.log',
'when': 'MIDNIGHT',
'backupCount': 10,
'encoding': 'utf-8',
'filters': [
'isAccessLog'
]
},
# 'AccessLogSysLogHandler': {
# 'class': 'logging.handlers.SysLogHandler',
# 'address': ('127.0.0.1', 514),
# 'facility': "local0",
# 'filters': [
# 'isAccessLog'
# ]
# },
'HuntLogFileHandler': {
'class': 'logging.handlers.TimedRotatingFileHandler',
'level': 'INFO',
'formatter': 'HuntLogFileFormatter',
'filename': './log/hunting.log',
'when': 'MIDNIGHT',
'backupCount': 10,
'encoding': 'utf-8',
'filters': [
'isHuntLog'
]
},
'HuntResultLogFileHandler': {
'class': 'logging.handlers.TimedRotatingFileHandler',
'level': 'INFO',
'formatter': 'HuntLogFileFormatter',
'filename': './log/hunt_result.log',
'when': 'MIDNIGHT',
'backupCount': 10,
'encoding': 'utf-8',
'filters': [
'isHuntResultLog'
]
},
},
'formatters': {
'consoleFormatter': {
'format': '%(asctime)s [%(levelname)-8s] %(funcName)s - %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S%z'
},
'logFileFormatter': {
'format': '%(asctime)s|%(levelname)-8s|%(name)s|%(funcName)s|%(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S%z'
},
'AccessLogFileFormatter': {
'format': '%(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S%z'
},
'HuntLogFileFormatter': {
'format': '[%(asctime)s] %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S%z'
},
'HuntResultLogFileFormatter': {
'format': '[%(asctime)s] %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S%z'
},
}
}
| conf = {'version': 1, 'disable_existing_loggers': False, 'filters': {'isAccessLog': {'()': 'utils.CustomLogFilter.AccessLogFilter'}, 'isHuntLog': {'()': 'utils.CustomLogFilter.HuntLogFilter'}, 'isHuntResultLog': {'()': 'utils.CustomLogFilter.HuntResultLogFilter'}}, 'root': {'level': 'DEBUG', 'handlers': ['consoleHandler', 'logFileHandler', 'AccessLogFileHandler', 'HuntLogFileHandler', 'HuntResultLogFileHandler']}, 'handlers': {'consoleHandler': {'class': 'logging.StreamHandler', 'level': 'INFO', 'formatter': 'consoleFormatter', 'stream': 'ext://sys.stdout'}, 'logFileHandler': {'class': 'logging.handlers.TimedRotatingFileHandler', 'level': 'DEBUG', 'formatter': 'logFileFormatter', 'filename': './log/wowhoneypot.log', 'when': 'MIDNIGHT', 'backupCount': 10, 'encoding': 'utf-8'}, 'AccessLogFileHandler': {'class': 'logging.handlers.TimedRotatingFileHandler', 'level': 'INFO', 'formatter': 'AccessLogFileFormatter', 'filename': './log/access.log', 'when': 'MIDNIGHT', 'backupCount': 10, 'encoding': 'utf-8', 'filters': ['isAccessLog']}, 'HuntLogFileHandler': {'class': 'logging.handlers.TimedRotatingFileHandler', 'level': 'INFO', 'formatter': 'HuntLogFileFormatter', 'filename': './log/hunting.log', 'when': 'MIDNIGHT', 'backupCount': 10, 'encoding': 'utf-8', 'filters': ['isHuntLog']}, 'HuntResultLogFileHandler': {'class': 'logging.handlers.TimedRotatingFileHandler', 'level': 'INFO', 'formatter': 'HuntLogFileFormatter', 'filename': './log/hunt_result.log', 'when': 'MIDNIGHT', 'backupCount': 10, 'encoding': 'utf-8', 'filters': ['isHuntResultLog']}}, 'formatters': {'consoleFormatter': {'format': '%(asctime)s [%(levelname)-8s] %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S%z'}, 'logFileFormatter': {'format': '%(asctime)s|%(levelname)-8s|%(name)s|%(funcName)s|%(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S%z'}, 'AccessLogFileFormatter': {'format': '%(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S%z'}, 'HuntLogFileFormatter': {'format': '[%(asctime)s] %(message)s', 'datefmt': 
'%Y-%m-%d %H:%M:%S%z'}, 'HuntResultLogFileFormatter': {'format': '[%(asctime)s] %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S%z'}}} |
def isPerfectCube(num):
ans = 0
while ans**3 < abs(num):
ans += 1
if ans**3 != abs(num):
print("Not a perfect cube")
else:
if num < 0:
ans = -ans
print(ans, "is a cube root of", num)
def main():
if __name__ == "__main__":
print(isPerfectCube(8), "answer is 2")
print(isPerfectCube(-8), "answer is -2")
print(isPerfectCube(9), "answer is not perfect cube")
main() | def is_perfect_cube(num):
ans = 0
while ans ** 3 < abs(num):
ans += 1
if ans ** 3 != abs(num):
print('Not a perfect cube')
else:
if num < 0:
ans = -ans
print(ans, 'is a cube root of', num)
def main():
if __name__ == '__main__':
print(is_perfect_cube(8), 'answer is 2')
print(is_perfect_cube(-8), 'answer is -2')
print(is_perfect_cube(9), 'answer is not perfect cube')
main() |
# -*- coding: utf-8 -*-
"""
#project: CCP_Python3
#file: CCPRest.py
#author: ceephoen
#contact: ceephoen@163.com
#time: 2019/6/13 21:43:21
#desc:
""" | """
#project: CCP_Python3
#file: CCPRest.py
#author: ceephoen
#contact: ceephoen@163.com
#time: 2019/6/13 21:43:21
#desc:
""" |
"""
# Definition for Employee.
class Employee:
def __init__(self, id: int, importance: int, subordinates: List[int]):
self.id = id
self.importance = importance
self.subordinates = subordinates
"""
class Solution:
def getImportance(self, employees: List['Employee'], id: int) -> int:
emap = {e.id: e for e in employees}
def dfs(eid):
employee = emap[eid]
return (employee.importance +
sum(dfs(eid) for eid in employee.subordinates))
return dfs(id)
| """
# Definition for Employee.
class Employee:
def __init__(self, id: int, importance: int, subordinates: List[int]):
self.id = id
self.importance = importance
self.subordinates = subordinates
"""
class Solution:
def get_importance(self, employees: List['Employee'], id: int) -> int:
emap = {e.id: e for e in employees}
def dfs(eid):
employee = emap[eid]
return employee.importance + sum((dfs(eid) for eid in employee.subordinates))
return dfs(id) |
def detect_db(fh):
"""
Parameters
----------
fh : file-like object
Returns
-------
db_type : str
one of 'irefindex', 'string'
Notes
-----
STRING
======
head -n2 9606.protein.links.full.v10.5.txt
protein1 protein2 neighborhood neighborhood_transferred fusion cooccurence homology coexpression coexpression_transferred experiments experiments_transferred database database_transferred textmining textmining_transferred combined_score
9606.ENSP00000000233 9606.ENSP00000263431 0 0 0 0 0 0 53 0 176 0 0 0 128 260
iRefIndex
========
head -n2 9606.mitab.04072015.txt
#uidA uidB altA altB aliasA aliasB method author pmids taxa taxb interactionType sourcedb interactionIdentifier confidence expansion biological_role_A biological_role_B experimental_role_A experimental_role_B interactor_type_A interactor_type_B xrefs_A xrefs_B xrefs_Interaction Annotations_A Annotations_B Annotations_Interaction Host_organism_taxid parameters_Interaction Creation_date Update_date Checksum_A Checksum_B Checksum_Interaction Negative OriginalReferenceA OriginalReferenceB FinalReferenceA FinalReferenceB MappingScoreA MappingScoreB irogida irogidb irigid crogida crogidb crigid icrogida icrogidb icrigid imex_id edgetype numParticipants
uniprotkb:A0A024R3E3 uniprotkb:A0A024R3E3 entrezgene/locuslink:335|genbank_protein_gi:4557321|refseq:NP_000030|rogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606|irogid:122812673 entrezgene/locuslink:335|genbank_protein_gi:4557321|refseq:NP_000030|rogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606|irogid:122812673 hgnc:APOA1|uniprotkb:A0A024R3E3_HUMAN|uniprotkb:APOA1_HUMAN|crogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606|icrogid:122812673 hgnc:APOA1|uniprotkb:A0A024R3E3_HUMAN|uniprotkb:APOA1_HUMAN|crogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606|icrogid:122812673 - - pubmed:9003180|pubmed:9200714|pubmed:9356442 taxid:9606(Homo sapiens) taxid:9606(Homo sapiens) - MI:0462(bind) bind:75986|rigid:WRUQaMHXGmnC/H/BzyolIfyaa7Y|edgetype:X hpr:141818|lpr:1|np:8 none MI:0000(unspecified) MI:0000(unspecified) MI:0000(unspecified) MI:0000(unspecified) MI:0326(protein) MI:0326(protein) - - - - - - - - 2015-04-07 2015-04-07 rogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606 rogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606 rigid:WRUQaMHXGmnC/H/BzyolIfyaa7Y false GenBank:NP_000030 GenBank:NP_000030 refseq:NP_000030 refseq:NP_000030 PD PD 122812673 122812673 881764 yEzPDeU8Uu/43dkLLOBAy6ey1vs9606 yEzPDeU8Uu/43dkLLOBAy6ey1vs9606 WRUQaMHXGmnC/H/BzyolIfyaa7Y 122812673 122812673 881764 - X 2
"""
string_header = """protein1 protein2 neighborhood neighborhood_transferred fusion cooccurence homology coexpression coexpression_transferred experiments experiments_transferred database database_transferred textmining textmining_transferred combined_score""".split()
irefindex_header = """#uidA uidB altA altB aliasA aliasB method author pmids taxa taxb interactionType sourcedb interactionIdentifier confidence expansion biological_role_A biological_role_B experimental_role_A experimental_role_B interactor_type_A interactor_type_B xrefs_A xrefs_B xrefs_Interaction Annotations_A Annotations_B Annotations_Interaction Host_organism_taxid parameters_Interaction Creation_date Update_date Checksum_A Checksum_B Checksum_Interaction Negative OriginalReferenceA OriginalReferenceB FinalReferenceA FinalReferenceB MappingScoreA MappingScoreB irogida irogidb irigid crogida crogidb crigid icrogida icrogidb icrigid imex_id edgetype numParticipants""".split()
rv = None
fh.seek(0)
fh_header = fh.next().rstrip().split()
if fh_header == string_header:
rv = "string"
elif fh_header == irefindex_header:
rv = "irefindex"
else:
raise RuntimeError("Unrecognized database file: it is not one of iRefIndex, STRING")
fh.seek(0)
return rv
| def detect_db(fh):
"""
Parameters
----------
fh : file-like object
Returns
-------
db_type : str
one of 'irefindex', 'string'
Notes
-----
STRING
======
head -n2 9606.protein.links.full.v10.5.txt
protein1 protein2 neighborhood neighborhood_transferred fusion cooccurence homology coexpression coexpression_transferred experiments experiments_transferred database database_transferred textmining textmining_transferred combined_score
9606.ENSP00000000233 9606.ENSP00000263431 0 0 0 0 0 0 53 0 176 0 0 0 128 260
iRefIndex
========
head -n2 9606.mitab.04072015.txt
#uidA uidB altA altB aliasA aliasB method author pmids taxa taxb interactionType sourcedb interactionIdentifier confidence expansion biological_role_A biological_role_B experimental_role_A experimental_role_B interactor_type_A interactor_type_B xrefs_A xrefs_B xrefs_Interaction Annotations_A Annotations_B Annotations_Interaction Host_organism_taxid parameters_Interaction Creation_date Update_date Checksum_A Checksum_B Checksum_Interaction Negative OriginalReferenceA OriginalReferenceB FinalReferenceA FinalReferenceB MappingScoreA MappingScoreB irogida irogidb irigid crogida crogidb crigid icrogida icrogidb icrigid imex_id edgetype numParticipants
uniprotkb:A0A024R3E3 uniprotkb:A0A024R3E3 entrezgene/locuslink:335|genbank_protein_gi:4557321|refseq:NP_000030|rogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606|irogid:122812673 entrezgene/locuslink:335|genbank_protein_gi:4557321|refseq:NP_000030|rogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606|irogid:122812673 hgnc:APOA1|uniprotkb:A0A024R3E3_HUMAN|uniprotkb:APOA1_HUMAN|crogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606|icrogid:122812673 hgnc:APOA1|uniprotkb:A0A024R3E3_HUMAN|uniprotkb:APOA1_HUMAN|crogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606|icrogid:122812673 - - pubmed:9003180|pubmed:9200714|pubmed:9356442 taxid:9606(Homo sapiens) taxid:9606(Homo sapiens) - MI:0462(bind) bind:75986|rigid:WRUQaMHXGmnC/H/BzyolIfyaa7Y|edgetype:X hpr:141818|lpr:1|np:8 none MI:0000(unspecified) MI:0000(unspecified) MI:0000(unspecified) MI:0000(unspecified) MI:0326(protein) MI:0326(protein) - - - - - - - - 2015-04-07 2015-04-07 rogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606 rogid:yEzPDeU8Uu/43dkLLOBAy6ey1vs9606 rigid:WRUQaMHXGmnC/H/BzyolIfyaa7Y false GenBank:NP_000030 GenBank:NP_000030 refseq:NP_000030 refseq:NP_000030 PD PD 122812673 122812673 881764 yEzPDeU8Uu/43dkLLOBAy6ey1vs9606 yEzPDeU8Uu/43dkLLOBAy6ey1vs9606 WRUQaMHXGmnC/H/BzyolIfyaa7Y 122812673 122812673 881764 - X 2
"""
string_header = 'protein1 protein2 neighborhood neighborhood_transferred fusion cooccurence homology coexpression coexpression_transferred experiments experiments_transferred database database_transferred textmining textmining_transferred combined_score'.split()
irefindex_header = '#uidA uidB altA altB aliasA aliasB method author pmids taxa taxb interactionType sourcedb interactionIdentifier confidence expansion biological_role_A biological_role_B experimental_role_A experimental_role_B interactor_type_A interactor_type_B xrefs_A xrefs_B xrefs_Interaction Annotations_A Annotations_B Annotations_Interaction Host_organism_taxid parameters_Interaction Creation_date Update_date Checksum_A Checksum_B Checksum_Interaction Negative OriginalReferenceA OriginalReferenceB FinalReferenceA FinalReferenceB MappingScoreA MappingScoreB irogida irogidb irigid crogida crogidb crigid icrogida icrogidb icrigid imex_id edgetype numParticipants'.split()
rv = None
fh.seek(0)
fh_header = fh.next().rstrip().split()
if fh_header == string_header:
rv = 'string'
elif fh_header == irefindex_header:
rv = 'irefindex'
else:
raise runtime_error('Unrecognized database file: it is not one of iRefIndex, STRING')
fh.seek(0)
return rv |
"""
This module contains exceptions for use throughout the L11 Colorlib.
"""
class ColorMathException(Exception):
"""
Base exception for all colormath exceptions.
"""
pass
class UndefinedConversionError(ColorMathException):
"""
Raised when the user asks for a color space conversion that does not exist.
"""
def __init__(self, cobj, cs_to):
super(UndefinedConversionError, self).__init__(cobj, cs_to)
self.message = "Conversion from %s to %s is not defined." % (cobj, cs_to)
class InvalidIlluminantError(ColorMathException):
"""
Raised when an invalid illuminant is set on a ColorObj.
"""
def __init__(self, illuminant):
super(InvalidIlluminantError, self).__init__(illuminant)
self.message = "Invalid illuminant specified: %s" % illuminant
class InvalidObserverError(ColorMathException):
"""
Raised when an invalid observer is set on a ColorObj.
"""
def __init__(self, cobj):
super(InvalidObserverError, self).__init__(cobj)
self.message = "Invalid observer angle specified: %s" % cobj.observer
| """
This module contains exceptions for use throughout the L11 Colorlib.
"""
class Colormathexception(Exception):
    """
    Base exception for all colormath exceptions.
    """
    pass
class Undefinedconversionerror(ColorMathException):
    """
    Raised when the user asks for a color space conversion that does not exist.
    """
    def __init__(self, cobj, cs_to):
        # BUG FIX: super() previously named UndefinedConversionError (a
        # different class defined earlier in this file), so every
        # instantiation raised TypeError. Zero-argument super() targets
        # this class correctly.
        super().__init__(cobj, cs_to)
        self.message = 'Conversion from %s to %s is not defined.' % (cobj, cs_to)
class Invalidilluminanterror(ColorMathException):
    """
    Raised when an invalid illuminant is set on a ColorObj.
    """
    def __init__(self, illuminant):
        # BUG FIX: the super() call previously named the foreign class
        # InvalidIlluminantError (TypeError at runtime).
        super().__init__(illuminant)
        self.message = 'Invalid illuminant specified: %s' % illuminant
class Invalidobservererror(ColorMathException):
    """
    Raised when an invalid observer is set on a ColorObj.
    """
    def __init__(self, cobj):
        # BUG FIX: the super() call previously named the foreign class
        # InvalidObserverError (TypeError at runtime).
        super().__init__(cobj)
        self.message = 'Invalid observer angle specified: %s' % cobj.observer
# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Modifications made by Cloudera are:
# Copyright (c) 2016 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
class AltusCLIError(Exception):
    """
    The base exception class for Altus CLI exceptions.
    """
    # Subclasses customize the message by overriding this str.format template.
    fmt = 'An unspecified error occured'
    def __init__(self, **kwargs):
        # Render the template with the supplied fields and keep the raw
        # keyword arguments around for programmatic inspection.
        Exception.__init__(self, self.fmt.format(**kwargs))
        self.kwargs = kwargs
class ValidationError(AltusCLIError):
    """
    An exception occurred validating parameters.
    """
    fmt = "Invalid value ('{value}') for param {param} of type {type_name}"
class ParamValidationError(AltusCLIError):
    """
    Parameter validation failed; {report} carries the details.
    """
    fmt = 'Parameter validation failed:\n{report}'
class DataNotFoundError(AltusCLIError):
    """
    The data associated with a particular path could not be loaded.
    """
    fmt = 'Unable to load data for: {data_path}'
class ExecutableNotFoundError(AltusCLIError):
    """
    The executable was not found.
    """
    fmt = 'Could not find executable named: {executable_name}'
class OperationNotPageableError(AltusCLIError):
    """
    The requested operation does not support pagination.
    """
    fmt = 'Operation cannot be paginated: {operation_name}'
class ClientError(Exception):
    """Raised for service-side errors; wraps the structured error response."""
    MSG_TEMPLATE = (
        'An error occurred: {error_message} ('
        'Status Code: {http_status_code}; '
        'Error Code: {error_code}; '
        'Service: {service_name}; '
        'Operation: {operation_name}; '
        'Request ID: {request_id};)')
    def __init__(self, error_response, operation_name, service_name,
                 http_status_code, request_id):
        # ``error_response`` must contain an 'error' mapping; missing
        # code/message fields fall back to 'Unknown'.
        details = error_response['error']
        message = self.MSG_TEMPLATE.format(
            error_message=details.get('message', 'Unknown'),
            error_code=details.get('code', 'Unknown'),
            service_name=service_name,
            operation_name=operation_name,
            http_status_code=http_status_code,
            request_id=request_id)
        Exception.__init__(self, message)
        # Keep the raw response for callers that need the structured data.
        self.response = error_response
class UnseekableStreamError(AltusCLIError):
    """
    Need to seek a stream, but stream does not support seeking.
    """
    fmt = ('Need to rewind the stream {stream_object}, but stream '
           'is not seekable.')
class EndpointConnectionError(AltusCLIError):
    """
    Could not connect to the service endpoint URL.
    """
    fmt = (
        'Could not connect to the endpoint URL: "{endpoint_url}"')
class IncompleteReadError(AltusCLIError):
    """
    HTTP response did not return expected number of bytes.
    """
    fmt = ('{actual_bytes} read, but total bytes '
           'expected is {expected_bytes}.')
class PaginationError(AltusCLIError):
    """
    An error occurred while paginating results.
    """
    fmt = 'Error during pagination: {message}'
class UnknownSignatureVersionError(AltusCLIError):
    """
    Requested Signature Version is not known.
    """
    fmt = 'Unknown Signature Version: {signature_version}.'
class UnsupportedSignatureVersionError(AltusCLIError):
    """
    Requested Signature Version is not supported.
    """
    fmt = 'Signature version is not supported: {signature_version}'
class NoCredentialsError(AltusCLIError):
    """
    No credentials could be found
    """
    fmt = 'Unable to locate Altus credentials'
class UnknownCredentialError(AltusCLIError):
    """
    Tried to insert before/after an unregistered credential type.
    """
    fmt = 'Credential named {name} not found.'
class PartialCredentialsError(AltusCLIError):
    """
    Only partial credentials were found.
    """
    fmt = 'Partial credentials found in {provider}, missing: {cred_var}'
class BaseEndpointResolverError(AltusCLIError):
    """
    Base error for endpoint resolving errors.
    Should never be raised directly, but clients can catch
    this exception if they want to generically handle any errors
    during the endpoint resolution process.
    """
class NoRegionError(BaseEndpointResolverError):
    """
    No region was specified.
    """
    fmt = 'You must specify a region.'
class ProfileNotFound(AltusCLIError):
    """
    The specified configuration profile was not found in the
    configuration file.
    """
    fmt = 'The config profile ({profile}) could not be found'
class ConfigNotFound(AltusCLIError):
    """
    The specified configuration file could not be found.
    """
    fmt = 'The specified config file ({path}) could not be found.'
class ConfigParseError(AltusCLIError):
    """
    The configuration file could not be parsed.
    """
    fmt = 'Unable to parse config file: {path}'
class ClusterTerminatingError(AltusCLIError):
    """
    The cluster is terminating or has already terminated.
    """
    fmt = 'Cluster {cluster_name} is terminating.'
class ClusterStartingError(AltusCLIError):
    """
    The cluster is starting.
    """
    fmt = 'Cluster {cluster_name} is starting.'
class ClusterFailedError(AltusCLIError):
    """
    The cluster failed to start.
    """
    fmt = 'Cluster {cluster_name} failed to start.'
class ClusterDoesNotExistError(AltusCLIError):
    """
    Cluster with the given name does not exist.
    """
    fmt = 'Cluster {cluster_name} does not exist.'
class ClusterStatusNotFound(AltusCLIError):
    """
    Unable to find cluster status.
    """
    fmt = 'Unable to find {cluster_name}\'s status.'
class ClusterEndpointNotFound(AltusCLIError):
    """
    Unable to find cluster's Cloudera Manager Endpoint.
    """
    fmt = 'Unable to find {cluster_name}\'s Cloudera Manager Endpoint.'
class MultipleClustersExist(AltusCLIError):
    """
    Multiple clusters exist, expected single cluster.
    """
    fmt = 'Multiple clusters exist, expected single cluster.'
class SSHNotFoundError(AltusCLIError):
    """
    SSH or Putty not available.
    """
    fmt = 'SSH or Putty not available.'
class WrongPuttyKeyError(AltusCLIError):
    """
    A wrong key has been used with a compatible program.
    """
    fmt = 'Key file file format is incorrect. Putty expects a ppk file.'
| class Altusclierror(Exception):
"""
The base exception class for Altus CLI exceptions.
"""
fmt = 'An unspecified error occured'
def __init__(self, **kwargs):
msg = self.fmt.format(**kwargs)
Exception.__init__(self, msg)
self.kwargs = kwargs
class Validationerror(AltusCLIError):
    """
    An exception occurred validating parameters.
    """
    fmt = "Invalid value ('{value}') for param {param} of type {type_name}"
class Paramvalidationerror(AltusCLIError):
    """
    Parameter validation failed; {report} carries the details.
    """
    fmt = 'Parameter validation failed:\n{report}'
class Datanotfounderror(AltusCLIError):
    """
    The data associated with a particular path could not be loaded.
    """
    fmt = 'Unable to load data for: {data_path}'
class Executablenotfounderror(AltusCLIError):
    """
    The executable was not found.
    """
    fmt = 'Could not find executable named: {executable_name}'
class Operationnotpageableerror(AltusCLIError):
    """
    The requested operation does not support pagination.
    """
    fmt = 'Operation cannot be paginated: {operation_name}'
class Clienterror(Exception):
    """Raised for service-side errors; wraps the structured error response."""
    # str.format template used to render the exception message.
    msg_template = 'An error occurred: {error_message} (Status Code: {http_status_code}; Error Code: {error_code}; Service: {service_name}; Operation: {operation_name}; Request ID: {request_id};)'
    def __init__(self, error_response, operation_name, service_name, http_status_code, request_id):
        """Build the message from ``error_response['error']``, defaulting
        missing code/message fields to 'Unknown'.

        BUG FIX: the original read ``self.MSG_TEMPLATE`` (undefined here --
        the class attribute is ``msg_template``) and called
        ``super(ClientError, self)`` with a foreign class, which raises
        TypeError; both references are corrected below.
        """
        msg = self.msg_template.format(error_code=error_response['error'].get('code', 'Unknown'), error_message=error_response['error'].get('message', 'Unknown'), operation_name=operation_name, service_name=service_name, http_status_code=http_status_code, request_id=request_id)
        super().__init__(msg)
        # Keep the raw response for callers that need the structured data.
        self.response = error_response
class Unseekablestreamerror(AltusCLIError):
    """
    Need to seek a stream, but stream does not support seeking.
    """
    fmt = 'Need to rewind the stream {stream_object}, but stream is not seekable.'
class Endpointconnectionerror(AltusCLIError):
    """
    Could not connect to the service endpoint URL.
    """
    fmt = 'Could not connect to the endpoint URL: "{endpoint_url}"'
class Incompletereaderror(AltusCLIError):
    """
    HTTP response did not return expected number of bytes.
    """
    fmt = '{actual_bytes} read, but total bytes expected is {expected_bytes}.'
class Paginationerror(AltusCLIError):
    """
    An error occurred while paginating results.
    """
    fmt = 'Error during pagination: {message}'
class Unknownsignatureversionerror(AltusCLIError):
    """
    Requested Signature Version is not known.
    """
    fmt = 'Unknown Signature Version: {signature_version}.'
class Unsupportedsignatureversionerror(AltusCLIError):
    """
    Requested Signature Version is not supported.
    """
    fmt = 'Signature version is not supported: {signature_version}'
class Nocredentialserror(AltusCLIError):
    """
    No credentials could be found
    """
    fmt = 'Unable to locate Altus credentials'
class Unknowncredentialerror(AltusCLIError):
    """
    Tried to insert before/after an unregistered credential type.
    """
    fmt = 'Credential named {name} not found.'
class Partialcredentialserror(AltusCLIError):
    """
    Only partial credentials were found.
    """
    fmt = 'Partial credentials found in {provider}, missing: {cred_var}'
class Baseendpointresolvererror(AltusCLIError):
    """
    Base error for endpoint resolving errors.
    Should never be raised directly, but clients can catch
    this exception if they want to generically handle any errors
    during the endpoint resolution process.
    """
# NOTE(review): the base below is the original-cased BaseEndpointResolverError
# defined earlier in this file, not the lowercased variant above -- confirm
# this inheritance is intended.
class Noregionerror(BaseEndpointResolverError):
    """
    No region was specified.
    """
    fmt = 'You must specify a region.'
class Profilenotfound(AltusCLIError):
    """
    The specified configuration profile was not found in the
    configuration file.
    """
    fmt = 'The config profile ({profile}) could not be found'
class Confignotfound(AltusCLIError):
    """
    The specified configuration file could not be found.
    """
    fmt = 'The specified config file ({path}) could not be found.'
class Configparseerror(AltusCLIError):
    """
    The configuration file could not be parsed.
    """
    fmt = 'Unable to parse config file: {path}'
class Clusterterminatingerror(AltusCLIError):
    """
    The cluster is terminating or has already terminated.
    """
    fmt = 'Cluster {cluster_name} is terminating.'
class Clusterstartingerror(AltusCLIError):
    """
    The cluster is starting.
    """
    fmt = 'Cluster {cluster_name} is starting.'
class Clusterfailederror(AltusCLIError):
    """
    The cluster failed to start.
    """
    fmt = 'Cluster {cluster_name} failed to start.'
class Clusterdoesnotexisterror(AltusCLIError):
    """
    Cluster with the given name does not exist.
    """
    fmt = 'Cluster {cluster_name} does not exist.'
class Clusterstatusnotfound(AltusCLIError):
    """
    Unable to find cluster status.
    """
    fmt = "Unable to find {cluster_name}'s status."
class Clusterendpointnotfound(AltusCLIError):
    """
    Unable to find cluster's Cloudera Manager Endpoint.
    """
    fmt = "Unable to find {cluster_name}'s Cloudera Manager Endpoint."
class Multipleclustersexist(AltusCLIError):
    """
    Multiple clusters exist, expected single cluster.
    """
    fmt = 'Multiple clusters exist, expected single cluster.'
class Sshnotfounderror(AltusCLIError):
    """
    SSH or Putty not available.
    """
    fmt = 'SSH or Putty not available.'
class Wrongputtykeyerror(AltusCLIError):
    """
    A wrong key has been used with a compatible program.
    """
    fmt = 'Key file file format is incorrect. Putty expects a ppk file.'
NORMALIZED_POWERS = {
191: ('1x127', '1.5'),
200: ('1x133', '1.5'),
330: ('1x220', '1.5'),
345: ('1x230', '1.5'),
381: ('1x127', '3'),
399: ('1x133', '3'),
445: ('1x127', '3.5'),
466: ('1x133', '3.5'),
572: ('3x220/127', '1.5'),
598: ('3x230/133', '1.5'),
635: ('1x127', '5'),
660: ('1x220', '3'),
665: ('1x133', '5'),
690: ('1x230', '3'),
770: ('1x220', '3.5'),
805: ('1x230', '3.5'),
953: ('1x127', '7.5'),
987: ('3x380/220', '1.5'),
998: ('1x133', '7.5'),
1039: ('3x400/230', '1.5'),
1100: ('1x220', '5'),
1143: ('3x220/127', '3'),
1150: ('1x230', '5'),
1195: ('3x230/133', '3'),
1270: ('1x127', '10'),
1330: ('1x133', '10'),
1334: ('3x220/127', '3.5'),
1394: ('3x230/133', '3.5'),
1650: ('1x220', '7.5'),
1725: ('1x230', '7.5'),
1905: ('1x127', '15'),
1975: ('3x380/220', '3'),
1992: ('3x230/133', '5'),
1995: ('1x133', '15'),
2078: ('3x400/230', '3'),
2200: ('1x220', '10'),
2300: ('1x230', '10'),
2304: ('3x380/220', '3.5'),
2425: ('3x400/230', '3.5'),
2540: ('1x127', '20'),
2660: ('1x133', '20'),
2858: ('3x220/127', '7.5'),
2988: ('3x230/133', '7.5'),
3175: ('1x127', '25'),
3291: ('3x380/220', '5'),
3300: ('1x220', '15'),
3325: ('1x133', '25'),
3450: ('1x230', '15'),
3464: ('3x400/230', '5'),
3810: ('1x127', '30'),
3811: ('3x220/127', '10'),
3984: ('3x230/133', '10'),
3990: ('1x133', '30'),
4400: ('1x220', '20'),
4445: ('1x127', '35'),
4600: ('1x230', '20'),
4655: ('1x133', '35'),
4936: ('3x380/220', '7.5'),
5080: ('1x127', '40'),
5196: ('3x400/230', '7.5'),
5320: ('1x133', '40'),
5500: ('1x220', '25'),
5715: ('1x127', '45'),
5716: ('3x220/127', '15'),
5750: ('1x230', '25'),
5976: ('3x230/133', '15'),
5985: ('1x133', '45'),
6350: ('1x127', '50'),
6582: ('3x380/220', '10'),
6600: ('1x220', '30'),
6650: ('1x133', '50'),
6900: ('1x230', '30'),
6928: ('3x400/230', '10'),
7621: ('3x220/127', '20'),
7700: ('1x220', '35'),
7967: ('3x230/133', '20'),
8001: ('1x127', '63'),
8050: ('1x230', '35'),
8379: ('1x133', '63'),
8800: ('1x220', '40'),
9200: ('1x230', '40'),
9526: ('3x220/127', '25'),
9873: ('3x380/220', '15'),
9900: ('1x220', '45'),
9959: ('3x230/133', '25'),
10350: ('1x230', '45'),
10392: ('3x400/230', '15'),
11000: ('1x220', '50'),
11432: ('3x220/127', '30'),
11500: ('1x230', '50'),
11951: ('3x230/133', '30'),
13164: ('3x380/220', '20'),
13337: ('3x220/127', '35'),
13856: ('3x400/230', '20'),
13860: ('1x220', '63'),
13943: ('3x230/133', '35'),
14490: ('1x230', '63'),
15242: ('3x220/127', '40'),
15935: ('3x230/133', '40'),
16454: ('3x380/220', '25'),
17147: ('3x220/127', '45'),
17321: ('3x400/230', '25'),
17927: ('3x230/133', '45'),
19053: ('3x220/127', '50'),
19745: ('3x380/220', '30'),
19919: ('3x230/133', '50'),
20785: ('3x400/230', '30'),
23036: ('3x380/220', '35'),
24006: ('3x220/127', '63'),
24249: ('3x400/230', '35'),
25097: ('3x230/133', '63'),
26327: ('3x380/220', '40'),
27713: ('3x400/230', '40'),
29618: ('3x380/220', '45'),
31177: ('3x400/230', '45'),
32909: ('3x380/220', '50'),
34641: ('3x400/230', '50'),
41465: ('3x380/220', '63'),
43648: ('3x400/230', '63')
}
NOT_NORMALIZED_100 = dict([(p, (None, None)) for p in range(100, 15001, 100)
if p not in NORMALIZED_POWERS])
ALL_POWERS = NOT_NORMALIZED_100.copy()
ALL_POWERS.update(NORMALIZED_POWERS)
class NormalizedPower(object):
    """Lookup helpers over the module-level ALL_POWERS table."""

    def get_volt_int(self, pot):
        # Return the (voltage, intensity) pair recorded for ``pot``.
        # NOTE(review): non-normalized 100 W multiples map to (None, None)
        # and are returned as-is; only powers absent from the table raise.
        pair = ALL_POWERS.get(pot)
        if pair is None:
            raise ValueError('The given power is not normalized')
        return pair

    def is_normalized(self, pot):
        # Membership test against the merged table.
        return pot in ALL_POWERS

    def get_norm_powers(self, pot_min, pot_max):
        # Yield table powers p with pot_min < p <= pot_max, ascending.
        for power in sorted(ALL_POWERS):
            if power > pot_max:
                break
            if power > pot_min:
                yield power
| normalized_powers = {191: ('1x127', '1.5'), 200: ('1x133', '1.5'), 330: ('1x220', '1.5'), 345: ('1x230', '1.5'), 381: ('1x127', '3'), 399: ('1x133', '3'), 445: ('1x127', '3.5'), 466: ('1x133', '3.5'), 572: ('3x220/127', '1.5'), 598: ('3x230/133', '1.5'), 635: ('1x127', '5'), 660: ('1x220', '3'), 665: ('1x133', '5'), 690: ('1x230', '3'), 770: ('1x220', '3.5'), 805: ('1x230', '3.5'), 953: ('1x127', '7.5'), 987: ('3x380/220', '1.5'), 998: ('1x133', '7.5'), 1039: ('3x400/230', '1.5'), 1100: ('1x220', '5'), 1143: ('3x220/127', '3'), 1150: ('1x230', '5'), 1195: ('3x230/133', '3'), 1270: ('1x127', '10'), 1330: ('1x133', '10'), 1334: ('3x220/127', '3.5'), 1394: ('3x230/133', '3.5'), 1650: ('1x220', '7.5'), 1725: ('1x230', '7.5'), 1905: ('1x127', '15'), 1975: ('3x380/220', '3'), 1992: ('3x230/133', '5'), 1995: ('1x133', '15'), 2078: ('3x400/230', '3'), 2200: ('1x220', '10'), 2300: ('1x230', '10'), 2304: ('3x380/220', '3.5'), 2425: ('3x400/230', '3.5'), 2540: ('1x127', '20'), 2660: ('1x133', '20'), 2858: ('3x220/127', '7.5'), 2988: ('3x230/133', '7.5'), 3175: ('1x127', '25'), 3291: ('3x380/220', '5'), 3300: ('1x220', '15'), 3325: ('1x133', '25'), 3450: ('1x230', '15'), 3464: ('3x400/230', '5'), 3810: ('1x127', '30'), 3811: ('3x220/127', '10'), 3984: ('3x230/133', '10'), 3990: ('1x133', '30'), 4400: ('1x220', '20'), 4445: ('1x127', '35'), 4600: ('1x230', '20'), 4655: ('1x133', '35'), 4936: ('3x380/220', '7.5'), 5080: ('1x127', '40'), 5196: ('3x400/230', '7.5'), 5320: ('1x133', '40'), 5500: ('1x220', '25'), 5715: ('1x127', '45'), 5716: ('3x220/127', '15'), 5750: ('1x230', '25'), 5976: ('3x230/133', '15'), 5985: ('1x133', '45'), 6350: ('1x127', '50'), 6582: ('3x380/220', '10'), 6600: ('1x220', '30'), 6650: ('1x133', '50'), 6900: ('1x230', '30'), 6928: ('3x400/230', '10'), 7621: ('3x220/127', '20'), 7700: ('1x220', '35'), 7967: ('3x230/133', '20'), 8001: ('1x127', '63'), 8050: ('1x230', '35'), 8379: ('1x133', '63'), 8800: ('1x220', '40'), 9200: ('1x230', '40'), 9526: 
('3x220/127', '25'), 9873: ('3x380/220', '15'), 9900: ('1x220', '45'), 9959: ('3x230/133', '25'), 10350: ('1x230', '45'), 10392: ('3x400/230', '15'), 11000: ('1x220', '50'), 11432: ('3x220/127', '30'), 11500: ('1x230', '50'), 11951: ('3x230/133', '30'), 13164: ('3x380/220', '20'), 13337: ('3x220/127', '35'), 13856: ('3x400/230', '20'), 13860: ('1x220', '63'), 13943: ('3x230/133', '35'), 14490: ('1x230', '63'), 15242: ('3x220/127', '40'), 15935: ('3x230/133', '40'), 16454: ('3x380/220', '25'), 17147: ('3x220/127', '45'), 17321: ('3x400/230', '25'), 17927: ('3x230/133', '45'), 19053: ('3x220/127', '50'), 19745: ('3x380/220', '30'), 19919: ('3x230/133', '50'), 20785: ('3x400/230', '30'), 23036: ('3x380/220', '35'), 24006: ('3x220/127', '63'), 24249: ('3x400/230', '35'), 25097: ('3x230/133', '63'), 26327: ('3x380/220', '40'), 27713: ('3x400/230', '40'), 29618: ('3x380/220', '45'), 31177: ('3x400/230', '45'), 32909: ('3x380/220', '50'), 34641: ('3x400/230', '50'), 41465: ('3x380/220', '63'), 43648: ('3x400/230', '63')}
not_normalized_100 = dict([(p, (None, None)) for p in range(100, 15001, 100) if p not in NORMALIZED_POWERS])
all_powers = NOT_NORMALIZED_100.copy()
ALL_POWERS.update(NORMALIZED_POWERS)
class Normalizedpower(object):
    """Lookup helpers over the module-level ALL_POWERS table."""

    def get_volt_int(self, pot):
        """Return the (voltage, intensity) pair recorded for ``pot``.

        Raises ValueError when the power is absent from the table.
        """
        volt_int = ALL_POWERS.get(pot, None)
        if volt_int is None:
            # BUG FIX: the original called the undefined name ``value_error``,
            # which itself raised NameError instead of the intended error.
            raise ValueError('The given power is not normalized')
        return volt_int

    def is_normalized(self, pot):
        """Return True when ``pot`` appears in the table."""
        return pot in ALL_POWERS

    def get_norm_powers(self, pot_min, pot_max):
        """Yield table powers p with pot_min < p <= pot_max, ascending."""
        for norm_pow in sorted(ALL_POWERS):
            if pot_min < norm_pow <= pot_max:
                yield norm_pow
            elif norm_pow > pot_max:
                break
# Maps HST camera/detector names to integer ids.
# NOTE(review): 'wfpc1_foc_f48' and 'wfpc2_foc_f48' each appear twice below,
# so the later values (4 and 8) silently win; the duplicates look like typos
# for a different aperture -- confirm against the upstream table.
dict_camera = {'wfpc1': 1, 'wfpc1_planetary': 2, 'wfpc1_foc_f48': 3, 'wfpc1_foc_f48': 4, 'wfpc2': 5, 'wfpc2_planetary': 6, 'wfpc2_foc_f48': 7, 'wfpc2_foc_f48': 8, 'nicmos1_precryo': 9, 'nicmos2_precryo': 10, 'nicmos3_precryo': 11, 'stis_ccd': 12, 'stis_nuv': 13, 'stis_fuv': 14, 'acs_widefield': 15, 'acs_highres': 16, 'acs_coronoffspot': 17, 'acs_solarblind': 18, 'nicmos1_cryo': 19, 'nicmos2_cryo': 20, 'nicmos3_cryo': 21, 'wfc3_uvis': 22, 'wfc3_ir': 23,}
# Spectrum shape choices -> ids.
dict_spectrum_form = {'stellar': 1, 'blackbody': 2, 'powerlaw_nu': 3, 'powerlaw_lam': 4, 'user': 5}
# Stellar spectral types -> ids.
dict_spectrum_stellar = {'o5': 1, 'o8f': 2, 'o6': 3, 'b1v': 4, 'b3v': 5, 'b6v': 6,
'a0v': 7, 'a5v': 8, 'f6v': 9, 'f8v': 10, 'g2v': 11, 'g5v': 12, 'g8v': 13,
'k4v': 14, 'k7v': 15, 'm1.5v': 16, 'm3v': 17}
| dict_camera = {'wfpc1': 1, 'wfpc1_planetary': 2, 'wfpc1_foc_f48': 3, 'wfpc1_foc_f48': 4, 'wfpc2': 5, 'wfpc2_planetary': 6, 'wfpc2_foc_f48': 7, 'wfpc2_foc_f48': 8, 'nicmos1_precryo': 9, 'nicmos2_precryo': 10, 'nicmos3_precryo': 11, 'stis_ccd': 12, 'stis_nuv': 13, 'stis_fuv': 14, 'acs_widefield': 15, 'acs_highres': 16, 'acs_coronoffspot': 17, 'acs_solarblind': 18, 'nicmos1_cryo': 19, 'nicmos2_cryo': 20, 'nicmos3_cryo': 21, 'wfc3_uvis': 22, 'wfc3_ir': 23}
dict_spectrum_form = {'stellar': 1, 'blackbody': 2, 'powerlaw_nu': 3, 'powerlaw_lam': 4, 'user': 5}
dict_spectrum_stellar = {'o5': 1, 'o8f': 2, 'o6': 3, 'b1v': 4, 'b3v': 5, 'b6v': 6, 'a0v': 7, 'a5v': 8, 'f6v': 9, 'f8v': 10, 'g2v': 11, 'g5v': 12, 'g8v': 13, 'k4v': 14, 'k7v': 15, 'm1.5v': 16, 'm3v': 17} |
def log_error(error):
    '''
    Log the given error by printing it to stdout.

    :param error: any object; printed via its str() representation.
    '''
    print(error)
| def log_error(error):
"""
This logging function just print a formated error message
"""
print(error) |
class Depvar:
    """The Depvar object specifies solution-dependent state variables.
    Notes
    -----
    This object can be accessed by:
    .. code-block:: python
        import material
        mdb.models[name].materials[name].depvar
        import odbMaterial
        session.odbs[name].materials[name].depvar
    The corresponding analysis keywords are:
    - DEPVAR
    """
    def __init__(self, deleteVar: int = 0, n: int = 0):
        """This method creates a Depvar object.
        Notes
        -----
        This function can be accessed by:
        .. code-block:: python
            mdb.models[name].materials[name].Depvar
            session.odbs[name].materials[name].Depvar
        Parameters
        ----------
        deleteVar
            An Int specifying the state variable number controlling the element deletion flag. The
            default value is 0.This argument applies only to Abaqus/Explicit analyses.
        n
            An Int specifying the number of solution-dependent state variables required at each
            integration point. The default value is 0.
        Returns
        -------
        A Depvar object.
        Raises
        ------
        RangeError
        """
        # NOTE(review): appears to be an Abaqus API stub -- the actual
        # behavior is supplied by the host application at runtime.
        pass
    def setValues(self):
        """This method modifies the Depvar object.
        Raises
        ------
        RangeError
        """
        # Stub; see note in __init__.
        pass
| class Depvar:
"""The Depvar object specifies solution-dependent state variables.
Notes
-----
This object can be accessed by:
.. code-block:: python
import material
mdb.models[name].materials[name].depvar
import odbMaterial
session.odbs[name].materials[name].depvar
The corresponding analysis keywords are:
- DEPVAR
"""
def __init__(self, deleteVar: int=0, n: int=0):
"""This method creates a Depvar object.
Notes
-----
This function can be accessed by:
.. code-block:: python
mdb.models[name].materials[name].Depvar
session.odbs[name].materials[name].Depvar
Parameters
----------
deleteVar
An Int specifying the state variable number controlling the element deletion flag. The
default value is 0.This argument applies only to Abaqus/Explicit analyses.
n
An Int specifying the number of solution-dependent state variables required at each
integration point. The default value is 0.
Returns
-------
A Depvar object.
Raises
------
RangeError
"""
pass
def set_values(self):
"""This method modifies the Depvar object.
Raises
------
RangeError
"""
pass |
# SMTP relay configuration. Replace the placeholder values before use.
host = 'Your SMTP server host here'
port = 25 # Your SMTP server port here
username = 'Your SMTP server username here'
password = 'Your SMTP server password here'
encryption = 'required' # Your SMTP server security policy here. Must be one of 'required', 'optional', or 'ssl'
# Public API of this settings module.
__all__ = ['host', 'port', 'username', 'password', 'encryption']
| host = 'Your SMTP server host here'
port = 25
username = 'Your SMTP server username here'
password = 'Your SMTP server password here'
encryption = 'required'
__all__ = ['host', 'port', 'username', 'password', 'encryption'] |
class Solution:
    # @param {string} a a number
    # @param {string} b a number
    # @return {string} the result
    def addBinary(self, a, b):
        """Return the sum of two binary strings as a binary string."""
        # Left-pad the shorter operand so both have the same width.
        width = max(len(a), len(b))
        a = a.zfill(width)
        b = b.zfill(width)
        carry = 0
        digits = []
        # Add column by column from the least significant end.
        for x, y in zip(reversed(a), reversed(b)):
            total = int(x) + int(y) + carry
            digits.append(str(total % 2))
            carry = total // 2
        if carry:
            digits.append('1')
        return ''.join(reversed(digits))
| class Solution:
def add_binary(self, a, b):
(alen, blen) = (len(a), len(b))
if alen > blen:
b = '0' * (alen - blen) + b
nlen = alen
else:
a = '0' * (blen - alen) + a
nlen = blen
(res, c) = ('', 0)
for i in range(nlen - 1, -1, -1):
(at, bt) = (int(a[i]), int(b[i]))
if at + bt + c > 1:
res = str(at + bt + c - 2) + res
c = 1
else:
res = str(at + bt + c) + res
c = 0
if c == 1:
res = '1' + res
return res |
# Named color palettes; values are 0xRRGGBB integers keyed by color name.
palettes = {
    "material_design": {
        "red_500": 0xF44336,
        "pink_500": 0xE91E63,
        "purple_500": 0x9C27B0,
        "deep_purple_500": 0x673AB7,
        "indigo_500": 0x3F51B5,
        "blue_500": 0x2196F3,
        "light_blue_500": 0x03A9F4,
        "cyan_500": 0x00BCD4,
        "teal_500": 0x009688,
        "green_500": 0x4CAF50,
        "light_green_500": 0x8BC34A,
        "lime_500": 0xCDDC39,
        "yellow_500": 0xFFEB3B,
        "amber_500": 0xFFC107,
        "orange_500": 0xFF9800,
        "deep_orange_500": 0xFF5722,
        "brown_500": 0x795548,
    }
}
| palettes = {'material_design': {'red_500': 16007990, 'pink_500': 15277667, 'purple_500': 10233776, 'deep_purple_500': 6765239, 'indigo_500': 4149685, 'blue_500': 2201331, 'light_blue_500': 240116, 'cyan_500': 48340, 'teal_500': 38536, 'green_500': 5025616, 'light_green_500': 9159498, 'lime_500': 13491257, 'yellow_500': 16771899, 'amber_500': 16761095, 'orange_500': 16750592, 'deep_orange_500': 16733986, 'brown_500': 7951688}} |
def main(request, response):
    """Event-stream handler driving an open -> reconnect -> close sequence.

    Per-connection state is tracked in a cookie keyed by the ``id`` query
    parameter, so each test run gets an independent state machine.
    """
    try:
        cookie_name = "recon_fail_" + request.GET.first("id")
        headers = [("Content-Type", "text/event-stream")]
        cookie = request.cookies.first(cookie_name, None)
        state = None if cookie is None else cookie.value
        if state == 'opened':
            # Second request: acknowledge the client's reconnect.
            status = (200, "RECONNECT")
            response.set_cookie(cookie_name, "reconnected")
            body = "data: reconnected\n\n"
        elif state == 'reconnected':
            # Third request: 204 tells EventSource to stop reconnecting.
            status = (204, "NO CONTENT (CLOSE)")
            response.delete_cookie(cookie_name)
            body = "data: closed\n\n"  # Will never get through
        else:
            # First request: open the stream and request a short retry delay.
            status = (200, "OPEN")
            response.set_cookie(cookie_name, "opened")
            body = "retry: 2\ndata: opened\n\n"
        return status, headers, body
    except Exception:
        # Match the original's best-effort behavior: any failure is reported
        # to the harness as a plain "error" body.
        return "error"
| def main(request, response):
try:
name = 'recon_fail_' + request.GET.first('id')
headers = [('Content-Type', 'text/event-stream')]
cookie = request.cookies.first(name, None)
state = cookie.value if cookie is not None else None
if state == 'opened':
status = (200, 'RECONNECT')
response.set_cookie(name, 'reconnected')
body = 'data: reconnected\n\n'
elif state == 'reconnected':
status = (204, 'NO CONTENT (CLOSE)')
response.delete_cookie(name)
body = 'data: closed\n\n'
else:
status = (200, 'OPEN')
response.set_cookie(name, 'opened')
body = 'retry: 2\ndata: opened\n\n'
return (status, headers, body)
except Exception as ex:
return 'error' |
#! python3
# __author__ = "YangJiaHao"
# date: 2018/2/14
class Solution:
    def findSubstring(self, s, words):
        """Return start indices of substrings of ``s`` that are a
        concatenation of every word in ``words`` exactly once.

        :type s: str
        :type words: List[str]
        :rtype: List[int]
        """
        if words == []:
            return []
        word_length = len(words[0])
        total_length = word_length * len(words)
        # Multiset of required words.
        required = {}
        for w in words:
            required[w] = required.get(w, 0) + 1
        result = []
        for start in range(len(s) - total_length + 1):
            seen = {}
            pos = start
            matched = True
            # Walk the window one word at a time, counting occurrences.
            while pos < start + total_length:
                chunk = s[pos:pos + word_length]
                if chunk not in required:
                    matched = False
                    break
                seen[chunk] = seen.get(chunk, 0) + 1
                if seen[chunk] > required[chunk]:
                    matched = False
                    break
                pos += word_length
            if matched:
                result.append(start)
        return result
class Solution2:
    def findSubstring(self, s, words):
        """Return start indices of substrings of ``s`` that are a
        concatenation of every word in ``words`` exactly once.

        :type s: str
        :type words: List[str]
        :rtype: List[int]
        """
        if words == []:
            return []
        word_length = len(words[0])
        total_length = word_length * len(words)
        if len(s) < total_length:
            return []
        result = []
        for start in range(len(s) - total_length + 1):
            # Each window consumes its own copy of the word list.
            remaining = words[:]
            offset = start
            while remaining:
                chunk = s[offset:offset + word_length]
                if chunk not in remaining:
                    break
                remaining.remove(chunk)
                offset += word_length
            if not remaining:
                result.append(start)
        return result
if __name__ == '__main__':
    # Ad-hoc smoke test: print the match positions of 'aa'+'bb'
    # concatenations within the sample string.
    s = "aabbaacbbaacdyangacbabc"
    words = ['aa', 'bb']
    so = Solution()
    res = so.findSubstring(s, words)
    print(res)
| class Solution:
def find_substring(self, s, words):
"""
:type s: str
:type words: List[str]
:rtype: List[int]
"""
if words == []:
return []
word_length = len(words[0])
words_length = word_length * len(words)
dic = {}
for word in words:
dic[word] = dic[word] + 1 if word in dic else 1
res = []
def find(s, l, r, curr):
if l >= r:
return True
word = s[l:l + word_length]
if word in dic:
curr[word] = curr[word] + 1 if word in curr else 1
if curr[word] > dic[word]:
return False
else:
return find(s, l + word_length, r, curr)
else:
return False
for i in range(len(s) - words_length + 1):
if find(s, i, i + words_length, {}):
res.append(i)
return res
class Solution2:
    # snake_case variant of the camelCase findSubstring defined earlier.
    def find_substring(self, s, words):
        """
        :type s: str
        :type words: List[str]
        :rtype: List[int]
        """
        if words == []:
            return []
        word_length = len(words[0])
        words_length = word_length * len(words)
        if len(s) < words_length:
            return []
        res = []
        def find(s, words):
            # Greedily consume the window word by word; True when all used.
            if len(words) == 0:
                return True
            if s[:word_length] in words:
                words.remove(s[:word_length])
                return find(s[word_length:], words)
            else:
                return False
        for i in range(len(s) - words_length + 1):
            # words[:] copies the list so each window gets a fresh multiset.
            if find(s[i:i + words_length], words[:]):
                res.append(i)
        return res
if __name__ == '__main__':
    s = 'aabbaacbbaacdyangacbabc'
    words = ['aa', 'bb']
    # BUG FIX: the original called the undefined name ``solution()``
    # (NameError) and then the camelCase ``findSubstring`` on a class whose
    # method is the snake_case ``find_substring``.
    so = Solution()
    res = so.find_substring(s, words)
    print(res)
# Python - 3.6.0
paradise = God()
test.assert_equals(isinstance(paradise[0], Man), True, 'First object are a man')
| paradise = god()
test.assert_equals(isinstance(paradise[0], Man), True, 'First object are a man') |
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
# pylint: disable=unused-argument
# EXAMPLE: /RedisEnterprise/put/RedisEnterpriseCreate
# NOTE: Functions will always first be looked up in manual/custom.py followed by generated/custom.py
def step_create(test, checks=None, cache_num=1):
    """Create a Redis Enterprise cluster via the Azure CLI under test.

    Three variants, selected by ``test.kwargs``: ``no_database`` creates a
    cluster only; ``geo-replication`` creates one of two geo-linked caches
    (chosen by ``cache_num``); otherwise a fully-configured cluster with a
    database is created. ``checks`` are passed through to ``test.cmd``.
    """
    if checks is None:
        checks = []
    if test.kwargs.get('no_database'):
        test.cmd('az redisenterprise create '
                 '--cluster-name "{cluster}" '
                 '--sku "EnterpriseFlash_F300" '
                 '--tags tag1="value1" '
                 '--no-database '
                 '--resource-group "{rg}"',
                 checks=checks)
    elif test.kwargs.get('geo-replication'):
        if cache_num == 1:
            # First geo cache: cluster only; database added later by
            # step_database_create.
            test.cmd('az redisenterprise create '
                     '--cluster-name "{cluster31}" '
                     '--sku "EnterpriseFlash_F300" '
                     '--tags tag1="value1" '
                     '--no-database '
                     '--resource-group "{rg31}"',
                     checks=checks)
        elif cache_num == 2:
            # Second geo cache: created with a database linked to both caches.
            test.cmd('az redisenterprise create '
                     '--location "West US" '
                     '--cluster-name "{cluster32}" '
                     '--sku "EnterpriseFlash_F300" '
                     '--client-protocol "Encrypted" '
                     '--clustering-policy "EnterpriseCluster" '
                     '--eviction-policy "NoEviction" '
                     '--group-nickname "groupName" '
                     '--linked-databases id="/subscriptions/{subscription}/resourceGroups/{rg31}/providers/Microsoft.Cache/redisEnterprise/{cluster31}/databases/{database}" '
                     '--linked-databases id="/subscriptions/{subscription}/resourceGroups/{rg32}/providers/Microsoft.Cache/redisEnterprise/{cluster32}/databases/{database}" '
                     '--port 10000 '
                     '--resource-group "{rg32}"',
                     checks=checks)
    else:
        test.cmd('az redisenterprise create '
                 '--cluster-name "{cluster}" '
                 '--sku "Enterprise_E20" '
                 '--capacity 4 '
                 '--tags tag1="value1" '
                 '--zones "1" "2" "3" '
                 '--minimum-tls-version "1.2" '
                 '--client-protocol "Encrypted" '
                 '--clustering-policy "EnterpriseCluster" '
                 '--eviction-policy "NoEviction" '
                 '--modules name="RedisBloom" '
                 '--modules name="RedisTimeSeries" '
                 '--modules name="RediSearch" '
                 '--port 10000 '
                 '--resource-group "{rg}"',
                 checks=checks)
# EXAMPLE: /Databases/post/RedisEnterpriseDatabasesForceUnlink - unlinking a database during a regional outage
def step_database_force_unlink(test, checks=None):
    """Force-unlink the first geo cache's database from the second cluster
    (simulates unlinking during a regional outage).

    NOTE(review): the unlink id interpolates '{myRedisEnterprise2}', unlike
    the '{cluster31}' used elsewhere -- confirm the test kwargs define it.
    """
    if checks is None:
        checks = []
    test.cmd('az redisenterprise database force-unlink '
             '--cluster-name "{cluster32}" '
             '--unlink-ids "/subscriptions/{subscription}/resourceGroups/{rg31}/providers/Microsoft.Cache/redisEnterprise/{'
             'myRedisEnterprise2}/databases/{database}" '
             '--resource-group "{rg32}"',
             checks=checks)
# EXAMPLE: /RedisEnterprise/get/RedisEnterpriseGet
def step_show(test, checks=None):
    """Show a cluster; the geo-replication scenario targets cluster32/rg32."""
    checks = [] if checks is None else checks
    geo = test.kwargs.get('geo-replication')
    # Pick the placeholder pair for the scenario under test.
    cluster = '{cluster32}' if geo else '{cluster}'
    group = '{rg32}' if geo else '{rg}'
    command = ('az redisenterprise show --cluster-name "' + cluster
               + '" --resource-group "' + group + '"')
    test.cmd(command, checks=checks)
# EXAMPLE: /RedisEnterprise/delete/RedisEnterpriseDelete
def step_delete(test, checks=None):
    """Delete the test cluster(s); geo-replication removes both linked clusters."""
    checks = [] if checks is None else checks
    if test.kwargs.get('geo-replication'):
        targets = [('{cluster31}', '{rg31}'), ('{cluster32}', '{rg32}')]
    else:
        targets = [('{cluster}', '{rg}')]
    # Issue one delete per (cluster, resource-group) pair.
    for cluster, group in targets:
        command = ('az redisenterprise delete -y --cluster-name "' + cluster
                   + '" --resource-group "' + group + '"')
        test.cmd(command, checks=checks)
# EXAMPLE: /Databases/put/RedisEnterpriseDatabasesCreate
def step_database_create(test, checks=None):
    """Create a database; the geo-replication variant links cluster31's database."""
    checks = [] if checks is None else checks
    if test.kwargs.get('geo-replication'):
        parts = [
            'az redisenterprise database create',
            '--cluster-name "{cluster31}"',
            '--client-protocol "Encrypted"',
            '--clustering-policy "EnterpriseCluster"',
            '--eviction-policy "NoEviction"',
            '--group-nickname "groupName"',
            '--linked-databases id="/subscriptions/{subscription}/resourceGroups/{rg31}'
            '/providers/Microsoft.Cache/redisEnterprise/{cluster31}/databases/{database}"',
            '--port 10000',
            '--resource-group "{rg31}"',
        ]
    else:
        parts = [
            'az redisenterprise database create',
            '--cluster-name "{cluster}"',
            '--client-protocol "Plaintext"',
            '--clustering-policy "OSSCluster"',
            '--eviction-policy "AllKeysLRU"',
            '--port 10000',
            '--resource-group "{rg}"',
        ]
    test.cmd(' '.join(parts), checks=checks)
def step_database_force_unlink(test, checks=None):
    """Force-unlink cluster31's database from cluster32 (regional-outage recovery).

    NOTE(review): this redefines the step_database_force_unlink declared earlier
    in the file; only this later definition is effective at import time.
    """
    checks = [] if checks is None else checks
    unlink_id = ('/subscriptions/{subscription}/resourceGroups/{rg31}'
                 '/providers/Microsoft.Cache/redisEnterprise/{cluster31}'
                 '/databases/{database}')
    command = ('az redisenterprise database force-unlink '
               '--cluster-name "{cluster32}" '
               '--unlink-ids "' + unlink_id + '" '
               '--resource-group "{rg32}"')
    test.cmd(command, checks=checks)
# EXAMPLE: /Databases/get/RedisEnterpriseDatabasesGet
def step_database_show(test, checks=None):
    """Fetch the database of the primary test cluster."""
    test.cmd(
        'az redisenterprise database show '
        '--cluster-name "{cluster}" '
        '--resource-group "{rg}"',
        checks=checks if checks is not None else [],
    )
# EXAMPLE: /Databases/get/RedisEnterpriseDatabasesListByCluster
def step_database_list(test, checks=None):
    """List all databases in the primary test cluster."""
    if checks is None:
        checks = []
    command = ' '.join([
        'az redisenterprise database list',
        '--cluster-name "{cluster}"',
        '--resource-group "{rg}"',
    ])
    test.cmd(command, checks=checks)
# EXAMPLE: /Databases/post/RedisEnterpriseDatabasesListKeys
def step_database_list_keys(test, checks=None):
    """List the access keys of the primary test cluster's database."""
    if checks is None:
        checks = []
    command = ' '.join([
        'az redisenterprise database list-keys',
        '--cluster-name "{cluster}"',
        '--resource-group "{rg}"',
    ])
    test.cmd(command, checks=checks)
# EXAMPLE: /Databases/post/RedisEnterpriseDatabasesRegenerateKey
def step_database_regenerate_key(test, checks=None):
    """Regenerate the primary access key of the test cluster's database."""
    checks = [] if checks is None else checks
    command = ('az redisenterprise database regenerate-key'
               ' --cluster-name "{cluster}"'
               ' --key-type "Primary"'
               ' --resource-group "{rg}"')
    test.cmd(command, checks=checks)
# EXAMPLE: /Databases/delete/RedisEnterpriseDatabasesDelete
def step_database_delete(test, checks=None):
    """Delete the database(s); geo-replication tears down both clusters' copies."""
    checks = [] if checks is None else checks
    if test.kwargs.get('geo-replication'):
        targets = (('{cluster31}', '{rg31}'), ('{cluster32}', '{rg32}'))
    else:
        targets = (('{cluster}', '{rg}'),)
    # One delete per (cluster, resource-group) pair, same checks for each.
    for cluster, group in targets:
        test.cmd('az redisenterprise database delete -y --cluster-name "'
                 + cluster + '" --resource-group "' + group + '"',
                 checks=checks)
| def step_create(test, checks=None, cache_num=1):
if checks is None:
checks = []
if test.kwargs.get('no_database'):
test.cmd('az redisenterprise create --cluster-name "{cluster}" --sku "EnterpriseFlash_F300" --tags tag1="value1" --no-database --resource-group "{rg}"', checks=checks)
elif test.kwargs.get('geo-replication'):
if cache_num == 1:
test.cmd('az redisenterprise create --cluster-name "{cluster31}" --sku "EnterpriseFlash_F300" --tags tag1="value1" --no-database --resource-group "{rg31}"', checks=checks)
elif cache_num == 2:
test.cmd('az redisenterprise create --location "West US" --cluster-name "{cluster32}" --sku "EnterpriseFlash_F300" --client-protocol "Encrypted" --clustering-policy "EnterpriseCluster" --eviction-policy "NoEviction" --group-nickname "groupName" --linked-databases id="/subscriptions/{subscription}/resourceGroups/{rg31}/providers/Microsoft.Cache/redisEnterprise/{cluster31}/databases/{database}" --linked-databases id="/subscriptions/{subscription}/resourceGroups/{rg32}/providers/Microsoft.Cache/redisEnterprise/{cluster32}/databases/{database}" --port 10000 --resource-group "{rg32}"', checks=checks)
else:
test.cmd('az redisenterprise create --cluster-name "{cluster}" --sku "Enterprise_E20" --capacity 4 --tags tag1="value1" --zones "1" "2" "3" --minimum-tls-version "1.2" --client-protocol "Encrypted" --clustering-policy "EnterpriseCluster" --eviction-policy "NoEviction" --modules name="RedisBloom" --modules name="RedisTimeSeries" --modules name="RediSearch" --port 10000 --resource-group "{rg}"', checks=checks)
def step_database_force_unlink(test, checks=None):
if checks is None:
checks = []
test.cmd('az redisenterprise database force-unlink --cluster-name "{cluster32}" --unlink-ids "/subscriptions/{subscription}/resourceGroups/{rg31}/providers/Microsoft.Cache/redisEnterprise/{myRedisEnterprise2}/databases/{database}" --resource-group "{rg32}"', checks=checks)
def step_show(test, checks=None):
if checks is None:
checks = []
if test.kwargs.get('geo-replication'):
test.cmd('az redisenterprise show --cluster-name "{cluster32}" --resource-group "{rg32}"', checks=checks)
else:
test.cmd('az redisenterprise show --cluster-name "{cluster}" --resource-group "{rg}"', checks=checks)
def step_delete(test, checks=None):
if checks is None:
checks = []
if test.kwargs.get('geo-replication'):
test.cmd('az redisenterprise delete -y --cluster-name "{cluster31}" --resource-group "{rg31}"', checks=checks)
test.cmd('az redisenterprise delete -y --cluster-name "{cluster32}" --resource-group "{rg32}"', checks=checks)
else:
test.cmd('az redisenterprise delete -y --cluster-name "{cluster}" --resource-group "{rg}"', checks=checks)
def step_database_create(test, checks=None):
if checks is None:
checks = []
if test.kwargs.get('geo-replication'):
test.cmd('az redisenterprise database create --cluster-name "{cluster31}" --client-protocol "Encrypted" --clustering-policy "EnterpriseCluster" --eviction-policy "NoEviction" --group-nickname "groupName" --linked-databases id="/subscriptions/{subscription}/resourceGroups/{rg31}/providers/Microsoft.Cache/redisEnterprise/{cluster31}/databases/{database}" --port 10000 --resource-group "{rg31}"', checks=checks)
else:
test.cmd('az redisenterprise database create --cluster-name "{cluster}" --client-protocol "Plaintext" --clustering-policy "OSSCluster" --eviction-policy "AllKeysLRU" --port 10000 --resource-group "{rg}"', checks=checks)
def step_database_force_unlink(test, checks=None):
if checks is None:
checks = []
test.cmd('az redisenterprise database force-unlink --cluster-name "{cluster32}" --unlink-ids "/subscriptions/{subscription}/resourceGroups/{rg31}/providers/Microsoft.Cache/redisEnterprise/{cluster31}/databases/{database}" --resource-group "{rg32}"', checks=checks)
def step_database_show(test, checks=None):
if checks is None:
checks = []
test.cmd('az redisenterprise database show --cluster-name "{cluster}" --resource-group "{rg}"', checks=checks)
def step_database_list(test, checks=None):
if checks is None:
checks = []
test.cmd('az redisenterprise database list --cluster-name "{cluster}" --resource-group "{rg}"', checks=checks)
def step_database_list_keys(test, checks=None):
if checks is None:
checks = []
test.cmd('az redisenterprise database list-keys --cluster-name "{cluster}" --resource-group "{rg}"', checks=checks)
def step_database_regenerate_key(test, checks=None):
if checks is None:
checks = []
test.cmd('az redisenterprise database regenerate-key --cluster-name "{cluster}" --key-type "Primary" --resource-group "{rg}"', checks=checks)
def step_database_delete(test, checks=None):
if checks is None:
checks = []
if test.kwargs.get('geo-replication'):
test.cmd('az redisenterprise database delete -y --cluster-name "{cluster31}" --resource-group "{rg31}"', checks=checks)
test.cmd('az redisenterprise database delete -y --cluster-name "{cluster32}" --resource-group "{rg32}"', checks=checks)
else:
test.cmd('az redisenterprise database delete -y --cluster-name "{cluster}" --resource-group "{rg}"', checks=checks) |
#input
# 15
# 2 4 3 6 7 9 1 5 8
# 9 3 7 8 6 1 5 2 4
# 1 4 9 5 6 3 2 8 7
# 1 6 8 3 4 2 9 7 5
# 7 4 6 5 1 9 3 8 2
# 8 1 7 5 6 3 9 2 4
# 2 4 3 9 7 8 5 1 6
# 6 3 1 9 2 7 4 5 8
# 1 4 7 6 8 9 5 3 2
# 7 9 1 8 5 6 3 2 4
# 1 3 9 6 8 2 5 7 4
# 8 5 4 6 3 7 2 1 9
# 7 2 4 5 8 1 9 3 6
# 5 2 6 1 8 4 9 3 7
# 4 8 5 3 2 6 1 7 9
class Game:
    """Minimal tic-tac-toe board: a 3x3 grid of marks or None.

    Fixes over the previous version: the board is now per-instance state
    created in __init__ (the old class-level ``board = []`` was shared
    across instances and unusable before initialize()), and None checks
    use ``is``/``is not`` instead of ``==``.
    """

    def __init__(self):
        # Per-instance board; instances never share state through the class.
        self.board = []
        self.initialize()

    def initialize(self):
        """Reset the board to an empty 3x3 grid."""
        self.board = [[None] * 3 for _ in range(3)]

    def mark(self, pos, p):
        """Place player p's mark at flat position pos (0..8, row-major)."""
        self.board[pos // 3][pos % 3] = p

    def is_game_over(self):
        """Return True if any row, column, or diagonal holds three equal marks."""
        b = self.board
        # Rows: skip rows whose first cell is still empty.
        for i in range(3):
            if b[i][0] is not None and b[i][0] == b[i][1] == b[i][2]:
                return True
        # Columns: skip columns whose top cell is still empty.
        for i in range(3):
            if b[0][i] is not None and b[0][i] == b[1][i] == b[2][i]:
                return True
        # Diagonals: both pass through the center, so an empty center rules them out.
        if b[1][1] is None:
            return False
        return b[0][0] == b[1][1] == b[2][2] or b[2][0] == b[1][1] == b[0][2]
# Driver: for each of n games read from stdin, replay a 1-based move list and
# print the 1-based index of the winning move (0 if nobody wins), all values
# space-separated on one line.
game = Game()
n = int(input())  # number of games to replay
for i in range(0, n):
    game.initialize()  # fresh board for every game
    moves = [int(x) for x in input().split()]
    for j in range(0, len(moves)):
        player = None
        # Players alternate marks; 'x' always moves first.
        if j % 2 == 0:
            player = 'x'
        else:
            player = 'o'
        game.mark(moves[j] - 1, player)  # input positions are 1-based
        if game.is_game_over():
            # Move j (0-based) ended the game: report its 1-based index.
            print(str(j+1), "", end="")
            break
        if j == len(moves) - 1:
            # All moves played without a winner.
            print(0, "", end="") | class Game:
board = []
def initialize(self):
self.board = []
for i in range(0, 3):
self.board.append([None, None, None])
def mark(self, pos, p):
self.board[pos // 3][pos % 3] = p
def is_game_over(self):
for i in range(0, 3):
if self.board[i][0] == None:
continue
if self.board[i][0] == self.board[i][1] and self.board[i][1] == self.board[i][2]:
return True
for i in range(0, 3):
if self.board[0][i] == None:
continue
if self.board[0][i] == self.board[1][i] and self.board[1][i] == self.board[2][i]:
return True
if self.board[1][1] == None:
return False
if self.board[0][0] == self.board[1][1] and self.board[1][1] == self.board[2][2]:
return True
if self.board[2][0] == self.board[1][1] and self.board[1][1] == self.board[0][2]:
return True
return False
game = game()
n = int(input())
for i in range(0, n):
game.initialize()
moves = [int(x) for x in input().split()]
for j in range(0, len(moves)):
player = None
if j % 2 == 0:
player = 'x'
else:
player = 'o'
game.mark(moves[j] - 1, player)
if game.is_game_over():
print(str(j + 1), '', end='')
break
if j == len(moves) - 1:
print(0, '', end='') |
#!/usr/bin/env python3
# Write a program that prints the reverse-complement of a DNA sequence
# You must use a loop and conditional


def reverse_complement(seq):
    """Return the reverse-complement of *seq* (uppercase A/C/G/T).

    Extracted from the inline script so it is reusable and testable.
    Uses an explicit loop and conditionals per the exercise constraints;
    characters outside ACGT are skipped, matching the original behavior.
    """
    out = ''
    for i in range(len(seq) - 1, -1, -1):  # walk the sequence backwards
        nt = seq[i]
        if nt == 'A':
            out += 'T'
        elif nt == 'T':
            out += 'A'
        elif nt == 'C':
            out += 'G'
        elif nt == 'G':
            out += 'C'
    return out


dna = 'ACTGAAAAAAAAAAA'
rvdna = reverse_complement(dna)
print(rvdna)

"""
python3 anti.py
TTTTTTTTTTTCAGT
"""
| dna = 'ACTGAAAAAAAAAAA'
rvdna = ''
for i in range(len(dna) - 1, -1, -1):
nt = dna[i]
if nt == 'A':
rvdna += 'T'
elif nt == 'T':
rvdna += 'A'
elif nt == 'C':
rvdna += 'G'
elif nt == 'G':
rvdna += 'C'
print(rvdna)
'\npython3 anti.py\nTTTTTTTTTTTCAGT\n' |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.