content stringlengths 7 1.05M | fixed_cases stringlengths 1 1.28M |
|---|---|
def make_pizza(*toppings):
print(toppings)
# for item in toppings:
# print("Add " + item)
| def make_pizza(*toppings):
print(toppings) |
class Solution:
def numSplits(self, s: str) -> int:
answer=0
left=[0]*26
right=[0]*26
for x in s:
right[ord(x)-ord('a')]+=1
leftNum=0
rightNum=sum(x > 0 for x in right)
for x in s:
index=ord(x)-ord('a')
if left[index]==0:
leftNum+=1
left[index]+=1
right[index]-=1
if right[index]==0:
rightNum-=1
if leftNum==rightNum:
answer+=1
return answer
| class Solution:
def num_splits(self, s: str) -> int:
answer = 0
left = [0] * 26
right = [0] * 26
for x in s:
right[ord(x) - ord('a')] += 1
left_num = 0
right_num = sum((x > 0 for x in right))
for x in s:
index = ord(x) - ord('a')
if left[index] == 0:
left_num += 1
left[index] += 1
right[index] -= 1
if right[index] == 0:
right_num -= 1
if leftNum == rightNum:
answer += 1
return answer |
#!/usr/bin/python3
# -*- mode: python; coding: utf-8 -*-
VERSION=1
TEMPLATE_KEY_PREFIX = 'jflow.template'
SEPARATOR_KEY = '.'
KEY_VERSION = 'version'
KEY_FORK = 'fork'
KEY_UPSTREAM = 'upstream'
KEY_PUBLIC = 'public'
KEY_DEBUG = 'debug'
KEY_REMOTE = 'remote'
KEY_MERGE_TO = 'merge-to'
KEY_EXTRA = 'extra'
# Template-only keys
KEY_PUBLIC_PREFIX = 'public-prefix'
KEY_PUBLIC_SUFFIX = 'public-suffix'
KEY_DEBUG_PREFIX = 'debug-prefix'
KEY_DEBUG_SUFFIX = 'debug-suffix'
KEY_REMOTE_PREFIX = 'remote-prefix'
KEY_REMOTE_SUFFIX = 'remote-suffix'
DEFAULT_DEBUG_PREFIX = 'feature/'
DEFAULT_DEBUG_SUFFIX = '.debug'
STGIT_SUFFIX = '.stgit'
def make_key(*items):
return SEPARATOR_KEY.join(items)
def make_prefix(*items):
return make_key(*items) + SEPARATOR_KEY
def make_suffix(*items):
return SEPARATOR_KEY + make_key(*items)
def branch_key_base(b):
return make_key('branch', b, 'jflow')
def branch_key_version(b):
return make_key(branch_key_base(b), KEY_VERSION)
def branch_key_fork(b):
return make_key(branch_key_base(b), KEY_FORK)
def branch_key_upstream(b):
return make_key(branch_key_base(b), KEY_UPSTREAM)
def branch_key_public(b):
return make_key(branch_key_base(b), KEY_PUBLIC)
def branch_key_debug(b):
return make_key(branch_key_base(b), KEY_DEBUG)
def branch_key_debug_prefix(b):
return make_key(branch_key_base(b), KEY_DEBUG_PREFIX)
def branch_key_debug_suffix(b):
return make_key(branch_key_base(b), KEY_DEBUG_SUFFIX)
def branch_key_remote(b):
return make_key(branch_key_base(b), KEY_REMOTE)
def branch_key_extra(b):
return make_key(branch_key_base(b), KEY_EXTRA)
def branch_key_merge_to(b):
return make_key(branch_key_base(b), KEY_MERGE_TO)
def branch_key_stgit_version(b):
return make_key('branch', b, 'stgit', 'stackformatversion')
def branch_key_description(b):
return make_key('branch', b, 'description')
def branch_stgit_name(b):
return b + STGIT_SUFFIX
def remote_key_url(r):
return make_key('remote', r, 'url')
| version = 1
template_key_prefix = 'jflow.template'
separator_key = '.'
key_version = 'version'
key_fork = 'fork'
key_upstream = 'upstream'
key_public = 'public'
key_debug = 'debug'
key_remote = 'remote'
key_merge_to = 'merge-to'
key_extra = 'extra'
key_public_prefix = 'public-prefix'
key_public_suffix = 'public-suffix'
key_debug_prefix = 'debug-prefix'
key_debug_suffix = 'debug-suffix'
key_remote_prefix = 'remote-prefix'
key_remote_suffix = 'remote-suffix'
default_debug_prefix = 'feature/'
default_debug_suffix = '.debug'
stgit_suffix = '.stgit'
def make_key(*items):
return SEPARATOR_KEY.join(items)
def make_prefix(*items):
return make_key(*items) + SEPARATOR_KEY
def make_suffix(*items):
return SEPARATOR_KEY + make_key(*items)
def branch_key_base(b):
return make_key('branch', b, 'jflow')
def branch_key_version(b):
return make_key(branch_key_base(b), KEY_VERSION)
def branch_key_fork(b):
return make_key(branch_key_base(b), KEY_FORK)
def branch_key_upstream(b):
return make_key(branch_key_base(b), KEY_UPSTREAM)
def branch_key_public(b):
return make_key(branch_key_base(b), KEY_PUBLIC)
def branch_key_debug(b):
return make_key(branch_key_base(b), KEY_DEBUG)
def branch_key_debug_prefix(b):
return make_key(branch_key_base(b), KEY_DEBUG_PREFIX)
def branch_key_debug_suffix(b):
return make_key(branch_key_base(b), KEY_DEBUG_SUFFIX)
def branch_key_remote(b):
return make_key(branch_key_base(b), KEY_REMOTE)
def branch_key_extra(b):
return make_key(branch_key_base(b), KEY_EXTRA)
def branch_key_merge_to(b):
return make_key(branch_key_base(b), KEY_MERGE_TO)
def branch_key_stgit_version(b):
return make_key('branch', b, 'stgit', 'stackformatversion')
def branch_key_description(b):
return make_key('branch', b, 'description')
def branch_stgit_name(b):
return b + STGIT_SUFFIX
def remote_key_url(r):
return make_key('remote', r, 'url') |
while True:
print('Who are you?')
name=input()
if name!='Joe':
continue
print('Hello,Joe.What is the password?(it is a fish)')
password=input()
if password=='swordfish':
break
print('Access granted')
| while True:
print('Who are you?')
name = input()
if name != 'Joe':
continue
print('Hello,Joe.What is the password?(it is a fish)')
password = input()
if password == 'swordfish':
break
print('Access granted') |
def GenerateConfig(context):
resources = []
haEnabled = context.properties['num_instances'] > 1
# Enabling services
services = {
'name': 'enable_services',
'type': 'enable_services.py',
'properties': {
'services': context.properties['services']
}
}
# Networking provisioning
networking = {
'name': 'networking',
'type': 'networking.py',
'properties': {
'ha_enabled': haEnabled,
'region': context.properties['region'],
'mgmt_network': context.properties['mgmt_network'],
'custom_route_tag': context.properties['custom_route_tag'],
# Using email from service_account's output
'service_account': '$(ref.service_accounts.email)',
'service_port': context.properties['service_port'],
'networks': context.properties['networks']
}
}
# Additional networtks
if 'inside_network' in context.properties:
networking['properties']['inside_network'] = context.properties['inside_network']
if 'outside_network' in context.properties:
networking['properties']['outside_network'] = context.properties['outside_network']
if 'dmz1_network' in context.properties:
networking['properties']['dmz1_network'] = context.properties['dmz1_network']
if 'dmz2_network' in context.properties:
networking['properties']['dmz2_network'] = context.properties['dmz2_network']
# Service Account
sa = {
'name': 'service_accounts',
'type': 'service_accounts.py',
'properties': {
'account_id': context.properties['account_id'],
'display_name': context.properties['display_name']
},
'metadata': {
'dependsOn': ['enable_services']
}
}
# Appliance VMs
vm = {
'name': 'vm',
'type': 'vm.py',
'properties': {
'vm_zones': context.properties['vm_zones'],
'num_instances': context.properties['num_instances'],
'cisco_product_version': context.properties['cisco_product_version'],
'vm_machine_type': context.properties['vm_machine_type'],
'vm_instance_labels': context.properties['vm_instance_labels'],
'vm_instance_tags': context.properties['vm_instance_tags'],
'admin_ssh_pub_key': context.properties['admin_ssh_pub_key'],
'day_0_config': context.properties['day_0_config'],
'service_account': '$(ref.service_accounts.email)',
'networks': context.properties['networks']
},
'metadata': {
'dependsOn': ['networking']
}
}
# Prepare all resources to be provisioned
resources += [services, sa, networking, vm]
outputs = [{
'name': 'vm_urls',
'value': '$(ref.vm.instance_urls)'
},{
'name': 'vm_external_ips',
'value': '$(ref.vm.vm_external_ips)'
}]
if haEnabled:
resources.append({
'name': 'load_balancer',
'type': 'load_balancer.py',
'properties': {
'region': context.properties['region'],
'num_instances': context.properties['num_instances'],
'vm_zones': context.properties['vm_zones'],
'named_ports': context.properties['named_ports'],
'service_port': context.properties['service_port'],
'allow_global_access': context.properties['allow_global_access'],
'inside_network': context.properties['inside_network'],
'use_internal_lb': context.properties['use_internal_lb']
# 'instance_urls': '$(ref.vm.instance_urls)'
},
'metadata': {
'dependsOn': ['vm']
}
})
outputs.append({
'name': 'external_lb_ip',
'value': '$(ref.load_balancer.external_lb_ip)'
})
if context.properties['use_internal_lb']:
outputs.append({
'name': 'internal_lb_ip',
'value': '$(ref.load_balancer.internal_lb_ip)'
})
return {'resources': resources, 'outputs': outputs} | def generate_config(context):
resources = []
ha_enabled = context.properties['num_instances'] > 1
services = {'name': 'enable_services', 'type': 'enable_services.py', 'properties': {'services': context.properties['services']}}
networking = {'name': 'networking', 'type': 'networking.py', 'properties': {'ha_enabled': haEnabled, 'region': context.properties['region'], 'mgmt_network': context.properties['mgmt_network'], 'custom_route_tag': context.properties['custom_route_tag'], 'service_account': '$(ref.service_accounts.email)', 'service_port': context.properties['service_port'], 'networks': context.properties['networks']}}
if 'inside_network' in context.properties:
networking['properties']['inside_network'] = context.properties['inside_network']
if 'outside_network' in context.properties:
networking['properties']['outside_network'] = context.properties['outside_network']
if 'dmz1_network' in context.properties:
networking['properties']['dmz1_network'] = context.properties['dmz1_network']
if 'dmz2_network' in context.properties:
networking['properties']['dmz2_network'] = context.properties['dmz2_network']
sa = {'name': 'service_accounts', 'type': 'service_accounts.py', 'properties': {'account_id': context.properties['account_id'], 'display_name': context.properties['display_name']}, 'metadata': {'dependsOn': ['enable_services']}}
vm = {'name': 'vm', 'type': 'vm.py', 'properties': {'vm_zones': context.properties['vm_zones'], 'num_instances': context.properties['num_instances'], 'cisco_product_version': context.properties['cisco_product_version'], 'vm_machine_type': context.properties['vm_machine_type'], 'vm_instance_labels': context.properties['vm_instance_labels'], 'vm_instance_tags': context.properties['vm_instance_tags'], 'admin_ssh_pub_key': context.properties['admin_ssh_pub_key'], 'day_0_config': context.properties['day_0_config'], 'service_account': '$(ref.service_accounts.email)', 'networks': context.properties['networks']}, 'metadata': {'dependsOn': ['networking']}}
resources += [services, sa, networking, vm]
outputs = [{'name': 'vm_urls', 'value': '$(ref.vm.instance_urls)'}, {'name': 'vm_external_ips', 'value': '$(ref.vm.vm_external_ips)'}]
if haEnabled:
resources.append({'name': 'load_balancer', 'type': 'load_balancer.py', 'properties': {'region': context.properties['region'], 'num_instances': context.properties['num_instances'], 'vm_zones': context.properties['vm_zones'], 'named_ports': context.properties['named_ports'], 'service_port': context.properties['service_port'], 'allow_global_access': context.properties['allow_global_access'], 'inside_network': context.properties['inside_network'], 'use_internal_lb': context.properties['use_internal_lb']}, 'metadata': {'dependsOn': ['vm']}})
outputs.append({'name': 'external_lb_ip', 'value': '$(ref.load_balancer.external_lb_ip)'})
if context.properties['use_internal_lb']:
outputs.append({'name': 'internal_lb_ip', 'value': '$(ref.load_balancer.internal_lb_ip)'})
return {'resources': resources, 'outputs': outputs} |
def is_equivalent(str_a, str_b):
if len(str_a) % 2 != 0 or len(str_b) % 2 != 0:
if str_a == str_b:
return True
return False
elif str_a == str_b:
return True
else:
len_a = len(str_a)
len_b = len(str_b)
a1_i, a1_j = 0, (len_a // 2)
a2_i, a2_j = (len_a // 2), (len_a)
b1_i, b1_j = 0, (len_b // 2)
b2_i, b2_j = (len_b // 2), (len_b)
a1 = str_a[a1_i:a1_j]
a2 = str_a[a2_i:a2_j]
b1 = str_b[b1_i:b1_j]
b2 = str_b[b2_i:b2_j]
# print(a2_i)
# print(a2_j)
# print("a1: {} | a2: {}".format(a1, a2))
# print("b1: {} | b2: {}".format(b1, b2))
if is_equivalent(a1, b1) and is_equivalent(a2, b2):
return True
elif is_equivalent(a1, b2) and is_equivalent(a2, b1):
return True
return False
str_a = [el for el in input()]
str_b = [el for el in input()]
if is_equivalent(str_a, str_b):
print("YES")
else:
print("NO") | def is_equivalent(str_a, str_b):
if len(str_a) % 2 != 0 or len(str_b) % 2 != 0:
if str_a == str_b:
return True
return False
elif str_a == str_b:
return True
else:
len_a = len(str_a)
len_b = len(str_b)
(a1_i, a1_j) = (0, len_a // 2)
(a2_i, a2_j) = (len_a // 2, len_a)
(b1_i, b1_j) = (0, len_b // 2)
(b2_i, b2_j) = (len_b // 2, len_b)
a1 = str_a[a1_i:a1_j]
a2 = str_a[a2_i:a2_j]
b1 = str_b[b1_i:b1_j]
b2 = str_b[b2_i:b2_j]
if is_equivalent(a1, b1) and is_equivalent(a2, b2):
return True
elif is_equivalent(a1, b2) and is_equivalent(a2, b1):
return True
return False
str_a = [el for el in input()]
str_b = [el for el in input()]
if is_equivalent(str_a, str_b):
print('YES')
else:
print('NO') |
def main():
# Store the total sum of multiples of 3 and 5
sum_multiples = 0
# Loop through every number less than 1000
for num in range(1,1000):
# Check if number is divisible by 3 or 5, i.e. a multiple
if num % 3 == 0 or num % 5 == 0:
sum_multiples += num
print(sum_multiples)
if __name__ == '__main__':
main() | def main():
sum_multiples = 0
for num in range(1, 1000):
if num % 3 == 0 or num % 5 == 0:
sum_multiples += num
print(sum_multiples)
if __name__ == '__main__':
main() |
# 177
# 10
# print(divmod(177, 10))
user = int(input())
user2 = int(input())
a = divmod(user, user2)
print(a[0])
print(a[1])
# for i in a:
# print(''.join(a[i]))
print(divmod(user,user2))
# print(divmod(user)) | user = int(input())
user2 = int(input())
a = divmod(user, user2)
print(a[0])
print(a[1])
print(divmod(user, user2)) |
{
"targets": [{
"target_name": "node_hge",
"sources": [
"src/entry.cpp"
],
"include_dirs": [
"src/hge181/include",
"<!(node -e \"require('nan')\")"
],
"libraries": [
"../src/hge181/lib/vc/hge.lib",
"../src/hge181/lib/vc/hgehelp.lib"
],
"libraries!": [
"libc.lib"
],
"defines": [
"WIN32_LEAN_AND_MEAN"
],
"VCLinkerTool": {
"IgnoreSpecificDefaultLibraries": [
"libc.lib"
]
},
"copies": [{
"destination": "<(module_root_dir)/build/Release/",
"files": [ "<(module_root_dir)/src/hge181/hge.dll", "<(module_root_dir)/src/hge181/bass.dll" ]
}]
}]
} | {'targets': [{'target_name': 'node_hge', 'sources': ['src/entry.cpp'], 'include_dirs': ['src/hge181/include', '<!(node -e "require(\'nan\')")'], 'libraries': ['../src/hge181/lib/vc/hge.lib', '../src/hge181/lib/vc/hgehelp.lib'], 'libraries!': ['libc.lib'], 'defines': ['WIN32_LEAN_AND_MEAN'], 'VCLinkerTool': {'IgnoreSpecificDefaultLibraries': ['libc.lib']}, 'copies': [{'destination': '<(module_root_dir)/build/Release/', 'files': ['<(module_root_dir)/src/hge181/hge.dll', '<(module_root_dir)/src/hge181/bass.dll']}]}]} |
question_replaceable_special_characters = {',', "'", '"', ';', '?', ':', '-', '(', ')', '[', ']', '{', '}'}
special_characters = ['*', '$']
punctuations = set()
pickled_questions_dir = "bin/data/questions"
pickle_files_extension = ".pickle"
questions_per_segment = 100
debug_print_len = 25 | question_replaceable_special_characters = {',', "'", '"', ';', '?', ':', '-', '(', ')', '[', ']', '{', '}'}
special_characters = ['*', '$']
punctuations = set()
pickled_questions_dir = 'bin/data/questions'
pickle_files_extension = '.pickle'
questions_per_segment = 100
debug_print_len = 25 |
class LoopiaError(Exception):
_exceptions = {}
code = None
message = None
def __init__(self, response=None):
super(LoopiaError, self).__init__(self.message)
self.response = response
@classmethod
def register(cls, exception):
if exception.code in cls._exceptions:
raise ValueError("'{}' already exists".format(exception.code))
cls._exceptions[exception.code] = exception
return exception
@classmethod
def from_code(cls, code, response=None):
if code not in cls._exceptions:
code = None
return cls._exceptions[code](response)
@LoopiaError.register
class UnknownError(LoopiaError):
code = None
message = "Unknown error"
@LoopiaError.register
class AuthError(LoopiaError):
code = "AUTH_ERROR"
message = u"Wrong username or password"
@LoopiaError.register
class DomainOccupiedError(LoopiaError):
code = "DOMAIN_OCCUPIED"
message = u"Domain is not available for registration"
@LoopiaError.register
class RateLimitedError(LoopiaError):
code = "RATE_LIMITED"
message = u"Maximum number of requests over time reached"
@LoopiaError.register
class BadIndataError(LoopiaError):
code = "BAD_INDATA"
message = u"Invalid parameters"
@LoopiaError.register
class InsufficientFundsError(LoopiaError):
code = "INSUFFICIENT_FUNDS"
message = u"Not enough funds to complete the task"
| class Loopiaerror(Exception):
_exceptions = {}
code = None
message = None
def __init__(self, response=None):
super(LoopiaError, self).__init__(self.message)
self.response = response
@classmethod
def register(cls, exception):
if exception.code in cls._exceptions:
raise value_error("'{}' already exists".format(exception.code))
cls._exceptions[exception.code] = exception
return exception
@classmethod
def from_code(cls, code, response=None):
if code not in cls._exceptions:
code = None
return cls._exceptions[code](response)
@LoopiaError.register
class Unknownerror(LoopiaError):
code = None
message = 'Unknown error'
@LoopiaError.register
class Autherror(LoopiaError):
code = 'AUTH_ERROR'
message = u'Wrong username or password'
@LoopiaError.register
class Domainoccupiederror(LoopiaError):
code = 'DOMAIN_OCCUPIED'
message = u'Domain is not available for registration'
@LoopiaError.register
class Ratelimitederror(LoopiaError):
code = 'RATE_LIMITED'
message = u'Maximum number of requests over time reached'
@LoopiaError.register
class Badindataerror(LoopiaError):
code = 'BAD_INDATA'
message = u'Invalid parameters'
@LoopiaError.register
class Insufficientfundserror(LoopiaError):
code = 'INSUFFICIENT_FUNDS'
message = u'Not enough funds to complete the task' |
def paperwork(n, m):
if n < 0 or m < 0 : return 0
else: return n * m
print(paperwork(5,0)) | def paperwork(n, m):
if n < 0 or m < 0:
return 0
else:
return n * m
print(paperwork(5, 0)) |
def clocks(x, y, a, b, x2, y2):
a = a - x
b = b - y
if b < 0:
b = 60 + b
a = a - 1
if a < 0:
a = 24 + a
a2 = x2 + a
b2 = y2 + b
if b2 >= 60:
b2 = b2 - 60
a2 = a2 + 1
if a2 >= 24:
a2 = a2 - 24
print(a2, b2)
clocks(int(input()), int(input()), int(input()), int(input()), int(input()), int(input())) | def clocks(x, y, a, b, x2, y2):
a = a - x
b = b - y
if b < 0:
b = 60 + b
a = a - 1
if a < 0:
a = 24 + a
a2 = x2 + a
b2 = y2 + b
if b2 >= 60:
b2 = b2 - 60
a2 = a2 + 1
if a2 >= 24:
a2 = a2 - 24
print(a2, b2)
clocks(int(input()), int(input()), int(input()), int(input()), int(input()), int(input())) |
n=6
a,b=0,0
arr=[1,2,4,4,5,6]
for i in range(int(n-1)):
if arr[i-1]>=arr[i]<=arr[i+1]:
a=a+1
if arr[i-1]<=arr[i]>=arr[i+1]:
b=b+1
print(b if a>b else a)
def howMany(sentence):
i = 0
ans = 0
n = len(sentence)
while (i < n):
c = 0
c2 = 0
c3 = 0
while (i < n and sentence[i] != ' '):
if ((sentence[i] >= 'a' and sentence[i] <= 'z') or (sentence[i] >= 'A' and sentence[i] <= 'Z') or sentence[i] == '-'):
c += 1
elif (sentence[i] and (sentence[i] == ',' or sentence[i] == '.' or sentence[i] == '?' or sentence[i] == '!')):
c3 += 1
c2 += 1
i += 1
if (c + c3 == c2 and c > 0):
ans += 1
while (i < n and sentence[i] == ' '):
i += 1
return ans | n = 6
(a, b) = (0, 0)
arr = [1, 2, 4, 4, 5, 6]
for i in range(int(n - 1)):
if arr[i - 1] >= arr[i] <= arr[i + 1]:
a = a + 1
if arr[i - 1] <= arr[i] >= arr[i + 1]:
b = b + 1
print(b if a > b else a)
def how_many(sentence):
i = 0
ans = 0
n = len(sentence)
while i < n:
c = 0
c2 = 0
c3 = 0
while i < n and sentence[i] != ' ':
if sentence[i] >= 'a' and sentence[i] <= 'z' or (sentence[i] >= 'A' and sentence[i] <= 'Z') or sentence[i] == '-':
c += 1
elif sentence[i] and (sentence[i] == ',' or sentence[i] == '.' or sentence[i] == '?' or (sentence[i] == '!')):
c3 += 1
c2 += 1
i += 1
if c + c3 == c2 and c > 0:
ans += 1
while i < n and sentence[i] == ' ':
i += 1
return ans |
# Parameters for compute_reference.py
# mpmath maximum precision when computing hypergeometric function values.
MAXPREC = 100000
# Range of a and b. PTS should be an odd number, since
# a = 0 and b = 0 are included in addition to positive and negative values.
UPPER = 2.3
PTS = 401
# Range of the logarithm of z values.
LOWER_Z = -2
UPPER_Z = 3
PTS_Z = 31
| maxprec = 100000
upper = 2.3
pts = 401
lower_z = -2
upper_z = 3
pts_z = 31 |
'''
09 - Dictionary of lists
Some more data just came in! This time, you'll use the dictionary of lists method,
parsing the data column by column.
|date | small_sold | large_sold |
|-------------+---------------+------------|
|"2019-11-17" | 10859987 | 7674135 |
|"2019-12-01" | 9291631 | 6238096 |
Instructions:
- Create a dictionary of lists with the new data called avocados_dict.
- Convert the dictionary to a DataFrame called avocados_2019.
- Print your new DataFrame.
'''
# Create a dictionary of lists with new data
avocados_dict = {
"date": ["2019-11-17", "2019-12-01"],
"small_sold": [10859987, 9291631],
"large_sold": [7674135, 6238096]
}
# Convert dictionary into DataFrame
avocados_2019 = pd.DataFrame(avocados_dict)
# Print the new DataFrame
print(avocados_2019)
| """
09 - Dictionary of lists
Some more data just came in! This time, you'll use the dictionary of lists method,
parsing the data column by column.
|date | small_sold | large_sold |
|-------------+---------------+------------|
|"2019-11-17" | 10859987 | 7674135 |
|"2019-12-01" | 9291631 | 6238096 |
Instructions:
- Create a dictionary of lists with the new data called avocados_dict.
- Convert the dictionary to a DataFrame called avocados_2019.
- Print your new DataFrame.
"""
avocados_dict = {'date': ['2019-11-17', '2019-12-01'], 'small_sold': [10859987, 9291631], 'large_sold': [7674135, 6238096]}
avocados_2019 = pd.DataFrame(avocados_dict)
print(avocados_2019) |
n = int(input())
friends = list(input().split())
sum = 0
for i in friends:
sum += int(i)
ways = 0
for i in range(1,6):
if (sum+i)%(n+1) != 1:
ways += 1
print(ways)
| n = int(input())
friends = list(input().split())
sum = 0
for i in friends:
sum += int(i)
ways = 0
for i in range(1, 6):
if (sum + i) % (n + 1) != 1:
ways += 1
print(ways) |
tanya_list = [
'kenapa',
'bila',
'siapa',
'mengapa',
'apa',
'bagaimana',
'berapa',
'mana']
perintah_list = [
'jangan',
'sila',
'tolong',
'harap',
'usah',
'jemput',
'minta']
pangkal_list = [
'maka',
'alkisah',
'arakian',
'syahdah',
'adapun',
'bermula',
'kalakian']
bantu_list = [
'akan',
'telah',
'boleh',
'mesti',
'belum',
'sudah',
'dapat',
'masih',
'harus',
'hendak']
penguat_list = [
'paling',
'agak',
'sungguh',
'amat',
'terlalu',
'nian',
'benar',
'paling']
penegas_list = ['jua', 'juga', 'sahaja', 'hanya', 'memang', 'lagi', 'pun']
nafi_list = ['bukan', 'tidak', 'tak', 'tiada', 'tidaklah', 'tidakkah']
pemeri_list = ['ialah', 'adalah']
sendi_list = ['akan', 'kepada', 'terhadap', 'bagi', 'untuk', 'dari', 'daripada', 'di', 'dengan', 'hingga', 'sampai',
'ke', 'kepada', 'oleh', 'pada', 'sejak', 'seperti', 'umpama', 'bak', 'tentang', 'laksanabagai',
'semenjak', 'dalam', 'antara']
pembenar_list = ['ya', 'benar', 'betul']
nombor_list = [
'satu',
'dua',
'tiga',
'empat',
'lima',
'enam',
'tujuh',
'lapan',
'sembilan',
'kosong']
suku_bilangan_list = ['per', 'suku', 'setengah', 'separuh', 'tiga suku']
pisahan_list = ['setiap', 'tiap']
keterangan_list = ['begitu', 'begini', 'demikian', 'perlahan', 'cepat', 'lena', 'akan', 'sedang', 'belum',
'telah', 'sekarang', 'sebentar', 'semalam', 'mungkin', 'agak', 'barangkali', 'pasti', 'tentu',
'sudah', 'selalu', 'kadang', 'acapkali', 'sesekali', 'yang']
arah_list = [
'atas',
'bawah',
'tepi',
'antara',
'hadapan',
'utara',
'sisi',
'luar']
hubung_list = ['agar', 'apabila', 'atau', 'bahawa', 'dan', 'hingga', 'jika', 'jikalau', 'kecuali', 'kerana',
'lalu', 'manakala', 'sambil', 'serta', 'semenjak', 'sementara', 'sungguhpun', 'supaya', 'walaupun', 'tetapi', 'berkenan', 'berkenaan']
gantinama_list = ['aku', 'saya', 'hamba', 'patik', 'beta', 'kami', 'kita', 'anda', 'awak', 'engkau', 'tuanku', 'kalian',
'kamu', 'baginda', 'beliau', 'mereka', 'ini', 'itu', 'sini', 'situ', 'sana', 'kini', 'dia']
# pos permulaan[:-4]
permulaan = [
'bel',
'be',
'se',
'ter',
'men',
'memper',
'di',
'pe',
'me',
'ke',
'ber',
'pen',
'per']
# pos hujung [:1]
hujung = ['kan', 'kah', 'lah', 'tah', 'nya', 'an', 'wan', 'wati', 'ita']
alphabet = 'qwertyuiopasdfghjklzxcvbnm'
tatabahasa_dict = {'KT': tanya_list, 'KP': perintah_list, 'KPA': pangkal_list, 'KB': bantu_list, 'KPENGUAT': penguat_list,
'KPENEGAS': penegas_list, 'NAFI': nafi_list, 'KPEMERI': pemeri_list, 'KS': sendi_list, 'KPEMBENAR': pembenar_list,
'NO': nombor_list, 'SUKU': suku_bilangan_list, 'PISAHAN': pisahan_list, 'KETERANGAN': keterangan_list,
'ARAH': arah_list, 'KH': hubung_list, 'GN': gantinama_list}
| tanya_list = ['kenapa', 'bila', 'siapa', 'mengapa', 'apa', 'bagaimana', 'berapa', 'mana']
perintah_list = ['jangan', 'sila', 'tolong', 'harap', 'usah', 'jemput', 'minta']
pangkal_list = ['maka', 'alkisah', 'arakian', 'syahdah', 'adapun', 'bermula', 'kalakian']
bantu_list = ['akan', 'telah', 'boleh', 'mesti', 'belum', 'sudah', 'dapat', 'masih', 'harus', 'hendak']
penguat_list = ['paling', 'agak', 'sungguh', 'amat', 'terlalu', 'nian', 'benar', 'paling']
penegas_list = ['jua', 'juga', 'sahaja', 'hanya', 'memang', 'lagi', 'pun']
nafi_list = ['bukan', 'tidak', 'tak', 'tiada', 'tidaklah', 'tidakkah']
pemeri_list = ['ialah', 'adalah']
sendi_list = ['akan', 'kepada', 'terhadap', 'bagi', 'untuk', 'dari', 'daripada', 'di', 'dengan', 'hingga', 'sampai', 'ke', 'kepada', 'oleh', 'pada', 'sejak', 'seperti', 'umpama', 'bak', 'tentang', 'laksanabagai', 'semenjak', 'dalam', 'antara']
pembenar_list = ['ya', 'benar', 'betul']
nombor_list = ['satu', 'dua', 'tiga', 'empat', 'lima', 'enam', 'tujuh', 'lapan', 'sembilan', 'kosong']
suku_bilangan_list = ['per', 'suku', 'setengah', 'separuh', 'tiga suku']
pisahan_list = ['setiap', 'tiap']
keterangan_list = ['begitu', 'begini', 'demikian', 'perlahan', 'cepat', 'lena', 'akan', 'sedang', 'belum', 'telah', 'sekarang', 'sebentar', 'semalam', 'mungkin', 'agak', 'barangkali', 'pasti', 'tentu', 'sudah', 'selalu', 'kadang', 'acapkali', 'sesekali', 'yang']
arah_list = ['atas', 'bawah', 'tepi', 'antara', 'hadapan', 'utara', 'sisi', 'luar']
hubung_list = ['agar', 'apabila', 'atau', 'bahawa', 'dan', 'hingga', 'jika', 'jikalau', 'kecuali', 'kerana', 'lalu', 'manakala', 'sambil', 'serta', 'semenjak', 'sementara', 'sungguhpun', 'supaya', 'walaupun', 'tetapi', 'berkenan', 'berkenaan']
gantinama_list = ['aku', 'saya', 'hamba', 'patik', 'beta', 'kami', 'kita', 'anda', 'awak', 'engkau', 'tuanku', 'kalian', 'kamu', 'baginda', 'beliau', 'mereka', 'ini', 'itu', 'sini', 'situ', 'sana', 'kini', 'dia']
permulaan = ['bel', 'be', 'se', 'ter', 'men', 'memper', 'di', 'pe', 'me', 'ke', 'ber', 'pen', 'per']
hujung = ['kan', 'kah', 'lah', 'tah', 'nya', 'an', 'wan', 'wati', 'ita']
alphabet = 'qwertyuiopasdfghjklzxcvbnm'
tatabahasa_dict = {'KT': tanya_list, 'KP': perintah_list, 'KPA': pangkal_list, 'KB': bantu_list, 'KPENGUAT': penguat_list, 'KPENEGAS': penegas_list, 'NAFI': nafi_list, 'KPEMERI': pemeri_list, 'KS': sendi_list, 'KPEMBENAR': pembenar_list, 'NO': nombor_list, 'SUKU': suku_bilangan_list, 'PISAHAN': pisahan_list, 'KETERANGAN': keterangan_list, 'ARAH': arah_list, 'KH': hubung_list, 'GN': gantinama_list} |
N = int(input())
result = 0
for i in range(1, N + 1):
if i % 3 == 0 or i % 5 == 0:
continue
result += i
print(result)
| n = int(input())
result = 0
for i in range(1, N + 1):
if i % 3 == 0 or i % 5 == 0:
continue
result += i
print(result) |
def findDecision(obj): #obj[0]: Coupon, obj[1]: Education
# {"feature": "Education", "instances": 127, "metric_value": 0.987, "depth": 1}
if obj[1]<=2:
# {"feature": "Coupon", "instances": 91, "metric_value": 0.9355, "depth": 2}
if obj[0]>1:
return 'True'
elif obj[0]<=1:
return 'True'
else: return 'True'
elif obj[1]>2:
# {"feature": "Coupon", "instances": 36, "metric_value": 0.9436, "depth": 2}
if obj[0]<=3:
return 'False'
elif obj[0]>3:
return 'False'
else: return 'False'
else: return 'False'
| def find_decision(obj):
if obj[1] <= 2:
if obj[0] > 1:
return 'True'
elif obj[0] <= 1:
return 'True'
else:
return 'True'
elif obj[1] > 2:
if obj[0] <= 3:
return 'False'
elif obj[0] > 3:
return 'False'
else:
return 'False'
else:
return 'False' |
class Solution:
def minimumTotal(self, triangle: List[List[int]]) -> int:
n = len(triangle)
if n == 1: return min(triangle[0])
row_curr = triangle[n-1]
for row in range(n-2, -1, -1):
row_up = triangle[row]
for ind in range(len(row_up)):
row_up[ind] = min(row_curr[ind] + triangle[row][ind],
row_curr[ind+1] + triangle[row][ind])
row_curr = row_up
return row_up[0]
| class Solution:
def minimum_total(self, triangle: List[List[int]]) -> int:
n = len(triangle)
if n == 1:
return min(triangle[0])
row_curr = triangle[n - 1]
for row in range(n - 2, -1, -1):
row_up = triangle[row]
for ind in range(len(row_up)):
row_up[ind] = min(row_curr[ind] + triangle[row][ind], row_curr[ind + 1] + triangle[row][ind])
row_curr = row_up
return row_up[0] |
#Accessing specific elemnts from a dictionary
Breakfast={
"Name":"dosa",
"cost": 45,
"Proteins": 4,
"Fat":2
}
#Finding the cost of Breakfast
p=Breakfast.get("cost")
print(p)
| breakfast = {'Name': 'dosa', 'cost': 45, 'Proteins': 4, 'Fat': 2}
p = Breakfast.get('cost')
print(p) |
NPKT = 100000
# def getselfaddr():
# return socket.getaddrinfo(None, PORT, socket.AF_INET6, socket.SOCK_DGRAM,socket.IPPROTO_IP)[0]
| npkt = 100000 |
deliver_states = {
'DEFAULT': ['1', 'PENDING_ORDERS', 'ACCEPT_PENDING_JOLLOF', 'ACCEPT_PENDING_DELICACY', 'TO_PICKUP', 'PICKED_UP_JOLLOF', 'PICKED_UP_DELICACY', 'TO_DROPOFF', 'DROPPED_OFF_JOLLOF', 'DROPPED_OFF_DELICACY'],
'CANCELLED': ['DEFAULT'],
'FLASH_LOCATION': ['FLASH_LOCATION', 'CANCELLED'],
'REQUEST_PHONE': ['REQUEST_PHONE', 'CANCENCELLED'],
'PENDING_ORDERS': ['PENDING_ORDERS', 'ACCEPT_PENDING_JOLLOF', 'ACCEPT_PENDING_DELICACY', 'CANCELLED'],
'ACCEPT_PENDING_JOLLOF': ['ACCEPT_PENDING_JOLLOF', 'CANCELLED'],
'ACCEPT_PENDING_DELICACY': ['ACCEPT_PENDING_DELICACY', 'CANCELLED'],
'TO_PICKUP': ['TO_PICKUP', 'PICKED_UP_JOLLOF', 'PICKED_UP_DELICACY', 'CANCELLED'],
'PICKED_UP_JOLLOF': ['PICKED_UP_JOLLOF', 'CANCELLED'],
'PICKED_UP_DELICACY': ['PICKED_UP_DELICACY', 'CANCELLED'],
'TO_DROPOFF': ['TO_DROPOFF', 'DROPPED_OFF_JOLLOF', 'DROPPED_OFF_DELICACY', 'CANCELLED'],
'DROPPED_OFF_JOLLOF': ['DROPPED_OFF_JOLLOF', 'CANCELLED'],
'DROPPED_OFF_DELICACY': ['DROPPED_OFF_DELICACY', 'CANCELLED'],
}
def is_deliver_next_state(old_state, new_state):
'''
Returns boolean if new_state is a next state for old_state
'''
try:
state = deliver_states[old_state]
if new_state in state:
return True
else:
return False
except KeyError:
return False | deliver_states = {'DEFAULT': ['1', 'PENDING_ORDERS', 'ACCEPT_PENDING_JOLLOF', 'ACCEPT_PENDING_DELICACY', 'TO_PICKUP', 'PICKED_UP_JOLLOF', 'PICKED_UP_DELICACY', 'TO_DROPOFF', 'DROPPED_OFF_JOLLOF', 'DROPPED_OFF_DELICACY'], 'CANCELLED': ['DEFAULT'], 'FLASH_LOCATION': ['FLASH_LOCATION', 'CANCELLED'], 'REQUEST_PHONE': ['REQUEST_PHONE', 'CANCENCELLED'], 'PENDING_ORDERS': ['PENDING_ORDERS', 'ACCEPT_PENDING_JOLLOF', 'ACCEPT_PENDING_DELICACY', 'CANCELLED'], 'ACCEPT_PENDING_JOLLOF': ['ACCEPT_PENDING_JOLLOF', 'CANCELLED'], 'ACCEPT_PENDING_DELICACY': ['ACCEPT_PENDING_DELICACY', 'CANCELLED'], 'TO_PICKUP': ['TO_PICKUP', 'PICKED_UP_JOLLOF', 'PICKED_UP_DELICACY', 'CANCELLED'], 'PICKED_UP_JOLLOF': ['PICKED_UP_JOLLOF', 'CANCELLED'], 'PICKED_UP_DELICACY': ['PICKED_UP_DELICACY', 'CANCELLED'], 'TO_DROPOFF': ['TO_DROPOFF', 'DROPPED_OFF_JOLLOF', 'DROPPED_OFF_DELICACY', 'CANCELLED'], 'DROPPED_OFF_JOLLOF': ['DROPPED_OFF_JOLLOF', 'CANCELLED'], 'DROPPED_OFF_DELICACY': ['DROPPED_OFF_DELICACY', 'CANCELLED']}
def is_deliver_next_state(old_state, new_state):
"""
Returns boolean if new_state is a next state for old_state
"""
try:
state = deliver_states[old_state]
if new_state in state:
return True
else:
return False
except KeyError:
return False |
class Synonym:
def __init__(self,
taxon_id,
name_id,
id='',
name_phrase='',
according_to_id='',
status='synonym',
reference_id='',
page_reference_id='',
link='',
remarks='',
needs_review=''):
self.id = id
self.taxon_id = taxon_id
self.name_id = name_id
self.name_phrase = name_phrase
self.according_to_id = according_to_id
self.status = status
self.reference_id = reference_id
self.page_reference_id = page_reference_id
self.link = link
self.remarks = remarks
self.needs_review = needs_review
def __str__(self):
return str(self.id) + '\t' + \
str(self.taxon_id) + '\t' + \
str(self.name_id) + '\t' + \
self.name_phrase + '\t' + \
str(self.according_to_id) + '\t' + \
self.status + '\t' + \
str(self.reference_id) + '\t' + \
str(self.page_reference_id) + '\t' + \
self.link + '\t' + \
self.remarks + '\n'
# def __repr__(self):
# return {
# 'id': self.id,
# 'taxon_id': self.taxon_id,
# 'name_id': self.name_id,
# 'name_phrase': self.name_phrase,
# 'according_to_id': self.according_to_id,
# 'status': self.status,
# 'reference_id': self.reference_id,
# 'page_reference_id': self.page_reference_id,
# 'link': self.link,
# 'remarks': self.remarks
# }
class Synonyms:
def __init__(self, output_tsv):
self.synonyms = []
self.output_tsv = output_tsv
def append(self, synonym):
if isinstance(synonym, Synonym):
self.synonyms.append(synonym)
else:
print('Error: synonym must be Synonym type')
def write_output(self, output_tsv=''):
if output_tsv == '' and self.output_tsv != '':
output_tsv = self.output_tsv
file = open(output_tsv, 'w')
if len(self.synonyms) > 0:
header = '\t'.join(self.synonyms[0].__dict__.keys()) + '\n'
file.write(header)
for synonym in self.synonyms:
row = '\t'.join(str(v) for v in synonym.__dict__.values()) + '\n'
file.write(row)
file.close()
| class Synonym:
def __init__(self, taxon_id, name_id, id='', name_phrase='', according_to_id='', status='synonym', reference_id='', page_reference_id='', link='', remarks='', needs_review=''):
self.id = id
self.taxon_id = taxon_id
self.name_id = name_id
self.name_phrase = name_phrase
self.according_to_id = according_to_id
self.status = status
self.reference_id = reference_id
self.page_reference_id = page_reference_id
self.link = link
self.remarks = remarks
self.needs_review = needs_review
def __str__(self):
return str(self.id) + '\t' + str(self.taxon_id) + '\t' + str(self.name_id) + '\t' + self.name_phrase + '\t' + str(self.according_to_id) + '\t' + self.status + '\t' + str(self.reference_id) + '\t' + str(self.page_reference_id) + '\t' + self.link + '\t' + self.remarks + '\n'
class Synonyms:
def __init__(self, output_tsv):
self.synonyms = []
self.output_tsv = output_tsv
def append(self, synonym):
if isinstance(synonym, Synonym):
self.synonyms.append(synonym)
else:
print('Error: synonym must be Synonym type')
def write_output(self, output_tsv=''):
if output_tsv == '' and self.output_tsv != '':
output_tsv = self.output_tsv
file = open(output_tsv, 'w')
if len(self.synonyms) > 0:
header = '\t'.join(self.synonyms[0].__dict__.keys()) + '\n'
file.write(header)
for synonym in self.synonyms:
row = '\t'.join((str(v) for v in synonym.__dict__.values())) + '\n'
file.write(row)
file.close() |
# ----------------------------------------------------------------------
# CISCO-VPDN-MGMT-MIB
# Compiled MIB
# Do not modify this file directly
# Run ./noc mib make-cmib instead
# ----------------------------------------------------------------------
# Copyright (C) 2007-2020 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# MIB Name
NAME = "CISCO-VPDN-MGMT-MIB"
# Metadata
LAST_UPDATED = "2009-06-16"
COMPILED = "2020-01-19"
# MIB Data: name -> oid
MIB = {
"CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIB": "1.3.6.1.4.1.9.10.24",
"CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBNotifs": "1.3.6.1.4.1.9.10.24.0",
"CISCO-VPDN-MGMT-MIB::cvpdnNotifSessionID": "1.3.6.1.4.1.9.10.24.0.1",
"CISCO-VPDN-MGMT-MIB::cvpdnNotifSessionEvent": "1.3.6.1.4.1.9.10.24.0.2",
"CISCO-VPDN-MGMT-MIB::cvpdnNotifSession": "1.3.6.1.4.1.9.10.24.0.3",
"CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBObjects": "1.3.6.1.4.1.9.10.24.1",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemInfo": "1.3.6.1.4.1.9.10.24.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelTotal": "1.3.6.1.4.1.9.10.24.1.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionTotal": "1.3.6.1.4.1.9.10.24.1.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnDeniedUsersTotal": "1.3.6.1.4.1.9.10.24.1.1.3",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemTable": "1.3.6.1.4.1.9.10.24.1.1.4",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemEntry": "1.3.6.1.4.1.9.10.24.1.1.4.1",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemTunnelType": "1.3.6.1.4.1.9.10.24.1.1.4.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemTunnelTotal": "1.3.6.1.4.1.9.10.24.1.1.4.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemSessionTotal": "1.3.6.1.4.1.9.10.24.1.1.4.1.3",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemDeniedUsersTotal": "1.3.6.1.4.1.9.10.24.1.1.4.1.4",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemInitialConnReq": "1.3.6.1.4.1.9.10.24.1.1.4.1.5",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemSuccessConnReq": "1.3.6.1.4.1.9.10.24.1.1.4.1.6",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemFailedConnReq": "1.3.6.1.4.1.9.10.24.1.1.4.1.7",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemNotifSessionEnabled": "1.3.6.1.4.1.9.10.24.1.1.5",
"CISCO-VPDN-MGMT-MIB::cvpdnSystemClearSessions": "1.3.6.1.4.1.9.10.24.1.1.6",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelInfo": "1.3.6.1.4.1.9.10.24.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelTable": "1.3.6.1.4.1.9.10.24.1.2.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelEntry": "1.3.6.1.4.1.9.10.24.1.2.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelTunnelId": "1.3.6.1.4.1.9.10.24.1.2.1.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteTunnelId": "1.3.6.1.4.1.9.10.24.1.2.1.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalName": "1.3.6.1.4.1.9.10.24.1.2.1.1.3",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteName": "1.3.6.1.4.1.9.10.24.1.2.1.1.4",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteEndpointName": "1.3.6.1.4.1.9.10.24.1.2.1.1.5",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalInitConnection": "1.3.6.1.4.1.9.10.24.1.2.1.1.6",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelOrigCause": "1.3.6.1.4.1.9.10.24.1.2.1.1.7",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelState": "1.3.6.1.4.1.9.10.24.1.2.1.1.8",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelActiveSessions": "1.3.6.1.4.1.9.10.24.1.2.1.1.9",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelDeniedUsers": "1.3.6.1.4.1.9.10.24.1.2.1.1.10",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSoftshut": "1.3.6.1.4.1.9.10.24.1.2.1.1.12",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelNetworkServiceType": "1.3.6.1.4.1.9.10.24.1.2.1.1.13",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalIpAddress": "1.3.6.1.4.1.9.10.24.1.2.1.1.14",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSourceIpAddress": "1.3.6.1.4.1.9.10.24.1.2.1.1.15",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteIpAddress": "1.3.6.1.4.1.9.10.24.1.2.1.1.16",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrTable": "1.3.6.1.4.1.9.10.24.1.2.2",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrEntry": "1.3.6.1.4.1.9.10.24.1.2.2.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrTunnelId": "1.3.6.1.4.1.9.10.24.1.2.2.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteTunnelId": "1.3.6.1.4.1.9.10.24.1.2.2.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalName": "1.3.6.1.4.1.9.10.24.1.2.2.1.3",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteName": "1.3.6.1.4.1.9.10.24.1.2.2.1.4",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteEndpointName": "1.3.6.1.4.1.9.10.24.1.2.2.1.5",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInitConnection": "1.3.6.1.4.1.9.10.24.1.2.2.1.6",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrOrigCause": "1.3.6.1.4.1.9.10.24.1.2.2.1.7",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrState": "1.3.6.1.4.1.9.10.24.1.2.2.1.8",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrActiveSessions": "1.3.6.1.4.1.9.10.24.1.2.2.1.9",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrDeniedUsers": "1.3.6.1.4.1.9.10.24.1.2.2.1.10",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSoftshut": "1.3.6.1.4.1.9.10.24.1.2.2.1.11",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrNetworkServiceType": "1.3.6.1.4.1.9.10.24.1.2.2.1.12",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalIpAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.13",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceIpAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.14",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteIpAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.15",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInetAddressType": "1.3.6.1.4.1.9.10.24.1.2.2.1.16",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInetAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.17",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceInetAddressType": "1.3.6.1.4.1.9.10.24.1.2.2.1.18",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceInetAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.19",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteInetAddressType": "1.3.6.1.4.1.9.10.24.1.2.2.1.20",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteInetAddress": "1.3.6.1.4.1.9.10.24.1.2.2.1.21",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionInfo": "1.3.6.1.4.1.9.10.24.1.3",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionTable": "1.3.6.1.4.1.9.10.24.1.3.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionEntry": "1.3.6.1.4.1.9.10.24.1.3.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionId": "1.3.6.1.4.1.9.10.24.1.3.1.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionUserName": "1.3.6.1.4.1.9.10.24.1.3.1.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionState": "1.3.6.1.4.1.9.10.24.1.3.1.1.3",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionCallDuration": "1.3.6.1.4.1.9.10.24.1.3.1.1.4",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionPacketsOut": "1.3.6.1.4.1.9.10.24.1.3.1.1.5",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionBytesOut": "1.3.6.1.4.1.9.10.24.1.3.1.1.6",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionPacketsIn": "1.3.6.1.4.1.9.10.24.1.3.1.1.7",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionBytesIn": "1.3.6.1.4.1.9.10.24.1.3.1.1.8",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDeviceType": "1.3.6.1.4.1.9.10.24.1.3.1.1.9",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDeviceCallerId": "1.3.6.1.4.1.9.10.24.1.3.1.1.10",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDevicePhyId": "1.3.6.1.4.1.9.10.24.1.3.1.1.11",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionMultilink": "1.3.6.1.4.1.9.10.24.1.3.1.1.12",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemSlotIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.13",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemPortIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.14",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1SlotIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.15",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1PortIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.16",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1ChannelIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.17",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemCallStartTime": "1.3.6.1.4.1.9.10.24.1.3.1.1.18",
"CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemCallStartIndex": "1.3.6.1.4.1.9.10.24.1.3.1.1.19",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrTable": "1.3.6.1.4.1.9.10.24.1.3.2",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrEntry": "1.3.6.1.4.1.9.10.24.1.3.2.1",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrSessionId": "1.3.6.1.4.1.9.10.24.1.3.2.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrUserName": "1.3.6.1.4.1.9.10.24.1.3.2.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrState": "1.3.6.1.4.1.9.10.24.1.3.2.1.3",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrCallDuration": "1.3.6.1.4.1.9.10.24.1.3.2.1.4",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrPacketsOut": "1.3.6.1.4.1.9.10.24.1.3.2.1.5",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrBytesOut": "1.3.6.1.4.1.9.10.24.1.3.2.1.6",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrPacketsIn": "1.3.6.1.4.1.9.10.24.1.3.2.1.7",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrBytesIn": "1.3.6.1.4.1.9.10.24.1.3.2.1.8",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDeviceType": "1.3.6.1.4.1.9.10.24.1.3.2.1.9",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDeviceCallerId": "1.3.6.1.4.1.9.10.24.1.3.2.1.10",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDevicePhyId": "1.3.6.1.4.1.9.10.24.1.3.2.1.11",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilink": "1.3.6.1.4.1.9.10.24.1.3.2.1.12",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemSlotIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.13",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemPortIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.14",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1SlotIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.15",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1PortIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.16",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1ChannelIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.17",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemCallStartTime": "1.3.6.1.4.1.9.10.24.1.3.2.1.18",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemCallStartIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.19",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrVirtualCircuitID": "1.3.6.1.4.1.9.10.24.1.3.2.1.20",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrSentPktsDropped": "1.3.6.1.4.1.9.10.24.1.3.2.1.21",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrRecvPktsDropped": "1.3.6.1.4.1.9.10.24.1.3.2.1.22",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilinkBundle": "1.3.6.1.4.1.9.10.24.1.3.2.1.23",
"CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilinkIfIndex": "1.3.6.1.4.1.9.10.24.1.3.2.1.24",
"CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfo": "1.3.6.1.4.1.9.10.24.1.4",
"CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfoTable": "1.3.6.1.4.1.9.10.24.1.4.1",
"CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfoEntry": "1.3.6.1.4.1.9.10.24.1.4.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistUname": "1.3.6.1.4.1.9.10.24.1.4.1.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistTunnelId": "1.3.6.1.4.1.9.10.24.1.4.1.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistUserId": "1.3.6.1.4.1.9.10.24.1.4.1.1.3",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistLocalInitConn": "1.3.6.1.4.1.9.10.24.1.4.1.1.4",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistLocalName": "1.3.6.1.4.1.9.10.24.1.4.1.1.5",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistRemoteName": "1.3.6.1.4.1.9.10.24.1.4.1.1.6",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceIp": "1.3.6.1.4.1.9.10.24.1.4.1.1.7",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestIp": "1.3.6.1.4.1.9.10.24.1.4.1.1.8",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistCount": "1.3.6.1.4.1.9.10.24.1.4.1.1.9",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailTime": "1.3.6.1.4.1.9.10.24.1.4.1.1.10",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailType": "1.3.6.1.4.1.9.10.24.1.4.1.1.11",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailReason": "1.3.6.1.4.1.9.10.24.1.4.1.1.12",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceInetType": "1.3.6.1.4.1.9.10.24.1.4.1.1.13",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceInetAddr": "1.3.6.1.4.1.9.10.24.1.4.1.1.14",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestInetType": "1.3.6.1.4.1.9.10.24.1.4.1.1.15",
"CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestInetAddr": "1.3.6.1.4.1.9.10.24.1.4.1.1.16",
"CISCO-VPDN-MGMT-MIB::cvpdnTemplateInfo": "1.3.6.1.4.1.9.10.24.1.5",
"CISCO-VPDN-MGMT-MIB::cvpdnTemplateTable": "1.3.6.1.4.1.9.10.24.1.5.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTemplateEntry": "1.3.6.1.4.1.9.10.24.1.5.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTemplateName": "1.3.6.1.4.1.9.10.24.1.5.1.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnTemplateActiveSessions": "1.3.6.1.4.1.9.10.24.1.5.1.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnMultilinkInfo": "1.3.6.1.4.1.9.10.24.1.6",
"CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithOneLink": "1.3.6.1.4.1.9.10.24.1.6.1",
"CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithTwoLinks": "1.3.6.1.4.1.9.10.24.1.6.2",
"CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithMoreThanTwoLinks": "1.3.6.1.4.1.9.10.24.1.6.3",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleTable": "1.3.6.1.4.1.9.10.24.1.6.4",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleEntry": "1.3.6.1.4.1.9.10.24.1.6.4.1",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleName": "1.3.6.1.4.1.9.10.24.1.6.4.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleLinkCount": "1.3.6.1.4.1.9.10.24.1.6.4.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpointType": "1.3.6.1.4.1.9.10.24.1.6.4.1.3",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpoint": "1.3.6.1.4.1.9.10.24.1.6.4.1.4",
"CISCO-VPDN-MGMT-MIB::cvpdnBundlePeerIpAddrType": "1.3.6.1.4.1.9.10.24.1.6.4.1.5",
"CISCO-VPDN-MGMT-MIB::cvpdnBundlePeerIpAddr": "1.3.6.1.4.1.9.10.24.1.6.4.1.6",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpointClass": "1.3.6.1.4.1.9.10.24.1.6.4.1.7",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleLastChanged": "1.3.6.1.4.1.9.10.24.1.6.5",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTable": "1.3.6.1.4.1.9.10.24.1.6.6",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleChildEntry": "1.3.6.1.4.1.9.10.24.1.6.6.1",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTunnelType": "1.3.6.1.4.1.9.10.24.1.6.6.1.1",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTunnelId": "1.3.6.1.4.1.9.10.24.1.6.6.1.2",
"CISCO-VPDN-MGMT-MIB::cvpdnBundleChildSessionId": "1.3.6.1.4.1.9.10.24.1.6.6.1.3",
"CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBConformance": "1.3.6.1.4.1.9.10.24.3",
"CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBCompliances": "1.3.6.1.4.1.9.10.24.3.1",
"CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBGroups": "1.3.6.1.4.1.9.10.24.3.2",
}
DISPLAY_HINTS = {}
| name = 'CISCO-VPDN-MGMT-MIB'
last_updated = '2009-06-16'
compiled = '2020-01-19'
mib = {'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIB': '1.3.6.1.4.1.9.10.24', 'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBNotifs': '1.3.6.1.4.1.9.10.24.0', 'CISCO-VPDN-MGMT-MIB::cvpdnNotifSessionID': '1.3.6.1.4.1.9.10.24.0.1', 'CISCO-VPDN-MGMT-MIB::cvpdnNotifSessionEvent': '1.3.6.1.4.1.9.10.24.0.2', 'CISCO-VPDN-MGMT-MIB::cvpdnNotifSession': '1.3.6.1.4.1.9.10.24.0.3', 'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBObjects': '1.3.6.1.4.1.9.10.24.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemInfo': '1.3.6.1.4.1.9.10.24.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelTotal': '1.3.6.1.4.1.9.10.24.1.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionTotal': '1.3.6.1.4.1.9.10.24.1.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnDeniedUsersTotal': '1.3.6.1.4.1.9.10.24.1.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemTable': '1.3.6.1.4.1.9.10.24.1.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemEntry': '1.3.6.1.4.1.9.10.24.1.1.4.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemTunnelType': '1.3.6.1.4.1.9.10.24.1.1.4.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemTunnelTotal': '1.3.6.1.4.1.9.10.24.1.1.4.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemSessionTotal': '1.3.6.1.4.1.9.10.24.1.1.4.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemDeniedUsersTotal': '1.3.6.1.4.1.9.10.24.1.1.4.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemInitialConnReq': '1.3.6.1.4.1.9.10.24.1.1.4.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemSuccessConnReq': '1.3.6.1.4.1.9.10.24.1.1.4.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemFailedConnReq': '1.3.6.1.4.1.9.10.24.1.1.4.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemNotifSessionEnabled': '1.3.6.1.4.1.9.10.24.1.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnSystemClearSessions': '1.3.6.1.4.1.9.10.24.1.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelInfo': '1.3.6.1.4.1.9.10.24.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelTable': '1.3.6.1.4.1.9.10.24.1.2.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelEntry': '1.3.6.1.4.1.9.10.24.1.2.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelTunnelId': '1.3.6.1.4.1.9.10.24.1.2.1.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteTunnelId': '1.3.6.1.4.1.9.10.24.1.2.1.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalName': '1.3.6.1.4.1.9.10.24.1.2.1.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteName': '1.3.6.1.4.1.9.10.24.1.2.1.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteEndpointName': '1.3.6.1.4.1.9.10.24.1.2.1.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalInitConnection': '1.3.6.1.4.1.9.10.24.1.2.1.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelOrigCause': '1.3.6.1.4.1.9.10.24.1.2.1.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelState': '1.3.6.1.4.1.9.10.24.1.2.1.1.8', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelActiveSessions': '1.3.6.1.4.1.9.10.24.1.2.1.1.9', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelDeniedUsers': '1.3.6.1.4.1.9.10.24.1.2.1.1.10', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSoftshut': '1.3.6.1.4.1.9.10.24.1.2.1.1.12', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelNetworkServiceType': '1.3.6.1.4.1.9.10.24.1.2.1.1.13', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelLocalIpAddress': '1.3.6.1.4.1.9.10.24.1.2.1.1.14', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSourceIpAddress': '1.3.6.1.4.1.9.10.24.1.2.1.1.15', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelRemoteIpAddress': '1.3.6.1.4.1.9.10.24.1.2.1.1.16', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrTable': '1.3.6.1.4.1.9.10.24.1.2.2', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrEntry': '1.3.6.1.4.1.9.10.24.1.2.2.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrTunnelId': '1.3.6.1.4.1.9.10.24.1.2.2.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteTunnelId': '1.3.6.1.4.1.9.10.24.1.2.2.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalName': '1.3.6.1.4.1.9.10.24.1.2.2.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteName': '1.3.6.1.4.1.9.10.24.1.2.2.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteEndpointName': '1.3.6.1.4.1.9.10.24.1.2.2.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInitConnection': '1.3.6.1.4.1.9.10.24.1.2.2.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrOrigCause': '1.3.6.1.4.1.9.10.24.1.2.2.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrState': '1.3.6.1.4.1.9.10.24.1.2.2.1.8', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrActiveSessions': '1.3.6.1.4.1.9.10.24.1.2.2.1.9', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrDeniedUsers': '1.3.6.1.4.1.9.10.24.1.2.2.1.10', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSoftshut': '1.3.6.1.4.1.9.10.24.1.2.2.1.11', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrNetworkServiceType': '1.3.6.1.4.1.9.10.24.1.2.2.1.12', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalIpAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.13', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceIpAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.14', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteIpAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.15', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInetAddressType': '1.3.6.1.4.1.9.10.24.1.2.2.1.16', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrLocalInetAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.17', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceInetAddressType': '1.3.6.1.4.1.9.10.24.1.2.2.1.18', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrSourceInetAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.19', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteInetAddressType': '1.3.6.1.4.1.9.10.24.1.2.2.1.20', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelAttrRemoteInetAddress': '1.3.6.1.4.1.9.10.24.1.2.2.1.21', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionInfo': '1.3.6.1.4.1.9.10.24.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionTable': '1.3.6.1.4.1.9.10.24.1.3.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionEntry': '1.3.6.1.4.1.9.10.24.1.3.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionId': '1.3.6.1.4.1.9.10.24.1.3.1.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionUserName': '1.3.6.1.4.1.9.10.24.1.3.1.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionState': '1.3.6.1.4.1.9.10.24.1.3.1.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionCallDuration': '1.3.6.1.4.1.9.10.24.1.3.1.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionPacketsOut': '1.3.6.1.4.1.9.10.24.1.3.1.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionBytesOut': '1.3.6.1.4.1.9.10.24.1.3.1.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionPacketsIn': '1.3.6.1.4.1.9.10.24.1.3.1.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionBytesIn': '1.3.6.1.4.1.9.10.24.1.3.1.1.8', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDeviceType': '1.3.6.1.4.1.9.10.24.1.3.1.1.9', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDeviceCallerId': '1.3.6.1.4.1.9.10.24.1.3.1.1.10', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDevicePhyId': '1.3.6.1.4.1.9.10.24.1.3.1.1.11', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionMultilink': '1.3.6.1.4.1.9.10.24.1.3.1.1.12', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemSlotIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.13', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemPortIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.14', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1SlotIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.15', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1PortIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.16', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionDS1ChannelIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.17', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemCallStartTime': '1.3.6.1.4.1.9.10.24.1.3.1.1.18', 'CISCO-VPDN-MGMT-MIB::cvpdnTunnelSessionModemCallStartIndex': '1.3.6.1.4.1.9.10.24.1.3.1.1.19', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrTable': '1.3.6.1.4.1.9.10.24.1.3.2', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrEntry': '1.3.6.1.4.1.9.10.24.1.3.2.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrSessionId': '1.3.6.1.4.1.9.10.24.1.3.2.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrUserName': '1.3.6.1.4.1.9.10.24.1.3.2.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrState': '1.3.6.1.4.1.9.10.24.1.3.2.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrCallDuration': '1.3.6.1.4.1.9.10.24.1.3.2.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrPacketsOut': '1.3.6.1.4.1.9.10.24.1.3.2.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrBytesOut': '1.3.6.1.4.1.9.10.24.1.3.2.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrPacketsIn': '1.3.6.1.4.1.9.10.24.1.3.2.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrBytesIn': '1.3.6.1.4.1.9.10.24.1.3.2.1.8', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDeviceType': '1.3.6.1.4.1.9.10.24.1.3.2.1.9', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDeviceCallerId': '1.3.6.1.4.1.9.10.24.1.3.2.1.10', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDevicePhyId': '1.3.6.1.4.1.9.10.24.1.3.2.1.11', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilink': '1.3.6.1.4.1.9.10.24.1.3.2.1.12', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemSlotIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.13', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemPortIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.14', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1SlotIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.15', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1PortIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.16', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrDS1ChannelIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.17', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemCallStartTime': '1.3.6.1.4.1.9.10.24.1.3.2.1.18', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrModemCallStartIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.19', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrVirtualCircuitID': '1.3.6.1.4.1.9.10.24.1.3.2.1.20', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrSentPktsDropped': '1.3.6.1.4.1.9.10.24.1.3.2.1.21', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrRecvPktsDropped': '1.3.6.1.4.1.9.10.24.1.3.2.1.22', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilinkBundle': '1.3.6.1.4.1.9.10.24.1.3.2.1.23', 'CISCO-VPDN-MGMT-MIB::cvpdnSessionAttrMultilinkIfIndex': '1.3.6.1.4.1.9.10.24.1.3.2.1.24', 'CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfo': '1.3.6.1.4.1.9.10.24.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfoTable': '1.3.6.1.4.1.9.10.24.1.4.1', 'CISCO-VPDN-MGMT-MIB::cvpdnUserToFailHistInfoEntry': '1.3.6.1.4.1.9.10.24.1.4.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistUname': '1.3.6.1.4.1.9.10.24.1.4.1.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistTunnelId': '1.3.6.1.4.1.9.10.24.1.4.1.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistUserId': '1.3.6.1.4.1.9.10.24.1.4.1.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistLocalInitConn': '1.3.6.1.4.1.9.10.24.1.4.1.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistLocalName': '1.3.6.1.4.1.9.10.24.1.4.1.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistRemoteName': '1.3.6.1.4.1.9.10.24.1.4.1.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceIp': '1.3.6.1.4.1.9.10.24.1.4.1.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestIp': '1.3.6.1.4.1.9.10.24.1.4.1.1.8', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistCount': '1.3.6.1.4.1.9.10.24.1.4.1.1.9', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailTime': '1.3.6.1.4.1.9.10.24.1.4.1.1.10', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailType': '1.3.6.1.4.1.9.10.24.1.4.1.1.11', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistFailReason': '1.3.6.1.4.1.9.10.24.1.4.1.1.12', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceInetType': '1.3.6.1.4.1.9.10.24.1.4.1.1.13', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistSourceInetAddr': '1.3.6.1.4.1.9.10.24.1.4.1.1.14', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestInetType': '1.3.6.1.4.1.9.10.24.1.4.1.1.15', 'CISCO-VPDN-MGMT-MIB::cvpdnUnameToFailHistDestInetAddr': '1.3.6.1.4.1.9.10.24.1.4.1.1.16', 'CISCO-VPDN-MGMT-MIB::cvpdnTemplateInfo': '1.3.6.1.4.1.9.10.24.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnTemplateTable': '1.3.6.1.4.1.9.10.24.1.5.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTemplateEntry': '1.3.6.1.4.1.9.10.24.1.5.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTemplateName': '1.3.6.1.4.1.9.10.24.1.5.1.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnTemplateActiveSessions': '1.3.6.1.4.1.9.10.24.1.5.1.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnMultilinkInfo': '1.3.6.1.4.1.9.10.24.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithOneLink': '1.3.6.1.4.1.9.10.24.1.6.1', 'CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithTwoLinks': '1.3.6.1.4.1.9.10.24.1.6.2', 'CISCO-VPDN-MGMT-MIB::cvpdnBundlesWithMoreThanTwoLinks': '1.3.6.1.4.1.9.10.24.1.6.3', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleTable': '1.3.6.1.4.1.9.10.24.1.6.4', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleEntry': '1.3.6.1.4.1.9.10.24.1.6.4.1', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleName': '1.3.6.1.4.1.9.10.24.1.6.4.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleLinkCount': '1.3.6.1.4.1.9.10.24.1.6.4.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpointType': '1.3.6.1.4.1.9.10.24.1.6.4.1.3', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpoint': '1.3.6.1.4.1.9.10.24.1.6.4.1.4', 'CISCO-VPDN-MGMT-MIB::cvpdnBundlePeerIpAddrType': '1.3.6.1.4.1.9.10.24.1.6.4.1.5', 'CISCO-VPDN-MGMT-MIB::cvpdnBundlePeerIpAddr': '1.3.6.1.4.1.9.10.24.1.6.4.1.6', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleEndpointClass': '1.3.6.1.4.1.9.10.24.1.6.4.1.7', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleLastChanged': '1.3.6.1.4.1.9.10.24.1.6.5', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTable': '1.3.6.1.4.1.9.10.24.1.6.6', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleChildEntry': '1.3.6.1.4.1.9.10.24.1.6.6.1', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTunnelType': '1.3.6.1.4.1.9.10.24.1.6.6.1.1', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleChildTunnelId': '1.3.6.1.4.1.9.10.24.1.6.6.1.2', 'CISCO-VPDN-MGMT-MIB::cvpdnBundleChildSessionId': '1.3.6.1.4.1.9.10.24.1.6.6.1.3', 'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBConformance': '1.3.6.1.4.1.9.10.24.3', 'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBCompliances': '1.3.6.1.4.1.9.10.24.3.1', 'CISCO-VPDN-MGMT-MIB::ciscoVpdnMgmtMIBGroups': '1.3.6.1.4.1.9.10.24.3.2'}
display_hints = {} |
N = int(input())
positions = []
for x in range(N):
input_line = input().split()
positions.append([int(input_line[0]), int(input_line[1])])
sorted_positions = sorted(positions)
minRadius = 1000000
for x in range(len(sorted_positions)-1):
if sorted_positions[x][1] == 0 and sorted_positions[x-1][1] == 1:
distance = abs(sorted_positions[x-1][0] - sorted_positions[x][0])
if distance < minRadius:
minRadius = distance
if sorted_positions[x][1] == 0 and sorted_positions[x+1][1] == 1:
distance = abs(sorted_positions[x][0] - sorted_positions[x+1][0])
if distance < minRadius:
minRadius = distance
minRadius -= 1
visited = [False] * len(sorted_positions)
counter = 0
previous = 0
for x in range(0, len(sorted_positions)):
if x == 0:
if sorted_positions[x][1] == 1:
counter += 1
elif sorted_positions[x][1] == 1:
if abs(sorted_positions[x][0] - previous) <= minRadius:
pass
else:
counter += 1
elif sorted_positions[x][1] == 0:
pass
previous = sorted_positions[x][0]
print(counter)
| n = int(input())
positions = []
for x in range(N):
input_line = input().split()
positions.append([int(input_line[0]), int(input_line[1])])
sorted_positions = sorted(positions)
min_radius = 1000000
for x in range(len(sorted_positions) - 1):
if sorted_positions[x][1] == 0 and sorted_positions[x - 1][1] == 1:
distance = abs(sorted_positions[x - 1][0] - sorted_positions[x][0])
if distance < minRadius:
min_radius = distance
if sorted_positions[x][1] == 0 and sorted_positions[x + 1][1] == 1:
distance = abs(sorted_positions[x][0] - sorted_positions[x + 1][0])
if distance < minRadius:
min_radius = distance
min_radius -= 1
visited = [False] * len(sorted_positions)
counter = 0
previous = 0
for x in range(0, len(sorted_positions)):
if x == 0:
if sorted_positions[x][1] == 1:
counter += 1
elif sorted_positions[x][1] == 1:
if abs(sorted_positions[x][0] - previous) <= minRadius:
pass
else:
counter += 1
elif sorted_positions[x][1] == 0:
pass
previous = sorted_positions[x][0]
print(counter) |
# Payable-related constants
PAYABLE_FIRST_ROW = 20
PAYABLE_FIRST_COL = 2
PAYABLE_LAST_COL = 25
PAYABLE_SORT_BY = 3
PAYABLE_PAYPAL_ID_COL = 18
PAYABLE_FIELDS = [
'timestamp',
'requester',
'department',
'item',
'detail',
'event_date',
'payment_type',
'use_of_funds',
'notes',
'type',
'name',
'paypal',
'address',
'amount',
'driving_reimbursement'
]
PAYABLE_IGNORE_FIELDS = ('detail', 'notes')
# Receivable-related constants
RECEIVABLE_FIRST_ROW = 11
RECEIVABLE_FIRST_COL = 2
RECEIVABLE_LAST_COL = 25
RECEIVABLE_SORT_BY = 4
RECEIVABLE_FIELDS = [
'year',
'committed_date',
'timestamp',
'support_type',
'organization_type',
'budget_line_item',
'payee_name',
'payee_email',
'amount_requested',
'amount_committed',
'amount_gross',
'amount_net',
'transaction_id'
]
# Transaction-related constants
TRANSACTION_FIELDS = [
'status',
'type',
'timezone',
'timestamp',
'id',
'name',
'email',
'amount',
'fee_amount',
'net_amount',
'currency'
]
TRANSACTION_RESPONSE_KEYS = {
'L_STATUS': 'status',
'L_TYPE': 'type',
'L_TIMEZONE': 'timezone',
'L_TIMESTAMP': 'timestamp',
'L_TRANSACTIONID': 'id',
'L_NAME': 'name',
'L_EMAIL': 'email',
'L_AMT': 'amount',
'L_FEEAMT': 'fee_amount',
'L_NETAMT': 'net_amount',
'L_CURRENCYCODE': 'currency'
}
PAYABLE_TRANSACTION_MATCHES = (
('paypal', 'email'),
('amount', 'amount')
) | payable_first_row = 20
payable_first_col = 2
payable_last_col = 25
payable_sort_by = 3
payable_paypal_id_col = 18
payable_fields = ['timestamp', 'requester', 'department', 'item', 'detail', 'event_date', 'payment_type', 'use_of_funds', 'notes', 'type', 'name', 'paypal', 'address', 'amount', 'driving_reimbursement']
payable_ignore_fields = ('detail', 'notes')
receivable_first_row = 11
receivable_first_col = 2
receivable_last_col = 25
receivable_sort_by = 4
receivable_fields = ['year', 'committed_date', 'timestamp', 'support_type', 'organization_type', 'budget_line_item', 'payee_name', 'payee_email', 'amount_requested', 'amount_committed', 'amount_gross', 'amount_net', 'transaction_id']
transaction_fields = ['status', 'type', 'timezone', 'timestamp', 'id', 'name', 'email', 'amount', 'fee_amount', 'net_amount', 'currency']
transaction_response_keys = {'L_STATUS': 'status', 'L_TYPE': 'type', 'L_TIMEZONE': 'timezone', 'L_TIMESTAMP': 'timestamp', 'L_TRANSACTIONID': 'id', 'L_NAME': 'name', 'L_EMAIL': 'email', 'L_AMT': 'amount', 'L_FEEAMT': 'fee_amount', 'L_NETAMT': 'net_amount', 'L_CURRENCYCODE': 'currency'}
payable_transaction_matches = (('paypal', 'email'), ('amount', 'amount')) |
operadores = ('+', '-', '*', '/', '%', '=', '>', '<', '>=', '<=', '!', '!=', '==', '&', '|', '++', '--', '+=', '-=',
'/=', '*=')
comentario = '//'
comentario_inicio = '/*'
comentario_fim = '*/'
aspas = '"'
aspasSimples = "'"
delimitadores = (';', '{', '}', '(', ')', '[', ']', comentario, comentario_inicio, comentario_fim, aspas, aspasSimples, ",")
palavras_reservadas = ('int', 'float', 'string', 'boolean', 'char', 'void', 'double', 'public', 'private', 'igor',
'vasco', 'return', 'if', 'else', 'for', 'while', 'break', 'continue', 'funcao', 'hame', 'true',
'false', 'switch', 'case', 'default', 'print')
| operadores = ('+', '-', '*', '/', '%', '=', '>', '<', '>=', '<=', '!', '!=', '==', '&', '|', '++', '--', '+=', '-=', '/=', '*=')
comentario = '//'
comentario_inicio = '/*'
comentario_fim = '*/'
aspas = '"'
aspas_simples = "'"
delimitadores = (';', '{', '}', '(', ')', '[', ']', comentario, comentario_inicio, comentario_fim, aspas, aspasSimples, ',')
palavras_reservadas = ('int', 'float', 'string', 'boolean', 'char', 'void', 'double', 'public', 'private', 'igor', 'vasco', 'return', 'if', 'else', 'for', 'while', 'break', 'continue', 'funcao', 'hame', 'true', 'false', 'switch', 'case', 'default', 'print') |
class pycacheNotFoundError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(self.msg)
class installModulesFailedError(Exception):
def __init__(self):
self.msg = "The modules could not be installed! Some error occurred!"
super().__init__(self.msg)
| class Pycachenotfounderror(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(self.msg)
class Installmodulesfailederror(Exception):
def __init__(self):
self.msg = 'The modules could not be installed! Some error occurred!'
super().__init__(self.msg) |
# Auto-generated pytest file
class TestInit:
def test___init__(self):
fail()
class TestEnter:
def test___enter__(self):
fail()
class TestExit:
def test___exit__(self):
fail()
class TestGetSearchResultCount:
def test_get_search_result_count(self):
fail()
class TestGetSearchResultLinks:
def test_get_search_result_links(self):
fail()
class TestGetSpecialPageLinks:
def test_get_special_page_links(self):
fail()
class TestOpenIdLink:
def test_open_id_link(self):
fail()
class TestExtractSearchResultCount:
def test_extract_search_result_count(self):
fail()
class TestExtractSearchResultLinks:
def test_extract_search_result_links(self):
fail()
class TestExtractSpecialPageLinks:
def test_extract_special_page_links(self):
fail()
class TestTransformLinksToNoRedirectLinks:
def test_transform_links_to_no_redirect_links(self):
fail()
class TestIsSpecialPageIdExists:
def test_is_special_page_id_exists(self):
fail()
class TestLog:
def test_log(self):
fail()
class TestClose:
def test_close(self):
fail()
| class Testinit:
def test___init__(self):
fail()
class Testenter:
def test___enter__(self):
fail()
class Testexit:
def test___exit__(self):
fail()
class Testgetsearchresultcount:
def test_get_search_result_count(self):
fail()
class Testgetsearchresultlinks:
def test_get_search_result_links(self):
fail()
class Testgetspecialpagelinks:
def test_get_special_page_links(self):
fail()
class Testopenidlink:
def test_open_id_link(self):
fail()
class Testextractsearchresultcount:
def test_extract_search_result_count(self):
fail()
class Testextractsearchresultlinks:
def test_extract_search_result_links(self):
fail()
class Testextractspecialpagelinks:
def test_extract_special_page_links(self):
fail()
class Testtransformlinkstonoredirectlinks:
def test_transform_links_to_no_redirect_links(self):
fail()
class Testisspecialpageidexists:
def test_is_special_page_id_exists(self):
fail()
class Testlog:
def test_log(self):
fail()
class Testclose:
def test_close(self):
fail() |
# https://baike.baidu.com/item/%E5%BF%AB%E9%80%9F%E5%B9%82
# 11 = 2^0 + 2^! + 2^3
# a^11 = a^(2^0) + a^(2^1) + a^(2^3)
class Solution:
def myPow(self, x: float, n: int) -> float:
N = n
if N < 0:
x = 1/x
N = -N
ans = 1
current_product = x
while N > 0:
if N % 2 == 1:
ans = ans * current_product
current_product = current_product * current_product
N //= 2
return ans | class Solution:
def my_pow(self, x: float, n: int) -> float:
n = n
if N < 0:
x = 1 / x
n = -N
ans = 1
current_product = x
while N > 0:
if N % 2 == 1:
ans = ans * current_product
current_product = current_product * current_product
n //= 2
return ans |
# time Complexity: O(n^2)
# space Complexity: O(1)
def bubble_sort(arr):
current = 0
next = 1
last_index = len(arr)
while last_index >= next:
if arr[current] > arr[next]:
arr[current], arr[next] = arr[next], arr[current]
current += 1
next += 1
if next == last_index:
current = 0
next = 1
last_index -= 1
if __name__ == '__main__':
arr = [2, 3, 5, 6, 1]
bubble_sort(arr)
print(arr)
| def bubble_sort(arr):
current = 0
next = 1
last_index = len(arr)
while last_index >= next:
if arr[current] > arr[next]:
(arr[current], arr[next]) = (arr[next], arr[current])
current += 1
next += 1
if next == last_index:
current = 0
next = 1
last_index -= 1
if __name__ == '__main__':
arr = [2, 3, 5, 6, 1]
bubble_sort(arr)
print(arr) |
class Solution:
def isIsomorphic(self, s: str, t: str) -> bool:
# Copy / paste from the "fastest" solution.
# It's sort of beautiful in its simplicity, if wildly esoteric.
# Basically the same thing as the hacky failsafe in my first solution;
# compare the number of unique characters in each string with the
# number of unique characters in a zip of both strings. If there's a
# mismatch in any of them, they can't be isomorphic.
return len(set(zip(s, t))) == len(set(s)) == len(set(t)) | class Solution:
def is_isomorphic(self, s: str, t: str) -> bool:
return len(set(zip(s, t))) == len(set(s)) == len(set(t)) |
#
# PySNMP MIB module APDNSALG-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/APDNSALG-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:23:12 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
acmepacketMgmt, = mibBuilder.importSymbols("ACMEPACKET-SMI", "acmepacketMgmt")
ApTransportType, ApHardwareModuleFamily, ApRedundancyState = mibBuilder.importSymbols("ACMEPACKET-TC", "ApTransportType", "ApHardwareModuleFamily", "ApRedundancyState")
SysMgmtPercentage, = mibBuilder.importSymbols("APSYSMGMT-MIB", "SysMgmtPercentage")
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion")
ifIndex, InterfaceIndexOrZero, InterfaceIndex = mibBuilder.importSymbols("IF-MIB", "ifIndex", "InterfaceIndexOrZero", "InterfaceIndex")
InetZoneIndex, InetAddressPrefixLength, InetVersion, InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetZoneIndex", "InetAddressPrefixLength", "InetVersion", "InetAddressType", "InetAddress")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, IpAddress, Bits, Counter64, Integer32, Counter32, Unsigned32, Gauge32, TimeTicks, NotificationType, ObjectIdentity, ModuleIdentity, iso = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "IpAddress", "Bits", "Counter64", "Integer32", "Counter32", "Unsigned32", "Gauge32", "TimeTicks", "NotificationType", "ObjectIdentity", "ModuleIdentity", "iso")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
apDNSALGModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 9148, 3, 14))
if mibBuilder.loadTexts: apDNSALGModule.setLastUpdated('201106080000Z')
if mibBuilder.loadTexts: apDNSALGModule.setOrganization('Acme Packet, Inc')
if mibBuilder.loadTexts: apDNSALGModule.setContactInfo(' Customer Service Postal: Acme Packet, Inc 100 Crosby Drive Bedford, MA 01730 US Tel: 1-781-328-4400 E-mail: support@acmepacket.com')
if mibBuilder.loadTexts: apDNSALGModule.setDescription('The Dns Alg MIB for Acme Packet.')
apDNSALGMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1))
apDNSALGMIBGeneralObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 1))
apDNSALGMIBTabularObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2))
apDNSALGNotificationObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2))
apDNSALGNotifObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 1))
apDNSALGNotifPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2))
apDNSALGNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0))
apDNSALGConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3))
apDNSALGObjectGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1))
apDNSALGNotificationGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 2))
apDNSALGServerStatusTable = MibTable((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1), )
if mibBuilder.loadTexts: apDNSALGServerStatusTable.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerStatusTable.setDescription('A read-only table to hold the status of configured DNSALG servers, indexed by the name of the Dns alg config name, server realm and server IP.')
apDNSALGServerStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1), ).setIndexNames((0, "APDNSALG-MIB", "apDNSALGConfigIndex"), (0, "APDNSALG-MIB", "apDNSALGServerIndex"), (0, "APDNSALG-MIB", "apDNSALGServerIpAddress"))
if mibBuilder.loadTexts: apDNSALGServerStatusEntry.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerStatusEntry.setDescription('An entry designed to hold the status of a single DNSALG server')
apDNSALGConfigIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: apDNSALGConfigIndex.setStatus('current')
if mibBuilder.loadTexts: apDNSALGConfigIndex.setDescription('An integer for the sole purpose of indexing the DNS-ALG configuration.Only one DNS-ALG configuration is allowed per a realm.')
apDNSALGServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: apDNSALGServerIndex.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerIndex.setDescription('An integer for the sole purpose of indexing the Dns Server Attributes in a DNS-ALG config. Each DNS-ALG config can have multiple Dns Server Attributes.')
apDNSALGConfigName = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDNSALGConfigName.setStatus('current')
if mibBuilder.loadTexts: apDNSALGConfigName.setDescription('The name of the dns-alg-config element that contains this DNS-ALG server.')
apDNSALGServerRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDNSALGServerRealm.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerRealm.setDescription('The name of the server realm element that contains this DNSALG server.')
apDNSALGDomainSuffix = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDNSALGDomainSuffix.setStatus('current')
if mibBuilder.loadTexts: apDNSALGDomainSuffix.setDescription('The name of the domain suffix element that contains this DNSALG server.')
apDNSALGServerIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 7), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDNSALGServerIpAddress.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerIpAddress.setDescription('The IP address of this DNSALG server.')
apDNSALGServerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("inservice", 0), ("lowerpriority", 1), ("oosunreachable", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDNSALGServerStatus.setStatus('current')
if mibBuilder.loadTexts: apDNSALGServerStatus.setDescription('The status of this DNSALG server.')
apDNSALGStatsTable = MibTable((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2), )
if mibBuilder.loadTexts: apDNSALGStatsTable.setStatus('current')
if mibBuilder.loadTexts: apDNSALGStatsTable.setDescription('per DNS-ALG config(i.e.client realm)stats.')
apDnsALGStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1), ).setIndexNames((0, "APDNSALG-MIB", "apDnsAlgClientRealmIndex"))
if mibBuilder.loadTexts: apDnsALGStatsEntry.setStatus('current')
if mibBuilder.loadTexts: apDnsALGStatsEntry.setDescription('A table entry designed to hold DNS-ALG stats data')
apDnsAlgClientRealmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: apDnsAlgClientRealmIndex.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgClientRealmIndex.setDescription('An integer for the sole purpose of indexing the DNS-ALG configuration.Only one DNS-ALG configuration is allowed per a realm.')
apDnsAlgClientRealmName = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgClientRealmName.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgClientRealmName.setDescription('DNS-ALG Config realm name')
apDnsAlgCurrentQueries = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentQueries.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentQueries.setDescription('Number of queries sent in recent period received on DNS-ALG config realm.')
apDnsAlgTotalQueries = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalQueries.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalQueries.setDescription('Total number of queries sent in life time received on DNS-ALG config realm.')
apDnsAlgCurrentSucess = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentSucess.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentSucess.setDescription('Number of success responses in recent period received on DNS-ALG config realm.')
apDnsAlgTotalSucess = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalSucess.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalSucess.setDescription('Total number of success responses in life time received on DNS-ALG config realm.')
apDnsAlgCurrentNotFound = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentNotFound.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentNotFound.setDescription('Number of not-found responses in recent period received on DNS-ALG config realm.')
apDnsAlgTotalNotFound = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalNotFound.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalNotFound.setDescription('Total number of not-found responses in life time received on DNS-ALG config realm.')
apDnsAlgCurrentTimeOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentTimeOut.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentTimeOut.setDescription('Number of time out responses in recent period received on DNS-ALG config realm.')
apDnsAlgTotalTimeOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalTimeOut.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalTimeOut.setDescription('Total number of time out responses in life time received on DNS-ALG config realm')
apDnsAlgCurrentBadStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentBadStatus.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentBadStatus.setDescription('Number of bad status responses in recent period received on DNS-ALG config realm.')
apDnsAlgTotalBadStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalBadStatus.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalBadStatus.setDescription('Total number of bad status responses in life time received on DNS-ALG config realm.')
apDnsAlgCurrentOtherFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgCurrentOtherFailures.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgCurrentOtherFailures.setDescription('Number of other failure responses in recent period received on DNS-ALG config realm.')
apDnsAlgTotalOtherFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgTotalOtherFailures.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgTotalOtherFailures.setDescription('Total number of other failure responses in life time received on DNS-ALG config realm.')
apDnsAlgAvgLatency = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 15), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgAvgLatency.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgAvgLatency.setDescription('Average observed one-way signalling latency during the period in milliseconds')
apDnsAlgMaxLatency = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 16), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgMaxLatency.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgMaxLatency.setDescription('Maximum observed one-way signalling latency during the period in milliseconds')
apDnsAlgMaxBurstRate = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 17), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apDnsAlgMaxBurstRate.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgMaxBurstRate.setDescription('Maximum burst rate of traffic measured during the period (combined inbound and outbound)')
apDNSALGConstraintsStatus = MibScalar((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("inservice", 0), ("constraintsExceeded", 1)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: apDNSALGConstraintsStatus.setStatus('current')
if mibBuilder.loadTexts: apDNSALGConstraintsStatus.setDescription('The status of this DNS-ALG config realm for constraints.')
apDnsAlgStatusChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 1)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGServerStatus"))
if mibBuilder.loadTexts: apDnsAlgStatusChangeTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgStatusChangeTrap.setDescription(' The trap will be generated if the reachability status of an DNS-ALG server changes from In-Service to either Timed out or Out of Service.')
apDnsAlgStatusChangeClearTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 2)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGServerStatus"))
if mibBuilder.loadTexts: apDnsAlgStatusChangeClearTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgStatusChangeClearTrap.setDescription(' The trap will be generated if the reachability status of an DNS-ALG server changes from either Timed out or Out of Service to In-Service')
apDnsAlgConstraintStateChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 3)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus"))
if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeTrap.setDescription(" The trap will be generated if an DNS-ALG config's constriants state changed from inservice to constraintsExceeded.")
apDnsAlgConstraintStateChangeClearTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 4)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus"))
if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeClearTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeClearTrap.setDescription(" The trap will be generated if an DNS-ALG config's constriants state changed from constraintsExceeded to inservice.")
apDnsAlgSvrConstraintStateChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 5)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus"))
if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeTrap.setDescription(' The trap will be generated if an Dns Server(i.e.IP-Address) constriants state changed from inservice to constraintsExceeded.')
apDnsAlgSvrConstraintStateChangeClearTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 6)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus"))
if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeClearTrap.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeClearTrap.setDescription(' The trap will be generated if an Dns Server(i.e.IP-Address) constriants state changed from constraintsExceeded to inservice.')
apDnsAlgServerStatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1, 1)).setObjects(("APDNSALG-MIB", "apDNSALGConfigIndex"), ("APDNSALG-MIB", "apDNSALGServerIndex"), ("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGDomainSuffix"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGServerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apDnsAlgServerStatusGroup = apDnsAlgServerStatusGroup.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgServerStatusGroup.setDescription('A collection of statistics for DNS-ALG server status.')
apDnsAlgStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1, 2)).setObjects(("APDNSALG-MIB", "apDnsAlgClientRealmIndex"), ("APDNSALG-MIB", "apDnsAlgClientRealmName"), ("APDNSALG-MIB", "apDnsAlgCurrentQueries"), ("APDNSALG-MIB", "apDnsAlgTotalQueries"), ("APDNSALG-MIB", "apDnsAlgCurrentSucess"), ("APDNSALG-MIB", "apDnsAlgTotalSucess"), ("APDNSALG-MIB", "apDnsAlgCurrentNotFound"), ("APDNSALG-MIB", "apDnsAlgTotalNotFound"), ("APDNSALG-MIB", "apDnsAlgCurrentTimeOut"), ("APDNSALG-MIB", "apDnsAlgTotalTimeOut"), ("APDNSALG-MIB", "apDnsAlgCurrentBadStatus"), ("APDNSALG-MIB", "apDnsAlgTotalBadStatus"), ("APDNSALG-MIB", "apDnsAlgCurrentOtherFailures"), ("APDNSALG-MIB", "apDnsAlgTotalOtherFailures"), ("APDNSALG-MIB", "apDnsAlgAvgLatency"), ("APDNSALG-MIB", "apDnsAlgMaxLatency"), ("APDNSALG-MIB", "apDnsAlgMaxBurstRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apDnsAlgStatsGroup = apDnsAlgStatsGroup.setStatus('current')
if mibBuilder.loadTexts: apDnsAlgStatsGroup.setDescription('Report the stats of configured DNSALG config objects.')
apDNSALGNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 2, 1)).setObjects(("APDNSALG-MIB", "apDnsAlgStatusChangeTrap"), ("APDNSALG-MIB", "apDnsAlgStatusChangeClearTrap"), ("APDNSALG-MIB", "apDnsAlgConstraintStateChangeTrap"), ("APDNSALG-MIB", "apDnsAlgConstraintStateChangeClearTrap"), ("APDNSALG-MIB", "apDnsAlgSvrConstraintStateChangeTrap"), ("APDNSALG-MIB", "apDnsAlgSvrConstraintStateChangeClearTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apDNSALGNotificationsGroup = apDNSALGNotificationsGroup.setStatus('current')
if mibBuilder.loadTexts: apDNSALGNotificationsGroup.setDescription('A collection of mib objects accessible only to traps.')
mibBuilder.exportSymbols("APDNSALG-MIB", apDnsAlgTotalNotFound=apDnsAlgTotalNotFound, apDnsAlgConstraintStateChangeClearTrap=apDnsAlgConstraintStateChangeClearTrap, apDnsAlgStatusChangeTrap=apDnsAlgStatusChangeTrap, apDnsAlgTotalTimeOut=apDnsAlgTotalTimeOut, apDnsAlgStatsGroup=apDnsAlgStatsGroup, apDnsALGStatsEntry=apDnsALGStatsEntry, apDNSALGMIBGeneralObjects=apDNSALGMIBGeneralObjects, apDnsAlgTotalSucess=apDnsAlgTotalSucess, apDNSALGServerStatusEntry=apDNSALGServerStatusEntry, apDNSALGNotificationsGroup=apDNSALGNotificationsGroup, apDNSALGConstraintsStatus=apDNSALGConstraintsStatus, apDnsAlgConstraintStateChangeTrap=apDnsAlgConstraintStateChangeTrap, apDNSALGServerRealm=apDNSALGServerRealm, apDnsAlgTotalBadStatus=apDnsAlgTotalBadStatus, apDNSALGObjectGroups=apDNSALGObjectGroups, apDNSALGConfigName=apDNSALGConfigName, apDnsAlgMaxLatency=apDnsAlgMaxLatency, PYSNMP_MODULE_ID=apDNSALGModule, apDNSALGMIBTabularObjects=apDNSALGMIBTabularObjects, apDnsAlgTotalOtherFailures=apDnsAlgTotalOtherFailures, apDNSALGConfigIndex=apDNSALGConfigIndex, apDnsAlgStatusChangeClearTrap=apDnsAlgStatusChangeClearTrap, apDNSALGStatsTable=apDNSALGStatsTable, apDnsAlgClientRealmIndex=apDnsAlgClientRealmIndex, apDnsAlgSvrConstraintStateChangeTrap=apDnsAlgSvrConstraintStateChangeTrap, apDNSALGNotifications=apDNSALGNotifications, apDNSALGConformance=apDNSALGConformance, apDnsAlgCurrentNotFound=apDnsAlgCurrentNotFound, apDNSALGNotifPrefix=apDNSALGNotifPrefix, apDnsAlgMaxBurstRate=apDnsAlgMaxBurstRate, apDNSALGMIBObjects=apDNSALGMIBObjects, apDnsAlgAvgLatency=apDnsAlgAvgLatency, apDnsAlgServerStatusGroup=apDnsAlgServerStatusGroup, apDNSALGNotificationObjects=apDNSALGNotificationObjects, apDNSALGNotificationGroups=apDNSALGNotificationGroups, apDnsAlgCurrentOtherFailures=apDnsAlgCurrentOtherFailures, apDnsAlgClientRealmName=apDnsAlgClientRealmName, apDNSALGNotifObjects=apDNSALGNotifObjects, apDNSALGServerStatus=apDNSALGServerStatus, apDnsAlgCurrentSucess=apDnsAlgCurrentSucess, apDNSALGServerStatusTable=apDNSALGServerStatusTable, apDnsAlgSvrConstraintStateChangeClearTrap=apDnsAlgSvrConstraintStateChangeClearTrap, apDnsAlgCurrentQueries=apDnsAlgCurrentQueries, apDnsAlgCurrentBadStatus=apDnsAlgCurrentBadStatus, apDnsAlgCurrentTimeOut=apDnsAlgCurrentTimeOut, apDNSALGServerIpAddress=apDNSALGServerIpAddress, apDNSALGModule=apDNSALGModule, apDNSALGDomainSuffix=apDNSALGDomainSuffix, apDnsAlgTotalQueries=apDnsAlgTotalQueries, apDNSALGServerIndex=apDNSALGServerIndex)
| (acmepacket_mgmt,) = mibBuilder.importSymbols('ACMEPACKET-SMI', 'acmepacketMgmt')
(ap_transport_type, ap_hardware_module_family, ap_redundancy_state) = mibBuilder.importSymbols('ACMEPACKET-TC', 'ApTransportType', 'ApHardwareModuleFamily', 'ApRedundancyState')
(sys_mgmt_percentage,) = mibBuilder.importSymbols('APSYSMGMT-MIB', 'SysMgmtPercentage')
(object_identifier, integer, octet_string) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'Integer', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(constraints_intersection, single_value_constraint, value_size_constraint, value_range_constraint, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ConstraintsIntersection', 'SingleValueConstraint', 'ValueSizeConstraint', 'ValueRangeConstraint', 'ConstraintsUnion')
(if_index, interface_index_or_zero, interface_index) = mibBuilder.importSymbols('IF-MIB', 'ifIndex', 'InterfaceIndexOrZero', 'InterfaceIndex')
(inet_zone_index, inet_address_prefix_length, inet_version, inet_address_type, inet_address) = mibBuilder.importSymbols('INET-ADDRESS-MIB', 'InetZoneIndex', 'InetAddressPrefixLength', 'InetVersion', 'InetAddressType', 'InetAddress')
(module_compliance, notification_group, object_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup', 'ObjectGroup')
(mib_scalar, mib_table, mib_table_row, mib_table_column, mib_identifier, ip_address, bits, counter64, integer32, counter32, unsigned32, gauge32, time_ticks, notification_type, object_identity, module_identity, iso) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'MibIdentifier', 'IpAddress', 'Bits', 'Counter64', 'Integer32', 'Counter32', 'Unsigned32', 'Gauge32', 'TimeTicks', 'NotificationType', 'ObjectIdentity', 'ModuleIdentity', 'iso')
(textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString')
ap_dnsalg_module = module_identity((1, 3, 6, 1, 4, 1, 9148, 3, 14))
if mibBuilder.loadTexts:
apDNSALGModule.setLastUpdated('201106080000Z')
if mibBuilder.loadTexts:
apDNSALGModule.setOrganization('Acme Packet, Inc')
if mibBuilder.loadTexts:
apDNSALGModule.setContactInfo(' Customer Service Postal: Acme Packet, Inc 100 Crosby Drive Bedford, MA 01730 US Tel: 1-781-328-4400 E-mail: support@acmepacket.com')
if mibBuilder.loadTexts:
apDNSALGModule.setDescription('The Dns Alg MIB for Acme Packet.')
ap_dnsalgmib_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1))
ap_dnsalgmib_general_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 1))
ap_dnsalgmib_tabular_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2))
ap_dnsalg_notification_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2))
ap_dnsalg_notif_objects = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 1))
ap_dnsalg_notif_prefix = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2))
ap_dnsalg_notifications = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0))
ap_dnsalg_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3))
ap_dnsalg_object_groups = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1))
ap_dnsalg_notification_groups = mib_identifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 2))
ap_dnsalg_server_status_table = mib_table((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1))
if mibBuilder.loadTexts:
apDNSALGServerStatusTable.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGServerStatusTable.setDescription('A read-only table to hold the status of configured DNSALG servers, indexed by the name of the Dns alg config name, server realm and server IP.')
ap_dnsalg_server_status_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1)).setIndexNames((0, 'APDNSALG-MIB', 'apDNSALGConfigIndex'), (0, 'APDNSALG-MIB', 'apDNSALGServerIndex'), (0, 'APDNSALG-MIB', 'apDNSALGServerIpAddress'))
if mibBuilder.loadTexts:
apDNSALGServerStatusEntry.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGServerStatusEntry.setDescription('An entry designed to hold the status of a single DNSALG server')
ap_dnsalg_config_index = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 2147483647))).setMaxAccess('accessiblefornotify')
if mibBuilder.loadTexts:
apDNSALGConfigIndex.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGConfigIndex.setDescription('An integer for the sole purpose of indexing the DNS-ALG configuration.Only one DNS-ALG configuration is allowed per a realm.')
ap_dnsalg_server_index = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 2), integer32().subtype(subtypeSpec=value_range_constraint(1, 2147483647))).setMaxAccess('accessiblefornotify')
if mibBuilder.loadTexts:
apDNSALGServerIndex.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGServerIndex.setDescription('An integer for the sole purpose of indexing the Dns Server Attributes in a DNS-ALG config. Each DNS-ALG config can have multiple Dns Server Attributes.')
ap_dnsalg_config_name = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 4), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDNSALGConfigName.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGConfigName.setDescription('The name of the dns-alg-config element that contains this DNS-ALG server.')
ap_dnsalg_server_realm = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 5), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDNSALGServerRealm.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGServerRealm.setDescription('The name of the server realm element that contains this DNSALG server.')
ap_dnsalg_domain_suffix = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 6), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDNSALGDomainSuffix.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGDomainSuffix.setDescription('The name of the domain suffix element that contains this DNSALG server.')
ap_dnsalg_server_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 7), ip_address()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDNSALGServerIpAddress.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGServerIpAddress.setDescription('The IP address of this DNSALG server.')
ap_dnsalg_server_status = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 8), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2))).clone(namedValues=named_values(('inservice', 0), ('lowerpriority', 1), ('oosunreachable', 2)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDNSALGServerStatus.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGServerStatus.setDescription('The status of this DNSALG server.')
ap_dnsalg_stats_table = mib_table((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2))
if mibBuilder.loadTexts:
apDNSALGStatsTable.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGStatsTable.setDescription('per DNS-ALG config(i.e.client realm)stats.')
ap_dns_alg_stats_entry = mib_table_row((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1)).setIndexNames((0, 'APDNSALG-MIB', 'apDnsAlgClientRealmIndex'))
if mibBuilder.loadTexts:
apDnsALGStatsEntry.setStatus('current')
if mibBuilder.loadTexts:
apDnsALGStatsEntry.setDescription('A table entry designed to hold DNS-ALG stats data')
ap_dns_alg_client_realm_index = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 2147483647))).setMaxAccess('accessiblefornotify')
if mibBuilder.loadTexts:
apDnsAlgClientRealmIndex.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgClientRealmIndex.setDescription('An integer for the sole purpose of indexing the DNS-ALG configuration.Only one DNS-ALG configuration is allowed per a realm.')
ap_dns_alg_client_realm_name = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 2), display_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgClientRealmName.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgClientRealmName.setDescription('DNS-ALG Config realm name')
ap_dns_alg_current_queries = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 3), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgCurrentQueries.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgCurrentQueries.setDescription('Number of queries sent in recent period received on DNS-ALG config realm.')
ap_dns_alg_total_queries = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgTotalQueries.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgTotalQueries.setDescription('Total number of queries sent in life time received on DNS-ALG config realm.')
ap_dns_alg_current_sucess = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 5), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgCurrentSucess.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgCurrentSucess.setDescription('Number of success responses in recent period received on DNS-ALG config realm.')
ap_dns_alg_total_sucess = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgTotalSucess.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgTotalSucess.setDescription('Total number of success responses in life time received on DNS-ALG config realm.')
ap_dns_alg_current_not_found = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 7), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgCurrentNotFound.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgCurrentNotFound.setDescription('Number of not-found responses in recent period received on DNS-ALG config realm.')
ap_dns_alg_total_not_found = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 8), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgTotalNotFound.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgTotalNotFound.setDescription('Total number of not-found responses in life time received on DNS-ALG config realm.')
ap_dns_alg_current_time_out = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 9), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgCurrentTimeOut.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgCurrentTimeOut.setDescription('Number of time out responses in recent period received on DNS-ALG config realm.')
ap_dns_alg_total_time_out = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 10), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgTotalTimeOut.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgTotalTimeOut.setDescription('Total number of time out responses in life time received on DNS-ALG config realm')
ap_dns_alg_current_bad_status = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 11), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgCurrentBadStatus.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgCurrentBadStatus.setDescription('Number of bad status responses in recent period received on DNS-ALG config realm.')
ap_dns_alg_total_bad_status = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 12), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgTotalBadStatus.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgTotalBadStatus.setDescription('Total number of bad status responses in life time received on DNS-ALG config realm.')
ap_dns_alg_current_other_failures = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 13), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgCurrentOtherFailures.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgCurrentOtherFailures.setDescription('Number of other failure responses in recent period received on DNS-ALG config realm.')
ap_dns_alg_total_other_failures = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 14), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgTotalOtherFailures.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgTotalOtherFailures.setDescription('Total number of other failure responses in life time received on DNS-ALG config realm.')
ap_dns_alg_avg_latency = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 15), unsigned32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgAvgLatency.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgAvgLatency.setDescription('Average observed one-way signalling latency during the period in milliseconds')
ap_dns_alg_max_latency = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 16), unsigned32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgMaxLatency.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgMaxLatency.setDescription('Maximum observed one-way signalling latency during the period in milliseconds')
ap_dns_alg_max_burst_rate = mib_table_column((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 17), unsigned32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
apDnsAlgMaxBurstRate.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgMaxBurstRate.setDescription('Maximum burst rate of traffic measured during the period (combined inbound and outbound)')
ap_dnsalg_constraints_status = mib_scalar((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('inservice', 0), ('constraintsExceeded', 1)))).setMaxAccess('accessiblefornotify')
if mibBuilder.loadTexts:
apDNSALGConstraintsStatus.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGConstraintsStatus.setDescription('The status of this DNS-ALG config realm for constraints.')
ap_dns_alg_status_change_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 1)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGServerRealm'), ('APDNSALG-MIB', 'apDNSALGServerIpAddress'), ('APDNSALG-MIB', 'apDNSALGServerStatus'))
if mibBuilder.loadTexts:
apDnsAlgStatusChangeTrap.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgStatusChangeTrap.setDescription(' The trap will be generated if the reachability status of an DNS-ALG server changes from In-Service to either Timed out or Out of Service.')
ap_dns_alg_status_change_clear_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 2)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGServerRealm'), ('APDNSALG-MIB', 'apDNSALGServerIpAddress'), ('APDNSALG-MIB', 'apDNSALGServerStatus'))
if mibBuilder.loadTexts:
apDnsAlgStatusChangeClearTrap.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgStatusChangeClearTrap.setDescription(' The trap will be generated if the reachability status of an DNS-ALG server changes from either Timed out or Out of Service to In-Service')
ap_dns_alg_constraint_state_change_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 3)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGConstraintsStatus'))
if mibBuilder.loadTexts:
apDnsAlgConstraintStateChangeTrap.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgConstraintStateChangeTrap.setDescription(" The trap will be generated if an DNS-ALG config's constriants state changed from inservice to constraintsExceeded.")
ap_dns_alg_constraint_state_change_clear_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 4)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGConstraintsStatus'))
if mibBuilder.loadTexts:
apDnsAlgConstraintStateChangeClearTrap.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgConstraintStateChangeClearTrap.setDescription(" The trap will be generated if an DNS-ALG config's constriants state changed from constraintsExceeded to inservice.")
ap_dns_alg_svr_constraint_state_change_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 5)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGServerRealm'), ('APDNSALG-MIB', 'apDNSALGServerIpAddress'), ('APDNSALG-MIB', 'apDNSALGConstraintsStatus'))
if mibBuilder.loadTexts:
apDnsAlgSvrConstraintStateChangeTrap.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgSvrConstraintStateChangeTrap.setDescription(' The trap will be generated if an Dns Server(i.e.IP-Address) constriants state changed from inservice to constraintsExceeded.')
ap_dns_alg_svr_constraint_state_change_clear_trap = notification_type((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 6)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGServerRealm'), ('APDNSALG-MIB', 'apDNSALGServerIpAddress'), ('APDNSALG-MIB', 'apDNSALGConstraintsStatus'))
if mibBuilder.loadTexts:
apDnsAlgSvrConstraintStateChangeClearTrap.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgSvrConstraintStateChangeClearTrap.setDescription(' The trap will be generated if an Dns Server(i.e.IP-Address) constriants state changed from constraintsExceeded to inservice.')
ap_dns_alg_server_status_group = object_group((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1, 1)).setObjects(('APDNSALG-MIB', 'apDNSALGConfigIndex'), ('APDNSALG-MIB', 'apDNSALGServerIndex'), ('APDNSALG-MIB', 'apDNSALGConfigName'), ('APDNSALG-MIB', 'apDNSALGServerRealm'), ('APDNSALG-MIB', 'apDNSALGDomainSuffix'), ('APDNSALG-MIB', 'apDNSALGServerIpAddress'), ('APDNSALG-MIB', 'apDNSALGServerStatus'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ap_dns_alg_server_status_group = apDnsAlgServerStatusGroup.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgServerStatusGroup.setDescription('A collection of statistics for DNS-ALG server status.')
ap_dns_alg_stats_group = object_group((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1, 2)).setObjects(('APDNSALG-MIB', 'apDnsAlgClientRealmIndex'), ('APDNSALG-MIB', 'apDnsAlgClientRealmName'), ('APDNSALG-MIB', 'apDnsAlgCurrentQueries'), ('APDNSALG-MIB', 'apDnsAlgTotalQueries'), ('APDNSALG-MIB', 'apDnsAlgCurrentSucess'), ('APDNSALG-MIB', 'apDnsAlgTotalSucess'), ('APDNSALG-MIB', 'apDnsAlgCurrentNotFound'), ('APDNSALG-MIB', 'apDnsAlgTotalNotFound'), ('APDNSALG-MIB', 'apDnsAlgCurrentTimeOut'), ('APDNSALG-MIB', 'apDnsAlgTotalTimeOut'), ('APDNSALG-MIB', 'apDnsAlgCurrentBadStatus'), ('APDNSALG-MIB', 'apDnsAlgTotalBadStatus'), ('APDNSALG-MIB', 'apDnsAlgCurrentOtherFailures'), ('APDNSALG-MIB', 'apDnsAlgTotalOtherFailures'), ('APDNSALG-MIB', 'apDnsAlgAvgLatency'), ('APDNSALG-MIB', 'apDnsAlgMaxLatency'), ('APDNSALG-MIB', 'apDnsAlgMaxBurstRate'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ap_dns_alg_stats_group = apDnsAlgStatsGroup.setStatus('current')
if mibBuilder.loadTexts:
apDnsAlgStatsGroup.setDescription('Report the stats of configured DNSALG config objects.')
ap_dnsalg_notifications_group = notification_group((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 2, 1)).setObjects(('APDNSALG-MIB', 'apDnsAlgStatusChangeTrap'), ('APDNSALG-MIB', 'apDnsAlgStatusChangeClearTrap'), ('APDNSALG-MIB', 'apDnsAlgConstraintStateChangeTrap'), ('APDNSALG-MIB', 'apDnsAlgConstraintStateChangeClearTrap'), ('APDNSALG-MIB', 'apDnsAlgSvrConstraintStateChangeTrap'), ('APDNSALG-MIB', 'apDnsAlgSvrConstraintStateChangeClearTrap'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ap_dnsalg_notifications_group = apDNSALGNotificationsGroup.setStatus('current')
if mibBuilder.loadTexts:
apDNSALGNotificationsGroup.setDescription('A collection of mib objects accessible only to traps.')
mibBuilder.exportSymbols('APDNSALG-MIB', apDnsAlgTotalNotFound=apDnsAlgTotalNotFound, apDnsAlgConstraintStateChangeClearTrap=apDnsAlgConstraintStateChangeClearTrap, apDnsAlgStatusChangeTrap=apDnsAlgStatusChangeTrap, apDnsAlgTotalTimeOut=apDnsAlgTotalTimeOut, apDnsAlgStatsGroup=apDnsAlgStatsGroup, apDnsALGStatsEntry=apDnsALGStatsEntry, apDNSALGMIBGeneralObjects=apDNSALGMIBGeneralObjects, apDnsAlgTotalSucess=apDnsAlgTotalSucess, apDNSALGServerStatusEntry=apDNSALGServerStatusEntry, apDNSALGNotificationsGroup=apDNSALGNotificationsGroup, apDNSALGConstraintsStatus=apDNSALGConstraintsStatus, apDnsAlgConstraintStateChangeTrap=apDnsAlgConstraintStateChangeTrap, apDNSALGServerRealm=apDNSALGServerRealm, apDnsAlgTotalBadStatus=apDnsAlgTotalBadStatus, apDNSALGObjectGroups=apDNSALGObjectGroups, apDNSALGConfigName=apDNSALGConfigName, apDnsAlgMaxLatency=apDnsAlgMaxLatency, PYSNMP_MODULE_ID=apDNSALGModule, apDNSALGMIBTabularObjects=apDNSALGMIBTabularObjects, apDnsAlgTotalOtherFailures=apDnsAlgTotalOtherFailures, apDNSALGConfigIndex=apDNSALGConfigIndex, apDnsAlgStatusChangeClearTrap=apDnsAlgStatusChangeClearTrap, apDNSALGStatsTable=apDNSALGStatsTable, apDnsAlgClientRealmIndex=apDnsAlgClientRealmIndex, apDnsAlgSvrConstraintStateChangeTrap=apDnsAlgSvrConstraintStateChangeTrap, apDNSALGNotifications=apDNSALGNotifications, apDNSALGConformance=apDNSALGConformance, apDnsAlgCurrentNotFound=apDnsAlgCurrentNotFound, apDNSALGNotifPrefix=apDNSALGNotifPrefix, apDnsAlgMaxBurstRate=apDnsAlgMaxBurstRate, apDNSALGMIBObjects=apDNSALGMIBObjects, apDnsAlgAvgLatency=apDnsAlgAvgLatency, apDnsAlgServerStatusGroup=apDnsAlgServerStatusGroup, apDNSALGNotificationObjects=apDNSALGNotificationObjects, apDNSALGNotificationGroups=apDNSALGNotificationGroups, apDnsAlgCurrentOtherFailures=apDnsAlgCurrentOtherFailures, apDnsAlgClientRealmName=apDnsAlgClientRealmName, apDNSALGNotifObjects=apDNSALGNotifObjects, apDNSALGServerStatus=apDNSALGServerStatus, apDnsAlgCurrentSucess=apDnsAlgCurrentSucess, apDNSALGServerStatusTable=apDNSALGServerStatusTable, apDnsAlgSvrConstraintStateChangeClearTrap=apDnsAlgSvrConstraintStateChangeClearTrap, apDnsAlgCurrentQueries=apDnsAlgCurrentQueries, apDnsAlgCurrentBadStatus=apDnsAlgCurrentBadStatus, apDnsAlgCurrentTimeOut=apDnsAlgCurrentTimeOut, apDNSALGServerIpAddress=apDNSALGServerIpAddress, apDNSALGModule=apDNSALGModule, apDNSALGDomainSuffix=apDNSALGDomainSuffix, apDnsAlgTotalQueries=apDnsAlgTotalQueries, apDNSALGServerIndex=apDNSALGServerIndex) |
'''
/******************************************************************
*
* Copyright 2018 Samsung Electronics All Rights Reserved.
*
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************/
'''
class TestRunOption:
max_total_count = 3
min_pass_count = 1
max_timeout_count = 2
XML_PASS_CRITERIA = 'xml'
LOG_PASS_CRITERIA = 'log'
def __init__(self, binary_name, suite_name, tc_name, package_name):
self.binary_name = binary_name
self.suite_name = suite_name
self.tc_name = tc_name
self.package_name = package_name
self.total_count = 0
self.pass_count = 0
self.fail_count = 0
self.timeout_count = 0
def increase_total_count(self):
self.total_count += 1
def increase_pass_count(self):
self.pass_count += 1
def increase_fail_count(self):
self.fail_count += 1
def increase_timeout_count(self):
self.timeout_count += 1
def is_execution_complete(self):
if self.pass_count >= TestRunOption.min_pass_count or self.timeout_count >= TestRunOption.max_timeout_count or self.total_count >= TestRunOption.max_total_count:
return True
return False
| """
/******************************************************************
*
* Copyright 2018 Samsung Electronics All Rights Reserved.
*
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************/
"""
class Testrunoption:
max_total_count = 3
min_pass_count = 1
max_timeout_count = 2
xml_pass_criteria = 'xml'
log_pass_criteria = 'log'
def __init__(self, binary_name, suite_name, tc_name, package_name):
self.binary_name = binary_name
self.suite_name = suite_name
self.tc_name = tc_name
self.package_name = package_name
self.total_count = 0
self.pass_count = 0
self.fail_count = 0
self.timeout_count = 0
def increase_total_count(self):
self.total_count += 1
def increase_pass_count(self):
self.pass_count += 1
def increase_fail_count(self):
self.fail_count += 1
def increase_timeout_count(self):
self.timeout_count += 1
def is_execution_complete(self):
if self.pass_count >= TestRunOption.min_pass_count or self.timeout_count >= TestRunOption.max_timeout_count or self.total_count >= TestRunOption.max_total_count:
return True
return False |
hpp = 'AL-Import' # Specify the name of the hpp to print the graph
graph_title='EM- Total Impact of the energy maximization scenario on '+ hpp
df_em2 = df_em1.groupby(['scenario'])['value'].sum().round(2).reset_index()
fig5c = px.bar(df_em2, x='scenario', y='value', text= 'value', color='scenario',barmode='group',
labels={"value": "GWh", "tech":"HPP"}, title=graph_title,
category_orders={"scenario": ["Reference", "Energy Max"]},
facet_col_spacing=0.05, facet_row_spacing=0.05)
#fig.for_each_annotation(lambda a: a.update(text=a.text.split("=")[-1]))
fig5c.update_traces(texttemplate='%{text:.5s}', textposition='outside') #to format the text on each bar
#fig.update_layout(uniformtext_minsize=7, uniformtext_mode='hide') #to format the text on each bar
#fig.update_yaxes(range=[0, 2300]) #setting the y-axis scale to ensure enough space for the text on each bar
#fig.update_xaxes(showline=True, linewidth=2, linecolor='black', mirror=True) #drawing the border on x-axis
#fig.update_yaxes(showline=True, linewidth=2, linecolor='black', mirror=True) #drawing the border on y-axis
#You can change the image extension to *.png if you want or keep it as pdf (for high resolution)
#output_folder = os.path.join('Results_graphics')
#os.makedirs(output_folder, exist_ok = True)
#pio.write_image(fig, 'Results_graphics/{}.pdf'.format(graph_title))
#fig.show() | hpp = 'AL-Import'
graph_title = 'EM- Total Impact of the energy maximization scenario on ' + hpp
df_em2 = df_em1.groupby(['scenario'])['value'].sum().round(2).reset_index()
fig5c = px.bar(df_em2, x='scenario', y='value', text='value', color='scenario', barmode='group', labels={'value': 'GWh', 'tech': 'HPP'}, title=graph_title, category_orders={'scenario': ['Reference', 'Energy Max']}, facet_col_spacing=0.05, facet_row_spacing=0.05)
fig5c.update_traces(texttemplate='%{text:.5s}', textposition='outside') |
class MessageTypeNotSupported(Exception):
pass
class MessageDoesNotExist(Exception):
pass
| class Messagetypenotsupported(Exception):
pass
class Messagedoesnotexist(Exception):
pass |
# https://www.codechef.com/problems/RAINBOWA
for T in range(int(input())):
n,l=int(input()),list(map(int,input().split()))
print("no") if(set(l)!=set(list(range(1,8))) or l[0]!=1 or l[-1]!=1 or l!=l[::-1]) else print("yes") | for t in range(int(input())):
(n, l) = (int(input()), list(map(int, input().split())))
print('no') if set(l) != set(list(range(1, 8))) or l[0] != 1 or l[-1] != 1 or (l != l[::-1]) else print('yes') |
# -*- encoding:utf-8 -*-
__version__ = (1, 2, 11)
__version_str__ = ".".join(map(str, __version__))
__version_core__ = (3, 0, 4)
| __version__ = (1, 2, 11)
__version_str__ = '.'.join(map(str, __version__))
__version_core__ = (3, 0, 4) |
def to_camel_case(s):
return ('' if not s else s[0] + ''.join(c.upper() if s[::-1][i + 1] in '-_'
else '' if c in '-_'
else c for i, c in
enumerate(s[::-1][:-1]))[::-1])
| def to_camel_case(s):
return '' if not s else s[0] + ''.join((c.upper() if s[::-1][i + 1] in '-_' else '' if c in '-_' else c for (i, c) in enumerate(s[::-1][:-1])))[::-1] |
##NIM, Umur, Tinggi = (211080200045, 18, 170)
##print(NIM, Umur, Tinggi)
angka_positif = 1,2,3,4,5,6,7,8,9
print(angka_positif)
| angka_positif = (1, 2, 3, 4, 5, 6, 7, 8, 9)
print(angka_positif) |
GOLD = ["7374", "7857", "7990", "8065", "8250"]
ANNOTATORS = ["01", "02", "03", "04", "05", "06"]
DOC_HEADER = ["order", "doc_id", "assigned", "nr_sens_calculated", "nr_sens", "annotator_1", "annotator_2",
"assigned_2"]
CYCLE_FILE = "../input/batch_cycles.csv"
CYCLE_COL = "cycle"
ASSIGNMENT_TXT = "assignment.txt"
ASSIGNMENT_XLSX = "assignment.xlsx"
ASSIGNMENT_FILE_HEADER = ["doc_id"]
ASSIGNMENT_DF_HEADER_BASE = ["annotator", "assigned_sentences"]
ASSIGNMENT_ADDITIONAL_HEADER = ["docs_in_batch", "sentences_in_batch", "sum_sentences"]
ANNOTATOR_DOWNLOAD_FOLDER = "download"
ANNOTATOR_UPLOAD_FOLDER = "upload"
PHASE_STR = "phase"
ATTRIBUTES_TO_IGNORE = {
"AusnahmePruefungErforderlich",
"WeitereBestimmungPruefungErforderlich",
"ZuVorherigemSatzGehoerig",
"Segmentierungsfehler",
"NoAttribute",
"N/A",
"StrittigeBedeutung",
}
############
# Labels review
############
class LabelReviewExcelConstants:
MAIN_SHEET_NAME = "Review"
ATTRIBUTE_NAMED_RANGE = "Attribute"
ATTRIBUTE_REVIEW_NAMED_RANGE = "Attribute_Review"
SENTENCE_REVIEW_NAMED_RANGE = "Sentence_Review"
ERROR_LABEL = "Error"
FIRST_DATA_ROW = 2
SEN_ID_COL = 1
SEN_REVIEW_COL = 2
SEN_TEXT_COL = 3
ATTRIBUTE_OFFSET = 4
ATTRIBUTE_STEP = 5
CATEGORY_OFFSET = 0
LABEL_OFFSET = 1
COUNT_OFFSET = 2
ANNOTATORS_OFFSET = 3
ATTRIBUTE_REVIEW_OFFSET = 4
ANNOTATOR_SEPARATOR = "\n"
############
# Full xlsx
############
class FullAnnotationExcelConstants:
MAIN_SHEET_NAME = "Data"
ATTRIBUTE_NAMED_RANGE = "Attribute"
TYPE_NAMED_RANGE = "Type"
MODALITY_NAMED_RANGE = "Modality"
FIRST_DATA_ROW = 2
SEN_ID_COL = 1
SEN_TEXT_COL = 2
MODALITY_COL = 3
ATTRIBUTE_OFFSET = 4
ATTRIBUTE_STEP = 4
CATEGORY_OFFSET = 0
LABEL_OFFSET = 1
VALUE_OFFSET = 2
TYPE_OFFSET = 3
LAST_COLUMN = "BO1"
############
# Full review
############
class FullReviewExcelConstants:
MAIN_SHEET_NAME = "Data"
ATTRIBUTE_NAMED_RANGE = "Attribute"
TYPE_NAMED_RANGE = "Type"
MODALITY_NAMED_RANGE = "Modality"
SENTENCE_REVIEW_NAMED_RANGE = "Sentence_Review"
ERROR_LABEL = "Error"
FIRST_DATA_ROW = 2
SEN_ID_COL = 1
SEN_REVIEW_COL = 2
SEN_TEXT_COL = 3
MODALITY_ANN_1_COL = 4
MODALITY_ANN_2_COL = 5
MODALITY_ANN_REV_COL = 6
ATTRIBUTE_OFFSET = 7
ATTRIBUTE_STEP = 6
CATEGORY_OFFSET = 0
LABEL_OFFSET = 1
VALUE_OFFSET = 2
TYPE_ANN_1_OFFSET = 3
TYPE_ANN_2_OFFSET = 4
TYPE_ANN_REV_OFFSET = 5
LAST_COLUMN = "CX1"
| gold = ['7374', '7857', '7990', '8065', '8250']
annotators = ['01', '02', '03', '04', '05', '06']
doc_header = ['order', 'doc_id', 'assigned', 'nr_sens_calculated', 'nr_sens', 'annotator_1', 'annotator_2', 'assigned_2']
cycle_file = '../input/batch_cycles.csv'
cycle_col = 'cycle'
assignment_txt = 'assignment.txt'
assignment_xlsx = 'assignment.xlsx'
assignment_file_header = ['doc_id']
assignment_df_header_base = ['annotator', 'assigned_sentences']
assignment_additional_header = ['docs_in_batch', 'sentences_in_batch', 'sum_sentences']
annotator_download_folder = 'download'
annotator_upload_folder = 'upload'
phase_str = 'phase'
attributes_to_ignore = {'AusnahmePruefungErforderlich', 'WeitereBestimmungPruefungErforderlich', 'ZuVorherigemSatzGehoerig', 'Segmentierungsfehler', 'NoAttribute', 'N/A', 'StrittigeBedeutung'}
class Labelreviewexcelconstants:
main_sheet_name = 'Review'
attribute_named_range = 'Attribute'
attribute_review_named_range = 'Attribute_Review'
sentence_review_named_range = 'Sentence_Review'
error_label = 'Error'
first_data_row = 2
sen_id_col = 1
sen_review_col = 2
sen_text_col = 3
attribute_offset = 4
attribute_step = 5
category_offset = 0
label_offset = 1
count_offset = 2
annotators_offset = 3
attribute_review_offset = 4
annotator_separator = '\n'
class Fullannotationexcelconstants:
main_sheet_name = 'Data'
attribute_named_range = 'Attribute'
type_named_range = 'Type'
modality_named_range = 'Modality'
first_data_row = 2
sen_id_col = 1
sen_text_col = 2
modality_col = 3
attribute_offset = 4
attribute_step = 4
category_offset = 0
label_offset = 1
value_offset = 2
type_offset = 3
last_column = 'BO1'
class Fullreviewexcelconstants:
main_sheet_name = 'Data'
attribute_named_range = 'Attribute'
type_named_range = 'Type'
modality_named_range = 'Modality'
sentence_review_named_range = 'Sentence_Review'
error_label = 'Error'
first_data_row = 2
sen_id_col = 1
sen_review_col = 2
sen_text_col = 3
modality_ann_1_col = 4
modality_ann_2_col = 5
modality_ann_rev_col = 6
attribute_offset = 7
attribute_step = 6
category_offset = 0
label_offset = 1
value_offset = 2
type_ann_1_offset = 3
type_ann_2_offset = 4
type_ann_rev_offset = 5
last_column = 'CX1' |
class IntegerField:
def __str__(self):
return "integer"
| class Integerfield:
def __str__(self):
return 'integer' |
class AdministrativeDivision:
def __init__(self, level):
self.level = level
pass
class Province(AdministrativeDivision):
type = 'Province'
area = 0
center = ''
def __init__(self, name):
self.name = name
self.level = 1
def __str__(self):
return f"{self.name} {self.type}"
pass
class Regency(AdministrativeDivision):
type = 'Regency'
area = 0
center = ''
def __init__(self, name):
self.name = name
self.level = 2
def __str__(self):
return f"{self.name} {self.type}"
pass
class City(AdministrativeDivision):
type = 'City'
area = 0
center = ''
def __init__(self, name):
self.name = name
self.level = 2
def __str__(self):
return f"{self.name} {self.type}"
class District(AdministrativeDivision):
type = 'District'
area = 0
center = ''
def __init__(self, name):
self.name = name
self.level = 3
def __str__(self):
return f"{self.name} {self.type}"
pass
| class Administrativedivision:
def __init__(self, level):
self.level = level
pass
class Province(AdministrativeDivision):
type = 'Province'
area = 0
center = ''
def __init__(self, name):
self.name = name
self.level = 1
def __str__(self):
return f'{self.name} {self.type}'
pass
class Regency(AdministrativeDivision):
type = 'Regency'
area = 0
center = ''
def __init__(self, name):
self.name = name
self.level = 2
def __str__(self):
return f'{self.name} {self.type}'
pass
class City(AdministrativeDivision):
type = 'City'
area = 0
center = ''
def __init__(self, name):
self.name = name
self.level = 2
def __str__(self):
return f'{self.name} {self.type}'
class District(AdministrativeDivision):
type = 'District'
area = 0
center = ''
def __init__(self, name):
self.name = name
self.level = 3
def __str__(self):
return f'{self.name} {self.type}'
pass |
# Binary Tree implemented using python list
class BinaryTree:
def __init__(self,size) -> None:
self.cl=size*[None]
self.lastUsedIndex=0
self.maxSize=size
def insertNode(self,value):
if self.lastUsedIndex+1==self.maxSize:
return "BT is full"
self.cl[self.lastUsedIndex+1]=value
self.lastUsedIndex+=1
return "value successfully inserted"
def searchNode(self,value):
if value in self.cl:
return "Success"
return "Failed"
def preOrderTraversal(self,index=1):
if index>self.lastUsedIndex:
return
print(self.cl[index])
#call left subtree
self.preOrderTraversal(index*2)
self.preOrderTraversal(index*2+1)
def inOrderTraversal(self,index=1):
if index>self.lastUsedIndex:
return
self.inOrderTraversal(index*2)
print(self.cl[index])
self.inOrderTraversal(index*2+1)
def postOrderTraversal(self,index=1):
if index>self.lastUsedIndex:
return
self.postOrderTraversal(index*2)
self.postOrderTraversal(index*2+1)
print(self.cl[index])
def levelOrderTraversal(self,index=1):
for i in range(index,self.lastUsedIndex+1):
print(self.cl[i])
def deleteNode(self,value):
if self.lastUsedIndex==0:
return "List is empty"
for i in range(1,self.lastUsedIndex+1):
if self.cl[i]==value:
self.cl[i]=self.cl[self.lastUsedIndex]
self.cl[self.lastUsedIndex]=None
self.lastUsedIndex-=1
return "Node successfully deleted"
def deleteBT(self):
self.cl=None
return "BT deleted successfully"
bt=BinaryTree(8)
bt.insertNode("drinks")
bt.insertNode("hot")
bt.insertNode("cold")
bt.insertNode("tea")
bt.insertNode("coffee")
print(bt.searchNode('hot'))
print(bt.deleteNode('tea'))
#bt.preOrderTraversal()
#bt.inOrderTraversal()
#bt.postOrderTraversal()
bt.levelOrderTraversal() | class Binarytree:
def __init__(self, size) -> None:
self.cl = size * [None]
self.lastUsedIndex = 0
self.maxSize = size
def insert_node(self, value):
if self.lastUsedIndex + 1 == self.maxSize:
return 'BT is full'
self.cl[self.lastUsedIndex + 1] = value
self.lastUsedIndex += 1
return 'value successfully inserted'
def search_node(self, value):
if value in self.cl:
return 'Success'
return 'Failed'
def pre_order_traversal(self, index=1):
if index > self.lastUsedIndex:
return
print(self.cl[index])
self.preOrderTraversal(index * 2)
self.preOrderTraversal(index * 2 + 1)
def in_order_traversal(self, index=1):
if index > self.lastUsedIndex:
return
self.inOrderTraversal(index * 2)
print(self.cl[index])
self.inOrderTraversal(index * 2 + 1)
def post_order_traversal(self, index=1):
if index > self.lastUsedIndex:
return
self.postOrderTraversal(index * 2)
self.postOrderTraversal(index * 2 + 1)
print(self.cl[index])
def level_order_traversal(self, index=1):
for i in range(index, self.lastUsedIndex + 1):
print(self.cl[i])
def delete_node(self, value):
if self.lastUsedIndex == 0:
return 'List is empty'
for i in range(1, self.lastUsedIndex + 1):
if self.cl[i] == value:
self.cl[i] = self.cl[self.lastUsedIndex]
self.cl[self.lastUsedIndex] = None
self.lastUsedIndex -= 1
return 'Node successfully deleted'
def delete_bt(self):
self.cl = None
return 'BT deleted successfully'
bt = binary_tree(8)
bt.insertNode('drinks')
bt.insertNode('hot')
bt.insertNode('cold')
bt.insertNode('tea')
bt.insertNode('coffee')
print(bt.searchNode('hot'))
print(bt.deleteNode('tea'))
bt.levelOrderTraversal() |
num1 = int(input())
count1 = 0
while 1 <= num1 <= 5:
if num1 == 5:
count1 += 1
num1 = int(input())
print(count1)
| num1 = int(input())
count1 = 0
while 1 <= num1 <= 5:
if num1 == 5:
count1 += 1
num1 = int(input())
print(count1) |
class Label(object):
def __eq__(self, other):
assert(isinstance(other, Label))
return type(self) == type(other)
def __ne__(self, other):
assert(isinstance(other, Label))
return type(self) != type(other)
def __hash__(self):
return hash(self.to_class_str())
def to_class_str(self):
return self.__class__.__name__
class NoLabel(Label):
pass
| class Label(object):
def __eq__(self, other):
assert isinstance(other, Label)
return type(self) == type(other)
def __ne__(self, other):
assert isinstance(other, Label)
return type(self) != type(other)
def __hash__(self):
return hash(self.to_class_str())
def to_class_str(self):
return self.__class__.__name__
class Nolabel(Label):
pass |
#Function to insert a string in the middle of a string
def string_in():
string=str(input("Enter a string :"))
mid=len(string)//2
word=str(input("Enter a word to insert in middle :"))
new_string=string[:mid]+word+string[mid:]
print(new_string)
string_in()
| def string_in():
string = str(input('Enter a string :'))
mid = len(string) // 2
word = str(input('Enter a word to insert in middle :'))
new_string = string[:mid] + word + string[mid:]
print(new_string)
string_in() |
#
# This file contains "references" to unreferenced code that should be kept and not considered dead code
#
not_used_but_whitelisted
| not_used_but_whitelisted |
# Copyright 2018 TNG Technology Consulting GmbH, Unterfoehring, Germany
# Licensed under the Apache License, Version 2.0 - see LICENSE.md in project root directory
# TODO IT-1: give this function some great functionality
def great_function():
pass
# TODO: give this function some greater functionality
def greater_function():
pass
| def great_function():
pass
def greater_function():
pass |
n = 0
for i in range(999, 100, -1):
for j in range(i, 100, -1):
x = i * j
if x > n:
s = str(i * j)
if s == s[::-1]:
n = i * j
print(n)
| n = 0
for i in range(999, 100, -1):
for j in range(i, 100, -1):
x = i * j
if x > n:
s = str(i * j)
if s == s[::-1]:
n = i * j
print(n) |
class DictSerializable:
@classmethod
def from_dict(cls, data: dict) -> 'DictSerializable':
return cls(**data)
def to_dict(self) -> dict:
return vars(self) | class Dictserializable:
@classmethod
def from_dict(cls, data: dict) -> 'DictSerializable':
return cls(**data)
def to_dict(self) -> dict:
return vars(self) |
known = {}
def ack(m, n):
if m == 0:
return n + 1
if m > 0 and n == 0:
return ack(m-1, 1)
if m > 0 and n > 0:
if (m,n) in known:
print('Cache hit')
return known[(m, n)]
else:
known[(m, n)] = ack(m - 1, ack(m , n - 1))
return known[(m, n)]
else:
return None
print ('ack(3, 4) =', ack(3, 4))
print ('ack(3, 5) =', ack(3, 5))
print ('ack(3, 6) =', ack(3, 6))
print ('ack(3, 7) =', ack(3, 7))
| known = {}
def ack(m, n):
if m == 0:
return n + 1
if m > 0 and n == 0:
return ack(m - 1, 1)
if m > 0 and n > 0:
if (m, n) in known:
print('Cache hit')
return known[m, n]
else:
known[m, n] = ack(m - 1, ack(m, n - 1))
return known[m, n]
else:
return None
print('ack(3, 4) =', ack(3, 4))
print('ack(3, 5) =', ack(3, 5))
print('ack(3, 6) =', ack(3, 6))
print('ack(3, 7) =', ack(3, 7)) |
#
# @lc app=leetcode id=450 lang=python3
#
# [450] Delete Node in a BST
#
# @lc code=start
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def deleteNode(self, root: TreeNode, key: int) -> TreeNode:
if not root:
return None
if root.val == key:
if not root.right:
left = root.left
return left
right = root.right
while root.left:
right = root.left
root.val, right.val = right.val, root.values()
root.left = self.deleteNode(root.left, key)
root.right = self.deleteNode(root.right, key)
return root
# @lc code=end
| class Solution:
def delete_node(self, root: TreeNode, key: int) -> TreeNode:
if not root:
return None
if root.val == key:
if not root.right:
left = root.left
return left
right = root.right
while root.left:
right = root.left
(root.val, right.val) = (right.val, root.values())
root.left = self.deleteNode(root.left, key)
root.right = self.deleteNode(root.right, key)
return root |
__title__ = 'pairing-functions'
__description__ = 'A collection of pairing functions'
__url__ = 'https://github.com/ConvertGroupLabs/pairing-functions'
__version__ = '0.2.1'
__author__ = 'Convert Group Labs'
__author_email__ = 'tools@convertgroup.com'
__license__ = 'MIT License'
__copyright__ = 'Copyright 2020 Convert Group'
| __title__ = 'pairing-functions'
__description__ = 'A collection of pairing functions'
__url__ = 'https://github.com/ConvertGroupLabs/pairing-functions'
__version__ = '0.2.1'
__author__ = 'Convert Group Labs'
__author_email__ = 'tools@convertgroup.com'
__license__ = 'MIT License'
__copyright__ = 'Copyright 2020 Convert Group' |
def deleteMid(head):
# check if the list contains 1 or more nodes
if head is None or head.next is None:
return None
#assign pointers to their respective positions
prev, i, j = None, head, head
while j and j.next:
j = j.next.next;# j pointer moves 2 nodes ahead
# update prev pointer , prev holds previous value of i pointer
prev = i;
# i pointer moves 1 node ahead
i = i.next;
# since i pointer was moving at half speed of j pointer , it points at
# mid node when j pointer reaches the end
prev.next = i.next; # bypassing mid node
return head;
#Driver's code
class Node:
def __init__(self,data):
self.data = data
self.next = None
class Llist:
def __init__(self):
self.head = None
def insert(self,data,link):
node = Node (data)
if not self.head:
self.head = node
return node
link.next = node
return node
def printList(head):
while head:
print(head.data, end=" ")
head = head.next
print()
if __name__ == "__main__":
t = int (input())
for x in range(t):
n = int(input())
arr1 = [int(y) for y in input().split()]
L1 = Llist()
link = None
for nodeData in arr1:
link = L1.insert (nodeData, link)
res = deleteMid(l1.head)
printList(res)
| def delete_mid(head):
if head is None or head.next is None:
return None
(prev, i, j) = (None, head, head)
while j and j.next:
j = j.next.next
prev = i
i = i.next
prev.next = i.next
return head
class Node:
def __init__(self, data):
self.data = data
self.next = None
class Llist:
def __init__(self):
self.head = None
def insert(self, data, link):
node = node(data)
if not self.head:
self.head = node
return node
link.next = node
return node
def print_list(head):
while head:
print(head.data, end=' ')
head = head.next
print()
if __name__ == '__main__':
t = int(input())
for x in range(t):
n = int(input())
arr1 = [int(y) for y in input().split()]
l1 = llist()
link = None
for node_data in arr1:
link = L1.insert(nodeData, link)
res = delete_mid(l1.head)
print_list(res) |
N = int(input())
a = N % 1000
if a == 0:
print(0)
else:
print(1000 - a)
| n = int(input())
a = N % 1000
if a == 0:
print(0)
else:
print(1000 - a) |
# -*- coding: utf-8 -*-
f = open("dico.txt", "r")
contrasenia = "hola"
contador = 0
linea = f.readline()
while linea:
contador += 1
if linea.strip() == contrasenia.strip():
print('Contrasenia encontrada: ' + linea)
print('en ' + str(contador) + ' intentos')
break
linea = f.readline()
f.close()
| f = open('dico.txt', 'r')
contrasenia = 'hola'
contador = 0
linea = f.readline()
while linea:
contador += 1
if linea.strip() == contrasenia.strip():
print('Contrasenia encontrada: ' + linea)
print('en ' + str(contador) + ' intentos')
break
linea = f.readline()
f.close() |
first = "Murat"
last = "Aksoy"
name = f"Welcome to pyhton '{last}', {first}"
print(name) | first = 'Murat'
last = 'Aksoy'
name = f"Welcome to pyhton '{last}', {first}"
print(name) |
AUTHOR="Zawadi Done"
DESCRIPTION="This module wil install/update MassDNS"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/blechschmidt/massdns"
INSTALL_LOCATION="massdns"
DEBIAN=""
AFTER_COMMANDS="cd {INSTALL_LOCATION},make,cp bin/massdns /usr/local/bin/"
LAUNCHER="massdns"
| author = 'Zawadi Done'
description = 'This module wil install/update MassDNS'
install_type = 'GIT'
repository_location = 'https://github.com/blechschmidt/massdns'
install_location = 'massdns'
debian = ''
after_commands = 'cd {INSTALL_LOCATION},make,cp bin/massdns /usr/local/bin/'
launcher = 'massdns' |
'''
This module declares constants needed for this solution. This is to remove
magic numbers
'''
CRATER_CHANGE_WHEN_SUNNY = 0.9
CRATER_CHANGE_WHEN_RAINY = 1.2
CRATER_CHANGE_WHEN_WINDY = 0.0
ORBIT1_ORBIT_DISTANCE = 18
ORBIT1_CRATERS_COUNT = 20
ORBIT2_ORBIT_DISTANCE = 20
ORBIT2_CRATERS_COUNT = 10
| """
This module declares constants needed for this solution. This is to remove
magic numbers
"""
crater_change_when_sunny = 0.9
crater_change_when_rainy = 1.2
crater_change_when_windy = 0.0
orbit1_orbit_distance = 18
orbit1_craters_count = 20
orbit2_orbit_distance = 20
orbit2_craters_count = 10 |
class Solution:
def singleNumber(self, nums: List[int]) -> int:
ret = 0
for n in nums:
ret ^= n
return ret | class Solution:
def single_number(self, nums: List[int]) -> int:
ret = 0
for n in nums:
ret ^= n
return ret |
#!/usr/bin/env python3
# https://www.urionlinejudge.com.br/judge/en/problems/view/1020
def decompose(total, value):
decomposed = total // value
return total - decomposed * value, decomposed
def main():
DAYS = int(input())
DAYS, YEARS = decompose(DAYS, 365)
DAYS, MONTHS = decompose(DAYS, 30)
print(YEARS, 'ano(s)')
print(MONTHS, 'mes(es)')
print(DAYS, 'dia(s)')
# Start the execution if it's the main script
if __name__ == "__main__":
main()
| def decompose(total, value):
decomposed = total // value
return (total - decomposed * value, decomposed)
def main():
days = int(input())
(days, years) = decompose(DAYS, 365)
(days, months) = decompose(DAYS, 30)
print(YEARS, 'ano(s)')
print(MONTHS, 'mes(es)')
print(DAYS, 'dia(s)')
if __name__ == '__main__':
main() |
'''
Creating a very basic module in Python
'''
languages = {'Basic', 'QBasic', 'Cobol', 'Pascal', 'Assembly', 'C/C++', 'Java', 'Python', 'Ruby'}
values = 10, 50, 60, 11, 98, 75, 65, 32
def add(*args: float) -> float:
sum = 0.0
for value in args:
sum += value
return sum
def multiply(*args: float) -> float:
prod = 1.0
for value in args:
prod *= value
return prod
def _prime(number: int) -> bool:
if number <= 1:
return False
elif number == 2:
return True
elif number % 2 == 0:
return False
else:
for n in range(3, int(number ** 0.5), 2):
if number % n == 0:
return False
else:
return True
| """
Creating a very basic module in Python
"""
languages = {'Basic', 'QBasic', 'Cobol', 'Pascal', 'Assembly', 'C/C++', 'Java', 'Python', 'Ruby'}
values = (10, 50, 60, 11, 98, 75, 65, 32)
def add(*args: float) -> float:
sum = 0.0
for value in args:
sum += value
return sum
def multiply(*args: float) -> float:
prod = 1.0
for value in args:
prod *= value
return prod
def _prime(number: int) -> bool:
if number <= 1:
return False
elif number == 2:
return True
elif number % 2 == 0:
return False
else:
for n in range(3, int(number ** 0.5), 2):
if number % n == 0:
return False
else:
return True |
#Config, Reference, and configure provided in globals
cards = Config(
hd_audio=Config(
match=dict(),
name='Auto-%(id)s-%(label)s',
restart=-1,
input=dict(
label="input",
subdevice='0',
channels=2,
buffer_size=512,
buffer_count=4,
sample_rate=48000,
quality=4
),
output=dict(
label="output",
subdevice='0',
channels=2,
buffer_size=512,
buffer_count=4,
sample_rate=48000,
quality=4
)
)
)
| cards = config(hd_audio=config(match=dict(), name='Auto-%(id)s-%(label)s', restart=-1, input=dict(label='input', subdevice='0', channels=2, buffer_size=512, buffer_count=4, sample_rate=48000, quality=4), output=dict(label='output', subdevice='0', channels=2, buffer_size=512, buffer_count=4, sample_rate=48000, quality=4))) |
#!/usr/bin/env python3
# Copyright 2018, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
TRIPLEO_MAPPING_GROUP = {
# Mandatory group mappings
'hosts': ['undercloud', 'overcloud', 'Undercloud', 'Overcloud'],
'all': ['hosts'],
# Infrastructure group mappings
'shared-infra_hosts': ['Controller', 'controller'],
'rabbitmq_all': ['Controller', 'controller'],
'memcached_all': ['Controller', 'controller'],
'galera_all': ['Controller', 'controller'],
'galera': ['Controller', 'controller'],
'rsyslog_all': ['Controller', 'controller'],
'utility_all': ['undercloud', 'Undercloud'],
'localhost': ['undercloud', 'Undercloud'],
# OpenStack group mappings
# Keystone
'keystone_all': ['Controller', 'controller'],
# Nova
'nova_all': [
'nova_placement', 'nova_conductor', 'nova_metadata',
'nova_consoleauth', 'nova_api', 'nova_migration_target',
'nova_compute', 'nova_scheduler', 'nova_libvirt',
'nova_vnc_proxy'
],
'nova_api_metadata': ['nova_metadata'],
'nova_api_os_compute': ['nova_api'],
'nova_compute': ['Compute'],
'nova_console': ['nova_consoleauth'],
# Neutron
'neutron_all': [
'neutron_metadata', 'neutron_dhcp',
'neutron_plugin_ml2', 'neutron_ovs_agent',
'neutron_api', 'neutron_l3'
],
'neutron_server': ['neutron_api'],
'neutron_dhcp_agent': ['neutron_dhcp'],
'neutron_l3_agent': ['neutron_l3'],
'neutron_linuxbridge_agent': ['neutron_ovs_agent'],
'neutron_openvswitch_agent': ['neutron_ovs_agent'],
'neutron_metadata_agent': ['neutron_metadata'],
# Glance
'glance_all': ['glance_api', 'glance_registry_disabled'],
# Heat
'heat_all': [
'heat_api', 'heat_api_cloudwatch_disabled',
'heat_engine', 'heat_api_cfn'
],
# Cinder
'cinder_all': ['cinder_api', 'cinder_volume', 'cinder_scheduler'],
# Horizon
'horizon_all': ['horizon'],
# Designate
'designate_all': [
'designate_worker',
'designate_api',
'designate_producer',
'designate_mdns',
'designate_central',
],
# Ceph
'ceph_all': ['ceph_osd', 'ceph_mon', 'ceph_rgw'],
'mons': ['ceph_mon'],
'osds': ['ceph_osd'],
'rgws': ['ceph_rgw'],
# Swift - skip swift_proxy because it already exists in tripleO
'swift_all': ['swift_proxy', 'swift_storage'],
'swift_hosts': ['swift_storage'],
'swift_acc': ['swift_storage'],
'swift_cont': ['swift_storage'],
'swift_obj': ['swift_storage'],
# Octavia
'octavia_all': [
'octavia_api',
'octavia_health_manager',
'octavia_housekeeping',
'octavia_worker'
]
# NOTE(npawelek): Designate is not GA in OSP13
# Designate
# 'designate_all': ['designate_all'],
# NOTE(npawelek): Ironic mappings are not confirmed yet. We're not
# currently deploying ironic to customers due to RFEs around multi
# tenancy. When this functionality is needed, we'll need to define
# all the groupings properly.
#
# Ironic
# 'ironic_all': ['ironic_api', 'ironic_compute', 'ironic_conductor'],
# 'ironic_api': ['ironic_api'],
# 'ironic_conductor': ['ironic_conductor'],
# 'ironic_compute': ['ironic_compute'],
}
| tripleo_mapping_group = {'hosts': ['undercloud', 'overcloud', 'Undercloud', 'Overcloud'], 'all': ['hosts'], 'shared-infra_hosts': ['Controller', 'controller'], 'rabbitmq_all': ['Controller', 'controller'], 'memcached_all': ['Controller', 'controller'], 'galera_all': ['Controller', 'controller'], 'galera': ['Controller', 'controller'], 'rsyslog_all': ['Controller', 'controller'], 'utility_all': ['undercloud', 'Undercloud'], 'localhost': ['undercloud', 'Undercloud'], 'keystone_all': ['Controller', 'controller'], 'nova_all': ['nova_placement', 'nova_conductor', 'nova_metadata', 'nova_consoleauth', 'nova_api', 'nova_migration_target', 'nova_compute', 'nova_scheduler', 'nova_libvirt', 'nova_vnc_proxy'], 'nova_api_metadata': ['nova_metadata'], 'nova_api_os_compute': ['nova_api'], 'nova_compute': ['Compute'], 'nova_console': ['nova_consoleauth'], 'neutron_all': ['neutron_metadata', 'neutron_dhcp', 'neutron_plugin_ml2', 'neutron_ovs_agent', 'neutron_api', 'neutron_l3'], 'neutron_server': ['neutron_api'], 'neutron_dhcp_agent': ['neutron_dhcp'], 'neutron_l3_agent': ['neutron_l3'], 'neutron_linuxbridge_agent': ['neutron_ovs_agent'], 'neutron_openvswitch_agent': ['neutron_ovs_agent'], 'neutron_metadata_agent': ['neutron_metadata'], 'glance_all': ['glance_api', 'glance_registry_disabled'], 'heat_all': ['heat_api', 'heat_api_cloudwatch_disabled', 'heat_engine', 'heat_api_cfn'], 'cinder_all': ['cinder_api', 'cinder_volume', 'cinder_scheduler'], 'horizon_all': ['horizon'], 'designate_all': ['designate_worker', 'designate_api', 'designate_producer', 'designate_mdns', 'designate_central'], 'ceph_all': ['ceph_osd', 'ceph_mon', 'ceph_rgw'], 'mons': ['ceph_mon'], 'osds': ['ceph_osd'], 'rgws': ['ceph_rgw'], 'swift_all': ['swift_proxy', 'swift_storage'], 'swift_hosts': ['swift_storage'], 'swift_acc': ['swift_storage'], 'swift_cont': ['swift_storage'], 'swift_obj': ['swift_storage'], 'octavia_all': ['octavia_api', 'octavia_health_manager', 'octavia_housekeeping', 'octavia_worker']} |
def make_bio_dict(tags, start_idx=0):
d = dict()
i = start_idx
for tag in tags:
for pre_tag in ['B-', 'I-']:
d[pre_tag + tag] = i
i += 1
d['O'] = i
return d | def make_bio_dict(tags, start_idx=0):
d = dict()
i = start_idx
for tag in tags:
for pre_tag in ['B-', 'I-']:
d[pre_tag + tag] = i
i += 1
d['O'] = i
return d |
# -*- coding: utf-8 -*-
__author__ = "Sergey Aganezov"
__email__ = "aganezov(at)cs.jhu.edu"
__status__ = "production"
version = "1.10"
__all__ = ["grimm",
"breakpoint_graph",
"graphviz",
"utils",
"edge",
"genome",
"kbreak",
"multicolor",
"tree",
"vertices",
"utils",
"distances"]
| __author__ = 'Sergey Aganezov'
__email__ = 'aganezov(at)cs.jhu.edu'
__status__ = 'production'
version = '1.10'
__all__ = ['grimm', 'breakpoint_graph', 'graphviz', 'utils', 'edge', 'genome', 'kbreak', 'multicolor', 'tree', 'vertices', 'utils', 'distances'] |
size(200, 200)
stroke(0)
strokeWidth(10)
fill(1, 0.3, 0)
polygon((40, 40), (40, 160))
polygon((60, 40), (60, 160), (130, 160))
polygon((100, 40), (160, 160), (160, 40), close=False)
| size(200, 200)
stroke(0)
stroke_width(10)
fill(1, 0.3, 0)
polygon((40, 40), (40, 160))
polygon((60, 40), (60, 160), (130, 160))
polygon((100, 40), (160, 160), (160, 40), close=False) |
# flake8: noqa
_base_ = [
'./coco.py'
]
data = dict(
samples_per_gpu=2,
workers_per_gpu=2,
train=dict(classes=('person',)),
val=dict(classes=('person',)),
test=dict(classes=('person',))
)
| _base_ = ['./coco.py']
data = dict(samples_per_gpu=2, workers_per_gpu=2, train=dict(classes=('person',)), val=dict(classes=('person',)), test=dict(classes=('person',))) |
x = int(input())
n = int(input())
pool = x
for _ in range(n):
pool += x - int(input())
print(pool)
| x = int(input())
n = int(input())
pool = x
for _ in range(n):
pool += x - int(input())
print(pool) |
def reject_outliers(data, m = 2.):
d = np.abs(data - np.median(data))
mdev = np.median(d)
s = d/mdev if mdev else 0.
return (s < m)
def mean_dup(x_):
global reject_outliers
if 1==len(np.unique(x_.values)):
return x_.values[0]
else:
x = x_.values[reject_outliers(x_.values.copy())]
x_mean = x.mean()
mask = (x_mean*0.975 <= x) & (x <= x_mean*1.025)
return x[mask].mean()
def remove_duplicate(df):
'''
Removes duplicates in dataframe and element samples whose composition is not 100%
input format -> df = dataframe
'''
features = df.columns.values.tolist()
features.remove(df.columns[-1])
property_name = df.columns[-1]
df = df[df[features].sum(axis=1).between(99,101)]
df = df.groupby(features,as_index=False).agg(mean_dup)
df = df.dropna()
df = df.loc[(df[property_name])> 0]
return df
| def reject_outliers(data, m=2.0):
d = np.abs(data - np.median(data))
mdev = np.median(d)
s = d / mdev if mdev else 0.0
return s < m
def mean_dup(x_):
global reject_outliers
if 1 == len(np.unique(x_.values)):
return x_.values[0]
else:
x = x_.values[reject_outliers(x_.values.copy())]
x_mean = x.mean()
mask = (x_mean * 0.975 <= x) & (x <= x_mean * 1.025)
return x[mask].mean()
def remove_duplicate(df):
"""
Removes duplicates in dataframe and element samples whose composition is not 100%
input format -> df = dataframe
"""
features = df.columns.values.tolist()
features.remove(df.columns[-1])
property_name = df.columns[-1]
df = df[df[features].sum(axis=1).between(99, 101)]
df = df.groupby(features, as_index=False).agg(mean_dup)
df = df.dropna()
df = df.loc[df[property_name] > 0]
return df |
class MyList:
class _Node:
__slots__ = ('value', 'next')
def __init__(self, value, next=None):
self.value = value
self.next = next
class _NodeIterator:
def __init__(self, first):
self._next_node = first
def __iter__(self):
return self
def __next__(self):
if self._next_node is None:
raise StopIteration
value = self._next_node.value
self._next_node = self._next_node.next
return value
def __init__(self, iterable=None):
self._head = None
self._tail = None
self._length = 0
if iterable is not None:
self.extend(iterable)
def append(self, value):
node = MyList._Node(value)
if len(self) == 0:
self._head = self._tail = node
else:
self._tail.next = node
self._tail = node
self._length += 1
def __len__(self):
return self._length
def extend(self, iterable):
for value in iterable:
self.append(value)
def __getitem__(self, index):
if index < 0:
index += len(self)
if not 0 <= index < len(self):
raise IndexError('list index out of range')
node = self._head
for _ in range(index):
node = node.next
return node.value
def __iter__(self):
return MyList._NodeIterator(self._head)
values = MyList([4, 2, 1, 99, 9])
print(values)
# print(values[0])
# print(values[1])
# print(values[2])
for el in values:
print(el, end=' ') | class Mylist:
class _Node:
__slots__ = ('value', 'next')
def __init__(self, value, next=None):
self.value = value
self.next = next
class _Nodeiterator:
def __init__(self, first):
self._next_node = first
def __iter__(self):
return self
def __next__(self):
if self._next_node is None:
raise StopIteration
value = self._next_node.value
self._next_node = self._next_node.next
return value
def __init__(self, iterable=None):
self._head = None
self._tail = None
self._length = 0
if iterable is not None:
self.extend(iterable)
def append(self, value):
node = MyList._Node(value)
if len(self) == 0:
self._head = self._tail = node
else:
self._tail.next = node
self._tail = node
self._length += 1
def __len__(self):
return self._length
def extend(self, iterable):
for value in iterable:
self.append(value)
def __getitem__(self, index):
if index < 0:
index += len(self)
if not 0 <= index < len(self):
raise index_error('list index out of range')
node = self._head
for _ in range(index):
node = node.next
return node.value
def __iter__(self):
return MyList._NodeIterator(self._head)
values = my_list([4, 2, 1, 99, 9])
print(values)
for el in values:
print(el, end=' ') |
number = 10
array = '64630 11735 14216 99233 14470 4978 73429 38120 51135 67060'
array = list(map(int, array.split()))
def find_mean(a):
return round(sum(a)/number, 1)
def find_median(a):
a = sorted(a)
if len(a) % 2 == 0:
return round((a[number//2 - 1] + a[number//2])/2, 1)
else:
return a[number//2]
def find_mode(a):
a = sorted(a)
counts = {i: a.count(i) for i in a}
sorted_x = sorted(counts.items(), key=lambda z: z[1], reverse=True)
return sorted_x[0][0]
print(find_mean(array))
print(find_median(array))
print(find_mode(array))
| number = 10
array = '64630 11735 14216 99233 14470 4978 73429 38120 51135 67060'
array = list(map(int, array.split()))
def find_mean(a):
return round(sum(a) / number, 1)
def find_median(a):
a = sorted(a)
if len(a) % 2 == 0:
return round((a[number // 2 - 1] + a[number // 2]) / 2, 1)
else:
return a[number // 2]
def find_mode(a):
a = sorted(a)
counts = {i: a.count(i) for i in a}
sorted_x = sorted(counts.items(), key=lambda z: z[1], reverse=True)
return sorted_x[0][0]
print(find_mean(array))
print(find_median(array))
print(find_mode(array)) |
class IAMPolicies():
def __init__(self, iam):
self.client = iam
def _marker_handler(self, marker=None, scope='All'):
if marker:
response = self.client.list_policies(
Scope=scope,
OnlyAttached=True,
PolicyUsageFilter='PermissionsPolicy',
Marker=marker)
else:
response = self.client.list_policies(
Scope=scope,
OnlyAttached=True,
PolicyUsageFilter='PermissionsPolicy'
)
return response
def get_policy_document(self, arn, version_id):
response = self.client.get_policy_version(PolicyArn=arn, VersionId=version_id)
document = response['PolicyVersion']['Document']
return document
def get_policies(self):
policies = []
marker = None
for scope in ['AWS', 'Local']:
while True:
resp = self._marker_handler(marker, scope)
for policy in resp['Policies']:
policy['Scope'] = scope
policies.append(policy)
marker = resp.get('Marker')
if not resp['IsTruncated']:
break
return policies | class Iampolicies:
def __init__(self, iam):
self.client = iam
def _marker_handler(self, marker=None, scope='All'):
if marker:
response = self.client.list_policies(Scope=scope, OnlyAttached=True, PolicyUsageFilter='PermissionsPolicy', Marker=marker)
else:
response = self.client.list_policies(Scope=scope, OnlyAttached=True, PolicyUsageFilter='PermissionsPolicy')
return response
def get_policy_document(self, arn, version_id):
response = self.client.get_policy_version(PolicyArn=arn, VersionId=version_id)
document = response['PolicyVersion']['Document']
return document
def get_policies(self):
policies = []
marker = None
for scope in ['AWS', 'Local']:
while True:
resp = self._marker_handler(marker, scope)
for policy in resp['Policies']:
policy['Scope'] = scope
policies.append(policy)
marker = resp.get('Marker')
if not resp['IsTruncated']:
break
return policies |
runtime_project='core'
editor_project='core-Editor'
runtime_project_file='Assembly-CSharp'
editor_project_file='Assembly-CSharp-Editor'
define='ANDROID'
MONO="/Applications/Unity/MonoDevelop.app/Contents/Frameworks/Mono.framework/Versions/Current/bin/mono"
MDTOOL="/Applications/Unity/MonoDevelop.app/Contents/MacOS/lib/monodevelop/bin/mdtool.exe"
MONO_SOLUTION="Core.sln"
| runtime_project = 'core'
editor_project = 'core-Editor'
runtime_project_file = 'Assembly-CSharp'
editor_project_file = 'Assembly-CSharp-Editor'
define = 'ANDROID'
mono = '/Applications/Unity/MonoDevelop.app/Contents/Frameworks/Mono.framework/Versions/Current/bin/mono'
mdtool = '/Applications/Unity/MonoDevelop.app/Contents/MacOS/lib/monodevelop/bin/mdtool.exe'
mono_solution = 'Core.sln' |
L = 0
heatmap = []
while True:
try:
line = [int(x) for x in input()]
# Pad heatmap with 9s
heat = [9] + line + [9]
L = len(heat)
heatmap.extend(heat)
except EOFError:
break
index = 0
# Pad 9s in top and bottom
heatmap = heatmap + (L*[9])
bigmap = (L*[9]) + heatmap + (L*[9])
# Part 1
total = []
for index, value in enumerate(heatmap, start = 0):
if bigmap[index] == 9:
continue
me = bigmap[index]
left = bigmap[index - 1]
right = bigmap[index + 1]
top = bigmap[index - L]
bottom = bigmap[index + L]
if me == left or me == right or me == top or me == bottom:
continue
lowest = min(me, left, right, top, bottom)
if me == lowest:
total.append(lowest + 1)
print("SUM", sum(total))
# Part 2
# Get list of the indexes of lowest points
#
# From each lowest point go:
# UP then as far left and right as possiple (repeat with UP steps)
# Down then as far left and right as possiple (repeat with DOWN steps)
| l = 0
heatmap = []
while True:
try:
line = [int(x) for x in input()]
heat = [9] + line + [9]
l = len(heat)
heatmap.extend(heat)
except EOFError:
break
index = 0
heatmap = heatmap + L * [9]
bigmap = L * [9] + heatmap + L * [9]
total = []
for (index, value) in enumerate(heatmap, start=0):
if bigmap[index] == 9:
continue
me = bigmap[index]
left = bigmap[index - 1]
right = bigmap[index + 1]
top = bigmap[index - L]
bottom = bigmap[index + L]
if me == left or me == right or me == top or (me == bottom):
continue
lowest = min(me, left, right, top, bottom)
if me == lowest:
total.append(lowest + 1)
print('SUM', sum(total)) |
# Enter your code for "Hello with attitude" here.
name = input("What is your name? ")
print("So you call yourself '" + name + "' huh?")
| name = input('What is your name? ')
print("So you call yourself '" + name + "' huh?") |
# https://www.codewars.com/kata/59e49b2afc3c494d5d00002a/train/python
def sort_vowels(s):
if isinstance(s, int) or s == None:
return ''
vovels = ['a', 'e', 'u', 'i', 'o']
output = []
for letter in s:
if vovels.count(letter.lower()) > 0:
output.append(f'|{letter}')
else:
output.append(f'{letter}|')
return '\n'.join(output)
| def sort_vowels(s):
if isinstance(s, int) or s == None:
return ''
vovels = ['a', 'e', 'u', 'i', 'o']
output = []
for letter in s:
if vovels.count(letter.lower()) > 0:
output.append(f'|{letter}')
else:
output.append(f'{letter}|')
return '\n'.join(output) |
#!/usr/bin/python3
# The MDPs consists of a range of integers 0..stateMax which represent
# the states of the MDP, a set of actions. The rewards and transition
# probabilities are accessed with some of the functions below defined
# for the Python classes that represent MDPs.
#
# - The __init__ constructor builds the state of the MDP, possibly
# with additional configuration options.
# - applicableActions(state) returns all actions possible in a state.
# - successors(state,action) returns the information about possible
# successor state of a state. It is a triple (s,p,r) where
# - s is a successor state,
# - p is the probability of reaching s from state, and
# - r is the reward/cost when going from state to s.
# - stateMax is the maximum state index. The minimum is 0.
# - show() visualizes the MDP in whatever way it can be visualized.
# The example MDP class provided for this exercise is a grid navigation
# problem, in which an agent can move to the four cardinal directions
# in a finite rectangular grid.
# The move actions are nondeterministic: with 0.8 probability the move
# is to the nominal direction (N,S,E,W), but with 0.1+0.1 probabilities
# the move is to one of the direction 90 degrees off the nominal direction.
# So, when trying to move North, with probability 0.8 the move actually
# is to North, but it will be to the East with probability 0.1 and to
# the West with probability 0.1.
# Grid cells are associated with rewards/costs, obtained when reaching
# the cell. 99 is a special number in the grid cell which indicates that
# the cell cannot be entered. Moves to these 99 cells or against the outside
# wall of the grid will result in the agent not moving anywhere.
# The example MDP has a 'teleport' feature which may be turned on when
# creating the MDP: all moves from the NE corner will lead to the SW corner.
class GridMDP:
def __init__(self, xs, ys, cells, teleport=False):
self.xSize = xs # number of columns
self.ySize = ys # number of rows
self.stateMax = xs * ys - 1 # index of last (SE corner) cell
self.grid = cells # List for rewards/costs of all cells
self.teleport = teleport
NORTH = 1
SOUTH = 2
WEST = 3
EAST = 4
ACTIONS = [NORTH, SOUTH, WEST, EAST]
def turnleft(self, a):
if a == self.NORTH:
return self.WEST
elif a == self.WEST:
return self.SOUTH
elif a == self.SOUTH:
return self.EAST
else:
return self.NORTH
def turnright(self, a):
if a == self.NORTH:
return self.EAST
elif a == self.EAST:
return self.SOUTH
elif a == self.SOUTH:
return self.WEST
else:
return self.NORTH
def actionName(self, a):
if a == self.NORTH:
return "N"
elif a == self.SOUTH:
return "S"
elif a == self.EAST:
return "E"
else:
return "W"
def possible(self, action, state):
if self.grid[state] == 99:
return False
else:
return True
def applicableActions(self, state):
return [x for x in self.ACTIONS if self.possible(x, state)]
# For every state and action, compute list of (state',P,R)
# where state' is a successor of state
# P is the probability of reaching state'
# R is the reward obtained when reaching state'
# Triples with the same state' will be merged.
# The sum of the probabilities P is always 1.
def addmove(self, state, direction, prob, dict):
if direction == self.NORTH and state >= self.xSize and self.grid[state - self.xSize] != 99:
state2 = state - self.xSize
elif direction == self.SOUTH and state <= self.stateMax - self.xSize and self.grid[state + self.xSize] != 99:
state2 = state + self.xSize
elif direction == self.EAST and (state + 1) % self.xSize > 0 and self.grid[state + 1] != 99:
state2 = state + 1
elif direction == self.WEST and state % self.xSize > 0 and self.grid[state - 1] != 99:
state2 = state - 1
else:
state2 = state
if self.teleport and state == self.xSize - 1: # Teleport from the NE corner
state2 = self.stateMax - self.xSize + 1 # to the SW corner
reward = self.grid[state2]
if state2 in dict:
tmp = dict[state2]
dict[state2] = (tmp[0] + prob, reward) # Sum the probabilities when merging.
else:
dict[state2] = (prob, reward)
# Compute all successor state of state, with their probabilities and rewards
def successors(self, state, action):
dict = {}
self.addmove(state, self.turnleft(action), 0.1, dict),
self.addmove(state, self.turnright(action), 0.1, dict),
self.addmove(state, action, 0.8, dict)
succlist = []
for state2, value in dict.items():
tmp = (state2, value[0], value[1])
succlist.append(tmp)
return succlist
# Show the rewards of all grid cells
def show(self):
print("--------------------")
for y in range(0, self.ySize):
for x in range(0, self.xSize):
i = y * self.xSize + x
if self.grid[i] == 99: # wall cell inside the grid
print("##", end="")
elif self.grid[i] == 0: # 0 reward cells shown as .
print(" .", end="")
else:
print("%2d" % (self.grid[i]), end="")
print("")
# Show the policy/plan for a grid MDP
# MDP policies represented as dictionaries with the state
# indices 0..stateMax as the dictionary keys, and the actions
# as the values.
def showPolicy(self, policy):
print("--------------------")
for y in range(0, self.ySize):
for x in range(0, self.xSize):
i = y * self.xSize + x
if self.grid[i] == 99:
print("#", end="")
else:
print(self.actionName(policy[i]), end="")
print("")
def showValues(self, V):
print("--------------------")
for y in range(0, self.ySize):
for x in range(0, self.xSize):
i = y * self.xSize + x
print(" %3.3f" % V[i], end='')
print("")
| class Gridmdp:
def __init__(self, xs, ys, cells, teleport=False):
self.xSize = xs
self.ySize = ys
self.stateMax = xs * ys - 1
self.grid = cells
self.teleport = teleport
north = 1
south = 2
west = 3
east = 4
actions = [NORTH, SOUTH, WEST, EAST]
def turnleft(self, a):
if a == self.NORTH:
return self.WEST
elif a == self.WEST:
return self.SOUTH
elif a == self.SOUTH:
return self.EAST
else:
return self.NORTH
def turnright(self, a):
if a == self.NORTH:
return self.EAST
elif a == self.EAST:
return self.SOUTH
elif a == self.SOUTH:
return self.WEST
else:
return self.NORTH
def action_name(self, a):
if a == self.NORTH:
return 'N'
elif a == self.SOUTH:
return 'S'
elif a == self.EAST:
return 'E'
else:
return 'W'
def possible(self, action, state):
if self.grid[state] == 99:
return False
else:
return True
def applicable_actions(self, state):
return [x for x in self.ACTIONS if self.possible(x, state)]
def addmove(self, state, direction, prob, dict):
if direction == self.NORTH and state >= self.xSize and (self.grid[state - self.xSize] != 99):
state2 = state - self.xSize
elif direction == self.SOUTH and state <= self.stateMax - self.xSize and (self.grid[state + self.xSize] != 99):
state2 = state + self.xSize
elif direction == self.EAST and (state + 1) % self.xSize > 0 and (self.grid[state + 1] != 99):
state2 = state + 1
elif direction == self.WEST and state % self.xSize > 0 and (self.grid[state - 1] != 99):
state2 = state - 1
else:
state2 = state
if self.teleport and state == self.xSize - 1:
state2 = self.stateMax - self.xSize + 1
reward = self.grid[state2]
if state2 in dict:
tmp = dict[state2]
dict[state2] = (tmp[0] + prob, reward)
else:
dict[state2] = (prob, reward)
def successors(self, state, action):
dict = {}
(self.addmove(state, self.turnleft(action), 0.1, dict),)
(self.addmove(state, self.turnright(action), 0.1, dict),)
self.addmove(state, action, 0.8, dict)
succlist = []
for (state2, value) in dict.items():
tmp = (state2, value[0], value[1])
succlist.append(tmp)
return succlist
def show(self):
print('--------------------')
for y in range(0, self.ySize):
for x in range(0, self.xSize):
i = y * self.xSize + x
if self.grid[i] == 99:
print('##', end='')
elif self.grid[i] == 0:
print(' .', end='')
else:
print('%2d' % self.grid[i], end='')
print('')
def show_policy(self, policy):
print('--------------------')
for y in range(0, self.ySize):
for x in range(0, self.xSize):
i = y * self.xSize + x
if self.grid[i] == 99:
print('#', end='')
else:
print(self.actionName(policy[i]), end='')
print('')
def show_values(self, V):
print('--------------------')
for y in range(0, self.ySize):
for x in range(0, self.xSize):
i = y * self.xSize + x
print(' %3.3f' % V[i], end='')
print('') |
W = int(input())
N, K = map(int, input().split())
dp = [{} for _ in range(K + 1)]
dp[0][0] = 0
for _ in range(N):
A, B = map(int, input().split())
for i in range(K - 1, -1, -1):
for j in dp[i]:
if j + A <= W:
dp[i + 1].setdefault(j + A, 0)
dp[i + 1][j + A] = max(dp[i + 1][j + A], dp[i][j] + B)
result = 0
for i in range(K + 1):
if len(dp[i]) == 0:
continue
result = max(result, max(dp[i].values()))
print(result)
| w = int(input())
(n, k) = map(int, input().split())
dp = [{} for _ in range(K + 1)]
dp[0][0] = 0
for _ in range(N):
(a, b) = map(int, input().split())
for i in range(K - 1, -1, -1):
for j in dp[i]:
if j + A <= W:
dp[i + 1].setdefault(j + A, 0)
dp[i + 1][j + A] = max(dp[i + 1][j + A], dp[i][j] + B)
result = 0
for i in range(K + 1):
if len(dp[i]) == 0:
continue
result = max(result, max(dp[i].values()))
print(result) |
class RenderInterface(object):
def render(self):
raise NotImplementedError("Class %s doesn't implement render()" % (self.__class__.__name__))
class ViewportInterface(object):
def to_dict(self):
raise NotImplementedError("Class %s doesn't implement to_dict()" % (self.__class__.__name__))
def render(self):
raise NotImplementedError("Class %s doesn't implement render()" % (self.__class__.__name__))
def autocompute(self):
raise NotImplementedError("Class %s doesn't implement autocompute()" % (self.__class__.__name__))
| class Renderinterface(object):
def render(self):
raise not_implemented_error("Class %s doesn't implement render()" % self.__class__.__name__)
class Viewportinterface(object):
def to_dict(self):
raise not_implemented_error("Class %s doesn't implement to_dict()" % self.__class__.__name__)
def render(self):
raise not_implemented_error("Class %s doesn't implement render()" % self.__class__.__name__)
def autocompute(self):
raise not_implemented_error("Class %s doesn't implement autocompute()" % self.__class__.__name__) |
num1 = int(input('digite um valor'))
num2 = int(input('digite um valor'))
s = num1 + num2
print('A soma entre {} e {} vale {}'.format(num1, num2, s))
| num1 = int(input('digite um valor'))
num2 = int(input('digite um valor'))
s = num1 + num2
print('A soma entre {} e {} vale {}'.format(num1, num2, s)) |
def modify_phoneme_script_to_create_grapheme_script(original_dataset_path, grapheme_dataset_path):
with open(original_dataset_path, 'r', encoding='utf-8-sig') as f:
lines = f.readlines()
new_lines = []
for line in lines:
split_result = line.split('|')
wav_path = split_result[0]
speaker = split_result[2].rstrip()
speaking_emotion = 0
content_emotion = 0
txt_path = wav_path.replace('selvas_wav', 'selvas_txt').replace('wav_trimmed_22050', 'script').replace('.wav',
'.txt')
with open(txt_path, 'r', encoding='utf-8-sig') as f:
txt = f.readline().rstrip()
# new_line = '{}|{}|{}|{}|{}'.format(wav_path,txt, speaker, speaking_emotion, content_emotion)
new_line = txt
new_lines.append(new_line)
with open(grapheme_dataset_path, 'w', encoding='utf-8') as f:
for line in new_lines:
f.write(line+'\n')
if __name__ == '__main__':
# original_dataset_path = '/home/admin/projects/graduate/emotion_vector/filelists/selvas_main_train.txt'
# grapheme_dataset_path = '/home/admin/projects/graduate/emotion_vector/filelists/grapheme/grapheme_selvas_main_train_tmp.txt'
original_dataset_path = '/home/admin/projects/graduate/emotion_vector/filelists/single_language_selvas/train_file_list_pron.txt'
grapheme_dataset_path = '/home/admin/projects/graduate/emotion_vector/filelists/grapheme_selvas_multi_train_tmp.txt'
modify_phoneme_script_to_create_grapheme_script(original_dataset_path, grapheme_dataset_path) | def modify_phoneme_script_to_create_grapheme_script(original_dataset_path, grapheme_dataset_path):
with open(original_dataset_path, 'r', encoding='utf-8-sig') as f:
lines = f.readlines()
new_lines = []
for line in lines:
split_result = line.split('|')
wav_path = split_result[0]
speaker = split_result[2].rstrip()
speaking_emotion = 0
content_emotion = 0
txt_path = wav_path.replace('selvas_wav', 'selvas_txt').replace('wav_trimmed_22050', 'script').replace('.wav', '.txt')
with open(txt_path, 'r', encoding='utf-8-sig') as f:
txt = f.readline().rstrip()
new_line = txt
new_lines.append(new_line)
with open(grapheme_dataset_path, 'w', encoding='utf-8') as f:
for line in new_lines:
f.write(line + '\n')
if __name__ == '__main__':
original_dataset_path = '/home/admin/projects/graduate/emotion_vector/filelists/single_language_selvas/train_file_list_pron.txt'
grapheme_dataset_path = '/home/admin/projects/graduate/emotion_vector/filelists/grapheme_selvas_multi_train_tmp.txt'
modify_phoneme_script_to_create_grapheme_script(original_dataset_path, grapheme_dataset_path) |
# Solution
def add_one(arr):
output = 1;
for i in range(len(arr), 0, -1):
output = output + arr[i - 1]
borrow = output//10
if borrow == 0:
arr[i - 1] = output
break
else:
arr[i - 1] = output % 10
output = borrow
arr = [borrow] + arr
index = 0
while arr[index]==0:
index += 1
return arr[index:] | def add_one(arr):
output = 1
for i in range(len(arr), 0, -1):
output = output + arr[i - 1]
borrow = output // 10
if borrow == 0:
arr[i - 1] = output
break
else:
arr[i - 1] = output % 10
output = borrow
arr = [borrow] + arr
index = 0
while arr[index] == 0:
index += 1
return arr[index:] |
def interpolation_search(arr, key):
low = 0
high = len(arr) - 1
while arr[high] != arr[low] and key >= arr[low] and key <= arr[high]:
mid = int(low + ((key - arr[low]) * (high - low) / (arr[high] - arr[low])))
if arr[mid] == key:
return mid
elif arr[mid] < key:
low = mid + 1
else:
high = mid - 1
return -1
# input arr
arr = [2, 4, 6, 8, 10, 12, 14, 16]
# interpolation_search call to search 3 in arr
print('6 is at index: ', interpolation_search(arr, 6))
# Output: 6 is at index: 2 | def interpolation_search(arr, key):
low = 0
high = len(arr) - 1
while arr[high] != arr[low] and key >= arr[low] and (key <= arr[high]):
mid = int(low + (key - arr[low]) * (high - low) / (arr[high] - arr[low]))
if arr[mid] == key:
return mid
elif arr[mid] < key:
low = mid + 1
else:
high = mid - 1
return -1
arr = [2, 4, 6, 8, 10, 12, 14, 16]
print('6 is at index: ', interpolation_search(arr, 6)) |
try:
with open('../../.password/google-maps/api', 'r') as fp:
key = fp.readlines()
key = ''.join(key)
except:
# Insert your API key here
key = 'AIzaSyDxydKN7Yt54JNmVw9opg9EcibCghjetgw'
| try:
with open('../../.password/google-maps/api', 'r') as fp:
key = fp.readlines()
key = ''.join(key)
except:
key = 'AIzaSyDxydKN7Yt54JNmVw9opg9EcibCghjetgw' |
#SOLUTION FOR P20
'''P20 (*) Remove the K'th element from a list.
Example:
* (remove-at '(a b c d) 2)
(A C D)'''
my_list = ['a','b','c','d','e']
pos= int(input('Element to remove = '))
if pos <= len(my_list): #CHECK IF INPUT IS IN RANGE
my_list.pop(pos-1) #REMOVE THE ELEMENT AT GIVEN INDEX
print(my_list) #PRINT THE LIST
else:
print('Invalid input ')
| """P20 (*) Remove the K'th element from a list.
Example:
* (remove-at '(a b c d) 2)
(A C D)"""
my_list = ['a', 'b', 'c', 'd', 'e']
pos = int(input('Element to remove = '))
if pos <= len(my_list):
my_list.pop(pos - 1)
print(my_list)
else:
print('Invalid input ') |
class PairSet(object):
__slots__ = '_data',
def __init__(self):
self._data = set()
def __contains__(self, item):
return item in self._data
def has(self, a, b):
return (a, b) in self._data
def add(self, a, b):
self._data.add((a, b))
self._data.add((b, a))
return self
def remove(self, a, b):
self._data.discard((a, b))
self._data.discard((b, a))
| class Pairset(object):
__slots__ = ('_data',)
def __init__(self):
self._data = set()
def __contains__(self, item):
return item in self._data
def has(self, a, b):
return (a, b) in self._data
def add(self, a, b):
self._data.add((a, b))
self._data.add((b, a))
return self
def remove(self, a, b):
self._data.discard((a, b))
self._data.discard((b, a)) |
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def count_unival_subtrees(self, root: TreeNode) -> int:
self.count = 0
self.is_unival(root)
return self.count
def is_unival(self, root: TreeNode) -> bool:
# Leaf Node must be an Univalue Tree, return True
if root is None:
return True
# Traverse tree with DFS
left = self.is_unival(root.left)
right = self.is_unival(root.right)
# If both children are Univalue Tree and root.value is
# equal to both children's values. Then the tree of root
# node is an Univalue Tree
if left and right:
if (root.left is not None) and (root.val != root.left.val):
return False
if (root.right is not None) and (root.val != root.right.val):
return False
self.count += 1
return True
return False
| class Treenode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def count_unival_subtrees(self, root: TreeNode) -> int:
self.count = 0
self.is_unival(root)
return self.count
def is_unival(self, root: TreeNode) -> bool:
if root is None:
return True
left = self.is_unival(root.left)
right = self.is_unival(root.right)
if left and right:
if root.left is not None and root.val != root.left.val:
return False
if root.right is not None and root.val != root.right.val:
return False
self.count += 1
return True
return False |
class Node:
def __init__(self, data):
self.data = data
self.next = None
class LinkedList:
def __init__(self):
self.head = None
def push(self, new_data):
new_node = Node(new_data)
new_node.next = self.head
self.head = new_node
def getNth(self, llist, position):
llist.getNthNode(self.head, position, llist)
def getNthNode(self, head, position, llist):
count = 0
if(head):
if count == position:
print(head.data)
else:
llist.getNthNode(head.next, position - 1, llist)
else:
print("Index Doesn't Exist")
if __name__ == "__main__":
llist = LinkedList()
llist.push(5)
llist.push(6)
llist.push(98)
llist.push(3)
llist.push(1)
llist.push(10)
print("Element at Index 4 is:", end=" ")
llist.getNth(llist, 4) | class Node:
def __init__(self, data):
self.data = data
self.next = None
class Linkedlist:
def __init__(self):
self.head = None
def push(self, new_data):
new_node = node(new_data)
new_node.next = self.head
self.head = new_node
def get_nth(self, llist, position):
llist.getNthNode(self.head, position, llist)
def get_nth_node(self, head, position, llist):
count = 0
if head:
if count == position:
print(head.data)
else:
llist.getNthNode(head.next, position - 1, llist)
else:
print("Index Doesn't Exist")
if __name__ == '__main__':
llist = linked_list()
llist.push(5)
llist.push(6)
llist.push(98)
llist.push(3)
llist.push(1)
llist.push(10)
print('Element at Index 4 is:', end=' ')
llist.getNth(llist, 4) |
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'elasticsearch-objects-operator'
copyright = '2020, 90poe & elasticsearch-objects-operator development tean'
author = 'elasticsearch-objects-operator development team'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'recommonmark',
'sphinx_markdown_tables',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
pygments_style = 'sphinx'
# Output file base name for HTML help builder.
htmlhelp_basename = 'elasticsearch-objects-operatordoc'
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'elasticsearch-objects-operator', 'elasticsearch-objects-operator Documentation',
[author], 1)
]
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'elasticsearch-objects-operator', 'elasticsearch-objects-operator Documentation',
author, 'elasticsearch-objects-operator', 'One line description of project.',
'Miscellaneous'),
]
def setup(app):
app.add_stylesheet('custom.css')
| project = 'elasticsearch-objects-operator'
copyright = '2020, 90poe & elasticsearch-objects-operator development tean'
author = 'elasticsearch-objects-operator development team'
extensions = ['recommonmark', 'sphinx_markdown_tables']
templates_path = ['_templates']
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
source_suffix = ['.rst', '.md']
master_doc = 'index'
pygments_style = 'sphinx'
htmlhelp_basename = 'elasticsearch-objects-operatordoc'
man_pages = [(master_doc, 'elasticsearch-objects-operator', 'elasticsearch-objects-operator Documentation', [author], 1)]
texinfo_documents = [(master_doc, 'elasticsearch-objects-operator', 'elasticsearch-objects-operator Documentation', author, 'elasticsearch-objects-operator', 'One line description of project.', 'Miscellaneous')]
def setup(app):
app.add_stylesheet('custom.css') |
def draw_line(tick_length, tick_label=""):
line = "-" * tick_length
if tick_label:
line += " " + tick_label
print(line)
def draw_interval(center_length):
if center_length > 0:
draw_interval(center_length - 1)
draw_line(center_length)
draw_interval(center_length - 1)
def draw_ruler(num_inches, major_length):
draw_line(major_length, "0")
for i in range(1, 1 + num_inches):
draw_interval(major_length - 1)
draw_line(major_length, str(i))
if __name__ == '__main__':
draw_ruler(1, 3)
draw_ruler(1, 4)
| def draw_line(tick_length, tick_label=''):
line = '-' * tick_length
if tick_label:
line += ' ' + tick_label
print(line)
def draw_interval(center_length):
if center_length > 0:
draw_interval(center_length - 1)
draw_line(center_length)
draw_interval(center_length - 1)
def draw_ruler(num_inches, major_length):
draw_line(major_length, '0')
for i in range(1, 1 + num_inches):
draw_interval(major_length - 1)
draw_line(major_length, str(i))
if __name__ == '__main__':
draw_ruler(1, 3)
draw_ruler(1, 4) |
class TrainConfig(typing.NamedTuple):
T: int
train_size: int
batch_size: int
loss_func: typing.Callable
class TrainData(typing.NamedTuple):
feats: np.ndarray
targs: np.ndarray
DaRnnNet = collections.namedtuple("DaRnnNet", ["encoder", "decoder", "enc_opt", "dec_opt"]) | class Trainconfig(typing.NamedTuple):
t: int
train_size: int
batch_size: int
loss_func: typing.Callable
class Traindata(typing.NamedTuple):
feats: np.ndarray
targs: np.ndarray
da_rnn_net = collections.namedtuple('DaRnnNet', ['encoder', 'decoder', 'enc_opt', 'dec_opt']) |
__author__ = 'matti'
class Config(object):
DEBUG = False
TESTING = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:testipassu@localhost/gachimuchio'
class DebugConfig(Config):
DEBUG = True | __author__ = 'matti'
class Config(object):
debug = False
testing = False
sqlalchemy_database_uri = 'postgresql://postgres:testipassu@localhost/gachimuchio'
class Debugconfig(Config):
debug = True |
phrase = "Awana Academy"
print("Awana\nAcademy")
print("Awana\"Academy")
print("Awana\Academy")
print(phrase + " is cool")
print(phrase.capitalize())
print(phrase.lower())
print(phrase.upper())
print(phrase.isupper())
print(phrase.upper().isupper())
print(len(phrase))
print(phrase[0])
print(phrase.index("A"))
print(phrase.index("na"))
print(phrase.replace("Awana", "Diamondtia" )) | phrase = 'Awana Academy'
print('Awana\nAcademy')
print('Awana"Academy')
print('Awana\\Academy')
print(phrase + ' is cool')
print(phrase.capitalize())
print(phrase.lower())
print(phrase.upper())
print(phrase.isupper())
print(phrase.upper().isupper())
print(len(phrase))
print(phrase[0])
print(phrase.index('A'))
print(phrase.index('na'))
print(phrase.replace('Awana', 'Diamondtia')) |
#!/usr/bin/env python3
with open('AUTHORS', 'r') as authors_file:
authors = list(sorted([x.strip() for x in authors_file]))
with open('Qiskit.bib', 'w') as fd:
fd.write("@misc{ Qiskit,\n")
fd.write(' author = {%s},\n' % ' and '.join(authors))
fd.write(' title = {Qiskit: the Quantum Information Science Kit},\n')
fd.write(' year = {2019},\n}\n')
| with open('AUTHORS', 'r') as authors_file:
authors = list(sorted([x.strip() for x in authors_file]))
with open('Qiskit.bib', 'w') as fd:
fd.write('@misc{ Qiskit,\n')
fd.write(' author = {%s},\n' % ' and '.join(authors))
fd.write(' title = {Qiskit: the Quantum Information Science Kit},\n')
fd.write(' year = {2019},\n}\n') |
#while loop
temp = 0
while(temp < 20):
temp+=1
if(temp>10):
print(temp,"> 10")
print(temp)
# for loops
colors = ['yellow','white','blue','magenta','red']
for color in colors:
print(color)
print(range(10))
for index in range(10):
print(index) | temp = 0
while temp < 20:
temp += 1
if temp > 10:
print(temp, '> 10')
print(temp)
colors = ['yellow', 'white', 'blue', 'magenta', 'red']
for color in colors:
print(color)
print(range(10))
for index in range(10):
print(index) |
# alias to Bazel module `toolchains/cc`
load("@rules_nixpkgs_cc//:foreign_cc.bzl", _nixpkgs_foreign_cc_configure = "nixpkgs_foreign_cc_configure")
nixpkgs_foreign_cc_configure = _nixpkgs_foreign_cc_configure
| load('@rules_nixpkgs_cc//:foreign_cc.bzl', _nixpkgs_foreign_cc_configure='nixpkgs_foreign_cc_configure')
nixpkgs_foreign_cc_configure = _nixpkgs_foreign_cc_configure |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.