content stringlengths 7 1.05M |
|---|
"""
By starting at the top of the triangle below and moving to adjacent numbers on the row below, the maximum total from
top to bottom is 23.
3
7 4
2 4 6
8 5 9 3
That is, 3 + 7 + 4 + 9 = 23.
Find the maximum total from top to bottom of the triangle below:
75
95 64
17 47 82
18 35 87 10
20 04 82 47 65
19 01 23 75 03 34
88 02 77 73 07 63 67
99 65 04 28 06 16 70 92
41 41 26 56 83 40 80 70 33
41 48 72 33 47 32 37 16 94 29
53 71 44 65 25 43 91 52 97 51 14
70 11 33 28 77 73 17 78 39 68 17 57
91 71 52 38 17 14 91 43 58 50 27 29 48
63 66 04 68 89 53 67 30 73 16 69 87 40 31
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23
NOTE: As there are only 16384 routes, it is possible to solve this problem by trying every route.
However, Problem 67, is the same challenge with a triangle containing one-hundred rows; it cannot be solved by
brute force, and requires a clever method! ;o)
"""
''' Brute force method using a recursive function '''
def brute(inp, level, number, sum):
    """Return the maximum top-to-bottom path total of triangle `inp`,
    starting at row `level`, column `number`, with `sum` already accumulated.

    `inp` is a list of rows, each row a list of ints. The parameter name
    `sum` shadows the builtin but is kept for interface compatibility.
    """
    # Memoize the best remaining total per (row, col). This turns the
    # original exponential 2^n route enumeration into O(n^2) work, which
    # also copes with the 100-row triangle of Problem 67.
    from functools import lru_cache

    @lru_cache(maxsize=None)
    def best(row, col):
        value = inp[row][col]
        if row == len(inp) - 1:
            return value
        # Only the two adjacent cells in the next row are reachable.
        return value + max(best(row + 1, col), best(row + 1, col + 1))

    return sum + best(level, number)
string = """75
95 64
17 47 82
18 35 87 10
20 04 82 47 65
19 01 23 75 03 34
88 02 77 73 07 63 67
99 65 04 28 06 16 70 92
41 41 26 56 83 40 80 70 33
41 48 72 33 47 32 37 16 94 29
53 71 44 65 25 43 91 52 97 51 14
70 11 33 28 77 73 17 78 39 68 17 57
91 71 52 38 17 14 91 43 58 50 27 29 48
63 66 04 68 89 53 67 30 73 16 69 87 40 31
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23"""
string = string.split('\n')
for s in range(len(string)):
string[s] = string[s].split()
string[s] = [int(i) for i in string[s]]
print(brute(string,0,0,0)) |
class BST():
    """Unbalanced binary search tree node; duplicates go into the left subtree."""

    def __init__(self, data):
        self.key = data   # value stored at this node (may be None for an empty node)
        self.lch = None   # left child: values <= key
        self.rch = None   # right child: values > key

    def Print(self):
        """Print the keys in post-order (children first), space separated."""
        if self is None or self.key is None:
            return
        if self.lch:
            self.lch.Print()
        if self.rch:
            self.rch.Print()
        print(self.key, end=" ")

    def insert(self, data):
        """Insert `data` into the subtree rooted at this node."""
        if self.key is None:
            # Re-use an empty node. The early return is the fix: without it
            # the value also fell through and was inserted a second time as
            # a left child.
            self.key = data
            return
        if data <= self.key:
            if self.lch:
                self.lch.insert(data)
            else:
                self.lch = BST(data)
        elif data > self.key:
            if self.rch:
                self.rch.insert(data)
            else:
                self.rch = BST(data)
count = 0  # module-global accumulator used by countNodes


def countNodes(root, low, high):
    """Add to the module-global `count` the number of nodes in the subtree
    rooted at `root` whose key lies in the closed range [low, high].

    The caller is expected to reset the global `count` to 0 beforehand.
    Returns 0 for an empty subtree and None otherwise; only the global
    total is meaningful.
    """
    global count
    if root is None:
        return 0
    # Direct range comparison instead of materialising list(range(low, high+1))
    # and doing an O(high-low) membership scan per node. Equivalent for the
    # integer keys used in this file.
    if low <= root.key <= high:
        count += 1
    if root.lch:
        countNodes(root.lch, low, high)
    if root.rch:
        countNodes(root.rch, low, high)
# Build a sample tree, count the keys falling in [-1, 10], then dump the
# tree in post-order.
count = 0
root = BST(15)
for value in (10, 25, 8, 12, 20, 30, 0, -1, 21, 22):
    root.insert(value)
countNodes(root, -1, 10)
print(count)
root.Print()
# Intentionally endless counter: prints the natural numbers 0, 1, 2, ...
# forever; there is no exit condition (stop with Ctrl-C).
i = 0
while True:
    print(i)
    i+=1
class Solution:
    def getSum(self, a, b):
        """Add two integers without using + or -, via bitwise operations.

        Simulates 32-bit two's-complement addition: XOR adds without carry,
        AND + left shift produces the carry; repeat until the carry is zero.

        :type a: int
        :type b: int
        :rtype: int
        """
        # 32 bits integer max
        MAX = 0x7FFFFFFF
        # 32 bits integer min (kept for reference)
        MIN = 0x80000000
        # mask to keep intermediate results within 32 bits
        # (Python ints are unbounded)
        mask = 0xFFFFFFFF
        while b != 0:
            # ^ gets the differing bits, & finds double 1s, << moves the carry.
            # The leftover debug print inside this loop was removed.
            a, b = (a ^ b) & mask, ((a & b) << 1) & mask
        # If a's 32-bit pattern is negative, convert the 32-bit complement
        # back into Python's unbounded negative integer.
        return a if a <= MAX else ~(a ^ mask)
# Manual smoke test: expects -65 (i.e. -70 + 5) printed.
a, b = -70, 5
print(Solution().getSum(a, b))
"""
Task Score: 100%
Complexity: O(N * log(N))
"""
def solution(A):
    """Return the maximal product of any three elements of A.

    Sorts ascending; with negatives in play the answer is either the product
    of the three largest values, or the two smallest (both negative) times
    the largest. Runs in O(N log N).
    """
    ordered = sorted(A)
    top_three = ordered[-1] * ordered[-2] * ordered[-3]
    two_lowest_with_top = ordered[0] * ordered[1] * ordered[-1]
    return max(top_three, two_lowest_with_top)
|
"""
python中的类和对象, 所有的类的祖先都是object和java是一样的
1- 前两种叫做经典类存在python 2.x中, 最后一种叫做新式类
2- python 2中类并没有父类,自己就是父类
3- 而python3中得class Person(object)存在父类,可以使用根类的方法
"""
# Three equivalent ways to declare a class in Python 3; all implicitly
# inherit from `object`. Each definition rebinds the name `Person`, so only
# the last one survives at runtime.
class Person:
    pass
class Person():
    pass
class Person(object):
    pass
"""
1- 实例方法的第一个形参是self自身,表示调用该方法的对象,谁调用谁就是self
2- 给一个**对象**添加属性,并不是添加到类中的,有点奇怪
3- 类的方法中,通过self获取对象的属性self.name
4- 重写__init__() 用来给属性添加默认值,避免多个对象对共同的属性name赋值
5- python中不能定义多个构造方法,只能定义一个,不然会报错
6- 因为python对参数的类型没有限制,构造器中参数一般为字典{}用来处理不同数量参数的问题
"""
class Hero(object):
    # __init__ is one of Python's double-underscore "magic" methods: it is
    # invoked automatically when an instance is created, much like a
    # constructor in other languages.
    # def __init__(self):
    #     self.name = "旺财"
    def __init__(self, age, my_name = "默认的名字"):  # like Kotlin, constructor parameters may have default values
        self.name = my_name
        self.age = age
    def move(self):
        # id(self) is the same address the caller sees for the instance.
        print(id(self))
        print("英雄会行走...")
    def print_info(self):
        # NOTE(review): self.hp is never set in __init__; callers must assign
        # it first (as the demo code below does) or this raises AttributeError.
        print(self.name)
        print(self.age)
        print(self.hp)
hero01 = Hero(22)
print(id(hero01))  # the printed id matches the id(self) printed inside move()
hero01.move()
# Attributes can be attached to a single *instance* after construction;
# they do not become part of the class.
hero01.name = "英雄名字01"
hero01.age = 22
hero01.hp = 4000  # required by print_info(), which reads self.hp
hero01.print_info()
hero02 = Hero(22)
print(hero02.name)  # still the default name
hero03 = Hero(18)
print(hero03.name)
print(hero03.age)
|
def bbox_normalize(bbox, image_size):
    """Scale absolute bbox coordinates into [0, 1] fractions.

    Even-indexed entries are divided by image_size[0], odd-indexed ones by
    image_size[1]; assumes bbox coordinates alternate between the two axes.
    """
    scaled = []
    for axis, value in enumerate(bbox):
        scaled.append(value / image_size[axis % 2])
    return tuple(scaled)
def bbox_denormalize(bbox, image_size):
    """Inverse of bbox_normalize: scale [0, 1] fractions back to pixels."""
    return tuple(value * image_size[index % 2] for index, value in enumerate(bbox))
|
# Write a program to approve a bank loan for buying a house. Ask for the
# house price, the buyer's salary and over how many years it will be paid.
# The monthly installment cannot exceed 30% of the salary, otherwise the
# loan is denied.
casa = float(input('Qual é o valor da casa? R$'))
salário = float(input('Qual é o salário do comprador? R$'))
ano = int(input('Quantos anos de financiamento? '))
prestação = casa / (ano * 12)  # monthly installment over `ano` years
mínimo = (salário * 30) / 100  # 30% of the salary: the maximum allowed installment
# NOTE(review): the format string prints the (integer) number of years with
# two decimal places ({:.2f}); {} was probably intended — confirm.
print('Para pagar uma casa de {:.2f} em {:.2f} anos a prestação será de {:.2f}' .format(casa, ano, prestação))
if prestação <= mínimo:
    print('Empréstimo aprovado!')
else:
    print('Empréstimo negado.')
def calculateStats(numbers):
    """Return a dict with the "avg", "max" and "min" of `numbers`.

    The average is rounded to 3 decimal places. An empty sequence yields the
    string "nan" for every field (callers rely on that sentinel).
    """
    if not numbers:
        return {"avg": "nan", "max": "nan", "min": "nan"}
    average = round(sum(numbers) / len(numbers), 3)
    return {"avg": average, "max": max(numbers), "min": min(numbers)}
|
# Latin transliteration fragments indexed by the low byte of a code point.
# Judging by the entries this covers the U+31xx range: Bopomofo letters
# (0x05-0x2c), Hangul Compatibility Jamo (0x31-0x8e) and Bopomofo Extended
# (0xa0-0xb7); unmapped positions are None, '' marks intentionally silent
# positions. TODO(review): confirm the intended Unicode block against the
# table's consumer.
data = (
    None,  # 0x00
    None,  # 0x01
    None,  # 0x02
    None,  # 0x03
    None,  # 0x04
    'B',  # 0x05
    'P',  # 0x06
    'M',  # 0x07
    'F',  # 0x08
    'D',  # 0x09
    'T',  # 0x0a
    'N',  # 0x0b
    'L',  # 0x0c
    'G',  # 0x0d
    'K',  # 0x0e
    'H',  # 0x0f
    'J',  # 0x10
    'Q',  # 0x11
    'X',  # 0x12
    'ZH',  # 0x13
    'CH',  # 0x14
    'SH',  # 0x15
    'R',  # 0x16
    'Z',  # 0x17
    'C',  # 0x18
    'S',  # 0x19
    'A',  # 0x1a
    'O',  # 0x1b
    'E',  # 0x1c
    'EH',  # 0x1d
    'AI',  # 0x1e
    'EI',  # 0x1f
    'AU',  # 0x20
    'OU',  # 0x21
    'AN',  # 0x22
    'EN',  # 0x23
    'ANG',  # 0x24
    'ENG',  # 0x25
    'ER',  # 0x26
    'I',  # 0x27
    'U',  # 0x28
    'IU',  # 0x29
    'V',  # 0x2a
    'NG',  # 0x2b
    'GN',  # 0x2c
    None,  # 0x2d
    None,  # 0x2e
    None,  # 0x2f
    None,  # 0x30
    'g',  # 0x31
    'gg',  # 0x32
    'gs',  # 0x33
    'n',  # 0x34
    'nj',  # 0x35
    'nh',  # 0x36
    'd',  # 0x37
    'dd',  # 0x38
    'r',  # 0x39
    'lg',  # 0x3a
    'lm',  # 0x3b
    'lb',  # 0x3c
    'ls',  # 0x3d
    'lt',  # 0x3e
    'lp',  # 0x3f
    'rh',  # 0x40
    'm',  # 0x41
    'b',  # 0x42
    'bb',  # 0x43
    'bs',  # 0x44
    's',  # 0x45
    'ss',  # 0x46
    '',  # 0x47
    'j',  # 0x48
    'jj',  # 0x49
    'c',  # 0x4a
    'k',  # 0x4b
    't',  # 0x4c
    'p',  # 0x4d
    'h',  # 0x4e
    'a',  # 0x4f
    'ae',  # 0x50
    'ya',  # 0x51
    'yae',  # 0x52
    'eo',  # 0x53
    'e',  # 0x54
    'yeo',  # 0x55
    'ye',  # 0x56
    'o',  # 0x57
    'wa',  # 0x58
    'wae',  # 0x59
    'oe',  # 0x5a
    'yo',  # 0x5b
    'u',  # 0x5c
    'weo',  # 0x5d
    'we',  # 0x5e
    'wi',  # 0x5f
    'yu',  # 0x60
    'eu',  # 0x61
    'yi',  # 0x62
    'i',  # 0x63
    '',  # 0x64
    'nn',  # 0x65
    'nd',  # 0x66
    'ns',  # 0x67
    'nZ',  # 0x68
    'lgs',  # 0x69
    'ld',  # 0x6a
    'lbs',  # 0x6b
    'lZ',  # 0x6c
    'lQ',  # 0x6d
    'mb',  # 0x6e
    'ms',  # 0x6f
    'mZ',  # 0x70
    'mN',  # 0x71
    'bg',  # 0x72
    '',  # 0x73
    'bsg',  # 0x74
    'bst',  # 0x75
    'bj',  # 0x76
    'bt',  # 0x77
    'bN',  # 0x78
    'bbN',  # 0x79
    'sg',  # 0x7a
    'sn',  # 0x7b
    'sd',  # 0x7c
    'sb',  # 0x7d
    'sj',  # 0x7e
    'Z',  # 0x7f
    '',  # 0x80
    'N',  # 0x81
    'Ns',  # 0x82
    'NZ',  # 0x83
    'pN',  # 0x84
    'hh',  # 0x85
    'Q',  # 0x86
    'yo-ya',  # 0x87
    'yo-yae',  # 0x88
    'yo-i',  # 0x89
    'yu-yeo',  # 0x8a
    'yu-ye',  # 0x8b
    'yu-i',  # 0x8c
    'U',  # 0x8d
    'U-i',  # 0x8e
    None,  # 0x8f
    '',  # 0x90
    '',  # 0x91
    '',  # 0x92
    '',  # 0x93
    '',  # 0x94
    '',  # 0x95
    '',  # 0x96
    '',  # 0x97
    '',  # 0x98
    '',  # 0x99
    '',  # 0x9a
    '',  # 0x9b
    '',  # 0x9c
    '',  # 0x9d
    '',  # 0x9e
    '',  # 0x9f
    'BU',  # 0xa0
    'ZI',  # 0xa1
    'JI',  # 0xa2
    'GU',  # 0xa3
    'EE',  # 0xa4
    'ENN',  # 0xa5
    'OO',  # 0xa6
    'ONN',  # 0xa7
    'IR',  # 0xa8
    'ANN',  # 0xa9
    'INN',  # 0xaa
    'UNN',  # 0xab
    'IM',  # 0xac
    'NGG',  # 0xad
    'AINN',  # 0xae
    'AUNN',  # 0xaf
    'AM',  # 0xb0
    'OM',  # 0xb1
    'ONG',  # 0xb2
    'INNN',  # 0xb3
    'P',  # 0xb4
    'T',  # 0xb5
    'K',  # 0xb6
    'H',  # 0xb7
    None,  # 0xb8
    None,  # 0xb9
    None,  # 0xba
    None,  # 0xbb
    None,  # 0xbc
    None,  # 0xbd
    None,  # 0xbe
    None,  # 0xbf
    None,  # 0xc0
    None,  # 0xc1
    None,  # 0xc2
    None,  # 0xc3
    None,  # 0xc4
    None,  # 0xc5
    None,  # 0xc6
    None,  # 0xc7
    None,  # 0xc8
    None,  # 0xc9
    None,  # 0xca
    None,  # 0xcb
    None,  # 0xcc
    None,  # 0xcd
    None,  # 0xce
    None,  # 0xcf
    None,  # 0xd0
    None,  # 0xd1
    None,  # 0xd2
    None,  # 0xd3
    None,  # 0xd4
    None,  # 0xd5
    None,  # 0xd6
    None,  # 0xd7
    None,  # 0xd8
    None,  # 0xd9
    None,  # 0xda
    None,  # 0xdb
    None,  # 0xdc
    None,  # 0xdd
    None,  # 0xde
    None,  # 0xdf
    None,  # 0xe0
    None,  # 0xe1
    None,  # 0xe2
    None,  # 0xe3
    None,  # 0xe4
    None,  # 0xe5
    None,  # 0xe6
    None,  # 0xe7
    None,  # 0xe8
    None,  # 0xe9
    None,  # 0xea
    None,  # 0xeb
    None,  # 0xec
    None,  # 0xed
    None,  # 0xee
    None,  # 0xef
    None,  # 0xf0
    None,  # 0xf1
    None,  # 0xf2
    None,  # 0xf3
    None,  # 0xf4
    None,  # 0xf5
    None,  # 0xf6
    None,  # 0xf7
    None,  # 0xf8
    None,  # 0xf9
    None,  # 0xfa
    None,  # 0xfb
    None,  # 0xfc
    None,  # 0xfd
    None,  # 0xfe
)
|
def solution(num):
    """Find the maximal sequence of consecutive ones"""
    # Repeatedly AND the value with itself shifted left by one bit: every
    # pass shortens each run of 1-bits by one, so the number of passes
    # needed to reach zero equals the longest run length.
    try:
        value = int(num)
    except ValueError:
        return 'Invalid input.'
    passes = 0
    if value > 0:
        while value != 0:
            value &= value << 1
            passes += 1
    return passes
|
jibunno = "red"
kimino = "green"
kari = jibunno
jibunno = kimino
kimino = kari
print(jibunno, kimino)
|
class LinkedStack:
    """LIFO stack implemented with a singly linked list of _Node cells."""

    class _Node:
        """Lightweight linked-list cell."""
        __slots__ = '_element', '_next'  # streamline per-node memory

        def __init__(self, element, nxt):
            # Parameter renamed from `next` to avoid shadowing the builtin.
            self._element = element  # payload
            self._next = nxt         # node beneath this one (or None)

    def __init__(self):
        """Create an empty stack."""
        self._head = None  # top node of the stack
        self._size = 0     # number of stored elements

    def __len__(self):
        """Return the number of elements in the stack."""
        return self._size

    def is_empty(self):
        """Return True if the stack holds no elements."""
        return self._size == 0

    def push(self, e):
        """Add element e to the top of the stack."""
        self._head = self._Node(e, self._head)  # new node links to old top
        self._size += 1

    def top(self):
        """Return (without removing) the top element.

        Raises Empty (defined elsewhere in this module) when the stack is empty.
        """
        if self.is_empty():
            raise Empty('Stack is empty')
        return self._head._element

    def pop(self):
        """Remove and return the top element (LIFO order).

        Raises Empty (defined elsewhere in this module) when the stack is empty.
        """
        if self.is_empty():
            raise Empty('Stack is empty')
        answer = self._head._element
        self._head = self._head._next  # unlink the old top node
        self._size -= 1
        return answer
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Enumerate every ordered outcome of rolling two six-sided dice, then pick
# out the outcomes summing to seven.
dice = [(d1, d2) for d1 in range(1, 7) for d2 in range(1, 7)]
comb = [(d1, d2) for d1, d2 in dice if d1 + d2 == 7]
n = len(comb)
print('%d combinations results in the sum 7' % n)
print(comb)
|
"""Sector and industries."""
SECTORS = [
{
"name": "Chemical",
"industries": [
{"name": "Other"},
{"name": "Basic Chemicals"},
{"name": "Specialty Products"},
{"name": "Pharmaceutical Products"},
{"name": "Consumer Products"},
{"name": "Agricultural Products"},
],
},
{
"name": "Commercial Facilities",
"industries": [
{"name": "Entertainment and Media"},
{"name": "Gaming"},
{"name": "Lodging"},
{"name": "Other"},
{"name": "Outdoor Events"},
{"name": "Public Assembly"},
{"name": "Real Estate"},
{"name": "Retail"},
{"name": "Sports Leagues"},
],
},
{
"name": "Communications",
"industries": [
{"name": " Other"},
{"name": " Telecommunications"},
{"name": " Wireless Communications Service Providers"},
{"name": " Broadcasting"},
{"name": " Cable"},
{"name": " Satellite"},
{"name": " Wireline"},
],
},
{
"name": "Critical Manufacturing",
"industries": [
{"name": "Electrical Equipment, Appliance and Component Manufacturing"},
{"name": "Machinery Manufacturing"},
{"name": "Other"},
{"name": "Primary Metal Manufacturing"},
{"name": "Transportation and Heavy Equipment Manufacturing"},
{"name": "Manufacturing"},
{"name": "Heavy Machinery Manufacturing"},
],
},
{
"name": "Dams",
"industries": [
{"name": "Dams"},
{"name": "Other"},
{"name": "Private Hydropower Facilities in the US"},
{"name": "Levees"},
{"name": "Navigation Locks"},
{"name": "Tailings and Waste Impoundments"},
],
},
{
"name": "Defense Industrial Base",
"industries": [
{"name": "Aircraft Industry"},
{"name": "Ammunition"},
{"name": "Combat Vehicle"},
{"name": "Communications"},
{"name": "Defense Contractors "},
{"name": "Electrical Industry Commodities"},
{"name": "Electronics"},
{"name": "Mechanical Industry Commodities"},
{"name": "Missile Industry"},
{"name": "Other"},
{"name": "Research and Development Facilities"},
{"name": "Shipbuilding Industry"},
{"name": "Space"},
{"name": "Structural Industry Commodities"},
{"name": "Troop Support"},
{"name": "Weapons"},
],
},
{
"name": "Emergency Services",
"industries": [
{"name": "Emergency Management"},
{"name": "Emergency Medical Services"},
{"name": "Fire and Rescue Services"},
{"name": "Law Enforcement "},
{"name": "Other"},
{"name": "Public Works"},
],
},
{
"name": "Energy",
"industries": [
{"name": "Electric Power Generation, Transmission and Distribution"},
{"name": "Natural Gas"},
{"name": "Other"},
{"name": "Petroleum Refineries"},
{"name": "Oil and Natural Gas"},
],
},
{
"name": "Financial Services",
"industries": [
{"name": "Other"},
{"name": "US Banks"},
{"name": "US Credit Unions"},
{"name": "Consumer Services"},
{"name": "Credit and Liquidity Products"},
{"name": "Investment Products"},
{"name": "Risk Transfer Products"},
],
},
{
"name": "Food and Agriculture",
"industries": [
{"name": "Beverage Manufacturing Plants"},
{"name": "Food Manufacturing Plants"},
{"name": "Food Services"},
{"name": "Other"},
{"name": "Supply"},
{"name": "Processing, Packaging, and Production"},
{"name": "Product Storage"},
{"name": "Product Transportation"},
{"name": "Product Distribution"},
{"name": "Supporting Facilities"},
],
},
{
"name": "Government Facilities",
"industries": [
{"name": "Local Governments"},
{"name": "Other"},
{"name": "State Governments"},
{"name": "Territorial Governments"},
{"name": "Tribal Governments"},
{"name": "Public Facilities"},
{"name": "Non-Public Facilities"},
],
},
{
"name": "Healthcare and Public Health",
"industries": [
{"name": "Hospitals"},
{"name": "Other"},
{"name": "Residential Care Facilities"},
{"name": "Direct Patient Care"},
{"name": "Health IT"},
{"name": "Health Plans and Payers"},
{"name": "Fatality Management Services"},
{"name": "Medical Materials"},
{"name": "Support Services"},
],
},
{
"name": "Information Technology",
"industries": [
{"name": "Information Technology"},
{"name": "Other"},
{"name": "IT Production"},
{"name": "DNS Services"},
{"name": "Identity and Trust Support Management"},
{"name": "Internet Content and Service Providers"},
{"name": "Internet Routing and Connection"},
{"name": "Incident Management"},
],
},
{
"name": "Nuclear Reactors, Materials, and Waste",
"industries": [
{"name": "Operating Nuclear Power Plants"},
{"name": "Other"},
{"name": "Fuel Cycle Facilities"},
{"name": "Nuclear Materials Transport"},
{"name": "Radioactive Waste"},
{"name": "Radioactive Materials"},
],
},
{
"name": "Transportation Systems",
"industries": [
{"name": "Aviation"},
{"name": "Freight Rail"},
{"name": "Highway (truck transportation)"},
{"name": "Maritime"},
{"name": "Mass Transit and Passenger Rail"},
{"name": "Municipalities with Traffic Control Systems"},
{"name": "Other"},
{
"name": "Pipelines (carries natural gas, hazardous liquids, and various chemicals.)"
},
],
},
{
"name": "Water and Wastewater Systems",
"industries": [
{"name": "Other"},
{"name": "Public Water Systems"},
{"name": "Publicly Owned Treatment Works"},
],
},
]
|
# Loan pre-approval: the monthly installment may not exceed 30% of the
# buyer's salary, otherwise the loan is denied.
nome = input('Digite seu nome: ').strip()
salario = float(input('Digite o valor do seu salário: '))
valor = float(input('Digite o valor do imóvel que deseja comprar: '))
tempo = int(input('Digite a quantidade de meses que deseja pagar o imóvel: '))
print(' ')
prest = valor / tempo  # monthly installment
print(f'Valor da prestação: {prest}')
print(' ')
trintap = (30 / 100) * salario  # 30% of the salary: approval threshold
print(f' 30% do seu salário corresponde ao valor de R$ {trintap}')
print(' ')
if (prest > trintap):
    print('Prezado {}, de acordo com nossa análise, o valor solicitado de {} não poderá ser liberado pois excede 30% do seu salário.'.format(nome, valor))
    print('')
    # \033[33m switches the terminal text colour to yellow.
    print('\033[33m EMPRÉSTIMO NEGADO')
else:
    print('Prezado {}. De acordo com nossa análise, o valor solicitado de {} poderá ser liberado pois a prestação não excede 30% do seu salário'.format(nome, valor))
    print(' ')
    # \033[34m switches the terminal text colour to blue.
    print('\033[34m EMPRÉSTIMO LIBERADO')
def selectionsort(L):
    """Sort list L in place into ascending order using selection sort.

    Each pass locates the largest element among the unsorted prefix and
    swaps it into the last unsorted position. Returns None (in-place).
    """
    for tail in range(len(L) - 1, 0, -1):
        # Index of the largest element in L[0..tail]; ties resolve to the
        # first occurrence, matching a strict '>' comparison scan.
        largest = max(range(tail + 1), key=L.__getitem__)
        L[tail], L[largest] = L[largest], L[tail]
|
class Constants:
    # Dark-theme colour palette (hex strings); the decimal triplets in the
    # trailing comments are the original RGB values.
    WINDOW_BACKGROUND_RGB: str = '#393939'  # this needs to be the same as in windowstyle.py
    GREY_180_RGB: str = '#b4b3b3'  # Text color
    GREY_127_RGB: str = '#7f7f7f'  # 127, 127, 127 disabled
    GREY_90_RGB: str = '#5a5a5a'  # 90, 90, 90 midlight
    GREY_5C_RGB: str = '#026fb2'  # button — NOTE(review): value is a blue, not a grey; confirm the name
    GREY_80_RGB: str = '#505050'  # 80, 80, 80 # Visited link
    GREY_66_RGB: str = '#424242'  # 66, 66, 66 alternatebase
    GREY_53_RGB: str = '#353535'  # 53, 53, 53 window
    GREY_42_RGB: str = '#2a2a2a'  # 42, 42, 42 base
    GREY_35_RGB: str = '#232323'  # 35, 35, 35 dark
    GREY_20_RGB: str = '#141414'  # 20, 20, 20 shadow
    HIGHLIGHT_RGB: str = '#2a82da'  # 42, 130, 218 highlight
    HYPERLINK_RGB: str = '#2a82da'  # 42, 130, 218 hyperlink
|
# ************************ compare_object_with_info.py ****************************** #
# #
# compare_object_with_info.py - #
# #
# Description: #
# #
# This component, is able to compare any two object, nested and #
# not nested, and gives a simple result == True or False and information. #
# #
# #
# *********************************************************************************** #
# Shared success message returned by every comparison helper below.
success_message = "Objects are equal!"
def compare_objects_with_info(a, b, indention=''):
    """Deep-compare `a` and `b`; return a (equal: bool, info: str) pair.

    Dicts are compared key-wise (key order ignored), lists/tuples
    element-wise; everything else falls back to ==. `indention` prefixes
    nested mismatch reports.
    """
    if not isinstance(a, type(b)):
        # Bug fix: this branch used to return a bare False, which crashed
        # every caller that indexes the result with [0]. Always return a
        # (bool, info) pair.
        return False, f"{indention}type mismatch between {type(a).__name__} and {type(b).__name__}"
    if isinstance(a, dict):
        return compare_dict_with_info(dict(sorted(a.items())), dict(sorted(b.items())), indention)
    if isinstance(a, (list, tuple)):
        return compare_list_with_info(a, b, indention)
    if a != b:
        return False, f"{indention}mismatch between {a} != {b}"
    return True, f"{a} and {b} are equal!"
# *************** NESTED COMPLEX OBJECTS ******************** #
def is_complex_with_info(item):
    """Return True when `item` is a container type that is compared recursively."""
    return isinstance(item, (dict, list, tuple, set))
# dict nested in other objects
def dict_in_dict_of_dicts_with_info(parent_key, elem, dict_of_elem, indention):
    """Look for dict `elem` stored under `parent_key` in `dict_of_elem`.

    Returns (True, success_message) on a full match, otherwise
    (False, reason). Only the first entry whose type, key set and key all
    match is inspected; the loop returns on that entry either way.
    """
    for k, v in dict_of_elem.items():
        if isinstance(elem, type(v)) and sorted(elem.keys()) == sorted(v.keys()) and parent_key == k:
            # Recurse with deeper indention for nested mismatch reports.
            result = compare_objects_with_info(elem, v, f"{indention} ")
            if not result[0]:
                return result
            else:
                return True, success_message
    return False, f"Element {elem}, is not appear correctly in dict {dict_of_elem}"
def dict_in_list_with_info(elem, list_of_elem, indention):
    """Check whether dict `elem` fully matches some dict in `list_of_elem`.

    Returns (True, success_message) at the first complete match; otherwise
    collects one numbered reason per failed candidate and returns
    (False, combined-report).
    """
    collect_info = ""  # accumulated per-candidate failure reasons
    counter = 1        # running number for the report lines
    for j in range(len(list_of_elem)):
        if isinstance(elem, type(list_of_elem[j])):
            if sorted(elem.keys()) == sorted(list_of_elem[j].keys()):
                result, info = compare_objects_with_info(elem, list_of_elem[j], f"{indention} ")
                if result:
                    return True, success_message
                else:
                    collect_info += f"{indention}{counter} : {info}\n"
                    counter += 1
            else:
                # Same type but different key sets: report the key mismatch.
                info = f'Origin Element keys {sorted(elem.keys())} and current object keys {sorted(list_of_elem[j].keys())}'
                collect_info += f"{indention}{counter} : {info}\n"
                counter += 1
    return False, f"Element {elem}, is not exist in list in the exact way.\n\n{indention}Reasons are : \n{collect_info}"
# indices objects nested in other objects
def list_in_dict_with_info(parent_key, elem, dict_of_elem, indention):
    """Compare list/tuple/set `elem` against the value stored under
    `parent_key` in `dict_of_elem`; returns (bool, info).

    Returns on the first entry whose key equals `parent_key`.
    """
    for k, v in dict_of_elem.items():
        if parent_key == k:
            if type(elem) != type(v):
                return False, f"{indention}Elements {elem} and {v}, refer to the same key ``{parent_key}``, with diff types"
            elif not compare_objects_with_info(elem, v)[0]:
                # NOTE(review): the comparison runs twice here — once to test,
                # once more (with indention) to build the report.
                return compare_objects_with_info(elem, v, f"{indention} ")
            else:
                return True, success_message
    return False, f"Element {elem}, is not appear correctly in dict {dict_of_elem}."
def list_and_tuple_within_list_with_info(elem, list_of_elem, indention):
    """Check whether container `elem` fully matches some same-typed element
    of `list_of_elem`; returns (bool, info).

    Collects one numbered reason per failed same-typed candidate.
    """
    collect_info = ""  # accumulated per-candidate failure reasons
    counter = 1        # running number for the report lines
    for j in range(len(list_of_elem)):
        if isinstance(elem, type(list_of_elem[j])):
            result, info = compare_objects_with_info(elem, list_of_elem[j], f"{indention} ")
            if result:
                return True, success_message
            else:
                collect_info += f"{indention}{counter} : {info}\n"
                counter += 1
    return False, f"Element {elem} is not exist in list in the exact way.\n\n{indention}Reasons are : \n{collect_info}"
def properties_do_not_fit_with_info(a, b, indention):
    """Verify two dicts have the same length and the same key set.

    Returns (True, success_message) when they fit, otherwise (False, reason).
    """
    if len(a) != len(b):
        return False, f"{indention}{a} length is {len(a)} and {b} length is {len(b)}"
    if a.keys() == b.keys():
        return True, success_message
    return False, f"{indention}{a} keys are {list(a.keys())} and {b} keys is {list(b.keys())}"
def compare_dict_with_info(a, b, indention):
    """Deep-compare two dicts; returns (bool, info).

    First checks length and key sets, then dispatches per value type:
    nested dicts, other containers, and plain values each take their own
    comparison path. Stops at the first mismatch.
    """
    result = properties_do_not_fit_with_info(a, b, indention)
    if not result[0]:
        return result
    for key, value in a.items():
        if isinstance(value, dict):
            result = dict_in_dict_of_dicts_with_info(key, value, b, indention)
            if not result[0]:
                return result
        elif is_complex_with_info(value):
            result = list_in_dict_with_info(key, value, b, indention)
            if not result[0]:
                return result
        else:
            if value != b[key]:
                return False, f"Mismatch between values for key ``{key}``, {value} != {b[key]}."
    return True, success_message
def compare_list_with_info(a, b, indention):
    """Deep-compare two equal-length lists/tuples; returns (bool, info).

    Order-insensitive for plain values (membership via `in`); containers are
    matched against any same-typed element of `b`. Stops at the first
    element of `a` that has no match in `b`.
    """
    if len(a) != len(b):
        return False, f"Mismatch - length of list {a} is {len(a)} and {b} is {len(b)}"
    for i in range(len(a)):
        if isinstance(a[i], dict):
            result = dict_in_list_with_info(a[i], b, f"{indention}")
            if not result[0]:
                return result
        elif is_complex_with_info(a[i]):
            result = list_and_tuple_within_list_with_info(a[i], b, f"{indention}")
            if not result[0]:
                return result
        else:
            if not a[i] in b:
                return False, f"Element {a[i]} is in {a} and not in {b}"
    return True, success_message
|
# Read n (the declared element count; consumed but otherwise unused) and a
# line of n integers, then print how many distinct values occur.
n = int(input())
arr = [int(x) for x in input().split()]
distinct = set(arr)
print(len(distinct))
class ZoneinfoError(Exception):
    """Base class for all zoneinfo-related errors in this module."""
    pass


class InvalidZoneinfoFile(ZoneinfoError):
    """Error concerning a zoneinfo file (carries no extra state)."""
    pass


class InvalidTimezone(ZoneinfoError):
    """Raised with the offending timezone name embedded in the message."""

    def __init__(self, name):
        # Modernized from the Python-2-style super(InvalidTimezone, self)
        # and str.format; the message text is unchanged.
        super().__init__(f'Invalid timezone "{name}"')


class InvalidPosixSpec(ZoneinfoError):
    """Raised with the offending POSIX TZ spec embedded in the message."""

    def __init__(self, spec):
        super().__init__(f'Invalid POSIX spec: {spec}')
|
class RenderQueue:
    """FIFO queue of zero-argument render callables attached to a window."""

    def __init__(self, window):
        self.window = window  # owning window; not used by the queue itself
        self.queue = []

    def render(self):
        """Invoke the callable at the front of the queue, if there is one."""
        if not self.queue:
            return
        self.queue[0]()

    def next(self):
        """Advance past the front entry (no-op when the queue is empty)."""
        self.queue = self.queue[1:]

    def add(self, source):
        """Append a render callable to the back of the queue."""
        self.queue.append(source)

    def clear(self):
        """Discard every queued callable."""
        self.queue = []
def question(text, default=None, resp_type=None):
    """Prompt the user with `text` and return their answer.

    An empty response yields `default`; otherwise the raw string is
    returned, converted through `resp_type` when one is supplied. The
    default (when not None) is shown in parentheses after the prompt.
    """
    suffix = '' if default is None else f'({default})'
    answer = input(f'{text}{suffix}: ')
    if not answer:
        return default
    return resp_type(answer) if resp_type else answer
|
class TreeNode:
    """Binary tree node: a value plus optional left/right children."""
    def __init__(self, x):
        self.val = x       # payload value
        self.left = None   # left child TreeNode or None
        self.right = None  # right child TreeNode or None
class Solution:
    """Checks mirror symmetry of a binary tree via level-order traversal."""

    def __init__(self):
        self.arrVal = []  # values of the level currently being collected
        self.arrTmp = []  # BFS work queue of nodes (None = absent child placeholder)

    def isSymmetric(self, root):
        """Return True when the tree rooted at `root` is a mirror of itself.

        Performs a breadth-first traversal, padding missing children with
        None placeholders so every level has full width 2**level; each
        completed level must read the same forwards and backwards.
        """
        if root == None:
            return True
        num = 0  # exponent: the level is complete when len(arrVal) == 2**num
        self.arrTmp.append(root)
        while (len(self.arrTmp) != 0):
            # Pop the front of the work queue (list used as a FIFO).
            tempNode = self.arrTmp[0]
            del self.arrTmp[0]
            if tempNode == None:
                # Placeholder: record None and keep the level width by
                # enqueueing two placeholder children.
                self.arrVal.append(None)
                self.arrTmp.append(None)
                self.arrTmp.append(None)
            else:
                self.arrVal.append(tempNode.val)
                self.arrTmp.append(tempNode.left)
                self.arrTmp.append(tempNode.right)
            if (len(self.arrVal) == pow(2, num)):
                # A full level has been collected: it must be a palindrome.
                num += 1
                if not self.isHUI(self.arrVal):
                    return False
                else:
                    del self.arrVal[:]
                    if (self.isNoneList(self.arrTmp)):
                        # Only placeholders remain: every real node passed.
                        return True
                    else:
                        continue
        # NOTE(review): placeholders always re-enqueue two children, so the
        # queue never empties on its own; this fallthrough looks unreachable.
        print(self.arrVal)
        return True

    def isHUI(self, lst):
        """Return True when `lst` is a palindrome (two-pointer scan)."""
        i = 0
        while (i < len(lst) - i - 1):
            if lst[i] != lst[len(lst) - i - 1]:
                return False
            i += 1
        return True

    def isNoneList(self, lst):
        """Return True when the work queue holds only None placeholders.

        NOTE(review): ignores its `lst` argument and always inspects
        self.arrTmp — presumably intentional, but worth confirming.
        """
        for i in self.arrTmp:
            if i != None:
                return False
        return True
if __name__ == "__main__":
solution = Solution()
leftNode = TreeNode(2)
rightNode = TreeNode(2)
rootNode = TreeNode(1)
rootNode.left = leftNode
rootNode.right = rightNode
print(solution.isSymmetric(rootNode))
|
# Read a, b, n, w; look for a split of n items into i of weight a and
# (n - i) of weight b totalling exactly w. Print the split when it is
# unique, otherwise -1.
a, b, n, w = list(map(int, input().split()))
L = [i for i in range(1, n) if a * i + b * (n - i) == w]
if len(L) == 1:
    print(L[0], n - L[0])
else:
    print(-1)
#!/usr/bin/env python3
def is_terminal(battle_data) -> bool:
    """Return whether the battle has finished (the data's "ended" flag)."""
    return battle_data["ended"]
def get_side_value(side) -> float:
    """Value of one side: the sum of each pokemon's remaining HP fraction."""
    total = 0
    for pokemon in side["pokemon"]:
        total += pokemon["hp"] / pokemon["maxhp"]
    return total
def get_heuristic_value(battle_data):
    """Heuristic score: side 1's remaining HP minus side 0's."""
    side0, side1 = battle_data["sides"][0], battle_data["sides"][1]
    return get_side_value(side1) - get_side_value(side0)
def alpha_beta(env, battle, depth, alpha, beta, player_idx, last_move):
    """Alpha-beta search over battle states; returns (value, best_move_idx).

    Player 0 maximizes and player 1 minimizes the heuristic. One depth unit
    spans a full turn: player 0's branch only records its chosen move (same
    depth, no simulation), then player 1's branch applies both moves at once
    via client.do_move and decrements the depth.

    NOTE(review): the maximizer iterates battle["actions"][1] and the
    minimizer battle["actions"][0] — confirm the index/player mapping.
    """
    client = env.client
    battle_id = battle["id"]
    battle_data = battle["data"]
    next_player_idx = (player_idx + 1) % 2
    if depth == 0 or is_terminal(battle_data):
        return get_heuristic_value(battle_data), None
    best_move_idx = None
    if player_idx == 0:
        # Maximizing player: choose a candidate move; the opponent's branch
        # performs the actual simulation of the pair of moves.
        value = -float("inf")
        for move_idx in battle["actions"][1]:
            successor_value, _ = alpha_beta(
                env, battle, depth, alpha, beta, next_player_idx, env.get_move(move_idx)
            )
            if successor_value > value:
                value = successor_value
                best_move_idx = move_idx
            alpha = max(alpha, value)
            if alpha >= beta:
                break  # beta cutoff
        return value, best_move_idx
    else:
        # Minimizing player: simulate both queued moves, then search deeper.
        value = float("inf")
        for move_idx in battle["actions"][0]:
            successor = client.do_move(battle_id, env.get_move(move_idx), last_move)
            successor_value, _ = alpha_beta(
                env, successor, depth - 1, alpha, beta, next_player_idx, None
            )
            if successor_value < value:
                value = successor_value
                best_move_idx = move_idx
            beta = min(beta, value)
            if alpha >= beta:
                break  # alpha cutoff
        return value, best_move_idx
def agent(env, depth=1):
    """Pick player 0's best move for the current battle via alpha-beta search."""
    infinity = float("inf")
    _, best_move_idx = alpha_beta(
        env, env.current_battle, depth, -infinity, infinity, 0, None
    )
    return best_move_idx
|
class Cartes:
    """A single Uno-style card holding a colour index and a value index."""

    # Lookup tables for readable names, stored under private names because
    # the public `couleur`/`valeur` class attributes are replaced by the
    # property objects defined below. In the original, the tuples were
    # clobbered by the properties, which made __str__ crash.
    _COULEURS = ("bleu", "vert", "rouge", "jaune")
    _VALEURS = (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, "+2", "inversion", "passer", "stop")

    def __init__(self, coul, val):
        self.valeur = val    # routed through the property setter below
        self.couleur = coul  # routed through the property setter below

    def validation(self, val, coul):
        """Raise when `val` is outside 0..13 or `coul` outside 0..3.

        Bug fix: the original conditions (`val > 0 or val < 14`) were true
        for every input, so validation always raised.
        """
        if val < 0 or val > 13:
            raise Exception("La valeur est comprise entre 0 et 14")
        if coul < 0 or coul > 3:
            raise Exception("La couleur est comprise entre 0 et 3")

    def __str__(self):
        return str(Cartes._VALEURS[self.__valeur]) + " de " + (Cartes._COULEURS[self.__couleur])

    def __getValeur(self):
        return self.__valeur

    def __setValeur(self, val):
        self.__valeur = val

    valeur = property(__getValeur, __setValeur)

    def __getCouleur(self):
        # Bug fix: the original getter returned self.__valeur.
        return self.__couleur

    def __setCouleur(self, val):
        self.__couleur = val

    couleur = property(__getCouleur, __setCouleur)
|
class Connector:
    """Public base class for connectors.

    Customized connectors inherit this class and override the hook methods
    as a framework for building them.

    .. note::
        Base connector class as a framework for child classes.
    """

    def __init__(self):
        """Instantiate the base connector."""
        pass

    def _preprocess(self, message):
        """Clean the raw input before processing.

        May normalize case, strip noise, or enrich the message with extra
        data (dictionary/word list, etc.). The base implementation is a
        pass-through.

        :param message: Message.
        :type message: str.
        :returns: str -- the cleaned message.
        """
        return message

    def _process(self, cleaned_message, userinfo=None):
        """Operate on the cleaned message from :func:`_preprocess`.

        :param cleaned_message: Cleaned message.
        :type cleaned_message: str.
        :param userinfo: Additional user data (e.g. user name).
        :type userinfo: dict.
        :returns: str -- the prediction.
        """
        return cleaned_message

    def _postprocess(self, prediction):
        """Operate on the prediction from :func:`_process`.

        :param prediction: Prediction.
        :type prediction: str.
        :returns: str -- the final result.
        """
        return prediction

    def respond(self, message):
        """Entry point called by :mod:`botframework` in child classes.

        Runs the full preprocess -> process -> postprocess pipeline.

        :param message: Raw message.
        :type message: str.
        :returns: str -- the final result.
        """
        return self._postprocess(self._process(self._preprocess(message)))
|
# A single layer of three neurons computed by hand: each output is the dot
# product of the shared inputs with that neuron's weights, plus its bias.
inputs = [1, 2, 3, 2.5]
weights1 = [0.2, 0.8, -0.5, 1.0]
weights2 = [0.5, -0.91, 0.26, -0.5]
weights3 = [-0.26, -0.27, 0.17, 0.87]
bias1 = 2
bias2 = 3
bias3 = 0.5
output = [
    sum(x * w for x, w in zip(inputs, neuron_weights)) + bias
    for neuron_weights, bias in ((weights1, bias1), (weights2, bias2), (weights3, bias3))
]
print(output)
|
l1 = [1,2,3]
l2 = [4,5,6]
l3 = [1,2,3]
# number of items in a list
print(f"number of items in l1 : {len(l1)}")
# Comparing lists: == compares the data element-wise, so two distinct list
# objects with equal contents (l1 and l3) compare equal.
print(l1 == l1)
print(l1 == l3)
print(l1 == l2)
|
# -*- coding: utf-8 -*-
"""
This program can calculate the hit and miss % using FIFO method.
Created on Wed Sep 4 16:42:16 2019
@author: tuhin Mitra
"""
# demo = """
# You Can Either enter string like this:
# 1,3,0,3,5,6
# or,
# You can Also Enter like this:
# 130356
# (for better readability the first method is recommended,
#
# However--> For quick and easy entry you can also use the 2nd method of input)
# --- in both the cases you get the same output ---
# """
# print(f"{demo}\n")
# print('Enter the Given string (separated by commas):') # actually if you don't separate by commas,it'll still not give
# # and problem
# string = input()
# if ',' in string:
# string_list = string.replace(' ', '').split(',')
# else:
# string_list = [i for i in string]
def solve(string_list, frame_length):
    """Simulate FIFO page replacement over the `string_list` page references
    using `frame_length` frames, and return a step-by-step textual report
    including hit/miss counts and percentages.
    """
    current_list = []  # pages currently resident, oldest first
    hit_count = 0
    miss_count = 0
    count = 0          # step counter for the report
    display_line = "Steps Display for FIFO Method:\n"
    # frame_length = int(input('Enter Frame length:'))
    # print('Entered length of the string : {}\n{}'.format(len(string_list), 16 * '__'))
    for i in string_list:
        if i not in current_list:
            # Page fault: once all frames are full, evict the oldest page.
            if len(current_list) == frame_length:
                current_list.pop(0)
            current_list.append(i)
            miss_count += 1
            flag = 'miss'
        else:
            hit_count += 1
            flag = 'hit'
        count += 1
        temp_list = current_list.copy()  # a copy list created to keep safe the actual content
        temp_list.reverse()              # newest page is shown first in the printout
        # print('>>{0} {1} with:({2}){3}{4}{5}'.format('step', count, i, '.\n|', '|\n|'.join(temp_list), '|'))
        # print(2 * ' ', flag, '\n')
        display_line += "\n" + '>>{0} {1} with:({2}){3}{4}{5}'.format('step', count, i, '\n|', '|\n|'.join(temp_list),
                                                                     '|') + f"\n{2 * ' '} {flag} \n"
    # NOTE(review): `miss_pecent` keeps the original (misspelled) local name.
    miss_pecent = (miss_count / len(string_list)) * 100
    hit_percent = (hit_count / len(string_list)) * 100
    # print('\nMiss count:{}, Hit count:{}\n{}'.format(miss_count, hit_count, 16 * '__'))
    display_line += '\n\nMiss count:{}, Hit count:{}\n{}'.format(miss_count, hit_count, 16 * '__')
    # print('The miss percent is: {0:.2f} and\nthe hit percent is: {1:.2f}'.format(miss_pecent, hit_percent))
    display_line += '\nThe miss percent is: {0:.2f} and\nthe hit percent is: {1:.2f}\n'.format(miss_pecent, hit_percent)
    display_line += f'\n{14*" "}--by Tuhin Mitra'
    return display_line
#!/usr/bin/env python3
"""
Two words form a “metathesis pair” if you can transform one into the other by
swapping two letters; for example, “converse” and “conserve”. Write a
program that finds all of the metathesis pairs in the dictionary.
"""
|
# Marker exception types (semantics inferred from the names only — the
# raising sites are not in view). None carry extra state.
class CannotExploit(Exception):
    pass
class CannotExplore(Exception):
    pass
class NoSuchShellcode(Exception):
    pass
|
# Fonte: https://leetcode.com/problems/sqrtx/
# Autor: Bruno Harlis
# Data: 18/08/2021
"""
Dado um número inteiro x não negativo, calcule e retorne a
raiz quadrada de x .
Como o tipo de retorno é um inteiro, os dígitos decimais são
truncados e apenas a parte inteira do resultado é retornada.
Nota: Você não tem permissão para usar nenhuma função expoente
ou operador embutido, como pow(x, 0.5) ou x ** 0.5.
Exemplo 1
Entrada: x = 4
Saída: 2
Exemplo 2
Entrada: x = 8
Saída: 2
Explicação: A raiz quadrada de 8 é 2,82842 ... e, como a parte
decimal está truncada, 2 é retornado.
Tempo de execução: 32 ms, mais rápido do que 87,42 % das submissões.
Uso de memória: 14,1 MB, menos de 90,02 % dos envios.
"""
class Solution:
    def mySqrt(self, x: int) -> int:
        """Integer square root of non-negative x (decimal digits truncated),
        found by binary search — no pow() or ** operator used.
        """
        if x == 0 or x == 1:
            return x
        lo, hi = 0, x
        while lo <= hi:
            mid = (lo + hi) // 2
            if mid * mid > x:
                hi = mid - 1
            elif (mid + 1) * (mid + 1) > x:
                # mid*mid <= x < (mid+1)^2, so mid is the floored root.
                return mid
            else:
                lo = mid + 1
|
class TankBoard:
    """The grid that tanks are located on.

    The grid is stored column-major: ``_board[x][y]`` with ``x`` in
    ``range(width)`` and ``y`` in ``range(height)``.
    """
    def __init__(self, width, height):
        self._height = height
        self._width = width
        # one independent column list per x-coordinate (None = empty cell)
        self._board = [[None] * height for _ in range(width)]
    def setUpTile(self, x, y, tile):
        """
        Sets the tile at a given position to tile object given
        :param int x: X coordinate of the tile to be set
        :param int y: Y coordinate of the tile to be set
        :param Tile tile: Tile object to be set at the given location
        :return: None
        """
        self._board[x][y] = tile
    def getTile(self, x, y):
        """
        Returns tile at a given position
        :param int x: X coordinate of the tile to be returned
        :param int y: Y coordinate of the tile to be returned
        :return: Tile at the given location
        :rtype: Tile
        """
        return self._board[x][y]
    @property
    def height(self):
        """Board height; writable via the setter below."""
        return self._height
    @height.setter
    def height(self, value):
        self._height = value
    @property
    def width(self):
        """Board width (read-only, as in the original interface)."""
        return self._width
|
def soma(lista):
    """Return the sum of the numbers in *lista* (0 for an empty list).

    Uses the built-in sum(): the original recursive version sliced the list
    on every call (O(n^2) copying) and hit the recursion limit on lists of
    ~1000 elements.
    """
    return sum(lista)
print(soma([1,2,3,4,5,6,7]))
def fib(n):
    """Return the n-th Fibonacci number (fib(0) == 0, fib(1) == 1).

    Iterative O(n) implementation: the original double recursion was
    O(phi**n) and became unusable around n ~ 35.
    """
    if n < 2:
        return n
    prev, curr = 0, 1
    for _ in range(n - 1):
        prev, curr = curr, prev + curr
    return curr
|
#
# from src/eulerian.c
#
# Eulerian to eulerianNumber
#
def eulerianNumber(n, k):
    """Eulerian number A(n, k) via the classic recurrence
    A(n, k) = (k+1) * A(n-1, k) + (n-k) * A(n-1, k-1).

    A(n, 0) is 1; out-of-range k (negative or >= n) contributes 0.
    """
    if k == 0:
        return 1
    if not 0 < k < n:
        return 0
    return (n - k) * eulerianNumber(n - 1, k - 1) + (k + 1) * eulerianNumber(n - 1, k)
|
# go_board.py
class GoBoard:
    """A square Go board: stone placement, group/liberty analysis and a
    simple territory count.

    Cell states are EMPTY, WHITE and BLACK; the grid is ``matrix[i][j]``
    where ``i`` is the row printed by __str__ and ``j`` the column.
    """
    EMPTY = 0
    WHITE = 1
    BLACK = 2
    def __init__( self, size = 0 ):
        # size == 0 leaves the board without a matrix (e.g. before Deserialize).
        self.size = size
        if size > 0:
            self.matrix = [ [ self.EMPTY for j in range( size ) ] for i in range( size ) ]
    def Serialize( self ):
        """Return a plain-dict snapshot of the board ({'size', 'matrix'})."""
        return {
            'size' : self.size,
            'matrix' : self.matrix
        }
    def Deserialize( self, data ):
        """Restore state from a Serialize()-style dict; returns self for chaining."""
        self.size = data[ 'size' ]
        self.matrix = data[ 'matrix' ]
        return self
    def __eq__( self, board ):
        """Cell-by-cell equality.

        NOTE(review): assumes both boards have the same size; differently
        sized boards are not handled -- confirm callers guarantee this.
        """
        for i in range( self.size ):
            for j in range( self.size ):
                if self.matrix[i][j] != board.matrix[i][j]:
                    return False
        return True
    def AllLocationsOfState( self, state ):
        """Yield every (i, j) location whose cell equals *state*."""
        for i in range( self.size ):
            for j in range( self.size ):
                if self.matrix[i][j] == state:
                    yield ( i, j )
    def AdjacentLocations( self, location ):
        """Yield the in-bounds orthogonal neighbours of *location*."""
        for offset in [ ( -1, 0 ), ( 1, 0 ), ( 0, -1 ), ( 0, 1 ) ]:
            adjacent_location = ( location[0] + offset[0], location[1] + offset[1] )
            if adjacent_location[0] < 0 or adjacent_location[1] < 0:
                continue
            if adjacent_location[0] >= self.size or adjacent_location[1] >= self.size:
                continue
            yield adjacent_location
    def GetState( self, location ):
        """Return the cell state at the (i, j) *location*."""
        return self.matrix[ location[0] ][ location[1] ]
    def SetState( self, location, state ):
        """Set the cell at the (i, j) *location* to *state*."""
        self.matrix[ location[0] ][ location[1] ] = state
    def AnalyzeGroups( self, for_who ):
        """Partition every cell of state *for_who* into connected groups.

        Each group dict has 'location_list'; for stone colours it also has
        'liberties' and 'liberty_location_list'. For EMPTY groups those two
        keys are deleted (liberties are meaningless for empty regions).
        """
        location_list = [ location for location in self.AllLocationsOfState( for_who ) ]
        group_list = []
        while len( location_list ) > 0:
            # Flood-fill one group starting from any remaining location.
            location = location_list[0]
            group = { 'location_list' : [], 'liberties' : 0, 'liberty_location_list' : [] }
            queue = [ location ]
            while len( queue ) > 0:
                location = queue.pop()
                group[ 'location_list' ].append( location )
                location_list.remove( location )
                for adjacent_location in self.AdjacentLocations( location ):
                    if adjacent_location in group[ 'location_list' ]:
                        continue
                    if adjacent_location in queue:
                        continue
                    if self.GetState( adjacent_location ) == for_who:
                        queue.append( adjacent_location )
            if for_who != self.EMPTY:
                # Count each distinct adjacent empty point once as a liberty.
                for location in group[ 'location_list' ]:
                    for adjacent_location in self.AdjacentLocations( location ):
                        if self.GetState( adjacent_location ) == self.EMPTY:
                            if not adjacent_location in group[ 'liberty_location_list' ]:
                                group[ 'liberties' ] += 1
                                group[ 'liberty_location_list' ].append( adjacent_location )
            else:
                del group[ 'liberties' ]
                del group[ 'liberty_location_list' ]
            group_list.append( group )
        return group_list
    def CalculateTerritory( self ):
        """Score empty regions: a region touching only one colour counts
        toward that colour.

        Returns (territory, group_list) where territory maps WHITE/BLACK to
        their point counts and each empty group gains an 'owner' key
        (WHITE, BLACK, or None for contested/neutral regions).
        """
        territory = {
            self.WHITE : 0,
            self.BLACK : 0,
        }
        group_list = self.AnalyzeGroups( self.EMPTY )
        for group in group_list:
            location_list = group[ 'location_list' ]
            touch_map = {
                self.WHITE : set(),
                self.BLACK : set(),
            }
            for location in location_list:
                for adjacent_location in self.AdjacentLocations( location ):
                    state = self.GetState( adjacent_location )
                    if state != self.EMPTY:
                        touch_map[ state ].add( adjacent_location )
            white_touch_count = len( touch_map[ self.WHITE ] )
            black_touch_count = len( touch_map[ self.BLACK ] )
            group[ 'owner' ] = None
            if white_touch_count > 0 and black_touch_count == 0:
                group[ 'owner' ] = self.WHITE
            elif black_touch_count > 0 and white_touch_count == 0:
                group[ 'owner' ] = self.BLACK
            else:
                pass # No one owns the territory.
            owner = group[ 'owner' ]
            if owner:
                territory[ owner ] += len( location_list )
        return territory, group_list
    def Clone( self ):
        """Return a deep copy of this board (fresh matrix, same cell values)."""
        clone = GoBoard( self.size )
        for i in range( self.size ):
            for j in range( self.size ):
                clone.matrix[i][j] = self.matrix[i][j]
        return clone
    def __str__( self ):
        """ASCII rendering: O = white, # = black, blank = empty, with
        zero-padded row indices on the right and column indices below."""
        board_string = ''
        for i in range( self.size ):
            for j in range( self.size ):
                stone = self.matrix[i][j]
                if stone == self.EMPTY:
                    stone = ' '
                elif stone == self.WHITE:
                    stone = 'O'
                elif stone == self.BLACK:
                    stone = '#'
                else:
                    stone = '?'
                board_string += '[' + stone + ']'
                if j < self.size - 1:
                    board_string += '--'
            board_string += ' %02d\n' % i
            if i < self.size - 1:
                board_string += ' | ' * self.size + '\n'
            else:
                for j in range( self.size ):
                    board_string += ' %02d ' % j
                board_string += '\n'
        return board_string
"""
Default config for Workload Automation. DO NOT MODIFY this file. This file
gets copied to ~/.workload_automation/config.py on initial run of run_workloads.
Add your configuration to that file instead.
"""
# *** WARNING: ***
# Configuration listed in this file is NOT COMPLETE. This file sets the default
# configuration for WA and gives EXAMPLES of other configuration available. It
# is not supposed to be an exhaustive list.
# PLEASE REFER TO WA DOCUMENTATION FOR THE COMPLETE LIST OF AVAILABLE
# EXTENSIONS AND THEIR CONFIGURATION.
# This defines when the device will be rebooted during Workload Automation execution. #
# #
# Valid policies are: #
# never: The device will never be rebooted. #
# as_needed: The device will only be rebooted if the need arises (e.g. if it #
# becomes unresponsive #
# initial: The device will be rebooted when the execution first starts, just before executing #
# the first workload spec. #
# each_spec: The device will be rebooted before running a new workload spec. #
# each_iteration: The device will be rebooted before each new iteration. #
# #
reboot_policy = 'as_needed'
# Defines the order in which the agenda spec will be executed. At the moment, #
# the following execution orders are supported: #
# #
# by_iteration: The first iteration of each workload spec is executed one after the other, #
# so all workloads are executed before proceeding on to the second iteration. #
# This is the default if no order is explicitly specified. #
# If multiple sections were specified, this will also split them up, so that specs #
# in the same section are further apart in the execution order. #
# by_section: Same as "by_iteration", but runs specs from the same section one after the other #
# by_spec: All iterations of the first spec are executed before moving on to the next #
# spec. This may also be specified as ``"classic"``, as this was the way #
# workloads were executed in earlier versions of WA. #
# random: Randomises the order in which specs run. #
execution_order = 'by_iteration'
# This indicates when a job will be re-run.
# Possible values:
# OK: This iteration has completed and no errors have been detected
# PARTIAL: One or more instruments have failed (the iteration may still be running).
# FAILED: The workload itself has failed.
# ABORTED: The user interrupted the workload
#
# If set to an empty list, a job will not be re-run ever.
retry_on_status = ['FAILED', 'PARTIAL']
# How many times a job will be re-run before giving up
max_retries = 3
# If WA should delete its files from the device after the run is completed
clean_up = False
####################################################################################################
######################################### Device Settings ##########################################
####################################################################################################
# Specify the device you want to run workload automation on. This must be a #
# string with the ID of the device. Common options are 'generic_android' and #
# 'generic_linux'. Run ``wa list devices`` to see all available options. #
# #
device = 'generic_android'
# Configuration options that will be passed onto the device. These are obviously device-specific, #
# so check the documentation for the particular device to find out which options and values are #
# valid. The settings listed below are common to all devices #
# #
device_config = dict(
# The name used by adb to identify the device. Use "adb devices" in bash to list
# the devices currently seen by adb.
#adb_name='10.109.173.2:5555',
# The directory on the device that WA will use to push files to
#working_directory='/sdcard/wa-working',
# This specifies the device's CPU cores. The order must match how they
# appear in cpufreq. The example below is for TC2.
# core_names = ['a7', 'a7', 'a7', 'a15', 'a15']
# Specifies cluster mapping for the device's cores.
# core_clusters = [0, 0, 0, 1, 1]
)
####################################################################################################
################################### Instrumentation Configuration ####################################
####################################################################################################
# This defines the additional instrumentation that will be enabled during workload execution, #
# which in turn determines what additional data (such as /proc/interrupts content or Streamline #
# traces) will be available in the results directory. #
# #
instrumentation = [
# Records the time it took to run the workload
'execution_time',
# Collects /proc/interrupts before and after execution and does a diff.
'interrupts',
# Collects the contents of/sys/devices/system/cpu before and after execution and does a diff.
'cpufreq',
# Gets energy usage from the workload form HWMON devices
# NOTE: the hardware needs to have the right sensors in order for this to work
#'hwmon',
# Run perf in the background during workload execution and then collect the results. perf is a
# standard Linux performance analysis tool.
#'perf',
# Collect Streamline traces during workload execution. Streamline is part of DS-5
#'streamline',
# Collects traces by interacting with Ftrace Linux kernel internal tracer
#'trace-cmd',
# Obtains the power consumption of the target device's core measured by National Instruments
# Data Acquisition(DAQ) device.
#'daq',
# Collects CCI counter data.
#'cci_pmu_logger',
# Collects FPS (Frames Per Second) and related metrics (such as jank) from
# the View of the workload (Note: only a single View per workload is
# supported at the moment, so this is mainly useful for games).
#'fps',
]
####################################################################################################
################################# Result Processors Configuration ##################################
####################################################################################################
# Specifies how results will be processed and presented. #
# #
result_processors = [
# Creates a status.txt that provides a summary status for the run
'status',
# Creates a results.txt file for each iteration that lists all collected metrics
# in "name = value (units)" format
'standard',
# Creates a results.csv that contains metrics for all iterations of all workloads
# in the .csv format.
'csv',
# Creates a summary.csv that contains summary metrics for all iterations of all
# all in the .csv format. Summary metrics are defined on a per-workload basis
# are typically things like overall scores. The contents of summary.csv are
# always a subset of the contents of results.csv (if it is generated).
#'summary_csv',
# Creates a results.csv that contains metrics for all iterations of all workloads
# in the JSON format
#'json',
# Write results to an sqlite3 database. By default, a new database will be
# generated for each run, however it is possible to specify a path to an
# existing DB file (see result processor configuration below), in which
# case results from multiple runs may be stored in the one file.
#'sqlite',
]
####################################################################################################
################################### Logging output Configuration ###################################
####################################################################################################
# Specify the format of logging messages. The format uses the old formatting syntax: #
# #
# http://docs.python.org/2/library/stdtypes.html#string-formatting-operations #
# #
# The attributes that can be used in formats are listed here: #
# #
# http://docs.python.org/2/library/logging.html#logrecord-attributes #
# #
logging = {
# Log file format
'file format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
# Verbose console output format
'verbose format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
# Regular console output format
'regular format': '%(levelname)-8s %(message)s',
# Colouring the console output
'colour_enabled': True,
}
####################################################################################################
#################################### Instruments Configuration #####################################
####################################################################################################
# Instrumentation Configuration is related to specific instrument's settings. Some of the #
# instrumentations require specific settings in order for them to work. These settings are #
# specified here. #
# Note that these settings only take effect if the corresponding instrument is
# enabled above.
####################################################################################################
######################################## perf configuration ########################################
# The hardware events such as instructions executed, cache-misses suffered, or branches
# mispredicted to be reported by perf. Events can be obtained from the device by typing
# 'perf list'.
#perf_events = ['migrations', 'cs']
# The perf options which can be obtained from man page for perf-record
#perf_options = '-a -i'
####################################################################################################
####################################### hwmon configuration ########################################
# The kinds of sensors hwmon instrument will look for
#hwmon_sensors = ['energy', 'temp']
####################################################################################################
###################################### trace-cmd configuration #####################################
# trace-cmd events to be traced. The events can be found by rooting on the device then type
# 'trace-cmd list -e'
#trace_events = ['power*']
####################################################################################################
######################################### DAQ configuration ########################################
# The host address of the machine that runs the daq Server which the instrument communicates with
#daq_server_host = '10.1.17.56'
# The port number for daq Server in which daq instrument communicates with
#daq_server_port = 56788
# The values of resistors 1 and 2 (in Ohms) across which the voltages are measured
#daq_resistor_values = [0.002, 0.002]
####################################################################################################
################################### cci_pmu_logger configuration ###################################
# The events to be counted by PMU
# NOTE: The number of events must not exceed the number of counters available (which is 4 for CCI-400)
#cci_pmu_events = ['0x63', '0x83']
# The name of the events which will be used when reporting PMU counts
#cci_pmu_event_labels = ['event_0x63', 'event_0x83']
# The period (in jiffies) between counter reads
#cci_pmu_period = 15
####################################################################################################
################################### fps configuration ##############################################
# Data points below this FPS will dropped as not constituting "real" gameplay. The assumption
# being that while actually running, the FPS in the game will not drop below X frames per second,
# except on loading screens, menus, etc, which should not contribute to FPS calculation.
#fps_drop_threshold=5
# If set to True, this will keep the raw dumpsys output in the results directory (this is maily
# used for debugging). Note: frames.csv with collected frames data will always be generated
# regardless of this setting.
#fps_keep_raw=False
####################################################################################################
################################# Result Processor Configuration ###################################
####################################################################################################
# Specifies an alternative database to store results in. If the file does not
# exist, it will be created (the directory of the file must exist however). If
# the file does exist, the results will be added to the existing data set (each
# run as a UUID, so results won't clash even if identical agendas were used).
# Note that in order for this to work, the version of the schema used to generate
# the DB file must match that of the schema used for the current run. Please
# see "What's new" secition in WA docs to check if the schema has changed in
# recent releases of WA.
#sqlite_database = '/work/results/myresults.sqlite'
# If the file specified by sqlite_database exists, setting this to True will
# cause that file to be overwritten rather than updated -- existing results in
# the file will be lost.
#sqlite_overwrite = False
# distribution: internal
####################################################################################################
#################################### Resource Getter configuration #################################
####################################################################################################
# The location on your system where /arm/scratch is mounted. Used by
# Scratch resource getter.
#scratch_mount_point = '/arm/scratch'
# end distribution
|
class QuoteInfo:
    """A single quote tick: symbol, exchange, timestamps, price, volume and
    percentage change."""
    def __init__(self):
        self.symbol = ''
        self.exchange = ''
        self.date = None
        self.time = None
        self.local_time = None
        self.price = 0.0
        self.volume = 0
        self.changepct = 0.0
    def __repr__(self):
        """Comma-terminated dump, e.g. 'SYM,EXCH,time,price,volume,pct%,'."""
        parts = [
            self.symbol + ',',
            self.exchange + ',',
            str(self.time) + ',',
            str(self.price) + ',',
            str(self.volume) + ',',
            str(self.changepct) + '%,',
        ]
        return ''.join(parts)
class QuoteArray:
    """Accumulates successive quote ticks into parallel per-field arrays."""
    def __init__(self):
        self.symbol = ''
        self.exchange = ''
        self.date = None
        self.time_arr = []
        self.price_arr = []
        self.volume_arr = []
    def append(self, q):
        """Record one tick; symbol/exchange/date track the latest tick seen."""
        self.symbol = q.symbol
        self.exchange = q.exchange
        self.date = q.date
        self.price_arr.append(q.price)
        self.time_arr.append(q.time)
        self.volume_arr.append(q.volume)
class MarketDepth:
    """One order-book snapshot: bid/ask price and quantity ladders."""
    def __init__(self):
        self.symbol = ''
        self.exchange = ''
        self.date = None
        self.time = None
        self.local_time = None
        self.bid_q = []
        self.bid_qty_q = []
        self.ask_q = []
        self.ask_qty_q = []
    def __repr__(self):
        """Comma-terminated dump: header, then bids, bid qtys, asks, ask qtys."""
        parts = [self.symbol + ',', self.exchange + ',', str(self.time) + ',']
        for ladder in (self.bid_q, self.bid_qty_q, self.ask_q, self.ask_qty_q):
            parts.extend(str(level) + ',' for level in ladder)
        return ''.join(parts)
class MarketDepthArray:
    """A time series of order-book snapshots stored as parallel arrays."""
    def __init__(self):
        self.symbol = ''
        self.exchange = ''
        self.date = None
        self.time_arr = []
        self.bid_q_arr = []
        self.bid_qty_q_arr = []
        self.ask_q_arr = []
        self.ask_qty_q_arr = []
|
class City:
    """A named geographic point with latitude and longitude."""
    def __init__(self, name, lat, lon):
        self.name = name
        self.lat = lat
        self.lon = lon
class InvalidInterface(Exception):
    """Base error for interface validation problems (semantics defined at raise sites)."""
    pass
class EmptyInterface(InvalidInterface):
    """Specialisation of InvalidInterface; name suggests the interface was present but empty."""
    pass
class NotAFileError(Exception):
    """Name suggests a path exists but is not a regular file -- confirm at raise sites."""
    pass
class MissingFileError(Exception):
    """Name suggests an expected file does not exist -- confirm at raise sites."""
    pass
|
# Task generator: prints one LaTeX \item line for every combination of
# shift / logic-basis / arithmetic / jump opcodes.
shifts = [r'\Opcode{ror}', r'\Opcode{rol}']
logics = [[r'\Opcode{or}', r'\Opcode{not}'], [r'\Opcode{nor}'], [r'\Opcode{nand}'], [r'\Opcode{xor}', r'\Opcode{or}']]
adds = [r'\Opcode{add}', r'\Opcode{sub}']
jumps = [r'\Opcode{jz}', r'\Opcode{jo}']
for shift in shifts:
    for logic in logics:
        # each basis opcode preceded by ", " -- same text the old
        # string-concatenation loop produced
        logicstr = ''.join(', ' + basis for basis in logic)
        for add in adds:
            for jump in jumps:
                common = r'\Opcode{in}, \Opcode{out}, '
                print(
                    "\\item {}{}{}, {}, {};".format(common, shift, logicstr, add, jump))
|
# 자연수 뒤집어 배열로 만들기
def solution(n):
    """Return the decimal digits of natural number *n* in reversed order.

    e.g. solution(12345) -> [5, 4, 3, 2, 1]. Uses a reversed-string
    comprehension instead of the original manual reversed(range(...)) loop.
    """
    return [int(digit) for digit in str(n)[::-1]]
'''
테스트 1 〉 통과 (0.03ms, 10.3MB)
테스트 2 〉 통과 (0.03ms, 10.3MB)
테스트 3 〉 통과 (0.03ms, 10.4MB)
테스트 4 〉 통과 (0.02ms, 10.3MB)
테스트 5 〉 통과 (0.02ms, 10.3MB)
테스트 6 〉 통과 (0.03ms, 10.4MB)
테스트 7 〉 통과 (0.02ms, 10.3MB)
테스트 8 〉 통과 (0.03ms, 10.4MB)
테스트 9 〉 통과 (0.02ms, 10.4MB)
테스트 10 〉 통과 (0.02ms, 10.3MB)
테스트 11 〉 통과 (0.02ms, 10.3MB)
테스트 12 〉 통과 (0.03ms, 10.4MB)
테스트 13 〉 통과 (0.02ms, 10.3MB)
''' |
'''
Given a set of distinct integers, nums, return all possible subsets (the power set).
'''
class Solution(object):
    def subsets(self, nums):
        """
        Return the power set of *nums* (all subsets, in generation order).
        :type nums: List[int]
        :rtype: List[List[int]]
        """
        power_set = [[]]
        for value in nums:
            # extend with a copy of every existing subset plus the new value;
            # += builds the comprehension list fully before extending
            power_set += [subset + [value] for subset in power_set]
        return power_set
#! /usr/bin/env python3
# Small demo of list mutation: pop removes and returns the last element,
# append adds a new one at the end.
a=[1, 4, 3, 53, 2]
print("list :", a)
print(a.pop()) # popping the last element (prints the removed value, 2)
print("pop element from list :", a)
a.append(21) # append a new element at the end
print("adding element in list :", a)
|
"""
This file contains ASCII art used in the Birthday Wisher program
"""
logo = """
,-----. ,--. ,--. ,--. ,--. ,--. ,--.,--. ,--.
| |) /_ `--',--.--.,-' '-.| ,---. ,-| | ,--,--.,--. ,--. | | | |`--' ,---. | ,---. ,---. ,--.--.
| .-. \,--.| .--''-. .-'| .-. |' .-. |' ,-. | \ ' / | |.'.| |,--.( .-' | .-. || .-. :| .--'
| '--' /| || | | | | | | |\ `-' |\ '-' | \ ' | ,'. || |.-' `)| | | |\ --.| |
`------' `--'`--' `--' `--' `--' `---' `--`--'.-' / '--' '--'`--'`----' `--' `--' `----'`--'
`---'
"""
|
# Distinct powers
def koliko_potenc():
    """Return a**b for every a, b in 2..100 (duplicates included),
    ordered with a as the outer loop and b as the inner loop."""
    return [a ** b for a in range(2, 101) for b in range(2, 101)]
def izloci_iste(sez):
    """Return the number of distinct values in *sez*.

    The original counted duplicates with an O(n^2) prefix scan
    (``sez[i] in sez[:i]``); a set gives the same count in O(n).
    """
    return len(set(sez))
print(izloci_iste(koliko_potenc())) |
# MIT 6.006 Introduction to Algorithms, Spring 2020
# see: https://www.youtube.com/watch?v=r4-cftqTcdI&list=PLUl4u3cNGP63EdVPNLG3ToM6LaEUuStEY&index=42
def max_score_prefix(in_list: list):
    """Prefix-DP for the lecture's bowling problem (hit one pin for its
    value, or two adjacent pins for their product); prints the best score.

    NOTE(review): for small i, dp[i - 2] / dp[i - 3] become negative and
    wrap around to the END of dp (Python negative indexing) -- this looks
    unintended; the suffix version below avoids the issue. Confirm before
    relying on this variant. Also note the sentinel insert mutates the
    caller's list.
    """
    dp = [0] * (len(in_list) + 1)
    in_list.insert(0, 1)
    for i in range(1, len(in_list)):
        dp[i - 1] = max(dp[i - 2], dp[i - 2] + in_list[i], dp[i - 3] + in_list[i] * in_list[i - 1])
    print(dp[-2])
def max_score_suffix(in_list: list):
    """Suffix-DP for the bowling problem: at pin i either skip it, take it
    alone (+value), or take it with pin i+1 (+product); prints the best
    total score.

    NOTE: appends a sentinel, mutating the caller's list.
    """
    dp = [0] * (len(in_list) + 2)
    in_list.append(1)
    for i in reversed(range(len(in_list) - 1)):
        dp[i] = max(dp[i + 1], dp[i + 1] + in_list[i], dp[i + 2] + in_list[i] * in_list[i + 1])
    print(dp[0])
if __name__ == '__main__':
    # Sample pin values; the best score is printed by the call.
    bowling_pins = [1, 1, 9, 9, 2, -5, -5]
    max_score_suffix(bowling_pins)
|
def gen_perms_helper(array, current, subsets, index):
    """Collect every subset of array[index:] appended onto *current*.

    (Despite the module's naming, this enumerates SUBSETS, not
    permutations; the public names are kept for compatibility.)
    """
    if index >= len(array):
        subsets.append(current)
        return
    # branch 1: leave array[index] out
    gen_perms_helper(array, current.copy(), subsets, index + 1)
    # branch 2: take array[index]
    included = current + [array[index]]
    gen_perms_helper(array, included, subsets, index + 1)
def gen_perms(array):
    """Return all subsets of *array* in exclude-before-include order."""
    collected = []
    gen_perms_helper(array, [], collected, 0)
    return collected
print(gen_perms([1, 2, 3]))
|
class WireMap:
    """Traces a wire over a grid, mapping each visited cell to the step
    count at which it was reached.

    Note: revisiting a cell OVERWRITES its earlier step count (same as the
    original implementation).
    """
    def __init__(self):
        self.wire_map = {}
        self.x = 0
        self.y = 0
        self.step_counter = 0
    def move(self, steps, x_dir, y_dir):
        """Advance *steps* cells in direction (x_dir, y_dir), recording each."""
        for _ in range(steps):
            self.x += x_dir
            self.y += y_dir
            self.step_counter += 1
            self.wire_map[(self.x, self.y)] = self.step_counter
    def move_up(self, steps):
        return self.move(steps, 0, -1)
    def move_down(self, steps):
        return self.move(steps, 0, 1)
    def move_left(self, steps):
        return self.move(steps, -1, 0)
    def move_right(self, steps):
        return self.move(steps, 1, 0)
    def mark_wires(self, movements):
        """Apply a list of "R8"/"L4"/"U2"/"D3"-style movement commands."""
        for command in movements:
            direction, amount = command[0], int(command[1:])
            if direction == "R":
                self.move_right(amount)
            elif direction == "L":
                self.move_left(amount)
            elif direction == "U":
                self.move_up(amount)
            else:
                self.move_down(amount)
# Read the two comma-separated wire paths from Day3.txt and trace each one.
maps = [line.strip().split(",") for line in open("Day3.txt")]
map1 = WireMap()
map1.mark_wires(maps[0])
map2 = WireMap()
map2.mark_wires(maps[1])
# Cells visited by BOTH wires are the crossing points.
keys1 = set(map1.wire_map.keys())
keys2 = set(map2.wire_map.keys())
keys_intersection = keys1 & keys2
result = []
for key in keys_intersection:
    # combined step count of both wires to reach this crossing
    result.append(map1.wire_map[key] + map2.wire_map[key])
# smallest combined step count over all crossings
print(min(result))
|
# Read three numbers and report which is the largest, the smallest and the
# middle one (prompts and messages are in Portuguese).
# NOTE(review): all comparisons are strict, so when two inputs are equal
# some (or all) of the messages are skipped -- confirm that is intended.
a = float(input('qual o primeiro numero? '))
b = float(input('qual o segundo numero? '))
c = float(input('qual o terceiro numero? '))
if a > b and a > c:
    print('o numero {} é o maior de todos '.format(a))
if b > a and b > c:
    print('o numero {} é o maior de todos '.format(b))
if c > b and c > a:
    print('o numero {} é o maior de todos '.format(c))
if a < b and a < c:
    print('o numero {} é o menor de todos '.format(a))
if b < a and b < c:
    print('o numero {} é o menor de todos '.format(b))
if c < b and c < a:
    print('o numero {} é o menor de todos '.format(c))
# "middle" tests: strictly between the other two (note: `and` binds tighter
# than `or`, so each line reads (x < y and x > z) or (x > y and x < z))
if a < b and a > c or a > b and a < c:
    print('o numero {} é o do meio '.format(a))
if b < a and b > c or b > a and b < c:
    print('o numero {} é o do meio '.format(b))
if c < b and c > a or c > b and c < a:
    print('o numero {} é o do meio '.format(c))
class Solution:
def longestPalindrome(self, s: str) -> str:
res = ""
for i in range(len(s)):
res = max(res,self.lp(s,i,i),self.lp(s,i,i+1),key=len)
return res
def lp(self,s,l,r):
while l >= 0 and r < len(s) and s[l] == s[r]:
l -= 1
r += 1
return s[l+1:r] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
class Priority(object):
    """Pipeline component that tags whole sentences containing
    priority-signalling phrases ("important", "keep in mind", ...) as
    PRIORITY spans on ``doc._.opinion``.

    NOTE(review): ``matcher.add("Priority", None, pattern, ...)`` is the
    spaCy v2 Matcher signature (on_match callback as second argument) --
    confirm the pinned spaCy version before upgrading.
    """
    def __init__(self, nlp, object):
        # `object` (shadows the builtin) is a namespace providing .matcher
        # (Matcher class) and .tokens (Span class), per the uses below.
        self.object = object
        self.matcher = object.matcher.Matcher(nlp.vocab)
        self.matcher.add("Priority", None,
            #important
            [{'LOWER': 'important'}],
            #crucial
            [{'LOWER': 'crucial'}],
            #key
            [{'LOWER': 'key'}],
            #essential
            [{'LOWER': 'essential'}],
            #critical
            [{'LOWER': 'critical'}],
            #fundamental
            [{'LOWER': 'fundamental'}],
            #key (NOTE(review): duplicate of the earlier 'key' pattern)
            [{'LOWER': 'key'}],
            #major
            [{'LOWER': 'major'}],
            #vital
            [{'LOWER': 'vital'}],
            #first and foremost
            [{'LOWER': 'first'},
             {'LOWER': 'and'},
             {'LOWER': 'foremost'}],
            #(now )?remember (that)?
            [{'LOWER': 'now', 'OP':'?'},
             {'LOWER': 'remember'}],
            #keep in mind (that)?
            [{'LOWER': 'keep'},
             {'LOWER': 'in'},
             {'LOWER': 'mind'}],
            #don't forget (that)?
            [{'LOWER': 'do'},
             {'LOWER': 'not'},
             {'LOWER': 'forget'}],
            #let's not forget
            [{'LOWER': 'let'},
             {'LOWER': 'us'},
             {'LOWER': 'not'},
             {'LOWER': 'forget'}],
            #let's keep in mind
            [{'LOWER': 'let'},
             {'LOWER': 'us'},
             {'LOWER': 'keep'},
             {'LOWER': 'in'},
             {'LOWER': 'mind'}],
            #let's remember
            [{'LOWER': 'let'},
             {'LOWER': 'us'},
             {'LOWER': 'remember'}],
        )
    def __call__(self, doc):
        """For each match, label the ENTIRE containing sentence as a
        PRIORITY span and append it to doc._.opinion; returns the doc."""
        matches = self.matcher(doc)
        for match_id, start, end in matches:
            sents = self.object.tokens.Span(doc, start, end).sent
            sent_start, sent_end = sents.start, sents.end
            opinion = self.object.tokens.Span(doc, sent_start, sent_end, label = "PRIORITY")
            doc._.opinion.append(opinion,)
        return doc
|
#!/usr/bin/python3.5
# -*-coding:utf-8 -*
# Read a year and report whether it is a Gregorian leap year:
# divisible by 4, except century years, unless divisible by 400.
# (Prompts and messages are in French and kept verbatim.)
annee=int(input("Taper une année : "))
#yprint(type(annee))
if annee%4==0:
    if annee%100==0:
        # century year: leap only if divisible by 400
        if annee%400==0:
            print(annee, "est une annee bissextile")
        else:
            print(annee, "n\'est pas une annee bissextile")
    else:
        print(annee, "est une annee bissextile")
else:
    print(annee, "n\'est pas une annee bissextile")
|
class TileData(object):
    """Value object identifying one tile image by its source file and
    z/x/y indices (presumably zoom/column/row map-tile coordinates --
    confirm against the producer)."""
    def __init__(self, filename, z, x, y) -> None:
        self.filename = filename
        self.z = z
        self.x = x
        self.y = y
|
def f(x):
    """Print *x* after converting it to str."""
    print(str(x))
# Echo whatever the user types (the Japanese prompt means roughly
# "write what you want to say"); input() already returns str, so the
# extra str() calls are redundant but harmless.
i = input("書きたい事を書いてね")
f(str(i))
#! /usr/bin/env python
# coding: utf-8
class Singleton(object):
    """Classic singleton: __new__ always hands back one shared instance.

    Not thread-safe (same as the original); each subclass that first
    instantiates will cache its instance on its own class via ``cls``.
    """
    _instance = None
    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = object.__new__(cls)
        return cls._instance
    @classmethod
    def get_instance(cls):
        """Return the shared instance, creating it on first use."""
        return cls()
|
# Split one input line into digits and non-digits: digits alternate between
# "take N" and "skip N" instructions applied to the non-digit characters,
# and the taken characters are printed without separators.
line_input = list(x for x in input())
num_list = list(int(x) for x in line_input if x.isnumeric())
words = list(x for x in line_input if not x.isnumeric())
# even-indexed digits are "take" counts, odd-indexed are "skip" counts
take = list(num_list[x] for x in range(len(num_list)) if x % 2 == 0)
skip = list(num_list[y] for y in range(len(num_list)) if y % 2 == 1)
result = []
for i in range(len(take)):
    result.append(words[:take[i]])
    # NOTE(review): with an odd number of digits, skip has one fewer entry
    # than take and skip[i] raises IndexError; also when skip[i] == 0 the
    # words list is not advanced past the taken chunk -- confirm intended.
    if skip[i] > 0:
        words = words[take[i] + skip[i]:]
for j in range(len(result)):
    for k in result[j]:
        print(k, end="")
# /home/master/PycharmProjects/pythonProjects/FirstStepsInPython/Fundamentals/Exercise /Lists Advanced/
# More Exercises |
class Sampleinfor:
    """Holds the inputs for one sample: name, data file, region and an
    optional list of hotspots."""
    def __init__(self, samplename, datafile, fregion, hotspots=None):
        """Create a sample record.

        hotspots: optional list of hotspot entries; defaults to a fresh
        empty list per instance. (The previous ``hotspots=list()`` default
        was a single shared list, mutated across ALL instances -- the
        classic mutable-default-argument bug.)
        """
        self.samplename = samplename
        self.datafile = datafile
        # fresh list per instance when the caller omits hotspots
        self.hotspots = [] if hotspots is None else hotspots
        self.fregion = fregion
a = 1
print(not isinstance(a, str))
if __name__ == "__main__":
    # Page through the codes, printing each slice of `limit` items.
    codes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    limit = 3
    total = len(codes)
    for offset in range(0, total, limit):
        # message kept verbatim (including the original "offet" typo)
        print("offet:{} limit:{} partial codes:{}".format(offset, limit, codes[offset:offset + limit]))
def func():
    """Print the sum of the values that occur exactly once in the list."""
    nums = [1, 3, 4, 4, 3]
    # set() collapses duplicates; count() keeps only the singletons
    unique_total = sum(value for value in set(nums) if nums.count(value) == 1)
    print(unique_total)
func()
def test_register_function(fresh_db):
    """A function registered via the decorator is callable from SQL."""
    @fresh_db.register_function
    def reverse_string(s):
        return "".join(reversed(list(s)))
    result = fresh_db.execute('select reverse_string("hello")').fetchone()[0]
    assert result == "olleh"
def test_register_function_multiple_arguments(fresh_db):
    """Registered functions accept multiple SQL arguments."""
    @fresh_db.register_function
    def a_times_b_plus_c(a, b, c):
        return a * b + c
    result = fresh_db.execute("select a_times_b_plus_c(2, 3, 4)").fetchone()[0]
    assert result == 10
|
#cAssume s is a string of lower case characters.
#Write a program that prints the longest substring of s in which the letters occur
# in alphabetical order. For example, if s = 'azcbobobegghakl', then your program should print
#Longest substring in alphabetical order is: beggh
#In the case of ties, print the first substring. For example, if s = 'abcbcd',
# then your program should print
#Longest substring in alphabetical order is: abc
# Find the longest substring of s whose letters are in (non-strict)
# alphabetical order; on ties the first such substring wins.
s = 'abcdcd'
maxLen = 0
current = s[0]
long = s[0]
for x in range(len(s) - 1):
    if s[x + 1] >= s[x]:
        # still in order: grow the current run
        current += s[x + 1]
        if len(current) > maxLen:
            maxLen = len(current)
            long = current
    else:
        # order broke: restart the run at the next character
        current = s[x + 1]
    # (the original's trailing `x += 1` was dead code -- `for` rebinds x
    # on every iteration -- and has been removed)
print('Longest substring in alphabetical order is: ' + long)
|
#!/usr/bin/env python
"""A module with the database time test mixin."""
class DatabaseTimeTestMixin(object):
    """A mixin for testing time-related methods of database implementations.

    Expects the host test class to provide ``self.db`` (the implementation
    under test) and unittest-style assertion methods.
    """
    def testNowPositive(self):
        """Now() must return a non-negative timestamp."""
        timestamp = self.db.Now()
        self.assertGreaterEqual(timestamp, 0)
    def testNowMonotnic(self):
        # NOTE(review): method name has a typo ("Monotnic"), but renaming
        # would change the public test name -- left as-is.
        """Successive Now() calls must be non-decreasing."""
        timestamp_1 = self.db.Now()
        timestamp_2 = self.db.Now()
        timestamp_3 = self.db.Now()
        self.assertLessEqual(timestamp_1, timestamp_2)
        self.assertLessEqual(timestamp_2, timestamp_3)
# This is just a mixin file and does not require a `__main__` entry.
|
class Solution:
    def combinationSum2(self, candidates, target):
        """Return all unique combinations of candidates summing to target,
        each candidate used at most once; duplicates in the input never
        produce duplicate combinations."""
        if not candidates or not target:
            return []
        cand = sorted(candidates)
        N = len(cand)
        result = []
        def explore(remaining, combo, start):
            if remaining == 0:
                result.append(combo)
                return
            for idx in range(start, N):
                value = cand[idx]
                if idx > start and value == cand[idx - 1]:
                    # same value already tried at this depth: skip duplicate branch
                    continue
                if value > remaining:
                    # sorted input: nothing further can fit
                    break
                explore(remaining - value, combo + [value], idx + 1)
        explore(target, [], 0)
        return result
|
# Generate gallery.md by filling the "bokeh_glyphs_<name>" placeholders of
# gallery-template.md with one markdown snippet (anchor, links, preview
# image) per Bokeh glyph.
template_file = 'gallery-template.md'
output_filename = 'gallery.md'
base_filename = 'http://alanpryorjr.com/visualizations/'
base_image_filename = '../visualizations/'
key_base = "bokeh_glyphs_{}"
glyph_names = ['annular_wedge', 'annulus','arc','asterisk','bezier', 'circle',
'circle_cross','circle_x','cross','diamond','diamond_cross','ellipse',
'hbar','image','image_rgba', 'image_url', 'inverted_triangle','line',
'oval','patch','patches','quad', 'quadratic', 'ray', 'square', 'square_cross',
'square_x', 'segment', 'text', 'triangle', 'vbar', 'wedge', 'x']
glyph_format_dict = dict(glyph_base_code_filename=base_filename+'bokeh/glyphs/',
                        glyph_base_html_filename=base_filename+'bokeh/figures/',
                        glyph_base_image_filename=base_image_filename+'bokeh/figures/')
filler_template = """
<a name="bokeh-glyphs-{glyph_name}"></a>
#### {Glyph_name} ([Interactive]({glyph_base_html_filename}{glyph_name})) [(code)]({glyph_base_code_filename}{glyph_name}/{glyph_name})

"""
format_dict = {}
for glyph_name in glyph_names:
    key = key_base.format(glyph_name)
    # Title-case the glyph name, e.g. "annular_wedge" -> "Annular Wedge".
    glyph_format_dict['Glyph_name'] = ' '.join(g.capitalize() for g in glyph_name.split('_'))
    glyph_format_dict['glyph_name'] = glyph_name
    # (Removed an unused 'glyph_img_file' entry: neither template references it.)
    format_dict[key] = filler_template.format(**glyph_format_dict)
with open(template_file,'r') as fi, open(output_filename, 'w') as fo:
    # Strip any stray doctype the template may carry before writing.
    fo.write(fi.read().format(**format_dict).replace('<!DOCTYPE html>',''))
# Program that lets the user type several values (asking whether to
# continue); values are stored in a list, duplicates are rejected, and the
# unique values are shown in ascending (and descending) order.
valores = []
while True:
    n = int(input("Digite um valor: "))
    if n not in valores:
        valores.append(n)
        print("Valor adicionado com sucesso!")
    else:
        print("Valor já adicionado.")
    resp = " "
    while resp not in "SN":
        entrada = str(input("Deseja continuar? [S/N]")).upper().strip()
        # Guard against empty input: the original indexed [0] directly and
        # raised IndexError when the user just pressed Enter.
        resp = entrada[0] if entrada else " "
    if resp == "N":
        break
print(valores)
valores.sort()
print(f"Os valores em ordem crescente: {valores}")
valores.sort(reverse=True)
print(f"Os valores em ordem decrescente: {valores}")
# Exercise 93 from the "Curso em Vídeo" Python course: build a player
# record (name, goals per match, total goals) from user input and report it.
jogador = {}
gol = []
partidas = totalgols = 0
separador = '-=' * 30
print(separador)
jogador['nome'] = str(input('Nome do jogador: '))
partidas = int(input(f'Quantas partidas {jogador["nome"]} jogou? '))
for p in range(partidas):
    placar = int(input(f' Quantos gols na partida {p + 1}? '))
    gol.append(placar)
    totalgols += placar
jogador['gols'] = gol.copy()
jogador['total'] = totalgols
print(separador)
print(jogador)
print(separador)
for campo, valor in jogador.items():
    print(f'O campo {campo} tem o valor {valor}.')
print(separador)
print(f'O jogador {jogador["nome"]} jogou {partidas} partidas.')
for p, gols_partida in enumerate(jogador['gols']):
    print(f' => Na partida {p + 1}, fez {gols_partida} gols.')
print(f'Foi um total de {totalgols} gols.')
|
class BaseReporter(object):
    """Delegate class to provide progress reporting for the resolver.

    Every hook is a no-op; subclass and override the events of interest.
    """
    def starting(self):
        """Called before the resolution actually starts.
        """
    def starting_round(self, index):
        """Called before each round of resolution starts.
        The index is zero-based.
        """
    def ending_round(self, index, state):
        """Called before each round of resolution ends.
        This is NOT called if the resolution ends at this round. Use `ending`
        if you want to report finalization. The index is zero-based.
        """
    def ending(self, state):
        """Called before the resolution ends successfully.
        """
    def adding_requirement(self, requirement):
        """Called when the resolver adds a new requirement into the resolve criteria.
        """
    def backtracking(self, candidate):
        """Called when the resolver rejects a candidate during backtracking.
        """
    def pinning(self, candidate):
        """Called when adding a candidate to the potential solution.
        """
|
'''
Compute and print 2^100 by repeated doubling.
'''
n = 1
result = 1
# Double `result` 100 times. The docstring promises 2**100, but the loop
# previously stopped at n <= 10 and only computed 2**10 = 1024.
while n <= 100:
    result = result * 2
    print(result)
    n = n + 1
print('The final is', result)
|
# Create a list (marks) classifying every line of vp; each item corresponds
# to the line with the same index.
def scan(vp):
    """Classify each line of a substitution plan.

    Args:
        vp: a sequence of strings, one per plan line.

    Returns:
        A list parallel to ``vp`` where every entry is one of
        "new_day", "pressure", "text", "blank", "class_name", or an
        extracted class name (for lines belonging to that class).

    NOTE(review): the fixed slice offsets (e.g. ``[7:]``) and marker
    substrings assume the plan's exact text layout -- verify against the
    producer of ``vp``.
    """
    marks = []
    for line in vp:
        # Recompute positions of the most recent text_start/text_end marks;
        # text_start > text_end means a general-info block is still open.
        text_start = 0
        text_end = 0
        for idx, mark in enumerate(marks):
            if mark == "text_end":
                text_end = idx
            elif mark == "text_start":
                text_start = idx
        # Line naming the day the plan is issued for.
        if "Ausfertigung" in line:
            marks.append("new_day")
            continue
        # Line with the pressure on the schedule.
        if " | / " in line:
            marks.append("pressure")
            continue
        # Blank lines carrying no information.
        if line == " |":
            # To prevent index errors this part only runs once at least
            # 3 lines are marked.
            if len(marks) > 2:
                # Two consecutive blank lines mean the line before them was
                # the last line of the general-information block; only
                # retro-mark it while that block is still open
                # (text_start > text_end).
                # NOTE(review): re-testing `line == " |"` here is redundant,
                # the enclosing branch already established it.
                if line == " |" and marks[-1] == "blank" and text_start > text_end:
                    marks[-2] = "text_end"
            marks.append("blank")
            continue
        # Only meaningful once at least 3 lines are marked.
        if len(marks) >= 3:
            # The general-information block starts two lines after the
            # "pressure" line.
            if marks[-2] == "pressure":
                marks.append("text_start")
                continue
            # Inside an open general-information block (text_end not yet
            # found, so it is smaller than text_start): plain text line.
            if text_start > text_end:
                marks.append("text")
                continue
        # Per-class substitution information.
        if " | " in line:
            # Fallback: if no class name was seen, the raw line stands in.
            group = str(line)
            for idx, mark in enumerate(marks):
                if mark == "class_name":
                    # Extract the class name from the most recent header line.
                    group = vp[idx][7:].replace(":", "")
            marks.append(group)
            continue
        if " |" in line:
            marks.append("class_name")
            continue
        # Default: something is not right, treat the line as blank.
        marks.append("blank")
    # Postprocessing: fold text_start/text_end markers into plain "text".
    # Possible values at this point:
    # text; new_day; pressure; blank; class_name; [class name]
    for idx, mark in enumerate(marks):
        if mark == "text_start" or mark == "text_end":
            marks[idx] = "text"
    return marks
def get_intel(day, groups):
    """Return a dictionary with the relevant information for one day.

    ``day`` is the list of plan lines; ``groups`` the class names of
    interest. Lines are routed by the marks produced by ``scan``.
    """
    intel = {
        # day/header the plan was issued for
        "new_day": "",
        # pressure on schedule
        "pressure": "",
        # general information text lines
        "text": [],
        # important lines for the selected groups
        "groups_intel": [],
    }
    marks = scan(day)
    for idx in range(len(day)):
        mark = marks[idx]
        raw = day[idx]
        if mark == "new_day":
            intel["new_day"] = raw[8:]
        elif mark == "pressure":
            intel["pressure"] = raw[12:]
        elif mark == "text":
            intel["text"].append(raw[7:])
        elif mark in groups:
            intel["groups_intel"].append(raw[9:])
    return intel
|
# __ __ _ __ ____ _ _
# \ \ / / | |/ _| _ \ | (_)
# \ \ /\ / /__ | | |_| |_) | ___ | |_ _ __
# \ \/ \/ / _ \| | _| _ < / _ \| | | '_ \
# \ /\ / (_) | | | | |_) | (_) | | | | | |
# \/ \/ \___/|_|_| |____/ \___/|_|_|_| |_|
# Package version as a tuple plus its dotted-string form.
VERSION = (0, 1, 3)
__version__ = '.'.join(str(part) for part in VERSION)
|
def main():
    """Print the sum of a sample pair of numbers."""
    total = add(1, 3)
    print(total)
def add(a: int, b: int) -> int:
    """Return the sum of *a* and *b*.

    The parameters were already annotated; the missing return annotation
    is added for consistency (backward-compatible).
    """
    return a + b
|
__version__ = "0.29.9"
__db_version__ = 7
__author__ = "Kristian Larsson, Lukas Garberg"
__author_email__ = "kll@tele2.net, lukas@spritelink.net"
__copyright__ = "Copyright 2011-2014, Kristian Larsson, Lukas Garberg"
__license__ = "MIT"
__status__ = "Development"
__url__ = "http://SpriteLink.github.com/NIPAP"
|
# Column headers for a pet-record CSV export, in the exact order the
# downstream consumer expects (presumably an adoption-listing importer --
# verify against its spec before reordering).
CSV_HEADERS = [
    "externalID",
    "status",
    "internalID",
    "rescueID",
    "name",
    "type",
    "priBreed",
    "secBreed",
    "mix",
    "sex",
    "okwithdogs",
    "okwithcats",
    "okwithkids",
    "declawed",
    "housebroken",
    "age",
    "specialNeeds",
    "altered",
    "size",
    "uptodate",
    "color",
    "pattern",
    "coatLength",
    "courtesy",
    "dsc",
    "found",
    "foundDate",
    "foundZipcode",
    "photo1",
    "photo2",
    "photo3",
    "photo4",
    "videoUrl",
]
|
load(
"@io_bazel_rules_scala//scala:providers.bzl",
_DepsInfo = "DepsInfo",
_ScalacProvider = "ScalacProvider",
)
def _compute_strict_deps_mode(input_strict_deps_mode, dependency_mode):
    """Resolve the effective strict-deps mode.

    Direct dependency mode always disables strict deps. A "default"
    setting maps to "error" for transitive mode and "off" otherwise;
    any explicit setting passes through untouched.
    """
    if dependency_mode == "direct":
        return "off"
    if input_strict_deps_mode != "default":
        return input_strict_deps_mode
    return "error" if dependency_mode == "transitive" else "off"
def _compute_dependency_tracking_method(
        dependency_mode,
        input_dependency_tracking_method):
    """Resolve the effective dependency-tracking method.

    A "default" setting maps to "high-level" for direct dependency mode
    and "ast" otherwise; explicit settings pass through untouched.
    """
    if input_dependency_tracking_method != "default":
        return input_dependency_tracking_method
    return "high-level" if dependency_mode == "direct" else "ast"
def _scala_toolchain_impl(ctx):
    """Build the Scala toolchain's ToolchainInfo from the rule attributes."""
    dependency_mode = ctx.attr.dependency_mode
    # Strict-deps behavior defaults differently per dependency mode.
    strict_deps_mode = _compute_strict_deps_mode(
        ctx.attr.strict_deps_mode,
        dependency_mode,
    )
    unused_dependency_checker_mode = ctx.attr.unused_dependency_checker_mode
    dependency_tracking_method = _compute_dependency_tracking_method(
        dependency_mode,
        ctx.attr.dependency_tracking_method,
    )
    # Final quality checks to possibly detect buggy code above; the rule's
    # attr `values` already restrict user input, so a failure here means a
    # bug in the helpers, not bad user configuration.
    if dependency_mode not in ("direct", "plus-one", "transitive"):
        fail("Internal error: invalid dependency_mode " + dependency_mode)
    if strict_deps_mode not in ("off", "warn", "error"):
        fail("Internal error: invalid strict_deps_mode " + strict_deps_mode)
    if dependency_tracking_method not in ("ast", "high-level"):
        fail("Internal error: invalid dependency_tracking_method " + dependency_tracking_method)
    enable_diagnostics_report = ctx.attr.enable_diagnostics_report
    # Expose the resolved configuration to consuming rules.
    toolchain = platform_common.ToolchainInfo(
        scalacopts = ctx.attr.scalacopts,
        dep_providers = ctx.attr.dep_providers,
        dependency_mode = dependency_mode,
        strict_deps_mode = strict_deps_mode,
        unused_dependency_checker_mode = unused_dependency_checker_mode,
        dependency_tracking_method = dependency_tracking_method,
        enable_code_coverage_aspect = ctx.attr.enable_code_coverage_aspect,
        scalac_jvm_flags = ctx.attr.scalac_jvm_flags,
        scala_test_jvm_flags = ctx.attr.scala_test_jvm_flags,
        enable_diagnostics_report = enable_diagnostics_report,
    )
    return [toolchain]
# Toolchain rule exposing compiler options and dependency-checking
# configuration; resolved by _scala_toolchain_impl into a ToolchainInfo.
scala_toolchain = rule(
    _scala_toolchain_impl,
    attrs = {
        "scalacopts": attr.string_list(),
        # Providers supplying the compiler / library classpaths.
        "dep_providers": attr.label_list(
            default = [
                "@io_bazel_rules_scala//scala:scala_xml_provider",
                "@io_bazel_rules_scala//scala:parser_combinators_provider",
                "@io_bazel_rules_scala//scala:scala_compile_classpath_provider",
                "@io_bazel_rules_scala//scala:scala_library_classpath_provider",
                "@io_bazel_rules_scala//scala:scala_macro_classpath_provider",
            ],
            providers = [_DepsInfo],
        ),
        "dependency_mode": attr.string(
            default = "direct",
            values = ["direct", "plus-one", "transitive"],
        ),
        # "default" is resolved per dependency_mode in the implementation.
        "strict_deps_mode": attr.string(
            default = "default",
            values = ["off", "warn", "error", "default"],
        ),
        "unused_dependency_checker_mode": attr.string(
            default = "off",
            values = ["off", "warn", "error"],
        ),
        # "default" is resolved per dependency_mode in the implementation.
        "dependency_tracking_method": attr.string(
            default = "default",
            values = ["ast", "high-level", "default"],
        ),
        "enable_code_coverage_aspect": attr.string(
            default = "off",
            values = ["off", "on"],
        ),
        "scalac_jvm_flags": attr.string_list(),
        "scala_test_jvm_flags": attr.string_list(),
        "enable_diagnostics_report": attr.bool(
            doc = "Enable the output of structured diagnostics through the BEP",
        ),
    },
    fragments = ["java"],
)
|
# Automatically generated
# pylint: disable=all
get = [{'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 16, 'SizeInMiB': 32768, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 16}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 4.0, 'DefaultVCpus': 48, 'SizeInMiB': 393216, 'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'z1d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 
'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 4.0}, 'VCpuInfo': {'DefaultVCpus': 48}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 131072, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 131072}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 
'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 131072, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 131072}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 262144, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 
'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 262144}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 262144, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 262144}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 
'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 524288, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 524288, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 
'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.4, 'DefaultVCpus': 72, 'SizeInMiB': 196608, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5n.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.4}, 'VCpuInfo': {'DefaultVCpus': 72}, 'MemoryInfo': {'SizeInMiB': 
196608}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 72, 'SizeInMiB': 524288, 'TotalSizeInGB': 15200, 'Disks': [{'SizeInGB': 1900, 'Count': 8, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'i3.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 72}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 15200, 'Disks': [{'SizeInGB': 1900, 'Count': 8, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': 
True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6, 'DefaultVCpus': 96, 'SizeInMiB': 196608, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 196608}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6, 'DefaultVCpus': 96, 'SizeInMiB': 196608, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 
'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 196608}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'Gpus': [{'Name': 'T4', 'Manufacturer': 'NVIDIA', 'Count': 8, 'MemoryInfo': {'SizeInMiB': 16384}}], 'TotalGpuMemoryInMiB': 131072, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'g4dn.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': 
{'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'GpuInfo': {'Gpus': [{'Name': 'T4', 'Manufacturer': 'NVIDIA', 'Count': 8, 'MemoryInfo': {'SizeInMiB': 16384}}], 'TotalGpuMemoryInMiB': 131072}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'TotalSizeInGB': 60000, 'Disks': [{'SizeInGB': 7500, 'Count': 8, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'i3en.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 60000, 'Disks': [{'SizeInGB': 7500, 'Count': 8, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 
'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm5d.metal', 'CurrentGeneration': 
True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 
'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r5d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}] # noqa: E501
def get_instances_list() -> list:
    '''Returns list EC2 instances with BareMetal = True .'''
    # pylint: disable=all
    # NOTE(review): `get` is presumably the module-level list of EC2
    # instance-type dicts defined above this function (each entry carries
    # 'BareMetal': True) -- confirm the variable name at the top of the file.
    return get
|
# Auto-generated IronPython/.NET interop stub describing the API surface of
# System.IO.StringWriter; all method bodies are intentionally `pass` -- the
# real implementation lives in the CLR.  The docstrings mirror the .NET docs.
class StringWriter(TextWriter, IDisposable):
    """
    Implements a System.IO.TextWriter for writing information to a string. The information is stored in an underlying System.Text.StringBuilder.
    StringWriter()
    StringWriter(formatProvider: IFormatProvider)
    StringWriter(sb: StringBuilder)
    StringWriter(sb: StringBuilder,formatProvider: IFormatProvider)
    """
    def Close(self):
        """
        Close(self: StringWriter)
        Closes the current System.IO.StringWriter and the underlying stream.
        """
        pass
    def Dispose(self):
        """
        Dispose(self: StringWriter,disposing: bool)
        Releases the unmanaged resources used by the System.IO.StringWriter and optionally releases the
        managed resources.
        disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources.
        """
        pass
    def FlushAsync(self):
        """ FlushAsync(self: StringWriter) -> Task """
        pass
    def GetStringBuilder(self):
        """
        GetStringBuilder(self: StringWriter) -> StringBuilder
        Returns the underlying System.Text.StringBuilder.
        Returns: The underlying StringBuilder.
        """
        pass
    def MemberwiseClone(self, *args):
        """
        MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
        Creates a shallow copy of the current System.MarshalByRefObject object.
        cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the
        object to be assigned a new identity when it is marshaled across a remoting boundary. A value of
        false is usually appropriate. true to copy the current System.MarshalByRefObject object's
        identity to its clone,which will cause remoting client calls to be routed to the remote server
        object.
        Returns: A shallow copy of the current System.MarshalByRefObject object.
        MemberwiseClone(self: object) -> object
        Creates a shallow copy of the current System.Object.
        Returns: A shallow copy of the current System.Object.
        """
        pass
    def ToString(self):
        """
        ToString(self: StringWriter) -> str
        Returns a string containing the characters written to the current StringWriter so far.
        Returns: The string containing the characters written to the current StringWriter.
        """
        pass
    def Write(self, *__args):
        """
        Write(self: StringWriter,value: str)
        Writes a string to this instance of the StringWriter.
        value: The string to write.
        Write(self: StringWriter,buffer: Array[Char],index: int,count: int)
        Writes the specified region of a character array to this instance of the StringWriter.
        buffer: The character array to read data from.
        index: The index at which to begin reading from buffer.
        count: The maximum number of characters to write.
        Write(self: StringWriter,value: Char)
        Writes a character to this instance of the StringWriter.
        value: The character to write.
        """
        pass
    def WriteAsync(self, *__args):
        """
        WriteAsync(self: StringWriter,buffer: Array[Char],index: int,count: int) -> Task
        WriteAsync(self: StringWriter,value: str) -> Task
        WriteAsync(self: StringWriter,value: Char) -> Task
        """
        pass
    def WriteLineAsync(self, *__args):
        """
        WriteLineAsync(self: StringWriter,buffer: Array[Char],index: int,count: int) -> Task
        WriteLineAsync(self: StringWriter,value: str) -> Task
        WriteLineAsync(self: StringWriter,value: Char) -> Task
        """
        pass
    def __enter__(self, *args):
        """
        __enter__(self: IDisposable) -> object
        Provides the implementation of __enter__ for objects which implement IDisposable.
        """
        pass
    def __exit__(self, *args):
        """
        __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object)
        Provides the implementation of __exit__ for objects which implement IDisposable.
        """
        pass
    def __init__(self, *args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    @staticmethod
    def __new__(self, *__args):
        """
        __new__(cls: type)
        __new__(cls: type,formatProvider: IFormatProvider)
        __new__(cls: type,sb: StringBuilder)
        __new__(cls: type,sb: StringBuilder,formatProvider: IFormatProvider)
        """
        pass
    def __reduce_ex__(self, *args):
        pass
    def __str__(self, *args):
        pass
    # Read-only in practice: the getter returns the output encoding; the
    # setter/deleter lambdas exist only to satisfy the stub generator's shape.
    Encoding = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """Gets the System.Text.Encoding in which the output is written.
    Get: Encoding(self: StringWriter) -> Encoding
    """
    # NOTE(review): mirrors TextWriter's protected CoreNewLine field
    # (line-terminator characters) -- stubbed out as None here.
    CoreNewLine = None
|
# 8. With two given lists [1,3,6,78,35,55] and [12,24,35,24,88,120,155],
# write a program to make a list whose elements are intersection of the above given lists.
listA = [1, 3, 6, 78, 35, 55]
listB = [12, 24, 35, 24, 88, 120, 155]
# Convert both lists to sets so the intersection is a single O(len) operation.
setA = set(listA)
setB = set(listB)
insersaction_of_AB = setA & setB  # same as setA.intersection(setB)
listA_insersact_B = list(insersaction_of_AB)
print(listA_insersact_B)
|
#encoding:utf-8

# reddit2telegram channel configuration: which subreddit to mirror and where.
subreddit = 'WikiLeaks'
t_channel = '@r_WikiLeaks'


def send_post(submission, r2t):
    """Forward one reddit submission to Telegram via the default simple sender."""
    return r2t.send_simple(submission)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 26 15:11:02 2020

@author: dhaar01
"""

# Read n, then n lines; a set keeps only the distinct values, so its size
# is the number of unique inputs.
n = int(input())
s = {input() for _ in range(n)}
print(len(s))
|
"""
Given a string, find the rank of the string amongst its permutations sorted lexicographically.
Assume that no characters are repeated.
Example :
Input : 'acb'
Output : 2
The order permutations with letters 'a', 'c', and 'b' :
abc
acb
bac
bca
cab
cba
The answer might not fit in an integer, so return your answer % 1000003
"""
class Solution:
    """Rank of a string (no repeated characters) among the lexicographically
    sorted permutations of its characters, 1-based, modulo 1000003."""

    def rankPermutation(self, string):
        # For each position, count how many smaller characters appear to its
        # right; each such character starts a block of (remaining!) earlier
        # permutations.
        length = len(string)
        remaining_perms = self.fact(length)
        rank = 1
        for pos in range(length):
            remaining_perms //= (length - pos)
            smaller = self.smallComb(string, pos, length - 1)
            rank += smaller * remaining_perms
        return rank % 1000003

    def smallComb(self, string, start, end):
        # Number of characters in string[start+1 .. end] smaller than string[start].
        return sum(1 for idx in range(start + 1, end + 1)
                   if string[idx] < string[start])

    def fact(self, k):
        # Iterative factorial of k (fact(0) == 1).
        result = 1
        for factor in range(1, k + 1):
            result *= factor
        return result

    # Space : O(n) # Time: O(n*n)
    def method_02(self, string):
        # Alternative: repeatedly locate the current character in a shrinking
        # sorted pool, adding a full (pool-1)! for every smaller char skipped.
        chars = list(string)
        n = len(chars)
        pool = sorted(chars)
        rank = 1
        scan = 0   # index into pool
        done = 0   # characters of `string` already consumed
        while scan < n and done < len(pool):
            if pool[scan] != chars[done]:
                rank += self.fact(len(pool) - 1)
                scan += 1
            if pool[scan] == chars[done]:
                del pool[scan]
                done += 1
                scan = 0
        return rank % 1000003


s = Solution()
print(s.rankPermutation("VIEW"))
print(s.method_02("VIEW"))
|
#
# PySNMP MIB module WWP-LEOS-PING-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/WWP-LEOS-PING-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:38:13 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE(review): this module is generated by pysmi from the WWP-LEOS-PING-MIB
# ASN.1 source (see file header); `mibBuilder` is supplied by the pysnmp MIB
# loader at import time, not defined in this file, and hand edits would be
# lost on regeneration.
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint")
AddressFamilyNumbers, = mibBuilder.importSymbols("IANA-ADDRESS-FAMILY-NUMBERS-MIB", "AddressFamilyNumbers")
InetAddressType, = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, NotificationType, ModuleIdentity, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Counter32, iso, ObjectIdentity, TimeTicks, MibIdentifier, Counter64, Unsigned32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "NotificationType", "ModuleIdentity", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Counter32", "iso", "ObjectIdentity", "TimeTicks", "MibIdentifier", "Counter64", "Unsigned32", "IpAddress")
DisplayString, TruthValue, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TruthValue", "TextualConvention")
wwpModulesLeos, = mibBuilder.importSymbols("WWP-SMI", "wwpModulesLeos")
# Module identity for the Ciena (WWP) LEOS ping MIB: 1.3.6.1.4.1.6141.2.60.19
wwpLeosPingMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19))
wwpLeosPingMIB.setRevisions(('2012-04-02 00:00', '2001-07-03 12:57',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: wwpLeosPingMIB.setRevisionsDescriptions(('Add wwpLeosPingInetAddrType to support IP protocol version independent Inet addressing.', 'Initial Creation',))
if mibBuilder.loadTexts: wwpLeosPingMIB.setLastUpdated('201204020000Z')
if mibBuilder.loadTexts: wwpLeosPingMIB.setOrganization('Ciena, Inc')
if mibBuilder.loadTexts: wwpLeosPingMIB.setContactInfo(' Mib Meister 115 North Sullivan Road Spokane Valley, WA 99037 USA Phone: +1 509 242 9000 Email: support@ciena.com')
if mibBuilder.loadTexts: wwpLeosPingMIB.setDescription('The MIB for WWP Ping')
# Textual convention: enumerated reason code for the last ping failure.
class PingFailCause(TextualConvention, Integer32):
    description = 'The cause of the last ping failure.'
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17))
    namedValues = NamedValues(("unknownHost", 1), ("socketError", 2), ("bindError", 3), ("connectError", 4), ("missingHost", 5), ("asyncError", 6), ("nonBlockError", 7), ("mcastError", 8), ("ttlError", 9), ("mcastTtlError", 10), ("outputError", 11), ("unreachableError", 12), ("isAlive", 13), ("txRx", 14), ("commandCompleted", 15), ("noStatus", 16), ("sendRecvMismatch", 17))
# Textual convention: state machine of the ping operation.
class PingState(TextualConvention, Integer32):
    description = 'The state of the last ping request.'
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
    namedValues = NamedValues(("idle", 1), ("pinging", 2), ("pingComplete", 3), ("failed", 4))
# Scalars live under ...19.1; read-write scalars configure the ping request,
# read-only scalars report progress and results.
wwpLeosPingMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1))
wwpLeosPingDelay = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingDelay.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingDelay.setDescription('The object specifies the minimum amount of time to wait before sending the next packet in a sequence after receiving a response or declaring a timeout for a previous packet.')
wwpLeosPingPacketSize = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1464)).clone(56)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingPacketSize.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingPacketSize.setDescription('The size of the ping packets to send to the target.')
wwpLeosPingActivate = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 3), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingActivate.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingActivate.setDescription("Ping can be activated by setting this object to true. Once the ping operation is completed, the object is set to 'false'. This object can be set to 'false' by the Management Station to stop the ping.")
wwpLeosPingAddrType = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 4), AddressFamilyNumbers()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingAddrType.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingAddrType.setDescription('The address type associated with wwpLeosPingAddr. With the new wwpLeosPingInetAddrType being introduced to support RFC 4001, this OID will only be used when wwpLeosPingAddr is a host name or an IPv4 address. Otherwise, it will be set to other(0).')
wwpLeosPingAddr = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingAddr.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingAddr.setDescription('The host name or IP address of the device to be pinged. wwpLeosPingAddrType determines if address is host name or IP address.')
wwpLeosPingPacketCount = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingPacketCount.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingPacketCount.setDescription('Specifies the number of ICMP requests to send to the target.')
wwpLeosPingPacketTimeout = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingPacketTimeout.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingPacketTimeout.setDescription("Specifies the amount of time to wait for a response to a transmitted packet before declaring the packet 'dropped'.")
wwpLeosPingSentPackets = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingSentPackets.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingSentPackets.setDescription('The number of ping packets that have been sent to the target.')
wwpLeosPingReceivedPackets = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingReceivedPackets.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingReceivedPackets.setDescription('The number of ping packets that have been received from the target.')
wwpLeosPingFailCause = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 10), PingFailCause()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingFailCause.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingFailCause.setDescription('The result of the ping.')
wwpLeosPingState = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 11), PingState().clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingState.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingState.setDescription('The state of the ping process. The possible states include pinging, idle, complete or failed.')
wwpLeosPingUntilStopped = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 12), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingUntilStopped.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingUntilStopped.setDescription("Setting this object to true prior to wwpLeosPingActivate will cause the device to ping the specified host until wwpLeosPingActivate is set to false. The object cannot be modified once the ping is active. The object returns to 'false' once the ping is halted.")
wwpLeosPingInetAddrType = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 13), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingInetAddrType.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingInetAddrType.setDescription('The Inet address type associated with wwpLeosPingAddr. When set to: ipv4 : wwpLeosPingAddr should be compliant with InetAddressIPv4 from RFC 4001 ipv6 : wwpLeosPingAddr should be compliant with InetAddressIPv6 from RFC 4001.')
# Re-export every symbol so other compiled MIB modules can import them.
mibBuilder.exportSymbols("WWP-LEOS-PING-MIB", wwpLeosPingMIB=wwpLeosPingMIB, wwpLeosPingDelay=wwpLeosPingDelay, wwpLeosPingPacketTimeout=wwpLeosPingPacketTimeout, wwpLeosPingPacketSize=wwpLeosPingPacketSize, wwpLeosPingFailCause=wwpLeosPingFailCause, wwpLeosPingSentPackets=wwpLeosPingSentPackets, PingState=PingState, wwpLeosPingPacketCount=wwpLeosPingPacketCount, wwpLeosPingState=wwpLeosPingState, wwpLeosPingMIBObjects=wwpLeosPingMIBObjects, wwpLeosPingInetAddrType=wwpLeosPingInetAddrType, PingFailCause=PingFailCause, wwpLeosPingReceivedPackets=wwpLeosPingReceivedPackets, PYSNMP_MODULE_ID=wwpLeosPingMIB, wwpLeosPingAddrType=wwpLeosPingAddrType, wwpLeosPingUntilStopped=wwpLeosPingUntilStopped, wwpLeosPingActivate=wwpLeosPingActivate, wwpLeosPingAddr=wwpLeosPingAddr)
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# GYP build description: two static libraries -- `instaweb_util` (project
# sources) and `spdy` (selected framer sources vendored from Chromium's
# net/spdy, hence the extra zlib dependency and chromium_root include path).
{
  'variables': {
    'chromium_code': 1,
    'chromium_root': '<(DEPTH)/third_party/chromium/src',
  },
  'targets': [
    {
      'target_name': 'instaweb_util',
      'type': '<(library)',
      'dependencies': [
        '<(DEPTH)/base/base.gyp:base',
      ],
      'include_dirs': [
        '<(DEPTH)',
      ],
      'export_dependent_settings': [
        '<(DEPTH)/base/base.gyp:base',
      ],
      'sources': [
        # TODO(mdsteele): Add sources here as we need them.
        'instaweb/util/function.cc',
      ],
    },
    {
      'target_name': 'spdy',
      'type': '<(library)',
      'dependencies': [
        '<(DEPTH)/base/base.gyp:base',
        '<(DEPTH)/third_party/zlib/zlib.gyp:zlib',
      ],
      'export_dependent_settings': [
        '<(DEPTH)/base/base.gyp:base',
      ],
      'include_dirs': [
        '<(DEPTH)',
        '<(chromium_root)',
      ],
      'sources': [
        '<(chromium_root)/net/spdy/buffered_spdy_framer.cc',
        '<(chromium_root)/net/spdy/spdy_frame_builder.cc',
        '<(chromium_root)/net/spdy/spdy_frame_reader.cc',
        '<(chromium_root)/net/spdy/spdy_framer.cc',
      ],
    },
  ],
}
|
# AUTOGENERATED BY NBDEV! DO NOT EDIT!
# (Index mapping exported names to the notebooks that define them;
#  regenerate with nbdev instead of hand-editing.)
__all__ = ["index", "modules", "custom_doc_links", "git_url"]
# symbol name -> defining notebook
index = {"CoreClass": "00_core.ipynb",
         "TSDataFrame": "01_TSDataFrame.ipynb"}
# generated Python modules corresponding to the notebooks above
modules = ["core.py",
           "TSDataFrame.py"]
doc_url = "https://alvaroof.github.io/nbdevtest/"
git_url = "https://github.com/alvaroof/nbdevtest/tree/master/"
# nbdev doc-link hook; returning None means "no custom link for this name".
def custom_doc_links(name): return None
|
# Time: O(n)
# Space: O(1)


class ListNode(object):
    def __init__(self, x):
        self.val = x
        self.next = None

    def __repr__(self):
        if self:
            return "{} -> {}".format(self.val, self.next)


class Solution(object):
    # @param a ListNode
    # @return a ListNode
    def swapPairs(self, head):
        """Swap every two adjacent nodes in place and return the new head."""
        sentinel = ListNode(0)
        sentinel.next = head
        prev = sentinel
        while prev.next and prev.next.next:
            first, second = prev.next, prev.next.next
            rest = second.next
            # Relink: prev -> second -> first -> rest
            prev.next, second.next, first.next = second, first, rest
            prev = first
        return sentinel.next
|
class JobSummary(object):
    """
    Accumulator for job-state counts, matching the job summary format stored
    in couchdb.  Counts can be merged in from raw status dicts
    (addJobStatusInfo) or from other JobSummary instances (addJobSummary).
    """
    def __init__(self, jobStatus=None):
        # Template of every tracked state.  Leaf values are counts; nested
        # dicts break a state down by sub-category.
        # BUGFIX: the original dict listed "submitted" twice -- once with
        # {"first", "retry"} and once with {"pending", "running"}; the second
        # literal silently overwrote the first, so getSubmitted() raised
        # KeyError.  All four sub-keys now live in a single dict.
        self.jobStatus = {
            "success": 0,
            "canceled": 0,
            "transition": 0,
            "queued": {"first": 0, "retry": 0},
            "submitted": {"first": 0, "retry": 0, "pending": 0, "running": 0},
            "failure": {"create": 0, "submit": 0, "exception": 0},
            "cooloff": {"create": 0, "submit": 0, "job": 0},
            "paused": {"create": 0, "submit": 0, "job": 0},
        }
        if jobStatus is not None:
            self.addJobStatusInfo(jobStatus)

    def addJobStatusInfo(self, jobStatus):
        """Add the counts from a raw status dict into this summary.

        Keys absent from ``jobStatus`` contribute zero; unknown keys in
        ``jobStatus`` are ignored (only template keys are accumulated).
        """
        #TODO need to validate the structure.
        for key, value in self.jobStatus.items():
            if isinstance(value, int):
                self.jobStatus[key] += jobStatus.get(key, 0)
            elif isinstance(value, dict):
                for secondKey in value:
                    if key in jobStatus and secondKey in jobStatus[key]:
                        self.jobStatus[key][secondKey] += jobStatus[key][secondKey]

    def addJobSummary(self, jobSummary):
        """Merge another JobSummary into this one."""
        self.addJobStatusInfo(jobSummary.jobStatus)

    def getTotalJobs(self):
        """Total number of jobs across every tracked state."""
        return (self.getSuccess() +
                self.jobStatus["canceled"] +
                self.jobStatus["transition"] +
                self.getFailure() +
                self.getCooloff() +
                self.getPaused() +
                self.getQueued() +
                self.getRunning() +
                self.getPending())

    def getSuccess(self):
        return self.jobStatus["success"]

    def getFailure(self):
        return (self.jobStatus["failure"]["create"] +
                self.jobStatus["failure"]["submit"] +
                self.jobStatus["failure"]["exception"])

    def getCompleted(self):
        """Jobs that reached a terminal state (success or failure)."""
        return self.getSuccess() + self.getFailure()

    def getSubmitted(self):
        """Jobs handed to the batch system, counted by submission attempt."""
        return (self.jobStatus["submitted"]["first"] +
                self.jobStatus["submitted"]["retry"])

    def getRunning(self):
        return self.jobStatus["submitted"]["running"]

    def getPending(self):
        return self.jobStatus["submitted"]["pending"]

    def getCooloff(self):
        return (self.jobStatus["cooloff"]["create"] +
                self.jobStatus["cooloff"]["submit"] +
                self.jobStatus["cooloff"]["job"])

    def getPaused(self):
        return (self.jobStatus["paused"]["create"] +
                self.jobStatus["paused"]["submit"] +
                self.jobStatus["paused"]["job"])

    def getQueued(self):
        return (self.jobStatus["queued"]["first"] +
                self.jobStatus["queued"]["retry"])

    def getJSONStatus(self):
        """Flat dict used in the JSON API responses.

        FIXME: 'sucess' is misspelled, but downstream consumers key on the
        literal string, so renaming it here would break them.
        """
        return {'sucess': self.getSuccess(),
                'failure': self.getFailure(),
                'cooloff': self.getCooloff(),
                'running': self.getRunning(),
                'queued': self.getQueued(),
                'pending': self.getPending(),
                'paused': self.getPaused(),
                'created': self.getTotalJobs()
                }
class ProgressSummary(object):
    """Accumulator for workflow progress counters (lumis, events, size)."""

    def __init__(self, progressReport=None):
        # Running totals; keys mirror the progress section of the couchdb doc.
        self.progress = {"totalLumis": 0, "events": 0, "size": 0}
        if progressReport is not None:
            self.addProgressReport(progressReport)

    def addProgressReport(self, progressReport):
        """Add the counters from one raw progress dict; missing keys add 0."""
        #TODO need to validate the structure.
        for counter in self.progress:
            self.progress[counter] += progressReport.get(counter, 0)

    def getReport(self):
        return self.progress
class TaskInfo(object):
    """Job information for one task of one request."""

    def __init__(self, requestName, taskName, data):
        self.requestName = requestName
        self.taskName = taskName
        self.taskType = data.get('jobtype', "N/A")
        self.jobSummary = JobSummary(data.get('status', {}))

    def addTaskInfo(self, taskInfo):
        """Merge job counts from another TaskInfo describing the same task."""
        if self.requestName != taskInfo.requestName or self.taskName != taskInfo.taskName:
            msg = "%s: %s, %s: %s, %s: %s" % (self.requestName, taskInfo.requestName,
                                              self.taskName, taskInfo.taskName,
                                              self.taskType, taskInfo.taskType)
            raise Exception("task doesn't match %s" % msg)
        self.jobSummary.addJobSummary(taskInfo.jobSummary)

    def getRequestName(self):
        return self.requestName

    def getTaskName(self):
        return self.taskName

    def getTaskType(self):
        return self.taskType

    def getJobSummary(self):
        return self.jobSummary

    def isTaskCompleted(self):
        """True once the task has at least one job and all of them finished."""
        totalJobs = self.jobSummary.getTotalJobs()
        return totalJobs != 0 and totalJobs == self.jobSummary.getCompleted()
class RequestInfo(object):
    def __init__(self, data):
        """Wrap one workflow-summary couchdb document (``data``).

        When present, ``data['AgentJobInfo']`` maps agent URL to that agent's
        job status and per-task info; it is flattened into combined and
        per-agent JobSummary / TaskInfo structures by setData().
        """
        self.setData(data)
    def setData(self, data):
        # Caches the raw document and builds:
        #   jobSummary        - JobSummary combined across all agents
        #   jobSummaryByAgent - agent URL -> JobSummary
        #   tasks             - task name -> TaskInfo (merged across agents)
        #   tasksByAgent      - agent URL -> {task name -> TaskInfo}
        #If RequestName doesn't exist, try legacy format (workflow)
        if 'RequestName' in data:
            self.requestName = data['RequestName']
        else:
            self.requestName = data['workflow']
        self.data = data
        self.jobSummaryByAgent = {}
        self.tasks = {}
        self.tasksByAgent = {}
        self.jobSummary = JobSummary()
        if 'AgentJobInfo' in data:
            for agentUrl, agentRequestInfo in data['AgentJobInfo'].items():
                self.jobSummary.addJobStatusInfo(agentRequestInfo.get('status', {}))
                self.jobSummaryByAgent[agentUrl] = JobSummary(agentRequestInfo.get('status', {}))
                if 'tasks' in agentRequestInfo:
                    self.tasksByAgent[agentUrl] = {}
                    # NOTE(review): the loop variable `data` shadows the method
                    # argument; harmless today because the argument is not used
                    # after this point, but worth renaming.
                    for taskName, data in agentRequestInfo['tasks'].items():
                        if taskName not in self.tasks:
                            self.tasks[taskName] = TaskInfo(self.requestName, taskName, data)
                        else:
                            self.tasks[taskName].addTaskInfo(TaskInfo(self.requestName, taskName, data))
                        # only one task by one agent - don't need to combine
                        self.tasksByAgent[agentUrl][taskName] = TaskInfo(self.requestName, taskName, data)
    def getJobSummary(self):
        # Combined JobSummary across all agents.
        return self.jobSummary
    def getJobSummaryByAgent(self, agentUrl = None):
        # One agent's JobSummary, or the whole mapping when agentUrl is None.
        if agentUrl:
            return self.jobSummaryByAgent[agentUrl]
        else:
            return self.jobSummaryByAgent
    def getTasksByAgent(self, agentUrl = None):
        # One agent's task map, or the whole per-agent mapping when None.
        if agentUrl:
            return self.tasksByAgent[agentUrl]
        else:
            return self.tasksByAgent
    def getTasks(self):
        return self.tasks
    def getTotalTopLevelJobs(self):
        # "N/A" when the document carries no total_jobs field.
        return self.data.get("total_jobs", "N/A")
    def getTotalTopLevelJobsInWMBS(self):
        # Sum of jobs already injected into WMBS over all reporting agents.
        inWMBS = 0
        if "AgentJobInfo" in self.data:
            for agentRequestInfo in self.data["AgentJobInfo"].values():
                inWMBS += agentRequestInfo['status'].get('inWMBS', 0)
        return inWMBS
    def getTotalInputLumis(self):
        return self.data.get("input_lumis", "N/A")
    def getTotalInputEvents(self):
        return self.data.get("input_events", "N/A")
    def getProgressSummaryByOutputDataset(self):
        """
        Return {output dataset name: ProgressSummary} accumulated over every
        agent/task/site that reported the dataset.
        check sampleResult.json for datastructure
        """
        datasets = {};
        if "AgentJobInfo" not in self.data:
            #there is no report yet (no agent has reported)
            return datasets
        for agentRequestInfo in self.data["AgentJobInfo"].values():
            tasks = agentRequestInfo.get("tasks", [])
            for task in tasks:
                for site in tasks[task].get("sites", []):
                    for outputDS in tasks[task]["sites"][site].get("dataset", {}).keys():
                        #TODO: need update the record instead of replacing.
                        datasets.setdefault(outputDS, ProgressSummary())
                        datasets[outputDS].addProgressReport(tasks[task]["sites"][site]["dataset"][outputDS])
        return datasets
    def filterRequest(self, conditionFunc):
        # Apply a caller-supplied predicate to the raw document.
        return conditionFunc(self.data)
    def getRequestTransition(self):
        # Full status-transition history (list of status records).
        return self.data["request_status"]
    def getRequestStatus(self, timeFlag = False):
        # Latest status entry: with timeFlag the whole record is returned
        # (including its timestamp), otherwise just the status string.
        if timeFlag:
            return self.data["request_status"][-1]
        else:
            return self.data["request_status"][-1]['status']
    def isWorkflowFinished(self):
        """
        check whether workflow is completed including LogCollect and CleanUp tasks
        TODO: If the parent task all failed and next task are not created at all,
        It can't detect complete status.
        If the one of the task doesn't contain any jobs, it will return False
        """
        if len(self.tasks) == 0:
            return False
        for taskInfo in self.tasks.values():
            if not taskInfo.isTaskCompleted():
                return False
        return True
class RequestInfoCollection(object):
    """Holder mapping request name -> RequestInfo for a set of documents."""

    def __init__(self, data):
        self.collection = {}
        self.setData(data)

    def setData(self, data):
        """Build a RequestInfo wrapper for every request document in ``data``."""
        for requestName, requestInfo in data.items():
            self.collection[requestName] = RequestInfo(requestInfo)

    def getData(self):
        return self.collection

    def filterRequests(self, conditionFunc):
        """Subset of requests whose raw document satisfies ``conditionFunc``."""
        return {name: reqInfo for name, reqInfo in self.collection.items()
                if reqInfo.filterRequest(conditionFunc)}

    def getJSONData(self):
        """Per-request, per-agent JSON status summaries."""
        result = {}
        for requestInfo in self.collection.values():
            byAgent = {}
            for agentUrl, jobSummary in requestInfo.getJobSummaryByAgent().items():
                byAgent[agentUrl] = jobSummary.getJSONStatus()
            result[requestInfo.requestName] = byAgent
        return result
|
# Read N and K from stdin; when K exceeds 1 the answer is N - K, otherwise 0.
N, K = map(int, input().split())
print(N - K if K > 1 else 0)
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
load("@bazel_skylib//lib:shell.bzl", "shell")
load("//antlir/bzl:oss_shim.bzl", "buck_genrule")
load("//antlir/bzl:shape.bzl", "shape")
load(":flavor_helpers.bzl", "flavor_helpers")
load(":gpt.shape.bzl", "gpt_partition_t", "gpt_t")
load(":image_utils.bzl", "image_utils")
# Build a gpt_partition_t shape describing one partition of a GPT disk image.
#   package      - target providing the partition contents
#   is_esp       - mark the partition as an EFI System Partition
#   is_bios_boot - mark the partition as a BIOS boot partition
#   name         - optional partition name
def image_gpt_partition(package, is_esp = False, is_bios_boot = False, name = None):
    return shape.new(
        gpt_partition_t,
        package = package,
        is_esp = is_esp,
        is_bios_boot = is_bios_boot,
        name = name,
    )
# Define a buck_genrule that assembles a GPT disk image from `table`, a list
# of gpt_partition_t shapes; the actual work happens in //antlir:gpt, which
# receives the serialized gpt_t shape and a build appliance.
def image_gpt(
        name,
        table,
        disk_guid = None,
        visibility = None,
        build_appliance = None):
    visibility = visibility or []
    # Fall back to the default flavor's build appliance when none is given.
    build_appliance = build_appliance or flavor_helpers.default_flavor_build_appliance
    gpt = shape.new(gpt_t, name = name, table = table, disk_guid = disk_guid)
    buck_genrule(
        name = name,
        bash = image_utils.wrap_bash_build_in_common_boilerplate(
            self_dependency = "//antlir/bzl:image_gpt",
            bash = '''
            $(exe //antlir:gpt) \
                --output-path "$OUT" \
                --gpt {opts_quoted} \
                --build-appliance $(query_outputs {build_appliance}) \
            '''.format(
                opts_quoted = shell.quote(shape.do_not_cache_me_json(gpt)),
                build_appliance = build_appliance,
            ),
            rule_type = "image_gpt",
            target_name = name,
        ),
        # The shape is serialized via do_not_cache_me_json, so the rule's
        # output must not be cached.
        cacheable = False,
        executable = True,
        visibility = visibility,
        antlir_rule = "user-internal",
    )
|
def part1(data):
    # Part one answer: score of the deck that wins a game of regular Combat.
    decks = parse(data)
    winning_deck = combat(*decks)
    return score(winning_deck)
def parse(data):
    """Split the raw input lines into one list of card values per player.

    Player blocks are separated by a blank line; the first line of each block
    is the "Player N:" header and is dropped.
    """
    decks = []
    for block in '\n'.join(data).split('\n\n'):
        lines = block.split('\n')
        decks.append([int(line.strip()) for line in lines[1:]])
    return decks
def combat(a, b):
    """Play regular Combat until one deck is empty; return the winning deck.

    Each round both players reveal their top card; the higher card's owner
    takes both, winner's card first.
    """
    while a and b:
        top_a, top_b = a.pop(0), b.pop(0)
        if top_a > top_b:
            a += [top_a, top_b]
        else:
            b += [top_b, top_a]
    return a if a else b
def score(cards):
    """Deck score: bottom card counts once, next twice, ... top card len(cards) times."""
    total = 0
    for weight, card in enumerate(reversed(cards), 1):
        total += weight * card
    return total
def part2(data):
    # Part two answer: score of the deck that wins Recursive Combat.
    _winner, winning_deck = recursive_combat(*parse(data))
    return score(winning_deck)
def key(a, b):
    """Serialize a pair of decks to one string, '#' separating the players.

    Appears unused here -- recursive_combat tracks seen states as tuples.
    """
    left = ','.join(str(card) for card in a)
    right = ','.join(str(card) for card in b)
    return left + '#' + right
def recursive_combat(a, b):
    """Play Recursive Combat; return (winning player number, winning deck)."""
    previous_rounds = set()
    while a and b:
        state = (tuple(a), tuple(b))
        if state in previous_rounds:
            # Repeated position: player 1 wins the (sub-)game by rule.
            return 1, a
        previous_rounds.add(state)
        top_a, top_b = a.pop(0), b.pop(0)
        if len(a) >= top_a and len(b) >= top_b:
            # Both players have enough cards left: the round is decided by a
            # sub-game played on copies of the next top_a / top_b cards.
            round_winner, _ = recursive_combat(a[:top_a], b[:top_b])
        else:
            round_winner = 1 if top_a > top_b else 2
        if round_winner == 1:
            a += [top_a, top_b]
        else:
            b += [top_b, top_a]
    return (1, a) if len(a) > len(b) else (2, b)
|
template = """
From: <{from_email}>
To: <{to_email}>
Subject: {subject}
{message}"""

# Render and print a sample message by filling the template's placeholders.
fields = {
    "from_email": "a@example.com",
    "to_email": "b@example.com",
    "message": "Here's some mail for you. "
               " Hope you enjoy the message!",
    "subject": "You have mail!",
}
print(template.format(**fields))
|
"""
bitproto.grammars
~~~~~~~~~~~~~~~~~
Grammar rules.
"""
# fmt: off
r_optional_semicolon = """
optional_semicolon : ';'
|
"""
r_start = """
start : open_global_scope global_scope close_global_scope
"""
r_open_global_scope = """
open_global_scope :
"""
r_close_global_scope = """
close_global_scope :
"""
r_global_scope = """
global_scope : global_scope_definitions
"""
r_global_scope_definitions = """
global_scope_definitions : global_scope_definition_unit global_scope_definitions
| global_scope_definition_unit
|
"""
r_global_scope_definition_unit = """
global_scope_definition_unit : import
| option
| alias
| const
| enum
| message
| proto
| comment
| newline
"""
r_proto = """
proto : PROTO IDENTIFIER optional_semicolon
"""
r_comment = """
comment : COMMENT NEWLINE
"""
r_newline = """
newline : NEWLINE
"""
r_import = """
import : IMPORT STRING_LITERAL optional_semicolon
| IMPORT IDENTIFIER STRING_LITERAL optional_semicolon
"""
r_option = """
option : OPTION dotted_identifier '=' option_value optional_semicolon
"""
r_option_value = """
option_value : boolean_literal
| integer_literal
| string_literal
"""
r_alias = """
alias : TYPE IDENTIFIER '=' type optional_semicolon
| TYPEDEF type IDENTIFIER optional_semicolon
"""
r_const = """
const : CONST IDENTIFIER '=' const_value optional_semicolon
"""
r_const_value = """
const_value : boolean_literal
| string_literal
| constant_reference
| calculation_expression
"""
r_calculation_expression = """
calculation_expression : calculation_expression_plus
| calculation_expression_minus
| calculation_expression_times
| calculation_expression_divide
| calculation_expression_group
| integer_literal
| constant_reference_for_calculation
"""
r_calculation_expression_plus = """
calculation_expression_plus : calculation_expression PLUS calculation_expression
"""
r_calculation_expression_minus = """
calculation_expression_minus : calculation_expression MINUS calculation_expression
"""
r_calculation_expression_times = """
calculation_expression_times : calculation_expression TIMES calculation_expression
"""
r_calculation_expression_divide = """
calculation_expression_divide : calculation_expression DIVIDE calculation_expression
"""
r_calculation_expression_group = """
calculation_expression_group : '(' calculation_expression ')'
"""
r_constant_reference_for_calculation = """
constant_reference_for_calculation : constant_reference
"""
r_constant_reference = """
constant_reference : dotted_identifier
"""
r_type = """
type : single_type
| array_type
"""
r_single_type = """
single_type : base_type
| type_reference
"""
r_base_type = """
base_type : BOOL_TYPE
| UINT_TYPE
| INT_TYPE
| BYTE_TYPE
"""
r_type_reference = """
type_reference : dotted_identifier
"""
r_optional_extensible_flag = """
optional_extensible_flag : "'"
|
"""
r_array_type = """
array_type : single_type '[' array_capacity ']' optional_extensible_flag
"""
r_array_capacity = """
array_capacity : INT_LITERAL
| constant_reference_for_array_capacity
"""
r_constant_reference_for_array_capacity = """
constant_reference_for_array_capacity : constant_reference
"""
r_enum = """
enum : open_enum_scope enum_scope close_enum_scope
"""
r_open_enum_scope = """
open_enum_scope : ENUM IDENTIFIER ':' UINT_TYPE '{'
"""
r_enum_scope = """
enum_scope : enum_items
"""
r_close_enum_scope = """
close_enum_scope : '}'
"""
r_enum_items = """
enum_items : enum_item enum_items
| enum_item
|
"""
r_enum_item = """
enum_item : enum_field
| enum_item_unsupported
| comment
| newline
"""
r_enum_item_unsupported = """
enum_item_unsupported : alias
| const
| proto
| import
| option
| enum
| message
| message_field
"""
r_enum_field = """
enum_field : IDENTIFIER '=' integer_literal optional_semicolon
"""
r_message = """
message : open_message_scope message_scope close_message_scope
"""
r_open_message_scope = """
open_message_scope : MESSAGE IDENTIFIER optional_extensible_flag '{'
"""
r_close_message_scope = """
close_message_scope : '}'
"""
r_message_scope = """
message_scope : message_items
"""
r_message_items = """
message_items : message_item message_items
| message_item
|
"""
r_message_item = """
message_item : option
| enum
| message_field
| message
| message_item_unsupported
| comment
| newline
"""
r_message_item_unsupported = """
message_item_unsupported : alias
| const
| proto
| import
"""
r_message_field = """
message_field : type message_field_name '=' INT_LITERAL optional_semicolon
"""
# https://github.com/hit9/bitproto/issues/39
# Allow some keywords to be message names.
r_message_field_name = """
message_field_name : IDENTIFIER
| TYPE
"""
r_boolean_literal = """
boolean_literal : BOOL_LITERAL
"""
r_integer_literal = """
integer_literal : INT_LITERAL
| HEX_LITERAL
"""
r_string_literal = """
string_literal : STRING_LITERAL
"""
r_dotted_identifier = """
dotted_identifier : IDENTIFIER '.' dotted_identifier
| IDENTIFIER
"""
# fmt: on
|
# -*- coding: utf-8 -*-
"""
@author: krakowiakpawel9@gmail.com
@site: e-smartdata.org

Basics of Python lists: creation, mutation, nesting and concatenation.
"""
# An empty list via the constructor (equivalent to []).
empty_list = list()
print(empty_list)
# %%
# Lists are mutable: replace an element in place by index.
techs = ['python', 'java', 'c++', 'go', 'sql']
techs[0] = 'python 3.7'
print(techs)
# %%
# A list may contain duplicate values.
numbers = [3, 5, 3, 5, 23]
print(numbers)
print(type(numbers))
# %%
# A list may mix element types freely.
mixed = ['python', 3.7, 4, True]
print(mixed)
# %%
# Lists can be nested to arbitrary depth.
empty = []
nested = [[1, 2, [3, 'sql']], ['python', 'java', 'go'], 3]
# %%
# A list of lists: len() counts only the top-level items (here 2).
first = ['mleko', 'ziemniaki', 'makaron']
second = ['woda', 'jajka']
bucket = [first, second]
# %%
len(bucket)
# %%
# += extends the list in place with another list's elements.
techs = ['python', 'java', 'c++', 'go', 'sql']
techs += ['javascript']
print(techs)
# %%
# dir() lists all attributes and methods of the list type.
print(dir(list))
|
##############################################################################
# Parte do livro Introdução à Programação com Python
# Autor: Nilo Ney Coutinho Menezes
# Editora Novatec (c) 2010-2020
# Primeira edição - Novembro/2010 - ISBN 978-85-7522-250-8
# Segunda edição - Junho/2014 - ISBN 978-85-7522-408-3
# Terceira Edição - Janeiro/2019 - ISBN 978-85-7522-718-3
#
# Site: https://python.nilo.pro.br/
#
# Arquivo: exercicios3\capitulo 10\exercicio-10-03.py
##############################################################################
class Televisão:
    """A TV whose channel wraps around between cmin and cmax (inclusive)."""

    def __init__(self, min, max):
        # NOTE: the parameter names shadow the builtins min/max, but they
        # are part of the public signature and are kept for compatibility.
        self.ligada = False
        self.canal = min
        self.cmin = min
        self.cmax = max

    def muda_canal_para_baixo(self):
        """Go one channel down, wrapping to cmax when already at cmin."""
        self.canal = self.canal - 1 if self.canal - 1 >= self.cmin else self.cmax

    def muda_canal_para_cima(self):
        """Go one channel up, wrapping to cmin when already at cmax."""
        self.canal = self.canal + 1 if self.canal + 1 <= self.cmax else self.cmin
# Demo: start at the lowest channel (2); going down wraps to 10,
# then going up from 10 wraps back to 2.
tv = Televisão(2, 10)
tv.muda_canal_para_baixo()
print(tv.canal)
tv.muda_canal_para_cima()
print(tv.canal)
|
"""modelinfo_cfg - TEPPr configuration.

This is a mutable object.
* one teppr cfg points to only one dataset
* one teppr cfg can have many trainings and respective evaluations, reportings
  and tracks which of the trainings is published
* The sequence of trainings can be a parallel schedule or a sequence schedule.
* In case it is a sequence schedule, the linkage between the sequence provides
  insight on hyper-parameter tuning.
* Two different teppr cfgs can be inter-related from the perspective of
  transfer learning, i.e. the base model that is being used as the starting
  point.

teppr workflow steps
# step-1
a) point to an AI Dataset
b) create training experiment
   * a) and b) can be done to create the batch workload
c) run training
   * training can run for the whole workload
# step-2
a) create evaluation strategy
b) run the evaluation
# step-3
a) generate training and evaluation reports
b) publish the model to AI port
"""
## deprecated, and only for reference
# teppr cfg: top-level workflow record; the list-valued keys collect the
# per-step runs (train/evaluate/predict/publish/report).
tepprcfg = {
  "created_on": None
  ,"modified_on": None
  ,"aids_dbname": None
  ,"aids_id": None
  ,"timestamp": None
  ,"log_dir": "logs/<dbname>"
  # ,"dnnarch": None
  # ,"framework_type": None
  ,"train_mode": "training"
  ,"test_mode": "inference"
  ,"allowed_file_type":['.txt','.csv','.yml','.json']
  ,"allowed_image_type":['.pdf','.png','.jpg','.jpeg','.gif']
  ,"allowed_video_type":['.mp4']
  ,"data": None
  ,"stats": None
  ,"summary": None
  ,"train":[]
  ,"evaluate": []
  ,"predict": []
  ,"publish": []
  ,"report": []
}
## ARCH CFG
# Training config: staged schedule — presumably Mask R-CNN style progressive
# unfreezing (heads -> 4+ -> all) with a learning-rate drop at the end.
traincfg = {
  "MODE": "training"
  ,"DEVICE": "/gpu:0" ## /cpu:0 or /gpu:0
  ,"WEIGHTS": None
  ,"MODEL_INFO": "mask_rcnn-matterport-coco-1.yml"
  ,"LOAD_WEIGHTS":{
    "BY_NAME": True
    ,"EXCLUDE": ['mrcnn_class_logits', 'mrcnn_bbox_fc', 'mrcnn_bbox', 'mrcnn_mask']
  }
  ,"SCHEDULES":[
    {
      "EPOCHS": 40
      ,"LAYERS": "heads"
      ,"LEARNING_RATE": 0.001
    }
    ,{
      "EPOCHS": 120
      ,"LAYERS": "4+"
      ,"LEARNING_RATE": 0.001
    }
    ,{
      "EPOCHS": 160
      ,"LAYERS": "all"
      ,"LEARNING_RATE": 0.0001
    }
  ]
  ,"CONFIG":{}
}
# Evaluation config: inference mode with fixed image dimensions and a
# confidence threshold for detections.
evaluatecfg = {
  "SAVE_VIZ_AND_JSON": True
  ,"MODE": "inference"
  ,"DEVICE": "/gpu:0" ## /cpu:0 or /gpu:0
  ,"WEIGHTS": None
  ,"MODEL_INFO": "mask_rcnn-vidteq-tsdr-1.yml"
  ,"LOAD_WEIGHTS":{
    "BY_NAME": True
    ,"EXCLUDE": ['mrcnn_class_logits', 'mrcnn_bbox_fc', 'mrcnn_bbox', 'mrcnn_mask']
  }
  ,"CONFIG":{
    "DETECTION_MIN_CONFIDENCE": 0.9
    ,"GPU_COUNT": 1
    ,"IMAGES_PER_GPU": 1
    ,"IMAGE_MIN_DIM": 720
    ,"IMAGE_MAX_DIM": 1280
  }
}
# Prediction config: currently identical to evaluatecfg.
predictcfg = {
  "SAVE_VIZ_AND_JSON": True
  ,"MODE": "inference"
  ,"DEVICE": "/gpu:0" ## /cpu:0 or /gpu:0
  ,"WEIGHTS": None
  ,"MODEL_INFO": "mask_rcnn-vidteq-tsdr-1.yml"
  ,"LOAD_WEIGHTS":{
    "BY_NAME": True
    ,"EXCLUDE": ['mrcnn_class_logits', 'mrcnn_bbox_fc', 'mrcnn_bbox', 'mrcnn_mask']
  }
  ,"CONFIG":{
    "DETECTION_MIN_CONFIDENCE": 0.9
    ,"GPU_COUNT": 1
    ,"IMAGES_PER_GPU": 1
    ,"IMAGE_MIN_DIM": 720
    ,"IMAGE_MAX_DIM": 1280
  }
}
## publish_cfg is model info, which is linked to at least one item (or more)
## in the teppr cfg
publishcfg = {
  "DNNARCH": None
  ,"FRAMEWORK_TYPE": None
  ,"ID": "tsdr"
  ,"PROBLEM_ID": "tsdr_segmentation"
  ,"ORG_NAME": "vidteq"
  ,"REL_NUM": None
  ,"CONFIG": {}
  ,"NAME": "tsdr"
  ,"DATASET": None
  ,"WEIGHTS_PATH": None
  ,"WEIGHTS": "ORG_NAME/ID/REL_NUM/DNNARCH"
  ,"PROTOTXT": None
  ,"NUM_CLASSES": None
  ,"CLASSINFO": []
  ,"CLASSES": []
  ,"DESCRIPTION": None
  ,"TIMESTAMP": None
}
# NOTE(review): `input`/`output` shadow Python builtins; they appear to be
# fixture strings for an ASP/DLV-style solver test exercising the #max
# aggregate with '==' vs '=' comparison — confirm against the test harness.
input = """
#maxint = 10.
total_score(S,X) :- t, S==9, X = #max{ Y :score(Y) }.
total_score1(S,X) :- t, S=9, X = #max{ Y :score(Y) }.
"""
# Expected solver output (an empty answer set).
output = """
{}
"""
|
# 2021 June 13 13:48 - 14:06
# 10101
# 01010
# Naturally, the sum would be 11111, which is 100000 - 1.
# The essence then is to know the length of its bits representation.
class Solution:
    def findComplement(self, num: int) -> int:
        """Return the complement of num's binary representation.

        For a number with k significant bits, the complement is
        (2**k - 1) - num: a k-bit mask of all ones minus the original.
        """
        # int.bit_length() replaces the original manual shift-and-count
        # loop; behavior is identical (including num == 0 -> 0).
        return (1 << num.bit_length()) - 1 - num
# Or else, we can certainly flip it bit-by-bit. Speed-wise quite comparable to
# the last solution, since there's no asymptotic difference — both walk the bits once.
class Solution1:
    """Flip the number bit by bit, setting each zero bit in the result."""

    def findComplement(self, num: int) -> int:
        result = 0
        position = 0
        while num:
            # Set this result bit only where the input bit is 0.
            if not (num & 1):
                result |= 1 << position
            position += 1
            num >>= 1
        return result
# Now comes the most efficient solution to this problem!
# A trick to get the most significant bit:
# for any n,
# n |= n >> 1
# n |= n >> 2
# n |= n >> 4
# n |= n >> 8
# n |= n >> 16
# Starting with a 32-bit int n, it's guaranteed that we'd get all 1's with the
# same number of bits after the above operations. And this is because:
# 1) The most significant bit (left bit) would always be a set bit.
# 2) OR-ing n and n >> 1 would give us n WITH 2 LEADING SET BITS.
# 3) Now when we bitwise or n with n >> 2, we get n with 4 set bits, and it keeps
# going.
# 4) Since we get at most 32 set bits in a 32 bit int, the above 5 ops would
#    guarantee that n would be all set by the end.
# 5) If we don't get to certain shifts, but already has an all-set n, then what
# the following operations do would be only oring n with 0, making the result
# stays at n.
# With this idea, we can solve it as follow:
class Solution2:
    """Smear the highest set bit rightward to build an all-ones mask."""

    def findComplement(self, num: int) -> int:
        mask = num
        # After these shifts, mask has every bit up to the MSB set.
        for shift in (1, 2, 4, 8, 16):
            mask |= mask >> shift
        return mask - num
if __name__ == "__main__":
    # Sanity checks: complement of 101b is 010b (2); complement of 1b is 0.
    print(Solution2().findComplement(5))
    print(Solution2().findComplement(1))
# A way of notifying a number of observer classes depending on the state of observed class is changed.
class Subject:
    """The observed object: keeps a registry of observers and notifies them."""

    def __init__(self):
        self._observers = []

    def notify(self, modifier=None):
        """Call update() on every observer except `modifier` (the initiator)."""
        for watcher in self._observers:
            if modifier != watcher:
                watcher.update(self)

    def attach(self, observer):
        """Register an observer once; duplicates are ignored."""
        if observer not in self._observers:
            self._observers.append(observer)

    def detach(self, observer):
        """Unregister an observer; unknown observers are silently ignored."""
        try:
            self._observers.remove(observer)
        except ValueError:
            pass
class Data(Subject):
    """A named integer value that notifies observers whenever it is set."""

    def __init__(self, name=''):
        # Idiomatic super() call instead of the old-style Subject.__init__(self).
        super().__init__()
        self.name = name
        self._data = 0

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, value):
        # Assign first, then broadcast, so observers read the new value.
        self._data = value
        self.notify()
class HexViewer:
    """Observer that prints the subject's data in hexadecimal."""

    def update(self, subject):
        message = 'HexViewer: Subject {} has data 0x{:x}'.format(
            subject.name, subject.data)
        print(message)
class OctalViewer:
    """Observer that prints the subject's data in octal."""

    def update(self, subject):
        # Fixed spacing: the original concatenated 'Subject' + name and
        # name + 'has data' with no separating spaces, printing e.g.
        # "SubjectData 1has data 0o17".
        print('OctalViewer: Subject ' + str(subject.name) + ' has data ' +
              str(oct(subject.data)))
class DecimalViewer:
    """Observer that prints the subject's data in decimal."""

    def update(self, subject):
        # Fixed format: the original '% s'/'% d' applied the space
        # conversion flag, producing a stray blank before the number.
        print('DecimalViewer: Subject %s has data %d' %
              (subject.name, subject.data))
if __name__ == "__main__":
    # Wire every viewer to both data sources, then trigger notifications
    # by assigning through the `data` property.
    first = Data('Data 1')
    second = Data('Data 2')
    viewers = [DecimalViewer(), HexViewer(), OctalViewer()]
    for source in (first, second):
        for viewer in viewers:
            source.attach(viewer)
    first.data = 10
    second.data = 15
|
# Largest and smallest values
'''Read three numbers and show which is the
LARGEST and which is the SMALLEST.'''
n1 = int(input('Digite um número: '))
n2 = int(input('Digite outro número: '))
n3 = int(input('Digite mais um número: '))

# Smallest: start from n1, replace when n2/n3 are <= the others.
# Using <= (not <) fixes the tie case: with strict '<', inputs like
# 5, 3, 3 wrongly reported 5 as the smallest, since neither 3 was
# strictly less than the other.
menor = n1
if n2 <= n1 and n2 <= n3:
    menor = n2
if n3 <= n1 and n3 <= n2:
    menor = n3

# Largest: same approach with >= (same tie fix).
maior = n1
if n2 >= n1 and n2 >= n3:
    maior = n2
if n3 >= n1 and n3 >= n2:
    maior = n3

print('O menor valor digitado foi \033[32m{}\033[m'.format(menor))
# Added the missing \033[m reset so the green color does not leak
# past this line (the first print already had it).
print('O maior valor digitado foi \033[32m{}\033[m'.format(maior))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.