content stringlengths 7 1.05M |
|---|
def counter(start, stop):
    """Return a string counting from start to stop, inclusive.

    Counts down when start > stop, otherwise counts up.  Values are
    comma-separated with no trailing comma, e.g. "Counting up: 1,2,3".
    """
    x = start
    if start > stop:
        return_string = "Counting down: "
        while x >= stop:
            return_string += str(x)
            if x > stop:  # not the last value yet
                return_string += ","
            x -= 1  # bug fix: original never changed x, so the loop never ended
    else:
        return_string = "Counting up: "
        while x <= stop:
            return_string += str(x)
            if x < stop:  # not the last value yet
                return_string += ","
            x += 1  # bug fix: original never changed x, so the loop never ended
    return return_string


print(counter(1, 10))
|
number = 9
float_number = 9.0
print(type(number))       # -> <class 'int'>
print(float_number)       # printed as 9.0
print(int(float_number))  # float truncated toward zero -> 9
|
""" This file contains temporary hard-coded holiday overrides that are not in Workalendar.
It is only intended to be temporary, and Workalendar should be updated to hold the
"correct" dates as the source of truth. """
OVERRIDES = {} |
# https://www.codechef.com/problems/FLOW005
# Minimum number of notes for each amount.  With denominations
# 100, 50, 10, 5, 2, 1 the greedy choice (largest note first) is optimal.
for _ in range(int(input())):
    amount = int(input())
    notes = 0
    for denom in (100, 50, 10, 5, 2, 1):
        notes += amount // denom
        amount %= denom
    print(notes)
def fibonacci(num):
    """Return the num-th Fibonacci number, 1-indexed.

    fibonacci(1) == 0, fibonacci(2) == 1, fibonacci(3) == 1, ...
    For num < 1 a message is printed and None is returned (the original
    behavior for negative input; it previously crashed for num == 0 by
    recursing with negative arguments).
    """
    if num < 1:  # bug fix: was `num < 0`, so num == 0 fell into the recursion
        print("Make it a positive number")
        return None
    if num == 1:
        return 0
    if num == 2:
        return 1
    # Iterative instead of naive double recursion (which was O(2^n)).
    a, b = 0, 1
    for _ in range(num - 2):
        a, b = b, a + b
    return b
|
def close2zero(t):
    """Return the distinct integer in the whitespace-separated string t
    that is closest to zero.

    A tie between +n and -n resolves to the positive value.  Returns 0 when
    t cannot be parsed or holds fewer than two distinct integers (preserves
    the original fallback behavior).
    """
    try:
        # Distinct tokens, parsed to int, ordered by distance from zero.
        i, j = sorted(map(int, set(t.split())), key=abs)[:2]
    except (ValueError, AttributeError):
        # ValueError: a non-integer token, or fewer than two distinct values
        #             (unpacking fails); AttributeError: t is not a string.
        # Narrowed from the original bare `except:` which hid real bugs.
        return 0
    return max(i, j) if abs(i) == abs(j) else i
|
# ANSI SGR escape sequences keyed by style name.  The last group are
# 24-bit foreground colors for Netrunner faction names.
# (Fix: the literals needlessly carried f-string prefixes with no
# placeholders -- plain string literals are equivalent.)
styleDict = {
    'clear': '\033[0m',
    'bold': '\033[1m',
    'dim': '\033[2m',
    'italic': '\033[3m',
    'underline': '\033[4m',
    'blinking': '\033[5m',
    'anarch': '\033[38;2;255;106;51m',
    'criminal': '\033[38;2;65;105;255m',
    'shaper': '\033[38;2;50;205;50m',
    'haas-bioroid': '\033[38;2;138;43;226m',
    'jinteki': '\033[38;2;220;20;60m',
    'nbn': '\033[38;2;255;140;0m',
    'weyland-consortium': '\033[38;2;0;100;0m',
    'adam': '\033[38;2;173;167;73m',
    'sunny-lebeau': '\033[38;2;196;196;196m',
    'apex': '\033[38;2;231;57;56m',
    'neutral-runner': '',
    'neutral-corp': ''
}


def style(string, styles):
    """Wrap `string` in the escape codes named in `styles`, then reset.

    `styles` is an iterable of styleDict keys; unknown keys raise KeyError.
    """
    return ''.join(styleDict[name] for name in styles) + string + styleDict['clear']
|
# Database connection settings for the local development environment
# (port 5432 -- presumably PostgreSQL; confirm against the driver config).
DB_HOST = 'localhost'
DB_PORT = 5432
DB_USER = 'platappform'
DB_PASSWORD = 'development'  # dev-only credential; never reuse in production
DB_DATABASE = 'platappform_dev'
# Connection-pool bounds.
DB_POOL_MIN_SIZE = 5 #default
DB_POOL_MAX_SIZE = 10 #default
|
# Application definitions
INSTALLED_APPS = [
    # Django contrib applications
    'django.contrib.admin',
    'django.contrib.admindocs',
    'django.contrib.redirects',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.sites',
    'django.contrib.staticfiles',
    'django.contrib.syndication',
    # Third-party applications
    'crispy_forms',
    # Project applications
    'authentication',
    'authentication.account',
    'authentication.socialaccount',
    'welcome',
    'project.apps.curiosity',
]
|
"""
200. Number of Islands
Given a 2d grid map of '1's (land) and '0's (water), count the number of islands. An island is surrounded by water and is formed by connecting adjacent lands horizontally or vertically. You may assume all four edges of the grid are all surrounded by water.
Example 1:
Input:
11110
11010
11000
00000
Output: 1
Example 2:
Input:
11000
11000
00100
00011
Output: 3
"""
class Solution:
    """LeetCode 200: count connected components of '1' cells.

    Adjacency is 4-directional; each island found is sunk in place by a
    depth-first flood fill, so the grid is mutated.
    """

    def numIslands(self, grid: List[List[str]]) -> int:
        count = 0
        for row in range(len(grid)):
            for col in range(len(grid[0])):
                if grid[row][col] == "1":
                    self.dfs(grid, row, col)  # sink the whole island
                    count += 1
        return count

    def dfs(self, grid, i, j):
        # Mark the current land cell as water, then flood into every
        # in-bounds neighbour that is still land.
        grid[i][j] = "0"
        for di, dj in ((-1, 0), (1, 0), (0, -1), (0, 1)):
            ni, nj = i + di, j + dj
            if 0 <= ni < len(grid) and 0 <= nj < len(grid[0]) and grid[ni][nj] == "1":
                self.dfs(grid, ni, nj)
|
###############################################################################################
# A single preorder traversal suffices; the only subtlety is saving a node's
# right child before recursing into the left subtree, because relinking
# overwrites it.
###########
# Time complexity:  O(n) -- each node is visited once.
# Space complexity: O(n) -- worst case the tree degenerates into a chain.
###############################################################################################
class Solution:
    def flatten(self, root: TreeNode) -> None:
        """Flatten the tree in place into a right-skewed list, preorder order."""
        prev = None  # previously visited node in preorder (None before the first)

        def visit(node, depth):
            nonlocal prev
            if not node:
                return
            if prev:  # not the first node: hook it after the previous one
                prev.left = None
                prev.right = node
            prev = node
            saved_right = node.right  # save before the relink above clobbers it
            visit(node.left, depth + 1)
            visit(saved_right, depth + 1)

        visit(root, 1)
class Component:
    """Minimal drawable UI element that redraws only when marked dirty.

    NOTE(review): `self.surface` is read in update() but never assigned in
    __init__ -- presumably a subclass or caller sets it before the first
    update; confirm against the rest of the codebase.
    """

    def __init__(self, position):
        self.position = position  # blit destination
        self.dirty = True         # needs (re)drawing on the next update
        self.focus = False        # whether this component has input focus

    def update(self, screen):
        """Blit self.surface onto screen once, then clear the dirty flag."""
        if not self.dirty:
            return
        screen.blit(self.surface, self.position)
        self.dirty = False
|
#
# THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS
# FOR A PARTICULAR PURPOSE. THIS CODE AND INFORMATION ARE NOT SUPPORTED BY XEBIALABS.
#
# Collect the connection settings entered through the UI.
params = {
    key: getattr(configuration, key)
    for key in (
        'url', 'username', 'password',
        'proxyHost', 'proxyPort', 'proxyUsername', 'proxyPassword',
    )
}

# Smoke-test connectivity by fetching the authenticated user.
response = HttpRequest(params).get('/api/v4/user', contentType='application/json')

# Anything other than HTTP 200 means the connection check failed.
if response.status != 200:
    raise Exception(
        "Failed to connect to Blazemeter Server. Status: %s" % response.status
    )
# Per-city coordinate pairs -- presumably (B, L) = (latitude, longitude) in
# decimal degrees for Polish towns; TODO confirm datum/axis order with the
# producer of this data.
cities_BL_coordinates = {
    'Aleksandrów Łódzki': (51.8196533, 19.3035375),
    'Augustów': (53.8437331, 22.9802357),
    'Bełchatów': (51.3650175, 19.3691833),
    'Bełżyce': (51.175063, 22.2749995),
    'Białogard': (54.0064367, 15.9870974),
    'Białopole': (50.9847276, 23.7331781),
    'Białystok': (53.127505049999996, 23.147050870161664),
    'Bielsko-Biała': (49.81207845, 19.029198802013944),
    'Bieruń': (50.0931922, 19.0918262),
    'Biłgoraj': (50.5424159, 22.7209754),
    'Bochnia': (49.9693563, 20.430541),
    'Bolesławiec': (51.2630333, 15.5651049),
    'Braniewo': (54.3824857, 19.8275588),
    'Brzeg': (50.8614882, 17.470497),
    'Brzesko': (49.9678396, 20.6068496),
    'Brzeziny': (51.8004949, 19.7517694),
    'Busko-Zdrój': (50.46759, 20.71919),
    'Bydgoszcz': (53.12974625, 18.029369658534854),
    'Bytom': (50.3657432, 18.87153251609449),
    'Chełm': (51.13558825, 23.493936945891463),
    'Chełmno': (53.3493915, 18.4235473),
    'Chodzież': (52.9886824, 16.9159506),
    'Chorzów': (50.288556299999996, 18.97044253093511),
    'Chrzanów': (50.1411926, 19.4028502),
    'Ciechanów': (52.8819798, 20.6191479),
    'Cieszyn': (49.7488623, 18.6334308),
    'Cieszyna': (49.8652113, 21.615579),
    'Cybinka': (52.1940293, 14.7968841),
    'Czarnków': (52.9050335, 16.5639572),
    'Częstochowa': (50.8089997, 19.1244089281233),
    'Dąbrowa Górnicza': (50.36913425, 19.289871101330505),
    'Dąbrowa Tarnowska': (50.1743495, 20.9850707),
    'Dębica': (50.0514601, 21.4104804),
    'Elbląg': (54.1988997, 19.44108568736702),
    'Gdańsk': (54.36143405, 18.62821847557274),
    'Gdynia': (54.50383255, 18.462806155884614),
    'Giżycko': (54.0400255, 21.7715569),
    'Gliwice': (50.30113145, 18.662347227971978),
    'Gniezno': (52.5359736, 17.595889),
    'Gorlice': (49.65829, 21.1593832),
    'Gorzów Wielkopolski': (52.73057555, 15.210755319093852),
    'Grodzisk Wielkopolski': (52.2263198, 16.3628106),
    'Grudziądz': (53.47251195, 18.761893676880046),
    'Gryfino': (53.2535889, 14.4924243),
    'Grójec': (51.8655126, 20.8675017),
    'Góra': (51.6681499, 16.5393277),
    'Głowno': (51.9660873, 19.7135665),
    'Głubczyce': (50.2000941, 17.8293199),
    'Hrubieszów': (50.8047363, 23.8873816),
    'Iława': (53.5976095, 19.5612175),
    'Janów Lubelski': (50.7065895, 22.4110663),
    'Jarosław': (50.018611, 22.679722),
    'Jasło': (49.7470465, 21.4743139),
    'Jelenia Góra': (50.9031028, 15.7344306),
    'Kalisz': (51.74736505, 18.079590087700034),
    'Kartuzy': (54.3342281, 18.1973639),
    'Katowice': (50.2137321, 19.00588775660632),
    'Kielce': (50.85403585, 20.609914352101452),
    'Konin': (52.25590205, 18.267866211748718),
    'Koszalin': (54.20727105, 16.217530768271565),
    'Kozienice': (51.5849678, 21.549883),
    'Kołobrzeg': (54.1759614, 15.5764209),
    'Kościan': (52.086325, 16.6449995),
    'Kraków': (50.0469432, 19.997153435836697),
    'Krapkowice': (50.4747413, 17.9675262),
    'Kraśnik': (50.9249236, 22.229206),
    'Krosno': (49.68875655, 21.753404714509344),
    'Krotoszyn': (51.695926, 17.4374158),
    'Kutno': (52.2317559, 19.3570182),
    'Kwidzyn': (53.7335842, 18.9319222),
    'Kędzierzyn Koźle': (50.3448836, 18.2109594),
    'Kędzierzyn-Koźle': (50.3448836, 18.2109594),
    'Kętrzyn': (54.0764176, 21.3752627),
    'Legnica': (51.2081617, 16.1603187),
    'Lesko': (49.4689014, 22.3303816),
    'Leszno': (51.8436068, 16.58042182076424),
    'Leżajsk': (50.2621687, 22.418892),
    'Limanowa': (49.7061713, 20.4217266),
    'Lubaczów': (50.1574473, 23.1225859),
    'Lubań': (51.118056, 15.289167),
    'Lubin': (51.3974465, 16.2051275),
    'Lublin': (51.218194499999996, 22.554677565192677),
    'Lubliniec': (50.669016, 18.6825574),
    'Lubsko': (51.7881902, 14.9721266),
    'Mielec': (50.2892506, 21.4243685),
    'Mińsk Mazowiecki': (52.17959585, 21.56945595388331),
    'Myszków': (50.5736481, 19.3219413),
    'Mysłowice': (50.2096333, 19.13277507211892),
    'Myślenice': (49.834608, 19.9384283),
    'Mława': (53.1116182, 20.3831732),
    'Nakło nad Notecią': (53.1386896, 17.5998891),
    'Niedabyl': (51.6635408, 21.0813158),
    'Nisko': (50.5198985, 22.139886),
    'Nowa Sól': (51.8019595, 15.7175608),
    'Nowe Miasto Lubawskie': (53.4255933, 19.5936277),
    'Nowe Miasto nad Pilicą': (51.616692, 20.5796171),
    'Nowy Dwór Gdański': (54.2146299, 19.1150846),
    'Nowy Sącz': (49.61030395, 20.71595521113809),
    'Nysa': (50.4738029, 17.3324733),
    'Olecko': (54.0398024, 22.4934246),
    'Olesno': (50.8766575, 18.4219452),
    'Oleśnica': (51.2102428, 17.38294699262042),
    'Olsztyn': (53.7767239, 20.477781292210345),
    'Opoczno': (51.3782439, 20.2931754),
    'Opole': (50.678792900000005, 17.929884436033525),
    'Ostrowiec Świętokrzyski': (50.9398768, 21.38743),
    'Ostróda': (53.7028533, 19.9623028),
    'Ostrów Wielkopolski': (51.6495023, 17.8167085),
    'Oświęcim': (50.0382443, 19.2214053),
    'Pabianice': (51.6639859, 19.3535024),
    'Piotrków Trybunalski': (51.412841400000005, 19.688801329018943),
    'Pisz': (53.6294943, 21.8098191),
    'Piła': (53.1493864, 16.7390964),
    'Pińczów': (50.5201929, 20.5256241),
    'Poddębice': (51.8931973, 18.9517348),
    'Police': (53.5486435, 14.5657399),
    'Poznań': (52.40063215, 16.91978694287606),
    'Prudnik': (50.3217212, 17.5801041),
    'Przemyśl': (49.78355525, 22.78851130307014),
    'Przysucha': (51.3585043, 20.6285468),
    'Puławy': (51.4171713, 21.9720582),
    'Płock': (52.535347099999996, 19.713630180599928),
    'Racibórz': (50.0917343, 18.2196766),
    'Radom': (51.417157599999996, 21.160945939663797),
    'Radomsko': (51.0674769, 19.4446109),
    'Radzanów': (52.94307, 20.09258),
    'Rawa Mazowiecka': (51.7624929, 20.2519293),
    'Rawiczów': (51.92519, 20.20046),
    'Rozprza': (51.3016672, 19.6447813),
    'Ruda Śląska': (50.270303, 18.864375646334572),
    'Rybnik': (50.1105691, 18.553187366087336),
    'Rzeszów': (50.00715125, 22.009911525182105),
    'Sanok': (49.5609053, 22.207028),
    'Sieradz': (51.5942475, 18.7378774),
    'Sierpc': (52.8529055, 19.6675433),
    'Skarżysko': (51.1138821, 20.857883),
    'Skierniewice': (51.95805445, 20.144870111023707),
    'Sobolewo': (53.110278, 23.270278),
    'Sosnowiec': (50.271229500000004, 19.215561953947084),
    'Stalowa Wola': (50.5653105, 22.0644075),
    'Starachowice': (51.0518865, 21.0761377),
    'Stargard': (53.3379935, 15.0350493),
    'Staszów': (50.5612276, 21.1677849),
    'Strachowice': (51.1252778, 16.2597222),
    'Stryków': (51.9009447, 19.6036969),
    'Strzelce Krajeńskie': (52.8785255, 15.5325925),
    'Strzelce Opolskie': (50.5109033, 18.3004547),
    'Strzelin': (50.78092, 17.0683947),
    'Strzyżów': (49.8702087, 21.7856291),
    'Sucha': (51.029167, 15.301944),
    'Sulęcin': (52.4413768, 15.1161501),
    'Szczecin': (53.42968095, 14.592912670292268),
    'Szczyrk': (49.7165976, 19.0254613),
    'Szczytno': (53.565489, 20.991589),
    'Słubice': (52.3412273, 14.549452),
    'Tarnobrzeg': (50.58372, 21.711638132824728),
    'Tarnowskie Góry': (50.4445194, 18.8554757),
    'Tarnów': (50.025988299999995, 20.96405842696229),
    'Tczew': (54.0869532, 18.8000293),
    'Tolkmicko': (54.3202314, 19.5269874),
    'Tomaszów Lubelski': (50.4573288, 23.4170362),
    'Tomaszów Mazowiecki': (51.5316918, 20.0084195),
    'Toruń': (53.0145361, 18.596583144651557),
    'Turek': (52.01595, 18.50191),
    'Tychy': (50.126498049999995, 19.006784585824853),
    'Wadowice': (49.8834484, 19.4925345),
    'Warszawa': (52.2337172, 21.07141112883227),
    'Wałbrzych': (50.785221, 16.284640336127822),
    'Wałcz': (53.269745, 16.4670655),
    'Wejherowo': (54.6026348, 18.247934),
    'Wieliczka': (49.9823773, 20.0602114),
    'Wolsztyn': (52.11725, 16.1126622),
    'Wołów': (51.3363841, 16.6486903),
    'Wrocław': (51.1263106, 16.97819633051261),
    'Wschowa': (51.8038354, 16.3172771),
    'Włocławek': (52.6655636, 19.096129748054388),
    'Zambrów': (52.98552, 22.24117),
    'Zamość': (50.721273, 23.259588703837892),
    'Zawidz': (52.8294792, 19.843663),
    'Zawiercie': (50.4844382, 19.433394),
    'Zbiczno': (53.3394871, 19.3777504),
    'Zgierz': (51.8559434, 19.4056443),
    'Zielona Góra': (51.92766315, 15.549005417916433),
    'Łańcut': (50.0690725, 22.2320924),
    'Łomża': (53.18243065, 22.052183817808846),
    'Łowicz': (52.107683, 19.9447873),
    'Łuków': (51.93008, 22.3790669),
    'Łódź': (51.7687323, 19.4569911),
    'Śrem': (51.6428477, 15.9255544),
    'Świdnica': (50.8424835, 16.4870549),
    'Świdnik': (51.219722, 22.7),
    'Świdwin': (53.7747044, 15.7773514),
    'Świebodzice': (50.859573, 16.3196056),
    'Świebodzin': (52.2498357, 15.5324973),
    'Żary': (51.6385162, 15.1422487),
    'Żegań': (51.617222, 15.314722),
    'Żnin': (52.848462, 17.7218791),
    'Żychlin': (52.2440088, 19.6260932),
    'Żyrardów': (52.0543308, 20.4435009)}
# Country-level coordinate pairs in the same (lat, lon) convention as above.
countries_BL_coordinates = {'Norway': (60.5000209, 9.0999715), 'Germany': (51.0834196, 10.4234469), 'Brazil': (-10.3333333, -53.2), 'Austria': (47.2000338, 13.199959), 'Italy': (42.6384261, 12.674297), 'Czechia': (49.8167003, 15.4749544), 'Spain': (39.3262345, -4.8380649), 'France': (46.603354, 1.8883335), 'United Kingdom': (54.7023545, -3.2765753), 'Ukraine': (49.4871968, 31.2718321), 'Kenya': (1.4419683, 38.4313975), 'Israel': (31.5313113, 34.8667654), 'Ireland': (52.865196, -7.9794599), 'Switzerland': (46.7985624, 8.2319736), 'Denmark': (55.670249, 10.3333283), 'Belgium': (50.6402809, 4.6667145), 'Georgia': (41.716667, 44.783333), 'United States': (39.7837304, -100.4458825), 'Netherlands': (52.5001698, 5.7480821)}
|
title = ' MULTIPLICATION TABLES '
line = len(title) * '-'  # underline matching the title width
print(f'{title}\n{line}')

# Tables 1 through 12, each followed by a blank separator line.
for x in range(1, 13):
    for factor in range(1, 13):
        print(f' {x} x {factor} = {x * factor}')
    print('')
|
def list_view(tree):
    """Print every movie in `tree` (via its list_by_name()), then pause.

    Prints a placeholder message when the tree is empty; waits for ENTER
    before returning.
    """
    print("Listing\n")
    movies = tree.list_by_name()
    if not movies:
        print("Empty tree.")
    else:
        print("Every movie in tree:")
        for entry in movies:
            print(entry)
    input("ENTER to continue")
|
class GameObject:
    """Movable rectangular entity with axis-aligned physics bounds.

    Each entry of bounds_offsets is expected to be a 4-sequence of
    fractional offsets (x1, y1, x2, y2) relative to the object's width and
    height; update() turns them into absolute coordinates.
    """

    def __init__(self, id, bounds_offsets, width, height, pos_x, pos_y, speed):
        self.id = id
        self.bounds_offsets = bounds_offsets
        # One (initially empty) bound dict per offset entry; filled by update().
        self.bounds = [{} for _ in bounds_offsets]
        self.width = width    # In meters
        self.height = height  # In meters
        self.pos_x = pos_x    # In meters
        self.pos_y = pos_y    # In meters
        self.speed = speed    # In m/s

    # Getters
    def getId(self):
        return self.id

    def getBounds(self):
        # Shallow copy so callers cannot swap out the internal list.
        return self.bounds.copy()

    def getWidth(self):
        return self.width

    def getHeight(self):
        return self.height

    def getX(self):
        return self.pos_x

    def getY(self):
        return self.pos_y

    def getSpeed(self):
        return self.speed

    # Setters
    def setX(self, pos_x):
        self.pos_x = pos_x  # In meters

    def setY(self, pos_y):
        self.pos_y = pos_y  # In meters

    def setSpeed(self, speed):
        self.speed = speed  # In m/s

    # Update UI
    def update(self, dt):
        """Recompute every physics bound from the current position/size."""
        for offsets, bound in zip(self.bounds_offsets, self.bounds):
            if len(offsets) != 4:
                continue  # malformed entry: leave its bound untouched
            bound["x1"] = self.getX() + offsets[0] * self.getWidth()
            bound["y1"] = self.getY() + offsets[1] * self.getHeight()
            bound["x2"] = self.getX() + offsets[2] * self.getWidth()
            bound["y2"] = self.getY() + offsets[3] * self.getHeight()
|
#
# Copyright (c) 2017-2018 Joy Diamond. All rights reserved.
#
# NOTE(review): indentation was lost in this copy of the file and has been
# reconstructed from the code's structure -- verify nesting against the
# original Sapphire source before relying on it.
@gem('Sapphire.UnaryExpression')
def gem():
    # Parse-tree node for unary expressions plus memoizing "conjure"
    # factories that cache instances per (expression, frill) pair.
    require_gem('Sapphire.Tree')

    @share
    class UnaryExpression(SapphireTrunk):
        __slots__ = ((
            'a',                    # Expression being wrapped by the unary operator
        ))

        class_order = CLASS_ORDER__UNARY_EXPRESSION
        is_colon = false
        is_special_operator = false

        def __init__(t, a):
            t.a = a

        def __repr__(t):
            return arrange('<%s %r>', t.__class__.__name__, t.a)

        def count_newlines(t):
            return t.a.count_newlines()

        def display_token(t):
            return arrange('<%s %s>', t.display_name, t.a.display_token())

        def dump_token(t, f, newline = true):
            f.partial('<%s ', t.display_name)
            t.frill.dump_token(f)
            r = t.a.dump_token(f, false)
            return f.token_result(r, newline)

        order = order__frill_a

        def write(t, w):
            w(t.frill.s)
            t.a.write(w)

    # `k1` aliases the first cache key (the wrapped expression).
    UnaryExpression.k1 = UnaryExpression.a

    @share
    def produce_conjure_unary_expression(name, Meta):
        # Build a memoizing factory for `Meta` (a UnaryExpression subclass).
        cache = create_cache(name, conjure_nub)
        lookup = cache.lookup
        provide = cache.provide
        store = cache.store

        def conjure_UnaryExpression_WithFrill(a, frill):
            # Lazily create (and cache) a Meta subclass that also stores a
            # non-default frill in its own slot.
            UnaryExpression_WithFrill = lookup_adjusted_meta(Meta)

            if UnaryExpression_WithFrill is none:
                class UnaryExpression_WithFrill(Meta):
                    __slots__ = ((
                        'frill',    # Operator*
                    ))

                    def __init__(t, a, frill):
                        t.a = a
                        t.frill = frill

                    def __repr__(t):
                        return arrange('<%s %r %r>', t.__class__.__name__, t.frill, t.a)

                    def count_newlines(t):
                        return t.a.count_newlines() + t.frill.count_newlines()

                    # Use Meta's frill-aware display_token if it defines one;
                    # otherwise fall back to a generic rendering.
                    display_token = attribute(Meta, 'display_token__frill', none)

                    if display_token is none:
                        def display_token(t):
                            return arrange('<%s+frill %s %s>', t.display_name, t.frill.display_token(), t.a.display_token())

                # `k2` aliases the second cache key (the frill).
                UnaryExpression_WithFrill.k2 = UnaryExpression_WithFrill.frill

                if __debug__:
                    UnaryExpression_WithFrill.__name__ = intern_arrange('%s_WithFrill', Meta.__name__)

                store_adjusted_meta(Meta, UnaryExpression_WithFrill)

            return UnaryExpression_WithFrill(a, frill)

        conjure_dual__21 = produce_conjure_unique_dual__21(
            name,
            conjure_UnaryExpression_WithFrill,
            cache,
            lookup,
            store,
        )

        meta_frill = Meta.frill

        @rename('conjure_%s', name)
        def conjure_unary_expression(frill, a):
            # Default frill: cache on `a` alone; otherwise use the dual cache.
            if frill is meta_frill:
                return (lookup(a)) or (provide(a, Meta(a)))

            return conjure_dual__21(a, frill)

        return conjure_unary_expression

    class NegativeExpression(UnaryExpression):
        __slots__ = (())
        display_name = '-'
        frill = conjure_action_word('-', '-')
        scout_variables = scout_variables__a

    class NotExpression(UnaryExpression):
        __slots__ = (())
        display_name = 'not'
        frill = NOT__W
        mutate = produce_mutate__frill__a__priority('not-expression', PRIORITY_UNARY)
        scout_variables = scout_variables__a

    class StarArgument(UnaryExpression):
        __slots__ = (())
        display_name = '*-argument'
        frill = conjure_star_sign('*')
        scout_variables = scout_variables__a

    class StarParameter(UnaryExpression):
        __slots__ = (())
        display_name = '*-parameter'
        frill = conjure_star_sign('*')
        is_atom = true
        add_parameters = add_parameters__a
        scout_default_values = scout_default_values__a

    class TwosComplementExpression(UnaryExpression):
        __slots__ = (())
        display_name = '~'
        frill = conjure_action_word('~', '~')
        scout_variables = scout_variables__a

    # Memoizing factories, one per unary-expression kind.
    conjure_negative_expression = produce_conjure_unary_expression('negative', NegativeExpression)
    conjure_not_expression = produce_conjure_unary_expression('not', NotExpression)
    conjure_star_argument = produce_conjure_unary_expression('*-argument', StarArgument)
    conjure_star_parameter = produce_conjure_unary_expression('*-parameter', StarParameter)
    conjure_twos_complement = produce_conjure_unary_expression('twos-complement', TwosComplementExpression)

    NotExpression.conjure_with_frill = static_method(conjure_not_expression)
    StarArgument.conjure_with_frill = static_method(conjure_star_argument)
    StarParameter.conjure_with_frill = static_method(conjure_star_parameter)
    TwosComplementExpression.conjure_with_frill = static_method(conjure_twos_complement)

    #
    #   .mutate
    #
    NegativeExpression.mutate = produce_mutate__frill__a_with_priority(
        'negative_expression',
        PRIORITY_UNARY,
        conjure_negative_expression,
    )

    StarArgument.mutate = produce_mutate__frill__a_with_priority(
        'star_argument',
        PRIORITY_TERNARY,
        conjure_star_argument,
    )

    TwosComplementExpression.mutate = produce_mutate__frill__a_with_priority(
        'twos_complement_expression',
        PRIORITY_TERNARY,
        conjure_twos_complement,
    )

    #
    #   .transform
    #
    StarParameter.transform = produce_transform__frill_a('star_paramater', conjure_star_parameter)

    share(
        'conjure_negative_expression', conjure_negative_expression,
        'conjure_not_expression', conjure_not_expression,
        'conjure_star_argument', conjure_star_argument,
        'conjure_star_parameter', conjure_star_parameter,
        'conjure_twos_complement', conjure_twos_complement,
    )
|
class Retangulo:
    """Rectangle with lazily computed area and perimeter.

    Both derived values start at 0 and are only filled in when the
    corresponding calculate method is called.
    """

    def __init__(self, base, altura):
        self.la1 = base       # base (first side) length
        self.la2 = altura     # height (second side) length
        self.area = 0         # set by calcularAreareta()
        self.perimetro = 0    # set by calculaPerimetroreta()

    def calcularAreareta(self):
        """Compute, store and return the area (base * height)."""
        self.area = self.la1 * self.la2
        return self.area

    def calculaPerimetroreta(self):
        """Compute, store and return the perimeter 2 * (base + height)."""
        self.perimetro = 2 * (self.la1 + self.la2)
        return self.perimetro
|
list_of_lists = [['a', 'b', 'c'], ['d', 'e', 'f'], ['g', 'h', 'i']]
# Render each row comma-separated and newline-terminated, e.g. "a,b,c\n".
string = ''.join(','.join(row) + '\n' for row in list_of_lists)
print(string)
|
# encoding: utf-8
# module _functools
# from (built-in)
# by generator 1.145
""" Tools that operate on functions. """
# NOTE(review): this is an IDE-generated skeleton of the C builtin module
# `_functools`; bodies are placeholders (`pass`) and signatures are the
# generator's best effort -- do not treat them as runnable implementations.

# no imports

# functions

def cmp_to_key(*args, **kwargs): # real signature unknown
    """ Convert a cmp= function into a key= function. """
    pass

def reduce(function, sequence, initial=None): # real signature unknown; restored from __doc__
    """
    reduce(function, sequence[, initial]) -> value
    Apply a function of two arguments cumulatively to the items of a sequence,
    from left to right, so as to reduce the sequence to a single value.
    For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates
    ((((1+2)+3)+4)+5). If initial is present, it is placed before the items
    of the sequence in the calculation, and serves as a default when the
    sequence is empty.
    """
    pass

# classes

class partial(object):
    """
    partial(func, *args, **keywords) - new function with partial application
    of the given arguments and keywords.
    """
    def __call__(self, *args, **kwargs): # real signature unknown
        """ Call self as a function. """
        pass

    def __delattr__(self, *args, **kwargs): # real signature unknown
        """ Implement delattr(self, name). """
        pass

    def __getattribute__(self, *args, **kwargs): # real signature unknown
        """ Return getattr(self, name). """
        pass

    def __init__(self, func, *args, **keywords): # real signature unknown; restored from __doc__
        pass

    @staticmethod # known case of __new__
    def __new__(*args, **kwargs): # real signature unknown
        """ Create and return a new object. See help(type) for accurate signature. """
        pass

    def __reduce__(self, *args, **kwargs): # real signature unknown
        pass

    def __repr__(self, *args, **kwargs): # real signature unknown
        """ Return repr(self). """
        pass

    def __setattr__(self, *args, **kwargs): # real signature unknown
        """ Implement setattr(self, name, value). """
        pass

    def __setstate__(self, *args, **kwargs): # real signature unknown
        pass

    args = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """tuple of arguments to future partial calls"""

    func = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """function object to use in future partial calls"""

    keywords = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """dictionary of keyword arguments to future partial calls"""

    __dict__ = None # (!) real value is ''


class _lru_cache_wrapper(object):
    """
    Create a cached callable that wraps another function.
    user_function: the function being cached
    maxsize: 0 for no caching
    None for unlimited cache size
    n for a bounded cache
    typed: False cache f(3) and f(3.0) as identical calls
    True cache f(3) and f(3.0) as distinct calls
    cache_info_type: namedtuple class with the fields:
    hits misses currsize maxsize
    """
    def cache_clear(self, *args, **kwargs): # real signature unknown
        pass

    def cache_info(self, *args, **kwargs): # real signature unknown
        pass

    def __call__(self, *args, **kwargs): # real signature unknown
        """ Call self as a function. """
        pass

    def __copy__(self, *args, **kwargs): # real signature unknown
        pass

    def __deepcopy__(self, *args, **kwargs): # real signature unknown
        pass

    def __get__(self, *args, **kwargs): # real signature unknown
        """ Return an attribute of instance, which is of type owner. """
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    @staticmethod # known case of __new__
    def __new__(*args, **kwargs): # real signature unknown
        """ Create and return a new object. See help(type) for accurate signature. """
        pass

    def __reduce__(self, *args, **kwargs): # real signature unknown
        pass

    __dict__ = None # (!) real value is ''


class __loader__(object):
    """
    Meta path import for built-in modules.
    All methods are either class or static methods to avoid the need to
    instantiate the class.
    """
    @classmethod
    def create_module(cls, *args, **kwargs): # real signature unknown
        """ Create a built-in module """
        pass

    @classmethod
    def exec_module(cls, *args, **kwargs): # real signature unknown
        """ Exec a built-in module """
        pass

    @classmethod
    def find_module(cls, *args, **kwargs): # real signature unknown
        """
        Find the built-in module.
        If 'path' is ever specified then the search is considered a failure.
        This method is deprecated. Use find_spec() instead.
        """
        pass

    @classmethod
    def find_spec(cls, *args, **kwargs): # real signature unknown
        pass

    @classmethod
    def get_code(cls, *args, **kwargs): # real signature unknown
        """ Return None as built-in modules do not have code objects. """
        pass

    @classmethod
    def get_source(cls, *args, **kwargs): # real signature unknown
        """ Return None as built-in modules do not have source code. """
        pass

    @classmethod
    def is_package(cls, *args, **kwargs): # real signature unknown
        """ Return False as built-in modules are never packages. """
        pass

    @classmethod
    def load_module(cls, *args, **kwargs): # real signature unknown
        """
        Load the specified module into sys.modules and return it.
        This method is deprecated. Use loader.exec_module instead.
        """
        pass

    def module_repr(module): # reliably restored by inspect
        """
        Return repr for the module.
        The method is deprecated. The import machinery does the job itself.
        """
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    __weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """list of weak references to the object (if defined)"""

    __dict__ = None # (!) real value is ''

# variables with complex values

__spec__ = None # (!) real value is ''
|
# For each test case, flag students whose attendance is below 3/4 of the
# valid classes.  'P' = present, 'A' = absent, 'M' = medical excuse (does
# not count toward the required total).
quant = int(input())
for _ in range(quant):
    num_estudantes = int(input())  # read to consume the line (count is implied by the lists)
    nomes = input().split(' ')
    registro = input().split(' ')
    reprovados = []
    for aluno, frequencia in zip(nomes, registro):
        presencas = frequencia.count('P')
        aulas_validas = len(frequencia) - frequencia.count('M')
        if presencas < (3 / 4) * aulas_validas:
            reprovados.append(aluno)
    print(' '.join(reprovados))
|
#aula-25-09-19
# Walks the inclusive range [num1, num2] and tallies the odd numbers in it.
# NOTE(review): the variable is called "soma" (sum) and the message says
# "a soma dos numeros impares", but the code increments by 1 each time, so it
# reports a COUNT of odd numbers, not their sum -- confirm the intent.
num1 = int(input("Digite o primeiro numero: "))
num2 = int(input("Digite o ultimo numero: "))
soma = 0
total = 0  # never used after initialisation
while (num1 <= num2):
    if (num1 % 2) == 1:
        soma = soma + 1
    num1 = num1 + 1
print(f" A soma dos números impares é: {soma} números impares.")
# -*- coding: utf8 -*-
# Copyright (c) 2020 Nicholas de Jong

# Package metadata and tunable runtime defaults for the arpwitch sniffer.
__title__ = "arpwitch"
__author__ = "Nicholas de Jong <contact@nicholasdejong.com>"
__version__ = '0.3.9'
__license__ = "BSD2"

__logger_default_level__ = 'info'  # default logging verbosity
__sniff_batch_size__ = 16  # packets collected per sniff batch
__sniff_batch_timeout__ = 2  # per-batch timeout -- presumably seconds, TODO confirm
__save_data_interval__default__ = 30  # interval between data saves -- presumably seconds, TODO confirm
# Template for the nmap follow-up command; {IP} and {ts} are substituted at runtime.
__nmap__exec__ = 'nmap -n -T4 -Pn -oX ' + __title__ + '-nmap-{IP}-{ts}.xml {IP}'
__exec_max_runtime__ = 30  # maximum runtime allowed for an exec'd command -- TODO confirm units
|
# Maps ICML 2015 session titles to canonical topic tags.  Each value is a list
# of tag groups (inner lists); presumably multiple groups mean multiple tag
# sets are attached downstream -- TODO confirm against the consumer.
# NOTE(review): "Baysian" is misspelled consistently across all of these
# tables; it is kept as-is because the tag strings are matched elsewhere.
category_dict_icml15 = {
    "Bandit Learning" : [["Bandits"]],
    "Bayesian Nonparametrics" : [["Baysian Nonparametrics"]],
    "Bayesian Optimization" : [["Optimization"], ["Baysian Optimization"]],
    "Causality" : [["Causality"]],
    "Clustering" : [["Clustering"]],
    "Computational Advertising And Social Science" : [["Computational Advertising", "Social Science"]],
    "Deep Learning" : [["Deep Learning"]],
    "Deep Learning And Vision" : [["Deep Learning", "Computer Vision"]],
    "Deep Learning Computations" : [["Deep Learning"]],
    "Distributed Optimization" : [["Optimization"], ["Distributed Optimization"]],
    "Feature Selection" : [["Feature Selection"]],
    "Gaussian Processes" : [["Gaussian Processes"]],
    "Hashing" : [["Hashing"]],
    "Kernel Methods" : [["Kernel Methods"]],
    "Large Scale Learning" : [["Large Scale Learning"]],
    "Learning Theory" : [["Learning Theory"]],
    "Manifold Learning" : [["Manifold Learning"]],
    "Matrix Factorization" : [["Matrix Factorization"]],
    "Monte Carlo Methods" : [["Monte Carlo Methods"]],
    "Natural Language Processing" : [["Natural Language Processing"]],
    "Networks And Graphs" : [["Networks", "Graph Analysis"]],
    "Online Learning" : [["Online Learning"]],
    "Optimization" : [["Optimization"]],
    "Privacy" : [["Privacy"]],
    "Probabilistic Models" : [["Probabilistic Models"]],
    "Ranking Learning" : [["Ranking Learning"]],
    "Reinforcement Learning" : [["Reinforcement Learning"]],
    "Sparse Optimization" : [["Optimization"], ["Sparse Optimization"]],
    "Sparsity" : [["Sparsity"]],
    "Structured Prediction" : [["Structured Prediction"]],
    "Submodularity" : [["Submodularity"]],
    "Supervised Learning" : [["Supervised Learning"]],
    "Time Series Analysis" : [["Time-Series"]],
    "Topic Models" : [["Probabilistic Models"]],
    "Transfer Learning" : [["Transfer Learning"]],
    "Unsupervised Learning" : [["Unsupervised Learning"]],
    "Variational Inference" : [["Approximate Inference"]],
    "Vision" : [["Computer Vision"]]
}
# Maps ICML 2016 session titles to canonical topic tags (same structure and
# tag vocabulary as category_dict_icml15; keys mirror the conference's own
# session names, including their punctuation quirks).
category_dict_icml16 = {
    "Applications and Time-Series Analysis" : [["Applications", "Time-Series"]],
    "Approximate Inference" : [["Approximate Inference"]],
    "Bandit Problems" : [["Bandits"]],
    "Bayesian Nonparametric Methods" : [["Baysian Nonparametrics"]],
    "Causal Inference" : [["Causal Inference"]],
    "Clustering" : [["Clustering"]],
    "Crowdsourcing and Interactive Learning" : [["Crowdsourcing", "Interactive Learning"]],
    "Dimensionality Reduction / Private Learning" : [["Dimensionality Reduction", "Privacy"]],
    "Feature Selection and Dimensionality Reduction" : [["Feature Selection", "Dimensionality Reduction"]],
    "Gaussian Processes" : [["Gaussian Processes"]],
    "Graph Analysis/ Spectral Methods" : [["Graph Analysis", "Spectral Methods"]],
    "Graphical Models" : [["Probabilistic Models"]],
    "Kernel Methods" : [["Kernel Methods"]],
    "Large Scale Learning and Big Data" : [["Large Scale Learning"]],
    "Learning Theory" : [["Learning Theory"]],
    "Machine Learning Applications" : [["Applications"]],
    "Matrix Factorization / Neuroscience Applications" : [["Matrix Factorization", "Neuroscience Applications"]],
    "Matrix Factorization and Related Topics" : [["Matrix Factorization"]],
    "Metric and Manifold Learning / Kernel Methods" : [["Metric Learning", "Manifold Learning", "Kernel Methods"]],
    "Monte Carlo Methods" : [["Monte Carlo Methods"]],
    "Multi-label, multi-task, and neural networks" : [["Multi-Label Learning", "Multi-Task Learning"], ["Deep Learning"]],
    "Neural Networks and Deep Learning" : [["Deep Learning"]],
    "Neural Networks and Deep Learning I" : [["Deep Learning"]],
    "Neural Networks and Deep Learning II" : [["Deep Learning"]],
    "Neural Networks and Deep Learning II (Computer Vision)" : [["Deep Learning"], ["Computer Vision"]],
    "Online Learning" : [["Online Learning"]],
    "Optimization" : [["Optimization"]],
    "Optimization (Combinatorial)" : [["Optimization"], ["Combinatorial Optimization"]],
    "Optimization (Continuous)" : [["Optimization"], ["Continuous Optimization"]],
    "Optimization / Online Learning" : [["Optimization", "Online Learning"]],
    "Privacy, Anonymity, and Security" : [["Privacy"]],
    "Ranking and Preference Learning" : [["Ranking Learning", "Preference Learning"]],
    "Reinforcement Learning" : [["Reinforcement Learning"]],
    "Sampling / Kernel Methods" : [["Sampling", "Kernel Methods"]],
    "Sparsity and Compressed Sensing" : [["Sparsity", "Compressed Sensing"]],
    "Statistical Learning Theory" : [["Statistical Learning Theory"]],
    "Structured Prediction / Monte Carlo Methods" : [["Structured Prediction", "Monte Carlo Methods"]],
    "Supervised Learning" : [["Supervised Learning"]],
    "Transfer Learning / Learning Theory" : [["Transfer Learning", "Learning Theory"]],
    "Unsupervised Learning / Applications" : [["Unsupervised Learning", "Applications"]],
    "Unsupervised Learning / Representation Learning" : [["Unsupervised Learning", "Representation Learning"]]
}
# Maps ICML 2017 session titles to canonical topic tags (same structure as
# the 2015/2016 tables above).
# NOTE(review): "Infomation Theory" below looks like a typo, but the keys
# must match the scraped session names exactly -- verify before "fixing".
category_dict_icml17 = {
    "Active Learning" : [["Active Learning"]],
    "Applications" : [["Applications"]],
    "Bayesian Nonparametrics" : [["Baysian Nonparametrics"]],
    "Bayesian Optimization" : [["Optimization"], ["Baysian Optimization"]],
    "Causal Inference" : [["Causal Inference"]],
    "Clustering" : [["Clustering"]],
    "Combinatorial Optimization" : [["Optimization"], ["Combinatorial Optimization"]],
    "Continuous Control" : [["Continuous Control"]],
    "Continuous Optimization" : [["Optimization"], ["Continuous Optimization"]],
    "Deep Generative Models" : [["Deep Learning"], ["Deep Generative Models"]],
    "Deep Learning" : [["Deep Learning"]],
    "Deep Learning : Analysis": [["Deep Learning"]],
    "Deep Learning : Backprop": [["Deep Learning"]],
    "Deep Learning : Fisher Approximations": [["Deep Learning"]],
    "Deep Learning : Hardware": [["Deep Learning"]],
    "Deep Learning : Invariances": [["Deep Learning"]],
    "Deep Learning : Learning To Learn": [["Deep Learning"]],
    "Deep Learning : Metalearning": [["Deep Learning"]],
    "Deep Learning : Probabilistic": [["Deep Learning"], ["Probabilistic Models"]],
    "Deep Learning Theory" : [["Deep Learning"], ["Deep Learning Theory"]],
    "Deep Reinforcement Learning" : [["Deep Reinforcement Learning"], ["Reinforcement Learning"]],
    "Distributed Optimization" : [["Optimization"], ["Distributed Optimization"]],
    "Ensemble Methods" : [["Ensemble Methods"]],
    "Game Theory And Multiagents" : [["Game Theory", "Multi-Agent Learning"]],
    "Gaussian Processes" : [["Gaussian Processes"]],
    "Healthcare" : [["Healthcare"]],
    "High Dimensional Estimation" : [["High Dimensional Estimation"]],
    "Infomation Theory" : [["Information Theory"]],
    "Kernel Methods" : [["Kernel Methods"]],
    "Language" : [["Natural Language Processing"]],
    "Large Scale Learning" : [["Large Scale Learning"]],
    "Latent Feature Models" : [["Latent Feature Models"]],
    "Learning Theory" : [["Learning Theory"]],
    "Matrix Factorization" : [["Matrix Factorization"]],
    "Metric Learning" : [["Metric Learning"]],
    "Ml And Programming" : [["Ml And Programming"]],
    "Monte Carlo Methods" : [["Monte Carlo Methods"]],
    "Networks And Relational Learning" : [["Networks", "Relational Learning"]],
    "Online Learning" : [["Online Learning"]],
    "Privacy And Security" : [["Privacy"]],
    "Probabilistic Inference" : [["Approximate Inference"]],
    "Probabilistic Learning" : [["Probabilistic Models"]],
    "Ranking And Preferences" : [["Ranking Learning", "Preference Learning"]],
    "Recurrent Neural Networks" : [["Recurrent Neural Networks"]],
    "Reinforcement Learning" : [["Reinforcement Learning"]],
    "Robust Estimation" : [["Robustness"]],
    "Semisupervised And Curriculum Learning" : [
        ["Semi-Supervised Learning", "Curriculum Learning"]
    ],
    "Sparsity" : [["Sparsity"]],
    "Spectral Methods" : [["Spectral Methods"]],
    "Structured Prediction" : [["Structured Prediction"]],
    "Supervised Learning" : [["Supervised Learning"]],
    "Time Series" : [["Time-Series"]],
    "Transfer And Multitask Learning": [["Transfer Learning", "Multi-Task Learning"]]
}
# Maps ICML 2018 session titles to canonical topic tags (same structure as
# the tables above).
category_dict_icml18 = {
    "Active Learning" : [["Active Learning"]],
    "Approximate Inference" : [["Approximate Inference"]],
    "Causal Inference" : [["Causal Inference"]],
    "Clustering" : [["Clustering"]],
    "Computer Vision" : [["Computer Vision"]],
    "Deep Learning (Adversarial)" : [["Deep Learning", "Adversarial"]],
    "Deep Learning (Bayesian)" : [["Deep Learning", "Baysian Deep Learning"]],
    "Deep Learning (Neural Network Architectures)" : [["Deep Learning", "Architectures"]],
    "Deep Learning (Theory)" : [["Deep Learning", "Deep Learning Theory"]],
    "Dimensionality Reduction" : [["Dimensionality Reduction"]],
    "Feature Selection" : [["Feature Selection"]],
    "Gaussian Processes" : [["Gaussian Processes"]],
    "Generative Models" : [["Generative Models"]],
    "Graphical Models" : [["Probabilistic Models"]],
    "Kernel Methods" : [["Kernel Methods"]],
    "Large Scale Learning and Big Data" : [["Large Scale Learning"]],
    "Matrix Factorization" : [["Matrix Factorization"]],
    "Monte Carlo Methods" : [["Monte Carlo Methods"]],
    "Multi-Agent Learning" : [["Multi-Agent Learning"]],
    "Natural Language and Speech Processing" : [["Natural Language Processing", "Speech Processing"]],
    "Networks and Relational Learning" : [["Networks", "Relational Learning"]],
    "Online Learning" : [["Online Learning"]],
    "Optimization (Bayesian)" : [["Optimization"], ["Baysian Optimization"]],
    "Optimization (Combinatorial)" : [["Optimization"], ["Combinatorial Optimization"]],
    "Optimization (Convex)" : [["Optimization"], ["Convex Optimization"]],
    "Optimization (Non-convex)" : [["Optimization"], ["Non-Convex Optimization"]],
    "Other Applications" : [["Applications"]],
    "Other Models and Methods" : [["Other Models and Methods"]],
    "Parallel and Distributed Learning" : [["Optimization"], ["Distributed Optimization"]],
    "Privacy, Anonymity, and Security" : [["Privacy"]],
    "Ranking and Preference Learning" : [["Ranking Learning", "Preference Learning"]],
    "Reinforcement Learning" : [["Reinforcement Learning"]],
    "Representation Learning" : [["Representation Learning"]],
    "Society Impacts of Machine Learning" : [["Society Impacts of Machine Learning"]],
    "Sparsity and Compressed Sensing" : [["Sparsity", "Compressed Sensing"]],
    "Spectral Methods" : [["Spectral Methods"]],
    "Statistical Learning Theory" : [["Statistical Learning Theory"]],
    "Structured Prediction" : [["Structured Prediction"]],
    "Supervised Learning" : [["Supervised Learning"]],
    "Time-Series Analysis" : [["Time-Series"]],
    "Transfer and Multi-Task Learning" : [["Transfer Learning", "Multi-Task Learning"]],
    "Unsupervised Learning" : [["Unsupervised Learning"]]
}
# Maps ICML 2019 session titles to canonical topic tags (same structure as
# the tables above).
category_dict_icml19 = {
    "Active Learning" : [["Active Learning"]],
    "Adversarial Examples" : [["Adversarial"]],
    "Applications" : [["Applications"]],
    "Applications: Computer Vision" : [["Applications"], ["Computer Vision"]],
    "Applications: Natural Language Processing" : [["Applications"], ["Natural Language Processing"]],
    "Approximate Inference" : [["Approximate Inference"]],
    "Bandits and Multiagent Learning" : [["Bandits", "Multi-Agent Learning"]],
    "Bayesian Deep Learning" : [["Deep Learning"], ["Baysian Deep Learning"]],
    "Bayesian Methods" : [["Baysian Methods"]],
    "Bayesian Non-parametrics" : [["Baysian Nonparametrics"]],
    "Causality" : [["Causality"]],
    "Combinatorial Optimization" : [["Combinatorial Optimization"]],
    "Convex Optimization" : [["Optimization"], ["Convex Optimization"]],
    "Deep Generative Models" : [["Deep Learning"], ["Deep Generative Models"]],
    "Deep Learning" : [["Deep Learning"]],
    "Deep Learning Algorithms" : [["Deep Learning"]],
    "Deep Learning Architectures" : [["Deep Learning"], ["Architectures"]],
    "Deep Learning Optimization" : [["Deep Learning"], ["Optimization"]],
    "Deep Learning Theory" : [["Deep Learning"], ["Deep Learning Theory"]],
    "Deep RL" : [["Deep Learning"], ["Deep Reinforcement Learning"]],
    "Deep Sequence Models" : [["Deep Learning"]],
    "Fairness" : [["Fairness"]],
    "Gaussian Processes" : [["Gaussian Processes"]],
    "General ML" : [["General ML"]],
    "Generative Adversarial Networks" : [["Generative Models"]],
    "Generative Models" : [["Generative Models"]],
    "Information Theory and Estimation" : [["Information Theory", "Estimation"]],
    "Interpretability" : [["Interpretability"]],
    "Kernel Methods" : [["Kernel Methods"]],
    "Large Scale Learning and Systems" : [["Large Scale Learning"]],
    "Learning Theory" : [["Learning Theory"]],
    "Learning Theory: Games" : [["Learning Theory", "Games"]],
    "Monte Carlo Methods" : [["Monte Carlo Methods"]],
    "Networks and Relational Learning" : [["Networks", "Relational Learning"]],
    "Non-convex Optimization" : [["Optimization"], ["Non-Convex Optimization"]],
    "Online Learning" : [["Online Learning"]],
    "Optimization" : [["Optimization"]],
    "Optimization and Graphical Models" : [["Optimization", "Probabilistic Models"]],
    "Optimization: Convex and Non-convex" : [["Optimization"], ["Convex Optimization", "Non-Convex Optimization"]],
    "Privacy" : [["Privacy"]],
    "Privacy and Fairness" : [["Privacy", "Fairness"]],
    "Probabilistic Inference" : [["Probabilistic Inference"]],
    "Ranking and Preference Learning" : [["Ranking Learning", "Preference Learning"]],
    "Reinforcement Learning" : [["Reinforcement Learning"]],
    "Reinforcement Learning Theory" : [["Reinforcement Learning"]],
    "Reinforcement Learning and Bandits" : [["Reinforcement Learning", "Bandits"]],
    "Representation Learning" : [["Representation Learning"]],
    "Robust Statistics and Interpretability" : [["Robustness", "Interpretability"]],
    "Robust Statistics and Machine Learning" : [["Robustness"]],
    "Statistical Learning Theory" : [["Statistical Learning Theory"]],
    "Supervised Learning" : [["Supervised Learning"]],
    "Supervised and Transfer Learning" : [["Supervised Learning", "Transfer Learning"]],
    "Time Series" : [["Time-Series"]],
    "Transfer and Multitask Learning" : [["Transfer Learning", "Multi-Task Learning"]],
    "Unsupervised Learning" : [["Unsupervised Learning"]]
}
class Solution:
    def subsets(self, nums):
        """
        :type nums: List[int]
        :rtype: List[List[int]]

        Enumerate every subset of nums.  Each bitmask from 0 to 2**n - 1
        selects one subset: bit i set means nums[i] is included.  Masks in
        increasing order reproduce the binary-counter ordering of the
        original implementation, with elements in ascending index order.
        """
        total = len(nums)
        return [
            [nums[index] for index in range(total) if (mask >> index) & 1]
            for mask in range(1 << total)
        ]
# Demo: prints all 8 subsets of [1, 2, 4] when run as a script.
if __name__ == "__main__":
    print(Solution().subsets([1, 2, 4]))
|
# --------------------------------------
# CSCI 127, Lab 4
# May 29, 2020
# Your Name
# --------------------------------------
def process_season(season, games_played, points_earned):
    """Print a season header and every win-tie-loss record consistent with it.

    A win is worth 3 points, a tie 1 point, and a loss 0 points, so a record
    (w, t, l) is possible exactly when 3*w + t == points_earned and
    w + t + l == games_played with all three counts non-negative.

    The original skeleton printed the headers and left the enumeration as a
    `pass` placeholder; this implements it.
    """
    print("Season: " + str(season) + ", Games Played: " + str(games_played) +
          ", Points earned: " + str(points_earned))
    print("Possible Win-Tie-Loss Records")
    print("-----------------------------")
    for wins in range(points_earned // 3 + 1):
        ties = points_earned - 3 * wins          # forced once wins is chosen
        losses = games_played - wins - ties      # remaining games
        if ties >= 0 and losses >= 0:
            print(str(wins) + "-" + str(ties) + "-" + str(losses))
    print()
# --------------------------------------
def process_seasons(seasons):
    """Process each [games_played, points_earned] pair as one season.

    Seasons are numbered from 1 in list order.  The original skeleton left
    this as a `pass` placeholder; this implements the obvious iteration.
    """
    for season_number, (games_played, points_earned) in enumerate(seasons, start=1):
        process_season(season_number, games_played, points_earned)
# --------------------------------------
def main():
    """Run the lab's fixed demo data through process_seasons."""
    # format of list: [[season-1-games, season-1-points], [season-2-games, season-2-points], etc.]
    soccer_seasons = [[1, 3], [1, 1], [1, 0], [20, 30]]
    process_seasons(soccer_seasons)
# --------------------------------------
main()
|
# Definition for a binary tree node.
class TreeNode:
    """Plain binary-tree node: a value plus optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right
class Solution:
    def rob(self, root: TreeNode) -> int:
        """House Robber III: maximum sum of node values with no two
        adjacent (parent/child) nodes both taken."""

        def walk(node):
            # Returns (best_taking_node, best_skipping_node) for the subtree.
            if node is None:
                return 0, 0
            left = walk(node.left)
            right = walk(node.right)
            taken = node.val + left[1] + right[1]
            skipped = max(left) + max(right)
            return taken, skipped

        return max(walk(root))
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Double Validation on Purchase Requisition',
'version' : '0.1',
'category': 'Purchase Management',
'images' : [],
'depends' : ['purchase_requisition'],
'author' : 'Ecosoft',
'description': """
Double-validation for purchase requisition.
==========================================
The objective of this module is to allow splitting of process between requester and approving manager.
This module adding new state "Send to Purchase", in which state, only manager can - Send to supplier, Create Quotation, Purchase Done
For PR Groups,
* User: Can only click Sent to Purchase
* Manager: Can do everything with the Purchase Requisition
""",
'website': 'http://www.ecosoft.co.th',
'data': [
'purchase_requisition_view.xml',
],
'test': [
],
'demo': [],
'installable': True,
'auto_install': False
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# Made by Wallee#8314/Red-exe-Engineer
# Thanks @Bigjango helping :p
# Minecraft (Pi/Pocket Edition) block lookup table: name -> [block id, data].
# NOTE(review): several keys are misspelled ("Saplin", "Lava Staionary",
# "Yello Wool", "Cobbleston", "Sugercane", "West West ... Fence Gate").  They
# are kept as-is for backward compatibility with existing lookups.
# Fixes applied (values only): wooden/iron door east "Opened" data (opened =
# closed + 4, matching the other directions), the south wooden door's block id
# (was 46 = TNT), the upper cobblestone stairs' block id (was 57 = diamond
# block), the south wall sign's data (south = 3, matching ladders and chests),
# and a duplicate "Smooth Stone" entry was removed (value unchanged).
Block = {
    "Air": [0, 0],
    "Stone": [1, 0],
    "Grass": [2, 0],
    "Dirt": [3, 0],
    "Cobblestone": [4, 0],
    "Wooden Planks": [5, 0],
    "Saplin": [6, 0],
    "Oak Saplin": [6, 0],
    "Spruce Saplin": [6, 1],
    "Birch Saplin": [6, 2],
    "Bedrock": [7, 0],
    "Water": [8, 0],
    "Water Stationary": [9, 0],
    "Lava": [10, 0],
    "Lava Staionary": [11, 0],
    "Sand": [12, 0],
    "Gravel": [13, 0],
    "Gold Ore": [14, 0],
    "Iron Ore": [15, 0],
    "Coal Ore": [16, 0],
    "Log": [17, 0],
    "Oak Log": [17, 0],
    "Spruce Log": [17, 1],
    "Birch Log": [17, 2],
    "Leaves": [18, 0],
    "Oak Leaves": [18, 0],
    "Spruce Leaves": [18, 1],
    "Birch Leaves": [18, 2],
    "Glass": [20, 0],
    "Lapis Lazuli Ore": [21, 0],
    "Lapis Lazuli Block": [22, 0],
    "Sandstone": [24, 0],
    "Chiseled Sandstone": [24, 1],
    "Smooth Sandstone": [24, 2],
    "Bed": [26, 0],
    "Bed Foot": [26, 0],
    "Bed Head": [26, 8],
    "North Bed Foot": [26, 0],
    "North Bed Head": [26, 8],
    "East Bed Foot": [26, 1],
    "East Bed Head": [26, 9],
    "South Bed Foot": [26, 2],
    "South Bed Head": [26, 10],
    "West Bed Foot": [26, 3],
    "West Bed Head": [26, 11],
    "Cobweb": [30, 0],
    "Shrub": [31, 0],
    "Tall Grass": [31, 1],
    "Fern": [31, 3],
    "Wool": [35, 0],
    "White Wool": [35, 0],
    "Orange Wool": [35, 1],
    "Magenta Wool": [35, 2],
    "Light Blue Wool": [35, 3],
    "Yello Wool": [35, 4],
    "Lime Wool": [35, 5],
    "Pink Wool": [35, 6],
    "Grey Wool": [35, 7],
    "Light Grey Wool": [35, 8],
    "Cyan Wool": [35, 9],
    "Purple Wool": [35, 10],
    "Blue Wool": [35, 11],
    "Brown Wool": [35, 12],
    "Green Wool": [35, 13],
    "Red Wool": [35, 14],
    "Black Wool": [35, 15],
    "Flower": [37, 0],
    "Poppy": [38, 0],
    "Brown Mushroom": [39, 0],
    "Red Mushroom": [40, 0],
    "Block of Gold": [41, 0],
    "Block of Iron": [42, 0],
    "Double Stone Slab": [43, 0],
    "Smooth Stone": [43, 6],
    "Stone Slab": [44, 0],
    "Sandstone Slab": [44, 1],
    "Wooden Slab": [44, 2],
    "Cobblestone Slab": [44, 3],
    "Bricks Slab": [44, 4],
    "Stone Bricks Slab": [44, 5],
    "Lower Stone Slab": [44, 0],
    "Lower Sandstone Slab": [44, 1],
    "Lower Wooden Slab": [44, 2],
    "Lower Cobblestone Slab": [44, 3],
    "Lower Bricks Slab": [44, 4],
    "Lower Stone Bricks Slab": [44, 5],
    "Upper Stone Slab": [44, 8],
    "Upper Sandstone Slab": [44, 9],
    "Upper Wooden Slab": [44, 10],
    "Upper Cobblestone Slab": [44, 11],
    "Upper Bricks Slab": [44, 12],
    "Upper Stone Bricks Slab": [44, 13],
    "Stone Slab Block": [43, 0],
    "Sandstone Slab Block": [43, 1],
    "Wooden Slab Block": [43, 2],
    "Cobblestone Slab Block": [43, 3],
    "Bricks Slab Block": [43, 4],
    "Stone Bricks Slab Block": [43, 5],
    # (duplicate "Smooth Stone": [43, 6] entry removed here)
    "Bricks": [45, 0],
    "TNT": [46, 0],
    "Active TNT": [46, 1],
    "Bookshelf": [47, 0],
    "Moss Stone": [48, 0],
    "Obsidian": [49, 0],
    "Torch": [50, 0],
    "Wooden Stairs": [53, 2],
    "North Lower Wooden Stairs": [53, 2],
    "East Lower Wooden Stairs": [53, 1],
    "South Lower Wooden Stairs": [53, 3],
    "West Lower Wooden Stairs": [53, 0],
    "North Upper Wooden Stairs": [53, 6],
    "East Upper Wooden Stairs": [53, 5],
    "South Upper Wooden Stairs": [53, 7],
    "West Upper Wooden Stairs": [53, 4],
    "Chest": [54, 2],
    "Blank Chest": [54, 1],
    "North Chest": [54, 2],
    "East Chest": [54, 5],
    "South Chest": [54, 3],
    "West Chest": [54, 4],
    "Diamond Ore": [56, 0],
    "Diamond Block": [57, 0],
    "Crafting Table": [58, 0],
    "Crops": [59, 0],
    "Crops 1": [59, 1],
    "Crops 2": [59, 2],
    "Crops 3": [59, 3],
    "Crops 4": [59, 4],
    "Crops 5": [59, 5],
    "Crops 6": [59, 6],
    "Crops 7": [59, 7],
    "Farmland": [60, 0],
    "Wet Farmland": [60, 1],
    "Furnace": [61, 0],
    "Active Furnace": [62, 0],
    "Sign": [63, 0],
    "Sign 0": [63, 0],
    "Sign 1": [63, 1],
    "Sign 2": [63, 2],
    "Sign 3": [63, 3],
    "Sign 4": [63, 4],
    "Sign 5": [63, 5],
    "Sign 6": [63, 6],
    "Sign 7": [63, 7],
    "Sign 8": [63, 8],
    "Sign 9": [63, 9],
    "Sign 10": [63, 10],
    "Sign 11": [63, 11],
    "Sign 12": [63, 12],
    "Sign 13": [63, 13],
    "Sign 14": [63, 14],
    "Sign 15": [63, 15],
    "Lower North Closed Wooden Door": [64, 1],
    "Lower North Opened Wooden Door": [64, 5],
    "Lower East Closed Wooden Door": [64, 2],
    "Lower East Opened Wooden Door": [64, 6],   # was [64, 2] (same as closed); opened = closed + 4
    "Lower South Closed Wooden Door": [64, 3],  # was [46, 3]; 46 is TNT, doors are id 64
    "Lower South Opened Wooden Door": [64, 7],
    "Lower West Closed Wooden Door": [64, 0],
    "Lower West Opened Wooden Door": [64, 4],
    "Upper Left Wooden Door": [64, 8],
    "Upper Right Wooden Door": [64, 9],
    "Lower Wooden Door": [64, 1],
    "Upper Wooden Door": [64, 8],
    "Ladder": [65, 2],
    "North Ladder": [65, 2],
    "East Ladder": [65, 5],
    "South Ladder": [65, 3],
    "West Ladder": [65, 4],
    "Cobblestone Stairs": [67, 2],
    "North Lower Cobblestone Stairs": [67, 2],
    "East Lower Cobblestone Stairs": [67, 1],
    "South Lower Cobblestone Stairs": [67, 3],
    "West Lower Cobblestone Stairs": [67, 0],
    "North Upper Cobblestone Stairs": [67, 6],  # was [57, 6]; 57 is the diamond block
    "East Upper Cobbleston Stairs": [67, 5],    # was [57, 5]
    "South Upper Cobbleston Stairs": [67, 7],   # was [57, 7]
    "West Upper Cobbleston Stairs": [67, 4],    # was [57, 4]
    "Wall Sign": [68, 2],
    "North Wall Sign": [68, 2],
    "East Wall Sign": [68, 5],
    "South Wall Sign": [68, 3],  # was [68, 1]; south = 3 for ladders/chests/wall fixtures
    "West Wall Sign": [68, 4],
    "Lower North Closed Iron Door": [71, 1],
    "Lower North Opened Iron Door": [71, 5],
    "Lower East Closed Iron Door": [71, 2],
    "Lower East Opened Iron Door": [71, 6],  # was [71, 2] (same as closed); opened = closed + 4
    "Lower South Closed Iron Door": [71, 3],
    "Lower South Opened Iron Door": [71, 7],
    "Lower West Closed Iron Door": [71, 0],
    "Lower West Opened Iron Door": [71, 4],
    "Upper Left Iron Door": [71, 8],
    "Upper Right Iron Door": [71, 9],
    "Lower Iron Door": [71, 1],
    "Upper Iron Door": [71, 8],
    "Redstone Ore": [73, 0],
    "Snow": [78, 0],
    "Ice": [79, 0],
    "Snow Block": [80, 0],
    "Cactus": [81, 0],
    "Clay Block": [82, 0],
    "Sugercane": [83, 0],
    "Fence": [85, 0],
    "Glowstone Block": [89, 0],
    "Invisible Bedrock": [95, 0],
    "Trapdoor": [96, 0],
    "North Closed Trapdoor": [96, 0],
    "East Closed Trapdoor": [96, 3],
    "South Closed Trapdoor": [96, 1],
    "West Closed Trapdoor": [96, 2],
    "North Opened Trapdoor": [96, 4],
    "East Opened Trapdoor": [96, 7],
    "South Opened Trapdoor": [96, 5],
    "West Opened Trapdoor": [96, 6],
    "Stone Bricks": [98, 0],
    "Mossy Stone Bricks": [98, 1],
    "Cracked Stone Bricks": [98, 2],
    "Glass Pane": [102, 0],
    "Melon": [103, 0],
    "Melon Stem": [105, 0],
    "Melon Stem 0": [105, 0],
    "Melon Stem 1": [105, 1],
    "Melon Stem 2": [105, 2],
    "Melon Stem 3": [105, 3],
    "Melon Stem 4": [105, 4],
    "Melon Stem 5": [105, 5],
    "Melon Stem 6": [105, 6],
    "Melon Stem 7": [105, 7],
    "Fence Gate": [107, 0],
    "North South Fence Gate": [107, 0],
    "South North Fence Gate": [107, 0],
    "East West Closed Fence Gate": [107, 3],
    "West West Closed Fence Gate": [107, 3],  # NOTE(review): key likely meant "West East"
    "North Opened Fence Gate": [107, 4],
    "East Opened Fence Gate": [107, 5],
    "South Opened Fence Gate": [107, 6],
    "West Opened Fence Gate": [107, 7],
    "Brick Stairs": [108, 2],
    "North Lower Brick Stairs": [108, 2],
    "East Lower Brick Stairs": [108, 1],
    "South Lower Brick Stairs": [108, 3],
    "West Lower Brick Stairs": [108, 0],
    "North Upper Brick Stairs": [108, 6],
    "East Upper Brick Stairs": [108, 5],
    "South Upper Brick Stairs": [108, 7],
    "West Upper Brick Stairs": [108, 4],
    "Stone Brick Stairs": [109, 2],
    "North Lower Stone Brick Stairs": [109, 2],
    "East Lower Stone Brick Stairs": [109, 1],
    "South Lower Stone Brick Stairs": [109, 3],
    "West Lower Stone Brick Stairs": [109, 0],
    "North Upper Stone Brick Stairs": [109, 6],
    "East Upper Stone Brick Stairs": [109, 5],
    "South Upper Stone Brick Stairs": [109, 7],
    "West Upper Stone Brick Stairs": [109, 4],
    "Nether Brick Stairs": [114, 2],
    "North Lower Nether Brick Stairs": [114, 2],
    "East Lower Nether Brick Stairs": [114, 1],
    "South Lower Nether Brick Stairs": [114, 3],
    "West Lower Nether Brick Stairs": [114, 0],
    "North Upper Nether Brick Stairs": [114, 6],
    "East Upper Nether Brick Stairs": [114, 5],
    "South Upper Nether Brick Stairs": [114, 7],
    "West Upper Nether Brick Stairs": [114, 4],
    "Sandstone Stairs": [128, 2],
    "North Lower Sandstone Stairs": [128, 2],
    "East Lower Sandstone Stairs": [128, 1],
    "South Lower Sandstone Stairs": [128, 3],
    "West Lower Sandstone Stairs": [128, 0],
    "North Upper Sandstone Stairs": [128, 6],
    "East Upper Sandstone Stairs": [128, 5],
    "South Upper Sandstone Stairs": [128, 7],
    "West Upper Sandstone Stairs": [128, 4],
    "Block of Quartz": [155, 0],
    "Chiseled Quartz Block": [155, 1],
    "Quartz Pillar": [155, 2],
    "Glowing Obsidian": [246, 0],
    "Nether Reactor Core": [247, 0],
    "Active Nether Reactor Core": [247, 1],
    "Used Nether Reactor Core": [247, 2],
    "Update": [248, 0],
    "Ateupd": [249, 0],
    "Held Grass": [253, 0],
    "Held Leaves": [254, 0],
    "Held Oak Leaves": [254, 0],
    "Held Spruce Leaves": [254, 1],
    "Held Birch Leaves": [254, 2],
    "Fire": [255, 0]
}
# Minecraft (Pi/Pocket Edition) item lookup table: name -> item id (>= 256;
# ids below 256 are blocks, see the Block table above).
# NOTE(review): the misspelled keys "Diamond Pickace" and "Wheet" are kept
# as-is for backward compatibility with existing lookups.
Item = {
    "Iron Shovel": 256,
    "Iron Pickaxe": 257,
    "Iron Axe": 258,
    "Flint and Steel": 259,
    "Apple": 260,
    "Bow": 261,
    "Arrow": 262,
    "Coal": 263,
    "Diamond": 264,
    "Iron Ingot": 265,
    "Gold Ingot": 266,
    "Iron Sword": 267,
    "Wooden Sword": 268,
    "Wooden Shovel": 269,
    "Wooden Pickaxe": 270,
    "Wooden Axe": 271,
    "Stone Sword": 272,
    "Stone Shovel": 273,
    "Stone Pickaxe": 274,
    "Stone Axe": 275,
    "Diamond Sword": 276,
    "Diamond Shovel": 277,
    "Diamond Pickace": 278,
    "Diamond Axe": 279,
    "Stick": 280,
    "Bowl": 281,
    "Mushroom Stew": 282,
    "Gold Sword": 283,
    "Gold Shovel": 284,
    "Gold Pickaxe": 285,
    "Gold Axe": 286,
    "String": 287,
    "Feather": 288,
    "Gunpowder": 289,
    "Wooden Hoe": 290,
    "Stone Hoe": 291,
    "Iron Hoe": 292,
    "Diamond Hoe": 293,
    "Gold Hoe": 294,
    "Seeds": 295,
    "Wheet": 296,
    "Bread": 297,
    "Leather Cap": 298,
    "Leather Tunic": 299,
    "Leather Pants": 300,
    "Leather Boots": 301,
    "Chain Helmet": 302,
    "Chain Chestplate": 303,
    "Chain Leggings": 304,
    "Chain Boots": 305,
    "Iron Helmet": 306,
    "Iron Chestplate": 307,
    "Iron Leggings": 308,
    "Iron Boots": 309,
    "Diamond Helmet": 310,
    "Diamond Chestplate": 311,
    "Diamond Leggings": 312,
    "Diamond Boots": 313,
    "Gold Helmet": 314,
    "Gold Chestplate": 315,
    "Gold Leggings": 316,
    "Gold Boots": 317,
    "Flint": 318,
    "Raw Porkchop": 319,
    "Cooked Porkchop": 320,
    "Painting": 321,
    "Sign": 323,
    "Wooden Door": 324,
    "Iron Door": 330,
    "Snowball": 332,
    "Leather": 334,
    "Brick": 336,
    "Clay": 337,
    "Sugarcane": 338,
    "Paper": 339,
    "Book": 340,
    "Slimeball": 341,
    "Egg": 344,
    "Compass": 345,
    "Clock": 347,
    "Glowstone Dust": 348,  # was 248; 248 is the "Update" block id, the item id is 348
    "Dye": 351,
    "Bone": 352,
    "Sugar": 353,
    "Bed": 356,
    "Shears": 359,
    "Melon": 360,
    "Melon Seeds": 362,
    "Raw Beef": 363,
    "Steak": 364,
    "Raw Chicken": 365,
    "Cooked Chicken": 366,
    "Camera": 456
}
def searchBlock(search="", printName=False):
    """Case-insensitive substring search over the Block table.

    Returns a dict mapping each matching block name to its [id, data]
    pair; when printName is True, also prints each match as it is found.
    """
    needle = search.lower()
    matches = {}
    for name, ids in Block.items():
        if needle in name.lower():
            matches[name] = ids
            if printName == True:
                print(f'{name} ({ids})')
    return matches
def searchItem(search="", printName=False):
    """Case-insensitive substring search over the Item table.

    Returns a dict mapping each matching item name to its id; when
    printName is True, also prints each match as it is found.
    """
    needle = search.lower()
    matches = {}
    for name, item_id in Item.items():
        if needle in name.lower():
            matches[name] = item_id
            if printName == True:
                print(f'{name} ({item_id})')
    return matches
def searchAll(search="", printName=False):
    """Search both the Block and Item tables in one call.

    Delegates to searchBlock() and searchItem() instead of duplicating
    their loops (blocks first, preserving the original scan and print
    order).  The two result dicts are merged; on a name collision the
    Item entry wins, exactly as in the original concatenated-pairs
    implementation.
    """
    results = searchBlock(search, printName)
    results.update(searchItem(search, printName))
    return results
# Interactive demo: prompt for a query and show every matching block/item.
if __name__ == "__main__":
    searchFor = input("Search for a block or item: ")
    print(searchAll(search=searchFor))
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 15 15:31:15 2017

@author: Nadiar

Three small while-loop exercises: count up by twos, count down by twos,
and accumulate a running sum.
"""
# NOTE(review): indentation was lost in extraction; the loop bodies below are
# reconstructed from the obvious reading -- confirm against the original (in
# particular whether exercise 3's final print sits inside or after its loop).

# 1 -- print the even numbers 2..10, then say goodbye
a = 2
while (a <= 10):
    print(a)
    a += 2
print("Goodbye!")

# 2 -- print the even numbers 10 down to 2
print("Hello!")
a = 10
while (a >= 2):
    print(a)
    a -= 2

# 3 -- sum the integers 1..21 and print the total
temp = 0
inc = 1
end = 21
while (end >= inc):
    temp += inc
    inc += 1
print(temp)
# Build-time path configuration template for the DSC/OMI agent: each <TOKEN>
# placeholder is substituted with a concrete path by the packaging scripts.
# NOTE(review): this reads like a shell-sourced config template rather than
# Python (the assignments are valid in both) -- confirm how it is consumed.
CONFIG_BINDIR="<CONFIG_BINDIR>"
CONFIG_LIBDIR="<CONFIG_LIBDIR>"
CONFIG_LOCALSTATEDIR="<CONFIG_LOCALSTATEDIR>"
CONFIG_SYSCONFDIR="<CONFIG_SYSCONFDIR>"
CONFIG_SYSCONFDIR_DSC="<CONFIG_SYSCONFDIR_DSC>"
CONFIG_OAAS_CERTPATH="<OAAS_CERTPATH>"
OMI_LIB_SCRIPTS="<OMI_LIB_SCRIPTS>"
PYTHON_PID_DIR="<PYTHON_PID_DIR>"
DSC_NAMESPACE="<DSC_NAMESPACE>"
DSC_SCRIPT_PATH="<DSC_SCRIPT_PATH>"
DSC_MODULES_PATH="<DSC_MODULES_PATH>"
|
# Read a clock time as "H M" and print the time 45 minutes earlier,
# wrapping around midnight.
#
# The original computed floor-div/mod separately and patched hour == -1 back
# to 23, printing the identical f-string from both branches of an if/else.
# Taking everything modulo one day (1440 minutes) removes the special case
# and the duplicated print while producing the same output for every valid
# input (0 <= h <= 23, 0 <= m <= 59).
h, m = map(int, input().split())
inicio = (h * 60 + m - 45) % 1440  # minutes since midnight of the start time
print(f'{inicio // 60} {inicio % 60}')
|
def get_attribute_or_key(obj, name):
    """Look up *name* on *obj*: as a mapping key for dicts, as an
    attribute otherwise.  Returns None when it is absent."""
    if isinstance(obj, dict):
        value = obj.get(name)
    else:
        value = getattr(obj, name, None)
    return value
|
#
# PySNMP MIB module FASTPATH-QOS-AUTOVOIP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/FASTPATH-QOS-AUTOVOIP-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:58:28 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection")
fastPathQOS, = mibBuilder.importSymbols("FASTPATH-QOS-MIB", "fastPathQOS")
InterfaceIndexOrZero, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndexOrZero")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, IpAddress, ObjectIdentity, MibIdentifier, Unsigned32, NotificationType, Integer32, Gauge32, iso, Counter64, TimeTicks, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "IpAddress", "ObjectIdentity", "MibIdentifier", "Unsigned32", "NotificationType", "Integer32", "Gauge32", "iso", "Counter64", "TimeTicks", "ModuleIdentity")
RowStatus, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "DisplayString", "TextualConvention")
# Module identity for the FASTPATH QOS AutoVoIP MIB (pysmi-generated).
fastPathQOSAUTOVOIP = ModuleIdentity((1, 3, 6, 1, 4, 1, 674, 10895, 5000, 2, 6132, 1, 1, 3, 4))
fastPathQOSAUTOVOIP.setRevisions(('2007-11-23 00:00', '2007-11-23 00:00',))
# Descriptive texts are only attached when the builder is configured to load them.
if mibBuilder.loadTexts: fastPathQOSAUTOVOIP.setLastUpdated('200711230000Z')
if mibBuilder.loadTexts: fastPathQOSAUTOVOIP.setOrganization('Broadcom Corporation')
class PercentByFives(TextualConvention, Unsigned32):
status = 'current'
subtypeSpec = Unsigned32.subtypeSpec + ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(5, 5), ValueRangeConstraint(10, 10), ValueRangeConstraint(15, 15), ValueRangeConstraint(20, 20), ValueRangeConstraint(25, 25), ValueRangeConstraint(30, 30), ValueRangeConstraint(35, 35), ValueRangeConstraint(40, 40), ValueRangeConstraint(45, 45), ValueRangeConstraint(50, 50), ValueRangeConstraint(55, 55), ValueRangeConstraint(60, 60), ValueRangeConstraint(65, 65), ValueRangeConstraint(70, 70), ValueRangeConstraint(75, 75), ValueRangeConstraint(80, 80), ValueRangeConstraint(85, 85), ValueRangeConstraint(90, 90), ValueRangeConstraint(95, 95), ValueRangeConstraint(100, 100), )
class Sixteenths(TextualConvention, Unsigned32):
status = 'current'
subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(0, 16)
agentAutoVoIPCfgGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 674, 10895, 5000, 2, 6132, 1, 1, 3, 4, 1))
agentAutoVoIPTable = MibTable((1, 3, 6, 1, 4, 1, 674, 10895, 5000, 2, 6132, 1, 1, 3, 4, 1, 1), )
if mibBuilder.loadTexts: agentAutoVoIPTable.setStatus('current')
agentAutoVoIPEntry = MibTableRow((1, 3, 6, 1, 4, 1, 674, 10895, 5000, 2, 6132, 1, 1, 3, 4, 1, 1, 1), ).setIndexNames((0, "FASTPATH-QOS-AUTOVOIP-MIB", "agentAutoVoIPIntfIndex"))
if mibBuilder.loadTexts: agentAutoVoIPEntry.setStatus('current')
agentAutoVoIPIntfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 674, 10895, 5000, 2, 6132, 1, 1, 3, 4, 1, 1, 1, 1), InterfaceIndexOrZero())
if mibBuilder.loadTexts: agentAutoVoIPIntfIndex.setStatus('current')
agentAutoVoIPMode = MibTableColumn((1, 3, 6, 1, 4, 1, 674, 10895, 5000, 2, 6132, 1, 1, 3, 4, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentAutoVoIPMode.setStatus('current')
agentAutoVoIPCosQueue = MibTableColumn((1, 3, 6, 1, 4, 1, 674, 10895, 5000, 2, 6132, 1, 1, 3, 4, 1, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentAutoVoIPCosQueue.setStatus('current')
mibBuilder.exportSymbols("FASTPATH-QOS-AUTOVOIP-MIB", fastPathQOSAUTOVOIP=fastPathQOSAUTOVOIP, agentAutoVoIPEntry=agentAutoVoIPEntry, Sixteenths=Sixteenths, agentAutoVoIPIntfIndex=agentAutoVoIPIntfIndex, agentAutoVoIPMode=agentAutoVoIPMode, PercentByFives=PercentByFives, PYSNMP_MODULE_ID=fastPathQOSAUTOVOIP, agentAutoVoIPCosQueue=agentAutoVoIPCosQueue, agentAutoVoIPTable=agentAutoVoIPTable, agentAutoVoIPCfgGroup=agentAutoVoIPCfgGroup)
|
# NOTE(review): bare dict literal — it evaluates and is immediately discarded;
# presumably a sample parameter set for a MIDI-to-video render call. Confirm intent.
{
    "midi_fname": "sample_music/effrhy_131.mid",
    "video_fname": "tests/test_out/start_size.mp4",
    "note_start_height": 0.0,
    # "note_end_height": 0.0,
}
|
# Read seven integers, then split them into even and odd lists.
par = []
impar = []
num = [int(input(f'Digite o {i} numero: ')) for i in range(1, 8)]
for a in num:
    # Route each value to the even (par) or odd (impar) bucket.
    destino = par if a % 2 == 0 else impar
    destino.append(a)
print(num)
print(par)
print(impar)
class Solution:
    def scheduleCourse(self, courses: List[List[int]]) -> int:
        """Return the maximum number of courses that can be completed.

        Greedy: take courses in deadline order; whenever the running total
        exceeds the current deadline, drop the longest course taken so far
        (max-heap of negated durations) to free up time for later courses.
        """
        taken = []      # negated durations of currently kept courses
        elapsed = 0     # total time spent on kept courses
        for duration, deadline in sorted(courses, key=lambda course: course[1]):
            heapq.heappush(taken, -duration)
            elapsed += duration
            if elapsed > deadline:
                # Swap out the longest kept course (popping a negated value
                # subtracts its duration from the elapsed time).
                elapsed += heapq.heappop(taken)
        return len(taken)
|
# AUTOGENERATED BY NBDEV! DO NOT EDIT!

# Maps each exported symbol to the notebook it is defined in.
# NOTE(review): `doc_url` is assigned below but absent from __all__ —
# presumably an artifact of the generator; confirm before relying on it.
__all__ = ["index", "modules", "custom_doc_links", "git_url"]

index = {"get_data": "01_core.ipynb",
         "Dataset": "01_core.ipynb",
         "DataBunch": "01_core.ipynb",
         "Learner": "01_core.ipynb",
         "get_dls": "01_core.ipynb",
         "get_model": "01_core.ipynb",
         "get_learner": "01_core.ipynb",
         "accuracy": "01_core.ipynb",
         "camel2snake": "01_core.ipynb",
         "Callback": "01_core.ipynb",
         "TrainEvalCallback": "01_core.ipynb",
         "listify": "01_core.ipynb",
         "Runner": "01_core.ipynb",
         "AvgStats": "01_core.ipynb",
         "AvgStatsCallback": "01_core.ipynb",
         "Recorder": "01_core.ipynb",
         "ParamScheduler": "01_core.ipynb",
         "annealer": "01_core.ipynb",
         "sched_lin": "01_core.ipynb",
         "sched_cos": "01_core.ipynb",
         "sched_no": "01_core.ipynb",
         "sched_expo": "01_core.ipynb",
         "cos_1cycle_anneal": "01_core.ipynb",
         "torch.Tensor.ndim": "01_core.ipynb",
         "combine_scheds": "01_core.ipynb"}

modules = ["l10_anneal.py"]
doc_url = "https://weilainicolas.github.io/fastai_lai_lib/"
git_url = "https://github.com/weilainicolas/fastai_lai_lib/tree/master/"

# nbdev hook for custom documentation links; this project defines none.
def custom_doc_links(name): return None
|
# function that allows user to filter photos based on ranking
def filterPhotos(photoAlbum, userchoice):
    """Placeholder: display photos from *photoAlbum* ordered by ranking.

    Args:
        photoAlbum: the collection of photos to order.
        userchoice: truthy -> ascending order, falsy -> descending order.

    Returns:
        None. The display logic is not implemented yet.
    """
    # Fixed: removed the C-style semicolons and the unreachable trailing
    # "return;" after the if/else (both branches already returned).
    if userchoice:
        # TODO: display photos in ascending order
        return None
    # TODO: display photos in descending order
    return None
|
def test_about_should_return_200(client):
    """The /about page responds 200 with an HTML body."""
    response = client.get('/about')
    assert response.status_code == 200
    assert response.headers['Content-type'] == 'text/html; charset=utf-8'
|
#-*- coding:utf-8 -*-
# Builds one JSON-style record line from `title`/`text` and appends it to aktar.txt.
title = """"""
text = """""".replace("\n", r"\n")
# "*-*" / "-*-" are placeholders for "{" / "}" so that .format() does not
# treat the literal braces as replacement fields.
template = """\t*-*"title": "{}", "content": "{}"-*-"""
complete_text = template.format(title, text).replace("*-*", "{").replace("-*-", "}")
# Fixed: use a context manager so the file handle is closed even on error
# (the original open()/close() pair leaked the handle if write() raised).
with open("aktar.txt", "a", encoding="utf-8") as file:
    file.write(complete_text+",\n")
# dfs
class Solution:
    def numIslands(self, grid: 'List[List[str]]') -> 'int':
        """Count 4-directionally connected components of "1" cells.

        The grid is mutated in place: visited land cells are marked "-1".
        """
        if grid == []:
            return 0
        rows, cols = len(grid), len(grid[0])

        def sink(r, c):
            # Stop at borders, water ("0"), or already-visited cells ("-1").
            if r < 0 or c < 0 or r >= rows or c >= cols:
                return
            if grid[r][c] == "0" or grid[r][c] == "-1":
                return
            grid[r][c] = "-1"  # mark visited
            for dr, dc in ((1, 0), (-1, 0), (0, 1), (0, -1)):
                sink(r + dr, c + dc)

        count = 0
        for r in range(rows):
            for c in range(cols):
                if grid[r][c] == "1":
                    sink(r, c)
                    count += 1
        return count
|
# Relative path to the database text file
DATABASE_PATH = "resources/anime.txt"

def add_anime(anime_name, episodes, current_episode_=0):
    """
    Function to add a new anime to the database /resources/anime.txt

    Records are stored one per line in the form "name~current~total".

    Args:
        anime_name (str): Name of the anime.
        episodes (int): Total episodes in the anime.
        current_episode_ (int) : Current episode count. Default is 0 for new anime.

    Returns:
        None
    """
    with open(DATABASE_PATH, "a") as file:
        # Fixed: write() instead of writelines() — we append a single string,
        # not an iterable of lines.
        file.write(f"{anime_name}~{current_episode_}~{episodes}\n")
    print(f"\nSuccess! Added {anime_name} to database\n\n")
def anime_exists(anime_name):
    """
    Check whether an anime is already present in the database.

    The match is a substring match against the name field of each record.

    Args:
        anime_name (str): Name of the anime.

    Returns:
        bool: True if the anime exists else False.
    """
    with open(DATABASE_PATH, "r") as file:
        return any(anime_name in record.split("~")[0] for record in file)
def update_anime(anime_name, episode):
    """
    Function to update the episodes watched till now of the particular anime.

    Rewrites the database file with the matching record updated and all
    records sorted alphabetically.

    Args:
        anime_name (str): Name of the anime.
        episode (int): The current episodes.

    Returns:
        None
    """
    with open(DATABASE_PATH, "r") as file:
        anime_data = file.readlines()
    anime_full_name = None
    for idx, record in enumerate(anime_data):
        fields = record.split("~")
        if anime_name in fields[0]:
            anime_full_name = fields[0]
            # fields[2] keeps its trailing newline, so the rebuilt record
            # stays properly terminated.
            anime_data[idx] = f"{fields[0]}~{episode}~{fields[2]}"
            break
    if anime_full_name is None:
        # Fixed: previously a miss fell through to the success print and
        # raised NameError on the undefined anime_full_name (after having
        # rewritten the file anyway). Fail gracefully without touching it.
        print(f"\n{anime_name} was not found in the database.\n\n")
        return
    anime_data.sort()
    with open(DATABASE_PATH, "w") as file:
        file.writelines(anime_data)
    print(f"\nSuccess! Updated the episode count of {anime_full_name}!\n\n")
def anime_progress(anime_name):
    """
    Function to list the current progress of the anime.

    We read the data from the database.

    Args:
        anime_name (str): Name of the anime.

    Returns:
        (tuple): (Full name of anime, Current episode, Total episodes),
        all as strings, or None when no record matches.
    """
    with open(DATABASE_PATH, "r") as file:
        for record in file:
            fields = record.split("~")
            if anime_name in fields[0]:
                return (fields[0], fields[1], fields[2].rstrip("\n"))
def anime_progress_all():
    """
    Function to list the current progress of all the anime in the database.

    Returns:
        tuple of tuples: ((anime-name, current-episode, total-episode), ...)
    """
    with open(DATABASE_PATH, "r") as file:
        return tuple(
            (fields[0], fields[1], fields[2].rstrip("\n"))
            for fields in (record.split("~") for record in file)
        )
def main():
    """Interactive menu loop for the anime tracker.

    Loops until the user enters "5"; options add a new anime, update or
    view progress, and list everything currently being watched.
    """
    while True:
        choice = input("""Kon'nichiwa! Hajimemashite!
        What do you want to do?
        1. Add new anime
        2. Update progress on existing anime
        3. View progress on existing anime
        4. All currently watching anime
        5. Exit
        Enter your choice:> """)
        if choice == "1":
            anime = input("Enter the name of the anime:> ")
            current_episode = int(input("Enter the current episode number:> "))
            total_episodes = int(input("Enter the total number of episodes:> "))
            if not anime_exists(anime):
                add_anime(anime, total_episodes, current_episode)
            else:
                print("Anime already exists")
        elif choice == "2":
            anime = input("Enter the name of the anime:> ")
            if anime_exists(anime):
                current_episode = int(input("Enter the current episode number:> "))
                update_anime(anime, current_episode)
            else:
                print("\nThe anime does not exist.\n")
        elif choice == "3":
            anime = input("Enter the name of the anime:> ")
            if anime_exists(anime):
                anime_full_name, current, total = anime_progress(anime)
                print("\nProgress:")
                print(f"{anime_full_name}: {current}/{total}\n")
            else:
                print("\nThe anime does not exist.\n")
        elif choice == "4":
            all_anime_ = anime_progress_all()
            # Fixed: compared the module-level `all_anime` (captured once at
            # startup, NameError if main() is called on its own) instead of
            # the fresh `all_anime_` fetched just above.
            if all_anime_ == ():
                print("You are watching no anime currently\n")
            else:
                print("\nCurrent Watch list:\n")
                for anime in all_anime_:
                    print(f"{anime[0]}: {anime[1]}/{anime[2]}\n", end="")
                print()
        elif choice == "5":
            break
        else:
            print("Invalid input\n")
# Script entry point: show the saved watch list once, then start the menu loop.
if __name__ == '__main__':
    print("Current Watch list:\n")
    all_anime = anime_progress_all()
    if all_anime == ():
        print("You are watching no anime currently\n")
    else:
        for anime in all_anime:
            print(f"{anime[0]}: {anime[1]}/{anime[2]}\n", end="")
        print()
    main()
|
# Definitions
# Labels used as per-state keys inside the hs_distribution maps below.
# (IGNORE is not referenced in this file's visible code — presumably used
# by callers elsewhere; confirm before removing.)
EXECUTING = 'executing'
QUEUED = 'queued'
PLEDGED = 'pledged'
IGNORE = 'ignore'
class Node(object):
    """Minimal tree node: holds an ordered list of child nodes."""

    def __init__(self):
        self.children = []

    def add_child(self, node):
        """Append *node* to this node's children."""
        self.children.append(node)

    def get_leaves(self, leaves=None):
        """Return a list of all leaf nodes in the subtree rooted here.

        Fixed: the accumulator used to default to a mutable [] that was
        shared across calls, so every call to get_leaves() kept appending
        to the same list.
        """
        if leaves is None:
            leaves = []
        # If the node has no children, it is itself a leaf
        if not self.children:
            leaves.append(self)
            return leaves
        # Recursively get to the bottom
        for child in self.children:
            child.get_leaves(leaves)
        return leaves
class Share(Node):
    """
    Implement the share node.

    A Share is a tree node carrying scheduling metadata. Multiplying a
    Share by a number scales its ``value`` in place and returns the new
    value (historical contract, kept for existing callers).
    """

    # Every attribute assigned by __init__; each is reset to None first.
    _attributes = ('name', 'value', 'parent', 'prodsourcelabel', 'workinggroup', 'campaign', 'processingtype',
                   'transpath', 'vo', 'rtype', 'queue_id', 'throttled')

    def __str__(self, level=0):
        """
        Print the tree structure
        """
        ret = "{0} name: {1}, value: {2}\n".format('\t' * level, self.name, self.value)
        for child in self.children:
            ret += child.__str__(level + 1)
        return ret

    def __repr__(self):
        return self.__str__()

    def __mul__(self, other):
        """
        If I multiply a share object by a number, multiply the value field
        in place and return the new value.
        """
        self.value *= other
        return self.value

    def __rmul__(self, other):
        # Fixed: previously returned the bound method object (self.__mul__)
        # instead of calling it, so `n * share` yielded a method, not a value.
        return self.__mul__(other)

    def __imul__(self, other):
        # Fixed: previously returned the bound method object, so after
        # `share *= n` the name was rebound to a method. Scale in place and
        # return the Share itself, as the in-place protocol requires.
        self.__mul__(other)
        return self

    def __init__(self, name, value, parent, prodsourcelabel, workinggroup, campaign, processingtype,
                 transpath, rtype, vo, queue_id, throttled):
        # Create default attributes
        for attr in self._attributes:
            setattr(self, attr, None)

        Node.__init__(self)
        self.name = name
        self.value = value
        self.parent = parent
        self.prodsourcelabel = prodsourcelabel
        self.workinggroup = workinggroup
        self.campaign = campaign
        self.processingtype = processingtype
        self.transpath = transpath
        self.rtype = rtype
        self.vo = vo
        self.queue_id = queue_id
        self.throttled = throttled

    def pretty_print_hs_distribution(self, hs_distribution, level=0):
        """Render the subtree with executing|pledged|queued values in thousands."""
        # Narrowed from bare except: a missing node entry raises KeyError,
        # a None entry raises TypeError; both mean "treat as zero".
        try:
            executing = hs_distribution[self.name][EXECUTING]/1000.0
        except (KeyError, TypeError):
            executing = 0
        try:
            target = hs_distribution[self.name][PLEDGED]/1000.0
        except (KeyError, TypeError):
            target = 0
        try:
            queued = hs_distribution[self.name][QUEUED]/1000.0
        except (KeyError, TypeError):
            queued = 0
        ret = "{0} name: {1}, values: {2:.1f}k|{3:.1f}k|{4:.1f}k\n".format('\t' * level, self.name, executing, target, queued)
        for child in self.children:
            ret += child.pretty_print_hs_distribution(hs_distribution, level + 1)
        return ret

    def normalize(self, multiplier=100, divider=100):
        """
        Will run down the branch and normalize values beneath
        """
        self.value *= (multiplier * 1.0 / divider)
        if not self.children:
            return
        # Children are rescaled so their values sum to this node's value.
        divider = 0
        for child in self.children:
            divider += child.value
        multiplier = self.value
        for child in self.children:
            child.normalize(multiplier=multiplier, divider=divider)
        return

    def sort_branch_by_current_hs_distribution(self, hs_distribution):
        """
        Runs down the branch in order of under-pledging. It returns a list
        of sorted leave shares.
        """
        sorted_shares = []
        # If the node has no leaves, return the node in a list
        if not self.children:
            sorted_shares = [self]
            return sorted_shares
        # If the node has leaves, insertion-sort the children by their
        # executing/pledged ratio (most under-pledged first).
        children_sorted = []
        for child1 in self.children:
            loop_index = 0
            insert_index = len(children_sorted)  # insert at the end, if not deemed otherwise
            # Calculate under-pledging
            try:
                child1_under_pledge = hs_distribution[child1.name][EXECUTING] * 1.0 / hs_distribution[child1.name][PLEDGED]
            except ZeroDivisionError:
                child1_under_pledge = 10**6  # Initialize to a large default number
            for child2 in children_sorted:
                try:
                    # Calculate under-pledging
                    child2_under_pledge = hs_distribution[child2.name][EXECUTING] * 1.0 / hs_distribution[child2.name][PLEDGED]
                except ZeroDivisionError:
                    child2_under_pledge = 10 ** 6  # Initialize to a large default number
                except KeyError:
                    continue  # Does not exist
                if child1_under_pledge < child2_under_pledge:
                    insert_index = loop_index
                    break
                loop_index += 1
            # Insert the child into the list
            children_sorted.insert(insert_index, child1)
        # Go recursively and sort the grand* children
        for child in children_sorted:
            sorted_shares.extend(child.sort_branch_by_current_hs_distribution(hs_distribution))
        return sorted_shares

    def aggregate_hs_distribution(self, hs_distribution):
        """
        We have the current HS distribution values for the leaves, but want
        to propagate it upwards to the parents. We traverse the tree from
        top to bottom and bring up the aggregated values.
        """
        executing, queued, pledged = 0, 0, 0
        # If the node has no children, it's a leaf and should have an entry
        # in the hs_distribution
        if not self.children:
            try:
                executing = hs_distribution[self.name][EXECUTING]
                queued = hs_distribution[self.name][QUEUED]
                pledged = hs_distribution[self.name][PLEDGED]
            except KeyError:
                pass
            return executing, queued, pledged
        # If the node has children, sum up the values of the children
        executing = 0
        queued = 0
        pledged = 0
        for child in self.children:
            executing_child, queued_child, pledged_child = child.aggregate_hs_distribution(hs_distribution)
            executing += executing_child
            queued += queued_child
            pledged += pledged_child
        # Add the aggregated value to the map
        hs_distribution[self.name] = {
            EXECUTING: executing,
            QUEUED: queued,
            PLEDGED: pledged
        }
        # Return the aggregated values
        return executing, queued, pledged

    @classmethod
    def column_names(cls):
        """Return the attribute names as a single comma-separated string."""
        # Modernized: decorator instead of the legacy
        # `column_names = classmethod(column_names)` rebinding; join instead
        # of manual string accumulation.
        return ','.join(cls._attributes)
def tema(msg):
    """Print *msg* framed above and below by '~' lines (message length + 4)."""
    borda = '~' * (len(msg) + 4)
    print(borda)
    print(f' {msg}')
    print(borda)
# main program: demo calls for the tema() banner helper
tema('Olá mundo')
tema('Controle de Números')
tema('Gerador de Cartelas')
#
# Copyright (c) 2017 Joy Diamond. All rights reserved.
#
@gem('Topaz.Cache')
def gem():
    """Exercise the conjure_* caches: conjuring with equal arguments must
    return the identical object (asserted with `is`).

    NOTE(review): `gem`, `require_gem`, `conjure_*`, `share`, and `line`
    are provided by the project's Gem/Topaz framework, not this file.
    """
    require_gem('Gem.Cache2')
    require_gem('Topaz.Core')
    require_gem('Topaz.CacheSupport')
    #
    #   Specific instances
    #
    eight = conjure_number('eight', 8)
    five = conjure_number('five', 5)
    four = conjure_number('four', 4)
    nine = conjure_number('nine', 9)
    one = conjure_number('one', 1)
    seven = conjure_number('seven', 7)
    six = conjure_number('six', 6)
    three = conjure_number('three', 3)
    two = conjure_number('two', 2)
    zero = conjure_number('zero', 0)
    red = conjure_color('red')
    white = conjure_color('white')
    purple = conjure_color('purple')
    green = conjure_color('green')
    silver = conjure_color('silver')
    black = conjure_color('black')
    blue = conjure_color('blue')
    yellow = conjure_color('yellow')
    cyan = conjure_color('cyan')
    circle = conjure_shape('circle')
    ellipse = conjure_shape('ellipse')
    moon = conjure_shape('moon')
    pentagon = conjure_shape('pentagon')
    oval = conjure_shape('oval')
    square = conjure_shape('square')
    polygon = conjure_shape('polygon')
    star = conjure_shape('star')
    trapazoid = conjure_shape('trapazoid')
    triangle = conjure_shape('triangle')
    # Re-conjuring each value must hand back the exact same cached object.
    def test_conjure_again():
        assert one is conjure_number('one', 1)
        assert two is conjure_number('two', 2)
        assert zero is conjure_number('zero', 0)
        assert three is conjure_number('three', 3)
        assert four is conjure_number('four', 4)
        assert five is conjure_number('five', 5)
        assert six is conjure_number('six', 6)
        assert seven is conjure_number('seven', 7)
        assert eight is conjure_number('eight', 8)
        assert nine is conjure_number('nine', 9)
        assert black is conjure_color('black')
        assert blue is conjure_color('blue')
        assert cyan is conjure_color('cyan')
        assert green is conjure_color('green')
        assert purple is conjure_color('purple')
        assert red is conjure_color('red')
        assert silver is conjure_color('silver')
        assert white is conjure_color('white')
        assert yellow is conjure_color('yellow')
        assert circle is conjure_shape('circle')
        assert ellipse is conjure_shape('ellipse')
        assert moon is conjure_shape('moon')
        assert oval is conjure_shape('oval')
        assert pentagon is conjure_shape('pentagon')
        assert polygon is conjure_shape('polygon')
        assert square is conjure_shape('square')
        assert star is conjure_shape('star')
        assert trapazoid is conjure_shape('trapazoid')
        assert triangle is conjure_shape('triangle')
    @share
    def test_conjure_single():
        test_conjure_again()
        line('PASSED: conjure_single')
|
"""
Author: Resul Emre AYGAN
"""
"""
Project Description: Sum of positive
You get an array of numbers, return the sum of all of the positives ones.
Example [1,-4,7,12] => 1 + 7 + 12 = 20
Note: if there is nothing to sum, the sum is default to 0.
"""
def positive_sum(arr):
return sum([i for i in arr if i > 0])
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 5 16:03:43 2018
@author: James Jiang
"""
# Closed-form count of reduction steps for the molecule puzzle:
# every element (token starting with a capital letter) costs one step,
# except the Rn/Ar pairs (parentheses) and Y separators (commas).
with open('Data.txt') as data_file:
    # Fixed: the original open() leaked the file handle.
    all_lines = [line.rstrip('\n') for line in data_file]
del all_lines[-2]
original_molecule = all_lines[-1]

# set gives O(1) membership tests instead of scanning a 26-element list.
capital_letters = set('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
count_elements = sum(1 for letter in original_molecule if letter in capital_letters)

count_Rn = original_molecule.count('Rn')
count_Ar = original_molecule.count('Ar')
count_Y = original_molecule.count('Y')

print(count_elements - (count_Rn + count_Ar) - 2*count_Y - 1)
|
# Help/description strings for command-line arguments.
# NOTE(review): "%(metavar)s" is an argparse-style placeholder — presumably
# these strings feed argparse definitions elsewhere; confirm against callers.
ENV = 'directory that defines e.g. the boxes'
WORKSPACE = 'workspace directory'
BEAD_REF = '''
bead to load data from
- either an archive file name or a bead name
'''
INPUT_NICK = (
    'name of input,'
    + ' its workspace relative location is "input/%(metavar)s"')
BOX = 'Name of box to store bead'
|
# Slicing semantics smoke test: empty list — every slice of [] is [].
a = []
assert a[:] == []
assert a[:2**100] == []
assert a[-2**100:] == []
assert a[::2**100] == []
assert a[10:20] == []
assert a[-20:-10] == []
# Two-element list: out-of-range bounds clamp, huge steps take one element.
b = [1, 2]
assert b[:] == [1, 2]
assert b[:2**100] == [1, 2]
assert b[-2**100:] == [1, 2]
assert b[2**100:] == []
assert b[::2**100] == [1]
assert b[-10:1] == [1]
assert b[0:0] == []
assert b[1:0] == []
# A zero step is invalid and must raise ValueError.
try:
    _ = b[::0]
except ValueError:
    pass
else:
    assert False, "Zero step slice should raise ValueError"
# Negative steps walk backwards; stop is exclusive.
assert b[::-1] == [2, 1]
assert b[1::-1] == [2, 1]
assert b[0::-1] == [1]
assert b[0:-5:-1] == [1]
assert b[:0:-1] == [2]
assert b[5:0:-1] == [2]
# Larger negative steps on a ten-element list.
c = list(range(10))
assert c[9:6:-3] == [9]
assert c[9::-3] == [9, 6, 3, 0]
assert c[9::-4] == [9, 5, 1]
assert c[8::-2**100] == [8]
assert c[7:7:-2] == []
assert c[7:8:-2] == []
# Strings slice the same way as lists.
d = "123456"
assert d[3::-1] == "4321"
assert d[4::-3] == "52"
# slice() constructor: 1-arg sets stop only; 2-/3-arg set start/stop/step.
slice_a = slice(5)
assert slice_a.start is None
assert slice_a.stop == 5
assert slice_a.step is None
slice_b = slice(1, 5)
assert slice_b.start == 1
assert slice_b.stop == 5
assert slice_b.step is None
slice_c = slice(1, 5, 2)
assert slice_c.start == 1
assert slice_c.stop == 5
assert slice_c.step == 2
# Subscripting a user class with ":" passes a slice object to the dunders.
class SubScript(object):
    def __getitem__(self, item):
        assert type(item) == slice
    def __setitem__(self, key, value):
        assert type(key) == slice
ss = SubScript()
_ = ss[:]
ss[:1] = 1
|
class Class:
    """A school class that can enroll a capped number of students."""

    __students_count = 22  # remaining free seats (name-mangled)

    def __init__(self, name):
        self.name = name
        self.students = []
        self.grades = []

    def add_student(self, name, grade):
        """Enroll a student unless the class is already full."""
        if self.__students_count == 0:
            return
        self.students.append(name)
        self.grades.append(float(grade))
        self.__students_count -= 1

    def get_average_grade(self):
        """Arithmetic mean of all recorded grades."""
        return sum(self.grades) / len(self.grades)

    def __repr__(self):
        return f'The students in {self.name}: {", ".join(self.students)}. Average grade: {self.get_average_grade():.2f}'
a_class = Class("11B")
a_class.add_student("Peter", 4.80)
a_class.add_student("George", 6.00)
a_class.add_student("Amy", 3.50)
print(a_class) |
#!/usr/bin/env python3
#p3_180824_2354.py
# Search synonym #2
# Format:
# Number_of_lines
# Key (space) Value
# Key_word
def main():
    """Read the synonym table from stdin and answer one lookup query."""
    count = input()
    synonyms = getDictionary(count)
    query = input()
    checkDictionary(synonyms, query)
# Fill dictionary with input lines
def getDictionary(numberOfLines):
    """Read *numberOfLines* "key value" lines from stdin into a dict.

    Each line must contain exactly one space separating key and value.
    """
    # Fixed: removed the dead pre-initialized dict and the temp-list dance —
    # build the pair list and convert it in one step.
    pairs = [input().split(' ') for _ in range(int(numberOfLines))]
    return dict(pairs)
# Get the word we're looking for
# Not used
def getKeySearchWord(synonymDict):
    """Return the key located just past the count entry (legacy helper)."""
    keys = list(synonymDict.keys())
    # The first key holds the line count; the search word sits one past it.
    offset = int(keys[0]) + 1
    return keys[offset]
# Get a synonym for key-word
def checkDictionary(synonymDict, keyWord):
    """Print the synonym of *keyWord*, inserting it under that key first.

    Mutates *synonymDict*: when the key word appears as a value, it is
    added as a key mapping to its partner.
    """
    for left, right in synonymDict.items():
        if left == keyWord:
            synonymDict.update({keyWord: right})
            break
        if right == keyWord:
            synonymDict.update({keyWord: left})
            break
    print(synonymDict[keyWord])
# Run interactively only when executed as a script.
if __name__ == '__main__':
    main()
|
# Distribution metadata for the cli_command_parser package.
__title__ = 'cli_command_parser'
__description__ = 'CLI Command Parser'
__url__ = 'https://github.com/dskrypa/cli_command_parser'
__version__ = '2022.06.06'
__author__ = 'Doug Skrypa'
__author_email__ = 'dskrypa@gmail.com'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2022 Doug Skrypa'
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Evgenii sopov <mrseakg@gmail.com>
# pylint: disable=relative-beyond-top-level,missing-function-docstring
"""cpplint list of errors"""
def error_line_too_long(parsed_line):
    """Report check 00001: the line exceeds the allowed length."""
    message = "error(00001): Line too long {}:{}".format(
        parsed_line.get_filename(),
        parsed_line.get_number_of_line(),
    )
    print(message)
def error_whitespace_after_equal(parsed_line):
    """Report check 00002: missing whitespace after '='."""
    message = "error(00002): Expected whitespace after equal {}:{}".format(
        parsed_line.get_filename(),
        parsed_line.get_number_of_line(),
    )
    print(message)
def error_whitespace_before_equal(parsed_line):
    """Report check 00003: missing whitespace before '='."""
    # Fixed: the emitted message misspelled "Expected" as "Expecetd".
    message = "error(00003): Expected whitespace before equal {}:{}".format(
        parsed_line.get_filename(),
        parsed_line.get_number_of_line(),
    )
    print(message)
def error_whitespace_comment(parsed_line):
    """Report check 00004: missing whitespace after a '//' comment marker."""
    message = "error(00004): Expected whitespace after '//' {}:{}".format(
        parsed_line.get_filename(),
        parsed_line.get_number_of_line(),
    )
    print(message)
def error_whitespace_end_of_line(parsed_line):
    """Report check 00005: trailing whitespace at end of line."""
    message = "error(00005): Excess whitespaces on end of line {}:{}".format(
        parsed_line.get_filename(),
        parsed_line.get_number_of_line(),
    )
    print(message)
def error_conf_unsupported_param(pc_name, _cnf_file, count):
    """Build check 00006 text: unknown parameter *pc_name* in a config file."""
    return """error(00006): Unsupported parameter {}
    in line {}:{}
    """.format(pc_name, _cnf_file, count)
def error_conf_alredy_defined(checker_id, _cnf_name):
    """Build check 00007 text: duplicate configuration name *_cnf_name*.

    (Function name typo "alredy" kept: it is the public interface.)
    """
    return """error(00007): Configuration name '{}' already defined
    found in {}
    """.format(_cnf_name, checker_id)
def error_conf_expected_eq(line_content, _cnf_file, count):
    """Build check 00008 text: a config line is missing its '='."""
    return """error(00008): Expected '=' in line
    line_content = {}
    in line {}:{}
    """.format(line_content, _cnf_file, count)
def error_conf_in_regexp(pc_name, pc_value, err, _cnf_file, count):
    """Build check 00009 text: a config regexp failed to compile."""
    return """error(00009): Problem with regexp
    name = {}
    value = {}
    error = {}
    in line {}:{}
    """.format(pc_name, pc_value, str(err), _cnf_file, count)
def error_conf_file_not_exists(_cnf_file):
    """Build check 00010 text: the referenced config file is missing."""
    # Fixed: message grammar ("File did not exists" -> "File does not exist").
    return """error(00010): File does not exist
    name = {}
    """.format(_cnf_file)
def error_could_not_parse_line(_line, _file, _number_of_line):
    """Build check 00011 text: a whole line could not be parsed."""
    return """error(00011): Could not parse line
    line content = {}
    in line = {}:{}
    """.format(_line, _file, _number_of_line)
def error_not_parse_line_character(_char, _line, _file, _number_of_line):
    """Build check 00012 text: a single character could not be parsed."""
    return """error(00012): Could not parse char '{}'
    line content = {}
    in line = {}:{}
    """.format(_char, _line, _file, _number_of_line)
|
# Based and improved from https://github.com/piratecrew/rez-python
# NOTE(review): rez package definition — `scope`, `env`, and the top-level
# names below are consumed by the rez build system; not runnable standalone.
name = "python"
version = "2.7.16"
authors = [
    "Guido van Rossum"
]
description = \
    """
    The Python programming language.
    """
requires = [
    "cmake-3+",
    "gcc-6+"
]
variants = [
    ["platform-linux"]
]
# Executables exposed on PATH by this package.
tools = [
    "2to3",
    "idle",
    "pip",
    "pip2.7",
    "pip2",
    "pydoc",
    "python-config",
    "python",
    "python2-config",
    "python2.7-config",
    "python2.7",
    "python2",
    "smtpd.py"
]
build_system = "cmake"
with scope("config") as config:
    config.build_thread_count = "logical_cores"
uuid = "python-{version}".format(version=str(version))
# Executed by rez when the package is resolved into an environment.
def commands():
    env.PATH.prepend("{root}/bin")
    env.LD_LIBRARY_PATH.prepend("{root}/lib")
    env.PKG_CONFIG_PATH.prepend("{root}/lib/pkgconfig")
    # Helper environment variables.
    env.PYTHON_BINARY_PATH.set("{root}/bin")
    env.PYTHON_INCLUDE_PATH.set("{root}/include")
    env.PYTHON_LIBRARY_PATH.set("{root}/lib")
|
def sda_to_rgb(im_sda, I_0):
    """Transform input SDA image or matrix `im_sda` into RGB space. This
    is the inverse of `rgb_to_sda` with respect to the first parameter

    Parameters
    ----------
    im_sda : array_like
        Image (MxNx3) or matrix (3xN) of pixels
    I_0 : float or array_like
        Background intensity, either per-channel or for all channels

    Note
    ----
    For compatibility purposes, passing I_0=None invokes the behavior of
    od_to_rgb.

    See Also
    --------
    histomicstk.preprocessing.color_conversion.rgb_to_sda,
    histomicstk.preprocessing.color_conversion.od_to_rgb,
    histomicstk.preprocessing.color_deconvolution.color_deconvolution,
    histomicstk.preprocessing.color_deconvolution.color_convolution
    """
    is_matrix = im_sda.ndim == 2
    if is_matrix:
        im_sda = im_sda.T
    # od_to_rgb compatibility mode: I_0 of None means optical-density input.
    od_mode = I_0 is None
    if od_mode:
        I_0 = 256
    im_rgb = I_0 ** (1 - im_sda / 255.)
    if is_matrix:
        im_rgb = im_rgb.T
    # In od mode the boolean subtracts 1, mapping the range back to 0..255;
    # otherwise it subtracts 0 and is a no-op.
    return im_rgb - od_mode
|
# Landing advice from the current altitude:
# <=1000 ft -> safe, 1000..5000 ft -> descend first, above -> abort.
altitude = int(input("enter the current altitude : "))
if altitude <= 1000:
    print(" SAFE to land")
elif 1000 < altitude <= 5000:
    print("bring it down to 1000ft")
else:
    print("turn around")
# Definition for singly-linked list.
# class ListNode:
#     def __init__(self, x):
#         self.val = x
#         self.next = None

class Solution:
    def hasCycle(self, head: Optional[ListNode]) -> bool:
        """Floyd tortoise-and-hare: True iff the list contains a cycle.

        The fast pointer advances two nodes per step, the slow pointer one;
        they can only meet if the list loops. Node comparison is by
        identity, exactly as the original's ``==`` (ListNode defines no
        __eq__), and the None checks cover empty and single-node lists.
        """
        slow = fast = head
        while fast is not None and fast.next is not None:
            slow = slow.next
            fast = fast.next.next
            if slow is fast:
                return True
        return False
# ***************************************
# ******** Manual Configurations ********
# ***************************************
# NOTE(review): several paths below are machine-specific absolute Windows
# paths — consider moving them to a local, untracked configuration file.
# ----------------------------------------------------------------
# -------- Part 1, meta-network generation settings ----------
# ----------------------------------------------------------------
# 1. What is the path to the folder containing the BLAST result files ? Please end all folders with a single slash ('/').
blast_results_path = 'C:/Users/Tokuriki Lab/Documents/Sync/Projects/MBL SSN 2020/mbl_cdhit50_all_by_all/'
# 2. Which files in the input folder should be used for this analysis ? Setting None will use all files in the input folder.
# blast_results_files = ['b1_all_by_all.txt','b2_vs_b1_all_blast.txt','b2_vs_b1_all_as_subject.txt']
blast_results_files = None
# 3. Which columns (1st column is 0, 2nd is 1, etc) in the blast files contain the following information ?
# Please ensure this is true for all files given as input.
query_col = 0
subject_col = 1
bitscore_col = 3
# Obsolete : clustering thresholds now provided directly through command line
# 4. At what bitscore threshold should sequences be grouped together into a metanode ?
# metanode_bitscore_range = range(100,310,10)
# 5. At what bitscore threshold should edge connections be ignored for the network ?
# fixed_min_bitscore = None
# bitscore_gap = 10
# 6. Enter the path to the file used to limit the sequences used in the analysis ? Only sequence IDs present in the file will be used in the analysis.
# If set to None, ther filter is ignored.
# The file should be formatted to have a single sequence ID on each line
filter_path = 'input/filters/'
filter_files = []
# filter_files = ['b1_subgroup_all_reps.txt']
# 7. What should the output be named? The label given should only describe the input data, as the bitscore cut-offs and other information will be added as suffixes automatically.
output_label = 'mbl_full_cdhit50'
# output_label = 'b1_subgroup'
# 8. Where should the output be placed ? This will create a directory relative to the scripts running location.
# output_dir = 'result_tiered/'
# output_dir = 'cluster_extraction/'
output_dir = 'metaSSN_v4_test/'
# use_old_metanodes = True # use old if found
# 9. Out put member names can be remapped, supply a mapping file with the old name in the first column and the new name in the second
# names not found in the file will be left as is.
mapping_path = 'C:/Users/Tokuriki Lab/Documents/Sync/Projects/MBL SSN 2020/sequences/'
member_name_mapping = []
# ----------------------------------------------------------------
# ------- Part 2, resolving sequence clustering, OPTIONAL --------
# ----------------------------------------------------------------
# 1. Should member sequences in the metanodes be analysed for cluster members ? Only applicable if each member sequence is the representative of a cluster of sequences,
# and should be set to False otherwise.
# analyze_clustering = True
# use_old_clustering = True
# 2. What is the path to the folder containing clustering information ?
cluster_info_path = 'C:/Users/Tokuriki Lab/Documents/Sync/Projects/MBL SSN 2020/sequences/'
# 3. Describe each file in the input folder using the following Dictionary.
# Each top level key should be the name of the file, and the value should be a dictionary specifying if the file is 1) a 'cdhit' cluster or a 'table',
# 2) a label to give data extracted from this clustering, 3) the table delimiter, 4) the key column of the table for matching sequence IDs,
# 5) the column in the table that contains member information, and 6) the delimiter that separates cluster members in the table's string.
# Properties 3-6 are for tables only and can be set to None for cdhit files. Properties 4-5 should be given with 0 as the 1st column, 1 as the 2nd, etc.
# For heirarchical clustering of cdhit files, only the lowest identity file needs to be assigned, the remainder should be provided below.
# label currently unused, doesn't make sense, since clustering may not necessarily mean distinct datasets
cluster_file_info = {
    'mbl_cdhit50.clstr': {'format':'cdhit', 'delim':None, 'key_col':None, 'member_col':None, 'member_delim':None}
    # 'B1_230_to_300_cdhit.clstr': {'format':'cdhit', 'label':'metagenome', 'delim':None, 'key_col':None, 'member_col':None, 'member_delim':None},
    # 'CARD_b1_cdhit70.clstr': {'format':'cdhit', 'label':'B1', 'delim':None, 'key_col':None, 'member_col':None, 'member_delim':None},
    # 'CARD_b2_cdhit70.clstr': {'format':'cdhit', 'label':'B2', 'delim':None, 'key_col':None, 'member_col':None, 'member_delim':None},
    # 'uniref50_IPR001279.tab': {'format':'table', 'label':'uniprot', 'delim':'\t', 'key_col':0, 'member_col':4, 'member_delim':';'}
}
# 4. For cdhit based clustering, describe heirarchical clustering, if any. Use a dictionary where the key is the clustering with the lowest identity, and
# the value is a list of the other files in the heirarchy in ascending order of identity cut-off.
# If not, give None as the input.
# example
# cdhit_heirarchy = { '40%.clstr': ['70%.clstr','90%.clstr'] }
cdhit_heirarchy = {'mbl_cdhit50.clstr': ['mbl_cdhit70.clstr','mbl_cdhit90.clstr'] }
# ----------------------------------------------------------------
# ------- Part 3, extra annotation, OPTIONAL --------
# ----------------------------------------------------------------
# 1. Would you like to add extra annotations ?
# annot_metanodes = True
# 2. Provide the path to the folder containing the annotation files.
annot_path = 'input/annot/'
# 3. Provide a list of files to annotate by membership count. Each entry should be a dictionary
# that specifies the filename, the annotation label and the level at which the annotations should be applied (metanode members vs all sequences in metanode)
# Each file should be a series of sequence IDs separated by new lines. The IDs should be non-redundant.
# NOTE: the same file may not work for both 'member' and 'sequence' level extractions, since representative members may have different names than at the individual sequence level
# such as UniRef vs UniProt IDs
membership_count_annot = [
    {'files':'b1_refs.txt','id_col':0,'data_col':None,'delim':'\t','label':'CARD B1','level':'sequence'},
    {'files':'b2_refs.txt','id_col':0,'data_col':None,'delim':'\t','label':'CARD B2','level':'sequence'},
    {'files':'b3_refs.txt','id_col':0,'data_col':None,'delim':'\t','label':'CARD B3','level':'sequence'},
    {'files':'mbl_swissprot_acc.txt','id_col':0,'data_col':None,'delim':'\t','label':'SwissProt','level':'sequence'},
    {'files':'mbl_uniprot_acc.txt','id_col':0,'data_col':None,'delim':'\t','label':'UniProt','level':'sequence'},
    {'files':'jgi_headers.txt','id_col':0,'data_col':None,'delim':'\t','label':'JGI','level':'sequence'}
    # {'files':'b1_refs.txt','id_col':0,'data_col':None,'delim':'\t','label':'known B1','level':'member'},
    # {'files':'b1_refs.txt','id_col':0,'data_col':None,'delim':'\t','label':'known B1','level':'sequence'},
    # {'files':'ipr001279_acc.txt','id_col':0,'data_col':None,'delim':'\t','label':'UniProt','level':'sequence'},
    # {'files':'JGI_b1_230-300aa_headers.txt','id_col':0,'data_col':None,'delim':'\t','label':'JGI','level':'sequence'}
]
# 4. Provide a list of files to annotate by frequency of certain traits (genus, kingdom, etc). Each entry should be a dictionary
# that specifies the file name, the column of the sequence id, the column of the data, the file delimiter, the annotation label and the level at which the annotations
# should be applied (metanode members vs all sequences in metanode)
# Each file should be a tab separated file with the sequence ID in the first column, and the trait in the second column.
# NOTE(review): 'files' is a plain string in some entries and a list in others —
# presumably the loader accepts both; confirm before editing.
membership_freq_annot = [
    # {'files':'b1_active_sites.txt','id_col':0,'data_col':1,'delim':'\t','label':'active_site','level':'member','highest_entries':3},
    # {'files':'b1_active_sites.txt','id_col':0,'data_col':1,'delim':'\t','label':'active_site','level':'member','highest_entries':3},
    {'files':'ipr001279_org_annot_filled.txt','id_col':0,'data_col':2,'delim':'\t','label':'genus','level':'sequence','highest_entries':5},
    {'files':['B1_activesite.txt'],'id_col':0,'data_col':1,'delim':'\t','label':'B1_activesite','level':'sequence','highest_entries':3},
    {'files':['B2_activesite.txt'],'id_col':0,'data_col':1,'delim':'\t','label':'B2_activesite','level':'sequence','highest_entries':3},
    {'files':['B3_activesite.txt'],'id_col':0,'data_col':1,'delim':'\t','label':'B3_activesite','level':'sequence','highest_entries':3},
    {'files':['B1_200-350aa_activesite.txt'],'id_col':0,'data_col':1,'delim':'\t','label':'B1_200-350aa_activesite','level':'sequence','highest_entries':3},
    {'files':['B2_200-350aa_activesite.txt'],'id_col':0,'data_col':1,'delim':'\t','label':'B2_200-350aa_activesite','level':'sequence','highest_entries':3},
    {'files':['B3_200-350aa_activesite.txt'],'id_col':0,'data_col':1,'delim':'\t','label':'B3_200-350aa_activesite','level':'sequence','highest_entries':3},
    {'files':['B1_MSA_inclusion.txt'],'id_col':0,'data_col':1,'delim':'\t','label':'B1_200-350aa_MSA_inclusion','level':'sequence','highest_entries':3},
    {'files':['B2_MSA_inclusion.txt'],'id_col':0,'data_col':1,'delim':'\t','label':'B2_200-350aa_MSA_inclusion','level':'sequence','highest_entries':3},
    {'files':['B3_MSA_inclusion.txt'],'id_col':0,'data_col':1,'delim':'\t','label':'B3_200-350aa_MSA_inclusion','level':'sequence','highest_entries':3}
]
# 5. Unique labels
membership_label = [
    {'files':'b1_name.txt','id_col':1,'data_col':2 ,'delim':'\t', 'label':'known B1 families', 'level':'sequence'},
    {'files':'b2_name.txt','id_col':1,'data_col':2 ,'delim':'\t', 'label':'known B2 families', 'level':'sequence'},
    {'files':'b3_name.txt','id_col':1,'data_col':2 ,'delim':'\t', 'label':'known B3 families', 'level':'sequence'},
    # {'files':'b1_family_label.txt','id_col':0,'data_col':1 ,'delim':'\t', 'label':'known B1 families', 'level':'member'}
]
# 6. Numerical distribution
membership_dist_annot = [
    # {'files':['ipr001279_length.txt', 'JGI_b1_230-300aa_headers+length.txt'],'id_col':0,'data_col':1,'delim':'\t','label':'length_dist','level':'sequence'}
    {'files':'mbl_lengths.txt','id_col':0,'data_col':1,'delim':'\t','label':'length_dist','level':'sequence'}
]
# 7. link nodes at higher cut-offs to those observed at lower ones. Make sure the file is a lower cut-off than the currently set metanode bitscores, or else the results
# will not make any sense. It also doesn't need to be from the same set of analysis, since node numbering is arbitrary so long as the clustering is correct.
# level should always be member.
membership_lower_node = [
    # {'files':'b1_ssn_50min_115clust_meta-node_members.txt','id_col':1,'data_col':0 ,'delim':'\t', 'label':'115clust node', 'level':'member', 'bitscore':115}
]
# ZUC stream-cipher constant tables.
# sbox0 / sbox1 are the two 8-bit substitution boxes (each a permutation of
# 0..255); D holds the sixteen 15-bit key-loading constants d0..d15.
# Fix: sbox0 mixed uppercase '0X' and lowercase '0x' hex prefixes — values are
# unchanged, prefixes normalized to lowercase for consistency with sbox1 and D.
sbox0 = [
    0x3e, 0x72, 0x5b, 0x47, 0xca, 0xe0, 0x00, 0x33, 0x04, 0xd1, 0x54, 0x98, 0x09, 0xb9, 0x6d, 0xcb,
    0x7b, 0x1b, 0xf9, 0x32, 0xaf, 0x9d, 0x6a, 0xa5, 0xb8, 0x2d, 0xfc, 0x1d, 0x08, 0x53, 0x03, 0x90,
    0x4d, 0x4e, 0x84, 0x99, 0xe4, 0xce, 0xd9, 0x91, 0xdd, 0xb6, 0x85, 0x48, 0x8b, 0x29, 0x6e, 0xac,
    0xcd, 0xc1, 0xf8, 0x1e, 0x73, 0x43, 0x69, 0xc6, 0xb5, 0xbd, 0xfd, 0x39, 0x63, 0x20, 0xd4, 0x38,
    0x76, 0x7d, 0xb2, 0xa7, 0xcf, 0xed, 0x57, 0xc5, 0xf3, 0x2c, 0xbb, 0x14, 0x21, 0x06, 0x55, 0x9b,
    0xe3, 0xef, 0x5e, 0x31, 0x4f, 0x7f, 0x5a, 0xa4, 0x0d, 0x82, 0x51, 0x49, 0x5f, 0xba, 0x58, 0x1c,
    0x4a, 0x16, 0xd5, 0x17, 0xa8, 0x92, 0x24, 0x1f, 0x8c, 0xff, 0xd8, 0xae, 0x2e, 0x01, 0xd3, 0xad,
    0x3b, 0x4b, 0xda, 0x46, 0xeb, 0xc9, 0xde, 0x9a, 0x8f, 0x87, 0xd7, 0x3a, 0x80, 0x6f, 0x2f, 0xc8,
    0xb1, 0xb4, 0x37, 0xf7, 0x0a, 0x22, 0x13, 0x28, 0x7c, 0xcc, 0x3c, 0x89, 0xc7, 0xc3, 0x96, 0x56,
    0x07, 0xbf, 0x7e, 0xf0, 0x0b, 0x2b, 0x97, 0x52, 0x35, 0x41, 0x79, 0x61, 0xa6, 0x4c, 0x10, 0xfe,
    0xbc, 0x26, 0x95, 0x88, 0x8a, 0xb0, 0xa3, 0xfb, 0xc0, 0x18, 0x94, 0xf2, 0xe1, 0xe5, 0xe9, 0x5d,
    0xd0, 0xdc, 0x11, 0x66, 0x64, 0x5c, 0xec, 0x59, 0x42, 0x75, 0x12, 0xf5, 0x74, 0x9c, 0xaa, 0x23,
    0x0e, 0x86, 0xab, 0xbe, 0x2a, 0x02, 0xe7, 0x67, 0xe6, 0x44, 0xa2, 0x6c, 0xc2, 0x93, 0x9f, 0xf1,
    0xf6, 0xfa, 0x36, 0xd2, 0x50, 0x68, 0x9e, 0x62, 0x71, 0x15, 0x3d, 0xd6, 0x40, 0xc4, 0xe2, 0x0f,
    0x8e, 0x83, 0x77, 0x6b, 0x25, 0x05, 0x3f, 0x0c, 0x30, 0xea, 0x70, 0xb7, 0xa1, 0xe8, 0xa9, 0x65,
    0x8d, 0x27, 0x1a, 0xdb, 0x81, 0xb3, 0xa0, 0xf4, 0x45, 0x7a, 0x19, 0xdf, 0xee, 0x78, 0x34, 0x60
]
sbox1 = [
    0x55, 0xc2, 0x63, 0x71, 0x3b, 0xc8, 0x47, 0x86, 0x9f, 0x3c, 0xda, 0x5b, 0x29, 0xaa, 0xfd, 0x77,
    0x8c, 0xc5, 0x94, 0x0c, 0xa6, 0x1a, 0x13, 0x00, 0xe3, 0xa8, 0x16, 0x72, 0x40, 0xf9, 0xf8, 0x42,
    0x44, 0x26, 0x68, 0x96, 0x81, 0xd9, 0x45, 0x3e, 0x10, 0x76, 0xc6, 0xa7, 0x8b, 0x39, 0x43, 0xe1,
    0x3a, 0xb5, 0x56, 0x2a, 0xc0, 0x6d, 0xb3, 0x05, 0x22, 0x66, 0xbf, 0xdc, 0x0b, 0xfa, 0x62, 0x48,
    0xdd, 0x20, 0x11, 0x06, 0x36, 0xc9, 0xc1, 0xcf, 0xf6, 0x27, 0x52, 0xbb, 0x69, 0xf5, 0xd4, 0x87,
    0x7f, 0x84, 0x4c, 0xd2, 0x9c, 0x57, 0xa4, 0xbc, 0x4f, 0x9a, 0xdf, 0xfe, 0xd6, 0x8d, 0x7a, 0xeb,
    0x2b, 0x53, 0xd8, 0x5c, 0xa1, 0x14, 0x17, 0xfb, 0x23, 0xd5, 0x7d, 0x30, 0x67, 0x73, 0x08, 0x09,
    0xee, 0xb7, 0x70, 0x3f, 0x61, 0xb2, 0x19, 0x8e, 0x4e, 0xe5, 0x4b, 0x93, 0x8f, 0x5d, 0xdb, 0xa9,
    0xad, 0xf1, 0xae, 0x2e, 0xcb, 0x0d, 0xfc, 0xf4, 0x2d, 0x46, 0x6e, 0x1d, 0x97, 0xe8, 0xd1, 0xe9,
    0x4d, 0x37, 0xa5, 0x75, 0x5e, 0x83, 0x9e, 0xab, 0x82, 0x9d, 0xb9, 0x1c, 0xe0, 0xcd, 0x49, 0x89,
    0x01, 0xb6, 0xbd, 0x58, 0x24, 0xa2, 0x5f, 0x38, 0x78, 0x99, 0x15, 0x90, 0x50, 0xb8, 0x95, 0xe4,
    0xd0, 0x91, 0xc7, 0xce, 0xed, 0x0f, 0xb4, 0x6f, 0xa0, 0xcc, 0xf0, 0x02, 0x4a, 0x79, 0xc3, 0xde,
    0xa3, 0xef, 0xea, 0x51, 0xe6, 0x6b, 0x18, 0xec, 0x1b, 0x2c, 0x80, 0xf7, 0x74, 0xe7, 0xff, 0x21,
    0x5a, 0x6a, 0x54, 0x1e, 0x41, 0x31, 0x92, 0x35, 0xc4, 0x33, 0x07, 0x0a, 0xba, 0x7e, 0x0e, 0x34,
    0x88, 0xb1, 0x98, 0x7c, 0xf3, 0x3d, 0x60, 0x6c, 0x7b, 0xca, 0xd3, 0x1f, 0x32, 0x65, 0x04, 0x28,
    0x64, 0xbe, 0x85, 0x9b, 0x2f, 0x59, 0x8a, 0xd7, 0xb0, 0x25, 0xac, 0xaf, 0x12, 0x03, 0xe2, 0xf2
]
D = [
    0x44d7, 0x26bc, 0x626b, 0x135e, 0x5789, 0x35e2, 0x7135, 0x09af,
    0x4d78, 0x2f13, 0x6bc4, 0x1af1, 0x5e26, 0x3c4d, 0x789a, 0x47ac
]
def LFSRCalcS(s):
    """Feedback value of the ZUC LFSR over GF(2^31 - 1).

    Each left_rot(cell, k, 31) term is a 31-bit rotation of a state cell
    (equivalent to multiplying by 2^k modulo 2^31 - 1); the tapped cells
    are s[15], s[13], s[10], s[4] and s[0].
    """
    # Equivalent shift form kept for reference:
    # return (s[15] << 15 + s[13] << 17 + s[10] << 21 + s[4] << 20 + s[0] << 8 + s[0]) % 0x7fffffff
    return (left_rot(s[15], 15, 31) + left_rot(s[13], 17, 31) + left_rot(s[10], 21, 31) + left_rot(s[4], 20, 31) + left_rot(s[0], 8, 31) + s[0]) % 0x7fffffff
def LFSRWithInitMode(s, u):
    """Advance the LFSR one step in initialisation mode, mixing in word *u*.

    Appends the new cell and drops the oldest, keeping len(s) == 16.
    NOTE(review): the ZUC spec maps a *computed* feedback of 0 to 2^31-1;
    here the substitution is keyed on s[0] == 0 instead — confirm against
    the known-answer vectors in __main__ before changing.
    """
    s.append(0x7fffffff if s[0] == 0 else ((LFSRCalcS(s) + u) % 0x7fffffff))
    s.pop(0)
def LFSRWithWorkMode(s):
    """Advance the LFSR one step in working mode (no extra input word)."""
    s.append(0x7fffffff if s[0] == 0 else LFSRCalcS(s))
    s.pop(0)
def BitReorganization(s):
    """ZUC bit-reorganization: build four 32-bit words from 31-bit LFSR cells.

    Each X[i] concatenates a 16-bit half of one cell with a 16-bit half of
    another, as selected by the masks/shifts below.
    """
    X = [0, 0, 0, 0]
    X[0] = ((s[15] & 0x7fff8000) << 1) | (s[14] & 0xffff)  # high16(s15) || low16(s14)
    X[1] = ((s[11] & 0xffff) << 16) | (s[9] >> 15)  # low16(s11) || high16(s9)
    X[2] = ((s[7] & 0xffff) << 16) | (s[5] >> 15)  # low16(s7) || high16(s5)
    X[3] = ((s[2] & 0xffff) << 16) | (s[0] >> 15)  # low16(s2) || high16(s0)
    return X
def left_rot(x, n, x_len):
    """Rotate the x_len-bit value *x* left by *n* bit positions.

    The rotation amount is reduced modulo the word width, and the result
    is masked back down to x_len bits.
    """
    n %= x_len
    mask = (1 << x_len) - 1
    return ((x << n) | (x >> (x_len - n))) & mask
def left_rot1(x):
    """ZUC linear transform L1: x XOR its 32-bit rotations by 2, 10, 18, 24."""
    out = x
    for shift in (2, 10, 18, 24):
        out ^= left_rot(x, shift, 32)
    return out
def left_rot2(x):
    """ZUC linear transform L2: x XOR its 32-bit rotations by 8, 14, 22, 30."""
    out = x
    for shift in (8, 14, 22, 30):
        out ^= left_rot(x, shift, 32)
    return out
def SBOX(x):
    """Apply the ZUC S-box layer bytewise to a 32-bit word.

    sbox0 substitutes bytes 3 and 1, sbox1 substitutes bytes 2 and 0.
    """
    return (sbox0[x >> 24] << 24) | (sbox1[(x >> 16) & 0xff] << 16) | (sbox0[(x >> 8) & 0xff] << 8) | (sbox1[x & 0xff])
def F(R, x0, x1, x2):
    """ZUC nonlinear function F.

    Mixes the reorganized words x0..x2 with the memory cells R[0], R[1],
    updates R in place via the L1/L2 linear transforms and the S-box,
    and returns the 32-bit word W[0].
    """
    W = [0, 0, 0]
    W[0] = ((x0 ^ R[0]) + R[1]) & 0xffffffff  # the returned output word
    W[1] = (R[0] + x1) & 0xffffffff
    W[2] = R[1] ^ x2
    # New memory cells: swap 16-bit halves of W1/W2, then L-transform + S-box.
    R[0] = SBOX(left_rot1((((W[1] & 0xffff) << 16) | (W[2] >> 16))))
    R[1] = SBOX(left_rot2((((W[2] & 0xffff) << 16) | (W[1] >> 16))))
    return W[0]
def pack_key(s, k, iv):
    """Load the 16 LFSR cells of *s* in place from 128-bit key *k* and IV *iv*.

    Cell i is key_byte || D[i] || iv_byte (8 + 15 + 8 = 31 bits); bytes are
    consumed from the least-significant end, filling s[15] down to s[0].
    """
    for cell in range(15, -1, -1):
        key_byte = k & 0xff
        iv_byte = iv & 0xff
        s[cell] = (key_byte << 23) | (D[cell] << 8) | iv_byte
        k >>= 8
        iv >>= 8
def zuc_run(k, iv, key_len):
    """Run the ZUC stream cipher and return *key_len* 32-bit keystream words.

    k and iv are 128-bit integers; the state is initialised for 32 rounds,
    one output word is discarded, then key_len words are produced.
    """
    s = [0 for i in range(16)]  # sixteen 31-bit LFSR cells
    R = [0, 0]  # nonlinear memory cells R1, R2
    # init mode
    pack_key(s, k, iv)
    for i in range(32):
        X = BitReorganization(s)
        W = F(R, X[0], X[1], X[2])
        LFSRWithInitMode(s, W >> 1)
    ''' debug
    for j in X:
        print ('%x' % j, end = ' ')
    for j in R:
        print ('%x' % j, end = ' ')
    print ('%x' % W, end = ' ')
    print ('%x' % s[15])
    '''
    # work mode
    X = BitReorganization(s)
    F(R, X[0], X[1], X[2])  # first work-mode output word is discarded
    LFSRWithWorkMode(s)
    Z = []
    for i in range(key_len):
        X = BitReorganization(s)
        Z.append(F(R, X[0], X[1], X[2]) ^ X[3])  # keystream word z = F(...) XOR X3
        LFSRWithWorkMode(s)
    return Z
if __name__ == '__main__':
    # Known-answer checks — presumably the published ZUC test vectors
    # (all-zero and all-one key/IV plus two random pairs); verify against
    # the specification's expected keystream words.
    a = zuc_run(0, 0, 2)
    print ('%x\n%x\n' % (a[0], a[1]))
    a = zuc_run(0xffffffffffffffffffffffffffffffff, 0xffffffffffffffffffffffffffffffff, 2)
    print ('%x\n%x\n' % (a[0], a[1]))
    a = zuc_run(0x3d4c4be96a82fdaeb58f641db17b455b, 0x84319aa8de6915ca1f6bda6bfbd8c766, 2)
    print ('%x\n%x\n' % (a[0], a[1]))
    a = zuc_run(0x338985fedc98cb99c9f9b346bd6cae56, 0x7dd3175c2374c7113bcbbfb339563172, 10)
    for a_i in a:
        print ('%x' % (a_i))
# Description: Raw Strings
if __name__ == '__main__':
    # Escape characters are honoured
    a_string = "this is\na string split\t\tand tabbed"
    print(a_string)
    # Raw string is printed as it is without interpreting the escape characters.
    # This is used a lot in regular expressions
    raw_string = r"this is\na string split\t\tand tabbed"
    print(raw_string)
    # Same result as a_string, built from explicit character codes (LF, TAB).
    b_string = "this is" + chr(10) + "a string split" + chr(9) + chr(9) + "and tabbed"
    print(b_string)
    # NOTE(review): "\f" is a recognised escape (form feed), so this prints a
    # control character followed by "ollowed" — not a literal backslash.
    backslash_string = "this is a backslash \followed by some text"
    print(backslash_string)
    # Doubling the backslash produces a literal backslash.
    backslash_string = "this is a backslash \\followed by some text"
    print(backslash_string)
    # A raw string may end in an even number of backslashes (here: two).
    error_string = r"this string ends with \\"
|
# Collect numbers from the user, rejecting duplicates, until they answer 'n'.
number = list()
while True:
    number.append(int(input('Digite um numero: ')))
    # A count of 2 means the value just appended already existed: undo it.
    if number.count(number[-1]) == 2:
        print('\033[31mNumeros duplicados não serão adicionados.\033[m')
        number.pop()
    decision = str(input('\033[37mDeseja continuar? \033[m')).strip().lower()
    # NOTE(review): decision[0] raises IndexError if the user just hits
    # Enter — confirm whether empty input should mean "continue".
    if decision[0] in 'n':
        break
number.sort()
print(f'Você digitou os números: {number}')
"""
number = list()
while True:
    n = int(input('Digite um numero: '))
    if n not in number:
        number.append(n)
    else:
        print('Números repetidos não serão adicionados!')
    r = str(input('Quer continuar? ')).strip.lower
    if r[0] in 'Nn':
        break
print('-='*30)
numero.sort()
print(f'Você digitou os valores: {number}')"""
|
class BaseLangDetect:
    """
    Container for what a language detection module
    should take as input and should return as output
    """

    def get(self, text):
        """
        Uses the language detection module to detect a language

        Parameters
        ----------
        text : str
            The raw text to detect the language of

        Returns
        -------
        tuple(str, float)
            The detected language and confidence of detection
        """
        # Base implementation: report an undetermined language with
        # zero confidence; subclasses override this with real detection.
        detected_language = 'und'
        confidence = 0.0
        return detected_language, confidence
|
class Board():
    """A bingo-style board that tracks drawn numbers.

    `matrix` is a list of rows of numbers; `matches` mirrors it with booleans
    marking which cells have been drawn; `last` is the most recent number
    that matched a cell.
    """

    def __init__(self, matrix):
        self.matrix = matrix
        # BUGFIX: rows and cols were swapped (cols was len(matrix), rows was
        # len(matrix[0])). That was harmless for square boards but raised
        # IndexError in add_number/score/__str__ for any rectangular board.
        # Fixed so the class now generalizes to non-square matrices.
        self.rows = len(matrix)
        self.cols = len(matrix[0])
        self.last = None
        self.matches = []
        for i in range(self.rows):
            self.matches.append([False] * self.cols)

    def is_winner(self):
        """Return True if any complete row or column has been matched."""
        for row in self.matches:
            if all(row):
                return True
        for j in range(self.cols):
            col = [self.matches[i][j] for i in range(self.rows)]
            if all(col):
                return True
        return False

    def add_number(self, number):
        """Mark every cell equal to *number* and remember it as the last draw."""
        for i in range(self.rows):
            for j in range(self.cols):
                if number == self.matrix[i][j]:
                    self.matches[i][j] = True
                    self.last = number

    def score(self):
        """Return the sum of unmarked cells times the last matched number."""
        sum_unmarked = 0
        for i in range(self.rows):
            for j in range(self.cols):
                # int(not matched) is 1 for unmarked cells, 0 for marked ones.
                sum_unmarked += self.matrix[i][j] * int(not self.matches[i][j])
        return sum_unmarked * self.last

    def __str__(self):
        """Render cell values side by side with an 'x' grid of matches."""
        string = ""
        for i in range(self.rows):
            row_val = ""
            row_mat = ""
            for j in range(self.cols):
                val = (" " + str(self.matrix[i][j]))[-2:]  # right-align to width 2
                mat = "x " if self.matches[i][j] else "  "
                row_val += val + " "
                row_mat += mat + " "
            row = row_val + " | " + row_mat
            string += row + "\n"
        return string
'''
faca um programa que tenha uma funcao chamada escreva(), que receba um texto qualquer como parametro e mostre auma mensagem com tamanho adaptavel.
x.: escreva('Olá Mundo!)
saida:
-------------
  Olá Mundo?
-------------
'''
def mensagem(txt):
    """Print *txt* framed by dashed lines sized to the text length + 4."""
    tam = len(txt) + 4
    print('-' * tam)
    # print(f'{txt:^len(txt)}')
    print(f'  {txt}')
    print('-' * tam)
t = str(input('Digite seu texto: '))
mensagem(t)
# mensagem('Dilson Mascarenhas')
# Minimal Django settings used by the test suite.
DATABASE_ENGINE = 'sqlite3'  # legacy (pre-Django 1.2) single-setting DB style
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'tests.app',
]
|
# Demonstrates returning a value from a function.
def my_function(name, surname):
    """Return "Name Surname" with each part title-cased."""
    full_name = f"{name.title()} {surname.title()}"
    return full_name

print(my_function('selim', 'mh'))
|
class Solution:
    def reverseWords(self, s: str) -> str:
        """Return the space-separated words of *s* in reverse order.

        Splits on single spaces, strips each piece, and drops empty
        pieces, so runs of spaces and leading/trailing blanks vanish.
        """
        stripped = (chunk.strip() for chunk in s.split(' '))
        words = [w for w in stripped if w]
        words.reverse()
        return ' '.join(words)
|
class RaddarException(Exception):
    """Base class for all raddar-specific errors."""
    pass
class FailedToCloneRepoException(RaddarException):
    """Raised when cloning a repository fails."""
    pass
class FailedToWriteRepoException(RaddarException):
    """Raised when writing to a repository fails."""
    pass
|
# -*- coding: utf-8 -*-
def pagarEstacionamiento(apagar):
    """Interactively collect coins until the amount *apagar* is covered.

    Prints the total due, rejects any coin other than 1, 2, 5, 10 or 50,
    and finally reports the amount paid and the change (overpayment).
    """
    total = apagar
    print("Total a pagar:" + str(apagar))
    monedas_validas = (1, 2, 5, 10, 50)
    while apagar > 0:
        moneda = int(input("Ingresa la cantidad de la moneda que quieres ingresar: "))
        if moneda in monedas_validas:
            apagar -= moneda
        else:
            print(
                "Moneda con denominación inválidad, solo puedes meter monedas de 1 , 2, 5, 10, 50"
            )
    # apagar is <= 0 here; its negation is the change owed to the user.
    cambio = apagar * -1
    print("Pagaste: " + str(total + cambio))
    print("Haz terminado de pagar, tu cambio es: " + str(cambio))


def main():
    """Entry point: pay a fixed 72-unit parking fee."""
    pagarEstacionamiento(72)


if __name__ == "__main__":
    main()
|
QUERIES = {}
QUERIES['1A'] = '''
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, agerange
from vw_cpims_registration {ocbos} {oareas} {odate}
group by gender, agerange
'''
QUERIES['1B'] = '''
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id,
CASE exit_status WHEN 'ACTIVE' THEN 'Active'
ELSE 'Exited' END AS active_status
from vw_cpims_registration {ocbos} {oareas} {odate}
group by gender, active_status
UNION
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'Ever Registered' AS active_status
from vw_cpims_registration {ocbos} {oareas} {odate}
group by gender
'''
QUERIES['1C'] = '''
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, schoollevel as school_level
from vw_cpims_registration where exit_status='ACTIVE' {cbos} {areas} {fdate}
group by gender, schoollevel
'''
QUERIES['1D'] = '''
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'Active' as services
from vw_cpims_registration where exit_status='ACTIVE' {cbos} {areas} {fdate}
group by gender
UNION
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'Has Birth Certificate' as services
from vw_cpims_registration where exit_status='ACTIVE'
and birthcert = 'HAS BIRTHCERT' {cbos} {areas} {fdate}
group by gender
UNION
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'Has Disability' as services
from vw_cpims_registration where exit_status='ACTIVE'
and ovcdisability = 'HAS DISABILITY' {cbos} {areas} {fdate}
group by gender
UNION
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'School Going' as services
from vw_cpims_registration where exit_status='ACTIVE'
and schoollevel != 'Not in School' {cbos} {areas} {fdate}
group by gender
'''
QUERIES['1E'] = '''
Select count(distinct(cpims_ovc_id)) AS dcount,
gender as sex_id,
CASE
when ovchivstatus='POSITIVE' THEN 'HIV Status +Ve'
when ovchivstatus='NEGATIVE' THEN 'HIV Status -Ve'
when ovchivstatus='NOT KNOWN' THEN 'HIV Status Unknown'
when ovchivstatus='HIV Test Not Required' THEN 'HIV Test not Required'
when ovchivstatus='HIV Referred For Testing' THEN 'HIV Referred For Testing'
ELSE 'Others' END AS hivstat
from vw_cpims_registration where exit_status='ACTIVE' {cbos} {areas} {fdate}
group by gender, ovchivstatus
'''
QUERIES['1F'] = '''
SELECT count(cpims_ovc_id) as dcount,
gender as sex_id, eligibility
from vw_cpims_registration {ocbos} {oareas} {odate}
group by gender, eligibility
'''
QUERIES['1G'] = '''
SELECT count(cpims_ovc_id) as dcount,
gender as sex_id, exit_reason
from vw_cpims_registration where exit_status = 'EXITED' {cbos} {areas} {fdate}
group by gender, exit_reason order by count(cpims_ovc_id) desc
'''
QUERIES['1H'] = '''
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'OVC Registration' as services
from vw_cpims_registration where exit_status = 'ACTIVE' {cbos} {areas} {dates}
group by gender
UNION
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'OVC Exit' as services
from vw_cpims_registration where exit_status = 'EXITED' {cbos} {areas} {dates}
group by gender
'''
QUERIES['2A'] = '''
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'Active' as hivstat
from vw_cpims_registration where exit_status='ACTIVE' {cbos} {fdate}
group by gender
UNION
Select count(distinct(cpims_ovc_id)) AS dcount,
gender as sex_id, 'Positive' as hivstat
from vw_cpims_registration where exit_status='ACTIVE'
and ovchivstatus='POSITIVE' {cbos} {fdate} group by gender
UNION
Select count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'On ART' as hivstat
from vw_cpims_registration where exit_status='ACTIVE'
and ovchivstatus='POSITIVE' AND artstatus='ART' {cbos} {fdate}
group by gender
UNION
Select count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'VL Accessed' as hivstat
from vw_cpims_viral_load {ocbos}
group by gender
UNION
Select count(distinct(v.cpims_ovc_id)) as dcount,
v.gender as sex_id, 'Current VL' as hivstat
from vw_cpims_viral_load v
inner join (
select cpims_ovc_id, max(date_of_event) as most_current_vl_date
from vw_cpims_viral_load
group by cpims_ovc_id ) vl on v.cpims_ovc_id = vl.cpims_ovc_id
and v.date_of_event=vl.most_current_vl_date
where current_date - vl.most_current_vl_date <= 400 {vcbos}
group by v.gender
UNION
Select count(distinct(v.cpims_ovc_id)) AS dcount,
v.gender as sex_id, 'Suppressed' as hivstat
from vw_cpims_viral_load v
inner join (
select cpims_ovc_id, max(date_of_event) as most_current_vl_date
from vw_cpims_viral_load
group by cpims_ovc_id ) vl on v.cpims_ovc_id = vl.cpims_ovc_id
and v.date_of_event=vl.most_current_vl_date
where current_date - vl.most_current_vl_date <= 400
and v.viral_load < 1001 {vcbos} group by v.gender
UNION
Select count(distinct(v.cpims_ovc_id)) AS dcount,
v.gender as sex_id, 'Not Suppressed' as hivstat
from vw_cpims_viral_load v
inner join (
select cpims_ovc_id, max(date_of_event) as most_current_vl_date
from vw_cpims_viral_load
group by cpims_ovc_id ) vl on v.cpims_ovc_id=vl.cpims_ovc_id
and v.date_of_event=vl.most_current_vl_date
where current_date - vl.most_current_vl_date <=400
and v.viral_load > 1000 {vcbos}
group by v.gender
'''
QUERIES['2B'] = '''
Select count(distinct(cpims_ovc_id)) as dcount, gender as sex_id, agerange
from vw_cpims_viral_load where (current_date - date_of_event) < 401
and viral_load > 10000 {cbos} group by gender, agerange
'''
QUERIES['2C'] = '''
select sum(x.cnt) as dcount, x.gender as sex_id,
'OVC_SERV' as hivstat from
(
Select count(distinct(cpims_ovc_id)) as cnt,
gender from vw_cpims_active_beneficiary {ocbos}
group by gender
UNION ALL
Select count(distinct(cpims_ovc_id)), gender
from vw_cpims_benchmark_achieved where (current_date - date_of_event) <= 400
AND cpara_score = 17 {cbos} group by gender
) x group by x.gender
UNION
Select count(distinct(cpims_ovc_id)) AS dcount,
gender as sex_id,
CASE
when ovchivstatus='POSITIVE' THEN 'HIV Status +Ve'
when ovchivstatus='NEGATIVE' THEN 'HIV Status -Ve'
when ovchivstatus='NOT KNOWN' THEN 'HIV Status Unknown'
when ovchivstatus='HIV Test Not Required' THEN 'HIV Test not Required'
when ovchivstatus='HIV Referred For Testing' THEN 'HIV Referred For Testing'
ELSE 'Others' END AS hivstat
from vw_cpims_registration where cpims_ovc_id in
(select distinct(x.cpims_ovc_id) from
(
Select distinct(cpims_ovc_id)
from vw_cpims_active_beneficiary {ocbos}
UNION ALL
Select distinct(cpims_ovc_id)
from vw_cpims_benchmark_achieved where (current_date - date_of_event) <= 400
AND cpara_score = 17 {cbos}
) x)
and exit_status='ACTIVE' {cbos} {areas} {fdate}
group by gender, ovchivstatus
'''
QUERIES['2D'] = '''
SELECT count(distinct(cpims_ovc_id)) as dcount,
'Male' as sex_id, 'HIV Status' as hivstat
from vw_cpims_registration where exit_status='ACTIVE' {cbos}
UNION
Select count(distinct(cpims_ovc_id)) AS dcount,
'Male' as sex_id, 'ART Status' as hivstat
from vw_cpims_registration where exit_status='ACTIVE'
and ovchivstatus='POSITIVE' {cbos}
UNION
Select count(distinct(v.cpims_ovc_id)) as dcount,
'Male' as sex_id, 'Suppression' as hivstat
from vw_cpims_viral_load v
inner join (
select cpims_ovc_id, max(date_of_event) as most_current_vl_date
from vw_cpims_viral_load
group by cpims_ovc_id ) vl on v.cpims_ovc_id = vl.cpims_ovc_id
and v.date_of_event=vl.most_current_vl_date
where current_date - vl.most_current_vl_date <= 400 {vcbos}
UNION
Select count(distinct(cpims_ovc_id)) AS dcount,
'Female' as sex_id, 'HIV Status' as hivstat
from vw_cpims_registration where exit_status='ACTIVE'
and (ovchivstatus='POSITIVE' or ovchivstatus='NEGATIVE'
or ovchivstatus='NOT KNOWN' or ovchivstatus='HIV Test Not Required'
or ovchivstatus='HIV Referred For Testing') {cbos}
UNION
Select count(distinct(cpims_ovc_id)) as dcount,
'Female' as sex_id, 'ART Status' as hivstat
from vw_cpims_registration where exit_status='ACTIVE'
and ovchivstatus='POSITIVE' AND artstatus='ART' {cbos}
UNION
Select count(distinct(v.cpims_ovc_id)) AS dcount,
'Female' as sex_id, 'Suppression' as hivstat
from vw_cpims_viral_load v
inner join (
select cpims_ovc_id, max(date_of_event) as most_current_vl_date
from vw_cpims_viral_load
group by cpims_ovc_id ) vl on v.cpims_ovc_id = vl.cpims_ovc_id
and v.date_of_event=vl.most_current_vl_date
where current_date - vl.most_current_vl_date <= 400
and v.viral_load < 1001 {vcbos}
'''
QUERIES['3A'] = '''
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'Active' as services
from vw_cpims_registration where exit_status='ACTIVE' {cbos} group by gender
UNION
Select count(distinct(person_id)) AS dcount,
gender as sex_id, 'Served Two Quarters' as services
from vw_cpims_two_quarters where (current_date - date_of_event) <=400 {cbos}
group by gender
UNION
Select count(distinct(cpims_ovc_id)) as dcount,
gender, 'Case Plans' as services
from vw_cpims_case_plan where (current_date - date_of_event) <= 400 {cbos}
group by gender
UNION
Select count(distinct(cpims_ovc_id)) as dcount,
gender, 'CPARA' as services
from vw_cpims_cpara where (current_date - date_of_event) <= 400 {cbos}
group by gender
UNION
Select count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'Graduated' as services
from vw_cpims_benchmark_achieved
where (current_date - date_of_event) <= 400
AND cpara_score = 17 {cbos}
group by gender
UNION
Select count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'Active Beneficiary' as services
from vw_cpims_active_beneficiary {ocbos}
group by gender
UNION
select sum(x.cnt) as dcount, x.gender as sex_id,
'OVC_SERV' as hivstat from
(
Select count(distinct(cpims_ovc_id)) as cnt,
gender from vw_cpims_active_beneficiary {ocbos}
group by gender
UNION ALL
Select count(distinct(cpims_ovc_id)), gender
from vw_cpims_benchmark_achieved where (current_date - date_of_event) <= 400
AND cpara_score = 17 {cbos} group by gender
) x group by x.gender
UNION
Select count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'Exit without Graduation' as services
from vw_cpims_registration where exit_status='ACTIVE'
AND cpims_ovc_id NOT IN
(select distinct(vw_cpims_registration.cpims_ovc_id)
from vw_cpims_two_quarters ) {cbos}
group by gender
'''
# Canned SQL for the OVC/CPIMS reporting dashboard.  Each query yields rows
# of (dcount, sex_id, services).  The placeholders {cbos}, {ocbos} and
# {odates} are substituted by the caller with extra WHERE fragments before
# execution.
# NOTE(review): "(current_date - date_of_event) <= 400" presumably means
# "event within the last 400 days" (PostgreSQL date subtraction) -- confirm
# against the reporting spec.

# 3B: beneficiaries served, broken down by gender and service domain.
QUERIES['3B'] = '''
Select count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, domain as services
from vw_cpims_list_served {ocbos} group by gender, domain
'''
# 3C: active (non-exited) registrations by gender.
QUERIES['3C'] = '''
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'OVC Comprehensive' as services
from vw_cpims_registration where exit_status='ACTIVE' {cbos} group by gender
'''
# 3D: benchmark achievement counts by gender and CPARA score.
QUERIES['3D'] = '''
Select count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, cpara_score as services
from vw_cpims_benchmark_achieved
where (current_date - date_of_event) <= 400 {cbos}
group by gender, cpara_score
'''
# 3E: individual services delivered within the caller-supplied date window.
QUERIES['3E'] = '''
Select count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, service as services
from vw_cpims_list_served
where {odates} {cbos}
group by gender, service
order by dcount desc
'''
# 4A: active registrations plus current case plans / CPARA assessments.
QUERIES['4A'] = '''
SELECT count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, 'Active' as services
from vw_cpims_registration where exit_status='ACTIVE' {cbos} group by gender
UNION
Select count(distinct(cpims_ovc_id)) as dcount,
gender, 'Current Case Plan' as services
from vw_cpims_case_plan where (current_date - date_of_event) <= 400 {cbos}
group by gender
UNION
Select count(distinct(cpims_ovc_id)) as dcount,
gender, 'Current CPARA' as services
from vw_cpims_cpara where (current_date - date_of_event) <= 400 {cbos}
group by gender
'''
# 4B: graduation-path breakdown by gender.
QUERIES['4B'] = '''
Select count(distinct(cpims_ovc_id)) as dcount,
gender as sex_id, graduationpath as services
from vw_cpims_benchmark_achieved
where (current_date - date_of_event) <= 400 {cbos}
group by gender, graduationpath;
'''
|
def countingSort(array, place):
    """Stable, in-place counting sort of `array`, keyed on the decimal
    digit selected by `place` (1 = ones, 10 = tens, ...).

    Used as the per-digit pass of radixSort; assumes nonnegative ints.
    """
    n = len(array)
    tally = [0] * 10

    # Histogram of the selected digit.
    for value in array:
        tally[(value // place) % 10] += 1

    # Prefix sums turn counts into end positions for each digit bucket.
    for digit in range(1, 10):
        tally[digit] += tally[digit - 1]

    # Walk the input backwards so equal digits keep their relative order.
    placed = [0] * n
    for value in reversed(array):
        digit = (value // place) % 10
        tally[digit] -= 1
        placed[tally[digit]] = value

    # Copy the sorted pass back into the caller's list.
    array[:] = placed
def radixSort(array):
    """LSD radix sort of a list of nonnegative integers, in place.

    Runs one stable countingSort pass per decimal digit of the largest
    element.  BUG FIX: an empty input previously crashed with
    ``ValueError: max() arg is an empty sequence``; it is now a no-op.
    """
    if not array:
        return  # nothing to sort; max() would raise on an empty list
    max_element = max(array)
    place = 1
    # One pass per digit, least significant first.
    while max_element // place > 0:
        countingSort(array, place)
        place *= 10
# Driver: read whitespace-separated nonnegative integers from stdin,
# radix-sort them in place, and print the sorted list.
alist = input('Enter the list of (nonnegative) numbers: ').split()
data = [int(x) for x in alist]
radixSort(data)
print(data)
|
def main():
    """Read an integer from stdin and print its factorial (pt-BR prompts)."""
    number = int(input("Digite o número que deseja ter o fatorial: \n"))
    product = 1
    # Multiply number * (number-1) * ... * 1; empty range for n <= 0 leaves 1.
    for factor in range(number, 0, -1):
        product *= factor
    print("O fatorial de %d é" % number, product)


main()
|
'''
Created on Dec 2, 2013
@author: daniel
'''
class PharmacyController(object):
    """Controller mediating between a product repository and a shopping
    cart of (product, quantity) tuples with a running money total."""

    def __init__(self, repository):
        self.__pRepo = repository
        # cart is used to keep track of all the products
        self.__cart = []
        self.__cartTotal = 0

    def addProductToCart(self, product, qty):
        """
        Add a product to cart
        Input:
            product - instance of type product (must expose getPrice())
            qty - int - representing the quantity
        Output:
        """
        # we will store them as list of tuples [(product_instance, qty)]
        self.__cart.append((product, qty))
        self.__cartTotal += int(product.getPrice()) * int(qty)

    def filterByName(self, name):
        """
        Search products by name (case-insensitive substring match)
        Input:
            name - string - the name of the product
        Output:
            list - a list of product instances whose name contains `name`
        """
        name = name.lower()
        return [product for product in self.__pRepo.getAll()
                if name in product.getName().lower()]

    def getProductsInCart(self):
        """
        Get the products
        Input:
        Output:
            a list of tuples in the format [(product, qty)]
        """
        return self.__cart

    def getCartTotal(self):
        """
        Get the cart total
        Input:
        Output:
            int - representing the total
        """
        return self.__cartTotal

    def newCart(self):
        """
        Init a new, empty cart.

        BUG FIX: the running total is reset as well; previously only the
        item list was cleared, so getCartTotal() kept reporting the old
        cart's total after newCart().
        """
        self.__cart = []
        self.__cartTotal = 0
|
#######################################################################################
# 2.1
# change what is stored in the variables to other types of data
# if a variable stores a string, change it to an int, or a boolean, or a float
some_var = "hi"
some_var2 = 1
some_var3 = True
some_var4 = 1.32
#######################################################################################
# 2.2
# using the variables a, b, c, and d, print out the number 5
# use a combination of +, -, *, /
# ex. print(a + b - c * d) -> this is not the answer
a = 10
b = 2
c = 3
d = 12
#######################################################################################
# 2.3
# create 2 variables with 1 holding your first name and 1 holding your last name
# add them together with a space so it prints nicely
# ex. print("Ben" + " " + "Campbell") -> "Ben Campbell"
#######################################################################################
# 2.4
# Make 2 variables. 1 Variable will be a string and the other will be a number. Then
# add them together and print them out.
#######################################################################################
# 2.5
# Make 1 variable. Make another variable and set it equal to the other variable. Then
# print out the second variable to see what is stored.
#######################################################################################
# 2.6
# Make a math equation with at least a +, -, /, * and (). Store this equation in a
# variable and then print out what is stored in the variable.
#######################################################################################
# 2.7
# Create string, int, float, and boolean variables. Then print them all out.
|
load("@build_bazel_integration_testing//tools:import.bzl", "bazel_external_dependency_archive")
def bazel_external_dependencies(rules_scala_version, rules_scala_version_sha256):
    """Registers the external archives (sha256 -> mirror URLs) that the
    integration tests need to fetch.

    Args:
      rules_scala_version: commit or tag of wix/rules_scala to download.
      rules_scala_version_sha256: sha256 checksum of that archive.
    """
    # rules_scala itself plus its Scala 2.12.6 toolchain and test jars.
    bazel_external_dependency_archive(
        name = "io_bazel_rules_scala_test",
        srcs = {
            rules_scala_version_sha256: [
                "https://github.com/wix/rules_scala/archive/%s.zip" % rules_scala_version,
            ],
            "3023b07cc02f2b0217b2c04f8e636b396130b3a8544a8dfad498a19c3e57a863": [
                "https://repo.maven.apache.org/maven2/org/scala-lang/scala-compiler/2.12.6/scala-compiler-2.12.6.jar"
            ],
            "f81d7144f0ce1b8123335b72ba39003c4be2870767aca15dd0888ba3dab65e98": [
                "https://repo.maven.apache.org/maven2/org/scala-lang/scala-library/2.12.6/scala-library-2.12.6.jar"
            ],
            "ffa70d522fc9f9deec14358aa674e6dd75c9dfa39d4668ef15bb52f002ce99fa": [
                "https://repo.maven.apache.org/maven2/org/scala-lang/scala-reflect/2.12.6/scala-reflect-2.12.6.jar"
            ],
            "b416b5bcef6720da469a8d8a5726e457fc2d1cd5d316e1bc283aa75a2ae005e5": [
                "http://central.maven.org/maven2/org/scalatest/scalatest_2.12/3.0.5/scalatest_2.12-3.0.5.jar"
            ],
            "57e25b4fd969b1758fe042595112c874dfea99dca5cc48eebe07ac38772a0c41": [
                "http://central.maven.org/maven2/org/scalactic/scalactic_2.12/3.0.5/scalactic_2.12-3.0.5.jar"
            ],
            "f877d304660ac2a142f3865badfc971dec7ed73c747c7f8d5d2f5139ca736513": [
                "http://central.maven.org/maven2/commons/io/commons-io/2.6/commons-io-2.6.jar"
            ],
            "8d7ec605ca105747653e002bfe67bddba90ab964da697aaa5daa1060923585db": [
                "http://central.maven.org/maven2/com/google/protobuf/protobuf-java/3.1.0/protobuf-java-3.1.0.jar"
            ],
            "39097bdc47407232e0fe7eed4f2c175c067b7eda95873cb76ffa76f1b4c18895": [
                "https://mirror.bazel.build/raw.githubusercontent.com/bazelbuild/bazel/0.17.1" +
                "/src/main/java/com/google/devtools/build/lib/bazel/rules/java/java_stub_template.txt"
            ],
        }
    )
    # Guava, pinned at 20.0 for the guava integration test.
    bazel_external_dependency_archive(
        name = "com_google_guava_guava_test",
        srcs = {
            "36a666e3b71ae7f0f0dca23654b67e086e6c93d192f60ba5dfd5519db6c288c8": [
                "http://central.maven.org/maven2/com/google/guava/guava/20.0/guava-20.0.jar",
            ],
        }
    )
    # bazel-toolchains snapshot used by the toolchain integration test.
    bazel_external_dependency_archive(
        name = "bazel_toolchains_test",
        srcs = {
            "f08758b646beea3b37dc9e07d63020cecd5f9d29f42de1cd60e9325e047c7103": [
                "https://github.com/bazelbuild/bazel-toolchains/archive/719f8035a20997289727e16693acdebc8e918e28.tar.gz",
            ],
        }
    )
|
class Line:
    """Bresenham line rasteriser that plots onto a picture object exposing
    ``set(x, y, color)``.

    BUG FIX: the original class defined ``draw`` twice, so the
    four-argument segment drawer was shadowed by the one-argument matrix
    drawer, and the matrix drawer's four-argument calls recursed into
    itself and raised TypeError.  ``draw`` now dispatches on argument
    count so both historical call styles work.
    """

    pic = None
    color = -1

    def __init__(self, pic, color):
        self.pic = pic
        self.color = color

    def oct1(self, x0, y0, x1, y1):
        """Octant 1: slope in [0, 1); step in x, occasionally bump y up."""
        if (x0 > x1):  # normalise to left-to-right
            x0, y0, x1, y1 = x1, y1, x0, y0
        x = x0
        y = y0
        A = y1 - y0
        B = x0 - x1
        d = 2 * A + B  # Bresenham decision variable
        while (x <= x1):
            self.pic.set(x, y, self.color)
            if (d > 0):
                y += 1
                d += 2 * B
            x += 1
            d += 2 * A

    def oct2(self, x0, y0, x1, y1):
        """Octant 2: slope >= 1; step in y, occasionally bump x right."""
        if (y0 > y1):  # normalise to bottom-to-top
            x0, y0, x1, y1 = x1, y1, x0, y0
        x = x0
        y = y0
        A = y1 - y0
        B = x0 - x1
        d = A + 2 * B
        while (y <= y1):
            self.pic.set(x, y, self.color)
            if (d < 0):
                x += 1
                d += 2 * A
            y += 1
            d += 2 * B

    def oct7(self, x0, y0, x1, y1):
        """Octant 7: slope < -1; step y downward, occasionally bump x right."""
        if (y1 > y0):  # normalise to top-to-bottom
            x0, y0, x1, y1 = x1, y1, x0, y0
        x = x0
        y = y0
        A = y1 - y0
        B = x0 - x1
        d = A - 2 * B
        while (y >= y1):
            self.pic.set(x, y, self.color)
            if (d > 0):
                x += 1
                d += 2 * A
            y -= 1
            d -= 2 * B

    def oct8(self, x0, y0, x1, y1):
        """Octant 8: slope in (-1, 0]; step in x, occasionally bump y down."""
        if (x0 > x1):  # normalise to left-to-right
            x0, y0, x1, y1 = x1, y1, x0, y0
        x = x0
        y = y0
        A = y1 - y0
        B = x0 - x1
        d = 2 * A - B
        while (x <= x1):
            self.pic.set(x, y, self.color)
            if (d < 0):
                y -= 1
                d -= 2 * B
            x += 1
            d += 2 * A

    def draw(self, *args):
        """Draw either one segment -- draw(x0, y0, x1, y1) -- or every edge
        of an edge matrix -- draw(matrix)."""
        if len(args) == 4:
            self._draw_segment(*args)
        else:
            (matrix,) = args
            self._draw_matrix(matrix)

    def _draw_segment(self, x0, y0, x1, y1):
        """Pick the octant routine for the segment's slope and rasterise it."""
        try:
            m = (y1 - y0) / (x1 - x0)
        except ZeroDivisionError:
            m = 2  # vertical line: any slope >= 1 routes to oct2
        if (1 <= m):
            self.oct2(x0, y0, x1, y1)
        elif (0 <= m < 1):
            self.oct1(x0, y0, x1, y1)
        elif (-1 <= m < 0):
            self.oct8(x0, y0, x1, y1)
        else:
            self.oct7(x0, y0, x1, y1)

    def _draw_matrix(self, matrix):
        """Draw each edge (point pair) stored column-wise in the matrix.

        NOTE(review): getEdges() is called on the object returned by
        getContent() -- confirm that the content object provides it.
        """
        matrix = matrix.getContent()
        for i in range(matrix.getEdges()):
            self._draw_segment(matrix[0][2 * i], matrix[1][2 * i],
                               matrix[0][2 * i + 1], matrix[1][2 * i + 1])
|
def test_signal_url_total_length(ranker):
    """Total-URL-length signal: longer URLs score strictly lower, in [0, 1]."""
    def score(url):
        return ranker.client.get_signal_value_from_url("url_total_length", url)

    long_score = score("http://www.verrrryyyylongdomain.com/very-long-url-xxxxxxxxxxxxxx.html")
    medium_score = score("https://en.wikipedia.org/wiki/Maceo_Parker")
    shortest_score = score("http://t.co")
    assert 0 <= long_score < medium_score < shortest_score <= 1
def test_signal_url_path_length(ranker):
    """Path-length signal: more path/query characters means a lower score;
    domain length must not matter."""
    def score(url):
        return ranker.client.get_signal_value_from_url("url_path_length", url)

    homepage = score("http://www.longdomain.com")
    homepage_slash = score("http://www.domain.com/")
    # Only the path counts, so differently sized domains tie.
    assert homepage == homepage_slash

    subpage = score("http://t.co/p")
    assert subpage < homepage

    subpage_with_query = score("http://t.co/p?q=1")
    assert subpage_with_query < subpage
|
# Print the first `terms` numbers of the Fibonacci sequence.
# BUG FIX: the header printed in the general case misspelled
# "fabinocci"; it now reads "fibonacci".
terms = int(input("Enter the number of terms:"))
a, b = 0, 1
count = 0
if terms <= 0:
    print("Please enter a positive number")
elif terms == 1:
    print("The fibonacci series upto", terms)
    print(a)
else:
    print("The fibonacci series upto", terms, "terms is:")
    while count < terms:
        print(a, end=" ")
        # Slide the window: (a, b) -> (b, a + b).
        c = a + b
        a = b
        b = c
        count += 1
|
# Register initialisation tables for the OmniVision OV2640 camera sensor.
# Each entry is a [register, value] pair, written to the sensor in order;
# the final [0xff, 0xff] pair marks the end of a table.
# NOTE(review): writes to register 0xff appear to select the active
# register bank (0x00 / 0x01) before the pairs that follow -- confirm
# against the OV2640 datasheet.
OV2640_JPEG_INIT = [
    [ 0xff, 0x00 ],
    [ 0x2c, 0xff ],
    [ 0x2e, 0xdf ],
    [ 0xff, 0x01 ],
    [ 0x3c, 0x32 ],
    [ 0x11, 0x04 ],
    [ 0x09, 0x02 ],
    [ 0x04, 0x28 ],
    [ 0x13, 0xe5 ],
    [ 0x14, 0x48 ],
    [ 0x2c, 0x0c ],
    [ 0x33, 0x78 ],
    [ 0x3a, 0x33 ],
    [ 0x3b, 0xfB ],
    [ 0x3e, 0x00 ],
    [ 0x43, 0x11 ],
    [ 0x16, 0x10 ],
    [ 0x39, 0x92 ],
    [ 0x35, 0xda ],
    [ 0x22, 0x1a ],
    [ 0x37, 0xc3 ],
    [ 0x23, 0x00 ],
    [ 0x34, 0xc0 ],
    [ 0x36, 0x1a ],
    [ 0x06, 0x88 ],
    [ 0x07, 0xc0 ],
    [ 0x0d, 0x87 ],
    [ 0x0e, 0x41 ],
    [ 0x4c, 0x00 ],
    [ 0x48, 0x00 ],
    [ 0x5B, 0x00 ],
    [ 0x42, 0x03 ],
    [ 0x4a, 0x81 ],
    [ 0x21, 0x99 ],
    [ 0x24, 0x40 ],
    [ 0x25, 0x38 ],
    [ 0x26, 0x82 ],
    [ 0x5c, 0x00 ],
    [ 0x63, 0x00 ],
    [ 0x61, 0x70 ],
    [ 0x62, 0x80 ],
    [ 0x7c, 0x05 ],
    [ 0x20, 0x80 ],
    [ 0x28, 0x30 ],
    [ 0x6c, 0x00 ],
    [ 0x6d, 0x80 ],
    [ 0x6e, 0x00 ],
    [ 0x70, 0x02 ],
    [ 0x71, 0x94 ],
    [ 0x73, 0xc1 ],
    [ 0x12, 0x40 ],
    [ 0x17, 0x11 ],
    [ 0x18, 0x43 ],
    [ 0x19, 0x00 ],
    [ 0x1a, 0x4b ],
    [ 0x32, 0x09 ],
    [ 0x37, 0xc0 ],
    [ 0x4f, 0x60 ],
    [ 0x50, 0xa8 ],
    [ 0x6d, 0x00 ],
    [ 0x3d, 0x38 ],
    [ 0x46, 0x3f ],
    [ 0x4f, 0x60 ],
    [ 0x0c, 0x3c ],
    [ 0xff, 0x00 ],
    [ 0xe5, 0x7f ],
    [ 0xf9, 0xc0 ],
    [ 0x41, 0x24 ],
    [ 0xe0, 0x14 ],
    [ 0x76, 0xff ],
    [ 0x33, 0xa0 ],
    [ 0x42, 0x20 ],
    [ 0x43, 0x18 ],
    [ 0x4c, 0x00 ],
    [ 0x87, 0xd5 ],
    [ 0x88, 0x3f ],
    [ 0xd7, 0x03 ],
    [ 0xd9, 0x10 ],
    [ 0xd3, 0x82 ],
    [ 0xc8, 0x08 ],
    [ 0xc9, 0x80 ],
    [ 0x7c, 0x00 ],
    [ 0x7d, 0x00 ],
    [ 0x7c, 0x03 ],
    [ 0x7d, 0x48 ],
    [ 0x7d, 0x48 ],
    [ 0x7c, 0x08 ],
    [ 0x7d, 0x20 ],
    [ 0x7d, 0x10 ],
    [ 0x7d, 0x0e ],
    [ 0x90, 0x00 ],
    [ 0x91, 0x0e ],
    [ 0x91, 0x1a ],
    [ 0x91, 0x31 ],
    [ 0x91, 0x5a ],
    [ 0x91, 0x69 ],
    [ 0x91, 0x75 ],
    [ 0x91, 0x7e ],
    [ 0x91, 0x88 ],
    [ 0x91, 0x8f ],
    [ 0x91, 0x96 ],
    [ 0x91, 0xa3 ],
    [ 0x91, 0xaf ],
    [ 0x91, 0xc4 ],
    [ 0x91, 0xd7 ],
    [ 0x91, 0xe8 ],
    [ 0x91, 0x20 ],
    [ 0x92, 0x00 ],
    [ 0x93, 0x06 ],
    [ 0x93, 0xe3 ],
    [ 0x93, 0x05 ],
    [ 0x93, 0x05 ],
    [ 0x93, 0x00 ],
    [ 0x93, 0x04 ],
    [ 0x93, 0x00 ],
    [ 0x93, 0x00 ],
    [ 0x93, 0x00 ],
    [ 0x93, 0x00 ],
    [ 0x93, 0x00 ],
    [ 0x93, 0x00 ],
    [ 0x93, 0x00 ],
    [ 0x96, 0x00 ],
    [ 0x97, 0x08 ],
    [ 0x97, 0x19 ],
    [ 0x97, 0x02 ],
    [ 0x97, 0x0c ],
    [ 0x97, 0x24 ],
    [ 0x97, 0x30 ],
    [ 0x97, 0x28 ],
    [ 0x97, 0x26 ],
    [ 0x97, 0x02 ],
    [ 0x97, 0x98 ],
    [ 0x97, 0x80 ],
    [ 0x97, 0x00 ],
    [ 0x97, 0x00 ],
    [ 0xc3, 0xed ],
    [ 0xa4, 0x00 ],
    [ 0xa8, 0x00 ],
    [ 0xc5, 0x11 ],
    [ 0xc6, 0x51 ],
    [ 0xbf, 0x80 ],
    [ 0xc7, 0x10 ],
    [ 0xb6, 0x66 ],
    [ 0xb8, 0xA5 ],
    [ 0xb7, 0x64 ],
    [ 0xb9, 0x7C ],
    [ 0xb3, 0xaf ],
    [ 0xb4, 0x97 ],
    [ 0xb5, 0xFF ],
    [ 0xb0, 0xC5 ],
    [ 0xb1, 0x94 ],
    [ 0xb2, 0x0f ],
    [ 0xc4, 0x5c ],
    [ 0xc0, 0x64 ],
    [ 0xc1, 0x4B ],
    [ 0x8c, 0x00 ],
    [ 0x86, 0x3D ],
    [ 0x50, 0x00 ],
    [ 0x51, 0xC8 ],
    [ 0x52, 0x96 ],
    [ 0x53, 0x00 ],
    [ 0x54, 0x00 ],
    [ 0x55, 0x00 ],
    [ 0x5a, 0xC8 ],
    [ 0x5b, 0x96 ],
    [ 0x5c, 0x00 ],
    [ 0xd3, 0x00 ],
    [ 0xc3, 0xed ],
    [ 0x7f, 0x00 ],
    [ 0xda, 0x00 ],
    [ 0xe5, 0x1f ],
    [ 0xe1, 0x67 ],
    [ 0xe0, 0x00 ],
    [ 0xdd, 0x7f ],
    [ 0x05, 0x00 ],
    [ 0x12, 0x40 ],
    [ 0xd3, 0x04 ],
    [ 0xc0, 0x16 ],
    [ 0xC1, 0x12 ],
    [ 0x8c, 0x00 ],
    [ 0x86, 0x3d ],
    [ 0x50, 0x00 ],
    [ 0x51, 0x2C ],
    [ 0x52, 0x24 ],
    [ 0x53, 0x00 ],
    [ 0x54, 0x00 ],
    [ 0x55, 0x00 ],
    [ 0x5A, 0x2c ],
    [ 0x5b, 0x24 ],
    [ 0x5c, 0x00 ],
    [ 0xff, 0xff ],
]
# Switch the output format to YUV422.
OV2640_YUV422 = [
    [ 0xFF, 0x00 ],
    [ 0x05, 0x00 ],
    [ 0xDA, 0x10 ],
    [ 0xD7, 0x03 ],
    [ 0xDF, 0x00 ],
    [ 0x33, 0x80 ],
    [ 0x3C, 0x40 ],
    [ 0xe1, 0x77 ],
    [ 0x00, 0x00 ],
    [ 0xff, 0xff ],
]
# Switch the output format to JPEG.
OV2640_JPEG = [
    [ 0xe0, 0x14 ],
    [ 0xe1, 0x77 ],
    [ 0xe5, 0x1f ],
    [ 0xd7, 0x03 ],
    [ 0xda, 0x10 ],
    [ 0xe0, 0x00 ],
    [ 0xFF, 0x01 ],
    [ 0x04, 0x08 ],
    [ 0xff, 0xff ],
]
# Bus address of the sensor -- presumably the 7-bit SCCB/I2C slave
# address; confirm against the wiring/datasheet.
SENSORADDR = 0x30
# Chunk size (bytes) used when reading picture data -- TODO confirm units.
PICBUFSIZE = 64
|
class Parent:
    """Base class exposing two class-level string attributes."""

    value1 = "This is value 1"
    value2 = "This is value 2"


class Child(Parent):
    """Adds nothing of its own; demonstrates attribute inheritance."""
    pass


# The child instance resolves value2 through Parent via the MRO.
parent = Parent()
child = Child()
print(parent.value1)
print(child.value2)
|
begin_unit
comment|'# Copyright (c) 2016 Intel, Inc.'
nl|'\n'
comment|'# Copyright (c) 2013 OpenStack Foundation'
nl|'\n'
comment|'# All Rights Reserved.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'from'
name|'oslo_config'
name|'import'
name|'cfg'
newline|'\n'
nl|'\n'
DECL|variable|debugger_group
name|'debugger_group'
op|'='
name|'cfg'
op|'.'
name|'OptGroup'
op|'('
string|"'remote_debug'"
op|','
nl|'\n'
DECL|variable|title
name|'title'
op|'='
string|"'debugger options'"
op|')'
newline|'\n'
nl|'\n'
DECL|variable|host
name|'host'
op|'='
name|'cfg'
op|'.'
name|'StrOpt'
op|'('
string|"'host'"
op|','
nl|'\n'
DECL|variable|help
name|'help'
op|'='
string|"'Debug host (IP or name) to connect. Note '"
nl|'\n'
string|"'that using the remote debug option changes how '"
nl|'\n'
string|"'Nova uses the eventlet library to support async IO. '"
nl|'\n'
string|"'This could result in failures that do not occur '"
nl|'\n'
string|"'under normal operation. Use at your own risk.'"
op|')'
newline|'\n'
nl|'\n'
DECL|variable|port
name|'port'
op|'='
name|'cfg'
op|'.'
name|'IntOpt'
op|'('
string|"'port'"
op|','
nl|'\n'
DECL|variable|min
name|'min'
op|'='
number|'1'
op|','
nl|'\n'
DECL|variable|max
name|'max'
op|'='
number|'65535'
op|','
nl|'\n'
DECL|variable|help
name|'help'
op|'='
string|"'Debug port to connect. Note '"
nl|'\n'
string|"'that using the remote debug option changes how '"
nl|'\n'
string|"'Nova uses the eventlet library to support async IO. '"
nl|'\n'
string|"'This could result in failures that do not occur '"
nl|'\n'
string|"'under normal operation. Use at your own risk.'"
op|')'
newline|'\n'
nl|'\n'
DECL|variable|CLI_OPTS
name|'CLI_OPTS'
op|'='
op|'['
name|'host'
op|','
name|'port'
op|']'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|register_cli_opts
name|'def'
name|'register_cli_opts'
op|'('
name|'conf'
op|')'
op|':'
newline|'\n'
indent|' '
name|'conf'
op|'.'
name|'register_cli_opts'
op|'('
name|'CLI_OPTS'
op|','
name|'group'
op|'='
name|'debugger_group'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|list_opts
dedent|''
name|'def'
name|'list_opts'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'{'
name|'debugger_group'
op|':'
name|'CLI_OPTS'
op|'}'
newline|'\n'
dedent|''
endmarker|''
end_unit
|
class Frame:
    """Lightweight record describing a single call/stack frame.

    Improvement: adds a ``__repr__`` for debuggability (backward
    compatible -- the attribute interface is unchanged).
    """

    # __slots__ keeps instances small and rejects unexpected attributes.
    __slots__ = ("name", "class_name", "line_no", "file_path")

    def __init__(self, name, class_name=None, line_no=None, file_path=None):
        self.name = name              # function or method name
        self.class_name = class_name  # enclosing class name, if any
        self.line_no = line_no        # source line number, if known
        self.file_path = file_path    # source file path, if known

    def __repr__(self):
        return (f"{type(self).__name__}(name={self.name!r}, "
                f"class_name={self.class_name!r}, line_no={self.line_no!r}, "
                f"file_path={self.file_path!r})")
|
def test():
    """Exercise checker: validate the shape and the NER annotations of
    TRAINING_DATA (spaCy training examples; feedback strings are French
    and must stay as-is)."""
    # Exactly four (text, annotations) examples are expected.
    assert len(TRAINING_DATA) == 4, "Les données d'apprentissage ne correspondent pas – attendu 4 exemples."
    # Each example is a 2-tuple whose second element is an annotations dict.
    assert all(
        len(entry) == 2 and isinstance(entry[1], dict) for entry in TRAINING_DATA
    ), "Format incorrect des données d'apprentissage. Attendu une liste de tuples dont le second élément est un dict."
    # Every annotations dict must carry a non-empty "entities" list.
    assert all(
        entry[1].get("entities") for entry in TRAINING_DATA
    ), "Toutes les annotations des données d'apprentissage doivent comporter des entités."
    # Exact (start, end, label) spans expected for each of the 4 examples.
    assert TRAINING_DATA[0][1]["entities"] == [
        (10, 19, "GPE")
    ], "Vérifie les entités dans le premier exemple."
    assert TRAINING_DATA[1][1]["entities"] == [
        (17, 22, "GPE")
    ], "Vérifie les entités dans le deuxième exemple."
    assert TRAINING_DATA[2][1]["entities"] == [
        (15, 20, "GPE"),
        (24, 32, "GPE"),
    ], "Vérifie les entités dans le troisième exemple."
    assert TRAINING_DATA[3][1]["entities"] == [
        (0, 6, "GPE")
    ], "Vérifie les entités dans le quatrième exemple."
    # Success message shown to the learner.
    __msg__.good(
        "Super boulot ! Une fois que le modèle fournit des bons résultats pour "
        "la détection d'entités GPE dans les évaluations de voyageurs, tu "
        "pourrais ajouter un composant basé sur une règle pour déterminer si "
        "l'entité est une destination touristique dans ce contexte. Par "
        "exemple, tu pourrais recouper les entités avec une base de données ou "
        "les rechercher dans un wiki sur le voyage."
    )
|
# 6.Input and Range
# Given an integer n, write a program that generates a dictionary with
# entries from 1 to n. For each key i, the corresponding value should
# be i*i.
def range_dict(_max):
    """Return {i: i*i} for every integer i from 1 through `_max`."""
    squares = {}
    for value in range(1, _max + 1):
        squares[value] = value * value
    return squares
# Same mapping, written as a dictionary comprehension.
def range_dict2(_max):
    """Return {i: i*i} for i in 1..`_max` (comprehension form)."""
    return {value: value * value for value in range(1, _max + 1)}
# Demonstrate that both implementations produce the same mapping.
print(range_dict(10))
print(range_dict2(10))
# for n=10:
# {1: 1, 2: 4, 3: 9, 4: 16, 5: 25, 6: 36, 7: 49, 8: 64, 9: 81, 10: 100}
|
# Holds permission data for a private race room
def get_permission_info(server, race_private_info):
    """Resolve the configured admin/racer names against the server.

    Name matching is case-insensitive; result ordering follows the order
    of the configured name lists, then the server's role/member order.
    """
    info = PermissionInfo()
    for wanted in race_private_info.admin_names:
        target = wanted.lower()
        info.admin_roles.extend(
            role for role in server.roles if role.name.lower() == target)
        info.admins.extend(
            member for member in server.members if member.name.lower() == target)
    for wanted in race_private_info.racer_names:
        target = wanted.lower()
        info.racers.extend(
            member for member in server.members if member.name.lower() == target)
    return info


class PermissionInfo(object):
    """Resolved permissions for a private race room."""

    def __init__(self):
        self.admins = []       # members granted admin rights by name
        self.admin_roles = []  # roles granted admin rights by name
        self.racers = []       # members allowed to race

    def is_admin(self, member):
        """True if the member holds an admin role or was named an admin."""
        if any(role in self.admin_roles for role in member.roles):
            return True
        return member in self.admins
|
WORDLIST =\
('dna',
'vor',
'how',
'hot',
'yud',
'fir',
'fit',
'fix',
'dsc',
'ate',
'ira',
'cup',
'fre',
'fry',
'had',
'has',
'hat',
'hav',
'old',
'fou',
'for',
'fox',
'foe',
'fob',
'foi',
'soo',
'son',
'pet',
'veo',
'vel',
'jim',
'bla',
'one',
'san',
'sad',
'say',
'sap',
'saw',
'sat',
'cim',
'ivy',
'wpi',
'pop',
'act',
'her',
'hey',
'hel',
'heh',
'ohn',
'wav',
'its',
'bye',
'bhi',
'cry',
'set',
'sep',
'sez',
'see',
'sea',
'seo',
'sem',
'vow',
'ago',
'age',
'evo',
'eve',
'try',
'tru',
'uea',
'odd',
'led',
'lee',
'leg',
'leo',
'let',
'bay',
'log',
'low',
'jug',
'jul',
'jay',
'jar',
'aes',
'btw',
'tlc',
'off',
'oft',
'web',
'wee',
'wed',
'wes',
'wer',
'wet',
'ink',
'ing',
'ina',
'hps',
'ale',
'ala',
'bro',
'ere',
'lab',
'lay',
'law',
'zen',
'kin',
'sue',
'sun',
'yew',
'ill',
'air',
'aim',
'aid',
'thy',
'thu',
'tho',
'the',
'way',
'wax',
'was',
'war',
'fog',
'gum',
'gus',
'guy',
'fbi',
'jan',
'ibm',
'car',
'caw',
'can',
'aon',
'not',
'nov',
'now',
'nor',
'nos',
'zap',
'yin',
'egg',
'cen',
'sri',
'lol',
'los',
'lot',
'you',
'mph',
'ask',
'ash',
'awe',
'ego',
'dia',
'asc',
'put',
'str',
'lie',
'lia',
'lib',
'lit',
'lip',
'wow',
'won',
'ads',
'phu',
'bon',
'eel',
'run',
'cue',
'nam',
'ech',
'mad',
'mal',
'ben',
'bel',
'beg',
'bed',
'arc',
'bet',
'she',
'ken',
'oak',
'esp',
'dug',
'eat',
'aug',
'neo',
'new',
'net',
'men',
'met',
'dry',
'ise',
'tyr',
'don',
'doc',
'dog',
'bar',
'bag',
'bad',
'ban',
'sam',
'kid',
'yep',
'yes',
'yer',
'yet',
'dio',
'did',
'die',
'dig',
'dip',
'box',
'boy',
'bot',
'bow',
'bob',
'bod',
'bog',
'fly',
'map',
'mas',
'mar',
'may',
'max',
'mac',
'mao',
'man',
'lsd',
'ort',
'orb',
'las',
'get',
'par',
'yrs',
'pay',
'pan',
'oir',
'moy',
'mor',
'mov',
'tap',
'eye',
'two',
'der',
'dew',
'del',
'dec',
'oil',
'gif',
'cfn',
'gas',
'gap',
'fur',
'raw',
'fun',
'ray',
'cut',
'win',
'big',
'bid',
'apt',
'bit',
'per',
'pen',
'ufo',
'lyr',
'hal',
'chi',
'chs',
'iii',
'afa',
'sip',
'tsp',
'dad',
'dai',
'das',
'day',
'cwn',
'red',
'quo',
'mid',
'mix',
'adf',
'add',
'est',
'dcw',
'buy',
'but',
'bud',
'pin',
'pig',
'our',
'pit',
'end',
'gom',
'god',
'sow',
'got',
'tos',
'top',
'tow',
'too',
'tom',
'toe',
'ran',
'rap',
'abc',
'oct',
'nut',
'mud',
'rit',
'via',
'ice',
'ahp',
'uri',
'tir',
'til',
'tim',
'tin',
'rom',
'rob',
'rod',
'ard',
'msg',
'ahn',
'shu',
'iad',
'iaa',
'iau',
'hug',
'and',
'ann',
'any',
'tip',
'tie',
'jun',
'cbs',
'hit',
'ley',
'all',
'ptv',
'far',
'fan',
'fal',
'fad',
'teh',
'ten',
'tea',
'sum',
'hig',
'nan',
'ieo',
'hip',
'his',
'him',
'art',
'are',
'arm',
'lap',
'key',
'dun',
'dub',
'due',
'rim',
'rig',
'rid',
'row',
'cit',
'cad',
'apr',
'use',
'feb',
'usa',
'few',
'tax',
'tao',
'sir',
'sit',
'six',
'sid',
'sin',
'out',
'who',
'why',
'ear',
'joy',
'job',
'joe',
'jog',
'bbi',
'bbs',
'non',
'nob',
'sky',
'own',
'owe',
'van',
'four',
'pack',
'lore',
'lord',
'dell',
'foul',
'hero',
'herb',
'hera',
'here',
'cult',
'dorn',
'unit',
'holy',
'hurt',
'hole',
'hold',
'wand',
'want',
'dyke',
'turn',
'wins',
'wind',
'wine',
'vary',
'inar',
'wast',
'wash',
'legs',
'tree',
'idle',
'apre',
'that',
'cops',
'camp',
'came',
'prif',
'busy',
'ment',
'rich',
'blew',
'fair',
'pads',
'fail',
'fain',
'best',
'lots',
'dorm',
'diff',
'slay',
'much',
'life',
'dave',
'lift',
'suas',
'doth',
'spin',
'topy',
'tune',
'ease',
'wrap',
'nova',
'jane',
'lays',
'dues',
'role',
'enya',
'yhvh',
'time',
'push',
'gown',
'jerk',
'sign',
'ordo',
'melt',
'love',
'spot',
'date',
'such',
'kong',
'torc',
'webs',
'plea',
'raed',
'lbrp',
'vote',
'open',
'city',
'bite',
'cite',
'note',
'take',
'knew',
'knee',
'ways',
'bade',
'salt',
'laws',
'slow',
'robe',
'many',
'twin',
'boar',
'boat',
'west',
'brow',
'monk',
'nile',
'knot',
'rest',
'sums',
'dark',
'dare',
'clad',
'meal',
'bond',
'wade',
'with',
'pull',
'rush',
'rags',
'gone',
'yoga',
'mass',
'adam',
'tain',
'nora',
'sand',
'sank',
'past',
'pass',
'full',
'hast',
'pick',
'luck',
'mord',
'more',
'door',
'doom',
'sent',
'male',
'bogs',
'scam',
'huge',
'hugh',
'hugs',
'bowl',
'bows',
'ally',
'odin',
'shop',
'isle',
'week',
'ness',
'crow',
'zeus',
'hyde',
'sell',
'self',
'play',
'plan',
'tips',
'gold',
'burp',
'rite',
'nope',
'fame',
'thou',
'thor',
'bell',
'sees',
'firm',
'fire',
'fung',
'owed',
'mote',
'owen',
'read',
'caer',
'unto',
'reap',
'hues',
'sion',
'pale',
'into',
'span',
'suit',
'linn',
'link',
'atom',
'line',
'chan',
'chao',
'chap',
'tara',
'bush',
'land',
'code',
'send',
'wipe',
'pout',
'race',
'bird',
'thin',
'stag',
'mens',
'next',
'rick',
'babe',
'baby',
'this',
'pour',
'bent',
'loch',
'rode',
'high',
'bend',
'rods',
'pair',
'tied',
'fits',
'ties',
'hawk',
'move',
'lamp',
'earl',
'ship',
'kiss',
'mete',
'shut',
'veov',
'muin',
'eryn',
'awen',
'soak',
'awed',
'soar',
'visa',
'face',
'brew',
'fact',
'hops',
'mute',
'tape',
'hope',
'fewr',
'reis',
'wire',
'ends',
'drum',
'drug',
'lush',
'site',
'lust',
'sits',
'juan',
'ball',
'dusk',
'upon',
'dust',
'lest',
'paul',
'five',
'does',
'naff',
'iris',
'asks',
'mere',
'veil',
'rule',
'cian',
'owes',
'horn',
'neat',
'bhur',
'plot',
'ploy',
'yard',
'word',
'wore',
'work',
'worn',
'nuts',
'hong',
'arch',
'then',
'them',
'thee',
'safe',
'bang',
'sack',
'they',
'ther',
'bank',
'nudd',
'tops',
'mojo',
'neck',
'airy',
'john',
'rath',
'nana',
'aunt',
'just',
'runs',
'gaul',
'rune',
'rung',
'away',
'bolt',
'brad',
'bran',
'tone',
'tong',
'tons',
'axis',
'easy',
'east',
'cons',
'cone',
'cong',
'muid',
'rail',
'evil',
'hand',
'hans',
'kept',
'gods',
'soap',
'dwyr',
'born',
'bore',
'hare',
'post',
'pays',
'lewd',
'breo',
'crys',
'true',
'iisi',
'isis',
'anew',
'mold',
'test',
'bedw',
'zone',
'slip',
'cost',
'buch',
'labs',
'live',
'club',
'clue',
'coll',
'cold',
'halt',
'evoe',
'half',
'hall',
'wont',
'drop',
'year',
'carl',
'card',
'care',
'size',
'pomp',
'than',
'fork',
'slan',
'loki',
'only',
'urdr',
'pray',
'aura',
'scot',
'ruby',
'joke',
'lite',
'noah',
'real',
'ream',
'lady',
'llew',
'vahv',
'wolf',
'your',
'area',
'ares',
'poor',
'peat',
'peak',
'pooh',
'pool',
'very',
'verb',
'bass',
'cues',
'dion',
'nine',
'alby',
'lyre',
'rude',
'kuno',
'help',
'soon',
'carn',
'held',
'eros',
'hine',
'fool',
'food',
'foot',
'tear',
'jaws',
'bhar',
'bast',
'bald',
'azif',
'dirt',
'base',
'dire',
'bash',
'misc',
'mist',
'ascs',
'grew',
'grey',
'greg',
'hips',
'pans',
'less',
'mell',
'hype',
'oobe',
'both',
'elsa',
'else',
'pact',
'look',
'loop',
'sham',
'used',
'uses',
'moby',
'ioho',
'game',
'some',
'lips',
'gaia',
'step',
'ache',
'eons',
'duly',
'dips',
'reed',
'hail',
'dull',
'info',
'hair',
'gaze',
'hint',
'draw',
'aser',
'mhol',
'drag',
'dram',
'nuns',
'wave',
'orov',
'jump',
'celt',
'cell',
'crab',
'ride',
'meet',
'fare',
'mode',
'ills',
'keep',
'keen',
'omen',
'bare',
'bard',
'need',
'able',
'wyrd',
'envy',
'tire',
'rash',
'tirs',
'gray',
'wish',
'grab',
'cuts',
'tenm',
'joys',
'tend',
'tent',
'sole',
'magi',
'magh',
'poem',
'poee',
'poet',
'ufos',
'mail',
'owns',
'well',
'bohr',
'sufi',
'dose',
'dair',
'dost',
'vide',
'kick',
'fate',
'lost',
'ring',
'roth',
'lose',
'page',
'shed',
'shea',
'hush',
'home',
'eris',
'eric',
'star',
'stay',
'stan',
'bays',
'void',
'vast',
'yack',
'even',
'ever',
'drew',
'omar',
'hiya',
'arms',
'maes',
'call',
'calm',
'type',
'tell',
'wars',
'warn',
'warm',
'room',
'roof',
'akin',
'akim',
'root',
'give',
'egos',
'deck',
'crew',
'meat',
'mead',
'went',
'side',
'bone',
'mean',
'nave',
'navy',
'aids',
'dawn',
'loud',
'hook',
'hoof',
'mini',
'seer',
'mind',
'mine',
'seed',
'seen',
'seem',
'seek',
'stop',
'earn',
'told',
'ears',
'said',
'vows',
'puts',
'tiny',
'neal',
'ugly',
'near',
'make',
'left',
'nota',
'save',
'nude',
'dean',
'deal',
'dead',
'dear',
'veda',
'burn',
'bury',
'down',
'lies',
'dowm',
'form',
'ford',
'fort',
'zeno',
'toss',
'dies',
'felt',
'fell',
'died',
'skip',
'mild',
'mile',
'skin',
'gaea',
'item',
'anna',
'adds',
'stem',
'wait',
'sake',
'thru',
'soul',
'pink',
'rays',
'idir',
'pine',
'till',
'pure',
'pins',
'idic',
'guys',
'grow',
'tale',
'tall',
'talk',
'ours',
'main',
'beil',
'rock',
'girt',
'girl',
'lone',
'fast',
'eyes',
'memo',
'lugh',
'orgy',
'were',
'lick',
'dash',
'pain',
'paid',
'beth',
'shot',
'show',
'data',
'mung',
'seat',
'sean',
'seal',
'ogma',
'nose',
'arts',
'mark',
'mars',
'mary',
'wake',
'manx',
'same',
'tout',
'pile',
'grip',
'grid',
'grin',
'mare',
'roam',
'road',
'gort',
'deer',
'deep',
'film',
'fill',
'sunk',
'fall',
'none',
'tuan',
'hour',
'maps',
'stir',
'sect',
'pens',
'late',
'good',
'otto',
'rank',
'harm',
'hark',
'hard',
'idea',
'fist',
'harp',
'dont',
'done',
'park',
'part',
'ages',
'most',
'fine',
'find',
'ruis',
'sill',
'vine',
'lion',
'acts',
'fang',
'afar',
'amam',
'bill',
'keys',
'task',
'howe',
'snow',
'snag',
'clan',
'back',
'lark',
'epic',
'from',
'trad',
'boys',
'cure',
'curl',
'yang',
'lend',
'papa',
'lens',
'lent',
'crop',
'body',
'sins',
'sinn',
'sink',
'sing',
'oval',
'fete',
'bind',
'koad',
'kids',
'mate',
'tide',
'have',
'text',
'gorm',
'mica',
'isha',
'beat',
'bear',
'beam',
'halo',
'pulp',
'bull',
'bulb',
'amke',
'hubs',
'file',
'deed',
'sets',
'llyr',
'lake',
'atha',
'like',
'heed',
'soft',
'heel',
'guff',
'host',
'tomb',
'wise',
'flys',
'goin',
'oral',
'yell',
'tibs',
'hath',
'hate',
'risk',
'siul',
'rise',
'nail',
'blue',
'hide',
'sung',
'path',
'must',
'join',
'norn',
'gate',
'mess',
'amid',
'over',
'fade',
'alex',
'each',
'laid',
'sane',
'nite',
'hang',
'free',
'fred',
'rain',
'days',
'heck',
'onto',
'tool',
'took',
'glow',
'flow',
'leis',
'pope',
'sage',
'pops',
'kali',
'weak',
'boss',
'wear',
'news',
'been',
'beer',
'jean',
'gave',
'gifs',
'gift',
'hunt',
'hung',
'cord',
'core',
'corn',
'corp',
'bath',
'rely',
'head',
'heal',
'heat',
'hear',
'trim',
'when',
'whem',
'indo',
'bile',
'town',
'cook',
'cool',
'says',
'port',
'hymn',
'goes',
'iron',
'plug',
'plus',
'yule',
'term',
'name',
'miss',
'view',
'cope',
'alan',
'copy',
'wide',
'oath',
'diug',
'duir',
'rent',
'duit',
'urge',
'sure',
'mbeo',
'trom',
'troy',
'hack',
'wood',
'wool',
'ozma',
'come',
'duty',
'pole',
'case',
'cash',
'cast',
'trip',
'lute',
'weep',
'kill',
'blow',
'blot',
'rose',
'lets',
'rosy',
'ross',
'node',
'mood',
'moon',
'quit',
'cent',
'gary',
'fish',
'wife',
'bapa',
'lack',
'dish',
'song',
'sons',
'hemp',
'list',
'rate',
'what',
'womb',
'flat',
'obod',
'mire',
'flag',
'glad',
'eden',
'omit',
'goal',
'okay',
'jove',
'inch',
'hill',
'feed',
'feel',
'feet',
'king',
'kind',
'danu',
'aims',
'dane',
'dana',
'gall',
'fond',
'font',
'luis',
'dumb',
'bark',
'mban',
'solo',
'sold',
'whom',
'edge',
'gems',
'also',
'pros',
'anne',
'prob',
'long',
'mock',
'nice',
'vice',
'once',
'gang',
'agus',
'fold',
'acid',
'folk',
'rede',
'duck',
'fron',
'cili',
'sort',
'sore',
'tthe',
'flew',
'nuit',
'york',
'nuin',
'obvd',
'cain',
'isbn',
'echo',
'july',
'deny',
'gain',
'fear',
'ones',
'spur',
'viet',
'walk',
'walt',
'lyin',
'mike',
'last',
'wilt',
'will',
'wild',
'thus',
'dual',
'thud',
'gets',
'yoou',
'wren',
'myth',
'know',
'leaf',
'lead',
'lean',
'leap',
'leat',
'obey',
'pike',
'rare',
'made',
'gaps',
'cake',
'mady',
'cave',
'book',
'sick',
'junk',
'agla',
'june',
'jung',
'sase',
'pods',
'woods',
'knelt',
'tired',
'pulse',
'atoms',
'china',
'uisci',
'norse',
'spoke',
'music',
'until',
'relax',
'glass',
'blade',
'pints',
'wrong',
'types',
'keeps',
'wales',
'thyme',
'fnord',
'arrow',
'bowls',
'mayan',
'adapt',
'sheet',
'nigel',
'dwale',
'spawn',
'sooth',
'alive',
'doors',
'henge',
'shall',
'wells',
'mouth',
'entry',
'shalt',
'grove',
'runic',
'gauge',
'theme',
'bliss',
'pelen',
'plate',
'plato',
'afall',
'rings',
'score',
'never',
'lamac',
'logic',
'argue',
'asked',
'raith',
'union',
'child',
'chill',
'tapes',
'split',
'josey',
'neter',
'veils',
'osman',
'posed',
'birth',
'bobby',
'steph',
'steps',
'right',
'crowd',
'creed',
'crown',
'bruig',
'nixon',
'nglas',
'waits',
'avail',
'untie',
'halls',
'happy',
'offer',
'batle',
'beech',
'otter',
'proud',
'exist',
'floor',
'warms',
'smell',
'palms',
'roimh',
'chain',
'chair',
'midst',
'macha',
'copse',
'edred',
'stays',
'exact',
'cooks',
'unify',
'leave',
'whome',
'badge',
'snack',
'egypt',
'owuld',
'alone',
'along',
'tasks',
'carol',
'angry',
'papal',
'films',
'scope',
'deign',
'honey',
'privy',
'stole',
'deity',
'limbo',
'apron',
'temen',
'deady',
'abide',
'quite',
'poems',
'dunno',
'monks',
'uriel',
'waist',
'sayer',
'prone',
'herds',
'nicer',
'alley',
'allen',
'aside',
'pages',
'drive',
'lotus',
'cloak',
'tears',
'going',
'imbas',
'touch',
'prime',
'where',
'mecca',
'clips',
'sites',
'acome',
'garda',
'spark',
'geese',
'wants',
'pinks',
'ascii',
'shuti',
'anseo',
'genie',
'lambs',
'nessa',
'being',
'world',
'snare',
'fives',
'brits',
'troth',
'lucid',
'refer',
'power',
'stone',
'niall',
'other',
'image',
'ciabh',
'myths',
'vanir',
'dirty',
'agree',
'tight',
'herbs',
'thorn',
'groom',
'hello',
'thorr',
'smile',
'cable',
'large',
'harry',
'small',
'hours',
'prior',
'pulls',
'trips',
'goeth',
'leary',
'learn',
'salty',
'bated',
'plant',
'plane',
'waves',
'scota',
'paper',
'scott',
'signs',
'trads',
'roots',
'isaac',
'found',
'eddas',
'ditch',
'zwack',
'rises',
'pairs',
'major',
'gazed',
'guess',
'heads',
'saint',
'grace',
'vocal',
'shops',
'virus',
'emyrs',
'seize',
'cover',
'coven',
'piper',
'xeper',
'wayne',
'cowan',
'beams',
'sunny',
'hilts',
'thoth',
'cease',
'river',
'movie',
'kneel',
'death',
'pagan',
'harps',
'whole',
'acted',
'devin',
'devil',
'teann',
'races',
'awake',
'error',
'cages',
'pound',
'sabha',
'chase',
'funny',
'cigar',
'alert',
'leaps',
'focal',
'picks',
'feces',
'kelly',
'ahura',
'quest',
'spine',
'scape',
'canst',
'maige',
'sighs',
'sight',
'pools',
'santa',
'doubt',
'opens',
'codes',
'evohe',
'swirl',
'sided',
'sides',
'holey',
'fresh',
'essay',
'totem',
'stops',
'young',
'mytle',
'darra',
'magic',
'marry',
'fewer',
'reich',
'imply',
'video',
'ithin',
'index',
'anton',
'punch',
'great',
'makes',
'maker',
'tools',
'folks',
'mabon',
'await',
'ombos',
'allow',
'rilla',
'decay',
'necht',
'betty',
'truth',
'doing',
'books',
'banal',
'ewige',
'venom',
'tempo',
'spill',
'could',
'david',
'blown',
'scene',
'stark',
'owner',
'blows',
'prank',
'start',
'stars',
'haven',
'steed',
'false',
'linda',
'bring',
'brink',
'pause',
'meant',
'bonds',
'lucky',
'stuff',
'exude',
'frame',
'liken',
'cense',
'marty',
'muine',
'beach',
'waste',
'eaton',
'groin',
'mutat',
'drink',
'audio',
'snake',
'flesh',
'gulls',
'rooms',
'photo',
'taunt',
'fools',
'criss',
'yearn',
'onnen',
'grids',
'thier',
'avoid',
'hadit',
'stage',
'seeds',
'burma',
'erect',
'roads',
'afoot',
'ovovo',
'yarns',
'gwion',
'count',
'calls',
'irish',
'jesse',
'tides',
'ghost',
'walks',
'worth',
'pinch',
'brill',
'above',
'sinks',
'pluto',
'items',
'study',
'adopt',
'dowth',
'total',
'dilly',
'reign',
'mages',
'india',
'knees',
'sever',
'after',
'uisce',
'greet',
'greek',
'green',
'south',
'worst',
'order',
'greed',
'break',
'bread',
'rocks',
'doled',
'flock',
'forty',
'forth',
'renew',
'comic',
'rated',
'mercy',
'binds',
'guilt',
'banes',
'reall',
'gravy',
'bruce',
'aesir',
'lewis',
'toast',
'elite',
'steel',
'nasty',
'steal',
'draws',
'props',
'drawn',
'terms',
'gable',
'trunk',
'wider',
'speak',
'ahead',
'beith',
'voice',
'wheel',
'horus',
'swell',
'hills',
'shout',
'board',
'night',
'mazes',
'hypoc',
'sends',
'humor',
'hodge',
'float',
'fight',
'palce',
'moors',
'dying',
'songs',
'dance',
'flash',
'turns',
'woven',
'upper',
'curse',
'argee',
'genes',
'trial',
'triad',
'extra',
'prove',
'super',
'ninny',
'gwynn',
'eidos',
'purge',
'heart',
'topic',
'heard',
'occur',
'means',
'write',
'nosed',
'wwhat',
'brain',
'whats',
'still',
'birds',
'forms',
'saxon',
'feats',
'james',
'couls',
'goose',
'fires',
'album',
'shown',
'space',
'shows',
'thong',
'blind',
'madam',
'beest',
'drove',
'sheep',
'dawns',
'baldr',
'muide',
'shady',
'angel',
'craig',
'anger',
'dread',
'begin',
'prick',
'shade',
'price',
'fifth',
'ratio',
'stair',
'title',
'skith',
'truly',
'loath',
'keyed',
'burst',
'sport',
'huggs',
'pluck',
'blame',
'hurts',
'comes',
'jeans',
'cares',
'crumb',
'these',
'trick',
'celts',
'conys',
'worry',
'sould',
'proto',
'media',
'whyte',
'medic',
'noble',
'fruit',
'heaps',
'speed',
'parts',
'verse',
'rules',
'ruler',
'gonna',
'early',
'using',
'stuns',
'ruled',
'faire',
'fairs',
'nuada',
'sixth',
'throw',
'lymph',
'fraud',
'tones',
'moved',
'oisin',
'salem',
'moves',
'valid',
'month',
'corps',
'nodes',
'robes',
'robed',
'coals',
'loose',
'arena',
'aball',
'aimed',
'lagoz',
'teach',
'takes',
'girds',
'taken',
'broke',
'hurry',
'lacks',
'tried',
'tries',
'banks',
'dream',
'match',
'fines',
'finer',
'vomit',
'payer',
'fully',
'bless',
'fairy',
'heavy',
'event',
'warts',
'since',
'dearg',
'issue',
'stead',
'inert',
'knots',
'elder',
'craps',
'horse',
'storm',
'tunic',
'bride',
'quote',
'clear',
'cleat',
'clean',
'blend',
'pucas',
'withe',
'close',
'duinn',
'vault',
'liked',
'stamp',
'empty',
'lived',
'packs',
'heute',
'while',
'smart',
'fleet',
'guide',
'reads',
'ready',
'grant',
'grand',
'older',
'cedar',
'olden',
'elect',
'elven',
'yeahm',
'maeve',
'march',
'wings',
'holes',
'modus',
'infra',
'feill',
'taboo',
'shine',
'faith',
'gnome',
'kwiat',
'alpha',
'frost',
'malik',
'fears',
'orbit',
'neils',
'bribe',
'etain',
'oliva',
'suits',
'suite',
'tonal',
'plays',
'chant',
'pitch',
'cloud',
'cosmo',
'drops',
'duile',
'hides',
'links',
'echos',
'trees',
'ruadh',
'feels',
'ogham',
'boils',
'outer',
'broom',
'abred',
'sword',
'hands',
'front',
'slide',
'chunk',
'cause',
'sneer',
'paten',
'shred',
'timed',
'spots',
'bears',
'ankle',
'final',
'lists',
'claim',
'screw',
'edric',
'based',
'bases',
'dodge',
'slows',
'joint',
'joins',
'years',
'state',
'dogon',
'merry',
'thumb',
'mindy',
'minds',
'admit',
'lofty',
'kings',
'tread',
'wills',
'treat',
'novel',
'chalk',
'bucks',
'began',
'begat',
'party',
'gaoth',
'skill',
'ought',
'clown',
'likes',
'peter',
'murry',
'inner',
'north',
'xerox',
'goals',
'leper',
'cells',
'limit',
'piece',
'beats',
'conan',
'lives',
'liver',
'aided',
'whose',
'sorry',
'fists',
'force',
'saved',
'niche',
'spiel',
'moral',
'barks',
'oscar',
'cuups',
'dogma',
'adult',
'flora',
'guise',
'foods',
'thine',
'laida',
'third',
'jacob',
'weeks',
'kesey',
'grade',
'girls',
'dwell',
'helen',
'bikka',
'silly',
'caled',
'tells',
'chest',
'atman',
'marie',
'doves',
'dover',
'debts',
'coast',
'bhall',
'cower',
'zones',
'sorta',
'sorts',
'weary',
'scuba',
'lowly',
'ether',
'lttle',
'basis',
'three',
'basic',
'threw',
'seven',
'shame',
'grown',
'bella',
'grows',
'bells',
'meets',
'smoke',
'yours',
'human',
'facts',
'nudge',
'meath',
'jeeah',
'dense',
'track',
'beget',
'faery',
'azure',
'nerve',
'coins',
'erinn',
'sagas',
'suebi',
'gland',
'vista',
'holly',
'aware',
'daily',
'souls',
'marks',
'round',
'seidr',
'visit',
'rigid',
'olive',
'bites',
'lords',
'crazy',
'agent',
'swore',
'wicca',
'waxen',
'maybe',
'block',
'serge',
'tales',
'shake',
'clans',
'group',
'thank',
'views',
'boats',
'ruddy',
'cough',
'thing',
'think',
'first',
'carry',
'murky',
'slept',
'fiery',
'gotta',
'kevin',
'squaw',
'trace',
'enter',
'aloud',
'tract',
'eadha',
'fills',
'ladle',
'black',
'helix',
'sidhe',
'hippo',
'hippy',
'skull',
'yield',
'dagda',
'dates',
'among',
'barry',
'sound',
'issac',
'handy',
'weave',
'conor',
'solve',
'gates',
'money',
'forge',
'adams',
'titan',
'erase',
'gross',
'ellis',
'lunar',
'knife',
'tease',
'mixed',
'vigil',
'strip',
'fates',
'rider',
'adler',
'gaels',
'hound',
'midhe',
'boons',
'casts',
'talks',
'those',
'furze',
'steve',
'angus',
'hogan',
'stood',
'garth',
'bible',
'liber',
'wiped',
'canon',
'sylph',
'halts',
'biodh',
'share',
'needs',
'sired',
'lythe',
'blood',
'kegan',
'heirs',
'bosom',
'balor',
'walls',
'ashes',
'token',
'house',
'deals',
'podge',
'print',
'least',
'paint',
'sated',
'rates',
'stirs',
'youth',
'built',
'couch',
'onset',
'build',
'flute',
'chart',
'charm',
'giant',
'gilly',
'celyn',
'ovate',
'aldus',
'rover',
'sigil',
'cakes',
'point',
'raise',
'inoke',
'solid',
'lingo',
'realm',
'spent',
'flags',
'spend',
'alder',
'gheal',
'shape',
'hated',
'hates',
'knock',
'dtoil',
'often',
'scale',
'smail',
'crone',
'goods',
'yorba',
'drama',
'piled',
'ounce',
'phase',
'grave',
'hoped',
'hopes',
'adair',
'metal',
'ellen',
'druim',
'druid',
'druis',
'wards',
'dhuit',
'querc',
'fitch',
'tombs',
'quert',
'helps',
'queen',
'rhyme',
'tinne',
'pipes',
'gimle',
'safer',
'drank',
'climb',
'proof',
'honor',
'named',
'drums',
'names',
'train',
'hints',
'fetch',
'bones',
'holds',
'orson',
'lines',
'linen',
'chief',
'lands',
'horns',
'bunch',
'labor',
'pangs',
'spell',
'cider',
'faidh',
'cures',
'sadly',
'scant',
'loved',
'draoi',
'lover',
'waken',
'hazel',
'eight',
'sally',
'gurus',
'staff',
'areas',
'organ',
'fixed',
'rowan',
'legis',
'equal',
'sexes',
'laugh',
'thsis',
'plain',
'value',
'esras',
'diety',
'whirr',
'again',
'usual',
'chord',
'seats',
'tough',
'spear',
'field',
'crops',
'raven',
'tests',
'testy',
'dirac',
'ozone',
'works',
'awash',
'prose',
'noise',
'panel',
'gifts',
'about',
'evoke',
'guard',
'hexed',
'globe',
'ivory',
'misty',
'glory',
'mists',
'ledge',
'pupil',
'sleep',
'paris',
'under',
'pride',
'every',
'venus',
'enjoy',
'seeks',
'druel',
'forum',
'stray',
'julia',
'would',
'raids',
'naive',
'phone',
'tampa',
'henry',
'shook',
'bothy',
'times',
'badly',
'canto',
'edits',
'diana',
'newly',
'wowed',
'sobek',
'pyres',
'awoke',
'sober',
'serve',
'gulfs',
'roles',
'aztec',
'flame',
'beket',
'mirth',
'beard',
'bards',
'pylon',
'cleff',
'clock',
'radio',
'queer',
'earth',
'spite',
'watch',
'tammy',
'haite',
'aught',
'beads',
'twice',
'shots',
'swept',
'habit',
'wakan',
'tommy',
'mazda',
'faced',
'fault',
'games',
'faces',
'trust',
'drugs',
'craft',
'catch',
'broth',
'wound',
'lusty',
'sings',
'welsh',
'cycle',
'ocean',
'torah',
'thick',
'suzan',
'judge',
'burns',
'rumor',
'apart',
'adieu',
'usher',
'armed',
'foote',
'khaki',
'choke',
'civil',
'class',
'stuck',
'solar',
'annwn',
'adore',
'check',
'eerie',
'navel',
'focus',
'leads',
'tanka',
'level',
'quick',
'doeth',
'fearn',
'cyber',
'colin',
'dried',
'spiro',
'reply',
'hairs',
'dazed',
'water',
'witch',
'boast',
'weird',
'isles',
'vivid',
'runes',
'tubes',
'today',
'altar',
'lazlo',
'cases',
'piano',
'wands',
'rufus',
'boris',
'nexus',
'sense',
'nuber',
'bigot',
'gulik',
'vital',
'rites',
'amuse',
'chips',
'swans',
'bound',
'opera',
'torch',
'torcs',
'begun',
'plans',
'eagle',
'flyer',
'place',
'swing',
'feign',
'given',
'white',
'circa',
'giver',
'gives',
'lacha',
'cards',
'mated',
'sells',
'ideas',
'ideal',
'falls',
'later',
'uncle',
'perch',
'cheap',
'dirge',
'crime',
'trade',
'olaus',
'naked',
'scots',
'fails',
'shiva',
'seers',
'quiet',
'cabal',
'color',
'deeds',
'abdul',
'cords',
'maire',
'shaft',
'mound',
'thses',
'arose',
'raibh',
'model',
'seems',
'kills',
'boyne',
'acute',
'inate',
'fount',
'tower',
'tarot',
'table',
'frees',
'legal',
'freed',
'raths',
'stand',
'hindu',
'amber',
'tribe',
'there',
'beret',
'grasp',
'grass',
'taste',
'tasty',
'abyss',
'wkshp',
'trash',
'brass',
'oriel',
'curve',
'ments',
'seals',
'woman',
'worse',
'awful',
'vibes',
'brief',
'texts',
'faint',
'wield',
'minor',
'knows',
'waxed',
'known',
'ouija',
'brown',
'arise',
'court',
'maith',
'cults',
'short',
'susan',
'style',
'abbey',
'might',
'alter',
'odubh',
'loops',
'huath',
'roofs',
'fancy',
'blank',
'story',
'comfy',
'syrup',
'store',
'pains',
'hotel',
'exert',
'oaken',
'alike',
'fionn',
'added',
'reach',
'react',
'niver',
'liath',
'lying',
'gains',
'wrote',
'blurb',
'latin',
'epona',
'herne',
'roman',
'finds',
'cynic',
'sweet',
'sweep',
'goats',
'brick',
'miles',
'apear',
'paths',
'flows',
'birch',
'lower',
'redes',
'tends',
'jumps',
'aloft',
'plaza',
'range',
'wanne',
'wanna',
'muzak',
'canal',
'files',
'cloth',
'filed',
'crane',
'freak',
'stick',
'rally',
'peace',
'gloss',
'risin',
'karma',
'broad',
'alien',
'winds',
'simon',
'abode',
'notes',
'dealt',
'dealy',
'noted',
'folds',
'poets',
'chuid',
'chose',
'lends',
'matov',
'apple',
'apply',
'grian',
'porch',
'women',
'roich',
'sakes',
'clues',
'asily',
'abuse',
'light',
'looks',
'quake',
'ships',
'crash',
'enemy',
'crass',
'feast',
'chaos',
'chaol',
'pours',
'hence',
'their',
'shell',
'scone',
'reeks',
'which',
'clasp',
'stove',
'utter',
'local',
'words',
'ended',
'truer',
'magus',
'favor',
'swift',
'grain',
'grail',
'tutor',
'ysgaw',
'anois',
'layer',
'motif',
'pikes',
'cross',
'unite',
'unity',
'units',
'slave',
'idris',
'cried',
'cries',
'tawny',
'press',
'gypsy',
'miami',
'loses',
'hosts',
'nagas',
'noose',
'pasts',
'urged',
'swear',
'sweat',
'brush',
'fired',
'siuil',
'funds',
'below',
'ailim',
'hymns',
'lance',
'kinds',
'cliff',
'yellow',
'verses',
'outwit',
'tingle',
'oceans',
'wooden',
'piling',
'ornate',
'second',
'sailed',
'errors',
'golden',
'strike',
'whorls',
'brings',
'hereby',
'locked',
'pursue',
'tuatha',
'exhale',
'unjust',
'travel',
'drying',
'beauty',
'youths',
'kokomo',
'preeve',
'hidden',
'easier',
'enrich',
'silver',
'rumour',
'barton',
'snakes',
'series',
'george',
'speeds',
'needed',
'master',
'listed',
'bitter',
'listen',
'danish',
'wisdom',
'doreen',
'showed',
'outlaw',
'endure',
'person',
'scummy',
'figger',
'object',
'letter',
'snares',
'brehon',
'mating',
'scream',
'saying',
'layout',
'louise',
'fetish',
'jaguar',
'greens',
'radius',
'result',
'hammer',
'stamps',
'extend',
'nature',
'rolled',
'extent',
'summat',
'gheall',
'worked',
'employ',
'played',
'things',
'format',
'beckon',
'babies',
'fairly',
'maxims',
'echoes',
'bergan',
'sleeps',
'prison',
'elders',
'online',
'shadow',
'unique',
'desire',
'remind',
'people',
'hoover',
'bottom',
'shakes',
'losing',
'bowing',
'collen',
'raised',
'danaan',
'beings',
'raises',
'shoots',
'joseph',
'utmost',
'inside',
'solong',
'proved',
'proven',
'oscail',
'intend',
'models',
'taurus',
'intent',
'cupped',
'recipe',
'choice',
'minute',
'amoral',
'skewed',
'solved',
'settle',
'erotic',
'freaks',
'ground',
'honour',
'carill',
'tyoing',
'saints',
'prefer',
'druids',
'helter',
'loving',
'visual',
'virtue',
'riders',
'nether',
'values',
'winter',
'mythos',
'missal',
'stress',
'briony',
'pulled',
'scents',
'aeonic',
'augury',
'course',
'derive',
'solace',
'dwarfs',
'nroogd',
'nation',
'amulet',
'thuinn',
'square',
'herard',
'astral',
'sorcha',
'shores',
'impure',
'routes',
'spoked',
'saving',
'spoken',
'clause',
'submit',
'mannan',
'loveth',
'typing',
'thetis',
'future',
'gwyned',
'russia',
'sophia',
'turned',
'buried',
'nymphs',
'towers',
'airing',
'crealm',
'bright',
'artist',
'borrow',
'priest',
'liable',
'vision',
'pisces',
'dexter',
'harvey',
'bureau',
'jumper',
'screen',
'planes',
'loudly',
'riddle',
'breath',
'enable',
'benoit',
'formed',
'photos',
'extant',
'former',
'tribal',
'region',
'zonked',
'flocks',
'pacing',
'summer',
'joyful',
'around',
'yearly',
'racial',
'divine',
'thinks',
'biased',
'phagos',
'dagdha',
'burial',
'legged',
'lively',
'gennep',
'cultic',
'buying',
'abused',
'naming',
'shevet',
'glenda',
'johnny',
'forgot',
'carved',
'novice',
'waving',
'caused',
'urbane',
'causes',
'paying',
'sabbat',
'laying',
'joined',
'healed',
'healer',
'seemed',
'method',
'dwells',
'legend',
'fergus',
'bestow',
'action',
'filial',
'select',
'casket',
'coyote',
'ghrian',
'stated',
'cosmic',
'staten',
'accept',
'states',
'gallon',
'pogrom',
'refuse',
'termed',
'gerald',
'hounds',
'olivia',
'misuse',
'always',
'truths',
'osiris',
'arrray',
'detect',
'waking',
'reduce',
'finish',
'scribe',
'really',
'missed',
'dianic',
'finnen',
'occurs',
'salute',
'belief',
'bedlam',
'murmur',
'reared',
'castle',
'number',
'fluxes',
'ulster',
'justly',
'warmly',
'stifle',
'wessex',
'uscias',
'barred',
'barren',
'impact',
'failed',
'factor',
'wiccan',
'weaved',
'peleus',
'weaver',
'insane',
'potent',
'europe',
'barely',
'agents',
'church',
'satire',
'lovest',
'admire',
'varies',
'triple',
'vanish',
'satyrs',
'survey',
'levels',
'recent',
'expend',
'papyri',
'mandan',
'couple',
'falcon',
'formal',
'facets',
'yields',
'tribes',
'spring',
'bounce',
'mighty',
'behave',
'dating',
'temple',
'clancy',
'pietra',
'repaid',
'sneaky',
'mythic',
'custom',
'inches',
'jewish',
'pastor',
'posted',
'horned',
'mature',
'pineal',
'purges',
'fixing',
'bianca',
'unplug',
'scotch',
'walked',
'purged',
'oracle',
'gleini',
'menace',
'having',
'gossip',
'garden',
'timely',
'pledge',
'fables',
'victor',
'waning',
'indian',
'orainn',
'engage',
'stairs',
'defeat',
'quests',
'eleven',
'pencil',
'zimmer',
'bodily',
'pieces',
'chakra',
'martyr',
'senach',
'animal',
'blocks',
'tomato',
'shapes',
'chosen',
'traced',
'fields',
'statis',
'jagged',
'wander',
'motifs',
'perish',
'greedy',
'surely',
'thrust',
'length',
'system',
'norton',
'quarry',
'bodrun',
'aztecs',
'gently',
'gentle',
'viewed',
'korean',
'sirius',
'device',
'rubric',
'decade',
'smacks',
'should',
'riding',
'handle',
'summon',
'rapped',
'needto',
'galaxy',
'gnomes',
'getyou',
'ollomh',
'engulf',
'waseth',
'dunnan',
'banish',
'basket',
'hahaha',
'tattoo',
'denial',
'spends',
'colour',
'strive',
'warded',
'height',
'become',
'aillil',
'singin',
'wields',
'gained',
'sister',
'chrome',
'suited',
'assess',
'immune',
'camden',
'heresy',
'apollo',
'tossed',
'places',
'smooth',
'placed',
'rubble',
'denote',
'deadly',
'behold',
'monday',
'chance',
'lasted',
'defend',
'oldest',
'davies',
'rocnat',
'chapel',
'upload',
'gaelic',
'totems',
'follow',
'equals',
'secure',
'highly',
'dannan',
'groves',
'gloria',
'impair',
'chilly',
'verify',
'coined',
'retain',
'office',
'devote',
'pascal',
'denver',
'versus',
'lifted',
'kneels',
'evolve',
'hosted',
'hazels',
'sprigg',
'alskog',
'thuban',
'raisin',
'oppose',
'eremon',
'rooted',
'nooses',
'emblem',
'albert',
'jingle',
'wasted',
'gwynyd',
'target',
'scenes',
'seated',
'powers',
'return',
'manner',
'forced',
'latter',
'forces',
'magnum',
'maiden',
'lindow',
'oftenj',
'hindic',
'notion',
'echlyn',
'subtle',
'latent',
'masons',
'vowels',
'roinnt',
'cattle',
'imbolg',
'imbolc',
'pasted',
'unable',
'arcane',
'accord',
'unfold',
'relics',
'warmth',
'duties',
'excite',
'abrupt',
'vernal',
'avoids',
'depict',
'faiths',
'humble',
'humbly',
'indigo',
'quoted',
'newton',
'quotes',
'thanks',
'victim',
'swears',
'hallow',
'spread',
'jehana',
'viking',
'usable',
'alther',
'libhse',
'scaoil',
'purple',
'denied',
'framed',
'theirs',
'months',
'treble',
'amidst',
'lowest',
'rabbis',
'injury',
'veiled',
'flecks',
'lugaid',
'prayed',
'gather',
'manure',
'prayer',
'shrine',
'scored',
'pefect',
'murias',
'matron',
'global',
'battle',
'rhythm',
'terror',
'eliade',
'drakes',
'appear',
'aneris',
'shared',
'appeal',
'muslin',
'change',
'flames',
'pillow',
'marked',
'marker',
'rarely',
'market',
'salted',
'angels',
'slogan',
'decked',
'social',
'purity',
'heroes',
'indeed',
'aiding',
'selves',
'window',
'norway',
'domain',
'happen',
'opened',
'orphic',
'opener',
'cycles',
'honest',
'styles',
'flying',
'silent',
'caught',
'friend',
'mostly',
'brains',
'fruits',
'neatly',
'steady',
'sunset',
'filing',
'sceach',
'german',
'dougal',
'cannon',
'samuel',
'cannot',
'seldom',
'fanned',
'flatly',
'herbal',
'import',
'notice',
'aradia',
'wheels',
'nearby',
'differ',
'failte',
'beaver',
'poetic',
'cuchul',
'breezy',
'breeze',
'poplar',
'struck',
'dishes',
'pounds',
'twelve',
'verbal',
'oscars',
'willie',
'verily',
'mircea',
'grange',
'poured',
'shower',
'trying',
'sheath',
'merest',
'issian',
'diruit',
'dabble',
'gorias',
'gavity',
'ceased',
'carpet',
'foster',
'eldest',
'minded',
'decide',
'finger',
'heaven',
'orgasm',
'casual',
'edited',
'excess',
'strong',
'amoung',
'losses',
'amount',
'family',
'chunks',
'excuse',
'sweeps',
'visage',
'pushes',
'pushed',
'phrase',
'firmly',
'reject',
'anoint',
'minuto',
'wiener',
'huathe',
'bagage',
'beyond',
'payson',
'robert',
'safety',
'houses',
'reason',
'fenian',
'launch',
'scouts',
'assign',
'meadow',
'prople',
'pinned',
'expand',
'bardai',
'raping',
'kindle',
'toward',
'mickey',
'option',
'adrift',
'trance',
'salary',
'raving',
'sturdy',
'latest',
'jehova',
'pretty',
'circle',
'albeit',
'famous',
'during',
'stored',
'arrien',
'howled',
'cuirim',
'horrid',
'segais',
'header',
'vessel',
'partly',
'hoodoo',
'soiled',
'dictum',
'herein',
'belong',
'margot',
'influx',
'bearer',
'dearly',
'toyota',
'sketch',
'parade',
'urgent',
'savage',
'wealth',
'conrad',
'wicker',
'seeing',
'within',
'smells',
'innate',
'oghams',
'wicked',
'adults',
'willed',
'rupert',
'turedh',
'smiles',
'kansas',
'leaned',
'rested',
'oregon',
'knowth',
'wizard',
'athame',
'button',
'lleian',
'picked',
'rotten',
'infuse',
'scythe',
'ramses',
'orkney',
'danger',
'manage',
'infest',
'hiding',
'cheeks',
'sought',
'poetry',
'beatha',
'chiefs',
'thence',
'upward',
'ankles',
'inputs',
'morton',
'reacts',
'austin',
'boring',
'attack',
'punish',
'mantra',
'weiser',
'sewing',
'saolta',
'spleen',
'affair',
'parker',
'anyway',
'likens',
'argues',
'stance',
'cardan',
'ordain',
'preach',
'poular',
'humans',
'canine',
'tosses',
'limits',
'career',
'jersey',
'agreed',
'inhale',
'slowly',
'senses',
'league',
'sensed',
'harder',
'yeself',
'campus',
'chants',
'absorb',
'effect',
'fierce',
'single',
'hecate',
'trials',
'spells',
'idunna',
'logged',
'killed',
'sarsen',
'burden',
'starry',
'repeat',
'quarts',
'bundle',
'muchly',
'refuge',
'primal',
'museum',
'harken',
'signed',
'pumped',
'devise',
'kisses',
'upsets',
'relive',
'fodder',
'canvas',
'acroos',
'sphinx',
'todays',
'pardon',
'demand',
'fronts',
'sigils',
'rescue',
'govern',
'affect',
'baking',
'skills',
'graced',
'vector',
'graces',
'dogmas',
'likely',
'panama',
'lights',
'active',
'luther',
'credit',
'permit',
'joshua',
'sliced',
'guests',
'opovig',
'janine',
'expose',
'frenzy',
'sumbel',
'rights',
'climax',
'faults',
'answer',
'murray',
'censer',
'oswald',
'before',
'chatti',
'better',
'glazed',
'taught',
'decree',
'reader',
'grease',
'linear',
'resume',
'shaggy',
'setian',
'aphrod',
'starts',
'plains',
'tokens',
'swells',
'begins',
'matter',
'street',
'palace',
'rattle',
'modern',
'wrists',
'points',
'lyrics',
'hunger',
'retire',
'ending',
'poison',
'nevada',
'saturn',
'sinked',
'mayans',
'derwen',
'softly',
'voyage',
'siging',
'staves',
'peyotl',
'peyote',
'tasted',
'bardic',
'lovely',
'deeper',
'sooooo',
'magics',
'jewels',
'deepen',
'affirm',
'tastes',
'melted',
'anchor',
'mexico',
'tugaim',
'parish',
'writer',
'hooded',
'novels',
'butter',
'berate',
'claims',
'givers',
'unfair',
'legion',
'defray',
'rotate',
'dreams',
'signal',
'strife',
'normal',
'goddeu',
'goddes',
'keeper',
'myrdin',
'commit',
'editor',
'builds',
'bounds',
'tucson',
'cosmos',
'boomed',
'sticks',
'covers',
'awhile',
'handed',
'hangin',
'assume',
'undine',
'forthe',
'invent',
'themes',
'primer',
'father',
'string',
'forked',
'nectar',
'gypsie',
'talked',
'eating',
'troops',
'favour',
'filled',
'french',
'merlin',
'monies',
'useful',
'merely',
'pluses',
'cabals',
'repast',
'effort',
'making',
'arrive',
'sample',
'drawer',
'dennis',
'sunday',
'swayed',
'purify',
'hassan',
'shield',
'entity',
'policy',
'truest',
'tucked',
'sooner',
'arrows',
'zurich',
'richly',
'unlock',
'giveth',
'canada',
'living',
'greeks',
'pamela',
'romans',
'pundit',
'waited',
'rachel',
'sounds',
'little',
'anyone',
'knives',
'booted',
'liveth',
'topics',
'voices',
'roster',
'bowels',
'queens',
'jumped',
'uranus',
'corner',
'storms',
'behind',
'dianna',
'stupid',
'moladh',
'asatru',
'wonder',
'enough',
'across',
'august',
'parent',
'cancer',
'cancel',
'lakota',
'unwise',
'rammed',
'coffee',
'middle',
'sudden',
'ooomph',
'straif',
'wisely',
'doctor',
'driseg',
'speech',
'assist',
'driven',
'bottle',
'amazed',
'aspect',
'dipped',
'fucked',
'aiming',
'serves',
'facing',
'served',
'moving',
'images',
'ascent',
'stages',
'overly',
'caring',
'broken',
'visits',
'refers',
'helgen',
'island',
'tupper',
'dagger',
'downey',
'wiping',
'dolmen',
'cabled',
'cables',
'ngetal',
'ofrail',
'waxing',
'tackle',
'grudge',
'neters',
'forget',
'deosil',
'dollar',
'forged',
'washer',
'titled',
'strung',
'titles',
'public',
'beacon',
'enmity',
'stella',
'narrow',
'africa',
'croons',
'landed',
'trelle',
'strain',
'recall',
'remain',
'clergy',
'meeker',
'attain',
'sacred',
'charms',
'advice',
'coming',
'bathed',
'dragon',
'infant',
'odinic',
'winged',
'suffer',
'libido',
'obeyed',
'oxford',
'detach',
'easily',
'habits',
'fiacha',
'mental',
'energy',
'orient',
'lovers',
'flowed',
'backed',
'luisne',
'twenty',
'plexus',
'ordure',
'malice',
'gotten',
'orders',
'salmon',
'syntax',
'thomas',
'cerrig',
'hopped',
'evoked',
'inanna',
'merged',
'remove',
'common',
'gospel',
'devils',) |
# Read two real numbers and report how they compare to each other.
first = float(input("Type the first number: "))
second = float(input("Type the second number: "))
print(f"Are equals? {first == second}")
print(f"Are different? {first != second}")
print(f"First number is greater than Second number? = {first > second}")
print(f"Second is greater or equal than First number ? = {second >= first}")

# Check whether the string length falls in the half-open range [3, 10).
text = input("Type a string: ")
in_range = 3 <= len(text) < 10
print(f"The string is greater or equal than 3 and less than 10? = {in_range}")

# Classic 12345679 trick: multiplying by 9 * digit yields a nine-digit repdigit.
MAGIC_BASE = 12345679
digit = int(input("Type a random number between 1 and 9:"))
print("The magic number is: ", MAGIC_BASE * digit * 9)
|
# Course metadata for the Kaggle Learn "Computer Vision" track.
track = dict(
    author_username='ryanholbrook',
    course_name='Computer Vision',
    course_url='https://www.kaggle.com/ryanholbrook/computer-vision',
    course_forum_url='https://www.kaggle.com/learn-forum',
)

TOPICS = [
    'The Convolutional Classifier',
    'Convolution and ReLU',
    'Maximum Pooling',
    'The Moving Window',
    'Custom Convnets',
    'Data Augmentation',
]
lessons = [{'topic': topic} for topic in TOPICS]

# 0-based lesson indices whose notebooks need a GPU (chapters 1, 5 and 6).
_GPU_LESSONS = frozenset({0, 4, 5})

# Every lesson ships exactly one tutorial and one exercise notebook,
# named tutN.ipynb / exN.ipynb with N = 1-based chapter number.
notebooks = []
for lesson_idx in range(len(TOPICS)):
    for nb_type, prefix in (('tutorial', 'tut'), ('exercise', 'ex')):
        nb = dict(
            filename=f'{prefix}{lesson_idx + 1}.ipynb',
            lesson_idx=lesson_idx,
            type=nb_type,
        )
        if lesson_idx in _GPU_LESSONS:
            nb['enable_gpu'] = True
        notebooks.append(nb)

_BASE_DATASETS = (
    'ryanholbrook/car-or-truck',
    'ryanholbrook/computer-vision-resources',
    'ryanholbrook/cv-course-models',
)
for nb in notebooks:
    nb['dataset_sources'] = list(_BASE_DATASETS)
    # Chapter 6 notebooks additionally pull the flowers and EuroSAT datasets.
    if '6' in nb['filename']:
        nb['dataset_sources'] += [
            'ryanholbrook/tensorflow-flowers',
            'ryanholbrook/eurosat',
        ]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) Copyright IBM Corp. 2010, 2020. All Rights Reserved.
"""Contains a dict to validate the app configs"""
# Validation rules for app configs, keyed by config name. Each entry gives:
#   required        - whether the config must be present
#   valid_condition - predicate returning True when the value is acceptable
#   invalid_msg     - message surfaced when the predicate fails
VALIDATE_DICT = {
    "num_workers": {
        "required": False,
        # Chained comparison replaces the redundant `True if ... else False`.
        "valid_condition": lambda c: 1 <= c <= 50,
        # Fixed: the original message read "range 1 <= 50", dropping the variable.
        "invalid_msg": "num_workers must be in the range 1 <= num_workers <= 50"
    }
}
|
# Greet the first person, then print everyone forwards, then backwards.
names = ['Felipe', 'Celina', 'Jullyana', 'Lucas', 'Bryanda']

print(f'\nOlá {names[0]}\n')
for person in names[1:]:
    print(f'{person}\n')

print('=-' * 30)

print(f'\n{names[-1]}\n')
for person in names[-2::-1]:
    print(f'{person}\n')
|
# Compute n! iteratively while printing the expansion, e.g.
# "Calculating 5! = 5 X 4 X 3 X 2 X 1 = 120".
# (math.factorial would give the value directly, but not the printed steps.)
number = int(input('Choose a number: '))
result = 1
print('Calculating {}! = '.format(number), end='')
# Count down from `number`; the original's nested `if control > 0` was
# always true inside the loop and has been removed.
for factor in range(number, 0, -1):
    # Separate factors with ' X '; the final factor is followed by ' = '.
    print('{} X '.format(factor) if factor > 1 else '{} = '.format(factor), end='')
    result *= factor
print('{}'.format(result))
# Regex patterns used to strip chapter headings, subtitle timestamps, stage
# directions and speaker tags from play/subtitle text.
# All patterns are raw strings: in the original, non-raw literals turned
# '\b' into a literal backspace character (\x08), so the word-boundary
# patterns could never match ('\d' etc. only warned, but '\b' silently broke).
exclusions = [r'\bCHAPTER (\d*|M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3}))\.*\b',
              'CHAPTER',
              r'\bChapter \d*\b',
              r'\d{2}:\d{2}:\d{2},\d{3}\s-->\s\d{2}:\d{2}:\d{2},\d{3}',
              r'^M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})\.?\n',
              r'^[^\w\d\s]*\n',
              r'^(CLOV|HAMM|NAGG|NELL)\s\([\w\s\d]*\):',
              r'^(HAMM|NAGG|NELL|CLOV):', r'^-\s',
              r'(Merryn|Hygd|Physician|Goneril|Cordeil|Gormflaith|The Younger Woman|The Elder Woman).*\.',
              r'^Scene:.+\.\n',
              r'^Lear.*\.\n',
              r'^ACT I{0,3}V{0,1}\. Scene \w\.\n',
              r'^Enter.*\.\n',
              r'\[.*\n*.*\]',
              r'Exeunt\.',
              r'(Glou|Edm|Bur|Corn|Alb|Edg|Cur|Doct|Osw|Capt|Gent|Her|\d\. Serv|Gon|Reg|Cor|Knight)(\.+|,+)\s*\n*',
              r'(Bast|Glost|Cord|Edgar|Kent|Lear|Alb|Burg|Duke|Reg|Gon|Att|Gent)\.+\n+',
              r'S C E N E(\.|,)',
              r'A C T\s+(IV|I+|V)\.?',
              r'^\^\).*\n',
              r'Mme\.+\s+',
              r'^\*\).*\n',
              r'\d+\n',
              r'\b\d+\b',
              r'\d+',
              r'\"',
              r'\s+\.\s+'
              ]
|
class Agent:
    """Tabular RL agent that trains a Q-function against a Gym-style env.

    Collaborators are duck-typed: ``env`` (reset/step/render), ``q_function``
    (reset/update_q_function/get_q_function/get_best_action),
    ``action_selector`` (reset/select_action/post_update) and ``logger``
    (log/write).
    """

    def __init__(self, env, q_function, action_selector, logger):
        self._env = env
        self._q_function = q_function
        self._action_selector = action_selector
        self._logger = logger

    def train(self, steps, episodes, filepath, filename):
        """Run `episodes` training episodes of at most `steps` steps each,
        then dump the per-episode log and the learned Q-function as CSV
        under `filepath` using `filename` as the stem."""
        self._q_function.reset()
        self._action_selector.reset()
        cumulative_reward = 0
        for episode in range(episodes):
            state = self._env.reset()
            episode_reward = 0
            episode_length = 0
            for _ in range(steps):
                action = self._action_selector.select_action(state, self._q_function, self._env)
                next_state, reward, done, info = self._env.step(action)
                # TD error from the Q update feeds back into the selector.
                td_error = self._q_function.update_q_function(state, next_state, action, reward)
                self._action_selector.post_update(state, action, td_error)
                state = next_state
                episode_reward += reward
                episode_length += 1
                if done:
                    break
            cumulative_reward += episode_reward
            self._logger.log(episode, episode_reward, cumulative_reward, episode_length)
        self._logger.write(filepath + '/training-data/' + filename + '.csv')
        self._logger.write(filepath + '/q_function/' + filename + '-q-function.csv',
                           self._q_function.get_q_function())

    def solve(self, steps, render):
        """Greedily follow the learned Q-function for up to `steps` steps,
        optionally rendering, and print a summary of the episode."""
        state = self._env.reset()
        episode_reward = 0
        if render:
            self._env.render()
        for i in range(steps):
            action = self._q_function.get_best_action(state)
            state, reward, done, info = self._env.step(action)
            episode_reward += reward
            if render:
                self._env.render()
            if done:
                break
        print("Episode finished after {} timesteps".format(i + 1))
        print("Cumulative reward at end = " + str(episode_reward))
|
class Parser():
    """Lexer and structure checker for a tiny propositional-logic language
    over the atoms 'p' and 'q' with connectives ~, &, v, -> and <->."""

    # Terminal token codes produced by lexicalAnalysis().
    T_LPAR = 0
    T_RPAR = 1
    T_ID = 2
    T_OP = 3
    T_END = 4
    T_NEG = 5
    T_INVALID = 6

    # Vocabulary actually recognised by the lexer (used in error messages).
    # Fixed: printMsg referenced self.LEXICON, which was only present as a
    # commented-out sketch, so it raised AttributeError at runtime.
    OPERATORS = ['~', '&', 'v', '->', '<->']
    OPERANDS = ['p', 'q']
    PARENTHESES = ['(', ')']
    LEXICON = OPERATORS + OPERANDS + PARENTHESES

    def __init__(self):
        pass

    def lexicalAnalysis(self, inputstring):
        """Tokenize `inputstring` into a list of T_* codes terminated by
        T_END.  Unknown or truncated symbols produce T_INVALID; a trailing
        '-' or '<' no longer raises IndexError (fixed bounds checks)."""
        print('Lexical analysis')
        tokens = []
        i = 0
        n = len(inputstring)
        while i < n:
            c = inputstring[i]
            if c == '-':  # expect '->'
                i += 1
                tokens.append(self.T_OP if i < n and inputstring[i] == '>' else self.T_INVALID)
            elif c == '<':  # expect '<->'
                i += 1
                if i < n and inputstring[i] == '-':
                    i += 1
                    tokens.append(self.T_OP if i < n and inputstring[i] == '>' else self.T_INVALID)
                else:
                    tokens.append(self.T_INVALID)
            elif c == 'v': tokens.append(self.T_OP)
            elif c == '&': tokens.append(self.T_OP)
            elif c == 'p': tokens.append(self.T_ID)
            elif c == 'q': tokens.append(self.T_ID)
            elif c == '(': tokens.append(self.T_LPAR)
            elif c == ')': tokens.append(self.T_RPAR)
            elif c == '~': tokens.append(self.T_NEG)
            else: tokens.append(self.T_INVALID)
            i += 1
        tokens.append(self.T_END)
        return tokens

    def checkStructure(self, statement_str):
        """Return True when the statement reduces to a single well-formed
        formula.  Works by replacing atoms with '%' and repeatedly collapsing
        grammar productions, e.g.:
            ((p -> q) <-> q)  ->  ((% -> %) <-> %)  ->  (% <-> %)  ->  %
        """
        st = statement_str.replace('p', '%').replace('q', '%')
        return self.structureRecursion(st) == '%'

    def structureRecursion(self, st):
        # TODO: connectives 'v' and '&' are not collapsed here yet; the
        # parse tree could be built during this same recursion.  Also note
        # this matches plain strings where tokens would be more robust.
        st_orig = st
        st = st.replace('~%', '%').replace('~%', '%')
        st = st.replace('(% <-> %)', '%').replace('(% -> %)', '%')
        if st != '%' and st_orig != st:
            st = self.structureRecursion(st)
        return st

    def parseTree(self, tokens):
        # TODO: build the actual parse tree; 0 signals an error to callers.
        return 1

    def translate(self, tree):
        return 0

    def printMsg(self, msg):
        """Return a human-readable (Finnish) error message for `msg`;
        returns None for unknown message keys."""
        if msg == "syntax_error":
            return "Virheellinen syntaksi. Sallitut sanat ovat " + ", ".join(self.LEXICON)
# Simple FIFO to-do queue demo backed by a plain list.
q = []
for task in ('есть', 'спать', 'программировать'):
    q.append(task)
print(q)
# ['есть', 'спать', 'программировать']
# Careful: popping from the front of a list is O(n) - very slow for big lists!
print(q.pop(0))
# 'есть'
|
# A student record as a plain dict, plus a free function for the mean grade.
student = {"name": "Rolf", "grades": (89, 9, 93, 78, 90)}


def average(seq):
    """Return the arithmetic mean of a non-empty sequence of numbers."""
    total = sum(seq)
    return total / len(seq)


print(average(student["grades"]))
class Student:
    """A student with a name and a collection of grades."""

    def __init__(self, name, grades):
        self.name = name
        self.grades = grades

    def averageGrade(self):
        """Return the arithmetic mean of this student's grades."""
        grade_count = len(self.grades)
        return sum(self.grades) / grade_count


student = Student("Bob", (89, 90, 93, 78, 90))
student2 = Student("Rolf", (89, 9, 93, 78, 90))

for pupil in (student, student2):
    print(pupil.name)
    print(pupil.grades)
    print(pupil.averageGrade())
|
# Client version string reported by this library.
VERSION = '2.0b9'
# Default request timeout; presumably milliseconds given the magnitude — TODO confirm.
DEFAULT_TIMEOUT = 60000
# HTTP header carrying the request id ("Cko" prefix suggests the Checkout.com API — confirm).
REQUEST_ID_HEADER = 'Cko-Request-Id'
# HTTP header carrying the API version.
API_VERSION_HEADER = 'Cko-Version'
|
# Read a real number and print one fifth of it.
value = float(input("Informe um numero real: "))
fifth = value / 5
print(fifth)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.