content stringlengths 7 1.05M |
|---|
# Public API of this Keithley 2400 helper module.
__all__ = (
    "config_measure_voltage",
    "config_measure_resistance",
    "enable_source",
    "disable_source",
    "read",
    "config_voltage_pulse",
)
def config_measure_voltage(k2400, nplc=1, voltage=21.0, auto_range=True):
    """Set up the Keithley 2400 to measure voltage. (Courtesy of pymeasure, see link below)

    args:
        k2400 (pyvisa.instrument): Keithley 2400
        nplc (float or int): integration time in power line cycles, 0.01 to 10
        voltage (float): upper voltage limit in Volts, from -210 V to 210 V
        auto_range (bool): use instrument auto-ranging when True, otherwise
            fix the measurement range to ``voltage``
    https://github.com/pymeasure/pymeasure/blob/4249c3a06457d5e4c8a2ba595aea867e99f9e5b6/pymeasure/instruments/keithley/keithley2400.py
    """
    sense_command = ":SENS:FUNC 'VOLT';:SENS:VOLT:NPLC %f;:FORM:ELEM VOLT;" % nplc
    k2400.write(sense_command)
    if auto_range:
        range_command = ":SENS:VOLT:RANG:AUTO 1;"
    else:
        range_command = ":SENS:VOLT:RANG %g" % voltage
    k2400.write(range_command)
def config_measure_resistance(k2400, nplc=1, voltage=21.0, auto_range=True):
    """Set up the Keithley 2400 to measure resistance. (Courtesy of pymeasure, see link below)

    args:
        k2400 (pyvisa.instrument): Keithley 2400
        nplc (float or int): integration time in power line cycles, 0.01 to 10
        voltage (float): upper voltage limit in Volts, from -210 V to 210 V
        auto_range (bool): use instrument auto-ranging when True, otherwise
            fix the measurement range to ``voltage``
    https://github.com/pymeasure/pymeasure/blob/4249c3a06457d5e4c8a2ba595aea867e99f9e5b6/pymeasure/instruments/keithley/keithley2400.py
    """
    sense_command = ":SENS:FUNC 'RES';:SENS:RES:NPLC %f;:FORM:ELEM RES;" % nplc
    k2400.write(sense_command)
    if auto_range:
        range_command = ":SENS:RES:RANG:AUTO 1;"
    else:
        range_command = ":SENS:RES:RANG %g" % voltage
    k2400.write(range_command)
def enable_source(k2400):
    """Switch the source output on (works for both current and voltage sourcing).

    args:
        k2400 (pyvisa.instrument): K2400
    """
    k2400.write("OUTPUT ON")
def disable_source(k2400):
    """Switch the source output off (works for both current and voltage sourcing).

    args:
        k2400 (pyvisa.instrument): K2400
    """
    k2400.write("OUTPUT OFF")
def read(k2400):
    """Trigger a measurement and return the instrument's reply.

    args:
        k2400 (pyvisa.instrument): K2400
    returns:
        reading (str): raw reply with newlines stripped
    """
    reply = k2400.query(":READ?")
    return reply.replace("\n", "")
def config_voltage_pulse(k2400, nplc: float = 0.01, amplitude: float = 5):
    """Configure for Voltage pulse. nplc=.01 gives ~1.5ms pulse nplc=.1 is ~7ms pulse

    args:
        k2400 (pyvisa.instrument): K2400
        nplc (float): (.01, 10) power line cycles to specify speed
        amplitude (float): Voltage amplitude in volts

    examples:
        .. code-block:: python

            k24 = pyvisa.ResourceManager().open_resource(<GBIP>)
            config_voltage_pulse(k24, amplitude=10)  # configure 10V pulse
            enable_source(k24)  # start the source
            read(k24)  # apply the pulse
    """
    # A two-point voltage sweep (amplitude -> 0 V, trigger count 2) emulates a
    # single pulse; the NPLC integration time sets the effective pulse width.
    # NOTE(review): the command text is sent verbatim to the instrument, so
    # line order and content must not be reshuffled.
    k2400.write(
        """*RST
:SENS:FUNC:CONC OFF
:SOUR:FUNC VOLT
:SOUR:VOLT:MODE SWE
:SOURce:SWEep:POINts 2
:SOURce:VOLTage:STARt {}
:SOURce:VOLTage:STOP 0
:SENS:VOLT:NPLCycles {}
:TRIG:COUN 2
:TRIG:DELay 0
:SOUR:DEL 0
""".format(
            amplitude, nplc
        )
    )
    return
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
The Crazyflie Micro Quadcopter library API used to communicate with the
Crazyflie Micro Quadcopter via a communication link.
The API takes care of scanning, opening and closing the communication link
as well as sending/receiving data from the Crazyflie.
A link is described using an URI of the following format:
<interface>://<interface defined data>.
See each link for the data that can be included in the URI for that interface.
The two main uses-cases are scanning for Crazyflies available on a
communication link and opening a communication link to a Crazyflie.
Example of scanning for available Crazyflies on all communication links:
cflib.crtp.init_drivers()
available = cflib.crtp.scan_interfaces()
for i in available:
    print("Found Crazyflie on URI [%s] with comment [%s]"
          % (available[0], available[1]))
Example of connecting to a Crazyflie with known URI (radio dongle 0 and
radio channel 125):
cf = Crazyflie()
cf.open_link("radio://0/125")
...
cf.close_link()
"""
|
"""Read two integers -- the width and the height of a rectangle -- and print
that rectangle as a grid of '#' characters, using nested loops."""


def main():
    """Prompt for width and height, then draw the rectangle on stdout."""
    coluna = int(input("Digite a largura do retangulo: "))
    # Bug fix: the original prompt said "triangulo" although the program
    # draws a rectangle.
    linhas = int(input("Digite a altura do retangulo: "))
    linha_atual = 0
    while linha_atual < linhas:
        linha_atual += 1
        coluna_atual = 0
        while coluna_atual < coluna:
            coluna_atual += 1
            print("#", end="")
        print()


# Guarded so importing this module no longer blocks on input().
if __name__ == "__main__":
    main()
# Python3 function to calculate number of possible stairs arrangements with
# given number of boxes/bricks.
def solution(n):
    """Count the staircases buildable from exactly n bricks.

    A staircase needs at least 2 steps with strictly decreasing heights.
    dp[i][j] holds the number of staircases that use i bricks and whose
    base (widest) step has width j.

    Args:
        n (int): total number of bricks.

    Returns:
        int: number of distinct staircase arrangements.
    """
    # Single zero-initialisation; the original built the table with zeros
    # and then redundantly re-zeroed it in a second nested loop.
    dp = [[0] * (n + 5) for _ in range(n + 5)]
    # Base cases: 2+1 (3 bricks) and 3+1 (4 bricks), base step of width 2.
    dp[3][2] = 1
    dp[4][2] = 1
    for i in range(5, n + 1):
        for j in range(2, i + 1):
            if j == 2:
                dp[i][j] = dp[i - j][j] + 1
            else:
                dp[i][j] = dp[i - j][j] + dp[i - j][j - 1]
    return sum(dp[n][i] for i in range(1, n + 1))


print(solution(3))
|
class PdfDoc():
    """Minimal in-memory representation of a PDF document."""

    def __init__(self, filename):
        self.filename = filename  # path or name of the backing file
        self.pages = []  # page objects, appended by callers

    def page_count(self):
        """Number of pages currently held by this document."""
        return len(self.pages)
|
# Copy mobydick.txt to saida.txt, dropping blank lines and normalising each
# remaining line to single-space-separated words (with a trailing space per
# word, as the original produced).
# Fixed: files are now closed via context managers even on error; the
# pointless readlines()[:] copy is gone.
with open('mobydick.txt', 'r') as arquivo, open('saida.txt', 'w') as saida:
    for linha in arquivo:
        if linha == '\n':
            continue
        for palavra in linha.split():
            saida.write(f'{palavra} ')
        saida.write('\n')
from collections import Counter

# Recover a message: for each column of the input lines, print the most
# frequent character in that column.
# Fixed: file handle is now closed via `with`; the manual per-column dict
# counting and max-scan are replaced by collections.Counter.
with open('signalsAndNoise_input.txt', 'r') as file:
    lines_read = [line.strip() for line in file]

# NOTE(review): assumes all lines share the first line's length, exactly as
# the original column-indexing version did.
for column in zip(*lines_read):
    # Counter.most_common sorts stably, so ties resolve to the character seen
    # first -- the same tie-break as the original strict-greater max scan.
    print(Counter(column).most_common(1)[0][0])
|
# Python - 3.6.0
# NOTE(review): `Test`, `oper`, `vert_mirror` and `hor_mirror` are not defined
# in this file -- this appears to be a Codewars-style kata test script whose
# runtime injects them; it cannot run standalone.
def testing(actual, expected):
    """Thin wrapper around the kata framework's equality assertion."""
    Test.assert_equals(actual, expected)


Test.describe('opstrings')
Test.it('Basic tests vert_mirror')
testing(oper(vert_mirror, 'hSgdHQ\nHnDMao\nClNNxX\niRvxxH\nbqTVvA\nwvSyRu'), 'QHdgSh\noaMDnH\nXxNNlC\nHxxvRi\nAvVTqb\nuRySvw')
testing(oper(vert_mirror, 'IzOTWE\nkkbeCM\nWuzZxM\nvDddJw\njiJyHF\nPVHfSx'), 'EWTOzI\nMCebkk\nMxZzuW\nwJddDv\nFHyJij\nxSfHVP')
Test.it('Basic tests hor_mirror')
testing(oper(hor_mirror, 'lVHt\nJVhv\nCSbg\nyeCt'), 'yeCt\nCSbg\nJVhv\nlVHt')
testing(oper(hor_mirror, 'njMK\ndbrZ\nLPKo\ncEYz'), 'cEYz\nLPKo\ndbrZ\nnjMK')
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
def headers(agentConfig, **kwargs):
    """Build the default HTTP request headers for the agent.

    The User-Agent carries the agent version from agentConfig (falling back
    to '0.0.0'); an explicit Host header is added when `http_host` is passed.
    """
    version = agentConfig.get('version', '0.0.0')
    request_headers = {
        'User-Agent': 'Datadog Agent/%s' % version,
        'Content-Type': 'application/x-www-form-urlencoded',
        'Accept': 'text/html, */*',
    }
    if 'http_host' in kwargs:
        request_headers['Host'] = kwargs['http_host']
    return request_headers
|
# from ../../.tcp_chat_server import server.py, client.py
# something is wrong with the directory above.
# import pytest
# Linter said it could not import pytest
def test_alive():
    """Smoke test: simply confirms this test file is collected and runs."""
    pass
|
class GlueError(Exception):
    """Base Exception class for glue Errors."""
    # Generic fallback exit/error code; subclasses override with specifics.
    error_code = 999


class PILUnavailableError(GlueError):
    """Raised if some PIL decoder isn't available."""
    error_code = 2


class ValidationError(GlueError):
    """Raised by formats or sprites while validating their data."""
    error_code = 3


class SourceImagesNotFoundError(GlueError):
    """Raised if a folder doesn't contain any valid image."""
    error_code = 4


class NoSpritesFoldersFoundError(GlueError):
    """Raised if no sprites folders could be found."""
    error_code = 5
|
# Print every multiple of 3 from 0 through 100, using a stepped range
# instead of testing each value with the modulo operator.
for multiple in range(0, 101, 3):
    print(multiple)
|
"""
Given a list of (start, end) number pairs representing intervals, some of
which may overlap, merge the overlapping ones and return the list of
disjoint intervals.
"""
# Version 1
def polacz_przedzialy_v1(lista):
    """Merge overlapping intervals and return them sorted and disjoint.

    Args:
        lista: list of (start, end) tuples.

    Returns:
        Sorted list of non-overlapping (start, end) tuples.
    """
    # Fixed: an empty input used to raise IndexError on lista[0].
    if not lista:
        return []
    lista = sorted(lista)
    wynik = []
    pocz, koniec = lista[0]
    for x in lista[1:]:
        if koniec >= x[0]:
            # Overlaps the current interval: extend its end if needed.
            if koniec < x[1]:
                koniec = x[1]
        else:
            wynik.append((pocz, koniec))
            pocz, koniec = x
    wynik.append((pocz, koniec))
    return wynik


# Correctness tests
lista = [(23, 67), (23, 53), (45, 88), (77, 88), (10, 22), (11, 12), (42, 45)]
wynik = [(10, 22), (23, 88)]
assert polacz_przedzialy_v1(lista) == wynik
|
# Password-policy puzzle: each input line is "<lo>-<hi> <char>: <password>".
to_solve = ''
with open('input.txt') as f:
    to_solve = f.readlines()

# Parse every line into a rule record.
parsed = []
for raw_line in to_solve:
    pieces = raw_line.split(': ')
    rule_words = pieces[0].split(' ')
    low, high = rule_words[0].split('-')
    parsed.append({'char': rule_words[1], 'passwd': pieces[1],
                   'min': int(low), 'max': int(high)})
to_solve = parsed

# Part 1: the letter's occurrence count must fall within [min, max].
part1 = 0
for record in to_solve:
    occurrences = record['passwd'].count(record['char'])
    if record['min'] <= occurrences <= record['max']:
        part1 += 1
print(f'part 1: {part1}')

# Part 2: exactly one of the two 1-based positions must hold the letter.
part2 = 0
for record in to_solve:
    password = record['passwd']
    hits = 0
    if password[record['min'] - 1] == record['char']:
        hits += 1
    if password[record['max'] - 1] == record['char']:
        hits += 1
    if hits == 1:
        part2 += 1
print(f'part 2: {part2}')
|
"""
You have an array of logs. Each log is a space delimited string of words. For each log, the first word
in each log is an alphanumeric identifier. Then, either:
Each word after the identifier will consist only of lowercase letters, or;
Each word after the identifier will consist only of digits.
We will call these two varieties of logs letter-logs and digit-logs. It is guaranteed that each log has
at least one word after its identifier. Reorder the logs so that all of the letter-logs come before any
digit-log. The letter-logs are ordered lexicographically ignoring identifier, with the identifier used
in case of ties. The digit-logs should be put in their original order. Return the final order of the logs.
Example 1:
Input: logs = ["dig1 8 1 5 1","let1 art can","dig2 3 6","let2 own kit dig","let3 art zero"]
Output: ["let1 art can","let3 art zero","let2 own kit dig","dig1 8 1 5 1","dig2 3 6"]
Constraints:
1. 0 <= logs.length <= 100
2. 3 <= logs[i].length <= 100
3. logs[i] is guaranteed to have an identifier, and a word after the identifier.
"""
class Solution:
    def reorderLogFiles1(self, logs):
        """Letter-logs sorted by (content, identifier), then digit-logs in
        original order."""
        letter_entries, digit_entries = [], []
        for entry in logs:
            parts = entry.split()
            if parts[1][0].isalpha():
                letter_entries.append((' '.join(parts[1:]), parts[0]))
            else:
                digit_entries.append(entry)
        letter_entries.sort()
        return [ident + ' ' + content for content, ident in letter_entries] + digit_entries

    def reorderLogFiles2(self, logs):
        """Same result, partitioning first and sorting letter-logs in place
        with a key function."""
        letter_entries, digit_entries = [], []
        for entry in logs:
            bucket = letter_entries if entry.split()[1][0].isalpha() else digit_entries
            bucket.append(entry)

        def sort_key(entry):
            parts = entry.split()
            return (' '.join(parts[1:]), parts[0])

        letter_entries.sort(key=sort_key)
        return letter_entries + digit_entries
|
# http://codingbat.com/prob/p194053
def combo_string(a, b):
if len(a) > len(b):
return b + a + b
else:
return a + b + a
|
"""Tests which check the various ways you can set DJANGO_SETTINGS_MODULE
If these tests fail you probably forgot to run "python setup.py develop".
"""
BARE_SETTINGS = '''
# At least one database must be configured
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:'
},
}
SECRET_KEY = 'foobar'
'''
def test_ds_env(testdir, monkeypatch):
    """DJANGO_SETTINGS_MODULE set in the environment is used as-is."""
    monkeypatch.setenv('DJANGO_SETTINGS_MODULE', 'tpkg.settings_env')
    pkg = testdir.mkpydir('tpkg')
    settings = pkg.join('settings_env.py')
    settings.write(BARE_SETTINGS)
    testdir.makepyfile("""
        import os
        def test_settings():
            assert os.environ['DJANGO_SETTINGS_MODULE'] == 'tpkg.settings_env'
    """)
    result = testdir.runpytest()
    result.stdout.fnmatch_lines(['*1 passed*'])
def test_ds_ini(testdir, monkeypatch):
    """A DJANGO_SETTINGS_MODULE in the ini file overrides the environment."""
    monkeypatch.setenv('DJANGO_SETTINGS_MODULE', 'DO_NOT_USE')
    testdir.makeini("""\
    [pytest]
    DJANGO_SETTINGS_MODULE = tpkg.settings_ini
    """)
    pkg = testdir.mkpydir('tpkg')
    settings = pkg.join('settings_ini.py')
    settings.write(BARE_SETTINGS)
    testdir.makepyfile("""
        import os
        def test_ds():
            assert os.environ['DJANGO_SETTINGS_MODULE'] == 'tpkg.settings_ini'
    """)
    result = testdir.runpytest()
    result.stdout.fnmatch_lines(['*1 passed*'])
def test_ds_option(testdir, monkeypatch):
    """The --ds command-line option wins over both ini file and environment."""
    monkeypatch.setenv('DJANGO_SETTINGS_MODULE', 'DO_NOT_USE_env')
    testdir.makeini("""\
    [pytest]
    DJANGO_SETTINGS_MODULE = DO_NOT_USE_ini
    """)
    pkg = testdir.mkpydir('tpkg')
    settings = pkg.join('settings_opt.py')
    settings.write(BARE_SETTINGS)
    testdir.makepyfile("""
        import os
        def test_ds():
            assert os.environ['DJANGO_SETTINGS_MODULE'] == 'tpkg.settings_opt'
    """)
    result = testdir.runpytest('--ds=tpkg.settings_opt')
    result.stdout.fnmatch_lines(['*1 passed*'])
def test_ds_non_existent(testdir, monkeypatch):
    """An unimportable settings module yields a clean error, not INTERNALERROR."""
    # Make sure we do not fail with INTERNALERROR if an incorrect
    # DJANGO_SETTINGS_MODULE is given.
    monkeypatch.setenv('DJANGO_SETTINGS_MODULE', 'DOES_NOT_EXIST')
    testdir.makepyfile('def test_ds(): pass')
    result = testdir.runpytest()
    result.stderr.fnmatch_lines(
        ["*Could not import settings 'DOES_NOT_EXIST' (Is it on sys.path?*):*"])
def test_django_settings_configure(testdir, monkeypatch):
    """
    Make sure Django can be configured without setting
    DJANGO_SETTINGS_MODULE altogether, relying on calling
    django.conf.settings.configure() and then invoking pytest.
    """
    monkeypatch.delenv('DJANGO_SETTINGS_MODULE')
    # A driver script: configure settings programmatically, then run pytest
    # in-process so the configuration is already in place.
    p = testdir.makepyfile(run="""
        from django.conf import settings
        settings.configure(SECRET_KEY='set from settings.configure()',
                           DATABASES={'default': {
                               'ENGINE': 'django.db.backends.sqlite3',
                               'NAME': ':memory:'
                           }},
                           INSTALLED_APPS=['django.contrib.auth',
                                           'django.contrib.contenttypes',])
        import pytest
        pytest.main()
    """)
    testdir.makepyfile("""
        import pytest
        from django.conf import settings
        from django.test.client import RequestFactory
        from django.test import TestCase
        from django.contrib.auth.models import User
        def test_access_to_setting():
            assert settings.SECRET_KEY == 'set from settings.configure()'
        # This test requires Django to be properly configured to be run
        def test_rf(rf):
            assert isinstance(rf, RequestFactory)
        # This tests that pytest-django actually configures the database
        # according to the settings above
        class ATestCase(TestCase):
            def test_user_count(self):
                assert User.objects.count() == 0
        @pytest.mark.django_db
        def test_user_count():
            assert User.objects.count() == 0
    """)
    # Run the driver with plain python (not runpytest) so settings.configure()
    # happens before pytest starts.
    result = testdir.runpython(p)
    result.stdout.fnmatch_lines([
        "*4 passed*",
    ])
def test_settings_in_hook(testdir, monkeypatch):
    """settings.configure() may be called from a conftest pytest_configure hook."""
    monkeypatch.delenv('DJANGO_SETTINGS_MODULE')
    testdir.makeconftest("""
        from django.conf import settings
        def pytest_configure():
            settings.configure(SECRET_KEY='set from pytest_configure',
                               DATABASES={'default': {
                                   'ENGINE': 'django.db.backends.sqlite3',
                                   'NAME': ':memory:'}},
                               INSTALLED_APPS=['django.contrib.auth',
                                               'django.contrib.contenttypes',])
    """)
    testdir.makepyfile("""
        import pytest
        from django.conf import settings
        from django.contrib.auth.models import User
        def test_access_to_setting():
            assert settings.SECRET_KEY == 'set from pytest_configure'
        @pytest.mark.django_db
        def test_user_count():
            assert User.objects.count() == 0
    """)
    r = testdir.runpytest()
    assert r.ret == 0
def test_django_not_loaded_without_settings(testdir, monkeypatch):
    """
    Make sure Django is not imported at all if no Django settings is specified.
    """
    monkeypatch.delenv('DJANGO_SETTINGS_MODULE')
    testdir.makepyfile("""
        import sys
        def test_settings():
            assert 'django' not in sys.modules
    """)
    result = testdir.runpytest()
    result.stdout.fnmatch_lines(['*1 passed*'])
def test_debug_false(testdir, monkeypatch):
    """DEBUG is forced to False while tests run, even if configured as True."""
    monkeypatch.delenv('DJANGO_SETTINGS_MODULE')
    testdir.makeconftest("""
        from django.conf import settings
        def pytest_configure():
            settings.configure(SECRET_KEY='set from pytest_configure',
                               DEBUG=True,
                               DATABASES={'default': {
                                   'ENGINE': 'django.db.backends.sqlite3',
                                   'NAME': ':memory:'}},
                               INSTALLED_APPS=['django.contrib.auth',
                                               'django.contrib.contenttypes',])
    """)
    testdir.makepyfile("""
        from django.conf import settings
        def test_debug_is_false():
            assert settings.DEBUG is False
    """)
    r = testdir.runpytest()
    assert r.ret == 0
|
def tree(entity):
    """tree is a filter to build the file tree

    Args:
        entity: the current entity

    Returns:
        A file tree, starting with the highest parent
    """
    root = entity
    # Climb to the highest ancestor that shares the entity's type.
    while hasattr(root, 'parent') and root.parent and root.parent.type == root.type:
        root = root.parent
    # Use it to build the file tree
    return build_tree(root, entity.title)


def build_tree(node, active_title=None):
    """Recursively build a dict tree for `node`, flagging the active entry.

    Args:
        node: object with `title` and `url`, and optionally `children`.
        active_title: title of the entry to mark as active.

    Returns:
        Nested dict with title, url, active and (for branches) children.
    """
    # Idiom fixes: `getattr` replaces the hasattr/not-children double check,
    # and the redundant `True if ... else False` ternaries are now plain
    # boolean expressions.
    active = node.title == active_title
    if not getattr(node, 'children', None):
        return {'title': node.title, 'url': node.url, 'active': active}
    return {
        'title': node.title,
        'url': node.url,
        'children': [build_tree(child, active_title) for child in node.children],
        'active': active,
    }
|
"""
What does the phrase "in-order successor" mean when we are talking about a node in a binary search tree?
A - the node that has the next lowest value
B - the node that has the maximum value
C - the node that has the minimum value
D - the node that has the next highest value
answer is: D - the node that has the next highest value
"""
|
def fibonacci(n):
    """Return the n-th Fibonacci number (1-indexed: fib(1) == fib(2) == 1).

    Rewritten iteratively: the naive double recursion was exponential in n,
    and n < 1 used to recurse forever.

    Args:
        n (int): 1-based position in the sequence.

    Returns:
        int: the n-th Fibonacci number.

    Raises:
        ValueError: if n < 1.
    """
    if n < 1:
        raise ValueError("n must be a positive integer")
    previous, current = 1, 1
    for _ in range(n - 2):
        previous, current = current, previous + current
    return current
|
class Event():
    """A single calendar entry: an id, a date-time and the owning user's id."""
    def __init__(self, id, dateTime, userId):
        self.id = id  # numeric event id, assigned by the Scheduler
        # datetime-like object; Scheduler calls .strftime() and .timestamp()
        # on it -- presumably datetime.datetime, verify against callers.
        self.dateTime = dateTime
        self.userId = userId  # id of the user the event belongs to

    def getDateTime(self):
        """Return the event's date-time."""
        return self.dateTime

    def getUserId(self):
        """Return the owning user's id."""
        return self.userId
class Scheduler():
    """In-memory calendar that stores events and enforces at most one event
    per user per calendar day."""

    def __init__(self):
        self.calendar = {}  # eventId -> Event
        self.userIdIndex = {}  # userId -> [eventId, ...]
        self.dateIndex = {}  # userId -> {date string -> eventId}
        self.eventId = 0  # last assigned id (auto-incremented in addEvent)
        self.dateTimeFormat = '%Y-%m-%d %H:%M'  # strftime format for date-times

    def getDateTimeFormat(self):
        """Return the strftime format used for event date-times."""
        return self.dateTimeFormat

    def getEvents(self, userId):
        """
        Get events of a specific user

        :param userId: ID of user
        :returns: list of {'date_time', 'user_id'} dicts, oldest first
        """
        events = []
        if userId in self.userIdIndex.keys():
            eventIds = self.userIdIndex[userId]
            for eventId in eventIds:
                event = self.calendar[eventId]
                responseEvent = {}
                responseEvent['date_time'] = event.getDateTime()
                responseEvent['user_id'] = event.getUserId()
                events.append(responseEvent)
        # Sort chronologically; requires date_time to support .timestamp()
        events = sorted(events, key=lambda event: event['date_time'].timestamp())
        return events

    def addEvent(self, dateTime, userId):
        """
        Add a single event at the date-time for the user with userId

        :param dateTime: date-time
        :param userId: ID of user
        :returns: {'success': True} or {'success': False, 'error': ...}
        """
        # Check for other events on same day: compare only the date part
        # (the first whitespace-separated token) of the configured format.
        dateFormat = self.getDateTimeFormat().split()[0]
        date = dateTime.strftime(dateFormat)
        if userId in self.dateIndex.keys() and date in self.dateIndex[userId].keys():
            return {
                "success": False,
                "error": "Only one event allowed on the same day.",
            }
        self.eventId += 1
        # Create new event
        event = Event(self.eventId, dateTime, userId)
        self.calendar[self.eventId] = event
        # Update user Id index
        if userId not in self.userIdIndex.keys():
            self.userIdIndex[userId] = []
        self.userIdIndex[userId].append(self.eventId)
        # Update date index
        if userId not in self.dateIndex.keys():
            self.dateIndex[userId] = {}
        self.dateIndex[userId][date] = self.eventId
        return {
            "success": True,
        }
# -*- coding: utf-8 -*-
# Definition for singly-linked list.
# class ListNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.next = None


class Solution(object):
    def mergeTwoLists(self, l1, l2):
        """Merge two sorted linked lists into one sorted list.

        :type l1: ListNode
        :type l2: ListNode
        :rtype: ListNode
        """
        # Once either list is exhausted, the remainder of the other is the answer.
        if not l1 or not l2:
            return l1 or l2
        # Keep l1 pointing at the node that must come first; on equal values
        # the second list's node is emitted first, as in the original.
        if l2.val <= l1.val:
            l1, l2 = l2, l1
        l1.next = self.mergeTwoLists(l1.next, l2)
        return l1
|
class Solution:
    # @return an integer
    def maxArea(self, height):
        """Two-pointer scan: the best container is found by always advancing
        the shorter of the two walls inward."""
        left, right = 0, len(height) - 1
        best = 0
        while left < right:
            width = right - left
            shorter = min(height[left], height[right])
            best = max(best, width * shorter)
            # Moving the taller wall can never help, so advance the shorter.
            if height[left] <= height[right]:
                left += 1
            else:
                right -= 1
        return best
|
#!/usr/bin/env python
"""Solution for problem C to Codejam 2016, Round 1B of Martin Thoma."""
def get_dicts(topics):
    """Count word occurrences over a list of (a, b) topic pairs.

    Args:
        topics: iterable of (first_word, second_word) tuples.

    Returns:
        (a_words, b_words): dicts mapping each first/second word to its
        number of occurrences.
    """
    a_words = {}
    b_words = {}
    for a, b in topics:
        # dict.get with a default replaces the original if/else counting.
        a_words[a] = a_words.get(a, 0) + 1
        b_words[b] = b_words.get(b, 0) + 1
    return a_words, b_words
def solve(topics):
    """Solve."""
    # Count how often each first/second word occurs across all topics.
    a_words, b_words = get_dicts(topics)
    candidates = []
    original = []
    duplicates = []
    for a, b in topics:
        # print(a, b)
        # print(a_words[a], b_words[b])
        # A topic is only potentially fake if BOTH of its words also occur
        # in some other topic; otherwise it must be original.
        if not (a_words[a] == 1 or b_words[b] == 1):
            candidates.append((a, b))
        else:
            original.append((a, b))
    a_words_org, b_words_org = get_dicts(original)
    while len(candidates) > 0:
        l_candidates = []
        for a, b in candidates:
            # NOTE(review): raises KeyError when a candidate word never occurs
            # in `original` -- probably needs .get(a, 0) semantics; verify.
            if a_words_org[a] >= 1 and b_words_org[b] >= 1:
                duplicates.append((a, b))
            else:
                l_candidates.append((a, b))
        candidates = l_candidates[:]
        # print(candidates)
        # NOTE(review): if a pass moves nothing into `duplicates`, this loop
        # never terminates; and on normal exit `candidates` is empty, so the
        # function always returns 0 -- `len(duplicates)` may have been the
        # intended result. Left untouched pending confirmation.
    return len(candidates)
if __name__ == "__main__":
    # Fixed for Python 3: the original used Python 2's xrange/raw_input and
    # relied on input() evaluating to int.
    testcases = int(input())
    for caseNr in range(1, testcases + 1):
        n = int(input())
        topics = []
        # Read n lines of "word_a word_b" pairs for this test case.
        for _ in range(n):
            a, b = input().split(" ")
            topics.append((a, b))
        print("Case #%i: %s" % (caseNr, solve(topics)))
|
class InfoUnavailableError(ValueError):
    """CloudVolume was unable to access this layer's info file."""
    pass


class ScaleUnavailableError(IndexError):
    """The info file is not configured to support this scale / mip level."""
    pass


class AlignmentError(ValueError):
    """Signals that an operation requiring chunk alignment was not aligned."""
    pass


class EmptyVolumeException(Exception):
    """Raised upon finding a missing chunk."""
    pass


class EmptyRequestException(ValueError):
    """
    Requesting uploading or downloading
    a bounding box of less than one cubic voxel
    is impossible.
    """
    pass


class DecodingError(Exception):
    """Generic decoding error. Applies to content aware and unaware codecs."""
    pass


class EncodingError(Exception):
    """Generic encoding error. Applies to content aware and unaware codecs."""
    pass


class OutOfBoundsError(ValueError):
    """
    Raised upon trying to obtain or assign to a bbox of a volume outside
    of the volume's bounds
    """


# Inheritance below done for backwards compatibility reasons.
class DecompressionError(DecodingError):
    """
    Decompression failed. This exception is used for codecs
    that are naieve to data contents like gzip, lzma, etc. as opposed
    to codecs that are aware of array shape like fpzip or compressed_segmentation.
    """
    pass


class CompressionError(EncodingError):
    """
    Compression failed. This exception is used for codecs
    that are naieve to data contents like gzip, lzma, etc. as opposed
    to codecs that are aware of array shape like fpzip or compressed_segmentation.
    """
    pass


class SkeletonUnassignedEdgeError(Exception):
    """This skeleton has an edge to a vertex that doesn't exist."""
    pass


class SkeletonDecodeError(Exception):
    """Unable to decode a binary skeleton into a Python object."""
    pass


class SkeletonEncodeError(Exception):
    """Unable to encode a PrecomputedSkeleton into a binary object."""
    pass


class UnsupportedProtocolError(ValueError):
    """Unknown protocol extension."""
    pass
def isnotebook():
    """Return True when running inside a Jupyter notebook or qtconsole."""
    try:
        shell_name = get_ipython().__class__.__name__
    except NameError:
        return False  # plain Python interpreter: get_ipython is undefined
    if shell_name == 'ZMQInteractiveShell':
        return True  # Jupyter notebook or qtconsole
    return False  # terminal IPython or any other/unknown shell
|
# Read four integers and decide whether they can be split into two
# pairs (or one vs. three) with equal sums.
values = sorted(map(int, input().split()))
if values[0] + values[3] == values[1] + values[2] or values[3] == values[0] + values[1] + values[2]:
    print("YES")
else:
    print("NO")
|
def is_positive(num):
    """Return True when num converts to an integer greater than zero."""
    return int(num) > 0
def is_negative(num):
    """Return True when num converts to an integer less than zero."""
    return int(num) < 0
def is_zero(num):
    """Return True when num converts to exactly zero."""
    return int(num) == 0
def is_odd(num):
    """Return True for positive odd integers; zero and negatives give False
    (matching the original's early-exit for num <= 0)."""
    value = int(num)
    return value > 0 and value % 2 != 0
def is_even(num):
    """Return True for positive even integers; zero and negatives give False
    (matching the original's early-exit for num <= 0)."""
    value = int(num)
    return value > 0 and value % 2 == 0
def is_prime(num):
    """Return True when num converts to a prime integer.

    Fixes two defects of the original divisor-counting version:
    negative inputs made the while loop spin forever (the counter never
    reached a negative target), and the scan was O(n) instead of O(sqrt(n)).

    Args:
        num: value convertible with int().

    Returns:
        bool: True for primes, False otherwise (including 0, 1, negatives).
    """
    value = int(num)
    if value < 2:
        return False  # guards 0, 1 and the formerly-looping negatives
    for divisor in range(2, int(value ** 0.5) + 1):
        if value % divisor == 0:
            return False
    return True
|
def factorial(curr):
    """Return curr! computed iteratively (factorial(0) == 1)."""
    product = 1
    for factor in range(2, curr + 1):
        product *= factor
    return product
def non_recurrsion():
    """Print a triangle whose entries are consecutive factorials, computed
    with the iterative `factorial` helper. Prompts for the row count."""
    num = 1
    try:
        row = int(input("Enter number of rows:"))
    except (ValueError, EOFError):
        # Fixed: the bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; only bad/missing numeric input is handled now.
        print("Invalid input. Please enter an integer")
        exit(1)
    for i in range(1, row + 1):
        for _ in range(i):
            print(factorial(num), end=" ")
            num += 1
        print("")
# Recursive statement included
def factorial_recursion(numb):
if numb == 1:
return 1
else:
return numb * factorial_recursion(numb - 1)
def recursion():
    """Print a triangle whose entries are consecutive factorials, computed
    with the recursive helper. Prompts for the row count."""
    num = 1
    try:
        row = int(input("Enter number of rows:"))
    except (ValueError, EOFError):
        # Fixed: the bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; only bad/missing numeric input is handled now.
        print("Invalid input. Please enter an integer")
        exit(1)
    for i in range(1, row + 1):
        for _ in range(i):
            print(factorial_recursion(num), end=" ")
            num += 1
        print("")


non_recurrsion()
# recursion()
|
# -*- coding: utf-8 -*-
"""Container for all required classes."""
# __init__.py
#
# Created by Thomas Nelson <tn90ca@gmail.com>
# Created..........................2015-01-25
# Modified.........................2015-01-25
#
# This module was developed for use in the Bugs project.
#
# Copyright (C) 2015 Thomas Nelson
# Public submodules re-exported by this package.
__all__ = ["dna", "brain", "bug", "food", "world"]
def can_build(env, platform):
    """Whether this module can be built on `platform`.

    Only Windows is supported for now -- GGPO isn't available on Linux or
    macOS.
    """
    return platform == "windows"
def configure(env):
    """Set up SCons build flags for the GGPO module."""
    # Expose the GGPO SDK headers to the whole build.
    env.Append(CPPPATH=["#modules/godotggpo/sdk/include/"])
    if env["platform"] == "windows":
        # MSVC links against the prebuilt GGPO.lib; MinGW/others use -lggpo.
        if env["CC"] == "cl":
            env.Append(LINKFLAGS=["GGPO.lib"])
            env.Append(LIBPATH=["#modules/godotggpo/sdk/bin"])
        else:
            env.Append(LIBS=["ggpo"])
            env.Append(LIBPATH=["#modules/godotggpo/sdk/bin"])
def get_doc_classes():
    """Names of the classes this module contributes documentation for."""
    return ["GGPO"]
def get_doc_path():
    """Relative path of this module's class-documentation folder."""
    return "doc_class"
print("find greatest common divisor:")


def gcd(m, n):
    """Print every common factor of m and n, then the greatest one."""
    common_factors = [i for i in range(1, min(m, n) + 1)
                      if m % i == 0 and n % i == 0]
    print(common_factors)
    print(common_factors[-1])


gcd(int(input()), int(input()))
|
def object_function_apply_by_key(object_to_apply, key_to_find, function_to_apply):
    """Recursively walk nested lists/dicts; for every dict entry whose key
    equals `key_to_find`, invoke function_to_apply(containing_dict, key).
    Falsy inputs (None, empty containers) are ignored."""
    if not object_to_apply:
        return
    if isinstance(object_to_apply, list):
        for element in object_to_apply:
            object_function_apply_by_key(element, key_to_find, function_to_apply)
        return
    if isinstance(object_to_apply, dict):
        for current_key in object_to_apply:
            value = object_to_apply[current_key]
            if isinstance(value, list):
                for element in value:
                    object_function_apply_by_key(element, key_to_find, function_to_apply)
            elif isinstance(value, dict):
                object_function_apply_by_key(value, key_to_find, function_to_apply)
            # Children are visited before the callback, as in the original.
            if current_key == key_to_find:
                function_to_apply(object_to_apply, current_key)
|
# encoding: utf-8
"""
@version: v1.0
@author: Richard
@license: Apache Licence
@contact: billions.richard@qq.com
@site:
@software: PyCharm
@time: 2019/11/10 10:50
"""
class TreeNode(object):
def __init__(self, val,
left: 'TreeNode ' = None,
right: 'TreeNode ' = None):
self.val = val
self.left = left
self.right = right
|
# -*- coding: utf-8 -*-
# Scrapy settings for scrapy_multi_thread project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://doc.scrapy.org/en/latest/topics/settings.html
# https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'scrapy_multi_thread'
SPIDER_MODULES = ['scrapy_multi_thread.spiders']
NEWSPIDER_MODULE = 'scrapy_multi_thread.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'scrapy_multi_thread (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
DEFAULT_REQUEST_HEADERS = {
'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Language':'zh-CN,zh;q=0.8',
'Host':'www.lagou.com',
'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.113 Safari/537.36',
'Connection':'keep-alive'
}
# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'scrapy_multi_thread.middlewares.ScrapyMultiThreadSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'scrapy_multi_thread.middlewares.ScrapyMultiThreadDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
'scrapy_multi_thread.pipelines.ScrapyMultiThreadPipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
COOKIE = {'_ga': 'GA1.2.1992886460.1528164096', 'user_trace_token': '20180605100136-60b0189a-6864-11e8-91c8-525400f775ce', 'LGUID': '20180605100136-60b01bc1-6864-11e8-91c8-525400f775ce', 'WEBTJ-ID': '20180622152859-16426656f5737d-04764d3c65099c-17356950-1024000-16426656f58529',
'Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6': '1528164096,1529652539', 'LGSID': '20180622152859-ee08a019-75ed-11e8-ad33-525400f775ce', 'PRE_UTM': 'm_cf_cpt_baidu_pc', 'PRE_HOST': 'www.baidu.com', 'PRE_SITE': 'https%3A%2F%2Fwww.baidu.com%2Fs%3Fie%3DUTF-8%26wd%3Dlagou',
'PRE_LAND': 'https%3A%2F%2Fwww.lagou.com%2Flp%2Fhtml%2Fcommon.html%3Futm_source%3Dm_cf_cpt_baidu_pc', '_gid': 'GA1.2.319925302.1529652540', 'X_HTTP_TOKEN': 'a7add2acf6a6d137ecc485657757a987', 'LG_LOGIN_USER_ID': '81f93f7448dd1fae5dd21961596148ed6930de94011ecb25',
'_putrc': 'D43179227E56D0FF', 'JSESSIONID': 'ABAAABAAAGGABCB7517959870959C4171979399E2C8D0B6', 'login': 'true', 'unick': '%E5%86%AC%E9%98%B3', 'showExpriedIndex': '1', 'showExpriedCompanyHome': '1', 'showExpriedMyPublish': '1', 'hasDeliver': '9',
'gate_login_token': '40562ece1228ff6afed578937c6aab1a1aed0f320fad6239', 'index_location_city': '%E5%85%A8%E5%9B%BD', 'TG-TRACK-CODE': 'index_navigation', '_gat': '1', 'SEARCH_ID': 'f5504c414f9342c18a155fe0b7b89330', 'Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6': '1529653961',
'LGRID': '20180622155241-3d4bf9c9-75f1-11e8-ad35-525400f775ce'}
# MySQL database configuration (comments translated from Chinese).
# NOTE(review): credentials are hard-coded in source control — move them to
# environment variables or a secrets store before deploying.
MYSQL_HOST = '10.1.3.2'
MYSQL_DBNAME = 'scrapy_test'  # database name -- change for your setup
MYSQL_USER = 'medbankrd'  # database user -- change for your setup
MYSQL_PASSWD = 'medbankrd'  # database password -- change for your setup
MYSQL_PORT = 3306  # database port, used by dbhelper
# Concurrency throttling
CONCURRENT_REQUESTS_PER_DOMAIN = 1  # at most one in-flight request per domain
DOWNLOAD_DELAY =3  # wait 3 seconds between consecutive requests
# Read a count from stdin, then classify each following integer as even/odd.
# Collect the lines in a list and join once at the end: the original built
# the report with repeated string concatenation, which is O(n^2).
lines = []
for _ in range(int(input())):
    x = int(input())
    # In Python, x % 2 is always 0 or 1 even for negative x, so the
    # original abs() call was redundant.
    parity = "even" if x % 2 == 0 else "odd"
    lines.append(f"{x} is {parity}\n")
print("".join(lines), end="")
|
def slider_event_cb(slider, event):
    # Mirror the slider's current value into the label whenever it changes.
    # NOTE: relies on the module-level `slider_label` created below.
    if event == lv.EVENT.VALUE_CHANGED:
        slider_label.set_text("%u" % slider.get_value())
# Create a slider in the center of the display
slider = lv.slider(lv.scr_act())
slider.set_width(200)
slider.align(None, lv.ALIGN.CENTER, 0, 0)
slider.set_event_cb(slider_event_cb)
slider.set_range(0, 100)
# Create a label below the slider, initialised to the slider's minimum
slider_label = lv.label(lv.scr_act())
slider_label.set_text("0")
# re-align automatically when the text (and therefore the size) changes
slider_label.set_auto_realign(True)
slider_label.align(slider, lv.ALIGN.OUT_BOTTOM_MID, 0, 10)
# Create an informative label in the top-left corner
info = lv.label(lv.scr_act())
info.set_text("""Welcome to the slider+label demo!
Move the slider and see that the label
updates to match it.""")
info.align(None, lv.ALIGN.IN_TOP_LEFT, 10, 10)
|
# Dictionaries
# Defining keys with values and looking them up
my_stuff = {'key1': "123", "key2": "Value of key2"}
print(my_stuff['key1'])
print(my_stuff['key2'])
# Something nested (a dictionary inside a dictionary)
my_stuff2 = {'key1': "123", "key2": "Value of key2", 'key3': {'key4': [1, 3, 2]}}
print(my_stuff2['key3'])
print(my_stuff2['key3']['key4'])  # This will print the nested list inside my_stuff2
# Printing entire dictionaries
print(my_stuff, '= Dictionary 1')
print(my_stuff2, '= Dictionary 2')  # Note: unlike lists, dictionaries do not guarantee the order you typed
# SMALL EXERCISE
my_stuff3 = {'key1': "123", "key2": "Value of key2", 'key3': {'key4': [1, 3, 2, 'grab me']}}
print(my_stuff3['key3']['key4'][3])  # Chained lookups: dict -> dict -> list element
print(my_stuff3['key3']['key4'][3].upper())  # Same result but in upper case
print(my_stuff3['key3']['key4'][3].capitalize())  # Same result but capitalized
# Redefining a value
food = {'lunch': 'pizza', 'breakfast': 'eggs'}  # Main dictionary for the code below
food['lunch'] = 'burger'
print(food['lunch'])  # reassigned value for 'lunch'
print(food)  # Value is changed here
food['dinner'] = 'Pasta'
print(food)  # New key/value added here
|
def method1(str1, str2, m, n):
    """Return the edit (Levenshtein) distance between str1[:m] and str2[:n].

    Allowed operations are insert, remove and replace, each costing 1.

    args:
        str1 (str): first string
        str2 (str): second string
        m (int): number of leading characters of str1 to consider
        n (int): number of leading characters of str2 to consider

    The original recursed without memoization, which is exponential in
    m + n; caching the (i, j) subproblems makes it O(m * n).
    """
    from functools import lru_cache

    @lru_cache(maxsize=None)
    def _dist(i, j):
        # If one prefix is empty, the only option is to insert/remove
        # every character of the other prefix.
        if i == 0:
            return j
        if j == 0:
            return i
        # Matching last characters cost nothing; skip both.
        if str1[i - 1] == str2[j - 1]:
            return _dist(i - 1, j - 1)
        # Otherwise take the cheapest of the three edits.
        return 1 + min(
            _dist(i, j - 1),  # Insert
            _dist(i - 1, j),  # Remove
            _dist(i - 1, j - 1),  # Replace
        )

    return _dist(m, n)
if __name__ == "__main__":
    # The benchmark driver below is intentionally disabled: a bare
    # triple-quoted string is a no-op expression statement. It is kept
    # as ready-to-paste code for timing method1.
    """
    from timeit import timeit
    # Driver code
    str1 = "sunday"
    str2 = "saturday"
    print(
        timeit(lambda: method1(str1, str2, len(str1), len(str2)), number=10000)
    ) # 0.2074630530041759
    """
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NNAPI test specification for the MEAN operation: one shared 2x2 float32
# input is fed to three models that reduce along axis 0, axis 1, and both
# axes; the third argument of Operation toggles keep_dims.
i0 = Input("i0", ("TENSOR_FLOAT32", [2, 2])) # input 0
o1 = Output("o1", ("TENSOR_FLOAT32", [1, 2])) # output for model1
o2 = Output("o2", ("TENSOR_FLOAT32", [2, 1])) # output for model2
o3 = Output("o3", ("TENSOR_FLOAT32", [1])) # output for model3
model1 = Model().Operation("MEAN", i0, [0], 1).To(o1) # along axis 0, keep_dim=True
model2 = Model().Operation("MEAN", i0, [1], 1).To(o2) # along axis 1, keep_dim=True
model3 = Model().Operation("MEAN", i0, [0, 1], 0).To(o3) # along both axis, keep_dim=False
# NOTE(review): expected values look like sums, not means (e.g. [4, 6] for
# rows of [[1, 2], [3, 4]]) — presumably the DSL normalizes elsewhere; verify.
inputs1 = {i0: [1, 2, 3, 4]}
outputs11 = {o1: [4, 6]}
outputs12 = {o2: [3, 7]}
outputs13 = {o3: [10]}
inputs2 = {i0: [-1, -2, -3, -4]}
outputs21 = {o1: [-4, -6]}
outputs22 = {o2: [-3, -7]}
outputs23 = {o3: [-10]}
Example((inputs1, outputs11), model=model1)
Example((inputs1, outputs12), model=model2)
Example((inputs1, outputs13), model=model3)
Example((inputs2, outputs21), model=model1)
Example((inputs2, outputs22), model=model2)
Example((inputs2, outputs23), model=model3)
|
class PushCrewBaseException(BaseException):
    """Generic base exception for PushCrew errors."""

    def __init__(self, *args, **kwargs):
        # Delegate construction straight to the parent class.
        super().__init__(*args, **kwargs)
|
# web2py table definitions.
# auth.signature adds default audit fields: created_on/created_by and
# modified_on/modified_by.
db.define_table('blog_post',
                Field('title', requires=IS_NOT_EMPTY()),
                Field('body', 'text', requires=IS_NOT_EMPTY()),
                Field('photo', 'upload'),
                auth.signature)
# Each comment references the blog post it belongs to.
db.define_table('blog_comment',
                Field('blog_post', 'reference blog_post'),
                Field('comments', 'text', requires=IS_NOT_EMPTY()),
                auth.signature
                )
# Enforce unique post titles (set after define_table so the table exists).
db.blog_post.title.requires = IS_NOT_IN_DB(db, db.blog_post.title)
# Demo form table: email_validate must match the email submitted with
# the current request.
db.define_table('test',
                Field('first_name', requires = IS_NOT_EMPTY()),
                Field('last_name', requires = IS_NOT_EMPTY()),
                Field('email', requires= IS_EMAIL()),
                Field('email_validate',requires = IS_EQUAL_TO(request.vars.email)))
db.define_table("entries", Field("entry", "text"))
|
"""
--- Day 2: Dive! ---
https://adventofcode.com/2021/day/2
summary: process directions to calculate depth and horizontal position
Part 1 - 2322630
Part 2 - 2105273490
"""
def load_data(datafile='input-day2'):
    """Parse the puzzle input into [direction, amount] command pairs.

    args:
        datafile (str): path to the input file, one "<direction> <int>"
            per line. Defaults to the Day 2 puzzle input, so existing
            callers are unaffected.

    returns:
        list: e.g. [['forward', 5], ['down', 3], ...]
    """
    #datafile = 'input-day2-example'
    data = []
    # name the handle `infile` — the original called it `input`,
    # shadowing the builtin
    with open(datafile, 'r') as infile:
        for line in infile:
            line_list = line.split()
            line_list[1] = int(line_list[1])  # amount is always an integer
            data.append(line_list)
    return data
def part1(commands):
    """Follow the naive course reading.

    'forward' advances the horizontal position, while 'down'/'up'
    directly increase/decrease the depth.

    args:
        commands: iterable of (direction, amount) pairs.

    returns:
        final horizontal position multiplied by final depth.
    """
    horizontal = 0
    depth = 0
    for direction, amount in commands:
        if direction == "forward":
            horizontal += amount
        elif direction == "up":
            depth -= amount
        else:
            # any other direction is treated as "down"
            depth += amount
    return horizontal * depth
def part2(commands):
    """Follow the aimed course reading.

    'down'/'up' adjust the aim; 'forward' advances horizontally and
    dives by aim * amount.

    args:
        commands: iterable of (direction, amount) pairs.

    returns:
        final horizontal position multiplied by final depth.
    """
    horizontal = 0
    depth = 0
    aim = 0
    for direction, amount in commands:
        if direction == "forward":
            horizontal += amount
            depth += aim * amount
        elif direction == "up":
            aim -= amount
        else:
            # any other direction is treated as "down"
            aim += amount
    return horizontal * depth
if __name__ == '__main__':
    # Entry point: load the course once, then solve both puzzle parts.
    data = load_data()
    print(f"{data}\n")
    results1 = part1(data)
    print(f"Part 1 - {results1}")
    results2 = part2(data)
    print(f"Part 2 - {results2}\n")
|
"""
multithreading support. See: https://docs.micropython.org/en/v1.17/library/_thread.html
|see_cpython_module| :mod:`python:_thread` https://docs.python.org/3/library/_thread.html .
This module implements multithreading support.
This module is highly experimental and its API is not yet fully settled
and not yet described in this documentation.
"""
# source version: v1_17
# origin module:: micropython/docs/library/_thread.rst
|
# ------------------------------------------------------------------
# Copyright (c) 2020 PyInstaller Development Team.
#
# This file is distributed under the terms of the GNU General Public
# License (version 2.0 or later).
#
# The full license is available in LICENSE.GPL.txt, distributed with
# this software.
#
# SPDX-License-Identifier: GPL-2.0-or-later
# ------------------------------------------------------------------
# Tested on Windows 10 1809 64bit with scikit-learn 0.22.1 and Python 3.7
# Modules scikit-learn loads via Cython/extension machinery that
# PyInstaller's static import analysis cannot discover on its own.
# NOTE(review): paths match scikit-learn 0.22.x (see header); several of
# these moved or were removed in later releases (e.g.
# sklearn.neighbors.typedefs) — verify against the pinned sklearn version.
hiddenimports = ['sklearn.neighbors.typedefs',
                 'sklearn.utils._cython_blas',
                 'sklearn.neighbors.quad_tree',
                 'sklearn.tree._utils']
|
# cports build template metadata for libxshmfence.
pkgname = "libxshmfence"
pkgver = "1.3"
pkgrel = 0
build_style = "gnu_configure"
# location of the shared-memory files backing the sync fences
configure_args = ["--with-shared-memory-dir=/dev/shm"]
hostmakedepends = ["pkgconf"]
makedepends = ["xorgproto"]
pkgdesc = "X SyncFence synchronization primitive"
maintainer = "q66 <q66@chimera-linux.org>"
license = "MIT"
url = "https://xorg.freedesktop.org"
# pkgver is interpolated so version bumps only touch the variable above
source = f"$(XORG_SITE)/lib/libxshmfence-{pkgver}.tar.bz2"
sha256 = "b884300d26a14961a076fbebc762a39831cb75f92bed5ccf9836345b459220c7"
def post_install(self):
    # MIT is not a system-wide license in this distro; ship the package's
    # own copy of COPYING.
    self.install_license("COPYING")
@subpackage("libxshmfence-devel")
def _devel(self):
    # standard -devel split: headers, pkg-config files, etc.
    return self.default_devel()
|
"""
messages.py
The text replies for certain commands
"""
# Mapping of command keyword -> canned reply text the bot posts verbatim.
# The <#...> tokens are Discord channel mentions and must stay intact.
commands = {
    "FAQS": "Please try the fixes and suggestions provided in our <#765222197518139404> channel. Many of the most common issues are already covered there.",
    "ANDROID_CACHE": "If you have an android phone, and you're stuck with a Play button in the lobby building screen, please try clearing the app's cache: Go to Settings -> Apps -> Stellar Quest -> Storage & Cache -> Clear Cache. **Please do not select Clear Storage on this screen.**",
    "SERIES_SUPPORT": "We have broken our support topic into several, series-specific channels. Please ask your question in the <#842774127563374603>, <#842774158089388112>, or <#765215066420805663>, depending on which quest you're having trouble with. This will give everyone a head start in assisting you.",
    "KYC": "Follow the steps here, if you're having trouble with KYC Verification: https://discord.com/channels/763798356484161566/843893111435952148/843895313143169084",
    "LOBBY_BUILDING": "Unfortunately, the Lobby Building screen is a glorified loading animation. The stars don't actually reflect how many people are currently assigned to your lobby.",
    "WAIT_TIME": "Currently, two lobbies begin their quest every minute. All lobbies should begin within *approximately* 30 minutes. Until then, please enjoy the opportunity to grow in patience.",
    "SPOILERS": "**DON'T POST SPOILERS.** We're all learning together, so providing screenshots, descriptions, etc. of the quest at hand before everybody has their chance to begin gives some players an unfair advantage. Don't be that guy.",
    "RESOURCES": "Be sure to carefully look at the resources provided in the quest. There are often some hidden clues and hints within those resources.",
    "TAX": "We are not tax lawyers. Please don’t ask for specific tax advice, and we’ll try our best not to give it. If you do have any questions, please head over to our <#844983942473973760> channel for a list of useful information."
}
|
# Count word occurrences in a user-supplied text file and report the most
# frequent word. (User-facing Spanish strings kept unchanged.)
nombre_archivo = input("Ingrese el nombre del archivo que contiene las palabras: ")
# `with` guarantees the file is closed even if reading fails — the
# original opened the file and never closed it.
with open(nombre_archivo, "r") as archivo:
    texto = archivo.read()
palabras = texto.split()
ocurrencias = {}
for palabra in palabras:
    # dict.get with a default replaces the truthiness test of the original
    ocurrencias[palabra] = ocurrencias.get(palabra, 0) + 1
# Track the (word, count) pair with the highest count.
maxpar = None, 0
for palabra, cantidad in ocurrencias.items():
    if maxpar[1] < cantidad:
        maxpar = palabra, cantidad
print("La palabra con mayor cantidad de repeticion es: ", maxpar[0], "repetida", maxpar[1], "veces")
#!/usr/bin/python
# ==============================================================================
# Author: Tao Li (taoli@ucsd.edu)
# Date: May 1, 2015
# Question: 009-Palindrome-Number
# Link: https://leetcode.com/problems/palindrome-number/
# ==============================================================================
# Determine whether an integer is a palindrome. Do this without extra space.
#
# Some hints:
# Could negative integers be palindromes? (ie, -1)
#
# If you are thinking of converting the integer to string, note the restriction of using extra space.
#
# You could also try reversing an integer. However, if you have solved the problem "Reverse Integer", you know that the reversed integer might overflow. How would you handle such case?
#
# There is a more generic way of solving this problem.
# ==============================================================================
class Solution:
    # @param {integer} x
    # @return {boolean}
    def isPalindrome(self, x):
        """Return True if integer x reads the same forwards and backwards.

        Negative numbers are never palindromes (the sign has no mirror).
        """
        if x < 0:
            return False
        tmp_str = str(x)
        # BUG FIX: len(tmp_str) / 2 is a float in Python 3 and range()
        # raises TypeError on it; use floor division instead. Comparing
        # only the first half against the mirrored second half suffices.
        for i in range(len(tmp_str) // 2):
            if tmp_str[i] != tmp_str[-(i + 1)]:
                return False
        return True
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
def trellis_deps():
    """Declares every third-party archive Trellis builds against.

    Each repository is wrapped in maybe(), so the declaration is a no-op
    when the enclosing workspace already defined a repo of the same name —
    this keeps the function safe to call from multiple WORKSPACEs.
    The sha256/strip_prefix values pin exact release tarballs and must be
    updated together with the corresponding url.
    """
    maybe(
        http_archive,
        name = "ecal",
        build_file = Label("//third_party:ecal.BUILD"),
        sha256 = "1d83d3accfb4a936ffd343524e4a626f0265e600226d6e997b3dbbd7f62eaac6",
        strip_prefix = "ecal-5.10.0",
        urls = [
            "https://github.com/continental/ecal/archive/refs/tags/v5.10.0.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "fmt",
        build_file = Label("//third_party:fmt.BUILD"),
        sha256 = "36016a75dd6e0a9c1c7df5edb98c93a3e77dabcf122de364116efb9f23c6954a",
        strip_prefix = "fmt-8.0.0",
        urls = [
            "https://github.com/fmtlib/fmt/releases/download/8.0.0/fmt-8.0.0.zip",
        ],
    )
    # yaml-cpp ships its own BUILD file, hence no build_file override.
    maybe(
        http_archive,
        name = "yaml-cpp",
        sha256 = "43e6a9fcb146ad871515f0d0873947e5d497a1c9c60c58cb102a97b47208b7c3",
        strip_prefix = "yaml-cpp-yaml-cpp-0.7.0",
        urls = [
            "https://github.com/jbeder/yaml-cpp/archive/refs/tags/yaml-cpp-0.7.0.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "cxxopts",
        build_file = Label("//third_party:cxxopts.BUILD"),
        sha256 = "984aa3c8917d649b14d7f6277104ce38dd142ce378a9198ec926f03302399681",
        strip_prefix = "cxxopts-2.2.1",
        urls = [
            "https://github.com/jarro2783/cxxopts/archive/refs/tags/v2.2.1.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "tclap",
        build_file = Label("//third_party:tclap.BUILD"),
        sha256 = "7363f8f571e6e733b269c4b4e9c18f392d3cd7240d39a379d95de5a4c4bdc47f",
        strip_prefix = "tclap-1.2.4",
        urls = [
            "https://github.com/xguerin/tclap/archive/refs/tags/v1.2.4.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "asio",
        build_file = Label("//third_party:asio.BUILD"),
        sha256 = "5ee191aee825dfb1325cbacf643d599b186de057c88464ea98f1bae5ba4ff47a",
        strip_prefix = "asio-asio-1-19-2",
        urls = [
            "https://github.com/chriskohlhoff/asio/archive/refs/tags/asio-1-19-2.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "gtest",
        sha256 = "9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb",
        strip_prefix = "googletest-release-1.10.0",
        urls = [
            "https://github.com/google/googletest/archive/release-1.10.0.tar.gz",  # Oct 3, 2019
        ],
    )
    maybe(
        http_archive,
        name = "simpleini",
        build_file = Label("//third_party:simpleini.BUILD"),
        sha256 = "14e5bc1cb318ed374d45d6faf48da0b79db7e069c12ec6e090523b8652ef47c7",
        strip_prefix = "simpleini-4.17",
        urls = [
            "https://github.com/brofield/simpleini/archive/refs/tags/4.17.tar.gz",
        ],
    )
    # Pinned to a commit (not a release); mirror listed first for reliability.
    maybe(
        http_archive,
        name = "com_google_protobuf",
        sha256 = "cf63d46ef743f4c30b0e36a562caf83cabed3f10e6ca49eb476913c4655394d5",
        strip_prefix = "protobuf-436bd7880e458532901c58f4d9d1ea23fa7edd52",
        urls = [
            "https://storage.googleapis.com/grpc-bazel-mirror/github.com/google/protobuf/archive/436bd7880e458532901c58f4d9d1ea23fa7edd52.tar.gz",
            "https://github.com/google/protobuf/archive/436bd7880e458532901c58f4d9d1ea23fa7edd52.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "hdf5",
        build_file = Label("//third_party:hdf5.BUILD"),
        sha256 = "7a1a0a54371275ce2dfc5cd093775bb025c365846512961e7e5ceaecb437ef15",
        strip_prefix = "hdf5-1.10.7",
        urls = [
            "https://hdf-wordpress-1.s3.amazonaws.com/wp-content/uploads/manual/HDF5/HDF5_1_10_7/src/hdf5-1.10.7.tar.gz",  # Oct 16, 2020
        ],
    )
    maybe(
        http_archive,
        name = "termcolor",
        build_file = Label("//third_party:termcolor.BUILD"),
        sha256 = "4a73a77053822ca1ed6d4a2af416d31028ec992fb0ffa794af95bd6216bb6a20",
        strip_prefix = "termcolor-2.0.0",
        urls = [
            "https://github.com/ikalnytskyi/termcolor/archive/refs/tags/v2.0.0.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "spdlog",
        build_file = Label("//third_party:spdlog.BUILD"),
        sha256 = "6fff9215f5cb81760be4cc16d033526d1080427d236e86d70bb02994f85e3d38",
        strip_prefix = "spdlog-1.9.2",
        urls = [
            "https://github.com/gabime/spdlog/archive/refs/tags/v1.9.2.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "json",
        build_file = Label("//third_party:json.BUILD"),
        sha256 = "61e605be15e88deeac4582aaf01c09d616f8302edde7adcaba9261ddc3b4ceca",
        strip_prefix = "single_include/nlohmann",
        urls = [
            "https://github.com/nlohmann/json/releases/download/v3.10.2/include.zip",
        ],
    )
    # New eCAL dependency as of v5.10.0
    maybe(
        http_archive,
        name = "tcp_pubsub",
        build_file = Label("//third_party:tcp_pubsub.BUILD"),
        sha256 = "c335faf859219070ef67d62821a5f9ac5a308152bf7a1293a0d0cc74fefb9b58",
        strip_prefix = "tcp_pubsub-1.0.0/tcp_pubsub",
        urls = [
            "https://github.com/continental/tcp_pubsub/archive/refs/tags/v1.0.0.tar.gz",
        ],
    )
    # Submodule of tcp_pubsub
    maybe(
        http_archive,
        name = "recycle",
        build_file = Label("//third_party:recycle.BUILD"),
        sha256 = "d1cf8a5256110c068f366b0e4e16ad39427b9def13876670aad9f167afd7aaee",
        strip_prefix = "recycle-c5425709b2273ef6371647247d1a1d86aa75c2e6",
        urls = [
            "https://github.com/steinwurf/recycle/archive/c5425709b2273ef6371647247d1a1d86aa75c2e6.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "rules_pkg",
        sha256 = "62eeb544ff1ef41d786e329e1536c1d541bb9bcad27ae984d57f18f314018e66",
        urls = [
            "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.6.0/rules_pkg-0.6.0.tar.gz",
            "https://github.com/bazelbuild/rules_pkg/releases/download/0.6.0/rules_pkg-0.6.0.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "variadic_table",
        build_file = Label("//third_party:variadic_table.BUILD"),
        sha256 = "6799c0ee507fb3c739bde936630fc826f3c13abeb7b3245ebf997a6446fd0cb3",
        strip_prefix = "variadic_table-82fcf65c00c70afca95f71c0c77fba1982a20a86",
        urls = [
            "https://github.com/friedmud/variadic_table/archive/82fcf65c00c70afca95f71c0c77fba1982a20a86.tar.gz",
        ],
    )
    maybe(
        http_archive,
        name = "eigen",
        build_file = Label("//third_party:eigen.BUILD"),
        sha256 = "8586084f71f9bde545ee7fa6d00288b264a2b7ac3607b974e54d13e7162c1c72",
        strip_prefix = "eigen-3.4.0",
        urls = [
            "https://github.com/agtonomy/eigen/archive/refs/tags/3.4.0.tar.gz",
        ],
    )
|
# Measurement-unit string constants, grouped by physical quantity.
# The '#:' markers are Sphinx autodoc comments documenting each constant.
# VOLTAGE and CURRENT
#: Unit for Voltage
UNIT_VOLT = 'V'
#: Unit for Voltage*10^-3
UNIT_MILLI_VOLT = 'mV'
#: Unit for Current
UNIT_AMPERE = 'A'
#: Unit for Current*10^-3
UNIT_MILLI_AMPERE = 'mA'
#: Unit for Current*10^-6
UNIT_MICRO_AMPERE = 'uA'
# FREQUENCY
#: Unit for Frequencies
UNIT_HERTZ = 'Hz'
#: Unit for Frequencies*10^3
UNIT_KILO_HERTZ = 'kHz'
# POWER
#: Unit for apparent/reactive power
UNIT_VOLT_AMPERE = 'VA'
#: Unit for power*10^-3
UNIT_MILLI_WATT = 'mW'
#: Unit for power
UNIT_WATT = 'W'
#: Unit for power*10^3
UNIT_KILO_WATT = 'kW'
#: Unit for energy (power*10^3 integrated over hours)
UNIT_KILO_WATT_HOURS = 'kWh'
# TIME
#: Unit for seconds
UNIT_SECONDS = 's'
#: Unit for milliseconds
UNIT_MILLI_SECONDS = 'ms'
#: Unit for microseconds
UNIT_MICRO_SECONDS = 'us'
#: Unit for minutes
UNIT_MINUTES = 'min'
#: Unit for hours
UNIT_HOURS = 'h'
#: Unit for days
UNIT_DAYS = 'd'
|
# "Find the zombie": the player that is found turns into a zombie.
players = ['Nicola', 'Penny', 'Dom', 'Nathan', 'Josie']
# ", ".join produces the same output as the original's five hard-coded
# indexes, but keeps working if the player list changes length.
print(f"Friends: {', '.join(players)}")
find = input("Who did you find? ")
if find in players:
    print(f"{find} has turned into a zombie!")
    # replace the found player in place, preserving list order
    players[players.index(find)] = "Zombie"
    print(f"Remaining players: {', '.join(players)}")
else:
    print("Everyone is still in the game!")
    print(f"Remaining players: {', '.join(players)}")
"""0MQ Constant names"""
# Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Python module level constants
#-----------------------------------------------------------------------------
# Dictionaries of constant names added or removed in particular libzmq
# versions, keyed by (major, minor, patch) version tuple.
new_in = {
    (2,2,0) : [
        'RCVTIMEO',
        'SNDTIMEO',
    ],
    (3,2,2) : [
        # errnos
        'EMSGSIZE',
        'EAFNOSUPPORT',
        'ENETUNREACH',
        'ECONNABORTED',
        'ECONNRESET',
        'ENOTCONN',
        'ETIMEDOUT',
        'EHOSTUNREACH',
        'ENETRESET',
        # ctx opts
        'IO_THREADS',
        'MAX_SOCKETS',
        'IO_THREADS_DFLT',
        'MAX_SOCKETS_DFLT',
        # socket opts
        'ROUTER_BEHAVIOR',
        'ROUTER_MANDATORY',
        'FAIL_UNROUTABLE',
        'TCP_KEEPALIVE',
        'TCP_KEEPALIVE_CNT',
        'TCP_KEEPALIVE_IDLE',
        'TCP_KEEPALIVE_INTVL',
        'DELAY_ATTACH_ON_CONNECT',
        'XPUB_VERBOSE',
        # monitor events
        'EVENT_CONNECTED',
        'EVENT_CONNECT_DELAYED',
        'EVENT_CONNECT_RETRIED',
        'EVENT_LISTENING',
        'EVENT_BIND_FAILED',
        'EVENT_ACCEPTED',
        'EVENT_ACCEPT_FAILED',
        'EVENT_CLOSED',
        'EVENT_CLOSE_FAILED',
        'EVENT_DISCONNECTED',
        'EVENT_ALL',
    ],
    (4,0,0) : [
        # socket types
        'STREAM',
        # socket opts
        'IMMEDIATE',
        'ROUTER_RAW',
        'IPV6',
        'MECHANISM',
        'PLAIN_SERVER',
        'PLAIN_USERNAME',
        'PLAIN_PASSWORD',
        'CURVE_SERVER',
        'CURVE_PUBLICKEY',
        'CURVE_SECRETKEY',
        'CURVE_SERVERKEY',
        'PROBE_ROUTER',
        'REQ_RELAXED',
        'REQ_CORRELATE',
        'CONFLATE',
        'ZAP_DOMAIN',
        # security
        'NULL',
        'PLAIN',
        'CURVE',
        # events
        'EVENT_MONITOR_STOPPED',
    ],
    (4,1,0) : [
        # socket opts
        'ROUTER_HANDOVER',
        'TOS',
        'IPC_FILTER_PID',
        'IPC_FILTER_UID',
        'IPC_FILTER_GID',
        'CONNECT_RID',
    ],
}
# Constants dropped from libzmq as of the given version.
removed_in = {
    (3,2,2) : [
        'UPSTREAM',
        'DOWNSTREAM',
        'HWM',
        'SWAP',
        'MCAST_LOOP',
        'RECOVERY_IVL_MSEC',
    ]
}
# collections of zmq constant names based on their role
# base names have no specific use
# opt names are validated in get/set methods of various objects
base_names = [
    # base
    'VERSION',
    'VERSION_MAJOR',
    'VERSION_MINOR',
    'VERSION_PATCH',
    'NOBLOCK',
    'DONTWAIT',
    'POLLIN',
    'POLLOUT',
    'POLLERR',
    'SNDMORE',
    # device types
    'STREAMER',
    'FORWARDER',
    'QUEUE',
    # context defaults
    'IO_THREADS_DFLT',
    'MAX_SOCKETS_DFLT',
    # socktypes
    'PAIR',
    'PUB',
    'SUB',
    'REQ',
    'REP',
    'DEALER',
    'ROUTER',
    'PULL',
    'PUSH',
    'XPUB',
    'XSUB',
    'UPSTREAM',
    'DOWNSTREAM',
    'STREAM',
    # events
    'EVENT_CONNECTED',
    'EVENT_CONNECT_DELAYED',
    'EVENT_CONNECT_RETRIED',
    'EVENT_LISTENING',
    'EVENT_BIND_FAILED',
    'EVENT_ACCEPTED',
    'EVENT_ACCEPT_FAILED',
    'EVENT_CLOSED',
    'EVENT_CLOSE_FAILED',
    'EVENT_DISCONNECTED',
    'EVENT_ALL',
    'EVENT_MONITOR_STOPPED',
    # security
    'NULL',
    'PLAIN',
    'CURVE',
    ## ERRNO
    # Often used (these are also in errno.)
    'EAGAIN',
    'EINVAL',
    'EFAULT',
    'ENOMEM',
    'ENODEV',
    'EMSGSIZE',
    'EAFNOSUPPORT',
    'ENETUNREACH',
    'ECONNABORTED',
    'ECONNRESET',
    'ENOTCONN',
    'ETIMEDOUT',
    'EHOSTUNREACH',
    'ENETRESET',
    # For Windows compatibility
    'HAUSNUMERO',
    'ENOTSUP',
    'EPROTONOSUPPORT',
    'ENOBUFS',
    'ENETDOWN',
    'EADDRINUSE',
    'EADDRNOTAVAIL',
    'ECONNREFUSED',
    'EINPROGRESS',
    'ENOTSOCK',
    # 0MQ Native
    'EFSM',
    'ENOCOMPATPROTO',
    'ETERM',
    'EMTHREAD',
]
# socket options taking a 64-bit integer value
int64_sockopt_names = [
    'AFFINITY',
    'MAXMSGSIZE',
    # sockopts removed in 3.0.0
    'HWM',
    'SWAP',
    'MCAST_LOOP',
    'RECOVERY_IVL_MSEC',
]
# socket options taking a bytes value
bytes_sockopt_names = [
    'IDENTITY',
    'SUBSCRIBE',
    'UNSUBSCRIBE',
    'LAST_ENDPOINT',
    'TCP_ACCEPT_FILTER',
    'PLAIN_USERNAME',
    'PLAIN_PASSWORD',
    'CURVE_PUBLICKEY',
    'CURVE_SECRETKEY',
    'CURVE_SERVERKEY',
    'ZAP_DOMAIN',
    'CONNECT_RID',
]
# socket options taking a C int value
int_sockopt_names = [
    # sockopts
    'RECONNECT_IVL_MAX',
    # sockopts new in 2.2.0
    'SNDTIMEO',
    'RCVTIMEO',
    # new in 3.x
    'SNDHWM',
    'RCVHWM',
    'MULTICAST_HOPS',
    'IPV4ONLY',
    'ROUTER_BEHAVIOR',
    'TCP_KEEPALIVE',
    'TCP_KEEPALIVE_CNT',
    'TCP_KEEPALIVE_IDLE',
    'TCP_KEEPALIVE_INTVL',
    'DELAY_ATTACH_ON_CONNECT',
    'XPUB_VERBOSE',
    'FD',
    'EVENTS',
    'TYPE',
    'LINGER',
    'RECONNECT_IVL',
    'BACKLOG',
    'ROUTER_MANDATORY',
    'FAIL_UNROUTABLE',
    'ROUTER_RAW',
    'IMMEDIATE',
    'IPV6',
    'MECHANISM',
    'PLAIN_SERVER',
    'CURVE_SERVER',
    'PROBE_ROUTER',
    'REQ_RELAXED',
    'REQ_CORRELATE',
    'CONFLATE',
    'ROUTER_HANDOVER',
    'TOS',
    'IPC_FILTER_PID',
    'IPC_FILTER_UID',
    'IPC_FILTER_GID',
]
# socket options whose int size changed between libzmq major versions
switched_sockopt_names = [
    'RATE',
    'RECOVERY_IVL',
    'SNDBUF',
    'RCVBUF',
    'RCVMORE',
]
# context options
ctx_opt_names = [
    'IO_THREADS',
    'MAX_SOCKETS',
]
# message options
msg_opt_names = [
    'MORE',
]
# every constant name this module knows about, in one flat sequence
all_names = (
    base_names + ctx_opt_names + msg_opt_names +
    bytes_sockopt_names + int_sockopt_names + int64_sockopt_names + switched_sockopt_names
)
def no_prefix(name):
    """Return True if the constant is used *without* a ZMQ_ prefix.

    errno constants (E*) carry no ZMQ_ prefix in the C headers, while the
    EVENT_* constants do — hence the EVENT carve-out. (The original
    docstring asked the opposite question of what the code answers.)
    """
    return name.startswith('E') and not name.startswith('EVENT')
|
class Solution:
    def isAnagram(self, s, t):
        """Return True when t is an anagram of s.

        :type s: str
        :type t: str
        :rtype: bool
        """
        # Two strings are anagrams exactly when their sorted character
        # sequences coincide.
        normalized_s = sorted(s)
        normalized_t = sorted(t)
        return normalized_s == normalized_t
# Window geometry constants, in pixels.
WINDOW_WIDTH = 560
MODE_SELECTOR_HEIGHT = 50
CONTROLS_FRAME_HEIGHT = 80
KEYBOARD_HEIGHT = 160
SCORE_DISPLAY_HEIGHT = 110
# Total window height is the sum of the vertically stacked sections.
WINDOW_HEIGHT = KEYBOARD_HEIGHT + CONTROLS_FRAME_HEIGHT + MODE_SELECTOR_HEIGHT + SCORE_DISPLAY_HEIGHT
# Options offered by the mode selector.
CHOICES = ['Scales','Chords','Chord Progressions']
|
# Canned Russian-language bot replies, keyed by message id. The values are
# user-facing and must stay byte-identical.
# NOTE(review): "геолокацис" in "NoneGeo" looks like a typo for
# "геолокации" — confirm with the owners before changing user-facing text.
text = {"greet_new": "👨Здравствуйте, вы обратились в службу экстренной помощи\n"
                     "Прежде чем использовать все возможности бота, ответьте на несколько вопросов используя клавиатуру\n\n"
                     "👩Пожалуйста введите своё ФИО",
        "greet_old_beginning": "Здравствуйте, ",
        "greet_old_ending": ", вы обратились в службу экстренной помощи",
        "info": "Поздравляем вас с успешной регистрацией\n"
                "Ваша информация предоставлена ниже, вы всегда можете изменить её в настройках",
        "surname": "Фамилия: ",
        "firstname": "Имя: ",
        "middlename": "Отчество: ",
        "age": "Возраст: ",
        "gender": "Пол: ",
        "wrong": "Данные введены некорректно, повторите попытку",
        "wrongMessageInput": "Данные введены некорректно, повторите попытку",
        "rated_callback":['Принято', "Хорошо","Ок"],
        "chooseSymptomes": "Выберите один из симптомов. В случае отсутствия ваших симптомов, введите их\n\nНапишите стоп-слово готово, когда закончите перечислять",
        "chooseDiseases": "Выберите вашу проблему или опишите каждую из них через запятую с пробелом\n\nНапишите стоп-слово готово, когда закончите перечислять",
        "ageInput": "Введите ваш возраст",
        "genderInput": "Введите ваш пол",
        "chooseGeo":"Отправьте нам свою геолокацию через скрепочку",
        "new_request":"Новое обращение!!!",
        "NoneGeo":"Ошибка при получении геолокацис",
        "request_sended":"Заявка успешно отправлена!👩"
        }
# Predefined symptom choices offered on the keyboard.
symptomes = ["Насморк", "Головная боль", "Головокружение", "Рвота", "Понос", "Покраснение",]
# Predefined problem/disease choices offered on the keyboard.
diseases = ["Отравление", "Ожог", "Аллергия", "Инсульт",
            "Инфаркт", "Кровотечение", "Передозировка",
            "Туберкулёз", "Пневмония", "Грипп", "Коронавирус","ОРВИ"]
|
# -*- coding: utf-8 -*-
"""
Created on Sun May 5 14:55:31 2019
@author: asus
"""
"""
House hunting
caculate the months to save enough money to make the down payment of your dream house
"""
# House hunting: months needed to save the 25% down payment, with monthly
# compounding of investment returns on current savings.
annual_salary = float(input("Enter your annual salary: "))
# fixed typo in the user-facing prompt: "demical" -> "decimal"
portion_saved = float(input("Enter the percent of your salary to save, as a decimal: "))
total_cost = float(input("Enter the cost of your dream house: "))
portion_down_payment = total_cost*0.25  # down payment is 25% of the cost
r = 0.04  # the annual rate of investment reward
monthly_salary = annual_salary/12
current_savings = 0
months = 0
# Each month, savings earn r/12 interest plus the saved share of salary.
while current_savings < portion_down_payment:
    current_savings += current_savings*(r/12) + monthly_salary*portion_saved
    months += 1
print("Number of months: ", months)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `ansible_task_worker` package."""
|
#!/usr/bin/env python3
class _drop_html_event(object):
    # Empty placeholder/marker class; carries no behavior yet.
    pass
def drop_html_event(sts):
    """Filter out HTML event attributes (stub — not implemented yet).

    Docstring translated from Chinese ("过滤到HTML的事件属性").

    :param sts: the HTML/text to be filtered
    :return: None (no-op placeholder)
    """
    pass
|
# Tooltip texts for each option-pricing input field, keyed by UI label.
# "-" is the blank/no-selection entry. The "Tree periods" value ends with a
# LaTeX snippet (raw string) rendered by the UI.
list_input = {"-": "-",
              "Spot price": "Current market price at which asset is bought or sold.",
              "Strike": 'The price at which a put or call option can be exercised.',
              "Risk-free rate": 'The risk-free interest rate is the rate of return of a hypothetical investment with no'
                                ' risk of financial loss, over a given period of time.',
              "Volatility": 'Standard deviation of the underlying asset stock price, in other words the degree of'
                            ' variation of the price.',
              "Maturity": 'Date on which the option will cease to exist, and when the investor will be able to exercise'
                          ' his right to buy or sell the underlying asset (for European_Options).',
              "Drift": 'Rate at which the underlying stock average changes. It gives the general trend of the stock '
                       'movements.',
              "Discretization step": 'Used in the pricing model of the underlying asset, its mathematical definition is'
                                     ' the step at which the continuous period (i.e. from t = 0 to t = maturity) is'
                                     ' discretized. Financially speaking, it is time between each pricing of the asset.',
              "Tree periods": "Size of the binomial lattice (tree). 10-20 are more than enough to converge to the "
                              "Black-Scholes price. The number of nodes for a given lattice of n periods is given by an"
                              " arithmetic progression with common difference between terms of 1, i.e. "
                              r"$S = \frac{s_1+s_n}{2}n$",
              "Rebalancing frequency": "Frequency of replication strategy portfolio rebalancing relative to the "
                                       "discretization step. If equal to 1, the portfolio will be rebalanced at every"
                                       " discretization step. If equal to two, the portfolio will be rebalanced every 2"
                                       " discretization steps, ... The higher, the lower the quality of the replication"
                                       " strategy. It is best left at 1 if looking for the best replication strategy. "
                                       "Min = 1 (rebalanced every discretization step) ; "
                                       "Max = Maturity / Discretization step (rebalanced once, at maturity)",
              "Number of simulations": "Number of replication strategies to be computed. Maximum 10.",
              "Transaction costs": "Transaction costs are expenses incurred when buying or selling the underlying "
                                   "asset. Can be fixed or proportional to the number of underlying asset bought or "
                                   "sold. Typically a few basis points, i.e. less than a tenth of a percentage. Given"
                                   " the Black-Scholes model assumptions, where transactions costs are null, "
                                   "considering them will decrease the quality of the replication strategy. If left empty, it is assumed null.",
              "Correlation": "Correlation between both stocks, i.e. how much they move together.",
              "Seed": "The simulations are based on a random number generation. Currently, the generation is fixed, ie the Brownian motion behind"
                      " the stock random dynamics is fixed, therefore allowing for sensitivity analysis. Clicking on this button will generate a new Brownian motion, thus changing the stoch trajectory."
              }
|
'''
Writing and reading files using w+
'''
# `with` closes the file even if an error occurs — the original relied on
# a manual close(). "w+" truncates the file, then allows reading back.
with open("foo.txt", "w+") as f:
    f.writelines(["Hello\n", "This is a new line"])
    f.flush()
    # rewind to the start so read() returns what was just written
    f.seek(0)
    print(f.read())
|
"""Handler for help messages."""
__all__ = ["handle_generic_help"]
async def handle_generic_help(*, event, app, logger):
    """Handle an event from a user asking for help with the bot.

    Note that this is a generic help request, so all backends will be
    responding.

    Parameters
    ----------
    event : `dict`
        The body of the Slack event.
    app : `aiohttp.web.Application`
        The application instance.
    logger
        A structlog logger, typically with event information already
        bound to it.
    """
    event_channel = event["event"]["channel"]
    # reply in a thread anchored on the triggering message
    thread_ts = event["event"]["ts"]
    httpsession = app["root"]["api.lsst.codes/httpSession"]
    headers = {
        "content-type": "application/json; charset=utf-8",
        "authorization": f'Bearer {app["root"]["templatebot/slackToken"]}',
    }
    body = {
        "token": app["root"]["templatebot/slackToken"],
        "channel": event_channel,
        "thread_ts": thread_ts,
        # plain-text fallback; rich formatting is carried by "blocks"
        "text": _make_text_summary(),
        "mrkdwn": True,
        "blocks": _make_blocks(),
    }
    url = "https://slack.com/api/chat.postMessage"
    async with httpsession.post(url, json=body, headers=headers) as response:
        response_json = await response.json()
        # Slack returns HTTP 200 even on failure; the "ok" flag is the
        # real success indicator.
        if not response_json["ok"]:
            logger.error(
                "Got a Slack error from chat.postMessage", contents=response_json
            )
def _make_text_summary():
return (
"Create a new GitHub repo from a template: `create project`.\\n"
"Create a snippet of file from a template: `create file`."
)
def _make_blocks():
main_section = {
"type": "section",
"text": {
"type": "mrkdwn",
"text": (
"• Create a GitHub repo from a template: "
"```create project```\n"
"• Create a file or snippet from a template: "
"```create file```"
),
},
}
context = {
"type": "context",
"elements": [
{
"type": "mrkdwn",
"text": (
"Handled by <https://github.com/lsst-sqre/templatebot"
"|templatebot>. The template repository is "
"https://github.com/lsst/templates."
),
}
],
}
return [main_section, context]
|
class Solution:
    def buildTree(self, inorder: List[int], postorder: List[int]) -> TreeNode:
        """Rebuild a binary tree from its inorder and postorder traversals.

        Assumes node values are unique (the LeetCode guarantee); the
        value -> inorder-index map below relies on that.
        """
        # Precompute value -> inorder index once. The original called
        # inorder.index() inside every recursive step, making the build
        # O(n^2) on skewed trees; with the map it is O(n).
        index_of = {val: i for i, val in enumerate(inorder)}

        def _build(l1, r1, l2, r2):
            # inorder[l1:r1+1] and postorder[l2:r2+1] delimit this subtree
            if l1 > r1:
                return None
            if l1 == r1 or l2 == r2:
                return TreeNode(postorder[l2])
            # the last postorder entry is the subtree root
            root = TreeNode(postorder[r2])
            idx = index_of[root.val]
            # left subtree spans idx - l1 nodes; right subtree the rest
            root.left = _build(l1, idx - 1, l2, l2 + idx - 1 - l1)
            root.right = _build(idx + 1, r1, l2 + idx - l1, r2 - 1)
            return root

        n = len(inorder)
        return _build(0, n - 1, 0, n - 1)
|
"""
This module contains utilities to work with code generated by prost-build.
"""
load("@rules_rust//rust:defs.bzl", "rust_binary", "rust_test")
def generated_files_check(name, srcs, deps, data, manifest_dir):
    """Defines a rust_test that checks committed prost-generated code is up to date.

    Args:
      name: Name of the test target.
      srcs: Rust sources of the test.
      deps: Dependencies of the test.
      data: Extra data files; protoc, the well-known protos, and rustfmt
        are appended so the test can regenerate and format code itself.
      manifest_dir: Value exposed to the test as CARGO_MANIFEST_DIR.
    """
    rust_test(
        name = name,
        srcs = srcs,
        data = data + [
            "@rules_rust//rust/toolchain:current_exec_rustfmt_files",
            "@com_google_protobuf//:protoc",
            "@com_google_protobuf//:well_known_protos",
        ],
        edition = "2018",
        env = {
            "PROTOC": "$(rootpath @com_google_protobuf//:protoc)",
            # NOTE(review): this path names repo com_github_protocolbuffers_protobuf
            # while protoc above comes from @com_google_protobuf — confirm both
            # repository names are defined in the workspace.
            "PROTOC_INCLUDE": "external/com_github_protocolbuffers_protobuf/src",
            "CARGO_MANIFEST_DIR": manifest_dir,
        },
        deps = deps,
    )
def protobuf_generator(name, srcs, deps = [], data = []):
    """Defines a rust_binary that generates Rust code from protobuf sources.

    Args:
      name: Name of the binary target.
      srcs: Rust sources of the generator.
      deps: Dependencies of the generator.
      data: Extra data files; protoc, the well-known protos, and rustfmt
        are appended so the generator can run protoc and format its output.
    """
    rust_binary(
        name = name,
        srcs = srcs,
        data = data + [
            "@com_google_protobuf//:protoc",
            "@com_google_protobuf//:well_known_protos",
            "@rules_rust//rust/toolchain:current_exec_rustfmt_files",
        ],
        edition = "2018",
        rustc_env = {
            "PROTOC": "$(rootpath @com_google_protobuf//:protoc)",
            # NOTE(review): this path names repo com_github_protocolbuffers_protobuf
            # while protoc above comes from @com_google_protobuf — confirm both
            # repository names are defined in the workspace.
            "PROTOC_INCLUDE": "external/com_github_protocolbuffers_protobuf/src",
        },
        deps = deps,
    )
|
# Read how many players will take part, prompt for each name, then print
# the names that were entered.
jogos = int(input('Digite quantos jogadores vão participar: '))
# Keep the original wording for the first two prompts; later players get a
# numbered prompt, generalizing the script beyond two participants.
prompts = {1: 'Nome do primeiro : ', 2: 'Nome do segundo : '}
nomes = []
for n in range(1, jogos + 1):
    nomes.append(input(prompts.get(n, 'Nome do jogador %d : ' % n)))
# Bug fix: the original printed jog1, jog2, jog3, names that were never
# defined (NameError). Print the names actually read instead.
print(*nomes)
|
# 364 - Nested List Weight Sum II (Medium)
# https://leetcode.com/problems/nested-list-weight-sum-ii/
# """
# This is the interface that allows for creating nested lists.
# You should not implement it, or speculate about its implementation
# """
#class NestedInteger(object):
#
# def isInteger(self):
# """
# @return True if this NestedInteger holds a single integer, rather than a nested list.
# :rtype bool
# """
#
# def getInteger(self):
# """
# @return the single integer that this NestedInteger holds, if it holds a single integer
# Return None if this NestedInteger holds a nested list
# :rtype int
# """
#
# def getList(self):
# """
# @return the nested list that this NestedInteger holds, if it holds a nested list
# Return None if this NestedInteger holds a single integer
# :rtype List[NestedInteger]
# """
class Solution(object):
    """LeetCode 364: weighted sum of a nested list, shallow items weighted most.

    An integer at depth d contributes value * (maxDepth - d + 1).
    """

    def findMaxDepth(self, nestedList, depth):
        """Return the maximum nesting depth reachable from nestedList.

        ``depth`` is the depth of nestedList itself; an empty list reports 0.
        """
        if not nestedList:
            return 0
        deepest = depth
        for item in nestedList:
            if not item.isInteger():
                deepest = max(
                    deepest, self.findMaxDepth(item.getList(), depth + 1)
                )
        return deepest

    def dfs(self, nestedList, depth, maxDepth):
        """Sum the integers, weighting each by its inverse depth."""
        if not nestedList:
            return 0
        total = 0
        for item in nestedList:
            if item.isInteger():
                total += item.getInteger() * (maxDepth - depth + 1)
            else:
                total += self.dfs(item.getList(), depth + 1, maxDepth)
        return total

    def depthSumInverse(self, nestedList):
        """
        :type nestedList: List[NestedInteger]
        :rtype: int
        """
        return self.dfs(nestedList, 1, self.findMaxDepth(nestedList, 1))
|
class ProductFileMetadata(object):
    """Metadata describing a single product output file.

    Records the file's identity and locations, its media type, the time
    range of the data it holds, and an optional geojson footprint.
    """

    def __init__(self, output_name, local_path, media_type=None,
                 remote_path=None, data_start=None, data_end=None,
                 geojson=None):
        # Identity and locations of the file.
        self.output_name = output_name
        self.local_path = local_path
        self.remote_path = remote_path
        self.media_type = media_type
        # Temporal coverage of the data in the file.
        self.data_start = data_start
        self.data_end = data_end
        # Optional spatial footprint.
        self.geojson = geojson
{
"name": "PersianTweets",
"version": "2020",
"task": "Corpus",
"splits": [],
"description": "LSCP: Enhanced Large Scale Colloquial Persian Language Understanding <br>\nLearn more about this study at https://iasbs.ac.ir/~ansari/lscp/",
"size": 20665964,
"filenames": ["lscp-0.5-fa-normalized.txt"]
}
|
class LoginLimiter(object):
    """Sliding-window rate limiter: at most 10 allowed calls per 60 seconds.

    Keeps the timestamps of the 10 most recent *allowed* calls; denied
    attempts are not recorded and do not extend the window.
    """

    def __init__(self):
        # Timestamps of recent allowed calls, oldest first (max length 10).
        self.rctCalls = []

    def isAllowed(self, ts):
        """Record and allow the call at *ts* if permitted; return the verdict."""
        window_full = len(self.rctCalls) >= 10
        if window_full and ts - self.rctCalls[0] < 60:
            # Ten allowed calls already fall inside the last 60 seconds: deny.
            print("Exceeding call limit at timestamp %s. Call denied" % ts)
            return False
        if window_full:
            # The oldest allowed call has aged out of the window; retire it.
            self.rctCalls.pop(0)
        self.rctCalls.append(ts)
        print("Call API at timestamp %s" % ts)
        return True
# test limiter functionality
print("\nTest basic limiter functionality:")
solver = LoginLimiter()
for ts in (step * 5 for step in range(100)):
    solver.isAllowed(ts)

print("\nTest multiple clients:")
# apply limiter to multiple client IDs, one limiter per client
clients = {ID: LoginLimiter() for ID in ("A", "B")}
# let multiple clients call the API simultaneously
for ts in range(15):
    for ID in clients:
        print("client ID: %s" % ID, end=' ')
        clients[ID].isAllowed(ts)
class Unserializable(Exception):
    """Raised when the save system cannot serialize an item."""
class DeserializationError(Exception):
    """Raised when a value cannot be deserialized during game load."""
class VerbDefinitionError(Exception):
    """Raised when a verb is defined incorrectly or inconsistently."""
class ParserError(Exception):
    """Raised when the player command cannot be parsed."""
class VerbMatchError(ParserError):
    """Raised when no verb can be identified from the player input."""
class ObjectMatchError(ParserError):
    """Raised when no IFPObject matches the direct or indirect object
    in the player command."""
class OutOfRange(ParserError):
    """Raised when the specified object is out of range for the current verb."""
class AbortTurn(Exception):
    """Raised to end the current turn; its message is never printed."""
class NoMatchingSuggestion(Exception):
    """Raised when a query cannot be matched to exactly one suggestion.

    Attributes
    ----------
    query : the string supplied by the user
    options : all candidate suggestions
    matches : the candidates that could not be excluded
    """

    def __init__(self, query, options, matches):
        self.query = query
        self.options = options
        self.matches = matches
        # Typo fix in the user-facing message:
        # "unambiguaously" -> "unambiguously".
        msg = (
            f"Unable to unambiguously match a suggestion from options {options} "
            f"with query `{query}`. Not excluded: {matches}."
        )
        super().__init__(msg)
class IFPError(Exception):
    """Base exception type for IFP-specific failures."""
|
# class Solution:
# def numSquares(self, n: int) -> int:
# ans = []
# for k in range(n, -1, -1):
# while k != 0:
# if self.fun(k):
# ans.append(k)
# k = n-k
# else:
# break
# if sum(ans) + k == n:
# break
# return ans
# def fun(self, num):
# return int(num**(1/2))**2 == num
# solu = Solution()
# n = 9
# print(solu.numSquares(n))
# class Solution:
# def numSquares(self, n: int) -> int:
# # dp = [0] * (n+1)
# # dp[0] = 0
# dp = [i for i in range(n+1)] #默认的拆法是都拆成全1相加
# dp[0] = 0 #0默认认为不需要数相加
# # print(dp)
# for k in range(1, n+1):
# p = 1
# while k >= p**2:
# # print(dp[k], dp[k-p**2] + 1)
# dp[k] = min(dp[k], dp[k-p**2] + 1)
# p += 1
# return dp[n]
# solu = Solution()
# n = 9
# print(solu.numSquares(n))
# class Solution:
# def numSquares(self, n: int) -> int:
# dp = [i for i in range(n+1)] #默认的拆法是都拆成全1相加
# dp[0] = 0 #0默认认为不需要数相加
# for k in range(1, n+1):
# #p是使得p**2不大于k的最大整数 但是12=9+1+1+1不如12=4+4+4
# p = int(k ** (1/2))
# dp[k] = min(dp[k], dp[k-p**2] + 1)
# return dp[n]
# solu = Solution()
# n = 12#12#9
# print(solu.numSquares(n))
# 拉格朗日四数和定理
'''
定理内容:
每个正整数均可表示成不超过四个整数的平方之和
重要的推论:
数 n 如果只能表示成四个整数的平方和,不能表示成更少的数的平方之和,必定满足 4^a(8b+7) 的形式
如果 n%4==0,k=n/4,n 和 k 可由相同个数的整数表示
如何利用推论求一个正整数最少需要多少个数的平方和表示:
先判断这个数是否满足 4^a(8b+7) 的形式,如果满足,那么这个数就至少需要 4 个数的平方和表示。
如果不满足,再在上面除以 4 之后的结果上暴力尝试只需要 1 个数就能表示和只需要 2 个数就能表示的情况。
如果还不满足,那么就只需要 3 个数就能表示。
'''
class Solution:
    def numSquares(self, n: int) -> int:
        """Return the minimum number of perfect squares summing to n (n >= 1).

        Uses Lagrange's four-square theorem with Legendre's refinement:
        the answer is 4 iff n has the form 4^a * (8b + 7); otherwise check
        whether one or two squares suffice, else the answer is 3.
        """
        # Strip factors of 4: n and n/4 need the same number of squares.
        # Bug fix: use integer division — "/=" turned n into a float,
        # risking precision loss for large n.
        while n % 4 == 0:
            n //= 4
        # Legendre: numbers of the form 8b + 7 need exactly four squares.
        if n % 8 == 7:
            return 4
        # Try to write n as a^2 + b^2. This covers the one- and two-square
        # cases; bool(a) + bool(b) yields 1 when one of the terms is zero.
        a = 0
        while a * a <= n:
            b = int((n - a * a) ** 0.5)
            if a * a + b * b == n:
                return bool(a) + bool(b)
            a += 1
        return 3
|
# Copyright 2019 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code for interacting with git binary to get the file tree checked out at the specified revision.
"""
_GitRepoInfo = provider(
doc = "Provider to organize precomputed arguments for calling git.",
fields = {
"directory": "Working directory path",
"shallow": "Defines the depth of a fetch. Either empty, --depth=1, or --shallow-since=<>",
"reset_ref": """Reference to use for resetting the git repository.
Either commit hash, tag or branch.""",
"fetch_ref": """Reference for fetching. Can be empty (HEAD), tag or branch.
Can not be a commit hash, since typically it is forbidden by git servers.""",
"remote": "URL of the git repository to fetch from.",
"init_submodules": """If True, submodules update command will be called after fetching
and resetting to the specified reference.""",
},
)
def git_repo(ctx, directory):
    """ Fetches data from git repository and checks out file tree.

    Called by git_repository or new_git_repository rules.

    Args:
        ctx: Context of the calling rules, for reading the attributes.
            Please refer to the git_repository and new_git_repository rules for the description.
        directory: Directory where to check out the file tree.

    Returns:
        The struct with the following fields:
            commit: Actual HEAD commit of the checked out data.
            shallow_since: Actual date and time of the HEAD commit of the checked out data.
    """
    if ctx.attr.shallow_since:
        # shallow_since is only meaningful when fetching by commit; tags and
        # branches always use --depth=1 instead.
        if ctx.attr.tag:
            fail("shallow_since not allowed if a tag is specified; --depth=1 will be used for tags")
        if ctx.attr.branch:
            fail("shallow_since not allowed if a branch is specified; --depth=1 will be used for branches")

    # Default to the cheapest fetch.
    shallow = "--depth=1"
    if ctx.attr.commit:
        # We can not use the commit value in --shallow-since;
        # And since we are fetching HEAD in this case, we can not use --depth=1
        shallow = ""

    # Use shallow-since if given
    if ctx.attr.shallow_since:
        shallow = "--shallow-since=%s" % ctx.attr.shallow_since

    # Compute the reference to fetch and the reference to reset to. For a
    # commit, fetch_ref stays empty (servers typically refuse fetching a bare
    # hash); tags and branches are fetched into matching local refs.
    reset_ref = ""
    fetch_ref = ""
    if ctx.attr.commit:
        reset_ref = ctx.attr.commit
    elif ctx.attr.tag:
        reset_ref = "tags/" + ctx.attr.tag
        fetch_ref = "tags/" + ctx.attr.tag + ":tags/" + ctx.attr.tag
    elif ctx.attr.branch:
        reset_ref = "origin/" + ctx.attr.branch
        fetch_ref = ctx.attr.branch + ":origin/" + ctx.attr.branch

    git_repo = _GitRepoInfo(
        directory = ctx.path(directory),
        shallow = shallow,
        reset_ref = reset_ref,
        fetch_ref = fetch_ref,
        remote = str(ctx.attr.remote),
        init_submodules = ctx.attr.init_submodules,
    )

    ctx.report_progress("Cloning %s of %s" % (reset_ref, ctx.attr.remote))
    if (ctx.attr.verbose):
        print("git.bzl: Cloning or updating %s repository %s using strip_prefix of [%s]" %
            (
                " (%s)" % shallow if shallow else "",
                ctx.name,
                ctx.attr.strip_prefix if ctx.attr.strip_prefix else "None",
            ))
    _update(ctx, git_repo)
    ctx.report_progress("Recording actual commit")

    # Record what was actually checked out so callers can pin the repository.
    actual_commit = _get_head_commit(ctx, git_repo)
    shallow_date = _get_head_date(ctx, git_repo)
    return struct(commit = actual_commit, shallow_since = shallow_date)
def _update(ctx, git_repo):
    # Start from a clean slate: delete any previous checkout, then
    # init/fetch/reset/clean instead of "git clone" so the same code path
    # works for commits, tags and branches alike.
    ctx.delete(git_repo.directory)

    init(ctx, git_repo)
    add_origin(ctx, git_repo, ctx.attr.remote)
    fetch(ctx, git_repo)
    reset(ctx, git_repo)
    clean(ctx, git_repo)

    if git_repo.init_submodules:
        ctx.report_progress("Updating submodules")
        update_submodules(ctx, git_repo)
def init(ctx, git_repo):
    # "git init" is executed directly (not through _git) because the
    # repository working directory does not exist yet.
    cl = ["git", "init", str(git_repo.directory)]
    st = ctx.execute(cl, environment = ctx.os.environ)
    if st.return_code != 0:
        _error(ctx.name, cl, st.stderr)
def add_origin(ctx, git_repo, remote):
    # Register the remote so later fetches can refer to it as "origin".
    _git(ctx, git_repo, "remote", "add", "origin", remote)
def fetch(ctx, git_repo):
    # Fetch the configured reference, or all branches and tags when
    # fetching HEAD (empty fetch_ref, i.e. fetching by commit hash).
    if not git_repo.fetch_ref:
        # We need to explicitly specify to fetch all branches and tags, otherwise only
        # HEAD-reachable is fetched.
        # The semantics of --tags flag of git-fetch have changed in Git 1.9, from 1.9 it means
        # "everything that is already specified and all tags"; before 1.9, it used to mean
        # "ignore what is specified and fetch all tags".
        # The arguments below work correctly for both before 1.9 and after 1.9,
        # as we directly specify the list of references to fetch.
        _git_maybe_shallow(
            ctx,
            git_repo,
            "fetch",
            "origin",
            "refs/heads/*:refs/remotes/origin/*",
            "refs/tags/*:refs/tags/*",
        )
    else:
        _git_maybe_shallow(ctx, git_repo, "fetch", "origin", git_repo.fetch_ref)
def reset(ctx, git_repo):
    # Hard-reset the working tree to the precomputed reference.
    _git(ctx, git_repo, "reset", "--hard", git_repo.reset_ref)
def clean(ctx, git_repo):
    # Remove untracked files and directories, including ignored ones (-x).
    _git(ctx, git_repo, "clean", "-xdf")
def update_submodules(ctx, git_repo):
    # Force-checkout submodules, initializing them on first use.
    _git(ctx, git_repo, "submodule", "update", "--init", "--checkout", "--force")
def _get_head_commit(ctx, git_repo):
    # Return the full hash of the checked-out HEAD commit.
    return _git(ctx, git_repo, "log", "-n", "1", "--pretty=format:%H")
def _get_head_date(ctx, git_repo):
    # Return the HEAD commit timestamp in raw format (epoch seconds + offset).
    return _git(ctx, git_repo, "log", "-n", "1", "--pretty=format:%cd", "--date=raw")
def _git(ctx, git_repo, command, *args):
    # Run a git subcommand inside the repository directory; fail the
    # repository rule on a non-zero exit code, otherwise return stdout.
    start = ["git", command]
    st = _execute(ctx, git_repo, start + list(args))
    if st.return_code != 0:
        _error(ctx.name, start + list(args), st.stderr)
    return st.stdout
def _git_maybe_shallow(ctx, git_repo, command, *args):
    # Run a git subcommand, first trying with the shallow option (if any);
    # if the shallow invocation fails (e.g. server rejects it), retry
    # without it before reporting an error.
    start = ["git", command]
    args_list = list(args)
    if git_repo.shallow:
        st = _execute(ctx, git_repo, start + [git_repo.shallow] + args_list)
        if st.return_code == 0:
            return
    st = _execute(ctx, git_repo, start + args_list)
    if st.return_code != 0:
        _error(ctx.name, start + args_list, st.stderr)
def _execute(ctx, git_repo, args):
    # Execute a command in the repository working directory with the
    # caller's environment; returns the execution result struct.
    return ctx.execute(
        args,
        environment = ctx.os.environ,
        working_directory = str(git_repo.directory),
    )
def _error(name, command, stderr):
    # Abort the build, reporting the failed command line and its stderr.
    command_text = " ".join([str(item).strip() for item in command])
    fail("error running '%s' while working with @%s:\n%s" % (command_text, name, stderr))
|
class Node_Types:
    """Identifiers used to recognize existing shader nodes (node.type values)."""

    image_texture = "TEX_IMAGE"
    pbr_node = "BSDF_PRINCIPLED"
    mapping = "MAPPING"
    normal_map = "NORMAL_MAP"
    bump_map = "BUMP"
    material_output = "OUTPUT_MATERIAL"
class Shader_Node_Types:
    """Identifiers used to create new shader/compositor nodes."""

    emission = "ShaderNodeEmission"
    image_texture = "ShaderNodeTexImage"
    mapping = "ShaderNodeMapping"
    normal = "ShaderNodeNormalMap"
    ao = "ShaderNodeAmbientOcclusion"
    uv = "ShaderNodeUVMap"
    comp_image_node = "CompositorNodeImage"
    mix = "ShaderNodeMixRGB"
class Bake_Passes:
    """Bake pass names requested for each bake type."""

    pbr = ["EMISSION"]
    lightmap = ["NOISY", "NRM", "COLOR"]
    ao = ["AO", "COLOR"]
class Material_Suffix:
    # Suffix appended to material names for each bake type.
    # NOTE(review): "ao" and "lightmap" share the "_AO" suffix, so their
    # baked materials would collide on name — confirm this is intended.
    bake_type_mat_suffix = {
        "pbr" : "_Bake",
        "ao" : "_AO",
        "lightmap" : "_AO"
    }
|
# https://www.codewars.com/kata/52b757663a95b11b3d00062d/
'''
Instructions :
Write a function toWeirdCase (weirdcase in Ruby) that accepts a string, and returns the same string with all even indexed characters in each word upper cased, and all odd indexed characters in each word lower cased. The indexing just explained is zero based, so the zero-ith index is even, therefore that character should be upper cased.
The passed in string will only consist of alphabetical characters and spaces(' '). Spaces will only be present if there are multiple words. Words will be separated by a single space(' ').
Examples:
to_weird_case('String'); # => returns 'StRiNg'
to_weird_case('Weird string case') # => returns 'WeIrD StRiNg CaSe'
'''
def to_weird_case(string):
    """Return *string* with each word's even-indexed characters uppercased.

    Indexing restarts at zero for every word (words are separated by single
    spaces); odd-indexed characters are lowercased.

    >>> to_weird_case('String')
    'StRiNg'
    >>> to_weird_case('Weird string case')
    'WeIrD StRiNg CaSe'
    """
    def weird_word(word):
        return ''.join(
            ch.upper() if pos % 2 == 0 else ch.lower()
            for pos, ch in enumerate(word)
        )

    # split(' ') / ' '.join preserves the input's spacing exactly.
    return ' '.join(weird_word(word) for word in string.split(' '))
|
"""
https://edabit.com/challenge/ogjDWJAT2kTXEzkD5
https://www.programiz.com/python-programming/args-and-kwargs#:~:text=Python%20has%20*args%20which%20allow,to%20pass%20variable%20length%20arguments.
"""
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [
{
'target_name': 'device_hid',
'type': 'static_library',
'include_dirs': [
'../..',
],
'dependencies': [
'../../components/components.gyp:device_event_log_component',
'../../net/net.gyp:net',
'../core/core.gyp:device_core',
],
'sources': [
'hid_collection_info.cc',
'hid_collection_info.h',
'hid_connection.cc',
'hid_connection.h',
'hid_connection_linux.cc',
'hid_connection_linux.h',
'hid_connection_mac.cc',
'hid_connection_mac.h',
'hid_connection_win.cc',
'hid_connection_win.h',
'hid_device_filter.cc',
'hid_device_filter.h',
'hid_device_info.cc',
'hid_device_info.h',
'hid_device_info_linux.cc',
'hid_device_info_linux.h',
'hid_report_descriptor.cc',
'hid_report_descriptor.h',
'hid_report_descriptor_item.cc',
'hid_report_descriptor_item.h',
'hid_service.cc',
'hid_service.h',
'hid_service_mac.cc',
'hid_service_mac.h',
'hid_service_win.cc',
'hid_service_win.h',
'hid_usage_and_page.cc',
'hid_usage_and_page.h',
],
'conditions': [
['OS=="linux" and use_udev==1', {
'dependencies': [
'../udev_linux/udev.gyp:udev_linux',
],
'sources': [
'fake_input_service_linux.cc',
'fake_input_service_linux.h',
'hid_service_linux.cc',
'hid_service_linux.h',
'input_service_linux.cc',
'input_service_linux.h',
],
}],
['OS=="win"', {
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
'hid.lib',
'setupapi.lib',
],
},
},
},
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
'hid.lib',
'setupapi.lib',
],
},
},
}],
],
},
{
'target_name': 'device_hid_mocks',
'type': 'static_library',
'include_dirs': [
'../..',
],
'dependencies': [
'../../testing/gmock.gyp:gmock',
'device_hid',
],
'sources': [
'mock_hid_service.cc',
'mock_hid_service.h',
],
},
],
}
|
'''
Abstract base class for audio speech and sound command processing. Provides
methods shared among all platform implementations.
Copyright (c) 2008 Carolina Computer Assistive Technology
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
class ChannelBase(object):
    """Abstract base for one audio output channel.

    Queues speech/sound commands, processes them in order, and stalls the
    queue while waiting for deferred results supplied later by the client.
    Subclasses implement say/play/setProperty/getConfig/reset.
    """

    def __init__(self, ch_id):
        # unique id for this channel
        self.id = ch_id
        # observer for channel callbacks
        self.observer = None
        # queue of utterances
        self.queue = []
        # deferred results
        self.deferreds = {}
        # latest deferred request id that stalled the queue
        self.stalled_id = None
        # busy flag; used instead of tts and sound busy methods which are
        # not documented as to when they are set and reset
        self.busy = False
        # name assigned by the client to a speech utterance or sound that
        # can be paired with callback data
        self.name = None

    def _processQueue(self):
        """Handle queued commands until the channel is busy, stalled, or drained."""
        while (not self.busy) and len(self.queue):
            # peek at the top command to see if it is deferred
            cmd = self.queue[0]
            reqid = cmd.get('deferred')
            if reqid is not None:
                # check if the deferred result is already available
                result = self.deferreds.get(reqid)
                if result is None:
                    # store the current request ID
                    self.stalled_id = reqid
                    # and stall the queue for now
                    return
                else:
                    # set the deferred result action to that of the original
                    result['action'] = cmd['action']
                    # remove the deferred from the list of deferreds
                    del self.deferreds[reqid]
                    # use the result instead of the original
                    cmd = result
            # handle the next command
            self._handleCommand(cmd)
            # remember to pop the command
            cmd = self.queue.pop(0)

    def _handleCommand(self, cmd):
        """Dispatch a single queued command to the subclass implementation."""
        action = cmd.get('action')
        if action == 'say':
            self.say(cmd)
        elif action == 'play':
            self.play(cmd)
        elif action == 'set-queued':
            self.setProperty(cmd)
        elif action == 'get-config':
            self.getConfig(cmd)
        elif action == 'reset-queued':
            self.reset()

    def setObserver(self, ob):
        """Register the observer that receives channel callbacks."""
        self.observer = ob

    def pushRequest(self, cmd):
        """Accept a client command.

        'stop', 'set-now', 'reset-now' and 'deferred-result' take effect
        immediately; any other command is queued and the queue is pumped.
        """
        action = cmd.get('action')
        if action == 'stop':
            # process stops immediately
            self.stop()
        elif action == 'set-now':
            # process immediate property changes
            self.setProperty(cmd)
        elif action == 'reset-now':
            # process immediate reset of all properties
            self.reset()
        elif action == 'deferred-result':
            # process incoming deferred result
            self.deferred(cmd)
        else:
            # queue command; slight waste of time if we immediately pull it back
            # out again, but it's clean
            self.queue.append(cmd)
            # process the queue
            self._processQueue()

    def deferred(self, cmd):
        """Store an incoming deferred result; resume the queue if stalled on it."""
        try:
            reqid = cmd['deferred']
        except KeyError:
            return
        # put the deferred into holding
        self.deferreds[reqid] = cmd
        # check if this deferred is the one that stalled the pipe
        if reqid == self.stalled_id:
            # if so, pump the queue
            self._processQueue()
        # if not, just continue

    def stop(self):
        """Discard all queued work and pending deferreds; clear busy state."""
        # reset queue and flags
        self.queue = []
        self.busy = False
        self.name = None
        # reset deferreds
        self.stalled_id = None
        self.deferreds = {}

    def shutdown(self):
        """Drop the observer reference when the channel is torn down."""
        self.observer = None
|
# Ask for the car's speed; fine R$7.00 per km/h above the 80 km/h limit.
v = float(input('Digite a velocidade do seu carro: '))
if v > 80:
    # R$7.00 for each km/h over the limit.
    multa = (v - 80) * 7
    print('Você ultrapassou o limite de 80Km/h desta via, valor da multa: R${:.2f}'.format(multa))
|
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 4 13:07:33 2017
@author: James Jiang
"""
# Count the passphrases (one per line, whitespace-separated words) in which
# no two words are anagrams of each other (Advent of Code 2017 day 4, part 2).
with open('Data.txt') as f:
    all_lines = [line.split() for line in f]

total = 0
for words in all_lines:
    # Two words are anagrams iff their sorted characters match, so a line is
    # valid iff every word has a distinct sorted-character signature. This
    # replaces the original O(w^2) pairwise comparison per line with one
    # set build; empty lines still count as valid, as before.
    signatures = {tuple(sorted(word)) for word in words}
    if len(signatures) == len(words):
        total += 1
print(total)
|
# Read one space-separated line and print each even-length word on its own line.
words = input().split(" ")
for candidate in words:
    if len(candidate) % 2 == 0:
        print(candidate)
# Definition for singly-linked list.
class ListNode:
    """Node of a singly linked list."""

    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next


class Solution:
    """Insertion sort over a singly linked list (LeetCode 147)."""

    def insertionSortList(self, head: ListNode) -> ListNode:
        """Return the nodes reachable from *head* sorted ascending by val."""
        if head is None or head.next is None:
            return head
        sorted_head = None
        node = head
        while node is not None:
            rest = node.next
            node.next = None  # detach before inserting
            sorted_head = self.insertSort(sorted_head, node)
            node = rest
        return sorted_head

    def insertSort(self, root, node):
        """Insert *node* into the sorted list *root*; return the new head."""
        if root is None or node.val < root.val:
            node.next = root
            return node
        cur = root
        while cur.next is not None and cur.next.val < node.val:
            cur = cur.next
        node.next = cur.next
        cur.next = node
        return root
|
#
# PySNMP MIB module APCUPS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/APCUPS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:07:20 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
enterprises, Unsigned32, TimeTicks, NotificationType, IpAddress, Counter32, Counter64, Gauge32, iso, ModuleIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, ObjectIdentity, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "enterprises", "Unsigned32", "TimeTicks", "NotificationType", "IpAddress", "Counter32", "Counter64", "Gauge32", "iso", "ModuleIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "ObjectIdentity", "Integer32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# ---------------------------------------------------------------------------
# DMI textual conventions (pysmi-generated): thin subclasses of the standard
# SNMP base types, kept as distinct names so the MIB objects below can use
# the DMI-specific type names from the original MIF definition.
# ---------------------------------------------------------------------------
class DmiCounter(Counter32):
    # Plain Counter32 under a DMI-specific name; no refinements.
    pass
class DmiGauge(Gauge32):
    # Plain Gauge32 under a DMI-specific name; no refinements.
    pass
class DmiInteger(Integer32):
    # Plain Integer32 under a DMI-specific name; no refinements.
    pass
class DmiDisplaystring(DisplayString):
    # Plain DisplayString under a DMI-specific name; no refinements.
    pass
class DmiDateX(OctetString):
    # Fixed-length date string: constrained to exactly 28 octets.
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(28, 28)
    fixedLength = 28
class DmiComponentIndex(Integer32):
    # Integer used as the row index for every table in this MIB.
    pass
# ---------------------------------------------------------------------------
# OID registration hierarchy:
#   enterprises.apc(318).products(1).software(2).powerChuteDMIAgent(2)
#   .dmtfGroups(1)
# ---------------------------------------------------------------------------
apc = MibIdentifier((1, 3, 6, 1, 4, 1, 318))
products = MibIdentifier((1, 3, 6, 1, 4, 1, 318, 1))
software = MibIdentifier((1, 3, 6, 1, 4, 1, 318, 1, 2))
powerChuteDMIAgent = MibIdentifier((1, 3, 6, 1, 4, 1, 318, 1, 2, 2))
dmtfGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1))
# --- ComponentID group (table 1): identity of the managed component. ------
tComponentid = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1), )
if mibBuilder.loadTexts: tComponentid.setStatus('mandatory')
eComponentid = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eComponentid.setStatus('mandatory')
a1Manufacturer = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 1), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Manufacturer.setStatus('mandatory')
a1Product = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Product.setStatus('mandatory')
a1Version = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Version.setStatus('mandatory')
a1SerialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1SerialNumber.setStatus('mandatory')
a1Installation = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 5), DmiDateX()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Installation.setStatus('mandatory')
a1Verify = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vAnErrorOccuredCheckStatusCode", 0), ("vThisComponentDoesNotExist", 1), ("vVerificationIsNotSupported", 2), ("vReserved", 3), ("vThisComponentExistsButTheFunctionalityI", 4), ("vThisComponentExistsButTheFunctionality1", 5), ("vThisComponentExistsAndIsNotFunctioningC", 6), ("vThisComponentExistsAndIsFunctioningCorr", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Verify.setStatus('mandatory')
# --- UPS Battery group (table 2): battery status and measurements. --------
tUpsBattery = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2), )
if mibBuilder.loadTexts: tUpsBattery.setStatus('mandatory')
eUpsBattery = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eUpsBattery.setStatus('mandatory')
a2BatteryStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vUnknown", 1), ("vBatteryNormal", 2), ("vBatteryLow", 3), ("vBatteryDepleted", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2BatteryStatus.setStatus('mandatory')
a2SecondsOnBattery = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 2), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2SecondsOnBattery.setStatus('mandatory')
a2EstimatedMinutesRemaining = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2EstimatedMinutesRemaining.setStatus('mandatory')
a2EstimatedChargeRemaining = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 4), DmiGauge()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2EstimatedChargeRemaining.setStatus('mandatory')
a2BatteryVoltage = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 5), DmiGauge()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2BatteryVoltage.setStatus('mandatory')
a2BatteryCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 6), DmiGauge()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2BatteryCurrent.setStatus('mandatory')
a2TemperatureProbeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2TemperatureProbeIndex.setStatus('mandatory')
a2FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2FruGroupIndex.setStatus('mandatory')
a2OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 2, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2OperationalGroupIndex.setStatus('mandatory')
tTemperatureProbe = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3), )
if mibBuilder.loadTexts: tTemperatureProbe.setStatus('mandatory')
eTemperatureProbe = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eTemperatureProbe.setStatus('mandatory')
a3TemperatureProbeTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeTableIndex.setStatus('mandatory')
a3TemperatureProbeLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vProcessor", 3), ("vDisk", 4), ("vPeripheralBay", 5), ("vSmbMaster", 6), ("vMotherboard", 7), ("vMemoryModule", 8), ("vProcessorModule", 9), ("vPowerUnit", 10), ("vAdd-inCard", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeLocation.setStatus('mandatory')
a3TemperatureProbeDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeDescription.setStatus('mandatory')
a3TemperatureStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vNon-critical", 4), ("vCritical", 5), ("vNon-recoverable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureStatus.setStatus('mandatory')
a3TemperatureProbeTemperatureReading = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeTemperatureReading.setStatus('mandatory')
a3MonitoredTemperatureNominalReading = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3MonitoredTemperatureNominalReading.setStatus('mandatory')
a3MonitoredTemperatureNormalMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3MonitoredTemperatureNormalMaximum.setStatus('mandatory')
a3MonitoredTemperatureNormalMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3MonitoredTemperatureNormalMinimum.setStatus('mandatory')
a3TemperatureProbeMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeMaximum.setStatus('mandatory')
a3TemperatureProbeMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3TemperatureProbeMinimum.setStatus('mandatory')
a3TemperatureReadingLowerThreshold_Non_c = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 11), DmiInteger()).setLabel("a3TemperatureReadingLowerThreshold-Non-c").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingLowerThreshold_Non_c.setStatus('mandatory')
a3TemperatureReadingUpperThreshold_Non_c = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 12), DmiInteger()).setLabel("a3TemperatureReadingUpperThreshold-Non-c").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingUpperThreshold_Non_c.setStatus('mandatory')
a3TemperatureReadingLowerThreshold_Criti = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 13), DmiInteger()).setLabel("a3TemperatureReadingLowerThreshold-Criti").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingLowerThreshold_Criti.setStatus('mandatory')
a3TemperatureReadingUpperThreshold_Criti = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 14), DmiInteger()).setLabel("a3TemperatureReadingUpperThreshold-Criti").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingUpperThreshold_Criti.setStatus('mandatory')
a3TemperatureReadingLowerThreshold_Non_r = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 15), DmiInteger()).setLabel("a3TemperatureReadingLowerThreshold-Non-r").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingLowerThreshold_Non_r.setStatus('mandatory')
a3TemperatureReadingUpperThreshold_Non_r = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 16), DmiInteger()).setLabel("a3TemperatureReadingUpperThreshold-Non-r").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureReadingUpperThreshold_Non_r.setStatus('mandatory')
a3TemperatureProbeResolution = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 17), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureProbeResolution.setStatus('mandatory')
a3TemperatureProbeTolerance = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 18), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureProbeTolerance.setStatus('mandatory')
a3TemperatureProbeAccuracy = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 19), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3TemperatureProbeAccuracy.setStatus('mandatory')
a3FruGroupIndex = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 20), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3FruGroupIndex.setStatus('mandatory')
a3OperationalGroupIndex = MibScalar((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 3, 1, 21), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3OperationalGroupIndex.setStatus('mandatory')
tOperationalStateTable = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4), )
if mibBuilder.loadTexts: tOperationalStateTable.setStatus('mandatory')
eOperationalStateTable = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"), (0, "APCUPS-MIB", "a4OperationalStateInstanceIndex"))
if mibBuilder.loadTexts: eOperationalStateTable.setStatus('mandatory')
a4OperationalStateInstanceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4OperationalStateInstanceIndex.setStatus('mandatory')
a4DeviceGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4DeviceGroupIndex.setStatus('mandatory')
a4OperationalStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vEnabled", 3), ("vDisabled", 4), ("vNotApplicable", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4OperationalStatus.setStatus('mandatory')
a4UsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vIdle", 3), ("vActive", 4), ("vBusy", 5), ("vNotApplicable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4UsageState.setStatus('mandatory')
a4AvailabilityStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vRunning", 3), ("vWarning", 4), ("vInTest", 5), ("vNotApplicable", 6), ("vPowerOff", 7), ("vOffLine", 8), ("vOffDuty", 9), ("vDegraded", 10), ("vNotInstalled", 11), ("vInstallError", 12), ("vPowerSave", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4AvailabilityStatus.setStatus('mandatory')
a4AdministrativeState = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vLocked", 3), ("vUnlocked", 4), ("vNotApplicable", 5), ("vShuttingDown", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4AdministrativeState.setStatus('mandatory')
a4FatalErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 7), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4FatalErrorCount.setStatus('mandatory')
a4MajorErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 8), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4MajorErrorCount.setStatus('mandatory')
a4WarningErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 9), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4WarningErrorCount.setStatus('mandatory')
a4CurrentErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 4, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vNon-critical", 4), ("vCritical", 5), ("vNon-recoverable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4CurrentErrorStatus.setStatus('mandatory')
tDiagnostics = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5), )
if mibBuilder.loadTexts: tDiagnostics.setStatus('mandatory')
eDiagnostics = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"), (0, "APCUPS-MIB", "a5DiagnosticFunctionTableIndex"))
if mibBuilder.loadTexts: eDiagnostics.setStatus('mandatory')
a5DiagnosticFunctionTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5DiagnosticFunctionTableIndex.setStatus('mandatory')
a5DiagnosticFunctionName = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5DiagnosticFunctionName.setStatus('mandatory')
a5DiagnosticFunctionDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5DiagnosticFunctionDescription.setStatus('mandatory')
a5ExclusiveAccessRequired = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5ExclusiveAccessRequired.setStatus('mandatory')
a5PrerequisiteConditions = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNoPrerequisites", 3), ("vWrapPlugInstalled", 4), ("vNoMediaInstalled", 5), ("vScratchMediaInstalled", 6), ("vTestMediaInstalled", 7), ("vSystemReferenceDisketteInstalled", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5PrerequisiteConditions.setStatus('mandatory')
a5PrerequisiteDiagnosticFunction = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 5, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5PrerequisiteDiagnosticFunction.setStatus('mandatory')
tDiagnosticRequestGroup = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6), )
if mibBuilder.loadTexts: tDiagnosticRequestGroup.setStatus('mandatory')
eDiagnosticRequestGroup = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eDiagnosticRequestGroup.setStatus('mandatory')
a6DiagnosticFunctionReserveKey = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1, 1), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a6DiagnosticFunctionReserveKey.setStatus('mandatory')
a6DiagnosticFunctionRequest = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1, 2), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a6DiagnosticFunctionRequest.setStatus('mandatory')
a6DiagnosticFunctionResult = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 6, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6DiagnosticFunctionResult.setStatus('mandatory')
tDiagnosticResults = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7), )
if mibBuilder.loadTexts: tDiagnosticResults.setStatus('mandatory')
eDiagnosticResults = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"), (0, "APCUPS-MIB", "a7DiagnosticFunctionId"), (0, "APCUPS-MIB", "a7DiagnosticFunctionResult"))
if mibBuilder.loadTexts: eDiagnosticResults.setStatus('mandatory')
a7DiagnosticFunctionId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7DiagnosticFunctionId.setStatus('mandatory')
a7DiagnosticFunctionResult = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7DiagnosticFunctionResult.setStatus('mandatory')
a7DiagnosticFunctionResultDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7DiagnosticFunctionResultDescription.setStatus('mandatory')
a7FaultIsolatedToThisComponent = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7FaultIsolatedToThisComponent.setStatus('mandatory')
tErrorControlGroup = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8), )
if mibBuilder.loadTexts: tErrorControlGroup.setStatus('mandatory')
eErrorControlGroup = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eErrorControlGroup.setStatus('mandatory')
a8Selfid = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8Selfid.setStatus('mandatory')
a8NumberOfFatalErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 2), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8NumberOfFatalErrors.setStatus('mandatory')
a8NumberOfMajorErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 3), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8NumberOfMajorErrors.setStatus('mandatory')
a8NumberOfWarnings = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 4), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8NumberOfWarnings.setStatus('mandatory')
a8ErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("vOk", 0), ("vWarning", 1), ("vMajor", 2), ("vFatal", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8ErrorStatus.setStatus('mandatory')
a8ErrorStatusType = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("vPost", 0), ("vRuntime", 1), ("vDiagnosticTest", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a8ErrorStatusType.setStatus('mandatory')
a8AlarmGeneration = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 8, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOff", 0), ("vOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a8AlarmGeneration.setStatus('mandatory')
tMiftomib = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99), )
if mibBuilder.loadTexts: tMiftomib.setStatus('mandatory')
eMiftomib = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eMiftomib.setStatus('mandatory')
a99MibName = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1, 1), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a99MibName.setStatus('mandatory')
a99MibOid = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a99MibOid.setStatus('mandatory')
a99DisableTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 99, 1, 3), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a99DisableTrap.setStatus('mandatory')
tTrapGroup = MibTable((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999), )
if mibBuilder.loadTexts: tTrapGroup.setStatus('mandatory')
eTrapGroup = MibTableRow((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1), ).setIndexNames((0, "APCUPS-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eTrapGroup.setStatus('mandatory')
a9999ErrorTime = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorTime.setStatus('mandatory')
a9999ErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorStatus.setStatus('mandatory')
a9999ErrorGroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorGroupId.setStatus('mandatory')
a9999ErrorInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorInstanceId.setStatus('mandatory')
a9999ComponentId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ComponentId.setStatus('mandatory')
a9999GroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999GroupId.setStatus('mandatory')
a9999InstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999InstanceId.setStatus('mandatory')
a9999VendorCode1 = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999VendorCode1.setStatus('mandatory')
a9999VendorCode2 = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999VendorCode2.setStatus('mandatory')
a9999VendorText = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 10), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999VendorText.setStatus('mandatory')
a9999ParentGroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ParentGroupId.setStatus('mandatory')
a9999ParentInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1, 12), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ParentInstanceId.setStatus('mandatory')
pwrchuteEventError = NotificationType((1, 3, 6, 1, 4, 1, 318, 1, 2, 2, 1, 9999, 1) + (0,1)).setObjects(("APCUPS-MIB", "a9999ErrorTime"), ("APCUPS-MIB", "a9999ErrorStatus"), ("APCUPS-MIB", "a9999ErrorGroupId"), ("APCUPS-MIB", "a9999ErrorInstanceId"), ("APCUPS-MIB", "a9999ComponentId"), ("APCUPS-MIB", "a9999GroupId"), ("APCUPS-MIB", "a9999InstanceId"), ("APCUPS-MIB", "a9999VendorCode1"), ("APCUPS-MIB", "a9999VendorCode2"), ("APCUPS-MIB", "a9999VendorText"), ("APCUPS-MIB", "a9999ParentGroupId"), ("APCUPS-MIB", "a9999ParentInstanceId"))
mibBuilder.exportSymbols("APCUPS-MIB", a9999GroupId=a9999GroupId, a4AvailabilityStatus=a4AvailabilityStatus, dmtfGroups=dmtfGroups, a8ErrorStatusType=a8ErrorStatusType, a2EstimatedMinutesRemaining=a2EstimatedMinutesRemaining, a9999ErrorInstanceId=a9999ErrorInstanceId, a6DiagnosticFunctionReserveKey=a6DiagnosticFunctionReserveKey, a1Installation=a1Installation, a3TemperatureProbeTemperatureReading=a3TemperatureProbeTemperatureReading, a3TemperatureReadingUpperThreshold_Criti=a3TemperatureReadingUpperThreshold_Criti, a3TemperatureProbeMaximum=a3TemperatureProbeMaximum, a9999ErrorStatus=a9999ErrorStatus, tDiagnosticResults=tDiagnosticResults, a9999ErrorTime=a9999ErrorTime, a5DiagnosticFunctionDescription=a5DiagnosticFunctionDescription, a5PrerequisiteConditions=a5PrerequisiteConditions, DmiCounter=DmiCounter, a3MonitoredTemperatureNormalMinimum=a3MonitoredTemperatureNormalMinimum, tOperationalStateTable=tOperationalStateTable, a9999ParentInstanceId=a9999ParentInstanceId, a8ErrorStatus=a8ErrorStatus, powerChuteDMIAgent=powerChuteDMIAgent, a2FruGroupIndex=a2FruGroupIndex, tUpsBattery=tUpsBattery, a5ExclusiveAccessRequired=a5ExclusiveAccessRequired, a5DiagnosticFunctionTableIndex=a5DiagnosticFunctionTableIndex, a99MibOid=a99MibOid, a6DiagnosticFunctionResult=a6DiagnosticFunctionResult, a3FruGroupIndex=a3FruGroupIndex, tDiagnostics=tDiagnostics, a9999InstanceId=a9999InstanceId, a4OperationalStateInstanceIndex=a4OperationalStateInstanceIndex, eErrorControlGroup=eErrorControlGroup, a6DiagnosticFunctionRequest=a6DiagnosticFunctionRequest, a9999ParentGroupId=a9999ParentGroupId, a2OperationalGroupIndex=a2OperationalGroupIndex, DmiDisplaystring=DmiDisplaystring, tComponentid=tComponentid, a4OperationalStatus=a4OperationalStatus, a3TemperatureReadingUpperThreshold_Non_c=a3TemperatureReadingUpperThreshold_Non_c, a3TemperatureProbeLocation=a3TemperatureProbeLocation, a3TemperatureProbeTolerance=a3TemperatureProbeTolerance, a4MajorErrorCount=a4MajorErrorCount, 
a3TemperatureProbeAccuracy=a3TemperatureProbeAccuracy, a1Version=a1Version, a3TemperatureProbeDescription=a3TemperatureProbeDescription, a4DeviceGroupIndex=a4DeviceGroupIndex, a9999VendorCode2=a9999VendorCode2, a3MonitoredTemperatureNormalMaximum=a3MonitoredTemperatureNormalMaximum, eOperationalStateTable=eOperationalStateTable, a3TemperatureReadingLowerThreshold_Non_c=a3TemperatureReadingLowerThreshold_Non_c, eDiagnostics=eDiagnostics, a99DisableTrap=a99DisableTrap, a2EstimatedChargeRemaining=a2EstimatedChargeRemaining, eTrapGroup=eTrapGroup, DmiInteger=DmiInteger, eTemperatureProbe=eTemperatureProbe, a3TemperatureProbeResolution=a3TemperatureProbeResolution, a2BatteryCurrent=a2BatteryCurrent, a1Manufacturer=a1Manufacturer, a3OperationalGroupIndex=a3OperationalGroupIndex, a1Verify=a1Verify, a9999ComponentId=a9999ComponentId, a8NumberOfFatalErrors=a8NumberOfFatalErrors, apc=apc, a3TemperatureProbeMinimum=a3TemperatureProbeMinimum, a3TemperatureStatus=a3TemperatureStatus, a7DiagnosticFunctionResult=a7DiagnosticFunctionResult, a1Product=a1Product, tTemperatureProbe=tTemperatureProbe, a7DiagnosticFunctionId=a7DiagnosticFunctionId, eMiftomib=eMiftomib, a4WarningErrorCount=a4WarningErrorCount, a8NumberOfMajorErrors=a8NumberOfMajorErrors, a3TemperatureReadingLowerThreshold_Criti=a3TemperatureReadingLowerThreshold_Criti, a2BatteryVoltage=a2BatteryVoltage, a9999ErrorGroupId=a9999ErrorGroupId, a5PrerequisiteDiagnosticFunction=a5PrerequisiteDiagnosticFunction, pwrchuteEventError=pwrchuteEventError, DmiComponentIndex=DmiComponentIndex, eDiagnosticResults=eDiagnosticResults, a9999VendorCode1=a9999VendorCode1, DmiGauge=DmiGauge, eDiagnosticRequestGroup=eDiagnosticRequestGroup, a8Selfid=a8Selfid, eComponentid=eComponentid, a4FatalErrorCount=a4FatalErrorCount, a8NumberOfWarnings=a8NumberOfWarnings, a3TemperatureProbeTableIndex=a3TemperatureProbeTableIndex, a2TemperatureProbeIndex=a2TemperatureProbeIndex, products=products, tErrorControlGroup=tErrorControlGroup, 
a4CurrentErrorStatus=a4CurrentErrorStatus, a4UsageState=a4UsageState, a2SecondsOnBattery=a2SecondsOnBattery, a3TemperatureReadingLowerThreshold_Non_r=a3TemperatureReadingLowerThreshold_Non_r, software=software, tMiftomib=tMiftomib, a3MonitoredTemperatureNominalReading=a3MonitoredTemperatureNominalReading, tTrapGroup=tTrapGroup, a2BatteryStatus=a2BatteryStatus, DmiDateX=DmiDateX, a99MibName=a99MibName, a1SerialNumber=a1SerialNumber, eUpsBattery=eUpsBattery, a8AlarmGeneration=a8AlarmGeneration, a4AdministrativeState=a4AdministrativeState, tDiagnosticRequestGroup=tDiagnosticRequestGroup, a3TemperatureReadingUpperThreshold_Non_r=a3TemperatureReadingUpperThreshold_Non_r, a7FaultIsolatedToThisComponent=a7FaultIsolatedToThisComponent, a7DiagnosticFunctionResultDescription=a7DiagnosticFunctionResultDescription, a5DiagnosticFunctionName=a5DiagnosticFunctionName, a9999VendorText=a9999VendorText)
|
# -*- coding: utf-8 -*-
# MIT License
# Copyright (c) 2019 Ronnasayd de Sousa Machado
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# VERSION: 5.1.1-beta #
################################################################
##                        NGINX TEMPLATE                        ##
################################################################
# Base nginx configuration template, rendered with str.format().  Doubled
# braces ({{ }}) are literal nginx braces; single-brace fields are
# substitution placeholders: STATIC_ROOT, MEDIA_ROOT, LOGS_ROOT, WEB_PORT,
# SERVERS (pre-formatted upstream "server ...;" lines — TODO confirm format
# against caller) and NGINX_SNIPPET_HTTPS (the snippet constant below).
# NOTE(review): the name is misspelled ("CONFIGURATIN") but is kept as-is
# because external callers may already reference it.
# NOTE(review): proxy_cache_path points at {STATIC_ROOT}, so the proxy cache
# shares the static-files directory — looks intentional here, but confirm.
NGINX_CONFIGURATIN_BASE = '''
worker_processes 1;
events {{
worker_connections 1024;
}}
http {{
proxy_cache_path {STATIC_ROOT} levels=1:2 keys_zone=my_cache:10m max_size=10g
inactive=60m use_temp_path=off;
default_type application/octet-stream;
include /etc/nginx/mime.types;
log_format compression '$remote_addr - $remote_user [$time_local] '
'"$request" $status $body_bytes_sent '
'"$http_referer" "$http_user_agent" "$gzip_ratio"';
sendfile on;
gzip on;
gzip_http_version 1.0;
gzip_proxied any;
gzip_min_length 500;
gzip_disable "MSIE [1-6]\\.";
gzip_types text/plain text/xml text/css
text/comma-separated-values
text/javascript
application/x-javascript
application/atom+xml;
# Configuration containing list of application servers
upstream app_servers {{
ip_hash;
{SERVERS}
}}
# Configuration for Nginx
server {{
#access_log {LOGS_ROOT}/access.log compression;
error_log {LOGS_ROOT}/error.log warn;
# Running port
listen {WEB_PORT};
listen [::]:{WEB_PORT};
listen 80;
listen [::]:80;
{NGINX_SNIPPET_HTTPS}
# Max_size
client_max_body_size 20M;
# Settings to serve static files
location /static/ {{
# Example:
# root /full/path/to/application/static/file/dir;
autoindex on;
alias {STATIC_ROOT}/;
}}
location /media/ {{
autoindex on;
alias {MEDIA_ROOT}/;
}}
# Proxy connections to the application servers
# app_servers
location / {{
proxy_cache my_cache;
proxy_cache_revalidate on;
proxy_cache_min_uses 3;
proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
proxy_cache_background_update on;
proxy_cache_lock on;
proxy_pass http://app_servers;
proxy_redirect off;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $server_name;
}}
}}
}}
'''
####################################################################
## NGINX_SNIPPET TEMPLATE ##
####################################################################
# HTTPS/ACME server snippet spliced into NGINX_CONFIGURATIN_BASE via its
# {NGINX_SNIPPET_HTTPS} placeholder.  Rendered with str.format(); doubled
# braces are literal nginx braces.  Placeholders: WEB_ROOT_PATH,
# SERVER_DNS_NAMES (space-separated server names).
# Fix: the `index` directive listed "intex.htm" — a typo'd filename nginx
# would look up verbatim, so "index.htm" pages were never served.
NGINX_SNIPPET_HTTPS = '''
root {WEB_ROOT_PATH};
index index.html index.htm index.nginx-debian.html;
server_name {SERVER_DNS_NAMES};
location ~ /.well-known/acme-challenge{{
allow all;
root {WEB_ROOT_PATH};
}}'''
####################################################################
## NGINX_SCRIPT TEMPLATE ##
####################################################################
NGINX_CERT_SCRIPT='''certbot certonly --webroot --webroot-path={WEB_ROOT_PATH} --agree-tos --no-eff-email --force-renewal {SERVER_NAMES} && certbot --nginx'''
####################################################################
## GULPFILE TEMPLATE ##
####################################################################
GULPFILE_BASE = '''
const gulp = require("gulp");
const browserSync = require("browser-sync").create();
const sass = require("gulp-sass");
const rename = require("gulp-rename");
const autoprefixer = require("gulp-autoprefixer");
const uglify = require("gulp-uglify");
const sourcemaps = require("gulp-sourcemaps");
const imagemin = require("gulp-imagemin");
const cleanCSS = require("gulp-clean-css");
const purgecss = require("gulp-purgecss");
const cache = require("gulp-cached");
const minimist = require("minimist");
const concat = require("gulp-concat");
const sassPartials = require('gulp-sass-partials-imported');
const jshint = require('gulp-jshint');
const src_scss = "static/src/scss/**/*.scss";
const src_css = "static/src/css/**/*.css";
const src_js = "static/src/js/**/*.js";
const images_folder = "static/images/**/*.{{png,jpeg,jpg,svg,ico}}";
const not_node = "!node_modules/"
const dist_js = "static/dist/js/"
const dist_css = "static/dist/css/"
const html_files = "**/*.html"
const jsHint = ()=>{{
return gulp.src([src_js,not_node],{{allowEmpty: true}})
.pipe(cache("jsHint"))
.pipe(jshint())
.pipe(jshint.reporter('default'));
}}
const minifyJs = ()=>{{
return gulp.src([src_js,not_node],{{allowEmpty: true}})
.pipe(cache("minifyJs"))
.pipe(sourcemaps.init())
.pipe(uglify()).on("error",function(err){{
console.log(err.message);
console.log(err.cause);
browserSync.notify(err.message, 3000); // Display error in the browser
this.emit("end"); // Prevent gulp from catching the error and exiting the watch process
}})
.pipe(rename(function(file){{
file.extname = ".min.js"
}}))
.pipe(sourcemaps.write('./'))
.pipe(gulp.dest(dist_js))
}}
const sassToCssMin = ()=>{{
return gulp.src([src_scss,"!_*.scss",not_node],{{allowEmpty: true}})
.pipe(cache("sassToCssMin"))
.pipe(sassPartials("static/src/scss"))
.pipe(sourcemaps.init({{loadMaps: true, largeFile: true}}))
.pipe(sass({{
errLogToConsole: true,
indentedSyntax: false,
}}).on("error",function(err){{
console.log(err.message);
browserSync.notify(err.message, 3000); // Display error in the browser
this.emit("end"); // Prevent gulp from catching the error and exiting the watch process
}}))
.pipe(autoprefixer({{
browsers: ["last 100 versions"],
cascade: false
}}))
.pipe(purgecss({{content: [html_files,not_node]}}))
.on("error",function(err){{
console.log(err.message,err);
browserSync.notify(err.message, 3000); // Display error in the browser
this.emit("end"); // Prevent gulp from catching the error and exiting the watch process
}})
.pipe(cleanCSS())
.pipe(rename(function(file){{
file.extname = ".min.css"
}}))
.pipe(sourcemaps.write('./'))
.pipe(gulp.dest(dist_css))
.pipe(browserSync.stream())
}}
const minifyCss = ()=>{{
return gulp.src([src_css,"!_*.css", not_node],{{allowEmpty: true}})
.pipe(cache("minifyCss"))
.pipe(sourcemaps.init({{loadMaps: true, largeFile: true}}))
.pipe(autoprefixer({{
browsers: ["last 100 versions"],
cascade: false
}}))
.pipe(purgecss({{content: [html_files,not_node]}}))
.on("error",function(err){{
console.log(err.message,err);
browserSync.notify(err.message, 3000); // Display error in the browser
this.emit("end"); // Prevent gulp from catching the error and exiting the watch process
}})
.pipe(cleanCSS())
.pipe(rename(function(file){{
file.extname = ".min.css"
}}))
.pipe(sourcemaps.write('./'))
.pipe(gulp.dest(dist_css))
.pipe(browserSync.stream())
}}
// gulp concatfiles --files <list_of_files:file1,file2,file3> --name <name_of_file:all.js> --dist <destination>
const concatFiles = ()=>{{
let options = minimist(process.argv.slice(2));
console.log("files: "+options.files);
console.log("name: "+options.name);
console.log("dist: "+options.dist);
return gulp.src(options.files.split(","),{{base: "./", allowEmpty: true}})
.pipe(cache("concatFiles"))
.pipe(sourcemaps.init())
.pipe(concat(options.name))
.pipe(sourcemaps.write("./"))
.pipe(gulp.dest(options.dist))
}}
const browserReload = (done)=>{{
browserSync.reload();
done();
}}
const minifyImages =()=>{{
return gulp.src([images_folder,not_node],{{base: "./",allowEmpty: true}})
.pipe(cache("minifyImages"))
.pipe(imagemin([
imagemin.gifsicle({{interlaced: true}}),
imagemin.jpegtran({{progressive: true}}),
imagemin.optipng({{optimizationLevel: 5}}),
imagemin.svgo({{
plugins: [
{{removeViewBox: true}},
{{cleanupIDs: false}}
]
}})
]))
.pipe(gulp.dest("./"))
}}
const js_line = gulp.series(jsHint,minifyJs);
const sass_line = gulp.series(sassToCssMin)
const css_line = gulp.series(minifyCss);
const image_line = gulp.series(minifyImages);
const browserSyncServer = ()=>{{
browserSync.init({{
open: false,
proxy: {{
target: "http://{WEB_CONTAINER_NAME}:{WEB_PORT}",
ws: true,
}}
}});
gulp.watch(src_scss, {{interval: 100, usePolling: true}}, sass_line);
gulp.watch(src_css, {{interval: 100, usePolling: true}}, css_line);
gulp.watch(src_js, {{interval: 100, usePolling: true}}, gulp.series(js_line,browserReload));
gulp.watch(images_folder, {{interval: 100, usePolling: true}}, image_line);
gulp.watch(html_files, {{interval: 100, usePolling: true}}, gulp.series(browserReload));
}}
const server = gulp.series(gulp.parallel(js_line, css_line, sass_line, image_line),browserSyncServer)
exports.concatfiles = concatFiles
exports.default = server
'''
####################################################################
## MAKE AMBIENT SCRIPT ##
####################################################################
MAKE_AMBIENT_BASE='''
#!/bin/bash
if [ ! -d "{PROJECT_NAME}/static" ]; then
mkdir {PROJECT_NAME}/static
mkdir {PROJECT_NAME}/static/src
mkdir {PROJECT_NAME}/static/src/css
mkdir {PROJECT_NAME}/static/src/scss
mkdir {PROJECT_NAME}/static/src/js
fi
if [ ! -f "{PROJECT_NAME}/requirements.txt" ]; then
sed -i "s/\\r$//" {FOLDER_NAME}/requirements.txt
cp {FOLDER_NAME}/requirements.txt ./{PROJECT_NAME}
fi
trap cleanup 1 2 3 6
cleanup()
{{
echo "Caught Signal ... cleaning up."
rm {PROJECT_NAME}/runserver.sh
rm {PROJECT_NAME}/gulpfile.js
rm {PROJECT_NAME}/wait-for-it.sh
rm {PROJECT_NAME}/package.json
echo "Done cleanup ... quitting."
exit 1
}}
chmod +x {FOLDER_NAME}/wait-for-it.sh
sed -i "s/\\r$//" {FOLDER_NAME}/{RUNSERVER_SCRIPT_NAME}
sed -i "s/\\r$//" {FOLDER_NAME}/wait-for-it.sh
sed -i "s/\\r$//" {FOLDER_NAME}/gulpfile.js
sed -i "s/\\r$//" {FOLDER_NAME}/package.json
sed -i "s/\\r$//" {FOLDER_NAME}/ddsettings.py
sed -i "s/\\r$//" {FOLDER_NAME}/ddurls.py
sed -i "s/\\r$//" {FOLDER_NAME}/manage.py
sed -i "s/\\r$//" {FOLDER_NAME}/.dockerignore
cp {FOLDER_NAME}/{RUNSERVER_SCRIPT_NAME} ./{PROJECT_NAME}
cp {FOLDER_NAME}/wait-for-it.sh ./{PROJECT_NAME}
cp {FOLDER_NAME}/gulpfile.js ./{PROJECT_NAME}
cp {FOLDER_NAME}/package.json ./{PROJECT_NAME}
cp {FOLDER_NAME}/manage.py ./{PROJECT_NAME}
cp {FOLDER_NAME}/ddsettings.py ./{PROJECT_NAME}/{PROJECT_NAME}
cp {FOLDER_NAME}/ddurls.py ./{PROJECT_NAME}/{PROJECT_NAME}
cp {FOLDER_NAME}/.dockerignore ./
'''
# Development bring-up script template: stop/tear down both stacks, prune,
# then rebuild and start the development compose stack in the foreground.
# Fix: the first `stop` omitted the `-p dd` project flag that every other
# docker-compose invocation uses, so it addressed the wrong compose project.
MAKE_AMBIENT_DEVELOPMENT='''docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_development.yml stop
docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_production.yml stop
docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_development.yml down
docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_production.yml down
docker system prune --force
docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_development.yml build
COMPOSE_HTTP_TIMEOUT=3600 docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_development.yml up --force-recreate'''
# Production bring-up script template: stop/tear down both stacks, prune,
# build and start the production stack detached, then copy the project into
# the web container and run Django migrations inside it.
MAKE_AMBIENT_PRODUCTION='''docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_production.yml stop
docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_development.yml stop
docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_production.yml down
docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_development.yml down
docker system prune --force
docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_production.yml build
docker-compose -p dd -f {FOLDER_NAME}/{PROJECT_NAME}_production.yml up -d {SCALE} --force-recreate
docker cp ./{PROJECT_NAME}/ {WEB_CONTAINER_NAME}:/.
docker exec {WEB_CONTAINER_NAME} python manage.py makemigrations
docker exec {WEB_CONTAINER_NAME} python manage.py migrate
'''
######################################################################
##                       RUNSERVER SCRIPTS                          ##
######################################################################
# Common prefix: apply pending Django migrations before serving.
RUNSERVER_SCRIPT_BASE='''#!/bin/bash
python manage.py makemigrations
python manage.py migrate'''
# Development suffix: Django's autoreloading dev server on {WEB_PORT}.
RUNSERVER_SCRIPT_DEVELOPMENT='''
python manage.py runserver 0.0.0.0:{WEB_PORT}
'''
# Production suffix: collect static assets, then serve through gunicorn.
RUNSERVER_SCRIPT_PRODUCTION='''
python manage.py collectstatic --noinput
DJANGO_SETTINGS_MODULE={PROJECT_NAME}.{SETTINGS_FILE_NAME} gunicorn --bind=0.0.0.0:{WEB_PORT} --workers=3 {PROJECT_NAME}.wsgi
'''
######################################################################
## WAIT-FOR-IT SCRIPT ##
######################################################################
WAIT_FOR_IT='''#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available
WAITFORIT_cmdname=${0##*/}
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
usage()
{
cat << USAGE >&2
Usage:
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST Host or IP under test
-p PORT | --port=PORT TCP port under test
Alternatively, you specify the host and port as host:port
-s | --strict Only execute subcommand if the test succeeds
-q | --quiet Don't output any status messages
-t TIMEOUT | --timeout=TIMEOUT
Timeout in seconds, zero for no timeout
-- COMMAND ARGS Execute command with args after the test finishes
USAGE
exit 1
}
wait_for()
{
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
else
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
fi
WAITFORIT_start_ts=$(date +%s)
while :
do
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
WAITFORIT_result=$?
else
(echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
WAITFORIT_result=$?
fi
if [[ $WAITFORIT_result -eq 0 ]]; then
WAITFORIT_end_ts=$(date +%s)
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
break
fi
sleep 1
done
return $WAITFORIT_result
}
wait_for_wrapper()
{
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
else
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
fi
WAITFORIT_PID=$!
trap "kill -INT -$WAITFORIT_PID" INT
wait $WAITFORIT_PID
WAITFORIT_RESULT=$?
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
fi
return $WAITFORIT_RESULT
}
# process arguments
while [[ $# -gt 0 ]]
do
case "$1" in
*:* )
WAITFORIT_hostport=(${1//:/ })
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
shift 1
;;
--child)
WAITFORIT_CHILD=1
shift 1
;;
-q | --quiet)
WAITFORIT_QUIET=1
shift 1
;;
-s | --strict)
WAITFORIT_STRICT=1
shift 1
;;
-h)
WAITFORIT_HOST="$2"
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
shift 2
;;
--host=*)
WAITFORIT_HOST="${1#*=}"
shift 1
;;
-p)
WAITFORIT_PORT="$2"
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
shift 2
;;
--port=*)
WAITFORIT_PORT="${1#*=}"
shift 1
;;
-t)
WAITFORIT_TIMEOUT="$2"
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
shift 2
;;
--timeout=*)
WAITFORIT_TIMEOUT="${1#*=}"
shift 1
;;
--)
shift
WAITFORIT_CLI=("$@")
break
;;
--help)
usage
;;
*)
echoerr "Unknown argument: $1"
usage
;;
esac
done
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
echoerr "Error: you need to provide a host and port to test."
usage
fi
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
# check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
WAITFORIT_ISBUSY=1
WAITFORIT_BUSYTIMEFLAG="-t"
else
WAITFORIT_ISBUSY=0
WAITFORIT_BUSYTIMEFLAG=""
fi
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
wait_for
WAITFORIT_RESULT=$?
exit $WAITFORIT_RESULT
else
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
wait_for_wrapper
WAITFORIT_RESULT=$?
else
wait_for
WAITFORIT_RESULT=$?
fi
fi
if [[ $WAITFORIT_CLI != "" ]]; then
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
exit $WAITFORIT_RESULT
fi
exec "${WAITFORIT_CLI[@]}"
else
exit $WAITFORIT_RESULT
fi'''
######################################################################
## SETTINGS FILE ##
######################################################################
SETTINGS='''
from .settings import *
from decouple import config
import os
DEBUG = config('DEBUG', default=False, cast=bool)
STATIC_ROOT = config('STATIC_ROOT')
MEDIA_ROOT = config('MEDIA_ROOT')
STATIC_URL = config('STATIC_URL')
MEDIA_URL = config('MEDIA_URL')
try:
if "default" not in DATABASES:
DATBASE_AUX = {{
"default": {{
'ENGINE': config('DATABASE_ENGINE'),
'HOST': config('DATABASE_HOST'),
'PORT': config('DATABASE_PORT'),
'NAME': config('DATABASE_NAME'),
'USER': config('DATABASE_USER'),
'PASSWORD': config('DATABASE_PASSWORD')
}}
}}
DATABASES.update(DATBASE_AUX)
else:
DATABASES["default"] = {{
'ENGINE': config('DATABASE_ENGINE'),
'HOST': config('DATABASE_HOST'),
'PORT': config('DATABASE_PORT'),
'NAME': config('DATABASE_NAME'),
'USER': config('DATABASE_USER'),
'PASSWORD': config('DATABASE_PASSWORD')
}}
except (KeyError, NameError) as err:
DATABASES = {{
'default': {{
'ENGINE': config('DATABASE_ENGINE'), ## coloque aqui a engine do banco que você vai utilizar ##
'HOST': config('DATABASE_HOST'),
'PORT': config('DATABASE_PORT'),
'NAME': config('DATABASE_NAME'),
'USER': config('DATABASE_USER'),
'PASSWORD': config('DATABASE_PASSWORD')
}}
}}
## CODE IF YOU WILL USE REDIS TO CACHE
if config('REDIS_URL',default=None) != None:
CACHES = {{
"default": {{
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": config('REDIS_URL'),
"OPTIONS": {{
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}}
}}
}}
try:
STATICFILES_DIRS += [
os.path.join(BASE_DIR,"static"),
]
except NameError as err:
STATICFILES_DIRS = [
os.path.join(BASE_DIR,"static"),
]
if DEBUG:
def custom_show_toolbar(request):
return True # Always show toolbar, for example purposes only.
INSTALLED_APPS += [
"debug_toolbar",
"autofixture",
]
MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"]
DEBUG_TOOLBAR_CONFIG = {{
'SHOW_TOOLBAR_CALLBACK': '{PROJECT_NAME}.ddsettings.custom_show_toolbar',
}}
DEBUG_TOOLBAR_PANELS = [
"debug_toolbar.panels.versions.VersionsPanel",
"debug_toolbar.panels.timer.TimerPanel",
"debug_toolbar.panels.settings.SettingsPanel",
"debug_toolbar.panels.headers.HeadersPanel",
"debug_toolbar.panels.request.RequestPanel",
"debug_toolbar.panels.sql.SQLPanel",
"debug_toolbar.panels.staticfiles.StaticFilesPanel",
"debug_toolbar.panels.templates.TemplatesPanel",
"debug_toolbar.panels.cache.CachePanel",
"debug_toolbar.panels.signals.SignalsPanel",
"debug_toolbar.panels.logging.LoggingPanel",
"debug_toolbar.panels.redirects.RedirectsPanel",
]
ROOT_URLCONF = "{PROJECT_NAME}.ddurls"
'''
######################################################################
## MANAGE FILE ##
######################################################################
# Template for a drop-in Django manage.py pointed at the generated settings
# module; {PROJECT_NAME}/{SETTINGS_FILE_NAME} are substituted via str.format.
# Fix: the template body had lost the indentation Python requires inside the
# `if`/`try` blocks, so the generated manage.py would not even parse.
MANAGE='''#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{PROJECT_NAME}.{SETTINGS_FILE_NAME}")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
'''
######################################################################
## PACKAGE.JSON FILE ##
######################################################################
PACKAGEJSON='''{
"name": "django-docker",
"description": "Package.json for development front utilities of django-docker",
"version": "5.1.1-beta",
"main": "index.js",
"author": "Ronnasayd de Sousa Machado",
"license": "MIT",
"homepage": "https://github.com/Ronnasayd/django-docker",
"keywords": [],
"repository": {
"type": "git",
"url": "https://github.com/Ronnasayd/django-docker.git"
},
"bugs": {
"url": "https://github.com/Ronnasayd/django-docker/issues"
},
"dependencies": {
"browser-sync": "latest",
"gulp": "latest",
"gulp-autoprefixer": "latest",
"gulp-cached": "latest",
"gulp-clean-css": "latest",
"gulp-concat": "latest",
"gulp-imagemin": "latest",
"gulp-purgecss": "latest",
"gulp-rename": "latest",
"gulp-sass": "latest",
"gulp-sourcemaps": "latest",
"gulp-uglify": "latest",
"minimist": "latest",
"node-sass": "latest",
"gulp-sass-partials-imported":"latest",
"jshint":"latest",
"gulp-jshint":"latest"
}
}
'''
######################################################################
## DOCKERIGNORE FILE ##
######################################################################
# .dockerignore template: keep node tooling out of the Docker build context.
DOCKERIGNORE = '''
*/node_modules*
*/gulpfile.js*
*/package.json*
'''
######################################################################
## DDURLS FILE ##
######################################################################
# URLconf template: wraps the project's own urls and mounts
# django-debug-toolbar under /__debug__/ (used when DEBUG is on).
DDURLS = '''
from django.urls import path, include
import debug_toolbar
urlpatterns = [
path('', include('{PROJECT_NAME}.urls')),
path('__debug__/', include(debug_toolbar.urls)),
]
'''
# requirements.txt template; DJANGO_VERSION is substituted via str.format.
REQUIREMENTS='''
django=={DJANGO_VERSION}
gunicorn
python-decouple
psycopg2-binary
django-debug-toolbar
pillow
django-autofixture
pip-autoremove
'''
|
class frodo:
    """Toy class demonstrating rich comparison by the `x` attribute."""

    def __init__(self, x):
        self.x = x

    def __lt__(self, other):
        # Fix: `__less__` is not a recognized special method, so `a < b`
        # raised TypeError; the real hook is `__lt__`. The original body
        # also returned the inverted result.
        return self.x < other.x

a = frodo(10)
b = frodo(50)
print(a < b)
# %-style formatting with a mapping: %(lang)s pulls the "lang" key.
# Fix: the mapping said "test", contradicting the documented output below.
print("%(lang)s is fun!" % {"lang": "Python"})
#Output
"""
Python is fun!
"""
|
# forcing a build
def recurring_fibonacci_number(number: int) -> int:
    """
    Calculates the Fibonacci number needed (F(0)=0, F(1)=1).
    :param number: (int) the index of the Fibonacci number to be calculated
    :return: (int) the calculated Fibonacci number
    :raises ValueError: if number is negative
    """
    if number < 0:
        raise ValueError("Fibonacci has to be equal or above zero")
    # Iterative pairwise update: O(n) time instead of the original
    # O(phi**n) double recursion, same results for every valid input.
    previous, current = 0, 1
    for _ in range(number):
        previous, current = current, previous + current
    return previous
|
# Global run-state flags and multipliers for a game loop; defaults are
# restored by reset_multipliers() below. Booleans are perk flags,
# *_mult values scale the corresponding stat (1 = neutral for factors,
# 0 = disabled for additive effects like lifesteal/acid/bleed).
better_eyesight = False
gold_mult = 1
legday_mult = 1
lifesteal_mult = 0
max_health_mult = 1
acid_blood_mult = 0
bleeding = 0
soul_collector = False
soul_eater = False
soul_blast = False
damage_mult = 1
knockback_mult = 1
resistance_mult = 1
enemy_health_mult = 1
def reset_multipliers():
    """Restore every run-state flag/multiplier to its module default.

    Fixes two defects: `enemy_health_mult` was missing from the `global`
    declaration and never reset, and `coins = 0` only created a dead local
    because `coins` was not declared global either.
    """
    global better_eyesight, gold_mult, legday_mult, lifesteal_mult, \
        max_health_mult, acid_blood_mult, bleeding, soul_collector, \
        soul_eater, soul_blast, damage_mult, knockback_mult, \
        resistance_mult, enemy_health_mult, coins
    better_eyesight = False
    gold_mult = 1
    legday_mult = 1
    lifesteal_mult = 0
    max_health_mult = 1
    acid_blood_mult = 0
    bleeding = 0
    soul_collector = False
    soul_eater = False
    soul_blast = False
    damage_mult = 1
    knockback_mult = 1
    resistance_mult = 1
    enemy_health_mult = 1
    coins = 0
|
countdown_3_grid = [{(11, 16): 2, (4, 18): 2, (7, 16): 2, (11, 14): 2, (9, 18): 2, (7, 15): 2, (5, 18): 2, (10, 18): 2, (4, 13): 2, (11, 18): 2, (11, 13): 2, (7, 14): 2, (6, 18): 2, (4, 14): 2, (7, 18): 2, (4, 16): 2, (11, 17): 2, (20, 2): 3, (4, 15): 2, (4, 17): 2, (8, 18): 2, (7, 17): 2, (11, 15): 2}, (20, 2)]
countdown_2_grid = [{(11, 16): 2, (5, 13): 2, (7, 16): 2, (11, 14): 2, (6, 17): 2, (9, 15): 2, (5, 12): 2, (5, 18): 2, (10, 12): 2, (4, 13): 2, (11, 18): 2, (8, 16): 2, (8, 15): 2, (11, 13): 2, (6, 18): 2, (9, 14): 2, (5, 17): 2, (10, 13): 2, (4, 14): 2, (4, 16): 2, (9, 16): 2, (8, 17): 2, (11, 12): 2, (9, 13): 2, (11, 17): 2, (20, 2): 3, (4, 15): 2, (4, 17): 2, (10, 14): 2, (7, 17): 2, (11, 15): 2}, (20, 2)]
countdown_1_grid = [{(11, 16): 2, (5, 14): 2, (11, 17): 2, (7, 15): 2, (9, 15): 2, (20, 2): 3, (4, 15): 2, (10, 15): 2, (5, 15): 2, (11, 14): 2, (11, 15): 2, (6, 14): 2, (6, 15): 2, (8, 15): 2, (11, 13): 2}, (20, 2)]
init_grid = [{(14, 17): 2, (12, 1): 2, (16, 9): 2, (6, 28): 2, (18, 4): 2, (7, 25): 2, (13, 17): 2, (12, 31): 2, (4, 2): 2, (3, 7): 2, (6, 7): 2, (4, 19): 2, (6, 10): 2, (7, 19): 2, (14, 1): 2, (18, 9): 2, (15, 4): 2, (12, 28): 2, (1, 1): 2, (15, 18): 2, (14, 14): 2, (14, 24): 2, (4, 16): 2, (15, 29): 2, (12, 11): 2, (17, 13): 2, (7, 22): 2, (15, 1): 2, (12, 22): 2, (17, 18): 2, (4, 15): 2, (3, 1): 2, (16, 18): 2, (4, 26): 2, (6, 13): 2, (4, 21): 2, (15, 16): 2, (12, 8): 2, (1, 21): 2, (7, 21): 2, (12, 27): 2, (1, 26): 2, (17, 29): 2, (2, 1): 2, (1, 15): 2, (5, 1): 2, (15, 22): 2, (12, 2): 2, (7, 15): 2, (1, 16): 2, (7, 1): 2, (16, 29): 2, (17, 24): 2, (13, 29): 2, (18, 24): 2, (4, 1): 2, (4, 28): 2, (14, 21): 2, (16, 4): 2, (12, 7): 2, (1, 7): 2, (16, 23): 2, (1, 19): 2, (12, 29): 2, (18, 21): 2, (2, 7): 2, (16, 13): 2, (6, 1): 2, (5, 7): 2, (12, 4): 2, (15, 9): 2, (13, 9): 2, (12, 23): 2, (15, 21): 2, (1, 3): 2, (4, 8): 2, (5, 13): 2, (4, 27): 2, (15, 13): 2, (4, 22): 2, (14, 9): 2, (12, 9): 2, (1, 20): 2, (7, 20): 2, (2, 19): 2, (1, 25): 2, (3, 10): 2, (1, 14): 2, (4, 13): 2, (2, 13): 2, (12, 3): 2, (7, 14): 2, (1, 28): 2, (18, 2): 2, (7, 27): 2, (7, 28): 2, (1, 9): 2, (1, 22): 2, (14, 29): 2, (18, 1): 2, (7, 26): 2, (7, 13): 2, (17, 21): 2, (12, 30): 2, (2, 25): 2, (18, 18): 2, (7, 16): 2, (3, 4): 2, (2, 4): 2, (5, 9): 2, (4, 7): 2, (3, 25): 2, (7, 7): 2, (15, 3): 2, (16, 21): 2, (12, 16): 2, (13, 21): 2, (1, 2): 2, (4, 9): 2, (18, 29): 2, (7, 10): 2, (6, 22): 2, (5, 22): 2, (12, 10): 2, (15, 15): 2, (12, 21): 2, (18, 13): 2, (13, 24): 2, (3, 13): 2, (1, 13): 2, (4, 14): 2, (2, 10): 2, (4, 25): 2, (3, 19): 2, (5, 28): 2, (4, 20): 2, (17, 4): 2, (15, 23): 2, (13, 1): 2, (12, 15): 2, (18, 3): 2, (17, 9): 2, (15, 2): 2, (13, 14): 2, (1, 27): 2, (1, 8): 2, (4, 3): 2}]
|
# Instrument setup metadata (looks like a NICOS-style setup file — the
# description/group/includes triple matches that convention; confirm).
description = 'Kompass standard instrument'
group = 'basic'
# Sub-setups pulled in with this one; 'detector' is deliberately toggled off.
includes = ['mono', 'guidefocus', 'selector', 'astrium', 'sample',
            'reactor',
            #'detector',
            ]
|
class GraphQLEnabledModel:
    """Marker base class.

    Models that should be dynamically registered as GraphQL object types
    inherit from this class; it intentionally carries no behavior.
    """
class GraphQLField:
    """Metadata describing how one model field is exposed on a GraphQL
    object type.

    :param name: field name; surrounding whitespace is stripped.
    :param resolve_func: optional callable used to resolve this field.
    :type resolve_func: callable
    :param graphql_type: optional Graphene type used by the field.
    :raises TypeError: if ``name`` is not a string.
    :raises ValueError: if ``name`` is empty after stripping.
    """

    def __init__(self, name, resolve_func=None, graphql_type=None):
        # Guard clauses: validate before storing anything.
        if not isinstance(name, str):
            raise TypeError('Name has to be a string')
        stripped = name.strip()
        if not stripped:
            raise ValueError('Field name cannot be empty')
        self.__name = stripped
        self.__resolve_func = resolve_func
        self.__graphql_type = graphql_type

    @property
    def name(self):
        """Normalized field name."""
        return self.__name

    @property
    def resolve_func(self):
        """Custom resolver, or None for the default."""
        return self.__resolve_func

    @property
    def graphql_type(self):
        """Graphene type, or None if unspecified."""
        return self.__graphql_type
|
"""Websauna Depot models.
Place your SQLAlchemy models in this file.
"""
|
# Use this to take notes on the Edpuzzle video. Try each example rather than just watching it - you will get much more out of it!
# Most things are commented out because they can't all coexist without a syntax error
# Fix: "courses" held one string "History, CompSci"; the intent is two courses.
user = {"name": "Kasey", "age": 15, "courses": ["History", "CompSci"]}
for key, value in user.items():
    print(key, value)
#print(user.items())
#print(user.values())
#print(user.keys())
#print(len(user))
#age=user.pop("age")
# del user['age']
'''user.update({"name": "Bob", "age": 25, "phone": "888-8888"})
user['phone'] = '888-8888'
user['name']='Bob'''''
#print(user.get("age",'not found'))
#print(user)
#print(age)
class Solution:
    def createTargetArray(self, nums: "List[int]", index: "List[int]") -> "List[int]":
        """Build the target array by inserting nums[i] at position index[i].

        Fixes: annotations are quoted because `List` (typing) is not
        imported here — unquoted they raised NameError at class creation.
        Uses list.insert instead of rebuilding the list with slicing,
        which copied the whole list on every iteration.
        """
        target = []
        for position, value in zip(index, nums):
            target.insert(position, value)
        return target
|
class MySQLClimateQuery:
    """Static factory of MySQL DDL statements for the climate tables."""

    @staticmethod
    def drop_sport_climates():
        """DDL: drop the sport<->climate join table if it exists."""
        return 'DROP TABLE IF EXISTS sport_climates'

    @staticmethod
    def create_sport_climates():
        """DDL: create the sport<->climate join table (composite PK, cascading FKs)."""
        return ('CREATE TABLE sport_climates ('
                'sport_id int NOT NULL,'
                'climate_name varchar(50) NOT NULL,'
                'PRIMARY KEY (sport_id, climate_name),'
                'FOREIGN KEY (sport_id) REFERENCES sports(id) ON DELETE CASCADE,'
                'FOREIGN KEY (climate_name) REFERENCES climates(name) ON DELETE CASCADE'
                ');')

    @staticmethod
    def drop_practice_center_climates():
        """DDL: drop the practice-center<->climate join table if it exists."""
        return 'DROP TABLE IF EXISTS practice_center_climates'

    @staticmethod
    def create_practice_center_climates():
        """DDL: create the practice-center<->climate join table (composite PK, cascading FKs)."""
        return ('CREATE TABLE practice_center_climates ('
                'practice_center_id int NOT NULL,'
                'climate_name varchar(50) NOT NULL,'
                'PRIMARY KEY (practice_center_id, climate_name),'
                'FOREIGN KEY (practice_center_id) REFERENCES practice_centers(id) '
                'ON DELETE CASCADE,'
                'FOREIGN KEY (climate_name) REFERENCES climates(name) ON DELETE CASCADE'
                ');')

    @staticmethod
    def drop_climates():
        """DDL: drop the climates lookup table if it exists."""
        return 'DROP TABLE IF EXISTS climates'

    @staticmethod
    def create_climates():
        """DDL: create the climates lookup table keyed by name."""
        return ('CREATE TABLE climates ('
                'name varchar(50) NOT NULL PRIMARY KEY'
                ');')
|
#!/usr/bin/env python3
# TODO: still need to develop logic for handling structures with a figure element
class FilterModule(object):
    """Ansible-style filter plugin exposing the `json_select` filter."""

    def filters(self):
        # Plugin entry point: map filter names to bound methods.
        return {
            'json_select': self.json_select
        }

    def jmagik(self, jbody, jpth, jfil):
        # Walk `jbody` down the path `jpth` (a sequence of keys/indexes, a
        # single int index, or "" meaning the whole body), then merge the
        # dict `jfil` into every dict found at that location.
        if jpth != "" and type(jpth) is not int:
            jvar = jbody
            for i in jpth:
                jvar = jvar[i]
        elif type(jpth) is int:
            jvar = jbody[jpth]
        else:
            jvar = jbody
        if type(jvar) is not list:  # normalize a single dict to [dict] so the loop below sees [{...}, ...] instead of bare keys
            jvar = [jvar]
        for nm in range(len(jvar)):  # check how many elements exist when it's [{...}, ..., {...}]
            # NOTE(review): update(jfil) is invoked once per key of jvar[nm];
            # one call per element would have the same effect — confirm
            # before simplifying.
            for i in list((jvar[nm])):  # iterate a snapshot of jvar[nm]'s keys (it is a dict)
                jvar[nm].update(jfil)
        return jvar

    def json_select(self, jbody, jpth, jfil):
        # Dispatch on the path type, mirroring jmagik's branches.
        if(jpth != "" and type(jpth) is not int ):
            # NOTE(review): `type(jbody is list)` is `type(<bool>)`, which is
            # always truthy, so the else branch below is unreachable; the
            # intent was probably `type(jbody) is list` — confirm the desired
            # dict behavior before fixing, since the else branch assigns and
            # then immediately deletes the same key.
            if type(jbody is list):
                jbody = self.jmagik(jbody, jpth, jfil)
            else:
                jbody[str(jpth)] = self.jmagik(jbody, jpth, jfil)
                del jbody[str(jpth)]
        elif(type(jpth) is int):
            jbody[jpth] = self.jmagik(jbody, jpth, jfil)
        else:
            jbody = self.jmagik(jbody, jpth, jfil)
        return jbody
|
# Module-level registry mapping command names to their callables.
__pycmd_map = {}

def register_pycmd(name, pycmd):
    """Register `pycmd` under `name` in the module-level command map."""
    __pycmd_map[name] = pycmd

def get_pycmd(name):
    """Resolve a command: callables pass through, registered names are
    looked up, anything else yields None."""
    if callable(name):
        return name
    if isinstance(name, str):
        return __pycmd_map.get(name)
    return None
class PyCmdOption(object):
    """Bundles the globals/locals namespaces a command should run with."""

    def __init__(self, globals, locals):
        # Parameter names shadow builtins but are part of the public API.
        self.__globals = globals
        self.__locals = locals

    def globals(self):
        """Return the stored globals namespace."""
        return self.__globals

    def locals(self):
        """Return the stored locals namespace."""
        return self.__locals
class PyCmd(object):
    """Callable wrapper that carries a name plus optional I/O type tags."""

    def __init__(self, body, name, inType=None, outType=None):
        self.__body = body
        self.__name = name
        self.__inType = inType
        self.__outType = outType

    def __call__(self, *args, **kwds):
        # Delegate straight to the wrapped callable.
        return self.__body(*args, **kwds)

    def name(self):
        """Command name used for registry lookups."""
        return self.__name

    def inType(self):
        """Declared input IOType (or None)."""
        return self.__inType

    def outType(self):
        """Declared output IOType (or None)."""
        return self.__outType
def pycmd(*args, **kwds):
    """Decorator that wraps a function in PyCmd and registers it.

    Supports bare usage (``@pycmd``) and parameterized usage
    (``@pycmd(name=..., inType=..., outType=...)``).

    Fixes: ``func_name`` is the Python 2 attribute — Python 3 uses
    ``__name__`` — and the parameterized form's inner ``register`` never
    returned the command, so the decorated name was rebound to None.

    :raises Exception: when called with neither a bare callable nor options.
    """
    if args:
        # Bare usage: @pycmd applied directly to the function.
        assert len(args) == 1
        assert not kwds
        assert callable(args[0])
        cmd = args[0]
        if not isinstance(cmd, PyCmd):
            cmd = PyCmd(cmd, name=cmd.__name__)
        register_pycmd(cmd.name(), cmd)
        return cmd
    if kwds:
        # Parameterized usage: @pycmd(...) returns this registering closure.
        assert not args
        def register(func):
            if 'name' not in kwds:
                kwds['name'] = func.__name__
            cmd = PyCmd(func, **kwds)
            register_pycmd(cmd.name(), cmd)
            return cmd
        return register
    else:
        raise Exception('Wrong params')
class IOType(object):
    """Enumeration-style constants describing a command's I/O medium."""

    Python = 1  # in-process Python objects
    File = 2    # file-based I/O
    No = 3      # no input/output
|
"""
Given a set, remove all the even numbers from
it, and for each even number removed, add
"Removed [insert the even number you removed]".
Example: {1,54, 2, 5} becomes {"Removed 54", 1,
5, "Removed 2"}. It is possible to solve this
problem using either discard or remove.
"""
def odd_set_day(given_set):
    """Replace every even member of `given_set`, in place, with the
    string "Removed <n>"."""
    evens = [member for member in given_set if member % 2 == 0]
    for even in evens:
        given_set.discard(even)
        given_set.add("Removed " + str(even))

given_set = {1, 2, 4, 5}
odd_set_day(given_set)
print(given_set)
|
def format_words(words):
    """Join `words` as "a, b, c and d", skipping empty strings.

    Returns "" for None/empty input. Fixes: the original destructively
    removed "" entries from the caller's list and did so with repeated
    O(n) `list.remove` calls; this version filters into a new list.
    """
    if not words:
        return ""
    cleaned = [word for word in words if word != ""]
    if not cleaned:
        return ""
    if len(cleaned) == 1:
        return cleaned[0]
    return ", ".join(cleaned[:-1]) + " and " + cleaned[-1]
class Solution:
    def removeDuplicates(self, nums):
        """Remove duplicates from sorted `nums` in place; return new length.

        Two-pointer compaction: O(n) instead of the original O(n^2)
        `list.pop(i)` loop (each pop shifts the tail). `nums` is truncated
        to its unique prefix, matching the original's observable effect.
        """
        write = 0
        for value in nums:
            # Keep the first occurrence of each run of equal values.
            if write == 0 or nums[write - 1] != value:
                nums[write] = value
                write += 1
        del nums[write:]
        return write

if __name__ == "__main__":
    test = Solution()
    nums = [0, 0, 1, 1, 1, 2, 2, 3, 3, 4]
    ans = test.removeDuplicates(nums)
    print(nums)
|
data = (
'You ', # 0x00
'Yang ', # 0x01
'Lu ', # 0x02
'Si ', # 0x03
'Jie ', # 0x04
'Ying ', # 0x05
'Du ', # 0x06
'Wang ', # 0x07
'Hui ', # 0x08
'Xie ', # 0x09
'Pan ', # 0x0a
'Shen ', # 0x0b
'Biao ', # 0x0c
'Chan ', # 0x0d
'Mo ', # 0x0e
'Liu ', # 0x0f
'Jian ', # 0x10
'Pu ', # 0x11
'Se ', # 0x12
'Cheng ', # 0x13
'Gu ', # 0x14
'Bin ', # 0x15
'Huo ', # 0x16
'Xian ', # 0x17
'Lu ', # 0x18
'Qin ', # 0x19
'Han ', # 0x1a
'Ying ', # 0x1b
'Yong ', # 0x1c
'Li ', # 0x1d
'Jing ', # 0x1e
'Xiao ', # 0x1f
'Ying ', # 0x20
'Sui ', # 0x21
'Wei ', # 0x22
'Xie ', # 0x23
'Huai ', # 0x24
'Hao ', # 0x25
'Zhu ', # 0x26
'Long ', # 0x27
'Lai ', # 0x28
'Dui ', # 0x29
'Fan ', # 0x2a
'Hu ', # 0x2b
'Lai ', # 0x2c
None, # 0x2d
None, # 0x2e
'Ying ', # 0x2f
'Mi ', # 0x30
'Ji ', # 0x31
'Lian ', # 0x32
'Jian ', # 0x33
'Ying ', # 0x34
'Fen ', # 0x35
'Lin ', # 0x36
'Yi ', # 0x37
'Jian ', # 0x38
'Yue ', # 0x39
'Chan ', # 0x3a
'Dai ', # 0x3b
'Rang ', # 0x3c
'Jian ', # 0x3d
'Lan ', # 0x3e
'Fan ', # 0x3f
'Shuang ', # 0x40
'Yuan ', # 0x41
'Zhuo ', # 0x42
'Feng ', # 0x43
'She ', # 0x44
'Lei ', # 0x45
'Lan ', # 0x46
'Cong ', # 0x47
'Qu ', # 0x48
'Yong ', # 0x49
'Qian ', # 0x4a
'Fa ', # 0x4b
'Guan ', # 0x4c
'Que ', # 0x4d
'Yan ', # 0x4e
'Hao ', # 0x4f
'Hyeng ', # 0x50
'Sa ', # 0x51
'Zan ', # 0x52
'Luan ', # 0x53
'Yan ', # 0x54
'Li ', # 0x55
'Mi ', # 0x56
'Shan ', # 0x57
'Tan ', # 0x58
'Dang ', # 0x59
'Jiao ', # 0x5a
'Chan ', # 0x5b
None, # 0x5c
'Hao ', # 0x5d
'Ba ', # 0x5e
'Zhu ', # 0x5f
'Lan ', # 0x60
'Lan ', # 0x61
'Nang ', # 0x62
'Wan ', # 0x63
'Luan ', # 0x64
'Xun ', # 0x65
'Xian ', # 0x66
'Yan ', # 0x67
'Gan ', # 0x68
'Yan ', # 0x69
'Yu ', # 0x6a
'Huo ', # 0x6b
'Si ', # 0x6c
'Mie ', # 0x6d
'Guang ', # 0x6e
'Deng ', # 0x6f
'Hui ', # 0x70
'Xiao ', # 0x71
'Xiao ', # 0x72
'Hu ', # 0x73
'Hong ', # 0x74
'Ling ', # 0x75
'Zao ', # 0x76
'Zhuan ', # 0x77
'Jiu ', # 0x78
'Zha ', # 0x79
'Xie ', # 0x7a
'Chi ', # 0x7b
'Zhuo ', # 0x7c
'Zai ', # 0x7d
'Zai ', # 0x7e
'Can ', # 0x7f
'Yang ', # 0x80
'Qi ', # 0x81
'Zhong ', # 0x82
'Fen ', # 0x83
'Niu ', # 0x84
'Jiong ', # 0x85
'Wen ', # 0x86
'Po ', # 0x87
'Yi ', # 0x88
'Lu ', # 0x89
'Chui ', # 0x8a
'Pi ', # 0x8b
'Kai ', # 0x8c
'Pan ', # 0x8d
'Yan ', # 0x8e
'Kai ', # 0x8f
'Pang ', # 0x90
'Mu ', # 0x91
'Chao ', # 0x92
'Liao ', # 0x93
'Gui ', # 0x94
'Kang ', # 0x95
'Tun ', # 0x96
'Guang ', # 0x97
'Xin ', # 0x98
'Zhi ', # 0x99
'Guang ', # 0x9a
'Guang ', # 0x9b
'Wei ', # 0x9c
'Qiang ', # 0x9d
None, # 0x9e
'Da ', # 0x9f
'Xia ', # 0xa0
'Zheng ', # 0xa1
'Zhu ', # 0xa2
'Ke ', # 0xa3
'Zhao ', # 0xa4
'Fu ', # 0xa5
'Ba ', # 0xa6
'Duo ', # 0xa7
'Duo ', # 0xa8
'Ling ', # 0xa9
'Zhuo ', # 0xaa
'Xuan ', # 0xab
'Ju ', # 0xac
'Tan ', # 0xad
'Pao ', # 0xae
'Jiong ', # 0xaf
'Pao ', # 0xb0
'Tai ', # 0xb1
'Tai ', # 0xb2
'Bing ', # 0xb3
'Yang ', # 0xb4
'Tong ', # 0xb5
'Han ', # 0xb6
'Zhu ', # 0xb7
'Zha ', # 0xb8
'Dian ', # 0xb9
'Wei ', # 0xba
'Shi ', # 0xbb
'Lian ', # 0xbc
'Chi ', # 0xbd
'Huang ', # 0xbe
None, # 0xbf
'Hu ', # 0xc0
'Shuo ', # 0xc1
'Lan ', # 0xc2
'Jing ', # 0xc3
'Jiao ', # 0xc4
'Xu ', # 0xc5
'Xing ', # 0xc6
'Quan ', # 0xc7
'Lie ', # 0xc8
'Huan ', # 0xc9
'Yang ', # 0xca
'Xiao ', # 0xcb
'Xiu ', # 0xcc
'Xian ', # 0xcd
'Yin ', # 0xce
'Wu ', # 0xcf
'Zhou ', # 0xd0
'Yao ', # 0xd1
'Shi ', # 0xd2
'Wei ', # 0xd3
'Tong ', # 0xd4
'Xue ', # 0xd5
'Zai ', # 0xd6
'Kai ', # 0xd7
'Hong ', # 0xd8
'Luo ', # 0xd9
'Xia ', # 0xda
'Zhu ', # 0xdb
'Xuan ', # 0xdc
'Zheng ', # 0xdd
'Po ', # 0xde
'Yan ', # 0xdf
'Hui ', # 0xe0
'Guang ', # 0xe1
'Zhe ', # 0xe2
'Hui ', # 0xe3
'Kao ', # 0xe4
None, # 0xe5
'Fan ', # 0xe6
'Shao ', # 0xe7
'Ye ', # 0xe8
'Hui ', # 0xe9
None, # 0xea
'Tang ', # 0xeb
'Jin ', # 0xec
'Re ', # 0xed
None, # 0xee
'Xi ', # 0xef
'Fu ', # 0xf0
'Jiong ', # 0xf1
'Che ', # 0xf2
'Pu ', # 0xf3
'Jing ', # 0xf4
'Zhuo ', # 0xf5
'Ting ', # 0xf6
'Wan ', # 0xf7
'Hai ', # 0xf8
'Peng ', # 0xf9
'Lang ', # 0xfa
'Shan ', # 0xfb
'Hu ', # 0xfc
'Feng ', # 0xfd
'Chi ', # 0xfe
'Rong ', # 0xff
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.