content stringlengths 7 1.05M |
|---|
class Calculadora:
    """A minimal four-operation calculator (sum, subtraction, product, division)."""

    def soma(self, valor_a, valor_b):
        """Return the sum of valor_a and valor_b."""
        return valor_a + valor_b

    def subtracao(self, valor_a, valor_b):
        """Return valor_a minus valor_b."""
        return valor_a - valor_b

    def multiplicacao(self, valor_a, valor_b):
        """Return the product of valor_a and valor_b.

        This is the correctly spelled name for the operation; the original
        misspelled name is kept below as a compatibility alias.
        """
        return valor_a * valor_b

    # Backward-compatible alias: the method was originally named with a typo
    # ("multiplacacao") and existing callers still use that spelling.
    multiplacacao = multiplicacao

    def divisao(self, valor_a, valor_b):
        """Return valor_a divided by valor_b.

        Raises ZeroDivisionError when valor_b is 0 (propagated unchanged,
        exactly as the plain / operator behaves).
        """
        return valor_a / valor_b
# Quick demo of the Calculadora operations: each result is printed on its
# own line, in the same order the original script exercised them.
calculadora = Calculadora()
for resultado in (
    calculadora.soma(10, 2),
    calculadora.subtracao(5, 3),
    calculadora.divisao(100, 2),
    calculadora.multiplacacao(10, 5),
):
    print(resultado)
#!/usr/bin/python
# Exercise 1 from Chapter 4 ("comma code"): print a list of items as a single
# English phrase, e.g. ['a', 'b', 'c'] -> "a, b, and c".

spam = ['apples', 'bananas', 'tofu', 'cats']


def format_list(items):
    """Return *items* joined into an English list phrase.

    Rules (fixes edge cases the original loop mishandled):
      * []             -> ''              (nothing to say)
      * ['a']          -> 'a'             (original printed 'and a')
      * ['a', 'b']     -> 'a and b'       (original printed 'a, and b')
      * three or more  -> 'a, b, and c'   (Oxford comma, as before)
    """
    if not items:
        return ''
    if len(items) == 1:
        return items[0]
    if len(items) == 2:
        return items[0] + ' and ' + items[1]
    return ', '.join(items[:-1]) + ', and ' + items[-1]


def structlist(lists):
    """Print *lists* formatted as an English list (original entry point)."""
    print(format_list(lists))


structlist(spam)
spam.append("lions")
# This should prove that the size of the list wont matter.
structlist(spam)
# -*- coding: utf-8 -*-
# 基础异常
class BaseError(Exception):
    """Root exception for this package, optionally carrying a payload.

    The payload supplied at construction time is exposed read-only through
    the ``data`` property and echoed verbatim by ``repr()``.
    """

    def __init__(self, data=None):
        super().__init__()
        self._data = data

    @property
    def data(self):
        """The payload attached to this error (may be None)."""
        return self._data

    def __repr__(self):
        return f"{self._data!r}"
# Raised when a write is attempted while the database is in read-only mode.
class MySQLReadOnlyError(BaseError):
    pass


# Raised when the MySQL client has already been destroyed.
class MySQLClientDestroyed(BaseError):
    pass


# Raised on an attempt to (re)assign a constant.
class ConstError(BaseError):
    pass


# Raised when NTP time calibration fails.
class NTPCalibrateError(BaseError):
    pass
|
# Read a real number and display its integer portion.
# e.g. 6.127 --> 6
# (math.trunc(valor) would work equally well; int() on a float also
# truncates toward zero.)
valor = float(input('Digite um número: '))
print(f'O valor digitado foi {valor} e sua porção inteira é {int(valor)}')
|
# Name of the source program file.
SOURCE_FILE = "(unknown).py_"
# Name of the output program file.
OUTPUT_FILE = "(unknown).py"
# Compile command line (Python needs no real compilation: the source is
# simply copied to the output location).
COMPILE = "cp {source} {output} {extra}"
# Run command line.
RUN = 'sh -c "python2.7 {program} {redirect}"'
# Human-readable display name.
DISPLAY = "Python 2.7"
# Interpreter version label.
VERSION = "CPython 2.7"
# Ace.js editor syntax-highlighting mode.
ACE_MODE = "python"
|
# Configuration file for ipcontroller.
# NOTE(review): get_config() is not defined in this file — it is injected
# into the namespace by the IPython configuration loader when this profile
# file is executed; confirm against the loader if running standalone.
c = get_config()
#------------------------------------------------------------------------------
# IPControllerApp configuration
#------------------------------------------------------------------------------
# IPControllerApp will inherit config from: BaseParallelApplication,
# BaseIPythonApplication, Application
# Use threads instead of processes for the schedulers
# c.IPControllerApp.use_threads = False
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.IPControllerApp.verbose_crash = False
# JSON filename where client connection info will be stored.
# c.IPControllerApp.client_json_file = 'ipcontroller-client.json'
# String id to add to runtime files, to prevent name collisions when using
# multiple clusters with a single profile simultaneously.
#
# When set, files will be named like: 'ipcontroller-<cluster_id>-engine.json'
#
# Since this is text inserted into filenames, typical recommendations apply:
# Simple character strings are ideal, and spaces are not recommended (but should
# generally work).
# c.IPControllerApp.cluster_id = ''
# The date format used by logging formatters for %(asctime)s
# c.IPControllerApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# Whether to overwrite existing config files when copying
# c.IPControllerApp.overwrite = False
# Set the log level by value or name.
# c.IPControllerApp.log_level = 30
# Set the working dir for the process.
# c.IPControllerApp.work_dir = u'/fusion/gpfs/home/scollis'
# ssh url for engines to use when connecting to the Controller processes. It
# should be of the form: [user@]server[:port]. The Controller's listening
# addresses must be accessible from the ssh server
# c.IPControllerApp.engine_ssh_server = u''
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.IPControllerApp.extra_config_file = u''
# Whether to create profile dir if it doesn't exist.
# c.IPControllerApp.auto_create = True
# The external IP or domain name of the Controller, used for disambiguating
# engine and client connections.
# c.IPControllerApp.location = u''
# ssh url for clients to use when connecting to the Controller processes. It
# should be of the form: [user@]server[:port]. The Controller's listening
# addresses must be accessible from the ssh server
# c.IPControllerApp.ssh_server = u''
# The IPython profile to use.
# c.IPControllerApp.profile = u'default'
# The ZMQ URL of the iplogger to aggregate logging.
# c.IPControllerApp.log_url = ''
# whether to log to a file
# c.IPControllerApp.log_to_file = False
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# c.IPControllerApp.ipython_dir = u''
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.IPControllerApp.copy_config_files = False
# import statements to be run at startup. Necessary in some environments
# c.IPControllerApp.import_statements = []
# Whether to reuse existing json connection files. If False, connection files
# will be removed on a clean exit.
# c.IPControllerApp.reuse_files = False
# Reload engine state from JSON file
# c.IPControllerApp.restore_engines = False
# JSON filename where engine connection info will be stored.
# c.IPControllerApp.engine_json_file = 'ipcontroller-engine.json'
# whether to cleanup old logfiles before starting
# c.IPControllerApp.clean_logs = False
# The Logging format template
# c.IPControllerApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
#------------------------------------------------------------------------------
# ProfileDir configuration
#------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = u''
#------------------------------------------------------------------------------
# Session configuration
#------------------------------------------------------------------------------
# Object for handling serialization and sending of messages.
#
# The Session object handles building messages and sending them with ZMQ sockets
# or ZMQStream objects. Objects can communicate with each other over the
# network via Session objects, and only need to work with the dict-based IPython
# message spec. The Session will handle serialization/deserialization, security,
# and metadata.
#
# Sessions support configurable serialization via packer/unpacker traits, and
# signing with HMAC digests via the key/keyfile traits.
#
# Parameters ----------
#
# debug : bool
# whether to trigger extra debugging statements
# packer/unpacker : str : 'json', 'pickle' or import_string
# importstrings for methods to serialize message parts. If just
# 'json' or 'pickle', predefined JSON and pickle packers will be used.
# Otherwise, the entire importstring must be used.
#
# The functions must accept at least valid JSON input, and output *bytes*.
#
# For example, to use msgpack:
# packer = 'msgpack.packb', unpacker='msgpack.unpackb'
# pack/unpack : callables
# You can also set the pack/unpack callables for serialization directly.
# session : bytes
# the ID of this Session object. The default is to generate a new UUID.
# username : unicode
# username added to message headers. The default is to ask the OS.
# key : bytes
# The key used to initialize an HMAC signature. If unset, messages
# will not be signed or checked.
# keyfile : filepath
# The file containing a key. If this is set, `key` will be initialized
# to the contents of the file.
# Username for the Session. Default is your system username.
# c.Session.username = u'scollis'
# The name of the unpacker for unserializing messages. Only used with custom
# functions for `packer`.
# c.Session.unpacker = 'json'
# Threshold (in bytes) beyond which a buffer should be sent without copying.
# c.Session.copy_threshold = 65536
# The name of the packer for serializing messages. Should be one of 'json',
# 'pickle', or an import name for a custom callable serializer.
# c.Session.packer = 'json'
# The maximum number of digests to remember.
#
# The digest history will be culled when it exceeds this value.
# c.Session.digest_history_size = 65536
# The UUID identifying this session.
# c.Session.session = u''
# The digest scheme used to construct the message signatures. Must have the form
# 'hmac-HASH'.
# c.Session.signature_scheme = 'hmac-sha256'
# execution key, for signing messages.
# c.Session.key = ''
# Debug output in the Session
# c.Session.debug = False
# The maximum number of items for a container to be introspected for custom
# serialization. Containers larger than this are pickled outright.
# c.Session.item_threshold = 64
# path to file containing execution key.
# c.Session.keyfile = ''
# Threshold (in bytes) beyond which an object's buffer should be extracted to
# avoid pickling.
# c.Session.buffer_threshold = 1024
# Metadata dictionary, which serves as the default top-level metadata dict for
# each message.
# c.Session.metadata = {}
#------------------------------------------------------------------------------
# HubFactory configuration
#------------------------------------------------------------------------------
# The Configurable for setting up a Hub.
# HubFactory will inherit config from: RegistrationFactory
# Client/Engine Port pair for Control queue
# c.HubFactory.control = None
# 0MQ transport for monitor messages. [default : tcp]
# c.HubFactory.monitor_transport = 'tcp'
# IP on which to listen for client connections. [default: loopback]
# c.HubFactory.client_ip = u''
# Client/Engine Port pair for Task queue
# c.HubFactory.task = None
# 0MQ transport for engine connections. [default: tcp]
# c.HubFactory.engine_transport = 'tcp'
# 0MQ transport for client connections. [default : tcp]
# c.HubFactory.client_transport = 'tcp'
# Monitor (SUB) port for queue traffic
# c.HubFactory.mon_port = 0
# The IP address for registration. This is generally either '127.0.0.1' for
# loopback only or '*' for all interfaces.
# Overridden from the loopback default here: listen on all interfaces so
# remote engines and clients can reach this controller.
c.HubFactory.ip = '*'
# Engine registration timeout in seconds [default:
# max(30,10*heartmonitor.period)]
# c.HubFactory.registration_timeout = 0
# Client/Engine Port pair for MUX queue
# c.HubFactory.mux = None
# PUB port for sending engine status notifications
# c.HubFactory.notifier_port = 0
# The port on which the Hub listens for registration.
# c.HubFactory.regport = 0
# The 0MQ url used for registration. This sets transport, ip, and port in one
# variable. For example: url='tcp://127.0.0.1:12345' or url='epgm://*:90210'
# c.HubFactory.url = ''
# IP on which to listen for engine connections. [default: loopback]
# c.HubFactory.engine_ip = u''
# Client/Engine Port pair for IOPub relay
# c.HubFactory.iopub = None
# PUB/ROUTER Port pair for Engine heartbeats
# c.HubFactory.hb = None
# The class to use for the DB backend
#
# Options include:
#
# SQLiteDB: SQLite MongoDB : use MongoDB DictDB : in-memory storage (fastest,
# but be mindful of memory growth of the Hub) NoDB : disable database
# altogether (default)
# c.HubFactory.db_class = 'NoDB'
# IP on which to listen for monitor messages. [default: loopback]
# c.HubFactory.monitor_ip = u''
# The 0MQ transport for communications. This will likely be the default of
# 'tcp', but other values include 'ipc', 'epgm', 'inproc'.
# c.HubFactory.transport = 'tcp'
#------------------------------------------------------------------------------
# TaskScheduler configuration
#------------------------------------------------------------------------------
# Python TaskScheduler object.
#
# This is the simplest object that supports msg_id based DAG dependencies.
# *Only* task msg_ids are checked, not msg_ids of jobs submitted via the MUX
# queue.
# select the task scheduler scheme [default: Python LRU] Options are: 'pure',
# 'lru', 'plainrandom', 'weighted', 'twobin','leastload'
# c.TaskScheduler.scheme_name = 'leastload'
# specify the High Water Mark (HWM) for the downstream socket in the Task
# scheduler. This is the maximum number of allowed outstanding tasks on each
# engine.
#
# The default (1) means that only one task can be outstanding on each engine.
# Setting TaskScheduler.hwm=0 means there is no limit, and the engines continue
# to be assigned tasks while they are working, effectively hiding network
# latency behind computation, but can result in an imbalance of work when
# submitting many heterogeneous tasks all at once. Any positive value greater
# than one is a compromise between the two.
# c.TaskScheduler.hwm = 1
#------------------------------------------------------------------------------
# HeartMonitor configuration
#------------------------------------------------------------------------------
# A basic HeartMonitor class. pingstream: a PUB stream; pongstream: a ROUTER
# stream period: the period of the heartbeat in milliseconds
# Whether to include every heartbeat in debugging output.
#
# Has to be set explicitly, because there will be *a lot* of output.
# c.HeartMonitor.debug = False
# The frequency at which the Hub pings the engines for heartbeats (in ms)
# c.HeartMonitor.period = 3000
# Allowed consecutive missed pings from controller Hub to engine before
# unregistering.
# c.HeartMonitor.max_heartmonitor_misses = 10
#------------------------------------------------------------------------------
# DictDB configuration
#------------------------------------------------------------------------------
# Basic in-memory dict-based object for saving Task Records.
#
# This is the first object to present the DB interface for logging tasks out of
# memory.
#
# The interface is based on MongoDB, so adding a MongoDB backend should be
# straightforward.
# The fraction by which the db should culled when one of the limits is exceeded
#
# In general, the db size will spend most of its time with a size in the range:
#
# [limit * (1-cull_fraction), limit]
#
# for each of size_limit and record_limit.
# c.DictDB.cull_fraction = 0.1
# The maximum total size (in bytes) of the buffers stored in the db
#
# When the db exceeds this size, the oldest records will be culled until the
# total size is under size_limit * (1-cull_fraction). default: 1 GB
# c.DictDB.size_limit = 1073741824
# The maximum number of records in the db
#
# When the history exceeds this size, the first record_limit * cull_fraction
# records will be culled.
# c.DictDB.record_limit = 1024
#------------------------------------------------------------------------------
# SQLiteDB configuration
#------------------------------------------------------------------------------
# SQLite3 TaskRecord backend.
# The SQLite Table to use for storing tasks for this session. If unspecified, a
# new table will be created with the Hub's IDENT. Specifying the table will
# result in tasks from previous sessions being available via Clients' db_query
# and get_result methods.
# c.SQLiteDB.table = 'ipython-tasks'
# The directory containing the sqlite task database. The default is to use the
# cluster_dir location.
# c.SQLiteDB.location = ''
# The filename of the sqlite task database. [default: 'tasks.db']
# c.SQLiteDB.filename = 'tasks.db'
|
# Package Constants
# Meraki dashboard API key, set either at instantiation or as an environment variable
API_KEY_ENVIRONMENT_VARIABLE = 'MERAKI_DASHBOARD_API_KEY'
# Base URL preceding all endpoint resources
DEFAULT_BASE_URL = 'https://api.meraki.com/api/v1'
# Maximum number of seconds for each API call
SINGLE_REQUEST_TIMEOUT = 60
# Path for TLS/SSL certificate verification if behind local proxy
CERTIFICATE_PATH = ''
# Proxy server and port, if needed, for HTTPS
REQUESTS_PROXY = ''
# Retry if 429 rate limit error encountered?
WAIT_ON_RATE_LIMIT = True
# Nginx 429 retry wait time (seconds)
NGINX_429_RETRY_WAIT_TIME = 60
# Action batch concurrency error retry wait time (seconds)
ACTION_BATCH_RETRY_WAIT_TIME = 60
# Retry if encountering other 4XX error (besides 429)?
RETRY_4XX_ERROR = False
# Other 4XX error retry wait time (seconds)
RETRY_4XX_ERROR_WAIT_TIME = 60
# Retry up to this many times when encountering 429s or other server-side errors
MAXIMUM_RETRIES = 2
# Create an output log file?
OUTPUT_LOG = True
# Path to output log; by default, working directory of script if not specified
LOG_PATH = ''
# Log file name appended with date and timestamp
LOG_FILE_PREFIX = 'meraki_api_'
# Print output logging to console?
PRINT_TO_CONSOLE = True
# Disable all logging? You're on your own then!
SUPPRESS_LOGGING = False
# Simulate POST/PUT/DELETE calls to prevent changes? (dry-run mode)
SIMULATE_API_CALLS = False
# Number of concurrent API requests for asynchronous class
AIO_MAXIMUM_CONCURRENT_REQUESTS = 8
# Optional partner identifier for API usage tracking; can also be set as an environment variable BE_GEO_ID
BE_GEO_ID = ''
# Optional identifier for API usage tracking; can also be set as an environment variable MERAKI_PYTHON_SDK_CALLER
MERAKI_PYTHON_SDK_CALLER = ''
|
class Solution:
    # @param word1 & word2: Two string.
    # @return: The minimum number of steps.
    def minDistance(self, word1, word2):
        """Return the Levenshtein edit distance between word1 and word2.

        Classic dynamic program: table[r][c] is the minimum number of
        insert/delete/substitute steps to turn word1[:r] into word2[:c].
        """
        rows, cols = len(word1) + 1, len(word2) + 1
        table = [[0] * cols for _ in range(rows)]
        # Transforming a prefix into the empty string (or vice versa)
        # costs exactly its length.
        for r in range(rows):
            table[r][0] = r
        for c in range(cols):
            table[0][c] = c
        for r in range(1, rows):
            for c in range(1, cols):
                delete = table[r - 1][c] + 1
                insert = table[r][c - 1] + 1
                # Substitution is free when the characters already match.
                substitute = table[r - 1][c - 1]
                if word1[r - 1] != word2[c - 1]:
                    substitute += 1
                table[r][c] = min(delete, insert, substitute)
        return table[rows - 1][cols - 1]
|
# Rewrite assets/virus2.csv: if the last two rows share the same 10-character
# prefix (presumably a date stamp — TODO confirm against the file format),
# keep only the newer one; then strip columns [10, 26) from every data row
# while leaving the header row untouched.
with open("assets/virus2.csv", "r", encoding="utf-8") as fp:
    lines = fp.readlines()

# Guard against files with fewer than two rows (the original indexed
# lines[n-1]/lines[n-2] unconditionally and would raise IndexError).
if len(lines) >= 2 and lines[-1][:10] == lines[-2][:10]:
    # Duplicate prefix: drop the older (second-to-last) row.
    lines.pop(-2)

with open("assets/virus2.csv", "w", encoding="utf-8") as fp:
    for idx, line in enumerate(lines):
        if idx == 0:
            fp.write(line)  # header row is written unchanged
        else:
            fp.write(line[:10] + line[26:])
print("finished!")
#
# PySNMP MIB module ZHONE-COM-IP-FILTER-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ZHONE-COM-IP-FILTER-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:40:45 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
MibIdentifier, ObjectIdentity, NotificationType, Unsigned32, Counter32, Integer32, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, Bits, TimeTicks, Gauge32, Counter64, iso = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "ObjectIdentity", "NotificationType", "Unsigned32", "Counter32", "Integer32", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "Bits", "TimeTicks", "Gauge32", "Counter64", "iso")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
zhoneModules, zhoneIp = mibBuilder.importSymbols("Zhone", "zhoneModules", "zhoneIp")
ZhoneRowStatus, ZhoneAdminString = mibBuilder.importSymbols("Zhone-TC", "ZhoneRowStatus", "ZhoneAdminString")
comIpFilter = ModuleIdentity((1, 3, 6, 1, 4, 1, 5504, 6, 58))
comIpFilter.setRevisions(('2005-01-10 10:16', '2005-01-03 09:24', '2004-12-21 09:25', '2004-08-30 11:00', '2004-04-06 00:17', '2001-01-17 08:48', '2000-09-11 16:22',))
if mibBuilder.loadTexts: comIpFilter.setLastUpdated('200501100015Z')
if mibBuilder.loadTexts: comIpFilter.setOrganization('Zhone Technologies, Inc.')
filter = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8))
if mibBuilder.loadTexts: filter.setStatus('current')
filterGlobal = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 1))
if mibBuilder.loadTexts: filterGlobal.setStatus('current')
fltGlobalIndexNext = MibScalar((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fltGlobalIndexNext.setStatus('current')
fltGlobalTimeout = MibScalar((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: fltGlobalTimeout.setStatus('current')
filterSpecTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2), )
if mibBuilder.loadTexts: filterSpecTable.setStatus('current')
filterSpecEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1), ).setIndexNames((0, "ZHONE-COM-IP-FILTER-MIB", "fltSpecIndex"))
if mibBuilder.loadTexts: filterSpecEntry.setStatus('current')
fltSpecIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: fltSpecIndex.setStatus('current')
fltSpecName = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 2), ZhoneAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecName.setStatus('current')
fltSpecDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 3), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecDesc.setStatus('current')
fltSpecVersion1 = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 4), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecVersion1.setStatus('current')
fltSpecVersion2 = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 5), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecVersion2.setStatus('current')
fltSpecLanguageVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 6), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecLanguageVersion.setStatus('current')
fltSpecRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 7), ZhoneRowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecRowStatus.setStatus('current')
filterStatementTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3), )
if mibBuilder.loadTexts: filterStatementTable.setStatus('current')
filterStatementEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1), ).setIndexNames((0, "ZHONE-COM-IP-FILTER-MIB", "fltSpecIndex"), (0, "ZHONE-COM-IP-FILTER-MIB", "fltStmtIndex"))
if mibBuilder.loadTexts: filterStatementEntry.setStatus('current')
fltStmtIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: fltStmtIndex.setStatus('current')
fltStmtIpSrcAddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 2), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtIpSrcAddrLow.setStatus('current')
fltStmtIpSrcAddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 3), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtIpSrcAddrHigh.setStatus('current')
fltStmtSrcPortLow = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtSrcPortLow.setStatus('current')
fltStmtSrcPortHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtSrcPortHigh.setStatus('current')
fltStmtIpDstAddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 6), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtIpDstAddrLow.setStatus('current')
fltStmtIpDstAddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 7), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtIpDstAddrHigh.setStatus('current')
fltStmtDstPortLow = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtDstPortLow.setStatus('current')
fltStmtDstPortHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtDstPortHigh.setStatus('current')
fltStmtIpProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("any", 1), ("ip", 2), ("tcp", 3), ("udp", 4), ("icmp", 5))).clone('any')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtIpProtocol.setStatus('current')
fltStmtArbValueBase = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("none", 1), ("ip", 2), ("udp", 3), ("tcp", 4), ("icmp", 5), ("ipOptions", 6), ("tcpOptions", 7))).clone('none')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtArbValueBase.setStatus('current')
fltStmtArbOffset = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtArbOffset.setStatus('current')
fltStmtArbMask = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 13), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtArbMask.setStatus('current')
fltStmtArbValueLow = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 14), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtArbValueLow.setStatus('current')
fltStmtArbValueHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 15), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtArbValueHigh.setStatus('current')
fltStmtModifier = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 16), Bits().clone(namedValues=NamedValues(("notIpSrc", 0), ("notSrcPort", 1), ("notDstIp", 2), ("notPortDst", 3), ("notProtocol", 4), ("notArbitrary", 5), ("notStatement", 6)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtModifier.setStatus('current')
fltStmtAction = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 17), Bits().clone(namedValues=NamedValues(("reset", 0), ("permit", 1), ("deny", 2), ("forward", 3), ("reject", 4), ("log", 5))).clone(namedValues=NamedValues(("deny", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtAction.setStatus('current')
fltStmtActionArg = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 18), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtActionArg.setStatus('current')
fltStmtRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 19), ZhoneRowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtRowStatus.setStatus('current')
filterStmtRenumTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 4), )
if mibBuilder.loadTexts: filterStmtRenumTable.setStatus('current')
filterStmtRenumEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 4, 1), )
filterStatementEntry.registerAugmentions(("ZHONE-COM-IP-FILTER-MIB", "filterStmtRenumEntry"))
filterStmtRenumEntry.setIndexNames(*filterStatementEntry.getIndexNames())
if mibBuilder.loadTexts: filterStmtRenumEntry.setStatus('current')
fltStmtIndexNew = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fltStmtIndexNew.setStatus('current')
filterStatsTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5), )
if mibBuilder.loadTexts: filterStatsTable.setStatus('current')
filterStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "ZHONE-COM-IP-FILTER-MIB", "fltStatDirection"))
if mibBuilder.loadTexts: filterStatsEntry.setStatus('current')
fltStatDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ingress", 1), ("egress", 2))))
if mibBuilder.loadTexts: fltStatDirection.setStatus('current')
fltStatResetPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 2), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatResetPkts.setStatus('current')
fltStatPermitPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 3), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatPermitPkts.setStatus('current')
fltStatDenyPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 4), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatDenyPkts.setStatus('current')
fltStatForwardPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 5), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatForwardPkts.setStatus('current')
fltStatRejectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 6), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatRejectPkts.setStatus('current')
fltStatLogPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 7), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatLogPkts.setStatus('current')
fltStatDefaultPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatDefaultPkts.setStatus('current')
fltStatSpecVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatSpecVersion.setStatus('current')
fltStatSpecIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatSpecIndex.setStatus('current')
mcastControl = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6))
if mibBuilder.loadTexts: mcastControl.setStatus('current')
mcastControlListTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1), )
if mibBuilder.loadTexts: mcastControlListTable.setStatus('current')
mcastControlListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1), ).setIndexNames((0, "ZHONE-COM-IP-FILTER-MIB", "mcastControlListControlId"), (0, "ZHONE-COM-IP-FILTER-MIB", "mcastControlListControlPrecedence"))
if mibBuilder.loadTexts: mcastControlListEntry.setStatus('current')
mcastControlListControlId = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: mcastControlListControlId.setStatus('current')
mcastControlListControlPrecedence = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: mcastControlListControlPrecedence.setStatus('current')
mcastControlListRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1, 3), ZhoneRowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: mcastControlListRowStatus.setStatus('current')
mcastControlListIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: mcastControlListIpAddress.setStatus('current')
mcastControlListType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("always-on", 2), ("periodic", 3))).clone('normal')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: mcastControlListType.setStatus('current')
portAccessControl = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7))
if mibBuilder.loadTexts: portAccessControl.setStatus('current')
portAccessNextIndex = MibScalar((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portAccessNextIndex.setStatus('current')
portAccessTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2), )
if mibBuilder.loadTexts: portAccessTable.setStatus('current')
portAccessEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1), ).setIndexNames((0, "ZHONE-COM-IP-FILTER-MIB", "portAccessIndex"))
if mibBuilder.loadTexts: portAccessEntry.setStatus('current')
portAccessIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100)))
if mibBuilder.loadTexts: portAccessIndex.setStatus('current')
portAccessRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1, 2), ZhoneRowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: portAccessRowStatus.setStatus('current')
portAccessNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1023))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: portAccessNumber.setStatus('current')
portAccessSrcAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: portAccessSrcAddr.setStatus('current')
portAccessNetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1, 5), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: portAccessNetMask.setStatus('current')
mibBuilder.exportSymbols("ZHONE-COM-IP-FILTER-MIB", fltStmtSrcPortHigh=fltStmtSrcPortHigh, fltStmtArbMask=fltStmtArbMask, mcastControlListIpAddress=mcastControlListIpAddress, fltSpecDesc=fltSpecDesc, fltSpecName=fltSpecName, fltStmtIpSrcAddrLow=fltStmtIpSrcAddrLow, mcastControlListRowStatus=mcastControlListRowStatus, fltStmtArbOffset=fltStmtArbOffset, fltStatPermitPkts=fltStatPermitPkts, fltStmtRowStatus=fltStmtRowStatus, mcastControlListTable=mcastControlListTable, fltStatDenyPkts=fltStatDenyPkts, filterStatementTable=filterStatementTable, filterGlobal=filterGlobal, PYSNMP_MODULE_ID=comIpFilter, fltStmtIpDstAddrLow=fltStmtIpDstAddrLow, fltStmtDstPortHigh=fltStmtDstPortHigh, fltStmtActionArg=fltStmtActionArg, portAccessNextIndex=portAccessNextIndex, fltSpecVersion1=fltSpecVersion1, filterSpecEntry=filterSpecEntry, filterStmtRenumTable=filterStmtRenumTable, portAccessEntry=portAccessEntry, mcastControlListControlPrecedence=mcastControlListControlPrecedence, portAccessSrcAddr=portAccessSrcAddr, portAccessControl=portAccessControl, mcastControlListControlId=mcastControlListControlId, fltSpecLanguageVersion=fltSpecLanguageVersion, fltStmtIpSrcAddrHigh=fltStmtIpSrcAddrHigh, fltStatDirection=fltStatDirection, mcastControl=mcastControl, fltStatSpecIndex=fltStatSpecIndex, fltStmtArbValueBase=fltStmtArbValueBase, fltStmtArbValueHigh=fltStmtArbValueHigh, fltStmtSrcPortLow=fltStmtSrcPortLow, fltStmtIpProtocol=fltStmtIpProtocol, fltSpecIndex=fltSpecIndex, fltStmtArbValueLow=fltStmtArbValueLow, fltGlobalTimeout=fltGlobalTimeout, fltStmtModifier=fltStmtModifier, fltStatForwardPkts=fltStatForwardPkts, filterSpecTable=filterSpecTable, fltStmtDstPortLow=fltStmtDstPortLow, filterStatsEntry=filterStatsEntry, fltStatDefaultPkts=fltStatDefaultPkts, portAccessRowStatus=portAccessRowStatus, fltStmtAction=fltStmtAction, fltStmtIpDstAddrHigh=fltStmtIpDstAddrHigh, portAccessNetMask=portAccessNetMask, portAccessTable=portAccessTable, filterStatementEntry=filterStatementEntry, filter=filter, 
fltSpecVersion2=fltSpecVersion2, fltStmtIndexNew=fltStmtIndexNew, filterStmtRenumEntry=filterStmtRenumEntry, fltSpecRowStatus=fltSpecRowStatus, filterStatsTable=filterStatsTable, portAccessIndex=portAccessIndex, fltStatSpecVersion=fltStatSpecVersion, portAccessNumber=portAccessNumber, fltGlobalIndexNext=fltGlobalIndexNext, mcastControlListEntry=mcastControlListEntry, mcastControlListType=mcastControlListType, fltStmtIndex=fltStmtIndex, fltStatRejectPkts=fltStatRejectPkts, comIpFilter=comIpFilter, fltStatLogPkts=fltStatLogPkts, fltStatResetPkts=fltStatResetPkts)
|
class Solution(object):
    """Group files that share identical content (LeetCode 609: Find Duplicate
    File in System)."""

    def findDuplicate(self, paths):
        """
        :type paths: List[str]
        :rtype: List[List[str]]

        Each input string is "<dir> <file1>(<content1>) <file2>(<content2>) ...".
        Returns the groups of full paths whose content appears more than once.
        """
        hm = dict()
        self.buildHashMap(paths, hm)
        # Only contents seen in more than one file are duplicates.
        # (Fixed: .itervalues()/xrange were Python 2-only and crash on Python 3.)
        return [v for v in hm.values() if len(v) > 1]

    def buildHashMap(self, paths, hm):
        """Populate hm: file content -> list of 'dir/file' paths holding it."""
        for s in paths:
            tmpList = s.split()
            folder = tmpList[0]
            for entry in tmpList[1:]:
                fileName, fileContent = entry.split('(')
                fileContent = fileContent[:-1]  # drop the trailing ')'
                hm.setdefault(fileContent, []).append(folder + '/' + fileName)
# Default MySQL connection settings; override at runtime via set_configs().
user = ""
password = ""
host = "127.0.0.1"
database = ""
raise_on_warnings = True

config = {
    "user": user,
    "password": password,
    "host": host,
    "database": database,
    "raise_on_warnings": raise_on_warnings,
}


def set_configs(user=None, password=None, database=None, host=None):
    """Update the shared `config` dict in place.

    Fixed: previously any omitted argument overwrote the stored value with
    None, and `host` could not be changed at all.  Now only arguments that
    are not None are applied; everything else keeps its current value.
    """
    updates = {"user": user, "password": password,
               "database": database, "host": host}
    for key, value in updates.items():
        if value is not None:
            config[key] = value
# Sample item record.
dic = {
    'color': 'red',
    'type': 'car',   # fixed typo: key was misspelled 'typw'
    'price': 1250
}
"""
Return text based on the tab number passed
"""
def descriptions(index):
if index == 0: # Machine Tab
return text_0
elif index == 1: # Display Tab
return text_1
elif index == 2: # Axis Tab
return text_2
elif index == 3: # Spindle Tab
return text_3
elif index == 4: # Inputs Tab
return text_4
elif index == 5: # Outputs Tab
return text_5
elif index == 6: # Tool Changer Tab
return text_6
elif index == 70: # SS Cards Tab 0 - 8
return text_70
elif index == 71: # SS Cards Tab 0 - 8
return text_71
elif index == 72: # SS Cards Tab 0 - 8
return text_72
elif index == 73: # SS Cards Tab 0 - 8
return text_73
elif index == 74: # SS Cards Tab 0 - 8
return text_74
elif index == 75: # SS Cards Tab 0 - 8
return text_75
elif index == 76: # SS Cards Tab 0 - 8
return text_76
elif index == 77: # SS Cards Tab 0 - 8
return text_77
elif index == 78: # SS Cards Tab 0 - 8
return text_78
elif index == 8: # Options Tab
return text_8
elif index == 9: # PLC Tab
return text_9
elif index == 10: # Pins Tab
return text_10
elif index == 11: # Info Tab
return text_11
elif index == 12: # PC Tab
return text_12
elif index == 20:
return text_20
elif index == 30:
return text_30
else:
return text_no
text_0 = """
Help Text for Machine Tab
IP Address 10.10.10.10 is recommended to avoid conflicts on your LAN
10.10.10.10 W1 Down W2 Up
192.168.1.121 W1 Down W2 Down
Maximum Linear Velocity is in Selected Units per second.
Firmware
To read the current firmware select the IP Address first.
After reading the current firmware the Copy button will place the text in the clipboard.
To flash a card select the firmware and IP Address first.
After flashing Reload or Power Cycle the card
Only select encoders and stepgens if you want less that default.
"""
text_1 = """
Help Text for Display Tab
Offset and Feedback display use relative (including offsets) or absolute machine.
Overrides use percent of programed value.
QtPyVCP can only be installed on Debian 9
"""
text_2 = """
Help Text for Axis Tab
Joints must be configured starting with 0 and not skipping any.
Any joint can have any axis letter.
Scale is the number of steps to move one user unit (inch or mm).
Limits are in user units.
Velocity is user units per second, Acceleration is user units per second per second
PID Settings
P = Proportional P = (Commanded - Measured) * Pgain.
I = Integral I(new) = I(old) + Igain * (Commanded - Measured).
D = Derivative D = Dgain * (New_measured - Old_Measured)
FF0 = Commanded position * FF0 + Output
FF1 = First derivative of position * FF1
FF2 = Second derivative of position * FF2
FF0 is proportional to position (assuming an axis) or otherwise whatever
parameter is the input to the PID.
FF1 is the first derivative of position, so that is proportional
to velocity.
FF2 is second derivative of position, so it is proportional to acceleration.
Axis, PID Settings and StepGen Settings are required.
Homing fields are optional.
For gantry type of machines just select the same axis for each joint.
"""
text_3 = """
Help Text for Spindle Tab
"""
text_4 = """
Help Text for Inputs Tab
Inputs are optional
If the input is a type that is associated with an axis the axis must be
specified.
"""
text_5 = """
Help Text for Outputs Tab
Outputs are optional.
"""
text_6 = """
Help Text for Tool Changer Tab
"""
text_70 = """
Help Text for SS Cards Tab
"""
text_71 = """
Help Text for 7i64 Tab
"""
text_72 = """
Help Text for 7i69 Tab
"""
text_73 = """
Help Text for 7i70 Tab
"""
text_74 = """
Help Text for 7i71 Tab
"""
text_75 = """
Help Text for 7i72 Tab
"""
text_76 = """
Help Text for 7i73 Tab
Powered up no config running CR1 is solid red and CR2 is off
Powered up and LinuxCNC running CR1 is off and CR2 is blinking green
"""
text_77 = """
Help Text for 7i84 Tab
"""
text_78 = """
Help Text for 7i87 Tab
"""
text_8 = """
Help Text for Options Tab
On Screen Prompt for Manual Tool Change
This option is if you run G code with more than one tool and the tools can be
preset like BT and Cat holders. If you have collet type like ER and R8 you
should not check this and you should only one tool per G code program and
touch it off before running the program.
Hal User Interface
This option enables halui which exports hal pins so they can be connected to
physical or VCP or used in your hal configuration. These include pins related
to abort, tool, spindle, program, mode, mdi, coolant, max velocity, machine,
lube, joint, jog, feed override, rapid override, e stop, axis and home.
PyVCP Panel
This option adds the connections and a basic PyVCP panel.
GladeVCP Panel
Not functioning at this point.
Debug Options
This sets the debug level that is used when an error happens. When an error
occours the error information is sent to dmesg. Open a terminal and clear
dmesg with sudo dmesg -c then run your configuration and to view the error
in a terminal type dmesg.
"""
text_9 = """
Help Text for PLC Tab
Classicladder PLC will add a basic PLC to the configuration. You can also set
the number of components that Classicladder starts with.
"""
text_10 = """
Help Text for Pins Tab
If you have the 7i80 connected press get pins to get the current pinout
"""
text_11 = """
Help Text for Info Tab
Get CPU information and NIC information
"""
text_12 = """
Help Text for PC Tab
To check if the network packet time is ok get the CPU speed from the Info Tab.
Then get the tmax time and put those values into the boxes then hit calculate.
Make sure you select if the CPU speed is gHz or mHz.
To get tMax you must have the 7i80 connected to the PC and be running the
configuration with LinuxCNC.
"""
text_20 = """
Help Text for Building the Configuration
Opening the sample ini file and modifying is the fastest way to get a working configuration.
Check Configuration will scan the configuration for errors
Build Configuration will build all the configuration files needed.
The ini file is always overwritten.
The configName.hal file will always be overwritten.
The tool table, variable file, postgui.hal, custom.hal, configName.clp,
configName.xml files are never overwritten if present. To get a new one delete
the file and a new one will be created when you build the configuration.
"""
text_30 = """
Help Text for PC Setup
7i80 card requires the Master Branch of LinuxCNC
Mesa Ethernet Cards require LinuxCNC Uspace and the PREEMPT kernel.
Instructions to download and install Debian 9 and LinuxCNC Uspace with the
desktop of your choice
https://cdimage.debian.org/cdimage/unofficial/non-free/cd-including-firmware/
drill down to the latest version of the nonfree amd64 iso-cd netinst.iso
Burn to a CD if you have a PCI Ethernet card remove it, setup with the on board LAN only
Boot from the CD
Graphical Install, Do Not enter a Root Password! Just hit enter
Debian desktop environment, Mate, SSH server,Print server, standard system utilities
after booting to Debian 9 open a terminal
sudo nano /etc/lightdm/lightdm.conf
to log in without your user name a password uncomment and add your user name
autologin-user=yourusername
autologin-user-timeout=0
CTRL X and yes to save and exit.
Open the Synaptic Package Manager
search for linux-image and install linux-image-latest.version-rt
reboot the pc
in a terminal
uname -a # it should report back PREEMT RT
sudo apt-get update
sudo apt-get dist-upgrade
sudo apt-get install dirmngr
sudo apt-get install software-properties-common
*** to get the buildbot current build
sudo apt-key adv --keyserver hkp://keys.gnupg.net --recv-key E0EE663E
sudo add-apt-repository "deb http://buildbot.linuxcnc.org/ stretch master-rtpreempt"
sudo apt-get update
sudo apt-get install linuxcnc-uspace
Configure the network adapter to work with an Ethernet card
To find the Ethernet adapter name
ip link show
sudo nano /etc/network/interfaces
auto enp0s25 << change to match your interface name
iface enp0s25 inet static
address 10.10.10.1
netmask 255.255.255.0
shutdown and install a second LAN card if you need to connect to the internet
for git and programming tools
sudo apt-get install git-core git-gui make gcc
to add open in terminal to caja
sudo apt-get install caja-open-terminal
to be able to edit the menu add mozo
sudo apt-get install mozo
You will find it in System > Control Center > Main Menu
"""
text_no = """
No Help is found for this tab
"""
|
# Store the RSS feed IDs for each of the topics in the CNBC client.
# Keys are internal topic slugs; values are CNBC's numeric feed IDs.
cnbc_rss_feeds_id = {
    # News sections
    'top_news': 100003114,
    'world_news': 100727362,
    'us_news': 15837362,
    'asia_news': 19832390,
    'europe_news': 19794221,
    'business': 10001147,
    'earnings': 15839135,
    'commentary': 100370673,
    'economy': 20910258,
    'finance': 10000664,
    'technology': 19854910,
    'politics': 10000113,
    'health_care': 10000108,
    'real_estate': 10000115,
    'wealth': 10001054,
    'autos': 10000101,
    'energy': 19836768,
    'media': 10000110,
    'retail': 10000116,
    'travel': 10000739,
    'small_business': 44877279,
    # Investing / blogs
    'investing': 15839069,
    'financial_advisors': 100646281,
    'personal_finance': 21324812,
    'charting_asia': 23103686,
    'funny_business': 17646093,
    'market_insider': 20409666,
    'netnet': 38818154,
    'trader_talk': 20398120,
    'buffett_watch': 19206666,
    # Video feeds
    'top_video': 15839263,
    'digital_workshop': 100616801,
    'latest_video': 100004038,
    'ceo_interviews': 100004032,
    'analyst_interviews': 100004033,
    'must_watch': 101014894,
    # TV shows
    'squawk_box': 15838368,
    'squawk_on_the_street': 15838381,
    'power_lunch': 15838342,
    'street_signs': 15838408,
    'options_action': 28282083,
    'closing_bell': 15838421,
    'fast_money': 15838499,
    'mad_money': 15838459,
    'kudlow_report': 15838446,
    'futures_now': 48227449,
    'suze_orman': 15838523,
    'capital_connection': 17501773,
    'squawk_box_europe': 15838652,
    'worldwide_exchange': 15838355,
    'squawk_box_asia': 15838831,
    'the_call': 37447855,
}
|
# START PROBLEM SET 01
print('PROBLEM SET 01 \n')

# PROBLEM 01A (25 points)
# Uncomment the variable name and assigned value that adheres to the styling
# convention described in PEP 08 for function and variable names
# (see https://www.python.org/dev/peps/pep-0008/#function-and-variable-names)
# Guido van Rossum is the original author of Python and former
# Benevolent dictator for life (BDFL) of the project.
# BEGIN 01A SOLUTION
# 1st_bdfl = 'Guido van Rossum'                        # invalid: starts with a digit
# benevolent_dictator_for_life! = 'Guido van Rossum'   # invalid: '!' not allowed
# python author = 'Guido van Rossum'                   # invalid: space in name
python_author = 'Guido van Rossum'                     # valid PEP 8 snake_case
# lambda = 'Guido van Rossum'                          # invalid: reserved keyword
# END 01A SOLUTION

# PROBLEM 01B (25 points)
# Concatenate Guido's position at the Python Software Foundation to the value
# chosen in Problem 01A, in the format "<name>, President".
python_foundation_officer = python_author + ', President'
# Note use of the .join() function to join a list of items to an
# empty string in order to form a new string
print(''.join(['python_foundation_officer=', python_foundation_officer, '\n']))
# END 01B SOLUTION

# START PROBLEM 01C-E SETUP (do not modify)
# The Zen of Python, by Tim Peters (1999)
# Mail list (1999): https://mail.python.org/pipermail/python-list/1999-June/001951.html
# PEP 20 (2004): https://www.python.org/dev/peps/pep-0020/
# Note the use of triple (""") quotes to denote a multi-line string.
zen_of_python = """Beautiful is better than ugly.
Explicit is better than implicit.
Simple is better than complex.
Complex is better than complicated.
Flat is better than nested.
Sparse is better than dense.
Readability counts.
Special cases aren't special enough to break the rules.
Although practicality beats purity.
Errors should never pass silently.
Unless explicitly silenced.
In the face of ambiguity, refuse the temptation to guess.
There should be one-- and preferably only one --obvious way to do it.
Although that way may not be obvious at first unless you're Dutch.
Now is better than never.
Although never is often better than *right* now.
If the implementation is hard to explain, it's a bad idea.
If the implementation is easy to explain, it may be a good idea.
Namespaces are one honking great idea -- let's do more of those!"""
print(''.join(['zen_of_python=', zen_of_python, '\n']))
# END SETUP

# PROBLEM 01C (25 points)
# Count the number of characters in zen_of_python.
# BEGIN 01C SOLUTION
num_chars = len(zen_of_python)
print(''.join(['num_chars=', str(num_chars), '\n']))
# END 01C SOLUTION

# PROBLEM 01D (25 points)
# Count the number of whitespace-separated "words" in zen_of_python.
# BEGIN 01D SOLUTION
num_char_chunks = len(zen_of_python.split())
# Note use of the built-in str() function to format num_char_chunks as a string.
print(''.join(['num_char_chunks=', str(num_char_chunks), '\n']))
# END 01D SOLUTION

# PROBLEM 01E (25 points)
# Floor-divide num_char_chunks by 19 (the number of lines in the Zen of
# Python) so an integer, not a float, is produced.
# BEGIN 01E SOLUTION
avg_num_chunks_per_line = num_char_chunks // 19
print(''.join(['avg_num_chunks_per_line=', str(avg_num_chunks_per_line), '\n']))
# END 01E SOLUTION

# PROBLEM 01F (25 points)
# Substitute a U-M email address for all occurrences of the word "Dutch".
# NOTE(review): replace the placeholder address with a real uniqname.
# BEGIN 01F SOLUTION
zen_of_python_uniqname = zen_of_python.replace('Dutch', 'uniqname@umich.edu')
print(''.join(['zen_of_python_uniqname=', zen_of_python_uniqname, '\n']))
# END 01F SOLUTION

# END PROBLEM SET
|
## Functions

def isQueueFull():
    """Return True only when every slot is occupied and none can be reclaimed.

    Fixes: the original never returned True (so enQueue on a full queue
    corrupted state), and it ran the left-shift compaction even when free
    slots remained at the right end.  Now, when rear has reached the last
    slot but deQueue() freed slots at the front, the contents are shifted
    one position left to make room and False is returned.
    """
    global SIZE, front, rear
    if rear != SIZE - 1:
        return False                 # still room at the right end
    if front == -1:
        return True                  # rear at end and nothing dequeued: full
    # Rear at the end, but the front has free slots: compact left by one.
    for i in range(front + 1, SIZE):
        queue[i - 1] = queue[i]
        queue[i] = None
    front -= 1
    rear -= 1
    return False


def enQueue(data):
    """Insert data at the rear, printing a message and refusing when full."""
    global SIZE, front, rear
    if isQueueFull():
        print('큐가 가득 찼습니다.')
        return
    rear += 1
    queue[rear] = data


def isQueueEmpty():
    """A queue is empty when front has caught up with rear."""
    global SIZE, front, rear
    return front == rear


def deQueue():
    """Remove and return the oldest element, or None when empty."""
    global SIZE, front, rear
    if isQueueEmpty():
        print('큐가비었습니다.')
        return None
    front += 1
    data = queue[front]
    queue[front] = None
    return data


def peek():
    """Return the oldest element WITHOUT removing it, or None when empty.

    Fix: the original advanced `front`, so peeking consumed the element.
    """
    global SIZE, front, rear
    if isQueueEmpty():
        print('큐가비었습니다.')
        return None
    return queue[front + 1]


## Data
SIZE = 5
queue = [None for _ in range(SIZE)]
front, rear = -1, -1
select = -1

## Main (interactive demo; runs only when executed as a script)
if __name__ == "__main__":
    while select != 4:
        select = int(input("선택하세요(1:삽입,2:추출,3:확인,4.종료)-->"))
        if select == 1:
            data = input('입력할 데이터----->')
            enQueue(data)
            print('현 큐 상태', queue)
        elif select == 2:
            data = deQueue()
            print('추출 데이터 = ', data)
            print('현 스택 상태', queue)
        elif select == 3:
            data = peek()
            print('확인된 데이터 =', data)
        elif select == 4:
            # The while condition terminates the loop; the original's bare
            # `exit` expression was a no-op and has been removed.
            print('현 스택 상태', queue)
        else:
            print("1~4의 숫자만 이용가능합니다")
|
class BasicCalculator:
    """The four elementary arithmetic operations on a pair of numbers."""

    def __init__(self):
        return

    def sum(self, num1, num2):
        """Return num1 plus num2."""
        return num1 + num2

    def sub(self, num1, num2):
        """Return num1 minus num2."""
        return num1 - num2

    def mul(self, num1, num2):
        """Return num1 times num2."""
        return num1 * num2

    def div(self, num1, num2):
        """Return num1 divided by num2 (true division)."""
        return num1 / num2
class AdvancedCalculator(BasicCalculator):
    """Adds integer exponentiation helpers to BasicCalculator."""

    def __init__(self):
        return

    def pow(self, num, power):
        """Return num ** power for non-negative integer power.

        Fix: the original seeded the accumulator with num instead of 1,
        so pow(3, 2) returned 27; it also left a debug print in the loop.
        """
        result = 1
        while power > 0:
            result *= num
            power -= 1
        return result

    def square(self, num):
        # Fix: was `pow(self, num, 2)`, which invoked the BUILTIN pow with
        # self as the base instead of this class's method.
        return self.pow(num, 2)

    def cube(self, num):
        # Same fix as square(): call the instance method, not builtin pow.
        return self.pow(num, 3)
class ScientificCalculator(AdvancedCalculator):
    """Adds aggregate operations over an arbitrary number of operands."""

    def __init__(self):
        return

    def average(self, *nums):
        """Truncated integer mean of the arguments.

        Raises ZeroDivisionError when called with no arguments, exactly
        as the original count-and-accumulate loop did.
        """
        return int(sum(nums) / len(nums))
|
#!/usr/bin/env python3

# Print out all the unique pairwise amino acid combinations
# AC is the same as CA
# Skip AA, CC etc.
# Also print out how many combinations there are

amino = 'ACDEFGHIKLMNPQRSTVWY'

count = 0
for i in range(len(amino)):
    # Starting the inner loop at i + 1 skips identical pairs (AA) and
    # mirrored duplicates (CA when AC was already printed).  This replaces
    # the original's fragile re-assignment of the outer loop variable.
    for j in range(i + 1, len(amino)):
        print(amino[i], amino[j])
        count += 1

# Fix: the stated requirement (and the expected output transcript that
# followed this script) ends with the combination count, but the original
# never printed it.  20 letters taken 2 at a time: C(20, 2) == 190.
print(count)

# NOTE(review): the original file ended with several large triple-quoted
# string literals (no-op expression statements) containing disabled k-mer /
# string-slicing scratch code and a transcript of the expected output (the
# 190 pairs "A C" ... "W Y" followed by the line "190").  They had no
# runtime effect and were removed as dead commented-out code.
|
#!/usr/bin/env python3
def harmasosztas(szam):
    """Format a non-negative integer into space-separated three-digit groups.

    Mirrors the original contract exactly: values below 100 come back as a
    zero-padded *str* of width 3, values 100-999 come back as the untouched
    *int* (callers wrap recursion results in str()), and larger values
    recurse on the thousands split and join the groups with spaces.
    """
    if 0 <= szam <= 9:
        return "00" + str(szam)
    if 10 <= szam <= 99:
        return "0" + str(szam)
    if 100 <= szam <= 999:
        return szam
    return str(harmasosztas(szam // 1000)) + " " + str(harmasosztas(szam % 1000))
def main():
    # Demo: 1000222 formats as "001 000 222" (zero-padded three-digit groups).
    print(harmasosztas(1000222))


main()
|
# Tally C++ keywords, operators and punctuation marks in the file '1.cpp'.

keywords = ["asm", "else", "new", "this", "auto", "enum", "operator", "throw",
            "bool", "explicit", "private", "true", "break", "export",
            "protected", "try", "case", "extern", "public", "typedef",
            "catch", "false", "register", "typeid", "char", "float",
            "reinterpret_cast", "typename", "class", "for", "return",
            "union", "const", "friend", "short", "unsigned", "const_cast",
            "goto", "signed", "using", "continue", "if", "sizeof", "virtual",
            "default", "inline", "static", "void", "delete", "int",
            "static_cast", "volatile", "do", "long", "struct", "wchar_t",
            "double", "mutable", "switch", "while", "dynamic_cast",
            "namespace", "template"]
operator = ["+", "-", "*", "/", "%", "="]
punc_marks = ["<", ">", "#", "}", "{", "(", ")", ":", ";", ","]

key_count = [0] * len(keywords)
op_count = [0] * len(operator)
pun_count = [0] * len(punc_marks)

with open('1.cpp', 'r') as f:
    sample = f.read()

# Strip '//' line comments.  Fixes two bugs in the original: it removed
# entries from the line list while iterating it, and indexed i[j+1] which
# could run past the end of a line ending in '/'.
code = "\n".join(line.split("//", 1)[0] for line in sample.split("\n"))

# Single character pass for operators and punctuation.  The original
# counted them twice: once per character of the UN-stripped text and again
# whenever they survived as split tokens.
for ch in code:
    if ch in operator:
        op_count[operator.index(ch)] += 1
    elif ch in punc_marks:
        pun_count[punc_marks.index(ch)] += 1

# Replace every separator with a space so keywords become stand-alone
# tokens, then tally keyword occurrences.
for sep in operator + punc_marks:
    code = code.replace(sep, " ")
for token in code.split():
    if token in keywords:
        key_count[keywords.index(token)] += 1

print("Keywords :")
for i in range(len(keywords)):
    if key_count[i] > 0:
        print(keywords[i], ": ", key_count[i])

print("\nOperators :")
for i in range(len(operator)):
    if op_count[i] > 0:
        print(operator[i], ": ", op_count[i])

print("\nPunctuation Marks :")
for i in range(len(punc_marks)):
    if pun_count[i] > 0:
        print(punc_marks[i], ": ", pun_count[i])
# Event type identifiers (consumers are not visible in this file; the
# numeric values must stay stable for whatever dispatches on them).
EventRandomDialogue = 1
EventRandomEffects = 2
EventEstateGravity = 3
EventGlobalGravity = 4
EventSirMaxBirthday = 5

# Weighted pick list: repeated entries are drawn proportionally more often;
# note that 10 is deliberately(?) absent — TODO confirm with the call site.
RandomCheesyList = [
    1, 1, 2, 3, 3, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 11, 11]
# Bounds for the random delay between "cheesy" events.
# NOTE(review): units are not visible here — presumably minutes; verify.
RandomCheesyMinTime = 3
RandomCheesyMaxTime = 60
# Data files used by the resume crawler.
area_code_filename = 'area_code.json'
hunters_file = 'hunters.xlsx'

# Request headers for GET requests.
get_headers = {
    'Host': 'www.51jingying.com',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36',
    'Cookie': ''
}

# Request headers for POST requests.
post_headers = {
    'X-Requested-With': 'XMLHttpRequest',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Host': 'www.51jingying.com',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36',
    # When this line was active, the cookies passed via requests' **kwargs
    # could not take effect on POSTs, so the site treated the user as not
    # logged in; it must stay commented out.
    # 'Cookie': ''
}

# Form fields submitted when searching for resumes.
post_data = {
    'url': 'https://www.51jingying.com/spy/searchmanager.php?act=getResumeSrch',
    'fulltext': '',
    'exparea': '',
    'onlyfunc': 1,
    'srchpage': 1,  # fetch the first results page
    'type': 'searchall',
    'downandup': 0,
    'mgrsort': 0,
    'resumetime': 6
}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
__project__ = 'leetcode'
__file__ = '__init__.py'
__author__ = 'king'
__time__ = '2020/1/2 14:55'
_ooOoo_
o8888888o
88" . "88
(| -_- |)
O\ = /O
____/`---'\____
.' \\| |// `.
/ \\||| : |||// \
/ _||||| -:- |||||- \
| | \\\ - /// | |
| \_| ''\---/'' | |
\ .-\__ `-` ___/-. /
___`. .' /--.--\ `. . __
."" '< `.___\_<|>_/___.' >'"".
| | : `- \`.;`\ _ /`;.`/ - ` : | |
\ \ `-. \_ __\ /__ _/ .-` / /
======`-.____`-.___\_____/___.-`____.-'======
`=---='
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
佛祖保佑 永无BUG
"""
"""
难度:中等
给定一个包含m x n个元素的矩阵(m 行, n 列),请按照顺时针螺旋顺序,返回矩阵中的所有元素。
示例1:
输入:
[
[ 1, 2, 3 ],
[ 4, 5, 6 ],
[ 7, 8, 9 ]
]
输出: [1,2,3,6,9,8,7,4,5]
示例2:
输入:
[
[1, 2, 3, 4],
[5, 6, 7, 8],
[9,10,11,12]
]
输出: [1,2,3,4,8,12,11,10,9,5,6,7]
"""
class Solution(object):
    def spiralOrder(self, matrix):
        """
        :type matrix: List[List[int]]
        :rtype: List[int]

        Peel off the top row, then rotate the remaining rows a quarter turn
        counter-clockwise (transpose, then reverse the row order) and
        repeat until the matrix is exhausted.  The input list is consumed.
        """
        order = []
        while matrix:
            order.extend(matrix.pop(0))
            rotated = [list(column) for column in zip(*matrix)]
            matrix = rotated[::-1]
        return order
# Demo from example 1 of the problem statement: prints [1, 2, 3, 6, 9, 8, 7, 4, 5].
print(
    Solution().spiralOrder([
        [1, 2, 3],
        [4, 5, 6],
        [7, 8, 9]
    ])
)
|
# Reverse the first two sub-lists of a 2-D list in place.
mylist = [[1, 2, 3], [4, 5, 6], [4, 3, 8]]
print("Multi-Dimensional List", mylist)
# Same order of operations as the original: index 1 first, then index 0;
# the third sub-list is left untouched.
for index in (1, 0):
    mylist[index].reverse()
print("Multi-Dimensional After Sublist-List", mylist)
|
N = 10**3
# For each i in 1..N there are exactly N // i multiples of i not exceeding N,
# so summing N // i tallies the same (i, multiple) pairs the original
# double loop counted one at a time.
count = sum(N // divisor for divisor in range(1, N + 1))
print(count)
|
# Interactive checkout: read the purchase total, offer three payment plans
# and print the resulting amount (prompts and messages are in Portuguese).
print('{:=^50}'.format('Aono informática'))
valor = float(input('Digite o valor das compras? R$'))
print('''FORMA DE PAGAMENTO
[ 1 ] á Vista dinheiro/cheque com 10% de desconto
[ 2 ] 2x no cartão
[ 3 ] 3x ou mais no cartão com 20% de juros''')
aperte = int(input('Digite a opção desejada'))
print('-=-'*20)
print('Valor a pagar:')
if aperte == 1:
    # Cash/cheque: 10% discount on the total.
    total = valor - (valor * 10 / 100)
    print('Total a pagar é {} R$'.format(total))
elif aperte == 2:
    # Two card installments, no surcharge.
    total = valor / 2
    print('Você irá pagar em 2 vezes de {} R$'.format(total))
elif aperte == 3:
    # Three installments with 20% interest applied to the total first.
    total = (valor + (valor * 20/100)) / 3
    print('Você irá pagar {} R$ em 3 prestações '.format(total))
else:
    print('Opção inválida, tente novamente!')
print('-=-'*20)
print('Muito obrigado pela preferência, tenha um bom dia!')
print('-=-'*20)
|
class Pessoa:
    """Demo class with a single class-level attribute."""

    # Class attribute: shared by all instances until shadowed per-instance.
    nome = 'Nome Padrão'
p1 = Pessoa()
p2 = Pessoa()

p1.nome = "Marcelo"
print(p1.nome)
print(p1.__dict__)  # {'nome': 'Marcelo'}
# Assigning through an instance does NOT change the class attribute: it
# creates an attribute directly on that instance, shadowing the class one.

print('\n========================\n')

print(p2.nome)
print(p2.__dict__)  # {} — p2 still resolves `nome` from the class
|
def print_metrics(counter,
                  metrics,
                  summary_writer=None,
                  prefix=None,
                  model_name=""):
    """Push scalar metrics to a TensorBoard-style summary writer.

    Args:
        counter: global step recorded with every scalar.
        metrics: mapping of metric name -> numeric value; a 'cls_report'
            entry (a non-scalar classification report) is skipped.
        summary_writer: object exposing add_scalar(tag, value, step);
            when None nothing is written.
        prefix: tag prefix; required whenever summary_writer is given.
        model_name: leading component of every emitted tag.
    """
    if 'cls_report' in metrics:
        # Build a filtered copy so the caller's dict is left untouched
        # (replaces the copy()-then-del dance and `in metrics.keys()`).
        metrics = {k: v for k, v in metrics.items() if k != 'cls_report'}
    if summary_writer is not None:
        assert prefix is not None
        for key, value in metrics.items():
            summary_writer.add_scalar(f'{model_name}_{prefix}_{key}', value, counter)
|
class AudioFile:
    """Base audio file: concrete subclasses declare `ext`, the accepted
    filename extension."""

    def __init__(self, filename):
        # Reject filenames whose suffix does not match the subclass `ext`.
        if not filename.endswith(self.ext):
            raise Exception('Formato Inválido!')
        self.filename = filename
        # Removed the dead trailing `pass` left after the assignment.
class MP3File(AudioFile):
    # Accepts only filenames ending in 'mp3'.
    ext = 'mp3'
# Demo: '.wav' does not end with MP3File.ext, so this raises on purpose.
f = MP3File('flamingos.wav')
|
# Arguments that are used to create simulation objects.
#
# They are resolved as follows:
#
# 1. Numeric values are inlined.
# 2. List and Tuples are iterated over, creating a
#    separate simulation object for each value.
# 3. Strings are evaluated as previous keys of ARGS.
#    If it fails to find a key, the value is inlined.
#
# Example
# ARGS = {
#     'x': 10,                 # Evaluates to "x 10"
#     'y': [10, 20, 30],       # Create one simulation for
#                              # each value of y
#     'logfile': '~/mylog.txt' # As there is no previous entry with
#                              # the key `logfile`, the string
#                              # '~/mylog.txt' is inlined.
#                              # It evaluates to "logfile ~/mylog.txt"
#     'other_x': 'x',          # Because `x` is a previous key,
#                              # 'other_x' will have the same value
#                              # of x.
#                              # It evaluates to "other_x 10"
# }
ARGS = dict()

# Sequential arguments to be passed to the executable.
# To set keyword arguments, use `ARGS`
# Example:
#   SEQ_ARGS = [ '-one', '-two' ]
SEQ_ARGS = list()

# Executable to run for each simulation instance.
EXECUTABLE = None

# Redirect standard input
STDIN = None
# Redirect standard output
STDOUT = None
# Redirect standard error
STDERR = None

# Number of parallel instances used on the simulation.
# Internally, the implementation uses subprocesses to
# spawn instances. Avoid using more than the number of
# cpus available.
PARALLEL_INSTANCES = 1

# If True, pysim will capture all SIGINT interruptions and
# send them to pysim.core.signals.keyboard_interrupt.
CAPTURE_SIGINT = False
|
def entrada():
    """Read 'p n' on one stdin line, then n integers on the next.

    Returns the tuple (p, n, values) with values as a list of ints,
    exactly as the original element-by-element conversion loop produced.
    """
    p, n = map(int, input().split())
    valores = [int(token) for token in input().split()]
    return p, n, valores
def verifica(a, b, p):
    """Return True when the gap between two consecutive values exceeds p.

    Replaces the redundant ``if cond: return True / else: return False``
    with a direct boolean expression.
    """
    return abs(a - b) > p
def main():
    """Play one round: print GAME OVER if any adjacent pair differs by more
    than the limit p, otherwise print YOU WIN."""
    limite, quantidade, valores = entrada()
    estourou = False
    for pos in range(quantidade - 1):
        estourou = verifica(valores[pos], valores[pos + 1], limite)
        if estourou:
            print('GAME OVER')
            break
    if not estourou:
        print('YOU WIN')


main()
|
'''
Cryptanalysis of the Caesar cipher: recover the decryption key (and hence
the plaintext) by scoring every candidate shift with a chi-square test
against known English letter frequencies.
'''
# Relative frequencies of the letters 'a'..'z' in typical English text.
al_freq = [0.08167, 0.01492, 0.02782, 0.04253, 0.12702, 0.02228, 0.02015, 0.06094, 0.06966,
           0.00153, 0.00772, 0.04025, 0.02406, 0.06749, 0.07507, 0.01929, 0.00095, 0.05987,
           0.06327, 0.09056, 0.02758, 0.00978, 0.02360, 0.00150, 0.01974, 0.00074]
# chi_square[k-1] accumulates the chi-square statistic for candidate key k.
chi_square = [0] * 25
#Decryption of ciphertext with relative decryption key
def decrypt(ciphertext, s):
    """Decrypt a Caesar cipher with shift ``s``, preserving letter case.

    Fix: the original treated every non-uppercase character as a lowercase
    letter, so digits, spaces and punctuation were garbled. Non-alphabetic
    characters are now passed through unchanged.
    """
    pltext = ""
    for char in ciphertext:
        if char.isupper():
            pltext += chr((ord(char) - s - 65) % 26 + 65)
        elif char.islower():
            pltext += chr((ord(char) - s - 97) % 26 + 97)
        else:
            # Digits, whitespace and punctuation are left as-is.
            pltext += char
    return pltext
#Finding the key by applying Chi - Square method
def find_key(ciphertext, k):
    """Accumulate into the global ``chi_square[k-1]`` the chi-square score of
    ``ciphertext`` (expected to contain uppercase letters only) against the
    English letter frequencies in the global ``al_freq``.

    Lower scores indicate more English-like text, so the best key minimizes
    ``chi_square[k-1]``.
    """
    l = len(ciphertext)
    cipher_freq = [0] * 26
    ci = [0] * 26
    ei = [0] * 26
    #ci and ei are Current value of letter and Expected value of letter.
    for i in range(65, 91):
        j = i-65
        cipher_freq[j] = ciphertext.count(chr(i))
        ci[j] = cipher_freq[j]
        ei[j] = al_freq[j] * l
    #Calculating Chi - Square value for every plain text with relative decryption key
    div = 0
    for m in range(0, l):
        # One (observed - expected)^2 / expected term is added per character
        # occurrence, i.e. a letter appearing c times contributes c terms.
        num = (ci[int(ord(ciphertext[m]) - 65) % 26] - ei[int(ord(ciphertext[m]) - 65) % 26]) ** 2
        den = ei[int(ord(ciphertext[m]) - 65) % 26]
        div = num / den
        chi_square[k-1] += div
    for n in range(0, 26):
        # Letters absent from the ciphertext still contribute their full
        # expected count to the score.
        if ci[n] == 0:
            chi_square[k-1] += ei[n]
#cipher = "aoljhlzhyjpwolypzvulvmaollhysplzaruvduhukzptwslzajpwolyzpapzhafwlvmzbizapabapvujpwolypudopjolhjoslaalypuaolwshpualeapzzopmalkhjlyahpuubtilyvmwshjlzkvduaolhswohila"
#cipher = "YMJHFJXFWHNUMJWNXTSJTKYMJJFWQNJXYPSTBSFSIXNRUQJXYHNUMJWX"
print("\nEnter the cipher text : ", end="")
cipher = str(input())
#Calculating 25 Decrypted(Plain) text with key value 1 to 25
for k in range(1, 26):
    ciphertext = decrypt(cipher, k)
    #print(ciphertext + str(k))
    # find_key scores uppercase letters only, so normalize first.
    ciphertext = ciphertext.upper()
    find_key(ciphertext, k)
#Getting the index of minimum chi - square value which is our Decryption key.
# argmin over the 25 scores; +1 converts the 0-based index into the key.
index = min(range(25), key = chi_square.__getitem__)
index += 1
index = int(index)
print("\nFound Decryption Key : " + str(index))
print("\nThe Decrypted Text (Plain Text) : ", end="")
print(decrypt(cipher, index))
print("\n")
|
def main():
    """Log into the portal at http://192.168.100.100:8090/ via a Chrome session.

    NOTE(review): relies on ``webdriver``, ``WebDriverWait``, ``EC`` and ``By``
    from selenium being imported elsewhere, and this function is never called
    in the visible part of the file.
    """
    usernameStr = input("Enter username:")
    passwordStr = input("Enter password:")
    browser = webdriver.Chrome()
    browser.get(('http://192.168.100.100:8090/'))
    # fill in username and hit the next button
    username = browser.find_element_by_id('username')
    username.send_keys(usernameStr)
    # NOTE(review): despite its name, this locates the element with id
    # 'password', not a "next" button — confirm the intended element id.
    nextButton = browser.find_element_by_id('password')
    nextButton.click()
    # wait for transition then continue to fill items
    password = WebDriverWait(browser, 10).until(EC.presence_of_element_located((By.NAME, "password")))
    password.send_keys(passwordStr)
    signInButton = browser.find_element_by_id('loginbutton')
    signInButton.click()
|
class LinearSchedule(object):
    r"""Linearly anneal a value from ``initial`` to ``final`` over ``N`` steps.

    Timesteps before ``start`` return the initial value; timesteps at or
    beyond ``start + N`` return the final value, held constant thereafter.

    Typical uses:

    * Epsilon-greedy exploration decay: hold :math:`1.0` for :attr:`start`
      steps, decay linearly to :attr:`final` over :attr:`N` steps, then stay
      at :attr:`final`.
    * The beta parameter of prioritized experience replay.

    For learning-rate decay, use PyTorch ``optim.lr_scheduler`` instead.

    Example:
        >>> scheduler = LinearSchedule(initial=1.0, final=0.1, N=3, start=0)
        >>> [scheduler(i) for i in range(6)]
        [1.0, 0.7, 0.4, 0.1, 0.1, 0.1]

    Args:
        initial (float): initial value
        final (float): final value
        N (int): number of scheduling timesteps
        start (int, optional): the timestep to start the scheduling. Default: 0
    """
    def __init__(self, initial, final, N, start=0):
        assert N > 0, f'expected N as positive integer, got {N}'
        assert start >= 0, f'expected start as non-negative integer, got {start}'
        self.initial = initial
        self.final = final
        self.N = N
        self.start = start
        # Most recently computed value; None until the first call.
        self.x = None

    def __call__(self, x):
        r"""Return (and cache) the scheduled value at timestep ``x``.

        Args:
            x (int): the current timestep.

        Returns:
            float: current value of the scheduling.
        """
        assert isinstance(x, int) and x >= 0, f'expected as a non-negative integer, got {x}'
        if x >= self.start + self.N:
            current = self.final
        elif x == 0 or x < self.start:
            current = self.initial
        else:
            # Linear interpolation over the N scheduling steps.
            delta = self.final - self.initial
            ratio = (x - self.start)/self.N
            current = self.initial + ratio*delta
        self.x = current
        return current

    def get_current(self):
        r"""Return the value produced by the most recent call (None if never called)."""
        return self.x
|
"""
There are several cards arranged in a row, and each card has an associated
number of points The points are given in the integer array cardPoints.
In one step, you can take one card from the beginning or from the end of
the row. You have to take exactly k cards.
Your score is the sum of the points of the cards you have taken.
Given the integer array cardPoints and the integer k, return the maximum
score you can obtain.
Example:
Input: cardPoints = [1,2,3,4,5,6,1], k = 3
Output: 12
Explanation: After the first step, your score will always be 1. However,
choosing the rightmost card first will maximize your total
score. The optimal strategy is to take the three cards on the
right, giving a final score of 1 + 6 + 5 = 12.
Constraints:
- 1 <= cardPoints.length <= 10^5
- 1 <= cardPoints[i] <= 10^4
- 1 <= k <= cardPoints.length
"""
#Difficulty: Medium
#40 / 40 test cases passed.
#Runtime: 464 ms
#Memory Usage: 26.8 MB
#Runtime: 464 ms, faster than 76.94% of Python3 online submissions for Maximum Points You Can Obtain from Cards.
#Memory Usage: 26.8 MB, less than 89.35% of Python3 online submissions for Maximum Points You Can Obtain from Cards.
class Solution:
    def maxScore(self, cardPoints: List[int], k: int) -> int:
        """Maximum sum of k cards taken from either end of the row.

        Sliding-window complement: taking k cards from the ends is
        equivalent to leaving a contiguous block of n-k cards untouched,
        so we minimize the kept block's sum.
        """
        n = len(cardPoints)
        grand_total = sum(cardPoints)
        # Baseline: take all k cards from the front.
        best = sum(cardPoints[:k])
        # Sum of the n-k cards left behind when 0 cards come from the front.
        kept = sum(cardPoints[0:n - k])
        for offset in range(k):
            best = max(best, grand_total - kept)
            # Slide the kept block one step to the right.
            kept += cardPoints[n - k + offset] - cardPoints[offset]
        return best
|
class DkuApplication(object):
    """Describes a deep-learning application: model factory, preprocessing
    and pretrained-weights locations.

    Args:
        name: identifier of the application (an enum-like object whose
            ``.value`` is returned by :meth:`jsonify`).
        label: human-readable label.
        source: origin of the model, e.g. "keras".
        model_func: callable that builds the underlying model.
        preprocessing: preprocessing function associated with the model.
        weights: mapping from training-dataset name (e.g. "imagenet") to
            the URL of the corresponding weights.
        input_shape: optional expected input shape.
    """

    def __init__(self, name, label, source, model_func, preprocessing, weights, input_shape=None):
        self.name = name
        self.label = label
        self.source = source
        self.model_func = model_func
        self.preprocessing = preprocessing
        self.input_shape = input_shape
        self.weights = weights
        # Lazily populated by callers; None until the model is built.
        self.model = None

    def is_keras_application(self):
        """Return True when the model comes from the Keras applications zoo."""
        return self.source == "keras"

    def get_weights_url(self, trained_on):
        """Return the weights URL for the dataset ``trained_on``.

        Raises:
            AssertionError: if ``trained_on`` is not a key of ``weights``.
        """
        # Fixed typo in the error message ("Avilable" -> "Available").
        assert trained_on in self.weights, "You provided a wrong field 'trained_on'. Available are {}.".format(
            str(self.weights.keys())
        )
        return self.weights.get(trained_on)

    def jsonify(self):
        """Return a JSON-serializable representation (the ``.value`` of the name)."""
        return self.name.value
|
# Challenge 1: print the words that start with 's' (case-insensitive).
print("Challenge 1 : ")
st = 'Print only the words that start with s in this sentence'
for token in st.split():
    if token.startswith(('s', 'S')):
        print(token)

# Challenge 2: even numbers from 0 to 10 — once as a list, then one per line.
l1 = list(range(0, 11, 2))
print(l1)
for even in l1:
    print(even)

# Challenge 3: numbers in [1, 50] divisible by 3, via a list comprehension.
print("Challenge 3 : ")
list1 = [n for n in range(1, 51) if n % 3 == 0]
print(list1)

# Challenge 4: flag the words with an even number of letters.
st1 = 'Print every word in this sentence that has an even number of letters'
print("Challenge 4 : ")
for token in st1.split():
    if len(token) % 2 == 0:
        print(f"{token}: even")

# FizzBuzz from 1 to 100: multiples of 15 -> FizzBuzz, of 3 -> Fizz,
# of 5 -> Buzz, otherwise the number itself.
for n in range(1, 101):
    if n % 15 == 0:
        print("FizzBuzz")
    elif n % 3 == 0:
        print("Fizz")
    elif n % 5 == 0:
        print("Buzz")
    else:
        print(n)

# Final challenge: the first letter of every word.
st2 = 'Create a list of the first letters of every word in this string'
list1 = [word[0] for word in st2.split()]
print(list1)
# Mapping of numeric object IDs to display names. The entries ("Elite
# Mangudai", "Trebuchet", "Town Center", relics, terrain, projectiles, ...)
# suggest Age of Empires II scenario/unit IDs — TODO confirm against the
# game's data files. Suffixes like ", D"/"disabled" appear to mark dead or
# disabled variants; "Proj, X" entries appear to be projectile graphics.
eUnit = {
    0: "Academy, Disabled",
    1: "Gator, disabled",
    3: "Archer, D",
    4: "Archer",
    5: "Hand Cannoneer",
    6: "Elite Skirmisher",
    7: "Skirmisher",
    8: "Longbowman",
    9: "Arrow",
    10: "Archery Range 3",
    11: "Mangudai",
    12: "Barracks",
    13: "Fishing Ship",
    14: "Archery Range 4",
    15: "Junk",
    16: "Bombard Cannon, D",
    17: "Trade Cog",
    18: "Blacksmith 3",
    19: "Blacksmith 4",
    20: "Barracks 4",
    21: "War Galley",
    22: "Berserk, D",
    23: "Battering Ram, D",
    24: "Crossbowman",
    25: "Teutonic Knight",
    26: "Crossbowman, D",
    27: "Cataphract, D",
    28: "Cho-Ko-Nu, D",
    29: "Trading Cog, D",
    30: "Monastery 2",
    31: "Monastery 3",
    32: "Monastery 4",
    33: "Castle 4",
    34: "Cavalry Archer, D",
    35: "Battering Ram",
    36: "Bombard Cannon",
    37: "LANCE",
    38: "Knight",
    39: "Cavalry Archer",
    40: "Cataphract",
    41: "Huskarl",
    42: "Trebuchet (Unpacked)",
    43: "Deer, D",
    44: "Mameluke, D",
    45: "Dock",
    46: "Janissary",
    47: "Dock 3",
    48: "Boar",
    49: "Siege Workshop",
    50: "Farm",
    51: "Dock 4",
    52: "Fish 1, Disabled",
    53: "Fish (Perch)",
    54: "Proj, VOL",
    55: "Fishing Ship, D",
    56: "Fisher, M",
    57: "Fisher, F",
    58: "Fisher, M D",
    59: "Forage Bush",
    60: "Fisher, F D",
    61: "Galley, D (Canoe?)",
    62: "Huskarl (E?), D",
    63: "Gate, AA3",
    64: "Gate, AA2",
    65: "Deer",
    66: "Gold Mine",
    67: "Gate, AB3",
    68: "Mill",
    69: "Fish, Shore",
    70: "House",
    71: "Town Center 2",
    72: "Wall, Palisade",
    73: "Chu Ko Nu",
    74: "Militia",
    75: "Man At Arms",
    76: "Heavy Swordsman",
    77: "Long Swordsman",
    78: "Gate, AB2",
    79: "Watch Tower",
    80: "Gate, AC3",
    81: "Gate, AC2",
    82: "Castle",
    83: "Villager, M",
    84: "Market",
    85: "Gate, BA3",
    86: "Stable 3",
    87: "Archery Range",
    88: "Gate, BA2",
    89: "Dire Wolf",
    90: "Gate, BB3",
    91: "Gate, BB2",
    92: "Gate, BC3",
    93: "Spearman",
    94: "Berserk 2",
    95: "Gate, BC2",
    96: "Hawk",
    97: "Arrow 1",
    98: "Hand Cannoneer, D",
    99: "Heavy Swordsman, D",
    100: "Elite Skirmisher, D",
    101: "Stable",
    102: "Stone Mine",
    103: "Blacksmith",
    104: "Monastery",
    105: "Blacksmith 2",
    106: "Infiltrator, D",
    107: "Janissary, D",
    108: "Junk, D",
    109: "Town Center",
    110: "Trade Workshop",
    111: "Knight, D",
    112: "Flare",
    113: "Lance, D",
    114: "Longboat, D",
    115: "Longbowman, D",
    116: "Market 3",
    117: "Wall, Stone",
    118: "Builder, M",
    119: "Fisherman, disabled",
    120: "Forager, M",
    121: "Mangonel, D",
    122: "Hunter, M",
    123: "Lumberjack, M",
    124: "Stone Miner, M",
    125: "Monk",
    126: "Wolf",
    127: "Explorer, Old",
    128: "Trade Cart, Empty",
    129: "Mill 2",
    130: "Mill 3",
    131: "Mill 4",
    132: "Barracks 3",
    133: "Dock 2",
    134: "Monk, D",
    135: "Mangudai, D",
    136: "War Elephant, D",
    137: "Market 4",
    138: "OUTLW, D",
    139: "Paladin, D",
    140: "Spearman, D",
    141: "Town Center 3",
    142: "Town Center 4",
    143: "Rubble 1 X 1",
    144: "Rubble 2 X 2",
    145: "Rubble 3 X 3",
    146: "Rubble 4 X 4",
    147: "Rubble 6",
    148: "Rubble 5 X 5",
    149: "Scorpion, D",
    150: "Siege Workshop 4",
    151: "Samurai, D",
    152: "Militia, D",
    153: "Stable 4",
    154: "Man-At-Arms, D",
    155: "Wall, Fortified",
    156: "Repairer, M",
    157: "Throwing Axeman, D",
    158: "OUTLW",
    159: "Relic Cart",
    160: "Richard The Lionharted",
    161: "The Black Prince",
    162: "FLAGX",
    163: "Friar Tuck",
    164: "Sherrif Of Notingham",
    165: "Charlemagne",
    166: "Roland",
    167: "Belisarius",
    168: "Theodoric The Goth",
    169: "Aethelfirth",
    170: "Siegfried",
    171: "Erik The Red",
    172: "Tamerlane",
    173: "King Arthur",
    174: "Lancelot",
    175: "Gawain",
    176: "Mordred",
    177: "Archbishop",
    178: "Trade Cart, D",
    179: "Trade Workshop 4",
    180: "Long Swordsman, D",
    181: "Teutonic Knight, D",
    182: "TMISA",
    183: "TMISB",
    184: "TMISC",
    185: "TMISD",
    186: "TMISE",
    187: "TMISF",
    188: "TMISG",
    189: "TMISH",
    190: "TMISI",
    191: "TMISJ",
    192: "TMISK",
    193: "TMISL",
    194: "Trebuchet, D",
    195: "Kitabatake",
    196: "Minamoto",
    197: "Alexander Nevski",
    198: "El Cid",
    199: "Fish Trap",
    200: "Robin Hood",
    201: "FLR_R",
    202: "Rabid Wolf",
    203: "Rabid Wolf, D",
    204: "Trade Cart, Full",
    205: "Trade Cart, Full D",
    206: "VMDL",
    207: "VMDL, D",
    208: "TWAL",
    209: "University",
    210: "University 4",
    211: "Villager, F D",
    212: "Builder, F",
    213: "Builder, F D",
    214: "Farmer, F",
    215: "Farmer, F D",
    216: "Hunter, F",
    217: "Hunter, F D",
    218: "Lumberjack, F",
    219: "Lumberjack, F D",
    220: "Stone Miner, F",
    221: "Stone Miner, F D",
    222: "Repairer, F",
    223: "Repairer, F D",
    224: "Villager, M D",
    225: "Builder, M D",
    226: "Farmer, M D",
    227: "Hunter, M D",
    228: "Lumberjack, M D",
    229: "Stone Miner, M D",
    230: "Repairer, M D",
    232: "Woad Raider",
    233: "Woad Raider, D",
    234: "Guard Tower",
    235: "Keep",
    236: "Bombard Tower",
    237: "Wolf, D",
    238: "Skirmisher, D",
    239: "War Elephant",
    240: "TERRC",
    241: "Cracks",
    242: "Stone, Catapult",
    243: "DOPL",
    244: "Stone, Catapult F",
    245: "Bolt (Scorpion Proj.)",
    246: "Bolt, F",
    247: "Smoke",
    248: "Pile Of Stone",
    249: "POREX",
    250: "Longboat",
    251: "Goldminer, Disabled",
    252: "Pile Of Gold",
    253: "Pile Of Wood",
    254: "PILE1",
    255: "PILE2",
    256: "PILE3",
    257: "PILE4",
    258: "PILE6",
    259: "Farmer, M",
    260: "Fish3, Disabled",
    261: "PILE8",
    262: "Pile Of Food",
    263: "Fish4, Disabled",
    264: "Cliff 1",
    265: "Cliff 2",
    266: "Cliff 3",
    267: "Cliff 4",
    268: "Cliff 5",
    269: "Cliff 6",
    270: "Cliff 7",
    271: "Cliff 8",
    272: "Cliff 9",
    273: "Cliff 10",
    274: "Flare 2",
    276: "Wonder",
    278: "Fishtrap, D",
    279: "Scorpion",
    280: "Mangonel",
    281: "Throwing Axeman",
    282: "Mameluke",
    283: "Cavalier",
    284: "Tree TD",
    285: "Relic",
    286: "Monk With Relic",
    287: "British Relic",
    288: "Byzantine Relic",
    289: "Chinese Relic",
    290: "Frankish Relic",
    291: "Samurai",
    292: "Gothic Relic",
    293: "Villager, F",
    294: "Japanese Relic",
    295: "Persian Relic",
    296: "Saracen Relic",
    297: "Teutonic Relic",
    298: "Turkish Relic",
    299: "Infiltrator",
    300: "Monk With British Relic",
    301: "Monk With Byzantine Relic",
    302: "Monk With Chinese Relic",
    303: "Monk With Frankish Relic",
    304: "Monk With Gothic Relic",
    305: "Monk With Japanese Relic",
    306: "Monk With Persian Relic",
    307: "Monk With Saracen Relic",
    308: "Monk With Teutonic Relic",
    309: "Monk With Turkish Relic",
    310: "Mountain 1",
    311: "Mountain 2",
    312: "Arrow 2",
    313: "Stone, Treb",
    314: "Stone, Mangonel",
    315: "Arrow 3",
    316: "Arrow 4",
    317: "Arrow 5",
    318: "Arrow 6",
    319: "Arrow 7",
    320: "Arrow 8",
    321: "Arrow 9",
    322: "Arrow 10",
    323: "Stone, Catapult 1",
    324: "Stone, Catapult 2",
    325: "Stone, Catapult 3",
    326: "Stone, Catapult 4",
    327: "Stone, Catapult 5",
    328: "Arrow, F",
    329: "Camel",
    330: "Heavy Camel",
    331: "Trebuchet, P",
    332: "Flare 3",
    333: "Deer, Nomeat",
    334: "Flowers 1",
    335: "Flowers 2",
    336: "Flowers 3",
    337: "Flowers 4",
    338: "Path 4",
    339: "Path 1",
    340: "Path 2",
    341: "Path 3",
    342: "TERRU",
    343: "TERRV",
    344: "TERRW",
    345: "Ruins",
    346: "TERRY",
    347: "TERRZ",
    348: "Forest, Bamboo",
    349: "Forest, Oak",
    350: "Forest, Pine",
    351: "Forest, Palm",
    352: "OREMN",
    353: "Forager, M D",
    354: "Forager, F",
    355: "Forager, F D",
    356: "Board, D",
    357: "Farm, D Crash",
    358: "Pikeman",
    359: "Halberdier",
    360: "Arrow 2, F",
    363: "Proj, Archer",
    364: "Proj, Crossbowman",
    365: "Proj, Skirmisher",
    366: "Proj, Elite Skirmisher",
    367: "Proj, Scorpion",
    368: "Proj, Bombard Cannon",
    369: "Proj, Mangonel",
    370: "Fish 2, Disabled",
    371: "Proj, Trebuchet",
    372: "Proj, Galleon",
    373: "Proj, War Galley",
    374: "Proj, Cannon Galleon",
    375: "Proj, Crossbowman F",
    376: "Proj, Skirmisher F",
    377: "Proj, Elite Skirmisher F",
    378: "Proj, Scorpion F",
    380: "Proj, H Cannoneer",
    381: "Bolt, F",
    385: "Bolt 1, F",
    389: "Sea Rocks 1",
    390: "TERRB",
    391: "TERRD",
    392: "TERRE",
    393: "TERRF",
    394: "TERRH",
    395: "TERRI",
    396: "Sea Rocks 2",
    397: "TERRK",
    398: "TERRL",
    399: "Tree A",
    400: "Tree B",
    401: "Tree C",
    402: "Tree D",
    403: "Tree E",
    404: "Tree F",
    405: "Tree G",
    406: "Tree H",
    407: "Tree I",
    408: "Tree J",
    409: "Tree K",
    410: "Tree L",
    411: "Forest Tree",
    413: "Snow Pine Tree",
    414: "Jungle Tree",
    415: "Stump",
    416: "Debris",
    417: "Dust C",
    418: "TROCK",
    419: "Debris B",
    420: "Cannon Galleon",
    421: "Cannon Galleon, D",
    422: "Capped Ram",
    423: "Capped Ram, D",
    424: "Charles Martel",
    425: "Charles Martel, D",
    426: "Harald Hardraade",
    427: "Harald Hardraade, D",
    428: "Hrolf The Ganger",
    429: "Hrolf The Ganger, D",
    430: "Joan The Maid",
    431: "Joan The Maid, D",
    432: "William Wallace",
    433: "William Wallace, D",
    434: "King",
    435: "King, D",
    436: "OMTBO",
    437: "OMTBO, D",
    438: "STRBO",
    439: "STRRBO, D",
    440: "Petard",
    441: "Hussar",
    442: "Galleon",
    443: "Galleon, Dead",
    444: "PTWC",
    445: "Church 4",
    446: "Port",
    447: "Purple Spots",
    448: "Scout Cavalry",
    449: "Scout Cavalry, D",
    450: "Marlin 1",
    451: "Marlin 2",
    452: "DOLP3",
    453: "DOLP4",
    454: "DOLP5",
    455: "Fish, Dorado",
    456: "Fish, Salmon",
    457: "Fish, Tuna",
    458: "Fish, Snapper",
    459: "FISH5",
    460: "WHAL1",
    461: "WHAL2",
    462: "Proj, Mangonel F2",
    463: "House 2",
    464: "House 3",
    465: "House 4",
    466: "Proj, Archer F",
    468: "Projectile, Mangonel F",
    469: "Projectile, Treb F",
    470: "Proj, Galleon F",
    471: "Proj, War Galley F",
    473: "Two Handed Swordsman",
    474: "Heavy Cavalry Archer",
    475: "Proj, HAR F",
    476: "Proj, Harold Haraade F",
    477: "Proj, HAR",
    478: "Proj, Harold Haraade",
    479: "PMANG",
    480: "Hussar, D",
    481: "Town Center 3A",
    482: "Town Center 3B",
    483: "Town Center 3C",
    484: "Town Center 3X",
    485: "Arrow, Town Center",
    487: "Gate, AX2",
    488: "Gate, AX3",
    490: "Gate, BX2",
    491: "Gate, BX3",
    492: "Arbalest",
    493: "Adv Heavy Crossbowman",
    494: "Camel, D",
    495: "Heavy Camel, D",
    496: "Arbalest, D",
    497: "AH Crossbowman, D",
    498: "Barracks 2",
    499: "Torch",
    500: "Two-Handed Swordsman, D",
    501: "Pikeman, D",
    502: "Halberdier, D",
    503: "Proj, Watch Tower",
    504: "Proj, Guard Tower",
    505: "Proj, Keep",
    506: "Proj, Bombard Tower",
    507: "Proj, Arbalest",
    508: "Proj, AH Crossbowman",
    509: "Proj, Villager",
    510: "Proj, Cho Ko Nu",
    511: "Proj, Longbowman",
    512: "Proj, Longboat",
    513: "Proj, MSU",
    514: "Proj, MPC",
    515: "Proj, Axeman",
    516: "Proj, Watch Tower F",
    517: "Proj, Guard Tower F",
    518: "Proj, Keep F",
    519: "Proj, Arbalest F",
    520: "Proj, AH Crossbowman F",
    521: "Proj, Villager F",
    522: "Proj, Cho Ko Nu F",
    523: "Proj, Longbowman F",
    524: "Proj, Longboat F",
    525: "Proj, MPC F",
    526: "Proj, MSU F",
    527: "Demolition Ship",
    528: "Heavy Demolition Ship",
    529: "Fire Ship",
    530: "Elite Longbowman",
    531: "Elite Throwing Axeman",
    532: "Fast Fire Ship",
    533: "Elite Longboat",
    534: "Elite Woad Raider",
    535: "BDGAL",
    536: "ABGAL",
    537: "Proj, FRG",
    538: "Proj, HFG",
    539: "Galley",
    540: "Proj, Galley",
    541: "Proj, Galley F",
    542: "Heavy Scorpion",
    543: "Heavy Scorpion, D",
    544: "FLDOG",
    545: "Transport Ship",
    546: "Light Cavalry",
    547: "Light Cavalry, D",
    548: "Siege Ram",
    549: "Siege Ram, D",
    550: "Onager",
    551: "Proj, Onager",
    552: "Proj, Onager F",
    553: "Elite Cataphract",
    554: "Elite Teutonic Knight",
    555: "Elite Huskarl",
    556: "Elite Mameluke",
    557: "Elite Janissary",
    558: "Elite War Elephant",
    559: "Elite Chu Ko Nu",
    560: "Elite Samurai",
    561: "Elite Mangudai",
    562: "Lumber Camp",
    563: "Lumber Camp 2",
    564: "Lumber Camp 3",
    565: "Lumber Camp 4",
    566: "WCTWR",
    567: "Champion",
    568: "Champion, Dead",
    569: "Paladin",
    570: "Paladin, D",
    571: "RFARC",
    572: "RFARC, D",
    573: "RFSWD",
    574: "RFSWD, D",
    575: "RCSWD",
    576: "RCSWD, D",
    577: "RCARC",
    578: "RCARC, D",
    579: "Gold Miner, M",
    580: "Gold Miner, M D",
    581: "Gold Miner, F",
    582: "Gold Miner, F D",
    583: "Genitour",
    584: "Mining Camp",
    585: "Mining Camp 2",
    586: "Mining Camp 3",
    587: "Mining Camp 4",
    588: "Siege Onager",
    589: "Siege Onager, D",
    590: "Shepherd, F",
    591: "Shepherd, F D",
    592: "Shepherd, M",
    593: "Shepherd, M D",
    594: "Sheep",
    595: "Sheep, D",
    596: "Elite Genitour",
    597: "Town Center 4X",
    598: "Outpost",
    599: "Cathedral",
    600: "Flag A",
    601: "Flag B",
    602: "Flag C",
    603: "Flag D",
    604: "Flag E",
    605: "Bridge A Top",
    606: "Bridge A Middle",
    607: "Bridge A Bottom",
    608: "Bridge B Top",
    609: "Bridge B Middle",
    610: "Bridge B Bottom",
    611: "Town Center 4A",
    612: "Town Center 4B",
    613: "Town Center 4C",
    614: "Town Center 2A",
    615: "Town Center 2B",
    616: "Town Center 2C",
    617: "Town Center 2X",
    618: "Town Center 1A",
    619: "Town Center 1B",
    620: "Town Center 1C",
    621: "Town Center 1X",
    622: "D Iron Boar",
    623: "Rock",
    624: "Pavilion 1",
    625: "Pavilion 3",
    626: "Pavilion 2",
    627: "Proj, Heavy Scorpion",
    628: "Proj, Heavy Scorpion F",
    629: "Joan Of Arc",
    630: "Joan Of Arc, D",
    631: "Subotai, D",
    632: "Frankish Paladin",
    633: "Frankish Paladin, D",
    634: "Sieur De Metz",
    635: "Sieur De Metz, D",
    636: "Sieur Bertrand",
    637: "Sieur Bertrand, D",
    638: "Duke D'Alencon",
    639: "Duke D'Alencon, D",
    640: "La Hire",
    641: "La Hire, D",
    642: "Lord De Graville",
    643: "Lord De Graville, D",
    644: "Jean De Lorrain",
    645: "Jean De Lorrain, D",
    646: "Constable Richemont",
    647: "Constable Richemont, D",
    648: "Guy Josselyne",
    649: "Guy Josselyne, D",
    650: "Jean Bureau",
    651: "Jean Bureau, D",
    652: "Sir John Fastolf",
    653: "Sir John Fastolf, D",
    654: "S_SMOKE",
    655: "Mosque",
    656: "Proj, MNB",
    657: "Proj, GP1",
    658: "Proj, MNB F",
    659: "Gate, CA2",
    660: "Gate, CA3",
    661: "Gate, CB2",
    662: "Gate, CB3",
    663: "Gate, CC2",
    664: "Gate, CC3",
    665: "Gate, CX2",
    666: "Gate, CX3",
    667: "Gate, DA2",
    668: "Gate, DA3",
    669: "Gate, DB2",
    670: "Gate, DB3",
    671: "Gate, DC2",
    672: "Gate, DC3",
    673: "Gate, DX2",
    674: "Gate, DX3",
    675: "Onager, D",
    676: "Proj, FFG F",
    677: "S_Fire",
    678: "Reynald De Chatillon",
    679: "Reynald De Chatillon, D",
    680: "Master Of The Templar",
    681: "Master Of The Templar, D",
    682: "Bad Neighbor",
    683: "Gods Own Sling",
    684: "The Accursed Tower",
    685: "The Tower Of Flies",
    686: "Archer Of The Eyes",
    687: "Archer Of The Eyes, D",
    688: "Piece Of The True Cross",
    689: "Pyramid",
    690: "Dome Of The Rock",
    691: "Elite Cannon Galleon",
    692: "Berserk",
    693: "Berserk, D",
    694: "Elite Berserk",
    695: "Elite Berserk, D",
    696: "Great Pyramid",
    697: "Flare 4",
    698: "Subotai",
    699: "Subotai, D",
    700: "Hunting Wolf",
    701: "Hunting Wolf, D",
    702: "Kushluk",
    703: "Kushluk, D",
    704: "Shah",
    705: "Shah, D",
    706: "Saboteur",
    707: "Ornlu The Wolf",
    708: "Ornlu The Wolf, D",
    709: "Cactus",
    710: "Skeleton",
    711: "Rugs",
    712: "Yurt",
    713: "Yurt 2",
    714: "Yurt 3",
    715: "Yurt 4",
    716: "Yurt 5",
    717: "Yurt 6",
    718: "Yurt 7",
    719: "Yurt 8",
    720: "Nine Bands",
    721: "Shipwreck",
    722: "Shipwreck 2",
    723: "Crater",
    724: "Genitour, Dead",
    725: "Jaguar Warrior",
    726: "Elite Jaguar Warrior",
    728: "Ice Spots",
    729: "Gods Own Sling, Packed",
    730: "Bad Neighbor, Packed",
    731: "Genghis Khan",
    732: "Genghis Khan, D",
    733: "Emperor In A Barrel",
    734: "Emperor In A Barrel, D",
    735: "Trebuchet, Packed D",
    736: "Proj, Mameluke",
    737: "Stump B",
    738: "Bridge A Middle Broken",
    739: "Bridge A Middle Broken 2",
    740: "Bridge A Middle Broken 3",
    741: "Bridge B Middle Broken",
    742: "Bridge B Middle Broken 2",
    743: "Bridge B Middle Broken 3",
    744: "Mountain 3",
    745: "Mountain 4",
    746: "Proj, Castle",
    747: "Proj, Castle Flaming",
    748: "Cobra Car",
    749: "Cobra, D",
    750: "Jaguar Warrior, D",
    751: "Eagle Warrior",
    752: "Elite Eagle Warrior",
    754: "Eagle Warrior, D",
    755: "Tarkan",
    756: "Tarkan, D",
    757: "Elite Tarkan",
    759: "Huskarl",
    760: "Huskarl, Dead",
    761: "Elite Huskarl",
    762: "Elite Huskarl, Dead",
    763: "Plumed Archer",
    764: "Plumed Archer, D",
    765: "Elite Plumed Archer",
    766: "Elite Plumed Archer, D",
    767: "Proj, Elite Cannon Galleon",
    771: "Conquistador",
    772: "Conquistador, D",
    773: "Elite Conquistador",
    774: "Elite Conquistador, D",
    775: "Missionary",
    776: "Missionary, D",
    777: "Attila The Hun",
    778: "Atilla The Hun, D",
    779: "Bleda The Hun",
    780: "Bleda The Hun, D",
    781: "Pope Leo I",
    782: "Pope Leo I, D",
    783: "Scythian Wild Woman",
    784: "Scythian Wild Woman, D",
    785: "Sea Tower",
    786: "Proj, Sea Tower",
    787: "Proj, Sea Tower Flaming",
    788: "Sea Wall",
    789: "Sea Gate, AA",
    790: "Sea Gate, AB",
    791: "Sea Gate, AC",
    792: "Sea Gate, AX",
    793: "Sea Gate, BA",
    794: "Sea Gate, BB",
    795: "Sea Gate, BC",
    796: "Sea Gate, BX",
    797: "Sea Gate, CA",
    798: "Sea Gate, CB",
    799: "Sea Gate, CC",
    800: "Sea Gate, CX",
    801: "Sea Gate, DA",
    802: "Sea Gate, DB",
    803: "Sea Gate, DC",
    804: "Sea Gate, DX",
    805: "S Dock",
    806: "S Dock 2",
    807: "S Dock 3",
    808: "S Dock 4",
    809: "Stump 2",
    810: "Iron Boar",
    811: "Iron Boar, D",
    812: "Jaguar",
    813: "Jaguar, D",
    814: "Horse",
    815: "Horse, D",
    816: "Macaw",
    817: "Statue",
    818: "Plants",
    819: "Sign",
    820: "Grave",
    821: "Head",
    822: "Javelina",
    823: "Javelina, D",
    824: "El Cid Campeador",
    825: "El Cid Campeador, D",
    826: "Monument (KotH)",
    827: "War Wagon",
    828: "War Wagon, D",
    829: "Elite War Wagon",
    830: "Elite War Wagon, D",
    831: "Turtle Ship",
    832: "Elite Turtle Ship",
    833: "Turkey",
    834: "Turkey, Dead",
    835: "Wild Horse",
    836: "Wild Horse, D",
    837: "Map Revealer",
    838: "King Sancho",
    839: "King Sancho, D",
    840: "King Alfonso",
    841: "King Alfonso, D",
    842: "Imam",
    843: "Imam, D",
    844: "Admiral Yi Sun Shin",
    845: "Nobunaga",
    846: "Nobunaga, D",
    847: "Henry V",
    848: "Henry V, D",
    849: "William The Conqueror",
    850: "William The Conqueror, D",
    851: "Flag, ES",
    852: "Scythian Scout",
    853: "Scythian Scout, D",
    854: "Torch 2",
    855: "Old Stone Head",
    856: "Roman Ruins",
    857: "Hay Stack",
    858: "Broken Cart",
    859: "Flower Bed",
    860: "Furious The Monkey Boy",
    861: "Furious The Monkey Boy, D",
    862: "Stormy Dog",
    863: "Rubble 1 X 1",
    864: "Rubble 2 X 2",
    865: "Rubble 3 X 3"
}
#Return the middle character of the word. If the word's length is odd, return the middle character. If the word's length is even, return the middle 2 characters.
def get_middle(s):
    """Return the middle character(s) of ``s``.

    Odd-length strings yield the single middle character; even-length
    strings yield the middle two characters (empty string for "").

    Fix: the file defined ``get_middle`` twice, the second definition
    silently shadowing the first; the dead duplicate has been removed and
    the clearer ``divmod`` formulation kept.
    """
    index, odd = divmod(len(s), 2)
    return s[index] if odd else s[index - 1:index + 1]
# codeforces round 600 div 2 - A
# solved in time
# https://codeforces.com/contest/1253/problem/A?locale=en
# For each test case, answer YES iff b can be obtained from a by a single
# non-negative addition on one contiguous segment: the differences b-a must
# be >= 0 and the positive ones must be equal and contiguous.
t = int(input())
for i in range(t):
    n = int(input())
    a_list = input().split(' ')
    b_list = input().split(' ')
    prev_diff = 0
    current_diff = 0
    diff_found = 0
    result = ''
    if n == 1:
        # Bug fix: compare the values numerically. The original compared the
        # raw strings, so e.g. '10' >= '9' evaluated to False ('1' < '9')
        # even though 10 >= 9.
        if int(b_list[0]) >= int(a_list[0]):
            result = 'YES'
        else:
            result = 'NO'
    else:
        for j in range(n):
            current_diff = int(b_list[j]) - int(a_list[j])
            if current_diff < 0:
                # b may never be smaller than a.
                result = 'NO'
            else:
                if diff_found == 0:
                    # Still before the (first) positive segment.
                    if current_diff != 0:
                        prev_diff = current_diff
                        diff_found = 1
                else:
                    if current_diff == 0:
                        # Segment ended; any later positive diff is invalid.
                        prev_diff = current_diff
                    else:
                        if prev_diff == 0:
                            # A second disjoint positive segment started.
                            result = 'NO'
                        if prev_diff != 0 and prev_diff != current_diff:
                            # Positive diffs must all be equal.
                            result = 'NO'
    if result == '':
        result = 'YES'
    print(result)
#1
#3
#1 2 3
#2 2 4
|
"""
From Medium post "Google Interview Questions Deconstructed: The Knight’s Dialer" by Alex Golec:
https://hackernoon.com/google-interview-questions-deconstructed-the-knights-dialer-f780d516f029
Imagine you place a knight chess piece on a phone dial pad.
This chess piece moves in an uppercase “L” shape: two steps
horizontally followed by one vertically, or one step horizontally
then two vertically. For example, from the 1, the knight can move
to the 6 or the 8:
1* 2 3
4 5 6*
7 8* 9
0
Suppose you dial keys on the keypad using only hops a knight can make.
Every time the knight lands on a key, we dial that key and make another hop.
The starting position counts as being dialed.
How many distinct numbers can you dial in N hops from a particular starting position?
"""
# Knight-move adjacency on the phone keypad: key -> keys reachable in one hop.
# Key 5 has no legal moves, so a knight starting there is stuck after 0 hops.
legal_moves = {1:[6,8], 2:[7,9], 3:[4,8],
               4:[3,9,0], 5:[], 6:[1,7,0],
               7:[2,6], 8:[1,3], 9:[2,4],
               0:[4,6]}
def knight_dialer_count_recursive(start, n):
    """
    Count the distinct numbers dialable in n knight hops from ``start`` by
    plain recursion (no memoization).

    Complexity:
        Runtime is O(3^n) (and Omega(2^n) for start != 5): every call fans
        out into at most 3 recursive calls, repeated n-1 levels deep, and the
        total number of calls gives the time complexity.
        Space is O(n) for the maximum recursion depth.
    """
    if n < 0:
        return 0
    if n == 0:
        return 1
    if n == 1:
        return len(legal_moves[start])
    total = 0
    for next_key in legal_moves[start]:
        total += knight_dialer_count_recursive(next_key, n - 1)
    return total
def knight_dialer_count_memoized(start, n, memo=None):
    """
    Count the distinct numbers dialable in n knight hops from ``start``,
    memoizing on (position, hops-remaining).

    Fix: the original used the mutable-default-argument anti-pattern
    (``memo={}``), sharing one hidden dict across all calls for the lifetime
    of the process. The memo is now created per top-level call and threaded
    through the recursion; callers may still pass their own dict to share a
    cache explicitly, so the signature stays backward-compatible.

    Complexity:
        O(n) time and space within one top-level call: at most 10*n distinct
        (position, hops) states are computed, each looked up in O(1).
    """
    if memo is None:
        memo = {}
    if n < 0:
        return 0
    if n == 0:
        return 1
    key = (start, n)
    if key not in memo:
        if n == 1:
            memo[key] = len(legal_moves[start])
        else:
            # Pass the memo down so sub-calls share this call's cache.
            memo[key] = sum(knight_dialer_count_memoized(move, n - 1, memo)
                            for move in legal_moves[start])
    return memo[key]
|
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): SECRET_KEY is empty here — it must be filled in (ideally
# injected from the environment) before this settings module is usable.
SECRET_KEY = ''
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
# NOTE(review): ENGINE is sqlite3, for which HOST/USER/PASSWORD are ignored;
# NAME must be set to the path of the database file.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'HOST': '',
        'NAME': '',
        'USER': '',
        'PASSWORD': '',
    }
}
|
def add_native_methods(clazz):
    """Attach stub implementations of the Java native methods to ``clazz``.

    Each stub mirrors an unimplemented JNI entry point and raises
    NotImplementedError when invoked.
    """
    def __java_init______(this):
        raise NotImplementedError()

    def selectAlternatives__com_sun_xml_internal_ws_policy_EffectivePolicyModifier__com_sun_xml_internal_ws_policy_AssertionValidationProcessor__(modifier, processor):
        raise NotImplementedError()

    # Bind the constructor stub as a plain attribute and the selector as a
    # static method, matching the Java-side signatures.
    clazz.__java_init______ = __java_init______
    clazz.selectAlternatives__com_sun_xml_internal_ws_policy_EffectivePolicyModifier__com_sun_xml_internal_ws_policy_AssertionValidationProcessor__ = staticmethod(selectAlternatives__com_sun_xml_internal_ws_policy_EffectivePolicyModifier__com_sun_xml_internal_ws_policy_AssertionValidationProcessor__)
|
# -*- coding: utf-8 -*-
# Demo of %-formatting, str.format and UTF-8 encode/decode round-tripping.
print('%2d-%02d' % (3, 1))
print('%.2f' % 3.1415926)
print('Hello, {0}, 成绩提升了 {1:.1f}%'.format('小明', 17.125))
s1 = 72
s2 = 85
r = '{0} 成绩提升了 {1:.1f} %'
# Bug fix: the improvement percentage is (delta / base) * 100. The original
# used the modulo operator — '(s2-s1)%s1' evaluated to 13 instead of ~18.1.
s3 = (s2 - s1) / s1 * 100
print(r.format('小明', s3))
print('%s 成绩提升了 %.1f %%' % ('小明', s3))
ch = '中文'
chencode = ch.encode('utf-8')
print(chencode)
chdecode = chencode.decode('utf-8')
print(chdecode)
############### Configuration file for Bayesian ###############
# Training hyper-parameters consumed by the training script.
n_epochs = 10000
lr_start = 0.0001  # initial learning rate
num_workers = 16  # presumably data-loader worker processes — confirm in trainer
valid_size = 0.1  # fraction of training data held out for validation
batch_size = 1024
train_ens = 1  # presumably number of ensemble samples at train time — confirm
valid_ens = 1  # presumably number of ensemble samples at validation time
record_mean_var = True  # enable recording of layer statistics
recording_freq_per_epoch = 8  # recordings per epoch
record_layers = ['fc5']  # names of the layers to record
# Cross-module global variables
# (mutated at runtime by other modules; placeholders until a run starts)
mean_var_dir = None
record_now = False
curr_epoch_no = None
curr_batch_no = None
|
'''
Solve the problem of skyscrapers game
github: https://github.com/VictoriyaRoy/skyscrapers
'''
def read_input(path: str) -> list:
    """
    Read game board file from path.
    Return list of str (one entry per line of the file).
    """
    with open(path, mode='r', encoding='utf-8') as board_file:
        content = board_file.read()
    return content.split('\n')
def is_visible(input_line: str, index: int) -> bool:
    '''
    Check if skyscraper on index position is visible: it must be strictly
    taller than every building before it (the hint cell at position 0 is
    skipped).

    >>> is_visible('*13245*', 2)
    True
    >>> is_visible('*13245*', 3)
    False
    >>> is_visible('132345*', 3)
    False
    '''
    target = input_line[index]
    return all(height < target for height in input_line[1:index])
def left_to_right_check(input_line: str, pivot: int) -> bool:
    """
    Check row-wise visibility from left to right.
    Return True if number of building from the left-most hint is visible looking to the right,
    False otherwise.
    input_line - representing board row.
    pivot - number on the left-most hint of the input_line.

    >>> left_to_right_check("412453*", 4)
    True
    >>> left_to_right_check("452453*", 5)
    False
    >>> left_to_right_check("132345*", 3)
    True
    """
    visible = 0
    # Positions 1..len-2 are buildings; 0 and -1 are hint cells.
    for pos in range(1, len(input_line) - 1):
        # Visible when strictly taller than everything before it
        # (the visibility test is inlined here instead of calling the
        # is_visible helper).
        if all(other < input_line[pos] for other in input_line[1:pos]):
            visible += 1
    return visible == pivot
def check_not_finished_board(board: list) -> bool:
    """
    Check if skyscraper board is not finished, i.e., '?' present on the game board.
    Return True if finished, False otherwise.
    >>> check_not_finished_board(['***21**', '4?????*', '4?????*', '*?????5', \
'*?????*', '*?????*', '*2*1***'])
    False
    >>> check_not_finished_board(['***21**', '412453*', '423145*', '*543215', \
'*35214*', '*41532*', '*2*1***'])
    True
    >>> check_not_finished_board(['***21**', '412453*', '423145*', '*5?3215', \
'*35214*', '*41532*', '*2*1***'])
    False
    """
    # Finished means no placeholder '?' remains anywhere on the board.
    return all('?' not in row for row in board)
def check_uniqueness_in_rows(board: list) -> bool:
    """
    Check buildings of unique height in each row.
    Return True if buildings in a row have unique length, False otherwise.
    >>> check_uniqueness_in_rows(['***21**', '412453*', '423145*', '*543215', \
'*35214*', '*41532*', '*2*1***'])
    True
    >>> check_uniqueness_in_rows(['***21**', '452453*', '423145*', '*543215', \
'*35214*', '*41532*', '*2*1***'])
    False
    >>> check_uniqueness_in_rows(['***21**', '412453*', '423145*', '*553215', \
'*35214*', '*41532*', '*2*1***'])
    False
    """
    # Skip the hint rows (first/last) and hint columns (first/last char).
    for row in board[1:-1]:
        interior = row[1:-1]
        if len(set(interior)) != len(interior):
            return False
    return True
def check_horizontal_visibility(board: list) -> bool:
    """
    Check row-wise visibility (left-right and vice versa)
    Return True if all horizontal hints are satisfiable,
    i.e., for line 412453* , hint is 4, and 1245 are the four buildings
    that could be observed from the hint looking to the right.
    >>> check_horizontal_visibility(['***21**', '412453*', '423145*', '*543215', \
'*35214*', '*41532*', '*2*1***'])
    True
    >>> check_horizontal_visibility(['***21**', '452453*', '423145*', '*543215', \
'*35214*', '*41532*', '*2*1***'])
    False
    >>> check_horizontal_visibility(['***21**', '452413*', '423145*', '*543215', \
'*35214*', '*41532*', '*2*1***'])
    False
    """
    for row in board[1:-1]:
        left_hint = row[0]
        if left_hint != '*' and not left_to_right_check(row, int(left_hint)):
            return False
        right_hint = row[-1]
        # The right hint is checked by reversing the row and reusing the
        # left-to-right routine.
        if right_hint != '*' and not left_to_right_check(row[::-1], int(right_hint)):
            return False
    return True
def check_columns(board: list) -> bool:
    """
    Check column-wise compliance of the board for uniqueness (buildings of unique height)
    and visibility (top-bottom and vice versa).
    Same as for horizontal cases, but aggregated in one function for vertical case, i.e. columns.
    >>> check_columns(['***21**', '412453*', '423145*', '*543215', '*35214*', '*41532*', '*2*1***'])
    True
    >>> check_columns(['***21**', '412453*', '423145*', '*543215', '*35214*', '*41232*', '*2*1***'])
    False
    >>> check_columns(['***21**', '412553*', '423145*', '*543215', '*35214*', '*41532*', '*2*1***'])
    False
    """
    # Transpose the board so columns become rows, then reuse the row checks.
    transposed = ['' for _ in range(len(board))]
    for row in board:
        for position, symbol in enumerate(row):
            transposed[position] += symbol
    return (check_horizontal_visibility(transposed)
            and check_uniqueness_in_rows(transposed))
def check_skyscrapers(input_path: str) -> bool:
    """
    Main function to check the status of a skyscraper game board.
    Return True if the board status is compliant with all the rules,
    False otherwise.
    """
    board = read_input(input_path)
    # All four rule families must hold; `and` short-circuits exactly like
    # the original nested if-statements did.
    return (check_not_finished_board(board)
            and check_uniqueness_in_rows(board)
            and check_horizontal_visibility(board)
            and check_columns(board))
# Script entry point: validate the sample board shipped as 'check.txt'.
if __name__ == "__main__":
    print(check_skyscrapers('check.txt'))
|
# Copyright (c) 2012-2021 Esri R&D Center Zurich
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# A copy of the license is available in the repository's LICENSE file.
def visualize_prt_results(models):
    """visualize_prt_results(models)

    Print the geometry statistics and report information of each
    GeneratedModel instance in *models*. Falsy entries (failed
    generations) produce an error message instead.

    Parameters:
        models: List[GeneratedModel]
    """
    print('\nNumber of generated geometries (= nber of initial shapes):')
    print(len(models))
    for m in models:
        if not m:
            # Guard clause instead of a trailing else; also fixes the
            # 'instanciating' typo in the original message.
            print('\nError while instantiating the model generator.')
            continue
        geometry_vertices = m.get_vertices()
        rep = m.get_report()
        print()
        print('Initial Shape Index: ' + str(m.get_initial_shape_index()))
        if len(geometry_vertices) > 0:
            print()
            print('Size of the model vertices vector: ' +
                  str(len(geometry_vertices)))
            # Vertices are stored as a flat [x, y, z, x, y, z, ...] list.
            print('Number of model vertices: ' +
                  str(int(len(geometry_vertices)/3)))
            print('Size of the model faces vector: ' +
                  str(len(m.get_faces())))
        if len(rep) > 0:
            print()
            print('Report of the generated model:')
            print(rep)
def vertices_vector_to_matrix(vertices):
    """vertices_vector_to_matrix(vertices) -> List[List[float]]

    Convert PyPRT's flat vertex-coordinate list [x0, y0, z0, x1, ...]
    into a list of N [x, y, z] triplets (N = number of vertices).

    Parameters:
        vertices: List[float]
    Returns:
        List[List[float]]
    Example:
        ``[[-10.0, 0.0, 10.0], [-10.0, 0.0, 0.0], [10.0, 0.0, 0.0], [10.0, 0.0, 10.0]]
        = vertices_vector_to_matrix([-10.0, 0.0, 10.0, -10.0, 0.0, 0.0, 10.0, 0.0, 0.0,
                10.0, 0.0, 10.0])``
    """
    return [
        [vertices[3 * point], vertices[3 * point + 1], vertices[3 * point + 2]]
        for point in range(len(vertices) // 3)
    ]
def faces_indices_vectors_to_matrix(indices, faces):
    """faces_indices_vectors_to_matrix(indices, faces) -> List[List[int]]

    Convert PyPRT's flat vertex-index list plus the per-face index counts
    into a list of per-face vertex-index lists.

    Parameters:
        indices: List[int]
        faces: List[int]
    Returns:
        List[List[int]]
    Example:
        ``[[1, 0, 3, 2], [4, 5, 6, 7], [0, 1, 5, 4], [1, 2, 6, 5], [2, 3, 7, 6], [3, 0, 4, 7]]
        = faces_indices_vectors_to_matrix(([1, 0, 3, 2, 4, 5, 6, 7, 0, 1, 5, 4, 1, 2, 6, 5,
                2, 3, 7, 6, 3, 0, 4, 7],[4, 4, 4, 4, 4, 4]))``
    """
    result = []
    cursor = 0
    for face_size in faces:
        # Each face consumes the next `face_size` entries of `indices`.
        result.append(indices[cursor:cursor + face_size])
        cursor += face_size
    return result
|
class ClassPropertyDescriptor(object):
    """Descriptor exposing a classmethod as a read-only class-level property.

    Based on https://stackoverflow.com/questions/5189699/how-to-make-a-class-property
    """

    def __init__(self, fget):
        # fget is expected to be a classmethod wrapping the actual getter.
        self.fget = fget

    def __get__(self, obj, klass):
        owner = klass if klass is not None else type(obj)
        bound_getter = self.fget.__get__(obj, owner)
        return bound_getter()
def classproperty(func):
    """Decorator turning *func* into a read-only class-level property."""
    getter = classmethod(func)
    return ClassPropertyDescriptor(getter)
|
idade = 20
# For an integer age exactly one of the two cases applies, so a plain
# if/else suffices; the original third ("valor inválido") branch was
# unreachable for any real number.
if idade >= 18:
    print("maior de idade")
else:
    print("menor de idade")
# Exceptions indicating the reaction completer has an error
#
__author__ = 'Haoyan Huo'
__maintainer__ = 'Haoyan Huo'
__email__ = 'haoyan.huo@lbl.gov'
class FormulaException(Exception):
    """Raised when a chemical formula cannot be parsed."""
class CannotBalance(Exception):
    """Base error: the reaction completer could not build a valid reaction."""
class TooFewPrecursors(CannotBalance):
    """Raised when there are too few precursors to balance the reaction."""
class TooManyPrecursors(CannotBalance):
    """Raised when there are too many precursors to balance, for example:
    Ba + O + BaO + TiO2 == ? == BaTiO3
    """
class StupidRecipe(CannotBalance):
    """Raised when the recipe is not meaningful to parse.

    Typical causes:
    1. Target equals precursors: BaTiO3 == BaTiO3
    2. Target only has less than three elements: 2H + O == H2O
    """
class ExpressionPrintException(CannotBalance):
    """Raised when a math formula cannot be printed."""
|
'''
Given an array arr of integers, check if there exists two integers N and M such that N is the double of M ( i.e. N = 2 * M).
More formally check if there exists two indices i and j such that :
i != j
0 <= i, j < arr.length
arr[i] == 2 * arr[j]
Example 1:
Input: arr = [10,2,5,3]
Output: true
Explanation: N = 10 is the double of M = 5,that is, 10 = 2 * 5.
Example 2:
Input: arr = [7,1,14,11]
Output: true
Explanation: N = 14 is the double of M = 7,that is, 14 = 2 * 7.
Example 3:
Input: arr = [3,1,7,11]
Output: false
Explanation: In this case does not exist N and M, such that N = 2 * M.
'''
class Solution:
    def checkIfExist(self, arr: List[int]) -> bool:
        """Return True if there exist i != j with arr[i] == 2 * arr[j].

        Single pass with a set of previously seen values: each number is
        checked against both its double and its half.
        """
        # Guard against missing/trivial input (two distinct indices needed).
        # `is None` replaces the original non-idiomatic `arr == None`.
        if arr is None or len(arr) < 2:
            return False
        seen = set()
        for num in arr:
            # Logical `or` (short-circuiting) instead of bitwise `|`;
            # num / 2 compares equal to the int half when one exists.
            if 2 * num in seen or num / 2 in seen:
                return True
            # set.add is idempotent, so no membership pre-check is needed.
            seen.add(num)
        return False
|
# Author: btjanaka (Bryon Tjanaka)
# Problem: (HackerRank) summing-the-n-series
# Title: Summing the N series
# Link: https://www.hackerrank.com/challenges/summing-the-n-series/problem
# Idea: The terms cancel out such that the sum is just n^2.
# Difficulty: easy
# Tags: math
t = int(input())
MOD = 10**9 + 7
for _ in range(t):
    # The series telescopes to n^2; reduce n modulo MOD first so the
    # squared value stays manageable.
    n = int(input()) % MOD
    print(n * n % MOD)
|
def classify(entries):
    """Assign the default category/event id (1) to every entry in place.

    Returns the same list so calls can be chained.
    """
    for record in entries:
        record.update(category_id=1, event_id=1)
    return entries
|
# __***__ are especial functions and attributes
# Standalone function that is attached to instances dynamically below.
def dynamic_function():
    print('dynamic_function')
class MyClass:
    """A simple example class""" # stored as MyClass.__doc__
    i: int # annotated instance attribute (created per instance in __init__)
    att = 'static' # class (static) attribute shared by all instances
    def __init__(self, name): # constructor: __init__
        print('Init MyClass:', name)
        self.i = len(name)
        self.name = name # 'name' is an instance attribute
        self.dynamic = dynamic_function # plain function stored per instance
    def public(self): # public method
        return 'hello world (' + str(self.i) + ')'
    def __private(self): # name-mangled, effectively private
        pass
# Classes are themselves objects of type 'type'.
print('0- type of class: ', type(MyClass))
print('1 ---------------- ')
my_class = MyClass('me')
my_class.dynamic()
# Python allows attaching new attributes and functions to an instance at runtime.
my_class.new_attribute = "New!!!"
my_class.new_function = dynamic_function
print(my_class.new_attribute)
# Inheritance
class ChildClass(MyClass):
    # Single inheritance: delegate construction to the parent class.
    def __init__(self):
        super().__init__('child')
print('2 ---------------- ')
# Instantiating ChildClass triggers the parent __init__ via super().
ChildClass()
class Two:
    # Annotated (but unassigned) attribute; second base for the
    # multiple-inheritance example below.
    two: str
    def __init__(self):
        print('Init Two')
# Multiple inheritance
# MRO is Multiple -> MyClass -> Two, so MyClass.__init__ runs on construction.
class Multiple(MyClass, Two):
    value: str
print('3 ---------------- ')
multiple = Multiple('multiple')
# Annotated attributes only come into existence once assigned.
multiple.i = 3
multiple.name = "name"
multiple.two = "2"
multiple.value = "value"
class WithDecorator:
    """Demonstrates @classmethod (alternate constructor) and @staticmethod."""

    def __init__(self, first_name, last_name):
        self.first_name = first_name
        self.last_name = last_name

    @classmethod
    def from_string(cls, name_str):
        """Alternate constructor: build an instance from 'First Last'."""
        # str.split already yields strings, so the original map(str, ...)
        # was a no-op; unpacking still raises ValueError unless there are
        # exactly two space-separated parts, same as before.
        first_name, last_name = name_str.split(' ')
        return cls(first_name, last_name)

    @staticmethod
    def is_full_name(name_str):
        """Return True if *name_str* has more than one space-separated part."""
        names = name_str.split(' ')
        return len(names) > 1
# Introspection via special (dunder) attributes:
# __doc__ __init__ __name__ __module__ __call__ ...
print('4 ---------------- ')
print('__doc__ ', MyClass.__doc__)
print('__name__ ', MyClass.__name__)
print('__module__ ', MyClass.__module__)
class Callable:
    # Defining __call__ makes instances of this class callable.
    def __call__(self, value): # invoked by instance(...) syntax
        print('__call__ >> value: ', value)
my_callable = Callable()
my_callable("!!!") # equivalent to my_callable.__call__("!!!")
# my_class() ERROR!!! -- MyClass defines no __call__, so instances are not callable
|
# Given list of values. We play a game against the opponent and we always grab
# one value from either left or right side of our list of values. Our opponent
# does the same move. What is the maximum value we can grab if our opponent
# plays optimally, just like us.
def optimal_values(V):
    """
    Return the maximum total the first player can collect from list *V*
    when both players alternately take one value from either end and the
    opponent also plays optimally.

    Interval DP: T[i][j] holds the pair (current player's best total,
    opponent's total) for the sub-array V[i-1 .. j-1] (1-based bounds).

    Returns 0 for an empty list (the original raised IndexError).
    """
    n = len(V)
    if n == 0:
        return 0
    # Row/column 0 are never read; the original also filled them with
    # unused values. The inner `for k in ...` loop of the original merely
    # recomputed the same pair, making it accidentally O(n^3).
    T = [[None] * (n + 1) for _ in range(n + 1)]
    for i in range(1, n + 1):
        # Single remaining value: current player takes it, opponent gets 0.
        T[i][i] = (V[i - 1], 0)
    for length in range(2, n + 1):
        for i in range(1, n - length + 2):
            j = i + length - 1
            # Take the right end V[j-1] or the left end V[i-1]; either way
            # we then inherit the opponent's share of the remaining sub-game.
            best = max(T[i][j - 1][1] + V[j - 1], T[i + 1][j][1] + V[i - 1])
            # Both candidate pairs sum to the interval total, so the minimum
            # leftover pairs up consistently with the maximum above.
            rest = min(T[i][j - 1][0], T[i + 1][j][0])
            T[i][j] = (best, rest)
    return T[1][n][0]
# Sample game: with optimal play on both sides the first player collects 14.
V = [3, 8, 4, 5, 1, 7, 6]
print(optimal_values(V))
|
"""
x + y sum of x and y
x - y difference of x and y
x * y product of x and y
x / y quotient of x and y
x // y floored quotient of x and y
x % y remainder of x / y
-x x negated
+x x unchanged
abs(x) absolute value or magnitude of x
int(x) x converted to integer
float(x) x converted to floating point
complex(re, im) a complex number with real part re, imaginary part im. im defaults to zero.
c.conjugate() conjugate of the complex number c
divmod(x, y) the pair (x // y, x % y)
pow(x, y) x to the power y
x ** y x to the power y
math.trunc(x) x truncated to Integral
round(x[, n]) x rounded to n digits, rounding half to even. If n is omitted, it defaults to 0.
math.floor(x) the greatest Integral <= x
math.ceil(x) the least Integral >= x
~x the bits of x inverted
float.is_integer()
Return True if the float instance is finite with integral value, and False otherwise
"""
"""
x in s True if an item of s is equal to x, else False
x not in s False if an item of s is equal to x, else True
s + t the concatenation of s and t
s * n or n * s equivalent to adding s to itself n times
s[i] ith item of s, origin 0
s[i:j] slice of s from i to j
s[i:j:k] slice of s from i to j with step k
len(s) length of s
min(s) smallest item of s
max(s) largest item of s
s.index(x[, i[, j]]) index of the first occurrence of x in s (at or after index i and before index j)
s.count(x) total number of occurrences of x in s
"""
"""
Python String Methods
capitalize() - Returns the string with first letter capitalized and the rest lowercased.
casefold() - Returns a lowercase string, generally used for caseless matching. This is more aggressive than the lower() method.
center() - Center the string within the specified width with optional fill character.
count() - Count the non-overlapping occurrence of supplied substring in the string.
encode() - Return the encoded version of the string as a bytes object.
endswith() - Returns true if the string ends with the supplied substring.
expandtabs() - Return a string where all the tab characters are replaced by the supplied number of spaces.
find() - Return the index of the first occurrence of supplied substring in the string. Return -1 if not found.
format() - Format the given string.
format_map() - Format the given string.
index() - Return the index of the first occurrence of supplied substring in the string. Raise ValueError if not found.
isalnum() - Return true if the string is non-empty and all characters are alphanumeric.
isalpha() - Return true if the string is non-empty and all characters are alphabetic.
isdecimal() - Return true if the string is non-empty and all characters are decimal characters.
isdigit() - Return true if the string is non-empty and all characters are digits.
isidentifier() - Return true if the string is a valid identifier.
islower() - Return true if the string has all lowercased characters and at least one is cased character.
isnumeric() - Return true if the string is non-empty and all characters are numeric.
isprintable() - Return true if the string is empty or all characters are printable.
isspace() - Return true if the string is non-empty and all characters are whitespaces.
istitle() - Return true if the string is non-empty and titlecased.
isupper() - Return true if the string has all uppercased characters and at least one is cased character.
join() - Concatenate strings in the provided iterable with separator between them being the string providing this method.
ljust() - Left justify the string in the provided width with optional fill characters.
lower() - Return a copy of all lowercased string.
lstrip() - Return a string with provided leading characters removed.
maketrans() - Return a translation table.
partition() - Partition the string at first occurrence of substring (separator) and return a 3-tuple with part before separator, the separator and part after separator.
replace() - Replace all old substrings with new substrings.
rfind() - Return the index of the last occurrence of supplied substring in the string. Return -1 if not found.
rindex() - Return the index of the last occurrence of supplied substring in the string. Raise ValueError if not found.
rjust() - Right justify the string in the provided width with optional fill characters.
rpartition() - Partition the string at last occurrence of substring (separator) and return a 3-tuple with part before separator, the separator and part after separator.
rsplit() - Return a list of words delimited by the provided substring. If maximum number of split is specified, it is done from the right.
rstrip() - Return a string with provided trailing characters removed.
split() - Return a list of words delimited by the provided substring. If maximum number of split is specified, it is done from the left.
splitlines() - Return a list of lines in the string.
startswith() - Return true if the string starts with the provided substring.
strip() - Return a string with provided leading and trailing characters removed.
swapcase() - Return a string with lowercase characters converted to uppercase and vice versa.
title() - Return a title (first character of each word capitalized, others lowercased) cased string.
translate() - Return a copy of string that has been mapped according to the provided map.
upper() - Return a copy of all uppercased string.
zfill() - Return a numeric string left filled with zeros in the provided width.
sort alphabetically the words form a string provided by the user
breakdown the string into a list of words
words = my_str.split()
sort the list
words.sort()
"""
"""
enumerate() -> tuplas
>>> seasons = ['Spring', 'Summer', 'Fall', 'Winter']
>>> list(enumerate(seasons))
[(0, 'Spring'), (1, 'Summer'), (2, 'Fall'), (3, 'Winter')]
>>> list(enumerate(seasons, start=1))
[(1, 'Spring'), (2, 'Summer'), (3, 'Fall'), (4, 'Winter')]
"""
|
a = int(input('Digite o primeiro número:'))
b = int(input('Digite o segundo número:'))
c = int(input('Digite o terceiro número:'))
# min()/max() also handle ties; the original strict pairwise comparisons
# missed the minimum/maximum when two values were equal
# (e.g. a=5, b=3, c=3 reported menor=5).
menor = min(a, b, c)
maior = max(a, b, c)
print('O maior valor foi {} e o menor valor foi {}'.format(maior, menor))
|
# -*- coding: utf-8 -*-
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.3'
# jupytext_version: 1.13.6
# kernelspec:
# display_name: Python 3 (ipykernel)
# language: python
# name: python3
# ---
# %% [markdown]
# ---
#
# # <center> <font color=darkgreen>Iterables</font> </center>
#
# ---
#
# <img src="../figs/data_types.png" width="500">
#
# ---
# %% [markdown]
# Iterable types in Python contain sets of elements. There are
# + **string** - ordered sequence of characters
# + **set** - sets of unique objects
# + **tuple** - immutable ordered sets of objects
# + **list** - mutable ordered sets of objects
# + **dictionary** - mutable sets of key/value pairs.
#
# We will study each of these, but first we will look at built-in functions that apply to iterables in general.
# %% [markdown]
# ## Checking membership: `in` and `not in`
#
# `a in A` evaluates to `True` if `a` is an element of the iterable `A`, and `False` otherwise.
#
# You can also use `a not in A` to get the opposite behavior.
#
# In the following example, `A` is a list.
# %%
A = [ 1/2, 0.4, 'hi', [0,1] ]
print(A)
# %% [markdown]
# ## Unpacking
#
# Unpacking is a shorthand syntax for assigning each of the elements of an iterable to different scalar variables.
# %%
# %% [markdown]
# **Question**: What do you think happens if the number of variable names does not match the number of elements in the iterable?
# %% [markdown]
# ---
# # <center> <font color=darkgreen> Indexing iterables</font> </center>
# ---
#
# + Strings, tuples, and lists are **ordered iterables**.
#
# + Indexing is **0-based** and uses square brackets `[]`.
#
# + Sets and dictionaries are not ordered. Therefore they cannot be indexed.
# %% [markdown]
# **Example** : Index a string
# %%
astring = 'Food is an important part of a balanced diet - Fran Lebowitz'
# %% [markdown]
# **Example** : Index a list
# %%
alist = [4.45, 'hello', 4+5j]
# %% [markdown]
# **Example** : Index a tuple
# %%
atuple = (4.45, 'hello', 4+5j)
# %% [markdown]
# **Lists are mutable, tuples and strings are not**
# %%
# %% [markdown]
# ## Slice indexing
# + You can get a 'slice' of an iterable using the colon symbol `:`
# + This returns a new iterable that is a portion of the original object.
# + `astring[a:b]` is the substring starting at index `a` and ending at `b-1`.
# %%
# %% [markdown]
# + Leaving out the start index means 'from the beginning'
# + Leaving out the end index means 'to the end'
# %%
# %% [markdown]
# ## Negative indices
# Negative indices count backward from the end.
# %%
# %% [markdown]
# ## Skipping values
#
# You can specify a "skip" value after a second colon: `A[start:end:skip]`
# %%
A = [0,1,2,3,4,5,6,7,8,9]
# What will this return?
# Answer: [0, 3, 6] -- start 0, stop before index -2 (value 8), step 3.
A[0:-2:3]
# %% [markdown]
# ---
# # <center> <font color=darkgreen>[Strings methods](https://docs.python.org/3/library/stdtypes.html#string-methods)</font> </center>
# ---
#
# Things we can do with strings,
# + create them,
# + change cases in various ways,
# + search for a substring,
# + split strings,
# + etc.
# %%
strA = 'ABC'
strB = '123'
# %% [markdown]
# ## String `+`
# %%
print(strA + strB)
# %% [markdown]
# ## Formatting strings - [`.format()`](https://docs.python.org/3/library/stdtypes.html#str.format)
#
# The `format()` method is especially useful for building strings that involve numerical or other variables. The function is powerful, and you can find advanced examples [here](https://docs.python.org/3/library/string.html\#format-examples). However the simple form works most of the time. Here is an example.
#
# ### Example: "Johann Sebastian Bach was born in 1685, and died in 1750."
# %%
name = 'Johann Sebastian Bach'
birth_year = 1685
death_year = 1750
# using the 'format' method
str1 = '{0} was born in {1}, and died in {2}.'.format(name, birth_year, death_year)
print(str1)
# "f-string" method
str2 = f'{name} was born in {birth_year}, and died in {death_year}.'
print(str2)
# %% [markdown]
# ### `.split()`
# %%
a = '1,phone,None,-4.5,'
print(a)
b = a.split(',')
print(b)
# %% [markdown]
# ---
# ## <center><font color=dark> >> 5-minute challenge << </font></center>
# ---
# Use `split` to extract the name of the author in `astring`.
# %%
print(astring)
# %% [markdown]
# ---
# # <center> <font color=darkgreen>Lists: `[]` </font> </center>
# ---
# A **list** is a sequence of objects that is:
# + **ordered**: They can be indexed with `[]`.
# + **inhomogeneous**: They can contain a variety of object types.
# + **mutable**: You can modify them by adding and/or deleting items after they are created.
# %%
# %% [markdown]
# ## [List methods](https://docs.python.org/3/tutorial/datastructures.html#more-on-lists)
#
# The methods attached to list objects provide functionality for adding, removing, and changing the order of the elements in a list.
# %% [markdown]
# ## Building lists
#
# The `append`, `extend`, and `insert` methods can be used to populate a list. Later we will learn about "list comprehensions" which give us a compact syntax for building large lists (as well as sets and dictionaries).
# %% [markdown]
# ### `.append()` Puts a value at the end of the list.
# %% [markdown]
# ### `.extend()` Appends each value of an iterable to the list.
# %% [markdown]
# ### `.insert` Inserts an element at a given location in the list.
# %% [markdown]
# ## Removing items from lists
#
# ### `.remove()` Remove the first instance of a given object.
# %% [markdown]
# ### `.pop()` Extract the item at a given index and return it.
# %% [markdown]
# ### `del`
# Remove an item at a given index.
# %%
a=[4,1,9]
del a[1]
print(a)
# %% [markdown]
# ### `.clear()` Remove all items from the list.
# %%
a=[4,1,2,1,9]
a.clear()
print(a)
# %% [markdown]
# ---
# ## <center><font color=dark> >> 5-minute challenge << </font></center>
# ---
# 1. Create this list: [4,1,9]
# 2. Use list object methods to put it in reverse order: [9,4,1]
#
# **HINT**: `help(a.sort)` and`help(a.reverse)`
# %% [markdown]
# ---
# # <center> <font color=darkgreen>[Tuples](https://docs.python.org/3/tutorial/datastructures.html#tuples-and-sequences): `()`</font> </center>
# ---
#
# A **tuple** is a sequence of objects that is:
# + **ordered**
# + **inhomogeneous**
# + **immutable**
#
# Tuples offer only 2 methods: `count()` and `index()`.
#
# ### Why use a *tuple* instead of a *list*?
#
# + return values from functions
# + keys in dictionaries
# %%
a = (1,'asdf',4.3)
print(a)
a.index(4.3)
# %% [markdown]
#
# ---
# # <center> <font color=darkgreen>[Sets](https://docs.python.org/3/tutorial/datastructures.html#sets): `{}`</font> </center>
# ---
#
# A **set** is a sequence of objects that is:
# + **inhomogeneous**
# + **not ordered**
# + **mutable**
# + **contains no duplicates**
#
# %%
a = {4,6,3,3}
# %% [markdown]
# ---
# ## <center><font color=dark> >> 5-minute challenge << </font></center>
# ---
#
# Use a `set` to count the number of unique words in the following paragraph.
#
# X = "Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python’s elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. The Python interpreter and the extensive standard library are freely available in source or binary form for all major platforms from the Python Web site, https://www.python.org/, and may be freely distributed. The same site also contains distributions of and pointers to many free third party Python modules, programs and tools, and additional documentation."
# %%
X = "Python is an easy to learn, powerful programming language. It has efficient high-level data structures and a simple but effective approach to object-oriented programming. Python’s elegant syntax and dynamic typing, together with its interpreted nature, make it an ideal language for scripting and rapid application development in many areas on most platforms. The Python interpreter and the extensive standard library are freely available in source or binary form for all major platforms from the Python Web site, https://www.python.org/, and may be freely distributed. The same site also contains distributions of and pointers to many free third party Python modules, programs and tools, and additional documentation."
# Splitting on single spaces keeps punctuation attached to words,
# so the count is approximate.
words = X.split(' ')
unique_words = set(words)
# The challenge asks for the number of *unique* words; the original
# printed len(words) (the total word count) by mistake.
print(len(unique_words))
# %% [markdown]
# ---
# # <center> <font color=darkgreen>[Dictionaries](https://docs.python.org/3/tutorial/datastructures.html#dictionaries): `{:}`</font> </center>
# ---
# A **dictionary** is a mapping from a set of *keys* to a set of *values*.
# + The keys in a dictionary must be **immutable** (scalars, strings, tuples).
# + The values in a dictionary can be **anything**.
# + Dictionaries are created with **curly brackets** and **colons**: { a1:b1 , a2:b2 }
# %%
JSB = { 'name' : 'Johann Sebastian Bach' ,
'birth_year' : 1685 ,
'death_year' : 1750 }
print(JSB)
# %% [markdown]
# ### Querying the dictionary: square brackets
# %%
JSB['name']
# %% [markdown]
# ### Get the set of keys
# %%
type(set(JSB.keys()))
# %% [markdown]
# ### Change a value
# %%
JSB['death_year'] = 2030
print(JSB)
# %% [markdown]
# ### Add a new key-value pair
# %%
|
"""
如果二叉树每个节点都具有相同的值,那么该二叉树就是单值二叉树。
只有给定的树是单值二叉树时,才返回 true;否则返回 false。
示例 1:
输入:[1,1,1,1,1,null,1]
输出:true
示例 2:
输入:[2,2,2,5,2]
输出:false
提示:
给定树的节点数范围是 [1, 100]。
每个节点的值都是整数,范围为 [0, 99] 。
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def isUnivalTree(self, root):
        """
        Return True when every node in the binary tree rooted at *root*
        holds the same value (an empty tree counts as unival).

        :type root: TreeNode
        :rtype: bool
        """
        if not root:
            return True
        target = root.val

        def all_match(node):
            # Short-circuits on the first mismatch instead of collecting
            # every value into a set like the original did; also avoids
            # the non-idiomatic `True if ... else False`.
            if not node:
                return True
            return (node.val == target
                    and all_match(node.left)
                    and all_match(node.right))

        return all_match(root)
stars = ""
# Note: `stars` is intentionally never reset, so each printed line carries
# over all asterisks accumulated on previous iterations (0,1,3,6,10 stars).
for row in range(5):
    stars += "*" * row
    print(stars)
# -*- coding: utf-8 -*-
class TestClient(object):
    """
    Resolves api functions registered on *app* by attribute name and
    returns the wrapped function, so calls go through all middlewares.
    (The original docstring wrongly described the middleware-bypassing
    FakeClient.)
    """
    def __init__(self, app):
        self.app = app

    def __getattr__(self, item):
        # __getattr__ is only consulted for attributes not found normally,
        # so 'app' itself never recurses into this lookup.
        api = self.app.api_map.get(item)
        if api is None:
            raise ValueError(
                'app didn\'t registered this api: {}'.format(item))
        # Return the already-fetched function instead of doing a second
        # dict lookup like the original.
        return api
class FakeClient(TestClient):
    """
    fake_client will call the original api function directly,
    bypassing all middlewares (via the function's __wrapped__ attribute).
    """
    def __getattr__(self, item):
        # Reuse the parent lookup (including the missing-api ValueError)
        # and unwrap the middleware-decorated function. The duplicated
        # __init__ and lookup code from the original are gone; __init__
        # is inherited unchanged from TestClient.
        return super().__getattr__(item).__wrapped__
'''
Configuration for the style, size, and elements of the GUI
will be configured here
'''
class window:
    # Fraction of the screen dimension used for the main window (0.0-1.0)
    # -- presumably consumed by the GUI setup code; TODO confirm.
    scale_height = 0.5
    scale_width = 0.5
|
# Per-variable matplotlib line styles ('colors', 'df', 'plt' and
# 'plot_variables' are assumed to be defined earlier in the original
# script -- TODO confirm).
linestyles = {'wind_speed': '-', 'wind_gust': '--', 'pressure': '-'}
# One subplot per variable group, all sharing the time axis.
fig, axes = plt.subplots(1, len(plot_variables), sharex=True, figsize=(18, 6))
for ax, var_names in zip(axes, plot_variables):
    for var_name in var_names:
        # Grab the color from our dictionary and pass it to plot()
        color = colors[var_name]
        linestyle = linestyles[var_name]
        # NOTE(review): color is passed positionally, i.e. as a matplotlib
        # format string; full color names work, but color=color would be
        # more explicit.
        ax.plot(df.time, df[var_name], color, linestyle=linestyle)
        # NOTE(review): ylabel/title are overwritten on each inner
        # iteration, so only the last variable's name remains visible.
        ax.set_ylabel(var_name)
        ax.set_title('Buoy {}'.format(var_name))
    ax.grid(True)
    ax.set_xlabel('Time')
    # Format the shared time axis as month/day with one major tick per day.
    ax.xaxis.set_major_formatter(DateFormatter('%m/%d'))
    ax.xaxis.set_major_locator(DayLocator())
valor = str(input('Digite um valor: '))
# String classification checks: each pair prints the question and then
# True/False on its own line.
print('é numero? ')
print(valor.isnumeric())
print('é letra? ')
print(valor.isalpha())
print('é alfanumérico? ')
print(valor.isalnum())
print('tem letra maíuscula? ')
print(valor.isupper())
|
#! /usr/bin/env python3
# PBFT protocol phase tags carried in the message objects below.
REQUEST = 'Request'
PRE_PREPARE = 'PrePrepare'
PREPARE = 'Prepare'
COMMIT = 'Commit'
RESULT = 'Result'
VIEW_CHANGE = 'ViewChange'
NEW_VIEW = 'NewView'
# Decoupling the actual request payload from the pre-prepare message is highly
# recommended for optimization (e.g. different transports for small vs big
# messages). The payload is kept inside PrePrepare here for educational reasons!
class PBFTPreprepareMessage:
    '''
    PBFT pre-prepare message, sent by the primary to the backup replicas.

    Fields: phase (protocol phase tag), viewNum (current view number),
    seqNum (sequence number assigned by the primary), digest (hash of the
    client request), signature (primary's signature), message (the full
    client request -- kept inline here for simplicity, see note above).
    '''
    def __init__(self, phase, viewNum, seqNum, digest, signature, message):
        self.phase = phase
        self.viewNum = viewNum
        self.seqNum = seqNum
        self.digest = digest
        self.signature = signature
        # Better use a separate message for the actual request
        self.message = message
    def __str__(self):
        return f'''
                (
                    "self.phase" = {self.phase}
                    "self.viewNum" = {self.viewNum}
                    "self.seqNum" = {self.seqNum}
                    "self.digest" = {self.digest}
                    "self.signature" = {self.signature}
                    "self.message" = {self.message}
                )
                '''
class PBFTMessage:
    '''
    Message object shared by the Prepare and Commit protocol phases.

    Fields mirror PBFTPreprepareMessage, except that fromNode identifies
    the sending replica instead of carrying the request payload.
    '''
    def __init__(self, phase, viewNum, seqNum, digest, signature, fromNode):
        self.phase = phase
        self.viewNum = viewNum
        self.seqNum = seqNum
        self.digest = digest
        self.signature = signature
        self.fromNode = fromNode
    def __str__(self):
        return f'''
                (
                    "self.phase" = {self.phase}
                    "self.viewNum" = {self.viewNum}
                    "self.seqNum" = {self.seqNum}
                    "self.digest" = {self.digest}
                    "self.signature" = {self.signature}
                    "self.fromNode" = {self.fromNode}
                )
                '''
class PBFTResultMessage:
    '''
    Result message returned to the client after execution.

    phase is fixed to RESULT; toClientHost/toClientPort address the reply,
    fromNode identifies the replying replica.
    '''
    def __init__(self, viewNum, timestamp, toClientHost, toClientPort, fromNode, result, signature):
        self.phase = RESULT
        self.viewNum = viewNum
        self.timestamp = timestamp
        self.toClientHost = toClientHost
        self.toClientPort = toClientPort
        self.fromNode = fromNode
        self.result = result
        self.signature = signature
    def __str__(self):
        # NOTE(review): toClientHost/toClientPort are not included in this
        # string representation -- confirm whether that is intentional.
        return f'''
                (
                    "self.phase" = {self.phase}
                    "self.viewNum" = {self.viewNum}
                    "self.timestamp" = {self.timestamp}
                    "self.fromNode" = {self.fromNode}
                    "self.result" = {self.result}
                    "self.signature" = {self.signature}
                )
                '''
{
"targets": [{
"target_name": "picosat",
"include_dirs": [
"<!(node -e \"require('napi-macros')\")"
],
"sources": [ "index.c", "lib/picosat.c" ]
}]
}
|
################################# FINAL SETTING!
# model settings
# KITTI front-view crop: voxel size (x, y, z) and the point-cloud range
# [x_min, y_min, z_min, x_max, y_max, z_max] it is applied to.
voxel_size = [0.05, 0.05, 0.1]
point_cloud_range = [0, -40, -3, 70.4, 40, 1]
# MOCO Model
model = dict(
    # type='Inter_Intro_moco',
    type='Inter_Intro_moco_better',
    img_backbone=dict(
        type='ResNet',
        depth=50,
        num_stages=4,
        out_indices=(0, 1, 2, 3),
        frozen_stages=-1,
        # norm_cfg=dict(type='BN'), # for debug
        norm_cfg=dict(type='SyncBN', eps=1e-3, momentum=0.01),
        norm_eval=False,
        style='pytorch'),
    # With MOCO
    pts_backbone=dict(
        type='PointNet2SAMSG',
        in_channels=4,
        num_points=(4096, 1024, (512, 512)),
        radii=((0.2, 0.4, 0.8), (0.4, 0.8, 1.6), (1.6, 3.2, 4.8)),
        num_samples=((32, 32, 64), (32, 32, 64), (16, 16, 16)),
        sa_channels=(((32, 32, 64), (32, 32, 64), (64, 64, 128)),
                     ((64, 64, 128), (64, 64, 128), (128, 128, 256)),
                     ((128, 128, 256), (128, 128, 256), (256, 256, 512))),
        aggregation_channels=(128, 256, 1024),
        fps_mods=(('D-FPS'), ('FS'), ('F-FPS', 'D-FPS')),
        fps_sample_range_lists=((-1), (-1), (512, -1)),
        norm_cfg=dict(type='BN2d', eps=1e-3, momentum=0.1),
        sa_cfg=dict(
            type='PointSAModuleMSG',
            pool_mod='max',
            # use_xyz=True,
            use_xyz=False,
            normalize_xyz=False)),
    # model training and testing settings
    train_cfg=dict(
        cl_strategy = dict(
            # Projection-head sizes for the intra-/inter-modality branches.
            pts_intro_hidden_dim=1024,
            pts_intro_out_dim=128,
            img_inter_hidden_dim=2048,
            img_inter_out_dim=128,
            pts_inter_hidden_dim=1024,
            pts_inter_out_dim=128,
            pts_feat_dim=1024,
            img_feat_dim=2048,
            # K/m/T: presumably the MoCo queue size, momentum and temperature
            # — confirm against the Inter_Intro_moco_better implementation.
            K=8192*4,
            m=0.999,
            T=0.07,
            points_center=[35.2, 0, -1],
            cross_factor=1,
            moco=False,
            simsiam=False,
            ############################################
            img_moco=False,
            point_intro=True, # intro-loss
            point_branch=True # if pts backbone
        )))
# dataset settings
dataset_type = 'KittiDataset'
data_root = 'data/kitti/'
class_names = ['Pedestrian', 'Cyclist', 'Car']
# BGR mean/std normalisation (to_rgb=False keeps the OpenCV channel order).
img_norm_cfg = dict(
    mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False)
input_modality = dict(use_lidar=True, use_camera=True)
# db_sampler = dict(
#     data_root=data_root,
#     info_path=data_root + 'kitti_dbinfos_train.pkl',
#     rate=1.0,
#     prepare=dict(filter_by_difficulty=[-1], filter_by_min_points=dict(Car=5)),
#     classes=class_names,
#     sample_groups=dict(Car=15))
file_client_args = dict(backend='disk')
# Uncomment the following if use ceph or other file clients.
# See https://mmcv.readthedocs.io/en/latest/api.html#mmcv.fileio.FileClient
# for more details.
# file_client_args = dict(
#     backend='petrel', path_mapping=dict(data='s3://kitti_data/'))
train_pipeline = [
    dict(type='LoadPointsFromFile', coord_type='LIDAR', load_dim=4, use_dim=4),
    dict(type='LoadImageFromFile'),
    dict(type='PointsRangeFilter', point_cloud_range=point_cloud_range), # filter range
    dict(type='IndoorPointSample', num_points=16384), # sample here only for pretrain!
    dict(type='LoadAnnotations3D', with_bbox_3d=True, with_label_3d=True),
    ##############################
    dict(
        type='Resize',
        # img_scale=[(640, 192), (2560, 768)],
        img_scale=[(640, 192), (2400, 720)],
        multiscale_mode='range',
        keep_ratio=True),
    ##############################
    # Wider rotation/scale ranges than the usual detection defaults (see the
    # commented values) — stronger augmentation for contrastive pretraining.
    dict(
        type='GlobalRotScaleTrans',
        # rot_range=[-0.78539816, 0.78539816],
        # scale_ratio_range=[0.95, 1.05],
        rot_range=[-1.5707963, 1.5707963],
        scale_ratio_range=[0.75, 1.25],
        translation_std=[0, 0, 0],
        points_center=[35.2, 0, -1]),
    dict(type='RandomFlip3D', flip_ratio_bev_horizontal=0.5),
    # dict(type='ObjectRangeFilter', point_cloud_range=point_cloud_range),
    dict(type='PointShuffle'),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle3D', class_names=class_names),
    dict(
        type='Collect3D',
        keys=['points', 'img', 'gt_bboxes_3d', 'gt_labels_3d', 'points_ori']),
]
test_pipeline = [] # No need to test
# for dataset
pretraining=True
cross=True # for cross pretrain
# Dataloader configuration: batch size / workers are per GPU.
data = dict(
    samples_per_gpu=4,
    workers_per_gpu=4,
    # samples_per_gpu=3,
    # workers_per_gpu=3,
    train=dict(
        type='RepeatDataset',
        times=1,
        dataset=dict(
            type=dataset_type,
            data_root=data_root,
            ann_file=data_root + 'kitti_infos_train.pkl',
            split='training',
            pts_prefix='velodyne_reduced',
            pipeline=train_pipeline,
            modality=input_modality,
            classes=class_names,
            test_mode=False,
            pretraining=True,
            cross=True,
            # we use box_type_3d='LiDAR' in kitti and nuscenes dataset
            # and box_type_3d='Depth' in sunrgbd and scannet dataset.
            box_type_3d='LiDAR')),
    # actually there is no val
    val=dict(
        type=dataset_type,
        data_root=data_root,
        ann_file=data_root + 'kitti_infos_val.pkl',
        split='training',
        pts_prefix='velodyne_reduced',
        pipeline=test_pipeline,
        modality=input_modality,
        classes=class_names,
        test_mode=True,
        pretraining=True,
        box_type_3d='LiDAR'))
# Not be used in pretrain (start=9999 effectively disables evaluation)
evaluation = dict(start=9999, interval=1) # No use
# optimizer
# Hybrid setup: AdamW for the point branch, SGD for the image branch and the
# projection MLPs — each with its own learning rate and stepping interval.
optimizer = dict(
    constructor='HybridOptimizerConstructor',
    pts=dict(
        type='AdamW',
        # lr=0.002,
        lr=0.001,
        betas=(0.95, 0.99),
        weight_decay=0.01,
        step_interval=1),
    img=dict(
        type='SGD',
        # lr=0.03,
        lr=0.03,
        momentum=0.9,
        weight_decay=0.0001,
        step_interval=1),
    mlp=dict(
        type='SGD',
        # lr=0.03,
        lr=0.03,
        momentum=0.9,
        weight_decay=0.0001,
        step_interval=1))
# optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
optimizer_config = dict(grad_clip=None)
# lr_config = dict(policy='CosineAnnealing', min_lr=0, warmup='linear', warmup_iters=10, warmup_ratio=0.001, warmup_by_epoch=True)
lr_config = dict(policy='Exp', gamma=0.99)
# runtime settings
checkpoint_config = dict(interval=5)
# yapf:disable
log_config = dict(
    interval=30,
    hooks=[
        dict(type='TextLoggerHook'),
        dict(type='TensorboardLoggerHook')
    ])
# yapf:enable
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = None
load_from = None
resume_from = None
workflow = [('train', 1)]
total_epochs = 100
runner = dict(type='EpochBasedRunner', max_epochs=total_epochs)
find_unused_parameters=True # I cannot find it
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Greatest common divisor (Euclidean algorithm).
# NOTE(review): the original comment said "least common multiple"
# (最小公倍数), but the function computes the GCD.
def gcd(n1: int, n2: int) -> int:
    """Return the greatest common divisor of n1 and n2.

    Iterative Euclidean algorithm (same results as the previous recursive
    version, but immune to Python's recursion limit for large inputs).
    """
    while n2 != 0:
        n1, n2 = n2, n1 % n2
    return n1


if __name__ == "__main__":
    print(gcd(120, 180))
class ScoreCard:
    """Scores a ten-pin bowling card given as text.

    Frames are separated by '|'; the bonus balls of the tenth frame, if any,
    follow a '||' separator (e.g. 'X|X|X|X|X|X|X|X|X|X||XX').  Within a frame
    'X' is a strike, '/' marks a spare, '-' a miss, and a digit its pin count.
    """

    def __init__(self, score_text: str):
        score_texts = score_text.split('|')
        # The first ten entries are the regular frames.
        self.normal_turns = [score_texts[i] for i in range(10)]
        # With bonus balls the split yields 12 entries: index 10 is the empty
        # string between the double bar, index 11 holds the bonus balls.
        # BUG FIX: default to an empty list so that a card *without* bonus
        # balls no longer crashes with AttributeError when scoring a
        # strike/spare (additional_turns/all_turns used to be set only for
        # 12-entry cards).
        if len(score_texts) >= 12:
            self.additional_turns = [score_texts[11]]
        else:
            self.additional_turns = []
        self.all_turns = self.normal_turns + self.additional_turns

    def to_score(self) -> int:
        """Total score of the card."""
        total = 0
        for turn in range(len(self.normal_turns)):
            total = total + self.get_score_by_turn(turn)
        return total

    def get_score_by_turn(self, turn: int) -> int:
        """Score of a single frame, bonus included."""
        frame = self.normal_turns[turn]
        score = self.text_to_score(frame)
        if self.__is_strike(frame):
            # A strike is rewarded with the pins of the next two balls.
            return score + self.__get_bonus_score(turn, 2)
        if self.__is_spare(frame):
            # BUG FIX: a spare is rewarded with the next *one* ball only;
            # the previous code counted the next two balls.
            return score + self.__get_bonus_score(turn, 1)
        return score

    def __get_bonus_score(self, turn: int, num_balls: int = 2) -> int:
        """Pin count of the next `num_balls` balls thrown after frame `turn`."""
        if turn + 1 == len(self.normal_turns):
            # Tenth frame: the bonus balls live after the '||' separator.
            next_balls = self.additional_turns[0][0:num_balls]
        else:
            # Concatenate the following throws and take the leading balls;
            # the slice also tolerates a missing frame at the card's end.
            next_balls = ''.join(self.all_turns[turn + 1:turn + 3])[0:num_balls]
        return self.text_to_score(next_balls)

    def text_to_score(self, score_text: str) -> int:
        """Pin count of a ball sequence; 'n/' (a spare) counts as a full 10."""
        if score_text.find('/') == 1:
            return 10
        score = 0
        for ball in score_text:
            score = score + self.__char_to_score(ball)
        return score

    def __char_to_score(self, score_text: str) -> int:
        if self.__is_strike(score_text):
            return 10
        elif score_text == '-':
            return 0
        else:
            return int(score_text)

    def __is_strike(self, score_text: str) -> bool:
        return score_text.upper() == 'X'

    def __is_spare(self, score_text: str) -> bool:
        return score_text.find('/') == 1
class RefreshIPOperation(object):
    """Syncs a CloudShell resource's IP attributes from its deployed Azure VM."""

    def __init__(self, vm_service, resource_id_parser):
        """
        :param vm_service: cloudshell.cp.azure.domain.services.virtual_machine_service.VirtualMachineService
        :param resource_id_parser: cloudshell.cp.azure.common.parsers.azure_model_parser.AzureModelsParser
        :return:
        """
        self.vm_service = vm_service
        self.resource_id_parser = resource_id_parser

    def refresh_ip(self, cloudshell_session, compute_client, network_client, resource_group_name, vm_name,
                   private_ip_on_resource, public_ip_on_resource_attr_tuple, resource_fullname, logger):
        """Refresh Public and Private IP on CloudShell resource from corresponding deployed Azure instance

        :param cloudshell_session: cloudshell.api.cloudshell_api.CloudShellAPISession instance
        :param compute_client: azure.mgmt.compute.ComputeManagementClient instance
        :param network_client: azure.mgmt.network.NetworkManagementClient instance
        :param resource_group_name: The name of the resource group
        :param vm_name: The name of the virtual machine
        :param private_ip_on_resource: private IP on the CloudShell resource
        :param public_ip_on_resource_attr_tuple: (key,val) public IP on the CloudShell resource (we preserve public ip namespace key)
        :param resource_fullname: full resource name on the CloudShell
        :param logger: logging.Logger instance
        :return:
        """
        # check if VM exists and in the correct state
        logger.info("Check that VM {} exists under resource group {} and is active".format(
            vm_name, resource_group_name))
        public_ip_key = public_ip_on_resource_attr_tuple[0]
        public_ip_on_resource = public_ip_on_resource_attr_tuple[1]
        vm = self.vm_service.get_active_vm(
            compute_management_client=compute_client,
            group_name=resource_group_name,
            vm_name=vm_name)
        # find the primary nic; fall back to the first NIC when none is
        # flagged as primary
        primary_nic_ref = next(iter(filter(lambda x: x.primary, vm.network_profile.network_interfaces)), None)
        nic_reference = primary_nic_ref if primary_nic_ref else vm.network_profile.network_interfaces[0]
        nic_name = self.resource_id_parser.get_name_from_resource_id(nic_reference.id)
        logger.info("Retrieving NIC {} for VM {}".format(nic_name, vm_name))
        nic = network_client.network_interfaces.get(resource_group_name, nic_name)
        # only the first IP configuration of the NIC is inspected
        vm_ip_configuration = nic.ip_configurations[0]
        private_ip_on_azure = vm_ip_configuration.private_ip_address
        public_ip_reference = vm_ip_configuration.public_ip_address
        if public_ip_reference is None:
            logger.info("There is no Public IP attached to VM {}".format(vm_name))
            # empty string written back — presumably this clears the attribute
            # on the CloudShell side; confirm CloudShell semantics
            public_ip_on_azure = ""
        else:
            public_ip_name = self.resource_id_parser.get_name_from_resource_id(public_ip_reference.id)
            logger.info("Retrieving Public IP {} for VM {}".format(public_ip_name, vm_name))
            pub_ip_addr = network_client.public_ip_addresses.get(resource_group_name, public_ip_name)
            public_ip_on_azure = pub_ip_addr.ip_address
        logger.info("Public IP on Azure: '{}'".format(public_ip_on_azure))
        logger.info("Public IP on CloudShell: '{}'".format(public_ip_on_resource))
        # write back only on change to avoid needless CloudShell API calls
        if public_ip_on_azure != public_ip_on_resource:
            logger.info("Updating Public IP on the resource to '{}' ...".format(public_ip_on_azure))
            cloudshell_session.SetAttributeValue(resource_fullname, public_ip_key, public_ip_on_azure)
        logger.info("Private IP on Azure: '{}'".format(private_ip_on_azure))
        logger.info("Private IP on CloudShell: '{}'".format(private_ip_on_resource))
        if private_ip_on_azure != private_ip_on_resource:
            logger.info("Updating Private IP on the resource to '{}' ...".format(private_ip_on_azure))
            cloudshell_session.UpdateResourceAddress(resource_fullname, private_ip_on_azure)
## Copyright 2018 The Chromium Authors. All rights reserved.
## Use of this source code is governed by a BSD-style license that can be
## found in the LICENSE file.
# Provider exposing the stamp file produced by a compatibility check run.
ProtocolCompatibilityInfo = provider()

def _check_protocol_compatibility_impl(ctx):
    # The stamp file is the action's sole declared output; downstream rules can
    # depend on it to force the check to run (the checker tool presumably
    # writes it only on success — see the tool's own documentation).
    stamp = ctx.actions.declare_file("{}.stamp".format(ctx.attr.name))
    ctx.actions.run(
        outputs = [stamp],
        inputs = [ctx.file.protocol],
        arguments = [
            "--stamp",
            stamp.path,
            ctx.file.protocol.path,
        ],
        executable = ctx.executable._check_protocol_compatibility,
    )
    return [
        ProtocolCompatibilityInfo(
            stamp = stamp,
        ),
    ]

# Runs the CheckProtocolCompatibility tool over a .json protocol definition.
check_protocol_compatibility = rule(
    implementation = _check_protocol_compatibility_impl,
    attrs = {
        "protocol": attr.label(
            mandatory = True,
            allow_single_file = [".json"],
        ),
        "_check_protocol_compatibility": attr.label(
            default = Label("//third_party/inspector_protocol:CheckProtocolCompatibility"),
            executable = True,
            cfg = "host",
        ),
    },
)
|
# Bot deployment settings — fill in locally; never commit real secrets.
bot_token = ""  # API token for the bot (intentionally blank)
# presumably the user ids the bot serves — confirm against the bot code
users = [
]
# sites the bot works with (monitored/scraped — TODO confirm usage)
website_links =[
"https://www.google.com",
]
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Clock Tree Synthesis openROAD commands"""

load("//place_and_route:open_road.bzl", "OpenRoadInfo", "format_openroad_do_not_use_list", "merge_open_road_info", "openroad_command")
load("//synthesis:build_defs.bzl", "SynthesisInfo")
load("//pdk:open_road_configuration.bzl", "get_open_road_configuration")

def clock_tree_synthesis(ctx, open_road_info):
    """Performs clock tree synthesis.

    Returns:
      OpenRoadInfo: the openROAD info provider containing required input files
        and commands run.

    Args:
      ctx: Bazel rule ctx
      open_road_info: OpenRoadInfo provider from a previous step.
    """
    netlist_target = ctx.attr.synthesized_rtl
    # liberty file of the standard-cell library's default corner
    liberty = netlist_target[SynthesisInfo].standard_cell_info.default_corner.liberty
    open_road_configuration = get_open_road_configuration(ctx.attr.synthesized_rtl[SynthesisInfo])
    rc_script = open_road_configuration.rc_script_configuration
    inputs = [
        liberty,
    ]
    if rc_script:
        inputs.append(rc_script)
    open_road_commands = [
        "read_liberty {liberty_file}".format(
            liberty_file = liberty.path,
        ),
        # optional PDK-provided wire-RC setup script (empty command if absent)
        "source {}".format(rc_script.path) if rc_script else "",
        "remove_buffers",
        "set_wire_rc -signal -layer \"{}\"".format(open_road_configuration.wire_rc_signal_metal_layer),
        "set_wire_rc -clock -layer \"{}\"".format(open_road_configuration.wire_rc_clock_metal_layer),
        format_openroad_do_not_use_list(open_road_configuration.do_not_use_cell_list),
        "configure_cts_characterization",
        "estimate_parasitics -placement",
        "repair_clock_inverters",
        "clock_tree_synthesis -root_buf \"{cts_buffer}\" -buf_list \"{cts_buffer}\" -sink_clustering_enable".format(
            cts_buffer = open_road_configuration.cts_buffer_cell,
        ),
        "repair_clock_nets",
        "estimate_parasitics -placement",
        "set_propagated_clock [all_clocks]",
        "repair_timing",
        "detailed_placement",
        "report_checks -path_delay min_max -format full_clock_expanded -fields {input_pin slew capacitance} -digits 3",
        "detailed_placement",
        "filler_placement \"{filler_cells}\"".format(
            filler_cells = " ".join(open_road_configuration.fill_cells),
        ),
        "check_placement",
    ]
    command_output = openroad_command(
        ctx,
        commands = open_road_commands,
        input_db = open_road_info.output_db,
        inputs = inputs,
        step_name = "clock_tree_synthesis",
    )
    current_action_open_road_info = OpenRoadInfo(
        commands = open_road_commands,
        input_files = depset(inputs),
        output_db = command_output.db,
        logs = depset([command_output.log_file]),
    )
    return merge_open_road_info(open_road_info, current_action_open_road_info)
# okpy/Gofer autograder test specification for question "q3_b": one public
# doctest case asserting that `no_match` is one of the four listed strings.
test = { 'name': 'q3_b',
  'points': 5,
  'suites': [ { 'cases': [ { 'code': '>>> no_match in '
                                     "list(['professor', 'engineer', "
                                     "'scientist', 'cat'])\n"
                                     'True',
                             'hidden': False,
                             'locked': False}],
                'scored': True,
                'setup': '',
                'teardown': '',
                'type': 'doctest'}]}
|
# Auto-generated IronPython stub describing System.Net.CookieContainer.
# Method bodies are intentionally `pass`; the docstrings carry the .NET
# overload signatures for IDE completion.
# NOTE(review): `instance=ZZZ()` below calls ZZZ without `self` and would
# raise TypeError if this stub were actually imported; stubs of this family
# appear intended for static completion only — confirm before importing.
class CookieContainer(object):
    """
    Provides a container for a collection of System.Net.CookieCollection objects.

    CookieContainer()
    CookieContainer(capacity: int)
    CookieContainer(capacity: int,perDomainCapacity: int,maxCookieSize: int)
    """
    def ZZZ(self):
        """hardcoded/mock instance of the class"""
        return CookieContainer()
    instance=ZZZ()
    """hardcoded/returns an instance of the class"""
    def Add(self,*__args):
        """
        Add(self: CookieContainer,cookie: Cookie)
        Adds a System.Net.Cookie to a System.Net.CookieContainer. This method uses the domain from the System.Net.Cookie to determine which domain collection to associate the
        System.Net.Cookie with.
        cookie: The System.Net.Cookie to be added to the System.Net.CookieContainer.
        Add(self: CookieContainer,cookies: CookieCollection)
        Adds the contents of a System.Net.CookieCollection to the System.Net.CookieContainer.
        cookies: The System.Net.CookieCollection to be added to the System.Net.CookieContainer.
        Add(self: CookieContainer,uri: Uri,cookie: Cookie)
        Adds a System.Net.Cookie to the System.Net.CookieContainer for a particular URI.
        uri: The URI of the System.Net.Cookie to be added to the System.Net.CookieContainer.
        cookie: The System.Net.Cookie to be added to the System.Net.CookieContainer.
        Add(self: CookieContainer,uri: Uri,cookies: CookieCollection)
        Adds the contents of a System.Net.CookieCollection to the System.Net.CookieContainer for a particular URI.
        uri: The URI of the System.Net.CookieCollection to be added to the System.Net.CookieContainer.
        cookies: The System.Net.CookieCollection to be added to the System.Net.CookieContainer.
        """
        pass
    def GetCookieHeader(self,uri):
        """
        GetCookieHeader(self: CookieContainer,uri: Uri) -> str
        Gets the HTTP cookie header that contains the HTTP cookies that represent the System.Net.Cookie instances that are associated with a specific URI.
        uri: The URI of the System.Net.Cookie instances desired.
        Returns: An HTTP cookie header,with strings representing System.Net.Cookie instances delimited by semicolons.
        """
        pass
    def GetCookies(self,uri):
        """
        GetCookies(self: CookieContainer,uri: Uri) -> CookieCollection
        Gets a System.Net.CookieCollection that contains the System.Net.Cookie instances that are associated with a specific URI.
        uri: The URI of the System.Net.Cookie instances desired.
        Returns: A System.Net.CookieCollection that contains the System.Net.Cookie instances that are associated with a specific URI.
        """
        pass
    def SetCookies(self,uri,cookieHeader):
        """
        SetCookies(self: CookieContainer,uri: Uri,cookieHeader: str)
        Adds System.Net.Cookie instances for one or more cookies from an HTTP cookie header to the System.Net.CookieContainer for a specific URI.
        uri: The URI of the System.Net.CookieCollection.
        cookieHeader: The contents of an HTTP set-cookie header as returned by a HTTP server,with System.Net.Cookie instances delimited by commas.
        """
        pass
    def __add__(self,*args):
        """ x.__add__(y) <==> x+yx.__add__(y) <==> x+yx.__add__(y) <==> x+yx.__add__(y) <==> x+y """
        pass
    @staticmethod
    def __new__(self,capacity=None,perDomainCapacity=None,maxCookieSize=None):
        """
        __new__(cls: type)
        __new__(cls: type,capacity: int)
        __new__(cls: type,capacity: int,perDomainCapacity: int,maxCookieSize: int)
        """
        pass
    Capacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets and sets the number of System.Net.Cookie instances that a System.Net.CookieContainer can hold.
    Get: Capacity(self: CookieContainer) -> int
    Set: Capacity(self: CookieContainer)=value
    """
    Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets the number of System.Net.Cookie instances that a System.Net.CookieContainer currently holds.
    Get: Count(self: CookieContainer) -> int
    """
    MaxCookieSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Represents the maximum allowed length of a System.Net.Cookie.
    Get: MaxCookieSize(self: CookieContainer) -> int
    Set: MaxCookieSize(self: CookieContainer)=value
    """
    PerDomainCapacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets and sets the number of System.Net.Cookie instances that a System.Net.CookieContainer can hold per domain.
    Get: PerDomainCapacity(self: CookieContainer) -> int
    Set: PerDomainCapacity(self: CookieContainer)=value
    """
    DefaultCookieLengthLimit=4096
    DefaultCookieLimit=300
    DefaultPerDomainCookieLimit=20
#I, Andy Le, student number 000805099, certify that all code submitted is my own work;
# that i have not copied it from any other source. I also certify that I have not allowed my work to be copied by others.
# Echo the user's favourite colour back to them.
color = input("What is your favourite color? ")
print(f"You said {color}")
# This is a comment, Python will ignore it.
# variables 2
# Demonstrates dynamic typing: an int and a str bound without declarations.
x = 5
y = "John"
print(x)
print(y)
# Variables in Python don't need a declared type (boolean, int, char, string, ...)
def format_timestamp(timestamp):
    """Format a datetime/date as e.g. ``'9:07 AM, Friday March 5, 2021'``.

    Hour and day are rendered without zero padding — strftime has no portable
    flag for that, hence the int() round-trips.  Modernized from Python 2:
    ``unicode(...)`` became ``str(...)``.

    :param timestamp: object with a ``timetuple()`` method (datetime/date).
    :return: the formatted string (locale-dependent month/weekday names).
    """
    import time  # local import: this snippet's import block is not visible here

    localtime = timestamp.timetuple()
    result = str(int(time.strftime('%I', localtime)))
    result += time.strftime(':%M %p, %A %B ', localtime)
    result += str(int(time.strftime('%d', localtime)))
    # BUG FIX: the year must be taken from `localtime` too; the original
    # called strftime(', %Y') without it, substituting the *current* year.
    result += time.strftime(', %Y', localtime)
    return result
|
#!/usr/bin/python3
def safe_print_list(my_list=[], x=0):
    """Print up to x integers of my_list on one line, then a newline.

    Stops quietly when x exceeds the list length and returns the number of
    elements actually printed.
    """
    printed = 0
    try:
        while my_list and printed < x:
            print("{:d}".format(my_list[printed]), end="")
            printed += 1
    except IndexError:
        pass
    print()
    return printed
|
# 9TH PROGRAM
# THIS PROGRAM WILL HELP IN ACCESSING DICTIONARY ITEMS AND PERFORM CERTAIN OPERATIONS WITH DICTIONARY
ages = {}  # EMPTY DICTIONARY
ages["Micky"] = 24
ages["Lucky"] = 25
print(ages)
# BUG FIX: .keys/.values are methods — without the parentheses the *bound
# method object* was printed instead of the keys/values the comments promise.
keys = ages.keys()  # .keys() returns a view of all the keys available in the dictionary
print(keys)
values = ages.values()  # .values() returns a view of all the values available in the dictionary
print(values)
print(sorted(ages))
# sorted(ages.values()) also works now that the method is actually called
print(ages.values())  # Prints the values
# NOTE has_key() has been replaced by "in" in Python 3 , You can access like below.
# Syntax : "Values" in "dict"
if("Micky" in ages):
    print("Micky is there")
else:
    print("Micky is not there")
print(len(ages))  # Print the length of the dictionary
# Adding new item
# New initialization
ages = {"Snehasis" : "24" , "Sradhasis" : 25}
print(ages)
# New members
ages["LKP"] = 45  # Here value is saved as int
if("LKP" in ages):
    updatedValue = ages.get("LKP") + 10
    print("Updated Value = ", updatedValue)
print(ages)
ages["JYOTI"] = "38"  # Here value is saved as string
if("JYOTI" in ages):
    updatedValue = ages.get("JYOTI") + " New Age"
    print("Updated Value = ", updatedValue)
print(ages)
|
# coding=utf-8
"""
@Time: 2020/11/14 2:15 PM
@Author: Aopolin
@File: MolweniConfig.py
@Contact: aopolin.ii@gmail.com
@Description: Hyper-parameter container for BERT-style extractive QA
              training/evaluation on the Molweni (SQuAD2.0-format) dataset.
"""


class Config(object):
    def __init__(self):
        self.SQUAD_DIR = "../../Dataset/squad2.0"
        self.MOLWENI_DIR = "../../Dataset/Molweni"
        self.model_type = "bert"  # ["distilbert", "albert", "bert", "xlnet", ...]
        self.model_name_or_path = "bert-base-uncased"
        self.output_dir = "/tmp/debug_squad/"  # output directory path
        self.data_dir = ""
        self.train_file = self.MOLWENI_DIR + "/train_small.json"
        self.predict_file = self.MOLWENI_DIR + "/dev_small.json"
        self.config_name = ""
        self.tokenizer_name = ""
        self.cache_dir = ""
        self.version_2_with_negative = True  # SQuAD2.0-style unanswerable questions
        self.null_score_diff_threshold = 0.0
        self.n_gpu = 0
        self.max_seq_length = 384
        self.doc_stride = 128
        self.max_query_length = 64
        self.do_train = True
        self.do_eval = True
        self.evaluate_during_training = False
        self.do_lower_case = True
        self.per_gpu_train_batch_size = 12
        self.per_gpu_eval_batch_size = 8
        self.learning_rate = 3e-5
        self.gradient_accumulation_steps = 1  # Number of updates steps to accumulate before performing a backward/update pass
        self.weight_decay = 0.0
        self.adam_epsilon = 1e-8
        self.max_grad_norm = 1.0
        self.num_train_epochs = 1.0  # number of training epochs
        self.max_steps = -1  # maximum number of steps; if > 0 this overrides num_train_epochs
        self.warmup_steps = 0
        self.n_best_size = 20
        self.max_answer_length = 30
        self.verbose_logging = False
        self.lang_id = 0
        self.logging_steps = 500  # logging interval (steps)
        self.save_steps = 2000  # interval (steps) for saving the model and its parameters
        self.eval_all_checkpoints = False
        self.no_cuda = True
        self.overwrite_cache = False  # overwrite the cached feature files
        self.seed = 42  # random seed
        self.local_rank = -1  # process rank for distributed training; -1 disables distributed mode
        self.fp16 = False
        # BUG FIX: Apex AMP optimization levels are spelled with the letter O
        # ("O0".."O3"); the original "01" (zero-one) is rejected by Apex.
        self.fp16_opt_level = "O1"
        self.server_ip = ""
        self.server_port = ""
        self.threads = 1
        self.bert_dir = "../../Model_files/bert-base-uncased/"
        self.device = "cpu"
|
#
# PySNMP MIB module A3COM-HUAWEI-LswIGSP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/A3COM-HUAWEI-LswIGSP-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 16:51:01 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE(review): auto-generated module — do not edit by hand.  It expects a
# `mibBuilder` name to be injected by the pysnmp MIB loader that executes it;
# it is not importable standalone.
lswCommon, = mibBuilder.importSymbols("A3COM-HUAWEI-OID-MIB", "lswCommon")
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
IpAddress, Unsigned32, ModuleIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, iso, TimeTicks, NotificationType, Counter32, Integer32, ObjectIdentity, Gauge32, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "Unsigned32", "ModuleIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "iso", "TimeTicks", "NotificationType", "Counter32", "Integer32", "ObjectIdentity", "Gauge32", "Bits")
RowStatus, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TextualConvention", "DisplayString")
hwLswIgmpsnoopingMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7))
hwLswIgmpsnoopingMib.setRevisions(('2001-06-29 00:00',))
if mibBuilder.loadTexts: hwLswIgmpsnoopingMib.setLastUpdated('200106290000Z')
if mibBuilder.loadTexts: hwLswIgmpsnoopingMib.setOrganization('')
# enabled(1)/disabled(2) textual convention used by the scalars/columns below
class EnabledStatus(TextualConvention, Integer32):
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
    namedValues = NamedValues(("enabled", 1), ("disabled", 2))
hwLswIgmpsnoopingMibObject = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1))
hwIgmpSnoopingStatus = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 1), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwIgmpSnoopingStatus.setStatus('current')
hwIgmpSnoopingRouterPortAge = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1000)).clone(105)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwIgmpSnoopingRouterPortAge.setStatus('current')
hwIgmpSnoopingResponseTime = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 25)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwIgmpSnoopingResponseTime.setStatus('current')
hwIgmpSnoopingHostTime = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(200, 1000)).clone(260)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwIgmpSnoopingHostTime.setStatus('current')
hwIgmpSnoopingGroupLimitTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 5), )
if mibBuilder.loadTexts: hwIgmpSnoopingGroupLimitTable.setStatus('current')
hwIgmpSnoopingGroupLimitEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 5, 1), ).setIndexNames((0, "A3COM-HUAWEI-LswIGSP-MIB", "hwIgmpSnoopingGroupIfIndex"))
if mibBuilder.loadTexts: hwIgmpSnoopingGroupLimitEntry.setStatus('current')
hwIgmpSnoopingGroupIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 5, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: hwIgmpSnoopingGroupIfIndex.setStatus('current')
hwIgmpSnoopingGroupLimitNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 5, 1, 2), Unsigned32().clone(4294967295)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwIgmpSnoopingGroupLimitNumber.setStatus('current')
hwIgmpSnoopingFastLeaveTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 6), )
if mibBuilder.loadTexts: hwIgmpSnoopingFastLeaveTable.setStatus('current')
hwIgmpSnoopingFastLeaveEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 6, 1), ).setIndexNames((0, "A3COM-HUAWEI-LswIGSP-MIB", "hwIgmpSnoopingFastLeaveIfIndex"))
if mibBuilder.loadTexts: hwIgmpSnoopingFastLeaveEntry.setStatus('current')
hwIgmpSnoopingFastLeaveIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 6, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: hwIgmpSnoopingFastLeaveIfIndex.setStatus('current')
hwIgmpSnoopingFastLeaveStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 6, 1, 2), EnabledStatus().clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwIgmpSnoopingFastLeaveStatus.setStatus('current')
hwIgmpSnoopingGroupPolicyTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 7), )
if mibBuilder.loadTexts: hwIgmpSnoopingGroupPolicyTable.setStatus('current')
hwIgmpSnoopingGroupPolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 7, 1), ).setIndexNames((0, "A3COM-HUAWEI-LswIGSP-MIB", "hwIgmpSnoopingGroupPolicyIfIndex"), (0, "A3COM-HUAWEI-LswIGSP-MIB", "hwIgmpSnoopingGroupPolicyVlanID"))
if mibBuilder.loadTexts: hwIgmpSnoopingGroupPolicyEntry.setStatus('current')
hwIgmpSnoopingGroupPolicyIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 7, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: hwIgmpSnoopingGroupPolicyIfIndex.setStatus('current')
hwIgmpSnoopingGroupPolicyVlanID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 7, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094)))
if mibBuilder.loadTexts: hwIgmpSnoopingGroupPolicyVlanID.setStatus('current')
hwIgmpSnoopingGroupPolicyParameter = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 7, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2000, 2999))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwIgmpSnoopingGroupPolicyParameter.setStatus('current')
hwIgmpSnoopingGroupPolicyStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 7, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwIgmpSnoopingGroupPolicyStatus.setStatus('current')
hwIgmpSnoopingNonFloodingStatus = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 8), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwIgmpSnoopingNonFloodingStatus.setStatus('current')
hwIgmpSnoopingVlanStatusTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 9), )
if mibBuilder.loadTexts: hwIgmpSnoopingVlanStatusTable.setStatus('current')
hwIgmpSnoopingVlanStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 9, 1), ).setIndexNames((0, "A3COM-HUAWEI-LswIGSP-MIB", "hwIgmpSnoopingVlanID"))
if mibBuilder.loadTexts: hwIgmpSnoopingVlanStatusEntry.setStatus('current')
hwIgmpSnoopingVlanID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 9, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094)))
if mibBuilder.loadTexts: hwIgmpSnoopingVlanID.setStatus('current')
hwIgmpSnoopingVlanEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 9, 1, 2), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwIgmpSnoopingVlanEnabled.setStatus('current')
hwIgmpSnoopingStatsObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 10))
hwRecvIGMPGQueryNum = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 10, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRecvIGMPGQueryNum.setStatus('current')
hwRecvIGMPSQueryNum = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 10, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRecvIGMPSQueryNum.setStatus('current')
hwRecvIGMPV1ReportNum = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 10, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRecvIGMPV1ReportNum.setStatus('current')
hwRecvIGMPV2ReportNum = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 10, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRecvIGMPV2ReportNum.setStatus('current')
hwRecvIGMPLeaveNum = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 10, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRecvIGMPLeaveNum.setStatus('current')
hwRecvErrorIGMPPacketNum = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 10, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwRecvErrorIGMPPacketNum.setStatus('current')
hwSentIGMPSQueryNum = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 10, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSentIGMPSQueryNum.setStatus('current')
hwIgmpSnoopingClearStats = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 23, 1, 7, 1, 10, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("clear", 1), ("counting", 2))).clone('counting')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwIgmpSnoopingClearStats.setStatus('current')
mibBuilder.exportSymbols("A3COM-HUAWEI-LswIGSP-MIB", hwIgmpSnoopingStatus=hwIgmpSnoopingStatus, hwIgmpSnoopingResponseTime=hwIgmpSnoopingResponseTime, hwIgmpSnoopingGroupPolicyParameter=hwIgmpSnoopingGroupPolicyParameter, hwIgmpSnoopingRouterPortAge=hwIgmpSnoopingRouterPortAge, hwIgmpSnoopingHostTime=hwIgmpSnoopingHostTime, hwRecvIGMPV2ReportNum=hwRecvIGMPV2ReportNum, hwIgmpSnoopingGroupPolicyEntry=hwIgmpSnoopingGroupPolicyEntry, hwIgmpSnoopingGroupPolicyVlanID=hwIgmpSnoopingGroupPolicyVlanID, hwIgmpSnoopingGroupLimitEntry=hwIgmpSnoopingGroupLimitEntry, hwSentIGMPSQueryNum=hwSentIGMPSQueryNum, hwIgmpSnoopingGroupPolicyStatus=hwIgmpSnoopingGroupPolicyStatus, hwLswIgmpsnoopingMibObject=hwLswIgmpsnoopingMibObject, hwRecvIGMPSQueryNum=hwRecvIGMPSQueryNum, hwIgmpSnoopingGroupIfIndex=hwIgmpSnoopingGroupIfIndex, hwLswIgmpsnoopingMib=hwLswIgmpsnoopingMib, hwIgmpSnoopingVlanEnabled=hwIgmpSnoopingVlanEnabled, hwIgmpSnoopingClearStats=hwIgmpSnoopingClearStats, hwIgmpSnoopingStatsObjects=hwIgmpSnoopingStatsObjects, hwRecvErrorIGMPPacketNum=hwRecvErrorIGMPPacketNum, PYSNMP_MODULE_ID=hwLswIgmpsnoopingMib, hwIgmpSnoopingFastLeaveIfIndex=hwIgmpSnoopingFastLeaveIfIndex, hwRecvIGMPLeaveNum=hwRecvIGMPLeaveNum, hwIgmpSnoopingGroupLimitNumber=hwIgmpSnoopingGroupLimitNumber, hwIgmpSnoopingNonFloodingStatus=hwIgmpSnoopingNonFloodingStatus, hwIgmpSnoopingGroupLimitTable=hwIgmpSnoopingGroupLimitTable, hwIgmpSnoopingFastLeaveTable=hwIgmpSnoopingFastLeaveTable, hwRecvIGMPGQueryNum=hwRecvIGMPGQueryNum, EnabledStatus=EnabledStatus, hwIgmpSnoopingVlanStatusEntry=hwIgmpSnoopingVlanStatusEntry, hwIgmpSnoopingGroupPolicyIfIndex=hwIgmpSnoopingGroupPolicyIfIndex, hwIgmpSnoopingFastLeaveStatus=hwIgmpSnoopingFastLeaveStatus, hwIgmpSnoopingVlanID=hwIgmpSnoopingVlanID, hwIgmpSnoopingGroupPolicyTable=hwIgmpSnoopingGroupPolicyTable, hwIgmpSnoopingVlanStatusTable=hwIgmpSnoopingVlanStatusTable, hwIgmpSnoopingFastLeaveEntry=hwIgmpSnoopingFastLeaveEntry, hwRecvIGMPV1ReportNum=hwRecvIGMPV1ReportNum)
|
#!/usr/bin/env python
# encoding: utf-8
def build_log_urls(node, path):
    """Return the view and download URLs for a file on OSF storage.

    :param node: Node that owns the file; must expose ``web_url_for``.
    :param str path: Storage path of the file.
    :return: dict with ``view`` and ``download`` URL strings.
    """
    base = node.web_url_for(
        'addon_view_or_download_file',
        path=path,
        provider='osfstorage'
    )
    # The download URL is the view URL plus an action query parameter.
    return {'view': base, 'download': base + '?action=download'}
class OsfStorageNodeLogger(object):
    """Record OSF-storage events on a node's log.

    Thin wrapper around ``Node.add_log`` that attaches the standard
    project/node parameters and prefixes every action with
    ``osf_storage_``.
    """

    def __init__(self, node, auth, path=None):
        # Node being logged against, acting user's auth, and (optionally)
        # the storage path of the file the event concerns.
        self.node = node
        self.auth = auth
        self.path = path

    def log(self, action, extra=None, save=False):
        """Record one event on the node's log.

        :param str action: Log action (a NodeLog class constant); it is
            stored with the ``osf_storage_`` prefix.
        :param dict extra: Extra entries merged into the log ``params``.
        :param bool save: When True, persist the node after logging.
        """
        params = {
            'project': self.node.parent_id,
            'node': self.node._primary_key,
        }
        # File-related events additionally carry the file path and its
        # view/download URLs.
        if self.path:
            params['urls'] = build_log_urls(self.node, self.path)
            params['path'] = self.path
        if extra:
            params.update(extra)
        self.node.add_log(
            action='osf_storage_{0}'.format(action),
            params=params,
            auth=self.auth,
        )
        if save:
            self.node.save()
|
def add_minutes(hhmm, minutes=5):
    """Return ``hhmm`` ('HH:MM') advanced by *minutes*, wrapping at 24 h.

    :param str hhmm: time of day as ``HH:MM``.
    :param int minutes: offset to add (default 5, matching the original
        script; generalized to any non-negative offset).
    :return: the new time as a zero-padded ``HH:MM`` string.
    """
    hh, mm = map(int, hhmm.split(':'))
    # Work in total minutes so both the minute and hour wrap fall out of
    # a single modulo.
    total = (hh * 60 + mm + minutes) % (24 * 60)
    return '%02d:%02d' % divmod(total, 60)

if __name__ == '__main__':
    # Original script behavior: read one HH:MM line, print it + 5 minutes.
    print(add_minutes(input()))
|
class NodeSocketInterfaceIntUnsigned:
    """Declarative stub for an unsigned-integer node-socket interface.

    All attributes default to ``None``; presumably they are populated by
    the host application at runtime (looks like a Blender RNA API stub)
    -- TODO confirm against the generator.
    """
    default_value = None  # initial socket value
    max_value = None      # presumably the upper bound -- verify
    min_value = None      # presumably the lower bound -- verify
|
'''
# TASK 5 - Write a function that takes a string and a shift integer and returns the string with each letter shifted
- you can iterate over the letters in a string
- for letter in str:
'''
def get_shifted_string(string, shift):
    """Return *string* with each letter shifted *shift* steps (Caesar shift).

    Letters wrap around the alphabet and keep their case; non-letter
    characters pass through unchanged.  ``shift`` may be negative or
    larger than 26.

    :param str string: text to shift.
    :param int shift: number of alphabet positions to shift by.
    :return: the shifted string.
    """
    # Implemented: the original was a ``raise NotImplementedError`` stub.
    shifted = []
    for ch in string:
        if 'a' <= ch <= 'z':
            shifted.append(chr((ord(ch) - ord('a') + shift) % 26 + ord('a')))
        elif 'A' <= ch <= 'Z':
            shifted.append(chr((ord(ch) - ord('A') + shift) % 26 + ord('A')))
        else:
            shifted.append(ch)
    return ''.join(shifted)
|
# Fixed tuple of sample integers.
mytuple = (9, 8, 7, 5, 4, 1, 2, 3)
# Report how many elements it holds, plus its extremes.
num = len(mytuple)
print("# of element:", num)
min_value, max_value = min(mytuple), max(mytuple)
print("Min value:", min_value)
print("Max value:", max_value)
|
class CalcController:
    """Controller wiring a calculator model to its view (MVC)."""

    # Single-digit commands accepted by EnterDigit.
    _DIGITS = frozenset('0123456789')
    # Commands that map one-to-one onto a zero-argument model method.
    _ACTIONS = {
        'DOT': 'EnterDot',
        'SIGN': 'Sign',
        'CLEAR': 'Clear',
        'ALL_CLEAR': 'AllClear',
        'ADDITION': 'Addition',
        'SUBSTRACTION': 'Substraction',
        'MULTIPLICATION': 'Multiplication',
        'DIVISION': 'Division',
        'SQRT': 'CalcSqrt',
        'CALCULATE': 'CalcResult',
    }

    def __init__(self, model, view):
        self.model = model
        # Route model callbacks back through this controller to the view.
        self.model.OnResult = self.OnResult
        self.model.OnError = self.OnError
        self.view = view
        self.model.AllClear()

    # interface for view
    def command(self, command):
        """Dispatch a command string from the view to the model.

        Unknown commands are silently ignored (same as the original
        if/elif ladder falling through).
        """
        if command in self._DIGITS:
            self.model.EnterDigit(command)
        elif command in self._ACTIONS:
            getattr(self.model, self._ACTIONS[command])()

    def OnResult(self, result):
        # Model produced a value: show it.
        self.view.ShowResult(result)

    def OnError(self):
        # Model signalled an error: show the fixed error marker.
        self.view.ShowResult('ERROR')
# Delete all keys that start with 'foo'.
# Fix: snapshot the keys first -- calling hiera.pop() while iterating the
# live .keys() view raises "RuntimeError: dictionary changed size during
# iteration" under Python 3.
for k in list(hiera.keys()):
    if k.startswith('foo'):
        hiera.pop(k)
|
# -*- coding: utf-8 -*-
#
# Copyright 2012 James Thornton (http://jamesthornton.com)
# BSD License (see LICENSE for details)
#
"""
Bulbs is a Python persistence framework for graph databases.
It is a Python client library for Neo4j Server and Rexster.
Rexster is a REST server that provides access to any
Blueprints-enabled graph database, including Neo4j, OrientDB, DEX,
TinkerGraph and OpenRDF.
Home Page: http://bulbflow.com
Copyright (c) 2012, James Thornton (http://jamesthornton.com)
License: BSD (see LICENSE for details)
"""
# Package metadata.
__version__ = '0.3'
__author__ = 'James Thornton'
__license__ = 'BSD'
|
def liters_to_fill(length, width, height, percent_full):
    """Return the litres of water needed to top up a rectangular tank.

    :param float length: tank length in cm.
    :param float width: tank width in cm.
    :param float height: tank height in cm.
    :param float percent_full: how full the tank already is, 0-100.
    :return: remaining litres needed (1 cm^3 == 0.001 l).
    """
    total_liters = length * width * height * 0.001
    return total_liters * (1 - percent_full * 0.01)

if __name__ == '__main__':
    # Original script behavior: four values on stdin, raw float on stdout.
    length = float(input())
    width = float(input())
    height = float(input())
    perc_full = float(input())
    print(liters_to_fill(length, width, height, perc_full))
"""Part 2: Write a Python program to add two given lists using lambda. Hint : Map
nums1 = [1, 4, 5, 6, 5]
nums2 = [3, 5, 7, 2, 5]
Expected Input Output
Result: after adding two list
[4, 9, 12, 8, 10]
Update code in script_2/lambda_2.py"""
nums1 = [1, 4, 5, 6, 5]
nums2 = [3, 5, 7, 2, 5]
result = map(lambda x, y: x + y, nums1, nums2)
print(list(result))
|
def maximum_subarray(nums):
    """Return the largest sum over all contiguous sub-arrays of *nums*.

    Kadane's algorithm: track the best sum of a sub-array ending at the
    current element, and the best sum seen overall.  Requires a
    non-empty sequence.
    """
    best = running = nums[0]
    for value in nums[1:]:
        # Either extend the running sub-array or restart at this element.
        running = max(running + value, value)
        if running > best:
            best = running
    return best

if __name__ == '__main__':
    print(maximum_subarray([-2,1,-3,4,-1,2,1,-5,4]))
class MocNode:
    """Simple mutable record for a MOC tree node: a name, its parent's
    name, and a list of referenced MOCs."""
    def __init__(self):
        self.name = ''    # node name
        self.parent = ''  # parent node's name
        self.refmoc = []  # referenced MOCs
    def setName(self, name):
        # Java-style setters below are kept as-is: they are the public
        # interface existing callers use.
        self.name = name
    def setParent(self, parent):
        self.parent = parent
    def setRefmoc(self, refmoc):
        # NOTE(review): stores the caller's list by reference (no copy).
        self.refmoc = refmoc
|
print("| U0 | U1 | U2 | Element |")
print("|--|--|--|--------|")
stack = []
for a in (0, 1, 2):
for b in (0, 1, 2):
for c in (0, 1, 2):
stack.append(str(a) if a != 0 else '')
stack.append(((str(b) if b > 1 else '') + 'x') if b != 0 else '')
stack.append(((str(c) if c > 1 else '') + 'x^2') if c != 0 else '')
while stack and stack[-1] == '':
stack.pop()
element = stack.pop() if stack else ''
while stack:
element = stack[-1] + ' + ' + element if stack[-1] != '' else element
stack.pop()
print("|" + str(a) + "|" + str(b) + "|" + str(c) + "|" + (element if element != '' else '0') + "|")
# Galois field multiplicative inverse
# SPN
w = int()  # 16-bit state word, initially 0

def permute(w, P):
    """Apply the bit permutation *P* to the 16-bit word *w*.

    ``P`` is 1-indexed and MSB-first: the i-th most significant bit of
    ``w`` (i from 0) moves to position ``P[i]`` counted from the MSB, so
    ``P == [1, 2, ..., 16]`` is the identity permutation.

    :param int w: 16-bit input word.
    :param list P: permutation table of 16 one-based positions.
    :return: the permuted 16-bit word.
    """
    new = 0
    for i in range(len(P)):
        # Extract bit (15 - i) of w and place it at position (16 - P[i]).
        new |= ((w & (1 << (15 - i))) >> (15 - i)) << (16 - P[i])
    return new

# Bug fix: the module originally ended with a bare ``permute()`` call,
# which always raised TypeError (both required arguments missing); the
# broken call has been removed.
print(" ADIN EGIAZTAKETA\n")
adina=int(input("Sartu zure adina: "))
while adina<0:
print(f"{adina} zenbaki negatibo bat da eta ezin dezu adin negatiborik eduki.")
adina=int(input("Sartu zure adina: "))
while adina>130:
print(f"{adina} adin haundiegi bat da, ez det sinisten adin hoi dezunik.")
adina=int(input("Sartu zure adina: "))
if adina>=18:
print("18 urte baino gehiago dituzu, beraz pasa zaitezke.")
else:
print("18 urte baino gutxiago dituzu, beraz ezin zea pasa.") |
# AUTOGENERATED BY NBDEV! DO NOT EDIT!
__all__ = ["index", "modules", "custom_doc_links", "git_url"]
index = {"show_binmask": "00_core.ipynb",
"BinaryMasksBlock": "00_core.ipynb",
"TensorBinMasks": "00_core.ipynb",
"TensorBinMasks2TensorMask": "00_core.ipynb",
"CocoData": "01_datasets.ipynb",
"ObjectDetectionDataLoaders": "02_dataloaders.ipynb",
"ObjDetAdapter": "03_callbacks.ipynb",
"get_fasterrcnn_model": "04a_models.fasterrcnn.ipynb",
"get_fasterrcnn_model_swin": "04a_models.fasterrcnn.ipynb",
"SwinTransformerFPN": "04a_models.fasterrcnn.ipynb",
"fasterrcnn_resnet18": "04a_models.fasterrcnn.ipynb",
"fasterrcnn_resnet34": "04a_models.fasterrcnn.ipynb",
"fasterrcnn_resnet50": "04a_models.fasterrcnn.ipynb",
"fasterrcnn_resnet101": "04a_models.fasterrcnn.ipynb",
"fasterrcnn_resnet152": "04a_models.fasterrcnn.ipynb",
"fasterrcnn_swinT": "04a_models.fasterrcnn.ipynb",
"fasterrcnn_swinS": "04a_models.fasterrcnn.ipynb",
"fasterrcnn_swinB": "04a_models.fasterrcnn.ipynb",
"fasterrcnn_swinL": "04a_models.fasterrcnn.ipynb",
"get_maskrcnn_model": "04b_models.maskrcnn.ipynb",
"maskrcnn_resnet18": "04b_models.maskrcnn.ipynb",
"maskrcnn_resnet34": "04b_models.maskrcnn.ipynb",
"maskrcnn_resnet50": "04b_models.maskrcnn.ipynb",
"maskrcnn_resnet101": "04b_models.maskrcnn.ipynb",
"maskrcnn_resnet152": "04b_models.maskrcnn.ipynb",
"EffDetModelWrapper": "04c_models.efficientdet.ipynb",
"get_efficientdet_model": "04c_models.efficientdet.ipynb",
"efficientdet_d0": "04c_models.efficientdet.ipynb",
"efficientdet_d1": "04c_models.efficientdet.ipynb",
"efficientdet_d2": "04c_models.efficientdet.ipynb",
"efficientdet_d3": "04c_models.efficientdet.ipynb",
"efficientdet_d4": "04c_models.efficientdet.ipynb",
"efficientdet_d5": "04c_models.efficientdet.ipynb",
"efficientdet_d6": "04c_models.efficientdet.ipynb",
"efficientdet_d7": "04c_models.efficientdet.ipynb",
"no_split": "05_learners.ipynb",
"rcnn_split": "05_learners.ipynb",
"effdet_split": "05_learners.ipynb",
"ObjDetLearner": "05_learners.ipynb",
"ObjDetLearner.get_preds": "05_learners.ipynb",
"ObjDetLearner.show_results": "05_learners.ipynb",
"InstSegLearner": "05_learners.ipynb",
"InstSegLearner.get_preds": "05_learners.ipynb",
"InstSegLearner.show_results": "05_learners.ipynb",
"fasterrcnn_learner": "05_learners.ipynb",
"maskrcnn_learner": "05_learners.ipynb",
"efficientdet_learner": "05_learners.ipynb",
"mAP_Metric": "06_metrics.ipynb",
"create_mAP_metric": "06_metrics.ipynb",
"mAP_at_IoU40": "06_metrics.ipynb",
"mAP_at_IoU50": "06_metrics.ipynb",
"mAP_at_IoU60": "06_metrics.ipynb",
"mAP_at_IoU70": "06_metrics.ipynb",
"mAP_at_IoU80": "06_metrics.ipynb",
"mAP_at_IoU90": "06_metrics.ipynb",
"mAP_at_IoU50_95": "06_metrics.ipynb",
"mAP_Metric_np": "07_metrics_np.ipynb",
"create_mAP_metric_np": "07_metrics_np.ipynb",
"mAP_at_IoU40_np": "07_metrics_np.ipynb",
"mAP_at_IoU50_np": "07_metrics_np.ipynb",
"mAP_at_IoU60_np": "07_metrics_np.ipynb",
"mAP_at_IoU70_np": "07_metrics_np.ipynb",
"mAP_at_IoU80_np": "07_metrics_np.ipynb",
"mAP_at_IoU90_np": "07_metrics_np.ipynb",
"mAP_at_IoU50_95_np": "07_metrics_np.ipynb"}
modules = ["core.py",
"datasets.py",
"dataloaders.py",
"callbacks.py",
"models/fasterrcnn.py",
"models/maskrcnn.py",
"models/efficientdet.py",
"learners.py",
"metrics.py",
"metrics_np.py"]
doc_url = "https://rbrtwlz.github.io/fastai_object_detection/"
git_url = "https://github.com/rbrtwlz/fastai_object_detection/tree/master/"
def custom_doc_links(name): return None
|
# Banner for Em-Bomber
def logo():
    """Return the ANSI-coloured ASCII-art startup banner.

    The \\033[...m sequences are ANSI SGR colour codes; \\033[0;0m resets
    the colours after each coloured section.
    """
    return ("""\033[1;92m
 ___ ___ _
| __>._ _ _ ___ | . > ___ ._ _ _ | |_ ___ _ _
| _> | ' ' ||___|| . \/ . \| ' ' || . \/ ._>| '_>
|___>|_|_|_| |___/\___/|_|_|_||___/\___.|_|
\033[0;0m
\033[1;0;101m Coded by Sajjad
\033[0;0m
\033[1;90;107m github: https://www.github.com/Background-Sajjad \033[1;0;0m
""")

if __name__=="__main__":
    print(logo())
# Maximum wait passed to client.wait_success()/wait_for() throughout this
# module -- presumably seconds; confirm against the client library.
DEFAULT_TIMEOUT = 120
def test_upgrade_simple(context, client):
    # Upgrade a scale-1 service to another scale-1 service, scaling the
    # target up to 2 in 100 ms batches.
    _run_upgrade(context, client, 1, 1, finalScale=2, intervalMillis=100)
def test_upgrade_odd_numbers(context, client):
    # Scale-5 source down to a scale-3 target; batchSize far larger than
    # the instance count exercises the batch-clamping path.
    _run_upgrade(context, client, 5, 2, batchSize=100, finalScale=3, intervalMillis=100)
def test_upgrade_to_too_high(context, client):
    # Target starts above the requested final scale (5 -> 2): the upgrade
    # must scale the target service down.
    _run_upgrade(context, client, 1, 5, batchSize=2, finalScale=2, intervalMillis=100)
def test_upgrade_relink(context, client):
service, service2, env = _create_env_and_services(context, client)
image_uuid = context.image_uuid
launch_config = {"imageUuid": image_uuid}
source = client.create_service(name=random_str(), environmentId=env.id, scale=1, launchConfig=launch_config)
lb = client.create_service(name=random_str(), environmentId=env.id, scale=1, launchConfig=launch_config)
source = client.wait_success(client.wait_success(source).activate())
assert source.state == "active"
lb = client.wait_success(client.wait_success(lb).activate())
assert lb.state == "active"
service_link = {"serviceId": service.id, "name": "link1"}
source.setservicelinks(serviceLinks=[service_link])
lb.setservicelinks(serviceLinks=[service_link])
source = client.wait_success(source)
assert source.state == "active"
lb = client.wait_success(lb)
assert lb.state == "active"
assert len(source.consumedservices()) == 1
assert len(lb.consumedservices()) == 1
assert len(service.consumedbyservices()) == 2
assert len(service2.consumedbyservices()) == 0
strategy = {"finalScale": 1, "toServiceId": service2.id, "updateLinks": True, "intervalMillis": 100}
service = service.upgrade_action(toServiceStrategy=strategy)
service = client.wait_success(service, timeout=DEFAULT_TIMEOUT)
assert service.state == "upgraded"
assert len(source.consumedservices()) == 2
assert len(lb.consumedservices()) == 2
assert len(service.consumedbyservices()) == 2
assert len(service2.consumedbyservices()) == 2
links = client.list_service_consume_map(serviceId=lb.id)
assert len(links) == 2
def test_in_service_upgrade_primary(context, client, super_client):
env, svc, up_svc = _insvc_upgrade(context, client, super_client, True, launchConfig={"labels": {"foo": "bar"}}, startFirst=True)
_validate_upgrade(super_client, svc, up_svc, primary="1", secondary1="0", secondary2="0")
def test_in_service_upgrade_inactive(context, client, super_client):
env, svc, up_svc = _insvc_upgrade(context, client, super_client, True, activate=False, launchConfig={"labels": {"foo": "bar"}}, startFirst=True)
_validate_upgrade(super_client, svc, up_svc, primary="1", secondary1="0", secondary2="0")
def test_in_service_upgrade_all(context, client, super_client):
secondary = [{"name": "secondary1", "labels": {"foo": "bar"}}, {"name": "secondary2", "labels": {"foo": "bar"}}]
env, svc, up_svc = _insvc_upgrade(context, client, super_client, True, launchConfig={"labels": {"foo": "bar"}}, secondaryLaunchConfigs=secondary, batchSize=3, intervalMillis=100)
_validate_upgrade(super_client, svc, up_svc, primary="1", secondary1="1", secondary2="1")
def test_in_service_upgrade_one_secondary(context, client, super_client):
secondary = [{"name": "secondary1", "labels": {"foo": "bar"}}]
env, svc, upgraded_svc = _insvc_upgrade(context, client, super_client, True, secondaryLaunchConfigs=secondary, batchSize=2, intervalMillis=100)
_validate_upgrade(super_client, svc, upgraded_svc, primary="0", secondary1="1", secondary2="0")
def test_in_service_upgrade_mix(context, client, super_client):
secondary = [{"name": "secondary1", "labels": {"foo": "bar"}}]
env, svc, up_svc = _insvc_upgrade(context, client, super_client, True, launchConfig={"labels": {"foo": "bar"}}, secondaryLaunchConfigs=secondary, batchSize=1)
_validate_upgrade(super_client, svc, up_svc, primary="1", secondary1="1", secondary2="0")
def test_big_scale(context, client):
env = client.create_environment(name=random_str())
env = client.wait_success(env)
assert env.state == "active"
image_uuid = context.image_uuid
launch_config = {"imageUuid": image_uuid, "networkMode": None}
svc = client.create_service(name=random_str(), environmentId=env.id, scale=10, launchConfig=launch_config, intervalMillis=100)
svc = client.wait_success(svc)
svc = client.wait_success(svc.activate(), DEFAULT_TIMEOUT)
svc = _run_insvc_upgrade(svc, batchSize=1, launchConfig=launch_config)
svc = client.wait_success(svc, DEFAULT_TIMEOUT)
svc = client.wait_success(svc.finishupgrade())
svc = _run_insvc_upgrade(svc, batchSize=5, launchConfig=launch_config)
svc = client.wait_success(svc, DEFAULT_TIMEOUT)
client.wait_success(svc.finishupgrade())
def test_rollback_regular_upgrade(context, client, super_client):
    # Start a to-service upgrade, cancel it mid-flight, roll back, and
    # verify the source service regains its full complement of instances.
    svc, service2, env = _create_env_and_services(context, client, 4, 4)
    svc = _run_tosvc_upgrade(svc, service2, toServiceId=service2.id, finalScale=4)
    time.sleep(1)  # give the upgrade a moment to start before cancelling
    svc = wait_state(client, svc.cancelupgrade(), "canceled-upgrade")
    svc = wait_state(client, svc.rollback(), "active")
    _wait_for_map_count(super_client, svc)
def _create_and_schedule_inservice_upgrade(client, context, startFirst=False):
    """Create an active scale-4 service and schedule a batch-2 in-service
    upgrade on it; return the service once the upgrade is registered.

    :param bool startFirst: whether new instances start before old ones
        are stopped during the upgrade.
    """
    env = client.create_environment(name=random_str())
    env = client.wait_success(env)
    assert env.state == "active"
    image_uuid = context.image_uuid
    launch_config = {"imageUuid": image_uuid, "networkMode": None}
    svc = client.create_service(name=random_str(), environmentId=env.id, scale=4, launchConfig=launch_config, image=image_uuid)
    svc = client.wait_success(svc)
    svc = client.wait_success(svc.activate(), timeout=DEFAULT_TIMEOUT)
    svc = _run_insvc_upgrade(svc, batchSize=2, launchConfig=launch_config, startFirst=startFirst, intervalMillis=100)
    def upgrade_not_null():
        # Poll until the upgrade strategy has recorded the previous configs.
        return _validate_in_svc_upgrade(client, svc)
    svc = wait_for(upgrade_not_null, DEFAULT_TIMEOUT)
    return svc
def test_rollback_inservice_upgrade(context, client, super_client):
    # Cancel an in-flight in-service upgrade, then roll the primary
    # launch config (and only it) back to its previous version.
    svc = _create_and_schedule_inservice_upgrade(client, context)
    time.sleep(1)  # let the upgrade make some progress before cancelling
    svc = _cancel_upgrade(client, svc)
    _rollback(client, super_client, svc, 1, 0, 0)
def test_cancelupgrade_remove(context, client):
    # A service left in "canceled-upgrade" state must be removable directly.
    svc = _create_and_schedule_inservice_upgrade(client, context)
    svc = _cancel_upgrade(client, svc)
    svc.remove()
def test_cancelupgrade_rollback(context, client):
svc = _create_and_schedule_inservice_upgrade(client, context)
svc = _cancel_upgrade(client, svc)
svc = client.wait_success(svc.rollback())
svc.remove()
def test_cancelupgrade_finish(context, client):
svc = _create_and_schedule_inservice_upgrade(client, context)
svc = _cancel_upgrade(client, svc)
svc.continueupgrade()
def test_upgrade_finish_cancel_rollback(context, client):
svc = _create_and_schedule_inservice_upgrade(client, context)
svc = client.wait_success(svc, DEFAULT_TIMEOUT)
assert svc.state == "upgraded"
svc = svc.finishupgrade()
wait_for(lambda: client.reload(svc).state == "finishing-upgrade")
svc = _cancel_upgrade(client, svc)
assert svc.state == "canceled-upgrade"
svc = client.wait_success(svc.rollback())
def test_state_transition_start_first(context, client):
    # Upgrade with startFirst=False, then roll back to active.
    svc = _create_and_schedule_inservice_upgrade(client, context, startFirst=False)
    svc = client.wait_success(svc, DEFAULT_TIMEOUT)
    assert svc.state == "upgraded"
    svc = svc.rollback()
    svc = client.wait_success(svc, DEFAULT_TIMEOUT)
    assert svc.state == "active"
    return
    # NOTE(review): everything below is unreachable because of the early
    # ``return`` above -- presumably the startFirst=True variant was
    # disabled deliberately; confirm and either delete or re-enable it.
    svc = _create_and_schedule_inservice_upgrade(client, context, startFirst=True)
    svc = client.wait_success(svc, DEFAULT_TIMEOUT)
    assert svc.state == "upgraded"
    svc = svc.rollback()
    svc = client.wait_success(svc, DEFAULT_TIMEOUT)
    assert svc.state == "active"
    svc = _create_and_schedule_inservice_upgrade(client, context, startFirst=False)
    svc = client.wait_success(svc, DEFAULT_TIMEOUT)
    assert svc.state == "upgraded"
    client.wait_success(svc.remove())
def test_in_service_upgrade_networks_from(context, client, super_client):
env = client.create_environment(name=random_str())
env = client.wait_success(env)
image_uuid = context.image_uuid
launch_config = {"imageUuid": image_uuid, "networkMode": "container", "networkLaunchConfig": "secondary1"}
secondary1 = {"imageUuid": image_uuid, "name": "secondary1"}
secondary2 = {"imageUuid": image_uuid, "name": "secondary2"}
svc = client.create_service(name=random_str(), environmentId=env.id, scale=2, launchConfig=launch_config, secondaryLaunchConfigs=[secondary1, secondary2])
svc = client.wait_success(svc)
svc = client.wait_success(svc.activate())
u_svc = _run_insvc_upgrade(svc, secondaryLaunchConfigs=[secondary1], batchSize=1)
u_svc = client.wait_success(u_svc, DEFAULT_TIMEOUT)
assert u_svc.state == "upgraded"
u_svc = client.wait_success(u_svc.finishupgrade(), DEFAULT_TIMEOUT)
_validate_upgrade(super_client, svc, u_svc, primary="1", secondary1="1", secondary2="0")
def test_in_service_upgrade_volumes_from(context, client, super_client):
env = client.create_environment(name=random_str())
env = client.wait_success(env)
image_uuid = context.image_uuid
launch_config = {"imageUuid": image_uuid}
secondary1 = {"imageUuid": image_uuid, "name": "secondary1", "dataVolumesFromLaunchConfigs": ["secondary2"]}
secondary2 = {"imageUuid": image_uuid, "name": "secondary2"}
svc = client.create_service(name=random_str(), environmentId=env.id, scale=2, launchConfig=launch_config, secondaryLaunchConfigs=[secondary1, secondary2])
svc = client.wait_success(svc)
svc = client.wait_success(svc.activate())
u_svc = _run_insvc_upgrade(svc, launchConfig=launch_config, secondaryLaunchConfigs=[secondary2], batchSize=1)
u_svc = client.wait_success(u_svc, DEFAULT_TIMEOUT)
assert u_svc.state == "upgraded"
u_svc = client.wait_success(u_svc.finishupgrade(), DEFAULT_TIMEOUT)
_validate_upgrade(super_client, svc, u_svc, primary="1", secondary1="1", secondary2="1")
def _create_stack(client):
    """Create a stack (environment) with a random name and wait for it."""
    stack = client.create_environment(name=random_str())
    return client.wait_success(stack)
def test_dns_service_upgrade(client):
env = _create_stack(client)
labels = {"foo": "bar"}
launch_config = {"labels": labels}
dns = client.create_dnsService(name=random_str(), environmentId=env.id, launchConfig=launch_config)
dns = client.wait_success(dns)
assert dns.launchConfig is not None
assert dns.launchConfig.labels == labels
dns = client.wait_success(dns.activate())
labels = {"bar": "foo"}
launch_config = {"labels": labels}
dns = _run_insvc_upgrade(dns, batchSize=1, launchConfig=launch_config)
dns = client.wait_success(dns, DEFAULT_TIMEOUT)
assert dns.launchConfig is not None
assert dns.launchConfig.labels == labels
def test_external_service_upgrade(client):
env = _create_stack(client)
labels = {"foo": "bar"}
launch_config = {"labels": labels}
ips = ["72.22.16.5", "192.168.0.10"]
svc = client.create_externalService(name=random_str(), environmentId=env.id, externalIpAddresses=ips, launchConfig=launch_config)
svc = client.wait_success(svc)
assert svc.launchConfig is not None
assert svc.launchConfig.labels == labels
svc = client.wait_success(svc.activate())
labels = {"bar": "foo"}
launch_config = {"labels": labels}
svc = _run_insvc_upgrade(svc, batchSize=1, launchConfig=launch_config)
svc = client.wait_success(svc, DEFAULT_TIMEOUT)
assert svc.launchConfig is not None
assert svc.launchConfig.labels == labels
def test_service_upgrade_no_image_selector(client):
env = _create_stack(client)
launch_config = {"imageUuid": "rancher/none"}
svc1 = client.create_service(name=random_str(), environmentId=env.id, launchConfig=launch_config, selectorContainer="foo=barbar")
svc1 = client.wait_success(svc1)
svc1 = client.wait_success(svc1.activate())
strategy = {"intervalMillis": 100, "launchConfig": {}}
svc1.upgrade_action(launchConfig=launch_config, inServiceStrategy=strategy)
def test_service_upgrade_mixed_selector(client, context):
env = _create_stack(client)
image_uuid = context.image_uuid
launch_config = {"imageUuid": image_uuid}
svc2 = client.create_service(name=random_str(), environmentId=env.id, launchConfig=launch_config, selectorContainer="foo=barbar")
svc2 = client.wait_success(svc2)
svc2 = client.wait_success(svc2.activate())
_run_insvc_upgrade(svc2, launchConfig=launch_config)
def test_rollback_sidekicks(context, client, super_client):
env = client.create_environment(name=random_str())
env = client.wait_success(env)
image_uuid = context.image_uuid
launch_config = {"imageUuid": image_uuid}
secondary1 = {"imageUuid": image_uuid, "name": "secondary1"}
secondary2 = {"imageUuid": image_uuid, "name": "secondary2"}
svc = client.create_service(name=random_str(), environmentId=env.id, scale=3, launchConfig=launch_config, secondaryLaunchConfigs=[secondary1, secondary2])
svc = client.wait_success(svc)
svc = client.wait_success(svc.activate())
initial_maps = super_client.list_serviceExposeMap(serviceId=svc.id, state="active", upgrade=False)
u_svc = _run_insvc_upgrade(svc, secondaryLaunchConfigs=[secondary1], batchSize=2)
u_svc = client.wait_success(u_svc, DEFAULT_TIMEOUT)
assert u_svc.state == "upgraded"
u_svc = client.wait_success(u_svc.rollback(), DEFAULT_TIMEOUT)
assert u_svc.state == "active"
final_maps = super_client.list_serviceExposeMap(serviceId=u_svc.id, state="active", upgrade=False)
for initial_map in initial_maps:
found = False
for final_map in final_maps:
if final_map.id == initial_map.id:
found = True
break
assert found is True
def test_upgrade_env(client):
env = client.create_environment(name="env-" + random_str())
env = client.wait_success(env)
assert env.state == "active"
env = env.upgrade()
assert env.state == "upgrading"
env = client.wait_success(env)
assert env.state == "upgraded"
def test_upgrade_rollback_env(client):
env = client.create_environment(name="env-" + random_str())
env = client.wait_success(env)
assert env.state == "active"
assert "upgrade" in env
env = env.upgrade()
assert env.state == "upgrading"
env = client.wait_success(env)
assert env.state == "upgraded"
assert "rollback" in env
env = env.rollback()
assert env.state == "rolling-back"
env = client.wait_success(env)
assert env.state == "active"
def _run_insvc_upgrade(svc, **kw):
kw["intervalMillis"] = 100
svc = svc.upgrade_action(inServiceStrategy=kw)
assert svc.state == "upgrading"
return svc
def _insvc_upgrade(context, client, super_client, finish_upgrade, activate=True, **kw):
env, svc = _create_multi_lc_svc(super_client, client, context, activate)
_run_insvc_upgrade(svc, **kw)
def upgrade_not_null():
return _validate_in_svc_upgrade(client, svc)
u_svc = wait_for(upgrade_not_null)
u_svc = client.wait_success(u_svc, timeout=DEFAULT_TIMEOUT)
assert u_svc.state == "upgraded"
if finish_upgrade:
u_svc = client.wait_success(u_svc.finishupgrade(), DEFAULT_TIMEOUT)
assert u_svc.state == "active"
return env, svc, u_svc
def _validate_in_svc_upgrade(client, svc):
s = client.reload(svc)
upgrade = s.upgrade
if upgrade is not None:
strategy = upgrade.inServiceStrategy
c1 = strategy.previousLaunchConfig is not None
c2 = strategy.previousSecondaryLaunchConfigs is not None
if c1 or c2:
return s
def _wait_for_instance_start(super_client, id):
wait_for(lambda: len(super_client.by_id("container", id)) > 0)
return super_client.by_id("container", id)
def _wait_for_map_count(super_client, service, launchConfig=None):
def get_active_launch_config_instances():
match = []
instance_maps = super_client.list_serviceExposeMap(serviceId=service.id, state="active", upgrade=False)
for instance_map in instance_maps:
if launchConfig is not None:
if instance_map.dnsPrefix == launchConfig:
match.append(instance_map)
else:
if instance_map.dnsPrefix is None:
match.append(instance_map)
return match
def active_len():
match = get_active_launch_config_instances()
if len(match) == service.scale:
return match
wait_for(active_len)
return get_active_launch_config_instances()
def _validate_upgraded_instances_count(super_client, svc, primary=0, secondary1=0, secondary2=0):
if primary == 1:
lc = svc.launchConfig
_validate_launch_config(super_client, lc, svc)
if secondary1 == 1:
lc = svc.secondaryLaunchConfigs[0]
_validate_launch_config(super_client, lc, svc)
if secondary2 == 1:
lc = svc.secondaryLaunchConfigs[1]
_validate_launch_config(super_client, lc, svc)
def _validate_launch_config(super_client, launchConfig, svc):
match = _get_upgraded_instances(super_client, launchConfig, svc)
if len(match) == svc.scale:
return match
def _get_upgraded_instances(super_client, launchConfig, svc):
c_name = svc.name
if hasattr(launchConfig, "name"):
c_name = svc.name + "-" + launchConfig.name
match = []
instances = super_client.list_container(state="running", accountId=svc.accountId)
for instance in instances:
if instance.name is not None and c_name in instance.name and instance.version == launchConfig.version:
labels = {"foo": "bar"}
assert all(item in instance.labels for item in labels) is True
match.append(instance)
return match
def _create_env_and_services(context, client, from_scale=1, to_scale=1):
env = client.create_environment(name=random_str())
env = client.wait_success(env)
assert env.state == "active"
image_uuid = context.image_uuid
launch_config = {"imageUuid": image_uuid, "networkMode": None}
service = client.create_service(name=random_str(), environmentId=env.id, scale=from_scale, launchConfig=launch_config)
service = client.wait_success(service)
service = client.wait_success(service.activate(), timeout=DEFAULT_TIMEOUT)
assert service.state == "active"
assert service.upgrade is None
service2 = client.create_service(name=random_str(), environmentId=env.id, scale=to_scale, launchConfig=launch_config)
service2 = client.wait_success(service2)
service2 = client.wait_success(service2.activate(), timeout=DEFAULT_TIMEOUT)
assert service2.state == "active"
assert service2.upgrade is None
return service, service2, env
def _run_tosvc_upgrade(service, service2, **kw):
kw["toServiceId"] = service2.id
service = service.upgrade_action(toServiceStrategy=kw)
assert service.state == "upgrading"
return service
def _run_upgrade(context, client, from_scale, to_scale, **kw):
service, service2, env = _create_env_and_services(context, client, from_scale, to_scale)
_run_tosvc_upgrade(service, service2, **kw)
def upgrade_not_null():
s = client.reload(service)
if s.upgrade is not None:
return s
service = wait_for(upgrade_not_null)
service = client.wait_success(service, timeout=DEFAULT_TIMEOUT)
assert service.state == "upgraded"
assert service.scale == 0
service2 = client.wait_success(service2)
assert service2.state == "active"
assert service2.scale == kw["finalScale"]
service = client.wait_success(service.finishupgrade(), DEFAULT_TIMEOUT)
assert service.state == "active"
def _create_multi_lc_svc(super_client, client, context, activate=True):
env = client.create_environment(name=random_str())
env = client.wait_success(env)
assert env.state == "active"
image_uuid = context.image_uuid
launch_config = {"imageUuid": image_uuid, "networkMode": None}
secondary_lc1 = {"imageUuid": image_uuid, "name": "secondary1", "dataVolumesFromLaunchConfigs": ["secondary2"]}
secondary_lc2 = {"imageUuid": image_uuid, "name": "secondary2"}
secondary = [secondary_lc1, secondary_lc2]
svc = client.create_service(name=random_str(), environmentId=env.id, scale=2, launchConfig=launch_config, secondaryLaunchConfigs=secondary)
svc = client.wait_success(svc)
if activate:
svc = client.wait_success(svc.activate(), timeout=DEFAULT_TIMEOUT)
assert svc.state == "active"
c11, c11_sec1, c11_sec2, c12, c12_sec1, c12_sec2 = _get_containers(super_client, svc)
assert svc.launchConfig.version is not None
assert svc.secondaryLaunchConfigs[0].version is not None
assert svc.secondaryLaunchConfigs[1].version is not None
assert c11.version == svc.launchConfig.version
assert c12.version == svc.launchConfig.version
assert c11_sec1.version == svc.secondaryLaunchConfigs[0].version
assert c12_sec1.version == svc.secondaryLaunchConfigs[0].version
assert c11_sec2.version == svc.secondaryLaunchConfigs[1].version
assert c12_sec2.version == svc.secondaryLaunchConfigs[1].version
return env, svc
def _get_containers(super_client, service):
i_maps = _wait_for_map_count(super_client, service)
c11 = _wait_for_instance_start(super_client, i_maps[0].instanceId)
c12 = _wait_for_instance_start(super_client, i_maps[1].instanceId)
i_maps = _wait_for_map_count(super_client, service, "secondary1")
c11_sec1 = _wait_for_instance_start(super_client, i_maps[0].instanceId)
c12_sec1 = _wait_for_instance_start(super_client, i_maps[1].instanceId)
i_maps = _wait_for_map_count(super_client, service, "secondary2")
c11_sec2 = _wait_for_instance_start(super_client, i_maps[0].instanceId)
c12_sec2 = _wait_for_instance_start(super_client, i_maps[1].instanceId)
return c11, c11_sec1, c11_sec2, c12, c12_sec1, c12_sec2
def _validate_upgrade(super_client, svc, upgraded_svc, primary="0", secondary1="0", secondary2="0"):
_validate_upgraded_instances_count(super_client, upgraded_svc, primary, secondary1, secondary2)
primary_v = svc.launchConfig.version
sec1_v = svc.secondaryLaunchConfigs[0].version
sec2_v = svc.secondaryLaunchConfigs[1].version
primary_upgraded_v = primary_v
sec1_upgraded_v = sec1_v
sec2_upgraded_v = sec2_v
strategy = upgraded_svc.upgrade.inServiceStrategy
if primary == "1":
primary_upgraded_v = upgraded_svc.launchConfig.version
primary_prev_v = strategy.previousLaunchConfig.version
assert primary_v != primary_upgraded_v
assert primary_prev_v == primary_v
if secondary1 == "1":
sec1_upgraded_v = upgraded_svc.secondaryLaunchConfigs[0].version
sec1_prev_v = strategy.previousSecondaryLaunchConfigs[0].version
assert sec1_v != sec1_upgraded_v
assert sec1_prev_v == sec1_v
if secondary2 == "1":
sec2_upgraded_v = upgraded_svc.secondaryLaunchConfigs[1].version
sec2_prev_v = strategy.previousSecondaryLaunchConfigs[1].version
assert sec2_v != sec2_upgraded_v
assert sec2_prev_v == sec2_v
c21, c21_sec1, c21_sec2, c22, c22_sec1, c22_sec2 = _get_containers(super_client, upgraded_svc)
assert upgraded_svc.launchConfig.version == primary_upgraded_v
assert upgraded_svc.secondaryLaunchConfigs[0].version == sec1_upgraded_v
assert upgraded_svc.secondaryLaunchConfigs[1].version == sec2_upgraded_v
assert c21.version == upgraded_svc.launchConfig.version
assert c22.version == upgraded_svc.launchConfig.version
assert c21_sec1.version == upgraded_svc.secondaryLaunchConfigs[0].version
assert c22_sec1.version == upgraded_svc.secondaryLaunchConfigs[0].version
assert c21_sec2.version == upgraded_svc.secondaryLaunchConfigs[1].version
assert c22_sec2.version == upgraded_svc.secondaryLaunchConfigs[1].version
def _validate_rollback(super_client, svc, rolledback_svc,
                       primary=0, secondary1=0, secondary2=0):
    """Assert a rollback restored the previous launch-config versions for the
    selected configs (1 selects a config) and that every running instance of
    each selected config reports that restored version."""
    _validate_upgraded_instances_count(
        super_client, svc, primary, secondary1, secondary2)
    strategy = svc.upgrade.inServiceStrategy
    if primary == 1:
        expected = rolledback_svc.launchConfig.version
        assert strategy.previousLaunchConfig.version == expected
        for m in _wait_for_map_count(super_client, rolledback_svc):
            inst = _wait_for_instance_start(super_client, m.instanceId)
            assert inst.version == expected
    if secondary1 == 1:
        expected = rolledback_svc.secondaryLaunchConfigs[0].version
        assert strategy.previousSecondaryLaunchConfigs[0].version == expected
        for m in _wait_for_map_count(super_client, rolledback_svc,
                                     "secondary1"):
            inst = _wait_for_instance_start(super_client, m.instanceId)
            assert inst.version == expected
    if secondary2 == 1:
        expected = rolledback_svc.secondaryLaunchConfigs[1].version
        assert strategy.previousSecondaryLaunchConfigs[1].version == expected
        for m in _wait_for_map_count(super_client, rolledback_svc,
                                     "secondary2"):
            inst = _wait_for_instance_start(super_client, m.instanceId)
            assert inst.version == expected
def _cancel_upgrade(client, svc):
    """Cancel an in-flight upgrade, wait for the canceled state, and assert
    the previous launch configs were preserved for a later rollback.

    Returns the reloaded (canceled) service.
    """
    svc.cancelupgrade()
    wait_for(lambda: client.reload(svc).state == "canceled-upgrade")
    refreshed = client.reload(svc)
    saved = refreshed.upgrade.inServiceStrategy
    assert saved.previousLaunchConfig is not None
    assert saved.previousSecondaryLaunchConfigs is not None
    return refreshed
def _rollback(client, super_client, svc, primary=0, secondary1=0, secondary2=0):
    """Roll back `svc`, assert it re-activates with the previously saved
    primary version, and run the full per-config rollback validation."""
    rolled = client.wait_success(svc.rollback(), DEFAULT_TIMEOUT)
    assert rolled.state == "active"
    strategy = svc.upgrade.inServiceStrategy
    # The primary config must return to the version recorded by the upgrade.
    assert rolled.launchConfig.version == strategy.previousLaunchConfig.version
    _validate_rollback(super_client, svc, rolled,
                       primary, secondary1, secondary2)
def test_rollback_id(context, client, super_client):
    """Rolling back an in-service upgrade must restore the very same
    container (identical uuid), not a re-created replacement."""
    stack = client.create_environment(name=random_str())
    stack = client.wait_success(stack)
    assert stack.state == "active"
    image = context.image_uuid
    cfg = {"imageUuid": image, "networkMode": None}
    svc = client.create_service(name=random_str(), environmentId=stack.id,
                                scale=1, launchConfig=cfg, image=image)
    svc = client.wait_success(svc)
    svc = client.wait_success(svc.activate(), timeout=DEFAULT_TIMEOUT)
    # Remember the container that backs the service before the upgrade.
    before = super_client.reload(
        _wait_for_map_count(super_client, svc)[0].instance())
    svc = _run_insvc_upgrade(svc, batchSize=2, launchConfig=cfg,
                             startFirst=False, intervalMillis=100)
    svc = client.wait_success(svc)
    svc = client.wait_success(svc.rollback(), DEFAULT_TIMEOUT)
    after = super_client.reload(
        _wait_for_map_count(super_client, svc)[0].instance())
    assert before.uuid == after.uuid
def test_in_service_upgrade_port_mapping(context, client, super_client):
    """In-service upgrade that adds a port mapping to the primary config: the
    upgraded service must expose the extended port list while untouched
    sidekicks keep their original ports."""
    env = client.create_environment(name=random_str())
    env = client.wait_success(env)
    image_uuid = context.image_uuid
    launch_config = {"imageUuid": image_uuid, "ports": ["80", "82/tcp"]}
    secondary1 = {"imageUuid": image_uuid, "name": "secondary1", "ports": ["90"]}
    secondary2 = {"imageUuid": image_uuid, "name": "secondary2", "ports": ["100"]}
    svc = client.create_service(name=random_str(), environmentId=env.id, scale=1, launchConfig=launch_config, secondaryLaunchConfigs=[secondary1, secondary2])
    svc = client.wait_success(svc)
    svc = client.wait_success(svc.activate())
    # Upgrade the primary launch config with an extra udp mapping.
    launch_config = {"imageUuid": image_uuid, "ports": ["80", "82/tcp", "8083:83/udp"]}
    u_svc = _run_insvc_upgrade(svc, launchConfig=launch_config, secondaryLaunchConfigs=[secondary1, secondary2], batchSize=1)
    u_svc = client.wait_success(u_svc, DEFAULT_TIMEOUT)
    assert u_svc.state == "upgraded"
    u_svc = client.wait_success(u_svc.finishupgrade(), DEFAULT_TIMEOUT)
    # Mirror the upgrade on the local pre-upgrade copy so the port lists can
    # be compared element-for-element.  `unicode` is Python 2 only.
    svc.launchConfig.ports.append(unicode("8083:83/udp"))  # noqa: F821
    assert u_svc.launchConfig.ports == svc.launchConfig.ports
    assert u_svc.secondaryLaunchConfigs[0].ports == svc.secondaryLaunchConfigs[0].ports
    assert u_svc.secondaryLaunchConfigs[1].ports == svc.secondaryLaunchConfigs[1].ports
def test_sidekick_addition(context, client):
    """Adding a new sidekick via in-service upgrade: the new secondary2
    container appears, the primary is re-created (version bumped), and the
    untouched secondary1 container survives with the same id."""
    stack = client.create_environment(name=random_str())
    stack = client.wait_success(stack)
    image = context.image_uuid
    primary_cfg = {"imageUuid": image}
    sec1_cfg = {"imageUuid": image, "name": "secondary1"}
    sec2_cfg = {"imageUuid": image, "name": "secondary2"}
    svc = client.create_service(name=random_str(), environmentId=stack.id,
                                scale=1, launchConfig=primary_cfg,
                                secondaryLaunchConfigs=[sec1_cfg])
    svc = client.wait_success(svc)
    svc = client.wait_success(svc.activate())
    sec1_before = _validate_compose_instance_start(
        client, svc, stack, "1", "secondary1")
    upgraded = _run_insvc_upgrade(svc, launchConfig=primary_cfg,
                                  secondaryLaunchConfigs=[sec2_cfg],
                                  batchSize=1)
    upgraded = client.wait_success(upgraded, DEFAULT_TIMEOUT)
    assert upgraded.state == "upgraded"
    upgraded = client.wait_success(upgraded.finishupgrade(), DEFAULT_TIMEOUT)
    # primary + secondary1 + the newly added secondary2.
    _wait_until_active_map_count(upgraded, 3, client)
    primary = _validate_compose_instance_start(client, svc, stack, "1")
    assert primary.version != "0"
    sec1_after = _validate_compose_instance_start(
        client, svc, stack, "1", "secondary1")
    assert sec1_after.version == "0"
    assert sec1_after.id == sec1_before.id
    sec2 = _validate_compose_instance_start(
        client, svc, stack, "1", "secondary2")
    assert sec2.version != "0"
def test_sidekick_addition_rollback(context, client):
    """Rolling back an upgrade that tried to add a sidekick must leave the
    original primary/secondary1 containers untouched (same ids, version "0")
    and drop the partially added secondary2."""
    env = client.create_environment(name=random_str())
    env = client.wait_success(env)
    image_uuid = context.image_uuid
    launch_config = {"imageUuid": image_uuid}
    secondary1 = {"imageUuid": image_uuid, "name": "secondary1"}
    secondary2 = {"imageUuid": image_uuid, "name": "secondary2"}
    svc = client.create_service(name=random_str(), environmentId=env.id, scale=2, launchConfig=launch_config, secondaryLaunchConfigs=[secondary1])
    svc = client.wait_success(svc)
    svc = client.wait_success(svc.activate())
    # Capture the pre-upgrade containers so identity can be asserted after
    # the rollback.
    c11_pre = _validate_compose_instance_start(client, svc, env, "1")
    c12_pre = _validate_compose_instance_start(client, svc, env, "2")
    c21_pre = _validate_compose_instance_start(client, svc, env, "1", "secondary1")
    c22_pre = _validate_compose_instance_start(client, svc, env, "2", "secondary1")
    u_svc = _run_insvc_upgrade(svc, launchConfig=launch_config, secondaryLaunchConfigs=[secondary2], batchSize=1)
    u_svc = client.wait_success(u_svc, DEFAULT_TIMEOUT)
    assert u_svc.state == "upgraded"
    u_svc = client.wait_success(u_svc.rollback(), DEFAULT_TIMEOUT)
    # scale=2 x (primary + secondary1) = 4 active maps after rollback.
    _wait_until_active_map_count(u_svc, 4, client)
    c11 = _validate_compose_instance_start(client, svc, env, "1")
    assert c11.version == "0"
    assert c11.id == c11_pre.id
    c12 = _validate_compose_instance_start(client, svc, env, "2")
    assert c12.version == "0"
    assert c12.id == c12_pre.id
    c21 = _validate_compose_instance_start(client, svc, env, "1", "secondary1")
    assert c21.version == "0"
    assert c21.id == c21_pre.id
    c22 = _validate_compose_instance_start(client, svc, env, "2", "secondary1")
    assert c22.version == "0"
    assert c22.id == c22_pre.id
def test_sidekick_addition_wo_primary(context, client):
    """Adding a sidekick without touching the primary config: primary and
    secondary1 containers survive unchanged (same ids, version "0"); only
    secondary2 is new."""
    stack = client.create_environment(name=random_str())
    stack = client.wait_success(stack)
    image = context.image_uuid
    primary_cfg = {"imageUuid": image}
    sec1_cfg = {"imageUuid": image, "name": "secondary1"}
    sec2_cfg = {"imageUuid": image, "name": "secondary2"}
    svc = client.create_service(name=random_str(), environmentId=stack.id,
                                scale=1, launchConfig=primary_cfg,
                                secondaryLaunchConfigs=[sec1_cfg])
    svc = client.wait_success(svc)
    svc = client.wait_success(svc.activate())
    primary_before = _validate_compose_instance_start(client, svc, stack, "1")
    sec1_before = _validate_compose_instance_start(
        client, svc, stack, "1", "secondary1")
    # Upgrade passes only the secondary configs -- primary stays untouched.
    upgraded = _run_insvc_upgrade(svc, secondaryLaunchConfigs=[sec2_cfg],
                                  batchSize=1)
    upgraded = client.wait_success(upgraded, DEFAULT_TIMEOUT)
    assert upgraded.state == "upgraded"
    upgraded = client.wait_success(upgraded.finishupgrade(), DEFAULT_TIMEOUT)
    _wait_until_active_map_count(upgraded, 3, client)
    primary_after = _validate_compose_instance_start(client, svc, stack, "1")
    assert primary_after.version == "0"
    assert primary_after.id == primary_before.id
    sec1_after = _validate_compose_instance_start(
        client, svc, stack, "1", "secondary1")
    assert sec1_after.version == "0"
    assert sec1_after.id == sec1_before.id
    sec2 = _validate_compose_instance_start(
        client, svc, stack, "1", "secondary2")
    assert sec2.version != "0"
def test_sidekick_addition_two_sidekicks(context, client):
    """Adding two sidekicks at once without touching the primary: the primary
    container survives (same id, version "0"); both sidekicks are new."""
    stack = client.create_environment(name=random_str())
    stack = client.wait_success(stack)
    image = context.image_uuid
    primary_cfg = {"imageUuid": image}
    sec1_cfg = {"imageUuid": image, "name": "secondary1"}
    sec2_cfg = {"imageUuid": image, "name": "secondary2"}
    svc = client.create_service(name=random_str(), environmentId=stack.id,
                                scale=1, launchConfig=primary_cfg)
    svc = client.wait_success(svc)
    svc = client.wait_success(svc.activate())
    primary_before = _validate_compose_instance_start(client, svc, stack, "1")
    upgraded = _run_insvc_upgrade(
        svc, secondaryLaunchConfigs=[sec1_cfg, sec2_cfg], batchSize=1)
    upgraded = client.wait_success(upgraded, DEFAULT_TIMEOUT)
    assert upgraded.state == "upgraded"
    upgraded = client.wait_success(upgraded.finishupgrade(), DEFAULT_TIMEOUT)
    # primary + the two newly added sidekicks.
    _wait_until_active_map_count(upgraded, 3, client)
    primary_after = _validate_compose_instance_start(client, svc, stack, "1")
    assert primary_after.version == "0"
    assert primary_after.id == primary_before.id
    sec1 = _validate_compose_instance_start(
        client, svc, stack, "1", "secondary1")
    assert sec1.version != "0"
    sec2 = _validate_compose_instance_start(
        client, svc, stack, "1", "secondary2")
    assert sec2.version != "0"
def test_sidekick_removal(context, client):
    """Upgrading a sidekick to the special image ``rancher/none`` removes it:
    only the primary + remaining sidekick maps stay active and the primary
    container survives the upgrade untouched."""
    env = client.create_environment(name=random_str())
    env = client.wait_success(env)
    image_uuid = context.image_uuid
    launch_config = {"imageUuid": image_uuid}
    secondary1 = {"imageUuid": image_uuid, "name": "secondary1"}
    secondary2 = {"imageUuid": image_uuid, "name": "secondary2"}
    svc = client.create_service(name=random_str(), environmentId=env.id,
                                scale=1, launchConfig=launch_config,
                                secondaryLaunchConfigs=[secondary1,
                                                        secondary2])
    svc = client.wait_success(svc)
    svc = client.wait_success(svc.activate())
    c1_pre = _validate_compose_instance_start(client, svc, env, "1")
    # FIX: the original dict literal repeated the "imageUuid" key
    # ({"imageUuid": image_uuid, ..., "imageUuid": "rancher/none"}); in a
    # Python dict display the last duplicate wins, so the first value was
    # dead.  State the effective value once, explicitly.
    secondary2 = {"name": "secondary2", "imageUuid": "rancher/none"}
    u_svc = _run_insvc_upgrade(svc,
                               secondaryLaunchConfigs=[secondary1, secondary2],
                               batchSize=1)
    u_svc = client.wait_success(u_svc, DEFAULT_TIMEOUT)
    assert u_svc.state == "upgraded"
    u_svc = client.wait_success(u_svc.finishupgrade(), DEFAULT_TIMEOUT)
    # Only 2 active maps remain: primary + secondary1 (secondary2 removed).
    _wait_until_active_map_count(u_svc, 2, client)
    c1 = _validate_compose_instance_start(client, svc, env, "1")
    assert c1.version == "0"
    assert c1.id == c1_pre.id
    c2 = _validate_compose_instance_start(client, svc, env, "1", "secondary1")
    assert c2.version != "0"
def test_sidekick_removal_rollback(context, client):
    """Rolling back a sidekick-removal upgrade must bring back all three
    original containers with their original ids and version "0"."""
    env = client.create_environment(name=random_str())
    env = client.wait_success(env)
    image_uuid = context.image_uuid
    launch_config = {"imageUuid": image_uuid}
    secondary1 = {"imageUuid": image_uuid, "name": "secondary1"}
    secondary2 = {"imageUuid": image_uuid, "name": "secondary2"}
    svc = client.create_service(name=random_str(), environmentId=env.id,
                                scale=1, launchConfig=launch_config,
                                secondaryLaunchConfigs=[secondary1,
                                                        secondary2])
    svc = client.wait_success(svc)
    svc = client.wait_success(svc.activate())
    # Capture all three pre-upgrade containers for identity checks later.
    c1_pre = _validate_compose_instance_start(client, svc, env, "1")
    c2_pre = _validate_compose_instance_start(client, svc, env, "1",
                                              "secondary1")
    c3_pre = _validate_compose_instance_start(client, svc, env, "1",
                                              "secondary2")
    # FIX: the original dict literal repeated the "imageUuid" key; only the
    # last value ("rancher/none", the removal marker) took effect, so the
    # first one was dead code.  State the effective value once.
    secondary2 = {"name": "secondary2", "imageUuid": "rancher/none"}
    u_svc = _run_insvc_upgrade(svc,
                               secondaryLaunchConfigs=[secondary1, secondary2],
                               batchSize=1)
    u_svc = client.wait_success(u_svc, DEFAULT_TIMEOUT)
    assert u_svc.state == "upgraded"
    u_svc = client.wait_success(u_svc.rollback(), DEFAULT_TIMEOUT)
    # primary + secondary1 + restored secondary2 = 3 active maps.
    _wait_until_active_map_count(u_svc, 3, client)
    c1 = _validate_compose_instance_start(client, svc, env, "1")
    assert c1.version == "0"
    assert c1.id == c1_pre.id
    c2 = _validate_compose_instance_start(client, svc, env, "1", "secondary1")
    assert c2.version == "0"
    assert c2.id == c2_pre.id
    c3 = _validate_compose_instance_start(client, svc, env, "1", "secondary2")
    assert c3.version == "0"
    assert c3.id == c3_pre.id
def _wait_until_active_map_count(service, count, client):
    """Block until `service` has exactly `count` active expose maps, then
    return the active maps."""
    def _has_expected_count(svc):
        active = client.list_serviceExposeMap(serviceId=svc.id,
                                              state="active")
        return len(active) == count
    wait_for(lambda: wait_for_condition(client, service, _has_expected_count))
    return client.list_serviceExposeMap(serviceId=service.id, state="active")
def _validate_compose_instance_start(client, service, env, number,
                                     launch_config_name=None):
    """Wait until exactly one running container named
    ``<env>-<service>[-<launch_config_name>]-<number>`` exists and return it.
    """
    prefix = "" if launch_config_name is None else launch_config_name + "-"
    name = "-".join([env.name, service.name, prefix + number])
    def _one_running(svc):
        return len(client.list_container(name=name, state="running")) == 1
    wait_for(lambda: wait_for_condition(client, service, _one_running))
    return client.list_container(name=name, state="running")[0]
def test_upgrade_global_service(new_context):
    """Upgrade + rollback of a globally scheduled service spread over three
    hosts: the number of active expose maps must be preserved across the
    rollback."""
    client = new_context.client
    host1 = new_context.host
    host2 = register_simulated_host(new_context)
    host3 = register_simulated_host(new_context)
    client.wait_success(host1)
    client.wait_success(host2)
    client.wait_success(host3)
    env = _create_stack(client)
    image_uuid = new_context.image_uuid
    # The global scheduler label requests one instance per host.
    launch_config = {"imageUuid": image_uuid, "labels": {"io.rancher.scheduler.global": "true"}}
    service = client.create_service(name=random_str(), environmentId=env.id, launchConfig=launch_config)
    service = client.wait_success(service)
    assert service.state == "inactive"
    service = client.wait_success(service.activate(), 120)
    assert service.state == "active"
    # Snapshot the pre-upgrade active maps to compare counts after rollback.
    c = client.list_serviceExposeMap(serviceId=service.id, state="active")
    strategy = {"launchConfig": launch_config, "intervalMillis": 100}
    service.upgrade_action(inServiceStrategy=strategy)
    wait_for(lambda: client.reload(service).state == "upgraded")
    service = client.reload(service)
    service = client.wait_success(service.rollback())
    _wait_until_active_map_count(service, len(c), client)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.