text stringlengths 38 1.54M |
|---|
'''
39. Combination Sum [Medium]
Given a set of candidate numbers (candidates) (without duplicates) and
a target number (target), find all unique combinations in candidates
where the candidate numbers sum to target.
The same repeated number may be chosen from candidates unlimited number of times.
Note:
All numbers (including target) will be positive integers.
The solution set must not contain duplicate combinations.
Example 1:
Input: candidates = [2,3,6,7], target = 7,
A solution set is:
[
[7],
[2,2,3]
]
Example 2:
Input: candidates = [2,3,5], target = 8,
A solution set is:
[
[2,2,2,2],
[2,3,3],
[3,5]
]
[Method 1]: Backtracking
Runtime: 96 ms, faster than 41.37% of Python3 online submissions for Combination Sum.
Memory Usage: 12.8 MB, less than 100.00% of Python3 online submissions for Combination Sum.
'''
class Solution:
    def combinationSum(self, candidates: List[int], target: int) -> List[List[int]]:
        """Return every unique combination of candidates (reuse allowed) summing to target."""
        candidates.sort()  # in-place, preserved: callers may rely on the side effect
        combos = []
        chosen = []

        def explore(start, remaining):
            # A remaining sum of exactly zero means `chosen` is a valid combination.
            if remaining == 0:
                combos.append(list(chosen))
                return
            if remaining < 0:
                return
            for idx in range(start, len(candidates)):
                chosen.append(candidates[idx])
                # Recurse with `idx` (not idx + 1) so the same candidate may repeat.
                explore(idx, remaining - candidates[idx])
                chosen.pop()

        explore(0, target)
        return combos
# Or:
class Solution(object):
    def combinationSum(self, candidates, target):
        """Collect all combinations of candidates (each reusable) that sum to target."""
        pool = sorted(candidates)  # sorted copy enables early pruning below
        found = []

        def search(remaining, picked, start):
            if remaining == 0:
                found.append(picked)
                return
            for pos in range(start, len(pool)):
                candidate = pool[pos]
                if candidate > remaining:
                    # pool is sorted, so every later candidate overshoots too
                    break
                search(remaining - candidate, picked + [candidate], pos)

        search(target, [], 0)
        return found
'''
[Method 2]: DP(完全/多重背包)
根据动态规划的基本思路,有:
dp[i][j]: 用前i个数凑齐j的方案,
状态转移方程:dp[i][j] = dp[i-1][j] + dp[i-1][j-nums[i]],
即不用当前第i个数凑齐j的情况加上用当前数凑齐j的情况(也就是不用当前数i凑齐j-nums[i]的情况)
因为是完全背包,dp[i][j]不需要从dp[i-1][j]得到,所以可以优化为:
dp[j] = dp[j] + dp[j-nums[i]]。
但这种情况是用来求能凑齐的方案个数的,也就是dp[j]是一个整数,如题目518. Coin Change 2。
而本题求的是所有的方案的集合,也就是说从之前的一维数组,此题要用到三维数组(dp存的是集合的集合),
我们只能将第三维的具体某一方案的集合写出动态规划的转移方程式:
tmp = [[num] + comb for comb in dp[j-num]]
tmp代表当前用num可能凑齐的组合方式的list容器(因为凑不齐的话(如果dp[j-num]为[])则tmp就是个空容器,加减空容器并不影响原list),
也就是说剩下的j-num刚好是已经凑齐了的(有可能无num也可能有num,毕竟是完全背包)
然后dp[j] += tmp即可。
另外要注意初始状态dp[0], 即target为0的情况是有一解的(空解):
dp[0] = [[]]
因为当target凑不齐的时候都返回[[]]。
[Time]: O(n * target * c), n为nums长度,c为dp[j-num]的长度,肯定有限,所以用常数c表示;
[Space]: O(n * target * c )
Runtime: 56 ms, faster than 76.94% of Python3 online submissions for Combination Sum.
Memory Usage: 12.9 MB, less than 100.00% of Python3 online submissions for Combination Sum.
'''
class Solution:
    def combinationSum(self, nums: List[int], target: int) -> List[List[int]]:
        """Unbounded-knapsack DP enumerating every combination that sums to target.

        dp is a 3-level structure:
          - dp:       indexed by sum j from 0 to target
          - dp[j]:    list of combinations whose elements add up to j
          - dp[j][i]: the i-th combination (a list of numbers) for sum j

        Fix: the return annotation previously said ``int`` although the method
        returns the list of combinations.
        """
        # dp[0] = [[]]: the empty combination is the unique way to reach sum 0.
        dp = [[[]]] + [[] for _ in range(target)]
        for num in nums:
            # Ascending j implements the *unbounded* knapsack: num may be reused.
            for j in range(num, target + 1):
                # Extend every known combination for j - num with one more `num`;
                # when dp[j - num] is empty, nothing is added.
                tmp = [[num] + comb for comb in dp[j - num]]
                dp[j] += tmp
        return dp[target]
|
import gzip
from datetime import datetime
from collections import defaultdict
from multicorn import ForeignDataWrapper
from multicorn.utils import log_to_postgres
def get_rows(path):
    """Iterates through compressed access log and yields dicts that
    contain ip, time, and error code info.

    NOTE(review): written for Python 2 — under Python 3 gzip.open()
    yields bytes, so the str operations below would raise and be
    silently swallowed by the except.  Confirm target interpreter.
    """
    with gzip.open(path) as finp:
        for line in finp:
            try:
                parts = line.split()
                # clean time, chop off timezone info
                time = parts[4].replace('[', '')
                # clean path and truncate for demo purposes
                path = parts[7].split('?')[0].rstrip('/')
                path = path[:60]
                # skip entries whose path collapses to empty (e.g. bare "/")
                if not path:
                    continue
                yield {
                    'path': path,
                    'ip': parts[0],
                    'time': _log_time_to_psql(time),
                    'error': int(parts[-2]),
                    'elapsed': float(parts[-1]),
                }
            except Exception:
                # Some simple error handling because log file ain't
                # clean. Of *course* I would never do this in
                # production...
                pass
def _log_time_to_psql(timestr):
"""Converts time format from '01/Jul/2014:06:31:24' to '2014-07-01 06:31:24'."""
dt = datetime.strptime(timestr, '%d/%b/%Y:%H:%M:%S')
return dt.strftime("%Y-%m-%d %H:%M:%S")
class AccessLogFDW(ForeignDataWrapper):
    """Multicorn foreign data wrapper serving rows parsed from a gzipped access log.

    CREATE SERVER access_log_srv foreign data wrapper multicorn options ( wrapper 'pygoth.access_log_fdw.AccessLogFDW' );
    create foreign table access_log (path text, ip text, time timestamp without time zone, error int, elapsed float) server access_log_srv;
    """
    def __init__(self, options, columns):
        super(AccessLogFDW, self).__init__(options, columns)
        # NOTE(review): log path is hard-coded; consider reading it from `options`.
        self._access_log_path = '/home/vagrant/access_log.gz'
        log_to_postgres("caching row data")
        # Parse the whole log once at wrapper creation and cache rows in memory.
        self._rows = list(get_rows(self._access_log_path))
        # Secondary index: rows grouped by status code, for equality quals.
        self._rows_by_error = defaultdict(list)
        for row in self._rows:
            self._rows_by_error[row['error']].append(row)
    def execute(self, quals, columns):
        """Return cached rows, using the per-error index when an `error = <n>`
        qual is pushed down; other quals are left for Postgres to re-check."""
        # for row in get_rows(self._access_log_path):
        #     yield row
        results = self._rows
        for q in quals:
            if q.field_name == 'error' and q.operator == '=':
                log_to_postgres("filtering query on error to %d" % q.value)
                results = self._rows_by_error[q.value]
        return results
if __name__ == '__main__':
    # Smoke test: dump parsed rows to stdout.
    # Fix: `print row` is Python-2-only syntax; print(row) with a single
    # argument behaves identically under Python 2 and also runs on Python 3.
    for row in get_rows('/home/vagrant/access_log.gz'):
        print(row)
|
#!/usr/bin/env python
import unittest
from acme import Product
from acme_report import generate_products, ADJECTIVES, NOUNS
class AcmeProductTests(unittest.TestCase):
    """Making sure Acme products are the tops!"""
    def test_default_product_price(self):
        """Test default product price being 10."""
        prod = Product('Test Product')
        self.assertEqual(prod.price, 10)
    def test_default_product_weight(self):
        """Test default product weight being 20."""
        prod = Product('Test Product')
        self.assertEqual(prod.weight, 20)
    def test_default_product_flammability(self):
        """Test default product flammability being 0.5."""
        prod = Product('Test Product')
        self.assertEqual(prod.flammability, 0.5)
    def test_explode(self):
        """Test that explode() runs and produces a result.

        Fix: the original line `self.assertTrue(Product().explode) == 10.0`
        never *called* explode() (a bound method is always truthy) and then
        compared assertTrue's return value to 10.0, discarding the result —
        the test could never fail.
        """
        prod = Product('Test Product')
        # assumes explode() returns a truthy message — TODO confirm against acme.Product
        self.assertTrue(prod.explode())
#class AcmeReportTests(unittest.TestCase):
if __name__ == '__main__':
unittest.main()
|
# Find out whether a list is a palindrome.
def rev(lst):
    """Return a new list with the elements of lst in reverse order.

    Fix: the original accumulated into a local named ``reversed`` (shadowing
    the builtin) and rebuilt the list with O(n^2) insert(0, ...) calls; a
    slice does the same in O(n).
    """
    return lst[::-1]
def isPalindrome(lst):
    """Return True if lst reads the same forwards and backwards."""
    # Comparing against a reversed slice directly avoids the redundant
    # `if ...: return True / return False` ladder and the helper call.
    return lst == lst[::-1]
# Convert Fahrenheit readings to Celsius, then square them in reverse order.
f = [41, 32, 212]
v = [(degrees - 32) * 5 / 9 for degrees in f]
print(v)
rev = list(map(lambda item: item ** 2, reversed(f)))
print(rev)
# This problem computes the largest 1 to 9 pandigital 9-digit number that can
# be formed as the concatenated product of an integer with (1,2,...,n), n>1.
# Fixes: renamed the accumulator from `max` (which shadowed the builtin) and
# used print() so the script runs under Python 3 as well as Python 2.
PANDIGITAL = {'1', '2', '3', '4', '5', '6', '7', '8', '9'}
best = 0
for i in range(2, 10):
    listfac = range(1, i)  # multipliers (1, ..., i-1)
    for j in range(1, 10000):
        # Concatenated product of j with each multiplier.
        n = ''.join(str(k * j) for k in listfac)
        if len(n) == 9 and set(n) == PANDIGITAL:
            best = max(best, int(n))
print(best)
|
import sys
import time
import datetime
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
## @params: [JOB_NAME]
# AWS Glue ETL job (Python 2 print statements): reads the "full" customer
# table from the tier-1 Glue catalog, keeps a few string columns, and writes
# CSV output partitioned by customerid to the tier-2 S3 bucket.
args = getResolvedOptions(sys.argv, ['JOB_NAME'])
print "Starting the processing - ", datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
sc = SparkContext()
print "SparkContext Created - ",datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
glueContext = GlueContext(sc)
print "Glue Context Created - ",datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
spark = glueContext.spark_session
print "Spark Session Created - ",datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
print "Starting the Job...",datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
## @type: DataSource
## @args: [database = "customer", table_name = "tier1_bucket", transformation_ctx = "datasource0"]
## @return: datasource0
## @inputs: []
# Source: Glue Data Catalog table "full" in the tier-1 database.
customerFullDF = glueContext.create_dynamic_frame.from_catalog(database = "octank-energy-datalake-tier-1-west", table_name = "full", transformation_ctx = "customerFullDF")
print "Creating DFs - ",datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
## @type: ApplyMapping
## @return: applymapping1
## @inputs: [frame = datasource0]
# Keep only customerid / email / lclid, all as strings.
applymapping1 = ApplyMapping.apply(frame = customerFullDF, mappings = [ ("customerid", "string", "customerid", "string"), ("email", "string", "email", "string"), ("lclid", "string", "lclid", "string")], transformation_ctx = "applymapping1")
print "Appying the transformation - ",datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
## @type: ResolveChoice
## @args: [choice = "make_struct", transformation_ctx = "resolvechoice2"]
## @return: resolvechoice2
## @inputs: [frame = applymapping1]
resolvechoice2 = ResolveChoice.apply(frame = applymapping1, choice = "make_struct", transformation_ctx = "resolvechoice2")
## @type: DropNullFields
## @args: [transformation_ctx = "dropnullfields3"]
## @return: dropnullfields3
## @inputs: [frame = resolvechoice2]
partitionedFinalDF = DropNullFields.apply(frame = resolvechoice2, transformation_ctx = "partitionedFinalDF")
## @type: DataSink
## @args: [connection_type = "s3", connection_options = {"path": "s3://tier1-database-table1/Tier2_Bucket"}, format = "parquet", transformation_ctx = "datasink4"]
## @return: datasink4
## @inputs: [frame = dropnullfields3]
print "Final transformation done - ",datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
# Sink: CSV partitioned by customerid.  NOTE(review): the @args annotation
# above still describes the original parquet target — comment and code disagree.
customerSink = glueContext.write_dynamic_frame.from_options(frame = partitionedFinalDF, connection_type = "s3", connection_options = {"path": "s3://octank-energy-datalake-west/Tier-2/Customer","partitionKeys": ["customerid"]}, format = "csv", transformation_ctx = "customerSink")
print "Written the DFs - ", datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
job.commit()
print "Commited the job ",datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
|
"""Terminal management for exposing terminals to a web interface using Tornado.
"""
from __future__ import absolute_import, print_function, with_statement
import sys
if sys.version_info[0] < 3:
byte_code = ord
else:
byte_code = lambda x: x
unicode = str
import itertools
import logging
import os
import signal
import subprocess
import termios
from ptyprocess import PtyProcessUnicode
import random
try:
random = random.SystemRandom()
except NotImplementedError:
import random
ENV_PREFIX = "PYXTERM_" # Environment variable prefix
NO_COPY_ENV = set([]) # Do not copy these environment variables
DEFAULT_TERM_TYPE = "xterm"
IDLE_TIMEOUT = 300 # Idle timeout in seconds
UPDATE_INTERVAL = 0.05 # Fullscreen update time interval
CHUNK_BYTES = 4096 # Chunk size for receiving data in stdin
# Helper functions
def make_term_cookie():
    """Generate a fresh 16-digit numeric cookie identifying a terminal."""
    cookie = random.randrange(10 ** 15, 10 ** 16)
    return "%016d" % cookie
def set_tty_speed(fd, baudrate=termios.B230400):
    """Set both the input and output speed of the tty behind *fd*."""
    attrs = termios.tcgetattr(fd)
    attrs[4] = baudrate  # ispeed
    attrs[5] = baudrate  # ospeed
    termios.tcsetattr(fd, termios.TCSADRAIN, attrs)
def set_tty_echo(fd, enabled):
    """Turn terminal echo on or off for the tty behind *fd*."""
    attrs = termios.tcgetattr(fd)
    if enabled:
        attrs[3] |= termios.ECHO   # local-mode flags
    else:
        attrs[3] &= ~termios.ECHO
    termios.tcsetattr(fd, termios.TCSADRAIN, attrs)
def match_program_name(name):
    """ Return full path to command name, if running. else null string"""
    ps_output = subprocess.check_output(["ps", "aux"], timeout=1, universal_newlines=True)
    suffix = "/" + name
    for line in ps_output.split('\n'):
        fields = line.split(None, 10)
        # Skip blank lines and lines whose command column is empty.
        if not fields or not fields[-1].strip():
            continue
        command = fields[-1].split()
        if command[0].endswith(suffix):
            return command[0]
    return ""
class PtyWithClients(object):
    """A pty process together with the clients currently attached to it."""
    def __init__(self, ptyproc):
        self.ptyproc = ptyproc
        self.clients = []
    def resize_to_smallest(self):
        """Set the terminal size to that of the smallest client dimensions.

        A terminal not using the full space available is much nicer than a
        terminal trying to use more than the available space, so we keep it
        sized to the smallest client.
        """
        BIG = 10001  # sentinel larger than any sane terminal dimension
        minrows = mincols = BIG
        for client in self.clients:
            rows, cols = client.size
            if rows is not None:
                minrows = min(minrows, rows)
            if cols is not None:
                mincols = min(mincols, cols)
        if BIG in (minrows, mincols):
            # No client has reported a usable size yet.
            return
        if self.ptyproc.getwinsize() != (minrows, mincols):
            self.ptyproc.setwinsize(minrows, mincols)
    def kill(self, sig=signal.SIGTERM):
        """Send *sig* to the process, close its fd, and notify every client."""
        self.ptyproc.kill(sig)
        self.ptyproc.fileobj.close()
        for client in self.clients:
            client.on_pty_died()
        self.clients = []
class TermManagerBase(object):
    """Base class for sharing pty processes with websocket clients.

    Subclasses decide how terminals map to clients (single, unique, named).
    """
    def __init__(self, shell_command, server_url="", term_settings={},
                 ioloop=None):
        # NOTE(review): the mutable default for term_settings is shared by
        # every instance constructed without an explicit dict.
        self.shell_command = shell_command
        self.server_url = server_url
        self.term_settings = term_settings
        # pty file descriptor -> PtyWithClients, used to dispatch reads.
        self.ptys_by_fd = {}
        if ioloop is not None:
            self.ioloop = ioloop
        else:
            import tornado.ioloop
            self.ioloop = tornado.ioloop.IOLoop.instance()
    def make_term_env(self, height=25, width=80, winheight=0, winwidth=0, **kwargs):
        """Build the environment dict for a new terminal process."""
        env = os.environ.copy()
        env["TERM"] = self.term_settings.get("type",DEFAULT_TERM_TYPE)
        dimensions = "%dx%d" % (width, height)
        if winwidth and winheight:
            dimensions += ";%dx%d" % (winwidth, winheight)
        env[ENV_PREFIX+"DIMENSIONS"] = dimensions
        env["COLUMNS"] = str(width)
        env["LINES"] = str(height)
        if self.server_url:
            env[ENV_PREFIX+"URL"] = self.server_url
        return env
    def new_terminal(self, **kwargs):
        """Spawn a pty running the shell command; return it wrapped in PtyWithClients."""
        options = self.term_settings.copy()
        options['shell_command'] = self.shell_command
        options.update(kwargs)
        argv = options['shell_command']
        env = self.make_term_env(**options)
        pty = PtyProcessUnicode.spawn(argv, env=env, cwd=options.get('cwd', None))
        return PtyWithClients(pty)
    def start_reading(self, ptywclients):
        """Register the terminal's fd with the event loop so output is dispatched."""
        fd = ptywclients.ptyproc.fd
        self.ptys_by_fd[fd] = ptywclients
        self.ioloop.add_handler(fd, self.pty_read, self.ioloop.READ)
    def on_eof(self, ptywclients):
        """Tear down a terminal whose process has exited."""
        # Stop trying to read from that terminal
        fd = ptywclients.ptyproc.fd
        del self.ptys_by_fd[fd]
        self.ioloop.remove_handler(fd)
        os.close(fd)
        # This should reap the child process
        ptywclients.ptyproc.isalive()
    def pty_read(self, fd, events=None):
        """Event-loop callback: forward pty output to every attached client."""
        ptywclients = self.ptys_by_fd[fd]
        try:
            s = ptywclients.ptyproc.read(65536)
            for client in ptywclients.clients:
                client.on_pty_read(s)
        except EOFError:
            self.on_eof(ptywclients)
            for client in ptywclients.clients:
                client.on_pty_died()
    def get_terminal(self, url_component=None):
        # Subclasses must decide which terminal serves a given connection.
        raise NotImplementedError
    def client_disconnected(self, websocket):
        """Override this to e.g. kill terminals on client disconnection.
        """
        pass
    def shutdown(self):
        self.kill_all()
    def kill_all(self):
        """Kill every terminal this manager is tracking."""
        for term in self.ptys_by_fd.values():
            term.kill()
class SingleTermManager(TermManagerBase):
    """Every client shares one (lazily created) terminal."""
    def __init__(self, **kwargs):
        super(SingleTermManager, self).__init__(**kwargs)
        self.terminal = None
    def get_terminal(self, url_component=None):
        """Return the shared terminal, spawning it on first use."""
        if self.terminal is not None:
            return self.terminal
        self.terminal = self.new_terminal()
        self.start_reading(self.terminal)
        return self.terminal
    def kill_all(self):
        super(SingleTermManager, self).kill_all()
        # Drop the reference so a later get_terminal() spawns a fresh pty.
        self.terminal = None
class MaxTerminalsReached(Exception):
    """Raised when a manager refuses to create another terminal.

    Fix: this was declared with ``def`` instead of ``class``, so
    ``raise MaxTerminalsReached(n)`` raised
    ``TypeError: exceptions must derive from BaseException`` instead of the
    intended error.
    """
    def __init__(self, max_terminals):
        self.max_terminals = max_terminals
    def __str__(self):
        return "Cannot create more than %d terminals" % self.max_terminals
class UniqueTermManager(TermManagerBase):
    """Give each websocket a unique terminal to use."""
    def __init__(self, max_terminals=None, **kwargs):
        super(UniqueTermManager, self).__init__(**kwargs)
        self.max_terminals = max_terminals
    def get_terminal(self, url_component=None):
        """Spawn and return a brand-new pty for every request."""
        terminal = self.new_terminal()
        self.start_reading(terminal)
        return terminal
    def client_disconnected(self, websocket):
        """Send terminal SIGHUP when client disconnects."""
        websocket.terminal.kill(signal.SIGHUP)
class NamedTermManager(TermManagerBase):
    """Share terminals between websockets connected to the same endpoint.
    """
    def __init__(self, max_terminals=None, **kwargs):
        super(NamedTermManager, self).__init__(**kwargs)
        self.max_terminals = max_terminals
        self.terminals = {}
    def get_terminal(self, term_name):
        """Return the terminal registered under *term_name*, creating it if
        needed.  Raises MaxTerminalsReached once the configured limit is hit."""
        assert term_name is not None
        if term_name in self.terminals:
            return self.terminals[term_name]
        if self.max_terminals and len(self.terminals) >= self.max_terminals:
            raise MaxTerminalsReached(self.max_terminals)
        # Create new terminal
        # Fix: the format string had two %s placeholders but only one
        # argument, so the logging module reported a formatting error
        # instead of logging the message.
        logging.info("New terminal with specified name: %s", term_name)
        term = self.new_terminal()
        term.term_name = term_name
        self.terminals[term_name] = term
        self.start_reading(term)
        return term
    name_template = "%d"
    def _next_available_name(self):
        # Smallest positive integer not already used as a terminal name.
        for n in itertools.count(start=1):
            name = self.name_template % n
            if name not in self.terminals:
                return name
    def new_named_terminal(self):
        """Create a terminal under an automatically chosen numeric name."""
        name = self._next_available_name()
        term = self.new_terminal()
        self.terminals[name] = term
        self.start_reading(term)
        return name, term
    def kill(self, name, sig=signal.SIGTERM):
        """Kill the named terminal with *sig*; cleanup happens via on_eof."""
        term = self.terminals[name]
        # Fix: the sig argument was silently ignored — term.kill() was called
        # with no arguments and therefore always sent SIGTERM.
        term.kill(sig)  # This should lead to an EOF
    def on_eof(self, ptywclients):
        super(NamedTermManager, self).on_eof(ptywclients)
        name = ptywclients.term_name
        self.terminals.pop(name, None)
    def kill_all(self):
        super(NamedTermManager, self).kill_all()
        self.terminals = {}
|
#!C:\python38\python
#coding=utf-8
import os
import time
# Get the current working directory
currDir = os.getcwd()
print(currDir)
# Change the current working directory
os.chdir(r"c:")
print(os.getcwd())
os.chdir(currDir)
print(os.getcwd())
# Build an absolute path
fileA = os.path.join(os.getcwd(), 'a')
print(fileA)
print(os.path.abspath(fileA))
print("---------------------------------------")
print(os.curdir)
print(os.pardir)
# Remove directories possibly left over from a previous run
if os.path.exists('testdir') and os.path.isdir('testdir'):
    os.rmdir('testdir')
if os.path.exists('dirname') and os.path.isdir('dirname'):
    os.rmdir('dirname')
# Create a directory
os.mkdir('dirname')
# Rename an object (file or directory)
os.rename("dirname","testdir")
print("---------------------------------------")
# Create and remove a chain of empty directories
print(os.makedirs('a/b/c/d'))
print(os.removedirs('a/b/c/d'))
print("---------------------------------------")
# List every file and subdirectory in a directory
allObjects = os.listdir('.')
for i in allObjects:
    print(i)
print("---------------------------------------")
# Check whether an object (file or directory) exists:
if os.path.exists('a'):
    if os.path.isfile('a'):
        #remove file
        os.remove('a')
    if os.path.isdir('a'):
        #remove directory
        os.rmdir('a')
print("---------------------------------------")
# walk(): recursively visit all files and subdirectories
try:
    for root, dirs, files in os.walk(r"C:\00\02-FAJob\techTrees\Python"):
        print("%s" % root, "-"*10)
        for directory in dirs:
            print("    %s" % directory)
        for file in files:
            print("    %s" % file)
except OSError as ex:
    print(ex)
# Read environment variables
print(os.getenv('PATH'))
print(os.environ.get('PATH')) # same
print("---------------------------------------")
# Run a shell command (prints its output, then its exit status)
print(os.system("ipconfig"))
# Name of the current file
print(__file__)
# Absolute path of the current file
print( os.path.abspath(__file__) )
# Directory containing the current file
print(os.path.dirname(os.path.abspath(__file__) ))
print("---------------------------------------")
file = os.path.abspath(__file__)
print( os.path.basename(file) )  # file name only
print( os.path.dirname(file) )  # directory part only
print( os.path.split(file) )  # split into a (directory, file name) tuple
print( os.path.getatime(file) )  # last access time
print( os.path.getctime(file) )  # creation (metadata-change) time
print( os.path.getmtime(file) )  # last modification time
print( time.gmtime(os.path.getmtime(file)) )  # modification time as a struct_time
print( os.path.getsize(file) )  # file size in bytes
print( os.path.normpath(file) )  # normalize the path string
print(os.path.exists(os.path.dirname(file))) # does the directory exist
print(os.path.isfile(file)) # is it a file
print(os.path.isdir(file)) # is it a directory
print(os.path.islink(file)) # is it a link
print( os.path.join('root','test','runoob.txt') )  # join directory and file name into one path
def searchPyFiles(dir, result=None):
    """Recursively collect the paths of all .py files under *dir*.

    Fix: ``result`` previously defaulted to a mutable list, so every call
    made without an explicit accumulator appended into the same shared list
    and results leaked between calls.
    """
    if result is None:
        result = []
    entries = [os.path.join(dir, name) for name in os.listdir(dir)]
    # Files of this directory first (matches the original traversal order) ...
    result += [p for p in entries if os.path.isfile(p) and os.path.splitext(p)[1] == '.py']
    # ... then recurse into subdirectories, sharing the accumulator.
    for p in entries:
        if os.path.isdir(p):
            searchPyFiles(p, result)
    return result
# Demo: recursively list every .py file under the current directory.
for i in searchPyFiles("."):
    print(i)
|
# coding: utf8
#
# Project: Time-Resolved EXAFS
# http://www.edna-site.org
#
# Copyright (C) 2013 European Synchrotron Radiation Facility
# Grenoble, France
#
# Principal authors: Olof Svensson (svensson@esrf.fr)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__author__ = "Olof Svensson"
__contact__ = "svensson@esrf.fr"
__license__ = "GPLv3+"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
import numpy
from EDPluginExec import EDPluginExec
from EDUtilsFile import EDUtilsFile
from EDUtilsArray import EDUtilsArray
from XSDataReadDataBM23v1_0 import XSDataInputReadDataBM23
from XSDataReadDataBM23v1_0 import XSDataResultReadDataBM23
class EDPluginExecReadDataBM23v1_0( EDPluginExec ):
    """
    This plugin reads an ascii file of DEXAFS data produced by the BM23 beamline at the ESRF.
    The energy calibration coefficients must be given as input.
    """
    def __init__( self ):
        EDPluginExec.__init__(self)
        self.setXSDataInputClass(XSDataInputReadDataBM23)
    def process(self, _edObject = None):
        """Parse the input ascii file and expose energy / data arrays as output."""
        EDPluginExec.process(self)
        self.DEBUG("EDPluginExecReadDataBM23v1_0.process")
        self.checkMandatoryParameters(self.dataInput, "Data Input is None")
        self.checkMandatoryParameters(self.dataInput.inputFile, "Data Input 'inputFile' is None")
        # Number of header lines to skip; defaults to 0 when not supplied.
        iSkipHeaderLines = 0
        if self.dataInput.nSkipHeader:
            iSkipHeaderLines = self.dataInput.nSkipHeader.value
        # Load input data
        numpyDataArray = numpy.genfromtxt(self.dataInput.inputFile.path.value, skip_header=iSkipHeaderLines)
        # Create output data
        # Column 0 is the energy axis; the remaining columns are the measurements.
        xsDataResultReadDataBM23 = XSDataResultReadDataBM23()
        xsDataResultReadDataBM23.energy = EDUtilsArray.arrayToXSData(numpyDataArray[:,0])
        xsDataResultReadDataBM23.dataArray = EDUtilsArray.arrayToXSData(numpyDataArray[:,1:])
        self.setDataOutput(xsDataResultReadDataBM23)
|
# Binary search for the largest cut height h such that the amount of material
# above h sums to M (classic "cut the trees to collect M wood" problem).
arr1 = input()
A = input()
arr1 = list(map(int, arr1.split()))
M = arr1[1]
A = list(map(int, A.split()))
x = 0
y = max(A)
h = (x + y) // 2
def calS(a, h):
    """Return the total amount above height h in the list a.

    Fix: the loop previously iterated the global A instead of the parameter
    a, leaving the parameter dead and the function unusable on other lists.
    (Behavior is unchanged here since every call passes A.)
    """
    s = 0
    for i in a:
        if i > h:
            s += i - h
    return s
k = calS(A, h)
while k != M and y - x > 1:
    if k > M:
        x = h  # cut too much: raise the lower bound
    elif k < M:
        y = h  # cut too little: lower the upper bound
    h = (x + y) // 2
    k = calS(A, h)
if k == M:
    print(h)
else:
    print(x)
|
class Poly:
    """A polynomial stored as a list of coefficients, index = exponent."""
    def __init__(self, mylist=0):
        # NOTE(review): the default of 0 is not a usable value (it is not
        # indexable); callers are expected to always pass a coefficient list.
        self.alist = [mylist[0]]
        for i in mylist[1:]:
            self.alist += [i]
    def __str__(self):
        return str(self.alist)
    def degree(self):
        """Degree of the polynomial (highest stored exponent)."""
        return len(self.alist) - 1
    def addTerm(self, exp, coeff):
        """Add coeff * x**exp to the polynomial.

        Fix: the original body was the incomplete statement ``self.alist +=``
        (a syntax error).  Pad the coefficient list with zeros up to the
        requested exponent, then accumulate the coefficient.
        """
        while self.degree() < exp:
            self.alist += [0]
        self.alist[exp] += coeff
|
import cv2
# OpenCV scratch script: image and video display, plus a commented-out
# board-preprocessing experiment kept below for reference.
# import and show image
img = cv2.imread("v2_train/image1.jpg") # read image file
cv2.imshow("Output", img) # display image file (but continue execution)
cv2.waitKey() # pause execution for arg ms (0 or none = infinite)
# import and show video
video_cap = cv2.VideoCapture("video_test.mp4") # read video
while True:
    success, img = video_cap.read()
    cv2.imshow("Video", img)
    # The 0xFF mask keeps only the low byte of waitKey's return value so it
    # can be compared with ord('q') regardless of platform key-flag bits.
    if cv2.waitKey(1) & 0xFF == ord('q'): # break loop if "q" key is pressed
        break
## import webcam video
# cam_cap = cv2.VideoCapture(0) # argument is webcam id, starting at 0
# cam_cap.set(3,640) # width is id 3
# cam_cap.set(4,480) # length is id 4
# cam_cap.set(10,100) # brightness is id 10
# preprocessing sequence
'''img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img_proc = preprocessing(img)
img_cont = img.copy()
biggest = get_contours(img_proc, img_cont)
img_warp = get_warp(img_gray, biggest)[2:498, 2:498]
# create a cut location, then divide the picture considering the cut size
# cropped the image a bit and played with the size of the cut to get correct vision on numbers
# (fucked by border thickness variations)
s = 55 # 62
tiles = [img_warp[x:x+s, y:y+s] for x in range(0, img_warp.shape[0], s) for y in range(0, img_warp.shape[1],s)]
tiles_clean = []
for i in index:
    tiles_clean.append(tiles[i])
print(np.shape(tiles_clean))
tile_test_vanilla = tiles[0]
# cv2.imshow("Preprocessed", img_proc)
# cv2.imshow("Contour", img_cont)
# cv2.imshow("Warp", img_warp) # img of 500 x 500
# cv2.imshow("first_digit", first_digit)
'''
#!/usr/bin/env python
import sys
import pandas
import datetime
from pathlib import Path
csv_file = sys.argv[1]
output_dir = Path(csv_file.replace('.csv', '') + '-segments')
if not output_dir.is_dir():
    output_dir.mkdir()
df = pandas.read_csv(csv_file, parse_dates=['created'])
df = df.sort_values('created', ascending=True)
# Fix: sort_values keeps the original index labels, so `df.created[0]` looked
# up *label* 0 (the first row of the unsorted file), not the earliest
# timestamp.  iloc[0] takes the first row of the sorted frame by position.
start = df.created.iloc[0]
# Truncate the starting point down to the whole hour.
start = datetime.datetime(
    year=start.year,
    month=start.month,
    day=start.day,
    hour=start.hour
)
segment_num = 0
# Slice the frame into consecutive 4-hour windows; stop at the first window
# with no rows.  NOTE(review): a >4h gap in the data ends the loop early.
while True:
    segment_num += 1
    end = start + datetime.timedelta(hours=4)
    segment = df[(df['created'] >= start) & (df['created'] < end)]
    if len(segment) == 0:
        break
    segment = segment.drop('created', axis=1)
    segment_file = output_dir / '{}-{}.csv'.format(
        start.strftime('%Y%m%d%H%M%S'),
        end.strftime('%Y%m%d%H%M%S'),
    )
    segment.to_csv(segment_file, index=False, header=False)
    start = end
|
#! /usr/bin/env python
import numpy as num
from pdb import set_trace as stop
def rot180numpy(image,center):
    """Rotates an image 180 degrees respect to a center, using numpy.

    Returns (image1, image2): a copy of the input and the rotated copy.
    NOTE(review): uses the project-local shrotate imported from
    algorithms_II (1-based center coordinates), not the shrotate defined
    later in this file — confirm they agree.
    """
    # IMPORT STUFF
    from algorithms_II import shrotate
    from time import time
    # END IMPORT
    #print time()
    #shape = image.shape
    # Convert the 0-based (row, col) center into 1-based (x, y) for shrotate.
    xin = center[1] + 1. ; yin = center[0] + 1. # RIGHT?
    #xout = (shape[1]-1.)/2. + 1
    #yout = (shape[0]-1.)/2. + 1
    #nullangle = 0.
    #image1 = shrotate(image.copy(), nullangle, xout, yout, xin, yin, order = 3,\
    #mode = 'constant', cval = 0.0,prefilter = False)
    angle = 180.
    image1 = image.copy()
    #image2 = shrotate(image1, angle, xout, yout, order = 3,\
    #mode = 'constant', cval = 0.0,prefilter = False)
    image2 = shrotate(image1, angle, xin, yin, order = 3,\
    mode = 'constant', cval = 0.0,prefilter = False)
    return image1,image2
def bend180numpy(image,x,y,pa):
    """Bends an image over an axis, using numpy.

    Rotates the image by pa degrees around (x, y), then mirrors row indices
    about y to "fold" the rotated image back onto the original grid.
    Returns (imagerot, imagebend).
    """
    # IMPORT STUFF
    from algorithms_II import shrotate
    # END IMPORT
    angle = pa
    # shrotate expects 1-based coordinates.
    xin = x + 1. ; yin = y + 1. # RIGHT?
    imagerot = shrotate(image, angle, xin, yin, prefilter = False)
    # Index grids covering every pixel (image == image is an all-True mask).
    indexes = num.where(image == image)
    y_indexes = indexes[0].copy()
    # Reflect row indices about the (rounded) fold row y.
    y_indexes = 2* num.around(num.array([y]))[0] - y_indexes
    # Keep only reflected rows that land inside the image.
    # NOTE(review): the upper bound uses len(indexes[0]) (the total pixel
    # count) where the number of rows looks intended — confirm.
    inside = num.where((y_indexes > 0) & (y_indexes < len(indexes[0])))
    bindexes = (y_indexes[inside],indexes[1][inside].copy())
    oindexes = (indexes[0][inside].copy(),indexes[1][inside].copy())
    # NOTE(review): dtype='Float32' is the old Numeric spelling; modern
    # numpy expects 'float32' and rejects this string.
    imagebend = num.zeros(image.shape,dtype='Float32')
    imagebend[bindexes] = imagerot[oindexes]
    return imagerot, imagebend
def shrotate(input, angle, xin, yin, xout = None, yout = None,
axes = (-1, -2), reshape = False, output_type = None,
output = None, order = 3, mode = 'constant', cval = 0.0,
prefilter = True):
import math
"""Rotate and shift an array around a center.
Adapted from rotate in numpy, to include a rotation center
defined by (xin,yin) and and optional output center (xout,yout)
F Menanteau, JHU. May 2005.
The array is rotated in the plane definde by the two axes given by
the axes parameter using spline interpolation of the requested
order. The angle is given in degrees. Points outside the
boundaries of the input are filled according to the given
mode. The output type can optionally be given. If not given it is
equal to the input type. If reshape is true, the output shape is
adapted so that the input array is contained completely in the
output. Optionally an output array can be provided that must match
the requested output shape and type. The parameter prefilter
determines if the input is pre-filtered before interpolation, if
False it is assumed that the input is already filtered.
NOTE (by Azzollini): The angle is clock-wise.
"""
input = numpy.asarray(input)
angle = numpy.pi / 180 * angle
if axes[0] < axes[1]:
a1 = axes[0]
a2 = axes[1]
else:
a1 = axes[1]
a2 = axes[0]
oshape = list(input.shape)
ix = input.shape[a1]
iy = input.shape[a2]
if reshape:
# Fix, we now take abs value to avoid crash, when giving
# negative integer values
ox = abs(ix * math.cos(angle) + iy * math.sin(angle) + 0.5)
oy = abs(iy * math.cos(angle) + ix * math.sin(angle) + 0.5)
ox = int(ox)
oy = int(oy)
oshape[a1] = ox
oshape[a2] = oy
else:
ox = oshape[a1]
oy = oshape[a2]
m11 = math.cos(angle)
m12 = math.sin(angle)
m21 = -math.sin(angle)
m22 = math.cos(angle)
matrix = numpy.identity(input.rank, type = numpy.float64)
matrix[a1, a1] = m11
matrix[a1, a2] = m12
matrix[a2, a1] = m21
matrix[a2, a2] = m22
# Fix the indices to start from 1 rather than zero
xin = xin - 1
yin = yin - 1
# Use inputs if output centers not defined
if xout and yout:
xout = xout - 1
yout = yout - 1
else:
xout = xin
yout = yin
offset = numpy.zeros((input.rank,), dtype = numpy.float64)
# offset is just xo,yo
offset[a1] = yin
offset[a2] = xin
# Multiply it by the rotation matrix
offset = numpy.matrixmultiply(matrix, offset)
# New center
tmp = numpy.zeros((input.rank,), dtype = numpy.float64)
tmp[a1] = yout
tmp[a2] = xout
offset = tmp - offset
return numpy.nd_image.affine_transform(input, matrix, offset, oshape,\
output_type,output, order, mode, cval, prefilter)
|
# Package init: central configuration of the REST APIs.
# The default API setup, and the documentation for each namespace, is wired
# up here.
from flask_restplus import Api
# Import the Namespaces created in each method module
from src.methods.items import service as items_namespace
from src.methods.warehouses import service as warehouses_namespace
from src.methods.stocks import service as stock_namespace
api_config = Api(
    title='Cargamos Warehouses',
    description='Sistema de inventario de almacenes cargamos.'
)
# Register every namespace on the shared Api object.
api_config.add_namespace(items_namespace)
api_config.add_namespace(warehouses_namespace)
api_config.add_namespace(stock_namespace)
# noinspection DuplicatedCode
class BinMinHeap:
def __init__(self):
self.heap = [0]
self.length = 0
def __len__(self):
return self.length
# Easiest and most efficient way to add an element? Append it!
# Good news about appending: guarantees that we will maintain the complete tree property
# Bad news: appending will probably violate the heap structure property.
# Best news: we can write a method that will allow us to regain the heap structure property by
# comparing the newly added item with its parent! (see `current._sift_up()`)
def insert(self, item):
"""This is our client-facing method for inserting a node into the tree.
Add node into heap, fix current.size, then let _sift_up() do the heavy lifting
to position the new element correctly.
"""
self.heap.append(item)
self.length += 1
self._sift_up(self.length)
# place the element at given index in its correct position in the tree
def _sift_up(self, index):
"""This method compares the item at the given index with its parent node and re-adjusts the tree if necessary.
This method when called during `current.insert()` takes the newly added item and pushes it UP the tree until
the heap property is successfully preserved in the tree. If the newly added item is very small (in the case
of a MinHeap), we might have to swap it up several levels — potentially all the way up until it hits the top.
Here is where our wasted element [0] is important. Notice that we can compute the parent of any node via
integer division (aka floor division). The index of the current node divided by 2 (then rounded down) is the
index of the parent node!
"""
# note: double slash is integer division (or "floor division")
# divide operands then round result DOWN
# 5 / 2 = 2.5
# 5 // 2 = 2
while index // 2 > 0: # while the given index has a parent:
if self.heap[index] < self.heap[index // 2]: # if current node is less than its parent
# then swap child with parent
tmp = self.heap[index // 2] # make a copy of parent node
self.heap[index // 2] = self.heap[index] # assign current node to parent location
self.heap[index] = tmp # place the copy of parent node in its child's location (idx.e., index)
index //= 2 # shorthand for `idx = idx // 2`
# place the element at given index in its correct position in the tree
def _sift_down(self, idx):
"""Re-balance the tree by shifting the element at given index down the tree until heap order
property is preserved.
This method takes the node at the given index and first looks to see if that node has children. If it does,
check to see if one child is less than the other.
"""
while (idx * 2) <= self.length: # while element has at least one child
min_child_idx = self.min_child(idx) # get the index of the smaller child
# if current element is larger than its own child, switch child with parent
if self.heap[idx] > self.heap[min_child_idx]:
# the following three lines swap the element at idx with its smallest child
# since its child is smaller than itself.
temp = self.heap[idx] # make a temp copy of current element
self.heap[idx] = self.heap[min_child_idx] # assign the current placement to smaller child
self.heap[min_child_idx] = temp # assign temp (current element) to child
idx = min_child_idx
    # return minimum element in constant time O(1)!
    def get_min(self):
        """Return (without removing) the smallest element, stored at heap[1]."""
        return self.heap[1]
    # delete the smallest node, then re-balance the tree
    def del_min(self):
        """Remove and return the smallest element (the root at heap[1]).

        Classic heap delete: overwrite the root with the last element,
        shrink the heap, then sift the new root down to its proper slot.
        """
        return_val = self.heap[1]  # the minimum we will hand back
        self.heap[1] = self.heap[self.length]  # move the last element to the root
        self.length -= 1  # logical size shrinks before the physical pop
        self.heap.pop()  # drop the now-duplicated last slot
        self._sift_down(1)  # restore heap order from the root down
        return return_val
    # given the index of the parent, find the smaller of that parent's two children
    def min_child(self, idx):
        """Return the index of the smaller child of the node at *idx*.

        Assumes at least a left child exists (callers check
        idx * 2 <= self.length before calling).
        """
        if idx * 2 + 1 > self.length:  # right child doesn't exist
            return idx * 2  # return left child index
        elif self.heap[idx * 2] < self.heap[idx * 2 + 1]:  # left child is smaller
            return idx * 2  # return left child index
        else:  # right child exists AND is the smaller of the two
            return idx * 2 + 1  # return right child index
    # given a list, build a new heap accordingly
    def build_heap(self, list_in):
        """Replace the heap contents with a heap built from *list_in*.

        Bottom-up heapify: start at the last internal node (len // 2) and
        sift each node down toward the leaves — O(n) overall.
        """
        idx = len(list_in) // 2
        self.length = len(list_in)
        self.heap = [0] + list_in[:]  # copy of the input, prefixed by the index-0 sentinel
        while idx > 0:
            self._sift_down(idx)
            idx -= 1
# Driver code to test BinMinHeap
bh = BinMinHeap()
bh.build_heap([9, 5, 6, 2, 3])
print(bh.del_min())  # 2 — the minimum of the initial list
for i, el in enumerate(bh.heap):
    print(f"{i}, {el}")
bh.insert(4)
print(f"min {bh.get_min()}")
bh.insert(3)
for i, el in enumerate(bh.heap):
    print(f"{i}, {el}")
# drain four elements; they come out in ascending order
print(bh.del_min())
print(bh.del_min())
print(bh.del_min())
print(bh.del_min())
|
import os
import pty
import select
import shlex
import shutil
import pyte
from cobra_py import rl
from cobra_py.kbd_layout import read_xmodmap
# TODO:
# * mouse support
# * generalize keyboard support for screens/layers
# Codes for ctrl+keys
def ctrl_key(char: bytes):
    """Map a lowercase ASCII letter to its Ctrl control byte.

    For b'a'..b'z' return the corresponding C0 control code
    (letter & 0x1f); anything else passes through unchanged.
    """
    code = char[0]
    if code <= 96 or code >= 123:
        return char
    return chr(ord(char) & 31).encode("utf8")
# Color palette
# Maps pyte color names to raylib color constants; "cyan" has no rl
# constant here, so it is spelled out as an explicit RGBA tuple.
_colors = {
    "black": rl.BLACK,
    "red": rl.RED,
    "green": rl.GREEN,
    "brown": rl.BROWN,
    "blue": rl.BLUE,
    "magenta": rl.MAGENTA,
    "cyan": (0, 255, 255, 255),
    "white": rl.WHITE,
}
def parse_color(rgb):
    """Convert a 6-digit hex string 'rrggbb' into an opaque RGBA tuple."""
    channels = tuple(int(rgb[i:i + 2], 16) for i in range(0, 6, 2))
    return channels + (255,)
class Terminal(pyte.HistoryScreen, rl.Layer):
    """A simple terminal with a graphical interface implemented using Raylib.

    Combines pyte's terminal emulation (HistoryScreen) with an rl.Layer
    drawing surface: a shell runs in a pty, its output is fed into the
    pyte byte stream, and dirty rows are repainted into the layer texture.
    """
    # keyboard modifier state flags
    ctrl = False
    shift = False
    alt = False
    alt_gr = False
    # True while the left mouse button is held down
    mouse_pressed = False
    # file object wrapping the pty master fd; None until _spawn_shell runs
    p_out = None
    # last cell the cursor was drawn at, so update() can repaint it
    last_cursor = (-1, -1)
    # Ideally this should change when we get the SGR switch escape sequence
    # but Pyte doesn't support that yet
    mouse_enabled = False
    def __init__(self, screen: rl.Screen, enabled: bool = True, cmd: str = "bash"):
        """Create terminal.

        :param screen: rl.Screen providing font metrics and pixel size.
        :param enabled: whether the layer starts enabled.
        :param cmd: command to run in the terminal.
        """
        rl.Layer.__init__(self, screen, enabled=enabled)
        self.text_size = screen.text_size
        self.font = screen.font
        # terminal geometry in character cells, derived from pixel size
        self.rows = int(self._screen.height // self.text_size.y)
        self.columns = int(self._screen.width // self.text_size.x)
        pyte.HistoryScreen.__init__(self, self.columns, self.rows)
        self._init_kbd()
        self._spawn_shell(cmd)
    def _init_kbd(self):
        """Load the keycode -> bytes mapping from the X11 xmodmap."""
        self.keymap = read_xmodmap()
    def write_process_input(self, data):
        """pyte callback: forward terminal responses back to the shell."""
        if self.p_out is not None:
            self.p_out.write(data.encode("utf-8"))
    def _spawn_shell(self, cmd):
        """Fork a pty running *cmd* and hook its master fd up to pyte."""
        self.stream = pyte.ByteStream(self)
        cmd = shlex.split(cmd)
        cmd_path = shutil.which(cmd[0])
        p_pid, master_fd = pty.fork()
        if p_pid == 0:  # Child process
            os.execvpe(
                cmd_path,
                cmd,
                env=dict(
                    TERM="xterm",
                    COLUMNS=str(self.columns),
                    LINES=str(self.rows),
                    LC_ALL="en_US.UTF-8",
                    PATH="/usr/bin:/bin",
                ),
            )
        # Parent: unbuffered read/write handle on the pty master
        self.p_out = os.fdopen(master_fd, "w+b", 0)
    def set_margins(self, *args, **kwargs):
        # See https://github.com/selectel/pyte/issues/67
        kwargs.pop("private", None)
        return super().set_margins(*args, **kwargs)
    def mouse_event(self):
        """Translate raylib mouse state into SGR mouse escape sequences."""
        if not self.mouse_enabled:
            return
        # See https://github.com/prompt-toolkit/python-prompt-toolkit/blob/master/prompt_toolkit/key_binding/bindings/mouse.py#L23
        # For examples of decoding these events we are generating
        # terminal mouse coordinates are 1-based character cells
        x = int(rl.get_mouse_x() // self.text_size.x) + 1
        y = int(rl.get_mouse_y() // self.text_size.y) + 1
        if rl.is_mouse_button_pressed(rl.MOUSE_LEFT_BUTTON):
            self.mouse_pressed = True
            # This is using SGR mouse codes, which work on some apps and not in others
            self.p_out.write(b"\x1b" + f"[<0;{str(x)};{str(y)}M".encode("utf-8"))
            # This is "typical" (seems broken)
            # self.p_out.write(b"\x1b" + f"M{chr(32)}{chr(x)}{chr(y)}".encode("utf-8"))
        elif self.mouse_pressed and not rl.is_mouse_button_pressed(
            rl.MOUSE_LEFT_BUTTON
        ):
            self.mouse_pressed = False
            # This is using SGR mouse codes, which work on some apps and not in others
            self.p_out.write(b"\x1b" + f"[<0;{str(x)};{str(y)}m".encode("utf-8"))
            # This is "typical" (seems broken)
            # self.p_out.write(b"\x1b" + f"M{chr(35)}{chr(x)}{chr(y)}".encode("utf-8"))
    def key_event(
        self,
        action: int,
        mods: int,
        ctrl: bool,
        shift: bool,
        alt: bool,
        altgr: bool,
    ):
        """Process one keyboard event, convert to terminal-appropriate data and feed to app.

        :param action: keycode, used to index self.keymap.
        :param mods: 0 means key release (ignored); anything else is a press.
        """
        if mods == 0:  # key release
            return
        elif ctrl:
            letter = ctrl_key(self.keymap[action][0])
        elif alt:
            # ESC prefix encodes the Meta/Alt modifier
            letter = b"\x1b" + self.keymap[action][0]
        elif shift:
            if altgr:
                letter = self.keymap[action][3]
            else:
                letter = self.keymap[action][1]
        else:
            if altgr:
                letter = self.keymap[action][2]
            else:
                letter = self.keymap[action][0]
        self.p_out.write(letter)
    def draw_cell(self, x, y):
        """Paint one character cell (background, glyph, maybe cursor) at x, y."""
        char = self.buffer[y][x]
        if char.fg == "default":
            fg = rl.RAYWHITE
        else:
            fg = _colors.get(char.fg, None) or parse_color(char.fg)
        if char.bg == "default":
            bg = rl.BLACK
        else:
            bg = _colors.get(char.bg, None) or parse_color(char.bg)
        if char.reverse:  # reverse video swaps foreground and background
            fg, bg = bg, fg
        rl.draw_rectangle(
            int(x * self.text_size.x),
            int(y * self.text_size.y),
            int(self.text_size.x),
            int(self.text_size.y),
            bg,
        )
        rl.draw_text_ex(
            self.font,
            char.data.encode("utf-8"),
            (x * self.text_size.x, y * self.text_size.y),
            self.font.baseSize,
            0,
            fg,
        )
        if (x, y) == (self.cursor.x, self.cursor.y):
            # remember where the cursor was drawn so update() can erase it
            self.last_cursor = (x, y)
            rl.draw_rectangle(
                int(self.cursor.x * self.text_size.x),
                int(self.cursor.y * self.text_size.y),
                int(self.text_size.x),
                int(self.text_size.y),
                (255, 255, 255, 100),
            )
    def update(self):
        """Per-frame tick: pump mouse and pty input, then repaint dirty rows."""
        self.mouse_event()
        # Honestly, this could go in a thread and block on select, but who cares
        ready, _, _ = select.select([self.p_out], [], [], 0)
        if ready:
            try:
                data = self.p_out.read(65535)
                if data:
                    self.stream.feed(data)
            except OSError:  # Program went away
                return
        rl.begin_texture_mode(self.texture)
        # repaint previous and current cursor cells, then every dirty row
        self.draw_cell(*self.last_cursor)
        self.draw_cell(self.cursor.x, self.cursor.y)
        for y in self.dirty:
            for x in range(self.columns):  # Can't enumerate, it's sparse
                self.draw_cell(x, y)
        self.dirty.clear()
        rl.end_texture_mode()
|
from flask_apscheduler import APScheduler
from flask import Blueprint, request, jsonify, session
import requests
import socket
import json
import os
# Flask blueprint for the /manifest routes and the scheduler that
# periodically re-registers this device (see bottom of the module).
manifest = Blueprint('manifest', 'manifest' ,url_prefix='/manifest')
scheduler = APScheduler()
def set_manifest():
    """Read manifest_cpu.json, stamp it with this host's name and primary
    IP address, and POST it to the benchmark device registry.

    The primary IP is discovered by connecting a UDP socket toward the
    default gateway (no packets are sent) and reading the local address
    the kernel chose for that route.
    """
    # Fix: the original leaked the file handle; use a context manager.
    with open("manifest_cpu.json", "r") as f:
        data = json.loads(f.read())
    data['host_name'] = socket.gethostname()
    # gateway address is the third field of `ip -4 route show default`
    gw = os.popen("ip -4 route show default").read().split()
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect((gw[2], 0))  # connect() on UDP only selects a route
        data['ip_address'] = s.getsockname()[0]
    finally:
        # Fix: the original leaked the socket on every call.
        s.close()
    url = 'https://ai-benchtest.azurewebsites.net/device'
    r = requests.post(url=url, json=data)
    print(r.text)
# Register the device once at import time, then refresh the registration
# every 10 minutes via APScheduler.
set_manifest()
scheduler.add_job(id ='Scheduled task', func =set_manifest, trigger = 'interval', minutes = 10)
scheduler.start()
|
import hanlp
import json
import torch
from torchtext import data
import argparse
def parse_args(argv=None):
    """Parse command-line options.

    :param argv: optional list of argument strings; None (the default)
                 makes argparse read sys.argv[1:], so existing callers
                 keep working unchanged.
    :returns: argparse.Namespace with ``data`` and ``json_out`` attributes.
    """
    # Fix: the parser was previously bound to a variable named `args`,
    # shadowing the parsed result; use a distinct name.
    parser = argparse.ArgumentParser()
    # network arguments
    parser.add_argument("-data", "--data",
                        default="project3_train.csv", help="data directory(默认在data文件夹下)")
    parser.add_argument("-j_s", "--json_out",
                        default="data/project3_train.json", help="data directory(默认在data文件夹下)")
    return parser.parse_args(argv)
# Tokenize the raw training data and save it as JSON lines for easy reloading.
def participle(train_file, output_file):
    """Tokenize *train_file* (a CSV under data/) with HanLP and append one
    JSON object per example to *output_file*.

    NOTE(review): the output is opened in append mode, so re-running the
    script adds duplicate lines — confirm this is intentional.
    """
    tokenizer = hanlp.load('LARGE_ALBERT_BASE')
    TEXT = data.Field(tokenize=tokenizer)
    LABEL = data.LabelField(dtype=torch.float)
    fields = [('text', TEXT), ('label', LABEL)]
    train_data = data.TabularDataset.splits(
        path='data/',
        train=train_file,
        format='csv',
        fields=fields,
        skip_header=True
    )
    train_data = train_data[0]  # splits() returns a tuple even for one split
    print("splits over")
    # Fix: context manager guarantees the file is closed even on error
    # (the original left the handle open if json.dumps raised).
    with open(output_file, 'a', encoding='utf-8') as json_file:
        print("ready to write")
        for index, text in enumerate(train_data):
            print(index, vars(text))
            data_json = json.dumps(vars(text), ensure_ascii=False)
            json_file.write(data_json + '\n')
            print("write successfully")
if __name__ == '__main__':
    # Entry point: tokenize the configured CSV and dump JSON lines.
    args = parse_args()
    print(args)
    participle(args.data, args.json_out)
|
# coding=utf-8
import time
import abc
from collections import OrderedDict
from ruamel.yaml import dump as ydump, load as yload, RoundTripDumper, resolver, add_constructor, add_representer
from src.meta.abstract import AbstractMeta
from utils.custom_logging import make_logger
from utils.custom_path import Path
logger = make_logger(__name__)
_yaml_mapping = resolver.BaseResolver.DEFAULT_MAPPING_TAG
def odict_represent(dumper, data):
    """Represent an OrderedDict as a plain YAML mapping, preserving order.

    Fix: ``iteritems()`` is Python 2 only — OrderedDict has no such method
    on Python 3 (this file otherwise uses Python 3 syntax), so the original
    raised AttributeError; ``items()`` works on both.
    """
    return dumper.represent_dict(data.items())
def odict_construct(loader, node):
    """Construct an OrderedDict from a YAML mapping node (keeps key order)."""
    return OrderedDict(loader.construct_pairs(node))
# Route every YAML mapping through OrderedDict so round-trips keep ordering.
add_representer(OrderedDict, odict_represent)
add_constructor(_yaml_mapping, odict_construct)
class Meta(AbstractMeta):
    """Ordered key/value store persisted as a YAML file.

    Subclasses declare the expected file header and current schema version
    and implement `meta_version_upgrade`; `read()` transparently upgrades
    older files and re-writes them in the new format.
    """
    @property
    @abc.abstractmethod
    def meta_header(self):
        """Expected value of the 'meta_header' key stored in the file."""
    @property
    @abc.abstractmethod
    def meta_version(self):
        """Schema version this class writes; older files are upgraded to it."""
    @abc.abstractmethod
    def meta_version_upgrade(self, from_version):
        """Upgrade self.data in place from *from_version*; return truthy on success."""
    def __init__(self, path: str or Path, init_dict: OrderedDict = None, auto_read=True, encrypted=False):
        self.free = True  # cooperative lock flag, see wait_for_lock()
        self.encrypt = encrypted
        if init_dict is None:
            self._data = OrderedDict()
        else:
            if not isinstance(init_dict, OrderedDict):
                raise TypeError('expected a OrderedDict, got "{}"'.format(type(init_dict)))
            self._data = init_dict
        self._values, self._keys, self._items = None, None, None
        self._init_views()
        if isinstance(path, Path):
            pass
        elif isinstance(path, str):
            path = Path(path)
        else:
            raise TypeError('expected a Path or a str, got: {}'.format(type(path)))
        self._path = path
        if auto_read:
            self.read()
    @property
    def path(self) -> Path:
        return self._path
    @path.setter
    def path(self, value: str or Path):
        if isinstance(value, Path):
            pass
        elif isinstance(value, str):
            value = Path(value)
        else:
            raise TypeError('expected Path or str, got: {}'.format(type(value)))
        self._path = value
    # noinspection PyArgumentList
    def _init_views(self):
        # cache live dict views; they track later mutation of self._data
        self._values = self._data.values()
        self._keys = self._data.keys()
        self._items = self._data.items()
    @property
    def data(self):
        return self._data
    def get_context(self):
        return self.data
    @data.setter
    def data(self, value: OrderedDict):
        if not isinstance(value, OrderedDict):
            raise TypeError('expected a OrderedDict, got "{}"'.format(type(value)))
        self._data = value
        self._init_views()
    def __len__(self):
        # noinspection PyTypeChecker
        return len(self.data)
    def __iter__(self):
        for k in self.keys():
            yield k
    def __contains__(self, x):
        # noinspection PyArgumentList
        return self._data.__contains__(x)
    def __delitem__(self, key, _write=False):
        del self.data[key]
        if _write:
            self.write()
    def __setitem__(self, key, value, _write=False):
        self.data[key] = value
        if _write:
            self.write()
    def __getitem__(self, key):
        # NOTE: returns None for missing keys instead of raising KeyError
        return self._data.get(key, None)
    def __str__(self):
        # noinspection PyArgumentList
        return self.data.__str__()
    def __repr__(self):
        return '{}: {}'.format(self.__class__.__name__, self.data.__repr__())
    def get(self, key, default=None):
        return self._data.get(key, default)
    def keys(self):
        return self._keys
    def values(self):
        return self._values
    def items(self):
        return self._items
    def debug(self, txt: str):
        """Log *txt* at DEBUG level, prefixed with this file's path."""
        logger.debug('{}: {}'.format(self.path.abspath(), txt))
    def exception(self, txt: str):
        """Log *txt* with the active traceback, prefixed with the file path.

        Fix: this was a copy-paste of debug() calling logger.debug, which
        silently dropped the traceback of the exception being handled.
        """
        logger.exception('{}: {}'.format(self.path.abspath(), txt))
    def dump(self):
        """Serialize self.data to a YAML string (round-trip dumper)."""
        return ydump(self.data, Dumper=RoundTripDumper, default_flow_style=False)
    def load(self, data):
        """Replace self.data with the parsed YAML payload."""
        self.data = yload(data)
    def read(self):
        """Load the metadata file, validate its header and upgrade old versions.

        Empty files are deleted; after a successful version upgrade the file
        is re-written in the new format.
        """
        self.wait_for_lock()
        meta_updated = False
        try:
            if self.path.exists():
                if self.path.getsize() == 0:
                    self.debug('{}: removing existing empty file: {}'.format(self.__class__.__name__, self.path))
                    self.path.remove()
                    return
                try:
                    if self.encrypt:
                        self.load(self.path.bytes())
                    else:
                        self.load(self.path.text(encoding='utf8'))
                except ValueError:
                    raise ValueError('{}: metadata file corrupted'.format(self.path.abspath()))
                else:
                    try:
                        if not self.data['meta_header'] == self.meta_header:
                            raise TypeError('meta header mismatch, expected: "{}", got: "{}" on file: {}'.format(
                                self.meta_header, self.data['meta_header'], self.path.abspath()
                            ))
                        else:
                            del self.data['meta_header']
                    except KeyError:
                        pass
                    meta_updated = self.data['meta_version'] < self.meta_version
                    while self.data['meta_version'] < self.meta_version:
                        current_version = self.data['meta_version']
                        next_version = self.data['meta_version'] + 1
                        logger.debug('upgrading meta from version "{}"'.format(current_version))
                        if not self.meta_version_upgrade(current_version):
                            raise RuntimeError('failed to upgrade metadata to version "{}"'.format(next_version))
                        else:
                            logger.debug('successfully upgraded meta to version "{}"'.format(next_version))
                        self.data['meta_version'] = next_version
        except OSError:
            self.exception('error while reading metadata file')
        finally:
            self.free = True
        if meta_updated:
            self.write()
    def write(self):
        """Persist self.data (plus the header/version keys) to the file."""
        # noinspection PyTypeChecker
        if len(self._data) == 0:
            raise ValueError('no data to write')
        self.wait_for_lock()
        self.data['meta_header'] = self.meta_header
        self.data['meta_version'] = self.meta_version
        try:
            if self.encrypt:
                # NOTE(review): dump() returns a str; write_bytes(str) looks
                # suspicious — confirm this Path.write_bytes encodes it.
                self.path.write_bytes(self.dump())
            else:
                self.path.write_text(self.dump(), encoding='utf8')
        except OSError:
            self.exception('error while writing metadata to file')
        finally:
            self.free = True
    def wait_for_lock(self):
        """Spin until the cooperative `free` flag is set, then claim it."""
        i = 0
        while not self.free:
            time.sleep(0.1)
            i += 1
            if i == 10:
                self.debug('waiting for resource lock')
                i = 0
        self.free = False
    @staticmethod
    def read_header(path):
        """Read a metadata file and return its 'header' value.

        NOTE(review): write() stores the key as 'meta_header', not
        'header' — confirm which key this is meant to read.
        """
        path = Path(path)
        data = yload(path.text(encoding='utf8'))
        return data['header']
def read_meta_header(meta_file_path: Path or str):
    """Module-level convenience wrapper around Meta.read_header."""
    return Meta.read_header(meta_file_path)
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
counter = 100  # integer variable
miles = 1000.0  # float
name = "John"  # string
# Fix: the original used the Python-2-only `print x;` statement form,
# which is a SyntaxError on Python 3. Single-argument print() calls
# behave identically on both Python 2 and 3.
if counter == 100:
    print(counter)
else:
    print(miles)
print(name)
# Expected output
"""
100
John
"""
|
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 6 11:00:28 2018
@author: 鑫鑫玉川
"""
"""
名字:Mod(a,b,n),a,b表示列表,长度都为n
功能:返回a模b的列表
"""
def Mod(a,b,n):
    """Return a mod b for GF(2) polynomials given as length-n bit lists.

    Coefficients are most-significant first. a_len/b_len are the effective
    lengths (degree + 1) found by skipping leading zeros; the remainder is
    computed by long division, XOR-ing the shifted divisor wherever the
    current leading bit is set.
    """
    result=a[:] # work on a copy of a
    d=b[:]
    a_len=n
    b_len=n
    # effective length of a: skip leading zero coefficients
    for i in range(n):
        if(a[i]!=0):
            break
        a_len-=1
    # effective length of b
    for i in range(n):
        if (b[i]!=0):
            break
        b_len-=1
    # print(a_len, b_len)  # debug
    if (a_len >= b_len):
        for i in range(b_len,a_len):
            d.append(0)
        # subtract (XOR) the shifted divisor wherever the leading bit is 1
        for i in range(a_len-1,b_len-2,-1):
            # print(n-i-1, a_len, b_len, n, a, b, result)  # debug
            if(result[n-i-1]==1):
                result=AddSub(result,d[i-b_len+1:n+i-b_len+1],n)
    # print("Mod", result)  # debug
    return result
"""
名字:AddSub(a,b,n),a,b为列表,长度为n
功能:返回a减b或者a加b
说明:无论是加运算,还是减运算,在模2运算中算法相同
"""
def AddSub(a,b,n):
    """GF(2) polynomial add/subtract: XOR the first n coefficients.

    In characteristic-2 arithmetic addition and subtraction coincide,
    so one helper serves both operations.
    """
    return [a[i] ^ b[i] for i in range(n)]
"""
名字:Multiply(a,b,n),a,b为列表,长度为n
功能:返回a乘b的元素
"""
def Multiply(a, b, n, mod):
    """Return the GF(2) polynomial product a*b reduced by `mod`.

    Shift-and-add multiplication over bit lists (most-significant first):
    for every set bit of b, scanned from the lowest-order coefficient,
    XOR the running result with the shifted copy of a; after each shift,
    reduce by `mod` to stay inside the field.

    Fix: the original padded the caller's list `b` in place via
    insert(0, 0); operate on a local copy so arguments stay untouched.
    """
    c=a[:]
    b=b[:]  # defensive copy: padding below must not leak to the caller
    while (n<len(mod)):
        c.insert(0,0)
        b.insert(0,0)
        n+=1
    result=[0 for i in range(n)]
    for i in range(n-1, -1 , -1):
        if ( b[i]==1 ):
            result = AddSub(result,c,n)
        c.append(0)
        c.pop(0)  # equivalent to shifting left by one coefficient
        c = list(Mod(c, mod, n))  # reduce the shifted copy modulo the field polynomial
    return result
"""
名字:Divide(a,b,n),a,b为列表,长度为n
功能:返回a整除b后的多项式
"""
def Divide(a , b , n):
    """Return the quotient a // b for GF(2) polynomials as length-n bit lists.

    Same long-division loop as Mod(), but records the quotient bits in
    `result` instead of returning the remainder.
    """
    c=a[:]
    d=b[:]
    result=[0 for i in range(n)]
    a_len=n
    b_len=n
    # effective length of a: skip leading zero coefficients
    for i in range(n):
        if(a[i]!=0):
            break
        a_len-=1
    # effective length of b
    for i in range(n):
        if (b[i]!=0):
            break
        b_len-=1
    if (a_len >= b_len):
        for i in range(b_len,a_len):
            d.append(0)
        for i in range(a_len-1,b_len-2,-1):
            if(c[n-i-1]==1):
                result[n-i+b_len-2]=1  # set the corresponding quotient bit
                c=AddSub(c,d[i-b_len+1:n+i-b_len+1],n)
    # print(c, result)  # debug
    return result
"""
名字:getRes(a,b,n,mod),a,b为列表,长度为n
功能:求多项式a关于b的逆元
原扩展欧几里得算法
def OGLD_pro(a,b):
if (b==0):
return {'d':a,'x':1,'y':0}
else:
t=OGLD_pro(b,a%b)
temp=t['x']
t['x']=t['y']
t['y']=temp-int(a/b)*t['y']
return t
"""
def getRes(a,b,n,mod):
    """Extended Euclidean algorithm over GF(2) polynomials.

    Returns {'d': gcd, 'x': ..., 'y': ...} with a*x + b*y = d; when
    gcd(a, mod) = 1, getRes(a, mod, n, mod)['x'] is the inverse of a.
    """
    if (max(b)==0):  # b is the zero polynomial: recursion base case
        x=[0 for i in range(n-1)]
        x.append(1)  # x = the constant polynomial 1
        # print("end", x)  # debug
        return {'d':a[:],'x':x,'y':[0 for i in range(n)]}
    else:
        t=getRes(b,Mod(a,b,n),n,mod)
        # print(t)  # debug
        temp=list(t['x'])
        t['x']=list(t['y'])
        # y = x_prev - (a // b) * y   (subtraction is XOR in GF(2))
        t['y']=AddSub(temp,Multiply(Divide(a,b,n),t['y'],n,mod),n)
        return t
"""
名字:ExpOfMod(a,k,myMod,n):列表a,k为指数,myMod为要模的多项式,n为列表长度
功能:模的指数运算,重复平方算法
"""
def ExpOfMod(a,k,mod,n):
    """Compute a**k modulo `mod` by repeated squaring.

    a is a length-n bit list (most-significant first); returns the
    length-n result. k == 0 yields the constant polynomial 1.
    """
    result=[0 for i in range(n-1)]
    result.append(1)  # result = 1
    if ( k!= 0):
        A = a[:]
        if ( k%2 == 1):
            result = a[:]
        k = k //2  # note: integer division
        while (k != 0):
            A = Multiply(A,A,n,mod)  # square
            if (k%2==1):
                result = Multiply(A,result,n,mod)
            k = k // 2
        # print(result, k)  # debug
    return result
"""
名字:transList(lst),lst为列表
功能:用于把列表中二进制数转化为10进制
"""
def transList(lst):
    """Interpret lst as big-endian binary digits and return the integer."""
    value = 0
    for bit in lst:
        value = value * 2 + bit
    return value
# List elements represent binary digits from the high bit down to the low bit.
# Lists support indexing, slicing, concatenation, repetition and membership tests.
"""
名字:基准,即要模得多项式,用多项式基表示
功能:作为全局变量
注意:多项式一定不能拆分,即不可约多项式
"""
myMod = [1,0,1,1,0,0,0,1,1] # the modulus polynomial: f(x)=x^8 + x^6 + x^5 + x + 1 (must be irreducible)
testMod =[1,0,0,1,1] # like myMod, for testing: f(x)=x^4 + x + 1
a=[0,0,0,0,0,0,1,1,1] # f(x)=x^2+x+1
b=[0,1,0,1,0,1,1,0,1] # f(x)=x^7+x^5+x^3+x^2+1
print("a=",a)
print("b=",b)
print("a+b=a-b=",AddSub(a,b,len(a))) # sum/difference (identical in GF(2))
print("b/a=",Divide(b,a,len(a))) # quotient
print("a*b=",Multiply(a,b,len(a),myMod)) # product modulo myMod
t=getRes(a,myMod,9,myMod)
a_=t['x'] # the inverse of a
print("a得逆元为:",a_) # print the inverse element
base = [0,0,0,0,0,0,0,1,0] # the primitive element (0x02)
ExpList=[0 for i in range(256)] # exponent table, zero-initialised
LogList=[0 for i in range(256)] # log table, zero-initialised
LogList[0]=255
for i in range(255): # fill the exponent/log tables
    index=transList(ExpOfMod(base,i,myMod,len(base)))
    ExpList[i]=index
    LogList[index]=i
print("\n\t\t\t\t\t指数对数表\n\n")
# The three inner loops below print one 16-entry row at a time: first the
# row of indices, then (02)^i values, then log lookups. Each sub-row
# rewinds the shared counter i by 15 so all three walk the same range.
i=0
while(i<256):
    while(True):
        if (i%16==0):
            print("{:8s}".format("序号"),end=' ')
        print("{:4d}".format(i),end=' ')
        if ((i+1)%16==0):
            print('\n')
            i=i-15
            break;
        i+=1
    while(True):
        if (i%16==0):
            print("{:9s}".format("(02)^i"),end=' ')
        print("{:4d}".format(ExpList[i]),end=' ')
        if ((i+1)%16==0):
            print('\n')
            i=i-15
            break;
        i+=1
    while(True):
        if (i%16==0):
            print("{:7s}".format("log(02)^i"),end=' ')
        print("{:4d}".format(LogList[i]),end=' ')
        if ((i+1)%16==0):
            print('\n\n')
            i+=1
            break;
        i+=1
|
#_*_coding:utf-8_*_
print 打飞机游戏
加载背景音乐
播放背景音乐(设置单曲循环)
加载我方飞机
interval=0
while True:
if 用户点击关闭按钮
退出程序
interval+=1
if interval=50
加载敌方飞机
interval=0
敌方飞机移动
屏幕刷新
if 用户鼠标发生移动
鼠标位置==我方飞机的位置
屏幕刷新
elseif 我方飞机的位置==敌方飞机的位置
播放撞机音乐
加载飞机爆炸的图片
print 游戏结束
关闭背景音乐(设置淡出)
|
# -*- coding: utf-8 -*-
# Demonstration of Python set basics: membership, add/update, removal.
studentsSet = {"ahmet" , "ali" , "Erkan"}
print(studentsSet)
# iteration order over a set is arbitrary
for students in studentsSet:
    print(students)
print("erkan" in studentsSet)  # False: membership is case-sensitive ("Erkan" != "erkan")
if "ali" in studentsSet:
    print("listede var")
studentsSet.add("melis")
print(studentsSet)
studentsSet.update(["merih","irrem","cansu"])  # add several elements at once
print(studentsSet)
print(len(studentsSet))
studentsSet.remove("ali")  # raises KeyError if the element is absent
print(len(studentsSet))
studentsSet.discard("ali")  # silently ignores a missing element
studentsSet.pop()  # removes and returns an arbitrary element
studentsSet.clear()
print(type(studentsSet))
print(len(studentsSet))  # 0 after clear()
print(studentsSet)  # an empty set prints as set()
|
'''
Created on 6 Aug 2014
@author: michael
'''
from mjb.dev.game_utility.shapes.shape import Shape
from mjb.dev.game_utility.shapes.handlers.drawable_shape_handler import DrawableShapeHandler
from mjb.dev.game_utility.capabilities.drawable import Drawable
class DrawableRectangleHandler(DrawableShapeHandler):
    '''
    Construct a new rectangle drawer to draw a rectangle on the screen!
    TODO: complication in the immutability of the shape and its size when setting the rectangle.
    We have to change the shape used internally independently
    '''
    class _Rectangle_Shape(Shape):
        '''
        The shape used by the rectangle handler
        '''
        def __init__(self,rect):
            '''
            Construct a new rectangle shape
            @param rect: the rectangle as (x_min,y_min,width,height)
            '''
            #Make the shape...
            Shape.__init__(self, cache_depth=0)
            (_,_,width,height) = rect
            self.__size = (width,height)
        #Override
        def get_size(self):
            return self.__size
        #Override
        def _calculate_shape_to_override(self,precision,include_collider):
            # An axis-aligned rectangle needs no computed outline or collider here.
            return ([],None)
    def __init__(self,rect,colour,depth=0,visible=True):
        '''
        Construct a new rectangle to draw on the screen. You should hold onto this object
        to change its position etc, so that it maintains properties like its depth and colour
        (+implementation stuff...) for you.
        @param rect: the rectangle to draw (x_min,y_min,width,height)
        @param colour: the colour to solid fill the rectangle with
        @param depth: the depth on screen this rectangle should be drawn at. Default 0,
        higher values push the object further back
        @param visible: True by default - whether or not you want this rectangle to appear
        on screen.
        @note: the rectangle drawer automatically attaches a drawable capability to itself.
        '''
        #Make myself as a shape handler...
        (x_min,y_min,_,_) = rect
        DrawableShapeHandler.__init__(self, DrawableRectangleHandler._Rectangle_Shape(rect), depth, (x_min,y_min))
        #Remember everything...
        self.__colour = colour
        #Attach to a drawable handler... (starts disabled; enabled below if visible)
        self.__drawable_capability = Drawable(self,None,self.redraw,enabled=False)
        if visible:
            #Register with the picture handler so that I will appear!
            self.__drawable_capability.enable()
    def redraw(self, top_left, surface):
        '''
        Redraw this rectangle!
        @param top_left: the top left coordinate of the surface the rectangle is being drawn on
        @param surface: the surface to draw on
        '''
        (x_offset,y_offset) = top_left
        (x_min,y_min,x_max,y_max) = self.get_bounding_rectangle()
        #Redraw onto the surface... (translate into surface-local coordinates)
        target_rect = (x_min-x_offset,y_min-y_offset,x_max-x_min,y_max-y_min)
        surface.fill(self.__colour,target_rect)
        #Done!
    def set_visible(self, visible=True):
        '''
        @param visible: Default True, whether or not the rectangle should appear on the screen
        '''
        # only toggle the capability when the state actually changes
        if self.__drawable_capability.is_enabled()!=visible:
            if visible:
                #Appear!
                self.__drawable_capability.enable()
            else:
                #Disappear!
                self.__drawable_capability.disable()
    def set_rectangle(self,rect):
        '''
        @param rect: set the new rectangular area to be covered by this rectangle (separately from the colour)
        The rect should be in the form of (x_min,y_min,width,height)
        '''
        (x_min,y_min,width,height) = rect
        if (self.get_shape().get_size()==(width,height)):
            #Keep the old shape... (shapes are immutable, but moving is cheap)
            if (x_min,y_min)==self.get_top_left():
                return #nothing to do
            self.set_top_left((x_min,y_min))
        else:
            # size changed: a new immutable shape is required
            self.set_shape(DrawableRectangleHandler._Rectangle_Shape(rect), top_left=(x_min,y_min))
    def set_colour(self,colour):
        '''
        @param colour: the colour the rectangle should be filled with
        '''
        if self.__colour!=colour:
            #Change the appearance (deferred via the handler's repaint machinery)
            self.change_appearance(self.__set_colour, colour)
    def __set_colour(self, colour):
        '''
        The deferred set colour method
        @param colour: the colour the rectangle should be turned into
        '''
        self.__colour = colour
    def move_rectangle(self,x_move=0,y_move=0):
        '''
        @param x_move: the amount to move in the x coordinate along the screen
        @param y_move: the amount to move in the y coordinate along the screen
        '''
        (x_min,y_min) = self.get_top_left()
        self.set_top_left((x_min+x_move,y_min+y_move))
    #Get the various properties...
    def get_rectangle(self):
        '''
        @return: the rectangle as (x_min,y_min,width,height) being drawn by this drawer
        '''
        (width,height) = self.get_shape().get_size()
        (x_min,y_min) = self.get_top_left()
        return (x_min,y_min,width,height)
    def get_colour(self):
        '''
        @return: the colour of the rectangle being drawn by this drawer
        '''
        return self.__colour
    def get_visible(self):
        '''
        @return: True iff this drawer is visible on screen (in the sense that it is actually drawing)
        '''
        return self.__drawable_capability.is_enabled()
|
# Generated by Django 2.2.3 on 2019-08-28 22:18
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated migration: add a `date` DateTimeField (defaulting to
    the current time) to the `oglas` model."""
    dependencies = [
        ('sarklo', '0004_auto_20190828_1838'),
    ]
    operations = [
        migrations.AddField(
            model_name='oglas',
            name='date',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
    ]
|
#!/usr/bin/python
import happybase
import struct
import re
import getopt
import sys
from DB.Registry import Registry
def usage():
    """Print the command-line usage summary."""
    # Parenthesised single-argument print behaves identically on Python 2
    # and Python 3, unlike the py2-only statement form used elsewhere here.
    print("index-stats.py [-h] [-H hbhostname]\n")
def HBConnection(host):
    """Open and return a happybase connection to the given HBase host."""
    return happybase.Connection(host)
def load_primary_index_map(reg):
    """Build a {index_number: index_name} map from the registry.

    Registry keys look like ``index.primary.<name>`` with an integer
    value; the value becomes the map key and <name> the map value.
    Keys with non-integer values are ignored.
    """
    km = {}
    for reg_key in reg.get():
        reg_val = reg.get(reg_key)
        # Fix: the original pattern 'index.primary.' left the dots as
        # wildcards, so keys like 'indexXprimary.z' also matched and then
        # crashed on x[2]; escape them with a raw string.
        if re.match(r'^index\.primary\.', reg_key):
            if isinstance(reg_val, int):
                x = reg_key.split('.')
                km[reg_val] = x[2]
    return km
# --- command-line handling (note: this script is Python 2) ---
try:
    opts, args = getopt.getopt(sys.argv[1:], 'hH:')
except getopt.GetoptError, err:
    print str(err)
    usage()
    sys.exit(2)
hbhost = "localhost"
for o, a in opts:
    if o == "-h":
        usage()
        sys.exit(2)
    elif o == "-H":
        hbhost = a
c = HBConnection(hbhost)
registry = Registry(hbhost, False)
num_servers = registry.get('hadoop.num_servers')
if num_servers == None:
    num_servers = 1
primary_index_map = load_primary_index_map(registry)
# For every index_* table, count entries per salt (server bucket) and per
# primary index, then print a summary.
for table in c.tables():
    if re.match('^index_', table):
        print "\nChecking: ", table
        index_entries_per_server = {}
        index_entries_per_primary_index = {}
        th = c.table(table)
        for key, value in th.scan():
            # row-key layout: 2-byte big-endian salt followed by 1-byte data type
            salt, dtype = struct.unpack('>HB', key[0:3])
            if not salt in index_entries_per_server:
                index_entries_per_server[salt] = 0
            if not primary_index_map[dtype] in index_entries_per_primary_index:
                index_entries_per_primary_index[primary_index_map[dtype]] = 0
            index_entries_per_server[salt] = index_entries_per_server[salt] + 1
            index_entries_per_primary_index[primary_index_map[dtype]] = index_entries_per_primary_index[primary_index_map[dtype]] + 1
        print "\tIndex entries per server:"
        for server in index_entries_per_server:
            s = server
            if s > num_servers:
                s = str(server) + "*"
            # NOTE(review): `s` (with the '*' overflow marker) is computed but
            # the print below uses `server` — confirm which was intended.
            print "\t\tserver=", server, " entries=", index_entries_per_server[server]
        print "\tIndex entries per primary index:"
        for p_idx in index_entries_per_primary_index:
            print "\t\tprimary=", p_idx, " entries=", index_entries_per_primary_index[p_idx]
|
#
# MansOS web server - server-side configuration settings
#
from __future__ import print_function
import configfile
import os
# built-in fallback defaults, overridden by server.cfg on load
HTTP_SERVER_PORT = 30000
SERIAL_BAUDRATE = 38400
# global variable
c = configfile.ConfigFile("server.cfg", automaticSections = True)
# default values
c.setCfgValue("port", HTTP_SERVER_PORT)
c.setCfgValue("baudrate", SERIAL_BAUDRATE)
c.setCfgValue("motes", [])
c.setCfgValue("selectedMotes", [])
# in format <port>:<platform>, e.g. /dev/ttyUSB0:telosb
c.setCfgValue("motePlatforms", [])
c.setCfgValue("codeType", "C")
c.setCfgValue("saveToFilename", "")
c.setCfgValue("saveToFilenameOnMote", "")
c.setCfgValue("saveProcessedData", False)
c.setCfgValue("slowUpload", False)
c.setCfgValue("htmlDirectory", "html")
c.setCfgValue("dataDirectory", "data")
c.setCfgValue("mansosDirectory", "../..")
c.setCfgValue("sealBlocklyDirectory", "seal-blockly")
c.setCfgValue("contikiDirectory", "/opt/contiki")
c.setCfgValue("tinyosDirectory", "/opt/tinyos")
c.setCfgValue("createDaemon", False)
c.setCfgValue("serverTheme", "simple")
c.setCfgValue("serverWebSettings", ["serverTheme"])
c.setCfgValue("serverSettingsType", ["[simple, green]"])
# database config
c.selectSection("database")
c.setCfgValue("dbName", "mansosdb")
# NOTE(review): credentials and API keys are hard-coded in source below;
# consider moving them into the config file or environment variables.
c.setCfgValue("dbUsername", "root")
c.setCfgValue("dbPassword", "ln29Tx")
c.setCfgValue("dbHost", "localhost")
c.setCfgValue("senseApiKey", "cJ4Dm_Qb-3stWTWxCJgiFQ")
c.setCfgValue("senseApiFeeds", "light:37012,humidity:37013,temperature:37014")
c.setCfgValue("saveToDB", False)
c.setCfgValue("sendToOpenSense", False)
# user config (passwords stored as unsalted MD5 hashes)
c.selectSection("user")
c.setCfgValue("userDirectory", "user")
c.setCfgValue("userFile", "user.dat")
c.setCfgValue("userAttributes", ["name", "password", "level"])
c.setCfgValue("userAttributesType", ["text", "text", "[1, 9]"])
c.setCfgValue("defaultValues", ["unknown", "5f4dcc3b5aa765d61d8327deb882cf99", "1"]) #password "password"
c.setCfgValue("adminValues", ["admin", "21232f297a57a5a743894a0e4a801fc3", "9"]) #password "admin"
#c.setCfgValue("defaultUserValues", ["user", "5f4dcc3b5aa765d61d8327deb882cf99", "8"]) #password "password"
c.setCfgValue("userWebAttributes", []) #user editable (password is built-in)
c.setCfgValue("adminWebAttributes", ["level"]) #admin editable (reset password is built-in and name is uneditable)
# graph config
c.selectSection("graph")
c.setCfgValue("graphTitle", "Measurements_from_all_motes")
c.setCfgValue("graphYAxis", "Measurements")
c.setCfgValue("graphInterval", 1000)
c.setCfgValue("graphData", ["[all]"])
c.setCfgValue("graphAttributes", ["graphTitle", "graphYAxis", "graphInterval", "graphData"])
# load the config file
try:
    c.load()
except Exception as e:
    # a missing/broken config file is non-fatal: the defaults above apply
    print("Failed to load configuration:")
    print(e)
    pass # let it be...
# required for TinyOS compilation
def setupPaths():
    """Export the TinyOS toolchain environment variables.

    Reads the configured TinyOS install directory and publishes TOSROOT,
    TOSDIR and MAKERULES, prepending the directory to PATH so the build
    tools can be found.
    """
    tinyosPath = c.getCfgValue("tinyosDirectory")
    os.environ['TOSROOT'] = tinyosPath
    os.environ['TOSDIR'] = tinyosPath + '/tos'
    os.environ['MAKERULES'] = tinyosPath + '/support/make/Makerules'
    os.environ['PATH'] = tinyosPath + ":" + os.environ['PATH']
|
from .scenes import Scenes
from src.scenes import SceneBase
from src.utils import load_image
import pygame
from pygame.locals import *
class StartMenu(SceneBase):
    """Main-menu scene: draws Start and Credits buttons onto the scene
    background and dispatches clicks/keys to the scene manager."""
    def __init__(self):
        super().__init__()
        btns_path = './tresenraya/assets/buttons/'
        # NOTE(review): btns_path already ends with '/', so the f-strings
        # below yield a double slash — harmless on POSIX paths.
        start_btn, rect = load_image(f'{btns_path}/start.png')
        credits_btn, credits_rect = load_image(
            f'{btns_path}/credits.png')
        screen = pygame.display.get_surface()
        area = screen.get_rect()
        # centre the start button; pin credits to the bottom centre
        rect.center = area.center
        credits_rect.midbottom = area.midbottom
        self.background.blit(start_btn, rect)
        self.background.blit(credits_btn, credits_rect)
        # keep the rects for hit-testing in process_input()
        self.game_rect = rect
        self.credits_rect = credits_rect
    def process_input(self, scene_manager):
        """Handle one batch of pygame events.

        Returns True when the game should quit, False otherwise; switches
        scenes via scene_manager.update() on Enter or on button clicks.
        """
        for event in pygame.event.get():
            if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
                return True
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_RETURN:
                scene_manager.update(Scenes.GAME)
                return False
            elif event.type == MOUSEBUTTONDOWN:
                mouse_pos = event.pos
                if self.game_rect.collidepoint(mouse_pos):
                    scene_manager.update(Scenes.GAME)
                    return False
                if self.credits_rect.collidepoint(mouse_pos):
                    scene_manager.update(Scenes.CREDITS)
                    return False
        return False
|
# -*- coding: utf-8 -*-
import unittest
from pg_requests.exceptions import TokenError
from pg_requests.operators import And, JOIN
from pg_requests.tokens import Token, TupleValue, CommaValue, StringValue, \
NullValue, FilterValue, DictValue, CommaDictValue
class TokensTest(unittest.TestCase):
def test_token_with_tuple_value_type(self):
token = Token(template='VALUES ({})', value_type=TupleValue)
with self.assertRaises(ValueError) as err:
token.value = 'a'
self.assertIn('must be list or tuple', str(err))
token.value = ('a', 3)
self.assertEqual(token.eval(), ("VALUES (%s, %s)", ('a', 3)),)
def test_token_with_iterable_value(self):
token = Token(template='({})', value_type=CommaValue)
with self.assertRaises(ValueError) as err:
token.value = 'a'
self.assertIn('must be list or tuple', str(err))
token.value = ['a', 'b', 'c']
self.assertEqual(token.eval(), "(a, b, c)")
    def test_token_with_string_value(self):
        """StringValue tokens substitute the raw string into the template."""
        token = Token(template='INSERT INTO {}', value_type=StringValue)
        token.value = 'MyTable'
        self.assertIsInstance(token.value, StringValue, token.value)
        self.assertEqual(token.eval(), "INSERT INTO MyTable")
def test_token_with_dict_value_and_join_types(self):
token = Token(template='{join_type} {table_name}',
value_type=DictValue)
# Check correctness of all join types
for join_type in ('INNER', 'CROSS', 'LEFT_OUTER', 'RIGHT_OUTER',
'FULL_OUTER'):
token.value = dict(join_type=getattr(JOIN, join_type),
table_name='MyTable')
self.assertIsInstance(token.value, DictValue, token.value)
self.assertEqual(
token.eval(), "{} MyTable".format(getattr(JOIN, join_type)))
def test_token_with_null_value(self):
token = Token(template='DEFAULT VALUES', value_type=NullValue)
token.value = "any value shouldn't be appeared"
self.assertIsInstance(token.value, NullValue)
self.assertIsInstance(token.value, NullValue)
self.assertEqual(token.eval(), "DEFAULT VALUES")
def test_token_with_filter_value(self):
token = Token(template='WHERE {}', value_type=FilterValue)
token.value = {'a': 1, 'b__gt': 2, 'c__lt': 3, 'd__gte': 4,
'e__lte': 5, 'f__eq': 'test_eq', 'g__neq': 'test_neq',
'h__in': ['p1', 2], 'i__is': True, 'j__is_not': None}
self.assertIsInstance(token.value.value, And)
sql_template, values = token.eval()
expected_parts = (
"WHERE",
"AND",
"a = %s",
"b > %s",
"c < %s",
"d >= %s",
"e <= %s",
"f = %s",
"g != %s",
"h IN %s",
"i IS %s",
"j IS NOT %s",
)
expected_values = (
1, 2, 3, 4, 5, 'test_eq', 'test_neq', ['p1', 2], True, None)
for part in expected_parts:
self.assertIn(part, sql_template,
'%s is not found in\n%s' % (part, sql_template))
for val in expected_values:
self.assertIn(val, values,
'%s is not found in\n%s' % (val, expected_values))
def test_token_required(self):
token = Token(template='INSERT INTO {}', value_type=StringValue,
required=True)
with self.assertRaises(TokenError) as err:
token.eval()
self.assertIn('is not set and required', str(err))
token.value = 'reports'
val = token.eval()
self.assertIsInstance(val, str)
def test_subtoken_eval(self):
t = Token(template='FROM {}', value_type=StringValue,
subtoken=Token(template='({})', value_type=TupleValue))
t.value = 'my_fn'
t.subtoken.value = ('test', 2, True, )
result = t.eval()
self.assertEqual(result, ('FROM my_fn(%s, %s, %s)', ('test', 2, True)))
def test_subtoken_eval_with_multiple_tuple_values(self):
"""Check correctness of composing tuple str and tuple values
This is not a real case
"""
t = Token(template='({})', value_type=TupleValue,
subtoken=Token(template='({})', value_type=TupleValue))
t.value = ('val', 1, False, )
t.subtoken.value = ('subtoken_val', 2, True, )
result = t.eval()
expected = ('(%s, %s, %s)(%s, %s, %s)',
('val', 1, False, 'subtoken_val', 2, True,))
self.assertEqual(result, expected)
class TokenValuesTest(unittest.TestCase):
    """Tests for the individual token value wrapper types.

    Review fixes: message checks after ``assertRaises`` now read
    ``err.exception``; a dead (and incorrect) ``assertIsNone`` that could
    never run inside the raising ``with`` block was removed.
    """

    def test_string_value(self):
        t_val = StringValue('test_value')
        self.assertIsInstance(t_val.value, str)
        self.assertEqual(t_val.eval(), 'test_value')

    def test_null_value(self):
        t_val = NullValue('test_value')
        self.assertEqual(t_val.eval(), None)

    def test_comma_value(self):
        t_val = CommaValue(['id', 'name'])
        self.assertIsInstance(t_val.value, list)
        self.assertEqual(t_val.eval(), 'id, name')

    def test_tuple_value(self):
        # TODO: TupleValue is currently only exercised indirectly via
        # TokensTest; add direct coverage here.
        pass

    def test_filter_value(self):
        val = FilterValue({'a': 1, 'b': 2})
        self.assertIsInstance(val.value, And)
        # Dict ordering is not guaranteed, so accept both operand orders.
        values = (
            ('( a = %s AND b = %s )', (1, 2)),
            ('( b = %s AND a = %s )', (2, 1)),
        )
        self.assertIn(val.eval(), values)
        val.update({'a': 2, 'c': 3})
        tokens = ('a = %s', 'b = %s', 'c = %s')
        values = (1, 2, 3)
        evaluated_val = val.eval()
        for t, v in zip(tokens, values):
            self.assertIn(t, evaluated_val[0])
            self.assertIn(v, evaluated_val[1])

    def test_filter_value_with_table_prefix(self):
        # 2 key tokens, expect that 'eq' operator must be by default then
        val = FilterValue({'users__login': 'Mr.Robot'})
        expected_val = ('( users.login = %s )', ('Mr.Robot',))
        self.assertEqual(val.eval(), expected_val)
        # 3 key tokens: "<table>.<name>" and operator
        val = FilterValue({'users__login__in': ('Mr.Robot', )})
        expected_val = ('( users.login IN %s )', (('Mr.Robot', ),))
        self.assertEqual(val.eval(), expected_val)

    def test_dict_value(self):
        t_val = DictValue(dict(a=1, b=2))
        self.assertIsInstance(t_val.value, dict)
        self.assertEqual(t_val.eval(), dict(a=1, b=2))
        with self.assertRaises(ValueError) as err:
            DictValue(1)
        self.assertIn('must be dict', str(err.exception))

    def test_comma_dict_value(self):
        t_val = CommaDictValue(dict(a=1, b=True))
        expected = (
            ('a = %s, b = %s', (1, True)),
            ('b = %s, a = %s', (True, 1)),
        )
        self.assertIn(t_val.eval(), expected)
|
# NOTE(review): dead code — an entire Flask application kept inside a
# module-level string literal (effectively commented out).  If it is no
# longer needed it should be deleted; it also embeds a hardcoded database
# password in the connection URI, which must be rotated and moved to
# configuration before this code is ever revived.
"""from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
from flask_bcrypt import Bcrypt
def create_app():
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://root:d00m3r456@localhost:3307/llantera'
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
    return app
app = create_app()
db = SQLAlchemy(app)
ma = Marshmallow(app)
bcrypt = Bcrypt(app)
from api.blueprints.usuarios import usuario
from api.blueprints.index import index
from api.blueprints.empleados import empleado
from api.blueprints.clientes import cliente
from api.blueprints.vehiculos import vehiculo
from api.blueprints.inventarios import inventario
from api.blueprints.reparaciones import reparacion
from api.blueprints.garantias import garantia
from api.blueprints.reparacion_detalles import reparacion_detalle
from api.blueprints.tipos_usuario import tipo_usuario
from api.blueprints.partes import parte
from api.blueprints.tipos_parte import tipo_parte
app.register_blueprint(usuario)
app.register_blueprint(index)
app.register_blueprint(empleado)
app.register_blueprint(cliente)
app.register_blueprint(vehiculo)
app.register_blueprint(inventario)
app.register_blueprint(reparacion)
app.register_blueprint(garantia)
app.register_blueprint(reparacion_detalle)
app.register_blueprint(tipo_usuario)
app.register_blueprint(parte)
app.register_blueprint(tipo_parte)
if __name__ == '__main__':
    app.run(debug=True)"""
# Demonstrates `break` in a for-loop and a while-loop.
# FIX: converted Python 2 `print x` statements to Python 3 `print(x)` calls;
# the script previously failed with a SyntaxError under Python 3.

# Print the leading letters of "Python", stopping as soon as 'h' is seen.
for letter in "Python":
    if letter == 'h':
        break
    print(letter)

# Count down from 10, bailing out once index reaches 5.
index = 10
while index > 0:
    print(index)
    index = index - 1
    if index == 5:
        break
|
import ast
import os
from collections import defaultdict
# Jaccard-similarity thresholds used when deciding whether a neighbour
# counts as a family match (lower = more lenient).
THRESHOLD1 = 0.27  # lenient threshold, applied to well-represented families
THRESHOLD2 = 0.54  # strict threshold, applied otherwise
FRACTION = 8  # If there are more than 1/FRACTION of the total variants, use thr1 for that family for that app
TRUSTED_NUM_REPRESENTATIVES = 8  # If there are less variants, without looking to neighbours, the higher threshold is used
TOP_NEIGHBOURS = 5  # Top neighbours used to find the proper family for the unknown sample
def get_family(apk):
    """Family label of an APK = name of the directory directly containing it."""
    path_parts = apk.split("/")
    return path_parts[-2]
def get_custom_fam_threshold(neighbors_fam_freq, db_fams_freq):
    """Pick a per-family Jaccard threshold for the observed neighbour families.

    A family gets the lenient THRESHOLD1 only when it is well represented in
    the database (>= TRUSTED_NUM_REPRESENTATIVES variants) AND enough of its
    variants showed up among the neighbours (>= 1/FRACTION of them);
    otherwise the strict THRESHOLD2 applies.
    """
    def _threshold_for(fam, observed):
        total = db_fams_freq[fam]
        if total >= TRUSTED_NUM_REPRESENTATIVES and observed >= total / FRACTION:
            return THRESHOLD1
        return THRESHOLD2

    return {fam: _threshold_for(fam, freq)
            for fam, freq in neighbors_fam_freq.items()}
def read_db_fams_freq(file_path):
    """Load the {family: variant_count} mapping stored as a Python literal
    on the first line of *file_path*."""
    with open(file_path, "r") as handle:
        first_line = handle.read().splitlines()[0]
    return ast.literal_eval(first_line)
def get_fams_freq_neighbors(neighbors):
    """Count how many of the given neighbours belong to each family.

    :param neighbors: mapping whose keys are neighbour APK paths
    :return: dict mapping family name -> number of neighbours in that family
    """
    neighbors_fam_freq = {}
    for apk_path in neighbors:
        fam = get_family(apk_path)
        # FIX: dict.get replaces the original try/bare-except counter,
        # which could also have masked unrelated errors.
        neighbors_fam_freq[fam] = neighbors_fam_freq.get(fam, 0) + 1
    return neighbors_fam_freq
def filter_phase(stats_dict):
    """Drop neighbours whose Jaccard score is at or below the per-family threshold.

    :param stats_dict: mapping apk -> {neighbour apk: jaccard score}
    :return: (filtered_detections, no_malware) where filtered_detections is a
        defaultdict(dict) of surviving neighbours per apk (an apk that had
        neighbours but kept none still appears with an empty dict, exactly as
        before), and no_malware lists those apks with no surviving neighbour.
    """
    detections = defaultdict(dict)
    no_malware = []
    db_freq = read_db_fams_freq("families_frequency.txt")
    for apk, neighbours in stats_dict.items():
        if not neighbours:
            continue
        thresholds = get_custom_fam_threshold(
            get_fams_freq_neighbors(neighbours), db_freq)
        survivors = {n: score for n, score in neighbours.items()
                     if score > thresholds[get_family(n)]}
        # update() on the defaultdict creates the (possibly empty) entry,
        # matching the original behaviour.
        detections[apk].update(survivors)
        if not detections[apk]:
            no_malware.append(apk)
    return detections, no_malware
def print_neighbors(apk, neighbors, file_name):
    """Append one "('apk', [neighbours...])" record line to *file_name*."""
    record = str((apk, list(neighbors))) + "\n"
    with open(file_name, "a") as out:
        out.write(record)
def compute_sample_family(apk, neighbors, neighbors_file_name):
    """Assign *apk* to a family by a Jaccard-weighted vote of its top neighbours.

    The TOP_NEIGHBOURS highest-scoring neighbours are logged to
    *neighbors_file_name* and each votes for its family with weight equal to
    its Jaccard score; the family with the highest total wins.

    :param apk: path of the sample being classified
    :param neighbors: mapping neighbour apk path -> jaccard score
    :param neighbors_file_name: report file appended to via print_neighbors()
    :return: the winning family name
    """
    top_neighbors = sorted(neighbors.items(), key=lambda item: -item[1])[:TOP_NEIGHBOURS]
    # FIX: the comprehension variable no longer shadows the `apk` parameter.
    print_neighbors(apk, [path for path, _ in top_neighbors], neighbors_file_name)
    votes = {}
    for path, jaccard in top_neighbors:
        fam = get_family(path)
        # FIX: dict.get replaces the original bare try/except accumulator
        # (and the redundant `1*jaccard`).
        votes[fam] = votes.get(fam, 0) + jaccard
    # max() over insertion order ties exactly like the original stable sort.
    return max(votes.items(), key=lambda item: item[1])[0]
def detection_mlw(apk_neighbors_list, data_folder):
    """Run the full detection pipeline and persist the results.

    Filters each sample's neighbours, classifies the surviving samples into
    families, and writes two report files under *data_folder*:
    samples_neighbors.txt (via compute_sample_family/print_neighbors) and
    classifications.txt with one "(apk, family)" tuple per line.

    :param apk_neighbors_list: mapping apk -> {neighbour apk: jaccard score}
    :param data_folder: output directory for the report files
    :return: dict mapping apk -> assigned family name (clean apks excluded)
    """
    classifications = dict()
    neighbors_file_name = data_folder+"/samples_neighbors.txt"
    # Start from a clean neighbours report: print_neighbors() appends.
    if os.path.exists(neighbors_file_name):
        os.remove(neighbors_file_name)
    filtered_samples, no_malware = filter_phase(apk_neighbors_list)
    for apk, apk_neighbors in filtered_samples.items():
        # filter_phase may leave empty entries for apks whose neighbours
        # were all rejected; those are the "safe" ones below.
        if len(apk_neighbors) != 0:
            classifications[apk] = compute_sample_family(apk, apk_neighbors, neighbors_file_name)
    with open(data_folder+"/classifications.txt", "w") as class_file:
        for apk, family in classifications.items():
            tup = (apk, family)
            class_file.write(str(tup)+"\n")
        # Apps with no surviving neighbour are recorded as "safe".
        for apk in no_malware:
            tup = (apk, "safe")
            class_file.write(str(tup)+"\n")
    return classifications
|
# this lets me just use Square() instead of Square.Square()
# from is the Module name ( file name ), import is the class name
from Square import Square
def print_square_stats(square):
    """Print the side length, area and perimeter of *square* on one line."""
    print(f"a square with length of {square.length_of_side} "
          f"has an area of {square.calculate_area()} "
          f"and a perimeter of {square.calculate_perimeter()}")
# Square() calls the __init__ method
square_1 = Square()
square_1.length_of_side = 10
square_2 = Square()
square_2.length_of_side = 12
square_3 = Square()
square_3.length_of_side = 15
# Report side length, area and perimeter for each of the three squares.
print_square_stats(square_1)
print_square_stats(square_2)
print_square_stats(square_3)
|
import pymongo
from pymongo import MongoClient
import json
import os
# Open a JSON file from ./data/ and return its contents as a dict.
def open_json_file(CACHE_FNAME):
    """Load ./data/<CACHE_FNAME> as JSON.

    :param CACHE_FNAME: file name inside the ./data/ directory
    :return: the parsed dict, or an empty dict when the file is missing or
        is not valid JSON (mirrors the original best-effort behaviour)
    """
    try:
        # `with` guarantees the handle is closed even if parsing fails
        # (the original leaked the handle on a json.loads error).
        with open('./data/' + CACHE_FNAME, 'r') as cache_file:
            return json.loads(cache_file.read())
    except (OSError, ValueError):
        # FIX: narrowed from a bare except; OSError covers unreadable or
        # missing files, ValueError covers json.JSONDecodeError.
        return {}
# Establish the MongoDB connection and select the working collection.
def mongo_connect_build():
    # The chosen collection is exposed through a module-level global so
    # that data_insert() can reach it.
    global mycol
    client = MongoClient("mongodb://192.168.1.154:27017/")
    # client = pymongo.MongoClient("mongodb://192.168.1.158:27017/")
    # client = pymongo.MongoClient(host="192.168.1.158", port=27017)
    # Select the database; it is created automatically on first insert.
    db = client['Topic_104']
    # Select the collection; likewise created automatically on first insert.
    mycol = db["test"]
# Insert one parsed JSON document into the global MongoDB collection.
def data_insert(CACHE_DICTION):
    """Insert *CACHE_DICTION* into the module-global collection ``mycol``.

    Documents that are empty, or whose first value is empty, are skipped.
    FIX: the original indexed ``list(values())[0]`` unconditionally and
    raised IndexError for an entirely empty dict; that case is now skipped
    like the other empty payloads.
    """
    values = list(CACHE_DICTION.values())
    if not values or len(values[0]) == 0:
        return
    mycol.insert_one(CACHE_DICTION)
    # Original (Chinese) runtime log message kept untouched.
    print(f'{CACHE_DICTION.keys()}輸入')
if __name__ == "__main__":
    mongo_connect_build()  # connect to MongoDB and select the collection
    # Every JSON file the crawler dropped into ./data/ is loaded and
    # inserted into MongoDB one by one.
    for CACHE_FNAME in os.listdir('./data/'):
        CACHE_DICTION = open_json_file(CACHE_FNAME)
        data_insert(CACHE_DICTION)
|
from django.contrib import admin
from .models import Impacto
from import_export.admin import ImportExportModelAdmin
from .resources import ImpactoResource
#@admin.register(Impacto)
#class ImpactoAdmin(admin.ModelAdmin):
# pass
@admin.register(Impacto)
class ImpactoAdmin(ImportExportModelAdmin):
    """Admin page for Impacto with django-import-export support
    (bulk import/export backed by ImpactoResource)."""
    resource_class = ImpactoResource
    # Columns shown in the changelist view.
    list_display = (
        'id',
        'estacion',
        'w_fc_sal',
        'w_fc_imp',
        'bolsa',
        'parte',
        'grupo_parte',
        'cantidad_estimada',
        'tipo_impacto',
        'impactado',
        'estado',
        'subestado',
        'creado',
        'actualizado',
    )
    # Sidebar filters and the search box (search spans the related
    # station's site name).
    list_filter = ('estado', 'subestado', 'creado', 'actualizado', 'impactado')
    search_fields = ['id', 'estacion__site_name']
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author Zhett
@email stratos33290@gmail.com
@version 0.1
@copyright 2013 Zhett
"""
from django.contrib.auth.models import User
from django.db import models
from website.models import *
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from webstore.models import *
from compta.models import Account
class Entities(models.Model):
    """Partner/profile record linked to a Django auth User.

    ``type_of`` discriminates which concrete account model
    (Reseller / SubAccount / Trader / Customer) this entity maps to.
    NOTE: this is legacy Python 2 / old-Django code (``unicode``,
    positional ``verbose_name`` with ``.decode``), kept as such.
    """
    user = models.ForeignKey(User, unique=True, related_name='profile')
    parent = models.ForeignKey("Entities", related_name="Parent", null=True, blank=True)
    TYPE_CHOICES = (('RE', 'Reseller'),
                    ('SA', 'SubAccount'),
                    ('TR', 'Trader'),
                    ('CU', 'Customer'),)
    shop = models.ForeignKey('Shop', null=True, blank=True)
    type_of = models.CharField(max_length=2, choices=TYPE_CHOICES, null=True, blank=True)
    contact_phone = models.CharField(_('phone fix'.decode("utf8")), max_length=14, null=True, blank=True)
    contact_gsm = models.CharField(_('phone mobile'.decode("utf8")), max_length=60, null=True, blank=True)
    contact_fax = models.CharField(_('fax'.decode("utf8")), max_length=14, null=True, blank=True)
    email_confirm = models.BooleanField(_("email confirmation".decode('utf8')), default=False)
    optin = models.BooleanField(_("Newsletter ?".decode('utf8')), default=False)
    subscribe_by = models.ForeignKey(User, null=True, blank=True, related_name="agent_subscriber")
    follow_by = models.ForeignKey(User, null=True, blank=True, related_name="agent_follower")

    def get_fullname(self):
        """Full name of the linked auth user."""
        return unicode(self.user.get_full_name())

    def get_linked_account(self):
        """Return the concrete account object matching ``type_of``,
        or False when ``type_of`` is unset/unknown."""
        if self.type_of == "RE":
            return Reseller.objects.get(entity=self)
        elif self.type_of == "SA":
            return SubAccount.objects.get(entity=self)
        elif self.type_of == "TR":
            return Trader.objects.get(entity=self)
        elif self.type_of == "CU":
            return Customer.objects.get(entity=self)
        else:
            return False

    def get_addresses(self):
        """Return list of all addresses to current Partner"""
        try:
            list_adr = Addresse.objects.filter(entities=self)
        except Exception:
            # FIX: narrowed from a bare except; the best-effort
            # "None on failure" contract is preserved.
            return None
        else:
            return list_adr

    def get_address_toString(self):
        """One-line invoice address of the linked concrete account."""
        account = self.get_linked_account()
        address = account.invoice_addresse
        txt = address.street
        if address.street2 is not None:
            txt += ' ' + address.street2
        if address.street3 is not None:
            txt += ' ' + address.street3
        txt += ' ' + address.zipcode + ' ' + address.city
        return txt

    def get_parent(self):
        """Return the ancestor chain ``[root, ..., parent, self]``.

        FIX: the original ``while True``/bare-except loop appended the
        terminating ``None`` parent before breaking, so callers received a
        leading ``None`` after the reverse; the loop now stops cleanly at
        the root.
        """
        tab_parent = [self]
        tmp = self
        while tmp.parent is not None:
            tab_parent.append(tmp.parent)
            tmp = tmp.parent
        tab_parent.reverse()
        return tab_parent

    def __unicode__(self):
        # FIX: __unicode__ must return text; the original returned the
        # raw integer primary key.
        return unicode(self.id)

    class Meta:
        app_label = "webstore"
class Addresse(models.Model):
    """Postal address attached to an Entities record."""
    # We fill this name with full partner name
    entities = models.ForeignKey("Entities", null=True, blank=True)
    name = models.CharField(_("address name"), max_length=60, null=True, blank=True)
    street = models.CharField(_("Address"), max_length=38, null=True, blank=True)
    street2 = models.CharField(_("Address2".decode("utf8")), max_length=38, blank=True, null=True)
    street3 = models.CharField(_("Address3".decode("utf8")), max_length=38, blank=True, null=True)
    zipcode = models.CharField(_("ZipCode"), max_length=10, null=True, blank=True)
    city = models.CharField(_("City"), max_length=60, null=True, blank=True)
    country = models.CharField(_("Country"), max_length=60, null=True, blank=True)
    comment = models.CharField(_("Comment"), max_length=255, blank=True, null=True)
    is_active = models.BooleanField(default=True)

    def get_datas(self):
        """Return the raw address fields as a tuple."""
        all_datas = self.street, self.street2, self.street3, self.zipcode, self.city, self.country
        return all_datas

    def to_string(self):
        """Multi-line postal representation of the address.

        FIX: the original concatenated street2/street3/country
        unconditionally, raising TypeError whenever one of those nullable
        fields was None (get_address_toString already guarded against
        this); empty/None parts are now skipped.
        """
        lines = [part for part in (self.street, self.street2, self.street3) if part]
        lines.append(u'%s %s' % (self.zipcode, self.city))
        if self.country:
            lines.append(self.country)
        return u'\n'.join(lines)

    def __unicode__(self):
        return self.name

    class Meta:
        app_label = "webstore"
class Reseller(models.Model):
    """Reseller account: banking details, company identity and the two
    linked accounting accounts (purchases / sales)."""
    entity = models.ForeignKey("Entities", related_name='entity', null=True, blank=True)
    rib_iban = models.CharField(_('RIB ou IBAN'), max_length=60, null=True, blank=True)
    bic = models.CharField(_('BIC'), max_length=65, null=True, blank=True)
    owner_bank = models.CharField(_('compt owner'), max_length=60, null=True, blank=True)
    owner_addresse = models.ForeignKey("Addresse", related_name='Owner Addresse', null=True, blank=True)
    siege_addresse = models.ForeignKey("Addresse", related_name='Siege Addresse', null=True, blank=True)
    invoice_addresse = models.ForeignKey("Addresse", related_name='Invoice Addresse', null=True, blank=True)
    domiciliation = models.CharField(_('Domiciliation du compte'), max_length=24, null=True, blank=True)
    assujetti_tva = models.BooleanField(_("Assujetti à la tva".decode("utf8")))
    company = models.CharField(_('Raison sociale'), max_length=60, null=True, blank=True)
    siret = models.CharField(_('SIRET'), max_length=18, null=True, blank=True, unique=False, help_text=_("Pour modifier votre numéro de SIRET, merci de nous contacter.".decode('utf8')))
    vat_number = models.CharField(_('Numéro T.V.A'.decode("utf8")), max_length=60, null=True, blank=True, help_text=_("Pour modifier votre numéro de TVA, merci de nous contacter.".decode('utf8')))
    ape_code = models.CharField(_('Code APE'), max_length=5, null=True, blank=True)
    activity = models.CharField(_("Métier de l'entreprise".decode("utf8")), max_length=60, null=True, blank=True)
    website = models.CharField(_('Site web'), max_length=60, null=True, blank=True)
    compta_account_ac = models.ForeignKey("compta.Account", related_name='reseller_account_ac', null=True, blank=True)
    compta_account_ve = models.ForeignKey("compta.Account", related_name='reseller_account_ve', null=True, blank=True)

    def get_fullname(self):
        """Full name of the auth user behind the linked entity."""
        return unicode(self.entity.user.get_full_name())

    def get_user(self):
        """Convenience accessor for the underlying auth user."""
        return self.entity.user

    def __unicode__(self):
        return self.company

    class Meta:
        app_label = "webstore"
class Customer(models.Model):
    """Customer account: mirrors Reseller's banking/company fields with
    customer-specific accounting links."""
    entity = models.ForeignKey("Entities", related_name='entity1', null=True, blank=True)
    rib_iban = models.CharField(_('RIB ou IBAN'), max_length=60, null=True, blank=True)
    bic = models.CharField(_('BIC'), max_length=65, null=True, blank=True)
    owner_bank = models.CharField(_('Titulaire du compte'), max_length=60, null=True, blank=True)
    owner_addresse = models.ForeignKey("Addresse", related_name='Owner Addresse1', null=True, blank=True)
    siege_addresse = models.ForeignKey("Addresse", related_name='Siege Addresse1', null=True, blank=True)
    invoice_addresse = models.ForeignKey("Addresse", related_name='Invoice Addresse1', null=True, blank=True)
    domiciliation = models.CharField(_('Domiciliation du compte'), max_length=24, null=True, blank=True)
    company = models.CharField(_('Raison sociale'), max_length=60, null=True, blank=True)
    siret = models.CharField(_('SIRET'), max_length=18, null=True, blank=True, unique=False, help_text=_("Pour modifier votre numéro de SIRET, merci de nous contacter.".decode('utf8')))
    assujetti_tva = models.BooleanField(_("Assujetti à la tva".decode("utf8")))
    vat_number = models.CharField(_('VAT number'.decode("utf8")), max_length=60, null=True, blank=True, help_text=_("Pour modifier votre numéro de TVA, merci de nous contacter.".decode('utf8')))
    ape_code = models.CharField(_('Code APE'), max_length=5, null=True, blank=True)
    activity = models.CharField(_("job".decode("utf8")), max_length=60, null=True, blank=True)
    website = models.CharField(_('Website'), max_length=60, null=True, blank=True)
    compta_account_ac = models.ForeignKey("compta.Account", related_name='customer_account_ac', null=True, blank=True)
    compta_account_ve = models.ForeignKey("compta.Account", related_name='customer_account_ve', null=True, blank=True)

    def get_fullname(self):
        """Full name of the auth user behind the linked entity."""
        return unicode(self.entity.user.get_full_name())

    def get_user(self):
        """Convenience accessor for the underlying auth user."""
        return self.entity.user

    def __unicode__(self):
        return str(self.id)

    class Meta:
        app_label = "webstore"
class Trader(models.Model):
    """Trader account: an entity attached to a shop, with gestco groups."""
    entity = models.ForeignKey("Entities", related_name='entity2', null=True, blank=True)
    shop = models.ForeignKey("Shop", related_name='trader_shop', null=True, blank=True)

    def get_gestco_groups(self):
        """All GestcoAssociates rows pointing at this trader."""
        list_groups = GestcoAssociates.objects.filter(trader=self)
        return list_groups

    def get_fullname(self):
        """Full name of the auth user behind the linked entity."""
        return unicode(self.entity.user.get_full_name())

    def get_user(self):
        """Convenience accessor for the underlying auth user."""
        return self.entity.user

    def __unicode__(self):
        return str(self.id)
        # return "-".join([str(self.id), str(self.shop.name)])

    class Meta:
        app_label = "webstore"
class SubAccount(models.Model):
    """Sub-account of a reseller, optionally tied to a shop."""
    entity = models.ForeignKey("Entities", related_name='entity3', null=True, blank=True)
    reseller = models.ForeignKey("Reseller", related_name='Reseller', null=True, blank=True)
    shop = models.ForeignKey("Shop", related_name='sub_shop', null=True, blank=True)

    def get_fullname(self):
        """Full name of the auth user behind the linked entity."""
        return str(self.entity.user.get_full_name())

    def get_user(self):
        """Convenience accessor for the underlying auth user."""
        return self.entity.user

    def __unicode__(self):
        # return "-".join([str(self.id), self.entity])
        return str(self.id)

    class Meta:
        app_label = "webstore"
class Supplier(models.Model):
    """Define model of suppliers"""
    company = models.CharField(_('Societe'), max_length=60, help_text="", null=True, blank=True)
    user = models.ForeignKey(User, unique=False)
    is_active = models.BooleanField(_("Est actif"))
    # Accounting accounts for purchases (ac) and sales (ve).
    compta_account_ac = models.ForeignKey("compta.Account", related_name='supplier_account_ac', null=True, blank=True)
    compta_account_ve = models.ForeignKey("compta.Account", related_name='supplier_account_ve', null=True, blank=True)
    categ_code_name = models.CharField(_('Categ CodeName'), max_length=255, help_text="", null=True, blank=True)

    def __unicode__(self):
        return str(self.id)
        # return self.company

    class Meta:
        app_label = "webstore"
class SavesAccount(models.Model):
    """Manage account for the saves categ.

    A per-order sequential ``number`` ("<customer>-<order>-<seq>") is
    generated automatically on first save.
    """
    number = models.CharField(_("Unique id"), max_length=255, null=False, blank=True)
    customer = models.ForeignKey("Entities", related_name="sub_custo")
    order = models.ForeignKey("Order", null=True, blank=True)
    subscription = models.ForeignKey("Subscription", related_name='souscription', null=True, blank=True)
    date_add = models.DateTimeField(auto_now_add=True, auto_now=False)
    product_variant = models.ForeignKey("ProductVariant")
    # FIX: the verbose names of these two fields were swapped
    # (lastname was labelled "Firstname" and vice versa).
    lastname = models.CharField(_("Lastname"), max_length=50, null=True, blank=True)
    firstname = models.CharField(_("Firstname"), max_length=50, null=True, blank=True)
    email = models.EmailField(_("Email"), max_length=254, null=True, blank=True)
    login = models.CharField(_("Login"), max_length=50, null=True, blank=True)
    is_allocated = models.BooleanField(_("Is Allocated"), default=False)

    def __unicode__(self):
        return str(self.customer) + '-' + str(self.number)

    def save(self, *args, **kwargs):
        """Generate the '<customer>-<order>-<seq>' number on first save.

        NOTE(review): ``order`` is nullable, yet the number generation
        dereferences ``self.order.id`` — saving without an order raises
        AttributeError.  Confirm intended usage before tightening.
        """
        if not self.number:
            number = str(self.customer.id) + "-" + str(self.order.id) + "-" + str(len(SavesAccount.objects.filter(order=self.order)))
            self.number = number
        super(SavesAccount, self).save(*args, **kwargs)

    class Meta:
        app_label = "webstore"
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2022 Baidu, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utility functions used in the ``qcompute_qep`` module.
"""
from qcompute_qep.utils.linalg import dagger
from QCompute.QPlatform.QOperation.QProcedure import QProcedure
import math
import functools
import numpy as np
from typing import Tuple
r"""
Color table for beautiful terminal print.
Common usage:
>>> print("{}".format(COLOR_TABLE['red'] + "Content to be Printed in Color" + COLOR_TABLE['end']))
Reference: https://stackoverflow.com/questions/287871/how-do-i-print-colored-text-to-the-terminal
"""
# ANSI escape codes for coloured terminal output; always terminate a
# coloured span with COLOR_TABLE['end'] to reset the terminal style.
COLOR_TABLE = {'red': '\33[31m',
               'green': '\33[32m',
               'yellow': '\33[33m',
               'blue': '\33[34m',
               'violet': '\33[35m',
               'beige': '\33[36m',
               'white': '\33[37m',
               'end': '\033[0m'}
def global_phase(U: np.ndarray) -> float:
    r"""Compute the global phase of a :math:`2\times 2` unitary matrix.

    Each :math:`2\times 2` unitary matrix can be equivalently characterized as
    .. math:: U = e^{i\alpha} R_z(\phi) R_y(\theta) R_z(\lambda),
    and this function returns :math:`\alpha`.
    See Theorem 4.1 in `Nielsen & Chuang`'s book for details.

    :param U: the matrix representation of a :math:`2\times 2` unitary operator
    :return: the global phase of the unitary matrix
    """
    # det(U) = e^{2 i alpha}, hence det(U)^(-1/2) = e^{-i alpha}.
    return -np.angle(np.linalg.det(U) ** (-0.5))
def decompose_yzy(U: np.ndarray) -> Tuple[float, float, float, float]:
    r"""Compute the Euler angles :math:`(\alpha,\theta,\phi,\lambda)` of a
    :math:`2\times 2` unitary matrix, where

    .. math::
        U = e^{i\alpha} R_z(\phi) R_y(\theta) R_z(\lambda)
          = e^{i(\alpha-\phi/2-\lambda/2)}
            \begin{bmatrix}
                \cos(\theta/2) & -e^{i\lambda}\sin(\theta/2) \\
                e^{i\phi}\sin(\theta/2) & e^{i(\phi+\lambda)}\cos(\theta/2)
            \end{bmatrix}.

    See Theorem 4.1 in `Nielsen & Chuang`'s book for details.

    :param U: the matrix representation of the qubit unitary
    :return: Tuple[float, float, float, float], the Euler angles
    :raises ValueError: if ``U`` is not a :math:`2\times 2` matrix
    """
    if U.shape != (2, 2):
        raise ValueError("in decompose_yzy(): input should be a 2x2 matrix!")
    # Strip the global phase so the remainder is special-unitary; round to
    # suppress floating-point noise before reading off the angles.
    alpha = global_phase(U)
    V = (U * np.exp(-1j * alpha)).round(10)
    theta = 2 * math.atan2(abs(V[1, 0]), abs(V[0, 0]))
    # phi and lambda follow from the phases of the second row.
    sum_pl = 2 * np.angle(V[1, 1])
    diff_pl = 2 * np.angle(V[1, 0])
    return alpha, theta, (sum_pl + diff_pl) / 2.0, (sum_pl - diff_pl) / 2.0
def str_to_state(state_str: str, bits: int = None, LSB: bool = True) -> np.ndarray:
    r"""Return the computational basis state in density matrix form.

    In the default LSB (least significant bit) mode the right-most
    character represents q[0]:

    ::

        "1    0    1"
        q[2] q[1] q[0]

    :param state_str: string-format, e.g. '1110', '11', '0', or hex '0x...'
    :param bits: int, pad (zero-fill) the string to this many bits
    :param LSB: the least significant bit (LSB) mode, default is True
    :return: np.ndarray, density matrix in type of `ndarray`
    :raises ValueError: if ``bits`` is smaller than the string length

    **Examples**

        >>> str_to_state("1")
        array([[0.+0.j, 0.+0.j],
               [0.+0.j, 1.+0.j]])
    """
    # Hexadecimal labels ('0x...') are first converted to binary.
    if state_str[:2].lower() == '0x':
        state_str = bin(int(state_str, 16))[2:]
    if bits is not None:
        if bits < len(state_str):
            raise ValueError('bits can not be less than length of {}'.format(state_str))
        state_str = state_str.zfill(bits)
    bit_values = [int(ch) for ch in state_str]
    if not LSB:
        bit_values = reversed(bit_values)
    # |0><0| and |1><1| single-qubit projectors; the state is their
    # tensor product following the bit order.
    projector = {0: np.diag([1, 0]).astype(complex),
                 1: np.diag([0, 1]).astype(complex)}
    return functools.reduce(np.kron, (projector[b] for b in bit_values))
def expval_from_counts(A: np.ndarray, counts: dict) -> float:
    r"""Expectation value of the given operator :math:`A` from counts.

    We assume `a priori` that :math:`A` is diagonalized with respect to the
    measurement basis on which the quantum state is measured and counts is
    obtained.

    :param A: np.ndarray, a Hermitian operator diagonalized in the measurement basis
    :param counts: dict, shot-measurement results, e.g. ``{'000000': 481, '111111': 519}``
    :return: float, the estimated expectation value
    :raises ValueError: if an outcome state and ``A`` have different shapes
    """
    outcomes = list(counts)
    # Hexadecimal outcome labels need a common bit width so that every
    # reconstructed state has the same dimension as A.
    if outcomes[0][:2].lower() == '0x':
        widest = max(int(label, 16) for label in outcomes)
        bits = len(bin(widest)[2:])
    else:
        bits = None
    expects = []
    for label in outcomes:
        rho = str_to_state(label, bits=bits)
        if rho.shape != A.shape:
            raise ValueError("Shapes of density matrix and operator are not equal!")
        # tr(rho A) is the per-outcome expectation value.
        expects.append(np.real(np.trace(rho @ A)))
    # Count-weighted average over the observed outcomes.
    return np.average(expects, weights=list(counts.values()))
def expval_z_from_counts(counts: dict) -> float:
    r"""Expectation value of the :math:`Z^{\otimes n}` operator from counts.

    :param counts: dict-type counts data, records the measurement outcomes,
        e.g. ``{'000000': 481, '111111': 519}``
    :return: float, the expectation value of the :math:`Z^{\otimes n}` operator
    """
    # The qubit count is the length of any outcome label.
    num_qubits = len(next(iter(counts)))
    pauli_z = np.diag([1, -1]).astype(complex)
    observable = functools.reduce(np.kron, [pauli_z] * num_qubits)
    return expval_from_counts(observable, counts)
def limit_angle(theta: float) -> float:
    r"""Wrap an angle value into the range :math:`[-\pi, \pi]`.

    :param theta: origin angle value
    :return: the equivalent angle limited to :math:`[-\pi, \pi]`
    """
    # Reduce the magnitude modulo 2*pi, fold anything beyond pi back into
    # (-pi, pi], then restore the original sign.
    sign = 1.0 if theta > 0 else -1.0
    reduced = abs(theta) % (2 * np.pi)
    if reduced > np.pi:
        reduced -= 2 * np.pi
    return sign * reduced
|
from __future__ import print_function
import sys
import os
import requests
import datetime
import pandas as pd
from bokeh.io import curdoc
from bokeh.layouts import row, widgetbox
from bokeh.models import ColumnDataSource
from bokeh.models.widgets import TextInput, RadioButtonGroup
from bokeh.models.tools import HoverTool
from bokeh.plotting import figure
from bokeh.embed import components
# Set up data
# NOTE(review): the Quandl API key is hardcoded in source — move it to an
# environment variable or config file before sharing/publishing this script.
api_key = 'gw2NbPXKQYZkf46yfNQS'
url = 'https://www.quandl.com/api/v3/datatables/WIKI/PRICES.json?ticker=GOOG&date.gte=2016-01-04&date.lte=2016-02-03&api_key=' + str(api_key)
response = requests.get(url)
meta_data = response.json()
# NOTE(review): duplicated statement — the second json() call just
# re-parses the same response body.
meta_data = response.json()
data = meta_data['datatable']
data = data['data']
df = pd.DataFrame(data)
df.columns = ['Ticker', 'Date', 'Open', 'High', 'Low', 'Close', 'Volume', 'Dividend', 'Split_Ratio', 'Adj_Open', 'Adj_High', 'Adj_Low', 'Adj_Close', 'Adj_Volume']
# Column 1 of the raw rows is the trade date; use it as the index so the
# series can later be reindexed onto a continuous daily calendar.
dates = pd.DataFrame(data).iloc[:,1]
dates= pd.to_datetime(dates)
temp = pd.DataFrame(data, index = dates)
temp = temp.iloc[:,[1,5]]
temp.columns = ['Date', 'Close']
# NOTE(review): redundant double assignment (`curr_date = curr_date = ...`).
curr_date = curr_date = datetime.datetime(2016, 2, 3).date()
prev_month = curr_date - datetime.timedelta(days=30)
# Continuous daily index for the highlighted most-recent-month line.
new_idx = pd.date_range(prev_month, curr_date, freq='D')
temp2 = temp.reindex(new_idx)
# Full history (grey line) and last-month window (bold line) sources.
source = ColumnDataSource(
    data=dict(
        x1=pd.to_datetime(temp['Date']),
        y1 = temp['Close'][:]))
source2 = ColumnDataSource(
    data=dict(
        x2 = pd.to_datetime(new_idx),
        y2 = temp2['Close'][:]))
# Set up plot
p = figure(width=800, height=500, x_axis_type='datetime',
           title=str(df['Ticker'][0]) + ' Closing Prices between ' + \
           str(df['Date'][0]) + ' and ' + str(df['Date'][len(df)-1]))
p.line('x1', 'y1', source=source, line_width=1, alpha=0.5, color='grey')
p.line('x2', 'y2', source=source2, line_width=3)
p.yaxis.axis_label = 'Price (USD)'
p.xaxis.axis_label = 'Date'
p.xaxis.major_label_orientation = 3.14159/4
# Invisible hover targets: circles only appear (highlighted) under the cursor.
cr = p.circle('x1', 'y1', source=source, size=20,
              fill_color="grey", hover_fill_color="firebrick",
              fill_alpha=0, hover_alpha=0.6,
              line_color=None, hover_line_color=None)
p.add_tools(HoverTool(tooltips=[("Price", "@y1")], renderers=[cr], mode="mouse"))
# Set up widgets
ticker_text = TextInput(title="Ticker Symbol", value='GOOG')
date_text = TextInput(title="End Date", value='2016-02-03')
temporal_button = RadioButtonGroup(labels=["1 Month", "6 Months", "1 Year"], active=0)
def get_dates(new_date, active_button):
    """Translate the UI state into a (current, previous) date pair.

    :param new_date: ISO 'YYYY-MM-DD' string from the end-date text box
    :param active_button: 0 = 1 month, 1 = 6 months, anything else = 1 year
    :return: tuple (curr_date, prev_date) of datetime.date objects
    """
    curr_date = datetime.datetime.strptime(new_date, '%Y-%m-%d').date()
    spans = {0: datetime.timedelta(days=30),
             1: datetime.timedelta(days=30.5 * 6)}
    window = spans.get(active_button, datetime.timedelta(days=365))
    return curr_date, curr_date - window
def update_title_ticker(attrname, old, new):
    """Refresh the plot title from the current widget state.

    FIX: the exact same callback body was duplicated three times
    (ticker / date / button); it now exists once, with the other two
    registered names kept as backward-compatible aliases.
    """
    curr_date, prev_date = get_dates(date_text.value, temporal_button.active)
    p.title.text = str(ticker_text.value) + ' Closing Prices between ' + \
        str(prev_date) + ' and ' + str(curr_date)


# Backward-compatible aliases for the previously duplicated callbacks.
update_title_date = update_title_ticker
update_title_button = update_title_ticker

ticker_text.on_change('value', update_title_ticker)
date_text.on_change('value', update_title_date)
temporal_button.on_change('active', update_title_button)
# Set up callbacks
def update_data(attrname, old, new):
    """Re-scrape Quandl for the current widget values and refresh both
    data sources (raw close series and the daily-reindexed series)."""
    # Get the current values
    new_ticker = ticker_text.value
    curr_date, prev_date = get_dates(date_text.value, temporal_button.active)
    # Scrape new dataset
    url = 'https://www.quandl.com/api/v3/datatables/WIKI/PRICES.json?ticker=' + str(new_ticker) + '&date.gte=' + str(prev_date) + '&date.lte=' + str(curr_date) + '&api_key=' + str(api_key)
    response = requests.get(url)
    meta_data = response.json()  # FIX: response.json() was called twice
    data = meta_data['datatable']['data']
    df = pd.DataFrame(data)
    df.columns = ['Ticker', 'Date', 'Open', 'High', 'Low', 'Close', 'Volume', 'Dividend', 'Split_Ratio', 'Adj_Open', 'Adj_High', 'Adj_Low', 'Adj_Close', 'Adj_Volume']
    # FIX: reuse df instead of building a second throwaway DataFrame.
    dates = pd.to_datetime(df.iloc[:, 1])
    temp = pd.DataFrame(data, index = dates)
    temp = temp.iloc[:, [1, 5]]
    temp.columns = ['Date', 'Close']
    new_idx = pd.date_range(prev_date, curr_date, freq='D')
    temp2 = temp.reindex(new_idx)
    source.data = dict(x1=pd.to_datetime(temp['Date']),
                       y1=temp['Close'][:])
    source2.data = dict(x2=pd.to_datetime(new_idx),
                        y2=temp2['Close'][:])
for w in [ticker_text, date_text]:
    w.on_change('value', update_data)
temporal_button.on_change('active', update_data)
# Set up layouts and add to document
inputs = widgetbox(ticker_text, date_text, temporal_button)  # widget column
curdoc().add_root(row(inputs, p, width=800))  # widgets left, plot right
curdoc().title = "Closing Prices"  # browser tab title
from flask import Flask
from Crypto.Cipher import AES
from os import urandom as rand
from binascii import unhexlify
app = Flask(__name__)
# Random flag and AES key, regenerated on every server start.
flag = 'flag{' + rand(32).hex() + '}'
key = rand(32) # AES key
# Creates an AES cipher object
def aes(iv):
    """Return a fresh AES-CBC cipher under the server key with the given IV."""
    return AES.new(key, AES.MODE_CBC, iv)
# apply PKCS#7 padding to m
def pad(m):
    """Pad string m to the next 16-byte boundary with PKCS#7 filler.

    A message already on a boundary gains a full block of chr(16).
    """
    fill = 16 - (len(m) % 16)
    return m + chr(fill) * fill
# remove PKCS#7 padding. Exposes a padding oracle
def unpad(m):
    """Strip PKCS#7 padding from bytes m.

    Returns None for any malformed padding — this None-vs-value split is
    the padding oracle the challenge deliberately exposes.
    """
    count = m[-1]
    tail = m[-count:]
    if any(byte != count for byte in tail):
        return None
    if not 0 < count <= 16:
        return None
    return m[:-count]
# returns the flag, encrypted under the key and a random IV
@app.route('/flag')
def get_flag():
    """Return hex(IV) || hex(ciphertext) of the padded flag under a fresh IV."""
    iv = rand(16)
    c = aes(iv).encrypt(pad(flag))
    return f'{iv.hex()}{c.hex()}'
def decrypt(c):
    """Hex-decode c, split off the 16-byte IV, CBC-decrypt the remainder
    and un-pad it. Returns None for a ragged length or bad padding."""
    raw = unhexlify(c)
    iv, body = raw[:16], raw[16:]
    if len(body) % 16 != 0:
        return None
    plaintext = aes(iv).decrypt(body)
    return unpad(plaintext)
# attempts to decrypt
@app.route('/decrypt/<c>')
def decrypt_silent(c):
    """Padding oracle: reports only whether decryption un-padded cleanly."""
    p = decrypt(c)
    if p is None:
        return 'bad padding'
    return 'ok'
@app.route('/decrypt_reveal/<c>')
def decrypt_loud(c):
    """Decrypt and return the plaintext directly.

    NOTE(review): returns None (a Flask 500) when padding/length is bad,
    and reveals raw plaintext — presumably the challenge's intentionally
    unsafe endpoint; confirm before "fixing".
    """
    return decrypt(c)
|
"""A program to hype you up on a dreary day."""
__author__: str = "730247598"
# Ask for the user's name once, then print a short personalized pep talk.
name: str = input("What is your name? " + "\n")
print(name + ", you got this!" + "\n")
print("Hang in there a little longer " + name + "\n")
# Closing quote, attributed on the last line.
print("'Sometimes life is like this dark tunnel, " + name + ", you can't always see the light at the end of")
print("the tunnel, but if you just keep moving... you will come to a better place.'")
print("- Iroh, 'Avatar: The Last Airbender'")
def matrix_for_traces(l,theta1,theta2):
    """Collect [trace_index, side] pairs whose points fall within theta1
    (time) and theta2 (position) of a base trace.

    NOTE(review): this function has several apparent bugs and does not
    run as written — flagged inline; the intended semantics need
    confirming with the author before a fix is attempted.
    """
    solutions = []
    if len(l) <= 1:
        return "More than one trace needed!"
    # NOTE(review): range(len(l)+1) lets i == len(l), so l[i] below raises
    # IndexError on the final iteration.
    for i in range(len(l)+1):
        others = []
        others.append(l[0:i])
        others.append(l[i:len(l)+1])
        # NOTE(review): slicing trace i to its first i points looks
        # accidental; plain l[i] was probably intended.
        basetrace = l[i][0:i]
        for x in basetrace:
            for y in range(len(others)):
                # NOTE(review): this loop variable shadows the outer
                # parameter l; the loop variable x is never used either.
                for l in others[y]:
                    coord = l[0:]
                    # NOTE(review): basetrace[0] is a (time, (x, y)) tuple,
                    # so the subtraction raises TypeError; the function
                    # also never returns `solutions`.
                    if abs(coord[0]-basetrace[0]) <= theta1 and euclidian_distance(coord[1:2],basetrace[1:2]) <= theta2:
                        if [i,y] not in solutions:
                            solutions.append([i,y])
# Sample traces: lists of (timestamp, (x, y)) points.
trace1 = [(1.0 ,(10.10 ,20.0) ) ,(3.0 ,(10.50 ,20.30) ) ,(5.0 ,(11.0 ,21) ) ]
trace2 = [(1.0 ,(15.00 ,15.0) ) ,(2.0 ,(12.00 ,17.00) ) ,(3.0 ,(10.50 ,20) ) ,(4.0 ,(12.0 ,21.0) ) ]
trace3 = [(1.0 ,(15.00 ,15.0) ) ,(3.0 ,(16.0 ,21.0) ) ,(5.0 ,(20.0 ,21.0) ) ]
# NOTE(review): euclidian_distance is defined *below* this call, so the
# lookup fails when this line executes — move the call after the
# definition (or guard it under __main__).
print ( matrix_for_traces ([ trace1 , trace2 , trace3 ] ,0.0 ,1.0) )
def euclidian_distance(c1, c2):
    """Euclidean distance between two 2-D points (any (x, y) sequences)."""
    dx = c1[0] - c2[0]
    dy = c1[1] - c2[1]
    return (dx * dx + dy * dy) ** 0.5
|
def decor(fun):
    """Decorator: wrap a zero-argument callable so its result is doubled."""
    def inner():
        return fun() * 2
    return inner
def num():
    """Demo callable for the decorator above: always yields 5."""
    value = 5
    return value
# Wrap num by hand (equivalent to decorating num with @decor).
newresult =decor(num)
print(newresult())  # 5 * 2 -> prints 10
|
def fullcode():
    """Generate and print valid (elfproef) BSN numbers.

    Asks how many BSNs to print, then keeps drawing random 9-digit
    candidates (digits 1-9, as in the original) and prints only those
    passing the Dutch 11-test, until the requested count is reached.
    Returns early with a message on non-numeric input.
    """
    import random  # FIX: was re-imported on every loop iteration
    try:
        BSNcounter = int(input("Hoeveel BSN's?: "))
    except ValueError:  # FIX: bare except also swallowed KeyboardInterrupt
        print("Geef een getal op!")
        return
    while BSNcounter > 0:
        # Draw 9 random digits (1-9; the original never generates zeros).
        BSN = [random.randint(1, 9) for _ in range(9)]
        # 11-test: weights 9..2 for the first eight digits, -1 for the last.
        BSNcheck = sum(digit * weight
                       for digit, weight in zip(BSN[:8], range(9, 1, -1)))
        BSNcheck += BSN[8] * -1
        # Candidate is valid when the weighted sum is divisible by 11.
        if BSNcheck % 11 == 0:
            print("".join(str(digit) for digit in BSN))
            BSNcounter = BSNcounter - 1
# Run forever: each pass asks again how many BSNs to generate.
while True:
    fullcode()
|
from django.shortcuts import render
from .models import Post
from .forms import PostForm, WishForm
from django.views.generic import ListView, CreateView, TemplateView
# Create your views here.
class PostListView(ListView):
    """List every post; the template receives them as 'posts'."""
    allow_empty = True  # render the template even when there are no posts
    model = Post
    template_name = 'blog/post-list.html'
    def get_context_data(self, **kwargs):
        # Expose the full queryset under the name the template expects.
        context = super().get_context_data(**kwargs)
        context['posts'] = Post.objects.all()
        return context
class PostCreateView(CreateView):
    """Create a post via PostForm, then bounce back to the site root."""
    form_class = PostForm
    template_name = 'blog/add-post.html'
    success_url = '/'
class AboutView(TemplateView):
    """Static about page."""
    template_name = 'blog/about.html'
|
"""
Takes revision event data file. Keeps just anonymous user data.
Usage:
anonymous_edits (-h|--help)
anonymous_edits <input> <output>
[--debug]
[--verbose]
Options:
-h, --help This help message is printed
<input> Path to input file to process.
<output> Where output will be written
--debug Print debug logging to stderr
--verbose Print dots and stuff to stderr
"""
import docopt
import logging
import operator
import sys
from collections import defaultdict
import mysqltsv
import re
logger = logging.getLogger(__name__)
# NOTE(review): the name is misspelled ("REGESTERED") and the pattern is
# unused in this script; (\d)+ also captures only the last digit — plain
# ^\d+$ was probably intended.
REGESTERED_USER_ID_RE = re.compile(r'^(\d)+$')
def main(argv=None):
    """Parse CLI arguments (docopt), configure logging, open the
    input/output TSV streams, and hand off to run()."""
    args = docopt.docopt(__doc__)
    logging.basicConfig(
        level=logging.INFO if not args['--debug'] else logging.DEBUG,
        format='%(asctime)s %(levelname)s:%(name)s -- %(message)s'
    )
    # All 13 revision-event columns are read as plain strings.
    input_file = mysqltsv.Reader(
        open(args['<input>'],'rt'), headers=True,
        types=[str, str, str, str, str, str, str, str, str, str, str, str, str])
    output_file = mysqltsv.Writer(open(args['<output>'], "w"), headers=[
        'title', 'rev_id', 'user', 'username', 'comment', 'namespace',
        'timestamp','prev_timestamp', 'session_start', 'session_end',
        'session_index', 'session_events', 'event_index'])
    verbose = args['--verbose']
    run(input_file, output_file, verbose)
def run(input_file, output_file, verbose):
    """Copy rows for anonymous edits (no user id but a username present)
    from input_file to output_file, optionally echoing progress."""
    columns = ("title", "rev_id", "user", "username", "comment", "namespace",
               "timestamp", "prev_timestamp", "session_start", "session_end",
               "session_index", "session_events", "event_index")
    for i, line in enumerate(input_file):
        # Anonymous editors have an empty user id but a recorded username.
        if not line["user"] and line["username"]:
            output_file.write([line[column] for column in columns])
        if verbose and i % 10000 == 0 and i != 0:
            sys.stderr.write("Revisions considered: {0}\n".format(i))
            sys.stderr.flush()
main()
|
from django.contrib import admin
from homepage.models import Roast_Boast
# Register your models here.
admin.site.register(Roast_Boast)  # expose Roast_Boast in the Django admin
def markDigits(number, digits):
    """Set digits[d] = True for every decimal digit d of number (in place).

    number == 0 marks nothing; the caller special-cases zero separately.
    """
    remaining = number
    while remaining:
        remaining, digit = divmod(remaining, 10)
        digits[digit] = True
def isTimeToSleep(digits):
    """Return True once every entry in digits is marked, i.e. all ten
    decimal digits have been seen.

    FIX: the manual flag loop reimplemented the built-in all().
    """
    return all(digits)
# Read the case count line, then one starting number per line.
numbers = []
with open("input.txt", "r") as inputFile:
    inputFile.readline()
    for line in inputFile:
        numbers.append(int(line))
with open("output.txt", "w") as outputFile:
    case = 1
    for number in numbers:
        if number == 0:
            # Multiples of 0 never reveal new digits: never falls asleep.
            outputFile.write("Case #{}: {}\n".format(case, "INSOMNIA"))
        else:
            # One seen-flag per digit 0-9 (the range merely yields 10 slots).
            digits = [False for i in range(1, 10 + 1)]
            N = 0
            # Keep counting multiples until every digit has appeared.
            while not isTimeToSleep(digits):
                N += 1
                markDigits(number * N, digits)
            outputFile.write("Case #{}: {}\n".format(case, number * N))
        case += 1
|
from .provider import TicketProvider
import gitlab
import os
# GitLab endpoint and API token come from the environment.
GITLAB_URL = os.getenv('GITLAB_URL', 'https://gitlab.fabcloud.org')
GITLAB_TOKEN = os.getenv('GITLAB_TOKEN')
def gitClient(sudo=None):
    """Build an authenticated python-gitlab client (API v4).

    sudo: optional user id/name; sets the Sudo header so the request is
    performed on behalf of that user (assumes the configured token has
    sudo/admin rights — TODO confirm).
    """
    git = gitlab.Gitlab(GITLAB_URL, GITLAB_TOKEN, api_version=4)
    if sudo:
        git.headers['Sudo'] = str(sudo)
    git.auth()
    return git
class GitlabProvider(TicketProvider):
    """TicketProvider implementation backed by a GitLab project's issue
    tracker (python-gitlab, API v4)."""

    @classmethod
    def getTracker(cls, project_path, sudo=None):
        """Get the project (issue tracker) at project_path, optionally
        acting as another user via sudo."""
        git = gitClient(sudo)
        project = git.projects.get(project_path)
        return project

    @classmethod
    def getMembers(cls, project_path):
        """Get the members associated to the project at path."""
        project = cls.getTracker(project_path)
        return project.members.list(all=True)

    @classmethod
    def addMember(cls, project_path, user_id, level):
        """Add a member to a tracker.

        level: 'developer' or 'master'; any other value raises KeyError.
        """
        tracker = cls.getTracker(project_path)
        levels = {
            'developer': gitlab.DEVELOPER_ACCESS,
            'master': gitlab.MASTER_ACCESS
        }
        member = tracker.members.create(
            {'user_id': user_id, 'access_level': levels[level]})
        return member

    @classmethod
    def removeMember(cls, project_path, user_id):
        """Remove a member from the tracker."""
        tracker = cls.getTracker(project_path)
        member = tracker.members.get(user_id)
        member.delete()
        return None

    @classmethod
    def getTickets(cls, project_path):
        """Get all the tickets from a given project path."""
        tracker = cls.getTracker(project_path)
        issues = tracker.issues.list(all=True)
        return issues

    @classmethod
    def getTicket(cls, project_path, ticket_id):
        """Get details from a ticket given a project_path and ticket_id."""
        tracker = cls.getTracker(project_path)
        issue = tracker.issues.get(ticket_id)
        return issue

    @classmethod
    def getTicketDiscussion(cls, project_path, ticket_id):
        """Get the discussion notes associated to a ticket."""
        tracker = cls.getTracker(project_path)
        issue = tracker.issues.get(ticket_id)
        return issue.notes.list(all=True)

    @classmethod
    def addTicketDiscussion(cls, project_path, ticket_id,
                            discussion_id, user_id, body):
        """Add a comment to an existing discussion, posted as user_id."""
        tracker = cls.getTracker(project_path, user_id)
        issue = tracker.issues.get(ticket_id)
        discussion = issue.discussions.get(discussion_id)
        note = discussion.notes.create({"body": body})
        return note

    @classmethod
    def createTicketDiscussion(cls, project_path, ticket_id, user_id, body):
        """Start a new discussion thread on the ticket, posted as user_id.

        FIX: docstring previously duplicated addTicketDiscussion's.
        """
        tracker = cls.getTracker(project_path, user_id)
        issue = tracker.issues.get(ticket_id)
        discussion = issue.discussions.create({"body": body})
        return discussion

    @classmethod
    def getUserByExternalId(cls, provider, external_id):
        """Get a user by external_id.

        NOTE(review): raises IndexError when no user matches.
        """
        git = gitClient()
        user = git.users.list(
            query_parameters={
                "extern_uid": external_id,
                "provider": provider}
        )[0]
        return user

    @classmethod
    def getUserByUsername(cls, username):
        """Get a user by username (FIX: docstring said "by email").

        NOTE(review): raises IndexError when no user matches.
        """
        git = gitClient()
        user = git.users.list(username=username)[0]
        return user

    @classmethod
    def getUserById(cls, user_id):
        """Get a user by id."""
        git = gitClient()
        user = git.users.get(user_id)
        return user

    @classmethod
    def createTicket(cls,
                     project_path,
                     from_user,
                     to_user,
                     subject,
                     body,
                     labels=None
                     ):
        """Create a ticket on the given project path, posted as from_user.

        FIX: labels defaulted to a shared mutable list ([]); it now
        defaults to None (backward compatible — the argument was unused).
        NOTE(review): to_user and labels are currently ignored here.
        """
        tracker = cls.getTracker(project_path, from_user)
        ticket = tracker.issues.create(
            {"title": subject, "description": body})
        return ticket

    @classmethod
    def removeTicket(cls,
                     project_path, ticket_id):
        """Remove a ticket at the given project path."""
        tracker = cls.getTracker(project_path)
        issue = tracker.issues.get(ticket_id)
        issue.delete()

    @classmethod
    def closeTicket(cls,
                    project_path, ticket_id):
        """Close a ticket at a given project path."""
        tracker = cls.getTracker(project_path)
        issue = tracker.issues.get(ticket_id)
        issue.state_event = 'close'
        issue.save()

    @classmethod
    def reopenTicket(cls,
                     project_path, ticket_id):
        """Reopen a ticket at a given project path."""
        tracker = cls.getTracker(project_path)
        issue = tracker.issues.get(ticket_id)
        issue.state_event = 'reopen'
        issue.save()

    @classmethod
    def subscribeTicket(
            cls, project_path, ticket_id, user_id
    ):
        """Subscribe a user to the ticket."""
        tracker = cls.getTracker(project_path, user_id)
        issue = tracker.issues.get(ticket_id)
        issue.subscribe()

    @classmethod
    def unsubscribeTicket(cls, project_path, ticket_id, user_id):
        """Unsubscribe a user from the ticket."""
        tracker = cls.getTracker(project_path, user_id)
        issue = tracker.issues.get(ticket_id)
        issue.unsubscribe()
|
#!/usr/bin/python3
import datetime
import inquirer
import requests
import re
import csv
import os
import json
# ZupIT open-source repositories to collect insights for.
repositories = [
    "beagle",
    "beagle-web-react",
    "beagle-web-core",
    "beagle-web-angular",
    "charlescd",
    "charlescd-docs",
    "horusec",
    "horusec-engine-docs",
    "ritchie-cli",
    "ritchie-formulas",
    "ritchie-formulas-demo"
]
def run(token):
    """Collect traffic/clone/contributor/star metrics for every repository
    in `repositories` and write them to a timestamped CSV.

    token: a GitHub personal access token with repo read access.
    FIX: the four GET calls and six near-identical try/except fallback
    blocks were collapsed into two small helpers.
    """
    insights = []
    headers = {
        "Accept": "application/vnd.github.v3+json",
        "Authorization": f"token {token}",
    }

    def fetch(url):
        # One GET + JSON decode; all endpoints share the same headers.
        return requests.get(url=url, headers=headers).json()

    def stat(payload, key):
        # GitHub returns an error object instead of stats when the token
        # lacks access; fall back to "-" exactly as the old per-field
        # try/except blocks did.
        try:
            return payload[key]
        except (IndexError, KeyError):
            return "-"

    for repository in repositories:
        repo_url = f"https://api.github.com/repos/ZupIT/{repository}"
        print(f"🐙 Getting insights for ZupIT's \033[36m{repository}\033[0m repository.")
        traffic = fetch(repo_url + "/traffic/views")
        clones = fetch(repo_url + "/traffic/clones")
        contributors = fetch(repo_url + "/contributors")
        repo_stats = fetch(repo_url)
        insights.append(
            {
                "repo": repository,
                "views": stat(traffic, "count"),
                "uniques": stat(traffic, "uniques"),
                "clones": stat(clones, "count"),
                "contributors": len(contributors),
                "contributors_list": contributors,
                "forks": stat(repo_stats, "forks_count"),
                "stars": stat(repo_stats, "stargazers_count"),
                "watchers": stat(repo_stats, "subscribers_count"),
            }
        )
    create_csv_file(insights)
def get_repositories(url, headers):
    """Return repository names for url, following GitHub pagination by
    recursing into the `next` link (later pages land first in the list,
    matching the original accumulation order)."""
    names = []
    response = requests.get(url=url, headers=headers)
    if "next" in response.links:
        names += get_repositories(response.links["next"]["url"], headers)
    names.extend(entry["name"] for entry in response.json())
    return names
def create_csv_file(insights):
    """Write one CSV row per repository insight to a timestamped file in
    the current directory, then announce the filename.

    FIX: the file was previously opened twice (once in 'w+' for the
    header, again in 'a+' for the rows) and the final message printed a
    literal placeholder instead of the generated filename.
    """
    timestamp = datetime.datetime.now().strftime("%m-%d-%Y-%Hh%M")
    filename = "zup-insights-" + timestamp + ".csv"
    header = ["Repository", "Views (14d)", "Uniques (14d)", "Clones (14d)",
              "Contributors", "Forks", "Stars", "Watchers"]
    with open(filename, 'w+', newline='') as file:
        writer = csv.writer(file)
        writer.writerow(header)
        for insight in insights:
            writer.writerow([insight["repo"], insight["views"],
                             insight["uniques"], insight["clones"],
                             insight["contributors"], insight["forks"],
                             insight["stars"], insight["watchers"]])
    print(f"\n\033[1m✅ Successfully generated \033[4m{filename}\033[0m\033[1m file for ZupIT's repositories\033[0m")
|
"""
This service monitors
"""
import asyncio
import json
import logging
import time
from asyncio_redis import Connection as RedisConnection
from collections import namedtuple
from importlib import import_module
from .utils import (
DEFAULT_REDIS_KEY,
extract_package,
)
LOG = logging.getLogger(__name__)
# Snapshot of the queue: (expired_packages, highest_priority_package).
QueueUpdates = namedtuple(
    'QueueUpdates', ['expired_packages', 'highest_priority_package'])
# Very small initial timeout so we read the queue
# almost immediately after startup.
_INITIAL_TIMEOUT = 0.01
@asyncio.coroutine
def _get_queue_updates(connection, now, redis_key):
    """Scan the sorted set at redis_key and classify its entries.

    NOTE(review): results.items() assumes zscan's fetchall() yields a
    dict-like object — confirm against the asyncio_redis version in use.
    """
    cursor = yield from connection.zscan(redis_key)
    results = yield from cursor.fetchall()
    return _entries_to_queue_updates(results.items(), now=now)
def _entries_to_queue_updates(entries, now):
    """Split raw queue entries into expired packages plus the first live
    (highest-priority) package.

    Iteration stops at the first non-expired package in sorted order:
    later entries need no inspection once a valid one is found.
    """
    expired = []
    top = None
    ordered = sorted(extract_package(entry) for entry in entries)
    for candidate in ordered:
        if not candidate.expired(now):
            top = candidate
            break
        expired.append(candidate)
    return QueueUpdates(
        expired_packages=expired,
        highest_priority_package=top)
def _time_until_package_expires(package, now):
if package is not None:
return package.ttl(now)
else:
return None
@asyncio.coroutine
def _get_subscription(redis_host, redis_port, redis_key):
    """Open a Redis connection and subscribe it to redis_key.

    Uses its own connection, separate from the one in _main_loop —
    presumably because a subscribed connection cannot issue other
    commands; confirm against asyncio_redis docs.
    """
    connection = yield from RedisConnection.create(
        host=redis_host, port=redis_port)
    subscriber = yield from connection.start_subscribe()
    yield from subscriber.subscribe([redis_key])
    return subscriber
@asyncio.coroutine
def _remove_expire_packages(connection, packages, redis_key):
    """Remove the given packages' raw entries from the sorted set
    (no-op for an empty list).

    NOTE(review): the name is missing a 'd' (expired) — renaming would
    require touching the caller too.
    """
    if packages:
        LOG.info('Removing packages from "{key}": {packages}'.format(
            key=redis_key, packages=packages))
        yield from asyncio.wait([connection.zrem(
            redis_key, [package.raw_entry for package in packages])])
@asyncio.coroutine
def _main_loop(consumer_function, consumer_function_kwargs, redis_host=None,
               redis_port=6379, redis_key=DEFAULT_REDIS_KEY):
    """Core service loop.

    Waits for a queue-change notification (or for the current top package
    to expire), prunes expired entries, then invokes consumer_function
    with the new highest-priority package.
    """
    LOG.info('Connecting to {host} on port {port}'.format(
        host=redis_host, port=redis_port))
    connection = yield from RedisConnection.create(
        host=redis_host, port=redis_port)
    LOG.info('Subscribing to key "{key}"'.format(key=redis_key))
    subscriber = yield from _get_subscription(
        redis_host=redis_host, redis_port=redis_port, redis_key=redis_key)
    timeout = _INITIAL_TIMEOUT
    consumer_future = None
    while True:
        try:
            LOG.debug('Waiting for a published message with timeout of '
                      '{}'.format(timeout))
            message = yield from asyncio.wait_for(
                subscriber.next_published(), timeout)
            LOG.debug('Notified of new message: {}'.format(message))
        except asyncio.TimeoutError:
            LOG.debug('Timed out after {} seconds'.format(timeout))
        # Cancel the currently running consumer as soon as possible
        if consumer_future is not None:
            LOG.debug('Cancelling future')
            consumer_future.cancel()
        now = time.time()
        queue_updates = yield from _get_queue_updates(
            connection, now, redis_key)
        yield from _remove_expire_packages(
            connection=connection,
            packages=queue_updates.expired_packages,
            redis_key=redis_key)
        highest_priority_package = queue_updates.highest_priority_package
        # Sleep only until the current top package expires (None = forever).
        timeout = _time_until_package_expires(highest_priority_package, now)
        if asyncio.iscoroutinefunction(consumer_function):
            # FIX: asyncio.async() was removed in Python 3.10;
            # ensure_future() is the drop-in replacement (3.4.4+).
            consumer_future = asyncio.ensure_future(consumer_function(
                highest_priority_package, **consumer_function_kwargs))
        else:
            consumer_function(
                highest_priority_package, **consumer_function_kwargs)
            consumer_future = None
def service(consumer_function_path: 'The import path of the function',
            consumer_function_kwargs: 'Arguments passed to the function'=None,
            redis_host='localhost', redis_port=6379):
    """
    Run the kamikaze service and invoke the specified python function every
    time a new message comes to the top of the queue

    consumer_function_kwargs, when given, is a JSON object string that is
    decoded and passed to the consumer function as keyword arguments.
    """
    # Resolve "pkg.module.func" into the callable it names.
    module_name, function_name = consumer_function_path.rsplit('.', 1)
    module = import_module(module_name)
    consumer_function = getattr(module, function_name)
    if consumer_function_kwargs:
        consumer_function_kwargs = json.loads(consumer_function_kwargs)
    else:
        consumer_function_kwargs = {}
    loop = asyncio.get_event_loop()
    # Blocks forever: _main_loop only exits via exception/cancellation.
    loop.run_until_complete(_main_loop(
        consumer_function=consumer_function,
        consumer_function_kwargs=consumer_function_kwargs,
        redis_host=redis_host, redis_port=redis_port))
|
# Recaman Object:
# Returns a Recaman object with solved sequence up to n,
# where n is the single constructor input argument.
#
# Tested with python version 3.6.6
class Recaman(object):
    """Recaman sequence generator.

    Builds the first n terms on construction; getSequence() returns them
    in generation order.
    """

    def __init__(self, n):
        """n: number of terms to generate."""
        super(Recaman, self).__init__()
        self.n = n
        # Maps term value -> step at which it was produced; dict insertion
        # order preserves generation order for getSequence().
        self.sequence = {}
        self.solve(0, 0)

    def solve(self, i, step):
        """Generate terms starting from value i at the given step.

        FIX: rewritten iteratively — the recursive version hit Python's
        recursion limit for n near 1000. Also dropped the class-level
        `sequence = {}` (a shared mutable class attribute that __init__
        always shadowed with an instance dict).
        """
        while len(self.sequence) < self.n:
            backStep = i - step
            # Step backwards when non-negative and unseen, else forwards.
            if backStep < 0 or backStep in self.sequence:
                i = i + step
            else:
                i = backStep
            self.sequence[i] = step
            step += 1

    def getSequence(self):
        """Return the generated terms in order."""
        return list(self.sequence.keys())
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from .models import *
from .forms import *
from django.contrib import messages
# Create your views here.
def project_list(request):
    """Public list of published projects, oldest first."""
    projects = Project.objects.filter(published_date__lte=timezone.now()).order_by('published_date')
    return render(request, 'portfolio/project_list.html', {'projects':projects})
def project_detail(request, pk):
    """Public detail page for one project (404 when missing)."""
    project = get_object_or_404(Project, pk=pk)
    return render(request, 'portfolio/project_detail.html', {'project':project})
@login_required
def project_new(request):
    """Create a project owned by the current user."""
    if request.method == "POST":
        form=ProjectForm(request.POST)
        if form.is_valid():
            project=form.save(commit=False)
            project.author = request.user
            # published_date is deliberately left unset here: new projects
            # start as drafts (see project_publish).
            project.save()
            return redirect('project_detail', pk=project.pk)
    else:
        form = ProjectForm()
    return render(request, 'portfolio/project_edit.html', {'form':form})
@login_required
def project_edit(request, pk):
    """Edit an existing project; saving also stamps published_date."""
    project = get_object_or_404(Project, pk=pk)
    if request.method == "POST":
        form=ProjectForm(request.POST,instance=project)
        if form.is_valid():
            project=form.save(commit=False)
            project.author = request.user
            project.published_date = timezone.now()
            project.save()
            return redirect('project_detail', pk=project.pk)
    else:
        form=ProjectForm(instance=project)
    return render(request, 'portfolio/project_edit.html', {'form':form})
@login_required
def project_draft_list(request):
    """List unpublished projects (drafts), oldest created first."""
    projects= Project.objects.filter(published_date__isnull=True).order_by('created_date')
    return render(request, 'portfolio/project_draft_list.html', {'projects':projects})
@login_required
def project_publish(request,pk):
    """Publish a draft via the model's publish() helper."""
    project = get_object_or_404(Project, pk=pk)
    project.publish()
    return redirect('project_detail', pk=pk)
@login_required
def project_remove(request, pk):
    """Delete a project and return to the public list."""
    project = get_object_or_404(Project, pk=pk)
    project.delete()
    return redirect('project_list')
def add_comment_to_project(request,pk):
    """Attach a visitor comment to a project; shown only once approved."""
    # Note: the local is named `post` but actually holds a Project.
    post = get_object_or_404(Project, pk=pk)
    if request.method == "POST":
        form = CommentForm(request.POST)
        if form.is_valid():
            comment = form.save(commit=False)
            comment.post = post
            comment.save()
            messages.success(request,'Thanks :D ! Your response has been submitted. It will be displayed once approved by the author.')
            return redirect('project_detail', pk=pk)
    else:
        form=CommentForm()
    return render(request, 'portfolio/add_comment_to_project.html', {'form':form})
@login_required
def comment_approve(request, pk):
    """Mark a comment approved so it becomes visible."""
    comment = get_object_or_404(Comment,pk=pk)
    comment.approve()
    return redirect('project_detail', pk=comment.post.pk)
@login_required
def comment_remove(request, pk):
    """Delete a comment, then return to its project's detail page."""
    comment = get_object_or_404(Comment, pk=pk)
    post_pk = comment.post.pk
    comment.delete()
    return redirect('project_detail', pk=post_pk)
def about_me(request):
    """Static about page."""
    return render(request,'portfolio/about_me.html')
#def thanks(request,pk):
# project = get_object_or_404(Project, pk=pk)
# return render(request, 'portfolio/thanks.html', {'project':project})
|
# Library for generating random data
import random
# Library for plotting
import matplotlib.pyplot as plt
# Generate one random even number in [10, 100) -> randint, randrange
print(random.randrange(10,100,2))
# Shuffle a list in place
lista=[1,2,3,4,5,6,7,8,9,10]
print('Lista original', lista)
random.shuffle(lista)
print('Lista mixeada',lista)
# Build a Gaussian bell curve
campana=[random.gauss(1,0.5) for i in range(1000)] # sample the data
plt.hist(campana, bins=15) # build the histogram
plt.show()
import random
def contains(listx, x):
    """Return True when x occurs in listx.

    FIX: the manual scan with a flag reimplemented the built-in
    membership operator.
    """
    return x in listx
def deal_cards():
    """Deal a random hand of 13 distinct cards.

    Each card is a suit glyph ('$', '&', '*' or '#') followed by a rank
    ('A', '2'..'10', 'J', 'Q', 'K').

    FIX: draws the 13 distinct cards with random.sample instead of
    rejection-sampling random.randint until no duplicate appears.
    """
    suits = ['$', '&', '*', '#']
    # Rank 1 is the Ace; ranks 11-13 are the face cards.
    faces = {1: 'A', 11: 'J', 12: 'Q', 13: 'K'}
    hand = []
    for card in random.sample(range(52), 13):
        rank = card % 13 + 1
        hand.append(suits[card // 13] + faces.get(rank, str(rank)))
    return hand
def main():
    """Deal one hand and print it."""
    listx=deal_cards()
    print(listx)
if __name__=='__main__':
    main()
|
# -*- coding:utf-8 -*-
from django.shortcuts import redirect, render
from automechanic.client.forms import ClientForm, DeleteForm
from automechanic.client.models import Client
from django.contrib import messages
from automechanic.messages import success_messages, error_messages
from django.views.decorators.http import require_http_methods
from automechanic import templates
from django.core.urlresolvers import reverse
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.decorators import login_required
@login_required
@require_http_methods(["GET"])
def list_all(request):
    """List every client, with a bulk-delete form posting to client.delete."""
    context = dict()
    clients = Client.objects.all()
    context['clients'] = clients
    context['form'] = DeleteForm()
    context['action'] = reverse('client.delete')
    return render(request, templates.CLIENT_LIST, context)
@login_required
@require_http_methods(["GET"])
def add(request):
    """Render an empty client form posting to client.save."""
    context = dict()
    context['form'] = ClientForm()
    context['action'] = reverse('client.save')
    return render(request, templates.CLIENT_FORM, context)
@login_required
@require_http_methods(["POST"])
def save(request):
    """Persist a new client; on validation errors re-render the form."""
    context = dict()
    form = ClientForm(request.POST)
    if form.is_valid():
        form.save()
        messages.add_message(request, messages.SUCCESS, success_messages.get('success_insert'))
        return redirect('client.list')
    context['form'] = form
    return render(request, templates.CLIENT_FORM, context)
@login_required
@require_http_methods(["GET"])
def edit(request, client_id):
    """Render the edit form for client_id; redirect on an unknown id."""
    try:
        context = dict()
        client = Client.objects.get(pk=client_id)
        context['form'] = ClientForm(instance=client)
        context['action'] = reverse('client.update', args=[client_id])
        return render(request, templates.CLIENT_FORM, context)
    except ObjectDoesNotExist:
        messages.add_message(request, messages.ERROR, error_messages.get('invalid') % 'Cliente')
        return redirect('client.list')
@login_required
@require_http_methods(["POST"])
def update(request, client_id):
    """Apply posted changes to an existing client.

    Redirects to the list on success or on an unknown id; re-renders the
    form with its errors otherwise.
    """
    try:
        client = Client.objects.get(pk=client_id)
        form = ClientForm(request.POST, instance=client)
        if form.is_valid():
            # FIX: was form.save(request) — ModelForm.save() takes only
            # `commit`; passing the request only "worked" because any
            # truthy value means commit=True. Now consistent with save().
            form.save()
            messages.add_message(request, messages.SUCCESS, success_messages.get('success_edit'))
            return redirect('client.list')
        context = {'form': form}
        return render(request, templates.CLIENT_FORM, context)
    except ObjectDoesNotExist:
        messages.add_message(request, messages.ERROR, error_messages.get('invalid') % 'Cliente')
        return redirect('client.list')
@login_required
@require_http_methods(["POST"])
def delete(request):
    """Bulk-delete the clients whose ids were posted as 'ids'."""
    ids = request.POST.getlist('ids')
    clients = Client.objects.filter(id__in=ids)
    clients.delete()
    messages.add_message(request, messages.SUCCESS, success_messages.get('success_remove'))
    return redirect('client.list')
|
import re
from django.core.exceptions import ValidationError
def NotesValidator(input):
    """Validate a note-sequence string (English notation, no rests).

    Raises ValidationError for non-string input or disallowed characters.
    TODO: make this validator 'stronger' (try converting the input into a
    NoteSeq and see whether that works).
    """
    # FIX: the old `str(input)` try/except never failed for ordinary
    # objects, so non-strings slipped through and the `in` checks below
    # then raised TypeError instead of ValidationError.
    if not isinstance(input, str):
        raise ValidationError("Wrong datatype")
    if 'h' in input or 'H' in input:
        raise ValidationError("Use english notation!")
    elif 'r' in input or 'R' in input:
        raise ValidationError("Rests not allowed!")
    elif re.search(r"[^a-gA-G#,' ]", input) is not None:
        raise ValidationError("Wrong notation")
def KeyValidator(input):
    """Placeholder: will validate a user-supplied key; accepts anything."""
    # Not yet implemented (will be used to validate key given by the user)
    pass
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-07 13:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 1.10.5): adds the Additional model and two
    adjustment-total fields on Property. Applied migrations should not be
    hand-edited."""
    dependencies = [
        ('voa', '0002_adjustment'),
    ]
    operations = [
        migrations.CreateModel(
            name='Additional',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('other_oa_description', models.CharField(blank=True, max_length=255, null=True)),
                ('size', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
                ('price', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
                ('value', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
            ],
        ),
        migrations.AddField(
            model_name='property',
            name='adjustement_total',
            field=models.DecimalField(decimal_places=2, max_digits=16, null=True),
        ),
        migrations.AddField(
            model_name='property',
            name='adjustement_total_before',
            field=models.DecimalField(decimal_places=2, max_digits=16, null=True),
        ),
    ]
|
"""
Tests CQGI functionality
Currently, this includes:
Parsing a script, and detecting its available variables
Altering the values at runtime
defining a build_object function to return results
"""
from cadquery import cqgi
from tests import BaseTest
import textwrap
TESTSCRIPT = textwrap.dedent(
"""
height=2.0
width=3.0
(a,b) = (1.0,1.0)
foo="bar"
result = "%s|%s|%s|%s" % ( str(height) , str(width) , foo , str(a) )
show_object(result)
"""
)
TEST_DEBUG_SCRIPT = textwrap.dedent(
"""
height=2.0
width=3.0
(a,b) = (1.0,1.0)
foo="bar"
debug(foo, { "color": 'yellow' } )
result = "%s|%s|%s|%s" % ( str(height) , str(width) , foo , str(a) )
show_object(result)
debug(height )
"""
)
class TestCQGI(BaseTest):
    """Tests for the CQGI (CadQuery Gateway Interface) script model.

    Uses `assertEqual`/`assertIsInstance`/`assertRaises` instead of the
    original `assertTrue(a == b)` pattern so failures report both values.
    """

    def test_parser(self):
        """Top-level assignments in the script are discovered as parameters."""
        model = cqgi.CQModel(TESTSCRIPT)
        metadata = model.metadata
        self.assertEqual(
            set(metadata.parameters.keys()), {"height", "width", "a", "b", "foo"}
        )

    def test_build_with_debug(self):
        """debug() calls are collected with their shapes and options."""
        model = cqgi.CQModel(TEST_DEBUG_SCRIPT)
        result = model.build()
        debugItems = result.debugObjects
        self.assertEqual(2, len(debugItems))
        self.assertEqual("bar", debugItems[0].shape)
        self.assertEqual({"color": "yellow"}, debugItems[0].options)
        self.assertEqual(2.0, debugItems[1].shape)
        self.assertEqual({}, debugItems[1].options)

    def test_build_with_empty_params(self):
        """Building without overrides uses the script's default values."""
        model = cqgi.CQModel(TESTSCRIPT)
        result = model.build()
        self.assertTrue(result.success)
        self.assertEqual(1, len(result.results))
        self.assertEqual("2.0|3.0|bar|1.0", result.results[0].shape)

    def test_build_with_different_params(self):
        """A supplied parameter value overrides the script default."""
        model = cqgi.CQModel(TESTSCRIPT)
        result = model.build({"height": 3.0})
        self.assertEqual("3.0|3.0|bar|1.0", result.results[0].shape)

    def test_describe_parameters(self):
        """describe_parameter() attaches a description to a parameter."""
        script = textwrap.dedent(
            """
            a = 2.0
            describe_parameter(a,'FirstLetter')
            """
        )
        model = cqgi.CQModel(script)
        a_param = model.metadata.parameters["a"]
        self.assertEqual(2.0, a_param.default_value)
        self.assertEqual("FirstLetter", a_param.desc)
        self.assertEqual(cqgi.NumberParameterType, a_param.varType)

    def test_describe_parameter_invalid_doesnt_fail_script(self):
        """A non-string description expression is ignored, not fatal."""
        script = textwrap.dedent(
            """
            a = 2.0
            describe_parameter(a, 2 - 1 )
            """
        )
        model = cqgi.CQModel(script)
        a_param = model.metadata.parameters["a"]
        self.assertEqual("a", a_param.name)

    def test_build_with_exception(self):
        """Exceptions raised by the script are captured on the result."""
        badscript = textwrap.dedent(
            """
            raise ValueError("ERROR")
            """
        )
        model = cqgi.CQModel(badscript)
        result = model.build({})
        self.assertFalse(result.success)
        self.assertIsNotNone(result.exception)
        self.assertEqual("ERROR", result.exception.args[0])

    def test_that_invalid_syntax_in_script_fails_immediately(self):
        """A script that does not compile raises SyntaxError at parse time."""
        badscript = textwrap.dedent(
            """
            this doesn't even compile
            """
        )
        with self.assertRaises(SyntaxError):
            cqgi.CQModel(badscript)

    def test_that_two_results_are_returned(self):
        """Each show_object() call contributes one result, in order."""
        script = textwrap.dedent(
            """
            h = 1
            show_object(h)
            h = 2
            show_object(h)
            """
        )
        model = cqgi.CQModel(script)
        result = model.build({})
        self.assertEqual(2, len(result.results))
        self.assertEqual(1, result.results[0].shape)
        self.assertEqual(2, result.results[1].shape)

    def test_that_assinging_number_to_string_works(self):
        """A numeric override of a string parameter is coerced to a string."""
        script = textwrap.dedent(
            """
            h = "this is a string"
            show_object(h)
            """
        )
        result = cqgi.parse(script).build({"h": 33.33})
        self.assertEqual(result.results[0].shape, "33.33")

    def test_that_assigning_string_to_number_fails(self):
        """A string override of a numeric parameter is rejected."""
        script = textwrap.dedent(
            """
            h = 20.0
            show_object(h)
            """
        )
        result = cqgi.parse(script).build({"h": "a string"})
        self.assertIsInstance(result.exception, cqgi.InvalidParameterError)

    def test_that_assigning_unknown_var_fails(self):
        """Overriding a parameter the script does not define is rejected."""
        script = textwrap.dedent(
            """
            h = 20.0
            show_object(h)
            """
        )
        result = cqgi.parse(script).build({"w": "var is not there"})
        self.assertIsInstance(result.exception, cqgi.InvalidParameterError)

    def test_that_cq_objects_are_visible(self):
        """The cadquery module is available inside the executed script."""
        script = textwrap.dedent(
            """
            r = cadquery.Workplane('XY').box(1,2,3)
            show_object(r)
            """
        )
        result = cqgi.parse(script).build()
        self.assertTrue(result.success)
        self.assertIsNotNone(result.first_result)

    def test_that_options_can_be_passed(self):
        """show_object(options=...) attaches options to the result."""
        script = textwrap.dedent(
            """
            r = cadquery.Workplane('XY').box(1,2,3)
            show_object(r, options={"rgba":(128, 255, 128, 0.0)})
            """
        )
        result = cqgi.parse(script).build()
        self.assertTrue(result.success)
        self.assertIsNotNone(result.first_result.options)

    def test_setting_boolean_variable(self):
        """A boolean parameter can be overridden at build time."""
        script = textwrap.dedent(
            """
            h = True
            show_object( "*%s*" % str(h) )
            """
        )
        result = cqgi.parse(script).build({"h": False})
        self.assertTrue(result.success)
        self.assertEqual(result.first_result.shape, "*False*")

    def test_that_only_top_level_vars_are_detected(self):
        """Assignments inside nested scopes are not treated as parameters."""
        script = textwrap.dedent(
            """
            h = 1.0
            w = 2.0

            def do_stuff():
                x = 1
                y = 2

            show_object( "result" )
            """
        )
        model = cqgi.parse(script)
        self.assertEqual(2, len(model.metadata.parameters))
|
""" Super class for the model objects """
import numpy as np
class ModelLoader(object):
    """Base class for all ModelLoader objects.

    Holds the model information for converting trajectories into data and
    provides the scaffolding for computing energies from epsilon parameters.
    External code should interact only through the methods of this class,
    never by reaching into the wrapped ``model`` object.

    Attributes:
        model (object): Model-specific object, typically built from some
            external package; its attributes are implementation details.
        epsilons (Array): Current values of the model's adjustable parameters.
        beta (float): Value of 1/kT for the system.

    Example:
        ml = ModelLoader()
        data = ml.load_data(fname)
        hepsilon = ml.get_potentials_epsilons(data)
        Potential_Energy = hepsilon(ModelLoader.epsilons)
    """

    def __init__(self):
        """Set up default (trivial) model state; subclasses extend this."""
        self.GAS_CONSTANT_KJ_MOL = 0.0083144621  # kJ/(mol*K)
        # empty placeholder object standing in for a real model
        self.model = type('temp', (object,), {})()
        self.epsilons = []
        self.beta = 1.0
        self.temperature = 1.0 / (self.beta * self.GAS_CONSTANT_KJ_MOL)

    def load_data(self, fname):
        """Load a data file and format it for later use.

        Args:
            fname (string): Name of the file to load.

        Returns:
            Array of floats interpreted by get_potentials_epsilon().
        """
        return np.loadtxt(fname)

    def get_model(self):
        """Return the wrapped model object."""
        return self.model

    def get_epsilons(self):
        """Return the current epsilon parameter values."""
        return self.epsilons

    def set_temperature(self, temp):
        """Set the temperature (Kelvin) and keep beta consistent with it.

        Args:
            temp (float): Temperature in Kelvin.
        """
        self.temperature = temp
        self.beta = self._convert_temperature_to_beta(temp)

    def set_beta(self, besta):
        """Set beta (1/kT) and keep the temperature consistent with it.

        Args:
            besta (float): New value of 1/kT.
        """
        self.beta = besta
        self.temperature = self._convert_beta_to_temperature(besta)

    def _convert_beta_to_temperature(self, beta):
        # T = 1 / (beta * R)
        return 1.0 / (beta * self.GAS_CONSTANT_KJ_MOL)

    def _convert_temperature_to_beta(self, temperature):
        # beta = 1 / (T * R)
        return 1.0 / (temperature * self.GAS_CONSTANT_KJ_MOL)

    def get_potentials_epsilon(self, data):
        """Return PotentialEnergy(epsilons) for the given trajectory data.

        Subclasses interpret ``data`` themselves; this base implementation
        returns a function that always reports zero energy.

        Args:
            data (array): First index is the frame, the rest are coordinates.

        Returns:
            hepsilon (callable): maps a list of epsilons to an array of
            per-frame potential energies (here: a single 0.0).
        """
        def hepsilon(x):
            return np.array([0.0])

        return hepsilon
|
import os

# Exploration notes on the os/os.path API (kept for reference):
# print(os.getcwd)               # current working directory (function object!)
# print(os.path.curdir)
# print(os.path.realpath('.'))
# print(os.path.exists())        # check whether a given file exists
# print('|'.join(['a.png','b.png','c.png']))

# Print the extension part of a simple "name.ext" filename.
extension = 'abc.png'.rsplit('.', 1)[1]
print(extension)
class animal:
    """Demonstrates that double-underscore methods are name-mangled."""

    def sleep(self):
        print("睡")

    def __eat(self):
        # Name-mangled to _animal__eat; subclasses cannot call it as __eat.
        print("吃")


class dog(animal):
    """dog defines no __eat method of its own."""
    pass


d = dog()
d.sleep()
# d.__eat()  # would fail: __eat is not accessible under that name
from random import randint
class Player:
    """Player character stats for the text RPG."""
    hp = 5        # health
    mana = 5      # mana
    max_hp = 5    # maximum health
    max_mana = 5  # maximum mana
    pw = 2        # melee attack power
    lvl = 0       # level
    sp = 5        # skill points
    xp = 0        # experience
    max_xp = 10   # experience required for the next level

    # Clamp to the maxima (runs once, when the class body is executed).
    hp = min(hp, max_hp)
    mana = min(mana, max_mana)
class Enemy:
    """Basic opponent stats."""
    hp = 1  # health
    pw = 1  # attack power
class Items:
    """Consumable inventory counters."""
    health_potion = 0  # health potions held
    mana_potion = 0    # mana potions held
    scroll = 0         # magic scrolls held
class Skills:
    """Spell levels, effects and mana costs."""
    # "Fireball" spell
    fbsLevel = 0     # spell level
    fbsDamage = 3    # damage per cast
    fbsManaCost = 4  # mana consumed per cast
    # "Heal" spell
    hsLevel = 0      # spell level
    hsHeal = 2       # health restored per cast
    hsManaCost = 3   # mana consumed per cast
|
# yukicoder No.436 ccw 2020/01/30
# Reads a string from stdin and scans it for the two-character substring 'cw'.
s = list(input())
n = len(s)
for i in range(n - 1):
    if s[i] + s[i + 1] == 'cw':
        # Distance of this occurrence to the nearer end of the string.
        # NOTE(review): 'ans' keeps only the LAST occurrence's value, and if
        # no 'cw' exists the print below raises NameError — presumably the
        # judge's inputs always contain 'cw'; verify against the task.
        ans = min(n - (i + 1), i)
print(ans)
import random

# Draw 20 random values in [1, 999] and keep the first three that exceed
# the threshold.
mass = [random.randrange(1, 1000) for _ in range(20)]
enter_number = 500

max_numbers = []
for value in mass:
    if len(max_numbers) == 3:
        break
    if value > enter_number:
        max_numbers.append(value)
print(max_numbers)
import cs50  # NOTE(review): imported but unused; input() is used below instead

# CS50 "Mario"-style pyramid: prompt for a height and print rows of '#'.
height = int(input("Height: "))
if height > 0 and height < 23:
    x = " "           # padding character
    spaces = height   # leading padding for the first row
    hashes = 2        # hashes in the first row
    print(hashes)     # NOTE(review): looks like leftover debug output — confirm
    for i in range(0, height):
        # print spaces-1 pad characters, then the hashes for this row
        for s in range(1, spaces):
            print(x, end="")
        for h in range(0, hashes):
            print("#", end="")
        # each row: one less pad, one more hash
        spaces -= 1
        hashes += 1
        print()
"""Storage-related exceptions."""
from ..core.error import BaseError
class StorageError(BaseError):
    """Base class for all storage-related errors."""


class StorageNotFoundError(StorageError):
    """Requested record was not found in storage."""


class StorageDuplicateError(StorageError):
    """A duplicate record was found in storage."""


class StorageSearchError(StorageError):
    """General exception raised during a record search."""
|
def contribs(x):
    """Yield (neighbor, contribution) pairs for one PageRank node.

    ``x`` is a pair (node, (links_string, rank)); the node's rank is split
    evenly across its space-separated out-links. Yields nothing when the
    node has no out-links.
    """
    neighbors = x[1][0].split()
    degree = len(neighbors)
    for neighbor in neighbors:
        yield neighbor, x[1][1] / degree
# Toy PageRank driver for Spark (Python 2 — note the print statement below).
rank_raw = [('a',.25), ('b', .25), ('c', .25), ('d', .25)] # initial ranks
link_raw = [('a', 'b c d'), ('b', 'a c'), ('c', 'd'), ('d', 'a b')]
# 'sc' is the SparkContext, assumed to be provided by the pyspark shell/notebook.
ranks = sc.parallelize(rank_raw)
links = sc.parallelize(link_raw)
links.cache()  # links are reused on every iteration
for i in range(10):
    # your code goes here:
    # update ranks in each iteration
    print ranks.collect()
|
import labs.day9.lsystem as lsys
import labs.day9.draw as draw

# Stochastic L-system production rules: "1" rewrites to one of several
# strings with the listed probabilities (-1 presumably marks the catch-all
# remainder branch — TODO confirm against lsys.Rule); "0" always rewrites
# to "1[0]0" (a branching plant pattern).
lsys_rule = lsys.Rule({
    "1": [
        ["11", 0.8],
        ["1", 0.08],
        ["111", 0.008],
        ["", -1]
    ],
    "0": "1[0]0"
})
#
# Maps each symbol of the generated sequence to a turtle drawing action.
draw_rule = {
    "1": draw.one_rule,
    "0": draw.zero_rule,
    "[": draw.left_bracket_rule,
    "]": draw.right_bracket_rule,
}
# Alternative rule set (Sierpinski-like curve), kept for experimentation:
# lsys_rule = lsys.Rule({
#     "F": [
#         ["F-G+F+G-F", 0.9],
#         ["F+G-G-G+F", -1]
#     ],
#     "G": [
#         ["GG", 0.999],
#         ["G", -1]
#     ]
# })
# draw_rule = {
#     "F": draw.one_rule,
#     "G": draw.one_rule,
#     "-": draw.minus_rule,
#     "+": draw.plus_rule,
# }

# Grow and draw three plants along the bottom of the canvas, facing up.
for i in range(-400, 400, 300):
    draw.tl.penup()
    draw.tl.goto(i, -200)
    draw.tl.pendown()
    draw.tl.setheading(90)
    # sys = lsys.LSystem("FG-+", "F-G-G", lsys_rule)
    sys = lsys.LSystem("10[]", "0", lsys_rule)  # NOTE(review): shadows builtin 'sys'
    sys.revolveN(11)  # apply the rewrite rules 11 times
    draw.draw(sys.getSequence(), draw_rule)
draw.t.exitonclick()
|
#!/usr/bin/python3
import csv
import json
import os
IN_FILENAME_FOR_SNIPPETS = 'source.txt'
OUT_FILENAME_FOR_VSCODE = os.path.join('out', 'vscode', 'git-commit.json')
def to_vscode_snippet(body: str, prefix: str = None, description: str = None):
    """Convert one snippet body into a VS Code snippet entry.

    When ``prefix`` is omitted/empty it is derived from the body's first
    word (text before any '/'); when ``description`` is omitted/empty the
    body itself is used. The prefix is always lower-cased and the body is
    split into a list of lines, as VS Code expects.
    """
    derived_prefix = prefix or body.split(' ')[0].split('/')[0]
    return {
        'prefix': derived_prefix.lower(),
        'body': body.split('\n'),
        'description': description or body,
    }
def main():
    """Read the snippet source file and write a VS Code snippet JSON file."""
    with open(IN_FILENAME_FOR_SNIPPETS, newline='') as f:
        reader = csv.reader(f, delimiter=',', quotechar='"')
        rows = list(reader)
    # Key each snippet by its source text (first CSV column).
    snippets = {row[0]: to_vscode_snippet(row[0]) for row in rows}
    with open(OUT_FILENAME_FOR_VSCODE, mode='w') as f:
        json.dump(snippets, f, indent=2)


if __name__ == "__main__":
    main()
|
from src.HistData.Candle.Candle import Candle
class FileParser:
    """Parses candle (OHLC) history data from a ';'-separated text file.

    File format per line: DateTime Stamp; Bar OPEN Bid Quote;
    Bar HIGH Bid Quote; Bar LOW Bid Quote; Bar CLOSE Bid Quote
    """

    # Column positions within one line of the file.
    timeIndex = 0
    openIndex = 1
    highIndex = 2
    lowIndex = 3
    closeIndex = 4
    cellSeparator = ';'
    # Was 'rU': the 'U' flag was removed in Python 3.11 and raised ValueError
    # there; universal newlines are the default in text mode, so plain 'r'
    # behaves identically.
    fileOperation = 'r'

    def __init__(self, filename):
        self.filename = filename

    def extract_candles_data(self):
        """Return a list of Candle objects, one per line of the file.

        Raises:
            OSError: if the file cannot be opened.
            ValueError/IndexError: on malformed lines.
        """
        data = []
        # Context manager guarantees the handle is closed even if a line
        # fails to parse (the original leaked the handle on error).
        with open(self.filename, self.fileOperation) as f:
            for line in f:
                line_data = line.split(self.cellSeparator)
                candle = Candle(
                    line_data[self.timeIndex],
                    float(line_data[self.openIndex]),
                    float(line_data[self.highIndex]),
                    float(line_data[self.lowIndex]),
                    float(line_data[self.closeIndex])
                )
                data.append(candle)
        return data
|
# one 3
# two 3
# three 5
# four 4
# five 4
# six 3
# seven 5
# eight 5
# nine 4
# ten 3
# eleven 6
# twelve 6
# thirteen 8
# fourteen 8
# fifteen 7
# sixteen 7
# seventeen 9
# eighteen 8
# nineteen 8
# twenty 6
# thirty 6
# forty 5
# fifty 5
# sixty 5
# seventy 7
# eighty 6
# ninety 6
# hundred 7
# onethousand
# Project Euler 17 (Python 2: '/' below is integer division).
# Letter counts for "one".."nineteen"; index 0 is unused padding.
number_map = [0,3,3,5,4,4,3,5,5,4 ,3,6,6,8,8,7,7,9,8,8]
# Letter counts for the tens words "twenty".."ninety"; indexes 0-1 unused.
tens_map = [0,0,6,6,5,5,5,7,6,6]


def below_100(n):
    # Letter count of n in words, for 1 <= n <= 99.
    print "below 100 {}".format(n)  # NOTE(review): looks like leftover debug output
    if n < 20:
        return number_map[n]
    return number_map[n%10] + tens_map[n/10]


def letter_count(n):
    # Letter count of n written in British English (with "and"):
    # "hundred" = 7 letters, "hundredand" = 10, "onethousand" = 11.
    if n > 999:
        return 11  # only n == 1000 is expected here
    if n % 100 == 0:
        return number_map[n/100] + 7
    if n < 100:
        return below_100(n%100)
    return number_map[n/100] + 10 + below_100(n%100)


sum = 0  # NOTE(review): shadows the builtin 'sum'
for i in range(1,1001):
    # print("{}: {}".format(i, letter_count(i)))
    sum += letter_count(i)
print sum
|
import sys
sys.setrecursionlimit(2**20)
DEBUG = 0
def testRemove(string, target, remove, until):
copy = string.copy()
for i in remove[:until]:
copy[i - 1] = 0
if (DEBUG):
print(copy, string)
j = 0
for i in copy:
if (i == target[j]):
j += 1
if (j == len(target)):
return(1)
return(0)
def binSearch(string, target, remove, lo, hi):
    """Binary search (over indexes of ``remove``) for the boundary at which
    removing a prefix of ``remove`` stops leaving ``target`` as a
    subsequence of ``string``. Relies on monotonicity: if removing ``k``
    characters works, removing fewer also works.
    """
    # Upper-biased midpoint so the search makes progress when hi = lo + 1.
    mid = lo + (hi - lo + 1) // 2
    if (lo >= hi):
        # NOTE(review): returns mid (== lo) when the removal still works,
        # otherwise mid + 1 — presumably 1-based vs 0-based adjustment for
        # the printed answer; confirm against the original problem statement.
        if (testRemove(string, target, remove, mid)):
            return(mid)
        return(mid + 1)
    #print("Removing from", mid, testRemove(string, target, remove, mid), lo, hi)
    if (testRemove(string, target, remove, mid)):
        return(binSearch(string, target, remove, mid, hi))
    return(binSearch(string, target, remove, lo, mid - 1))
# Read the puzzle input: base string, target subsequence, and the removal
# order (1-based indexes). A '\0' sentinel is appended to both strings so a
# complete match of `target` is always detectable inside testRemove().
string = list(input()) + ['\0']
target = list(input()) + ['\0']
remove = list(map(int, input().split()))
answer = binSearch(string, target, remove, 0, len(remove) - 1)
print(answer)
# Generated by Django 3.0.5 on 2020-04-22 00:40
from django.db import migrations
class Migration(migrations.Migration):
    """Drop unused membership/person fields (generated by Django 3.0.5)."""

    dependencies = [
        ("data", "0003_auto_20200422_0031"),
    ]

    operations = [
        migrations.RemoveField(model_name="membership", name="on_behalf_of"),
        migrations.RemoveField(model_name="person", name="national_identity"),
        migrations.RemoveField(model_name="person", name="sort_name"),
    ]
|
import pygame
from .base import BaseEffect
from ...arrays.point import Point
class Shadow(BaseEffect):
    """Drop-shadow effect drawn behind its parent.

    offset_point: displacement of the shadow from the parent's top-left.
    trail: when True, the shadow is smeared from the offset back toward the
        parent, producing a motion-trail look.
    step: pixel stride between trail copies (larger = sparser trail).
    """
    def __init__(self, parent, color, offset_point, trail=False, step=1, alpha=None):
        self.offset = Point(offset_point)
        self.trail = trail
        self.step = step
        BaseEffect.__init__(self, parent, color, alpha)
        # Shadow is positioned by SUBTRACTING the offset from the parent's
        # top-left corner.
        self.position = (Point(self.parent._rect.topleft) - self.offset).tup()

    def render(self, allow_effects=True):
        BaseEffect.render(self, allow_effects)
        if self.trail:
            # Canvas large enough for the parent image plus the full offset.
            # (Assumes Point supports '+' and unpacking — TODO confirm.)
            w, h = Point(self.parent._rect.size) + self.offset
            surface = pygame.Surface((w, h))
            surface = surface.convert_alpha()
            surface.fill((0,0,0,0))  # fully transparent background
            # Per-step movement, normalized by the larger offset component.
            value = max(self.offset.tup())
            x_move = self.offset.x / value
            y_move = self.offset.y / value
            x, y = self.offset
            for m in range(0, value, self.step):
                # NOTE(review): the blit happens before x/y are updated, so
                # the first copy lands at the full offset and the final
                # computed position is never drawn — confirm intended.
                surface.blit(self.image, (int(x), int(y)))
                x = self.offset.x - x_move * m
                y = self.offset.y - y_move * m
            self.image = surface

    def blit(self, surface, clip_rect=None):
        # Draw the rotated variant when one is in effect, else the plain image.
        if self.rotation_image:
            self.build_rotation_image(clip_rect)
            surface.blit(self.rotation_image, self.position, clip_rect)
        else:
            surface.blit(self.image, self.position, clip_rect)
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import six
import time
from datetime import datetime, tzinfo
import requests
from copy import deepcopy
# Base URLs of the emp.mos.ru mobile API (two coexisting versions).
API_V1_0 = 'https://emp.mos.ru/v1.0'
API_V1_1 = 'https://emp.mos.ru/v1.1'
class AuthException(Exception):
    """Authorization with emp.mos.ru failed (server error code 401)."""
    pass


class EmpServerException(Exception):
    """Server-side error; carries the server's message and numeric code."""
    def __init__(self, message, code):
        self.message = message
        self.code = code
        # Python 2 needs the (Russian) message encoded before %-formatting.
        if six.PY2:
            super(Exception, self).__init__('{0} (code:{1})'.format(self.message.encode('utf-8'), self.code))
        else:
            super().__init__('{0} (code:{1})'.format(self.message, self.code))


class EmpCounterNotVerifiedException(EmpServerException):
    """Readings rejected (code 3454): the meter's verification period has
    expired, or readings are missing for three or more preceding months;
    the user must contact the local public-services center to resume
    remote submission.
    """
    pass


class EmpAlreadySendException(EmpServerException):
    """Readings rejected (code 3454): a reading for this period was already
    submitted (via mos.ru or by service-center staff) and has been accepted
    for billing, so it can no longer be changed.
    """
    pass


class EmpHugeValueException(EmpServerException):
    """Readings rejected (code 3454): the submitted value exceeds the
    water-consumption norms set by the Moscow government several times over.
    """
    pass


class EmpValueLessException(EmpServerException):
    """Readings rejected (code 3454): the submitted value is lower than the
    previous reading; the user should re-check the value or contact the
    local public-services center.
    """
    pass
class Client(object):
    """HTTP client for the emp.mos.ru mobile API ("Moscow state services").

    Wraps a requests.Session; login() obtains a server-side session_id,
    which every subsequent call embeds into its request payload.
    """
    def __init__(self, **kwargs):
        # Application token and device GUID identify this client to the API.
        self.token = kwargs.get('token')
        self.guid = kwargs.get('guid')
        # requests options
        self.verify = kwargs.get('verify', True)
        self.timeout = kwargs.get('timeout', 3.0)
        self.session_id = None  # emp.mos.ru session, set by login()
        self.user_agent = kwargs.get('user_agent', 'okhttp/3.8.1')
        self.dev_app_version = kwargs.get('dev_app_version')
        self.dev_user_agent = kwargs.get('dev_user_agent')
        # Template for POST bodies; auth.session_id is filled in by login().
        self.post_request_data = {
            'info': {
                'guid': self.guid,
                'user_agent': self.dev_user_agent,
                'app_version': self.dev_app_version
            },
            'auth': {
                'session_id': self.session_id
            }
        }
        # Fields used by app version 3.8.1.216(108), kept for reference:
        #self.dev_mobile = kwargs.get('dev_mobile')
        #self.info_field = {
        #    'app_version': self.dev_app_version,
        #    'guid': self.guid,
        #    'mobile': self.dev_mobile,
        #    'session_id': None,
        #    'user_agent': self.dev_user_agent
        #}
        self.headers = {
            'Cache-Control': 'no-cache',
            'Host': 'emp.mos.ru',
            'Connection': 'Keep-Alive',
            'Accept-Encoding': 'gzip',
            'User-Agent': self.user_agent
        }
        # Variant headers that force the app's network cache to refresh.
        self.pheaders = deepcopy(self.headers)
        self.pheaders.update({
            'X-Cache-ov': '15552000',
            'X-Cache-ov-mode': 'FORCE_NETWORK'
        })
        self.session = requests.Session()

    def raise_for_status(self, answer):
        """Map the server's errorCode/errorMessage to typed exceptions.

        :param answer: JSON dict of the form
            {'errorCode': 0, 'errorMessage': '', 'execTime': ...,
             'session_id': ..., 'result': None or JSON}
            errorCode 0 means OK; 401 is an authorization error; 3454 covers
            several meter-reading rejections distinguished by message text.
        """
        # print('Exec time:', answer['execTime'])
        code = answer['errorCode']
        msg = answer['errorMessage'] if 'errorMessage' in answer and answer['errorMessage'] else ''
        if code == 401:
            raise AuthException('Ошибка авторизации')
        if code == 3454:
            # The server reuses code 3454 for distinct failures; dispatch on
            # characteristic fragments of the (Russian) message text.
            if u'показание принято к расчёту' in msg:
                raise EmpAlreadySendException(msg, code)
            elif u'Не допускается внесение данных, в несколько раз превышающих нормативы водопотребления' in msg:
                raise EmpHugeValueException(msg, code)
            elif u'Вносимое показание меньше предыдущего' in msg:
                raise EmpValueLessException(msg, code)
            elif u'Истёк срок поверки прибора учёта' in msg:
                raise EmpCounterNotVerifiedException(msg, code)
        if code != 0:
            raise EmpServerException(msg, code)

    def is_active(self):
        """Return True once login() has succeeded."""
        return self.session_id is not None

    def login(self, telephone, pwd):
        """Authenticate and store the session_id for subsequent calls.

        :param telephone: phone number in the form 7xxxxxxxxxx
        :param pwd: password from the "Moscow state services" app
        :return: JSON result, e.g.
            {'is_filled': True, 'surname': ..., 'name': ...,
             'session_id': ..., 'request_id': ...}
        """
        login_data = {
            'device_info': {
                'guid': self.guid,
                'user_agent': self.dev_user_agent,
                'app_version': self.dev_app_version,
                #'mobile': self.dev_mobile  # 3.8.1.216(108)
            },
            'auth': {
                'login': telephone,
                'password': pwd,
                'guid': self.guid
            }
        }
        ret = self.session.post(API_V1_0 + '/auth/virtualLogin',
                                params={'token': self.token},
                                headers={'Content-Type': 'application/json; charset=UTF-8',
                                         'Connection': 'Keep-Alive',
                                         'Accept-Encoding': 'gzip',
                                         'User-Agent': self.user_agent,
                                         'cache-control': 'no-cache',
                                         'Host': 'emp.mos.ru',
                                         'Accept': '*/*'},
                                verify=self.verify,
                                timeout=self.timeout,
                                json=login_data)
        response = ret.json()
        self.raise_for_status(response)
        # Remember the session and splice it into the shared POST template.
        self.session_id = response['session_id']
        self.post_request_data['auth']['session_id'] = self.session_id
        return response['result']

    def get_profile(self):
        """Return the user's profile.

        :return: JSON like {'firstname': ..., 'middlename': ...,
            'lastname': ..., 'birthdate': 'dd.mm.YYYY', 'msisdn': phone,
            'email': ..., 'email_confirmed': bool, 'drive_license': ...}
        """
        assert self.session_id
        ret = self.session.get(API_V1_0 + '/profile/get',
                               params={'token': self.token,
                                       'info[guid]': self.guid,
                                       'auth[session_id]': self.session_id
                                       },
                               headers=self.pheaders,
                               verify=self.verify,
                               timeout=self.timeout)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']['profile']

    def get_flats(self):
        """Return the user's registered flats.

        :return: JSON array of dicts with keys such as 'name', 'paycode',
            'flat_id', 'flat_number', 'address', 'unom' (house identifier),
            'unad', 'electro_account', 'electro_device'.
        """
        assert self.session_id
        ret = self.session.get(API_V1_0 + '/flat/get',
                               params={'token': self.token,
                                       'info[guid]': self.guid,
                                       'auth[session_id]': self.session_id
                                       },
                               headers=self.pheaders,
                               verify=self.verify,
                               timeout=self.timeout)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def address_search(self, pattern, limit=100):
        """Search house addresses by a text pattern.

        :param pattern: search pattern string
        :param limit: maximum number of results
        :return: JSON entries like {'address': ..., 'description': ...,
            'district': ..., 'fullMatch': bool, 'unad': int, 'unum': int}
        """
        assert self.session_id
        wheaders = deepcopy(self.headers)
        wheaders.update({'Content-Type': 'application/json; charset=UTF-8'})
        wcrequest = deepcopy(self.post_request_data)
        wcrequest.update({
            'limit': limit,
            'pattern': pattern,
        })
        ret = self.session.post(API_V1_1 + '/flat/addressSearch',
                                params={'token': self.token},
                                headers=wheaders,
                                verify=self.verify,
                                timeout=self.timeout,
                                json=wcrequest)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def flat_delete(self, flat_id):
        """Delete a registered flat.

        :param flat_id: unicode flat id from get_flats() (['flat_id'])
        :return: None (server returns a null result)
        """
        assert self.session_id
        wheaders = deepcopy(self.headers)
        # Invalidate the app-side caches that depend on the flat list.
        wheaders.update({
            'X-Clears-tags': 'WIDGETS,EPD,ELECTRO_COUNTERS,WATER_COUNTERS,APARTMENT, EPD_WIDGET,ACCRUALS_WIDGET',
            'Content-Type': 'application/json; charset=UTF-8' })
        wcrequest = deepcopy(self.post_request_data)
        wcrequest.update({
            'flat_id': flat_id
        })
        ret = self.session.post(API_V1_0 + '/flat/delete',
                                params={'token': self.token},
                                headers=wheaders,
                                verify=self.verify,
                                timeout=self.timeout,
                                json=wcrequest)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def flat_add(self, name, unom, unad, address, flat_number, paycode):
        """Register a flat; look up `unom`/`unad` via address_search() first.

        :param name: any display name for the flat
        :param unom: int house id from address_search()
        :param unad: int from address_search()
        :param address: free-form address string
        :param flat_number: flat number (must be exact to submit readings)
        :param paycode: payer code
        :return: JSON describing the created flat ('flat_id', 'epd',
            'paycode', 'unom', 'unad', ... ; 'street' mirrors `address`).
        """
        assert self.session_id
        wheaders = deepcopy(self.headers)
        wheaders.update({
            'X-Clears-tags': 'WIDGETS,EPD,ELECTRO_COUNTERS,WATER_COUNTERS,APARTMENT, EPD_WIDGET,ACCRUALS_WIDGET',
            'Content-Type': 'application/json; charset=UTF-8'
        })
        wcrequest = deepcopy(self.post_request_data)
        wcrequest.update({
            'a': 0,
            'address': address,
            'can_update': False,
            'flat_number': flat_number,
            'name': name,
            'paycode': paycode,
            'unad': unad,
            'unom': unom
        })
        ret = self.session.post(API_V1_0 + '/flat/add',
                                params={'token': self.token},
                                headers=wheaders,
                                verify=self.verify,
                                timeout=self.timeout,
                                json=wcrequest)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def get_watercounters(self, flat_id):
        """Return water meters and their reading history for a flat.

        :param flat_id: unicode flat id from get_flats()
        :return: JSON with 'archive' (monthly cold/hot totals) and
            'counters': list of {'counterId', 'num' (serial), 'type'
            (1=cold, 2=hot), 'checkup' (next verification date),
            'indications': [{'period', 'indication'}, ...]}.
        """
        assert self.session_id
        wheaders = deepcopy(self.headers)
        wheaders.update({
            'X-Clears-tags': 'WATER_COUNTERS',
            'X-Cache-ov-mode': 'DEFAULT',
            'Content-Type': 'application/json; charset=UTF-8' })
        wcrequest = deepcopy(self.post_request_data)
        wcrequest.update({
            'flat_id': flat_id,
            'is_widget': False
        })
        ret = self.session.post(API_V1_0 + '/watercounters/get',
                                params={'token': self.token},
                                headers=wheaders,
                                verify=self.verify,
                                timeout=self.timeout,
                                json=wcrequest)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def send_watercounters(self, flat_id, counters_data):
        """Submit water-meter readings.

        :param flat_id: flat id from get_flats()
        :param counters_data: list of
            {'counter_id': ..., 'period': 'YYYY-MM-DD', 'indication': 'xxx,xx'}
        """
        assert self.session_id
        wheaders = deepcopy(self.headers)
        wheaders.update({
            'X-Clears-tags': 'WATER_COUNTERS',
            'Content-Type': 'application/json; charset=UTF-8'
        })
        wcrequest = deepcopy(self.post_request_data)
        wcrequest.update({
            'flat_id': flat_id,
            'counters_data': counters_data
        })
        ret = self.session.post(API_V1_0 + '/watercounters/addValues',
                                params={'token': self.token},
                                headers=wheaders,
                                verify=self.verify,
                                timeout=self.timeout,
                                json=wcrequest)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def get_electrocounters(self, flat_id):
        """Return the electricity meter state for a flat.

        :param flat_id: unicode flat id from get_flats()
        :return: JSON with 'electro_account', 'electro_device', 'balance',
            'is_debt', 'sh_znk' (meter digit count), 'zones' (current values
            per tariff zone) and 'intervals' (zone time ranges).
        """
        assert self.session_id
        wheaders = deepcopy(self.headers)
        wheaders.update({
            'X-Clears-tags': 'ELECTRO_COUNTERS',
            'X-Cache-ov-mode': 'DEFAULT',
            'Content-Type': 'application/json; charset=UTF-8' })
        wcrequest = deepcopy(self.post_request_data)
        wcrequest.update({
            'flat_id': flat_id,
            'is_widget': False,
        })
        ret = self.session.post(API_V1_0 + '/electrocounters/get',
                                params={'token': self.token},
                                headers=wheaders,
                                verify=self.verify,
                                timeout=self.timeout,
                                json=wcrequest)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def send_electrocounters(self, flat_id, counters_data):
        """Submit electricity-meter readings.

        :param flat_id: flat id from get_flats()
        :param counters_data: list of
            {'counter_id': ..., 'period': 'YYYY-MM-DD', 'indication': 'xxx,xx'}
        """
        assert self.session_id
        wheaders = deepcopy(self.headers)
        wheaders.update({
            'X-Clears-tags': 'ELECTRO_COUNTERS',
            'Content-Type': 'application/json; charset=UTF-8'
        })
        wcrequest = deepcopy(self.post_request_data)
        wcrequest.update({
            'flat_id': flat_id,
            'counters_data': counters_data
        })
        ret = self.session.post(API_V1_0 + '/electrocounters/addValues',
                                params={'token': self.token},
                                headers=wheaders,
                                verify=self.verify,
                                timeout=self.timeout,
                                json=wcrequest)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def get_epd(self, flat_id, period, is_debt=True):
        """Return the unified payment document (EPD) summary for a period.

        :param flat_id: unicode flat id from get_flats()
        :param period: date string in 'DD.MM.YYYY' format
        :param is_debt: True/False
        :return: JSON like {'is_debt', 'is_paid', 'amount', 'service_code',
            'insurance', 'ammount_insurance'}
        """
        assert self.session_id
        wheaders = deepcopy(self.headers)
        wheaders.update({
            'X-Clears-tags': 'EPD',
            'X-Cache-ov-mode': 'DEFAULT',
            'Content-Type': 'application/json; charset=UTF-8' })
        wcrequest = deepcopy(self.post_request_data)
        wcrequest.update({
            'flat_id': flat_id,
            'period': period,
            'is_debt': is_debt,
        })
        ret = self.session.post(API_V1_1 + '/epd/get',
                                params={'token': self.token},
                                headers=wheaders,
                                verify=self.verify,
                                timeout=self.timeout,
                                json=wcrequest)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def get_eepd(self, flat_id, period, epd_type='current', rid=None):
        """Request the electronic EPD (PDF).

        The first call (rid=None) starts document generation and returns a
        'rid'; pass that rid on subsequent calls, which stay empty until the
        document is ready and then include a 'pdf' link plus a 'sections'
        breakdown of the charges.

        :param flat_id: unicode flat id from get_flats()
        :param period: date string in 'DD.MM.YYYY' format
        :param epd_type: e.g. 'current'
        :param rid: request id from the first call, or None to start anew
        """
        assert self.session_id
        wheaders = deepcopy(self.headers)
        wheaders.update({
            'Content-Type': 'application/json; charset=UTF-8'})
        wcrequest = deepcopy(self.post_request_data)
        wcrequest.update({
            'flat_id': flat_id,
            'period': period,
            'type': epd_type,
        })
        if rid:
            wcrequest.update({'rid': rid})
        ret = self.session.post(API_V1_0 + '/eepd/get',
                                params={'token': self.token},
                                headers=wheaders,
                                verify=self.verify,
                                timeout=self.timeout,
                                json=wcrequest)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def get_eepd_wait_result(self, flat_id, period, timeout=10.0):
        """Request the electronic EPD (PDF) and poll until it is ready.

        :param flat_id: unicode flat id from get_flats()
        :param period: date string in 'DD.MM.YYYY' format
        :param timeout: how long to keep polling, in seconds
        :return: the ready document JSON, or None if the timeout elapsed
        """
        start = time.time()
        rid = None
        result = None
        while not result and time.time() - start < timeout:
            time.sleep(2.0)  # give the server time to generate the document
            ret = self.get_eepd(flat_id, period, 'current', rid)
            if 'rid' in ret:
                rid = ret['rid']
            if 'pdf' in ret:
                result = deepcopy(ret)
        return result

    def get_car_fines(self, sts_number):
        """Return traffic fines for a vehicle.

        :param sts_number: unicode vehicle registration (STS) number
        :return: JSON {'paid': [...], 'unpaid': [...]} where each entry has
            'seriesAndNumber', 'date', 'offence_place', 'offenceType',
            'cost', 'is_discount', 'executionState', 'is_fssp', ...
        """
        assert self.session_id
        wheaders = deepcopy(self.headers)
        wheaders.update({
            'X-Clears-tags': 'FORCE_NETWORK',
            'X-Cache-ov-mode': 'DEFAULT',
            'Content-Type': 'application/json; charset=UTF-8'})
        wcrequest = deepcopy(self.post_request_data)
        wcrequest.update({
            'sts_number': sts_number
        })
        ret = self.session.post(API_V1_0 + '/offence/getOffence',
                                params={'token': self.token},
                                headers=wheaders,
                                verify=self.verify,
                                timeout=self.timeout,
                                json=wcrequest)
        response = ret.json()
        self.raise_for_status(response)
        return response['result']

    def logout(self, timeout=None):
        """End the server session (observed to take up to ~5 seconds)."""
        if self.session_id:
            logout_data = deepcopy(self.post_request_data)
            ret = self.session.post(API_V1_0 + '/auth/logout',
                                    params={'token': self.token},
                                    headers=self.headers,
                                    timeout=timeout or self.timeout,
                                    json=logout_data)
            response = ret.json()
            self.raise_for_status(response)
            self.session_id = None
            return response['result']
class MosAPI(object):
    """Facade over one or more Client instances.

    kwargs:
        token: unique application key
        guid: unique device identifier
        verify: 'verify' flag for GET/POST requests (requests default True)
        timeout: 'timeout' for GET/POST requests
        user_agent: HTTP client user-agent string
        dev_user_agent: e.g. 'Android' for the Android OS
        dev_app_version: application/OS version string
    """
    def __init__(self, **kwargs):
        self.kwargs = kwargs
        self._clients = {'default': Client(**kwargs)}

    def client(self, client_id='default', **kwargs):
        """Return the Client for client_id, creating it lazily on first use."""
        if client_id and client_id not in self._clients:
            k = self.kwargs
            # NOTE(review): update() mutates self.kwargs in place, so extra
            # kwargs leak into later clients — confirm this is intended.
            k.update(kwargs)
            self._clients[client_id] = Client(**k)
        return self._clients[client_id]

    # Convenience pass-throughs for the common single-client case.
    def is_active(self):
        return self.client().is_active()

    def login(self, *args):
        return self.client().login(*args)

    def logout(self, *args):
        return self.client().logout(*args)

    def get_profile(self):
        return self.client().get_profile()

    # Flats
    def get_flats(self):
        return self.client().get_flats()

    def flat_delete(self, *args):
        return self.client().flat_delete(*args)

    def flat_add(self, *args):
        return self.client().flat_add(*args)

    def address_search(self, *args):
        return self.client().address_search(*args)

    # Water meters
    def get_watercounters(self, *args):
        return self.client().get_watercounters(*args)

    def send_watercounters(self, *args):
        return self.client().send_watercounters(*args)

    # Electricity meters
    def get_electrocounters(self, *args):
        return self.client().get_electrocounters(*args)

    def send_electrocounters(self, *args):
        return self.client().send_electrocounters(*args)

    # Unified payment document (EPD)
    def get_epd(self, *args):
        return self.client().get_epd(*args)

    # Generate the EPD as a PDF
    def get_eepd_wait_result(self, *args):
        return self.client().get_eepd_wait_result(*args)

    # Traffic fines
    def get_car_fines(self, *args):
        return self.client().get_car_fines(*args)
class Water:
    """Water-supply type codes and their Russian display strings."""
    COLD = 1
    HOT = 2

    @staticmethod
    def water_abbr(water):
        """Short Russian abbreviation for the type, or None for unknown codes."""
        return {Water.COLD: u'ХВС', Water.HOT: u'ГВС'}.get(water)

    @staticmethod
    def name(water):
        """Full Russian name for the type, or None for unknown codes."""
        return {Water.COLD: u'холодная вода', Water.HOT: u'горячая вода'}.get(water)
class Watercounter:
    """Helpers for water-counter JSON records returned by the API.

    Record shape (observed):
    [{ 'counterId': 1437373, # internal counter id
    'type': 1, # water type (see Water)
    'num': '417944', # counter serial number
    'checkup': '2023-09-25+03:00', # next verification date
    'indications':
    [{'period': '2018-08-31+03:00', 'indication': '21.38'}, ...]
    }, {...}]

    Typical filters:
    list(filter(lambda x: x['num'] == num, response['counters']))
    list(filter(lambda x: x['counterId'] == id, response['counters']))
    list(filter(lambda x: x['type'] == water_type_id, response['counters']))
    """
    @staticmethod
    def last_value(counter):
        """Return the latest submitted reading as a float.

        :param counter: counter JSON dict (sorted in place by period).
        :return: float, or None when 'indications' is empty (no readings
            submitted for over 3 months).
        """
        indications = counter['indications']
        # Periods are ISO dates, so lexicographic order == chronological order.
        indications.sort(key=lambda x: x['period'])
        if indications:
            return float(indications[-1]['indication'])

    @staticmethod
    def water_title(counter):
        """Human-readable name of the counter's water type."""
        # BUGFIX: the original called Watercounter.humanreadable_name, which
        # does not exist anywhere in this module and always raised
        # AttributeError. Water.name is the human-readable type name.
        # NOTE(review): confirm Water.water_abbr was not intended instead.
        return Water.name(counter['type'])

    @staticmethod
    def checkup(counter):
        """Next verification date.

        The '+03:00' offset is stripped because Python 3.6's strptime cannot
        parse '%Y-%m-%d%z' for this input (see
        https://stackoverflow.com/questions/53291250).

        :param counter: counter JSON dict.
        :return: naive datetime (use d.replace(tzinfo=...) if a zone is needed).
        """
        checkup = counter['checkup'].split('+')[0]
        return datetime.strptime(checkup, '%Y-%m-%d')

    @staticmethod
    def serialize_for_send(counter, value):
        """Build the payload dict for submitting a reading.

        :param counter: counter JSON dict ('counterId' is the internal id,
            not the serial number shown in the app).
        :param value: reading as float.
        :return: dict with today's period and a comma-decimal indication.
        """
        return {
            'counter_id': int(counter['counterId']),
            'period': datetime.now().strftime("%Y-%m-%d"),
            # API expects a comma as the decimal separator.
            'indication': '{:.2f}'.format(value).replace('.', ',')
        }
|
from petl import *
# Call extract script and get a file.
# Load it as a petl table
# Extract the columns needed
# Save it in data directory as a compressed HDF5 file for later use in Pandas
|
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, Http404
from django.template import RequestContext, loader
from .models import Post, Page
def index(request):
    """Front page: every post and page, in ascending publication order."""
    # order_by('-pub_date')[::-1] evaluates the queryset and reverses the
    # resulting list, i.e. descending order flipped back to ascending.
    posts = Post.objects.order_by('-pub_date')[::-1]
    pages = Page.objects.order_by('-ppub_date')[::-1]
    context = {'latest_post_list': posts, 'latest_page_list': pages}
    return render(request, 'polls/index.html', context)
def detail(request, post_id):
    """Single-post page; raises Http404 when the post does not exist."""
    post = get_object_or_404(Post, pk=post_id)
    # Sidebar list, ascending publication order (reversed descending query).
    pages = Page.objects.order_by('-ppub_date')[::-1]
    return render(request, 'polls/detail.html',
                  {'post': post, 'latest_page_list': pages})
def pdetail(request, page_id):
    """Single-page page; raises Http404 when the page does not exist."""
    page = get_object_or_404(Page, pk=page_id)
    # Sidebar list, ascending publication order (reversed descending query).
    pages = Page.objects.order_by('-ppub_date')[::-1]
    return render(request, 'polls/pdetail.html',
                  {'page': page, 'latest_page_list': pages})
import numpy as np
def pretty(mat, label):
    """Print a covariance matrix and its inverse, and return the inverse.

    Returning the inverse is a backward-compatible generalization: existing
    call sites ignore the (previously None) return value, while new callers
    can reuse the computed inverse.

    :param mat: square, invertible array-like.
    :param label: name shown in the printed headers.
    :return: numpy.ndarray, the inverse of *mat*.
    :raises numpy.linalg.LinAlgError: if *mat* is singular.
    """
    print('Covariance Matrix {}'.format(label))
    print(mat)
    matinv = np.linalg.inv(mat)
    print('Inverse Covariance Matrix {}'.format(label))
    print(matinv)
    return matinv
# Example symmetric covariance-style matrices.
# NOTE(review): A and B are defined but never passed to pretty() below.
A = np.array([[9, 3, 1], [3, 9, 3], [1, 3, 9]])
B = np.array([[8, -3, 1], [-3, 9, -3], [1, -3, 8]])
C = np.array([[9, 3, 0], [3, 9, 3], [0, 3, 9]])
D = np.array([[9, -3, 0], [-3, 10, -3], [0, -3, 9]])
# K has a unit diagonal (correlation-matrix shape).
K = np.array([[1, 0.5, 0], [0.5, 1, 0.5], [0, 0.5, 1]])
# Print each matrix together with its inverse.
pretty(C, 'C')
pretty(D, 'D')
pretty(K, 'K')
|
import tensorflow as tf
class Optimizer(object):
    """Thin wrapper building a TF1 Adam optimizer from a parameter object."""

    def __init__(self, parameters):
        # parameters must expose a .learning_rate attribute.
        self.parameters = parameters

    def get_optimizer(self):
        """Create, cache on self, and return a tf.train.AdamOptimizer."""
        lr = self.parameters.learning_rate
        self.optimizer_object = tf.train.AdamOptimizer(learning_rate=lr)
        return self.optimizer_object
|
from sets import Set
from nevow import inevow, tags as T, flat, loaders, rend
from tub.public.web import common as tubcommon
from crux import web, icrux, skin
from navigation import NestedListNavigationFragment
from pollen.nevow import renderers
from basiccms.paging import ListPagingData, PagingControlsFragment
from cms.widgets.itemselection import ItemSelection
def distinct(l):
    """
    Return a list of distinct items (order unspecified, as with set()).
    """
    unique = set(l)
    return list(unique)
def parseTemplateString(template):
    """
    Parse the template string returning a list of positional args and a
    dict of keyword args.

    The string is assumed to be something vaguely like that used when
    calling a Python function, only this is more lenient and less powerful.

    :param template: comma-separated "arg, key=value" string, or None.
    :return: (args, kwargs) tuple.
    """
    # Short-circuit if nothing useful passed in
    if template is None:
        return [], {}
    # BUGFIX: the original assigned map(stripper, ...) and iterated the
    # result twice. Under Python 3, map() returns a one-shot iterator, so
    # the second pass (kwargs) silently saw nothing. A real list behaves
    # identically under Python 2 and correctly under Python 3.
    parts = [part.strip() for part in template.split(",")]
    # Positional args are the parts without an '='
    args = [part for part in parts if "=" not in part]
    # Keyword args: split on the first '=' and strip both sides
    kwargs = {}
    for part in parts:
        if "=" in part:
            key, value = part.split("=", 1)
            kwargs[key.strip()] = value.strip()
    # Return the final result
    return args, kwargs
def data_cmsitemselection(encodedItemsel):
    """Build a nevow data directive that loads an ItemSelection's items.

    Items that support sort() are ordered newest-first by .date; when items
    lack a .date attribute the sort falls back to reverse name order.
    """
    def data(ctx, data):
        def gotData(items, ctx):
            if hasattr(items, 'sort'):
                try:
                    items.sort(key=lambda i: i.date, reverse=True)
                except AttributeError:
                    # Some item had no .date; re-sort by name instead.
                    items.sort(key=lambda i: i.name, reverse=True)
            return items
        selection = ItemSelection.fromString(str(encodedItemsel))
        deferred = inevow.IGettable(selection).get(ctx)
        deferred.addCallback(gotData, ctx)
        return deferred
    return data
def render_cmsitemselection(encodedItemsel):
    """Build a nevow renderer adapting the datum to IResource.

    When the selection carries a template and the resource supports
    setTemplate, the parsed template is applied before rendering.
    """
    itemsel = ItemSelection.fromString(str(encodedItemsel))
    def renderer(ctx, data):
        from basiccms.web import cmsresources
        resource = inevow.IResource(data)
        if itemsel.template is not None and hasattr(resource, 'setTemplate'):
            template_args = cmsresources.parseTemplateString(itemsel.template)
            resource.setTemplate(*template_args)
        return resource
    return renderer
class NavigationMixin(object):
    """
    Mixin the navigation renderer.
    """
    def render_navigation(self, *render_args):
        """Return a renderer building a NestedListNavigationFragment.

        Render args of the form "key=value" become keyword arguments
        (split on the first '='); all others are passed positionally.
        """
        def f(ctx, data):
            positional = [arg for arg in render_args if "=" not in arg]
            keywords = {}
            for arg in render_args:
                if "=" in arg:
                    key, value = arg.split('=', 1)
                    keywords[str(key)] = value
            return NestedListNavigationFragment(*positional, **keywords)
        return f
class SiteMixin(object):
    """
    Mixin the site layout and templating renderers.
    """
    def getExtraData(self, ctx):
        """
        Load the site data from the avatar realm's static YAML.
        """
        avatar = icrux.IAvatar(ctx)
        return avatar.realm.staticData.parseYAML('site') or {}

    def _renderSiteSnippet(self, ctx, key, default):
        # Render an XML snippet from the site data, or the given default.
        value = self.getExtraData(ctx).get(key)
        if value is not None:
            return T.xml(value)
        return default

    def render_logotext(self, ctx, data):
        return self._renderSiteSnippet(ctx, 'logotext', ctx.tag)

    def render_footer(self, ctx, data):
        return self._renderSiteSnippet(ctx, 'footer', ctx.tag)

    def render_yuiid(self, ctx, data):
        return self._renderSiteSnippet(ctx, 'yuiid', 'doc3')

    def render_sidebar(self, ctx, data):
        sidebar = self.getExtraData(ctx).get('sidebar')
        if sidebar is not None:
            return T.div(id='sidebar')[sidebar]
        else:
            return ''

    def render_admin(self, ctx, data):
        return ''

    def render_bodyclass(self, ctx, data):
        # 'template' extra data is an (args, kwargs) pair; classes live in kwargs.
        classes = self.getExtraData(ctx).get('template', [None, {}])[1].get('classes', None)
        if classes:
            return ' '.join(classes)
        else:
            return ''

    def render_yuiclass(self, ctx, data):
        return self._renderSiteSnippet(ctx, 'yuiclass', 'yui-t1')

    def _swapperLink(self, ctx, param, value, cssClass, image):
        # Build the "?param=value[&q=...]" toggle link shared by the swapper
        # renderers (deduplicates eight near-identical branches).
        href = "?%s=%s" % (param, value)
        if ctx.arg('q'):
            href = "%s&q=%s" % (href, ctx.arg('q'))
        return T.a(href=href, class_=cssClass)[T.img(src=image)]

    def render_backgroundswapper(self, ctx, data):
        """Link that toggles the inverted colour scheme."""
        if isInverted(ctx) == True:
            return self._swapperLink(ctx, 'invert', 'False', 'backgroundswapper',
                                     '/skin/images/swapbackground-invert.gif')
        return self._swapperLink(ctx, 'invert', 'True', 'backgroundswapper',
                                 '/skin/images/swapbackground.gif')

    def render_adminswapper(self, ctx, data):
        """Link that toggles admin mode (same image in both states)."""
        if isAdminOn(ctx) == True:
            return self._swapperLink(ctx, 'admin', 'False', 'adminswapper',
                                     '/skin/images/swapadmin.gif')
        return self._swapperLink(ctx, 'admin', 'True', 'adminswapper',
                                 '/skin/images/swapadmin.gif')
def isInverted(ctx):
    """Read/update the 'invert' display cookie and report the current state.

    NOTE(review): the return value is deliberately the opposite of the
    'invert' query argument / cookie value in the original code (arg
    'True' -> returns False); that behaviour is preserved here - confirm
    it is intended before "fixing" it.
    """
    request = inevow.IRequest(ctx)
    if ctx.arg('invert', None) == 'True':
        request.addCookie('invert', 'True', expires=None, path='/')
        return False
    elif ctx.arg('invert', None) == 'False':
        request.addCookie('invert', 'False', expires=None, path='/')
        return True
    # BUGFIX: dropped a redundant second inevow.IRequest(ctx) adaption here.
    cookie = request.getCookie('invert')
    return cookie is None or cookie == 'False'
def isAdminOn(ctx):
    """Read/update the 'admin' cookie; True when admin mode is on.

    A query argument ('admin=True'/'admin=False') wins and is persisted in
    the cookie; otherwise the cookie decides (absent/'False' -> off).
    """
    request = inevow.IRequest(ctx)
    if ctx.arg('admin', None) == 'True':
        request.addCookie('admin', 'True', expires=None, path='/')
        return True
    elif ctx.arg('admin', None) == 'False':
        request.addCookie('admin', 'False', expires=None, path='/')
        return False
    # BUGFIX: dropped a redundant second inevow.IRequest(ctx) adaption here.
    cookie = request.getCookie('admin')
    return not (cookie is None or cookie == 'False')
def getInverted(ctx):
    """Suffix used to select '-inverted' asset variants, or ''."""
    return '-inverted' if isInverted(ctx) else ''
class Page(SiteMixin, NavigationMixin, renderers.RenderersMixin, web.Page):
    """
    Base class for "ordinary" pages.
    """
    def render_title_tag(self, ctx, data):
        # Leave the template's <title> untouched.
        return ctx.tag

    def render_meta_description(self, ctx, data):
        return ctx.tag

    def render_meta_keywords(self, ctx, data):
        return ctx.tag

    def render_invert(self, ctx, data):
        # Suffix that selects "-inverted" asset variants.
        return '-inverted' if isInverted(ctx) else ''

    def render_invertstyles(self, ctx, data):
        # Extra stylesheet only when the inverted scheme is active.
        if not isInverted(ctx):
            return ''
        return T.link(rel='stylesheet', type='text/css',
                      href='/skin/css/styles-inverted.css')

    def render_ifinverted(self, ctx, data):
        return render_if(ctx, isInverted(ctx))

    def render_if(self, ctx, data):
        return render_if(ctx, data)

    def data_cmsitemselection(self, encodedItemsel):
        # Delegate to the module-level helper.
        return data_cmsitemselection(encodedItemsel)

    def render_cmsitemselection(self, encodedItemsel):
        # Delegate to the module-level helper.
        return render_cmsitemselection(encodedItemsel)
class CMSResource(tubcommon.CMSMixin, Page):
    """
    Base class for CMS item pages.
    """
    def getExtraData(self,ctx):
        """Aggregate 'template' extra data from the site, the plugin's
        template definition and the item, in increasing precedence.
        Returns the site extra data dict with 'template' replaced by the
        aggregated (args, kwargs) pair."""
        # Get the extra data from the super class and the extra data from the item.
        extraData = SiteMixin.getExtraData(self, ctx)
        itemExtraData = self.original.getExtraDataAttributeValue('extraData', 'en') or {}
        # XXX itemExtraData is a unicode instance and yet the rest of the code
        # is expecting a dict. Did this thing ever work (even before my
        # changes) or were the exception handlers masking the problems?
        # For now, because I'd really like to achieve something today, I'm
        # going to just set itemExtraData to an empty dict.
        itemExtraData = {}
        # ... from the item
        templateName = getattr(self.original.getProtectedObject(), "template", None)
        templateString = ''
        templates = getattr(self.original.plugin,'templates',[])
        if templates is not None:
            # Find the plugin template matching the item's template name.
            for pluginTemplate in templates:
                if pluginTemplate is not None and pluginTemplate[0] == templateName:
                    templateString = pluginTemplate[2]
                    break
        args,kwargs = parseTemplateString(templateString)
        classes = kwargs.get('classes','').split()
        # ... from the super class's extra data
        edargs,edkwargs = parseTemplateString(extraData.get('template'))
        # NOTE(review): '>1' means a single positional arg never overrides;
        # possibly intended to be '>=1' - confirm before changing.
        if len(edargs)>1:
            args = edargs
        classes.extend( edkwargs.get('classes','').split() )
        kwargs.update(edkwargs)
        # ... from the item's extra data
        iargs, ikwargs = parseTemplateString(itemExtraData.get('template'))
        if len(iargs)>1:
            args = iargs
        classes.extend( ikwargs.get('classes','').split() )
        kwargs.update(ikwargs)
        # Overlay the item's extra data on the "normal" extra data
        extraData.update(itemExtraData)
        # Replace the templates with out aggregated version
        kwargs['classes'] = classes
        extraData['template'] = (distinct(args),kwargs)
        # Hurray! We finally got there!
        return extraData
    def data_cmsitemselection(self, encodedItemsel):
        from basiccms.web import rest
        # This returns a list of objects
        return rest.data_cmsitemselection(encodedItemsel)
    def render_cmsitemselection(self, encodedItemsel):
        # This is expecting to render a single object
        from basiccms.web import rest
        return rest.render_cmsitemselection(encodedItemsel)
    def render_invert(self,ctx,data):
        # Suffix that selects "-inverted" asset variants.
        i = isInverted(ctx)
        if i is True:
            return '-inverted'
        else:
            return ''
    def render_ifinverted(self,ctx,data):
        return render_if(ctx,isInverted(ctx))
    def render_if(self,ctx,data):
        return render_if(ctx,data)
    def render_invertstyles(self,ctx,data):
        # Extra stylesheet only when the inverted scheme is active.
        i = isInverted(ctx)
        if i is True:
            return T.link(rel='stylesheet',type='text/css',href='/skin/css/styles-inverted.css')
        else:
            return ''
    def render_title_tag(self, ctx, data):
        """Replace the template <title> content with the item's title, if any."""
        titleTag = getattr(self.original.getProtectedObject(), 'title', None)
        if titleTag:
            ctx.tag.clear()
            ctx.tag[titleTag]
        return ctx.tag
def render_if(ctx, data):
    """Render the 'True' or 'False' pattern depending on *data*'s truthiness.

    When the matching pattern is absent, truthy data falls back to the tag
    itself and falsy data to an empty string.
    """
    # Look for (optional) patterns.
    truePatterns = ctx.tag.allPatterns('True')
    falsePatterns = ctx.tag.allPatterns('False')
    if data:
        return truePatterns if truePatterns else ctx.tag
    return falsePatterns if falsePatterns else ''
class PagingMixin(object):
    """Renderers for paged lists with optional group separators."""

    def render_paging(self, paging, grouping=None):
        """Return a renderer that pages the data and yields an 'item' pattern
        per row, emitting a 'seperator' pattern after every *grouping* items.

        :param paging: page size (coerced to int).
        :param grouping: optional group size (coerced to int).
        """
        paging = int(paging)
        if grouping is not None:
            grouping = int(grouping)
        def render(ctx, data):
            def gotPage(ignore, pageData):
                def emptyString():
                    return ''
                tag = inevow.IQ(ctx).patternGenerator('item')
                if grouping is not None:
                    # BUGFIX: narrowed the bare 'except:' which also swallowed
                    # KeyboardInterrupt/SystemExit; a missing 'seperator'
                    # pattern (sic - runtime name kept) still falls back to
                    # an empty string.
                    try:
                        septag = inevow.IQ(ctx).patternGenerator('seperator')
                    except Exception:
                        septag = emptyString
                for cnt, item in enumerate(pageData.data):
                    yield tag(data=item)
                    if grouping is not None and (cnt + 1) % grouping == 0:
                        yield septag()
            pageData = ListPagingData(ctx, list(data), paging)
            d = pageData.runQuery()
            d.addCallback(gotPage, pageData)
            return d
        return render

    def fragment_paging_controls(self, ctx, data):
        return PagingControlsFragment('PagingControlsFragment.html')
class RichTextFragment(rend.Fragment, PagingMixin, skin.SkinRenderersMixin):
    """Render a raw XHTML snippet as a nevow fragment, with the paging and
    CMS item-selection directives available inside the markup."""
    # Wrapper document: a nevow-namespaced <div> so directives in the
    # snippet are parsed; %s receives the raw XHTML.
    XML_TEMPLATE = """<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<div xmlns:n="http://nevow.com/ns/nevow/0.1" >%s</div>"""
    def __init__(self,xml):
        # ignoreDocType keeps the DOCTYPE from being re-emitted on render.
        rend.Fragment.__init__(self, docFactory=loaders.xmlstr(self.XML_TEMPLATE%xml, ignoreDocType=True))
    def data_cmsitemselection(self, encodedItemsel):
        # Delegate to the module-level helper.
        return data_cmsitemselection(encodedItemsel)
    def render_cmsitemselection(self, encodedItemsel):
        # Delegate to the module-level helper.
        return render_cmsitemselection(encodedItemsel)
|
from fbs_runtime.application_context.PyQt5 import ApplicationContext
from core.amazon_scraper.scraper import Scraper
from ui.keetext import KeetextGui
import sys
def main():
    """Build the fbs application context, show the GUI, run the event loop."""
    app_context = ApplicationContext()   # 1. Instantiate ApplicationContext
    gui = KeetextGui()
    gui.show()
    # 2. Run the Qt event loop and propagate its exit code to the shell.
    sys.exit(app_context.app.exec_())


if __name__ == '__main__':
    main()
|
from django import forms
from Crud.models import King,Dummy
class king(forms.ModelForm):
    """ModelForm exposing every field of the King model."""
    # NOTE(review): lowercase class name breaks PascalCase convention but is
    # kept - callers import it under this name.
    class Meta:
        model = King
        fields = '__all__'
class dummy(forms.ModelForm):
    """ModelForm exposing every field of the Dummy model."""
    # NOTE(review): lowercase class name kept for import compatibility.
    class Meta:
        model = Dummy
        fields = '__all__'
|
# This server will be run on the jupyter notebook, where it will call down to jupyter nbconvert
# to execute the notebook from airflow.
import subprocess
from flask import Flask
from flask_restful import reqparse, Resource, Api
# Request schema: 'input_nb' is mandatory, 'output_nb' is optional.
parser = reqparse.RequestParser()
parser.add_argument('input_nb', required=True, type=str)
parser.add_argument('output_nb', required=False, type=str)
app = Flask('webserver')
api = Api(app)
# Base directory of the Jupyter container's work volume; notebook paths in
# requests are resolved relative to it.
HOME_DIR = '/home/jovyan/work/'
class RemoteJupyterExecutor(Resource):
    """REST endpoint that executes a notebook via `jupyter nbconvert`."""

    def get(self):
        # Simple liveness probe.
        return {'hello': 'world'}

    def post(self):
        """Execute input_nb, writing the executed copy to output_nb.

        output_nb is optional; when omitted the executed notebook overwrites
        the input (BUGFIX: the original concatenated None and raised
        TypeError whenever output_nb was not supplied).

        :raises subprocess.CalledProcessError: when nbconvert fails.
        """
        args = parser.parse_args()
        input_nb = HOME_DIR + args['input_nb']
        output_nb = HOME_DIR + (args['output_nb'] or args['input_nb'])
        # Security: argv list with shell=False so request-supplied paths
        # cannot inject shell commands (the original interpolated them into
        # a shell string).
        subprocess.run(
            ["jupyter", "nbconvert", "--to", "notebook", "--execute",
             "--output", output_nb, input_nb],
            check=True)
        return 200
# Mount the executor at the root URL.
api.add_resource(RemoteJupyterExecutor, '/')
if __name__ == '__main__':
    # Flask debug server; not for production use.
    app.run(debug=True)
|
#Youtube video download
from pytube import YouTube
# Prompt for a video URL, list its streams, download the chosen one.
link = input('Enter the link :')
yt = YouTube(link)
# NOTE(review): streams.all() is deprecated in newer pytube; plain
# `yt.streams` works there - confirm the installed version.
videos = yt.streams.all()
# BUGFIX: the original printed every stream twice (a stray debug
# `print(stream)` before the numbered line); list each stream once,
# numbered from 1.
for i, stream in enumerate(videos, start=1):
    print(str(i) + ' ' + str(stream))
stream_no = int(input('Enter the number: '))
video = videos[stream_no - 1]
video.download('/Users/sachinkawde/Downloads')
print('Downloaded')
|
from datetime import date, timedelta
import pymysql
from baseObject import baseObject
import re
from contract import contractList
class userList(baseObject):
    """List wrapper for the Users table: registration validation, login
    checks and tenant lookups."""
    # list object for User table
    def __init__(self):
        self.setupObject('Users')
    def verifyNew(self,n=0):
        """Validate row *n* as a new registration.

        Populates self.errorList with human-readable messages and returns
        True when there are no errors, False otherwise.
        """
        # check data for errors, append any errors to errorList
        self.errorList = []
        if self.hasDuplicates('Email', n):
            self.errorList.append("Email address is already registered.")
        if self.hasDuplicates('Username', n):
            self.errorList.append("Username already exists.")
        if len(self.data[n]['Username']) < 5 or len(self.data[n]['Username']) > 25:
            self.errorList.append("Username must be between 5 and 25 characters long.")
        if verifyEmail(self.data[n]['Email']) == False:
            self.errorList.append("Please enter a valid email.")
        pwErrors = verifyPassword(self.data[n]['Password'])
        if len(pwErrors) > 0:
            [self.errorList.append(err) for err in pwErrors]
        if verifyDOB(self.data[n]['Birthday'], minAge=18) == False:
            self.errorList.append("User must be at least 18 years old.")
        if verifyPhone(self.data[n]['Phone']) == False:
            self.errorList.append("Please enter a valid 10 digit phone number.")
        if len(self.data[n]['FirstName']) == 0:
            self.errorList.append("First name cannot be blank.")
        if len(self.data[n]['LastName']) == 0:
            self.errorList.append("Last name cannot be blank.")
        if self.data[n]['Type'] not in ['admin', 'landlord', 'tenant']:
            self.errorList.append("Invalid user type.")
        if len(self.errorList) > 0:
            return False
        else:
            return True
    def tryLogin(self,email,pw):
        """Load rows matching (email, password); True iff exactly one match.

        NOTE(review): the password is matched verbatim in SQL, which implies
        plaintext password storage - should be hashed; confirm the schema.
        The query itself is parameterized, so no SQL injection here.
        """
        # attempt to login with email and password
        sql = 'SELECT * FROM `' + self.tn + '` WHERE `email` = %s AND `password` = %s;'
        tokens = (email,pw)
        self.connect()
        cur = self.conn.cursor(pymysql.cursors.DictCursor)
        self.log(sql,tokens)
        cur.execute(sql,tokens)
        self.data = []
        for row in cur:
            self.data.append(row)
        if len(self.data) == 1: # return true if there is exactly one match
            return True
        else:
            return False
    def getTenants(self, username):
        """Load into self.data the tenants holding a contract with landlord
        *username*; no-op when *username* is not a landlord."""
        # get a list of tenants that have a contract with username
        if getUserType(username) == 'landlord':
            c = contractList()
            c.getByField('LUserName', username)
            tenantUsernames = [contract['TUserName'] for contract in c.data]
            self.getManyByField('Username', tenantUsernames)
    def passMatch(self, Password2, n=0):
        """True when row *n*'s Password equals *Password2*."""
        # compare password in data with different password
        if self.data[n]['Password'] == Password2:
            return True
        else:
            return False
    def hasDuplicates(self, field, n=0):
        """True when any other Users row shares row *n*'s value for *field*."""
        # find if there is another entry with the same value at field
        u = userList()
        u.getByField(field, self.data[n][field])
        if len(u.data) > 0:
            return True
        else:
            return False
def hasNum(string):
    """True if *string* contains at least one decimal digit."""
    return any(map(str.isdigit, string))
def hasChar(string):
    """True if *string* contains at least one alphabetic character."""
    return any(map(str.isalpha, string))
def verifyPassword(pw):
    """Check *pw* against the site password rules.

    :return: list of error strings; empty when the password is acceptable.
    """
    rules = [
        (len(pw) >= 5, "Password too short."),
        (len(pw) <= 20, "Password too long."),
        (hasNum(pw), "Password must contain at least one numeric digit."),
        (hasChar(pw), "Password must contain at least one letter."),
    ]
    return [message for ok, message in rules if not ok]
def verifyDOB(DOB, minAge):
    """Return True when the ISO-format birth date is at least *minAge* years ago.

    BUGFIX: the original approximated a year as exactly 365 days, so the
    check drifted by leap days (about 4-5 days over 18 years), wrongly
    accepting/rejecting people near their birthday. Compute the real
    minAge-th birthday instead.

    :param DOB: birth date as an ISO 'YYYY-MM-DD' string.
    :param minAge: required minimum age in whole years.
    """
    bday = date.fromisoformat(DOB)
    try:
        cutoff = bday.replace(year=bday.year + minAge)
    except ValueError:
        # Feb 29 birthday in a non-leap target year: use Feb 28.
        cutoff = bday.replace(year=bday.year + minAge, day=28)
    return cutoff <= date.today()
def verifyPhone(num):
    """True when *num* is a string of exactly 10 decimal digits."""
    return len(num) == 10 and num.isdigit()
def verifyEmail(email):
    """Return True when *email* matches a simple lowercase address pattern.

    Regex adapted from
    https://www.geeksforgeeks.org/check-if-email-address-valid-or-not-in-python/
    BUGFIX: the pattern is now a raw string - in a plain string '\\.' and
    '\\w' are invalid escape sequences under Python 3 (DeprecationWarning,
    a future SyntaxError).
    """
    regex = r'^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,3}$'
    return re.search(regex, email) is not None
def getUserType(username):
    """Return the 'Type' of the uniquely matching user, or None."""
    users = userList()
    users.getByField('Username', username)
    if len(users.data) != 1:
        return None
    return users.data[0]['Type']
import json
import urllib2
import urllib
import urlparse
import re
import helperfunctions
import base64
import datetime
import hashlib
class lribhelper(object):
    """Pushes schema.org entity graphs into an LRI backend over its HTTP API.

    Python 2 code (urllib2/unicode). Failures inside createEntity and
    createProperty are logged and swallowed rather than raised, so a bad
    record does not abort a batch.
    """
    def __init__(self, config, logging):
        self.config = config
        self.logging = logging
        # entityId -> [fetch time, response]; consulted by
        # entitySearchById(useCache=True) until local_cache_expiration passes.
        self.entityCache = {}
        # List of properties that can potentially be used as the entity ID in order of strength
        self.potentialEntityIdProperties = ['url', 'name', 'targetUrl', 'targetName']
        # Alignment Object is a special case, list of properties to be used in the hash for the entity ID
        self.alignmentObjectProperties = ['alignmentType', 'educationalFramework', 'targetDescription', 'targetName', 'targetUrl']
    def createEntity(self, entity, submitter):
        """Create *entity* (recursively, including dict-valued child
        properties) unless it already exists; return its entity ID.

        Returns None when an exception was caught and logged.
        """
        # Todo: in schema.org entities can have more than one type
        try:
            # The entity type URL must live on (a subdomain of) schema.org.
            up = urlparse.urlparse(entity['type'][0])
            if up[1].lower() != 'schema.org' and not up[1].lower().endswith('.schema.org'):
                raise ValueError(str.format('invalid entity type: {0}', entity['type'][0]))
            entityType = self.makeLowercaseUnderscore(up[2])
            entityId = self.makeEntityId(entityType, entity)
            # Search for the entity by ID
            entitySearchResponse = self.entitySearchById(entityId)
            entityGuid = None
            if isinstance(entitySearchResponse['response'], list) and \
            len(entitySearchResponse['response']) > 0 and \
            'props' in entitySearchResponse['response'][0]:
                entityGuid = entitySearchResponse['response'][0]['props']['urn:lri:property_type:guid']
            # Create the entity if it doesn't already exist
            if entityGuid is None or len(entityGuid) == 0:
                entityGuid = self.entityCreate(entityId, str.format('urn:schema-org:entity_type:{0}', entityType), submitter)
            for key,propArray in entity['properties'].items():
                for prop in propArray:
                    if type(prop) == dict:
                        # Create child object
                        self.logging.debug(str.format('Creating child object, key: {0}, value: {1}', key, prop))
                        childEntityId = self.createEntity(prop, submitter)
                        self.createProperty(entityGuid, key, childEntityId, submitter)
                    elif type(prop) == str or type(prop) == unicode:
                        # Create normal property
                        self.createProperty(entityGuid, key, prop, submitter)
                    else:
                        self.logging.error(str.format('Unexpected property type, key: {0}, value: {1}, type: {2}', key, prop, type(prop)))
            return entityId
        except:
            # NOTE(review): bare except also traps SystemExit/KeyboardInterrupt.
            self.logging.exception(str.format('entity: {0}', json.dumps(entity)))
    def createProperty(self, entityGuid, propName, propValue, submitter):
        """Attach property *propName*=*propValue* to the entity *entityGuid*.

        Enumeration-ranged properties are converted to enumeration-member
        IDs (creating the enumeration/member on demand). Exceptions are
        logged and swallowed.
        """
        try:
            officialPropName = self.makeOfficialPropertyName(propName)
            officialPropValue = propValue
            propertyTypeResponse = self.entitySearchById(officialPropName, True)
            if len(propertyTypeResponse['response']) == 0:
                # This gives us a more readable error in the log
                raise Exception('Property type "{0}" does not exist'.format(officialPropName))
            ranges = propertyTypeResponse['response'][0]['props']['urn:lri:property_type:ranges']
            # If the range of the property is an enumeration then make sure the enumeration and our enumeration value exist
            if ranges == 'urn:lri:entity_type:enumeration_member':
                enumerationId = self.makeEnumerationId(propName)
                officialPropValue = self.makeEnumerationValue(propName, propValue)
                self.createEnumerationValueIfNeeded(enumerationId, officialPropValue, propValue, submitter)
            self.propertyCreate(entityGuid, officialPropName, officialPropValue, submitter)
        except:
            # NOTE(review): bare except also traps SystemExit/KeyboardInterrupt.
            self.logging.exception(str.format('entityGuid: {0} propName: {1} propValue: {2}', entityGuid, propName, propValue))
    def createEnumerationValueIfNeeded(self, enumerationId, enumerationValue, name, submitter):
        """Ensure the enumeration *enumerationId* exists and contains
        *enumerationValue*, creating either as needed."""
        enumerationResponse = self.entitySearchById(enumerationId)
        if len(enumerationResponse['response']) == 0:
            self.entityCreate(enumerationId, 'urn:lri:entity_type:enumeration', 'LR_CONNECTOR')
            enumerationResponse = self.entitySearchById(enumerationId)
        self.logging.debug('props: {0}'.format(enumerationResponse['response'][0]['props']))
        # has_enumeration_member may be a single value or a list; handle both.
        if 'urn:lri:property_type:has_enumeration_member' not in enumerationResponse['response'][0]['props'] or \
        (enumerationValue != enumerationResponse['response'][0]['props']['urn:lri:property_type:has_enumeration_member'] and \
        enumerationValue not in enumerationResponse['response'][0]['props']['urn:lri:property_type:has_enumeration_member']):
            enumerationValueEntityGuid = self.entityCreate(enumerationValue, 'urn:lri:entity_type:enumeration_member', submitter)
            self.propertyCreate(enumerationValueEntityGuid, 'urn:lri:property_type:name', name, submitter)
            self.propertyCreate(enumerationValueEntityGuid, 'urn:lri:property_type:is_member_of_enumeration', enumerationId, submitter)
    def makeEntityId(self, entityType, entity):
        """Derive a deterministic URN-style ID for *entity*.

        creative_work -> by scrubbed url; alignment_object -> md5 hash of
        its defining properties; otherwise the strongest available property
        from potentialEntityIdProperties. Raises ValueError when none apply.
        """
        entityIdTemplate = 'urn:schema-org:{0}:{{0}}:{{1}}'.format(entityType)
        if entityType == 'creative_work':
            return entityIdTemplate.format('url', helperfunctions.scrubUrl(entity['properties']['url'][0]))
        # AlignmentObject is unique. It is essentially a hyperedge and is defined by the combination of its properties.
        if entityType == 'alignment_object':
            propCollection = ''
            for prop in self.alignmentObjectProperties:
                if prop in entity['properties']:
                    if isinstance(entity['properties'][prop], list) and \
                    len(entity['properties'][prop]) == 1 and \
                    (type(entity['properties'][prop][0]) == str or type(entity['properties'][prop][0]) == unicode):
                        propCollection += entity['properties'][prop][0]
                    else:
                        raise Exception('Unexpected property in AlignmentObject. name: "{0}", value: "{1}"'.format(prop, entity['properties'][prop]))
                propCollection += '|'
            propCollection = propCollection[:-1]
            return entityIdTemplate.format('hash', hashlib.md5(propCollection).hexdigest())
        ''' Don't ever trust the id field coming in from the JSON
        if 'id' in entity and len(entity['id']) > 0:
        # id is specific to stand-alone schema.org JSON, if it starts with "urn:" then
        # trust that it is properly formatted and globally unique
        if entity['id'].startswith('urn:'):
        return entity['id']
        return entityIdTemplate.format('id', entity['id'])
        '''
        for prop in self.potentialEntityIdProperties:
            if prop in entity['properties'] and \
            isinstance(entity['properties'][prop], list) and \
            len(entity['properties'][prop]) > 0 and \
            len(entity['properties'][prop][0]) > 0:
                value = entity['properties'][prop][0]
                if prop.lower().endswith("url"):
                    value = helperfunctions.scrubUrl(value)
                return entityIdTemplate.format(self.makeLowercaseUnderscore(prop), value)
        raise ValueError(str.format('unable to create entityId'))
    def makeLowercaseUnderscore(self, s):
        """camelCase -> snake_case, lowercased, with any leading '/' stripped."""
        return re.sub("([a-z])([A-Z])","\\1_\\2",s).lower().lstrip('/')
    def makeOfficialPropertyName(self, propName):
        """schema.org property name -> LRI property-type URN."""
        return str.format('urn:schema-org:property_type:{0}', self.makeLowercaseUnderscore(propName))
    def makeEnumerationId(self, propName):
        """schema.org property name -> LRI enumeration URN."""
        return str.format('urn:schema-org:enumeration:{0}', self.makeLowercaseUnderscore(propName))
    def makeEnumerationValue(self, propName, propValue):
        """(property, value) pair -> LRI enumeration-member URN."""
        return str.format('urn:schema-org:enumeration_member:{0}:{1}', self.makeLowercaseUnderscore(propName), self.makeLowercaseUnderscore(propValue))
    def propertyCreate(self, entityGuid, propType, propValue, submitter):
        """Low-level property/create call; returns the new property guid."""
        q = { \
        'from': entityGuid, \
        'proptype': propType, \
        'value': propValue \
        }
        opts = { \
        'access_token': self.config['lrib']['access_token'], \
        'on_behalf_of': submitter \
        }
        response = self.sendRequest('property', 'create', q, opts)
        if 'guid' not in response['response']:
            raise Exception('Guid not found in property/create response')
        return response['response']['guid']
    def entitySearchById(self, entityId, useCache = False):
        """Low-level entity/search by LRI id, with an optional local
        time-bounded cache (expiry from config local_cache_expiration)."""
        q = { \
        'urn:lri:property_type:id': entityId \
        }
        if useCache and \
        entityId in self.entityCache and \
        datetime.datetime.now() < (self.entityCache[entityId][0] + datetime.timedelta(seconds=self.config['lrib']['local_cache_expiration'])):
            return self.entityCache[entityId][1]
        response = self.sendRequest('entity', 'search', q)
        if useCache:
            self.entityCache[entityId] = [datetime.datetime.now(), response]
        return response
    def entityCreate(self, entityId, entityType, submitter):
        """Low-level entity/create call; returns the new entity guid."""
        q = { \
        'urn:lri:property_type:id': entityId, \
        'urn:lri:property_type:types': [ \
        'urn:lri:entity_type:thing', \
        entityType \
        ] \
        }
        opts = { \
        'access_token': self.config['lrib']['access_token'], \
        'on_behalf_of': submitter \
        }
        response = self.sendRequest('entity', 'create', q, opts)
        if 'urn:lri:property_type:guid' not in response['response']:
            raise Exception('Guid not found in entity/create response')
        return response['response']['urn:lri:property_type:guid']
    def sendRequest(self, objectType, verb, q, opts = None):
        """GET {url}/{objectType}/{verb}?q=...[&opts=...] with optional HTTP
        basic auth; returns the decoded JSON after checking status=='normal'.
        """
        now = datetime.datetime.now()
        url = str.format('{0}/{1}/{2}?q={3}', self.config['lrib']['url'], objectType, verb, urllib.quote(json.dumps(q)))
        if opts is not None:
            url += str.format('&opts={0}', urllib.quote(json.dumps(opts)))
        self.logging.debug(str.format('LRIB Request: {0}', url))
        request = urllib2.Request(url)
        if 'username' in self.config['lrib'] and len(self.config['lrib']['username']) > 0 and \
        'password' in self.config['lrib'] and len(self.config['lrib']['password']) > 0:
            b64 = base64.encodestring(str.format('{0}:{1}', self.config['lrib']['username'], self.config['lrib']['password']))
            request.add_header("Authorization", str.format("Basic {0}", b64))
        response = json.load(urllib2.urlopen(request))
        if response is None:
            raise Exception('no response from LRIB')
        status = response['status'] if 'status' in response else ''
        message = response['message'] if 'message' in response else ''
        if status != 'normal':
            raise Exception('LRIB did not return "normal" status. status: "{0}", message: "{1}"'.format(status, message))
        if 'response' not in response:
            # NOTE(review): this message has unfilled {0}/{1} placeholders -
            # .format(status, message) is missing in the original.
            raise Exception('"response" missing from LRIB response. status: "{0}", message: "{1}"')
        self.logging.debug(str.format('LRIB Response: {0}', json.dumps(response)))
        return response
|
from prediction.classes import Meeting, Course, Section
from prediction import schedule_generator
import datetime
# Meeting 1, Section 1, Course 1
meetingType = "Lecture"
campus = "Newark"
startTime = datetime.time(7, 0)
endTime = datetime.time(8, 5)
professorName = "Suporn Chenhansa"
room = "NC2308"
recurrence = ["MO", "WE"]
C1S1M1 = Meeting(meetingType, campus, startTime, endTime, professorName, room, recurrence)
# Meeting 2, Section 1, Course 1
# BUGFIX: this assignment was previously to 'meetingtype' (lowercase t), so
# the Meeting below silently reused the stale "Lecture" value.
meetingType = "Laboratory"
campus = "Newark"
startTime = datetime.time(8, 5)
endTime = datetime.time(10, 10)
professorName = "David D. Topham"
room = "NC2308"
recurrence = ["MO", "WE"]
C1S1M2 = Meeting(meetingType, campus, startTime, endTime, professorName, room, recurrence)
# Meeting 3, Section 1, Course 1
# BUGFIX: same lowercase 'meetingtype' typo as above.
meetingType = "Lab Overload"
campus = "TBA"
startTime = datetime.time(0, 0)
endTime = datetime.time(0, 0)
professorName = "David D. Topham"
room = "TBA"
recurrence = []
C1S1M3 = Meeting(meetingType, campus, startTime, endTime, professorName, room, recurrence)
# Section 1, Course 1
startDate = datetime.date(2017, 8, 28)
endDate = datetime.date(2017, 12, 15)
meetings = [C1S1M1, C1S1M2, C1S1M3]
section_number = "01"
C1S1 = Section(startDate, endDate, meetings, section_number)
###################################
#Meeting 1, Section 2, Course 1
meetingType = "Lecture"
campus = "Newark"
startTime = datetime.time(9,0)
endTime = datetime.time(11,30)
professorName = "Yukai Lin"
room = "NC2306"
recurrence = ["SA"]
C1S2M1 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
#Meeting 2, Section 2, Course 1
meetingType = "Text One-Way Lab Days"
campus = "TBADISTANCE LEARNING VIA WEB"
startTime = datetime.time(0,0)
endTime = datetime.time(0,0)
professorName = "Yukai Lin"
room = "WEB"
recurrence = []
C1S2M2 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
# Section 2, Course 1
startDate = datetime.date(2017,8,28)
endDate = datetime.date(2017,12,15)
meetings = [C1S2M1,C1S2M2]
section_number = "02"
C1S2 = Section(startDate,endDate,meetings,section_number)
####################################
#Meeting 1, Section 3, Course 1
meetingType = "Lecture"
campus = "Newark"
startTime = datetime.time(18,0)
endTime = datetime.time(20,5)
professorName = "Pamela R. Price"
room = "NC2308"
recurrence = ["TU","TH"]
C1S3M1 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
#Meeting 2, Section 3, Course 1
meetingType = "Laboratory"
campus = "Newark"
startTime = datetime.time(20,5)
endTime = datetime.time(21,10)
professorName = "Pamela R. Price"
room = "NC2308"
recurrence = ["TU","TH"]
C1S3M2 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
# Section 3, Course 1
startDate = datetime.date(2017,8,28)
endDate = datetime.date(2017,12,15)
meetings = [C1S2M1,C1S2M2]
section_number = "03"
C1S3 = Section(startDate,endDate,meetings,section_number)
###################################
# Course 1
sections = [C1S1,C1S2,C1S3]
credits = "3.00"
subject = "CS"
course_number = "102"
title = "Intro to Programming Using C++"
desc = "An introduction to computer programming using the C++ language for students with no programming experience."
C1 = Course(sections,credits,subject,course_number,title,desc)
###################################
###################################
###################################
#Meeting 1, Section 1, Course 2
meetingType = "Lecture"
campus = "Newark"
startTime = datetime.time(8,0)
endTime = datetime.time(9,35)
professorName = "Bonnie Bennett-Walker"
room = "NC2119"
recurrence = ["MO","WE"]
C2S1M1 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
#Meeting 2, Section 1, Course 2
meetingtype = "Text One-Way Lab Days"
campus = "TBADISTANCE LEARNING VIA WEB"
startTime = datetime.time(0,0)
endTime = datetime.time(0,0)
professorName = "Bonnie Bennett-Walker"
room = "WEB"
recurrence = []
C2S1M2 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
# Section 1, Course 2
startDate = datetime.date(2017,8,28)
endDate = datetime.date(2017,12,15)
meetings = [C2S1M1,C2S1M2]
section_number = "01"
C2S1 = Section(startDate,endDate,meetings,section_number)
###################################
#Meeting 1, Section 2, Course 2
meetingType = "Lecture"
campus = "Newark"
startTime = datetime.time(8,0)
endTime = datetime.time(9,35)
professorName = "Sobia Saleem"
room = "NP-3"
recurrence = ["MO","WE"]
C2S2M1 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
#Meeting 2, Section 2, Course 2
meetingtype = "Text One-Way Lab Days"
campus = "TBADISTANCE LEARNING VIA WEB"
startTime = datetime.time(0,0)
endTime = datetime.time(0,0)
professorName = "Sobia Saleem"
room = "WEB"
recurrence = []
C2S2M2 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
# Section 2, Course 2
startDate = datetime.date(2017,8,28)
endDate = datetime.date(2017,12,15)
meetings = [C2S2M1,C2S2M2]
section_number = "02"
C2S2 = Section(startDate,endDate,meetings,section_number)
###################################
#Meeting 1, Section 3, Course 2
meetingType = "Lecture"
campus = "Newark"
startTime = datetime.time(8,0)
endTime = datetime.time(9,35)
professorName = "Margaret McKenzie"
room = "FP-13"
recurrence = ["MO","WE"]
C2S3M1 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
#Meeting 2, Section 3, Course 2
meetingtype = "Text One-Way Lab Days"
campus = "TBADISTANCE LEARNING VIA WEB"
startTime = datetime.time(0,0)
endTime = datetime.time(0,0)
professorName = "Margaret McKenzie"
room = "WEB"
recurrence = []
C2S3M2 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
# Section 3, Course 2
startDate = datetime.date(2017,8,28)
endDate = datetime.date(2017,12,15)
meetings = [C2S3M1,C2S3M2]
section_number = "03"
C2S3 = Section(startDate,endDate,meetings,section_number)
###################################
# Course 2
sections = [C2S1,C2S2,C2S3]
credits = "4.00"
subject = "ENGL"
course_number = "101A"
title = "Reading & Written Composition"
desc = "Development of college-level reading, writing, and critical thinking skills. Essay writing includes argument, exposition, and research."
C2 = Course(sections,credits,subject,course_number,title,desc)
###################################
###################################
###################################
#Meeting 1, Section 1, Course 3
meetingType = "Lecture"
campus = "Fremont"
startTime = datetime.time(7,30)
endTime = datetime.time(8,45)
professorName = "No Information Availiable"
room = "FP-17"
recurrence = ["MO","TU","WE","TH"]
C3S1M1 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
# Section 1, Course 3
startDate = datetime.date(2017,8,28)
endDate = datetime.date(2017,12,15)
meetings = [C3S1M1]
section_number = "01"
C3S1 = Section(startDate,endDate,meetings,section_number)
###################################
#Meeting 1, Section 2, Course 3
meetingType = "Lecture"
campus = "Fremont"
startTime = datetime.time(12,30)
endTime = datetime.time(13,45)
professorName = "Jeffrey P. O'Connell"
room = "FP-15"
recurrence = ["MO","TU","WE","TH"]
C3S2M1 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
# Section 2, Course 3
startDate = datetime.date(2017,8,28)
endDate = datetime.date(2017,12,15)
meetings = [C3S2M1]
section_number = "02"
C3S2 = Section(startDate,endDate,meetings,section_number)
###################################
#Meeting 1, Section 3, Course 3
meetingType = "Lecture"
campus = "Newark"
startTime = datetime.time(18,30)
endTime = datetime.time(21,15)
professorName = "Noorullah Wardak"
room = "NC2120"
recurrence = ["MO","WE"]
C3S3M1 = Meeting(meetingType,campus,startTime,endTime,professorName,room,recurrence)
# Section 3, Course 3
startDate = datetime.date(2017,8,28)
endDate = datetime.date(2017,12,15)
meetings = [C3S3M1]
section_number = "03"
C3S3 = Section(startDate,endDate,meetings,section_number)
###################################
# Course 3
sections = [C3S1,C3S2]#,C3S3]
credits = "5.00"
subject = "MATH"
course_number = "101C"
title = "Calculus with Analytic Geom"
desc = "Vectors, functions of several variables, partial derivatives, multiple integration, and applications."
C3 = Course(sections,credits,subject,course_number,title,desc)
###################################
###################################
###################################
testcase = [C1,C2,C3]
schedule_generator.schedule_generator(testcase) |
from optparse import make_option
from six.moves.urllib.request import urlopen
import xml.etree.ElementTree as etree
from django.core.management.base import BaseCommand, CommandError
from speeches.models import Section, Speech, Speaker
from instances.models import Instance
# Map of XML filenames to play titles.  The filenames refer to Jon Bosak's
# XML Shakespeare corpus as hosted on ibiblio.org (see the URL built in
# Command.handle below).
PLAYS = {
    "all_well.xml": "All's Well That Ends Well",
    "as_you.xml": "As You Like It",
    "a_and_c.xml": "Antony and Cleopatra",
    "com_err.xml": "A Comedy of Errors",
    "coriolan.xml": "Coriolanus",
    "cymbelin.xml": "Cymbeline",
    "dream.xml": "A Midsummer Night's Dream",
    "hamlet.xml": "Hamlet",
    "hen_iv_1.xml": "Henry IV, Part I",
    "hen_iv_2.xml": "Henry IV, Part II",
    "hen_v.xml": "Henry V",
    "hen_viii.xml": "Henry VIII",
    "hen_vi_1.xml": "Henry VI, Part 1",
    "hen_vi_2.xml": "Henry VI, Part 2",
    "hen_vi_3.xml": "Henry VI, Part 3",
    "john.xml": "The Life and Death of King John",
    "j_caesar.xml": "Julius Caesar",
    "lear.xml": "King Lear",
    "lll.xml": "Love's Labor's Lost",
    "macbeth.xml": "Macbeth",
    "merchant.xml": "The Merchant of Venice",
    "much_ado.xml": "Much Ado About Nothing",
    "m_for_m.xml": "Measure for Measure",
    "m_wives.xml": "The Merry Wives of Windsor",
    "othello.xml": "Othello",
    "pericles.xml": "Pericles",
    "rich_ii.xml": "Richard II",
    "rich_iii.xml": "Richard III",
    "r_and_j.xml": "Romeo and Juliet",
    "taming.xml": "The Taming of the Shrew",
    "tempest.xml": "The Tempest",
    "timon.xml": "Timon of Athens",
    "titus.xml": "Titus Andronicus",
    "troilus.xml": "Troilus and Cressida",
    "two_gent.xml": "Two Gentlemen of Verona",
    "t_night.xml": "Twelfth Night",
    "win_tale.xml": "A Winter's Tale",
}
class Command(BaseCommand):
    """Import one of Shakespeare's plays (Bosak XML corpus) into a SayIt instance.

    Downloads the play's XML from ibiblio.org and creates Section, Speaker and
    Speech objects.  Without --commit nothing is saved; the section headings
    that would be created are printed instead.
    """
    args = '<play>'
    help = 'Import a Shakespeare play into a SayIt instance'
    option_list = BaseCommand.option_list + (
        make_option('--commit', action='store_true', help='Whether to commit to the database or not'),
        make_option('--instance', action='store', default='shakespeare', help='Label of instance to add data to'),
        make_option('--list', action='store_true', help='List the plays available'),
    )

    def make(self, cls, **kwargs):
        """Instantiate cls in the current instance; save only when committing."""
        s = cls(instance=self.instance, **kwargs)
        if self.commit:
            s.save()
        elif s.heading:
            # Dry run: show the headings that would have been created.
            print(s.heading)
        return s

    def handle(self, *args, **options):
        if options['list'] or len(args) != 1:
            self.stdout.write('Plays:\n')
            for play in sorted(PLAYS.values()):
                self.stdout.write('* %s\n' % play)
            if not options['list']:
                raise CommandError("Please specify a play")
            return
        play = args[0]
        # Reverse-lookup the XML filename from the play title.
        file = None
        for f, p in PLAYS.items():
            if play == p:
                file = f
                break
        if not file:
            raise CommandError("No matching play found")
        try:
            self.instance = Instance.objects.get(label=options['instance'])
        except Instance.DoesNotExist:
            # Bug fix: was a bare "except:", which also swallowed unrelated
            # errors (MultipleObjectsReturned, database failures, ...).
            raise CommandError("Instance specified not found")
        self.commit = options['commit']
        xml = urlopen('http://www.ibiblio.org/xml/examples/shakespeare/%s' % file).read()
        play_xml = etree.fromstring(xml)
        play_section = self.make(Section, heading=play)
        speakers = {}
        for act in play_xml:
            if act.tag != 'ACT':
                continue
            # First child of an ACT/SCENE element is its title.
            act_heading = act[0].text
            act_section = self.make(Section, heading=act_heading, parent=play_section)
            scenes = act[1:]
            for scene in scenes:
                scene_heading = scene[0].text
                scene_section = self.make(Section, heading=scene_heading, parent=act_section)
                speeches_xml = scene[1:]
                for sp in speeches_xml:
                    if sp.tag in ('STAGEDIR', 'SUBHEAD', 'SUBTITLE'):
                        # Non-speech elements become italicised narrative.
                        self.make(Speech, section=scene_section, text='<p><i>%s</i></p>' % sp.text, type='narrative')
                        continue
                    if not sp[0].text:
                        speaker = None
                    elif self.commit:
                        # Reuse a Speaker per (cleaned) name across the play.
                        name = sp[0].text.replace('[', '').replace(']', '')
                        if name in speakers:
                            speaker = speakers[name]
                        else:
                            speaker = Speaker.objects.create(name=name, instance=self.instance)
                            speakers[name] = speaker
                    else:
                        speaker = Speaker(name=sp[0].text, instance=self.instance)
                    text = ""
                    lines = sp[1:]
                    for line in lines:
                        if len(line):
                            # A LINE wrapping a nested STAGEDIR element.
                            text += '<i>%s</i>' % line[0].text
                            if line[0].tail:
                                text += ' %s' % line[0].tail.strip()
                            text += '<br>\n'
                        elif line.tag == 'LINE':
                            text += '%s<br>\n' % line.text
                        elif line.tag == 'STAGEDIR':
                            text += '<i>%s</i><br>\n' % line.text
                    text = '<p>%s</p>' % text
                    self.make(Speech, speaker=speaker, section=scene_section, text=text, type='speech')
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
# __author__ = "LJ"
# Date: 2019/3/6
filepath = "D:\\test.txt"

# 1. Read D:\test.txt in full and echo its contents.
with open(filepath, 'r', encoding='utf-8') as rf:
    filedata = rf.read()
print(filedata)

# 2. Scan line by line, keeping every line that looks like an e-mail address
#    (contains both "@" and ".com"); echo matches and collect them.
emaillist = []
with open(filepath, 'r', encoding='utf-8') as rf:
    for line in rf:
        if "@" in line and ".com" in line:
            print(line, end='')
            emaillist.append(line.replace("\n", ""))
print(emaillist)

# 3. Print all lines ordered by ascending length.
with open(filepath, 'r', encoding='utf-8') as rf:
    lines_list = rf.readlines()
lines_list.sort(key=len)
for l in lines_list:
    print(l, end='')

# 4. Copy every line into a new file, test_new.txt.
with open(filepath, 'r', encoding='utf-8') as rf, \
        open('D:\\test_new.txt', 'w', encoding='utf-8') as wf:
    wf.writelines(rf)
|
import numpy as np
import numpy.linalg as la

# Least-squares fit of a 2-D affine transform (6 parameters) from point
# correspondences.  Each row of data.txt is "x', y', x, y" where (x, y) maps
# to (x', y').  NOTE: Python 2 syntax (print statements).
file = open("data.txt")
data = np.genfromtxt(file, delimiter=",")
file.close()
print "data ="
print data
# Each correspondence contributes two rows to the design matrix M:
#   x' = a0*x + a1*y + a2
#   y' = a3*x + a4*y + a5
M = []
b = []
for x_prime, y_prime, x, y in data:
    M.append([x,y,1,0,0,0])
    M.append([0,0,0,x,y,1])
    b.append([x_prime])
    b.append([y_prime])
M = np.matrix(M)
print "M ="
print M
b = np.matrix(b)
print "b ="
print b
# lstsq returns (solution, residuals, rank, singular values).
a, e, r, s = la.lstsq(M, b)
print "a ="
print a
# print "M*a =", M*a
# print "b =", b
# Recompute the residual directly as ||M*a - b||^2 for comparison with e.
sum_squared_error = pow(la.norm(M*a-b), 2)
print "Sum-squared error =", sum_squared_error
print "Residue =", e
import requests
from bs4 import BeautifulSoup
from csv import writer
import time
# TMDb credentials/endpoint.  Fix: the key was previously duplicated inline
# in BASE_URL; it is now derived from API_KEY so it only lives in one place.
# SECURITY NOTE(review): the API key is committed in source -- consider
# moving it to an environment variable.
API_KEY = '15d2ea6d0dc1d476efbca3eba2b9bbfb'
BASE_URL = 'https://api.themoviedb.org/3/search/movie?api_key=' + API_KEY + '&query='
def fetch_movies_data():
    """Download the IMDb Top chart page and return it as parsed HTML soup."""
    url = 'http://www.imdb.com/chart/top?pf_rd_m=A2FGELUUNOQJNL&pf_rd_p=4da9d9a5-d299-43f2-9c53-f0efa18182cd&pf_rd_r=0YACBGCPEXB7G47FJB10&pf_rd_s=right-4&pf_rd_t=15506&pf_rd_i=toptv&ref_=chttvtp_ql_3'
    headers = {
        'Accept': 'application/json',
        # 'Accept-Language': 'en-US,en;q=0.5'
        'Accept-Language': 'en-us;q=1.0, pl;q=0'
    }
    page = requests.get(url, headers=headers)
    return BeautifulSoup(page.text, 'html.parser')
def get_ranking(soup):
    """Return the linked movie titles from the chart's title column, in order."""
    return [cell.find('a').get_text()
            for cell in soup.find_all('td', class_='titleColumn')]
def get_ids(soup):
    """Return the IMDb title id ('data-tconst') stored on each watchlist cell."""
    return [cell.div['data-tconst']
            for cell in soup.find_all('td', class_="watchlistColumn")]
def get_years(soup):
    """Return the four-digit year from each '(YYYY)' secondary-info span."""
    spans = soup.find_all('span', class_="secondaryInfo")
    # Characters 1..4 skip the opening parenthesis of '(YYYY)'.
    return [span.get_text()[1:5] for span in spans]
def write_ranking_to_file(movies, years, image_urls, ids):
    """Write the ranking to ranking.csv as rank,title,year,imageUrl,id rows.

    Rows are truncated to the shortest of the four input lists (zip semantics).
    Fix: the file is now opened with newline='' as the csv module requires,
    which prevents blank rows being interleaved on Windows.
    """
    with open('ranking.csv', 'w', newline='') as csv_file:
        csv_writer = writer(csv_file)
        csv_writer.writerow(['rank', 'title', 'year', 'imageUrl', 'id'])
        # 'movie_id' instead of 'id' to avoid shadowing the builtin.
        for rank, (movie, image_url, year, movie_id) in enumerate(
                zip(movies, image_urls, years, ids), start=1):
            csv_writer.writerow([rank, movie, year, image_url, movie_id])
def fetch_images_urls(titles):
    """Look up each title on TMDb and collect its poster path (or 'blank').

    One request per title, throttled to roughly one per second.
    """
    headers = {
        'Accept': 'application/json',
        'Accept-Language': 'en-US,en;q=0.5'
    }
    urls = []
    for title in titles:
        response = requests.get(BASE_URL + title, headers=headers)
        results = response.json()['results']
        if results and isinstance(results[0]['poster_path'], str):
            # Drop the leading '/' of the poster path.
            image_url = results[0]['poster_path'][1:]
            print(image_url)
        else:
            image_url = 'blank'
            print('Nie ma plakatu ;(')
        urls.append(image_url)
        time.sleep(1)
    return urls
# Scrape the chart once, derive each column from it, then persist the ranking.
data = fetch_movies_data()
movies = get_ranking(data)
image_urls = fetch_images_urls(movies)  # network-bound: one TMDb request per title
ids = get_ids(data)
years = get_years(data)
write_ranking_to_file(movies, years, image_urls, ids)
# First we'll import the os module
# This will allow us to create file paths across operating systems
import os
# Module for reading CSV files
import csv
# csvpath = os.path.join('Resources', 'Netflix.csv')
mainpath = os.path.join('Resources', 'election_data.csv')
output_path = os.path.join('Resources', 'Polling_Results.csv')
# Tally votes per candidate from the election data CSV.
# Expected columns: voter id, county, candidate name (index 2).
# with open (csvpath) as csvfile:
with open(mainpath, newline='') as csvfile:
    # CSV reader specifies delimiter and variable that holds contents
    csvreader = csv.reader(csvfile, delimiter=',')
    # Read the header row first
    csv_header = next(csvreader)
    print(f"Reading in: {csv_header}. . . please wait")
    # Init some variables to zero
    rowcount = 0  # so I know how many votes there are
    candidate = []  # a list to keep the names of the candidates
    votes = []  # a list to keep all of the votes for the candidates (parallel to candidate)
    # Read each row of data after the header
    for row in csvreader:
        rowcount = rowcount + 1
        # check if this is a new candidate by looking in the list for the name
        if (row[2]) in candidate:
            # If in the votes list, increment by 1 the votes for this specific candidate
            votes[candidate.index(row[2])] += 1
        else:
            # else, add this candidate to the voter list
            candidate.extend([row[2]])
            # add a new variable to the voter array with an initial value of 1
            votes.extend([1])
            # votes[candidate.index(row[2])] += 1 # adding
# Report results to the console and track the candidate with the most votes.
print()
print(f"Election Results")
print(f"------------------------------------------")
print(f"Total Votes: {rowcount}")
winner_tot = 0
winner = "nobody"
for name in candidate:
    print(f"{name} has {votes[candidate.index(name)]} votes, for {((votes[candidate.index(name)])/rowcount*100):.3f}%")
    if votes[candidate.index(name)]>winner_tot:
        winner = name
        winner_tot = votes[candidate.index(name)]
print(f"------------------------------------------")
print(f"{winner} wins with {winner_tot} votes.")
print(f"------------------------------------------")
# Write the same report to Polling_Results.csv, one line per row.
with open(output_path, 'w', newline='') as csvfile:
    # Initialize csv.writer
    csvwriter = csv.writer(csvfile)
    csvwriter.writerow([f"Election Results"])
    csvwriter.writerow([f"------------------------------------------"])
    csvwriter.writerow([f"Total Votes: {rowcount}"])
    # Recompute the winner while writing (mirrors the console pass above).
    winner_tot = 0
    winner = "nobody"
    for name in candidate:
        csvwriter.writerow([f'{name} has {votes[candidate.index(name)]} votes, for {((votes[candidate.index(name)])/rowcount*100):.3f}%'])
        if votes[candidate.index(name)]>winner_tot:
            winner = name
            winner_tot = votes[candidate.index(name)]
    csvwriter.writerow([f"------------------------------------------"])
    csvwriter.writerow([f"{winner} wins with {winner_tot} votes."])
    csvwriter.writerow([f"------------------------------------------"])
|
# Build a list of star strings by padding each entry with two extra '*'s.
names = ['*']
names2 = []
for name in names:
    # Bug fix: the loop previously rebound names2 to a plain string each
    # iteration, discarding the list it was initialised as; append instead.
    names2.append(name + '*' * 2)
    print(names2)
|
from tmcl.dynamics.tmcl import MCLMultiHeadedCaDMDynamicsModel
from tmcl.trainers.mb_trainer import Trainer
from tmcl.policies.mpc_controller import MPCController
from tmcl.samplers.sampler import Sampler
from tmcl.logger import logger
from tmcl.envs.normalized_env import normalize
from tmcl.utils.utils import ClassEncoder
from tmcl.samplers.model_sample_processor import ModelSampleProcessor
from tmcl.envs.config import get_environment_config
from tmcl.envs.config2 import get_environment_config2
from tensorboardX import SummaryWriter
import json
import os
import gym
import argparse
def run_experiment(config,multi_confound):
    """Build the TMCL training stack from `config` and run training.

    Resolves the environment (multi- vs single-confounder variants), sets up
    logging/TensorBoard under ./data/<derived experiment name>, constructs the
    dynamics model, MPC policy, sampler and sample processor, then trains.
    """
    if multi_confound:
        env, config = get_environment_config(config)
        config["confound"] = "multi"
    else:
        env, config = get_environment_config2(config)
        config["confound"] = "single"
    # Save final config after editing config with respect to each environment.
    EXP_NAME = config["save_name"]
    EXP_NAME += (
        "hidden_" + str(config["dim_hidden"]) + "_lr_" + str(config["learning_rate"])
    )
    EXP_NAME += "_horizon_" + str(config["horizon"]) + "_seed_" + str(config["seed"])+"_confounder_"+str(config["confound"])
    exp_dir = os.getcwd() + "/data/" + EXP_NAME + "/" + config.get("exp_name", "")
    logger.configure(
        dir=exp_dir,
        format_strs=["stdout", "log", "csv"],
        snapshot_mode="last",
        only_test=config["only_test_flag"],
    )
    # Persist the fully-resolved config alongside the run's outputs.
    json.dump(
        config,
        open(exp_dir + "/params.json", "w"),
        indent=2,
        sort_keys=True,
        cls=ClassEncoder,
    )
    writer = SummaryWriter(exp_dir)
    # Multi-headed CaDM dynamics model; kwargs are forwarded verbatim from config.
    # (Note: "rolling_average_persitency" is the upstream API's spelling.)
    dynamics_model = MCLMultiHeadedCaDMDynamicsModel(
        name="dyn_model",
        env=env,
        learning_rate=config["learning_rate"],
        hidden_sizes=config["hidden_sizes"],
        valid_split_ratio=config["valid_split_ratio"],
        rolling_average_persitency=config["rolling_average_persitency"],
        hidden_nonlinearity=config["hidden_nonlinearity"],
        traj_batch_size=config["traj_batch_size"],
        sample_batch_size=config["sample_batch_size"],
        segment_size=config["segment_size"],
        normalize_input=config["normalize_flag"],
        n_forwards=config["horizon"],
        n_candidates=config["n_candidates"],
        ensemble_size=config["ensemble_size"],
        head_size=config["head_size"],
        n_particles=config["n_particles"],
        use_cem=config["use_cem"],
        deterministic=config["deterministic"],
        weight_decays=config["weight_decays"],
        weight_decay_coeff=config["weight_decay_coeff"],
        ie_itrs=config["ie_itrs"],
        use_ie=config["use_ie"],
        use_simulation_param=config["use_simulation_param"],
        simulation_param_dim=config["simulation_param_dim"],
        sep_layer_size=config["sep_layer_size"],
        cp_hidden_sizes=config["context_hidden_sizes"],
        context_weight_decays=config["context_weight_decays"],
        context_out_dim=config["context_out_dim"],
        context_hidden_nonlinearity=config["context_hidden_nonlinearity"],
        history_length=config["history_length"],
        future_length=config["future_length"],
        state_diff=config["state_diff"],
        back_coeff=config["back_coeff"],
        use_global_head=config["use_global_head"],
        non_adaptive_planning=config["non_adaptive_planning"],
    )
    # Model-predictive controller planning through the learned dynamics.
    policy = MPCController(
        name="policy",
        env=env,
        dynamics_model=dynamics_model,
        discount=config["discount"],
        n_candidates=config["n_candidates"],
        horizon=config["horizon"],
        use_cem=config["use_cem"],
        num_rollouts=config["num_rollouts"],
        mcl_cadm=True,
    )
    # Environment sampler used to collect rollouts for model training.
    sampler = Sampler(
        env=env,
        policy=policy,
        num_rollouts=config["num_rollouts"],
        max_path_length=config["max_path_length"],
        n_parallel=config["n_parallel"],
        random_flag=True,
        use_cem=config["use_cem"],
        horizon=config["horizon"],
        state_diff=config["state_diff"],
        history_length=config["history_length"],
        mcl_cadm=True,
    )
    sample_processor = ModelSampleProcessor(
        recurrent=True,  # MCL
        writer=writer,
        context=True,
        future_length=config["future_length"],
    )
    # Trainer ties model, policy, sampler and processor together.
    algo = Trainer(
        env=env,
        env_flag=config["dataset"],
        policy=policy,
        dynamics_model=dynamics_model,
        sampler=sampler,
        sample_processor=sample_processor,
        n_itr=config["n_itr"],
        initial_random_samples=config["initial_random_samples"],
        dynamics_model_max_epochs=config["dynamic_model_epochs"],
        num_test=config["num_test"],
        test_range=config["test_range"],
        total_test=config["total_test"],
        test_max_epochs=config["max_path_length"],
        no_test_flag=config["no_test_flag"],
        only_test_flag=config["only_test_flag"],
        use_cem=config["use_cem"],
        horizon=config["horizon"],
        writer=writer,
        mcl_cadm=True,
        history_length=config["history_length"],
        state_diff=config["state_diff"],
    )
    algo.train()
if __name__ == "__main__":
    # Parse CLI flags, derive a hierarchical save_name path encoding the
    # hyperparameter choices, assemble the config dict, and launch training.
    parser = argparse.ArgumentParser(description="Trajectory-wise MCL")
    parser.add_argument("--save_name", default="TMCL/", help="experiments name")
    parser.add_argument("--seed", type=int, default=0, help="random_seed")
    parser.add_argument("--dataset", default="halfcheetah", help="environment flag")
    parser.add_argument(
        "--hidden_size", type=int, default=200, help="size of hidden feature"
    )
    parser.add_argument(
        "--traj_batch_size", type=int, default=250, help="batch size (trajectory)"
    )
    parser.add_argument(
        "--sample_batch_size", type=int, default=256, help="batch size (sample)"
    )
    parser.add_argument("--segment_size", type=int, default=10, help="segment size")
    parser.add_argument(
        "--n_epochs", type=int, default=50, help="training epochs per iteration"
    )
    parser.add_argument("--lr", type=float, default=0.001, help="learning_rate")
    parser.add_argument("--horizon", type=int, default=30, help="horrizon for planning")
    parser.add_argument(
        "--normalize_flag", action="store_true", help="flag to normalize"
    )
    parser.add_argument("--total_test", type=int, default=20, help="# of test")
    parser.add_argument(
        "--n_candidate", type=int, default=200, help="candidate for planning"
    )
    parser.add_argument(
        "--no_test_flag", action="store_true", help="flag to disable test"
    )
    parser.add_argument(
        "--only_test_flag", action="store_true", help="flag to enable only test"
    )
    parser.add_argument(
        "--ensemble_size", type=int, default=5, help="size of ensembles"
    )
    parser.add_argument("--head_size", type=int, default=3, help="size of heads")
    parser.add_argument(
        "--n_particles",
        type=int,
        default=20,
        help="size of particles in trajectory sampling",
    )
    parser.add_argument("--policy_type", type=str, default="CEM", help="Policy Type")
    parser.add_argument(
        "--deterministic_flag",
        type=int,
        default=0,
        help="flag to use deterministic dynamics model",
    )
    parser.add_argument(
        "--use_ie_flag", type=int, default=1, help="flag to use ie loss with ie_itrs"
    )
    parser.add_argument(
        "--ie_itrs", type=int, default=3, help="epochs to train with IE loss"
    )
    parser.add_argument(
        "--sim_param_flag",
        type=int,
        default=0,
        help="flag to use simulation parameter as an input",
    )
    parser.add_argument(
        "--sep_layer_size",
        type=int,
        default=0,
        help="size of separated layers in multiheaded architecture",
    )
    parser.add_argument(
        "--tag", type=str, default="", help="additional tag for save directory.."
    )
    parser.add_argument(
        "--history_length",
        type=int,
        default=10,
        help="history length for adaptive planning",
    )
    parser.add_argument(
        "--state_diff",
        type=int,
        default=1,
        help="flag to use state difference for history",
    )
    parser.add_argument(
        "--back_coeff", type=float, default=0.5, help="coefficient for backward loss"
    )
    parser.add_argument(
        "--multi_confound", type=bool, default=True, help="if use multi confounder"
    )
    parser.add_argument(
        "--future_length", type=int, default=5, help="length of future timesteps"
    )
    parser.add_argument(
        "--context_out_dim", type=int, default=10, help="dimension of context vector"
    )
    parser.add_argument(
        "--use_global_head_flag",
        type=int,
        default=1,
        help="use global head for context encoder",
    )
    parser.add_argument(
        "--non_adaptive_planning_flag",
        type=int,
        default=0,
        help="non-adaptive planning. just average",
    )
    args = parser.parse_args()
    # ------------------------------------------------------------------
    # Build the experiment directory name segment by segment.  Each choice
    # below appends a path component so runs are grouped hierarchically.
    # ------------------------------------------------------------------
    if args.normalize_flag:
        args.save_name = "/NORMALIZED/" + args.save_name
    else:
        args.save_name = "/RAW/" + args.save_name
    if args.dataset == "hopper":
        args.save_name = "/HOPPER/" + args.save_name
    elif args.dataset == "halfcheetah":
        args.save_name = "/HALFCHEETAH/" + args.save_name
    elif args.dataset == "cripple_ant":
        args.save_name = "/CRIPPLE_ANT/" + args.save_name
    elif args.dataset == "slim_humanoid":
        args.save_name = "/SLIM_HUMANOID/" + args.save_name
    else:
        raise ValueError(args.dataset)
    if args.deterministic_flag == 0:
        args.save_name += "PROB/"
    else:
        args.save_name += "DET/"
    if args.policy_type in ["RS", "CEM"]:
        args.save_name += "{}/".format(args.policy_type)
        args.save_name += "CAND_{}/".format(args.n_candidate)
    else:
        raise ValueError(args.policy_type)
    # MCL Path
    if args.sim_param_flag == 1:
        args.save_name += "SIM_PARAM/"
    args.save_name += "T_BATCH_{}/".format(args.traj_batch_size)
    args.save_name += "S_BATCH_{}/".format(args.segment_size)
    args.save_name += "T_S_BATCH_{}/".format(args.sample_batch_size)
    args.save_name += "IE_{}/".format(args.ie_itrs)
    args.save_name += "EPOCH_{}/".format(args.n_epochs)
    args.save_name += "ENS_{}/".format(args.ensemble_size)
    args.save_name += "HEAD_{}/".format(args.head_size)
    args.save_name += "SEP_{}/".format(args.sep_layer_size)
    if args.use_ie_flag == 1:
        args.save_name += "USE_IE/"
    if args.non_adaptive_planning_flag == 1:
        args.save_name += "NON_ADAPTIVE/"
    # CaDM Path
    args.save_name += "H_{}/".format(args.history_length)
    args.save_name += "F_{}/".format(args.future_length)
    args.save_name += "BACK_COEFF_{}/".format(args.back_coeff)
    if args.state_diff:
        args.save_name += "DIFF/"
    else:
        args.save_name += "WHOLE/"
    if args.use_global_head_flag == 1:
        args.save_name += "GLOBAL_HEAD/"
    if args.tag != "":
        args.save_name += "tag_{}/".format(args.tag)
    # Final experiment configuration handed to run_experiment().
    config = {
        # Policy
        "n_candidates": args.n_candidate,
        "horizon": args.horizon,
        # Policy - CEM Hyperparameters
        "use_cem": args.policy_type == "CEM",
        # Environments
        "dataset": args.dataset,
        "normalize_flag": args.normalize_flag,
        "seed": args.seed,
        # Sampling
        "max_path_length": 200,
        "num_rollouts": 10,
        "n_parallel": 10,
        "initial_random_samples": True,
        # Training Hyperparameters
        "n_itr": 10,
        "learning_rate": args.lr,
        "traj_batch_size": args.traj_batch_size,
        "segment_size": args.segment_size,
        "sample_batch_size": args.sample_batch_size,
        "dynamic_model_epochs": args.n_epochs,
        "valid_split_ratio": 0.0,
        "rolling_average_persitency": 0.99,
        # Testing Hyperparameters
        "total_test": args.total_test,
        "no_test_flag": args.no_test_flag,
        "only_test_flag": args.only_test_flag,
        # Dynamics Model Hyperparameters
        "dim_hidden": args.hidden_size,
        "hidden_sizes": (args.hidden_size,) * 4,
        "hidden_nonlinearity": "swish",
        "deterministic": (args.deterministic_flag > 0),
        "weight_decays": (0.000025, 0.00005, 0.000075, 0.000075, 0.0001),
        "weight_decay_coeff": 1.0,
        # PE-TS Hyperparameters
        "ensemble_size": args.ensemble_size,
        "n_particles": args.n_particles,
        "head_size": args.head_size,
        "sep_layer_size": args.sep_layer_size,
        # CaDM Hyperparameters
        "context_hidden_sizes": (256, 128, 64),
        "context_weight_decays": (0.000025, 0.00005, 0.000075),
        "context_out_dim": args.context_out_dim,
        "context_hidden_nonlinearity": "relu",
        "history_length": args.history_length,
        "future_length": args.future_length,
        "state_diff": args.state_diff,
        "back_coeff": args.back_coeff,
        "use_global_head": (args.use_global_head_flag > 0),
        # MCL hyperparameters
        "ie_itrs": args.ie_itrs,
        "use_ie": (args.use_ie_flag > 0),
        "use_simulation_param": (args.sim_param_flag > 0),
        # Ablation
        "non_adaptive_planning": (args.non_adaptive_planning_flag > 0),
        # Other
        "save_name": args.save_name,
        "discount": 1.0,
        "tag": args.tag,
    }
    run_experiment(config,args.multi_confound)
|
# Python Cryptography Toolkit (pycrypto)
from Crypto.Cipher import AES
from .utils import grouper, xor
# Padding byte stripped from decrypted plaintext (EOT, 0x04).
PAD_CHAR = b'\x04'
# Fixed all-zero IV, one AES block long.  NOTE(review): a constant IV makes
# CBC deterministic; acceptable only for exercises, not production use.
IV = b'\x00' * AES.block_size
def encrypt_ecb(plaintext, password):
    """Encrypt `plaintext` with AES-ECB under key `password`."""
    return AES.new(password, AES.MODE_ECB).encrypt(plaintext)
def decrypt_ecb(ciphertext, password):
    """Decrypt AES-ECB `ciphertext` under key `password`."""
    return AES.new(password, AES.MODE_ECB).decrypt(ciphertext)
def _encrypt_cbc(plaintext, password):
    """Reference CBC encryption using the library implementation (fixed IV)."""
    cipher = AES.new(password, AES.MODE_CBC, IV)
    return cipher.encrypt(plaintext)
def _decrypt_cbc(ciphertext, password):
    """Reference CBC decryption using the library implementation; strips padding."""
    cipher = AES.new(password, AES.MODE_CBC, IV)
    return cipher.decrypt(ciphertext).rstrip(PAD_CHAR)
def encrypt_cbc(plaintext, password, iv=IV, block_size=AES.block_size):
    """Hand-rolled CBC encryption built on the ECB primitive.

    Each plaintext block is XORed with the previous ciphertext block
    (the IV for the first block) before being encrypted.
    """
    # padded_plaintext = pad(plaintext, block_size)
    cipher_blocks = []
    previous = iv
    for plain_block in grouper(block_size, plaintext):
        previous = encrypt_ecb(xor(plain_block, previous), password)
        cipher_blocks.append(previous)
    return b''.join(cipher_blocks)
def decrypt_cbc(ciphertext, password, iv=IV, block_size=AES.block_size):
    """Hand-rolled CBC decryption (inverse of encrypt_cbc); strips padding.

    Each decrypted block is XORed with the previous ciphertext block
    (the IV for the first block) to recover the plaintext.
    """
    plain_blocks = []
    previous = iv
    for cipher_block in grouper(block_size, ciphertext):
        plain_blocks.append(xor(decrypt_ecb(cipher_block, password), previous))
        previous = cipher_block
    return b''.join(plain_blocks).rstrip(PAD_CHAR)
|
import json
import os
import logging
import tornado.httpserver
import tornado.websocket
import tornado.ioloop
import tornado.web
import socket
import redis
import threading
class IndexHandler(tornado.web.RequestHandler):
    """Serve the daemon's index page."""

    def get(self):
        # render() writes the template and finishes the request itself, so the
        # deprecated @tornado.web.asynchronous decorator (removed in Tornado 6)
        # was unnecessary and has been dropped.
        items = ["Item 1", "Item 2", "Item 3"]
        self.render("views/index.html", title="Daemon", items=items)
|
import sys, random

print("Welcome to the NFL 'What If Name Picker.'\n")
print("Imagine If the New York Giants were the New York Rams:\n\n")

# City names: fixed stray quote characters/trailing spaces that leaked into
# "Jacksonville'", "Charlotte'" and "Cincinnati' ", and corrected the
# misspellings 'Inidanapolis' -> 'Indianapolis', 'New Engalnd' -> 'New England'.
first = ('New York', 'Arizona', 'Green Bay', 'Jacksonville',
         'Charlotte', 'Buffalo', 'Tampa Bay', 'Cincinnati',
         'Baltimore', 'Phoenix', 'Kansas City', 'Indianapolis', 'Cleveland',
         'Detroit', 'Atlanta', 'Miami', 'Houston', 'Dallas', 'Oakland',
         'Philadelphia', 'Los Angeles', 'Minneapolis', 'Pittsburgh', 'New Jersey',
         'Washington', 'New England', 'San Francisco', 'Chicago', 'Denver', 'Seattle',
         'Tennessee', 'New Orleans')
last = ('Cardinals', 'Falcons', 'Ravens', 'Bills',
        'Bears', 'Panthers', 'Bengals', 'Cowboys',
        'Browns', 'Broncos', 'Lions', 'Packers', 'Texans',
        'Colts', 'Jaguars', 'Chiefs', 'Dolphins', 'Vikings',
        'Patriots', 'Saints', 'Giants', 'Jets', 'Raiders',
        'Eagles', 'Steelers', "Rams", 'Chargers', '49ers', 'Seahawks',
        'Buccaneers', 'Titans', 'Redskins')

# Keep pairing random city/team names until the user quits.
while True:
    firstName = random.choice(first)
    lastName = random.choice(last)
    print("\n\n")
    # NOTE(review): the result is printed to stderr -- presumably intentional
    # (e.g. for unbuffered console output); confirm before changing to stdout.
    print(firstName, lastName, file=sys.stderr)
    print("\n\n")
    try_again = input("\n\nTry again? (Press Enter else n to quit)\n ")
    if try_again.lower() == "n":
        break
input("\nPress Enter to exit.")
|
# Generated by Django 3.2 on 2021-04-15 19:34
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: alters SocialMedia.app_img to an ImageField
    # with default=None that uploads to 'App_Images'.  Generated files like
    # this should not be hand-edited beyond comments.

    dependencies = [
        ('Social_Media', '0002_alter_socialmedia_app_img'),
    ]

    operations = [
        migrations.AlterField(
            model_name='socialmedia',
            name='app_img',
            field=models.ImageField(default=None, upload_to='App_Images'),
        ),
    ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.