index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
19,500 | 439d8a0e1bcf2176a36f4a72b0c544f8eb7999b1 | import click
from flask.cli import with_appcontext
from flask_sqlalchemy import SQLAlchemy
from flask_sqlalchemy import inspect
students_db = SQLAlchemy()
def init_app(app):
    """Bind the SQLAlchemy extension to *app* and register the init-db CLI command."""
    students_db.init_app(app)
    app.cli.add_command(init_db_command)
@click.command('init-db')
@with_appcontext
def init_db_command():
    """Create any missing tables for the bound models.

    NOTE(review): the original docstring claimed existing data is cleared,
    but create_all() only creates tables that do not yet exist -- it never
    drops or truncates anything.
    """
    students_db.create_all()
    click.echo('Initialized the database.')
class Student(students_db.Model):
    """A student record: first/last name, age group and specialization."""

    id = students_db.Column(students_db.Integer, primary_key=True)
    fname = students_db.Column(students_db.String(60), nullable=False)
    lname = students_db.Column(students_db.String(60), nullable=False)
    agegrup = students_db.Column(students_db.Integer, nullable=False)
    specialization = students_db.Column(students_db.String(120),
                                        nullable=False)

    def update_with(self, update_dict):
        """Overwrite all mutable columns from *update_dict* (KeyError if one is missing)."""
        for field in ('fname', 'lname', 'agegrup', 'specialization'):
            setattr(self, field, update_dict[field])
def object_as_dict(obj):
    """Return {column attribute name: value} for a SQLAlchemy-mapped object.

    NOTE(review): 'inspect' is imported above from flask_sqlalchemy, but the
    runtime inspection API normally lives in sqlalchemy itself
    ('from sqlalchemy import inspect') -- verify this import resolves.
    """
    return {c.key: getattr(obj, c.key)
            for c in inspect(obj).mapper.column_attrs}
|
19,501 | 91f951e942be20dd37eaa9d98bc08d1f7bd3a1c2 | from typing import List
class Node:
    """One node of a pairing heap in first-child / sibling-list form."""

    def __init__(self, key, value, leftchild, sibling, parent=None):
        self.key = key
        self.value = value
        self.leftchild = leftchild
        self.sibling = sibling
        self.parent = parent

    def addChild(self, node):
        """Prepend *node* to this node's child list."""
        first = self.leftchild
        if first is None:
            self.leftchild = node
            node.parent = self
            return
        # new node becomes the first child; the previous first child is
        # pushed onto its sibling chain and (as in the original code)
        # records the new node as its parent
        node.sibling = first
        first.parent = node
        self.leftchild = node
class Heap:
    """Pairing heap with a key -> Node index for decrease_key lookups.

    Keys are assumed to be unique; inserting a duplicate key silently
    overwrites the index entry. Deeper structural issues are flagged with
    NOTE(review) comments but left untouched to preserve behaviour.
    """

    def __init__(self):
        # root of the pairing heap, i.e. the minimum-key node (None when empty)
        self.head: Node = None
        # key -> Node index used by decrease_key
        self.nodes = dict()

    def find_min(self) -> Node:
        """Return the minimum node without removing it (None when empty)."""
        return self.head

    def isEmpty(self):
        """Return True when the heap holds no nodes."""
        return self.head is None

    def _merge(self, nodeA: Node, nodeB: Node):
        """Merge two heaps: the root with the larger key becomes a child."""
        if nodeA is None:
            return nodeB
        if nodeB is None:
            return nodeA
        if nodeA.key < nodeB.key:
            nodeA.addChild(nodeB)
            return nodeA
        else:
            nodeB.addChild(nodeA)
            return nodeB

    def _twopassmerge(self, node: Node):
        """Two-pass merge of a sibling list; used after delete_min."""
        if node is None or node.sibling is None:
            return node
        else:
            heapA = node
            heapB = node.sibling
            newNode = node.sibling.sibling
            heapA.sibling = None
            heapB.sibling = None
            points: List = []
            while True:
                # once the end of the sibling list is reached, merge every
                # stored node starting from the rightmost one
                if newNode is None or newNode.sibling is None:
                    while not len(points) == 0:
                        node = points.pop(-1)
                        newNode = self._merge(node, newNode)
                    break
                loopa = newNode
                points.append(newNode)
                newNode = newNode.sibling
                loopa.sibling = None
            return self._merge(self._merge(heapA, heapB), newNode)

    def insert(self, key, value):
        """Insert a (key, value) pair; *key* must not already be present."""
        self.nodes[key] = Node(key, value, None, None)
        self.head = self._merge(self.head, self.nodes[key])

    def delete_min(self):
        """Remove the minimum node; return True on success, False when empty."""
        if self.head is not None:  # idiom fix: was 'not self.head is None'
            self.nodes.pop(self.head.key)
            self.head = self._twopassmerge(self.head.leftchild)
            return True
        return False

    def decrease_key(self, orig_key, next_key):
        """Lower the key stored under *orig_key* to *next_key*."""
        if self.head.key == orig_key or next_key < self.head.key:
            # NOTE(review): rewrites the root key in place; self.nodes still
            # indexes this node under the old key afterwards (stale entry).
            self.head.key = next_key
        else:
            # find the node with the orig_key via its recorded parent
            parent_node: Node = self.nodes[orig_key].parent
            node = None
            if parent_node.leftchild is not None:
                if parent_node.leftchild.key == orig_key:
                    node = parent_node.leftchild
                    # NOTE(review): this also detaches the target's whole
                    # sibling chain from the parent -- preserved as-is.
                    parent_node.leftchild = None
            elif parent_node.sibling is not None:
                if parent_node.sibling.key == orig_key:
                    node = parent_node.sibling
                    parent_node.sibling = None
            else:
                # fixed: raising a string is a TypeError under Python 3;
                # raise a real exception carrying the offending key instead
                raise KeyError(
                    'node %r not found relative to its recorded parent' % orig_key)
            # NOTE(review): if neither branch above matched, node is still
            # None here and the next line raises AttributeError.
            node.key = next_key
            self.nodes.pop(orig_key, None)  # fixed: drop the stale index entry
            self.nodes[node.key] = node
            siblings = self._twopassmerge(node.sibling)
            node.sibling = None
            self.head = self._merge(self.head, node)
            self.head = self._merge(self.head, siblings)
|
19,502 | 45d04538b5a86f749c0c37ddf693737dcbc9b3e3 | #!/usr/bin/env python
class Stack(object):
    """A minimal LIFO stack backed by a Python list."""

    def __init__(self):
        self.stack = []

    def push(self, item):
        """Place *item* on top of the stack."""
        self.stack.append(item)

    def pop(self):
        """Remove and return the top item (IndexError when empty)."""
        return self.stack.pop()

    def top(self):
        """Return the top item without removing it (IndexError when empty)."""
        return self.stack[-1]

    def is_empty(self):
        """Return True when the stack holds no items."""
        return len(self.stack) == 0
if __name__ == "__main__":
stack = Stack()
stack.push(1)
print (stack.pop())
stack.push(2)
print (stack.is_empty())
stack.push(3)
print (stack.pop())
print (stack.pop())
print (stack.is_empty()) |
19,503 | 9bab183528d54db2f2e919dc5da7f958ced271bf | n = int(input())
print("ABC" if n == 1 else "chokudai") |
19,504 | 1c39747b3809d00282b44f8d2ee836f2f6c5146c | """
This file intends to simulate all Backend's API behaviours which will be accessed by the SDP.
""" |
19,505 | 4858f21c998e133ac399a81b40976a7c29ac55f4 | base_dir="/home/pool/"
current="current"
file_dir=base_dir+current
|
19,506 | 382840b21f95ad0cc65526642d0d9292eaf2450b | import inspect
import operator
from gcode import core
from gcode import laser
from gcode import parser
class Interpreter(object):
    """G-Code Interpreter.

    Parses lines of G-Code and dispatches the resulting codes to an
    executor object (laser.LaserExecutor by default), which supplies the
    dialect, the order of execution, and the handler methods.
    """

    def __init__(self, executor=laser.LaserExecutor()):
        # NOTE(review): the default executor is created once at function
        # definition time and shared by every Interpreter constructed with
        # the default -- verify LaserExecutor is safe to share.
        self.executor = executor
        self.parameters = core.ParameterSet()
        self.settings = executor.initial_settings
        self.parser = parser.Parser(self.parameters, executor.dialect)
        # one "current mode" slot per modal group, initially all None
        self.modes = dict.fromkeys(executor.dialect.modal_groups)

    def interpret_line(self, line, source=None, lineno=None):
        """Interpret one line of G-Code."""
        pline = self.parser.parse_line(line, source=source, lineno=lineno)
        return self.execute(pline)

    def interpret_file(self, file, source=None, process_percents=True):
        """Read and interpret G-Code from a file-like object.

        Returns a human-readable string for the action that terminated
        interpretation, or None when the input ends without one.
        """
        for pline in self.parser.parse_file(file,
                                            source=source,
                                            process_percents=process_percents):
            action = self.execute(pline)
            if action is None:
                pass
            elif action == 'emergency stop':
                return 'Emergency Stop'
            elif action == 'pause program':
                return 'Pause'
            elif action == 'optional pause program':
                return 'Optional Pause'
            elif action == 'end program':
                return 'End'
            else:
                raise core.GCodeException('unknown action: %r' % (action,))

    def prep_words(self, pline):
        """Validate the words of one parsed line.

        Returns (active_groups, new_settings): the active code chosen per
        modal group, and the letter -> number settings seen on the line.
        Raises GCodeSyntaxError for unknown, conflicting or ambiguous codes.
        """
        active_groups = {}
        active_args = {}
        dialect = self.executor.dialect
        new_settings = {}
        modal_groups = {}  # NOTE(review): assigned but never used
        codes = []
        for (letter, number) in pline.words:
            if letter in dialect.passive_code_letters:
                new_settings[letter] = number
            else:
                code = dialect.find_active_code(letter, number)
                if code is None:
                    msg = 'unknown code %s%s' % (letter, number)
                    raise parser.GCodeSyntaxError(pline.source.pos, msg)
                codes.append(code)
        for code in codes:
            group = code.group
            if group:
                # two codes from the same modal group cannot share a line
                if group in active_groups:
                    prev = active_groups[group]
                    msg = '%s conflicts with %s' % (code, prev)
                    raise parser.GCodeSyntaxError(pline.source.pos, msg)
                active_groups[group] = code
            for arg in code.arg_letters:
                if arg in new_settings:
                    # an argument letter may feed at most one code per line
                    if arg in active_args:
                        msg = '%s%s ambiguous between %s and %s'
                        msg %= (arg, new_settings[arg], active_args[arg], code)
                        raise parser.GCodeSyntaxError(pline.source.pos, msg)
                    active_args[arg] = code
            r_any = code.require_any
            if r_any and not any(a in new_settings for a in r_any):
                msg = 'code %s requires at least one of %s'
                msg %= (code, ', '.join(r_any))
                raise parser.GCodeSyntaxError(pline.source.pos, msg)
        return active_groups, new_settings

    def execute(self, pline):
        """Execute one parsed line; return the first non-None executor action."""
        dialect = self.executor.dialect
        active_groups, new_settings = self.prep_words(pline)
        self.settings.update(new_settings)
        for op in self.executor.order_of_execution:
            action = None
            if inspect.ismethod(op):
                action = op(self.settings, new_settings, pline)
            else:
                assert isinstance(op, str)
                group = dialect.find_group(op)
                # NOTE(review): active_groups is keyed by code.group in
                # prep_words but read here with the group *name* 'op', and
                # self.modes is read with 'group' but written with 'op'
                # below -- confirm these keys are actually the same values.
                active_code = active_groups.get(op)
                if group.prepare_func:
                    # NOTE(review): 'func_name' is the Python 2 spelling of
                    # a function's name attribute ('__name__' in Python 3).
                    method = getattr(self.executor,
                                     group.prepare_func.func_name)
                    active_code = method(mode=self.modes[group],
                                         new_mode=active_code,
                                         settings=self.settings,
                                         new_settings=new_settings)
                if active_code:
                    self.modes[op] = active_code
                    action = self.call_code(active_code)
                if group.finish_func:
                    method = getattr(self.executor, group.finish_func.func_name)
                    method(mode=self.modes[group],
                           new_mode=active_code,
                           settings=self.settings,
                           new_settings=new_settings)
            if action is not None:
                return action

    def call_code(self, code):
        """Invoke the executor method for *code* with its argument values."""
        def get_val(arg):
            # current setting, falling back to the code's declared default
            val = self.settings[arg]
            if val is None:
                if arg in code.default_args:
                    return code.default_args[arg]
                msg = '%s requires a %s code' % (code, arg)
                raise core.GCodeException(msg)
            return val
        args = {a: get_val(a) for a in code.arg_letters}
        # NOTE(review): getattr expects a string attribute name, so this
        # only works if 'code' is (a subclass of) str -- verify the dialect.
        method = getattr(self.executor, code)
        return method(**args)
|
19,507 | 4296d636f1a7f5258e6501974bda3d0ff811fb8e | import mariadb
import sys
def conectar():
    """Return a connection to the 'testDB' MariaDB database.

    Exits the process with status 1 when the connection cannot be made.
    """
    try:
        # fixed: the original call was missing its closing parenthesis,
        # which made this whole module a SyntaxError
        conn = mariadb.connect(user="root", database="testDB",
                               host="mariadb", password="test123")
    except mariadb.Error as e:
        # fixed: the connector's exception class is 'Error', not 'error'
        print(f"Error connecting to MariaDB Platform: {e}")
        sys.exit(1)
    return conn
# Dump every row of the 'test' table to stdout.
con = conectar()
cur = con.cursor()
cons = 'select * from test'
cur.execute(cons)
for row_id, row_name in cur.fetchall():
    print("id: " + str(row_id))
    print("nombre: " + str(row_name))
|
19,508 | 61a9cd461a144451cfe05ff485c8c2a938330817 | import mcdonalds_positions
global statement
statement ="answer 'yes' or 'no' "
def mcdonalds_work():
print "Wow you really want to spend your life on nothinhg , huh .."
print "So here comes the boss and asks you,where do you want to work:"
garbage=['cleaner' , 'McCafe worker' , 'Kitchen worker ', 'Cash-worker','Grill']
for job_type in garbage:
print " \n > " , job_type
work=raw_input("> ")
if "cleaner" in work or "Cleaner" in work:
mcdonalds_positions.cleaner()
elif "McCafe" in work:
mcdonalds_positions.mcCafe()
elif "Kitchen" in work or "kitchen" in work:
mcdonalds_positions.kitchen_worker()
elif "Cash" in work or "cash" in work:
mcdonalds_positions.cashier()
elif "Grill" in work or "grill" in work:
mcdonalds_positions.grill_worker()
else:
print "Just answer the damn question, since you want to play :/"
mcdonalds_work()
def mcdonalds():
print "Hi you choosed the left side"
print "Welcome to Mcdonalds"
mcdonalds_work()
|
19,509 | bd434bc775f6d5d792fd7fd9980a92d6f02352a4 | # -*- coding: Latin-1 -*-
"""
@file BinarySearch.py
@author Sascha Krieg
@author Daniel Krajzewicz
@author Michael Behrisch
@date 2008-04-01
@version $Id: BinarySearch.py 18096 2015-03-17 09:50:59Z behrisch $
binary search helper functions.
SUMO, Simulation of Urban MObility; see http://sumo.dlr.de/
Copyright (C) 2008-2015 DLR (http://www.dlr.de/) and contributors
This file is part of SUMO.
SUMO is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
"""
def isElmInList(list, elm):
    """Binary search: return True iff *elm* occurs in the sorted *list*."""
    maxindex = len(list) - 1
    index = 0
    while index <= maxindex:
        # fixed: floor division -- plain '/' yields a float under Python 3,
        # which is not a valid list index
        middle = index + (maxindex - index) // 2
        if list[middle] == elm:  # if elm is found
            return True
        elif elm < list[middle]:
            maxindex = middle - 1
        else:
            index = middle + 1
    return False
def isElmInListFaster(list, elm):
    """Interpolation search only for integers :-(

    Return True iff *elm* occurs in the sorted integer *list*.
    """
    if not list:
        # fixed: an empty list used to raise IndexError on the first probe
        return False
    links = 0               # left boundary of the remaining interval
    rechts = len(list) - 1  # right boundary of the remaining interval
    # as long as elm lies within the value range of the interval
    # (otherwise the element cannot be present)
    while elm >= list[links] and elm <= list[rechts]:
        # spread of the values in the current interval
        versch = list[rechts] - list[links]
        if versch == 0:
            # fixed: all remaining values are equal; the old code divided
            # by zero here.  Since list[links] <= elm <= list[rechts] and
            # both bounds are equal, elm must match that value.
            return True
        # interpolated probe position (integer arithmetic; floor matches
        # the previous int() truncation because all operands are >= 0)
        pos = links + (rechts - links) * (elm - list[links]) // versch
        if elm > list[pos]:    # search the right sub-interval
            links = pos + 1
        elif elm < list[pos]:  # search the left sub-interval
            rechts = pos - 1
        else:                  # element found
            return True
    return False
# Element nicht gefunden
|
19,510 | 522aee4bb9a3bb9ade6ec34602260610f57202aa | import numpy as np
a=0
b=1
c=5
print(a+b+c,"a+b+c")
print(a*b*c,"a*b*c")
|
19,511 | c6f902e4e0dfce521670ae2950796f35b6cd6fcd | # wrapper for the python "logging" module
#
# Importing this module sets the logger class of the python "logging" module to EsiLogger.
# The EsiLogger class adds the "trace" level to logging.
#
# When this module is imported, EsiLogger is instantiated with the root name 'esi' and the instance
# sets up output to the console, and to the files esi_warn.log, esi_info.log, esi_trace.log and esi_debug.log.
#
# Then, when the importing program instantiates a new logger with the line:
# log = logging_esi.get_logger(logname)
# where logname starts with "esi.", the methods log.warn, log.info, log.trace and log.debug create
# formatted output to the log files.
#
# The log levels used here correspond to the following symbolic names and values:
#
# warn logging.WARN 30
# info logging.INFO 20
# trace logging.TRACE 15
# debug logging.DEBUG 10
#
# Lower-level output files will include log output from higher levels.
from logging import getLoggerClass, addLevelName, setLoggerClass, NOTSET, DEBUG, INFO, WARN
from logging import Formatter, FileHandler, getLogger, StreamHandler
from contextlib import contextmanager
from time import sleep, strftime, localtime
from pymongo import MongoClient
import re
from lib.prune_logs import prune_logs
msg_len_max = 30
msg_src_stack = []
msg_src = ''
root_name = 'esi'
TRACE = (DEBUG + INFO) / 2
current_formatter = None
trace_indent = 0
class EsiLogger(getLoggerClass()):
    """Logger subclass adding a TRACE level (between DEBUG and INFO) and an
    optional mirror of every formatted record into a MongoDB collection."""

    # class-level MongoDB handles shared by all logger instances;
    # all None until set_db() is called
    db = None
    db_host = None
    db_client = None
    db_collection = None

    def __init__(self, name, level=NOTSET):
        """Create the logger and register the custom TRACE level name."""
        super(EsiLogger, self).__init__(name, level)
        addLevelName(TRACE, 'TRACE')

    def trace(self, msg, *args, **kwargs):
        """Log *msg* at TRACE level."""
        if self.isEnabledFor(TRACE):
            self._log(TRACE, msg, args, **kwargs)

    def handle(self, record):
        # Mirror the formatted record into MongoDB (when configured) before
        # normal handler processing.
        if self.db_collection and current_formatter:
            txt = current_formatter.format(record)
            msg_dict = parse_msg_to_dict(txt)
            if msg_dict:
                self.db_collection.insert_one(msg_dict)
        super(EsiLogger, self).handle(record)

    @classmethod
    def set_db(cls, host_name, db_name, collection_name):
        """Point all EsiLogger instances at one MongoDB collection."""
        cls.db_client = MongoClient(host_name)
        cls.db = cls.db_client[db_name]
        cls.db_collection = cls.db[collection_name]


# make all subsequently created loggers EsiLogger instances
setLoggerClass(EsiLogger)
@contextmanager
def msg_src_cm(src):
    """Temporarily install *src* as the log message source label.

    fixed: the pop now sits in a finally block, so the previous source is
    restored even when the managed body raises.
    """
    push_msg_src(src)
    try:
        yield
    finally:
        pop_msg_src()
def update_handler_formatters(f):
    """Install formatter *f* on every handler of the root logger and remember it."""
    global current_formatter
    current_formatter = f
    for h in _log.handlers:
        h.setFormatter(f)
def push_msg_src(src):
    """Save the current message source on the stack, then switch to *src*."""
    # save the previous msg_src
    msg_src_stack.append(msg_src)
    set_msg_src(src)
def pop_msg_src():
    """Restore the most recently pushed message source ('' when none is left)."""
    previous = msg_src_stack.pop() if msg_src_stack else ''
    set_msg_src(previous)
def set_msg_src(src='', set_linefeed=False):
    """Set the message-source label and rebuild the log record formatter.

    The source column auto-widens to fit the longest label seen so far.
    """
    global msg_len_max
    global msg_src
    if len(src) > msg_len_max:
        # print "src = '%s', len(src) = %d, msg_len_max = %d" % (src, len(src), msg_len_max)
        msg_len_max = len(src)
    # left-justify the label in a column one wider than the longest seen
    msg_fmt = "%%-%ds" % (msg_len_max + 1)
    msg_src = src
    msg_src_formatted = msg_fmt % src
    format_str = '%%(asctime)s.%%(msecs)03d [%%(name)-20s] %%(levelname)-5s - [%s] %%(message)s' % msg_src_formatted
    formatter = Formatter(format_str, datefmt='%m/%d/%y %H:%M:%S')
    update_handler_formatters(formatter)
    if set_linefeed:
        # insert a linefeed to clean up the display in PyCharm
        # (bare 'print' statement -- this module targets Python 2)
        print
        sleep(1)
def get_logger(name):
    """Thin wrapper around logging.getLogger for this module's callers."""
    return getLogger(name)
# Splits one formatted log line into date, time, source, level, test-case
# tag, optional message type and the remaining tail text.
re_common = re.compile(
    '(?P<date>\S+)\s+'
    + '(?P<time>\S+)\s+\['
    + '(?P<src>[^\s\]]+)[\s\]]+'
    + '(?P<level>\S+)[\s\-\]\[]+'
    + '(?P<tc>\S[^\]]+\S)\s*\]\s+'
    + '((?P<type>[A-Z ^:]+):\s+)?'
    + '(?P<tail>.*)'
)
# Tail layout of 'TEST CASE' records: id, status, optional free-form message.
re_tc = re.compile(
    '(?P<tc>\S+)\s+'
    + '(?P<status>\S+)\s*'
    + '([- ]+)?(?P<msg>.+)?'
)
# Tail layout of 'TRACE' records: function, optional arg list, event, message.
re_trace = re.compile(
    '(?P<func>[^(\s]+)'
    + '((?P<arglist>\((?P<args>.*)\))?\s*|\s*)'
    + '((?P<event>returned|EXCEPTION)?:?\s+|)(?P<msg>.+)?'
)
def parse_msg_to_dict(msg):
    """Parse one formatted log line into a dict suitable for MongoDB.

    Returns None (after printing a diagnostic) when the line does not match
    the expected layout.
    """
    m = re_common.match(msg)
    if not m:
        print 'Unknown log message format:\n%s' % msg
        return None
    names = ['date', 'time', 'src', 'level', 'tc', 'type']
    # names = ['date', 'time', 'src', 'level', 'tc', 'type', 'tail']
    values = [m.group(name) for name in names]
    # # print "tail = " + m.group('tail')
    if m.group('type') == 'TEST CASE':
        # tail carries: test-case id, status, optional free-form message
        mt = re_tc.match(m.group('tail'))
        if not mt:
            print 'Unknown log message tail format: "%s"' % m.group('tail')
            return None
        # NOTE(review): 'tc' is appended a second time here; dict(zip(...))
        # below keeps the later value -- presumably intended, verify.
        for name in ['tc', 'status', 'msg']:
            names.append(name)
            values.append(mt.group(name))
    elif m.group('type') == 'TRACE':
        # tail carries: function name, optional arg list, event, message
        mt = re_trace.match(m.group('tail'))
        if not mt:
            print 'Unknown log message tail format: "%s"' % m.group('tail')
            return None
        for name in ('func', 'args', 'event', 'msg'):
            names.append(name)
            if name == 'event' and mt.group('arglist') is not None:
                # an argument list with no explicit event means a call
                values.append('call')
            else:
                values.append(mt.group(name))
    else:
        names.append('msg')
        values.append(m.group('tail'))
    all_values = dict(zip(names, values))
    all_values['trace_indent'] = trace_indent
    # print '    ' + ', '.join(['%s: %s' % (name, all_values[name]) for name in names])
    return all_values
# ---- module initialisation: configure the root 'esi' logger on import ----
_log = getLogger(root_name)
_log.setLevel(DEBUG)

# keep only the five most recent timestamped debug / logcat files
prune_logs('log/%s_debug_*.log' % root_name, 5)
prune_logs('log/*logcat_*.log', 5)
timestamp = strftime('%m_%d_%y-%H_%M_%S', localtime())

# file logging for info, debug, trace and warn levels, each with its own output file
base_warn_fname = 'log/%s_warn.log' % root_name
base_info_fname = 'log/%s_info.log' % root_name
base_trace_fname = 'log/%s_trace.log' % root_name
base_debug_fname = 'log/%s_debug.log' % root_name
extended_debug_fname = 'log/%s_debug_%s.log' % (root_name, timestamp)

# each handler filters on its own level, so lower-level files also
# contain the records of all higher levels
fh = FileHandler(base_warn_fname, mode='w', encoding=None, delay=False)
fh.setLevel(WARN)
_log.addHandler(fh)
fh = FileHandler(base_info_fname, mode='w', encoding=None, delay=False)
fh.setLevel(INFO)
_log.addHandler(fh)
fh = FileHandler(base_trace_fname, mode='w', encoding=None, delay=False)
fh.setLevel(TRACE)
_log.addHandler(fh)
fh = FileHandler(base_debug_fname, mode='w', encoding=None, delay=False)
fh.setLevel(DEBUG)
_log.addHandler(fh)
fh = FileHandler(extended_debug_fname, mode='w', encoding=None, delay=False)
fh.setLevel(DEBUG)
_log.addHandler(fh)

# console logging for info level
console_handler = StreamHandler()
console_handler.setLevel(INFO)
_log.addHandler(console_handler)

# install an initial formatter so records are formatted from the start
push_msg_src('logging_init')
|
19,512 | 0fe5e7c2bea36b9c96a60e11c225b899b63fbc5e | #!/usr/bin/env python
import sys
import datetime
import glob
import operator as op
import numpy as np
import matplotlib.pyplot as plt
from bunch import Bunch
import mygis
wrf_dir="/glade/u/home/gutmann/scratch/wrfoutput/4km/2007/"
DIM_2D_SHAPE=3
DIM_3D_SHAPE=4
def echo(fn):
    """Decorator: print the wrapped function's name on every call."""
    def wrapped(*args, **kwargs):
        print(fn.__name__)
        return fn(*args, **kwargs)
    return wrapped
def exner(th, p):
    """Convert potential temperature *th* to temperature at pressure *p* [Pa]."""
    Rd = 287.058   # dry-air gas constant [J kg-1 K-1]
    cp = 1004.0    # specific heat of dry air at constant pressure [J kg-1 K-1]
    p0 = 100000    # reference pressure [Pa]
    return th * (p / p0) ** (Rd / cp)
class DataReader(object):
    """Iterator over WRF or ICAR model-output files.

    Each step yields a Bunch of z-collapsed fields under short names
    (qv, qc, rain, t, u, v) plus the valid 'date'.  Accumulated rain is
    converted to per-step amounts via the last_rain bookkeeping.
    """
    # only directly accessible public attributes
    files = None
    times_per_file = 1
    # curpos, last_rain, and last_rain_pos are accessible via @properties
    _curpos = 0
    _pos_in_file = 0
    _curfile = 0
    _last_rain = None
    _last_rain_pos = -1
    _lr_pos_in_file = -1
    _lr_curfile = 0
    _rainvar = "RAINNC"
    _testvar = None
    _var_names = ["QVAPOR", [op.add, "QCLOUD", "QICE"], [op.add, "QRAIN", "QSNOW"], [op.add, "T2", 300], "U", "V"]
    # file variable name -> short output name
    _short_names = dict(QVAPOR="qv", QCLOUD="qc", QICE="qc", RAINNC="rain", T="t", T2="t", U="u", V="v", W="w", QRAIN="rain",
                        qv="qv", qc="qc", rain="rain", qr="rain", th="t", u="u", v="v", w="w")
    # how each variable is reduced over the vertical axis
    _collapse_functions = dict(QVAPOR=np.mean, QCLOUD=np.sum, T=np.mean, U=np.mean, V=np.mean, W=np.mean,
                               QICE=np.sum, QRAIN=np.sum, QSNOW=np.sum,
                               qv=np.mean, qc=np.sum, th=np.mean, u=np.mean, v=np.mean, w=np.mean,
                               qi=np.sum, qr=np.sum, qs=np.sum, p=np.mean)
    _wrf_var_names = ["QVAPOR", [op.add, "QCLOUD", "QICE"], [op.add, "QRAIN", "QSNOW"], "T2", "U", "V"]
    _icar_var_names = ["qv", [op.add, "qc", "qi"], [op.add, "qr", "qs"], [exner, "th", "p"], "u", "v"]
    # by default take all data in the file in x and y, lowest 10 levels in z
    x = slice(0, None)
    y = slice(0, None)
    z = slice(0, 10)
    # fixed: removed a stray bare name ('llh') that raised NameError while
    # the class body was being executed

    def __init__(self, filenames, start_pos=0, datatype="WRF"):
        """Prepare to iterate *filenames*; datatype is "WRF" or "ICAR"."""
        super(DataReader, self).__init__()
        self.files = filenames
        self._datamodel = datatype
        if datatype == "WRF":
            self._var_names = self._wrf_var_names
            test_var = mygis.read_nc(self.files[0], self._var_names[0], returnNCvar=True)
            self.times_per_file = test_var.data.shape[0]
            test_var.ncfile.close()
            self.zaxis = 0
            self.DIM_2D_SHAPE = 3
            self.DIM_3D_SHAPE = 4
        if datatype == "ICAR":
            self._var_names = self._icar_var_names
            self.times_per_file = 1
            self._rainvar = "rain"
            # ICAR files are ordered (z, y, x): swap the y and z slices
            self.y, self.z = self.z, self.y
            self.zaxis = 1
            self.DIM_2D_SHAPE = 2
            self.DIM_3D_SHAPE = 3
        # note this calls the setter which will set pos_in_file and cur_file
        self.curpos = start_pos

    def _get_collapsing_func(self, varname):
        """Return the z-collapse reduction for *varname* (np.mean by default)."""
        # fixed: was a bare 'except:' around a dict lookup
        return self._collapse_functions.get(varname, np.mean)

    def collapse_z(self, data, varname):
        """Reduce 3D *data* over the vertical axis; pass 2D data through."""
        if len(data.shape) == 3:
            myfunc = self._get_collapsing_func(varname)
            return myfunc(data, axis=self.zaxis)
        else:
            return data

    # Get/Set the position in the timeseries, while properly updating the
    # file number and the position within that file
    @property
    def curpos(self):
        """Global time-step index across all files."""
        return self._curpos

    @curpos.setter
    def curpos(self, pos):
        self._curpos = pos
        self._pos_in_file = int(self._curpos) % int(self.times_per_file)
        # fixed: floor division -- '/' yields a float under Python 3 and the
        # result is used as a list index
        self._curfile = int(self._curpos) // int(self.times_per_file)

    @property
    def last_rain_pos(self):
        """Time-step index at which _last_rain was captured."""
        return self._last_rain_pos

    # fixed: was '@curpos.setter', which replaced this property's getter
    # with curpos's getter instead of attaching a setter to last_rain_pos
    @last_rain_pos.setter
    def last_rain_pos(self, pos):
        self._last_rain_pos = pos
        self._lr_pos_in_file = int(self._last_rain_pos) % int(self.times_per_file)
        self._lr_curfile = int(self._last_rain_pos) // int(self.times_per_file)

    # Get/Set the last_rain variable
    @property
    def last_rain(self):
        """Accumulated-rain field from the previous step, loaded lazily."""
        # fixed: 'is None' -- '== None' on a numpy array is an element-wise
        # comparison with an ambiguous truth value
        if self._last_rain is None:
            self.last_rain_pos = self.curpos - 1
            # NOTE(review): the branches below index [y, x] vs [x, y]
            # inconsistently and mix _last_rain_pos with _lr_* bookkeeping;
            # preserved as-is -- verify against the file layout.
            if (self._pos_in_file > 0):
                nc_data = mygis.read_nc(self.files[self._curfile], self._rainvar, returnNCvar=True)
                self._last_rain = nc_data.data[self._last_rain_pos, self.y, self.x]
                nc_data.ncfile.close()
            elif (self._curfile == 0):
                # before the first step: a field of zeros
                nc_data = mygis.read_nc(self.files[self._curfile], self._rainvar, returnNCvar=True)
                nx = nc_data.data.shape[1]
                ny = nc_data.data.shape[2]
                self._last_rain = np.zeros((nx, ny))[self.x, self.y]
                nc_data.ncfile.close()
            else:
                # previous step lives in the last record of the previous file
                nc_data = mygis.read_nc(self.files[self._curfile - 1], self._rainvar, returnNCvar=True)
                self._last_rain = nc_data.data[-1, self.x, self.y]
                nc_data.ncfile.close()
        # else: we already have a valid _last_rain -- the common case
        return self._last_rain

    @last_rain.setter
    def last_rain(self, value):
        # fixed: hasattr needs the attribute *name* as a string; the bare
        # __iter__ was an undefined global and raised NameError
        if hasattr(value, '__iter__'):
            self.last_rain_pos = value[0]
            self._last_rain = value[1]
        else:
            self.last_rain_pos = value
            self._last_rain = None  # the getter will regenerate last_rain lazily

    def load_data(self, varname, filename=None, curtime=None):
        """Read one (possibly z-collapsed) field; non-string operands pass through."""
        if not isinstance(varname, str):
            # numeric operands (e.g. the 300 K offset) are returned untouched
            return varname
        if filename is None:
            filename = self.files[self._curfile]
        if curtime is None:
            curtime = self._pos_in_file
        # NOTE(review): data.ncfile is never closed here, unlike the other
        # readers in this class -- possible handle leak; verify mygis
        # semantics before adding a close().
        data = mygis.read_nc(filename, varname, returnNCvar=True)
        dimlen = len(data.data.shape)
        if dimlen == self.DIM_2D_SHAPE:
            # 2D vars e.g. RAINNC, rain
            if dimlen == 2:
                outputdata = data.data[self.y, self.x]
            else:
                outputdata = data.data[curtime, self.y, self.x]
        elif dimlen == self.DIM_3D_SHAPE:
            # 3D vars e.g. QVAPOR, qv
            if dimlen == 3:
                outputdata = self.collapse_z(data.data[self.z, self.y, self.x], varname)
            else:
                outputdata = self.collapse_z(data.data[curtime, self.z, self.y, self.x], varname)
        else:
            raise IndexError("Do not know how to process {} dimensions".format(len(data.data.shape)))
        if varname == self._rainvar:
            # rain is accumulated in the files; convert to per-step amounts
            current_rain = outputdata[:]
            outputdata -= self.last_rain
            self.last_rain = (self.curpos, current_rain)
        return outputdata

    def get_current_date(self):
        """Assumes a hard coded filename (e.g. WRF output filenames wrfout_d01_2007-01-01_00:00:00)"""
        if self._datamodel == "WRF":
            datestring = self.files[self._curfile].split("_")[2] + "-" + str(self._pos_in_file)
            return datetime.datetime.strptime(datestring, "%Y-%m-%d-%H")
        else:
            # fixed: leading-zero integer literals (01, 00) are a
            # SyntaxError under Python 3
            return datetime.datetime(2007, 1, 1, 0) + datetime.timedelta(self.curpos / 24.0)

    def __len__(self):
        return len(self.files) * self.times_per_file

    def __iter__(self):
        return self

    def __next__(self):
        """Advance one step and return a Bunch of fields plus 'date'."""
        self.curpos += 1
        output_data = Bunch()
        for v in self._var_names:
            if isinstance(v, str):
                curdata = self.load_data(v)
                curvarname = v
            elif isinstance(v, list):
                # [operator, operand, operand, ...]; the first string
                # operand names the output variable
                cur_operator = v[0]
                for varname in v[1:]:
                    if isinstance(varname, str):
                        curvarname = v[1]
                        break
                curdata = self.load_data(v[1])
                for curv in v[2:]:
                    next_data = self.load_data(curv)
                    # fixed: the combined value was computed and then
                    # discarded; keep the operator's result
                    curdata = cur_operator(curdata, next_data)
            output_data[self._short_names[curvarname]] = curdata
        output_data.date = self.get_current_date()
        return output_data

    next = __next__  # Python 2 iterator compatibility
# fixed color-scale limits per field, so WRF and ICAR panels are comparable
clims=dict( qv=(0,0.004),
            qc=(0,0.0003),
            t=(260,310),
            u=(-15,15),
            v=(-15,15),
            rain=(0,0.000005))
def make_subplot(data, ny, nx, curplot, v, extra_title):
    """Draw *data* in subplot (ny, nx, curplot) using the fixed limits for field *v*."""
    plt.subplot(ny, nx, curplot)
    plt.imshow(data)
    plt.clim(clims[v])
    plt.colorbar()
    plt.title(v + extra_title)
def make_plots(data1, data2, date, fig=None):
    """Render the six standard fields of two datasets side by side.

    data1 fills the odd subplot slots, data2 the even ones; *fig* is
    cleared and reused when given, otherwise a new figure is created.
    """
    plt.close("all")
    # NOTE(review): '== None' works for Figure objects but 'is None' would
    # be the safe spelling -- preserved as-is.
    if fig == None:
        fig = plt.figure(figsize=(24, 14));
    else:
        fig.clear()
    ny = 3
    nx = 4
    curplot = 0
    varnames = ["qv", "qc", "u", "v", "t", "rain"]
    for v in varnames:
        curplot += 1
        make_subplot(data1[v], ny, nx, curplot, v, " " + str(date)[:14])
        curplot += 1
        make_subplot(data2[v], ny, nx, curplot, v, " " + str(date)[:14])
    return fig
def main(icar_dir="output/",output_dir="./"):
output_filename=output_dir+"vis_{}.png"
wrf_files=glob.glob(wrf_dir+"wrfout*")
wrf_files.sort()
icar_files=glob.glob(icar_dir+"swim_out*")
icar_files.sort()
wrf_data=DataReader(wrf_files,datatype="WRF")
icar_data=DataReader(icar_files,datatype="ICAR")
fig=plt.figure(figsize=(24,14));
for i in range(len(wrf_data)):
wrf=wrf_data.next()
icar=icar_data.next()
print(str(wrf.date),str(icar.date))
sys.stdout.flush()
fig=make_plots(icar,wrf,wrf.date,fig=fig)
fig.savefig(output_filename.format(str(wrf.date).replace(" ","_")))
if __name__ == '__main__':
    # Usage: real_comparison.py [icar_output_directory] [vis_output_directory] [wrf_dir]
    # NOTE(review): 'global' at module level is a no-op; the assignment to
    # wrf_dir below rebinds the module global regardless.
    global wrf_dir
    out_dir = "./"
    icar_dir = "output/"
    if len(sys.argv) > 1:
        if sys.argv[1][:2] == "-h":
            print("Usage: real_comparison.py [icar_output_directory] [vis_output_directory] [wrf_dir]")
            sys.exit()
        icar_dir = sys.argv[1]
    if len(sys.argv) > 2:
        out_dir = sys.argv[2]
    if len(sys.argv) > 3:
        wrf_dir = sys.argv[3]
    main(icar_dir, out_dir)
19,513 | f3a5f3dd9539f3312457245be5cf1c1b91c9fbbd | import subprocess, struct, time, select, threading, os, sys, traceback, itertools,math, collections
import pytcpdump
import re
import tldextract
import numpy as np
import math
#***********************************************************************************
# Header for csv with statistical features
#***********************************************************************************
def stat_head():
    """Return the column-header line for the statistical-features CSV."""
    header = "sni,CSPktNum,CSPktsize25,CSPktSize50,CSPktSize75,CSPktSizeMax,CSPktSizeAvg,CSPktSizeVar,CSPaysize25,CSPaySize50,CSPaySize75,CSPaySizeMax,CSPaySizeAvg,CSPaySizeVar,CSiat25,CSiat50,CSiat75,SCPktNum,SCPktsize25,SCPktSize50,SCPktSize75,SCPktSizeMax,SCPktSizeAvg,SCPktSizeVar,SCPaysize25,SCPaySize50,SCPaySize75,SCPaySizeMax,SCPaySizeAvg,SCPaySizeVar,SCiat25,SCiat50,SCiat75,PktNum,Pktsize25,PktSize50,PktSize75,PktSizeMax,PktSizeAvg,PktSizeVar,iat25,iat50,iat75\n"
    return header
#***********************************************************************************
# Header for csv with sequence features
#***********************************************************************************
def sequence_head(n):
    """Return the header line 'sni,1,...,n-1' for the sequence-features CSV.

    NOTE(review): this emits n-1 numbered columns while sequence_create
    writes 4*n data columns per row -- header/data mismatch, verify use.
    """
    return "sni," + ",".join(str(i) for i in range(1, n)) + "\n"
#***********************************************************************************
# Get features for packets/payloads (25th, 50th, 75th) percentiles, max, mean, var
#***********************************************************************************
def stat_calc(x, iat=False):
    """Return [p25, p50, p75, max, mean, variance] of *x* as strings.

    The 'iat' flag is accepted for interface compatibility but unused.
    """
    if not x:
        return ["0"] * 6
    if len(x) == 1:
        only = str(x[0])
        return [only, only, only, only, only, "0"]
    ordered = sorted(x)
    p25, p50, p75 = get_percentiles(ordered)
    stats = [p25, p50, p75, ordered[-1], np.mean(ordered), np.var(ordered)]
    return [str(value) for value in stats]
#***********************************************************************************
# Helper function to get percentiles
#***********************************************************************************
def get_percentiles(x):
    """Return the elements of sorted sequence *x* at the 25th/50th/75th percentile positions."""
    last = len(x) - 1
    quarter = int(round(last / 4.0))
    half = int(round(last / 2.0))
    three_quarters = int(round(last * 3 / 4.0))
    return x[quarter], x[half], x[three_quarters]
#***********************************************************************************
# Helper function to combine milliseconds/seconds timestamps
#***********************************************************************************
def combine_at(sec, usec):
    """Merge parallel seconds/microseconds lists into float timestamps (seconds)."""
    return [s + usec[i] * 1e-6 for i, s in enumerate(sec)]
#***********************************************************************************
# Get features for inter-arrival times (25th, 50th, 75th) percentiles
#***********************************************************************************
def stat_prepare_iat(t):
    """Return [p25, p50, p75] of the inter-arrival times of timestamps *t* as strings.

    NOTE(review): as in the original, the gaps are not sorted before the
    percentile lookup -- preserved as-is.
    """
    gaps = [b - a for a, b in zip(t, t[1:])]
    if not gaps:
        return ["0", "0", "0"]
    if len(gaps) == 1:
        return [str(gaps[0])] * 3
    p25, p50, p75 = get_percentiles(gaps)
    return [str(p25), str(p50), str(p75)]
#***********************************************************************************
# Get statistical features from tcp packet sequences
#***********************************************************************************
def stat_create(data, filename, first_n_packets):
    """Write one CSV row of statistical features per flow in *data*.

    *data* maps flow ids to tuples (presumably produced by pytcpdump -- see
    __main__): item[0] is the SNI, item[2]/item[3] arrival seconds/usecs,
    item[4] packet sizes and item[5] payload sizes, each split per
    direction.  'first_n_packets' is unused here.
    """
    with open(filename, 'w') as f:
        f.write(stat_head())
        for id in data:
            item = data[id]
            sni = SNIModificationbyone(item[0])
            # exclude unknown domains
            if sni == 'unknown' or sni == 'unknown.':
                continue
            line = [sni]
            # remote->local features
            #  1     length
            #  2-7   packets stats
            #  8-14  payload stats
            # 15-17  inter-arrival time stats
            line += [str(len(item[4][0]))]
            line += stat_calc(item[4][0])
            line += stat_calc(item[5][0])
            arrival1 = combine_at(item[2][0], item[3][0])
            line += stat_prepare_iat(arrival1)
            # local->remote
            # 18     length
            # 19-24  packets stats
            # 25-30  payload stats
            # 31-33  inter-arrival time stats
            line += [str(len(item[4][1]))]
            line += stat_calc(item[4][1])
            line += stat_calc(item[5][1])
            arrival2 = combine_at(item[2][1], item[3][1])
            line += stat_prepare_iat(arrival2)
            # both directions combined
            # 34-39  packets stats
            # 40-42  inter-arrival time stats
            line += [str(len(item[4][1]) + len(item[4][0]))]
            line += stat_calc(item[4][1] + item[4][0])
            line += stat_prepare_iat(sorted(arrival1 + arrival2))
            line = ','.join(line)
            f.write(line)
            f.write('\n')
#***********************************************************************************
# Create features from tcp packet sequences
#***********************************************************************************
def sequence_create(data, filename, first_n_packets):
    """Write per-flow sequence features: sizes, payloads, IATs and directions.

    Each row is the SNI followed by four zero-padded sequences truncated to
    *first_n_packets* entries.  See stat_create for the assumed layout of
    *data* items.
    """
    with open(filename, 'w') as f:
        f.write(sequence_head(first_n_packets))
        counter = 0
        skipped = 0
        for id in data:
            item = data[id]
            sni = SNIModificationbyone(item[0])
            # exclude unknown domains
            counter = counter + 1
            if sni == 'unknown' or sni == 'unknown.':
                skipped = skipped + 1
                continue
            line = [sni]
            # Calculate arrival times in seconds for both directions
            arrival1 = combine_at(item[2][0], item[3][0])
            arrival2 = combine_at(item[2][1], item[3][1])
            # Sort all packets by arrival times to get sequence in correct order
            packets = zip(arrival1 + arrival2, list(item[4][0]) + list(item[4][1]))
            packets = [str(x) for _, x in sorted(packets)]
            # Zero padding for sequences that are too short
            if len(packets) < first_n_packets:
                packets = [str(0)] * (first_n_packets - len(packets)) + packets
            line += packets[0:first_n_packets]
            # Sort all payloads by arrival times to get sequence in correct order
            payloads = zip(arrival1 + arrival2, list(item[5][0]) + list(item[5][1]))
            payloads = [str(x) for _, x in sorted(payloads)]
            # Zero padding for sequences that are too short
            if len(payloads) < first_n_packets:
                payloads = [str(0)] * (first_n_packets - len(payloads)) + payloads
            line += payloads[0:first_n_packets]
            # Inter-arrival times over the merged, sorted timeline
            arrivals = sorted(arrival1 + arrival2)
            iat = [str(0)] + [str(arrivals[i + 1] - arrivals[i]) for i in range(len(arrivals) - 1)]
            # Zero padding for sequences that are too short
            if len(iat) < first_n_packets:
                iat = [str(0)] * (first_n_packets - len(iat)) + iat
            line += iat[0:first_n_packets]
            # Direction sequence sorted by arrival time (-1, 1, 0):
            # remote -> local = -1
            # local -> remote = 1
            # padding = 0
            direction = zip(arrival1 + arrival2, [-1] * len(item[5][0]) + [1] * len(item[5][1]))
            direction = [str(x) for _, x in sorted(direction)]
            # Zero padding for direction sequences that are too short
            if len(direction) < first_n_packets:
                direction = [str(0)] * (first_n_packets - len(direction)) + direction
            line += direction[0:first_n_packets]
            line = ','.join(line)
            f.write(line)
            f.write('\n')
        # NOTE(review): raises ZeroDivisionError when every flow was skipped
        # before counting, i.e. when *data* is empty -- verify inputs.
        print("Skipped percentage: ", 1. * skipped / counter)
#***********************************************************************************
# Parts of this function borrowed from the following paper:
#
# Multi-Level identification Framework to Identify HTTPS Services
# Author by Wazen Shbair,
# University of Lorraine,
# France
# wazen.shbair@gmail.com
# January, 2017
#
# SNi modification for the sub-domain parts only
#***********************************************************************************
def SNIModificationbyone(sni):
    """Normalize an SNI hostname's sub-domain part and rebuild the name.

    Strips digits, punctuation and a leading 'www' from the sub-domain,
    then reconstructs ``subdomain.domain.suffix`` (or ``domain.suffix``
    when nothing of the sub-domain remains).

    :param sni: server name indication string, e.g. 'www.mail01.example.com'
    :return: normalized SNI string, e.g. 'mail.example.com'
    """
    temp = tldextract.extract(sni.encode().decode())
    x = re.sub(r"\d+", "", temp.subdomain)  # remove numbers
    x = re.sub(r"[-,.]", "", x)             # remove dashes, commas and dots
    # Bug fix: the original pattern "[(?:www.)]" was a *character class*
    # deleting every occurrence of the characters ( ? : w . — which mangled
    # any subdomain containing 'w'. Strip a literal leading 'www' instead.
    x = re.sub(r"^www", "", x)
    if len(x) > 0:
        newsni = x + "." + temp.domain + "." + temp.suffix  # reconstruct the sni
    else:
        newsni = temp.domain + "." + temp.suffix
    return newsni
#***********************************************************************************
# Inputs
# 1. pcap file (filtered for SSL)
# 2. output file for statistical features
# 3. output file for sequence features
#***********************************************************************************
if __name__ == "__main__":
    # Inputs: twelve SSL-filtered daily captures.
    # Outputs: statistical features (ML) and sequence features (DL).
    pcap_file = ['../pcaps/GCDay{}SSL.pcap'.format(day) for day in range(1, 13)]
    output_file_stats = '../ML/training/GCstats.csv'
    output_file_seqs = '../DL/training/GCseq25.csv'
    # Accumulate all pcaps into pytcpdump's cache before feature extraction.
    for fname in pcap_file:
        print('process', fname)
        pytcpdump.process_file(fname)
        print(fname, "finished, kept", len(pytcpdump.cache.cache), 'records')
    stat_create(pytcpdump.cache.cache, output_file_stats, first_n_packets=25)
    sequence_create(pytcpdump.cache.cache, output_file_seqs, first_n_packets=25)
|
19,514 | 22efb63dc03bacdb4a60fe21a8636fd871c12bf0 |
import os
import probe_functions as ProbeF
import pdc_functions as PDCF
import _pickle as cPickle
import pandas as pd
import numpy as np
from functools import reduce
import dynet_statespace as dsspace
import dynet_con as dcon
import xarray as xr
import matplotlib.pyplot as plt
from functools import reduce
class LFPSession(object):
    """
    Class to access, store, and retrieve LFP session data, apply pre-processing and estimate iPDC
    """
    def __init__(self,cache,session_id,result_path):
        """
        Initialize the class based on AllenBrainSDK session
        :param cache: cache from EcephysProjectCache.from_warehouse(manifest=manifest_path)
        :param session_id: ID for allenSDK session
        :param result_path: Path to save the results
        """
        self.session_id = session_id
        # Add the resultpath folder for this session #### be careful about this variable when saving and loading (both Paths)
        if not os.path.exists(os.path.join(result_path, str(self.session_id))):
            os.mkdir(os.path.join(result_path, str(self.session_id)))
        self.result_path = os.path.join(result_path, str(self.session_id))
        # check if the LFP session already exist, load that session preprocessing info
        try:
            self.load_session()
        except FileNotFoundError:
            # First run for this session: initialize empty preprocessing state.
            # self.cond_name = cond_name
            self.preprocess = [] # any preprocessing is done? list of the preprocessing params
            self.RF = False # Channel info is stored?
            self.CSD = False # CSD plots for layer assignment are done before?
            self.ROIs = {} # empty dictionary indicating cortical ROI (VIS areas) and their relative probes
            self.session = cache.get_session_data(session_id) # Get allenSDK session
        # variables for running time only
        self.probes = dict.fromkeys(self.session.probes.index.values) # Get the probes for this session, make a dictionary maybe
        self.loaded_cond = None #Load LFP option
        self.layer_selected = False # if the loaded LFP is spatially down-sampled
    ## Class methods read/write the LFPSession from/to file (note: only preprocessing info is important)
    def save_session(self):
        """
        Saves session and preprocessing information to a .obj file using cPickle
        :return: file path/name
        """
        filename = os.path.join(self.result_path, 'LFPSession_{}.obj'.format(self.session_id))
        filehandler = open(filename, "wb")
        # Do not save the loaded LFP matrices since they are too big
        # NOTE(review): `temp = self` is an alias, not a copy — the lines below
        # also clear self.probes/self.loaded_cond on the live object. Confirm
        # this side effect is intended (callers must re-load LFPs after saving).
        temp = self
        temp.probes = dict.fromkeys(temp.probes.keys())
        temp.loaded_cond = None
        temp.layer_selected = False
        cPickle.dump(temp.__dict__, filehandler)
        filehandler.close()
        return filename
    def load_session(self): # be careful about this -> result_path
        """
        Restores this object's attributes from the pickled .obj file written by
        save_session. Raises FileNotFoundError when no such file exists.
        """
        filename = os.path.join(self.result_path, 'LFPSession_{}.obj'.format(self.session_id))
        file = open(filename, 'rb')
        dataPickle = file.read()
        file.close()
        self.__dict__ = cPickle.loads(dataPickle)
    def __str__(self):
        # One attribute per line for readable printing of the session state.
        return str(self.__dict__).replace(", '", ",\n '")
    ## Processing methods
    def preprocessing(self,cond_name='drifting_gratings', down_sample_rate=5, pre_stim = 1, do_RF=False, do_CSD=False, do_probe=False):
        """
        Runs the preprocessing on the session with the input parameters, if it has not been run before.
        :param cond_name: condition name to be preprocessed (None skips condition extraction)
        :param do_RF: do receptive field mapping plots? Attention: this may take a while if set True, note it is not RF mapping based on permutation
        :param down_sample_rate: temporal down-sampling factor passed to ProbeF.prepare_condition
        :param pre_stim: prestimulus time in sec
        :param do_CSD: produce CSD plots for layer assignment (only if not done before)
        :param do_probe: force re-extraction of probe info even if self.RF is set
        :return:
        """
        # Bundle the parameters so we can check whether this exact
        # configuration has already been run (see search_preproc).
        preproc_dict = {
            'cond_name': cond_name,
            'srate': down_sample_rate,
            'prestim': pre_stim,
        }
        # Attention: remove the zero conditions
        if not search_preproc(self.preprocess,preproc_dict):
            for probe_id in self.probes.keys():
                # Load lfp data
                lfp =self.session.get_lfp(probe_id)
                # First extract probe info and save
                if do_RF:
                    ProbeF.extract_probeinfo(self.session, lfp, probe_id, self.result_path, do_RF)
                    self.RF = True
                elif not self.RF or do_probe:
                    ProbeF.extract_probeinfo(self.session, lfp, probe_id, self.result_path, False)
                # CSD plot for the probe
                if (not self.CSD) and do_CSD:
                    ProbeF.CSD_plots(self.session, lfp, probe_id, self.result_path)
                # Extract and prepare the data for a condition
                if cond_name is not None:
                    ROI = ProbeF.prepare_condition(self.session, self.session_id, lfp, probe_id, cond_name, self.result_path, pre_stim, down_sample_rate)
                    self.ROIs[ROI] = probe_id
            # Add the pre-process params as a dictionary to the list of preprocessed data
            if cond_name is not None:
                self.preprocess.append(preproc_dict)
            if (not self.CSD) and do_CSD:
                self.CSD = True
            if not self.RF or do_probe:
                self.RF = True
            # Save the session after preprocessing
            self.save_session()
    def load_LFPprobes(self, cond_dict):
        """
        loads in the preprocessed LFP signal
        :param cond_dict: a dictionary with the preprocessing params
        :return: Updates the self.probes values
        """
        preprocess_ind = search_preproc(self.preprocess, cond_dict)
        if not preprocess_ind: # checks if the condition is previously run
            print("no preprocessing with these parameters is done")
            return
        # Use the first matching preprocessing configuration.
        cond = self.preprocess[preprocess_ind[0]]
        for probe_id in self.probes.keys():
            # first prepare the file name
            filename = os.path.join(self.result_path, 'PrepData', '{}_{}{}_pres{}s.pkl'.format(
                probe_id, cond['cond_name'], int(cond['srate']),cond['prestim']))
            # second load each probe and add it to the ROI list
            self.probes[probe_id] = ProbeF.LFPprobe.from_file(filename)
        self.loaded_cond = cond['cond_name']
    def layer_selection(self, Filename=None):
        """
        Spatially down-samples the loaded LFPs by keeping only the channels
        assigned to cortical layers in the (manually prepared) Excel table.
        This will be done on the loaded_cond data.
        :param Filename: path to the layer table; defaults to
            <result_path>/PrepData/Cortical_Layers.xlsx
        :return:
        """
        if Filename==None:
            Filename = os.path.join(self.result_path,'PrepData','Cortical_Layers.xlsx')
        try:
            layer_table = pd.read_excel(Filename)
            # set the layer names as index of the dataframe
            layer_table.set_index('Layers', inplace=True)
        except OSError:
            # if the layer file did not exist then return with an error
            print("Prepare the cortical layer files first as PrepData/Cortical_Layers.xlsx")
            return
        for probe_id in self.probes.keys():
            print(probe_id)
            #ProbeF.layer_reduction(self.probes[probe_id].Y, probe_id, self.result_path)
            channel_id = ProbeF.layer_selection(layer_table, probe_id, self.result_path)
            # select the LFP of those channels, and relabel the xarray dimensions
            if len(channel_id) > 0:
                self.probes[probe_id].Y = self.probes[probe_id].Y.sel(channel=channel_id.to_list())
            else:
                # No layer assignment for this probe -> mark it empty.
                self.probes[probe_id].Y = []
        self.layer_selected = True
    def pdc_analysis(self, ROI_list=None, Mord=10, ff=.99, pdc_method='iPDC', stim_params=None, Freqs=np.array(range(1, 101)), preproc_params=None, redo = False):
        """
        Calculates time- and frequency-resolved functional connectivity between the LFP signals based on STOK algorithm
        :param ROI_list: list of ROIs to be considered for this analysis
        :param Mord: Model order for ARMA model
        :param ff: filter factor between 0 and 1
        :param pdc_method: check the pydynet toolbox for that
        :param stim_params: Parameters of stimulus to be used to pool the data
        :param Freqs: a numpy array including the Frequencies for connectivity analysis
        :param preproc_params: preprocessing parameter dict used to locate the prepared LFPs
        :param redo: recompute even if a saved PDC result exists
        :return: dict with keys session_id, KF, ROIs, PDC, probe_info, PDCparam_dict, preproc_dict
        """
        if ROI_list is None:
            ROI_list = ['VISp']
        if stim_params is None:
            stim_params = []
        #----------------------------------------------------------------------------
        # Check if the PDC exist, just load it
        # analysis params
        PDCparam_dict = {
            'ROI_list': ROI_list,
            'Mord': Mord,
            'ff': ff,
            'pdc_method': pdc_method,
            'stim_param': stim_params
        }
        filename = PDCF.search_PDC(self.session_id, self.result_path, PDCparam_dict, preproc_params)
        if os.path.isfile(filename) and not redo:
            # load the file and return it
            file = open(filename, 'rb')
            PDC_dict = cPickle.load(file)
            file.close()
            return PDC_dict
        #----------------------------------------------------------------------------
        # load the preprocessed LFPs and down sample spatially by selecting 6 layers
        self.load_LFPprobes(preproc_params)
        self.layer_selection()
        # select the conditions and pool their trials together
        Result_pool = self.pool_data(preproc_params=preproc_params, stim_params= stim_params, ROI_list = ROI_list)
        Y = Result_pool['Y']
        Srate = Result_pool['Srate']
        # pull together and ROI-layer index
        srate = np.unique(np.array(list(Srate.values())))
        if len(srate) != 1:
            print("Sampling rates do not match between probes, please check the preprocessing!")
            return
        # Put the data from all ROIs together for PDC calculations
        Y_temp = np.concatenate(list(Y.values()), axis=1) # second dimension is the channels
        Y_temp = np.moveaxis(Y_temp, -1, 0)
        YS = list(Y_temp.shape)
        # Collapse the leading (condition x trial) axes into a single trial axis.
        Y_pool = Y_temp.reshape([YS[0] * YS[1], YS[2], YS[3]])
        # remove possible zero and NaN values (trials)
        nzero_trl = Y_pool[:, :, 10] != 0
        nzero_trl_ind = reduce((lambda x, y: np.logical_or(x, y)), nzero_trl.transpose())
        nNan_trl_ind = np.isnan(Y_pool).sum(axis=2).sum(axis=1) == 0
        Y_pooled = Y_pool[nzero_trl_ind & nNan_trl_ind, :, :]
        # iPDC matrix
        KF = dsspace.dynet_SSM_STOK(Y_pooled, p=Mord, ff=ff)
        iPDC = dcon.dynet_ar2pdc(KF, srate, Freqs, metric=pdc_method, univ=1, flow=2, PSD =1)
        # iPDC to xarray
        # NOTE(review): assumes 'VISp' is always among the pooled ROIs — confirm.
        Time = Y['VISp'].time.values
        ROI_ls = np.array(Result_pool['ROI_labels']).reshape(np.prod(np.array(Result_pool['ROI_labels']).shape))
        iPDC_xr = xr.DataArray(iPDC, dims=['target', 'source', 'freq' , 'time'],
                               coords=dict(target= ROI_ls, source= ROI_ls, freq=Freqs, time=Time))
        # ROIs for output
        ROIs = list(Y.keys())
        chnl_ids = np.array(Result_pool['channel_ids']).reshape(np.prod(np.array(Result_pool['channel_ids']).shape))
        prb_ids = np.array(Result_pool['probe_ids']).reshape(np.prod(np.array(Result_pool['probe_ids']).shape))
        # save and return the output
        PDC_dict = {'session_id':self.session_id, 'KF': KF, 'ROIs': ROIs, 'PDC': iPDC_xr,
                    'probe_info': {'probe_ids': prb_ids, 'channel_ids': chnl_ids}, 'PDCparam_dict': PDCparam_dict, 'preproc_dict': preproc_params}
        PDCF.save_PDC(PDC_dict, self.result_path)
        # save?
        return PDC_dict
    def pool_data(self, preproc_params=None, stim_params= None, ROI_list = None):
        """
        Pools the trials of the stimulus conditions selected by stim_params,
        for every ROI in ROI_list that has a layer-assigned probe.
        :param preproc_params: preprocessing parameter dict (currently unused here)
        :param stim_params: dict mapping stimulus-parameter names to accepted value lists
        :param ROI_list: ROIs to include
        :return: dict with 'Y' (ROI -> LFP xarray), 'Srate', 'ROI_labels',
            'channel_ids' and 'probe_ids'
        """
        # select the conditions and pool their trials together
        Y = {} # to prepare the data for PDC analysis
        Srate = {} # to make sure that Srates match
        ROI_labels = []
        channel_ids = []
        probe_ids = []
        # All ROIs in this session
        All_ROIs = [(self.probes[x].ROI, x) for x in self.probes.keys()]
        for ROI in ROI_list:
            # find the ROIs and the one with Layer assignment
            ch_ind = [i for i, y in enumerate([x[0] for x in All_ROIs]) if y == ROI]
            if bool(ch_ind): # in case of multiple recordings from the same ROI, I only labeled the one with better data
                temp = [len(self.probes[All_ROIs[x][1]].Y)>0 for x in ch_ind]
                Emp_ind = np.where(np.array(temp))[0]# find empty probes -> because no layer was assigned
                if len(Emp_ind)>0:
                    ch_ind = ch_ind[Emp_ind[0]]
                    #ch_ind = ch_ind[temp.index(True)]
                else:
                    ch_ind = []
            # ch_ind==0 is a valid (falsy) index, hence the explicit extra check.
            if bool(ch_ind) or (ch_ind==0): #if there is a probe
                probe_id = All_ROIs[ch_ind][1]
                cnd_info = self.probes[probe_id].cnd_info
                Cnds_inds = []
                # OR within one stimulus parameter's accepted values,
                # AND across different stimulus parameters.
                for k in stim_params.keys():
                    Cnds = [cnd_info[k] == x for x in stim_params[k]]
                    if len(Cnds) > 1:
                        Cnds_temp = reduce((lambda x, y: np.logical_or(x, y)), [c.to_numpy() for c in Cnds])
                        Cnds_inds.append(Cnds_temp)
                    else:
                        Cnds_inds.append(Cnds)
                Cnds_final = np.array(reduce((lambda x, y: np.logical_and(x, y)), Cnds_inds))
                Cnds_inds_final = cnd_info['stimulus_condition_id'].to_numpy()[Cnds_final.squeeze()]
                # Prepare for output
                Y[ROI] = self.probes[probe_id].Y.sel(cnd_id=Cnds_inds_final)
                Srate[ROI] = self.probes[probe_id].srate
                ROI_labels.append(['{}_L{}'.format(ROI, i) for i in range(1, 7)])
                channel_ids.append(Y[ROI].channel.values)
                probe_ids.append([probe_id for l in range(1, 7)])
        # Set other outputs
        # NOTE(review): Time and ROIs below are computed but never used/returned — dead code?
        Time = Y['VISp'].time.values
        ROIs = list(Y.keys())
        return {'Y': Y, 'Srate': Srate, 'ROI_labels':ROI_labels, 'channel_ids':channel_ids, 'probe_ids':probe_ids}
    def plot_LFPs(self, preproc_params=None, stim_params= None, ROI_list = None, TimeWin=None):
        """
        Loads, pools and plots layer-wise average LFPs for the requested ROIs.
        :param preproc_params: preprocessing parameter dict used to locate the prepared LFPs
        :param stim_params: stimulus-parameter filter passed to pool_data
        :param ROI_list: ROIs to plot
        :param TimeWin: [t_min, t_max] plotting window in seconds
            (NOTE(review): LFP_plot indexes TimeWin, so None would fail — confirm callers always pass it)
        :return: dict of ROI -> LFP averaged over trials and conditions
        """
        self.load_LFPprobes(preproc_params)
        self.layer_selection()
        Result_pool = self.pool_data(preproc_params=preproc_params, stim_params=stim_params, ROI_list=ROI_list)
        figure_path = os.path.join(self.result_path, 'Average_LFP_{}_downs{}.png'.format(
            preproc_params['cond_name'], int(preproc_params['srate'])))
        colors = ROIColors('layers')
        LFP_plot(Result_pool['Y'],TimeWin, colors, figure_path)
        # Return averaged Y
        return dict((x,y.mean(axis=(0,3))) for x,y in Result_pool['Y'].items())
def search_preproc(list_pre, dic_pre):
    """
    Search if the preprocessing with the current parameters has been run before.
    :param list_pre: list of previously run parameter dictionaries (self.preprocess)
    :param dic_pre: dictionary with new params
    :return: The indices of pre-processes whose entries match every key/value in dic_pre
    """
    matches = []
    for idx, params in enumerate(list_pre):
        # Count keys shared with dic_pre whose values agree; a full match
        # requires every key of dic_pre to be present and equal.
        hits = sum(params[key] == dic_pre[key] for key in params if key in dic_pre)
        if hits == len(dic_pre):
            matches.append(idx)
    return matches
# maybe also searches if the files exist?
class ROIColors(object):
    """
    A Class that defines uniform colorings for ROIs and layers for visualization
    """
    def __init__(self, color_type='uni'):
        """
        Initializes the colors class
        :param color_type: 'uni'/'layers' indicate if it should return only one color per ROI ('Uni')
            or 6 colors per ROI, for 6 layers('Layers')
        """
        base_rgb = {'VISp': [.43, .25, .63], 'VISl': [0.03, 0.29, 0.48], 'VISrl': [0.26, 0.68, 0.76],
                    'VISal': [0.65, 0.46, 0.11], 'VISpm': [1, .7, .3], 'VISam': [0.8, 0.11, 0.11]}
        self.ROI_names = {'VISp': 'V1', 'VISl': 'LM', 'VISrl': 'RL', 'VISal': 'AL', 'VISpm': 'PM', 'VISam': 'AM'}
        if color_type == 'uni':
            # One color per ROI, both as RGB triplets and as hex strings.
            self.roi_colors_rgb = base_rgb
            hex_map = {}
            for roi, rgb in base_rgb.items():
                hex_map[roi] = '#%02x%02x%02x' % (int(rgb[0] * 255), int(rgb[1] * 255), int(rgb[2] * 255))
            self.roi_colors_hex = hex_map
        elif color_type == 'layers':
            # Six brightness-shifted variants per ROI (one per cortical layer),
            # clipped into the valid [0, 1] RGB range.
            shifts = np.arange(-.25, .26, .1)
            rgb_layers = {}
            hex_layers = {}
            for roi, rgb in base_rgb.items():
                layered = np.array([np.minimum(np.maximum(rgb + s, 0), 1) for s in shifts])
                rgb_layers[roi] = layered
                hex_layers[roi] = ['#%02x%02x%02x' % (int(c[0] * 255), int(c[1] * 255), int(c[2] * 255))
                                   for c in layered]
            self.roi_colors_rgb = rgb_layers
            self.roi_colors_hex = hex_layers
        else:
            print ('Wrong color type')
            return
        self.color_type = color_type
def LFP_plot(Y, TimeWin, colors, figure_path):
    """
    A general function to plot LFP averages: one subplot per ROI, with the
    per-layer mean traces stacked vertically and their SEM shaded.
    :param Y: dict of ROI -> LFP data with dimensions trials x layers x time x conditions
    :param TimeWin: [t_min, t_max] plotting window in seconds (indexed directly, must not be None)
    :param colors: ROIColors instance built with color_type='layers'
    :param figure_path: output path for the saved PNG
    :return: None (figure is saved to figure_path and closed)
    """
    nroi = len(Y.keys())
    fig, axs = plt.subplots(nrows=nroi, ncols=1, figsize=(6, 2 * nroi), sharex=True)
    # ordered ROIs: for uniformity purpose
    ordered_rois = ['VISp','VISl','VISrl','VISal','VISpm','VISam']
    # NOTE(review): if Y contains a key not in ordered_rois, ROIs is shorter
    # than nroi and ROIs[i] below raises IndexError — confirm inputs.
    ROIs = list(filter(lambda x: (x in list(Y.keys())), ordered_rois))
    # for each ROI plot mean and SEM
    for i in range(0, nroi):
        roi = ROIs[i]
        T = Y[roi].time.values
        T_ind = np.where((T >= TimeWin[0]) & (T <= TimeWin[1]))[0]
        y = Y[roi].isel(time=T_ind)
        # Move the condition axis to the front, then merge it with trials so
        # mean/SEM are taken over all trial/condition combinations.
        y = np.moveaxis(y.__array__(), -1, 0)
        dims = y.shape
        y2 = y.reshape(dims[0] * dims[1], dims[2], dims[3])
        MEAN = np.nanmean(y2, axis=0).transpose()
        SEM = (np.nanstd(y2, axis=0) / (y2.shape[0] ** .5)).transpose()
        # Vertical spacing between stacked layer traces: the global peak amplitude.
        offset = abs(MEAN).max(axis=(0, 1))
        yticks = np.zeros([MEAN.shape[1],1])
        for l in range(0, MEAN.shape[1]):
            # Shift layer l down by l*offset so the traces don't overlap.
            MEAN_plot = MEAN[:, l] - (offset * l)
            axs[i].plot(T[T_ind], MEAN_plot,
                        linewidth=1, label='L{}'.format(l), color=colors.roi_colors_hex[roi][l])
            axs[i].fill_between(T[T_ind], MEAN[:, l] - (offset * l) + SEM[:, l], MEAN[:, l] - (offset * l) - SEM[:, l],
                                alpha=.5, color=colors.roi_colors_hex[roi][l])
            # Place the layer's y-tick at its mean prestimulus (t < 0) level.
            yticks[l]= MEAN_plot[T[T_ind]<0].mean()
        axs[i].set_title(colors.ROI_names[roi])
        axs[i].set_yticks(yticks)
        axs[i].set_yticklabels(['L{}'.format(i+1) for i in range(0, MEAN.shape[1])])
        # Mark stimulus onset.
        axs[i].axvline(x=0, linewidth=1, linestyle='--', color='k')
        axs[i].grid(True)
        if i == nroi - 1:
            axs[i].set_xlabel('Time(S)',fontweight='bold')
        axs[i].set_xlim(TimeWin[0], TimeWin[1])
        #axs[i].legend(loc='right')
    plt.savefig(figure_path, bbox_inches='tight', dpi=300)
    plt.close(fig)
def aggregate_LFP_ROI(Y_list):
    """
    Aggregates per-session LFPs into one xarray per ROI across sessions.
    For every ROI that appears in at least one session, stacks that ROI's LFP
    from all sessions that contain it along a new 'trial' (session) axis,
    keeping only time points that are non-NaN in every session.
    :param Y_list: dict of session_id -> (dict of ROI -> LFP xarray)
    :return: dict with 'session_ids', 'ROIs' and 'Y' (ROI -> stacked xarray
        with dims trial x channel x time x cnd_id)
    """
    ROIs_All = reduce(lambda x, y: list(set().union(x, y)), [x.keys() for x in Y_list.values()])
    Y_ROI_all = {'session_ids': Y_list.keys(),
                 'ROIs': ROIs_All,
                 'Y': {}}
    # first indicate the ROIs in the list
    for roi in ROIs_All:
        # Sessions (by position) that actually contain this ROI.
        s_ids = np.where(np.array([list(x.keys()).count(roi) > 0 for x in Y_list.values()]))[0]
        # -for animals with that ROI: make a list and concatenate them-
        LFP_temp = [Y_list[list(Y_list.keys())[x]][roi] for x in s_ids]
        # -time indexes with non NaN values and round them 3 digit to be uniform-
        NNan_ind = [np.logical_not(np.isnan(x.time.values)) for x in LFP_temp]
        # Intersect validity masks, truncated to the shortest session's length.
        NNan_ind = reduce(lambda x, y: np.logical_and(x[:min(len(x), len(y))], y[:min(len(x), len(y))]), NNan_ind)
        LFP_temp2 = []
        for lfp in LFP_temp: # loop over animals
            # NOTE(review): assigning to .values mutates the caller's arrays
            # in place, and may not be supported on newer xarray — confirm.
            lfp.time.values = np.round(lfp.time.values, 3)
            lfp.channel.values = np.arange(0,len(lfp.channel.values))
            LFP_temp2.append(lfp.isel(time=np.where(NNan_ind)[0]))
        # -calculate average over animals-??
        #Y_ROI_all['Y'][roi] = np.array(LFP_temp2).mean(axis=0)
        Y_temp = np.expand_dims(np.array(LFP_temp2),axis=3)
        # NOTE(review): `lfp` here is the last animal's LFP leaking from the
        # loop above — its coords are reused for the stacked array.
        Y_ROI_all['Y'][roi] = xr.DataArray(Y_temp, dims=['trial', 'channel', 'time', 'cnd_id'],
                                           coords=dict(trial=range(0, Y_temp.shape[0]), channel=lfp.channel.values, time=lfp.time.values[:Y_temp.shape[2]], cnd_id=[1]))
    return Y_ROI_all
19,515 | 082e3fa5305ac3ed849d96124e194cf513d7f89c | from __future__ import absolute_import, unicode_literals
import datetime
import os
from decimal import Decimal
from django import forms
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.validators import ValidationError
from django.db import connection
from django.forms.models import model_to_dict
from django.utils.unittest import skipUnless
from django.test import TestCase
from .models import (Article, ArticleStatus, BetterWriter, BigInt, Book,
Category, CommaSeparatedInteger, CustomFieldForExclusionModel, DerivedBook,
DerivedPost, ExplicitPK, FlexibleDatePost, ImprovedArticle,
ImprovedArticleWithParentLink, Inventory, PhoneNumber, Post, Price,
Product, TextFile, Writer, WriterProfile, test_images)
# Image-backed forms are only defined when the models module reports that
# test images are usable (presumably an image library is installed — see
# `test_images` in .models; confirm).
if test_images:
    from .models import ImageFile, OptionalImageFile
    class ImageFileForm(forms.ModelForm):
        class Meta:
            model = ImageFile
    class OptionalImageFileForm(forms.ModelForm):
        class Meta:
            model = OptionalImageFile
# Plain ModelForms for the test models; with no `fields`/`exclude` in Meta,
# each form exposes all of its model's editable fields.
class ProductForm(forms.ModelForm):
    class Meta:
        model = Product
class PriceForm(forms.ModelForm):
    class Meta:
        model = Price
class BookForm(forms.ModelForm):
    class Meta:
       model = Book
class DerivedBookForm(forms.ModelForm):
    class Meta:
        model = DerivedBook
class ExplicitPKForm(forms.ModelForm):
    class Meta:
        model = ExplicitPK
        fields = ('key', 'desc',)
class PostForm(forms.ModelForm):
    class Meta:
        model = Post
class DerivedPostForm(forms.ModelForm):
    class Meta:
        model = DerivedPost
class CustomWriterForm(forms.ModelForm):
    # Overrides the model's (blank=False) name field with an optional one.
    name = forms.CharField(required=False)
    class Meta:
        model = Writer
class FlexDatePostForm(forms.ModelForm):
    class Meta:
        model = FlexibleDatePost
class BaseCategoryForm(forms.ModelForm):
    class Meta:
        model = Category
# NOTE: this identical class was originally declared twice in a row; the
# second declaration merely rebound the name, so the duplicate was removed.
class ArticleForm(forms.ModelForm):
    class Meta:
        model = Article
# Forms restricting, extending or customizing individual test models.
class PartialArticleForm(forms.ModelForm):
    class Meta:
        model = Article
        fields = ('headline','pub_date')
class RoykoForm(forms.ModelForm):
    class Meta:
        model = Writer
class TestArticleForm(forms.ModelForm):
    class Meta:
        model = Article
class PartialArticleFormWithSlug(forms.ModelForm):
    class Meta:
        model = Article
        fields=('headline', 'slug', 'pub_date')
class ArticleStatusForm(forms.ModelForm):
    class Meta:
        model = ArticleStatus
class InventoryForm(forms.ModelForm):
    class Meta:
        model = Inventory
class SelectInventoryForm(forms.Form):
    # Plain Form (not ModelForm); choices are matched on barcode, not pk.
    items = forms.ModelMultipleChoiceField(Inventory.objects.all(), to_field_name='barcode')
class CustomFieldForExclusionForm(forms.ModelForm):
    class Meta:
        model = CustomFieldForExclusionModel
        fields = ['name', 'markup']
class ShortCategory(forms.ModelForm):
    # Replaces the model fields with tighter max_length constraints.
    name = forms.CharField(max_length=5)
    slug = forms.CharField(max_length=5)
    url = forms.CharField(max_length=3)
class ImprovedArticleForm(forms.ModelForm):
    class Meta:
        model = ImprovedArticle
class ImprovedArticleWithParentLinkForm(forms.ModelForm):
    class Meta:
        model = ImprovedArticleWithParentLink
class BetterWriterForm(forms.ModelForm):
    class Meta:
        model = BetterWriter
class WriterProfileForm(forms.ModelForm):
    class Meta:
        model = WriterProfile
class PhoneNumberForm(forms.ModelForm):
    class Meta:
        model = PhoneNumber
class TextFileForm(forms.ModelForm):
    class Meta:
        model = TextFile
class BigIntForm(forms.ModelForm):
    class Meta:
        model = BigInt
class ModelFormWithMedia(forms.ModelForm):
    # Exercises the form Media machinery (asset declarations).
    class Media:
        js = ('/some/form/javascript',)
        css = {
            'all': ('/some/form/css',)
        }
    class Meta:
        model = PhoneNumber
class CommaSeparatedIntegerForm(forms.ModelForm):
    class Meta:
        model = CommaSeparatedInteger
class PriceFormWithoutQuantity(forms.ModelForm):
    class Meta:
        model = Price
        exclude = ('quantity',)
class ModelFormBaseTest(TestCase):
    """Tests for how ModelForm metaclass options (fields/exclude/inheritance)
    determine the generated base_fields.

    NOTE(review): the `.keys() == [...]` comparisons rely on dict.keys()
    returning an ordered list — this is Python 2-era Django test code.
    """
    def test_base_form(self):
        self.assertEqual(BaseCategoryForm.base_fields.keys(),
                         ['name', 'slug', 'url'])
    def test_extra_fields(self):
        # Extra declared fields are appended after the model-derived ones.
        class ExtraFields(BaseCategoryForm):
            some_extra_field = forms.BooleanField()
        self.assertEqual(ExtraFields.base_fields.keys(),
                         ['name', 'slug', 'url', 'some_extra_field'])
    def test_replace_field(self):
        # A declared field overrides the model-derived field of the same name.
        class ReplaceField(forms.ModelForm):
            url = forms.BooleanField()
            class Meta:
                model = Category
        self.assertTrue(isinstance(ReplaceField.base_fields['url'],
                                   forms.fields.BooleanField))
    def test_override_field(self):
        # required=False on the override wins over the model's blank=False.
        class WriterForm(forms.ModelForm):
            book = forms.CharField(required=False)
            class Meta:
                model = Writer
        wf = WriterForm({'name': 'Richard Lockridge'})
        self.assertTrue(wf.is_valid())
    def test_limit_fields(self):
        class LimitFields(forms.ModelForm):
            class Meta:
                model = Category
                fields = ['url']
        self.assertEqual(LimitFields.base_fields.keys(),
                         ['url'])
    def test_exclude_fields(self):
        class ExcludeFields(forms.ModelForm):
            class Meta:
                model = Category
                exclude = ['url']
        self.assertEqual(ExcludeFields.base_fields.keys(),
                         ['name', 'slug'])
    def test_confused_form(self):
        class ConfusedForm(forms.ModelForm):
            """ Using 'fields' *and* 'exclude'. Not sure why you'd want to do
            this, but uh, "be liberal in what you accept" and all.
            """
            class Meta:
                model = Category
                fields = ['name', 'url']
                exclude = ['url']
        self.assertEqual(ConfusedForm.base_fields.keys(),
                         ['name'])
    def test_mixmodel_form(self):
        class MixModelForm(BaseCategoryForm):
            """ Don't allow more than one 'model' definition in the
            inheritance hierarchy. Technically, it would generate a valid
            form, but the fact that the resulting save method won't deal with
            multiple objects is likely to trip up people not familiar with the
            mechanics.
            """
            class Meta:
                model = Article
            # MixModelForm is now an Article-related thing, because MixModelForm.Meta
            # overrides BaseCategoryForm.Meta.
        self.assertEqual(
            MixModelForm.base_fields.keys(),
            ['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
        )
    def test_article_form(self):
        self.assertEqual(
            ArticleForm.base_fields.keys(),
            ['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
        )
    def test_bad_form(self):
        #First class with a Meta class wins...
        class BadForm(ArticleForm, BaseCategoryForm):
            pass
        self.assertEqual(
            BadForm.base_fields.keys(),
            ['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
        )
    def test_subcategory_form(self):
        class SubCategoryForm(BaseCategoryForm):
            """ Subclassing without specifying a Meta on the class will use
            the parent's Meta (or the first parent in the MRO if there are
            multiple parent classes).
            """
            pass
        self.assertEqual(SubCategoryForm.base_fields.keys(),
                         ['name', 'slug', 'url'])
    def test_subclassmeta_form(self):
        class SomeCategoryForm(forms.ModelForm):
            checkbox = forms.BooleanField()
            class Meta:
                model = Category
        class SubclassMeta(SomeCategoryForm):
            """ We can also subclass the Meta inner class to change the fields
            list.
            """
            class Meta(SomeCategoryForm.Meta):
                exclude = ['url']
        self.assertHTMLEqual(
            str(SubclassMeta()),
            """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
<tr><th><label for="id_slug">Slug:</label></th><td><input id="id_slug" type="text" name="slug" maxlength="20" /></td></tr>
<tr><th><label for="id_checkbox">Checkbox:</label></th><td><input type="checkbox" name="checkbox" id="id_checkbox" /></td></tr>"""
            )
    def test_orderfields_form(self):
        class OrderFields(forms.ModelForm):
            class Meta:
                model = Category
                fields = ['url', 'name']
        self.assertEqual(OrderFields.base_fields.keys(),
                         ['url', 'name'])
        self.assertHTMLEqual(
            str(OrderFields()),
            """<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>
<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>"""
            )
    def test_orderfields2_form(self):
        class OrderFields2(forms.ModelForm):
            class Meta:
                model = Category
                fields = ['slug', 'url', 'name']
                exclude = ['url']
        self.assertEqual(OrderFields2.base_fields.keys(),
                         ['slug', 'name'])
class TestWidgetForm(forms.ModelForm):
    # Exercises Meta.widgets: per-field widget class or configured instance.
    class Meta:
        model = Category
        fields = ['name', 'url', 'slug']
        widgets = {
            'name': forms.Textarea,
            'url': forms.TextInput(attrs={'class': 'url'})
        }
class TestWidgets(TestCase):
    """Checks that Meta.widgets overrides render, and unlisted fields keep defaults."""
    def test_base_widgets(self):
        frm = TestWidgetForm()
        self.assertHTMLEqual(
            str(frm['name']),
            '<textarea id="id_name" rows="10" cols="40" name="name"></textarea>'
        )
        self.assertHTMLEqual(
            str(frm['url']),
            '<input id="id_url" type="text" class="url" name="url" maxlength="40" />'
        )
        # 'slug' is not in Meta.widgets, so it renders with the default TextInput.
        self.assertHTMLEqual(
            str(frm['slug']),
            '<input id="id_slug" type="text" name="slug" maxlength="20" />'
        )
class IncompleteCategoryFormWithFields(forms.ModelForm):
    """
    A form that replaces the model's url field with a custom one. This should
    prevent the model field's validation from being called.
    """
    url = forms.CharField(required=False)
    class Meta:
        # 'url' is omitted from fields; the declared CharField above is used instead.
        fields = ('name', 'slug')
        model = Category
class IncompleteCategoryFormWithExclude(forms.ModelForm):
    """
    A form that replaces the model's url field with a custom one. This should
    prevent the model field's validation from being called.
    """
    url = forms.CharField(required=False)
    class Meta:
        # Same effect as above, expressed via exclude instead of fields.
        exclude = ['url']
        model = Category
class ValidationTest(TestCase):
    """Validation behavior when a model field is replaced by a declared form field.

    Fix: the original used bare `assert`, which is stripped under `python -O`
    and gives no diagnostic on failure — use self.assertTrue instead.
    """
    def test_validates_with_replaced_field_not_specified(self):
        form = IncompleteCategoryFormWithFields(data={'name': 'some name', 'slug': 'some-slug'})
        self.assertTrue(form.is_valid())
    def test_validates_with_replaced_field_excluded(self):
        form = IncompleteCategoryFormWithExclude(data={'name': 'some name', 'slug': 'some-slug'})
        self.assertTrue(form.is_valid())
    def test_notrequired_overrides_notblank(self):
        # required=False on the declared field beats the model's blank=False.
        form = CustomWriterForm({})
        self.assertTrue(form.is_valid())
# unique/unique_together validation
class UniqueTest(TestCase):
def setUp(self):
self.writer = Writer.objects.create(name='Mike Royko')
def test_simple_unique(self):
form = ProductForm({'slug': 'teddy-bear-blue'})
self.assertTrue(form.is_valid())
obj = form.save()
form = ProductForm({'slug': 'teddy-bear-blue'})
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ['Product with this Slug already exists.'])
form = ProductForm({'slug': 'teddy-bear-blue'}, instance=obj)
self.assertTrue(form.is_valid())
def test_unique_together(self):
"""ModelForm test of unique_together constraint"""
form = PriceForm({'price': '6.00', 'quantity': '1'})
self.assertTrue(form.is_valid())
form.save()
form = PriceForm({'price': '6.00', 'quantity': '1'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'], ['Price with this Price and Quantity already exists.'])
def test_unique_null(self):
title = 'I May Be Wrong But I Doubt It'
form = BookForm({'title': title, 'author': self.writer.pk})
self.assertTrue(form.is_valid())
form.save()
form = BookForm({'title': title, 'author': self.writer.pk})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'], ['Book with this Title and Author already exists.'])
form = BookForm({'title': title})
self.assertTrue(form.is_valid())
form.save()
form = BookForm({'title': title})
self.assertTrue(form.is_valid())
def test_inherited_unique(self):
title = 'Boss'
Book.objects.create(title=title, author=self.writer, special_id=1)
form = DerivedBookForm({'title': 'Other', 'author': self.writer.pk, 'special_id': '1', 'isbn': '12345'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['special_id'], ['Book with this Special id already exists.'])
def test_inherited_unique_together(self):
title = 'Boss'
form = BookForm({'title': title, 'author': self.writer.pk})
self.assertTrue(form.is_valid())
form.save()
form = DerivedBookForm({'title': title, 'author': self.writer.pk, 'isbn': '12345'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'], ['Book with this Title and Author already exists.'])
def test_abstract_inherited_unique(self):
title = 'Boss'
isbn = '12345'
dbook = DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn)
form = DerivedBookForm({'title': 'Other', 'author': self.writer.pk, 'isbn': isbn})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['isbn'], ['Derived book with this Isbn already exists.'])
def test_abstract_inherited_unique_together(self):
title = 'Boss'
isbn = '12345'
dbook = DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn)
form = DerivedBookForm({
'title': 'Other',
'author': self.writer.pk,
'isbn': '9876',
'suffix1': '0',
'suffix2': '0'
})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'],
['Derived book with this Suffix1 and Suffix2 already exists.'])
def test_explicitpk_unspecified(self):
"""Test for primary_key being in the form and failing validation."""
form = ExplicitPKForm({'key': '', 'desc': '' })
self.assertFalse(form.is_valid())
def test_explicitpk_unique(self):
"""Ensure keys and blank character strings are tested for uniqueness."""
form = ExplicitPKForm({'key': 'key1', 'desc': ''})
self.assertTrue(form.is_valid())
form.save()
form = ExplicitPKForm({'key': 'key1', 'desc': ''})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 3)
self.assertEqual(form.errors['__all__'], ['Explicit pk with this Key and Desc already exists.'])
self.assertEqual(form.errors['desc'], ['Explicit pk with this Desc already exists.'])
self.assertEqual(form.errors['key'], ['Explicit pk with this Key already exists.'])
def test_unique_for_date(self):
p = Post.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = PostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['title'], ['Title must be unique for Posted date.'])
form = PostForm({'title': "Work on Django 1.1 begins", 'posted': '2008-09-03'})
self.assertTrue(form.is_valid())
form = PostForm({'title': "Django 1.0 is released", 'posted': '2008-09-04'})
self.assertTrue(form.is_valid())
form = PostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ['Slug must be unique for Posted year.'])
form = PostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['subtitle'], ['Subtitle must be unique for Posted month.'])
form = PostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
"slug": "Django 1.0", 'posted': '2008-09-03'}, instance=p)
self.assertTrue(form.is_valid())
form = PostForm({'title': "Django 1.0 is released"})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['posted'], ['This field is required.'])
def test_inherited_unique_for_date(self):
p = Post.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = DerivedPostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['title'], ['Title must be unique for Posted date.'])
form = DerivedPostForm({'title': "Work on Django 1.1 begins", 'posted': '2008-09-03'})
self.assertTrue(form.is_valid())
form = DerivedPostForm({'title': "Django 1.0 is released", 'posted': '2008-09-04'})
self.assertTrue(form.is_valid())
form = DerivedPostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ['Slug must be unique for Posted year.'])
form = DerivedPostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['subtitle'], ['Subtitle must be unique for Posted month.'])
form = DerivedPostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
"slug": "Django 1.0", 'posted': '2008-09-03'}, instance=p)
self.assertTrue(form.is_valid())
def test_unique_for_date_with_nullable_date(self):
p = FlexibleDatePost.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = FlexDatePostForm({'title': "Django 1.0 is released"})
self.assertTrue(form.is_valid())
form = FlexDatePostForm({'slug': "Django 1.0"})
self.assertTrue(form.is_valid())
form = FlexDatePostForm({'subtitle': "Finally"})
self.assertTrue(form.is_valid())
form = FlexDatePostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
"slug": "Django 1.0"}, instance=p)
self.assertTrue(form.is_valid())
class OldFormForXTests(TestCase):
    def test_base_form(self):
        """An unbound ModelForm renders every model field as table rows,
        list items, and individual bound-field widgets."""
        # Sanity check: rendering alone must not touch the database.
        self.assertEqual(Category.objects.count(), 0)
        f = BaseCategoryForm()
        # str(form) defaults to the as_table() rendering.
        self.assertHTMLEqual(
            str(f),
            """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
<tr><th><label for="id_slug">Slug:</label></th><td><input id="id_slug" type="text" name="slug" maxlength="20" /></td></tr>
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>"""
        )
        # as_ul() wraps each field in an <li> instead of a table row.
        self.assertHTMLEqual(
            str(f.as_ul()),
            """<li><label for="id_name">Name:</label> <input id="id_name" type="text" name="name" maxlength="20" /></li>
<li><label for="id_slug">Slug:</label> <input id="id_slug" type="text" name="slug" maxlength="20" /></li>
<li><label for="id_url">The URL:</label> <input id="id_url" type="text" name="url" maxlength="40" /></li>"""
        )
        # Indexing the form by field name yields just that field's widget.
        self.assertHTMLEqual(
            str(f["name"]),
            """<input id="id_name" type="text" name="name" maxlength="20" />""")
    def test_auto_id(self):
        """auto_id=False suppresses id attributes and <label> tags entirely."""
        f = BaseCategoryForm(auto_id=False)
        self.assertHTMLEqual(
            str(f.as_ul()),
            """<li>Name: <input type="text" name="name" maxlength="20" /></li>
<li>Slug: <input type="text" name="slug" maxlength="20" /></li>
<li>The URL: <input type="text" name="url" maxlength="40" /></li>"""
        )
    def test_with_data(self):
        """Broad end-to-end exercise of ModelForm behaviour.

        Covers, in order: basic save(), save(commit=False), ValueError on
        saving an invalid form, FK/M2M widget rendering, editing via
        instance=, field subsetting, initial data, save_m2m(), direct use
        of ModelChoiceField and ModelMultipleChoiceField, and OneToOneField
        form generation. The steps are order-dependent: later assertions
        rely on the categories/writers created earlier.
        """
        self.assertEqual(Category.objects.count(), 0)
        f = BaseCategoryForm({'name': 'Entertainment',
                              'slug': 'entertainment',
                              'url': 'entertainment'})
        self.assertTrue(f.is_valid())
        self.assertEqual(f.cleaned_data['name'], 'Entertainment')
        self.assertEqual(f.cleaned_data['slug'], 'entertainment')
        self.assertEqual(f.cleaned_data['url'], 'entertainment')
        c1 = f.save()
        # Testing whether the same object is returned from the
        # ORM... not the fastest way...
        self.assertEqual(c1, Category.objects.all()[0])
        self.assertEqual(c1.name, "Entertainment")
        self.assertEqual(Category.objects.count(), 1)
        f = BaseCategoryForm({'name': "It's a test",
                              'slug': 'its-test',
                              'url': 'test'})
        self.assertTrue(f.is_valid())
        self.assertEqual(f.cleaned_data['name'], "It's a test")
        self.assertEqual(f.cleaned_data['slug'], 'its-test')
        self.assertEqual(f.cleaned_data['url'], 'test')
        c2 = f.save()
        # Testing whether the same object is returned from the
        # ORM... not the fastest way...
        self.assertEqual(c2, Category.objects.get(pk=c2.pk))
        self.assertEqual(c2.name, "It's a test")
        self.assertEqual(Category.objects.count(), 2)
        # If you call save() with commit=False, then it will return an object that
        # hasn't yet been saved to the database. In this case, it's up to you to call
        # save() on the resulting model instance.
        f = BaseCategoryForm({'name': 'Third test', 'slug': 'third-test', 'url': 'third'})
        self.assertEqual(f.is_valid(), True)
        self.assertEqual(f.cleaned_data['url'], 'third')
        self.assertEqual(f.cleaned_data['name'], 'Third test')
        self.assertEqual(f.cleaned_data['slug'], 'third-test')
        c3 = f.save(commit=False)
        self.assertEqual(c3.name, "Third test")
        # Not in the database until c3.save() is called explicitly.
        self.assertEqual(Category.objects.count(), 2)
        c3.save()
        self.assertEqual(Category.objects.count(), 3)
        # If you call save() with invalid data, you'll get a ValueError.
        f = BaseCategoryForm({'name': '', 'slug': 'not a slug!', 'url': 'foo'})
        self.assertEqual(f.errors['name'], ['This field is required.'])
        self.assertEqual(f.errors['slug'], ["Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."])
        # cleaned_data is never set on a form that failed validation.
        with self.assertRaises(AttributeError):
            f.cleaned_data
        with self.assertRaises(ValueError):
            f.save()
        f = BaseCategoryForm({'name': '', 'slug': '', 'url': 'foo'})
        with self.assertRaises(ValueError):
            f.save()
        # Create a couple of Writers.
        w_royko = Writer(name='Mike Royko')
        w_royko.save()
        w_woodward = Writer(name='Bob Woodward')
        w_woodward.save()
        # ManyToManyFields are represented by a MultipleChoiceField, ForeignKeys and any
        # fields with the 'choices' attribute are represented by a ChoiceField.
        self.assertHTMLEqual(unicode(f), '''<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
<tr><th>Slug:</th><td><input type="text" name="slug" maxlength="50" /></td></tr>
<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>
<tr><th>Writer:</th><td><select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></td></tr>
<tr><th>Article:</th><td><textarea rows="10" cols="40" name="article"></textarea></td></tr>
<tr><th>Categories:</th><td><select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select><br /><span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></td></tr>
<tr><th>Status:</th><td><select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></td></tr>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
        # You can restrict a form to a subset of the complete list of fields
        # by providing a 'fields' argument. If you try to save a
        # model created with such a form, you need to ensure that the fields
        # that are _not_ on the form have default values, or are allowed to have
        # a value of None. If a field isn't specified on a form, the object created
        # from the form can't provide a value for that field!
        f = PartialArticleForm(auto_id=False)
        self.assertHTMLEqual(unicode(f), '''<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>''')
        # When the ModelForm is passed an instance, that instance's current values are
        # inserted as 'initial' data in each Field.
        w = Writer.objects.get(name='Mike Royko')
        f = RoykoForm(auto_id=False, instance=w)
        self.assertHTMLEqual(unicode(f), '''<tr><th>Name:</th><td><input type="text" name="name" value="Mike Royko" maxlength="50" /><br /><span class="helptext">Use both first and last names.</span></td></tr>''')
        art = Article(
            headline='Test article',
            slug='test-article',
            pub_date=datetime.date(1988, 1, 4),
            writer=w,
            article='Hello.'
        )
        art.save()
        art_id_1 = art.id
        self.assertEqual(art_id_1 is not None, True)
        f = TestArticleForm(auto_id=False, instance=art)
        self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="Test article" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="test-article" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
<li>Writer: <select name="writer">
<option value="">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s" selected="selected">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article">Hello.</textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
        # Saving a bound form with instance= updates that same row in place.
        f = TestArticleForm({
            'headline': 'Test headline',
            'slug': 'test-headline',
            'pub_date': '1984-02-06',
            'writer': unicode(w_royko.pk),
            'article': 'Hello.'
        }, instance=art)
        self.assertEqual(f.errors, {})
        self.assertEqual(f.is_valid(), True)
        test_art = f.save()
        self.assertEqual(test_art.id == art_id_1, True)
        test_art = Article.objects.get(id=art_id_1)
        self.assertEqual(test_art.headline, 'Test headline')
        # You can create a form over a subset of the available fields
        # by specifying a 'fields' argument to form_for_instance.
        f = PartialArticleFormWithSlug({
            'headline': 'New headline',
            'slug': 'new-headline',
            'pub_date': '1988-01-04'
        }, auto_id=False, instance=art)
        self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>''')
        self.assertEqual(f.is_valid(), True)
        new_art = f.save()
        self.assertEqual(new_art.id == art_id_1, True)
        new_art = Article.objects.get(id=art_id_1)
        self.assertEqual(new_art.headline, 'New headline')
        # Add some categories and test the many-to-many form output.
        self.assertEqual(map(lambda o: o.name, new_art.categories.all()), [])
        new_art.categories.add(Category.objects.get(name='Entertainment'))
        self.assertEqual(map(lambda o: o.name, new_art.categories.all()), ["Entertainment"])
        f = TestArticleForm(auto_id=False, instance=new_art)
        self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
<li>Writer: <select name="writer">
<option value="">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s" selected="selected">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article">Hello.</textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s" selected="selected">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
        # Initial values can be provided for model forms
        f = TestArticleForm(
            auto_id=False,
            initial={
                'headline': 'Your headline here',
                'categories': [str(c1.id), str(c2.id)]
            })
        self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="Your headline here" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s" selected="selected">Entertainment</option>
<option value="%s" selected="selected">It's a test</option>
<option value="%s">Third test</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
        # Submitting category ids sets the m2m relation on save.
        f = TestArticleForm({
            'headline': 'New headline',
            'slug': 'new-headline',
            'pub_date': '1988-01-04',
            'writer': unicode(w_royko.pk),
            'article': 'Hello.',
            'categories': [unicode(c1.id), unicode(c2.id)]
        }, instance=new_art)
        new_art = f.save()
        self.assertEqual(new_art.id == art_id_1, True)
        new_art = Article.objects.get(id=art_id_1)
        self.assertEqual(map(lambda o: o.name, new_art.categories.order_by('name')),
                         ["Entertainment", "It's a test"])
        # Now, submit form data with no categories. This deletes the existing categories.
        f = TestArticleForm({'headline': 'New headline', 'slug': 'new-headline', 'pub_date': '1988-01-04',
            'writer': unicode(w_royko.pk), 'article': 'Hello.'}, instance=new_art)
        new_art = f.save()
        self.assertEqual(new_art.id == art_id_1, True)
        new_art = Article.objects.get(id=art_id_1)
        self.assertEqual(map(lambda o: o.name, new_art.categories.all()), [])
        # Create a new article, with categories, via the form.
        f = ArticleForm({'headline': 'The walrus was Paul', 'slug': 'walrus-was-paul', 'pub_date': '1967-11-01',
            'writer': unicode(w_royko.pk), 'article': 'Test.', 'categories': [unicode(c1.id), unicode(c2.id)]})
        new_art = f.save()
        art_id_2 = new_art.id
        self.assertEqual(art_id_2 not in (None, art_id_1), True)
        new_art = Article.objects.get(id=art_id_2)
        self.assertEqual(map(lambda o: o.name, new_art.categories.order_by('name')), ["Entertainment", "It's a test"])
        # Create a new article, with no categories, via the form.
        f = ArticleForm({'headline': 'The walrus was Paul', 'slug': 'walrus-was-paul', 'pub_date': '1967-11-01',
            'writer': unicode(w_royko.pk), 'article': 'Test.'})
        new_art = f.save()
        art_id_3 = new_art.id
        self.assertEqual(art_id_3 not in (None, art_id_1, art_id_2), True)
        new_art = Article.objects.get(id=art_id_3)
        self.assertEqual(map(lambda o: o.name, new_art.categories.all()), [])
        # Create a new article, with categories, via the form, but use commit=False.
        # The m2m data won't be saved until save_m2m() is invoked on the form.
        f = ArticleForm({'headline': 'The walrus was Paul', 'slug': 'walrus-was-paul', 'pub_date': '1967-11-01',
            'writer': unicode(w_royko.pk), 'article': 'Test.', 'categories': [unicode(c1.id), unicode(c2.id)]})
        new_art = f.save(commit=False)
        # Manually save the instance
        new_art.save()
        art_id_4 = new_art.id
        self.assertEqual(art_id_4 not in (None, art_id_1, art_id_2, art_id_3), True)
        # The instance doesn't have m2m data yet
        new_art = Article.objects.get(id=art_id_4)
        self.assertEqual(map(lambda o: o.name, new_art.categories.all()), [])
        # Save the m2m data on the form
        f.save_m2m()
        self.assertEqual(map(lambda o: o.name, new_art.categories.order_by('name')), ["Entertainment", "It's a test"])
        # Here, we define a custom ModelForm. Because it happens to have the same fields as
        # the Category model, we can just call the form's save() to apply its changes to an
        # existing Category instance.
        cat = Category.objects.get(name='Third test')
        self.assertEqual(cat.name, "Third test")
        self.assertEqual(cat.id == c3.id, True)
        form = ShortCategory({'name': 'Third', 'slug': 'third', 'url': '3rd'}, instance=cat)
        self.assertEqual(form.save().name, 'Third')
        self.assertEqual(Category.objects.get(id=c3.id).name, 'Third')
        # Here, we demonstrate that choices for a ForeignKey ChoiceField are determined
        # at runtime, based on the data in the database when the form is displayed, not
        # the data in the database when the form is instantiated.
        f = ArticleForm(auto_id=False)
        self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
        c4 = Category.objects.create(name='Fourth', url='4th')
        self.assertEqual(c4.name, 'Fourth')
        w_bernstein = Writer.objects.create(name='Carl Bernstein')
        self.assertEqual(w_bernstein.name, 'Carl Bernstein')
        # Re-rendering the *same* form instance now includes the new rows.
        self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Carl Bernstein</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third</option>
<option value="%s">Fourth</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_bernstein.pk, w_royko.pk, c1.pk, c2.pk, c3.pk, c4.pk))
        # ModelChoiceField ############################################################
        f = forms.ModelChoiceField(Category.objects.all())
        self.assertEqual(list(f.choices), [
            ('', '---------'),
            (c1.pk, 'Entertainment'),
            (c2.pk, "It's a test"),
            (c3.pk, 'Third'),
            (c4.pk, 'Fourth')])
        with self.assertRaises(ValidationError):
            f.clean('')
        with self.assertRaises(ValidationError):
            f.clean(None)
        with self.assertRaises(ValidationError):
            f.clean(0)
        self.assertEqual(f.clean(c3.id).name, 'Third')
        self.assertEqual(f.clean(c2.id).name, "It's a test")
        # Add a Category object *after* the ModelChoiceField has already been
        # instantiated. This proves clean() checks the database during clean() rather
        # than caching it at time of instantiation.
        c5 = Category.objects.create(name='Fifth', url='5th')
        self.assertEqual(c5.name, 'Fifth')
        self.assertEqual(f.clean(c5.id).name, 'Fifth')
        # Delete a Category object *after* the ModelChoiceField has already been
        # instantiated. This proves clean() checks the database during clean() rather
        # than caching it at time of instantiation.
        Category.objects.get(url='5th').delete()
        with self.assertRaises(ValidationError):
            f.clean(c5.id)
        f = forms.ModelChoiceField(Category.objects.filter(pk=c1.id), required=False)
        self.assertEqual(f.clean(''), None)
        # NOTE(review): this second clean('') looks redundant with the line above;
        # presumably kept to assert it doesn't raise — confirm before removing.
        f.clean('')
        self.assertEqual(f.clean(str(c1.id)).name, "Entertainment")
        with self.assertRaises(ValidationError):
            f.clean('100')
        # queryset can be changed after the field is created.
        f.queryset = Category.objects.exclude(name='Fourth')
        self.assertEqual(list(f.choices), [
            ('', '---------'),
            (c1.pk, 'Entertainment'),
            (c2.pk, "It's a test"),
            (c3.pk, 'Third')])
        self.assertEqual(f.clean(c3.id).name, 'Third')
        with self.assertRaises(ValidationError):
            f.clean(c4.id)
        # check that we can safely iterate choices repeatedly
        gen_one = list(f.choices)
        gen_two = f.choices
        self.assertEqual(gen_one[2], (c2.pk, "It's a test"))
        self.assertEqual(list(gen_two), [
            ('', '---------'),
            (c1.pk, 'Entertainment'),
            (c2.pk, "It's a test"),
            (c3.pk, 'Third')])
        # check that we can override the label_from_instance method to print custom labels (#4620)
        f.queryset = Category.objects.all()
        f.label_from_instance = lambda obj: "category " + str(obj)
        self.assertEqual(list(f.choices), [
            ('', '---------'),
            (c1.pk, 'category Entertainment'),
            (c2.pk, "category It's a test"),
            (c3.pk, 'category Third'),
            (c4.pk, 'category Fourth')])
        # ModelMultipleChoiceField ####################################################
        f = forms.ModelMultipleChoiceField(Category.objects.all())
        self.assertEqual(list(f.choices), [
            (c1.pk, 'Entertainment'),
            (c2.pk, "It's a test"),
            (c3.pk, 'Third'),
            (c4.pk, 'Fourth')])
        with self.assertRaises(ValidationError):
            f.clean(None)
        with self.assertRaises(ValidationError):
            f.clean([])
        # Both ints and digit strings (and mixes of the two) are accepted as pks.
        self.assertEqual(map(lambda o: o.name, f.clean([c1.id])), ["Entertainment"])
        self.assertEqual(map(lambda o: o.name, f.clean([c2.id])), ["It's a test"])
        self.assertEqual(map(lambda o: o.name, f.clean([str(c1.id)])), ["Entertainment"])
        self.assertEqual(map(lambda o: o.name, f.clean([str(c1.id), str(c2.id)])), ["Entertainment", "It's a test"])
        self.assertEqual(map(lambda o: o.name, f.clean([c1.id, str(c2.id)])), ["Entertainment", "It's a test"])
        self.assertEqual(map(lambda o: o.name, f.clean((c1.id, str(c2.id)))), ["Entertainment", "It's a test"])
        with self.assertRaises(ValidationError):
            f.clean(['100'])
        with self.assertRaises(ValidationError):
            f.clean('hello')
        with self.assertRaises(ValidationError):
            f.clean(['fail'])
        # Add a Category object *after* the ModelMultipleChoiceField has already been
        # instantiated. This proves clean() checks the database during clean() rather
        # than caching it at time of instantiation.
        c6 = Category.objects.create(id=6, name='Sixth', url='6th')
        self.assertEqual(c6.name, 'Sixth')
        self.assertEqual(map(lambda o: o.name, f.clean([c6.id])), ["Sixth"])
        # Delete a Category object *after* the ModelMultipleChoiceField has already been
        # instantiated. This proves clean() checks the database during clean() rather
        # than caching it at time of instantiation.
        Category.objects.get(url='6th').delete()
        with self.assertRaises(ValidationError):
            f.clean([c6.id])
        f = forms.ModelMultipleChoiceField(Category.objects.all(), required=False)
        self.assertEqual(f.clean([]), [])
        self.assertEqual(f.clean(()), [])
        # One invalid pk anywhere in the list invalidates the whole submission.
        with self.assertRaises(ValidationError):
            f.clean(['10'])
        with self.assertRaises(ValidationError):
            f.clean([str(c3.id), '10'])
        with self.assertRaises(ValidationError):
            f.clean([str(c1.id), '10'])
        # queryset can be changed after the field is created.
        f.queryset = Category.objects.exclude(name='Fourth')
        self.assertEqual(list(f.choices), [
            (c1.pk, 'Entertainment'),
            (c2.pk, "It's a test"),
            (c3.pk, 'Third')])
        self.assertEqual(map(lambda o: o.name, f.clean([c3.id])), ["Third"])
        with self.assertRaises(ValidationError):
            f.clean([c4.id])
        with self.assertRaises(ValidationError):
            f.clean([str(c3.id), str(c4.id)])
        f.queryset = Category.objects.all()
        f.label_from_instance = lambda obj: "multicategory " + str(obj)
        self.assertEqual(list(f.choices), [
            (c1.pk, 'multicategory Entertainment'),
            (c2.pk, "multicategory It's a test"),
            (c3.pk, 'multicategory Third'),
            (c4.pk, 'multicategory Fourth')])
        # OneToOneField ###############################################################
        self.assertEqual(ImprovedArticleForm.base_fields.keys(), ['article'])
        # A parent_link OneToOneField is excluded from the generated form.
        self.assertEqual(ImprovedArticleWithParentLinkForm.base_fields.keys(), [])
        bw = BetterWriter(name='Joe Better', score=10)
        bw.save()
        self.assertEqual(sorted(model_to_dict(bw).keys()),
                         ['id', 'name', 'score', 'writer_ptr'])
        form = BetterWriterForm({'name': 'Some Name', 'score': 12})
        self.assertEqual(form.is_valid(), True)
        bw2 = form.save()
        bw2.delete()
        form = WriterProfileForm()
        self.assertHTMLEqual(form.as_p(), '''<p><label for="id_writer">Writer:</label> <select name="writer" id="id_writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Carl Bernstein</option>
<option value="%s">Joe Better</option>
<option value="%s">Mike Royko</option>
</select></p>
<p><label for="id_age">Age:</label> <input type="text" name="age" id="id_age" /></p>''' % (w_woodward.pk, w_bernstein.pk, bw.pk, w_royko.pk))
        data = {
            'writer': unicode(w_woodward.pk),
            'age': '65',
        }
        form = WriterProfileForm(data)
        instance = form.save()
        self.assertEqual(unicode(instance), 'Bob Woodward is 65')
        form = WriterProfileForm(instance=instance)
        self.assertHTMLEqual(form.as_p(), '''<p><label for="id_writer">Writer:</label> <select name="writer" id="id_writer">
<option value="">---------</option>
<option value="%s" selected="selected">Bob Woodward</option>
<option value="%s">Carl Bernstein</option>
<option value="%s">Joe Better</option>
<option value="%s">Mike Royko</option>
</select></p>
<p><label for="id_age">Age:</label> <input type="text" name="age" value="65" id="id_age" /></p>''' % (w_woodward.pk, w_bernstein.pk, bw.pk, w_royko.pk))
def test_phone_number_field(self):
f = PhoneNumberForm({'phone': '(312) 555-1212', 'description': 'Assistance'})
self.assertEqual(f.is_valid(), True)
self.assertEqual(f.cleaned_data['phone'], '312-555-1212')
self.assertEqual(f.cleaned_data['description'], 'Assistance')
    def test_file_field(self):
        """Exercise FileField on a ModelForm: missing/empty files, uploads,
        max_length enforcement, editing without re-upload, and the
        non-required case. Files are deleted manually after each save
        because Django does not remove them."""
        # Test conditions when files is either not given or empty.
        f = TextFileForm(data={'description': 'Assistance'})
        self.assertEqual(f.is_valid(), False)
        f = TextFileForm(data={'description': 'Assistance'}, files={})
        self.assertEqual(f.is_valid(), False)
        # Upload a file and ensure it all works as expected.
        f = TextFileForm(
            data={'description': 'Assistance'},
            files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
        self.assertEqual(f.is_valid(), True)
        self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
        instance = f.save()
        self.assertEqual(instance.file.name, 'tests/test1.txt')
        instance.file.delete()
        # A second upload of the same name lands at the same path.
        f = TextFileForm(
            data={'description': 'Assistance'},
            files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
        self.assertEqual(f.is_valid(), True)
        self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
        instance = f.save()
        self.assertEqual(instance.file.name, 'tests/test1.txt')
        # Check if the max_length attribute has been inherited from the model.
        f = TextFileForm(
            data={'description': 'Assistance'},
            files={'file': SimpleUploadedFile('test-maxlength.txt', b'hello world')})
        self.assertEqual(f.is_valid(), False)
        # Edit an instance that already has the file defined in the model. This will not
        # save the file again, but leave it exactly as it is.
        f = TextFileForm(
            data={'description': 'Assistance'},
            instance=instance)
        self.assertEqual(f.is_valid(), True)
        self.assertEqual(f.cleaned_data['file'].name, 'tests/test1.txt')
        instance = f.save()
        self.assertEqual(instance.file.name, 'tests/test1.txt')
        # Delete the current file since this is not done by Django.
        instance.file.delete()
        # Override the file by uploading a new one.
        f = TextFileForm(
            data={'description': 'Assistance'},
            files={'file': SimpleUploadedFile('test2.txt', b'hello world')}, instance=instance)
        self.assertEqual(f.is_valid(), True)
        instance = f.save()
        self.assertEqual(instance.file.name, 'tests/test2.txt')
        # Delete the current file since this is not done by Django.
        instance.file.delete()
        f = TextFileForm(
            data={'description': 'Assistance'},
            files={'file': SimpleUploadedFile('test2.txt', b'hello world')})
        self.assertEqual(f.is_valid(), True)
        instance = f.save()
        self.assertEqual(instance.file.name, 'tests/test2.txt')
        # Delete the current file since this is not done by Django.
        instance.file.delete()
        instance.delete()
        # Test the non-required FileField
        f = TextFileForm(data={'description': 'Assistance'})
        f.fields['file'].required = False
        self.assertEqual(f.is_valid(), True)
        instance = f.save()
        # No file supplied: the field stores the empty string.
        self.assertEqual(instance.file.name, '')
        f = TextFileForm(
            data={'description': 'Assistance'},
            files={'file': SimpleUploadedFile('test3.txt', b'hello world')}, instance=instance)
        self.assertEqual(f.is_valid(), True)
        instance = f.save()
        self.assertEqual(instance.file.name, 'tests/test3.txt')
        # Instance can be edited w/out re-uploading the file and existing file should be preserved.
        f = TextFileForm(
            data={'description': 'New Description'},
            instance=instance)
        f.fields['file'].required = False
        self.assertEqual(f.is_valid(), True)
        instance = f.save()
        self.assertEqual(instance.description, 'New Description')
        self.assertEqual(instance.file.name, 'tests/test3.txt')
        # Delete the current file since this is not done by Django.
        instance.file.delete()
        instance.delete()
        f = TextFileForm(
            data={'description': 'Assistance'},
            files={'file': SimpleUploadedFile('test3.txt', b'hello world')})
        self.assertEqual(f.is_valid(), True)
        instance = f.save()
        self.assertEqual(instance.file.name, 'tests/test3.txt')
        # Delete the current file since this is not done by Django.
        instance.file.delete()
        instance.delete()
def test_big_integer_field(self):
bif = BigIntForm({'biggie': '-9223372036854775808'})
self.assertEqual(bif.is_valid(), True)
bif = BigIntForm({'biggie': '-9223372036854775809'})
self.assertEqual(bif.is_valid(), False)
self.assertEqual(bif.errors, {'biggie': ['Ensure this value is greater than or equal to -9223372036854775808.']})
bif = BigIntForm({'biggie': '9223372036854775807'})
self.assertEqual(bif.is_valid(), True)
bif = BigIntForm({'biggie': '9223372036854775808'})
self.assertEqual(bif.is_valid(), False)
self.assertEqual(bif.errors, {'biggie': ['Ensure this value is less than or equal to 9223372036854775807.']})
@skipUnless(test_images, "PIL not installed")
def test_image_field(self):
    """ImageField create/edit/override behaviour, including the auto-populated
    width/height dimension fields and callable upload_to support."""
    # ImageField and FileField are nearly identical, but they differ slightly when
    # it comes to validation. This specifically tests that #6302 is fixed for
    # both file fields and image fields.
    with open(os.path.join(os.path.dirname(__file__), "test.png"), 'rb') as fp:
        image_data = fp.read()
    with open(os.path.join(os.path.dirname(__file__), "test2.png"), 'rb') as fp:
        image_data2 = fp.read()
    f = ImageFileForm(
        data={'description': 'An image'},
        files={'image': SimpleUploadedFile('test.png', image_data)})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test.png')
    # Dimension fields are filled in automatically from the uploaded image.
    self.assertEqual(instance.width, 16)
    self.assertEqual(instance.height, 16)
    # Delete the current file since this is not done by Django, but don't save
    # because the dimension fields are not null=True.
    instance.image.delete(save=False)
    # Upload the same image again on a fresh form.
    f = ImageFileForm(
        data={'description': 'An image'},
        files={'image': SimpleUploadedFile('test.png', image_data)})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test.png')
    self.assertEqual(instance.width, 16)
    self.assertEqual(instance.height, 16)
    # Edit an instance that already has the (required) image defined in the model. This will not
    # save the image again, but leave it exactly as it is.
    f = ImageFileForm(data={'description': 'Look, it changed'}, instance=instance)
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data['image'].name, 'tests/test.png')
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test.png')
    self.assertEqual(instance.height, 16)
    self.assertEqual(instance.width, 16)
    # Delete the current file since this is not done by Django, but don't save
    # because the dimension fields are not null=True.
    instance.image.delete(save=False)
    # Override the file by uploading a new one.
    f = ImageFileForm(
        data={'description': 'Changed it'},
        files={'image': SimpleUploadedFile('test2.png', image_data2)}, instance=instance)
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    # The dimensions now reflect the second (32x48) image.
    self.assertEqual(instance.image.name, 'tests/test2.png')
    self.assertEqual(instance.height, 32)
    self.assertEqual(instance.width, 48)
    # Delete the current file since this is not done by Django, but don't save
    # because the dimension fields are not null=True.
    instance.image.delete(save=False)
    instance.delete()
    f = ImageFileForm(
        data={'description': 'Changed it'},
        files={'image': SimpleUploadedFile('test2.png', image_data2)})
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test2.png')
    self.assertEqual(instance.height, 32)
    self.assertEqual(instance.width, 48)
    # Delete the current file since this is not done by Django, but don't save
    # because the dimension fields are not null=True.
    instance.image.delete(save=False)
    instance.delete()
    # Test the non-required ImageField
    # Note: In Oracle, we expect a null ImageField to return '' instead of
    # None.
    if connection.features.interprets_empty_strings_as_nulls:
        expected_null_imagefield_repr = ''
    else:
        expected_null_imagefield_repr = None
    f = OptionalImageFileForm(data={'description': 'Test'})
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.image.name, expected_null_imagefield_repr)
    # With no image, the dimension fields stay unset.
    self.assertEqual(instance.width, None)
    self.assertEqual(instance.height, None)
    f = OptionalImageFileForm(
        data={'description': 'And a final one'},
        files={'image': SimpleUploadedFile('test3.png', image_data)}, instance=instance)
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test3.png')
    self.assertEqual(instance.width, 16)
    self.assertEqual(instance.height, 16)
    # Editing the instance without re-uploading the image should not affect the image or its width/height properties
    f = OptionalImageFileForm(
        data={'description': 'New Description'},
        instance=instance)
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.description, 'New Description')
    self.assertEqual(instance.image.name, 'tests/test3.png')
    self.assertEqual(instance.width, 16)
    self.assertEqual(instance.height, 16)
    # Delete the current file since this is not done by Django.
    instance.image.delete()
    instance.delete()
    f = OptionalImageFileForm(
        data={'description': 'And a final one'},
        files={'image': SimpleUploadedFile('test4.png', image_data2)}
    )
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test4.png')
    self.assertEqual(instance.width, 48)
    self.assertEqual(instance.height, 32)
    instance.delete()
    # Test callable upload_to behavior that's dependent on the value of another field in the model
    f = ImageFileForm(
        data={'description': 'And a final one', 'path': 'foo'},
        files={'image': SimpleUploadedFile('test4.png', image_data)})
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    # The 'path' field value is used by the callable upload_to.
    self.assertEqual(instance.image.name, 'foo/test4.png')
    instance.delete()
def test_media_on_modelform(self):
    """Custom Media on a ModelForm renders; plus assorted field validations
    (CommaSeparatedIntegerField, excluded required fields, choice fields)."""
    # Similar to a regular Form class you can define custom media to be used on
    # the ModelForm.
    f = ModelFormWithMedia()
    self.assertHTMLEqual(unicode(f.media), '''<link href="/some/form/css" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/some/form/javascript"></script>''')
    # CommaSeparatedIntegerField: only digits and commas are accepted.
    f = CommaSeparatedIntegerForm({'field': '1,2,3'})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data, {'field': '1,2,3'})
    f = CommaSeparatedIntegerForm({'field': '1a,2'})
    self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
    # Empty segments between commas are tolerated.
    f = CommaSeparatedIntegerForm({'field': ',,,,'})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data, {'field': ',,,,'})
    f = CommaSeparatedIntegerForm({'field': '1.2'})
    self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
    f = CommaSeparatedIntegerForm({'field': '1,a,2'})
    self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
    f = CommaSeparatedIntegerForm({'field': '1,,2'})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data, {'field': '1,,2'})
    f = CommaSeparatedIntegerForm({'field': '1'})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data, {'field': '1'})
    # This Price instance generated by this form is not valid because the quantity
    # field is required, but the form is valid because the field is excluded from
    # the form. This is for backwards compatibility.
    form = PriceFormWithoutQuantity({'price': '6.00'})
    self.assertEqual(form.is_valid(), True)
    price = form.save(commit=False)
    with self.assertRaises(ValidationError):
        price.full_clean()
    # The form should not validate fields that it doesn't contain even if they are
    # specified using 'fields', not 'exclude'.
    # NOTE(review): this bare nested Meta is never attached to any form class —
    # it looks like a leftover from a local ModelForm definition; confirm intent.
    class Meta:
        model = Price
        fields = ('price',)
    form = PriceFormWithoutQuantity({'price': '6.00'})
    self.assertEqual(form.is_valid(), True)
    # The form should still have an instance of a model that is not complete and
    # not saved into a DB yet.
    self.assertEqual(form.instance.price, Decimal('6.00'))
    self.assertEqual(form.instance.quantity is None, True)
    self.assertEqual(form.instance.pk is None, True)
    # Choices on CharField and IntegerField
    f = ArticleForm()
    with self.assertRaises(ValidationError):
        f.fields['status'].clean('42')
    f = ArticleStatusForm()
    with self.assertRaises(ValidationError):
        f.fields['status'].clean('z')
def test_foreignkeys_which_use_to_field(self):
    """ModelChoiceField and ModelMultipleChoiceField honour to_field_name:
    choice values and cleaned lookups use the barcode, not the primary key."""
    apple = Inventory.objects.create(barcode=86, name='Apple')
    pear = Inventory.objects.create(barcode=22, name='Pear')
    core = Inventory.objects.create(barcode=87, name='Core', parent=apple)
    field = forms.ModelChoiceField(Inventory.objects.all(), to_field_name='barcode')
    # Choice values are barcodes rather than primary keys.
    self.assertEqual(tuple(field.choices), (
        ('', '---------'),
        (86, 'Apple'),
        (87, 'Core'),
        (22, 'Pear')))
    form = InventoryForm(instance=core)
    self.assertHTMLEqual(unicode(form['parent']), '''<select name="parent" id="id_parent">
<option value="">---------</option>
<option value="86" selected="selected">Apple</option>
<option value="87">Core</option>
<option value="22">Pear</option>
</select>''')
    # Re-parenting via the barcode value round-trips through save().
    data = model_to_dict(core)
    data['parent'] = '22'
    form = InventoryForm(data=data, instance=core)
    core = form.save()
    self.assertEqual(core.parent.name, 'Pear')

    class CategoryForm(forms.ModelForm):
        description = forms.CharField()

        class Meta:
            model = Category
            fields = ['description', 'url']

    # Explicitly declared fields keep the Meta.fields ordering.
    self.assertEqual(CategoryForm.base_fields.keys(),
        ['description', 'url'])
    self.assertHTMLEqual(unicode(CategoryForm()), '''<tr><th><label for="id_description">Description:</label></th><td><input type="text" name="description" id="id_description" /></td></tr>
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>''')
    # to_field_name should also work on ModelMultipleChoiceField ##################
    field = forms.ModelMultipleChoiceField(Inventory.objects.all(), to_field_name='barcode')
    self.assertEqual(tuple(field.choices), ((86, 'Apple'), (87, 'Core'), (22, 'Pear')))
    self.assertEqual(map(lambda o: o.name, field.clean([86])), ['Apple'])
    form = SelectInventoryForm({'items': [87, 22]})
    self.assertEqual(form.is_valid(), True)
    self.assertEqual(len(form.cleaned_data), 1)
    self.assertEqual(map(lambda o: o.name, form.cleaned_data['items']), ['Core', 'Pear'])
def test_model_field_that_returns_none_to_exclude_itself_with_explicit_fields(self):
    """A model field whose formfield() returns None is dropped from the form
    even when listed explicitly in Meta.fields."""
    self.assertEqual(CustomFieldForExclusionForm.base_fields.keys(), ['name'])
    self.assertHTMLEqual(unicode(CustomFieldForExclusionForm()),
        '''<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="10" /></td></tr>''')
|
19,516 | a0c22ba031f30f9dc84dcc29b66d1029eaca4221 | # coding=utf-8
import time
from public.common import mytest
from public.pages import qdsIndexPage
from public.common.publicfunction import get_img
class bmkstest(mytest.MyTest):
    """Nanny registration test suite. (original docstring: 保姆注册测试集)"""

    def test_bmks(self):
        """Nanny quick-registration end-to-end flow. (original: 保姆快速注册)"""
        qds = qdsIndexPage.DDSIndexPage(self.dr)
        qds.into_qds_page()
        time.sleep(1)
        qds.cookies()
        # New-style home page (the bare string below means "new home page"
        # and is a no-op marker kept from the original).
        """新版首页"""
        self.dr.click("css->body > div.section-banner > div.public-navbar > div > ul > li:nth-child(1) > a")
        time.sleep(1)
        # The page title must contain the trademark-registration heading.
        self.assertIn("商标注册-权大师", self.dr.get_title())
        print(qds.return_title())
        # Enter the nanny quick-registration product (original: 保姆快速注册).
        self.dr.click(
            "css->body > div.section-product.width1200 > dl > dd > div.cont-serviceItems > table > tbody > tr > td.td-cont > ul > li.list.active")
        ##total-price
        # Read the quoted total price ("费用总计" = total cost) from the product page.
        for a in self.dr.get_elements("css->#total-price"):
            print("费用总计:" + a.text)
            aa = a.text
        self.dr.click(
            "css->body > div.section-product.width1200 > dl > dd > div.cont-btnBuy > a.btn.btn-next.buynow")
        # Fill the contact details: name, phone, e-mail.
        self.dr.clear_type("name->ownerContactPerson","全大师")
        self.dr.clear_type("css->body > div.myOrder-wrap > div.section-myorder.width1200 > div > table:nth-child(2) > tbody > tr:nth-child(2) > td.td-2 > input", "15624992498")
        self.dr.clear_type("css->body > div.myOrder-wrap > div.section-myorder.width1200 > div > table:nth-child(2) > tbody > tr:nth-child(3) > td.td-2 > input","4564564@qq.com")
        get_img(self.dr, "bmkstest.png")
        # The order-page total ("总价") must match the product-page quote.
        for i in self.dr.get_elements(
            "css->body > div.myOrder-wrap > div.section-myorder.width1200 > div > div > ul > li.row-sense > em > i"):
            print("总价:" + i.text)
            ii = i.text
        self.assertIn(aa, ii)
        print("价格一致")
        self.dr.click(
            "css->body > div.myOrder-wrap > div.section-myorder.width1200 > div > div > ul > li.row-step > a.btn-next.submitOrder")
        time.sleep(2)
        # After submitting, the payable amount ("应付金额") must match the total.
        for o in self.dr.get_elements("class->payable"):
            print("订单提交成功,应付金额:" + o.text)
            oo = o.text
        time.sleep(2)
        self.assertIn(oo, ii)
        print("测试通过")
        self.dr.click("id->alisubmit")
|
19,517 | c1fba9928187054e7024aa5b9a5500487b0b8c04 | import sys
import os
# Make the ITransE package (located at ../../src/ITransE) importable.
new_path = os.path.join(os.path.dirname(__file__), '../../src/ITransE')
sys.path.append(new_path)
from ITransE import ITransE

# 75-dimensional embedding model; the checkpoint lives next to this script.
model = ITransE(dim=75, save_dir=os.path.join(os.path.dirname(__file__), 'model_ItransE_person_15k.bin'))
# Multi-task training on the English/French WK3l-15k person graphs plus their
# cross-lingual intersection graph; a checkpoint is written every 100 of the
# 400 epochs, with per-language model files model_en.bin / model_fr.bin.
model.Train_MT(epochs=400, save_every_epochs=100, languages=['en', 'fr'], graphs=[os.path.join(os.path.dirname(__file__), '../../data/WK3l-15k/en_fr/P_en_v5.csv'),os.path.join(os.path.dirname(__file__), '../../data/WK3l-15k/en_fr/P_fr_v5.csv')], intersect_graph=os.path.join(os.path.dirname(__file__), '../../data/WK3l-15k/en_fr/P_en_fr_v5.csv'), save_dirs = ['model_en.bin','model_fr.bin'], rate=0.01, split_rate=True, L1_flag=False)
# Persist the final model.
model.save(os.path.join(os.path.dirname(__file__), 'model_ItransE_person_15k.bin'))
|
19,518 | 19f0762898b12f3ebec6f36635d69cd18afc2cdd | from django.contrib import admin
from django.urls import path
from Home import views
# Route table for the Home app; each route maps to a view in Home/views.py.
# Commented-out entries are kept for reference.
urlpatterns = [
    # path('admin/', admin.site.urls),
    #path('login', views.loginU,name = 'login'),
    path('logout', views.logoutU,name = 'logout'),
    path('login1.html', views.login1,name = 'login1'),
    path('', views.index,name = 'index'),
    path('aboutus', views.aboutus,name = 'aboutus'),
    path('catalog', views.catalog,name = 'catalog'),
    path('contactus', views.contactus,name = 'contactus'),
    # Static category pages c1–c6.
    path('c1.html', views.c1,name = 'c1.html'),
    path('c2.html', views.c2,name = 'c2.html'),
    path('c3.html', views.c3,name = 'c3.html'),
    path('c4.html', views.c4,name = 'c4.html'),
    path('c5.html', views.c5,name = 'c5.html'),
    path('c6.html', views.c6,name = 'c6.html'),
    path('resources.html', views.resources, name='resources.html'),
    path('signup.html', views.signup,name = 'signup.html'),
    path('upload_file.html', views.upload_file, name='upload_file'),
    # Faculty-specific login and catalog.
    path('login_faculty.html', views.login_faculty,name = 'login_faculty'),
    path('catalog_faculty.html', views.catalog_faculty,name = 'catalog_faculty'),
]
|
19,519 | f4e2c778976ad0e12fd9a7648b87a35c2a95d736 | '''
30/06/2020
1496. Path Crossing - Easy
Tag: String
Given a string path, where path[i] = 'N', 'S', 'E' or 'W', each representing moving one unit north, south, east, or west, respectively. You start at the origin (0, 0) on a 2D plane and walk on the path specified by path.
Return True if the path crosses itself at any point, that is, if at any time you are on a location you've previously visited. Return False otherwise.
Example 1:
Input: path = "NES"
Output: false
Explanation: Notice that the path doesn't cross any point more than once.
Example 2:
Input: path = "NESWW"
Output: true
Explanation: Notice that the path visits the origin twice.
Constraints:
1 <= path.length <= 10^4
path will only consist of characters in {'N', 'S', 'E', 'W'}
'''
from typing import List
# Solution
class Solution1:
    '''
    Track every visited coordinate in a set; a revisit means the path crosses.

    Time complexity : O(n)
    Space complexity : O(n)
    '''

    # Unit moves on the plane. Fix: the original swapped east/west
    # ('E' decremented x and 'W' incremented x). The crossing answer was
    # unaffected (mirror symmetry), but the coordinates were wrong for any
    # reuse of the traversal.
    _MOVES = {'N': (0, 1), 'S': (0, -1), 'E': (1, 0), 'W': (-1, 0)}

    def isPathCrossing(self, path: str) -> bool:
        """Return True if the walk described by *path* visits any point twice.

        path: string of moves drawn from 'N', 'S', 'E', 'W', starting at (0, 0).
        """
        x, y = 0, 0
        visited = {(x, y)}
        for step in path:
            dx, dy = self._MOVES[step]
            x, y = x + dx, y + dy
            if (x, y) in visited:
                return True
            visited.add((x, y))
        return False
# Unit Test
import unittest
class TestCase(unittest.TestCase):
    """Checks isPathCrossing against the two examples from the problem."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_testCase(self):
        # Run every solution class through the same example set.
        for Sol in [Solution1()]:
            func = Sol.isPathCrossing
            self.assertEqual(func("NES"), False)
            self.assertEqual(func("NESWW"), True)
if __name__ == '__main__':
unittest.main() |
19,520 | c20df4ee05e68440cb8cfea41361828045e1544a | import pyopenjtalk
def _print_results(njd_features, labels):
for f in njd_features:
s, p = f["string"], f["pron"]
print(s, p)
for label in labels:
print(label)
def test_hello():
    """Smoke test: frontend and label generation run on a simple greeting."""
    njd_features = pyopenjtalk.run_frontend("こんにちは")
    labels = pyopenjtalk.make_label(njd_features)
    _print_results(njd_features, labels)
def test_njd_features():
    """run_frontend must return the exact NJD feature dict for こんにちは."""
    njd_features = pyopenjtalk.run_frontend("こんにちは")
    expected_feature = [
        {
            "string": "こんにちは",
            "pos": "感動詞",
            "pos_group1": "*",
            "pos_group2": "*",
            "pos_group3": "*",
            "ctype": "*",
            "cform": "*",
            "orig": "こんにちは",
            "read": "コンニチハ",
            "pron": "コンニチワ",
            "acc": 0,
            "mora_size": 5,
            "chain_rule": "-1",
            "chain_flag": -1,
        }
    ]
    assert njd_features == expected_feature
def test_fullcontext():
    """make_label(run_frontend(t)) must agree with extract_fullcontext(t)."""
    text = "こんにちは"
    via_frontend = pyopenjtalk.make_label(pyopenjtalk.run_frontend(text))
    direct = pyopenjtalk.extract_fullcontext(text)
    for expected, actual in zip(via_frontend, direct):
        assert expected == actual
def test_jtalk():
    """Round trip: joining the surface strings must reproduce the input text."""
    samples = (
        "今日も良い天気ですね",
        "こんにちは。",
        "どんまい!",
        "パソコンのとりあえず知っておきたい使い方",
    )
    for text in samples:
        njd_features = pyopenjtalk.run_frontend(text)
        labels = pyopenjtalk.make_label(njd_features)
        _print_results(njd_features, labels)
        surface = "".join(feature["string"] for feature in njd_features)
        assert surface == text
def test_g2p_kana():
    """g2p with kana=True must yield the expected katakana pronunciations."""
    cases = (
        ("今日もこんにちは", "キョーモコンニチワ"),
        ("いやあん", "イヤーン"),
        ("パソコンのとりあえず知っておきたい使い方", "パソコンノトリアエズシッテオキタイツカイカタ"),
    )
    for text, expected in cases:
        assert pyopenjtalk.g2p(text, kana=True) == expected
def test_g2p_phone():
    """g2p with kana=False must yield the expected space-separated phonemes."""
    cases = (
        ("こんにちは", "k o N n i ch i w a"),
        ("ななみんです", "n a n a m i N d e s U"),
        ("ハローユーチューブ", "h a r o o y u u ch u u b u"),
    )
    for text, expected in cases:
        assert pyopenjtalk.g2p(text, kana=False) == expected
|
19,521 | 5756e1f17851c903e68926a0677aab6e616264ea | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
from odoo.addons.base.res.res_partner import WARNING_MESSAGE, WARNING_HELP
class ResPartner(models.Model):
    """Extends res.partner with the partner's rejected-sale records."""
    _inherit = 'res.partner'

    # One2many over 'sale.reject' keyed by partner_id.
    # NOTE(review): the field is named sale_order_ids but targets the
    # 'sale.reject' model — confirm the comodel/name pairing is intended.
    sale_order_ids = fields.One2many('sale.reject', 'partner_id')
|
19,522 | 8c2b7044069f0ead302fa54689cc31899ffbcb21 | from typing import List
import numpy as np
from Utils.knapsack import Item, Knapsack
def kpBacktracking(W: int, items: List[Item]):
    """Solve the 0/1 knapsack by exhaustive recursion.

    Parameters
    ----------
    W : remaining knapsack capacity
    items : candidate items not yet considered

    Returns
    -------
    Knapsack holding the most valuable feasible subset of *items*.

    Bug fix: every recursive call previously passed the arguments swapped
    (``kpBacktracking(items[1:], W)`` against the signature ``(W, items)``),
    which breaks at runtime; the calls now match the signature.
    """
    # Base case: no items left to consider, or no capacity left.
    if not items or W == 0:
        return Knapsack(W)
    item = items[0]
    if item.getWeight() > W:
        # Item cannot fit — it must be skipped.
        return kpBacktracking(W, items[1:])
    # Branch 1: take the item (restore the capacity consumed by the subcall).
    kpWithItem = kpBacktracking(W - item.getWeight(), items[1:])
    kpWithItem.capacity += item.getWeight()
    kpWithItem.pack(item)
    # Branch 2: leave the item.
    kpWithoutItem = kpBacktracking(W, items[1:])
    # Keep whichever branch yields the higher value (ties favour taking it,
    # matching the original comparison).
    if kpWithItem.getValue() >= kpWithoutItem.getValue():
        return kpWithItem
    return kpWithoutItem
def kpDP(W: int, items: List[Item]):
    """Solve the 0/1 knapsack with bottom-up dynamic programming.

    A[i, w] is the best profit achievable using the first i items with
    capacity w. Row 0 (no items) and column 0 (zero capacity) stay zero.

    Cleanup: the original guarded ``Wi == 0`` with ``continue`` and carried an
    unreachable ``idx == 0`` branch (``enumerate`` starts at 1); both are
    replaced by iterating ``range(1, W + 1)`` — behaviour is unchanged.
    """
    n = len(items)
    A = np.zeros((n + 1, W + 1))
    # Fill the table bottom-up.
    for idx, item in enumerate(items, start=1):
        w, p = item.getWeight(), item.getProfit()
        for Wi in range(1, W + 1):
            if w <= Wi:
                # Either skip the item or take it on top of the smaller capacity.
                A[idx, Wi] = max(A[idx - 1, Wi], A[idx - 1, Wi - w] + p)
            else:
                A[idx, Wi] = A[idx - 1, Wi]
    # Trace back which items produced the optimal value: a cell that differs
    # from the row above means item idx-1 was taken.
    kp, idx = Knapsack(W), n
    while idx >= 1 and kp.getCapacity() > 0:
        if A[idx - 1, kp.getCapacity()] != A[idx, kp.getCapacity()]:
            kp.pack(items[idx - 1])
        idx -= 1
    return kp
|
19,523 | 6da25430d0e534c960f50871f46164bec2ad5a7b | import os
import datetime
import cv2
import numpy as np
import pyssim.ssim.ssimlib as pyssim
from skimage.measure import compare_ssim as ssim
from sklearn.cluster import SpectralClustering, AffinityPropagation
from sklearn import metrics,decomposition
# Constant definitions
SIM_IMAGE_SIZE = (640, 480)  # (width, height) every image is resized to before SSIM
IMAGES_PER_CLUSTER = 5  # target average cluster size used to pick n_clusters
#Returns the normalized similarity value (from 0.0 to 1.0) for the provided pair of images.
def get_image_similarity(img1, img2):
    """Return the SSIM similarity (0.0–1.0) of two image files.

    Both images are loaded as grayscale and resized to SIM_IMAGE_SIZE so the
    comparison is resolution-independent.

    Cleanup: removed the dead ``similarity = 0.0`` assignment that was
    immediately overwritten, and a commented-out debug print.
    """
    # Converting to grayscale and resizing
    i1 = cv2.resize(cv2.imread(img1, cv2.IMREAD_GRAYSCALE), SIM_IMAGE_SIZE)
    i2 = cv2.resize(cv2.imread(img2, cv2.IMREAD_GRAYSCALE), SIM_IMAGE_SIZE)
    # Default SSIM implementation of Scikit-Image
    return ssim(i1, i2)
# Fetches all images from the provided directory and calculates the similarity value per image pair.
def build_similarity_matrix(dir_name):
    """Build the symmetric num_images x num_images SSIM similarity matrix
    for every pair of files in *dir_name*."""
    images = os.listdir(dir_name)
    num_images = len(images)
    sm = np.zeros(shape=(num_images, num_images), dtype=np.float64)
    #print(sm.size)
    # An image is always perfectly similar (1.0) to itself.
    np.fill_diagonal(sm, 1.0)
    print("Building the similarity matrix using SSIM algorithm for %d images" %
    (num_images))
    start_total = datetime.datetime.now()
    # Traversing the upper triangle only - transposed matrix will be used later for filling the empty cells.
    # k equals the current row index i (incremented once per outer iteration),
    # so the shifted column j + k only visits entries right of the diagonal.
    k = 0
    print("sm.shape[0] here : ",sm.shape[0]," ",sm.shape[1],"\n")
    for i in range(sm.shape[0]):
        for j in range(sm.shape[1]):
            j = j + k
            if i != j and j < sm.shape[1]:
                sm[i][j] = get_image_similarity('%s/%s' % (dir_name, images[i]),
                    '%s/%s' % (dir_name, images[j]))
        k += 1
    # Adding the transposed matrix and subtracting the diagonal to obtain
    # the symmetric similarity matrix
    sm = sm + sm.T - np.diag(sm.diagonal())
    end_total = datetime.datetime.now()
    print("Done - total calculation time: %d seconds" % (end_total - start_total).total_seconds())
    return sm
# Executes spectral clustering algorithm for similarity-based clustering
def do_cluster(dir_name):
    """Cluster the images in *dir_name* by pairwise SSIM similarity.

    Returns the per-image labels from spectral clustering; the cluster count
    targets an average of IMAGES_PER_CLUSTER images per cluster.
    """
    matrix = build_similarity_matrix(dir_name)
    print("printing matrix",matrix,"\n\n")
    # The similarity matrix is passed directly as a precomputed affinity.
    sc = SpectralClustering(n_clusters=int(matrix.shape[0]/IMAGES_PER_CLUSTER),
        affinity='precomputed').fit(matrix)
    print("printing special cluster matrix",sc,"\n\n")
    return sc.labels_
|
19,524 | 1f0d2124aeee21cfc8208615906ad9c399d59279 | from functools import partial
def my_function(m, n):
    """Print the two operands and their quotient (demo for functools.partial)."""
    print(m, n)
    print(m / n)


# Freeze the denominator at 100 so the partial only needs the numerator.
partfun = partial(my_function, n=100)
partfun(19)
19,525 | 5a99d834aa4ba617f55601cd306e666b196509ef | #wap to input a number and print its factors
# WAP: read a positive integer and print all of its factors.
n = int(input("enter any +ve number: "))
ls = []  # collected factors of n
# Trial-divide by every candidate from 1 to n inclusive.
for i in range(1,n+1):
    if n % i == 0:
        ls.append(i)
print(ls)
19,526 | 88a0c8dbffd4efdeceb1407bcb0c6786ae7444b4 | # 東京大坂まで何時間?
# How many hours from Tokyo to Osaka at a constant speed?
kyori = 507.5  # distance in km (kyori = distance)
jisoku = 100  # speed in km/h (jisoku = speed per hour)
jikan = kyori / jisoku  # travel time in hours (jikan = time)
print(jikan)
|
19,527 | a665377662d8599da714fc5978104d9b8a9b3946 | import argparse
import os
import torch
import torch.utils.data
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
import tqdm
import model.yolov3
import utils.datasets
import utils.utils
# Command-line options: data paths, batch size and detection thresholds.
parser = argparse.ArgumentParser()
parser.add_argument("--image_folder", type=str, default="../../data/voc_test", help="path to image folder")
parser.add_argument("--save_folder", type=str, default='../../demo', help='path to saving result folder')
parser.add_argument("--batch_size", type=int, default=32, help="size of the batches")
parser.add_argument("--num_workers", type=int, default=8, help="number of cpu threads to use during batch generation")
parser.add_argument("--data_config", type=str, default="config/voc.data", help="path to data config file")
parser.add_argument("--pretrained_weights", type=str, default="weights/yolov3_voc.pth",
    help="path to pretrained weights file")
parser.add_argument("--image_size", type=int, default=416, help="size of each image dimension")
parser.add_argument("--conf_thres", type=float, default=0.5, help="object confidence threshold")
parser.add_argument("--nms_thres", type=float, default=0.5, help="iou thresshold for non-maximum suppression")
args = parser.parse_args()
print(args)

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Load the dataset configuration (class count and class-name file).
data_config = utils.utils.parse_data_config(args.data_config)
num_classes = int(data_config['classes'])
class_names = utils.utils.load_classes(data_config['names'])

# Prepare the model.
# NOTE(review): this assignment shadows the imported `model` package; it works
# because the package is not needed afterwards, but renaming one would be safer.
model = model.yolov3.YOLOv3(args.image_size, num_classes).to(device)
if args.pretrained_weights.endswith('.pth'):
    model.load_state_dict(torch.load(args.pretrained_weights))
else:
    model.load_darknet_weights(args.pretrained_weights)

# Dataset and dataloader setup.
dataset = utils.datasets.ImageFolder(args.image_folder, args.image_size)
dataloader = torch.utils.data.DataLoader(dataset,
    batch_size=args.batch_size,
    shuffle=False,
    num_workers=args.num_workers)

# Object-detection loop.
model.eval()  # switch to evaluation mode
img_predictions = []  # per-image prediction results
img_paths = []  # per-image source paths
for paths, images in tqdm.tqdm(dataloader, desc='Batch'):
    with torch.no_grad():
        images = images.to(device)
        prediction = model(images)
        prediction = utils.utils.non_max_suppression(prediction, args.conf_thres, args.nms_thres)

    # Accumulate predictions and their source paths.
    img_predictions.extend(prediction)
    img_paths.extend(paths)

# Bounding-box colormap: one colour per class, cycled through 'Paired'.
cmap = np.array(plt.cm.get_cmap('Paired').colors)
cmap_rgb: list = np.multiply(cmap, 255).astype(np.int32).tolist()

# Save annotated result images.
os.makedirs(args.save_folder, exist_ok=True)
# NOTE(review): dataset.__len__() would conventionally be len(dataset).
for path, prediction in tqdm.tqdm(zip(img_paths, img_predictions), desc='Save images', total=dataset.__len__()):
    # Open the original image.
    path = path.replace('\\', '/')
    image = Image.open(path).convert('RGB')
    draw = ImageDraw.Draw(image)

    if prediction is not None:
        # Rescale the bounding boxes back to the original image size.
        prediction = utils.utils.rescale_boxes_original(prediction, args.image_size, image.size)

        for x1, y1, x2, y2, obj_conf, cls_conf, cls_pred in prediction:
            # Pick the colour for this class.
            color = tuple(cmap_rgb[int(cls_pred) % len(cmap_rgb)])

            # Draw the bounding box.
            draw.rectangle(((x1, y1), (x2, y2)), outline=color, width=2)

            # Draw the label: class name plus objectness percentage.
            text = '{} {:.1f}'.format(class_names[int(cls_pred)], obj_conf.item() * 100)
            font = ImageFont.truetype('calibri.ttf', size=12)
            text_width, text_height = font.getsize(text)
            draw.rectangle(((x1, y1), (x1 + text_width, y1 + text_height)), fill=color)
            draw.text((x1, y1), text, fill=(0, 0, 0), font=font)

    # Save the annotated image under its original file name.
    filename = path.split('/')[-1]
    image.save(os.path.join(args.save_folder, filename))
    image.close()
19,528 | 1e3d126afd01ba3353995df381c91bcf96d8fc61 | version https://git-lfs.github.com/spec/v1
oid sha256:24c11001f7936158881bbcbcee78db69c9bf8cf58917db2dcb42b87230b682ce
size 35959
|
19,529 | 5b37dc9b597f54cdab836990b00433a5aef510c0 | import boto3
import json
from datetime import datetime
class S3BucketService:
    """Thin wrapper around an S3 bucket for writing raw-layer data files."""

    def __init__(self, bucket_name):
        self.s3 = boto3.resource('s3')
        self.bucket_name = bucket_name

    def generate_raw_filename(self, source_name, table_name, environment, seq_number, upload_time, load_type,
                              file_format):
        """
        Generate correct raw file name

        Parameters
        ----------
        source_name : name of the destination source
        table_name : name of table
        environment : current environment
        seq_number : sequence number (zero-padded to 3 digits)
        upload_time : upload time (datetime object)
        load_type : fl | il | dl
        file_format : extension of a file

        Returns
        ----------
        string: lower-cased raw bucket object key
        """
        # %f yields microseconds; dropping the last 3 digits keeps milliseconds.
        file_date = upload_time.strftime("%Y-%m-%d-%H-%M-%S-%f")[:-3]
        res = f'{source_name}/{source_name}_{table_name}/' \
              f'{source_name}_{environment}_{table_name}_{str(seq_number).zfill(3)}_' \
              f'{file_date}_utc_{load_type}.{file_format}'
        return res.lower()

    def write_to_s3(self, json_object, filename):
        """Write *json_object* verbatim as the body of s3://<bucket>/<filename>.

        Fixes: the log line previously printed a literal placeholder instead of
        the destination key; an unused duplicate Object handle and two debug
        prints that dumped the full payload were removed.
        """
        print(f'Writing file to s3://{self.bucket_name}/{filename}')
        self.s3.Object(self.bucket_name, filename).put(Body=json_object)
# for bucket in s3.buckets.all():
# print(bucket) |
19,530 | 077b02104775c37a28f181b70fd57c78d6adf24b | from django.views.generic.list import ListView
from rc.resources.apps.officers.models import *
from rc.resources.views import ResourceItemListView
class OfficerList(ResourceItemListView):
    """List view of campus sustainability officers."""
    allow_empty = True  # render the page even when there are no officers
    model = CampusSustainabilityOfficer
    # Stable display order: sort by the owning organization's picklist name.
    queryset = CampusSustainabilityOfficer.objects.order_by('organization__picklist_name')
19,531 | 42bbbb49cd81fc063afdac1349f7dcdc9600ce1d | import os
import sys
import subprocess
from models.diagnostics import diagnostic_wrapper as dw
from models.core import prediction_run as pr
from models.core import independent_run as ir
from models.core import dependent_run as dr
from models.parser import parameter_parser_factory as ppf
class ModelRun(object):
    """Drives a GNN model run: external execution, post-processing, validation."""

    def __init__(self, parameter_file):
        # The factory returns the parser matching this parameter file's format.
        self.parser = ppf.get_parameter_parser(parameter_file)
        self.parameter_file = parameter_file

    def run(self):
        """Run the external 'gnnrun' tool from the model directory."""
        os.chdir(self.parser.model_directory)
        cmd = 'gnnrun ' + self.parameter_file
        # NOTE(review): subprocess.call with a plain string and shell=False is
        # Windows-only behaviour; POSIX needs a list or shell=True — confirm.
        subprocess.call(cmd)

    def post_process(self):
        """Run the post-processing wrapper over this run's outputs."""
        # Imported lazily, presumably to avoid a circular import — confirm.
        from models.core import post_process_wrapper as ppw
        ppw.main(self.parser)

    def run_validation(self, run_accuracy_diagnostics=True,
        run_outlier_diagnostics=True):
        """Cross-validate the model and optionally run diagnostics.

        run_accuracy_diagnostics : also compute accuracy diagnostics.
        run_outlier_diagnostics : also compute outlier diagnostics; outliers are
            additionally loaded when the parameter set is 'FULL'.
        """
        # Create a PredictionRun object
        prediction_run = pr.PredictionRun(self.parser)
        # Run the PredictionRun to create the neighbor/distance information
        prediction_run.calculate_neighbors_cross_validation()
        # Create an IndependentRun object
        independent_run = ir.IndependentRun(prediction_run)
        # Create the independent predicted data and zonal pixel file
        independent_run.create_predictions('LOC_ID')
        # Create a DependentRun object
        dependent_run = dr.DependentRun(prediction_run)
        # Create the dependent predicted data, zonal pixel file and
        # nn index file
        dependent_run.create_predictions()
        # If either type of diagnostic is requested, create the wrapper
        if run_accuracy_diagnostics or run_outlier_diagnostics:
            diagnostic_wrapper = dw.DiagnosticWrapper(self.parser)
        # Run the accuracy diagnostics if requested
        if run_accuracy_diagnostics:
            diagnostic_wrapper.run_accuracy_diagnostics()
        # Run the outlier diagnostics if present
        if run_outlier_diagnostics:
            diagnostic_wrapper.run_outlier_diagnostics()
            if self.parser.parameter_set == 'FULL':
                diagnostic_wrapper.load_outliers()
def main():
    """CLI entry point: model_run.py <param_file> <run_flag> <aa_flag> <out_flag>.

    (This module uses Python 2 print statements and targets Python 2.)
    """
    try:
        # model parameter file
        parameter_file = sys.argv[1]
        # flag for running GNN model (0=no, 1=yes)
        run_diag = int(sys.argv[2])
        # flag for running accuracy diagnostics (0=no, 1=yes)
        aa_diag = int(sys.argv[3])
        # flag for running outlier diagnostics (0=no, 1=yes)
        out_diag = int(sys.argv[4])
    except:
        # NOTE(review): bare except hides real errors; catching
        # (IndexError, ValueError) would be safer here.
        print 'model_run.py usage:'
        print 'Parameter file: name and location of model input parameter file'
        print 'Full spatial model run flag: 0=no, 1=yes'
        print 'Accuracy diagnostics flag: 0=no, 1=yes'
        print 'Outlier diagnostics flag: 0=no, 1=yes'
    else:
        m = ModelRun(parameter_file)
        if run_diag == 1:
            m.run()
            m.post_process()
        if aa_diag == 1 or out_diag == 1:
            m.run_validation(aa_diag, out_diag)
if __name__ == '__main__':
main()
|
19,532 | f04d2ac3b1ca7abd72f961f5ee6ba2870359c937 | import matplotlib.pyplot as plt
import numpy as np
m=10
# Alternative input: an m x m zero matrix ("szachownica" = checkerboard).
#szachownica=[[0 for i in range(m)] for j in range(m)]
#matrix=np.array(szachownica)
#data = matrix
data = np.random.random((m, m))
fig, ax = plt.subplots()
# Using matshow here just because it sets the ticks up nicely. imshow is faster.
ax.matshow(data, cmap='seismic')
# Annotate each cell with its value (one decimal place), centred.
for (i, j), z in np.ndenumerate(data):
    ax.text(j, i, '{:0.1f}'.format(z), ha='center', va='center')
plt.show()
|
19,533 | 68700e0d72ecfc5eb6d942ae9590e7846db42539 | # 不同的数据类型加法会有不同的解释
# Different data types interpret "+" differently: numeric addition vs. string
# concatenation. (translated from: 不同的数据类型加法会有不同的解释)
print(1 + 2)
print("1" + "2")
class Calculator(object):
    """Tiny wrapper demonstrating operator overloading and __str__ override."""

    def __init__(self, num):
        # The wrapped numeric value.
        self.num = num

    def __add__(self, other):
        # Operator overloading: "+" combines the wrapped values into a new
        # Calculator instance.
        total = self.num + other.num
        return Calculator(total)

    def __str__(self):
        # Method override: gives print() a readable representation.
        return "num = " + str(self.num)
if __name__ == "__main__":
    calc1 = Calculator(1)
    calc2 = Calculator(2)
    # Without __add__ this would raise:
    # TypeError: unsupported operand type(s) for +: 'Calculator' and 'Calculator'
    print(calc1 + calc2)
    # Equivalent to:
    # print(calc1.__add__(calc2))
    print(calc1)
    print(calc2)
|
19,534 | a9f7d57090d323caa229230030de306abcfb734e | from autumn.model import Model
from autumn import validators
from autumn.db.relations import ForeignKey
import validators as custom_validators
class content(Model):
    """Locally-created content item (autumn ORM model).

    Meta.defaults supplies per-column default values; Meta.validations maps
    column names to the validator(s) run before a row is saved.
    """
    #user = ForeignKey('users')
    class Meta:
        defaults = {'count': 0,
                    'count_robot': 0,
                    'favorites': 0,
                    'shares': 0,
                    'comments_count': 0,
                    'comments_updated': None,
                    'order': 0,
                    'album': '',
                    'thumb': '',
                    'style': '',
                    'title': '',
                    'price': 0.00,
                    'date_start': None,
                    'date_end': None,
                    'date_repeats': 0,
                    'template': '',
                    'sort_type': '',
                    'redirect': 0,
                    'forum': 0,
                    'hidden': 0,
                    'favorited': 0,
                    'is_spam': 0,
                    'deleted': 0,
                    'thread': '',
                    'thread_user': '',
                    'avatar': '',
                    'view': '',
                    'code': '',}
        validations = {'section': (custom_validators.UnicodeString(1, 255),
                                   custom_validators.Id()),
                       'album': (custom_validators.UnicodeString(0, 255),
                                 custom_validators.IdOrNull()),
                       'name': (custom_validators.UnicodeString(1, 255),
                                custom_validators.Id()),
                       'username': (custom_validators.UnicodeString(1, 255),
                                    custom_validators.Alphanumeric()),
                       'date_created': custom_validators.Datetime(),
                       'date_updated': custom_validators.Datetime(),
                       'date_start': custom_validators.DatetimeOrNull(),
                       'date_end': custom_validators.DatetimeOrNull(),
                       'date_repeats': validators.Number(),
                       'count': validators.Number(),
                       'count_robot': validators.Number(),
                       'favorites': validators.Number(),
                       'shares': validators.Number(),
                       'comments_count': validators.Number(),
                       'price': validators.Number(),
                       'hidden': custom_validators.Boolean(),
                       }
class content_remote(Model):
    """Content mirrored from a remote user/server (autumn ORM model)."""
    class Meta:
        defaults = {'type': '',
                    'from_user': '',
                    'local_content_name': '',
                    'username': '',
                    'creator': '',
                    'title': '',
                    'post_id': '',
                    'read': '0',
                    'is_spam': 0,
                    'favorited': 0,
                    'deleted': 0,
                    'date_updated': None,
                    'comments_count': 0,
                    'comments_updated': None,
                    'thread': '',
                    'avatar': '',
                    'link': '',
                    'view': '',}
        validations = {'to_username': (custom_validators.UnicodeString(1, 255),
                                       custom_validators.Alphanumeric()),
                       'type': validators.String(),
                       'date_created': custom_validators.Datetime(),
                       'date_updated': custom_validators.DatetimeOrNull(),
                       'comments_count': validators.Number(),
                       'comments_updated': custom_validators.DatetimeOrNull(),
                       }
"""
class content_access(Model):
content = ForeignKey('content')
user = ForeignKey('users')
class Meta:
defaults = {'has_access': 1}
validations = {'content': validators.Number(),
'user': validators.Number(),
'has_access': validators.Number(),
}
class resource_access(Model):
user = ForeignKey('users')
class Meta:
defaults = {'has_access': 1}
validations = {'url': custom_validators.Path(),
'user': validators.Number(),
'has_access': validators.Number(),
}
"""
class users(Model):
    """Local user/site profile (autumn ORM model); one row per hosted site."""
    class Meta:
        defaults = {'title': '',
                    'description': '',
                    'hostname': '',
                    'name': '',
                    'google_analytics': '',
                    'license': '',
                    'tipjar': '',
                    'sidebar_ad': '',
                    'currency': '',
                    'newsletter_endpoint': '',
                    'favicon': '',
                    'theme': '',
                    'logo': '',
                    'magic_key': '',
                    'extra_head_html': '',
                    'extra_body_end_html': '',
                    'theme_title': '',
                    'theme_link': '',
                    'theme_author': '',
                    'theme_author_link': '',
                    'adult_content': 0,
                    'twitter': '',
                    'facebook': '',
                    'google': '',
                    'tumblr': '',}
        validations = {'username': (custom_validators.UnicodeString(1, 255),
                                    custom_validators.Alphanumeric()),
                       'name': custom_validators.UnicodeString(0, 255),
                       'email': validators.String(1, 255),
                       'author': custom_validators.Boolean(),
                       'superuser': custom_validators.Boolean(),
                       'title': custom_validators.UnicodeString(0, 255),
                       'adult_content': custom_validators.Boolean(),
                       'magic_key': validators.String(),
                       'private_key': validators.String(),
                       }
class users_remote(Model):
    """Remote (federated) user followed by / following a local user."""
    class Meta:
        defaults = {'username': '',
                    'name': '',
                    'order': 0,
                    'magic_key': '',
                    'avatar': '',
                    'favicon': '',
                    'salmon_url': '',
                    'webmention_url': '',
                    'hub_url': '',
                    'feed_url': '',
                    'sort_type': '',
                    'follower': 0,
                    'following': 0, }
        validations = {'local_username': (custom_validators.UnicodeString(1, 255),
                                          custom_validators.Alphanumeric()),
                       'username': custom_validators.UnicodeString(0, 255),
                       'name': custom_validators.UnicodeString(0, 255),
                       'profile_url': validators.String(),
                       'follower': custom_validators.Boolean(),
                       'following': custom_validators.Boolean(),
                       }
|
19,535 | d118318414f61207623f46c7730cf124336b9a90 | import logging
import os
from logging.handlers import SMTPHandler, RotatingFileHandler
from flask import Flask, render_template, request
from flask_login import current_user
from flask_sqlalchemy import get_debug_queries
from flask_wtf.csrf import CSRFError
from flaskipa.blueprints.auth import auth_bp
from flaskipa.extensions import bootstrap, db, login_manager, csrf, moment
from flaskipa.models import User
from flaskipa.settings import config
basedir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def create_app(config_name=None):
    """Application factory: build and configure the Flask app.

    config_name selects a settings class from `config`; when omitted it
    falls back to the FLASK_CONFIG env var, then 'development'.
    """
    if config_name is None:
        config_name = os.getenv('FLASK_CONFIG', 'development')
    app = Flask('flaskipa')
    app.config.from_object(config[config_name])
    register_logging(app)
    register_extensions(app)
    register_blueprints(app)
    register_errors(app)
    return app
def register_logging(app):
    """Attach a rotating INFO-level file handler (non-debug runs only)."""
    class RequestFormatter(logging.Formatter):
        # Enrich records with the current request's URL and client address.
        def format(self, record):
            record.url = request.url
            record.remote_addr = request.remote_addr
            return super(RequestFormatter, self).format(record)
    # NOTE(review): request_formatter is built but not attached to any
    # handler (e.g. a future SMTPHandler) — kept for parity with the original.
    request_formatter = RequestFormatter(
        '[%(asctime)s] %(remote_addr)s requested %(url)s\n'
        '%(levelname)s in %(module)s: %(message)s'
    )
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # Bug fix: RotatingFileHandler does not create missing directories, so a
    # fresh checkout without logs/ crashed here.
    log_dir = os.path.join(basedir, 'logs')
    os.makedirs(log_dir, exist_ok=True)
    file_handler = RotatingFileHandler(os.path.join(log_dir, 'flaskipa.log'),
                                       maxBytes=10 * 1024 * 1024, backupCount=10)
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.INFO)
    if not app.debug:
        app.logger.addHandler(file_handler)
def register_extensions(app):
    """Bind the shared Flask extension singletons to this app instance."""
    bootstrap.init_app(app)
    db.init_app(app)
    login_manager.init_app(app)
    csrf.init_app(app)
    moment.init_app(app)
def register_blueprints(app):
    """Attach the application's blueprints (currently only auth)."""
    app.register_blueprint(auth_bp)
def register_errors(app):
    """Install HTML error handlers for common HTTP errors and CSRF failures."""
    @app.errorhandler(400)
    def bad_request(e):
        return render_template('errors/400.html'), 400
    @app.errorhandler(404)
    def page_not_found(e):
        return render_template('errors/404.html'), 404
    @app.errorhandler(500)
    def internal_server_error(e):
        return render_template('errors/500.html'), 500
    @app.errorhandler(CSRFError)
    def handle_csrf_error(e):
        # CSRF failures render the 400 page with the specific reason attached.
        return render_template('errors/400.html', description=e.description), 400
|
19,536 | 33728948532ec848304b3e99228d7e64739467cf | from typing import List
from uuid import UUID
from fastapi import APIRouter, Depends, Form, Path, Request
import publishing_platform.users.service as users_service
from publishing_platform.auth.service import validate_token_dependency
from publishing_platform.repo.common.common_dto import UpdateRatingFAPI
from publishing_platform.users.dto import *
from fastapi.responses import HTMLResponse
from publishing_platform.app import templates
# REST endpoints for user management; every handler delegates to users_service.
users_router = APIRouter()
__all__ = [
    "users_router",
]
@users_router.get("", response_model=List[UserFAPI]) # noqa
async def get_users_all():
    """Return every user."""
    return await users_service.get_users_all()
@users_router.post("/create_users", response_model=UserAndTokenFAPI)
async def create_user(add_user_info: AddUserFAPI) -> UserAndTokenFAPI:
    """Create a user and return it together with an auth token."""
    return await users_service.create_user(add_user_info)
@users_router.put("/{user_id}", response_model=UserFAPI)
async def update_user(update_info: UpdateUserFAPI, user_id: UUID = Path(...)):
    """Update the user identified by user_id."""
    return await users_service.update_user(update_info, user_id)
@users_router.delete("/{user_id}")
async def delete_user(user_id: UUID):
    """Delete the user identified by user_id."""
    await users_service.delete_user(user_id)
@users_router.get("/{user_id}", response_model=UserFAPI)
async def get_user_by_id(user_id: UUID):
    """Fetch a single user by id."""
    return await users_service.get_user_by_id(user_id)
|
19,537 | f83617cf119ebb5f708c5381095eae7e026e287e | #-*- coding=utf-8 -*-
import vte
import gtk
class MyTerm():
    """Embedded VTE terminal wrapper that respawns its shell on exit."""
    def __init__(self):
        """Create the terminal widget and fork the child shell."""
        self.terminal=vte.Terminal()
        # Respawn (rather than die) whenever the child shell exits.
        self.terminal.connect("child-exited",lambda term: self.vte_exit())
        self.terminal.fork_command()
        self.terminal.set_size(10,20)
    def vte_exit(self):
        """Child exited: fork a fresh shell and clear the screen."""
        self.terminal.fork_command()
        self.terminal.feed_child("clear\n")
    def vte_message(self):
        pass
|
19,538 | fde20fd7f8a9b26bb3544d326b989b4e178816fc | import os
import unittest
import time
import logging
from WMCore.Configuration import ConfigSection
from WMCore.Alerts.Alert import Alert
from WMCore.Alerts.ZMQ.Sinks.FileSink import FileSink
from WMQuality.TestInit import TestInit
class FileSinkTest(unittest.TestCase):
    """Unit tests for the line-delimited JSON Alert FileSink."""

    def setUp(self):
        # Fresh working directory and output file per test.
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        self.config = ConfigSection("file")
        self.config.outputfile = os.path.join(self.testDir, "FileSinkTestNew.json")

    def tearDown(self):
        self.testInit.delWorkDir()

    def testFileSinkBasic(self):
        """Alerts survive a send/load round trip, including newline payloads."""
        sink = FileSink(self.config)
        alerts = []
        nAlerts = 10
        for i in range(nAlerts):
            a = Alert(Source = __file__, Level = i, Timestamp = time.time(),
                      Type = "Test")
            alerts.append(a)
        sink.send(alerts)
        # test by reading back
        loadAlerts = sink.load()
        self.assertEqual(len(loadAlerts), nAlerts)
        # Since FileSink implementation depends on line-separated JSONs of
        # Alert instance, test handling new lines in the payload
        alerts = []
        testMsg = "addtional \n message"
        for i in range(10, 20):
            a = Alert(Source = __file__, Level = i, Timestamp = time.time(),
                      Type = "Test", Details = {"message": testMsg})
            alerts.append(a)
        # Bug fix: failUnless is a deprecated alias removed in Python 3.12;
        # assertTrue is the supported equivalent.
        self.assertTrue(os.path.exists(self.config.outputfile))
        sink.send(alerts)
        # test by reading back
        loadAlerts = sink.load()
        self.assertEqual(len(loadAlerts), 20)
        for a in loadAlerts[10:]:
            self.assertEqual(a["Details"]["message"], testMsg)
if __name__ == "__main__":
unittest.main()
|
19,539 | 449646a89a595bf536eb27850d65099b65e5e3d1 | import json
import matplotlib.style as style
import numpy as np
import pandas as pd
import pylab as pl
def make_rows(cngrs_prsn):
    """Flatten one legislator JSON object into a list of per-term dicts.

    Each dict carries the person's name/birthday/gender plus one term's
    start, end, type, and party.
    """
    full_name = cngrs_prsn["name"]["first"] + " " + cngrs_prsn["name"]["last"]
    born = cngrs_prsn["bio"].get("birthday", None)
    sex = cngrs_prsn["bio"]["gender"]
    return [
        {
            "name": full_name,
            "birthday": born,
            "gender": sex,
            "term_start": term["start"],
            "term_end": term["end"],
            "term_type": term["type"],
            "party": term.get("party"),  # None when absent
        }
        for term in cngrs_prsn["terms"]
    ]
def load_df_from_files():
    """Load historical + current legislator JSON and flatten to a DataFrame.

    Records that fail to flatten are printed and skipped.
    """
    with open("legislators-historical.json") as f:
        data_old = json.load(f)
    with open("legislators-current.json") as f:
        data_new = json.load(f)
    data = data_old + data_new
    rows = []
    for person in data:
        try:
            these_rows = make_rows(person)
        except (KeyError, TypeError):
            # Bug fix: the bare except fell through to rows.extend(), which
            # re-extended the PREVIOUS person's rows (or raised NameError on
            # the first failure). Report and skip malformed records instead.
            print(person)
            continue
        rows.extend(these_rows)
    df = pd.DataFrame(rows)
    return df
def clean_df(df):
    """Drop rows with no birthday and parse the rest to datetimes."""
    # TODO: get birthdays for people missing birthdays
    with_birthday = df[df.birthday.notnull()]
    with_birthday["birthday"] = pd.to_datetime(with_birthday["birthday"])
    return with_birthday
def expand_df_dates(df):
    """Expand the dataframe so that each row has the age of a Congressperson in a
    particular year.
    This code based on:
    https://stackoverflow.com/questions/43832484/expanding-a-dataframe-based-on-start-and-end-columns-speed
    """
    # One year-end (freq="A") timestamp per year covered by each term.
    dates = [pd.bdate_range(r[0], r[1], freq="A").to_series()
             for r in df[['term_start', 'term_end']].values]
    lens = [len(x) for x in dates]
    # Repeat every source row once per covered year, then attach the dates.
    df = pd.DataFrame(
        {col:np.repeat(df[col].values, lens) for col in df.columns}
    ).assign(date=np.concatenate(dates))
    return df
def create_df():
    """Build the per-year age table: load, clean, expand, then compute age."""
    frame = expand_df_dates(clean_df(load_df_from_files()))
    # Approximate age in whole years (timedelta / 365, then .days).
    frame["age_at_t"] = ((frame["date"] - frame["birthday"]) / 365).dt.days  # Yeah, this is weird.
    return frame
# Load that data
df = create_df()
# Limit to when next term ends (as of time of writing, 2019-03-09)
df = df[df["date"] <= "2020-12-31"]
# Set the style
style.use("seaborn-whitegrid")
# Overall average age
df.groupby("date").age_at_t.mean().plot(figsize=(8, 4))
pl.title("Average Age of Congress")
pl.ylabel("Average Age")
pl.xlabel("Date")
pl.tight_layout()
pl.savefig("fig/time_avgage.png")
# Mean and Median
# NOTE(review): this figure gets no labels/savefig and `tmp` (an Axes) is unused.
tmp = df.groupby("date").agg({"age_at_t": ["mean", "median"]}).plot()
pl.title("Average and Median Age of Congress")
# Age by Senate vs. House
tmp = (df
       .groupby(["date", "term_type"])
       .age_at_t
       .mean()
       .unstack())
tmp.columns = ["House", "Senate"]
tmp.plot(figsize=(8, 4))
pl.title("Average Age of Congress by House")
pl.ylabel("Average Age")
pl.xlabel("Date")
pl.tight_layout()
pl.savefig("fig/time_avgage_byhouse.png")
# Age by Gender
(df
 .groupby(["date", "gender"])
 .age_at_t
 .mean()
 .unstack()
 .plot(figsize=(8, 4)))
pl.title("Average Age of Congress by Gender")
pl.ylabel("Average Age")
pl.xlabel("Date")
pl.tight_layout()
pl.savefig("fig/time_avgage_bygender.png")
# Min and Max Age
# df[df.age_at_t > 0].groupby(["date"]).agg({"age_at_t": ["max", "min"]}).plot(figsize=(8, 4))
tmp = (df
       .groupby(["date"])
       .agg({"age_at_t": ["max", "min"]})
       .plot(figsize=(8, 4)))
# NOTE(review): .plot() returns an Axes, so assigning tmp.columns here has no
# effect on the legend — the rename should happen before plotting. Confirm.
tmp.columns = ["Min", "Max"]
pl.title("Min and Max Age of Congress")
pl.ylabel("Age")
pl.xlabel("Date")
pl.tight_layout()
pl.savefig("fig/time_minmaxage.png")
tmp = (df[df.date >= "1900"]
       .groupby(["date"])
       .agg({"age_at_t": ["max", "min"]})
       .plot(figsize=(8, 4)))
# NOTE(review): same Axes/.columns issue as above.
tmp.columns = ["Min", "Max"]
pl.title("Min and Max Age of Congress")
pl.ylabel("Age")
pl.xlabel("Date")
pl.tight_layout()
pl.savefig("fig/time_minmaxage_filtered.png")
# Age by Party
# Yeah this doesn't look very good.
(df
 .groupby(["date", "party"])
 .age_at_t
 .mean()
 .unstack()
 .plot())
pl.title("Average Age of Congress by Party")
pl.ylabel("Average Age")
pl.xlabel("Date")
pl.tight_layout()
pl.savefig("fig/time_avgage_byparty_all.png")
# Age by Dem v Rep
(df[df.party.isin(["Democrat", "Republican", "Independent"])]
 .groupby(["date", "party"])
 .age_at_t
 .mean()
 .unstack()
 .plot())
pl.title("Average Age of Congress by (some) Party")
pl.ylabel("Average Age")
pl.xlabel("Date")
pl.tight_layout()
pl.savefig("fig/time_avgage_byparty_some.png")
|
19,540 | e92dd13951e8a26430c573b4509ff532f933e867 | #Question 1
def area():
    """Read a radius from stdin and print the sphere's surface area (4*pi*r^2)."""
    r = int(input('Enter radius of sphere: '))
    # 3.14 approximates pi; kept as written for the exercise.
    area = 4 * 3.14 * r * r
    print('Area of the sphere is:', area)
    return
area()
#Question 2
def perfect():
    """Print all perfect numbers in [1, 1000] (6, 28, 496)."""
    for i in range(1, 1001):
        # Proper divisors of i never exceed i // 2, so the inner loop is
        # halved; also stops shadowing the builtin `sum`.
        divisor_total = 0
        for j in range(1, i // 2 + 1):
            if i % j == 0:
                divisor_total += j
        if divisor_total == i:
            print(i)
perfect()
#Question 3
def mul(i):
    """Return 12 * i, computed by repeated addition (recursion)."""
    if i == 1:
        return 12
    return 12 + mul(i - 1)
for i in range(1, 11):
print('12 *', i, '=', mul(i))
#Question 4
def power(a, b):
    """Return a**b for non-negative integer b, computed recursively.

    Bug fix: power(a, 0) previously recursed forever; it now returns 1,
    and negative exponents raise instead of hanging.
    """
    if b < 0:
        raise ValueError('b must be a non-negative integer')
    if b == 0:
        return 1
    return a * power(a, b - 1)
a = int(input('Enter a: '))
b = int(input('Enter b: '))
print(power(a, b))
#Question 5
def fact(num):
    """Return num! for num >= 0, computed recursively.

    Bug fix: fact(0) previously recursed forever; 0! == 1 is now handled.
    """
    if num <= 1:
        return 1
    return num * fact(num - 1)
num = int(input('Enter a no.: '))
# NOTE(review): `dict` shadows the builtin; a name like `results` would be safer.
dict = {}
dict[num] = fact(num)
print(dict) |
19,541 | 0e46e23c9b0b79841864fbe018be822e0c94ac04 | import json
import ssl
from config.config import Config
from websocket import create_connection, enableTrace
import websocket
from pprint import pprint
import threading
class Websocketclient(object):
    """Relays messages between a central websocket server and a local WiFi
    relay, spawning a background reconnect when a send fails."""

    def __init__(self):
        # Bug fix: this was spelled `_init__` (missing underscore), so it
        # never ran as the constructor and instances had no attributes.
        self.wsclnt = ""        # websocket connection to the central server
        self.wsToRelay = ""     # handle used to talk to the WiFi relay
        self.connected = False  # whether the central connection is up

    def on_error(self, ws, error):
        """Websocket error callback: log the failure."""
        print("ERROR: websocketclient: There was an error coomunicating to central server")
        ##TODO: handler for local access, set flag or something
        print(error)

    def on_message(self, ws, message):
        """Forward any message from the central server to the WiFi relay."""
        print("Got message from central server:" + str(message))
        print("Will now forward to WiFi relay")
        self.wsToRelay.sendMsgToRelay(message)

    def connectToHost(self, host=None, port=None):
        """Open the websocket to the central server and block in run_forever().

        host/port default to the configured sonoff_ws_server/sonoff_ws_port.
        """
        main_config = Config()
        if host is None:
            host = main_config.configOpt["sonoff_ws_server"]
        if port is None:
            port = main_config.configOpt["sonoff_ws_port"]
        # Connect to Zio Host
        addr = "wss://" + host + ":" + port + "/api/ws"
        print("Connecting to " + addr)
        websocket.enableTrace(False)
        try:
            #self.wsclnt = create_connection(addr, sslopt={"cert_reqs": ssl.CERT_NONE} )
            self.wsclnt = websocket.WebSocketApp(addr, on_error=self.on_error, on_message=self.on_message)
            self.connected = True
            # Certificate verification is deliberately disabled here.
            self.wsclnt.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE})
            print("Connection should have been established, but now ended")
            self.connected = False
            ##TODO: handler for local access, set flag or something
        except Exception as e:
            print("ERROR: websocket client: connectToHost: Failed to connect " + str(e))
            self.connected = False

    def switchRelay(self, state):
        """Switch the WiFi relay to `state`."""
        self.wsToRelay.switch(state)

    def getRelayState(self):
        """Return the current state reported by the WiFi relay."""
        return self.wsToRelay.getRelayState()

    def _send_json_cmd(self, str_json_cmd):
        """Send a JSON command string to the central server.

        Returns "SUCC" when the send call did not raise; on failure spawns a
        background reconnect (connectToHost blocks) and returns "SENDFAIL".
        """
        try:
            print("Trying to send " + str_json_cmd)
            self.wsclnt.send(str_json_cmd)
            return "SUCC"
        except Exception as e:
            print("_send_json_cmd : Error occurred while trying to send command, check if "
                  "connection was established " + str(e))
            print("_send_json_cmd : will try to reconnect")
            try:
                t = threading.Thread(target=self.connectToHost)
                t.start()
            except Exception as e:
                print("_send_json_cmd : Error occurred while trying to reconnect: " + str(e))
            ## ToDO wait in thread for a reply (recv) as per requirement.
            # (An unreachable trailing `return "SUCC"` was removed.)
            return "SENDFAIL"

    def forwardRequest(self, json_string):
        """Validate/normalize a JSON request and forward it to the server.

        Returns "ERROR" for malformed input, otherwise the result of
        _send_json_cmd ("SUCC" or "SENDFAIL").
        """
        try:
            msg_dict = json.loads(json_string)
        except (ValueError, TypeError):
            # Narrowed from a bare except: only parse failures belong here.
            print(" forwardRequest : Failed to parse json, please check the passed argument")
            return "ERROR"
        ## modify json if needed
        #msg_dict["accessKey"] = "test"
        try:
            jsoncmd = json.dumps(msg_dict)
        except (ValueError, TypeError):
            print(" forwardRequest : Failed to build json")
            return "ERROR"
        return self._send_json_cmd(jsoncmd)
|
19,542 | e2724d0d69fc79eaa2b8cfd36819df0acbf01e19 | # -*- coding: utf-8 -*-
import os
import shutil
import mock
import sys
import os
from cpenv.resolver import Resolver, ResolveError
from cpenv.models import VirtualEnvironment, Module
from cpenv import platform
from cpenv.utils import rmtree
from nose.tools import raises
from . import data_path
from .utils import make_files, cwd
ENV_TEXT = '''
environment:
UNRESOLVED_PATH: $NOVAR
RESOLVED_PATH: $ENVIRON/resolved
PLATFORM_PATH:
win: environ_win
osx: environ_osx
linux: environ_linux
MULTI_PLATFORM_PATH:
- nonplat
- win:
- $PYVER/wina
- $PYVER/winb
osx:
- $PYVER/osxa
- $PYVER/osxb
linux:
- $PYVER/linuxa
- $PYVER/linuxb
'''
REDIRECT_TEXT = '''testenv testmod'''
def setup_module():
    """Create the fixture trees (home, not_home, cached) shared by all tests."""
    os.environ['CPENV_HOME'] = data_path('home')
    files = (
        data_path('home', 'testenv', 'environment.yml'),
        data_path('home', 'testenv', 'modules', 'testmod', 'module.yml'),
        data_path('not_home', 'testenv', 'environment.yml'),
        data_path('cached', 'cachedenv', 'environment.yml')
    )
    make_files(*files, text=ENV_TEXT)
    # Project tree with a .cpenv redirect file pointing at "testenv testmod".
    project_path = data_path('not_home', 'project', 'sequence', 'shot')
    os.makedirs(project_path)
    make_files(data_path('not_home', 'project', '.cpenv'), text=REDIRECT_TEXT)
    make_files(os.path.join(project_path, 'shot_file.txt'), text='')
def teardown_module():
    """Remove every fixture tree created in setup_module."""
    rmtree(data_path('home'))
    rmtree(data_path('not_home'))
    rmtree(data_path('cached'))
def test_resolve_home():
    '''Resolve environment in CPENV_HOME'''
    r = Resolver('testenv')
    r.resolve()
    assert r.resolved[0].path == data_path('home', 'testenv')
def test_resolve_relative():
    '''Resolve environment from relative path'''
    # The cwd copy of testenv must win over the CPENV_HOME copy.
    with cwd(data_path('not_home')):
        r = Resolver('testenv')
        r.resolve()
        assert r.resolved[0].path == data_path('not_home', 'testenv')
def test_resolve_absolute():
    '''Resolve environment from absolute path'''
    # An absolute path bypasses cwd-relative lookup entirely.
    with cwd(data_path('not_home')):
        r = Resolver(data_path('home', 'testenv'))
        r.resolve()
        assert r.resolved[0].path == data_path('home', 'testenv')
def test_resolve_cache():
    '''Resolve environment from cache'''
    cached_env_path = data_path('cached', 'cachedenv')
    mock_cache = mock.Mock()
    mock_cache.find = mock.Mock(
        return_value=VirtualEnvironment(cached_env_path)
    )
    r = Resolver('cachedenv', cache=mock_cache)
    r.resolve()
    assert r.resolved[0].path == cached_env_path
def test_resolve_multi_args():
    '''Resolve multiple paths'''
    # First argument resolves to an environment, the rest to its modules.
    r = Resolver('testenv', 'testmod')
    r.resolve()
    assert isinstance(r.resolved[0], VirtualEnvironment)
    assert isinstance(r.resolved[1], Module)
def test_combine_multi_args():
    '''Resolve combine multiple paths'''
    # NOTE(review): sys.version[:3] yields "3.1" on Python 3.10+; if cpenv
    # derives $PYVER from version_info this comparison will break — confirm.
    pyver = str(sys.version[:3])
    # Expected combined environment after expanding ENV_TEXT for this platform.
    expected = {
        'PATH': [{
            'win': data_path('home', 'testenv', 'Scripts'),
            'linux': data_path('home', 'testenv', 'bin'),
            'osx': data_path('home', 'testenv', 'bin')
        }[platform]],
        'CPENV_ACTIVE_MODULES': [],
        'UNRESOLVED_PATH': '$NOVAR',
        'RESOLVED_PATH': data_path('home', 'testenv', 'resolved'),
        'PLATFORM_PATH': 'environ_' + platform,
        'MULTI_PLATFORM_PATH': [
            'nonplat',
            pyver + '/' + platform + 'a',
            pyver + '/' + platform + 'b',
        ]
    }
    r = Resolver('testenv', 'testmod')
    r.resolve()
    combined = r.combine()
    for k in expected.keys():
        if isinstance(expected[k], list):
            assert expected[k] == combined[k]
            continue
        assert os.path.normpath(expected[k]) == os.path.normpath(combined[k])
def test_redirect_resolver_from_folder():
    '''Resolve environment from folder, parent folder has .cpenv file'''
    # The .cpenv redirect written in setup_module names "testenv testmod".
    expected_paths = [
        data_path('home', 'testenv'),
        data_path('home', 'testenv', 'modules', 'testmod'),
    ]
    r = Resolver(data_path('not_home', 'project', 'sequence', 'shot'))
    r.resolve()
    assert r.resolved[0].path == expected_paths[0]
    assert r.resolved[1].path == expected_paths[1]
def test_redirect_resolver_from_file():
    '''Resolve environment from file, parent folder has .cpenv file'''
    expected_paths = [
        data_path('home', 'testenv'),
        data_path('home', 'testenv', 'modules', 'testmod'),
    ]
    r = Resolver(
        data_path('not_home', 'project', 'sequence', 'shot', 'shot_file.txt')
    )
    r.resolve()
    assert r.resolved[0].path == expected_paths[0]
    assert r.resolved[1].path == expected_paths[1]
# Negative cases: every unresolvable name must raise ResolveError.
@raises(ResolveError)
def test_nonexistant_virtualenv():
    '''Raise ResolveError when environment does not exist'''
    r = Resolver('does_not_exist')
    r.resolve()
@raises(ResolveError)
def test_nonexistant_module():
    '''Raise ResolveError when module does not exist'''
    r = Resolver('testenv', 'does_not_exist')
    r.resolve()
@raises(ResolveError)
def test_multi_module_does_not_exist():
    '''Raise ResolveError when a module does not exist'''
    r = Resolver('testenv', 'testmod', 'does_not_exist')
    r.resolve()
|
19,543 | 59c1a64ab89e4383bd7472afc738b690b72939d0 | import numpy as np
def azel2polar(az, el):
    """Convert azimuth/elevation (degrees) to polar angles alpha/delta.

    A 0.1-degree offset is added to the elevation, which avoids the
    degenerate exactly-horizontal case. Prints and (new) returns the pair
    (alpha, delta) in degrees so callers can use the values programmatically.
    """
    a = az*(np.pi/180)
    e = (el+0.1)*(np.pi/180)
    x = np.cos(a)*np.sin((np.pi/2)-e)
    z = np.cos((np.pi/2)-e)
    alpha = np.arccos(x)
    delta = np.arccos(z/(np.sin(np.arccos(x))))
    alphaindegs = alpha*(180/np.pi)
    deltaindegs = delta*(180/np.pi)
    print('Alpha=', alphaindegs)
    print('Delta=', deltaindegs)
    # Previously the results were only printed; returning them is
    # backward-compatible (the old call site ignores the return value).
    return alphaindegs, deltaindegs
azel2polar(180, 0)
|
19,544 | 7fc344e62f0a14dd1d1a12b4a7e0abdf7eca0dd1 | ans = int(input())
# ans already holds the first integer: add the second, multiply by the third.
ans += int(input())
ans *= int(input())
print(ans//2) |
19,545 | cc69a8e0bcb5b9632e6d252b0a28492c932f3966 | # Uses python3
import sys, threading
sys.setrecursionlimit(10**7) # max depth of recursion
threading.stack_size(2**27) # new thread will get stack of such size
def tradeoff(a, b):
    """Merge two majority-candidate lists (or -1 placeholders).

    Pops pairs off the tails of both lists, keeping only pairwise-equal
    elements, then appends whatever remains of either list. Note: the
    input lists are consumed (mutated) in the process.
    """
    left = [] if a == -1 else a
    right = [] if b == -1 else b
    merged = []
    while left and right:
        u, v = left.pop(), right.pop()
        if u == v:
            merged.extend((u, v))
    return merged + left + right
def get_majority_element(a):
    """Divide-and-conquer majority search.

    Returns a non-empty list whose first element is the majority value
    (occurring strictly more than half the time in `a`), or -1 when no
    majority exists. The single-element base case returns the sublist itself.
    """
    if len(a) == 1:
        return a
    m = int(len(a) / 2)
    #write your code here
    # Candidate lists from each half; tradeoff keeps pairwise-equal elements.
    b = get_majority_element(a[:m])
    c = get_majority_element(a[m:])
    result = tradeoff(b, c)
    if not result:
        return -1
    else:
        candidate = result[0]
        # Verify the candidate against the full slice (strict majority).
        if a.count(candidate) / len(a) <= 0.5:
            return -1
    return result
if __name__ == '__main__':
    # NOTE(review): `input` here shadows the builtin with the full stdin text.
    input = sys.stdin.read()
    n, *a = list(map(int, input.split()))
    if get_majority_element(a) != -1:
        print(1)
    else:
        print(0)
|
19,546 | 177972210321464e708a4a9eeac4def979d6cb54 | from .nikamap import *
|
19,547 | 02724073a830ca808f638b1e875b7c63125f8061 | import random
import math
from collections import Counter
from matplotlib import pyplot as plt
# Labels for the boy-or-girl probability puzzle.
BOY = 'boy'
GIRL = 'girl'
def random_kid():
    """Return 'boy' or 'girl' with equal probability."""
    return random.choice([BOY, GIRL])
def uniform_pdf(x):
    """Density of the uniform distribution on [0, 1): 1 inside, 0 outside."""
    return 1 if 0 <= x < 1 else 0
def uniform_cdf(x):
    """CDF of the uniform distribution on [0, 1).

    Probability that a uniformly distributed random variable is <= x.
    """
    if x < 0:
        return 0
    return x if x < 1 else 1
def normal_pdf(x, mu=0, sigma=1):
    """Normal density at x for mean mu and standard deviation sigma."""
    scale = math.sqrt(2 * math.pi) * sigma
    return math.exp(-(x - mu) ** 2 / 2 / (sigma * sigma)) / scale
def normal_cdf(x, mu=0, sigma=1):
    """Cumulative normal distribution, computed from the error function."""
    return 0.5 * (1 + math.erf((x - mu) / math.sqrt(2) / sigma))
def inverse_normal_cdf(p, mu=0, sigma=1, tolerance=0.00001):
    """Inverse normal CDF (quantile), located by binary search.

    tolerance bounds the width of the final search interval.
    """
    # If not standard-normal, solve the standard problem and rescale.
    if mu != 0 or sigma != 1:
        return mu + sigma * inverse_normal_cdf(p, tolerance=tolerance)
    low_z, low_p = -10.0, 0  # normal_cdf(-10) is (very close to) 0
    hi_z, hi_p = 10.0, 0  # normal_cdf(10) is (very close to) 1
    while hi_z - low_z > tolerance:
        mid_z = (low_z + hi_z) / 2  # take the midpoint
        mid_p = normal_cdf(mid_z)
        if mid_p < p:
            # Midpoint is still too low: search above it
            low_z, low_p = mid_z, mid_p
        elif mid_p > p:
            # Midpoint is still too high: search below it
            hi_z, hi_p = mid_z, mid_p
        else:
            break
    return mid_z
def bernully_trial(p):
    """One Bernoulli trial: 1 with probability p, otherwise 0."""
    return int(random.random() < p)
def binominal(n, p):
    """Number of successes in n Bernoulli trials with probability p."""
    successes = 0
    for _ in range(n):
        successes += bernully_trial(p)
    return successes
def make_hist(p, n, num_points, plt=plt):
    """Plot binomial samples as bars with their normal approximation overlaid."""
    data = [binominal(n, p) for _ in range(num_points)]
    histogram = Counter(data)
    # Bar chart of the actual binomial samples
    plt.bar(
        [x - 0.4 for x in histogram.keys()],
        [v / num_points for v in histogram.values()],
        0.8, color='0.75'
    )
    mu = p * n
    sigma = math.sqrt(n * p * (1 - p))
    # Line plot of the normal approximation (probability mass per bucket)
    xs = range(min(data), max(data) + 1)
    ys = [normal_cdf(i + 0.5, mu, sigma) - normal_cdf(i - 0.5, mu, sigma) for i in xs]
    plt.plot(xs, ys)
    plt.title("Биноминальное распределение и его нормальное приближение")
    plt.show()
def plot_normal_pdfs(plt=plt):
    """Plot normal PDFs for several (mu, sigma) combinations."""
    xs = [x / 10.0 for x in range(-50, 50)]
    plt.plot(xs, [normal_pdf(x, sigma=1) for x in xs], '-', label='mu=0, sigma=1')
    plt.plot(xs, [normal_pdf(x, sigma=2) for x in xs], '--', label='mu=0, sigma=2')
    plt.plot(xs, [normal_pdf(x, sigma=0.5) for x in xs], ':', label='mu=0, sigma=0.5')
    plt.plot(xs, [normal_pdf(x, mu=-1) for x in xs], '-.', label='mu=-1, sigma=1')
    plt.legend()
    plt.show()
def plot_normal_cdfs(plt=plt):
    """Plot normal CDFs for the same (mu, sigma) combinations."""
    xs = [x / 10.0 for x in range(-50, 50)]
    plt.plot(xs, [normal_cdf(x, sigma=1) for x in xs], '-', label='mu=0, sigma=1')
    plt.plot(xs, [normal_cdf(x, sigma=2) for x in xs], '--', label='mu=0, sigma=2')
    plt.plot(xs, [normal_cdf(x, sigma=0.5) for x in xs], ':', label='mu=0, sigma=0.5')
    plt.plot(xs, [normal_cdf(x, mu=-1) for x in xs], '-.', label='mu=-1, sigma=1')
    plt.legend(loc=4)
    plt.show()
# if __name__ == "__main__":
# pass
# # проверка парадокса мальчика и девочки
# both_girls = 0
# older_girl = 0
# either_girl = 0
# random.seed(0)
# for _ in range(10000):
# younger = random_kid()
# older = random_kid()
# if older == GIRL: # старшая?
# older_girl += 1
# if older == GIRL and younger == GIRL: # обе?
# both_girls += 1
# if older == GIRL or younger == GIRL: # любая из двух?
# either_girl += 1
# print("P(обе | старшая):", both_girls / older_girl)
# print("P(обе | любая):", both_girls / either_girl)
|
19,548 | fdbc4eaa03de445821d75f66eb07b379c8f72fc6 | # encoding:utf-8
# list[(1,4), (5,1), (2,3)],根据每个元组的中的较大值进行排序
# 期待结果 [(2,3), (1,4), (5.1)]
# 要求用sorted和lambda完成
# 级别1: 用Lambda和max
# 级别2:用lambda不用max
# 提示: True * 4 == 4 False *2 == 0
list1 = [(1, 4), (5, 1), (2, 3)]
# method 1
def sort_array(array):
    """Bubble-sort `array` in place, ordering tuples by their largest item."""
    n = len(array)
    for done in range(n - 1):
        for idx in range(n - 1 - done):
            if max(array[idx]) > max(array[idx + 1]):
                array[idx], array[idx + 1] = array[idx + 1], array[idx]
    return array
# print sort_array(list1)
# method 2
def sort_array(array, getmax):
    """Bubble-sort `array` in place, comparing elements via key `getmax`."""
    n = len(array)
    for done in range(n - 1):
        for idx in range(n - 1 - done):
            if getmax(array[idx]) > getmax(array[idx + 1]):
                array[idx], array[idx + 1] = array[idx + 1], array[idx]
    return array
# print sort_array(list1,lambda x : max(x))
# method 3
list1 = [(1, 4), (5, 1), (2, 3)]
list2 = [{'name' : 'lf11'}, {'name' : 'lf1'}, {'name' : 'lf3'} ]
def cmp(x, y):
    """Return True when x is strictly greater than y, else False."""
    if x > y:
        return True
    return False
def sort_array(array, getmax, cpm):
    """Return a bubble-sorted copy of `array`.

    `cpm(getmax(a), getmax(b))` decides whether two neighbours should swap;
    the caller's list is left untouched.
    """
    array = array[:]  # work on a copy
    n = len(array)
    for done in range(n - 1):
        for idx in range(n - 1 - done):
            if cpm(getmax(array[idx]), getmax(array[idx + 1])):
                array[idx], array[idx + 1] = array[idx + 1], array[idx]
    return array
list2 = [{'name' : 'if11'}, {'name' : 'of1'}, {'name' : 'lf3'} ]
# NOTE(review): 'nname' looks like a typo for 'name' — every key is None, so
# this sort leaves the list order unchanged; confirm intent.
list2.sort(key=lambda x : x.get('nname'))
print __name__
if __name__ == '__main__':
    print sort_array(list1,lambda x : max(x),cmp)
    print sort_array(list2,lambda x : x['name'],cmp)
    print list2
|
19,549 | 744ab3c6c98f2bcac86ae9e6aeb429f2949de093 | # -*- coding: utf-8 -*-
# -------------------------------------------------------------------------------
# Name: sfp_yandexdns
# Purpose: SpiderFoot plug-in for looking up whether hosts are blocked by
# Yandex DNS.
#
# Author: Steve Micallef <steve@binarypool.com>
#
# Created: 30/05/2018
# Copyright: (c) Steve Micallef 2018
# Licence: MIT
# -------------------------------------------------------------------------------
import dns.resolver
from spiderfoot import SpiderFootEvent, SpiderFootPlugin
class sfp_yandexdns(SpiderFootPlugin):
    """Check target hostnames against Yandex Safe DNS ("family" resolvers).

    A hostname is considered blocked when the family resolvers answer with
    one of the known sinkhole addresses in `checks`.
    """
    meta = {
        'name': "Yandex DNS",
        'summary': "Check if a host would be blocked by Yandex DNS.",
        'flags': [],
        'useCases': ["Investigate", "Passive"],
        'categories': ["Reputation Systems"],
        'dataSource': {
            'website': "https://yandex.com/",
            'model': "FREE_NOAUTH_UNLIMITED",
            'references': [
                "https://tech.yandex.com/",
                "https://dns.yandex.com/advanced/",
            ],
            'favIcon': "https://yastatic.net/iconostasis/_/tToKamh-mh5XlViKpgiJRQgjz1Q.png",
            'logo': "https://yastatic.net/iconostasis/_/tToKamh-mh5XlViKpgiJRQgjz1Q.png",
            'description': "Yandex.DNS is a free, recursive DNS service. "
            "Yandex.DNS' servers are located in Russia, CIS countries, and Western Europe."
            'In "Basic" mode, there is no traffic filtering. '
            'In "Safe" mode, protection from infected and fraudulent sites is provided. '
            '"Family" mode enables protection from dangerous sites and blocks sites with adult content.'
        }
    }
    opts = {
    }
    optdescs = {
    }
    results = None
    # Sinkhole addresses the family resolvers return for blocked hosts.
    checks = {
        "213.180.193.250": "Yandex - Infected",
        "93.158.134.250": "Yandex - Adult",
    }
    def setup(self, sfc, userOpts=dict()):
        self.sf = sfc
        self.results = self.tempStorage()
        for opt in list(userOpts.keys()):
            self.opts[opt] = userOpts[opt]
    def watchedEvents(self):
        """Event types this module consumes."""
        return [
            "INTERNET_NAME",
            "AFFILIATE_INTERNET_NAME",
            "CO_HOSTED_SITE"
        ]
    def producedEvents(self):
        """Event types this module can emit."""
        return [
            "BLACKLISTED_INTERNET_NAME",
            "BLACKLISTED_AFFILIATE_INTERNET_NAME",
            "BLACKLISTED_COHOST",
            "MALICIOUS_INTERNET_NAME",
            "MALICIOUS_AFFILIATE_INTERNET_NAME",
            "MALICIOUS_COHOST",
        ]
    # Query Yandex DNS "family" servers
    def queryAddr(self, qaddr):
        """Resolve qaddr via the family servers; None on failure/empty input."""
        if not qaddr:
            return None
        res = dns.resolver.Resolver()
        res.nameservers = ["77.88.8.7", "77.88.8.3"]
        try:
            return res.resolve(qaddr)
        except Exception:
            self.debug(f"Unable to resolve {qaddr}")
            return None
    def handleEvent(self, event):
        """Resolve the event's hostname and emit blacklist/malicious events."""
        eventName = event.eventType
        eventData = event.data
        self.debug(f"Received event, {eventName}, from {event.module}")
        # De-duplicate: each hostname is only checked once per scan.
        if eventData in self.results:
            return
        self.results[eventData] = True
        if eventName == "INTERNET_NAME":
            malicious_type = "MALICIOUS_INTERNET_NAME"
            blacklist_type = "BLACKLISTED_INTERNET_NAME"
        elif eventName == "AFFILIATE_INTERNET_NAME":
            malicious_type = "MALICIOUS_AFFILIATE_INTERNET_NAME"
            blacklist_type = "BLACKLISTED_AFFILIATE_INTERNET_NAME"
        elif eventName == "CO_HOSTED_SITE":
            malicious_type = "MALICIOUS_COHOST"
            blacklist_type = "BLACKLISTED_COHOST"
        else:
            self.debug(f"Unexpected event type {eventName}, skipping")
            return
        res = self.queryAddr(eventData)
        if not res:
            return
        self.debug(f"{eventData} found in Yandex Blocklist: {res}")
        for result in res:
            k = str(result)
            if k not in self.checks:
                continue
            evt = SpiderFootEvent(blacklist_type, f"{self.checks[k]} [{eventData}]", self.__name__, event)
            self.notifyListeners(evt)
            # The "Infected" sinkhole additionally counts as malicious.
            if k == '213.180.193.250':
                evt = SpiderFootEvent(malicious_type, f"{self.checks[k]} [{eventData}]", self.__name__, event)
                self.notifyListeners(evt)
# End of sfp_yandexdns class
|
19,550 | 6c33a3562d4f8ed54486469df09b6b8d709638ce | import tensorflow as tf
import os
import time
from utils import *
import numpy as np

# Categories are the sub-directories of the preprocessed h5 data folder.
cat_list = [i for i in os.listdir("./data/category_h5py") if os.path.isdir(os.path.join("./data/category_h5py", i))]
NUM_PTS = 4096  # points sampled per shape

if not os.path.exists("test_results"):
    os.mkdir("test_results")
# One timestamped output directory per test run.
test_time = str(time.strftime('%Y_%m_%d_%H_%M', time.localtime(time.time())))
test_dir = os.path.join("test_results", test_time)
if not os.path.exists(test_dir):
    os.mkdir(test_dir)

for cat in cat_list:
    cat_dir = os.path.join(test_dir, cat)
    cat_name = cat.split("_")[-1].split(",")[0]
    if not os.path.exists(cat_dir):
        os.mkdir(cat_dir)
    test_data, test_ndata, test_color = load_single_cat_h5(cat, NUM_PTS,"test","data", "ndata", "color")
    nb_samples = test_data.shape[0]
    # Restore the per-category checkpoint saved at epoch 180.
    modelPath = "./train_results/2018_07_10_16_27/{}/model/".format(cat)
    model_id = 180
    graph_file = os.path.join(modelPath, "model-" + str(model_id) + ".meta")
    variable_file = os.path.join(modelPath, "model-" + str(model_id))
    GAN_graph=tf.Graph()
    LOG_FOUT = open(os.path.join(cat_dir, 'log_test.txt'), 'w')

    def log_string(out_str):
        # Write to the per-category log file and echo to stdout.
        LOG_FOUT.write(out_str+'\n')
        LOG_FOUT.flush()
        print(out_str)

    log_string(graph_file)
    with tf.Session() as sess:
        try:
            saver = tf.train.import_meta_graph(graph_file)
            saver.restore(sess, variable_file)
        except:
            # No usable checkpoint for this category -- skip it.
            continue
        # Recover generator output and input placeholders by tensor name.
        fake_pts = tf.get_default_graph().get_tensor_by_name("generator/Tanh:0")
        input_pt = tf.get_default_graph().get_tensor_by_name("real_pts_color_ph:0")
        batch_size = int(input_pt.get_shape()[0])
        bn_is_train = tf.get_default_graph().get_tensor_by_name("bn_is_train:0")
        total_batch = test_data.shape[0] // batch_size
        for i in range(total_batch):
            start_idx = batch_size * i
            end_idx = batch_size * (i+1)
            batch_test_ndata = test_ndata[start_idx:end_idx]
            batch_test_data = test_data[start_idx:end_idx]
            batch_test_color = test_color[start_idx:end_idx]
            batch_test_ndata_color = np.concatenate([batch_test_ndata, batch_test_color], axis=-1)
            fake_colored_pts = sess.run(fake_pts, feed_dict={input_pt: batch_test_ndata_color,
                                                            bn_is_train: False})
            fake_colored_pts = np.squeeze(fake_colored_pts)
            # NOTE(review): (x + 127.5) * 127.5 is an unusual mapping for tanh
            # output (the usual form is (x + 1) * 127.5) -- confirm intent.
            test_fake_color256 = ((fake_colored_pts + 127.5) * 127.5).astype(np.int16)  # (batch_size, N, 3)
            # show_id = 2
            for j in range(batch_size):
                fname_GT = os.path.join(cat_dir, "test_chair_GT_{}.png".format(i * batch_size + j))
                fname_input = os.path.join(cat_dir, "test_chair_input_{}.png".format(i * batch_size + j))
                fname_gen = os.path.join(cat_dir, "test_chair_gen_{}.png".format(i * batch_size + j))
                display_point(batch_test_data[j], test_fake_color256[j], fname=fname_gen)
                display_point(batch_test_data[j], batch_test_color[j], fname=fname_GT)
                display_point(batch_test_data[j], 127*np.ones_like(fake_colored_pts[j]), fname=fname_input)
                fout = os.path.join(cat_dir, "test_{0}_{1}.png".format(cat_name, i * batch_size + j))
                try:
                    horizontal_concatnate_pic(fout, fname_input, fname_GT, fname_gen)
                except:
                    continue
                # Keep only the concatenated comparison image.
                os.remove(fname_GT)
                os.remove(fname_gen)
                os.remove(fname_input)
    LOG_FOUT.close()
|
19,551 | 967245f0f620eceff063aeafceb6df478fd64f1a | from .serializers_base import ContactSerializerBase
from locations_api.serializers_base import LocationSerializerBase
from companies_api.serializers_base import CompanySerializerWithHeadquarters
class ContactSerializer(ContactSerializerBase):
    """Contact serializer that embeds the home location and related companies."""
    # serializers.ModelSerializer just tells django to convert sql to JSON
    home = LocationSerializerBase()
    company_set = CompanySerializerWithHeadquarters(many=True)

    class Meta(ContactSerializerBase.Meta):
        # Extend the base field list instead of repeating it.
        fields = ContactSerializerBase.Meta.fields + ('home', 'company_set')
|
19,552 | 1525971fae8c58426fc03fa69fd2d509c11999e4 | is_male = True
is_tall = False

# NOTE(review): the printed messages do not match the conditions -- the
# first branch requires BOTH flags yet prints "male or tall", and the final
# else runs whenever is_male is False (tall or not), so "not male nor tall"
# is misleading. Confirm the intended logic before changing the strings.
if is_male and is_tall:
    print("You are a male or tall")
elif is_male and not is_tall:
    print("You are male but not tall")
else:
    print("You are not male nor tall")
19,553 | 838b5433744086ccdf7ad8c8066394f939eac466 | # Reading the data from input.txt
# Read the whole input file into one string.
contents = ""
with open('input.txt', encoding="utf-8") as f:
    for line in f.readlines():
        contents += line
f.close()  # NOTE(review): redundant -- the with-block already closed the file

# Tokenization
from nltk.tokenize import word_tokenize
tokenized_words = word_tokenize(contents)

# Lemmatization
from nltk.stem import WordNetLemmatizer
wordnet_lemmatizer = WordNetLemmatizer()
lemmatized_words = [wordnet_lemmatizer.lemmatize(w) for w in tokenized_words]
print("Lemmatized words\n")
print(lemmatized_words)
print("\n")

# Bigrams
from nltk.util import ngrams
bigrams = list(ngrams(tokenized_words, 2))
print("Bigrams\n")
print(bigrams)
print("\n")

# Top 5 Bigrams by frequency
import nltk
fdist = nltk.FreqDist(bigrams)
top_5 = fdist.most_common(5)
print("Top 5 bigrams \n ")
print(top_5)
print("\n")

# Concatenate every line that contains one of the top-5 bigrams.
summary = ''
for bigram in top_5:
    x = bigram[0][0]
    y = bigram[0][1]
    with open('input.txt', encoding="utf-8") as f:
        for line in f.readlines():
            words = line.strip().split()  # all words on the line
            for word1, word2 in zip(words, words[1:]):  # iterate through pairs
                if word1 == x and word2 == y:
                    summary = summary + line
print("Final only bigrams concatenated summary \n")
print(summary)
|
19,554 | 4ff4245a91d20be395dcca0d207f5b14ae1bd714 | # multilevel inheritance
class A:
    """Root of the A -> B -> C chain."""

    def display1(self):
        print("I am inside A class")


class B(A):
    """Middle link; inherits display1 from A."""

    def display2(self):
        print("I am inside B class")


class C(B):
    """Bottom link; demonstrates calling inherited methods via super()."""

    def display3(self):
        # Run the inherited behaviour first, then C's own output.
        super().display1()
        super().display2()
        print("I am inside C class")


ob1 = C()
ob1.display3()
# multiple inheritance: C(A, B) resolves methods in MRO order C -> A -> B
class A:
    """First base class."""

    def display(self):
        print("I am inside A class")


class B:
    """Second base class."""

    def display(self):
        print("I am inside B class")


class C(A, B):
    """C overrides display(), shadowing both A.display and B.display."""

    def display(self):
        print("I am inside C class")


ob1 = C()
ob1.display()
|
19,555 | 69fc300085464add1a1dab9affa6b91cbc9d0867 | from circus import get_arbiter
# Watcher definition consumed by circus' arbiter; $(circus.wid) expands to
# the worker id, so each of the 3 processes gets a distinct argument.
myprogram = {
    "cmd": "python",
    "args": "-u dummy_fly.py $(circus.wid)",
    "numprocesses": 3,
}

arbiter = get_arbiter([myprogram], debug=True)
try:
    # start() blocks until the arbiter is stopped or interrupted.
    arbiter.start()
finally:
    # Always release watcher processes/sockets, even on Ctrl-C.
    arbiter.stop()
|
19,556 | 5288c269d01462c5756f35dd4f0f300c9b93037d | from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render,get_object_or_404,redirect
from django.contrib import messages
from django.db.models import Sum
import json
# from django.views.generic.edit import DeleteView
# from django.urls import reverse_lazy
from django_tables2 import RequestConfig
from .models import Posts,Contratos,Legendas
from .forms import PostForm
from .tables import PostsTable
def post_create(request):
    """Render the post form and create a ``Posts`` row on valid submission."""
    form = PostForm(request.POST or None, request.FILES or None)
    if form.is_valid():
        post = form.save(commit=False)
        post.save()
        messages.success(request, "Successfully Created!!")
        return redirect("posts:list")
    # GET request or invalid form: re-render with validation state.
    return render(request, "post_form.html", {"form": form})
def post_detail(request, local=None):
    """Per-contract breakdown of outgoing amounts grouped by 'legenda'.

    ``local`` is the contract name; GET param ``mes`` optionally narrows
    the data to a single month.
    """
    queryset = Posts.objects.all().filter(contrato__icontains=local)
    query = request.GET.get('mes','')
    legenda_nomes = Legendas.objects.all().values_list('nome')  # fetch legend names from the Legendas table
    title = ('Estruturado por Legenda - Contrato: %s - ' % local)+"Todos os meses"
    if query:
        try:
            queryset = queryset.filter(data__month=query)
            # NOTE(review): the '0' prefix and hard-coded '/2016' assume a
            # single-digit month and a fixed year -- confirm.
            title = ('Estruturado por Legenda - Contrato: %s - 0' % local)+str(query)+"/2016"
        except:
            pass
    legendas_valores = {}
    for nome in legenda_nomes:
        # Sum of outgoing ('SAIDA') amounts for this legend in the period.
        valorSaida = queryset.filter(legenda__icontains=nome[0],tipo__icontains='SAIDA').aggregate(Sum('valor'))
        legendas_valores[str(nome[0])] = {}
        if valorSaida['valor__sum'] != None:
            legendas_valores[str(nome[0])]['saida'] = valorSaida['valor__sum']
        else:
            # No matching rows: report zero instead of None.
            legendas_valores[str(nome[0])]['saida'] = 0
    # Chart payload: header row followed by [label, value] pairs.
    graph_saida = [['Legenda','Saida']]
    for x in legendas_valores.items():
        graph_saida.append([x[0],x[1]['saida']])
    context = {
        "title":title,
        "graph_saida": json.dumps(graph_saida),
        "legendas_valores": legendas_valores,
    }
    return render(request,"post_detail.html", context)
def post_list(request):
    """List ``Posts`` ordered by date, filtered by optional GET parameters
    (``mes``, ``contrato``, ``servico``, ``tipo``, ``detalhe``)."""
    queryset = Posts.objects.all().order_by('data')
    query = request.GET
    if query:
        try:
            queryset = queryset.filter(data__month=request.GET.get('mes',''))
        except:
            pass
        queryset = queryset.filter(contrato__icontains=request.GET.get('contrato',''))
        queryset = queryset.filter(legenda__icontains=request.GET.get('servico',''))
        queryset = queryset.filter(tipo__icontains=request.GET.get('tipo',''))
        queryset = queryset.filter(detalhe__icontains=request.GET.get('detalhe',''))
    table = PostsTable(queryset)
    RequestConfig(request, paginate=False).configure(table)
    context = {
        "object_list": queryset,
        # BUG FIX: "mes" previously echoed the 'contrato' GET parameter
        # (copy-paste error); it now echoes 'mes'.
        "mes": request.GET.get('mes',''),
        "contrato": request.GET.get('contrato',''),
        "servico": request.GET.get('servico',''),
        "tipo": request.GET.get('tipo','')
    }
    return render(request,"post_list.html", context)
def post_update(request, id=None):
    """Load the ``Posts`` row ``id`` into the form and save edits back."""
    post = get_object_or_404(Posts, id=id)
    form = PostForm(request.POST or None, request.FILES or None, instance=post)
    if form.is_valid():
        post = form.save(commit=False)
        post.save()
        messages.success(request, "Item Saved!!")
        return redirect("posts:list")
    return render(request, "post_form.html", {"instance": post, "form": form})
def post_delete(request, id=None):
    """Delete the ``Posts`` row ``id`` (404 if absent) and go back to the list."""
    get_object_or_404(Posts, id=id).delete()
    messages.success(request, "Item Deleted :(")
    return redirect("posts:list")
def dre(request):
    """Income statement (DRE) view: per-contract entry/exit totals, profits
    and chart payloads, optionally restricted to GET param ``mes``."""
    queryset = Posts.objects.all()
    query = request.GET.get('mes','')
    contrato_nomes = Contratos.objects.all().values_list('nome')  # contract names from the Contratos table
    contrato_valor_entrada = {}  # contract name -> incoming total for the period
    contrato_valor_saida = {}  # contract name -> outgoing total for the period
    title = "DRE - TOTAL"
    if query:
        try:
            queryset = queryset.filter(data__month=query)
            # NOTE(review): assumes single-digit month and fixed year 2016.
            title = "DRE - 0"+str(query)+"/2016"
        except:
            pass
    contratos_valores = {}
    # Fill the per-contract entry/exit totals.
    for nome in contrato_nomes:
        valorEntrada = queryset.filter(contrato__icontains=nome[0],tipo__icontains='ENTRADA').aggregate(Sum('valor'))
        valorSaida = queryset.filter(contrato__icontains=nome[0],tipo__icontains='SAIDA').aggregate(Sum('valor'))
        contratos_valores[str(nome[0])] = {}
        if valorEntrada['valor__sum'] != None:
            contratos_valores[str(nome[0])]['entrada'] = valorEntrada['valor__sum']
        else:
            contratos_valores[str(nome[0])]['entrada'] = 0
        if valorSaida['valor__sum'] != None:
            contratos_valores[str(nome[0])]['saida'] = valorSaida['valor__sum']
        else:
            contratos_valores[str(nome[0])]['saida'] = 0
    # Compute per-contract profit and the grand totals; build chart payloads.
    total_saida = 0
    total_entrada = 0
    total_lucro = 0
    graph_saida = [['Contrato','Saida']]
    graph_entrada = [['Contrato','Entrada']]
    graph_lucro = [['Contrato','Lucros']]
    for x in contratos_valores.items():
        contratos_valores[x[0]]['lucro'] = x[1]['entrada'] - x[1]['saida']
        total_lucro = total_lucro + contratos_valores[x[0]]['lucro']
        total_saida = total_saida + x[1]['saida']
        total_entrada = total_entrada + x[1]['entrada']
        graph_saida.append([x[0],x[1]['saida']])
        graph_entrada.append([x[0],x[1]['entrada']])
        graph_lucro.append([x[0],contratos_valores[x[0]]['lucro']])
    context = {
        "title": title,
        "contratos_valores": contratos_valores,
        "total_saida": total_saida,
        "total_entrada": total_entrada,
        "total_lucro": total_lucro,
        "graph_saida": json.dumps(graph_saida),
        "graph_entrada": json.dumps(graph_entrada),
        "graph_lucro": json.dumps(graph_lucro),
    }
    return render(request,"dre.html", context)
|
19,557 | 885c06ffd841732c01d1a9f2c0cb87e113f03fbb | """
Given a string s and a string t, check if s is subsequence of t.
A subsequence of a string is a new string which is formed from the original string by deleting some
(can be none) of the characters without disturbing the relative positions of the remaining characters.
(ie, "ace" is a subsequence of "abcde" while "aec" is not).
Follow up:
If there are lots of incoming S, say S1, S2, ... , Sk where k >= 1B,
and you want to check one by one to see if T has its subsequence.
In this scenario, how would you change your code?
"""
# SOLUTION
def isSubsequence(s: str, t: str) -> bool:
    """Return True when ``s`` is a subsequence of ``t``.

    Walks ``t`` once with a single iterator: ``c in it`` advances the
    iterator just past the first match, so relative order is enforced.
    Time O(len(s) + len(t)), space O(1) -- the previous find()+slice
    version copied the tail of ``t`` for every matched character (O(s*t)).
    """
    it = iter(t)
    return all(c in it for c in s)
'''
remainder_of_t = iter(t)
for letter in s:
if letter not in remainder_of_t:
return False
return True
t = iter(t)
return all(c in t for c in s)
'''
# i do not know how to answer the follow up, if anyone could help me it would be great :)
# also the approach here is to use iter to only see through forward iterations
# time space complexity : time = O(s*t) space = O(t)
# added solution complexity: time = O(s+t) space = O(1)
|
19,558 | 6afeaea63ba38be960fe993606595ed8e0561e4d | import requests
from bs4 import BeautifulSoup

# Fetch the page at a user-supplied URL and print the text of its first <h1>.
url = input()
response = requests.get(url)
soup = BeautifulSoup(response.content, 'html.parser')
heading = soup.find("h1")
# print(hyperlinks)
# Re-parse the <h1> markup to strip tags and keep only the text.
# NOTE(review): heading.get_text() would do this without a second parse,
# and soup.find() may return None for pages without an <h1>.
soup2 = BeautifulSoup(str(heading), 'html.parser')
heading = soup2.get_text()
print(heading)
19,559 | fdd34afde986392fd77831875939f45dba21b7f1 | from sharrock.descriptors import Descriptor, UnicodeParam, Resource
version = '1.0'
class GetMe(Descriptor):
    """Gets a hello world message."""

    visible = False

    def execute(self, request, data, params):
        """Handle GET by returning a fixed confirmation string."""
        return 'Get Method executed!'
class PostMe(Descriptor):
    """Posts a hello world message."""

    visible = False
    name = UnicodeParam('name', required=True, description='The name to post.')

    def execute(self, request, data, params):
        """Handle POST by echoing back the submitted name."""
        return 'Posted %s' % params['name']
class PutMe(Descriptor):
    """Puts a hello world message."""

    visible = False
    name = UnicodeParam('name', required=True, description='The name to put.')

    def execute(self, request, data, params):
        """Handle PUT by echoing back the submitted name."""
        return 'Put this:%s' % params['name']
class DeleteMe(Descriptor):
    """Deletes it."""

    visible = False

    def execute(self, request, data, params):
        """Handle DELETE with a melodramatic farewell."""
        return 'Aaaarrrggghhhh! I\'m meeelllltttiiinnnngggg!'
class MeResource(Resource):
    """
    A resource that you can get,post,put and delete.
    """
    # One descriptor instance per HTTP verb; Resource dispatches by method.
    get = GetMe()
    post = PostMe()
    put = PutMe()
    delete = DeleteMe()
class PartialResource(Resource):
    """
    A resource with only one method implemented.
    """
    # Only GET is wired up; other verbs fall back to Resource's behaviour.
    get = GetMe()
|
19,560 | 1c9e4e41e7cfd8bbdb69c4dd32e3c056dd9c853b | num=("anu","appu","athira","vava")
# print(num)
# num1=list(num)
# num1[1]="unaiz"
# print(num1)
# num=tuple(num1)
# print("final :",num)
|
19,561 | b2dcbf8665a51fd3aa71b32298df5d283146bb06 | from core.node import Node
def _str__helper(node, depth=0):
prefix = "\n" + "\t" * depth
body = prefix + str(node)
# base case, there are no children:
if not node.children:
return body
# recursive case, there are children:
elif node.children:
for child in node.children:
body += _str__helper(node.children[child], depth=depth + 1)
return body
class Tree:
    """Thin wrapper around a root ``Node`` with an optional display name."""

    def __init__(self, root=None, name=None):
        # BUG FIX: test explicitly against None; ``if not root`` would also
        # discard a caller-supplied node that happens to evaluate falsy.
        if root is None:
            self._root = Node()
        else:
            self._root = root
        self._name = name

    @property
    def root(self):
        """The root node of the tree."""
        return self._root

    @root.setter
    def root(self, val):
        self._root = val

    @property
    def name(self):
        """Human-readable tree name (may be None)."""
        return self._name

    @name.setter
    def name(self, val):
        self._name = val

    def __str__(self):
        ret = f"<Tree '{self.name}'>"
        ret += _str__helper(node=self.root)
        return ret
|
19,562 | 31b0f07db3667f1e68773a722fa75215f086115e | def apple(x):
def apple(x):
    """Reply based on the square of ``x``: anything over 1000 is 'too hot'."""
    if int(x) ** 2 <= 1000:
        return 'Help yourself to a honeycomb Yorkie for the glovebox.'
    return "It's hotter than the sun!!"
|
19,563 | 13d7b372c4881d86f42ca7a0aacff64b73a4ca14 | #import json
#import pickle
#def my_serialize(my_list):
# f = open("a.txt", 'w', encoding='utf-8')
# json.dump(my_list, f, ensure_ascii=False)
#f.close()
#def my_deserialize():
# input a list consisted with integer
# try block to handle the exception
#try:
# my_list = []
# while True:
# my_list.append(int(input()))
#except:
#print("Invalid input, please input an integer.")
#------------------------------------
lst = [1, 2, 3, 4, 5]  # sample payload for the serialize/deserialize demo below
def serialize(l):
    """Serialize a list of integers to a ';'-separated string.

    Uses str.join, which naturally yields '' for an empty list -- this
    resolves the original "not work with empty list" concern and removes
    the manual trailing-';' trimming.
    """
    return ';'.join(str(i) for i in l)
def deserialize(s):
    """Inverse of serialize(): parse a ';'-separated string of integers.

    BUG FIX: an empty string now maps back to the empty list; previously
    ''.split(';') produced [''] and int('') raised ValueError.
    """
    if not s:
        return []
    return [int(part) for part in s.split(';')]
# input lst from user
serialized = serialize(lst)
print(serialized)
# write to file
# read from file
lst2 = deserialize(serialized)
# Round-trip check: both prints should show the same list.
print(lst)
print(lst2)
19,564 | 01b34a8738264d81c570af922ba31b7683cdbccd | # coding: utf-8
import numpy as np
import matplotlib.pylab as plt
def softmax(a):
    """Numerically stable softmax: shift by max(a) before exponentiating
    so that exp() cannot overflow."""
    shifted = a - np.max(a)
    exps = np.exp(shifted)
    return exps / np.sum(exps)
# Plot the softmax of an evenly spaced grid; with this many inputs each
# probability is small, hence the tight y-limits.
X = np.arange(-20.0, 20.0, 0.5)
Y = softmax(X)
print(X)
print(Y)
plt.plot(X, Y)
plt.ylim(-0.1, 0.5)
plt.show()
|
19,565 | 421211aa61738f70f5dafc336609053feeba985d | broker_url = "redis://222.29.81.198/14"
|
19,566 | afff90d42753a45faedff5cf3bcea286ec019b15 | ___ song(first, last=0
verses ''
___ number __ r..(r..(last, first + 1:
verses += verse(number) + '\n'
r.. verses
___ verse(number
r.. ''.j..([
"%s of beer on the wall, " % _bottles(number).capitalize(),
"%s of beer.\n" % _bottles(number),
_action(number),
_next_bottle(number),
])
___ _action(current_verse
__ current_verse __ 0:
r.. "Go to the store and buy some more, "
____
r.. "Take %s down and pass it around, " % (
"one" __ current_verse > 1 ____ "it"
)
___ _next_bottle(current_verse
r.. "%s of beer on the wall.\n" % _bottles(_next_verse(current_verse
___ _bottles(number
__ number __ 0:
r.. 'no more bottles'
__ number __ 1:
r.. '1 bottle'
____
r.. '_d bottles' % number
___ _next_verse(current_verse
r.. current_verse - 1 __ current_verse > 0 ____ 99
|
19,567 | 1cd565361ce0fbd27bca01738d95d0c95ff83c54 | import datetime
from django.test import TestCase
from django.urls import reverse
from django.utils import timezone
from blog.factories import PostFactory
from blog.models import Post
class PostTestCase(TestCase):
    """Unit tests for the Post model."""

    post: Post = None  # shared fixture, created once per test class

    @classmethod
    def setUpTestData(cls):
        """Set up test data."""
        cls.post = PostFactory()

    def test_post(self):
        """Test string representation"""
        self.assertTrue(isinstance(self.post, Post))
        self.assertEqual(f'{self.post.title}', self.post.__str__())

    def test_last_viewed(self):
        """Test updating of date when viewed."""
        last = self.post.last_viewed
        self.post.is_viewed()
        now = self.post.last_viewed
        self.assertGreater(now, last)

    def test_increment(self):
        """Test incrementing view."""
        previous_views = self.post.views
        self.post.is_viewed()
        current_views = self.post.views
        self.assertGreater(current_views, previous_views)

    def test_cretion_error(self):
        """Test that start publication date after end publication date raises error."""
        from django.core.exceptions import ValidationError
        with self.assertRaises(ValidationError):
            # End date one day before the start date must be rejected.
            PostFactory(
                end_publication=(timezone.now() - datetime.timedelta(days=1)),
                start_publication=(timezone.now()),
            )
class PostViewTestCase(TestCase):
    """Integration tests for the blog post views."""

    post: Post = None  # shared fixture, created once per test class

    @classmethod
    def setUpTestData(cls):
        """Set up test data."""
        cls.post = PostFactory(
            author__first_name='Peter',
            author__last_name='Mustermann',
            title='My test title',
            subtitle='A subtitle for the test post',
            views=10,
            last_viewed=(timezone.now() - datetime.timedelta(days=1)),
            is_active=True,
            activation_date=None
        )

    def test_post_detail_content(self):
        """Test post content presented."""
        url = reverse(
            'blog:post_detail',
            kwargs={'slug': self.post.slug}
        )
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
        self.assertTemplateUsed(response, 'blog/blog_detail.html')
        self.assertContains(response, self.post.body)

    def test_post_tag_content(self):
        """Test post content presented."""
        # NOTE(review): the URL name suggests a tag listing but the kwarg
        # passed is the post's slug -- confirm the route's expected argument.
        url = reverse(
            'blog:post_tag_list',
            kwargs={'slug': self.post.slug}
        )
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
        self.assertTemplateUsed(response, 'blog/blog_index.html')
|
19,568 | 65bd5ceb37a45934206f418debe22b134fedd297 | # coding=UTF-8
from string import Template
import random
from commandbase import BaseCommand
class GlitchCommand(BaseCommand):
    """Chat command that emits a random glitch-themed /me action."""

    def __init__(self):
        BaseCommand.__init__(self)
        self.command_mappings = ["glitch"]
        # Message templates; substitute() receives the triggering user's name.
        self.templates = [
            Template("pours out a ddddddrrrrrrrdrdrdrdrrriiiiiiiinnnnnnnknknknkkkk"),
            Template("starts editing dropbox image links"),
            Template("puts on some autechre"),
        ]

    def generate(self, name):
        """Return a /me message built from a randomly chosen template."""
        chosen = random.choice(self.templates)
        return "/me %s" % chosen.substitute(name=name)
|
19,569 | 7531ddcdbda63676ab8006e1914ededd02ee8f63 | data = []
count = 0
# Stream the file line by line, reporting progress every 1000 lines.
with open('reviews.txt', 'r') as f:
    for line in f:
        data.append(line)
        count +=1
        if count % 1000 == 0:
            print(len(data))
print('檔案讀取完了,總共有', len(data), '筆資料')

# Average comment length across all lines.
sum_len = 0
for d in data:
    sum_len += len(d)
print('全部留言平均長度為', sum_len / len(data))

# Keep only the short comments (fewer than 100 characters).
new = []
for d in data:
    if len(d) < 100:
        new.append(d)
print('一共有', len(new), '比留言長度小於100')
print(new[0])
print(new[23])
# print(data[0])
# print('-'*30)
# print(data[1])
19,570 | b4d5500ebf3dbc60cafa50538cdeec1d4eeac5ff | #Django
from django.db import models
from django.utils import timezone
class Facturacion(models.Model):
    """Invoice model (original: 'Modelo para generacion de facturas'):
    amounts, due dates and payment status for billing."""
    detalle = models.CharField(max_length=30, blank=True, null=False)
    descripcion = models.CharField(max_length=60, blank=True, null=False, default='')
    forma_pago = models.CharField(max_length=15, null=False)
    fecha_emision = models.DateField(null=False)
    fecha_vencimiento = models.DateField(null=False)
    monto_total = models.FloatField(null=False)
    monto_facturacion = models.FloatField(null=False)
    saldo_facturacion = models.FloatField(null=False)
    estado = models.CharField(max_length=10)

    def __str__(self):
        return self.detalle
class Gasto(models.Model):
    """Expense recorded by an employee while working on a project contract
    (original: 'Modelo que registra los gastos realizados por los empleados
    durante la realizacion de un proyecto')."""

    # Closed set of expense motives (stored value, human-readable label).
    VIATICOS = 'VIATICOS'
    COMBUSTIBLE = 'COMBUSTIBLE'
    LOGISTICA = 'LOGISTICA'
    HONORARIOS = 'HONORARIOS'
    ALQUILERES = 'ALQUILERES'
    ARANCELES = 'ARANCELES'
    OTROS = 'OTROS'
    MOTIVOS_CHOICES = [
        (VIATICOS, 'Viáticos por viajes'),
        (COMBUSTIBLE, 'Reposición de combustible'),
        (LOGISTICA, 'Materiales para logística'),
        (HONORARIOS, 'Honorarios profesionales'),
        (ALQUILERES, 'Alquileres'),
        (ARANCELES, 'Aranceles por plataformas'),
        (OTROS, 'Otros'),
    ]

    motivo = models.CharField(
        max_length=15,
        choices=MOTIVOS_CHOICES,
        default=OTROS)
    detalle = models.CharField(max_length=75, blank=True, null=False, default='')
    # BUG FIX: pass the callable itself, not timezone.now().  Calling it here
    # froze the default at process start-up (and regenerated a migration on
    # every run) instead of evaluating the date per row.
    fecha = models.DateField(null=False, default=timezone.now)
    gasto = models.FloatField(null=False, default=0)
    empleado = models.ForeignKey('gestion.Empleado', null=False, on_delete=models.CASCADE)
    contrato = models.ForeignKey('proyectos.Contrato', null=False, on_delete=models.CASCADE)

    def __str__(self):
        return self.motivo + ' - ' + self.empleado.nombre + ' ' + self.empleado.apellido + ' - ' + self.contrato.nombre

    def cargar_gasto(self, gasto):
        """Accumulate ``gasto`` onto the stored amount (caller must save())."""
        self.gasto = self.gasto + gasto
class Pago(models.Model):
    """Payment model (original: 'Modelo para generacion de Pagos a la
    consultora'): one installment towards an invoice."""
    detalle = models.CharField(max_length=30, blank=True, null=False)
    descripcion = models.CharField(max_length=60, blank=True, null=False, default='')
    monto = models.FloatField(null=False)
    nro_cuota = models.IntegerField()
    fecha = models.DateField(null=False)
    saldo = models.FloatField(null=False)
    estado = models.CharField(max_length=10)

    def __str__(self):
        return self.detalle
19,571 | 8be9f610196cf3f2dd6630bad3836c2423baaa42 | import pytest
from coordinator.api.models import ReleaseNote
from coordinator.api.factories.release import ReleaseFactory
from coordinator.api.factories.release_note import ReleaseNoteFactory
from coordinator.api.factories.study import StudyFactory
ALL_RELEASE_NOTES = """
query (
$author: String,
$study: ID,
$release: ID,
$createdBefore: Float,
$createdAfter: Float,
$orderBy:String
) {
allReleaseNotes(
author: $author,
study: $study,
release: $release,
createdBefore: $createdBefore,
createdAfter: $createdAfter,
orderBy: $orderBy
) {
edges {
node {
id
kfId
uuid
author
description
createdAt
}
}
}
}
"""
@pytest.mark.parametrize(
    "user_type,expected",
    [("admin", 30), ("dev", 30), ("user", 20), ("anon", 10)],
)
def test_list_all_permissions(db, test_client, user_type, expected):
    """
    ADMIN - Can query all release notes
    DEV - Can query all release notes
    USER - Can query release notes from published releases and releases that
           their studies are in
    ANON - Can only query release notes from published release notes
    """
    # 10 notes in a staging release tied to the user's study, 10 in a merely
    # staged release, 10 in a published release (30 total).
    study = StudyFactory(kf_id="SD_00000001")
    release_study = ReleaseFactory(state="staging", studies=[study])
    release_staged = ReleaseFactory(state="staged")
    release_pub = ReleaseFactory(state="published")
    # NOTE(review): the three batch variables below are never read (and the
    # last two share the misspelled name 'releases_notes'); only the side
    # effect of creating the rows matters.
    release_notes = ReleaseNoteFactory.create_batch(10, release=release_staged)
    releases_notes = ReleaseNoteFactory.create_batch(10, release=release_pub)
    releases_notes = ReleaseNoteFactory.create_batch(
        10, release=release_study, study=study
    )
    client = test_client(user_type)
    resp = client.post("/graphql", data={"query": ALL_RELEASE_NOTES})
    # Test that the correct number of release notes are returned
    assert len(resp.json()["data"]["allReleaseNotes"]["edges"]) == expected
|
19,572 | 01b65dd0d085fc7316ba958e7e8436dd4fac4ec9 | import heapq
class Solution:
    """LeetCode 253: minimum number of meeting rooms."""

    def minMeetingRooms(self, intervals: List[List[int]]) -> int:
        """Sweep meetings by start time, keeping active end times in a
        min-heap; the heap's peak size is the number of rooms needed."""
        if not intervals:
            return 0
        by_start = sorted(intervals, key=lambda iv: iv[0])
        busy_until = [by_start[0][1]]  # min-heap of in-progress end times
        rooms = 1
        for meeting in by_start[1:]:
            # Free every room whose meeting has ended by this start time.
            while busy_until and busy_until[0] <= meeting[0]:
                heapq.heappop(busy_until)
            heapq.heappush(busy_until, meeting[1])
            rooms = max(rooms, len(busy_until))
        return rooms
|
19,573 | 6bd549eb474e35da8b2ee808fcacfaaa6117950c | from django.conf.urls.defaults import *
# Legacy Django (pre-1.4) URLconf for the reg6 staff area; patterns() with
# dotted-path view strings is a style removed in modern Django.
urlpatterns = patterns('',
    (r'^$', 'scale.reg6.staff.views.index'),
    (r'^checkin/$', 'scale.reg6.staff.views.CheckIn'),
    (r'^finish_checkin/$', 'scale.reg6.staff.views.FinishCheckIn'),
    (r'^cash_payment/$', 'scale.reg6.staff.views.CashPayment'),
)
|
19,574 | a7a32395698073339816d6287802d63937f4e27f | # Get Binary Representation of PIL Image Without Saving
# BUG FIX: PIL expects the registered format name "JPEG"; save(s, "jpg")
# raises KeyError: 'jpg'.  NOTE(review): this snippet is Python-2 era --
# on Python 3 an in-memory image must be written to io.BytesIO, and
# StringIO.StringIO does not exist.
s = StringIO.StringIO()
window.save(s, "JPEG")
encodedWindow = base64.b64encode(s.getvalue())
|
19,575 | a9c1014fa260962ad231ed1f63c0f02c7758d723 | l1 = input ('informe o 1º lado do quadrado:')
l2 = input ('informe o 2º lado do quadrado:')
print('--------------------------------------')
# Area of the rectangle (l1 is read on the preceding line), then doubled.
a = int(l1) * int(l2)
da = 2 * a
print('o dobro da área é: ',da)
|
19,576 | b4869442911f63e5dd5f5de1c2123e631830e1a5 | i=1
# Python 2 syntax (print statement): 3-times table from 1 to 10.
while i<=10 :
    print 3,'x', i,'=', 3*i
    i=i+1
print 'done'
|
19,577 | c3aae3ea7fa4fe6c5272db963524919cec448bfe | # 题目:一个 5 位数,判断它是不是回文数。即 12321 是回文数,个位与万位相同,十位与千位相同。 |
19,578 | c67ddb66a4c760453336e1762bb59eef4f33c68c | import sounddevice as sd
import numpy as np
from scipy import floor, ceil
def gaussian(x, mu, sig):
    """Unnormalised Gaussian bell curve exp(-(x - mu)^2 / (2 * sig^2))."""
    exponent = -((x - mu) ** 2) / (2 * sig ** 2)
    return np.exp(exponent)
fs = 44100  # sample rate in Hz
f = 100  # tone frequency in Hz
duration = 0.03 # secs
t = np.arange(44100 * duration) / fs
var = 80  # width factor for the Gaussian envelope
sin1 = np.sin(2 * np.pi * f * t)
top = 0.2  # noise amplitude bound
ran = np.random.uniform(0 - top, top, int(floor(44100 * duration)))
src = sin1 # + ran
# Short envelope-shaped "thump": windowed sine plus wider-windowed noise.
# NOTE(review): floor/ceil are imported from scipy above; those re-exports
# were removed in modern SciPy -- math.floor/ceil would be the safe choice.
sound = src * gaussian(t, 0.05 / fs, 1.0 / fs * var) + ran * gaussian(t, 0.05 / fs, 1.0 / fs * var * 3)
# sd.play(sound, fs)
# plt.plot(t,sound)
# plt.show()
target = 3  # seconds of output to fill
rumble = np.zeros([1, target * fs])
i = 0
print(len(rumble[0]))
a = 0
# Overlap-add the thump repeatedly, stepping back ~1/15 of its length each
# pass; 'a' alternates between two (currently identical) overlap branches.
while i < len(rumble[0]) - len(sound):
    for sample in sound:
        rumble[0, i] += sample
        i += 1
    if a == 0:
        i -= int(ceil(len(sound) / 15))
        a = 1
    else:
        i -= int(ceil(len(sound) / 15))
        a = 0
# plt.plot(rumble.flatten())
# plt.show()
# Play the result at normal, 1.5x and 3x sample rates.
sd.play(rumble.flatten())
sd.wait()
sd.play(rumble.flatten(), fs * 1.5)
sd.wait()
sd.play(rumble.flatten(),fs*3)
sd.wait()
|
19,579 | 1fe4d5ae5270bdc4ad7fb3a47e53048278d95c66 | # -*- coding: utf-8 -*-
"""
Policy listing helpers.
:author: Joe Joyce <joe@decafjoe.com>
:copyright: Copyright (c) Joe Joyce and contributors, 2016-2019.
:license: BSD
"""
from safe.cmd.list.util import NONE, nullable, print_detail, print_table, \
sorted_by_name
def get_policy_specs(policy):
    """
    Return short summary of secret generation params for ``policy``.

    :param safe.model.Policy policy: Policy for which to get specs
    :return: Human-friendly description of the policy
    :rtype: :class:`str`
    """
    frequency = policy.frequency
    if frequency < 1:
        change = 'never changed'
    elif frequency == 1:
        change = 'changed every day'
    else:
        change = 'changed every %i days' % frequency
    return '%i chars from %s, %s' % (policy.length, policy.generator, change)
def list_policies(policies, verbosity):
    """
    Print policy information in ``verbosity`` level of detail.

    :param policies: SQLAlchemy query containing policies to be printed
    :type policies: :class:`sqlalchemy.orm.query.Query`
    :param int verbosity: Must be at least 0, anything past 1 is ignored,
                          higher means more information
    """
    print()
    if verbosity < 1:
        # Terse mode: one table row per policy.
        rows = []
        for p in sorted_by_name(policies):
            rows.append((p.name, p.generator, p.length, p.frequency))
        print_table(('NAME', 'GEN', 'LEN', 'FREQ'), rows)
    else:
        # Verbose mode: a detail card per policy.
        for policy in sorted_by_name(policies):
            chars = NONE
            if policy.disallowed_characters:
                # Show the banned characters in a stable, sorted order.
                chars = ''.join(sorted(policy.disallowed_characters))
            print_detail(
                policy.name, (
                    ('description', nullable(policy.description)),
                    ('specs', get_policy_specs(policy)),
                    ('∅ chars', chars),
                ),
            )
    print()
|
19,580 | a4c578761007e6d511c7a798274fb676f3c7314a | from scrapy.spider import BaseSpider
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import HtmlXPathSelector
from crawler.items import CrawlerData
from scrapy.http import Request
from time import sleep
import re
import hashlib
class YeBhiSpider(CrawlSpider):
    """Crawl www.yebhi.com product pages and emit CrawlerData items.

    Python 2 / legacy-scrapy code (HtmlXPathSelector, ``print`` statements).
    Every extraction is wrapped in a bare ``except`` so any scraping failure
    falls back to a sentinel value instead of aborting the crawl.
    """
    name = "YeBhiSpider"
    allowed_domains = ["yebhi.com"]
    start_urls = [
        "http://www.yebhi.com"
    ]
    def parse(self, response):
        """Extract one product item from ``response``, then enqueue every
        link on the page (once joined to the site root, once verbatim).

        The item is only yielded when both name and price were found
        (``toyield`` flag); identifier is the MD5 of the page URL.
        """
        hxs = HtmlXPathSelector(response)
        item = CrawlerData()
        m = hashlib.md5()
        m.update(response.url)
        item['identifier'] = m.hexdigest()
        item['url'] = response.url
        #name: String
        item['siteName'] = 'yebhi'
        # toyield stays 1 only if the mandatory fields (name, price) parse.
        toyield=1
        try:
            #name: String
            item['name'] = hxs.select("//div[@class='product-desc']/text()").extract()[0]
            #name: Integer
            # Drop the '.00' suffix, then every non-digit, e.g. 'Rs. 1,299.00' -> 1299.
            item['price'] = int(re.sub('\D','',re.sub('\.00','',hxs.select("//span[@class='price-offer']").extract()[0])))
        except:
            toyield=0
        try:
            #name: StringList
            item['images']=hxs.select("//div[@class='product-thumbnail']/a/img/@src").extract()
        except:
            item['images']=['wrong']
        try:
            #name: Integer
            # NOTE(review): despite the 'Integer' note above this stores a
            # cleaned string; -1 is the failure sentinel.
            item['availability']= str(re.sub('[^ a-zA-Z0-9]','',hxs.select("//div[@class='product-instock']/text()").extract()[0]))
        except:
            item['availability']= -1
        try:
            #name: String
            item['brand']=hxs.select("//div[@class='middle-content-bg']/div[2]/div[1]/a/img/@src").extract()
        except:
            item['brand']=' '
        try:
            #name: String
            item['upc']=hxs.select("//div[@class='middle-content-bg']/div[2]/div[1]/div[@class='product-code']/text()").extract()
        except:
            item['upc']=''
        specs={}
        upclist=[]
        try:
            #name:Integer
            # Spec tables: main tab div, then a book-style table as fallback.
            # NOTE(review): tableothers duplicates the tabDiv1 selector, so
            # the len()==0 fallback can never add anything new — confirm
            # whether a different selector was intended.
            tables=hxs.select("//div[@id='tabDiv1']")
            tablesbooks=hxs.select("//table[@class='fk-specs-type1']")
            tableothers=hxs.select("//div[@id='tabDiv1']")
            tables.extend(tablesbooks)
            if len(tables)==0:
                tables.extend(tableothers)
            for table in tables:
                rows=table.select("tr")
                for row in rows:
                    # NOTE(review): the three nested fallbacks below use an
                    # identical XPath, so only the innermost (relative td[1])
                    # fallback ever adds value — likely meant to differ.
                    try:
                        key=row.select("//div[@id='tabDiv1']//td[1]/text()").extract()[0].strip()
                    except:
                        try:
                            key=row.select("//div[@id='tabDiv1']//td[1]/text()").extract()[0].strip()
                        except:
                            try:
                                key=row.select("//div[@id='tabDiv1']//td[1]/text()").extract()[0].strip()
                            except:
                                try:
                                    keyas=row.select("td[1]//text()").extract()
                                    for keya in keyas:
                                        if keya.strip()!='':
                                            key=keya
                                        else:
                                            key=''
                                except:
                                    key=''
                    try:
                        val=row.select("//div[@id='tabDiv1']//td[2]//text()").extract()[0]
                    except:
                        # NOTE(review): val is unconditionally reset to ''
                        # right after the fallback select, so rows that hit
                        # this branch never populate specs — confirm intent.
                        val=row.select("td[2]//text()").extract()[0]
                        val=''
                    key=re.sub(":","",key)
                    if key!='' and val!='':
                        specs[key]=val
                    if key=="ISBN":
                        item['upc']=val
                    if "ISBN" in key:
                        upclist.extend(val.split(','))
            item['upclist']=upclist
            item['specs']=specs
        except:
            item['specs']=specs
        # Site-wide constants for every yebhi item.
        item['shippingCost'] = 'Free'
        item['barcode'] = item['upc']
        item['productID'] = item['identifier']
        item['siteLogo'] ='/template/yebhi/images/yebhi.com_logo.jpg'
        item['siteID'] = 'yebhi'
        item['supportEMIInstallment'] = False #if total order is more then 4000
        item['supportCashOnDelivery'] = True #update this to check if cash on delivery is available or not for that particular product - cash on delivery is not available for too costly products
        item['supportReplacement'] = '30days'
        item['cities']=[]
        if toyield:
            item['name'] = re.sub('[^ a-zA-Z0-9]',' ',item['name']).strip()
            yield item
        # Follow every anchor twice: relative to the site root, then verbatim.
        for url in hxs.select('//a/@href').extract():
            try:
                yield Request(self.start_urls[0]+url, callback=self.parse)
            except:
                print "Unexpected error:"
        for url in hxs.select('//a/@href').extract():
            try:
                yield Request(url, callback=self.parse)
            except:
                print "Unexpected error:"
        sleep(2)
19,581 | 5c10ca960e324e4945b22ea739cceda5909a28d5 | from slt.theme.tests.base import IntegrationTestCase
from slt.theme.browser.view import BaseView
import mock
class BaseViewTestCase(IntegrationTestCase):
    """Integration tests for slt.theme's BaseView browser view."""

    def test_subclass(self):
        """BaseView must inherit from collective.base's BaseFormView."""
        from collective.base.view import BaseFormView
        self.assertTrue(issubclass(BaseView, BaseFormView))

    def test___call__(self):
        """Calling the view renders (and returns) its template."""
        view = self.create_view(BaseView)
        view.template = mock.Mock()
        self.assertEqual(view(), view.template())

    def test_title(self):
        """title() proxies the context's Title()."""
        view = self.create_view(BaseView)
        view.context.Title = mock.Mock(return_value='TITLE')
        self.assertEqual(view.title(), 'TITLE')

    def test_description(self):
        """description() proxies the context's Description()."""
        view = self.create_view(BaseView)
        view.context.Description = mock.Mock(return_value='DESCRIPTION')
        self.assertEqual(view.description(), 'DESCRIPTION')
19,582 | ee2b222fd706debffd9a565354e455ee5a45c285 | # # These functions need to be implemented
# user = "secret"
# password = "noPow3r"
# endpoint = "bootcamp-tht.sre.wize.mx"
# DB = "bootcamp_tht"
import json
from logging import RootLogger
import jwt
from jwt import algorithms
import hashlib
import mysql.connector
from flask import Flask, render_template
from cryptography.hazmat.primitives import serialization
# read and load the key
def validation():
    """Fetch the 'admin' user row from MySQL and verify a hard-coded password.

    Returns a JWT payload dict ``{"role": <role>}`` when the salted SHA-512
    of the password matches the stored hash; implicitly returns None when it
    does not.

    SECURITY(review): DB host/user/password are hard-coded here (and the
    candidate password below) — these must come from configuration/secrets.
    NOTE(review): if no row with username 'admin' exists, ``salt``/``passw``/
    ``role`` are never bound and the code below raises UnboundLocalError.
    """
    mydb = mysql.connector.connect(
        host="bootcamp-tht.sre.wize.mx",
        user="secret",
        password="noPow3r",
        database="bootcamp_tht"
    )
    mycursor = mydb.cursor()
    mycursor.execute("SELECT * FROM users")
    myresult = mycursor.fetchall()
    print(type(myresult))
    # Row layout observed below: (username, password_hash, salt, role).
    for row in myresult:
        if row[0] == "admin":
            role = row[3]
            salt = row [2]
            passw = row[1]
            usern = row [0]
            print("this is role",role)
            print("this is salt",salt)
            print("this is pass",passw)
            print("this is userame",usern)
            break
        else:
            continue
    #to get 512 hash from passw give from user
    salty = str(salt)
    usr_pass = "secret"
    # hash( plaintext + salt ) with SHA-512, hex-encoded, compared to the
    # stored hash.
    string = (usr_pass+salty)
    salt_f = hashlib.sha512(string.encode()).hexdigest()
    print(len(salt_f))
    print(len(salt))
    if salt_f == passw:
        print("yes")
        payload = {"role": role}
        return payload
# Module-level setup: validate the admin user once at import time and keep
# the resulting JWT payload around for token generation.
# SECURITY(review): the signing secret is hard-coded — move to env/config.
test = validation()
payload_data = test
secret = "my2w7wjd7yXF64FIADfJxNs1oupTGAuW"
class Token:
    """Issues JWTs signed (HS256) with the module-level ``secret``."""

    def generate_token(self, username, password):
        """Return a signed JWT string carrying the module-level payload.

        NOTE(review): ``username``/``password`` are currently unused — the
        payload comes from the module-level ``payload_data``; confirm whether
        per-user payloads were intended.
        """
        token_jwt = jwt.encode(payload_data, secret, algorithm="HS256")
        # BUGFIX: PyJWT < 2.0 returns bytes, PyJWT >= 2.0 returns str.  The
        # original unconditionally called .decode('utf-8'), which raises
        # AttributeError on PyJWT >= 2.0.  Normalize to str either way.
        if isinstance(token_jwt, bytes):
            token_jwt = token_jwt.decode('utf-8')
        return token_jwt
class Restricted:
    """Handler for the protected endpoint.

    NOTE: ``authorization`` is accepted but not yet validated.
    """

    def access_data(self, authorization):
        """Return the protected-data message regardless of authorization."""
        message = 'You are under protected data.'
        return message
|
19,583 | 5a2f7b0e644fa70c0f3259c03a487a84fdba6728 | """
Module contains two classes, Vertex, and Edge, which presents two side classes for graph
"""
__author__ = 'Acko'
class Vertex(object):
    """One vertex of a graph.

    Holds a ``_key`` identifying the vertex, optional bound ``_data``, a
    ``_directed`` flag, and two edge lists ``_outgoing``/``_incoming``.
    For an undirected vertex both names reference the SAME list, so every
    ``where_to`` distinction collapses naturally.
    """
    # CONSTANTS — used as the `where_to` direction selector in the methods.
    OUTGOING, INCOMING = 1, 2
    __slots__ = ['_key', '_data', '_directed', '_outgoing', '_incoming']
    def __init__(self, key, data=None, directed=False):
        """Initialize the vertex.

        Args:
            key: identifier of this vertex
            data: optional payload bound to the vertex (default None)
            directed (bool): whether edges distinguish in/out (default False)
        """
        self._key = key
        self._data = data
        self._directed = directed
        self._outgoing = []
        # Undirected: incoming aliases outgoing (single shared edge list).
        self._incoming = [] if directed else self._outgoing
    def get_key(self):
        """Return this vertex's key."""
        return self._key
    def has_data(self):
        """Return True if any payload is bound to this vertex."""
        return self._data is not None
    def get_data(self):
        """Return the bound payload, or None if there is none."""
        return self._data
    def set_data(self, data):
        """Bind ``data`` as this vertex's payload."""
        self._data = data
    def is_directed(self):
        """Return True if this vertex distinguishes in/out edges."""
        return self._directed
    def add_link(self, link, where_to=OUTGOING):
        """Attach an existing Edge to this vertex's outgoing or incoming list.

        Validates that this vertex really is the link's corresponding
        endpoint and that the far vertex is not already connected.

        Raises:
            TypeError: if ``link`` is not an Edge instance
            Exception: wrong endpoint, or already connected
        """
        if not isinstance(link, Edge):
            raise TypeError("Link must be edge instance")
        if where_to == Vertex.OUTGOING:
            if not link.get_start_vertex() is self:
                raise Exception("Wrong Link connecting")
            if link.get_end_vertex() in self.get_all_connected_nodes():
                raise Exception("Already connected")
            self._outgoing.append(link)
        elif where_to == Vertex.INCOMING:
            if not link.get_end_vertex() is self:
                raise Exception("Wrong Link connecting")
            if link.get_start_vertex() in self.get_all_connected_nodes(where_to=Vertex.INCOMING):
                raise Exception("Already connected")
            self._incoming.append(link)
        # NOTE(review): any other where_to value is silently ignored.
    def connect_to_node(self, vertex, where_to=OUTGOING):
        """Create an Edge between self and ``vertex`` and register it on both.

        ``where_to`` selects the direction: OUTGOING makes self the start
        vertex, INCOMING makes self the end vertex.

        Raises:
            TypeError: if ``vertex`` is not a Vertex instance
        """
        if not isinstance(vertex, Vertex):
            raise TypeError("Graph vertex can only connect to other Graph vertex")
        if where_to == Vertex.OUTGOING:
            link = Edge(self, vertex)
            self.add_link(link, Vertex.OUTGOING)
            vertex.add_link(link, Vertex.INCOMING)
        elif where_to == Vertex.INCOMING:
            link = Edge(vertex, self)
            self.add_link(link, Vertex.INCOMING)
            vertex.add_link(link, Vertex.OUTGOING)
    def get_number_of_edges(self, where_to=OUTGOING):
        """Return the edge count in the selected direction.

        For an undirected vertex the single shared list length is returned
        regardless of ``where_to``.
        """
        if not self._directed:
            return len(self._outgoing)
        if where_to == Vertex.OUTGOING:
            return len(self._outgoing)
        elif where_to == Vertex.INCOMING:
            return len(self._incoming)
    def get_all_edges(self, where_to=OUTGOING):
        """Return the (live, not copied) edge list for the given direction."""
        if where_to == Vertex.OUTGOING:
            return self._outgoing
        elif where_to == Vertex.INCOMING:
            return self._incoming
    def get_edge(self, vertex, where_to=OUTGOING):
        """Return the Edge linking self to ``vertex``, or None if absent.

        NOTE(review): an unrecognized ``where_to`` leaves ``edge_list`` as
        None and the for-loop raises TypeError — confirm whether that should
        be validated explicitly.
        """
        edge_list = None
        if where_to == Vertex.OUTGOING:
            edge_list = self._outgoing
        elif where_to == Vertex.INCOMING:
            edge_list = self._incoming
        for edge in edge_list:
            if edge.return_other_side(self) is vertex:
                return edge
        return None
    def get_all_connected_nodes(self, where_to=OUTGOING):
        """Return a new list of vertexes linked to self in the given direction."""
        list_of_all_nodes = []
        if not self._directed or where_to == Vertex.OUTGOING:
            for edge in self._outgoing:
                list_of_all_nodes.append(edge.return_other_side(self))
        elif where_to == Vertex.INCOMING:
            for edge in self._incoming:
                list_of_all_nodes.append(edge.return_other_side(self))
        return list_of_all_nodes
    def disconnect_node(self, vertex, true=True):
        """Remove the connection between self and ``vertex``.

        ``vertex`` may be a Vertex instance or a key (looked up among
        connected nodes, outgoing first).  When ``true`` is True the
        disconnect is propagated to the other vertex as well (the recursive
        call passes the default, and a KeyError there is swallowed since the
        far side may already be clean).

        NOTE(review): only the last matching edge index found is removed, so
        duplicate parallel edges (normally prevented by add_link) would not
        all be cleaned up.

        Raises:
            KeyError: vertex/key not found among connected nodes
        """
        if not isinstance(vertex, Vertex):
            # Treat the argument as a key: search outgoing, then incoming.
            for node in self.get_all_connected_nodes(where_to=Vertex.OUTGOING):
                if node.get_key() == vertex:
                    vertex = node
                    break
            if not isinstance(vertex, Vertex):
                for node in self.get_all_connected_nodes(where_to=Vertex.INCOMING):
                    if node.get_key() == vertex:
                        vertex = node
                        break
            if not isinstance(vertex, Vertex):
                raise KeyError("Vertex with given key not found in connected nodes")
        else:
            if vertex not in self.get_all_connected_nodes(where_to=Vertex.OUTGOING)\
                    and vertex not in self.get_all_connected_nodes(where_to=Vertex.INCOMING):
                raise KeyError("Vertex not found in connected nodes")
        # Remove the connecting edge from the outgoing list (shared list for
        # undirected vertexes), then from incoming when directed.
        out_index = None
        for edge in self._outgoing:
            if edge.return_other_side(self) is vertex:
                out_index = self._outgoing.index(edge)
        if out_index is not None:
            self._outgoing.pop(out_index)
        if self._directed:
            in_index = None
            for edge in self._incoming:
                if edge.return_other_side(self) is vertex:
                    in_index = self._incoming.index(edge)
            if in_index is not None:
                self._incoming.pop(in_index)
        if true:
            try:
                vertex.disconnect_node(self)
            except KeyError:
                return
    def __hash__(self):
        """Identity hash (stable per object; required since __slots__ classes
        defining custom behavior still need hashability for set/dict use)."""
        return hash(id(self))
class Edge(object):
    """A link between two graph vertexes.

    Stores the start vertex, the end vertex, and an optional ``_data``
    payload bound to the link.
    """

    def __init__(self, start_vertex, end_vertex, data=None):
        """Create an edge from ``start_vertex`` to ``end_vertex``.

        Args:
            start_vertex: Vertex at the beginning of the link
            end_vertex: Vertex at the end of the link
            data: optional payload bound to the edge (default None)

        Raises:
            TypeError: if either endpoint is not a Vertex instance
        """
        for endpoint in (start_vertex, end_vertex):
            if not isinstance(endpoint, Vertex):
                raise TypeError("Edge can connect only Vertex instances")
        self._start_vertex = start_vertex
        self._end_vertex = end_vertex
        self._data = data

    def get_start_vertex(self):
        """Return the vertex at the beginning of this link."""
        return self._start_vertex

    def get_end_vertex(self):
        """Return the vertex at the end of this link."""
        return self._end_vertex

    def has_data(self):
        """Return True when a payload is bound to this edge."""
        return self._data is not None

    def get_data(self):
        """Return the bound payload, or None when there is none."""
        return self._data

    def set_data(self, data):
        """Bind ``data`` as this edge's payload."""
        self._data = data

    def return_other_side(self, vertex):
        """Return the vertex on the opposite side of ``vertex``.

        Raises:
            TypeError: if ``vertex`` is not one of this edge's endpoints
        """
        if vertex is self._start_vertex:
            return self._end_vertex
        if vertex is self._end_vertex:
            return self._start_vertex
        raise TypeError("Wrong call")
19,584 | e7f6d359113483c947997195a492b3e7321734a5 | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def increasingBST(self, root):
        """Rebuild the tree as a right-leaning chain in in-order sequence.

        :type root: TreeNode
        :rtype: TreeNode
        """
        values = []

        def inorder(node):
            # Left subtree, node value, right subtree.
            if node is None:
                return
            inorder(node.left)
            values.append(node.val)
            inorder(node.right)

        inorder(root)
        # Chain the collected values into fresh right-only nodes.
        head = TreeNode(values[0])
        tail = head
        for value in values[1:]:
            tail.right = TreeNode(value)
            tail = tail.right
        return head
|
19,585 | 626ee249a3b24f5aa58c45d19647dc6b5a49dae0 | class Solution:
def threeSum(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
res = set()
_nums = {}
# Counter 速度慢
for n in nums:
if n not in _nums:
_nums[n] = 1
else:
_nums[n] = _nums[n] + 1
n_s = list(set(nums))
n_s.sort()
for i, n1 in enumerate(n_s):
_nums[n1] = _nums[n1] - 1
for j, n2 in enumerate(n_s[i:]):
if _nums[n2] == 0: continue
n3 = -(n1+n2)
if n3 >= n2 and _nums.get(n3, 0) > (1 if n2 == n3 else 0):
res.add((n1, n2, n3))
_nums[n1] = _nums[n1] + 1
return list(res) |
19,586 | 23f9592700b64973943e1cbd0835822841f60e00 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-12-21 19:14
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (care_point 0002): widen contract dates and
    decimal fields.

    NOTE(review): the hard-coded '2018-12-21' defaults freeze the generation
    date into the schema — a callable (e.g. date.today) was likely intended.
    NOTE(review): ``max_length`` has no effect on DecimalField, and
    ``DecimalValidator`` is passed as a class (it normally must be
    instantiated with max_digits/decimal_places) — confirm before reuse;
    do not edit applied migrations in place.
    """

    dependencies = [
        ('care_point', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='contract',
            name='date_from',
            field=models.DateField(blank=True, default='2018-12-21', null=True),
        ),
        migrations.AlterField(
            model_name='contract',
            name='date_to',
            field=models.DateField(blank=True, default='2018-12-21', null=True),
        ),
        migrations.AlterField(
            model_name='decision',
            name='charge',
            field=models.DecimalField(decimal_places=2, max_digits=6, max_length=30, validators=[django.core.validators.DecimalValidator]),
        ),
        migrations.AlterField(
            model_name='decision',
            name='hours',
            field=models.DecimalField(decimal_places=2, max_digits=6, max_length=4, validators=[django.core.validators.DecimalValidator]),
        ),
        migrations.AlterField(
            model_name='decision',
            name='percent_payment',
            field=models.DecimalField(decimal_places=2, max_digits=6, max_length=5, validators=[django.core.validators.DecimalValidator]),
        ),
        migrations.AlterField(
            model_name='worksheet',
            name='date',
            field=models.DateField(blank=True, default='2018-12-21', null=True),
        ),
    ]
|
19,587 | 229045a578144e1db566c27d52f6581ebb363368 | #Recommender Systems
#TWO Types:
#1,Content-Based
#2,Collaborative Filtering(CF):Memory Based CF and Model-Based CF
#Recommender Systems
#Import Libraries
import numpy as np
import pandas as pd
#Get the Data
import os
os.getcwd()
os.chdir("C:\\Users\\abhishek.b.jaiswal\\Desktop\\DataScience\\sem 2\\BD 3\\codes")
os.getcwd()
column_names = ['user_id', 'item_id', 'rating', 'timestamp']
df = pd.read_csv('u.data', sep='\t', names=column_names)
print(df.head())
movie_titles = pd.read_csv("Movie_Id_Titles")
movie_titles.head()
#We can merge them together:
df = pd.merge(df,movie_titles,on='item_id')
print(df.head())
#EDA
#Visualization Imports
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style('white')
#Let's create a ratings dataframe with average rating and number of ratings:
df.groupby('title')['rating'].mean().sort_values(ascending=False).head()
df.groupby('title')['rating'].count().sort_values(ascending=False).head()
#Data Frame
ratings = pd.DataFrame(df.groupby('title')['rating'].mean())
ratings.head()
ratings['num of ratings'] = pd.DataFrame(df.groupby('title')['rating'].count())
ratings.head()
#Plot
plt.figure(figsize=(10,4))
ratings['num of ratings'].hist(bins=70)
plt.figure(figsize=(10,4))
ratings['rating'].hist(bins=70)
sns.jointplot(x='rating',y='num of ratings',data=ratings,alpha=0.5)
#Recommending Similar Movies
moviemat = df.pivot_table(index='user_id',columns='title',values='rating')
moviemat.head()
#Most rated movie:
ratings.sort_values('num of ratings',ascending=False).head(10)
#Let's choose two movies: starwars, a sci-fi movie. And Liar Liar, a comedy.
ratings.head()
#Now let's grab the user ratings for those two movies:
starwars_user_ratings = moviemat['Star Wars (1977)']
liarliar_user_ratings = moviemat['Liar Liar (1997)']
starwars_user_ratings.head()
#We can then use corrwith() method to get correlations between two pandas series:
similar_to_starwars = moviemat.corrwith(starwars_user_ratings)
similar_to_liarliar = moviemat.corrwith(liarliar_user_ratings)
#Let's clean this by removing NaN values and using a DataFrame instead of a series:
corr_starwars = pd.DataFrame(similar_to_starwars,columns=['Correlation'])
corr_starwars.dropna(inplace=True)
corr_starwars.head()
corr_starwars.sort_values('Correlation',ascending=False).head(10)
#Let's fix this by filtering out movies that have less than 100 reviews (this value was chosen based off the histogram from earlier).
corr_starwars = corr_starwars.join(ratings['num of ratings'])
corr_starwars.head()
#Now sort the values and notice how the titles make a lot more sense:
corr_starwars[corr_starwars['num of ratings']>100].sort_values('Correlation',ascending=False).head()
#Now the same for the comedy Liar Liar:
corr_liarliar = pd.DataFrame(similar_to_liarliar,columns=['Correlation'])
corr_liarliar.dropna(inplace=True)
corr_liarliar = corr_liarliar.join(ratings['num of ratings'])
corr_liarliar[corr_liarliar['num of ratings']>100].sort_values('Correlation',ascending=False).head()
|
19,588 | 3059538ff53587ffe5de2c1fb6bac4341d2f4146 | # Generated by Django 2.2.6 on 2019-11-04 00:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("movies", "0005_movie_director"),
]
operations = [
migrations.AlterField(
model_name="movie",
name="duration",
field=models.DurationField(blank=True, null=True),
),
]
|
19,589 | bae2cd370f733331eb4e8964cde06c7714a98cf3 | import reachable_ips
import is_ssh
import schedule
import time
def job():
    """Scan each configured subnet for SSH-reachable hosts and write the
    result to one ``ips_<network>.txt`` file per subnet.

    The file content is the ``str()`` of the IP list with the surrounding
    brackets stripped (e.g. ``'a.b.c.d', 'a.b.c.e'``).

    NOTE(review): the '*' from the network pattern ends up in the file name;
    fine on POSIX, invalid on Windows — confirm the target platform.
    """
    networks = ["192.168.75.*", "192.168.13.*", "192.168.10.*", "192.168.12.*"]
    for network in networks:
        ip_list = is_ssh.ssh_ips([network])
        # "[1, 2]" -> "1, 2": strip only the list repr's outer brackets.
        payload = str(ip_list).strip('[]')
        # BUGFIX: use a context manager so the file is closed even if the
        # write raises (the original open/close pair leaked on error).
        with open("ips_" + network + ".txt", "w") as out:
            out.write(payload)
# Run the scan once a day at 16:38, then poll the scheduler forever.
schedule.every().day.at("16:38").do(job,)
while True:
    schedule.run_pending()
    time.sleep(60) # wait one minute between scheduler polls
|
19,590 | 6599b037e8a251d78acc45af893e67c8e6f38286 | from sys import exit
from random import randint
from textwrap import dedent
"""
“Aliens have invaded a space ship and our hero has to go through a maze of
rooms defeating them so he can escape into an escape pod to the planet
below. The game will be more like a Zork or Adventure type game with text
outputs and funny ways to die. The game will involve an engine that runs a
map full of rooms or scenes. Each room will print its own description when
the player enters it and then tell the engine what room to run next out of the
map."""
class Scene(object):
    """Abstract base class for game scenes.

    Subclasses override enter() to run the scene and return the map key of
    the next scene.
    """

    def enter(self):
        """Run the scene.  Base implementation does nothing (returns None)."""
        pass
class Engine(object):
    """Drives the game: runs scenes from a map until the final scene."""

    def __init__(self, scene_map):
        """Remember the map used to resolve scene names to Scene objects."""
        self.scene_map = scene_map

    def play(self):
        """Main game loop.

        Runs each scene in turn, following the scene-name each enter()
        returns, and finally enters the 'finished' scene once reached.
        """
        current = self.scene_map.opening_scene()
        last = self.scene_map.next_scene("finished")
        while current != last:
            current = self.scene_map.next_scene(current.enter())
        current.enter()
class Death(Scene):
    """Scene shown when the player dies — should be something funny.

    Gameplay not implemented yet; enter() currently returns None.
    """

    def enter(self):
        """Placeholder — no death narration yet."""
        pass
class CentralCorridor(Scene):
    """Starting scene.

    Describes the corridor (lighting depends on TheBridge's shared state)
    and routes the player left to the bridge or right to the escape pod.
    """

    def enter(self):
        """Describe the corridor, then loop on commands until the player
        moves; returns the next scene's map key."""
        print("looking around you see a couple doors and a sign: [Central Corridor] ")
        # BUGFIX: the original tested `TheBridge.power_on == False & TheBridge.alarm_off`.
        # In Python `&` binds tighter than `==`, so that parsed as
        # `power_on == (False & alarm_off)` i.e. just `power_on == False`,
        # making the "pitch black" branch fire whenever the power was off,
        # alarm lights or not.  Intended (per the elif chain): dark only when
        # the power is off AND the alarm has been silenced.
        if not TheBridge.power_on and TheBridge.alarm_off:
            print("It's pitch black in here.")
        elif not TheBridge.power_on:
            print("The only light seems to be coming from the alarm system.")
            print("Illuminating the room in a red light.")
        elif TheBridge.power_on:
            print("The floor lights are on.")
            print("looking around reveals sections of the ceiling had been on fire.")
            print("How long was i out for?")
        while True:
            print("What will you do?")
            choice = input("> ")
            if choice == "look left":
                print("There is a door with a sign [The Bridge]")
            elif choice == "go left":
                return 'The_bridge'
            elif choice == "go right":
                return 'Escape_pod'
            elif choice == "look right":
                print("There is a door with a sign [Armory]")
            else:
                print("I got no idea what that means.")
class LaserWeaponArmory(Scene):
    """Scene where the hero grabs the neutron bomb after guessing the keypad
    code, before heading for the escape pod.

    Gameplay not implemented yet; enter() currently returns None.
    """

    def enter(self):
        """Placeholder — keypad puzzle not written yet."""
        pass
class TheBridge(Scene):
    """The ship's bridge.

    Holds the two flags of shared game state that other scenes (e.g.
    CentralCorridor) read: whether the power is back on and whether the
    alarm has been silenced.  The bomb/battle gameplay from the design notes
    is not implemented yet.
    """

    # Class-level state shared across all scenes.
    alarm_off = False
    power_on = False

    def enter(self):
        """Describe the bridge and process commands; returns the next
        scene's map key when the player goes back."""
        print("You enter [The bridge] ")
        if TheBridge.power_on == False:
            print("All the instruments seem to be off.")
        if TheBridge.alarm_off == False:
            print("The hellish blaring sound is going off and seems louder here.")
            print("The room is illuminated in red from the alarm lights.")
        while True:
            print("What will you do?")
            choice = input("> ")
            if choice == "help":
                print("look at <object>")
                print("look <direction>")
                print("use <object>")
            elif choice == "go back":
                return 'Central_corridor'
            elif choice == "look at alarm":
                # BUGFIX: the original read `TheBridge.Alarm_off`, which
                # raises AttributeError (the attribute is `alarm_off`), and
                # its sense was inverted: the alarm box is visible while the
                # alarm is still running; once everything is shut down the
                # room is pitch black.
                if not TheBridge.alarm_off:
                    print("The yellow marked box on the wall is blinking in full force.")
                    print("After carefully opening the box, the panel reads: 2049.")
                    print("There seems to be a blue switch and red switch marked with symbols.")
                else:
                    print("The room is pitch black now....")
            elif choice == "use blue switch":
                print("You hear a low rattling noise.")
                print("There seems to be a flashing light behind you.")
                print("The screens of the bridge seem to be powering on.")
                print("After an audible thunk the lights come on.")
                # BUGFIX: the narration says the power comes on but the flag
                # was never set, so no other scene could observe the change.
                TheBridge.power_on = True
            elif choice == "use red switch":
                print("The siren slows down.")
                # BUGFIX: record that the alarm has been silenced (was never
                # set in the original, so the alarm branch stayed on forever).
                TheBridge.alarm_off = True
                if TheBridge.power_on:
                    print("A voice comes on the intercome: Alarm override activated")
                    print("Crew members are mandated to investigate origins of alarm.")
                    print("To ensure vessel is still capable of the objective.")
                else:
                    print("Now with the noise off i can finally think.")
                    print("Although now the alarm lights are off it's really dark here.")
class EscapePod(Scene):
    """Scene where the hero escapes to the planet below.

    (The 'guess the right pod' puzzle from the design notes is not
    implemented yet — any 'use escape pod' command finishes the game.)
    """

    def enter(self):
        """Loop on player input until the escape pod is used."""
        while True:
            print("What will you do?")
            if input("> ") == "use escape pod":
                return 'finished'
class Finish(Scene):
    """Terminal scene: congratulate the player and terminate the process."""

    def enter(self):
        """Print the win message, then exit (sys.exit via the module import)."""
        print("Thanks for playing. You won")
        exit()
class Map(object):
    """Maps scene names to Scene instances for the Engine.

    NOTE: every scene is instantiated once, here at class-creation time, and
    shared by all Map instances.
    """
    scenes = {
        'Central_corridor' : CentralCorridor(),
        'Laser_weapon_armory' : LaserWeaponArmory(),
        'The_bridge' : TheBridge(),
        'Escape_pod' : EscapePod(),
        'Death' : Death(),
        'finished' : Finish()
    }
    def __init__(self, start_scene):
        # Name (key) of the scene the game opens with.
        self.start_scene = start_scene
        pass
    def next_scene(self, scene_name):
        """Return the Scene registered under ``scene_name``.

        Returns None for unknown names (dict.get default).
        NOTE(review): the print(val) below looks like leftover debug output
        — confirm it should stay.
        """
        val = Map.scenes.get(scene_name)
        print(val)
        return val
    def opening_scene(self):
        """Print the game's intro narration and return the starting scene."""
        print("You awake on the ground, there is a red pulsing light.")
        print("You get on one knee and notice a painfull pressure in your ears")
        print("In response you grab your nose and attempt to pop your eardrums")
        print("*POP*")
        print("The blaring siren is now hardly ignorable.")
        print("\'Where the fuck am i\'")
        print("With every rotation of the alarm light you can see a section of the corridor.")
        print("In flashing moments you realise your predicament and remember your training: ")
        print("In case of emergency escape pod!")
        return self.next_scene(self.start_scene)
# Build the scene map and start the game in the central corridor.
a_map = Map('Central_corridor')
a_game = Engine(a_map)
a_game.play()
|
19,591 | 7286602e616e00521d721f1f8582fc1d872c3a42 | # -*- coding: cp936 -*-
#filename:abaqus_1.py
#################################### function definitions
#####
##### dot product of a point/vector with an edge vector
def doc(vertice, edge):
    """Return the 2-D dot product of ``vertice`` and ``edge``.

    Both arguments are 2-element sequences [x, y].
    """
    # BUGFIX: the original computed a normalized `axis` vector here but never
    # used it — dead code that also raised ZeroDivisionError for a
    # zero-length edge.  Removed.
    return vertice[0] * edge[0] + vertice[1] * edge[1]
#####
##### distance between two points
def disd(vertice1, vertice2):
    """Euclidean distance between two 2-D points given as [x, y]."""
    dy = vertice2[1] - vertice1[1]
    dx = vertice2[0] - vertice1[0]
    return sqrt(dy**2 + dx**2)
#####
##### distance from a point to a straight line
def disl(vertice, edgev1, edgev2):
    """Perpendicular distance from point ``vertice`` to the infinite line
    through ``edgev1`` and ``edgev2`` (all 2-element [x, y] sequences).

    BUGFIX: the original computed the slope as dy/dx and raised
    ZeroDivisionError for vertical edges (x1 == x2); that case is now
    handled explicitly.
    """
    if edgev2[0] == edgev1[0]:
        # Vertical line x = const: the distance is the horizontal offset.
        return abs(vertice[0] - edgev1[0])
    k = (edgev2[1] - edgev1[1]) / (edgev2[0] - edgev1[0])
    b = edgev1[1] - k * edgev1[0]
    return abs((vertice[1] - k * vertice[0] - b) / sqrt(k**2 + 1))
#####
##### do two fibres intersect / come too close to each other
def oveDownLap(fiber1, fiber2):
    """Return 1 if two rectangular fibres overlap or come too close, else 0.

    Each fibre is a sequence whose elements 1..8 are the x,y coordinates of
    its four rectangle corners.  Phase 1 is a separating-axis test on the
    two rectangles; phase 2 checks the rounded (circular, radius ``c_rad``)
    fibre ends for proximity.

    NOTE(review): relies on module globals ``f_wid`` and ``c_rad`` and
    appends to a module-level list ``cross`` (defined by the caller script)
    when the rectangles are separated — confirm ``cross`` is initialized
    before the first call.
    """
    vertices1=[[]]
    vertices2=[[]]
    edge=[[]]
    del vertices1[0]
    del vertices2[0]
    del edge[0]
    for i in range(4): ##### collect the four corner vertices of each rectangle
        vertices1.append([fiber1[1+2*i],fiber1[2+2*i]])
    for i in range(4):
        vertices2.append([fiber2[1+2*i],fiber2[2+2*i]])
    for i in range(2): ###### the four candidate separating axes (two edges per rectangle)
        edge.append([vertices1[i+1][0]-vertices1[i][0],vertices1[i+1][1]-vertices1[i][1]])
    for i in range(2):
        edge.append([vertices2[i+1][0]-vertices2[i][0],vertices2[i+1][1]-vertices2[i][1]])
    non_cross=[]
    count = 0
    # Separating-axis test: project both rectangles on each axis and compare
    # the projection intervals.  (The "-0"/"+0" margins are no-ops left in
    # place from a tunable gap parameter.)
    for i in range(4):
        docs=[]
        for j in range(4):
            docs.append(doc(vertices1[j],edge[i]))
        max1=max(docs)
        min1=min(docs)
        del docs
        docs=[]
        for j in range(4):
            docs.append(doc(vertices2[j],edge[i]))
        max2=max(docs)
        min2=min(docs)
        del docs
        if max2<(min1-0) or min2>(max1+0):
            non_cross.append(1)
            count += 1
            break
        else:
            non_cross.append(0)
    # A separating axis exists: rectangles don't intersect; record it and
    # fall through to the rounded-end proximity checks.  Otherwise overlap.
    if max(non_cross)==1:
        cross.append(0)
    else:
        return(1)
    # Identify the midpoints of the two SHORT sides of each fibre (the
    # rounded ends).  Which corner pairing forms the short side is detected
    # by comparing side length with the fibre width f_wid.
    if abs(disd([fiber1[1],fiber1[2]],[fiber1[3],fiber1[4]])-f_wid)<f_wid:
        x_ShortL1=(fiber1[1]+fiber1[3])/2
        x_ShortR1=(fiber1[5]+fiber1[7])/2
        y_ShortL1=(fiber1[2]+fiber1[4])/2
        y_ShortR1=(fiber1[6]+fiber1[8])/2
    elif abs(disd([fiber1[1],fiber1[2]],[fiber1[7],fiber1[8]])-f_wid)<f_wid:
        x_ShortL1=(fiber1[1]+fiber1[7])/2
        x_ShortR1=(fiber1[3]+fiber1[5])/2
        y_ShortL1=(fiber1[2]+fiber1[8])/2
        y_ShortR1=(fiber1[4]+fiber1[6])/2
    elif abs(disd([fiber1[1],fiber1[2]],[fiber1[5],fiber1[6]])-f_wid)<f_wid:
        x_ShortL1=(fiber1[1]+fiber1[5])/2
        x_ShortR1=(fiber1[3]+fiber1[7])/2
        y_ShortL1=(fiber1[2]+fiber1[6])/2
        y_ShortR1=(fiber1[4]+fiber1[8])/2
    if abs(disd([fiber2[1],fiber2[2]],[fiber2[3],fiber2[4]])-f_wid)<f_wid:
        x_ShortL2=(fiber2[1]+fiber2[3])/2
        x_ShortR2=(fiber2[5]+fiber2[7])/2
        y_ShortL2=(fiber2[2]+fiber2[4])/2
        y_ShortR2=(fiber2[6]+fiber2[8])/2
    elif abs(disd([fiber2[1],fiber2[2]],[fiber2[7],fiber2[8]])-f_wid)<f_wid:
        x_ShortL2=(fiber2[1]+fiber2[7])/2
        x_ShortR2=(fiber2[3]+fiber2[5])/2
        y_ShortL2=(fiber2[2]+fiber2[8])/2
        y_ShortR2=(fiber2[4]+fiber2[6])/2
    elif abs(disd([fiber2[1],fiber2[2]],[fiber2[5],fiber2[6]])-f_wid)<f_wid:
        x_ShortL2=(fiber2[1]+fiber2[5])/2
        x_ShortR2=(fiber2[3]+fiber2[7])/2
        y_ShortL2=(fiber2[2]+fiber2[6])/2
        y_ShortR2=(fiber2[4]+fiber2[8])/2
    # Distances from each fibre's end midpoints to the other fibre's sides.
    dis1=[disl([x_ShortL1,y_ShortL1],vertices2[0],vertices2[1]),disl([x_ShortL1,y_ShortL1],vertices2[1],vertices2[2]),disl([x_ShortL1,y_ShortL1],vertices2[2],vertices2[3]),disl([x_ShortL1,y_ShortL1],vertices2[0],vertices2[3]),disl([x_ShortR1,y_ShortR1],vertices2[0],vertices2[1]),disl([x_ShortR1,y_ShortR1],vertices2[1],vertices2[2]),disl([x_ShortR1,y_ShortR1],vertices2[2],vertices2[3]),disl([x_ShortR1,y_ShortR1],vertices2[0],vertices2[3])]
    dis2=[disl([x_ShortL2,y_ShortL2],vertices1[0],vertices1[1]),disl([x_ShortL2,y_ShortL2],vertices1[1],vertices1[2]),disl([x_ShortL2,y_ShortL2],vertices1[2],vertices1[3]),disl([x_ShortL2,y_ShortL2],vertices1[0],vertices1[3]),disl([x_ShortR2,y_ShortR2],vertices1[0],vertices1[1]),disl([x_ShortR2,y_ShortR2],vertices1[1],vertices1[2]),disl([x_ShortR2,y_ShortR2],vertices1[2],vertices1[3]),disl([x_ShortR2,y_ShortR2],vertices1[0],vertices1[3])]
    # Rounded ends too close to each other (two end circles of radius c_rad).
    if min(disd([x_ShortL1,y_ShortL1],[x_ShortL2,y_ShortL2]),disd([x_ShortL1,y_ShortL1],[x_ShortR2,y_ShortR2]),disd([x_ShortR1,y_ShortR1],[x_ShortL2,y_ShortL2]),disd([x_ShortR1,y_ShortR1],[x_ShortR2,y_ShortR2])) < 2*c_rad:
        return 1
    # An end circle too close to the other fibre's side (x-overlap required).
    if (x_ShortL1 > x_ShortL2 and x_ShortL1 < x_ShortR2) or (x_ShortR1 > x_ShortL2 and x_ShortR1 < x_ShortR2):
        if min(dis1) < c_rad or min(dis2) < c_rad:
            return 1
    return 0
f=open('D:\\PythonCode\\bishe\\bones.txt','r')
i=0
for line in f:
i=i+1
print i
data=line.split()
bwidx=float(data[0])#基体宽度
bwidy=float(data[1])#基体长度
f_len=float(data[2])#纤维长度
f_wid=float(data[3])#纤维宽度
f_dis=float(data[4])#纤维最小间距一半
c_rad=float(data[5])#圆头半径
r_rad=float(data[6])#圆角半径
f_num=int(data[7]) #纤维数目
size0=float(data[8])#网格尺寸
job_name=data[9] #工作名称
Pathname=data[10]
####################################调用模块
from random import *
from abaqus import *
from abaqusConstants import *
from caeModules import *
from driverUtils import executeOnCaeStartup
from abaqus import backwardCompatibility
backwardCompatibility.setValues(reportDeprecated=False)
import os
os.chdir(Pathname)
executeOnCaeStartup()
Mdb()
#纤维生成
from math import *
L=bwidx #基体长度
B=bwidy #基体宽度
f_num_new=0 #计入被边界截断的纤维数目
f_len_temp=f_len+f_dis#较粗大纤维长度,用以确保纤维之间距离不能太小
f_wid_temp=f_wid+f_dis#较粗大纤维宽度,用以确保纤维之间距离不能太小
fiber_vertices=[[]] #创建矩阵存储纤维编号和四个顶点坐标
fiber_vertices_temp=[[]] #创建临时矩阵存储较宽的纤维编号和四个顶点坐标,用以确保纤维之间距离不能太小
del fiber_vertices[0]
del fiber_vertices_temp[0]
#####
#####纤维生成
for i in range(1,f_num+1):
print(i)
do=0 #判断纤维是否生成成功
while do==0:
x_m=L*random()
y_m=L*random()
angle=pi*gauss(0,23.4)/180
#angle=pi*(random()-0.5)
cl=sqrt(f_len**2+f_wid**2)
cm=sqrt(f_len_temp**2+f_wid_temp**2)
x_L=x_m-f_len/2*cos(angle)
y_L=y_m-f_len/2*sin(angle)
x_R=x_m+f_len/2*cos(angle)
y_R=y_m+f_len/2*sin(angle)
x_L_temp=x_m-f_len_temp/2*cos(angle)
y_L_temp=y_m-f_len_temp/2*sin(angle)
x_R_temp=x_m+f_len_temp/2*cos(angle)
y_R_temp=y_m+f_len_temp/2*sin(angle)
if angle==0 or angle==pi/2 or angle==-pi/2 or angle==pi/4 or angle==-pi/4:
continue
else:
k_=tan(angle)
x_UpR=x_m-(k_*k_*f_wid/abs(sin(angle))-f_len/abs(cos(angle)))/2/(k_*k_+1)
y_UpR=y_m+k_*(f_len/abs(cos(angle))+f_wid/abs(sin(angle)))/2/(k_*k_+1)
x_UpL=x_m-(k_*k_*f_wid/abs(sin(angle))+f_len/abs(cos(angle)))/2/(k_*k_+1)
y_UpL=y_m-k_*(f_len/abs(cos(angle))-f_wid/abs(sin(angle)))/2/(k_*k_+1)
x_DownR=x_m+(k_*k_*f_wid/abs(sin(angle))+f_len/abs(cos(angle)))/2/(k_*k_+1)
y_DownR=y_m+k_*(f_len/abs(cos(angle))-f_wid/abs(sin(angle)))/2/(k_*k_+1)
x_DownL=x_m+(k_*k_*f_wid/abs(sin(angle))-f_len/abs(cos(angle)))/2/(k_*k_+1)
y_DownL=y_m-k_*(f_len/abs(cos(angle))+f_wid/abs(sin(angle)))/2/(k_*k_+1)
x_UpR_temp=x_m-(k_*k_*f_wid_temp/abs(sin(angle))-f_len_temp/abs(cos(angle)))/2/(k_*k_+1)
y_UpR_temp=y_m+k_*(f_len_temp/abs(cos(angle))+f_wid_temp/abs(sin(angle)))/2/(k_*k_+1)
x_UpL_temp=x_m-(k_*k_*f_wid_temp/abs(sin(angle))+f_len_temp/abs(cos(angle)))/2/(k_*k_+1)
y_UpL_temp=y_m-k_*(f_len_temp/abs(cos(angle))-f_wid_temp/abs(sin(angle)))/2/(k_*k_+1)
x_DownR_temp=x_m+(k_*k_*f_wid_temp/abs(sin(angle))+f_len_temp/abs(cos(angle)))/2/(k_*k_+1)
y_DownR_temp=y_m+k_*(f_len_temp/abs(cos(angle))-f_wid_temp/abs(sin(angle)))/2/(k_*k_+1)
x_DownL_temp=x_m+(k_*k_*f_wid_temp/abs(sin(angle))-f_len_temp/abs(cos(angle)))/2/(k_*k_+1)
y_DownL_temp=y_m-k_*(f_len_temp/abs(cos(angle))+f_wid_temp/abs(sin(angle)))/2/(k_*k_+1)
R=c_rad
if abs(disd([x_UpL,y_UpL],[x_DownL,y_DownL])-f_wid)<0.5*f_wid:
x_LC=(x_UpL+x_DownL)/2
y_LC=(y_UpL+y_DownL)/2
x_RC=(x_UpR+x_DownR)/2
y_RC=(y_UpR+y_DownR)/2
elif abs(disd([x_UpL,y_UpL],[x_DownR,y_DownR])-f_wid)<0.5*f_wid:
x_LC=(x_UpL+x_DownR)/2
y_LC=(y_UpL+y_DownR)/2
x_RC=(x_UpR+x_DownL)/2
y_RC=(y_UpR+y_DownL)/2
elif abs(disd([x_UpL,y_UpL],[x_UpR,y_UpR])-f_wid)<0.5*f_wid:
x_LC=(x_UpL+x_UpR)/2
y_LC=(y_UpL+y_UpR)/2
x_RC=(x_DownR+x_DownL)/2
y_RC=(y_DownR+y_DownL)/2
if (min(x_UpL,x_DownL)<=0 and min(y_UpL,y_DownL,y_UpR,y_DownR)<=0) or (x_LC<=R and min(y_LC,y_RC)<=R):
continue
elif (min(x_UpL,x_DownL)<=0 and max(y_UpL,y_DownL,y_UpR,y_DownR)>=L) or (x_LC<=R and max(y_LC,y_RC)>=L-R):
continue
elif (max(x_DownR,x_UpR)-L>=0 and min(y_UpL,y_DownL,y_UpR,y_DownR)<=0) or (x_RC>=L-R and min(y_LC,y_RC)<=R):
continue
elif (max(x_DownR,x_UpR)-L>=0 and max(y_UpL,y_DownL,y_UpR,y_DownR)>=L) or (x_RC>=L-R and max(y_LC,y_RC)>=L-R):
continue
elif min(x_UpL,x_DownL)>=0 and max(x_UpR,x_DownR)<=L and min(y_UpL,y_DownL,y_UpR,y_DownR)>=0 and max(y_UpL,y_DownL,y_UpR,y_DownR)<=L and x_LC>=R and x_RC<=L-R and min(y_LC,y_RC)>=R and max(y_LC,y_RC)<=L-R:#纤维与边界无交点
if k_>0:
cross=[]
if len(fiber_vertices)==0:
fiber_vertices.append([i,x_UpL,y_UpL,x_DownL,y_DownL,x_DownR,y_DownR,x_UpR,y_UpR])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp])
break
else:
temp=[i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp))
if max(cross)==1:
del cross
continue
else:
fiber_vertices.append([i,x_UpL,y_UpL,x_DownL,y_DownL,x_DownR,y_DownR,x_UpR,y_UpR])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp])
del cross
break
else:
cross=[]
if len(fiber_vertices)==0:
fiber_vertices.append([i,x_UpL,y_UpL,x_UpR,y_UpR,x_DownR,y_DownR,x_DownL,y_DownL])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp])
break
else:
temp=[i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp))
if max(cross)==1:
del cross
continue
else:
fiber_vertices.append([i,x_UpL,y_UpL,x_UpR,y_UpR,x_DownR,y_DownR,x_DownL,y_DownL])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp])
del cross
break
elif x_LC-R<0 and min(x_UpR,x_DownR)>0:#纤维与左边界有交点
if k_>0:
cross=[]
if len(fiber_vertices)==0:
fiber_vertices.append([i,x_UpL,y_UpL,x_DownL,y_DownL,x_DownR,y_DownR,x_UpR,y_UpR])
fiber_vertices.append([i,x_UpL+L,y_UpL,x_DownL+L,y_DownL,x_DownR+L,y_DownR,x_UpR+L,y_UpR])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp])
fiber_vertices_temp.append([i,x_UpL_temp+L,y_UpL_temp,x_DownL_temp+L,y_DownL_temp,x_DownR_temp+L,y_DownR_temp,x_UpR_temp+L,y_UpR_temp])
break
else:
temp1=[i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp1))
temp2=[i,x_UpL_temp+L,y_UpL_temp,x_DownL_temp+L,y_DownL_temp,x_DownR_temp+L,y_DownR_temp,x_UpR_temp+L,y_UpR_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp2))
if max(cross)==1:
del cross
continue
else:
fiber_vertices.append([i,x_UpL,y_UpL,x_DownL,y_DownL,x_DownR,y_DownR,x_UpR,y_UpR])
fiber_vertices.append([i,x_UpL+L,y_UpL,x_DownL+L,y_DownL,x_DownR+L,y_DownR,x_UpR+L,y_UpR])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp])
fiber_vertices_temp.append([i,x_UpL_temp+L,y_UpL_temp,x_DownL_temp+L,y_DownL_temp,x_DownR_temp+L,y_DownR_temp,x_UpR_temp+L,y_UpR_temp])
del cross
break
else:
cross=[]
if len(fiber_vertices)==0:
fiber_vertices.append([i,x_UpL,y_UpL,x_UpR,y_UpR,x_DownR,y_DownR,x_DownL,y_DownL])
fiber_vertices.append([i,x_UpL+L,y_UpL,x_UpR+L,y_UpR,x_DownR+L,y_DownR,x_DownL+L,y_DownL])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp])
fiber_vertices_temp.append([i,x_UpL_temp+L,y_UpL_temp,x_UpR_temp+L,y_UpR_temp,x_DownR_temp+L,y_DownR_temp,x_DownL_temp+L,y_DownL_temp])
break
else:
temp1=[i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp1))
temp2=[i,x_UpL_temp+L,y_UpL_temp,x_UpR_temp+L,y_UpR_temp,x_DownR_temp+L,y_DownR_temp,x_DownL_temp+L,y_DownL_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp2))
if max(cross)==1:
del cross
continue
else:
fiber_vertices.append([i,x_UpL,y_UpL,x_UpR,y_UpR,x_DownR,y_DownR,x_DownL,y_DownL])
fiber_vertices.append([i,x_UpL+L,y_UpL,x_UpR+L,y_UpR,x_DownR+L,y_DownR,x_DownL+L,y_DownL])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp])
fiber_vertices_temp.append([i,x_UpL_temp+L,y_UpL_temp,x_UpR_temp+L,y_UpR_temp,x_DownR_temp+L,y_DownR_temp,x_DownL_temp+L,y_DownL_temp])
del cross
break
elif max(x_UpL,x_DownL)<L and max(x_LC,x_RC)+R>L:#纤维与右边界有交点
if k_>0:
cross=[]
if len(fiber_vertices)==0:
fiber_vertices.append([i,x_UpL,y_UpL,x_DownL,y_DownL,x_DownR,y_DownR,x_UpR,y_UpR])
fiber_vertices.append([i,x_UpL-L,y_UpL,x_DownL-L,y_DownL,x_DownR-L,y_DownR,x_UpR-L,y_UpR])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp])
fiber_vertices_temp.append([i,x_UpL_temp-L,y_UpL_temp,x_DownL_temp-L,y_DownL_temp,x_DownR_temp-L,y_DownR_temp,x_UpR_temp-L,y_UpR_temp])
break
else:
temp1=[i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp1))
temp2=[i,x_UpL_temp-L,y_UpL_temp,x_DownL_temp-L,y_DownL_temp,x_DownR_temp-L,y_DownR_temp,x_UpR_temp-L,y_UpR_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp2))
if max(cross)==1:
del cross
continue
else:
fiber_vertices.append([i,x_UpL,y_UpL,x_DownL,y_DownL,x_DownR,y_DownR,x_UpR,y_UpR])
fiber_vertices.append([i,x_UpL-L,y_UpL,x_DownL-L,y_DownL,x_DownR-L,y_DownR,x_UpR-L,y_UpR])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp])
fiber_vertices_temp.append([i,x_UpL_temp-L,y_UpL_temp,x_DownL_temp-L,y_DownL_temp,x_DownR_temp-L,y_DownR_temp,x_UpR_temp-L,y_UpR_temp])
del cross
break
else:
cross=[]
if len(fiber_vertices)==0:
fiber_vertices.append([i,x_UpL,y_UpL,x_UpR,y_UpR,x_DownR,y_DownR,x_DownL,y_DownL])
fiber_vertices.append([i,x_UpL-L,y_UpL,x_UpR-L,y_UpR,x_DownR-L,y_DownR,x_DownL-L,y_DownL])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp])
fiber_vertices_temp.append([i,x_UpL_temp-L,y_UpL_temp,x_UpR_temp-L,y_UpR_temp,x_DownR_temp-L,y_DownR_temp,x_DownL_temp-L,y_DownL_temp])
break
else:
temp1=[i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp1))
temp2=[i,x_UpL_temp-L,y_UpL_temp,x_UpR_temp-L,y_UpR_temp,x_DownR_temp-L,y_DownR_temp,x_DownL_temp-L,y_DownL_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp2))
if max(cross)==1:
del cross
continue
else:
fiber_vertices.append([i,x_UpL,y_UpL,x_UpR,y_UpR,x_DownR,y_DownR,x_DownL,y_DownL])
fiber_vertices.append([i,x_UpL-L,y_UpL,x_UpR-L,y_UpR,x_DownR-L,y_DownR,x_DownL-L,y_DownL])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp])
fiber_vertices_temp.append([i,x_UpL_temp-L,y_UpL_temp,x_UpR_temp-L,y_UpR_temp,x_DownR_temp-L,y_DownR_temp,x_DownL_temp-L,y_DownL_temp])
del cross
break
elif max(y_LC+R,y_RC+R)>L:#纤维与上边界有交点
if k_>0:
cross=[]
if len(fiber_vertices)==0:
fiber_vertices.append([i,x_UpL,y_UpL,x_DownL,y_DownL,x_DownR,y_DownR,x_UpR,y_UpR])
fiber_vertices.append([i,x_UpL,y_UpL-L,x_DownL,y_DownL-L,x_DownR,y_DownR-L,x_UpR,y_UpR-L])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp-L,x_DownL_temp,y_DownL_temp-L,x_DownR_temp,y_DownR_temp-L,x_UpR_temp,y_UpR_temp-L])
break
else:
temp1=[i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp1))
temp2=[i,x_UpL_temp,y_UpL_temp-L,x_DownL_temp,y_DownL_temp-L,x_DownR_temp,y_DownR_temp-L,x_UpR_temp,y_UpR_temp-L]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp2))
if max(cross)==1:
del cross
continue
else:
fiber_vertices.append([i,x_UpL,y_UpL,x_DownL,y_DownL,x_DownR,y_DownR,x_UpR,y_UpR])
fiber_vertices.append([i,x_UpL,y_UpL-L,x_DownL,y_DownL-L,x_DownR,y_DownR-L,x_UpR,y_UpR-L])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp-L,x_DownL_temp,y_DownL_temp-L,x_DownR_temp,y_DownR_temp-L,x_UpR_temp,y_UpR_temp-L])
del cross
break
else:
cross=[]
if len(fiber_vertices)==0:
fiber_vertices.append([i,x_UpL,y_UpL,x_UpR,y_UpR,x_DownR,y_DownR,x_DownL,y_DownL])
fiber_vertices.append([i,x_UpL,y_UpL-L,x_UpR,y_UpR-L,x_DownR,y_DownR-L,x_DownL,y_DownL-L])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp-L,x_UpR_temp,y_UpR_temp-L,x_DownR_temp,y_DownR_temp-L,x_DownL_temp,y_DownL_temp-L])
break
else:
temp1=[i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp1))
temp2=[i,x_UpL_temp,y_UpL_temp-L,x_UpR_temp,y_UpR_temp-L,x_DownR_temp,y_DownR_temp-L,x_DownL_temp,y_DownL_temp-L]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp2))
if max(cross)==1:
del cross
continue
else:
fiber_vertices.append([i,x_UpL,y_UpL,x_UpR,y_UpR,x_DownR,y_DownR,x_DownL,y_DownL])
fiber_vertices.append([i,x_UpL,y_UpL-L,x_UpR,y_UpR-L,x_DownR,y_DownR-L,x_DownL,y_DownL-L])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp-L,x_UpR_temp,y_UpR_temp-L,x_DownR_temp,y_DownR_temp-L,x_DownL_temp,y_DownL_temp-L])
del cross
break
elif min(y_LC-R,y_RC-R)<0:#纤维与下边界有交点
if k_>0:
cross=[]
if len(fiber_vertices)==0:
fiber_vertices.append([i,x_UpL,y_UpL,x_DownL,y_DownL,x_DownR,y_DownR,x_UpR,y_UpR])
fiber_vertices.append([i,x_UpL,y_UpL+L,x_DownL,y_DownL+L,x_DownR,y_DownR+L,x_UpR,y_UpR+L])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp+L,x_DownL_temp,y_DownL_temp+L,x_DownR_temp,y_DownR_temp+L,x_UpR_temp,y_UpR_temp+L])
break
else:
temp1=[i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp1))
temp2=[i,x_UpL_temp,y_UpL_temp+L,x_DownL_temp,y_DownL_temp+L,x_DownR_temp,y_DownR_temp+L,x_UpR_temp,y_UpR_temp+L]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp2))
if max(cross)==1:
del cross
continue
else:
fiber_vertices.append([i,x_UpL,y_UpL,x_DownL,y_DownL,x_DownR,y_DownR,x_UpR,y_UpR])
fiber_vertices.append([i,x_UpL,y_UpL+L,x_DownL,y_DownL+L,x_DownR,y_DownR+L,x_UpR,y_UpR+L])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_DownL_temp,y_DownL_temp,x_DownR_temp,y_DownR_temp,x_UpR_temp,y_UpR_temp])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp+L,x_DownL_temp,y_DownL_temp+L,x_DownR_temp,y_DownR_temp+L,x_UpR_temp,y_UpR_temp+L])
del cross
break
else:
cross=[]
if len(fiber_vertices)==0:
fiber_vertices.append([i,x_UpL,y_UpL,x_UpR,y_UpR,x_DownR,y_DownR,x_DownL,y_DownL])
fiber_vertices.append([i,x_UpL,y_UpL+L,x_UpR,y_UpR+L,x_DownR,y_DownR+L,x_DownL,y_DownL+L])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp+L,x_UpR_temp,y_UpR_temp+L,x_DownR_temp,y_DownR_temp+L,x_DownL_temp,y_DownL_temp+L])
break
else:
temp1=[i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp1))
temp2=[i,x_UpL_temp,y_UpL_temp+L,x_UpR_temp,y_UpR_temp+L,x_DownR_temp,y_DownR_temp+L,x_DownL_temp,y_DownL_temp+L]
for j in range(len(fiber_vertices_temp)):
cross.append(oveDownLap(fiber_vertices_temp[j],temp2))
if max(cross)==1:
del cross
continue
else:
fiber_vertices.append([i,x_UpL,y_UpL,x_UpR,y_UpR,x_DownR,y_DownR,x_DownL,y_DownL])
fiber_vertices.append([i,x_UpL,y_UpL+L,x_UpR,y_UpR+L,x_DownR,y_DownR+L,x_DownL,y_DownL+L])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp,x_UpR_temp,y_UpR_temp,x_DownR_temp,y_DownR_temp,x_DownL_temp,y_DownL_temp])
fiber_vertices_temp.append([i,x_UpL_temp,y_UpL_temp+L,x_UpR_temp,y_UpR_temp+L,x_DownR_temp,y_DownR_temp+L,x_DownL_temp,y_DownL_temp+L])
del cross
break
else:
continue
##############################开始建模
#try:
if TRUE:
from part import * #第一步, 建立建模
s = mdb.models['Model-1'].ConstrainedSketch(name='__profile__', sheetSize=1000.0) #定义模型的草图s
s.rectangle(point1=(0.0, 0.0), point2=(bwidx, bwidy)) #指定两顶点画矩形
p = mdb.models['Model-1'].Part(name='Part-1',dimensionality=TWO_D_PLANAR,type=DEFORMABLE_BODY) #定义模型的部件part-1
p.BaseShell(sketch=s) #将s赋给p
del mdb.models['Model-1'].sketches['__profile__'] #收回建模所占的环境内存
s1 = mdb.models['Model-1'].ConstrainedSketch(name='__profile__', sheetSize=1000.0)
g = s1.geometry
fr = open('D:\\PythonCode\\bishe\\result_bones.txt','w+')
fr.write('CenterX\t\t\tCenterY\t\t\tAngle\n')
for i in range(len(fiber_vertices)):
if abs(disd([fiber_vertices[i][1],fiber_vertices[i][2]],[fiber_vertices[i][3],fiber_vertices[i][4]])-f_wid)<0.5*f_wid:
x_LC=(fiber_vertices[i][1]+fiber_vertices[i][3])/2
y_LC=(fiber_vertices[i][2]+fiber_vertices[i][4])/2
x_DC=(fiber_vertices[i][3]+fiber_vertices[i][5])/2
y_DC=(fiber_vertices[i][4]+fiber_vertices[i][6])/2
x_RC=(fiber_vertices[i][5]+fiber_vertices[i][7])/2
y_RC=(fiber_vertices[i][6]+fiber_vertices[i][8])/2
x_UC=(fiber_vertices[i][7]+fiber_vertices[i][1])/2
y_UC=(fiber_vertices[i][8]+fiber_vertices[i][2])/2
s1.Line(point1=(fiber_vertices[i][3],fiber_vertices[i][4]),point2=(fiber_vertices[i][5],fiber_vertices[i][6]))
s1.Line(point1=(fiber_vertices[i][7],fiber_vertices[i][8]),point2=(fiber_vertices[i][1],fiber_vertices[i][2]))
flag=1
else:
x_LC=(fiber_vertices[i][1]+fiber_vertices[i][7])/2
y_LC=(fiber_vertices[i][2]+fiber_vertices[i][8])/2
x_DC=(fiber_vertices[i][7]+fiber_vertices[i][5])/2
y_DC=(fiber_vertices[i][8]+fiber_vertices[i][6])/2
x_RC=(fiber_vertices[i][5]+fiber_vertices[i][3])/2
y_RC=(fiber_vertices[i][6]+fiber_vertices[i][4])/2
x_UC=(fiber_vertices[i][3]+fiber_vertices[i][1])/2
y_UC=(fiber_vertices[i][4]+fiber_vertices[i][2])/2
s1.Line(point1=(fiber_vertices[i][1],fiber_vertices[i][2]),point2=(fiber_vertices[i][3],fiber_vertices[i][4]))
s1.Line(point1=(fiber_vertices[i][5],fiber_vertices[i][6]),point2=(fiber_vertices[i][7],fiber_vertices[i][8]))
flag=0
k1=R/f_len
rdef=pi/128
x1t=x_LC+k1*(x_RC-x_LC)
x2t=x_LC+(1-k1)*(x_RC-x_LC)
y1t=y_LC+k1*(y_RC-y_LC)
y2t=y_LC+(1-k1)*(y_RC-y_LC)
ta=(y_RC-y_LC)/(x_RC-x_LC)
b=y_RC-ta*x_RC
a1=atan(ta)
a0=asin(0.5*f_wid/R)
angle=a1+a0
xm=(fiber_vertices[i][1]+fiber_vertices[i][3]+fiber_vertices[i][5]+fiber_vertices[i][7])/4
ym=(fiber_vertices[i][2]+fiber_vertices[i][4]+fiber_vertices[i][6]+fiber_vertices[i][8])/4
s1.CircleByCenterPerimeter(center=(x_LC,y_LC), point1=(x_LC-c_rad,y_LC))
s1.CircleByCenterPerimeter(center=(x_RC,y_RC), point1=(x_RC+c_rad,y_RC))
s1.autoTrimCurve(curve1=g.findAt((fiber_vertices[i][1],fiber_vertices[i][2])), point1=(fiber_vertices[i][1],fiber_vertices[i][2]))
s1.autoTrimCurve(curve1=g.findAt((fiber_vertices[i][3],fiber_vertices[i][4])), point1=(fiber_vertices[i][3],fiber_vertices[i][4]))
s1.autoTrimCurve(curve1=g.findAt((fiber_vertices[i][5],fiber_vertices[i][6])), point1=(fiber_vertices[i][5],fiber_vertices[i][6]))
s1.autoTrimCurve(curve1=g.findAt((fiber_vertices[i][7],fiber_vertices[i][8])), point1=(fiber_vertices[i][7],fiber_vertices[i][8]))
s1.autoTrimCurve(curve1=g.findAt((x1t,y1t)), point1=(x1t,y1t))
s1.autoTrimCurve(curve1=g.findAt((x2t,y2t)), point1=(x2t,y2t))
if ta>=0:
s1.FilletByRadius(radius=r_rad, curve1=g.findAt((x_DC,y_DC-0.0001)), nearPoint1=(x_DC,y_DC-0.0001), curve2=g.findAt((x_LC+R*cos(a1-a0-rdef),y_LC+R*sin(a1-a0-rdef))), nearPoint2=(x_LC+R*cos(a1-a0-rdef),y_LC+R*sin(a1-a0-rdef)))#左下
s1.FilletByRadius(radius=r_rad, curve1=g.findAt((x_DC,y_DC-0.0001)), nearPoint1=(x_DC,y_DC-0.0001), curve2=g.findAt((x_RC-R*cos(a1+a0+rdef),y_RC-R*sin(a1+a0+rdef))), nearPoint2=(x_RC-R*cos(a1+a0+rdef),y_RC-R*sin(a1+a0+rdef)))#右下
s1.FilletByRadius(radius=r_rad, curve1=g.findAt((x_UC,y_UC+0.0001)), nearPoint1=(x_UC,y_UC+0.0001), curve2=g.findAt((x_LC+R*cos(a1+a0+rdef),y_LC+R*sin(a1+a0+rdef))), nearPoint2=(x_LC+R*cos(a1+a0+rdef),y_LC+R*sin(a1+a0+rdef)))#左上
s1.FilletByRadius(radius=r_rad, curve1=g.findAt((x_UC,y_UC+0.0001)), nearPoint1=(x_UC,y_UC+0.0001), curve2=g.findAt((x_RC-R*cos(a1-a0-rdef),y_RC-R*sin(a1-a0-rdef))), nearPoint2=(x_RC-R*cos(a1-a0-rdef),y_RC-R*sin(a1-a0-rdef)))#右上
else:
print 'miao'
fr.write(str((fiber_vertices[i][1]+fiber_vertices[i][5])/2)+'\t'+str((fiber_vertices[i][2]+fiber_vertices[i][6])/2)+'\t')
fr.write(str(180/pi*a1)+'\n')
fr.close()
p1 = mdb.models['Model-1'].parts['Part-1']
pickedFaces = p1.faces[0:1]
p1.PartitionFaceBySketch(faces=pickedFaces, sketch=s1)
mdb.models['Model-1'].convertAllSketches()
from material import * #第二步, 材料定义
from section import *
mdb.models['Model-1'].Material(name='MATRIX') #定义材料名称1
mdb.models['Model-1'].materials['MATRIX'].Depvar(n=3) #定义材料刚度
mdb.models['Model-1'].materials['MATRIX'].UserDefinedField()
mdb.models['Model-1'].materials['MATRIX'].Elastic(dependencies=2, table=((1100,
0.38, 0.0, 0.0), (110.0, 0.38, 1.0, 0.0), (110, 0.38, 0.0, 1.0, ),(110, 0.38, 1.0, 1.0)))
mdb.models['Model-1'].HomogeneousSolidSection(name='Section-1',material='MATRIX',thickness=1.0) #定义截面1
mdb.models['Model-1'].Material(name='FIBER') #定义材料名称2
mdb.models['Model-1'].materials['FIBER'].Depvar(n=3) #定义材料刚度
mdb.models['Model-1'].materials['FIBER'].UserDefinedField()
mdb.models['Model-1'].materials['FIBER'].Elastic(dependencies=2, table=((220000,
0.25, 0.0, 0.0), (22000, 0.25, 1.0, 0.0), (22000, 0.25, 0.0, 1.0, ),(22000, 0.25, 1.0, 1.0)))
mdb.models['Model-1'].HomogeneousSolidSection(name='Section-2',material='FIBER',thickness=1.0) #定义截面2
faces = mdb.models['Model-1'].parts['Part-1'].faces.findAt(((0.0, 0.0, 0.0), ))
region =(faces, ) #以上两行找到包含点(0,0,0)的面,保存到region
mdb.models['Model-1'].parts['Part-1'].SectionAssignment(region=region, sectionName='Section-1') #截面属性附给选中的面region
f2=mdb.models['Model-1'].parts['Part-1'].faces
miao=0
flag=0
for i in range(len(f2)):
for j in range(len(faces)):
if (f2[i]==faces[j]):
flag=1
if flag==0:
if miao:
faces4+=f2[i:i+1]
else:
faces4=f2[i:i+1]
miao=1
flag=0
region2 =(faces4, ) #以上找到除faces以外的面,保存到region2
mdb.models['Model-1'].parts['Part-1'].SectionAssignment(region=region2, sectionName='Section-2') #截面属性2附给选中的面region2
from assembly import * #第三步,装配
a1 = mdb.models['Model-1'].rootAssembly
p = mdb.models['Model-1'].parts['Part-1'] #指定part-1
a1.Instance(name='Part-1-1', part=p, dependent=OFF) #生成part-1对象的实体Part-1-1,independent网格在Instance上面
from step import * #第四步, 定义分析步
mdb.models['Model-1'].StaticStep(name='Step-1', previous='Initial',
timeIncrementationMethod=AUTOMATIC) #定义一个固定增量的静态分析步
mdb.models['Model-1'].fieldOutputRequests['F-Output-1'].setValuesInStep(stepName='Step-1',
variables=('S', 'U')) #定义输出到ODB文件的数据(应力、位移)
from mesh import * #第五步, 网格划分控制
#f1 = mdb.models['Model-1'].rootAssembly.instances['Part-1-1'].faces
#mdb.models['Model-1'].rootAssembly.setMeshControls(regions=f1, elemShape=TRI)
elemType1 = mesh.ElemType(elemCode=CPE8, elemLibrary=STANDARD)
elemType2 = mesh.ElemType(elemCode=CPE6, elemLibrary=STANDARD)
faces = mdb.models['Model-1'].rootAssembly.instances['Part-1-1'].faces
pickedRegions =(faces, )
mdb.models['Model-1'].rootAssembly.setElementType(regions=pickedRegions, elemTypes=(elemType1, elemType2)) #定义两种网格类型
#size0=float(getInput("Input the mesh size:","0.1"))
#size0=0.2
m=mdb.models['Model-1']
r=m.rootAssembly
pickedEdges=a1.instances['Part-1-1'].edges
a1.seedEdgeBySize(edges=pickedEdges, size=size0,constraint=FIXED) #撒网格种子
partInstances =(a1.instances['Part-1-1'], )
a1.generateMesh(regions=partInstances) #给partInstances划分网格
from interaction import * #第六步, 定义多点约束条件-----MPC
m=mdb.models['Model-1']
r=m.rootAssembly
node=r.instances['Part-1-1'].nodes
nel=[]
ner=[]
neu=[]
ned=[]
for i in range(len(node)):
x=node[i].coordinates[0]
y=node[i].coordinates[1]
flag=(x-bwidx)*(x-0)*(y-bwidy)*(y-0)
if abs(flag)<0.0001:
if (abs(y-bwidy)>0.01)and(abs(y-0)>0.01):
if (abs(x-0)<0.0001):
nel.append(i)
if (abs(x-bwidx)<0.0001):
ner.append(i)
if (abs(x-bwidx)>0.01)and(abs(x-0)>0.01):
if (abs(y-0)<0.0001):
ned.append(i)
if (abs(y-bwidy)<0.0001):
neu.append(i)
if (abs(x-0)<0.01)and(abs(y-0)<0.01):
r.Set(nodes=node[i:i+1],name='set-01')
elif(abs(x-bwidx)<0.01)and(abs(y-0)<0.01):
r.Set(nodes=node[i:i+1],name='set-02')
elif(abs(x-0)<0.01)and(abs(y-bwidy)<0.01):
r.Set(nodes=node[i:i+1],name='set-03')
elif(abs(x-bwidx)<0.01)and(abs(y-bwidy)<0.01):
r.Set(nodes=node[i:i+1],name='set-04')
m.Equation(name='eq-00',terms=((1,'set-04',1),(-1,'set-02',1),(-1,'set-03',1))) #定义角点的MPC
m.Equation(name='eq-01',terms=((1,'set-04',2),(-1,'set-02',2),(-1,'set-03',2)))
#-----------------------------定义其他边界点的MPC----------------
i=0
for n in range(len(nel)):
x0=node[nel[n]].coordinates[0]
y0=node[nel[n]].coordinates[1]
for j in range(len(ner)):
x1=node[ner[j]].coordinates[0]
y1=node[ner[j]].coordinates[1]
if (abs(y0-y1)<0.3*size0):
r.Set(nodes=node[nel[n]:nel[n]+1],name='set-l-'+str(i))
r.Set(nodes=node[ner[j]:ner[j]+1],name='set-r-'+str(i))
m.Equation(name='eq-lr-x-'+str(i),terms=((1,'set-r-'+str(i),1),(-1,'set-02',1),(-1,'set-l-'+str(i),1)))
m.Equation(name='eq-lr-y-'+str(i),terms=((1,'set-r-'+str(i),2),(-1,'set-02',2),(-1,'set-l-'+str(i),2)))
i=i+1
break
i=0
for n in range(len(ned)):
x0=node[ned[n]].coordinates[0]
y0=node[ned[n]].coordinates[1]
for j in range(len(neu)):
x1=node[neu[j]].coordinates[0]
y1=node[neu[j]].coordinates[1]
if (abs(x0-x1)<0.3*size0):
r.Set(nodes=node[ned[n]:ned[n]+1],name='set-d-'+str(i))
r.Set(nodes=node[neu[j]:neu[j]+1],name='set-u-'+str(i))
m.Equation(name='eq-ud-x-'+str(i),terms=((1,'set-u-'+str(i),1),(-1,'set-03',1),(-1,'set-d-'+str(i),1)))
m.Equation(name='eq-ud-y-'+str(i),terms=((1,'set-u-'+str(i),2),(-1,'set-03',2),(-1,'set-d-'+str(i),2)))
i=i+1
break
print "i=",i
#--------------------------------------------------------------
from load import * #第七步, 荷载边界定义
m=mdb.models['Model-1']
region = m.rootAssembly.sets['set-01'] #选中固支节点,保存到region
m.DisplacementBC(name='BC-1', createStepName='Initial',region=region,
u1=SET, u2=SET, ur3=SET, amplitude=UNSET,distributionType=UNIFORM, localCsys=None) #定义固支边界
region = m.rootAssembly.sets['set-02'] #选中简支节点,保存到region
m.DisplacementBC(name='BC-2', createStepName='Step-1',region=region,
u1=(bwidx/100), u2=SET, ur3=SET, amplitude=UNSET,distributionType=UNIFORM, localCsys=None) #定义简支边界
region = m.rootAssembly.sets['set-03'] #选中加载节点,保存到region
m.DisplacementBC(name='BC-3', createStepName='Step-1',region=region,
u1=SET, u2=UNSET, ur3=SET, amplitude=UNSET, fixed=OFF,distributionType=UNIFORM, localCsys=None) #定义位移载荷
#-----------------------------------------第八步,生成任务以及其他杂项功能
regionDef=mdb.models['Model-1'].rootAssembly.sets['set-02']
mdb.models['Model-1'].historyOutputRequests['H-Output-1'].setValues(variables=(
'U1', 'RF1'), region=regionDef,
sectionPoints=DEFAULT, rebar=EXCLUDE)
regionDef=mdb.models['Model-1'].rootAssembly.sets['set-03']
mdb.models['Model-1'].HistoryOutputRequest(name='H-Output-2',
createStepName='Step-1', variables=('U2', 'RF2'), region=regionDef,
sectionPoints=DEFAULT, rebar=EXCLUDE)
#-----------------------------------------第九步,提交作业
mdb.Job(name=job_name, model='Model-1', description='', type=ANALYSIS,
atTime=None, waitMinutes=0, waitHours=0, queue=None, memory=90,
memoryUnits=PERCENTAGE, getMemoryFromAnalysis=True,
explicitPrecision=SINGLE, nodalOutputPrecision=SINGLE, echoPrint=OFF,
modelPrint=OFF, contactPrint=OFF, historyPrint=OFF,
scratch='', multiprocessingMode=DEFAULT, numCpus=1)
#mdb.jobs[job_name].submit(consistencyChecking=OFF)
mdb.saveAs(Pathname+'\\'+job_name)
#except:
# print('ERROR')
|
19,592 | 6eced33fe1c404886fc9f5ed3b61844b6508ee9b | #!/usr/bin/env python3
""" Basic annotations - concat """
def concat(str1: str, str2: str) -> str:
""" Type-annotated function concat that takes a str arguments.
Args:
str1: str type.
str2: str type.
Return:
Concatenated string.
"""
return str1 + str2
|
19,593 | 50a7ce2e26509125fe1ec4399ac7690ed9f6c243 | import os, os.path
if __name__ == '__main__':
dirpath = './data/src/'
print('loading...')
files = os.listdir(dirpath)
print('loaded!')
min_size = 1
max_size = 10000
interval = 200
lis = [0] * (max_size//interval)
idx = 1
zeros = 0
for f in files:
if idx % 100000 == 0:
print(idx)
idx += 1
size = os.path.getsize(dirpath + str(f))
if size >= max_size:
continue
if size == 0:
zeros += 1
continue
lis[size//interval] += 1
with open('size.csv', 'w') as f:
for i, s in enumerate(lis):
f.write('%d, %d\n' % (i*interval, s))
|
19,594 | b0e047c8777ca80a0675fd280ece13a4cf9308ab | __author__ = 'Pranjal Goswami, QMEL IITKGP, http://pranjalgoswami.in'
from lib import *
import numpy as np
import matplotlib.pyplot as plt
import utils
import matplotlib
from matplotlib import cm
n = 10
Nab_max = 3.0*n**2*(n-1)
print Nab_max
cube1 = Cube(n)
cube1.init_5050()
cube1.evaluate_interactions_with_surface_adjustment()
cube1.evaluate_interactions()
print cube1.interactions['Nab']
print cube1.sa_interactions['Nab']
print cube1.surface_interactions
N = 100
loop = True
x = list()
y = list()
z = list()
w = list()
v = list()
while(N <= 10000000 and loop):
cube1.init_5050()
cube1.evaluate_interactions()
Nab = cube1.interactions['Nab']
skipped_configurations=0
y1=0
for i in range(0,N):
index = np.random.randint(0,n**3/2)
target = np.random.randint(0,n**3/2)
current_interactions_a = cube1.get_neighbours(cube1.a[index])
current_interactions_b = cube1.get_neighbours(cube1.b[target])
current_Nab = current_interactions_a['Nab']+\
current_interactions_b['Nab']
next_Nab = current_interactions_a['Naa']+\
current_interactions_a['Nbb']+\
current_interactions_b['Naa']+\
current_interactions_b['Nbb']
if(next_Nab>=current_Nab):
cube1.swap(index,target)
Nab = Nab - current_Nab + next_Nab
cube1.evaluate_interactions_with_surface_adjustment()
#print 'Nab:'+str(Nab)
#print 'Nab_s :'+str(cube1.sa_interactions['Nab'])
#print '_s_i :'+str(cube1.surface_interactions)
'''
print '% surface Interactions: '+\
str((cube1.surface_interactions\
*100.0)/cube1.sa_interactions['Nab'])+'% \n'
print '%d\t%5.2f\t%5.2f\t%5.2f\t%5.2f\t'%(N,Nab,\
cube1.sa_interactions['Nab'],\
cube1.surface_interactions,\
(cube1.surface_interactions\
*100.0)/cube1.sa_interactions['Nab'])
y1+=1
'''
print('%d\t%d'%(i,N))
else:
skipped_configurations+=1
x.append(i)
y.append(Nab)
z.append(cube1.sa_interactions['Nab'])
w.append(cube1.surface_interactions)
v.append((cube1.surface_interactions\
*100.0)/cube1.sa_interactions['Nab'])
font = {'family' : 'serif',
'color' : 'blue',
'weight' : 'normal',
'size' : 16,
}
fig = plt.figure()
plt.title('%% Surface Interactions vs Iterations (N=%d)'%(N),fontdict=font)
plt.xlabel("No. of Swaps")
plt.ylabel("%% Surface Interactions")
plt.ylim(0,100)
plt.plot(x,v,'b-',label='Nab')
#plt.plot(x,z,'g-.',label='Surface Adjusted Nab')
#plt.plot(x,w,'r--.',label='Surface Interactions')
#plt.legend()
plt.savefig('output/N_si_percent_%d.png'%(N))
N*=10
loop = True
|
19,595 | 53d977ad094e757bea44cc6a035121bd4bbdf892 | a = float(input('Digite o valor de A: '))
b = float(input('Digite o valor de B: '))
c = a
a = b
b = c
print("O valor de A é: ", a)
print("O valor de B é: ", b)
|
19,596 | 989e532b645d8cf655972a822115325e23e37843 | expressao = 30-(3**2)+(8//3**2)*10
print(expressao) |
19,597 | 91b2ab9f58deae39a8a14eb3255185516311c616 | # -*- coding: utf-8 -*-
from . import op
# JH-512 geometry: 64-byte message blocks; the hash state is 32 words of
# 32 bits, handled as an 8x4 word matrix (JH_HX rows by JH_HY columns).
Jh_BlockSize = 64
Jh_StateSize = 32
JH_HX = 8
JH_HY = 4
# Initial state (IV) words for JH-512; loaded (byte-swapped) into
# ctx['state'] by jh() before any message data is absorbed.
IV512 = [
    (0x6fd14b96), (0x3e00aa17), (0x636a2e05), (0x7a15d543),
    (0x8a225e8d), (0x0c97ef0b), (0xe9341259), (0xf2b3c361),
    (0x891da0c1), (0x536f801e), (0x2aa9056b), (0xea2b6d80),
    (0x588eccdb), (0x2075baa6), (0xa90f3a76), (0xbaf83bf7),
    (0x0169e605), (0x41e34a69), (0x46b58a8e), (0x2e6fe65a),
    (0x1047a7d0), (0xc1843c24), (0x3b6e71b1), (0x2d5ac199),
    (0xcf57f6ec), (0x9db1f856), (0xa706887c), (0x5716b156),
    (0xe3c2fcdf), (0xe68517fb), (0x545a4678), (0xcc8cdd4b),
]
# Round-constant words for the 42 rounds of the E8 permutation; indexed
# eight words per round via Ceven()/Codd() below.
C = [
    0xa2ded572, 0x67f815df, 0x0a15847b, 0x571523b7, 0x90d6ab81, 0xf6875a4d,
    0xc54f9f4e, 0x402bd1c3, 0xe03a98ea, 0x9cfa455c, 0x99d2c503, 0x9a99b266,
    0xb4960266, 0x8a53bbf2, 0x1a1456b5, 0x31a2db88, 0x5c5aa303, 0xdb0e199a,
    0x0ab23f40, 0x1044c187, 0x8019051c, 0x1d959e84, 0xadeb336f, 0xdccde75e,
    0x9213ba10, 0x416bbf02, 0x156578dc, 0xd027bbf7, 0x39812c0a, 0x5078aa37,
    0xd2bf1a3f, 0xd3910041, 0x0d5a2d42, 0x907eccf6, 0x9c9f62dd, 0xce97c092,
    0x0ba75c18, 0xac442bc7, 0xd665dfd1, 0x23fcc663, 0x036c6e97, 0x1ab8e09e,
    0x7e450521, 0xa8ec6c44, 0xbb03f1ee, 0xfa618e5d, 0xb29796fd, 0x97818394,
    0x37858e4a, 0x2f3003db, 0x2d8d672a, 0x956a9ffb, 0x8173fe8a, 0x6c69b8f8,
    0x4672c78a, 0x14427fc0, 0x8f15f4c5, 0xc45ec7bd, 0xa76f4475, 0x80bb118f,
    0xb775de52, 0xbc88e4ae, 0x1e00b882, 0xf4a3a698, 0x338ff48e, 0x1563a3a9,
    0x24565faa, 0x89f9b7d5, 0x20edf1b6, 0xfde05a7c, 0x5ae9ca36, 0x362c4206,
    0x433529ce, 0x3d98fe4e, 0x74f93a53, 0xa74b9a73, 0x591ff5d0, 0x86814e6f,
    0x81ad9d0e, 0x9f5ad8af, 0x670605a7, 0x6a6234ee, 0xbe280b8b, 0x2717b96e,
    0x26077447, 0x3f1080c6, 0x6f7ea0e0, 0x7b487ec6, 0xa50a550d, 0xc0a4f84a,
    0x9fe7e391, 0x9ef18e97, 0x81727686, 0xd48d6050, 0x415a9e7e, 0x62b0e5f3,
    0xec1f9ffc, 0x7a205440, 0x001ae4e3, 0x84c9f4ce, 0xf594d74f, 0xd895fa9d,
    0x117e2e55, 0xa554c324, 0x2872df5b, 0x286efebd, 0xe27ff578, 0xb2c4a50f,
    0xef7c8905, 0x2ed349ee, 0x85937e44, 0x7f5928eb, 0x37695f70, 0x4a3124b3,
    0xf128865e, 0x65e4d61d, 0x04771bc7, 0xe720b951, 0xe843fe74, 0x8a87d423,
    0xa3e8297d, 0xf2947692, 0x097acbdd, 0xc1d9309b, 0xfb301b1d, 0xe01bdc5b,
    0x4f4924da, 0xbf829cf2, 0x31bae7a4, 0xffbf70b4, 0x0544320d, 0x48bcf8de,
    0x32fcae3b, 0x39d3bb53, 0xc1c39f45, 0xa08b29e0, 0xfd05c9e5, 0x0f09aef7,
    0x12347094, 0x34f19042, 0x01b771a2, 0x95ed44e3, 0x368e3be9, 0x4a982f4f,
    0x631d4088, 0x15f66ca0, 0x4b44c147, 0xffaf5287, 0xf14abb7e, 0x30c60ae2,
    0xc5b67046, 0xe68c6ecc, 0x56a4d5a4, 0x00ca4fbd, 0x4b849dda, 0xae183ec8,
    0x45ce5773, 0xadd16430, 0x68cea6e8, 0x67255c14, 0xf28cdaa3, 0x16e10ecb,
    0x5806e933, 0x9a99949a, 0x20b2601f, 0x7b846fc2, 0x7facced1, 0x1885d1a0,
    0xa15b5932, 0xd319dd8d, 0xc01c9a50, 0x46b4a5aa, 0x67633d9f, 0xba6b04e4,
    0xab19caf6, 0x7eee560b, 0xea79b11f, 0x742128a9, 0x35f7bde9, 0xee51363b,
    0x5aac571d, 0x76d35075, 0xfec2463a, 0x01707da3, 0xafc135f7, 0x42d8a498,
    0x20eced78, 0x79676b9e, 0x15638341, 0xa8db3aea, 0x4d3bc3fa, 0x832c8332,
    0x1f3b40a7, 0xf347271c, 0x34f04059, 0x9a762db7, 0x6c4e3ee7, 0xfd4f21d2,
    0x398dfdb8, 0xef5957dc, 0x490c9b8d, 0xdaeb492b, 0x49d7a25b, 0x0d70f368,
    0xd0ae3b7d, 0x84558d7a, 0xf0e9a5f5, 0x658ef8e4, 0xf4a2b8a0, 0x533b1036,
    0x9e07a80c, 0x5aec3e75, 0x92946891, 0x4f88e856, 0x555cb05b, 0x4cbcbaf8,
    0x993bbbe3, 0x7b9487f3, 0xd6f4da75, 0x5d1c6b72, 0x28acae64, 0x6db334dc,
    0x50a5346c, 0x71db28b8, 0xf2e261f8, 0x2a518d10, 0x3364dbe3, 0xfc75dd59,
    0xf1bcac1c, 0xa23fce43, 0x3cd1bb67, 0xb043e802, 0xca5b0a33, 0x75a12988,
    0x4d19347f, 0x5c5316b4, 0xc3943b92, 0x1e4d790e, 0xd7757479, 0x3fafeeb6,
    0xf7d4a8ea, 0x21391abe, 0x097ef45c, 0x5127234c, 0x5324a326, 0xd23c32ba,
    0x4a17a344, 0xadd5a66d, 0xa63e1db5, 0x08c9f2af, 0x983d5983, 0x563c6b91,
    0xa17cf84c, 0x4d608672, 0xcc3ee246, 0xf6c76e08, 0xb333982f, 0x5e76bcb1,
    0xa566d62b, 0x2ae6c4ef, 0xe8b6f406, 0x36d4c1be, 0x1582ee74, 0x6321efbc,
    0x0d4ec1fd, 0x69c953f4, 0xc45a7da7, 0x26585806, 0x1614c17e, 0x16fae006,
    0x3daf907e, 0x3f9d6328, 0xe3f2c9d2, 0x0cd29b00, 0x30ceaa5f, 0x300cd4b7,
    0x16512a74, 0x9832e0f2, 0xd830eb0d, 0x9af8cee3, 0x7b9ec54b, 0x9279f1b5,
    0x6ee651ff, 0xd3688604, 0x574d239b, 0x316796e6, 0xf3a6e6cc, 0x05750a17,
    0xd98176b1, 0xce6c3213, 0x8452173c, 0x62a205f8, 0xb3cb2bf4, 0x47154778,
    0x825446ff, 0x486a9323, 0x0758df38, 0x65655e4e, 0x897cfcf2, 0x8e5086fc,
    0x442e7031, 0x86ca0bd0, 0xa20940f0, 0x4e477830, 0x39eea065, 0x8338f7d1,
    0x37e95ef7, 0xbd3a2ce4, 0x26b29721, 0x6ff81301, 0xd1ed44a3, 0xe7de9fef,
    0x15dfa08b, 0xd9922576, 0xf6f7853c, 0xbe42dc12, 0x7ceca7d8, 0x7eb027ab,
    0xda7d8d53, 0xdea83eaa, 0x93ce25aa, 0xd86902bd, 0xfd43f65a, 0xf908731a,
    0xdaef5fc0, 0xa5194a17, 0x33664d97, 0x6a21fd4c, 0x3198b435, 0x701541db,
    0xbb0f1eea, 0x9b54cded, 0xa163d09a, 0x72409751, 0xbf9d75f6, 0xe26f4791,
]
def Sb(x, c):
    """Bit-sliced JH S-box over four 32-bit slices; mutates and returns x.

    c is the round-constant word that selects S0/S1 per bit position.
    Statement order is load-bearing: each line uses values updated above.
    NOTE(review): Python's ~ produces arbitrary-precision negative ints
    (no 32-bit mask here); presumably later packing in `op` normalizes
    the words — confirm before reusing these values directly.
    """
    x[3] = ~x[3]
    x[0] ^= (c) & ~x[2]
    tmp = (c) ^ (x[0] & x[1])
    x[0] ^= x[2] & x[3]
    x[3] ^= ~x[1] & x[2]
    x[1] ^= x[0] & x[2]
    x[2] ^= x[0] & ~x[3]
    x[0] ^= x[1] | x[3]
    x[3] ^= x[1] & x[2]
    x[1] ^= tmp & x[0]
    x[2] ^= tmp
    return x
def Lb(x):
    """JH linear layer over eight bit-sliced words; mutates and returns x.

    XORs the first half into the second, then the (updated) second half
    back into the first — so statement order must not be changed.
    """
    x[4] ^= x[1]
    x[5] ^= x[2]
    x[6] ^= x[3] ^ x[0]
    x[7] ^= x[0]
    x[0] ^= x[5]
    x[1] ^= x[6]
    x[2] ^= x[7] ^ x[4]
    x[3] ^= x[4]
    return x
def Ceven(n, r):
    # Round-r constant word n (MSW-first) for the even-indexed state rows.
    return C[((r) << 3) + 3 - n]
def Codd(n, r):
    # Round-r constant word n (MSW-first) for the odd-indexed state rows.
    return C[((r) << 3) + 7 - n]
def S(x0, x1, x2, x3, cb, r):
    """Apply the bit-sliced S-box Sb to each word column of the four rows.

    cb is the round-constant selector (Ceven or Codd) called as cb(col, r).
    Rows are updated in place, column 3 down to column 0.
    """
    for col in (3, 2, 1, 0):
        substituted = Sb([x0[col], x1[col], x2[col], x3[col]], cb(col, r))
        x0[col], x1[col], x2[col], x3[col] = substituted
def L(x0, x1, x2, x3, x4, x5, x6, x7):
    """Apply the linear layer Lb column-by-column across the eight rows.

    Rows are updated in place, column 3 down to column 0.
    """
    rows = (x0, x1, x2, x3, x4, x5, x6, x7)
    for col in (3, 2, 1, 0):
        mixed = Lb([row[col] for row in rows])
        for row, value in zip(rows, mixed):
            row[col] = value
def Wz(x, c, n):
    """Swap adjacent n-bit fields (selected by mask c) in each of the
    four words of x, in place."""
    for i in (3, 2, 1, 0):
        shifted_up = (x[i] & c) << n
        x[i] = ((x[i] >> n) & c) | shifted_up
def W(ro, x):
    """Permutation step for round-in-group index ro (0..6), in place.

    ro 0..4 swap n-bit fields within each word via Wz; ro 5 and 6 swap
    whole words instead.
    """
    field_swaps = {
        0: (0x55555555, 1),
        1: (0x33333333, 2),
        2: (0x0F0F0F0F, 4),
        3: (0x00FF00FF, 8),
        4: (0x0000FFFF, 16),
    }
    if ro in field_swaps:
        mask, shift = field_swaps[ro]
        return Wz(x, mask, shift)
    if ro == 5:
        # Swap word pairs (3,2) and (1,0).
        x[3], x[2] = x[2], x[3]
        x[1], x[0] = x[0], x[1]
        return
    if ro == 6:
        # Swap word pairs (3,1) and (2,0).
        x[3], x[1] = x[1], x[3]
        x[2], x[0] = x[0], x[2]
def SL(h, r, ro):
    """One JH round on the 8x4 state h: S-boxes on the even and odd rows,
    linear mix, then the ro-dependent word/bit permutation on odd rows."""
    S(h[0], h[2], h[4], h[6], Ceven, r)
    S(h[1], h[3], h[5], h[7], Codd, r)
    L(h[0], h[2], h[4], h[6], h[1], h[3], h[5], h[7])
    W(ro, h[1])
    W(ro, h[3])
    W(ro, h[5])
    W(ro, h[7])
def READ_STATE(h, state):
    """Unpack the flat 32-word state into the 8x4 matrix h.

    Word order within each row is reversed: h[i][3] receives the first
    of the row's four consecutive state words.
    """
    for row in range(8):
        for col in range(4):
            h[row][3 - col] = state[row * 4 + col]
def WRITE_STATE(h, state):
    """Pack the 8x4 matrix h back into the flat 32-word state — the exact
    inverse of READ_STATE's reversed word order."""
    for row in range(8):
        for col in range(4):
            state[row * 4 + col] = h[row][3 - col]
def E8(h):
    """Run the 42-round JH permutation on state h: six groups of seven
    rounds, with the permutation pattern cycling 0..6 within each group."""
    for base in range(0, 42, 7):
        for offset in range(7):
            SL(h, base + offset, offset)
def bufferXORInsertBackwards(buf, data, x, y, bufferOffsetX=0, bufferOffsetY=0):
    """XOR a tile of flat `data` (stride 4) into the 2-D word matrix `buf`,
    writing columns in reverse order, at the given row/column offsets.

    NOTE(review): the inner loop runs range(x) although j indexes the y
    dimension; every call in this file uses x == y == 4, so the two are
    indistinguishable here — confirm intent before reusing with x != y.
    """
    for i in range(x):
        for j in range(x):
            m = i + bufferOffsetX
            n = bufferOffsetY + y - 1 - j
            buf[m][n] = buf[m][n] ^ data[i * 4 + j]
def jh_update(ctx, msg, msg_len=None):
    """Absorb msg into the JH state, buffering partial 64-byte blocks.

    ctx keys read/written: 'buffer', 'ptr', 'state', 'blockCountLow',
    'blockCountHigh'. msg_len defaults to len(msg).
    """
    buf = ctx['buffer']
    buf_len = len(buf)
    ptr = ctx['ptr']
    if msg_len is None:
        msg_len = len(msg)
    # Fast path: message fits in the partial buffer — no compression yet.
    if msg_len < buf_len - ptr:
        op.buffer_insert(buf, ptr, msg, msg_len)
        ptr += msg_len
        ctx['ptr'] = ptr
        return
    # 8x4 matrix of 32-bit words mirroring the 1024-bit state.
    V = [None] * JH_HX
    for i in range(JH_HX):
        V[i] = [None] * JH_HY
    READ_STATE(V, ctx['state'])
    while msg_len > 0:
        clen = buf_len - ptr
        if clen > msg_len:
            clen = msg_len
        op.buffer_insert(buf, ptr, msg, clen)
        ptr += clen
        msg = msg[clen:]
        msg_len -= clen
        if ptr == buf_len:
            buf32 = op.swap32_list(op.bytes_to_i32_list(buf))
            # Compression: XOR block into the first state half, apply the
            # E8 permutation, then XOR the block into the second half.
            bufferXORInsertBackwards(V, buf32, 4, 4)
            E8(V)
            bufferXORInsertBackwards(V, buf32, 4, 4, 4, 0)
            # 64-bit block counter kept as two 32-bit halves.
            blockCountLow = ctx['blockCountLow']
            blockCountLow = op.t32(blockCountLow + 1)
            ctx['blockCountLow'] = blockCountLow
            if blockCountLow == 0:
                ctx['blockCountHigh'] += 1
            ptr = 0
    WRITE_STATE(V, ctx['state'])
    ctx['ptr'] = ptr
def jh_close(ctx):
    """Finalize the JH computation: pad, absorb the padding, and return
    the digest as 16 32-bit words.

    Padding is 0x80, a run of zero bytes, then a 128-bit bit-length
    counter, sized so the final jh_update ends exactly on a block
    boundary (one 64-byte block when the buffer is empty, two otherwise).
    """
    buf = bytearray(128)
    l = [None] * 4
    buf[0] = 0x80
    ptr = ctx['ptr']
    # Bug fix: was `ptr is 0` — identity comparison with an int literal
    # only works via CPython's small-int cache; use equality.
    if ptr == 0:
        numz = 47
    else:
        numz = 111 - ptr
    buf[1:1 + numz] = [0] * numz
    blockCountLow = ctx['blockCountLow']
    blockCountHigh = ctx['blockCountHigh']
    # Message length in bits: blocks * 512 + buffered bytes * 8.
    l[0] = op.t32(blockCountLow << 9) + (ptr << 3)
    l[1] = op.t32(blockCountLow >> 23) + op.t32(blockCountHigh << 9)
    l[2] = op.t32(blockCountHigh >> 23)
    l[3] = 0
    lBytes = op.bytes_from_i32_list(op.swap32_list(l))
    op.buffer_insert(buf, 1 + numz, lBytes[::-1], 16)
    jh_update(ctx, buf, numz + 17)
    # Digest = the last 16 state words, byte-swapped back to output order.
    out = [None] * 16
    state = ctx['state']
    for u in range(16):
        out[u] = op.swap32(state[u + 16])
    return out
def jh(msg, out_array=False, in_array=False):
    """One-shot JH hash of msg.

    msg: bytes-like message, or a list of 32-bit ints when in_array=True.
    out_array: when True return the digest as 16 32-bit ints instead of bytes.
    """
    ctx = {}
    ctx['state'] = op.swap32_list(IV512)
    ctx['ptr'] = 0
    ctx['buffer'] = bytearray(Jh_BlockSize)
    ctx['blockCountHigh'] = 0
    ctx['blockCountLow'] = 0
    if in_array:
        msg = op.bytes_from_i32_list(msg)
    jh_update(ctx, msg)
    res = jh_close(ctx)
    if not out_array:
        res = op.bytes_from_i32_list(res)
    return res
|
19,598 | bbbe0f330e843d1ab028d9563b712857af3e033d | '''
Напишите программу, которая принимает на вход список чисел в одной строке и
выводит на экран в одну строку значения, которые повторяются в нём более одного
раза.
Для решения задачи может пригодиться метод sort списка.
Выводимые числа не должны повторяться, порядок их вывода может быть произвольным.
'''
# Read the numbers, then report each value that occurs more than once.
a = [int(i) for i in input().split()]
a.sort()
# After sorting, equal values are adjacent, so one pass over consecutive
# pairs finds every duplicate; comparing against only the last collected
# value keeps the membership test O(1) instead of rescanning the result.
b = []
for prev, cur in zip(a, a[1:]):
    if cur == prev and (not b or b[-1] != cur):
        b.append(cur)
for value in b:
    print(value, end=' ')
|
19,599 | 6d8634fb2805f899ef9d9bd911edb3c0367d50ab | import msvcrt
import serial
import time
class Arduino(object):
    """Serial-port bridge to an Arduino note player.

    Each public set<code>() method transmits its numeric code over the
    serial link (after waiting for the board's 'w' ready handshake) and
    returns True; stop() sends code 99 to silence the board.
    """

    __OUTPUT_PINS = -1  # kept for backward compatibility; unused here

    def __init__(self, port, baudrate=115200):
        """Open the serial link and send the initial stop code (99)."""
        self.serial = serial.Serial(port, baudrate)
        self.serial.write(b'99')

    def __str__(self):
        return "Arduino is on port %s at %d baudrate" % (self.serial.port, self.serial.baudrate)

    def __sendData(self, serial_data):
        # Block until the board reports readiness ('w'), then transmit.
        while(self.__getData()[0] != "w"):
            pass
        serial_data = str(serial_data).encode('utf-8')
        self.serial.write(serial_data)

    def __getData(self):
        # One line from the board, decoded and stripped of its newline.
        input_string = self.serial.readline()
        input_string = input_string.decode('utf-8')
        return input_string.rstrip('\n')

    def close(self):
        """Close the serial port."""
        self.serial.close()
        return True

    def stop(self):
        """Silence the board (code 99)."""
        self.__sendData('99')
        return True

    def _send_code(self, code):
        # Shared body for every generated set<code>() note method.
        self.__sendData(str(code))
        return True


def _install_note_methods():
    """Generate set1..set7, set11..set17 and set21..set27 on Arduino.

    These replace 21 hand-written, byte-identical methods; the public
    interface (names, arguments, True return value) is unchanged.
    """
    for code in (*range(1, 8), *range(11, 18), *range(21, 28)):
        def note(self, _code=code):  # default arg binds code per method
            return self._send_code(_code)
        note.__name__ = 'set%d' % code
        setattr(Arduino, note.__name__, note)


_install_note_methods()
# Try the likely serial ports in order; keep the first that opens.
b = None
for _port in ('COM3', 'COM4', 'COM5'):
    try:
        b = Arduino(_port)
        break
    except Exception:
        continue
if b is None:
    print("Connection FAIL!")
    exit()
print("\nSuccessfully connect to Arduino!\n")
print("Please press Key to play:\n")
print("Q:c6 W:d6 E:e6 R:f6 T:g6 Y:a6 U:b6\n")
print("A:c5 S:d5 D:e5 F:f5 G:g5 H:a5 J:b5\n")
print("Z:c4 X:d4 C:e4 V:f4 B:g4 N:a4 M:b4\n")
print("space:0\n")
# Map each key to the name of the Arduino method that plays its note.
_KEY_METHODS = {
    'a': 'set1', 's': 'set2', 'd': 'set3', 'f': 'set4',
    'g': 'set5', 'h': 'set6', 'j': 'set7',
    'q': 'set11', 'w': 'set12', 'e': 'set13', 'r': 'set14',
    't': 'set15', 'y': 'set16', 'u': 'set17',
    'z': 'set21', 'x': 'set22', 'c': 'set23', 'v': 'set24',
    'b': 'set25', 'n': 'set26', 'm': 'set27',
    ' ': 'stop',
}
while True:
    key = bytes.decode(msvcrt.getch())
    method_name = _KEY_METHODS.get(key)
    if method_name is not None:
        getattr(b, method_name)()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.