text stringlengths 38 1.54M |
|---|
import torch
import random
class E_Greedy_Policy():
    """Epsilon-greedy action selection with multiplicative epsilon decay.

    With probability epsilon a random action index is drawn; otherwise the
    greedy action (argmax of the Q-network output) is taken.
    """
    def __init__(self, epsilon, decay, min_epsilon):
        # Keep the starting value so the schedule can be reset.
        self.epsilon = epsilon
        self.epsilon_start = epsilon
        self.decay = decay
        self.epsilon_min = min_epsilon

    def __call__(self, state, n_actions, device, Q_network):
        ###borrowed and adapted from Lab 6###
        # Exploit when a uniform draw exceeds epsilon, otherwise explore.
        if random.random() > self.epsilon:
            Q_network.eval()
            with torch.no_grad():
                # argmax over the action dimension, shaped (1, 1)
                index_action = Q_network(state).max(1)[1].view(1, 1)
            Q_network.train()
            return index_action
        # Random exploratory action as a long tensor on the requested device.
        return torch.tensor([[random.randrange(n_actions)]],
                            device=device, dtype=torch.long)

    def update_epsilon(self):
        """Decay epsilon multiplicatively, clamped at epsilon_min."""
        self.epsilon = max(self.epsilon * self.decay, self.epsilon_min)

    def reset(self):
        """Restore epsilon to its initial value."""
        self.epsilon = self.epsilon_start
from __future__ import absolute_import
from __future__ import with_statement
import re
import sys
import warnings
try:
import unittest # noqa
unittest.skip
from unittest.util import safe_repr, unorderable_list_difference
except AttributeError:
import unittest2 as unittest # noqa
from unittest2.util import safe_repr, unorderable_list_difference # noqa
from billiard.five import string_t, items, values
from .compat import catch_warnings
# -- adds assertWarns from recent unittest2, not in Python 2.7.
class _AssertRaisesBaseContext(object):
    """Shared state for assertRaises/assertWarns style context managers.

    Holds the expected exception/warning class, the test case's failure
    exception type, and an optional message regex (compiled on demand).
    """
    def __init__(self, expected, test_case, callable_obj=None,
                 expected_regex=None):
        self.expected = expected
        self.failureException = test_case.failureException
        self.obj_name = None
        # A plain string pattern is compiled; pre-compiled patterns (or None)
        # pass straight through.
        self.expected_regex = (
            re.compile(expected_regex)
            if isinstance(expected_regex, string_t)
            else expected_regex)
class _AssertWarnsContext(_AssertRaisesBaseContext):
    """A context manager used to implement TestCase.assertWarns* methods."""

    def __enter__(self):
        # The __warningregistry__'s need to be in a pristine state for tests
        # to work properly.
        warnings.resetwarnings()
        for v in values(sys.modules):
            if getattr(v, '__warningregistry__', None):
                v.__warningregistry__ = {}
        # Record every warning (including duplicates) raised inside the block.
        self.warnings_manager = catch_warnings(record=True)
        self.warnings = self.warnings_manager.__enter__()
        warnings.simplefilter('always', self.expected)
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.warnings_manager.__exit__(exc_type, exc_value, tb)
        if exc_type is not None:
            # let unexpected exceptions pass through
            return
        # Name used in failure messages; falls back to str() for non-classes.
        try:
            exc_name = self.expected.__name__
        except AttributeError:
            exc_name = str(self.expected)
        # Scan recorded warnings for the first one matching both the expected
        # class and (if given) the message regex.
        first_matching = None
        for m in self.warnings:
            w = m.message
            if not isinstance(w, self.expected):
                continue
            if first_matching is None:
                first_matching = w
            if (self.expected_regex is not None and
                not self.expected_regex.search(str(w))):
                continue
            # store warning for later retrieval
            self.warning = w
            self.filename = m.filename
            self.lineno = m.lineno
            return
        # Now we simply try to choose a helpful failure message
        if first_matching is not None:
            # Right warning class fired, but its text did not match the regex.
            raise self.failureException(
                '%r does not match %r' % (
                    self.expected_regex.pattern, str(first_matching)))
        if self.obj_name:
            raise self.failureException(
                '%s not triggered by %s' % (exc_name, self.obj_name))
        else:
            raise self.failureException('%s not triggered' % exc_name)
class Case(unittest.TestCase):
    """TestCase with back-ported assertWarns/assertWarnsRegex plus the
    Python 2 helpers assertDictContainsSubset and assertItemsEqual."""

    def assertWarns(self, expected_warning):
        """Context manager asserting that `expected_warning` is emitted."""
        return _AssertWarnsContext(expected_warning, self, None)

    def assertWarnsRegex(self, expected_warning, expected_regex):
        """Like assertWarns, but the warning text must match the regex."""
        return _AssertWarnsContext(expected_warning, self,
                                   None, expected_regex)

    def assertDictContainsSubset(self, expected, actual, msg=None):
        """Fail unless every key/value pair of `expected` occurs in `actual`."""
        pairs = list(items(expected))
        missing = [k for k, _ in pairs if k not in actual]
        mismatched = [
            '%s, expected: %s, actual: %s' % (
                safe_repr(k), safe_repr(v), safe_repr(actual[k]))
            for k, v in pairs
            if k in actual and v != actual[k]
        ]
        if not (missing or mismatched):
            return
        parts = []
        if missing:
            parts.append('Missing: %s' % ','.join(map(safe_repr, missing)))
        if mismatched:
            parts.append('Mismatched values: %s' % ','.join(mismatched))
        self.fail(self._formatMessage(msg, '; '.join(parts)))

    def assertItemsEqual(self, expected_seq, actual_seq, msg=None):
        """Fail unless both sequences hold the same elements, in any order."""
        try:
            expected = sorted(expected_seq)
            actual = sorted(actual_seq)
        except TypeError:
            # Unsortable items (example: set(), complex(), ...)
            missing, unexpected = unorderable_list_difference(
                list(expected_seq), list(actual_seq))
        else:
            return self.assertSequenceEqual(expected, actual, msg=msg)
        errors = []
        if missing:
            errors.append(
                'Expected, but missing:\n %s' % (safe_repr(missing), ),
            )
        if unexpected:
            errors.append(
                'Unexpected, but present:\n %s' % (safe_repr(unexpected), ),
            )
        if errors:
            self.fail(self._formatMessage(msg, '\n'.join(errors)))
|
import argparse
import cPickle as pickle
import sys
import numpy as np
import os.path
from sklearn.metrics import average_precision_score
## my own library
from my_utils import printParams, myloadData, mypretrainLSTM, glove_init_LSTM
from my_utils import get_dict, vectorize_label, mymap, count_MAP_total
from build_glove_matrix import BuildEmbedMatrix
import tensorflow as tf
from keras.backend.tensorflow_backend import set_session
# Cap TensorFlow at 40% of GPU memory and install the session as the Keras
# backend session (TF1-style API).
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.4
set_session(tf.Session(config=config))

# Command-line hyper-parameters for the alternating-training experiment.
parser = argparse.ArgumentParser()
parser.add_argument("-embed_dim", type=int, default=300)
parser.add_argument("-dense_dim", type=int, default=1024)
parser.add_argument("-lstm_dim", type=int, default=128)
parser.add_argument("-epochs", type=int, default=20)
parser.add_argument("-batch_size", type=int, default=256)
parser.add_argument("-mode", type=str, default='train')
parser.add_argument("-exp_name", type=str, required=True, help="Name this experiment!!")
# 'none' means random embedding init; otherwise a path to GloVe vectors.
parser.add_argument("-embed_dir", type=str, default='none')
args = parser.parse_args()
# paths
'''common paths'''
source = 'stack'
target = 'interspeech'
datapathin = './splitdata/'
vectorpathin = './splitvector/'
pathweight = './weight/'
weightname = source+'_'+args.exp_name+'.alter.weight'
weightname_target = target+'_'+args.exp_name+'.alter.weight'
source_tag_num = 1000   # number of output tags for the source domain
target_tag_num = 1500   # number of output tags for the target domain
'''train paths'''
file_name_train = vectorpathin+source+'.train.body.vector'
file_name_test = vectorpathin+source+'.test.body.vector'
file_name_train_tag = vectorpathin+source+'.train.tag.vector'
file_name_test_tag = vectorpathin+source+'.test.tag.vector'
file_dic_name = datapathin+'All.dic.body'
'''target train paths'''
file_name_train_target = vectorpathin+target+'.train.body.vector'
file_name_test_target = vectorpathin+target+'.test.body.vector'
file_name_train_tag_target = vectorpathin+target+'.train.tag.vector'
file_name_test_tag_target = vectorpathin+target+'.test.tag.vector'
'''cal_map paths'''
file_tag_dic = datapathin+source+'.dic.tag'
file_test_tag = datapathin+source+'.test.tag'
#resultname = './result/'+source+'_'+args.exp_name+'.result'
file_tag_dic_target = datapathin+target+'.dic.tag'
file_test_tag_target = datapathin+target+'.test.tag'
# parameters
# Vocabulary size: one entry per dictionary line, +1 (presumably for the
# padding/OOV index — TODO confirm against my_utils).
lines = open(file_dic_name,'r').read().splitlines()
max_features = len(lines) + 1
# maxlen = longest source training sequence, in whitespace tokens.
maxlen = 0
for oneline in open(file_name_train):
    if len(oneline.split())>maxlen:
        maxlen = len(oneline.split())
d_output = source_tag_num
# maxlen_target = longest target training sequence.
maxlen_target = 0
for oneline in open(file_name_train_target):
    if len(oneline.split())>maxlen_target:
        maxlen_target = len(oneline.split())
d_output_target = target_tag_num
# Print experiment configuration for both domains (Python 2 print statements).
print '========================='
print 'Doing Alternating Training'
print '========================='
print 'Source Info'
printParams(args,max_features,maxlen,d_output)
print '========================='
print 'Target Info'
printParams(args,max_features,maxlen_target,d_output_target)
# Build one model per domain: randomly-initialised embeddings when no GloVe
# directory is given, otherwise GloVe-initialised embeddings shared by both.
if args.embed_dir == 'none':
    source_model = mypretrainLSTM(max_features,maxlen,args,d_output,True)
    target_model = mypretrainLSTM(max_features,maxlen_target,args,d_output_target,True)
else:
    BEM = BuildEmbedMatrix()
    embedding_matrix = BEM.buildEmbedMatrix(args.embed_dir)
    source_model = glove_init_LSTM(max_features,maxlen,args,d_output,True, embedding_matrix)
    # BUG FIX: the target model must be built with the *target* sequence
    # length (was `maxlen`), matching the mypretrainLSTM branch above;
    # otherwise it cannot accept X_train_target padded to maxlen_target.
    target_model = glove_init_LSTM(max_features,maxlen_target,args,d_output_target,True, embedding_matrix)
if args.mode == 'train':
    #load data
    X_train, Y_train = myloadData(file_name_train,file_name_train_tag,d_output,maxlen)
    Y_train = Y_train.astype(np.float32)
    X_test, Y_test = myloadData(file_name_test,file_name_test_tag,d_output,maxlen)
    Y_test = Y_test.astype(np.float32)
    X_train_target, Y_train_target = myloadData(file_name_train_target,file_name_train_tag_target,d_output_target,maxlen_target)
    Y_train_target = Y_train_target.astype(np.float32)
    X_test_target, Y_test_target = myloadData(file_name_test_target,file_name_test_tag_target,d_output_target,maxlen_target)
    Y_test_target = Y_test_target.astype(np.float32)
    # Ground-truth tag matrices for MAP evaluation, restricted to the rows
    # vectorize_label marks as valid.
    word_indices, indices_word = get_dict(file_tag_dic)
    y, y_norm, valid_index, oov = vectorize_label(file_test_tag, d_output, word_indices)
    y = y[valid_index,0:]
    y_norm = y_norm[valid_index,0:]
    word_indices_val, indices_word_val = get_dict(file_tag_dic_target)
    y_val, y_val_norm, valid_val_index, oov_val = vectorize_label(file_test_tag_target, d_output_target, word_indices_val)
    y_val = y_val[valid_val_index,0:]
    y_val_norm = y_val_norm[valid_val_index,0:]
    print 'Start Training'
    # Alternating training: each epoch trains the source model, copies all
    # weights except the last two (presumably the task-specific output
    # layer's kernel and bias — TODO confirm) into the target model, trains
    # the target model, then reloads those shared weights into the source
    # model at the start of the next epoch.
    for e in range(args.epochs):
        print "================================================Epoch %d================================================================" % (e+1)
        if (os.path.isfile(pathweight + weightname_target + '_epo'+str(e-1))):
            # Warm-start the source model from last epoch's target weights,
            # keeping the source model's own final two weight arrays.
            filein = open(pathweight + weightname_target + '_epo'+str(e-1),'rb')
            coco = pickle.load(filein)
            coco = coco[:-2]
            oldweight = source_model.get_weights()
            #coco.append(oldweight[-5])
            #coco.append(oldweight[-4])
            #coco.append(oldweight[-3])
            coco.append(oldweight[-2])
            coco.append(oldweight[-1])
            source_model.set_weights(coco)
            filein.close()
        source_model.fit(
            X_train,
            Y_train,
            batch_size=args.batch_size,
            nb_epoch=1,
            #verbose=0
            validation_data=(X_test, Y_test)
        )
        pred = source_model.predict(X_test)
        # Checkpoint this epoch's source weights.
        theweight = source_model.get_weights()
        fileout = open(pathweight + weightname + '_epo'+str(e),'wb')
        pickle.dump(theweight, fileout)
        fileout.close()
        # Transfer shared weights into the target model, keeping its own
        # final two weight arrays.
        oldweight = target_model.get_weights()
        theweight = theweight[:-2]
        #theweight.append(oldweight[-5])
        #theweight.append(oldweight[-4])
        #theweight.append(oldweight[-3])
        theweight.append(oldweight[-2])
        theweight.append(oldweight[-1])
        target_model.set_weights(theweight)
        target_model.fit(
            X_train_target,
            Y_train_target,
            batch_size=args.batch_size,
            nb_epoch=1,
            #verbose=0
            validation_data=(X_test_target, Y_test_target)
        )
        # Checkpoint this epoch's target weights.
        theweight = target_model.get_weights()
        fileout = open(pathweight + weightname_target + '_epo'+str(e),'wb')
        pickle.dump(theweight, fileout)
        fileout.close()
        pred_val = target_model.predict(X_test_target)
        # Report MAP every other epoch, with and without OOV keywords.
        if e%2 == 0:
            map_oov, pr_oov = count_MAP_total(y, pred, oov, valid_index)
            print('MAP of train(oov keyword included) is ',map_oov)
            #print('P@R of train(oov keyword included) is ',pr_oov)
            map_no_oov, pr_no_oov = count_MAP_total(y, pred, np.zeros(len(oov)), valid_index)
            print('MAP of train(oov keyword not included) is ',map_no_oov)
            #print('P@R of train(oov keyword not included) is ',pr_no_oov)
            map_oov_val, pr_oov_val = count_MAP_total(y_val, pred_val, oov_val, valid_val_index)
            print('MAP of target(oov keyword included) is ',map_oov_val)
            #print('P@R of target(oov keyword included) is ',pr_oov_val)
            map_no_oov_val, pr_no_oov_val = count_MAP_total(y_val, pred_val, np.zeros(len(oov_val)), valid_val_index)
            print('MAP of target(oov keyword not included) is ',map_no_oov_val)
            #print('P@R of target(oov keyword not included) is ',pr_no_oov_val)
|
# -*- coding:utf-8 -*-
# import time
#
# # 获得当前时间时间戳
# now = int(time.time())
# # 转换为其他日期格式,如:"%Y-%m-%d %H:%M:%S"
# timeStruct = time.localtime(now)
# strTime = time.strftime("%Y-%m-%d-%H:%M", timeStruct)
#
# print(strTime)
# sheet_name_xls = (u'测试专用表格 %s' % strTime)
# print(sheet_name_xls)
# String formatting: map a slot number to its bound signal name.
def soltToSingnal(solt):
    """Bind a slot number to its signal: slots 1..6 map to signals Y1..Y6.

    Prints the signal name and also returns it (previously returned None),
    so callers can use the result programmatically.
    """
    signal = "Y" + str(int(solt))
    print(signal)
    return signal

soltToSingnal(1)
|
from datetime import datetime, timedelta
from sqlalchemy.orm import relationship
from app import db
class Token(db.Model):
    """An OAuth token issued to a client, optionally bound to a user."""
    id = db.Column(db.Integer, primary_key=True)
    # Owning user; DB-level ON DELETE CASCADE removes tokens with the user.
    user_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='CASCADE', name='token_user_id_fk'))
    user = relationship('User')
    client_id = db.Column(db.String(40), db.ForeignKey('client.client_id', ondelete='CASCADE', name='token_client_id_fk'), nullable=False)
    client = relationship('Client')
    token_type = db.Column(db.String(40))
    access_token = db.Column(db.String(255))
    refresh_token = db.Column(db.String(255))
    expires = db.Column(db.DateTime)
    # Space-separated scope names; see the `scopes` property.
    scope = db.Column(db.Text)

    def __init__(self, **kwargs):
        """Set all given attributes, then stamp the expiry ~half a year out.

        NOTE(review): `expires` is assigned *after* the kwargs loop, so a
        caller-supplied `expires` kwarg is always overwritten — confirm
        that is intended. timedelta(6 * 365 / 12) is 182.5 days on
        Python 3 (182 on Python 2's integer division).
        """
        for k, v in kwargs.items():
            setattr(self, k, v)
        self.expires = datetime.utcnow() + timedelta(6 * 365 / 12)  # half a year

    @property
    def scopes(self):
        """The scope string split into a list ([] when scope is unset)."""
        if self.scope:
            return self.scope.split()
        return []

    def delete(self):
        # Delete and commit immediately; returns self for chaining.
        db.session.delete(self)
        db.session.commit()
        return self
|
'''
File : eddieSpread.py
Start Date : 20070111
Description : Spread messaging interface
$Id: eddieSpread.py 900 2007-12-09 09:27:47Z chris $
'''
__version__ = '$Revision: 900 $'
__copyright__ = 'Copyright (c) Chris Miles 2007'
__author__ = 'Chris Miles'
__license__ = '''
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
'''
## Imports: Python
import cPickle
from cStringIO import StringIO
import Queue
import sys
import threading
import time
import traceback
## Imports: Eddie
import log
## Default Spread server settings - empty means Spread is disabled.
## These are overridden by SPREADSERVER and SPREADPORT config file options.
SPREADSERVER=''
SPREADPORT=''
## Constants
ANYTIME=-1   # Message validity: no expiry, send whenever possible.
BLOCK=1      # Blocking flag passed to Queue.get().
## Globals
UseSpread = 1 # Switch Spread usage on by default; disabled if modules not found
## Import spread python module if possible
try:
    import spread
except ImportError:
    # no Spread module... disable Spread
    UseSpread = 0
################################################################
## Exceptions:
class SpreadError(Exception):
    """Raised when sending/multicasting a message via Spread fails."""
    pass
class SpreadInitError(Exception):
    """Raised when the Spread subsystem cannot be initialised (missing
    module or no server configured)."""
    pass
################################################################
## Message class
class Message(object):
    """A queued Spread notification.

    Wraps the notification payload together with a validity window: the
    message may be delivered late (e.g. while the Spread server is down or
    the network is unavailable), but only within `validity_time` minutes of
    the moment it was created.
    """
    def __init__(self, emsg, validity_time):
        self.emsg = emsg                    # the notification message
        self.validity_time = validity_time  # message validity time (minutes)
        self.timestamp = time.time()        # object creation time

    def __str__(self):
        return str(self.emsg)

    def time_valid(self):
        """Return True while the message is still worth sending, based on
        its creation time and validity window."""
        if self.validity_time == ANYTIME:
            # No expiry configured for this message.
            return True
        age = time.time() - self.timestamp
        return age <= self.validity_time * 60.0
################################################################
## Spread class
class Spread(object):
    """Sets up Spread connection if possible and starts dedicated Spread
    thread to handle all messaging."""

    def __init__(self):
        # Refuse to construct when the spread module is missing or no server
        # is configured; callers catch SpreadInitError.
        global UseSpread
        if not UseSpread:
            raise SpreadInitError("Spread modules not found")
        global SPREADPORT
        if not SPREADSERVER and not SPREADPORT:
            UseSpread = False
            raise SpreadInitError("Spread administratively disabled")
        if not SPREADPORT:
            SPREADPORT = spread.DEFAULT_SPREAD_PORT
        # NOTE(review): "%d@%s" requires SPREADPORT to be an int; a string
        # port from the config file would raise TypeError here — confirm.
        self.server = "%d@%s" % (SPREADPORT, SPREADSERVER)
        self.eq = Queue.Queue()     # Spread message queue
        self.connected = False

    def startup(self):
        """Start the Spread management thread."""
        self.sthread = threading.Thread(group=None, target=self.main, name='Spread', args=(), kwargs={})
        self.sthread.setDaemon(1)   # die automatically when Main thread dies
        self.sthread.start()        # start the thread running

    def main(self):
        """The Spread management thread.
        Loop to watch message queue for any Spread notifications to be sent
        from other Spread functions or actions.
        This means no other threads should block when sending Spread notifications.
        """
        while True:
            m = self.eq.get(BLOCK)      # get next message or wait for one
            log.log("<eddieSpread>Spread.main(): got msg from queue, size now: %d"%(self.eq.qsize()), 9)
            if m.time_valid():
                # (Re-)connect as needed; connect() loops internally with
                # exponential back-off until it succeeds.
                while not self.connected:
                    self.connect()
                log.log("<eddieSpread>Spread.main(): Sending msg from queue, %s"%(m), 9)
                try:
                    self._actual_send(m.emsg)
                    log.log("<eddieSpread>Spread.main(): msg sent, %s"%(m), 6)
                except Exception, details:
                    log.log("<eddieSpread>Spread.main(): Spread exception, %s, msg %s not sent"%(details, m), 3)
                    # NOTE(review): details[0] == -8 relies on the spread
                    # module's error tuple layout — confirm against the
                    # python-spread bindings in use.
                    if details[0] == -8 or 'closed mbox' in str(details):
                        # connection has been closed or died, so break out & try to re-connect
                        log.log("<eddieSpread>Spread.main(): Spread connection closed unexpectedly", 4)
                        self.connected = False
                        self.eq.put(m)  # put msg back in queue for re-try
            else:
                log.log("<eddieSpread>Spread.main(): message no longer valid, discarding %s"%(m), 9)
            # Drop the connection whenever the queue drains.
            if self.eq.qsize() == 0:
                log.log("<eddieSpread>Spread.main(): queue empty, disconnecting from Spread.", 9)
                self.disconnect()

    def connect(self):
        """Create a Spread connection.
        """
        waittime = 1    # time to wait before re-connecting
        while not self.connected:
            # Create Spread connection
            log.log("<eddieSpread>Spread.connect(): Opening connection to Spread, '%s'" %(self.server), 6)
            try:
                self.connection = spread.connect(self.server, '', 0, 0)
            except spread.error, msg:
                log.log("<eddieSpread>Spread.connect(): Spread could not connect, '%s'. Waiting %d secs for retry" %(msg, waittime), 5)
                time.sleep( waittime )
                waittime = min( waittime * 2, 60*10 )   # inc wait time but max 10 minutes
            else:
                log.log("<eddieSpread>Spread.connect(): Connected to Spread, '%s'" %(self.server), 6)
                self.connected = True

    def disconnect(self):
        # Best-effort disconnect; swallow errors (e.g. never connected).
        try:
            log.log("<eddieSpread>Spread.disconnect(): disconnecting from Spread.", 6)
            self.connection.disconnect()
        except:
            pass
        self.connected = False

    def join(self):
        # Join the 'eddie' Spread group on the current connection.
        self.connection.join('eddie')

    def notify(self, emsg, validity_time=ANYTIME):
        """Add Spread notification message to message queue to be sent by
        main Spread management thread as soon as possible.
        """
        m = Message(emsg, validity_time)
        self.eq.put(m)
        log.log("<eddieSpread>Spread.notify(): msg added to queue, size now: %d"%(self.eq.qsize()), 9)
        # Always returns 0 (success); see note in Ticker()/rrd()/netsaint().
        return 0

    def _actual_send(self, msg):
        # Pickle the message and multicast it to the 'elvinrrd' group.
        sio = StringIO()
        p = cPickle.Pickler(sio)
        p.dump(msg)
        r = self.connection.multicast(spread.FIFO_MESS, 'elvinrrd', sio.getvalue())
        if r == 0:
            raise SpreadError("Spread multicast failed")

    ####################################################
    ## Public methods for Eddie functions/actions to use

    def Ticker(self, msg, timeout):
        """Send a Spread tickertape message to the Tickertape group 'Eddie'.
        The Tickertape user will be the hostname of the machine sending the message.
        msg is the text string to send (TICKERTEXT).
        """
        msg = {
            'TICKERTAPE': 'Eddie',
            'TICKERTEXT': msg,
            'USER': log.hostname,
            'TIMEOUT': timeout,
        }
        r = self.notify( msg, validity_time=10 )    # Send message, within 10 mins
        # notify() currently always returns 0, so the failure branch below
        # is effectively dead code kept for interface symmetry.
        if r != 0:
            # failed
            log.log( "<eddieSpread>Spread.Ticker(), notify failed, msg: %s" % (msg), 5 )
        else:
            # succeeded
            log.log( "<eddieSpread>Spread.Ticker(), msg added to queue, msg: %s" % (msg), 6 )
        return r

    def rrd(self, key, data):
        """Send a dictionary through Spread to a listener process which should store
        the data into an RRDtool database.
        - 'key' will be matched by the elvinrrd consumer
        - 'data' is a dictionary of data to be sent in the message
        """
        # Create db entry creation 'command'
        edict = {
            'ELVINRRD'  : key,
            'timestamp' : time.time(),
        }
        edict.update(data)          # add data dictionary to edict
        r = self.notify( edict )    # Send message
        if r != 0:
            # failed
            log.log( "<eddieSpread>Spread.rrd(): notify failed, key:%s" % (key), 5 )
        else:
            log.log( "<eddieSpread>Spread.rrd(): msg added to notify queue, key:%s" % (key), 6 )
        return r

    def netsaint(self,data):
        """by Dougal Scott <dwagon@connect.com.au>
        """
        edict = { 'NETSAINT' : 'NETSAINT' }
        edict.update(data)          # add data dictionary to edict
        r = self.notify( edict )    # Send message
        if r != 0:
            # failed
            log.log( "<eddieSpread>Spread.netsaint(): notify failed", 4 )
        else:
            # succeeded
            log.log( "<eddieSpread>Spread.netsaint(): notify successful", 8 )
        return r
##
## END - eddieSpread.py
##
|
# Generated by Django 3.0.5 on 2020-04-07 23:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make urlshortener.shorten_url optional (blank/null), max 50 chars."""

    dependencies = [
        ('urlshortener', '0003_auto_20200407_2253'),
    ]

    operations = [
        migrations.AlterField(
            model_name='urlshortener',
            name='shorten_url',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
    ]
|
from pyforest import *
def clean():
    """Load the four raw CSVs, trim/rename columns, merge them on a
    'Year Player' key, and write ../data/trimmeddf.csv keeping only rows
    with a Babip value and at least 300 PA.
    """
    FG_df = pd.read_csv('../data/fgplayerdf.csv')
    EV_df = pd.read_csv('../data/EVdf.csv')
    SS_df = pd.read_csv('../data/SSdf.csv')
    shift_df = pd.read_csv('../data/Shiftdf.csv')
    # trim to columns with correlation to Babip
    FG_df = FG_df[['Year','Player','Babip','Team','PA','SB','CS','ISO','Spd','LD','FB','GB','IFFB','IFH','BUH','Pull',
                   'Cent','Oppo','Soft','Med','Hard']]
    # Convert rate stats from % to probabilities.
    # BUG FIX: DataFrame.apply() has no `inplace` argument — the original
    # call raised TypeError (and would not have modified FG_df anyway);
    # assign the scaled columns back instead.
    fg_rate_cols = ['LD','FB','GB','IFFB','IFH','BUH','Pull','Cent','Oppo','Soft','Med','Hard']
    FG_df[fg_rate_cols] = FG_df[fg_rate_cols] / 100
    # sort by player-year
    FG_df.sort_values(by=['Player','Year'], inplace=True)
    # Reformat "Last, First" -> "First Last", trim, rename, convert to
    # probabilities, sort by player-year.
    EV_df['Player'] = [f"{name.split(', ')[1]} {name.split(', ')[0]}" for name in EV_df['name']]
    EV_df = EV_df[['season','Player','anglesweetspotpercent','avg_hit_angle',
                   'avg_hit_speed','brl_percent','ev95percent','fbld','gb']]
    EV_df.rename(columns={'season':'Year','anglesweetspotpercent':'SweetSpot%','avg_hit_angle':'AvgLauchAngle',
                          'avg_hit_speed':'AvgEV','brl_percent':'Barrel%','ev95percent':'Above95MPH%','fbld':'FBLDAvgEV',
                          'gb':'GBAvgEV'}, inplace=True)
    # BUG FIX: same invalid apply(..., inplace=True) pattern as above.
    ev_rate_cols = ['SweetSpot%','Barrel%','Above95MPH%']
    EV_df[ev_rate_cols] = EV_df[ev_rate_cols] / 100
    EV_df.sort_values(by=['Player','Year'], inplace=True)
    # reformat name, trim, rename, sort by player-year
    SS_df['Player'] = [f"{name.split(', ')[1]} {name.split(', ')[0]}" for name in SS_df['name_display_last_first']]
    SS_df = SS_df[['timeframe','Player','age','hp_to_1b','r_sprint_speed_top50percent_pretty']]
    SS_df.rename(columns={'timeframe':'Year','age':'Age','hp_to_1b':'HometoFirst',
                          'r_sprint_speed_top50percent_pretty':'AvgSprintSpeed'}, inplace=True)
    SS_df.sort_values(by=['Player','Year'], inplace=True)
    # Build a 'PlayerYear' key in each frame and set it as the index.
    # BUG FIX: the original referenced undefined names (smallEVdf, smallFGdf,
    # smallSSdf) here — each frame must use its own columns.
    EV_df['PlayerYear'] = EV_df['Year'].apply(lambda x: str(x).split('.')[0]) + ' ' + EV_df['Player']
    EV_df.set_index('PlayerYear', inplace=True)
    FG_df['PlayerYear'] = FG_df['Year'].apply(lambda x: str(x).split('.')[0]) + ' ' + FG_df['Player']
    FG_df.set_index('PlayerYear', inplace=True)
    SS_df['PlayerYear'] = SS_df['Year'].apply(lambda x: str(x).split('.')[0]) + ' ' + SS_df['Player']
    SS_df.set_index('PlayerYear', inplace=True)
    # assumes Shiftdf.csv also has 'Year' and 'Player' columns — TODO confirm
    shift_df['PlayerYear'] = shift_df['Year'].apply(lambda x: str(x).split('.')[0]) + ' ' + shift_df['Player']
    shift_df.set_index('PlayerYear', inplace=True)
    # Merge everything into one combined frame on the PlayerYear index.
    # BUG FIX: the originals merged undefined names (smallFGdf, smallEVdf,
    # smallSSdf, shiftdf).
    combineddf = pd.merge(FG_df, EV_df, how='outer', left_index=True, right_index=True)
    combineddf = pd.merge(combineddf, SS_df, how='outer', left_index=True, right_index=True)
    combineddf = pd.merge(combineddf, shift_df, how='outer', left_index=True, right_index=True)
    # Drop entries with no Babip and/or fewer than 300 PA.
    combineddf.dropna(subset=['Babip'], inplace=True)
    combineddf = combineddf[combineddf.PA >= 300]
    combineddf.to_csv('../data/trimmeddf.csv')

if __name__ == '__main__':
    clean()
|
from django.db import models
from ecommerce.utils import CustomModelManager, CustomModelQuerySet
from items.models import Item
from settings.models import FiscalYear
from users.models import GuestEmail, User
# Create your models here.
# Create your models here.
class Comment(models.Model):
    """A comment on an Item, authored by either a registered user or a
    guest (identified by email); tied to a fiscal year."""
    user = models.ForeignKey(User, on_delete=models.DO_NOTHING, related_name='comments', null=True, blank=True)
    guest_user = models.ForeignKey(GuestEmail, on_delete=models.DO_NOTHING, related_name='comments', null=True,
                                   blank=True)
    item = models.ForeignKey(Item, on_delete=models.DO_NOTHING, related_name='comments')
    fiscal_year = models.ForeignKey(FiscalYear, on_delete=models.DO_NOTHING, related_name='comments')
    comment = models.TextField()
    # Soft-delete flag; rows are never removed, only marked deleted.
    is_deleted = models.BooleanField(default=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = CustomModelManager.from_queryset(CustomModelQuerySet)()

    class Meta:
        db_table = 'comments_comment'

    def __str__(self):
        # Idiom fix: compare against None with `is not None` rather than
        # the original `not self.x == None`; behavior is unchanged.
        if self.user is not None:
            return '{}'.format(self.user.email)
        elif self.guest_user is not None:
            return '{}'.format(self.guest_user.email)
        else:
            return 'random '
|
# Import cars data (first CSV column is the row index).
import pandas as pd
cars = pd.read_csv('cars.csv', index_col = 0)
# Extract drives_right column as Series: dr
dr = cars['drives_right']
# Use dr to subset cars: sel — boolean-Series indexing keeps the rows where
# drives_right is True.
sel = cars[dr]
# Print sel
print(sel)
|
import sqlite3

# Demo script against test.db's `phones(name, number)` table.
conn = sqlite3.connect("test.db")
cursor = conn.cursor()
cursor2 = conn.cursor()
cursor.execute("Update phones set name = 'Police' where name = 'Hello'")
cursor2.execute("Select * from phones")
# BUG FIX: fetch the rows *while the cursor is open*; the original closed
# cursor2 first and then called fetchall(), raising
# "Cannot operate on a closed cursor".
for record in cursor2.fetchall():
    print("Name: {}, Phone Number: {}".format(record[0],record[1]))
try:
    # BUG FIX: the original closed `cursor` before this execute, so the
    # insert always failed with a closed-cursor error instead of running.
    cursor.execute("insert into phones values('Hello','911')")
except Exception as ex:
    print(str(ex))
# BUG FIX: re-run the query for the second listing; the original iterated an
# already-exhausted (and closed) cursor, printing nothing.
cursor2.execute("Select * from phones")
for record in cursor2:
    print("Name: {}, Phone Number: {}".format(record[0],record[1]))
cursor.close()
cursor2.close()
# BUG FIX: commit so the update/insert are actually persisted, then close.
conn.commit()
conn.close()
# ======================================================
# @Author : Daniel
# @Time : 2020.6.20
# @Desc : 用户视图
# ======================================================
from flask import Blueprint, request, render_template, session, redirect, url_for
from flask_login import login_required, login_user, logout_user
from .models import db
from utils.check_login import is_login
from .models import User, Role, Permission
# Blueprint grouping all user/role/permission views in this module.
user_bp = Blueprint('user', __name__)
@user_bp.route('/create_db/')
def create_db():
    """Create all database tables.

    BUG FIX: the original returned None, which Flask rejects (a view must
    return a valid response); return a confirmation string, mirroring
    drop_db below.
    """
    db.create_all()
    return '创建成功!'
@user_bp.route('/drop_db/')
def drop_db():
    """Drop all database tables.

    NOTE(review): destructive and exposed as an unauthenticated GET route —
    presumably development-only; confirm before deploying.
    """
    db.drop_all()
    return '删除成功!'
@user_bp.route('/home/', methods=['GET'])
@is_login
def home():
    """Home page."""
    if request.method == 'GET':
        return render_template('index.html')
@user_bp.route('/head/', methods=['GET'])
@is_login
def head():
    """Page header; shows the logged-in username from the session."""
    if request.method == 'GET':
        user = session.get('username')
        return render_template('head.html', user=user)
@user_bp.route('/left/', methods=['GET'])
@is_login
def left():
    """Left sidebar; renders the permissions of the current user's role."""
    if request.method == 'GET':
        # Current username from the session.
        user = session.get('username')
        # Permissions attached to the user's role.
        # NOTE(review): raises AttributeError if the username is not found
        # in the DB — confirm is_login guarantees a valid session user.
        permissions = User.query.filter_by(username=user).first().role.permission
        return render_template('left.html', permissions=permissions)
@user_bp.route('/register/', methods=['GET', 'POST'])
def register():
    """User registration: GET renders the form, POST validates and creates
    the user, then redirects to the login page."""
    # GET: just show the registration form.
    if request.method == "GET":
        return render_template('register.html')
    if request.method == 'POST':
        # Submitted registration fields.
        username = request.form['username']
        pwd1 = request.form['pwd1']
        pwd2 = request.form['pwd2']
        # flag tracks whether all checks passed; msg keeps the *last* error.
        flag = True
        # all() is falsy when any field is empty.
        if not all([username, pwd1, pwd2]):
            msg, flag = '请填写完整用户信息!', False
        if len(username) > 20:
            msg, flag = '用户名过长!', False
        if pwd1 != pwd2:
            msg, flag = '两次密码输入不一致!', False
        # Reject usernames that are already registered.
        u = User.query.filter_by(username=username).first()
        if u:
            msg, flag = '用户已被注册!', False
        if not flag:
            return render_template('register.html', msg=msg)
        # Create the new user.
        # NOTE(review): password appears to be stored in plain text here and
        # compared in plain text in login() — confirm whether User.save hashes.
        user = User(username=username, password=pwd1)
        user.save()
        # Redirect to the login page.
        return redirect(url_for('user.login'))
@user_bp.route('/login/', methods=['GET', 'POST'])
def login():
    """Log in: GET renders the form, POST checks credentials and starts a
    flask_login session."""
    if request.method == 'GET':
        return render_template('login.html')
    if request.method == 'POST':
        username = request.form['username']
        pwd = request.form['password']
        if not all([username, pwd]):
            msg = '请填写完整信息'
            return render_template('login.html', msg=msg)
        # NOTE(review): credentials are matched against a plain-text password
        # column — confirm whether hashing is applied elsewhere.
        user = User.query.filter_by(username=username, password=pwd).first()
        if user:
            login_user(user)
            # session['user_id'] = user.u_id
            # session['username'] = username
            return render_template('index.html')
        else:
            msg = '用户或密码错误!'
            return render_template('login.html', msg=msg)
@user_bp.route('/logout/', methods=['GET'])
def logout():
    """Log out the current user and return to the login page."""
    if request.method == 'GET':
        # flask_login's logout_user() supersedes manual session clearing.
        # session.clear()
        logout_user()
        # Redirect to the login page.
        return redirect(url_for('user.login'))
@user_bp.route('/user_per_list/', methods=['GET'])
@is_login
def user_per_list():
    """List the permissions of a role (?r_id= query parameter)."""
    if request.method == 'GET':
        # Role id; int(r_id) raises if the parameter is missing — assumes
        # callers always supply it (TODO confirm).
        r_id = request.args.get('r_id')
        # Permissions of that role.
        permissions = Role.query.get(int(r_id)).permission
        return render_template('permission/permission.html', permissions=permissions)
@user_bp.route('/user_per_add/', methods=['GET', 'POST'])
@is_login
def add_user_per():
    """Add a permission to a role (GET shows the picker, POST links them)."""
    if request.method == 'GET':
        r_id = request.args.get('r_id')
        permissions = Permission.query.all()
        return render_template('user_per_add.html', permissions=permissions, r_id=r_id)
    if request.method == 'POST':
        r_id = request.form.get('r_id')
        p_id = request.form.get('p_id')
        # Role object.
        role = Role.query.get(int(r_id))
        # Permission object.
        per = Permission.query.get(int(p_id))
        # Link them via the many-to-many relationship.
        per.roles.append(role)
        per.save()
        # Redirect to the role_list view ('user' is the blueprint name).
        # NOTE(review): 'user.role_list' is not defined in this module —
        # presumably registered elsewhere on the blueprint; confirm.
        return redirect(url_for('user.role_list'))
@user_bp.route('/user_per_sub/', methods=['GET', 'POST'])
@is_login
def sub_user_per():
    """Remove a permission from a role (GET shows the list, POST unlinks)."""
    if request.method == 'GET':
        r_id = request.args.get('r_id')
        pers = Role.query.get(int(r_id)).permission
        return render_template('user_per_list.html', pers=pers, r_id=r_id)
    if request.method == 'POST':
        r_id = request.form.get('r_id')
        p_id = request.form.get('p_id')
        # Role object.
        role = Role.query.get(int(r_id))
        # Permission object.
        per = Permission.query.get(int(p_id))
        # Unlink the role from the permission and persist.
        per.roles.remove(role)
        db.session.commit()
        # Re-read the remaining permissions for display.
        permissions = Role.query.get(int(r_id)).permission
        return render_template('user_per_list.html', permissions=permissions, r_id=r_id)
@user_bp.route('/user_list/', methods=['GET'])
@is_login
def user_list():
    """Paginated user list (?page=&page_num=)."""
    if request.method == 'GET':
        # Page number (1-based).
        page = int(request.args.get('page', 1))
        # Items per page.
        page_num = int(request.args.get('page_num', 10))
        # NOTE(review): positional paginate(page, per_page) is the older
        # Flask-SQLAlchemy API; newer versions require keyword arguments.
        paginate = User.query.order_by('u_id').paginate(page, page_num)
        users = paginate.items
        return render_template('user/user_list.html', users=users, paginate=paginate)
@user_bp.route('/user_edit/', methods=['GET', 'POST'])
@is_login
def edit_user():
    """Create or edit a user.

    GET renders the form (pre-filled when ?u_id= is given); POST validates
    the fields and either updates the existing user or creates a new one.
    """
    if request.method == 'GET':
        u_id = request.args.get('u_id', None)
        if u_id:
            user = User.query.get(int(u_id))
        else:
            user = None
        return render_template('user/user_edit.html', user=user)
    if request.method == 'POST':
        # Submitted fields.
        username = request.form['username']
        pwd1 = request.form['password1']
        pwd2 = request.form['password2']
        # flag tracks whether all checks passed; msg keeps the *last* error.
        flag = True
        # all() is falsy when any field is empty.
        if not all([username, pwd1, pwd2]):
            msg, flag = '请填写完整用户信息!', False
        if len(username) > 20:
            msg, flag = '用户名过长!', False
        if pwd1 != pwd2:
            msg, flag = '两次密码输入不一致!', False
        # Reject usernames that are already taken.
        u = User.query.filter_by(username=username).first()
        if u:
            msg, flag = '用户已被注册!', False
        if not flag:
            return render_template('user/user_edit.html', msg=msg)
        if 'u_id' in request.form and request.form['u_id']:
            # BUG FIX: Query.get() takes the primary key positionally; the
            # original `User.query.get(u_id=...)` raised TypeError on every
            # edit of an existing user.
            user = User.query.get(int(request.form['u_id']))
            user.username = username
            user.password = pwd1
        else:
            user = User(username=username, password=pwd1)
        user.save()
        return redirect(url_for('user.user_list'))
@user_bp.route('/role_assign/', methods=['GET', 'POST'])
@is_login
def assign_user_role():
    """
    Assign a role to a user.

    GET: list all roles for the user identified by ``u_id``.
    POST: store the chosen ``r_id`` on the user and return to the list.
    """
    if request.method == 'GET':
        u_id = request.args.get('u_id')
        all_roles = Role.query.all()
        return render_template('assign_user_role.html', roles=all_roles, u_id=u_id)
    if request.method == 'POST':
        role_id = request.form.get('r_id')
        user_id = request.form.get('u_id')
        # Point the user at the selected role and persist the change.
        target = User.query.filter_by(u_id=user_id).first()
        target.role_id = role_id
        db.session.commit()
        return redirect(url_for('user.user_list'))
@user_bp.route('/pwd_change/', methods=['GET', 'POST'])
@is_login
def change_password():
    """
    Change the password of the currently logged-in user.

    GET: render the change-password form.
    POST: verify the old password, validate the two new-password fields,
    store the new password and redirect to the success page.
    """
    def _render(msg=None):
        # Helper: re-render the form for the current session user,
        # optionally with a validation message (was repeated four times).
        user = User.query.filter_by(username=session.get('username')).first()
        return render_template('pwd_change.html', msg=msg, user=user)

    if request.method == 'GET':
        return _render()
    if request.method == 'POST':
        username = session.get('username')
        pwd1 = request.form.get('pwd1')  # old password
        pwd2 = request.form.get('pwd2')  # new password
        pwd3 = request.form.get('pwd3')  # new password, repeated
        # NOTE(review): passwords are stored and matched in plain text
        # (filter on the raw value) — consider hashing.
        pwd = User.query.filter_by(password=pwd1, username=username).first()
        if not pwd:
            return _render('请输入正确的旧密码')
        if not all([pwd2, pwd3]):
            return _render('密码不能为空')
        if pwd2 != pwd3:
            return _render('两次密码不一致,请重新输入')
        pwd.password = pwd2
        db.session.commit()
        return redirect(url_for('user.change_pass_sucess'))
@user_bp.route('/pwd_change_su/', methods=['GET'])
@is_login
def change_pass_sucess():
    """
    Render the "password changed successfully" confirmation page.
    """
    # NOTE: the endpoint keeps the historical 'sucess' spelling because
    # change_password() redirects to url_for('user.change_pass_sucess').
    if request.method == 'GET':
        return render_template('pwd_change_su.html')
|
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
class DomainRedirectMiddleware(object):
    """
    Redirect any request whose Host header is not a heyandie.com domain
    (and not localhost) to the canonical www.heyandie.com host,
    preserving scheme, path and — for GET requests — the query string.

    Helps SEO and brand recognition when Apache serves several aliased
    ServerName entries for the same site.
    """
    CANONICAL_HOST = 'www.heyandie.com'

    def process_request(self, request):
        host = request.META['HTTP_HOST']
        if host.endswith('heyandie.com') or 'localhost' in host:
            # Already an accepted host: fall through to normal handling.
            return None
        scheme = 'https' if request.is_secure() else 'http'
        query = ''
        if request.method == 'GET' and len(request.GET) > 0:
            query = '?%s' % request.GET.urlencode()
        return HttpResponseRedirect(
            '%s://%s%s%s' % (scheme, self.CANONICAL_HOST,
                             urlquote(request.path), query))
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-06-26 08:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Rename Job's merge fields to *_merging and add a merge status field."""

    dependencies = [
        ('prodsys', '0023_auto_20170626_0824'),
    ]
    operations = [
        # attempt_merge -> attempt_merging
        migrations.RenameField(
            model_name='job',
            old_name='attempt_merge',
            new_name='attempt_merging',
        ),
        # panda_id_merge -> panda_id_merging
        migrations.RenameField(
            model_name='job',
            old_name='panda_id_merge',
            new_name='panda_id_merging',
        ),
        # New optional free-text status for the merging stage.
        migrations.AddField(
            model_name='job',
            name='status_merging',
            field=models.CharField(blank=True, max_length=300, null=True),
        ),
    ]
|
"""add join table for ann tasks and term sampling groups
Revision ID: 700869c6a3d1
Revises: 59f532bb2197
Create Date: 2017-06-08 19:44:54.766976
"""
# revision identifiers, used by Alembic.
revision = '700869c6a3d1'
down_revision = '59f532bb2197'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.schema import CreateSequence, DropSequence, Sequence
def upgrade():
    """Create the annotation_task <-> term_sampling_group join table,
    its id sequence, and indexes on both foreign-key columns."""
    ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): term_sampling_group_id gets an index but no
    # ForeignKeyConstraint (unlike annotation_task_id) — confirm whether
    # one was intended.
    op.create_table('annotation_task_term_sampling_groups',
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('annotation_task_id', sa.BigInteger(), nullable=True),
    sa.Column('term_sampling_group_id', sa.BigInteger(), nullable=True),
    sa.ForeignKeyConstraint(['annotation_task_id'], ['annotation_tasks.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # Explicit sequence for the id column (Postgres-style).
    op.execute(CreateSequence(Sequence('annotation_task_term_sampling_group_id_seq')))
    op.create_index(op.f('ix_annotation_task_term_sampling_groups_annotation_task_id'), 'annotation_task_term_sampling_groups', ['annotation_task_id'], unique=False)
    op.create_index(op.f('ix_annotation_task_term_sampling_groups_term_sampling_group_id'), 'annotation_task_term_sampling_groups', ['term_sampling_group_id'], unique=False)
    ### end Alembic commands ###
def downgrade():
    """Drop the join table, its indexes, and the id sequence created by upgrade()."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_annotation_task_term_sampling_groups_term_sampling_group_id'), table_name='annotation_task_term_sampling_groups')
    op.drop_index(op.f('ix_annotation_task_term_sampling_groups_annotation_task_id'), table_name='annotation_task_term_sampling_groups')
    op.drop_table('annotation_task_term_sampling_groups')
    # BUG FIX: upgrade() creates this sequence explicitly, so downgrade
    # must drop it too, otherwise re-running upgrade() fails because the
    # sequence already exists.
    op.execute(DropSequence(Sequence('annotation_task_term_sampling_group_id_seq')))
    ### end Alembic commands ###
|
from django.db import models
import bcrypt,datetime
class UserManager(models.Manager):
    """Validation helpers for user registration and login."""

    def validate_registration(self, postData):
        """
        Validate the registration form fields in ``postData``.

        Returns a dict with 'status' (bool), 'errors' (list of messages)
        and, on success, the new 'user_id'.
        """
        response = {
            'status' : False,
            'errors' : []
        }
        if len(postData['name']) < 2:
            response['errors'].append("Name too short")
        if len(postData['alias']) < 3:
            response['errors'].append("alias must be at least 3 characters")
        if len(postData['username']) < 9:
            response['errors'].append("Invalid Email")
        if len(postData['password']) < 8:
            response['errors'].append("Invalid Password")
        if postData['confirm_pw'] != postData['password']:
            response['errors'].append("Invalid Password")
        if len(postData['date']) > 0:
            today = datetime.datetime.today()
            date = datetime.datetime.strptime(postData['date'], '%Y-%m-%d')
            if date > today:
                response['errors'].append(' BirthDate cannot be in the future')
        if len(postData['date']) < 1:
            response['errors'].append('Date of Birth required')
        if len(response['errors']) == 0:
            response['status'] = True
            # BUG FIX: bcrypt.hashpw returns bytes; decode before storing in
            # the CharField so validate_login's `.encode()` round-trips.
            hashed = bcrypt.hashpw(postData['password'].encode(),
                                   bcrypt.gensalt()).decode()
            response['user_id'] = User.objects.create(
                name=postData['name'],
                alias=postData['alias'],
                username=postData['username'],
                date=postData['date'],
                password=hashed
            ).id
        return response

    def validate_login(self, postData):
        """Check username/password; return 'status', 'errors' and 'user_id'."""
        response = {
            'status' : False,
            'errors' : []
        }
        existing_users = User.objects.filter(username=postData['username'])
        if len(existing_users) == 0:
            response['errors'].append("invalid input")
        else:
            # Compare the submitted password against the stored bcrypt hash.
            if bcrypt.checkpw(postData['password'].encode(), existing_users[0].password.encode()):
                response['status'] = True
                response['user_id'] = existing_users[0].id
            else:
                response['errors'].append("invalid input")
        return response
class QuoteManager(models.Manager):
    """Validation and relation helpers for Quote objects."""

    def validate_submit(self, postData, user_id):
        """
        Validate a quote submission; on success create the Quote.

        Returns a dict with 'status' (bool) and 'errors' (list).
        """
        response = {
            "status" : False,
            "errors": []
        }
        if len(postData['quoted_by']) == 0:
            response['errors'].append("quoted_by cannot be empty")
        if len(postData['quoted_by']) < 3:
            response['errors'].append("quoted_by should be atleast 3 characters")
        if len(postData['message']) < 10:
            response['errors'].append("message should be atleast 10 characters")
        if len(response['errors']) == 0:
            response['status'] = True
            # FIX: fetch the posting user once (the original queried the
            # same user twice and left the first result unused).
            posted_by = User.objects.get(id=user_id)
            Quote.objects.create(
                quoted_by=postData['quoted_by'],
                message=postData['message'],
                posted_by=posted_by
            )
        return response

    def add(self, quote_id, user_id):
        """Add the user to the quote's 'others' relation."""
        me = User.objects.get(id=user_id)
        quote = Quote.objects.get(id=quote_id)
        quote.others.add(me)
        quote.save()

    def remove(self, quote_id, user_id):
        """Remove the user from the quote's 'others' relation."""
        me = User.objects.get(id=user_id)
        quote = Quote.objects.get(id=quote_id)
        quote.others.remove(me)
        quote.save()
# Create your models here.
class User(models.Model):
    # Registered account. Passwords are stored as bcrypt hashes
    # (see UserManager.validate_registration).
    name = models.CharField(max_length=255)
    alias = models.CharField(max_length=255)
    # Login identifier; UserManager validates it like an email address.
    username = models.CharField(max_length=255)
    password = models.CharField(max_length=255)
    date = models.DateField()  # date of birth (validated not in the future)
    created_at = models.DateTimeField(auto_now_add = True)
    updated_at = models.DateTimeField(auto_now = True)
    objects = UserManager()
class Quote(models.Model):
    """A quote posted by a user; 'others' links users via QuoteManager.add/remove."""
    quoted_by = models.CharField(max_length=255)
    message = models.CharField(max_length=1000)
    created_at = models.DateTimeField(auto_now_add = True)
    updated_at = models.DateTimeField(auto_now = True)
    # NOTE(review): Django 1.x-style ForeignKey without on_delete;
    # on_delete becomes a required argument from Django 2.0 onward.
    posted_by = models.ForeignKey(User, related_name="added_quotes", null=True)
    # FIX: dropped null=True — it has no effect on ManyToManyField
    # (Django system check fields.W340).
    others = models.ManyToManyField(User, related_name="quotes")
    objects = QuoteManager()
# -*- coding: utf-8 -*-
# @Author: yulidong
# @Date: 2018-04-25 19:03:52
# @Last Modified by: yulidong
# @Last Modified time: 2018-08-06 13:13:55
import scipy.io
import numpy as np
import os

# Split the NYU-v2 scene list into train/test scene-name lists based on
# the official test index file, then save the training split.
data = scipy.io.loadmat('/home/lidong/Documents/datasets/nyu/nyu2_test_index.mat')
test1 = data['testNdxs'] - 1  # MATLAB indices are 1-based
data = scipy.io.loadmat('/home/lidong/Documents/datasets/nyu/scenes.mat')
data = data['scenes']
scenes = []
test_scenes = []
train_scenes = []
for i in range(len(data)):
    scenes.append(data[i][0][0])
# Every scene referenced by a test index belongs to the test split.
for i in range(len(test1)):
    if not scenes[test1[i][0]] in test_scenes:
        test_scenes.append(scenes[test1[i][0]])
# All remaining scenes (deduplicated) form the training split.
for i in range(len(scenes)):
    if not scenes[i] in test_scenes and not scenes[i] in train_scenes:
        train_scenes.append(scenes[i])
train_scenes.sort()
test_scenes.sort()
# NOTE(review): only the training split is persisted; confirm whether
# test_scenes should be saved as well.
np.save('/home/lidong/Documents/datasets/nyu/train_scenes.npy', train_scenes)
print(len(test_scenes))
print(len(train_scenes))
# BUG FIX: removed a third, duplicated print(len(test_scenes)).
|
import unittest2 as unittest
import sys
sys.path.insert(0, '..')
from driver import Driver
from trip import Trip
from passenger import Passenger
from artist import Artist
from song import Song
from genre import Genre
class TestOneToManyRelationships(unittest.TestCase):
    """Tests for the Driver/Trip/Passenger and Artist/Song/Genre
    one-to-many relationships.

    Fixtures are created once while the class body executes and exposed
    as module globals so every test method can reference them directly.
    """
    # --- ride-sharing fixtures ---------------------------------------
    global driver_1
    driver_1 = Driver("Daniel", "fast and furious")
    global driver_2
    driver_2 = Driver("Alice", "faster and furiouser")
    global passenger_1
    passenger_1 = Passenger("Michael Scott", 38)
    global passenger_2
    passenger_2 = Passenger("Anna", 25)
    global passenger_3
    passenger_3 = Passenger("Katie", 20)
    global trip_1
    trip_1 = Trip(driver_1, passenger_1)
    global trip_2
    trip_2 = Trip(driver_1, passenger_2)
    global trip_3
    trip_3 = Trip(driver_1, passenger_3)
    global trip_4
    trip_4 = Trip(driver_2, passenger_3)
    global trip_5
    trip_5 = Trip(driver_2, passenger_2)
    def test_passenger_property_methods(self):
        # Property accessors mirror the underscored backing attributes.
        self.assertEqual(passenger_1._name, "Michael Scott")
        self.assertEqual(passenger_1.name, "Michael Scott")
        self.assertEqual(passenger_1._age, 38)
        self.assertEqual(passenger_1.age, 38)
    def test_driver_property_methods(self):
        self.assertEqual(driver_1._name, "Daniel")
        self.assertEqual(driver_1.name, "Daniel")
        self.assertEqual(driver_1._driving_style, "fast and furious")
        self.assertEqual(driver_1.driving_style, "fast and furious")
    def test_trip_property_methods(self):
        self.assertEqual(trip_1._driver, driver_1)
        self.assertEqual(trip_1.driver, driver_1)
        self.assertEqual(trip_1._passenger, passenger_1)
        self.assertEqual(trip_1.passenger, passenger_1)
    def test_trip_class_method(self):
        # assertItemsEqual is the unittest2/Python-2 order-insensitive check.
        self.assertItemsEqual(Trip._all, [trip_1, trip_2, trip_3, trip_4, trip_5])
        self.assertItemsEqual(Trip.all(), [trip_1, trip_2, trip_3, trip_4, trip_5])
    def test_driver_instance_methods(self):
        self.assertItemsEqual(driver_1.trips(), [trip_1, trip_2, trip_3])
        self.assertItemsEqual(driver_1.passengers(), [passenger_1, passenger_2, passenger_3])
        self.assertEqual(driver_1.trip_count(), 3)
    def test_passenger_instance_methods(self):
        self.assertItemsEqual(passenger_2.trips(), [trip_2, trip_5])
        self.assertItemsEqual(passenger_2.drivers(), [driver_1, driver_2])
        self.assertEqual(passenger_2.trip_count(), 2)
    # --- music fixtures ----------------------------------------------
    global artist_1
    artist_1 = Artist("Lady Gaga")
    global artist_2
    artist_2 = Artist("Vulfpeck")
    global genre_1
    genre_1 = Genre("Pop")
    global genre_2
    genre_2 = Genre("Indie")
    global genre_3
    genre_3 = Genre("Alternative")
    global song_1
    song_1 = Song("Joanne", artist_1, genre_1)
    global song_2
    song_2 = Song("Conscious Club", artist_2, genre_2)
    global song_3
    song_3 = Song("Back Pocket", artist_2, genre_1)
    global song_4
    song_4 = Song("El Chepe", artist_2, genre_3)
    global song_5
    song_5 = Song("Sinner's Prayer", artist_1, genre_3)
    def test_genre_property_methods(self):
        self.assertEqual(genre_1._name, "Pop")
        self.assertEqual(genre_1.name, "Pop")
    def test_artist_property_methods(self):
        self.assertEqual(artist_1._name, "Lady Gaga")
        self.assertEqual(artist_1.name, "Lady Gaga")
    def test_song_property_methods(self):
        self.assertEqual(song_1._name, "Joanne")
        self.assertEqual(song_1.name, "Joanne")
        self.assertEqual(song_1._artist, artist_1)
        self.assertEqual(song_1.artist, artist_1)
        self.assertEqual(song_1._genre, genre_1)
        self.assertEqual(song_1.genre, genre_1)
    def test_song_class_method(self):
        self.assertItemsEqual(Song._all, [song_1, song_2, song_3, song_4, song_5])
        self.assertItemsEqual(Song.all(), [song_1, song_2, song_3, song_4, song_5])
    def test_artist_instance_methods(self):
        self.assertItemsEqual(artist_1.songs(), [song_1, song_5])
        self.assertItemsEqual(artist_1.genres(), [genre_1, genre_3])
    def test_genre_instance_methods(self):
        self.assertItemsEqual(genre_3.songs(), [song_4, song_5])
        self.assertItemsEqual(genre_3.artists(), [artist_1, artist_2])
|
from lib.DataReader import DataReader
class ReadDoodad:
    """Parser for a Warcraft III doodad (.doo) file (Python 2).

    Reads the header, the per-tree records (including optional item
    sets) and the trailing special-doodad section via a DataReader.
    """
    def __init__(self, filename):
        self.read = DataReader(filename)
        # Parsed file contents; see ReadDoodad() for the dict layout.
        self.info = self.ReadDoodad()
    def ReadDoodad(self):
        """Parse the whole file and return it as a nested dict."""
        doodHeader = self.ReadHeader()
        doodInfo = {}
        doodInfo["fileID"] = doodHeader[0]
        doodInfo["version"] = doodHeader[1]
        doodInfo["subversion"] = doodHeader[2]
        doodInfo["count"] = doodHeader[3]
        print "File ID: {0}, Version: {1}, Subversion: {2}".format(doodInfo["fileID"],
                                                                   doodInfo["version"],
                                                                   doodInfo["subversion"])
        print "Reading {0} trees".format(doodInfo["count"])
        doodInfo["trees"] = []
        for i in xrange(0, doodInfo["count"]):
            doodInfo["trees"].append(self.ReadTreeData())
        doodInfo["special"] = self.ReadSpecialDoodads()
        return doodInfo
    def ReadHeader(self):
        """Read the file header: (fileID, version, subversion, tree count)."""
        fileID = self.read.charArray(4)
        version = self.read.int()
        subversion = self.read.int()
        count = self.read.int()
        return fileID, version, subversion, count
    def ReadTreeData(self):
        """Read one tree/doodad record and return it as a dict."""
        treeInfo = {
            "treeID" : self.read.charArray(4),
            "variation" : self.read.int(),
            "coord" : {
                "x" : self.read.float(),
                "y" : self.read.float(),
                "z" : self.read.float()
            },
            "angle" : self.read.float(),
            "scale" : {
                "x" : self.read.float(),
                "y" : self.read.float(),
                "z" : self.read.float()
            },
            "flags" : self.read.byte(),
            "life" : self.read.byte()
        }
        treeInfo["itemPoint"] = self.read.int()
        treeInfo["numberOfItemSets"] = self.read.int()
        if treeInfo["numberOfItemSets"] > 0:
            treeInfo["itemSets"] = []
            ## Reading Item Set
            for i in xrange(treeInfo["numberOfItemSets"]):
                numberOfItems = self.read.int()
                itemSet = []
                ## Each Item Set has a Number of Items
                for j in xrange(numberOfItems):
                    itemID = self.read.charArray(4)
                    procentualChance = self.read.int()
                    itemSet.append((itemID, procentualChance))
                treeInfo["itemSets"].append(itemSet)
        # Trailing per-record doodad id (read even when no item sets follow).
        treeInfo["doodID"] = self.read.int()
        return treeInfo
    def ReadSpecialDoodads(self):
        """Read the special-doodad footer: version, count, then per-entry
        (ID, z, x, y) records. Note the z/x/y read order in the file."""
        specialInfo = {}
        specialInfo["version"] = self.read.int()
        specialInfo["count"] = self.read.int()
        specialInfo["info"] = []
        print "Reading special doodads. Version: {0}, Count: {1}".format(specialInfo["version"], specialInfo["count"])
        for i in xrange(specialInfo["count"]):
            ID = self.read.charArray(4)
            z, x, y = self.read.int(), self.read.int(), self.read.int()
            specialInfo["info"].append({"ID" : ID,
                                        "x" : x,"y" : y, "z" : z})
        return specialInfo
if __name__ == "__main__":
    # CLI entry: parse the .doo file named on the command line and dump
    # the parsed structure to output/treeInfo.json.
    import os
    import sys
    import simplejson
    dooRead = ReadDoodad(sys.argv[1])
    try:
        os.makedirs('./output')
    except OSError:
        # Output directory already exists — that's fine.
        pass
    with open("output/treeInfo.json", "w") as f:
        f.write(simplejson.dumps(dooRead.info, sort_keys=True, indent=4 * ' '))
    #Ok, lets just do an x,y dump of every tree (WTst id only)
    #treeDB = []
    #for tree in dooRead.info["trees"]:
    #    if tree["treeID"] == "WTst":
    #        treeDB.append({"x" : tree["coord"]["x"], "y" : tree["coord"]["y"]})
    #with open("output/treeDump.json", "w") as f:
    #    f.write(simplejson.dumps(treeDB, sort_keys=True, indent=4 * ' '))
def column_metrics(column):
    """
    Print basic descriptive statistics (labels in Polish) of a column.

    Parameters: column -- a pandas Series of numeric values.
    """
    print("Średnia: \t", column.mean())
    print("Wariancja: \t", column.var())
    print("Skośność: \t", column.skew())
    print("Kurtoza: \t", column.kurtosis())
    # BUG FIX: the median line was printed twice in the original.
    print("Mediana: \t", column.median())
    # mode() returns a Series (a column can be multimodal); report the
    # first mode instead of crashing on float() of a multi-element Series.
    print("Moda: \t \t", float(column.mode().iloc[0]))
|
"""
Løsningsforslag
Øving 2 - Oppgave 2
@author: Thomas Nyborg
"""
def leg(alder):
    """Return True when *alder* (age in years) meets the driving age of 18."""
    return alder >= 18
# Ask for the user's age and report whether they are old enough to drive.
alder = int(input("Hvor gammel er du?"))
if leg(alder):
    print("Du er gammel nok til å kjøre bil.")
else:
    print("Du er ikke gammel nok til å kjøre bil.")
|
import os
from cs50 import SQL
from flask import Flask, flash, jsonify, redirect, render_template, request, session
from flask_session import Session
from tempfile import mkdtemp
from werkzeug.exceptions import default_exceptions, HTTPException, InternalServerError
from werkzeug.security import check_password_hash, generate_password_hash
import time
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Ensure templates are auto-reloaded on change (development convenience)
app.config["TEMPLATES_AUTO_RELOAD"] = True
# Ensure responses aren't cached
@app.after_request
def after_request(response):
    """Attach no-cache headers to every outgoing response."""
    headers = response.headers
    headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
    headers["Expires"] = 0
    headers["Pragma"] = "no-cache"
    return response
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
#Make sure API key is set — fail fast at startup, not on the first lookup()
if not os.environ.get("API_KEY"):
    raise RuntimeError("API_KEY not set")
@app.route("/")
@login_required
def index():
"""Show portfolio of stocks"""
cash = db.execute("SELECT cash FROM users WHERE id=(:userid)", userid=session["user_id"])[0]["cash"]
stocks = db.execute("SELECT symbol, shares FROM stocks WHERE userid=(:userid)", userid=session["user_id"])
total_assets = 0
for stock in stocks:
api_stock = lookup(stock["symbol"])
stock["price"] = api_stock["price"]
stock["name"] = api_stock["name"]
total_assets += stock["shares"] * stock["price"]
return render_template("index.html", stocks=stocks, cash=cash, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
"""Buy shares of stock"""
if request.method == "GET":
return render_template("buy.html", render_card=False)
elif request.method == "POST":
stock = lookup(request.form.get("symbol"))
amount = request.form.get("amount")
if not stock:
return render_template("buy.html", render_card=False, show_error="No stock found with that symbol")
try:
amount = int(amount)
except:
return render_template("buy.html", render_card=False, show_error="Please enter an integer stock amount to purchase")
else:
if amount < 1:
return render_template("buy.html", render_card=False, show_error="Please specify a positive integer")
purchase_price = float(stock["price"]) * amount
cash_available = db.execute("SELECT cash FROM users WHERE id=(:userid)", userid=session["user_id"])[0]["cash"]
if purchase_price <= cash_available:
new_cash = cash_available - purchase_price
db.execute("INSERT INTO history (userid, symbol, amount, purchaseprice, datetime) VALUES (:userid, :symbol, :amount, :purchaseprice, :time)", userid=session["user_id"], symbol=stock["symbol"], amount=amount, purchaseprice=stock["price"], time=time.strftime('%Y-%m-%d %H:%M:%S'))
db.execute("UPDATE users SET cash=(:new_cash) WHERE id=(:userid)", new_cash=new_cash, userid=session["user_id"])
users_stocks = db.execute("SELECT * FROM stocks WHERE userid=(:userid)", userid=session["user_id"])
if not any(d["symbol"] == stock["symbol"] for d in users_stocks):
db.execute("INSERT INTO stocks (userid, symbol, shares) VALUES (:userid, :symbol, :amount)", userid=session["user_id"], symbol=stock["symbol"], amount=amount)
else:
for i in users_stocks:
if i["symbol"] == stock["symbol"]:
new_shares = i["shares"] + amount
db.execute("UPDATE stocks SET shares=(:new_shares) WHERE userid=(:userid) AND symbol=(:symbol)", userid=session["user_id"], new_shares=new_shares, symbol=stock["symbol"])
return render_template("buy.html", render_card=True, purchase_total=purchase_price, amount=amount, stock_name=stock["name"], stock_symbol=stock["symbol"], stock_price=stock["price"], account_total=new_cash)
else:
return render_template("buy.html", render_card=False, show_error="Insufficient funds for purchase")
@app.route("/check", methods=["GET"])
def check():
"""Return true if username available, else false, in JSON format"""
return jsonify("TODO")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
history = db.execute("SELECT symbol, amount, purchaseprice, datetime FROM history WHERE userid=(:userid) ORDER BY transid DESC", userid=session["user_id"])
for row in history:
row["transaction"] = "BUY" if row["amount"]>0 else "SELL"
return render_template("history.html", table=history)
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return render_template("login.html", show_error="Please provide a username")
# Ensure password was submitted
elif not request.form.get("password"):
return render_template("login.html", show_error="Please provide a password")
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = :username",
username=request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return render_template("login.html", show_error="Invalid username or password")
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html", render_card=False)
elif request.method == "POST":
stock = lookup(request.form.get("symbol"))
if not stock:
return render_template("quote.html", render_card=False, show_error="No stock found with that symbol")
else:
return render_template("quote.html", render_card=True, stock_name=stock["name"], stock_symbol=stock["symbol"], stock_price=stock["price"])
@app.route("/register", methods=["GET", "POST"])
def register():
"""Register user"""
if request.method == "GET":
return render_template("register.html")
elif request.method == "POST":
username = request.form.get("username")
password = request.form.get("password")
confirmation = request.form.get("confirmation")
if password != confirmation:
return render_template("register.html", show_error="Passwords must match")
if not username or not password:
return render_template("register.html", show_error="Enter a username and a password")
db.execute("INSERT INTO users (username, hash, cash) VALUES (:username, :passhash, 10000)", username=username, passhash=generate_password_hash(password))
return redirect("/")
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
"""Sell shares of stock"""
stock_list = db.execute("SELECT symbol, shares FROM stocks WHERE userid=(:userid)", userid=session["user_id"])
if request.method == "GET":
return render_template("sell.html", stocks=stock_list)
elif request.method == "POST":
stock = lookup(request.form.get("symbol"))
amount = request.form.get("amount")
if not stock:
return render_template("sell.html", render_card=False, show_error="No stock found with that symbol", stocks=stock_list)
try:
amount = int(amount)
except:
return render_template("sell.html", render_card=False, show_error="Please enter an integer stock amount to purchase", stocks=stock_list)
else:
if amount < 1:
return render_template("sell.html", render_card=False, show_error="Please specify a positive integer", stocks=stock_list)
try:
stocks_available = db.execute("SELECT shares FROM stocks WHERE userid=(:userid) AND symbol=(:symbol)", userid=session["user_id"], symbol=stock["symbol"])[0]["shares"]
except:
return render_template("sell.html", render_card=False, show_error="Insufficient " + stock["symbol"] + " stocks", stocks=stock_list)
else:
if stocks_available >= amount:
new_stocks = stocks_available - amount
cash_available = db.execute("SELECT cash FROM users WHERE id=(:userid)", userid=session["user_id"])[0]["cash"]
sale_total = float(stock["price"]) * amount
new_cash = cash_available + sale_total
db.execute("UPDATE users SET cash=(:new_cash) WHERE id=(:userid)", new_cash=new_cash, userid=session["user_id"])
db.execute("UPDATE stocks SET shares=(:new_stocks) WHERE userid=(:userid) AND symbol=(:symbol)", new_stocks=new_stocks, userid=session["user_id"], symbol=stock["symbol"])
db.execute("INSERT INTO history (userid, symbol, amount, purchaseprice, datetime) VALUES (:userid, :symbol, :amount, :purchaseprice, :time)", userid=session["user_id"], symbol=stock["symbol"], amount=(-1*amount), purchaseprice=stock["price"], time=time.strftime('%Y-%m-%d %H:%M:%S'))
return render_template("sell.html", render_card=True, purchase_total=sale_total, amount=amount, stock_name=stock["name"], stock_symbol=stock["symbol"], stock_price=stock["price"], account_total=new_cash)
else:
return render_template("sell.html", render_card=False, show_error="Insufficient " + stock["symbol"] + " stocks", stocks=stock_list)
def errorhandler(e):
    """Handle error"""
    # Normalise non-HTTP exceptions to a 500 before rendering the apology.
    if not isinstance(e, HTTPException):
        e = InternalServerError()
    return apology(e.name, e.code)
# Listen for errors
# Register errorhandler() for every standard HTTP error code.
for code in default_exceptions:
    app.errorhandler(code)(errorhandler)
|
"""
Difficulty: *
Code: *
Ref: https://www.hackerrank.com/challenges/camelcase
"""
import unittest
def camel_case(words):
    """Count the words in a camelCase string: one plus each capital letter."""
    return 1 + sum(1 for ch in words if ch.isupper())
class MyTestCases(unittest.TestCase):
    """Sanity tests for camel_case()."""
    def test_camel_case(self):
        # Example from the HackerRank problem statement.
        self.assertEqual(camel_case('saveChangesInTheEditor'), 5)
if __name__ == '__main__':
    # Read one line from stdin and print its camelCase word count.
    s = input().strip()
    print(camel_case(s))
|
from greedy import greedy_celf
import numpy as np
import copy
class LUCBLearner:
    """
    LinUCB-style learner over graph edge activation probabilities.

    Each edge carries a 'features' column vector; the activation
    probability is modelled as theta^T . features.  pull_superarm()
    scores edges optimistically (UCB) and picks seeds greedily;
    update() performs the ridge-regression-style accumulator updates.
    """

    def __init__(self, Graph, budget, n_features, c):
        """
        Graph      -- networkx-like graph whose edges carry a 'features'
                      column vector of length n_features.
        budget     -- number of seeds selected per round.
        n_features -- dimensionality of the edge feature vectors.
        c          -- exploration coefficient for the UCB term.
        """
        self.graph = copy.deepcopy(Graph)
        self.n_features = n_features
        self.M = np.identity(self.n_features)
        # BUG FIX: the bias vector was reshaped with a hard-coded 4;
        # use n_features so any feature dimensionality works.
        self.b = np.zeros(self.n_features).reshape(self.n_features, 1)
        self.c = c
        self.budget = budget
        self.t = 0
        self.theta = np.dot(np.linalg.inv(self.M), self.b)
        # Initialise every edge probability from the (zero) estimate.
        for node1, node2 in self.graph.edges():
            self.graph[node1][node2]['prob'] = np.dot(self.theta.T, self.graph[node1][node2]['features']).item()

    def pull_superarm(self):
        """Select a super-arm: every edge incident to a greedily chosen seed."""
        inv_M = np.linalg.inv(self.M)
        self.theta = np.dot(inv_M, self.b)
        # Optimism in the face of uncertainty: score each edge by its
        # UCB, clipped to a valid probability in [0, 1].
        for edge in self.graph.edges:
            feature = self.graph[edge[0]][edge[1]]['features']
            # PERF: reuse inv_M instead of re-inverting M per edge.
            ucb = np.clip(np.dot(self.theta.T, feature) + self.c * np.sqrt(
                np.dot(feature.T, np.dot(inv_M, feature))),
                          0, 1)
            self.graph[edge[0]][edge[1]]['prob'] = ucb
        superarm = set()
        seeds = greedy_celf(self.graph, self.budget)[1]
        print('\nFor t = {} pulled arms from these seeds {}'.format(self.t, seeds))
        for seed in seeds:
            for u, v in self.graph.edges():
                if (u == seed): superarm.add((u, v))
                if (v == seed) and (u, v) not in superarm: superarm.add((u, v))
        return superarm

    def update(self, reward):
        """Incorporate observed rewards (dict mapping edge -> 0/1)."""
        self.t += 1
        for (u, v) in reward.keys():
            features = self.graph[u][v]['features']
            self.M = self.M + np.dot(features, features.T)
            if reward[(u, v)] == 1:
                self.b = self.b + features

    def get_estimated_probabilities(self):
        """Return the current point estimate of each edge's probability."""
        estimated_prob = dict.fromkeys(self.graph.edges, 1)
        for node1, node2 in self.graph.edges():
            estimated_prob[(node1, node2)] = np.dot(self.theta.T, self.graph[node1][node2]['features'])
        return estimated_prob
|
# Copyright 2020 LMNT, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import argparse
import numpy as np
import tensorflow as tf
import haste_tf
import torch
import torch.nn as nn
import haste_pytorch
def stfu():
    """Silence TensorFlow's C++ and Python logging output."""
    import os
    # '4' suppresses all C++-side log messages below FATAL.
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '4'
    tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
def copy_weights_gru(rnn_tf, rnn_pt):
    """Copy GRU weights from a haste_tf layer into its haste_pytorch twin."""
    weights = rnn_tf.fw_layer.get_weights()
    for name in ('kernel', 'recurrent_kernel', 'bias', 'recurrent_bias'):
        tensor = torch.Tensor(weights[name].numpy())
        setattr(rnn_pt, name, nn.Parameter(tensor))
def copy_weights_indrnn(rnn_tf, rnn_pt):
    """Copy IndRNN weights from a haste_tf layer into its haste_pytorch twin."""
    weights = rnn_tf.fw_layer.get_weights()
    for name in ('kernel', 'recurrent_scale', 'bias'):
        tensor = torch.Tensor(weights[name].numpy())
        setattr(rnn_pt, name, nn.Parameter(tensor))
def copy_weights_layer_norm_gru(rnn_tf, rnn_pt):
    """Copy LayerNormGRU weights (incl. the layer-norm gamma) TF -> PyTorch."""
    weights = rnn_tf.fw_layer.get_weights()
    for name in ('kernel', 'recurrent_kernel', 'bias', 'recurrent_bias', 'gamma'):
        tensor = torch.Tensor(weights[name].numpy())
        setattr(rnn_pt, name, nn.Parameter(tensor))
def copy_weights_layer_norm_indrnn(rnn_tf, rnn_pt):
    """Copy LayerNormIndRNN weights (incl. the layer-norm gamma) TF -> PyTorch."""
    weights = rnn_tf.fw_layer.get_weights()
    for name in ('kernel', 'recurrent_scale', 'bias', 'gamma'):
        tensor = torch.Tensor(weights[name].numpy())
        setattr(rnn_pt, name, nn.Parameter(tensor))
def copy_weights_layer_norm_lstm(rnn_tf, rnn_pt):
    """Copy LayerNormLSTM weights (incl. gamma/gamma_h/beta_h) TF -> PyTorch."""
    weights = rnn_tf.fw_layer.get_weights()
    for name in ('kernel', 'recurrent_kernel', 'bias', 'gamma', 'gamma_h', 'beta_h'):
        tensor = torch.Tensor(weights[name].numpy())
        setattr(rnn_pt, name, nn.Parameter(tensor))
def copy_weights_lstm(rnn_tf, rnn_pt):
    """Copy trained LSTM weights from the TF layer onto the PyTorch module."""
    weights = rnn_tf.fw_layer.get_weights()
    # Same parameter set on both sides; wrap each TF tensor as a torch Parameter.
    for name in ('kernel', 'recurrent_kernel', 'bias'):
        setattr(rnn_pt, name, nn.Parameter(torch.Tensor(weights[name].numpy())))
# Test-harness configuration: tensor shapes used for the TF-vs-PyTorch comparison.
batch_size = 32
time_steps = 250
input_size = 128
hidden_size = 256
# CLI name -> haste TF RNN class.
RNN_MAP = {
    'gru': haste_tf.GRU,
    'indrnn': haste_tf.IndRNN,
    'layer_norm_gru': haste_tf.LayerNormGRU,
    'layer_norm_indrnn': haste_tf.LayerNormIndRNN,
    'layer_norm_lstm': haste_tf.LayerNormLSTM,
    'lstm': haste_tf.LSTM,
}
# haste TF RNN class -> equivalent haste PyTorch class.
TF_TO_PT = {
    haste_tf.GRU: haste_pytorch.GRU,
    haste_tf.IndRNN: haste_pytorch.IndRNN,
    haste_tf.LayerNormGRU: haste_pytorch.LayerNormGRU,
    haste_tf.LayerNormIndRNN: haste_pytorch.LayerNormIndRNN,
    haste_tf.LayerNormLSTM: haste_pytorch.LayerNormLSTM,
    haste_tf.LSTM: haste_pytorch.LSTM,
}
# haste TF RNN class -> weight-copy helper matching that architecture's parameters.
WEIGHT_COPY_MAP = {
    haste_tf.GRU: copy_weights_gru,
    haste_tf.IndRNN: copy_weights_indrnn,
    haste_tf.LayerNormGRU: copy_weights_layer_norm_gru,
    haste_tf.LayerNormIndRNN: copy_weights_layer_norm_indrnn,
    haste_tf.LayerNormLSTM: copy_weights_layer_norm_lstm,
    haste_tf.LSTM: copy_weights_lstm,
}
def run_rnn(rnn_type, x):
    """Compare the TF and PyTorch implementations of one haste RNN type.

    Prints the max absolute difference of the outputs and of the input
    gradients between the two frameworks.
    """
    rnn_tf = rnn_type(hidden_size)
    rnn_pt = TF_TO_PT[rnn_type](input_size, hidden_size, batch_first=True)
    rnn_tf.build(x.shape)
    # Give both implementations identical weights before comparing.
    WEIGHT_COPY_MAP[type(rnn_tf)](rnn_tf, rnn_pt)
    x1 = tf.convert_to_tensor(x)
    x2 = torch.Tensor(x)
    x2.requires_grad_(True)
    # Forward pass + input gradient through the TF implementation.
    with tf.GradientTape() as tape:
        tape.watch(x1)
        y1, _ = rnn_tf(x1, training=True)
        g1 = tape.gradient(y1, x1)
    # Forward pass + input gradient through the PyTorch implementation.
    y2, _ = rnn_pt(x2)
    y2.backward(torch.ones_like(y2))
    # NOTE(review): x is built as [time_steps, batch, input] in main() while
    # rnn_pt is constructed with batch_first=True -- confirm this is intended.
    print(np.amax(np.abs(y1.numpy() - y2.detach().numpy())))
    print(np.amax(np.abs(g1.numpy() - x2.grad.data.numpy())))
def main(args):
    """Run the TF-vs-PyTorch comparison for one RNN type or for all of them.

    args.rnn_type is either a key of RNN_MAP or 'all'.
    """
    tf.compat.v1.enable_eager_execution()
    stfu()
    # One shared random input batch for every comparison run.
    x = np.random.normal(size=[time_steps, batch_size, input_size]).astype(np.float32)
    if args.rnn_type == 'all':
        for type_name, rnn_type in RNN_MAP.items():
            print(f'[{type_name}]')
            run_rnn(rnn_type, x)
            print('')
    else:
        print(f'[{args.rnn_type}]')
        rnn_type = RNN_MAP[args.rnn_type]
        # run_rnn() returns nothing; the previous unused `rnn = ...` binding
        # has been removed.
        run_rnn(rnn_type, x)
if __name__ == '__main__':
    # CLI: optional positional RNN type; defaults to running every type.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'rnn_type',
        nargs='?',
        default='all',
        choices=list(RNN_MAP.keys()) + ['all'])
    main(parser.parse_args())
|
import wechatsogou
from collections import Iterable
import json
def show_info(item):
    """Print a result: iterate iterables item-by-item, report missing data for None.

    Strings are iterable and will print one character per line, matching the
    original behavior.
    """
    # `from collections import Iterable` (used at module level) was removed in
    # Python 3.10; import the supported location locally so this function keeps
    # working regardless of the stale module-level import.
    from collections.abc import Iterable
    if isinstance(item, Iterable):
        for i in item:
            print(i)
    elif item is None:
        print("没有相关信息,抱歉")
    else:
        print(item)
# Repeat the captcha warning three times so the user does not miss it.
for _ in range(3):
    print("请确定验证码图片后,关闭图片,准确填写验证码!")
def get_length(generator):
    """Return the number of items, consuming the iterable when it has no len()."""
    try:
        return len(generator)
    except TypeError:
        # No __len__: count by exhausting the iterator.
        return sum(1 for _ in generator)
def printf_gzh(gzh):
    """Print the profile fields (name, introduction, wechat id) of an account dict."""
    for label, key in (("公众号名:", 'wechat_name'),
                       ("介绍:", 'introduction'),
                       ("微信公众号ID:", 'wechat_id')):
        print(label + gzh[key])
def printf_article(arts):
    """Print title/abstract/url for one article dict or a list of article dicts.

    Bug fix: the original read the leaked module-level variable `art` instead
    of iterating its `arts` parameter, so it printed stale data (or raised
    NameError when no global `art` existed yet).
    """
    if isinstance(arts, dict):
        arts = [arts]
    for art in arts:
        print("文章名:" + art['title'])
        print("文章简介:" + art['abstract'])
        print("文章链接:" + art['url'])
def printf_json(data):
    """Pretty-print `data` as indented JSON, keeping non-ASCII characters readable.

    The parameter was renamed from `str`, which shadowed the builtin.
    """
    print(json.dumps(data, sort_keys=True, indent=4, separators=(', ', ': '), ensure_ascii=False))
# Interactive console client; captcha_break_time allows 3 captcha retries.
ws_api = wechatsogou.WechatSogouAPI(captcha_break_time=3)
while True:
    print()
    print("--------------------------")
    print("输入1,查看指定公众号信息")
    print("输入2,搜索相关文章")
    print("输入3,解析执行公众号最近文章")
    print("输入4,请输入关键字,获取相关信息")
    print("输入其他字符,退出系统\n")
    # The menu promises that any non-menu character exits, but int() used to
    # raise ValueError on non-numeric input; treat that case as "exit".
    try:
        i = int(input())
    except ValueError:
        print("感謝使用!Bye~~~~")
        break
    if i == 1:
        msg = input("输入你要查询的公众号\n")
        ws_info = ws_api.get_gzh_info(msg)
        printf_gzh(ws_info)
    elif i == 2:
        msg = input("输入你感兴趣的文章标题\n")
        article = ws_api.search_article(msg)
        # Only the first hit is shown.
        for item in article:
            art = item['article']
            gzh = item['gzh']
            print("公众号名:"+gzh['wechat_name'])
            printf_article(art)
            break
    elif i == 3:
        msg = input("输入你关心的公众号名称,将展示其近期文章\n")
        new_article = ws_api.get_gzh_article_by_history(msg)
        gzh = new_article['gzh']
        printf_gzh(gzh)
        art = new_article['article']
        printf_article(art)
    elif i == 4:
        kw = input("请输入关键字,获取相关信息\n")
        data = ws_api.get_sugg(kw)
        for item in data:
            print(item)
    else:
        print("感謝使用!Bye~~~~")
        break
|
"""Indy error handling."""
from typing import Type
from indy.error import IndyError
from ...core.error import BaseError
class IndyErrorHandler:
    """Trap IndyError and raise an appropriate LedgerError instead."""

    def __init__(self, message: str = None, error_cls: Type[BaseError] = BaseError):
        """Init the context manager."""
        self.message = message
        self.error_cls = error_cls

    def __enter__(self):
        """Enter the context manager."""
        return self

    def __exit__(self, err_type, err_value, err_traceback):
        """Exit the context manager, translating any IndyError raised inside."""
        if not isinstance(err_value, IndyError):
            return
        raise IndyErrorHandler.wrap_error(
            err_value, self.message, self.error_cls
        ) from err_value

    @classmethod
    def wrap_error(
        cls,
        err_value: IndyError,
        message: str = None,
        error_cls: Type[BaseError] = BaseError,
    ) -> BaseError:
        """Create an instance of BaseError from an IndyError."""
        err_msg = message or "Exception while performing indy operation"
        indy_message = getattr(err_value, "message", None)
        if indy_message:
            err_msg = f"{err_msg}: {indy_message}"
        wrapped = error_cls(err_msg)
        # Keep the original traceback attached for debugging.
        wrapped.__traceback__ = err_value.__traceback__
        return wrapped
|
def fact(n):
    """Return n! computed iteratively; fact(0) == fact(1) == 1.

    Bug fix: the original ended with a bare `return`, so it always returned
    None and the caller's division crashed with TypeError.
    """
    f = 1
    for i in range(1, n + 1):
        f = f * i
    return f
# Compute the number of permutations nPr = n! / (n-r)!.
n = int(input("enter n "))
r = int(input("enter r "))
# Also reject r > n: fact(n - r) with a negative argument silently returns 1,
# which previously produced a wrong (huge) answer instead of an error.
if n < 1 or r < 1 or r > n:
    print("check numbers ")
else:
    # Integer division: a permutation count is always a whole number.
    ans = fact(n) // fact(n - r)
    print(ans)
# Read the initial target values (space-separated integers) from stdin.
targets = [int(x) for x in input().split()]
def shout(idx, power, data):
    """Reduce target `idx` by `power`; remove it once its value drops to <= 0.

    Out-of-range (including negative) indices are ignored. Returns the
    mutated list. Idiom fix: chained comparison instead of `in range(...)`.
    """
    if 0 <= idx < len(data):
        data[idx] -= power
        if data[idx] <= 0:
            data.pop(idx)
    return data
def add(idx, power, data):
    """Insert a new target with value `power` at `idx`, mutating `data` in place.

    Prints an error instead when `idx` is out of range (note: inserting at
    the very end is rejected, matching the original `in range(len(data))`
    behavior). Idiom fix: chained comparison instead of `in range(...)`.
    """
    if 0 <= idx < len(data):
        data.insert(idx, power)
    else:
        print("Invalid placement!")
def strike(idx, radius, data):
    """Remove the targets within `radius` around `idx`; return the resulting list.

    When the blast would extend past either end of the list, the strike
    misses and the list is returned unchanged. Out-of-range `idx` is ignored
    silently. Idiom fix: chained comparisons instead of `in range(...)`,
    preserving the exact original semantics (both bounds checked per edge).
    """
    if 0 <= idx < len(data):
        left_edge = idx - radius
        right_edge = idx + radius
        if 0 <= left_edge < len(data) and 0 <= right_edge < len(data):
            data = data[:left_edge] + data[right_edge + 1:]
        else:
            print("Strike missed!")
    return data
# Interactive command loop: process "Shoot"/"Add"/"Strike" commands until
# "End", then print the surviving targets joined with '|'.
while True:
    command = input()
    if command == "End":
        break
    action, index, value = command.split()
    index = int(index)
    value = int(value)
    if action == "Shoot":
        targets = shout(index, value, targets)
    elif action == "Add":
        # add() mutates the list in place; no reassignment needed.
        add(index, value, targets)
    elif action == "Strike":
        targets = strike(index, value, targets)
print('|'.join([str(x) for x in targets]))
import matplotlib.pyplot as plt
# Create the canvas (3x2 inches, light grey background).
plt.figure(figsize=(3, 2), facecolor='lightgrey')
# Draw an empty plot.
plt.plot()
# Split the figure into a 2x2 grid of subplots.
plt.subplot(2, 2, 1)
plt.subplot(2, 2, 2)
plt.subplot(2, 2, 3)
plt.subplot(2, 2, 4)
# Use a Chinese-capable font (SimHei) so the title renders correctly.
plt.rcParams['font.sans-serif'] = 'SimHei'
plt.suptitle('我是Hello')
# Reserve the top 10% of the figure for the suptitle.
plt.tight_layout(rect=[0, 0, 1, 0.9])
plt.show()
|
# -*- coding: utf-8 -*-
from django.conf import settings
import re
import operator
# Mission-framework settings; every value can be overridden in the Django
# settings module via the same name.
OBJECTS_LIST_SEPARATOR = getattr( settings, "OBJECTS_LIST_SEPARATOR","\n")
# Raw string fixes the invalid "\." escape warning; the compiled pattern is
# byte-identical to the original.
OBJECTS_TYPE_RE = getattr( settings, "OBJECTS_TYPE_RE", re.compile(r"^(([^{]+)\.)?([^{]+)") )
MISSION_NOTIFICATION_TOKEN = getattr( settings, "MISSION_NOTIFICATION_TOKEN", "[[mission_notification_token]]")
# Filled in at runtime by the middleware (module-level singleton slot).
MISSION_MIDDLEWARE_INSTANCE= None
MISSION_MIDDLEWARE_DEBUG = getattr( settings, "MISSION_MIDDLEWARE_DEBUG", True )
# Dotted paths to the pluggable classes/callables used by the middleware.
MISSION_DESCRIPTOR_CLASS = getattr( settings, "MISSION_DESCRIPTOR_CLASS", "abe.missions.models.MissionDescriptor" )
MISSION_RESULTS_PROCESSOR = getattr( settings, "MISSION_RESULTS_PROCESSOR", "abe.missions.middleware.default_mission_results_processor" )
MISSION_RESPONSE_PROCESSOR = getattr( settings, "MISSION_RESPONSE_PROCESSOR", "abe.missions.middleware.default_mission_response_processor" )
MISSION_CONTEXT_PROCESSOR = getattr( settings, "MISSION_CONTEXT_PROCESSOR", "abe.missions.middleware.default_mission_context_processor" )
MISSION_TRIGGERS_LIST = getattr( settings, "MISSION_TRIGGERS_LIST", "(mission,trigger)" )
MISSION_CONDITIONS_LIST = getattr( settings, "MISSION_CONDITIONS_LIST", (
    "abe.missions.conditions.MissionCondition",
    "abe.missions.conditions.TimeBombCondition",
    "abe.missions.conditions.TrueCondition",
    "abe.missions.conditions.NumericComparisonCondition",
    "abe.missions.conditions.MissionRequiredCondition",
    "abe.missions.conditions.MissionsDoneCountCondition",
    "abe.missions.conditions.MissionStartedSinceCondition",
) )
MISSION_REWARDS_LIST = getattr( settings, "MISSION_REWARDS_LIST", (
    "abe.missions.rewards.MissionReward",
    "abe.missions.rewards.TemporaryMissionReward",
))
# Operator-symbol -> comparison callable, used by NumericComparisonCondition.
COMPARISON_OPERATORS_MAP = getattr( settings, "COMPARISON_OPERATORS_MAP", {
    '==':operator.eq,
    '!=':operator.ne,
    '>=':operator.ge,
    '>':operator.gt,
    '<=':operator.le,
    '<':operator.lt,
} )
|
#!/usr/bin/env python3
"""
exercise 2 napalm
"""
from pprint import pprint
from napalm_devices import d_devices, network_devices
from napalm_functions import get_connection, get_backup
# File extension appended to each backup file.
suffix = ".txt"
if __name__ == "__main__":
    # Back up the configuration of every device in the inventory.
    for my_device in network_devices:
        print("")
        print("Open device connection")
        device = get_connection(my_device)
        print(get_backup(device, suffix))
        # Close the session so no stale connection is left on the device.
        device.close()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class UnfreezeExtendParams(object):
    """Parameter holder for Alipay fund-auth unfreeze extend parameters.

    Mirrors the generated-SDK contract: four optional attributes, serialized
    to / deserialized from plain dicts.
    """

    def __init__(self):
        self._quit_type = None
        self._total_discount_amount = None
        self._total_real_pay_amount = None
        self._total_task_count = None

    @property
    def quit_type(self):
        return self._quit_type

    @quit_type.setter
    def quit_type(self, value):
        self._quit_type = value

    @property
    def total_discount_amount(self):
        return self._total_discount_amount

    @total_discount_amount.setter
    def total_discount_amount(self, value):
        self._total_discount_amount = value

    @property
    def total_real_pay_amount(self):
        return self._total_real_pay_amount

    @total_real_pay_amount.setter
    def total_real_pay_amount(self, value):
        self._total_real_pay_amount = value

    @property
    def total_task_count(self):
        return self._total_task_count

    @total_task_count.setter
    def total_task_count(self, value):
        self._total_task_count = value

    def to_alipay_dict(self):
        """Serialize the truthy attributes into a plain dict (SDK convention)."""
        params = dict()
        for name in ('quit_type', 'total_discount_amount',
                     'total_real_pay_amount', 'total_task_count'):
            value = getattr(self, name)
            if value:
                # Nested API objects know how to serialize themselves.
                if hasattr(value, 'to_alipay_dict'):
                    value = value.to_alipay_dict()
                params[name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build an instance from a dict; return None for empty/falsy input."""
        if not d:
            return None
        o = UnfreezeExtendParams()
        for name in ('quit_type', 'total_discount_amount',
                     'total_real_pay_amount', 'total_task_count'):
            if name in d:
                setattr(o, name, d[name])
        return o
|
from typing import List
class Solution:
    def maxIncreaseKeepingSkyline(self, grid: List[List[int]]) -> int:
        """Return the total height increase that keeps both skylines unchanged.

        Each building may grow up to min(max of its row, max of its column)
        without altering the skyline seen from any side.
        """
        row_max = [max(row) for row in grid]
        col_max = [max(col) for col in zip(*grid)]
        return sum(
            min(row_max[i], col_max[j]) - height
            for i, row in enumerate(grid)
            for j, height in enumerate(row)
        )
|
# -*- coding: utf-8 -*-
from collective.transmogrifier.transmogrifier import configuration_registry
from collective.transmogrifier.transmogrifier import Transmogrifier
from optparse import OptionGroup
from optparse import OptionParser
import logging
import mr.migrator
import Products.GenericSetup
import sys
logging.basicConfig(level=logging.INFO)
try:
import configparser
except ImportError:
import ConfigParser as configparser
try:
from Zope2.App.zcml import load_config
except:
try:
from Products.Five.zcml import load_config
except:
from zope.configuration.xmlconfig import XMLConfig as load_config
load_config = lambda config, context: load_config(config, context)() # noqa
class Context:
    """Bare placeholder object passed to Transmogrifier as its context."""
    pass
class NoErrorParser(OptionParser):
    """OptionParser variant that swallows argument errors instead of exiting.

    Bug fix: OptionParser.error() is called as error(msg); the original
    override took only `self`, so any parse error raised TypeError instead
    of being silenced.
    """

    def error(self, msg):
        """Ignore parse errors (the base class would print and sys.exit)."""
        pass
def runner(args={}, pipeline=None):
    """Command-line entry point: locate a transmogrifier pipeline, load the
    required ZCML, apply --section:key=value overrides, and run the pipeline.

    NOTE(review): the mutable default ``args={}`` is kept for backward
    compatibility, but it persists between in-process calls.
    """
    parser = OptionParser()
    parser.add_option("--pipeline", dest="pipeline",
                      help="Transmogrifier pipeline.cfg to use",
                      metavar="FILE")
    parser.add_option("--show-pipeline", dest="showpipeline",
                      action="store_true",
                      help="Show contents of the pipeline")
    parser.add_option("--zcml", dest="zcml",
                      action="store",
                      help="modules in the path to load zcml from")
    # Parse just the pipeline args
    ispipeline = lambda arg: [
        a for a in [
            '--pipeline',
            '--show-pipeline',
            '--zcml'] if arg.startswith(a)]
    pargs = [arg for arg in sys.argv[1:] if ispipeline(arg)]
    (options, cargs) = parser.parse_args(pargs)
    if options.pipeline is not None:
        config = options.pipeline
    elif pipeline is not None:
        config = pipeline
    else:
        # XXX How about if we look for pipeline.cfg in the cwd?
        # config = resource_filename(__name__, 'pipeline.cfg')
        config = 'pipeline.cfg'
    # XXX This delays loading a bit too long. Getting:
    # ConfigurationError: ('Unknown directive',
    # u'http://namespaces.zope.org/genericsetup', u'importStep')
    # again
    #
    load_config('configure.zcml', mr.migrator)
    if options.zcml:
        for zcml in options.zcml.split(','):
            if not zcml.strip():
                continue
            load_config(
                'configure.zcml',
                __import__(
                    zcml,
                    fromlist=zcml.split('.')))
    pipelineid, cparser = load_pipeline(config, parser)
    pargs = [arg for arg in sys.argv[1:] if not arg.startswith('--template')]
    (options, cargs) = parser.parse_args(pargs)
    # Collect --section:key=value arguments into a nested dict; a key of
    # "debug" instead switches that section's logger to DEBUG.
    cargs = {}
    for k, _, v in [a.partition('=') for a in sys.argv[1:]]:
        k = k.lstrip('--')
        if ':' in k:
            part, _, key = k.partition(':')
            if key.lower() == 'debug':
                logger = logging.getLogger(part)
                logger.setLevel(logging.DEBUG)
            else:
                section = cargs.setdefault(part, {})
                if key in section:
                    # Repeated keys accumulate as newline-joined values.
                    section[key] = '%s\n%s' % (section[key], v)
                else:
                    section[key] = v
        else:
            pass
            # cargs[k] = v
    for k, v in cargs.items():
        args.setdefault(k, {}).update(v)
    overrides = {}
    if isinstance(args, type('')):
        for arg in args:
            section, keyvalue = arg.split(':', 1)
            key, value = keyvalue.split('=', 1)
            if isinstance(value, type([])):
                value = '\n'.join(value)
            # Bug fix: previously used the literal key 'section', which dumped
            # every override under one bogus "section" entry instead of the
            # section actually named in the argument.
            overrides.setdefault(section, {})[key] = value
    else:
        overrides = args
    if options.showpipeline:
        for section, values in overrides.items():
            for key, value in values.items():
                cparser.set(section, key, value)
        cparser.write(sys.stdout)
        return
    # delay this so arg processing not so slow
    load_config("meta.zcml", Products.GenericSetup)
    load_config("configure.zcml", Products.GenericSetup)
    load_config('configure.zcml', mr.migrator)
    # Make sure GS ZCML is loaded before we load ours
    context = Context()
    transmogrifier = Transmogrifier(context)
    transmogrifier(pipelineid, **overrides)
def load_pipeline(config, parser):
    """Load a pipeline config (registered name or file path), registering it
    when needed, and add one OptionGroup per pipeline section to `parser`.

    Returns (pipelineid, cparser) for the loaded configuration. Recurses into
    any [transmogrifier] include= entries.
    """
    cparser = configparser.ConfigParser()
    try:
        # `config` may name an already-registered configuration...
        # NOTE(review): bare except also hides real errors such as IOError.
        config_info = configuration_registry.getConfiguration(config)
        fp = open(config_info['configuration'])
        pipelineid = config
    except:
        # ...otherwise treat it as a file path and register it under a
        # fixed id.
        fp = open(config)
        configuration_registry.registerConfiguration(
            u'transmogrify.config.mr.migrator',
            u"",
            u'', config)
        pipelineid = 'transmogrify.config.mr.migrator'
    try:
        # configparser
        cparser.read_file(fp)
    except:
        # ConfigParser
        cparser.read(config)
    fp.close()
    if cparser.has_option('transmogrifier', 'include'):
        load_pipeline(cparser.get('transmogrifier', 'include'), parser)
    if cparser.has_option('transmogrifier', 'pipeline'):
        pipeline = [
            p.strip() for p in cparser.get(
                'transmogrifier',
                'pipeline').split()]
    else:
        pipeline = []
    for section in pipeline:
        if section == 'transmogrifier':
            continue
        if cparser.has_option(section, '@doc'):
            doc = cparser.get(section, '@doc')
        else:
            doc = ''
        group = OptionGroup(parser, section, doc)
        for key, value in cparser.items(section):
            # Keys starting with '@' describe CLI options for the section;
            # an ALL-CAPS "METAVAR: help" value means a value-taking option,
            # anything else becomes a boolean flag.
            if key.startswith('@'):
                if key == '@doc':
                    continue
                metavar, _, help = value.partition(': ')
                if metavar.upper() == metavar:
                    action = "store"
                else:
                    action = "store_true"
                    help = value
                arg = str("--%s:%s" % (section, key[1:]))
                group.add_option(arg, action=action,
                                 help=help,
                                 metavar=metavar)
        parser.add_option_group(group)
    return pipelineid, cparser
|
import pika
import time
from config import rabbit_mq_data
# Credentials and a blocking connection to the configured RabbitMQ broker.
credentials = pika.PlainCredentials(rabbit_mq_data['login'], rabbit_mq_data['password'])
connection = pika.BlockingConnection(pika.ConnectionParameters(rabbit_mq_data['host'],
                                     rabbit_mq_data['port'],
                                     rabbit_mq_data['v_host'],
                                     credentials))
channel = connection.channel()
def _main():
    """Poll the queue forever, printing and acking each message (5 s idle wait)."""
    while True:
        method_frame, header_frame, body = channel.basic_get(rabbit_mq_data['queue_name'])
        if method_frame:
            # Bug fix: basic_get delivers the body as bytes, which has no
            # .encode(); the original raised AttributeError. Decode instead.
            print(body.decode())
            channel.basic_ack(method_frame.delivery_tag)
        else:
            print('No messages')
            time.sleep(5)
if __name__ == '__main__':
    # Run the polling loop only when executed as a script.
    _main()
|
import math
import scipy.optimize
import codecad
from codecad.shapes import *
from codecad.util import Vector
import util
import tools
import vitamins
import parameters
from parameters import wheel_clearance
# --- Suspension design constants (dimensions in mm unless noted) ---
bogie_count = 6 # Count of bogies on both sides of the vehicle
suspension_spacing = 120
# Clearances between mating printed parts.
arm_clearance = 1
pivot_flat_clearance = 0.15
pivot_round_clearance = 0.1
road_wheel_screw = vitamins.m3x25_screw # Screw used as an axle for the road wheel
spring_screw = vitamins.m3x25_screw # Screw used for mounting the spring to the arm
wheel_diameter = 34
wheel_width = road_wheel_screw.length + road_wheel_screw.head_height # Total width of the wheel pair
arm_width = 8
arm_thickness = 10
arm_pivot_thickness = 15
arm_knee_height = 14
arm_knee_angle = 15
bogie_wheel_spacing = 55 # [mm] distance between wheels of one bogie
bogie_width = arm_width
bogie_pivot_z = 7
bogie_arm_cutout_angle = 75 # Degrees
suspension_min_travel = 30 # [mm]
suspension_sag = 0.2 # Ratio of travel from neutral position down
pivot_guide_length = 10 # How long part of the pivot screw shaft is kept inside the arm
pivot_screw_head_countersink = 4.5
# Derived geometry values.
wheel_gap = bogie_width + 2 * wheel_clearance
half_wheel_width = (wheel_width - wheel_gap) / 2
bogie_swing_angle = math.radians(bogie_arm_cutout_angle - arm_knee_angle) / 2
def get_spring_point(spring_arm_length, angle):
    """ Return coordinates of the spring attachment to the arm if the spring is at given angle
    (angle is between 0 (up position) and travel_angle) """
    theta = spring_up_angle - angle
    return Vector(math.cos(theta), math.sin(theta)) * spring_arm_length
def get_spring_anchor_point(spring_arm_length):
    """ Return the spring anchor point coordinates in 2D relative to arm pivot as codecad Vector.
    Spring is placed to be at right angle to the arm at full compression. """
    compressed_length = vitamins.spring.length - vitamins.spring.travel
    # Unit vector perpendicular to the fully-compressed spring arm direction.
    direction = Vector(-math.sin(spring_up_angle), math.cos(spring_up_angle))
    return get_spring_point(spring_arm_length, 0) + direction * compressed_length
def get_travel_angle(spring_arm_length, spring_anchor_point):
    """ Calculate travel angle of the suspension arm based on spring length.
    Uses spring anchor point placement from get_spring_anchor_point. """
    if spring_arm_length == 0:
        return 0
    compressed_length = vitamins.spring.length - vitamins.spring.travel
    anchor_distance = abs(spring_anchor_point)
    compressed_anchor_angle = math.atan(compressed_length / spring_arm_length)
    # Law of cosines on the triangle (pivot, spring point, anchor point).
    cos_value = (spring_arm_length**2 + anchor_distance**2 - vitamins.spring.length**2) / \
        (2 * spring_arm_length * anchor_distance)
    return math.acos(cos_value) - compressed_anchor_angle
def spring_arm_length_equation(spring_arm_length):
    """ Equation describing spring location relative to the pivot. """
    # Zero of this function is the spring arm length at which the spring axis
    # (at full travel) clears the pivot by exactly the required clearance.
    spring_anchor_point = get_spring_anchor_point(spring_arm_length)
    travel_angle = get_travel_angle(spring_arm_length, spring_anchor_point)
    spring_down_point = get_spring_point(spring_arm_length, travel_angle)
    spring_axis_to_pivot_point = util.point_to_line_distance(Vector(0, 0),
                                                             spring_anchor_point,
                                                             spring_down_point)
    return spring_axis_to_pivot_point - (arm_thickness / 2 + vitamins.spring.diameter / 2 + arm_clearance)
# Solve for the spring arm length, starting with the spring pointing straight
# down, then rotate the geometry so the anchor is level with the pivot.
spring_up_angle = -math.pi / 2
spring_arm_length = scipy.optimize.brentq(spring_arm_length_equation, 0, arm_thickness + vitamins.spring.diameter)
spring_anchor_point = get_spring_anchor_point(spring_arm_length)
travel_angle = get_travel_angle(spring_arm_length, spring_anchor_point)
# Rotate spring position so that its anchor point is at the same level as pivot
spring_up_angle += math.acos(spring_anchor_point.x / abs(spring_anchor_point))
# Recompute the dependent points with the rotated spring angle.
spring_anchor_point = get_spring_anchor_point(spring_arm_length)
spring_up_point = get_spring_point(spring_arm_length, 0)
spring_down_point = get_spring_point(spring_arm_length, travel_angle)
def get_wheel_force(arm_length, up_angle, angle):
    """ Return residual force on a group of wheels. """
    # Spring force at the current extension length.
    spring_point = get_spring_point(spring_arm_length, up_angle - angle)
    length = abs(spring_point - spring_anchor_point.flattened())
    spring_force = vitamins.spring.force(length)
    # Torque about the arm pivot = force * perpendicular distance to the spring axis.
    torque = spring_force * util.point_to_line_distance(Vector(0, 0),
                                                        spring_anchor_point.flattened(),
                                                        spring_point)
    wheel_force = torque / (arm_length * math.cos(angle))
    # Zero when the spring exactly carries this bogie's share of the design weight.
    return wheel_force - parameters.design_weight / bogie_count
def get_arm_angle(arm_length, y):
    """ Calculate angle of the suspension arm at the top position """
    ratio = y / arm_length
    return math.asin(ratio)
def get_arm_travel(arm_length, down_arm_angle, bogie_pivot_up_y):
    """ Calculate total length of wheel vertical travel """
    lowest_y = math.sin(down_arm_angle) * arm_length
    return bogie_pivot_up_y - lowest_y
def get_bogie_wheel_position(angle, side):
    """ Return the wheel center relative to the bogie pivot for a given swing angle.
    `side` selects the wheel (-1 / +1 as used by callers -- TODO confirm). """
    sin_a = math.sin(angle)
    cos_a = math.cos(angle)
    offset = side * bogie_wheel_spacing / 2
    return Vector(cos_a * offset + sin_a * bogie_pivot_z,
                  sin_a * offset - cos_a * bogie_pivot_z)
def bogie_pivot_up_y_equation(arm_length, bogie_pivot_up_y):
    """ Clearance equation for the bogie pivot height at full compression.
    Zero when the left wheel just clears the spring axis by wheel_clearance. """
    up_angle = get_arm_angle(arm_length, bogie_pivot_up_y)
    down_angle = up_angle - travel_angle
    travel = get_arm_travel(arm_length, down_angle, bogie_pivot_up_y)
    neutral_angle = get_arm_angle(arm_length, bogie_pivot_up_y - (1 - suspension_sag) * travel)
    bogie_pivot_up_point = Vector(math.cos(up_angle), math.sin(up_angle)) * arm_length
    # Bogie swung fully toward the spring at the arm's topmost position.
    left_angle = (up_angle - neutral_angle) - bogie_swing_angle
    left_wheel_position = bogie_pivot_up_point + get_bogie_wheel_position(left_angle, -1)
    dist_left = util.point_to_line_distance(left_wheel_position, spring_up_point, spring_anchor_point)
    dist_right = abs(bogie_pivot_up_point - (spring_down_point + Vector(suspension_spacing, 0)))
    ret1 = dist_left - wheel_diameter / 2 - vitamins.spring.diameter / 2 - wheel_clearance
    # NOTE(review): the right-side clearance condition is computed but disabled.
    #ret2 = dist_right - math.hypot(bogie_wheel_spacing / 2, bogie_pivot_z) - wheel_diameter / 2 - arm_thickness / 2 - wheel_clearance
    return ret1
    #return min(ret1, ret2)
def get_optimized_bogie_pivot_up_y(arm_length):
    """ Find the bogie pivot height at which the clearance equation is zero,
    falling back to the nearer bracket endpoint when no root is bracketed. """
    a = -arm_length / 2
    b = arm_length
    va = bogie_pivot_up_y_equation(arm_length, a)
    vb = bogie_pivot_up_y_equation(arm_length, b)
    if (va > 0) == (vb > 0):
        # No sign change: brentq cannot bracket a root; return the endpoint
        # whose residual is closer to zero.
        if abs(va) < abs(vb):
            return a
        else:
            return b
    return scipy.optimize.brentq(lambda x: bogie_pivot_up_y_equation(arm_length, x),
                                 a, b)
def arm_length_equation(arm_length):
    """ Residual wheel force at the neutral angle for a candidate arm length.
    Zero when the spring carries exactly this bogie's share of the weight. """
    bogie_pivot_up_y = get_optimized_bogie_pivot_up_y(arm_length)
    up_angle = get_arm_angle(arm_length, bogie_pivot_up_y)
    down_angle = up_angle - travel_angle
    travel = get_arm_travel(arm_length, down_angle, bogie_pivot_up_y)
    neutral_angle = get_arm_angle(arm_length, bogie_pivot_up_y - (1 - suspension_sag) * travel)
    ret = get_wheel_force(arm_length, up_angle, neutral_angle)
    return ret
# Solve for the arm length that balances the design weight at neutral position.
arm_length = scipy.optimize.brentq(arm_length_equation,
                                   vitamins.spring.length / 2, 3 * vitamins.spring.length)
bogie_pivot_up_y = get_optimized_bogie_pivot_up_y(arm_length)
arm_up_angle = get_arm_angle(arm_length, bogie_pivot_up_y)
arm_down_angle = arm_up_angle - travel_angle
suspension_travel = get_arm_travel(arm_length, arm_down_angle, bogie_pivot_up_y)
arm_neutral_angle = get_arm_angle(arm_length,
                                  bogie_pivot_up_y - (1 - suspension_sag) * suspension_travel)
# Sanity checks on the solved geometry.
assert arm_down_angle > -math.pi / 2
assert suspension_travel >= suspension_min_travel
assert arm_length > vitamins.spring.length - vitamins.spring.travel
assert arm_up_angle - arm_neutral_angle < 2 * bogie_swing_angle
assert arm_neutral_angle - arm_down_angle < 2 * bogie_swing_angle
assert abs(bogie_pivot_up_y_equation(arm_length, bogie_pivot_up_y)) < wheel_clearance / 100, "Check that the bogie clearances are met"
assert abs(arm_length_equation(arm_length)) < 1e-6
def road_wheel_generator(diameter, width, axle_diameter,
                         shoulder_height, shoulder_width,
                         o_ring_minor_diameter, wall_thickness, hole_blinding_layer_height,
                         screw_hole_diameter, screw_hole_depth,
                         hex_hole):
    """ Build the road wheel solid: a revolved profile with two o-ring grooves,
    an axle hole (hex or round), and radial lightening holes. """
    o_ring_protrusion = o_ring_minor_diameter / 2
    radius = diameter / 2 - o_ring_protrusion
    axle_radius = axle_diameter / 2
    # Half cross-section of the wheel (revolved below): rim, face, shoulder.
    wheel = polygon2d([(axle_radius, 0),
                       (radius, 0),
                       (radius, width),
                       (axle_radius + shoulder_width + shoulder_height, width),
                       (axle_radius + shoulder_width, width + shoulder_height),
                       (axle_radius, width + shoulder_height),
                       ])
    # Cut two evenly spaced o-ring grooves into the rim.
    o_ring_count = 2
    o_ring_spacing = (width - o_ring_count * o_ring_minor_diameter) / (1 + o_ring_count)
    for i in range(o_ring_count):
        pos = (i + 1) * o_ring_spacing + (i + 0.5) * o_ring_minor_diameter
        wheel -= circle(d=o_ring_minor_diameter).translated(radius, pos)
    wheel = wheel.revolved().rotated_x(90)
    if hole_blinding_layer_height:
        # Thin sacrificial layer over the screw hole so it prints without support.
        wheel += cylinder(r=radius - o_ring_protrusion,
                          h=hole_blinding_layer_height,
                          symmetrical=False).translated_z(screw_hole_depth)
    if hex_hole:
        wheel -= regular_polygon2d(n=6, d=screw_hole_diameter * 2 / math.sqrt(3)).extruded(2 * screw_hole_depth)
    else:
        wheel -= cylinder(d=screw_hole_diameter, h=2 * screw_hole_depth)
    # Radial lightening holes, sized to keep wall_thickness everywhere.
    lightening_hole_count = 5
    lightening_hole_inner_radius = max(axle_radius + shoulder_height + shoulder_width,
                                       axle_radius + wall_thickness,
                                       screw_hole_diameter / 2 + wall_thickness)
    lightening_hole_outer_radius = radius - o_ring_protrusion - wall_thickness
    lightening_hole_center_radius = (lightening_hole_inner_radius + lightening_hole_outer_radius) / 2
    lightening_hole_polygon = regular_polygon2d(n=lightening_hole_count,
                                                r=lightening_hole_center_radius)
    lightening_hole_diameter = min(lightening_hole_outer_radius - lightening_hole_inner_radius,
                                   lightening_hole_polygon.side_length - wall_thickness)
    wheel -= cylinder(d=lightening_hole_diameter, h=float("inf")) \
        .translated_x(lightening_hole_center_radius) \
        .rotated((0, 0, 1), 360, n=lightening_hole_count)
    return wheel
def bogie_generator(wheel_spacing, lower_thickness, max_upper_thickness,
                    bearing_diameter, bearing_thickness, bearing_shoulder_size,
                    thin_wall, thick_wall,
                    pivot_z,
                    wheel_diameter,
                    arm_thickness,
                    arm_cutout_thickness,
                    arm_cutout_angle,
                    shoulder_screw,
                    arm_knee_height,
                    arm_clearance,
                    weel_clearance):
    """ Build the bogie solid: a triangular body carrying two wheels with
    bearings, pivoting on a shoulder screw, with a cutout for the arm.

    NOTE(review): the parameter `weel_clearance` (typo) is unused; the body
    reads the module-level `wheel_clearance` instead -- confirm intended.
    """
    assert arm_cutout_angle < 180
    upper_thickness = min(max_upper_thickness, shoulder_screw.length + shoulder_screw.head_height)
    bearing_radius = bearing_diameter / 2
    bearing_shoulder_radius = bearing_radius - bearing_shoulder_size
    nut_outer_diameter = shoulder_screw.lock_nut.s * 2 / math.sqrt(3)
    pivot_protected_diameter = max(shoulder_screw.diameter2, nut_outer_diameter)
    pivot_end_diameter = pivot_protected_diameter + 2 * thick_wall
    pivot_protected_diameter += 2 * thin_wall
    pivot_to_wheel_distance = math.hypot(wheel_spacing / 2, pivot_z)
    wheel_cutout_diameter = wheel_diameter + 2 * wheel_clearance
    wheel_cutout_angled_part = min(pivot_to_wheel_distance - wheel_cutout_diameter / 2 - pivot_protected_diameter / 2, (upper_thickness - lower_thickness) / 2)
    assert pivot_to_wheel_distance >= thin_wall + wheel_cutout_diameter / 2 + arm_thickness / 2
    assert arm_knee_height > pivot_end_diameter / 2 # This is a neccessary but not sufficient condition!
    # Main triangular body: wheel axles at the base corners, pivot at the apex.
    bogie = polygon2d([(-wheel_spacing / 2, 0),
                       (wheel_spacing / 2, 0),
                       (0, pivot_z)]) \
        .offset((bearing_diameter + thin_wall + thick_wall) / 2) \
        .extruded(upper_thickness) \
        .rotated_x(90) \
        .translated_z((thick_wall - thin_wall) / 2)
    bottom_z = -bearing_radius - thin_wall
    bogie += cylinder(d=pivot_end_diameter, h=upper_thickness) \
        .rotated_x(90) \
        .translated_z(pivot_z)
    cutout_tmp_point = (wheel_spacing * math.sin(math.radians(arm_cutout_angle / 2)), wheel_spacing * math.cos(math.radians(arm_cutout_angle / 2)))
    screw_head_plane_y = -upper_thickness / 2 + shoulder_screw.head_height
    nut_plane_y = screw_head_plane_y + shoulder_screw.length - shoulder_screw.lock_nut.height - thin_wall
    # Screw head
    bogie -= cylinder(d=shoulder_screw.head_diameter, h=upper_thickness, symmetrical=False) \
        .rotated_x(90) \
        .translated(0, screw_head_plane_y, pivot_z)
    # Smooth part
    bogie -= cylinder(d=shoulder_screw.diameter2, h=2 * (shoulder_screw.length - shoulder_screw.screw_length)) \
        .rotated_x(90) \
        .translated(0, screw_head_plane_y, pivot_z)
    # Screw part
    bogie -= cylinder(d=shoulder_screw.diameter, h=float("inf")) \
        .rotated_x(90) \
        .translated_z(pivot_z)
    # Nut
    bogie -= regular_polygon2d(n=6, d=nut_outer_diameter) \
        .rotated(30) \
        .extruded(upper_thickness, symmetrical=False) \
        .rotated_x(-90) \
        .translated(0, nut_plane_y, pivot_z)
    # Space for the arm
    cutout = polygon2d([(0, 0),
                        cutout_tmp_point,
                        (-cutout_tmp_point[0], cutout_tmp_point[1])]) \
        .offset(arm_thickness / 2)
    cutout += circle(d=arm_thickness + 2 * arm_clearance)
    bogie -= cutout \
        .extruded(arm_cutout_thickness) \
        .rotated_x(90) \
        .translated_z(pivot_z)
    # Wheel and bearing cutouts
    cutout = polygon2d([(-bearing_shoulder_radius, -upper_thickness),
                        (bearing_shoulder_radius, -upper_thickness),
                        (bearing_shoulder_radius, -bearing_thickness),
                        (bearing_radius, -bearing_thickness),
                        (bearing_radius, 0),
                        (wheel_cutout_diameter / 2, (upper_thickness - lower_thickness) / 2 - wheel_cutout_angled_part),
                        (wheel_cutout_diameter / 2 + (upper_thickness - lower_thickness) / 2 + wheel_cutout_angled_part, upper_thickness - lower_thickness),
                        (-bearing_shoulder_radius, upper_thickness - lower_thickness)])
    cutout += rectangle(wheel_diameter, 2 * upper_thickness) \
        .translated_y(upper_thickness + wheel_clearance) \
        .offset(wheel_clearance)
    cutout = cutout.translated_y(lower_thickness / 2)
    # Mirror to cover both wheel faces, then revolve around the axle axis.
    cutout = cutout + cutout.mirrored_y()
    cutout = cutout.revolved()
    for x in [-1, 1]:
        bogie -= cutout.translated_x(x * wheel_spacing / 2)
    # bottom lightening angles
    base_thickness = upper_thickness / 2 - (pivot_z - bottom_z - pivot_protected_diameter / 2)
    for y in [-1, 1]:
        bogie -= half_space().rotated_x(90 + y * 135).translated(0, -y * base_thickness, bottom_z)
    return bogie
def spring_cutout_generator(spring_angle, r0, r1, chamfer0, chamfer1=0):
    """ Make a shape that cuts out a space for the spring to move in.
    spring_angle is the angle that the spring will move relative to the cutout,
    r0 is the distance from origin that the cutout starts and r1 is distance from origin
    where it ends.
    chamfer0 is subtracted from r0 middle of the cutout height.
    chamfer1 is added to r1 in the middle of the cutout height"""
    assert 0 < spring_angle < 180
    r0 += arm_clearance
    r1 -= arm_clearance
    spring_r = vitamins.spring.diameter / 2 + arm_clearance
    cos = math.cos(math.radians(spring_angle)) * 2 * r1
    sin = math.sin(math.radians(spring_angle)) * 2 * r1
    # Wedge polygon sweeping from 0 to spring_angle (extra corner past 90 deg).
    points1 = [(0, 0), (2 * r1, 0)]
    if spring_angle > 90:
        points1.append((2 * r1, 2 * r1))
    points1.append((cos, sin))
    # Inner keep-out polygon around the pivot.
    points2 = [(0, 0), (0, -r1)]
    if spring_angle < 90:
        points2.append((-r1, 0))
    points2.append((-sin, cos))
    p = polygon2d(points1)
    # Swept volume of the spring body minus the protected zone near the pivot.
    s = p.extruded(0).offset(spring_r) + \
        p.offset(spring_r).extruded(spring_r, symmetrical=False) - \
        polygon2d(points2).offset(r0 - chamfer0).extruded(float("inf"))
    # Revolved chamfer profile limiting the cutout radially.
    chamfer_poly = polygon2d([(r0 - chamfer0, 2 * vitamins.spring.diameter),
                              (r0 - chamfer0, 0),
                              (r0, -spring_r),
                              (r1, -spring_r),
                              (r1 + chamfer1, 0),
                              (r1 + chamfer1, 2 * vitamins.spring.diameter)])
    mask = chamfer_poly.revolved().rotated_x(90)
    mask &= p.extruded(float("inf"))
    # Flat chamfer walls at both angular ends of the sweep.
    mask += chamfer_poly.extruded(vitamins.spring.diameter).translated_z(0.95 * vitamins.spring.diameter / 2).rotated_x(90)
    mask += chamfer_poly.extruded(vitamins.spring.diameter).translated_z(-0.95 * vitamins.spring.diameter / 2).rotated_x(90).rotated_z(spring_angle)
    return s & mask
def arm_generator(thickness, pivot_thickness, width,
                  bogie_side_width,
                  arm_length, spring_arm_length,
                  arm_neutral_angle, arm_up_angle,
                  knee_height,
                  knee_angle,
                  pivot_mount_diameter, pivot_mount_height,
                  pivot_mount_screw_head_diameter, pivot_mount_screw_head_countersink,
                  spring_mount_diameter, spring_mount_height,
                  bogie_pivot_mount_diameter,
                  thin_wall, thick_wall,
                  hole_blinding_layer_height):
    """Generate the printable suspension arm: a bent 2D outline (pivot ->
    knee -> bogie pivot -> spring point) extruded to width, with the spring
    cutout, mounting holes and the pivot screw head countersink subtracted.

    Relies on module-level globals: spring_up_angle, arm_down_angle,
    spring_anchor_point, spring_down_point, arm_clearance, vitamins, util,
    tools, Vector.

    NOTE(review): parameters `bogie_side_width` and `thin_wall` do not appear
    to be used in this body — confirm whether they are vestigial.
    """
    # Angle of the spring mount point relative to the arm's own frame.
    spring_point_angle = spring_up_angle - arm_up_angle
    bogie_pivot = (arm_length, 0)
    spring_point = (spring_arm_length * math.cos(spring_point_angle), spring_arm_length * math.sin(spring_point_angle))
    # Direction of the knee bend when the arm sits at its neutral angle.
    knee_mid_angle = math.pi / 2 - arm_neutral_angle
    assert pivot_mount_height >= spring_mount_height
    # Two knee outline points straddling knee_mid_angle by knee_angle/2
    # (the first one is pushed out by an extra 0.2*thickness).
    knee_point1 = (bogie_pivot[0] + (knee_height + 0.2 * thickness) * math.cos(knee_mid_angle - math.radians(knee_angle / 2)),
                   bogie_pivot[1] + (knee_height + 0.2 * thickness) * math.sin(knee_mid_angle - math.radians(knee_angle / 2)))
    knee_point2 = (bogie_pivot[0] + knee_height * math.cos(knee_mid_angle + math.radians(knee_angle / 2)),
                   bogie_pivot[1] + knee_height * math.sin(knee_mid_angle + math.radians(knee_angle / 2)))
    # Tangent points blending the thicker pivot boss into the arm outline.
    _, p1 = util.outer_tangent(Vector(*knee_point1), 0,
                               Vector(0, 0), (pivot_thickness - thickness) / 2)
    p2, _ = util.outer_tangent(Vector(0, 0), (pivot_thickness - thickness) / 2,
                               Vector(*spring_point), 0)
    outline = polygon2d([(p1.x, p1.y), knee_point1, bogie_pivot, knee_point2, spring_point, (p2.x, p2.y)]) \
        .offset(thickness / 2)
    outline += circle(d=pivot_thickness)
    arm = outline.extruded(width + spring_mount_height, symmetrical=False)
    # Raised cone around the pivot up to the pivot mount height.
    arm += tools.cone(height=pivot_mount_height - spring_mount_height,
                      upper_diameter=pivot_mount_diameter + 2 * thick_wall,
                      lower_diameter=pivot_thickness,
                      base_height=width + spring_mount_height)
    spring_mount_top_diameter = thickness / 2
    spring_cutout_r0 = spring_mount_height + spring_mount_top_diameter / 2
    # Angle of the spring at full arm droop, relative to the arm frame,
    # defines how far the spring cutout must sweep.
    spring_down_vector = spring_anchor_point - spring_down_point
    rel_spring_down_angle = math.degrees(math.atan2(spring_down_vector.y, spring_down_vector.x) - arm_down_angle)
    arm -= spring_cutout_generator(90 + rel_spring_down_angle,
                                   spring_cutout_r0,
                                   2 * arm_length,
                                   (vitamins.spring.diameter / 2 + arm_clearance)) \
        .rotated_z(-90) \
        .translated(spring_point[0], spring_point[1], width + vitamins.spring.diameter / 2 + arm_clearance)
    # Through holes: pivot, bogie pivot, spring mount.
    holes = circle(d=pivot_mount_diameter) + \
        circle(d=bogie_pivot_mount_diameter).translated(*bogie_pivot) + \
        circle(d=spring_mount_diameter).translated(*spring_point)
    arm -= holes.extruded(float("inf"))
    # Pivot screw head countersink
    arm -= cylinder(d=pivot_mount_screw_head_diameter, h=2*pivot_mount_screw_head_countersink)
    if hole_blinding_layer_height:
        # Single sacrificial layer bridging the countersink so it prints
        # without support; drilled out after printing.
        arm += cylinder(d=pivot_mount_screw_head_diameter,
                        h=hole_blinding_layer_height,
                        symmetrical=False).translated_z(pivot_mount_screw_head_countersink)
    return arm
# Inner road wheel half; the last three arguments presumably describe the
# lock-nut pocket (size, depth, hex=True) — TODO confirm against
# road_wheel_generator's signature (defined above this chunk).
inner_road_wheel = road_wheel_generator(wheel_diameter,
                                        half_wheel_width,
                                        vitamins.small_bearing.id,
                                        wheel_clearance,
                                        vitamins.small_bearing.shoulder_size,
                                        vitamins.o_ring.minor_diameter,
                                        4 * parameters.extrusion_width,
                                        parameters.layer_height,
                                        road_wheel_screw.lock_nut.s,
                                        road_wheel_screw.lock_nut.height + road_wheel_screw.diameter / 6,
                                        True
                                        ).make_part("inner_road_wheel", ["3d_print"])
# Outer road wheel half; differs only in holding the screw head instead of
# the lock nut.
outer_road_wheel = road_wheel_generator(wheel_diameter,
                                        half_wheel_width,
                                        vitamins.small_bearing.id,
                                        wheel_clearance,
                                        vitamins.small_bearing.shoulder_size,
                                        vitamins.o_ring.minor_diameter,
                                        4 * parameters.extrusion_width,
                                        parameters.layer_height,
                                        road_wheel_screw.head_diameter,
                                        road_wheel_screw.head_height,
                                        False
                                        ).make_part("outer_road_wheel", ["3d_print"])
# Printable bogie body connecting two road wheel pairs.
bogie = bogie_generator(bogie_wheel_spacing,
                        bogie_width, wheel_width,
                        vitamins.small_bearing.od, vitamins.small_bearing.thickness, vitamins.small_bearing.shoulder_size,
                        4 * parameters.extrusion_width, 6 * parameters.extrusion_width,
                        bogie_pivot_z,
                        wheel_diameter,
                        arm_thickness,
                        arm_width + pivot_flat_clearance,
                        bogie_arm_cutout_angle,
                        vitamins.shoulder_screw,
                        arm_knee_height,
                        arm_clearance,
                        wheel_clearance,
                        ).make_part("bogie", ["3d_print"])
# Right-hand suspension arm; the left arm is its mirror image.
arm_right = arm_generator(arm_thickness, arm_pivot_thickness, arm_width,
                          arm_width - pivot_flat_clearance,
                          arm_length, spring_arm_length,
                          arm_neutral_angle, arm_up_angle,
                          arm_knee_height,
                          arm_knee_angle,
                          vitamins.shoulder_screw.diameter2 + pivot_round_clearance,
                          pivot_guide_length - arm_width + pivot_screw_head_countersink,
                          vitamins.shoulder_screw.head_diameter + pivot_flat_clearance,
                          pivot_screw_head_countersink,
                          vitamins.spring.bottom_mount_id,
                          (vitamins.spring.diameter - vitamins.spring.bottom_mount_thickness) / 2 + arm_clearance,
                          vitamins.shoulder_screw.diameter2 + pivot_round_clearance,
                          3 * parameters.extrusion_width,
                          6 * parameters.extrusion_width,
                          parameters.layer_height,
                          ).make_part("suspension_arm_right", ["3d_print"])
arm_left = arm_right.shape().mirrored_x().make_part("suspension_arm_left", ["3d_print"])
# Complete bogie with four wheel halves plus its bearings, screws and o-rings.
bogie_assembly = codecad.assembly("bogie_assembly",
                                  [bogie.translated_z(wheel_diameter / 2),
                                   inner_road_wheel.rotated_x(90).translated(bogie_wheel_spacing / 2,
                                                                             wheel_width / 2,
                                                                             wheel_diameter / 2),
                                   inner_road_wheel.rotated_x(90).translated(-bogie_wheel_spacing / 2,
                                                                             wheel_width / 2,
                                                                             wheel_diameter / 2),
                                   outer_road_wheel.rotated_x(-90).translated(bogie_wheel_spacing / 2,
                                                                              -wheel_width / 2,
                                                                              wheel_diameter / 2),
                                   outer_road_wheel.rotated_x(-90).translated(-bogie_wheel_spacing / 2,
                                                                              -wheel_width / 2,
                                                                              wheel_diameter / 2)] +
                                  [vitamins.small_bearing] * 4 +
                                  [road_wheel_screw, road_wheel_screw.lock_nut] * 2 +
                                  [vitamins.o_ring] * 8
                                  )
# Y offset of a right suspension arm base in an assembly.
arm_base_offset = pivot_guide_length + pivot_screw_head_countersink
# Offset for matching a piece of track with right suspension assembly
track_offset = Vector(arm_length * math.cos(arm_neutral_angle),
                      arm_base_offset - arm_width / 2,
                      arm_length * math.sin(arm_neutral_angle) - bogie_pivot_z - wheel_diameter / 2)
# Pivot mating surface is at coordinates 0, 0, 0 for both left and right arm
# Position of the matching surface for spring anchor point on the right side
# This one is rotated in print orientation!
spring_anchor_point = Vector(spring_anchor_point.x, spring_anchor_point.y,
                             arm_base_offset - (arm_width + arm_clearance + vitamins.spring.diameter / 2 + vitamins.spring.top_mount_thickness / 2))
def suspension_generator(right, arm_angle=arm_neutral_angle, bogie_angle_fraction=None):
    """Assemble one complete suspension unit (arm + bogie + spring + screws).

    right selects the right/left mirrored variant; arm_angle poses the arm;
    bogie_angle_fraction (0..1, None = neutral) poses the bogie within its
    swing range at the given arm pose.
    """
    # Spring upper end position for the requested arm pose; the spring
    # vitamin is instantiated at the resulting compressed length.
    spring_point = get_spring_point(spring_arm_length, arm_up_angle - arm_angle)
    v = spring_point.flattened() - spring_anchor_point.flattened()
    length = abs(v)
    spring_degrees = 90 - math.degrees(math.atan2(v.y, v.x))
    spring = vitamins.spring(length)
    degrees = -math.degrees(arm_angle)
    if bogie_angle_fraction is None:
        bogie_degrees = 0
    else:
        # Interpolate between the swing extremes, both shifted by how far
        # the arm is from its neutral angle.
        low = -bogie_swing_angle + (arm_angle - arm_neutral_angle)
        high = bogie_swing_angle + (arm_angle - arm_neutral_angle)
        bogie_degrees = low + (high - low) * bogie_angle_fraction
        bogie_degrees = math.degrees(bogie_degrees)
    if right:
        arm = arm_right
        bogie = bogie_assembly.rotated_z(180)
        multiplier = 1
    else:
        # Left side reuses the mirrored arm and flips offsets along Y.
        arm = arm_left.rotated_y(180)
        bogie = bogie_assembly
        multiplier = -1
    asm = codecad.assembly("suspension_assembly_" + ("right" if right else "left"),
                           [arm \
                               .rotated_x(90) \
                               .rotated_y(degrees) \
                               .translated_y(multiplier * arm_base_offset),
                            bogie \
                               .translated_z(-bogie_pivot_z - wheel_diameter / 2) \
                               .rotated_y(-degrees - bogie_degrees) \
                               .translated_x(arm_length) \
                               .rotated_y(degrees) \
                               .translated_y(multiplier * track_offset.y),
                            spring \
                               .rotated_y(spring_degrees) \
                               .translated(spring_anchor_point.x,
                                           multiplier * (spring_anchor_point.z + vitamins.spring.top_mount_thickness / 2),
                                           spring_anchor_point.y),
                            # Loose fasteners included so the BOM is complete.
                            road_wheel_screw,
                            road_wheel_screw.lock_nut,
                            spring_screw,
                            spring_screw.lock_nut,
                            vitamins.shoulder_screw,
                            vitamins.shoulder_screw,
                            vitamins.shoulder_screw.lock_nut,
                            vitamins.shoulder_screw.lock_nut,
                            vitamins.m5x20_screw,
                            vitamins.m5x20_screw.lock_nut,
                            ])
    return asm
suspension_assembly_left = suspension_generator(False)
suspension_assembly_right = suspension_generator(True)

if __name__ == "__main__":
    def p(name, f=lambda x: x):
        # Print a module-level value, optionally transformed by f.
        print(name, f(globals()[name]))
    p("arm_length")
    p("bogie_pivot_up_y")
    p("arm_up_angle", math.degrees)
    p("arm_neutral_angle", math.degrees)
    p("arm_down_angle", math.degrees)
    p("suspension_travel")

    def plot_wheel_forces():
        # Plot residual bogie force against suspension travel over the
        # arm's full swing (matplotlib imported lazily: debug-only).
        import matplotlib.pyplot as plt
        import numpy
        angles = numpy.linspace(arm_down_angle, arm_up_angle)
        travels = (numpy.sin(angles) - math.sin(arm_neutral_angle)) * arm_length
        forces = numpy.vectorize(lambda angle: 1e3 * get_wheel_force(arm_length, arm_up_angle, angle))(angles)
        plt.plot(travels, forces)
        plt.xlabel("Travel distance")
        plt.ylabel("Residual bogie force (force - {:.0f} g)".format(1e3 * parameters.design_weight / bogie_count))
        plt.grid(True)
        plt.show()
    plot_wheel_forces()

    o = codecad.assembly("suspension_preview",
                         [suspension_assembly_left.translated_x(-suspension_spacing),
                          suspension_assembly_right,
                          suspension_assembly_left.translated_x(suspension_spacing)])
    # NOTE(review): this immediately overrides the preview assembly built
    # above — looks like a debugging leftover; confirm which object should
    # actually be rendered.
    o = suspension_assembly_left
    codecad.commandline_render(o)
|
# -*- coding: utf-8 -*-
import scrapy
import copy
from selenium import webdriver
class WangyiSpider(scrapy.Spider):
    """Crawl four NetEase news sections (domestic, international, military,
    aviation) and follow every article card through to its detail page.

    A Selenium Chrome driver is created on ``self.bro``; presumably it is
    used by a downloader middleware to render the dynamically loaded
    section pages — verify against the project's middlewares.
    """
    name = 'wangyi'
    # allowed_domains = ['www.wangyi.com']
    start_urls = ['https://news.163.com/']

    def __init__(self):
        # NOTE(review): hard-coded local driver path; consider making this
        # configurable via settings.
        self.bro = webdriver.Chrome(r'F:\chromedriver_win32\chromedriver.exe')

    def closed(self, spider):
        # BUG FIX: Scrapy's teardown hook is spelled `closed`; the original
        # `closd` was never called, so the Chrome process leaked.
        self.bro.quit()

    def parse(self, response):
        """Extract the four target section links from the homepage nav."""
        li_list = response.xpath('//div[@class="ns_area list"]/ul/li')
        for li in li_list:
            url = li.xpath('./a/@href').extract_first()
            # Guard against missing text nodes instead of crashing on None.
            title = (li.xpath('./a/text()').extract_first() or '').strip()
            # Keep only: domestic, international, military, aviation.
            if title in ['国内', '国际', '军事', '航空'] and url:
                yield scrapy.Request(url=url, callback=self.parse_content,
                                     meta={'title': title})

    def parse_content(self, response):
        """Yield one item/request per article card on a section page."""
        title = response.meta.get('title')
        div_list = response.xpath("//div[@class='ndi_main']/div")
        self.logger.debug('%s: %d article divs', title, len(div_list))
        for div in div_list:
            # BUG FIX: <img> has no href attribute — the article link lives
            # on the enclosing <a>. The original './a/img/@href' always
            # returned None, so no detail request was ever issued.
            article_url = div.xpath('./a/@href').extract_first()
            # BUG FIX: use './/' so the keyword lookup stays relative to this
            # card; '//' searched the whole document for every card.
            keywords = div.xpath('.//div[@class="keywords"]//text()').extract()
            item = {
                'group': title,
                'img_url': div.xpath('./a/img/@src').extract_first(),
                'article_url': article_url,
                'head': div.xpath('./a/img/@alt').extract_first(),
                'keywords': keywords,
                # Joined keyword text was computed but never stored before.
                'content': "".join(i.strip() for i in keywords),
            }
            if article_url:
                yield scrapy.Request(
                    url=article_url,
                    callback=self.parse_detail,
                    meta={'item': copy.deepcopy(item)},
                )

    def parse_detail(self, response):
        # Detail-page parsing is not implemented yet.
        pass
|
# Created By: Jeenal Suthar
# Created Date:
# Last Modified: 22/01/2020
# Description: This module provide Common Configuration Details.
import os.path, time, socket, calendar
from Common.Utils import str_to_bool
from configparser import ConfigParser
class ConfigManagerBase(object):
    """Singleton accessor for a per-module ``<module>.conf`` file.

    The configuration file is located relative to the calling module as
    ``../<module>/<module>.conf``. Typed getters expose the ``[logging]``
    section plus host/time helpers.
    """

    _instance = None
    filename = ''

    # Map of config log-level names to stdlib ``logging`` numeric levels.
    _LOG_LEVELS = {'NOTSET': 0, 'DEBUG': 10, 'INFO': 20,
                   'WARNING': 30, 'ERROR': 40, 'CRITICAL': 50}

    def __init__(self, filepath):
        """Virtually private constructor.

        :param filepath: ``__file__`` of the calling module; its directory
            name determines which ``<module>.conf`` is read.
        :raises Exception: if the singleton instance already exists.
        """
        self.fullfilepath = os.path.dirname(filepath)
        self.moduleName = os.path.split(self.fullfilepath)[-1]
        self._configFillePath = '../' + self.moduleName + '/' + self.moduleName + '.conf'
        if ConfigManagerBase._instance is not None:
            raise Exception("This class is a singleton!")
        self.config = ConfigParser()
        self.config.read(self._configFillePath)
        # BUG FIX: register the created instance. Previously ``_instance``
        # was never assigned, so getInstance() always raised.
        ConfigManagerBase._instance = self

    @staticmethod
    def getInstance():
        """Return the singleton created by ``__init__``.

        :raises ValueError: if no instance has been created yet.
        """
        if ConfigManagerBase._instance is None:
            raise ValueError("ConfigManagerBase instance was not created!")
        return ConfigManagerBase._instance

    def get_host_ip(self):
        """Return this host's primary non-loopback IPv4 address."""
        # Prefer a non-loopback address from resolving our own hostname.
        ips = [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2]
               if not ip.startswith("127.")]
        if ips:
            self.host_ip = ips[0]
        else:
            # Fall back to the UDP-connect trick (no packet is actually
            # sent). Unlike the original one-liner, this only opens the
            # socket when needed and always closes it.
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            try:
                s.connect(('8.8.8.8', 53))
                self.host_ip = s.getsockname()[0]
            finally:
                s.close()
        return self.host_ip

    def get_timestamp(self):
        """Return the current UTC time as a Unix-timestamp string."""
        # The original's unused datetime computation was dead code; removed.
        return str(calendar.timegm(time.gmtime()))

    def get_log_name(self):
        """Return ``[logging] log_name``."""
        self.log_name = self.config.get('logging', 'log_name')
        return self.log_name

    def get_log_level(self):
        """Return the numeric logging level for ``[logging] log_level``.

        Unknown names map to 0 (NOTSET), matching the original fallback.
        """
        self.LOG_LEVEL = self.config.get('logging', 'log_level')
        return self._LOG_LEVELS.get(self.LOG_LEVEL, 0)

    def get_log_file(self):
        """Return ``[logging] log_file``."""
        self.LOG_FILE = self.config.get('logging', 'log_file')
        return self.LOG_FILE

    def get_log_file_maxbytpe(self):
        """Return ``[logging] log_file_maxbytpe`` converted from MiB to bytes."""
        self.log_file_maxbytpe = self.config.get('logging', 'log_file_maxbytpe')
        return 1048576 * int(self.log_file_maxbytpe)

    def get_log_file_numoffile(self):
        """Return ``[logging] log_file_numoffile`` as an int."""
        self.log_file_numoffile = int(self.config.get('logging', 'log_file_numoffile'))
        return self.log_file_numoffile

    def get_enable_console_handler(self):
        """Return ``[logging] enable_console_handler`` as a bool."""
        self.enable_console_handler = self.config.get('logging', 'enable_console_handler')
        return str_to_bool(self.enable_console_handler)

    def messageformat(self, msg, status, enable):
        """Append ``Error:<status>`` to *msg* when *enable* is truthy."""
        if enable:
            msg = msg + 'Error:' + status
        return msg

    def get_console_handler_level(self):
        """Return ``[logging] console_handler_level``."""
        self.console_handler_level = self.config.get('logging', 'console_handler_level')
        return self.console_handler_level

    def get_enable_syslog_handler(self):
        """Return ``[logging] enable_syslog_handler`` as a bool."""
        self.enable_syslog_handler = self.config.get('logging', 'enable_syslog_handler')
        return str_to_bool(self.enable_syslog_handler)

    def get_syslog_address(self):
        """Return ``[logging] syslog_address``."""
        self.syslog_address = self.config.get('logging', 'syslog_address')
        return self.syslog_address

    def get_syslog_handler_level(self):
        """Return ``[logging] syslog_handler_level``."""
        self.syslog_handler_level = self.config.get('logging', 'syslog_handler_level')
        return self.syslog_handler_level
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'IQAUTOCLICK.ui'
#
# Created by: PyQt5 UI code generator 5.15.1
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """pyuic5-generated UI for the IQAUTOCLICK main window.

    Prefer regenerating from 'IQAUTOCLICK.ui' over hand edits; this copy
    only fixes the broken string escaping around "Donate" in the label_21
    HTML (unescaped quotes terminated the string literal).
    """

    def setupUi(self, MainWindow):
        """Create, style and wire every widget on *MainWindow*."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(310, 670)
        MainWindow.setMaximumSize(QtCore.QSize(310, 670))
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap("C:/Users/estmo/.designer/backup/icon/icon.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        MainWindow.setWindowIcon(icon)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Login row: username, password, LOGIN button.
        self.lineEdit = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit.setGeometry(QtCore.QRect(10, 10, 111, 31))
        self.lineEdit.setObjectName("lineEdit")
        self.lineEdit_2 = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit_2.setGeometry(QtCore.QRect(130, 10, 111, 31))
        self.lineEdit_2.setEchoMode(QtWidgets.QLineEdit.Password)
        self.lineEdit_2.setObjectName("lineEdit_2")
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setGeometry(QtCore.QRect(250, 10, 51, 31))
        self.pushButton.setStyleSheet("font: 75 10pt \"MS Shell Dlg 2\";")
        self.pushButton.setObjectName("pushButton")
        # BUY (green) / SELL (red) action buttons.
        self.pushButton_2 = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_2.setGeometry(QtCore.QRect(40, 570, 111, 51))
        self.pushButton_2.setStyleSheet("font: 75 28pt \"MS Shell Dlg 2\";\n"
                                        "background-color: rgb(0, 170, 0);\n"
                                        "color: rgb(255, 255, 255);")
        self.pushButton_2.setObjectName("pushButton_2")
        self.pushButton_3 = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_3.setGeometry(QtCore.QRect(160, 570, 111, 51))
        self.pushButton_3.setStyleSheet("font: 75 28pt \"MS Shell Dlg 2\";\n"
                                        "background-color: rgb(255, 0, 0);\n"
                                        "color: rgb(255, 255, 255);")
        self.pushButton_3.setObjectName("pushButton_3")
        # "Settings" group.
        self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox.setGeometry(QtCore.QRect(10, 90, 291, 201))
        self.groupBox.setStyleSheet("color: rgb(255, 255, 255);\n"
                                    "font: 75 16pt \"MS Shell Dlg 2\";\n"
                                    "background-color: rgb(0, 0, 0);")
        self.groupBox.setObjectName("groupBox")
        self.lineEdit_3 = QtWidgets.QLineEdit(self.groupBox)
        self.lineEdit_3.setGeometry(QtCore.QRect(10, 60, 101, 31))
        self.lineEdit_3.setStyleSheet("background-color: rgb(255, 255, 255);\n"
                                      "color: rgb(0, 0, 0);")
        self.lineEdit_3.setObjectName("lineEdit_3")
        self.label_2 = QtWidgets.QLabel(self.groupBox)
        self.label_2.setGeometry(QtCore.QRect(10, 40, 81, 31))
        self.label_2.setStyleSheet("font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_2.setObjectName("label_2")
        self.lineEdit_4 = QtWidgets.QLineEdit(self.groupBox)
        self.lineEdit_4.setGeometry(QtCore.QRect(170, 60, 31, 31))
        self.lineEdit_4.setStyleSheet("background-color: rgb(255, 255, 255);\n"
                                      "color: rgb(0, 0, 0);")
        self.lineEdit_4.setObjectName("lineEdit_4")
        self.label_3 = QtWidgets.QLabel(self.groupBox)
        self.label_3.setGeometry(QtCore.QRect(120, 60, 31, 31))
        self.label_3.setStyleSheet("font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_3.setObjectName("label_3")
        self.lineEdit_5 = QtWidgets.QLineEdit(self.groupBox)
        self.lineEdit_5.setGeometry(QtCore.QRect(250, 60, 31, 31))
        self.lineEdit_5.setStyleSheet("background-color: rgb(255, 255, 255);\n"
                                      "color: rgb(0, 0, 0);")
        self.lineEdit_5.setObjectName("lineEdit_5")
        self.label_4 = QtWidgets.QLabel(self.groupBox)
        self.label_4.setGeometry(QtCore.QRect(220, 60, 21, 31))
        self.label_4.setStyleSheet("font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_4.setObjectName("label_4")
        self.comboBox_2 = QtWidgets.QComboBox(self.groupBox)
        self.comboBox_2.setGeometry(QtCore.QRect(10, 100, 101, 41))
        self.comboBox_2.setStyleSheet("font: 75 16pt \"MS Shell Dlg 2\";\n"
                                      "color: rgb(0, 0, 0);\n"
                                      "background-color: rgb(255, 255, 255);")
        self.comboBox_2.setObjectName("comboBox_2")
        self.comboBox_2.addItem("")
        self.comboBox_2.addItem("")
        self.label_7 = QtWidgets.QLabel(self.groupBox)
        self.label_7.setGeometry(QtCore.QRect(210, 110, 31, 31))
        self.label_7.setStyleSheet("font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_7.setObjectName("label_7")
        self.lineEdit_6 = QtWidgets.QLineEdit(self.groupBox)
        self.lineEdit_6.setGeometry(QtCore.QRect(250, 110, 31, 31))
        self.lineEdit_6.setStyleSheet("background-color: rgb(255, 255, 255);\n"
                                      "color: rgb(0, 0, 0);")
        self.lineEdit_6.setObjectName("lineEdit_6")
        self.lineEdit_7 = QtWidgets.QLineEdit(self.groupBox)
        self.lineEdit_7.setGeometry(QtCore.QRect(170, 110, 31, 31))
        self.lineEdit_7.setStyleSheet("background-color: rgb(255, 255, 255);\n"
                                      "color: rgb(0, 0, 0);")
        self.lineEdit_7.setObjectName("lineEdit_7")
        self.label_8 = QtWidgets.QLabel(self.groupBox)
        self.label_8.setGeometry(QtCore.QRect(120, 110, 31, 31))
        self.label_8.setStyleSheet("font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_8.setObjectName("label_8")
        self.label_22 = QtWidgets.QLabel(self.groupBox)
        self.label_22.setGeometry(QtCore.QRect(10, 160, 31, 31))
        self.label_22.setStyleSheet("font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_22.setObjectName("label_22")
        self.lineEdit_8 = QtWidgets.QLineEdit(self.groupBox)
        self.lineEdit_8.setGeometry(QtCore.QRect(50, 160, 101, 31))
        self.lineEdit_8.setStyleSheet("background-color: rgb(255, 255, 255);\n"
                                      "color: rgb(0, 0, 0);")
        self.lineEdit_8.setObjectName("lineEdit_8")
        self.label_23 = QtWidgets.QLabel(self.groupBox)
        self.label_23.setGeometry(QtCore.QRect(140, 160, 31, 31))
        self.label_23.setStyleSheet("font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_23.setObjectName("label_23")
        self.lineEdit_9 = QtWidgets.QLineEdit(self.groupBox)
        self.lineEdit_9.setGeometry(QtCore.QRect(180, 160, 101, 31))
        self.lineEdit_9.setStyleSheet("background-color: rgb(255, 255, 255);\n"
                                      "color: rgb(0, 0, 0);")
        self.lineEdit_9.setObjectName("lineEdit_9")
        self.lineEdit_10 = QtWidgets.QLineEdit(self.groupBox)
        self.lineEdit_10.setGeometry(QtCore.QRect(170, 20, 113, 31))
        self.lineEdit_10.setStyleSheet("background-color: rgb(255, 255, 255);\n"
                                       "color: rgb(0, 0, 0);")
        self.lineEdit_10.setObjectName("lineEdit_10")
        self.label_24 = QtWidgets.QLabel(self.groupBox)
        self.label_24.setGeometry(QtCore.QRect(120, 20, 31, 31))
        self.label_24.setStyleSheet("font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_24.setObjectName("label_24")
        # Designer-generated z-order fix-ups inside the settings group.
        self.label_23.raise_()
        self.label_3.raise_()
        self.label_2.raise_()
        self.lineEdit_3.raise_()
        self.lineEdit_4.raise_()
        self.lineEdit_5.raise_()
        self.label_4.raise_()
        self.comboBox_2.raise_()
        self.label_7.raise_()
        self.lineEdit_6.raise_()
        self.lineEdit_7.raise_()
        self.label_8.raise_()
        self.label_22.raise_()
        self.lineEdit_8.raise_()
        self.lineEdit_9.raise_()
        self.lineEdit_10.raise_()
        self.label_24.raise_()
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(130, 50, 171, 31))
        self.label.setStyleSheet("background-color: rgb(0, 0, 0);\n"
                                 "color: rgb(0, 255, 0);\n"
                                 "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label.setText("")
        self.label.setObjectName("label")
        # PRACTICE / REAL account selector.
        self.comboBox = QtWidgets.QComboBox(self.centralwidget)
        self.comboBox.setGeometry(QtCore.QRect(10, 50, 111, 31))
        self.comboBox.setStyleSheet("font: 75 16pt \"MS Shell Dlg 2\";\n"
                                    "color: rgb(255, 255, 255);\n"
                                    "background-color: rgb(0, 0, 0);")
        self.comboBox.setObjectName("comboBox")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        # "Status" group.
        self.groupBox_2 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_2.setGeometry(QtCore.QRect(10, 450, 291, 111))
        self.groupBox_2.setStyleSheet("background-color: rgb(0, 0, 0);\n"
                                      "color: rgb(255, 255, 255);\n"
                                      "font: 75 16pt \"MS Shell Dlg 2\";")
        self.groupBox_2.setObjectName("groupBox_2")
        self.label_5 = QtWidgets.QLabel(self.groupBox_2)
        self.label_5.setGeometry(QtCore.QRect(10, 40, 81, 51))
        self.label_5.setStyleSheet("color: rgb(0, 0, 0);\n"
                                   "background-color: rgb(255, 255, 255);")
        self.label_5.setText("")
        self.label_5.setObjectName("label_5")
        self.label_6 = QtWidgets.QLabel(self.groupBox_2)
        self.label_6.setGeometry(QtCore.QRect(100, 40, 181, 51))
        self.label_6.setStyleSheet("background-color: rgb(255, 0, 0);")
        self.label_6.setText("")
        self.label_6.setObjectName("label_6")
        # "Show" group (win/loss/profit counters).
        self.groupBox_3 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_3.setGeometry(QtCore.QRect(10, 290, 291, 151))
        self.groupBox_3.setStyleSheet("background-color: rgb(0, 0, 0);\n"
                                      "color: rgb(255, 255, 255);\n"
                                      "font: 75 16pt \"MS Shell Dlg 2\";")
        self.groupBox_3.setObjectName("groupBox_3")
        self.label_9 = QtWidgets.QLabel(self.groupBox_3)
        self.label_9.setGeometry(QtCore.QRect(130, 30, 61, 31))
        self.label_9.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                   "color: rgb(0, 255, 0);\n"
                                   "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label_9.setObjectName("label_9")
        self.label_10 = QtWidgets.QLabel(self.groupBox_3)
        self.label_10.setGeometry(QtCore.QRect(200, 30, 81, 31))
        self.label_10.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                    "color: rgb(0, 255, 0);\n"
                                    "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label_10.setText("")
        self.label_10.setObjectName("label_10")
        self.label_11 = QtWidgets.QLabel(self.groupBox_3)
        self.label_11.setGeometry(QtCore.QRect(200, 70, 81, 31))
        self.label_11.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                    "color: rgb(0, 255, 0);\n"
                                    "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label_11.setText("")
        self.label_11.setObjectName("label_11")
        self.label_12 = QtWidgets.QLabel(self.groupBox_3)
        self.label_12.setGeometry(QtCore.QRect(130, 70, 61, 31))
        self.label_12.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                    "color: rgb(0, 255, 0);\n"
                                    "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label_12.setObjectName("label_12")
        self.label_13 = QtWidgets.QLabel(self.groupBox_3)
        self.label_13.setGeometry(QtCore.QRect(10, 70, 111, 31))
        self.label_13.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                    "color: rgb(0, 255, 0);\n"
                                    "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label_13.setText("")
        self.label_13.setObjectName("label_13")
        self.label_14 = QtWidgets.QLabel(self.groupBox_3)
        self.label_14.setGeometry(QtCore.QRect(10, 30, 111, 31))
        self.label_14.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                    "color: rgb(0, 255, 0);\n"
                                    "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label_14.setObjectName("label_14")
        self.label_15 = QtWidgets.QLabel(self.groupBox_3)
        self.label_15.setGeometry(QtCore.QRect(70, 110, 51, 31))
        self.label_15.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                    "color: rgb(0, 255, 0);\n"
                                    "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label_15.setText("")
        self.label_15.setObjectName("label_15")
        self.label_16 = QtWidgets.QLabel(self.groupBox_3)
        self.label_16.setGeometry(QtCore.QRect(10, 110, 51, 31))
        self.label_16.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                    "color: rgb(0, 255, 0);\n"
                                    "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label_16.setObjectName("label_16")
        self.label_17 = QtWidgets.QLabel(self.groupBox_3)
        self.label_17.setGeometry(QtCore.QRect(200, 110, 81, 31))
        self.label_17.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                    "color: rgb(0, 255, 0);\n"
                                    "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label_17.setText("")
        self.label_17.setObjectName("label_17")
        self.label_18 = QtWidgets.QLabel(self.groupBox_3)
        self.label_18.setGeometry(QtCore.QRect(130, 110, 61, 31))
        self.label_18.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                    "color: rgb(0, 255, 0);\n"
                                    "font: 75 16pt \"MS Shell Dlg 2\";")
        self.label_18.setObjectName("label_18")
        # Small headers above the login fields.
        self.label_19 = QtWidgets.QLabel(self.centralwidget)
        self.label_19.setGeometry(QtCore.QRect(10, 0, 71, 16))
        self.label_19.setStyleSheet("background-color: rgb(255, 85, 0);\n"
                                    "color: rgb(255, 255, 255);\n"
                                    "font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_19.setObjectName("label_19")
        self.label_20 = QtWidgets.QLabel(self.centralwidget)
        self.label_20.setGeometry(QtCore.QRect(130, 0, 91, 16))
        self.label_20.setStyleSheet("background-color: rgb(255, 85, 0);\n"
                                    "color: rgb(255, 255, 255);\n"
                                    "font: 75 10pt \"MS Shell Dlg 2\";")
        self.label_20.setObjectName("label_20")
        # Donate overlay (QR image + text), hidden when LOGIN is clicked.
        self.groupBox_4 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_4.setGeometry(QtCore.QRect(10, 80, 291, 561))
        self.groupBox_4.setMouseTracking(False)
        self.groupBox_4.setStyleSheet("color: rgb(0, 0, 255);\n"
                                      "font: 75 16pt \"MS Shell Dlg 2\";\n"
                                      "border-image: url(:/img/QRPP.jpg);\n"
                                      "")
        self.groupBox_4.setTitle("")
        self.groupBox_4.setObjectName("groupBox_4")
        self.label_21 = QtWidgets.QLabel(self.centralwidget)
        self.label_21.setGeometry(QtCore.QRect(10, 50, 291, 581))
        self.label_21.setStyleSheet("background-color: rgb(0, 0, 255);\n"
                                    "color: rgb(0, 255, 0);")
        self.label_21.setObjectName("label_21")
        self.lineEdit.raise_()
        self.lineEdit_2.raise_()
        self.pushButton.raise_()
        self.pushButton_2.raise_()
        self.pushButton_3.raise_()
        self.groupBox.raise_()
        self.label.raise_()
        self.comboBox.raise_()
        self.groupBox_2.raise_()
        self.groupBox_3.raise_()
        self.label_19.raise_()
        self.label_20.raise_()
        self.label_21.raise_()
        self.groupBox_4.raise_()
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 310, 21))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)

        self.retranslateUi(MainWindow)
        # LOGIN hides the donate overlay widgets.
        self.pushButton.clicked.connect(self.groupBox_4.hide)
        self.pushButton.clicked.connect(self.label_21.hide)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Apply all translatable texts to the widgets."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "IQAUTOCLICK V1 - freeware(EDSG)"))
        self.pushButton.setText(_translate("MainWindow", "LOGIN"))
        self.pushButton_2.setText(_translate("MainWindow", "BUY"))
        self.pushButton_3.setText(_translate("MainWindow", "SELL"))
        self.groupBox.setTitle(_translate("MainWindow", "Settings"))
        self.label_2.setText(_translate("MainWindow", "<html><head/><body><p align=\"right\">Currency pair</p><p align=\"right\"><br/></p></body></html>"))
        self.label_3.setText(_translate("MainWindow", "<html><head/><body><p align=\"right\">ATM</p></body></html>"))
        self.label_4.setText(_translate("MainWindow", "<html><head/><body><p align=\"right\">RT</p></body></html>"))
        self.comboBox_2.setItemText(0, _translate("MainWindow", "Digital"))
        self.comboBox_2.setItemText(1, _translate("MainWindow", "Binary"))
        self.label_7.setText(_translate("MainWindow", "<html><head/><body><p align=\"right\">*MG</p></body></html>"))
        self.label_8.setText(_translate("MainWindow", "<html><head/><body><p align=\"right\">RMG</p></body></html>"))
        self.label_22.setText(_translate("MainWindow", "<html><head/><body><p align=\"right\">TP</p></body></html>"))
        self.label_23.setText(_translate("MainWindow", "<html><head/><body><p align=\"right\">SL</p></body></html>"))
        self.label_24.setText(_translate("MainWindow", "<html><head/><body><p align=\"right\">LINE</p></body></html>"))
        self.comboBox.setItemText(0, _translate("MainWindow", "PRACTICE"))
        self.comboBox.setItemText(1, _translate("MainWindow", "REAL"))
        self.groupBox_2.setTitle(_translate("MainWindow", "Status"))
        self.groupBox_3.setTitle(_translate("MainWindow", "Show"))
        self.label_9.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\">WIN</p></body></html>"))
        self.label_12.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\">LOSS</p></body></html>"))
        self.label_14.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\">PROFIT</p></body></html>"))
        self.label_16.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\">RMG</p></body></html>"))
        self.label_18.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\">ATMS</p></body></html>"))
        self.label_19.setText(_translate("MainWindow", "user iq"))
        self.label_20.setText(_translate("MainWindow", "password iq"))
        # BUG FIX: the quotes around Donate were unescaped in the original,
        # which terminated the string literal and broke the file's syntax.
        self.label_21.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\"><span style=\" font-size:18pt; font-weight:600;\">\"Donate\"</span></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/><span style=\" font-size:16pt;\">กสิกรไทย<br/>0288732043<br/>นพรัตน์ คำเสียง<br/>ขอบคุณครัับ</span><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p><p align=\"center\"><br/></p></body></html>"))
import rs_rc
if __name__ == "__main__":
    # Standalone preview: build the window and enter the Qt event loop.
    import sys

    app = QtWidgets.QApplication(sys.argv)
    main_window = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(main_window)
    main_window.show()
    sys.exit(app.exec_())
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Helpers for using libcloud.
"""
from zope.interface import (
Attribute as InterfaceAttribute, Interface, implementer)
from characteristic import attributes, Attribute
from flocker.provision._ssh import run_remotely, run_from_args
def _fixed_OpenStackNodeDriver_to_node(self, api_node):
    """
    This is a copy of
    libcloud.compute.drivers.openstack.OpenStack_1_1_NodeDriver._to_node
    from libcloud 0.16.0 to fix
    https://github.com/apache/libcloud/pull/411

    NOTE(review): kept deliberately close to the upstream source so it can
    be diffed against it — do not restyle.
    """
    from libcloud.utils.networking import is_public_subnet
    from libcloud.compute.base import Node
    from libcloud.compute.types import NodeState

    public_networks_labels = ['public', 'internet']

    public_ips, private_ips = [], []

    # Classify every address the API reports as public or private.
    for label, values in api_node['addresses'].items():
        for value in values:
            ip = value['addr']

            is_public_ip = False

            try:
                public_subnet = is_public_subnet(ip)
            except:
                # IPv6 (bare except kept as in upstream copy)
                public_subnet = False

            # Openstack Icehouse sets 'OS-EXT-IPS:type' to 'floating' for
            # public and 'fixed' for private
            explicit_ip_type = value.get('OS-EXT-IPS:type', None)

            if explicit_ip_type == 'floating':
                is_public_ip = True
            elif explicit_ip_type == 'fixed':
                is_public_ip = False
            elif label in public_networks_labels:
                # Try label next
                is_public_ip = True
            elif public_subnet:
                # Check for public subnet
                is_public_ip = True

            if is_public_ip:
                public_ips.append(ip)
            else:
                private_ips.append(ip)

    # Sometimes 'image' attribute is not present if the node is in an error
    # state
    image = api_node.get('image', None)
    image_id = image.get('id', None) if image else None

    if api_node.get("config_drive", "false").lower() == "true":
        config_drive = True
    else:
        config_drive = False

    return Node(
        id=api_node['id'],
        name=api_node['name'],
        state=self.NODE_STATE_MAP.get(api_node['status'],
                                      NodeState.UNKNOWN),
        public_ips=public_ips,
        private_ips=private_ips,
        driver=self,
        extra=dict(
            hostId=api_node['hostId'],
            access_ip=api_node.get('accessIPv4'),
            # Docs says "tenantId", but actual is "tenant_id". *sigh*
            # Best handle both.
            tenantId=api_node.get('tenant_id') or api_node['tenantId'],
            imageId=image_id,
            flavorId=api_node['flavor']['id'],
            uri=next(link['href'] for link in api_node['links'] if
                     link['rel'] == 'self'),
            metadata=api_node['metadata'],
            password=api_node.get('adminPass', None),
            created=api_node['created'],
            updated=api_node['updated'],
            key_name=api_node.get('key_name', None),
            disk_config=api_node.get('OS-DCF:diskConfig', None),
            config_drive=config_drive,
            availability_zone=api_node.get('OS-EXT-AZ:availability_zone',
                                           None),
        ),
    )
def monkeypatch():
    """
    Patch libcloud 0.16.0, whose ``OpenStackNodeDriver._to_node`` is broken.

    See https://github.com/apache/libcloud/pull/411 for the upstream fix.
    This is a no-op on every other libcloud version.
    """
    from libcloud import __version__
    if __version__ != "0.16.0":
        return
    from libcloud.compute.drivers.openstack import OpenStack_1_1_NodeDriver
    # Replace the broken method with the fixed copy defined above.
    OpenStack_1_1_NodeDriver._to_node = _fixed_OpenStackNodeDriver_to_node
def get_size(driver, size_id):
    """
    Return a ``NodeSize`` corresponding to a given id.

    :param driver: The libcloud driver to query for sizes.
    :param size_id: The id of the size to look up.
    :raise ValueError: If the driver has no size with that id.
    """
    try:
        # ``next`` stops at the first match instead of materialising the
        # whole list just to index ``[0]`` as the original did.
        return next(s for s in driver.list_sizes() if s.id == size_id)
    except StopIteration:
        raise ValueError("Unknown size.", size_id)
def get_image(driver, image_name):
    """
    Return a ``NodeImage`` corresponding to a given name.

    :param driver: The libcloud driver to query for images.
    :param image_name: The name of the image to look up.
    :raise ValueError: If the driver has no image with that name.
    """
    try:
        # ``next`` stops at the first match instead of materialising the
        # whole list just to index ``[0]`` as the original did.
        return next(i for i in driver.list_images() if i.name == image_name)
    except StopIteration:
        raise ValueError("Unknown image.", image_name)
class INode(Interface):
    """
    Interface for node for running acceptance tests.

    Implementations expose the address used to reach the node and the name
    of the distribution installed on it (see ``LibcloudNode`` below).
    """
    address = InterfaceAttribute('ip address for node')
    distribution = InterfaceAttribute('distribution on node')
@implementer(INode)
@attributes([
    # _node gets updated, so we can't make this immutable.
    Attribute('_node'),
    Attribute('_provisioner'),
    'address',
    'distribution',
])
class LibcloudNode(object):
    """
    A node created with libcloud.
    :ivar Node _node: The libcloud node object.
    :ivar LibcloudProvisioner _provisioner: The provisioner that created this
        node.
    :ivar bytes address: The IP address of the node.
    :ivar str distribution: The distribution installed on the node.
    :ivar bytes name: The name of the node.
    """
    def destroy(self):
        """
        Destroy the node.
        """
        self._node.destroy()
    def reboot(self):
        """
        Reboot the node.

        Runs ``sync`` over ssh first, then reboots and waits until the
        provider reports the node running again.
        :return Effect:
        """
        def do_reboot(_):
            # Reboot, then block until the node is back up;
            # wait_until_running returns (node, addresses) pairs.
            self._node.reboot()
            self._node, self.addresses = (
                self._node.driver.wait_until_running([self._node])[0])
            return
        return run_remotely(
            username="root",
            address=self.address,
            commands=run_from_args(["sync"])
        ).on(success=do_reboot)
    def provision(self, package_source, variants=()):
        """
        Provision flocker on this node.
        :param PackageSource package_source: The source from which to install
           flocker.
        :param set variants: The set of variant configurations to use when
            provisioning
        :return Effect: Fires with this node's address on success.
        """
        return self._provisioner.provision(
            node=self,
            package_source=package_source,
            distribution=self.distribution,
            variants=variants,
        ).on(success=lambda _: self.address)
    @property
    def name(self):
        # The provider-assigned name of the underlying libcloud node.
        return self._node.name
@attributes([
    Attribute('_driver'),
    Attribute('_keyname'),
    Attribute('image_names'),
    Attribute('_create_node_arguments'),
    Attribute('provision'),
    Attribute('default_size'),
], apply_immutable=True)
class LibcloudProvisioner(object):
    """
    :ivar libcloud.compute.base.NodeDriver driver: The libcloud driver to use.
    :ivar bytes _keyname: The name of an existing ssh public key configured
        with the cloud provider. The provision step assumes the corresponding
        private key is available from an agent.
    :ivar dict image_names: Dictionary mapping distributions to cloud image
        names.
    :ivar callable _create_node_arguments: Extra arguments to pass to
        libcloud's ``create_node``.
    :ivar callable provision: Function to call to provision a node.
    :ivar str default_size: Name of the default size of node to create.
    """

    def create_node(self, name, distribution,
                    size=None, disk_size=8,
                    keyname=None, metadata=None):
        """
        Create a node.

        :param str name: The name of the node.
        :param str distribution: The name of the distribution to
            install on the node.
        :param str size: The name of the size to use.
        :param int disk_size: The size of disk to allocate.
        :param dict metadata: Metadata to associate with the node.
        :param bytes keyname: The name of an existing ssh public key configured
            with the cloud provider. The provision step assumes the
            corresponding private key is available from an agent.

        :return libcloud.compute.base.Node: The created node.
        """
        if keyname is None:
            keyname = self._keyname
        if size is None:
            size = self.default_size
        # BUG FIX: ``metadata`` used to default to a mutable ``{}``, which is
        # shared across all calls; default to None and build a fresh dict.
        if metadata is None:
            metadata = {}
        image_name = self.image_names[distribution]
        create_node_arguments = self._create_node_arguments(
            disk_size=disk_size)
        node = self._driver.create_node(
            name=name,
            image=get_image(self._driver, image_name),
            size=get_size(self._driver, size),
            ex_keyname=keyname,
            ex_metadata=metadata,
            **create_node_arguments
        )
        # Block until the provider reports the node running; returns
        # (node, addresses) pairs.
        node, addresses = self._driver.wait_until_running([node])[0]
        public_address = addresses[0]
        return LibcloudNode(
            provisioner=self,
            node=node, address=public_address,
            distribution=distribution)
|
import numpy as np
import glob, os, re, sys, json
import matplotlib.pyplot as plt
from PIL import Image
#get_ipython().magic('matplotlib inline')
def _save_bar_chart(values, filename, pad):
    """Save a bar chart of *values* at fixed x positions to *filename*."""
    fig = plt.figure(figsize=(6, 6), dpi=720)
    subplot = fig.add_subplot(1, 1, 1)
    width = 0.05
    colors = ['b', 'r', 'g', 'b', 'r', 'g']
    xVals = [0.2, 0.3, 0.4, 0.9, 1.0, 1.1]
    subplot.bar(xVals, values, width, color=colors)
    subplot.xaxis.set_ticks([])
    subplot.xaxis.set_ticklabels([])
    # Pad the y limits a little so the shortest/tallest bars stay visible.
    subplot.set_ylim(bottom=min(values) - pad, top=max(values) + pad)
    fig.savefig(filename, format='png', dpi=720)
    # Close the figure; otherwise matplotlib keeps every figure alive.
    plt.close(fig)

def plotBars(f1_scores, elapsed_times):
    """Save 'f1-scores.png' and 'elapsed-times.png' bar charts.

    The original duplicated the whole chart-building sequence for each
    figure and never closed the figures; both charts now share one helper.
    """
    _save_bar_chart(f1_scores, 'f1-scores.png', 0.01)
    _save_bar_chart(elapsed_times, 'elapsed-times.png', 10)
def plotConvergence(results):
    """Save 'accuracy.png' plotting training and validation curves per epoch
    (green = validation, blue = training).
    """
    fig = plt.figure(figsize=(6, 6), dpi=720)
    ax = fig.add_subplot(1, 1, 1)
    epochs = list(range(len(results['val_acc'])))
    ax.plot(epochs, results['val_acc'], color='g', label='Validation')
    ax.plot(epochs, results['val_loss'], color='g')
    ax.plot(epochs, results['acc'], color='b')
    ax.plot(epochs, results['loss'], color='b', label='Training')
    ax.legend(loc='upper right', prop={'size': 10})
    fig.savefig('accuracy.png', format='png', dpi=720)
def main():
    """Load per-classifier result JSON files and produce the plots."""
    f1_scores, elapsed_times = [], []
    for classifier in ['svm', 'lstm']:
        for source in ['none', 'fasttext', 'custom-fasttext']:
            name = classifier + '-' + source
            with open(name + '.json') as fh:
                result = json.load(fh)
                f1_scores.append(result['classification_report']['weighted avg']['f1-score'])
                elapsed_times.append(result['elapsed_time'])
                # Only the lstm+fasttext run gets a convergence plot.
                if classifier == 'lstm' and source == 'fasttext':
                    plotConvergence(result['history'])
    plotBars(f1_scores, elapsed_times)

if __name__ == '__main__':
    main()
|
"""Flanker task
At each trial, a set of arrows is presented at the center of the
screen and the participant must look at the middle arrow, then press the left arrow key if the middle arrow points the left, and the right arrow key if the middle arrow points the right.
"""
import random
from expyriment import design, control, stimuli
import expyriment
import os
#Make sure that your working directory is the place the picture are
#os.chdir('c:\\users\\utilisateur\\desktop\\lilas')
MAX_RESPONSE_DELAY = 2000
LEFT_RESPONSE = 'Left arrow'
RIGHT_RESPONSE = 'Right arrow'
REPETITIONS=8
TRIALS = REPETITIONS*4
exp = design.Experiment(name="Flanker Task", text_size=40)
expyriment.io.defaults.outputfile_time_stamp = False
control.initialize(exp)
pictures = {"lcong": expyriment.stimuli.Picture("left_congruent.png"),"rcong": expyriment.stimuli.Picture("right_congruent.png"),"lincong": expyriment.stimuli.Picture("left_incongruent.png"),"rincong": expyriment.stimuli.Picture("right_incongruent.png")}
cue = stimuli.FixCross(size=(50, 50), line_width=4)
blankscreen = stimuli.BlankScreen()
instructions = stimuli.TextScreen("Instructions",
f"""When you'll see a set of arrows, your task is to decide, as quickly as possible, whether the middle arrow points the left or the right.
if it points the left, press '{LEFT_RESPONSE}'
if it points the right, press '{RIGHT_RESPONSE}'
There will be '{TRIALS}' trials in total.
Press the space bar to start.""")
exp.add_data_variable_names(['stimulus', 'respkey', 'RT'])
control.start(skip_ready_screen=True)
instructions.present()
exp.keyboard.wait()
for i in range (0,REPETITIONS):
#randomize order of stimulus presentation
#within each repetition of the 4 pictures
random_pictures={}
items = list(pictures.items()) # List of tuples of (key,values)
random.shuffle(items)
for key, value in items:
random_pictures[key]=value
pictures = random_pictures
for picture in pictures:
pictures[picture].preload()
blankscreen.present()
exp.clock.wait(1000)
cue.present()
exp.clock.wait(500)
pictures[picture].present()
key, rt = exp.keyboard.wait( duration=MAX_RESPONSE_DELAY)
exp.data.add([picture, key, rt])
control.end()
|
# Imports
from rest_framework import serializers
from .models import *
class UserSerializer(serializers.ModelSerializer):
    """Serializer exposing every field of the User model."""
    class Meta:
        model=User
        fields = "__all__"
class PicsSerializer(serializers.ModelSerializer):
    """Serializer exposing every field of the Pics model."""
    class Meta:
        model=Pics
        fields = "__all__"
class GoodsSerializer(serializers.ModelSerializer):
    """Serializer exposing every field of the Goods model."""
    class Meta:
        model=Goods
        fields = "__all__"
class KsGoodsSerializer(serializers.ModelSerializer):
    """Serializer exposing every field of the KaoshiGoods model."""
    class Meta:
        model=KaoshiGoods
        fields = "__all__"
# Serializer for the Comment model
class CommentSerializer(serializers.ModelSerializer):
    """Serializer exposing every field of the Comment model."""
    class Meta:
        model=Comment
        fields = "__all__"
|
#!/usr/bin/env python3
import os
import sys
import getopt
import subprocess
import shlex
def usage():
    """Print command-line usage information for this script."""
    # Local renamed from ``usage`` so it no longer shadows this function.
    usage_text = """
 servers.py -s SERVER -a start|stop|restart
 Usage:
 -h --help  Prints this help
 -a --action    Action to perform (stop|start|restart)
 -s --server    all|jenkins|glassfish|apache
        By default all
    """
    print(usage_text)
def doServer(server, action):
    """Start, stop or restart one named server, or "all" of them.

    'restart' is implemented as a recursive 'stop' followed by 'start'.
    Commands are run as root via sudo.
    """
    # (name, command-template) pairs; ACTION is substituted below.
    # NOTE(review): building this list raises KeyError if CATALINA_HOME is
    # not set in the environment -- confirm that is acceptable.
    servers = [
        ['jenkins', '/etc/init.d/jenkins ACTION'],
        ['glassfish', '/opt/glassfish/bin/asadmin ACTION-domain domain1'],
        ['tomcat', 'sh ' + os.environ['CATALINA_HOME'] + '/bin/catalina.sh ACTION'],
        ['apache', 'service apache2 ACTION'],
        ['postgresql', 'sudo /etc/init.d/postgresql ACTION'],
    ]
    if action == "restart":
        doServer(server, 'stop')
        action = 'start'
    if action == "start":
        # BUG FIX: was ``servers.reverse`` (missing call parentheses), which
        # never reversed the list; start in the reverse of the stop order.
        servers.reverse()
    prc = ''
    for s in servers:
        if server == s[0] or server == "all":
            prc = s[1].replace("ACTION", action)
            print("Executing %s as root" % (prc))
            subprocess.call(shlex.split('sudo ' + prc))
    # ``prc`` stays '' only when no server entry matched.
    if prc == '':
        print('Server name not recognized')
def main(argv):
    """Parse command-line options and dispatch to doServer()."""
    server = 'all'
    action = ''
    try:
        opts, args = getopt.getopt(argv, "ha:s:", ["help", "server=", "action="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            usage()
            sys.exit()
        elif opt in ("-s", "--server"):
            server = arg
        elif opt in ("-a", "--action"):
            action = arg
    if action == "":
        # Typo fixed in the user-facing message: "especified" -> "specified".
        print("Error: Action not specified")
        usage()
        sys.exit(2)
    doServer(server, action)
#####################################################################################
if __name__ == "__main__":
    main(sys.argv[1:])
|
#!/usr/bin/env python
# Blast putative target query genes against nucleotide biosynthetic clusters databases
import os
import subprocess
import glob
import sys
import math
# BGC0000185 tartrolon Polyketide None
id_to_name = {}
target_to_cluster = {}
f = open("mibig_clusters.txt", 'r')
for line in f.readlines():
line = line.strip()
params = line.split("\t")
mibigid = params[0]
name = params[1]
id_to_name[mibigid] = (params[1:])
#blast_files = glob.glob("blast_results/*.out")
blast_files = glob.glob("blast_results/targets.13.*.out")
for blast_file in blast_files:
q = blast_file.split(".")[0]
target = q.split('blast_results/')[1]
f = open(blast_file, 'r')
for line in f.readlines():
line = line.strip()
qseqid, sseqid, pident, length, mismatch, gapopen, qstart, qend, sstart, send, evalue, bitscore = line.split("\t")
pident = float(pident)
mibigid = sseqid.split("|")[0]
if pident > 30:
if mibigid == "BGC0001482":
print "%s\t%s\tNoparams\t%s\t%.2f\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (mibigid, target, sseqid, pident, length, mismatch, gapopen, qstart, qend, sstart, send, evalue, bitscore)
else:
target_to_cluster[(mibigid, target)] = (sseqid, pident, length, mismatch, gapopen, qstart, qend, sstart, send, evalue, bitscore)
sorted(target_to_cluster.keys())
for key in target_to_cluster.keys():
mibigid, target = key
sseqid, pident, length, mismatch, gapopen, qstart, qend, sstart, send, evalue, bitscore = target_to_cluster[key]
if len(id_to_name[mibigid]) == 4:
a, b, c, d = id_to_name[mibigid]
# if "Antibacterial" in d or "Cytotoxic" in d or c != None or c != "Unknown":
print "%s\t%s\t%s\t%s\t%s\t%s\t%s\t%.2f\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (mibigid, target, a, b, c, d, sseqid, pident, length, mismatch, gapopen, qstart, qend, sstart, send, evalue, bitscore)
elif len(id_to_name[mibigid]) == 3:
a, b, c = id_to_name[mibigid]
print "%s\t%s\t%s\t%s\t%s\tNoparams\t%s\t%.2f\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (mibigid, target, a, b, c, sseqid, pident, length, mismatch, gapopen, qstart, qend, sstart, send, evalue, bitscore)
else:
a, b = id_to_name[mibigid]
print "%s\t%s\t%s\t%s\tNoparams\tNoparams\t%s\t%.2f\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (mibigid, target, a, b, sseqid, pident, length, mismatch, gapopen, qstart, qend, sstart, send, evalue, bitscore)
#print len(target_to_cluster)
|
# -*- coding: utf-8 -*-
'''
@Author: Lingyu
@Date: 2021-10-19
@Description:
'''
from .db import db, dbse
from .users import *
def init_app(app):
    """Initialise the ``db`` extension (imported from .db) with *app*.

    NOTE(review): ``init_app`` suggests a Flask extension pattern --
    confirm against the definition of ``db`` in .db.
    """
    db.init_app(app)
# voom_mode_python.py
# Last Modified: 2014-04-13
# VOoM -- Vim two-pane outliner, plugin for Python-enabled Vim 7.x
# Website: http://www.vim.org/scripts/script.php?script_id=2657
# Author: Vlad Irnov (vlad DOT irnov AT gmail DOT com)
# License: CC0, see http://creativecommons.org/publicdomain/zero/1.0/
"""
VOoM markup mode for Python code.
See |voom-mode-python|, ../../doc/voom.txt#*voom-mode-python*
"""
import token, tokenize
import traceback
import vim
def hook_makeOutline(VO, blines):
    """Return (tlines, bnodes, levels) for Body lines blines.
    blines is either Vim buffer object (Body) or list of buffer lines.

    tlines are Tree buffer lines; bnodes are 1-based Body lnums at which
    nodes (headlines) start; levels are the corresponding outline levels.
    """
    Z = len(blines)
    tlines, bnodes, levels = [], [], []
    tlines_add, bnodes_add, levels_add = tlines.append, bnodes.append, levels.append
    #ignore_lnums, func_lnums = get_lnums_from_tokenize(blines)
    try:
        ignore_lnums, func_lnums = get_lnums_from_tokenize(blines)
    except (IndentationError, tokenize.TokenError):
        vim.command("call voom#ErrorMsg('VOoM: EXCEPTION WHILE PARSING PYTHON OUTLINE')")
        # DO NOT print to sys.stderr -- triggers Vim error when default stderr (no PyLog)
        #traceback.print_exc() --this goes to sys.stderr
        #print traceback.format_exc() --ok but no highlighting
        lines = traceback.format_exc().replace("'","''").split('\n')
        for ln in lines:
            vim.command("call voom#ErrorMsg('%s')" %ln)
        return (['= |!!!ERROR: OUTLINE IS INVALID'], [1], [1])
    isHead = False # True if current line is a headline
    indents = [0,] # indents of previous levels
    funcLevels = [] # levels of previous def or class
    indentError = '' # inconsistent indent
    isDecor = 0 # keeps track of decorators, set to lnum of the first decorator
    X = ' ' # char in Tree's column 2 (marks)
    # Walk every Body line; bnode is the 1-based Body lnum of the line.
    for i in xrange(Z):
        bnode = i + 1
        # Skip continuation lines of multi-line strings/expressions.
        if bnode in ignore_lnums: continue
        bline = blines[i]
        bline_s = bline.strip()
        if not bline_s: continue
        if bline_s.startswith('#'):
            # ignore comment lines consisting only of #, -, =, spaces, tabs (separators, pretty headers)
            if not bline_s.lstrip('# \t-='): continue
            isComment = True
        else:
            isComment = False
        bline_ls = bline.lstrip()
        # compute indent and level
        indent = len(bline) - len(bline_ls)
        if indent > indents[-1]:
            indents.append(indent)
        elif indent < indents[-1]:
            # Dedent: pop levels until a matching enclosing indent is found.
            while indents and (indents[-1] > indent):
                indents.pop()
            if indents[-1]==indent:
                indentError = ''
            else:
                # Indent matches no enclosing level -- flag it in the Tree.
                indentError = '!!! '
        lev = len(indents)
        # First line after the end of a class or def block.
        if funcLevels and lev <= funcLevels[-1]:
            isHead = True
            while funcLevels and funcLevels[-1] >= lev:
                funcLevels.pop()
        # First line of a class or def block.
        if bnode in func_lnums:
            isHead = True
            if isDecor:
                # Attribute the node to the first decorator line above it.
                bnode = isDecor
                isDecor = 0
                X = 'd'
            if not funcLevels or (lev > funcLevels[-1]):
                funcLevels.append(lev)
        # Line after a decorator. Not a def or class.
        elif isDecor:
            # ingore valid lines between the first decorator and function/class
            if bline_s.startswith('@') or isComment or not bline_s:
                isHead = False
                continue
            # Invalid line after a decorator (should be syntax error): anything
            # other than another decorator, comment, blank line, def/class.
            # If it looks like a headline, let it be a headline.
            else:
                isDecor = 0
        # Decorator line (the first one if a group of several).
        elif bline_s.startswith('@'):
            isDecor = bnode
            isHead = False
            continue
        # Special comment line (unconditional headline). Not a separator or pretty header line.
        elif isComment:
            if bline_s.startswith('###') or bline_s.startswith('#--') or bline_s.startswith('#=='):
                isHead = True
        if isHead:
            ##########################################
            # Take care of pretty headers like this. #
            ##########################################
            if isComment:
                # add preceding lines to the current node if they consist only of #, =, -, whitespace
                while bnode > 1:
                    bline_p = blines[bnode-2].lstrip()
                    if not bline_p.startswith('#') or bline_p.lstrip('# \t-='):
                        break
                    else:
                        bnode -= 1
            # the end
            isHead = False
            tline = ' %s%s|%s%s' %(X, '. '*(lev-1), indentError, bline_s)
            X = ' '
            tlines_add(tline)
            bnodes_add(bnode)
            levels_add(lev)
    return (tlines, bnodes, levels)
class BLines:
    """Adapt a Vim buffer object (or a plain list of Body lines) to the
    file-like readline() protocol expected by tokenize.generate_tokens().
    """
    def __init__(self, blines):
        # Sequence of lines plus a cursor that starts before the first line.
        self.blines = blines
        self.size = len(blines)
        self.idx = -1

    def readline(self):
        """Return the next line with a trailing newline, or '' at the end."""
        self.idx += 1
        if self.idx == self.size:
            return ''
        return '%s\n' % self.blines[self.idx]
### toktypes of tokens
# Cache the token type codes compared against in get_lnums_from_tokenize().
STRING = token.STRING
NAME = token.NAME
NEWLINE = token.NEWLINE
def get_lnums_from_tokenize(blines):
    """Return dicts. Keys are Body lnums.
    The main purpose is to get list of lnums to ignore: multi-line strings and
    expressions.
    """
    # lnums to ignore: multi-line strings and expressions other than the first line
    ignore_lnums = {}
    # lnums of 'class' and 'def' tokens
    func_lnums = {}
    # True while inside a logical line that started with a NAME token.
    inName = False
    for tok in tokenize.generate_tokens(BLines(blines).readline):
        toktype, toktext, (srow, scol), (erow, ecol), line = tok
        #print token.tok_name[toktype], tok
        if toktype == NAME:
            if not inName:
                inName = True
                # Remember the line on which this logical line started.
                srow_name = srow
            if toktext in ('def','class'):
                func_lnums[srow] = toktext
        elif toktype == NEWLINE and inName:
            # Logical line ended; mark its continuation lines as ignorable.
            inName = False
            if srow_name != erow:
                for i in xrange(srow_name+1, erow+1):
                    ignore_lnums[i] = 0
        elif toktype == STRING:
            # Multi-line string: ignore every line after the first.
            if srow != erow:
                for i in xrange(srow+1, erow+1):
                    ignore_lnums[i] = 0
    return (ignore_lnums, func_lnums)
def get_body_indent(body):
    """Return the string used to indent Body lines: 'tabstop' spaces when
    'expandtab' is set in the Body buffer, otherwise a single tab.
    """
    expandtab = int(vim.eval("getbufvar(%s,'&et')" % body))
    if not expandtab:
        return '\t'
    tabstop = int(vim.eval("getbufvar(%s,'&ts')" % body))
    return ' ' * tabstop
def hook_newHeadline(VO, level, blnum, tlnum):
    """Return (tree_head, bodyLines) for a newly created headline.

    tree_head is the new headline string for the Tree buffer (text after |);
    bodyLines is the list of lines to insert into the Body buffer.
    """
    tree_head = '### NewHeadline'
    # Indent the Body line to match the headline's outline level.
    prefix = get_body_indent(VO.body) * (level - 1)
    return (tree_head, [prefix + tree_head])
#def hook_changeLevBodyHead(VO, h, levDelta):
#"""Increase of decrease level number of Body headline by levDelta."""
#if levDelta==0: return h
def hook_doBodyAfterOop(VO, oop, levDelta, blnum1, tlnum1, blnum2, tlnum2, blnumCut, tlnumCut):
    # this is instead of hook_changeLevBodyHead()
    # Re-indent the Body region blnum1..blnum2 after an outline operation
    # (move/paste/etc.) changed its level by levDelta.
    #print oop, levDelta, blnum1, tlnum1, blnum2, tlnum2, tlnumCut, blnumCut
    Body = VO.Body
    Z = len(Body)
    ind = get_body_indent(VO.body)
    # levDelta is wrong when pasting because hook_makeOutline() looks at relative indent
    # determine level of pasted region from indent of its first line
    if oop=='paste':
        bline1 = Body[blnum1-1]
        # NOTE: '/' is integer division here (this file targets Python 2;
        # see the xrange below).
        lev = (len(bline1) - len(bline1.lstrip())) / len(ind) + 1
        levDelta = VO.levels[tlnum1-1] - lev
    if not levDelta: return
    indent = abs(levDelta) * ind
    #--- copied from voom_mode_thevimoutliner.py -----------------------------
    # Sanity-check that the region matches the recorded node boundaries.
    if blnum1:
        assert blnum1 == VO.bnodes[tlnum1-1]
        if tlnum2 < len(VO.bnodes):
            assert blnum2 == VO.bnodes[tlnum2]-1
        else:
            assert blnum2 == Z
    # dedent (if possible) or indent every non-blank line in Body region blnum1,blnum2
    blines = []
    for i in xrange(blnum1-1,blnum2):
        line = Body[i]
        if not line.strip():
            # Preserve blank lines unchanged.
            blines.append(line)
            continue
        if levDelta > 0:
            line = '%s%s' %(indent,line)
        elif levDelta < 0 and line.startswith(indent):
            line = line[len(indent):]
        blines.append(line)
    # replace Body region
    Body[blnum1-1:blnum2] = blines
    assert len(Body)==Z
|
from django.db import models
# Create your models here.
class ProductCategory(models.Model):
    """A category that groups financial products (see ``Product`` below)."""
    name = models.CharField(
        verbose_name='наименование',
        unique=True,
        max_length=128,
    )
    short_desc = models.CharField(
        max_length=256,
        blank=True,
        verbose_name='краткое описание',
    )
    description = models.TextField(
        verbose_name='описание',
        blank=True,
    )
    is_active = models.BooleanField(
        verbose_name='активна',
        default=True
    )
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.name
    def invited_users(self, user): # --> User queryset
        # NOTE(review): ``User`` is not imported in this file, and this model
        # has no ``user_id``/``created_at`` attributes (the field is named
        # ``created``). This method looks copy-pasted from another model and
        # will raise NameError if called -- confirm and fix or remove.
        return User.objects.filter(deep_link=str(self.user_id), created_at__gt=self.created_at)
    class Meta:
        verbose_name = 'категория'
        verbose_name_plural = 'категории'
class Product(models.Model):
    """A financial product belonging to a ``ProductCategory``."""
    category = models.ForeignKey(
        ProductCategory,
        on_delete=models.CASCADE,
        verbose_name='категория',
    )
    name = models.CharField(
        verbose_name='наименование фин.продукта',
        max_length=128,
    )
    short_desc = models.CharField(
        max_length=256,
        blank=True,
        verbose_name='краткое описание',
    )
    description = models.TextField(
        verbose_name='описание',
        blank=True,
    )
    is_active = models.BooleanField(
        verbose_name='активный',
        default=True
    )
    def __str__(self):
        return f'{self.name} ({self.category.name})'
    def get_user_products(self, user): # --> queryset
        # NOTE(review): Product declares no ``user`` field above, so this
        # filter should raise FieldError; ``self`` is also unused. Looks
        # copy-pasted -- confirm intent and fix or remove.
        return Product.objects.filter(is_active=True, user=user)
    class Meta:
        verbose_name = 'фин.продукт'
        verbose_name_plural = 'фин.продукты'
|
#!/usr/bin/env python
import os, sys, time
import esgf
# Static configuration for one test submission to the ESGF publication
# service; consumed by client.submit()/scan()/publish() in __main__ below.
submission_config = {
    # Free-form descriptive metadata attached to the submission.
    'metadata': [
        {
            'name': 'name',
            'value': 'Test publication'
        },
        {
            'name': 'organization',
            'value': 'University of Chicago',
        },
        {
            'name': 'firstname',
            'value': 'Lukasz',
        },
        {
            'name': 'lastname',
            'value': 'Lacinski',
        },
        {
            'name': 'description',
            'value': 'Test submission - ESGF/ACME REST API',
        },
        {
            'name': 'datanode',
            'value': 'dev.esgf.anl.gov'
        }
    ],
    # Search facets describing the dataset being published.
    'facets': [
        {
            'name': 'project',
            'value': 'ACME'
        },
        {
            'name': 'data_type',
            'value': 'h0'
        },
        {
            'name': 'experiment',
            'value': 'b1850c5_m1a'
        },
        {
            'name': 'versionnum',
            'value': 'v0_1'
        },
        {
            'name': 'realm',
            'value': 'atm'
        },
        {
            'name': 'regridding',
            'value': 'ne30_g16'
        },
        {
            'name': 'range',
            'value': 'all'
        }
    ],
    # Directory (on the service side) whose dataset files are scanned.
    'scan': {
        'options': '',
        'path': '/esg/gridftp_root/ingestion/test/acme/1'
    },
    # Publish step options; an empty 'files' list with 'files': 'all'
    # publishes everything found by the scan.
    'publish': {
        'options': {
            'files': 'all'
        },
        'files': []
    }
}
if __name__ == '__main__':
    # Establish a session with the publication service
    pwd = os.path.dirname(__file__)
    client = esgf.IngestionClient(config_file=os.path.join(pwd, 'user_client_config.yml'))
    if client is None:
        sys.exit('Error: Could not create a session with the publication service')
    print('Created a session with the publication service')
    # Create a new submission
    response, content = client.submit(submission_config)
    if response['status'] != '200':
        sys.exit('Error: HTTP Status %s: Could not start a new submission' % response['status'])
    if content['status'] != 'Success':
        sys.exit('Error: %s\n' % content['message'])
    submission_id = content['submission_id']
    print('New submission has been created successfully. Submission ID: %s' % submission_id)
    # NOTE(review): this early exit makes every step below unreachable; it
    # looks like a debugging short-circuit -- confirm and remove it to run
    # the scan and publish steps.
    sys.exit(0)
    # Scan a directory with dataset files
    response, content = client.scan(submission_id, submission_config['scan'])
    if response['status'] != '200':
        sys.exit('Error: HTTP Status %s: Could not scan dataset files\n' % response['status'])
    if content['status'] == 'Error':
        sys.exit('Error: %s\n' % content['message'])
    # Poll every 10 s until the scan finishes or fails.
    while True:
        time.sleep(10)
        response, content = client.get_status(submission_id)
        if response['status'] != '200':
            sys.exit('Error: HTTP Status %s: Could not scan dataset files\n' % response['status'])
        if content['status'] == 'Error':
            sys.exit('Error: %s\n' % content['message'])
        elif content['status'] == 'Success':
            print('Files scanned: %s' % content['files'])
            break
    # Create a THREDDS catalog and publish to Solr
    # BUG FIX: publish()'s return value was discarded, so the checks below
    # re-tested the stale response/content left over from the scan step.
    response, content = client.publish(submission_id, submission_config['publish'])
    if response['status'] != '200':
        sys.exit('Error: HTTP Status %s: Could not publish dataset files\n' % response['status'])
    if content['status'] == 'Error':
        sys.exit('Error: %s\n' % content['message'])
    # Poll every 10 s until publication finishes or fails.
    while True:
        time.sleep(10)
        response, content = client.get_status(submission_id)
        if response['status'] != '200':
            sys.exit('Error: HTTP Status %s: Could not publish dataset files\n' % response['status'])
        if content['status'] == 'Error':
            sys.exit('Error: %s\n' % content['message'])
        elif content['status'] == 'Success':
            print('Dataset files have been published')
            break
|
# Sponge rate (in bits) per output length; SHA3.__init__ derives the
# capacity as 1600 - rate.
rate = {224: 1152, 256: 1088, 384: 832, 512: 576}
# Rotation offsets consumed by SHA3.rho() (indexed with shifted x/y coords).
rot_vals = [
    [153, 231, 3, 10, 171],
    [55, 276, 36, 300, 6],
    [28, 91, 0, 1, 190],
    [120, 78, 210, 66, 253],
    [21, 136, 105, 45, 15]
]
# Per-round constants XORed into the state by SHA3.iota(), one per the 24 rounds.
RC = [
    0x0000000000000001, 0x0000000000008082, 0x800000000000808A, 0x8000000080008000,
    0x000000000000808B, 0x0000000080000001, 0x8000000080008081, 0x8000000000008009,
    0x000000000000008A, 0x0000000000000088, 0x0000000080008009, 0x000000008000000A,
    0x000000008000808B, 0x800000000000008B, 0x8000000000008089, 0x8000000000008003,
    0x8000000000008002, 0x8000000000000080, 0x000000000000800A, 0x800000008000000A,
    0x8000000080008081, 0x8000000000008080, 0x0000000080000001, 0x8000000080008008
]
def ascii_to_binary(chars):
    """Return *chars* as a string of 8-bit binary codes, one per character."""
    bits = []
    for ch in chars:
        bits.append(format(ord(ch), '08b'))
    return ''.join(bits)
def binary_to_hex(binary):
    """Convert a bit string to lowercase hex, one digit per 4-bit group."""
    digits = [format(int(binary[i:i + 4], 2), 'x')
              for i in range(0, len(binary), 4)]
    return ''.join(digits)
class SHA3():
    """Sponge-construction SHA-3 style hasher operating on bit strings.

    The 1600-bit state is kept as a 5x5 grid of 64-element lists of '0'/'1'
    characters (see init_state()); all permutation steps work on these
    strings rather than on integers.
    """
    def __init__(self, output=224):
        # Output length in bits; must be one of the keys of ``rate``.
        self.output = output
        assert self.output in rate, 'Invalid bit length'
        self.rate = rate[self.output]
        self.state_value = 1600
        self.capacity = self.state_value - self.rate
        self.bus = self.rate + self.capacity
    def __str__(self):
        # BUG FIX: __str__ must return a str; it previously returned the
        # int ``self.output``, so str(instance) raised TypeError.
        return str(self.output)
    def __call__(self):
        return self
    def init_state(self):
        """Return a fresh 5x5x64 grid of empty strings."""
        state_ = []
        for x in range(5):
            tmp_1 = []
            for y in range(5):
                tmp_2 = []
                for z in range(64):
                    tmp_2.append('')
                tmp_1.append(tmp_2)
            state_.append(tmp_1)
        return state_
    def pad_with(self, x, m):
        """Return the pad10*1 bit string for a message of length m at rate x."""
        assert x > 0 and m >= 0
        j = (-m - 2) % x
        pad = '1' + ('0' * j) + '1'
        return pad
    def preprocess(self, message):
        """Convert an ASCII message into padded, rate-sized binary blocks."""
        message = ascii_to_binary(message)
        padded_message = message + self.pad_with(self.rate, len(message))
        message_blocks = []
        for i in range(0, len(padded_message), self.rate):
            message_blocks.append(padded_message[i : i + self.rate])
        return message_blocks
    def change_conventions(self, state):
        """Remap lane coordinates (shift by 2 mod 5 in both x and y)."""
        state_ = self.init_state()
        for x in range(2, 7):
            for y in range(2, 7):
                state_[x-2][y-2] = list(self.lane(state, x%5, y%5))
        return state_
    def theta(self, state):
        """Theta step: XOR each bit with parities of two neighbouring columns."""
        def C(x, z):
            # Parity of column x at slice z.
            return self.xor(*[state[x][i][z] for i in range(5)])
        def D(x, z):
            val_1 = C((x-1) % 5, z)
            val_2 = C((x+1) % 5, (z-1) % 64)
            return self.xor_2(val_1, val_2)
        state_ = self.init_state()
        for x in range(5):
            for y in range(5):
                for z in range(64):
                    state_[x][y][z] = self.xor_2(state[x][y][z], D(x, z))
        return state_
    def rot(self, word, shift):
        """Rotate the string *word* right by *shift* positions."""
        shift = shift % len(word)
        return word[-shift:]+word[:-shift]
    def rho(self, state):
        """Rho/pi-style step: rotate each lane by its offset from rot_vals."""
        state_ = self.init_state()
        for x in range(5):
            for y in range(5):
                state_[y][((2*x) + (3*y)) %5] = list(self.rot(self.lane(state, x, y), rot_vals[(y+2)%5][(x+2)%5]%64))
        return state_
    def pi(self, state):
        """Pi step: permute lane positions."""
        state_ = self.init_state()
        for x in range(5):
            for y in range(5):
                for z in range(64):
                    state_[x][y][z] = state[(x+(3*y))%5][x][z]
        return state_
    def chi(self, state):
        """Chi step: the only non-linear step; combines each lane with the
        two lanes following it in its row."""
        state_ = self.init_state()
        for x in range(5):
            for y in range(5):
                state_[x][y] = list(self.xor_2(self.lane(state, x, y), bin((int(self.lane(state, (x+1)%5, y), 2) ^ 1) & int(self.lane(state, (x+2)%5, y), 2))[2:].zfill(64)))
        return state_
    def iota(self, state):
        """Iota step: XOR the current round constant into every lane.

        NOTE(review): the reference permutation applies RC to lane (0, 0)
        only; this applies it to all lanes -- confirm this is intentional.
        """
        state_ = self.init_state()
        for x in range(5):
            for y in range(5):
                state_[x][y] = list(self.xor_2(self.lane(state, x, y), bin(RC[self.round_count])[2:].zfill(64)))
        return state_
    def lane(self, state, x, y):
        """Return lane (x, y) as a single 64-character bit string."""
        lane_ = ''.join(state[x][y])
        assert len(state[x][y]) == 64
        assert len(lane_) == 64, (x, y)
        return lane_
    def plane(self, state, y):
        """Return plane y (all five lanes with that y) as one bit string."""
        return ''.join(self.lane(state, i, y) for i in range(5))
    def form_state(self, data):
        """Pack a 1600-bit string into the 5x5x64 state grid."""
        assert len(data) == 1600
        state = self.init_state()
        for x in range(5):
            for y in range(5):
                for z in range(64):
                    state[x][y][z] = data[64 * ((5 * y) + x) + z]
        assert data == ''.join(self.plane(state, i) for i in range(5))
        return self.change_conventions(state)
    def xor_2(self, a, b):
        """Bitwise XOR of two equal-length bit strings."""
        return ''.join('0' if i == j else '1' for i, j in zip(a, b))
    def xor(self, *words):
        """Bitwise XOR of any number of equal-length bit strings."""
        first, *words = words
        result = first
        for word in words:
            result = self.xor_2(result, word)
        return result
    def _round(self, b):
        """Apply one full round: theta, rho, pi, chi, iota."""
        b = self.theta(b)
        b = self.rho(b)
        b = self.pi(b)
        b = self.chi(b)
        b = self.iota(b)
        return b
    def f(self, r, c):
        """Permutation f: run 24 rounds over rate||capacity and re-split."""
        b = r + c
        assert len(b) == 1600
        state = self.form_state(b)
        self.rounds = 24
        for i in range(self.rounds):
            self.round_count = i
            state = self._round(state)
        unpacked_state = ''.join(self.plane(state, i) for i in range(5))
        return unpacked_state[0:self.rate], unpacked_state[self.rate:]
    def hash(self, message):
        """Absorb every padded block of *message* and return the hex digest."""
        message = self.preprocess(message)
        r = '0' * self.rate
        c = '0' * self.capacity
        for block in message:
            f_inp = self.xor_2(block, r)
            r, c = self.f(f_inp, c)
        # Squeeze: the digest is the first ``output`` bits of the rate part.
        value = r[0:self.output]
        return binary_to_hex(value)
|
from gi.repository import Gtk, Gdk
import threading
import logging
import collections
import os
import datetime
class Ui(object):
    """Main GTK user interface: loads the Glade layout and manages the
    symbol tree view.
    """

    # Column indices of the symbol tree model (name, type, size) plus the
    # thumbnail-list column count.
    (SYM_LIST_NAME,
     SYM_LIST_TYPE,
     SYM_LIST_SIZE,
     THUMB_LIST_NUM_COLS) = range(4)

    # Glade file describing the widget hierarchy.
    UI_FILE = 'ui.glade'

    def __init__(self, cfg):
        """Build the UI from *cfg*; call show() to make it visible."""
        self.cfg = cfg
        self.log = logging.getLogger('root')
        # Data
        self.on_exit_handler = None
        self._createUi()

    def _createUi(self):
        """Load the Glade file and wire up the symbol tree view."""
        self.builder = Gtk.Builder()
        self.builder.add_from_file(self.UI_FILE)
        self.main_window = self.builder.get_object('main_window')
        # Folder navigation tree
        self.sym_store = Gtk.TreeStore(str, str, int)
        sym_tree = self.builder.get_object('symbol_treeview')
        sym_tree.set_model(self.sym_store)
        sym_tree.set_search_column(self.SYM_LIST_NAME)
        sym_tree_renderer = Gtk.CellRendererText()
        sym_tree_name_col = Gtk.TreeViewColumn(
            "Name", sym_tree_renderer, text=self.SYM_LIST_NAME)
        sym_tree_type_col = Gtk.TreeViewColumn(
            "Type", sym_tree_renderer, text=self.SYM_LIST_TYPE)
        sym_tree_size_col = Gtk.TreeViewColumn(
            "Size", sym_tree_renderer, text=self.SYM_LIST_SIZE)
        sym_tree_name_col.set_resizable(True)
        sym_tree_type_col.set_resizable(True)
        sym_tree_size_col.set_resizable(True)
        sym_tree.append_column(sym_tree_name_col)
        sym_tree.append_column(sym_tree_type_col)
        sym_tree.append_column(sym_tree_size_col)
        handlers = {
            'on_main_window_destroy' : self._exit_handler,
        }
        self.builder.connect_signals(handlers)

    def _exit_handler(self, widget):
        """Invoke the registered exit callback when the window is destroyed."""
        self.log.info("User exit requested!")
        if self.on_exit_handler is not None:
            self.on_exit_handler()
        else:
            self.log.warning("No exit handler installed!")

    def add_exit_handler(self, on_exit_handler):
        """Register a callable invoked when the user closes the window."""
        self.on_exit_handler = on_exit_handler

    def add(self, name, type_name, size, members):
        """Append a symbol row plus one child row per member name."""
        tree_iter = self.sym_store.append(None, [name, type_name, size])
        for member in members:
            self.sym_store.append(tree_iter, [member, "member", 0])

    def show(self):
        """Show the main window and all of its child widgets."""
        self.main_window.show_all()
|
#coding:utf-8
from simple import (ExtDateField,
ExtStringField,
ExtNumberField,
ExtComboBox,
ExtTextArea,
ExtCheckBox,
ExtTimeField,
ExtHiddenField,
ExtDisplayField,
ExtRadio)
from complex import (ExtDictSelectField,
ExtDictSelectScrollField,
ExtMultiSelectField,
ExtSearchField,
ExtFileUploadField,
ExtImageUploadField)
from base import BaseExtField, BaseExtTriggerField |
"""
Multimedia Web Databases - Fall 2019: Project Group 17
Authors:
1. Sumukh Ashwin Kamath
2. Rakesh Ramesh
3. Baani Khurana
4. Karishma Joseph
5. Shantanu Gupta
6. Kanishk Bashyam
This is the CLI for task 8 of Phase 2 of the project
"""
from classes.dimensionreduction import DimensionReduction
from classes.globalconstants import GlobalConstants
from utils.excelcsv import CSVReader
from utils.inputhelper import get_input_k, get_input_folder
from utils.termweight import print_tw
global_constants = GlobalConstants()
def main():
    """Main function for the Task 8"""
    # Keep prompting until k is within the allowed bound.
    k_value = get_input_k()
    while k_value > 8:
        print("Please enter a value of k within 8")
        k_value = get_input_k()
    folder = get_input_folder()
    print(global_constants.LINE_SEPARATOR)
    print("User Inputs summary")
    print(global_constants.LINE_SEPARATOR)
    print(f"k-value: {k_value}\nFolder: {folder}")
    print(global_constants.LINE_SEPARATOR)
    # NMF dimensionality reduction over the folder's image metadata.
    reducer = DimensionReduction(None, "NMF", k_value, image_metadata=True, folder_metadata=folder)
    w, h, model = reducer.execute()
    # Display the term weights.
    print_tw(w, h, image_metadata=True)
    # Persist the term weights for later inspection.
    filename = f"task8_{k_value}"
    CSVReader().save_to_csv(w, h, filename, image_metadata=True)
    print(f"Please check the CSV file: output/{filename}.csv")
if __name__ == "__main__":
main()
|
# Ch2 Exercise 2.15
# Macky Ruiz
# CIS 007
#
# This program prompts the user to enter the side of a hexagon and displays its area.
#
# /////////////////////////////////////////////////////
# ex: Enter the side: 5.5
# Output: The area of the hexagon is 78.59180539343781
# /////////////////////////////////////////////////////
#
#
import math


def hexagon_area(side):
    """Return the area of a regular hexagon with the given side length."""
    return (3 * math.sqrt(3)) / 2 * side ** 2


if __name__ == "__main__":
    # float() parses the numeric input safely; the original used eval(),
    # which executes arbitrary code typed by the user.
    number = float(input("Enter the side: "))
    print("The area of the hexagon is", hexagon_area(number))
|
def unique_words(words):
    """Return *words* with duplicates removed, keeping first-occurrence order.

    dict preserves insertion order (Python 3.7+), giving an O(n) dedup
    instead of the original O(n^2) list-membership loop.
    """
    return list(dict.fromkeys(words))


if __name__ == "__main__":
    data = input('Enter the sentence: ').split()
    print(' '.join(unique_words(data)))
|
import re
from ztag.annotation import Annotation
from ztag.annotation import OperatingSystem
from ztag.annotation import Type
from ztag.annotation import Manufacturer
from ztag import protocols
import ztag.test
class FtpSpeedPort(Annotation):
    """Tags FTP banners from Speedport SOHO routers with device metadata."""
    protocol = protocols.FTP
    subprotocol = protocols.FTP.BANNER
    port = None

    # Raw strings so "\d" is a regex digit class rather than an invalid
    # string escape (a DeprecationWarning on modern Python).
    manufact_re = re.compile(r"^220 Speedport( )?W", re.IGNORECASE)
    product_re = re.compile(r"^220 Speedport (.+) FTP Server", re.IGNORECASE)
    impl_re = re.compile(r"FTP Server v(\d+(?:\.\d+)*) ready", re.IGNORECASE)

    tests = {
        "FtpSpeedPort_1": {
            "global_metadata": {
                "device_type": Type.SOHO_ROUTER,
                "manufacturer": Manufacturer.SPEEDPORT,
                "product": "W 723V Typ B",
            },
            "local_metadata": {
                "version": "1.37.000"
            }
        }
    }

    def process(self, obj, meta):
        """Populate *meta* from a Speedport FTP banner; returns None for other banners."""
        banner = obj["banner"]
        if self.manufact_re.search(banner):
            meta.global_metadata.device_type = Type.SOHO_ROUTER
            meta.global_metadata.manufacturer = Manufacturer.SPEEDPORT
            # Guard the secondary regexes: a manufacturer match does not
            # guarantee the product/version substrings are present, and
            # calling .group(1) on a None match raises AttributeError.
            product_match = self.product_re.search(banner)
            if product_match:
                meta.global_metadata.product = product_match.group(1)
            version_match = self.impl_re.search(banner)
            if version_match:
                meta.local_metadata.version = version_match.group(1)
            return meta
""" Tests
"220 Speedport W 723V Typ B FTP Server v1.37.000 ready\r\n"
"220 Speedport W 723V Typ B FTP Server v1.37.000 ready\r\n"
"220 Speedport W 504V Typ A FTP Server v1.17.000 ready\r\n"
"220 Speedport W 723V Typ B FTP Server v1.37.000 ready\r\n"
"220 Speedport W 723V Typ B FTP Server v1.37.000 ready\r\n"
"220 Speedport W 921V FTP Server v1.37.000 ready\r\n"
"220 Speedport W 723V Typ B FTP Server v1.37.000 ready\r\n"
"220 Speedport W 723V Typ B FTP Server v1.37.000 ready\r\n"
"220 Speedport W 921V FTP Server v1.37.000 ready\r\n"
"220 Speedport W 723V Typ B FTP Server v1.37.000 ready\r\n"
"220 Speedport W 723V Typ B FTP Server v1.37.000 ready\r\n"
"220 Speedport W 504V Typ A FTP Server v1.17.000 ready\r\n"
"220 Speedport W 504V Typ A FTP Server v1.17.000 ready\r\n"
"220 Speedport W 921V FTP Server v1.37.000 ready\r\n"
"220 Speedport W 723V Typ B FTP Server v1.37.000 ready\r\n"
"220 Speedport W 723V Typ B FTP Server v1.37.000 ready\r\n"
"""
|
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.shortcuts import render
from django.utils.decorators import method_decorator
from django.views import View
from django.views.generic import ListView, DetailView
from blogs.forms import BlogForm
from blogs.models import Blog
from posts.models import Post, Category
class BlogsView(ListView):
    # Lists every blog; select_related('user') fetches the owning user in
    # the same query, avoiding a per-row lookup when templates render it.
    template_name = 'blogs.html'
    queryset = Blog.objects.select_related('user').all()
    context_object_name = 'list_all_blogs'
class BlogDetailView(DetailView):
    """Blog detail page: its posts, paginated and optionally filtered by category."""
    model = Blog
    template_name = 'detail_blog.html'

    def get_context_data(self, **kwargs):
        context = super(BlogDetailView, self).get_context_data(**kwargs)
        blog_id = context.get('object').id
        # The placeholder option in the select box means "no filter".
        chosen = self.request.GET.get('filter_category', None)
        if chosen is None or chosen == 'Elige una opcion...':
            post_qs = Post.objects.filter(blog_id=blog_id).order_by('pub_date')
        else:
            post_qs = Post.objects.filter(blog_id=blog_id, categorys=chosen).order_by('pub_date')
        requested_page = self.request.GET.get('page')
        paginator = Paginator(post_qs, 5)
        try:
            page_obj = paginator.page(requested_page)
        except PageNotAnInteger:
            page_obj = paginator.page(1)
        except EmptyPage:
            page_obj = paginator.page(paginator.num_pages)
        context['list'] = page_obj
        context['categorys'] = Category.objects.all()
        return context
class NewBlogView(View):
    """Create-blog form: GET renders an empty form, POST validates and saves."""

    @method_decorator(login_required)
    def get(self, request):
        """Render an empty blog-creation form for the logged-in user."""
        blog_form = BlogForm(request.user)
        return render(request, 'new_blog.html', {'form': blog_form})

    @method_decorator(login_required)
    def post(self, request):
        """Validate the submitted form; on success save the blog and reset the form."""
        new_blog = Blog()
        blog_form = BlogForm(request.user, request.POST, request.FILES, instance=new_blog)
        if blog_form.is_valid():
            new_blog = blog_form.save()
            # Fixed typo in the user-facing message: "corretamente" -> "correctamente".
            messages.success(request, 'El blog {0} se ha creado correctamente!'.format(new_blog.title))
            blog_form = BlogForm(request.user)
        return render(request, 'new_blog.html', {'form': blog_form})
|
# coding: utf-8
import pandas as pd
import numpy as np
import networkx as nx
import matplotlib.pyplot as plt
import math
import random
from sklearn.utils import shuffle
from sklearn.svm import SVC
from sklearn import tree
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsClassifier
import csv
import nltk
nltk.download('punkt') # for tokenization
nltk.download('stopwords')
stpwds = set(nltk.corpus.stopwords.words("english"))
stemmer = nltk.stem.PorterStemmer()
def readCsv(path):
    """Load the CSV file at *path* into a pandas DataFrame."""
    return pd.read_csv(path)
period1_df = readCsv('data/Period1.csv')
period2_df = readCsv('data/Period2.csv')
testdata_df = pd.read_csv('data/TestData.csv')
period1_df_len = period1_df.shape[0]
period2_df_len = period2_df.shape[0]
testdata_df_len = testdata_df.shape[0]
# node_info
with open("data/node_information.csv", "r") as f:
reader = csv.reader(f)
node_info = list(reader)
IDs = []
ID_pos = {}
for element in node_info:
ID_pos[element[0]] = len(IDs)
IDs.append(element[0])
def removeDuplicateEdge(df):
    """Normalize edges in place: orient each edge so source id >= target id,
    drop the 'year' column, and remove duplicate edges (first kept).
    """
    for i in range(df.shape[0]):
        source = int(df['source id'][i])
        target = int(df['target id'][i])
        if source < target:
            source, target = target, source
        # .at replaces DataFrame.set_value, which was removed in pandas 1.0.
        df.at[i, 'source id'] = source
        df.at[i, 'target id'] = target
    df.drop('year', axis=1, inplace=True)
    # Bug fix: the original rebound a local name (df = df.loc[...]), so the
    # de-duplication never reached the caller; do it in place instead.
    df.drop_duplicates(inplace=True)
    df.reset_index(drop=True, inplace=True)
# remove dup edge
removeDuplicateEdge(period1_df)
removeDuplicateEdge(period2_df)
removeDuplicateEdge(testdata_df)
# retuen df column['node']
def getUniqueNode(list1, list2):
    """Concatenate two Series of node ids and drop duplicates (first occurrence kept)."""
    combined = pd.concat([list1, list2], ignore_index=True)
    return combined.drop_duplicates().reset_index(drop=True)
peroid1_node_uni = getUniqueNode(period1_df['source id'], period1_df['target id'])
peroid2_node_uni = getUniqueNode(period2_df['source id'], period2_df['target id'])
testdata_node_uni = getUniqueNode(testdata_df['source id'], testdata_df['target id'])
period_all_node_uni = getUniqueNode(peroid1_node_uni, peroid2_node_uni)
period_test_node_uni = getUniqueNode(period_all_node_uni, testdata_node_uni)
# retuen array((x,y))
def getUniqueEdge(data, input_type='dataframe'):
    """Return the unique (source, target) pairs of *data* as a list (order unspecified)."""
    if input_type == 'dataframe':
        pairs = {(data.loc[idx, 'source id'], data.loc[idx, 'target id'])
                 for idx in range(data.shape[0])}
    else:
        pairs = set(data)
    return list(pairs)
peroid1_edge = getUniqueEdge(period1_df)
peroid2_edge = getUniqueEdge(period2_df)
period_test_edge = [(testdata_df.loc[i, 'source id'], testdata_df.loc[i, 'target id']) for i in
range(testdata_df.shape[0])]
period_all_edge = list(set(peroid1_edge + peroid2_edge))
# data period info
print('period1 ', 'node:', len(peroid1_node_uni.values), 'edge:', len(peroid1_edge))
print('period2 ', 'node:', len(peroid2_node_uni.values), 'edge:', len(peroid2_edge))
print('period all ', 'node:', len(period_all_node_uni.values), 'edge:', len(period_all_edge))
print('period test ', 'node:', len(period_test_node_uni.values), 'edge:', len(period_all_edge))
period1_all_possible_edgs = []
# edge 大的在前面
def getAllPossibleEdge(nodes, step=97):
    """Sample candidate edges from *nodes*, pairing every *step*-th node.

    Each edge is oriented with the larger id first so (a, b) and (b, a)
    collapse to one pair.  Returns (sampled_nodes, sampled_edges).
    *step* generalizes the previously hard-coded sampling stride of 97.
    """
    temp_edges = []
    temp_nodes = []
    seen = set()  # O(1) membership test instead of the original O(n) list scans
    for i in range(0, len(nodes), step):
        for j in range(i + 1, len(nodes), step):
            if nodes[i] == nodes[j]:
                continue
            # record both endpoints once, preserving first-seen order
            for node in (nodes[i], nodes[j]):
                if node not in seen:
                    seen.add(node)
                    temp_nodes.append(node)
            if nodes[i] > nodes[j]:
                temp_edges.append((nodes[i], nodes[j]))
            else:
                temp_edges.append((nodes[j], nodes[i]))
        if i % (step * 10) == 0:
            print('round', i, ',has', len(temp_edges), 'edges', ',has', len(temp_nodes), 'nodes')
    return temp_nodes, temp_edges
# 取得period裡面nodes所有可能的edges
# period1_all_possible_nodes, period1_all_possible_edges = getAllPossibleEdge(peroid1_node_uni.values)
# period1_all_possible_edgs_sets = set(getUniqueEdge(period1_all_possible_edges, 'list'))
# print('get all possible edges in period1 done!!')
# 取得period1全部與已知的差集合
period1_df_set = set(period1_df)
# period1_all_possible_edgs_different_period2_edgs_set = period1_all_possible_edgs_sets.difference(period1_df_set)
# period1_all_possible_edgs_different_period2_edgs = list(period1_all_possible_edgs_different_period2_edgs_set)
# save to csv
def save2Csv(path, data):
    """Write *data* (dict of columns) to *path* as UTF-8 CSV; returns the DataFrame."""
    frame = pd.DataFrame(data=data)
    frame.to_csv(path, encoding='utf-8', index=False)
    print('get csv file!!')
    return frame
# temp_d = {'edges': period1_all_possible_edgs_different_period2_edgs}
# save2Csv('data/t1_only_point.csv', temp_d)
'''
創建圖
'''
def createGraph(nodes, edges):
    """Build an undirected networkx graph from node and edge collections."""
    graph = nx.Graph()
    graph.add_nodes_from(nodes)
    graph.add_edges_from(edges)
    print('Graph has nodes:', graph.number_of_nodes(), ', edges:', graph.number_of_edges())
    return graph
G1 = createGraph(peroid1_node_uni.values, peroid1_edge) # peroid1_edge
G2 = G1
G2.add_nodes_from(peroid2_node_uni.values)
G_all = createGraph(period_all_node_uni.values, period_all_edge)
G_test = G_all
G_test.add_nodes_from(testdata_node_uni.values)
'''
畫出圖
'''
# network graph
def printGraph(G, nodes):
    """Draw the subgraph induced by *nodes* (the full graph is too large to plot)."""
    sub_graph = G.subgraph(nodes)
    pos = nx.spring_layout(sub_graph)  # layout algorithm for node positions
    # Bug fix: nx.draw takes vmin/vmax for the colormap range; the original
    # passed a non-existent "vim" keyword argument.
    nx.draw(sub_graph, pos=pos, node_size=40, vmin=0.0, vmax=1.0, node_color="red")
    plt.show()
need_print = False
if need_print == True: printGraph(G1, list(peroid1_node_uni[0:100]))
'''
set training feature and label
'''
def generateTrainData(edges01, edge02):
    """Label known edges (1) and candidate non-edges (0); persist and return as a DataFrame.

    *edge02* holds the positive (known) edges; every edge of *edges01* not in
    *edge02* becomes a negative example.  Order matters to downstream consumers:
    positives come first.
    """
    train_label = []
    train_edges = []
    print('Different edges has', len(edges01))
    for edge in edge02:
        train_label.append(1)
        train_edges.append(edge)
    # Set lookup makes the exclusion test O(1) instead of scanning the
    # edge02 list once per candidate edge.
    known = set(edge02)
    for i, edge in enumerate(edges01):
        if edge not in known:
            train_label.append(0)
            train_edges.append(edge)
        if i % 10000 == 0:
            print('round', i)
    temp_d = {'edges': train_edges, 'label': train_label}
    temp_df = save2Csv('data/t1_only_point_with_label.csv', temp_d)
    return temp_df
def generateTrainData2(G, nodes01, nodes02, edges02, path):
    """Build a labeled edge set: known edges (1) plus complement-graph non-edges (0).

    Negative examples come from the complement of a 650-node sample of nodes
    appearing in exactly one of the two periods; the fixed seed 23 keeps the
    sample reproducible.  The shuffled result is saved to *path* and returned.
    """
    # pd.concat replaces Series.append, which was removed in pandas 2.0;
    # keep=False drops *all* occurrences of duplicated ids, leaving only
    # nodes present in exactly one period.
    combined = pd.concat([nodes01, nodes02])
    period1_not_in_2 = combined.drop_duplicates(keep=False)
    period1_node_shuffle = random.Random(23).sample(list(period1_not_in_2), 650)
    sub_graph = G.subgraph(period1_node_shuffle)
    sub_graph_complement = nx.complement(sub_graph)
    # Positives first, then the complement (non-)edges; order matters.
    train_label = []
    for edge in edges02:
        train_label.append(1)
    for edge in list(sub_graph_complement.edges()):
        train_label.append(0)
    train_data_edge = edges02 + list(sub_graph_complement.edges())
    train_data = pd.DataFrame(data={'edges': train_data_edge, 'label': train_label})
    train_data = shuffle(train_data, random_state=32).reset_index(drop=True)
    train_data.head()
    train_data.to_csv(path, encoding='utf-8', index=False)
    return train_data
# !!!! 設定是否要重新train
need_generate_train_data = True
train_df = None
if need_generate_train_data == True:
# train_df = generateTrainData(period1_all_possible_edgs_different_period2_edgs, peroid2_edge)
print('generate train data')
train_df = generateTrainData2(G2, peroid1_node_uni, peroid2_node_uni, peroid2_edge,
'data/t1_only_point_with_label.csv')
else:
train_df = readCsv('data/train_data.csv')
'''
score function
'''
# common neighbor score (neighbor = 1 best)
def common_neighbor(network, input_node1, input_node2):
    """Count the neighbors shared by the two nodes."""
    neighbors1 = set(network.neighbors(input_node1))
    neighbors2 = set(network.neighbors(input_node2))
    return len(neighbors1 & neighbors2)
# Jaccard's cofficient
def jaccard_cofficient(network, input_node1, input_node2):
    """Jaccard coefficient: |N1 ∩ N2| / |N1 ∪ N2|, or 0 when the union is empty.

    The original defined an unused local 'cofficient'; removed.
    """
    neighbors1 = set(network.neighbors(input_node1))
    neighbors2 = set(network.neighbors(input_node2))
    union = neighbors1 | neighbors2
    if not union:
        # Two isolated nodes have no neighborhood to compare.
        return 0
    return len(neighbors1 & neighbors2) / len(union)
# Adamic/Adar
def adamic_adar(network, input_node1, input_node2):
    """Adamic/Adar index: sum of 1/log(degree(v)) over common neighbors v."""
    neighbors1 = set(network.neighbors(input_node1))
    neighbors2 = set(network.neighbors(input_node2))
    shared = neighbors1 & neighbors2
    if not shared:
        return 0
    # Each common neighbor v touches both endpoints, so its degree is >= 2
    # and log(degree) is strictly positive.
    return sum(1 / math.log(len(list(network.neighbors(v)))) for v in shared)
# clustering coefficient
def clustering_coefficient(network, input_node):
    """Local clustering coefficient of *input_node*: 2*T / (k*(k-1)), 0 when k < 2."""
    degree = network.degree[input_node]
    triangles = nx.triangles(network, input_node)
    if degree < 2:
        # A node with fewer than two neighbors cannot close a triangle.
        return 0
    return (2 * triangles) / (degree * (degree - 1))
# perferential attachment
def perferential_attachment(network, input_node1, input_node2):
    """Preferential-attachment scores: product and sum of the two node degrees."""
    degree1 = len(list(network.neighbors(input_node1)))
    degree2 = len(list(network.neighbors(input_node2)))
    return {'pa_mul': degree1 * degree2, 'pa_add': degree1 + degree2}
'''
add feature
'''
# train data feature
def calFeature(data, G, path):
    """Compute link-prediction features for every pair in data['edges'],
    attach them as columns to *data* (in place), and write the result to
    *path* as CSV (without the 'edges' column).

    Relies on module-level globals: ID_pos / node_info (paper metadata),
    stpwds (stopword set) and stemmer (Porter stemmer).
    """
    cn, jaccard, adamic, cc_mul, cc_add, pa_mul, pa_add = [], [], [], [], [], [], []
    overlap_title = []
    # temporal distance between the papers (publication-year difference)
    temp_diff = []
    # number of common authors
    comm_auth = []
    source_id, target_id = [], []
    for edge_id, edge in enumerate(data['edges'].values):
        source_id.append(edge[0])
        target_id.append(edge[1])
        # Text/metadata features are only available when both endpoints
        # appear in node_info; otherwise they default to 0.
        if (str(edge[0]) in ID_pos.keys() and str(edge[1]) in ID_pos.keys()):
            source_info = node_info[ID_pos[str(edge[0])]]
            target_info = node_info[ID_pos[str(edge[1])]]
            source_title = source_info[2].lower().split(" ")
            # remove stopwords, then stem the remaining title tokens
            source_title = [token for token in source_title if token not in stpwds]
            source_title = [stemmer.stem(token) for token in source_title]
            target_title = target_info[2].lower().split(" ")
            target_title = [token for token in target_title if token not in stpwds]
            target_title = [stemmer.stem(token) for token in target_title]
            source_auth = source_info[3].split(",")
            target_auth = target_info[3].split(",")
            overlap_title.append(len(set(source_title).intersection(set(target_title))))
            temp_diff.append(int(source_info[1]) - int(target_info[1]))
            comm_auth.append(len(set(source_auth).intersection(set(target_auth))))
        else:
            overlap_title.append(0)
            temp_diff.append(0)
            comm_auth.append(0)
        # neighborhood-based graph features
        cn.append(common_neighbor(G, edge[0], edge[1]))
        jaccard.append(jaccard_cofficient(G, edge[0], edge[1]))
        # other topology features
        adamic.append((adamic_adar(G, edge[0], edge[1])))
        source_cc = clustering_coefficient(G, edge[0])
        target_cc = clustering_coefficient(G, edge[1])
        cc_mul.append(source_cc * target_cc)
        cc_add.append(source_cc + target_cc)
        pa = perferential_attachment(G, edge[0], edge[1])
        pa_mul.append(pa['pa_mul'])
        pa_add.append(pa['pa_add'])
        if edge_id % 10000 == 0:
            print(edge_id, len(data))
    # attach the collected feature columns to the caller's DataFrame (in place)
    data['source id'] = pd.Series(source_id, index=data.index)
    data['target id'] = pd.Series(target_id, index=data.index)
    data['cn'] = pd.Series(cn, index=data.index)
    data['jaccard'] = pd.Series(jaccard, index=data.index)
    data['adam'] = pd.Series(adamic, index=data.index)
    data['cc_mul'] = pd.Series(cc_mul, index=data.index)
    data['cc_add'] = pd.Series(cc_add, index=data.index)
    data['pa_mul'] = pd.Series(pa_mul, index=data.index)
    data['pa_add'] = pd.Series(pa_add, index=data.index)
    data['temp_diff'] = pd.Series(temp_diff, index=data.index)
    data['comm_auth'] = pd.Series(comm_auth, index=data.index)
    #data['overlap_title'] = pd.Series(overlap_title, index=data.index)
    print(data.head(10))
    # NOTE(review): this rebinds the local name only, so the caller's frame
    # keeps its 'edges' column; only the CSV written below omits it.
    data = data.drop('edges', axis=1)
    data.to_csv(path, index=False)
if need_generate_train_data == True:
calFeature(train_df, G2, "data/train_data.csv")
'''
cal test data
'''
if need_generate_train_data == True:
test_temp = {'edges': period_test_edge}
test_df = pd.DataFrame(data=test_temp)
calFeature(test_df, G_test, "data/test_data.csv")
else:
test_df = readCsv('data/test_data.csv')
'''
ML
'''
# train: assemble the feature matrix column-by-column from the DataFrame
train_feature = zip(train_df['cn'], train_df['jaccard'], train_df['adam'], train_df['cc_mul'], train_df['cc_add'],
                    train_df['pa_mul'], train_df['pa_add'], train_df['temp_diff'], train_df['comm_auth'])
train_feature = [[cn, jaccard, adam, cc_mul, cc_add, pa_mul, pa_add,temp_diff,comm_auth]
                 for cn, jaccard, adam, cc_mul, cc_add, pa_mul, pa_add,temp_diff,comm_auth in train_feature]
train_label = train_df['label'].tolist()
print('Run ML')
# svm (kept for experimentation; the decision tree below is the active model)
# svm = SVC(C=1.0, cache_size=4096)
# svm.fit(train_feature, train_label)
# tree
dt = tree.DecisionTreeClassifier()
dt = dt.fit(train_feature, train_label)
# rf (alternate model, disabled)
#rf = RandomForestClassifier(random_state=0, n_estimators=300)
#rf.fit(train_feature, train_label)
print('Run ML done')
# test: same feature layout as training
test_feature = zip(test_df['cn'], test_df['jaccard'], test_df['adam'], test_df['cc_mul'], test_df['cc_add'],
                   test_df['pa_mul'], test_df['pa_add'],test_df['temp_diff'], test_df['comm_auth'])
test_feature = [[cn, jaccard, adam, cc_mul, cc_add, pa_mul, pa_add,temp_diff,comm_auth]
                for cn, jaccard, adam, cc_mul, cc_add, pa_mul, pa_add,temp_diff,comm_auth in test_feature]
predict = dt.predict(test_feature)
print('Predict')
# out — NOTE(review): the 1..10000 id range assumes exactly 10000 test rows; confirm
row = [i for i in range(1, 10001)]
data = {'target id': row, 'label': predict}
predict = pd.DataFrame(data=data, columns=['target id', 'label'])
predict.to_csv("predict/predict.csv", index=False)
print('Get predict file')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Simple arithmetic quiz: ask the user for 4 * 100 - 54 and echo both
# the correct answer and the user's answer.
answer = input("Решите пример: 4 * 100 - 54 = ")
print("Правильный ответ: 346")
print(f"Ваш ответ: {answer}")
|
from DOTA_configs._base_.datasets.DIOR_full_ms_test import num_classes, max_bbox_per_img
dataset_config = '../_base_/datasets/DIOR_full_ms_test.py'
|
import boto3, json, os
from sshClient import get_ssh_client, ssh_install_docker_apt, ssh_install_docker_yum, ssh_install_docker_images, load_config_file
def get_key_pair(ec2, private_key_filename):
    """Create an EC2 key pair named after the PEM file unless the file already exists.

    Returns the private key material on creation, None when the file is present.
    """
    if private_key_filename not in os.listdir("."):
        # Bug fix: str.rstrip(".pem") strips *characters* from the set
        # {., p, e, m}, mangling names such as "name.pem" -> "na"; strip
        # the ".pem" suffix explicitly instead.
        if private_key_filename.endswith(".pem"):
            key_pair_name = private_key_filename[:-len(".pem")]
        else:
            key_pair_name = private_key_filename
        key_pair_string = ec2.create_key_pair(KeyName=key_pair_name)['KeyMaterial']
        print("Key-Pair: " + private_key_filename + " successfully created.\n")
        with open(private_key_filename, "w") as f:
            f.write(key_pair_string)
        return key_pair_string
    else:
        print("Key-Pair: " + private_key_filename + " already exists.\n")
def create_instances(vm_config_list, private_ssh_key_file):
    """Create the EC2 instances from *vm_config_list* that do not already exist.

    An instance counts as existing when some VM carries a Name tag equal to
    its configured instance_name.  Returns the list of created instances.
    """
    vm_instances_created = []
    pending = [cfg['instance_name'] for cfg in vm_config_list]
    ec2_res = boto3.resource('ec2', 'us-east-1')
    instances = ec2_res.instances.filter()
    if len(list(instances)) == 0:
        print("Currently no instances present.")
    else:
        for instance in instances:
            if not instance.tags:
                continue
            for tag in instance.tags:
                if tag['Key'] == "Name" and tag['Value'] in pending:
                    print("Virtual machine instance " + tag['Value'] + " already exists.")
                    pending.remove(tag['Value'])
    for vm_config in vm_config_list:
        if vm_config['instance_name'] not in pending:
            continue
        # "[DONE]" is printed before the create call (preserved behavior).
        print("Creating virtual machine instance " + vm_config['instance_name'] + ".", end="")
        print(" [DONE]")
        new_instances = create_instance(ec2_res, vm_config, private_ssh_key_file)
        vm_instances_created.extend(new_instances)
    return vm_instances_created
def create_instance(ec2_res, vm_config, private_ssh_key_file):
    """Launch EC2 instance(s) for one VM config, tag them, and return them.

    Connection metadata (username, package manager, docker images) is stashed
    on each instance object for the later installer step.
    """
    # Bug fix: str.rstrip(".pem") strips *characters* from the set
    # {., p, e, m} and corrupts names ending in those letters; strip the
    # ".pem" suffix explicitly (kept consistent with get_key_pair).
    if private_ssh_key_file.endswith(".pem"):
        key_pair_name = private_ssh_key_file[:-len(".pem")]
    else:
        key_pair_name = private_ssh_key_file
    launch_kwargs = dict(
        ImageId=vm_config['vm_image_name'],
        MinCount=vm_config['count'],
        MaxCount=vm_config['count'],
        InstanceType=vm_config['size'],
        KeyName=key_pair_name,
    )
    if "storage" in vm_config:
        # Optional custom root-volume size.
        launch_kwargs['BlockDeviceMappings'] = [{
            'DeviceName': '/dev/xvda',
            'Ebs': {
                'VolumeSize': vm_config['storage']['size']
            }
        }]
    created_instances = ec2_res.create_instances(**launch_kwargs)
    for instance in created_instances:
        instance.create_tags(Tags=[
            {'Key': 'Name', 'Value': vm_config['instance_name']},
            {'Key': 'AdminUsername', 'Value': vm_config['username']}
        ])
        instance.username = vm_config['username']
        instance.package_manager = vm_config['package_manager']
        # .get() returns None when no docker images are configured.
        instance.docker_images = vm_config.get('docker_images')
    return created_instances
def get_running_vms():
    """Return the set of EC2 instance ids currently in the 'running' state."""
    ec2_res = boto3.resource('ec2', 'us-east-1')
    running = ec2_res.instances.filter(
        Filters=[{'Name': 'instance-state-name', 'Values': ['running']}])
    return {instance.id for instance in running}
if __name__ == "__main__":
    # Provisioning driver: ensure a key pair exists, create the configured
    # VMs, then install docker (and optional images) on each over SSH.
    ec2 = boto3.client('ec2', 'us-east-1')
    config = load_config_file("./config.json")
    aws_vms = config['aws_vms']
    private_key_filename = config['aws_ssh_key']['private_key']
    print("--------------------------------")
    print("Getting/constructing key pair...")
    print("--------------------------------")
    get_key_pair(ec2, private_key_filename)
    print("------------------------------------------------")
    print("Creating vm_instances from configuration file...")
    print("------------------------------------------------")
    created_instances = create_instances(aws_vms, private_key_filename)
    for instance in created_instances:
        print("\n---------------------------------------------------------------------------------")
        print("Installing docker on virtual machine with id: " + instance.id + "...")
        print("---------------------------------------------------------------------------------")
        print("Waiting for virtual machine with id: " + instance.id + " to instantiate...")
        # NOTE(review): busy-wait that re-queries AWS on every iteration;
        # instance.wait_until_running() would be gentler on the API.
        while instance.id not in get_running_vms():
            continue
        print("Waiting for port 22 to open on: " + instance.public_dns_name + "...")
        ssh_client = get_ssh_client(instance.public_dns_name, private_key_filename, instance.username)
        # Pick the install path matching the AMI's package manager.
        if instance.package_manager == "yum":
            ssh_install_docker_yum(ssh_client)
        elif instance.package_manager == 'apt':
            ssh_install_docker_apt(ssh_client)
        if instance.docker_images is not None:
            ssh_install_docker_images(ssh_client, instance.docker_images)
        ssh_client.close()
|
# 请你来实现一个 atoi 函数,使其能将字符串转换成整数。
#
# 首先,该函数会根据需要丢弃无用的开头空格字符,直到寻找到第一个非空格的字符为止。接下来的转化规则如下:
#
#
# 如果第一个非空字符为正或者负号时,则将该符号与之后面尽可能多的连续数字字符组合起来,形成一个有符号整数。
# 假如第一个非空字符是数字,则直接将其与之后连续的数字字符组合起来,形成一个整数。
# 该字符串在有效的整数部分之后也可能会存在多余的字符,那么这些字符可以被忽略,它们对函数不应该造成影响。
#
#
# 注意:假如该字符串中的第一个非空格字符不是一个有效整数字符、字符串为空或字符串仅包含空白字符时,则你的函数不需要进行转换,即无法进行有效转换。
#
# 在任何情况下,若函数不能进行有效的转换时,请返回 0 。
#
# 提示:
#
#
# 本题中的空白字符只包括空格字符 ' ' 。
# 假设我们的环境只能存储 32 位大小的有符号整数,那么其数值范围为 [−231, 231 − 1]。如果数值超过这个范围,请返回 INT_MAX (231
# − 1) 或 INT_MIN (−231) 。
#
#
#
#
# 示例 1:
#
# 输入: "42"
# 输出: 42
#
#
# 示例 2:
#
# 输入: " -42"
# 输出: -42
# 解释: 第一个非空白字符为 '-', 它是一个负号。
# 我们尽可能将负号与后面所有连续出现的数字组合起来,最后得到 -42 。
#
#
# 示例 3:
#
# 输入: "4193 with words"
# 输出: 4193
# 解释: 转换截止于数字 '3' ,因为它的下一个字符不为数字。
#
#
# 示例 4:
#
# 输入: "words and 987"
# 输出: 0
# 解释: 第一个非空字符是 'w', 但它不是数字或正、负号。
# 因此无法执行有效的转换。
#
# 示例 5:
#
# 输入: "-91283472332"
# 输出: -2147483648
# 解释: 数字 "-91283472332" 超过 32 位有符号整数范围。
# 因此返回 INT_MIN (−231) 。
#
# Related Topics 数学 字符串
# 👍 813 👎 0
# leetcode submit region begin(Prohibit modification and deletion)
class Solution:
    def myAtoi(self, str: str) -> int:
        """Parse a leading (optionally signed) integer from *str*, skipping
        leading spaces and clamping to the signed 32-bit range."""
        INT_MAX = (1 << 31) - 1
        INT_MIN = -(1 << 31)
        i = 0
        n = len(str)
        # skip leading spaces
        while i < n and str[i] == ' ':
            i += 1
        if i == n:
            return 0
        # optional sign
        sign = 1
        if str[i] in '+-':
            sign = -1 if str[i] == '-' else 1
            i += 1
        # accumulate consecutive digits; anything after is ignored
        value = 0
        while i < n and str[i].isdigit():
            value = value * 10 + int(str[i])
            i += 1
        # clamp to 32-bit signed range
        if sign == 1 and value > INT_MAX:
            return INT_MAX
        if sign == -1 and -value < INT_MIN:
            return INT_MIN
        return sign * value
# leetcode submit region end(Prohibit modification and deletion)
|
"""
ID: rk91091
LANG: PYTHON3
TASK: beads
"""
# USACO "Broken Necklace": find the maximum number of beads collectable
# from a single break point of a circular necklace of 'r'/'b'/'w' beads
# ('w' is a wildcard that counts for either color).
# NOTE(review): fin is never closed; harmless for a short script but worth fixing.
fin = open('beads.in', 'r')
fout = open('beads.out', 'w')
num = int(fin.readline().strip())
beads = fin.readline().strip()
# Run-length encode the necklace into [color, run_length] pairs.
count = [['x', 0]] # placeholder
for bead in beads:
    if count[-1][0] == bead:
        count[-1][1] += 1
    else: count.append([bead, 1])
count.remove(['x', 0]) # remove placeholder
lengths = []
if len(count) == 1:
    # Single run: the whole necklace is collectable.
    lengths.append(count[0][1])
else:
    # Try breaking before each run; walk the runs circularly, allowing at
    # most one color change (truth[0] tracks whether the change is unused).
    for i in range(len(count)):
        length = 0
        index = i
        truth = [True, count[i][0]]
        # A leading wildcard run adopts the color of the next run
        # (wrapping to the first run at the end of the list).
        if (truth[1] == 'w'):
            if (i + 1 == len(count)): truth[1] = count[0][0]
            else: truth[1] = count[i + 1][0]
        for j in range(len(count)):
            if (index == len(count)): index = 0
            color = count[index][0]
            # First mismatch (non-wildcard): switch the tracked color.
            if (color != truth[1]) and (color != 'w') and (truth[0] == True):
                truth[0] = False
                truth[1] = color
            # Second mismatch: stop collecting.
            if (color != truth[1]) and (color != 'w') and (truth[0] != True):
                break
            length += count[index][1]
            index += 1
        lengths.append(length)
fout.write(str(max(lengths)) + '\n')
fout.close()
|
def is_armstrong_number(number):
    """Return True when *number* equals the sum of its digits each raised to
    the power of the digit count (an Armstrong / narcissistic number).
    """
    if number < 0:
        # Negatives are never Armstrong numbers (matches original behavior,
        # whose digit sum over a negative input was always 0).
        return False
    digits = str(number)
    order = len(digits)
    # Iterate the decimal string instead of repeated divmod; also avoids
    # shadowing the built-in sum() as the original did.
    return number == sum(int(d) ** order for d in digits)
|
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import KFold
import random
from anapy.datamanip import datasetSeparator as ds
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import pickle
random.seed(1)
#######################################################################################################
# Preliminary steps for loading the data and splitting it into data and target
#######################################################################################################
# Load the raw machine learning data set
# learning context data -- mentioned as 'lc' with variables
raw_df = pd.read_csv("D:\\lcif\\16032017-IndividualFiles\\prelimMLdatasetFull.csv",
                     dtype='unicode')
# NOTE(review): DataFrame.ix was removed in pandas 1.0 — use .iloc[:, 1] on
# modern pandas.
datalabel_lc = raw_df.ix[:,1].unique().astype(str)
#create an instance of data separator by passing the datafram raw_df as the argument
data_sep = ds.DataSeparator(raw_df)
df_lc = pd.DataFrame([])
# removal of the columns containing the text data of the labels and the final column with 25 labels
# as the 25 labels have been replaced with 10 labels for the purpose of simplicity
# only the columns from 4 to 12 consist of the actual machine learning set with labels being the
# last column
df_lc = data_sep.remCols([0,1,2,3,13])
# assuming the last column to be the target and the rest as data this method divides the data set
# into data and target labels
data_lc, target_lc = data_sep.sep_data_target(df_lc)
data_lc = data_lc.astype('float')
# standardize features to zero mean / unit variance before classification
data_lc_std = StandardScaler().fit_transform(data_lc)
target_lc = target_lc.astype('float')
target_lc = np.ravel(target_lc)
#####################################################################################################
#######################################################################################################
# Process of splitting the data into training set and test set
#######################################################################################################
# K fold cross validation
# create an instance of the KFold class
# number of splits = 5 so that the data is split as 80% and 20%
# the data is shuffled
train_data_array, train_target_array, test_data_array, test_target_array = [], [], [], []
kf = KFold(n_splits=5, shuffle=True)
dataset_number = 0
file_path = "D:\\lcif\\16032017-IndividualFiles\\TrainTestdataset\\"
for train_index, test_index in kf.split(data_lc_std):
    train_data = data_lc_std[train_index].astype('float')
    train_target = target_lc[train_index].astype('float')
    # reshape targets to column vectors so they can be concatenated with the data
    train_target = np.reshape(train_target,(np.shape(train_target)[0],1))
    test_data = data_lc_std[test_index].astype('float')
    test_target = target_lc[test_index].astype('float')
    test_target = np.reshape(test_target,(np.shape(test_target)[0],1))
    train_data_array.append(train_data)
    train_target_array.append(train_target)
    test_data_array.append(test_data)
    test_target_array.append(test_target)
    # the following code writes the training set and testing set to files as an ndarray
    # if write_to_file value is set to True
    write_to_file = False
    if write_to_file == True:
        with open(file_path+'training_set'+str(dataset_number+1)+".dat",'wb') as f:
            train_data_target = np.concatenate((train_data,train_target),axis=1)
            pickle.dump(train_data_target, f)
        with open(file_path+'testing_set'+str(dataset_number+1)+".dat",'wb') as g:
            test_data_target = np.concatenate((test_data,test_target),axis=1)
            pickle.dump(test_data_target,g)
    dataset_number = dataset_number + 1
#####################################################################################################
#######################################################################################################
# Machine learning Algorithms --- Simple Classifiers
#######################################################################################################
ml_algorithms = [
    "Nearest Neighbors",
    "Decision Tree",
    "Gaussian NB",
    "Neural Net"
]
trainacc =[]
testacc = []
# sweep k for k-nearest-neighbors, cross-validating each value on all folds
n_neighbors_range = 5
for neighbors in range(n_neighbors_range):
    kfoldtrainacc = []
    kfoldtestacc = []
    for num,ele in enumerate(train_data_array):
        knn = KNeighborsClassifier(n_neighbors= neighbors+1 ,weights='uniform',metric='minkowski')
        # NOTE(review): the target is a column vector; sklearn warns and
        # expects a 1-D array here — consider np.ravel().
        knn.fit(train_data_array[num],train_target_array[num])
        train_accuracy = knn.score(train_data_array[num],train_target_array[num])
        test_accuracy = knn.score(test_data_array[num],test_target_array[num])
        print("training accurracy -- traindata no. "+ str(num) +"kneighbor: "+ str(neighbors) + str(train_accuracy))
        kfoldtrainacc.append(train_accuracy)
        kfoldtestacc.append(test_accuracy)
    trainacc.append(kfoldtrainacc)
    testacc.append(kfoldtestacc)
# persist the per-fold accuracies; NOTE(review): 'num' here is the leaked
# last loop index, so the filename always uses the final fold number.
traintestacc = np.array(np.concatenate((np.array(trainacc),np.array(testacc)),axis=1))
traintestacc_df = pd.DataFrame(traintestacc)
traintestacc_df.to_html(file_path+'crossvalidationset_'+str(num)+".html")
# plot mean train/test accuracy against k
plt.figure(1)
train_accuracy_mean = np.mean(np.array(trainacc),axis=1)
test_accuracy_mean = np.mean(np.array(testacc),axis=1)
plt.ylim([0.0,1.1])
plt.plot(np.arange(1,n_neighbors_range+1), train_accuracy_mean)
plt.plot(np.arange(1,n_neighbors_range+1), test_accuracy_mean)
plt.show()
# NOTE(review): these classifiers are instantiated but never fitted here.
dtc =DecisionTreeClassifier(max_depth=12)
naive_bayes=GaussianNB()
neural_net = MLPClassifier(max_iter=500)
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox3.mox import IsA # noqa
from openstack_dashboard.api import cinder
from openstack_dashboard.test import helpers as test
from openstack_dashboard.dashboards.admin.volumes.snapshots import forms
INDEX_URL = reverse('horizon:admin:volumes:volumes_tab')
class VolumeViewTests(test.BaseAdminViewTests):
    """Admin-dashboard volume view tests.

    Each test stubs the cinder API calls with mox, replays them, drives the
    view through the Django test client, and asserts on the response.
    """

    @test.create_stubs({cinder: ('volume_reset_state',
                                 'volume_get')})
    def test_update_volume_status(self):
        # Posting a new status to the update view resets the volume's state.
        volume = self.volumes.first()
        formData = {'status': 'error'}
        cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
        cinder.volume_reset_state(IsA(http.HttpRequest),
                                  volume.id,
                                  formData['status'])
        self.mox.ReplayAll()
        res = self.client.post(
            reverse('horizon:admin:volumes:volumes:update_status',
                    args=(volume.id,)),
            formData)
        self.assertNoFormErrors(res)

    @test.create_stubs({cinder: ('volume_manage',
                                 'volume_type_list',
                                 'availability_zone_list',
                                 'extension_supported')})
    def test_manage_volume(self):
        # The manage form turns the "key=value" metadata string into a dict
        # before handing it to cinder.volume_manage.
        metadata = {'key': u'k1',
                    'value': u'v1'}
        formData = {'host': 'host-1',
                    'identifier': 'vol-1',
                    'id_type': u'source-name',
                    'name': 'name-1',
                    'description': 'manage a volume',
                    'volume_type': 'vol_type_1',
                    'availability_zone': 'nova',
                    'metadata': metadata['key'] + '=' + metadata['value'],
                    'bootable': False}
        cinder.volume_type_list(
            IsA(http.HttpRequest)).\
            AndReturn(self.volume_types.list())
        cinder.availability_zone_list(
            IsA(http.HttpRequest)).\
            AndReturn(self.availability_zones.list())
        cinder.extension_supported(
            IsA(http.HttpRequest),
            'AvailabilityZones').\
            AndReturn(True)
        cinder.volume_manage(
            IsA(http.HttpRequest),
            host=formData['host'],
            identifier=formData['identifier'],
            id_type=formData['id_type'],
            name=formData['name'],
            description=formData['description'],
            volume_type=formData['volume_type'],
            availability_zone=formData['availability_zone'],
            metadata={metadata['key']: metadata['value']},
            bootable=formData['bootable'])
        self.mox.ReplayAll()
        res = self.client.post(
            reverse('horizon:admin:volumes:volumes:manage'),
            formData)
        self.assertNoFormErrors(res)

    @test.create_stubs({cinder: ('volume_unmanage',
                                 'volume_get')})
    def test_unmanage_volume(self):
        # important - need to get the v2 cinder volume which has host data
        # NOTE(review): filter(...) is subscripted below — that only works on
        # Python 2, where filter returns a list; confirm the target runtime.
        volume_list = \
            filter(lambda x: x.name == 'v2_volume', self.cinder_volumes.list())
        volume = volume_list[0]
        formData = {'volume_name': volume.name,
                    'host_name': 'host@backend-name#pool',
                    'volume_id': volume.id}
        cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
        cinder.volume_unmanage(IsA(http.HttpRequest), volume.id).\
            AndReturn(volume)
        self.mox.ReplayAll()
        res = self.client.post(
            reverse('horizon:admin:volumes:volumes:unmanage',
                    args=(volume.id,)),
            formData)
        self.assertNoFormErrors(res)

    @test.create_stubs({cinder: ('pool_list',
                                 'volume_get',)})
    def test_volume_migrate_get(self):
        # GET on the migrate view renders the migration form template.
        volume = self.cinder_volumes.get(name='v2_volume')
        cinder.volume_get(IsA(http.HttpRequest), volume.id) \
            .AndReturn(volume)
        cinder.pool_list(IsA(http.HttpRequest)) \
            .AndReturn(self.pools.list())
        self.mox.ReplayAll()
        url = reverse('horizon:admin:volumes:volumes:migrate',
                      args=[volume.id])
        res = self.client.get(url)
        self.assertTemplateUsed(res,
                                'admin/volumes/volumes/migrate_volume.html')

    @test.create_stubs({cinder: ('volume_get',)})
    def test_volume_migrate_get_volume_get_exception(self):
        # A cinder error while fetching the volume redirects back to the index.
        volume = self.cinder_volumes.get(name='v2_volume')
        cinder.volume_get(IsA(http.HttpRequest), volume.id) \
            .AndRaise(self.exceptions.cinder)
        self.mox.ReplayAll()
        url = reverse('horizon:admin:volumes:volumes:migrate',
                      args=[volume.id])
        res = self.client.get(url)
        self.assertRedirectsNoFollow(res, INDEX_URL)

    @test.create_stubs({cinder: ('pool_list',
                                 'volume_get',)})
    def test_volume_migrate_list_pool_get_exception(self):
        # A cinder error while listing pools also redirects back to the index.
        volume = self.cinder_volumes.get(name='v2_volume')
        cinder.volume_get(IsA(http.HttpRequest), volume.id) \
            .AndReturn(volume)
        cinder.pool_list(IsA(http.HttpRequest)) \
            .AndRaise(self.exceptions.cinder)
        self.mox.ReplayAll()
        url = reverse('horizon:admin:volumes:volumes:migrate',
                      args=[volume.id])
        res = self.client.get(url)
        self.assertRedirectsNoFollow(res, INDEX_URL)

    @test.create_stubs({cinder: ('pool_list',
                                 'volume_get',
                                 'volume_migrate',)})
    def test_volume_migrate_post(self):
        # Successful migration POST redirects to the index with no form errors.
        volume = self.cinder_volumes.get(name='v2_volume')
        host = self.pools.first().name
        cinder.volume_get(IsA(http.HttpRequest), volume.id) \
            .AndReturn(volume)
        cinder.pool_list(IsA(http.HttpRequest)) \
            .AndReturn(self.pools.list())
        cinder.volume_migrate(IsA(http.HttpRequest),
                              volume.id,
                              host,
                              False) \
            .AndReturn(None)
        self.mox.ReplayAll()
        url = reverse('horizon:admin:volumes:volumes:migrate',
                      args=[volume.id])
        res = self.client.post(url, {'host': host, 'volume_id': volume.id})
        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)

    @test.create_stubs({cinder: ('pool_list',
                                 'volume_get',
                                 'volume_migrate',)})
    def test_volume_migrate_post_api_exception(self):
        # An API failure during migration still redirects to the index.
        volume = self.cinder_volumes.get(name='v2_volume')
        host = self.pools.first().name
        cinder.volume_get(IsA(http.HttpRequest), volume.id) \
            .AndReturn(volume)
        cinder.pool_list(IsA(http.HttpRequest)) \
            .AndReturn(self.pools.list())
        cinder.volume_migrate(IsA(http.HttpRequest),
                              volume.id,
                              host,
                              False) \
            .AndRaise(self.exceptions.cinder)
        self.mox.ReplayAll()
        url = reverse('horizon:admin:volumes:volumes:migrate',
                      args=[volume.id])
        res = self.client.post(url, {'host': host, 'volume_id': volume.id})
        self.assertRedirectsNoFollow(res, INDEX_URL)

    def test_get_volume_status_choices_without_current(self):
        # The current status must be excluded from the offered choices.
        current_status = {'status': 'available'}
        status_choices = forms.populate_status_choices(current_status,
                                                       forms.STATUS_CHOICES)
        self.assertEqual(len(status_choices), len(forms.STATUS_CHOICES))
        self.assertNotIn(current_status['status'],
                         [status[0] for status in status_choices])

    @test.create_stubs({cinder: ('volume_get',)})
    def test_update_volume_status_get(self):
        # The status dropdown must not offer the volume's current status.
        volume = self.cinder_volumes.get(name='v2_volume')
        cinder.volume_get(IsA(http.HttpRequest), volume.id) \
            .AndReturn(volume)
        self.mox.ReplayAll()
        url = reverse('horizon:admin:volumes:volumes:update_status',
                      args=[volume.id])
        res = self.client.get(url)
        status_option = "<option value=\"%s\"></option>" % volume.status
        self.assertNotContains(res, status_option)
|
class Solution(object):
    def plusOne(self, head):
        """Add one to a non-negative integer stored as a linked list.

        :type head: ListNode -- most-significant digit first
        :rtype: ListNode

        Strategy: reverse the list so the least-significant digit comes
        first, propagate the carry forward, then reverse back.
        """
        def reverse(head):
            # Standard in-place singly-linked-list reversal.
            node = head
            pre = None
            while node:
                tmp = node.next
                node.next = pre
                pre = node
                node = tmp
            return pre

        head = reverse(head)
        node = head
        carry = 1  # the "+1" enters as an initial carry
        pre = None
        while carry > 0 and node:
            # BUG FIX: the original used `carry = v / 10`, which is true
            # (float) division in Python 3 — the carry became fractional,
            # corrupting digit values and crashing on ListNode(carry).
            carry, node.val = divmod(node.val + carry, 10)
            pre = node
            node = node.next
        if carry > 0:
            # The number grew by one digit (e.g. 999 + 1 -> 1000).
            pre.next = ListNode(carry)
        head = reverse(head)
        return head
|
from rest_framework import generics
from base import mixins
from . import serializers
from .models import Assignment
from .models import Channel
class Assignments(mixins.AdminPermission, generics.ListAPIView):
    """List assignments filtered down to the client given by the URL ``pk``."""

    queryset = Assignment.objects.all()
    serializer_class = serializers.AssignmentSerializer

    def get_pk(self):
        # The URL kwarg arrives as a string; normalize it to an int.
        return int(self.kwargs.get('pk'))

    def get_queryset_kwargs(self):
        # Extend the base filter kwargs with the client scoping.
        kwargs = super().get_queryset_kwargs()
        kwargs['client_id'] = self.get_pk()
        return kwargs
class AssignmentDetail(mixins.AdminPermission,
                       generics.RetrieveUpdateDestroyAPIView):
    """Retrieve / update / delete a single assignment."""
    # The URL captures the assignment id as 'assignment_pk', not DRF's
    # default 'pk' (which this URL conf uses for the client id).
    lookup_url_kwarg = 'assignment_pk'
    queryset = Assignment.objects.all()
    serializer_class = serializers.AssignmentSerializer
class Channels(mixins.AdminPermission, generics.ListAPIView):
    """List channels, exposing the URL ``pk`` to the serializer context."""

    queryset = Channel.objects.all()
    serializer_class = serializers.ChannelSerializer

    def get_serializer_context(self):
        # Hand the (integer) URL pk through to the serializer.
        context = super().get_serializer_context()
        context['pk'] = int(self.kwargs.get('pk'))
        return context
|
from random import randint
#Hangman Game

def change_dashes(secret_word, user_letter, current_word):
    """Return a new dash mask with every occurrence of *user_letter* revealed.

    secret_word  -- the word being guessed
    user_letter  -- the letter the player just guessed
    current_word -- current mask of dashes and already-revealed letters
    """
    result = ''
    for i in range(len(current_word)):
        if user_letter == secret_word[i]:
            # reveal the guessed letter at this position
            result += user_letter
        else:
            # keep whatever was shown before (a dash or an earlier guess)
            result += current_word[i]
    return result


def play_hangman():
    """Run one interactive game of hangman using words from hangman_words.txt."""
    # Read the candidate words, then strip trailing newline characters.
    with open('hangman_words.txt') as file_obj:
        word_list = file_obj.readlines()
    new_word_list = [word.rstrip('\n') for word in word_list]
    # BUG FIX: randint's upper bound is inclusive, so the original
    # randint(0, len(word_list)) could index one past the end of the list.
    secret_word = new_word_list[randint(0, len(new_word_list) - 1)]
    # One dash per letter of the secret word.
    dashes = '-' * (len(secret_word))
    # BUG FIX: the original condition (guesses_left >= 0) allowed an 11th
    # wrong guess; `> 0` gives exactly the advertised 10 guesses.
    guesses_left = 10
    # Loop until the word is fully revealed or the guesses run out.
    while secret_word != dashes and guesses_left > 0:
        print(dashes)
        user_input = input('Enter a letter: ')
        print('You have ' + str(guesses_left) + ' guesses left\n')
        if user_input in secret_word:
            print('That letter is in a word!')
            # BUG FIX: change_dashes was originally defined *after* this
            # loop, so calling it here raised NameError at runtime.
            dashes = change_dashes(secret_word, user_input, dashes)
        else:
            # Wrong guess: spend one attempt.
            guesses_left -= 1


if __name__ == '__main__':
    play_hangman()
names = ['Ram', 'Raj', 'Amir', 'Shyam']


def filter_names_using_map(name):
    """Map-style matcher: return the name if it starts with 'R', else None."""
    return name if name.startswith('R') else None


def filter_names_using_filter(name):
    """Predicate for filter(): truthy only for names beginning with 'R'."""
    return True if name.startswith('R') else None


filteredNames = list(filter(filter_names_using_filter, names))
print('The filtered Names are:')
print(filteredNames)
from _collections import deque
# Board size, then start (r1, c1) and target (r2, c2) coordinates from stdin.
n = int(input())
r1,c1,r2,c2 = map(int,input().split())
# The six legal move offsets for this piece.
dx = [-2,-2,0,0,2,2]
dy = [-1,1,-2,2,-1,1]
# Visited grid sized (n+1) x (n+1).
# NOTE(review): the bounds check below uses 0 <= X < n, which never reaches
# index n even though visit has n+1 rows/columns — confirm whether the input
# coordinates are 0- or 1-based.
visit = [[False]*(n+1) for _ in range(n+1)]
q = deque()
# Queue entries are (row, col, moves-so-far); seed with the start square.
q.append((r1,c1,0))
visit[r1][c1] = True
def dfs():
    """Return the minimum number of moves from (r1, c1) to (r2, c2), or -1.

    Despite the name, this is a breadth-first search: it pops from the left
    of a FIFO deque, so the first time the target is reached is optimal.
    """
    while q:
        x,y,cnt = q.popleft()
        if x == r2 and y == c2:
            return cnt
        for i in range(6):
            X = x + dx[i]
            Y = y + dy[i]
            if 0<=X<n and 0<=Y<n and visit[X][Y] == False:
                q.append((X,Y,cnt+1))
                visit[X][Y] = True
    # Target unreachable.
    return -1
print(dfs())
import pygame
from pygame.locals import * # 调包
import time
import math
from sys import exit
import random
pygame.init()  # initialize pygame
screen = pygame.display.set_mode((551, 401), 0, 32)  # create the game window
pygame.display.set_caption("Start Coding Now! 黄金矿工")  # window title ("Gold Miner")
background_start = 'all_start.png'  # start-screen background image path
backGround = pygame.image.load(background_start).convert_alpha()  # current background, initially the start screen
backGround_judge = 'start'  # which screen is active: start / play / success / end
pointer = '钻石.png'  # pointer sprite (filename means "diamond")
poInter = pygame.image.load(pointer).convert_alpha()
gold_small = pygame.image.load(
    '金矿.png').convert_alpha()  # small gold nugget sprite, 46 * 42 px
gold_mid = pygame.transform.smoothscale(gold_small, (66, 61))  # medium gold nugget
gold_big = pygame.transform.smoothscale(gold_small, (121, 104))  # large gold nugget, 121 * 104 px
background_play_1 = pygame.image.load('background1.png').convert()
background_play_2 = pygame.image.load('background2.png').convert()
background_success_end = pygame.image.load('success_end.png').convert()
background_end_all = pygame.image.load('end_all.png').convert()
hook = pygame.image.load('hook.png').convert_alpha()  # hook sprite, 44 * 25 px
stone_1 = pygame.image.load('stone1.png').convert_alpha()  # 68 * 60 px
stone_2 = pygame.image.load('stone2.png').convert_alpha()  # 65 * 54 px
background_start_1 = pygame.image.load('start_1.png').convert_alpha()
background_start_2 = pygame.image.load('start_2.png').convert_alpha()
background_start = [background_start_1, background_start_2]  # per-level intro screens
screen.blit(backGround, (0, 0))
# Hook pivot position (the hook returns to this point after each throw).
hook_pos_x = 476
hook_pos_y = 76
levels = 1
score_levels = [650, 1150]  # score required to clear each level
pygame.time.Clock().tick(180)
time_countdown = 60  # seconds on the clock (init_start resets this to 30)
TIME_DOWN = pygame.USEREVENT + 1  # custom event: one-second countdown tick
pygame.time.set_timer(TIME_DOWN, 1000)
DIRECRTION_FLAG = -1  # pointer swing direction (-1 or +1)
ROTATE_DEGREE = pygame.USEREVENT + 3  # custom event: pointer rotation tick
pygame.time.set_timer(ROTATE_DEGREE, 20)
degree = 75  # pointer angle; swings between -75 and 75 degrees
pygame.mouse.set_visible(True)
pygame.event.set_grab(True)
speed = {'x': 0, 'y': 0}  # hook velocity components
# Per-level gold layout: entries are [x, y, size-name, width, height].
gold_levels = [[[150, 210, 'small', 46, 42], [680, 180, 'small', 46, 42], [240, 320, 'mid', 66, 61],
                [0, 320, 'big', 121, 104], [900, 280, 'small', 46, 42], [250, 440, 'mid', 66, 61],
                [550, 210, 'small', 46, 42], [580, 600, 'mid', 66, 61], [680, 400, 'big', 121, 104],
                [260, 230, 'small', 46, 42]]]
# Stone layout: entries are [x, y, sprite-id, width, height].
stone_levels = [[120, 260, 1, 68, 60], [550, 300, 2, 65, 54], [420, 160, 1, 68, 60]
                ]
score = 0  # initialize: player score
hook_state = 'spare'  # initialize hook state: idle
carry = []  # the object currently latched to the hook (at most one)
def init_start():  # initialize / (re)start a level
    """Show the level-intro screen, then switch to the play state.

    Resets the hook state and pointer angle, swaps the window size, picks a
    random play background, and restarts the per-level 30-second countdown.
    """
    global hook_state
    global degree
    global backGround
    global backGround_judge
    global DIRECRTION_FLAG
    global time_countdown
    hook_state = 'spare'
    degree = 75
    DIRECRTION_FLAG = -1
    backGround = background_start[levels - 1]
    # NOTE(review): `screen` is assigned locally here (there is no
    # `global screen`), so the module-level `screen` used by the main loop
    # still references the previous surface — confirm this is intended.
    screen = pygame.display.set_mode((891, 647), 0, 32)
    screen.blit(backGround, (0, 0))
    pygame.display.update()
    time.sleep(2.0)
    # Pick one of the two play backgrounds at random.
    L = [background_play_2, background_play_1]
    x = random.choice(L)
    backGround_judge = 'play'
    backGround = x
    screen = pygame.display.set_mode((995, 712), 0, 32)
    screen.blit(backGround, (0, 0))
    time_countdown = 30
    pygame.display.update()
def end_level():
    """End the current level: advance on a met score target, else game over."""
    global backGround
    global backGround_judge
    global levels
    if score >= score_levels[levels - 1]:
        # Target reached: show the success screen and move to the next level.
        backGround_judge = 'success'
        backGround = background_success_end
        # NOTE(review): `screen` is local here (no `global screen`), same
        # shadowing concern as in init_start — confirm intended.
        screen = pygame.display.set_mode((550, 400), 0, 32)
        screen.blit(backGround, (0, 0))
        pygame.display.update()
        levels += 1
    else:
        # Target missed: show the game-over screen.
        backGround = background_end_all
        backGround_judge = 'end'
        screen = pygame.display.set_mode((544, 400), 0, 32)
        screen.blit(backGround, (0, 0))
        pygame.display.update()
def throw_hook():  # launch the hook
    """Put the hook into its downward travel state along the pointer angle."""
    global hook_state
    hook_state = 'down'
    rad = math.radians(degree)
    # Vertical component always points downward (positive y).
    speed['y'] = abs(math.cos(abs(rad)))
    # Horizontal component carries the sign of the pointer angle.
    magnitude = math.sin(abs(rad))
    speed['x'] = magnitude if degree > 0 else -magnitude
def clasp_hook():  # retract the hook
    """Reverse both velocity components so the hook travels back to the pivot."""
    for axis in ('x', 'y'):
        speed[axis] = -speed[axis]
def hit_gold(element):  # collision handling
    """Latch onto a gold piece and retract at a size-dependent speed.

    Bigger pieces are "heavier": their retraction factor k is smaller.
    """
    global hook_state
    print(speed['x'], speed['y'], sep=' ')
    retract_factors = {'big': 0.3, 'mid': 0.55, 'small': 0.75}
    if element in retract_factors:
        hook_state = 'carry_' + element
        k = retract_factors[element]
    else:
        # Unknown element: generic carry at full speed.
        hook_state = 'carry'
        k = 1
    # Reverse direction and scale by the weight factor.
    speed['x'] = -speed['x'] * k
    speed['y'] = -speed['y'] * k
    print(speed['x'], speed['y'], sep=' ')
def hit_stone():
    """Latch onto a stone and retract slowly (stones are the heaviest)."""
    global hook_state
    hook_state = 'carry_stone'
    print(speed['x'], speed['y'], sep=' ')
    # Reverse direction at one fifth of the outbound speed.
    speed['x'], speed['y'] = -speed['x'] * 0.2, -speed['y'] * 0.2
    print(speed['x'], speed['y'], sep=' ')
while True:  # main game loop: process events, then draw the active screen
    # print(backGround_judge, hook_state, degree, sep=' ')
    screen.blit(backGround, (0, 0))
    for event in pygame.event.get():
        if event.type == QUIT:
            exit()  # window close button
        if event.type == KEYDOWN:
            if event.key == K_ESCAPE:
                exit()  # ESC quits the game
            if backGround_judge == 'start':
                if event.key == K_SPACE or event.key == K_KP_ENTER or event.key == K_0:
                    init_start()
            if backGround_judge == 'play':
                if event.key == K_SPACE or event.key == K_KP_ENTER:
                    if hook_state == 'spare':
                        throw_hook()
            if backGround_judge == 'success':
                if event.key == K_SPACE or event.key == K_KP_ENTER:
                    init_start()
        if event.type == pygame.MOUSEBUTTONDOWN:
            if backGround_judge == 'start':
                # "start" button hot zone on the launch screen
                if 47 <= event.pos[0] <= 195 and 46 <= event.pos[1] <= 188:
                    init_start()
            if backGround_judge == 'play':
                # in-game "exit" button hot zone
                if 720 <= event.pos[0] <= 820 and 20 <= event.pos[1] <= 80:
                    exit()
            if backGround_judge == 'success':
                # "next level" button hot zone
                if 170 <= event.pos[0] <= 400 and 270 <= event.pos[1] <= 321:
                    init_start()
            if backGround_judge == 'end':
                # BUG FIX: the original condition was
                # `515 <= event.pos[1] <= 370`, which is never true, so the
                # quit button on the game-over screen was dead. The y-bounds
                # are now in ascending order.
                if 42 <= event.pos[0] <= 320 and 370 <= event.pos[1] <= 515:
                    exit()
        if event.type == TIME_DOWN:
            time_countdown -= 1
        if event.type == ROTATE_DEGREE and hook_state == 'spare':
            # Swing the idle pointer back and forth between -75 and 75 degrees.
            if DIRECRTION_FLAG == -1:
                degree = degree - 1
            else:
                degree = degree + 1
            if degree == 75:
                DIRECRTION_FLAG = -1
            if degree == -75:
                DIRECRTION_FLAG = 1
    if backGround_judge == 'play':
        # HUD: remaining time, level number, target score, current score.
        font_object = pygame.font.SysFont(None, 40)
        time_text_screen = font_object.render(str(time_countdown), True, (23, 168, 61))
        time_text_rect = time_text_screen.get_rect()
        time_text_rect.center = (951, 32)
        screen.blit(time_text_screen, time_text_rect)
        levels_text_screen = font_object.render(str(levels), True, (23, 168, 61))
        levels_text_rect = levels_text_screen.get_rect()
        levels_text_rect.center = (902, 75)
        screen.blit(levels_text_screen, levels_text_rect)
        score_levels_screen = font_object.render(str(score_levels[levels - 1]), True, (23, 168, 61))
        score_levels_rect = score_levels_screen.get_rect()
        score_levels_rect.center = (204, 79)
        screen.blit(score_levels_screen, score_levels_rect)
        score_screen = font_object.render(str(score), True, (23, 168, 61))
        score_rect = score_screen.get_rect()
        score_rect.center = (171, 34)
        screen.blit(score_screen, score_rect)
        # Draw every remaining gold piece and test the hook tip against it.
        # NOTE(review): gold_levels only holds data for level 1 — reaching
        # level 2 would raise IndexError here; confirm intended level data.
        i = 0
        while i < (len(gold_levels[levels - 1])):
            if gold_levels[levels - 1][i][2] == 'small':
                screen.blit(gold_small, (gold_levels[levels - 1][i][0], gold_levels[levels - 1][i][1]))
            elif gold_levels[levels - 1][i][2] == 'mid':
                screen.blit(gold_mid, (gold_levels[levels - 1][i][0], gold_levels[levels - 1][i][1]))
            else:
                screen.blit(gold_big, (gold_levels[levels - 1][i][0], gold_levels[levels - 1][i][1]))
            # Hook tip (sprite offset +20, +25) inside the piece's bounding box?
            if int(hook_pos_x + 20) in range(gold_levels[levels - 1][i][0],
                                             gold_levels[levels - 1][i][0] + gold_levels[levels - 1][i][3]) \
                    and int(hook_pos_y + 25) in range(gold_levels[levels - 1][i][1],
                                                      gold_levels[levels - 1][i][1] + gold_levels[levels - 1][i][4]):
                hit_gold(gold_levels[levels - 1][i][2])
                carry.append(gold_levels[levels - 1][i])
                print(gold_levels[levels - 1][i])
                # Remove the latched piece; do not advance i (list shifted).
                del gold_levels[levels - 1][i]
            else:
                i += 1
        # Same draw-and-collide pass for the stones.
        i = 0
        while i < len(stone_levels):
            if stone_levels[i][2] == 1:
                screen.blit(stone_2, (stone_levels[i][0], stone_levels[i][1]))
            else:
                screen.blit(stone_1, (stone_levels[i][0], stone_levels[i][1]))
            if int(hook_pos_x + 20) in range(stone_levels[i][0], stone_levels[i][0] + stone_levels[i][3]) \
                    and int(hook_pos_y + 25) in range(stone_levels[i][1], stone_levels[i][1] + stone_levels[i][4]):
                hit_stone()
                carry.append(stone_levels[i])
                print(stone_levels[i])
                del stone_levels[i]
            else:
                i += 1
        if hook_state == 'spare':
            # Idle: just draw the hook at the pivot, rotated to the pointer angle.
            hook_rotate = pygame.transform.rotate(hook, degree)
            screen.blit(hook_rotate, (hook_pos_x, hook_pos_y))
        if hook_state in ('down', 'carry_small', 'carry_mid', 'carry_big', 'carry_stone'):
            hook_rotate = pygame.transform.rotate(hook, degree)
            # Advance the hook along its current velocity.
            hook_pos_x = hook_pos_x + speed['x']
            hook_pos_y = hook_pos_y + speed['y']
            # Bounce back when the hook reaches any playfield edge.
            if hook_pos_x <= 0 or hook_pos_x >= 910 or hook_pos_y <= 0 or hook_pos_y >= 660:
                clasp_hook()
            # Hook is back at the pivot: score the catch and go idle.
            # NOTE(review): `or` treats the hook as home when either coordinate
            # is near the pivot — confirm `and` was not intended here.
            if 475 <= hook_pos_x <= 476 or 75 <= hook_pos_y <= 76:
                hook_pos_x = 476
                hook_pos_y = 76
                if hook_state == 'carry_big':
                    score += 500
                elif hook_state == 'carry_mid':
                    score += 200
                elif hook_state == 'carry_small':
                    score += 50
                elif hook_state == 'carry_stone':
                    score += 50
                hook_state = 'spare'
                carry = []
                speed['x'] = 0
                speed['y'] = 0
            # Draw the latched object trailing behind the hook tip.
            if carry != []:
                if carry[0][2] == 'small':
                    screen.blit(gold_small, (hook_pos_x + 30, hook_pos_y + 20))
                elif carry[0][2] == 'mid':
                    screen.blit(gold_mid, (hook_pos_x + 40, hook_pos_y + 44))
                elif carry[0][2] == 'big':
                    screen.blit(gold_big, (hook_pos_x - 20, hook_pos_y + 60))
                elif carry[0][2] == 1:
                    screen.blit(stone_1, (hook_pos_x, hook_pos_y + 30))
                else:
                    screen.blit(stone_2, (hook_pos_x, hook_pos_y + 30))
            screen.blit(hook_rotate, (hook_pos_x, hook_pos_y))
        if time_countdown == 0:
            end_level()
    pygame.display.update()  # flip the frame
|
"""
Created on 14/12/2019
@author: Sunny Raj
"""
"""
problem statement:
Write a Python program to count the number of strings where the string length is 2
or more and the first and last character are same from a given list of strings
"""
# initializing a sample list given in problem
sample_List = ['abc', 'xyz', 'aba', '1221']

# making a function to find expected output
def find_count(sample_list):
    """Count strings of length >= 2 whose first and last characters match.

    Arguments:
        sample_list -- iterable of strings to inspect.
    Returns:
        int -- the number of qualifying strings.
    """
    count_string = 0
    # BUG FIX: the original iterated the module-level sample_List instead of
    # the sample_list parameter, so the argument was silently ignored.
    for string in sample_list:
        # BUG FIX: the problem statement says "length is 2 or more"; the
        # original used `> 2`, which wrongly skipped 2-character strings.
        if len(string) >= 2 and string[0] == string[-1]:
            # increasing the count
            count_string = count_string + 1
    # returning the counted value
    return count_string

# printing the output
print("list contains {} numbers of such type of strings".format(find_count(sample_List)))
|
from flask import make_response, jsonify
def render(view_func, status=200, *data_dicts):
    """
    Wrap the result of a view function as a Flask response object,
    given the view function, status code, and data dict that will passed to the function
    Argument:
    view_func - a function, the function for generating a html string of a view (as found in the view files)
    [status] - an int, the HTTP status code [200]
    data_dicts - a variable-len list of dictionaries, the data dictionaries to pass to the view_func. These get flattened into a single dict.
    Return:
    a response_class object, the real response object
    Notes:
    The view_func needs to return a string
    """
    initial_dict = {}
    # Allow callers to omit `status` and pass a data dict in its place.
    if isinstance(status, dict):
        initial_dict.update(status)
        status = 200
    # BUG FIX: the original used `reduce`, which is not a builtin in
    # Python 3 and was never imported (NameError). A plain left-to-right
    # merge is equivalent: later dicts win on key clashes.
    data_dict = dict(initial_dict)
    for extra in data_dicts:
        data_dict.update(extra)
    # jsonify is special-cased: return a JSON response directly.
    if view_func == jsonify:
        return jsonify(**data_dict)
    html_str = view_func(data_dict) or "Your view function (%s) needs to return a string" % view_func.__name__
    return render_string(html_str, status)
def render_string(html_str, status=200):
    """
    Wrap a [most likely html] string as a Flask response object with status code
    Arguments:
    html_str - a string, the html string to be used as the body of the response
    [status] - a int, the status code of the response [200]
    Returns:
    a response_class object, the real response object
    """
    return make_response(html_str, status)
|
#
# trafficLightDemo.py
#
# Example of a simple state machine modeling the state of a traffic light
#
import statemachine
import trafficlightstate
class TrafficLight(trafficlightstate.TrafficLightStateMixin):
    """A traffic light whose behavior is delegated to state objects."""

    def __init__(self):
        # Start the light in the Red state (states come from trafficlightstate).
        self.initialize_state(trafficlightstate.Red)

    def change(self):
        # Advance to the next state in the light's cycle.
        self._state = self._state.next_state()
# Demo: cycle the light ten times, printing whether cars may go each time.
light = TrafficLight()
for i in range(10):
    print("{} {}".format(light, ("STOP", "GO")[light.cars_can_go]))
    light.crossing_signal()
    light.delay()
    print()
    light.change()
|
#Import modules
import os
import csv
#Read through the resource csv file titled "election_data.csv"
election_data = os.path.join("election_data.csv")
election_analysis = os.path.join("election_analysis.txt")

# Running totals: overall ballot count and per-candidate tallies.
# BUG FIX: the original block did not parse — it used `else <condition>:`
# (a SyntaxError), empty `{}` placeholders inside an f-string, compared an
# int against a tuple, and referenced undefined names (candidatelist,
# won_candidatevotes). Rewritten as a single tally pass.
total_votes = 0
CandidateVotes = {}

#Open the CSV File & make sure new line is an empty space
with open(election_data, 'r') as csvfile:
    #read through the csvfile and separate the three columns with a "," as the delimiter
    csvreader = csv.reader(csvfile, delimiter=',')
    # Read the header
    csvheader = next(csvreader)
    for row in csvreader:
        #Find the total vote count
        total_votes += 1
        # BUG FIX: with three columns the candidate is index 2; the original
        # indexed row[3], one past the last column (IndexError).
        candidate = row[2]
        CandidateVotes[candidate] = CandidateVotes.get(candidate, 0) + 1

#The winner of the election based on popular vote
election_winner = max(CandidateVotes, key=CandidateVotes.get) if CandidateVotes else ''

#Build the report: each candidate with vote share and raw count
report_lines = [
    "Election Results",
    "--------------------------------",
    "Total Votes: {}".format(total_votes),
    "--------------------------------",
]
for candidate, votes in CandidateVotes.items():
    percentage = (votes / total_votes * 100) if total_votes else 0.0
    report_lines.append("{}: {:.3f}% ({})".format(candidate, percentage, votes))
report_lines.append("---------------------------------")
report_lines.append("Winner: {}".format(election_winner))
outputtextfile = "\n".join(report_lines)

#Print out the solution and persist it in a text file
print(outputtextfile)
with open(election_analysis, 'w') as textfile:
    textfile.write(outputtextfile)
import dm3
import time
import struct
import unittest
from Fixture.fixture import *
from Util.decorator import method_tracer
from Util.mixin import *
class TestNandRules(unittest.TestCase):
    """NAND-rule workload driver (Python 2).

    The concrete per-product behavior lives in a mixin module that is
    imported and mixed into this instance at runtime, in setUp(), based on
    the DUT's abbreviation.
    """

    def setUp(self):
        print
        print '*********************************************************'
        print 'Set up starts'
        print '*********************************************************'
        # FIXME add wait to wait for power on delay from last case
        print 'sleep 5'
        # time.sleep(5)
        ssd = self.ssd = dut_setup_ssd(self)
        # Dynamically import the product-specific mixin (named after the
        # DUT abbreviation) and graft its methods onto this test instance.
        test_nand_rules_mixin_module_name = 'test_%s_nand_rules_mixin' % ssd['abbreviation'].lower()
        exec('import %s' % test_nand_rules_mixin_module_name)
        exec('mixin(self, %s.Test%sNandRules)' % (test_nand_rules_mixin_module_name, ssd.product_familiy_id()))
        # set_up() is provided by the mixed-in class.
        self.set_up()
        print
        print '*********************************************************'
        print 'Workload starts'
        print '*********************************************************'

    def tearDown(self):
        print
        print '*********************************************************'
        print 'Tear down starts'
        print '*********************************************************'
        ssd = self.ssd
        try:
            # tear_down() is provided by the mixed-in class.
            self.tear_down()
        finally:
            # Always release the device, even if tear_down() failed.
            dut_teardown_device(ssd)

    @method_tracer()
    def secure_erase(self):
        """Set a user password and issue an ATA security-erase sequence."""
        ssd = self.ssd
        buf = dm3.Buffer(1)
        buf.FillZeros()
        password = "UserPassword"
        buf.SetString(2, password)
        # Security-erase can be slow; allow a 60-second command timeout.
        with dm3.DeviceContext(ssd, commandTimeOut = 60):
            ssd.security().SetPassword(buf)
            ssd.security().ErasePrepare()
            ssd.security().EraseUnit(buf)

    @method_tracer()
    def write_one_sector_of_data(self, lba, data):
        """Write one sector at *lba*; *data* is a fill byte or a Buffer."""
        ssd = self.ssd
        if isinstance(data, int):
            buf = dm3.Buffer(1)
            buf.Fill(data)
        else:
            ssd.assert_true(isinstance(data, dm3.Buffer))
            buf = data
        ssd.WriteDmaExt(lba, 1, buf)
        return self

    @method_tracer()
    def read_verify_one_sector_of_data(self, lba, data):
        """Read one sector at *lba* and compare it against *data*."""
        ssd = self.ssd
        if isinstance(data, int):
            buf = dm3.Buffer(1)
            buf.Fill(data)
        else:
            ssd.assert_true(isinstance(data, dm3.Buffer))
            buf = data
        rBuffer = dm3.Buffer(1)
        ssd.ReadDmaExt(lba, 1, rBuffer)
        rBuffer.CompareTo(buf, 0, 1)
        return self

    @method_tracer()
    def write_full_pack_of_data(self):
        """Write the whole drive (aligned down to the max transfer size)."""
        return self.write_fi_aligned(0, self._align(self.ssd.max_lba(), self.ssd['max_sectors_per_read_write']))

    @method_tracer(name_only = True)
    def write_fi_aligned(self, start_lba, end_lba, data = 0xA5, sectors_per_write = None):
        """Sequentially write [start_lba, end_lba) in FI-chunk-aligned bursts."""
        ssd = self.ssd
        # Both the start and the span must align to the FI chunk size.
        self.assertTrue(start_lba % ssd['sectors_per_fi_chunk'] == 0 and (end_lba - start_lba) % ssd['sectors_per_fi_chunk'] == 0)
        # Find max divider
        if sectors_per_write is None:
            sectors_per_write = self._calc_max_sectors_per_read_write(start_lba, end_lba)
        self.assertTrue(sectors_per_write % ssd['sectors_per_fi_chunk'] == 0)
        self.assertTrue((end_lba - start_lba) % sectors_per_write == 0)
        wBuffer = dm3.Buffer(sectors_per_write)
        wBuffer.Fill(data)
        for lba in xrange(start_lba, end_lba, sectors_per_write):
            ssd.WriteDmaExt(lba, sectors_per_write, wBuffer)
        return self

    @method_tracer()
    def read_full_pack_of_data(self):
        """Read the whole drive (aligned down to the max transfer size)."""
        return self.read_fi_aligned(0, self._align(self.ssd.max_lba(), self.ssd['max_sectors_per_read_write']))

    @method_tracer(name_only = True)
    def read_fi_aligned(self, start_lba, end_lba, data = 0xA5, sectors_per_read = None, verify = False):
        """Sequentially read [start_lba, end_lba); return failed (lba, count) pairs.

        With verify=True, each successful read is also compared against the
        expected fill pattern *data*.
        """
        ssd = self.ssd
        # Find max divider
        if sectors_per_read is None:
            sectors_per_read = self._calc_max_sectors_per_read_write(start_lba, end_lba)
        self.assertTrue((end_lba - start_lba) % sectors_per_read == 0)
        uncorrectable_reads = []
        wBuffer = dm3.Buffer(sectors_per_read)
        wBuffer.Fill(data)
        rBuffer = dm3.Buffer(sectors_per_read)
        rBuffer.Fill(0x00)
        for lba in xrange(start_lba, end_lba, sectors_per_read):
            try:
                ssd.ReadDmaExt(lba, sectors_per_read, rBuffer)
            except:
                # Record the failed span and keep scanning the rest.
                uncorrectable_reads.append((lba, sectors_per_read))
            else:
                if verify:
                    self.assertTrue(rBuffer.CompareTo(wBuffer, 0, sectors_per_read).AreEqual)
        return uncorrectable_reads

    def _align(self, data, align):
        # Round *data* down to the nearest multiple of *align*.
        return data - data % align

    def _calc_max_sectors_per_read_write(self, start_lba, end_lba):
        # Largest FI-chunk multiple <= max transfer size that divides the span.
        ssd = self.ssd
        sectors_per_read_write = ssd['max_sectors_per_read_write']
        while (end_lba - start_lba) % sectors_per_read_write != 0:
            sectors_per_read_write -= ssd['sectors_per_fi_chunk']
        print 'sectors_per_read_write', sectors_per_read_write
        return sectors_per_read_write
|
import time
import libtorrent as lt
from werkzeug.urls import url_decode, url_unquote
from flask import Flask, Response, request, render_template
from flaskext.cache import Cache
# Flask configuration values (picked up by app.config.from_object below).
DEBUG = True
CACHE_TYPE = 'simple'
CACHE_THRESHOLD = 1000
app = Flask(__name__)
app.config.from_object(__name__)
cache = Cache(app)
# One global libtorrent session, listening on ports 6881-6891.
ses = lt.session()
ses.listen_on(6881, 6891)
def create_torrent(info):
    """Build a (filename, bencoded torrent data) pair from torrent metadata."""
    entry = {'info': lt.bdecode(info.metadata())}
    tracker_urls = [tracker.url for tracker in info.trackers()]
    if tracker_urls:
        entry['announce'] = tracker_urls[0]
        if len(tracker_urls) > 1:
            # NOTE(review): looks like BEP 12 expects announce-list to be a
            # list of tiers (list of lists) — confirm downstream clients.
            entry['announce-list'] = tracker_urls
    filename = info.name() + ".torrent"
    return (filename, lt.bencode(entry))
@app.route('/')
def index():
    """Serve the landing page, or convert a `magnet` query arg to a .torrent.

    Successful conversions are cached under the magnet's 'xt' field so
    repeated requests skip the (slow) metadata download.
    """
    # NOTE(review): dict.has_key() is Python 2 only — this module targets py2.
    if request.args.has_key('magnet'):
        magnet = url_unquote(request.args['magnet']).encode(request.charset)
        # Cache key: the 'xt' (exact topic) field of the magnet link.
        magnet_xt = url_decode(magnet[magnet.index("?") + 1:])['xt']
        torrent = cache.get(magnet_xt)
        if not torrent:
            try:
                handle = lt.add_magnet_uri(ses, magnet,
                        {'save_path': "./invalid",
                         'paused': False,
                         'auto_managed': False,
                         'duplicate_is_error': False})
                # Busy-wait until the metadata has been fetched from peers.
                while not handle.has_metadata():
                    time.sleep(0.01)
                handle.pause()
                info = handle.get_torrent_info()
                torrent = create_torrent(info)
                cache.set(magnet_xt, torrent)
                ses.remove_torrent(handle, lt.options_t.delete_files)
            except:
                # Best effort: fall back to whatever the cache holds now.
                # NOTE(review): bare except; if the cache is still empty,
                # `torrent` stays None and the subscript below raises — confirm.
                torrent = cache.get(magnet_xt)
        response = Response(response=torrent[1], mimetype='application/x-bittorrent')
        response.headers.add('Content-Disposition', 'attachment',
                             filename=torrent[0])
        return response
    return render_template('index.html')
if __name__ == '__main__':
    # Run the Flask development server (debug settings come from DEBUG above).
    app.run()
|
# ---------------------------------------------------
# Control-flow structures (if / elif / else conditional)
# ---------------------------------------------------
print("Verificacion de Acceso")
# Prompt for and store the user's age.
edadUsuario = int(input("Intruduce tu edad: "))
# Validate the age, most restrictive checks first.
if edadUsuario < 18:
    print("No puedes Pasar")
elif edadUsuario > 100:
    print("Edad Incorrecta")
elif edadUsuario > 80:
    print("Muy Viejo, No puede Pasar")
else:
    print("Puedes Pasar :)")
print("El programa ha finalizado")
|
from util import readDatabase, AccuracyHistory, showPerformance, showConfusionMatrix
from keras.models import Sequential
from keras.layers.core import Dense, Flatten
from keras.layers.convolutional import MaxPooling2D, Conv2D
from keras.optimizers import Adam
from keras import backend as K
# Neural network structure for this sample:
#
# · · · · · · · · · · (input data, 1-deep) X [batch, 28, 28, 1]
# @ @ @ @ @ @ @ @ @ @ -- conv. layer 5x5x1=>4 stride 1 W1 [5, 5, 1, 4] B1 [4]
# ∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶ Y1 [batch, 28, 28, 4]
# @ @ @ @ @ @ @ @ -- conv. layer 5x5x4=>8 stride 2 W2 [5, 5, 4, 8] B2 [8]
# ∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶ Y2 [batch, 14, 14, 8]
# @ @ @ @ @ @ -- conv. layer 4x4x8=>12 stride 2 W3 [4, 4, 8, 12] B3 [12]
# ∶∶∶∶∶∶∶∶∶∶∶ Y3 [batch, 7, 7, 12] => reshaped to YY [batch, 7*7*12]
# \x/x\x\x/ -- fully connected layer (relu) W4 [7*7*12, 200] B4 [200]
# · · · · Y4 [batch, 200]
# \x/x\x/ -- fully connected layer (softmax) W5 [200, 10] B5 [10]
# · · · Y [batch, 10]
# Read the training / testing dataset and labels
import argparse

# Command-line switch: "--verbose 1" enables plots/images; anything else
# (including omitting the flag entirely) disables them.
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--verbose", required=False, help="show images (0 = False, 1 = True)")
args = vars(ap.parse_args())
# Collapse the original None / '1' / other-string ladder into one boolean
# test: only the exact string '1' turns verbosity on, exactly as before.
verbose = args["verbose"] == '1'
# Load the dataset; reshape=True returns 28x28x1 image tensors suitable for
# Conv2D input (assumed MNIST-like -- TODO confirm in util.readDatabase).
xTrain, yTrain, xTest, yTest, yLabels = readDatabase(reshape=True)
# Network parameters
firstConvLayerDepth = 6
firstKernelSize = (5, 5)
secondConvLayerDepth = 12
secondKernelSize = (5, 5)
thirdConvLayerDepth = 24
thirdKernelSize = (5, 5)
numberOfNeurons = 200
# Training hyperparameters
learningRate = 0.001
noOfEpochs = 3
batchSize = 32
numberOfClasses = yTrain.shape[1]
featureSize = xTrain.shape[1]
# Program parameters
history = AccuracyHistory()
showPlot = verbose
# Network architecture
# Three same-padded conv layers (strides 1, 2, 2 -> feature maps 28x28,
# 14x14, 7x7), then a dense relu layer and a softmax classifier, mirroring
# the ASCII diagram at the top of the file.
model = Sequential()
model.add(Conv2D(firstConvLayerDepth, kernel_size=firstKernelSize,
                 activation='relu',
                 strides=(1, 1),
                 padding='same',
                 input_shape=(28, 28, 1)))
# output is 28x28
model.add(Conv2D(secondConvLayerDepth, kernel_size=secondKernelSize,
                 activation='relu',
                 strides=(2, 2),
                 padding='same'))
# output is 14x14
model.add(Conv2D(thirdConvLayerDepth, kernel_size=thirdKernelSize,
                 activation='relu',
                 strides=(2, 2),
                 padding='same'))
# output is 7x7
model.add(Flatten())
model.add(Dense(numberOfNeurons, activation='relu'))
model.add(Dense(numberOfClasses, activation='softmax'))
# NOTE(review): variable is named 'sgd' but the optimizer is Adam.
sgd = Adam(lr=learningRate)
model.compile(optimizer=sgd,
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.fit(x=xTrain,
          y=yTrain,
          epochs=noOfEpochs,
          batch_size=batchSize,
          verbose=1,
          callbacks=[history])
(loss, accuracy) = model.evaluate(xTest, yTest)
showPerformance(accuracy, loss, noOfEpochs, history, plot=showPlot)
if showPlot:
    predictedValues = model.predict(xTest, batch_size=1)
    showConfusionMatrix(yLabels, predictedValues)
# Release backend resources held by the TF/keras session.
K.clear_session()
# Acuracy 0.9862
#!/usr/bin/env python3
from socket import *
import os.path
def checkFile(file):
    """Return True when *file* names an existing regular file."""
    exists = os.path.isfile(file)
    return exists
if __name__ == '__main__':
    # Minimal UDP echo-to-console server bound to localhost:6969.
    serverPort = 6969
    serverSocket = socket(AF_INET, SOCK_DGRAM)
    # SO_REUSEADDR lets the server restart without waiting for the old
    # binding to time out.
    serverSocket.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
    serverSocket.bind(("127.0.0.1", serverPort))
    while 1:
        try:
            print("begin")
            message, clientaddr = serverSocket.recvfrom(1024)
            print(message)
        except OSError as err:
            # Bug fix: was a bare "except:" that also swallowed
            # KeyboardInterrupt/SystemExit; narrow to socket-level errors
            # and report what actually happened.
            print("exception")
            print(err)
|
## ========================================================================= ##
## Copyright (c) 2019 Agustin Durand Diaz. ##
## This code is licensed under the MIT license. ##
## utils.py ##
## ========================================================================= ##
import pygame
import pickle
from pathlib import Path
from tkinter import Tk
from tkinter.filedialog import askopenfilename
from tkinter.filedialog import asksaveasfilename
def existsFile(path):
    """Return True when *path* points at an existing regular file."""
    return Path(path).is_file()
def existsDir(path):
    """Return True when *path* points at an existing directory."""
    return Path(path).is_dir()
def getPathWithoutExtension(path):
    """Return *path* with its final extension removed.

    The suffix is only stripped when *path* is an existing file; otherwise
    the path is returned unchanged (original behaviour).
    """
    my_file = Path(path)
    res = path
    if my_file.is_file():
        suffix = my_file.suffix
        if suffix:
            # Bug fix: str.replace() removed the suffix text *anywhere* in
            # the path (e.g. '/tmp/a.py.d/a.py' -> '/tmp/a.d/a'); slice only
            # the trailing suffix off instead.
            res = str(path)[:-len(suffix)]
    return res
def getImageSize(path):
    """Load the image at *path* and return its (width, height) in pixels."""
    bounds = pygame.image.load(path).get_rect()
    return (bounds.width, bounds.height)
def loadPickle(defaultPath):
    """Ask the user for a .pkl file and unpickle its contents.

    Falls back to *defaultPath* when the dialog is cancelled.  Returns
    (filename, object), or (filename, None) when no usable path exists.
    """
    Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
    filename = askopenfilename(filetypes=[("Pkl files", "*.pkl")])  # "Open" dialog
    if len(filename) == 0:
        filename = defaultPath
    if filename:
        # Bug fix: the original leaked the open file object; 'with' closes it
        # even when unpickling fails.
        # NOTE: pickle.load executes code from the file -- only open trusted files.
        with open(filename, 'rb') as fh:
            return filename, pickle.load(fh)
    return filename, None
def savePickle(obj, defaultPath):
    """Ask the user for a destination and pickle *obj* there.

    Falls back to *defaultPath* when the dialog is cancelled.  Returns the
    filename that was written (or the falsy filename when nothing was).
    """
    Tk().withdraw()
    filename = asksaveasfilename(filetypes=[("Pkl files", "*.pkl")])
    if len(filename) == 0:
        # Bug fix: the original forced the '.pkl' suffix *before* checking
        # for a cancelled dialog, so Path('').with_suffix('.pkl') raised
        # ValueError and the default path was never reached.
        filename = defaultPath
    else:
        filename = Path(filename).with_suffix('.pkl').as_posix()
    if filename:
        # 'with' closes the handle (the original leaked the file object).
        with open(filename, 'wb') as fh:
            pickle.dump(obj, fh)
    return filename
import neat
import neat_utils.visualize
# Use this way:
# generatePickleGraph(defaultPath='../pkl_files/winner_neat_dip.pkl', configFile='../config_files/config_neat_dip')
def generatePickleGraph(defaultPath, configFile):
    """Render a saved NEAT genome (picked via a file dialog) to a PNG graph.

    :param defaultPath: pickle file used when the open dialog is cancelled
    :param configFile: path to the neat-python configuration file
    """
    # Bug fix: 'defaultPaht' was a typo that raised NameError at call time.
    pickleBundle = loadPickle(defaultPath=defaultPath)
    # Bug fix: the config path was hard-coded, silently ignoring configFile.
    config = neat.Config(neat.DefaultGenome,
                         neat.DefaultReproduction,
                         neat.DefaultSpeciesSet,
                         neat.DefaultStagnation,
                         configFile)
    path = getPathWithoutExtension(pickleBundle[0])
    node_names = {-1:'a1', -2: 'a1\'',-3:'a2', -4: 'a2\'',-5:'a0', -6: 'a0\'', 0:'u'}
    neat_utils.visualize.draw_net(config, pickleBundle[1], False, filename=path, fmt="png", node_names=node_names)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2021/10/5 16:04
# @Author : SuenDanny
# @Site :
# @File : nn_linear.py
# @Software: PyCharm
import torch
import torchvision
from torch import nn
from torch.nn import Conv2d, Linear
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
# CIFAR-10 test split as tensors; downloaded into ../data on first run.
dataset = torchvision.datasets.CIFAR10("../data", train= False, transform= torchvision.transforms.ToTensor(),
                                       download= True)
# drop_last avoids a final partial batch, so every batch flattens to exactly
# the 64*3*32*32 = 196608 features the Linear layer below expects.
dataloader = DataLoader(dataset, batch_size= 64, drop_last= True)
class Tudui(nn.Module):
    """Minimal demo network: a single fully connected layer mapping one
    flattened 64x3x32x32 batch (196608 values) onto 10 outputs."""

    def __init__(self):
        super(Tudui, self).__init__()
        self.linear1 = Linear(196608, 10)

    def forward(self, input):
        return self.linear1(input)
tudui = Tudui()
for data in dataloader:
    imgs,targets =data
    print(imgs.shape)  # (64, 3, 32, 32)
    # output = torch.reshape(imgs, (1, 1, 1, -1))
    # Flatten the whole batch into one 196608-element vector (note: this
    # merges the batch dimension away, unlike reshape to (64, -1)).
    output = torch.flatten(imgs)
    print(output.shape)  # (196608,)
    output = tudui(output)
    print(output.shape)  # (10,)
import numpy as np
from sklearn.utils.extmath import randomized_svd
def ols_regression(X, Y):
    """
    Performs standard linear regression
    :param X: Feature matrix (n, p)
    :param Y: Dependent data (n, m)
    :return: Linear regression coefficients (p, m)
    """
    # Solve the normal equations (X'X) b = X'Y directly.
    gram = X.T @ X
    moment = X.T @ Y
    return np.linalg.solve(gram, moment)
def ridge_regression(X, Y, lamda):
    """
    Performs ridge regression
    :param X: Feature matrix (n, p)
    :param Y: Dependent data (n, m)
    :param lamda: Regularisation parameter
    :return: Regression coefficients (p, m)
    """
    # Tikhonov-regularised normal equations: (X'X + lamda*I) b = X'Y.
    p = X.shape[1]
    penalised_gram = X.T @ X + lamda * np.eye(p)
    return np.linalg.solve(penalised_gram, X.T @ Y)
def covariance_regression(X, Y, cov):
    """
    Performs regression with pre-computed prior belief covariance matrix
    :param X: Feature matrix (n, p)
    :param Y: Dependent data (n, m)
    :param cov: Prior belief covariance matrix
    :return: Regression coefficients (p, m)
    """
    # Same structure as ridge, with the scalar penalty replaced by a full
    # prior matrix: (X'X + cov) b = X'Y.
    return np.linalg.solve(X.T @ X + cov, X.T @ Y)
class RRR:
    """
    Class for implementing reduced rank regression
    Includes methods for making predictions and generating the regression coefficients with
    any desired rank after fitting once
    """
    def __init__(self, rank=30):
        """
        Initialises variables that will be used later and sets the rank
        :param rank: Maximum rank for later analysis
        """
        self.V = 0      # right singular vectors of the fitted values (set in fit())
        self.bhat = 0   # full-rank OLS coefficients (set in fit())
        self.rank = rank
    def fit(self, bhat, X=None, yhat=None):
        """
        Calculates the fitted values yhat if not passed and then an SVD of yhat for use later
        :param bhat: Fitted linear coefficients
        :param X: Feature matrix (optional, not required if yhat passed directly)
        :param yhat: Fitted values with full rank (optional, speeds up computation if passed)
        :return:
        """
        self.bhat = bhat
        if yhat is None:
            # X must be provided when yhat is not -- TODO confirm callers guarantee this
            yhat = np.matmul(X, bhat)
        # Keep only the top-'rank' right singular vectors; the approximate
        # (randomized) SVD keeps this cheap for large yhat.
        _, _, self.V = randomized_svd(yhat, n_components=self.rank)
    def predict(self, X, rank):
        """
        For a given feature matrix predicts values using low rank coefficients
        :param X: Feature matrix
        :param rank: Rank of linear coefficients
        :return: Predictions
        """
        if rank > self.rank:
            # Requested rank beyond the fitted SVD: report and return None.
            print(f'Requested prediction rank exceeds fitted rank of {self.rank}')
            return None
        else:
            # Project the full-rank coefficients onto the top-'rank' subspace:
            # b_lowrank = bhat @ W @ W' with W the leading right singular vectors.
            W = self.V[0:rank, :].T
            A = np.matmul(self.bhat, W)
            return np.matmul(X, np.matmul(A, W.T))
    def gen_bhat(self, rank):
        """
        Outputs fitted linear coefficients for the desired rank
        :param rank: Rank of linear coefficients
        :return: Coefficients
        """
        if rank > self.rank:
            print(f'Requested prediction rank exceeds fitted rank of {self.rank}')
            return None
        else:
            # Same projection as predict(), returned without applying X.
            W = self.V[0:rank, :].T
            A = np.matmul(self.bhat, W)
            return np.matmul(A, W.T)
def r2(y, yhat, var=None):
    """Per-column r2 statistic.

    :param y: Array of target variables (numpy array, shape (n, m))
    :param yhat: Predicted values (numpy array, same shape)
    :param var: Optional pre-computed per-column variance of *y*
    :return: Array of r2 values, one per target column; for a constant
             column the value is -MSE (0 when predictions match exactly)
    """
    if var is None:
        var = y.var(axis=0)
    constant = var == 0
    mse = np.mean((y - yhat) ** 2, axis=0)
    # Adding the boolean mask to the denominator turns 0/0 into x/1 for
    # constant columns, avoiding division-by-zero warnings.
    return (var - mse) / (var + constant)
|
import random

# Number-guessing game: the program picks a number in [0, 99] and prompts
# until the user guesses it, counting every guess.
numero = random.randint(0,99)
respuesta = int (input ('introduce un numero entre 0 y 99'))
intentos = 1  # every guess counts, including the first one
while respuesta != numero:
    if respuesta < numero:
        print ("Muy Pequeño")
    else:
        print ("Muy Grande")
    # re-prompt (also fixes the 'entre o y 99' typo in the repeated prompt)
    respuesta = int (input ('introduce un numero entre 0 y 99'))
    intentos = intentos + 1
# Bug fix: the original only congratulated (and only counted) a correct
# guess made *inside* the loop, so a correct first guess printed
# "...en 0 intentos" and no "Felicidades" at all.
print ("Felicidades")
print ("Lo has conseguido en" ,intentos, "intentos")
import sys
import os
# Redirect stdin to a local file so input() reads prepared test data instead
# of the keyboard.  NOTE(review): hard-coded, machine-specific path, and the
# handle is deliberately kept open for the lifetime of the program.
f = open("C:/Users/user/Documents/python/other/import.txt","r")
sys.stdin = f
# -*- coding: utf-8 -*-
cand = [1,2,3,4]  # candidate digits used at every position
def dfs (i):
    """Return all (i+1)-length digit sequences over cand, as lists of strings.

    dfs(0) is the four single-digit sequences; each deeper level appends one
    more digit, giving 4**(i+1) sequences in lexicographic order.
    """
    if i <= 0:
        return [["1"],["2"],["3"],["4"]]
    # Bug fix: the original called dfs(i-1) inside the double loop, redoing
    # the whole recursion 4**(i+1) times (exponential slowdown); compute the
    # previous level once and extend it.
    prev = dfs(i - 1)
    temp = []
    for seq in prev:
        for k in range(4):
            temp.append(seq + [str(cand[k])])
    return temp
v = dfs(3)
# Bug fix (idiom): the original accumulated into a variable named 'str',
# shadowing the builtin; also replaced the quadratic += loop with join.
# Output is identical: one sequence per line, plus a trailing blank line
# from print().
output = "\n".join("".join(seq) for seq in v) + "\n"
print(output)
combustivel = 1.4   # price per litre (euros)
desconto = 0.1      # 10% discount when more than 40 litres are bought
litros = float(input("Litros abastecidos: "))
# Bug fix: the per-litre price was duplicated as a literal 1.4; use the
# named constant so a price change only has to happen in one place.
preco = combustivel * litros
if litros > 40:
    preco *= (1-desconto)
print("Custo: {} euros".format(round(preco, 2)))
|
import matplotlib.pyplot as plt
from pandas import date_range, Series, DataFrame, read_csv, qcut
# NOTE(review): pandas.tools was removed in modern pandas (now pandas.plotting).
from pandas.tools import plotting
from numpy.random import rand, randn
from pylab import *
import brewer2mpl
from matplotlib import rcParams
#colorbrewer2 Dark2 qualitative color table
dark2_colors = brewer2mpl.get_map('Dark2', 'Qualitative', 7).mpl_colors
# Global figure defaults: size/DPI, Dark2 colour cycle, white background.
# NOTE(review): 'axes.color_cycle' was removed in matplotlib 2.0
# (replaced by 'axes.prop_cycle').
rcParams['figure.figsize'] = (10, 6)
rcParams['figure.dpi'] = 150
rcParams['axes.color_cycle'] = dark2_colors
rcParams['lines.linewidth'] = 2
rcParams['axes.facecolor'] = 'white'
rcParams['font.size'] = 14
rcParams['patch.edgecolor'] = 'white'
rcParams['patch.facecolor'] = dark2_colors[0]
rcParams['font.family'] = 'StixGeneral'
def remove_border(axes = None, top = False, right = False, left = True, bottom = True):
    """Minimalist axes styling: hide the unselected spines and their ticks.

    :param axes: target Axes (defaults to the current axes)
    :param top/right/left/bottom: keep that spine (and its ticks) visible
    """
    # Bug fix: 'axes or plt.gca()' relied on the truth value of an Axes
    # object, which is deprecated/raises in matplotlib; test for None.
    ax = axes if axes is not None else plt.gca()
    ax.spines['top'].set_visible(top)
    ax.spines['right'].set_visible(right)
    ax.spines['left'].set_visible(left)
    ax.spines['bottom'].set_visible(bottom)
    #turn off all ticks
    ax.yaxis.set_ticks_position('none')
    ax.xaxis.set_ticks_position('none')
    #now re-enable visibles
    if top:
        ax.xaxis.tick_top()
    if bottom:
        ax.xaxis.tick_bottom()
    if left:
        ax.yaxis.tick_left()
    if right:
        ax.yaxis.tick_right()
if __name__ == '__main__':
    # Basic pylab line plot of y = x**2.
    x = linspace(0, 5, 10)
    y = x ** 2
    figure()
    plot(x, y, 'r')
    xlabel('x')
    ylabel('y')
    title('title')
    ##########################################################
    # Object-oriented API: one Axes with two labelled curves and a legend.
    fig, ax = plt.subplots()
    ax.plot(x, x**2, label = r"$y = \alpha^2$")
    ax.plot(x, x**3, label = r"$y = \alpha^3$")
    ax.set_xlabel(r'$\alpha$', fontsize = 18)
    ax.set_ylabel(r'$y$', fontsize = 18)
    ax.set_title('title')
    ax.legend(loc = 2) #upper left corner
    ##########################################################
    # Two subplots sharing the same data.
    fig, axes = plt.subplots(nrows = 1, ncols = 2)
    for ax in axes:
        ax.plot(x, y, 'r')
        ax.set_xlabel('x')
        ax.set_ylabel('y')
        ax.set_title('title')
    fig.tight_layout()
    show()
    ##########################################################
    # The .plot() method on Series and DataFrame objects is just a
    # wrapper around plt.plot:
    ts = Series(randn(1000), index = date_range('1/1/2000', periods = 1000))
    ts = ts.cumsum()
    ts.plot()
    df = DataFrame(randn(1000, 4), index = ts.index, columns = list('ABCD'))
    df = df.cumsum()
    plt.figure()
    df.plot()
    plt.legend(loc = 'best')
    show()
    ##########################################################
    # To switch to a logarithmic scale, set the logy parameter.
    #df.plot(logy = True)
    # Bar plot of one row.  NOTE(review): DataFrame.ix was removed in modern
    # pandas; the replacement is .iloc / .loc.
    plt.figure()
    df.ix[5].plot(kind = 'bar')
    plt.axhline(0, color = 'k')
    show()
    ##########################################################
    #Gistogram
    # Histograms of the daily differences, with increasing bin counts.
    plt.figure()
    df['A'].diff().hist()
    plt.figure()
    df['A'].diff().hist(bins = 50)
    plt.figure()
    df.diff().hist(color = 'k', alpha = 0.5, bins = 50)
    show()
    ##########################################################
    #Box-plot
    # Box plot of two columns grouped by the categorical column 'X'.
    df = DataFrame(rand(10, 2), columns = ['Col1', 'Col2'])
    df['X'] = Series(['A', 'A', 'A', 'A', 'A', 'B', 'B', 'B', 'B', 'B'])
    plt.figure()
    bp = df.boxplot(by = 'X')
    show()
|
def dynamicArray(n, queries):
    """HackerRank 'Dynamic Array': maintain n buckets and answer queries.

    Query [1, x, y] appends y to bucket (x ^ lastAnswer) % n; query
    [2, x, y] reads element y (mod bucket size) of that bucket, records it
    as lastAnswer and appends it to the returned list.
    """
    buckets = [[] for _ in range(n)]
    answers = []
    last_answer = 0
    for q_type, x, y in queries:
        idx = (x ^ last_answer) % n
        if q_type == 1:
            buckets[idx].append(y)
        elif q_type == 2:
            bucket = buckets[idx]
            last_answer = bucket[y % len(bucket)]
            answers.append(last_answer)
    return answers
|
'''
Delete contents of s3 bucket (so that delete-stack call will work)
'''
import boto3, sys

# Usage: python script.py <bucketname>
if len(sys.argv) == 1:
    print ("must pass the bucketname you want to delete contents from")
    sys.exit()
else:
    bucketname = sys.argv[1]
client = boto3.client('s3')
s3 = boto3.resource('s3')
# Paginate so buckets with more than 1000 objects are fully listed.
paginator = client.get_paginator('list_objects_v2')
page_iterator = paginator.paginate(Bucket=bucketname)
for page in page_iterator:
    # Bug fix: an empty bucket yields a page with no 'Contents' key, and the
    # original raised KeyError; default to an empty list instead.
    for item in page.get('Contents', []):
        print ('deleting: ' + item['Key'] + ' from bucket: ' + bucketname)
        s3.Object(bucketname, item['Key']).delete()
|
# For each of n test cases read integers a and b and print:
#  - a*2 (even a) or a*2-1 (odd a) when a == b
#  - a+b (even a) or a+b-1 (odd a) when a - b == 2
#  - "No Number" otherwise
n = int(input())
for _ in range(n):
    a, b = (int(tok) for tok in input().split())
    if a == b:
        print(a * 2 if a % 2 == 0 else a * 2 - 1)
    elif a - b == 2:
        print(a + b if a % 2 == 0 else a + b - 1)
    else:
        print("No Number")
|
# Read three space-separated integers and print the middle (median) value.
values = [int(tok) for tok in input().split(' ')]
values.sort()
print(values[1])
|
#!/usr/bin/env python
from pylab import *
import PylabUtils as plu
if __name__ == '__main__':
    # Homography demo: map the image corners (pixel coords) onto a
    # normalised [-1, 1] square via direct linear transform (DLT).
    # Points are homogeneous coordinates stacked as columns.
    xPrime = array ([[-1, 1, 1],
    [1, 1, 1],
    [1, -1, 1],
    [-1, -1, 1]]).T
    x = array ([[0, 0, 1],
    [640, 0, 1],
    [640, 480, 1],
    [0, 480, 1]]).T
    H = plu.cv.dlt.homog2D (xPrime, x)
    print "Homography from DLT: "
    print H
    print 'x_i:'
    print plu.cv.dehomogenize (x)
    transformed = H.dot (x)
    print 'H * x_i:'
    print plu.cv.dehomogenize (transformed)
    # Camera demo: intrinsics (focal lengths fx,fy and principal point
    # cx,cy) plus a 6-dof pose vector -- presumably (xyz, rpy); TODO confirm
    # against PylabUtils.cv.Camera.
    fx, fy = (1772.074719488086, 1775.368219152238)
    cx, cy = (639.0952186799014, 609.3077173645565)
    x_wc = array ([-.2, .3, 4, -.1, .3, -.05])
    K = array ([[fx, 0, cx],
    [0, fy, cy],
    [0, 0, 1.]])
    cam = plu.cv.Camera (x_wc, K)
    # uv = array ([453., 123.])
    # Back-project two pixel observations to 3D at depth lambda, then
    # forward-project them again as a round-trip sanity check.
    uv1 = array ([453., 123.])
    uv2 = array ([872., 631.])
    uv = column_stack ((uv1, uv2))
    lam = 1.
    Xtrace = cam.raytrace (uv, lam)
    print
    print 'Camera pose in world frame:'
    print x_wc
    print 'Observed point in image plane:'
    print uv
    print 'Back-projected 3D point in camera frame (lambda=%f):' % lam
    print Xtrace
    print 'Corresponding forward-projected image point'
    print cam.project (Xtrace)
|
import json
# Calorie table and exercise table, loaded once at import time.
# NOTE(review): the open() handles are never closed and the paths are
# relative to the current working directory -- confirm this is intentional.
dic=json.load(open("cal.json"))
dic_se=json.load(open("se.json"))
def Count_cal(text_food, text_exe):
    """Sum calorie values for the comma-separated foods in *text_food*,
    look up the burn value for the exercise named in *text_exe*, and
    return [calories eaten, calories burned, remaining value]."""
    val = 1785  # baseline daily value used by the remaining-value formula
    count = 0
    cal_val = 0
    burn_val = 0
    foods = text_food.split(",")
    exercise = str(text_exe)
    print(foods)
    # Accumulate calories for every food found in the table.
    for entry in dic:
        for food in foods:
            if str(food) == str(entry['name']):
                count += 1
                cal_val += int(entry['value'])
    # Single exercise lookup; the last matching entry wins.
    for entry in dic_se:
        if exercise == str(entry['name']):
            burn_val = entry['value']
    print(cal_val)
    r_value = ((val + (cal_val * 2)) / 2) - burn_val
    return [cal_val, burn_val, r_value]
def Count_per(temp):
    """Return *temp*/10 clamped into the percentage range [0, 100]."""
    print(temp)
    clamped = min(max(int(temp) / 10, 0), 100)
    print("new:"+str(clamped))
    return clamped
def Count_money(per):
    """Map a percentage to a savings amount: per*79 at 50%+, per*39.95
    between 30 and 50 (exclusive), otherwise 0.0."""
    print(per)
    p = float(per)
    if p >= 50:
        return (per*7900)/100
    if 30 < p < 50:
        return (per*3995)/100
    # p <= 30, including the p == 30 boundary, yields no savings
    return 0.0
if __name__=="__main__":
    #print(dic[0]['value'])
    # Smoke test: a sample meal plus a walking exercise.
    t = "milk,bread,coffee,pizza,burgar,cheese,butter,chicken,beef"
    ans = Count_cal(text_food=t,text_exe="walking")
    print(ans)
|
import cv2
import numpy as np
import flycapture2 as fc2
import time
import datetime
import sys
def getXY(img, colorMin, colorMax, threshVal, size_rank_of_target):
    """Locate the size_rank_of_target-th largest dark blob in img.

    Returns (cx, cy, bbx, bby, bbw, bbh, bbtheta, thresholded_image); all
    zeros when no contour is found.  colorMin/colorMax are kept for the
    commented-out inRange variant.
    """
    # imgt = cv2.inRange(img, colorMin, colorMax) # restricts to a color range
    # Bug fix: the threshold was applied to the module-global 'imgr' instead
    # of the 'img' parameter, silently coupling this helper to the main loop.
    rv, imgt = cv2.threshold(img, threshVal, 255, cv2.THRESH_BINARY_INV) #threshold(src, threshCuttOffVal, hiValPostThresh, threshType)
    imgtcopy = imgt.copy()
    ctours, h = cv2.findContours(imgt, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    if ctours: # check to see we have contours
        # Rank contours by area and pick the requested size rank.
        ctour_areas = [cv2.contourArea(c) for c in ctours]
        ctour_ind = np.argsort(ctour_areas)
        target_ctour = np.where(ctour_ind == size_rank_of_target)
        # (per-frame debug print of the raw contour removed -- it spammed
        # stdout on every frame)
        cnt = ctours[ctour_ind[target_ctour[0][0]]]
        bbxy, bbwh, bbtheta = cv2.minAreaRect(cnt)
        M = cv2.moments(cnt)
        if int(M['m00']) > 0: # in case we lose all blobs
            cx = int(M['m10']/M['m00'])
            cy = int(M['m01']/M['m00'])
        else:
            cx = 0
            cy = 0
    else:
        cx = 0
        cy = 0
        bbxy = (0,0)
        bbwh = (0,0)
        bbtheta = 0
    return cx, cy, bbxy[0], bbxy[1], bbwh[0], bbwh[1], bbtheta, imgtcopy
# NOTE(review): leftover debug prints; confirm they can be removed.
print "hellodfd"
print "dfdfdfdfddfdfdf"
# set up various things
colorMax = np.array([10, 10, 10],np.uint8) # 3 color channels
colorMin = np.array([250, 250, 250],np.uint8) # must be >= all values in colorThreshMin
dilate_kernel = np.ones((5,5), np.uint8)
threshVal = 20 #CHANGE THIS TO ADJUST LIGHTING CUT-OFFS
# to run program: press "up" arrow so it says "python ....."
frInd = 0
fps = 0
viz = True
size_rank_of_target = 0
# set up logfile
# One timestamped CSV per run; a fresh file is started every 1M frames below.
cur_datetime = datetime.datetime.now()
cur_dtime_str = cur_datetime.strftime('%Y-%m-%d_%Hh%Mm%Ss')
logfname = '//arenatracker_%s.csv' % cur_dtime_str
logf = open(logfname, 'w', 0) # 0 forces flush after each write call (buffer = 0)
logf.write('framenum,UTCtime,fps,animx,animy,bbx,bby,bbw,bbh,bbtheta\n')
# set up camera
c = fc2.Context()
c.connect(*c.get_camera_from_index(0))
p = c.get_property(fc2.FRAME_RATE)
c.set_property(**p)
if viz:
    cv2.namedWindow('image', cv2.WINDOW_NORMAL)
print c.get_camera_info()
print 'Camera setup complete'
# start capture process
c.start_capture()
im = fc2.Image()
start = time.time()
progress_t = start
first_start = start
progress_interval = 10 # time between prints in seconds
print "Starting tracking"
print "...press control-c to stop."
try:
    while True:
        # Frames arrive Bayer-patterned; debayer, then track on one channel.
        img = np.array(c.retrieve_buffer(im)) # these are coming in as crazy bayered things--and in grey
        timestamp = time.time()
        img = cv2.cvtColor(img, cv2.COLOR_BAYER_BG2RGB)# REal is COLOR_BAYER_BG2BGR) # debayer (pattern verified--cv2 likes BGR order, if vizing with cv2, use this)
        imgr = img[:,:,2].copy()
        # img = cv2.dilate(img, dilate_kernel, iterations = 2) # close image instead of dilating (or opening)
        cx, cy, bbx, bby, bbw, bbh, bbtheta, imgtcopy = getXY(imgr, colorMin, colorMax, threshVal, size_rank_of_target)
        # log data
        # fps is instantaneous: 1 / time since the previous frame.
        fps = 1./(timestamp-start)
        logf.write('%d,%.4f,%.4f,%d,%d,%.4f,%.4f,%.4f,%.4f,%.4f\n' % (frInd, timestamp, fps, cx, cy, bbx, bby, bbw, bbh, bbtheta))
        # print "FPS: %.3f, x: %.2f, y: %.2f\r" % (fps,cx,cy)
        # if timestamp >= progress_t + progress_interval:
        # m, s = divmod(timestamp - first_start, 60)
        # h, m = divmod(m, 60)
        # print "Since start: %d h, %d m, %d s, FPS: %.3f, x: %.2f,y: %.2f\r" % (h, m, s, fps,cx,cy)
        # progress_t = timestamp
        # # threshVal = np.percentile(imgr, 0.5) # works with r led panel and bright window light
        if viz:
            cv2.circle(imgr, (cx,cy), 10, (0,0,255), -1)
            cv2.imshow('image',imgtcopy) #imgtcopy #img
            cv2.waitKey(1)
        frInd = frInd + 1
        start = timestamp
        if divmod(frInd,1000000)[1] == 0: # if we go beyond 1m samples, close file and start a new one (prevents nastiness of huge files)
            logf.close()
            cur_datetime = datetime.datetime.now()
            cur_dtime_str = cur_datetime.strftime('%Y-%m-%d_%Hh%Mm%Ss')
            logfname = '//arenatracker_%s.csv' % cur_dtime_str
            logf = open(logfname, 'w', 0) # 0 forces flush after each write call (buffer = 0)
            logf.write('framenum,UTCtime,fps,animx,animy,bbx,bby,bbw,bbh,bbtheta\n')
except KeyboardInterrupt:
    # Graceful shutdown on control-c: close the log, release the camera.
    # close file
    logf.close()
    c.stop_capture()
    c.disconnect()
    if viz:
        cv2.destroyAllWindows()
    print '\nQuiting...'
# Code to track a bunch of flies contained in non-overlapping arenas.
# Spence lab 25 May 2016
#
# Define the locations of the wells.
# - Snap an image, then have user click and drag over each well.
# - shade previously drawn boxes
# - give a box ID to each box (draw this on to screen), this will be animalID
# - have option to abort process (if need to redo it)
# Track 2nd largest object in each roi (largest will be boarder)
#
# Output will be UTC of frame grab, bbx, bby, bbh, bbw, bbTheta, animalID
#
# To run on spencelab pc:
# 1. load virtual env: source ~/virtualenvs/flycapture2/bin/activate
# 2. cd ./spencelab/code/python/arenatracker/
# 3. python arenatracker_fly.py
#
# sources:
# The wrapper for the point grey flycapture library:
# https://github.com/jordens/pyflycapture2
# Tracking:
# http://stackoverflow.com/questions/16538774/dealing-with-contours-and-bounding-rectangle-in-opencv-2-4-python-2-7
# http://opencvpython.blogspot.it/2012/06/contours-2-brotherhood.html
# http://docs.opencv.org/3.1.0/d7/d4d/tutorial_py_thresholding.html#gsc.tab=0
# import cv2
# import numpy as np
# import flycapture2 as fc2
# import time
# import datetime
#
# def getXY(img, colorMin, colorMax, threshVal, size_rank_of_target):
# # imgt = cv2.inRange(img, colorMin, colorMax) # restricts to a color range
# rv, imgt = cv2.threshold(imgr, threshVal, 255, cv2.THRESH_BINARY_INV) #threshold(src, threshCuttOffVal, hiValPostThresh, threshType)
# imgtcopy = imgt.copy()
# ctours, h = cv2.findContours(imgt, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# if ctours: # check to see we have contours
# ctour_areas = [cv2.contourArea(c) for c in ctours]
# ctour_ind = np.argsort(ctour_areas)
# target_ctour = np.where(ctour_ind == size_rank_of_target)
# print ctours[ctour_ind[target_ctour[0][0]]]
# cnt = ctours[ctour_ind[target_ctour[0][0]]]
# bbxy, bbwh, bbtheta = cv2.minAreaRect(cnt)
# M = cv2.moments(cnt)
# if int(M['m00']) > 0: # in case we lose all blobs
# cx = int(M['m10']/M['m00'])
# cy = int(M['m01']/M['m00'])
# else:
# cx = 0
# cy = 0
# else:
# cx = 0
# cy = 0
# bbxy = (0,0)
# bbwh = (0,0)
# bbtheta = 0
# return cx, cy, bbxy[0], bbxy[1], bbwh[0], bbwh[1], bbtheta, imgtcopy
#
# # set up various things
# colorMax = np.array([10, 10, 10],np.uint8) # 3 color channels
# colorMin = np.array([250, 250, 250],np.uint8) # must be >= all values in colorThreshMin
# dilate_kernel = np.ones((5,5), np.uint8)
# threshVal = 20 #CHANGE THIS TO ADJUST LIGHTING CUT-OFFS
# # to run program: press "up" arrow so it says "python ....."
# frInd = 0
# fps = 0
# viz = True
# size_rank_of_target = 0
#
# # set up logfile
# cur_datetime = datetime.datetime.now()
# cur_dtime_str = cur_datetime.strftime('%Y-%m-%d_%Hh%Mm%Ss')
# logfname = '//arenatracker_%s.csv' % cur_dtime_str
# logf = open(logfname, 'w', 0) # 0 forces flush after each write call (buffer = 0)
# logf.write('framenum,UTCtime,fps,animx,animy,bbx,bby,bbw,bbh,bbtheta\n')
#
# # set up camera
# c = fc2.Context()
# c.connect(*c.get_camera_from_index(0))
# p = c.get_property(fc2.FRAME_RATE)
# c.set_property(**p)
# if viz:
# cv2.namedWindow('image', cv2.WINDOW_NORMAL)
# print c.get_camera_info()
# print 'Camera setup complete'
#
# # start capture process
# c.start_capture()
# im = fc2.Image()
# start = time.time()
# progress_t = start
# first_start = start
# progress_interval = 10 # time between prints in seconds
#
# print "Starting tracking"
# print "...press control-c to stop."
# try:
# while False:
# img = np.array(c.retrieve_buffer(im)) # these are coming in as crazy bayered things--and in grey
# timestamp = time.time()
# img = cv2.cvtColor(img, cv2.COLOR_BAYER_BG2BGR) # debayer (pattern verified--cv2 likes BGR order, if vizing with cv2, use this)
# imgr = img[:,:,2].copy()
# # img = cv2.dilate(img, dilate_kernel, iterations = 2) # close image instead of dilating (or opening)
# cx, cy, bbx, bby, bbw, bbh, bbtheta, imgtcopy = getXY(imgr, colorMin, colorMax, threshVal, size_rank_of_target)
# # log data
# fps = 1./(timestamp-start)
# logf.write('%d,%.4f,%.4f,%d,%d,%.4f,%.4f,%.4f,%.4f,%.4f\n' % (frInd, timestamp, fps, cx, cy, bbx, bby, bbw, bbh, bbtheta))
# # print "FPS: %.3f, x: %.2f, y: %.2f\r" % (fps,cx,cy)
# if timestamp >= progress_t + progress_interval:
# m, s = divmod(timestamp - first_start, 60)
# h, m = divmod(m, 60)
# print "Since start: %d h, %d m, %d s, FPS: %.3f, x: %.2f,y: %.2f\r" % (h, m, s, fps,cx,cy)
# progress_t = timestamp
# # threshVal = np.percentile(imgr, 0.5) # works with r led panel and bright window light
#
# if viz:
# cv2.circle(imgtcopy, (cx,cy), 10, (0,0,255), -1)
# cv2.imshow('image',imgtcopy) #imgtcopy #img
# cv2.waitKey(1)
#
# frInd = frInd + 1
# start = timestamp
#
# if divmod(frInd,1000000)[1] == 0: # if we go beyond 1m samples, close file and start a new one (prevents nastiness of huge files)
# logf.close()
#
# cur_datetime = datetime.datetime.now()
# cur_dtime_str = cur_datetime.strftime('%Y-%m-%d_%Hh%Mm%Ss')
# logfname = '//arenatracker_%s.csv' % cur_dtime_str
# logf = open(logfname, 'w', 0) # 0 forces flush after each write call (buffer = 0)
# logf.write('framenum,UTCtime,fps,animx,animy,bbx,bby,bbw,bbh,bbtheta\n')
#
# except KeyboardInterrupt:
# # close file
# logf.close()
# c.stop_capture()
# c.disconnect()
# if viz:
# cv2.destroyAllWindows()
# print '\nQuiting...'
# print fc2.get_library_version()
# print '1'
# c = fc2.Context()
# print c.get_num_of_cameras()
# print '2'
# c.connect(*c.get_camera_from_index(0))
# print c.get_camera_info()
# print '2.5'
# m, f = c.get_video_mode_and_frame_rate()
#
# print '3'
# c.set_video_mode_and_frame_rate(fc2.VIDEOMODE_1280x960Y8, fc2.FRAMERATE_7_5)
# #c.set_video_mode_and_frame_rate()
#
# #p = c.get_property(fc2.FRAME_RATE)
# #c.set_property(**p)
#
# m, f = c.get_video_mode_and_frame_rate()
# print m, f
# print '4'
# print c.get_video_mode_and_frame_rate_info(m, f)
# print '5'
# print c.get_property_info(fc2.FRAME_RATE)
# print '6'
# p = c.get_property(fc2.FRAME_RATE)
# print p
# print'7'
# c.set_property(**p)
# c.start_capture()
# im = fc2.Image()
# print [np.array(c.retrieve_buffer(im)).sum() for i in range(80)]
# print '8'
# a = np.array(im)
# print a.shape, a.base
# print '9'
# c.stop_capture()
# c.disconnect()
#
#
#
# while(False):
#
# # Capture frame-by-frame
# ret, frame = cap.read()
#
# # Our operations on the frame come here
#
# gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# # Display the resulting frame
# cv2.imshow('frame',gray)
# if cv2.waitKey(1) & 0xFF == ord('q'):
# break
# # When everything done, release the capture
# #cap.release()
# #cv2.destroyAllWindows()
|
import hyperneat
import numpy
import os
import sys
from art_basics import *
from render_help import *
nm=novelty_mapper()
import glob
from PIL import Image
def render_nov(direc,gen,out):
    # Render every archived individual of one generation to a PNG next to
    # its archive file.
    # direc: run directory containing generation<N>/archive<K> files
    # gen:   generation number to render
    # out:   NOTE(review): unused here; output paths are derived from direc.
    arcsize=len(glob.glob("%s/generation%d/archive*" %(direc,gen)))
    for k in range(arcsize):
        to_render = "%s/generation%d/archive%d"%(direc,gen,k)
        out_image = "%s/generation%d/rend_archive%d.png"%(direc,gen,k)
        print to_render,out_image
        render(to_render,out_image)
        if(k%50==0):
            print k  # coarse progress indicator
def render(in_fname,out_fname):
    # Load a saved hyperneat artist, render it at full resolution, and save
    # the result as an RGB image at out_fname.
    newartist=hyperneat.artist.load(in_fname)
    newartist.render_big()
    # '|i1' view: interpret the rendered buffer as signed bytes for PIL.
    obj=numpy.array(newartist.get_big(),'|i1')
    out=Image.fromarray(obj)
    out=out.convert("RGB")
    out.save(out_fname)
"""
for k in range(10,15):
print "rendering ", k
outdir = "render/nov%d/" % k
#os.system("mkdir %s" % outdir)
render_nov("res/artnov/run%d"%k,500,outdir)
"""
|
"""Testing for days between challenge."""
from days_between import days_diff
def test_same_day():
    """The difference between the same day is 0."""
    day = (1982, 4, 19)
    assert days_diff(day, day) == 0
def test_it_can_return_a_few_days_apart():
    """Return the difference in two close together days."""
    start, end = (1982, 4, 19), (1982, 4, 22)
    assert days_diff(start, end) == 3
def test_it_can_return_a_larger_date_range():
    """Return the difference in two far apart days."""
    expected = 238
    assert days_diff((2014, 1, 1), (2014, 8, 27)) == expected
def test_it_can_return_days_over_many_years():
    """Return the difference in two days years apart."""
    expected = 365 + 365  # 2018 and 2019 are both common years
    assert days_diff((2018, 1, 1), (2020, 1, 1)) == expected
def test_it_can_take_leap_years_into_account():
    """A leap year has 366 days, a common year 365."""
    assert days_diff((1, 1, 1), (2, 1, 1)) == 365
    assert days_diff((2020, 1, 1), (2021, 1, 1)) == 366
def test_it_can_return_days_from_the_start_to_the_end_of_time():
    """Start from 1 and end at 9999."""
    first_day, last_day = (1, 1, 1), (9999, 12, 31)
    assert days_diff(first_day, last_day) == 3652058
|
import json
from flask_login import login_user, login_required, logout_user
from itsdangerous import TimestampSigner, BadSignature, URLSafeSerializer, SignatureExpired
from SampleApp.DataManagement.db import User
from SampleApp import db, login_manager
from flask import (
Blueprint, request, Response, session
)
from SampleApp.DataManagement.serialization import UserSchema
bp = Blueprint('login', __name__, url_prefix='/login')
@bp.route('/login', methods=['POST'])
def login():
    """Authenticate a user from a JSON {username, password} payload.

    Returns 201 with a session on success, 401 for an unknown user or a
    wrong password.
    """
    username = request.json['username']
    password = request.json['password']
    user = User.query.filter_by(username=username).first()
    if user is None:
        body, status = {'message': 'user does not exists'}, 401
    elif user.check_password(password):
        login_user(user)
        body, status = {'message': 'authentication successful'}, 201
    else:
        body, status = {'message': 'authentication failed'}, 401
    return Response(
        response=json.dumps(body),
        status=status,
        mimetype='application/json'
    )
@bp.route('/register', methods=['POST'])
def register():
    """Create a new user from a JSON payload; 409 when the name is taken."""
    username = request.json['username']
    existing = User.query.filter_by(username=username).first()
    if existing is not None:
        return Response(
            response=json.dumps({'message': f'username: {username} already exists'}),
            status=409,
            mimetype='application/json'
        )
    # Replace the raw password with its hash before deserialising the payload.
    request.json['password'] = User.generate_password(request.json['password'])
    new_user = UserSchema().load(request.json)
    db.session.add(new_user)
    db.session.commit()
    return {'username': new_user.username}
@bp.route('/logout', methods=['POST'])
@login_required
def logout():
    """End the current session; always answers with a JSON message."""
    try:
        logout_user()
    except Exception:
        # Report flask-login failures instead of raising.
        return Response(
            response=json.dumps({'message': 'logout went wrong'}),
            status=404,
            mimetype='application/json'
        )
    return Response(
        response=json.dumps({'message': 'logout successful'}),
        status=200,
        mimetype='application/json'
    )
@login_manager.user_loader
def load_user(token):
    """flask-login user loader: resolve a signed session token to a User.

    The token is a URL-safe-serialised, timestamp-signed username.  On any
    verification failure the cause is stashed in the session (read by the
    unauthorized handler) and None is returned, so the request is treated
    as anonymous.
    """
    # NOTE(review): 'secret-key' is hard-coded in two places here; confirm it
    # should come from app config instead.
    serializer = URLSafeSerializer('secret-key')
    serialized_token = serializer.loads(token)
    signer = TimestampSigner('secret-key')
    try:
        # max_age is in seconds, so tokens expire after ~16.7 minutes.
        username = signer.unsign(serialized_token, max_age=1000)
    except SignatureExpired:
        session['failed_authentication_cause'] = 'token expired'
        return None
    except BadSignature:
        session['failed_authentication_cause'] = 'unauthorized token'
        return None
    # unsign() returns bytes; decode before querying.
    user = User.query.filter_by(username=username.decode('utf-8')).first()
    return user
@login_manager.unauthorized_handler
def unauthorized():
    """Turn a failed authentication into a JSON 401 carrying the stored cause."""
    cause = session["failed_authentication_cause"]
    payload = json.dumps({'message': f'failed to authorize: {cause}'})
    return Response(response=payload, status=401, mimetype='application/json')
|
# Demo 1: a caught ZeroDivisionError just prints its message.
try:
    a = 5/0
except Exception as e:
    print(e)
# Demo 2: 'finally' runs whether or not the except branch fired.
try:
    a = 5/0
except Exception as e:
    print(e)
finally:
    print("Final block")
#If use return in try block ,except block wont execute but final block get execute
def exam():
    """Demo: 5/0 raises before 'return 0' is reached, so both the except
    and the finally branches run and the function returns None."""
    try:
        a = 5/0
        return 0
    except Exception as err:
        print(err)
    finally:
        print("Final block of Exam")
exam()
#if we use exit() function in try block, then finally block wont execute as well as except block also.
def exam02():
    """Demo: 5/0 raises before exit() is reached, so the except branch
    prints the error and the finally branch still runs."""
    try:
        a = 5/0
        exit()
    except Exception as err:
        print(err)
    finally:
        print("Final block of Exam02")
#In python u can give except class in any manner but in java first Arithmetic excpetion follows Exception,
# we cannot give Excption class follows Arithemetic Exception
# NOTE: because 'except Exception' is listed first it always matches, so the
# ArithmeticError handler below is unreachable.
try:
    a = 5/0
except Exception as e:
    print(e)
except ArithmeticError as e:
    print(e)
#Nested Exception
# The inner try/except is independent; 5/0 raises in the outer try only and
# is handled by the outer except.
try:
    try:
        pass
    except:
        pass
    a = 5/0
except Exception as e:
    print(e)
#raise exception You can explicitly throw an exception in Python using ?raise? statement.
# raise will cause an exception to occur and thus execution control will stop in case it is not handled.
try:
    raise Exception("Hello")
except Exception as e:
    print(e)
# With 'else': the else branch only runs when no exception was raised, so
# "else" is NOT printed here.
try:
    raise Exception("Hello")
except Exception as e:
    print(e)
else:
    print("else")
#Exception propagation
#An exception is first thrown from the top of the stack and if it is not caught, it drops down the call stack to the previous method,If not caught there,
# the exception again drops down to the previous method, and so on until they are caught or until they reach the very bottom of the call stack.
# This is called exception propagation.
#Example 01
def Exam():
    """Raise a plain Exception so a caller can catch it."""
    raise Exception("Hello")
try:
    Exam()
except Exception as caught:
    print(caught)
#Example 02
def Exam01():
    """Deepest call in the propagation demo: always raises."""
    raise Exception("Hello")
def Exam02():
    """Calls Exam01() and lets its exception propagate upward."""
    Exam01()
    print("bye")  # unreachable: Exam01 always raises before this line
def Exam03():
    """Catches the exception that propagated up from Exam01 via Exam02."""
    try:
        Exam02()
    except Exception as caught:
        print(caught)
# else block after try ,except block but before finall block , if execption doesnt occur then else block get executed
# 5/1 succeeds, so the flow here is: try -> else ("Ok") -> finally.
try:
    a = 5/1
except Exception as e:
    print(e)
else:
    print("Ok")
finally:
    print("Final block")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.