index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
15,300 | 46748cfd8c946045aef12e881bc9c187cf7925e9 | '''
TestFilters.py
@author: River Allen
@date: July 8, 2010
@requires: pygtk, gobject, cairo
Some hacky code to simulate filters with. It is a bit dirty and undocumented, so be forewarned.
'''
from Data import models
import os
from numpy.random import randn
from Movement import MoveExplorer, Movement
from FilterManager import FilterManager
import KalmanFilter, ParticleFilter
import threading
import Sensor
import numpy as np
import gobject
import gtk
import pygtk
import cairo
import util
import time
class TestGUI:
    """PyGTK window that visualizes the Roomba localization map.

    Collects mouse clicks (converted from pixel to map coordinates) so a
    worker thread can treat them as waypoints, and redraws itself every
    500 ms via a gobject timeout so filter drawings stay current.
    NOTE(review): Python 2 / PyGTK era code.
    """

    def delete_event(self, widget, data=None):
        # Returning False lets GTK continue on to emit 'destroy'.
        print 'Exit GUI!'
        return False

    def destroy(self, widget, data=None):
        gtk.main_quit()

    def _update_clock(self):
        # Timeout callback: request a repaint. Returning True keeps the
        # gobject timer alive.
        self.window.queue_draw()
        return True

    def __init__(self, filter_draw_methods):
        """Build the window.

        :param filter_draw_methods: dict of name -> draw(cr) callables
            supplied by the FilterManager; all are invoked on expose.
        """
        # The transform scales by 1.5 and flips the y-axis (cairo's y grows
        # downward) with the map origin moved to init_pos.
        self.init_pos = [50, 475]
        self.transform_mat = cairo.Matrix(1.5, 0, 0, -1.5, self.init_pos[0], self.init_pos[1])
        self._filter_draw_methods = filter_draw_methods
        self._draw_methods = []
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.connect('delete_event', self.delete_event)
        self.window.connect('destroy', self.destroy)
        self.window.set_title('Roomba Localization')
        self.window.set_size_request(500, 600)
        self.area = gtk.DrawingArea()
        self.area.add_events(gtk.gdk.BUTTON_PRESS_MASK)
        self.area.connect("expose-event", self._expose_cb)
        self.area.connect('button_press_event', self._click_cb)
        self.map_frame = gtk.Frame('Map')
        self.map_frame.add(self.area)
        #self.window.add(self.area)
        self.window.add(self.map_frame)
        self.window.show_all()
        self._click_positions = []
        gobject.timeout_add(500, self._update_clock)

    def add_draw_method(self, meth):
        # Register an extra drawing callback (e.g. beacons, waypoints)
        # invoked on every expose after the map grid.
        self._draw_methods.append(meth)

    def _click_cb(self, widget, event):
        # Convert the device (pixel) click into user/map coordinates using
        # the same transform the drawing code uses.
        cr = widget.window.cairo_create()
        cr.set_operator(cairo.OPERATOR_SOURCE)
        cr.transform(self.transform_mat)
        #print 'Device Click Coords', event.get_coords()
        self._click_positions.append(cr.device_to_user(*event.get_coords()))
        #print 'Local Click Coords', self._click_positions
        #print self.click_positions

    def _expose_cb(self, widget, event):
        # Repaint: map grid first, then registered callbacks, then the
        # filters' own drawings.
        cr = widget.window.cairo_create()
        cr.set_operator(cairo.OPERATOR_SOURCE)
        #cr.translate(*self.init_pos)
        #cr.scale(1.5, 1.5)
        cr.transform(self.transform_mat)
        #cr.paint()
        self._draw_map(cr)
        for draw_meth in self._draw_methods:
            draw_meth(cr)
        for draw_meth in self._filter_draw_methods.values():
            draw_meth(cr)

    def _draw_map(self, cr):
        # Draw grid
        grid_lims = [200, 300]
        box_size = 25
        cr.set_source_rgba(0.7, 1.0, 0.3, 0.5)
        #cr.arc(50, 50, 10, 0, 2*3.1415)
        for x in range(grid_lims[0]/box_size):
            for y in range(grid_lims[1]/box_size):
                cr.rectangle(x * box_size, y * box_size, box_size, box_size)
                #cr.fill()
                cr.stroke()

    def get_click_positions(self):
        # Drain and return the clicks accumulated since the last call.
        ret_pos = self._click_positions
        self._click_positions = []
        return ret_pos

    def mainloop(self):
        gtk.main()
class TestThread(threading.Thread):
def __init__(self, fm, tg, auto=False):
super(TestThread, self).__init__()
self.fm = fm
# Need the Gui in order to get the click positions...
self.tg = tg
self.quit = False
self.auto = auto
def _auto_run(self):
'''
Does not work properly. Use _manual_run.
'''
pthjoin = os.path.join
(translation_model, translation_data, rotation_model,
rotation_data, measurement_model, measurement_data, beacons) = models.load_data(pthjoin('Data','001'))
# Kluge fix -- need to fix the .mat file
measurement_model = measurement_model[0]
total_beacons = beacons.shape[0]
# Move Straight: Vector based on Motion Model measurements
#translation_vec = [dist_hypot, dist_hypot, translation_model[2,0]]
translation_vec = translation_model[:,0]
translation_cov = np.cov(translation_data.T)
# Turn: Vector based on Motion Model measurements
left_rotation_vec = rotation_model[:,0]
right_rotation_vec = rotation_model[:,0] * -1
# May want to change this. This is a guess to get it working.
# Playing around with cov(rotation_data) may lead to better results.
rotation_cov = np.zeros([3,3])
rotation_cov[2,2] = rotation_model[2,1]
still_vec = np.array([0, 0, 0])
still_cov = np.zeros([3,3])
turn_leniency = np.deg2rad(10) # Used as an error judgement for deciding when to turn
moves = []
print 'D.TF.166'
explorer_pos = self.fm.get_explorer_pos_mean()
while not self.quit:
transition_vec = still_vec
transition_cov = still_cov
movement_type = 0 #0 - still, 1 - straight, 2 - left, 3 - right
#print 'explorer_pos', explorer_pos
if (explorer_pos[0] >= 100 and explorer_pos[1] <= 100 and abs((explorer_pos[2] % (2*np.pi)) - np.pi/2) > turn_leniency):
# Bottom Right Corner
#print 'BR'
movement_type = 2
elif (explorer_pos[0] >= 100 and explorer_pos[1] >= 200 and abs((explorer_pos[2] % (2*np.pi)) - np.pi) > turn_leniency):
# Top Right Corner
#print 'TR'
movement_type = 2
elif (explorer_pos[0] <= 100 and explorer_pos[1] >= 200 and abs((explorer_pos[2] % (2*np.pi)) - (3*np.pi)/2) > turn_leniency):
# Top Left Corner
#print 'TL'
movement_type = 2
# Slight error here: can only turn if angle is greater than or equal to zero
elif (explorer_pos[0] <= 100 and explorer_pos[1] <= 100 and abs((explorer_pos[2] % (2*np.pi)) - 0) > turn_leniency):
# Bottom Left Corner
#print 'BL'
movement_type = 2
else:
# Straight
movement_type = 1
moves.append(movement_type) # Keep track of moves taken
if movement_type == 1:
# Drive Straight
transition_vec = translation_vec
transition_cov = translation_cov
elif movement_type == 2:
# Rotate Left
transition_vec = left_rotation_vec
transition_cov = rotation_cov
elif movement_type == 3:
# Rotation Right
transition_vec = right_rotation_vec
transition_cov = rotation_cov
else:
# Motionless
pass
theta = explorer_pos[2]
transform = np.array([[np.cos(theta), -np.sin(theta), 0], [np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
transition_vec = np.dot(transform, transition_vec)
transition_cov = np.dot(np.dot(transform, transition_cov), transform.T)
# For plotting later
#old_particles = particles.copy()
# Perform motion model on samples
self.fm.move(transition_vec, transition_cov)
# For plotting later
#particles_before_resample = particles.copy()
beacon_ranges = []
for j in range(total_beacons):
prob_pos = self.fm.get_explorer_pos_mean()
obs_dis = np.sqrt((beacons[j][0] - prob_pos[0])**2 + (beacons[j,1] - prob_pos[1])**2) + (randn() * measurement_model[1]) + measurement_model[0]
beacon_ranges.append(obs_dis)
# Only Perform particle filter when translating straight.
# Doing it while it is stationary / rotating will make it too
# confident.
if movement_type == 1:
self.fm.observation(obs_dis, measurement_model[0], measurement_model[1],
beacons[j][0], beacons[j][1])
explorer_pos = self.fm.get_explorer_pos_mean()
def _manual_run(self):
pthjoin = os.path.join
(translation_model, translation_data, rotation_model,
rotation_data, measurement_model, measurement_data, beacons) = models.load_data(pthjoin('..','Data','001'))
# Kluge fix -- need to fix the .mat file
measurement_model = measurement_model[0]
sm = Sensor.SensorManager()
# Create Beacons
# Need to add draw beacon functions
total_beacons = beacons.shape[0]
for i in range(total_beacons):
sm.add_sensor(Sensor.BeaconSensor(measurement_model[0], measurement_model[1],
[0,10000], beacons[i][0], beacons[i][1]))
sm.add_sensor(Sensor.CompassSensor(0, np.deg2rad(5), [0, np.deg2rad(359.9)]))
sm.add_sensor(Sensor.Trilateration2DSensor(None, np.eye(2)*measurement_model[1], None))
# Move Straight: Vector based on Motion Model measurements
#translation_vec = [dist_hypot, dist_hypot, translation_model[2,0]]
translation_vec = translation_model[:,0]
translation_cov = np.cov(translation_data.T)
#translation_mov = Movement.Movement(translation_vec, translation_cov)
# Turn: Vector based on Motion Model measurements
left_rotation_vec = rotation_model[:,0]
right_rotation_vec = rotation_model[:,0] * -1
# May want to change this. This is a guess to get it working.
# Playing around with cov(rotation_data) may lead to better results.
rotation_cov = np.zeros([3,3])
rotation_cov[2,2] = rotation_model[2,1]
#left_rot_mov = Movement.Movement(left_rotation_vec, rotation_cov)
#right_rot_mov = Movement.Movement(right_rotation_vec, rotation_cov)
#turn_leniency = np.deg2rad(10) # Used as an error judgement for deciding when to turn
print 'D.TF.284'
explorer_pos = self.fm.get_explorer_pos_mean()
me = MoveExplorer(explorer_pos, [12, 12, np.deg2rad(6)], debug=True,
translation_moves=[Movement(translation_vec, translation_cov),
Movement(np.array([10, 0.5, np.deg2rad(0.5)]), np.eye(3, dtype=np.float32) * 0.0001),
Movement(np.array([50, 1, np.deg2rad(0.7)]), np.eye(3, dtype=np.float32) * 0.0003)],
rotation_moves=[Movement(right_rotation_vec, rotation_cov),
Movement(left_rotation_vec, rotation_cov),
Movement(np.array([0, 0, np.deg2rad(5)]), rotation_cov),
Movement(np.array([0, 0, np.deg2rad(-5)]), rotation_cov)])
def draw_all_waypoints(cr):
#cr.select_font_face('arial')
cr.set_font_size(15)
#cr.set_font_matrix(cairo.Matrix(1, 0, 0, -1, 0, 0))
cr.new_path()
def draw_waypoints(cr, i, waypoints, rgba_color):
for pnt in waypoints:
cr.set_source_rgba(*rgba_color)
cr.arc(pnt[0], pnt[1], 6, 0, 2 * np.pi)
cr.stroke()
cr.move_to(pnt[0], pnt[1])
#cr.show_text(str(i))
#i += 1
waypoints = me.get_old_waypoints()[-2:]
draw_waypoints(cr, -2, waypoints, (0.5, 0, 0, 0.8))
waypoints = me.get_current_waypoints()
draw_waypoints(cr, 1, waypoints[0:1], (0.4, 0.9, 0.8, 0.8))
if len(waypoints) > 1:
draw_waypoints(cr, 2, waypoints[1:], (0, 0, 0.5, 0.8))
cr.new_path()
#=======================================================================
# Add Drawing Methods
#=======================================================================
self.tg.add_draw_method(draw_all_waypoints)
for beacon in sm.sensors_by_type['Beacon']:
self.tg.add_draw_method(beacon.draw)
accumulate_distance = np.array([0, 0, 0], np.float32)
while not self.quit:
# Poll for any new user input.
waypoints = tg.get_click_positions()
for pnt in waypoints:
me.move_to(pnt[0], pnt[1])
transition_mov = me.get_next_move(explorer_pos)
if transition_mov is not None:
print 'D.TF.337'
explorer_pos = self.fm.get_explorer_pos_mean()
self.fm.move(transition_mov.vec, transition_mov.cov)
accumulate_distance += np.abs(transition_mov.vec)
time.sleep(0.5)
# Only perform filter observation when enough translation has occurred.
# Observations must be independent.
if accumulate_distance[0] >= 20:
accumulate_distance[0] = 0
# Testing Trilateration
beacon_sensors = sm.sensors_by_type['Beacon']
print 'D.TF.351'
prob_pos = self.fm.get_explorer_pos_mean()
for j in range(total_beacons):
#prob_pos = self.fm.get_explorer_pos_mean()
obs_dis = np.sqrt((beacons[j][0] - prob_pos[0])**2 + (beacons[j,1] - prob_pos[1])**2) + (randn() * measurement_model[1]) + measurement_model[0]
#self.fm.observation(obs_dis, sm.sensors_by_type['Beacon'][j])
beacon_sensors[j].obs = obs_dis
self.fm.observation(beacon_sensors, sm.sensors_by_type['Trilateration2D'][0])
if accumulate_distance[2] >= np.deg2rad(10):
print 'D.TF.364'
explorer_pos = self.fm.get_explorer_pos_mean()
#self.fm.observation(explorer_pos[2] + (randn() * sm.sensors_by_type['Compass'][0].variance), sm.sensors_by_type['Compass'][0])
accumulate_distance[2] = 0
time.sleep(0.1)
def run(self):
if self.auto:
self._auto_run()
else:
self._manual_run()
if __name__ == '__main__':
    print 'Testing FilterManager...'
    # Explorer starts at (200, 0) facing "up" (heading pi/2).
    origin_pos = np.array([200, 0, np.pi/2])
    origin_cov = np.eye(3) * 3 # Initial Covariance
    origin_cov[2,2] = 0.02
    # Need to call this or gtk will never release locks
    gobject.threads_init()
    fm = FilterManager()
    # Toggle which filters participate; currently only the particle filter.
    run_kf = False
    run_pf = False
    #run_kf = True
    run_pf = True
    if run_kf:
        fm.add_filter(KalmanFilter.KalmanFilter(origin_pos, origin_cov))
    if run_pf:
        fm.add_filter(ParticleFilter.ParticleFilter(origin_pos, origin_cov))
    tg = TestGUI(fm.get_draw())
    #tt = TestThread(fm, tg, auto=True)
    tt = TestThread(fm, tg, auto=False)
    # Worker thread runs the filters while the GTK main loop blocks here.
    tt.start()
    tg.mainloop()
tt.quit = True |
15,301 | 7fc756367b9de2bc3a8bbade1823cada3ca094e9 | # ---------------------------------------------------------
# Outlier detection by commute-time and Euclidean distances
#
# Sercan Taha Ahi, Nov 2011 (tahaahi at gmail dot com)
# ---------------------------------------------------------
import numpy as np
import scipy as Sci
from scipy import linalg
from scipy.cluster.vq import *
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.mlab as mm
import matplotlib.animation as animation
from mpl_toolkits.mplot3d import Axes3D
import pylab
import sys
#import math
import string
import nnmf
import csv
def parse_stats(fname):
    """Parse one season's per-player stats CSV into a feature matrix.

    First pass counts data rows (rows minus the header); the second pass
    fills one numpy column vector per feature.  ``colnum`` values below
    are 1-based column positions in the CSV row.

    Returns (X, flabels, PLY, ptr1, ptr2, ptr3): the stacked feature
    matrix, its column labels, the player-name list, and boolean masks
    for POS == 1 / 2 / 3 (centers / forwards / guards).
    """
    # Pass 1: count the players.
    ifile = open(fname, "rb")
    reader = csv.reader(ifile)
    n_players = 0
    for row in reader:
        n_players += 1
    n_players -= 1  # drop the header row
    ifile.close()
    PLY = list() # [0]
    POS = np.zeros((n_players,1)) # [2]
    AGE = np.zeros((n_players,1)) # [6]
    G = np.zeros((n_players,1)) # [7]
    MINPG = np.zeros((n_players,1)) # [9]
    s1MPG = np.zeros((n_players,1))
    s1APG = np.zeros((n_players,1))
    s2MPG = np.zeros((n_players,1))
    s2APG = np.zeros((n_players,1))
    s3MPG = np.zeros((n_players,1))
    s3APG = np.zeros((n_players,1))
    ORBPG = np.zeros((n_players,1))
    DRBPG = np.zeros((n_players,1))
    ASTPG = np.zeros((n_players,1))
    STLPG = np.zeros((n_players,1))
    BLKPG = np.zeros((n_players,1))
    TOPG = np.zeros((n_players,1))
    PFPG = np.zeros((n_players,1))
    # Pass 2: fill the feature columns.
    ifile = open(fname, "rb")
    reader = csv.reader(ifile)
    rownum = 0
    for row in reader:
        if (rownum > 0):
            colnum = 1
            for col in row:
                if (colnum==1):
                    PLY.append(col)
                elif (colnum==3):
                    # Position encoded as 1=center, 2=forward, 3=guard.
                    if col=='C':
                        POS[rownum-1] = 1
                    elif col=='F':
                        POS[rownum-1] = 2
                    else:
                        POS[rownum-1] = 3
                elif (colnum==7):
                    AGE[rownum-1] = float(col)
                elif (colnum==8):
                    G[rownum-1] = float(col)
                elif (colnum==10):
                    MINPG[rownum-1] = float(col)
                elif (colnum==11):
                    PTS = float(col)
                elif (colnum==13):
                    FGM = float(col)
                elif (colnum==14):
                    FGA = float(col)
                elif (colnum==16):
                    s1MPG[rownum-1] = float(col) / G[rownum-1]
                elif (colnum==17):
                    s1APG[rownum-1] = float(col) / G[rownum-1]
                elif (colnum==19):
                    TPM = float(col)
                elif (colnum==20):
                    TPA = float(col)
                elif (colnum==23):
                    ORBPG[rownum-1] = float(col)
                elif (colnum==25):
                    DRBPG[rownum-1] = float(col)
                elif (colnum==29):
                    ASTPG[rownum-1] = float(col)
                elif (colnum==31):
                    STLPG[rownum-1] = float(col)
                elif (colnum==33):
                    BLKPG[rownum-1] = float(col)
                elif (colnum==35):
                    TOPG[rownum-1] = float(col)
                elif (colnum==37):
                    PFPG[rownum-1] = float(col)
                colnum += 1
            # Derived per-game columns: 3-pointers made/attempted, and
            # 2-pointers as field goals minus 3-pointers.
            s3MPG[rownum-1] = TPM / G[rownum-1]
            s3APG[rownum-1] = TPA / G[rownum-1]
            s2MPG[rownum-1] = (FGM-TPM) / G[rownum-1]
            s2APG[rownum-1] = (FGA-TPA) / G[rownum-1]
        rownum += 1
    ifile.close()
    flabels = ["POS", "AGE", "G", "MINPG", "s1MPG", "s1APG", "s2MPG", "s2APG", "s3MPG", "s3APG", "ORBPG", "DRBPG", "ASTPG", "STLPG", "BLKPG", "TOPG", "PFPG"]
    X = np.hstack([POS, AGE, G, MINPG, s1MPG, s1APG, s2MPG, s2APG, s3MPG, s3APG, ORBPG, DRBPG, ASTPG, STLPG, BLKPG, TOPG, PFPG])
    print "\tX: " + str(X.shape[0]) + "x" + str(X.shape[1])
    # Boolean masks per position (column 0 of X is POS).
    ptr1 = np.array(X[:,0]==1).transpose()
    ptr2 = np.array(X[:,0]==2).transpose()
    ptr3 = np.array(X[:,0]==3).transpose()
    return X, flabels, PLY, ptr1, ptr2, ptr3
def get_stats(X, ptr1, ptr2, ptr3, PLY, idx, flabels):
    """Save a per-feature boxplot and a 2-D PCA scatter of the players.

    Writes boxplot.png and PCA_out1.png to the working directory.
    NOTE(review): PLY and idx are currently unused except in the
    commented-out highlighting code below.
    """
    # Boxplot of every feature column (fliers mark candidate outliers).
    plt.figure()
    ax1 = plt.axes()
    bp = plt.boxplot(X, whis=1.5)
    plt.setp(bp['boxes'], color='black')
    plt.setp(bp['whiskers'], color='black')
    plt.setp(bp['fliers'], color='red', marker='+')
    xtickNames = plt.setp(ax1, xticklabels=flabels)
    plt.setp(xtickNames, rotation=45, fontsize=8)
    plt.draw()
    plt.savefig("boxplot.png", dpi=300)
    # (PCA)
    # pos = {C, F, G}
    # PCA on all features except POS (column 0).
    p = mm.PCA(X[:,1:])
    print "\nPCA:"
    print p.fracs
    print sum(p.fracs)
    # Size the figure to the aspect ratio of the first two components.
    xmin = min(p.Y[:,0])
    xmax = max(p.Y[:,0])
    xrng = xmax - xmin
    ymin = min(p.Y[:,1])
    ymax = max(p.Y[:,1])
    yrng = ymax - ymin
    fx = 8
    fy = yrng * fx / xrng
    plt.figure(figsize=(fx,fy))
    '''
    plt.plot(p.Y[ptr1,0], np.zeros((sum(ptr1),1))+1, 'ro', hold='on')
    plt.plot(p.Y[ptr2,0], np.zeros((sum(ptr2),1))+2, 'go', hold='on')
    plt.plot(p.Y[ptr3,0], np.zeros((sum(ptr3),1))+3, 'bo', hold='on')
    plt.ylim(0,4)
    '''
    # Scatter by position: red=centers, green=forwards, blue=guards.
    plt.plot(p.Y[ptr1,0], p.Y[ptr1,1], 'ro', hold='on')
    plt.plot(p.Y[ptr2,0], p.Y[ptr2,1], 'go', hold='on')
    plt.plot(p.Y[ptr3,0], p.Y[ptr3,1], 'bo', hold='on')
    '''
    for i in range(10):
        print idx[i]
        if sum(nonzero(ptr1)==idx[i])>0:
            plt.plot(p.Y[idx[i],0], p.Y[idx[i],1], 'rs', hold='on')
        elif sum(nonzero(ptr2)==idx[i])>0:
            plt.plot(p.Y[idx[i],0], p.Y[idx[i],1], 'gs', hold='on')
        elif sum(nonzero(ptr3)==idx[i])>0:
            plt.plot(p.Y[idx[i],0], p.Y[idx[i],1], 'bs', hold='on')
    '''
    plt.draw()
    plt.savefig("PCA_out1.png", dpi=300)
    return
def normalize(X):
n_samples = X.shape[0]
n_features = X.shape[1]
print "\tn_samples = " + str(n_samples)
print "\tn_features = " + str(n_features)
Xmean = X.mean(axis=0)
Xstd = X.std(axis=0)
Y = np.zeros((n_samples, n_features))
for i in range(0,n_samples):
Y[i,:] = X[i,:] - Xmean
Y[i,:] = Y[i,:] / Xstd
#for i in range(0,n_features):
# print "\t\t" + str(Y[:,i].std())
#print "\tmax = " + str(Y.max())
#print "\tmin = " + str(Y.min())
return Y
def dist_mahalanobis(X):
    """Pairwise squared Mahalanobis distances between the rows of X.

    Parameters
    ----------
    X : (n_samples, n_features) array of observations.

    Returns
    -------
    D : (n_samples, n_samples) array; D[i, j] is the squared Mahalanobis
        distance between samples i and j under the sample covariance of X.
    """
    n_samples = X.shape[0]
    n_features = X.shape[1]
    Xmean = X.mean(axis=0)
    # Center the data.
    Xc = np.zeros((n_samples, n_features))
    for i in range(0, n_samples):
        Xc[i,:] = X[i,:] - Xmean
    # BUG FIX: the unbiased sample covariance normalizes by
    # (n_samples - 1), not (n_features - 1); the old divisor scaled every
    # distance by a wrong constant factor.
    S = np.dot(Xc.T, Xc) / (n_samples - 1)
    Si = linalg.inv(S)
    D = np.zeros((n_samples, n_samples))
    for i in range(0, n_samples):
        a = X[i,:]
        for j in range(0, n_samples):
            b = X[j,:]
            # (a - b) S^-1 (a - b)^T  -- squared Mahalanobis distance.
            # (Removed the original no-op reshape() calls whose results
            # were discarded.)
            D[i,j] = np.dot(np.dot(a-b,Si),(a-b).T)
    return D
def dist_euclidean(X):
    """Pairwise Euclidean distance matrix for the rows of X.

    Returns an (n_samples, n_samples) array with zeros on the diagonal.
    """
    # Broadcast all row differences at once instead of a double loop:
    # diff[i, j, :] == X[i] - X[j].
    diff = X[:, np.newaxis, :] - X[np.newaxis, :, :]
    return np.sqrt((diff ** 2).sum(axis=-1))
def dist_commute_time(X):
    """Commute-time distances on a fully connected similarity graph.

    Builds a Gaussian-kernel affinity matrix from the pairwise Euclidean
    distances (bandwidth = overall std of E), then computes commute-time
    distances via the pseudo-inverse of the graph Laplacian.

    Returns (CTD, E): the commute-time and Euclidean distance matrices.
    """
    n_samples = X.shape[0]
    #n_features = X.shape[1]
    E = dist_euclidean(X)
    Estd = E.std()
    # Affinity: A_ij = exp(-E_ij^2 / std(E)^2).
    A = np.exp(-E**2 / Estd**2);
    D = np.zeros((n_samples,n_samples))
    for i in range(0,n_samples):
        A[i,i] = 0  # remove self-loops
        D[i,i] = A[i,:].sum(dtype=float)  # degree matrix
    V = A.sum(dtype=float)  # graph volume (sum of all degrees)
    print "\tGraph volume = " + str(V)
    #D = diag(A.sum(axis=1));
    L = D - A;  # graph Laplacian
    Lp = linalg.pinv(L);  # Moore-Penrose pseudo-inverse (L is singular)
    CTD = np.zeros((n_samples,n_samples))
    for i in range(0,n_samples):
        for j in range(0,n_samples):
            # Commute-time distance: V * (Lp_ii + Lp_jj - 2 Lp_ij).
            CTD[i,j] = V * (Lp[i,i] + Lp[j,j] - 2*Lp[i,j])
    #CTD = CTD / CTD.max()
    #E = E / E.max()
    return CTD, E
def get_top_n_outliers(D, knn, n, dtype):
    """Rank samples by mean distance to their knn nearest neighbours and
    return the indices of the n largest scores (most isolated samples).

    Also saves a diagnostic plot of the sorted scores to '<dtype>.png'.
    """
    n_samples = D.shape[0]
    Dtop = np.zeros((n_samples))
    for row in range(0, n_samples):
        order = np.argsort(D[row, :])
        # Skip the first entry: it is the sample itself (distance 0).
        neighbours = order[1:knn + 1]
        Dtop[row] = D[row, neighbours].mean()
    # Descending sort of the scores; keep the top n.
    outliers = np.argsort(-Dtop)[0:n]
    # Diagnostic plot of the sorted outlier scores.
    fname = dtype + ".png"
    plt.figure()
    plt.plot(np.arange(n_samples), np.sort(Dtop), 'b-')
    plt.grid()
    plt.title(dtype + " Distance")
    plt.xlabel("Player Index")
    plt.ylabel("Distance")
    plt.draw()
    plt.savefig(fname, dpi=300)
    return outliers
def main():
    """Run outlier detection on one season's stats CSV.

    The season start year comes from argv[1] (default 1997). Prints the
    top-10 outliers under commute-time, Euclidean and Mahalanobis
    distances, and saves the diagnostic plots.
    """
    if len(sys.argv) == 1:
        target_year = 1997
    else:
        target_year = int(sys.argv[1])
    fname = str(target_year) + '-' + str(target_year+1) + '.csv'
    X, flabels, PLY, ptr1, ptr2, ptr3 = parse_stats(fname)
    X = normalize(X)
    CTD, E = dist_commute_time(X)
    M = dist_mahalanobis(X)
    # knn neighbours per score; report the top n outliers.
    knn = 10
    n = 10
    print "\nOutliers based on commute time distance:"
    dtype = "Commute Time"
    idx1 = get_top_n_outliers(CTD, knn, n, dtype)
    for i in range(0,n):
        print str(i+1).rjust(3) + "\t" + PLY[idx1[i]]
    print "\nOutliers based on Euclidean distance:"
    dtype = "Euclidean"
    idx2 = get_top_n_outliers(E, knn, n, dtype)
    for i in range(0,n):
        print str(i+1).rjust(3) + "\t" + PLY[idx2[i]]
    print "\nOutliers based on Mahalanobis distance:"
    dtype = "Mahalanobis"
    idx3 = get_top_n_outliers(M, knn, n, dtype)
    for i in range(0,n):
        print str(i+1).rjust(3) + "\t" + PLY[idx3[i]]
    get_stats(X, ptr1, ptr2, ptr3, PLY, idx1, flabels)
    return
def kmeans_test():
    """Smoke-test scipy's kmeans2 on synthetic 2-D data; saves clust.png."""
    # generate some random xy points and
    # give them some striation so there will be "real" groups.
    xy = np.random.rand(30,2)
    xy[3:8,1] -= .9
    xy[22:28,1] += .9
    # make some z values
    z = np.sin(xy[:,1]-0.2*xy[:,1])
    # whiten them
    z = whiten(z)
    # let scipy do its magic (k==3 groups)
    #res, idx = kmeans2(np.array(zip(xy[:,0],xy[:,1],z)), 3)
    res, idx = kmeans2(xy, 3)
    # convert groups to rgb 3-tuples.
    colors = ([([0,0,0],[1,0,0],[0,0,1])[i] for i in idx])
    # show sizes and colors. each color belongs in diff cluster.
    pylab.scatter(xy[:,0], xy[:,1], c=colors)
    pylab.savefig('clust.png')
    #print idx
    return
def nnmf_analysis():
    """Non-negative matrix factorization of one season's (normalized)
    stats; prints the factor shapes and the first columns of h.

    NOTE(review): the filename uses '.xls' although parse_stats reads
    CSV, and h[:,3] is printed despite pc=3 components -- both look
    questionable; confirm against the nnmf module before relying on this.
    """
    if len(sys.argv) == 1:
        target_year = 1997
    else:
        target_year = int(sys.argv[1])
    fname = str(target_year) + '-' + str(target_year+1) + '.xls'
    X, flabels, PLY, ptr1, ptr2, ptr3 = parse_stats(fname)
    # pos = {C, F, G}
    ptr1 = np.array(X[:,0]==1).transpose()
    ptr2 = np.array(X[:,0]==2).transpose()
    ptr3 = np.array(X[:,0]==3).transpose()
    X = normalize(X)
    # NNMF on all features except POS (column 0).
    w, h = nnmf.factorize(X[:,1:], pc=3, iter=100)
    print np.shape(h)
    print np.shape(w)
    print
    print h[:,0]
    print
    print h[:,1]
    print
    print h[:,2]
    print
    print h[:,3]
    return
def kmeans_players():
    """Cluster one season's players with k-means (k=3) and visualize the
    clusters against the PCA projection.

    Saves pca_3d.png (3-D PCA, colored by true position) and
    kmeans_on_pca.png (2-D PCA, marker = true position, color = cluster).
    """
    if len(sys.argv) == 1:
        target_year = 1997
    else:
        target_year = int(sys.argv[1])
    fname = str(target_year) + '-' + str(target_year+1) + '.xls'
    X, flabels, PLY, ptr1, ptr2, ptr3 = parse_stats(fname)
    # pos = {C, F, G}
    ptr1 = np.array(X[:,0]==1).transpose()
    ptr2 = np.array(X[:,0]==2).transpose()
    ptr3 = np.array(X[:,0]==3).transpose()
    X = normalize(X)
    # k-means on all features except POS (column 0).
    res, idx = kmeans2(X[:,1:], 3)
    # One rgb color per cluster assignment.
    colors = ([([1,0,0],[0,1,0],[0,0,1])[i] for i in idx])
    #print ptr1.shape
    # (PCA)
    # pos = {C, F, G}
    p = mm.PCA(X[:,1:])
    # Size the 2-D figure to the aspect ratio of the first two components.
    xmin = min(p.Y[:,0])
    xmax = max(p.Y[:,0])
    xrng = xmax - xmin
    ymin = min(p.Y[:,1])
    ymax = max(p.Y[:,1])
    yrng = ymax - ymin
    fx = 8
    fy = yrng * fx / xrng
    # 3-D PCA plot: player index drawn as text, colored by true position.
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    for n in np.nonzero(ptr1)[0].tolist():
        ax.text(p.Y[n,0], p.Y[n,1], p.Y[n,2], str(n), color='r', size=8)
        ax.hold(True)
    for n in np.nonzero(ptr2)[0].tolist():
        ax.text(p.Y[n,0], p.Y[n,1], p.Y[n,2], str(n), color='g', size=8)
        ax.hold(True)
    for n in np.nonzero(ptr3)[0].tolist():
        ax.text(p.Y[n,0], p.Y[n,1], p.Y[n,2], str(n), color='b', size=8)
        ax.hold(True)
    ax.set_xlim(p.Y[:,0].min(), p.Y[:,0].max())
    ax.set_ylim(p.Y[:,1].min(), p.Y[:,1].max())
    ax.set_zlim(p.Y[:,2].min(), p.Y[:,2].max())
    #ax.plot(p.Y[ptr1,0], p.Y[ptr1,1], p.Y[ptr1,2], 'ro'); ax.hold(True)
    #ax.plot(p.Y[ptr2,0], p.Y[ptr2,1], p.Y[ptr2,2], 'go'); ax.hold(True)
    #ax.plot(p.Y[ptr3,0], p.Y[ptr3,1], p.Y[ptr3,2], 'bo'); ax.hold(True)
    ax.view_init(30, 60)
    plt.draw()
    plt.savefig("pca_3d.png", dpi=100)
    '''
    for angle in range(90):
        ax.view_init(30, angle)
        #ax.view_init(angle, 60)
        plt.draw()
        fname = "pca_3d_" + str(angle).zfill(3) + ".png"
        plt.savefig(fname, dpi=100)
    '''
    # Compare true-position counts to cluster sizes.
    print
    print '\t# of centers = ' + str(sum(ptr1))
    print '\t# of forwards = ' + str(sum(ptr2))
    print '\t# of guards = ' + str(sum(ptr3))
    print
    print '\t# of class1 = ' + str(sum(idx==0))
    print '\t# of class2 = ' + str(sum(idx==1))
    print '\t# of class3 = ' + str(sum(idx==2))
    #'''
    # 2-D overlay: marker shape = true position, color = k-means cluster.
    plt.figure(figsize=(fx,fy))
    for n in range(len(idx)):
        if ptr1[n]==True:
            plt.plot(p.Y[n,0], p.Y[n,1], 'o', c=colors[n], hold='on')
        elif ptr2[n]==True:
            plt.plot(p.Y[n,0], p.Y[n,1], 's', c=colors[n], hold='on')
        elif ptr3[n]==True:
            plt.plot(p.Y[n,0], p.Y[n,1], '^', c=colors[n], hold='on')
        else:
            print '\t!! ' + str(n)
    plt.draw()
    plt.savefig("kmeans_on_pca.png", dpi=300)
    #'''
    return
def scatterplot(data, data_name):
'''
Makes a scatterplot matrix:
Inputs:
data - a list of data [dataX, dataY,dataZ,...];
all elements must have same length
data_name - a list of descriptions of the data;
len(data) should be equal to len(data_name)
Output:
fig - matplotlib.figure.Figure Object
'''
N = len(data_name)
print N
fig = plt.figure()
for i in xrange(N):
for j in xrange(N):
ax = fig.add_subplot(N, N, i*N+j+1)
if j == 0: ax.set_ylabel(data_name[i], size='10')
if i == 0: ax.set_title(data_name[j], size='10')
if i == j:
ax.hist(data[i], 10)
else:
ax.scatter(data[j], data[i])
return fig
def demo_scatter_matrix_plot():
    """Load the 1997-1998 season stats; the actual plotting is disabled."""
    target_year = 1997
    fname = '%d-%d.csv' % (target_year, target_year + 1)
    X, flabels, PLY, ptr1, ptr2, ptr3 = parse_stats(fname)
    #fig = scatterplot(X, flabels)
    #fig.savefig('scatterplot.png', dpi=120)
    #plt.show()
    return
#main()
#kmeans_players()
#nnmf_analysis()
demo_scatter_matrix_plot() |
15,302 | 596dd3339bfd35752f4eb95862ac77f04b3c8b39 | from __future__ import print_function, division
import numpy as np
import cv2 as cv
import os
from tqdm import tqdm
import time
import face_recognition
from imutils import build_montages
from imutils import paths
import sys
sys.path.append("../")
from baseline import sklearn_cluster
if __name__ == "__main__":
    # Stage 1: detect faces in every image under ../dataset and compute a
    # face_recognition encoding (128-d per the library's docs -- confirm)
    # for each detected face.
    data = []
    for imagePath in tqdm(list(paths.list_images("../dataset"))):
        # img = cv.imread(imagePath)
        # rgb = img[:, :, ::-1]
        rgb = face_recognition.load_image_file(imagePath)
        boxes = face_recognition.face_locations(
            rgb, model='hog')  # 'cnn' or 'hog'
        encodings = face_recognition.face_encodings(rgb, boxes)
        # One record per detected face: source path, box, encoding.
        d = [{'imagePath': imagePath, 'loc': box, 'encoding': enc}
             for (box, enc) in zip(boxes, encodings)]
        data.extend(d)
    data = np.array(data)
    encodings = [d['encoding'] for d in data]
    # Method Choice
    # Stage 2: cluster the encodings and time the clustering step.
    s = time.time()
    # labels = baseline.dbscan(encodings, eps=0.5, min_samples=3)
    labels = sklearn_cluster.chinese_whispers(encodings, threshold=0.45)
    print("[INFO] time: {}".format((time.time() - s) * 1000))
    labelIDs = np.unique(labels)
    # print(labelIDs)
    # Label -1 denotes faces the clustering left unassigned.
    numUniqueFaces = len(np.where(labelIDs > -1)[0])
    # print(numUniqueFaces)
    # Stage 3: show a montage of cropped faces per cluster; any key
    # advances to the next cluster.
    for labelID in labelIDs:
        idxs = np.where(labels == labelID)[0]
        faces = []
        for i in idxs:
            bgr = cv.imread(data[i]['imagePath'])
            (top, right, bottom, left) = data[i]['loc']
            face = cv.resize(bgr[top: bottom, left: right], (128, 128))
            faces.append(face)
        montage = build_montages(faces, (128, 128), (2, 5))[0]
        title = "Face ID #{}".format(labelID)
        title = "Unknown Faces" if labelID == -1 else title
        cv.imshow(title, montage)
        cv.waitKey(-1)
|
15,303 | 66f4910b81bea3868419f23c4f350db1da673129 | from __future__ import absolute_import
from . import alltests
__all__ = ['alltests']
|
15,304 | f8f9f734abc4d5272dbabea90d9ea1a91fcb778b | /home/openerp/production/extra-addons/training/wizard/wizard_subscription_line_invoice.py |
15,305 | 233dbabd2ef524d0ca47a9026a89ff7c35f1652b | from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import seaborn as sns
import numpy as np
import torch
import torch.nn as nn
from tqdm import tqdm
from Gaussian import GaussianDistribution
class BayesianNRegression:
    """Bayesian regression over a parameter space 'A' reached through a
    user-supplied mapping from observation space 'X'.

    Maintains a Gaussian belief (self.A_gsd) over A; each observed node
    multiplies in a new Gaussian elongated along the directions orthogonal
    to the observed vector. NOTE(review): Integrate() requires CUDA
    (torch .cuda() calls) and the semantics of `mapping` (GetVector,
    GetFullA, GetFullX, pole) are defined outside this file.
    """
    #self.mapping a function could map x->a
    #self.Xdim
    #self.Adim
    #self.A_gsd gaussian distribution for A space
    def __init__(self, mapping, miu=None,uncertainty=1):
        # Prior over A: isotropic Gaussian with variance uncertainty^2.
        self.mapping = mapping
        self.Adim = mapping.Adim
        self.Xdim = mapping.Xdim
        self.A_gsd = GaussianDistribution(dim=self.Adim,miu=miu,sigma=np.diag([uncertainty**2]*self.Adim))
        self.nodes = []

    def GetNode(self, *keys, belief=3,display=False):
        """Incorporate one observed point (coordinates in X space).

        Builds a Gaussian whose mean lies along the mapped direction and
        whose covariance is wide orthogonal to it (controlled by
        `belief`, applied as exp(belief)), then multiplies it into the
        running belief self.A_gsd.
        """
        keys = np.array(keys)
        if len(keys.shape) != 1:
            raise Exception("only get 1 node at a time")
        if len(keys) != self.Xdim:
            raise Exception("dimension not fitted")
        self.nodes.append(keys)
        a_vec, dist = self.mapping.GetVector(keys)
        base_vec = a_vec / np.linalg.norm(a_vec)
        #build R
        # Gram-Schmidt: extend base_vec to an orthonormal basis R of A,
        # skipping the axis vector that coincides with base_vec.
        R = [base_vec]
        index=0
        count=0
        while count < self.Adim - 1:
            current_vec = np.zeros(self.Adim)
            current_vec[index] = 1
            index += 1
            if current_vec.tolist() == base_vec.tolist():
                continue
            else:
                count+=1
            for vec in R:
                current_vec -= vec * np.sum(vec * current_vec)
            current_vec = current_vec / np.linalg.norm(current_vec)
            R.append(current_vec)
        R = np.stack(R,-1)
        R = R[:, ::-1]
        # NOTE(review): result of this product is discarded -- looks like
        # leftover debugging.
        R[:,0]*R[:,1]
        #build S
        # Scale: wide (exp(belief)) across the orthogonal directions,
        # narrow (1/exp(belief)) along base_vec.
        belief = np.exp(belief)
        sdia = [belief] * (self.Adim - 1) + [1. / belief]
        S = np.diag(sdia)
        #build sigma
        T = R.dot(S)
        sigma = T.dot(T.T)
        #build miu
        miu = base_vec * dist
        #build gaussian
        node_gsd = GaussianDistribution(dim=self.Adim, miu=miu, sigma=sigma)
        if display:
            node_gsd.Plot(scale=[-2,2],title='Node{},{}'.format(keys[0],keys[1]))
        #mul new gaussian
        self.A_gsd = self.A_gsd.Mul(node_gsd)

    def Integrate(self, xdensity=100, adensity=100, xscale=[-2, 2], ascale=[-2, 2]):
        """Marginalize the belief over A onto a grid in X space.

        Builds dense grids over X and the first Adim-1 coordinates of A
        (the meshgrid arity is assembled at runtime via exec since the
        dimensionality is dynamic), then sums A_gsd probability mass per
        X grid cell. Processes X in slices sized to keep the tensor under
        ~5e8 elements. Requires CUDA.
        """
        aslice_len = adensity ** (self.Adim)
        xslice_len = int(5e8 /aslice_len)
        if xslice_len < 1:
            xslice_len = 1
        step = (xscale[1]-xscale[0])/xdensity
        x_s = np.arange(xscale[0], xscale[1], step)
        x_pos = None
        # exec builds np.meshgrid(x_s, x_s, ...) with Xdim arguments.
        loc = {'x_pos':x_pos,'x_s':x_s,'np':np}
        glb = {}
        exec('x_pos = np.meshgrid({})'.format(''.join('x_s,' for i in range(self.Xdim))[:-1]),glb,loc)
        x_pos = loc['x_pos']
        x_pos = np.stack(x_pos)
        x_pos = x_pos.reshape([self.Xdim,-1])
        step = (ascale[1]-ascale[0]) / adensity
        a_s = np.arange(ascale[0],ascale[1],step)
        a_pos = None
        # Same trick for the (Adim-1)-dimensional A grid.
        loc = {'a_pos':a_pos,'a_s':a_s,'np':np}
        glb = {}
        exec('a_pos = np.meshgrid({})'.format(''.join('a_s,' for i in range(self.Adim-1))[:-1]),glb,loc)
        a_pos = loc['a_pos']
        a_pos = np.stack(a_pos)
        a_pos = a_pos.reshape([self.Adim-1,-1])
        x_pos = torch.FloatTensor(x_pos).cuda()
        a_pos = np.ascontiguousarray(a_pos,np.float32)
        a_pos = torch.FloatTensor(a_pos).cuda()
        if not self.mapping.pole:
            #xpos shape [xdim,xnum]
            #apos shape [adim-1,anum]
            a_tile = a_pos.reshape([ a_pos.shape[0], a_pos.shape[1],1])
            lprob=[]
            # Slice over X to bound peak memory.
            for i in tqdm(np.arange(0,x_pos.shape[1],xslice_len)):
                x_slice = x_pos[:, i: (i + xslice_len)]
                a_tiled = a_tile.repeat(1, 1, x_slice.shape[-1])
                a_full, delta = self.mapping.GetFullA(x_s=x_slice, a_s=a_tiled, step=step) #delta.shape [xnum]
                a_full = a_full.reshape([self.Adim, -1]) #a_full shape [adim, anum*xnum]
                prob = self.A_gsd[a_full]
                prob = prob.reshape([a_pos.shape[1],x_slice.shape[-1]])
                prob *= delta.cpu().numpy()
                # Sum over the A grid -> one probability per X cell.
                prob = np.sum(prob, 0)
                lprob.append(prob.copy())
            probs = np.concatenate(lprob)
            prob = probs.reshape([xdensity] * self.Xdim)
        else:
            # Pole-style mapping: integrate one X point at a time.
            prob = []
            for x in tqdm(x_pos.T):
                full_a, delta = self.mapping.GetFullA(x, a_pos,step=step)
                lprob = self.A_gsd[full_a]
                prob.append(np.sum(lprob)*delta)
            prob = np.array(prob)
            prob = prob.reshape([xdensity]*self.Xdim)
            # prob = prob.T
        return prob

    def PlotX(self, read=False,show_nodes=False, style='2D',choice_num=1,xdensity=100, adensity=100, xscale=[-2, 2], ascale=[-2, 2]):
        """Plot the belief projected into X space.

        style: '2D' heatmap, '3D' surface, 'choice2D' the MAP curve, or
        'choice3D' (not implemented). read=True loads a cached result via
        self.Load() instead of integrating; show_nodes overlays the
        observed nodes. Returns the pyplot module so callers can .show().
        """
        #style choose from ['2D','3D','choice2D','choice3D']
        if self.Xdim!=2 and style!='choice3D':
            raise Exception("xdim is not 2 but {}".format(self.Xdim))
        elif self.Xdim != 3 and style == 'choice3D':
            raise Exception("choice 3D plot 3D lines")
        if style == 'choice2D':
            # Draw the curve implied by the current MAP estimate of A.
            step = (xscale[1]-xscale[0]) / xdensity
            x_s = np.arange(xscale[0],xscale[1],step)
            a_s = self.A_gsd.miu
            x_s = self.mapping.GetFullX(x_s=x_s,a_s=a_s)
            if type(x_s) != type(None):
                plt.plot(x_s[0],x_s[1])
            else:
                return
            if show_nodes and len(self.nodes)>0:
                nodes = np.array(self.nodes)
                # Later nodes get progressively larger markers.
                sizes = [30+10*i for i in range(len(nodes))]
                plt.scatter(nodes[:,0], nodes[:,1], c='none', edgecolors='c', s=sizes, marker='d')
        elif style == 'choice3D':
            pass
        elif style == '2D':
            if not read:
                x_prob = self.Integrate( xdensity=xdensity,
                                         adensity=adensity,
                                         xscale=xscale,
                                         ascale=ascale)
            else:
                x_prob = self.Load()
            f, ax = plt.subplots(figsize=(10, 7))
            sns.heatmap(x_prob, fmt='d', cmap='Spectral_r',ax=ax)
            ax.invert_yaxis()
            # Sparse tick labels: one every xdensity/5 cells plus the last.
            new_axis = []
            step = (xscale[1] - xscale[0]) / xdensity
            x = np.arange(xscale[0],xscale[1],step)
            step = int(xdensity/5)
            for i_iter,i in enumerate(x):
                if i_iter % step == 0 or i_iter == xdensity-1:
                    new_axis.append(round(x[i_iter], 3))
                else:
                    new_axis.append('')
            plt.title('X space')
            plt.xlabel('x0')
            plt.ylabel('x1')
            plt.xticks(np.arange(xdensity),new_axis)
            plt.yticks(np.arange(xdensity),new_axis)
            if show_nodes and len(self.nodes)>0:
                nodes = np.array(self.nodes)
                # Map node coordinates into heatmap cell indices.
                nodex = [((i-xscale[0])*xdensity)/(xscale[1]-xscale[0]) for i in nodes[:,0]]
                nodey = [((i-xscale[0])*xdensity)/(xscale[1]-xscale[0]) for i in nodes[:,1]]
                sizes = [30+10*i for i in range(len(nodes))]
                plt.scatter(nodex, nodey, c='none', edgecolors='c', s=sizes, marker='d')
        elif style == '3D':
            x_prob = self.Integrate( xdensity=xdensity,
                                     adensity=adensity,
                                     xscale=xscale,
                                     ascale=ascale)
            x_prob = x_prob[::-1]
            step = (xscale[1]-xscale[0])/xdensity
            x = np.arange(xscale[0],xscale[1],step)
            x,y = np.meshgrid(x,x)
            fig = plt.figure()
            ax = Axes3D(fig)
            ax.plot_surface(x, y, x_prob, rstride=1, cstride=1, cmap='rainbow')
            if show_nodes and len(self.nodes) > 0:
                nodes = np.array(self.nodes)
                # NOTE(review): nodex/nodey are computed but the scatter
                # below uses the raw node coordinates instead.
                nodex = [int(((i-xscale[0])*xdensity)/(xscale[1]-xscale[0]))-1 for i in nodes[:,0]]
                nodey = [int(((i - xscale[0]) * xdensity) / (xscale[1] - xscale[0]))-1 for i in nodes[:, 1]]
                ax.scatter(nodes[:, 0], nodes[:, 1], 0, color='r')
            ax.invert_yaxis()
            plt.xlabel('x0',size = 10)
            plt.ylabel('x1',size = 10)
            ax.set_zlabel('probability',size=10)
        else:
            raise Exception("Do not have this type {} only '2D' and '3D' ".format(style))
        return plt
|
15,306 | ba3be201c9dbc888933b1ebb09a32e62b1d22882 | #coding=utf-8
import random
import os
import sys
# Every person folder under Image/ is one candidate identity.
allNamesList = os.listdir('Image')
selectedNum = len(allNamesList)
# One positive (same-person) pair per identity; four times as many negatives.
posPairNum = selectedNum
negPairNum = 4 * selectedNum
def initializeNotChosenList():
    """Return a fresh pool of candidate indices: [0, selectedNum)."""
    return list(range(selectedNum))
def generatePosTestList(pairNum):
    """Emit *pairNum* positive (same-person) image pairs.

    Each iteration picks a random not-yet-used person and appends that
    person's a/b, a/c and b/c photo combinations to the three
    pos_result_*.csv files.  The person is removed from the shared pool,
    which is refilled once exhausted.
    """
    global notChosenList, allNamesList
    for i in range(pairNum):
        leftForChooseNum = len(notChosenList)
        randomChosenNum = random.randint(0, leftForChooseNum-1)
        selectedPersonFolder = allNamesList[notChosenList[randomChosenNum]]
        # NOTE: backslash separators assume this script runs on Windows.
        picsOfOnePersonList = os.listdir('Image\\' + selectedPersonFolder)
        # Each person is expected to have exactly three photos: -a, -b, -c.
        firstPic = selectedPersonFolder+'-a.jpg'
        secondPic = selectedPersonFolder+'-b.jpg'
        thirdPic = selectedPersonFolder+'-c.jpg'
        if not (firstPic in picsOfOnePersonList and secondPic in picsOfOnePersonList and thirdPic in picsOfOnePersonList):
            print 'something wrong in %s, maybe it is empty'% selectedPersonFolder
            sys.exit()
        # Append one CSV line per photo combination (files reopened per pair).
        filehandler = open('pos_result_ab.csv', 'a')
        filehandler.write(selectedPersonFolder+'\\'+firstPic+','+selectedPersonFolder+'\\'+secondPic+',\n')
        filehandler.close()
        filehandler = open('pos_result_ac.csv', 'a')
        filehandler.write(selectedPersonFolder+'\\'+firstPic+','+selectedPersonFolder+'\\'+thirdPic+',\n')
        filehandler.close()
        filehandler = open('pos_result_bc.csv', 'a')
        filehandler.write(selectedPersonFolder+'\\'+secondPic+','+selectedPersonFolder+'\\'+thirdPic+',\n')
        filehandler.close()
        # Mark this person as used; refill the pool when everyone was used.
        notChosenList.remove(notChosenList[randomChosenNum])
        if len(notChosenList) == 0:
            notChosenList = initializeNotChosenList()
def generateNegTestList(pairNum):
    """Emit *pairNum* negative (different-person) image pairs.

    Two distinct people are drawn (and removed) from the shared pool each
    iteration; their photos are combined into the three neg_result_*.csv
    files.  NOTE(review): this assumes the pool holds at least 2 entries
    at the start of every iteration (the refill below guarantees it after
    the first draw, provided there are >= 2 identities overall) -- confirm.
    """
    global notChosenList, allNamesList
    for i in range(pairNum):
        leftForChooseNum = len(notChosenList)
        # Draw two different people; remove() works because the pool holds
        # unique index values.
        firstPersonNum = notChosenList[random.randint(0, leftForChooseNum-1)]
        notChosenList.remove(firstPersonNum)
        secondPersonNum = notChosenList[random.randint(0, leftForChooseNum-2)]
        notChosenList.remove(secondPersonNum)
        if len(notChosenList) == 0 or len(notChosenList) == 1:
            notChosenList = initializeNotChosenList()
        firstPic = allNamesList[firstPersonNum]+'-a.jpg'
        secondPic = allNamesList[secondPersonNum]+'-b.jpg'
        thirdPic = allNamesList[secondPersonNum]+'-c.jpg'
        fourthPic = allNamesList[firstPersonNum] + '-b.jpg'
        allPicsOfFirstPerson = os.listdir('Image\\' + allNamesList[firstPersonNum])
        allPicsOfSecondPerson = os.listdir('Image\\' + allNamesList[secondPersonNum])
        if not (firstPic in allPicsOfFirstPerson and fourthPic in allPicsOfFirstPerson):
            print 'something wrong in %s, maybe it is empty' % allNamesList[firstPersonNum]
            sys.exit()
        if not (secondPic in allPicsOfSecondPerson and thirdPic in allPicsOfSecondPerson):
            print 'something wrong in %s, maybe it is empty ' % allNamesList[secondPersonNum]
            sys.exit()
        # person1-a vs person2-b, person1-a vs person2-c, person1-b vs person2-c.
        filehandler = open('neg_result_ab.csv', 'a')
        filehandler.write(allNamesList[firstPersonNum]+'\\' + firstPic + ',' + allNamesList[secondPersonNum] + '\\'+ secondPic +',\n')
        filehandler.close()
        filehandler = open('neg_result_ac.csv', 'a')
        filehandler.write(allNamesList[firstPersonNum]+'\\' + firstPic + ',' + allNamesList[secondPersonNum] + '\\'+ thirdPic +',\n')
        filehandler.close()
        filehandler = open('neg_result_bc.csv', 'a')
        filehandler.write(allNamesList[firstPersonNum]+'\\' + fourthPic + ',' + allNamesList[secondPersonNum] + '\\'+ thirdPic +',\n')
        filehandler.close()
# Generate the positive pairs, then reset the pool before the negatives.
notChosenList = initializeNotChosenList()
generatePosTestList(posPairNum)
notChosenList = initializeNotChosenList()
generateNegTestList(negPairNum)
|
15,307 | 08bdaffafdddebd4e68dd8e014f26bd0d8c76ab9 | from django import forms
from Product.models import Product
class StockForm(forms.Form):
    """Stock-update form: a product name plus a (non-negative) quantity."""

    name = forms.CharField(
        label='',
        required=True,
        widget=forms.TextInput(
            attrs={"placeholder": "Nazwa produktu *", "class": "input_field"}),
    )
    qt = forms.IntegerField(
        label='',
        required=True,
        widget=forms.NumberInput(
            attrs={"placeholder": "Ilość *", "class": "input_field"}),
    )

    def clean_name(self, *args, **kwargs):
        """Resolve the typed name to a Product instance (case-insensitive)."""
        typed_name = self.cleaned_data.get('name')
        try:
            return Product.objects.get(name__iexact=typed_name)
        except Product.DoesNotExist:
            raise forms.ValidationError('Podany produkt nie istnieje')

    def clean_qt(self, *args, **kwargs):
        """Validate that the quantity is not negative."""
        qt = self.cleaned_data.get('qt')
        if qt < 0:
            raise forms.ValidationError('Nie można zaktualizować o ujemną wartość')
return qt |
15,308 | f7cce07c4ab32848a33a3648ae9fc3b32d63d683 | #! /usr/bin/python3
from __future__ import print_function
import sys
import textwrap
import re
import xml.dom.minidom
# Command line: arg1 = path to help.xml, arg2 = output mode.
if len(sys.argv) < 3:
    print("Usage: %s help.xml install|live" % sys.argv[0], file=sys.stderr)
    sys.exit(1)
if sys.version >= '3':
    # Force encoding to UTF-8 even in non-UTF-8 locales.
    import io
    sys.stdout = io.TextIOWrapper(
        sys.stdout.detach(), encoding="UTF-8", line_buffering=True)
else:
    # Avoid having to do .encode('UTF-8') everywhere. This is a pain; I wish
    # Python supported something like "sys.stdout.encoding = 'UTF-8'".
    def fix_stdout():
        import codecs
        sys.stdout = codecs.EncodedFile(sys.stdout, 'UTF-8')

        def null_decode(input, errors='strict'):
            # EncodedFile wants a decoder; pass data through unchanged.
            return input, len(input)
        sys.stdout.decode = null_decode
    fix_stdout()
document = xml.dom.minidom.parse(sys.argv[1])
# mode selects which condition-gated XML content survives preprocessing.
mode = sys.argv[2]
# Accumulators shared by the handle* functions below.
text = ""      # rendered page text being built
paratext = ""  # pending paragraph fragments (see the nobreak comment below)
def getText(nodelist):
    """Flatten a DOM node list to text, encoding markup as control chars.

    <link> becomes <\x12linkend\x13text\x10>; <ulink> URLs and highlighted
    elements (emphasis/screen/literal/userinput) are wrapped in
    \x11...\x10; <optional> content is bracketed.  Leading/trailing spaces
    are trimmed from every line of the result.
    """
    def render(node):
        # Plain character data passes through untouched.
        if node.nodeType == node.TEXT_NODE:
            return node.data
        if node.nodeType != node.ELEMENT_NODE:
            return ""
        tag = node.tagName
        if tag == "link":
            return "<\x12%s\x13%s\x10>" % (node.getAttribute("linkend"),
                                           getText(node.childNodes))
        if tag == "ulink":
            return "\x11%s\x10" % node.getAttribute("url")
        if tag in ("emphasis", "screen", "literal"):
            return "\x11%s\x10" % getText(node.childNodes)
        if tag == "userinput":
            # Strips a trailing end marker from the children -- presumably
            # to avoid doubled \x10 from nested highlights; confirm.
            return "\x11%s\x10" % getText(node.childNodes).rstrip("\x10")
        if tag == "optional":
            return "[%s]" % getText(node.childNodes)
        # Unknown wrappers contribute only their flattened children.
        return getText(node.childNodes)

    joined = "".join(render(node) for node in nodelist)
    # Trim the whole string, then per-line leading/trailing spaces.
    return re.sub(re.compile(r'^ +| +$', re.MULTILINE), '', joined.strip())
def fill(text, width=76, indent=''):
    """Word-wrap *text* to *width* columns, prefixing each line with *indent*.

    Runs of newlines are collapsed to single spaces first, so the input is
    rewrapped as one paragraph.
    """
    one_paragraph = re.sub(r'\n+', ' ', text)
    wrapper = textwrap.TextWrapper(width=width,
                                   initial_indent=indent,
                                   subsequent_indent=indent)
    # Eww - overriding the word-splitting regex is the only way textwrap
    # lets us turn off hyphen-based breaking.  This may break in a future
    # version of Python.
    wrapper.wordsep_re = re.compile(r'(\s+)')
    return wrapper.fill(one_paragraph)
def stripLinks(text):
    """Drop link markup (\x12id\x13label\x10), keeping only each label."""
    link_pattern = re.compile(r'\x12.+?\x13(.+?)\x10')
    return link_pattern.sub(r'\1', text)
def handleReference(reference):
    """Emit every refentry in the <reference>, then a NUL end marker."""
    for refentry in reference.getElementsByTagName("refentry"):
        handleRefEntry(refentry)
    sys.stdout.write("\0\n")
def handleRefEntry(refentry):
    """Write one help page: a \x04 page marker, header, then body text."""
    global text
    sys.stdout.write("\x04")  # start-of-page marker
    handleRefNameDiv(refentry.getElementsByTagName("refnamediv")[0])
    handleRefSection(refentry.getElementsByTagName("refsection")[0])
    # Flush the accumulated page text and reset for the next entry.
    sys.stdout.write(text.rstrip('\n'))
    text = ''
def handleRefNameDiv(refnamediv):
    """Append the page header: \x12<keycap>\x14<title>\x10."""
    global text
    refdescriptor = refnamediv.getElementsByTagName("refdescriptor")[0]
    keycap = refdescriptor.getElementsByTagName("keycap")[0]
    linkname = getText(keycap.childNodes)
    refname = refnamediv.getElementsByTagName("refname")[0]
    title = getText(refname.childNodes)
    text += "\x12%s\x14%s\x10" % (linkname, title)
def handleRefSection(refsection):
    """Dispatch each child element of a <refsection> to its handler."""
    for node in refsection.childNodes:
        if node.nodeType == node.ELEMENT_NODE:
            if node.tagName == "title":
                handleRefSectionTitle(node)
            elif node.tagName == "segmentedlist":
                handleSegmentedList(node)
            elif node.tagName == "variablelist":
                handleVariableList(node)
            elif node.tagName == "informalexample":
                handleInformalExample(node)
            elif node.tagName == "para":
                handlePara(node)
            else:
                # Unknown wrapper elements are recursed into, not skipped.
                handleRefSection(node)
def handleRefSectionTitle(title):
    """Append a highlighted section title followed by a blank line."""
    global text
    if len(title.childNodes) > 0:
        text += "\x11%s\x10" % getText(title.childNodes)
    text += "\n\n"
def handleSegmentedList(segmentedlist):
    """Render a title plus a two-column key/topic table.

    The key-column width is selected by the list's class attribute, one
    fixed width per table type in the help document.
    """
    global text
    segmentedlistclass = segmentedlist.getAttribute("class")
    if segmentedlistclass == "helpindex":
        keywidth = 7
    elif segmentedlistclass == "bootparams-hardware":
        keywidth = 39
    elif segmentedlistclass == "bootparams-disk":
        keywidth = 29
    else:  # segmentedlistclass == "bootparams-installer"
        keywidth = 40
    handleSegmentedListTitle(segmentedlist.getElementsByTagName("title")[0])
    handleSegTitles(segmentedlist.getElementsByTagName("segtitle"), keywidth)
    handleSegListItems(segmentedlist.getElementsByTagName("seglistitem"),
                       keywidth)
    text += "\n"
def handleSegmentedListTitle(title):
    """Append a highlighted list title and a blank line.

    (Same logic as handleRefSectionTitle; kept separate per element type.)
    """
    global text
    if len(title.childNodes) > 0:
        text += "\x11%s\x10" % getText(title.childNodes)
    text += "\n\n"
def handleSegTitles(segtitles, keywidth):
    """Append the two column headings, the first padded to *keywidth*."""
    global text
    if len(segtitles) >= 2:
        text += "\x11%-*s%s\x10" % (keywidth, getText(segtitles[0].childNodes),
                                    getText(segtitles[1].childNodes))
    text += "\n\n"
def handleSegListItems(seglistitems, keywidth):
    """Render table rows: key padded to *keywidth*, then the topic.

    If the (link-markup-stripped) key is too wide for the column, the
    topic moves to its own line indented by *keywidth*.
    """
    global text
    for seglistitem in seglistitems:
        segs = seglistitem.getElementsByTagName("seg")
        key = fill(getText(segs[0].childNodes))
        # Measure the last physical line, ignoring invisible link markup.
        plainkey = stripLinks(key.split("\n")[-1])
        topic = getText(segs[1].childNodes)
        if len(plainkey) > keywidth - 1:
            text += "%s\n%s%s" % (key, " " * keywidth, topic)
        else:
            text += "%s%s%s" % (key, " " * (keywidth - len(plainkey)), topic)
        text += "\n"
def handleVariableList(variablelist):
    """Render a definition list: term line(s) followed by indented bodies."""
    global text
    for varlistentry in variablelist.getElementsByTagName("varlistentry"):
        handleVarListEntry(varlistentry)
    text += "\n"
def handleVarListEntry(varlistentry):
    """Append highlighted comma-separated terms, then the wrapped body."""
    global text
    terms = varlistentry.getElementsByTagName("term")
    text += ", ".join(map(
        lambda term: "\x11%s\x10" % getText(term.childNodes), terms))
    text += "\n"
    listitem = varlistentry.getElementsByTagName("listitem")[0]
    # Body is wrapped to 76 columns and indented by one space.
    text += fill(getText(listitem.childNodes), 76, ' ')
    text += "\n"
def handleInformalExample(informalexample):
    """Append each <screen> sample verbatim, indented by one space."""
    global text
    for screen in informalexample.getElementsByTagName("screen"):
        text += " " + getText(screen.childNodes)
    text += "\n\n"
# This whole nobreak business sucks. It's there because
# Locale::Po4a::Docbook doesn't want to translate <phrase> elements within
# <para>s separately, but instead globs them together into one big msgid.
# To work around this, we do <para class="nobreak"> <para class="nobreak">
# ... <para>, but of course then we have to make sure to collect all the
# text up to the break and make sure to fill it together.
def handlePara(para):
    """Accumulate paragraph text; flush once a non-"nobreak" para ends.

    (See the nobreak comment above: consecutive class="nobreak" paras are
    collected into one paragraph so they are filled together.)
    """
    global text, paratext
    if paratext != "":
        # Insert a joining space, but only after printable ASCII that is
        # not already whitespace (never after a control marker).
        lastchar = ord(paratext[-1])
        if lastchar >= 32 and lastchar <= 127 and not paratext[-1].isspace():
            paratext += " "
    paratext += getText(para.childNodes)
    if (not para.hasAttribute("class") or
            para.getAttribute("class") != "nobreak"):
        text += fill(paratext)
        text += "\n\n"
        paratext = ""
def preprocess(parent):
    """Recursively drop elements whose "condition" attribute doesn't match.

    An element is kept only if every ';'-separated token of its
    "condition" attribute is either "gfxboot" or equal to the global
    *mode*.  Kept elements are recursed into; rejected ones are removed
    from the tree and unlinked.
    """
    global mode
    # BUG FIX: iterate over a snapshot.  minidom's childNodes is a live
    # list, so removeChild() during iteration shifted the following
    # sibling into the current index and the loop skipped it -- two
    # consecutive condition-gated elements left the second unprocessed.
    for node in list(parent.childNodes):
        if node.nodeType != node.ELEMENT_NODE:
            continue
        if node.hasAttribute("condition"):
            conditions = node.getAttribute("condition").split(';')
            if not all(c == "gfxboot" or c == mode for c in conditions):
                parent.removeChild(node).unlink()
                continue
        preprocess(node)
# Prune condition-gated content, then emit the single <reference> section.
preprocess(document)
reference = document.getElementsByTagName("reference")[0]
handleReference(reference)
|
15,309 | b777ca0432f7c338d8d55816d3fcabaa8425ae96 |
#class header
class _FROSTBITE():
def __init__(self,):
self.name = "FROSTBITE"
self.definitions = [u'injury to someone caused by severe cold, usually to their toes, fingers, ears, or nose']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
15,310 | ed8744e3092724bc07e74ac22ada410a6ba1ce93 | # -*- coding: utf-8 -*-
class DogLife:
    """Console Tamagotchi: a dog with four need gauges.

    Gauges (_popis, _comida, _sueño, _diversion) run from 10 (satisfied)
    down to 0; reaching 0 on any gauge kills the dog (_mood/_dead).

    _DOG frame indices, as used by the methods below:
      0 bathroom scene, 1 eating, 2 playing, 3 happy face, 4 sad face,
      5 dead, 6 sleeping, 7 birthday cake, 8-17 gauge bars for levels
      10 down to 1 (art index = 18 - level).
    """
    _DOG = ['''
____ ____ ____ ____
( ) | | | | | | | |
( . . ) |____| | | | | |____|
( u ) | | | | | |
(_________) | |____| |____| |
''',
'''
_ _
\ / | | | \ / |
oooooo \___/ | | | \/ |
oooooooooo | | | | |
|__________| | |____| | |
''',
'''
. . . ______ ____
. . | | | \ /
. * . | | | \___/
. ** ** . | | | |
. * * . |___| |____| |
. . .
''',
'''
____ ____ ____
*~~~~~* | | | | | | | | \ /
( ) . . ( ) |____| |____| |____| |____| \___/
( ) v ( ) | | | | | | |
( ) u ( ) | | | | | | |
''',
'''
____ ____ ___
*~~~~~* | | | | \
( ) . . ( ) |____ |____| | \
( ) v ( ) | | | | /
( ) n ( ) ____| | | |___/
''',
'''
___ ____ ____ ___
*~~~~~* | \ | | | | \
( ) x x ( ) | \ |___ |____| | \
( ) v ( ) | / | | | | /
( ) . ( ) |___/ |____ | | |___/
''',
'''
z z
z ____ ____ ____ ____
*~~~~~* | | | | | |
( ) . . ( ) |___ | |___ |___ |____|
( ) v ( ) | | | | |
( ) - ( ) ____| |____ |____ |____ |
''',
'''
. . .
_|_|_|_ ____ ___
__|~ ~ ~ ~|__ | | | \ | \
| | |____| | __/ | \
__ |~ ~ ~ ~ ~ ~ ~|___ | | | \ | /
| | | | |____/ |___/
|_____________________|
''',
'''
_____________________________
|__\__\__\__\__\__\__\__\__\__| 1O
''',
'''
__________________________
|__\__\__\__\__\__\__\__\__| 9
''',
'''
_______________________
|__\__\__\__\__\__\__\__| 8
''',
'''
____________________
|__\__\__\__\__\__\__| 7
''',
'''
_________________
|__\__\__\__\__\__| 6
''',
'''
______________
|__\__\__\__\__| 5
''',
'''
___________
|__\__\__\__| 4
''',
'''
________
|__\__\__| 3
''',
'''
_____
|__\__| 2
''',
'''
__
|__| 1
''']

    def __init__(self):
        # All four gauges start full; the dog starts alive, happy, age 0.
        self._popis = 10
        self._comida = 10
        self._sueño = 10
        self._diversion = 10
        self._vivo = True
        self._feliz= True
        self._edad = 0

    def vida(self):
        """Print all four gauges as ASCII bars.

        Bar frame index is 18 - level.  NOTE(review): levels above 10
        (possible via comer()/defecar(), see below) or at 0 print no bar
        at all for that gauge -- confirm this is intended.
        """
        if self._comida == 10:
            print ('COMER {}'.format(self._DOG[8]))
        elif self._comida == 9:
            print ('COMER {}'.format(self._DOG[9]))
        elif self._comida == 8:
            print ('COMER {}'.format(self._DOG[10]))
        elif self._comida == 7:
            print ('COMER {}'.format(self._DOG[11]))
        elif self._comida == 6:
            print ('COMER {}'.format(self._DOG[12]))
        elif self._comida == 5:
            print ('COMER {}'.format(self._DOG[13]))
        elif self._comida == 4:
            print ('COMER {}'.format(self._DOG[14]))
        elif self._comida == 3:
            print ('COMER {}'.format(self._DOG[15]))
        elif self._comida == 2:
            print ('COMER {}'.format(self._DOG[16]))
        elif self._comida == 1:
            print ('COMER {}'.format(self._DOG[17]))
        if self._sueño == 10:
            print ('DORMIR {}'.format(self._DOG[8]))
        elif self._sueño == 9:
            print ('DORMIR {}'.format(self._DOG[9]))
        elif self._sueño == 8:
            print ('DORMIR {}'.format(self._DOG[10]))
        elif self._sueño == 7:
            print ('DORMIR {}'.format(self._DOG[11]))
        elif self._sueño == 6:
            print ('DORMIR {}'.format(self._DOG[12]))
        elif self._sueño == 5:
            print ('DORMIR {}'.format(self._DOG[13]))
        elif self._sueño == 4:
            print ('DORMIR {}'.format(self._DOG[14]))
        elif self._sueño == 3:
            print ('DORMIR {}'.format(self._DOG[15]))
        elif self._sueño == 2:
            print ('DORMIR {}'.format(self._DOG[16]))
        elif self._sueño == 1:
            print ('DORMIR {}'.format(self._DOG[17]))
        if self._popis == 10:
            print ('BAÑO {}'.format(self._DOG[8]))
        elif self._popis == 9:
            print ('BAÑO {}'.format(self._DOG[9]))
        elif self._popis == 8:
            print ('BAÑO {}'.format(self._DOG[10]))
        elif self._popis == 7:
            print ('BAÑO {}'.format(self._DOG[11]))
        elif self._popis == 6:
            print ('BAÑO {}'.format(self._DOG[12]))
        elif self._popis == 5:
            print ('BAÑO {}'.format(self._DOG[13]))
        elif self._popis == 4:
            print ('BAÑO {}'.format(self._DOG[14]))
        elif self._popis == 3:
            print ('BAÑO {}'.format(self._DOG[15]))
        elif self._popis == 2:
            print ('BAÑO {}'.format(self._DOG[16]))
        elif self._popis == 1:
            print ('BAÑO {}'.format(self._DOG[17]))
        if self._diversion == 10:
            print ('JUGAR {}'.format(self._DOG[8]))
        elif self._diversion == 9:
            print ('JUGAR {}'.format(self._DOG[9]))
        elif self._diversion == 8:
            print ('JUGAR {}'.format(self._DOG[10]))
        elif self._diversion == 7:
            print ('JUGAR {}'.format(self._DOG[11]))
        elif self._diversion == 6:
            print ('JUGAR {}'.format(self._DOG[12]))
        elif self._diversion == 5:
            print ('JUGAR {}'.format(self._DOG[13]))
        elif self._diversion == 4:
            print ('JUGAR {}'.format(self._DOG[14]))
        elif self._diversion == 3:
            print ('JUGAR {}'.format(self._DOG[15]))
        elif self._diversion == 2:
            print ('JUGAR {}'.format(self._DOG[16]))
        elif self._diversion == 1:
            print ('JUGAR {}'.format(self._DOG[17]))

    def comer(self):
        """Feed the dog: raise food at the cost of the other needs.

        NOTE(review): `<= 8` adds 3, so _comida == 8 becomes 11 and the
        `elif == 8` branch is unreachable; the gauge can exceed 10 (and
        vida() then prints no bar).  `<= 7` was probably intended -- confirm.
        """
        print(self._DOG[1])
        if self._comida <= 8:
            self._comida += 3
        elif self._comida == 8:
            self._comida += 2
        elif self._comida == 9:
            self._comida +=1
        else:
            self._comida = 10
        # Side effects: potty -2, fun -1, sleep -1 (each floored at 0).
        if self._popis ==2:
            self._popis -=2
        elif self._popis == 1:
            self._popis -=1
        elif self._popis <= 10 and self._popis > 2:
            self._popis -= 2
        if self._diversion == 1:
            self._diversion -= 1
        elif self._diversion <= 10 and self._diversion >= 2:
            self._diversion -= 1
        if self._sueño == 1:
            self._sueño -= 1
        elif self._sueño <= 10 and self._sueño >= 2:
            self._sueño -= 1
        self._mood()

    def dormir(self):
        """Sleep: restore _sueño to 10; food/potty drop by 2, fun by 1."""
        print(self._DOG[6])
        self._sueño = 10
        if self._comida ==2:
            self._comida -=2
        elif self._comida == 1:
            self._comida -=1
        elif self._comida <= 10 and self._comida > 2:
            self._comida -= 2
        if self._popis ==2:
            self._popis -=2
        elif self._popis == 1:
            self._popis -=1
        elif self._popis <= 10 and self._popis > 2:
            self._popis -= 2
        if self._diversion == 1:
            self._diversion -= 1
        elif self._diversion <= 10 and self._diversion >= 2:
            self._diversion -= 1
        self._mood()

    def defecar(self):
        """Bathroom break: raise _popis; food drops 2, fun and sleep 1.

        NOTE(review): same overflow caveat as comer() -- `<= 8` adds 3 so
        the gauge can reach 11 and the `elif == 8` branch is unreachable.
        """
        print(self._DOG[0])
        if self._popis <= 8:
            self._popis += 3
        elif self._popis == 8:
            self._popis += 2
        elif self._popis == 9:
            self._popis +=1
        else:
            self._popis = 10
        if self._comida ==2:
            self._comida -=2
        elif self._comida == 1:
            self._comida -=1
        elif self._comida <= 10 and self._comida > 2:
            self._comida -= 2
        if self._diversion == 1:
            self._diversion -= 1
        elif self._diversion <= 10 and self._diversion >= 2:
            self._diversion -= 1
        if self._sueño == 1:
            self._sueño -= 1
        elif self._sueño <= 10 and self._sueño >= 2:
            self._sueño -= 1
        self._mood()

    def jugar(self):
        """Play: fun jumps to 10 (or +5 if below 5); food -3, others -1."""
        print(self._DOG[2])
        if self._diversion >= 5:
            self._diversion = 10
        else:
            self ._diversion += 5
        if self._comida ==3:
            self._comida -=3
        elif self._comida ==2:
            self._comida -=2
        elif self._comida == 1:
            self._comida -=1
        elif self._comida <= 10 and self._comida > 3:
            self._comida -= 3
        if self._sueño == 1:
            self._sueño -= 1
        elif self._sueño <= 10 and self._sueño >= 2:
            self._sueño -= 1
        if self._popis == 1:
            self._popis -= 1
        elif self._popis <= 10 and self._popis >= 2:
            self._popis -= 1
        self._mood()

    def _mood (self):
        """Re-evaluate state after an action.

        Any gauge at 0 kills the dog; any gauge at 1-5 makes it unhappy
        (prints the sad face); if still happy and some gauge is >= 8 the
        happy face is printed.
        """
        if (self._popis == 0) or (self._sueño == 0) or (self._diversion == 0) or (self._comida == 0) :
            self._vivo = False
            self._dead()
        if (self._popis <= 5 and self._popis > 0) or (self._sueño <= 5 and self._sueño > 0) or (self._diversion <= 5 and self._diversion > 0) or (self._comida <= 5 and self._comida > 0):
            self._feliz = False
            self._happiness()
        if self._feliz == True:
            if (self._popis >=8) or (self._comida >=8) or (self._sueño >=8) or (self._diversion >=8):
                self._feliz = True
                self._happiness()

    def _dead(self):
        """Print the dead frame and return False when the dog has died.

        Returns None (implicitly) while the dog is alive -- callers test
        `== False` to distinguish the two.
        """
        if self._vivo == False:
            print(self._DOG[5])
            return False

    def _happiness(self):
        """Print the happy (index 3) or sad (index 4) face."""
        if self._feliz == True:
            print(self._DOG[3])
        else:
            print(self._DOG[4])

    def _crece(self):
        """Advance age by one year; the game ends at age 10.

        NOTE(review): at age 10 _dead() is called while _vivo is still
        True, so it prints nothing and _vivo is never set to False -- the
        run() loop cannot detect this game-over.  Confirm whether
        `self._vivo = False` was intended here.
        """
        self._edad += 1
        if self._edad == 10:
            self._dead()
            print('\nTu amigo ha llegado a su edad máxima, pero no estes triste, vivio una vida feliz junto a ti. ')
            print('\n G A M E O V E R')
        else:
            self._vivo = True
            print (self._DOG[7])
def run():
    """Interactive loop: create the dog, read commands, age it over time."""
    print('B I E N V E N I D O A T A M A G O G O D . S .\n')
    # The breed choice is cosmetic: nothing reads `raza`, it only shows a menu.
    raza = input ('Razas disponibles: \na) Chihuahua \nb) Husky \nc) Pug \nd) Labrador \n \n¿Cuál escogeras?: ')
    name = input('\n¿Cómo llamaras a tu cachorro?: ')
    print ('\n-----------------------------------------------------------------------------------------------------------------------')
    perro = DogLife()
    print ('\n¡Felicidades! Ahora tienes un nuevo amigo \n{} tendrá necesidades como dormir, jugar y alimentarse; así que asegurate de darle atención y cariño\n'.format(name))
    acciones = 0
    while True:
        # _dead() returns False only once the dog has died (None otherwise).
        if perro._dead() == False:
            print ('Tu amigo ha muerto')
            print('\n G A M E  O V E R')
            break
        print ('Calidad de vida de {} \n'.format(name))
        perro.vida()
        print ('\n___________________________________________________________________________________________________________________')
        accion = input ('\nAcciones: \n[C]omer \n[J]ugar \n[D]ormir \n[B]año \n[S]alir' )
        print ('\n___________________________________________________________________________________________________________________')
        # BUG FIX: str.lower() returns a new string; the original discarded
        # the result, so uppercase commands were always rejected.
        accion = accion.lower()
        if accion == 'c':
            acciones +=1
            perro.comer()
        elif accion == 'j':
            acciones +=1
            perro.jugar()
        elif accion == 'd':
            acciones +=1
            perro.dormir()
        elif accion == 'b':
            acciones +=1
            perro.defecar()
        elif accion == 's':
            break
        else:
            print ('\nComando incorrecto, intentalo de nuevo \n')
            continue
        # Every five actions counts as one year of the dog's life.
        if acciones == 5:
            perro._crece()
            print ('¡Felicidades, {} ha crecido un año más!'.format(name))
            acciones = 0


if __name__ == '__main__':
    run()
|
15,311 | d5e0aa78b195b1bed7649030359ae08eab73831d | '''
File: cityHandler.py
Project: controllers
File Created: 2019-01-28 10:59:03 am
Author: wangwei (wangw11.thu@gmail.com)
-----
Last Modified: 2019-01-28 5:30:54 pm
Modified By: wangwei (wangw11.thu@gmail.com>)
'''
import os
import sys
import time
import json
import requests
import traceback
import tornado.web
import tornado.httpserver
from utils.base import *
from config import config_instance
from controllers.BaseHandler import BaseHandler
class initEye(BaseHandler):
    """Blink-calibration endpoint: upload one open-eye and one closed-eye photo.

    Computes an eye-openness rate for each image, stores both rates on the
    shared config_instance, and derives the blink threshold eye_th.
    Responds with JSON; failures are reported in 'error_msg'.
    """
    def post(self):
        ret = {'result': 'OK'}
        try:
            # Pull the file metadata uploaded under the 'open_eye' form field.
            file_metas = self.request.files.get('open_eye', None)
            for meta in file_metas:
                filename = meta['filename']
                file_path = os.path.join('image', filename)
                # Persist the upload, then measure its eye-openness rate.
                with open(file_path, 'wb') as up:
                    up.write(meta['body'])
                ret['open_eye_rate'] = check_blink_face(file_path)
                # ret['open_eye_rate'] = check_blink(file_path)
                config_instance.open_eye_rate = ret['open_eye_rate']
            # Same handling for the 'close_eye' upload.
            file_metas = self.request.files.get('close_eye', None)
            for meta in file_metas:
                filename = meta['filename']
                file_path = os.path.join('image', filename)
                with open(file_path, 'wb') as up:
                    up.write(meta['body'])
                ret['close_eye_rate'] = check_blink_face(file_path)
                # ret['close_eye_rate'] = check_blink(file_path)
                config_instance.close_eye_rate = ret['close_eye_rate']
            # Sanity check: the open-eye rate must exceed the closed-eye
            # rate by more than 30%, otherwise the calibration is rejected.
            if config_instance.close_eye_rate * 1.3 > config_instance.open_eye_rate:
                ret['message'] = 'error'
                self.finish(json.dumps(ret))
                return
            # Threshold sits one third of the way up from closed to open.
            config_instance.eye_th = config_instance.close_eye_rate + (config_instance.open_eye_rate - config_instance.close_eye_rate) / 3
            print(config_instance.eye_th)
            self.finish(json.dumps(ret))
            return
        except Exception as e:
            ret['error_msg'] = str(e)
            traceback.print_exc()
self.finish(json.dumps(ret)) |
15,312 | c47ccb0713c75ab5192d78389761df71c681331b | # Day 3
# -----
# Project: Treasure Island

# Title banner shown once at startup.
print('''
*******************************************************************************
| | | |
_________|________________.=""_;=.______________|_____________________|_______
| | ,-"_,="" `"=.| |
|___________________|__"=._o`"-._ `"=.______________|___________________
| `"=._o`"=._ _`"=._ |
_________|_____________________:=._o "=._."_.-="'"=.__________________|_______
| | __.--" , ; `"=._o." ,-"""-._ ". |
|___________________|_._" ,. .` ` `` , `"-._"-._ ". '__|___________________
| |o`"=._` , "` `; .". , "-._"-._; ; |
_________|___________| ;`-.o`"=._; ." ` '`."\` . "-._ /_______________|_______
| | |o; `"-.o`"=._`` '` " ,__.--o; |
|___________________|_| ; (#) `-.o `"=.`_.--"_o.-; ;___|___________________
____/______/______/___|o;._ " `".o|o_.--" ;o;____/______/______/____
/______/______/______/_"=._o--._ ; | ; ; ;/______/______/______/_
____/______/______/______/__"=._o--._ ;o|o; _._;o;____/______/______/____
/______/______/______/______/____"=._o._; | ;_.--"o.--"_/______/______/______/_
____/______/______/______/______/_____"=.o|o_.--""___/______/______/______/____
/______/______/______/______/______/______/______/______/______/______/_____ /
*******************************************************************************
''')
print("Welcome to Treasure Island.")
print("Your mission is to find the treasure.")

# Stage 1: the crossroad.  Only "left" continues the game.
choice = input(
    'You\'re at a cross road. Where do you want to go? Type "left" or "right".\n').lower()
if not choice == 'left':
    print('Fall into a hole 🕳 ... Game Over.')
else:
    # Stage 2: the lake.  Only "wait" is safe.
    choice = input(
        'You see a lake. What do you want to do? Type "swim" or "wait"\n').lower()
    if not choice == 'wait':
        print('An alligator eats you 🐊 ... Game Over.')
    else:
        # Stage 3: pick a door.  Only yellow wins.
        # FIX: corrected the "dors" typo in the user-facing prompt.
        choice = input(
            'You\'re in front of three doors: one is red, one is blue and the other is yellow. What color do you want to open?\n').lower()
        if choice == 'red':
            print('You are trapped in a room and burned by fire 🔥 ... Game Over.')
        elif choice == 'blue':
            print('You got bullied by ghosts 👻 ... Game Over.')
        elif choice == 'yellow':
            print('Congratulations, you found the treasure!! You win 🏴☠️🏅✌')
        else:
            print('Game Over. 😭')
15,313 | 143ed1565112ead07365551203ee7278f92fb65a | import boto3
# AWS handles: the s3 resource is created but only the plain client is used.
s3 = boto3.resource("s3")
conn = boto3.client("s3")
client = boto3.client("batch")
bucketName = "s3logs-manifest-qa-niaid-planx-pla-net"
jobIndex = 0
# Submit one Batch job per object in the bucket.
# NOTE(review): list_objects returns at most one page of keys; a paginator
# would be needed if the bucket can hold more -- confirm.
for obj in conn.list_objects(Bucket=bucketName)["Contents"]:
    key = str(obj["Key"])
    print "submitting job for " + key
    # Unique job name: test-<running index>.
    job = "test-" + str(jobIndex)
    jobIndex = jobIndex + 1
    response = client.submit_job(
        jobName=job,
        parameters={"bucket": bucketName, "object": obj["Key"]},
        jobQueue="test",
        jobDefinition="test",
    )
    if response["ResponseMetadata"]["HTTPStatusCode"] != 200:
        print "there was a problem with submitting the job for " + key
|
15,314 | f6df81d9071460d219ad41bfbdbff0ed5b0f8d90 | from api.view.pipeline import *
def setup(app):
    """Register all pipeline REST endpoints on the aiohttp application."""
    base = '/api/pipelines'
    # (HTTP method suffix, path suffix, handler) -- registered in order.
    routes = [
        ('get', '', get_pipes),
        ('put', '', create_pipe),
        ('get', '/{id}', getPipelineInfo),
        ('delete', '/{id}', deletePipe),
        ('post', '/{id}/start', startStream),
        ('post', '/{id}/pause', pauseStream),
        ('post', '/{id}/stop', stopStream),
        ('post', '/{id}/reset', resetStream),
        ('put', '/{pipeId}/elements', addElement),
        ('get', '/{pipeId}/elements/{elementId}', getElementInfo),
        ('post', '/{pipeId}/elements/{elementId}', setElementInfo),
        ('delete', '/{pipeId}/elements/{elementId}', deleteElement),
        ('get', '/{pipeId}/links', getLinks),
        ('put', '/{pipeId}/links', newLink),
        ('delete', '/{pipeId}/links', deleteLink),
    ]
    for method, suffix, handler in routes:
        getattr(app.router, 'add_' + method)(base + suffix, handler)
|
15,315 | 44ae05d2d780854484af394757cef32cef6f5cc3 | # Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import unittest
from reggen import gen_rtl
class TestFieldCheck(unittest.TestCase):
    """Unit tests for reggen.gen_rtl.check_field_bool."""

    def test_check_bool(self):
        fields = {'field1': "true", 'field2': "false", 'field3': "True"}
        # (field name, default, expected result)
        cases = [
            ('field1', False, True),    # "true" is accepted
            ('field2', True, False),    # "false" is accepted
            ('field3', False, False),   # capitalised "True" is not accepted
            ('field4', False, False),   # missing key -> default
        ]
        for field, default, expected in cases:
            outcome = gen_rtl.check_field_bool(fields, field, default)
            self.assertEqual(bool(outcome), expected)
|
15,316 | 021b8979890173760539abc6f07266bdbfa96121 |
###
### Goals
###
# Select one main catalog. Checks all other selected catalogs for similar stars
# and creats a joint dataframe with all the gathered parameters
###
### Imports
###
import pickle
import galpy
import numpy as np
import csv
import matplotlib.pyplot as plt
from astroquery.vizier import Vizier
from astroquery.simbad import Simbad
from multiprocessing import Queue, Process
import pandas as pd
import time as time
###
### Inputs
###

# Catalog CSV files exported from VizieR (see Instructions); the main
# catalog is the reference, the others are matched against it.
main_catalog = 'result_J-A+A-545-A32-table45.csv'
catalogs = ['result_J-A+A-606-A94-table1.csv',
            'result_J-A+A-634-A136-table3.csv',
            'result_J-ApJ-724-154-table1.csv']

# Column holding the star identifiers, one entry per catalog above.
keyword_main_catalog = 'SimbadName'
keywords_catalogs = ['SimbadName', 'SimbadName', 'Name']
# Find it through:
#   dataframe_cat = pd.read_csv('Catalogs/' + catalogs[i])
#   list(dataframe_cat)
###
### General Functions
###
def Simbad_names(star_names, time_pause = 60, n_step = 402):
    """ Converts a list of star names to the Simbad IDs of each star.

    Useful for comparison with the same stars written in different ways.
    Args
    ----------
    star_names: list of star names to resolve
    time_pause: seconds to sleep after every n_step queries, to avoid
        query problems from Simbad
    n_step: number of stars evaluated before pausing
    Returns
    ----------
    List with one entry per resolvable star: the 'ID' column of the
    Simbad identifier table.  Unresolved names are silently skipped.
    """
    all_names_my_stars = []
    count = 1
    for star in star_names:
        result_table = Simbad.query_objectids(star)
        # FIX: use `is not None` -- astropy tables overload comparison
        # operators element-wise, so `!= None` is not a reliable null test.
        if result_table is not None:
            all_names_my_stars.append(result_table['ID'])
        # Throttle: pause every n_step queries (resolved or not).
        if count % n_step == 0:
            time.sleep(time_pause)
        #print_progress(count, len(star_names)-1, prefix='Progress:', suffix='Complete', decimals=1, bar_length=100)
        count += 1
    return all_names_my_stars
def check_dataframe_Simbad_list(dataframe_cat, stars_dataframe_cat, compare_Simbad,
                                time_pause = 60, n_step = 300):
    """ Checks correspondence of stars between two catalogs via Simbad IDs
    Args
    ----------
    dataframe_cat: catalog dataframe being matched (only its length is used)
    stars_dataframe_cat: star names, one per row of dataframe_cat
    compare_Simbad: list of Simbad ID tables (from the reference catalog);
        only the primary (first) ID of each star is compared
    time_pause: seconds to sleep every n_step queries, to avoid query
        problems from Simbad
    n_step: number of stars evaluated before pausing
    Returns
    ----------
    List with one entry per row: the index in compare_Simbad of the
    matching star, or None when unresolved/unmatched
    """
    index_new_dataframe = [None]*len(dataframe_cat)
    count_orig = 0
    for index_star in range(len(dataframe_cat)):
        result_table = Simbad.query_objectids(stars_dataframe_cat[index_star])
        #print(count_orig)
        # NOTE(review): `!= None` on an astropy Table is fragile; `is not
        # None` would be the safe null test -- confirm before changing.
        if result_table != None:
            names_star_Simbad = result_table['ID']
            first_name = names_star_Simbad[0]
            count_original_dataframe = 0
            for simbad_name in compare_Simbad:
                if first_name == simbad_name[0]:
                    # NOTE(review): a later match overwrites an earlier one,
                    # so only the LAST matching index is kept -- confirm.
                    index_new_dataframe[count_orig] = (count_original_dataframe)
                count_original_dataframe += 1
        else:
            # NOTE(review): this `continue` also skips the count_orig
            # increment below, so unresolved stars shift all later result
            # slots -- confirm whether that is intended.
            continue
        if count_orig%n_step == 0:
            time.sleep(time_pause)
        count_orig += 1
    return index_new_dataframe
15,317 | 96c8396e2ac1f8a5e219a782e1720afd0d8013ae | import socket
import os, os.path
print "Connecting..."
if os.path.exists( "/var/lib/libvirt/qemu/channel/target/Virtual.org.qemu.guest_agent.0" ):
client = socket.socket( socket.AF_UNIX, socket.SOCK_STREAM )
client.connect( "/var/lib/libvirt/qemu/channel/target/Virtual.org.qemu.guest_agent.0" )
print "Ready."
print "Ctrl-C to quit."
print "Sending 'DONE' shuts down the server and quits."
while True:
try:
x = raw_input( "> " )
if "" != x:
print "SEND:", x
client.sendall( x )
if "DONE" == x:
print "Shutting down."
break
except KeyboardInterrupt, k:
print "Shutting down."
client.close()
else:
print "Couldn't Connect!"
print "Done"
|
15,318 | 0e792f55a27073a28d892181bb237633b23af542 | import Graffity
import matplotlib.pyplot as pyplot
import numpy
import scipy
import glob
import PIL
import images2gif
import io
# One reused matplotlib figure/axes for preview and per-frame rendering.
fig = pyplot.figure(0)
fig.clear()
ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])

# Change this directory to the location of the .TIF files on your system
directory= '/home/fprakt/Data/Derotator/derot_10112015/'
# Change this to match the structure of the files
files = glob.glob(directory+'pupil*001.TIF')

# These are the (x,y) coordinates and radius of the pupil cutout
xinit = 156
yinit = 87
width = 60

# Zoom Factor - how finely you want to sample the grid
ZF = 3.0

# Want to check how the Pupil Image looks like? Set to True
checkPupil = True

# Reference pupil template: cut out and zoom the first frame.
image = Graffity.FLIRCamImage(files[0])
pupilImage = image.zoomIn(image.extractCutout(xinit, yinit, width, chopTop=True), ZF)

if checkPupil:
    ax.matshow(pupilImage)
    fig.show()
    # Pause so the user can inspect the cutout (press Enter to continue).
    input()
    ax.clear()

# Measures the center of mass of the Pupil Image
center = scipy.ndimage.measurements.center_of_mass(pupilImage)
center_x = center[0]/ZF - width
center_y = center[1]/ZF - width

# GUESSES for the rotation axis (in pixels)
XGUESS = 167
YGUESS = 87

# Per-frame results: pupil center, derotator angle, raw image data.
x = []
y = []
sigma = []
angle = []
cutouts = []

# Start the loop
for df in files:
    image = Graffity.FLIRCamImage(df) # Read in the next file
    # parse the angle from the filename
    angle.append(float(df.split('/')[-1].split('_')[1].split('deg')[0]))
    # Finds the center of the image by cross-correlation routine
    centroid = image.findPupilCenter(XGUESS, YGUESS, zoomFactor = ZF, pupilImage=pupilImage)
    x.append(centroid[0]+center_x)
    y.append(centroid[1]+center_y)
    print("%.3f, %.3f" % ( x[-1], y[-1]))
    cutouts.append(image.imdata)

ax.clear()
# Sort the measurements by rotation angle before plotting the runout path.
x = numpy.array(x)
y = numpy.array(y)
angle = numpy.array(angle)
order = numpy.argsort(angle)
ax.plot(x[order], y[order])

frames = []
buf = []
# Dump the per-angle pupil centers to a text table.
#outfile = open('Pupil_Runout_10nov.txt', 'w')
outfile = open('../derotator2/pupil_0.txt', 'w')
outfile.write("%5s %16s %16s\n" % ( 'angle', 'x' , 'y'))
outfile.write("%5s %16s %16s\n" % ( '-----', '-------------', '---------'))
for i in order:
    # Render one annotated frame per angle into an in-memory PNG.
    ax.clear()
    ax.matshow(cutouts[i])
    ax.plot(x[order], y[order], color = 'y', lw=4.0)
    ax.scatter(x[i], y[i], color = 'k', s=85.0)
    ax.set_xbound(lower=95, upper=230)
    ax.set_ybound(lower=20, upper=160)
    ax.text(100.0, 25.0, 'Angle = %d' % angle[i], fontsize=16, color = 'y')
    ax.text(100.0, 30.0, 'X = %.2f' % x[i], fontsize=16, color = 'y')
    ax.text(100.0, 35.0, 'Y = %.2f' % y[i], fontsize=16, color = 'y')
    buf.append(io.BytesIO())
    fig.savefig(buf[-1], format='png')
    buf[-1].seek(0)
    print("%d %.3f %.3f" % ( angle[i], x[i], y[i]))
    outfile.write("%5s %16s %16s\n" % ( '%.1f' % angle[i], '%.5f' % x[i], '%.5f' %y [i]))
    #outfile.write("%d %.2f %.2f\n" % (angle[i], x[i], y[i]))
    frames.append(PIL.Image.open(buf[-1]))
outfile.close()
#images2gif.writeGif('Pupil_Runout.gif', frames, duration=0.5)
15,319 | 4af6806934e51b778b375084c722d42670d57463 | from django import forms
from django.forms import ModelForm
from posts.models import Post, Comment
class AddPostForm(ModelForm):
    """Form for creating a Post; only the body field is user-editable."""
    class Meta:
        model = Post
        fields = ('body',)
class EditPostForm(ModelForm):
    """Form for editing a Post.

    NOTE(review): currently identical to AddPostForm; kept separate,
    presumably so the two can diverge later -- confirm before merging.
    """
    class Meta:
        model = Post
        fields = ('body',)
class AddCommentForm(ModelForm):
    """Form for adding a Comment, with Bootstrap styling and a localized
    (Persian) required-field error message."""
    class Meta:
        model = Comment
        fields = ('body',)
        widgets = {
            'body': forms.Textarea(attrs={'class': 'form-control'})
        }
        error_messages = {
            'body': {
                'required': 'این فیلد اجباری است',
            }
        }
        help_texts = {
            'body': 'max 400 character'
        }
class AddReplyForm(ModelForm):
    """Form for replying to a Comment; uses a shorter fixed-height textarea
    than AddCommentForm."""
    class Meta:
        model = Comment
        fields = ('body',)
        widgets = {
            'body': forms.Textarea(attrs={'class': 'form-control', 'style': 'height: 100px'})
        }
|
15,320 | 553cbbe61c0cc904f903f85a315517fa031011dc | #!/usr/bin/env python
from socket import *
import sys
# Sends the first chunk of a media file to a remote host over UDP.
port = 8089
host = "192.168.1.35" # ip target ,eg ip 's raspberry pi
s = socket(AF_INET, SOCK_DGRAM)
buf = 32768
addr = (host,port)
# NOTE(review): f1, f2, f3 and f5 are opened but never read and never closed;
# only f4 (voice.m4a) is actually transmitted.
f1 = open("Testfile.txt",'rb') # Open in binary
f2 = open("picture.npg",'rb') # Open in binary
f3 = open("Testpdf.pdf",'rb') # Open in binary
f4 = open("voice.m4a",'rb') # Open in binary
f5 = open("video.mp4",'rb') # Open in binary
data = f4.read(buf)
# NOTE(review): the unconditional `break` means at most one 32 KiB datagram is
# ever sent, so files larger than `buf` are truncated -- presumably a demo
# shortcut; confirm whether a full read/send loop was intended.
while (data):
    if(s.sendto(data,addr)):
        print("sending ...")
        break
s.close()
f4.close()
|
15,321 | 06599dba17a1eb4adcdf4ff8f5ee90b195aa218b | import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
from netCDF4 import Dataset
from mpl_toolkits.basemap import Basemap
# Builds a 20x20 histogram relating TRMM cloud-object size (log2 bins) to
# 3-hourly precipitation (unit bins), normalizes each size row into a
# conditional distribution, computes its cumulative form, and plots both.
#m = Basemap(projection='cyl',lon_0=180,lat_0=0,urcrnrlat=60)
#parallels = np.arange(-30,30,30)
#meridians = np.arange(0,360,60122
#m.drawcoastlines()
#m.drawparallels(parallels,labels=[1,1,0,0],fontsize=10)
#m.drawmeridians(meridians,labels=[0,0,0,1],fontsize=10)
years = np.arange(1999,2015)
path='/data/dadm1/obs/TRMM/TRMM3B42size'
nc = Dataset(path+'/TRMMsize_3hrs_1998.nc')
path2 = '/data/dadm1/obs/TRMM/TRMM3B42'
nc2 = Dataset(path2+'/3B42.1998.3hr.nc')
print (nc2.variables['latitude'][80:320])
# Only the first 100 time steps are used (2920 - 2820).
times = np.arange(0,2920-2820)
cldsize = nc.variables['objsize'][times,80:320,:]
hqp = nc2.variables['pcp'][times,80:320,:]
#MASK = np.ma.array(hqp, mask=hqp < 20)
prec_size_num_array = np.zeros((20,20))
# NOTE(review): the three arange accumulators below are only referenced from
# commented-out code; they are dead as the script stands.
prec_per_size = np.arange(0,12)
prec_per_size_log = np.arange(0,12)
num_per_size = np.arange(0,12)
#plt.contourf(cldsize)
#plt.colorbar(orientation='horizontal')
#print(np.sum(hqp[hqp < 10))
#tt = np.arange(0,20)
#zz = 15#np.arange(0,27)
#yy = np.arange(80,161)
#lon=nc.variables['longitude'][:]
#lat=nc.variables['latitude'][yy]
#print(tt)
#print(level)
#print(lat)
#W_mean_sum = np.mean(nc.variables['w'][tt,zz,yy,:],axis=0)
for year in years:
    # NOTE(review): unlike the 1998 warm-up above, the yearly reads take the
    # full latitude range (no 80:320 slice) -- confirm this is intentional.
    nc = Dataset(path+'/TRMMsize_3hrs_' + str(year) +'.nc')
    nc2 = Dataset(path2+'/3B42.'+str(year)+'.3hr.nc')
    cldsize = nc.variables['objsize'][times,:,:]
    hqp = nc2.variables['pcp'][times,:,:]
    print(cldsize.size)
    # Keep only cells that belong to some cloud object (flattens both arrays).
    hqp = hqp[cldsize > 0]
    cldsize = cldsize[cldsize > 0]
    print(cldsize.size)
    # work for number-precipitation array
    print(np.floor_divide(cldsize,1000))
    # NOTE(review): range(0, size-1) skips the final element -- looks like an
    # off-by-one; confirm whether the last sample should be counted.
    for aa in range(0,cldsize[:].size-1):
        #print(sizem)
        # Bin index: log2 of the object size (capped at 19), and 3x the rain
        # rate floored (capped at 19); bin 0 in size is discarded below.
        sizem = np.floor_divide(np.log2(cldsize[aa]),1)
        precm = np.floor_divide(hqp[aa]*3,1)
        #print(precm,'prec')
        if sizem >19:
            sizem = 19
        if precm > 19:
            precm = 19
        if sizem >0:
            #print(int(np.floor_divide(sizem,1000)),int(3.5))
            prec_size_num_array[int(sizem),int(precm)] \
            =prec_size_num_array[int(sizem),int(precm)] + 1
#
for jj in range(0,20):
    print(np.sum(prec_size_num_array[jj,:]))
    # Normalize each size row into a conditional probability distribution.
    # NOTE(review): an empty row divides by zero and becomes NaN.
    prec_size_num_array[jj,:] = prec_size_num_array[jj,:]/np.sum(prec_size_num_array[jj,:])
    #prec_per_size[jj] = prec_per_size[jj] + np.sum(hqp[cldsize > 20000*jj])
    #prec_per_size_log[jj] = prec_per_size_log[jj] + np.sum(hqp[cldsize > 400*np.power(2,jj)])
    #print(hqp[cldsize > 400*np.power(2,jj)].size,np.power(2,jj))
    #print(hqp[cldsize > 400*np.power(2,jj)])
    #num_per_size[jj] = num_per_size[jj] + hqp[cldsize > 400*np.power(2,jj)].size + 0.0
# Cumulative distribution along the precipitation axis, per size row.
p_s_itgl_array = 0*prec_size_num_array
for jj in range(0,20):
    for kk in range(0,20):
        p_s_itgl_array[jj,kk] = np.sum(prec_size_num_array[jj,0:kk+1])
#print(prec_per_size)
#plt.figure(0)
#plt.plot(np.arange(0,12)*20000,prec_per_size/prec_per_size[0],'.')
#plt.figure(1)
#plt.plot(400*np.power(2,np.arange(0,12)),prec_per_size_log/prec_per_size_log[0],'.')
#plt.xscale('log')
#plt.figure(2)
#plt.plot(400*np.power(2,np.arange(0,12)),num_per_size/num_per_size[0],'.')
#plt.xscale('log')
plt.figure(3)
plt.pcolor(prec_size_num_array)
plt.colorbar()
plt.figure(4)
plt.pcolor(p_s_itgl_array)
plt.colorbar()
plt.show()
#W_mean = W_mean_sum /(years.size+1)
#lat2d,lon2d = np.meshgrid(lat,lon,indexing='ij')
#level3d = np.ones((level.size,lat.size,lon.size))*level.reshape(level.size,1,1)
#cwv = -np.trapz(2260000*q_mean+1005*T_mean+Z_mean,x=level3d*100,axis=0)/9.8/1005
#print(lat2d.size,W_mean.size)
#plt.contourf(lon2d.T,lat2d.T,W_mean.T)
#plt.colorbar(orientation='horizontal')
#plt.show()
|
15,322 | c6418df068d3fd2cab5b14002aa33a8a68ad73f9 | # -*- coding: ISO-8859-1 -*-
'''
Task Coach - Your friendly task manager
Copyright (C) 2004-2009 Frank Niessink <frank@niessink.com>
Task Coach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Task Coach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from taskcoachlib import meta
# Shared HTML page header for the Task Coach website. The %(name)s-style
# fields are filled from meta.metaDict at import time; literal '%%' escapes
# survive the substitution as single '%' characters in the emitted HTML.
header = '''
<!DOCTYPE HTML PUBLIC "-//W3C//DTD html 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<style type="text/css" media="screen">@import "default.css";</style>
<link rel="shortcut icon" href="favicon.ico" type="image/x-icon" />
<title>%(name)s</title>
</head>
<body>
<script type="text/javascript">
var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
document.write(unescape("%%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%%3E%%3C/script%%3E"));
</script>
<script type="text/javascript">
try {
var pageTracker = _gat._getTracker("UA-8814256-1");
pageTracker._trackPageview();
} catch(err) {}</script>
<div class="content">
<table cellspacing=5>
<tr>
<td valign=top>
<a href="index.html">
<img align="center" src="taskcoach.png"
style="border-style: none"/>
</a>
</td>
<td>
<h1>%(name)s - %(description)s</h1>
</td>
</tr>
</table>
</div>
<div class="content">
'''%meta.metaDict
# English page footer: navigation boxes, ads, and Twitter feed.
# NOTE(review): in the "Give support" navbox the <ul> list is closed with
# </p> instead of </ul> -- malformed HTML in the emitted page; the same
# defect exists in footer_fr below.
footer = '''
</div><!-- end of content div -->
<div id="navAlpha">
<div class="navbox">
<h2>About %(name)s</h2>
<p>%(name)s %(version)s was released on %(date)s.</p>
<ul>
<li><b><a href="download.html" title="Download %(name)s">Download</a></b></li>
<li><a href="index.html" title="%(name)s overview">Overview</a></li>
<li><a href="screenshots.html"
title="View some screenshots of %(name)s here">Screenshots</a></li>
<li><a href="features.html"
title="List of features in the current version of %(name)s">Features</a></li>
<li><a href="i18n.html"
title="Available translations">Translations</a></li>
<li><a href="changes.html"
title="An overview of bugs fixed and features added per version of %(name)s">Change history</a></li>
<li><a href="roadmap.html"
title="Future plans for %(name)s">Roadmap</a></li>
<li><a href="license.html"
title="Your rights and obligations when using %(name)s">License</a></li>
</ul>
</div>
<div class="navbox">
<h2>Get support</h2>
<ul>
<li><a href="mailinglist.html">Join mailinglist</a></li>
<li><a href="faq.html">Frequently asked questions</a></li>
<li><a href="https://sourceforge.net/tracker/?group_id=130831&atid=719135"
onClick="javascript: pageTracker._trackPageview('/outgoing/sourceforge.net/tracker/request_support');">Request support</a>
</li>
<li><a href="https://sourceforge.net/tracker/?group_id=130831&atid=719134"
onClick="javascript: pageTracker._trackPageview('/outgoing/sourceforge.net/tracker/browse_bugs');">Browse known bugs</a>
</li>
<li><a href="https://sourceforge.net/tracker/?func=add&group_id=130831&atid=719134"
onClick="javascript: pageTracker._trackPageview('/outgoing/sourceforge.net/tracker/submit_bug');">Submit a bug report</a>
</li>
<li><a href="https://sourceforge.net/tracker/?group_id=130831&atid=719137"
onClick="javascript: pageTracker._trackPageview('/outgoing/sourceforge.net/tracker/request_feature');">Request a feature</a>
</li>
</ul>
</div>
<div class="navbox">
<h2>Give support</h2>
<ul>
<li><a href="i18n.html">Help translate</a></li>
<li><a href="devinfo.html">Help develop</a></li>
<li><a href="donations.html">Donate</a></li>
<li><a href="http://www.cafepress.com/taskcoach/"
onClick="javascript: pageTracker._trackPageview('/outgoing/cafepress.com/taskcoach');">Buy the mug</a>
</li>
</p>
</div>
<div class="navbox">
<h2>Credits</h2>
<p>
<a href="http://www.python.org"
onClick="javascript: pageTracker._trackPageview('/outgoing/python.org');"><img src="python-powered-w-70x28.png" alt="Python"
width="70" height="28" border="0"></a><br>
<a href="http://www.wxpython.org"
onClick="javascript: pageTracker._trackPageview('/outgoing/wxpython.org');"><img
src="powered-by-wxpython-80x15.png"
alt="wxPython" width="80" height="15" border="0"></a><br>
<a href="http://www.icon-king.com"
onClick="javascript: pageTracker._trackPageview('/outgoing/icon-king.com');">Nuvola icon set</a><br>
<a href="http://www.jrsoftware.org"
onClick="javascript: pageTracker._trackPageview('/outgoing/jrsoftware.org');">Inno Setup</a><br>
<a href="http://www.bluerobot.com"
onClick="javascript: pageTracker._trackPageview('/outgoing/bluerobot.com');">Bluerobot.com</a><br>
<a href="http://sourceforge.net/projects/taskcoach"
onClick="javascript: pageTracker._trackPageview('/outgoing/sourceforge.net/projects/taskcoach');">
<img src="http://sflogo.sourceforge.net/sflogo.php?group_id=130831&type=8"
width="80" height="15" border="0" alt="Task Coach at SourceForge.net"/>
</a><br>
<SCRIPT type='text/javascript' language='JavaScript'
src='http://www.ohloh.net/projects/5109;badge_js'></SCRIPT>
</p>
</div>
</div>
<div id="navBeta">
<div class="navbox">
<p>
<script type="text/javascript"><!--
google_ad_client = "pub-2371570118755412";
/* 120x240, gemaakt 10-5-09 */
google_ad_slot = "6528039249";
google_ad_width = 120;
google_ad_height = 240;
//-->
</script>
<script type="text/javascript"
src="http://pagead2.googlesyndication.com/pagead/show_ads.js">
</script>
</p>
</div>
<div class="navbox">
<h2>Twitter updates</h2>
<div id="twitter_div">
<h2 style="display: none;" >Twitter Updates</h2>
<ul id="twitter_update_list"></ul>
<a href="http://twitter.com/taskcoach" id="twitter-link" style="display:block;text-align:left;">Follow Task Coach on Twitter</a>
</div>
<script type="text/javascript" src="http://twitter.com/javascripts/blogger.js"></script>
<script type="text/javascript" src="http://twitter.com/statuses/user_timeline/taskcoach.json?callback=twitterCallback2&count=3"></script>
</div>
</div>
</body>
</html>
'''%meta.metaDict
# French page footer -- same structure as `footer` above with translated
# labels. Decoded from the file's ISO-8859-1 bytes via unicode() (this module
# is Python 2) before the %-substitution with meta.metaDict.
footer_fr = unicode('''
</div><!-- end of content div -->
<div id="navAlpha">
<div class="navbox">
<h2>A propos de %(name)s</h2>
<p>%(name)s %(version)s est sorti le %(date)s.</p>
<ul>
<li><b><a href="download.html" title="Télécharger %(name)s">Téléchargement</a></b></li>
<li><a href="index.html" title="Survol de %(name)s">Survol</a></li>
<li><a href="screenshots.html"
title="Voir des captures d'écran de %(name)s ici">Captures d'écran</a></li>
<li><a href="features.html"
title="Liste des fonctionnalités de la version actuelle de %(name)s">Fonctionnalités</a></li>
<li><a href="i18n.html"
title="Traductions disponibles">Traductions</a></li>
<li><a href="changes.html"
title="Un survol des bogues corrigés et des fonctionnalités ajoutées par version de %(name)s">Historique</a></li>
<li><a href="roadmap.html"
title="Plans à venir pour %(name)s">Plans</a></li>
<li><a href="license.html"
title="Vos droits et devoirs liés à l'utilisation de %(name)s">Licence</a></li>
</ul>
</div>
<div class="navbox">
<h2>Obtenir de l'aide</h2>
<ul>
<li><a href="mailinglist.html">S'inscrire à la chaîne de courriels</a></li>
<li><a href="faq.html">Questions fréquemment posées</a></li>
<li><a href="https://sourceforge.net/tracker/?group_id=130831&atid=719135"
onClick="javascript: pageTracker._trackPageview('/outgoing/sourceforge.net/tracker/request_support');">Demande d'aide</a>
</li>
<li><a href="https://sourceforge.net/tracker/?group_id=130831&atid=719134"
onClick="javascript: pageTracker._trackPageview('/outgoing/sourceforge.net/tracker/browse_bugs');">Parcourir les bogues connus</a>
</li>
<li><a href="https://sourceforge.net/tracker/?func=add&group_id=130831&atid=719134"
onClick="javascript: pageTracker._trackPageview('/outgoing/sourceforge.net/tracker/submit_bug');">Soumettre un rapport de bogue</a>
</li>
<li><a href="https://sourceforge.net/tracker/?group_id=130831&atid=719137"
onClick="javascript: pageTracker._trackPageview('/outgoing/sourceforge.net/tracker/request_feature');">Demander une fonctionnalité</a>
</li>
</ul>
</div>
<div class="navbox">
<h2>Nous aider</h2>
<ul>
<li><a href="i18n.html">Aider à traduire</a></li>
<li><a href="devinfo.html">Aider à développer</a></li>
<li><a href="donations.html">Dons</a></li>
<li><a href="http://www.cafepress.com/taskcoach/"
onClick="javascript: pageTracker._trackPageview('/outgoing/cafepress.com/taskcoach');">Acheter le mug</a>
</li>
</p>
</div>
<div class="navbox">
<h2>Références</h2>
<p>
<a href="http://www.python.org"
onClick="javascript: pageTracker._trackPageview('/outgoing/python.org');"><img src="python-powered-w-70x28.png" alt="Python"
width="70" height="28" border="0"></a><br>
<a href="http://www.wxpython.org"
onClick="javascript: pageTracker._trackPageview('/outgoing/wxpython.org');"><img
src="powered-by-wxpython-80x15.png"
alt="wxPython" width="80" height="15" border="0"></a><br>
<a href="http://www.icon-king.com"
onClick="javascript: pageTracker._trackPageview('/outgoing/icon-king.com');">Nuvola icon set</a><br>
<a href="http://www.jrsoftware.org"
onClick="javascript: pageTracker._trackPageview('/outgoing/jrsoftware.org');">Inno Setup</a><br>
<a href="http://www.bluerobot.com"
onClick="javascript: pageTracker._trackPageview('/outgoing/bluerobot.com');">Bluerobot.com</a><br>
<a href="http://sourceforge.net/projects/taskcoach"
onClick="javascript: pageTracker._trackPageview('/outgoing/sourceforge.net/projects/taskcoach');">
<img src="http://sflogo.sourceforge.net/sflogo.php?group_id=130831&type=8"
width="80" height="15" border="0" alt="Task Coach at SourceForge.net"/>
</a><br>
<SCRIPT type='text/javascript' language='JavaScript'
src='http://www.ohloh.net/projects/5109;badge_js'></SCRIPT>
</p>
</div>
</div>
<div id="navBeta">
<div class="navbox">
<p>
<script type="text/javascript"><!--
google_ad_client = "pub-2371570118755412";
/* 120x240, gemaakt 10-5-09 */
google_ad_slot = "6528039249";
google_ad_width = 120;
google_ad_height = 240;
//-->
</script>
<script type="text/javascript"
src="http://pagead2.googlesyndication.com/pagead/show_ads.js">
</script>
</p>
</div>
<div class="navbox">
<h2>Mises à jour Twitter</h2>
<div id="twitter_div">
<h2 style="display: none;" >Mises à jour Twitter</h2>
<ul id="twitter_update_list"></ul>
<a href="http://twitter.com/taskcoach" id="twitter-link" style="display:block;text-align:left;">Suivre Task Coach sur Twitter</a>
</div>
<script type="text/javascript" src="http://twitter.com/javascripts/blogger.js"></script>
<script type="text/javascript" src="http://twitter.com/statuses/user_timeline/taskcoach.json?callback=twitterCallback2&count=3"></script>
</div>
</div>
</body>
</html>
''', 'ISO-8859-1')%meta.metaDict
|
15,323 | 3ca60d94e169b817dff7a92ff30308038b8d320c | import numpy as np
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import cv2
from pathfinder import pathfinder
import random
import time
import math
from multiprocessing import Process, Manager
import os
import argparse
from libtiff import TIFF
from osgeo import osr, ogr
# from tqdm import tqdm
import multiprocessing
# Land-type codes used in the rasterized map (TIFF cell values).
DL = 1 # 1 road
SX = 2 # 2 water system
FW = 3 # 3 buildings (residential land)
JZYD = 4 # 4 construction land
NT = 5 # 5 farmland
LM = 6 # 6 forest / trees
# Groupings of the codes above into four cost classes.
# NOTE(review): these names are rebound to cv2.inRange masks in the __main__
# block below, shadowing the lists defined here.
class1 = [5, 6]
class2 = [2]
class3 = [3, 1]
class4 = [4]
# Five-digit tag derived from the current unix time; used in output paths so
# each run writes to its own directory.
unique_tag = str(round(time.time()))[-5:]
def output_profile():
    """Write a placeholder route-planning summary to output/profile.txt."""
    os.makedirs("output/", exist_ok=True)
    summary_lines = [
        "地图大小:100,100\n",
        "电压等级为:3\n",
        "起点为:0,0;终点为:100,100\n",
        "起点与终点的直线距离为:100\n",
        "规划路线总长为:100,共架设:10个塔基\n",
        "综合代价为:1000\n",
        "跨越水系次数:3\n",
        "跨越交通线次数:3\n",
    ]
    with open("output/profile.txt", 'w') as report:
        report.writelines(summary_lines)
def road_extract(layer):
    """Extract every feature of an OGR vector layer as a polyline.

    Returns a list of lines, each line a list of (x, y) integer tuples.
    The y coordinate is flipped against layer_shape[3] -- presumably the
    layer extent's maximum y, so points land in image (top-left origin)
    coordinates; confirm against the OGR GetExtent() ordering.
    """
    line_list = []
    layer_shape = layer.GetExtent()
    for i in range(0, layer.GetFeatureCount()):
        feat = layer.GetFeature(i)
        geom = feat.geometry()
        if geom is None:
            continue  # skip features without geometry
        line = []
        if geom.GetGeometryCount() > 0:
            # Multi-part geometry: collect the points of every sub-geometry
            # into a single flattened line.
            for j in range(0, geom.GetGeometryCount()):
                g = feat.geometry().GetGeometryRef(j)
                for p in range(0, g.GetPointCount()):
                    pt = g.GetPoint(p)
                    # new_x,new_y=coordinate_transfer(pt[0],pt[1],layer_shape, sketch_shape)
                    # line.append((new_x,new_y))
                    line.append((int(pt[0]), int(layer_shape[3] - int(pt[1]))))
        else:
            # Simple geometry: read its points directly.
            for p in range(0, geom.GetPointCount()):
                pt = geom.GetPoint(p)
                # new_x,new_y=coordinate_transfer(pt[0],pt[1],layer_shape, sketch_shape)
                # line.append((new_x,new_y))
                line.append((int(pt[0]), int(layer_shape[3] - int(pt[1]))))
        line_list.append(line)
    return line_list
def point_generator(gridMap, type):
    """Return a uniformly random (row, col) whose cell equals `type`.

    Rejection-samples until a matching cell is drawn; loops forever if
    `type` does not occur anywhere in gridMap.

    Bug fix: the original used random.randint(0, X), whose upper bound is
    INCLUSIVE, so index X (resp. Y) could be drawn and raise IndexError.
    randrange excludes the upper bound, keeping indices in range.
    """
    X, Y = gridMap.shape
    while True:
        p_x = random.randrange(X)
        p_y = random.randrange(Y)
        if gridMap[p_x][p_y] == type:
            return p_x, p_y
def se_generator(gridMap):
    """Pick random start and end points, both on cells of type 1 (road)."""
    start = point_generator(gridMap, 1)
    finish = point_generator(gridMap, 1)
    return start[0], start[1], finish[0], finish[1]
def run(_ver, _return_dict, start, end, neigh_range, gridMap, background, openset_size, length_part, degree_delta,
        roads=None):
    """Run one A* path search (worker `_ver`) and save annotated figures.

    Writes 1 into _return_dict[_ver] (a multiprocessing manager dict) on
    success and 0 on failure, so the parent can count finished workers.
    NOTE(review): the return value is inconsistent (False vs 1); callers
    only read _return_dict, so the return value itself is unused.
    """
    # time1 = time.time()
    # gridMap = np.load('../res/sampled_sketch.npy')
    # gridMap = np.load('map.npy')
    # time2 = time.time()
    # print("图片加载完毕,耗时{}".format(time2 - time1))
    # maze = cv2.inRange(gridMap, 2.9, 3.1)
    # start = (0, 0)
    # end = (2500, 1000)
    # neigh_range = (200, 250)
    # sample_n = 20
    # road1 = [(776, 523), (1425, 393), (2930, 122)]
    # road2 = [(1285, 166), (1425, 393), (1880, 1075), (2020, 1973), (2086, 3737)]
    # com_line = [(3125, 718), (900, 1700), (1000, 2265), (1166, 3337), (3060, 3142)]
    # forbidden =
    # finder = pathfinder(maze, neigh_range, sample_n, [road1, road2], [com_line], gridMap)
    # print("maze shape:{},{}".format(gridMap.shape[0], gridMap.shape[1]))
    # print("类型:起点:{},终点:{}".format(gridMap[start[1]][start[0]], gridMap[end[1]][end[0]]))
    time3 = time.time()
    plt.figure()
    finder = pathfinder(_ver, gridMap, neigh_range, openset_size=openset_size, length_part=length_part,
                        degree_delta=degree_delta, roads=roads)
    path, close_list = finder.astar(start, end)
    if path is None:
        # Search failed: mark every explored node on the background and save
        # a "fail" figure for diagnosis.
        # print("查找失败,无解")
        for p in close_list:
            cv2.circle(background, p, 5, (255, 0, 0), 2)
        plt.imshow(background)
        plt.savefig("output/{}/fail_fig_{}_{}_{}_ver{}.png".format(unique_tag, neigh_range[0], neigh_range[1],
                                                                   str(round(time.time()))[-5:], _ver))
        _return_dict[_ver] = 0
        return False
    time4 = time.time()
    print("寻路完毕,耗时{}".format(time4 - time3))
    # Draw the explored set, the route polyline, and the tower positions.
    p1 = path[0]
    for p in close_list:
        cv2.circle(background, p, 10, (0, 0, 255), 5)
    for index, p in enumerate(path):
        if index == 0:
            continue
        p2 = p
        cv2.line(background, p1, p2, (255, 0, 0), 40)
        p1 = p
    for p in path:
        cv2.circle(background, p, 10, (0, 0, 0), 40)
    # for p in finder.close_set:
    #     cv2.circle(background, p, 3, (0, 255, 0))
    plt.imshow(background)
    plt.savefig(
        "output/{}/fig_{}_{}_{}_ver{}.png".format(unique_tag, neigh_range[0], neigh_range[1],
                                                  str(round(time.time()))[-5:],
                                                  _ver))
    np.save("output/{}/path__{}_ver{}.npy".format(unique_tag, str(round(time.time()))[-5:], _ver), np.array(path))
    _return_dict[_ver] = 1
    return 1
if __name__ == "__main__":
    # Create the base output directory and a per-run subdirectory.
    if not os.path.exists("output/"):
        os.makedirs("output/")
    if not os.path.exists("output/{}/".format(unique_tag)):
        os.makedirs("output/{}/".format(unique_tag))
    parser = argparse.ArgumentParser(description='电力寻路程序')
    parser.add_argument("--gridMap", help="地图的路径", type=str)
    parser.add_argument("--start", nargs="+", help="起点", type=int)
    parser.add_argument("--end", nargs="+", help="终点", type=int)
    parser.add_argument("-v", "--voltage", help="电压等级", type=int)
    parser.add_argument("-b", "--buffer", help="搜索集大小", type=int)
    parser.add_argument("-p", "--precision", help="搜索精确等级", type=int)
    parser.add_argument("-r", "--road", help="道路SHP文件的路径", type=str)
    args = parser.parse_args()
    # NOTE(review): these bare excepts only print a message and continue, so a
    # missing argument surfaces later as a NameError instead of a clean exit.
    try:
        start = (args.start[0], args.start[1])
    except:
        print("没有输入起点!")
    try:
        end = (args.end[0], args.end[1])
    except:
        print("没有输入终点!")
    # Map the voltage level (kV) to the tower-spacing search range in pixels.
    voltage_level = args.voltage
    neigh_range = (500, 600)
    if voltage_level == 35:
        neigh_range = (100, 150)
    elif voltage_level == 110:
        neigh_range = (150, 250)
    elif voltage_level == 220:
        neigh_range = (250, 450)
    elif voltage_level == 330:
        neigh_range = (300, 400)
    elif voltage_level == 500:
        neigh_range = (350, 450)
    elif voltage_level == 750:
        neigh_range = (450, 500)
    elif voltage_level == 1000:
        neigh_range = (500, 600)
    else:
        raise Exception("电压等级输入错误!")
    try:
        openset_size = args.buffer
    except:
        print("请输入合适的搜索集大小!")
    try:
        precision = args.precision
    except:
        print("请输入搜索精确度!")
    # Precision level selects search resolution: `length_part` radial samples
    # per step and `degree_delta` degrees between candidate headings.
    if precision == 1:
        length_part = 5
        degree_delta = 90 # 20
    elif precision == 2:
        length_part = 10
        degree_delta = 90 # 40
    elif precision == 3:
        length_part = 10
        degree_delta = 90 # 40
    elif precision == 4:
        length_part = 5
        degree_delta = 45 # 40
    elif precision == 5:
        length_part = 10
        degree_delta = 45 # 80
    elif precision == 6:
        length_part = 20
        degree_delta = 45 # 160
    elif precision == 7:
        length_part = 20
        degree_delta = 30 # 240
    else:
        length_part = 5
        degree_delta = 90 # 20
    print("读取TIFF文件中...")
    try:
        tif = TIFF.open(args.gridMap, mode='r') # open the TIFF file for reading
    except:
        print("输入的路径有误!")
    im = tif.read_image()
    print("正在分析各类地块...")
    # Binary masks per land class. NOTE(review): these rebind the module-level
    # class1..class4 lists defined at the top of the file.
    class4 = cv2.inRange(im, 3.9, 4.1)
    class1 = cv2.inRange(im, 4.9, 5.1) + cv2.inRange(im, 5.9, 6.1)
    class2 = cv2.inRange(im, 1.9, 2.1)
    class3 = cv2.inRange(im, 0.9, 1.1) + cv2.inRange(im, 2.9, 3.1)
    print("正在生成各类地块预览图并保存...")
    plt.figure(num='sketch', figsize=(16, 16))
    plt.subplot(2, 2, 1) # 2x2 grid of subplots, one per land class
    plt.title('class1') # subplot title
    plt.imshow(class1) # draw the mask
    plt.subplot(2, 2, 2) # 2x2 grid of subplots, one per land class
    plt.title('class2') # subplot title
    plt.imshow(class2) # draw the mask
    plt.subplot(2, 2, 3) # 2x2 grid of subplots, one per land class
    plt.title('class3') # subplot title
    plt.imshow(class3) # draw the mask
    plt.subplot(2, 2, 4) # 2x2 grid of subplots, one per land class
    plt.title('class4') # subplot title
    plt.imshow(class4) # draw the mask
    plt.savefig("output/{}/preview_landtype_tag{}.png".format(unique_tag, unique_tag))
    del class1, class2, class3, class4
    print("正在生成背景预览图...")
    # Save the raw map as a PNG, then read it back as an RGB image to use as
    # the drawing background for the workers.
    plt.imsave("output/{}/background_tag{}.png".format(unique_tag, unique_tag), im)
    background = cv2.imread("output/{}/background_tag{}.png".format(unique_tag, unique_tag))
    background = cv2.cvtColor(background, cv2.COLOR_BGR2RGB)
    np.save("output/{}/sketch_tag{}.npy".format(unique_tag,unique_tag), im)
    print("提取道路信息...")
    driver = ogr.GetDriverByName("ESRI Shapefile")
    filename = args.road
    dataSource = driver.Open(filename, 0)
    try:
        layer = dataSource.GetLayer(0)
    except:
        print("输入的道路文件有误!")
    roads = road_extract(layer)
    print("共有{}条道路".format(len(roads)))
    # NOTE(review): astype() does not modify in place; this result is discarded.
    im.astype(int)
    # processes = []
    # for ver in range(6):
    #     processes.append(Process(target=run, args=(ver, start, end, neigh_range, im, background, openset_size, length_part, degree_delta, roads)))
    # for ver in range(6):
    #     processes[ver].start()
    # for ver in range(6):
    #     processes[ver].join()
    # print('Process will start.')
    # for ver in range(5):
    #     processes[ver].start()
    # for ver in range(5):
    #     processes[ver].join()
    # print('Process end.')
    print("开始跑程序...")
    # Launch workers in batches of 5 until more than 4 runs have succeeded.
    count = 0
    ver_count = 0
    processes = []
    # pbar = tqdm(total=4)
    while True:
        manager = Manager()
        d = manager.dict()
        for ver in range(ver_count, ver_count + 5):
            p = Process(target=run, args=(
                ver, d, start, end, neigh_range, im, background, openset_size, length_part, degree_delta,
                roads))
            processes.append(p)
            p.start()
        print("载入进程...")
        for i in range(ver_count, ver_count + 5):
            processes[i].join()
        # Each worker reported 0 or 1 into the shared dict; tally successes.
        for result in d.values():
            count = count + result
            # if result==1:
            #     pbar.update(1)
        ver_count = ver_count + 5
        # print(d.keys())
        # print("count大小:{}".format(count))
        if count > 4:
            # pbar.close()
            break
    print('Process end.')
    print("结束")
|
15,324 | 8fdccc9669fed3fd2f46cd24ffcb92bd992cbd83 | # args02.py
import argparse
# Demonstrates a positional argument that consumes exactly two values.
cli = argparse.ArgumentParser(description='fixed size arguement list example')
cli.add_argument('size', nargs=2)
# Parse a canned command line instead of sys.argv, then show the result.
args = cli.parse_args(['1024', '768'])
print(args.size)
|
15,325 | 3dbd3037f4f3d7b02314de25a0de25aed778bb37 | ''' Let d(n) be defined as the sum of proper divisors of n (numbers less than n which divide evenly into n).
If d(a) = b and d(b) = a, where a ≠ b, then a and b are an amicable pair and each of a and b are called amicable numbers.
For example, the proper divisors of 220 are 1, 2, 4, 5, 10, 11, 20, 22, 44, 55 and 110; therefore d(220) = 284. The proper divisors of 284 are 1, 2, 4, 71 and 142; so d(284) = 220.
Evaluate the sum of all the amicable numbers under 10000. '''
import time
import sys
start = time.time()
def check_divisors(n):
    """Return the proper divisors of n in ascending order (always starts with 1)."""
    return [1] + [d for d in range(2, n // 2 + 1) if n % d == 0]
def amicable_numbers(max):
amicable_nums = [220, 284]
for i in range(1, max+1):
a_divisors = check_divisors(i)
j = sum(a_divisors)
b_divisors = check_divisors(j)
if i == sum(b_divisors) and (i not in amicable_nums or j not in amicable_nums) and i != j:
amicable_nums.append(i)
amicable_nums.append(j)
print(amicable_nums)
print(sum(amicable_nums))
def main():
    """Entry point: use argv[1] as the limit when it is a non-negative int."""
    limit = 10000
    if len(sys.argv) > 1 and int(sys.argv[1]) >= 0:
        limit = int(sys.argv[1])
    amicable_numbers(limit)
if __name__ == '__main__':
    main()
    # Wall-clock runtime measured from `start = time.time()` at module import.
    print("Time elapsed: " + str(time.time() - start) + " seconds")
'''
> python3 problem21.py
[220, 284, 1184, 1210, 2620, 2924, 5020, 5564, 6232, 6368]
Sum: 31626
Time elapsed: 2.6568055152893066 seconds
''' |
15,326 | 45930e4881d47e3b9166e5120d0cc5cc79f0bfa8 | from numpy import*
# Reads a Python list literal from stdin and counts how many times each value
# 0..36 occurs (values outside that range are ignored, as before).
#
# Bug fix: the original 37-branch if/elif chain contained `b[13]==1` -- an
# equality comparison instead of an increment -- so the value 13 was never
# counted. The whole chain is replaced by one bounds-checked increment.
#
# NOTE(security): eval() on raw input executes arbitrary code; acceptable only
# for trusted interactive use -- consider ast.literal_eval for untrusted input.
a = array(eval(input()))
b = zeros(37, dtype=int)
for value in a:
    if 0 <= value <= 36:
        b[value] += 1
print(b)
15,327 | d90b5c38bbe4f8682e073891bd008a67e3694ade | import pygame
import simple_draw as sd
class Button:
    """A rectangular clickable button drawn with simple_draw (sd).

    (x, y) is the lower-left corner in sd's bottom-left-origin coordinate
    system; the optional `event` callable fires when the button is clicked.

    Improvements over the original: idiomatic `is not None` checks and the
    typo'd private fallback `_even_null` renamed to `_event_null`.
    """

    def __init__(self, x, y, caption, event=None):
        self.x = x
        self.y = y
        self.caption = caption
        self._color1 = sd.COLOR_YELLOW       # fill when passive, border when active
        self._color2 = sd.COLOR_DARK_YELLOW  # border when passive, fill when active
        self._height = 10
        self._width = 30
        # Fall back to a no-op handler so callers may omit `event`.
        if event is None:
            self._event = self._event_null
        else:
            self._event = event

    def set_size(self, width, height):
        """Set the button size in pixels; returns self for chaining."""
        self._height = height
        self._width = width
        return self

    def set_color_passive(self, color):
        """Set the colour shown while the cursor is elsewhere; returns self."""
        self._color1 = color
        return self

    def set_color_active(self, color):
        """Set the colour shown while the cursor hovers; returns self."""
        self._color2 = color
        return self

    def draw(self, is_passive=True):
        """Draw the filled rectangle, its border, and the caption.

        The two colours swap roles when is_passive is False.
        """
        color1 = self._color1 if is_passive else self._color2
        color2 = self._color2 if is_passive else self._color1
        sd.rectangle(left_bottom=sd.get_point(self.x, self.y),
                     right_top=sd.get_point(self.x + self._width, self.y + self._height),
                     color=color1,
                     width=0)
        sd.rectangle(left_bottom=sd.get_point(self.x, self.y),
                     right_top=sd.get_point(self.x + self._width, self.y + self._height),
                     color=color2,
                     width=2)
        self._draw_text_on_button()

    def _draw_text_on_button(self):
        """Render the caption centred on the button.

        The blit position converts from sd's bottom-left origin to pygame's
        top-left origin via sd.resolution[1].
        """
        myfont = pygame.font.SysFont('Comic Sans MS', int(self._height * 0.7))
        textsurface = myfont.render(self.caption, False, (0, 0, 0))
        sd._screen.blit(textsurface,
                        (self.x + (self._width - textsurface.get_width()) // 2,
                         sd.resolution[1] - self.y + (self._height - textsurface.get_height()) // 2 - self._height))

    def check_over(self, cursor_pos):
        """Return True if cursor_pos (an object with .x/.y) is inside the button."""
        return (cursor_pos is not None
                and self.x <= cursor_pos.x <= self.x + self._width
                and self.y <= cursor_pos.y <= self.y + self._height)

    def _event_null(self):
        """Default click handler: do nothing."""
        pass
class UserInterface:
    """Owns a set of Buttons and drives their drawing and click dispatch."""

    def __init__(self):
        self.buttons = []

    def add_button(self, x, y, caption, event=None):
        """Create a Button, register it, and return it for further chaining."""
        button = Button(x=x, y=y, caption=caption, event=event)
        self.buttons.append(button)
        return button

    def show(self, cursor_pos, is_click=False):
        """Redraw every button and fire handlers for hovered buttons on click."""
        for button in self.buttons:
            hovered = button.check_over(cursor_pos)
            button.draw(hovered)
            if hovered and is_click:
                button._event()
15,328 | eadda476e27d70c0779bf4c300ca0ddeea6fb289 | import time
def test_customer_get(iamport):
    """Looking up an unregistered customer_uid must fail with code 1 and the
    expected (Korean) not-found message.

    NOTE(review): if customer_get unexpectedly succeeds, no exception is
    raised and the test passes silently -- consider failing in that case.
    """
    # Timestamp-based uid guarantees the lookup misses.
    customer_uid = 'customer_get_cuid_{}'.format(str(time.time()))
    try:
        iamport.customer_get(customer_uid)
    except iamport.ResponseError as e:
        assert e.code == 1
        assert e.message == u'요청하신 customer_uid({})로 등록된 정보를 찾을 수 없습니다.'.format(customer_uid)
15,329 | b7323c57b636e0308f565461f102aa01f3df32ff | #!/usr/bin/python3
'''
Implements the Aho-Corasick automaton
'''
from collections import deque
# Patterns are loaded once at import time, one per line.
# NOTE(review): the handle from open('patterns.txt', 'r') is never closed.
patterns = list(map(lambda x: x.strip(), open('patterns.txt', 'r')))
# Lowercase English alphabet; trie children are indexed by ord(c) - ord('a').
letters = [chr(ord('a') + i) for i in range(26)]
class trieNode:
    """A node of the Aho-Corasick keyword trie.

    Attributes:
        leaf: True when some pattern ends at this node.
        neighbors: 26-slot child table indexed by letter ('a'..'z').
        suffix: suffix (failure) link, or None before addLinks runs.
        output: output link to the nearest pattern end reachable through
            suffix links, or None.
        parent: parent node (None for the root).
        level: depth in the trie; the root is at level 0.
    """

    def __init__(self, parent=None, level=0, isLeaf=False):
        self.leaf = isLeaf
        self.neighbors = [None] * 26
        self.suffix = None
        self.output = None
        self.parent = parent
        self.level = level

    def addNeighbor(self, letter, isLeaf=False):
        """Create a fresh child under `letter`, one level deeper."""
        slot = ord(letter) - ord('a')
        self.neighbors[slot] = trieNode(parent=self, level=self.level + 1, isLeaf=isLeaf)

    def getNeighbor(self, path):
        """Walk `path` (a string) from this node; None if the chain breaks."""
        node = self
        for letter in path:
            if not node:
                return None
            node = node.neighbors[ord(letter) - ord('a')]
        return node

    def getNeighbors(self):
        """Return (letter, child) pairs for existing children, alphabetically."""
        pairs = []
        for letter in letters:
            child = self.getNeighbor(letter)
            if child:
                pairs.append((letter, child))
        return pairs

    def addSuffixLink(self, destination):
        """Point this node's suffix (failure) link at `destination`."""
        self.suffix = destination

    def addOutputLink(self, destination):
        """Point this node's output link at `destination`."""
        self.output = destination

    def __str__(self):
        present = [letter for letter in letters if self.getNeighbor(letter)]
        text = ' '.join(present)
        if self.leaf:
            text += ' *'  # Mark for leaf
        return text
def constructTries(root, patterns):
    # Insert every pattern into the trie rooted at `root`.
    # Bug fixed: the original only marked a node as a leaf when it was
    # freshly created (addNeighbor(..., i == patternLength - 1)), so a
    # pattern that is a prefix of an earlier-inserted pattern (e.g. 'ab'
    # after 'abc') was never marked.  Mark the terminal node explicitly
    # after walking the whole pattern instead.
    for pattern in patterns:
        node = root
        for char in pattern:
            if not node.getNeighbor(char):
                node.addNeighbor(char)
            node = node.getNeighbor(char)
        if pattern:  # never mark the root for an empty pattern
            node.leaf = True
def addLinks(root):
    '''
    Construct the suffix links and output links
    For suffix links:
    Do a breadth-first search of the trie
    If the node is the root, no suffix link
    If the node is one hop away from the root, points to the root
    Otherwise, the node corresponds to some string wa
    Let w->x:
    If xa exists, wa->xa
    Else if x is the root node, wa->root
    Else x->x.suffix
    For output links:
    u = v.suffix
    If u is a leaf, set v.output = u
    otherwise, v.output = u.output
    '''
    # The elements in the queue contains the node itself, the level, and the letter corresponding to the current node
    frontier = deque([(root, 0, None)])
    while frontier:
        node, level, letter = frontier.popleft()
        # Add the suffix link
        if level == 1: # node is one hop away
            node.addSuffixLink(root)
        elif level != 0:
            # Start from the parent's suffix and walk suffix links until a
            # node with an outgoing edge for `letter` (or the root) is found.
            # BFS order guarantees the parent's suffix is already set.
            x = node.parent.suffix
            while True:
                if x.getNeighbor(letter):
                    node.addSuffixLink(x.getNeighbor(letter))
                    break
                elif x == root:
                    node.addSuffixLink(root)
                    break
                else:
                    x = x.suffix
        # Add the output link
        if level != 0:
            # node.suffix is set by the branch above before this runs.
            u = node.suffix
            if u.leaf:
                node.output = u
            else:
                node.output = u.output
        # Add all neighbors
        for letter, neighbor in node.getNeighbors():
            frontier.append((neighbor, level + 1, letter))
class automaton:
    '''
    Aho-Corasick automaton: preprocesses the patterns into a trie and wires
    up the suffix/output links so callers can run multi-pattern matching
    from self.root.
    '''
    def __init__(self, patterns):
        trie_root = trieNode()
        constructTries(trie_root, patterns)
        addLinks(trie_root)
        self.root = trie_root
# Build the automaton at module level (the `automaton` class above is an
# equivalent, unused wrapper around these three steps).
root = trieNode()
constructTries(root, patterns)
addLinks(root)
# Input text: first line of text.txt, whitespace-stripped.
text = list(open('text.txt', 'r'))[0].strip()
'''
The final matching algorithm
Start at the root node in the trie
For each character c in the string:
while no edge labeled c:
if at the root, break; otherwise, follow a suffix link
if there is an edge labeled c, follow it
If the current node is a leaf, output that pattern
Output all the words in the chain of output links originating at this node.
'''
print('Automaton...')
curr = root
# Matches recorded as (start_index, end_index) pairs, inclusive.
automatonOutputs = set()
for i, c in enumerate(text):
    while not curr.getNeighbor(c):
        if curr == root:
            break
        curr = curr.suffix
    if curr.getNeighbor(c):
        curr = curr.getNeighbor(c)
    if curr.leaf:
        # node.level equals the matched pattern's length.
        automatonOutputs.add((i - curr.level + 1, i))
    outputNode = curr
    while outputNode.output:
        automatonOutputs.add((i - outputNode.output.level + 1, i))
        outputNode = outputNode.output
print('Brute force search...')
# Compare the results from the naive brute force search algorithm
bruteforceOutputs = set()
for i in range(len(text)):
    for j in range(i, len(text)):
        if text[i:j + 1] in patterns:
            bruteforceOutputs.add((i, j))
if automatonOutputs != bruteforceOutputs:
    raise Exception('Incorrect match result!')
else:
    print('Correct matching!')
|
15,330 | cbccfb24c1b7bac4d2475a538b01d97f5e232397 | """ NOTE: Probably do not actually need any of this: equivalent tests have been incorporated into get_lines in
process_methods.py
Contains methods used to "clean up" a lines_list, i.e.
remove lines that are likely not actually there """
import numpy as np
from main import *
# from process_methods import *
import math
def cleanup(lines_list, num_chambers):
    """Return the (num_chambers*2) lines from lines_list whose slopes agree
    best with the rest of the list, judged by the pairwise_score sum.

    Bug fixed: the original asserted `num_chambers*2 <= lines_list`,
    comparing an int against the list object itself — a TypeError in
    Python 3.  The intended check is against len(lines_list).
    """
    assert num_chambers*2 <= len(lines_list), "Didn't find enough lines (cleanup)"
    # TODO something going wrong here: check slope-related methods
    cleaned_list = []
    avgscore_list = []
    for l1 in lines_list:
        score = 0 # smaller score better
        for l2 in lines_list:
            score = score + pairwise_score(l1, l2)
        # NOTE(review): inverting the mean (**-1) makes LARGER avgscore mean
        # a better line, yet argsort below keeps the SMALLEST values — this
        # appears to select the worst lines; confirm the intended ordering.
        # Also raises ZeroDivisionError if all slopes are identical (score 0).
        avgscore = (score/len(lines_list))**-1
        avgscore_list.append(avgscore)
    keylist = np.argsort(avgscore_list)
    for k in keylist[:num_chambers*2]:
        cleaned_list.append(lines_list[k])
    return cleaned_list
def pairwise_score(l1, l2):
    """Squared difference of the two lines' angles (radians) — a measure of
    how dissimilar their slopes are; 0 means parallel."""
    angle1 = math.atan(slope(l1))
    angle2 = math.atan(slope(l2))
    return (angle1 - angle2) ** 2
def slope(l):
    """Return the slope of line l = (x1, x2, y1, y2).
    A vertical line (x1 == x2) yields positive infinity."""
    dx = l[1] - l[0]
    if dx == 0:
        return float("inf")
    return float(l[3] - l[2]) / dx
|
15,331 | dbd1041a57c462dfaf38a3c50f61f0f2a2700533 | def mkdir_p(path):
'''
Implements ``mkdir -p``.
'''
import errno
import os
if path:
try:
os.makedirs(path)
except OSError as ex:
if ex.errno == errno.EEXIST:
pass
else:
raise
|
15,332 | b5719fb5981f14de33d5c7873df957aeeba18247 | # This code generates a dictionary with the query id as it's key and it's top 100 documents
# generated by bm25 model, which does not consider the relevance judgement, as it's corresponding value
import pickle
import os
# make the directory if it doesn't exist already
# Output directory for the combined dictionary.
newpath = r'../../Encoded Data Structures (Phase 3)/'
if not os.path.exists(newpath):
    os.makedirs(newpath)
# Input directory: one pickled {doc: bm25_score} file per query.
path = r'../../../Phase 1/Task 1/Encoded Data Structures/Encoded-BM25-NoRelevance-Top100Docs-perQuery'
queryID_top100Docs = {} # dictionary to store query id as key and it's corresponding
# top 100 documents as it's value
for file in os.listdir(path):
    doc_bm25Score = {}
    current_file = os.path.join(path,file)
    # File names look like "...Encoded-Top100Docs-BM25-NoRelevance_<id>.<ext>";
    # split on the fixed prefix to recover the query id.
    string = current_file.split("Encoded-Top100Docs-BM25-NoRelevance_")
    id = string[1].split(".")[0] # gives the query id, example: id =1, id = 2, etc.
    with open(current_file, 'rb') as f:
        doc_bm25Score = pickle.loads(f.read())
    # NOTE(review): relies on the pickled dict's insertion order already being
    # ranked by score — confirm the producer wrote it sorted.
    all_docs = list(doc_bm25Score.keys())
    top_100_docs = all_docs[:100] # gets the top 100 documents
    queryID_top100Docs[id] = top_100_docs
# write the dictionary to a file, in encoded format using pickle library of python
output = open(newpath + 'Encoded-QueryID_Top100Docs_BM25_NoRelevance.txt', 'wb')
pickle.dump(queryID_top100Docs, output)
output.close()
|
15,333 | 258132cdb1ee990966835f49adaa6ca4b31dcfe8 | while True:
c0 = int(input("Give me a non-negative and non-zero number: "))
count = 0
while c0 != 1 and c0 != -1:
if c0 % 2 == 0:
c0 = c0 / 2
print(c0)
count += 1
else:
c0 = (3 * c0 + 1)
print(c0)
count += 1
count = str(count)
c0 = str(c0)
print("\ntook " + count + " steps")
|
15,334 | 161c09b849186313714948e389b5b1baa32a10fe | import re
def is_Nigerian(args1):
    """
    Check whether a given string is a Nigerian phone number.

    Parameters:
        args1 (str): the phone number to be checked.

    Returns:
        str: a remark indicating whether the phone number is Nigerian or not.
    """
    if not args1:
        return "Invalid input!, input must not be empty."
    if (args1.isalnum() and not args1.isnumeric()) or args1.isalpha():
        return "Invalid Input!, input must be a string of numbers."
    # Nigerian numbers: "+234" or a leading "0", an optional stray "0",
    # a valid network prefix (80/81/70/90), then exactly eight more digits.
    match = re.match(r"(\+234|0)(0?)(8[01]|[79]0)(\d{8}$)", args1)
    if match is None:
        return "False, {0} is not a Nigerian number".format(args1)
    if match.group(2) == "0":
        proper = "{0}{1}{2}".format(match.group(1), match.group(3), match.group(4))
        return "True, {0} is a Nigerian number but you don't have to add the '0' that follows the country code, conventionally the number should be {1}".format(args1, proper)
    return "True, {0} is a Nigerian number".format(args1)
print(is_Nigerian("+23409060990102")) |
15,335 | 725d7a8637908cec164002d856a9c4077e2bf8b7 | import scrapy
from scrapy.crawler import CrawlerProcess
items = []
class AzaniSpider(scrapy.Spider):
    # Scrapes product name/price/description/URL from the liveyoursport.com
    # squash category, paginating until no further page can be derived.
    name = "azani"
    custom_settings = {
        'FEED_FORMAT': 'csv',
        'FEED_URI': 'User/export.csv'
    }
    start_urls = [
        'https://www.liveyoursport.com/squash/?search_query=&page=1&limit=36&sort=featured&category=353&is_category_page=1']
    def parse(self, response):
        # Queue a detail-page request for every product tile on this listing.
        page_list = []
        for product in response.css('ul.ProductList li'):
            product_link = product.css('div.ProductDetails a::attr(href)').extract_first()
            yield scrapy.Request(product_link, callback=self.parse_product, meta={'url': product_link})
        # Handling Pagination
        # NOTE(review): assumes the currently-active page's <li> renders with
        # no <a> text, so its position is found via index(None) — confirm
        # against the live markup.
        for li in response.css('ul.PagingList li'):
            page_num = li.css('a::text').extract_first()
            page_list.append(page_num)
        active_page = page_list.index(None)
        if active_page == 0:
            next_page_index = 2
        else:
            next_page_index = int(page_list[active_page - 1]) + 2
        next_page = self.get_next_url(next_page_index)
        yield scrapy.Request(next_page, callback=self.parse)
    @staticmethod
    def get_next_url(page_no):
        # Rebuild the listing URL for the given page number.
        return 'https://www.liveyoursport.com/squash/?search_query=&page=' + str(
            page_no) + '&limit=36&sort=featured&category=353&is_category_page=1'
    @staticmethod
    def parse_product(response):
        # Extract the product fields; each row is also mirrored into the
        # module-level `items` list in addition to being yielded to the feed.
        product_name = response.css('div.ProductMain h1::text').extract_first()
        product_cost = response.css('div.PriceRow em.ProductPrice::text').extract_first()
        product_description = ''.join(response.css('div.ProductDescription span *::text').extract())
        product_link = response.meta.get('url')
        items.append({
            'Product Name': u''.join(product_name).encode('utf-8').strip(),
            'Price': u''.join(product_cost).encode('utf-8').strip(),
            'Description': u''.join(product_description).encode('utf-8').strip(),
            'URL': u''.join(product_link).encode('utf-8').strip()
        })
        yield {
            'Product Name': product_name,
            'Price': product_cost,
            'Description': product_description,
            'URL': product_link
        }
|
15,336 | bd68d3f5448ac5c6684eab1e9cb337f216d9a39d | # --- Day 4: Repose Record ---
from urllib.request import urlopen
from collections import Counter
import pandas as pd
import numpy as np
# Fetch the raw puzzle input over HTTP.
data = urlopen('https://raw.githubusercontent.com/MarynaLongnickel/RandomStuff/master/Advent%20of%20Code%202018/Day%204/day4.txt')
df = []
# parse each line to extract timestamp, guard ID, and whether they are asleep
# Each row becomes [timestamp, marker] where marker is 'U' (wakes up),
# 'S' (falls asleep), or a guard id string.
for line in data:
    line = line[1:-1].decode()
    line = line.replace(']', '')
    line = line.split(' ')
    line[1] = line[0] + ' ' + line[1]
    line = line[1:]
    if line [1] == 'wakes':
        line = [line[0], 'U']
    if line [1] == 'falls':
        line = [line[0], 'S']
    if line [1] == 'Guard':
        line = [line[0], line[2][1:]]
    df.append(line)
# convert data to DataFrame and sort by timestamp
df = pd.DataFrame(df)
df = df.sort_values(by = [0])
dates = list(df[0])
l = list(df[1])
# guards dictionary will contain all the minutes a guard is asleep
guards = {}
g = None
i = 0
# populate the guards dictionary
# On an 'S' entry the next entry is the matching wake-up, so the slept
# minutes are the range between the two timestamps' minute fields; the
# extra i += 1 skips past that consumed wake-up entry.
while i < len(l):
    x = l[i]
    if x not in ['U', 'S']:
        if x not in guards.keys():
            guards[x] = []
        g = x
    elif x == 'S':
        guards[g].extend(range(int(dates[i][-2:]), int(dates[i+1][-2:])))
        i += 1
    i += 1
total = 0
minute = 0
guard = None
# count the total number of minutes each guard slept and find the largest
for i in guards.keys():
    l = len(guards[i])
    if l > total:
        total = l
        c = Counter(guards[i]).most_common()[0]
        minute = c[0]
        guard = int(i)
print(guard, minute, ': ', guard * minute)
# ---------------------- PART 2 ----------------------
total = 0
minute = 0
guard = None
# find which minute a guard slept the most
for i in guards.keys():
    c = Counter(guards[i]).most_common()
    if len(c) > 0:
        c = c[0]
        if c[1] > total:
            minute = c[0]
            total = c[1]
            guard = int(i)
print(guard, minute, ': ', guard * minute)
|
15,337 | eefced65993a244aa32a1c71abaa6252b35321c3 | #!/usr/bin/env python
from distutils.core import setup, Extension
import os
import subprocess
from os.path import join, exists
# If you are creating a sdist from the full bllipparser code base, you
# may need the swig and flex packages. The Python packages include the
# outputs of these commands so you can build the Python modules without
# these dependencies.
def run(args):
    # Run *args* as a subprocess, aborting the whole build with a readable
    # SystemExit message if the command is missing or exits non-zero.
    # (Python 2 file: note the `except X, exc` syntax.)
    cmd = ' '.join(map(str, args))
    print("Running %r" % cmd)
    message = None
    try:
        assert subprocess.check_call(args) == 0
    except OSError, exc:
        # errno 2 == ENOENT: the executable itself was not found.
        if exc.errno == 2:
            message = "Command %r not found." % args[0]
        else:
            message = "OSError: %r" % exc
    except AssertionError, exc:
        message = "Bad exit code from %r" % cmd
    if message:
        raise SystemExit("Error while running command: %s\nBuild failed!" %
                         message)
parser_base = 'first-stage/PARSE/'
parser_wrapper = 'swig/wrapper.C'
parser_wrapper_full = join(parser_base, parser_wrapper)
def is_newer(filename1, filename2):
    """Returns True if filename1 has a newer modification time than
    filename2."""
    mtime1 = os.stat(filename1).st_mtime
    mtime2 = os.stat(filename2).st_mtime
    return mtime1 > mtime2
def maybe_run_swig(wrapper_filename, module_name, base_directory,
                   extra_deps=None):
    """Run SWIG if its outputs are missing or out of date.

    Regenerates the C++ wrapper (wrapper_filename) and the Python module
    (python/bllipparser/<module_name>.py) from swig/wrapper.i whenever
    either output is missing, or the .i file / any of extra_deps is newer
    than the generated module."""
    module_filename = 'python/bllipparser/%s.py' % module_name
    swig_filename = join(base_directory, 'swig', 'wrapper.i')
    extra_deps = extra_deps or []
    if exists(wrapper_filename) and exists(module_filename):
        # Both outputs exist — only rerun if an input is newer.
        newer = any(is_newer(f, module_filename)
                    for f in [swig_filename] + extra_deps)
        if not newer:
            return
    print('Generating ' + module_name + ' SWIG wrapper files')
    run(['swig', '-python', '-c++', '-module',
         module_name, '-I' + base_directory,
         '-Wall', '-classic', '-outdir', 'python/bllipparser',
         '-o', wrapper_filename, swig_filename])
# generate parser SWIG files if needed
maybe_run_swig(parser_wrapper_full, 'CharniakParser', parser_base,
extra_deps=[join(parser_base, 'SimpleAPI.' + suffix)
for suffix in 'Ch'])
parser_sources = (parser_wrapper, 'Bchart.C', 'BchartSm.C', 'Bst.C',
'FBinaryArray.C', 'CntxArray.C', 'ChartBase.C',
'ClassRule.C', 'ECArgs.C', 'Edge.C', 'EdgeHeap.C',
'ExtPos.C', 'Feat.C', 'Feature.C', 'FeatureTree.C',
'Field.C', 'FullHist.C', 'GotIter.C', 'InputTree.C',
'Item.C', 'Link.C', 'Params.C', 'ParseStats.C',
'SentRep.C', 'ScoreTree.C', 'Term.C', 'TimeIt.C',
'UnitRules.C', 'ValHeap.C', 'edgeSubFns.C',
'ewDciTokStrm.C', 'extraMain.C', 'fhSubFns.C',
'headFinder.C', 'headFinderCh.C', 'utils.C',
'MeChart.C', 'Fusion.C')
parser_sources = [join(parser_base, src) for src in parser_sources]
parser_module = Extension('bllipparser._CharniakParser',
sources=parser_sources, include_dirs=[parser_base],
libraries=['stdc++'])
reranker_base = 'second-stage/programs/features/'
reranker_wrapper = 'swig/wrapper.C'
reranker_wrapper_full = reranker_base + reranker_wrapper
reranker_read_tree = 'read-tree.cc'
reranker_read_tree_full = reranker_base + 'read-tree.cc'
# generate reranker SWIG files if needed
maybe_run_swig(reranker_wrapper_full, 'JohnsonReranker', reranker_base)
# generate reranker tree reader if needed
if not exists(reranker_read_tree_full):
run(['flex', '-o' + reranker_read_tree_full,
reranker_read_tree_full.replace('.cc', '.l')])
reranker_sources = [join(reranker_base, src) for src in
(reranker_wrapper, 'simple-api.cc', 'heads.cc',
reranker_read_tree, 'sym.cc')]
reranker_module = Extension('bllipparser._JohnsonReranker',
sources=reranker_sources,
extra_compile_args=['-iquote', reranker_base,
'-DSWIGFIX'])
setup(name='bllipparser',
version='2015.08.18',
description='Python bindings for the BLLIP natural language parser',
long_description=file('README-python.rst').read(),
author='Eugene Charniak, Mark Johnson, David McClosky, many others',
maintainer='David McClosky',
maintainer_email='notsoweird+pybllipparser@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
url='http://github.com/BLLIP/bllip-parser',
license='Apache 2.0',
platforms=['POSIX'],
ext_modules=[parser_module, reranker_module],
packages=['bllipparser'],
package_dir={'bllipparser': 'python/bllipparser'})
|
15,338 | 8de43d37e06335c301f0035ca83907b782f036de | import torch
from typing import Optional, List
from contextlib import contextmanager, ExitStack
from typing import ContextManager
class PostInitProcessor(type):
    """Metaclass that calls ``__post__init__`` on every new instance
    immediately after normal construction finishes."""
    def __call__(cls, *args, **kwargs):
        instance = super().__call__(*args, **kwargs)
        instance.__post__init__()
        return instance
@contextmanager
def nested(*contexts):
    """
    Enter every context *factory* in order (each element is called to
    produce a fresh context manager) and yield the factories; all contexts
    are exited in reverse order when the with-block ends.
    """
    with ExitStack() as stack:
        for factory in contexts:
            stack.enter_context(factory())
        yield contexts
class E2EBenchmarkModel(metaclass=PostInitProcessor):
    """
    A base class for adding models for all e2e models.

    Subclasses are expected to define DEFAULT_TRAIN_BSIZE /
    DEFAULT_EVAL_BSIZE and implement the modular step methods below.
    """
    def __init__(self, test: str, batch_size: Optional[int] = None,
                 extra_args: Optional[List[str]] = None):
        self.test = test
        assert self.test == "train" or self.test == "eval", f"Test must be 'train' or 'eval', but get {self.test}. Please submit a bug report."
        self.batch_size = batch_size
        if not self.batch_size:
            self.batch_size = self.DEFAULT_TRAIN_BSIZE if test == "train" else self.DEFAULT_EVAL_BSIZE
        # If the model doesn't implement test or eval test
        # its DEFAULT_TRAIN_BSIZE or DEFAULT_EVAL_BSIZE will still be None
        if not self.batch_size:
            raise NotImplementedError(f"Test {test} is not implemented.")
        # Bug fixed: `extra_args` previously used a mutable default ([]),
        # which is shared across all instances and silently accumulates
        # mutations; a None sentinel replaces it (interface-compatible).
        self.extra_args = extra_args if extra_args is not None else []
        if "--torchdynamo" in self.extra_args:
            self.dynamo = True
            from torchbenchmark.util.backends.torchdynamo import parse_torchdynamo_args
            self.opt_args, self.extra_args = parse_torchdynamo_args(self, self.extra_args)
        else:
            self.dynamo = False

    # Run the post processing for model acceleration
    def __post__init__(self):
        # sanity checks of the options
        assert self.test == "train" or self.test == "eval", f"Test must be 'train' or 'eval', but provided {self.test}."
        # initialize run contexts
        self.run_contexts = []
        if self.dynamo:
            from torchbenchmark.util.backends.torchdynamo import apply_torchdynamo_args
            # NOTE(review): self.tb_args is never set in this class —
            # presumably a subclass or parse_torchdynamo_args provides it;
            # confirm before relying on this path.
            apply_torchdynamo_args(self, self.opt_args, precision=self.tb_args.fp16)

    def add_context(self, context_fn):
        """Register a context-manager factory to be applied around runs."""
        ctx = context_fn()
        assert isinstance(ctx, ContextManager), f"Expected adding a ContextManager, get {type(ctx)}. Please report a bug."
        self.run_contexts.append(context_fn)

    def get_optimizer(self):
        raise NotImplementedError("Every E2EModel should implement a way to access the optimizer used.")

    def set_optimizer(self, optimizer) -> None:
        raise NotImplementedError("Every E2EModel should implement a way to swap out the optimizer(s).")

    def next_batch(self):
        raise NotImplementedError("Every E2EModel should implement a way to retrieve the next batch.")

    def run_forward(self, input):
        raise NotImplementedError("Every E2EModel should implement a modular forward step.")

    def run_backward(self, loss):
        raise NotImplementedError("Every E2EModel should implement a modular backward step.")

    def run_optimizer_step(self):
        raise NotImplementedError("Every E2EModel should implement a modular optimizer step.")
|
15,339 | 64331b86df954f27de3f2e0c292da9430c104986 | # Copyright 2009-2010 by Ka-Ping Yee
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
NOTE: THIS MODULE IS CURRENTLY UNUSED.
The current permissions scheme for resource finder is:
- Anyone (logged-in and non-logged-in users) can view and print
- Any logged-in user can edit data
THE CODE BELOW IS UNNECESSARY WITH THIS PERMISSION SCHEME
Handler for allowing an Account with 'grant' permission to grant access using
the permission scheme provided in access.py
"""
import logging
import model
import utils
from utils import DateTime, ErrorMessage, Redirect
from utils import db, html_escape, users, _
from access import check_action_permitted
class GrantAccess(utils.Handler):
    """Admin handler for approving or denying accounts' requested actions."""

    def get(self):
        """Shows all access requests that are waiting for approval."""
        self.require_action_permitted('grant')
        q = model.Account.all().filter('requested_actions !=', None)
        requests = []
        for account in q.fetch(100):
            for action in account.requested_actions:
                if check_action_permitted(self.account, 'grant'):
                    requests.append({'email': account.email,
                                     'requested_action': action,
                                     'key': account.key()})
        self.render('templates/grant_access.html',
                    requests=requests,
                    params=self.params,
                    grant_url=self.get_url('/grant_access'),
                    logout_url=users.create_logout_url('/'),
                    subdomain=self.subdomain)

    def post(self):
        """Grants or denies a single request.

        Bug fixed: the three ``%(name)s``-style format strings below were
        applied to a tuple or a bare value; named conversions require a
        mapping, so each raised TypeError at runtime."""
        action = self.request.get('action')
        if not action:
            raise ErrorMessage(404, 'missing action (requested_action) params')
        self.require_action_permitted('grant')
        account = model.Account.get(self.request.get('key'))
        if not account:
            raise ErrorMessage(404, 'bad key given')
        #TODO(eyalf): define account.display_name() or something
        name = account.email
        if not action in account.requested_actions:
            #i18n: Error message
            raise ErrorMessage(404, _('No pending request for '
                                      '%(account_action)s by %(user)s')
                               % {'account_action': action, 'user': name})
        account.requested_actions.remove(action)
        grant = self.request.get('grant', 'deny')
        if grant == 'approve':
            account.actions.append(action)
        account.put()
        logging.info('%s request for %s was %s' % (account.email,
                                                   action,
                                                   grant))
        if self.params.embed:
            if grant == 'approve':
                self.write(
                    #i18n: Application for the given permission action approved
                    _('Request for becoming %(action)s was approved.')
                    % {'action': action})
            else:
                self.write(
                    #i18n: Application for the given permission action denied
                    _('Request for becoming %(action)s was denied.')
                    % {'action': action})
        else:
            raise Redirect(self.get_url('/grant_access'))
if __name__ == '__main__':
    # Standalone entry point: map /grant_access to the handler above.
    utils.run([('/grant_access', GrantAccess)], debug=True)
|
15,340 | 051109e642c21e999075cb5bdefe2b5d2938f943 | import time
from crawler.message_bus import MessageBus
def test_receive_message():
    # Round-trip test against a live Kafka broker: publish one message on
    # the 'tweet-created' topic and expect to read the same bytes back.
    message_bus = MessageBus('queue-kafka-bootstrap:9092', 'tweet-created')
    message_bus.send(b'test_message')
    # Give the broker time to commit/deliver before consuming.
    time.sleep(5)
    response = message_bus.consume_one()
    assert response.value == b'test_message'
|
15,341 | bbf5a119be0854445a482ba9d35f547854687150 | # This files contains your custom actions which can be used to run
# custom Python code.
#
# See this guide on how to implement these action:
# https://rasa.com/docs/rasa/custom-actions
# This is a simple example for a custom action which utters "Hello World!"
from typing import Any, Text, Dict, List
from rasa_sdk import Action, Tracker
from rasa_sdk.executor import CollectingDispatcher
import pyrebase
firebaseConfig = {
"apiKey": "AIzaSyAGKs-Z8AHlEvIVH0D7Od_ZNqilgrvxXxU",
"authDomain": "chatbot-tuyen-sin.firebaseapp.com",
"databaseURL": "https://chatbot-tuyen-sin.firebaseio.com",
"projectId": "chatbot-tuyen-sin",
"storageBucket": "chatbot-tuyen-sin.appspot.com",
"messagingSenderId": "1026978833872",
"appId": "1:1026978833872:web:9e35340cf057d9576fc5d5",
"measurementId": "G-W2K68M5GLN"}
firebase = pyrebase.initialize_app(firebaseConfig)
db = firebase.database()
#push data
# data = {"name":"Phu", "age":"21", "address":["Bac Ninh","Ha Noi"]}
# db.push(data)
class ActionHelloWorld(Action):
    """Demo custom action: always replies with a fixed greeting."""

    def name(self) -> Text:
        return "action_hello_world"

    def run(self, dispatcher: CollectingDispatcher,
            tracker: Tracker,
            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        greeting = "Hello World!"
        dispatcher.utter_message(text=greeting)
        return []
class ActionChaoHoi(Action):
    # Greeting action: replies with the 'name' field of one hard-coded
    # Firebase record.
    def name(self) -> Text:
        return "action_ChaoHoiQuenBiet"
    def run(self, dispatcher: CollectingDispatcher,
            tracker: Tracker,
            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        # NOTE(review): the record id is hard-coded — confirm it exists in
        # the database, otherwise user.val() will be None.
        user = db.child("-MMxUEEhVtc3FvC2Hp8S").child("name").get()
        message=user.val()
        dispatcher.utter_message(text=message)
        return []
class ActionGioiThieuChung(Action):
    # Replies with the general-introduction text stored under
    # MenuChat/GioiThieuChung in Firebase.
    def name(self) -> Text:
        return "action_GioiThieuChung"
    def run(self, dispatcher: CollectingDispatcher,
            tracker: Tracker,
            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        user = db.child("MenuChat").child("GioiThieuChung").get()
        message=user.val()
        dispatcher.utter_message(text=message)
        return []
15,342 | efb66a5658891fc45c9eaf39289ea103506f3bb9 | sum = lambda n1, n2: n1 + n2
s1 = sum(10, 20)
print(s1)
people1 = [("c", 30), ("b", 200), ("a", 1000), ("d", 4)]
people2 = sorted(people1, key=lambda t:t[1])
print(people2)
|
15,343 | 8c34e8ea163424a15226e250a91f5218e1a5c4cb | from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, BooleanField
from wtforms.validators import DataRequired
class CategoriaForm(FlaskForm):
    # Category create/edit form: required name, optional description, and a
    # "featured" (destacado) flag.
    name = StringField(
        'Nome',
        validators = [
            DataRequired(message="Campo obrigatório")
        ],
        render_kw = {
            'placeholder':'Nome',
            'data-async-key': 'category-input'
        }
    )
    description = TextAreaField(
        'Descrição',
        render_kw = {
            'placeholder':'Descrição'
        }
    )
    # Checkbox rendered without a label.
    destacado = BooleanField(
        ''
    )
15,344 | 80f79f386e4145c50f12353ae402fd41ff013870 | from tempfile import gettempdir
from os.path import join
import tempfile
class File:
    """Thin file wrapper: append-only writes, line iteration, and
    concatenation (``a + b`` yields a new temp-file-backed File)."""

    def __init__(self, filename):
        # Path of the underlying file; it is opened lazily per operation.
        self.filename = filename

    def write(self, row):
        """Append *row* to the file (caller supplies any newline)."""
        with open(self.filename, 'a') as f:
            f.write(row)

    def __add__(self, other):
        """Return a new File whose contents are self's then other's.

        Bug fixed: the NamedTemporaryFile handle was left open while the
        same path was re-opened for writing, which fails on Windows and
        leaked the handle; close it before reuse."""
        tfile = tempfile.NamedTemporaryFile(delete=False)
        tfile.close()
        with open(tfile.name, 'a+') as f:
            f.write(self.readfile())
            f.write(other.readfile())
        return File(tfile.name)

    def __str__(self):
        return self.filename

    def __iter__(self):
        yield from open(self.filename)

    def readfile(self):
        """Return the whole file contents as one string."""
        with open(self.filename, 'r') as f:
            return f.read()

    def __next__(self):
        """Return the next line of the file.

        Bug fixed: the original __next__ contained ``yield``, making it a
        generator function, so ``next(obj)`` returned a brand-new generator
        object instead of a line and never raised StopIteration.  Keep a
        lazy per-instance line iterator and delegate to it instead."""
        if not hasattr(self, '_lines'):
            self._lines = iter(open(self.filename))
        try:
            return next(self._lines)
        except StopIteration:
            # Reset so a later next() starts from the beginning again.
            del self._lines
            raise
if __name__ == '__main__':
    # Ad-hoc manual exercise of the File class (note: Windows-specific
    # hard-coded paths under C:\1\).
    obj = File('C:\\1\\1.txt')
    obj.write('line\n')
    first = File('C:\\1\\2.txt')
    second = File('C:\\1\\3.txt')
    new_obj = first + second
    for line in File('C:\\1\\1.txt'):
        print(line)
    print(obj)
15,345 | 3aba3e721c33f655c34706bee05296226738b3aa | import pandas as pd
import numpy as np
import torch
from pytorch.param_helper import create_dir, create_main_dir, import_config
from active_learning.model_pipeline import TrainPipeline
from active_learning.data_gen import create_data_loader
from active_learning.extract_features import extract_features
import glob
import json
import joblib
from tqdm import tqdm
from evaluate.metrics import accuracy, avg_acc, get_cm
from custom_math.kappa import quadratic_kappa
import torch.utils.model_zoo as model_zoo
def reset_model(model):
    # Re-initialize `model` with the pretrained Xception weights fetched via
    # the torch model zoo (cached after the first download).  Mutates the
    # model in place and also returns it for convenience.
    model.load_state_dict(model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-43020ad28.pth'))
    return model
def unfamiliarity_index(feature, centroid_dict):
    """Sum, over every centroid, of the square root of the Euclidean
    distance between *feature* and that centroid.  Larger means the feature
    is farther from all known clusters."""
    total = 0
    for centroid in centroid_dict.values():
        distance = np.linalg.norm(feature - centroid)
        total += np.sqrt(distance)
    return total
class ActiveLearning():
def __init__(self, **config):
self.result_df = None
self.result_df2 = None
self.config = config
self.max_steps = self.config.get("max_steps")
self.create_dir(**self.config) #create main dir, return updated_main_dir (increment version if exist)
full_df, initial_d_unlabel = self.temp_get_filenames(self.config.get("main_data_dir"), train_dir = "full_train")
test_df, _ = self.temp_get_filenames(self.config.get("main_data_dir"), train_dir = "val")
full_df["ui"] = 0
full_df["features"] = 0
current_step = 0
## STEP 0:
# phase 0: init step j directory and config
current_step_dir = self.create_step_dir(current_step)
if current_step == 0:
label_df, val_df, unlabel_df = self.construct_initial_training_set(full_df, initial_d_unlabel, **config)
# phase 1: Formation of initial cluster
model, metric_fc = self.train(current_step_dir, label_df, val_df, None, None, **self.config)
centroid = self.extract_features_and_form_clusters(model, label_df, **config)
# phase 2: active learning
while current_step < self.max_steps:
if current_step > 0:
# init new step
current_step_dir = self.create_step_dir(current_step)
# re-train model
# model, metric_fc = self.train(current_step_dir, label_df, label_df, model, metric_fc, **self.config) # currently only train and validate on label_df
reset_model(model)
model, metric_fc = self.train(current_step_dir, label_df, val_df, model, metric_fc, **self.config) # currently only train and validate on label_df
# extract features and update clusters
centroid = self.extract_features_and_form_clusters(model, label_df, **config)
# add selected n samples to labelled
label_df = label_df.append(val_df)
# compute unfamiliarity index and remove selected n samples from unlabelled
val_df, unlabel_df = self.extract_features_and_compute_index(model, unlabel_df, centroid, **self.config)
# # add selected n samples to labelled
# label_df = label_df.append(val_df)
# evaluate model on selected n samples
result_df = self.evaluate(model, metric_fc, val_df, **self.config)
result_df2 = self.evaluate(model, metric_fc, test_df, **self.config)
# dump results
# dump label_df, val_df, unlabel_df and dump centroid
self.dump_df(current_step_dir, label_df, val_df, unlabel_df)
self.dump_centroid(current_step_dir, centroid)
# dump evaluation metrics
self.dump_step_result(current_step_dir, result_df)
self.dump_step_result2(current_step_dir, result_df2)
# repeat
current_step += 1
def create_dir(self, main_dir, root_dir, **kwargs):
updated_main_dir = create_main_dir(f"{root_dir}/{main_dir}")
self.config["updated_main_dir"] = updated_main_dir
def temp_get_filenames(self, main_data_dir, train_dir = "full_train", **kwargs):
f1 = glob.glob(f"{main_data_dir}/{train_dir}/*/*.jpeg")
# f2 = glob.glob(f"{main_data_dir}/val/*/*.jpeg")
# all_files = f1 + f2
all_files = f1
labels = [j.split("/")[-2] for j in all_files]
full_df = pd.DataFrame(dict(files = all_files, labels = labels))
initial_d_unlabel = all_files
return full_df, initial_d_unlabel
def create_step_dir(self, current_step):
updated_main_dir = self.config["updated_main_dir"]
current_step = str(current_step).zfill(3)
current_step_dir = create_main_dir(f"{updated_main_dir}/step_{current_step}")
return current_step_dir
def construct_initial_training_set(self, full_df, d_unlabel, m, n, random_state = 123, **kwargs):
# d_unlabel: files names of unlabelled training images
# m is the initial sample size
# n is the subsequent resampling size
label_df = full_df.sample(m, random_state = random_state)
unlabel_df = full_df.drop(label_df.index).copy()
while len(label_df["labels"].unique()) < 5:
sub_df = unlabel_df.sample(n, random_state = random_state)
label_df = label_df.append(sub_df)
unlabel_df = unlabel_df.drop(sub_df.index).copy()
# val_df = unlabel_df.sample(n, random_state = random_state) # val_df is for next step training, but used for this step validation
# unlabel_df = unlabel_df.drop(val_df.index).copy()
val_df = label_df.copy()
return label_df, val_df, unlabel_df
def extract_features_and_form_clusters(self, model, label_df, size, workers, **kwargs):
data_loader = create_data_loader(label_df, size, batch_size = 6, workers = workers)
f, y = extract_features(model, data_loader)
label_df["features"] = [j for j in f]
centroid = {}
for i in range(5):
uf = label_df["labels"] == str(i)
cent = label_df.loc[uf, "features"].values.mean()
centroid[i] = cent
return centroid
def extract_features_and_compute_index(self, model, unlabel_df, centroid, n, size, workers, outlier = 0, **kwargs):
data_loader = create_data_loader(unlabel_df, size, batch_size = 6, workers = workers)
f, y = extract_features(model, data_loader)
ui_list = [unfamiliarity_index(feature, centroid) for feature in f]
unlabel_df["ui"] = ui_list
unlabel_df = unlabel_df.sort_values("ui", ascending = False)
N = unlabel_df.shape[0]
subN = int(N*(1-outlier))
toadd_df = unlabel_df.tail(subN).head(n) # selected n samples
unlabel_df = unlabel_df.drop(toadd_df.index)
return toadd_df, unlabel_df
def train(self, current_step_dir, label_df, val_df, model, metric_fc, **kwargs):
CNN = TrainPipeline(step_dir = current_step_dir, label_df = label_df, val_df = val_df, model = model, metric_fc = metric_fc, **kwargs)
model, metric_fc = CNN.get_model()
return model, metric_fc
def evaluate(self, model, metric_fc, val_df, size, workers, metric_type, **kwargs):
unseen_test_loader = create_data_loader(val_df, size, batch_size = 6, workers = workers)
y_true = []
y_pred = []
for i, (input, target) in tqdm(enumerate(unseen_test_loader), total=len(unseen_test_loader)):
input, target = input.cuda(), target.cuda()
feature = model(input)
if metric_type=="softmax":
output = metric_fc(feature)
else:
output = metric_fc(feature, target)
y_pred += output.cpu().detach().numpy().argmax(axis = 1).tolist()
y_true += target.cpu().numpy().tolist()
y_true = np.array(y_true)
y_pred = np.array(y_pred)
acc = accuracy(y_true, y_pred)
avg = avg_acc(y_true, y_pred)
cm = get_cm(y_true, y_pred)
qk = quadratic_kappa(y_true, y_pred)
result = dict(accuracy = acc, average_accuracy = avg, kappa = qk, cm = cm)
result_df = pd.DataFrame([result])
return result_df
def dump_df(self, current_step_dir, label_df, val_df, unlabel_df):
    """Persist the three working DataFrames as CSVs under current_step_dir.

    Note: val_df is written to 'selected_df.csv' (historical file name).
    """
    targets = {
        "label_df.csv": label_df,
        "selected_df.csv": val_df,
        "unlabel_df.csv": unlabel_df,
    }
    for fname, frame in targets.items():
        frame.to_csv(f"{current_step_dir}/{fname}")
def dump_centroid(self, current_step_dir, centroid):
    """Persist the class centroids as a joblib pickle and as JSON.

    The caller's ``centroid`` dict is left untouched: the original replaced
    each ndarray with a plain list in place, which silently corrupted the
    dict for later use. JSON serialisation now works on a converted copy.
    """
    joblib.dump(centroid, f'{current_step_dir}/centroid.pkl')
    serializable = {label: vec.tolist() for label, vec in centroid.items()}
    with open(f'{current_step_dir}/centroid.json', 'w') as outfile:
        json.dump(serializable, outfile)
def dump_step_result(self, current_step_dir, result_df):
    """Accumulate per-step metrics and rewrite the running result.csv.

    Uses pd.concat instead of DataFrame.append, which was removed in
    pandas 2.0.
    """
    if self.result_df is None:
        self.result_df = result_df
    else:
        self.result_df = pd.concat([self.result_df, result_df])
    self.result_df.to_csv(f"{current_step_dir}/result.csv")
def dump_step_result2(self, current_step_dir, result_df2):
    """Accumulate the secondary per-step metrics and rewrite result_2.csv.

    Uses pd.concat instead of DataFrame.append, which was removed in
    pandas 2.0.
    """
    if self.result_df2 is None:
        self.result_df2 = result_df2
    else:
        self.result_df2 = pd.concat([self.result_df2, result_df2])
    self.result_df2.to_csv(f"{current_step_dir}/result_2.csv")
15,346 | be7b7addd4a0b387f9ca6da690dc443cbec53342 | # coding: utf-8
from flask import (Blueprint, request, url_for, redirect,
render_template, flash, json)
from tango import db, cache
from tango.ui.tables import make_table
from tango.models import Setting, DictCode, Category
from tango.ui import navbar
from nodes.models import NodeHost
from nodes.tables import NodeHostTable
from users.models import User
from alarms.models import AlarmSeverity
from .models import Threshold, Metric
from .forms import ThresholdEditForm, ThresholdNewForm, MetricNewEditForm
from .models import OperationLog, SecurityLog, SubSystem, TimePeriod
from .tables import MetricTable, ThresholdTable
from .tables import (SettingTable, OperationLogTable, SecurityLogTable,
DictCodeTable, SubSystemTable, TimePeriodTable)
from .forms import (SettingEditForm, SearchForm, OplogFilterForm, DictCodeFilterForm,
DictCodeNewEditForm, NodeHostEditForm, TimePeriodNewEditForm)
sysview = Blueprint('system', __name__)


@sysview.context_processor
def inject_navid():
    """Expose navid='system' to every template rendered by this blueprint."""
    return {'navid': 'system'}
# ==============================================================================
# 系统设置
# ==============================================================================
@sysview.route('/system/')
@sysview.route('/system/settings/')
def settings():
    """List all system settings."""
    settings_table = make_table(Setting.query, SettingTable)
    return render_template('/system/settings/index.html', table=settings_table)
@sysview.route('/system/setting/edit/<int:id>', methods=('GET', 'POST'))
def settings_edit(id):
    """Edit one system setting and invalidate its cache entry.

    Fix: ``form.is_submitted and ...`` referenced the bound method without
    calling it (always truthy, a no-op); validate_on_submit() already
    includes the is_submitted() check.
    """
    form = SettingEditForm()
    setting = Setting.query.get_or_404(id)
    if form.validate_on_submit():
        old_value = setting.value
        setting.value = form.value.data
        db.session.commit()
        # Drop the cached value keyed "<mod>.<name>" so readers see the update.
        cache.delete(setting.mod + '.' + setting.name)
        flash(u'%s 被修改:(%s)--> %s' % (setting.name, old_value, form.value.data), 'success')
        return redirect('/system/settings/')
    form.process(obj=setting)
    return render_template('/system/settings/edit.html', form=form, setting=setting)
# ==============================================================================
# 字典管理
# ==============================================================================
@sysview.route('/dict-codes/')
def dict_codes():
    """List dictionary codes, optionally filtered by type and validity."""
    filter_form = DictCodeFilterForm(formdata=request.args)
    code_query = DictCode.query
    if filter_form.type.data:
        code_query = code_query.filter_by(type_id=filter_form.type.data.id)
    if filter_form.is_valid.data:
        code_query = code_query.filter_by(is_valid=filter_form.is_valid.data)
    code_table = make_table(code_query, DictCodeTable)
    return render_template('/system/dict-codes/index.html', table=code_table, form=filter_form)
@sysview.route('/dict-codes/new', methods=('GET', 'POST'))
def dict_codes_new():
    """Create a new dictionary code.

    Fix: dropped the no-op ``form.is_submitted and`` (bound-method reference,
    always truthy); validate_on_submit() already checks submission.
    """
    form = DictCodeNewEditForm()
    if form.validate_on_submit():
        dict_code = DictCode()
        form.populate_obj(dict_code)
        db.session.add(dict_code)
        db.session.commit()
        flash(u'字典(%s)添加成功' % dict_code.code_label, 'success')
        return redirect('/dict-codes/')
    return render_template('/system/dict-codes/new_edit.html', form=form,
                           action='/dict-codes/new', title=u'添加字典')
@sysview.route('/dict-codes/edit/<int:id>', methods=('GET', 'POST'))
def dict_codes_edit(id):
    """Edit an existing dictionary code.

    Fix: dropped the no-op ``form.is_submitted and`` (bound-method reference,
    always truthy); validate_on_submit() already checks submission.
    """
    dict_code = DictCode.query.get_or_404(id)
    form = DictCodeNewEditForm()
    if form.validate_on_submit():
        form.populate_obj(dict_code)
        db.session.commit()
        flash(u'字典(%s)修改成功' % dict_code.code_label, 'success')
        return redirect('/dict-codes/')
    form.process(obj=dict_code)
    return render_template('/system/dict-codes/new_edit.html', form=form,
                           action=url_for('system.dict_codes_edit', id=id), title=u'修改字典')
# ==============================================================================
# 阀值管理
# ==============================================================================
@sysview.route('/')
@sysview.route('/thresholds/')
def thresholds():
    """List alarm thresholds, with an optional keyword search across
    name, alias, category alias and summary."""
    threshold_query = Threshold.query
    search_form = SearchForm(formdata=request.args)
    keyword = search_form.keyword.data
    if keyword:
        pattern = '%' + keyword + '%'
        threshold_query = threshold_query.filter(db.or_(
            Threshold.name.ilike(pattern),
            Threshold.alias.ilike(pattern),
            Threshold.category.has(Category.alias.ilike(pattern)),
            Threshold.summary.ilike(pattern)))
    threshold_table = make_table(threshold_query, ThresholdTable)
    return render_template("system/thresholds/index.html",
                           filterForm=search_form, table=threshold_table)
@sysview.route('/thresholds/new', methods=['GET', 'POST'])
def thresholds_new():
    """Create a new alarm threshold.

    Fix: dropped the no-op ``form.is_submitted and`` (bound-method reference,
    always truthy).
    """
    form = ThresholdNewForm()
    if form.validate_on_submit():
        threshold = Threshold()
        form.populate_obj(threshold)
        db.session.add(threshold)
        db.session.commit()
        flash(u'阀值(%s)添加成功' % threshold.name, 'success')
        return redirect(url_for('system.thresholds'))
    return render_template("system/thresholds/new.html", form=form)
@sysview.route('/thresholds/edit/<int:id>', methods=['GET', 'POST'])
def thresholds_edit(id):
    """Edit an existing alarm threshold.

    Fix: dropped the no-op ``form.is_submitted and`` (bound-method reference,
    always truthy).
    """
    form = ThresholdEditForm()
    threshold = Threshold.query.get_or_404(id)
    if form.validate_on_submit():
        form.populate_obj(threshold)
        db.session.commit()
        flash(u'阀值(%s)修改成功' % threshold.name, 'success')
        return redirect(url_for('system.thresholds'))
    form.process(obj=threshold)
    return render_template("system/thresholds/edit.html", form=form, id=id)
# ==============================================================================
# 指标管理
# ==============================================================================
@sysview.route('/metrics/')
def metrics():
    """List collection metrics, with an optional keyword search."""
    metric_query = Metric.query
    search_form = SearchForm(formdata=request.args)
    keyword = search_form.keyword.data
    if keyword:
        pattern = '%' + keyword + '%'
        metric_query = metric_query.filter(db.or_(
            Metric.name.ilike(pattern),
            Metric.grp.ilike(pattern),
            Metric.alias.ilike(pattern)))
    metric_table = make_table(metric_query, MetricTable)
    return render_template('system/metrics/index.html',
                           filterForm=search_form, table=metric_table)
@sysview.route('/metrics/new', methods=['GET', 'POST'])
def metrics_new():
    """Create a new metric.

    Fix: dropped the no-op ``form.is_submitted and`` (bound-method reference,
    always truthy).
    """
    form = MetricNewEditForm()
    if form.validate_on_submit():
        metric = Metric()
        form.populate_obj(metric)
        db.session.add(metric)
        db.session.commit()
        flash(u'指标 (%s) 添加成功!' % metric.alias, 'success')
        return redirect(url_for('system.metrics'))
    return render_template('system/metrics/new-edit.html', form=form,
                           action=url_for('system.metrics_new'), title=u'添加指标')
@sysview.route('/metrics/edit/<int:id>', methods=['GET', 'POST'])
def metrics_edit(id):
    """Edit an existing metric.

    Fix: dropped the no-op ``form.is_submitted and`` (bound-method reference,
    always truthy).
    """
    form = MetricNewEditForm()
    metric = Metric.query.get_or_404(id)
    if form.validate_on_submit():
        form.populate_obj(metric)
        db.session.commit()
        flash(u'指标 (%s) 修改成功' % metric.alias, 'success')
        return redirect(url_for('system.metrics'))
    form.process(obj=metric)
    return render_template('system/metrics/new-edit.html', form=form,
                           action=url_for('system.metrics_edit', id=id), title=u'修改指标')
@sysview.route('/metric/delete/<int:id>', methods=['GET', 'POST'])
def metrics_delete(id):
    """Confirm (GET) and perform (POST) deletion of a metric."""
    metric = Metric.query.get_or_404(id)
    if request.method == 'POST':
        db.session.delete(metric)
        db.session.commit()
        flash(u'指标 (%s) 删除成功!' % metric.alias, 'success')
        return redirect(url_for('system.metrics'))
    # GET: render the shared confirmation modal.
    modal_args = dict(
        title=u'删除指标',
        action=url_for('system.metrics_delete', id=id),
        fields=[(u'名称', metric.name), (u'显示名', metric.alias)],
        type='delete',
    )
    return render_template('tango/_modal.html', **modal_args)
# ==============================================================================
# 采集规则管理
# ==============================================================================
@sysview.route('/timeperiods/')
def timeperiods():
    """List collection time periods, with an optional keyword search."""
    period_query = TimePeriod.query
    search_form = SearchForm(formdata=request.args)
    keyword = search_form.keyword.data
    if keyword:
        pattern = '%' + keyword + '%'
        period_query = period_query.filter(db.or_(
            TimePeriod.name.ilike(pattern),
            TimePeriod.alias.ilike(pattern)))
    period_table = make_table(period_query, TimePeriodTable)
    return render_template('/system/timeperiods/index.html',
                           filterForm=search_form, table=period_table)
@sysview.route('/timeperiods/new', methods=['GET', 'POST'])
def timeperiods_new():
    """Create a new collection rule (time period).

    Fix: dropped the no-op ``form.is_submitted and`` (bound-method reference,
    always truthy).
    """
    form = TimePeriodNewEditForm()
    if form.validate_on_submit():
        timeperiod = TimePeriod()
        form.populate_obj(timeperiod)
        db.session.add(timeperiod)
        db.session.commit()
        flash(u'规则添加成功!', 'success')
        return redirect(url_for('system.timeperiods'))
    return render_template('/system/timeperiods/new-edit.html', form=form,
                           action=url_for('system.timeperiods_new'), title=u'添加规则')
@sysview.route('/timeperiods/edit/<int:id>', methods=['GET', 'POST'])
def timeperiods_edit(id):
    """Edit an existing collection rule (time period).

    Fix: dropped the no-op ``form.is_submitted and`` (bound-method reference,
    always truthy).
    """
    form = TimePeriodNewEditForm()
    timeperiod = TimePeriod.query.get_or_404(id)
    if form.validate_on_submit():
        form.populate_obj(timeperiod)
        db.session.commit()
        flash(u'修改成功!', 'success')
        return redirect(url_for('system.timeperiods'))
    form.process(obj=timeperiod)
    return render_template('/system/timeperiods/new-edit.html', form=form,
                           action=url_for('system.timeperiods_edit', id=id), title=u'修改规则')
# ==============================================================================
# 日志管理
# ==============================================================================
@sysview.route('/oplogs/')
def oplogs():
    """List operation logs filtered by keyword, user, terminal IP and date range."""
    log_query = OperationLog.query
    filter_form = OplogFilterForm(formdata=request.args)

    keyword = filter_form.keyword.data
    if keyword:
        pattern = '%' + keyword.strip() + '%'
        log_query = log_query.filter(db.or_(
            OperationLog.terminal_ip.ilike(pattern),
            OperationLog.summary.ilike(pattern)))

    user = filter_form.uid.data
    if user:
        log_query = log_query.filter(OperationLog.uid == user.id)

    ip = filter_form.ip.data
    if ip:
        log_query = log_query.filter(OperationLog.terminal_ip == ip)

    start_date = filter_form.start_date.data
    if start_date:
        log_query = log_query.filter(OperationLog.created_at >= start_date)

    end_date = filter_form.end_date.data
    if end_date:
        log_query = log_query.filter(OperationLog.created_at <= end_date)

    log_table = make_table(log_query, OperationLogTable)
    return render_template('/system/oplogs.html',
                           table=log_table, filterForm=filter_form)
@sysview.route('/seclogs/')
def seclogs():
    """List security logs, searching terminal IP and username by keyword."""
    log_query = SecurityLog.query
    search_form = SearchForm(formdata=request.args)
    keyword = search_form.keyword.data
    if keyword:
        pattern = '%' + keyword.strip() + '%'
        log_query = log_query.filter(db.or_(
            SecurityLog.terminal_ip.ilike(pattern),
            SecurityLog.user.has(User.username.ilike(pattern))))
    log_table = make_table(log_query, SecurityLogTable)
    return render_template('/system/seclogs.html',
                           filterForm=search_form, table=log_table)
# ==============================================================================
# 网管系统
# ==============================================================================
@sysview.route('/hosts/')
def hosts():
    """List the NMS node hosts."""
    host_table = make_table(NodeHost.query, NodeHostTable)
    return render_template("/system/hosts/index.html", table=host_table)
@sysview.route('/hosts/edit/<int:id>', methods=['GET', 'POST'])
def hosts_edit(id):
    """Edit one NMS host.

    Fix: dropped the no-op ``form.is_submitted and`` (bound-method reference,
    always truthy).
    """
    host = NodeHost.query.get_or_404(id)
    form = NodeHostEditForm()
    if form.validate_on_submit():
        form.populate_obj(host)
        db.session.commit()
        flash(u'%s 修改成功' % host.name, 'success')
        return redirect('/hosts/')
    form.process(obj=host)
    return render_template("/system/hosts/edit.html", form=form, host=host)
@sysview.route('/subsystems/', methods=['GET'])
def subsystems():
    """List the registered subsystems."""
    subsystem_table = make_table(SubSystem.query, SubSystemTable)
    return render_template('/system/subsystems.html', table=subsystem_table)
navbar.add('system', u'系统', 'wrench', '/system')
|
15,347 | 3c8d0e908b7737d3a186b948a629e5887203c5ed | # -*- coding: utf-8 -*-
"""
Created on Thu Sep 19 15:07:13 2019
@author: agnib
"""
# Pair each country with its capital and materialise the pairs as a tuple.
country = ('India', 'America', 'Mexico', 'Bangladesh')
capital = ('Delhi NCR', 'Washington DC', 'Mexico City', 'Dhaka')
zipped = tuple((nation, city) for nation, city in zip(country, capital))
print(zipped)
15,348 | 15417fe2e520de87a75e13290ac0f2a5d5fbc055 | # -*- coding: utf-8 -*-
"""
Created on Fri Dec 29 14:36:50 2017
@author: Kim
"""
import os
import glob
import shutil as sh
import sys
path = sys.path[0]

# Parse the two required settings from Parameters.txt.
# Fixes: the file handle is now closed (with-block), missing keys raise a
# clear error instead of a NameError below, and os.makedirs(exist_ok=True)
# no longer crashes when ANALYSIS already exists.
directory = None
filesize = None
with open("%s/Parameters.txt" % path, "r") as params:
    for line in params:
        if line.startswith("Directory"):
            directory = line.split(":")[1].strip()
        if line.startswith("File Size"):
            # "File Size" is given in kB; convert to bytes.
            filesize = int(line.split(":")[1].strip()) * 1000
if directory is None or filesize is None:
    raise ValueError("Parameters.txt must define 'Directory' and 'File Size'")

# Copy every FASTA file larger than the threshold into an ANALYSIS subfolder.
analysis_dir = os.path.join(directory, "ANALYSIS")
os.makedirs(analysis_dir, exist_ok=True)
for fasta in glob.glob(os.path.join(directory, "*.fasta")):
    if os.path.getsize(fasta) > filesize:
        sh.copy2(fasta, analysis_dir)
15,349 | 3551d1d6ff2169079383bc7abe5bea0fc6bb1f29 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Source: https://leetcode.com/problems/can-make-arithmetic-progression-from-sequence/
# Author: Miao Zhang
# Date: 2021-05-12
class Solution:
    def canMakeArithmeticProgression(self, arr: List[int]) -> bool:
        """Return True if arr can be reordered into an arithmetic progression.

        Fixes: the original raised IndexError for inputs of length < 2 and
        sorted the caller's list in place. A sorted copy is used instead, and
        any sequence of length <= 2 is trivially an arithmetic progression.
        """
        if len(arr) <= 2:
            return True
        ordered = sorted(arr)
        diff = ordered[1] - ordered[0]
        return all(ordered[i] - ordered[i - 1] == diff
                   for i in range(2, len(ordered)))
|
15,350 | 5001f7c8c953ffc9c6162bc5172b94e87b68244f | import joblib
import pickle
import numpy as np
from sklearn.neural_network import MLPClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
from keras.models import Sequential
from keras.layers import Dense
from keras.utils import to_categorical
from keras.callbacks import EarlyStopping
def load_precomputed_embeddings(precomputed_path, mm_ann, label_mapping=None):
    """Load precomputed token embeddings and labels from a TSV file.

    Each line is tab-separated with the CUI in column 3, the semantic type
    (STY) in column 4 and a whitespace-separated float vector in the last
    column. ``mm_ann`` selects which annotation ('cui' or 'sty') becomes the
    label. When ``label_mapping`` is None a fresh mapping is built (labels
    numbered from 1, 'UNK' -> 0); otherwise unseen labels map to 0.

    Returns (X, y, label_mapping): X is an (n, dim) float32 array, y a list
    of integer label ids.
    """
    all_anns, all_vecs = [], []
    with open(precomputed_path, 'r') as f:
        for line in f:
            elems = line.split('\t')
            vec = np.array(list(map(float, elems[-1].split())), dtype=np.float32)
            if mm_ann == 'sty':
                all_anns.append(elems[4])
            elif mm_ann == 'cui':
                all_anns.append(elems[3])
            all_vecs.append(vec)
    if label_mapping is None:
        label_mapping = {a: i + 1 for i, a in enumerate(set(all_anns))}
        label_mapping['UNK'] = 0
    X = np.vstack(all_vecs)
    # Annotations absent from the mapping fall back to the UNK id (0);
    # dict.get replaces the original try/except KeyError.
    y = [label_mapping.get(ann, 0) for ann in all_anns]
    return X, y, label_mapping
# Which MetaMap annotation to use as the label space.
# mm_ann = 'sty' # MLP512 Acc: 0.8110184669494629 SOFTMAX Acc: 0.7777806720469078
mm_ann = 'cui'

# Precomputed SciBERT embeddings for the MedMentions ST21pv splits.
path_precomputed_train_vecs = 'mm_st21pv.train.scibert_scivocab_uncased.precomputed'
path_precomputed_dev_vecs = 'mm_st21pv.dev.scibert_scivocab_uncased.precomputed'

print('Loading precomputed ...')
# The dev split reuses the training label mapping so label ids stay consistent.
X_train, y_train, train_label_mapping = load_precomputed_embeddings(path_precomputed_train_vecs, mm_ann)
X_dev, y_dev, _ = load_precomputed_embeddings(path_precomputed_dev_vecs, mm_ann, train_label_mapping)

# Earlier scikit-learn baselines, kept for reference:
# clf = MLPClassifier(hidden_layer_sizes=(512,), activation='relu', solver='adam', max_iter=200, verbose=True, random_state=42)
# clf = MLPClassifier(hidden_layer_sizes=(64,), activation='relu', solver='adam', max_iter=200, verbose=True, random_state=42)
# clf = LogisticRegression(random_state=42, multi_class='multinomial', solver='sag', max_iter=200, n_jobs=4, verbose=True)

n_classes = len(set(y_train)) + 1 # UNK
# NOTE(review): n_classes can differ from len(train_label_mapping) if some
# mapped labels never occur in y_train; the dev-side to_categorical below
# uses len(train_label_mapping) -- confirm the two agree.

# model = Sequential([
#     Dense(512, activation='relu', input_shape=(768,)),
#     Dense(n_classes, activation='softmax'),
# ])
# A single softmax layer over the 768-d SciBERT embedding.
model = Sequential([
    Dense(n_classes, activation='softmax', input_shape=(768,)),
])
model.compile(
    optimizer='adam',
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
# Stop once training accuracy plateaus (no validation split is passed to fit).
es = EarlyStopping(monitor='acc', mode='max', verbose=1, min_delta=0.01, patience=10)

print('Training ...')
# clf.fit(X_train, y_train)
model.fit(
    X_train,
    to_categorical(y_train),
    epochs=100,
    batch_size=64,
    callbacks=[es],
)

print('Evaluating ...')
# y_dev_preds = mlp.predict_proba(X_dev)
# y_dev_preds = clf.predict(X_dev)
# acc = accuracy_score(y_dev, y_dev_preds)
loss, acc = model.evaluate(X_dev, to_categorical(y_dev, num_classes=len(train_label_mapping)))
print('Acc:', acc)

print('Saving model ...')
# joblib.dump(clf, 'lr_multi.%s.model.joblib' % mm_ann)
# Persist both the trained model and the label mapping needed to decode ids.
model.save('softmax.%s.model.h5' % mm_ann)
joblib.dump(train_label_mapping, 'softmax.%s.mapping.joblib' % mm_ann)
|
15,351 | 7156d9612b9bc4db72c86ae4c812f37e96042ce1 | # -*- coding: utf-8 -*-
"""
Created on Sun Nov 24 13:52:24 2019
@author: Eric
"""
import os
import io
import csv
import pandas as pd
import numpy as np
import psycopg2
from psycopg2 import sql
from sys import exit
from sqlalchemy import create_engine
# list of states to explore
# list of states to explore
states = ['tennessee','west_virginia','alabama',
          'california','hawaii','washington_dc']

# Creates a connection string
engine = create_engine('postgresql+psycopg2://python:password@localhost/arcos')
# raw connection
conn = engine.raw_connection()

# One result list per state: the top-10 reporters by row count.
sellers = []
i = 0
for state in states:
    # Opens a cursor for the table create
    select_cursor = conn.cursor()
    #table_name = states[i]
    # query to select the count from the table
    # (one table per state; the table name is injected safely via sql.Identifier)
    select_query = sql.SQL('''SELECT DISTINCT reporter_name, COUNT(*) AS count
    FROM {}
    GROUP BY reporter_name
    ORDER BY count DESC
    LIMIT 10''').format(
        sql.Identifier(states[i]))
    #sql.Identifier(states[i]))
    # executes the query
    select_cursor.execute(select_query)
    # appends query results to sellers list
    sellers.append(select_cursor.fetchall())
    i += 1

# closes the SQL cursor, commits all SQL transactions
# NOTE(review): only the cursor from the final loop iteration is closed here;
# earlier cursors are left to the garbage collector -- confirm this is intended.
select_cursor.close()
# Flatten the per-state result lists into one list of (reporter_name, count) rows.
flat = [item for sublist in sellers for item in sublist]

# Attach the state name to each row. Each state contributed up to 10 rows,
# so row i belongs to states[i // 10].
# Fix: the previous open-coded range chain (i < 9, 9 < i < 19, ...) silently
# dropped the rows at i == 9, 19, 29, 39 and 49.
# NOTE(review): like the original, this assumes every state returned exactly
# 10 rows; a state with fewer rows shifts the buckets -- confirm upstream.
data = [[states[i // 10], company, total]
        for i, (company, total) in enumerate(flat)]

# turns the data list into a dataframe
state_df = pd.DataFrame(data, columns=['state', 'company', 'total'])
|
15,352 | e7c433d1b38141eea5e71e4c84eeb365686d0a1d | # Copyright (C) Rocket Software 1993-2015
# Prompt for the user's name and greet them.
hello_name = input('Type your name: ')
print("Hello", hello_name)
|
15,353 | 69e655b1b5d7aaf04467f9f4308beda775283831 |
# check with Geert:
# Correct files, and correct order for test images
# check that JC .names correspond to the order of the test set
from models.fixedLDA import fixedLDA
from evaluation import metrics
from gibbs_input import process_gibbs_input as gibbs_input
# Hyperparameters of the fixed-LDA run being evaluated.
nominal_alpha = 0.35
n_vis_words = 750

# '{0}, {1}, {2}'.format('a', 'b', 'c')
# Paths to Gibbs-sampler training and inference output for this configuration.
rpath = '../../DATASETS/geert_output/zappos-preprocessing/flda/textvis{}/'.format(n_vis_words)
train_wdir = rpath + '/ptm_training_output/alpha_{0}/text/'.format(nominal_alpha, n_vis_words)
infer_wdir = rpath + '/ptm_inference_output/zappos-preprocessing{1}/alpha_{0}/vis{1}/'.format(nominal_alpha, n_vis_words)

# Fit the fixed-LDA model and predict word ids per test document.
flda = fixedLDA.FixedLDA(train_wdir, infer_wdir)
flda.fit()
predicted_wordids = flda.predict()
# print predicted_wordids
# [[196 11 106 ..., 166 71 177]
# [80 191 95..., 71 166 177]
# [4 51 180..., 150 145 71]
# ...,
# [185 74 184..., 71 166 177]
# [9 57 180..., 150 166 177]
# [175 192 74..., 150 166 177]]

# Compute Precision, Recall and F1
# Load true list
# Ground-truth documents, file names, vocabulary and word counts for the
# Zappos test split.
rpath2 = '../../DATASETS/dress_attributes/txt_represention/out_all/zappos/'
dot_docs_fname = rpath2 + 'text_features_test_zappos_0.0.docs'
dot_docs = gibbs_input.DotDocs(dot_docs_fname)
dot_names_fname = rpath2 + 'file_names_test_zappos_0.0.names'
dot_names = gibbs_input.DotNames(dot_names_fname)
dot_words_fname = rpath2 + 'filtered_vocabulary_zappos_0.0.words'
dot_words = gibbs_input.DotWordsFile(dot_words_fname)
dot_wc_fname = rpath2 + 'vocabulary_counts_test_zappos_0.0.wc'
dot_wc = gibbs_input.DotWCFile(dot_wc_fname)

true_list = dot_docs.wordid_list_all_docs
predicted_list = predicted_wordids
assert len(true_list) == len(predicted_list)

# Average precision/recall/F1 at rank k over all test documents.
k = 5
avg_prec = metrics.avg_metric_at_k(metrics.precision_at_k, true_list, predicted_list, k)
avg_recall = metrics.avg_metric_at_k(metrics.recall_at_k, true_list, predicted_list, k)
avg_f1 = metrics.avg_metric_at_k(metrics.f1_score, true_list, predicted_list, k)

# Python 2 print statements (this script targets Python 2).
print "avg_prec", avg_prec
print "avg_recall", avg_recall
print "avg_f1", avg_f1

# visualize predictions
|
15,354 | fb67ae4d7799fc36cf2ff3dcd3b58f7d2d2c25f4 | from django.db import models
# Create your models here.
class Member(models.Model):
    """A member, with Slack identity and graduation date."""
    # Display name.
    name = models.CharField('name', max_length=255)
    # Slack handle (attribute kept in camelCase; column label is slack_name).
    slackName = models.CharField('slack_name', max_length=255)
    # When the member graduates.
    graduateDate = models.DateTimeField('graduate_date')

    def __str__(self):
        return self.name
15,355 | 499f83e4996885a6dc3729d6a69442152cc900a8 | import time
import numpy as np
from scipy import sparse
from sklearn.preprocessing import normalize
import util, imageio, pickle, frame, scipy
from util import *
class Frame:
    """Abstract base for colorization frames.

    Subclasses supply the pixel geometry (`neighbors`, `idx`), hold the
    luminance channel in `self.Y`, and reuse the shared weighting logic here
    to build the sparse affinity matrix `Wn`.
    """

    def __init__(self):
        # Abstract: subclasses must set shape, Y, Wn, sketch, ...
        util.raiseNotDefined()

    def load_sketch(self, sketch):
        # Attach the user's color-sketch image.
        self.sketch = sketch

    def load_gray(self, gray):
        util.raiseNotDefined()

    def load_weight(self, Wn):
        # Install a precomputed weight matrix.
        self.Wn = Wn

    def neighbors(self, p):
        util.raiseNotDefined()

    def idx(self, p):
        util.raiseNotDefined()

    def weight(self, p):
        """Return (weights, flat indices) for the neighbors of pixel p.

        Weights follow exp(-(Y_p - Y_q)^2 / (2*S^2)) where S is the standard
        deviation of the neighborhood luminances; a zero deviation yields
        all-zero weights. (The neighbor loop variable no longer shadows the
        parameter `p`.)
        """
        N = self.neighbors(p)
        idx = [self.idx(q) for q in N]
        ys = [self.Y[q] for q in N]
        S = np.std(ys)
        if S <= 0:
            return np.zeros(shape=len(N)), idx
        return [np.exp(-1 * np.square(self.Y[tuple(p)] - y) / 2 / S / S) for y in ys], idx

    def build_weights_matrix(self):
        """Fill self.Wn row by row from the per-pixel weights, then
        L1-normalize rows and put 1 on the diagonal.

        Fix: the original called self.weight(i, j, self.Y), which does not
        match weight's (self, p) signature and raised TypeError; the pixel is
        now passed as [i, j].
        """
        start_time = time.time()
        x, y = self.shape[:2]
        for i in range(x):
            for j in range(y):
                weights, idx = self.weight([i, j])
                self.Wn[self.idx([i, j]), idx] = -1 * np.asmatrix(weights)
            if i % 10 == 0:
                # Progress logging only.
                print(i, self.Y.shape[0], time.time() - start_time)
        self.Wn = normalize(self.Wn, norm='l1', axis=1).tolil()
        self.Wn[np.arange(x * y), np.arange(x * y)] = 1

    def color(self):
        util.raiseNotDefined()
def color(self):
util.raiseNotDefined()
class StaticFrame(Frame):
    """A single still image to colorize: a YIQ sketch plus the gray original.

    Colorization solves Wn * x = b per chroma channel, where rows of Wn are
    the normalized luminance-affinity weights and marked pixels are pinned
    to their sketch (or gray) values.
    """

    def __init__(self, sketch, gray):
        self.shape = sketch.shape
        self.sketch = sketch
        self.gray = gray
        # Luminance channel drives the affinity weights.
        self.Y = np.array(gray[:, :, 0], dtype='float64')
        # One row/column per pixel (flattened row-major).
        self.Wn = sparse.lil_matrix((self.shape[0] * self.shape[1], self.shape[0] * self.shape[1]))
        self.solution = np.zeros(shape=self.shape)
        # Pixels whose sketch chroma differs from the gray image carry user color marks.
        colored = abs(self.sketch[:, :, 1]-self.gray[:, :, 1]) + abs(self.sketch[:, :, 2]-self.gray[:, :, 2]) > 0
        self.idx_marks = np.nonzero(colored)
        self.idx_marks = self.idx(self.idx_marks)
        # Pure-white sketch pixels (Y==1, I==Q==0, within 1e-8) mean "keep gray".
        white = (abs(self.sketch[:, :, 0]-np.ones(shape=(sketch.shape[:2]))) + abs(self.sketch[:, :, 1]) + abs(self.sketch[:, :, 2]))<1e-8
        self.idx_white = np.nonzero(white)
        self.idx_white = self.idx(self.idx_white)
        # Split the two constraint sets so each pixel is pinned only once.
        self.idx_white = [i for i in self.idx_white if i in self.idx_marks]
        self.idx_marks = [i for i in self.idx_marks if i not in self.idx_white]

    def load_weight(self, Wn):
        # Install a precomputed weight matrix (skips build_weights_matrix).
        self.Wn = Wn

    def neighbors(self, p, d=3):
        """Return all pixels within a (2d+1)x(2d+1) window around p,
        clipped to the image and excluding p itself."""
        i, j = p[0], p[1]
        x, y = self.Y.shape[:2]
        x1 = max(i - d, 0)
        x2 = min(i + d+1, x)
        y1 = max(j - d, 0)
        y2 = min(j + d+1, y)
        N = []
        for a in range(x1, x2):
            for b in range(y1, y2):
                if (a != i or b != j):
                    N.append(tuple([a, b]))
        return N

    def idx(self, p):
        # Row-major flattening of an (i, j) pixel (also works on np.nonzero output).
        return p[0]*self.shape[1]+p[1]

    def build_weights_matrix(self):
        """Fill Wn from per-pixel affinity weights, L1-normalize rows and
        set the diagonal to 1."""
        print("Starting calculating weight matrix......")
        start_time = time.time()
        x, y = self.shape[:2]
        for i in range(x):
            for j in range(y):
                weights, idxthis = self.weight([i, j])
                self.Wn[self.idx([i, j]), idxthis] = -1 * np.asarray(weights)
            if (i % 10 == 0):
                # Progress logging only.
                print(i, self.Y.shape[0], time.time() - start_time)
        self.Wn = normalize(self.Wn, norm='l1', axis=1).tolil()
        self.Wn[np.arange(x * y), np.arange(x * y)] = 1

    def color(self):
        """Solve the linear systems for the I and Q channels and return the
        colorized YIQ image."""
        start_time = time.time()
        ## set rows in colored indices
        # Pinned pixels get an identity row so the solve fixes their value.
        Wn = self.Wn.tocsc()
        for p in list(self.idx_marks):
            Wn[p] = sparse.csr_matrix(([1.0], ([0], [p])), shape=(1, self.shape[0]*self.shape[1]))
        for p in list(self.idx_white):
            Wn[p] = sparse.csr_matrix(([1.0], ([0], [p])), shape=(1, self.shape[0]*self.shape[1]))
        print("Finish adding colored to Wn {}".format(time.time() - start_time))
        # Right-hand sides: sketch chroma at marked pixels, gray chroma at white pixels.
        b1 = np.zeros(shape=(self.shape[0]*self.shape[1]))
        b2 = np.zeros(shape=(self.shape[0]*self.shape[1]))
        b1[self.idx_marks] = (self.sketch[:, :, 1]).flatten()[self.idx_marks]
        b2[self.idx_marks] = (self.sketch[:, :, 2]).flatten()[self.idx_marks]
        b1[self.idx_white] = (self.gray[:, :, 1]).flatten()[self.idx_white]
        b2[self.idx_white] = (self.gray[:, :, 2]).flatten()[self.idx_white]
        x1 = sparse.linalg.spsolve(Wn, b1)
        x2 = sparse.linalg.spsolve(Wn, b2)
        print("Finish solving LU {}".format(time.time() - start_time))
        # Keep the original luminance; the solve produced the chroma planes.
        self.solution[:, :, 0] = self.Y
        self.solution[:, :, 1] = x1.reshape(self.shape[:2])
        self.solution[:, :, 2] = x2.reshape(self.shape[:2])
        return self.solution
class DynamicFrame(Frame):
    """A video frame colorized jointly with its predecessor.

    The luminance plane Y is the horizontal concatenation [current | previous],
    so the weight matrix couples pixels across frames; optical flow
    (Lucas-Kanade style) picks the previous-frame neighborhood.
    """

    def __init__(self, sketch, gray, previous):
        self.shape = sketch.shape
        self.sketch = sketch
        self.gray = gray
        self.previous = previous
        # Y is (h, 2w): left half = current luminance, right half = previous frame's.
        self.Y = np.zeros(shape=(self.shape[0], self.shape[1]*2))
        self.Y[:, :self.shape[1]] = gray[:, :, 0]
        self.Y[:, self.shape[1]:] = self.previous.Y
        # 2*h*w unknowns: current-frame pixels then previous-frame pixels.
        self.Wn = sparse.csc_matrix((self.shape[0] * self.shape[1] * 2, self.shape[0] * self.shape[1] * 2))
        # Spatial gradients of the concatenated Y and the temporal difference,
        # used by the optical-flow estimate in velocity().
        self.Ix, self.Iy = np.gradient(self.Y)
        self.It = gray[:, :, 0] - self.previous.Y
        self.solution = np.zeros(shape=self.shape)
        # Pixels whose sketch chroma differs from gray carry user color marks.
        colored = abs(self.sketch[:, :, 1] - self.gray[:, :, 1]) + abs(self.sketch[:, :, 2] - self.gray[:, :, 2]) > 0
        self.idx_marks = np.nonzero(colored)
        self.idx_marks = self.idx(self.idx_marks)
        # Pure-white sketch pixels mean "keep gray".
        white = (abs(self.sketch[:, :, 0] - np.ones(shape=(sketch.shape[:2]))) + abs(self.sketch[:, :, 1]) + abs(
            self.sketch[:, :, 2])) < 1e-8
        self.idx_white = np.nonzero(white)
        self.idx_white = self.idx(self.idx_white)
        self.idx_white = [i for i in self.idx_white if i in self.idx_marks]
        self.idx_marks = [i for i in self.idx_marks if i not in self.idx_white]

    def idx(self, p):
        # Row-major flattening within the current frame.
        return p[0]*self.shape[1]+p[1]

    def build_weights_matrix(self):
        """Copy the previous frame's weights into the lower-right block, then
        fill the current-frame rows and normalize."""
        start_time = time.time()
        print("call build_weights_matrix")
        x, y = self.shape[:2]
        # NOTE(review): xs, ys are unused, and the two .tolil() calls below
        # discard their return values (sparse conversions are not in-place) --
        # confirm whether the conversions were meant to be assigned.
        xs, ys = self.previous.Wn.nonzero()
        self.Wn.tolil()
        self.previous.Wn.tolil()
        # NOTE(review): this loop runs over range(x) but the previous frame's
        # matrix has x*y rows -- looks like only the first x rows get copied;
        # verify against the intended block-copy semantics.
        for i in range(x):
            self.Wn[i+x*y, x*y:] = self.previous.Wn[i, :]
        print("copy weights from previous {}".format(time.time()-start_time))
        for i in range(x):
            for j in range(y):
                weights, idxthis = self.weight([i, j])
                self.Wn[self.idx([i, j]), idxthis] = -1 * np.asarray(weights)
            if (i % 10 == 0):
                # Progress logging only.
                print(i, self.Y.shape[0], time.time() - start_time)
        self.Wn = normalize(self.Wn, norm='l1', axis=1).tolil()
        self.Wn[np.arange(x * y * 2), np.arange(x * y * 2)] = 1

    def velocity(self, p):
        """Estimate the optical-flow vector at p by least squares over a
        5x5 window (normal equations on [Ix Iy] v = It); falls back to
        [0, 0] when the system cannot be solved."""
        x1 = max(0, p[0]-2)
        x2 = min(self.shape[0]-1, p[0]+2)
        y1 = max(0, p[1] - 2)
        y2 = min(self.shape[1] - 1, p[1] + 2)
        A = []
        b = []
        for i in range(x1, x2+1):
            for j in range(y1, y2+1):
                A.append([self.Ix[i, j], self.Iy[i, j]])
                b.append([self.It[i, j]])
        try:
            v = scipy.linalg.solve(np.dot(np.transpose(A), A), np.dot(np.transpose(A), b))
        except:
            # Singular system (e.g. flat region): assume no motion.
            return [0, 0]
        # NOTE(review): v is a numpy array, so `v[0]==None` compares
        # element-wise and is effectively never True -- confirm what these
        # guards were meant to catch.
        if(v[0]==None):
            v[0]=0
            print(p, v)
        if (v[1] == None):
            v[1] = 0
            print(p, v)
        return v

    def neighbors(self, p):
        """Return the 3x3 spatial neighbors of p in the current frame plus
        the 3x3 neighbors around p's motion-compensated location in the
        previous frame (previous-frame columns are offset by the width y)."""
        i, j = p[0], p[1]
        x, y = self.shape[:2]
        x1 = max(i - 1, 0)
        x2 = min(i + 2, x)
        y1 = max(j - 1, 0)
        y2 = min(j + 2, y)
        N = []
        for a in range(x1, x2):
            for b in range(y1, y2):
                if (a != i or b != j):
                    N.append(tuple([a, b]))
        # Back-project p into the previous frame using the estimated flow.
        vx, vy = self.velocity(p)
        i, j = int(p[0]-vx), int(p[1]-vy)
        x1 = max(i - 1, 0)
        x2 = min(i + 2, x)
        y1 = max(j - 1, 0)
        y2 = min(j + 2, y)
        for a in range(x1, x2):
            for b in range(y1, y2):
                if (a != i or b != j):
                    N.append(tuple([a, y+b]))
        return N

    def color(self):
        """Solve the coupled two-frame systems for I and Q; return the
        colorized current frame and shrink state back to one frame."""
        start_time = time.time()
        ## set rows in colored indices
        # Pin marked current-frame pixels with identity rows.
        Wn = self.Wn.tocsc()
        for i in range(self.shape[0]):
            for j in range(self.shape[1]):
                if (self.sketch[i, j, 1] != self.gray[i, j, 1] or self.sketch[i, j, 2] != self.gray[i, j, 2]):
                    Wn[self.idx([i, j])] = sparse.csr_matrix(([1.0], ([0], [self.idx([i, j])])), shape=(1, self.shape[0]*self.shape[1]*2))
        print("Finish adding colored to Wn {}".format(time.time() - start_time))
        print(self.sketch.shape)
        print(self.previous.solution.shape)
        # Right-hand sides: current-frame constraints followed by the
        # previous frame's solved chroma planes.
        b10 = np.zeros(shape=(self.shape[1]*self.shape[0]))
        b20 = np.zeros(shape=(self.shape[1]*self.shape[0]))
        b10[self.idx_marks] = (self.sketch[:, :, 1]).flatten()[self.idx_marks]
        b20[self.idx_marks] = (self.sketch[:, :, 2]).flatten()[self.idx_marks]
        b10[self.idx_white] = (self.gray[:, :, 1]).flatten()[self.idx_white]
        b20[self.idx_white] = (self.gray[:, :, 2]).flatten()[self.idx_white]
        b1 = np.concatenate((b10, self.previous.solution[:, :, 1].flatten()), axis=None)
        b2 = np.concatenate((b20, self.previous.solution[:, :, 2].flatten()), axis=None)
        print(b1.shape)
        # Keep only the current-frame half of each solution vector.
        x1 = sparse.linalg.spsolve(Wn, b1)[:self.shape[1]*self.shape[0]]
        x2 = sparse.linalg.spsolve(Wn, b2)[:self.shape[1]*self.shape[0]]
        print("Finish solving LU {}".format(time.time() - start_time))
        self.solution[:, :, 0] = self.Y[:, :self.shape[1]]
        self.solution[:, :, 1] = x1.reshape(self.shape[:2])
        self.solution[:, :, 2] = x2.reshape(self.shape[:2])
        # Drop the previous-frame halves so this frame can serve as `previous`.
        self.Y = self.Y[:, :self.shape[1]]
        self.Wn = self.Wn[:self.shape[0]*self.shape[1], :self.shape[1]*self.shape[0]]
        return self.solution
if __name__ == "__main__":
dir = "videos/butterfly/"
start_time = time.time()
origin_rgb = imageio.imread("{}gray/frame1.png".format(dir, str(31)))
origin_yiq = rgb2yiq(origin_rgb)
sketch_rgb = imageio.imread("{}sketch/frame1.png".format(dir, str(31)))
sketch_yiq = rgb2yiq(sketch_rgb)
Y = np.array(origin_yiq[:, :, 0], dtype='float64')
s_origin_yiq = compress2(origin_yiq)
s_sketch_yiq = compress2(sketch_yiq)
with open("{}frame/dynamic_frame{}.pickle".format(dir, str(30)), "rb") as f:
pre = pickle.load(f)
curr = frame.DynamicFrame(s_sketch_yiq, s_origin_yiq, pre)
print("Starting building Wn")
curr.build_weights_matrix()
print(curr.Wn.shape)
sol = curr.color()
|
15,356 | e88793aa8b448fc1d21eb5d0865849b73f8f086a | """Logistic regression on the basic data"""
import numpy as np
import mdp
from sklearn import cross_validation
from sklearn import linear_model
import sys
import load_data
from base import calc_score, split_data
from logistic_base_model import Logistic_Base_Model, prepare_params
class Logistic_Inverse_Model(Logistic_Base_Model):
    """Logistic model that augments every odd-indexed feature column with
    its inverse, 1/(x+1), before normalising.

    The feature-expansion code was duplicated verbatim in load_train_data
    and load_data; it now lives in one private helper.
    """

    @staticmethod
    def _append_inverse_features(X):
        """Return X with 1/(x+1) appended for each odd-indexed original column.

        range() captures X.shape[1] before the loop grows X, so only the
        original columns are inverted (same as the original code).
        """
        for i in range(1, X.shape[1], 2):
            new_feature = np.power(X[:, i] + 1, -1)
            X = np.concatenate((X, np.atleast_2d(new_feature).T), axis=1)
        return X

    def load_train_data(self, data):
        """Load training data, append inverse features and normalise."""
        X, y = super(Logistic_Inverse_Model, self).load_train_data(data)
        X = self.normalise_data(self._append_inverse_features(X))
        return (X, y)

    def load_data(self, data, cv=True):
        """Load evaluation data, append inverse features and normalise."""
        X, y = super(Logistic_Inverse_Model, self).load_data(data, cv)
        X = self.normalise_data(self._append_inverse_features(X))
        return (X, y)
if __name__ == '__main__':
    if len(sys.argv) == 1:
        # Cross-validation mode: sweep the regularisation constant C.
        data = load_data.load_data()
        # Set up datasets for cross validation
        # NOTE(review): n_iterations/test_fraction are the pre-0.15
        # scikit-learn parameter names for ShuffleSplit -- this script
        # targets an old sklearn release.
        rs = cross_validation.ShuffleSplit(150000, n_iterations=3, test_fraction=.30)
        C, train_results, cv_results = prepare_params(0.0000001, 1000, 50)
        # Run through the cross validation iterations
        for train_index, cv_index in rs:
            train_data, cv_data = split_data(data[1], train_index, cv_index)
            model = Logistic_Inverse_Model()
            train_X, train_y = model.load_train_data(train_data)
            cv_X, cv_y = model.load_data(cv_data)
            for c in C:
                # Python 2 print statement (this script targets Python 2).
                print c
                model.train_model(train_X, train_y, {'c':c})
                train_probs = model.run_model(train_X)
                train_results[c].append(calc_score(train_probs, train_y))
                cv_probs = model.run_model(cv_X)
                cv_results[c].append(calc_score(cv_probs, cv_y))
        # Display results
        # Mean train/cv score per C value, sorted by C.
        keys = train_results.keys()
        keys.sort()
        for k in keys:
            print '{0:.2e} : {1:.4f} : {2:.4f}'.format(k,
                np.array(train_results[k]).mean(),
                np.array(cv_results[k]).mean())
    elif '--gen' in sys.argv[1]:
        # Submission mode: train on everything with --gen=<C> and print
        # one "<row_id>,<prediction>" line per test row.
        options = sys.argv[1].split('=')
        c = options[1]
        data = load_data.load_data()
        model = Logistic_Inverse_Model()
        train_X, train_y = model.load_train_data(data[1])
        test_X, test_y = model.load_data(data[2], False)
        model.train_model(train_X, train_y, {'c':float(c)})
        test_predictions = model.run_model(test_X)
        for i,v in enumerate(test_predictions):
            print '{0},{1}'.format(i+1,v)
15,357 | 36ded3f4cb5cb4f1f5aef8442fb15a26bb3c1955 | import common
# Load the 2019 shared helpers and the day-25 intcode program tape.
year_common = common.import_year_common(2019)
tape = common.extract_numbers(common.read_file('2019/25/data.txt'))
moves = [
'south',
'take monolith',
'east',
'take asterisk',
'west',
'north',
# 'west',
# 'take coin',
# 'north',
# 'east',
# 'take astronaut ice cream',
# 'west',
# 'south',
# 'east',
'north',
'north',
# 'take mutex',
'west',
'take astrolabe',
'west',
# 'take dehydrated water',
'west',
'take wreath',
'east',
'south',
'east',
'north',
'north'
]
def gen_input():
    """Yield the scripted commands as ASCII codes, one newline-terminated line each."""
    for command in moves:
        for ch in command:
            yield ord(ch)
        yield 10  # '\n' terminates each command for the Intcode machine

# manual controls
# while True:
#     inp = input()
#     for c in inp:
#         yield ord(c)
#     yield 10
# Boot the Intcode computer with the scripted input and stream its ASCII
# output (room descriptions / final password) straight to stdout.
mem = year_common.tape_to_mem(tape)
it = year_common.run_intcode(mem, gen_input())
for c in it:
    print(chr(c), end='')
|
15,358 | 323d7e2938e9fba78a04e36a8780ab71cda9d9c2 | # Copyright 2018 ZTE Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from tempest.lib.api_schema.response.compute.v2_1 import parameter_types
# Expected response schema when listing all volume services.  Each entry
# in 'services' describes one binary/host pair; replication and backend
# fields are optional (not in 'required').
list_services = {
    'status_code': [200],
    'response_body': {
        'type': 'object',
        'properties': {
            'services': {
                'type': 'array',
                'items': {
                    'type': 'object',
                    'properties': {
                        'binary': {'type': 'string'},
                        'disabled_reason': {'type': ['string', 'null']},
                        'host': {'type': 'string'},
                        'state': {'enum': ['up', 'down']},
                        'status': {'enum': ['enabled', 'disabled']},
                        'frozen': {'type': 'boolean'},
                        'updated_at': parameter_types.date_time,
                        'zone': {'type': 'string'},
                        'replication_status': {'type': 'string'},
                        'active_backend_id': {'type': ['string', 'null']},
                        'backend_state': {'type': 'string'},
                    },
                    'additionalProperties': False,
                    'required': ['binary', 'disabled_reason', 'host', 'state',
                                 'status', 'updated_at', 'zone']
                }
            }
        },
        'additionalProperties': False,
        'required': ['services']
    }
}

# Expected response schema for enabling a service.
enable_service = {
    'status_code': [200],
    'response_body': {
        'type': 'object',
        'properties': {
            'disabled': {'type': 'boolean'},
            'status': {'enum': ['enabled', 'disabled']},
            'host': {'type': 'string'},
            'service': {'type': 'string'},
            'binary': {'type': 'string'},
            'disabled_reason': {'type': ['string', 'null']}
        },
        'additionalProperties': False,
        'required': ['disabled', 'status', 'host', 'service',
                     'binary', 'disabled_reason']
    }
}

# Expected response schema for disabling a service.  Unlike enable, the
# body carries no 'disabled_reason'.
disable_service = {
    'status_code': [200],
    'response_body': {
        'type': 'object',
        'properties': {
            'disabled': {'type': 'boolean'},
            'status': {'enum': ['enabled', 'disabled']},
            'host': {'type': 'string'},
            'service': {'type': 'string'},
            'binary': {'type': 'string'},
        },
        'additionalProperties': False,
        'required': ['disabled', 'status', 'host', 'service', 'binary']
    }
}

# Disable-with-log-reason returns the same body shape as enable_service.
disable_log_reason = copy.deepcopy(enable_service)
# Freeze/thaw responses validate only the HTTP status, not a body.
freeze_host = {'status_code': [200]}
thaw_host = {'status_code': [200]}
|
15,359 | 4de14486c81bc41a4129de1eb8f4de11e7bbdf45 | # -*- coding: utf-8 -*-
import re
import scrapy
from scrapy.http import Request
from urllib import parse # python2使用 import urlparse
from MovieSpider.settings import SQL_DATETIME_FORMAT, SQL_DATE_FORMAT
from datetime import datetime
from MovieSpider.items import Ygdy8Item, MovieItemLoader
from MovieSpider.utils.common import get_md5
from scrapy.xlib.pydispatch import dispatcher # scrapy 分发器
from scrapy import signals # scrapy 信号量
class Ygdy8netSpider(scrapy.Spider):
    """Crawl ygdy8.net listing pages and extract per-title download links.

    Walks every category index in `start_urls`, follows pagination, and
    parses each movie detail page into a Ygdy8Item.  Failed responses
    (404/304/403) are recorded into the crawl stats at shutdown.
    """

    name = 'ygdy8net'
    allowed_domains = ['ygdy8.net']
    start_urls = [
        "http://www.ygdy8.net/html/gndy/rihan/index.html",
        "http://www.ygdy8.net/html/gndy/oumei/index.html",
        "http://www.ygdy8.net/html/gndy/china/index.html",
        "http://www.ygdy8.net/html/gndy/dyzz/index.html",
        "http://www.ygdy8.net/html/gndy/jddy/index.html",
        "http://www.ygdy8.net/html/tv/tvzz/index.html",
        "http://www.ygdy8.net/html/tv/rihantv/index.html",
        "http://www.ygdy8.net/html/tv/rihantv/riju/index.html",
        "http://www.ygdy8.net/html/tv/rihantv/riju/index.html",
        "http://www.ygdy8.net/html/tv/oumeitv/index.html",
        "http://www.ygdy8.net/html/tv/gangtai/index.html",
        "http://www.ygdy8.net/html/tv/hepai/index.html",
        "http://www.ygdy8.net/html/tv/hytv/index.html",
        "http://www.ygdy8.net/html/zongyi2013/index.html",
        "http://www.ygdy8.net/html/zongyi2013/taiwanzongyi/index.html",
        "http://www.ygdy8.net/html/zongyi2013/daluzongyi/index.html",
        "http://www.ygdy8.net/html/zongyi2013/tiyujiemu/index.html",
        "http://www.ygdy8.net/html/zongyi2013/qitazongyi/index.html",
        "http://www.ygdy8.net/html/dongman/index.html",
        "http://www.ygdy8.net/html/dongman/new/index.html",
        "http://www.ygdy8.net/html/dongman/ss/index.html",
        "http://www.ygdy8.net/html/dongman/qitadongman/index.html",
        "http://www.ygdy8.net/html/dongman/gcdh/index.html",
        "http://www.ygdy8.net/html/dongman/haizeiwangqu/index.html",
        "http://www.ygdy8.net/html/dongman/hy/index.html",
        "http://www.ygdy8.net/html/2009zongyi/index.html",
        "http://www.ygdy8.net/html/2009zongyi/tiyujiemu/index.html",
        "http://www.ygdy8.net/html/2009zongyi/taiwanzongyi/index.html",
        "http://www.ygdy8.net/html/2009zongyi/daluzongyi/index.html",
        "http://www.ygdy8.net/html/3gp/index.html",
        "http://www.ygdy8.net/html/3gp/3gpmovie/index.html",
    ]

    custom_settings = {
        "DOWNLOAD_DELAY": 3,
        "DOWNLOADER_MIDDLEWARES": {
            'MovieSpider.middlewares.RandomUserAgentMiddleware': 10,
        },
        "ITEM_PIPELINES": {
            'MovieSpider.pipelines.MysqlTwistedPipeline': 100,
            'MovieSpider.pipelines.ElasticsearchPipeline': 300,
        },
        # Request timeout (seconds)
        "DOWNLOAD_TIMEOUT": 5,
        # Re-request on failure
        "RETRY_ENABLED": True,
        # Number of retries
        "RETRY_TIMES": 3,
        # Log destination
        "LOG_FILE": "./logs/ygdy8net.log",
        "LOG_LEVEL": "INFO",
    }

    # Stats collection: these error statuses are normally filtered out by
    # Scrapy; listing them here lets parse() see and record them.
    handle_httpstatus_list = [404, 304, 403]

    def __init__(self, **kwargs):
        # Forward kwargs so `scrapy crawl -a key=value` arguments still work;
        # the original dropped them by never calling the base initialiser.
        super().__init__(**kwargs)
        self.fail_urls = []
        # Flush the collected failed URLs into the stats on shutdown.
        dispatcher.connect(self.handle_spider_closed, signals.spider_closed)

    def handle_spider_closed(self, spider, reason):
        """Persist the failed-URL list into the crawl stats when the spider closes."""
        self.crawler.stats.set_value("failed_urls", ",".join(self.fail_urls))

    def parse(self, response):
        """Parse a listing page: queue detail pages and follow pagination."""
        # Record error pages instead of silently dropping them.
        if response.status in self.handle_httpstatus_list:
            self.fail_urls.append(response.url)
            # crawler.stats: see scrapy/statscol.py
            self.crawler.stats.inc_value("failed_url")
        movie_nodes = response.css(
            ".co_content8 ul a.ulink::attr(href)").extract()
        for movie_url in movie_nodes:
            yield Request(url=parse.urljoin(response.url, movie_url), callback=self.parse_detail)
        # Follow the "next page" ("下一页") link, if any.
        next_url = response.xpath(
            '//a[contains(text(),"下一页")]/@href').extract_first()
        if next_url:
            yield Request(url=parse.urljoin(response.url, next_url), callback=self.parse)

    def parse_detail(self, response):
        """Extract one movie item (title + download links) from a detail page."""
        item_loader = MovieItemLoader(item=Ygdy8Item(), response=response)
        item_loader.add_value('url', response.url)
        item_loader.add_value('url_object_id', get_md5(response.url))
        item_loader.add_css(
            'title_detail', ".bd3r .co_area2 .title_all h1 font::text")
        item_loader.add_css(
            'title', ".bd3r .co_area2 .title_all h1 font::text")
        download_url = response.css(
            "#Zoom table tbody tr td a::attr(href)").extract()
        item_loader.add_value('download_url', download_url)
        item_loader.add_value('thunder_url', download_url)
        item_loader.add_value("crawl_time", datetime.now())
        movie_item = item_loader.load_item()
        # NOTE(review): pages with exactly one download link are dropped by
        # this check -- confirm whether `> 1` should be `>= 1`.
        if len(download_url) > 1:
            return movie_item
|
def fun(a, b, c, d, e, g):
    """Join the six profile fields into one space-separated string.

    Returns the string instead of printing it: the original did
    `return print(...)`, which printed the line but returned None, so the
    caller's own print() emitted a spurious "None".
    """
    return " ".join((a, b, c, d, e, g))
# Collect the six profile fields interactively and echo the combined line.
# Fixed typo in the last prompt: "телеыона" -> "телефона".
print(fun(a = input("введите имя "), b = input("введите фамилию "), c = input("введите город "), d = input("введите год рождения "), e = input("введите email "), g = input("введите номер телефона ")))
|
15,361 | 412a5e5c854b5f5f8fb417c801021990abc5cc2e | import cv2
import numpy as np
# Load the sample image as grayscale and show the original first.
image = cv2.imread('./black-background-white-font-believe-in-yourself-460x460.jpg',0)
cv2.imshow("Original", image)
cv2.waitKey(0)

# 5x5 all-ones structuring element shared by every transform below.
kernel = np.ones((5, 5), np.uint8)

# Each basic morphological transform of the image, shown one at a time.
transforms = (
    ("Erosion", cv2.erode(image, kernel, iterations=1)),
    ("Dilation", cv2.dilate(image, kernel, iterations=1)),
    ("Opening", cv2.morphologyEx(image, cv2.MORPH_OPEN, kernel)),   # erosion then dilation
    ("Closing", cv2.morphologyEx(image, cv2.MORPH_CLOSE, kernel)),  # dilation then erosion
)
for title, result in transforms:
    cv2.imshow(title, result)
    cv2.waitKey(0)

cv2.destroyAllWindows()
15,362 | e5db1a9d7307f3ce66f47d223142cd46ca32a281 | # import numpy as np
import caffe
import tables
import os
from os import walk
import sys
import timeit
import numpy as np
# Network definition/weights for the hashing model and the query image path.
proto_txt = 'ssdh.prototxt'
caffe_model = 'ssdh.caffemodel'
image = '/home/angle/hackathon/query/img.jpg'

#Function to generate hash codes, given the model and path to images
def ann_images(id):
    # NOTE: `id` (which shadows the builtin) is the path of the query image.
    # Computes the query's binary hash code, ranks every database image by
    # Hamming distance, prints the top-20 ids and copies those files into
    # ./search_results/.
    #Load the network its weights
    print '"Loading network and its weights ..."'
    net = caffe.Net(proto_txt, caffe_model, caffe.TEST)
    #Load input and configure preprocessing
    transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
    transformer.set_mean('data', np.load('/home/angle/software/caffe/python/caffe/imagenet/ilsvrc_2012_mean.npy').mean(1).mean(1))
    transformer.set_transpose('data', (2,0,1))
    transformer.set_channel_swap('data', (2,1,0))
    transformer.set_raw_scale('data', 255.0)
    #Read the database which is in HDF5 format
    print '"Reading the hashcode database ..."'
    f = tables.open_file('/home/angle/hackathon/hc_db.h5','r')
    t0 = timeit.default_timer()
    im = caffe.io.load_image(id)
    im = transformer.preprocess('data',im)
    net.blobs['data'].reshape(1, 3, 227, 227)
    net.blobs['data'].data[...] = im
    out = net.forward()
    # Binarise the sigmoid activations at 0.5 to get the query hash code.
    h = out['latent_sigmoid']
    h[h<=0.5] = 0
    h[h>0.5] = 1
    h=h.astype(int)
    hc = f.root.hashcodes
    ids = f.root.imageids
    #Compute distance between the search image and nearest neighbors
    # (disabled cluster-based shortlist kept below for reference)
    #g = tables.openFile('/home/vivek/visual_search/cluster_db.h5','r')
    #clusters = g.root.cluster
    #centers = g.root.cluster_centers
    #d = [sum(h[0,i]!=centers[n,i] for i in range(centers.shape[1])) for n in range(centers.shape[0])]
    #d = np.argsort(np.asarray(d))
    #h1 = np.where(clusters[:,0]==d[0])
    #h1 = np.asarray(h1)
    #h1 = h1.reshape(h1.shape[1],)
    #h2 = np.where(clusters[:,0]==d[1])
    #h2 = np.asarray(h2)
    #h2 = h2.reshape(h2.shape[1],)
    #ann_hcs = np.sort(np.concatenate((h1,h2),axis=0))
    #a_tile = np.tile(h[0,:], (ann_hcs.shape[0], 1))
    #ann = sum(a_tile[:,i]!=hc[ann_hcs[:,],i] for i in range(centers.shape[1]))
    #Compute distance between the search image & all the images in the database
    a_tile = np.tile(h[0,:], (hc.shape[0],1))
    # Hamming distance from the query code to every stored code.
    ann = np.sum(a_tile[:,]!=hc[:,], axis=1)
    #Find the images in the database with least hamming distance with the search image
    ann = np.argsort(ann)
    ann_image_ids = np.take(ids,ann[0:20])
    print ann_image_ids
    t1 = timeit.default_timer()
    print 'time for search'
    print t1-t0
    os.system('mkdir search_results')
    for id in ann_image_ids:
        os.system('cp '+id+' ./search_results/')
    f.close()

ann_images(image)
|
15,363 | 93a76129e863ad178116a839215c7a41868c6733 | # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from apps.shelf.models import (Book, Keyword)
class Command(BaseCommand):
def handle(self, *args, **opts):
books = Book.objects.filter(for_random=None)
for book in books:
book.set_for_random()
print "No.", book.pk
print "for Random:", book.for_random
keywords = Keyword.objects.filter(for_random=None)
for keyword in keywords:
keyword.set_for_random()
print "Keyword:", keyword.name
print "for Random", keyword.for_random
|
15,364 | 2272bc4303bfdc91bc7e23e11c2652d97aefc0d1 | #!/usr/bin/python
import alsaaudio as audio
import time
import os
import struct
import sys
audioformat=audio.PCM_FORMAT_S16_LE
# second arg to PCM can be ,audio.PCM_NONBLOCK
periodsize=1024
rate=44100
indev='hw:1,0'
outdev='hw:1,0'
skip=3
def fix(d):
    """Turn mono data into stereo by duplicating each 16-bit frame.

    `d` holds little-endian S16 samples; every 2-byte frame is repeated
    once, so the result is twice as long.  (An unreachable struct-based
    variant that sat after the `return` has been removed.)
    """
    n = 2  # bytes per mono S16_LE sample
    return ''.join([d[i:i + n] * 2 for i in range(0, len(d), n)])
def record(inp):
    """Capture audio from `inp` until sound has been followed by sustained silence.

    Skips `skip` seconds of input, waits for the average chunk amplitude to
    exceed the trigger threshold, then collects chunks (converted to stereo
    via fix()) until more than 100 quiet chunks have been counted.
    Returns the list of stereo chunks.
    """
    triggered = False
    alldata = []
    silent = 0
    # Skip a bit before starting.
    print '... skipping %ds' % (skip)
    for _ in range(int(skip*float(rate)/periodsize)):
        inp.read()
    print '... ok, listening'
    while True:
        l,data = inp.read()
        if len(data) == 0:
            continue
        if len(data) & 1 != 0:
            # An odd byte count cannot hold whole 16-bit samples; drop it.
            continue
        shorts = struct.unpack('<' + 'h' * (len(data)/2), data)
        # Mean absolute amplitude of the chunk; 2000 is the empirical
        # threshold separating "sound" from "silence".
        avg = sum([abs(x) for x in shorts])/len(shorts)
        if not triggered and avg < 2000:
            continue
        elif not triggered:
            print '... triggered with avg %f' % (avg)
            triggered = True
        if avg < 2000:
            # NOTE(review): `silent` is never reset when sound resumes, so
            # recording stops after 100 quiet chunks in total -- confirm
            # whether that is intended.
            silent += 1
            if silent > 100:
                return alldata
        alldata.append(fix(data))
def main():
    """Loop forever: record from the capture device, then play it back."""
    inp = audio.PCM(audio.PCM_CAPTURE,card=indev)
    inp.setchannels(1)  # capture is mono; record() converts to stereo
    inp.setrate(rate)
    #inp.setformat(audioformat)
    inp.setperiodsize(periodsize)

    out = audio.PCM(audio.PCM_PLAYBACK,card=outdev)
    out.setchannels(2)  # playback is stereo
    out.setrate(rate)
    #out.setformat(audioformat)
    out.setperiodsize(periodsize)

    while True:
        print "recording..."
        st = time.time()
        rec = record(inp)
        print '... took %ds and gave %d data' % (time.time()-st, len(rec))
        print '... %d data should be %.2fs' % (len(rec), len(rec)*periodsize/44100)
        if False:
            # Dead debug branch: chunks are already stereo-converted inside
            # record(), so no per-chunk transform is needed here.
            st = time.time()
            fixed = []
            print 'transforming...'
            for piece in rec:
                fixed.append(fix(piece))
            print '... took', (time.time()-st)
        else:
            fixed = rec
        st = time.time()
        print "playback..."
        for piece in fixed:
            out.write(piece)
        print '... took', (time.time()-st)

main()
|
15,365 | ad4a8d7460853942f3ab8fb570042725095a4cff | import sys
import os
sys.path.append( os.path.dirname( os.path.dirname( os.path.abspath(__file__) ) ) )
import numpy as np
import configargparse
import skvideo.io
from tqdm import tqdm
import time
import matplotlib.pyplot as plt
import pickle
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.tensorboard import SummaryWriter
from modules.modeling_utils import get_model, construct_model_args
from modules.utils import batch_indices_generator, batched_apply, ConfusionMatrix
import skimage.io
import trimesh
import pyembree
# Paths to the bundled test meshes, keyed by the CLI dataset name.
_mesh_paths = {
    'dragon': './data/dragon.ply',
    'bunny': './data/bunny.ply',
    'buddha': './data/buddha.ply',
    'armadillo': './data/armadillo.ply',
    'lucy': './data/lucy.ply'
}

# Ground-truth occupancy: True where a query point lies inside the mesh
# (trimesh ray casting).  Accepts any [..., 3] array of points and returns
# booleans of shape queries.shape[:-1].
gt_fn = lambda queries, mesh : mesh.ray.contains_points(queries.reshape([-1,3])).reshape(queries.shape[:-1])
def make_test_pts(mesh, corners, test_size=2**20):
    """Sample `test_size` easy (uniform in bbox) and hard (near-surface) query points."""
    lo, hi = corners
    easy_pts = np.random.uniform(size=[test_size, 3]) * (hi - lo) + lo
    surface_pts, _ = get_normal_batch(mesh, test_size)
    # Jitter surface samples slightly so labels are genuinely ambiguous.
    hard_pts = surface_pts + np.random.normal(size=[test_size, 3]) * .001
    return easy_pts, hard_pts
def as_mesh(scene_or_mesh):
    """Collapse a possible trimesh.Scene into a single Trimesh.

    Returns None for an empty scene.  When a scene is concatenated, only
    vertex and face data survive (texture information is lost).
    """
    if not isinstance(scene_or_mesh, trimesh.Scene):
        assert(isinstance(scene_or_mesh, trimesh.Trimesh))
        return scene_or_mesh
    if len(scene_or_mesh.geometry) == 0:
        return None  # empty scene
    parts = tuple(
        trimesh.Trimesh(vertices=g.vertices, faces=g.faces)
        for g in scene_or_mesh.geometry.values()
    )
    return trimesh.util.concatenate(parts)
def recenter_mesh(mesh):
    """Normalise mesh.vertices in place: center, scale to [-1,1], remap into [0,1]."""
    centered = mesh.vertices - mesh.vertices.mean(0)
    centered = centered / np.max(np.abs(centered))
    mesh.vertices = 0.5 * (centered + 1.0)
def load_mesh(mesh_name, verbose=True):
    """Load a named mesh, normalise it into the unit cube, return (mesh, corners).

    `corners` is [c0, c1], the bounding box padded by 1e-3 on each side.
    """
    mesh = as_mesh(trimesh.load(_mesh_paths[mesh_name]))
    if verbose:
        print(mesh.vertices.shape)
    recenter_mesh(mesh)
    c0 = mesh.vertices.min(0) - 1e-3
    c1 = mesh.vertices.max(0) + 1e-3
    if verbose:
        print(c0, c1)
        print(c1 - c0)
        print(np.prod(c1 - c0))
        print(.5 * (c0 + c1) * 2 - 1)
    return mesh, [c0, c1]
def load_test_pts(mesh_name, mesh_obj=None, regen=True, verbose=True):
    """Load (or regenerate and cache) labelled occupancy test points for a mesh.

    Returns {'easy': (pts, labels), 'hard': (pts, labels)}.  With regen=True
    or a missing cache, fresh points are sampled and saved next to the mesh
    file; occupancy labels are always recomputed by ray casting (only the
    points themselves are cached).
    """
    test_pt_file = os.path.join(os.path.split(_mesh_paths[mesh_name])[0], mesh_name + '_test_pts.npz')
    if mesh_obj is None:
        mesh, corners = load_mesh(mesh_name)
    else:
        mesh, corners = mesh_obj
    if regen or not os.path.exists(test_pt_file):
        test_pts_easy, test_pts_hard = make_test_pts(mesh, corners)
        np.savez(test_pt_file, easy=test_pts_easy, hard=test_pts_hard)
    else:
        if verbose: print('load pts')
        test_pts_dict = np.load(test_pt_file)
        test_pts_easy, test_pts_hard = test_pts_dict['easy'], test_pts_dict['hard']
    if verbose: print(test_pts_easy.shape)
    test_labels_easy = gt_fn(test_pts_easy, mesh)
    test_labels_hard = gt_fn(test_pts_hard, mesh)
    if verbose:
        print(f"Test points [easy] - Inside obj: {np.sum(test_labels_easy):d} - Outside obj: {np.sum(1 - test_labels_easy):d}")
        print(f"Test points [hard] - Inside obj: {np.sum(test_labels_hard):d} - Outside obj: {np.sum(1 - test_labels_hard):d}")
    return {
        'easy': (test_pts_easy, test_labels_easy),
        'hard': (test_pts_hard, test_labels_hard)
    }
###################
###################
# Homogeneous 4x4 float32 camera transforms used to build spherical poses.

# Translation by t along +z.
trans_t = lambda t : np.array([
    [1,0,0,0],
    [0,1,0,0],
    [0,0,1,t],
    [0,0,0,1],
], dtype=np.float32)

# Rotation by phi (radians) about the x axis.
rot_phi = lambda phi : np.array([
    [1,0,0,0],
    [0,np.cos(phi),-np.sin(phi),0],
    [0,np.sin(phi), np.cos(phi),0],
    [0,0,0,1],
], dtype=np.float32)

# Rotation by th (radians) about the y axis.
rot_theta = lambda th : np.array([
    [np.cos(th),0,-np.sin(th),0],
    [0,1,0,0],
    [np.sin(th),0, np.cos(th),0],
    [0,0,0,1],
], dtype=np.float32)
def pose_spherical(theta, phi, radius):
    """Camera-to-world matrix for a camera on a sphere (theta/phi in degrees)."""
    deg = np.pi / 180.
    c2w = trans_t(radius)
    c2w = rot_phi(phi * deg) @ c2w
    c2w = rot_theta(theta * deg) @ c2w
    # c2w = np.array([[-1,0,0,0],[0,0,1,0],[0,1,0,0],[0,0,0,1]]) @ c2w
    return c2w
def get_rays(H, W, focal, c2w):
    """Per-pixel ray origins and directions for a pinhole camera.

    Returns a [2, H, W, 3] array stacking origins and directions, both in
    world coordinates given camera-to-world matrix `c2w` ([3,4]).
    """
    cols, rows = np.meshgrid(np.arange(W), np.arange(H), indexing='xy')
    # Camera-frame directions through each pixel center (z points backwards).
    dirs = np.stack([(cols - W * .5) / focal,
                     -(rows - H * .5) / focal,
                     -np.ones_like(cols)], -1)
    # Rotate into world space: rays_d[h,w,j] = sum_i dirs[h,w,i] * c2w[j,i].
    rays_d = np.einsum('hwi,ji->hwj', dirs, c2w[:3, :3])
    rays_o = np.broadcast_to(c2w[:3, -1], rays_d.shape)
    return np.stack([rays_o, rays_d], 0)
#########
def render_rays_native_hier(model, rays, corners, near, far, N_samples, N_samples_2, clip, device): #, rand=False):
    """Two-pass (coarse + refined) occupancy ray march.

    Samples N_samples points per ray in [near, far], thresholds the model's
    sigmoid output into hard 0/1 alphas, composites a coarse depth map, then
    resamples N_samples_2 points in a +/-0.01 band around that depth and
    composites again.  Returns (depth_map, acc_map).
    NOTE(review): the second pass reuses (h, w, d) from the first, so it
    assumes N_samples_2 == N_samples -- confirm for other configurations.
    """
    rays_o, rays_d = rays
    c0, c1 = corners
    th = .5  # occupancy decision threshold on the sigmoid output
    # Compute 3D query points
    z_vals = torch.linspace(near, far, N_samples)
    pts = rays_o[...,None,:] + rays_d[...,None,:] * z_vals[...,:,None]
    pts = 0.5 * (pts + 1)  # world [-1,1] -> model input [0,1]
    h, w, d = pts.shape[:-1]
    # Run network
    model_output = batched_apply(model, pts.view(-1, 3), batch_size=50000, device=device)
    alpha = torch.sigmoid(model_output).view(h, w, d)
    if clip:
        # Zero samples that fall outside the mesh bounding box.
        mask = torch.logical_or(torch.any(pts < c0, -1), torch.any(pts > c1, -1)).to(device)
        alpha = torch.where(mask, torch.zeros_like(alpha).to(device), alpha)
    alpha = torch.where(alpha > th, torch.ones_like(alpha).to(device), torch.zeros_like(alpha).to(device))
    # Volume-rendering compositing with hard alphas: weight = alpha * prod(1-alpha_before).
    trans = 1.-alpha + 1e-10
    trans = torch.cat([torch.ones_like(trans[...,:1]).to(trans.device), trans[...,:-1]], -1)
    weights = alpha * torch.cumprod(trans, -1)
    depth_map = torch.sum(weights * z_vals.to(device), -1)
    acc_map = torch.sum(weights, -1)
    # Second pass to refine isosurface
    z_vals = torch.linspace(-1., 1., N_samples_2) * .01 + depth_map[...,None].cpu()
    pts = rays_o[...,None,:] + rays_d[...,None,:] * z_vals[...,:,None]
    pts = 0.5 * (pts + 1)
    # Run network
    model_output = batched_apply(model, pts.view(-1, 3), batch_size=50000, device=device)
    alpha = torch.sigmoid(model_output).view(h, w, d)
    if clip:
        mask = torch.logical_or(torch.any(pts < c0, -1), torch.any(pts > c1, -1)).to(device)
        alpha = torch.where(mask, torch.zeros_like(alpha).to(device), alpha)
    alpha = torch.where(alpha > th, torch.ones_like(alpha).to(device), torch.zeros_like(alpha).to(device))
    trans = 1.-alpha + 1e-10
    trans = torch.cat([torch.ones_like(trans[...,:1]).to(trans.device), trans[...,:-1]], -1)
    weights = alpha * torch.cumprod(trans, -1)
    depth_map = torch.sum(weights * z_vals.to(device), -1)
    acc_map = torch.sum(weights, -1)
    return depth_map, acc_map
def make_normals(rays, depth_map):
    """Estimate screen-space surface normals from a depth map by finite differences."""
    origins, dirs = rays
    surface = origins + dirs * depth_map[..., None]
    # Forward differences along the two image axes (wraps at the border).
    du = surface - torch.roll(surface, -1, dims=0)
    dv = surface - torch.roll(surface, -1, dims=1)
    normals = torch.cross(du, dv)
    return normals / torch.clamp_min(torch.norm(normals, dim=-1, keepdim=True), 1e-5)
def render_mesh_normals(mesh, rays):
    """Ray-cast the mesh and return a face-normal image shaped like rays[0].

    Pixels whose ray misses the mesh stay zero.
    """
    origins, dirs = rays.reshape([2, -1, 3])
    origins = origins * .5 + .5  # world [-1,1] -> mesh frame [0,1]
    dirs = dirs * .5
    hits = mesh.ray.intersects_first(origins, dirs)
    img = np.zeros([origins.shape[0], 3])
    found = hits != -1
    img[found] = mesh.face_normals[hits[found]]
    return np.reshape(img, rays.shape[1:])
def uniform_bary(u):
    """Map uniform samples u in [0,1]^2 to area-uniform barycentric coordinates."""
    root = np.sqrt(u[..., 0])
    first = 1. - root
    second = u[..., 1] * root
    return np.stack([first, second, 1. - first - second], -1)
def get_normal_batch(mesh, bsize):
    """Sample `bsize` random surface points with their face normals.

    Faces are picked uniformly at random and points are placed uniformly
    within each picked face via barycentric sampling.
    """
    face_ids = np.array(np.random.randint(0, mesh.faces.shape[0], [bsize]))
    barys = np.array(uniform_bary(np.random.uniform(size=[bsize, 2])))
    tri_verts = mesh.vertices[mesh.faces[face_ids]]
    pts = np.sum(tri_verts * barys[..., None], 1)
    return pts, mesh.face_normals[face_ids]
# NOTE(review): duplicate of the gt_fn definition earlier in the file.
gt_fn = lambda queries, mesh : mesh.ray.contains_points(queries.reshape([-1,3])).reshape(queries.shape[:-1])

# Fixed viewpoint used for all renders: camera at radius R on a sphere.
R = 2.
c2w = pose_spherical(90. + 10 + 45, -30., R)

# Low-resolution render settings (training-time visualisations).
# render_args_*[1] (corners) is filled in later by train()/eval().
N_samples = 64
N_samples_2 = 64
H = 256
W = H
focal = H * .9
rays = get_rays(H, W, focal, c2w[:3,:4])
render_args_lr = [get_rays(H, W, focal, c2w[:3,:4]), None, R-1, R+1, N_samples, N_samples_2, True]

# High-resolution render settings (final output).
N_samples = 256
N_samples_2 = 256
H = 512
W = H
focal = H * .9
rays = get_rays(H, W, focal, c2w[:3,:4])
render_args_hr = [get_rays(H, W, focal, c2w[:3,:4]), None, R-1, R+1, N_samples, N_samples_2, True]
def train(args, mesh_obj, model, opt=None, iters=10000, device='cuda', liveplot=False, run_label=None):
    """Train the occupancy model on random points from the mesh bounding box.

    Each step samples args.train_batch_size uniform points, labels them by
    CPU-side ray casting, and minimises binary cross-entropy on the logits.
    Periodically saves checkpoints, pickles running metrics, and renders a
    low-res visualisation; renders one high-res normal image at the end.
    Mutates the module-level render_args_lr/render_args_hr lists in place.
    Returns a dict of metric histories.
    """
    # Numerically-stable binary cross-entropy with logits (manual form).
    criterion = lambda x, z: torch.mean(torch.relu(x) - x * z + torch.log(1 + torch.exp(-torch.abs(x))))
    data_iter = tqdm(range(1, iters + 1))
    if run_label is not None:
        data_iter.set_description(run_label)
    mesh, corners = mesh_obj
    c0, c1 = [torch.tensor(t, dtype=torch.float32) for t in corners]
    render_args_hr[0] = [torch.tensor(t, dtype=torch.float32) for t in render_args_hr[0]]
    render_args_lr[0] = [torch.tensor(t, dtype=torch.float32) for t in render_args_lr[0]]
    render_args_hr[1] = [c0, c1]
    render_args_lr[1] = [c0, c1]
    c1, c0 = c1.to(device), c0.to(device)
    step_list = []
    loss_list = []
    step_time_list = []
    test_psnr_list = []  # kept for the returned dict; never appended to here
    model = model.to(device)
    # Main training Loop
    postfix = {'loss': np.inf, 'psnr': 0., 'forward_steps': 0}
    for i in data_iter:
        start_time = time.time()
        # Uniform points inside the bounding box [c0, c1].
        inputs = torch.rand(args.train_batch_size, 3).to(device) * (c1 - c0) + c0
        z_init = torch.zeros(1, model.interm_channels).to(device)
        # Ground-truth occupancy from CPU-side ray casting.
        target = torch.tensor(gt_fn(inputs.cpu().numpy(), mesh), dtype=torch.bool).to(device)
        model_outputs = model(inputs, z_init, skip_solver=False, verbose=False, include_grad=False)
        pred = torch.sigmoid(model_outputs['output'].squeeze())
        loss = criterion(model_outputs['output'].squeeze(), target.float())
        loss_list.append(loss.item())
        if opt:
            opt.zero_grad()
            loss.backward()
            opt.step()
        postfix['forward_steps'] = model_outputs['forward_steps']
        postfix['loss'] = loss.item()
        step_time = time.time() - start_time
        step_time_list.append(step_time)
        do_log = i % args.log_freq == 0
        do_vis = i % args.vis_freq == 0
        if i % args.save_freq == 0:
            torch.save(model.state_dict(), f'{args.save_dir:s}/model_step{i:d}.pth')
        if do_log:
            # Persist running metrics so a crash doesn't lose the history.
            summary_dict = {
                'train_loss': loss_list,
                'step_time': step_time_list
            }
            with open('{:s}/summary.pkl'.format(args.log_dir), 'wb') as summary_f:
                pickle.dump(summary_dict, summary_f)
        if do_vis:
            # Low-resolution depth / accumulation / normal triptych.
            with torch.no_grad():
                depth_map, acc_map = render_rays_native_hier(model, *render_args_lr, device=device)
            normal_map = make_normals(render_args_lr[0], depth_map.cpu()) * 0.5 + 0.5
            fig, axes = plt.subplots(1, 3)
            fig.set_size_inches(6 * 3, 6)
            for ax in axes:
                ax.clear()
                ax.set_axis_off()
            axes[0].imshow(depth_map.squeeze().cpu().numpy())
            axes[1].imshow(acc_map.squeeze().cpu().numpy())
            axes[2].imshow(normal_map.squeeze().cpu().numpy())
            fig.set_tight_layout(True)
            fig.savefig(args.vis_dir + '/vis_step{:d}.png'.format(i))
        data_iter.set_postfix(postfix)
    # Final high-resolution render of the learned surface normals.
    with torch.no_grad():
        depth_map, acc_map = render_rays_native_hier(model, *render_args_hr, device=device)
    normal_map = make_normals(render_args_hr[0], depth_map.cpu()) * 0.5 + 0.5
    skimage.io.imsave(args.vis_dir + '/final_rendered.png', normal_map.squeeze().cpu().numpy())
    return {
        'loss': loss_list,
        'step_time': step_time_list,
        'step': step_list,
        'test_psnr': test_psnr_list,
    }
def eval(args, model):
    """Restore a checkpoint, report occupancy metrics, and render a final image.

    Writes accuracy/precision/recall/IoU per test split to
    <log_dir>/test_output.txt and saves a high-res normal render.
    NOTE(review): shadows the builtin `eval`; renaming would touch callers.
    """
    assert args.restore_path is not None, 'Restore path cannot be empty'
    state_dict = torch.load(args.restore_path)
    model.load_state_dict(state_dict)
    model.to(args.device)
    mesh_obj = load_mesh(args.dataset)
    all_tests = load_test_pts(args.dataset, mesh_obj, regen=False, verbose=True)
    log_file = os.path.join(args.log_dir, "test_output.txt")
    with open(log_file, 'w') as f:
        for test_name in all_tests:
            test_pts, test_labels = [torch.tensor(arr, dtype=torch.float32) for arr in all_tests[test_name]]
            test_ds = torch.utils.data.TensorDataset(test_pts, test_labels)
            test_loader = torch.utils.data.DataLoader(test_ds, shuffle=False, batch_size=10000, drop_last=False, pin_memory=True)
            cm = ConfusionMatrix()
            for (pts, labels) in iter(test_loader):
                pts, labels = pts.to(args.device), labels.to(args.device)
                model_outputs = model(pts, skip_solver=False, verbose=False, include_grad=False)
                # Positive logit == predicted "inside the mesh".
                pred = model_outputs['output'].squeeze() > 0
                cm.update(pred.detach().cpu().numpy(), labels.cpu().numpy())
            f.write(f"Test: {test_name}\n")
            f.write(f"\tAccuracy {cm.get_acc():.5f}\n")
            f.write(f"\tPrecision {cm.get_precision():.5f}\n")
            f.write(f"\tRecall {cm.get_recall():.5f}\n")
            f.write(f"\tIoU {cm.get_iou():.5f}\n")
            f.write("\n")
    mesh, corners = mesh_obj
    c0, c1 = [torch.tensor(t, dtype=torch.float32) for t in corners]
    # Same in-place mutation of the module-level render args as train().
    render_args_hr[0] = [torch.tensor(t, dtype=torch.float32) for t in render_args_hr[0]]
    render_args_lr[0] = [torch.tensor(t, dtype=torch.float32) for t in render_args_lr[0]]
    render_args_hr[1] = [c0, c1]
    render_args_lr[1] = [c0, c1]
    with torch.no_grad():
        depth_map, acc_map = render_rays_native_hier(model, *render_args_hr, device=args.device)
    normal_map = make_normals(render_args_hr[0], depth_map.cpu()) * 0.5 + 0.5
    skimage.io.imsave(args.vis_dir + '/final_rendered.png', normal_map.squeeze().cpu().numpy())
def main(args):
    """Build the model from CLI args, then dispatch to train() or eval()."""
    model_args = construct_model_args(
        model_type=args.model_type,
        n_layers=args.n_layers,
        in_channels=3,   # 3D query points
        interm_channels=args.interm_channels,
        out_channels=1,  # single occupancy logit
        input_scale=args.input_scale,
        use_implicit=args.use_implicit,
        filter_type=args.filter_type,
        filter_options={'alpha': args.gabor_alpha},
        norm_type=args.norm_type,
        forward_solver=args.forward_solver,
        backward_solver=args.backward_solver,
        # Tighter solver tolerance at test time than during training.
        tol=args.test_tol if args.eval else args.train_tol
    )
    model = get_model(model_args)
    print(model)
    if args.restore_path is not None:
        model.load_state_dict(torch.load(args.restore_path))
    opt = torch.optim.Adam(model.parameters(), lr=args.lr)
    if not args.eval:
        mesh_obj = load_mesh(args.dataset)
        train(
            args,
            mesh_obj,
            model,
            opt,
            iters=args.max_train_iters,
            device=args.device
        )
    else:
        eval(
            args,
            model
        )
if __name__ == '__main__':
    # CLI: options may come from flags or a YAML config file (-c).
    parser = configargparse.ArgumentParser(config_file_parser_class=configargparse.YAMLConfigFileParser)
    parser.add_argument('--experiment_id', default='vanilla', type=str)
    parser.add_argument('-c', '--config_file', default=None, is_config_file=True)
    parser.add_argument('--dataset', default='dragon', choices=['dragon', 'bunny', 'buddha', 'armadillo', 'lucy'])
    parser.add_argument('--device', default='cuda')
    parser.add_argument('--eval', default=False, action='store_true')
    parser.add_argument('--restore_path', default=None, type=str)
    # Logging / checkpoint cadence (in training steps).
    parser.add_argument('--vis_freq', default=2000, type=int)
    parser.add_argument('--log_freq', default=2000, type=int)
    parser.add_argument('--save_freq', default=2000, type=int)
    parser.add_argument('--max_train_iters', default=10000, type=int)
    # Model architecture options.
    parser.add_argument('--model_type', default='implicit', choices=['implicit', 'siren', 'ffn'])
    parser.add_argument('--n_layers', default=1, type=int)
    parser.add_argument('--interm_channels', default=256, type=int)
    parser.add_argument('--use_implicit', default=False, action='store_true')
    parser.add_argument('--input_scale', default=256., type=float)
    parser.add_argument('--filter_type', default='fourier', choices=['fourier', 'gabor', 'siren_like'])
    parser.add_argument('--norm_type', default='none', choices=['none', 'spectral_norm', 'weight_norm'])
    parser.add_argument('--gabor_alpha', default=3., type=float)
    # Implicit-solver options.
    parser.add_argument('--forward_solver', default='forward_iter', choices=['forward_iter', 'broyden'])
    parser.add_argument('--backward_solver', default='forward_iter', choices=['onestep', 'forward_iter', 'broyden'])
    parser.add_argument('--train_tol', default=1e-3, type=float)
    parser.add_argument('--test_tol', default=1e-4, type=float)
    parser.add_argument('--train_batch_size', default=50000, type=int)
    parser.add_argument('--test_batch_size', default=50000, type=int)
    parser.add_argument('--lr', default=5e-4, type=float)
    args = parser.parse_args()
    # Derived output directories, created up front (side-effecting listcomp).
    args.log_dir = f'logs/3d_occupancy/{args.experiment_id}{args.dataset}'
    args.vis_dir = f'{args.log_dir}/visualizations'
    args.save_dir = f'{args.log_dir}/saved_models'
    [os.makedirs(path, exist_ok=True) for path in (args.log_dir, args.vis_dir, args.save_dir)]
    main(args)
15,366 | 34c888ff27db5bd95f7f41bf72a17e201aa0c729 | # vim: set fileencoding=utf-8 :
"""Test L{gbp.command_wrappers.Command}'s tarball unpack"""
import unittest
from gbp.command_wrappers import Command, CommandExecFailed
from . testutils import GbpLogTester, patch_popen
class TestCommandWrapperFailures(unittest.TestCase, GbpLogTester):
    """Error-path behaviour of gbp.command_wrappers.Command.

    Popen is replaced via patch_popen in most tests, so '/does/not/matter'
    never actually executes; log output is captured through GbpLogTester.
    """
    def setUp(self):
        self.false = Command('/does/not/matter')
        self.log_tester = GbpLogTester()
        self.log_tester._capture_log(True)

    def tearDown(self):
        self.log_tester._capture_log(False)

    @patch_popen(stdout=b'', stderr=b'', returncode=1)
    def test_log_default_error_msg(self, create_mock):
        # A failing command logs the default message with its exit code.
        with self.assertRaises(CommandExecFailed):
            self.false.__call__()
        self.log_tester._check_log(0, "gbp:error: '/does/not/matter' failed: it exited with 1")
        self.assertEqual(self.false.retcode, 1)
        self.assertEqual(self.false.stderr, '')
        self.assertEqual(self.false.stdout, '')

    @patch_popen(stdout=b'', stderr=b'we have a problem', returncode=1)
    def test_log_use_stderr_for_err_message(self, create_mock):
        # run_error may interpolate captured stderr via {stderr}.
        self.false.capture_stderr = True
        self.false.run_error = "Erpel {stderr}"
        with self.assertRaises(CommandExecFailed):
            self.false.__call__()
        self.log_tester._check_log(0, "gbp:error: Erpel we have a problem")
        self.assertEqual(self.false.retcode, 1)
        self.assertEqual(self.false.stderr, 'we have a problem')
        self.assertEqual(self.false.stdout, '')

    @patch_popen(stdout=b'we have a problem', stderr=b'', returncode=1)
    def test_log_use_stdout_for_err_message(self, create_mock):
        # run_error may interpolate captured stdout via {stdout}.
        self.false.capture_stdout = True
        self.false.run_error = "Erpel {stdout}"
        with self.assertRaises(CommandExecFailed):
            self.false.__call__()
        self.log_tester._check_log(0, "gbp:error: Erpel we have a problem")
        self.assertEqual(self.false.retcode, 1)
        self.assertEqual(self.false.stderr, '')
        self.assertEqual(self.false.stdout, 'we have a problem')

    def test_log_use_err_or_reason_for_error_messge_reason(self):
        # No mock here: the command genuinely fails to execute, so
        # {stderr_or_reason} falls back to the OS error reason.
        self.false.run_error = "AFAIK {stderr_or_reason}"
        with self.assertRaises(CommandExecFailed):
            self.false.__call__()
        self.log_tester._check_log(0, "gbp:error: AFAIK execution failed: .Errno 2. No such file or directory")
        self.assertEqual(self.false.retcode, 1)

    @patch_popen(stderr=b'we have a problem', returncode=1)
    def test_log_use_err_or_reason_for_error_messge_error(self, create_mock):
        # With stderr available, {stderr_or_reason} picks the stderr text.
        self.false.run_error = "Erpel {stderr_or_reason}"
        with self.assertRaises(CommandExecFailed):
            self.false.__call__()
        self.log_tester._check_log(0, "gbp:error: Erpel we have a problem")
        self.assertEqual(self.false.retcode, 1)

    @patch_popen(returncode=0)
    def test_no_log_on_success(self, create_mock):
        # A successful run must not emit any error log.
        self.false.__call__()
        self.log_tester._check_log_empty()
        self.assertEqual(self.false.retcode, 0)
|
15,367 | 969b67f628708f946bfc55855c57e39099204024 | import numpy as np
import cv2
import random
from CNNUtil import paths
LENGTH = 180
def findRegion(img):
    """Crop `img` to a square region around its first detected contour.

    Thresholds the image (assumed RGB) at intensity 1, takes the bounding
    box of the first contour found, and returns a square crop whose side
    is max(w, h) of that box (numpy slicing clips at the image borders,
    so the crop may be smaller near an edge).

    Raises IndexError if the image contains no contour at all.
    """
    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    _ret, thr = cv2.threshold(gray, 1, 255, cv2.THRESH_BINARY)
    # NOTE(review): assumes OpenCV 4.x (two return values); 3.x returns three.
    contours, _hierarchy = cv2.findContours(thr, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    x, y, w, h = cv2.boundingRect(contours[0])
    side = max(w, h)  # renamed from 'len' to avoid shadowing the builtin
    return img[y:y + side, x:x + side]
def img_padding_2(img, LENGTH):
    """Pad (or shrink) `img` onto a black LENGTH x LENGTH canvas.

    The image is placed at the top-left corner of the canvas.  If either
    dimension exceeds LENGTH, the image is first embedded in a square
    canvas of side max(rows, cols) and then resized down to
    LENGTH x LENGTH; otherwise it is simply zero-padded.

    Note: img.shape[0] is the row count (height) and img.shape[1] the
    column count (width); the original mislabeled these as (w, h).
    Assumes a 3-channel uint8 image.
    """
    rows, cols = img.shape[0], img.shape[1]
    side = max(rows, cols)  # renamed from 'len' to avoid shadowing the builtin
    if side > LENGTH:
        # Too big: embed in a square canvas, then scale down to target size.
        big_img = np.zeros((side, side, 3), np.uint8)
        big_img[0:rows, 0:cols] = img
        return cv2.resize(big_img, (LENGTH, LENGTH))
    # Small enough: just pad with black.
    blank_image = np.zeros((LENGTH, LENGTH, 3), np.uint8)
    blank_image[0:rows, 0:cols] = img
    return blank_image
# def img_padding(img):
# # 이미지의 x, y가 300이 넘을 경우 작게해주기
# blank_image = np.zeros((WIDTH, WIDTH, 3), np.uint8)
# percent = 1
# if(img.shape[1] >WIDTH):
# if (img.shape[1] > img.shape[0]): # 이미지의 가로가 세보다 크면 가로를 300으로 맞추고 세로를 비율에 맞춰서
# percent = WIDTH / img.shape[1]
# else:
# percent = WIDTH / img.shape[0]
# if (img.shape[0] > WIDTH):
# if (img.shape[1] > img.shape[0]): # 이미지의 가로가 세보다 크면 가로를 300으로 맞추고 세로를 비율에 맞춰서
# percent = WIDTH / img.shape[1]
# else:
# percent = WIDTH / img.shape[0]
#
# img = cv2.resize(img, dsize=(0, 0), fx=percent, fy=percent, interpolation=cv2.INTER_LINEAR)
#
# blank_image[ 0: img.shape[0],0: img.shape[1] ] = img
#
# return blank_image
# data_dir = 'D:\Data\iris_pattern\Original\defect'
data_dir = 'D:/Data/iris_pattern/Binary/defect_binary/train/defect'
# data_dir = 'D:/Data/iris_pattern/test_image/11'
# Preview loop: for every image under data_dir, crop to its detected
# region, pad to a 180x180 square and show it in an OpenCV window.
imagePaths = sorted(list(paths.list_images(data_dir)))
for imagePath in imagePaths:
    image = cv2.imread(imagePath)
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    image = findRegion(image)
    image = img_padding_2(image, 180)
    # image = findRegion(image)
    # image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    # image = cv2.resize(image, (HEIGHT, WIDTH))
    # cv2.namedWindow("img_re", cv2.WINDOW_NORMAL)
    # cv2.resizeWindow("img_re", 400, 400)
    cv2.imshow("img_re", image)
    # NOTE(review): with waitKey commented out the window never gets GUI
    # time to refresh; the loop overwrites the image faster than it can
    # be displayed.
    # cv2.waitKey(0)
15,368 | aebd28a1282286826338a7d77d5652366462ebbf | /home/durgesh/miniconda3/lib/python3.6/posixpath.py |
15,369 | cb80c72a8feb3f4277438c2738733dbcd7869966 | import os
import re
# Count (and print) subdirectories of 'bbc' whose names mix Latin
# [a-z] and Cyrillic [а-я] lowercase letters.
count = 0
base = 'bbc'
for entry in os.listdir(base):
    full_path = os.path.join(base, entry)
    if os.path.isdir(full_path) \
            and re.search('[a-z]+', entry) \
            and re.search('[а-я]+', entry):
        print(entry)
        count += 1
print(count)
|
15,370 | cb58cc6b5a05a4807eec1e7c6410065a58bd8c7c | import pickle
from collections import defaultdict
from collections import Counter
from heapq import nlargest
def label_ne(ne, post_bi_un, pre_bi_un, post_si_un, pre_si_un, main_cat, decision_list):
    """Label a named entity with a subcategory using a weighted decision list.

    Parameters:
        ne            -- the named-entity token itself.
        post_bi_un    -- (token+1, token+2) bigram following the NE.
        pre_bi_un     -- (token-2, token-1) bigram preceding the NE.
        post_si_un    -- single token following the NE.
        pre_si_un     -- single token preceding the NE.
        main_cat      -- main category, "LOC" or "ORG"; anything else
                         returns None (as before).
        decision_list -- dict mapping keys like "LOC_land_post_si_un" or
                         "ORG_com_unique" to lists of (value, weight) pairs.

    Returns the subcategory whose matching feature carries the highest
    weight (later entries win ties), or "none_found" when no feature
    matches.  The original duplicated this logic verbatim for LOC and
    ORG and could raise UnboundLocalError when candidates existed but
    all weights were negative; both issues are fixed here.
    """
    if main_cat not in ("LOC", "ORG"):
        return None
    candidate_tags = []
    for key, value in decision_list.items():
        # Only consider decision-list entries for the requested main category.
        if main_cat != key[:3]:
            continue
        parts = key.split("_")
        bi_or_si = parts[-2]            # "bi", "si", or the subcat name for "unique"
        feat = "_".join(parts[2:])      # feature name without category/subcat prefix
        subcat = parts[1]               # subcategory this feature votes for
        if bi_or_si == "bi":
            # Bigram features: value items are ((tok_a, tok_b), weight).
            for bigram, weight in value:
                if feat == "post_bi_un" and post_bi_un == bigram:
                    candidate_tags.append((subcat, weight))
                elif feat == "pre_bi_un" and pre_bi_un == bigram:
                    candidate_tags.append((subcat, weight))
        else:
            # Unigram / unique features: value items are (token, weight).
            for string, weight in value:
                if feat == "post_si_un" and post_si_un == string:
                    candidate_tags.append((subcat, weight))
                elif feat == "pre_si_un" and pre_si_un == string:
                    candidate_tags.append((subcat, weight))
                elif feat == "unique" and ne == string:
                    candidate_tags.append((subcat, weight))
    # Pick the candidate with the highest weight; '>=' keeps the original
    # behavior of preferring the later entry on ties.
    label = "none_found"
    best_weight = 0
    for tag, weight in candidate_tags:
        if weight >= best_weight:
            label = tag
            best_weight = weight
    return label
def load_final_DL():
    """Load the pickled baseline-2 decision list from disk.

    Returns the unpickled object (a dict of feature-key -> (value, weight)
    lists, as consumed by label_ne via .items()).

    The original leaked the file handle; 'with' guarantees it is closed.
    """
    with open("../data/baseline2_DL.pkl", "rb") as pickle_in:
        return pickle.load(pickle_in)
def precision(match, no_match):
    """Precision = match / (match + no_match), rounded to 2 decimals.

    Defined as 1 when there are no false positives (no_match == 0),
    matching the original convention.
    """
    if no_match == 0:
        return 1
    return round(match / (match + no_match), 2)
def recall(match, present):
    """Recall = match / present, rounded to 2 decimals.

    Defined as 1 when nothing is present (present == 0), matching the
    original convention.
    """
    if present == 0:
        return 1
    return round(match / present, 2)
def f1_measure(precision, recall):
    """Harmonic mean of precision and recall, rounded to 2 decimals.

    Returns 0 when both inputs are 0 (the division would otherwise
    raise ZeroDivisionError).
    """
    if precision == 0 and recall == 0:
        return 0
    return round((2 * precision * recall) / (precision + recall), 2)
def main():
    """Evaluate NE subtype labelling on the SoNaR-1 test file.

    Walks the tab-separated test file with a 5-line window and, for every
    LOC/ORG named entity (token lines with 5 columns), compares the gold
    subcategory (last column) against a predicted label.

    NOTE(review): although label_ne() is called, its answer is immediately
    overwritten with a fixed label ("LAND" for LOC, "MISC" for ORG) —
    presumably to measure a majority-class baseline; confirm this is the
    intended experiment.  Per-subcategory match / no-match / present
    counters are accumulated and printed as precision/recall/F1 tables
    (plain text, then LaTeX table rows).
    """
    final_DL = load_final_DL()
    lines = []
    # Keep every token line except document markers; columns are tab-separated.
    infile = open("../data/SoNaR1_test.txt", "r",encoding="utf8")
    next(infile)
    for line in infile:
        line = line.rstrip().split("\t")
        if line[0] != "DOCUMENT":
            lines.append(line)
    # Per-(sub)category counters: *_match = correct predictions,
    # *_no_match = wrong predictions, *_present = gold occurrences.
    no_match_org = 0
    no_match_loc = 0
    match_org = 0
    match_loc = 0
    punt_match = 0
    lijn_match = 0
    bc_match = 0
    water_match = 0
    none_match = 0
    regio_match = 0
    fictief_match = 0
    land_match = 0
    cont_match = 0
    heelal_match = 0
    punt_no_match = 0
    lijn_no_match = 0
    bc_no_match = 0
    water_no_match = 0
    none_no_match = 0
    regio_no_match = 0
    fictief_no_match = 0
    land_no_match = 0
    cont_no_match = 0
    heelal_no_match = 0
    punt_present = 0
    lijn_present = 0
    bc_present = 0
    water_present = 0
    none_present = 0
    regio_present = 0
    fictief_present = 0
    land_present = 0
    cont_present = 0
    heelal_present = 0
    loc_present = 0
    org_misc_match = 0
    com_match = 0
    gov_match = 0
    org_none_match = 0
    org_misc_no_match = 0
    com_no_match = 0
    gov_no_match = 0
    org_none_no_match = 0
    none_found = 0
    com_present = 0
    org_misc_present = 0
    gov_present = 0
    org_none_present = 0
    org_present = 0
    # Sliding 5-line window: the NE under consideration sits at line_one
    # (i==0), line_two (i==1), or line_three (all later iterations), so
    # that two tokens of context exist on each side where possible.
    for i in range((len(lines)-4)):
        line_one = lines[i]
        line_two = lines[(i+1)]
        line_three = lines[(i+2)]
        line_four = lines[(i+3)]
        line_five = lines[(i+4)]
        # step2: when NE found try to label it using decision list
        # when able to label ne save feature information in temporary decision list
        if i == 0:
            # do for the two three lines in the file, then switch to the 3rdline for information
            if len(line_one) == 5 and line_one[0] != "SENTENCE":
                main_cat = line_one[3]
                # Mask context tokens that fall across a sentence boundary.
                if line_two[0] == 'SENTENCE':
                    line_two = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                    line_three = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                if line_three[0] == 'SENTENCE':
                    line_three = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                ne = line_one[2]
                post_bi_un = tuple((line_two[2], line_three[2]))
                pre_bi_un = tuple(("XXXXXXXX", "XXXXXXXX"))
                post_si_un = line_two[2]
                pre_si_un = "XXXXXXXX"
                # label the NE
                label = label_ne(ne,post_bi_un,pre_bi_un,post_si_un,pre_si_un, main_cat, final_DL)
                print(label)
                #print(line_one, label)
                if main_cat == "LOC":
                    # NOTE(review): prediction is forced to "LAND" — only the
                    # LAND branches below are actually reachable.
                    label = "LAND"
                    subcat = line_one[-1].upper()
                    if subcat == label:
                        if label == "PUNT":
                            punt_match += 1
                        elif label == "LIJN":
                            lijn_match += 1
                        elif label == "BC":
                            bc_match += 1
                        elif label == "WATER":
                            water_match += 1
                        elif label == "REGIO":
                            regio_match += 1
                        elif label == "FICTIEF":
                            fictief_match += 1
                        elif label == 'LAND':
                            land_match += 1
                        elif label == 'CONT':
                            cont_match += 1
                        elif label == 'HEELAL':
                            heelal_match += 1
                        else:
                            print(label)
                        match_loc += 1
                    else:
                        if label == "PUNT":
                            punt_no_match += 1
                            no_match_loc += 1
                        elif label == "LIJN":
                            lijn_no_match += 1
                            no_match_loc += 1
                        elif label == "BC":
                            bc_no_match += 1
                            no_match_loc += 1
                        elif label == "WATER":
                            water_no_match += 1
                            no_match_loc += 1
                        elif label == "REGIO":
                            regio_no_match += 1
                            no_match_loc += 1
                        elif label == "FICTIEF":
                            # NOTE(review): 'ficitef' is a typo (would raise
                            # NameError) — unreachable because label is "LAND".
                            ficitef_no_match += 1
                            no_match_loc += 1
                        elif label == 'LAND':
                            land_no_match += 1
                            no_match_loc += 1
                        elif label == 'CONT':
                            cont_no_match += 1
                            no_match_loc += 1
                        elif label == 'HEELAL':
                            heelal_no_match += 1
                            no_match_loc += 1
                    # Gold-occurrence (presence) counters, keyed on subcat.
                    if subcat == "PUNT":
                        punt_present += 1
                    elif subcat == "LIJN":
                        lijn_present += 1
                    elif subcat == "BC":
                        bc_present += 1
                    elif subcat == "WATER":
                        water_present += 1
                    elif subcat == "REGIO":
                        regio_present += 1
                    elif subcat == "FICTIEF":
                        fictief_present += 1
                    elif subcat == 'LAND':
                        land_present += 1
                    elif subcat == 'CONT':
                        cont_present += 1
                    elif subcat == 'HEELAL':
                        heelal_present += 1
                    loc_present += 1
                if main_cat == "ORG":
                    # NOTE(review): prediction is forced to "MISC".
                    label = "MISC"
                    subcat = line_one[-1].upper()
                    if subcat == label:
                        if label == "MISC":
                            org_misc_match += 1
                        elif label == "COM":
                            com_match += 1
                        elif label == "GOV":
                            gov_match += 1
                        match_org += 1
                    else:
                        # Mis-prediction counters keyed on the GOLD subcat here
                        # (the i>=2 branch keys them on the predicted label).
                        if subcat == "MISC":
                            org_misc_no_match += 1
                        elif subcat == "COM":
                            com_no_match += 1
                        elif subcat == "GOV":
                            gov_no_match += 1
                        no_match_org += 1
                    if subcat == "MISC":
                        org_misc_present += 1
                    elif subcat == "COM":
                        com_present += 1
                    elif subcat == "GOV":
                        gov_present += 1
                    org_present += 1
        elif i == 1:
            if len(line_two) == 5 and line_two[0] != "SENTENCE":
                main_cat = line_two[3]
                if line_three[0] == "SENTENCE":
                    line_three = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                    line_four = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                if line_four[0] == "SENTENCE":
                    line_four = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                ne = line_two[2]
                post_bi_un = tuple((line_three[2], line_four[2]))
                pre_bi_un = tuple(("XXXXXXXX", line_one[2]))
                # NOTE(review): uses line_four (two tokens after the NE);
                # every other branch uses the adjacent token — confirm intended.
                post_si_un = line_four[2]
                pre_si_un = line_one[2]
                # label the NE
                label = label_ne(ne,post_bi_un,pre_bi_un,post_si_un,pre_si_un, main_cat, final_DL)
                #print(line_two, label)
                if main_cat == "LOC":
                    label = "LAND"
                    subcat = line_two[-1].upper()
                    if subcat == label:
                        if label == "PUNT":
                            punt_match += 1
                        elif label == "LIJN":
                            lijn_match += 1
                        elif label == "BC":
                            bc_match += 1
                        elif label == "WATER":
                            water_match += 1
                        elif label == "REGIO":
                            regio_match += 1
                        elif label == "FICTIEF":
                            fictief_match += 1
                        elif label == 'LAND':
                            land_match += 1
                        elif label == 'CONT':
                            cont_match += 1
                        elif label == 'HEELAL':
                            heelal_match += 1
                        else:
                            print(label)
                        match_loc += 1
                    else:
                        if label == "PUNT":
                            punt_no_match += 1
                        elif label == "LIJN":
                            lijn_no_match += 1
                        elif label == "BC":
                            bc_no_match += 1
                        elif label == "WATER":
                            water_no_match += 1
                        elif label == "REGIO":
                            regio_no_match += 1
                        elif label == "FICTIEF":
                            fictief_no_match += 1
                        elif label == 'LAND':
                            land_no_match += 1
                        elif label == 'CONT':
                            cont_no_match += 1
                        elif label == 'HEELAL':
                            heelal_no_match += 1
                        no_match_loc += 1
                    if subcat == "PUNT":
                        punt_present += 1
                    elif subcat == "LIJN":
                        lijn_present += 1
                    elif subcat == "BC":
                        bc_present += 1
                    elif subcat == "WATER":
                        water_present += 1
                    elif subcat == "REGIO":
                        regio_present += 1
                    elif subcat == "FICTIEF":
                        fictief_present += 1
                    elif subcat == 'LAND':
                        land_present += 1
                    elif subcat == 'CONT':
                        cont_present += 1
                    elif subcat == 'HEELAL':
                        heelal_present += 1
                    loc_present += 1
                if main_cat == "ORG":
                    label = "MISC"
                    subcat = line_two[-1].upper()
                    if subcat == label:
                        if label == "MISC":
                            org_misc_match += 1
                        elif label == "COM":
                            com_match += 1
                        elif label == "GOV":
                            gov_match += 1
                        match_org += 1
                    else:
                        if subcat == "MISC":
                            org_misc_no_match += 1
                        elif subcat == "COM":
                            com_no_match += 1
                        elif subcat == "GOV":
                            gov_no_match += 1
                        no_match_org += 1
                    if subcat == "MISC":
                        org_misc_present += 1
                    elif subcat == "COM":
                        com_present += 1
                    elif subcat == "GOV":
                        gov_present += 1
                    org_present += 1
        # for the rest of the document
        else:
            if len(line_three) == 5 and line_three[0] != "SENTENCE":
                main_cat = line_three[3]
                if line_one[0] == "SENTENCE":
                    line_one = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                if line_two[0] == "SENTENCE":
                    # NOTE(review): masks line_one as well — presumably because a
                    # sentence break at line_two puts line_one in the previous
                    # sentence; confirm this is intended.
                    line_one = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                    line_two = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                if line_four[0] == "SENTENCE":
                    line_four = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                    line_five = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                if line_five[0] == "SENTENCE":
                    line_five = ["XXXXXXXX","XXXXXXXX","XXXXXXXX"]
                ne = line_three[2]
                post_bi_un = tuple((line_four[2], line_five[2]))
                pre_bi_un = tuple((line_one[2], line_two[2]))
                post_si_un = line_four[2]
                pre_si_un = line_two[2]
                # label the NE
                # NOTE(review): string_list is built but never used.
                string_list = []
                for item in ["punt","lijn","bc","water","none","regio","fictief","land","cont","heelal","ORG_none","ORG_misc","gov","com"]:
                    string_list.append(item.upper())
                label = label_ne(ne,post_bi_un,pre_bi_un,post_si_un,pre_si_un, main_cat, final_DL)
                if label == "none_found":
                    none_found += 1
                if main_cat == "LOC":
                    label = "LAND"
                    subcat = line_three[-1].upper()
                    if subcat == label:
                        if label == "PUNT":
                            punt_match += 1
                        elif label == "LIJN":
                            lijn_match += 1
                        elif label == "BC":
                            bc_match += 1
                        elif label == "WATER":
                            water_match += 1
                        elif label == "REGIO":
                            regio_match += 1
                        elif label == "FICTIEF":
                            fictief_match += 1
                        elif label == 'LAND':
                            land_match += 1
                        elif label == 'CONT':
                            cont_match += 1
                        elif label == 'HEELAL':
                            heelal_match += 1
                        else:
                            print(label)
                        match_loc += 1
                    else:
                        if label == "PUNT":
                            punt_no_match += 1
                            no_match_loc += 1
                        elif label == "LIJN":
                            lijn_no_match += 1
                            no_match_loc += 1
                        elif label == "BC":
                            bc_no_match += 1
                            no_match_loc += 1
                        elif label == "WATER":
                            water_no_match += 1
                            no_match_loc += 1
                        elif label == "REGIO":
                            regio_no_match += 1
                            no_match_loc += 1
                        elif label == "FICTIEF":
                            fictief_no_match += 1
                            no_match_loc += 1
                        elif label == 'LAND':
                            land_no_match += 1
                            no_match_loc += 1
                        elif label == 'CONT':
                            cont_no_match += 1
                            no_match_loc += 1
                        elif label == 'HEELAL':
                            heelal_no_match += 1
                            no_match_loc += 1
                    if subcat == "PUNT":
                        punt_present += 1
                    elif subcat == "LIJN":
                        lijn_present += 1
                    elif subcat == "BC":
                        bc_present += 1
                    elif subcat == "WATER":
                        water_present += 1
                    elif subcat == "REGIO":
                        regio_present += 1
                    elif subcat == "FICTIEF":
                        fictief_present += 1
                    elif subcat == 'LAND':
                        land_present += 1
                    elif subcat == 'CONT':
                        cont_present += 1
                    elif subcat == 'HEELAL':
                        heelal_present += 1
                    loc_present += 1
                if main_cat == "ORG":
                    label = "MISC"
                    subcat = line_three[-1].upper()
                    if subcat == label:
                        if label == "MISC":
                            org_misc_match += 1
                        elif label == "COM":
                            com_match += 1
                        elif label == "GOV":
                            gov_match += 1
                        match_org += 1
                    else:
                        # NOTE(review): unlike the i==0/i==1 branches, the
                        # mis-prediction counters here are keyed on the
                        # predicted label (always "MISC"), not the gold subcat.
                        if label == "MISC":
                            org_misc_no_match += 1
                            no_match_org += 1
                        elif label == "COM":
                            com_no_match += 1
                            no_match_org += 1
                        elif label == "GOV":
                            gov_no_match += 1
                            no_match_org += 1
                    if subcat == "MISC":
                        org_misc_present += 1
                    elif subcat == "COM":
                        com_present += 1
                    elif subcat == "GOV":
                        gov_present += 1
                    org_present += 1
    # Report: plain-text tables first, then LaTeX-formatted rows.
    mode = "n =20 k = 12"
    print("mode = {} \n".format(mode))
    print("LOC \n")
    print("LOC matches = {}, no_matches = {} ,present = {}, precision = {}, recall = {}, f1_score = {} \n".format(match_loc, no_match_loc,loc_present, precision(match_loc, no_match_loc), recall(match_loc,loc_present),f1_measure(precision(match_loc, no_match_loc), recall(match_loc,loc_present))))
    print("PUNT matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(punt_match, punt_no_match,punt_present, precision(punt_match, punt_no_match), recall(punt_match,punt_present),f1_measure(precision(punt_match, punt_no_match), recall(punt_match,punt_present))))
    print("LIJN matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(lijn_match, lijn_no_match,lijn_present, precision(lijn_match, lijn_no_match), recall(lijn_match,lijn_present),f1_measure(precision(lijn_match, lijn_no_match), recall(lijn_match,lijn_present))))
    print("BC matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(bc_match, bc_no_match,bc_present, precision(bc_match, bc_no_match), recall(bc_match,bc_present),f1_measure(precision(bc_match, bc_no_match), recall(bc_match,bc_present))))
    print("WATER matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(water_match, water_no_match,water_present, precision(water_match, water_no_match), recall(water_match,water_present),f1_measure(precision(water_match, water_no_match), recall(water_match,water_present))))
    '''
    print("NONE matches = {}, no_matches = {},present = {}, precision = {}, recall = {} \n".format(none_match, none_no_match,none_present, precision(none_match, none_no_match), recall(none_match,none_present)))'''
    print("REGIO matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(regio_match, regio_no_match,regio_present, precision(regio_match, regio_no_match), recall(regio_match,regio_present),f1_measure(precision(regio_match, regio_no_match), recall(regio_match,regio_present))))
    print("FICTIEF matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(fictief_match, fictief_no_match,fictief_present, precision(fictief_match, fictief_no_match), recall(fictief_match,fictief_present),f1_measure(precision(fictief_match, fictief_no_match), recall(fictief_match,fictief_present))))
    print("LAND matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(land_match, land_no_match,land_present, precision(land_match, land_no_match), recall(land_match,land_present),f1_measure(precision(land_match, land_no_match), recall(land_match,land_present))))
    print("CONT matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(cont_match, cont_no_match,cont_present, precision(cont_match, cont_no_match), recall(cont_match,cont_present),f1_measure(precision(cont_match, cont_no_match), recall(cont_match,cont_present))))
    print("HEELAL matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(heelal_match, heelal_no_match, heelal_present, precision(heelal_match, heelal_no_match), recall(heelal_match, heelal_present),f1_measure(precision(heelal_match, heelal_no_match), recall(heelal_match, heelal_present))))
    print("\n ORG \n")
    print("ORG matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(match_org, no_match_org,org_present, precision(match_org, no_match_org), recall(match_org,org_present),f1_measure(precision(match_org, no_match_org), recall(match_org,org_present))))
    print("MISC matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(org_misc_match, org_misc_no_match,org_misc_present, precision(org_misc_match, org_misc_no_match), recall(org_misc_match,org_misc_present),f1_measure(precision(org_misc_match, org_misc_no_match), recall(org_misc_match,org_misc_present))))
    print("COM matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(com_match, com_no_match,com_present, precision(com_match, com_no_match), recall(com_match,com_present),f1_measure(precision(com_match, com_no_match), recall(com_match,com_present))))
    print("GOV matches = {}, no_matches = {},present = {}, precision = {}, recall = {}, f1_score = {} \n".format(gov_match, gov_no_match,gov_present, precision(gov_match, gov_no_match), recall(gov_match,gov_present),f1_measure(precision(gov_match, gov_no_match), recall(gov_match,gov_present))))
    '''
    print("NONE matches = {}, no_matches = {},present = {}, precision = {}, recall = {} \n".format(org_none_match, org_none_no_match,org_none_present, precision(org_none_match, org_none_no_match), recall(org_none_match,org_none_present)))'''
    #print("ORG matches = {}, no_matches = {} \n".format(match_org, no_match_org))
    '''
    for key, value in final_DL.items():
        subcat = key.split("_")[1]
        feat = "_".join(key.split("_")[2:])
        if subcat == "bc":
            #if feat == "unique":
            print(key, value)
    '''
    print(none_found)
    print("\n For LateX n = 5 k = 12")
    print("LOC overall & {} & {} & {} & {} & {} & {} \n".format(match_loc, no_match_loc,loc_present, precision(match_loc, no_match_loc), recall(match_loc,loc_present),f1_measure(precision(match_loc, no_match_loc), recall(match_loc,loc_present))))
    print("POINT & {} & {} & {} & {} & {} & {} \n".format(punt_match, punt_no_match,punt_present, precision(punt_match, punt_no_match), recall(punt_match,punt_present),f1_measure(precision(punt_match, punt_no_match), recall(punt_match,punt_present))))
    print("LINE & {} & {} & {} & {} & {} & {} \n".format(lijn_match, lijn_no_match,lijn_present, precision(lijn_match, lijn_no_match), recall(lijn_match,lijn_present),f1_measure(precision(lijn_match, lijn_no_match), recall(lijn_match,lijn_present))))
    print("PC & {} & {} & {} & {} & {} & {} \n".format(bc_match, bc_no_match,bc_present, precision(bc_match, bc_no_match), recall(bc_match,bc_present),f1_measure(precision(bc_match, bc_no_match), recall(bc_match,bc_present))))
    print("WATER & {} & {} & {} & {} & {} & {} \n".format(water_match, water_no_match,water_present, precision(water_match, water_no_match), recall(water_match,water_present),f1_measure(precision(water_match, water_no_match), recall(water_match,water_present))))
    '''
    print("NONE matches = {}, no_matches = {},present = {}, precision = {}, recall = {} \n".format(none_match, none_no_match,none_present, precision(none_match, none_no_match), recall(none_match,none_present)))'''
    print("REGION & {} & {} & {} & {} & {} & {} \n".format(regio_match, regio_no_match,regio_present, precision(regio_match, regio_no_match), recall(regio_match,regio_present),f1_measure(precision(regio_match, regio_no_match), recall(regio_match,regio_present))))
    print("FICTION & {} & {} & {} & {} & {} & {} \n".format(fictief_match, fictief_no_match,fictief_present, precision(fictief_match, fictief_no_match), recall(fictief_match,fictief_present),f1_measure(precision(fictief_match, fictief_no_match), recall(fictief_match,fictief_present))))
    print("COUNTRY & {} & {} & {} & {} & {} & {} \n".format(land_match, land_no_match,land_present, precision(land_match, land_no_match), recall(land_match,land_present),f1_measure(precision(land_match, land_no_match), recall(land_match,land_present))))
    print("CONTINENT & {} & {} & {} & {} & {} & {} \n".format(cont_match, cont_no_match,cont_present, precision(cont_match, cont_no_match), recall(cont_match,cont_present),f1_measure(precision(cont_match, cont_no_match), recall(cont_match,cont_present))))
    print("COSMOS & {} & {} & {} & {} & {} & {} \n".format(heelal_match, heelal_no_match, heelal_present, precision(heelal_match, heelal_no_match), recall(heelal_match, heelal_present),f1_measure(precision(heelal_match, heelal_no_match), recall(heelal_match, heelal_present))))
    print("ORG overall & {} & {} & {} & {} & {} & {} \n".format(match_org, no_match_org,org_present, precision(match_org, no_match_org), recall(match_org,org_present),f1_measure(precision(match_org, no_match_org), recall(match_org,org_present))))
    print("MISC & {} & {} & {} & {} & {} & {} \n".format(org_misc_match, org_misc_no_match,org_misc_present, precision(org_misc_match, org_misc_no_match), recall(org_misc_match,org_misc_present),f1_measure(precision(org_misc_match, org_misc_no_match), recall(org_misc_match,org_misc_present))))
    print("COMPANY & {} & {} & {} & {} & {} & {} \n".format(com_match, com_no_match,com_present, precision(com_match, com_no_match), recall(com_match,com_present),f1_measure(precision(com_match, com_no_match), recall(com_match,com_present))))
    print("GOVERNMENT & {} & {} & {} & {} & {} & {} \n".format(gov_match, gov_no_match,gov_present, precision(gov_match, gov_no_match), recall(gov_match,gov_present),f1_measure(precision(gov_match, gov_no_match), recall(gov_match,gov_present))))
if __name__ == '__main__':
main()
|
15,371 | eb9df1a929585e8dce0f7f6d38d6b4e22e081c8d | import speech_recognition as sr
from gtts import gTTS
from playsound import playsound
from datetime import datetime
from googletrans import LANGUAGES, Translator
import os
# Code For Speech Output
def speak(text, lang='en'):
    """Synthesize `text` with Google TTS, play it, then delete the temp mp3."""
    audio_file = 'voice.mp3'
    gTTS(text, lang=lang).save(audio_file)
    playsound(audio_file)
    os.remove(audio_file)
# Code for Speech Input
def takeCommand(lang):
    """Record one utterance from the microphone and return its transcription.

    lang -- language code forwarded to Google's recognizer.

    On any recognition error it retries recursively, up to 3 attempts
    tracked by the module-level counter ``i``; after that it announces a
    connection problem, resets ``i`` and returns 0 (falsy sentinel).
    NOTE(review): ``i`` is only reset on the give-up path, not after a
    success — confirm that is intended.
    """
    r = sr.Recognizer()
    with sr.Microphone() as source:
        r.pause_threshold = 1
        r.energy_threshold = 1500
        speak('Speak the sentence')
        audio = r.listen(source)
        speak('done')
    try:
        query = r.recognize_google(audio, language = lang)
        print(f"User said: {query}\n")
        speak(f"User said: {query}\n")
        return query
    except Exception as e:
        # Any failure (no speech, network error, ...) lands here.
        global i
        if i<3:
            speak("Say that again please......")
            i += 1
            return takeCommand(lang)
        else:
            speak('There is some problem in the voice connection, please check your internet connection.')
            i=0
            return 0
# Program Starts From Here
# Module-level retry counter used by takeCommand().
i = 0
speak('Hello Sir. You may translate any of your sentences into almost any language by following three basic steps, which are, Selecting your text language, Speaking the text to be translated, Selecting the language of the output text. ')
speak('This is the list of languages with the code words. Please enter the code word of the language you are using.')
# Show every supported language code so the user can pick the input language.
for lang in LANGUAGES:
    print(f"{lang} - {LANGUAGES[lang]}")
lang = input('Enter the language code: ')
text = takeCommand(lang)
# takeCommand returns 0 (falsy) when recognition ultimately failed.
if text:
    speak('Please enter the output language')
    for lang in LANGUAGES:
        print(f"{lang} - {LANGUAGES[lang]}")
    outLang = input('Enter the Output Language code: ')
    trans = Translator()
    # trans is rebound from the Translator instance to its translation result.
    trans = trans.translate(text, src=lang, dest=outLang)
    speak(trans.text)
print(trans.text) |
15,372 | d73b0d24e6c301977ebcbc8afe941ab2a92dca0a |
# Tuples support count() and index() just like lists.
tup = ('a', 'b', 'c', 'd', 'd')
print(tup.count('d'))  # number of occurrences of 'd' -> 2
print(tup.index('d'))  # position of the first 'd' -> 3
15,373 | b0f0769f68fb5012b54d60affab05c0566c6e5e6 | #application of linked lists
# Application of linked lists: push three values at the head,
# print the list, then print its size.
from LinkedListPKG import LinkedList

my_list = LinkedList()
my_list.insertStart(11)
my_list.insertStart(22)
my_list.insertStart(33)
# my_list.insertEnd(99)
my_list.traverse()
print(my_list.size())
15,374 | 6377569c1d8b0671d8c7071e422be18d78be195e | from django.db import models
import datetime
class GeoPhoto(models.Model):
    """A photo pinned to a geographic coordinate (latitude/longitude)."""
    title = models.CharField(max_length=250)
    latitude=models.FloatField()
    longitude=models.FloatField()
    # unique=True: each source photo URL may be stored only once.
    photo_url = models.CharField(max_length=500, unique=True)
    photo_thumbnail_url = models.CharField(max_length=500)
    # Callable default -> evaluated per row at save time; note this is
    # naive local time, not timezone-aware.
    date_added = models.DateField(default=datetime.datetime.now)
    def __str__(self):
        return self.title
class PresetList(models.Model):
    """A named preset location identified by its coordinate pair."""
    name = models.CharField(max_length=250)
    latitude=models.FloatField()
    longitude=models.FloatField()
    # Callable default -> evaluated per row at save time (naive local time).
    date_added = models.DateField(default=datetime.datetime.now)
    class Meta:
        # No two presets may share the exact same (latitude, longitude) pair.
        unique_together = ('latitude', 'longitude')
    def __str__(self):
        return self.name
class FavList(models.Model):
    """A favourited GeoPhoto; deleting the photo cascades to its favourites."""
    geophoto = models.ForeignKey(GeoPhoto, on_delete=models.CASCADE)
    # Callable default -> evaluated per row at save time (naive local time).
    date_added = models.DateField(default=datetime.datetime.now)
    def __str__(self):
        return self.geophoto.title
|
15,375 | 2618a507ee9afbde1316caeba300157ebcec5a1c | '''Program to print the greater number between two number
Developer:Aakash
Date:21.02.2020
--------------------------------'''
a = int(input("Enter any number="))
b = int(input("Enter any other number="))
# Compare and report. The original printed "The Second number is greater"
# when the two numbers were equal; handle that case explicitly.
if a > b:
    print("The First number is greater i.e.,", a)
elif a < b:
    print("The Second number is greater i.e.,", b)
else:
    print("Both numbers are equal i.e.,", a)
|
15,376 | b1b9bf08309d4ebfffaf61a1c3096166fdbc88a2 | sentence = 'xaxax'
sentence = sentence.lower()
score = 0
def check_palindrome(segment):
    """Return True iff `segment` reads the same forwards and backwards."""
    return segment == segment[::-1]
# for begin in range(len(sentence)-2):
#     for stop in range(3, len(sentence)+1):
#         end = begin + stop
#         if end > len(sentence):
#             continue
# Enumerate every substring of length >= 3; each palindromic substring
# adds its own length to the module-level `score`.
for begin in range(len(sentence)-2):
    for stop in range(begin + 3, len(sentence)+1):
        # end = begin + stop
        # if end > len(sentence):
        #     continue
        segment = sentence[begin:stop]
        # print(begin, begin + stop)
        print(segment)
        result = check_palindrome(segment)
        if result:
            score += len(segment)
print(score)
# stuff = '232323abc'
# print(stuff[-3:])
15,377 | 63771847f0a23e6bcc98fd372f8742a0b608754b | # Embedded file name: scripts/client/ArenaHelpers/GameModes/AreaConquest/ACGameModeClient.py
import BigWorld
import Math
from ACSector import ACSector
from ArenaHelpers.GameModes import GameModeClient
from ArenaHelpers.GameModes.AreaConquest import ACSectorClient
from ArenaHelpers.GameModes.AreaConquest import AC_EVENTS
from ArenaHelpers.GameModes.AreaConquest import RocketV2Manager
from ArenaHelpers.GameModes.AreaConquest import SignalFlaresManager
from ArenaHelpers.GameModes.AreaConquest.LastPlayerManager import LastPlayerManager
from ArenaHelpers.GameModes.AreaConquest.WaveInfoManager import WaveInfoManager
from Event import eventHandler, Event, EventManager
from EventHelpers import CompositeSubscription, EventSubscription
from GameModeSettings import ACSettings as SETTINGS
from consts import ARENA_UPDATE, TEAM_ID, AIR_STRIKE_WAVE_STATE, GAME_MODE
class AC_ARENA_UPDATE_EVENTS:
    """Events for update arena calls.
    You should not use them outside ACGameModeClient, use AC_EVENT instead
    """
    # Score / counter / time updates.
    UPDATE_AC_POINTS = 'update_ac_points'
    UPDATE_GLOBAL_COUNTERS = 'update_global_counters'
    CHANGE_DYNAMIC_TIME = 'change_dynamic_time'
    # Sector and game-flow updates.
    AC_ACTION_MESSAGE = 'ac_action_message'
    AC_SECTORS_STATUS = 'ac_sectors_status'
    AC_GAME_TICK = 'ac_game_tick'
    AC_BATTLE_EVENT = 'ac_battle_event'
    # V2-rocket lifecycle events.
    AC_ROCKET_V2_LAUNCHED = 'ac_rocket_v2_launched'
    AC_ROCKET_V2_HIT_TARGET = 'ac_rocket_v2_hit_target'
    AC_ROCKET_V2_TARGET_OBJECT_CHANGED = 'ac_rocket_v2_target_object_changed'
    # Bomber-wave events.
    AC_BOMBER_IN_WAVE_DIED = 'ac_bomber_in_wave_died'
    AC_BOMBER_DISPATCHER_TARGET_SECTOR_CHANGED = 'ac_bomber_dispatcher_target_sector_changed'
    AC_SECTOR_PERMANENT_LOCK = 'ac_sector_permanent_lock'
    UPDATE_RESOURCE_POINTS = 'update_resource_points'
class ACGameModeClient(GameModeClient.GameModeClient):
    """Client-side game mode for the AC (sector conquest) arena.

    Tracks the global score, sector states, tick timing and the AC
    sub-managers, translating raw arena updates into AC_EVENTS dispatches.
    Events that arrive before every sector entity has entered the world are
    suspended and replayed once the mode becomes ready.
    """
    # Maps raw ARENA_UPDATE codes to the internal AC_ARENA_UPDATE_EVENTS names
    # consumed by the @eventHandler-decorated methods below.
    updateEventsMap = {ARENA_UPDATE.UPDATE_AC_POINTS: AC_ARENA_UPDATE_EVENTS.UPDATE_AC_POINTS,
     ARENA_UPDATE.UPDATE_GLOBAL_COUNTERS: AC_ARENA_UPDATE_EVENTS.UPDATE_GLOBAL_COUNTERS,
     ARENA_UPDATE.UPDATE_RESOURCE_POINTS: AC_ARENA_UPDATE_EVENTS.UPDATE_RESOURCE_POINTS,
     ARENA_UPDATE.CHANGE_DYNAMIC_TIME: AC_ARENA_UPDATE_EVENTS.CHANGE_DYNAMIC_TIME,
     ARENA_UPDATE.AC_SECTOR_PERMANENT_LOCK: AC_ARENA_UPDATE_EVENTS.AC_SECTOR_PERMANENT_LOCK,
     ARENA_UPDATE.AC_ACTION_MESSAGE: AC_ARENA_UPDATE_EVENTS.AC_ACTION_MESSAGE,
     ARENA_UPDATE.AC_SECTORS_STATUS: AC_ARENA_UPDATE_EVENTS.AC_SECTORS_STATUS,
     ARENA_UPDATE.AC_GAME_TICK: AC_ARENA_UPDATE_EVENTS.AC_GAME_TICK,
     ARENA_UPDATE.AC_BATTLE_EVENT: AC_ARENA_UPDATE_EVENTS.AC_BATTLE_EVENT,
     ARENA_UPDATE.AC_ROCKET_V2_LAUNCHED: AC_ARENA_UPDATE_EVENTS.AC_ROCKET_V2_LAUNCHED,
     ARENA_UPDATE.AC_ROCKET_V2_HIT_TARGET: AC_ARENA_UPDATE_EVENTS.AC_ROCKET_V2_HIT_TARGET,
     ARENA_UPDATE.AC_ROCKET_V2_TARGET_OBJECT_CHANGED: AC_ARENA_UPDATE_EVENTS.AC_ROCKET_V2_TARGET_OBJECT_CHANGED,
     ARENA_UPDATE.AC_BOMBER_IN_WAVE_DIED: AC_ARENA_UPDATE_EVENTS.AC_BOMBER_IN_WAVE_DIED,
     ARENA_UPDATE.AC_BOMBER_DISPATCHER_TARGET_SECTOR_CHANGED: AC_ARENA_UPDATE_EVENTS.AC_BOMBER_DISPATCHER_TARGET_SECTOR_CHANGED}

    def __init__(self, clientArena):
        super(ACGameModeClient, self).__init__(clientArena)
        self._scoreGlobal = (0, 0)
        self._globalCounters = {}
        # ident -> ACSectorClient; filled as sector entities enter the world.
        self._sectors = {}
        self._currentTick = 0
        self._currentTickStartedAt = self.player.arenaStartTime
        self._globalTime = 0
        self._dynamicTime = 0
        # Becomes True once every configured sector has a client wrapper.
        self._isReady = False
        self._eManager = EventManager()
        self.eGameModeReady = Event(self._eManager)
        self._rocketV2Manager = RocketV2Manager.RocketV2Manager(self)
        self._signalFlaresManager = SignalFlaresManager.SignalFlaresManager(self)
        self._waveInfoManager = WaveInfoManager(self)
        self._lastPlayerManager = LastPlayerManager(self)
        # Events dispatched before readiness are queued here (see dispatch()).
        self._pendingEvents = []
        self.createSectorsData()
        self.registerArenaUpdateEvents(self.updateEventsMap)
        gameActionsManager = self.clientArena.gameActionsManager
        self._subscription = CompositeSubscription(EventSubscription(gameActionsManager.eWaveAdded, self._onASWaveAdded), EventSubscription(gameActionsManager.eWaveRemoved, self._onASWaveRemoved), EventSubscription(gameActionsManager.eWaveStateChanged, self._onASWaveStateChanged), EventSubscription(gameActionsManager.eBomberStateChanged, self._onASBomberStateChanged))
        self._subscription.subscribe()

    @property
    def isReady(self):
        """True once all configured sectors have client-side wrappers."""
        return self._isReady

    @property
    def rocketV2Manager(self):
        """
        @rtype: RocketV2Manager.RocketV2Manager
        """
        return self._rocketV2Manager

    @property
    def scoreGlobal(self):
        """Global game score
        @rtype: (int, int)
        """
        return self._scoreGlobal

    @property
    def sectors(self):
        """Sectors dict
        @rtype: dict[basestring, ACSectorClient.ACSectorClient]
        """
        # Fixed decompiler artifact: the original 'raise self.isReady or
        # AssertionError(...)' raised TypeError (raise True) whenever the game
        # mode WAS ready, making this property unusable. It was an assert.
        assert self.isReady, 'Attempt to get sectors data while GameMode is not ready'
        return self._sectors

    @property
    def currentTick(self):
        """Current game tick number
        @rtype: int
        """
        return self._currentTick

    @property
    def currentTickStartedAt(self):
        """Time when current tick started by BigWorld.serverTime()
        @rtype: float
        """
        return self._currentTickStartedAt

    @property
    def arenaTimeRemaining(self):
        """battle time remaining
        @rtype: float
        """
        return self.player.arenaStartTime + self.arenaTypeData.gameModeSettings.battleDuration - BigWorld.serverTime()

    @property
    def waveInfoManager(self):
        """
        @rtype: WaveInfoManager.WaveInfoManager
        """
        return self._waveInfoManager

    @property
    def uiSettings(self):
        """UI settings block from the arena's game-mode settings."""
        return self.arenaTypeData.gameModeSettings.uiSettings

    @property
    def gameModeName(self):
        """Human-readable name for the current game mode enum."""
        return GAME_MODE.NAMES[self.clientArena.gameModeEnum]

    @eventHandler(AC_ARENA_UPDATE_EVENTS.AC_BATTLE_EVENT)
    def onBattleEvent(self, payload, *args, **kwargs):
        """Forward a generic battle event id to AC_EVENTS subscribers."""
        # Fixed broken log line: the old message had no format placeholder, so
        # the payload passed to .format() was silently dropped.
        self._logDebug(':onBattleEvent: battleEventId={0}'.format(payload))
        battleEventId = payload
        self.dispatch(AC_EVENTS.BATTLE_EVENT, battleEventId)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.UPDATE_AC_POINTS)
    def onUpdateACPoints(self, payload, *args, **kwargs):
        """Update the global score, clamped to the configured points-to-win."""
        points = tuple(payload)
        self._logDebug(':onUpdateACPoints: points={0}'.format(points))
        maxPoints = self._arenaTypeData.gameModeSettings.pointsToWin
        self._scoreGlobal = (min(points[0], maxPoints), min(points[1], maxPoints))
        self.dispatch(AC_EVENTS.GLOBAL_SCORE_UPDATED, self.scoreGlobal)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.CHANGE_DYNAMIC_TIME)
    def onChangeDynamicTime(self, payload, *args, **kwargs):
        """Store the new dynamic time and notify with (new, old)."""
        time = payload
        oldTime = self._dynamicTime
        self._dynamicTime = time
        self.dispatch(AC_EVENTS.DYNAMIC_TIMER_UPDATE, time, oldTime)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.AC_SECTOR_PERMANENT_LOCK)
    def onSectorPermanentLock(self, payload, *args, **kwargs):
        """Broadcast that a sector became permanently locked."""
        sectorId = payload
        self.dispatch(AC_EVENTS.SECTOR_PERMANENT_LOCK, sectorId)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.UPDATE_GLOBAL_COUNTERS)
    def onUpdateGlobalCounters(self, payload, *args, **kwargs):
        """Replace the global counters dict and notify subscribers."""
        counters = payload
        self._logDebug(':onUpdateGlobalCounters: counters={0}'.format(counters))
        self._globalCounters = counters
        self.dispatch(AC_EVENTS.GLOBAL_COUNTERS_UPDATED, self._globalCounters)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.UPDATE_RESOURCE_POINTS)
    def onUpdateResourcePoints(self, payload, *args, **kwargs):
        """Forward a resource-point change (total, killer, victim, delta)."""
        totalPoints, killerID, victimID, pointsInc = payload
        self._logDebug(':onUpdateResourcePoints: {},{},{},{}'.format(totalPoints, killerID, victimID, pointsInc))
        self.dispatch(AC_EVENTS.RESOURCE_POINTS_UPDATED, totalPoints, killerID, victimID, pointsInc)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.AC_SECTORS_STATUS)
    def onACSectorStatus(self, payload, *args, **kwargs):
        """Apply an authoritative capture-point snapshot to one sector."""
        self._logDebug(':onACSectorStatus: payload={0}'.format(payload))
        sectorID, sectorPoints, capturePoints = payload
        sector = self.sectors[sectorID]
        sector.updateCapturePoints(sectorPoints)
        self.dispatch(AC_EVENTS.SECTOR_CAPTURE_POINTS_CHANGED, sector.ident, sector.capturePointsByTeams)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.AC_ACTION_MESSAGE)
    def onACActionMessage(self, payload, *args, **kwargs):
        """Handle a scored action; optionally add capture points to a sector."""
        self._logDebug(':onACActionMessage: payload={0}'.format(payload))
        action, teamIndex, avatarId, sectorId, tickNumber, points = payload
        settings = SETTINGS.ACTION_SETTINGS.get(action)
        if not settings:
            self._logError(':onACActionMessage: Unknown action got, id={0}'.format(action))
            return
        self.dispatch(AC_EVENTS.SECTOR_ACTION, sectorId, teamIndex, settings)
        if settings['sectorScore']:
            sector = self.sectors[sectorId]
            sector.addCapturePoints(teamIndex, points)
            self._logDebug(':onACActionMessage: updated sector capture points: {0}'.format(sector.capturePointsByTeams))
            self.dispatch(AC_EVENTS.SECTOR_CAPTURE_POINTS_CHANGED, sector.ident, sector.capturePointsByTeams)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.AC_GAME_TICK)
    def onACGameTick(self, payload, *args, **kwargs):
        """Advance the local tick counter when the server reports a tick."""
        self._logDebug(':onACGameTick: payload={0}'.format(payload))
        tickNumber = payload
        self.dispatch(AC_EVENTS.GAME_MODE_TICK, tickNumber)
        self._currentTick = tickNumber + 1
        self._currentTickStartedAt = BigWorld.serverTime()

    @eventHandler(AC_ARENA_UPDATE_EVENTS.AC_ROCKET_V2_LAUNCHED)
    def onRocketV2Launched(self, payload, *args, **kwargs):
        """Unpack a rocket launch payload into vectors and re-dispatch it."""
        launchPosition, targetPosition = Math.Vector3(), Math.Vector3()
        sectorIdent, launchPosition.x, launchPosition.y, launchPosition.z, targetID, targetPosition.x, targetPosition.y, targetPosition.z, flyingTime = payload
        self.dispatch(AC_EVENTS.ROCKET_V2_LAUNCHED, sectorIdent, launchPosition, targetPosition, flyingTime)

    def onRocketV2TargetSectorChanged(self, sectorID, oldTargetID, newTargetID, *args, **kwargs):
        """Relay a sector's rocket retarget (hooked to the sector event)."""
        self.dispatch(AC_EVENTS.ROCKET_V2_TARGET_SECTOR_CHANGED, sectorID, newTargetID)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.AC_ROCKET_V2_TARGET_OBJECT_CHANGED)
    def onRocketV2TargetObjectChanged(self, payload, *args, **kwargs):
        """Relay a change of the concrete object targeted by a rocket."""
        sectorIdent, sectorTeamIndex, newTargetObjId = payload
        self.dispatch(AC_EVENTS.ROCKET_V2_TARGET_OBJECT_CHANGED, sectorIdent, sectorTeamIndex, newTargetObjId)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.AC_ROCKET_V2_HIT_TARGET)
    def onRocketV2HitTarget(self, payload, *args, **kwargs):
        """Relay a rocket impact (position/team fields are currently unused)."""
        sectorIdent, teamIndex, targetTeamObjectId, targetPositionX, targetPositionY, targetPositionZ, targetTeamIndex = payload
        self.dispatch(AC_EVENTS.ROCKET_V2_HIT_TARGET, sectorIdent, teamIndex, targetTeamObjectId)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.AC_BOMBER_IN_WAVE_DIED)
    def onBomberInWaveDied(self, payload):
        """Relay the death of a single bomber within a wave."""
        sectorIdent, waveID, bomberID = payload
        self.dispatch(AC_EVENTS.BOMBER_IN_WAVE_DIED, sectorIdent, waveID, bomberID)

    @eventHandler(AC_ARENA_UPDATE_EVENTS.AC_BOMBER_DISPATCHER_TARGET_SECTOR_CHANGED)
    def onBombersChangeTarget(self, payload):
        """Relay a bomber dispatcher retarget to a new sector."""
        sectorIdent, newTargetSectorIdent = payload
        self.dispatch(AC_EVENTS.BOMBER_DISPATCHER_TARGET_SECTOR_CHANGED, sectorIdent, newTargetSectorIdent)

    def createSectorsData(self):
        """Create sectors data using arena settings
        """
        for sectorId, settings in self.arenaTypeData.sectors.sectors.iteritems():
            # Sector entities may not have entered the world yet; the stragglers
            # arrive later through onACSectorCreated().
            entity = next((sector for sector in ACSector.entities if sector.ident == sectorId), None)
            if entity:
                self._sectors[sectorId] = sector = ACSectorClient.ACSectorClient(settings, entity)
                sector.eStateChanged += self.onSectorStateChanged
                sector.eRocketV2TargetSectorIDChanged += self.onRocketV2TargetSectorChanged

        self._checkIsReady()

    def getPointsInTick(self, tickNumber = None):
        """Return Points in tick
        @param tickNumber: tick number
        """
        tickNumber = tickNumber or self.currentTick
        score = [0, 0]
        for sector in self.sectors.itervalues():
            if sector.teamIndex in (TEAM_ID.TEAM_0, TEAM_ID.TEAM_1):
                score[sector.teamIndex] += sector.getPointsInTick(tickNumber)

        return score

    def getTickPeriod(self):
        """Return tick period
        """
        period = self.arenaTypeData.gameModeSettings.globalTickPeriod
        # A team holding every capturable sector accelerates the tick rate.
        if self.teamSuperiority(TEAM_ID.TEAM_0) or self.teamSuperiority(TEAM_ID.TEAM_1):
            period = self.arenaTypeData.gameModeSettings.superiorityGlobalTickPeriod
        return period

    def capturedSectors(self, teamIndex):
        """Return captured sectors
        """
        capturedSectors = 0
        for sector in self._sectors.itervalues():
            if not sector.isCapturable:
                continue
            if sector.teamIndex == teamIndex:
                capturedSectors += 1

        return capturedSectors

    def teamSuperiority(self, teamIndex):
        """Check capture sectors by team
        """
        sectors = 0
        capturedSectors = 0
        for sector in self._sectors.itervalues():
            if not sector.isCapturable:
                continue
            sectors += 1
            if sector.teamIndex == teamIndex:
                capturedSectors += 1

        return sectors == capturedSectors

    def checkSectorForLock(self, ident):
        """Return whether a known sector is locked for battle (False if unknown)."""
        if ident in self._sectors:
            return self._sectors[ident].isLockForBattle
        return False

    def onSectorStateChanged(self, ident, oldState, state, *args, **kwargs):
        """Event handler for state changed event
        @param ident: Sector identifier
        @type state: BWUserTypesCommon.ACSectorState.ACSectorState
        @type oldState: BWUserTypesCommon.ACSectorState.ACSectorState
        """
        self.dispatch(AC_EVENTS.SECTOR_STATE_CHANGED, ident, oldState.state, oldState.teamIndex, state.state, state.teamIndex, state.nextStateTimestamp)

    def _checkIsReady(self):
        """Flip to ready (once) when every configured sector has a wrapper,
        then flush suspended events and fire eGameModeReady."""
        if self._isReady:
            return
        self._isReady = all((ident in self._sectors for ident in self.arenaTypeData.sectors.sectors))
        if self._isReady:
            self._processSuspendedEvents()
            self.eGameModeReady()

    def onACSectorCreated(self, entity):
        """Callback from sector entity on enter world event
        @type entity: ACSector
        """
        # Fixed decompiler artifact: 'raise <cond> or AssertionError(...)' was
        # a mangled assert that raised TypeError for every expected sector.
        assert entity.ident in self.arenaTypeData.sectors.sectors, "Unexpected sector created: '{0}', arena: {1}".format(entity.ident, self.arenaTypeData.typeName)
        settings = self.arenaTypeData.sectors.sectors[entity.ident]
        self._sectors[entity.ident] = sector = ACSectorClient.ACSectorClient(settings, entity)
        sector.eStateChanged += self.onSectorStateChanged
        self._checkIsReady()

    @property
    def lastPlayerManager(self):
        """Manager tracking the last remaining player logic."""
        return self._lastPlayerManager

    def dispatch(self, event, *args, **kwargs):
        """Dispatch an AC event, queueing it while the mode is not ready yet."""
        if not self.isReady:
            self._suspendEvent(event, args, kwargs)
            self._logDebug("Suspended event processing while game mode is not ready: event = '{0}', args = {1}, kwargs = {2}".format(event, args, kwargs))
            return
        super(ACGameModeClient, self).dispatch(event, *args, **kwargs)

    def destroy(self):
        """Tear down subscriptions and sub-managers, then the base class."""
        self._subscription.unsubscribe()
        self._subscription = None
        self._waveInfoManager.destroy()
        self._lastPlayerManager.destroy()
        self._eManager.clear()
        self._rocketV2Manager.destroy()
        self._signalFlaresManager.destroy()
        self.clear()
        super(ACGameModeClient, self).destroy()

    def _suspendEvent(self, event, args, kwargs):
        """Queue an event for replay after the mode becomes ready."""
        self._pendingEvents.append((event, args, kwargs))

    def _processSuspendedEvents(self):
        """Replay every queued event in arrival order, then clear the queue."""
        for event, args, kwargs in self._pendingEvents:
            self.dispatch(event, *args, **kwargs)

        self._pendingEvents[:] = []

    def _onASWaveAdded(self, record, *args, **kwargs):
        """Handler for GameActionsManager.eWaveAdded event
        :param record: AIR_STRIKE_WAVE_RECORD
        """
        self.dispatch(AC_EVENTS.BOMBERS_LAUNCHED, record['sectorID'], record['targetID'], record['teamIndex'], record['waveID'], record['bomberIDsStates'], record['startTime'])

    def _onASWaveRemoved(self, waveID, *args, **kwargs):
        """Handler for GameActionsManager.eWaveRemoved event
        :param waveID: Unique wave identifier
        """
        self.dispatch(AC_EVENTS.BOMBERS_DIED, waveID)

    def _onASWaveStateChanged(self, record, stateOld, state, *args, **kwargs):
        """Handler for GameActionsManager.eWaveStateChanged event
        :param record: AIR_STRIKE_WAVE_RECORD
        :param stateOld: Old state value
        :param state: New state value
        """
        if state == AIR_STRIKE_WAVE_STATE.ATTACK_IN_PROGRESS and stateOld == AIR_STRIKE_WAVE_STATE.BOMBS_DROPPED:
            self.dispatch(AC_EVENTS.BOMBERS_ATTACK_STARTED, record['sectorID'], record['waveID'], record['size'], len(record['bomberIDsStates']))

    def _onASBomberStateChanged(self, record, bomberID, stateOld, state, *args, **kwargs):
        """Handler for GameActionsManager.eBomberStateChanged event
        :param record: AIR_STRIKE_WAVE_RECORD
        :param bomberID: Unique bomber id
        :param stateOld: Old state value
        :param state: New state value
        """
        if state == AIR_STRIKE_WAVE_STATE.ATTACK_NOTIFIED and stateOld == AIR_STRIKE_WAVE_STATE.INTRO_FLIGHT:
            self.dispatch(AC_EVENTS.BOMBER_ATTACK_NOTIFIED, record['sectorID'], record['waveID'], bomberID, record['size'], len(record['bomberIDsStates']))
        elif state == AIR_STRIKE_WAVE_STATE.BOMBS_DROPPED and stateOld == AIR_STRIKE_WAVE_STATE.ATTACK_NOTIFIED:
            self.dispatch(AC_EVENTS.BOMBER_BOMBS_DROPPED, record['sectorID'], record['waveID'], bomberID, record['size'], len(record['bomberIDsStates']))
15,378 | 6c656e482e11e043f17d92404eafe4dd920c3236 | def ism(x):
    # True when x is a lowercase ASCII letter ('a'..'z'); False otherwise.
    if x>='a' and x<='z':
        return True
    return False
def isM(x):
    """Return True when x is an uppercase ASCII letter ('A'..'Z')."""
    # Chained comparison replaces the if/return True/return False pattern.
    return 'A' <= x <= 'Z'
def rot13( string ):
    """Apply the ROT13 substitution cipher to a sequence of characters.

    @param string: iterable of single characters (callers pass list(input)).
    @return: the transformed text joined into a single str.

    Fix: the original mutated the caller's list in place; this version builds
    a new result instead, and the duplicated per-case wrap-around arithmetic
    is replaced by a single modular rotation.
    """
    out = []
    for ch in string:
        if 'a' <= ch <= 'z':
            base = ord('a')
            out.append(chr(base + (ord(ch) - base + 13) % 26))
        elif 'A' <= ch <= 'Z':
            base = ord('A')
            out.append(chr(base + (ord(ch) - base + 13) % 26))
        else:
            # Non-letters pass through unchanged, as before.
            out.append(ch)
    return "".join(out)
# Python 2 REPL: ROT13 each input line until EOF/interrupt ends the loop.
while True:
    try:
        print rot13(list(raw_input()))
    except:
        # raw_input() raises EOFError (or KeyboardInterrupt) when input ends.
        break
15,379 | 7f5d2436659c47db04c431965e48751e4840a970 | nome = input('Digite seu nome: ')
# Greet the user (name was read just above), then add two typed-in integers.
print('Olá,', nome, 'bom dia!')
n1 = int(input('Digite o primeiro número: '))
n2 = int(input('Digite o segundo número: '))
# Same output as before: label followed by the sum.
print('O resultado da soma é:', n1 + n2)
15,380 | 3bf8d290780cf30b5d311af5fb9aefaea0a099bb | import itertools
def app(n=1000):
    """Return (as a string) the index of the first Fibonacci number with n digits.

    Indexing follows F(0)=0, F(1)=1, so app(3) == "12" because F(12)=144 is
    the first 3-digit Fibonacci number. The default n=1000 preserves the
    original hard-coded behaviour (Project-Euler-style problem).

    @param n: required number of decimal digits (default 1000).
    @raise RuntimeError: if the digit count ever skips past n (defensive;
        consecutive Fibonacci numbers grow by at most one digit).
    """
    b = 1
    m = 0
    for i in itertools.count():
        digits = len(str(m))  # compute once instead of twice per iteration
        if digits > n:
            raise RuntimeError("Not found")
        if digits == n:
            return str(i)
        b, m = m, b + m
if __name__ == "__main__":
print(app())
|
15,381 | 54508d6dfb471d6d6b4e04dffc405a8cc4b153fb | import json
from . import TrafficResponseObject
class GoogleResponseObject(TrafficResponseObject.TrafficResponse):
    """Wraps a Google Directions API JSON response, exposing the distance and
    duration of the first leg of the first route.
    """
    # Class-level defaults; every instance overwrites them in __init__.
    json_data = ""
    duration = 0
    distance = 0

    def __init__(self, json_data=""):
        """Parse json_data (a JSON string) and cache distance/duration.

        @raise ValueError: if json_data is not valid JSON (including the
            default empty string).
        """
        # Fix: removed leftover debug print(json_data) that dumped the whole
        # raw response to stdout on every construction.
        self.json_data = json_data
        parsedData = json.loads(json_data)
        self.distance = self.parseDistance(parsedData)
        self.duration = self.parseDuration(parsedData)

    def getDistance(self):
        """Return the cached distance of route 0 / leg 0."""
        return self.distance

    def getDuration(self):
        """Return the cached duration of route 0 / leg 0."""
        return self.duration

    def parseDistance(self, parsedData):
        """Extract the distance field from the first route's first leg."""
        return parsedData["routes"][0]["legs"][0]["distance"]

    def parseDuration(self, parsedData):
        """Extract the duration field from the first route's first leg."""
        return parsedData["routes"][0]["legs"][0]["duration"]
15,382 | 90b6c7763b199cd7098031d3728ab6439890deec | from selenium import webdriver
from selenium.webdriver.common.by import By

# Drive convertworld.com's Celsius converter: type a temperature, click the
# convert button and print the converted value.
driver = webdriver.Chrome('/usr/local/bin/chromedriver')
driver.get('https://www.convertworld.com/hu/homerseklet/celsius.html')
deg = input("Hőmérséklet=")
print(deg)
xpath = "//input[@name='amount']"
print(xpath)
driver.find_element(By.XPATH, xpath).send_keys(deg)
driver.find_element(By.XPATH, "//img[@alt='Átalakít']").click()
# Fix: find_element_by_id() was removed in Selenium 4; use the By API that the
# lookups above already use.
result = driver.find_element(By.ID, "value_3").text
print(result)
driver.quit()
15,383 | b82b9047054ff367cb32830245ccaf686f5136c7 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b on 2019-05-07.
# 2019, SMART Health IT.
import os
import io
import unittest
import json
from . import eventdefinition
from .fhirdate import FHIRDate
class EventDefinitionTests(unittest.TestCase):
    """Auto-generated round-trip tests for the FHIR EventDefinition resource.

    Each test loads a JSON example, builds the model object, serializes it
    back to JSON, rebuilds it, and asserts the same field values both times.
    """

    def instantiate_from(self, filename):
        # Example files live under FHIR_UNITTEST_DATADIR (or the CWD).
        datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
        with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
            js = json.load(handle)
            self.assertEqual("EventDefinition", js["resourceType"])
        return eventdefinition.EventDefinition(js)

    def testEventDefinition1(self):
        # Build from the example, check fields, then round-trip via as_json().
        inst = self.instantiate_from("eventdefinition-example.json")
        self.assertIsNotNone(inst, "Must have instantiated a EventDefinition instance")
        self.implEventDefinition1(inst)
        js = inst.as_json()
        self.assertEqual("EventDefinition", js["resourceType"])
        inst2 = eventdefinition.EventDefinition(js)
        self.implEventDefinition1(inst2)

    def implEventDefinition1(self, inst):
        # Field-by-field assertions shared by both round-trip passes.
        self.assertEqual(inst.id, "example")
        self.assertEqual(inst.meta.tag[0].code, "HTEST")
        self.assertEqual(inst.meta.tag[0].display, "test health data")
        self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
        self.assertEqual(inst.purpose, "Monitor all admissions to Emergency")
        self.assertEqual(inst.status, "draft")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.trigger[0].condition.description, "Encounter Location = emergency (active/completed encounters, current or previous)")
        self.assertEqual(inst.trigger[0].condition.expression, "(this | %previous).location.where(location = 'Location/emergency' and status in {'active', 'completed'}).exists()")
        self.assertEqual(inst.trigger[0].condition.language, "text/fhirpath")
        self.assertEqual(inst.trigger[0].data[0].type, "Encounter")
        self.assertEqual(inst.trigger[0].name, "monitor-emergency-admissions")
        self.assertEqual(inst.trigger[0].type, "named-event")
|
15,384 | 2ccf7f6d58e70c70203f57c6580abfecb034d1ce | # -*- coding: UTF-8 -*-
# 工具模块
import os
from datetime import datetime
import uuid
import PIL
from PIL import Image
from werkzeug.utils import secure_filename
ALLOWED_IMAGE_EXTENSIONS = set(['png','jpg','jpeg','gif','bmp'])
ALLOWED_VIDEO_EXTENSIONS = set(['mp4','avi'])
ALLOWED_AUDIO_EXTENSIONS = set(['mp3','m4a'])
def create_folder(folderpath):
    """Create folderpath (including parents) if it does not exist and give it
    sane directory permissions (rwx for owner, r-x for group/others).

    Bug fix: the original called os.chmod(folderpath, os.O_RDWR). os.O_RDWR
    is an open() flag (value 2), not a permission mode, so the directory
    ended up with mode 0o002 (write-only for 'others').
    """
    if not os.path.exists(folderpath):
        os.makedirs(folderpath)
        os.chmod(folderpath, 0o755)
#修改文件名称(时间戳+uuid)
# Rename a file (timestamp + uuid) to avoid collisions on repeated uploads.
def change_filename_with_timestamp_uuid(filename):
    """Return a collision-resistant name: YYYYmmddHHMMSS + uuid4 hex + original extension."""
    ext = os.path.splitext(filename)[-1]
    stamp = datetime.now().strftime('%Y%m%d%H%M%S')
    return stamp + uuid.uuid4().hex + ext
#修改文件名称(文件名安全+uuid),避免一个文件上传多次覆盖
# Sanitize a filename and append a short uuid so repeated uploads of the same
# name do not overwrite each other.
def secure_filename_with_uuid(filename):
    """Return secure_filename(stem) + 6 uuid4 hex chars + original extension."""
    stem, ext = os.path.splitext(filename)
    return secure_filename(stem) + uuid.uuid4().hex[:6] + ext
# 检测文件后缀名
def check_files_extension(filenamelist, allowed_extensions):
    """Return True only if EVERY filename has an extension in allowed_extensions.

    Bug fix: the original returned from inside the loop after inspecting only
    the FIRST element (the if/else both returned), so later filenames were
    never checked. It also used split('.')[1], which picks the wrong segment
    for names like 'a.tar.gz'; the last extension is used now, compared
    case-insensitively (the allowed sets in this module are lowercase).

    @param filenamelist: iterable of filenames (vacuously True when empty).
    @param allowed_extensions: set of allowed lowercase extensions, no dot.
    """
    for filename in filenamelist:
        if '.' not in filename:
            return False
        ext = filename.rsplit('.', 1)[-1].lower()
        if ext not in allowed_extensions:
            return False
    return True
# 创建缩略图
# Create a thumbnail: cap the width at basewidth, keeping the aspect ratio.
def create_thumbnail(path, filename, basewidth=300):
    """Save '<stem>_thumb_<ext>' next to path/filename and return the new name.

    Images already narrower than basewidth are saved unscaled.
    """
    stem, ext = os.path.splitext(filename)
    thumb_name = stem + '_thumb_' + ext
    img = Image.open(os.path.join(path, filename))
    width, height = img.size
    if width > basewidth:
        # Scale the height by the same ratio as the width.
        ratio = basewidth / float(width)
        img = img.resize((basewidth, int(float(height) * ratio)), PIL.Image.ANTIALIAS)
    img.save(os.path.join(path, thumb_name))
    return thumb_name
# 创建展示图
# Create a display-size image: enlarge up to basewidth, keeping aspect ratio.
def create_show(path, filename, basewidth=800):
    """Save '<stem>_show_<ext>' next to path/filename and return the new name.

    Unlike create_thumbnail, this ENLARGES images narrower than basewidth;
    wider images are saved unscaled.
    """
    stem, ext = os.path.splitext(filename)
    show_name = stem + '_show_' + ext
    img = Image.open(os.path.join(path, filename))
    width, height = img.size
    if width < basewidth:
        # Scale the height by the same ratio as the width.
        ratio = basewidth / float(width)
        img = img.resize((basewidth, int(float(height) * ratio)), PIL.Image.ANTIALIAS)
    img.save(os.path.join(path, show_name))
    return show_name
|
15,385 | fca01e325c7ff9602d9a4eab68b944e01314da00 | """
Event message representation
"""
from .core import AbstractMessage
from .core import MessageType
class EventMesasge(AbstractMessage):
    """A string notification shown in the web app describing the job's overall state.

    NOTE(review): the class name's spelling ('Mesasge') is kept as-is because
    external code imports it by this name.
    """

    def __init__(self, message):
        self._message = message

    @property
    def message(self):
        """str: the event description text."""
        return self._message

    @message.setter
    def message(self, message):
        self._message = message

    def messageType(self):
        """Identify this message as an Event to the message framework."""
        return MessageType.Event

    def messageData(self):
        """Serialize to the wire format: a dict with the label text."""
        return {"Label": self.message}
15,386 | 0471fddeacb48ae69fbb3d3d9be8b3e79f2b382d | '''
Created on 2017年1月3日
@author: wujianxin
'''
import re
import requests
import urllib.request
class QSBK:
    """Interactive scraper for qiushibaike.com's 'hot' joke pages.

    Pre-fetches pages into a small buffer and shows one story per Enter
    key-press; typing 'q' quits. All user-facing strings are Chinese.
    """

    def __init__(self):
        # Next page number to fetch (the site's pages are 1-indexed).
        self.pageIndex = 1
        self.user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
        self.headers = {'User-Agent': self.user_agent}
        # Buffer of fetched pages; each entry is a list of [author, text] pairs.
        self.stories = []
        # Main-loop flag; True while the user has not quit.
        self.enable = False

    def getPage(self, pageIndex):
        """Fetch the raw HTML of one 'hot' page; return None on any error."""
        try:
            url = 'http://www.qiushibaike.com/hot/page/' + str(pageIndex)
            full_url = urllib.request.Request(url, headers=self.headers)
            response = urllib.request.urlopen(full_url)
            content = response.read().decode('utf-8')
            return content
        except Exception as e:
            # NOTE(review): broad catch deliberately degrades any network /
            # decode failure to None; the caller reports the failure.
            print(e)
            return None

    def getPageItems(self, pageIndex):
        """Fetch page pageIndex and scrape it into [author, text] pairs."""
        content = self.getPage(pageIndex)
        if not content:
            print('页面加载失败')
            return None
        # Captures the <h2> author name and the <span> story body per item.
        pattern = re.compile('<div.*?clearfix">.*?<h2>(.*?)</h2.*?' + '<div.*?content">.*?<span>(.*?)</span.*?</div>',
                             re.S)
        items = re.findall(pattern, content)
        pageStories = []
        for item in items:
            pageStories.append([item[0].strip(), item[1].strip()])
        return pageStories

    def loadPage(self):
        """Top up the buffer: fetch the next page when fewer than 2 are queued."""
        if self.enable == True:
            if len(self.stories) < 2:
                pageStories = self.getPageItems(self.pageIndex)
                if pageStories:
                    self.stories.append(pageStories)
                    self.pageIndex += 1

    def getOneStory(self, pageStories, page):
        """Show the stories of one page, one per Enter key-press; 'q' quits."""
        for story in pageStories:
            input_txt = input('')
            # Keep the buffer topped up while the user is reading.
            self.loadPage()
            if input_txt == 'q':
                self.enable = False
                return
            print(u"第%d页\t发布人:%s\n%s" % (page, story[0], story[1]))

    def start(self):
        """Main loop: pre-fetch a page, then show buffered pages until quit."""
        print(u"正在读取糗事百科,按回车查看新段子,q退出")
        self.enable = True
        self.loadPage()
        nowPage = 0
        while self.enable:
            if len(self.stories) > 0:
                pageStories = self.stories[0]
                nowPage += 1
                del self.stories[0]
                self.getOneStory(pageStories, nowPage)
if __name__ == '__main__':
spider = QSBK()
spider.start()
|
15,387 | 37b34851f7831ed2657d4800e405bb6efc5acc7f | #You will be given an array with 5 numbers.
#The first 2 numbers represent a range, and the next two numbers represent another range.
#The final number in the array is X.
#The goal of your program is to determine if both ranges overlap by at least X numbers.
#For example, in the array [4, 10, 2, 6, 3] the ranges 4 to 10 and 2 to 6 overlap by at least 3 numbers (4, 5, 6),
#so your program should return true.
def overlap(arr):
    """Decide whether two inclusive integer ranges share at least X integers.

    @param arr: [a1, b1, a2, b2, X] -- range a1..b1, range a2..b2, threshold X.
    @return: the string "true" or "false" (string form kept so existing
        callers that print the result keep the same output).

    Improvement: the original materialized both ranges and did an O(n*m)
    membership scan; the size of the intersection of two intervals is just
    min(b1, b2) - max(a1, a2) + 1, clamped at zero.
    """
    lo = max(arr[0], arr[2])
    hi = min(arr[1], arr[3])
    shared = max(0, hi - lo + 1)
    return "true" if shared >= arr[4] else "false"
print overlap([4, 10, 2, 5, 3])
print overlap([4, 10, 2, 5, 2])
print overlap([4, 10, 2, 6, 3])
|
15,388 | ba49afe13f8e149ccb95ee15674eaf102ec2b936 | from src.definitions import Set
from .BaseViewerScene import BaseViewerScene
class SendersViewerScene(BaseViewerScene):
    """Paginated viewer over the ACTIVE_SENDERS sorted set in redis."""

    def __init__(self, session, redis):
        super().__init__()
        self.session = session
        self.redis = redis

    def fetch(self, start, end):
        """Return display strings for the senders ranked start..end (inclusive)."""
        ranked = self.redis.zrange(Set.ACTIVE_SENDERS, start, end, withscores=True)
        return ['"%s" with %i messages' % (name, count) for name, count in ranked]

    def items_count(self):
        """Total number of active senders (the sorted set's cardinality)."""
        return self.redis.zcard(Set.ACTIVE_SENDERS)
|
15,389 | 8628e29a5798e85e9bef24c36071fad47e34fbac | # Generated by Django 3.1.2 on 2020-10-08 19:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import provider.models
class Migration(migrations.Migration):
    """Initial schema for the provider app (auto-generated by Django 3.1.2).

    Creates the movie/series catalog tables plus free and paid video variants,
    their tag tables, and sub-category link tables. Auto-generated migrations
    should be regenerated via makemigrations rather than edited by hand.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Catalog entry for a standalone movie, owned by a provider user.
        migrations.CreateModel(
            name='MovieDetails',
            fields=[
                ('movie_id', models.AutoField(primary_key=True, serialize=False)),
                ('movie_name', models.CharField(max_length=100)),
                ('description', models.TextField()),
                ('language', models.PositiveSmallIntegerField(choices=[(1, 'english'), (2, 'hindi'), (3, 'bengali'), (4, 'kannada'), (5, 'malayalam'), (6, 'marathi'), (7, 'tamil'), (8, 'telugu')])),
                ('date_of_creation', models.DateTimeField()),
                ('thumbnail_image', models.ImageField(upload_to=provider.models.movie_thumbnail_directory_path)),
                ('provider_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Movie Details',
            },
        ),
        # Catalog entry for a multi-season series, owned by a provider user.
        migrations.CreateModel(
            name='SeriesDetails',
            fields=[
                ('series_id', models.AutoField(primary_key=True, serialize=False)),
                ('series_name', models.CharField(max_length=100)),
                ('description', models.TextField()),
                ('language', models.PositiveSmallIntegerField(choices=[(1, 'english'), (2, 'hindi'), (3, 'bengali'), (4, 'kannada'), (5, 'malayalam'), (6, 'marathi'), (7, 'tamil'), (8, 'telugu')])),
                ('category', models.PositiveSmallIntegerField(choices=[(1, 'sports'), (2, 'entertainment')])),
                ('date_of_creation', models.DateTimeField()),
                ('thumbnail_image', models.ImageField(upload_to=provider.models.series_thumbnail_directory_path)),
                ('provider_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Series Details',
            },
        ),
        # One season of a series; carries its own verification workflow state.
        migrations.CreateModel(
            name='SeriesSeasonDetails',
            fields=[
                ('series_season_id', models.AutoField(primary_key=True, serialize=False)),
                ('season_no', models.PositiveSmallIntegerField()),
                ('description', models.TextField()),
                ('date_of_creation', models.DateTimeField()),
                ('thumbnail_image', models.ImageField(upload_to=provider.models.series_season_thumbnail_directory_path)),
                ('verification_status', models.PositiveSmallIntegerField(choices=[(1, 'pending'), (2, 'verified'), (3, 'rejected'), (4, 'not submitted')])),
                ('series_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.seriesdetails')),
            ],
            options={
                'verbose_name_plural': 'Series Season Details',
            },
        ),
        # Shared video identity; video_type discriminates the concrete table.
        migrations.CreateModel(
            name='Videos',
            fields=[
                ('video_id', models.AutoField(primary_key=True, serialize=False)),
                ('video_type', models.PositiveSmallIntegerField(choices=[(1, 'free'), (2, 'series'), (3, 'movie')])),
            ],
            options={
                'verbose_name_plural': 'Videos',
            },
        ),
        migrations.CreateModel(
            name='SeriesVideosTags',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('episode_no', models.PositiveSmallIntegerField()),
                ('tag_word', models.CharField(max_length=50)),
                ('video_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.videos')),
            ],
            options={
                'verbose_name_plural': 'Series Videos Tags',
            },
        ),
        # Paid series episode: stored in firebase, priced per video.
        migrations.CreateModel(
            name='SeriesVideos',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('video_name', models.CharField(max_length=100)),
                ('firebase_save_name', models.CharField(max_length=50)),
                ('firebase_token', models.CharField(max_length=50)),
                ('description', models.TextField()),
                ('thumbnail_image', models.ImageField(upload_to=provider.models.video_thumbnail_directory_path)),
                ('date_of_upload', models.DateTimeField()),
                ('date_of_release', models.DateTimeField()),
                ('episode_no', models.PositiveSmallIntegerField()),
                ('duration_of_video', models.IntegerField()),
                ('quality_of_video', models.PositiveSmallIntegerField(choices=[(1, '144'), (2, '240'), (3, '360'), (4, '480'), (5, '720'), (6, '1080')])),
                ('verification_status', models.PositiveSmallIntegerField(choices=[(1, 'pending'), (2, 'verified'), (3, 'rejected'), (4, 'not submitted')])),
                ('cost_of_video', models.PositiveSmallIntegerField()),
                ('series_season_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.seriesseasondetails')),
                ('video_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.videos')),
            ],
            options={
                'verbose_name_plural': 'Series Videos',
            },
        ),
        migrations.CreateModel(
            name='SeriesSubCategories',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sub_category', models.PositiveSmallIntegerField(choices=[(1, 'cricket'), (2, 'football'), (3, 'tennis'), (4, 'martial arts'), (5, 'esports'), (6, 'hockey'), (7, 'badminton'), (8, 'wrestling'), (9, 'kabaddi'), (10, 'table tennis'), (11, 'action'), (12, 'adventure'), (13, 'animation'), (14, 'comedy'), (15, 'crime'), (16, 'drama'), (17, 'horror'), (18, 'romance'), (19, 'thriller')])),
                ('series_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.seriesdetails')),
            ],
            options={
                'verbose_name_plural': 'Series Sub Categories',
            },
        ),
        migrations.CreateModel(
            name='MovieVideoTags',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tag_word', models.CharField(max_length=50)),
                ('video_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.videos')),
            ],
            options={
                'verbose_name_plural': 'Movie Video Tags',
            },
        ),
        # Paid movie video: stored in firebase, priced per video.
        migrations.CreateModel(
            name='MovieVideo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('video_name', models.CharField(max_length=100)),
                ('description', models.TextField()),
                ('firebase_save_name', models.CharField(max_length=50)),
                ('firebase_token', models.CharField(max_length=50)),
                ('thumbnail_image', models.ImageField(upload_to=provider.models.video_thumbnail_directory_path)),
                ('date_of_upload', models.DateTimeField()),
                ('date_of_release', models.DateTimeField()),
                ('duration_of_video', models.IntegerField()),
                ('quality_of_video', models.PositiveSmallIntegerField(choices=[(1, '144'), (2, '240'), (3, '360'), (4, '480'), (5, '720'), (6, '1080')])),
                ('verification_status', models.PositiveSmallIntegerField(choices=[(1, 'pending'), (2, 'verified'), (3, 'rejected'), (4, 'not submitted')])),
                ('cost_of_video', models.PositiveSmallIntegerField()),
                ('movie_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.moviedetails')),
                ('video_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.videos')),
            ],
            options={
                'verbose_name_plural': 'Movie Video',
            },
        ),
        migrations.CreateModel(
            name='MovieSubCategories',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sub_category', models.PositiveSmallIntegerField(choices=[(1, 'cricket'), (2, 'football'), (3, 'tennis'), (4, 'martial arts'), (5, 'esports'), (6, 'hockey'), (7, 'badminton'), (8, 'wrestling'), (9, 'kabaddi'), (10, 'table tennis'), (11, 'action'), (12, 'adventure'), (13, 'animation'), (14, 'comedy'), (15, 'crime'), (16, 'drama'), (17, 'horror'), (18, 'romance'), (19, 'thriller')])),
                ('movie_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.moviedetails')),
            ],
            options={
                'verbose_name_plural': 'Movie Sub Categories',
            },
        ),
        migrations.CreateModel(
            name='FreeSeriesVideosTags',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tag_word', models.CharField(max_length=50)),
                ('video_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.videos')),
            ],
            options={
                'verbose_name_plural': 'Free Series Videos Tags',
            },
        ),
        # Free series episode: same shape as SeriesVideos minus cost/episode_no.
        migrations.CreateModel(
            name='FreeSeriesVideos',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('video_name', models.CharField(max_length=100)),
                ('firebase_save_name', models.CharField(max_length=50)),
                ('firebase_token', models.CharField(max_length=50)),
                ('description', models.TextField()),
                ('thumbnail_image', models.ImageField(upload_to=provider.models.video_thumbnail_directory_path)),
                ('date_of_upload', models.DateTimeField()),
                ('date_of_release', models.DateTimeField()),
                ('duration_of_video', models.IntegerField()),
                ('quality_of_video', models.PositiveSmallIntegerField(choices=[(1, '144'), (2, '240'), (3, '360'), (4, '480'), (5, '720'), (6, '1080')])),
                ('verification_status', models.PositiveSmallIntegerField(choices=[(1, 'pending'), (2, 'verified'), (3, 'rejected'), (4, 'not submitted')])),
                ('series_season_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.seriesseasondetails')),
                ('video_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.videos')),
            ],
            options={
                'verbose_name_plural': 'Free Series Videos',
            },
        ),
        migrations.CreateModel(
            name='FreeMovieVideoTags',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tag_word', models.CharField(max_length=50)),
                ('video_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.videos')),
            ],
            options={
                'verbose_name_plural': 'Free Movie Video Tags',
            },
        ),
        # Free movie video: same shape as MovieVideo minus cost.
        migrations.CreateModel(
            name='FreeMovieVideo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('video_name', models.CharField(max_length=100)),
                ('description', models.TextField()),
                ('firebase_save_name', models.CharField(max_length=50)),
                ('firebase_token', models.CharField(max_length=50)),
                ('thumbnail_image', models.ImageField(upload_to=provider.models.video_thumbnail_directory_path)),
                ('date_of_upload', models.DateTimeField()),
                ('date_of_release', models.DateTimeField()),
                ('duration_of_video', models.IntegerField()),
                ('quality_of_video', models.PositiveSmallIntegerField(choices=[(1, '144'), (2, '240'), (3, '360'), (4, '480'), (5, '720'), (6, '1080')])),
                ('verification_status', models.PositiveSmallIntegerField(choices=[(1, 'pending'), (2, 'verified'), (3, 'rejected'), (4, 'not submitted')])),
                ('movie_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.moviedetails')),
                ('video_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='provider.videos')),
            ],
            options={
                'verbose_name_plural': 'Free Movie Video',
            },
        ),
    ]
|
15,390 | 77f61da573c98810b5aad69a4258d20a127a18f5 | import enum
class NAME_TYPE(enum.Enum):
    """Kerberos principal-name types (RFC 4120 section 6.2).

    Negative values are vendor extensions (Microsoft / Heimdal).
    """
    UNKNOWN = 0  # Name type not known
    PRINCIPAL = 1  # Just the name of the principal, as for ordinary users
    SRV_INST = 2  # Service and other unique instance (krbtgt)
    SRV_HST = 3  # Service with host name as instance
    SRV_XHST = 4  # Service with host as remaining components
    UID = 5  # Unique ID
    X500_PRINCIPAL = 6  # PKINIT
    SMTP_NAME = 7  # Name in form of SMTP email name
    ENTERPRISE_PRINCIPAL = 10  # Windows 2000 UPN
    WELLKNOWN = 11  # Wellknown
    ENT_PRINCIPAL_AND_ID = -130  # Windows 2000 UPN and SID
    MS_PRINCIPAL = -128  # NT 4 style name
    MS_PRINCIPAL_AND_ID = -129  # NT style name and SID
    NTLM = -1200  # NTLM name, realm is domain
class MESSAGE_TYPE(enum.Enum):
    """Kerberos protocol message type numbers (RFC 4120 section 7.5.7)."""
    KRB_AS_REQ = 10  # initial authentication request
    KRB_AS_REP = 11  # response to KRB_AS_REQ
    KRB_TGS_REQ = 12  # ticket-granting-service request
    KRB_TGS_REP = 13  # response to KRB_TGS_REQ
    KRB_AP_REQ = 14  # application request to server
    KRB_AP_REP = 15  # response to mutual-auth KRB_AP_REQ
    KRB_SAFE = 20  # integrity-protected application message
    KRB_PRIV = 21  # encrypted application message
    KRB_CRED = 22  # encrypted message to forward credentials
    KRB_ERROR = 30  # error response
class EncryptionType(enum.Enum):
    """Kerberos encryption-type numbers (RFC 3961 / RFC 4120).

    Negative values are private-use / Heimdal-internal assignments.
    """
    NULL = 0
    DES_CBC_CRC = 1
    DES_CBC_MD4 = 2
    DES_CBC_MD5 = 3
    DES3_CBC_MD5 = 5
    OLD_DES3_CBC_SHA1 = 7
    SIGN_DSA_GENERATE = 8
    ENCRYPT_RSA_PRIV = 9
    ENCRYPT_RSA_PUB = 10
    DES3_CBC_SHA1 = 16  # with key derivation
    AES128_CTS_HMAC_SHA1_96 = 17
    AES256_CTS_HMAC_SHA1_96 = 18
    ARCFOUR_HMAC_MD5 = 23
    ARCFOUR_HMAC_MD5_56 = 24
    ENCTYPE_PK_CROSS = 48
    ARCFOUR_MD4 = -128
    ARCFOUR_HMAC_OLD = -133
    ARCFOUR_HMAC_OLD_EXP = -135
    DES_CBC_NONE = -0x1000
    DES3_CBC_NONE = -0x1001
    DES_CFB64_NONE = -0x1002
    DES_PCBC_NONE = -0x1003
    DIGEST_MD5_NONE = -0x1004  # private use, lukeh@padl.com
    CRAM_MD5_NONE = -0x1005  # private use, lukeh@padl.com
class PaDataType(enum.Enum):
    """Kerberos pre-authentication (PA-DATA) type numbers (RFC 4120 7.5.2 + extensions).

    Several numbers are deliberately reused by the protocol registries
    (e.g. TGS_REQ/AP_REQ = 1, AS_CHECKSUM/PK_AS_09_BINDING = 132).  In a
    Python Enum the later name with a duplicate value becomes an *alias*
    of the first member, which is the desired behaviour here.
    """
    NONE = 0
    TGS_REQ = 1
    AP_REQ = 1  # alias of TGS_REQ
    ENC_TIMESTAMP = 2
    PW_SALT = 3
    ENC_UNIX_TIME = 5
    SANDIA_SECUREID = 6
    SESAME = 7
    OSF_DCE = 8
    CYBERSAFE_SECUREID = 9
    AFS3_SALT = 10
    ETYPE_INFO = 11
    SAM_CHALLENGE = 12  # (sam/otp)
    SAM_RESPONSE = 13  # (sam/otp)
    PK_AS_REQ_19 = 14  # (PKINIT-19)
    PK_AS_REP_19 = 15  # (PKINIT-19)
    PK_AS_REQ_WIN = 15  # (PKINIT - old number); alias of PK_AS_REP_19
    PK_AS_REQ = 16  # (PKINIT-25)
    PK_AS_REP = 17  # (PKINIT-25)
    PA_PK_OCSP_RESPONSE = 18
    ETYPE_INFO2 = 19
    USE_SPECIFIED_KVNO = 20
    SVR_REFERRAL_INFO = 20  # old MS referral number; alias of USE_SPECIFIED_KVNO
    SAM_REDIRECT = 21  # (sam/otp)
    GET_FROM_TYPED_DATA = 22
    SAM_ETYPE_INFO = 23
    SERVER_REFERRAL = 25
    ALT_PRINC = 24  # (crawdad@fnal.gov)
    SAM_CHALLENGE2 = 30  # (kenh@pobox.com)
    SAM_RESPONSE2 = 31  # (kenh@pobox.com)
    PA_EXTRA_TGT = 41  # Reserved extra TGT
    TD_KRB_PRINCIPAL = 102  # PrincipalName
    PK_TD_TRUSTED_CERTIFIERS = 104  # PKINIT
    PK_TD_CERTIFICATE_INDEX = 105  # PKINIT
    TD_APP_DEFINED_ERROR = 106  # application specific
    TD_REQ_NONCE = 107  # INTEGER
    TD_REQ_SEQ = 108  # INTEGER
    TD_DH_PARAMETERS = 109  # PKINIT
    PA_PAC_REQUEST = 128  # jbrezak@exchange.microsoft.com
    FOR_USER = 129  # MS-KILE
    FOR_X509_USER = 130  # MS-KILE
    FOR_CHECK_DUPS = 131  # MS-KILE
    AS_CHECKSUM = 132  # MS-KILE
    PK_AS_09_BINDING = 132  # client tells KDC it supports asCheckSum in PK_AS_REP; alias
    CLIENT_CANONICALIZED = 133  # referrals
    FX_COOKIE = 133  # krb-wg-preauth-framework; alias of CLIENT_CANONICALIZED
    AUTHENTICATION_SET = 134  # krb-wg-preauth-framework
    AUTH_SET_SELECTED = 135  # krb-wg-preauth-framework
    FX_FAST = 136  # krb-wg-preauth-framework
    FX_ERROR = 137  # krb-wg-preauth-framework
    ENCRYPTED_CHALLENGE = 138  # krb-wg-preauth-framework
    OTP_CHALLENGE = 141  # (gareth.richards@rsa.com)
    OTP_REQUEST = 142  # (gareth.richards@rsa.com)
    OTP_CONFIRM = 143  # (gareth.richards@rsa.com)
    OTP_PIN_CHANGE = 144  # (gareth.richards@rsa.com)
    EPAK_AS_REQ = 145
    EPAK_AS_REP = 146
    PKINIT_KX = 147  # krb-wg-anon
    PKU2U_NAME = 148  # zhu-pku2u
    REQ_ENC_PA_REP = 149
    SPAKE = 151  # https://datatracker.ietf.org/doc/draft-ietf-kitten-krb-spake-preauth/?include_text=1
    SUPPORTED_ETYPES = 165  # MS-KILE
# Full list of key_usage numbers: https://tools.ietf.org/html/rfc4120#section-7.5.1
#
class KEY_USAGE(enum.Enum):
    """Kerberos key-usage numbers (RFC 4120 section 7.5.1 plus later extensions).

    Full list: https://tools.ietf.org/html/rfc4120#section-7.5.1
    Negative values are non-standard (Heimdal) assignments.
    Fix: the final member carried a stray trailing token ("SPAKE = 65 |")
    from a bad merge, which made the module unparsable.
    """
    AS_REQ_PA_ENC_TS = 1
    KDC_REP_TICKET = 2
    AS_REP_ENCPART = 3
    TGS_REQ_AD_SESSKEY = 4
    TGS_REQ_AD_SUBKEY = 5
    TGS_REQ_AUTH_CKSUM = 6
    TGS_REQ_AUTH = 7
    TGS_REP_ENCPART_SESSKEY = 8
    TGS_REP_ENCPART_SUBKEY = 9
    AP_REQ_AUTH_CKSUM = 10
    AP_REQ_AUTH = 11
    AP_REP_ENCPART = 12
    KRB_PRIV_ENCPART = 13
    KRB_CRED_ENCPART = 14
    KRB_SAFE_CKSUM = 15
    APP_DATA_ENCRYPT = 16
    APP_DATA_CKSUM = 17
    KRB_ERROR_CKSUM = 18
    AD_KDCISSUED_CKSUM = 19
    AD_MTE = 20
    AD_ITE = 21
    GSS_TOK_MIC = 22
    GSS_TOK_WRAP_INTEG = 23
    GSS_TOK_WRAP_PRIV = 24
    PA_SAM_CHALLENGE_CKSUM = 25
    # PA_SAM_CHALLENGE_TRACKID (26) and PA_SAM_RESPONSE (27) conflict with the
    # two S4U numbers below; only the S4U names are defined here.
    PA_S4U_X509_USER_REQUEST = 26  # defined in [MS-SFU]
    PA_S4U_X509_USER_REPLY = 27
    AD_SIGNEDPATH = -21
    IAKERB_FINISHED = 42
    PA_PKINIT_KX = 44
    PA_OTP_REQUEST = 45
    FAST_REQ_CHKSUM = 50
    FAST_ENC = 51
    FAST_REP = 52
    FAST_FINISHED = 53
    ENC_CHALLENGE_CLIENT = 54
    ENC_CHALLENGE_KDC = 55
    AS_REQ = 56
    CAMMAC = 64
    SPAKE = 65
def chek(k, li):
    """Recursive binary search for *k* in the sorted list *li*.

    Side effects on module globals:
      result -- incremented by 1 when k is found at a probed midpoint.
      turn   -- +1 for each step into the left half, -1 for each step right.

    Fix: the ``def`` line had merge residue fused onto it, breaking parsing.

    NOTE(review): because opposite moves cancel (+1 then -1 nets 0), the
    caller's ``turn > 1 or turn < -1`` test does not reject sequences such
    as left,left,right.  If the intent is "no two consecutive moves in the
    same direction", the direction history must be tracked instead —
    confirm against the problem statement before changing the scoring.
    NOTE(review): recursing with a k absent from li would eventually index
    an empty list; callers only pass values known to be in li.
    """
    global turn, result
    mid = (len(li) - 1) // 2
    if li[mid] == k:
        result += 1
        return
    else:
        # Recurse into the half that can still contain k.
        left = li[:mid]
        right = li[mid + 1:]
        if k < li[mid]:
            turn += 1
            chek(k, left)
        elif k > li[mid]:
            turn -= 1
            chek(k, right)
# Per-test-case driver: count common values of A and B whose binary-search
# path stays within the allowed drift, as measured by the global `turn`.
T = int(input())
for tc in range(1, T + 1):
    N, M = map(int, input().split())
    A = sorted(map(int, input().split()))
    B = list(map(int, input().split()))
    result = 0
    # O(1) membership via a set instead of the original O(M) list scan
    # per element of A (was O(N*M) overall).
    b_set = set(B)
    # Values of A that also appear in B, kept in A's sorted order.
    # (The original `flag` guard was dead code: an empty `num` already
    # makes the loop below a no-op.)
    num = [value for value in A if value in b_set]
    for z in num:
        turn = 0
        chek(z, A)
        # A net drift of more than one step either way disqualifies z.
        if turn > 1 or turn < -1:
            result -= 1
    print('#{} {}'.format(tc, result))
15,392 | 646afc9465e77a32ba248f84e76476df4345061c | import pygame
import math
#Display
pygame.init()
WIDTH,HEIGHT = 800,500
win = pygame.display.set_mode((WIDTH,HEIGHT))
pygame.display.set_caption("Hangman")
#adding the images
# hangman0.png .. hangman6.png: one picture per number of wrong guesses.
images = []
for i in range(7):
    image = pygame.image.load("hangman"+ str(i)+".png")
    images.append(image)
#button letters location
RADIUS = 20
GAP = 15
# Centre a 13-column grid of letter buttons horizontally.
startx = round((WIDTH -(RADIUS*2 +GAP)*13)/2)
starty = 400
# Each entry: [x, y, character, still_clickable]
letters = []
ascii_letter = 65  # ord('A')
for i in range(26):
    x = startx + GAP*2 + ((RADIUS*2 + GAP)*(i%13))
    y = starty +((i//13)*(GAP+RADIUS*2))
    letters.append([x,y,chr(ascii_letter+i),True])
# Extra pseudo-button shown only on the word-entry screen.
letters.append([WIDTH*4//5,HEIGHT*3//5,"ENTER",True])
#font
LETTER_FONT = pygame.font.SysFont("comicsans",40)
WORD_FONT = pygame.font.SysFont("comicsans",60)
TITLE_FONT = pygame.font.SysFont("comicsans",80)
#variables
hangman_status = 0  # number of wrong guesses; 6 means lost
word = ""
word = word.upper()  # NOTE(review): redundant while word is still empty
guessed = []  # letters clicked so far during the guessing phase
FPS = 60
clock = pygame.time.Clock()
#colors
WHITE= (255,255,255)
BLACK = (0,0,0)
def drawGame():
    """Render the guessing screen: title, masked word, letter buttons, gallows."""
    win.fill(WHITE)
    # Title, centered horizontally.
    title = TITLE_FONT.render("HANGMAN", 1, BLACK)
    win.blit(title, ((WIDTH//2 - title.get_width()//2), 20))
    # Word with every unguessed letter masked as an underscore.
    masked = "".join(ch + " " if ch in guessed else "_ " for ch in word)
    win.blit(WORD_FONT.render(masked, 1, BLACK), (400, 200))
    # One outlined circle plus centred glyph per still-clickable button.
    for x, y, ltr, visible in letters:
        if not visible:
            continue
        pygame.draw.circle(win, BLACK, (x, y), RADIUS, 3)
        glyph = LETTER_FONT.render(ltr, 1, BLACK)
        win.blit(glyph, (x - glyph.get_width()//2, y - glyph.get_height()//2))
    # Gallows drawing reflects the current number of wrong guesses.
    win.blit(images[hangman_status], (100, 100))
    pygame.display.update()
#Function to display final message on screen
def display_message(msg):
    """Flash *msg* centred on a blank screen, holding it for three seconds."""
    pygame.time.delay(500)
    win.fill(WHITE)
    surface = WORD_FONT.render(msg, 1, BLACK)
    position = (WIDTH//2 - surface.get_width()//2,
                HEIGHT//2 - surface.get_height()//2)
    win.blit(surface, position)
    pygame.display.update()
    pygame.time.delay(3000)
#drawing home page
def drawHome():
    """Render the word-entry screen: typed word, letter buttons and ENTER."""
    win.fill(WHITE)
    title = TITLE_FONT.render("HANGMAN", 1, BLACK)
    win.blit(title, ((WIDTH//2 - title.get_width()//2), 20))
    # The word composed so far is shown in clear while typing it.
    win.blit(WORD_FONT.render(word, 1, BLACK), (400, 200))
    for x, y, ltr, visible in letters:
        if ltr == "ENTER":
            # ENTER is a filled rectangle with white text.
            pygame.draw.rect(win, BLACK, (x - 10, y - 10, 120, 50))
            win.blit(LETTER_FONT.render(ltr, 1, WHITE), (x, y))
        else:
            pygame.draw.circle(win, BLACK, (x, y), RADIUS, 3)
            glyph = LETTER_FONT.render(ltr, 1, BLACK)
            win.blit(glyph, (x - glyph.get_width()//2, y - glyph.get_height()//2))
    win.blit(images[hangman_status], (100, 100))
    pygame.display.update()
def resetGame():
    """Return everything to the word-entry screen: fresh buttons, no word, no misses."""
    global hasWord, word, hangman_status
    hasWord = False
    word = ""
    for entry in letters:
        entry[3] = True  # every button clickable again
    hangman_status = 0
    guessed.clear()
#Game Loop
hasWord = False  # False: word-entry screen; True: guessing screen
run = True
while(run):
    clock.tick(FPS)
    if hasWord:
        # --- Guessing screen ---
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                run = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                m_x, m_y = pygame.mouse.get_pos()
                # checkButtonClicked(m_x,m_y,hangman_status)
                for letter in letters:
                    x,y,ltr,visible = letter
                    if visible:
                        # A click counts when it lands inside the button circle.
                        dis = math.sqrt((m_x - x)**2 + (m_y - y)**2)
                        if dis<RADIUS:
                            letter[3] = False
                            guessed.append(ltr)
                            if ltr not in word:
                                hangman_status+=1
        drawGame()
        # Won once every letter of the word has been guessed.
        won = True
        for letter in word:
            if letter not in guessed:
                won = False
                break
        if won:
            display_message("You won!")
            resetGame()
        # Six wrong guesses completes the gallows.
        if hangman_status == 6:
            display_message("You lost! The word was "+ word)
            resetGame()
    # checking if button clicked for homepage
    else:
        # --- Word-entry screen ---
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                run = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                m_x, m_y = pygame.mouse.get_pos()
                for letter in letters:
                    x,y,ltr,visible = letter
                    if visible:
                        # ENTER uses its rectangle's hit-box; letters use circles.
                        if ltr=="ENTER" and ((x-10)<=m_x<= (x+110) and (y-10<=m_y<=y+40)):
                            hasWord = True
                            letter[3] = False
                        else:
                            dis = math.sqrt((m_x - x)**2 + (m_y - y)**2)
                            if dis<RADIUS:
                                word += ltr
        drawHome()
pygame.quit()
15,393 | 3a59d631b3c3d35e08659929bbdcbde424ee02b5 | import bpy
import logging
import os
import json
from bpy_extras.io_utils import ExportHelper, ImportHelper
from bpy.app.handlers import persistent
import traceback
import time
import ctypes
import sys
import platform
import random
import math
import subprocess
import datetime
from bpy.props import EnumProperty, StringProperty, BoolVectorProperty
from . import facs_process as facs
logger = logging.getLogger(__name__)
# Global sliders
# True once keyframes have been inserted on the FACS sliders (guards re-runs).
global_sliders_set = False
# Maps slider bone name -> list of frames that received keyframes.
global_sliders = {}
# Set through set_init_state(); True once the facs database is initialised.
init_state = False
# Experimental PDM plotting: keyframe every frame instead of only extrema.
plot_all = False
def set_rotation_type(rtype):
    """Set the 3D-view transform pivot point, validating against the known modes."""
    valid_pivots = ('BOUNDING_BOX_CENTER', 'CURSOR', 'INDIVIDUAL_ORIGINS', 'MEDIAN_POINT', 'ACTIVE_ELEMENT')
    if rtype not in valid_pivots:
        raise RuntimeError(rtype, 'not a valid rotation type. Should be: ', valid_pivots)
    bpy.context.scene.tool_settings.transform_pivot_point = rtype
def get_rotation_type():
    """Return the scene's current transform pivot-point setting (e.g. 'CURSOR')."""
    return bpy.context.scene.tool_settings.transform_pivot_point
def get_override(area_type, region_type):
    """Return a context-override dict for the first matching (area, region) pair.

    Raises RuntimeError when no such area/region is open on the current screen.
    """
    for candidate_area in bpy.context.screen.areas:
        if candidate_area.type != area_type:
            continue
        for candidate_region in candidate_area.regions:
            if candidate_region.type == region_type:
                return {'area': candidate_area, 'region': candidate_region}
    #error message if the area or region wasn't found
    raise RuntimeError("Wasn't able to find", region_type," in area ", area_type,
                       "\n Make sure it's open while executing script.")
def rotate_obj_quaternion(obj, axis='Z', value=0.0):
    """Rotate *obj* about the world origin (3D cursor) by *value* radians on *axis*.

    The pivot mode is switched to CURSOR for the transform and restored
    afterwards.  An unknown axis name is a silent no-op (though the cursor
    has already been re-homed by then, matching the original ordering).
    """
    bpy.context.scene.cursor.location = (0, 0, 0)
    if axis not in ('X', 'Y', 'Z'):
        return
    # Make obj the only selected object so the transform affects just it.
    for other in bpy.data.objects:
        other.select_set(False)
    obj.select_set(True)
    saved_pivot = get_rotation_type()
    set_rotation_type('CURSOR')
    view_override = get_override('VIEW_3D', 'WINDOW')
    obj.rotation_mode = 'QUATERNION'
    bpy.ops.transform.rotate(view_override, value=value, orient_axis=axis,
                             orient_type='CURSOR')
    set_rotation_type(saved_pivot)
def set_init_state(state):
    """Record the add-on init flag; (re)load the FACS database when enabling."""
    global init_state
    init_state = state
    if state:
        facs.init_database()
class FACE_OT_clear_animation(bpy.types.Operator):
    """Strip all keyframes from the FACS sliders and the MB rig, then reset state."""
    bl_idname = "yafr.del_animation"
    bl_label = "Delete Animation"
    bl_description = "Clear Facial Animation"

    def execute(self, context):
        global global_sliders_set
        global global_sliders
        global init_state
        # Clear every slider empty's animation data.
        for scene_obj in bpy.data.objects:
            if 'facs_rig_slider_' in scene_obj.name:
                scene_obj.animation_data_clear()
        # Clear the MB-Lab skeleton too, when present.
        rig = get_mb_rig()
        if rig:
            rig.animation_data_clear()
        global_sliders_set = False
        global_sliders = {}
        facs.reset_database()
        return {'FINISHED'}
def get_mb_rig():
    """Return the first armature whose data name is a known MB-Lab skeleton, else None."""
    known_skeletons = ('MBLab_skeleton_muscle_ik', 'MBLab_skeleton_base_ik',
                       'MBLab_skeleton_muscle_fk', 'MBLab_skeleton_base_fk')
    return next((candidate for candidate in bpy.data.objects
                 if candidate.type == 'ARMATURE' and candidate.data.name in known_skeletons),
                None)
def process_csv_file(csv, ws, po):
    """Feed an OpenFace CSV through the facs processor.

    ws/po are the smoothing window size and polynomial order.
    Returns (ok, message); ok is False when parsing raised or yielded nothing.
    """
    try:
        parsed = facs.process_openface_csv(csv, ws, po)
    except Exception:
        failure = 'failed to process results\n' + traceback.format_exc()
        logger.critical(failure)
        return False, failure
    if parsed:
        return True, 'Success'
    return False, 'Failed to process results'
class FACE_OT_animate(bpy.types.Operator):
    """Create facial animation on the FACS slider rig from OpenFace output.

    Works either from an existing OpenFace CSV (scene yafr_csvfile) or by
    running the bundled FeatureExtraction binary on a video file first.
    """
    bl_idname = "yafr.animate_face"
    bl_label = "Animate Face"
    bl_description = "Create Facial Animation"

    def run_openface(self, openface, video):
        """Run OpenFace FeatureExtraction on *video*.

        Results land in "<video basename>_processed" next to the binary.
        Returns (returncode, stdout text, output directory).
        """
        outdir = os.path.join(os.path.dirname(openface),
                              os.path.splitext(os.path.basename(video))[0] + "_processed")
        if not os.path.exists(outdir):
            os.makedirs(outdir)
        rc = subprocess.run([openface, '-verbose', '-f', video, '-out_dir', outdir],
                            stdout=subprocess.PIPE)
        return rc.returncode, rc.stdout.decode(), outdir

    def set_keyframes_hr(self, result, array, attr, head_bone, intensity):
        """Keyframe one quaternion component of the head bone.

        attr is 'Rx'/'Ry'/'Rz' (quaternion indices 1-3); *array* holds the
        frame numbers to key and *result* the per-frame angles in radians,
        amplified proportionally by *intensity*.
        """
        rotation = {'Rx': 1, 'Ry': 2, 'Rz': 3}
        for m in array:
            # angle in radians
            val = result[m] + (result[m] * intensity)
            head_bone.rotation_quaternion[rotation[attr]] = val
            head_bone.keyframe_insert('rotation_quaternion', index=rotation[attr], frame=m)

    def get_head_bone(self, mb_rig):
        """Select *mb_rig*, switch to POSE mode and return (head bone, message).

        Returns (None, reason) when the rig has no 'head' pose bone.
        """
        for obj in bpy.data.objects:
            obj.select_set(False)
        mb_rig.select_set(True)
        bpy.context.view_layer.objects.active = mb_rig
        bpy.ops.object.mode_set(mode='POSE')
        head_bone = None
        msg = "Success"
        try:
            head_bone = bpy.context.object.pose.bones['head']
        except KeyError:  # was a bare except; only a missing bone is expected here
            msg = "no head bone found"
            return None, msg
        return head_bone, msg

    def set_keyframes(self, result, array, slider_bone, intensity, vgi, hgi):
        """Keyframe the X location of *slider_bone* at the frames listed in *array*.

        AU sliders map OpenFace's 0-5 intensity onto the 0-0.377 slider
        travel; gaze (GZ) sliders map radians onto +/-0.189 travel.
        vgi/hgi amplify vertical/horizontal gaze independently.
        """
        global global_sliders
        if bpy.context.scene.yafr_start_frame > 0:
            frame_offset = bpy.context.scene.yafr_start_frame - 1
        else:
            frame_offset = 0
        for m in array:
            if not 'GZ' in slider_bone.name:
                value = (result[m] / 5) * 0.377
            else:
                # normalize the gaze values to fit in the -0.189 - 0.189
                # range of the gaze slider
                # TODO: if we're going to fit that with other rig systems
                # we need to be a bit smarter than this.
                value = result[m]
                if value > 0:
                    value = min(value, 1)
                    value = value * 0.189
                elif value < 0:
                    value = max(value, -1)
                    value = value * 0.189
                gaze_intensity = hgi
                # intensify the gaze motion independently
                if 'GZ0V' in slider_bone.name:
                    # reverse sign to get the correct up/down motion
                    #value = -value
                    gaze_intensity = vgi
                if value > 0:
                    value = value + (value * gaze_intensity)
                elif value < 0:
                    value = value - ((value * -1) * gaze_intensity)
            if intensity > 0 and not 'GZ' in slider_bone.name:
                # don't accept negative values
                if value < 0:
                    value = -value
                value = value + (value * intensity)
            slider_bone.location[0] = value
            slider_bone.keyframe_insert(data_path="location",
                                        frame=m + frame_offset, index=0)
            global_sliders[slider_bone.name].append(m)

    def set_every_keyframe(self, result, slider_bone, intensity):
        """Keyframe every frame of *result* (unused alternative to set_keyframes)."""
        frame = 1
        for m in result:
            if not 'GZ' in slider_bone.name:
                value = (m / 5) * 0.377
            else:
                value = m
            if intensity > 0:
                value = value + (value * (intensity / 100))
            slider_bone.location[0] = value
            if not 'GZ' in slider_bone.name:
                slider_bone.keyframe_insert(data_path="location", frame=frame, index=0)
            frame = frame + 1

    def set_animation_prereq(self, scn):
        """Select the FACS rig (named in scn.yafr_facs_rig or the MB-Lab default)
        and put it in POSE mode.  Returns False when the rig is not found."""
        if not scn.yafr_facs_rig:
            facs_rig = bpy.data.objects.get('MBLab_skeleton_facs_rig')
        else:
            facs_rig = bpy.data.objects.get(scn.yafr_facs_rig)
        if not facs_rig:
            return False
        # select the rig and put it in POSE mode
        for obj in bpy.data.objects:
            obj.select_set(False)
        facs_rig.select_set(True)
        bpy.context.view_layer.objects.active = facs_rig
        bpy.ops.object.mode_set(mode='POSE')
        return True

    def animate_face(self, mouth, head, animation_data, intensity, vgi, hgi):
        """Walk the processed OpenFace channels, keyframing sliders and head bone.

        mouth/head toggle the mouth AUs and the head-rotation channels.
        """
        global global_sliders_set
        if not self.set_animation_prereq(bpy.context.scene):
            print("Animation prerequisites not set")
            return
        # animation already done
        if global_sliders_set:
            print("Animation already set. Delete animation first")
            return
        mouth_aus = ['AU10', 'AU12', 'AU13', 'AU14', 'AU15', 'AU16', 'AU17', 'AU20', 'AU23']
        for key, value in animation_data.items():
            # don't use specific AUs if mouth is not selected
            # NOTE(review): str.strip('_r') removes *characters* from both
            # ends; it happens to work for the 'AUnn_r' / 'pose_R?' keys used
            # here but would misbehave on other names.
            if key.strip('_r') in mouth_aus and not mouth:
                continue
            if 'pose_' in key and not head:
                continue
            slider_name = ''
            head_animation = False
            if 'AU' in key:
                slider_name = 'facs_rig_slider_' + key.strip('_r')
            elif key == 'gaze_angle_x':
                slider_name = 'facs_rig_slider_GZ0H'
            elif key == 'gaze_angle_y':
                slider_name = 'facs_rig_slider_GZ0V'
            elif 'pose_R' in key:
                # only look at the head rotation for now
                head_animation = True
            else:
                continue
            result = value[facs.VALUES]
            maximas = value[facs.MAXIMAS]
            minimas = value[facs.MINIMAS]
            slider_bone = None
            mb_rig = None
            if head_animation:
                mb_rig = get_mb_rig()
                if not mb_rig:
                    msg = "no MB rig found"
                    logger.critical(msg)
                    self.report({'ERROR'}, msg)
                    return
                head_bone, msg = self.get_head_bone(mb_rig)
                if not head_bone:
                    logger.critical(msg)
                    self.report({'ERROR'}, msg)
                    return
                # Keyframe only at the smoothed curve's extrema.
                self.set_keyframes_hr(result, maximas, key.strip('pose_'), head_bone, intensity)
                self.set_keyframes_hr(result, minimas, key.strip('pose_'), head_bone, intensity)
            else:
                global_sliders[slider_name] = []
                slider_bone = bpy.context.object.pose.bones.get(slider_name)
                if not slider_bone:
                    logger.critical('slider %s not found', slider_name)
                    continue
                self.set_keyframes(result, maximas, slider_bone, intensity, vgi, hgi)
                self.set_keyframes(result, minimas, slider_bone, intensity, vgi, hgi)
                #self.set_every_keyframe(result, slider_bone, intensity, vgi, hgi)
        global_sliders_set = True

    def execute(self, context):
        """Operator entry point: resolve inputs, run/parse OpenFace, animate."""
        global global_sliders_set
        set_init_state(False)
        scn = context.scene
        dirname = os.path.dirname(os.path.realpath(__file__))
        openface = os.path.join(dirname, "openface", "FeatureExtraction")
        video = scn.yafr_videofile
        csv = scn.yafr_csvfile
        ws = scn.yafr_openface_ws
        po = scn.yafr_openface_polyorder
        intensity = scn.yafr_openface_au_intensity
        hgi = scn.yafr_openface_hgaze_intensity
        vgi = scn.yafr_openface_vgaze_intensity
        mouth = scn.yafr_openface_mouth
        head = scn.yafr_openface_head
        if global_sliders_set:
            self.report({'ERROR'}, "Delete current animation first")
            return {'FINISHED'}
        # Savitzky-Golay-style smoothing constraints.
        if po >= ws:
            msg = "polyorder must be less than window_length."
            logger.critical(msg)
            self.report({'ERROR'}, msg)
            return {'FINISHED'}
        if ws % 2 == 0:
            msg = "window size needs to be an odd number"
            logger.critical(msg)
            self.report({'ERROR'}, msg)
            return {'FINISHED'}
        # csv file provided use that instead of the video file
        if csv:
            if not os.path.isfile(csv):
                if not os.path.isfile(dirname + csv):
                    msg = "bad csv file provided " + csv
                    logger.critical(msg)
                    self.report({'ERROR'}, msg)
                    return {'FINISHED'}
                else:
                    csv = dirname + csv
            rc, msg = process_csv_file(csv, ws, po)
            # animate the data
            if rc:
                facs_data = facs.get_facs_data()
                self.animate_face(mouth, head, facs_data, intensity, vgi, hgi)
                return {'FINISHED'}
            self.report({'ERROR'}, msg)
            return {'FINISHED'}
        # run openface on the videofile
        # TODO: check if openface is an executable and videofile is a video
        # file.
        if not os.path.isfile(openface):
            if not os.path.isfile(dirname + openface):
                msg = "Bad path to openFace: " + openface
                self.report({'ERROR'}, msg)
                return {'FINISHED'}
            else:
                openface = dirname + openface
        if not os.path.isfile(video):
            # try another tac
            if not os.path.isfile(dirname + video):
                msg = "Bad path to video file: " + video
                # FIX: report the detailed message built above (it was
                # constructed but never used; a generic string was shown).
                self.report({'ERROR'}, msg)
                return {'FINISHED'}
            else:
                video = dirname + video
        outdir = ''
        try:
            rc, output, outdir = self.run_openface(openface, video)
            if rc:
                # FIX: was 'ouput' — a NameError on every OpenFace failure.
                self.report({'ERROR'}, output)
                return {'FINISHED'}
        except Exception as e:
            logger.critical(e)
            msg = 'failed to run openface\n' + traceback.format_exc()
            self.report({'ERROR'}, msg)
            return {'FINISHED'}
        # process the csv file
        csv = os.path.join(outdir,
                           os.path.splitext(os.path.basename(video))[0] + '.csv')
        if not os.path.isfile(csv):
            msg = "Failed to process video. No csv file found: " + csv
            self.report({'ERROR'}, msg)
            return {'FINISHED'}
        # animate the data
        rc, msg = process_csv_file(csv, ws, po)
        if rc:
            facs_data = facs.get_facs_data()
            self.animate_face(mouth, head, facs_data, intensity, vgi, hgi)
            #frame_end = facs_data['frame'][facs.VALUES][-1]
            #bpy.context.scene.frame_end = frame_end
            return {'FINISHED'}
        self.report({'ERROR'}, msg)
        return {'FINISHED'}
class FACE_OT_pdm_del_animate(bpy.types.Operator):
    """Select every pdm2d_/pdm3d_ empty, clear its animation, and delete the lot."""
    bl_idname = "yafr.del_pdm_animation"
    bl_label = "Delete"
    bl_description = "Experimental feature"

    def execute(self, context):
        # Start from an empty selection so only PDM empties get deleted.
        for scene_obj in bpy.data.objects:
            scene_obj.select_set(False)
        for scene_obj in bpy.data.objects:
            name = scene_obj.name
            if 'pdm2d_' not in name and 'pdm3d_' not in name:
                continue
            scene_obj.animation_data_clear()
            bpy.context.view_layer.objects.active = scene_obj
            scene_obj.select_set(True)
        # One delete call removes everything selected above.
        bpy.ops.object.delete(use_global=True)
        return {'FINISHED'}
class FACE_OT_pdm3d_rm_rotation(bpy.types.Operator):
    # Experimental: counter-rotate the plotted 3D PDM empties by the per-frame
    # head pose so landmarks can be viewed in a head-stabilised space.
    bl_idname = "yafr.rm_pdm3d_rotation"
    bl_label = "Remove Rotation"
    bl_description = "Experimental feature"

    def rotate_obj(self, obj, rx, ry, rz):
        """Rotate *obj* by per-frame Euler angles and keyframe its location.

        rx/ry/rz are parallel per-frame angle lists; mismatched lengths abort.
        """
        if len(rx) != len(ry) or len(rx) != len(rz):
            self.report({'ERROR'}, "bad rotation information")
            return
        for f in range(0, len(rx)):
            rotate_obj_quaternion(obj, 'X', rx[f])
            rotate_obj_quaternion(obj, 'Y', ry[f])
            rotate_obj_quaternion(obj, 'Z', rz[f])
            obj.keyframe_insert(data_path="location", frame=f)
            # Alternative direct quaternion keyframing, kept for reference:
            #obj.keyframe_insert(data_path="rotation_quaternion", frame=f)
            #obj.rotation_mode = 'QUATERNION'
            #obj.rotation_quaternion[1] = rx[f]
            #obj.rotation_quaternion[2] = ry[f] * -1
            #obj.rotation_quaternion[3] = rz[f] * -1
            #obj.keyframe_insert(data_path="rotation_quaternion", frame=f, index=1)
            #obj.keyframe_insert(data_path="rotation_quaternion", frame=f, index=2)
            #obj.keyframe_insert(data_path="rotation_quaternion", frame=f, index=3)

    def execute(self, context):
        """Apply the head-pose counter-rotation to the matching scene objects."""
        data = facs.get_facs_data()
        rx = data['pose_Rx'][facs.VALUES]
        ry = data['pose_Ry'][facs.VALUES]
        rz = data['pose_Rz'][facs.VALUES]
        # Store the current location of the object for this frame.
        # find out the 3D location of the object after applying the
        # rotation.
        # The delta between the current location and the rotated location
        # is eliminated by subtracting the X,Y,Z locations.
        for obj in bpy.data.objects:
            # NOTE(review): this matches objects named like 'amir' (a debug
            # object?) instead of the 'pdm3d_' empties created by the plot
            # operator — the intended filter is the commented line below.
            if not 'amir' in obj.name:
                #if not 'pdm3d_' in obj.name:
                continue
            logger.critical("Rotating object %s: %s", obj.name,
                    str(datetime.datetime.now()))
            self.rotate_obj(obj, rx, ry, rz)
        return {'FINISHED'}
class FACE_OT_pdm2d_animate(bpy.types.Operator):
    # Experimental: plot OpenFace PDM landmark tracks as animated empties.
    bl_idname = "yafr.animate_pdm2d_face"
    bl_label = "Plot"
    bl_description = "Experimental feature"

    def plot_axis(self, obj, axis, result, array, adj=[], div=400):
        """Keyframe one location axis of *obj* from the per-frame series *result*.

        axis: 0/1/2 -> X/Y/Z (Y and Z are sign-flipped for screen-space data).
        array: frame indices to key when plot_all is off (extrema only).
        adj: optional per-frame offsets (rigid translation) subtracted before
             scaling; applied only when its length matches *result*.
        div: scale divisor from pixel/mm space into Blender units.
        Returns the list of plotted values in the plot_all path, else None.
        NOTE(review): the mutable default adj=[] is shared across calls; it
        is only read here so it is harmless, but adj=None would be safer.
        NOTE(review): the extrema path below ignores *adj* entirely —
        possibly intentional, possibly an omission; confirm.
        """
        if plot_all:
            f = 0
            values = []
            for p in result:
                # the adjustment array brings the points to the center
                # point.
                if len(adj) == len(result):
                    av = adj[f]
                    value = (p - av) / div
                else:
                    value = p / div
                if axis == 1 or axis == 2:
                    value = value * -1
                obj.location[axis] = value
                obj.keyframe_insert(data_path="location", frame=f, index=axis)
                values.append(value)
                f = f + 1
            return values
        for m in array:
            value = result[m] / div
            if axis == 1 or axis == 2:
                value = value * -1
            obj.location[axis] = value
            obj.keyframe_insert(data_path="location", frame=m, index=axis)

    def animate_2d_empty(self, obj, attr, pdm_2d, rigid_data):
        """Keyframe X/Y of one 2D-landmark empty; returns (x_values, y_values).

        The rigid-body translation (p_tx/p_ty) is the centering offset.
        The returned lists are None unless plot_all is set (see plot_axis).
        """
        y_name = 'y_' + attr.strip('x_')
        x_info = pdm_2d[attr]
        y_info = pdm_2d[y_name]
        p_tx = rigid_data['p_tx'][facs.VALUES]
        p_ty = rigid_data['p_ty'][facs.VALUES]
        x_values = self.plot_axis(obj, 0, x_info[facs.VALUES],
                x_info[facs.MAXIMAS], adj=p_tx)
        if not plot_all:
            self.plot_axis(obj, 0, x_info[facs.VALUES],
                    x_info[facs.MINIMAS], adj=p_tx)
        y_values = self.plot_axis(obj, 1, y_info[facs.VALUES],
                y_info[facs.MAXIMAS], adj=p_ty)
        if not plot_all:
            self.plot_axis(obj, 1, y_info[facs.VALUES],
                    y_info[facs.MINIMAS], adj=p_ty)
        return x_values, y_values

    def animate_3d_empty(self, obj, attr, pdm_3d, head_pose):
        """Keyframe X/Y/Z of one 3D-landmark empty, centred by head translation.

        NOTE(review): the Y and Z plots key at x_info's extrema frames rather
        than y_info's/z_info's — possibly intentional (same frames on all
        axes), possibly a copy/paste slip; confirm before relying on it.
        """
        y_name = 'Y_' + attr.strip('X_')
        z_name = 'Z_' + attr.strip('X_')
        div = 40
        x_info = pdm_3d[attr]
        y_info = pdm_3d[y_name]
        z_info = pdm_3d[z_name]
        tx_adj = head_pose['pose_Tx'][facs.VALUES]
        ty_adj = head_pose['pose_Ty'][facs.VALUES]
        tz_adj = head_pose['pose_Tz'][facs.VALUES]
        self.plot_axis(obj, 0, x_info[facs.VALUES],
                x_info[facs.MAXIMAS], adj=tx_adj, div=div)
        if not plot_all:
            self.plot_axis(obj, 0, x_info[facs.VALUES],
                    x_info[facs.MINIMAS], adj=tx_adj, div=div)
        self.plot_axis(obj, 1, y_info[facs.VALUES],
                x_info[facs.MAXIMAS], adj=ty_adj, div=div)
        if not plot_all:
            self.plot_axis(obj, 1, y_info[facs.VALUES],
                    x_info[facs.MINIMAS], adj=ty_adj, div=div)
        self.plot_axis(obj, 2, z_info[facs.VALUES],
                x_info[facs.MAXIMAS], adj=tz_adj, div=div)
        if not plot_all:
            self.plot_axis(obj, 2, z_info[facs.VALUES],
                    x_info[facs.MINIMAS], adj=tz_adj, div=div)

    def delta(self, pp, startpoint, endpoints):
        """Average per-frame distance from *startpoint* to *endpoints* (broken).

        NOTE(review): this function cannot run as written and is only
        reachable when plot_all is set:
          - 'resutl' is a typo; the loop appends to the undefined 'result';
          - len(num_frames) takes len() of an int (num_frames is a count);
          - np / scipy are never imported anywhere in this module;
          - np.array(sp[0][i], sp[1][i]) passes the second value as dtype —
            a 2-D [[x, y]] array is presumably what cdist needs;
          - eps accumulates x+y scalar sums rather than (x, y) pairs.
        Left untouched pending the intended definition.
        """
        sp = pp[startpoint]
        num_frames = len(sp[0])
        resutl = []
        for i in range(0, len(num_frames)):
            a = np.array(sp[0][i], sp[1][i])
            eps = []
            for e in endpoints:
                eps.append(pp[e][0][i] + pp[e][1][i])
            b = np.array(eps)
            dist = scipy.spatial.distance.cdist(a,b)
            flat_dist = [item for sublist in dist.tolist() for item in sublist]
            avg = sum(flat_dist) / len(flat_dist)
            result.append(avg)
        return result

    def animate_pdm2d(self, pdm_2d, rigid_data):
        """Create one empty per 2D landmark and keyframe its track."""
        # create all the empties
        pp = {}
        for k, v in pdm_2d.items():
            if 'y_' in k or 'frame' in k or 'timestamp' in k:
                continue
            bpy.ops.object.empty_add(type='SPHERE', radius=0.01)
            empty = bpy.context.view_layer.objects.active
            entry = k.strip('x_')
            empty.name = 'pdm2d_' + k.strip('x_')
            # animate each empty
            logger.critical('Start plotting %s: %s', k,
                    str(datetime.datetime.now()))
            x_values, y_values = self.animate_2d_empty(empty, k, pdm_2d, rigid_data)
            # post process the values
            # NOTE(review): when plot_all is off animate_2d_empty returns
            # (None, None) and len(None) raises TypeError here — this branch
            # only survives with plot_all enabled.
            if len(x_values) != len(y_values):
                print("Unexpected array lengths")
                continue
            if not plot_all:
                continue
            # NOTE(review): list(range(3, 1)) is empty — range(1, 4) (points
            # 1-3, per the table below) was probably intended.
            if int(entry) in [51, 62, 57, 66, 54, 12, 48, 4, 54, 11, 5, 57, 8, 53, 29, 49, 55, 9, 59, 7] + \
                    list(range(13, 15)) + list(range(3, 1)):
                pp[int(entry)] = [x_values, y_values]
        if not plot_all:
            return
        # keeping deltas on the following points
        # The array is indexed by frames
        # upper lip roll: 51-62
        # lower lip roll: 57-66
        # left lip side: 54-12
        # Right lip side: 48-4
        # left lip up: 54-[13-15]
        # right lip up: 48-[3-1]
        # Left lip down: 54-11
        # Right lip down: 48-5
        # Chin: 57-8
        # left upper lip curl: 53-29
        # right upper lip curl: 49-29
        # left lower lip curl: 55-9
        # right lower lip curl: 59-7
        # NOTE(review): list(range(13-16)) is range(-3) == empty (range(13, 16)
        # intended?), and 'left-lip-down' uses point 48 where the table above
        # says 54.
        deltas = [{'upper-lip-roll': self.delta(pp, 51, [62]), 'lower-lip-roll': self.delta(pp, 57, [66]),
                'left-lip-side': self.delta(pp, 54, [12]), 'right-lip-side': self.delta(pp, 48, [4]),
                'left-lip-up': self.delta(pp, 54, list(range(13-16))), 'right-lip-up': self.delta(pp, 48, list(range(1, 4))),
                'left-lip-down': self.delta(pp, 48, [5]), 'right-lip-down': self.delta(pp, 48, [5]),
                'chin': self.delta(pp, 57, [8]), 'left-upper-lip-curl': self.delta(pp, 53, [29]),
                'right-upper-lip-curl': self.delta(pp, 49, [29]),
                'left-lower-lip-curl': self.delta(pp, 55, [9]),
                'right-lower-lip-curl': self.delta(pp, 59, [7])}]
        print(deltas)
        # The idea now is that we can use that delta in comparison with
        # the basis delta we collected to calculate the percentage of

    def animate_pdm3d(self, pdm_3d, head_pose):
        """Create one empty per 3D landmark and keyframe its track."""
        # create all the empties
        for k, v in pdm_3d.items():
            if 'Y_' in k or 'Z_' in k or \
                    'frame' in k or 'timestamp' in k:
                continue
            bpy.ops.object.empty_add(type='SPHERE', radius=0.05)
            empty = bpy.context.view_layer.objects.active
            empty.name = 'pdm3d_' + k.strip('X_')
            # animate each empty
            logger.critical('Start plotting %s: %s', k,
                    str(datetime.datetime.now()))
            self.animate_3d_empty(empty, k, pdm_3d, head_pose)
            #logger.critical('Finished plotting %s: %s', k,
            #        str(datetime.datetime.now()))

    def execute(self, context):
        """Validate smoothing params, (re)process the CSV, then plot 2D or 3D."""
        global plot_all
        scn = context.scene
        dirname = os.path.dirname(os.path.realpath(__file__))
        csv = scn.yafr_csvfile
        ws = scn.yafr_openface_ws
        po = scn.yafr_openface_polyorder
        two_d = scn.yafr_pdm_2d
        plot_all = scn.yafr_pdm_plot_all
        set_init_state(False)
        if po >= ws:
            msg = "polyorder must be less than window_length."
            logger.critical(msg)
            self.report({'ERROR'}, msg)
            return {'FINISHED'}
        if ws % 2 == 0:
            msg = "window size needs to be an odd number"
            logger.critical(msg)
            self.report({'ERROR'}, msg)
            return {'FINISHED'}
        if not csv:
            self.report({'ERROR'}, 'No CSV file specified')
            return {'FINISHED'}
        if not os.path.isfile(csv):
            if not os.path.isfile(dirname + csv):
                msg = "bad csv file provided " + csv
                logger.critical(msg)
                self.report({'ERROR'}, msg)
                return {'FINISHED'}
            else:
                csv = dirname + csv
        # reset and reload the data base
        facs.reset_database()
        logger.critical('Start processing CSV: %s',
                str(datetime.datetime.now()))
        rc, msg = process_csv_file(csv, ws, po)
        logger.critical('Finished processing CSV: %s',
                str(datetime.datetime.now()))
        # animate the data
        if not rc:
            self.report({'ERROR'}, msg)
            return {'FINISHED'}
        if two_d:
            logger.critical('Start plotting 2D: %s',
                    str(datetime.datetime.now()))
            pdm2d_data = facs.get_pdm2d_data()
            rigid_data = facs.get_rigid_data()
            self.animate_pdm2d(pdm2d_data, rigid_data)
            logger.critical('Finished plotting 2D: %s',
                    str(datetime.datetime.now()))
        else:
            logger.critical('Start plotting 3D: %s',
                    str(datetime.datetime.now()))
            pdm3d_data = facs.get_pdm3d_data()
            head_pose = facs.get_facs_data()
            self.animate_pdm3d(pdm3d_data, head_pose)
            logger.critical('Finished plotting 3D: %s',
                    str(datetime.datetime.now()))
        # Clamp the scene to the plotted frame range.
        if two_d:
            frame_end = pdm2d_data['frame'][facs.VALUES][-1]
        else:
            frame_end = pdm3d_data['frame'][facs.VALUES][-1]
        bpy.context.scene.frame_end = frame_end
        return {'FINISHED'}
class VIEW3D_PT_tools_openface(bpy.types.Panel):
    """N-panel (YAFR tab) exposing the OpenFace animation settings and operators."""
    bl_label = "Open Face"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_category = "YAFR"
    #bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        # One labelled property field per scene setting, then the two operators.
        scn = context.scene
        layout = self.layout
        wm = context.window_manager
        col = layout.column(align=True)
        col.label(text="FACS Rig Name")
        col.prop(scn, "yafr_facs_rig", text='')
        col.label(text="FACS CSV file")
        col.prop(scn, "yafr_csvfile", text='')
        col.label(text="Video file")
        col.prop(scn, "yafr_videofile", text='')
        col.label(text="Animation Start Frame")
        col.prop(scn, "yafr_start_frame", text='')
        col.label(text="Smoothing Window Size")
        col.prop(scn, "yafr_openface_ws", text='')
        col.label(text="Polynomial Order")
        col.prop(scn, "yafr_openface_polyorder", text='')
        col.label(text="Animation Intensity")
        col.prop(scn, "yafr_openface_au_intensity", text='')
        col.label(text="Vertical Gaze Intensity")
        col.prop(scn, "yafr_openface_vgaze_intensity", text='')
        col.label(text="Horizontal Gaze Intensity")
        col.prop(scn, "yafr_openface_hgaze_intensity", text='')
        col.prop(scn, "yafr_openface_mouth", text='Mouth Animation')
        col.prop(scn, "yafr_openface_head", text='Head Animation')
        col = layout.column(align=False)
        col.operator('yafr.animate_face', icon='ANIM_DATA')
        col = layout.column(align=False)
        col.operator('yafr.del_animation', icon='DECORATE_ANIMATE')
class VIEW3D_PT_pdm2d_openface(bpy.types.Panel):
    """N-panel (YAFR tab) for the experimental PDM plotting operators."""
    bl_label = "PDM Experimental"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_category = "YAFR"
    #bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        # Toggles for 2D vs 3D plotting and every-frame plotting, plus the
        # plot / remove-rotation / delete operators.
        scn = context.scene
        layout = self.layout
        wm = context.window_manager
        col = layout.column(align=True)
        col.label(text="Experimental")
        col.prop(scn, "yafr_pdm_2d", text='2D Plotting')
        col.prop(scn, "yafr_pdm_plot_all", text='Plot All')
        col.operator('yafr.animate_pdm2d_face', icon='ANIM_DATA')
        col = layout.column(align=False)
        col.operator('yafr.rm_pdm3d_rotation', icon='ANIM_DATA')
        col = layout.column(align=False)
        col.operator('yafr.del_pdm_animation', icon='DECORATE_ANIMATE')
|
# Image-scaling factor applied before processing.
# Fix: the first line carried merge residue ("15,394 | <sha> | ...") fused
# onto it, which made the module unparsable.
SCALE_FACTOR = 0.1
# Feather width (pixels) used when blending the mask edge.
FEATHER_AMOUNT = 11
# Blur size for colour correction, as a fraction of a reference distance.
COLOUR_CORRECT_BLUR_FRAC = 0.6

# Landmark index groups — presumably the dlib 68-point facial landmark
# model (indices 0-67); confirm against the predictor in use.
FACE_POINTS = list(range(17, 68))
MOUTH_POINTS = list(range(48, 61))
RIGHT_BROW_POINTS = list(range(17, 22))
LEFT_BROW_POINTS = list(range(22, 27))
RIGHT_EYE_POINTS = list(range(36, 42))
LEFT_EYE_POINTS = list(range(42, 48))
NOSE_POINTS = list(range(27, 35))
JAW_POINTS = list(range(0, 17))

# Points used to line up the images.
# ALIGN_POINTS = (MOUTH_POINTS)
ALIGN_POINTS = (LEFT_BROW_POINTS + RIGHT_EYE_POINTS + LEFT_EYE_POINTS + RIGHT_BROW_POINTS + NOSE_POINTS + MOUTH_POINTS)

# Points from the second image to overlay on the first.  The convex hull of
# each element will be overlaid.
# OVERLAY_POINTS = [MOUTH_POINTS]
OVERLAY_POINTS = [LEFT_EYE_POINTS + RIGHT_EYE_POINTS + LEFT_BROW_POINTS + RIGHT_BROW_POINTS, NOSE_POINTS + MOUTH_POINTS]
15,395 | 1a1c3b9aa5e3d7ccba09918d7fa74e2371398870 | import unittest
from unittest.mock import patch
from infra.controller_factory import ControllerFactory
from config.game import Game
class GameTest(unittest.TestCase):
    """Unit tests for the Game configuration singleton."""

    def test_if_this_class_is_singleton(self):
        # Constructing Game twice must hand back the very same instance.
        first = Game()
        second = Game()
        self.assertEqual(first, second)
|
15,396 | 56ad3ef5a1e191d5fe3bb051b31a69cb9044d632 | import sys
import numpy as np
from collections import defaultdict
# Fast stdin helpers: raw line, single int, list of ints.
sr = lambda: sys.stdin.readline().rstrip()
ir = lambda: int(sr())
lr = lambda: list(map(int, sr().split()))
N, K = lr()
# Prepend 0 so Acum[i] is the prefix over the first i elements.
A = np.array([0] + lr())
# Counting subarrays whose sum mod K equals their length. With B = (A-1) % K,
# such a subarray has B-sum ≡ 0 (mod K), i.e. equal prefix values in Acum.
A = (A-1) % K
Acum = A.cumsum() % K
counter = defaultdict(int)
answer = 0
# Slide a window of the last K-1 prefix values: a valid subarray must be
# shorter than K (its length taken mod K must equal the actual length).
for i, x in enumerate(Acum):
    answer += counter[x]
    counter[x] += 1
    if i >= K-1:
        # Drop the prefix that fell out of the length-(K-1) window.
        counter[Acum[i-(K-1)]] -= 1
print(answer)
|
15,397 | cd6808f272fb20848b38fdb96c85759db0bb39c3 | from pexpect.popen_spawn import PopenSpawn
import pexpect
from datetime import datetime
import requests
import json
import urllib3
import os
import subprocess
import time
urllib3.disable_warnings()
import sys
import re
import ipv6linklocalforwarding
import signal
import tempfile
class QuantaSkylake(object):
def __init__(self, host, username, password):
    """Wrap one Quanta Skylake node's BMC (Redfish + AMI web API + ipmitool).

    host may be an IPv6 link-local address carrying a %zone suffix;
    username/password are the BMC credentials.
    """
    self.host = host
    # Some applications do not work via IPv6 Link Local. Adding ipv6linklocal instance
    self.hostforwardinstance = None
    self.username = username
    self.password = password
    #self.redfishapi2 = 'https://[' + host.replace('%','%25') + ']/redfish/v1/'
    # Base Redfish URL; '%' is percent-encoded so zone-id hosts survive in a URL.
    self.redfishapi2 = 'https://' + host.replace('%', '%25') + '/redfish/v1/'
    # Header with an explicit bracketed Host (zone id stripped) for IPv6 targets.
    self.redfishheader = {
        'Content-Type': 'application/json',
        'User-Agent': 'curl/7.54.0',
        'Host': '[' + host.split('%')[0] + ']'
    }
    # Variant without the Host override.
    self.redfishheader2 = {
        'Content-Type': 'application/json',
        'User-Agent': 'curl/7.54.0',
    }
    # Canned BIOS settings payload (UEFI boot order, console redirect, etc.)
    # used by the MiniOS-defaults setters.
    self.payload = json.dumps( {"Attributes":{
        "FBO001":"UEFI",
        "FBO201":"CD/DVD",
        "FBO202":"USB",
        "FBO203":"Hard Disk",
        "FBO204":"Network",
        "CRCS005":"Enable",
        "IIOS1FE":"Enable",
        "IPMI100":"Disabled"
    }
    })
    # AMI web-API session state (populated by createAPISession).
    self.amiheader = {}
    self.amiloggedin = False
    self.cookie = None
    self.token = None
    # Firmware/JSON caches filled by getJSONs() below.
    self.BMCVersion = None
    self.BIOSVersion = None
    self.BIOSJSONCache = None
    self.ManagersJSONCache = None
    self.SystemsJSONCache = None
    # Common ipmitool command prefix (note trailing space).
    self.IPMIPre = 'ipmitool -I lanplus -H ' + host + ' -U ' + username + ' -P ' + password + ' '
    # LAN facts discovered by getIPv4Address().
    self.ipv4Address = None
    self.ipv4Subnet = None
    self.ipv4Gateway = None
    self.ipv4Src = None
    self.mgmtMAC = None
    self.lastButtonTime = None
    # Live child-process sessions (Serial-over-LAN, virtual-media CLI).
    self.SOLSession = None
    self.VMCLISession = None
    # Fill UP JSON Cache
    self.getJSONs()
def spawn(self, command, **kwargs):
    """Launch *command* as a child process and return the spawn object.

    Uses pexpect.spawn (pty-backed) on Linux and PopenSpawn everywhere
    else — Windows has no pty support.
    """
    # BUG FIX: sys.platform is the lowercase 'linux' on Python 3 ('linux2'
    # on Python 2), so the original test `'Linux' in sys.platform` never
    # matched and Linux hosts silently fell back to PopenSpawn.
    if sys.platform.startswith('linux'):
        return pexpect.spawn(command, **kwargs)
    return PopenSpawn(command, **kwargs)
def poweroff(self):
# session = PopenSpawn(self.IPMIPre + ' power off')
session = self.spawn(self.IPMIPre + ' power off')
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' ' + output)
def poweron(self):
# session = PopenSpawn(self.IPMIPre + ' power on')
#session = self.spawn(self.IPMIPre + ' power on')
session = self.spawn(self.IPMIPre + ' power on') # Jenny Changed on 8/5/2019
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' ' + output)
def powersoft(self):
# If node is on, press power button softly.
if self.getPowerStatus():
# session = PopenSpawn(self.IPMIPre + ' power soft')
session = self.spawn(self.IPMIPre + ' power soft')
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' ' + output)
def idon(self):
# session = PopenSpawn(self.IPMIPre + ' chassis identify force')
session = self.spawn(self.IPMIPre + ' chassis identify force')
output = session.read(2000)
def idoff(self):
# session = PopenSpawn(self.IPMIPre + ' chassis identify 0')
session = self.spawn(self.IPMIPre + ' chassis identify 0')
output = session.read(2000)
def idblink(self):
# session = PopenSpawn(self.IPMIPre + ' chassis identify 0')
session = self.spawn(self.IPMIPre + ' chassis identify 240')
output = session.read(2000)
def resetBMC(self):
session = self.spawn(self.IPMIPre + ' mc reset cold')
output = session.read(2000)
def updateUserPass(self, username, password):
print(self.host + ' Setting username to ' + username + ' and password to ' + password)
if username == 'admin':
# session = PopenSpawn(self.IPMIPre + ' user set password 2 ' + password)
session = self.spawn(self.IPMIPre + ' user set password 2 \"' + password + '\"')
else:
print(self.host + ' This tool kit does not support setting different usernames yet.')
return False
output = session.read(2000)
output = output.decode('utf-8')
print(self.host + ' ' + output)
if 'successful' in output:
self.username = username
self.password = password
self.IPMIPre = 'ipmitool -I lanplus -H ' + self.host + ' -U ' + self.username + ' -P \"' + self.password + '\" '
return True
else:
return False
# Get FRU Data
def getFRU(self):
cmd = self.IPMIPre + 'fru print'
session = self.spawn(cmd)
output = session.read(2000)
lines = output.splitlines()
print(output)
test = self.readFRU(0)
print(test)
# Start AMI Section
# DO NOT USE THIS FOR OFFICIAL PURPOSES. ONLY CREATED FOR BMC 4.22.06 PURPOSES SINCE WE CAN'T LOG INTO REDFISH AT INITIAL BMC BOOTUP.
# FORCE CHANGE PASSWORD TO SAME PASSWORD
def forcePasswordChange(self):
# Get QSESSIONID and X-CSRFTOKEN to log into AMI API
header = {'Content-Type': 'application/x-www-form-urlencoded', 'User-Agent': 'curl/7.54.0', 'Host': '[' + self.host.split('%')[0] + ']'}
url_prep = 'https://[' + self.host.replace('%','%25') + ']/'
session = requests.post(url = url_prep + 'api/session', data="username=admin&password=cmb9.admin", headers=header, verify=False)
if session.ok:
try:
j = session.json()
except:
print(self.host + " Failed to Force Change Password")
return False
# print(j)
CSRFToken = j["CSRFToken"]
QSESSIONID = session.cookies["QSESSIONID"]
else:
print(self.host + " Failed to Force Change Password")
return False
# Update Header with QSESSIONID, X-CSRFTOKEN Details and new Content Type
header.update({'Cookie':'QSESSIONID=' + QSESSIONID})
header.update({"X-CSRFTOKEN": CSRFToken})
header.update({'Content-Type': 'application/json'})
session = requests.post(url = url_prep + 'api/force_change_password', data="{\"this_userid\":\"2\",\"password\":\"cmb9.admin\",\"confirm_password\":\"cmb9.admin\",\"password_size\":\"0\"}", headers=header, verify=False)
if session.ok:
print(self.host + " Successfully Force Change Password")
else:
print(self.host + " Failed to Force Change Password")
# Don't forget to log our of session
session = requests.delete(url = url_prep + 'api/session', headers=header, verify=False)
if session.ok:
return True
else:
print(self.host + " Failed to Force Change Password")
return False
def createAPISession(self):
# Get QSESSIONID and X-CSRFTOKEN to log into AMI API
self.amiheader = {'Content-Type': 'application/x-www-form-urlencoded', 'User-Agent': 'curl/7.54.0', 'Host': '[' + self.host.split('%')[0] + ']'}
url_prep = 'https://[' + self.host.replace('%','%25') + ']/'
session = requests.post(url = url_prep + 'api/session', data="username=admin&password=cmb9.admin", headers=self.amiheader, verify=False)
if session.ok:
try:
j = session.json()
except:
print(self.host + " Failed to log into AMI Session")
return False
# print(j)
CSRFToken = j["CSRFToken"]
QSESSIONID = session.cookies["QSESSIONID"]
else:
print(self.host + " Failed to log into AMI Session")
return False
# Update Header with QSESSIONID, X-CSRFTOKEN Details and new Content Type
self.amiheader.update({'Cookie':'QSESSIONID=' + QSESSIONID})
self.amiheader.update({"X-CSRFTOKEN": CSRFToken})
self.amiheader.update({'Content-Type': 'application/json'})
self.amiloggedin = True
def destroyAPISession(self):
    """Log out of the AMI web-API session opened by createAPISession.

    Returns True on success; False (with a message) on failure.
    """
    # Don't forget to log out of the session
    url_prep = 'https://[' + self.host.replace('%', '%25') + ']/'
    session = requests.delete(url = url_prep + 'api/session', headers=self.amiheader, verify=False)
    if session.ok:
        self.amiloggedin = False
        return True
    else:
        # BUG FIX: message typo 'lot out' -> 'log out'.
        print(self.host + " Failed to log out of AMI session")
        return False
def getVirtualMediaStatus(self):
    """Return the AMI virtual-media instance settings as a dict.

    Requires an active AMI API session (createAPISession). Returns {} when
    not logged in, on HTTP failure, or when the body is not valid JSON.
    """
    if not self.amiloggedin:
        return {}
    url_prep = 'https://[' + self.host.replace('%', '%25') + ']/'
    session = requests.get(url=url_prep + 'api/settings/media/instance', headers=self.amiheader, verify=False)
    if not session.ok:
        # BUG FIX: the original fell off the end here and returned None,
        # unlike every other failure path which returns {}.
        return {}
    try:
        return session.json()
    except:
        return {}
# End AMI Section #
def setHDDBoot(self):
try:
session = requests.put(self.redfishapi2 + 'Systems/Self/Bios/SD', auth=(self.username, self.password),
verify=False, headers=self.redfishheader,
data='{"Attributes":{"FBO201":"Hard Disk","FBO202":"USB","FBO203":"CD/DVD","FBO204":"Network"}}')
except:
pass
if session.status_code == 204:
print(self.host + ' ' + ' Successfully set HDD First Boot order')
else:
print(self.host + ' ' + ' Failed to set HDD First Boot order')
def setCDROMBoot(self):
try:
session = requests.put(self.redfishapi2 + 'Systems/Self/Bios/SD', auth=(self.username, self.password),
verify=False, headers=self.redfishheader,
data='{"Attributes":{"FBO201":"CD/DVD","FBO202":"USB","FBO203":"Hard Disk","FBO204":"Network"}}')
except:
pass
if session.status_code == 204:
print(self.host + ' ' + ' Successfully set CD/DVD First Boot order')
else:
print(self.host + ' ' + ' Failed to set CD/DVD First Boot order')
def setIIOS1FE(self, value):
# For more details about IIOS1FE, go to https://(BMC IP)/redfish/v1/Registries/BiosAttributeRegistry0.0.0.0.json
if value is True:
self.setBIOSAttribute('IIOS1FE', 'Enable')
else:
self.setBIOSAttribute('IIOS1FE', 'Disable')
def setCRCS005(self, value):
# For more details about CRCS005, go to https://(BMC IP)/redfish/v1/Registries/BiosAttributeRegistry0.0.0.0.json
if value is True:
self.setBIOSAttribute('CRCS005', 'Enable')
else:
self.setBIOSAttribute('CRCS005', 'Disable')
# Repairs rebooting issue when OS takes too long to boot
def setSMI(self, value=False):
if value is True:
session = self.spawn(self.IPMIPre + ' raw 0x36 0x1c 0x4c 0x1c 0x00 0x01 0x01')
output = session.read(2000)
print(self.host + ' Enabling S')
else:
session = self.spawn(self.IPMIPre + ' raw 0x36 0x1c 0x4c 0x1c 0x00 0x01 0x00')
output = session.read(2000)
print(self.host + ' Disabling SMI Timer')
# Each Redfish Update Requires just one PUT Call. Can't use multiple PUT Calls
def setUCPCIDefaults(self):
try:
session = requests.put(self.redfishapi2 + 'Systems/Self/Bios/SD', auth=(self.username, self.password),
verify=False, headers=self.redfishheader,
data='{"Attributes":{"FBO001":"UEFI","FBO201":"CD/DVD","FBO202":"USB","FBO203":"Hard Disk","FBO204":"Network","CRCS005":"Enable","IIOS1FE":"Enable", "IPMI100":"Disabled"}}')
except:
pass
if session.status_code == 204:
print(self.host + ' ' + 'Successfully set UCP CI/HC/RS BIOS Settings')
else:
print(self.host + ' ' + 'Failed to set UCP CI/HC/RS BIOS Settings')
# Each Redfish Update Requires just one PUT Call. Can't use multiple PUT Calls
def setMiniOSDefaults(self):
try:
session = requests.put(self.redfishapi2 + 'Systems/Self/Bios/SD', auth=(self.username, self.password),
verify=False, headers=self.redfishheader,
# data='{"Attributes":{"FBO001":"LEGACY","FBO101":"CD/DVD","FBO102":"USB","FBO103":"Hard Disk","FBO104":"Network"}}')
data='{"Attributes":{"FBO001":"UEFI","FBO201":"CD/DVD","FBO202":"USB","FBO203":"Hard Disk","FBO204":"Network","CRCS005":"Enable","IIOS1FE":"Enable", "IPMI100":"Disabled"}}')
except:
pass
if session.status_code == 204:
print(self.host + ' ' + 'Successfully set MiniOS BIOS Settings')
else:
print(self.host + ' ' + 'Failed to set MiniOS BIOS Settings')
def setMiniOSDefaults2(self):
    """Apply the cached MiniOS BIOS payload (self.payload) via Redfish.

    Variant of setMiniOSDefaults that uses the header without an explicit
    Host override (self.redfishheader2).
    """
    try:
        # BUG FIX: the original referenced self.redfishapi22, an attribute
        # that is never defined anywhere, so every call raised
        # AttributeError and only ever printed 'Error Out'. The correct
        # base URL is self.redfishapi2. The debug prints that leaked the
        # password to stdout were removed at the same time.
        session = requests.put(self.redfishapi2 + 'Systems/Self/Bios/SD', auth=(self.username, self.password),
                               verify=False, headers=self.redfishheader2,
                               data=self.payload)
        if session.status_code == 204:
            print(self.host + ' ' + 'In quantaskylake: Successfully set MiniOS BIOS Settings')
        else:
            print(self.host + ' ' + 'In quantaskylake: Failed to set MiniOS BIOS Settings')
    except:
        # Best-effort like its siblings: report and carry on.
        print("Error Out")
# This technically doesn't work if used in a loop.
def setBIOSAttribute(self, key, value):
try:
session = requests.put(self.redfishapi2 + 'Systems/Self/Bios/SD', auth=(self.username, self.password),
verify=False, headers=self.redfishheader,
data='{"Attributes":{"' + str(key) + '":"' + str(value) + '"}}')
except:
pass
if session.status_code == 204:
print(self.host + ' ' + ' Successfully set key ' + str(key) + ' with value ' + str(value) +'.')
else:
print(self.host + ' ' + ' Failed to set key ' + str(key) + ' with value ' + str(value) +'.')
@staticmethod
def validate(host, username, password):
temp = host + ' ' + username + ' ' + password
print('Start ' + temp)
#redfishapi2 = 'https://[' + host + ']/redfish/v1/'
# Jenny Modified
redfishapi2 = 'https://' + host + '/redfish/v1/'
redfishheader = {
'Content-Type': 'application/json',
'User-Agent': 'curl/7.54.0',
'Host': '[' + host.split('%')[0] + ']'
}
# Attempt to connect
try:
session = requests.get(redfishapi2 + 'Systems/Self', auth=(username, password), verify=False,
headers=redfishheader, timeout=30)
except:
print('Finish ' + temp)
return None
print('Finish ' + temp)
# If redfish responded, we are good.
if session.ok:
try:
j = session.json()
except:
return None
print(j)
try:
SKU = j['SKU']
except:
return None
if ('D52B' in SKU) or ('DS120' in SKU) or ('DS220' in SKU):
return QuantaSkylake(host, username, password)
else:
return None
else:
return None
def getJSONs(self):
self.getManagersJSON()
self.getSystemsJSON()
def getSystemsJSON(self):
# Get Redfish Systems/Self Details
session = requests.get(self.redfishapi2 + 'Systems/Self', auth=(self.username, self.password), verify=False, headers=self.redfishheader)
try:
# Decode JSON to Dictionary
j = session.json()
# Store it
self.SystemsJSONCache = j
return j
except:
return {}
def getManagersJSON(self):
session = requests.get(self.redfishapi2 + 'Managers/Self', auth=(self.username, self.password), verify=False, headers=self.redfishheader)
# Decode JSON to Dictionary
j = session.json()
# Store it
self.ManagersJSONCache = j
return j
def getRegistriesJSON(self):
session = requests.get(self.redfishapi2 + 'Registries/Self', auth=(self.username, self.password), verify=False, headers=self.redfishheader)
# Decode JSON to Dictionary
j = session.json()
return j
def getBIOSVersion(self):
try:
self.BIOSVersion = self.SystemsJSONCache['BiosVersion']
except:
raise ValueError('BIOs from server has\'t turned on yet. Please turn on server.')
return self.BIOSVersion
def getBIOSJSON(self):
session = requests.get(self.redfishapi2 + 'Systems/Self/Bios', auth=(self.username, self.password), verify=False, headers=self.redfishheader)
try:
j = session.json()
except:
return {'error':'error'}
self.BIOSJSONCache = j
return j
def restoreBIOSJSON(self, json):
inputdata = str(json)
inputdata = inputdata.replace('\'','\"')
inputdata = inputdata.replace('False', 'false')
inputdata = inputdata.replace('True', 'true')
try:
session = requests.put(self.redfishapi2 + 'Systems/Self/Bios/SD', auth=(self.username, self.password),
verify=False, headers=self.redfishheader,
data=inputdata)
except:
pass
if session.status_code == 204:
print(self.host + ' ' + 'Successfully set BIOS Settings')
else:
print(self.host + ' ' + 'Failed to set BIOS Settings. Redfish API said ' + str(session.content))
def restoreBIOSJSONtest(self, json):
for key, value in json['Attributes'].items():
# print(key + ' ' + str(value))
self.setBIOSAttribute(key, value)
def getBIOSJSONRegistries(self):
# Get BIOS Registries
session = requests.get(self.redfishapi2 + 'Registries/BiosAttributeRegistry0.0.0.0.json', auth=(self.username, self.password), verify=False, headers=self.redfishheader)
j = session.json()
return j
def getBMCVersion(self):
try:
self.BMCVersion = self.ManagersJSONCache['FirmwareVersion']
except:
raise ValueError('BMC Version isn\'t in the output. This shouldn\'t happen. Did you run get JSONS yet? :(')
return self.BMCVersion
def getJSON(self, inputurl):
if self.redfishapi2 in inputurl:
url = str(inputurl)
else:
url = self.redfishapi2 + str(inputurl)
session = requests.get(url, auth=(self.username, self.password), verify=False,
headers=self.redfishheader)
try:
j = session.json()
except:
return {'error': 'error'}
self.BIOSJSONCache = j
return j
def getIPv4Address(self):
# Ask ipmitool to go to node and print out LAN details
# session = PopenSpawn(self.IPMIPre + 'lan print')
session = self.spawn(self.IPMIPre + 'lan print')
output = session.read(2000)
output = output.decode('utf-8')
output = output.splitlines()
for line in output:
if 'IP Address ' in line:
ipv4 = line.split(': ')[1]
self.ipv4Address = ipv4
elif 'Subnet Mask ' in line:
subnet = line.split(': ')[1]
self.ipv4Subnet = subnet
elif 'Default Gateway IP ' in line:
gateway = line.split(': ')[1]
self.ipv4Gateway = gateway
elif 'IP Address Source ' in line:
source = line.split(': ')[1]
self.ipv4Src = source
elif 'MAC Address ' in line:
mgmtMAC = line.split(': ')[1].replace(":", "").lower()
self.mgmtMAC = mgmtMAC
try:
print(self.host + ' ' + str(self.__class__.__name__) + ' Address: ' + ipv4 + ' Subnet: ' + subnet + ' Gateway: ' + gateway)
return self.ipv4Address
except:
print(self.host + ' This host has a failing IPMI interface. Please do not continue and reflash this system.')
return None
def setIPv4Address(self, IPv4Address = None, subnet = None, gateway = None):
print(self.host + ' Setting IPv4 LAN Parameters')
# Ask ipmitool to set DHCP Mode if IPv4Address is None. Otherwise, set to Static
if IPv4Address is None:
# session = PopenSpawn(self.IPMIPre + ' lan set 1 ipsrc dhcp')
session = self.spawn(self.IPMIPre + ' lan set 1 ipsrc dhcp')
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' Setting IP Source to DHCP')
self.ipv4Address = None
return True
else:
# session = PopenSpawn(self.IPMIPre + ' lan set 1 ipsrc static')
session = self.spawn(self.IPMIPre + ' lan set 1 ipsrc static')
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' Setting IP Source to Static')
time.sleep(15)
if subnet is None:
raise ValueError('Subnet cannot be blank.')
# Wait for interface to come back
time.sleep(10)
if IPv4Address is not None and subnet is not None:
# session = PopenSpawn(self.IPMIPre + ' lan set 1 netmask ' + str(subnet), timeout=120)
session = self.spawn(self.IPMIPre + ' lan set 1 netmask ' + str(subnet), timeout=120)
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' ' + output)
# session = PopenSpawn(self.IPMIPre + ' lan set 1 ipaddr ' + str(IPv4Address), timeout=120)
session = self.spawn(self.IPMIPre + ' lan set 1 ipaddr ' + str(IPv4Address), timeout=120)
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' ' + output)
if gateway is not None:
# session = PopenSpawn(self.IPMIPre + ' lan set 1 defgw ipaddr ' + str(gateway), timeout=120)
session = self.spawn(self.IPMIPre + ' lan set 1 defgw ipaddr ' + str(gateway), timeout=120)
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' ' + output)
return True
def setIPv4SubnetAddress(self, subnet = None, gateway = None):
print(self.host + ' Setting IPv4 LAN Parameters')
if subnet is None:
raise ValueError('Subnet cannot be blank.')
if subnet is not None:
# session = PopenSpawn(self.IPMIPre + ' lan set 1 netmask ' + str(subnet), timeout=120)
session = self.spawn(self.IPMIPre + ' lan set 1 netmask ' + str(subnet), timeout=120)
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' ' + output)
if gateway is not None:
# session = PopenSpawn(self.IPMIPre + ' lan set 1 defgw ipaddr ' + str(gateway), timeout=120)
session = self.spawn(self.IPMIPre + ' lan set 1 defgw ipaddr ' + str(gateway), timeout=120)
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' ' + output)
return True
def getPowerStatus(self):
# session = PopenSpawn(self.IPMIPre + ' power status')
session = self.spawn(self.IPMIPre + ' power status')
output = session.read(2000)
output = output.decode('utf-8')
output = output.replace('\n', '')
print(self.host + ' ' + output)
if 'off' in output:
return False
else:
return True
def setTime(self):
# session = PopenSpawn(self.IPMIPre + ' sel time set "' + datetime.now().strftime("%m/%d/%Y %H:%M:%S") + '"')
session = self.spawn(self.IPMIPre + ' sel time set "' + datetime.now().strftime("%m/%d/%Y %H:%M:%S") + '"')
output = session.read(2000)
output = output.decode('utf-8').rstrip()
print(self.host + ' ' + output)
def getTime(self):
# session = PopenSpawn(self.IPMIPre + ' sel time get')
session = self.spawn(self.IPMIPre + ' sel time get')
output = session.read(2000)
output = output.decode('utf-8').lstrip().rstrip()
print(self.host + ' ' + output)
return datetime.strptime(output, '%m/%d/%Y %H:%M:%S')
def clearSEL(self):
# session = PopenSpawn(self.IPMIPre + ' sel clear')
session = self.spawn(self.IPMIPre + ' sel clear')
output = session.read(200000)
output = output.decode('utf-8').rstrip()
print(self.host + ' ' + output)
def rawIPMI(self,input):
session = self.spawn(self.IPMIPre + ' raw ' + input)
output = session.read(2000)
output = output.decode('utf-8').rstrip()
return output
def readFRU(self, fruID):
fruID = str(fruID)
file, path = tempfile.mkstemp()
session = self.spawn(self.IPMIPre + ' fru read ' + fruID + ' ' + path)
output = session.read(2000)
output = output.decode('utf-8').rstrip()
content = ''
try:
with os.fdopen(file, 'rb') as tmp:
content = tmp.read()
finally:
os.remove(path)
return content
def writeFRU(self, fruID, fruData):
fruID = str(fruID)
@staticmethod
def getLastButtonTime(node):
session = PopenSpawn(node.IPMIPre + ' sel list', timeout=60)
output = session.read(200000)
output = output.decode('utf-8')
# print(output)
output = output.splitlines()
buttons = []
for line in output:
if "Button #" in line:
# Only get the date and time
buttons.append(datetime.strptime(line[7:28], '%m/%d/%Y | %H:%M:%S'))
if buttons.__len__() > 0:
node.lastButtonTime = buttons[-1]
else:
node.lastButtonTime = datetime.strptime('1/1/1970 | 00:00:00', '%m/%d/%Y | %H:%M:%S')
return node
def deleteVMCLIapp(self):
self.stopVMCLIapp()
# session = PopenSpawn('sc delete VMCLI_' + self.host, timeout=60)
session = self.spawn('sc delete VMCLI_' + self.host, timeout=60)
output = session.read(2000)
output = output.decode('utf-8')
# print(self.host + ' ' + output)
# Needs help
def createVMCLIapp(self):
if 'win' in sys.platform:
'''
cwd = os.getcwd()
session = PopenSpawn('sc create VMCLI_' + self.host + ' binPath= "' + cwd + '\\VMCLI.exe', timeout=60)
# session = self.spawn()
output = session.read(2000)
output = output.decode('utf-8')
# print(self.host + ' ' + output)
'''
print('VMCLI Service creation isn\'t required for Windows environments.')
else:
print('VMCLI Service creation isn\'t required for Linux environments.')
def stopVMCLIapp(self):
if self.VMCLISession is not None:
self.VMCLISession.kill(signal.CTRL_C_EVENT)
self.VMCLISession = None
def startVMCLIapp(self, isofile):
self.createAPISession()
self.stopVMCLIapp()
print(self.host + ' Starting VMCLI Service with ' + isofile)
time.sleep(1)
if 'win' in sys.platform:
cwd = os.getcwd()
# cmd = 'sc start VMCLI_' + self.host + ' -r [' + self.host + ']:443 -u ' + self.username + ' -p ' + self.password + ' -c "' + cwd + '\\' + isofile + '"'
cmd = "VMCLI.exe -r [" + self.host + "]:443 -u " + self.username + " -p " + self.password + " -c " + cwd + "/" + isofile
cmd = cmd.replace("\\","/")
count = 0
while count < 10:
try:
# output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
# print(self.host + ' ' + output)
session = PopenSpawn(cmd, timeout=30, encoding = 'utf-8', searchwindowsize=100)
self.VMCLISession = session
session.expect('Starting CD redirection', timeout=30)
except:
pass
time.sleep(30)
if self.statusVMCLIapp():
break
else:
print(self.host + " VMCLI Failed to start. Attempt #" + str(count))
session.kill(signal.CTRL_C_EVENT)
count += 1
else:
self.startTunnel(['443','5120'])
cmd = 'VMCLIEXE -r [' + self.host.split('%')[0].split('fe80')[1] + ']:443 -u ' + self.username + ' -p ' + self.password + ' -c ' + isofile
session = self.spawn(cmd, encoding = 'utf-8')
try:
session.expect('CD redirection in progress')
self.VMCLISession = session
time.sleep(1)
print(self.host + ' ' + ' CD redirection in progress')
except:
print(self.host + 'VMCLI Failed to start')
if not self.statusVMCLIapp():
print(self.host + ' VMCLI Failed to start')
else:
print(self.host + ' CD redirection in progress')
self.destroyAPISession()
# Needs help
def statusVMCLIapp(self):
# Check #1
if self.VMCLISession is None:
return False
else:
if not self.VMCLISession.closed:
index = self.VMCLISession.expect(['Stopping all the redirections', 'Error', 'Ejected', pexpect.EOF, pexpect.TIMEOUT])
if index < 4:
self.stopVMCLIapp()
return False
# else:
# return True
else:
return False
# Check #2
cdrom_status = False
if self.amiloggedin:
j = self.getVirtualMediaStatus()
try:
# Set True if the AMI API says CD redirection is active
cdrom_status = bool(int(j['cd_active_sessions'])%2)
except:
pass
return cdrom_status
def SOLActivate(self):
count = 0
while count < 5:
count += 1
#self.SOLDeactivate()
try:
# session = PopenSpawn(self.IPMIPre + 'sol activate', encoding='utf-8')
session = self.spawn(self.IPMIPre + 'sol activate', encoding='utf-8')
except Exception as e:
continue
# Pexpect will wait for these two outputs.
result = session.expect(['[SOL Session operational. Use ~? for help]','Info: SOL payload already active on another session'])
# If the output is SOL Session Operational, return the session. Otherwise, return nothing.
if result == 0:
self.SOLSession = session
return session
else:
continue
return None
def SOLDeactivate(self):
# session = PopenSpawn(self.IPMIPre + 'sol deactivate')
session = self.spawn(self.IPMIPre + 'sol deactivate')
try:
output = session.read(2000)
except:
pass
# output = output.decode('utf-8')
# print(self.host + ' ' + output)
def ipmicmdraw(self, input):
# session = PopenSpawn(self.IPMIPre + ' ' + str(input))
session = self.spawn(self.IPMIPre + ' ' + str(input))
output = session.read(2000)
output = output.decode('utf-8')
print(self.host + ' ' + self.IPMIPre + ' ' + str(input) + '\n' + output)
def startTunnel(self, port):
self.hostforwardinstance = ipv6linklocalforwarding.forwarding(self.host.split('%')[0].split('fe80')[1], port, self.host, port)
self.hostforwardinstance.start()
def stopTunnel(self):
if self.hostforwardinstance is not None:
self.hostforwardinstance.stop()
def bmcFlash(self, file):
if 'win' in sys.platform:
cmd = 'Yafuflash2 -nw -vyes -fb -host ' + self.host + ' -u ' + self.username + ' -p ' + self.password + ' ' + file
else:
# Yafuflash2 doesn't support IPv6 Link-Local Address. Adding Tunnel
self.startTunnel(['623'])
cmd = 'Yafuflash2 -nw -vyes -fb -host ' + self.host.split('%')[0].split('fe80')[1] + ' -u ' + self.username + ' -p ' + self.password + ' ' + file
# session = PopenSpawn(cmd, maxread=20000)
session = self.spawn(cmd, maxread=20000)
filename = re.sub('[%:]', '.', self.host.split('%')[0])
if 'win' in sys.platform:
fout = open('temp\\' + filename + '_BMCFLASH.txt',"wb")
else:
fout = open('temp/' + filename + '_BMCFLASH.txt', "wb")
fout.write(str.encode(cmd))
session.logfile = fout
print(self.host + ' Running: ' + cmd)
returnCode = 0
timetowait = 240
try:
session.expect('Uploading Firmware Image : 0', timeout=600)
print(self.host + ' Uploading BMC Image')
except:
print(self.host + ' Failed to upload BMC image')
timetowait = 10
returnCode = returnCode + 1
try:
session.expect('Flashing \[boot\] Module', timeout=timetowait)
print(self.host + ' Flashing [boot] Module')
except:
print(self.host + ' Failed to flash boot module')
timetowait = 10
returnCode = returnCode + 2
try:
session.expect('Flashing \[conf\] Module', timeout=timetowait)
print(self.host + ' Flashing [conf] Module')
except:
print(self.host + ' Failed to flash [conf] Module')
timetowait = 10
returnCode = returnCode + 4
try:
session.expect('Flashing \[bkupconf\] Module', timeout=timetowait)
print(self.host + ' Flashing [bkupconf] Module')
except:
print(self.host + ' Failed to flash [bkupconf] Module')
timetowait = 10
returnCode = returnCode + 8
try:
session.expect('Flashing \[root\] Module', timeout=timetowait)
print(self.host + ' Flashing [root] Module')
except:
print(self.host + ' Failed to flash [root] Module')
timetowait = 10
returnCode = returnCode + 16
try:
session.expect('Flashing \[osimage\] Module', timeout=timetowait)
print(self.host + ' Flashing [osimage] Module')
except:
print(self.host + ' Failed to flash [osimage] Module')
timetowait = 10
returnCode = returnCode + 32
try:
session.expect('Flashing \[www\] Module', timeout=timetowait)
print(self.host + ' Flashing [www] Module')
except:
print(self.host + ' failed to flash [www] Module')
timetowait = 10
returnCode = returnCode + 64
try:
session.expect('Flashing \[ast2500e\] Module', timeout=timetowait)
print(self.host + ' Flashing [ast2500e] Module')
except:
print(self.host + ' Failed tp flash [ast2500e] Module')
timetowait = 10
returnCode = returnCode + 128
try:
session.expect('Resetting the firmware', timeout=timetowait)
print(self.host + ' Resetting Firmware')
except:
print(self.host + ' Failed to reset Firmware. Please wait for yafuflash to exit cleanly.')
timetowait = 10
returnCode = returnCode + 256
# Wait for Yafuflash to exit
session.wait()
if returnCode < 1:
print(self.host + ' Successfully flashed BMC')
else:
print(self.host + ' Failed to flash BMC')
fout.close()
self.stopTunnel()
return returnCode
def biosFlash(self, file):
if 'win' in sys.platform:
cmd = 'Yafuflash2 -nw -vyes -fb -host ' + self.host + ' -u ' + self.username + ' -p ' + self.password + ' -d 2 ' + file
else:
# Yafuflash2 doesn't support IPv6 Link-Local Address. Adding Tunnel
self.startTunnel(['623'])
cmd = 'Yafuflash2 -nw -vyes -fb -host ' + self.host.split('%')[0].split('fe80')[1] + ' -u ' + self.username + ' -p ' + self.password + ' -d 2 ' + file
# session = PopenSpawn(cmd, maxread=2000)
session = self.spawn(cmd, maxread=2000)
filename = re.sub('[%:]', '.', self.host.split('%')[0])
if 'win' in sys.platform:
fout = open('temp\\' + filename +'_BIOSFLASH.txt',"wb")
else:
fout = open('temp/' + filename + '_BIOSFLASH.txt', "wb")
fout.write(str.encode(cmd))
session.logfile = fout
print(self.host + ' Running: ' + cmd)
returnCode = 0
timetowait = 240
try:
session.expect('Beginning BIOS Update', timeout=600)
print(self.host + ' Uploading BIOS Image')
except:
print(self.host + ' Failed to upload BIOS image')
timetowait = 10
returnCode = returnCode + 1
try:
session.expect('Flashing Firmware Image :', timeout=timetowait)
print(self.host + ' Flashing BIOS Image')
except:
print(self.host + ' Failed to flash BIOS image')
timetowait = 10
returnCode = returnCode + 2
try:
session.expect('Verifying Firmware Image :', timeout=timetowait)
print(self.host + ' Verifying BIOS Image')
session.expect('done', timeout=timetowait)
except:
print(self.host + ' Failed to verify BIOS image. Please wait for yafuflash to exit cleanly.')
timetowait = 10
returnCode = returnCode + 4
# Wait for Yafuflash to exit
session.wait()
if returnCode < 1:
print(self.host + ' Successfully flashed BIOS')
else:
print(self.host + ' Failed to flash BIOS')
fout.close()
self.stopTunnel()
return returnCode
def cmcFlash(self, file):
print(self.host + ' doesn\'t support CMC flashing.')
def cpldFlash(self, file):
if 'win' in sys.platform:
cmd = 'Yafuflash2 -nw -vyes -fb -host ' + self.host + ' -u ' + self.username + ' -p ' + self.password + ' -d 4 ' + file
else:
# Yafuflash2 doesn't support IPv6 Link-Local Address. Adding Tunnel
self.startTunnel(['623'])
cmd = 'Yafuflash2 -nw -vyes -fb -host ' + self.host.split('%')[0].split('fe80')[1] + ' -u ' + self.username + ' -p ' + self.password + ' -d 4 ' + file
# session = PopenSpawn(cmd, maxread=2000)
session = self.spawn(cmd, maxread=2000)
filename = re.sub('[%:]', '.', self.host.split('%')[0])
if 'win' in sys.platform:
fout = open('temp\\' + filename + '_CPLDFLASH.txt', "wb")
else:
fout = open('temp/' + filename + '_CPLDFLASH.txt', "wb")
fout.write(str.encode(cmd))
session.logfile = fout
print(self.host + ' Running: ' + cmd)
returnCode = 0
timetowait = 240
try:
session.expect('Beginning CPLD Update', timeout=600)
print(self.host + ' Uploading CPLD Image')
except:
print(self.host + ' Failed to upload CPLD image')
timetowait = 10
returnCode = returnCode + 1
try:
session.expect('Flashing Firmware Image :', timeout=timetowait)
print(self.host + ' Flashing CPLD Image')
except:
print(self.host + ' Failed to flash CPLD image')
timetowait = 10
returnCode = returnCode + 2
try:
session.expect('Verifying Firmware Image :', timeout=timetowait)
print(self.host + ' Verifying CPLD Image')
session.expect('done', timeout=timetowait)
except:
print(self.host + ' Failed to verify CPLD image. Please wait for yafuflash to exit cleanly.')
timetowait = 10
returnCode = returnCode + 4
# Wait for Yafuflash to exit
session.wait()
if returnCode < 1:
print(self.host + ' Successfully flashed CPLD')
else:
print(self.host + ' Failed to flash CPLD')
fout.close()
self.stopTunnel()
return returnCode
class D52B(QuantaSkylake):
    """Quanta D52B: a 1U Skylake node."""

    def __init__(self, host, username, password):
        super().__init__(host, username, password)
        self.model = "D52B"        # platform model name
        self.Usize = 1             # rack height in U
        # Presumably the PCI bus location of the OCP mezzanine slot — verify.
        self.OCPpciloc = 'af:00'
class DS120(D52B):
    """DS120: a D52B-based 1U node; differs only in model name."""

    def __init__(self, host, username, password):
        super().__init__(host, username, password)
        self.model = "DS120"
        self.Usize = 1
class DS220(D52B):
    """DS220: a D52B-based node in a 2U form factor."""

    def __init__(self, host, username, password):
        super().__init__(host, username, password)
        self.model = "DS220"
        self.Usize = 2
class D52BV(QuantaSkylake):
    """Quanta D52BV: a 1U Skylake node (no OCP PCI location set)."""

    def __init__(self, host, username, password):
        super().__init__(host, username, password)
        self.model = "D52BV"
        self.Usize = 1
class DS225(D52BV):
    """DS225: a D52BV-based node in a 2U form factor."""

    def __init__(self, host, username, password):
        # BUG FIX: this previously called D52B.__init__ (a different platform
        # family), which set D52B-specific attributes such as OCPpciloc on a
        # D52BV-derived node. Initialize through the actual base class.
        D52BV.__init__(self, host, username, password)
        self.model = "DS225"
        self.Usize = 2
class Q72D(QuantaSkylake):
    """Quanta Q72D: a 2U multi-node chassis node with CMC flash support."""

    def __init__(self, host, username, password):
        QuantaSkylake.__init__(self, host, username, password)
        self.model = "Q72D"
        self.Usize = 2

    # CMC Flashing only applies to the Q72D Nodes
    def cmcFlash(self, file):
        """Flash a CMC (chassis controller) image with Yafuflash2 (-d 0x20).

        Progress is scraped from the tool's output and mirrored into
        temp/<host>_CMCFLASH.txt. Returns a bitmask: 0 = success,
        +1 upload failed, +2 flash failed, +4 verify failed.
        """
        if 'win' in sys.platform:
            cmd = 'Yafuflash2 -nw -vyes -fb -host ' + self.host + ' -u ' + self.username + ' -p ' + self.password + ' -d 0x20 ' + file
        else:
            # Yafuflash2 doesn't support IPv6 Link-Local Address. Adding Tunnel
            self.startTunnel(['623'])
            cmd = 'Yafuflash2 -nw -vyes -fb -host ' + self.host.split('%')[0].split('fe80')[1] + ' -u ' + self.username + ' -p ' + self.password + ' -d 0x20 ' + file
        # session = PopenSpawn(cmd, maxread=2000)
        session = self.spawn(cmd, maxread=2000)
        # Build a filesystem-safe log name: strip the zone index, replace '%'/':'.
        filename = re.sub('[%:]', '.', self.host.split('%')[0])
        if 'win' in sys.platform:
            fout = open('temp\\' + filename +'_CMCFLASH.txt',"wb")
        else:
            fout = open('temp/' + filename + '_CMCFLASH.txt', "wb")
        fout.write(str.encode(cmd))
        session.logfile = fout
        print(self.host + ' Running: ' + cmd)
        returnCode = 0
        timetowait = 240
        try:
            session.expect('Beginning BIC Update', timeout=600)
            print(self.host + ' Uploading CMC Image')
        # BUG FIX: bare 'except:' also trapped KeyboardInterrupt/SystemExit
        # during the long expect() waits; narrowed to Exception throughout.
        except Exception:
            print(self.host + ' Failed to upload CMC image')
            # After the first failure, time out the remaining stages quickly.
            timetowait = 10
            returnCode = returnCode + 1
        try:
            session.expect('Flashing Firmware Image :', timeout=timetowait)
            print(self.host + ' Flashing CMC Image')
        except Exception:
            print(self.host + ' Failed to flash CMC image')
            timetowait = 10
            returnCode = returnCode + 2
        try:
            session.expect('Verifying Firmware Image :', timeout=timetowait)
            print(self.host + ' Verifying CMC Image')
            session.expect('done', timeout=timetowait)
        except Exception:
            print(self.host + ' Failed to verify CMC image. Please wait for yafuflash to exit cleanly.')
            timetowait = 10
            returnCode = returnCode + 4
        # Wait for Yafuflash to exit
        session.wait()
        if returnCode < 1:
            print(self.host + ' Successfully flashed CMC')
        else:
            print(self.host + ' Failed to flash CMC')
        fout.close()
        self.stopTunnel()
        return returnCode

    # DOESN'T SUPPORT "ipmitool sel time set" COMMAND!!! UGHHHH!!!!!! Using Redfish Instead
    def setTime(self):
        """Set the BMC SEL clock via the Redfish LogServices endpoint.

        Prints the timestamp on HTTP 204, a failure message otherwise.
        NOTE(review): local time is labeled with a '+00:00' offset — confirm
        whether UTC was intended here.
        """
        nowtime = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
        timezone = '+00:00'
        senddata = '{"DateTime": "' + nowtime + timezone + '", "DateTimeLocalOffset": "' + timezone + '"}'
        session = None
        try:
            session = requests.patch(self.redfishapi2 + 'Managers/Self/LogServices/SEL', auth=(self.username, self.password),
                                     verify=False, headers=self.redfishheader,
                                     data=senddata)
        except Exception:
            # BUG FIX: the original swallowed the exception but left 'session'
            # unbound, so the status check below raised NameError. Fall through
            # to the failure message instead.
            pass
        if session is not None and session.status_code == 204:
            print(self.host + ' ' + nowtime)
        else:
            print(self.host + ' Failed to set time.')
class DS240(Q72D):
    """DS240: a Q72D-based 2U node; differs only in model name."""

    def __init__(self, host, username, password):
        super().__init__(host, username, password)
        self.model = "DS240"
        self.Usize = 2
|
15,398 | 0321ed32197d1a1623699d7f708c73371f10beb5 | import argparse
import json
import os
import warnings
import datetime
class GatherOptions():
    """Build and parse command-line options for the scoring/thinking game.

    parse() returns the argparse Namespace and also persists it as JSON
    under <save_dir>/opt.json.
    """

    def __init__(self):
        parser = argparse.ArgumentParser(description="Scoring thinking game")
        current_time = datetime.datetime.now()
        # Raw string: '\l' / '\{' are invalid escape sequences in a normal
        # string literal (DeprecationWarning); the produced value is unchanged.
        parser.add_argument("--save_dir", default=(r"Thinking\log\{:%Y%m%d_%H_%M}".format(current_time)), help="path for saving")
        parser.add_argument("--model_name", default="cc.zh.300.bin", help="Model name")
        parser.add_argument("--game", default=("oneimagetest"), help="Choose thinking game")
        # type=int so command-line values match the integer defaults.
        parser.add_argument("--k_sim", default=10, type=int, help="Select topk similar word from fasttext model")
        parser.add_argument("--_layer_depth", default=7, type=int, help="Set Ehow tree depth. (the lower number, easier)")
        parser.add_argument("--host", default='', help="Host")
        parser.add_argument("--user", default='', help="User name")
        parser.add_argument("--password", default='', help="Password")
        # BUG FIX: default='' with type=int made argparse call int('') on the
        # default and abort every parse; None means "no port given".
        parser.add_argument("--port", default=None, type=int, help="Port")
        parser.add_argument("--db", default='', help="Select database")
        self.parser = parser

    def parse(self, argv=None):
        """Parse *argv* (or sys.argv when None) and persist the options.

        BUG FIX: the original branched the wrong way and ignored an
        explicitly supplied argv list (e.g. from a Jupyter notebook).
        parse_args(None) already falls back to sys.argv, so one call suffices.
        """
        opt = self.parser.parse_args(argv)
        self.opt = opt
        self.config_path = os.path.join(opt.save_dir, 'opt.json')
        os.makedirs(opt.save_dir, exist_ok=True)
        with open(self.config_path, 'w') as f:
            json.dump(self.opt.__dict__, f)
        return opt
|
15,399 | 8397fb9f846139dc5b19cc9d43f01afad0a26207 | #Задача 4. Вариант 11.
#Напишите программу, которая выводит имя, под которым скрывается Йоханнес Бруфельдт. Дополнительно необходимо вывести область интересов указанной личности, место рождения, годы рождения и смерти (если человек умер), вычислить возраст на данный момент (или момент смерти). Для хранения всех необходимых данных требуется использовать переменные. После вывода информации программа должна дожидаться пока пользователь нажмет Enter для выхода.
#Лохина Л. М.
#12.03.2016
# Prints biographical facts about Johannes Brofeldt, better known as the
# Finnish writer and journalist Juhani Aho, then waits for Enter to exit.
print("Йоханнес Бруфельдт более известен, как писатель, журналист Юхани Ахо.")

birthplace = "Лапинлахти, Финляндия"  # place of birth
birth = 1861                          # year of birth
death = 1921                          # year of death
# Hardcoded age at death; death - birth = 60, so presumably the birthday
# had not yet occurred in 1921 — verify against the biography.
age = 59
interests = "журналистика"            # main field of interest

print(f"Место рождения: {birthplace}")
print(f"Год рождения: {birth}")
print(f"Год смерти: {death}")
print(f"Возраст: {age}")
print(f"Область интересов: {interests}")

# Keep the console window open until the user presses Enter.
input("\n\nНажмите Enter для выхода.")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.